code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package collections
import (
"fmt"
"math/rand"
)
// ImplicitTreap is a sequence container backed by a treap keyed by
// position (an "implicit" key). It supports O(log n) expected-time
// positional access, deletion, split and merge.
type ImplicitTreap[T comparable] struct {
	Root *ImplicitTreapNode[T]
}

// ImplicitTreapNode is a single node of an implicit treap.
type ImplicitTreapNode[T comparable] struct {
	Left     *ImplicitTreapNode[T]
	Right    *ImplicitTreapNode[T]
	Value    T
	Size     int     // number of nodes in the subtree rooted here
	Priority float64 // heap priority, drawn uniformly at random
}

// NewImplicitTreapNode returns a single-node treap holding value,
// with a fresh random priority.
func NewImplicitTreapNode[T comparable](value T) *ImplicitTreapNode[T] {
	return &ImplicitTreapNode[T]{
		Value:    value,
		Size:     1,
		Priority: rand.Float64(),
	}
}

// NewImplicitTreap builds a treap containing values in the given order.
func NewImplicitTreap[T comparable](values ...T) *ImplicitTreap[T] {
	return &ImplicitTreap[T]{NewImplicitTreapRoot(values...)}
}

// NewImplicitTreapRoot builds a treap over values by recursively placing the
// middle element at the root (a balanced BST by position), then sifting
// priorities down so the max-heap invariant holds.
func NewImplicitTreapRoot[T comparable](values ...T) *ImplicitTreapNode[T] {
	if len(values) == 0 {
		return nil
	}
	mid := len(values) / 2
	newNode := NewImplicitTreapNode(values[mid])
	newNode.Left = NewImplicitTreapRoot(values[:mid]...)
	newNode.Right = NewImplicitTreapRoot(values[mid+1:]...)
	newNode.updatePriorities()
	newNode.updateSize()
	return newNode
}

// String renders the treap's elements in positional order.
func (n *ImplicitTreap[T]) String() string {
	return n.Root.String()
}

// String renders the subtree in-order; each element is followed by a space.
// A nil subtree renders as the empty string.
func (n *ImplicitTreapNode[T]) String() string {
	if n == nil {
		return ""
	}
	return fmt.Sprintf("%v%v %v", n.Left, n.Value, n.Right)
}

// effectiveSize returns the subtree size, treating nil as 0.
func (n *ImplicitTreapNode[T]) effectiveSize() int {
	if n == nil {
		return 0
	}
	return n.Size
}

// updateSize recomputes n.Size from the children. Safe on nil receivers.
func (n *ImplicitTreapNode[T]) updateSize() {
	if n != nil {
		n.Size = n.Left.effectiveSize() + 1 + n.Right.effectiveSize()
	}
}

// updatePriorities restores the max-heap property by sifting the largest
// priority up to n and recursing into the child it was swapped with.
// Only priorities move; the tree structure is untouched.
func (n *ImplicitTreapNode[T]) updatePriorities() {
	if n == nil {
		return
	}
	max := n
	if n.Left != nil && n.Left.Priority > max.Priority {
		max = n.Left
	}
	if n.Right != nil && n.Right.Priority > max.Priority {
		max = n.Right
	}
	if max != n {
		max.Priority, n.Priority = n.Priority, max.Priority
		max.updatePriorities()
	}
}

// DeleteAt removes the element at position pos (0-based).
func (it *ImplicitTreap[T]) DeleteAt(pos int) {
	it.Root = it.Root.DeleteAt(pos)
}

// DeleteAt removes the element at position pos within the subtree rooted at
// p and returns the new subtree root.
//
// FIX: the original had the two recursion branches swapped — when the target
// was in the right subtree it recursed left with an unchanged index, and
// when the target was in the left subtree it recursed right with a negative
// index — so deletes at any position other than the root's own index did
// nothing (or corrupted the tree).
func (p *ImplicitTreapNode[T]) DeleteAt(pos int) *ImplicitTreapNode[T] {
	if p == nil {
		return p
	}
	leftSize := p.Left.effectiveSize()
	switch {
	case leftSize == pos:
		// p itself is the target: replace it by the merge of its children.
		return p.Left.Merge(p.Right)
	case pos < leftSize:
		p.Left = p.Left.DeleteAt(pos)
	default:
		// Skip the left subtree and p itself when descending right.
		p.Right = p.Right.DeleteAt(pos - leftSize - 1)
	}
	p.updateSize()
	return p
}

// Append adds item at the end of the sequence.
func (it *ImplicitTreap[T]) Append(item T) {
	right := NewImplicitTreapNode(item)
	it.Root = it.Root.Merge(right)
}

// Merge appends all elements of r after the elements of l.
func (l *ImplicitTreap[T]) Merge(r *ImplicitTreap[T]) {
	l.Root = l.Root.Merge(r.Root)
}

// Merge joins two treaps where every element of l precedes every element of
// r, choosing the root by priority to keep the heap invariant.
func (l *ImplicitTreapNode[T]) Merge(r *ImplicitTreapNode[T]) (p *ImplicitTreapNode[T]) {
	if l == nil {
		p = r
	} else if r == nil {
		p = l
	} else if l.Priority > r.Priority {
		l.Right = l.Right.Merge(r)
		p = l
	} else {
		r.Left = l.Merge(r.Left)
		p = r
	}
	p.updateSize()
	return p
}
// Split detaches the treap into two: l holds elements at positions
// [0, key) and r holds elements at positions [key, n).
// The receiver's root is cleared afterwards, so it must not be reused.
func (it *ImplicitTreap[T]) Split(key int) (l *ImplicitTreap[T], r *ImplicitTreap[T]) {
	ln, rn := it.Root.Split(key, 0)
	l = &ImplicitTreap[T]{ln}
	r = &ImplicitTreap[T]{rn}
	it.Root = nil
	return
}

// Split splits the subtree rooted at p around absolute position key.
// offset is the number of elements located to the left of this whole
// subtree, so position == offset + size(left) is p's own absolute index.
// Nodes at positions >= key end up in r, the rest in l.
func (p *ImplicitTreapNode[T]) Split(key, offset int) (l *ImplicitTreapNode[T], r *ImplicitTreapNode[T]) {
	if p == nil {
		return
	}
	position := offset + p.Left.effectiveSize()
	if key <= position {
		// p belongs to the right part; keep splitting its left subtree.
		l, p.Left = p.Left.Split(key, offset)
		r = p
	} else {
		// p belongs to the left part; keep splitting its right subtree,
		// which starts position+1 elements in.
		p.Right, r = p.Right.Split(key, position+1)
		l = p
	}
	p.updateSize()
	return
}
func (it *ImplicitTreap[T]) At(key int) T {
return it.Root.At(key)
}
func (n *ImplicitTreapNode[T]) At(key int) T {
leftSize := n.Left.effectiveSize()
if key == leftSize {
return n.Value
} else if key < leftSize {
return n.Left.At(key)
} else {
return n.Right.At(key - leftSize - 1)
}
} | implicit_treap.go | 0.581778 | 0.473109 | implicit_treap.go | starcoder |
package main
import (
"fmt"
)
// tile is a single walkable cell of the maze.
type tile struct {
	x       int
	y       int
	visited bool   // set once the tile has been expanded by bfs
	name    string // two-letter portal label, if any ("AA"/"ZZ" mark entrance/exit)
}

// allTiles holds every walkable tile discovered by parseInput.
var allTiles []tile

// edges is the adjacency list of the maze graph, including portal jumps.
var edges map[*tile][]*tile
/*Part1 runs the code for part 1 of this puzzle*/
func Part1(fname string) {
	// Reset package-level graph state so Part1 can be run repeatedly.
	allTiles = make([]tile, 0)
	edges = make(map[*tile][]*tile)
	maze := GetInput(fname)
	t := parseInput(maze)
	fmt.Println("Parsed maze! ", t)
	depth := bfs(t)
	fmt.Println("[Part 1] The maze exit can be reached in", depth, "steps")
}
// bfs performs a breadth-first search over the global edges adjacency map,
// starting from t, and returns the number of layers traversed before
// reaching the tile named "ZZ" (the maze exit). Panics when no path exists.
func bfs(t *tile) int {
	depth := 0
	// queue holds the current BFS layer; nextLayer accumulates the next one.
	queue := make([]*tile, 1)
	queue[0] = t
	nextLayer := make([]*tile, 0)
	for len(queue) > 0 {
		currentTile := queue[0]
		if currentTile.name == "ZZ" {
			return depth
		}
		neighbors := edges[currentTile]
		for _, neighbor := range neighbors {
			if !neighbor.visited {
				// NOTE(review): a tile can be appended to nextLayer more than
				// once, since it is only marked visited when dequeued — this
				// does not change the depth result but causes redundant work.
				nextLayer = append(nextLayer, neighbor)
			}
		}
		currentTile.visited = true
		if len(queue) == 1 {
			// Last tile of the current layer: advance to the next layer.
			queue = nextLayer
			nextLayer = nil
			depth++
		} else {
			queue = queue[1:]
		}
	}
	panic("No exit found!")
}
// parseInput scans the maze grid, records every walkable "." cell in
// allTiles, wires up the adjacency map (direct neighbors plus portal
// jumps), and returns a pointer to the entrance tile (labeled "AA").
func parseInput(input [][]string) *tile {
	var entrance *tile
	// first, find all the tiles
	for row := range input {
		for col := range input[row] {
			if input[row][col] == "." {
				allTiles = append(allTiles, tile{x: col, y: row})
			}
		}
	}
	for i := range allTiles {
		t := &allTiles[i]
		right := getRight(t, &input)
		left := getLeft(t, &input)
		up := getUp(t, &input)
		down := getDown(t, &input)
		// names
		// A two-character result means the adjacent cells spell a portal
		// name; link this tile to the matching portal tile ("AA"/"ZZ" are
		// entrance/exit markers, not jumps).
		// NOTE(review): when this tile is processed before its portal
		// partner, getNamedTile finds this very tile (just named above), so
		// a self-edge is recorded — harmless for bfs, but confirm intended.
		if len(right) == 2 {
			t.name = right
			if right != "AA" && right != "ZZ" {
				neighbor := getNamedTile(right)
				addEdge(t, neighbor)
			}
		} else if len(left) == 2 {
			t.name = left
			if left != "AA" && left != "ZZ" {
				neighbor := getNamedTile(left)
				addEdge(t, neighbor)
			}
		} else if len(up) == 2 {
			t.name = up
			if up != "AA" && up != "ZZ" {
				neighbor := getNamedTile(up)
				addEdge(t, neighbor)
			}
		} else if len(down) == 2 {
			t.name = down
			if down != "AA" && down != "ZZ" {
				neighbor := getNamedTile(down)
				addEdge(t, neighbor)
			}
		}
		if t.name == "AA" {
			entrance = t
		}
		// Direct neighbors: a "." cell in any of the four directions.
		if right == "." {
			neighbor := getTile(t.x+1, t.y)
			addEdge(t, neighbor)
		}
		if left == "." {
			neighbor := getTile(t.x-1, t.y)
			addEdge(t, neighbor)
		}
		if up == "." {
			neighbor := getTile(t.x, t.y-1)
			addEdge(t, neighbor)
		}
		if down == "." {
			neighbor := getTile(t.x, t.y+1)
			addEdge(t, neighbor)
		}
	}
	return entrance
}
// addEdge registers an undirected edge between from and to in the global
// edges adjacency map. Nil endpoints are ignored and duplicate edges are
// not recorded twice.
func addEdge(from *tile, to *tile) {
	if from == nil || to == nil {
		return
	}
	// link inserts b into a's adjacency list unless it is already there.
	link := func(a, b *tile) {
		if edges[a] == nil {
			edges[a] = make([]*tile, 0)
		}
		for _, existing := range edges[a] {
			if existing == b {
				return
			}
		}
		edges[a] = append(edges[a], b)
	}
	link(from, to)
	link(to, from)
}
// getTile returns a pointer to the tile at coordinates (x, y).
// Panics when no such tile exists.
func getTile(x int, y int) *tile {
	for i := range allTiles {
		candidate := &allTiles[i]
		if candidate.x == x && candidate.y == y {
			return candidate
		}
	}
	panic("Tried to get pointer to nonexistent tile")
}

// getNamedTile returns a pointer to the first tile carrying the given
// portal name, or nil when no tile has that name.
func getNamedTile(name string) *tile {
	for i := range allTiles {
		candidate := &allTiles[i]
		if candidate.name == name {
			return candidate
		}
	}
	return nil
}
// getRight returns the map symbol to the right of t: "." or "#" for maze
// cells, or the two-letter portal name (read left-to-right) when the cell
// is a portal letter. Returns "#" when t is on the rightmost edge.
// NOTE(review): indexing row[t.x+2] assumes portal labels never touch the
// grid border — TODO confirm for this puzzle's input format.
func getRight(t *tile, maze *[][]string) string {
	row := (*maze)[t.y]
	if t.x >= len(row)-1 {
		// we're already at the rightmost edge
		return "#"
	}
	char := row[t.x+1]
	if char == "." || char == "#" {
		return char
	}
	// otherwise, it's a two letter symbol indicating a portal
	return char + row[t.x+2]
}

// getLeft returns the map symbol to the left of t: "." or "#" for maze
// cells, or the two-letter portal name (read left-to-right, hence the
// reversed concatenation) when the cell is a portal letter. Returns "#"
// when t is on the leftmost edge.
func getLeft(t *tile, maze *[][]string) string {
	row := (*maze)[t.y]
	if t.x == 0 {
		// we're already at the leftmost edge
		return "#"
	}
	char := row[t.x-1]
	if char == "." || char == "#" {
		return char
	}
	// otherwise, it's a two letter symbol indicating a portal
	return row[t.x-2] + char
}
// getUp returns the map symbol above t: "." or "#" for maze cells, or the
// two-letter portal name (read top-to-bottom) when the cell above is a
// portal letter. Returns "#" when t is on the top edge.
func getUp(t *tile, maze *[][]string) string {
	if t.y == 0 {
		return "#"
	}
	char := (*maze)[t.y-1][t.x]
	if char == "." || char == "#" {
		return char
	}
	// otherwise, it's a two letter symbol indicating a portal
	return (*maze)[t.y-2][t.x] + char
}
func getDown(t *tile, maze *[][]string) string {
if t.y >= len(*maze)-1 {
return "#"
}
char := (*maze)[t.y+1][t.x]
if char == "." || char == "#" {
return char
}
// otherwise, it's a two letter symbol indicating a portal
return char + (*maze)[t.y+2][t.x]
} | puzzle20/part1.go | 0.544559 | 0.410993 | part1.go | starcoder |
package chronometer
import "time"
// Now returns a new timestamp.
// The value is taken from the wall clock and normalized to UTC.
func Now() time.Time {
	return time.Now().UTC()
}

// Since returns the duration since another timestamp.
func Since(t time.Time) time.Duration {
	return Now().Sub(t)
}
// Min returns the minimum of two times.
func Min(t1, t2 time.Time) time.Time {
if t1.Before(t2) {
return t1
}
return t2
}
// Max returns the maximum of two times.
func Max(t1, t2 time.Time) time.Time {
if t1.Before(t2) {
return t2
}
return t1
}
// FormatTime returns a string for a time.
func FormatTime(t time.Time) string {
return t.Format(time.RFC3339)
}
// OptionalUInt8 Returns a pointer to a copy of the value
func OptionalUInt8(v uint8) *uint8 {
	return &v
}

// OptionalUInt16 Returns a pointer to a copy of the value
func OptionalUInt16(v uint16) *uint16 {
	return &v
}

// OptionalUInt Returns a pointer to a copy of the value
func OptionalUInt(v uint) *uint {
	return &v
}

// OptionalUInt64 Returns a pointer to a copy of the value
func OptionalUInt64(v uint64) *uint64 {
	return &v
}

// OptionalInt16 Returns a pointer to a copy of the value
func OptionalInt16(v int16) *int16 {
	return &v
}

// OptionalInt Returns a pointer to a copy of the value
func OptionalInt(v int) *int {
	return &v
}

// OptionalInt64 Returns a pointer to a copy of the value
func OptionalInt64(v int64) *int64 {
	return &v
}

// OptionalFloat32 Returns a pointer to a copy of the value
func OptionalFloat32(v float32) *float32 {
	return &v
}

// OptionalFloat64 Returns a pointer to a copy of the value
func OptionalFloat64(v float64) *float64 {
	return &v
}

// OptionalString Returns a pointer to a copy of the value
func OptionalString(v string) *string {
	return &v
}

// OptionalBool Returns a pointer to a copy of the value
func OptionalBool(v bool) *bool {
	return &v
}

// OptionalTime Returns a pointer to a copy of the value
func OptionalTime(v time.Time) *time.Time {
	return &v
}

// OptionalDuration Returns a pointer to a copy of the value
func OptionalDuration(v time.Duration) *time.Duration {
	return &v
}
package apivideosdk
import (
//"encoding/json"
)
// Watermark struct for Watermark
type Watermark struct {
// The unique identifier of the watermark.
WatermarkId *string `json:"watermarkId,omitempty"`
// When the watermark was created, presented in ISO-8601 format.
CreatedAt *string `json:"createdAt,omitempty"`
}
// NewWatermark instantiates a new Watermark object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewWatermark() *Watermark {
this := Watermark{}
return &this
}
// NewWatermarkWithDefaults instantiates a new Watermark object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewWatermarkWithDefaults() *Watermark {
this := Watermark{}
return &this
}
// GetWatermarkId returns the WatermarkId field value if set, zero value otherwise.
func (o *Watermark) GetWatermarkId() string {
if o == nil || o.WatermarkId == nil {
var ret string
return ret
}
return *o.WatermarkId
}
// GetWatermarkIdOk returns a tuple with the WatermarkId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Watermark) GetWatermarkIdOk() (*string, bool) {
if o == nil || o.WatermarkId == nil {
return nil, false
}
return o.WatermarkId, true
}
// HasWatermarkId returns a boolean if a field has been set.
func (o *Watermark) HasWatermarkId() bool {
if o != nil && o.WatermarkId != nil {
return true
}
return false
}
// SetWatermarkId gets a reference to the given string and assigns it to the WatermarkId field.
func (o *Watermark) SetWatermarkId(v string) {
o.WatermarkId = &v
}
// GetCreatedAt returns the CreatedAt field value if set, zero value otherwise.
func (o *Watermark) GetCreatedAt() string {
if o == nil || o.CreatedAt == nil {
var ret string
return ret
}
return *o.CreatedAt
}
// GetCreatedAtOk returns a tuple with the CreatedAt field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Watermark) GetCreatedAtOk() (*string, bool) {
if o == nil || o.CreatedAt == nil {
return nil, false
}
return o.CreatedAt, true
}
// HasCreatedAt returns a boolean if a field has been set.
func (o *Watermark) HasCreatedAt() bool {
if o != nil && o.CreatedAt != nil {
return true
}
return false
}
// SetCreatedAt gets a reference to the given string and assigns it to the CreatedAt field.
func (o *Watermark) SetCreatedAt(v string) {
o.CreatedAt = &v
}
type NullableWatermark struct {
value *Watermark
isSet bool
}
func (v NullableWatermark) Get() *Watermark {
return v.value
}
func (v *NullableWatermark) Set(val *Watermark) {
v.value = val
v.isSet = true
}
func (v NullableWatermark) IsSet() bool {
return v.isSet
}
func (v *NullableWatermark) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableWatermark(val *Watermark) *NullableWatermark {
return &NullableWatermark{value: val, isSet: true}
} | model_watermark.go | 0.739422 | 0.418162 | model_watermark.go | starcoder |
package cdp
import (
"errors"
"log"
"math/big"
)
// CDP represents a CDP
type CDP struct {
ID int64
BytesID [32]byte
DaiDebt *big.Float
PethCol *big.Float
EthCol *big.Float
}
// GetRatio returns the collateralization ratio of the CDP at the actual price and Peth / Eth ratio
func (cdp *CDP) GetRatio(ethPrice *big.Float, pethRatio *big.Float) *big.Float {
ecol := new(big.Float).Mul(cdp.PethCol, pethRatio)
dcol := new(big.Float).Mul(ecol, ethPrice)
if cdp.DaiDebt.Cmp(big.NewFloat(0.0)) == 0 {
return nil
}
ratio := new(big.Float).Quo(dcol, cdp.DaiDebt)
return ratio
}
// GetMaxEthToFree returns the maximum number of eth to free while keeping
// the position at the 1.5x collateralization floor (collateral = 1.5 * debt).
func (cdp *CDP) GetMaxEthToFree(ethPrice *big.Float) *big.Float {
	if cdp.PethCol.Cmp(big.NewFloat(0.0)) == 0 {
		return nil // nothing available to free!
	}
	edebt := new(big.Float).Quo(cdp.DaiDebt, ethPrice) // debt expressed in ETH
	finalEcol := new(big.Float).Mul(edebt, big.NewFloat(1.5))
	ethToFree := new(big.Float).Sub(cdp.EthCol, finalEcol)
	return ethToFree
}

// GetEthToFree returns the number of eth to free to go to the target ratio,
// capped at 95% of the maximum amount freeable at the 1.5x floor.
func (cdp *CDP) GetEthToFree(ethPrice, target *big.Float) *big.Float {
	if cdp.EthCol.Cmp(big.NewFloat(0.0)) == 0 {
		return nil // nothing available to free!
	}
	edebt := new(big.Float).Quo(cdp.DaiDebt, ethPrice)
	net := new(big.Float).Sub(cdp.EthCol, edebt) // net equity in ETH
	// Solving col = target*debt with net = col - debt gives
	// debt = net/(target-1) and col = debt*target.
	target2 := new(big.Float).Sub(target, big.NewFloat(1.0))
	finalDebt := new(big.Float).Quo(net, target2)
	finalEcol := new(big.Float).Mul(finalDebt, target)
	ethToFree := new(big.Float).Sub(cdp.EthCol, finalEcol)
	max := cdp.GetMaxEthToFree(ethPrice)
	limit := new(big.Float).Mul(max, big.NewFloat(0.95))
	if ethToFree.Cmp(limit) > 0 {
		ethToFree = limit
	}
	return ethToFree
}
// GetMaxPethToFree returns the maximum number of peth to free while keeping
// the position at the 1.5x collateralization floor.
func (cdp *CDP) GetMaxPethToFree(ethPrice *big.Float, pethRatio *big.Float) *big.Float {
	if cdp.PethCol.Cmp(big.NewFloat(0.0)) == 0 {
		return nil // nothing available to free!
	}
	edebt := new(big.Float).Quo(cdp.DaiDebt, ethPrice)  // debt in ETH
	pdebt := new(big.Float).Quo(edebt, pethRatio)       // debt in PETH
	finalPcol := new(big.Float).Mul(pdebt, big.NewFloat(1.5))
	pethToFree := new(big.Float).Sub(cdp.PethCol, finalPcol)
	return pethToFree
}

// GetPethToFree returns the number of peth to free to go to the target
// ratio, capped at 95% of the maximum amount freeable at the 1.5x floor.
func (cdp *CDP) GetPethToFree(ethPrice, pethRatio, target *big.Float) *big.Float {
	if cdp.PethCol.Cmp(big.NewFloat(0.0)) == 0 {
		return nil // nothing available to free!
	}
	edebt := new(big.Float).Quo(cdp.DaiDebt, ethPrice)
	pdebt := new(big.Float).Quo(edebt, pethRatio)
	net := new(big.Float).Sub(cdp.PethCol, pdebt) // net equity in PETH
	// Same derivation as GetEthToFree: debt = net/(target-1), col = debt*target.
	target2 := new(big.Float).Sub(target, big.NewFloat(1.0))
	finalDebt := new(big.Float).Quo(net, target2)
	finalPcol := new(big.Float).Mul(finalDebt, target)
	pethToFree := new(big.Float).Sub(cdp.PethCol, finalPcol)
	max := cdp.GetMaxPethToFree(ethPrice, pethRatio)
	limit := new(big.Float).Mul(max, big.NewFloat(0.95))
	if pethToFree.Cmp(limit) > 0 {
		pethToFree = limit
	}
	return pethToFree
}
// GetMaxDaiToDraw returns the maximum number of DAI to draw while keeping
// the position at the 1.5x collateralization floor.
func (cdp *CDP) GetMaxDaiToDraw(ethPrice *big.Float, pethRatio *big.Float) *big.Float {
	ecol := new(big.Float).Mul(cdp.PethCol, pethRatio) // collateral in ETH
	dcol := new(big.Float).Mul(ecol, ethPrice)         // collateral in DAI
	finalDaiDebt := new(big.Float).Quo(dcol, big.NewFloat(1.5))
	daiToDraw := new(big.Float).Sub(finalDaiDebt, cdp.DaiDebt)
	return daiToDraw
}

// GetDaiToDraw returns the number of DAI to draw to go to the target ratio,
// capped at 95% of the maximum drawable at the 1.5x floor.
func (cdp *CDP) GetDaiToDraw(ethPrice, pethRatio, target *big.Float) *big.Float {
	ecol := new(big.Float).Mul(cdp.PethCol, pethRatio)
	dcol := new(big.Float).Mul(ecol, ethPrice)
	net := new(big.Float).Sub(dcol, cdp.DaiDebt) // net equity in DAI
	// debt = net/(target-1) gives the debt at which col/debt == target.
	target2 := new(big.Float).Sub(target, big.NewFloat(1.0))
	finalDebt := new(big.Float).Quo(net, target2)
	daiToDraw := new(big.Float).Sub(finalDebt, cdp.DaiDebt)
	max := cdp.GetMaxDaiToDraw(ethPrice, pethRatio)
	limit := new(big.Float).Mul(max, big.NewFloat(0.95))
	if daiToDraw.Cmp(limit) > 0 {
		daiToDraw = limit
	}
	return daiToDraw
}
// GetChangePrices returns the prices (up and down) where this CDP must be equalized
func (cdp *CDP) GetChangePrices(ethPrice, minRatio, maxRatio, pethRatio *big.Float) (minPrice, maxPrice *big.Float) {
	// The ratio scales linearly with the price, so the trigger prices are
	// the current price scaled by targetRatio / currentRatio.
	ratio := cdp.GetRatio(ethPrice, pethRatio)
	minScale := new(big.Float).Quo(minRatio, ratio)
	maxScale := new(big.Float).Quo(maxRatio, ratio)
	minPrice = new(big.Float).Mul(ethPrice, minScale)
	maxPrice = new(big.Float).Mul(ethPrice, maxScale)
	return minPrice, maxPrice
}
// EqualizeCDP returns a new CDP equalized at targetRatio for a given price.
// It first tries to free collateral (wiping the equivalent debt); when
// nothing can be freed it draws DAI and locks the equivalent collateral.
// The receiver is not modified.
func (cdp *CDP) EqualizeCDP(ethPrice, targetRatio, pethRatio *big.Float) (newCDP *CDP, err error) {
	newCDP = new(CDP)
	newCDP.ID = cdp.ID
	newCDP.BytesID = cdp.BytesID
	ethToFree := cdp.GetEthToFree(ethPrice, targetRatio)
	if ethToFree.Cmp(big.NewFloat(0.0)) > 0 {
		// Over-collateralized: free ETH/PETH and wipe the matching DAI debt.
		daiToWipe := new(big.Float).Mul(ethPrice, ethToFree)
		pethToFree := new(big.Float).Quo(ethToFree, pethRatio)
		newCDP.EthCol = new(big.Float).Sub(cdp.EthCol, ethToFree)
		newCDP.PethCol = new(big.Float).Sub(cdp.PethCol, pethToFree)
		newCDP.DaiDebt = new(big.Float).Sub(cdp.DaiDebt, daiToWipe)
	} else {
		// Under-collateralized: draw DAI and lock the matching collateral.
		daiToDraw := cdp.GetDaiToDraw(ethPrice, pethRatio, targetRatio)
		if daiToDraw.Cmp(big.NewFloat(0.0)) < 0 {
			return nil, errors.New("cannot equalize the CDP")
		}
		ethToLock := new(big.Float).Quo(daiToDraw, ethPrice)
		pethToLock := new(big.Float).Quo(ethToLock, pethRatio)
		newCDP.DaiDebt = new(big.Float).Add(cdp.DaiDebt, daiToDraw)
		newCDP.EthCol = new(big.Float).Add(cdp.EthCol, ethToLock)
		newCDP.PethCol = new(big.Float).Add(cdp.PethCol, pethToLock)
	}
	return newCDP, nil
}
// GetStatus returns the status of the CDP for this price.
// Every Status field is populated defensively: nil inputs collapse to zero.
// NOTE(review): the target parameter is currently unused — kept for
// interface compatibility; the error result is always nil.
func (cdp *CDP) GetStatus(ethPrice, pethRatio, target *big.Float) (status *Status, err error) {
	status = new(Status)
	status.ID = cdp.ID
	bigZero := *big.NewFloat(0.0)
	daiDebt := Float(bigZero)
	if cdp.DaiDebt != nil {
		daiDebt = Float(*cdp.DaiDebt)
	}
	status.DaiDebt = &daiDebt
	ethCol := Float(bigZero)
	if cdp.EthCol != nil {
		ethCol = Float(*cdp.EthCol)
	}
	status.EthCol = &ethCol
	price := Float(bigZero)
	if ethPrice != nil {
		price = Float(*ethPrice)
	}
	status.Price = &price
	ratio := Float(bigZero)
	// FIX: compute the ratio once; the original called GetRatio a second
	// time just to dereference the result.
	if r := cdp.GetRatio(ethPrice, pethRatio); r != nil {
		ratio = Float(*r)
	}
	status.Ratio = &ratio
	// Net position: DAI value of the ETH collateral minus the debt.
	dcol := new(big.Float).Mul(cdp.EthCol, ethPrice)
	net := new(big.Float).Sub(dcol, cdp.DaiDebt)
	daiNet := Float(bigZero)
	if net != nil {
		daiNet = Float(*net)
	}
	status.DaiNet = &daiNet
	enet := new(big.Float).Quo(net, ethPrice)
	ethNet := Float(bigZero)
	if enet != nil {
		ethNet = Float(*enet)
	}
	status.EthNet = &ethNet
	return status, nil
}
// Log print infos on the CDP in the logs
func (cdp *CDP) Log(ethPrice, pethRatio, target *big.Float) {
ecol := new(big.Float).Mul(cdp.PethCol, pethRatio)
dcol := new(big.Float).Mul(ecol, ethPrice)
net := new(big.Float).Sub(dcol, cdp.DaiDebt)
log.Printf("CDP #%d status:\n", cdp.ID)
log.Printf("\tDebt : %.2f DAI", cdp.DaiDebt)
log.Printf("\tCol : %.5f PETH", cdp.PethCol)
log.Printf("\t : %.5f ETH", ecol)
log.Printf("\tETH price : %.5f DAI", ethPrice)
log.Printf("\tPETH / ETH : %.5f ", pethRatio)
log.Printf("\tRatio : %.2f %%", new(big.Float).Mul(cdp.GetRatio(ethPrice, pethRatio), big.NewFloat(100.0)))
log.Printf("\tNet Value : %.2f DAI", net)
log.Printf("\t : %.5f ETH", new(big.Float).Quo(net, ethPrice))
log.Printf("\tDraw : %.2f DAI", cdp.GetDaiToDraw(ethPrice, pethRatio, target))
log.Printf("\tFree : %.5f PETH", cdp.GetPethToFree(ethPrice, pethRatio, target))
log.Printf("\t : %.5f ETH", new(big.Float).Mul(cdp.GetPethToFree(ethPrice, pethRatio, target), pethRatio))
log.Println()
} | cdp/types.go | 0.759404 | 0.523238 | types.go | starcoder |
package roadmap
import (
"fmt"
"net/url"
"strings"
"time"
"github.com/peteraba/roadmapper/pkg/colors"
)
// VisualRoadmap represent a roadmap in a way that is prepared for visualization
type VisualRoadmap struct {
Title string
Projects []Project
Milestones []Milestone
Dates *Dates
DateFormat string
}
// ToVisual converts a roadmap to a visual roadmap
// main difference between the two is that the visual roadmap
// will contain calculated values where possible
func (r Roadmap) ToVisual() *VisualRoadmap {
	visual := &VisualRoadmap{}
	visual.Title = r.Title
	visual.Dates = r.ToDates()
	visual.Projects = r.Projects
	visual.Milestones = r.Milestones
	visual.DateFormat = r.DateFormat
	// Fill in derived values (dates, colors, percentages, absolute URLs)...
	visual.calculateProjectDates().calculateProjectColors().calculatePercentages().applyBaseURL(r.BaseURL)
	// ...then propagate colors/deadlines gathered from projects to milestones.
	projectMilestones := visual.collectProjectMilestones()
	visual.applyProjectMilestone(projectMilestones)
	return visual
}
// calculateProjectDates tries to find reasonable dates for all projects
// first it tries to find dates bottom up, meaning that based on the sub-projects
// then it tries to find dates top down, meaning that it will copy over dates from parents
func (vr *VisualRoadmap) calculateProjectDates() *VisualRoadmap {
	// Pass 1: derive dates from sub-projects.
	for i := range vr.Projects {
		p := &vr.Projects[i]
		if p.Dates != nil {
			continue
		}
		p.Dates = vr.findDatesBottomUp(i)
	}
	// Pass 2: whatever is still missing inherits from the nearest ancestor.
	for i := range vr.Projects {
		p := &vr.Projects[i]
		if p.Dates != nil {
			continue
		}
		p.Dates = vr.findDatesTopDown(i)
	}
	return vr
}
// findDatesBottomUp will look for the minimum start date and maximum end
// date among the sub-projects of the project at index start, returning nil
// when no dated sub-project exists. Panics when start is out of range.
func (vr *VisualRoadmap) findDatesBottomUp(start int) *Dates {
	// FIX: the guard used "< start", so start == len(vr.Projects) slipped
	// through to a raw index-out-of-range panic instead of this one.
	if vr.Projects == nil || len(vr.Projects) <= start {
		panic(fmt.Errorf("illegal start %d for finding visual dates", start))
	}
	if vr.Projects[start].Dates != nil {
		return vr.Projects[start].Dates
	}
	// Only projects indented deeper than start belong to its subtree.
	minIndentation := vr.Projects[start].Indentation + 1
	var dates *Dates
	for i := start + 1; i < len(vr.Projects); i++ {
		p := vr.Projects[i]
		if p.Indentation < minIndentation {
			break // left the subtree
		}
		if p.Dates == nil {
			continue
		}
		if dates == nil {
			dates = &Dates{StartAt: p.Dates.StartAt, EndAt: p.Dates.EndAt}
			continue
		}
		if dates.StartAt.After(p.Dates.StartAt) {
			dates.StartAt = p.Dates.StartAt
		}
		if dates.EndAt.Before(p.Dates.EndAt) {
			dates.EndAt = p.Dates.EndAt
		}
	}
	return dates
}
// findDatesTopDown will return the dates of the first (nearest) ancestor of
// the project at index start that has dates, or nil when none does.
// Panics when start is out of range.
func (vr *VisualRoadmap) findDatesTopDown(start int) *Dates {
	// FIX: the guard used "< start", so start == len(vr.Projects) slipped
	// through to a raw index-out-of-range panic instead of this one.
	if vr.Projects == nil || len(vr.Projects) <= start {
		panic(fmt.Errorf("illegal start %d for finding visual dates", start))
	}
	if vr.Projects[start].Dates != nil {
		return vr.Projects[start].Dates
	}
	currentIndentation := vr.Projects[start].Indentation
	var dates *Dates
	// Walk backwards; each project with a smaller indentation is the next
	// ancestor in the chain.
	for i := start - 1; i >= 0; i-- {
		p := vr.Projects[i]
		if p.Indentation >= currentIndentation {
			continue
		}
		if p.Dates != nil {
			return p.Dates
		}
		currentIndentation = p.Indentation
	}
	return dates
}
// calculateProjectColors will set a color for each projects without one
func (vr *VisualRoadmap) calculateProjectColors() *VisualRoadmap {
	// epicCount numbers the top-level projects, taskCount numbers the
	// entries within the current epic; both feed the palette picker.
	epicCount := -1
	taskCount := -1
	for i := range vr.Projects {
		p := &vr.Projects[i]
		if p.Indentation == 0 {
			// New epic: restart the per-epic task numbering.
			epicCount++
			taskCount = -1
		}
		taskCount++
		c := p.Color
		if c == nil {
			c = colors.PickFgColor(epicCount, taskCount, int(p.Indentation))
		}
		p.Color = c
	}
	return vr
}
// calculatePercentages will try to calculate the percentage of all projects without a percentage set bottom up,
// meaning looking at their subprojects
func (vr *VisualRoadmap) calculatePercentages() *VisualRoadmap {
	for i := range vr.Projects {
		p := &vr.Projects[i]
		// 0 doubles as "not set" here, so an explicit 0% cannot be expressed.
		if p.Percentage != 0 {
			continue
		}
		p.Percentage = vr.findPercentageBottomUp(i)
	}
	return vr
}
// findPercentageBottomUp will calculate the average completion percentage
// of the direct sub-projects of the project at index start, recursing into
// sub-projects whose own percentage is unset. Panics when start is out of
// range.
func (vr *VisualRoadmap) findPercentageBottomUp(start int) uint8 {
	// FIX: the guard used "< start", so start == len(vr.Projects) slipped
	// through to a raw index-out-of-range panic instead of this one.
	if vr.Projects == nil || len(vr.Projects) <= start {
		panic(fmt.Errorf("illegal start %d for finding visual dates", start))
	}
	if vr.Projects[start].Percentage != 0 {
		return vr.Projects[start].Percentage
	}
	// Only direct children (exactly one level deeper) count toward the average.
	matchIndentation := vr.Projects[start].Indentation + 1
	// FIX: accumulate in int — the original summed into a uint8, which
	// overflows as soon as three sub-projects near 100% are added
	// (e.g. 3 x 100 = 300 > 255), producing wrong averages.
	var sum, count int
	for i := start + 1; i < len(vr.Projects); i++ {
		p := &vr.Projects[i]
		if p.Indentation < matchIndentation {
			break // left the subtree
		}
		if p.Indentation > matchIndentation {
			continue // grandchild; already reflected in its parent
		}
		if p.Percentage == 0 {
			p.Percentage = vr.findPercentageBottomUp(i)
		}
		sum += int(p.Percentage)
		count++
	}
	if count == 0 {
		return 0
	}
	return uint8(sum / count)
}
// applyBaseURL prefixes every relative project and milestone URL with
// baseUrl, leaving absolute URLs (ones with a scheme and host) untouched.
// A no-op when baseUrl is empty.
func (vr *VisualRoadmap) applyBaseURL(baseUrl string) *VisualRoadmap {
	if baseUrl == "" {
		return vr
	}
	// FIX(consistency): the original duplicated the same loop body for
	// projects and milestones; the shared logic now lives in qualifyURLs.
	for i := range vr.Projects {
		qualifyURLs(vr.Projects[i].URLs, baseUrl)
	}
	for i := range vr.Milestones {
		qualifyURLs(vr.Milestones[i].URLs, baseUrl)
	}
	return vr
}

// qualifyURLs rewrites each relative URL in urls, in place, to
// "baseUrl/url" with redundant slashes trimmed at the join point.
func qualifyURLs(urls []string, baseUrl string) {
	for j := range urls {
		u := &urls[j]
		parsedUrl, err := url.ParseRequestURI(*u)
		if err == nil && parsedUrl.Scheme != "" && parsedUrl.Host != "" {
			continue // already absolute
		}
		*u = fmt.Sprintf("%s/%s", strings.TrimRight(baseUrl, "/"), strings.TrimLeft(*u, "/"))
	}
}
// collectProjectMilestones creates temporary milestones based on project information
// these will then be used in applyProjectMilestones as default values from milestones
// at the moment only colors and deadlines are collected
func (vr *VisualRoadmap) collectProjectMilestones() map[int]*Milestone {
	// Keyed by zero-based milestone index (Project.Milestone is one-based,
	// with 0 meaning "no milestone").
	foundMilestones := map[int]*Milestone{}
	for i := range vr.Projects {
		p := &vr.Projects[i]
		if p.Milestone == 0 {
			continue
		}
		if p.Color == nil && p.Dates == nil {
			continue // nothing useful to contribute
		}
		mk := int(p.Milestone) - 1
		var endAt *time.Time
		if p.Dates != nil {
			endAt = &p.Dates.EndAt
		}
		milestone, ok := foundMilestones[mk]
		if !ok {
			// First project seen for this milestone: seed color and deadline.
			foundMilestones[mk] = &Milestone{
				DeadlineAt: endAt,
				Color:      p.Color,
			}
			continue
		}
		if endAt == nil {
			continue
		}
		// Keep the latest end date among the milestone's projects.
		if milestone.DeadlineAt == nil || milestone.DeadlineAt.Before(*endAt) {
			milestone.DeadlineAt = endAt
		}
	}
	return foundMilestones
}
// applyProjectMilestone will apply temporary milestones created in collectProjectMilestones
// as default values for milestones
// this means that if milestones don't have deadlines or colors set, they can be set using what was
// found or generated for the projects linked to a milestone
func (vr *VisualRoadmap) applyProjectMilestone(projectMilestones map[int]*Milestone) *VisualRoadmap {
for i, m := range projectMilestones {
if len(vr.Milestones) < i {
panic("original milestone not found")
}
om := &vr.Milestones[i]
if om.Color == nil {
om.Color = m.Color
}
if om.DeadlineAt != nil {
continue
}
if m.DeadlineAt == nil {
continue
}
om.DeadlineAt = m.DeadlineAt
}
for i := range vr.Milestones {
if vr.Milestones[i].Color == nil {
vr.Milestones[i].Color = defaultMilestoneColor
}
}
return vr
} | pkg/roadmap/visual_roadmap_model.go | 0.652131 | 0.559531 | visual_roadmap_model.go | starcoder |
package traveler
import "strings"
// CountryAlpha2 is the country ISO_3166-1 alpha 2 code
type CountryAlpha2 string

// CountryAlpha3 is the country ISO_3166-1 alpha 3 code
type CountryAlpha3 string

// CountryInformation contains all the information associated with a country,
// according to ISO_3166-1
type CountryInformation struct {
	FullName      string
	CountryAlpha3 CountryAlpha3
	Numeric       int
}

// All the following functions come in pair for CountryAlpha2 and CountryAlpha3

// WrongCountryAlpha2 represents what String() will print if
// CountryAlpha2 is wrong
var WrongCountryAlpha2 = CountryAlpha2("--")

// WrongCountryAlpha3 represents what String() will print if
// CountryAlpha3 is wrong
var WrongCountryAlpha3 = CountryAlpha3("---")

// format normalizes the code to upper case.
func (a CountryAlpha2) format() CountryAlpha2 {
	upper := strings.ToUpper(string(a))
	return CountryAlpha2(upper)
}

// format normalizes the code to upper case.
func (a CountryAlpha3) format() CountryAlpha3 {
	upper := strings.ToUpper(string(a))
	return CountryAlpha3(upper)
}
// Verify returns true if the CountryAlpha2 is valid
func (a CountryAlpha2) Verify() bool {
	// Valid iff the upper-cased code is a key of countryList.
	_, ok := countryList[a.format()]
	return ok
}

// Verify returns true if the CountryAlpha3 is valid
func (a CountryAlpha3) Verify() bool {
	// FromCountryAlpha3 yields an empty alpha-2 key when nothing matched.
	k, _ := FromCountryAlpha3(a)
	return k != ""
}
// Information returns the CountryInformation associated with the
// CountryAlpha2 code, or nil when the code is unknown.
func (a CountryAlpha2) Information() *CountryInformation {
	// FIX(idiom): the local was named "new", shadowing the builtin.
	info, ok := countryList[a.format()]
	if !ok {
		return nil
	}
	return &info
}

// Information returns the CountryAlpha2 code along with the
// CountryInformation associated with the CountryAlpha3 code, or
// ("", nil) when the code is unknown.
func (a CountryAlpha3) Information() (CountryAlpha2, *CountryInformation) {
	k, info := FromCountryAlpha3(a)
	if k == "" {
		return "", nil
	}
	return k, &info
}
// Continent returns the continent on which the country is
// (empty string when the code is not in countryToContinents).
func (a CountryAlpha2) Continent() string {
	return countryToContinents[a]
}

// Continent returns the continent on which the country is
// (empty string when the alpha-3 code is unknown).
func (a CountryAlpha3) Continent() string {
	k, _ := FromCountryAlpha3(a)
	return countryToContinents[k]
}

// Currency returns the currency of the country
// (empty string when the code is not in countryToCurrency).
func (a CountryAlpha2) Currency() string {
	return countryToCurrency[a]
}

// Currency returns the currency of the country
// (empty string when the alpha-3 code is unknown).
func (a CountryAlpha3) Currency() string {
	k, _ := FromCountryAlpha3(a)
	return countryToCurrency[k]
}
// String for Stringer interface
// Valid codes render upper-cased; invalid ones render as WrongCountryAlpha2.
func (a CountryAlpha2) String() string {
	if a.Verify() {
		return string(a.format())
	}
	return string(WrongCountryAlpha2)
}

// String for Stringer interface
// Valid codes render upper-cased; invalid ones render as WrongCountryAlpha3.
func (a CountryAlpha3) String() string {
	if a.Verify() {
		return string(a.format())
	}
	return string(WrongCountryAlpha3)
}
// The following functions aren't quite as fast as the other ones,
// and not as safe to use (e.g. wiki standard for full names).
// These functions return "" as CountryAlpha2 if nothing is found.
// FromFullName returns the CountryAlpha2 code along with the CountryInformation
// based on the name provided.
func FromFullName(name string) (CountryAlpha2, CountryInformation) {
for k, v := range countryList {
if v.FullName == name {
return k, v
}
}
return "", CountryInformation{}
}
// FromCountryAlpha3 returns the CountryAlpha2 code along with the
// CountryInformation based on the CountryAlpha3 code provided
func FromCountryAlpha3(a CountryAlpha3) (CountryAlpha2, CountryInformation) {
a = a.format()
for k, v := range countryList {
if v.CountryAlpha3 == a {
return k, v
}
}
return "", CountryInformation{}
}
// FromNumericCode returns the CountryAlpha2 code along with the
// CountryInformation based on the numeric code provided
func FromNumericCode(code int) (CountryAlpha2, CountryInformation) {
for k, v := range countryList {
if v.Numeric == code {
return k, v
}
}
return "", CountryInformation{}
} | traveler.go | 0.718397 | 0.45048 | traveler.go | starcoder |
package db
import (
"time"
"strings"
"strconv"
"encoding/json"
"database/sql/driver"
"github.com/s0ulw1sh/soulgost/utils"
)
// TimeMin stores a time of day as a count of minutes since midnight.
type TimeMin struct {
	Minutes int
}

// Value implements driver.Valuer: the stored minutes are rendered via
// String for the database.
func (d TimeMin) Value() (driver.Value, error) {
	return d.String(), nil
}
// Scan implements sql.Scanner. It accepts a []byte or string of the form
// "HH:MM:SS", converts hours and minutes into a minute count, and ignores
// the seconds component. Any other input type or shape yields
// ErrInvalidType (or the strconv parse error).
func (d *TimeMin) Scan(value interface{}) error {
	var str string
	switch value.(type) {
	case []byte:
		str = string(value.([]byte))
	case string:
		str = value.(string)
	default:
		return ErrInvalidType
	}
	s := strings.Split(str, ":")
	if len(s) != 3 {
		return ErrInvalidType
	}
	i, err := strconv.ParseInt(s[0], 10, 8)
	if err != nil {
		return err
	}
	d.Minutes = int(i * 60) // hours component
	i, err = strconv.ParseInt(s[1], 10, 8)
	if err != nil {
		return err
	}
	d.Minutes += int(i) // minutes component; seconds (s[2]) are discarded
	return nil
}
// MarshalJSON encodes the value as a bare integer minute count.
func (d TimeMin) MarshalJSON() ([]byte, error) {
	return json.Marshal(d.Minutes)
}

// UnmarshalJSON decodes a bare integer minute count.
func (d *TimeMin) UnmarshalJSON(b []byte) error {
	return json.Unmarshal(b, &d.Minutes)
}

// Val returns the raw minute count.
func (d *TimeMin) Val() int {
	return d.Minutes
}
// String renders the value as hours:minutes:seconds with the seconds fixed
// at zero, filling the buffer right-to-left.
// NOTE(review): utils.UintToBStrLeadZero appears to write a zero-padded
// decimal ending at the current index and return the new start index —
// TODO confirm against the soulgost/utils implementation.
func (d TimeMin) String() string {
	var (
		buf [32]byte
		w   int
	)
	w = len(buf)
	w = utils.UintToBStrLeadZero(buf[:w], 0) // seconds, always zero
	w--
	buf[w] = ':'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(d.Minutes%60)) // minutes
	w--
	buf[w] = ':'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(d.Minutes/60)) // hours
	return string(buf[w:])
}
// TimeEmpty is a nullable timestamp: Valid reports whether Time holds a
// real value (a SQL NULL scans to Valid == false).
type TimeEmpty struct {
	Valid bool
	Time  time.Time
}
// Value implements driver.Valuer: NULL when invalid, otherwise the
// "YYYY-MM-DD HH:MM:SS" string produced by String.
func (t TimeEmpty) Value() (driver.Value, error) {
	if !t.Valid {
		return nil, nil
	}
	return t.String(), nil
}
// Scan implements sql.Scanner. nil scans to the invalid (NULL) state;
// time.Time is stored directly; []byte/string are parsed as a UTC
// datetime via utils.ParseDateTime. Unsupported types yield ErrInvalidType.
func (t *TimeEmpty) Scan(value interface{}) error {
	if value == nil {
		t.Time, t.Valid = time.Time{}, false
		return nil
	}

	switch v := value.(type) {
	case time.Time: t.Time = v
	case []byte:    t.Time = utils.ParseDateTime(v, time.UTC)
	case string:    t.Time = utils.ParseDateTime([]byte(v), time.UTC)
	default:
		// NOTE(review): t.Time is left unchanged on this path — confirm
		// whether it should also be reset to the zero time.
		t.Valid = false
		return ErrInvalidType
	}

	t.Valid = true
	return nil
}
// String renders the timestamp as "YYYY-MM-DD HH:MM:SS" (19 bytes), or "-"
// when invalid. The fixed-size buffer is filled right-to-left (seconds
// first, year last) and the whole buffer is returned, so the format is
// expected to fill all 19 bytes exactly — years shorter than four digits
// would leave NUL bytes at the front (presumably never the case; verify).
func (t TimeEmpty) String() string {
	var (
		buf [19]byte
		w   int
	)

	if !t.Valid {
		return "-"
	}

	w = len(buf)
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Second()))
	w--
	buf[w] = ':'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Minute()))
	w--
	buf[w] = ':'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Hour()))
	w--
	buf[w] = ' '
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Day()))
	w--
	buf[w] = '-'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Month()))
	w--
	buf[w] = '-'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Year()))

	return string(buf[:])
}
// MarshalJSON encodes the timestamp as the quoted string "HH:MM DD.MM.YYYY"
// (matching the layout UnmarshalJSON parses), or "\"-\"" when invalid.
// The 16-byte buffer is filled right-to-left: year last written ends up
// rightmost, hour leftmost. Seconds are intentionally omitted.
func (t TimeEmpty) MarshalJSON() ([]byte, error) {
	var (
		buf [16]byte
		w   int
	)

	if !t.Valid {
		return []byte("\"-\""), nil
	}

	w = len(buf)
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Year()))
	w--
	buf[w] = '.'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Month()))
	w--
	buf[w] = '.'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Day()))
	w--
	buf[w] = ' '
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Minute()))
	w--
	buf[w] = ':'
	w = utils.UintToBStrLeadZero(buf[:w], uint64(t.Time.Hour()))

	return []byte("\"" + string(buf[:]) + "\""), nil
}
func (t *TimeEmpty) UnmarshalJSON(b []byte) (err error) {
ptime, err := time.Parse("15:04 02.01.2006", string(b))
t.Valid = err == nil
if t.Valid {
t.Time = ptime
} else {
t.Time = time.Time{}
}
return nil
} | db/time.go | 0.645455 | 0.414129 | time.go | starcoder |
package config
// configItem describes one configuration key: the placeholder label shown
// in generated documentation, its default value, and a Markdown/HTML
// description.
type configItem struct {
	label        string
	defaultValue string
	description  string
}
// logLevelDescription is a Markdown fragment shared by every *_log_level
// option description; it maps numeric log levels to their names.
var logLevelDescription = `The level is either a name or a numeric value. The following table describes the meaning of the value.

|Value|Name |
|-----|-------|
|` + "`" + `0` + "`" + ` |` + "`" + `debug` + "`" + ` |
|` + "`" + `1` + "`" + ` |` + "`" + `info` + "`" + ` |
|` + "`" + `2` + "`" + ` |` + "`" + `warn` + "`" + ` |
|` + "`" + `3` + "`" + ` |` + "`" + `error` + "`" + ` |
|` + "`" + `4` + "`" + ` |` + "`" + `fatal` + "`" + ` |
`
// defaultConf maps every supported configuration key to its default value,
// documentation label, and Markdown description. The descriptions are used
// to generate the reference documentation; inline backtick concatenation
// keeps literal back-quotes inside the raw strings.
var defaultConf = map[string]*configItem{
	"bind": {
		defaultValue: "127.0.0.1:8080",
		label:        "<address>:<port>",
		description: `
Specifies the address and the port number of a daemon in a form <code><var>address</var>:<var>port</var></code>.
`,
	},
	"pid": {
		defaultValue: "",
		label:        "<file>",
		description: `
Specifies a file where PID is written to.
`,
	},
	"access_log": {
		defaultValue: "",
		label:        "<file>",
		description: `
Specifies a file where API access log is written to.  It defaults to standard output.

Each line in the file is a JSON string corresponds to a single log item.
`,
	},
	"access_log_tag": {
		defaultValue: "fireworq.access",
		label:        "<tag>",
		description: `
Specifies the value of ` + "`" + `tag` + "`" + ` field in a access log item.
`,
	},
	"error_log": {
		defaultValue: "",
		label:        "<file>",
		description: `
Specifies a file where error logs are written to.  It defaults to standard error output.

If this value is specified, each line in the file is a JSON string corresponds to a single log item.  Otherwise, each line of the output is a prettified log item.
`,
	},
	"error_log_level": {
		defaultValue: "",
		label:        "<level>",
		description: `
Specifies a log level of the access log.  ` + logLevelDescription + `
If none of these values is specified, the level is determined by ` + "`" + `DEBUG` + "`" + ` environment variable.  If ` + "`" + `DEBUG` + "`" + ` has a non-empty value, then the level is ` + "`" + `debug` + "`" + `.  Otherwise, the level is ` + "`" + `info` + "`" + `.
`,
	},
	"shutdown_timeout": {
		defaultValue: "30",
		label:        "<seconds>",
		description: `
Specifies a timeout, in seconds, which the daemon waits on [gracefully shutting down or restarting][section-graceful-restart].
`,
	},
	"keep_alive": {
		defaultValue: "false",
		label:        "true|false",
		description: `
Specifies whether connections should be reused.
`,
	},
	"config_refresh_interval": {
		defaultValue: "1000",
		label:        "<milliseconds>",
		description: `
Specifies an interval, in milliseconds, at which a Fireworq daemon checks if configurations (such as queue definitions or routings) are changed by other daemons.
`,
	},
	"driver": {
		defaultValue: "mysql",
		label:        "<driver>",
		description: `
Specifies a driver for job queues and repositories.  The available values are ` + "`" + `mysql` + "`" + ` and ` + "`in-memory`" + `.

Note that ` + "`in-memory`" + ` driver is not for production use.  It is intended to be used for just playing with Fireworq without a storage middleware or to show the upper bound of performance in a benchmark.
`,
	},
	"mysql_dsn": {
		defaultValue: "tcp(localhost:3306)/fireworq",
		label:        "<DSN>",
		description: `
Specifies a data source name for the job queue and the repository database in a form <code><var>user</var>:<var>password</var>@tcp(<var>mysql_host</var>:<var>mysql_port</var>)/<var>database</var>?<var>options</var></code>.  This is in effect only when [the driver](#env-driver) is ` + "`" + `mysql` + "`" + ` and is mandatory for that case.
`,
	},
	"repository_mysql_dsn": {
		defaultValue: "",
		label:        "<DSN>",
		description: `
Specifies a data source name for the repository database in a form <code><var>user</var>:<var>password</var>@tcp(<var>mysql_host</var>:<var>mysql_port</var>)/<var>database</var>?<var>options</var></code>.  This is in effect only when the [driver](#env-driver) is ` + "`" + `mysql` + "`" + ` and overrides [the default DSN](#env-mysql-dsn).  This should be used when you want to specify a DSN differs from [the queue DSN](#env-queue-mysql-dsn).
`,
	},
	"queue_default": {
		defaultValue: "",
		label:        "<name>",
		description: `
Specifies the name of a default queue.  A job whose ` + "`" + `category` + "`" + ` is not defined via the [routing API][api-put-routing] will be delivered to this queue.  If no default queue name is specified, pushing a job with an unknown category will fail.

If you already have a queue with the specified name in the job queue database, that one is used.  Or otherwise a new queue is created automatically.
`,
	},
	"queue_default_polling_interval": {
		defaultValue: "200",
		label:        "<milliseconds>",
		description: `
Specifies the default interval, in milliseconds, at which Fireworq checks the arrival of new jobs, used when ` + "`" + `polling_interval` + "`" + ` in the [queue API][api-put-queue] is omitted.
`,
	},
	"queue_default_max_workers": {
		defaultValue: "20",
		label:        "<number>",
		description: `
Specifies the default maximum number of jobs that are processed simultaneously in a queue, used when ` + "`" + `max_workers` + "`" + ` in the [queue API][api-put-queue] is omitted.
`,
	},
	"queue_log": {
		defaultValue: "",
		label:        "<file>",
		description: `
Specifies a file where the job queue logs are written to.  It defaults to standard output.  No other logs than the job queue logs are written to this file.

Each line in the file is a JSON string corresponds to a single log item.
`,
	},
	"queue_log_tag": {
		defaultValue: "fireworq.queue",
		label:        "<tag>",
		description: `
Specifies the value of ` + "`" + `tag` + "`" + ` field in a job queue log item JSON.
`,
	},
	"queue_log_level": {
		defaultValue: "",
		label:        "<level>",
		description: `
Specifies a log level of the job queue logs.  ` + logLevelDescription + `
If none of these values is specified, the level is determined by ` + "`" + `DEBUG` + "`" + ` environment variable.  If ` + "`" + `DEBUG` + "`" + ` has a non-empty value, then the level is ` + "`" + `debug` + "`" + `.  Otherwise, the level is ` + "`" + `info` + "`" + `.
`,
	},
	"queue_mysql_dsn": {
		defaultValue: "",
		label:        "<DSN>",
		description: `
Specifies a data source name for the job queue database in a form <code><var>user</var>:<var>password</var>@tcp(<var>mysql_host</var>:<var>mysql_port</var>)/<var>database</var>?<var>options</var></code>.  This is in effect only when the [driver](#env-driver) is ` + "`" + `mysql` + "`" + ` and overrides [the default DSN](#env-mysql-dsn).  This should be used when you want to specify a DSN differs from [the repository DSN](#env-repository-mysql-dsn).
`,
	},
	"dispatch_user_agent": {
		defaultValue: "",
		label:        "<agent>",
		description: `
Specifies the value of ` + "`" + `User-Agent` + "`" + ` header field used for an HTTP request to a worker.  The default value is <code>Fireworq/<var>version</var></code>.
`,
	},
	"dispatch_keep_alive": {
		// NOTE(review): no defaultValue here — presumably intentional since
		// the description says it falls back to the global keep_alive.
		label: "true|false",
		description: `
Specifies whether a connection to a worker should be reused.  This overrides [the default keep-alive setting](#env-keep-alive).
`,
	},
	"dispatch_max_conns_per_host": {
		defaultValue: "10",
		label:        "<number>",
		description: `
Specifies maximum idle connections to keep per-host.  This value works only when [connections of the dispatcher are reused](#env-dispatch-keep-alive).
`,
	},
	"dispatch_idle_conn_timeout": {
		defaultValue: "0",
		label:        "<seconds>",
		description: `
Specifies the maximum amount of time of an idle (keep-alive) connection will remain idle before closing itself.  If zero, an idle connections will not be closed.
`,
	},
}
package graphics
import (
"fmt"
"github.com/go-gl/gl/v4.6-core/gl"
"github.com/mokiat/gomath/sprec"
"github.com/mokiat/lacking/framework/opengl"
"github.com/mokiat/lacking/framework/opengl/game/graphics/internal"
"github.com/mokiat/lacking/game/graphics"
)
// Fixed off-screen render resolution and the vertex attribute slots shared
// by all mesh vertex layouts.
const (
	framebufferWidth  = int32(1920)
	framebufferHeight = int32(1080)

	coordAttributeIndex    = 0
	normalAttributeIndex   = 1
	tangentAttributeIndex  = 2
	texCoordAttributeIndex = 3
	colorAttributeIndex    = 4
)
// newRenderer constructs a Renderer with all GPU resource handles created
// but not yet allocated; call Allocate before rendering and Release when
// done.
func newRenderer() *Renderer {
	return &Renderer{
		framebufferWidth:  framebufferWidth,
		framebufferHeight: framebufferHeight,

		geometryAlbedoTexture: opengl.NewTwoDTexture(),
		geometryNormalTexture: opengl.NewTwoDTexture(),
		geometryDepthTexture:  opengl.NewTwoDTexture(),
		geometryFramebuffer:   opengl.NewFramebuffer(),

		lightingAlbedoTexture: opengl.NewTwoDTexture(),
		lightingDepthTexture:  opengl.NewTwoDTexture(),
		lightingFramebuffer:   opengl.NewFramebuffer(),

		exposureAlbedoTexture: opengl.NewTwoDTexture(),
		exposureFramebuffer:   opengl.NewFramebuffer(),
		exposureBuffer:        opengl.NewBuffer(),
		// Start with neutral exposure until the first probe result arrives.
		exposureTarget:        1.0,

		screenFramebuffer: opengl.DefaultFramebuffer(),

		quadMesh:   newQuadMesh(),
		skyboxMesh: newSkyboxMesh(),
	}
}
// Renderer implements a deferred rendering pipeline: a geometry pass into a
// G-buffer, a lighting pass, a forward pass (skybox), an optional
// asynchronous exposure probe, and a tone-mapping postprocessing pass to
// the screen framebuffer.
type Renderer struct {
	framebufferWidth  int32
	framebufferHeight int32

	// G-buffer targets written by the geometry pass.
	geometryAlbedoTexture *opengl.TwoDTexture
	geometryNormalTexture *opengl.TwoDTexture
	geometryDepthTexture  *opengl.TwoDTexture
	geometryFramebuffer   *opengl.Framebuffer

	// HDR accumulation targets written by the lighting and forward passes.
	lightingAlbedoTexture *opengl.TwoDTexture
	lightingDepthTexture  *opengl.TwoDTexture
	lightingFramebuffer   *opengl.Framebuffer

	// Resources for the asynchronous auto-exposure measurement; exposureSync
	// is a GL fence guarding the pixel-pack readback in exposureBuffer.
	exposureAlbedoTexture *opengl.TwoDTexture
	exposureFramebuffer   *opengl.Framebuffer
	exposurePresentation  *internal.LightingPresentation
	exposureBuffer        *opengl.Buffer
	exposureSync          uintptr
	exposureTarget        float32

	screenFramebuffer          *opengl.Framebuffer
	postprocessingPresentation *internal.PostprocessingPresentation

	directionalLightPresentation *internal.LightingPresentation
	ambientLightPresentation     *internal.LightingPresentation
	quadMesh                     *QuadMesh

	skyboxPresentation *internal.SkyboxPresentation
	skyboxMesh         *SkyboxMesh
}
// Allocate creates all GPU-side resources (textures, framebuffers, shader
// presentations, meshes). It must be called with a current GL context
// before the first Render; Release frees everything in reverse order.
func (r *Renderer) Allocate() {
	// G-buffer: LDR albedo, float normals, and a 32-bit depth attachment.
	geometryAlbedoTextureInfo := opengl.TwoDTextureAllocateInfo{
		Width:             framebufferWidth,
		Height:            framebufferHeight,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.RGBA8,
		DataFormat:        gl.RGBA,
		DataComponentType: gl.UNSIGNED_BYTE,
	}
	r.geometryAlbedoTexture.Allocate(geometryAlbedoTextureInfo)

	geometryNormalTextureInfo := opengl.TwoDTextureAllocateInfo{
		Width:             framebufferWidth,
		Height:            framebufferHeight,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.RGBA32F,
		DataFormat:        gl.RGBA,
		DataComponentType: gl.FLOAT,
	}
	r.geometryNormalTexture.Allocate(geometryNormalTextureInfo)

	geometryDepthTextureInfo := opengl.TwoDTextureAllocateInfo{
		Width:             framebufferWidth,
		Height:            framebufferHeight,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.DEPTH_COMPONENT32,
		DataFormat:        gl.DEPTH_COMPONENT,
		DataComponentType: gl.FLOAT,
	}
	r.geometryDepthTexture.Allocate(geometryDepthTextureInfo)

	geometryFramebufferInfo := opengl.FramebufferAllocateInfo{
		ColorAttachments: []*opengl.Texture{
			&r.geometryAlbedoTexture.Texture,
			&r.geometryNormalTexture.Texture,
		},
		DepthAttachment: &r.geometryDepthTexture.Texture,
	}
	r.geometryFramebuffer.Allocate(geometryFramebufferInfo)

	// HDR lighting target plus its own depth buffer (the geometry depth is
	// blitted into it before the forward pass).
	lightingAlbedoTextureInfo := opengl.TwoDTextureAllocateInfo{
		Width:             framebufferWidth,
		Height:            framebufferHeight,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.RGBA32F,
		DataFormat:        gl.RGBA,
		DataComponentType: gl.FLOAT,
	}
	r.lightingAlbedoTexture.Allocate(lightingAlbedoTextureInfo)

	lightingDepthTextureInfo := opengl.TwoDTextureAllocateInfo{
		Width:             framebufferWidth,
		Height:            framebufferHeight,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.DEPTH_COMPONENT32,
		DataFormat:        gl.DEPTH_COMPONENT,
		DataComponentType: gl.FLOAT,
	}
	r.lightingDepthTexture.Allocate(lightingDepthTextureInfo)

	lightingFramebufferInfo := opengl.FramebufferAllocateInfo{
		ColorAttachments: []*opengl.Texture{
			&r.lightingAlbedoTexture.Texture,
		},
		DepthAttachment: &r.lightingDepthTexture.Texture,
	}
	r.lightingFramebuffer.Allocate(lightingFramebufferInfo)

	// 1x1 float target for the auto-exposure brightness probe, plus a
	// buffer used for asynchronous pixel readback.
	r.exposureAlbedoTexture.Allocate(opengl.TwoDTextureAllocateInfo{
		Width:             1,
		Height:            1,
		MinFilter:         gl.NEAREST,
		MagFilter:         gl.NEAREST,
		InternalFormat:    gl.RGBA32F,
		DataFormat:        gl.RGBA,
		DataComponentType: gl.FLOAT,
	})
	r.exposureFramebuffer.Allocate(opengl.FramebufferAllocateInfo{
		ColorAttachments: []*opengl.Texture{
			&r.exposureAlbedoTexture.Texture,
		},
	})
	r.exposurePresentation = internal.NewExposurePresentation()
	// 4 floats (RGBA) of readback storage.
	r.exposureBuffer.Allocate(opengl.BufferAllocateInfo{
		Dynamic: true,
		Data:    make([]byte, 4*4),
	})

	r.postprocessingPresentation = internal.NewTonePostprocessingPresentation(internal.ReinhardToneMapping)

	r.directionalLightPresentation = internal.NewDirectionalLightPresentation()
	r.ambientLightPresentation = internal.NewAmbientLightPresentation()
	r.quadMesh.Allocate()

	r.skyboxPresentation = internal.NewCubeSkyboxPresentation()
	r.skyboxMesh.Allocate()
}
// Release frees all GPU resources created by Allocate, in roughly the
// reverse order of their allocation.
func (r *Renderer) Release() {
	r.skyboxPresentation.Delete()
	r.skyboxMesh.Release()

	r.ambientLightPresentation.Delete()
	r.directionalLightPresentation.Delete()
	r.quadMesh.Release()

	r.postprocessingPresentation.Delete()

	r.exposureBuffer.Release()
	r.exposurePresentation.Delete()
	r.exposureFramebuffer.Release()
	r.exposureAlbedoTexture.Release()

	r.lightingFramebuffer.Release()
	r.lightingAlbedoTexture.Release()
	r.lightingDepthTexture.Release()

	r.geometryFramebuffer.Release()
	r.geometryDepthTexture.Release()
	r.geometryNormalTexture.Release()
	r.geometryAlbedoTexture.Release()
}
// renderCtx carries the per-frame parameters shared by all render passes:
// the target viewport rectangle and the camera matrices in column-major
// array form, ready to upload as GL uniforms.
type renderCtx struct {
	scene            *Scene
	x                int
	y                int
	width            int
	height           int
	projectionMatrix [16]float32
	cameraMatrix     [16]float32
	viewMatrix       [16]float32
	camera           *Camera
}
// Render draws scene from camera into viewport, running the full deferred
// pipeline: geometry pass, lighting pass, forward pass (skybox), an
// optional auto-exposure probe, and a final tone-mapping pass to the
// screen. TextureBarrier calls separate passes that sample textures the
// previous pass rendered into.
func (r *Renderer) Render(viewport graphics.Viewport, scene *Scene, camera *Camera) {
	projectionMatrix := r.evaluateProjectionMatrix(camera, viewport.Width, viewport.Height)
	cameraMatrix := camera.ModelMatrix()
	// The view matrix is the inverse of the camera's model matrix.
	viewMatrix := sprec.InverseMat4(cameraMatrix)

	gl.Enable(gl.FRAMEBUFFER_SRGB)

	ctx := renderCtx{
		scene:            scene,
		x:                viewport.X,
		y:                viewport.Y,
		width:            viewport.Width,
		height:           viewport.Height,
		projectionMatrix: projectionMatrix.ColumnMajorArray(),
		cameraMatrix:     cameraMatrix.ColumnMajorArray(),
		viewMatrix:       viewMatrix.ColumnMajorArray(),
		camera:           camera,
	}
	r.renderGeometryPass(ctx)
	gl.TextureBarrier()
	r.renderLightingPass(ctx)
	r.renderForwardPass(ctx)
	if camera.autoExposureEnabled {
		gl.TextureBarrier()
		r.renderExposureProbePass(ctx)
	}
	r.renderPostprocessingPass(ctx)
}
// evaluateProjectionMatrix builds the projection matrix for camera given
// the viewport size, honoring the camera's field-of-view mode. Near and
// far planes are currently hard-coded constants.
func (r *Renderer) evaluateProjectionMatrix(camera *Camera, width, height int) sprec.Mat4 {
	const (
		near = float32(0.5)
		far  = float32(900.0)
	)
	var (
		// Clamp to avoid division by zero for degenerate viewports.
		fWidth  = sprec.Max(1.0, float32(width))
		fHeight = sprec.Max(1.0, float32(height))
	)

	switch camera.fovMode {
	case graphics.FoVModeHorizontalPlus:
		// Vertical FoV fixed; horizontal extent grows with aspect ratio.
		halfHeight := near * sprec.Tan(camera.fov/2.0)
		halfWidth := halfHeight * (fWidth / fHeight)
		return sprec.PerspectiveMat4(
			-halfWidth, halfWidth, -halfHeight, halfHeight, near, far,
		)

	case graphics.FoVModeVertialMinus:
		// Horizontal FoV fixed; vertical extent derived from aspect ratio.
		halfWidth := near * sprec.Tan(camera.fov/2.0)
		halfHeight := halfWidth * (fHeight / fWidth)
		return sprec.PerspectiveMat4(
			-halfWidth, halfWidth, -halfHeight, halfHeight, near, far,
		)

	case graphics.FoVModePixelBased:
		// One world unit per pixel, orthographic.
		halfWidth := fWidth / 2.0
		halfHeight := fHeight / 2.0
		return sprec.OrthoMat4(
			-halfWidth, halfWidth, halfHeight, -halfHeight, near, far,
		)

	default:
		panic(fmt.Errorf("unsupported fov mode: %s", camera.fovMode))
	}
}
// renderGeometryPass fills the G-buffer: clears color to the scene's sky
// background color and depth to the far plane, then draws every mesh in
// the scene with depth testing/writing enabled.
func (r *Renderer) renderGeometryPass(ctx renderCtx) {
	r.geometryFramebuffer.Use()
	gl.Viewport(0, 0, r.framebufferWidth, r.framebufferHeight)
	gl.Enable(gl.DEPTH_TEST)
	gl.DepthMask(true)
	gl.DepthFunc(gl.LEQUAL)
	r.geometryFramebuffer.ClearColor(0, sprec.NewVec4(
		ctx.scene.sky.backgroundColor.X,
		ctx.scene.sky.backgroundColor.Y,
		ctx.scene.sky.backgroundColor.Z,
		1.0,
	))
	r.geometryFramebuffer.ClearDepth(1.0)
	// TODO: Traverse octree
	for mesh := ctx.scene.firstMesh; mesh != nil; mesh = mesh.next {
		r.renderMesh(ctx, mesh.ModelMatrix().ColumnMajorArray(), mesh.template)
	}
}
// renderMesh draws each sub-mesh of template into the G-buffer using the
// sub-mesh material's geometry program. modelMatrix is the mesh transform
// in column-major order; backface culling toggles per material.
func (r *Renderer) renderMesh(ctx renderCtx, modelMatrix [16]float32, template *MeshTemplate) {
	for _, subMesh := range template.subMeshes {
		material := subMesh.material
		if material.backfaceCulling {
			gl.Enable(gl.CULL_FACE)
		} else {
			gl.Disable(gl.CULL_FACE)
		}
		presentation := material.geometryPresentation
		presentation.Program.Use()
		gl.UniformMatrix4fv(presentation.ProjectionMatrixLocation, 1, false, &ctx.projectionMatrix[0])
		gl.UniformMatrix4fv(presentation.ViewMatrixLocation, 1, false, &ctx.viewMatrix[0])
		gl.UniformMatrix4fv(presentation.ModelMatrixLocation, 1, false, &modelMatrix[0])
		gl.Uniform1f(presentation.MetalnessLocation, material.vectors[1].Y)
		gl.Uniform1f(presentation.RoughnessLocation, material.vectors[1].Z)
		// BUG FIX: the fourth (alpha) component previously passed Z twice;
		// the alpha channel of the albedo color is W.
		albedoColor := material.vectors[0]
		gl.Uniform4f(presentation.AlbedoColorLocation, albedoColor.X, albedoColor.Y, albedoColor.Z, albedoColor.W)
		textureUnit := uint32(0)
		if material.twoDTextures[0] != nil {
			gl.BindTextureUnit(textureUnit, material.twoDTextures[0].ID())
			gl.Uniform1i(presentation.AlbedoTextureLocation, int32(textureUnit))
			textureUnit++
		}
		gl.BindVertexArray(template.vertexArray.ID())
		gl.DrawElements(subMesh.primitive, subMesh.indexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(subMesh.indexOffsetBytes))
	}
}
// renderLightingPass accumulates light contributions into the HDR lighting
// buffer. Depth from the geometry pass is blitted over first (so the later
// forward pass can depth-test against it), then each light is drawn as a
// fullscreen quad with additive blending (src ONE, dst ONE).
func (r *Renderer) renderLightingPass(ctx renderCtx) {
	gl.BlitNamedFramebuffer(r.geometryFramebuffer.ID(), r.lightingFramebuffer.ID(),
		0, 0, r.framebufferWidth, r.framebufferHeight,
		0, 0, r.framebufferWidth, r.framebufferHeight,
		gl.DEPTH_BUFFER_BIT,
		gl.NEAREST,
	)

	r.lightingFramebuffer.Use()

	gl.Viewport(0, 0, r.framebufferWidth, r.framebufferHeight)
	gl.Disable(gl.DEPTH_TEST)
	gl.DepthMask(false)
	gl.Enable(gl.CULL_FACE)
	r.lightingFramebuffer.ClearColor(0, sprec.NewVec4(0.0, 0.0, 0.0, 1.0))

	// Additive blending so multiple lights accumulate; disabled again below.
	gl.Enablei(gl.BLEND, 0)
	gl.BlendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD)
	gl.BlendFuncSeparate(gl.ONE, gl.ONE, gl.ONE, gl.ZERO)

	// TODO: Traverse octree
	for light := ctx.scene.firstLight; light != nil; light = light.next {
		switch light.mode {
		case LightModeDirectional:
			r.renderDirectionalLight(ctx, light)
		case LightModeAmbient:
			r.renderAmbientLight(ctx, light)
		}
	}

	gl.Disablei(gl.BLEND, 0)
}
// renderAmbientLight draws a fullscreen quad that shades the G-buffer with
// an ambient light's reflection/refraction environment textures.
func (r *Renderer) renderAmbientLight(ctx renderCtx, light *Light) {
	presentation := r.ambientLightPresentation
	presentation.Program.Use()

	gl.UniformMatrix4fv(presentation.ProjectionMatrixLocation, 1, false, &ctx.projectionMatrix[0])
	gl.UniformMatrix4fv(presentation.CameraMatrixLocation, 1, false, &ctx.cameraMatrix[0])
	gl.UniformMatrix4fv(presentation.ViewMatrixLocation, 1, false, &ctx.viewMatrix[0])

	// Bind the three G-buffer channels followed by the light's environment
	// maps on sequential texture units.
	textureUnit := uint32(0)

	gl.BindTextureUnit(textureUnit, r.geometryAlbedoTexture.ID())
	gl.Uniform1i(presentation.FramebufferDraw0Location, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, r.geometryNormalTexture.ID())
	gl.Uniform1i(presentation.FramebufferDraw1Location, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, r.geometryDepthTexture.ID())
	gl.Uniform1i(presentation.FramebufferDepthLocation, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, light.reflectionTexture.ID())
	gl.Uniform1i(presentation.ReflectionTextureLocation, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, light.refractionTexture.ID())
	gl.Uniform1i(presentation.RefractionTextureLocation, int32(textureUnit))
	textureUnit++

	gl.BindVertexArray(r.quadMesh.VertexArray.ID())
	gl.DrawElements(r.quadMesh.Primitive, r.quadMesh.IndexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(r.quadMesh.IndexOffsetBytes))
}
// renderDirectionalLight draws a fullscreen quad that shades the G-buffer
// with a directional light. The light direction is taken from the Z axis
// of the light's orientation.
func (r *Renderer) renderDirectionalLight(ctx renderCtx, light *Light) {
	presentation := r.directionalLightPresentation
	presentation.Program.Use()

	gl.UniformMatrix4fv(presentation.ProjectionMatrixLocation, 1, false, &ctx.projectionMatrix[0])
	gl.UniformMatrix4fv(presentation.CameraMatrixLocation, 1, false, &ctx.cameraMatrix[0])
	gl.UniformMatrix4fv(presentation.ViewMatrixLocation, 1, false, &ctx.viewMatrix[0])

	direction := light.Rotation().OrientationZ()
	gl.Uniform3f(presentation.LightDirection, direction.X, direction.Y, direction.Z)
	intensity := light.intensity
	gl.Uniform3f(presentation.LightIntensity, intensity.X, intensity.Y, intensity.Z)

	// G-buffer channels on sequential texture units.
	textureUnit := uint32(0)

	gl.BindTextureUnit(textureUnit, r.geometryAlbedoTexture.ID())
	gl.Uniform1i(presentation.FramebufferDraw0Location, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, r.geometryNormalTexture.ID())
	gl.Uniform1i(presentation.FramebufferDraw1Location, int32(textureUnit))
	textureUnit++

	gl.BindTextureUnit(textureUnit, r.geometryDepthTexture.ID())
	gl.Uniform1i(presentation.FramebufferDepthLocation, int32(textureUnit))
	textureUnit++

	gl.BindVertexArray(r.quadMesh.VertexArray.ID())
	gl.DrawElements(r.quadMesh.Primitive, r.quadMesh.IndexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(r.quadMesh.IndexOffsetBytes))
}
// renderForwardPass draws forward-shaded content into the lighting buffer.
// Currently this is only the skybox, depth-tested (LEQUAL, no depth write)
// against the blitted geometry depth so it appears behind all geometry.
func (r *Renderer) renderForwardPass(ctx renderCtx) {
	r.lightingFramebuffer.Use()
	gl.Viewport(0, 0, r.framebufferWidth, r.framebufferHeight)
	gl.Enable(gl.DEPTH_TEST)
	gl.DepthMask(false)
	gl.DepthFunc(gl.LEQUAL)

	if texture := ctx.scene.sky.skyboxTexture; texture != nil {
		gl.Enable(gl.CULL_FACE)

		presentation := r.skyboxPresentation
		program := presentation.Program
		program.Use()
		gl.UniformMatrix4fv(presentation.ProjectionMatrixLocation, 1, false, &ctx.projectionMatrix[0])
		gl.UniformMatrix4fv(presentation.ViewMatrixLocation, 1, false, &ctx.viewMatrix[0])

		gl.BindTextureUnit(0, texture.ID())
		gl.Uniform1i(presentation.AlbedoCubeTextureLocation, 0)

		gl.BindVertexArray(r.skyboxMesh.VertexArray.ID())
		gl.DrawElements(r.skyboxMesh.Primitive, r.skyboxMesh.IndexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(r.skyboxMesh.IndexOffsetBytes))
	}
}
// renderExposureProbePass implements asynchronous auto-exposure. If a
// previous probe's fence has signaled, it reads the averaged scene color
// from the pack buffer, converts it to luminance (Rec.709 weights) and
// derives a new exposure target; the camera exposure then eases toward
// that target. When no probe is in flight, it renders the lighting buffer
// into the 1x1 probe framebuffer, kicks off an async readback into the
// pixel-pack buffer, and fences it.
func (r *Renderer) renderExposureProbePass(ctx renderCtx) {
	if r.exposureSync != 0 {
		status := gl.ClientWaitSync(r.exposureSync, gl.SYNC_FLUSH_COMMANDS_BIT, 0)
		switch status {
		case gl.ALREADY_SIGNALED, gl.CONDITION_SATISFIED:
			data := make([]float32, 4)
			gl.GetNamedBufferSubData(r.exposureBuffer.ID(), 0, 4*4, gl.Ptr(&data[0]))
			// Rec.709 luma weights; clamp to avoid extreme exposure in the dark.
			brightness := 0.2126*data[0] + 0.7152*data[1] + 0.0722*data[2]
			if brightness < 0.01 {
				brightness = 0.01
			}
			r.exposureTarget = 1.0 / (9.8 * brightness)
			gl.DeleteSync(r.exposureSync)
			r.exposureSync = 0
		case gl.WAIT_FAILED:
			r.exposureSync = 0
		}
	}

	// Smoothly approach the target a little each frame.
	ctx.camera.exposure = mix(ctx.camera.exposure, r.exposureTarget, float32(0.01))

	if r.exposureSync == 0 {
		r.exposureFramebuffer.Use()
		// NOTE(review): the viewport is set to the full framebuffer size while
		// the probe target is 1x1 — presumably the probe shader averages the
		// source texture itself; confirm against the exposure shader.
		gl.Viewport(0, 0, r.framebufferWidth, r.framebufferHeight)
		gl.Disable(gl.DEPTH_TEST)
		gl.DepthMask(false)
		gl.Enable(gl.CULL_FACE)
		r.exposureFramebuffer.ClearColor(0, sprec.ZeroVec4())

		presentation := r.exposurePresentation
		program := presentation.Program
		program.Use()

		textureUnit := uint32(0)
		gl.BindTextureUnit(textureUnit, r.lightingAlbedoTexture.ID())
		gl.Uniform1i(presentation.FramebufferDraw0Location, int32(textureUnit))
		textureUnit++

		gl.BindVertexArray(r.quadMesh.VertexArray.ID())
		gl.DrawElements(r.quadMesh.Primitive, r.quadMesh.IndexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(r.quadMesh.IndexOffsetBytes))

		// Queue an async readback of the 1x1 result into the pack buffer and
		// fence it; the result is consumed in a later frame.
		gl.TextureBarrier()
		gl.BindBuffer(gl.PIXEL_PACK_BUFFER, r.exposureBuffer.ID())
		gl.GetTextureImage(r.exposureAlbedoTexture.ID(), 0, gl.RGBA, gl.FLOAT, 4*4, gl.PtrOffset(0))
		r.exposureSync = gl.FenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0)
		gl.BindBuffer(gl.PIXEL_PACK_BUFFER, 0)
	}
}
// renderPostprocessingPass tone-maps the HDR lighting buffer into the
// screen framebuffer (restricted to the requested viewport rectangle),
// applying the camera exposure.
func (r *Renderer) renderPostprocessingPass(ctx renderCtx) {
	r.screenFramebuffer.Use()
	gl.Viewport(int32(ctx.x), int32(ctx.y), int32(ctx.width), int32(ctx.height))
	// NOTE(review): gl.Scissor is set here but SCISSOR_TEST is not enabled in
	// this function — presumably enabled elsewhere or intentionally inert;
	// confirm.
	gl.Scissor(int32(ctx.x), int32(ctx.y), int32(ctx.width), int32(ctx.height))
	gl.Disable(gl.DEPTH_TEST)
	gl.DepthMask(false)
	gl.DepthFunc(gl.ALWAYS)
	gl.Enable(gl.CULL_FACE)

	presentation := r.postprocessingPresentation
	presentation.Program.Use()

	gl.BindTextureUnit(0, r.lightingAlbedoTexture.ID())
	gl.Uniform1i(presentation.FramebufferDraw0Location, 0)
	gl.Uniform1f(presentation.ExposureLocation, ctx.camera.exposure)

	gl.BindVertexArray(r.quadMesh.VertexArray.ID())
	gl.DrawElements(r.quadMesh.Primitive, r.quadMesh.IndexCount, gl.UNSIGNED_SHORT, gl.PtrOffset(r.quadMesh.IndexOffsetBytes))
}
// TODO: Move to gomath
// mix linearly interpolates between a and b: amount 0 yields a, 1 yields b.
func mix(a, b, amount float32) float32 {
	remainder := 1.0 - amount
	return b*amount + a*remainder
}
package bynom
import "context"
// Switch takes the result of the first parser from noms which finished without error.
// If all noms failed the function will return the last error encountered.
// Switch takes the result of the first parser from noms which finished without error.
// If all noms failed the function will return the last error encountered.
// Before each retry the plate is rewound to the position where Switch
// started, so every alternative sees the same input.
func Switch(noms ...Nom) Nom {
	const funcName = "Switch"
	return func(ctx context.Context, p Plate) (err error) {
		var startPos int
		if startPos, err = p.TellPosition(ctx); err != nil {
			return WrapBreadcrumb(err, funcName, -1)
		}
		for i, nom := range noms {
			// Rewind before every alternative except the first.
			if i > 0 {
				if err = p.SeekPosition(ctx, startPos); err != nil {
					return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), startPos, -1)
				}
			}
			if err = nom(ctx, p); err == nil {
				break
			}
		}
		if err != nil {
			// All alternatives failed; report the last error.
			return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, -1), startPos, -1)
		}
		return
	}
}
// When implements conditional parsing. When the parser test finishes without error
// noms run. If one of parsers in noms fails the function fails with that error.
// When test fails, the plate is rewound to the starting position and the
// test's error is returned.
func When(test Nom, noms ...Nom) Nom {
	const funcName = "When"
	return func(ctx context.Context, p Plate) (err error) {
		var startPos int
		if startPos, err = p.TellPosition(ctx); err != nil {
			return WrapBreadcrumb(err, funcName, -1)
		}
		if err = test(ctx, p); err != nil {
			// Best-effort rewind; the test error takes precedence.
			_ = p.SeekPosition(ctx, startPos)
			return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, -1), startPos, -1)
		}
		for i, nom := range noms {
			var nomStartPos int
			if nomStartPos, err = p.TellPosition(ctx); err != nil {
				return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), startPos, -1)
			}
			if err = nom(ctx, p); err != nil {
				var nomErrPos, _ = p.TellPosition(ctx)
				return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), nomStartPos, nomErrPos)
			}
		}
		return
	}
}
// WhenNot implements conditional parsing. When the parser test finishes with non-nil error
// noms run. If one of parsers in noms fails the function fails with that error.
// When test succeeds, the plate is rewound to the starting position and nil
// is returned without running noms.
func WhenNot(test Nom, noms ...Nom) Nom {
	const funcName = "WhenNot"
	return func(ctx context.Context, p Plate) (err error) {
		var startPos int
		if startPos, err = p.TellPosition(ctx); err != nil {
			return WrapBreadcrumb(err, funcName, -1)
		}
		if err = test(ctx, p); err == nil {
			// Test matched: undo its consumption and skip noms entirely.
			if err = p.SeekPosition(ctx, startPos); err != nil {
				return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, -1), startPos, -1)
			}
			return
		} else {
			// Test failing is the condition we want; clear the error.
			err = nil
		}
		for i, nom := range noms {
			var nomStartPos int
			if nomStartPos, err = p.TellPosition(ctx); err != nil {
				return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), startPos, -1)
			}
			if err = nom(ctx, p); err != nil {
				var nomEndPos, _ = p.TellPosition(ctx)
				return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), nomStartPos, nomEndPos)
			}
		}
		return
	}
}
// Optional runs all parsers noms until all finished or at least one failed.
// If at least one of parsers return non-nil error the function
// will revert back the read position in the plate and return nil.
func Optional(noms ...Nom) Nom {
	const funcName = "Optional"
	return func(ctx context.Context, p Plate) (err error) {
		var startPos int
		if startPos, err = p.TellPosition(ctx); err != nil {
			return WrapBreadcrumb(err, funcName, -1)
		}
		for i, nom := range noms {
			if err = nom(ctx, p); err != nil {
				// A failure is swallowed; only a failed rewind is reported.
				if err = p.SeekPosition(ctx, startPos); err != nil {
					return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), startPos, -1)
				}
				return nil
			}
		}
		return
	}
}
// Repeat runs all parsers noms n times.
// If at least one of parsers return non-nil error the function
// will revert back the read position in the plate and return that error.
func Repeat(n int, noms ...Nom) Nom {
	const funcName = "Repeat"
	return func(ctx context.Context, p Plate) (err error) {
		var startPos int
		if startPos, err = p.TellPosition(ctx); err != nil {
			return WrapBreadcrumb(err, funcName, -1)
		}
	TimesLoop:
		for i := 0; i < n; i++ {
			for j, nom := range noms {
				var nomStartPos int
				if nomStartPos, err = p.TellPosition(ctx); err != nil {
					return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), startPos, -1)
				}
				if err = nom(ctx, p); err != nil {
					var nomErrPos, _ = p.TellPosition(ctx)
					err = WrapBreadcrumb(ExtendBreadcrumb(err, nomStartPos, nomErrPos), funcName, j)
					// Abort all remaining repetitions on the first failure.
					break TimesLoop
				}
			}
		}
		if err != nil {
			// Best-effort rewind to where Repeat started before reporting.
			_ = p.SeekPosition(ctx, startPos)
			return
		}
		return
	}
}
// Sequence runs all parsers noms until all finished or at least one failed.
func Sequence(noms ...Nom) Nom {
const funcName = "Sequence"
return func(ctx context.Context, p Plate) (err error) {
for i, nom := range noms {
var nomStartPos int
if nomStartPos, err = p.TellPosition(ctx); err != nil {
return WrapBreadcrumb(err, funcName, i)
}
if err = nom(ctx, p); err != nil {
var nomErrPos, _ = p.TellPosition(ctx)
return ExtendBreadcrumb(WrapBreadcrumb(err, funcName, i), nomStartPos, nomErrPos)
}
}
return
}
} | flow.go | 0.570331 | 0.441372 | flow.go | starcoder |
package record
import (
"encoding/json"
"errors"
"fmt"
"strings"
)
// Record represents data that belongs to an Skygear record
// Record represents data that belongs to an Skygear record.
// RecordID is the "<type>/<key>" identifier; Data holds the remaining
// fields. JSON encoding is handled by the custom MarshalJSON/UnmarshalJSON
// methods, which fold "_id" and Data into a single flat object.
type Record struct {
	RecordID string `json:"_id"`
	Data     map[string]interface{}
}
// Set sets value to a key in the record. It assumes Data is non-nil
// (writing to a nil map would panic).
func (r *Record) Set(key string, value interface{}) {
	r.Data[key] = value
}
// Get gets value of a key in the record. Missing keys yield the empty
// string "" rather than nil.
// NOTE(review): err is never set to a non-nil value on any path — confirm
// whether the error return is still needed by callers.
func (r *Record) Get(key string) (value interface{}, err error) {
	value, ok := r.Data[key]
	if !ok {
		value = ""
	}
	return
}
// Assign is a convenient method for setting value to a key using an
// expression of the form "key=value". Both key and value must be
// non-empty, and keys starting with "_" are reserved and rejected.
func (r *Record) Assign(expr string) error {
	parts := strings.SplitN(expr, "=", 2)
	if len(parts) != 2 || parts[0] == "" || parts[1] == "" {
		return fmt.Errorf("Record assign '%s' not in correct format. Expected: key=value", expr)
	}
	key, value := parts[0], parts[1]
	if strings.HasPrefix(key, "_") {
		return fmt.Errorf("Cannot set data with reserved key: %s", key)
	}
	r.Set(key, value)
	return nil
}
// CheckRecordID checks if specified Record ID conforms to the required
// "<type>/<key>" format: a "/" separator with non-empty text on each side.
func CheckRecordID(recordID string) error {
	sep := strings.Index(recordID, "/")
	if sep <= 0 || sep == len(recordID)-1 {
		return errors.New("Error: Record ID not in correct format.")
	}
	return nil
}
// MakeEmptyRecord creates a record with empty data for the given record ID.
// The ID must pass CheckRecordID; otherwise nil and the validation error
// are returned.
func MakeEmptyRecord(recordID string) (*Record, error) {
	if err := CheckRecordID(recordID); err != nil {
		return nil, err
	}
	return &Record{
		RecordID: recordID,
		Data:     make(map[string]interface{}),
	}, nil
}
// MakeRecord creates a record from a generic data map. The map must
// contain a string "_id" entry, which becomes the RecordID.
//
// Note: the input map is mutated — its "_id" entry is deleted — and
// then adopted (not copied) as the record's Data, so callers share
// storage with the returned record.
func MakeRecord(data map[string]interface{}) (record *Record, err error) {
	recordID, ok := data["_id"].(string)
	if !ok {
		return nil, fmt.Errorf("Record data not in expected format: '_id' is not string.")
	}
	// Remove the id from the Data map: it is now stored in RecordID
	delete(data, "_id")
	record = &Record{
		RecordID: recordID,
		Data: data,
	}
	return record, nil
}
// MarshalJSON marshals the record as a flat JSON object: RecordID is
// emitted under the reserved "_id" key alongside all Data entries.
// Should Data itself contain an "_id" key, that entry wins, because it
// is copied in after RecordID is set.
func (r *Record) MarshalJSON() ([]byte, error) {
	jsonData := map[string]interface{}{
		"_id": r.RecordID,
	}
	for k, v := range r.Data {
		jsonData[k] = v
	}
	return json.Marshal(jsonData)
}
// UnmarshalJSON decodes a flat JSON object into the record. The "_id"
// entry must be a string and becomes the RecordID.
//
// NOTE(review): unlike MakeRecord, the "_id" key is left inside Data
// as well, so records round-tripped through UnmarshalJSON fail
// PreUploadValidate (which rejects all "_"-prefixed keys). Confirm
// whether this asymmetry is intentional.
func (r *Record) UnmarshalJSON(b []byte) error {
	jsonMap := map[string]interface{}{}
	err := json.Unmarshal(b, &jsonMap)
	if err != nil {
		return err
	}
	recordID, ok := jsonMap["_id"].(string)
	if !ok {
		return fmt.Errorf("Record data not in expected format: '_id' is not string.")
	}
	r.RecordID = recordID
	r.Data = jsonMap
	return nil
}
// PreUploadValidate checks that the record may be uploaded: the record
// ID must be well formed and no data key may use the reserved "_"
// prefix.
func (r *Record) PreUploadValidate() error {
	if err := CheckRecordID(r.RecordID); err != nil {
		return err
	}
	for key := range r.Data {
		if strings.HasPrefix(key, "_") {
			return fmt.Errorf("Cannot set data with reserved key: %s", key)
		}
	}
	return nil
}
// PostDownloadHandle normalizes a freshly downloaded record: it
// validates the record ID and strips every reserved ("_"-prefixed)
// key from Data except "_id".
func (r *Record) PostDownloadHandle() error {
	err := CheckRecordID(r.RecordID)
	if err != nil {
		return err
	}
	for idx := range r.Data {
		// Deleting map entries while ranging over the same map is
		// well defined in Go.
		if strings.HasPrefix(idx, "_") && idx != "_id" {
			delete(r.Data, idx)
		}
	}
	return nil
}
// PrettyPrintBytes renders the record as indented JSON suitable for
// display, going through the record's custom MarshalJSON encoding.
func (r *Record) PrettyPrintBytes() ([]byte, error) {
	result, err := json.MarshalIndent(r, "", " ")
	if err != nil {
		return nil, err
	}
	return result, nil
} | record/record.go | 0.699049 | 0.443239 | record.go | starcoder |
package smath
/*
Golang package implementing quaternion math
Purpose is to provide quaternion support under the MIT license as existing
Go quaternion packages are under more restrictive or unspecified licenses.
This project is licensed under the terms of the MIT license.
*/
import (
"math"
)
// Set copies every component of q into this quaternion.
func (qin *Quaternion) Set(q *Quaternion) {
	qin.X = q.X
	qin.Y = q.Y
	qin.Z = q.Z
	qin.W = q.W
}

// SetFromComponents sets this quaternion from its four scalar
// components (vector part x, y, z and scalar part w).
func (qin *Quaternion) SetFromComponents(x, y, z, w float64) {
	qin.X = x
	qin.Y = y
	qin.Z = z
	qin.W = w
}
// Conj returns the conjugate of qin: the scalar part is kept and the
// vector part is negated, (W,X,Y,Z) -> (W,-X,-Y,-Z).
func Conj(qin Quaternion) Quaternion {
	return Quaternion{
		W: qin.W,
		X: -qin.X,
		Y: -qin.Y,
		Z: -qin.Z,
	}
}

// Conjugate negates the vector part of this quaternion in place,
// (W,X,Y,Z) -> (W,-X,-Y,-Z).
func (qin *Quaternion) Conjugate() {
	qin.X, qin.Y, qin.Z = -qin.X, -qin.Y, -qin.Z
}
// Norm2 returns the squared Euclidean norm of a Quaternion:
// (W,X,Y,Z) -> W*W+X*X+Y*Y+Z*Z. Useful where the square root taken by
// Norm is unnecessary (e.g. Inv).
func Norm2(qin Quaternion) float64 {
	return qin.W*qin.W + qin.X*qin.X + qin.Y*qin.Y + qin.Z*qin.Z
}

// Norm returns the Euclidean (L2) norm of a Quaternion:
// (W,X,Y,Z) -> Sqrt(W*W+X*X+Y*Y+Z*Z).
// (The previous comment called this the L1 norm; the code computes the
// Euclidean norm.)
func Norm(qin Quaternion) float64 {
	return math.Sqrt(qin.W*qin.W + qin.X*qin.X + qin.Y*qin.Y + qin.Z*qin.Z)
}

// Scalar returns a scalar-only Quaternion representation of a float
// (W,0,0,0).
func Scalar(w float64) Quaternion {
	return Quaternion{W: w}
}
// Sum returns the component-wise sum of any number of quaternions.
func Sum(qin ...Quaternion) Quaternion {
	var w, x, y, z float64
	for _, q := range qin {
		w += q.W
		x += q.X
		y += q.Y
		z += q.Z
	}
	return Quaternion{w, x, y, z}
}
// Prod returns the non-commutative (Hamilton) product of any number of
// Quaternions, accumulated left to right starting from the identity
// quaternion (1,0,0,0).
func Prod(qin ...Quaternion) Quaternion {
	qout := Quaternion{1, 0, 0, 0}
	var w, x, y, z float64
	for _, q := range qin {
		// Hamilton product of qout and q, computed into temporaries so
		// every component reads the pre-update values of qout.
		w = qout.W*q.W - qout.X*q.X - qout.Y*q.Y - qout.Z*q.Z
		x = qout.W*q.X + qout.X*q.W + qout.Y*q.Z - qout.Z*q.Y
		y = qout.W*q.Y + qout.Y*q.W + qout.Z*q.X - qout.X*q.Z
		z = qout.W*q.Z + qout.Z*q.W + qout.X*q.Y - qout.Y*q.X
		qout = Quaternion{w, x, y, z}
	}
	return qout
}
// Unit returns the Quaternion rescaled to unit Euclidean (L2) norm.
// (The previous comment said "L1"; Norm computes the Euclidean norm.)
// A zero quaternion yields NaN components.
func Unit(qin Quaternion) Quaternion {
	k := Norm(qin)
	return Quaternion{qin.W / k, qin.X / k, qin.Y / k, qin.Z / k}
}

// Inv returns the multiplicative inverse: the conjugate rescaled by
// the squared norm so that Q * Inv(Q) = 1.
func Inv(qin Quaternion) Quaternion {
	k2 := Norm2(qin)
	q := Conj(qin)
	return Quaternion{q.W / k2, q.X / k2, q.Y / k2, q.Z / k2}
}
// Euler returns the Euler angles phi, theta, psi corresponding to a
// Quaternion. The formulas follow the conventional Z-Y-X
// (yaw-pitch-roll) decomposition — confirm against the package's
// intended convention before relying on the axis assignment.
func Euler(q Quaternion) (float64, float64, float64) {
	// Normalize first so the formulas below apply to a unit quaternion
	// (keeping the Asin argument within [-1, 1]).
	r := Unit(q)
	phi := math.Atan2(2*(r.W*r.X+r.Y*r.Z), 1-2*(r.X*r.X+r.Y*r.Y))
	theta := math.Asin(2 * (r.W*r.Y - r.Z*r.X))
	psi := math.Atan2(2*(r.X*r.Y+r.W*r.Z), 1-2*(r.Y*r.Y+r.Z*r.Z))
	return phi, theta, psi
}

// FromEuler returns a Quaternion corresponding to Euler angles phi,
// theta, psi (the inverse of Euler for in-range angles).
func FromEuler(phi, theta, psi float64) Quaternion {
	q := Quaternion{}
	q.W = math.Cos(phi/2)*math.Cos(theta/2)*math.Cos(psi/2) +
		math.Sin(phi/2)*math.Sin(theta/2)*math.Sin(psi/2)
	q.X = math.Sin(phi/2)*math.Cos(theta/2)*math.Cos(psi/2) -
		math.Cos(phi/2)*math.Sin(theta/2)*math.Sin(psi/2)
	q.Y = math.Cos(phi/2)*math.Sin(theta/2)*math.Cos(psi/2) +
		math.Sin(phi/2)*math.Cos(theta/2)*math.Sin(psi/2)
	q.Z = math.Cos(phi/2)*math.Cos(theta/2)*math.Sin(psi/2) -
		math.Sin(phi/2)*math.Sin(theta/2)*math.Cos(psi/2)
	return q
}
// FromAxisAngle writes into toQuat the rotation of angle radians about
// the axis (x, y, z). The axis should be normalized!
//
// Bug fix: the components were previously shifted — the cosine went to
// X and the sine terms to Y, Z, W. Everywhere else in this package
// (Conj, Prod, RotMat, Scalar) W is the scalar part, so the scalar
// cos(angle/2) belongs in W and the scaled axis in X, Y, Z.
func FromAxisAngle(angle float64, x, y, z float64, toQuat *Quaternion) {
	s := math.Sin(angle / 2)
	toQuat.W = math.Cos(angle / 2)
	toQuat.X = s * x
	toQuat.Y = s * y
	toQuat.Z = s * z
}
// RotMat returns the rotation matrix (as float array) corresponding to
// a Quaternion. The input is normalized first, so non-unit quaternions
// are accepted.
func RotMat(qin Quaternion) [3][3]float64 {
	q := Unit(qin)
	m := [3][3]float64{}
	// Standard unit-quaternion to rotation-matrix conversion, filled
	// row by row.
	m[0][0] = 1 - 2*(q.Y*q.Y+q.Z*q.Z)
	m[0][1] = 2 * (q.X*q.Y - q.W*q.Z)
	m[0][2] = 2 * (q.W*q.Y + q.X*q.Z)
	m[1][1] = 1 - 2*(q.Z*q.Z+q.X*q.X)
	m[1][2] = 2 * (q.Y*q.Z - q.W*q.X)
	m[1][0] = 2 * (q.W*q.Z + q.Y*q.X)
	m[2][2] = 1 - 2*(q.X*q.X+q.Y*q.Y)
	m[2][0] = 2 * (q.Z*q.X - q.W*q.Y)
	m[2][1] = 2 * (q.W*q.X + q.Z*q.Y)
	return m
} | smath/quaternion.go | 0.926345 | 0.570152 | quaternion.go | starcoder |
package main
import (
"fmt"
"github.com/golang-collections/go-datastructures/queue"
"math"
)
// Node is one node of a binary search tree. value is the ordering key;
// left holds values < value and right holds values >= value (see Add).
type Node struct {
	value int
	left, right *Node
}

// Tree is a binary search tree addressed through its root pointer.
type Tree struct {
	root *Node
}
// Add inserts value into the tree, preserving BST ordering.
func (t *Tree) Add(value int) {
	t.root = Add(t.root, value)
}

// Add inserts value into the subtree rooted at n and returns the
// (possibly new) subtree root. Duplicates go to the right subtree.
// (The previous comment claimed the leaf case was equivalent to
// &Tree{value: value}; the type constructed is Node.)
func Add(n *Node, value int) *Node {
	if n == nil {
		return &Node{value: value}
	}
	if value < n.value {
		n.left = Add(n.left, value)
	} else {
		n.right = Add(n.right, value)
	}
	return n
}
// InOrder prints the tree's values in ascending (left-node-right)
// order, followed by a newline.
func (t *Tree) InOrder() {
	InOrder(t.root)
	fmt.Println()
}

// InOrder prints the subtree rooted at n in left-node-right order.
func InOrder(n *Node) {
	if n != nil {
		InOrder(n.left)
		fmt.Print(n.value)
		InOrder(n.right)
	}
}

// PreOrder prints the tree's values in node-left-right order, followed
// by a newline.
func (t *Tree) PreOrder() {
	PreOrder(t.root)
	fmt.Println()
}

// PreOrder prints the subtree rooted at n in node-left-right order.
func PreOrder(n *Node) {
	if n != nil {
		fmt.Print(n.value)
		PreOrder(n.left)
		PreOrder(n.right)
	}
}

// PostOrder prints the tree's values in left-right-node order,
// followed by a newline.
func (t *Tree) PostOrder() {
	PostOrder(t.root)
	fmt.Println()
}

// PostOrder prints the subtree rooted at n in left-right-node order.
func PostOrder(n *Node) {
	if n != nil {
		PostOrder(n.left)
		PostOrder(n.right)
		fmt.Print(n.value)
	}
}
// Sort sorts values in place by loading them into a BST and reading
// them back in order.
func Sort(values []int) {
	t := new(Tree)
	for _, v := range values {
		t.Add(v)
	}
	// appendValues appends into values[:0]; exactly len(values)
	// elements come back, so the appends never reallocate and every
	// write lands in values' own backing array. The returned slice can
	// therefore be discarded.
	appendValues(values[:0], t.root)
}

// appendValues appends the elements of t to values in order
// and returns the resulting slice.
func appendValues(values []int, t *Node) []int {
	if t != nil {
		values = appendValues(values, t.left)
		values = append(values, t.value)
		values = appendValues(values, t.right)
	}
	return values
}
// PrintBredthFirst prints the tree's values level by level
// (breadth-first), space separated and terminated with a newline.
// (The misspelling of "Breadth" is kept; renaming would break callers.)
func (t *Tree) PrintBredthFirst() {
	que := new(queue.Queue)
	var temp *Node
	if t.root != nil {
		que.Put(t.root)
	}
	for que.Empty() == false {
		// Get(1) dequeues one item; the error is ignored — assumed
		// never to fire for a non-empty queue (TODO confirm against
		// the queue package's contract).
		temp2, _ := que.Get(1)
		temp = temp2[0].(*Node)
		fmt.Print(temp.value, " ")
		if temp.left != nil {
			que.Put(temp.left)
		}
		if temp.right != nil {
			que.Put(temp.right)
		}
	}
	fmt.Println()
}
// NthPreOrder prints the value of the index-th node (1-based) visited
// by a pre-order walk of the tree.
func (t *Tree) NthPreOrder(index int) {
	counter := 0
	NthPreOrder(t.root, index, &counter)
}

// NthPreOrder walks the subtree pre-order, counting nodes in *counter,
// and prints the node at which *counter reaches index. Once the target
// has been passed the walk stops early (the original traversed the
// whole tree); the printed output is unchanged.
func NthPreOrder(node *Node, index int, counter *int) {
	if node == nil || *counter >= index {
		return
	}
	*counter++
	if *counter == index {
		fmt.Println(node.value)
	}
	NthPreOrder(node.left, index, counter)
	NthPreOrder(node.right, index, counter)
}

// NthPostOrder prints the value of the index-th node (1-based) visited
// by a post-order walk of the tree.
func (t *Tree) NthPostOrder(index int) {
	counter := 0
	NthPostOrder(t.root, index, &counter)
}

// NthPostOrder counts nodes in post-order and prints the index-th one,
// stopping early once the target has been passed.
func NthPostOrder(node *Node, index int, counter *int) {
	if node == nil || *counter >= index {
		return
	}
	NthPostOrder(node.left, index, counter)
	NthPostOrder(node.right, index, counter)
	*counter++
	if *counter == index {
		fmt.Println(node.value)
	}
}

// NthInOrder prints the value of the index-th node (1-based) visited
// by an in-order walk of the tree.
func (t *Tree) NthInOrder(index int) {
	counter := 0
	NthInOrder(t.root, index, &counter)
}

// NthInOrder counts nodes in order and prints the index-th one,
// stopping early once the target has been passed.
func NthInOrder(node *Node, index int, counter *int) {
	if node == nil || *counter >= index {
		return
	}
	NthInOrder(node.left, index, counter)
	*counter++
	if *counter == index {
		fmt.Println(node.value)
	}
	NthInOrder(node.right, index, counter)
}
// Find reports whether value is present in the tree, using an
// iterative BST descent.
func (t *Tree) Find(value int) bool {
	for curr := t.root; curr != nil; {
		switch {
		case curr.value == value:
			return true
		case value < curr.value:
			curr = curr.left
		default:
			curr = curr.right
		}
	}
	return false
}
// FindMin returns the smallest value in the tree; the second result is
// false (and "EmptyTreeException" is printed) when the tree is empty.
//
// The four min/max accessors below previously each re-implemented the
// same descent and empty check; they now delegate to the FindMin /
// FindMax node helpers, with identical results and output.
func (t *Tree) FindMin() (int, bool) {
	node := FindMin(t.root)
	if node == nil {
		return 0, false
	}
	return node.value, true
}

// FindMax returns the largest value in the tree; the second result is
// false (and "EmptyTreeException" is printed) when the tree is empty.
func (t *Tree) FindMax() (int, bool) {
	node := FindMax(t.root)
	if node == nil {
		return 0, false
	}
	return node.value, true
}

// FindMaxNode returns the node holding the largest value, or nil (with
// an "EmptyTreeException" message) for an empty tree.
func (t *Tree) FindMaxNode() *Node {
	return FindMax(t.root)
}

// FindMinNode returns the node holding the smallest value, or nil
// (with an "EmptyTreeException" message) for an empty tree.
func (t *Tree) FindMinNode() *Node {
	return FindMin(t.root)
}

// FindMax walks right-most from curr and returns the maximum node; it
// prints "EmptyTreeException" and returns nil when curr is nil.
func FindMax(curr *Node) *Node {
	if curr == nil {
		fmt.Println("EmptyTreeException")
		return nil
	}
	for curr.right != nil {
		curr = curr.right
	}
	return curr
}

// FindMin walks left-most from curr and returns the minimum node; it
// prints "EmptyTreeException" and returns nil when curr is nil.
func FindMin(curr *Node) *Node {
	if curr == nil {
		fmt.Println("EmptyTreeException")
		return nil
	}
	for curr.left != nil {
		curr = curr.left
	}
	return curr
}
// Free discards the entire tree; the garbage collector reclaims the
// nodes once no other references remain.
func (t *Tree) Free() {
	t.root = nil
}

// DeleteNode removes one node carrying value from the tree, if present.
func (t *Tree) DeleteNode(value int) {
	t.root = DeleteNode(t.root, value)
}
// DeleteNode removes one occurrence of value from the subtree rooted
// at node and returns the new subtree root. Three cases:
//   - leaf: the node simply disappears (nil is returned);
//   - one child: that child is promoted in the node's place;
//   - two children: the node takes the value of its in-order
//     predecessor (the max of the left subtree), and that predecessor
//     is deleted recursively from the left subtree.
func DeleteNode(node *Node, value int) *Node {
	var temp *Node = nil
	if node != nil {
		if node.value == value {
			if node.left == nil && node.right == nil {
				return nil
			} else {
				if node.left == nil {
					temp = node.right
					return temp
				}
				if node.right == nil {
					temp = node.left
					return temp
				}
				maxNode := FindMax(node.left)
				maxValue := maxNode.value
				node.value = maxValue
				node.left = DeleteNode(node.left, maxValue)
			}
		} else {
			// Descend toward the side that can contain value.
			if node.value > value {
				node.left = DeleteNode(node.left, value)
			} else {
				node.right = DeleteNode(node.right, value)
			}
		}
	}
	return node
}
// TreeDepth returns the height of the tree measured in nodes (0 for an
// empty tree).
func (t *Tree) TreeDepth() int {
	return TreeDepth(t.root)
}

// TreeDepth returns the height of the subtree rooted at root.
func TreeDepth(root *Node) int {
	if root == nil {
		return 0
	}
	left, right := TreeDepth(root.left), TreeDepth(root.right)
	if left > right {
		return left + 1
	}
	return right + 1
}
// isEqual reports whether this tree and t2 have identical structure
// and values.
func (t *Tree) isEqual(t2 *Tree) bool {
	return Identical(t.root, t2.root)
}

// Identical reports whether the two subtrees are structurally equal
// node for node.
func Identical(node1 *Node, node2 *Node) bool {
	if node1 == nil || node2 == nil {
		// Equal only when both ran out at the same time.
		return node1 == node2
	}
	return node1.value == node2.value &&
		Identical(node1.left, node2.left) &&
		Identical(node1.right, node2.right)
}
// Ancestor returns the lowest common ancestor node of the BST keys
// first and second (order of the two arguments does not matter).
func (t *Tree) Ancestor(first int, second int) *Node {
	if first > second {
		first, second = second, first
	}
	return Ancestor(t.root, first, second)
}

// Ancestor descends from curr: while both keys lie on the same side
// the search moves down that side; the first node between the two keys
// is their lowest common ancestor.
func Ancestor(curr *Node, first int, second int) *Node {
	switch {
	case curr == nil:
		return nil
	case curr.value > first && curr.value > second:
		return Ancestor(curr.left, first, second)
	case curr.value < first && curr.value < second:
		return Ancestor(curr.right, first, second)
	default:
		return curr
	}
}
// CopyTree returns a deep copy of the tree.
func (t *Tree) CopyTree() *Tree {
	return &Tree{root: CopyTree(t.root)}
}

// CopyTree deep-copies the subtree rooted at curr.
func CopyTree(curr *Node) *Node {
	if curr == nil {
		return nil
	}
	return &Node{
		value: curr.value,
		left:  CopyTree(curr.left),
		right: CopyTree(curr.right),
	}
}

// CopyMirrorTree returns a deep copy of the tree with every node's
// children swapped (a mirror image).
func (t *Tree) CopyMirrorTree() *Tree {
	return &Tree{root: CopyMirrorTree(t.root)}
}

// CopyMirrorTree deep-copies the subtree rooted at curr, swapping the
// left and right children at every level.
func CopyMirrorTree(curr *Node) *Node {
	if curr == nil {
		return nil
	}
	return &Node{
		value: curr.value,
		left:  CopyMirrorTree(curr.right),
		right: CopyMirrorTree(curr.left),
	}
}
// NumNodes returns the total number of nodes in the tree.
func (t *Tree) NumNodes() int {
	return NumNodes(t.root)
}

// NumNodes counts the nodes in the subtree rooted at curr.
func NumNodes(curr *Node) int {
	if curr == nil {
		return 0
	}
	return 1 + NumNodes(curr.right) + NumNodes(curr.left)
}
// NumFullNodesBT returns the number of full nodes — nodes that have
// both a left and a right child — in the tree.
//
// Bug fix: the wrapper previously returned NumNodes(t.root), the total
// node count, instead of delegating to NumFullNodesBT.
func (t *Tree) NumFullNodesBT() int {
	return NumFullNodesBT(t.root)
}

// NumFullNodesBT counts the nodes in the subtree rooted at curr that
// have both children.
func NumFullNodesBT(curr *Node) int {
	var count int
	if curr == nil {
		return 0
	}
	count = NumFullNodesBT(curr.right) + NumFullNodesBT(curr.left)
	if curr.right != nil && curr.left != nil {
		count++
	}
	return count
}
// MaxLengthPathBT returns the number of nodes on the longest path
// between any two leaves (the tree's diameter, counted in nodes).
func (t *Tree) MaxLengthPathBT() int {
	return MaxLengthPathBT(t.root)
}

// MaxLengthPathBT computes the diameter of the subtree rooted at curr
// as the larger of (a) the path through curr — left depth + right
// depth + 1 — and (b) the diameters of the two subtrees.
// TreeDepth is recomputed at every node, so this is O(n^2) in the
// worst case.
func MaxLengthPathBT(curr *Node) int {
	var max, leftPath, rightPath, leftMax, rightMax int
	if curr == nil {
		return 0
	}
	leftPath = TreeDepth(curr.left)
	rightPath = TreeDepth(curr.right)
	max = leftPath + rightPath + 1
	leftMax = MaxLengthPathBT(curr.left)
	rightMax = MaxLengthPathBT(curr.right)
	if leftMax > max {
		max = leftMax
	}
	if rightMax > max {
		max = rightMax
	}
	return max
}
// NumLeafNodes returns the number of leaves in the tree.
func (t *Tree) NumLeafNodes() int {
	return NumLeafNodes(t.root)
}

// NumLeafNodes counts the childless nodes in the subtree rooted at
// curr.
func NumLeafNodes(curr *Node) int {
	switch {
	case curr == nil:
		return 0
	case curr.left == nil && curr.right == nil:
		return 1
	default:
		return NumLeafNodes(curr.right) + NumLeafNodes(curr.left)
	}
}
// SumAllBT returns the sum of every value stored in the tree.
func (t *Tree) SumAllBT() int {
	return SumAllBT(t.root)
}

// SumAllBT sums the values in the subtree rooted at curr.
func SumAllBT(curr *Node) int {
	if curr == nil {
		return 0
	}
	return curr.value + SumAllBT(curr.right) + SumAllBT(curr.left)
}
// IsBST3 checks the BST property by comparing every node against the
// maximum of its left subtree and the minimum of its right subtree.
// NOTE(review): the "<=" makes IsBST3 reject duplicate values, while
// Add stores duplicates in the right subtree and IsBST accepts them —
// confirm which contract is intended. Re-walking the subtrees makes
// this O(n^2) in the worst case.
func IsBST3(root *Node) bool {
	if root == nil {
		return true
	}
	if root.left != nil && FindMax(root.left).value > root.value {
		return false
	}
	if root.right != nil && FindMin(root.right).value <= root.value {
		return false
	}
	return (IsBST3(root.left) && IsBST3(root.right))
}

// IsBST reports whether the tree is a valid BST; all values are
// assumed to lie within [math.MinInt32, math.MaxInt32].
func (t *Tree) IsBST() bool {
	return IsBST(t.root, math.MinInt32, math.MaxInt32)
}

// IsBST checks that every value in the subtree lies within [min, max],
// narrowing the allowed window on the way down.
func IsBST(curr *Node, min int, max int) bool {
	if curr == nil {
		return true
	}
	if curr.value < min || curr.value > max {
		return false
	}
	return IsBST(curr.left, min, curr.value) && IsBST(curr.right, curr.value, max)
}
// IsBST2 reports whether the tree is a valid BST by checking that an
// in-order walk visits values in non-decreasing order.
//
// Bug fix: the previous-value tracker was initialized to 0, so any
// tree containing negative values (e.g. a single node -5) was wrongly
// reported as invalid. It now starts at math.MinInt32, the same
// sentinel the other validators in this file use.
func (t *Tree) IsBST2() bool {
	prev := math.MinInt32
	return IsBST2(t.root, &prev)
}

// IsBST2 walks the subtree in order; *count carries the most recently
// visited value and the walk fails as soon as a smaller value is seen.
func IsBST2(root *Node, count *int) bool {
	if root == nil {
		return true
	}
	if !IsBST2(root.left, count) {
		return false
	}
	if *count > root.value {
		return false
	}
	*count = root.value
	return IsBST2(root.right, count)
}
// Stack is a simple LIFO stack of ints backed by a slice.
type Stack struct {
	s []int
}

// Push appends value to the top of the stack.
func (s *Stack) Push(value int) {
	s.s = append(s.s, value)
}

// Pop removes and returns the top value. Popping an empty stack
// panics with an index-out-of-range error.
func (s *Stack) Pop() int {
	last := len(s.s) - 1
	v := s.s[last]
	s.s = s.s[:last]
	return v
}

// IsEmpty reports whether the stack holds no values.
func (s *Stack) IsEmpty() bool {
	return len(s.s) == 0
}

// Length returns the number of values on the stack.
func (s *Stack) Length() int {
	return len(s.s)
}

// Print writes the stack bottom-to-top, space separated, ending with a
// newline.
func (s *Stack) Print() {
	for _, v := range s.s {
		fmt.Print(v, " ")
	}
	fmt.Println()
}
// PrintAllPath prints every root-to-leaf path of the tree, one path
// per line.
func (t *Tree) PrintAllPath() {
	stk := new(Stack)
	PrintAllPath(t.root, stk)
}

// PrintAllPath accumulates the current path on stk and prints it when
// a leaf is reached. The right subtree is visited before the left, so
// right-hand paths print first.
func PrintAllPath(curr *Node, stk *Stack) {
	if curr == nil {
		return
	}
	stk.Push(curr.value)
	if curr.left == nil && curr.right == nil {
		stk.Print()
		stk.Pop()
		return
	}
	PrintAllPath(curr.right, stk)
	PrintAllPath(curr.left, stk)
	// Undo this frame's Push before returning to the parent.
	stk.Pop()
}
// LCA returns the value of the lowest common ancestor of first and
// second; the second result is false (and "NotFoundException" is
// printed) when no candidate node exists.
// NOTE(review): when only one of the two values is present, the walk
// still returns that value's node — the code never verifies both keys
// exist. Confirm callers expect this.
func (t *Tree) LCA(first int, second int) (int, bool) {
	ans := LCA(t.root, first, second)
	if ans != nil {
		return ans.value, true
	} else {
		fmt.Println("NotFoundException")
		return 0, false
	}
}

// LCA returns curr when first and second are found in different
// subtrees; otherwise it forwards whichever side reported a match.
func LCA(curr *Node, first int, second int) *Node {
	var left, right *Node
	if curr == nil {
		return nil
	}
	if curr.value == first || curr.value == second {
		return curr
	}
	left = LCA(curr.left, first, second)
	right = LCA(curr.right, first, second)
	if left != nil && right != nil {
		return curr
	} else if left != nil {
		return left
	} else {
		return right
	}
}
// LcaBST returns the value of the lowest common ancestor of the BST
// keys first and second; the second result is false (and
// "NotFoundException" is printed) when the search runs off the tree.
func (t *Tree) LcaBST(first int, second int) (int, bool) {
	return LcaBST(t.root, first, second)
}

// LcaBST descends iteratively: while both keys lie on the same side of
// curr the search moves down that side, and the first node between the
// keys is their lowest common ancestor.
func LcaBST(curr *Node, first int, second int) (int, bool) {
	for curr != nil {
		switch {
		case curr.value > first && curr.value > second:
			curr = curr.left
		case curr.value < first && curr.value < second:
			curr = curr.right
		default:
			return curr.value, true
		}
	}
	fmt.Println("NotFoundException")
	return 0, false
}
// TrimOutsidedataRange removes every value outside [min, max] from the
// tree.
func (t *Tree) TrimOutsidedataRange(min int, max int) {
	t.root = TrimOutsidedataRange(t.root, min, max)
}

// TrimOutsidedataRange trims the subtree bottom-up: the children are
// trimmed first, then an out-of-range node is replaced by its already
// trimmed surviving child. BST ordering guarantees that when
// curr.value < min the entire left subtree is below range too, so
// promoting the right child suffices (and symmetrically for
// curr.value > max).
func TrimOutsidedataRange(curr *Node, min int, max int) *Node {
	if curr == nil {
		return nil
	}
	curr.left = TrimOutsidedataRange(curr.left, min, max)
	curr.right = TrimOutsidedataRange(curr.right, min, max)
	if curr.value < min {
		return curr.right
	}
	if curr.value > max {
		return curr.left
	}
	return curr
}
// PrintIndataRange prints, in ascending order, every value in the tree
// that lies within [min, max], space separated.
func (t *Tree) PrintIndataRange(min int, max int) {
	PrintIndataRange(t.root, min, max)
}

// PrintIndataRange walks the subtree in order and prints the values
// that fall inside [min, max].
func PrintIndataRange(root *Node, min int, max int) {
	if root == nil {
		return
	}
	PrintIndataRange(root.left, min, max)
	if min <= root.value && root.value <= max {
		fmt.Print(root.value, " ")
	}
	PrintIndataRange(root.right, min, max)
}
// FloorBST returns the largest value <= val in the tree.
// Caveat: when no such value exists (or the tree is empty) the
// sentinel math.MaxInt32 is returned — callers must treat that value
// as "no floor".
func (t *Tree) FloorBST(val int) int {
	curr := t.root
	floor := math.MaxInt32
	for curr != nil {
		if curr.value == val {
			floor = curr.value
			break
		} else if curr.value > val {
			curr = curr.left
		} else {
			// curr.value < val: a candidate floor; look right for a
			// larger one.
			floor = curr.value
			curr = curr.right
		}
	}
	return floor
}

// CeilBST returns the smallest value >= val in the tree, or the
// sentinel math.MinInt32 when no such value exists.
func (t *Tree) CeilBST(val int) int {
	curr := t.root
	ceil := math.MinInt32
	for curr != nil {
		if curr.value == val {
			ceil = curr.value
			break
		} else if curr.value > val {
			// curr.value > val: a candidate ceiling; look left for a
			// smaller one.
			ceil = curr.value
			curr = curr.left
		} else {
			curr = curr.right
		}
	}
	return ceil
}
// FindMaxBT returns the largest value in the tree, treating it as an
// arbitrary binary tree (no BST ordering assumed). Empty trees yield
// math.MinInt32.
func (t *Tree) FindMaxBT() int {
	return FindMaxBT(t.root)
}

// FindMaxBT scans every node of the subtree for the maximum value.
func FindMaxBT(curr *Node) int {
	if curr == nil {
		return math.MinInt32
	}
	best := curr.value
	if left := FindMaxBT(curr.left); left > best {
		best = left
	}
	if right := FindMaxBT(curr.right); right > best {
		best = right
	}
	return best
}
// SearchBT reports whether value occurs anywhere in the subtree rooted
// at root, treating it as an arbitrary binary tree (both subtrees are
// searched).
//
// Bug fix: the original returned false when root.value == value — the
// match case was folded into the nil check — which made the function
// return false for every possible input.
func SearchBT(root *Node, value int) bool {
	if root == nil {
		return false
	}
	if root.value == value {
		return true
	}
	// Short-circuit: the right subtree is searched only if the left
	// subtree misses, matching the original left-then-right order.
	return SearchBT(root.left, value) || SearchBT(root.right, value)
}
// CreateBinaryTree builds a height-balanced binary tree from the first
// size elements of arr by recursively choosing the middle element as
// the subtree root. If arr is sorted ascending the result is a valid
// BST — the function itself neither sorts nor verifies this.
func CreateBinaryTree(arr []int, size int) *Tree {
	t := new(Tree)
	t.root = CreateBinaryTreeUtil(arr, 0, size-1)
	return t
}

// CreateBinaryTreeUtil builds a balanced subtree from arr[start..end]
// (inclusive bounds), returning nil for an empty range.
func CreateBinaryTreeUtil(arr []int, start int, end int) *Node {
	if start > end {
		return nil
	}
	mid := (start + end) / 2
	curr := new(Node)
	curr.value = arr[mid]
	curr.left = CreateBinaryTreeUtil(arr, start, mid-1)
	curr.right = CreateBinaryTreeUtil(arr, mid+1, end)
	return curr
}
// main exercises the tree: it builds a small BST and prints every
// root-to-leaf path. The commented-out calls demonstrate the rest of
// the API.
func main() {
	t := new(Tree)
	t.Add(2)
	t.Add(1)
	t.Add(3)
	t.Add(4)
	//t.InOrder()
	//t.PreOrder()
	//t.PostOrder()
	//lst := []int{2, 1, 3, 4}
	//Sort(lst)
	//fmt.Println(lst)
	//t.PrintBredthFirst()
	//t.NthPreOrder(2)
	//t.NthPostOrder(2)
	//t.NthInOrder(2)
	//fmt.Println(t.Find(10))
	//fmt.Println(t.Find(3))
	//fmt.Println(t.FindMax())
	//fmt.Println(t.FindMaxNode())
	//fmt.Println(t.FindMin())
	//fmt.Println(t.FindMinNode())
	//t.Free()
	//t.InOrder()
	//fmt.Println()
	t.PrintAllPath()
} | corpus/hermant.data-structure-algo/CH1/Tree.go | 0.621426 | 0.463566 | Tree.go | starcoder |
package datatypes
// const ticker

// Vote proposes the constant tick time while t has not yet reached it;
// once t >= ConstT there is nothing left to schedule.
func (node ConstTickerNode) Vote(t Time) MaybeTime {
	if t < node.ConstT {
		return SomeTime(node.ConstT)
	}
	return NothingTime
}

// Exec emits the constant payload exactly at ConstT and nothing at any
// other time.
func (node ConstTickerNode) Exec(t Time, _ InPipes) EvPayload {
	if t == node.ConstT {
		return Some(node.ConstW)
	}
	return NothingPayload
}

// Rinse is a no-op: a constant ticker reads no input streams.
func (node ConstTickerNode) Rinse(inpipes InPipes) {
}
// src ticker

// Vote never proposes a time: a source ticker is driven purely by its
// input stream.
func (node SrcTickerNode) Vote(t Time) MaybeTime {
	return NothingTime
}

// Exec consumes the next event from the source stream and returns its
// payload.
func (node SrcTickerNode) Exec(t Time, inpipes InPipes) EvPayload {
	return inpipes.strictConsume(node.SrcStream).Payload
}

// Rinse has nothing to do for a source ticker.
func (node SrcTickerNode) Rinse(inpipes InPipes) {
}
// delay ticker

// Vote proposes the earliest pending alarm time, if any.
func (node *DelayTickerNode) Vote (t Time) MaybeTime {
	if len(node.Alarms)==0 {
		return NothingTime
	}
	return SomeTime(node.Alarms[0].Time)
}

// insertInPlace inserts newev into the time-ordered alarms slice. An
// existing alarm with the same timestamp is merged via combiner rather
// than inserting a duplicate entry.
func insertInPlace(alarms []Event, newev Event, combiner func(a EvPayload, b EvPayload) EvPayload) []Event {
	i:=0
	for ;i < len(alarms); i++ {
		if (alarms[i].Time == newev.Time) {
			// Use combiner to merge payloads that fire at the same time.
			alarms[i].Payload = combiner(alarms[i].Payload, newev.Payload)
			return alarms
		}
		if (alarms[i].Time > newev.Time) {
			// i is now the insertion index.
			break
		}
	}
	// Grow the slice by one, then shift the tail right to open slot i.
	// copy handles the overlapping ranges correctly; when newev belongs
	// at the very end the copy is a no-op and the assignment rewrites
	// the slot that append just filled.
	alarms = append(alarms, newev)
	if (i != len(alarms)) {
		copy(alarms[i+1:], alarms[i:])
		alarms[i] = newev
	}
	return alarms
}
// Exec pops and returns the payload of the alarm scheduled exactly at
// t; at any other time it yields nothing.
func (node *DelayTickerNode) Exec (t Time, inpipes InPipes) EvPayload {
	if len(node.Alarms)>0 && t==node.Alarms[0].Time {
		ret := Some(node.Alarms[0].Payload)
		node.Alarms = node.Alarms[1:]
		return ret
	}
	return NothingPayload
}

// Rinse consumes one event from the source stream and, when it carries
// a payload, schedules that payload Eps time units into the future
// (merging with any alarm already at the same timestamp).
func (node *DelayTickerNode) Rinse (inpipes InPipes) {
	ev := inpipes.strictConsume(node.SrcStream)
	if (ev.Payload.IsSet) {
		// The payload is assumed to be an EpsVal; any other type
		// panics on this assertion.
		payload := ev.Payload.Val.(EpsVal)
		newev := Event{ev.Time+payload.Eps, Some(payload.Val)}
		node.Alarms = insertInPlace(node.Alarms, newev, node.Combiner)
	}
}
// Union ticker

// Vote proposes the earlier of the two children's votes.
func (node UnionTickerNode) Vote (t Time) MaybeTime {
	return Min(node.LeftTicker.Vote(t), node.RightTicker.Vote(t))
}

// Exec runs both children at t. If only one side produced a payload it
// is passed through unchanged; if both did, they are merged with
// Combiner (left argument first).
func (node UnionTickerNode) Exec (t Time, inpipes InPipes) EvPayload {
	pleft := node.LeftTicker.Exec(t, inpipes)
	pright := node.RightTicker.Exec(t, inpipes)
	if (!pleft.IsSet) {
		return pright
	}
	if (!pright.IsSet) {
		return pleft
	}
	return node.Combiner(pleft, pright)
}

// Rinse forwards the rinse to both children.
func (node UnionTickerNode) Rinse (inpipes InPipes) {
	node.LeftTicker.Rinse(inpipes)
	node.RightTicker.Rinse(inpipes)
} | striver-go/datatypes/tickernodes.go | 0.633637 | 0.433981 | tickernodes.go | starcoder |
package demofixtures
import (
"fmt"
"math"
"strconv"
"strings"
"unicode"
)
const absoluteZeroInCelsius float64 = -273.15
const absoluteZeroInFahrenheit float64 = -459.67
const parseError = "Expected float with suffix F, C or K but got '%v'"
const scaleError = "Unrecognized temperature scale: %v"
// NewTemperatureFactory returns a Temperature Factory
func NewTemperatureFactory() *TemperatureFactory {
return new(TemperatureFactory)
}
// TemperatureFactory is an example fixture factory.
type TemperatureFactory struct{}
// NewTemperatureConverter creates a TemperatureConverter.
func (factory *TemperatureFactory) NewTemperatureConverter() *TemperatureConverter {
return new(TemperatureConverter)
}
// NewTemperature creates a Temperature.
func (factory *TemperatureFactory) NewTemperature(input string) *Temperature {
temperature := new(Temperature)
temperature.Parse(input)
return temperature
}
// TemperatureConverter shows how to use objects as parameters.
type TemperatureConverter struct{}
// ConvertTo converts temperatures between scales.
func (temperatureConverter *TemperatureConverter) ConvertTo(input *Temperature, scale string) float64 {
return input.ValueIn(scale)
}
// Temperature is an example parsable object.
type Temperature struct {
value float64
}
// ToString serializes a Temperature.
func (temperature *Temperature) ToString() string {
return fmt.Sprintf("%v K", temperature.value)
}
// Parse deserializes a string into a Temperature.
func (temperature *Temperature) Parse(input string) {
if input == "" {
panic(fmt.Errorf(parseError, ""))
}
scale := input[len(input)-1:]
baseValue := strings.TrimSpace(input[:len(input)-1])
temperatureValue, err := strconv.ParseFloat(baseValue, 64)
if err != nil {
panic(fmt.Errorf(parseError, input))
}
switch scale {
case "F":
temperature.value = math.Round(10000.0*(temperatureValue-absoluteZeroInFahrenheit)*5.0/9.0) / 10000.0
case "C":
temperature.value = temperatureValue - absoluteZeroInCelsius
case "K":
temperature.value = temperatureValue
default:
panic(fmt.Errorf(parseError, input))
}
}
// ValueIn returns the temperature value in the required scale (F, C or K).
func (temperature *Temperature) ValueIn(scale string) float64 {
if scale == "" {
panic(fmt.Errorf(scaleError, ""))
}
switch unicode.ToUpper(rune(scale[0])) {
case 'F':
return math.Round(10000.0*(temperature.value*9.0/5.0+absoluteZeroInFahrenheit)) / 10000.0
case 'C':
return temperature.value + absoluteZeroInCelsius
case 'K':
return temperature.value
default:
panic(fmt.Errorf(scaleError, scale))
}
} | examples/demofixtures/Temperature.go | 0.84672 | 0.489442 | Temperature.go | starcoder |
package control
import (
"encoding/binary"
"io"
)
// Type is the control block type.
type Type = byte
// Control Block Types
var (
Data Type = 0b1000_0000
DataSize Type = 0b0100_0000
Data1 Type = 0b0010_0000
Data2 Type = 0b0001_0000
Skip Type = 0b0000_1000
DataSizeSize Type = 0b0000_0100
SkipSize Type = 0b0000_0010
Null Type = 0b0000_0001
Empty Type = 0b0000_0000
)
// Control Block Masks
var (
dataMask byte = 0b1000_0000
dataSizeMask byte = 0b1100_0000
data1Mask byte = 0b1110_0000
data2Mask byte = 0b1111_0000
skipMask byte = 0b1111_1000
dataSizeSizeMask byte = 0b1111_1100
skipSizeMask byte = 0b1111_1110
)
// TypeString returns the string name for the type.
func TypeString(t Type) string {
switch t {
case Data:
return "Data"
case DataSize:
return "Data Size"
case Data1:
return "Data + 1"
case Data2:
return "Data + 2"
case Skip:
return "Skip"
case DataSizeSize:
return "Data Size Size"
case SkipSize:
return "Skip Size"
case Null:
return "Null"
case Empty:
return "Empty"
}
return "Invalid"
}
// New returns a configured control byte.
func New(t Type, value uint8) (b byte, err error) {
switch t {
case Data:
if value > 127 {
err = Error.New("value too large: %d", value)
return
}
b = (^dataMask & value) | Data
case DataSize:
if value > 64 {
err = Error.New("value too large: %d", value)
return
}
value = value - 1
b = (^dataSizeMask & value) | DataSize
case Data1:
if value > 31 {
err = Error.New("value too large: %d", value)
return
}
b = (^data1Mask & value) | Data1
case Data2:
if value > 15 {
err = Error.New("value too large: %d", value)
return
}
b = (^data2Mask & value) | Data2
case Skip:
if value > 8 {
err = Error.New("value too large: %d", value)
return
}
value = value - 1
b = (^skipMask & value) | Skip
case DataSizeSize:
if value > 4 {
err = Error.New("value too large: %d", value)
return
}
value = value - 1
b = (^dataSizeSizeMask & value) | DataSizeSize
case SkipSize:
if value > 2 {
err = Error.New("value too large: %d", value)
return
}
value = value - 1
b = (^skipSizeMask & value) | SkipSize
case Null:
b = Null
case Empty:
b = Empty
}
return
}
// Parse returns the control block type and value.
func Parse(b byte) (t Type, value uint8, err error) {
if b&dataMask == Data {
v := b & ^dataMask
return Data, v, nil
} else if b&dataSizeMask == DataSize {
v := b & ^dataSizeMask
return DataSize, v + 1, nil
} else if b&data1Mask == Data1 {
v := b & ^data1Mask
return Data1, v, nil
} else if b&data2Mask == Data2 {
v := b & ^data2Mask
return Data2, v, nil
} else if b&skipMask == Skip {
v := b & ^skipMask
return Skip, v + 1, nil
} else if b&dataSizeSizeMask == DataSizeSize {
v := b & ^dataSizeSizeMask
return DataSizeSize, v + 1, nil
} else if b&skipSizeMask == SkipSize {
v := b & ^skipSizeMask
return SkipSize, v + 1, nil
} else if b == Null {
return Null, 0, nil
} else if b == Empty {
return Empty, 0, nil
}
return 0, 0, Error.New("invalid control byte: %08b", b)
}
// Block is a block.
type Block struct {
Type Type
Data []byte
Size uint64
}
func (b *Block) resize(size uint64) {
if uint64(cap(b.Data)) < size {
b.Data = make([]byte, size)
} else {
b.Data = b.Data[:size]
}
}
// Decoder is a decoder.
type Decoder struct {
r io.Reader
}
// NewDecoder returns a new decoder.
func NewDecoder(r io.Reader) *Decoder {
return &Decoder{
r: r,
}
}
// Decode parses a block from the reader.
func (d *Decoder) Decode(b *Block) (err error) {
defer Error.WrapP(&err)
cb := make([]byte, 1)
_, err = io.ReadFull(d.r, cb)
if err != nil {
return
}
t, v, err := Parse(cb[0])
if err != nil {
return
}
b.Type = t
b.Data = b.Data[:0]
b.Size = 0
switch t {
case Data:
b.Data = append(b.Data, byte(v))
case DataSize:
b.resize(uint64(v))
_, err = io.ReadFull(d.r, b.Data)
if err != nil {
return
}
case Data1:
b.resize(2)
b.Data[0] = v
_, err = io.ReadFull(d.r, b.Data[1:])
if err != nil {
return
}
case Data2:
b.resize(3)
b.Data[0] = v
_, err = io.ReadFull(d.r, b.Data[1:])
if err != nil {
return
}
case Skip:
b.Size = uint64(v)
case DataSizeSize:
b.resize(uint64(v))
_, err = io.ReadFull(d.r, b.Data)
if err != nil {
return
}
buf := [4]byte{}
offset := len(buf) - len(b.Data)
copy(buf[offset:], b.Data)
b.Size = uint64(binary.BigEndian.Uint32(buf[:])) + 1
b.resize(b.Size)
_, err = io.ReadFull(d.r, b.Data)
if err != nil {
return
}
case SkipSize:
b.resize(uint64(v))
_, err = io.ReadFull(d.r, b.Data)
if err != nil {
return
}
buf := [2]byte{}
offset := len(buf) - len(b.Data)
copy(buf[offset:], b.Data)
b.Size = uint64(binary.BigEndian.Uint16(buf[:])) + 1
b.Data = nil
case Null, Empty:
// Nothing to do in this case. The block is already in the
// right condition.
}
return
}
// Encoder is an encoder.
type Encoder struct {
w io.Writer
}
// NewEncoder returns a new encoder.
func NewEncoder(w io.Writer) *Encoder {
return &Encoder{
w: w,
}
}
// Encode write a block to the writer.
func (e *Encoder) Encode(b *Block) (err error) {
defer Error.WrapP(&err)
switch b.Type {
case Data:
cb, err := New(b.Type, b.Data[0])
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
case DataSize:
cb, err := New(b.Type, uint8(len(b.Data)))
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
_, err = e.w.Write(b.Data)
if err != nil {
return err
}
case Data1:
cb, err := New(b.Type, b.Data[0])
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
_, err = e.w.Write(b.Data[1:])
if err != nil {
return err
}
case Data2:
cb, err := New(b.Type, b.Data[0])
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
_, err = e.w.Write(b.Data[1:])
if err != nil {
return err
}
case Skip:
cb, err := New(b.Type, uint8(b.Size))
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
case DataSizeSize:
var bytes uint8
buf := make([]byte, 4)
size := len(b.Data) - 1
if size < 1<<8 {
bytes = 1
buf[3] = uint8(size)
buf = buf[3:]
} else if size < 1<<16 {
bytes = 2
binary.BigEndian.PutUint16(buf[2:], uint16(size))
buf = buf[2:]
} else if size < 1<<24 {
bytes = 3
binary.BigEndian.PutUint32(buf, uint32(size))
buf = buf[1:]
} else {
bytes = 4
binary.BigEndian.PutUint32(buf, uint32(size))
}
cb, err := New(b.Type, bytes)
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
_, err = e.w.Write(buf)
if err != nil {
return err
}
_, err = e.w.Write(b.Data)
if err != nil {
return err
}
case SkipSize:
var bytes uint8
buf := make([]byte, 2)
if b.Size <= 1<<8 {
bytes = 1
buf[0] = uint8(b.Size - 1)
buf = buf[:1]
} else {
bytes = 2
binary.BigEndian.PutUint16(buf[:2], uint16(b.Size-1))
buf = buf[:2]
}
cb, err := New(b.Type, bytes)
if err != nil {
return err
}
_, err = e.w.Write([]byte{cb})
if err != nil {
return err
}
_, err = e.w.Write(buf)
if err != nil {
return err
}
case Null:
_, err = e.w.Write([]byte{Null})
if err != nil {
return err
}
case Empty:
_, err = e.w.Write([]byte{Empty})
if err != nil {
return err
}
}
return nil
} | control/control.go | 0.640861 | 0.558508 | control.go | starcoder |
package horizon
import (
"fmt"
"github.com/golang/geo/s2"
)
// RoadPositions Set of states
type RoadPositions []*RoadPosition
// RoadPosition Representation of state (in terms of Hidden Markov Model)
/*
ID - unique identifier of state
GraphEdge - pointer to closest edge in graph
GraphVertex - indentifier of closest vertex
Projected - point (Observation) project onto edge, pointer to GeoPoint
*/
type RoadPosition struct {
RoadPositionID int
GraphEdge *Edge
GraphVertex int64
Projected *GeoPoint
}
// NewRoadPositionFromLonLat Returns pointer to created State
/*
	stateID - unique identifier for state
	graphVertex - identifier of vertex which is closest to Observation
	e - pointer to Edge
	lon - longitude (X for SRID = 0)
	lat - latitude (Y for SRID = 0)
	srid - optional SRID (see https://en.wikipedia.org/wiki/Spatial_reference_system);
	when omitted, Projected is left nil
*/
func NewRoadPositionFromLonLat(stateID int, graphVertex int64, e *Edge, lon, lat float64, srid ...int) *RoadPosition {
	state := RoadPosition{
		RoadPositionID: stateID,
		GraphEdge:      e,
		GraphVertex:    graphVertex,
	}
	if len(srid) != 0 {
		// Go switch cases do not fall through, so the explicit `break`
		// statements of the original were redundant and have been removed.
		switch srid[0] {
		case 0:
			// SRID 0: planar (Euclidean) coordinates.
			state.Projected = NewEuclideanPoint(lon, lat)
		default:
			// SRID 4326 and any unrecognized SRID are treated as WGS84
			// (the 4326 case and the default case were identical).
			state.Projected = NewWGS84Point(lon, lat)
		}
	}
	return &state
}
// NewRoadPositionFromS2LatLng Returns pointer to created State
/*
	stateID - unique identifier for state
	graphVertex - identifier of vertex which is closest to Observation
	e - pointer to Edge
	latLng - pointer to s2.LatLng carrying the projected coordinates
	srid - optional SRID (see https://en.wikipedia.org/wiki/Spatial_reference_system);
	when omitted, Projected is left nil
*/
func NewRoadPositionFromS2LatLng(stateID int, graphVertex int64, e *Edge, latLng *s2.LatLng, srid ...int) *RoadPosition {
	state := RoadPosition{
		RoadPositionID: stateID,
		GraphEdge:      e,
		GraphVertex:    graphVertex,
	}
	if len(srid) != 0 {
		// Go switch cases do not fall through, so the explicit `break`
		// statements of the original were redundant and have been removed.
		switch srid[0] {
		case 0:
			// SRID 0: planar (Euclidean) coordinates.
			state.Projected = NewEuclideanPoint(latLng.Lng.Degrees(), latLng.Lat.Degrees())
		default:
			// SRID 4326 and any unrecognized SRID are treated as WGS84
			// (the 4326 case and the default case were identical).
			state.Projected = NewWGS84Point(latLng.Lng.Degrees(), latLng.Lat.Degrees())
		}
	}
	return &state
}
// ID Method to fit interface State (see https://github.com/chentoz/viterbi/blob/master/viterbi.go#L9)
func (state RoadPosition) ID() int {
	return state.RoadPositionID
}

// String Pretty format for State
// NOTE(review): dereferences state.Projected and state.GraphEdge without nil
// checks; Projected is nil when the constructors were called without an SRID
// — confirm callers guarantee both are set before calling String.
func (state RoadPosition) String() string {
	latlng := s2.LatLngFromPoint(state.Projected.Point)
	return fmt.Sprintf(
		"State is:\n\tSourceVertexID => %v\n\tTargetVertexID => %v\n\tSRID: %d\n\tCoords => %v",
		state.GraphEdge.Source, state.GraphEdge.Target, state.Projected.srid, latlng.String(),
	)
}
package geom
// A MultiPoint is a collection of Points.
type MultiPoint struct {
	// To represent a MultiPoint that allows EMPTY elements, e.g.
	// MULTIPOINT ( EMPTY, POINT(1.0 1.0), EMPTY), we have to
	// record ends. If the ith point is empty, ends[i] == ends[i-1].
	geom2
}
// NewMultiPoint returns a new, empty, MultiPoint.
func NewMultiPoint(Lay Layout) *MultiPoint {
	return NewMultiPointFlat(Lay, nil)
}

// NewMultiPointFlatOption represents an option that can be passed into
// NewMultiPointFlat.
type NewMultiPointFlatOption func(*MultiPoint)

// NewMultiPointFlatOptionWithEnds allows passing ends to NewMultiPointFlat,
// which allows the representation of empty points
// (an empty point i is encoded as ends[i] == ends[i-1]).
func NewMultiPointFlatOptionWithEnds(ends []int) NewMultiPointFlatOption {
	return func(mp *MultiPoint) {
		mp.ends = ends
	}
}
// NewMultiPointFlat returns a new MultiPoint with the given flat coordinates.
// By default every point is assumed to be non-empty; pass
// NewMultiPointFlatOptionWithEnds to describe empty points explicitly.
func NewMultiPointFlat(
	Lay Layout, FlatCoord []float64, opts ...NewMultiPointFlatOption,
) *MultiPoint {
	mp := &MultiPoint{}
	mp.Lay = Lay
	mp.Strd = Lay.Stride()
	mp.FlatCoord = FlatCoord
	for _, applyOpt := range opts {
		applyOpt(mp)
	}
	// Without explicit ends, derive one end per point, treating every
	// point as present.
	if mp.ends == nil && len(mp.FlatCoord) > 0 {
		n := 0
		if mp.Strd > 0 {
			n = len(FlatCoord) / mp.Strd
		}
		mp.ends = make([]int, n)
		for i := range mp.ends {
			mp.ends[i] = (i + 1) * mp.Strd
		}
	}
	return mp
}
// Area returns the area of g, i.e. zero (points have no area).
func (g *MultiPoint) Area() float64 {
	return 0
}

// Clone returns a deep copy.
func (g *MultiPoint) Clone() *MultiPoint {
	return deriveCloneMultiPoint(g)
}

// Length returns zero (points have no length).
func (g *MultiPoint) Length() float64 {
	return 0
}

// MustSetCoords sets the coordinates and panics on any error.
func (g *MultiPoint) MustSetCoords(coords []Coord) *MultiPoint {
	Must(g.SetCoords(coords))
	return g
}
// Coord returns the ith coord of g, or nil if the ith point is empty.
func (g *MultiPoint) Coord(i int) Coord {
	start := 0
	if i > 0 {
		start = g.ends[i-1]
	}
	end := g.ends[i]
	if end == start {
		// Empty point: no flat coordinates were recorded for it.
		return nil
	}
	return g.FlatCoord[start:end]
}
// SetCoords sets the coordinates. A nil entry in coords marks an empty point.
func (g *MultiPoint) SetCoords(coords []Coord) (*MultiPoint, error) {
	g.FlatCoord = nil
	g.ends = nil
	for _, coord := range coords {
		if coord != nil {
			var err error
			if g.FlatCoord, err = deflate0(g.FlatCoord, coord, g.Strd); err != nil {
				return nil, err
			}
		}
		// Record the running end even for empty points, so that
		// ends[i] == ends[i-1] marks point i as empty.
		g.ends = append(g.ends, len(g.FlatCoord))
	}
	return g, nil
}
// Coords unpacks and returns all of g's coordinates; empty points yield nil.
func (g *MultiPoint) Coords() []Coord {
	all := make([]Coord, len(g.ends))
	offset, prev := 0, 0
	for i, end := range g.ends {
		if end != prev {
			all[i] = inflate0(g.FlatCoord, offset, offset+g.Strd, g.Strd)
			offset += g.Strd
		}
		prev = end
	}
	return all
}
// NumCoords returns the number of coordinates in g
// (one per point, including empty points).
func (g *MultiPoint) NumCoords() int {
	return len(g.ends)
}

// SetSRID sets the SRID of g and returns g for chaining.
func (g *MultiPoint) SetSRID(Srid int) *MultiPoint {
	g.Srid = Srid
	return g
}

// NumPoints returns the number of Points, including empty ones.
func (g *MultiPoint) NumPoints() int {
	return len(g.ends)
}

// Point returns the ith Point. An empty element is returned as an empty
// Point carrying g's layout.
func (g *MultiPoint) Point(i int) *Point {
	coord := g.Coord(i)
	if coord == nil {
		return NewPointEmpty(g.Lay)
	}
	return NewPointFlat(g.Lay, coord)
}
// Push appends a point. Empty points are recorded without adding any flat
// coordinates. An error is returned if the layouts do not match.
func (g *MultiPoint) Push(p *Point) error {
	if got, want := p.Lay, g.Lay; got != want {
		return ErrLayoutMismatch{Got: got, Want: want}
	}
	if !p.Empty() {
		g.FlatCoord = append(g.FlatCoord, p.FlatCoord...)
	}
	// Recording the running length marks empty points implicitly.
	g.ends = append(g.ends, len(g.FlatCoord))
	return nil
}
// Swap swaps the values of g and g2 (layout, SRID, flat coordinates and
// ends are exchanged in a single wholesale struct assignment).
func (g *MultiPoint) Swap(g2 *MultiPoint) {
	*g, *g2 = *g2, *g
}
package tomgjson
import (
"encoding/xml"
"fmt"
"math"
"time"
"strconv"
)
// degreesToRadians converts an angle from degrees to radians.
func degreesToRadians(degrees float64) float64 {
	return degrees * math.Pi / 180
}
// radiansToDegrees converts an angle from radians to degrees.
func radiansToDegrees(radians float64) float64 {
	return radians * 180 / math.Pi
}
// distanceInMBetweenEarthCoordinates returns the great-circle distance in
// meters between two coordinates given in degrees, using the haversine
// formula on a sphere of radius 6378137 m (the WGS84 equatorial radius).
// The degree-to-radian conversions are inlined here so the function is
// self-contained.
func distanceInMBetweenEarthCoordinates(lat1, lon1, lat2, lon2 float64) float64 {
	const earthRadiusM = 6378137.0
	dLat := (lat2 - lat1) * math.Pi / 180
	dLon := (lon2 - lon1) * math.Pi / 180
	phi1 := lat1 * math.Pi / 180
	phi2 := lat2 * math.Pi / 180
	a := math.Sin(dLat/2)*math.Sin(dLat/2) +
		math.Sin(dLon/2)*math.Sin(dLon/2)*math.Cos(phi1)*math.Cos(phi2)
	c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return earthRadiusM * c
}
// angleFromCoordinate returns the initial bearing in degrees from point 1 to
// point 2, unwrapped so it differs from prev by at most 180° (keeping the
// heading continuous across the ±180° seam). Conversions are inlined so the
// function is self-contained.
func angleFromCoordinate(lat1, lon1, lat2, lon2, prev float64) float64 {
	lat1 = lat1 * math.Pi / 180
	lon1 = lon1 * math.Pi / 180
	lat2 = lat2 * math.Pi / 180
	lon2 = lon2 * math.Pi / 180
	x := math.Cos(lat2) * math.Sin(lon2-lon1)
	y := math.Cos(lat1)*math.Sin(lat2) - math.Sin(lat1)*math.Cos(lat2)*math.Cos(lon2-lon1)
	course := math.Atan2(x, y) * 180 / math.Pi
	// Shift by whole turns until the course is within half a turn of prev.
	for math.Abs(course-prev) > 180 {
		if course < prev {
			course += 360
		} else {
			course -= 360
		}
	}
	return course
}
// ids lists the fixed set of stream names, in canonical order. Stream slots
// are addressed by their index in this slice (see idx).
var ids = []string{
	// Supported GPX streams
	"lat (°)",
	"lon (°)",
	"ele (m)",
	"magvar (°)",
	"geoidheight (m)",
	"fix",
	"sat",
	"hdop",
	"vdop",
	"pdop",
	"ageofdgpsdata (s)",
	"dgpsid",
	// Calculated streams (derived from position and timing)
	"distance2d (m)",
	"distance3d (m)",
	"verticalSpeed (m/s)",
	"speed2d (m/s)",
	"speed3d (m/s)",
	"acceleration2d (m/s²)",
	"acceleration3d (m/s²)",
	"verticalAcceleration (m/s²)",
	"course (°)",
	"slope (°)",
	// Additional explicit date string
	"time",
}
// idx returns the index of the named stream in ids, or -1 if it is unknown.
func idx(item string) int {
	for position, name := range ids {
		if name == item {
			return position
		}
	}
	return -1
}
// appendToFloatStream appends *v to the stream named n in data and labels the
// stream (confirming it) whenever a real value is present. A nil v appends 0
// as a placeholder, which keeps all streams the same length; these zeros
// could potentially be replaced by interpolation based on neighbours and time.
func appendToFloatStream(data FormattedData, v *float64, n string) Stream {
	stream := data.Streams[idx(n)]
	if v == nil {
		// Placeholder for a missing sample.
		stream.Values = append(stream.Values, 0)
		return stream
	}
	stream.Values = append(stream.Values, *v)
	// Labelling marks the stream as confirmed so it survives cleanup.
	stream.Label = n
	return stream
}
// appendToStringStream appends *s to the string stream named n and labels it
// as confirmed. The pointer must be non-nil; the caller is expected to have
// checked that already.
func appendToStringStream(data FormattedData, s *string, n string) Stream {
	stream := data.Streams[idx(n)]
	stream.Strings = append(stream.Strings, *s)
	stream.Label = n // mark the stream as confirmed
	return stream
}
// stringFirstNumber parses the first character of s as a number and reports
// whether the parse succeeded. An empty string yields (0, false); the
// original sliced s[:1] unconditionally, which panics with an out-of-range
// index when s is empty.
func stringFirstNumber(s string) (float64, bool) {
	if s == "" {
		return 0, false
	}
	n, err := strconv.ParseFloat(s[:1], 64)
	if err != nil {
		return 0, false
	}
	return n, true
}
// FromGPX formats a compatible GPX file as a struct ready for mgJSON and returns it. Or returns an error.
// The optional extra bool will compute additional streams based on the existing
// data: cumulative 2D/3D distances, speeds, accelerations, vertical motion,
// course and slope, all derived from successive track points and timestamps.
func FromGPX(src []byte, extra bool) (FormattedData, error) {
	var data FormattedData
	// All GPX timestamps are normalized to UTC.
	utc, err := time.LoadLocation("UTC")
	if err != nil {
		return data, err
	}
	// Local mirror of the consumed subset of the GPX schema. Pointer fields
	// distinguish absent elements from genuine zero values.
	type Trkpt struct {
		XMLName       xml.Name `xml:"trkpt"`
		Time          *string  `xml:"time"`
		Lat           *float64 `xml:"lat,attr"`
		Lon           *float64 `xml:"lon,attr"`
		Ele           *float64 `xml:"ele"`
		Magvar        *float64 `xml:"magvar"`
		Geoidheight   *float64 `xml:"geoidheight"`
		Fix           *string  `xml:"fix"`
		Sat           *float64 `xml:"sat"`
		Hdop          *float64 `xml:"hdop"`
		Vdop          *float64 `xml:"vdop"`
		Pdop          *float64 `xml:"pdop"`
		Ageofdgpsdata *float64 `xml:"ageofdgpsdata"`
		Dgpsid        *float64 `xml:"dgpsid"`
	}
	type Trkseg struct {
		XMLName xml.Name `xml:"trkseg"`
		Trkpt   []Trkpt  `xml:"trkpt"`
	}
	type Trk struct {
		XMLName xml.Name `xml:"trk"`
		Trkseg  []Trkseg `xml:"trkseg"`
	}
	type Gpx struct {
		XMLName xml.Name `xml:"gpx"`
		Trk     []Trk    `xml:"trk"`
	}
	gpx := Gpx{}
	err = xml.Unmarshal(src, &gpx)
	if err != nil {
		return data, err
	}
	if len(gpx.Trk) < 1 {
		return data, fmt.Errorf("Error: No GPX tracks")
	}
	// Just reading one track for now
	if len(gpx.Trk[0].Trkseg) < 1 {
		return data, fmt.Errorf("Error: No GPX trkseg")
	}
	// Flatten all segments of the first track into a single point list.
	trkpts := []Trkpt{}
	for _, trkseg := range gpx.Trk[0].Trkseg {
		trkpts = append(trkpts, trkseg.Trkpt...)
	}
	// At least two points are needed for the delta-based derived streams.
	if len(trkpts) < 2 {
		return data, fmt.Errorf("Error: Not enough GPX trkpt")
	}
	// One Stream for each of the supported trkpt and custom fields
	data.Streams = make([]Stream, len(ids))
	data.Timing = make([]time.Time, len(trkpts))
	// NOTE(review): this loop has no effect — `st` is a copy of each element,
	// so the allocated slice is discarded immediately. The appends below
	// therefore start from empty streams, which is what keeps the stream
	// lengths consistent; if this preallocation ever took effect, values
	// would be appended after len(trkpts) pre-existing zeros. Confirm and
	// consider removing the loop.
	for _, st := range data.Streams {
		st.Values = make([]float64, len(trkpts))
	}
	for i, trkpt := range trkpts {
		if trkpt.Time == nil {
			return data, fmt.Errorf("Error: Missing timiing data in GPX")
		}
		t, err := time.Parse(time.RFC3339, *trkpt.Time)
		if err != nil {
			return data, err
		}
		t = t.In(utc)
		data.Timing[i] = t
		// Copy the raw GPX fields into their streams (zeros for missing values).
		data.Streams[idx("lat (°)")] = appendToFloatStream(data, trkpt.Lat, "lat (°)")
		data.Streams[idx("lon (°)")] = appendToFloatStream(data, trkpt.Lon, "lon (°)")
		data.Streams[idx("ele (m)")] = appendToFloatStream(data, trkpt.Ele, "ele (m)")
		data.Streams[idx("magvar (°)")] = appendToFloatStream(data, trkpt.Magvar, "magvar (°)")
		data.Streams[idx("geoidheight (m)")] = appendToFloatStream(data, trkpt.Geoidheight, "geoidheight (m)")
		if trkpt.Fix != nil {
			// The fix string (e.g. "2d", "3d", "dgps") is mapped to its
			// leading digit when it has one.
			fixNum, validFixNum := stringFirstNumber(*trkpt.Fix)
			if validFixNum {
				data.Streams[idx("fix")] = appendToFloatStream(data, &fixNum, "fix")
			}
		}
		data.Streams[idx("sat")] = appendToFloatStream(data, trkpt.Sat, "sat")
		data.Streams[idx("hdop")] = appendToFloatStream(data, trkpt.Hdop, "hdop")
		data.Streams[idx("vdop")] = appendToFloatStream(data, trkpt.Vdop, "vdop")
		data.Streams[idx("pdop")] = appendToFloatStream(data, trkpt.Pdop, "pdop")
		data.Streams[idx("ageofdgpsdata (s)")] = appendToFloatStream(data, trkpt.Ageofdgpsdata, "ageofdgpsdata (s)")
		data.Streams[idx("dgpsid")] = appendToFloatStream(data, trkpt.Dgpsid, "dgpsid")
		data.Streams[idx("time")] = appendToStringStream(data, trkpt.Time, "time")
		// Computed streams
		if extra && trkpt.Lat != nil && trkpt.Lon != nil {
			var distance2d float64
			var speed2d float64
			var acceleration2d float64
			var course float64
			var slope float64
			var distance3d float64
			var speed3d float64
			var acceleration3d float64
			var verticalSpeed float64
			var verticalAcceleration float64
			if i > 0 {
				prevLat := data.Streams[idx("lat (°)")].Values[i-1]
				prevLon := data.Streams[idx("lon (°)")].Values[i-1]
				distance2d = distanceInMBetweenEarthCoordinates(*trkpt.Lat, *trkpt.Lon, prevLat, prevLon)
				duration := data.Timing[i].Sub(data.Timing[i-1]).Seconds()
				//Make sure duration is not zero
				duration = math.Max(duration, 1e-9)
				speed2d = distance2d / duration
				// First interval: no previous speed exists yet, so the
				// acceleration is seeded with the speed value; it is
				// overwritten with a proper delta below when i > 1.
				acceleration2d = speed2d
				prevCourse := data.Streams[idx("course (°)")].Values[i-1]
				course = angleFromCoordinate(*trkpt.Lat, *trkpt.Lon, prevLat, prevLon, prevCourse)
				if trkpt.Ele != nil {
					prevEle := data.Streams[idx("ele (m)")].Values[i-1]
					verticalDist := *trkpt.Ele - prevEle
					slope = math.Atan2(verticalDist, distance2d)
					slope = radiansToDegrees(slope)
					distance3d = math.Sqrt(math.Pow(verticalDist, 2) + math.Pow(distance2d, 2))
					speed3d = distance3d / duration
					acceleration3d = speed3d
					verticalSpeed = verticalDist / duration
					verticalAcceleration = verticalSpeed
				}
				if i > 1 {
					// Distances become cumulative, and accelerations become
					// true speed deltas over the last interval's duration.
					prevDistance := data.Streams[idx("distance2d (m)")].Values[i-1]
					distance2d += prevDistance
					prevSpeed2d := data.Streams[idx("speed2d (m/s)")].Values[i-1]
					speed2dChange := speed2d - prevSpeed2d
					acceleration2d = speed2dChange / duration
					if trkpt.Ele != nil {
						prevDistance3d := data.Streams[idx("distance3d (m)")].Values[i-1]
						distance3d += prevDistance3d
						prevSpeed3d := data.Streams[idx("speed3d (m/s)")].Values[i-1]
						speed3dChange := speed3d - prevSpeed3d
						acceleration3d = speed3dChange / duration
						prevVerticalSpeed := data.Streams[idx("verticalSpeed (m/s)")].Values[i-1]
						verticalSpeedChange := verticalSpeed - prevVerticalSpeed
						verticalAcceleration = verticalSpeedChange / duration
					}
				}
			}
			data.Streams[idx("distance2d (m)")] = appendToFloatStream(data, &distance2d, "distance2d (m)")
			data.Streams[idx("speed2d (m/s)")] = appendToFloatStream(data, &speed2d, "speed2d (m/s)")
			data.Streams[idx("acceleration2d (m/s²)")] = appendToFloatStream(data, &acceleration2d, "acceleration2d (m/s²)")
			data.Streams[idx("course (°)")] = appendToFloatStream(data, &course, "course (°)")
			data.Streams[idx("slope (°)")] = appendToFloatStream(data, &slope, "slope (°)")
			data.Streams[idx("distance3d (m)")] = appendToFloatStream(data, &distance3d, "distance3d (m)")
			data.Streams[idx("speed3d (m/s)")] = appendToFloatStream(data, &speed3d, "speed3d (m/s)")
			data.Streams[idx("acceleration3d (m/s²)")] = appendToFloatStream(data, &acceleration3d, "acceleration3d (m/s²)")
			data.Streams[idx("verticalSpeed (m/s)")] = appendToFloatStream(data, &verticalSpeed, "verticalSpeed (m/s)")
			data.Streams[idx("verticalAcceleration (m/s²)")] = appendToFloatStream(data, &verticalAcceleration, "verticalAcceleration (m/s²)")
		}
	}
	// Clean up unconfirmed streams: any stream that never received a Label
	// (i.e. never saw a real value) is removed in place, scanning from the
	// back so shifting does not skip entries.
	for i := len(data.Streams) - 1; i >= 0; i-- {
		if len(data.Streams[i].Label) < 1 {
			copy(data.Streams[i:], data.Streams[i+1:])
			data.Streams[len(data.Streams)-1] = Stream{}
			data.Streams = data.Streams[:len(data.Streams)-1]
		}
	}
	return data, nil
}
package imaging
import (
"image"
"image/color"
)
// Clone returns a copy of the given image, always as an *image.NRGBA whose
// bounds are translated so that Min is at the origin.
//
// Type-specific fast paths avoid the generic color-model conversion:
//   - NRGBA: rows are copied wholesale.
//   - NRGBA64/RGBA64/Gray16: only the high byte of each big-endian 16-bit
//     channel is kept.
//   - RGBA/RGBA64: alpha-premultiplied channels are un-premultiplied.
//   - Gray/Gray16: the gray value is replicated into R, G and B; alpha opaque.
//   - YCbCr: converted to RGB; alpha opaque.
//   - Paletted: the palette is converted to NRGBA once, then indexed.
//
// Any other type falls back to the generic per-pixel color conversion.
func Clone(img image.Image) *image.NRGBA {
	srcBounds := img.Bounds()
	// Destination bounds are the source bounds shifted to start at (0, 0).
	dstBounds := srcBounds.Sub(srcBounds.Min)
	dst := image.NewNRGBA(dstBounds)
	dstMinX := dstBounds.Min.X
	dstMinY := dstBounds.Min.Y
	srcMinX := srcBounds.Min.X
	srcMinY := srcBounds.Min.Y
	srcMaxX := srcBounds.Max.X
	srcMaxY := srcBounds.Max.Y
	switch src0 := img.(type) {
	case *image.NRGBA:
		// Same pixel format: copy each row in one memmove.
		rowSize := srcBounds.Dx() * 4
		numRows := srcBounds.Dy()
		i0 := dst.PixOffset(dstMinX, dstMinY)
		j0 := src0.PixOffset(srcMinX, srcMinY)
		di := dst.Stride
		dj := src0.Stride
		for row := 0; row < numRows; row++ {
			copy(dst.Pix[i0:i0+rowSize], src0.Pix[j0:j0+rowSize])
			i0 += di
			j0 += dj
		}
	case *image.NRGBA64:
		// 16-bit channels are big-endian; keep only the high byte of each.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				dst.Pix[i+0] = src0.Pix[j+0]
				dst.Pix[i+1] = src0.Pix[j+2]
				dst.Pix[i+2] = src0.Pix[j+4]
				dst.Pix[i+3] = src0.Pix[j+6]
			}
		}
	case *image.RGBA:
		// RGBA is alpha-premultiplied; un-premultiply into NRGBA.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				a := src0.Pix[j+3]
				dst.Pix[i+3] = a
				switch a {
				case 0:
					// Fully transparent: color channels are defined as zero.
					dst.Pix[i+0] = 0
					dst.Pix[i+1] = 0
					dst.Pix[i+2] = 0
				case 0xff:
					// Fully opaque: premultiplied equals non-premultiplied.
					dst.Pix[i+0] = src0.Pix[j+0]
					dst.Pix[i+1] = src0.Pix[j+1]
					dst.Pix[i+2] = src0.Pix[j+2]
				default:
					// General case: divide each channel by alpha.
					dst.Pix[i+0] = uint8(uint16(src0.Pix[j+0]) * 0xff / uint16(a))
					dst.Pix[i+1] = uint8(uint16(src0.Pix[j+1]) * 0xff / uint16(a))
					dst.Pix[i+2] = uint8(uint16(src0.Pix[j+2]) * 0xff / uint16(a))
				}
			}
		}
	case *image.RGBA64:
		// Premultiplied 16-bit: take high bytes, then un-premultiply.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				a := src0.Pix[j+6]
				dst.Pix[i+3] = a
				switch a {
				case 0:
					dst.Pix[i+0] = 0
					dst.Pix[i+1] = 0
					dst.Pix[i+2] = 0
				case 0xff:
					dst.Pix[i+0] = src0.Pix[j+0]
					dst.Pix[i+1] = src0.Pix[j+2]
					dst.Pix[i+2] = src0.Pix[j+4]
				default:
					dst.Pix[i+0] = uint8(uint16(src0.Pix[j+0]) * 0xff / uint16(a))
					dst.Pix[i+1] = uint8(uint16(src0.Pix[j+2]) * 0xff / uint16(a))
					dst.Pix[i+2] = uint8(uint16(src0.Pix[j+4]) * 0xff / uint16(a))
				}
			}
		}
	case *image.Gray:
		// Replicate the gray value into R, G and B; alpha is opaque.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				c := src0.Pix[j]
				dst.Pix[i+0] = c
				dst.Pix[i+1] = c
				dst.Pix[i+2] = c
				dst.Pix[i+3] = 0xff
			}
		}
	case *image.Gray16:
		// Take the high byte of the 16-bit gray value; alpha is opaque.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				c := src0.Pix[j]
				dst.Pix[i+0] = c
				dst.Pix[i+1] = c
				dst.Pix[i+2] = c
				dst.Pix[i+3] = 0xff
			}
		}
	case *image.YCbCr:
		// Luma and chroma may have different offsets (subsampling).
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				yj := src0.YOffset(x, y)
				cj := src0.COffset(x, y)
				r, g, b := color.YCbCrToRGB(src0.Y[yj], src0.Cb[cj], src0.Cr[cj])
				dst.Pix[i+0] = r
				dst.Pix[i+1] = g
				dst.Pix[i+2] = b
				dst.Pix[i+3] = 0xff
			}
		}
	case *image.Paletted:
		// Convert the palette once up front, then copy by index.
		plen := len(src0.Palette)
		pnew := make([]color.NRGBA, plen)
		for i := 0; i < plen; i++ {
			pnew[i] = color.NRGBAModel.Convert(src0.Palette[i]).(color.NRGBA)
		}
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				j := src0.PixOffset(x, y)
				c := pnew[src0.Pix[j]]
				dst.Pix[i+0] = c.R
				dst.Pix[i+1] = c.G
				dst.Pix[i+2] = c.B
				dst.Pix[i+3] = c.A
			}
		}
	default:
		// Generic (slow) path: convert each pixel through the color model.
		i0 := dst.PixOffset(dstMinX, dstMinY)
		for y := srcMinY; y < srcMaxY; y, i0 = y+1, i0+dst.Stride {
			for x, i := srcMinX, i0; x < srcMaxX; x, i = x+1, i+4 {
				c := color.NRGBAModel.Convert(img.At(x, y)).(color.NRGBA)
				dst.Pix[i+0] = c.R
				dst.Pix[i+1] = c.G
				dst.Pix[i+2] = c.B
				dst.Pix[i+3] = c.A
			}
		}
	}
	return dst
}
package answer
import (
"fmt"
"strings"
"github.com/ann-kilzer/go-wordle/common"
)
// Word represents the answer to a Wordle game; the value is always stored
// upper-cased.
type Word struct {
	value string
}

// NewWord builds a Word from the given string, normalizing it to upper case.
func NewWord(word string) Word {
	w := Word{}
	w.value = strings.ToUpper(word)
	return w
}

// String returns the upper-cased answer.
func (w Word) String() string {
	return w.value
}
// EvaluateGuess determines what the game response should be based on
// evaluating the user's guess against the Word. Each position is scored
// green (right letter, right spot), then yellow (right letter, wrong spot,
// subject to the duplicate-letter budget), otherwise black. Guesses longer
// than WORD_LENGTH are scored on their first WORD_LENGTH characters only.
func (w *Word) EvaluateGuess(guess string) (common.Evaluation, error) {
	var res common.Evaluation
	if len(guess) < common.WORD_LENGTH {
		return res, fmt.Errorf("Invalid guess of length %d, expected %d", len(guess), common.WORD_LENGTH)
	}
	for i := 0; i < common.WORD_LENGTH; i++ {
		letter := string(guess[i])
		switch {
		case w.isGreen(letter, i):
			res[i] = common.GREEN
		case w.isYellow(letter, i, guess):
			res[i] = common.YELLOW
		default:
			res[i] = common.BLACK
		}
	}
	return res, nil
}
// IsWin reports whether guess exactly matches the answer.
// NOTE(review): the comparison is case-sensitive while the stored value is
// upper-cased; callers presumably upper-case the guess first — confirm.
func (w *Word) IsWin(guess string) bool {
	return guess == w.value
}

// validPosition reports whether position is a legal index into a word.
func validPosition(position int) bool {
	return position >= 0 && position < common.WORD_LENGTH
}

// isGreen means the letter is in the word and in the correct position.
func (w *Word) isGreen(letter string, position int) bool {
	return validPosition(position) && string(w.value[position]) == letter
}
// yellowIndices returns the positions at which the guess contains the letter
// but the word does not, i.e. the candidate positions for a yellow mark,
// in left-to-right order.
func yellowIndices(word, guess, letter string) []int {
	candidates := make([]int, 0)
	for pos := 0; pos < common.WORD_LENGTH; pos++ {
		inGuess := string(guess[pos]) == letter
		inWord := string(word[pos]) == letter
		if inGuess && !inWord {
			candidates = append(candidates, pos)
		}
	}
	return candidates
}
// numGreenForLetter returns how many positions score green (exact match) for
// the given letter.
func numGreenForLetter(word, guess, letter string) int {
	count := 0
	for pos := 0; pos < common.WORD_LENGTH; pos++ {
		if string(guess[pos]) == letter && string(word[pos]) == letter {
			count++
		}
	}
	return count
}
// isYellow means the letter is in the word and in the incorrect position.
// Duplicate letters are budgeted: the number of yellow marks available for a
// letter is its count in the answer minus the green matches already consumed,
// and the budget is spent on the leftmost candidate positions first.
func (w *Word) isYellow(letter string, position int, guess string) bool {
	// Green positions (or invalid ones) can never be yellow.
	if !validPosition(position) || string(w.value[position]) == letter {
		return false
	}
	// Occurrences of letter in the answer not already matched green.
	budget := strings.Count(w.value, letter) - numGreenForLetter(w.value, guess, letter)
	possibleYellow := yellowIndices(w.value, guess, letter)
	if len(possibleYellow) == 0 {
		return false
	}
	// Only the first `budget` candidate positions earn a yellow mark.
	for i := 0; i < budget && i < len(possibleYellow); i++ {
		if possibleYellow[i] == position {
			return true
		}
	}
	return false
}
package market
import (
"container/heap"
"github.com/robbrit/econerra/goods"
)
type doubleAuction struct {
bids orderMaxHeap
offers orderMinHeap
lastHigh Price
lastLow Price
high Price
low Price
lastVolume Size
volume Size
bid Price
ask Price
good goods.Good
}
// NewDoubleAuction constructs a new market for a given good.
func NewDoubleAuction(good goods.Good) Market {
m := &doubleAuction{
good: good,
}
m.Reset()
return m
}
func (m *doubleAuction) Bid() Price { return m.bid }
func (m *doubleAuction) Ask() Price { return m.ask }
func (m *doubleAuction) High() Price { return m.lastHigh }
func (m *doubleAuction) Low() Price { return m.lastLow }
func (m *doubleAuction) Volume() Size { return m.lastVolume }
func (m *doubleAuction) Good() goods.Good { return m.good }
// Post sends an order to the market. If this order results in a fill,
// the owner(s) will be notified. If not, the order will remain open in
// the market.
func (m *doubleAuction) Post(o *Order) {
if o.Size == 0 {
return
}
if o.Price <= 0 {
return
}
switch o.Side {
case Buy:
if len(m.offers) == 0 || o.Price < m.offers[0].Price {
heap.Push(&m.bids, o)
return
}
// Pop sell orders off the heap until we have filled the entire amount.
size := o.Size
for len(m.offers) > 0 && o.Price >= m.offers[0].Price && size > 0 {
if m.offers[0].Size <= size {
sell := heap.Pop(&m.offers).(*Order)
m.handleFill(o, sell, sell.Price, sell.Size)
size -= sell.Size
} else {
sell := m.offers[0]
m.handleFill(o, sell, sell.Price, size)
m.offers[0].Size -= size
size = 0
}
}
if size > 0 {
o.Size = size
heap.Push(&m.bids, o)
}
case Sell:
if len(m.bids) == 0 || o.Price > m.bids[0].Price {
heap.Push(&m.offers, o)
return
}
// Pop buy orders off the heap until we have filled the entire amount.
size := o.Size
for len(m.bids) > 0 && o.Price <= m.bids[0].Price && size > 0 {
if m.bids[0].Size <= size {
buy := heap.Pop(&m.bids).(*Order)
m.handleFill(buy, o, buy.Price, buy.Size)
size -= buy.Size
} else {
buy := m.bids[0]
m.handleFill(buy, o, buy.Price, size)
m.bids[0].Size -= size
size = 0
}
}
if size > 0 {
o.Size = size
heap.Push(&m.offers, o)
}
}
}
func (m *doubleAuction) handleFill(buy, sell *Order, price Price, size Size) {
buy.Owner.OnFill(m.good, Buy, price, size)
sell.Owner.OnFill(m.good, Sell, price, size)
if price > m.high {
m.high = price
}
if m.low == 0 || price < m.low {
m.low = price
}
m.volume += size
}
func (m *doubleAuction) Reset() {
m.lastLow = m.low
m.lastHigh = m.high
m.lastVolume = m.volume
m.high = 0
m.low = 0
m.volume = 0
// Clear out all the orders, sending unfilled notifications as needed.
for _, order := range m.bids {
order.Owner.OnUnfilled(m.good, Buy, order.Size)
}
for _, order := range m.offers {
order.Owner.OnUnfilled(m.good, Sell, order.Size)
}
if len(m.bids) > 0 {
m.bid = m.bids[0].Price
} else {
m.bid = 0
}
if len(m.offers) > 0 {
m.ask = m.offers[0].Price
} else {
m.ask = 0
}
m.bids = orderMaxHeap{}
m.offers = orderMinHeap{}
heap.Init(&m.bids)
heap.Init(&m.offers)
} | market/double_auction_market.go | 0.569015 | 0.444866 | double_auction_market.go | starcoder |
package senseobjdef
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
jsoniter "github.com/json-iterator/go"
"github.com/pkg/errors"
"github.com/qlik-oss/gopherciser/enummap"
"github.com/qlik-oss/gopherciser/helpers"
)
type (
	// SelectType select function type
	SelectType int
	// DataType Get Data function type
	DataType int
	// DataDefType type of data definition, e.g. ListObject or HyperCube
	DataDefType int

	// DataDef type of data and path to data carrier
	DataDef struct {
		// Type of data
		Type DataDefType `json:"type"`
		// Path to data carrier
		Path helpers.DataPath `json:"path,omitempty"`
	}

	// GetDataRequests data requests to send
	GetDataRequests struct {
		// Type of data function
		Type DataType `json:"type"`
		// Path for get data function
		Path string `json:"path,omitempty"`
		// Height of data to get in GetData; see MaxHeight for the fallback
		Height int `json:"height,omitempty"`
	}

	// DataCore holds the get-data definitions; it is the unmarshaling target
	// used by Data.UnmarshalJSON to avoid recursing into Data itself.
	DataCore struct {
		// Constraints constraint defining if to send requests
		Constraints []*Constraint `json:"constraints,omitempty"`
		// Requests List of data requests to send
		Requests []GetDataRequests `json:"requests,omitempty"`
	}
	// Data wraps DataCore so a custom UnmarshalJSON can reject deprecated
	// fields before decoding.
	Data struct {
		DataCore
	}

	// Select definitions for selecting in object
	Select struct {
		// Type of select function
		Type SelectType `json:"type"`
		// Path to use for selection
		Path string `json:"path,omitempty"`
	}

	// ObjectDef object definitions
	ObjectDef struct {
		// DataDef type of data and path to data carrier
		DataDef DataDef `json:"datadef,omitempty"`
		// Data Get data definitions
		Data []Data `json:"data,omitempty"`
		// Select definitions for selecting in object
		Select *Select `json:"select,omitempty"`
	}

	// ObjectDefs contains how to find and select data within sense objects,
	// keyed by object type.
	ObjectDefs map[string]*ObjectDef

	// NoDefError No object definition found; the value is the object type.
	NoDefError string
)
// Non-iota constants
const (
	// DefaultDataHeight is the fallback page height used by MaxHeight.
	DefaultDataHeight = 500
)

// When adding DataDefType, also:
//  * add entry in dataDefTypeEnum
const (
	// DataDefUnknown unknown data definition type
	DataDefUnknown DataDefType = iota
	// DataDefListObject ListObject data carrier
	DataDefListObject
	// DataDefHyperCube HyperCube data carrier
	DataDefHyperCube
	// DataDefNoData object contains no data carrier
	DataDefNoData
)

// When adding SelectType, also:
//  * add entry in selectTypeEnum
const (
	// SelectTypeUnknown unknown select func (default int)
	SelectTypeUnknown SelectType = iota
	// SelectTypeListObjectValues use SelectListObjectValues method
	SelectTypeListObjectValues
	// SelectTypeHypercubeValues use SelectHyperCubeValues method
	SelectTypeHypercubeValues
	// SelectTypeHypercubeColumnValues each dimension is a data page
	SelectTypeHypercubeColumnValues
)

// When adding DataType, also:
//  * add entry in dataTypeEnum
const (
	// DataTypeLayout get data from layout
	DataTypeLayout DataType = iota
	// DataTypeListObject get data from listobject data
	DataTypeListObject
	// DataTypeHyperCubeData get data from hypercube
	DataTypeHyperCubeData
	// DataTypeHyperCubeDataColumns get data column-wise from hypercube
	DataTypeHyperCubeDataColumns
	// DataTypeHyperCubeReducedData get hypercube reduced data
	DataTypeHyperCubeReducedData
	// DataTypeHyperCubeBinnedData get hypercube binned data
	DataTypeHyperCubeBinnedData
	// DataTypeHyperCubeStackData get hypercube stacked data
	DataTypeHyperCubeStackData
	// DataTypeHyperCubeContinuousData get hypercube continuous data
	DataTypeHyperCubeContinuousData
	// DataTypeHyperCubeTreeData get hypercube tree data
	DataTypeHyperCubeTreeData
)
var (
	// od holds the package-level object definitions; seeded with
	// DefaultObjectDefs in init and mutable via OverrideFromFile.
	od ObjectDefs
	// dataDefTypeEnum maps DataDefType values to/from their JSON strings.
	dataDefTypeEnum = enummap.NewEnumMapOrPanic(map[string]int{
		"unknown":    int(DataDefUnknown),
		"listobject": int(DataDefListObject),
		"hypercube":  int(DataDefHyperCube),
		"nodata":     int(DataDefNoData),
	})
	// selectTypeEnum maps SelectType values to/from their JSON strings.
	selectTypeEnum = enummap.NewEnumMapOrPanic(map[string]int{
		"unknown":               int(SelectTypeUnknown),
		"listobjectvalues":      int(SelectTypeListObjectValues),
		"hypercubevalues":       int(SelectTypeHypercubeValues),
		"hypercubecolumnvalues": int(SelectTypeHypercubeColumnValues),
	})
	// dataTypeEnum maps DataType values to/from their JSON strings.
	dataTypeEnum = enummap.NewEnumMapOrPanic(map[string]int{
		"layout":                  int(DataTypeLayout),
		"listobjectdata":          int(DataTypeListObject),
		"hypercubedata":           int(DataTypeHyperCubeData),
		"hypercubereduceddata":    int(DataTypeHyperCubeReducedData),
		"hypercubebinneddata":     int(DataTypeHyperCubeBinnedData),
		"hypercubestackdata":      int(DataTypeHyperCubeStackData),
		"hypercubedatacolumns":    int(DataTypeHyperCubeDataColumns),
		"hypercubecontinuousdata": int(DataTypeHyperCubeContinuousData),
		"hypercubetreedata":       int(DataTypeHyperCubeTreeData),
	})
	// jsonit is the jsoniter instance used for config (un)marshaling.
	jsonit = jsoniter.ConfigCompatibleWithStandardLibrary
)
// init seeds the package-level definitions with the defaults; they can be
// overridden later via OverrideFromFile.
func init() {
	od = DefaultObjectDefs
}
// Error implements the error interface: no object definition was found for
// the object type carried in the NoDefError value.
func (err NoDefError) Error() string {
	return fmt.Sprintf("Definition for object<%s> not found", string(err))
}
// UnmarshalJSON unmarshal SelectType from its JSON string representation.
func (t *SelectType) UnmarshalJSON(arg []byte) error {
	i, err := selectTypeEnum.UnMarshal(arg)
	if err != nil {
		return errors.Wrap(err, "Failed to unmarshal SelectType")
	}
	*t = SelectType(i)
	return nil
}

// MarshalJSON marshal SelectType to a quoted JSON string.
func (t SelectType) MarshalJSON() ([]byte, error) {
	str, err := selectTypeEnum.String(int(t))
	if err != nil {
		return nil, errors.Errorf("Unknown SelectType<%d>", t)
	}
	if str == "" {
		// Defensive: an empty mapping would produce invalid JSON.
		return nil, errors.Errorf("Unknown SelectType<%d>", t)
	}
	return []byte(fmt.Sprintf(`"%s"`, str)), nil
}

// String representation of select type; "unknown" for unmapped values.
func (t SelectType) String() string {
	return selectTypeEnum.StringDefault(int(t), "unknown")
}
// UnmarshalJSON unmarshal DataType from its JSON string representation.
func (typ *DataType) UnmarshalJSON(arg []byte) error {
	i, err := dataTypeEnum.UnMarshal(arg)
	if err != nil {
		return errors.Wrap(err, "Failed to unmarshal DataType")
	}
	*typ = DataType(i)
	return nil
}

// MarshalJSON marshal DataType to a quoted JSON string.
func (typ DataType) MarshalJSON() ([]byte, error) {
	str, err := dataTypeEnum.String(int(typ))
	if err != nil {
		return nil, errors.Errorf("Unknown DataType<%d>", typ)
	}
	if str == "" {
		// Defensive: an empty mapping would produce invalid JSON.
		return nil, errors.Errorf("Unknown DataType<%d>", typ)
	}
	return []byte(fmt.Sprintf(`"%s"`, str)), nil
}

// String representation of data type or "unknown".
func (typ DataType) String() string {
	return dataTypeEnum.StringDefault(int(typ), "unknown")
}
// UnmarshalJSON unmarshal DataDefType from its JSON string representation.
func (d *DataDefType) UnmarshalJSON(arg []byte) error {
	i, err := dataDefTypeEnum.UnMarshal(arg)
	if err != nil {
		return errors.Wrap(err, "Failed to unmarshal DataDefType")
	}
	*d = DataDefType(i)
	return nil
}

// MarshalJSON marshal DataDefType to a quoted JSON string.
func (d DataDefType) MarshalJSON() ([]byte, error) {
	str, err := dataDefTypeEnum.String(int(d))
	if err != nil {
		return nil, errors.Errorf("Unknown DataDefType<%d>", d)
	}
	if str == "" {
		// Defensive: an empty mapping would produce invalid JSON.
		return nil, errors.Errorf("Unknown DataDefType<%d>", d)
	}
	return []byte(fmt.Sprintf(`"%s"`, str)), nil
}

// String representation of DataDefType; "unknown" for unmapped values.
func (d DataDefType) String() string {
	return dataDefTypeEnum.StringDefault(int(d), "unknown")
}
// UnmarshalJSON unmarshal Data, rejecting the deprecated 'constraint' field
// before decoding into the embedded DataCore.
// NOTE(review): uses encoding/json here while the rest of the file uses the
// jsoniter instance (jsonit) — presumably equivalent given
// ConfigCompatibleWithStandardLibrary, but confirm the inconsistency is intended.
func (d *Data) UnmarshalJSON(arg []byte) error {
	if err := helpers.HasDeprecatedFields(arg, []string{
		"/constraint",
	}); err != nil {
		return errors.New("Deprecated field 'constraint' - please replace with 'constraints' array'")
	}
	// Decode into DataCore (not Data) to avoid recursing into this method.
	dc := DataCore{}
	err := json.Unmarshal(arg, &dc)
	if err != nil {
		return err
	}
	*d = Data{dc}
	return nil
}
// OverrideFromFile read config from file, applying it on top of the
// package-level default definitions, and returns the merged set.
func OverrideFromFile(cfgFile string) (ObjectDefs, error) {
	err := od.OverrideFromFile(cfgFile)
	return od, errors.WithStack(err)
}
// OverrideFromFile reads object definitions from cfgFile and merges them into
// defs, replacing any existing entries with the same object type key.
func (defs ObjectDefs) OverrideFromFile(cfgFile string) error {
	if defs == nil {
		return errors.Errorf("defs is nil")
	}
	if cfgFile == "" {
		return errors.Errorf("No config file defined")
	}
	if _, err := os.Stat(cfgFile); os.IsNotExist(err) {
		return errors.Wrapf(err, "file not found<%s>", cfgFile)
	}
	raw, err := ioutil.ReadFile(cfgFile)
	if err != nil {
		return errors.Wrapf(err, "Error reading config from file<%s>", cfgFile)
	}
	overrides := ObjectDefs{}
	if err := jsonit.Unmarshal(raw, &overrides); err != nil {
		return errors.Wrapf(err, "Error unmarshaling file<%s>", cfgFile)
	}
	// Merge: entries from the file win over the existing definitions.
	for key, def := range overrides {
		defs[key] = def
	}
	return nil
}
// GetObjectDef get definitions for object type (using the package-level
// default config, including any applied overrides).
func GetObjectDef(object string) (*ObjectDef, error) {
	return od.GetObjectDef(object)
}
// GetObjectDef get definitions for object type
func (defs ObjectDefs) GetObjectDef(object string) (*ObjectDef, error) {
if defs == nil {
return nil, errors.Errorf("defs is nil")
}
def, ok := defs[object]
if !ok {
return nil, errors.WithStack(NoDefError(object))
}
return def, nil
}
// Validate checks that the object definition is internally consistent:
// every data/select definition whose type requires a path must have one
// starting with '/'.
func (def *ObjectDef) Validate() error {
	if def == nil {
		return errors.Errorf("object definition is nil")
	}
	// Unknown and NoData types carry no path; every other type requires one.
	switch def.DataDef.Type {
	case DataDefUnknown:
	case DataDefNoData:
	default:
		dataDefPath := string(def.DataDef.Path)
		if dataDefPath == "" || dataDefPath[0] != '/' {
			str, _ := dataDefTypeEnum.String(int(def.DataDef.Type))
			return errors.Errorf("data def type<%s> requires a path", str)
		}
	}
	// Validate get-data request defs: layout requests need no path.
	for _, d := range def.Data {
		for _, r := range d.Requests {
			switch r.Type {
			case DataTypeLayout:
			default:
				if r.Path == "" || r.Path[0] != '/' {
					str, _ := dataTypeEnum.String(int(r.Type))
					return errors.Errorf("data type<%s> requires a path", str)
				}
			}
		}
	}
	// Select definition is optional; non-unknown select types need a path.
	if def.Select != nil {
		switch def.Select.Type {
		case SelectTypeUnknown:
		default:
			if def.Select.Path == "" || def.Select.Path[0] != '/' {
				str, _ := selectTypeEnum.String(int(def.Select.Type))
				return errors.Errorf("select type<%s> requires a path", str)
			}
		}
	}
	return nil
}
// Evaluate returns the requests of the first Data section whose constraints
// all evaluate to true against the supplied data, or an error when no
// section applies. A section with no constraints always applies.
func (def *ObjectDef) Evaluate(data json.RawMessage) ([]GetDataRequests, error) {
	for _, v := range def.Data {
		meetsConstraints := true
		for _, c := range v.Constraints {
			result, err := c.Evaluate(data)
			if err != nil {
				return nil, errors.Wrap(err, "Failed to evaluate get data function")
			}
			if !result {
				// One failing constraint disqualifies this section.
				meetsConstraints = false
				break
			}
		}
		if meetsConstraints {
			return v.Requests, nil
		}
	}
	return nil, errors.Errorf("No constraint section applies")
}
//MaxHeight max data to get
func (data GetDataRequests) MaxHeight() int {
if data.Height < 1 {
return DefaultDataHeight
}
return data.Height
} | senseobjdef/senseobjdef.go | 0.520984 | 0.411702 | senseobjdef.go | starcoder |
package utils
// Set - mathematical set of strings with the usual operations.
// The empty struct value occupies zero bytes, so this is more memory
// efficient than map[string]bool.
type Set map[string]struct{}

// CreateSet - create an empty set.
func CreateSet() Set {
	return make(Set)
}

// Contains - report whether elem is in the set.
func (s Set) Contains(elem string) bool {
	_, ok := s[elem]
	return ok
}

// Add - add elem to the set (no-op if already present).
func (s Set) Add(elem string) {
	s[elem] = struct{}{}
}

// Remove - remove elem from the set (elem does not need to be in the set).
func (s Set) Remove(elem string) {
	delete(s, elem)
}

// Extend - add all elements of other to s (in-place union).
func (s Set) Extend(other Set) {
	for k := range other {
		s[k] = struct{}{}
	}
}

// Subtract - remove all elements of other from s (in-place difference).
func (s Set) Subtract(other Set) {
	// delete is a no-op for absent keys, so no membership check is needed.
	for k := range other {
		delete(s, k)
	}
}

// Intersect - keep only elements of s that are also in other (in-place
// intersection).
func (s Set) Intersect(other Set) {
	// The Go spec permits deleting map entries during range iteration,
	// so no intermediate delete list is required.
	for k := range s {
		if _, ok := other[k]; !ok {
			delete(s, k)
		}
	}
}
// SetInts - mathematical set of ints with the usual operations.
// The empty struct value occupies zero bytes, so this is more memory
// efficient than map[int]bool.
type SetInts map[int]struct{}

// CreateSetInts - create an empty set.
func CreateSetInts() SetInts {
	return make(SetInts)
}

// Contains - report whether elem is in the set.
func (s SetInts) Contains(elem int) bool {
	_, ok := s[elem]
	return ok
}

// Add - add elem to the set (no-op if already present).
func (s SetInts) Add(elem int) {
	s[elem] = struct{}{}
}

// Remove - remove elem from the set (elem does not need to be in the set).
func (s SetInts) Remove(elem int) {
	delete(s, elem)
}

// Extend - add all elements of other to s (in-place union).
func (s SetInts) Extend(other SetInts) {
	for k := range other {
		s[k] = struct{}{}
	}
}

// Subtract - remove all elements of other from s (in-place difference).
func (s SetInts) Subtract(other SetInts) {
	// delete is a no-op for absent keys, so no membership check is needed.
	for k := range other {
		delete(s, k)
	}
}

// Intersect - keep only elements of s that are also in other (in-place
// intersection).
func (s SetInts) Intersect(other SetInts) {
	// The Go spec permits deleting map entries during range iteration,
	// so no intermediate delete list is required.
	for k := range s {
		if _, ok := other[k]; !ok {
			delete(s, k)
		}
	}
}
package gset
import (
"bytes"
"fmt"
"reflect"
"sort"
"strings"
)
// Builder is a mutable builder for GSet (Generic Set). Functions that
// mutate instances of this type are not thread-safe.
type Builder struct {
	result GSet // the set under construction
	done   bool // marks the builder as sealed after Result
}

// GValue is a value object associated with the set element name.
type GValue interface{}

// GSet is a thread-safe, immutable set-like data structure mapping names
// (strings) to associated GValues.
type GSet struct {
	elems map[string]GValue
}

// KV is a key/value pair for GSet where the key is the set element name.
type KV struct {
	Key string
	Val GValue
}
// NewBuilder returns a mutable GSet builder.
func NewBuilder() Builder {
	return Builder{
		result: GSet{
			elems: map[string]GValue{},
		},
	}
}

// Add adds the supplied element to the result. Calling Add after calling
// Result has no effect.
func (b Builder) Add(key string, value GValue) {
	if b.done {
		return
	}
	b.result.elems[key] = value
}

// Result returns the result GSet containing all elements that were
// previously added to this builder and seals it: subsequent calls to
// Add have no effect.
//
// BUG FIX: the receiver is now a pointer so the done flag actually
// persists. With the previous value receiver the flag was set on a copy,
// so Add kept mutating the supposedly immutable result GSet.
func (b *Builder) Result() GSet {
	b.done = true
	return b.result
}
// NewGSet returns a new GSet containing the supplied key/value elements.
func NewGSet(elems ...KV) GSet {
	b := NewBuilder()
	for _, kv := range elems {
		b.Add(kv.Key, kv.Val)
	}
	return b.Result()
}

// Size returns the number of elements in this set.
func (s GSet) Size() int {
	return len(s.elems)
}

// IsEmpty returns true if there are zero elements in this set.
func (s GSet) IsEmpty() bool {
	return s.Size() == 0
}

// Contains returns true if the supplied element name is present in this set.
func (s GSet) Contains(net string) bool {
	_, found := s.elems[net]
	return found
}

// Equals returns true if the supplied set contains exactly the same
// elements (keys and associated values) as this set.
func (s GSet) Equals(s2 GSet) bool {
	// DeepEqual compares values as well as keys.
	return reflect.DeepEqual(s.elems, s2.elems)
}
// Filter returns a new GSet holding the elements of s for which predicate
// returns true; the source set is not modified.
func (s GSet) Filter(predicate func(string) bool) GSet {
	b := NewBuilder()
	for k, v := range s.elems {
		if predicate(k) {
			b.Add(k, v)
		}
	}
	return b.Result()
}

// FilterNot returns a new GSet holding the elements of s for which
// predicate returns false; the source set is not modified.
func (s GSet) FilterNot(predicate func(string) bool) GSet {
	return s.Filter(func(k string) bool { return !predicate(k) })
}

// IsSubsetOf reports whether every element of s is also present in s2.
func (s GSet) IsSubsetOf(s2 GSet) bool {
	for k := range s.elems {
		if !s2.Contains(k) {
			return false
		}
	}
	return true
}
// Union returns a new GSet that contains all of the elements from this
// set and all of the elements from the supplied set, without mutating
// either source set. On duplicate keys, s2's value wins.
func (s GSet) Union(s2 GSet) GSet {
	b := NewBuilder()
	for net := range s.elems {
		b.Add(net, s.elems[net])
	}
	for net := range s2.elems {
		// Added second, so s2's value overwrites s's for shared keys.
		b.Add(net, s2.elems[net])
	}
	return b.Result()
}

// Intersection returns a new GSet that contains all of the elements
// that are present in both this set and the supplied set, without mutating
// either source set.
func (s GSet) Intersection(s2 GSet) GSet {
	return s.Filter(func(net string) bool { return s2.Contains(net) })
}

// Difference returns a new GSet that contains all of the elements that
// are present in this set and not the supplied set, without mutating either
// source set.
func (s GSet) Difference(s2 GSet) GSet {
	return s.FilterNot(func(net string) bool { return s2.Contains(net) })
}

// ToSlice returns the elements as a KVSlice sorted by key.
func (s GSet) ToSlice() KVSlice {
	result := KVSlice{}
	for net := range s.elems {
		result = append(result, KV{net, s.elems[net]})
	}
	// Sorting gives deterministic order despite random map iteration.
	sort.Sort(result)
	return result
}
// String renders the set as comma-separated "{key,value}" pairs in sorted
// key order, or "" when the set is empty.
func (s GSet) String() string {
	if s.IsEmpty() {
		return ""
	}
	var b bytes.Buffer
	for _, e := range s.ToSlice() {
		fmt.Fprintf(&b, "{%s,%v},", e.Key, e.Val)
	}
	// Drop the trailing separator.
	return strings.TrimRight(b.String(), ",")
}

// Clone returns a shallow copy of this GSet (values are shared).
func (s GSet) Clone() GSet {
	b := NewBuilder()
	for k, v := range s.elems {
		b.Add(k, v)
	}
	return b.Result()
}
// KVSlice a slice of KV
type KVSlice []KV
// Len implements KVSlice's Len method for the Sort interface
func (kv KVSlice) Len() int {
return len(kv)
}
// Swap implements KVSlice's Swap method for the Sort interface
func (kv KVSlice) Swap(i, j int) {
kv[i].Key, kv[j].Key = kv[j].Key, kv[i].Key
kv[i].Val, kv[j].Val = kv[j].Val, kv[i].Val
}
// Less implements KVSlice's Less method for the Sort interface
func (kv KVSlice) Less(i, j int) bool {
return kv[i].Key < kv[j].Key
} | gset/gset.go | 0.75274 | 0.422981 | gset.go | starcoder |
package main
import (
"fmt"
"math"
"github.com/go-gl/mathgl/mgl64"
"github.com/go-gl/mathgl/mgl32"
)
// Horizontal movement directions for Paddle.move.
var LEFT = -1.0;
var RIGHT = 1.0;

// Paddle is the player-controlled rectangle that moves horizontally.
type Paddle struct {
	Rectangle
	paddleSpeed float64
}

// Collision describes a resolved overlap: delta is the minimum translation
// needed to separate the shapes, normal the contact surface normal.
// The zero value (normal of length 0) means "no collision".
type Collision struct {
	delta mgl64.Vec2
	normal mgl64.Vec2
}

// move shifts the paddle horizontally; dir is LEFT or RIGHT, dt is the
// frame time in seconds.
func (p *Paddle) move(dir float64, dt float64) {
	p.pos = mgl64.Vec2{p.pos.X() + dir * dt * p.paddleSpeed, p.pos.Y()}
}

// Ball is a moving rectangle with a velocity.
type Ball struct {
	Rectangle
	vel mgl64.Vec2
}

// deflect reflects vector b about surface normal n.
func deflect(b, n mgl64.Vec2) mgl64.Vec2 {
	// r=d−2(d⋅n)n
	return b.Sub(n.Mul(2*(b.Dot(n))))
}
// update advances the ball by dt seconds, resolving the last-found
// collision among the block's boxes, the paddle and the screen edges.
// A box the ball hits is removed (swap-with-last); the return value reports
// whether any box was destroyed so the caller can rebuild geometry.
func (b *Ball) update(block *Block, paddle Paddle, dt float64) bool {
	newPos := mgl64.Vec2{b.pos.X() + b.vel.X()*dt, b.pos.Y() + b.vel.Y()*dt}
	newBall := Rectangle{newPos, b.width, b.height, b.color}
	collision := Collision{}
	updateBlock := false
	// Index-based loop so boxes can be removed safely while scanning.
	// (The previous range loop computed each collision twice and mutated
	// the slice mid-iteration, which could write past the shrunken length
	// after multiple removals in a single frame.)
	for i := 0; i < len(block.boxes); {
		c := block.boxes[i].getCollision(newBall)
		if c.normal.Len() > 0 {
			collision = c
			last := len(block.boxes) - 1
			block.boxes[i] = block.boxes[last]
			block.boxes = block.boxes[:last]
			updateBlock = true
			continue // re-examine the element swapped into slot i
		}
		i++
	}
	if c := paddle.getCollision(newBall); c.normal.Len() > 0 {
		collision = c
	}
	// Check walls; a wall hit overrides any earlier collision.
	if newPos.X() < 0 {
		collision = Collision{mgl64.Vec2{-newPos.X(), 0}, mgl64.Vec2{1, 0}}
	} else if newPos.Y() < 0 {
		collision = Collision{mgl64.Vec2{0, -newPos.Y()}, mgl64.Vec2{0, 1}}
	} else if newPos.X()+b.width > width {
		collision = Collision{mgl64.Vec2{width - (newPos.X() + b.width), 0}, mgl64.Vec2{-1, 0}}
	} else if newPos.Y()+b.height > height {
		collision = Collision{mgl64.Vec2{0, height - (newPos.Y() + b.height)}, mgl64.Vec2{0, -1}}
	}
	if collision.normal.Len() > 0 {
		// Push out of penetration, then reflect the velocity.
		b.pos = newPos.Add(collision.delta)
		b.vel = deflect(b.vel, collision.normal)
	} else {
		b.pos = newPos
	}
	return updateBlock
}
// Rectangle is an axis-aligned rectangle with position, size and color.
type Rectangle struct {
	pos           mgl64.Vec2
	width, height float64
	color         mgl64.Vec4
}

// getCollision performs an AABB overlap test against other and returns the
// minimum-translation Collision; the zero Collision means no overlap.
// (A trailing unreachable `return Collision{}` after the if/else was removed.)
func (r Rectangle) getCollision(other Rectangle) Collision {
	// Center distance and per-axis penetration depths.
	dx := r.center().X() - other.center().X()
	px := (r.width/2 + other.width/2) - math.Abs(dx)
	dy := r.center().Y() - other.center().Y()
	py := (r.height/2 + other.height/2) - math.Abs(dy)
	// Push direction is away from the other rectangle's center.
	signX := math.Copysign(1, -dx)
	signY := math.Copysign(1, -dy)
	if px <= 0 || py <= 0 {
		// Separated on at least one axis: no collision.
		return Collision{}
	}
	// Resolve along the axis of least penetration.
	if px < py {
		return Collision{mgl64.Vec2{px * signX, 0}, mgl64.Vec2{signX, 0}}
	}
	return Collision{mgl64.Vec2{0, py * signY}, mgl64.Vec2{0, signY}}
}

// center returns the rectangle's center point.
func (r Rectangle) center() mgl64.Vec2 {
	return mgl64.Vec2{r.pos.X() + r.width/2, r.pos.Y() + r.height/2}
}
// getVerts returns the rectangle as two triangles (12 floats, x/y pairs)
// in local coordinates; position is applied via the model matrix.
func (r Rectangle) getVerts() ([]float32) {
	bottomLeftCorner := []float32 {
		0, 0,
		0, float32(r.height),
		float32(r.width), 0,
	}
	upperRightCorner := []float32 {
		0, float32(r.height),
		float32(r.width), float32(r.height),
		float32(r.width), 0,
	}
	return append(bottomLeftCorner, upperRightCorner...)
}

// getModelMatrix returns the translation matrix placing the rectangle.
func (r Rectangle) getModelMatrix() mgl32.Mat4 {
	return mgl32.Translate3D(float32(r.pos.X()), float32(r.pos.Y()), 0)
}

// getColor narrows the float64 color to the float32 vector the renderer uses.
func (r Rectangle) getColor() mgl32.Vec4 {
	return mgl32.Vec4{float32(r.color.X()), float32(r.color.Y()), float32(r.color.Z()), float32(r.color.W())}
}

// String prints the rectangle's position, for debugging.
func (r Rectangle) String() string {
	return fmt.Sprintf("%v, %v", r.pos.X(), r.pos.Y())
}
type Block struct {
boxes []Rectangle
}
func colorFor(x, y int) mgl64.Vec4 {
var r, g, b float64;
r = 1.0
if x % 2 == 0 {
r = 0.5
}
if y % 2 == 0 {
g = 1.0
}
if y % 2 == 0 && x % 2 == 0 {
b = 1.0
}
return mgl64.Vec4{ r, g, b, 1.0 }
}
func buildMap() Block {
rects := make([]Rectangle, 0)
startX := 50.0
startY := 300.0
for x := 0; x < 5; x++ {
for y := 0; y < 3; y++ {
rects = append(rects, Rectangle{
mgl64.Vec2{ startX + float64(x) * 105.0, startY + float64(y) * 50.0 },
100, 45, colorFor(x, y)})
}
}
return Block{rects}
}
func (b *Block) getVerts() ([]float32) {
sumVerts := make([]float32, 0)
for _, block := range b.boxes {
sumVerts = append(sumVerts, block.getVerts()...)
}
return sumVerts
} | block.go | 0.64713 | 0.548371 | block.go | starcoder |
package forest
// This is general implementation of the stack using slices and empty interface (interface{}) elements. This is convenient as one
// can use elements of any type to build a stack as long as one is carefull. It is advisable to use elements of the same type,
// though...
// Example usage is seen from the commented main function.
// <NAME>., Jan2015
/*
package main
import (
"fmt"
)
*/
// Stack is a generic LIFO stack backed by a slice of empty-interface values.
type Stack []interface{}

// IsEmpty reports whether the stack holds no elements.
func (s Stack) IsEmpty() bool { return len(s) == 0 }

// Len reports the number of elements on the stack.
func (s Stack) Len() int { return len(s) }

// Peek returns the top element without removing it.
// Peeking an empty stack panics.
func (s Stack) Peek() interface{} { return s[s.Len()-1] }

// Push places i on top of the stack.
func (s *Stack) Push(i interface{}) { *s = append(*s, i) }

// Pop removes and returns the top element.
// Popping an empty stack panics.
func (s *Stack) Pop() interface{} {
	top := s.Len() - 1
	v := (*s)[top]
	*s = (*s)[:top]
	return v
}
/*
func main() {
var s Stack
fmt.Printf("Is stack empty? %t\n", s.IsEmpty())
fmt.Println(">> Pushing")
s.Push("something")
fmt.Printf("Is stack empty? %t\n", s.IsEmpty())
fmt.Printf("Stack length: %d\n", s.Len())
str := s.Peek()
fmt.Printf("Peek element: %v\n", str)
fmt.Println(">> Pushing")
s.Push(0)
elem := s.Peek()
fmt.Printf("Stack length: %d\n", s.Len())
fmt.Printf("Peek element: %v\n", elem)
fmt.Println(">> Pushing")
s.Push(3)
s.Push(4)
s.Push(5)
fmt.Printf("Stack length: %d\n", s.Len())
fmt.Printf("Peek element: %v\n", s.Peek())
fmt.Println(">> Popping")
elem = s.Pop()
fmt.Printf("Stack length: %d\n", s.Len())
fmt.Printf("Popped element: %d\n", elem)
fmt.Printf("Peek element: %v\n", s.Peek())
fmt.Println(">> Popping")
elem = s.Pop()
elem = s.Pop()
elem = s.Pop()
fmt.Printf("Stack length: %d\n", s.Len())
fmt.Printf("Popped element: %d\n", elem)
fmt.Printf("Peek element: %v\n", s.Peek())
fmt.Println(">> Popping")
elem = s.Pop()
fmt.Printf("Popped element: %q\n", elem)
if !s.IsEmpty() {
fmt.Printf("Peek element: %v\n", s.Peek())
} else {
fmt.Printf("Stack IS empty!\n")
}
}
*/
package parser
import (
"errors"
"fmt"
"github.com/hashicorp/go-multierror"
"github.com/flier/gocombine/pkg/stream"
)
// Parser is a type that can be used to parse an input stream of token `T`
// into an `O` value, returning the unconsumed remainder of the input.
type Parser[T stream.Token, O any] interface {
	Parse(input []T) (out O, remaining []T, err error)
}

// Func is a function that can be used to parse an input stream of token `T`
// into an `O` value. It implements Parser.
type Func[T stream.Token, O any] func(input []T) (out O, remaining []T, err error)

// Parse implements Parser by invoking the function itself.
func (f Func[T, O]) Parse(input []T) (out O, remaining []T, err error) { return f(input) }

// Expected parses with `f` and if it fails, prefixes the error with `msg`.
// `msg` is then used in error messages as "Expected `msg`".
func (f Func[T, O]) Expected(msg string) Func[T, O] {
	return Expected(f, msg)
}
// Expected parses with `parser` and if it fails, prefixes the error with
// `msg`, which is then used in error messages as "Expected `msg`".
func Expected[T stream.Token, O any](parser Func[T, O], msg string) Func[T, O] {
	return func(input []T) (parsed O, remaining []T, err error) {
		parsed, remaining, err = parser(input)
		if err != nil {
			// Prefix, keeping the original error wrapped for errors.Is/As.
			err = fmt.Errorf("%s, %w", msg, err)
		}
		return
	}
}

// Message parses with `f` and if it fails, adds the message `msg` to the error.
func (f Func[T, O]) Message(msg string) Func[T, O] {
	return Message(f, msg)
}

// Message parses with `parser` and if it fails, appends the message `msg`
// to the error as a multierror.
func Message[T stream.Token, O any](parser Func[T, O], msg string) Func[T, O] {
	return func(input []T) (parsed O, remaining []T, err error) {
		parsed, remaining, err = parser(input)
		if err != nil {
			err = multierror.Append(err, errors.New(msg))
		}
		return
	}
}
// Map uses `fn` to transform the parsed value on success.
// Note the mapping is O -> O; the output type cannot change.
func (f Func[T, O]) Map(fn func(O) O) Func[T, O] {
	return func(input []T) (parsed O, remaining []T, err error) {
		var o O
		if o, remaining, err = f(input); err != nil {
			return
		}
		parsed = fn(o)
		return
	}
}

// MapErr uses `f` to transform the error produced by parser `p`.
func MapErr[T stream.Token, O any](p Func[T, O], f func(error) error) Func[T, O] {
	return func(input []T) (parsed O, remaining []T, err error) {
		parsed, remaining, err = p(input)
		if err != nil {
			err = f(err)
		}
		return
	}
}

// MapErr uses `fn` to transform the error produced by this parser.
func (f Func[T, O]) MapErr(fn func(error) error) Func[T, O] {
	return MapErr(f, fn)
}
// AndThen parses with `f` and applies `fn` on the result if `parser` parses successfully.
// `fn` may optionally fail with an error.
func (f Func[T, O]) AndThen(fn func(O) (O, error)) Func[T, O] {
return func(input []T) (parsed O, remaining []T, err error) {
var o O
if o, remaining, err = f(input); err != nil {
return
}
parsed, err = fn(o)
return
}
} | pkg/parser/parser.go | 0.76882 | 0.57066 | parser.go | starcoder |
package scale
// Scale must be implemented by scale-up and wind-down calculators.
// Three scale calculators come predefined: Incremental, Exponential
// and Constant.
type Scale interface {
IsValid() bool
Apply(n uint32) uint32
ApplyInverse(n uint32) uint32
}
// Constant scaling mode does not allow scaling.
type constant struct{}
// IsValid always return true.
func (s constant) IsValid() bool {
return true
}
// Apply returns supplied value unmodified.
func (s constant) Apply(n uint32) uint32 {
return n
}
// ApplyInverse returns supplied value unmodified.
func (s constant) ApplyInverse(n uint32) uint32 {
return n
}
// Constant scaler that does not allow scaling.
var Constant constant
// Incremental scaling mode specifies the number of new instances to be added
// during each scaling attempt. Must be 1 or greater.
type Incremental uint32

// IsValid checks that its value is at least 1.
// (Doc fixed: previously said "greater than 1", but the code accepts 1.)
func (s Incremental) IsValid() bool {
	return s >= 1
}

// Apply adds itself to the supplied value and returns the sum.
func (s Incremental) Apply(n uint32) uint32 {
	return n + uint32(s)
}

// ApplyInverse subtracts itself from the supplied value and returns the
// difference. When Incremental exceeds the supplied value the result is
// clamped to 0, since uint32 cannot go negative.
// (Doc fixed: the previous comment stated the condition backwards.)
func (s Incremental) ApplyInverse(n uint32) uint32 {
	if uint32(s) > n {
		return 0
	}
	return n - uint32(s)
}
// Exponential scaling mode specifies the factor by which the number of
// instances should be increased during each scaling attempt. Must be greater
// than 1.0.
type Exponential float32

// IsValid checks that its value is greater than 1.0.
func (s Exponential) IsValid() bool {
	return s > 1.0
}

// Apply scales the supplied value by its factor and returns the result.
// The result is guaranteed to be greater than the input by at least 1.
func (s Exponential) Apply(n uint32) uint32 {
	res := uint32(float32(s) * float32(n))
	// We must increase by at least 1.
	if res <= n {
		res = n + 1
	}
	return res
}

// ApplyInverse scales the supplied value by the inverse factor and returns
// the result. The result is guaranteed to be 0, or to be less than a
// nonzero input by at least 1.
// (Fixed: this doc comment was previously titled "Apply", which did not
// match the method name and broke godoc association.)
func (s Exponential) ApplyInverse(n uint32) uint32 {
	res := uint32(float32(n) / float32(s))
	// We must decrease by at least 1, but not go below 0.
	if res >= n && n > 0 {
		res = n - 1
	}
	return res
}
package igloo
import (
"fmt"
"image"
"math"
)
// Vec2 describes a 2D vector or point in float64s.
type Vec2 struct {
	X, Y float64
}

var (
	// Vec2Zero is a Vec2 of (0, 0)
	Vec2Zero = Vec2{0, 0}
	// Vec2One is a Vec2 of (1, 1)
	Vec2One = Vec2{1, 1}
)

// String returns the vector as a human-readable string.
func (v *Vec2) String() string {
	return fmt.Sprintf("Vec2(%v, %v)", v.X, v.Y)
}

// Add returns the component-wise sum v + other.
func (v Vec2) Add(other Vec2) Vec2 {
	return Vec2{X: v.X + other.X, Y: v.Y + other.Y}
}

// AddScalar adds scalar to both components.
func (v Vec2) AddScalar(scalar float64) Vec2 {
	return Vec2{X: v.X + scalar, Y: v.Y + scalar}
}

// Mul returns the component-wise product v * other.
func (v Vec2) Mul(other Vec2) Vec2 {
	return Vec2{X: v.X * other.X, Y: v.Y * other.Y}
}

// MulScalar multiplies both components by scalar.
func (v Vec2) MulScalar(scalar float64) Vec2 {
	return Vec2{X: v.X * scalar, Y: v.Y * scalar}
}

// Sub returns the component-wise difference v - other.
func (v Vec2) Sub(other Vec2) Vec2 {
	return Vec2{X: v.X - other.X, Y: v.Y - other.Y}
}

// SubScalar subtracts scalar from both components.
func (v Vec2) SubScalar(scalar float64) Vec2 {
	return Vec2{X: v.X - scalar, Y: v.Y - scalar}
}

// Div returns the component-wise quotient v / other.
func (v Vec2) Div(other Vec2) Vec2 {
	return Vec2{X: v.X / other.X, Y: v.Y / other.Y}
}

// DivScalar divides both components by scalar.
// (Doc fixed: the previous comment was a copy-paste of SubScalar's.)
func (v Vec2) DivScalar(scalar float64) Vec2 {
	return Vec2{X: v.X / scalar, Y: v.Y / scalar}
}

// Unit is a 1 unit vector in the same direction as v,
// unless v is (0,0) in which case it returns (0,0).
func (v Vec2) Unit() Vec2 {
	mag := v.Mag()
	if mag == 0 {
		return Vec2Zero
	}
	return Vec2{v.X / mag, v.Y / mag}
}

// Mag returns the magnitude (Euclidean length) of the vector.
func (v Vec2) Mag() float64 {
	return math.Hypot(v.X, v.Y)
}

// SqrMag returns the squared magnitude (avoids Mag's square root).
func (v Vec2) SqrMag() float64 {
	return v.X*v.X + v.Y*v.Y
}

// Dist returns the distance between two points.
func (v Vec2) Dist(other Vec2) float64 {
	return math.Sqrt(v.SqrDist(other))
}

// SqrDist returns the squared distance between v and other.
func (v Vec2) SqrDist(other Vec2) float64 {
	// Plain multiplications instead of math.Pow: clearer and cheaper.
	dx := v.X - other.X
	dy := v.Y - other.Y
	return dx*dx + dy*dy
}

// XY returns the X and Y components separately.
func (v Vec2) XY() (float64, float64) {
	return v.X, v.Y
}

// Angle returns the angle of the vector in radians.
func (v Vec2) Angle() float64 {
	return math.Atan2(v.Y, v.X)
}

// Normal returns the vector's normal, same as rotating 90 degrees.
func (v Vec2) Normal() Vec2 {
	return Vec2{X: -v.Y, Y: v.X}
}

// Dot returns the dot product of v and other.
func (v Vec2) Dot(other Vec2) float64 {
	return v.X*other.X + v.Y*other.Y
}

// Cross returns the 2D cross product (z component) of v and other.
// BUG FIX: previously computed v.X*other.X - v.Y*other.Y, which is not a
// cross product; the correct formula is x1*y2 - y1*x2.
func (v Vec2) Cross(other Vec2) float64 {
	return v.X*other.Y - v.Y*other.X
}

// ToPoint truncates the components to an integer image.Point.
func (v Vec2) ToPoint() image.Point {
	return image.Point{X: int(v.X), Y: int(v.Y)}
}

// Map applies fun to both components and returns the resulting Vec2.
func (v Vec2) Map(fun func(float64) float64) Vec2 {
	return Vec2{
		X: fun(v.X),
		Y: fun(v.Y),
	}
}

// Vec2FromAngle returns the unit vector for an angle in radians.
func Vec2FromAngle(angle float64) Vec2 {
	sin, cos := math.Sincos(angle)
	return Vec2{X: cos, Y: sin}
}

// Vec2FromPoint converts an image.Point to a Vec2.
func Vec2FromPoint(pt image.Point) Vec2 {
	return Vec2{X: float64(pt.X), Y: float64(pt.Y)}
}

// Vec2Lerp linearly interpolates between start and end by percent (0..1).
func Vec2Lerp(start, end Vec2, percent float64) Vec2 {
	return Vec2{
		X: start.X + (end.X-start.X)*percent,
		Y: start.Y + (end.Y-start.Y)*percent,
	}
}
package mel
import (
"log"
"math"
"github.com/emer/etable/etensor"
"github.com/goki/mat32"
"gonum.org/v1/gonum/dsp/fourier"
)
// FilterBank contains mel frequency feature bank sampling parameters:
// the number of triangular mel filters, the Hz range they span, and the
// log/renormalization settings applied to the filter sums.
type FilterBank struct {
	NFilters int `viewif:"On" def:"32,26" desc:"number of Mel frequency filters to compute"`
	LoHz float32 `viewif:"On" def:"120,300" step:"10.0" desc:"low frequency end of mel frequency spectrum"`
	HiHz float32 `viewif:"On" def:"10000,8000" step:"1000.0" desc:"high frequency end of mel frequency spectrum -- must be <= sample_rate / 2 (i.e., less than the Nyquist frequencY"`
	LogOff float32 `viewif:"On" def:"0" desc:"on add this amount when taking the log of the Mel filter sums to produce the filter-bank output -- e.g., 1.0 makes everything positive -- affects the relative contrast of the outputs"`
	LogMin float32 `viewif:"On" def:"-10" desc:"minimum value a log can produce -- puts a lower limit on log output"`
	Renorm bool `desc:" whether to perform renormalization of the mel values"`
	RenormMin float32 `viewif:"On" step:"1.0" desc:"minimum value to use for renormalization -- you must experiment with range of inputs to determine appropriate values"`
	RenormMax float32 `viewif:"On" step:"1.0" desc:"maximum value to use for renormalization -- you must experiment with range of inputs to determine appropriate values"`
	RenormScale float32 `inactive:"+" desc:"1.0 / (ren_max - ren_min)"`
}

// Params bundles the mel filter-bank configuration, the computed FFT bin
// edge points, and the optional MFCC (cepstrum) settings.
type Params struct {
	FBank FilterBank
	BinPts []int32 `view:"no-inline" desc:" mel scale points in fft bins"`
	CompMfcc bool `desc:" compute cepstrum discrete cosine transform (dct) of the mel-frequency filter bank features"`
	MfccNCoefs int `def:"13" desc:" number of mfcc coefficients to output -- typically 1/2 of the number of filterbank features"` // Todo: should be 12 total - 2 - 13, higher ones not useful
}
// Defaults sets standard parameter values (MFCC off, 13 coefficients)
// and delegates filter-bank defaults to FBank.Defaults.
func (mel *Params) Defaults() {
	mel.CompMfcc = false
	mel.MfccNCoefs = 13
	mel.FBank.Defaults()
}
// InitFilters computes the triangular mel filter weights over FFT bins.
// dftSize and sampleRate determine the bin positions; the resulting
// weights are written into filters with shape (NFilters, maxBins).
func (mel *Params) InitFilters(dftSize int, sampleRate int, filters *etensor.Float32) {
	mel.BinPts = make([]int32, mel.FBank.NFilters+2)
	mel.FBank.RenormScale = 1.0 / (mel.FBank.RenormMax - mel.FBank.RenormMin)
	hiMel := FreqToMel(mel.FBank.HiHz)
	loMel := FreqToMel(mel.FBank.LoHz)
	nFiltersEff := mel.FBank.NFilters + 2 // plus 2 because we need end points to create the right number of bins
	// Place filter edge points evenly on the mel scale, then map each back
	// to an FFT bin index. Adjacent triples of points form one triangle.
	incr := (hiMel - loMel) / float32(mel.FBank.NFilters+1)
	for i := 0; i < nFiltersEff; i++ {
		ml := loMel + float32(i)*incr
		hz := MelToFreq(ml)
		mel.BinPts[i] = int32(FreqToBin(hz, float32(dftSize), float32(sampleRate)))
	}
	// The widest filter determines the second tensor dimension.
	maxBins := int(mel.BinPts[nFiltersEff-1]) - int(mel.BinPts[nFiltersEff-3]) + 1
	filters.SetShape([]int{mel.FBank.NFilters, maxBins}, nil, nil)
	for flt := 0; flt < mel.FBank.NFilters; flt++ {
		// Each filter spans [mnbin, mxbin] and peaks at pkbin.
		mnbin := int(mel.BinPts[flt])
		pkbin := int(mel.BinPts[flt+1])
		mxbin := int(mel.BinPts[flt+2])
		pkmin := float32(pkbin) - float32(mnbin)
		pkmax := float32(mxbin) - float32(pkbin)
		fi := 0
		bin := 0
		// Rising edge: 0 at mnbin up to 1 at pkbin.
		for bin = mnbin; bin <= pkbin; bin, fi = bin+1, fi+1 {
			fval := (float32(bin) - float32(mnbin)) / pkmin
			filters.SetFloat([]int{flt, fi}, float64(fval))
		}
		// Falling edge: back down to 0 at mxbin.
		for ; bin <= mxbin; bin, fi = bin+1, fi+1 {
			fval := (float32(mxbin) - float32(bin)) / pkmax
			filters.SetFloat([]int{flt, fi}, float64(fval))
		}
	}
}
// FilterDft applies the mel filters to the DFT power spectrum: each filter's
// weighted sum is log-compressed (and optionally renormalized to [0,1]),
// then stored in fBankData and into segmentData at (step, filter, ch).
func (mel *Params) FilterDft(ch, step int, dftPowerOut etensor.Float32, segmentData *etensor.Float32, fBankData *etensor.Float32, filters *etensor.Float32) {
	mi := 0
	for flt := 0; flt < int(mel.FBank.NFilters); flt, mi = flt+1, mi+1 {
		minBin := mel.BinPts[flt]
		maxBin := mel.BinPts[flt+2]
		// Weighted sum of power over the filter's bin span.
		sum := float32(0)
		fi := 0
		for bin := minBin; bin <= maxBin; bin, fi = bin+1, fi+1 {
			fVal := filters.Value([]int{mi, fi})
			pVal := float32(dftPowerOut.FloatVal1D(int(bin)))
			sum += fVal * pVal
		}
		sum += mel.FBank.LogOff
		// Log compression, with LogMin as the floor for zero sums.
		var val float32
		if sum == 0 {
			val = mel.FBank.LogMin
		} else {
			val = mat32.Log(sum)
		}
		if mel.FBank.Renorm {
			// Clamp into [RenormMin, RenormMax] and rescale to [0, 1].
			val -= mel.FBank.RenormMin
			if val < 0.0 {
				val = 0.0
			}
			val *= mel.FBank.RenormScale
			if val > 1.0 {
				val = 1.0
			}
		}
		fBankData.SetFloat1D(mi, float64(val))
		segmentData.Set([]int{step, mi, ch}, val)
	}
}
// FreqToMel converts frequency (Hz) to the mel scale.
func FreqToMel(freq float32) float32 {
	return 1127.0 * mat32.Log(1.0+freq/700.0) // 1127 because we are using natural log
}

// MelToFreq converts a mel-scale value back to frequency (Hz).
// (Doc fixed: the previous comment named FreqToMel.)
func MelToFreq(mel float32) float32 {
	return 700.0 * (mat32.Exp(mel/1127.0) - 1.0)
}

// FreqToBin converts a frequency into an FFT bin number, given the number
// of FFT bins and the sample rate.
func FreqToBin(freq, nFft, sampleRate float32) int {
	return int(mat32.Floor(((nFft + 1) * freq) / sampleRate))
}
// Defaults initializes FBank values - these are the ones you most likely
// need to adjust for your particular signals.
func (mfb *FilterBank) Defaults() {
	mfb.LoHz = 300
	mfb.HiHz = 8000.0
	mfb.NFilters = 32
	mfb.LogOff = 0.0
	mfb.LogMin = -10.0
	mfb.Renorm = true
	mfb.RenormMin = -5.0
	mfb.RenormMax = 9.0
}
// Filter filters the current window_in input data according to current
// settings -- called by ProcessStep, but can be called separately.
// When CompMfcc is set it also computes the MFCC cepstrum coefficients.
func (mel *Params) Filter(ch int, step int, windowIn *etensor.Float32, filters *etensor.Float32, dftPower *etensor.Float32, segmentData *etensor.Float32, fBankData *etensor.Float32, mfccSegmentData *etensor.Float32, mfccDct *etensor.Float32) {
	mel.FilterDft(ch, step, *dftPower, segmentData, fBankData, filters)
	if mel.CompMfcc {
		mel.CepstrumDct(ch, step, fBankData, mfccSegmentData, mfccDct)
	}
}

// FftReal copies a real-valued tensor into a complex slice (imaginary
// parts zero), as input for an FFT.
func (mel *Params) FftReal(out []complex128, in *etensor.Float32) {
	var c complex128
	for i := 0; i < len(out); i++ {
		c = complex(in.FloatVal1D(i), 0)
		out[i] = c
	}
}
// CepstrumDct applies a discrete cosine transform (DCT) to get the cepstrum coefficients on the mel filterbank values
func (mel *Params) CepstrumDct(ch, step int, fBankData *etensor.Float32, mfccSegmentData *etensor.Float32, mfccDct *etensor.Float32) {
sz := copy(mfccDct.Values, fBankData.Values)
if sz != len(mfccDct.Values) {
log.Printf("mel.CepstrumDctMel: memory copy size wrong")
}
dct := fourier.NewDCT(len(mfccDct.Values))
var mfccDctOut []float64
src := []float64{}
mfccDct.Floats(&src)
mfccDctOut = dct.Transform(mfccDctOut, src)
el0 := mfccDctOut[0]
mfccDctOut[0] = math.Log(1.0 + el0*el0) // replace with log energy instead..
for i := 0; i < mel.FBank.NFilters; i++ {
mfccSegmentData.SetFloat([]int{step, i, ch}, mfccDctOut[i])
}
} | mel/mel.go | 0.721253 | 0.410461 | mel.go | starcoder |
package howlongtobeat
import (
"net/http"
"net/url"
"strings"
"github.com/PuerkitoBio/goquery"
"github.com/spf13/cobra"
)
// hltbURL is the howlongtobeat.com search endpoint; example documents
// the chat-command usage shown in the cobra help.
const (
	hltbURL = "https://howlongtobeat.com/search_main.php?page=1"
	example = `
# How long takes to beat Life is Strange
!howlongtobeat Life is Strange
# How long takes to beat Life is Strange
!hltb Life is Strange
# How long takes to beat the three more popular Crash games
!howlongtobeat Crash Bandicoot --total 3
# How long takes to beat the second more popular Crash game
!howlongtobeat Crash Bandicoot --skip 1
# How long takes to beat the second and third more popular Crash games
!howlongtobeat Crash Bandicoot --skip 1 --total 2`
)

type (
	// hltb holds the flag-controlled options for one command invocation:
	// how many results to print and how many to skip.
	hltb struct {
		total int
		skip int
	}
)
func NewHLTBCommand() *cobra.Command {
h := newHLTB()
c := &cobra.Command{
Use: "howlongtobeat",
Short: "Search how long takes to beat a game",
Example: example,
Args: cobra.MinimumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
result, err := h.search(strings.Join(args, " "))
if err != nil {
result = err.Error()
}
cmd.OutOrStdout().Write([]byte(result))
h.reset()
},
Aliases: []string{"hltb"},
}
c.Flags().IntVarP(&h.total, "total", "t", 1, "Total of results to print")
c.Flags().IntVarP(&h.skip, "skip", "s", 0, "How many results should be skipped")
return c
}
// newHLTB returns a handler with zero-value flags; the real defaults are
// installed by the cobra flag bindings.
func newHLTB() *hltb {
	return new(hltb)
}
// search queries howlongtobeat.com for text and returns a code-fenced
// message with up to h.total games after skipping h.skip results.
//
// NOTE(review): results are gathered into a map, so the skip/total window
// is applied in non-deterministic iteration order; also typeList/timeList
// accumulate across <li> items — confirm whether both are intended.
func (h *hltb) search(text string) (string, error) {
	resp, err := http.PostForm(hltbURL,
		url.Values{
			"queryString": {text},
			"t":           {"games"},
			"sorthead":    {"popular"},
		})
	if err != nil {
		return "", err
	}
	// Close the body so the connection can be reused (it was leaked before).
	defer resp.Body.Close()
	// Load the HTML document
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return "", err
	}
	list := map[string]map[string]string{}
	var typeList []string
	var timeList []string
	// @TODO: (disiqueira) This really needs a refactor.
	// Find the review items
	doc.Find("li").Each(func(i int, s *goquery.Selection) {
		name := s.Find(".text_white").Text()
		list[name] = map[string]string{}
		for _, div := range s.Find(".shadow_text").Nodes {
			if len(strings.TrimSpace(div.FirstChild.Data)) < 1 {
				continue
			}
			typeList = append(typeList, div.FirstChild.Data)
		}
		for _, div := range s.Find(".center").Nodes {
			if len(strings.TrimSpace(div.FirstChild.Data)) < 1 {
				continue
			}
			timeList = append(timeList, div.FirstChild.Data)
		}
		for i := range typeList {
			list[name][typeList[i]] = timeList[i]
		}
	})
	var msgList []string
	skip := 0
	for name, game := range list {
		if h.skip > skip {
			skip++
			continue
		}
		msg := name + "\n"
		for t, v := range game {
			msg += t + ": " + v + "\n"
		}
		msgList = append(msgList, msg)
		if len(msgList) == h.total {
			break
		}
	}
	return "```" + strings.Join(msgList, "\n") + "```", nil
}
func (h *hltb) reset() {
h.skip = 0
h.total = 1
} | pkg/command/howlongtobeat/howlongtobeat.go | 0.547222 | 0.50177 | howlongtobeat.go | starcoder |
package timeago
import (
"time"
)
// DefaultTimeAgo is a ready-to-use TimeAgo built on DefaultLocale.
var DefaultTimeAgo = TimeAgo{DefaultLocale}

// TimeAgo formats elapsed-time strings using the configured locale.
type TimeAgo struct {
	locale Locale // supplies the localized time-unit strings
}

// NewTimeAgo returns a TimeAgo bound to the given locale.
func NewTimeAgo(locale Locale) TimeAgo {
	return TimeAgo{locale: locale}
}
// FromNowWithTime reports how much time has passed between the current
// time and the given end Time.
func (ta TimeAgo) FromNowWithTime(end time.Time) (string, error) {
	return ta.WithTime(time.Now(), end)
}

// TimeAgoFromNowWithTime is FromNowWithTime on DefaultTimeAgo.
func TimeAgoFromNowWithTime(end time.Time) (string, error) {
	return DefaultTimeAgo.FromNowWithTime(end)
}
// FromNowWithString parses end using the given layout and reports how much
// time has passed between the current time and that instant.
// It returns ErrInvalidFormat when end does not match layout.
func (ta TimeAgo) FromNowWithString(layout, end string) (string, error) {
	t, err := time.Parse(layout, end)
	if err != nil {
		return "", ErrInvalidFormat
	}
	return ta.WithTime(time.Now(), t)
}

// TimeAgoFromNowWithString is FromNowWithString on DefaultTimeAgo.
func TimeAgoFromNowWithString(layout, end string) (string, error) {
	return DefaultTimeAgo.FromNowWithString(layout, end)
}
// WithTime reports how much time has passed between the start and end
// Times. The duration is computed as start.Sub(end), so start is expected
// to be the later instant.
func (ta TimeAgo) WithTime(start, end time.Time) (string, error) {
	duration := start.Sub(end)
	return stringForDuration(ta.locale, duration), nil
}

// TimeAgoWithTime is WithTime on DefaultTimeAgo.
func TimeAgoWithTime(start, end time.Time) (string, error) {
	return DefaultTimeAgo.WithTime(start, end)
}
// WithString takes a specific layout as time
// format to parse the time string on start/end parameter to return
// how much time has been passed between them.
func (ta TimeAgo) WithString(layout, start, end string) (string, error) {
timeStart, e := time.Parse(layout, start)
if e != nil {
return "", ErrInvalidStartTimeFormat
}
timeEnd, e := time.Parse(layout, end)
if e != nil {
return "", ErrInvalidEndTimeFormat
}
duration := timeStart.Sub(timeEnd)
return stringForDuration(ta.locale, duration), nil
}
// TimeAgoWithString takes a specific layout as time
// format to parse the time string on start/end parameter to return
// how much time has been passed between them.
func TimeAgoWithString(layout, start, end string) (string, error) {
return DefaultTimeAgo.WithString(layout, start, end)
} | timeago.go | 0.751557 | 0.655901 | timeago.go | starcoder |
package discount
import (
"fmt"
)
const (
	// Label holds the string label denoting the discount type in the database.
	Label = "discount"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldPeriodStart holds the string denoting the period_start field in the database.
	FieldPeriodStart = "period_start"
	// FieldPeriodEnd holds the string denoting the period_end field in the database.
	FieldPeriodEnd = "period_end"
	// FieldMethod holds the string denoting the method field in the database.
	FieldMethod = "method"
	// FieldDiscountPrice holds the string denoting the discount_price field in the database.
	FieldDiscountPrice = "discount_price"
	// FieldDiscountPercentage holds the string denoting the discount_percentage field in the database.
	FieldDiscountPercentage = "discount_percentage"
	// EdgeOrders holds the string denoting the orders edge name in mutations.
	EdgeOrders = "orders"
	// Table holds the table name of the discount in the database.
	Table = "discounts"
	// OrdersTable is the table that holds the orders relation/edge. The primary key declared below.
	OrdersTable = "order_discounts"
	// OrdersInverseTable is the table name for the Order entity.
	// It exists in this package in order to avoid circular dependency with the "order" package.
	OrdersInverseTable = "orders"
)

// Columns holds all SQL columns for discount fields.
var Columns = []string{
	FieldID,
	FieldPeriodStart,
	FieldPeriodEnd,
	FieldMethod,
	FieldDiscountPrice,
	FieldDiscountPercentage,
}

var (
	// OrdersPrimaryKey and OrdersColumn2 are the table columns denoting the
	// primary key for the orders relation (M2M).
	OrdersPrimaryKey = []string{"order_id", "discount_id"}
)

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for _, known := range Columns {
		if known == column {
			return true
		}
	}
	return false
}
// Method defines the type for the "method" enum field.
type Method string

// Method values.
const (
	MethodPERCENTAGE Method = "PERCENTAGE"
	MethodPRICE      Method = "PRICE"
)

// String returns the raw enum value.
func (m Method) String() string {
	return string(m)
}

// MethodValidator is a validator for the "method" field enum values. It is called by the builders before save.
func MethodValidator(m Method) error {
	switch m {
	case MethodPERCENTAGE, MethodPRICE:
		return nil
	default:
		return fmt.Errorf("discount: invalid enum value for method field: %q", m)
	}
}
package blockchain
import (
"fmt"
"sort"
"github.com/BANKEX/plasma-research/src/node/types"
"github.com/BANKEX/plasma-research/src/node/types/slice"
"github.com/BANKEX/plasma-research/src/node/utils"
)
// SumTreeRoot is the (length, hash) pair summarizing a whole tree.
type SumTreeRoot struct {
	// We use 24 bit
	Length uint32
	Hash   types.Uint160
}

// SumTreeNode is one node of the sum-merkle tree. Leaves carry the slice
// bounds [Begin, End); interior nodes carry the combined length and hash.
type SumTreeNode struct {
	Begin uint32
	End   uint32
	// We use 24 bit of length field
	Length uint32
	Hash   types.Uint160
	// Left/Right point at the sibling pair partners, Parent at the
	// combined node (see NewSumMerkleTree).
	Left   *SumTreeNode
	Right  *SumTreeNode
	Parent *SumTreeNode
}

// Index: Bit index of the element in the tree
// Slice: Slice that stored inside a leaf with corresponding index
// Item: Hash ot the transaction associated with a slice
// Data: List of proof steps
type SumMerkleTreeProof struct {
	Index uint32
	Slice slice.Slice
	Item  types.Uint160
	Data  []ProofStep
}

// ProofStep is one sibling entry of a merkle proof.
type ProofStep struct {
	Length []byte        // 4 bytes
	Hash   types.Uint160 // 20 bytes
}
// HasIntersection returns an error when any two consecutive slices overlap.
// The input is expected to be sorted by Begin.
func HasIntersection(slices []slice.Slice) error {
	for i := 1; i < len(slices); i++ {
		prev, cur := slices[i-1], slices[i]
		if prev.End > cur.Begin {
			return fmt.Errorf("slices (%d, %d) and (%d, %d) intersect",
				prev.Begin, prev.End, cur.Begin, cur.End)
		}
	}
	return nil
}
// PrepareLeaves builds the sorted, gap-filled leaf nodes for a block's
// sum-merkle tree from the inputs of the given transactions.
// Use this first when assembling blocks.
func PrepareLeaves(transactions []Transaction) ([]*SumTreeNode, error) {
	zeroHash := utils.Keccak160([]byte{})
	slice2transactions := map[slice.Slice]*Transaction{}
	var slices []slice.Slice
	for i := range transactions {
		// Take the address of the slice element, not of a loop variable:
		// the previous `for _, t := range ... { ... &t }` stored the same
		// pointer for every entry, leaving all map values aliased to the
		// last transaction.
		t := &transactions[i]
		for _, input := range t.Inputs {
			slices = append(slices, input.Slice)
			slice2transactions[input.Slice] = t
		}
	}
	sort.Slice(slices, func(i, j int) bool {
		return slices[i].Begin < slices[j].Begin
	})
	err := HasIntersection(slices)
	if err != nil {
		return nil, err
	}
	slices = FillGaps(slices)
	var leaves []*SumTreeNode
	for _, s := range slices {
		// Slices that filling the gaps haven't got a reference to transaction
		tx, hasTx := slice2transactions[s]
		var txHash = zeroHash
		if hasTx {
			txHash = tx.GetHash()
		}
		leaf := SumTreeNode{
			Begin:  s.Begin,
			End:    s.End,
			Hash:   txHash,
			Length: s.End - s.Begin,
		}
		leaves = append(leaves, &leaf)
	}
	return leaves, nil
}
// SumMerkleTree is a sum-merkle tree that keeps direct access to its
// leaves so per-leaf proofs can be produced without a search.
type SumMerkleTree struct {
	Root  *SumTreeNode
	Leafs []*SumTreeNode // leaves in left-to-right order
}
// uint32BE encodes n as four big-endian bytes.
func uint32BE(n uint32) []byte {
	buf := make([]byte, 4)
	buf[0] = byte(n >> 24)
	buf[1] = byte(n >> 16)
	buf[2] = byte(n >> 8)
	buf[3] = byte(n)
	return buf
}
// concatAndHash hashes len(left) || len(right) || hash(left) || hash(right).
//
// The message is assembled in a freshly allocated buffer: the previous
// append-based version appended right.Hash onto left.Hash, which could
// write into left.Hash's spare capacity and corrupt the node's hash.
func concatAndHash(left *SumTreeNode, right *SumTreeNode, hashFunc HashFunction) types.Uint160 {
	buf := make([]byte, 0, 8+len(left.Hash)+len(right.Hash))
	buf = append(buf, uint32BE(left.Length)...)
	buf = append(buf, uint32BE(right.Length)...)
	buf = append(buf, left.Hash...)
	buf = append(buf, right.Hash...)
	return hashFunc(buf)
}
// NewSumMerkleTree builds a sum-merkle tree bottom-up from the given
// leaves, pairing adjacent nodes layer by layer until one root remains.
// An odd trailing node is carried to the next layer unchanged.
func NewSumMerkleTree(leafs []*SumTreeNode, hashFunc HashFunction) *SumMerkleTree {
	var tree SumMerkleTree
	tree.Leafs = leafs
	var buckets = tree.Leafs
	// At the end we assign new layer to buckets, so stop when ever we can't merge anymore
	for len(buckets) != 1 {
		// next layer
		var newBuckets []*SumTreeNode
		for len(buckets) > 0 {
			if len(buckets) >= 2 {
				// deque pair from the head
				left, right := buckets[0], buckets[1]
				buckets = buckets[2:]
				length := left.Length + right.Length
				hash := concatAndHash(left, right, hashFunc)
				node := SumTreeNode{
					Hash:   hash,
					Length: length,
				}
				// Wire the pair: Parent is the combined node; Left/Right
				// link the two siblings to each other (used by GetProof
				// to find the proof sibling at each level).
				left.Parent = &node
				right.Parent = &node
				left.Right = right
				right.Left = left
				newBuckets = append(newBuckets, &node)
			} else {
				// Pop the last one - goes to next layer as it is
				newBuckets = append(newBuckets, buckets[0])
				buckets = []*SumTreeNode{}
			}
		}
		buckets = newBuckets
	}
	tree.Root = buckets[0]
	return &tree
}
// GetProof builds the merkle proof for the leaf at leafIndex: the leaf's
// slice and hash, the sibling (length, hash) at each level, and a bitmask
// Index whose i-th bit is set when the walked node was a right child.
func (tree *SumMerkleTree) GetProof(leafIndex uint32) SumMerkleTreeProof {
	var curr = tree.Leafs[leafIndex]
	var result SumMerkleTreeProof
	result.Slice = slice.Slice{curr.Begin, curr.End}
	result.Item = curr.Hash
	index := uint32(0)
	var proofSteps []ProofStep
	for i := uint(0); curr.Parent != nil; i++ {
		var node *SumTreeNode
		if curr.Right != nil {
			// We are on the left
			node = curr.Right
		} else {
			// We have left node - it means we are at the right
			node = curr.Left
			// set bit in index, if we are at the right
			index |= 1 << i
		}
		// 4 + 20 byte
		// step := append(uint32BE(node.Length), node.Hash...)
		// proofSteps = append(proofSteps, step...)
		step := ProofStep{uint32BE(node.Length), node.Hash}
		proofSteps = append(proofSteps, step)
		curr = curr.Parent
	}
	result.Index = index
	result.Data = proofSteps
	return result
}
// GetRlpEncodedProof returns the proof for the given leaf RLP-encoded,
// with all proof steps flattened into one byte string.
func (tree *SumMerkleTree) GetRlpEncodedProof(leafIndex uint32) []byte {
	proof := tree.GetProof(leafIndex)
	var data []byte
	for _, proofItem := range proof.Data {
		data = append(data, proofItem.Length...)
		data = append(data, proofItem.Hash...)
	}
	tmp := struct {
		Index uint32
		Slice slice.Slice
		Item  []byte
		Data  []byte
	}{
		proof.Index,
		proof.Slice,
		proof.Item,
		data,
	}
	// NOTE(review): the encoding error is silently discarded — confirm
	// EncodeToRLP cannot fail for this struct shape.
	rlp, _ := utils.EncodeToRLP(tmp)
	return rlp
}
// GetRoot returns the tree root's length and hash as a SumTreeRoot value.
func (tree *SumMerkleTree) GetRoot() SumTreeRoot {
	return SumTreeRoot{
		Length: tree.Root.Length,
		Hash:   tree.Root.Hash,
	}
}
// We use 24 bits to define plasma slices space
// 2^24 - 1 = 0x00FFFFFF
const plasmaLength = 16777215
// Fill plasma range space with Slices, src slices should be sorted first
func FillGaps(src []slice.Slice) []slice.Slice {
// TODO(artall64): Slice Merge, it doesn't merge a slices even if they are neighbors as I remember such improvement can be useful
var result []slice.Slice
pos := uint32(0)
for i := 0; i <= len(src)-1; i++ {
item := src[i]
if pos < item.Begin {
emptySlice := slice.Slice{
Begin: pos,
End: item.Begin,
}
result = append(result, emptySlice)
}
result = append(result, item)
pos = item.End
}
if pos != plasmaLength {
emptySlice := slice.Slice{
Begin: pos,
End: plasmaLength,
}
result = append(result, emptySlice)
}
return result
} | src/node/blockchain/sum-merkle-tree.go | 0.655336 | 0.454351 | sum-merkle-tree.go | starcoder |
package graphics
import (
"golang.org/x/mobile/exp/gl/glutil"
"golang.org/x/mobile/gl"
)
// Program is a graphics program that is send to the backend to render the Buffer data
type Program struct {
	engine *Engine    // owning engine; provides the GL context
	shader gl.Program // compiled+linked GL program handle
}

// UniformLocation is the location of a uniform in a Program
type UniformLocation int32
// NewProgram creates a new program, optionally provide custom vertex and/or fragment shader(s).
// Empty shader arguments fall back to the package defaults.
// The default program uses position (index = 0) and texture (index = 3) data
func (e *Engine) NewProgram(vertexShader, fragmentShader string) (*Program, error) {
	if vertexShader == "" {
		vertexShader = vertexShaderDefault
	}
	if fragmentShader == "" {
		fragmentShader = fragmentShaderDefault
	}
	shader, err := glutil.CreateProgram(e.glctx, vertexShader, fragmentShader)
	if err != nil {
		return nil, err
	}
	return &Program{
		engine: e,
		shader: shader,
	}, nil
}
// Release releases the Program
// Using the Program after calling this function results in a panic
func (p *Program) Release() {
	p.engine.glctx.DeleteProgram(p.shader)
	// Clearing the engine pointer makes any later use fail loudly.
	p.engine = nil
}

// Activate actives the program (and deactives the current one)
func (p *Program) Activate() {
	p.engine.glctx.UseProgram(p.shader)
}

// GetUniformLocation gets the location of a uniform in the program
func (p *Program) GetUniformLocation(name string) UniformLocation {
	u := p.engine.glctx.GetUniformLocation(p.shader, name)
	return UniformLocation(u.Value)
}

// SetUnitformMatrix4 sets the value of the uniform at location to mat.
// NOTE(review): the name misspells "Uniform"; kept as-is to preserve the
// exported API.
func (p *Program) SetUnitformMatrix4(location UniformLocation, mat []float32) {
	p.engine.glctx.UniformMatrix4fv(gl.Uniform{Value: int32(location)}, mat)
}
// vertexShaderDefault passes positions through unchanged and forwards
// texture coordinates to the fragment stage.
const vertexShaderDefault = `#version 300 es
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 texCoord;
out vec2 TexCoord;
void main() {
gl_Position = vec4(position, 1.0);
TexCoord = texCoord;
}`

// fragmentShaderDefault samples the bound texture at the interpolated
// texture coordinate.
const fragmentShaderDefault = `#version 300 es
precision mediump float;
in vec2 TexCoord;
out vec4 color;
uniform sampler2D ourTexture;
void main() {
color = texture(ourTexture, TexCoord);
}`
package contourmap
import (
"math"
)
// closed marks a grid value that lies outside the map (treated as open
// boundary by fraction).
const closed = -math.MaxFloat64

// edge identifies one cell edge by its two grid endpoints; Boundary is
// true when the edge lies on the outer border of the grid.
type edge struct {
	X0, Y0   int
	X1, Y1   int
	Boundary bool
}

// fraction returns how far along the segment (z0 -> z1) the iso-level z
// falls, clamped into (0, 1) by a small epsilon.
func fraction(z0, z1, z float64) float64 {
	const eps = 1e-9
	f := 0.0
	switch {
	case z0 == closed:
		f = 0
	case z1 == closed:
		f = 1
	case z0 != z1:
		f = (z - z0) / (z1 - z0)
	}
	return math.Min(math.Max(f, eps), 1-eps)
}
func marchingSquares(m *ContourMap, w, h int, z float64) []Contour {
edgePoint := make(map[edge]Point)
nextEdge := make(map[Point]edge)
for y := 0; y < h-1; y++ {
up := m.at(0, y)
lp := m.at(0, y+1)
for x := 0; x < w-1; x++ {
ul := up
ur := m.at(x+1, y)
ll := lp
lr := m.at(x+1, y+1)
up = ur
lp = lr
var squareCase int
if ul > z {
squareCase |= 1
}
if ur > z {
squareCase |= 2
}
if ll > z {
squareCase |= 4
}
if lr > z {
squareCase |= 8
}
if squareCase == 0 || squareCase == 15 {
continue
}
fx := float64(x)
fy := float64(y)
t := Point{fx + fraction(ul, ur, z), fy}
b := Point{fx + fraction(ll, lr, z), fy + 1}
l := Point{fx, fy + fraction(ul, ll, z)}
r := Point{fx + 1, fy + fraction(ur, lr, z)}
te := edge{x, y, x + 1, y, y == 0}
be := edge{x, y + 1, x + 1, y + 1, y+2 == h}
le := edge{x, y, x, y + 1, x == 0}
re := edge{x + 1, y, x + 1, y + 1, x+2 == w}
const connectHigh = false
switch squareCase {
case 1:
edgePoint[te] = t
nextEdge[t] = le
case 2:
edgePoint[re] = r
nextEdge[r] = te
case 3:
edgePoint[re] = r
nextEdge[r] = le
case 4:
edgePoint[le] = l
nextEdge[l] = be
case 5:
edgePoint[te] = t
nextEdge[t] = be
case 6:
if connectHigh {
edgePoint[le] = l
nextEdge[l] = te
edgePoint[re] = r
nextEdge[r] = be
} else {
edgePoint[re] = r
nextEdge[r] = te
edgePoint[le] = l
nextEdge[l] = be
}
case 7:
edgePoint[re] = r
nextEdge[r] = be
case 8:
edgePoint[be] = b
nextEdge[b] = re
case 9:
if connectHigh {
edgePoint[te] = t
nextEdge[t] = re
edgePoint[be] = b
nextEdge[b] = le
} else {
edgePoint[te] = t
nextEdge[t] = le
edgePoint[be] = b
nextEdge[b] = re
}
case 10:
edgePoint[be] = b
nextEdge[b] = te
case 11:
edgePoint[be] = b
nextEdge[b] = le
case 12:
edgePoint[le] = l
nextEdge[l] = re
case 13:
edgePoint[te] = t
nextEdge[t] = re
case 14:
edgePoint[le] = l
nextEdge[l] = te
}
}
}
var contours []Contour
for len(edgePoint) > 0 {
var contour Contour
// find an unused edge; prefer starting at a boundary
var e edge
for e = range edgePoint {
if e.Boundary {
break
}
}
e0 := e
// add the first point
// (this allows closed paths to start & end at the same point)
p := edgePoint[e]
contour = append(contour, p)
e = nextEdge[p]
// follow points until none remain
for {
p, ok := edgePoint[e]
if !ok {
break
}
contour = append(contour, p)
delete(edgePoint, e)
e = nextEdge[p]
}
// make sure the first one gets deleted in case of open paths
delete(edgePoint, e0)
// add the contour
contours = append(contours, contour)
}
return contours
} | marching.go | 0.588771 | 0.554531 | marching.go | starcoder |
package extractors
import (
"math"
"path/filepath"
"strconv"
"time"
"github.com/sauron/config"
"github.com/sauron/session"
)
//pathVector vector of features inherited from http path
type pathVector struct {
//Delay of the first request (to this path) in the session
started float64
last time.Time
//Total number of requests
counter int
//Delays between consecutive calls to this path
delays []float64
//Average delay
averageDelay float64
//Maximum delay
maxDelay float64
//Minimum delay
minDelay float64
//Delays after previous request to different path of the same content type
chainDelays []float64
}
func (pv *pathVector) describe() []string {
vector := []string{
strconv.FormatInt(int64(pv.counter), 10)}
//strconv.FormatFloat(pv.started, 'f', 2, 64),
//strconv.FormatFloat(pv.averageDelay, 'f', 2, 64),
//strconv.FormatFloat(pv.minDelay, 'f', 2, 64),
//strconv.FormatFloat(pv.maxDelay, 'f', 2, 64)}
return vector
}
//PathsVector feature representation of the session
type PathsVector struct {
//sessionDuration float64
//sessionStartHour int
//sessionStartMinute int
//clientTimeZone int
targetPaths []string
}
//Init initializes extractor
// It loads the list of target paths from the config file at configPath.
func (fv *PathsVector) Init(configPath string) {
	absPath, _ := filepath.Abs(configPath)
	fv.targetPaths = configutil.ReadPathsConfig(absPath)
}

// describe flattens the per-path vectors into one feature vector, in
// pathsFilter order; paths that were never visited contribute a zero.
func (fv *PathsVector) describe(pathsFilter []string, pathVectors *map[string]*pathVector) []string {
	var finalVector []string
	for _, path := range pathsFilter {
		if pv, ok := (*pathVectors)[path]; ok {
			finalVector = append(finalVector, pv.describe()...)
		} else {
			//Add NaN vector if path was not visited
			finalVector = append(finalVector, "0" /*, "0", "0", "0", "0"*/)
		}
	}
	return finalVector
}
//ExtractFeatures extracts paths vector from session
// It aggregates per-path request counts and inter-request delays, then
// flattens them for the configured target paths.
// NOTE(review): the sort of s.Requests is commented out; if requests are
// not already time-ordered, the computed delays can be negative — confirm
// the ordering guarantee of SessionData.
func (fv *PathsVector) ExtractFeatures(s *session.SessionData) []string {
	//session.SortRequestsByTime(s.Requests)
	pathVectors := make(map[string]*pathVector)
	//Build path vectors map from requests
	for _, r := range s.Requests {
		//fmt.Fprintf(os.Stdout, "%v\n", r.Time)
		if _, pv := pathVectors[r.Path]; !pv {
			// First request to this path: record its offset from the
			// session start and prime minDelay for later minimization.
			pathVectors[r.Path] = new(pathVector)
			pathVectors[r.Path].started = r.Time.Sub(s.Started).Seconds()
			pathVectors[r.Path].minDelay = math.MaxFloat64
		} else {
			//Delay after the last request with the same path
			var delay = r.Time.Sub(pathVectors[r.Path].last).Seconds()
			pathVectors[r.Path].delays = append(pathVectors[r.Path].delays, delay)
			pathVectors[r.Path].averageDelay += delay
			//Update max delay
			if delay > pathVectors[r.Path].maxDelay {
				pathVectors[r.Path].maxDelay = delay
			}
			//Update min delay
			if delay < pathVectors[r.Path].minDelay {
				pathVectors[r.Path].minDelay = delay
			}
		}
		pathVectors[r.Path].last = r.Time
		pathVectors[r.Path].counter++
	}
	for _, pathVector := range pathVectors {
		if pathVector.counter == 1 {
			// Only one request: there are no delays to aggregate.
			pathVector.minDelay = 0
			pathVector.averageDelay = 0
		} else {
			// counter-1 delays were accumulated above.
			pathVector.averageDelay /= float64(pathVector.counter - 1)
		}
	}
	return fv.describe(fv.targetPaths, &pathVectors)
}
//GetFeaturesNames array of features names
func (fv *PathsVector) GetFeaturesNames() []string {
return fv.targetPaths
} | extractors/paths_vector.go | 0.617743 | 0.425546 | paths_vector.go | starcoder |
package outputs
import (
"fmt"
"time"
"github.com/cloudical-io/ancientt/pkg/config"
)
// Data structured parsed data
type Data struct {
TestStartTime time.Time
TestTime time.Time
Tester string
ServerHost string
ClientHost string
AdditionalInfo string
Data DataFormat
}
// DataFormat DataFormat interface that must be implemented by data formats, e.g., Table.
type DataFormat interface {
// Transform run transformations on the `Data`.
Transform(ts []*config.Transformation) error
}
// Table Data format for data in Table form
type Table struct {
DataFormat
Headers []*Row
Rows [][]*Row
}
// Row Row of the Table data format
type Row struct {
Value interface{}
}
// Transform transformation of table data
// Applies each transformation in order to the headers and then to every
// row: Add appends a column derived from the source column, Delete nils
// it out, Replace rewrites it in place.
func (d *Table) Transform(ts []*config.Transformation) error {
	// Iterate over each transformation
	for _, t := range ts {
		index, err := d.GetHeaderIndexByName(t.Source)
		if err != nil {
			return err
		}
		if index == -1 {
			// Source column not present; nothing to transform.
			return nil
		}
		switch t.Action {
		case config.TransformationActionAdd:
			d.Headers = append(d.Headers, &Row{
				Value: t.Destination,
			})
		case config.TransformationActionDelete:
			d.Headers[index] = nil
		case config.TransformationActionReplace:
			toHeader := t.Destination
			if toHeader == "" {
				toHeader = t.Source
			}
			d.Headers[index].Value = toHeader
		}
		for row := range d.Rows {
			// Skip rows too short to contain the source column.
			// (Was `< index`, which let len == index through and then
			// indexed one past the end of the row below.)
			if len(d.Rows[row]) <= index {
				continue
			}
			switch t.Action {
			case config.TransformationActionAdd:
				d.Rows[row] = append(d.Rows[row], &Row{
					Value: d.modifyValue(d.Rows[row][index].Value, t),
				})
			case config.TransformationActionDelete:
				d.Rows[row][index] = nil
			case config.TransformationActionReplace:
				d.Rows[row][index].Value = d.modifyValue(d.Rows[row][index].Value, t)
			}
		}
	}
	return nil
}
// modifyValue applies the transformation's arithmetic modifier to a
// numeric value; non-numeric values (and unknown modifier actions) are
// returned unchanged. int64 inputs are widened to float64.
// NOTE(review): a zero *t.Modifier with ModifierActionDivison yields
// +/-Inf — confirm configuration validation prevents this.
func (d *Table) modifyValue(in interface{}, t *config.Transformation) interface{} {
	value, ok := in.(float64)
	if !ok {
		valInt, ok := in.(int64)
		if !ok {
			return in
		}
		value = float64(valInt)
	}
	switch t.ModifierAction {
	case config.ModifierActionAddition:
		return value + *t.Modifier
	case config.ModifierActionSubstract:
		return value - *t.Modifier
	case config.ModifierActionDivison:
		return value / *t.Modifier
	case config.ModifierActionMultiply:
		return value * *t.Modifier
	}
	return in
}
// CheckIfHeaderExists check if a header exists by name in the Table
func (d *Table) CheckIfHeaderExists(name interface{}) (int, bool) {
for k, c := range d.Headers {
if c == nil {
continue
}
if c.Value == name {
return k, true
}
}
return 0, false
}
// GetHeaderIndexByName return the header index for a given key (name) string
func (d *Table) GetHeaderIndexByName(name string) (int, error) {
for i := range d.Headers {
if d.Headers[i] == nil {
continue
}
val, ok := d.Headers[i].Value.(string)
if !ok {
return -1, fmt.Errorf("failed to cast result header into string, header: %+v", d.Headers[i].Value)
}
if val == name {
return i, nil
}
}
return -1, nil
} | outputs/data.go | 0.698741 | 0.406273 | data.go | starcoder |
package un
import (
"reflect"
)
// init is intentionally empty; kept as a placeholder.
func init() {
}

// workers is the default worker-pool size (see SetWorkers).
var workers int = 6

// Maker takes a function pointer (fn) and implements it with the given reflection-based function implementation
// Internally uses reflect.MakeFunc
func Maker(fn interface{}, impl func(args []reflect.Value) (results []reflect.Value)) {
	fnV := reflect.ValueOf(fn).Elem()
	fnI := reflect.MakeFunc(fnV.Type(), impl)
	fnV.Set(fnI)
}
// ToI takes a slice and converts it to type []interface{}.
// It panics when the argument is not a slice.
func ToI(slice interface{}) []interface{} {
	s := reflect.ValueOf(slice)
	if s.Kind() != reflect.Slice {
		// Message previously referenced the function's old name
		// ("ToInterface").
		panic("ToI expects a slice type")
	}
	ret := make([]interface{}, s.Len())
	for i := 0; i < s.Len(); i++ {
		ret[i] = s.Index(i).Interface()
	}
	return ret
}
// Valueize takes a number of arguments and returns them as []reflect.Value
func Valueize(values ...interface{}) []reflect.Value {
ret := make([]reflect.Value, len(values))
for i := 0; i < len(values); i++ {
v := values[i]
if t := reflect.TypeOf(v).String(); t == "reflect.Value" {
ret[i] = v.(reflect.Value)
} else {
ret[i] = reflect.ValueOf(v)
}
}
return ret
}
// SetWorkers sets the number of workers used by the worker pools
// <p>This is a global default value</p>
// <p>If different worker pool sizes are required, use the optional worker argument when calling Parallel Implementations</p>
func SetWorkers(w int) {
	workers = w
}
// extractArgs pulls the arguments from a []reflect.Value and converts as appropriate to underlying types
func extractArgs(values []reflect.Value) (reflect.Value, reflect.Value) {
fn := interfaceToValue(values[0])
col := interfaceToValue(values[1])
return fn, col
}
// interfaceToValue converts a value of interface{} to a value of Interface()
// That is, converts to the underlying type of the reflect.Value
func interfaceToValue(v reflect.Value) reflect.Value {
if v.Kind() == reflect.Interface {
return reflect.ValueOf(v.Interface())
}
return v
}
// makeSlice makes a slice of the Output type of the supplied function, and of the specifed capacity
func makeSlice(fn reflect.Value, len int) reflect.Value {
t := reflect.SliceOf(fn.Type().Out(0))
return reflect.MakeSlice(t, len, len)
}
// makeWorkerChans makes a buffered channel of the specified type
func makeWorkerChans(t reflect.Type) (chan []reflect.Value, reflect.Value) {
// display(reflect.TypeOf([]reflect.Value{}))
// job := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, reflect.TypeOf(&channeller{})), 100)
// job := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, reflect.TypeOf([]reflect.Value{})), 100)
job := make(chan []reflect.Value)
res := reflect.MakeChan(reflect.ChanOf(reflect.BothDir, t), 100)
return job, res
}
func callPredicate(fn reflect.Value, args ...reflect.Value) bool {
in := fn.Type().NumIn()
res := fn.Call(args[0:in])
return res[0].Bool()
}
func callFn(fn reflect.Value, args ...reflect.Value) []reflect.Value {
in := fn.Type().NumIn()
// display(fn)
res := fn.Call(args[0:in])
return res
} | underscore.go | 0.696165 | 0.437103 | underscore.go | starcoder |
package spliterator
// Characteristic is a bit flag describing properties of a spliterator's
// element source, mirroring java.util.Spliterator's characteristics.
type Characteristic int

const (
	// CharacteristicTODO is the zero value: no characteristics reported.
	CharacteristicTODO Characteristic = 0x00000000

	// CharacteristicOrdered signifies that an encounter order is defined
	// for elements: trySplit yields a strict prefix, tryAdvance steps one
	// element in prefix order, and forEachRemaining acts in encounter
	// order. Clients are expected to preserve ordering constraints in
	// non-commutative parallel computations.
	CharacteristicOrdered Characteristic = 0x00000010

	// CharacteristicDistinct signifies pairwise-distinct elements (for
	// each pair x, y: !x.equals(y)), e.g. a Set-backed source.
	CharacteristicDistinct Characteristic = 0x00000001

	// CharacteristicSorted signifies that encounter order follows a
	// defined sort order. A spliterator reporting SORTED must also report
	// ORDERED.
	CharacteristicSorted Characteristic = 0x00000004

	// CharacteristicSized signifies that the size estimated prior to
	// traversal or splitting is an exact, finite element count in the
	// absence of structural source modification.
	CharacteristicSized Characteristic = 0x00000040

	// CharacteristicNonnulL signifies that the source guarantees
	// encountered elements are never null (e.g. most concurrent
	// collections, queues, and maps).
	// NOTE(review): the trailing "L" capitalization is a typo; the name is
	// kept as-is to preserve the exported API.
	CharacteristicNonnulL Characteristic = 0x00000100

	// CharacteristicImmutable signifies that the element source cannot be
	// structurally modified: elements cannot be added, replaced, or
	// removed during traversal.
	CharacteristicImmutable Characteristic = 0x00000400

	// CharacteristicConcurrent signifies that the source may be safely
	// modified concurrently by multiple threads without external
	// synchronization. A top-level spliterator should not report both
	// CONCURRENT and SIZED, nor both CONCURRENT and IMMUTABLE.
	CharacteristicConcurrent Characteristic = 0x00001000

	// CharacteristicSubsized signifies that all spliterators resulting
	// from trySplit() will be both SIZED and SUBSIZED (i.e. all direct
	// and indirect children are SIZED).
	CharacteristicSubsized Characteristic = 0x00004000
)
package sampler
import (
"math"
"time"
"github.com/DataDog/datadog-agent/pkg/trace/atomic"
"github.com/DataDog/datadog-agent/pkg/trace/pb"
"github.com/DataDog/datadog-agent/pkg/trace/watchdog"
)
const (
	// Sampler parameters not (yet?) configurable
	defaultDecayPeriod time.Duration = 5 * time.Second
	// With this factor, any past trace counts for less than 50% after 6*decayPeriod and >1% after 39*decayPeriod
	// We can keep it hardcoded, but having `decayPeriod` configurable should be enough?
	defaultDecayFactor float64 = 1.125 // 9/8
	// adjustPeriod is how often RunAdjustScoring re-adjusts the scoring coefficients.
	adjustPeriod time.Duration = 10 * time.Second
	// initialSignatureScoreOffset seeds SetSignatureCoefficients in newSampler.
	initialSignatureScoreOffset float64 = 1
	// minSignatureScoreOffset is presumably the floor for the score offset
	// during adjustment — not used in this file; confirm in AdjustScoring.
	minSignatureScoreOffset float64 = 0.01
	// defaultSignatureScoreSlope is the default logarithm slope for the scoring function.
	defaultSignatureScoreSlope float64 = 3
	// defaultSamplingRateThresholdTo1 defines the maximum allowed sampling rate below 1.
	// If this is surpassed, the rate is set to 1.
	defaultSamplingRateThresholdTo1 float64 = 1
)
// EngineType represents the type of a sampler engine.
type EngineType int
const (
	// NormalScoreEngineType is the type of the ScoreEngine sampling non-error traces.
	NormalScoreEngineType EngineType = iota
	// ErrorsScoreEngineType is the type of the ScoreEngine sampling error traces.
	ErrorsScoreEngineType
	// PriorityEngineType is type of the priority sampler engine type.
	PriorityEngineType
)
// Engine is a common basic interface for sampler engines.
type Engine interface {
	// Run the sampler. Implementations in this file block until Stop is called.
	Run()
	// Stop the sampler.
	Stop()
	// Sample a trace; reports whether the trace should be kept.
	Sample(trace pb.Trace, root *pb.Span, env string) bool
	// GetState returns information about the sampler.
	GetState() interface{}
	// GetType returns the type of the sampler.
	GetType() EngineType
}
// Sampler is the main component of the sampling logic
type Sampler struct {
	// Storage of the state of the sampler
	Backend Backend
	// Extra sampling rate to combine to the existing sampling.
	// NOTE(review): written by UpdateExtraRate without synchronization while
	// read in GetSampleRate — confirm callers serialize these accesses.
	extraRate float64
	// Maximum limit to the total number of traces per second to sample
	maxTPS float64
	// rateThresholdTo1 is the value above which all computed sampling rates will be set to 1
	rateThresholdTo1 float64
	// Sample any signature with a score lower than scoreSamplingOffset
	// It is basically the number of similar traces per second after which we start sampling
	signatureScoreOffset *atomic.Float64
	// Logarithm slope for the scoring function
	signatureScoreSlope *atomic.Float64
	// signatureScoreFactor = math.Pow(signatureScoreSlope, math.Log10(scoreSamplingOffset))
	signatureScoreFactor *atomic.Float64
	// exit is closed by Stop to terminate RunAdjustScoring.
	exit chan struct{}
}
// newSampler returns an initialized Sampler with a fresh in-memory backend,
// zeroed atomic score coefficients, and the default scoring parameters applied.
func newSampler(extraRate float64, maxTPS float64) *Sampler {
	s := new(Sampler)
	s.Backend = NewMemoryBackend(defaultDecayPeriod, defaultDecayFactor)
	s.extraRate = extraRate
	s.maxTPS = maxTPS
	s.rateThresholdTo1 = defaultSamplingRateThresholdTo1
	s.signatureScoreOffset = atomic.NewFloat(0)
	s.signatureScoreSlope = atomic.NewFloat(0)
	s.signatureScoreFactor = atomic.NewFloat(0)
	s.exit = make(chan struct{})
	// Populate the three score coefficients from the defaults.
	s.SetSignatureCoefficients(initialSignatureScoreOffset, defaultSignatureScoreSlope)
	return s
}
// SetSignatureCoefficients updates the internal scoring coefficients used by the signature scoring
// mechanism. The derived factor is pre-computed as slope^log10(offset) so the
// scoring hot path does not need to recompute it.
func (s *Sampler) SetSignatureCoefficients(offset float64, slope float64) {
	s.signatureScoreOffset.Store(offset)
	s.signatureScoreSlope.Store(slope)
	s.signatureScoreFactor.Store(math.Pow(slope, math.Log10(offset)))
}
// UpdateExtraRate updates the extra sample rate
// NOTE(review): plain (non-atomic) write; confirm callers do not race with
// GetSampleRate.
func (s *Sampler) UpdateExtraRate(extraRate float64) {
	s.extraRate = extraRate
}
// UpdateMaxTPS updates the max TPS limit
// NOTE(review): plain (non-atomic) write; confirm callers do not race with
// GetMaxTPSSampleRate.
func (s *Sampler) UpdateMaxTPS(maxTPS float64) {
	s.maxTPS = maxTPS
}
// Run runs and blocks on the Sampler main loop: the backend decay loop is
// started in a background goroutine, while the score-adjustment loop runs in
// the calling goroutine until Stop is called.
func (s *Sampler) Run() {
	go func() {
		defer watchdog.LogOnPanic()
		s.Backend.Run()
	}()
	s.RunAdjustScoring()
}
// Stop stops the main Run loop: it stops the backend and closes the exit
// channel, which terminates RunAdjustScoring.
func (s *Sampler) Stop() {
	s.Backend.Stop()
	close(s.exit)
}
// RunAdjustScoring is the sampler feedback loop to adjust the scoring coefficients.
// It calls AdjustScoring every adjustPeriod until the exit channel is closed.
func (s *Sampler) RunAdjustScoring() {
	t := time.NewTicker(adjustPeriod)
	defer t.Stop()
	for {
		select {
		case <-t.C:
			s.AdjustScoring()
		case <-s.exit:
			return
		}
	}
}
// GetSampleRate returns the sample rate to apply to a trace: the signature
// rate combined with the configured extra rate, then clamped by loadRate
// (defined elsewhere). The trace and root arguments are unused here —
// presumably kept for interface symmetry; confirm against other engines.
func (s *Sampler) GetSampleRate(trace pb.Trace, root *pb.Span, signature Signature) float64 {
	return s.loadRate(s.GetSignatureSampleRate(signature) * s.extraRate)
}
// GetMaxTPSSampleRate returns an extra sample rate to apply if we are above maxTPS.
// With no cap configured, or while under it, the multiplier is 1; above the
// cap the rate scales traffic statistically back down to the configured limit.
func (s *Sampler) GetMaxTPSSampleRate() float64 {
	if s.maxTPS <= 0 {
		return 1.0
	}
	currentTPS := s.Backend.GetUpperSampledScore()
	if currentTPS <= s.maxTPS {
		return 1.0
	}
	return s.maxTPS / currentTPS
}
// setRateThresholdTo1 sets the rate above which computed sampling rates are
// rounded up to 1 (see rateThresholdTo1).
func (s *Sampler) setRateThresholdTo1(r float64) {
	s.rateThresholdTo1 = r
}
package models
// Represents the page setup properties of a section.
type PageSetup struct {
// Represents the page setup properties of a section.
Link *WordsApiLink `json:"Link,omitempty"`
// Represents the page setup properties of a section.
Bidi bool `json:"Bidi,omitempty"`
// Represents the page setup properties of a section.
BorderAlwaysInFront bool `json:"BorderAlwaysInFront,omitempty"`
// Represents the page setup properties of a section.
BorderAppliesTo string `json:"BorderAppliesTo,omitempty"`
// Represents the page setup properties of a section.
BorderDistanceFrom string `json:"BorderDistanceFrom,omitempty"`
// Represents the page setup properties of a section.
BottomMargin float64 `json:"BottomMargin,omitempty"`
// Represents the page setup properties of a section.
DifferentFirstPageHeaderFooter bool `json:"DifferentFirstPageHeaderFooter,omitempty"`
// Represents the page setup properties of a section.
FirstPageTray int32 `json:"FirstPageTray,omitempty"`
// Represents the page setup properties of a section.
FooterDistance float64 `json:"FooterDistance,omitempty"`
// Represents the page setup properties of a section.
Gutter float64 `json:"Gutter,omitempty"`
// Represents the page setup properties of a section.
HeaderDistance float64 `json:"HeaderDistance,omitempty"`
// Represents the page setup properties of a section.
LeftMargin float64 `json:"LeftMargin,omitempty"`
// Represents the page setup properties of a section.
LineNumberCountBy int32 `json:"LineNumberCountBy,omitempty"`
// Represents the page setup properties of a section.
LineNumberDistanceFromText float64 `json:"LineNumberDistanceFromText,omitempty"`
// Represents the page setup properties of a section.
LineNumberRestartMode string `json:"LineNumberRestartMode,omitempty"`
// Represents the page setup properties of a section.
LineStartingNumber int32 `json:"LineStartingNumber,omitempty"`
// Represents the page setup properties of a section.
Orientation string `json:"Orientation,omitempty"`
// Represents the page setup properties of a section.
OtherPagesTray int32 `json:"OtherPagesTray,omitempty"`
// Represents the page setup properties of a section.
PageHeight float64 `json:"PageHeight,omitempty"`
// Represents the page setup properties of a section.
PageNumberStyle string `json:"PageNumberStyle,omitempty"`
// Represents the page setup properties of a section.
PageStartingNumber int32 `json:"PageStartingNumber,omitempty"`
// Represents the page setup properties of a section.
PageWidth float64 `json:"PageWidth,omitempty"`
// Represents the page setup properties of a section.
PaperSize string `json:"PaperSize,omitempty"`
// Represents the page setup properties of a section.
RestartPageNumbering bool `json:"RestartPageNumbering,omitempty"`
// Represents the page setup properties of a section.
RightMargin float64 `json:"RightMargin,omitempty"`
// Represents the page setup properties of a section.
RtlGutter bool `json:"RtlGutter,omitempty"`
// Represents the page setup properties of a section.
SectionStart string `json:"SectionStart,omitempty"`
// Represents the page setup properties of a section.
SuppressEndnotes bool `json:"SuppressEndnotes,omitempty"`
// Represents the page setup properties of a section.
TopMargin float64 `json:"TopMargin,omitempty"`
// Represents the page setup properties of a section.
VerticalAlignment string `json:"VerticalAlignment,omitempty"`
}
// IPageSetup is satisfied by types that can act as a PageSetup model.
type IPageSetup interface {
	IsPageSetup() bool
}
// IsPageSetup marks PageSetup as implementing IPageSetup.
func (PageSetup) IsPageSetup() bool {
	return true
}
// IsLinkElement marks PageSetup as a link element — presumably satisfying an
// ILinkElement-style interface declared elsewhere in the package.
func (PageSetup) IsLinkElement() bool {
	return true
}
package topo
import (
"github.com/savalin/gonum/graph"
"github.com/savalin/gonum/graph/internal/ordered"
"github.com/savalin/gonum/graph/internal/set"
)
// DegeneracyOrdering returns the degeneracy ordering and the k-cores of
// the undirected graph g.
func DegeneracyOrdering(g graph.Undirected) (order []graph.Node, cores [][]graph.Node) {
	var counts []int
	order, counts = degeneracyOrdering(g)
	ordered.Reverse(order)
	// Each k-core is a suffix of the reversed order; carve the suffixes off
	// from the end, one per core, using the per-core node counts.
	cores = make([][]graph.Node, len(counts))
	end := len(order)
	for k, count := range counts {
		cores[k] = order[end-count : end]
		end -= count
	}
	return order, cores
}
// KCore returns the k-core of the undirected graph g with nodes in an
// optimal ordering for the coloring number.
func KCore(k int, g graph.Undirected) []graph.Node {
	order, counts := degeneracyOrdering(g)
	// Nodes belonging to cores below k occupy a prefix of order; skip them.
	skip := 0
	for _, c := range counts[:k] {
		skip += c
	}
	core := make([]graph.Node, len(order)-skip)
	copy(core, order[skip:])
	return core
}
// degeneracyOrdering is the common code for DegeneracyOrdering and KCore. It
// returns l, the nodes of g in optimal ordering for coloring number and
// s, a set of relative offsets into l for each k-core, where k is an index
// into s.
func degeneracyOrdering(g graph.Undirected) (l []graph.Node, s []int) {
	nodes := graph.NodesOf(g.Nodes())
	// The algorithm used here is essentially as described at
	// http://en.wikipedia.org/w/index.php?title=Degeneracy_%28graph_theory%29&oldid=640308710
	// Initialize an output list L in return parameters.
	// Compute a number d_v for each vertex v in G,
	// the number of neighbors of v that are not already in L.
	// Initially, these numbers are just the degrees of the vertices.
	dv := make(map[int64]int, len(nodes))
	var (
		maxDegree  int
		neighbours = make(map[int64][]graph.Node)
	)
	for _, n := range nodes {
		id := n.ID()
		adj := graph.NodesOf(g.From(id))
		neighbours[id] = adj
		dv[id] = len(adj)
		if len(adj) > maxDegree {
			maxDegree = len(adj)
		}
	}
	// Initialize an array D such that D[i] contains a list of the
	// vertices v that are not already in L for which d_v = i.
	d := make([][]graph.Node, maxDegree+1)
	for _, n := range nodes {
		deg := dv[n.ID()]
		d[deg] = append(d[deg], n)
	}
	// Initialize k to 0.
	k := 0
	// Repeat n times:
	s = []int{0}
	for range nodes {
		// Scan the array cells D[0], D[1], ... until
		// finding an i for which D[i] is nonempty.
		var (
			i  int
			di []graph.Node
		)
		for i, di = range d {
			if len(di) != 0 {
				break
			}
		}
		// Set k to max(k,i).
		if i > k {
			k = i
			// Grow s so that s[k] exists; new cores start with a zero count.
			s = append(s, make([]int, k-len(s)+1)...)
		}
		// Select a vertex v from D[i]. Add v to the
		// beginning of L and remove it from D[i].
		var v graph.Node
		v, d[i] = di[len(di)-1], di[:len(di)-1]
		l = append(l, v)
		s[k]++
		// Removing v from dv marks it as "already in L" for the update below.
		delete(dv, v.ID())
		// For each neighbor w of v not already in L,
		// subtract one from d_w and move w to the
		// cell of D corresponding to the new value of d_w.
		for _, w := range neighbours[v.ID()] {
			dw, ok := dv[w.ID()]
			if !ok {
				continue
			}
			for i, n := range d[dw] {
				if n.ID() == w.ID() {
					// Swap-delete w from its current bucket, then append it
					// to the bucket one level down.
					d[dw][i], d[dw] = d[dw][len(d[dw])-1], d[dw][:len(d[dw])-1]
					dw--
					d[dw] = append(d[dw], w)
					break
				}
			}
			dv[w.ID()] = dw
		}
	}
	return l, s
}
// BronKerbosch returns the set of maximal cliques of the undirected graph g.
func BronKerbosch(g graph.Undirected) [][]graph.Node {
	nodes := graph.NodesOf(g.Nodes())
	// The algorithm used here is essentially BronKerbosch3 as described at
	// http://en.wikipedia.org/w/index.php?title=Bron%E2%80%93Kerbosch_algorithm&oldid=656805858
	// p is the candidate set (initially all nodes), x the excluded set.
	p := set.NewNodesSize(len(nodes))
	for _, n := range nodes {
		p.Add(n)
	}
	x := set.NewNodes()
	var bk bronKerbosch
	// Processing vertices in degeneracy order bounds the recursion depth.
	order, _ := degeneracyOrdering(g)
	ordered.Reverse(order)
	for _, v := range order {
		neighbours := graph.NodesOf(g.From(v.ID()))
		nv := set.NewNodesSize(len(neighbours))
		for _, n := range neighbours {
			nv.Add(n)
		}
		// Recurse with p and x restricted to v's neighbourhood.
		bk.maximalCliquePivot(g, []graph.Node{v}, set.IntersectionOfNodes(p, nv), set.IntersectionOfNodes(x, nv))
		p.Remove(v)
		x.Add(v)
	}
	return bk
}
// bronKerbosch accumulates the maximal cliques found during recursion.
type bronKerbosch [][]graph.Node
// maximalCliquePivot is the recursive pivoting step of Bron–Kerbosch:
// r is the clique under construction, p the candidate set and x the
// excluded set. Completed maximal cliques are appended to *bk.
func (bk *bronKerbosch) maximalCliquePivot(g graph.Undirected, r []graph.Node, p, x set.Nodes) {
	if len(p) == 0 && len(x) == 0 {
		*bk = append(*bk, r)
		return
	}
	// Only candidates outside the pivot's neighbourhood need be branched on.
	neighbours := bk.choosePivotFrom(g, p, x)
	nu := set.NewNodesSize(len(neighbours))
	for _, n := range neighbours {
		nu.Add(n)
	}
	for _, v := range p {
		if nu.Has(v) {
			continue
		}
		vid := v.ID()
		neighbours := graph.NodesOf(g.From(vid))
		nv := set.NewNodesSize(len(neighbours))
		for _, n := range neighbours {
			nv.Add(n)
		}
		// Guard against v already being a member of r.
		var found bool
		for _, n := range r {
			if n.ID() == vid {
				found = true
				break
			}
		}
		var sr []graph.Node
		if !found {
			// Full slice expression forces append to copy rather than share
			// r's backing array between recursive branches.
			sr = append(r[:len(r):len(r)], v)
		}
		bk.maximalCliquePivot(g, sr, set.IntersectionOfNodes(p, nv), set.IntersectionOfNodes(x, nv))
		p.Remove(v)
		x.Add(v)
	}
}
func (*bronKerbosch) choosePivotFrom(g graph.Undirected, p, x set.Nodes) (neighbors []graph.Node) {
// TODO(kortschak): Investigate the impact of pivot choice that maximises
// |p ⋂ neighbours(u)| as a function of input size. Until then, leave as
// compile time option.
if !tomitaTanakaTakahashi {
for _, n := range p {
return graph.NodesOf(g.From(n.ID()))
}
for _, n := range x {
return graph.NodesOf(g.From(n.ID()))
}
panic("bronKerbosch: empty set")
}
var (
max = -1
pivot graph.Node
)
maxNeighbors := func(s set.Nodes) {
outer:
for _, u := range s {
nb := graph.NodesOf(g.From(u.ID()))
c := len(nb)
if c <= max {
continue
}
for n := range nb {
if _, ok := p[int64(n)]; ok {
continue
}
c--
if c <= max {
continue outer
}
}
max = c
pivot = u
neighbors = nb
}
}
maxNeighbors(p)
maxNeighbors(x)
if pivot == nil {
panic("bronKerbosch: empty set")
}
return neighbors
} | graph/topo/bron_kerbosch.go | 0.695958 | 0.44746 | bron_kerbosch.go | starcoder |
package powervs
import (
"fmt"
"sort"
"strings"
survey "github.com/AlecAivazis/survey/v2"
"github.com/AlecAivazis/survey/v2/core"
"github.com/openshift/installer/pkg/rhcos"
"github.com/pkg/errors"
)
// knownRegions maps the Power VS region names known to the installer to
// their human-readable descriptions.
func knownRegions() map[string]string {
	result := map[string]string{}
	for _, r := range rhcos.PowerVSRegions {
		result[r["name"]] = r["description"]
	}
	return result
}
// IsKnownRegion returns true if the specified region is known to the installer.
// A known region is a subset of AWS regions and the regions where RHEL CoreOS images are published.
func IsKnownRegion(region string) bool {
	// Idiomatic comma-ok map membership test replaces the if/return dance.
	_, ok := knownRegions()[region]
	return ok
}
// Todo(cklokman): Need some form of error handing in this function...
// knownZones returns the zone names for region; a nil slice if the region
// is not present in rhcos.PowerVSZones. The returned order is whatever the
// rhcos table provides — it is NOT guaranteed to be sorted.
func knownZones(region string) []string {
	return rhcos.PowerVSZones[region]
}
// IsKnownZone returns true if the specified zone is known to the installer
// within the given region.
func IsKnownZone(region string, zone string) bool {
	// An unknown region has no known zones.
	if _, known := knownRegions()[region]; !known {
		return false
	}
	for _, candidate := range knownZones(region) {
		if candidate == zone {
			return true
		}
	}
	return false
}
// GetRegion prompts the user to select a region and returns that region
func GetRegion() (string, error) {
	regions := knownRegions()
	// longRegions are display strings ("id (description)"); shortRegions are
	// the bare ids used for validation. Both are sorted below, keeping them
	// index-aligned and making shortRegions valid for binary search.
	longRegions := make([]string, 0, len(regions))
	shortRegions := make([]string, 0, len(regions))
	for id, location := range regions {
		longRegions = append(longRegions, fmt.Sprintf("%s (%s)", id, location))
		shortRegions = append(shortRegions, id)
	}
	sort.Strings(longRegions)
	sort.Strings(shortRegions)
	// regionTransform strips the trailing " (description)" from an answer,
	// leaving only the region id.
	var regionTransform survey.Transformer = func(ans interface{}) interface{} {
		switch v := ans.(type) {
		case core.OptionAnswer:
			return core.OptionAnswer{Value: strings.SplitN(v.Value, " ", 2)[0], Index: v.Index}
		case string:
			return strings.SplitN(v, " ", 2)[0]
		}
		return ""
	}
	var region string
	err := survey.Ask([]*survey.Question{
		{
			Prompt: &survey.Select{
				Message: "Region",
				Help:    "The Power VS region to be used for installation.",
				Options: longRegions,
			},
			Validate: survey.ComposeValidators(survey.Required, func(ans interface{}) error {
				choice := regionTransform(ans).(core.OptionAnswer).Value
				// Binary search is safe here: shortRegions was sorted above.
				i := sort.SearchStrings(shortRegions, choice)
				if i == len(shortRegions) || shortRegions[i] != choice {
					return errors.Errorf("Invalid region %q", choice)
				}
				return nil
			}),
			Transform: regionTransform,
		},
	}, &region)
	if err != nil {
		return "", err
	}
	return region, nil
}
// GetZone prompts the user for a zone given a zone
func GetZone(region string) (string, error) {
zones := knownZones(region)
defaultZone := zones[0]
var zoneTransform survey.Transformer = func(ans interface{}) interface{} {
switch v := ans.(type) {
case core.OptionAnswer:
return core.OptionAnswer{Value: strings.SplitN(v.Value, " ", 2)[0], Index: v.Index}
case string:
return strings.SplitN(v, " ", 2)[0]
}
return ""
}
var zone string
err := survey.Ask([]*survey.Question{
{
Prompt: &survey.Select{
Message: "Zone",
Help: "The Power VS zone within the region to be used for installation.",
Default: fmt.Sprintf("%s", defaultZone),
Options: zones,
},
Validate: survey.ComposeValidators(survey.Required, func(ans interface{}) error {
choice := zoneTransform(ans).(core.OptionAnswer).Value
i := sort.SearchStrings(zones, choice)
if i == len(zones) || zones[i] != choice {
return errors.Errorf("Invalid zone %q", choice)
}
return nil
}),
Transform: zoneTransform,
},
}, &zone)
if err != nil {
return "", err
}
return zone, err
} | pkg/asset/installconfig/powervs/regions.go | 0.629433 | 0.41253 | regions.go | starcoder |
package hego
import (
"fmt"
"math"
"math/rand"
"time"
)
// AnnealingState represents the current state of the annealing system. Energy is the
// value of the objective function (lower is better). Neighbor returns another
// state candidate derived from this one.
type AnnealingState interface {
	Energy() float64
	Neighbor() AnnealingState
}
// SAResult represents the result of the Anneal optimization. The last state
// and last energy are the final results. It extends the basic Result type
type SAResult struct {
	// State is the result state
	State AnnealingState
	// Energy is the result Energy
	Energy float64
	// States, when the KeepHistory setting is set, holds every accepted state
	// during the process (updated on state change)
	States []AnnealingState
	// Energies, when the KeepHistory setting is set, holds the energy value of
	// every accepted state in the process
	Energies []float64
	Result
}
// SASettings represents the algorithm settings for the simulated annealing
// optimization
type SASettings struct {
	// Temperature is used to determine if another state will be selected or not
	// better states are selected with probability 1, but worse states are selected
	// propability p = exp(state_energy - candidate_energy/temperature)
	// a good value for Temperature is in the range of randomly guessed state energies
	Temperature float64
	// AnnealingFactor is used to decrease the temperature after each iteration
	// When temperature reaches 0, only better states will be accepted which leads
	// to local search / convergence. Thus AnnealingFactor controls after how many
	// iterations convergence might be reached. It's good to reach low temperatures
	// during the last third of iterations
	AnnealingFactor float64
	// Settings embeds the common options (MaxIterations, Verbose, KeepHistory, ...).
	Settings
}
// Verify returns an error if settings verification fails: Temperature must be
// strictly positive and AnnealingFactor must lie in (0, 1].
func (s *SASettings) Verify() error {
	if s.Temperature <= 0.0 {
		// Typo fix: "greater that" -> "greater than".
		return fmt.Errorf("temperature must be greater than 0.0, got %v", s.Temperature)
	}
	if s.AnnealingFactor > 1.0 || s.AnnealingFactor <= 0.0 {
		return fmt.Errorf("annealing factor must be between 0.0 and 1.0, got %v", s.AnnealingFactor)
	}
	return nil
}
// SA performs simulated annealing algorithm
func SA(
initialState AnnealingState,
settings SASettings,
) (res SAResult, err error) {
err = settings.Verify()
if err != nil {
err = fmt.Errorf("settings verification failed: %v", err)
return
}
start := time.Now()
logger := newLogger("Simulated Annealing", []string{"Iteration", "Temperature", "Energy"}, settings.Verbose, settings.MaxIterations)
evaluate := func(s AnnealingState) float64 {
res.FuncEvaluations++
return s.Energy()
}
state := initialState
energy := evaluate(state)
temperature := settings.Temperature
if settings.KeepHistory {
res.States = make([]AnnealingState, 0, settings.MaxIterations)
res.Energies = make([]float64, settings.MaxIterations)
}
for i := 0; i < settings.MaxIterations; i++ {
candidate := state.Neighbor()
candidateEnergy := evaluate(candidate)
update := false
if candidateEnergy < energy {
update = true
} else if math.Exp((energy-candidateEnergy)/temperature) > rand.Float64() {
update = true
}
if update {
state = candidate
energy = candidateEnergy
if settings.KeepHistory {
res.States = append(res.States, candidate)
res.Energies = append(res.Energies, candidateEnergy)
}
}
temperature = temperature * settings.AnnealingFactor
res.Iterations++
logger.AddLine(i, []string{
fmt.Sprint(i),
fmt.Sprint(temperature),
fmt.Sprint(energy),
})
}
end := time.Now()
res.Runtime = end.Sub(start)
res.Iterations = settings.MaxIterations
res.Energy = energy
res.State = state
logger.Flush()
if settings.Verbose > 0 {
fmt.Printf("Done after %v!\n", res.Runtime)
}
return
} | anneal.go | 0.674801 | 0.572305 | anneal.go | starcoder |
package elf_reader
// This file contains the definition for an ELF file interface that can be used
// to read either 32- or 64-bit ELF files. Boilerplate wrappers for
// implementing this interface are also kept in this file.
import (
"fmt"
)
// This is a 32- or 64-bit agnostic way of reading an ELF file. If needed, one
// can use type assertions to convert instances of this interface into either
// instances of *ELF64File or *ELF32File.
type ELFFile interface {
	// Returns the value specified in the ELF header of whether the ELF file is
	// an executable, relocatable, shared, or core file.
	GetFileType() ELFFileType
	// Returns the value specified in the ELF header of the type of machine
	// this file targets.
	GetMachineType() MachineType
	// Returns the number of sections defined in the ELF file.
	GetSectionCount() uint16
	// Returns the number of segments (program headers) defined in the ELF
	// file.
	GetSegmentCount() uint16
	// Returns the name of the section at the given index.
	GetSectionName(index uint16) (string, error)
	// Returns the content of the section at the given index.
	GetSectionContent(index uint16) ([]byte, error)
	// Returns the content of the segment at the given index.
	GetSegmentContent(index uint16) ([]byte, error)
	// Returns an interface that can be used to access the header metadata for
	// the section at the given index.
	GetSectionHeader(index uint16) (ELFSectionHeader, error)
	// Returns an interface that can be used to access the header metadata for
	// the program header (segment) at the given index.
	GetProgramHeader(index uint16) (ELFProgramHeader, error)
	// Returns true if the section at the given index is a string table.
	IsStringTable(index uint16) bool
	// Returns a slice of strings from the string table in the given section
	// index.
	GetStringTable(index uint16) ([]string, error)
	// Returns true if the section at the given index is a symbol table.
	IsSymbolTable(index uint16) bool
	// Parses the symbol table in the section at the given index, and returns
	// a slice of symbols in it. The slice of strings is the list of symbol
	// names, in the same order as the symbols themselves.
	GetSymbols(index uint16) ([]ELFSymbol, []string, error)
	// Returns true if the section at the given index is a relocation table.
	IsRelocationTable(index uint16) bool
	// Parses the relocation table in the section at the given index, and
	// returns a slice of the relocations contained in it.
	GetRelocations(index uint16) ([]ELFRelocation, error)
	// Returns true if the section at the given index is a dynamic table.
	IsDynamicSection(index uint16) bool
	// Parses and returns the dynamic linking table at the given section index.
	// This may return entries past the end of the actual table, depending on
	// the section size, so callers must check for the terminating null entry
	// when referring to the returned slice.
	// (Typo fix: parameter name was "intex".)
	DynamicEntries(index uint16) ([]ELFDynamicEntry, error)
}
// The following trivial accessors implement the ELFFile header queries for
// both the 64-bit and 32-bit file types by reading the parsed header.
func (f *ELF64File) GetFileType() ELFFileType {
	return f.Header.Type
}
func (f *ELF32File) GetFileType() ELFFileType {
	return f.Header.Type
}
func (f *ELF64File) GetMachineType() MachineType {
	return f.Header.Machine
}
func (f *ELF32File) GetMachineType() MachineType {
	return f.Header.Machine
}
func (f *ELF64File) GetSectionCount() uint16 {
	return f.Header.SectionHeaderEntries
}
func (f *ELF32File) GetSectionCount() uint16 {
	return f.Header.SectionHeaderEntries
}
func (f *ELF64File) GetSegmentCount() uint16 {
	return f.Header.ProgramHeaderEntries
}
func (f *ELF32File) GetSegmentCount() uint16 {
	return f.Header.ProgramHeaderEntries
}
// GetSectionHeader/GetProgramHeader return a pointer into the parsed header
// slices after bounds-checking the requested index.
func (f *ELF64File) GetSectionHeader(index uint16) (ELFSectionHeader, error) {
	if int(index) >= len(f.Sections) {
		return nil, fmt.Errorf("Invalid section index: %d", index)
	}
	return &(f.Sections[index]), nil
}
func (f *ELF32File) GetSectionHeader(index uint16) (ELFSectionHeader, error) {
	if int(index) >= len(f.Sections) {
		return nil, fmt.Errorf("Invalid section index: %d", index)
	}
	return &(f.Sections[index]), nil
}
func (f *ELF64File) GetProgramHeader(index uint16) (ELFProgramHeader, error) {
	if int(index) >= len(f.Segments) {
		return nil, fmt.Errorf("Invalid segment index: %d", index)
	}
	return &(f.Segments[index]), nil
}
func (f *ELF32File) GetProgramHeader(index uint16) (ELFProgramHeader, error) {
	if int(index) >= len(f.Segments) {
		return nil, fmt.Errorf("Invalid segment index: %d", index)
	}
	return &(f.Segments[index]), nil
}
// GetSymbols parses the symbol table at the given section index and adapts
// the concrete symbol slice into a slice of the ELFSymbol interface.
func (f *ELF64File) GetSymbols(index uint16) ([]ELFSymbol, []string, error) {
	table, names, e := f.GetSymbolTable(index)
	if e != nil {
		return nil, nil, e
	}
	// We need to convert the table into a list of pointers to satisfy the
	// ELFSymbol interface.
	toReturn := make([]ELFSymbol, len(table))
	for i := range table {
		toReturn[i] = &(table[i])
	}
	return toReturn, names, nil
}
// GetSymbols is the 32-bit counterpart of the method above.
func (f *ELF32File) GetSymbols(index uint16) ([]ELFSymbol, []string, error) {
	table, names, e := f.GetSymbolTable(index)
	if e != nil {
		return nil, nil, e
	}
	toReturn := make([]ELFSymbol, len(table))
	for i := range table {
		toReturn[i] = &(table[i])
	}
	return toReturn, names, nil
}
// GetRelocations adapts the 64-bit relocation table into a slice of the
// ELFRelocation interface.
func (f *ELF64File) GetRelocations(index uint16) ([]ELFRelocation, error) {
	// The 64-bit ELF relocation table already satisfies the ELFRelocation
	// interface.
	values, e := f.GetRelocationTable(index)
	if e != nil {
		return nil, e
	}
	toReturn := make([]ELFRelocation, len(values))
	for i := range values {
		toReturn[i] = values[i]
	}
	return toReturn, nil
}
// GetRelocations converts the 32-bit relocation table into 64-bit-format
// entries so both widths can be consumed through one interface.
func (f *ELF32File) GetRelocations(index uint16) ([]ELFRelocation, error) {
	// We need to convert this table into the 64-bit format...
	table32, e := f.GetRelocationTable(index)
	if e != nil {
		return nil, e
	}
	toReturn := make([]ELFRelocation, len(table32))
	for i := range table32 {
		original := table32[i]
		relocationType := original.Type()
		symbolIndex := original.SymbolIndex()
		// 64-bit r_info packs the type in the low 32 bits and the symbol
		// index in the high 32 bits.
		newInfo := ELF64RelocationInfo(relocationType)
		newInfo |= ELF64RelocationInfo(symbolIndex) << 32
		toReturn[i] = &ELF64Rela{
			Address:        uint64(original.Offset()),
			RelocationInfo: newInfo,
			AddendValue:    int64(original.Addend()),
		}
	}
	return toReturn, nil
}
// DynamicEntries adapts the parsed dynamic table into a slice of the
// ELFDynamicEntry interface.
func (f *ELF64File) DynamicEntries(index uint16) ([]ELFDynamicEntry, error) {
	table, e := f.GetDynamicTable(index)
	if e != nil {
		return nil, e
	}
	// Same story as with GetSymbols... we need a slice of interfaces here.
	toReturn := make([]ELFDynamicEntry, len(table))
	for i := range table {
		toReturn[i] = &(table[i])
	}
	return toReturn, nil
}
// DynamicEntries is the 32-bit counterpart of the method above.
func (f *ELF32File) DynamicEntries(index uint16) ([]ELFDynamicEntry, error) {
	table, e := f.GetDynamicTable(index)
	if e != nil {
		return nil, e
	}
	// Same story as with GetSymbols... we need a slice of interfaces here.
	toReturn := make([]ELFDynamicEntry, len(table))
	for i := range table {
		toReturn[i] = &(table[i])
	}
	return toReturn, nil
}
// This is a 32- or 64-bit agnostic interface for accessing an ELF section's
// flags. Can be converted using type assertions into either
// SectionHeaderFlags64 or SectionHeaderFlags32 values.
type ELFSectionFlags interface {
	Executable() bool
	Allocated() bool
	Writable() bool
	String() string
}
// The bit masks below correspond to the standard ELF section flags:
// bit 0 = writable (SHF_WRITE), bit 1 = allocated (SHF_ALLOC),
// bit 2 = executable (SHF_EXECINSTR).
func (f SectionHeaderFlags32) Executable() bool {
	return (f & 4) != 0
}
func (f SectionHeaderFlags32) Allocated() bool {
	return (f & 2) != 0
}
func (f SectionHeaderFlags32) Writable() bool {
	return (f & 1) != 0
}
func (f SectionHeaderFlags64) Executable() bool {
	return (f & 4) != 0
}
func (f SectionHeaderFlags64) Allocated() bool {
	return (f & 2) != 0
}
func (f SectionHeaderFlags64) Writable() bool {
	return (f & 1) != 0
}
// This is a 32- or 64-bit agnostic way of accessing an ELF section header.
type ELFSectionHeader interface {
	GetType() SectionHeaderType
	GetFlags() ELFSectionFlags
	GetVirtualAddress() uint64
	GetFileOffset() uint64
	GetSize() uint64
	GetLinkedIndex() uint32
	GetInfo() uint32
	GetAlignment() uint64
	GetEntrySize() uint64
	String() string
}
// Trivial field accessors implementing ELFSectionHeader for the 64-bit
// section header type.
func (h *ELF64SectionHeader) GetType() SectionHeaderType {
	return h.Type
}
func (h *ELF64SectionHeader) GetFlags() ELFSectionFlags {
	return h.Flags
}
func (h *ELF64SectionHeader) GetVirtualAddress() uint64 {
	return h.VirtualAddress
}
func (h *ELF64SectionHeader) GetFileOffset() uint64 {
	return h.FileOffset
}
func (h *ELF64SectionHeader) GetSize() uint64 {
	return h.Size
}
func (h *ELF64SectionHeader) GetLinkedIndex() uint32 {
	return h.LinkedIndex
}
func (h *ELF64SectionHeader) GetInfo() uint32 {
	return h.Info
}
func (h *ELF64SectionHeader) GetAlignment() uint64 {
	return h.Align
}
func (h *ELF64SectionHeader) GetEntrySize() uint64 {
	return h.EntrySize
}
// Trivial field accessors implementing ELFSectionHeader for the 32-bit
// section header type; 32-bit fields are widened to uint64 where needed.
func (h *ELF32SectionHeader) GetType() SectionHeaderType {
	return h.Type
}
func (h *ELF32SectionHeader) GetFlags() ELFSectionFlags {
	return h.Flags
}
func (h *ELF32SectionHeader) GetVirtualAddress() uint64 {
	return uint64(h.VirtualAddress)
}
func (h *ELF32SectionHeader) GetFileOffset() uint64 {
	return uint64(h.FileOffset)
}
func (h *ELF32SectionHeader) GetSize() uint64 {
	return uint64(h.Size)
}
func (h *ELF32SectionHeader) GetLinkedIndex() uint32 {
	return h.LinkedIndex
}
func (h *ELF32SectionHeader) GetInfo() uint32 {
	return h.Info
}
func (h *ELF32SectionHeader) GetAlignment() uint64 {
	return uint64(h.Align)
}
func (h *ELF32SectionHeader) GetEntrySize() uint64 {
	return uint64(h.EntrySize)
}
// This is a 32- or 64-bit agnostic way of accessing an ELF program header.
type ELFProgramHeader interface {
	GetType() ProgramHeaderType
	GetFlags() ProgramHeaderFlags
	GetFileOffset() uint64
	GetVirtualAddress() uint64
	GetPhysicalAddress() uint64
	GetFileSize() uint64
	GetMemorySize() uint64
	GetAlignment() uint64
	String() string
}
// Trivial field accessors implementing ELFProgramHeader for the 64-bit
// program header type.
func (h *ELF64ProgramHeader) GetType() ProgramHeaderType {
	return h.Type
}
func (h *ELF64ProgramHeader) GetFlags() ProgramHeaderFlags {
	return h.Flags
}
func (h *ELF64ProgramHeader) GetFileOffset() uint64 {
	return h.FileOffset
}
func (h *ELF64ProgramHeader) GetVirtualAddress() uint64 {
	return h.VirtualAddress
}
func (h *ELF64ProgramHeader) GetPhysicalAddress() uint64 {
	return h.PhysicalAddress
}
func (h *ELF64ProgramHeader) GetFileSize() uint64 {
	return h.FileSize
}
func (h *ELF64ProgramHeader) GetMemorySize() uint64 {
	return h.MemorySize
}
func (h *ELF64ProgramHeader) GetAlignment() uint64 {
	return h.Align
}
// Trivial field accessors implementing ELFProgramHeader for the 32-bit
// program header type; 32-bit fields are widened to uint64 where needed.
func (h *ELF32ProgramHeader) GetType() ProgramHeaderType {
	return h.Type
}
func (h *ELF32ProgramHeader) GetFlags() ProgramHeaderFlags {
	return h.Flags
}
func (h *ELF32ProgramHeader) GetFileOffset() uint64 {
	return uint64(h.FileOffset)
}
func (h *ELF32ProgramHeader) GetVirtualAddress() uint64 {
	return uint64(h.VirtualAddress)
}
func (h *ELF32ProgramHeader) GetPhysicalAddress() uint64 {
	return uint64(h.PhysicalAddress)
}
func (h *ELF32ProgramHeader) GetFileSize() uint64 {
	return uint64(h.FileSize)
}
func (h *ELF32ProgramHeader) GetMemorySize() uint64 {
	return uint64(h.MemorySize)
}
func (h *ELF32ProgramHeader) GetAlignment() uint64 {
	return uint64(h.Align)
}
// This is an interface used to access either 64- or 32-bit ELF symbol table
// entries.
type ELFSymbol interface {
GetName() uint32
GetInfo() ELFSymbolInfo
GetOther() uint8
GetSectionIndex() uint16
GetValue() uint64
GetSize() uint64
String() string
}
// The following methods implement ELFSymbol for ELF64Symbol, exposing the
// 64-bit struct fields directly.
func (s *ELF64Symbol) GetName() uint32 {
	return s.Name
}
func (s *ELF64Symbol) GetInfo() ELFSymbolInfo {
	return s.Info
}
func (s *ELF64Symbol) GetOther() uint8 {
	return s.Other
}
func (s *ELF64Symbol) GetSectionIndex() uint16 {
	return s.SectionIndex
}
func (s *ELF64Symbol) GetValue() uint64 {
	return s.Value
}
func (s *ELF64Symbol) GetSize() uint64 {
	return s.Size
}

// The following methods implement ELFSymbol for ELF32Symbol, widening the
// 32-bit value and size fields to uint64.
func (s *ELF32Symbol) GetName() uint32 {
	return s.Name
}
func (s *ELF32Symbol) GetInfo() ELFSymbolInfo {
	return s.Info
}
func (s *ELF32Symbol) GetOther() uint8 {
	return s.Other
}
func (s *ELF32Symbol) GetSectionIndex() uint16 {
	return s.SectionIndex
}
func (s *ELF32Symbol) GetValue() uint64 {
	return uint64(s.Value)
}
func (s *ELF32Symbol) GetSize() uint64 {
	return uint64(s.Size)
}
// This holds a generic entry in a relocation table for either a 32- or 64-bit
// ELF file.
type ELFRelocation interface {
	// Offset returns the location to which the relocation applies.
	Offset() uint64
	// Type returns the relocation type.
	Type() uint32
	// SymbolIndex returns the index of the symbol the relocation refers to.
	SymbolIndex() uint32
	// Addend returns the relocation addend (0 for REL-style entries).
	Addend() int64
	String() string
}

// ELFDynamicTag is a width-agnostic view of a dynamic section tag.
type ELFDynamicTag interface {
	// GetValue returns the numeric tag value as int64.
	GetValue() int64
	String() string
}

// GetValue implements ELFDynamicTag for the 64-bit tag type.
func (t ELF64DynamicTag) GetValue() int64 {
	return int64(t)
}

// GetValue implements ELFDynamicTag for the 32-bit tag type.
func (t ELF32DynamicTag) GetValue() int64 {
	return int64(t)
}

// ELFDynamicEntry is a width-agnostic view of a dynamic section entry:
// a tag and its associated value.
type ELFDynamicEntry interface {
	GetTag() ELFDynamicTag
	GetValue() uint64
}

// GetTag returns the 64-bit entry's tag.
func (n *ELF64DynamicEntry) GetTag() ELFDynamicTag {
	return n.Tag
}

// GetTag returns the 32-bit entry's tag.
func (n *ELF32DynamicEntry) GetTag() ELFDynamicTag {
	return n.Tag
}

// GetValue returns the 64-bit entry's value.
func (n *ELF64DynamicEntry) GetValue() uint64 {
	return n.Value
}

// GetValue returns the 32-bit entry's value, widened to uint64.
func (n *ELF32DynamicEntry) GetValue() uint64 {
	return uint64(n.Value)
}
// This function parses any ELF file and returns an instance of the ELFFile
// interface if no errors occur.
func ParseELFFile(raw []byte) (ELFFile, error) {
if len(raw) < 5 {
return nil, fmt.Errorf("Invalid ELF file: is only %d bytes", len(raw))
}
if raw[4] == 2 {
return ParseELF64File(raw)
}
return ParseELF32File(raw)
} | elf_interface.go | 0.733833 | 0.489198 | elf_interface.go | starcoder |
package sound
import (
"errors"
"math"
)
// CalcGoertzel calculates the power for a given frequency in a MonoSample. The Goertzel
// Algorithm requires much less CPU cycles than calculating the sprectrum power density
// through an FFT. For more details on the Goertzel Filter check out:
// https://courses.cs.washington.edu/courses/cse466/12au/calendar/Goertzel-EETimes.pdf
//
// The returned value is the magnitude of the selected DFT bin expressed in
// dB (10*log10).
func CalcGoertzel(freq float64, samplingRate float64, samples MonoSamples) float64 {
	data := samples.toFloat64()
	numSamples := len(data)
	floatNumSamples := float64(numSamples)

	// The Goertzel scaling factor is N/2 as a real number. (This was
	// previously float64(numSamples / 2.0), which performed integer
	// division and truncated for odd sample counts.)
	scalingFactor := floatNumSamples / 2.0

	// k is the DFT bin closest to the requested frequency.
	k := int(0.5 + (floatNumSamples * freq / samplingRate))
	omega := (2.0 * math.Pi * float64(k)) / floatNumSamples
	sine := math.Sin(omega)
	cosine := math.Cos(omega)
	coeff := 2.0 * cosine

	// Second-order IIR recurrence over the sample block.
	var q0, q1, q2 float64
	for _, d := range data {
		q0 = coeff*q1 - q2 + d
		q2 = q1
		q1 = q0
	}

	// Convert the final filter state into the bin's complex amplitude,
	// then into a dB magnitude.
	re := (q1 - q2*cosine) / scalingFactor
	im := (q2 * sine) / scalingFactor
	return 10 * math.Log10(math.Sqrt(re*re+im*im))
}
// findMaxValuePosition scans in for its largest element and returns that
// element's index together with the value expressed in dB (10*log10).
// The slice must be non-empty.
func findMaxValuePosition(in []float64) (int, float64) {
	maxPosition := 0
	maxValue := in[0]
	for i := 1; i < len(in); i++ {
		if in[i] > maxValue {
			maxValue = in[i]
			maxPosition = i
		}
	}
	return maxPosition, 10 * math.Log10(maxValue)
}
// CalculateAverage calculates the mean value over a slice of float64 values.
// It returns an error when the slice is empty, since the mean is undefined
// in that case.
func CalculateAverage(slice []float64) (float64, error) {
	if len(slice) == 0 {
		// Error string lowercased per Go convention (error messages are
		// often wrapped mid-sentence).
		return 0.0, errors.New("result array empty")
	}
	var sum float64
	for _, el := range slice {
		sum += el
	}
	return sum / float64(len(slice)), nil
}
// toFloat64 converts a []Sample to []float64
func (in *MonoSamples) toFloat64() []float64 {
out := make([]float64, len(*in))
for i, el := range *in {
out[i] = float64(el)
}
return out
} | sound/goertzel.go | 0.835349 | 0.538255 | goertzel.go | starcoder |
package tree
import (
"fmt"
"github.com/arr-ai/frozen/errors"
"github.com/arr-ai/frozen/internal/depth"
"github.com/arr-ai/frozen/internal/fu"
"github.com/arr-ai/frozen/internal/pkg/masker"
)
const (
	// Local aliases for the depth package's fanout constants.
	fanoutBits = depth.FanoutBits
	fanout     = depth.Fanout
)

var (
	// UseRHS returns its RHS arg.
	UseRHS = func(_, b elementT) elementT { return b }
	// UseLHS returns its LHS arg.
	UseLHS = func(a, _ elementT) elementT { return a }
)

// branch is an interior node of the tree, holding up to fanout children in
// a packer.
type branch struct {
	p packer
}

// newBranch wraps the given packer (which may be nil for an empty branch)
// in a freshly allocated branch.
func newBranch(p *packer) *branch {
	b := &branch{}
	if p != nil {
		b.p = *p
	}
	return b
}

// newBranchFrom builds a branch rooted at the given depth by adding the
// supplied elements one at a time using the default combine args.
func newBranchFrom(depth int, data ...elementT) *branch {
	b := &branch{}
	for _, e := range data {
		h := newHasher(e, depth)
		b.Add(DefaultNPCombineArgs, e, depth, h)
	}
	return b
}
// Add inserts v into the subtree in place: if the child slot selected by
// v's hash is empty a one-element leaf is created there, otherwise the
// insert recurses one level deeper. It returns the (mutated) receiver and
// the number of existing elements that matched v.
func (b *branch) Add(args *CombineArgs, v elementT, depth int, h hasher) (_ node, matches int) {
	i := h.hash()
	if b.p.data[i] == nil {
		l := newLeaf1(v)
		b.p.SetNonNilChild(i, l)
	} else {
		// Slot occupied: consume one level of the hash and recurse.
		h2 := h.next()
		var n node
		n, matches = b.p.data[i].Add(args, v, depth+1, h2)
		b.p.SetNonNilChild(i, n)
	}
	return b, matches
}

// AppendTo appends every element of the subtree to dest, in child-slot
// order. A nil result from a child (propagated to the caller) indicates
// dest's capacity was exceeded, and iteration stops early.
func (b *branch) AppendTo(dest []elementT) []elementT {
	for _, child := range b.p.data {
		if child != nil {
			if dest = child.AppendTo(dest); dest == nil {
				break
			}
		}
	}
	return dest
}

// Canonical returns the most compact representation of the subtree: if all
// elements fit in a leaf-sized buffer the branch is rebuilt as a twig,
// otherwise the branch itself is returned.
func (b *branch) Canonical(_ int) node {
	var buf [maxLeafLen]elementT
	if data := b.AppendTo(buf[:0]); data != nil {
		return newTwig(data...).Canonical(0)
	}
	return b
}
// Combine merges n into b, returning the merged node and the number of
// elements found on both sides. Branch-branch merges visit the union of
// both masks per slot (possibly in parallel, as decided by args); a leaf
// operand is folded in one element at a time via with.
func (b *branch) Combine(args *CombineArgs, n node, depth int) (_ node, matches int) {
	switch n := n.(type) {
	case *branch:
		ret := newBranch(nil)
		_, matches = args.Parallel(depth, b.p.mask|n.p.mask, func(i int) (_ bool, matches int) {
			x, y := b.p.data[i], n.p.data[i]
			if x == nil {
				ret.p.SetNonNilChild(i, y)
			} else if y == nil {
				ret.p.SetNonNilChild(i, x)
			} else {
				// Both sides occupy this slot: merge recursively.
				var n node
				n, matches = x.Combine(args, y, depth+1)
				ret.p.data[i] = n
			}
			return true, matches
		})
		ret.p.updateMask()
		return ret, matches
	case *leaf:
		for _, e := range n.slice() {
			h := newHasher(e, depth)
			var m int
			b, m = b.with(args, e, depth, h)
			matches += m
		}
		return b, matches
	default:
		panic(errors.WTF)
	}
}

// Difference returns b with every element that also appears in n removed,
// along with the number of elements removed. Branch operands are handled
// per occupied slot of b; leaf operands remove their elements one by one.
// The result is canonicalized so shrunken subtrees collapse into leaves.
func (b *branch) Difference(args *EqArgs, n node, depth int) (_ node, matches int) {
	switch n := n.(type) {
	case *branch:
		ret := newBranch(nil)
		_, matches = args.Parallel(depth, b.p.mask, func(i int) (_ bool, matches int) {
			x, y := b.p.data[i], n.p.data[i]
			if y == nil {
				// Nothing to subtract in this slot; keep b's child.
				ret.p.data[i] = x
			} else {
				var n node
				n, matches = x.Difference(args, y, depth+1)
				ret.p.data[i] = n
			}
			return true, matches
		})
		ret.p.updateMask()
		return ret.Canonical(depth), matches
	case *leaf:
		ret := node(b)
		for _, e := range n.slice() {
			h := newHasher(e, depth)
			var m int
			ret, m = ret.Without(args, e, depth, h)
			matches += m
		}
		return ret, matches
	default:
		panic(errors.WTF)
	}
}
// Empty reports whether the node holds no elements; a branch always holds
// at least one child, so this is always false.
func (b *branch) Empty() bool {
	return false
}

// Equal reports whether b and n hold equal children in every slot. Only
// another branch can compare equal; the masks are compared first as a
// cheap short-circuit before the (possibly parallel) per-slot comparison.
func (b *branch) Equal(args *EqArgs, n node, depth int) bool {
	if n, is := n.(*branch); is {
		if b.p.mask != n.p.mask {
			return false
		}
		equal, _ := args.Parallel(depth, b.p.mask, func(i int) (_ bool, matches int) {
			x, y := b.p.data[i], n.p.data[i]
			return x.Equal(args, y, depth+1), 0
		})
		return equal
	}
	return false
}

// Get returns a pointer to the stored element equal to v, or nil when the
// child slot selected by v's hash is empty or does not contain it.
func (b *branch) Get(args *EqArgs, v elementT, h hasher) *elementT {
	if x := b.p.data[h.hash()]; x != nil {
		h2 := h.next()
		return x.Get(args, v, h2)
	}
	return nil
}

// Intersection returns the elements common to b and n, plus their count.
// Branch operands recurse over the slots present in both masks; a leaf
// operand drives the comparison itself, with argument roles flipped.
func (b *branch) Intersection(args *EqArgs, n node, depth int) (_ node, matches int) {
	switch n := n.(type) {
	case *branch:
		ret := newBranch(nil)
		_, matches = args.Parallel(depth, b.p.mask&n.p.mask, func(i int) (_ bool, matches int) {
			x, y := b.p.data[i], n.p.data[i]
			var n node
			n, matches = x.Intersection(args, y, depth+1)
			ret.p.data[i] = n
		return true, matches
		})
		ret.p.updateMask()
		return ret.Canonical(depth), matches
	case *leaf:
		return n.Intersection(args.Flip(), b, depth)
	default:
		panic(errors.WTF)
	}
}

// Iterator returns an iterator over the subtree, using buf as scratch
// space for the traversal stack.
func (b *branch) Iterator(buf [][]node) Iterator {
	return b.p.Iterator(buf)
}
// Reduce folds the subtree into a single element: each child is reduced
// (possibly in parallel), zero-valued results are filtered out in place,
// and r combines the remainder.
func (b *branch) Reduce(args NodeArgs, depth int, r func(values ...elementT) elementT) elementT {
	var results [fanout]elementT
	args.Parallel(depth, b.p.mask, func(i int) (_ bool, matches int) {
		x := b.p.data[i]
		results[i] = x.Reduce(args, depth+1, r)
		return true, 0
	})
	// Compact the fixed-size result array in place, dropping zero values
	// from empty slots.
	results2 := results[:0]
	for _, r := range results {
		if r != zero {
			results2 = append(results2, r)
		}
	}
	return r(results2...)
}

// Remove deletes v from the child selected by its hash, mutating the
// receiver. If that child is no longer a branch afterwards and the whole
// subtree now fits in a leaf buffer, the branch collapses into a leaf.
func (b *branch) Remove(args *EqArgs, v elementT, depth int, h hasher) (_ node, matches int) {
	i := h.hash()
	if n := b.p.data[i]; n != nil {
		var n node // shadows the guard variable above
		n, matches = b.p.data[i].Remove(args, v, depth+1, h.next())
		b.p.data[i] = n
		if _, is := n.(*branch); !is {
			var buf [maxLeafLen]elementT
			if data := b.AppendTo(buf[:0]); data != nil {
				return newLeaf(data...), matches
			}
		}
	}
	b.p.updateMask()
	return b, matches
}

// SubsetOf reports whether every element of b is contained in n. Only a
// branch can be a superset of a branch; for each slot in the union of both
// masks, b's child must be absent or a subset of n's corresponding child.
func (b *branch) SubsetOf(args *EqArgs, n node, depth int) bool {
	switch n := n.(type) {
	case *branch:
		ok, _ := args.Parallel(depth, b.p.mask|n.p.mask, func(i int) (bool, int) {
			x, y := b.p.data[i], n.p.data[i]
			if x == nil {
				return true, 0
			} else if y == nil {
				return false, 0
			} else {
				return x.SubsetOf(args, y, depth+1), 0
			}
		})
		return ok
	default:
		return false
	}
}
// Map applies f to every element. Each child is mapped (possibly in
// parallel), but f may relocate elements to different hash slots, so the
// per-slot results are re-combined from scratch; elements double-counted
// by those merges are subtracted from the match count.
func (b *branch) Map(args *CombineArgs, depth int, f func(e elementT) elementT) (_ node, matches int) {
	var p packer
	_, matches = args.Parallel(depth, b.p.mask, func(i int) (_ bool, matches int) {
		if x := b.p.data[i]; x != nil {
			var n node
			n, matches = x.Map(args, depth+1, f)
			p.data[i] = n
		}
		return true, matches
	})
	p.updateMask()
	if p.mask == 0 {
		return
	}
	// Fold all mapped children into one tree, tracking merge duplicates.
	acc := p.GetChild(p.mask)
	var duplicates int
	for m := p.mask.Next(); m != 0; m = m.Next() {
		var d int
		acc, d = acc.Combine(args, p.GetChild(m), 0)
		duplicates += d
	}
	matches -= duplicates
	return acc, matches
}

// Where filters each child of the subtree (possibly in parallel). If
// nothing changed the receiver is returned unchanged; otherwise the
// filtered packer is wrapped in a new branch and canonicalized.
func (b *branch) Where(args *WhereArgs, depth int) (_ node, matches int) {
	var nodes packer
	_, matches = args.Parallel(depth, b.p.mask, func(i int) (_ bool, matches int) {
		x := b.p.data[i]
		var n node
		n, matches = x.Where(args, depth+1)
		nodes.data[i] = n
		return true, matches
	})
	nodes.updateMask()
	if nodes != b.p {
		return newBranch(&nodes).Canonical(depth), matches
	}
	return b, matches
}

// Vet checks internal invariants, panicking on a stale mask or a nil child
// inside an occupied slot, and returns the number of elements in the
// subtree. Panics from children are wrapped with the slot index.
func (b *branch) Vet() int {
	p := b.p
	p.updateMask()
	if p.mask != b.p.mask {
		panic("stale mask")
	}
	count := 0
	for m := b.p.mask; m != 0; m = m.Next() {
		func() {
			defer func() {
				if r := recover(); r != nil {
					panic(errors.WrapPrefix(r, fmt.Sprintf("branch[%d]", m.FirstIndex()), 0))
				}
			}()
			if n := p.GetChild(m); n != nil {
				count += p.GetChild(m).Vet()
			} else {
				panic(errors.Errorf("nil node for mask %b", b.p.mask))
			}
		}()
	}
	return count
}
// With returns a tree containing v in addition to b's elements; see with.
func (b *branch) With(args *CombineArgs, v elementT, depth int, h hasher) (_ node, matches int) {
	return b.with(args, v, depth, h)
}

// with is the branch-typed implementation of With. It favors structural
// sharing: when the affected child is unchanged by the insert, the
// receiver itself is returned rather than a copy.
func (b *branch) with(args *CombineArgs, v elementT, depth int, h hasher) (_ *branch, matches int) {
	i := h.hash()
	g := h.next()
	if x := b.p.data[i]; x != nil {
		x2, matches := x.With(args, v, depth+1, g)
		if x2 != x {
			return newBranch(b.p.WithChild(i, x2)), matches
		}
		return b, matches
	}
	// Empty slot: the new element becomes a one-element leaf there.
	return newBranch(b.p.WithChild(i, newLeaf1(v))), 0
}

// Without returns a tree with v removed, sharing structure with b when the
// affected child is unchanged. When the child does change, the slot's mask
// bit is refreshed and the rebuilt branch is canonicalized.
func (b *branch) Without(args *EqArgs, v elementT, depth int, h hasher) (_ node, matches int) {
	i := h.hash()
	g := h.next()
	if x := b.p.data[i]; x != nil {
		var x2 node
		if x2, matches = x.Without(args, v, depth+1, g); x2 != x {
			b.p.updateMaskBit(masker.NewMasker(i))
			return newBranch(b.p.WithChild(i, x2)).Canonical(depth), matches
		}
	}
	return b, matches
}

// branchStringIndices holds superscript digits used by Format to label
// each child with its slot index (0-15).
var branchStringIndices = []string{
	"⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹",
	"¹⁰", "¹¹", "¹²", "¹³", "¹⁴", "¹⁵",
}
// Format implements fmt.Formatter, printing the branch between "⁅" and "⁆"
// with each child prefixed by its superscript slot index. Trees small
// enough to fit in a 20-element buffer are printed one child per line
// (indented); larger trees are printed inline. Write errors panic.
func (b *branch) Format(f fmt.State, verb rune) {
	total := 0
	printf := func(format string, args ...interface{}) {
		n, err := fmt.Fprintf(f, format, args...)
		if err != nil {
			panic(err)
		}
		total += n
	}
	write := func(b []byte) {
		n, err := f.Write(b)
		if err != nil {
			panic(err)
		}
		total += n
	}
	write([]byte("⁅"))
	// AppendTo returns non-nil only if all elements fit in buf, i.e. the
	// tree is small ("shallow") enough for the multi-line rendering.
	var buf [20]elementT
	shallow := b.AppendTo(buf[:]) != nil
	if shallow {
		write([]byte("\n"))
	}
	for i, x := range b.p.data {
		if x == nil {
			continue
		}
		index := branchStringIndices[i]
		if shallow {
			printf("  %s%s\n", index, fu.IndentBlock(x.String()))
		} else {
			if i > 0 {
				write([]byte(" "))
			}
			printf("%s", index)
			x.Format(f, verb)
		}
	}
	write([]byte("⁆"))
	fu.PadFormat(f, total)
}

// String renders the branch through Format via the %s verb.
func (b *branch) String() string {
	return fmt.Sprintf("%s", b)
}

// clone returns a deep copy of the branch: the struct is copied and every
// occupied child slot is cloned recursively.
func (b *branch) clone() node {
	ret := *b
	for m := ret.p.mask; m != 0; m = m.Next() {
		i := m.FirstIndex()
		ret.p.data[i] = ret.p.data[i].clone()
	}
	return &ret
}
package gui
import (
"fmt"
"math/big"
"time"
)
var (
// ZeroInt is the default value for a big.Int.
ZeroInt = new(big.Int).SetInt64(0)
// ZeroRat is the default value for a big.Rat.
ZeroRat = new(big.Rat).SetInt64(0)
// KiloHash is 1 KH represented as a big.Rat.
KiloHash = new(big.Rat).SetInt64(1000)
// MegaHash is 1MH represented as a big.Rat.
MegaHash = new(big.Rat).SetInt64(1000000)
// GigaHash is 1GH represented as a big.Rat.
GigaHash = new(big.Rat).SetInt64(1000000000)
// TeraHash is 1TH represented as a big.Rat.
TeraHash = new(big.Rat).SetInt64(1000000000000)
// PetaHash is 1PH represented as a big.Rat
PetaHash = new(big.Rat).SetInt64(1000000000000000)
)
func truncateAccountID(accountID string) string {
return fmt.Sprintf("%.12s", accountID) + "..."
}
// HashString formats the provided hashrate per the best-fit unit.
func hashString(hash *big.Rat) string {
if hash.Cmp(ZeroRat) == 0 {
return "0 H/s"
}
if hash.Cmp(PetaHash) > 0 {
ph := new(big.Rat).Quo(hash, PetaHash)
return fmt.Sprintf("%v PH/s", ph.FloatString(4))
}
if hash.Cmp(TeraHash) > 0 {
th := new(big.Rat).Quo(hash, TeraHash)
return fmt.Sprintf("%v TH/s", th.FloatString(4))
}
if hash.Cmp(GigaHash) > 0 {
gh := new(big.Rat).Quo(hash, GigaHash)
return fmt.Sprintf("%v GH/s", gh.FloatString(4))
}
if hash.Cmp(MegaHash) > 0 {
mh := new(big.Rat).Quo(hash, MegaHash)
return fmt.Sprintf("%v MH/s", mh.FloatString(4))
}
if hash.Cmp(KiloHash) > 0 {
kh := new(big.Rat).Quo(hash, KiloHash)
return fmt.Sprintf("%v KH/s", kh.FloatString(4))
}
return "< 1KH/s"
}
// blockURL builds the block-explorer URL for the given block height.
func blockURL(blockExplorerURL string, blockHeight uint32) string {
	return fmt.Sprintf("%s/block/%d", blockExplorerURL, blockHeight)
}

// txURL builds the block-explorer URL for the given transaction ID.
func txURL(blockExplorerURL string, txID string) string {
	return fmt.Sprintf("%s/tx/%s", blockExplorerURL, txID)
}
// formatUnixTime formats the provided integer as a UTC time string,
//
// NOTE(review): time.Unix(0, unix) interprets unix as NANOSECONDS since the
// epoch — confirm callers pass nanoseconds, not seconds. Also, Format
// renders in the process's local time zone despite the "UTC" wording above;
// add .UTC() if UTC output is actually required — confirm with callers.
func formatUnixTime(unix int64) string {
	return time.Unix(0, unix).Format("2-Jan-2006 15:04:05 MST")
}
// floatToPercent formats the provided float64 as a percentage,
// rounded to the nearest decimal place. eg. "10.5%"
func floatToPercent(rat float64) string {
rat = rat * 100
str := fmt.Sprintf("%.1f", rat)
return str + "%"
}
// ratToPercent formats the provided big.Rat as a percentage,
// rounded to the nearest decimal place. eg. "10.5%"
func ratToPercent(rat *big.Rat) string {
real, _ := rat.Float64()
return floatToPercent(real)
} | gui/formatting.go | 0.69035 | 0.449997 | formatting.go | starcoder |
package commands
import (
"github.com/BurntSushi/gribble"
"github.com/BurntSushi/wingo/workspace"
)
// AutoTile is a gribble command that enables automatic tiling on a
// workspace (re-placing the layout if tiling is already active).
type AutoTile struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Initiates automatic tiling on the workspace specified by Workspace. If tiling
is already active, the layout will be re-placed.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run switches the workspace's layout state to automatic tiling.
func (cmd AutoTile) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			wrk.LayoutStateSet(workspace.AutoTiling)
		})
		return nil
	})
}

// AutoUntile is a gribble command that disables automatic tiling on a
// workspace, restoring windows to their pre-tiling geometry.
type AutoUntile struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Stops automatic tiling on the workspace specified by Workspace, and restores
windows to their position and geometry before being tiled. If tiling is not
active on the specified workspace, this command has no effect.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run switches the workspace's layout state back to floating.
func (cmd AutoUntile) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			wrk.LayoutStateSet(workspace.Floating)
		})
		return nil
	})
}

// AutoCycle is a gribble command that advances a workspace to its next
// automatic tiling layout.
type AutoCycle struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Cycles to the next automatic tiling layout in the workspace specified by
Workspace.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run cycles the workspace to its next auto-tiling layout.
func (cmd AutoCycle) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			wrk.AutoCycle()
		})
		return nil
	})
}
// AutoResizeMaster is a gribble command that grows or shrinks the master
// split of a workspace's tiling layout by a ratio.
type AutoResizeMaster struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Amount float64 `param:"2"`
	Help string `
Increases or decreases the size of the master split by Amount in the layout on
the workspace specified by Workspace.
Amount should be a ratio between 0.0 and 1.0.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run resizes the master split; it is a no-op unless the workspace is in
// automatic-tiling mode.
func (cmd AutoResizeMaster) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().ResizeMaster(cmd.Amount)
		})
		return nil
	})
}

// AutoResizeWindow is a gribble command that grows or shrinks the current
// window within a workspace's tiling layout by a ratio.
type AutoResizeWindow struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Amount float64 `param:"2"`
	Help string `
Increases or decreases the size of the current window by Amount in the layout
on the workspace specified by Workspace.
Amount should be a ratio between 0.0 and 1.0.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run resizes the current tiled window; it is a no-op unless the workspace
// is in automatic-tiling mode.
func (cmd AutoResizeWindow) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().ResizeWindow(cmd.Amount)
		})
		return nil
	})
}
// AutoNext is a gribble command that moves focus to the next client in a
// workspace's tiling layout.
type AutoNext struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Moves focus to the next client in the layout.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run focuses the next tiled client; it is a no-op unless the workspace is
// in automatic-tiling mode.
func (cmd AutoNext) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().Next()
		})
		return nil
	})
}
// AutoPrev is a gribble command that moves focus to the previous client in
// a workspace's tiling layout. (Its Help text previously said "next
// client", a copy-paste error from AutoNext; the Run body calls Prev.)
type AutoPrev struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Moves focus to the previous client in the layout.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run focuses the previous tiled client; it is a no-op unless the
// workspace is in automatic-tiling mode.
func (cmd AutoPrev) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().Prev()
		})
		return nil
	})
}
// AutoSwitchNext is a gribble command that swaps the current window with
// the next window in a workspace's tiling layout.
type AutoSwitchNext struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Switches the current window with the next window in the layout.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run swaps the current tiled window with the next one; it is a no-op
// unless the workspace is in automatic-tiling mode.
func (cmd AutoSwitchNext) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().SwitchNext()
		})
		return nil
	})
}

// AutoSwitchPrev is a gribble command that swaps the current window with
// the previous window in a workspace's tiling layout.
type AutoSwitchPrev struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Switches the current window with the previous window in the layout.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run swaps the current tiled window with the previous one; it is a no-op
// unless the workspace is in automatic-tiling mode.
func (cmd AutoSwitchPrev) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().SwitchPrev()
		})
		return nil
	})
}

// AutoMaster is a gribble command that focuses the first master window in
// a workspace's tiling layout.
type AutoMaster struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Focuses the (first) master window in the layout for the workspace specified
by Workspace.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run focuses the first master window; it is a no-op unless the workspace
// is in automatic-tiling mode.
func (cmd AutoMaster) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().FocusMaster()
		})
		return nil
	})
}

// AutoMakeMaster is a gribble command that swaps the current window with
// the first master in a workspace's tiling layout.
type AutoMakeMaster struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Switches the current window with the first master in the layout for the
workspace specified by Workspace.
Note that this command has no effect if the workspace is not visible.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run promotes the current window to master; it is a no-op unless the
// workspace is in automatic-tiling mode.
func (cmd AutoMakeMaster) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().MakeMaster()
		})
		return nil
	})
}
// AutoMastersFewer is a gribble command that shrinks the master split of a
// workspace's tiling layout by one window.
type AutoMastersFewer struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Allows one fewer master window to fit into the master split.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run reduces the master window count by one; it is a no-op unless the
// workspace is in automatic-tiling mode.
func (cmd AutoMastersFewer) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().MastersFewer()
		})
		return nil
	})
}

// AutoMastersMore is a gribble command that grows the master split of a
// workspace's tiling layout by one window.
type AutoMastersMore struct {
	Workspace gribble.Any `param:"1" types:"int,string"`
	Help string `
Allows one more master window to fit into the master split.
Workspace may be a workspace index (integer) starting at 0, or a workspace
name.
`
}

// Run increases the master window count by one; it is a no-op unless the
// workspace is in automatic-tiling mode.
func (cmd AutoMastersMore) Run() gribble.Value {
	return syncRun(func() gribble.Value {
		withWorkspace(cmd.Workspace, func(wrk *workspace.Workspace) {
			if wrk.State != workspace.AutoTiling {
				return
			}
			wrk.LayoutAutoTiler().MastersMore()
		})
		return nil
	})
}
package output
import (
"sync/atomic"
"time"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// init registers the inproc output type and its documentation with the
// global constructor table.
func init() {
	Constructors[TypeInproc] = TypeSpec{
		constructor: NewInproc,
		Description: `
Sends data directly to Benthos inputs by connecting to a unique ID. This allows
you to hook up isolated streams whilst running Benthos in
` + "[`--streams` mode](/docs/guides/streams_mode/about)" + `, it is NOT recommended
that you connect the inputs of a stream with an output of the same stream, as
feedback loops can lead to deadlocks in your message flow.
It is possible to connect multiple inputs to the same inproc ID, but only one
output can connect to an inproc ID, and will replace existing outputs if a
collision occurs.`,
	}
}
//------------------------------------------------------------------------------
// InprocConfig contains configuration fields for the Inproc output type.
// The value is the unique inproc pipe ID to publish transactions under.
type InprocConfig string

// NewInprocConfig creates a new InprocConfig with default values (an empty
// pipe ID).
func NewInprocConfig() InprocConfig {
	return InprocConfig("")
}
//------------------------------------------------------------------------------
// Inproc is an output type that serves Inproc messages.
type Inproc struct {
	// running is 1 while the broker loop is active; accessed atomically.
	running int32

	// pipe is the unique inproc ID transactions are published under.
	pipe string

	mgr   types.Manager
	log   log.Modular
	stats metrics.Type

	// transactionsOut is registered with the manager so inproc inputs can
	// consume from it; transactionsIn is the upstream feed set by Consume.
	transactionsOut chan types.Transaction
	transactionsIn  <-chan types.Transaction

	// closedChan is closed when the loop has fully shut down; closeChan
	// signals the loop to stop.
	closedChan chan struct{}
	closeChan  chan struct{}
}
// NewInproc creates a new Inproc output type. The broker loop does not
// start until Consume is called with an input channel.
func NewInproc(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	i := &Inproc{
		running:         1,
		pipe:            string(conf.Inproc),
		mgr:             mgr,
		log:             log,
		stats:           stats,
		transactionsOut: make(chan types.Transaction),
		closedChan:      make(chan struct{}),
		closeChan:       make(chan struct{}),
	}
	return i, nil
}
//------------------------------------------------------------------------------
// loop is an internal loop that brokers incoming messages to output pipe.
// It registers transactionsOut with the manager under the configured pipe
// ID, forwards each incoming transaction unchanged, and on exit (input
// closed or closeChan signalled) unregisters the pipe and closes both the
// output and the closed-notification channels.
func (i *Inproc) loop() {
	var (
		mRunning       = i.stats.GetGauge("running")
		mCount         = i.stats.GetCounter("count")
		mPartsCount    = i.stats.GetCounter("parts.count")
		mSendSucc      = i.stats.GetCounter("send.success")
		mPartsSendSucc = i.stats.GetCounter("parts.send.success")
		mSent          = i.stats.GetCounter("batch.sent")
		mPartsSent     = i.stats.GetCounter("sent")
	)
	defer func() {
		mRunning.Decr(1)
		atomic.StoreInt32(&i.running, 0)
		i.mgr.UnsetPipe(i.pipe, i.transactionsOut)
		close(i.transactionsOut)
		close(i.closedChan)
	}()
	mRunning.Incr(1)
	i.mgr.SetPipe(i.pipe, i.transactionsOut)
	i.log.Infof("Sending inproc messages to ID: %s\n", i.pipe)
	var open bool
	for atomic.LoadInt32(&i.running) == 1 {
		// Receive the next transaction, bailing out on input closure or an
		// explicit shutdown signal.
		var ts types.Transaction
		select {
		case ts, open = <-i.transactionsIn:
			if !open {
				return
			}
		case <-i.closeChan:
			return
		}
		mCount.Incr(1)
		if ts.Payload != nil {
			mPartsCount.Incr(int64(ts.Payload.Len()))
		}
		// Forward the transaction to the registered pipe, still honouring
		// shutdown while blocked on the send.
		select {
		case i.transactionsOut <- ts:
			mSendSucc.Incr(1)
			mSent.Incr(1)
			if ts.Payload != nil {
				mPartsSendSucc.Incr(int64(ts.Payload.Len()))
				mPartsSent.Incr(int64(ts.Payload.Len()))
			}
		case <-i.closeChan:
			return
		}
	}
}
// Consume assigns a messages channel for the output to read and starts the
// broker loop. It returns ErrAlreadyStarted if a channel was already
// assigned.
func (i *Inproc) Consume(ts <-chan types.Transaction) error {
	if i.transactionsIn != nil {
		return types.ErrAlreadyStarted
	}
	i.transactionsIn = ts
	go i.loop()
	return nil
}

// Connected returns a boolean indicating whether this output is currently
// connected to its target. Inproc pipes are in-memory, so this is always
// true.
func (i *Inproc) Connected() bool {
	return true
}

// CloseAsync shuts down the Inproc output and stops processing messages.
// The compare-and-swap ensures closeChan is closed at most once.
func (i *Inproc) CloseAsync() {
	if atomic.CompareAndSwapInt32(&i.running, 1, 0) {
		close(i.closeChan)
	}
}

// WaitForClose blocks until the Inproc output has closed down, returning
// ErrTimeout if the loop has not finished within the given duration.
func (i *Inproc) WaitForClose(timeout time.Duration) error {
	select {
	case <-i.closedChan:
	case <-time.After(timeout):
		return types.ErrTimeout
	}
	return nil
}
//------------------------------------------------------------------------------ | lib/output/inproc.go | 0.599016 | 0.452173 | inproc.go | starcoder |
package main
import "strconv"
// heatPoint is a single cell of the heatmap: its x/y grid coordinates and
// its height digit num.
type heatPoint struct {
	x   int
	y   int
	num int
}

// FindLowPoints finds the low points in a heatmap.
// Returns an array of heatPoints being the lowest nearby points.
// Each input line is a row of single-digit heights; a cell is a low point
// when it is strictly lower than all of its orthogonal neighbours.
func FindLowPoints(input []string) []heatPoint {
	maxY := len(input) - 1
	maxX := len(input[0]) - 1
	lowPoints := make([]heatPoint, 0)
	for y, line := range input {
		for x, char := range line {
			i, _ := strconv.Atoi(string(char))
			if isLowPoint(input, line, maxX, maxY, x, y, i) {
				lowPoints = append(lowPoints, heatPoint{x, y, i})
			}
		}
	}
	return lowPoints
}

// isLowPoint
// Returns two if the point is the lowest of the adjacent points.
// Neighbour indices are clamped to the grid, and the x==0 / x==maxX /
// y==0 / y==maxY guards skip the comparison on edges (where the clamped
// neighbour would otherwise be the cell itself).
func isLowPoint(input []string, line string, maxX int, maxY int, x int, y int, i int) bool {
	left := Max(x-1, 0)
	right := Min(x+1, maxX)
	top := Max(y-1, 0)
	bottom := Min(y+1, maxY)
	leftNum, _ := strconv.Atoi(string(line[left]))
	rightNum, _ := strconv.Atoi(string(line[right]))
	topNum, _ := strconv.Atoi(string(input[top][x]))
	bottomNum, _ := strconv.Atoi(string(input[bottom][x]))
	return (x == 0 || i < leftNum) && (x == maxX || i < rightNum) && (y == 0 || i < topNum) && (y == maxY || i < bottomNum)
}
// CalculateRiskLevel sums the risk of the supplied low points, where each
// point's risk is its height plus one.
func CalculateRiskLevel(heatPoints []heatPoint) int {
	total := 0
	for _, p := range heatPoints {
		total += p.num + 1
	}
	return total
}
// CalculateBasinSize Calculates the basin size using a Breadth first search approach.
// Returns a map of the points that make up a basin starting at a given lowpoint point.
// Height-'9' cells act as basin boundaries and are never added to the set.
// point.num is zeroed before enqueueing so that map keys compare on
// coordinates only (all enqueued neighbours also carry num == 0).
func CalculateBasinSize(input []string, point heatPoint) map[heatPoint]bool {
	maxY := len(input) - 1
	maxX := len(input[0]) - 1
	end := byte('9')
	seenPoints := make(map[heatPoint]bool)
	point.num = 0
	queue := []heatPoint{point}
	for {
		// Pop the head of the queue.
		p := queue[0]
		queue = queue[1:]
		x := p.x
		y := p.y
		// Expand only unvisited, non-boundary cells; already-seen points
		// may be re-enqueued by neighbours but are skipped here.
		if !seenPoints[p] && input[y][x] != end {
			seenPoints[p] = true
			if x > 0 {
				queue = append(queue, heatPoint{x - 1, y, 0})
			}
			if x < maxX {
				queue = append(queue, heatPoint{x + 1, y, 0})
			}
			if y > 0 {
				queue = append(queue, heatPoint{x, y - 1, 0})
			}
			if y < maxY {
				queue = append(queue, heatPoint{x, y + 1, 0})
			}
		}
		if len(queue) == 0 {
			return seenPoints
		}
	}
}
/*
1 procedure BFS(G, root) is
2 let Q be a queue
3 label root as explored
4 Q.enqueue(root)
5 while Q is not empty do
6 v := Q.dequeue()
7 if v is the goal then
8 return v
9 for all edges from v to w in G.adjacentEdges(v) do
10 if w is not labeled as explored then
11 label w as explored
12 Q.enqueue(w)
*/ | day9.go | 0.712932 | 0.682097 | day9.go | starcoder |
package resource
import (
"fmt"
"sort"
"github.com/onsi/gomega"
"github.com/onsi/gomega/types"
)
// matcherToGomegaMatcher translates a parsed goss matcher specification
// into a gomega matcher.
//
// Scalars (string, int, bool, float64, ...) become equivalence matchers.
// A slice is AND-combined: entries that are already GomegaMatchers are
// used as-is, anything else becomes a ContainElement expectation.
// Otherwise the value must be a single-entry map of
// {matcher-name: argument}, dispatched on the matcher name; unknown names
// are an error. (A leftover debug loop that printed nil sub-matchers to
// stdout in the "have-key-with-value" case has been removed.)
func matcherToGomegaMatcher(matcher interface{}) (types.GomegaMatcher, error) {
	switch x := matcher.(type) {
	case string, int, uint, int64, uint64, bool, float64:
		return gomega.BeEquivalentTo(x), nil
	case []interface{}:
		var matchers []types.GomegaMatcher
		for _, valueI := range x {
			if subMatcher, ok := valueI.(types.GomegaMatcher); ok {
				matchers = append(matchers, subMatcher)
			} else {
				matchers = append(matchers, gomega.ContainElement(valueI))
			}
		}
		return gomega.And(matchers...), nil
	}
	matcher = sanitizeExpectedValue(matcher)
	if matcher == nil {
		return nil, fmt.Errorf("Missing Required Attribute")
	}
	matcherMap, ok := matcher.(map[string]interface{})
	if !ok {
		panic(fmt.Sprintf("Unexpected matcher type: %T\n\n", matcher))
	}
	// The map is expected to hold exactly one entry; extract it.
	var matchType string
	var value interface{}
	for matchType, value = range matcherMap {
		break
	}
	switch matchType {
	case "have-prefix":
		return gomega.HavePrefix(value.(string)), nil
	case "have-suffix":
		return gomega.HaveSuffix(value.(string)), nil
	case "match-regexp":
		return gomega.MatchRegexp(value.(string)), nil
	case "have-len":
		value = sanitizeExpectedValue(value)
		return gomega.HaveLen(value.(int)), nil
	case "have-key-with-value":
		subMatchers, err := mapToGomega(value)
		if err != nil {
			return nil, err
		}
		return gomega.And(subMatchers...), nil
	case "have-key":
		subMatcher, err := matcherToGomegaMatcher(value)
		if err != nil {
			return nil, err
		}
		return gomega.HaveKey(subMatcher), nil
	case "contain-element":
		subMatcher, err := matcherToGomegaMatcher(value)
		if err != nil {
			return nil, err
		}
		return gomega.ContainElement(subMatcher), nil
	case "not":
		subMatcher, err := matcherToGomegaMatcher(value)
		if err != nil {
			return nil, err
		}
		return gomega.Not(subMatcher), nil
	case "consist-of":
		subMatchers, err := sliceToGomega(value)
		if err != nil {
			return nil, err
		}
		// ConsistOf takes ...interface{}, so widen the matcher slice.
		var interfaceSlice []interface{}
		for _, d := range subMatchers {
			interfaceSlice = append(interfaceSlice, d)
		}
		return gomega.ConsistOf(interfaceSlice...), nil
	case "and":
		subMatchers, err := sliceToGomega(value)
		if err != nil {
			return nil, err
		}
		return gomega.And(subMatchers...), nil
	case "or":
		subMatchers, err := sliceToGomega(value)
		if err != nil {
			return nil, err
		}
		return gomega.Or(subMatchers...), nil
	case "gt", "ge", "lt", "le":
		// Golang json escapes '>', '<' symbols, so we use 'gt', 'le' instead
		comparator := map[string]string{
			"gt": ">",
			"ge": ">=",
			"lt": "<",
			"le": "<=",
		}[matchType]
		return gomega.BeNumerically(comparator, value), nil
	default:
		return nil, fmt.Errorf("Unknown matcher: %s", matchType)
	}
}
// mapToGomega converts a map of {key: matcher-spec} into a slice of
// HaveKeyWithValue matchers, one per key, recursing into
// matcherToGomegaMatcher for each value. Keys are visited in sorted order
// so the resulting matcher list is deterministic (map iteration order is
// not).
func mapToGomega(value interface{}) (subMatchers []types.GomegaMatcher, err error) {
	valueI, ok := value.(map[string]interface{})
	if !ok {
		// %T (type), not %t (bool): the original verb rendered non-bool
		// values as %!t(...) noise.
		return nil, fmt.Errorf("Matcher expected map, got: %T", value)
	}
	// Collect and sort keys for deterministic iteration.
	keys := make([]string, 0, len(valueI))
	for key := range valueI {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	for _, key := range keys {
		val := valueI[key]
		val, err = matcherToGomegaMatcher(val)
		if err != nil {
			return
		}
		subMatchers = append(subMatchers, gomega.HaveKeyWithValue(key, val))
	}
	return
}
// sliceToGomega converts a slice of raw matcher specs into Gomega matchers,
// failing on the first element that cannot be converted.
func sliceToGomega(value interface{}) ([]types.GomegaMatcher, error) {
    valueI, ok := value.([]interface{})
    if !ok {
        // %T prints the dynamic type of value; the previous %t verb is the
        // boolean verb and produced garbage output in the error message.
        return nil, fmt.Errorf("Matcher expected array, got: %T", value)
    }
    var subMatchers []types.GomegaMatcher
    for _, v := range valueI {
        subMatcher, err := matcherToGomegaMatcher(v)
        if err != nil {
            return nil, err
        }
        subMatchers = append(subMatchers, subMatcher)
    }
    return subMatchers, nil
}
// Normalize expectedValue so json and yaml are the same: JSON numbers decode
// as float64 (truncated to int here) and YAML maps decode with interface{}
// keys (converted recursively to string-keyed maps here). Non-string map
// keys panic; every other value passes through unchanged.
func sanitizeExpectedValue(i interface{}) interface{} {
    switch v := i.(type) {
    case float64:
        return int(v)
    case map[interface{}]interface{}:
        out := make(map[string]interface{}, len(v))
        for k, val := range v {
            ks, ok := k.(string)
            if !ok {
                panic(fmt.Sprintf("Matcher key type not string: %T\n\n", k))
            }
            out[ks] = sanitizeExpectedValue(val)
        }
        return out
    default:
        return i
    }
}
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Doc
//-------------------------------------------------------------------------------------------------- <-100
// Doc (90 char length for optimal godoc code-block parsing) | <- 90
//-------------------------------------------------------------------------------------------------- <-100
package stats
import (
"math"
"fmt"
"github.com/kelceydamage/collections"
)
// Code
//-------------------------------------------------------------------------------------------------- <-100
// Sum with high precision
// sum adds every element of the slice as an int; float64 elements are
// truncated via ifaceToInt. Elements of any other type panic in ifaceToInt.
func sum(s collections.Slice) int {
    n := 0
    for _, v := range s.All() {
        // The previous type assertion shadowed v, so when the assertion
        // failed the fallback received the zero int instead of the original
        // element and float64 values were silently added as 0.
        n += ifaceToInt(v)
    }
    return n
}
// sumFloat64 adds every element of the slice as a float64; int elements are
// converted via ifaceToFloat64. Elements of any other type panic there.
func sumFloat64(s collections.Slice) float64 {
    n := 0.0
    for _, v := range s.All() {
        // The previous type assertion shadowed v, so when the assertion
        // failed the fallback received 0.0 instead of the original element
        // and int values were silently added as 0.
        n += ifaceToFloat64(v)
    }
    return n
}
// ifaceToFloat64 coerces x to float64. x must hold a float64 or an int;
// anything else panics on the int assertion.
func ifaceToFloat64(x interface{}) float64 {
    if f, ok := x.(float64); ok {
        return f
    }
    return float64(x.(int))
}
// ifaceToInt coerces x to int, truncating float64 values. x must hold an int
// or a float64; anything else panics on the float64 assertion.
func ifaceToInt(x interface{}) int {
    if i, ok := x.(int); ok {
        return i
    }
    return int(x.(float64))
}
// removeZeroes returns a new slice containing only the non-zero elements of s.
func removeZeroes(s collections.Slice) collections.Slice {
    var l collections.Slice
    for _, v := range s.All() {
        // Keep non-zero values. The previous condition was inverted (== 0.0)
        // and kept only the zeroes, defeating the function's stated purpose.
        if ifaceToFloat64(v) != 0.0 {
            l.Append(v)
        }
    }
    return l
}
// less reports whether v is strictly less than n.
func less(n, v float64) bool {
    return v < n
}
// lessNonZero reports whether v is strictly less than n, treating zero as
// "missing": a zero v never qualifies.
func lessNonZero(n, v float64) bool {
    return v < n && v != 0
}
// greater reports whether v is strictly greater than n.
func greater(n, v float64) bool {
    return v > n
}
// greaterNonZero reports whether v is strictly greater than n, treating zero
// as "missing": a zero v never qualifies.
func greaterNonZero(n, v float64) bool {
    return v > n && v != 0
}
// compare scans the slice and returns the index of the extremum selected by
// f (e.g. less for a minimum, greater for a maximum), seeded with n.
// NOTE(review): with the strict comparators, ties resolve to the FIRST
// extremum encountered, while the exported doc comments promise the
// rightmost index — confirm the intended tie-breaking.
func compare(s collections.Slice, n float64, f func(float64, float64) bool) int {
    k := 0
    for i, v := range s.All() {
        fv := ifaceToFloat64(v)
        if f(n, fv) {
            // Track the running best value. Without this update the loop
            // returned the last index beating the *seed* value rather than
            // the index of the true extremum.
            k = i
            n = fv
        }
    }
    return k
}
// varPowSum returns the sum of squared deviations of the elements from the
// slice mean.
func varPowSum(s collections.Slice) float64 {
    // Hoisted out of the loop: recomputing Avg(s) per element made this
    // O(n^2) for no benefit, since the mean is loop-invariant.
    mean := Avg(s)
    n := 0.0
    for _, v := range s.All() {
        n += math.Pow(ifaceToFloat64(v)-mean, 2.0)
    }
    return n
}
// Variance returns the population variance of the values in the slice.
// Expressed as a float64. An empty slice yields NaN (0/0).
func Variance(s collections.Slice) float64 {
    // Direct float64 conversion matches how Avg converts s.Len(); routing an
    // int through ifaceToFloat64 was an unnecessary interface round trip.
    return varPowSum(s) / float64(s.Len())
}
// stdDev returns the standard deviation of the integers in the slice.
// Expressed as a float64. (Unexported; the previous comment misnamed it
// StdDev.)
func stdDev(s collections.Slice) float64 {
    return math.Sqrt(Variance(s))
}
// Sum returns the sum of all objects in the slice as if they were ints.
// Thin exported wrapper around the unexported sum helper.
func Sum(s collections.Slice) int {
    return sum(s)
}
// SumFloat64 returns the sum of all objects in the slice as if they were float64.
// Thin exported wrapper around the unexported sumFloat64 helper.
func SumFloat64(s collections.Slice) float64 {
    return sumFloat64(s)
}
// Avg returns the average of all objects in the slice.
// An empty slice yields NaN (0.0 / 0).
func Avg(s collections.Slice) float64 {
    return SumFloat64(s) / float64(s.Len())
}
// AvgNonZero returns the average of all objects in the slice while skipping zero values for both sum and len.
// NOTE(review): the fmt.Println below looks like leftover debug output and is
// the only use of the fmt import in this file — consider removing both.
func AvgNonZero(s collections.Slice) float64 {
    fmt.Println(s)
    return Avg(removeZeroes(s))
}
// Min returns the index of the rightmost lowest value in the slice, including zero.
// The scan is seeded with the value at index Max(s), so every smaller element
// can qualify during the comparison pass.
func Min(s collections.Slice) int {
    return compare(s, ifaceToFloat64(s.All()[Max(s)]), less)
}
// MinNonZero returns the index of the rightmost lowest value in the slice, excluding zero.
// Seeded with the value at index Max(s); zero elements never qualify via
// lessNonZero.
func MinNonZero(s collections.Slice) int {
    return compare(s, ifaceToFloat64(s.All()[Max(s)]), lessNonZero)
}
// Max returns the index of the rightmost highest value in the slice, including zero.
// Seeded with the first element's value.
// NOTE: panics on an empty slice (s.All()[0] indexes out of range).
func Max(s collections.Slice) int {
    return compare(s, ifaceToFloat64(s.All()[0]), greater)
}
// MaxNonZero returns the index of the rightmost highest value in the slice, excluding zero.
func MaxNonZero(s collections.Slice) int {
return compare(s, ifaceToFloat64(s.All()[0]), greaterNonZero)
} | stats/math.go | 0.89402 | 0.406744 | math.go | starcoder |
package processor
import (
"fmt"
"time"
"github.com/Jeffail/benthos/v3/internal/bloblang"
"github.com/Jeffail/benthos/v3/internal/bloblang/field"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/opentracing/opentracing-go"
)
//------------------------------------------------------------------------------
// init registers the cache processor constructor with the global
// Constructors table, together with its documentation, field specs, and
// worked configuration examples.
func init() {
    Constructors[TypeCache] = TypeSpec{
        constructor: NewCache,
        Categories: []Category{
            CategoryIntegration,
        },
        Summary: `
Performs operations against a [cache resource](/docs/components/caches/about)
for each message, allowing you to store or retrieve data within message payloads.`,
        Description: `
This processor will interpolate functions within the ` + "`key` and `value`" + `
fields individually for each message. This allows you to specify dynamic keys
and values based on the contents of the message payloads and metadata. You can
find a list of functions [here](/docs/configuration/interpolation#bloblang-queries).`,
        FieldSpecs: docs.FieldSpecs{
            docs.FieldCommon("resource", "The [`cache` resource](/docs/components/caches/about) to target with this processor."),
            docs.FieldDeprecated("cache"),
            docs.FieldCommon("operator", "The [operation](#operators) to perform with the cache.").HasOptions("set", "add", "get", "delete"),
            docs.FieldCommon("key", "A key to use with the cache.").SupportsInterpolation(false),
            docs.FieldCommon("value", "A value to use with the cache (when applicable).").SupportsInterpolation(false),
            docs.FieldAdvanced(
                "ttl", "The TTL of each individual item as a duration string. After this period an item will be eligible for removal during the next compaction. Not all caches support per-key TTLs, and those that do not will fall back to their generally configured TTL setting.",
                "60s", "5m", "36h",
            ).SupportsInterpolation(false).AtVersion("3.33.0"),
            partsFieldSpec,
        },
        Examples: []docs.AnnotatedExample{
            {
                Title: "Deduplication",
                Summary: `
Deduplication can be done using the add operator with a key extracted from the
message payload, since it fails when a key already exists we can remove the
duplicates using a
[` + "`bloblang` processor" + `](/docs/components/processors/bloblang):`,
                Config: `
pipeline:
  processors:
    - cache:
        resource: foocache
        operator: add
        key: '${! json("message.id") }'
        value: "storeme"
    - bloblang: root = if errored() { deleted() }
resources:
  caches:
    foocache:
      redis:
        url: tcp://TODO:6379
`,
            },
            {
                Title: "Hydration",
                Summary: `
It's possible to enrich payloads with content previously stored in a cache by
using the [` + "`branch`" + `](/docs/components/processors/branch) processor:`,
                Config: `
pipeline:
  processors:
    - branch:
        processors:
          - cache:
              resource: foocache
              operator: get
              key: '${! json("message.document_id") }'
        result_map: 'root.message.document = this'
resources:
  caches:
    foocache:
      memcached:
        addresses: [ "TODO:11211" ]
`,
            },
        },
        Footnotes: `
## Operators
### ` + "`set`" + `
Set a key in the cache to a value. If the key already exists the contents are
overridden.
### ` + "`add`" + `
Set a key in the cache to a value. If the key already exists the action fails
with a 'key already exists' error, which can be detected with
[processor error handling](/docs/configuration/error_handling).
### ` + "`get`" + `
Retrieve the contents of a cached key and replace the original message payload
with the result. If the key does not exist the action fails with an error, which
can be detected with [processor error handling](/docs/configuration/error_handling).
### ` + "`delete`" + `
Delete a key and its contents from the cache. If the key does not exist the
action is a no-op and will not fail with an error.`,
    }
}
//------------------------------------------------------------------------------
// CacheConfig contains configuration fields for the Cache processor.
type CacheConfig struct {
    // Cache is the deprecated way to name the target cache; prefer Resource.
    Cache string `json:"cache" yaml:"cache"`
    // Resource names the cache resource targeted by this processor.
    Resource string `json:"resource" yaml:"resource"`
    // Parts lists which message parts to process (empty means all).
    Parts []int `json:"parts" yaml:"parts"`
    // Operator is one of "set", "add", "get" or "delete".
    Operator string `json:"operator" yaml:"operator"`
    // Key is the (interpolated) cache key.
    Key string `json:"key" yaml:"key"`
    // Value is the (interpolated) value used by set/add.
    Value string `json:"value" yaml:"value"`
    // TTL is an optional per-item TTL duration string (e.g. "60s").
    TTL string `json:"ttl" yaml:"ttl"`
}
// NewCacheConfig returns a CacheConfig with default values: the "set"
// operator, an empty (all-parts) parts list, and empty strings elsewhere.
func NewCacheConfig() CacheConfig {
    conf := CacheConfig{
        Operator: "set",
        Parts:    []int{},
    }
    // All remaining fields default to the empty string (the zero value).
    return conf
}
//------------------------------------------------------------------------------
// Cache is a processor that stores or retrieves data from a cache for each
// message of a batch via an interpolated key.
type Cache struct {
    conf  Config       // full processor configuration
    log   log.Modular  // processor-scoped logger
    stats metrics.Type // metrics registry

    parts []int // message part indexes to operate on (empty = all)

    key   field.Expression // interpolated cache key
    value field.Expression // interpolated value (set/add only)
    ttl   field.Expression // interpolated per-item TTL duration string

    cache    types.Cache   // resolved cache resource
    operator cacheOperator // resolved operation closure

    mCount            metrics.StatCounter // messages seen
    mErr              metrics.StatCounter // operator/parse failures
    mKeyAlreadyExists metrics.StatCounter // add-operator duplicate keys
    mSent             metrics.StatCounter // parts sent downstream
    mBatchSent        metrics.StatCounter // batches sent downstream
}
// NewCache returns a Cache processor built from conf, resolving the target
// cache resource, the operator, and the key/value/TTL interpolations.
func NewCache(
    conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
    // The `resource` field wins over the deprecated `cache` field.
    cacheName := conf.Cache.Cache
    if len(conf.Cache.Resource) > 0 {
        cacheName = conf.Cache.Resource
    }
    c, err := mgr.GetCache(cacheName)
    if err != nil {
        return nil, err
    }

    op, err := cacheOperatorFromString(conf.Cache.Operator, c)
    if err != nil {
        return nil, err
    }

    key, err := bloblang.NewField(conf.Cache.Key)
    if err != nil {
        return nil, fmt.Errorf("failed to parse key expression: %v", err)
    }
    value, err := bloblang.NewField(conf.Cache.Value)
    if err != nil {
        return nil, fmt.Errorf("failed to parse value expression: %v", err)
    }

    // A configured TTL is only valid when the cache implementation supports
    // per-key TTLs.
    if conf.Cache.TTL != "" {
        if _, ok := c.(types.CacheWithTTL); !ok {
            return nil, fmt.Errorf("this cache type does not support per-key ttl")
        }
    }
    ttl, err := bloblang.NewField(conf.Cache.TTL)
    if err != nil {
        return nil, fmt.Errorf("failed to parse ttl expression: %v", err)
    }

    return &Cache{
        conf:  conf,
        log:   log,
        stats: stats,

        parts: conf.Cache.Parts,

        key:   key,
        value: value,
        ttl:   ttl,

        cache:    c,
        operator: op,

        mCount:            stats.GetCounter("count"),
        mErr:              stats.GetCounter("error"),
        mKeyAlreadyExists: stats.GetCounter("key_already_exists"),
        mSent:             stats.GetCounter("sent"),
        mBatchSent:        stats.GetCounter("batch.sent"),
    }, nil
}
//------------------------------------------------------------------------------
// cacheOperator performs one cache action for the given key/value pair (with
// an optional per-key TTL), returning the raw result and whether that result
// should replace the message part's payload.
type cacheOperator func(key string, value []byte, ttl *time.Duration) ([]byte, bool, error)
// newCacheSetOperator builds an operator that unconditionally stores value
// under key, honouring the TTL when the cache supports per-key TTLs.
func newCacheSetOperator(cache types.Cache) cacheOperator {
    return func(key string, value []byte, ttl *time.Duration) ([]byte, bool, error) {
        if cttl, ok := cache.(types.CacheWithTTL); ok {
            return nil, false, cttl.SetWithTTL(key, value, ttl)
        }
        return nil, false, cache.Set(key, value)
    }
}
// newCacheAddOperator builds an operator that stores value under key only if
// the key does not already exist (duplicate keys surface as an error),
// honouring the TTL when the cache supports per-key TTLs.
func newCacheAddOperator(cache types.Cache) cacheOperator {
    return func(key string, value []byte, ttl *time.Duration) ([]byte, bool, error) {
        if cttl, ok := cache.(types.CacheWithTTL); ok {
            return nil, false, cttl.AddWithTTL(key, value, ttl)
        }
        return nil, false, cache.Add(key, value)
    }
}
// newCacheGetOperator builds an operator that fetches the value stored under
// key; the true flag tells the caller to replace the payload with the result.
func newCacheGetOperator(cache types.Cache) cacheOperator {
    return func(key string, _ []byte, _ *time.Duration) ([]byte, bool, error) {
        result, err := cache.Get(key)
        return result, true, err
    }
}
// newCacheDeleteOperator builds an operator that removes key from the cache.
// The value and TTL arguments are unused, so they are blanked for
// consistency with the get operator.
func newCacheDeleteOperator(cache types.Cache) cacheOperator {
    return func(key string, _ []byte, _ *time.Duration) ([]byte, bool, error) {
        return nil, false, cache.Delete(key)
    }
}
// cacheOperatorFromString resolves an operator name ("set", "add", "get" or
// "delete") into its implementation bound to the given cache; any other name
// yields an error.
func cacheOperatorFromString(operator string, cache types.Cache) (cacheOperator, error) {
    switch operator {
    case "set":
        return newCacheSetOperator(cache), nil
    case "add":
        return newCacheAddOperator(cache), nil
    case "get":
        return newCacheGetOperator(cache), nil
    case "delete":
        return newCacheDeleteOperator(cache), nil
    default:
        return nil, fmt.Errorf("operator not recognised: %v", operator)
    }
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
// Each targeted part has its key/value/TTL interpolations resolved and the
// configured cache operator applied; get results replace the part payload.
func (c *Cache) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
    c.mCount.Incr(1)
    newMsg := msg.Copy()

    proc := func(index int, span opentracing.Span, part types.Part) error {
        key := c.key.String(index, msg)
        value := c.value.Bytes(index, msg)

        // An empty TTL expression result means "no per-key TTL".
        var ttl *time.Duration
        if ttlStr := c.ttl.String(index, msg); ttlStr != "" {
            parsed, err := time.ParseDuration(ttlStr)
            if err != nil {
                c.mErr.Incr(1)
                c.log.Debugf("TTL must be a duration: %v\n", err)
                return err
            }
            ttl = &parsed
        }

        result, useResult, err := c.operator(key, value, ttl)
        if err != nil {
            // Duplicate keys from the add operator are counted separately so
            // deduplication workflows can observe them.
            if err == types.ErrKeyAlreadyExists {
                c.mKeyAlreadyExists.Incr(1)
                c.log.Debugf("Key already exists: %v\n", key)
            } else {
                c.mErr.Incr(1)
                c.log.Debugf("Operator failed for key '%s': %v\n", key, err)
            }
            return err
        }
        if useResult {
            part.Set(result)
        }
        return nil
    }

    IteratePartsWithSpan(TypeCache, c.parts, newMsg, proc)

    c.mBatchSent.Incr(1)
    c.mSent.Incr(int64(newMsg.Len()))
    return []types.Message{newMsg}, nil
}
// CloseAsync shuts down the processor and stops processing requests.
// The cache processor holds no background goroutines or resources of its
// own, so this is a no-op.
func (c *Cache) CloseAsync() {
}
// WaitForClose blocks until the processor has closed down.
// Always returns nil immediately, since CloseAsync is a no-op.
func (c *Cache) WaitForClose(_ time.Duration) error {
    return nil
}
//------------------------------------------------------------------------------
package evaluator
import (
"regexp"
"strings"
"github.com/lyraproj/issue/issue"
"github.com/lyraproj/pcore/px"
"github.com/lyraproj/pcore/types"
"github.com/lyraproj/puppet-evaluator/pdsl"
"github.com/lyraproj/puppet-parser/parser"
)
// evalComparisonExpression evaluates both operands of a comparison
// expression and wraps the boolean outcome as a runtime value.
func evalComparisonExpression(e pdsl.Evaluator, expr *parser.ComparisonExpression) px.Value {
    return types.WrapBoolean(doCompare(expr, expr.Operator(), e.Eval(expr.Lhs()), e.Eval(expr.Rhs())))
}
// doCompare applies the comparison operator op to the already-evaluated
// operands a and b; expr is retained only for error reporting.
func doCompare(expr parser.Expression, op string, a, b px.Value) bool {
    return compare(expr, op, a, b)
}
// evalMatchExpression evaluates both operands of a match expression (=~ or
// !~) and wraps the boolean outcome as a runtime value.
func evalMatchExpression(e pdsl.Evaluator, expr *parser.MatchExpression) px.Value {
    return types.WrapBoolean(match(e, expr.Lhs(), expr.Rhs(), expr.Operator(), e.Eval(expr.Lhs()), e.Eval(expr.Rhs())))
}
// compare evaluates a Puppet comparison operator against two values.
// Equality operators use Puppet equality semantics; all other operators
// defer to compareMagnitude (case-insensitively for strings).
func compare(expr parser.Expression, op string, a px.Value, b px.Value) bool {
    switch op {
    case `==`:
        return px.PuppetEquals(a, b)
    case `!=`:
        return !px.PuppetEquals(a, b)
    }
    return compareMagnitude(expr, op, a, b, false)
}
// compareMagnitude applies an ordering operator (`<`, `<=`, `>`, `>=`) to
// two runtime values. Supported pairings are Type/Type (ordered via
// assignability), String/String (lexicographic, lowercased unless
// caseSensitive), SemVer/SemVer, and Number/Number. Any unsupported operand
// pairing or operator panics with an evaluation error.
func compareMagnitude(expr parser.Expression, op string, a px.Value, b px.Value, caseSensitive bool) bool {
    switch a.(type) {
    case px.Type:
        left := a.(px.Type)
        switch b := b.(type) {
        case px.Type:
            // Type ordering is defined through px.IsAssignable; the strict
            // forms additionally require the two types to differ.
            switch op {
            case `<`:
                return px.IsAssignable(b, left) && !left.Equals(b, nil)
            case `<=`:
                return px.IsAssignable(b, left)
            case `>`:
                return px.IsAssignable(left, b) && !left.Equals(b, nil)
            case `>=`:
                return px.IsAssignable(left, b)
            default:
                panic(evalError(pdsl.OperatorNotApplicable, expr, issue.H{`operator`: op, `left`: a.PType()}))
            }
        }
    case px.StringValue:
        if _, ok := b.(px.StringValue); ok {
            sa := a.String()
            sb := b.String()
            // Lowercase both sides unless the caller asked for a
            // case-sensitive comparison.
            if !caseSensitive {
                sa = strings.ToLower(sa)
                sb = strings.ToLower(sb)
            }
            // Case insensitive compare
            cmp := strings.Compare(sa, sb)
            switch op {
            case `<`:
                return cmp < 0
            case `<=`:
                return cmp <= 0
            case `>`:
                return cmp > 0
            case `>=`:
                return cmp >= 0
            default:
                panic(evalError(pdsl.OperatorNotApplicable, expr, issue.H{`operator`: op, `left`: a.PType()}))
            }
        }
    case *types.SemVer:
        if rhv, ok := b.(*types.SemVer); ok {
            cmp := a.(*types.SemVer).Version().CompareTo(rhv.Version())
            switch op {
            case `<`:
                return cmp < 0.0
            case `<=`:
                return cmp <= 0.0
            case `>`:
                return cmp > 0.0
            case `>=`:
                return cmp >= 0.0
            default:
                panic(evalError(pdsl.OperatorNotApplicable, expr, issue.H{`operator`: op, `left`: a.PType()}))
            }
        }
    case px.Number:
        if rhv, ok := b.(px.Number); ok {
            // Numeric comparison is performed on the float64 difference.
            cmp := a.(px.Number).Float() - rhv.Float()
            switch op {
            case `<`:
                return cmp < 0.0
            case `<=`:
                return cmp <= 0.0
            case `>`:
                return cmp > 0.0
            case `>=`:
                return cmp >= 0.0
            default:
                panic(evalError(pdsl.OperatorNotApplicable, expr, issue.H{`operator`: op, `left`: a.PType()}))
            }
        }
    default:
        panic(evalError(pdsl.OperatorNotApplicable, expr, issue.H{`operator`: op, `left`: a.PType()}))
    }
    // Reached when the left operand type is supported but the right operand
    // type does not pair with it.
    panic(evalError(pdsl.OperatorNotApplicableWhen, expr, issue.H{`operator`: op, `left`: a.PType(), `right`: b.PType()}))
}
func match(c px.Context, lhs parser.Expression, rhs parser.Expression, operator string, a px.Value, b px.Value) bool {
result := false
switch b := b.(type) {
case px.StringValue, *types.Regexp:
var rx *regexp.Regexp
if s, ok := b.(px.StringValue); ok {
var err error
rx, err = regexp.Compile(s.String())
if err != nil {
panic(px.Error2(rhs, px.MatchNotRegexp, issue.H{`detail`: err.Error()}))
}
} else {
rx = b.(*types.Regexp).Regexp()
}
sv, ok := a.(px.StringValue)
if !ok {
panic(px.Error2(lhs, px.MatchNotString, issue.H{`left`: a.PType()}))
}
if group := rx.FindStringSubmatch(sv.String()); group != nil {
c.Scope().(pdsl.Scope).RxSet(group)
result = true
}
default:
result = px.PuppetMatch(a, b)
}
if operator == `!~` {
result = !result
}
return result
} | evaluator/comparison.go | 0.587115 | 0.413063 | comparison.go | starcoder |
package kamakiri
import "math"
// ShapeType indicates the type of a Shape.
type ShapeType uint8
// Supported shape kinds; the ShapeType zero value is ShapeTypeCircle.
const (
    // ShapeTypeCircle is the ShapeType for circular Shapes.
    ShapeTypeCircle ShapeType = iota
    // ShapeTypePolygon is the ShapeType for polygon Shapes.
    ShapeTypePolygon
)
// Shape is a physics shape, either a circle or a convex polygon, attached to
// a physics Body.
type Shape struct {
    Type ShapeType // Physics shape type (circle or polygon).
    Body *Body // Shape physics body reference.
    Radius float64 // Circle shape radius (used for circle shapes).
    Transform Mat2 // Vertices transform matrix 2x2 (applied to polygon vertices and normals).
    Vertices []Vertex // Polygon shape vertices position and normals verts (just used for polygon shapes).
}
// Finds polygon shapes axis least penetration: scans every face normal of a,
// measures how far b penetrates along it, and returns the face index with
// the greatest (least negative) distance. A positive best distance
// indicates a separating axis, i.e. no overlap.
func findAxisLeastPenetration(a, b *Shape) (int, float64) {
    bestIndex := 0
    bestDistance := -math.MaxFloat64
    toBModel := b.Transform.Transpose()

    for i, vert := range a.Vertices {
        // Face normal of A, carried into B's model space.
        normal := toBModel.MultiplyXY(a.Transform.MultiplyXY(vert.Normal))

        // Extreme point of B in the opposite direction of the normal.
        support := b.getSupport(XY{-normal.X, -normal.Y})

        // Face vertex of A, carried into B's model space.
        // NOTE(review): this offsets by -a.Body.Position, whereas the
        // classic impulse-engine formulation adds it; behavior of the
        // original is preserved here — verify against the reference.
        vertex := a.Transform.MultiplyXY(vert.Position)
        vertex = vertex.Subtract(a.Body.Position)
        vertex = vertex.Subtract(b.Body.Position)
        vertex = toBModel.MultiplyXY(vertex)

        // Penetration of B's support point past A's face along the normal.
        if distance := normal.Dot(support.Subtract(vertex)); distance > bestDistance {
            bestDistance = distance
            bestIndex = i
        }
    }
    return bestIndex, bestDistance
}
// getSupport returns the extreme point along a direction within a polygon:
// the vertex position whose projection onto dir is largest.
func (s *Shape) getSupport(dir XY) XY {
    var best XY
    bestProjection := -math.MaxFloat64
    for _, vert := range s.Vertices {
        if projection := vert.Position.Dot(dir); projection > bestProjection {
            bestProjection = projection
            best = vert.Position
        }
    }
    return best
}
// Finds two polygon shapes incident face.
func findIncidentFace(ref, inc *Shape, index int) [2]XY {
refVerts := ref.Vertices
incVerts := inc.Vertices
refNorm := refVerts[index].Normal
// Calculate normal in incident's frame of reference
refNorm = ref.Transform.MultiplyXY(refNorm) // To world space
refNorm = inc.Transform.Transpose().MultiplyXY(refNorm) // To incident's model space
// Find most anti-normal face on polygon
incidentFace := 0
minDot := math.MaxFloat64
for i := 0; i < len(incVerts); i++ {
dot := refNorm.Dot(incVerts[i].Normal)
if dot < minDot {
minDot = dot
incidentFace = i
}
}
// Assign face vertices for incident face
v0 := inc.Transform.MultiplyXY(incVerts[incidentFace].Position)
v0 = v0.Add(inc.Body.Position)
incidentFace = (incidentFace + 1) % len(incVerts)
v1 := inc.Transform.MultiplyXY(incVerts[incidentFace].Position)
v1 = v1.Add(inc.Body.Position)
return [2]XY{v0, v1}
} | shape.go | 0.850593 | 0.788217 | shape.go | starcoder |
package graph
import (
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// WorkbookChartAxis is a generated Microsoft Graph model representing a
// single axis of a workbook chart. It embeds Entity for the common id and
// serialization plumbing.
type WorkbookChartAxis struct {
    Entity
    // Represents the formatting of a chart object, which includes line and font formatting. Read-only.
    format *WorkbookChartAxisFormat;
    // Returns a gridlines object that represents the major gridlines for the specified axis. Read-only.
    majorGridlines *WorkbookChartGridlines;
    // Represents the interval between two major tick marks. Can be set to a numeric value or an empty string. The returned value is always a number.
    majorUnit *Json;
    // Represents the maximum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
    maximum *Json;
    // Represents the minimum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
    minimum *Json;
    // Returns a Gridlines object that represents the minor gridlines for the specified axis. Read-only.
    minorGridlines *WorkbookChartGridlines;
    // Represents the interval between two minor tick marks. 'Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
    minorUnit *Json;
    // Represents the axis title. Read-only.
    title *WorkbookChartAxisTitle;
}
// NewWorkbookChartAxis instantiates a new workbookChartAxis and sets the default values.
func NewWorkbookChartAxis()(*WorkbookChartAxis) {
    return &WorkbookChartAxis{Entity: *NewEntity()}
}
// GetFormat gets the format property value. Represents the formatting of a chart object, which includes line and font formatting. Read-only.
func (m *WorkbookChartAxis) GetFormat()(*WorkbookChartAxisFormat) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.format
}
// GetMajorGridlines gets the majorGridlines property value. Returns a gridlines object that represents the major gridlines for the specified axis. Read-only.
func (m *WorkbookChartAxis) GetMajorGridlines()(*WorkbookChartGridlines) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.majorGridlines
}
// GetMajorUnit gets the majorUnit property value. Represents the interval between two major tick marks. Can be set to a numeric value or an empty string. The returned value is always a number.
func (m *WorkbookChartAxis) GetMajorUnit()(*Json) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.majorUnit
}
// GetMaximum gets the maximum property value. Represents the maximum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) GetMaximum()(*Json) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.maximum
}
// GetMinimum gets the minimum property value. Represents the minimum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) GetMinimum()(*Json) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.minimum
}
// GetMinorGridlines gets the minorGridlines property value. Returns a Gridlines object that represents the minor gridlines for the specified axis. Read-only.
func (m *WorkbookChartAxis) GetMinorGridlines()(*WorkbookChartGridlines) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.minorGridlines
}
// GetMinorUnit gets the minorUnit property value. Represents the interval between two minor tick marks. 'Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) GetMinorUnit()(*Json) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.minorUnit
}
// GetTitle gets the title property value. Represents the axis title. Read-only.
func (m *WorkbookChartAxis) GetTitle()(*WorkbookChartAxisTitle) {
    // Nil-receiver guard (generated-model convention); else-after-return removed.
    if m == nil {
        return nil
    }
    return m.title
}
// GetFieldDeserializers the deserialization information for the current model:
// a map from wire property name to a function that parses that property from
// a ParseNode and stores it on the model.
func (m *WorkbookChartAxis) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    // objectField builds the deserializer for one object-valued property: it
    // parses the node via ctor and, when a non-nil value results, stores it
    // through assign. This factors out eight structurally identical closures.
    objectField := func(ctor func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, assign func(i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable)) func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        return func(o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
            val, err := n.GetObjectValue(ctor)
            if err != nil {
                return err
            }
            if val != nil {
                assign(val)
            }
            return nil
        }
    }
    res["format"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartAxisFormat() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetFormat(val.(*WorkbookChartAxisFormat)) })
    res["majorGridlines"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartGridlines() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMajorGridlines(val.(*WorkbookChartGridlines)) })
    res["majorUnit"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewJson() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMajorUnit(val.(*Json)) })
    res["maximum"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewJson() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMaximum(val.(*Json)) })
    res["minimum"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewJson() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMinimum(val.(*Json)) })
    res["minorGridlines"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartGridlines() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMinorGridlines(val.(*WorkbookChartGridlines)) })
    res["minorUnit"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewJson() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetMinorUnit(val.(*Json)) })
    res["title"] = objectField(
        func() i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartAxisTitle() },
        func(val i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable) { m.SetTitle(val.(*WorkbookChartAxisTitle)) })
    return res
}
// IsNil reports whether the model pointer itself is nil.
func (m *WorkbookChartAxis) IsNil()(bool) {
    return m == nil
}
// Serialize serializes information the current object: first the embedded
// Entity, then each axis property, stopping at the first write error.
func (m *WorkbookChartAxis) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
    if err := m.Entity.Serialize(writer); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("format", m.GetFormat()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("majorGridlines", m.GetMajorGridlines()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("majorUnit", m.GetMajorUnit()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("maximum", m.GetMaximum()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("minimum", m.GetMinimum()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("minorGridlines", m.GetMinorGridlines()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("minorUnit", m.GetMinorUnit()); err != nil {
        return err
    }
    if err := writer.WriteObjectValue("title", m.GetTitle()); err != nil {
        return err
    }
    return nil
}
// SetFormat sets the format property value. Represents the formatting of a chart object, which includes line and font formatting. Read-only.
func (m *WorkbookChartAxis) SetFormat(value *WorkbookChartAxisFormat)() {
    if m == nil {
        return
    }
    m.format = value
}
// SetMajorGridlines sets the majorGridlines property value. Returns a gridlines object that represents the major gridlines for the specified axis. Read-only.
func (m *WorkbookChartAxis) SetMajorGridlines(value *WorkbookChartGridlines)() {
    if m == nil {
        return
    }
    m.majorGridlines = value
}
// SetMajorUnit sets the majorUnit property value. Represents the interval between two major tick marks. Can be set to a numeric value or an empty string. The returned value is always a number.
func (m *WorkbookChartAxis) SetMajorUnit(value *Json)() {
    if m == nil {
        return
    }
    m.majorUnit = value
}
// SetMaximum sets the maximum property value. Represents the maximum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) SetMaximum(value *Json)() {
    if m == nil {
        return
    }
    m.maximum = value
}
// SetMinimum sets the minimum property value. Represents the minimum value on the value axis. Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) SetMinimum(value *Json)() {
    if m == nil {
        return
    }
    m.minimum = value
}
// SetMinorGridlines sets the minorGridlines property value. Returns a Gridlines object that represents the minor gridlines for the specified axis. Read-only.
func (m *WorkbookChartAxis) SetMinorGridlines(value *WorkbookChartGridlines)() {
    if m == nil {
        return
    }
    m.minorGridlines = value
}
// SetMinorUnit sets the minorUnit property value. Represents the interval between two minor tick marks. 'Can be set to a numeric value or an empty string (for automatic axis values). The returned value is always a number.
func (m *WorkbookChartAxis) SetMinorUnit(value *Json)() {
    if m == nil {
        return
    }
    m.minorUnit = value
}
// SetTitle sets the title property value. Represents the axis title. Read-only.
func (m *WorkbookChartAxis) SetTitle(value *WorkbookChartAxisTitle)() {
if m != nil {
m.title = value
}
} | models/microsoft/graph/workbook_chart_axis.go | 0.797675 | 0.555556 | workbook_chart_axis.go | starcoder |
package ast
// Class represents a class type declaration of the form
// «"class" name { fields }»
type Class struct {
Annotations Annotations // the annotations applied to the class
Name *Identifier // the name of the class
Fields []*Field // the fields of the class
}
func (Class) isNode() {}
// Field represents a field of a class or api, with the structure
// «type name = expression»
type Field struct {
Annotations Annotations // the annotations applied to the field
Type Node // the type the field holds
Name *Identifier // the name of the field
Default Node // the default value expression for the field
}
func (Field) isNode() {}
// EnumEntry represents a single value in an enumerated type.
type EnumEntry struct {
Owner *Enum // the enum this entry is a part of
Name *Identifier // the name this entry is given
Value *Number // the value of this entry
}
func (EnumEntry) isNode() {}
// Enum represents an enumerated type declaration, of the form
// «"enum" name { entries }» where entries is a comma separated list of «name = value»
type Enum struct {
Annotations Annotations // the annotations applied to the enum
Name *Identifier // the name of the enum
IsBitfield bool // whether this enum represents a bitfield form
Entries []*EnumEntry // the set of valid entries for this enum
}
func (Enum) isNode() {}
// Label represents a single name-value mapping in a LabelGroup declaration
type Label struct {
Owner *LabelGroup // the label declaration this entry is a part of
Name *Identifier // the name this entry is given
Value *Number // the value of this entry
}
func (Label) isNode() {}
// LabelGroup associates one or more named constants / labels with a given type.
type LabelGroup struct {
Annotations Annotations // the annotations applied to the declaration
LabeledType *Identifier // the type this declaration is for
Labels []*Label // set of name-value pairs for this declaration
}
func (LabelGroup) isNode() {}
// IndexedType represents a type declaration with an indexing suffix,
// which looks like «type[index]»
type IndexedType struct {
ValueType Node // The element type exposed by the indexed type
Index Node // the index of the type
}
func (IndexedType) isNode() {}
// PreConst represents a pre-const type declaration, of the form «const type»
type PreConst struct {
Type Node // the underlying type that is constant
}
func (PreConst) isNode() {}
// PointerType represents a pointer type declaration, of the form «type*»
type PointerType struct {
To Node // the underlying type this pointer points to
Const bool // whether the pointer type has the post-const modifier applied
}
func (PointerType) isNode() {}
// Alias represents a weak type alias, with structure «"alias" type name».
// An alias does not declare a new type, just a reusable name for a common type.
type Alias struct {
Annotations Annotations // the annotations applied to the alias
Name *Identifier // the name of the alias
To Node // the type it is an alias for
}
func (Alias) isNode() {}
// Pseudonym declares a new type in terms of another type.
// Has the form «"type" type name»
// Pseudonyms are proper types, but the underlying type can be discovered.
type Pseudonym struct {
	Annotations Annotations // the annotations applied to the type
	Name *Identifier // the name of the type
	To Node // the underlying type
}
func (Pseudonym) isNode() {}
// Imported represents an imported type name.
type Imported struct {
From *Identifier // the import this name is from
Name *Identifier // the name being imported
}
func (Imported) isNode() {}
// Definition declares a new named literal, has the form «"define" name value».
type Definition struct {
Annotations Annotations // the annotations applied to this definition
Name *Identifier // the name of this definition
Expression Node // the expression this definition expands to
}
func (Definition) isNode() {} | gapil/ast/type.go | 0.81134 | 0.540621 | type.go | starcoder |
package output
import (
"fmt"
"github.com/Jeffail/benthos/v3/lib/broker"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
func init() {
Constructors[TypeTry] = TypeSpec{
brokerConstructor: NewTry,
Summary: `
Attempts to send each message to a child output, starting from the first output
on the list. If an output attempt fails then the next output in the list is
attempted, and so on.`,
Description: `
This pattern is useful for triggering events in the case where certain output
targets have broken. For example, if you had an output type ` + "`http_client`" + `
but wished to reroute messages whenever the endpoint becomes unreachable you
could use this pattern:
` + "```yaml" + `
output:
try:
- http_client:
url: http://foo:4195/post/might/become/unreachable
retries: 3
retry_period: 1s
- http_client:
url: http://bar:4196/somewhere/else
retries: 3
retry_period: 1s
processors:
- text:
operator: prepend
value: 'failed to send this message to foo: '
- file:
path: /usr/local/benthos/everything_failed.jsonl
` + "```" + `
### Batching
When an output within a try sequence uses batching, like so:
` + "``` yaml" + `
output:
try:
- dynamodb:
table: foo
string_columns:
id: ${!json("id")}
content: ${!content()}
batching:
count: 10
period: 1s
- file:
path: /usr/local/benthos/failed_stuff.jsonl
` + "```" + `
Benthos makes a best attempt at inferring which specific messages of the batch
failed, and only propagates those individual messages to the next try tier.
However, depending on the output and the error returned it is sometimes not
possible to determine the individual messages that failed, in which case the
whole batch is passed to the next tier in order to preserve at-least-once
guarantees.`,
sanitiseConfigFunc: func(conf Config) (interface{}, error) {
outSlice := []interface{}{}
for _, output := range conf.Try {
sanOutput, err := SanitiseConfig(output)
if err != nil {
return nil, err
}
outSlice = append(outSlice, sanOutput)
}
return outSlice, nil
},
Categories: []Category{
CategoryUtility,
},
}
}
//------------------------------------------------------------------------------
// TryConfig contains configuration fields for the Try output type.
type TryConfig brokerOutputList
// NewTryConfig creates a new TryConfig with default (empty) values.
func NewTryConfig() TryConfig {
	var conf TryConfig
	return conf
}
//------------------------------------------------------------------------------
// NewTry creates a new try broker output type.
func NewTry(
	conf Config,
	mgr types.Manager,
	log log.Modular,
	stats metrics.Type,
	pipelines ...types.PipelineConstructorFunc,
) (Type, error) {
	outputConfs := conf.Try
	if len(outputConfs) == 0 {
		return nil, ErrBrokerNoOutputs
	}
	// Construct each child output under its own "try.N" log/metrics namespace.
	outputs := make([]types.Output, len(outputConfs))
	var err error
	for i, oConf := range outputConfs {
		ns := fmt.Sprintf("try.%v", i)
		// NOTE(review): pipes is deliberately left empty — the pipelines
		// argument is applied once around the whole broker by
		// WrapWithPipelines below, not per child. Confirm this is intended.
		var pipes []types.PipelineConstructorFunc
		outputs[i], err = New(
			oConf, mgr,
			log.NewModule("."+ns),
			metrics.Combine(stats, metrics.Namespaced(stats, ns)),
			pipes...)
		if err != nil {
			return nil, fmt.Errorf("failed to create output '%v' type '%v': %v", i, oConf.Type, err)
		}
	}
	// Wrap the children in the fallback broker and attach shared pipelines.
	var t *broker.Try
	if t, err = broker.NewTry(outputs, stats); err != nil {
		return nil, err
	}
	t.WithMaxInFlight(50)
	t.WithOutputMetricsPrefix("try.outputs")
	return WrapWithPipelines(t, pipelines...)
}
//------------------------------------------------------------------------------ | lib/output/try.go | 0.675122 | 0.768646 | try.go | starcoder |
// This file implements type parameter inference given
// a list of concrete arguments and a parameter list.
package types
import "github.com/tdakkota/go2go/golib/token"
// infer returns the list of actual type arguments for the given list of type parameters tparams
// by inferring them from the actual arguments args for the parameters params. If infer fails to
// determine all type arguments, an error is reported and the result is nil.
func (check *Checker) infer(pos token.Pos, tparams []*TypeName, params *Tuple, args []*operand) []Type {
assert(params.Len() == len(args))
u := check.unifier()
u.x.init(tparams)
errorf := func(kind string, tpar, targ Type, arg *operand) {
// provide a better error message if we can
if tpar, _ := tpar.(*TypeParam); tpar != nil {
if inferred := u.x.at(tpar.index); inferred != nil {
check.errorf(arg.pos(), "%s %s of %s does not match inferred type %s for %s", kind, targ, arg.expr, inferred, tpar)
return
}
}
check.errorf(arg.pos(), "%s %s of %s does not match %s", kind, targ, arg.expr, tpar)
}
// Terminology: generic parameter = function parameter with a type-parameterized type
// 1st pass: Unify parameter and argument types for generic parameters with typed arguments
// and collect the indices of generic parameters with untyped arguments.
var indices []int
for i, arg := range args {
par := params.At(i)
// If we permit bidirectional unification, this conditional code needs to be
// executed even if par.typ is not parameterized since the argument may be a
// generic function (for which we want to infer // its type arguments).
if IsParameterized(par.typ) {
if arg.mode == invalid {
// TODO(gri) we might still be able to infer all targs by
// simply ignoring (continue) invalid args
return nil // error was reported earlier
}
if targ := arg.typ; isTyped(targ) {
// If we permit bidirectional unification, and targ is
// a generic function, we need to initialize u.y with
// the respectice type parameters of targ.
if !u.unify(par.typ, targ) {
errorf("type", par.typ, targ, arg)
return nil
}
} else {
indices = append(indices, i)
}
}
}
// Some generic parameters with untyped arguments may have been given a type
// indirectly through another generic parameter with a typed argument; we can
// ignore those now. (This only means that we know the types for those generic
// parameters; it doesn't mean untyped arguments can be passed safely. We still
// need to verify that assignment of those arguments is valid when we check
// function parameter passing external to infer.)
j := 0
for _, i := range indices {
par := params.At(i)
// Since untyped types are all basic (i.e., non-composite) types, an
// untyped argument will never match a composite parameter type; the
// only parameter type it can possibly match against is a *TypeParam.
// Thus, only keep the indices of generic parameters that are not of
// composite types and which don't have a type inferred yet.
if tpar, _ := par.typ.(*TypeParam); tpar != nil && u.x.at(tpar.index) == nil {
indices[j] = i
j++
}
}
indices = indices[:j]
// 2nd pass: Unify parameter and default argument types for remaining generic parameters.
for _, i := range indices {
par := params.At(i)
arg := args[i]
targ := Default(arg.typ)
// The default type for an untyped nil is untyped nil. We must not
// infer an untyped nil type as type parameter type. Ignore untyped
// nil by making sure all default argument types are typed.
if isTyped(targ) && !u.unify(par.typ, targ) {
errorf("default type", par.typ, targ, arg)
return nil
}
}
// Collect type arguments and check if they all have been determined.
// TODO(gri) consider moving this outside this function and then we won't need to pass in pos
var targs []Type // lazily allocated
for i, tpar := range tparams {
targ := u.x.at(i)
if targ == nil {
ppos := check.fset.Position(tpar.pos).String()
check.errorf(pos, "cannot infer %s (%s)", tpar.name, ppos)
return nil
}
if targs == nil {
targs = make([]Type, len(tparams))
}
targs[i] = targ
}
return targs
}
// IsParameterized reports whether typ contains any type parameters.
func IsParameterized(typ Type) bool {
	seen := make(map[Type]bool)
	return isParameterized(typ, seen)
}
func isParameterized(typ Type, seen map[Type]bool) (res bool) {
// detect cycles
// TODO(gri) can/should this be a Checker map?
if x, ok := seen[typ]; ok {
return x
}
seen[typ] = false
defer func() {
seen[typ] = res
}()
switch t := typ.(type) {
case nil, *Basic: // TODO(gri) should nil be handled here?
break
case *Array:
return isParameterized(t.elem, seen)
case *Slice:
return isParameterized(t.elem, seen)
case *Struct:
for _, fld := range t.fields {
if isParameterized(fld.typ, seen) {
return true
}
}
case *Pointer:
return isParameterized(t.base, seen)
case *Tuple:
n := t.Len()
for i := 0; i < n; i++ {
if isParameterized(t.At(i).typ, seen) {
return true
}
}
case *Signature:
assert(t.tparams == nil) // TODO(gri) is this correct?
// TODO(gri) Rethink check below: contract interfaces
// have methods where the receiver is a contract type
// parameter, by design.
//assert(t.recv == nil || !isParameterized(t.recv.typ))
return isParameterized(t.params, seen) || isParameterized(t.results, seen)
case *Interface:
t.assertCompleteness()
for _, m := range t.allMethods {
if isParameterized(m.typ, seen) {
return true
}
}
case *Map:
return isParameterized(t.key, seen) || isParameterized(t.elem, seen)
case *Chan:
return isParameterized(t.elem, seen)
case *Named:
return isParameterizedList(t.targs, seen)
case *TypeParam:
return true
case *instance:
return isParameterizedList(t.targs, seen)
default:
unreachable()
}
return false
}
// IsParameterizedList reports whether any type in list is parameterized.
func IsParameterizedList(list []Type) bool {
	seen := make(map[Type]bool)
	return isParameterizedList(list, seen)
}
// isParameterizedList reports whether any type in list contains type
// parameters; seen carries the cycle-detection state shared across the
// recursive isParameterized calls.
func isParameterizedList(list []Type, seen map[Type]bool) bool {
	for _, t := range list {
		if isParameterized(t, seen) {
			return true
		}
	}
	return false
} | golib/types/infer.go | 0.559771 | 0.457561 | infer.go | starcoder
package plaid
import (
"encoding/json"
)
// TransactionData Information about the matched direct deposit transaction used to verify a user's payroll information.
type TransactionData struct {
// The description of the transaction.
Description string `json:"description"`
// The amount of the transaction.
Amount float32 `json:"amount"`
// The date of the transaction, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").
Date string `json:"date"`
// A unique identifier for the end user's account.
AccountId string `json:"account_id"`
// A unique identifier for the transaction.
TransactionId string `json:"transaction_id"`
AdditionalProperties map[string]interface{}
}
type _TransactionData TransactionData
// NewTransactionData instantiates a new TransactionData object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewTransactionData(description string, amount float32, date string, accountId string, transactionId string) *TransactionData {
	return &TransactionData{
		Description:   description,
		Amount:        amount,
		Date:          date,
		AccountId:     accountId,
		TransactionId: transactionId,
	}
}
// NewTransactionDataWithDefaults instantiates a new TransactionData object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewTransactionDataWithDefaults() *TransactionData {
this := TransactionData{}
return &this
}
// GetDescription returns the Description field value
func (o *TransactionData) GetDescription() string {
if o == nil {
var ret string
return ret
}
return o.Description
}
// GetDescriptionOk returns a tuple with the Description field value
// and a boolean to check if the value has been set.
func (o *TransactionData) GetDescriptionOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Description, true
}
// SetDescription sets field value
func (o *TransactionData) SetDescription(v string) {
o.Description = v
}
// GetAmount returns the Amount field value
func (o *TransactionData) GetAmount() float32 {
if o == nil {
var ret float32
return ret
}
return o.Amount
}
// GetAmountOk returns a tuple with the Amount field value
// and a boolean to check if the value has been set.
func (o *TransactionData) GetAmountOk() (*float32, bool) {
if o == nil {
return nil, false
}
return &o.Amount, true
}
// SetAmount sets field value
func (o *TransactionData) SetAmount(v float32) {
o.Amount = v
}
// GetDate returns the Date field value
func (o *TransactionData) GetDate() string {
if o == nil {
var ret string
return ret
}
return o.Date
}
// GetDateOk returns a tuple with the Date field value
// and a boolean to check if the value has been set.
func (o *TransactionData) GetDateOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Date, true
}
// SetDate sets field value
func (o *TransactionData) SetDate(v string) {
o.Date = v
}
// GetAccountId returns the AccountId field value
func (o *TransactionData) GetAccountId() string {
if o == nil {
var ret string
return ret
}
return o.AccountId
}
// GetAccountIdOk returns a tuple with the AccountId field value
// and a boolean to check if the value has been set.
func (o *TransactionData) GetAccountIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.AccountId, true
}
// SetAccountId sets field value
func (o *TransactionData) SetAccountId(v string) {
o.AccountId = v
}
// GetTransactionId returns the TransactionId field value
func (o *TransactionData) GetTransactionId() string {
if o == nil {
var ret string
return ret
}
return o.TransactionId
}
// GetTransactionIdOk returns a tuple with the TransactionId field value
// and a boolean to check if the value has been set.
func (o *TransactionData) GetTransactionIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.TransactionId, true
}
// SetTransactionId sets field value
func (o *TransactionData) SetTransactionId(v string) {
o.TransactionId = v
}
// MarshalJSON serializes the known fields and then overlays any
// AdditionalProperties, mirroring the decode path.
func (o TransactionData) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"description":    o.Description,
		"amount":         o.Amount,
		"date":           o.Date,
		"account_id":     o.AccountId,
		"transaction_id": o.TransactionId,
	}
	// Additional properties are applied last, so a duplicate key wins.
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes bytes into o: the known fields are decoded first,
// then every remaining top-level key is collected into AdditionalProperties.
func (o *TransactionData) UnmarshalJSON(bytes []byte) (err error) {
	varTransactionData := _TransactionData{}
	// Bug fix: the original fell through after a failed decode here and then
	// overwrote err with the (usually nil) result of the second Unmarshal,
	// returning success while *o was never populated. Fail fast instead.
	if err = json.Unmarshal(bytes, &varTransactionData); err != nil {
		return err
	}
	*o = TransactionData(varTransactionData)
	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err != nil {
		return err
	}
	// Strip the known keys so only genuinely unknown properties remain.
	delete(additionalProperties, "description")
	delete(additionalProperties, "amount")
	delete(additionalProperties, "date")
	delete(additionalProperties, "account_id")
	delete(additionalProperties, "transaction_id")
	o.AdditionalProperties = additionalProperties
	return nil
}
type NullableTransactionData struct {
value *TransactionData
isSet bool
}
func (v NullableTransactionData) Get() *TransactionData {
return v.value
}
func (v *NullableTransactionData) Set(val *TransactionData) {
v.value = val
v.isSet = true
}
func (v NullableTransactionData) IsSet() bool {
return v.isSet
}
func (v *NullableTransactionData) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableTransactionData(val *TransactionData) *NullableTransactionData {
return &NullableTransactionData{value: val, isSet: true}
}
func (v NullableTransactionData) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableTransactionData) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | plaid/model_transaction_data.go | 0.789396 | 0.410756 | model_transaction_data.go | starcoder |
package iso20022
// Execution of the redemption part, in a switch between investment funds or investment fund classes.
type SwitchRedemptionLegExecution3 struct {
// Unique technical identifier for an instance of a leg within a switch.
LegIdentification *Max35Text `xml:"LegId,omitempty"`
// Unique identifier for an instance of a leg execution within a switch confirmation.
LegExecutionIdentification *Max35Text `xml:"LegExctnId,omitempty"`
// Investment fund class to which an investment fund order execution is related.
FinancialInstrumentDetails *FinancialInstrument10 `xml:"FinInstrmDtls"`
// Number of investment funds units redeemed.
UnitsNumber *FinancialInstrumentQuantity1 `xml:"UnitsNb"`
// Account between an investor(s) and a fund manager or a fund. The account can contain holdings in any investment fund or investment fund class managed (or distributed) by the fund manager, within the same fund family.
InvestmentAccountDetails *InvestmentAccount21 `xml:"InvstmtAcctDtls,omitempty"`
// Portion of the investor's holdings, in a specific investment fund/ fund class, that is redeemed.
HoldingsRedemptionRate *PercentageRate `xml:"HldgsRedRate,omitempty"`
// Amount of money paid to the investor as a result of the redemption after deduction of charges, commissions and taxes.
// [(Quantity * Price) - (Charges + Commissions +Taxes)]
NetAmount *ActiveCurrencyAndAmount `xml:"NetAmt,omitempty"`
// Amount of money resulting from the redemption before deduction of charges, commissions and taxes.
// [Quantity * Price]
GrossAmount *ActiveCurrencyAndAmount `xml:"GrssAmt,omitempty"`
// Date and time at which a price is applied, according to the terms stated in the prospectus.
TradeDateTime *DateAndDateTimeChoice `xml:"TradDtTm"`
// Price at which the order was executed.
PriceDetails *UnitPrice10 `xml:"PricDtls"`
// Indicates whether the dividend is included, ie, cum-dividend, in the executed price. When the dividend is not included, the price will be ex-dividend.
CumDividendIndicator *YesNoIndicator `xml:"CumDvddInd"`
// Part of the price deemed as accrued income or profit rather than capital. The interim profit amount is used for tax purposes.
InterimProfitAmount *ProfitAndLoss1Choice `xml:"IntrmPrftAmt,omitempty"`
// Dividend option chosen by the account owner based on the options offered in the prospectus.
IncomePreference *IncomePreference1Code `xml:"IncmPref,omitempty"`
// Tax group to which the purchased investment fund units belong. The investor indicates to the intermediary operating pooled nominees, which type of unit is to be sold.
Group1Or2Units *UKTaxGroupUnitCode `xml:"Grp1Or2Units,omitempty"`
// Currency requested for settlement of cash proceeds.
RequestedSettlementCurrency *ActiveCurrencyCode `xml:"ReqdSttlmCcy,omitempty"`
// Currency to be used for pricing the fund. This currency must be among the set of currencies in which the price may be expressed, as stated in the prospectus.
RequestedNAVCurrency *ActiveOrHistoricCurrencyCode `xml:"ReqdNAVCcy,omitempty"`
// Amount of money associated with a service.
ChargeGeneralDetails *TotalCharges3 `xml:"ChrgGnlDtls,omitempty"`
// Commission for the execution of an investment fund order.
CommissionGeneralDetails *TotalCommissions3 `xml:"ComssnGnlDtls,omitempty"`
// Tax applicable to execution of an investment fund order.
TaxGeneralDetails *TotalTaxes3 `xml:"TaxGnlDtls,omitempty"`
// Parameters used to execute the settlement of an investment fund order.
SettlementAndCustodyDetails *FundSettlementParameters4 `xml:"SttlmAndCtdyDtls,omitempty"`
// Indicates whether the financial instrument is to be physically delivered.
PhysicalDeliveryIndicator *YesNoIndicator `xml:"PhysDlvryInd"`
// Information related to physical delivery of the securities.
PhysicalDeliveryDetails *DeliveryParameters3 `xml:"PhysDlvryDtls,omitempty"`
// Additional specific settlement information for non-regulated traded funds.
NonStandardSettlementInformation *Max350Text `xml:"NonStdSttlmInf,omitempty"`
// Part of an investor's subscription amount that is held by the fund in order to pay incentive / performance fees at the end of the fiscal year.
Equalisation *Equalisation1 `xml:"Equlstn,omitempty"`
}
func (s *SwitchRedemptionLegExecution3) SetLegIdentification(value string) {
s.LegIdentification = (*Max35Text)(&value)
}
func (s *SwitchRedemptionLegExecution3) SetLegExecutionIdentification(value string) {
s.LegExecutionIdentification = (*Max35Text)(&value)
}
func (s *SwitchRedemptionLegExecution3) AddFinancialInstrumentDetails() *FinancialInstrument10 {
s.FinancialInstrumentDetails = new(FinancialInstrument10)
return s.FinancialInstrumentDetails
}
func (s *SwitchRedemptionLegExecution3) AddUnitsNumber() *FinancialInstrumentQuantity1 {
s.UnitsNumber = new(FinancialInstrumentQuantity1)
return s.UnitsNumber
}
func (s *SwitchRedemptionLegExecution3) AddInvestmentAccountDetails() *InvestmentAccount21 {
s.InvestmentAccountDetails = new(InvestmentAccount21)
return s.InvestmentAccountDetails
}
func (s *SwitchRedemptionLegExecution3) SetHoldingsRedemptionRate(value string) {
s.HoldingsRedemptionRate = (*PercentageRate)(&value)
}
func (s *SwitchRedemptionLegExecution3) SetNetAmount(value, currency string) {
s.NetAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (s *SwitchRedemptionLegExecution3) SetGrossAmount(value, currency string) {
s.GrossAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (s *SwitchRedemptionLegExecution3) AddTradeDateTime() *DateAndDateTimeChoice {
s.TradeDateTime = new(DateAndDateTimeChoice)
return s.TradeDateTime
}
func (s *SwitchRedemptionLegExecution3) AddPriceDetails() *UnitPrice10 {
s.PriceDetails = new(UnitPrice10)
return s.PriceDetails
}
func (s *SwitchRedemptionLegExecution3) SetCumDividendIndicator(value string) {
s.CumDividendIndicator = (*YesNoIndicator)(&value)
}
func (s *SwitchRedemptionLegExecution3) AddInterimProfitAmount() *ProfitAndLoss1Choice {
s.InterimProfitAmount = new(ProfitAndLoss1Choice)
return s.InterimProfitAmount
}
func (s *SwitchRedemptionLegExecution3) SetIncomePreference(value string) {
s.IncomePreference = (*IncomePreference1Code)(&value)
}
func (s *SwitchRedemptionLegExecution3) SetGroup1Or2Units(value string) {
s.Group1Or2Units = (*UKTaxGroupUnitCode)(&value)
}
func (s *SwitchRedemptionLegExecution3) SetRequestedSettlementCurrency(value string) {
s.RequestedSettlementCurrency = (*ActiveCurrencyCode)(&value)
}
func (s *SwitchRedemptionLegExecution3) SetRequestedNAVCurrency(value string) {
s.RequestedNAVCurrency = (*ActiveOrHistoricCurrencyCode)(&value)
}
func (s *SwitchRedemptionLegExecution3) AddChargeGeneralDetails() *TotalCharges3 {
s.ChargeGeneralDetails = new(TotalCharges3)
return s.ChargeGeneralDetails
}
func (s *SwitchRedemptionLegExecution3) AddCommissionGeneralDetails() *TotalCommissions3 {
s.CommissionGeneralDetails = new(TotalCommissions3)
return s.CommissionGeneralDetails
}
func (s *SwitchRedemptionLegExecution3) AddTaxGeneralDetails() *TotalTaxes3 {
s.TaxGeneralDetails = new(TotalTaxes3)
return s.TaxGeneralDetails
}
func (s *SwitchRedemptionLegExecution3) AddSettlementAndCustodyDetails() *FundSettlementParameters4 {
s.SettlementAndCustodyDetails = new(FundSettlementParameters4)
return s.SettlementAndCustodyDetails
}
func (s *SwitchRedemptionLegExecution3) SetPhysicalDeliveryIndicator(value string) {
s.PhysicalDeliveryIndicator = (*YesNoIndicator)(&value)
}
func (s *SwitchRedemptionLegExecution3) AddPhysicalDeliveryDetails() *DeliveryParameters3 {
s.PhysicalDeliveryDetails = new(DeliveryParameters3)
return s.PhysicalDeliveryDetails
}
func (s *SwitchRedemptionLegExecution3) SetNonStandardSettlementInformation(value string) {
s.NonStandardSettlementInformation = (*Max350Text)(&value)
}
func (s *SwitchRedemptionLegExecution3) AddEqualisation() *Equalisation1 {
s.Equalisation = new(Equalisation1)
return s.Equalisation
} | SwitchRedemptionLegExecution3.go | 0.832237 | 0.481515 | SwitchRedemptionLegExecution3.go | starcoder |
// Package day08 solves AoC 2020 day 8.
package day08
import (
"fmt"
"io"
"strconv"
"strings"
"github.com/fis/aoc/glue"
"github.com/fis/aoc/util"
)
func init() {
glue.RegisterSolver(2020, 8, glue.LineSolver(solve))
glue.RegisterPlotter(2020, 8, glue.LinePlotter(plotFlow), map[string]string{"ex": example})
}
// solve parses the boot code and answers both puzzle parts: part 1 is the
// accumulator value just before any instruction executes a second time, and
// part 2 is the accumulator after repairing the single corrupted jmp/nop.
func solve(lines []string) ([]string, error) {
	code, err := parseCode(lines)
	if err != nil {
		return nil, err
	}
	_, part1 := loopCheck(code)
	part2 := repair(code)
	return glue.Ints(part1, part2), nil
}
type opcode int
const (
opAcc opcode = iota
opJmp
opNop
)
type instruction struct {
op opcode
arg int
}
// parseCode translates the puzzle input into a program, one instruction per
// line of the form "<mnemonic> <signed argument>". It fails on the first
// malformed line, unknown mnemonic, or unparsable argument.
func parseCode(lines []string) ([]instruction, error) {
	opTable := map[string]opcode{"acc": opAcc, "jmp": opJmp, "nop": opNop}
	var prog []instruction
	for _, line := range lines {
		fields := strings.SplitN(line, " ", 2)
		if len(fields) != 2 {
			return nil, fmt.Errorf("invalid instruction: %s", line)
		}
		op, known := opTable[fields[0]]
		if !known {
			return nil, fmt.Errorf("invalid opcode: %s", fields[0])
		}
		arg, err := strconv.Atoi(fields[1])
		if err != nil {
			return nil, fmt.Errorf("invalid argument: %s", fields[1])
		}
		prog = append(prog, instruction{op: op, arg: arg})
	}
	return prog, nil
}
// loopCheck runs the program from the start and reports whether it loops
// (revisits an instruction). The returned accumulator holds its value at the
// moment the loop is detected, or at normal termination.
func loopCheck(code []instruction) (bool, int) {
	visited := make([]bool, len(code))
	pc, acc := 0, 0
	for pc < len(code) {
		if visited[pc] {
			return true, acc
		}
		visited[pc] = true
		switch code[pc].op {
		case opAcc:
			acc += code[pc].arg
			pc++
		case opJmp:
			pc += code[pc].arg
		case opNop:
			pc++
		}
	}
	return false, acc
}
// repair finds the single jmp<->nop flip that makes the program terminate and
// returns the accumulator value at termination.
//
// It first runs the unmodified program, recording at every jmp/nop the state
// that would result from flipping that instruction. It then resumes execution
// from each recorded branch in turn. The visited set is shared across
// attempts: anything reached during a failed attempt is known to lead back
// into the original loop, so later attempts stop as soon as they touch it.
func repair(code []instruction) int {
	type branch struct{ to, acc int }
	var branches []branch
	seen := make([]bool, len(code))
	at, acc := 0, 0
	// Bug fix: the original loop condition indexed seen[at] without a bounds
	// check and panicked if the unmodified program halted (or jumped out of
	// range) on its own.
	for at >= 0 && at < len(code) && !seen[at] {
		seen[at] = true
		switch code[at].op {
		case opAcc:
			acc += code[at].arg
			at++
		case opJmp:
			branches = append(branches, branch{to: at + 1, acc: acc})
			at += code[at].arg
		case opNop:
			branches = append(branches, branch{to: at + code[at].arg, acc: acc})
			at++
		}
	}
	if at >= len(code) {
		// The program already terminates without any repair.
		return acc
	}
	for _, br := range branches {
		at, acc := br.to, br.acc
		for {
			if at >= len(code) {
				return acc
			}
			// Bounds fix: a jump before the start of the program is a failed
			// attempt, not a panic.
			if at < 0 || seen[at] {
				break
			}
			seen[at] = true
			switch code[at].op {
			case opAcc:
				acc += code[at].arg
				at++
			case opJmp:
				at += code[at].arg
			case opNop:
				at++
			}
		}
	}
	panic("this code is unfixable")
}
var example = `nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6
`
// plotFlow writes the program's control flow to out as a GraphViz DOT
// digraph: one vertex per instruction plus a "halt" sink. Weight-0 edges are
// the successors actually encoded; weight-1 edges (drawn red) are the
// alternative successor if the instruction were flipped between jmp and nop.
func plotFlow(lines []string, out io.Writer) error {
	var mnemonics = map[opcode]string{opAcc: "acc", opJmp: "jmp", opNop: "nop"}
	code, err := parseCode(lines)
	if err != nil {
		return err
	}
	g := &util.Graph{}
	// Vertex i represents instruction i; vertex len(code) represents halting.
	verts := make([]int, len(code)+1)
	for i, inst := range code {
		verts[i] = g.V(fmt.Sprintf("%d: %s %+d", i, mnemonics[inst.op], inst.arg))
	}
	verts[len(code)] = g.V("halt")
	for i, inst := range code {
		switch inst.op {
		case opAcc:
			g.AddEdgeWV(verts[i], verts[i+1], 0)
		case opJmp:
			g.AddEdgeWV(verts[i], verts[i+inst.arg], 0)
			g.AddEdgeWV(verts[i], verts[i+1], 1)
		case opNop:
			g.AddEdgeWV(verts[i], verts[i+1], 0)
			g.AddEdgeWV(verts[i], verts[i+inst.arg], 1)
		}
	}
	return g.WriteDOT(out, "prog", func(v int) map[string]string {
		// Mark the entry point and the halt vertex with a double border.
		if v == verts[0] || v == verts[len(verts)-1] {
			return map[string]string{"peripheries": `2`}
		}
		return nil
	}, func(fromV, toV int) map[string]string {
		// Hypothetical (weight-1) edges are colored red.
		attrs := map[string]string{"label": `""`}
		if g.W(fromV, toV) == 1 {
			attrs["color"] = `"red"`
		}
		return attrs
	})
} | 2020/day08/day08.go | 0.531453 | 0.404802 | day08.go | starcoder
package Uint
import (
"crypto/sha256"
"math/big"
)
const (
// Bitwidth256 is the number of bits in a U256
Bitwidth256 = 256
// Bytewidth256 is the number of bytes in a U256
Bytewidth256 = 32
)
// U256 stores a 256 bit value in a big.Int
type U256 struct {
big.Int
}
type u256 interface {
Zero256() *U256
Assign(*U256) *U256
ToString() string
ToBytes() []byte
FromUint64(uint64) *U256
FromString(string) *U256
FromBytes([]byte) *U256
EQ(*U256) bool
NEQ(*U256) bool
GT(*U256) bool
LT(*U256) bool
GToE(*U256) bool
LToE(*U256) bool
Not(*U256) *U256
And(*U256) *U256
Or(*U256) *U256
Xor(*U256) *U256
Add(*U256) *U256
Sub(*U256) *U256
}
// Zero256 returns a new, zero-valued 256 bit unsigned integer.
func Zero256() *U256 {
	return new(U256)
}
// truncate reduces input modulo 2^256 by keeping only its least-significant
// Bytewidth256 bytes, stores the result in the receiver and returns it.
//
// Bug fix: the original sliced input.Bytes()[:Bytewidth256], which keeps the
// MOST significant bytes of an over-wide value (and big.Int.Bytes is
// big-endian); truncating to a fixed width must keep the low-order bytes.
func (u *U256) truncate(input *U256) *U256 {
	b := input.Bytes()
	if len(b) > Bytewidth256 {
		b = b[len(b)-Bytewidth256:]
	}
	u.SetBytes(b)
	return u
}
// Assign stores a copy of input in the receiver, truncating values wider than
// 256 bits, and returns the receiver.
func (u *U256) Assign(input *U256) *U256 {
	if input.BitLen() <= Bitwidth256 {
		// Bug fix: the original wrote `u = input`, which only rebinds the
		// local receiver pointer — *u was never modified and the returned
		// value aliased input instead of holding a copy.
		u.Int.Set(&input.Int)
	} else {
		u.truncate(input)
	}
	return u
}
// ToString returns the base-10 decimal representation of the receiver.
// Values wider than 256 bits are first truncated in place — note this
// mutates the receiver.
func (u *U256) ToString() string {
	if u.BitLen() > Bitwidth256 {
		u.truncate(u)
	}
	return u.String()
}
// ToBytes returns the receiver as a big-endian byte slice. Values wider than
// 256 bits are first truncated in place — note this mutates the receiver.
func (u *U256) ToBytes() []byte {
	if u.BitLen() > Bitwidth256 {
		u.truncate(u)
	}
	return u.Bytes()
}
// FromUint64 stores the given uint64 in the receiver and returns the receiver.
func (u *U256) FromUint64(input uint64) *U256 {
	u.Int.SetUint64(input)
	return u
}
// FromString converts a string to a U256, detecting hexadecimal (0x) or octal
// (0) prefixes via base-0 parsing, stores it in the receiver and returns the
// receiver. The value is truncated to 256 bits when it overflows.
//
// Fix: the previous code ignored SetString's failure result; per math/big the
// receiver's value is undefined after a failed SetString. A malformed input
// now resets the receiver to zero instead of leaving it in an undefined state.
func (u *U256) FromString(input string) *U256 {
	if _, ok := u.SetString(input, 0); !ok {
		u.SetUint64(0)
		return u
	}
	if u.BitLen() > Bitwidth256 {
		u.truncate(u)
	}
	return u
}
// FromBytes interprets input as a big-endian unsigned integer, stores it in
// the receiver and returns the receiver. Inputs longer than 32 bytes are
// truncated to their low-order 256 bits (value mod 2^256).
//
// Fix: the previous code kept the FIRST 32 bytes — the most significant part
// of a big-endian number — which does not correspond to 256-bit truncation
// and was inconsistent with the (fixed) truncate method.
func (u *U256) FromBytes(input []byte) *U256 {
	if len(input) > Bytewidth256 {
		// Keep the least significant 32 bytes (big-endian layout).
		input = input[len(input)-Bytewidth256:]
	}
	u.SetBytes(input)
	return u
}
// EQ reports whether the receiver and the operand hold equal values.
func (u *U256) EQ(operand *U256) bool {
	return u.Int.Cmp(&operand.Int) == 0
}

// NEQ reports whether the receiver and the operand hold different values.
func (u *U256) NEQ(operand *U256) bool {
	return !u.EQ(operand)
}
// GT reports whether the receiver is greater than the operand.
// (The previous comment had the comparison direction inverted: Cmp returns 1
// when the receiver is the larger value.)
func (u *U256) GT(operand *U256) bool {
	return u.Int.Cmp(&operand.Int) == 1
}

// LT reports whether the receiver is less than the operand.
// (The previous comment had the comparison direction inverted.)
func (u *U256) LT(operand *U256) bool {
	return u.Int.Cmp(&operand.Int) == -1
}

// GToE reports whether the receiver is greater than or equal to the operand.
// (The previous comment had the comparison direction inverted.)
func (u *U256) GToE(operand *U256) bool {
	i := u.Int.Cmp(&operand.Int)
	return i == 0 || i == 1
}

// LToE reports whether the receiver is less than or equal to the operand.
// (The previous comment had the comparison direction inverted.)
func (u *U256) LToE(operand *U256) bool {
	i := u.Int.Cmp(&operand.Int)
	return i == 0 || i == -1
}
// Not replaces the receiver with its bitwise complement and returns it.
// NOTE(review): big.Int.Not is the signed complement (^x == -x-1), not a
// fixed-width 256-bit NOT — the result is negative for non-negative inputs;
// confirm callers expect this.
func (u *U256) Not() *U256 {
	x := &u.Int
	x.Not(x)
	return u
}

// And replaces the receiver with (receiver AND operand) and returns it.
func (u *U256) And(operand *U256) *U256 {
	x := &u.Int
	x.And(x, &operand.Int)
	return u
}

// Or replaces the receiver with (receiver OR operand) and returns it.
func (u *U256) Or(operand *U256) *U256 {
	x := &u.Int
	x.Or(x, &operand.Int)
	return u
}

// Xor replaces the receiver with (receiver XOR operand) and returns it.
func (u *U256) Xor(operand *U256) *U256 {
	x := &u.Int
	x.Xor(x, &operand.Int)
	return u
}
// Add replaces the receiver with (receiver + operand) and returns it.
// NOTE(review): the sum is not reduced mod 2^256 here; an overflowing value
// persists until some later operation calls truncate.
func (u *U256) Add(operand *U256) *U256 {
	sum := &u.Int
	sum.Add(sum, &operand.Int)
	return u
}

// Sub replaces the receiver with (receiver - operand) and returns it.
// NOTE(review): big.Int is signed, so the difference can go negative; there
// is no unsigned wrap-around semantics here.
func (u *U256) Sub(operand *U256) *U256 {
	diff := &u.Int
	diff.Sub(diff, &operand.Int)
	return u
}
// Returns the SHA256 hash of a one or more byte slices
func SHA256(b ...[]byte) *U256 {
var data []byte
for i := range b {
data = append(data, b[i]...)
}
sum := sha256.Sum256(data)
var out U256
out.Int.SetBytes(sum[:])
return &out
} | _old/newold/Uint/u256.go | 0.764716 | 0.439807 | u256.go | starcoder |
package model
type (
	// yaml tag for the proxy details
	yamlProxy struct {
		Http string `yaml:"http_proxy"`
		Https string `yaml:"https_proxy"`
		NoProxy string `yaml:"no_proxy"`
	}
	// yaml tag for stuff to be copied on volumes
	yamlCopy struct {
		// Once indicates if the copy should be done only on one node matching the targeted labels
		Once bool
		// The volume path where to copy the content
		Path string
		// Labels to restrict the copy to some node sets
		yamlLabel `yaml:",inline"`
		// The list of path patterns identifying content to be copied
		Sources []string `yaml:"sources"`
	}
	// yaml tag for parameters
	yamlParams struct {
		Params map[string]interface{} `yaml:",omitempty"`
	}
	// yaml tag for variables
	yamlVars struct {
		Vars map[string]interface{} `yaml:",omitempty"`
	}
	// yaml tag for authentication parameters
	yamlAuth struct {
		Auth map[string]interface{} `yaml:",omitempty"`
	}
	// yaml tag for environment variables
	yamlEnv struct {
		Env map[string]string `yaml:",omitempty"`
	}
	// yaml tag for labels on nodesets
	yamlLabel struct {
		Labels map[string]string `yaml:",omitempty"`
	}
	// yaml tag for custom playbooks
	yamlPlaybooks struct {
		Playbooks map[string]string `yaml:",omitempty"`
	}
	// yaml tag for component
	yamlComponent struct {
		// The source repository where the component lives
		Repository string
		// The ref (branch or tag) of the component to use
		Ref string
		// The authentication parameters
		yamlAuth `yaml:",inline"`
	}
	// yaml tag for a volume and its parameters
	yamlVolume struct {
		// The mounting path of the created volume
		Path string
		// The parameters required to create the volume (typically provider dependent)
		yamlParams `yaml:",inline"`
	}
	// yaml tag for a shared volume content
	yamlVolumeContent struct {
		// The component holding the content to copy into the volume
		Component string
		// The path of the content to copy
		Path string
	}
	// yaml reference to provider
	yamlProviderRef struct {
		Name string
		// The overriding provider parameters
		yamlParams `yaml:",inline"`
		// The overriding provider environment variables
		yamlEnv `yaml:",inline"`
		// The overriding provider proxy
		Proxy yamlProxy
	}
	// yaml reference to orchestrator
	yamlOrchestratorRef struct {
		// The overriding orchestrator parameters
		yamlParams `yaml:",inline"`
		// The overriding orchestrator environment variables
		yamlEnv `yaml:",inline"`
	}
	// yaml reference to task
	yamlTaskRef struct {
		// The referenced task
		Task string
		// Prefix, optional string used to prefix the stored hook results.
		Prefix string
		// The overriding parameters
		yamlParams `yaml:",inline"`
		// The overriding environment variables
		yamlEnv `yaml:",inline"`
	}
	// yaml tag for hooks
	yamlHook struct {
		// Hooks to be executed before the corresponding process step
		Before []yamlTaskRef `yaml:",omitempty"`
		// Hooks to be executed after the corresponding process step
		After []yamlTaskRef `yaml:",omitempty"`
	}
	// yaml tag for the Ekara platform settings
	yamlEkara struct {
		Base string `yaml:",omitempty"`
		Parent yamlComponent
		Components map[string]yamlComponent
		// The list of path patterns where to apply the template mechanism
		Templates []string `yaml:"templates"`
		// The list of custom playbooks
		yamlPlaybooks `yaml:",inline"`
	}
	// yaml tag for a node set
	yamlNode struct {
		// The number of instances to create within the node set
		Instances int
		// The provider used to create the node set and its settings
		Provider yamlProviderRef
		// The orchestrator settings for this node set
		Orchestrator yamlOrchestratorRef
		// The volumes to create for this node set
		Volumes []yamlVolume
		// The Hooks to be executed while creating the node set
		Hooks struct {
			Create yamlHook `yaml:",omitempty"`
		} `yaml:",omitempty"`
		// The labels associated with the nodeset
		yamlLabel `yaml:",inline"`
	}
	// Definition of the Ekara environment
	yamlEnvironment struct {
		// The name of the environment
		Name string
		// The qualifier of the environment
		Qualifier string `yaml:",omitempty"`
		// The description of the environment
		Description string `yaml:",omitempty"`
		// The Ekara platform used to interact with the environment
		Ekara yamlEkara
		// The descriptor variables
		yamlVars `yaml:",inline"`
		// Tasks which can be run on the created environment
		Tasks map[string]struct {
			// Name of the task component
			Component string
			// The task parameters
			yamlParams `yaml:",inline"`
			// The task environment variables
			yamlEnv `yaml:",inline"`
			// The name of the playbook to launch the task
			Playbook string `yaml:",omitempty"`
			// The Hooks to be executed in addition to the main task playbook
			Hooks struct {
				Execute yamlHook `yaml:",omitempty"`
			} `yaml:",omitempty"`
		}
		// Global definition of the orchestrator to install on the environment
		Orchestrator struct {
			// Name of the orchestrator component
			Component string
			// The orchestrator parameters
			yamlParams `yaml:",inline"`
			// The orchestrator environment variables
			yamlEnv `yaml:",inline"`
		}
		// The list of all cloud providers required to create the environment
		Providers map[string]struct {
			// Name of the provider component
			Component string
			// The provider parameters
			yamlParams `yaml:",inline"`
			// The provider environment variables
			yamlEnv `yaml:",inline"`
			// The provider proxy
			Proxy yamlProxy
		}
		// The list of node sets to create
		Nodes map[string]yamlNode
		// Software stacks to be installed on the environment
		Stacks map[string]struct {
			// Name of the stack component
			Component string
			// The names of the stacks on which this one depends
			DependsOn []string `yaml:"depends_on"`
			// The Hooks to be executed while deploying the stack
			Hooks struct {
				Deploy yamlHook `yaml:",omitempty"`
			} `yaml:",omitempty"`
			// The parameters
			yamlParams `yaml:",inline"`
			// The environment variables
			yamlEnv `yaml:",inline"`
			// The stack content to be copied on volumes
			Copies map[string]yamlCopy
			// Custom playbook
			Playbook string
		}
		// Global hooks
		Hooks struct {
			Init yamlHook `yaml:",omitempty"`
			Create yamlHook `yaml:",omitempty"`
			Install yamlHook `yaml:",omitempty"`
			Deploy yamlHook `yaml:",omitempty"`
			Delete yamlHook `yaml:",omitempty"`
		} `yaml:",omitempty"`
		// Global volumes
		Volumes map[string]struct {
			Content []yamlVolumeContent `yaml:",omitempty"`
		} `yaml:",omitempty"`
	}
)
package placement
import (
"strings"
"github.com/matrixorigin/matrixcube/components/prophet/core"
"github.com/matrixorigin/matrixcube/components/prophet/util/slice"
"github.com/matrixorigin/matrixcube/pb/rpcpb"
)
// LabelConstraintOp defines how a LabelConstraint matches a container. It can be one of
// 'in', 'notIn', 'exists', or 'notExists'. The string values are the wire /
// JSON representations of the operator.
type LabelConstraintOp string

const (
	// In restricts the container label value should in the value list.
	// If label does not exist, `in` is always false.
	In LabelConstraintOp = "in"
	// NotIn restricts the container label value should not in the value list.
	// If label does not exist, `notIn` is always true.
	NotIn LabelConstraintOp = "notIn"
	// Exists restricts the container should have the label.
	Exists LabelConstraintOp = "exists"
	// NotExists restricts the container should not have the label.
	NotExists LabelConstraintOp = "notExists"
)
// RPCLabelConstraintOp converts a placement.LabelConstraintOp to its
// rpcpb.LabelConstraintOp equivalent. Unrecognised operators fall back to
// rpcpb.In, matching the original behaviour.
func (l LabelConstraintOp) RPCLabelConstraintOp() rpcpb.LabelConstraintOp {
	switch l {
	case NotIn:
		return rpcpb.NotIn
	case Exists:
		return rpcpb.Exists
	case NotExists:
		return rpcpb.NotExists
	default:
		// In, plus any unknown value.
		return rpcpb.In
	}
}
// validateOp reports whether op is one of the four supported constraint
// operators.
func validateOp(op LabelConstraintOp) bool {
	switch op {
	case In, NotIn, Exists, NotExists:
		return true
	default:
		return false
	}
}
// getLabelConstraintOpFromRPC converts an rpcpb.LabelConstraintOp back to the
// placement operator. Unrecognised values fall back to In, matching the
// original behaviour.
func getLabelConstraintOpFromRPC(op rpcpb.LabelConstraintOp) LabelConstraintOp {
	switch op {
	case rpcpb.NotIn:
		return NotIn
	case rpcpb.Exists:
		return Exists
	case rpcpb.NotExists:
		return NotExists
	default:
		// rpcpb.In, plus any unknown value.
		return In
	}
}
// LabelConstraint is used to filter container when trying to place peer of a resource.
type LabelConstraint struct {
	// Key is the label key the constraint applies to.
	Key string `json:"key,omitempty"`
	// Op decides how the container's label value is compared against Values.
	Op LabelConstraintOp `json:"op,omitempty"`
	// Values is the candidate list consulted by the In/NotIn operators.
	Values []string `json:"values,omitempty"`
}
// MatchStore checks if a container matches the constraint.
// An empty label value is treated as "label absent": In never matches it,
// NotIn always does.
func (c *LabelConstraint) MatchStore(container *core.CachedStore) bool {
	// inValues reports whether label appears in the constraint's value list.
	inValues := func(label string) bool {
		for _, v := range c.Values {
			if v == label {
				return true
			}
		}
		return false
	}
	switch c.Op {
	case In:
		l := container.GetLabelValue(c.Key)
		return l != "" && inValues(l)
	case NotIn:
		l := container.GetLabelValue(c.Key)
		return l == "" || !inValues(l)
	case Exists:
		return container.GetLabelValue(c.Key) != ""
	case NotExists:
		return container.GetLabelValue(c.Key) == ""
	}
	return false
}
// RPCLabelConstraint converts a placement.LabelConstraint to its
// rpcpb.LabelConstraint wire representation.
func (c LabelConstraint) RPCLabelConstraint() rpcpb.LabelConstraint {
	out := rpcpb.LabelConstraint{
		Key:    c.Key,
		Values: c.Values,
	}
	out.Op = c.Op.RPCLabelConstraintOp()
	return out
}
// For backward compatibility. Need to remove later.
var legacyExclusiveLabels = []string{"engine", "exclusive"}

// isExclusiveLabel reports whether key marks a container as exclusive: either
// a "$"-prefixed key or one of the legacy exclusive label names.
// If a container has exclusive labels, it can only be selected when the label
// is explicitly specified in constraints.
func isExclusiveLabel(key string) bool {
	if strings.HasPrefix(key, "$") {
		return true
	}
	for _, legacy := range legacyExclusiveLabels {
		if key == legacy {
			return true
		}
	}
	return false
}
// MatchLabelConstraints checks if a container matches label constraints list.
// A nil container never matches. A container carrying an exclusive label is
// only eligible when every such label key is explicitly named by at least one
// constraint; after that, every constraint must individually match.
func MatchLabelConstraints(container *core.CachedStore, constraints []LabelConstraint) bool {
	if container == nil {
		return false
	}
	for _, l := range container.Meta.GetLabels() {
		if !isExclusiveLabel(l.GetKey()) {
			continue
		}
		referenced := false
		for i := range constraints {
			if constraints[i].Key == l.GetKey() {
				referenced = true
				break
			}
		}
		if !referenced {
			return false
		}
	}
	for i := range constraints {
		if !constraints[i].MatchStore(container) {
			return false
		}
	}
	return true
}
// newLabelConstraintsFromRPC converts a slice of rpcpb constraints into
// placement constraints. Returns nil for an empty input, preserving the
// previous append-onto-nil behaviour.
//
// Improvement: the slice is now pre-sized, avoiding the repeated growth
// copies of appending to a nil slice.
func newLabelConstraintsFromRPC(lcs []rpcpb.LabelConstraint) []LabelConstraint {
	if len(lcs) == 0 {
		return nil
	}
	values := make([]LabelConstraint, 0, len(lcs))
	for _, lc := range lcs {
		values = append(values, newLabelConstraintFromRPC(lc))
	}
	return values
}
// newLabelConstraintFromRPC converts a single rpcpb constraint into its
// placement equivalent.
func newLabelConstraintFromRPC(lc rpcpb.LabelConstraint) LabelConstraint {
	out := LabelConstraint{
		Key:    lc.Key,
		Values: lc.Values,
	}
	out.Op = getLabelConstraintOpFromRPC(lc.Op)
	return out
}
func toRPCLabelConstraints(lcs []LabelConstraint) []rpcpb.LabelConstraint {
var values []rpcpb.LabelConstraint
for _, lc := range lcs {
values = append(values, lc.RPCLabelConstraint())
}
return values
} | components/prophet/schedule/placement/label_constraint.go | 0.720663 | 0.448668 | label_constraint.go | starcoder |
package routing
import (
"math"
"github.com/flowmatters/openwater-core/data"
)
/*OW-SPEC
InstreamParticulateNutrient:
inputs:
incomingMassUpstream:
incomingMassLateral:
reachVolume:
outflow:
streambankErosion:
lateralSediment:
floodplainDepositionFraction:
channelDepositionFraction:
states:
channelStoredMass:
parameters:
particulateNutrientConcentration: '[0,1] Proportion of sediment mass, default=0'
soilPercentFine:
durationInSeconds: '[1,86400] Timestep, default=86400'
outputs:
loadDownstream:
loadToFloodplain:
implementation:
function: instreamParticulateNutrient
type: scalar
lang: go
outputs: params
init:
zero: true
lang: go
tags:
sediment transport
*/
// Ideally we'd separate out parameters from flags.
// Can I parameterise this WITHOUT the boolean DoDecay?
// Is the theLinkIsLumpedFlowRouting necessary??? assumed if we are using this model?
// Can we take out the point source logic?
// instreamParticulateNutrient routes particulate nutrient mass through a reach.
// It iterates over the timesteps in the input series, carrying channelStoredMass
// (storedMass) forward between iterations, and returns the final stored mass.
//
// Inputs named *Mass*, outflow and streamBankErosion are per-second rates; they
// are converted to per-timestep masses via durationInSeconds. Outputs
// loadDownstream and loadToFloodplain are written back as per-second rates.
// MINIMUM_VOLUME is a package-level threshold defined elsewhere in this package.
// (The misspelling "Constsituent" in local names is preserved intentionally.)
func instreamParticulateNutrient(incomingMassUpstream, incomingMassLateral, reachVolume, outflow,
	streamBankErosion, lateralSediment, floodplainDepositionFraction, channelDepositionFraction data.ND1Float64,
	storedMass float64,
	particulateNutrientConcentration, soilPercentFine, durationInSeconds float64,
	loadDownstream, loadToFloodplain data.ND1Float64) float64 {
	n := incomingMassUpstream.Len1()
	idx := []int{0}
	for i:=0; i < n; i++ {
		idx[0]=i
		// Convert incoming rates to per-timestep masses.
		incomingUpstream := incomingMassUpstream.Get(idx) * durationInSeconds
		incomingLateral := incomingMassLateral.Get(idx) * durationInSeconds
		totalDailyConstsituentMass := storedMass + incomingUpstream + incomingLateral // + AdditionalInflowMass
		//Do some adjustments to try to overcome the issue where FUs might've provided a Nutrient load (DWC?) but not a sediment load
		//This ultimately makes very little difference
		totalDailyConstsituentMassForDepositionProcesses := storedMass + incomingUpstream /*+ AdditionalInflowMass*/
		// Lateral nutrient mass only takes part in deposition when lateral
		// sediment was actually supplied (see note above).
		if (lateralSediment.Get(idx) > 0.0) {
			totalDailyConstsituentMassForDepositionProcesses += incomingLateral
		}
		if totalDailyConstsituentMassForDepositionProcesses < 0.0 {
			totalDailyConstsituentMassForDepositionProcesses = 0.0
		}
		//stream bank generation
		// Bank-derived particulate nutrient: full mass enters the reach total,
		// but only the fine fraction participates in deposition processes.
		streamBankParticulate := streamBankErosion.Get(idx) * particulateNutrientConcentration * durationInSeconds
		totalDailyConstsituentMass += streamBankParticulate
		totalDailyConstsituentMassForDepositionProcesses += streamBankParticulate * (soilPercentFine / 100)
		//Deposition on floodplain
		// Clamp the supplied fraction into [0,1] before applying it.
		fpDepositionFraction := math.Min(math.Max(floodplainDepositionFraction.Get(idx),0.0),1.0)
		nutrientDailyDepositedFloodPlain := fpDepositionFraction * totalDailyConstsituentMassForDepositionProcesses
		loadToFloodplain.Set(idx,nutrientDailyDepositedFloodPlain/durationInSeconds)
		//Intentionally haven't adjusted ConstituentStorage to remove the floodplain deposited material
		//as the proportion of channel storage in fine sediment model is also relevant to pre-floodplain total.
		//Deposition/remobilisation in stream bed (negative value is remobilisation)
		//double bedDeposit = SedMod.proportionDepositedBed * totalDailyConstsituentMass;
		//Potentially the sed model could have re-mobilised with 'zero' existing constituent mass...
		//But there's not much we can do about that
		bedDeposit := channelDepositionFraction.Get(idx) * totalDailyConstsituentMassForDepositionProcesses
		if bedDeposit >= 0 {
			// Bed deposition cannot exceed what remains after floodplain deposition.
			bedDeposit = math.Min(bedDeposit,totalDailyConstsituentMassForDepositionProcesses - nutrientDailyDepositedFloodPlain)
		// } else {
		// 	//Remobilisation
		// 	nutrientDailyRemobilisedBed = bedDeposit * -1.0 //This will make remobilisation a positive number
		// 	nutrientDailyDepositedBed = 0.0
		}
		// Net loss from the water column; a negative bedDeposit (remobilisation)
		// increases the mass available for routing.
		netLoss := nutrientDailyDepositedFloodPlain + bedDeposit
		amountLeft := totalDailyConstsituentMass - netLoss
		// copied from lumped constituent - should refactor
		outflowRate := outflow.Get(idx)
		outflowV := outflowRate * durationInSeconds
		storedV := reachVolume.Get(idx)
		workingVol := outflowV + storedV
		// Degenerate reach: too little water to carry constituent this step.
		if workingVol < MINIMUM_VOLUME {
			storedMass = 0.0 // workingMass
			loadDownstream.Set(idx, 0.0)
			continue
		}
		// Fully-mixed assumption: split remaining mass between storage and
		// outflow in proportion to volume.
		concentration := amountLeft / workingVol
		storedMass = concentration * storedV
		outflowLoad := concentration * outflowRate
		loadDownstream.Set(idx,outflowLoad)
	}
	return storedMass
}
/* From sediment model
* SoilPercentFine (param)
* CatchmentInflowMass (? from generation models?)
* BankErosionTotal_kg_per_Day (bank erosion model)
* proportionDepositedFloodplain (existing)
* proportionDepositedBed (not existing?)
*/ | models/routing/instream_particulate_nutrient.go | 0.53777 | 0.511351 | instream_particulate_nutrient.go | starcoder |
package pinapi
import (
"encoding/json"
"time"
)
// ParlayLeg struct for ParlayLeg.
// Generated OpenAPI model: all optional fields are pointers (or Nullable
// wrappers) so that "unset" can be distinguished from the zero value.
type ParlayLeg struct {
	SportId *int `json:"sportId,omitempty"`
	// Parlay leg type.
	LegBetType *string `json:"legBetType,omitempty"`
	// Parlay Leg status. CANCELLED = The leg is canceled- the stake on this leg will be transferred to the next one. In this case the leg will be ignored when calculating the winLoss, LOSE = The leg is a loss or a push-lose. When Push-lose happens, the half of the stake on the leg will be pushed to the next leg, and the other half will be a lose. This can happen only when the leg is placed on a quarter points handicap, PUSH = The leg is a push - the stake on this leg will be transferred to the next one. In this case the leg will be ignored when calculating the winLoss, REFUNDED = The leg is refunded - the stake on this leg will be transferred to the next one. In this case the leg will be ignored when calculating the winLoss, WON = The leg is a won or a push-won. When Push-won happens, the half of the stake on the leg will be pushed to the next leg, and the other half is won. This can happen only when the leg is placed on a quarter points handicap
	LegBetStatus *string `json:"legBetStatus,omitempty"`
	LeagueId *int `json:"leagueId,omitempty"`
	EventId *int64 `json:"eventId,omitempty"`
	// Date time when the event starts.
	EventStartTime *time.Time `json:"eventStartTime,omitempty"`
	Handicap NullableFloat64 `json:"handicap,omitempty"`
	Price *float64 `json:"price,omitempty"`
	TeamName *string `json:"teamName,omitempty"`
	// Side type.
	Side NullableString `json:"side,omitempty"`
	Pitcher1 NullableString `json:"pitcher1,omitempty"`
	Pitcher2 NullableString `json:"pitcher2,omitempty"`
	Pitcher1MustStart *bool `json:"pitcher1MustStart,omitempty"`
	Pitcher2MustStart *bool `json:"pitcher2MustStart,omitempty"`
	// Team 1 name, e.g. Wellington Phoenix
	Team1 *string `json:"team1,omitempty"`
	// Team 2 name, e.g. Adelaide United
	Team2 *string `json:"team2,omitempty"`
	PeriodNumber *int `json:"periodNumber,omitempty"`
	// Full time team 1 score
	FtTeam1Score NullableFloat64 `json:"ftTeam1Score,omitempty"`
	// Full time team 2 score
	FtTeam2Score NullableFloat64 `json:"ftTeam2Score,omitempty"`
	// End of period team 1 score. If the bet was placed on Game period (periodNumber =0) , this will be null
	PTeam1Score NullableFloat64 `json:"pTeam1Score,omitempty"`
	// End of period team 2 score. If the bet was placed on Game period (periodNumber =0) , this will be null
	PTeam2Score NullableFloat64 `json:"pTeam2Score,omitempty"`
	CancellationReason *CancellationReason `json:"cancellationReason,omitempty"`
}
// NewParlayLeg instantiates a new ParlayLeg object.
// This constructor assigns default values to properties that have them
// defined; the argument list will change if required properties are added.
func NewParlayLeg() *ParlayLeg {
	return &ParlayLeg{}
}
// NewParlayLegWithDefaults instantiates a new ParlayLeg object.
// Only properties with defined defaults are assigned; properties required by
// the API are not guaranteed to be set.
func NewParlayLegWithDefaults() *ParlayLeg {
	return &ParlayLeg{}
}
// GetSportId returns the SportId field value if set, zero value otherwise.
func (o *ParlayLeg) GetSportId() int {
	if v, ok := o.GetSportIdOk(); ok {
		return *v
	}
	return 0
}

// GetSportIdOk returns a tuple with the SportId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetSportIdOk() (*int, bool) {
	if o != nil && o.SportId != nil {
		return o.SportId, true
	}
	return nil, false
}

// HasSportId returns a boolean if a field has been set.
func (o *ParlayLeg) HasSportId() bool {
	return o != nil && o.SportId != nil
}

// SetSportId gets a reference to the given int and assigns it to the SportId field.
func (o *ParlayLeg) SetSportId(v int) {
	o.SportId = &v
}
// GetLegBetType returns the LegBetType field value if set, zero value otherwise.
func (o *ParlayLeg) GetLegBetType() string {
	if v, ok := o.GetLegBetTypeOk(); ok {
		return *v
	}
	return ""
}

// GetLegBetTypeOk returns a tuple with the LegBetType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetLegBetTypeOk() (*string, bool) {
	if o != nil && o.LegBetType != nil {
		return o.LegBetType, true
	}
	return nil, false
}

// HasLegBetType returns a boolean if a field has been set.
func (o *ParlayLeg) HasLegBetType() bool {
	return o != nil && o.LegBetType != nil
}

// SetLegBetType gets a reference to the given string and assigns it to the LegBetType field.
func (o *ParlayLeg) SetLegBetType(v string) {
	o.LegBetType = &v
}
// GetLegBetStatus returns the LegBetStatus field value if set, zero value otherwise.
func (o *ParlayLeg) GetLegBetStatus() string {
	if v, ok := o.GetLegBetStatusOk(); ok {
		return *v
	}
	return ""
}

// GetLegBetStatusOk returns a tuple with the LegBetStatus field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetLegBetStatusOk() (*string, bool) {
	if o != nil && o.LegBetStatus != nil {
		return o.LegBetStatus, true
	}
	return nil, false
}

// HasLegBetStatus returns a boolean if a field has been set.
func (o *ParlayLeg) HasLegBetStatus() bool {
	return o != nil && o.LegBetStatus != nil
}

// SetLegBetStatus gets a reference to the given string and assigns it to the LegBetStatus field.
func (o *ParlayLeg) SetLegBetStatus(v string) {
	o.LegBetStatus = &v
}
// GetLeagueId returns the LeagueId field value if set, zero value otherwise.
func (o *ParlayLeg) GetLeagueId() int {
	if v, ok := o.GetLeagueIdOk(); ok {
		return *v
	}
	return 0
}

// GetLeagueIdOk returns a tuple with the LeagueId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetLeagueIdOk() (*int, bool) {
	if o != nil && o.LeagueId != nil {
		return o.LeagueId, true
	}
	return nil, false
}

// HasLeagueId returns a boolean if a field has been set.
func (o *ParlayLeg) HasLeagueId() bool {
	return o != nil && o.LeagueId != nil
}

// SetLeagueId gets a reference to the given int and assigns it to the LeagueId field.
func (o *ParlayLeg) SetLeagueId(v int) {
	o.LeagueId = &v
}
// GetEventId returns the EventId field value if set, zero value otherwise.
func (o *ParlayLeg) GetEventId() int64 {
	if v, ok := o.GetEventIdOk(); ok {
		return *v
	}
	return 0
}

// GetEventIdOk returns a tuple with the EventId field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetEventIdOk() (*int64, bool) {
	if o != nil && o.EventId != nil {
		return o.EventId, true
	}
	return nil, false
}

// HasEventId returns a boolean if a field has been set.
func (o *ParlayLeg) HasEventId() bool {
	return o != nil && o.EventId != nil
}

// SetEventId gets a reference to the given int64 and assigns it to the EventId field.
func (o *ParlayLeg) SetEventId(v int64) {
	o.EventId = &v
}
// GetEventStartTime returns the EventStartTime field value if set, zero value otherwise.
func (o *ParlayLeg) GetEventStartTime() time.Time {
	if v, ok := o.GetEventStartTimeOk(); ok {
		return *v
	}
	var zero time.Time
	return zero
}

// GetEventStartTimeOk returns a tuple with the EventStartTime field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetEventStartTimeOk() (*time.Time, bool) {
	if o != nil && o.EventStartTime != nil {
		return o.EventStartTime, true
	}
	return nil, false
}

// HasEventStartTime returns a boolean if a field has been set.
func (o *ParlayLeg) HasEventStartTime() bool {
	return o != nil && o.EventStartTime != nil
}

// SetEventStartTime gets a reference to the given time.Time and assigns it to the EventStartTime field.
func (o *ParlayLeg) SetEventStartTime(v time.Time) {
	o.EventStartTime = &v
}
// GetHandicap returns the Handicap field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetHandicap() float64 {
	if o != nil {
		if v := o.Handicap.Get(); v != nil {
			return *v
		}
	}
	return 0
}

// GetHandicapOk returns a tuple with the Handicap field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetHandicapOk() (*float64, bool) {
	if o == nil {
		return nil, false
	}
	return o.Handicap.Get(), o.Handicap.IsSet()
}

// HasHandicap returns a boolean if a field has been set.
func (o *ParlayLeg) HasHandicap() bool {
	return o != nil && o.Handicap.IsSet()
}

// SetHandicap gets a reference to the given NullableFloat64 and assigns it to the Handicap field.
func (o *ParlayLeg) SetHandicap(v float64) {
	o.Handicap.Set(&v)
}

// SetHandicapNil sets the value for Handicap to be an explicit nil
func (o *ParlayLeg) SetHandicapNil() {
	o.Handicap.Set(nil)
}

// UnsetHandicap ensures that no value is present for Handicap, not even an explicit nil
func (o *ParlayLeg) UnsetHandicap() {
	o.Handicap.Unset()
}
// GetPrice returns the Price field value if set, zero value otherwise.
func (o *ParlayLeg) GetPrice() float64 {
	if v, ok := o.GetPriceOk(); ok {
		return *v
	}
	return 0
}

// GetPriceOk returns a tuple with the Price field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetPriceOk() (*float64, bool) {
	if o != nil && o.Price != nil {
		return o.Price, true
	}
	return nil, false
}

// HasPrice returns a boolean if a field has been set.
func (o *ParlayLeg) HasPrice() bool {
	return o != nil && o.Price != nil
}

// SetPrice gets a reference to the given float64 and assigns it to the Price field.
func (o *ParlayLeg) SetPrice(v float64) {
	o.Price = &v
}
// GetTeamName returns the TeamName field value if set, zero value otherwise.
func (o *ParlayLeg) GetTeamName() string {
	if v, ok := o.GetTeamNameOk(); ok {
		return *v
	}
	return ""
}

// GetTeamNameOk returns a tuple with the TeamName field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetTeamNameOk() (*string, bool) {
	if o != nil && o.TeamName != nil {
		return o.TeamName, true
	}
	return nil, false
}

// HasTeamName returns a boolean if a field has been set.
func (o *ParlayLeg) HasTeamName() bool {
	return o != nil && o.TeamName != nil
}

// SetTeamName gets a reference to the given string and assigns it to the TeamName field.
func (o *ParlayLeg) SetTeamName(v string) {
	o.TeamName = &v
}
// GetSide returns the Side field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetSide() string {
	if o != nil {
		if v := o.Side.Get(); v != nil {
			return *v
		}
	}
	return ""
}

// GetSideOk returns a tuple with the Side field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetSideOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Side.Get(), o.Side.IsSet()
}

// HasSide returns a boolean if a field has been set.
func (o *ParlayLeg) HasSide() bool {
	return o != nil && o.Side.IsSet()
}

// SetSide gets a reference to the given NullableString and assigns it to the Side field.
func (o *ParlayLeg) SetSide(v string) {
	o.Side.Set(&v)
}

// SetSideNil sets the value for Side to be an explicit nil
func (o *ParlayLeg) SetSideNil() {
	o.Side.Set(nil)
}

// UnsetSide ensures that no value is present for Side, not even an explicit nil
func (o *ParlayLeg) UnsetSide() {
	o.Side.Unset()
}
// GetPitcher1 returns the Pitcher1 field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetPitcher1() string {
	if o != nil {
		if v := o.Pitcher1.Get(); v != nil {
			return *v
		}
	}
	return ""
}

// GetPitcher1Ok returns a tuple with the Pitcher1 field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetPitcher1Ok() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Pitcher1.Get(), o.Pitcher1.IsSet()
}

// HasPitcher1 returns a boolean if a field has been set.
func (o *ParlayLeg) HasPitcher1() bool {
	return o != nil && o.Pitcher1.IsSet()
}

// SetPitcher1 gets a reference to the given NullableString and assigns it to the Pitcher1 field.
func (o *ParlayLeg) SetPitcher1(v string) {
	o.Pitcher1.Set(&v)
}

// SetPitcher1Nil sets the value for Pitcher1 to be an explicit nil
func (o *ParlayLeg) SetPitcher1Nil() {
	o.Pitcher1.Set(nil)
}

// UnsetPitcher1 ensures that no value is present for Pitcher1, not even an explicit nil
func (o *ParlayLeg) UnsetPitcher1() {
	o.Pitcher1.Unset()
}
// GetPitcher2 returns the Pitcher2 field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetPitcher2() string {
	if o != nil {
		if v := o.Pitcher2.Get(); v != nil {
			return *v
		}
	}
	return ""
}

// GetPitcher2Ok returns a tuple with the Pitcher2 field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetPitcher2Ok() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Pitcher2.Get(), o.Pitcher2.IsSet()
}

// HasPitcher2 returns a boolean if a field has been set.
func (o *ParlayLeg) HasPitcher2() bool {
	return o != nil && o.Pitcher2.IsSet()
}

// SetPitcher2 gets a reference to the given NullableString and assigns it to the Pitcher2 field.
func (o *ParlayLeg) SetPitcher2(v string) {
	o.Pitcher2.Set(&v)
}

// SetPitcher2Nil sets the value for Pitcher2 to be an explicit nil
func (o *ParlayLeg) SetPitcher2Nil() {
	o.Pitcher2.Set(nil)
}

// UnsetPitcher2 ensures that no value is present for Pitcher2, not even an explicit nil
func (o *ParlayLeg) UnsetPitcher2() {
	o.Pitcher2.Unset()
}
// GetPitcher1MustStart returns the Pitcher1MustStart field value if set, zero value otherwise.
func (o *ParlayLeg) GetPitcher1MustStart() bool {
	if o != nil && o.Pitcher1MustStart != nil {
		return *o.Pitcher1MustStart
	}
	return false
}
// GetPitcher1MustStartOk returns a tuple with the Pitcher1MustStart field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetPitcher1MustStartOk() (*bool, bool) {
	if o != nil && o.Pitcher1MustStart != nil {
		return o.Pitcher1MustStart, true
	}
	return nil, false
}
// HasPitcher1MustStart returns a boolean if a field has been set.
func (o *ParlayLeg) HasPitcher1MustStart() bool {
	return o != nil && o.Pitcher1MustStart != nil
}
// SetPitcher1MustStart gets a reference to the given bool and assigns it to the Pitcher1MustStart field.
func (o *ParlayLeg) SetPitcher1MustStart(v bool) {
	o.Pitcher1MustStart = &v
}
// GetPitcher2MustStart returns the Pitcher2MustStart field value if set, zero value otherwise.
func (o *ParlayLeg) GetPitcher2MustStart() bool {
	if o != nil && o.Pitcher2MustStart != nil {
		return *o.Pitcher2MustStart
	}
	return false
}
// GetPitcher2MustStartOk returns a tuple with the Pitcher2MustStart field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetPitcher2MustStartOk() (*bool, bool) {
	if o != nil && o.Pitcher2MustStart != nil {
		return o.Pitcher2MustStart, true
	}
	return nil, false
}
// HasPitcher2MustStart returns a boolean if a field has been set.
func (o *ParlayLeg) HasPitcher2MustStart() bool {
	return o != nil && o.Pitcher2MustStart != nil
}
// SetPitcher2MustStart gets a reference to the given bool and assigns it to the Pitcher2MustStart field.
func (o *ParlayLeg) SetPitcher2MustStart(v bool) {
	o.Pitcher2MustStart = &v
}
// GetTeam1 returns the Team1 field value if set, zero value otherwise.
func (o *ParlayLeg) GetTeam1() string {
	if o != nil && o.Team1 != nil {
		return *o.Team1
	}
	return ""
}
// GetTeam1Ok returns a tuple with the Team1 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetTeam1Ok() (*string, bool) {
	if o != nil && o.Team1 != nil {
		return o.Team1, true
	}
	return nil, false
}
// HasTeam1 returns a boolean if a field has been set.
func (o *ParlayLeg) HasTeam1() bool {
	return o != nil && o.Team1 != nil
}
// SetTeam1 gets a reference to the given string and assigns it to the Team1 field.
func (o *ParlayLeg) SetTeam1(v string) {
	o.Team1 = &v
}
// GetTeam2 returns the Team2 field value if set, zero value otherwise.
func (o *ParlayLeg) GetTeam2() string {
	if o != nil && o.Team2 != nil {
		return *o.Team2
	}
	return ""
}
// GetTeam2Ok returns a tuple with the Team2 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetTeam2Ok() (*string, bool) {
	if o != nil && o.Team2 != nil {
		return o.Team2, true
	}
	return nil, false
}
// HasTeam2 returns a boolean if a field has been set.
func (o *ParlayLeg) HasTeam2() bool {
	return o != nil && o.Team2 != nil
}
// SetTeam2 gets a reference to the given string and assigns it to the Team2 field.
func (o *ParlayLeg) SetTeam2(v string) {
	o.Team2 = &v
}
// GetPeriodNumber returns the PeriodNumber field value if set, zero value otherwise.
func (o *ParlayLeg) GetPeriodNumber() int {
	if o != nil && o.PeriodNumber != nil {
		return *o.PeriodNumber
	}
	return 0
}
// GetPeriodNumberOk returns a tuple with the PeriodNumber field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetPeriodNumberOk() (*int, bool) {
	if o != nil && o.PeriodNumber != nil {
		return o.PeriodNumber, true
	}
	return nil, false
}
// HasPeriodNumber returns a boolean if a field has been set.
func (o *ParlayLeg) HasPeriodNumber() bool {
	return o != nil && o.PeriodNumber != nil
}
// SetPeriodNumber gets a reference to the given int and assigns it to the PeriodNumber field.
func (o *ParlayLeg) SetPeriodNumber(v int) {
	o.PeriodNumber = &v
}
// GetFtTeam1Score returns the FtTeam1Score field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetFtTeam1Score() float64 {
	if o != nil {
		if v := o.FtTeam1Score.Get(); v != nil {
			return *v
		}
	}
	return 0
}
// GetFtTeam1ScoreOk returns a tuple with the FtTeam1Score field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetFtTeam1ScoreOk() (*float64, bool) {
	if o != nil {
		return o.FtTeam1Score.Get(), o.FtTeam1Score.IsSet()
	}
	return nil, false
}
// HasFtTeam1Score returns a boolean if a field has been set.
func (o *ParlayLeg) HasFtTeam1Score() bool {
	return o != nil && o.FtTeam1Score.IsSet()
}
// SetFtTeam1Score gets a reference to the given NullableFloat64 and assigns it to the FtTeam1Score field.
func (o *ParlayLeg) SetFtTeam1Score(v float64) {
	o.FtTeam1Score.Set(&v)
}
// SetFtTeam1ScoreNil sets the value for FtTeam1Score to be an explicit nil
func (o *ParlayLeg) SetFtTeam1ScoreNil() {
	o.FtTeam1Score.Set(nil)
}
// UnsetFtTeam1Score ensures that no value is present for FtTeam1Score, not even an explicit nil
func (o *ParlayLeg) UnsetFtTeam1Score() {
	o.FtTeam1Score.Unset()
}
// GetFtTeam2Score returns the FtTeam2Score field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetFtTeam2Score() float64 {
	if o != nil {
		if v := o.FtTeam2Score.Get(); v != nil {
			return *v
		}
	}
	return 0
}
// GetFtTeam2ScoreOk returns a tuple with the FtTeam2Score field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetFtTeam2ScoreOk() (*float64, bool) {
	if o != nil {
		return o.FtTeam2Score.Get(), o.FtTeam2Score.IsSet()
	}
	return nil, false
}
// HasFtTeam2Score returns a boolean if a field has been set.
func (o *ParlayLeg) HasFtTeam2Score() bool {
	return o != nil && o.FtTeam2Score.IsSet()
}
// SetFtTeam2Score gets a reference to the given NullableFloat64 and assigns it to the FtTeam2Score field.
func (o *ParlayLeg) SetFtTeam2Score(v float64) {
	o.FtTeam2Score.Set(&v)
}
// SetFtTeam2ScoreNil sets the value for FtTeam2Score to be an explicit nil
func (o *ParlayLeg) SetFtTeam2ScoreNil() {
	o.FtTeam2Score.Set(nil)
}
// UnsetFtTeam2Score ensures that no value is present for FtTeam2Score, not even an explicit nil
func (o *ParlayLeg) UnsetFtTeam2Score() {
	o.FtTeam2Score.Unset()
}
// GetPTeam1Score returns the PTeam1Score field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetPTeam1Score() float64 {
	if o != nil {
		if v := o.PTeam1Score.Get(); v != nil {
			return *v
		}
	}
	return 0
}
// GetPTeam1ScoreOk returns a tuple with the PTeam1Score field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetPTeam1ScoreOk() (*float64, bool) {
	if o != nil {
		return o.PTeam1Score.Get(), o.PTeam1Score.IsSet()
	}
	return nil, false
}
// HasPTeam1Score returns a boolean if a field has been set.
func (o *ParlayLeg) HasPTeam1Score() bool {
	return o != nil && o.PTeam1Score.IsSet()
}
// SetPTeam1Score gets a reference to the given NullableFloat64 and assigns it to the PTeam1Score field.
func (o *ParlayLeg) SetPTeam1Score(v float64) {
	o.PTeam1Score.Set(&v)
}
// SetPTeam1ScoreNil sets the value for PTeam1Score to be an explicit nil
func (o *ParlayLeg) SetPTeam1ScoreNil() {
	o.PTeam1Score.Set(nil)
}
// UnsetPTeam1Score ensures that no value is present for PTeam1Score, not even an explicit nil
func (o *ParlayLeg) UnsetPTeam1Score() {
	o.PTeam1Score.Unset()
}
// GetPTeam2Score returns the PTeam2Score field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *ParlayLeg) GetPTeam2Score() float64 {
	if o != nil {
		if v := o.PTeam2Score.Get(); v != nil {
			return *v
		}
	}
	return 0
}
// GetPTeam2ScoreOk returns a tuple with the PTeam2Score field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ParlayLeg) GetPTeam2ScoreOk() (*float64, bool) {
	if o != nil {
		return o.PTeam2Score.Get(), o.PTeam2Score.IsSet()
	}
	return nil, false
}
// HasPTeam2Score returns a boolean if a field has been set.
func (o *ParlayLeg) HasPTeam2Score() bool {
	return o != nil && o.PTeam2Score.IsSet()
}
// SetPTeam2Score gets a reference to the given NullableFloat64 and assigns it to the PTeam2Score field.
func (o *ParlayLeg) SetPTeam2Score(v float64) {
	o.PTeam2Score.Set(&v)
}
// SetPTeam2ScoreNil sets the value for PTeam2Score to be an explicit nil
func (o *ParlayLeg) SetPTeam2ScoreNil() {
	o.PTeam2Score.Set(nil)
}
// UnsetPTeam2Score ensures that no value is present for PTeam2Score, not even an explicit nil
func (o *ParlayLeg) UnsetPTeam2Score() {
	o.PTeam2Score.Unset()
}
// GetCancellationReason returns the CancellationReason field value if set, zero value otherwise.
func (o *ParlayLeg) GetCancellationReason() CancellationReason {
	if o != nil && o.CancellationReason != nil {
		return *o.CancellationReason
	}
	var ret CancellationReason
	return ret
}
// GetCancellationReasonOk returns a tuple with the CancellationReason field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ParlayLeg) GetCancellationReasonOk() (*CancellationReason, bool) {
	if o != nil && o.CancellationReason != nil {
		return o.CancellationReason, true
	}
	return nil, false
}
// HasCancellationReason returns a boolean if a field has been set.
func (o *ParlayLeg) HasCancellationReason() bool {
	return o != nil && o.CancellationReason != nil
}
// SetCancellationReason gets a reference to the given CancellationReason and assigns it to the CancellationReason field.
func (o *ParlayLeg) SetCancellationReason(v CancellationReason) {
	o.CancellationReason = &v
}
// MarshalJSON implements json.Marshaler for ParlayLeg. Only fields that have
// been set are serialized. Plain pointer fields are included when non-nil;
// Nullable* fields are included when IsSet() is true, in which case an
// explicit nil value is emitted as JSON null (Get() returns nil).
func (o ParlayLeg) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.SportId != nil {
		toSerialize["sportId"] = o.SportId
	}
	if o.LegBetType != nil {
		toSerialize["legBetType"] = o.LegBetType
	}
	if o.LegBetStatus != nil {
		toSerialize["legBetStatus"] = o.LegBetStatus
	}
	if o.LeagueId != nil {
		toSerialize["leagueId"] = o.LeagueId
	}
	if o.EventId != nil {
		toSerialize["eventId"] = o.EventId
	}
	if o.EventStartTime != nil {
		toSerialize["eventStartTime"] = o.EventStartTime
	}
	if o.Handicap.IsSet() {
		toSerialize["handicap"] = o.Handicap.Get()
	}
	if o.Price != nil {
		toSerialize["price"] = o.Price
	}
	if o.TeamName != nil {
		toSerialize["teamName"] = o.TeamName
	}
	if o.Side.IsSet() {
		toSerialize["side"] = o.Side.Get()
	}
	if o.Pitcher1.IsSet() {
		toSerialize["pitcher1"] = o.Pitcher1.Get()
	}
	if o.Pitcher2.IsSet() {
		toSerialize["pitcher2"] = o.Pitcher2.Get()
	}
	if o.Pitcher1MustStart != nil {
		toSerialize["pitcher1MustStart"] = o.Pitcher1MustStart
	}
	if o.Pitcher2MustStart != nil {
		toSerialize["pitcher2MustStart"] = o.Pitcher2MustStart
	}
	if o.Team1 != nil {
		toSerialize["team1"] = o.Team1
	}
	if o.Team2 != nil {
		toSerialize["team2"] = o.Team2
	}
	if o.PeriodNumber != nil {
		toSerialize["periodNumber"] = o.PeriodNumber
	}
	if o.FtTeam1Score.IsSet() {
		toSerialize["ftTeam1Score"] = o.FtTeam1Score.Get()
	}
	if o.FtTeam2Score.IsSet() {
		toSerialize["ftTeam2Score"] = o.FtTeam2Score.Get()
	}
	if o.PTeam1Score.IsSet() {
		toSerialize["pTeam1Score"] = o.PTeam1Score.Get()
	}
	if o.PTeam2Score.IsSet() {
		toSerialize["pTeam2Score"] = o.PTeam2Score.Get()
	}
	if o.CancellationReason != nil {
		toSerialize["cancellationReason"] = o.CancellationReason
	}
	return json.Marshal(toSerialize)
}
type NullableParlayLeg struct {
value *ParlayLeg
isSet bool
}
func (v NullableParlayLeg) Get() *ParlayLeg {
return v.value
}
func (v *NullableParlayLeg) Set(val *ParlayLeg) {
v.value = val
v.isSet = true
}
func (v NullableParlayLeg) IsSet() bool {
return v.isSet
}
func (v *NullableParlayLeg) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableParlayLeg(val *ParlayLeg) *NullableParlayLeg {
return &NullableParlayLeg{value: val, isSet: true}
}
func (v NullableParlayLeg) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableParlayLeg) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | pinapi/model_parlay_leg.go | 0.68721 | 0.433682 | model_parlay_leg.go | starcoder |
// This is an implementation of a variant of the Nelder-Mead (1965) downhill
// simplex optimization heuristic.
package dsl
import (
"math"
)
// nelderMeadOptimize minimizes function f with the Nelder-Mead downhill
// simplex heuristic. start points to a slice of starting points; the code
// below requires exactly dim+1 points (it panics otherwise). cf, if non-nil,
// is called on every candidate point before f is evaluated — presumably a
// constraint/clamp hook that mutates the point in place (TODO confirm with
// callers). It returns the best point found, the iteration count, and the
// number of evaluations of f.
// It is the responsibility of the caller to make sure the dimensionality is correct.
func nelderMeadOptimize(f func([]float64) float64, start [][]float64, cf func([]float64)) ([]float64, int, int) {
	const (
		kMax = 1000 // arbitrarily chosen value for now
		ε = 0.000001 // Stopping criterion point
		α = 1.0 // reflection coefficient
		β = 0.5 // contraction/shrink coefficient
		γ = 2.0 // expansion coefficient
	)
	// point is the type of points in ℝ^n
	type point []float64
	// simplex is the type used to represent a simplex
	type simplex []point
	// evaluations counts calls to f via the eval closure below.
	evaluations := 0
	eval := func(f func([]float64) float64, p point) float64 {
		evaluations++
		return f(p)
	}
	// sub perform point subtraction
	sub := func(x point, y point) point {
		r := make(point, len(x))
		for i := range y {
			r[i] = x[i] - y[i]
		}
		return r
	}
	// add perform point addition
	add := func(x point, y point) point {
		r := make(point, len(x))
		for i := range y {
			r[i] = x[i] + y[i]
		}
		return r
	}
	// scale multiplies a point by a scalar
	scale := func(p point, scalar float64) point {
		r := make(point, len(p))
		for i := range r {
			r[i] = scalar * p[i]
		}
		return r
	}
	// centroid calculates the centroid of a simplex of one dimensionality lower by omitting a point
	centroid := func(s simplex, omit int) point {
		r := make(point, len(s[0]))
		for i := range r {
			c := 0.0
			for j := range s {
				if j == omit {
					continue
				} else {
					c += s[j][i]
				}
			}
			r[i] = c / float64((len(s) - 1))
		}
		return r
	}
	// n = number of vertices, c = dimensionality; a valid simplex has n == c+1.
	n := len(start)
	c := len(start[0])
	points := make([]point, 0)
	fv := make([]float64, n)
	for _, p := range start {
		points = append(points, point(p))
	}
	sx := simplex(points)
	if n != c+1 {
		panic("Can't optimize with too few starting points")
	}
	// Set up initial values
	for i := range fv {
		if cf != nil {
			cf(sx[i])
		}
		fv[i] = eval(f, sx[i])
	}
	k := 0
	for ; k < kMax; k++ {
		// vg: worst vertex (largest f value).
		// Find the largest index
		vg := 0
		for i := range fv {
			if fv[i] > fv[vg] {
				vg = i
			}
		}
		// vs: best vertex (smallest f value).
		// Find the smallest index
		vs := 0
		for i := range fv {
			if fv[i] < fv[vs] {
				vs = i
			}
		}
		// vh: second-worst vertex.
		// Second largest index
		vh := vs
		for i := range fv {
			if fv[i] > fv[vh] && fv[i] < fv[vg] {
				vh = i
			}
		}
		// Reflect the worst vertex through the centroid of the others.
		vm := centroid(sx, vg)
		vr := add(vm, scale(sub(vm, sx[vg]), α))
		if cf != nil {
			cf(vr)
		}
		fr := eval(f, vr)
		if fr < fv[vh] && fr >= fv[vs] {
			// Reflection is better than second-worst but not a new best:
			// accept it in place of the worst vertex.
			// Replace
			fv[vg] = fr
			sx[vg] = vr
		}
		// New best point: try expanding further in the same direction.
		// Investigate a step further
		if fr < fv[vs] {
			ve := add(vm, scale(sub(vr, vm), γ))
			if cf != nil {
				cf(ve)
			}
			fe := eval(f, ve)
			if fe < fr {
				sx[vg] = ve
				fv[vg] = fe
			} else {
				sx[vg] = vr
				fv[vg] = fr
			}
		}
		// Reflection was no better than the second-worst vertex: contract.
		// Check contraction
		if fr >= fv[vh] {
			var vc point
			var fc float64
			if fr < fv[vg] && fr >= fv[vh] {
				// Outside contraction
				vc = add(vm, scale(sub(vr, vm), β))
			} else {
				// Inside contraction
				vc = sub(vm, scale(sub(vm, sx[vg]), β))
			}
			if cf != nil {
				cf(vc)
			}
			fc = eval(f, vc)
			if fc < fv[vg] {
				sx[vg] = vc
				fv[vg] = fc
			} else {
				// Contraction failed: shrink all vertices toward the best
				// one, then re-evaluate the two vertices used below.
				// NOTE(review): only fv[vg] and fv[vh] are refreshed after
				// the shrink, although every vertex moved — confirm this is
				// the intended variant (the classical method re-evaluates
				// all shrunk vertices).
				for i := range sx {
					if i != vs {
						sx[i] = add(sx[vs], scale(sub(sx[i], sx[vs]), β))
					}
				}
				if cf != nil {
					cf(sx[vg])
				}
				fv[vg] = eval(f, sx[vg])
				if cf != nil {
					cf(sx[vh])
				}
				fv[vh] = eval(f, sx[vh])
			}
		}
		// Convergence test: standard deviation of the vertex function values.
		fsum := 0.0
		for _, v := range fv {
			fsum += v
		}
		favg := fsum / float64(len(fv))
		s := 0.0
		for _, v := range fv {
			s += math.Pow(v-favg, 2.0)
		}
		// NOTE(review): the classical criterion divides by the number of
		// vertices (len(fv)); dividing by len(fv)+1 slightly underestimates
		// the spread — confirm this is intentional.
		s = s * (1.0 / (float64(len(fv)) + 1.0))
		s = math.Sqrt(s)
		if s < ε {
			break
		}
	}
	// Return the best vertex found.
	vs := 0
	for i := range fv {
		if fv[i] < fv[vs] {
			vs = i
		}
	}
	return sx[vs], k, evaluations
}
package gothumb
import (
"errors"
"image"
"image/draw"
)
// GenericTransformer applies one of the eight orientation transforms to In
// and stores the result in Out, returning any error from the transform.
//
// All methods use pointer receivers: the previous value receivers assigned
// t.Out on a copy of the struct, so the result was never visible to the
// caller (a genuine bug — Out always stayed nil).
type GenericTransformer struct {
	In  image.Image
	Out image.Image
}
// None stores In in Out unchanged.
func (t *GenericTransformer) None() error {
	t.Out = t.In
	return nil
}
// FlipH mirrors In horizontally into Out.
func (t *GenericTransformer) FlipH() error {
	out, err := FlipH(t.In)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// FlipV mirrors In vertically into Out.
func (t *GenericTransformer) FlipV() error {
	out, err := FlipV(t.In)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// Transpose reflects In across its main diagonal into Out.
func (t *GenericTransformer) Transpose() error {
	out, err := Transpose(t.In)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// Rotate90 rotates In by 90 degrees into Out.
func (t *GenericTransformer) Rotate90() error {
	out, err := Rotate(t.In, 90)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// Rotate180 rotates In by 180 degrees into Out.
func (t *GenericTransformer) Rotate180() error {
	out, err := Rotate(t.In, 180)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// Rotate270 rotates In by 270 degrees into Out.
func (t *GenericTransformer) Rotate270() error {
	out, err := Rotate(t.In, 270)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
// Transverse reflects In across its anti-diagonal into Out.
func (t *GenericTransformer) Transverse() error {
	out, err := Transverse(t.In)
	if err != nil {
		return err
	}
	t.Out = out
	return nil
}
func toRGBA(src image.Image) *image.RGBA {
m := image.NewRGBA(image.Rect(0, 0, src.Bounds().Dx(), src.Bounds().Dy()))
draw.Draw(m, m.Bounds(), src, src.Bounds().Min, draw.Src)
return m
}
// Rotate returns s rotated by deg degrees; only 90, 180 and 270 are
// supported, any other angle returns an error. For 90 and 270 the output
// dimensions are swapped. From the index mapping below, the 90-degree case
// sends (0,0) to the bottom-left corner, i.e. the rotation appears to be
// counter-clockwise — confirm the intended direction against callers.
func Rotate(s image.Image, deg int) (image.Image, error) {
	src := toRGBA(s)
	var d image.Rectangle
	switch deg {
	default:
		return nil, errors.New("Unsupported angle (90, 180, 270).")
	case 90, 270:
		// Width and height swap for quarter turns.
		d = image.Rect(0, 0, src.Bounds().Size().Y, src.Bounds().Size().X)
	case 180:
		d = image.Rect(0, 0, src.Bounds().Size().X, src.Bounds().Size().Y)
	}
	rv := image.NewRGBA(d)
	b := src.Bounds()
	/* switch outside of loops for performance reasons */
	switch deg {
	case 270:
		// (x, y) -> (row x, col H-1-y); 4 bytes per RGBA pixel.
		for y := 0; y < b.Size().Y; y++ {
			for x := 0; x < b.Size().X; x++ {
				s := x*rv.Stride + 4*(d.Size().X-y-1)
				p := y*src.Stride + x*4
				copy(rv.Pix[s:s+4], src.Pix[p:p+4])
			}
		}
	case 180:
		// (x, y) -> (row H-1-y, col W-1-x): point reflection about the center.
		for y := 0; y < b.Size().Y; y++ {
			for x := 0; x < b.Size().X; x++ {
				s := (d.Size().Y-y-1)*rv.Stride + 4*d.Size().X - (x+1)*4
				p := y*src.Stride + x*4
				copy(rv.Pix[s:s+4], src.Pix[p:p+4])
			}
		}
	case 90:
		// (x, y) -> (row W-1-x, col y).
		for y := 0; y < b.Size().Y; y++ {
			for x := 0; x < b.Size().X; x++ {
				s := (d.Size().Y-x-1)*rv.Stride + y*4
				p := y*src.Stride + x*4
				copy(rv.Pix[s:s+4], src.Pix[p:p+4])
			}
		}
	}
	return rv, nil
}
// FlipH returns a horizontally mirrored (left-right flipped) copy of s.
func FlipH(s image.Image) (image.Image, error) {
	src := toRGBA(s)
	w := src.Bounds().Size().X
	h := src.Bounds().Size().Y
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	for row := 0; row < h; row++ {
		for col := 0; col < w; col++ {
			// Destination pixel mirrors the source column.
			to := row*out.Stride + col*4
			from := row*src.Stride + (w-col-1)*4
			copy(out.Pix[to:to+4], src.Pix[from:from+4])
		}
	}
	return out, nil
}
// FlipV returns a vertically mirrored (top-bottom flipped) copy of s.
func FlipV(s image.Image) (image.Image, error) {
	src := toRGBA(s)
	w := src.Bounds().Size().X
	h := src.Bounds().Size().Y
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	for row := 0; row < h; row++ {
		// Copy the mirrored source row wholesale; rows are w*4 bytes of RGBA.
		from := (h - row - 1) * src.Stride
		to := row * out.Stride
		copy(out.Pix[to:to+w*4], src.Pix[from:from+w*4])
	}
	return out, nil
}
// Transpose returns s reflected across its main diagonal: pixel (x, y)
// moves to (y, x), so width and height swap.
func Transpose(s image.Image) (image.Image, error) {
	src := toRGBA(s)
	w := src.Bounds().Size().X
	h := src.Bounds().Size().Y
	out := image.NewRGBA(image.Rect(0, 0, h, w))
	for row := 0; row < h; row++ {
		for col := 0; col < w; col++ {
			to := col*out.Stride + row*4
			from := row*src.Stride + col*4
			copy(out.Pix[to:to+4], src.Pix[from:from+4])
		}
	}
	return out, nil
}
// Transverse returns s reflected across its anti-diagonal: pixel (x, y)
// moves to (H-1-y, W-1-x) in the swapped-dimension output (equivalent to a
// transpose followed by a 180-degree rotation).
func Transverse(s image.Image) (image.Image, error) {
	src := toRGBA(s)
	// Output dimensions swap, as with Transpose.
	d := image.Rect(0, 0, src.Bounds().Size().Y, src.Bounds().Size().X)
	rv := image.NewRGBA(d)
	b := src.Bounds()
	for y := 0; y < b.Size().Y; y++ {
		for x := 0; x < b.Size().X; x++ {
			// Destination row/col are both mirrored; 4 bytes per RGBA pixel.
			s := (d.Size().Y-x-1)*rv.Stride + (d.Size().X-y-1)*4
			p := y*src.Stride + x*4
			copy(rv.Pix[s:s+4], src.Pix[p:p+4])
		}
	}
	return rv, nil
}
package gorethink
import (
p "github.com/adjust/gorethink/ql2"
)
// Add sums two numbers or concatenates two arrays. The receiver is the
// first operand, so at least one further argument is required.
func (t RqlTerm) Add(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Add", p.Term_ADD, args, map[string]interface{}{})
}
// Add sums two numbers or concatenates two arrays. The free-function form
// takes all operands explicitly, so at least two arguments are required.
func Add(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Add", p.Term_ADD, args, map[string]interface{}{})
}
// Sub subtracts two numbers. The receiver is the first operand.
func (t RqlTerm) Sub(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Sub", p.Term_SUB, args, map[string]interface{}{})
}
// Sub subtracts two numbers.
func Sub(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Sub", p.Term_SUB, args, map[string]interface{}{})
}
// Mul multiplies two numbers. The receiver is the first operand.
func (t RqlTerm) Mul(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Mul", p.Term_MUL, args, map[string]interface{}{})
}
// Mul multiplies two numbers.
func Mul(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Mul", p.Term_MUL, args, map[string]interface{}{})
}
// Div divides two numbers. The receiver is the first operand.
func (t RqlTerm) Div(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Div", p.Term_DIV, args, map[string]interface{}{})
}
// Div divides two numbers.
func Div(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Div", p.Term_DIV, args, map[string]interface{}{})
}
// Mod divides two numbers and returns the remainder. Unlike the other
// arithmetic terms, Mod is strictly binary (exactly one further argument).
func (t RqlTerm) Mod(args ...interface{}) RqlTerm {
	enforceArgLength(1, 1, args)
	return newRqlTermFromPrevVal(t, "Mod", p.Term_MOD, args, map[string]interface{}{})
}
// Mod divides two numbers and returns the remainder.
func Mod(args ...interface{}) RqlTerm {
	enforceArgLength(2, 2, args)
	return newRqlTerm("Mod", p.Term_MOD, args, map[string]interface{}{})
}
// And performs a logical and on two values. The receiver is the first
// operand. Note the underlying wire term is ALL (variadic and).
func (t RqlTerm) And(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "And", p.Term_ALL, args, map[string]interface{}{})
}
// And performs a logical and on two values.
func And(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("And", p.Term_ALL, args, map[string]interface{}{})
}
// Or performs a logical or on two values. The receiver is the first
// operand. Note the underlying wire term is ANY (variadic or).
func (t RqlTerm) Or(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Or", p.Term_ANY, args, map[string]interface{}{})
}
// Or performs a logical or on two values.
func Or(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Or", p.Term_ANY, args, map[string]interface{}{})
}
// Eq returns true if two values are equal. The receiver is the first operand.
func (t RqlTerm) Eq(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Eq", p.Term_EQ, args, map[string]interface{}{})
}
// Eq returns true if two values are equal.
func Eq(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Eq", p.Term_EQ, args, map[string]interface{}{})
}
// Ne returns true if two values are not equal. The receiver is the first operand.
func (t RqlTerm) Ne(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Ne", p.Term_NE, args, map[string]interface{}{})
}
// Ne returns true if two values are not equal.
func Ne(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Ne", p.Term_NE, args, map[string]interface{}{})
}
// Gt returns true if the first value is greater than the second. The receiver is the first operand.
func (t RqlTerm) Gt(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Gt", p.Term_GT, args, map[string]interface{}{})
}
// Gt returns true if the first value is greater than the second.
func Gt(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Gt", p.Term_GT, args, map[string]interface{}{})
}
// Ge returns true if the first value is greater than or equal to the second. The receiver is the first operand.
func (t RqlTerm) Ge(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Ge", p.Term_GE, args, map[string]interface{}{})
}
// Ge returns true if the first value is greater than or equal to the second.
func Ge(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Ge", p.Term_GE, args, map[string]interface{}{})
}
// Lt returns true if the first value is less than the second. The receiver is the first operand.
func (t RqlTerm) Lt(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Lt", p.Term_LT, args, map[string]interface{}{})
}
// Lt returns true if the first value is less than the second.
func Lt(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Lt", p.Term_LT, args, map[string]interface{}{})
}
// Le returns true if the first value is less than or equal to the second. The receiver is the first operand.
func (t RqlTerm) Le(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTermFromPrevVal(t, "Le", p.Term_LE, args, map[string]interface{}{})
}
// Le returns true if the first value is less than or equal to the second.
func Le(args ...interface{}) RqlTerm {
	enforceArgLength(2, -1, args)
	return newRqlTerm("Le", p.Term_LE, args, map[string]interface{}{})
}
// Not performs a logical not on the receiver value; it takes no arguments.
func (t RqlTerm) Not() RqlTerm {
	return newRqlTermFromPrevVal(t, "Not", p.Term_NOT, []interface{}{}, map[string]interface{}{})
}
// Not performs a logical not on a value. Unlike the method form, this
// variadic form accepts one or more arguments.
func Not(args ...interface{}) RqlTerm {
	enforceArgLength(1, -1, args)
	return newRqlTerm("Not", p.Term_NOT, args, map[string]interface{}{})
}
package aws
import (
"github.com/infracost/infracost/internal/resources"
"github.com/infracost/infracost/internal/schema"
"strings"
"github.com/shopspring/decimal"
)
// KinesisAnalyticsV2Application models the cost inputs of an
// aws_kinesisanalyticsv2_application resource.
type KinesisAnalyticsV2Application struct {
	Address string
	Region string
	RuntimeEnvironment string
	// Usage-file driven quantities, populated via the infracost_usage tags
	// by PopulateUsage; nil means the value was not provided.
	KinesisProcessingUnits *int64 `infracost_usage:"kinesis_processing_units"`
	DurableApplicationBackupGB *float64 `infracost_usage:"durable_application_backup_gb"`
}
// KinesisAnalyticsV2ApplicationUsageSchema declares the usage keys and
// default values accepted for this resource.
var KinesisAnalyticsV2ApplicationUsageSchema = []*schema.UsageItem{
	{Key: "kinesis_processing_units", ValueType: schema.Int64, DefaultValue: 0},
	{Key: "durable_application_backup_gb", ValueType: schema.Float64, DefaultValue: 0},
}
// PopulateUsage fills the usage-tagged fields of r from the given usage data.
func (r *KinesisAnalyticsV2Application) PopulateUsage(u *schema.UsageData) {
	resources.PopulateArgsWithUsage(r, u)
}
// BuildResource assembles the cost components for the application. The
// stream-processing component is shared with the v1 resource, so a
// KinesisAnalyticsApplication value is constructed purely to reuse its
// processingStreamCostComponent. Flink runtimes (RuntimeEnvironment
// prefixed "flink", case-insensitive) additionally incur orchestration,
// running-storage and backup charges.
func (r *KinesisAnalyticsV2Application) BuildResource() *schema.Resource {
	costComponents := make([]*schema.CostComponent, 0)
	var kinesisProcessingUnits *decimal.Decimal
	if r.KinesisProcessingUnits != nil {
		kinesisProcessingUnits = decimalPtr(decimal.NewFromInt(*r.KinesisProcessingUnits))
	}
	var durableApplicationBackupGB *decimal.Decimal
	if r.DurableApplicationBackupGB != nil {
		durableApplicationBackupGB = decimalPtr(decimal.NewFromFloat(*r.DurableApplicationBackupGB))
	}
	// Reuse the v1 resource's processing component rather than duplicating it.
	v1App := &KinesisAnalyticsApplication{
		Region: r.Region,
		KinesisProcessingUnits: r.KinesisProcessingUnits,
	}
	costComponents = append(costComponents, v1App.processingStreamCostComponent(kinesisProcessingUnits))
	if strings.HasPrefix(strings.ToLower(r.RuntimeEnvironment), "flink") {
		costComponents = append(costComponents, r.processingOrchestrationCostComponent())
		costComponents = append(costComponents, r.runningStorageCostComponent(kinesisProcessingUnits))
		costComponents = append(costComponents, r.backupCostComponent(durableApplicationBackupGB))
	}
	return &schema.Resource{
		Name: r.Address,
		CostComponents: costComponents, UsageSchema: KinesisAnalyticsV2ApplicationUsageSchema,
	}
}
// processingOrchestrationCostComponent prices the fixed one-KPU-per-hour
// orchestration charge for Flink applications (the KPU-Hour-Java usage
// type in the AWS price list).
func (r *KinesisAnalyticsV2Application) processingOrchestrationCostComponent() *schema.CostComponent {
	return &schema.CostComponent{
		Name: "Processing (orchestration)",
		Unit: "KPU",
		UnitMultiplier: schema.HourToMonthUnitMultiplier,
		// Orchestration is always billed as a single additional KPU.
		HourlyQuantity: decimalPtr(decimal.NewFromInt(1)),
		ProductFilter: &schema.ProductFilter{
			VendorName: strPtr("aws"),
			Region: strPtr(r.Region),
			Service: strPtr("AmazonKinesisAnalytics"),
			ProductFamily: strPtr("Kinesis Analytics"),
			AttributeFilters: []*schema.AttributeFilter{
				{Key: "usagetype", ValueRegex: strPtr("/KPU-Hour-Java/i")},
			},
		},
	}
}
// runningStorageCostComponent prices the running application storage for
// Flink applications. Quantity is 50 GB per KPU — presumably matching the
// AWS allowance of 50 GB running storage per KPU; confirm against current
// Kinesis Data Analytics pricing. A nil KPU count leaves the quantity nil
// (unknown).
func (r *KinesisAnalyticsV2Application) runningStorageCostComponent(kinesisProcessingUnits *decimal.Decimal) *schema.CostComponent {
	var quantity *decimal.Decimal
	if kinesisProcessingUnits != nil {
		quantity = decimalPtr(kinesisProcessingUnits.Mul(decimal.NewFromInt(50)))
	}
	return &schema.CostComponent{
		Name: "Running storage",
		Unit: "GB",
		UnitMultiplier: decimal.NewFromInt(1),
		MonthlyQuantity: quantity,
		ProductFilter: &schema.ProductFilter{
			VendorName: strPtr("aws"),
			Region: strPtr(r.Region),
			Service: strPtr("AmazonKinesisAnalytics"),
			ProductFamily: strPtr("Kinesis Analytics"),
			AttributeFilters: []*schema.AttributeFilter{
				{Key: "usagetype", ValueRegex: strPtr("/RunningApplicationStorage$/i")},
			},
		},
	}
}
// backupCostComponent prices durable application backup storage for Flink
// applications; the monthly GB quantity comes straight from the usage file
// and may be nil (unknown).
func (r *KinesisAnalyticsV2Application) backupCostComponent(durableApplicationBackupGB *decimal.Decimal) *schema.CostComponent {
	return &schema.CostComponent{
		Name: "Backup",
		Unit: "GB",
		UnitMultiplier: decimal.NewFromInt(1),
		MonthlyQuantity: durableApplicationBackupGB,
		ProductFilter: &schema.ProductFilter{
			VendorName: strPtr("aws"),
			Region: strPtr(r.Region),
			Service: strPtr("AmazonKinesisAnalytics"),
			ProductFamily: strPtr("Kinesis Analytics"),
			AttributeFilters: []*schema.AttributeFilter{
				{Key: "usagetype", ValueRegex: strPtr("/DurableApplicationBackups/i")},
			},
		},
	}
}
package wasm
// Opcode is the binary opcode (single byte) of a WebAssembly instruction.
// It is a type alias of byte, so values convert freely. See also InstructionName
type Opcode = byte
const (
// OpcodeUnreachable causes an unconditional trap.
OpcodeUnreachable Opcode = 0x00
// OpcodeNop does nothing
OpcodeNop Opcode = 0x01
// OpcodeBlock brackets a sequence of instructions. A branch instruction on an if label breaks out to after its
// OpcodeEnd.
OpcodeBlock Opcode = 0x02
// OpcodeLoop brackets a sequence of instructions. A branch instruction on a loop label will jump back to the
// beginning of its block.
OpcodeLoop Opcode = 0x03
// OpcodeIf brackets a sequence of instructions. When the top of the stack evaluates to 1, the block is executed.
// Zero jumps to the optional OpcodeElse. A branch instruction on an if label breaks out to after its OpcodeEnd.
OpcodeIf Opcode = 0x04
// OpcodeElse brackets a sequence of instructions enclosed by an OpcodeIf. A branch instruction on a then label
// breaks out to after the OpcodeEnd on the enclosing OpcodeIf.
OpcodeElse Opcode = 0x05
// OpcodeEnd terminates a control instruction OpcodeBlock, OpcodeLoop or OpcodeIf.
OpcodeEnd Opcode = 0x0b
OpcodeBr Opcode = 0x0c
OpcodeBrIf Opcode = 0x0d
OpcodeBrTable Opcode = 0x0e
OpcodeReturn Opcode = 0x0f
OpcodeCall Opcode = 0x10
OpcodeCallIndirect Opcode = 0x11
// parametric instructions
OpcodeDrop Opcode = 0x1a
OpcodeSelect Opcode = 0x1b
// variable instructions
OpcodeLocalGet Opcode = 0x20
OpcodeLocalSet Opcode = 0x21
OpcodeLocalTee Opcode = 0x22
OpcodeGlobalGet Opcode = 0x23
OpcodeGlobalSet Opcode = 0x24
// memory instructions
OpcodeI32Load Opcode = 0x28
OpcodeI64Load Opcode = 0x29
OpcodeF32Load Opcode = 0x2a
OpcodeF64Load Opcode = 0x2b
OpcodeI32Load8S Opcode = 0x2c
OpcodeI32Load8U Opcode = 0x2d
OpcodeI32Load16S Opcode = 0x2e
OpcodeI32Load16U Opcode = 0x2f
OpcodeI64Load8S Opcode = 0x30
OpcodeI64Load8U Opcode = 0x31
OpcodeI64Load16S Opcode = 0x32
OpcodeI64Load16U Opcode = 0x33
OpcodeI64Load32S Opcode = 0x34
OpcodeI64Load32U Opcode = 0x35
OpcodeI32Store Opcode = 0x36
OpcodeI64Store Opcode = 0x37
OpcodeF32Store Opcode = 0x38
OpcodeF64Store Opcode = 0x39
OpcodeI32Store8 Opcode = 0x3a
OpcodeI32Store16 Opcode = 0x3b
OpcodeI64Store8 Opcode = 0x3c
OpcodeI64Store16 Opcode = 0x3d
OpcodeI64Store32 Opcode = 0x3e
OpcodeMemorySize Opcode = 0x3f
OpcodeMemoryGrow Opcode = 0x40
// const instructions
OpcodeI32Const Opcode = 0x41
OpcodeI64Const Opcode = 0x42
OpcodeF32Const Opcode = 0x43
OpcodeF64Const Opcode = 0x44
// numeric instructions
OpcodeI32Eqz Opcode = 0x45
OpcodeI32Eq Opcode = 0x46
OpcodeI32Ne Opcode = 0x47
OpcodeI32LtS Opcode = 0x48
OpcodeI32LtU Opcode = 0x49
OpcodeI32GtS Opcode = 0x4a
OpcodeI32GtU Opcode = 0x4b
OpcodeI32LeS Opcode = 0x4c
OpcodeI32LeU Opcode = 0x4d
OpcodeI32GeS Opcode = 0x4e
OpcodeI32GeU Opcode = 0x4f
OpcodeI64Eqz Opcode = 0x50
OpcodeI64Eq Opcode = 0x51
OpcodeI64Ne Opcode = 0x52
OpcodeI64LtS Opcode = 0x53
OpcodeI64LtU Opcode = 0x54
OpcodeI64GtS Opcode = 0x55
OpcodeI64GtU Opcode = 0x56
OpcodeI64LeS Opcode = 0x57
OpcodeI64LeU Opcode = 0x58
OpcodeI64GeS Opcode = 0x59
OpcodeI64GeU Opcode = 0x5a
OpcodeF32Eq Opcode = 0x5b
OpcodeF32Ne Opcode = 0x5c
OpcodeF32Lt Opcode = 0x5d
OpcodeF32Gt Opcode = 0x5e
OpcodeF32Le Opcode = 0x5f
OpcodeF32Ge Opcode = 0x60
OpcodeF64Eq Opcode = 0x61
OpcodeF64Ne Opcode = 0x62
OpcodeF64Lt Opcode = 0x63
OpcodeF64Gt Opcode = 0x64
OpcodeF64Le Opcode = 0x65
OpcodeF64Ge Opcode = 0x66
OpcodeI32Clz Opcode = 0x67
OpcodeI32Ctz Opcode = 0x68
OpcodeI32Popcnt Opcode = 0x69
OpcodeI32Add Opcode = 0x6a
OpcodeI32Sub Opcode = 0x6b
OpcodeI32Mul Opcode = 0x6c
OpcodeI32DivS Opcode = 0x6d
OpcodeI32DivU Opcode = 0x6e
OpcodeI32RemS Opcode = 0x6f
OpcodeI32RemU Opcode = 0x70
OpcodeI32And Opcode = 0x71
OpcodeI32Or Opcode = 0x72
OpcodeI32Xor Opcode = 0x73
OpcodeI32Shl Opcode = 0x74
OpcodeI32ShrS Opcode = 0x75
OpcodeI32ShrU Opcode = 0x76
OpcodeI32Rotl Opcode = 0x77
OpcodeI32Rotr Opcode = 0x78
OpcodeI64Clz Opcode = 0x79
OpcodeI64Ctz Opcode = 0x7a
OpcodeI64Popcnt Opcode = 0x7b
OpcodeI64Add Opcode = 0x7c
OpcodeI64Sub Opcode = 0x7d
OpcodeI64Mul Opcode = 0x7e
OpcodeI64DivS Opcode = 0x7f
OpcodeI64DivU Opcode = 0x80
OpcodeI64RemS Opcode = 0x81
OpcodeI64RemU Opcode = 0x82
OpcodeI64And Opcode = 0x83
OpcodeI64Or Opcode = 0x84
OpcodeI64Xor Opcode = 0x85
OpcodeI64Shl Opcode = 0x86
OpcodeI64ShrS Opcode = 0x87
OpcodeI64ShrU Opcode = 0x88
OpcodeI64Rotl Opcode = 0x89
OpcodeI64Rotr Opcode = 0x8a
OpcodeF32Abs Opcode = 0x8b
OpcodeF32Neg Opcode = 0x8c
OpcodeF32Ceil Opcode = 0x8d
OpcodeF32Floor Opcode = 0x8e
OpcodeF32Trunc Opcode = 0x8f
OpcodeF32Nearest Opcode = 0x90
OpcodeF32Sqrt Opcode = 0x91
OpcodeF32Add Opcode = 0x92
OpcodeF32Sub Opcode = 0x93
OpcodeF32Mul Opcode = 0x94
OpcodeF32Div Opcode = 0x95
OpcodeF32Min Opcode = 0x96
OpcodeF32Max Opcode = 0x97
OpcodeF32Copysign Opcode = 0x98
OpcodeF64Abs Opcode = 0x99
OpcodeF64Neg Opcode = 0x9a
OpcodeF64Ceil Opcode = 0x9b
OpcodeF64Floor Opcode = 0x9c
OpcodeF64Trunc Opcode = 0x9d
OpcodeF64Nearest Opcode = 0x9e
OpcodeF64Sqrt Opcode = 0x9f
OpcodeF64Add Opcode = 0xa0
OpcodeF64Sub Opcode = 0xa1
OpcodeF64Mul Opcode = 0xa2
OpcodeF64Div Opcode = 0xa3
OpcodeF64Min Opcode = 0xa4
OpcodeF64Max Opcode = 0xa5
OpcodeF64Copysign Opcode = 0xa6
OpcodeI32WrapI64 Opcode = 0xa7
OpcodeI32TruncF32S Opcode = 0xa8
OpcodeI32TruncF32U Opcode = 0xa9
OpcodeI32TruncF64S Opcode = 0xaa
OpcodeI32TruncF64U Opcode = 0xab
OpcodeI64ExtendI32S Opcode = 0xac
OpcodeI64ExtendI32U Opcode = 0xad
OpcodeI64TruncF32S Opcode = 0xae
OpcodeI64TruncF32U Opcode = 0xaf
OpcodeI64TruncF64S Opcode = 0xb0
OpcodeI64TruncF64U Opcode = 0xb1
OpcodeF32ConvertI32s Opcode = 0xb2
OpcodeF32ConvertI32U Opcode = 0xb3
OpcodeF32ConvertI64S Opcode = 0xb4
OpcodeF32ConvertI64U Opcode = 0xb5
OpcodeF32DemoteF64 Opcode = 0xb6
OpcodeF64ConvertI32S Opcode = 0xb7
OpcodeF64ConvertI32U Opcode = 0xb8
OpcodeF64ConvertI64S Opcode = 0xb9
OpcodeF64ConvertI64U Opcode = 0xba
OpcodeF64PromoteF32 Opcode = 0xbb
OpcodeI32ReinterpretF32 Opcode = 0xbc
OpcodeI64ReinterpretF64 Opcode = 0xbd
OpcodeF32ReinterpretI32 Opcode = 0xbe
OpcodeF64ReinterpretI64 Opcode = 0xbf
// Below are toggled with FeatureSignExtensionOps
// OpcodeI32Extend8S extends a signed 8-bit integer to a 32-bit integer.
// Note: This is dependent on the flag FeatureSignExtensionOps
OpcodeI32Extend8S Opcode = 0xc0
// OpcodeI32Extend16S extends a signed 16-bit integer to a 32-bit integer.
// Note: This is dependent on the flag FeatureSignExtensionOps
OpcodeI32Extend16S Opcode = 0xc1
// OpcodeI64Extend8S extends a signed 8-bit integer to a 64-bit integer.
// Note: This is dependent on the flag FeatureSignExtensionOps
OpcodeI64Extend8S Opcode = 0xc2
// OpcodeI64Extend16S extends a signed 16-bit integer to a 64-bit integer.
// Note: This is dependent on the flag FeatureSignExtensionOps
OpcodeI64Extend16S Opcode = 0xc3
// OpcodeI64Extend32S extends a signed 32-bit integer to a 64-bit integer.
// Note: This is dependent on the flag FeatureSignExtensionOps
OpcodeI64Extend32S Opcode = 0xc4
LastOpcode = OpcodeI64Extend32S
)
var instructionNames = [256]string{
OpcodeUnreachable: "unreachable",
OpcodeNop: "nop",
OpcodeBlock: "block",
OpcodeLoop: "loop",
OpcodeIf: "if",
OpcodeElse: "else",
OpcodeEnd: "end",
OpcodeBr: "br",
OpcodeBrIf: "br_if",
OpcodeBrTable: "br_table",
OpcodeReturn: "return",
OpcodeCall: "call",
OpcodeCallIndirect: "call_indirect",
OpcodeDrop: "drop",
OpcodeSelect: "select",
OpcodeLocalGet: "local.get",
OpcodeLocalSet: "local.set",
OpcodeLocalTee: "local.tee",
OpcodeGlobalGet: "global.get",
OpcodeGlobalSet: "global.set",
OpcodeI32Load: "i32.load",
OpcodeI64Load: "i64.load",
OpcodeF32Load: "f32.load",
OpcodeF64Load: "f64.load",
OpcodeI32Load8S: "i32.load8_s",
OpcodeI32Load8U: "i32.load8_u",
OpcodeI32Load16S: "i32.load16_s",
OpcodeI32Load16U: "i32.load16_u",
OpcodeI64Load8S: "i64.load8_s",
OpcodeI64Load8U: "i64.load8_u",
OpcodeI64Load16S: "i64.load16_s",
OpcodeI64Load16U: "i64.load16_u",
OpcodeI64Load32S: "i64.load32_s",
OpcodeI64Load32U: "i64.load32_u",
OpcodeI32Store: "i32.store",
OpcodeI64Store: "i64.store",
OpcodeF32Store: "f32.store",
OpcodeF64Store: "f64.store",
OpcodeI32Store8: "i32.store8",
OpcodeI32Store16: "i32.store16",
OpcodeI64Store8: "i64.store8",
OpcodeI64Store16: "i64.store16",
OpcodeI64Store32: "i64.store32",
OpcodeMemorySize: "memory.size",
OpcodeMemoryGrow: "memory.grow",
OpcodeI32Const: "i32.const",
OpcodeI64Const: "i64.const",
OpcodeF32Const: "f32.const",
OpcodeF64Const: "f64.const",
OpcodeI32Eqz: "i32.eqz",
OpcodeI32Eq: "i32.eq",
OpcodeI32Ne: "i32.ne",
OpcodeI32LtS: "i32.lt_s",
OpcodeI32LtU: "i32.lt_u",
OpcodeI32GtS: "i32.gt_s",
OpcodeI32GtU: "i32.gt_u",
OpcodeI32LeS: "i32.le_s",
OpcodeI32LeU: "i32.le_u",
OpcodeI32GeS: "i32.ge_s",
OpcodeI32GeU: "i32.ge_u",
OpcodeI64Eqz: "i64.eqz",
OpcodeI64Eq: "i64.eq",
OpcodeI64Ne: "i64.ne",
OpcodeI64LtS: "i64.lt_s",
OpcodeI64LtU: "i64.lt_u",
OpcodeI64GtS: "i64.gt_s",
OpcodeI64GtU: "i64.gt_u",
OpcodeI64LeS: "i64.le_s",
OpcodeI64LeU: "i64.le_u",
OpcodeI64GeS: "i64.ge_s",
OpcodeI64GeU: "i64.ge_u",
OpcodeF32Eq: "f32.eq",
OpcodeF32Ne: "f32.ne",
OpcodeF32Lt: "f32.lt",
OpcodeF32Gt: "f32.gt",
OpcodeF32Le: "f32.le",
OpcodeF32Ge: "f32.ge",
OpcodeF64Eq: "f64.eq",
OpcodeF64Ne: "f64.ne",
OpcodeF64Lt: "f64.lt",
OpcodeF64Gt: "f64.gt",
OpcodeF64Le: "f64.le",
OpcodeF64Ge: "f64.ge",
OpcodeI32Clz: "i32.clz",
OpcodeI32Ctz: "i32.ctz",
OpcodeI32Popcnt: "i32.popcnt",
OpcodeI32Add: "i32.add",
OpcodeI32Sub: "i32.sub",
OpcodeI32Mul: "i32.mul",
OpcodeI32DivS: "i32.div_s",
OpcodeI32DivU: "i32.div_u",
OpcodeI32RemS: "i32.rem_s",
OpcodeI32RemU: "i32.rem_u",
OpcodeI32And: "i32.and",
OpcodeI32Or: "i32.or",
OpcodeI32Xor: "i32.xor",
OpcodeI32Shl: "i32.shl",
OpcodeI32ShrS: "i32.shr_s",
OpcodeI32ShrU: "i32.shr_u",
OpcodeI32Rotl: "i32.rotl",
OpcodeI32Rotr: "i32.rotr",
OpcodeI64Clz: "i64.clz",
OpcodeI64Ctz: "i64.ctz",
OpcodeI64Popcnt: "i64.popcnt",
OpcodeI64Add: "i64.add",
OpcodeI64Sub: "i64.sub",
OpcodeI64Mul: "i64.mul",
OpcodeI64DivS: "i64.div_s",
OpcodeI64DivU: "i64.div_u",
OpcodeI64RemS: "i64.rem_s",
OpcodeI64RemU: "i64.rem_u",
OpcodeI64And: "i64.and",
OpcodeI64Or: "i64.or",
OpcodeI64Xor: "i64.xor",
OpcodeI64Shl: "i64.shl",
OpcodeI64ShrS: "i64.shr_s",
OpcodeI64ShrU: "i64.shr_u",
OpcodeI64Rotl: "i64.rotl",
OpcodeI64Rotr: "i64.rotr",
OpcodeF32Abs: "f32.abs",
OpcodeF32Neg: "f32.neg",
OpcodeF32Ceil: "f32.ceil",
OpcodeF32Floor: "f32.floor",
OpcodeF32Trunc: "f32.trunc",
OpcodeF32Nearest: "f32.nearest",
OpcodeF32Sqrt: "f32.sqrt",
OpcodeF32Add: "f32.add",
OpcodeF32Sub: "f32.sub",
OpcodeF32Mul: "f32.mul",
OpcodeF32Div: "f32.div",
OpcodeF32Min: "f32.min",
OpcodeF32Max: "f32.max",
OpcodeF32Copysign: "f32.copysign",
OpcodeF64Abs: "f64.abs",
OpcodeF64Neg: "f64.neg",
OpcodeF64Ceil: "f64.ceil",
OpcodeF64Floor: "f64.floor",
OpcodeF64Trunc: "f64.trunc",
OpcodeF64Nearest: "f64.nearest",
OpcodeF64Sqrt: "f64.sqrt",
OpcodeF64Add: "f64.add",
OpcodeF64Sub: "f64.sub",
OpcodeF64Mul: "f64.mul",
OpcodeF64Div: "f64.div",
OpcodeF64Min: "f64.min",
OpcodeF64Max: "f64.max",
OpcodeF64Copysign: "f64.copysign",
OpcodeI32WrapI64: "i32.wrap_i64",
OpcodeI32TruncF32S: "i32.trunc_f32_s",
OpcodeI32TruncF32U: "i32.trunc_f32_u",
OpcodeI32TruncF64S: "i32.trunc_f64_s",
OpcodeI32TruncF64U: "i32.trunc_f64_u",
OpcodeI64ExtendI32S: "i64.extend_i32_s",
OpcodeI64ExtendI32U: "i64.extend_i32_u",
OpcodeI64TruncF32S: "i64.trunc_f32_s",
OpcodeI64TruncF32U: "i64.trunc_f32_u",
OpcodeI64TruncF64S: "i64.trunc_f64_s",
OpcodeI64TruncF64U: "i64.trunc_f64_u",
OpcodeF32ConvertI32s: "f32.convert_i32_s",
OpcodeF32ConvertI32U: "f32.convert_i32_u",
OpcodeF32ConvertI64S: "f32.convert_i64_s",
OpcodeF32ConvertI64U: "f32.convert_i64u",
OpcodeF32DemoteF64: "f32.demote_f64",
OpcodeF64ConvertI32S: "f64.convert_i32_s",
OpcodeF64ConvertI32U: "f64.convert_i32_u",
OpcodeF64ConvertI64S: "f64.convert_i64_s",
OpcodeF64ConvertI64U: "f64.convert_i64_u",
OpcodeF64PromoteF32: "f64.promote_f32",
OpcodeI32ReinterpretF32: "i32.reinterpret_f32",
OpcodeI64ReinterpretF64: "i64.reinterpret_f64",
OpcodeF32ReinterpretI32: "f32.reinterpret_i32",
OpcodeF64ReinterpretI64: "f64.reinterpret_i64",
// Below are toggled with FeatureSignExtensionOps
OpcodeI32Extend8S: "i32.extend8_s",
OpcodeI32Extend16S: "i32.extend16_s",
OpcodeI64Extend8S: "i64.extend8_s",
OpcodeI64Extend16S: "i64.extend16_s",
OpcodeI64Extend32S: "i64.extend32_s",
}
// InstructionName returns the instruction corresponding to this binary Opcode.
// See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#a7-index-of-instructions
func InstructionName(oc Opcode) string {
return instructionNames[oc]
} | vendor/github.com/tetratelabs/wazero/internal/wasm/instruction.go | 0.555918 | 0.454412 | instruction.go | starcoder |
package stepdefinitions
import (
"fmt"
"github.com/DATA-DOG/godog"
"github.com/jaysonesmith/gopherhole/board"
"github.com/jaysonesmith/gopherhole/support"
"github.com/jaysonesmith/gopherhole/utils"
"github.com/pkg/errors"
)
// difficulty maps the difficulty names used in the feature files to the
// numeric levels passed to board.Fill (see AXBoardIsFilledAtDifficulty).
var difficulty = map[string]int{"medium": 1, "hard": 2}

// characters maps the human-readable nouns used in the feature files
// (both singular and plural forms) to their single-character board
// representations.
var characters = map[string]string{
	"gopher": "g", "gophers": "g",
	"hole": "h", "holes": "h",
	"space": " ", "spaces": " ",
	"earth": "e",
}
// Steps is a mapping of gherkin regex to step
// definition methods
func (sc *ScenarioContext) Steps(s *godog.Suite) {
	// Board creation and sizing.
	s.Step(`^a new game is requested with no board size set$`, sc.ANewGameIsRequestedWithNoBoardSizeSet)
	s.Step(`^a (\d+)x(\d+) board must be returned$`, sc.AXBoardMustBeReturned)
	s.Step(`^a new game is requested with a (\d+)x(\d+) board size$`, sc.ANewGameIsRequestedWithAXBoardSize)
	// Filling the board at a difficulty level.
	s.Step(`^a (\d+)x(\d+) board is filled at (\w+) difficulty$`, sc.AXBoardIsFilledAtDifficulty)
	s.Step(`^approximately (\d+) spaces will be filled$`, sc.ApproximatelySpacesWillBeFilled)
	s.Step(`^a medium (\d+)x(\d+) board$`, sc.AMediumXBoard)
	// Starting a game (hides gophers from the player).
	s.Step(`^a new game is started$`, sc.ANewGameIsStarted)
	s.Step(`^no gophers should be returned to the player$`, sc.NoGophersShouldBeReturnedToThePlayer)
	// Placing and asserting on individual characters.
	s.Step(`^a (\d+)x(\d+) sized board full of (\w+)$`, sc.ABoardFullOf)
	s.Step(`^a (\w+) is entered into position \((\d+), (\d+)\)$`, sc.IsEnteredToPosition)
	s.Step(`^that position must contain the expected character$`, sc.ThatPositionMustContainTheExpectedCharacter)
	s.Step(`^that position must contain a (\w+) character$`, sc.ThatPositionMustContainACharacter)
	s.Step(`^a placement error of "([^"]*)" must be returned$`, sc.APlacementErrorOfMustBeReturned)
}
// ANewGameIsRequestedWithNoBoardSizeSet creates a board with zero dimensions;
// board.New is expected to substitute its defaults (asserted by a later step).
func (sc *ScenarioContext) ANewGameIsRequestedWithNoBoardSizeSet() error {
	sc.Board = board.New(0, 0)
	return nil
}

// ANewGameIsRequestedWithAXBoardSize creates an x-by-y board on the scenario
// context for later steps to inspect.
func (sc *ScenarioContext) ANewGameIsRequestedWithAXBoardSize(x, y int) error {
	sc.Board = board.New(x, y)
	return nil
}

// AXBoardMustBeReturned asserts that the previously created board has the
// expected dimensions.
func (sc *ScenarioContext) AXBoardMustBeReturned(x, y int) error {
	err := support.CheckBoardDimensions(x, y, sc.Board)
	if err != nil {
		return fmt.Errorf("board dimensions incorrect: %s", err.Error())
	}
	return nil
}

// AXBoardIsFilledAtDifficulty creates an x-by-y board and fills it at the
// named difficulty ("medium" or "hard", see the difficulty map).
func (sc *ScenarioContext) AXBoardIsFilledAtDifficulty(x, y int, d string) error {
	sc.Board = board.New(x, y)
	sc.Board.Fill(difficulty[d], 1)
	return nil
}
// ApproximatelySpacesWillBeFilled checks that roughly c spaces on the board
// were populated by the fill step (utils.FilledCount applies the tolerance).
func (sc *ScenarioContext) ApproximatelySpacesWillBeFilled(c int) error {
	if f, ok := utils.FilledCount(c, sc.Board); !ok {
		return fmt.Errorf("expected amount of spaces were not filled. expected: %d found: %d", c, f)
	}
	return nil
}

// AMediumXBoard creates an x-by-y board and fills it at medium difficulty.
func (sc *ScenarioContext) AMediumXBoard(x, y int) error {
	sc.Board = board.New(x, y)
	sc.Board.Fill(1, 1)
	return nil
}

// ANewGameIsStarted removes all gophers from the current board, as the player
// must not see them at the start of a game.
func (sc *ScenarioContext) ANewGameIsStarted() error {
	sc.Board.RemoveGophers()
	return nil
}

// NoGophersShouldBeReturnedToThePlayer fails if any gopher characters remain
// on the board; the board is printed to aid debugging.
func (sc *ScenarioContext) NoGophersShouldBeReturnedToThePlayer() error {
	if utils.GophersExist(sc.Board) {
		sc.Board.Print()
		// errors.New rather than fmt.Errorf: the message has no format verbs.
		return errors.New("Unexpected gophers found in board")
	}
	return nil
}
// ABoardFullOf creates an x-by-y board and fills every cell with the board
// representation of the given noun (see the characters map).
func (sc *ScenarioContext) ABoardFullOf(x, y int, item string) error {
	sc.Board = board.New(x, y)
	utils.FillBoardWith(characters[item], sc.Board)
	return nil
}

// IsEnteredToPosition writes the character for the given noun at (x, y),
// recording the character and coordinates on the scenario context so that
// later assertion steps can refer to them. A placement failure is captured
// in sc.Errors.PlacementError (checked by APlacementErrorOfMustBeReturned)
// rather than failing this step.
func (sc *ScenarioContext) IsEnteredToPosition(char string, x, y int) error {
	c := characters[char]
	sc.Char = c
	sc.X = x
	sc.Y = y
	if err := sc.Board.WriteChar(c, x, y); err != nil {
		sc.Errors.PlacementError = err.Error()
	}
	return nil
}
func (sc *ScenarioContext) ThatPositionMustContainTheExpectedCharacter() error {
foundChar, err := sc.Board.CharAt(sc.X, sc.Y)
if err != nil {
return nil
}
if sc.Char != foundChar {
return errors.Errorf("%s not found at (%d, %d)", sc.Char, sc.X, sc.Y)
}
return nil
}
func (sc *ScenarioContext) ThatPositionMustContainACharacter(char string) error {
foundChar, err := sc.Board.CharAt(sc.X, sc.Y)
if err != nil {
return nil
}
if characters[char] != foundChar {
return errors.Errorf("%s not found at (%d, %d)", sc.Char, sc.X, sc.Y)
}
return nil
}
func (sc *ScenarioContext) APlacementErrorOfMustBeReturned(err string) error {
if sc.Errors.PlacementError != err {
return errors.Errorf(`expected error of "%s" but found "%s"`, err, sc.Errors.PlacementError)
}
return nil
} | step_definitions/steps.go | 0.616243 | 0.493103 | steps.go | starcoder |
package day369
import "math"
// Price is a stock price represented as float64.
type Price float64

// Timestamp is a Unix timestamp.
type Timestamp uint

// Datapoint is a timestamp and a price.
type Datapoint struct {
	Timestamp Timestamp
	Price     Price
}

// StockService is the defined API for stock data points.
type StockService interface {
	// AddOrUpdate inserts a new data point, or replaces the price of an
	// existing data point carrying the same timestamp.
	AddOrUpdate(Datapoint)
	// Remove deletes the data point with the given timestamp, if present.
	Remove(Timestamp)
	// Max returns the maximum stored price.
	Max() Price
	// Min returns the minimum stored price.
	Min() Price
	// Avg returns the mean of all stored prices.
	Avg() Price
}

// stockService maintains running min/max/avg aggregates alongside the raw
// data so the accessors are O(1); min/max are recomputed in O(n) only when a
// mutation invalidates them.
type stockService struct {
	max, min, avg Price
	n             uint
	data          map[Timestamp]Datapoint
}

func (ss *stockService) AddOrUpdate(d Datapoint) {
	total := ss.avg * Price(ss.n)
	if prev, exists := ss.data[d.Timestamp]; exists {
		ss.data[d.Timestamp] = d
		ss.avg = (total + d.Price - prev.Price) / Price(ss.n)
		// The cached extremes are stale if the new price falls outside them,
		// OR if the replaced price *was* an extreme: the new value may lie
		// strictly inside [min, max], leaving a stale min/max behind (the
		// previous check missed that case).
		if prev.Price == ss.min || prev.Price == ss.max || d.Price < ss.min || d.Price > ss.max {
			ss.min, ss.max = ss.recalculateMinMax()
		}
	} else {
		if d.Price > ss.max {
			ss.max = d.Price
		}
		if d.Price < ss.min {
			ss.min = d.Price
		}
		ss.n++
		ss.avg = (total + d.Price) / Price(ss.n)
		ss.data[d.Timestamp] = d
	}
}

func (ss *stockService) Remove(t Timestamp) {
	if prev, exists := ss.data[t]; exists {
		delete(ss.data, t)
		total := ss.avg * Price(ss.n)
		ss.n--
		if ss.n == 0 {
			// Reset to the sentinel extremes used by NewStockService.
			ss.min = math.MaxFloat64
			ss.max = -math.MaxFloat64
			ss.avg = 0
			return
		}
		ss.avg = (total - prev.Price) / Price(ss.n)
		// Only a removal of the current extreme can invalidate min/max.
		if prev.Price == ss.min || prev.Price == ss.max {
			ss.min, ss.max = ss.recalculateMinMax()
		}
	}
}

// recalculateMinMax scans all stored data points and returns (min, max).
// Callers must ensure the data map is non-empty.
func (ss *stockService) recalculateMinMax() (Price, Price) {
	// Seed with the same sentinels as NewStockService; the previous seed of
	// max = 0 produced a wrong maximum when every stored price was negative.
	min := Price(math.MaxFloat64)
	max := Price(-math.MaxFloat64)
	for _, dp := range ss.data {
		if dp.Price < min {
			min = dp.Price
		}
		if dp.Price > max {
			max = dp.Price
		}
	}
	return min, max
}

func (ss *stockService) Max() Price {
	return ss.max
}

func (ss *stockService) Min() Price {
	return ss.min
}

func (ss *stockService) Avg() Price {
	return ss.avg
}

// NewStockService returns a new StockService for a single
// stock.
func NewStockService() StockService {
	return &stockService{
		min:  math.MaxFloat64,
		max:  -math.MaxFloat64,
		data: make(map[Timestamp]Datapoint),
	}
}
package team4
import (
"math"
"github.com/SOMAS2020/SOMAS2020/internal/common/baseclient"
"github.com/SOMAS2020/SOMAS2020/internal/common/shared"
)
// MakeDisasterPrediction is called on each client for them to make a prediction about a disaster
// Prediction includes location, magnitude, confidence etc
// COMPULSORY, you need to implement this method
func (c *client) MakeDisasterPrediction() shared.DisasterPredictionInfo {
	// Use the sample mean of each field as our prediction
	meanDisaster := getMeanDisaster(c.obs.pastDisastersList)
	prediction := shared.DisasterPrediction{
		CoordinateX: meanDisaster.CoordinateX,
		CoordinateY: meanDisaster.CoordinateY,
		Magnitude:   meanDisaster.Magnitude,
		// getMeanDisaster returns turns-left in the Turn field, not an
		// absolute turn number.
		TimeLeft: meanDisaster.Turn,
	}
	// Use (variance limit - mean(sample variance)), where the mean is taken over each field, as confidence
	// Use a variance limit of 100 for now //TODO: tune this
	varianceLimit := 100.0
	prediction.Confidence = determineConfidence(c.obs.pastDisastersList, meanDisaster, varianceLimit)
	// For MVP, share this prediction with all islands since trust has not yet been implemented
	islandsToSend := make([]shared.ClientID, len(c.ServerReadHandle.GetGameState().ClientLifeStatuses))
	for index, id := range shared.TeamIDs {
		islandsToSend[index] = id
	}
	// Return all prediction info and store our own island's prediction so
	// ReceiveDisasterPredictions can weigh it against other islands' inputs.
	predictionInfo := shared.DisasterPredictionInfo{
		PredictionMade: prediction,
		TeamsOfferedTo: islandsToSend,
	}
	c.obs.iifoObs.ourDisasterPrediction = predictionInfo
	return predictionInfo
}
// getMeanDisaster averages each field of the past disasters and returns the
// result as a single DisasterInfo. With no history it returns a zero-location,
// zero-magnitude disaster with Turn=1000 (i.e. "far in the future").
func getMeanDisaster(pastDisastersList baseclient.PastDisastersList) baseclient.DisasterInfo {
	totalCoordinateX, totalCoordinateY, totalMagnitude, totalTurn := 0.0, 0.0, 0.0, 0.0
	numberDisastersPassed := float64(len(pastDisastersList))
	if numberDisastersPassed == 0 {
		return baseclient.DisasterInfo{CoordinateX: 0, CoordinateY: 0, Magnitude: 0, Turn: 1000}
	}
	for _, disaster := range pastDisastersList {
		totalCoordinateX += disaster.CoordinateX
		totalCoordinateY += disaster.CoordinateY
		totalMagnitude += float64(disaster.Magnitude)
		totalTurn += float64(disaster.Turn)
	}
	meanDisaster := baseclient.DisasterInfo{
		CoordinateX: totalCoordinateX / numberDisastersPassed,
		CoordinateY: totalCoordinateY / numberDisastersPassed,
		Magnitude:   totalMagnitude / numberDisastersPassed,
		// NOTE(review): this formula (floor of the mean turn minus the total
		// turn modulo the disaster count) is opaque, and it is not obvious it
		// matches the stated "turns left until the next disaster"; if
		// disasters are periodic it resembles mean-period minus elapsed
		// phase, but that should be verified. TODO confirm intent.
		Turn: uint(math.Floor(totalTurn/numberDisastersPassed)) - uint(totalTurn)%uint(numberDisastersPassed), // gives the number of turns left until the next disaster
	}
	return meanDisaster
}
// determineConfidence scores how much to trust the mean-based prediction: it
// averages the per-field sample variances of the past disasters around
// meanDisaster, caps that average at varianceLimit, and returns the rounded
// headroom (varianceLimit - averageVariance). Low historical variance
// therefore yields confidence close to varianceLimit; no history yields 0.
func determineConfidence(pastDisastersList baseclient.PastDisastersList, meanDisaster baseclient.DisasterInfo, varianceLimit float64) float64 {
	numberDisastersPassed := float64(len(pastDisastersList))
	if numberDisastersPassed == 0 {
		// No history to judge against: without this guard the divisions
		// below produce 0/0 = NaN and the function returned NaN.
		return 0
	}
	totalDisaster := baseclient.DisasterInfo{}
	// Find the sum of the square of the difference between the actual and mean, for each field
	for _, disaster := range pastDisastersList {
		totalDisaster.CoordinateX += math.Pow(disaster.CoordinateX-meanDisaster.CoordinateX, 2)
		totalDisaster.CoordinateY += math.Pow(disaster.CoordinateY-meanDisaster.CoordinateY, 2)
		totalDisaster.Magnitude += math.Pow(disaster.Magnitude-meanDisaster.Magnitude, 2)
		// Convert each operand to float64 *before* subtracting: Turn is
		// unsigned, so the previous uint subtraction wrapped around to a
		// huge value whenever disaster.Turn < meanDisaster.Turn, blowing up
		// the variance and zeroing the confidence.
		totalDisaster.Turn += uint(math.Round(math.Pow(float64(disaster.Turn)-float64(meanDisaster.Turn), 2)))
	}
	// Find the sum of the variances and the average variance
	varianceSum := (totalDisaster.CoordinateX + totalDisaster.CoordinateY + totalDisaster.Magnitude + float64(totalDisaster.Turn)) / numberDisastersPassed
	averageVariance := varianceSum / 4
	// Implement the variance cap chosen
	if averageVariance > varianceLimit {
		averageVariance = varianceLimit
	}
	// Return the confidence of the prediction
	return math.Round(varianceLimit - averageVariance)
}
// ReceiveDisasterPredictions provides each client with the prediction info, in addition to the source island,
// that they have been granted access to see
// COMPULSORY, you need to implement this method
func (c *client) ReceiveDisasterPredictions(receivedPredictions shared.ReceivedDisasterPredictionsDict) {
	// If we assume that we trust each island equally (including ourselves), then take the final prediction
	// of disaster as being the weighted mean of predictions according to confidence
	numberOfPredictions := float64(len(receivedPredictions) + 1)
	predictionInfo := c.obs.iifoObs.ourDisasterPrediction.PredictionMade
	selfConfidence := predictionInfo.Confidence
	// Initialise running totals using our own island's predictions
	totalCoordinateX := selfConfidence * predictionInfo.CoordinateX
	totalCoordinateY := selfConfidence * predictionInfo.CoordinateY
	totalMagnitude := selfConfidence * predictionInfo.Magnitude
	// TimeLeft is unsigned, so the confidence weight is rounded to a uint
	// before multiplying.
	totalTimeLeft := uint(math.Round(selfConfidence)) * predictionInfo.TimeLeft
	totalConfidence := selfConfidence
	// Add other island's predictions using their confidence values
	for _, prediction := range receivedPredictions {
		totalCoordinateX += prediction.PredictionMade.Confidence * prediction.PredictionMade.CoordinateX
		totalCoordinateY += prediction.PredictionMade.Confidence * prediction.PredictionMade.CoordinateY
		totalMagnitude += prediction.PredictionMade.Confidence * prediction.PredictionMade.Magnitude
		totalTimeLeft += uint(math.Round(prediction.PredictionMade.Confidence)) * prediction.PredictionMade.TimeLeft
		totalConfidence += prediction.PredictionMade.Confidence
	}
	// Finally get the final prediction generated by considering predictions from all islands that we have available
	// This result is currently unused but would be used in decision making in full implementation
	if totalConfidence == 0 {
		// Avoid dividing by zero. All weighted totals are also zero in this
		// case, so the final prediction degenerates to zeros with
		// Confidence = 0.
		totalConfidence = numberOfPredictions
	}
	finalPrediction := shared.DisasterPrediction{
		CoordinateX: totalCoordinateX / totalConfidence,
		CoordinateY: totalCoordinateY / totalConfidence,
		Magnitude:   totalMagnitude / totalConfidence,
		// +0.5 before the uint conversion rounds to nearest instead of
		// truncating.
		TimeLeft:   uint((float64(totalTimeLeft) / totalConfidence) + 0.5),
		Confidence: totalConfidence / numberOfPredictions,
	}
	c.obs.iifoObs.finalDisasterPrediction = finalPrediction
}
// MakeForageInfo allows clients to share their most recent foraging DecisionMade, ResourceObtained from it to
// other clients.
// OPTIONAL. If this is not implemented then all values are nil.
func (c *client) MakeForageInfo() shared.ForageShareInfo {
	// Collect the alive islands once. (Previously the >= 5 branch rebuilt
	// the whole recipient list inside this loop on every iteration, with an
	// identical result each time.)
	var alive []shared.ClientID
	for id, status := range c.getAllLifeStatus() {
		if status != shared.Dead {
			alive = append(alive, id)
		}
	}
	// Who to share to
	var shareTo []shared.ClientID
	if c.getTurn() < 5 {
		// Send to everyone (alive) for the first five rounds
		shareTo = alive
	} else if len(alive) > 0 {
		// Send only to islands who sent to us in the previous round
		shareTo = c.returnPreviousForagers()
		// Maybe also add islands we trust in case they won't send to us unless we send to them?
		shareTo = append(shareTo, c.trustMatrix.trustedClients(0.70)...)
		shareTo = createClientSet(shareTo)
	}
	// Greediness and selfishness to lie?
	var resourceObtained shared.Resources = 0
	var decisionMade shared.ForageDecision = shared.ForageDecision{}
	if len(c.forage.forageHistory) > 0 {
		lastRound := c.forage.forageHistory[len(c.forage.forageHistory)-1]
		decisionMade = lastRound.decision
		resourceObtained = lastRound.resourceReturn
	}
	forageInfo := shared.ForageShareInfo{
		ShareTo:          shareTo,
		ResourceObtained: resourceObtained,
		DecisionMade:     decisionMade,
		SharedFrom:       c.GetID(),
	}
	return forageInfo
}
//ReceiveForageInfo lets clients know what other clients has obtained from their most recent foraging attempt.
//Most recent foraging attempt includes information about: foraging DecisionMade and ResourceObtained as well
//as where this information came from.
func (c *client) ReceiveForageInfo(neighbourForaging []shared.ForageShareInfo) {
	// Append this round's batch; returnPreviousForagers reads the last entry.
	c.forage.receivedForageData = append(c.forage.receivedForageData, neighbourForaging)
	//Give trust to island that contribute to this?
}
func (c *client) returnPreviousForagers() []shared.ClientID {
data := c.forage.receivedForageData
if len(data) < 1 {
return nil
}
lastEntry := data[len(data)-1]
var shareTo []shared.ClientID
for _, teamReturns := range lastEntry {
shareTo = append(shareTo, teamReturns.SharedFrom)
}
return shareTo
} | internal/clients/team4/iifo.go | 0.726329 | 0.415729 | iifo.go | starcoder |
package v1alpha1
// NOTE(review): these declarations follow the client-gen "expansion
// interface" pattern (empty interfaces embedded by generated listers so that
// hand-written methods can be attached). If this file is generated, change
// the generator inputs rather than editing by hand — TODO confirm whether a
// "Code generated" header precedes this chunk.

// CaptureListerExpansion allows custom methods to be added to
// CaptureLister.
type CaptureListerExpansion interface{}

// CaptureNamespaceListerExpansion allows custom methods to be added to
// CaptureNamespaceLister.
type CaptureNamespaceListerExpansion interface{}

// ImageListerExpansion allows custom methods to be added to
// ImageLister.
type ImageListerExpansion interface{}

// ImageNamespaceListerExpansion allows custom methods to be added to
// ImageNamespaceLister.
type ImageNamespaceListerExpansion interface{}

// InstanceListerExpansion allows custom methods to be added to
// InstanceLister.
type InstanceListerExpansion interface{}

// InstanceNamespaceListerExpansion allows custom methods to be added to
// InstanceNamespaceLister.
type InstanceNamespaceListerExpansion interface{}

// KeyListerExpansion allows custom methods to be added to
// KeyLister.
type KeyListerExpansion interface{}

// KeyNamespaceListerExpansion allows custom methods to be added to
// KeyNamespaceLister.
type KeyNamespaceListerExpansion interface{}

// NetworkListerExpansion allows custom methods to be added to
// NetworkLister.
type NetworkListerExpansion interface{}

// NetworkNamespaceListerExpansion allows custom methods to be added to
// NetworkNamespaceLister.
type NetworkNamespaceListerExpansion interface{}

// NetworkPortListerExpansion allows custom methods to be added to
// NetworkPortLister.
type NetworkPortListerExpansion interface{}

// NetworkPortNamespaceListerExpansion allows custom methods to be added to
// NetworkPortNamespaceLister.
type NetworkPortNamespaceListerExpansion interface{}

// NetworkPortAttachListerExpansion allows custom methods to be added to
// NetworkPortAttachLister.
type NetworkPortAttachListerExpansion interface{}

// NetworkPortAttachNamespaceListerExpansion allows custom methods to be added to
// NetworkPortAttachNamespaceLister.
type NetworkPortAttachNamespaceListerExpansion interface{}

// OperationsListerExpansion allows custom methods to be added to
// OperationsLister.
type OperationsListerExpansion interface{}

// OperationsNamespaceListerExpansion allows custom methods to be added to
// OperationsNamespaceLister.
type OperationsNamespaceListerExpansion interface{}

// SnapshotListerExpansion allows custom methods to be added to
// SnapshotLister.
type SnapshotListerExpansion interface{}

// SnapshotNamespaceListerExpansion allows custom methods to be added to
// SnapshotNamespaceLister.
type SnapshotNamespaceListerExpansion interface{}

// VolumeListerExpansion allows custom methods to be added to
// VolumeLister.
type VolumeListerExpansion interface{}

// VolumeNamespaceListerExpansion allows custom methods to be added to
// VolumeNamespaceLister.
type VolumeNamespaceListerExpansion interface{}

// VolumeAttachListerExpansion allows custom methods to be added to
// VolumeAttachLister.
type VolumeAttachListerExpansion interface{}

// VolumeAttachNamespaceListerExpansion allows custom methods to be added to
// VolumeAttachNamespaceLister.
type VolumeAttachNamespaceListerExpansion interface{}
package metrics
import (
"context"
"fmt"
rfcontext "github.com/grailbio/reflow/context"
)
// Gauge wraps prometheus.Gauge. Gauges can be set to arbitrary values.
// Concrete implementations come from the Client attached to the context;
// when metrics are disabled a no-op implementation is used (see getGauge).
type Gauge interface {
	// Set updates the value of the gauge
	Set(float64)
	// Inc increments the Gauge by 1. Use Add to increment it by arbitrary
	// values.
	Inc()
	// Dec decrements the Gauge by 1. Use Sub to decrement it by arbitrary
	// values.
	Dec()
	// Add adds the given value to the Gauge. (The value can be negative,
	// resulting in a decrease of the Gauge.)
	Add(float64)
	// Sub subtracts the given value from the Gauge. (The value can be
	// negative, resulting in an increase of the Gauge.)
	Sub(float64)
}

// Counter wraps prometheus.Counter. Counters can only increase in value.
type Counter interface {
	// Inc adds one to the counter
	Inc()
	// Add adds the given value to the counter. It panics if the value is <
	// 0.
	Add(float64)
}

// Histogram wraps prometheus.Histogram. Histograms record observations of events and discretize
// them into preconfigured buckets.
type Histogram interface {
	// Observe adds a sample observation to the histogram
	Observe(float64)
}
// labelSet is the ordered list of label names a metric is declared with.
type labelSet []string

// gaugeOpts declares a gauge: its label names and help text.
type gaugeOpts struct {
	Labels labelSet
	Help string
}

// counterOpts declares a counter: its label names and help text.
type counterOpts struct {
	Labels labelSet
	Help string
}

// histogramOpts declares a histogram: its label names, help text and bucket
// boundaries.
type histogramOpts struct {
	Labels labelSet
	Help string
	Buckets []float64
}

// mustCompleteLabels reports whether labels supplies exactly the names in
// labelSet: the same count, with every declared name present as a key.
// (Assumes labelSet holds no duplicate names — TODO confirm at declaration
// sites, since a duplicate would make the length check too permissive.)
func mustCompleteLabels(labelSet labelSet, labels map[string]string) bool {
	if len(labelSet) != len(labels) {
		return false
	}
	for _, name := range labelSet {
		if _, present := labels[name]; !present {
			return false
		}
	}
	return true
}
// getGauge inspects the given context and returns the requested Gauge if
// metrics are enabled on the context; otherwise it returns a no-op gauge.
// It panics when the gauge was never declared in Gauges, or when labels does
// not exactly match the declared label set.
func getGauge(ctx context.Context, name string, labels map[string]string) Gauge {
	if !On(ctx) {
		return &nopGauge{}
	}
	opts, declared := Gauges[name]
	if !declared {
		panic(fmt.Sprintf("attempted to get undeclared gauge %s", name))
	}
	if !mustCompleteLabels(opts.Labels, labels) {
		panic(fmt.Sprintf("attempted to get gauge %s with invalid labels, expected %v but got %v",
			name, opts.Labels, labels))
	}
	return metricsClient(ctx).GetGauge(name, labels)
}
// getCounter inspects the given context and returns the requested Counter if
// metrics are enabled on the context; otherwise it returns a no-op counter.
// It panics when the counter was never declared in Counters, or when labels
// does not exactly match the declared label set.
func getCounter(ctx context.Context, name string, labels map[string]string) Counter {
	if !On(ctx) {
		return &nopCounter{}
	}
	if opts, ok := Counters[name]; !ok {
		msg := fmt.Sprintf("attempted to get undeclared counter %s", name)
		panic(msg)
	} else {
		if !mustCompleteLabels(opts.Labels, labels) {
			// "get", not "set": this accessor never mutates the counter; the
			// previous message was a copy-paste inconsistency with getGauge.
			// (Also renamed the string variable from `err` to `msg` — it is
			// not an error value.)
			msg := fmt.Sprintf("attempted to get counter %s with invalid labels, expected %v but got %v",
				name, opts.Labels, labels)
			panic(msg)
		}
	}
	return metricsClient(ctx).GetCounter(name, labels)
}
// getHistogram inspects the given context and returns the requested Histogram
// if metrics are enabled on the context; otherwise it returns a no-op
// histogram. It panics when the histogram was never declared in Histograms,
// or when labels does not exactly match the declared label set.
func getHistogram(ctx context.Context, name string, labels map[string]string) Histogram {
	if !On(ctx) {
		return &nopHistogram{}
	}
	if opts, ok := Histograms[name]; !ok {
		msg := fmt.Sprintf("attempted to get undeclared histogram %s", name)
		panic(msg)
	} else {
		if !mustCompleteLabels(opts.Labels, labels) {
			// "get", not "set": this accessor never mutates the histogram;
			// the previous message was a copy-paste inconsistency with
			// getGauge. (Also renamed `err` to `msg` — it is a string, not
			// an error value.)
			msg := fmt.Sprintf("attempted to get histogram %s with invalid labels, expected %v but got %v",
				name, opts.Labels, labels)
			panic(msg)
		}
	}
	return metricsClient(ctx).GetHistogram(name, labels)
}
// Client is a sink for metrics.
type Client interface {
GetGauge(name string, labels map[string]string) Gauge
GetCounter(name string, labels map[string]string) Counter
GetHistogram(name string, labels map[string]string) Histogram
}
// NopClient is the default metrics client; it does nothing.
var NopClient Client = &nopClient{}
// WithClient returns a context that routes metrics to the provided Client.
// A nil client leaves the context unchanged.
// (Previous comment mentioned "trace events"; this package records metrics.)
func WithClient(ctx context.Context, client Client) context.Context {
	if client == nil {
		return ctx
	}
	return context.WithValue(ctx, rfcontext.MetricsClientKey, client)
}
// On returns true if there is a current Client associated with the
// provided context (i.e. WithClient was used somewhere up the chain).
func On(ctx context.Context) bool {
	_, ok := ctx.Value(rfcontext.MetricsClientKey).(Client)
	return ok
}
// metricsClient returns the Client stored on the context. The type assertion
// panics if no client is set; callers must guard with On(ctx) first.
func metricsClient(ctx context.Context) Client {
	return ctx.Value(rfcontext.MetricsClientKey).(Client)
}
package edge_compute
import (
"encoding/json"
)
// MetricsData The data points in a metrics collection.
// At most one of Matrix/Vector is expected to be populated.
type MetricsData struct {
	Matrix *DataMatrix `json:"matrix,omitempty"`
	Vector *DataVector `json:"vector,omitempty"`
}

// NewMetricsData instantiates a new MetricsData object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewMetricsData() *MetricsData {
	return &MetricsData{}
}

// NewMetricsDataWithDefaults instantiates a new MetricsData object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewMetricsDataWithDefaults() *MetricsData {
	return &MetricsData{}
}

// GetMatrix returns the Matrix field value if set, zero value otherwise.
func (o *MetricsData) GetMatrix() DataMatrix {
	if o != nil && o.Matrix != nil {
		return *o.Matrix
	}
	var zero DataMatrix
	return zero
}

// GetMatrixOk returns a tuple with the Matrix field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MetricsData) GetMatrixOk() (*DataMatrix, bool) {
	if o != nil && o.Matrix != nil {
		return o.Matrix, true
	}
	return nil, false
}

// HasMatrix returns a boolean if a field has been set.
func (o *MetricsData) HasMatrix() bool {
	return o != nil && o.Matrix != nil
}

// SetMatrix gets a reference to the given DataMatrix and assigns it to the Matrix field.
func (o *MetricsData) SetMatrix(v DataMatrix) {
	o.Matrix = &v
}

// GetVector returns the Vector field value if set, zero value otherwise.
func (o *MetricsData) GetVector() DataVector {
	if o != nil && o.Vector != nil {
		return *o.Vector
	}
	var zero DataVector
	return zero
}

// GetVectorOk returns a tuple with the Vector field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *MetricsData) GetVectorOk() (*DataVector, bool) {
	if o != nil && o.Vector != nil {
		return o.Vector, true
	}
	return nil, false
}

// HasVector returns a boolean if a field has been set.
func (o *MetricsData) HasVector() bool {
	return o != nil && o.Vector != nil
}

// SetVector gets a reference to the given DataVector and assigns it to the Vector field.
func (o *MetricsData) SetVector(v DataVector) {
	o.Vector = &v
}

// MarshalJSON serializes only the fields that are set.
func (o MetricsData) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if o.Matrix != nil {
		out["matrix"] = o.Matrix
	}
	if o.Vector != nil {
		out["vector"] = o.Vector
	}
	return json.Marshal(out)
}
type NullableMetricsData struct {
value *MetricsData
isSet bool
}
func (v NullableMetricsData) Get() *MetricsData {
return v.value
}
func (v *NullableMetricsData) Set(val *MetricsData) {
v.value = val
v.isSet = true
}
func (v NullableMetricsData) IsSet() bool {
return v.isSet
}
func (v *NullableMetricsData) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableMetricsData(val *MetricsData) *NullableMetricsData {
return &NullableMetricsData{value: val, isSet: true}
}
func (v NullableMetricsData) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableMetricsData) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | pkg/edge_compute/model_metrics_data.go | 0.854748 | 0.61477 | model_metrics_data.go | starcoder |
package generation
import (
"math"
"github.com/flowmatters/openwater-core/conv/units"
"github.com/flowmatters/openwater-core/data"
)
/*OW-SPEC
DynamicSednetGully:
inputs:
quickflow: m^3.s^-1
year: year
AnnualRunoff: 'mm.yr^-1'
annualLoad: ''
states:
parameters:
YearDisturbance: ''
GullyEndYear: ''
Area: m^2
averageGullyActivityFactor: '[0,3]'
GullyAnnualAverageSedimentSupply: 't.yr^-1'
GullyPercentFine: 'Average clay + silt percentage of gully material'
managementPracticeFactor: ''
longtermRunoffFactor: ''
dailyRunoffPowerFactor: ''
sdrFine: ''
sdrCoarse: ''
timeStepInSeconds: '[0,100000000]s Duration of timestep in seconds, default=86400'
outputs:
fineLoad: kg
coarseLoad: kg
generatedFine: kg
generatedCoarse: kg
implementation:
function: sednetGullyOrig
type: scalar
lang: go
outputs: params
init:
zero: true
tags:
constituent generation
sediment
gully
*/
// gullyExportFn computes the (fine, coarse) daily gully sediment loads in kg
// from (dailyRunoff, annualRunoff, area, propFine, activityFactor,
// managementPracticeFactor, annualLoad, annualSupply, longTermRunoffFactor,
// dailyRunoffPowerFactor).
type gullyExportFn func(float64, float64, float64, float64, float64, float64, float64, float64, float64, float64) (float64, float64)
// sednetGully runs the daily gully sediment-generation loop, delegating the
// per-day load calculation to calc and applying the percent delivery ratios
// (sdrFine, sdrCoarse) to the generated loads.
func sednetGully(quickflow, year, annualRunoff_ts, annualLoad_ts data.ND1Float64,
	yearDisturbance, gullyEndYear, area, averageGullyActivityFactor,
	annualAverageSedimentSupply, percentFine,
	managementPracticeFactor, longtermRunoffFactor, dailyRunoffPowerFactor,
	sdrFine, sdrCoarse, timestepInSeconds float64,
	fineLoad, coarseLoad, generatedFine, generatedCoarse data.ND1Float64, calc gullyExportFn) {
	numDays := quickflow.Len1()
	pos := []int{0}
	fineFraction := percentFine / 100
	for d := 0; d < numDays; d++ {
		pos[0] = d
		currentYear := year.Get(pos)
		loadForYear := annualLoad_ts.Get(pos)
		runoffForYear := annualRunoff_ts.Get(pos)
		flow := quickflow.Get(pos)
		// Before the disturbance year the gully does not exist: no load.
		if currentYear < yearDisturbance {
			fineLoad.Set(pos, 0)
			coarseLoad.Set(pos, 0)
			continue
		}
		activity := 1.0
		if currentYear > gullyEndYear {
			activity = averageGullyActivityFactor
		}
		// No daily or annual runoff means no sediment is mobilised.
		if flow == 0 || runoffForYear == 0 { //|| annualAverageSedimentSupply == 0 {
			fineLoad.Set(pos, 0)
			coarseLoad.Set(pos, 0)
			continue
		}
		fineKg, coarseKg := calc(flow, runoffForYear, area, fineFraction, activity, managementPracticeFactor,
			loadForYear, annualAverageSedimentSupply, longtermRunoffFactor, dailyRunoffPowerFactor)
		// Convert the per-day mass into a per-second rate over the timestep.
		fineKg /= timestepInSeconds
		coarseKg /= timestepInSeconds
		fineLoad.Set(pos, fineKg*(sdrFine*0.01))
		coarseLoad.Set(pos, coarseKg*(sdrCoarse*0.01))
		generatedFine.Set(pos, fineKg)
		generatedCoarse.Set(pos, coarseKg)
	}
}
// sednetGullyOrig is the model entry point registered in the OW-SPEC block
// above; it runs sednetGully with the original gully load formulation
// (gullyLoadOrig) as the per-day calculation.
func sednetGullyOrig(quickflow, year, annualRunoff, annualLoad data.ND1Float64,
	yearDisturbance, gullyEndYear, area, averageGullyActivityFactor,
	annualAverageSedimentSupply, percentFine,
	managementPracticeFactor, longtermRunoffFactor, dailyRunoffPowerFactor,
	sdrFine, sdrCoarse, timestepInSeconds float64,
	fineLoad, coarseLoad, generatedFine, generatedCoarse data.ND1Float64) {
	sednetGully(quickflow, year, annualRunoff, annualLoad,
		yearDisturbance, gullyEndYear, area, averageGullyActivityFactor,
		annualAverageSedimentSupply, percentFine,
		managementPracticeFactor, longtermRunoffFactor, dailyRunoffPowerFactor,
		sdrFine, sdrCoarse, timestepInSeconds, fineLoad, coarseLoad, generatedFine, generatedCoarse, gullyLoadOrig)
}
// gullyLoadOrig converts the annual average gully sediment supply (tonnes/yr)
// into a daily (fine, coarse) load in kg, scaling by the ratio of today's
// runoff (raised to dailyRunoffPowerfactor) to the long-term runoff factor.
// NOTE(review): annualLoad is currently unused — see the commented-out
// math.Max below; confirm whether it should floor the supply.
func gullyLoadOrig(dailyRunoff, annualRunoff, area, propFine, activityFactor, managementPracticeFactor, annualLoad, annualSupply,
	longTermRunoffFactor, dailyRunoffPowerfactor float64) (float64, float64) {
	//Scott's simplified factor to break annual load into daily
	annualToDailyAdjustmentFactor := 1 / 365.25
	thisYearsSedimentSupply := annualSupply // math.Max(annualSupply, annualLoad)
	dailyRunoffFactor := 1.0
	//Stop NaN's on models that don't have the required longterm flow analysis
	if longTermRunoffFactor > 0 {
		// A non-positive power factor falls back to a linear relationship.
		if dailyRunoffPowerfactor <= 0 {
			dailyRunoffPowerfactor = 1
		}
		//Swap these over if reverting to Scott's power-based event-to-annual adjustment
		//Scott's complex version with stuffed raised to to a power
		//all cumecs
		dailyRunoffFactor = math.Pow(dailyRunoff, dailyRunoffPowerfactor) / longTermRunoffFactor
	}
	Gully_Daily_Load_kg_Fine := annualToDailyAdjustmentFactor * dailyRunoffFactor * propFine * activityFactor * managementPracticeFactor * thisYearsSedimentSupply * units.TONNES_TO_KG
	Gully_Daily_Load_kg_Coarse := annualToDailyAdjustmentFactor * dailyRunoffFactor * (1 - propFine) * thisYearsSedimentSupply * managementPracticeFactor * units.TONNES_TO_KG
	return Gully_Daily_Load_kg_Fine, Gully_Daily_Load_kg_Coarse
}
package iso20022
// Specifies rates.
type CorporateActionRate1 struct {
// Annual rate of a financial instrument.
Interest *RateAndAmountFormat1Choice `xml:"Intrst,omitempty"`
// Index rate related to the interest rate of the forthcoming interest payment.
RelatedIndex *RateFormat1Choice `xml:"RltdIndx,omitempty"`
// Percentage of securities the offeror/issuer will purchase or redeem under the terms of the event. This can be a number or the term "any and all".
PercentageSought *RateFormat1Choice `xml:"PctgSght,omitempty"`
// Rate of discount for securities purchased through a reinvestment scheme as compared to the current market price of security.
ReinvestmentDiscountToMarket *RateFormat1Choice `xml:"RinvstmtDscntToMkt,omitempty"`
// Margin allowed over or under a given rate.
Spread *RateFormat1Choice `xml:"Sprd,omitempty"`
// Acceptable price increment used for submitting a bid.
BidInterval *AmountAndRateFormat3Choice `xml:"BidIntrvl,omitempty"`
// Rate used to calculate the amount of the charges/fees that cannot be categorised.
Charges *RateAndAmountFormat1Choice `xml:"Chrgs,omitempty"`
}
func (c *CorporateActionRate1) AddInterest() *RateAndAmountFormat1Choice {
c.Interest = new(RateAndAmountFormat1Choice)
return c.Interest
}
func (c *CorporateActionRate1) AddRelatedIndex() *RateFormat1Choice {
c.RelatedIndex = new(RateFormat1Choice)
return c.RelatedIndex
}
func (c *CorporateActionRate1) AddPercentageSought() *RateFormat1Choice {
c.PercentageSought = new(RateFormat1Choice)
return c.PercentageSought
}
func (c *CorporateActionRate1) AddReinvestmentDiscountToMarket() *RateFormat1Choice {
c.ReinvestmentDiscountToMarket = new(RateFormat1Choice)
return c.ReinvestmentDiscountToMarket
}
func (c *CorporateActionRate1) AddSpread() *RateFormat1Choice {
c.Spread = new(RateFormat1Choice)
return c.Spread
}
func (c *CorporateActionRate1) AddBidInterval() *AmountAndRateFormat3Choice {
c.BidInterval = new(AmountAndRateFormat3Choice)
return c.BidInterval
}
func (c *CorporateActionRate1) AddCharges() *RateAndAmountFormat1Choice {
c.Charges = new(RateAndAmountFormat1Choice)
return c.Charges
} | CorporateActionRate1.go | 0.801042 | 0.506164 | CorporateActionRate1.go | starcoder |
package wire
import (
"bytes"
"encoding/binary"
"io"
"time"
"github.com/btgsuite/btgd/chaincfg/chainhash"
)
// MaxSolutionSize is the max known Equihash solution size (1344 is for Equihash-200,9)
const MaxSolutionSize = 1344

// MaxBlockHeaderPayload is the maximum number of bytes a block header can be.
// 144 covers the fixed fields plus the solution length varint.
const MaxBlockHeaderPayload = 144 + MaxSolutionSize

// legacyBlockHeaderLen is the number of bytes for a block header in BTC.
// (Previous comment referred to it by the stale name "blockHeaderLen".)
const legacyBlockHeaderLen = 80
// BlockHeader defines information about a block and is used in the bitcoin
// block (MsgBlock) and headers (MsgHeaders) messages.
type BlockHeader struct {
	// Version of the block. This is not the same as the protocol version.
	Version int32

	// Hash of the previous block header in the block chain.
	PrevBlock chainhash.Hash

	// Merkle tree reference to hash of all transactions for the block.
	MerkleRoot chainhash.Hash

	// The block height
	Height uint32

	// Reserved bytes (always zero). ("Reversed" was a typo.)
	Reserved [7]uint32

	// Time the block was created. This is, unfortunately, encoded as a
	// uint32 on the wire and therefore is limited to 2106.
	Timestamp time.Time

	// Difficulty target for the block.
	Bits uint32

	// Nonce used to generate the block. 256 bits, unlike BTC's 32-bit nonce.
	Nonce [32]byte

	// Equihash solution. Empty for legacy (pre-fork BTC) headers.
	Solution []byte
}
// BlockHeaderBytesFromBuffer returns a slice of the input buffer with the data
// after the block header truncated. It returns nil when the buffer does not
// contain a decodable block header.
func BlockHeaderBytesFromBuffer(buffer []byte) []byte {
	var h BlockHeader
	// Previously the Deserialize error was silently ignored, which could
	// leave h partially populated and produce an out-of-range header length
	// for truncated or corrupt input.
	if err := h.Deserialize(bytes.NewReader(buffer)); err != nil {
		return nil
	}
	return buffer[:h.BlockHeaderLen()]
}
// BlockHeaderLen returns the serialized size of this header in bytes: the
// 140 fixed bytes plus the varint-prefixed Equihash solution.
func (h *BlockHeader) BlockHeaderLen() int {
	solLen := len(h.Solution)
	return 140 + VarIntSerializeSize(uint64(solLen)) + solLen
}
// BlockHeaderLegacyLen returns the number of bytes for the block header in
// BTC (fixed 80 bytes).
func (h *BlockHeader) BlockHeaderLegacyLen() int {
	return legacyBlockHeaderLen
}
// BlockHash computes the block identifier hash for the given block header.
// An empty Solution selects the legacy (80-byte BTC) serialization for
// hashing.
func (h *BlockHeader) BlockHash() chainhash.Hash {
	return h.blockHashInternal(len(h.Solution) == 0)
}
// blockHashInternal double-SHA256 hashes the serialized header, using the
// legacy 80-byte BTC layout when legacy is true.
func (h *BlockHeader) blockHashInternal(legacy bool) chainhash.Hash {
	// Encoding into an in-memory buffer cannot fail except by running out
	// of memory (which would panic), so the write errors are ignored.
	buf := bytes.NewBuffer(make([]byte, 0, MaxBlockHeaderPayload))
	write := writeBlockHeader
	if legacy {
		write = writeBlockHeaderLegacy
	}
	_ = write(buf, 0, h)
	return chainhash.DoubleHashH(buf.Bytes())
}
// BtcDecode decodes r using the bitcoin protocol encoding into the receiver.
// This is part of the Message interface implementation.
// See Deserialize for decoding block headers stored to disk, such as in a
// database, as opposed to decoding block headers from the wire.
// The enc argument is unused; the header wire format does not vary by encoding.
func (h *BlockHeader) BtcDecode(r io.Reader, pver uint32, enc MessageEncoding) error {
	return readBlockHeader(r, pver, h)
}
// BtcEncode encodes the receiver to w using the bitcoin protocol encoding.
// This is part of the Message interface implementation.
// See Serialize for encoding block headers to be stored to disk, such as in a
// database, as opposed to encoding block headers for the wire.
// The enc argument is unused; the header wire format does not vary by encoding.
func (h *BlockHeader) BtcEncode(w io.Writer, pver uint32, enc MessageEncoding) error {
	return writeBlockHeader(w, pver, h)
}
// Deserialize decodes a block header from r into the receiver using a format
// that is suitable for long-term storage such as a database while respecting
// the Version field.
func (h *BlockHeader) Deserialize(r io.Reader) error {
	// At the current time, there is no difference between the wire encoding
	// at protocol version 0 and the stable long-term storage format. As
	// a result, make use of readBlockHeader.
	return readBlockHeader(r, 0, h)
}
// Serialize encodes a block header into w using a format that is suitable for
// long-term storage such as a database while respecting the Version field.
func (h *BlockHeader) Serialize(w io.Writer) error {
	// At the current time, there is no difference between the wire encoding
	// at protocol version 0 and the stable long-term storage format. As
	// a result, make use of writeBlockHeader.
	return writeBlockHeader(w, 0, h)
}
// NewBlockHeader returns a new BlockHeader using the provided version, previous
// block hash, merkle root hash, height, difficulty bits, nonce and Equihash
// solution, with defaults for the remaining fields.
func NewBlockHeader(version int32, prevHash, merkleRootHash *chainhash.Hash,
	height uint32, bits uint32, nonce *[32]byte, solution []byte) *BlockHeader {
	// Copy the solution so later mutation by the caller cannot change the header.
	solutionCopy := make([]byte, len(solution))
	copy(solutionCopy, solution)

	return &BlockHeader{
		Version:    version,
		PrevBlock:  *prevHash,
		MerkleRoot: *merkleRootHash,
		// Limit the timestamp to one second precision since the protocol
		// doesn't support better.
		Timestamp: time.Unix(time.Now().Unix(), 0),
		Height:    height,
		Reserved:  [7]uint32{},
		Bits:      bits,
		Nonce:     *nonce,
		Solution:  solutionCopy,
	}
}
// NewLegacyBlockHeader returns a legacy Bitcoin block header: height zero, the
// 32-bit nonce widened into the 256-bit nonce field, and an empty Equihash
// solution.
func NewLegacyBlockHeader(version int32, prevHash, merkleRootHash *chainhash.Hash,
	bits uint32, nonce uint32) *BlockHeader {
	nonce256 := Uint256FromUint32(nonce)
	return NewBlockHeader(version, prevHash, merkleRootHash, 0, bits, &nonce256, []byte{})
}
// ReadBlockHeaderLegacy reads a legacy (80-byte BTC format) block header from
// r. The 32-bit wire nonce is widened into the 256-bit Nonce field and the
// Solution is set to empty so BlockHash selects legacy hashing.
func ReadBlockHeaderLegacy(r io.Reader, pver uint32, bh *BlockHeader) error {
	var nonce uint32
	err := readElements(r, &bh.Version, &bh.PrevBlock, &bh.MerkleRoot,
		(*uint32Time)(&bh.Timestamp), &bh.Bits, &nonce)
	if err != nil {
		return err
	}
	bh.Nonce = Uint256FromUint32(nonce)
	bh.Solution = []byte{}
	return nil
}
// readBlockHeader reads a Bitcoin Gold block header from r. See Deserialize for
// decoding block headers stored to disk, such as in a database, as opposed to
// decoding from the wire.
func readBlockHeader(r io.Reader, pver uint32, bh *BlockHeader) error {
	if err := readElements(r, &bh.Version, &bh.PrevBlock, &bh.MerkleRoot, &bh.Height); err != nil {
		return err
	}
	// The seven reserved (always-zero) words are serialized individually.
	for i := range bh.Reserved {
		if err := readElement(r, &bh.Reserved[i]); err != nil {
			return err
		}
	}
	if err := readElements(r, (*uint32Time)(&bh.Timestamp), &bh.Bits, &bh.Nonce); err != nil {
		return err
	}
	// Variable-length Equihash solution, capped at the largest known size.
	solution, err := ReadVarBytes(r, pver, MaxSolutionSize, "Solution")
	if err != nil {
		return err
	}
	bh.Solution = solution
	return nil
}
// writeBlockHeaderLegacy writes a legacy (80-byte BTC format) block header to
// w: only the first 4 bytes of the 256-bit nonce are emitted, and the height,
// reserved words and solution are omitted. See Serialize for encoding block
// headers to be stored to disk, as opposed to encoding for the wire.
func writeBlockHeaderLegacy(w io.Writer, pver uint32, bh *BlockHeader) error {
	sec := uint32(bh.Timestamp.Unix())
	nonceUint32 := binary.LittleEndian.Uint32(bh.Nonce[0:4])
	return writeElements(w, bh.Version, &bh.PrevBlock, &bh.MerkleRoot,
		sec, bh.Bits, nonceUint32)
}
// writeBlockHeader writes a Bitcoin Gold block header to w. See Serialize for
// encoding block headers to be stored to disk, such as in a database, as
// opposed to encoding for the wire.
func writeBlockHeader(w io.Writer, pver uint32, bh *BlockHeader) error {
	sec := uint32(bh.Timestamp.Unix())
	if err := writeElements(w, bh.Version, &bh.PrevBlock, &bh.MerkleRoot, bh.Height); err != nil {
		return err
	}
	// The seven reserved words are serialized individually.
	for _, v := range bh.Reserved {
		if err := writeElement(w, v); err != nil {
			return err
		}
	}
	if err := writeElements(w, sec, bh.Bits, bh.Nonce); err != nil {
		return err
	}
	if err := WriteVarBytes(w, pver, bh.Solution); err != nil {
		return err
	}
	return nil
}
package common
import (
"fmt"
"reflect"
)
// StatAggregator is the interface for incremental statistics: values are fed
// one at a time via Aggregate, and the running result is read via Result.
// String identifies the aggregation (e.g. "numeric_max").
type StatAggregator interface {
	Aggregate(interface{}) error
	Result() interface{}
	String() string
}
// numericKind classifies a value into one of the three unified numeric
// families (signed int, unsigned int, float) used by the aggregators.
type numericKind uint

const (
	invalidNum numericKind = iota // not a numeric value
	intNum                        // any signed integer width
	uintNum                       // any unsigned integer width
	floatNum                      // float32 or float64
)
// String returns the type of number.
func (nk numericKind) String() string {
	switch nk {
	case intNum:
		return "int[8|16|32|64]"
	case uintNum:
		return "uint[8|16|32|64]"
	case floatNum:
		return "float[32|64]"
	case invalidNum:
		return "invalid"
	default:
		return "unknown numericKind"
	}
}
// unifyNumericKind widens num to the canonical representation of its family
// (int64, uint64 or float64) and reports which family it belongs to.
// Non-numeric values are returned unchanged with invalidNum.
func unifyNumericKind(num interface{}) (interface{}, numericKind) {
	v := reflect.ValueOf(num)
	switch v.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v.Int(), intNum
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return v.Uint(), uintNum
	case reflect.Float32, reflect.Float64:
		return v.Float(), floatNum
	}
	return num, invalidNum
}
// NumericMaxAggregator tracks the maximum of a stream of numeric values.
// All values must belong to the same numeric family as the first one seen.
type NumericMaxAggregator struct {
	max interface{}
	nk  numericKind
}

// String returns the name.
func (a *NumericMaxAggregator) String() string {
	return "numeric_max"
}

// Aggregate folds num into the running maximum. nil values are skipped;
// non-numeric values or a family mismatch produce an error.
func (a *NumericMaxAggregator) Aggregate(num interface{}) error {
	if num == nil {
		return nil
	}
	kind := reflect.ValueOf(num).Kind()
	value, nk := unifyNumericKind(num)
	if nk == invalidNum {
		return fmt.Errorf("unsupported kind %s", kind)
	}
	// First value establishes the family.
	if a.max == nil {
		a.max, a.nk = value, nk
		return nil
	}
	if a.nk != nk {
		return fmt.Errorf("want kind %s, got %s", a.nk, kind)
	}
	switch a.nk {
	case intNum:
		if value.(int64) > a.max.(int64) {
			a.max = value
		}
	case uintNum:
		if value.(uint64) > a.max.(uint64) {
			a.max = value
		}
	case floatNum:
		if value.(float64) > a.max.(float64) {
			a.max = value
		}
	default:
		return fmt.Errorf("BUG: unsupported numeric kind %s", a.nk)
	}
	return nil
}

// Result returns the running maximum, or nil if no values were aggregated.
func (a *NumericMaxAggregator) Result() interface{} {
	return a.max
}
// NumericMinAggregator tracks the minimum of a stream of numeric values.
// All values must belong to the same numeric family as the first one seen.
type NumericMinAggregator struct {
	min interface{}
	nk  numericKind
}

// String returns the name.
func (a *NumericMinAggregator) String() string {
	return "numeric_min"
}

// Aggregate folds num into the running minimum. nil values are skipped;
// non-numeric values or a family mismatch produce an error.
func (a *NumericMinAggregator) Aggregate(num interface{}) error {
	if num == nil {
		return nil
	}
	kind := reflect.ValueOf(num).Kind()
	value, nk := unifyNumericKind(num)
	if nk == invalidNum {
		return fmt.Errorf("unsupported kind %s", kind)
	}
	// First value establishes the family.
	if a.min == nil {
		a.min, a.nk = value, nk
		return nil
	}
	if a.nk != nk {
		return fmt.Errorf("want kind %s, got %s", a.nk, kind)
	}
	switch a.nk {
	case intNum:
		if value.(int64) < a.min.(int64) {
			a.min = value
		}
	case uintNum:
		if value.(uint64) < a.min.(uint64) {
			a.min = value
		}
	case floatNum:
		if value.(float64) < a.min.(float64) {
			a.min = value
		}
	default:
		return fmt.Errorf("BUG: unsupported numeric kind %s", a.nk)
	}
	return nil
}

// Result returns the running minimum, or nil if no values were aggregated.
func (a *NumericMinAggregator) Result() interface{} {
	return a.min
}
// NumericSumAggregator tracks the sum of a stream of numeric values.
// All values must belong to the same numeric family as the first one seen.
type NumericSumAggregator struct {
	sum interface{}
	nk  numericKind
}

// String returns the name.
func (a *NumericSumAggregator) String() string {
	return "numeric_sum"
}

// Aggregate adds num to the running sum. nil values are skipped;
// non-numeric values or a family mismatch produce an error.
func (a *NumericSumAggregator) Aggregate(num interface{}) error {
	if num == nil {
		return nil
	}
	kind := reflect.ValueOf(num).Kind()
	value, nk := unifyNumericKind(num)
	if nk == invalidNum {
		return fmt.Errorf("unsupported kind %s", kind)
	}
	// First value establishes the family.
	if a.sum == nil {
		a.sum, a.nk = value, nk
		return nil
	}
	if a.nk != nk {
		return fmt.Errorf("want kind %s, got %s", a.nk, kind)
	}
	switch a.nk {
	case intNum:
		a.sum = a.sum.(int64) + value.(int64)
	case uintNum:
		a.sum = a.sum.(uint64) + value.(uint64)
	case floatNum:
		a.sum = a.sum.(float64) + value.(float64)
	default:
		return fmt.Errorf("BUG: unsupported numeric kind %s", a.nk)
	}
	return nil
}

// Result returns the running sum, or nil if no values were aggregated.
func (a *NumericSumAggregator) Result() interface{} {
	return a.sum
}
// NumericAvgAggregator is the structure with average value
type NumericAvgAggregator struct {
NumericSumAggregator
count int64
}
// String returns the name.
func (a *NumericAvgAggregator) String() string {
return "numeric_average"
}
// Aggregate records the number of values and the sum of values
func (a *NumericAvgAggregator) Aggregate(num interface{}) error {
if num == nil {
return nil
}
err := a.NumericSumAggregator.Aggregate(num)
if err != nil {
return err
}
a.count++
return nil
}
// Result returns the average values
func (a *NumericAvgAggregator) Result() interface{} {
if a.NumericSumAggregator.Result() == nil {
return nil
}
switch a.nk {
case intNum:
return a.NumericSumAggregator.Result().(int64) / int64(a.count)
case uintNum:
return a.NumericSumAggregator.Result().(uint64) / uint64(a.count)
case floatNum:
return a.NumericSumAggregator.Result().(float64) / float64(a.count)
default:
return nil
}
} | pkg/common/stat_aggregator.go | 0.670716 | 0.40642 | stat_aggregator.go | starcoder |
package series
import (
"fmt"
"strings"
)
type boolElement struct {
e bool
valid bool
}
func (e *boolElement) Set(value interface{}) error {
e.valid = true
e.e = false
if value == nil {
e.valid = false
return nil
}
switch value.(type) {
case string:
switch strings.ToLower(value.(string)) {
case "true", "t", "1":
e.e = true
case "false", "f", "0":
e.e = false
default:
e.valid = false
return fmt.Errorf("can't convert string '%s' to boolean", value.(string))
}
case int:
if value.(int) == 0 {
e.e = false
} else {
e.e = true
}
case int64:
if value.(int64) == 0 {
e.e = false
} else {
e.e = true
}
case uint:
if value.(uint) == 0 {
e.e = false
} else {
e.e = true
}
case uint64:
if value.(uint64) == 0 {
e.e = false
} else {
e.e = true
}
case float32:
v := value.(float32)
if v == 0 || v != v {
e.e = false
} else {
e.e = true
}
case float64:
v := value.(float64)
if v == 0 || v != v {
e.e = false
} else {
e.e = true
}
case bool:
e.e = value.(bool)
case Element:
if value.(Element).IsValid() {
b, err := value.(Element).Bool()
if err != nil {
e.valid = false
return err
}
e.e = b
} else {
e.valid = false
return nil
}
default:
e.valid = false
return fmt.Errorf("Unsupported type '%T' conversion to a boolean", value)
}
return nil
}
func (e boolElement) Copy() Element {
return &boolElement{e.e, e.valid}
}
func (e boolElement) IsValid() bool {
return e.valid
}
func (e boolElement) IsNaN() bool {
if !e.valid {
return true
}
return false
}
func (e boolElement) IsInf(sign int) bool {
return false
}
func (e boolElement) Type() Type {
return Bool
}
func (e boolElement) Val() ElementValue {
if !e.valid {
return nil
}
return bool(e.e)
}
func (e boolElement) String() (string, error) {
if !e.valid {
return "false", fmt.Errorf("can't convert a nil to string")
}
if e.e {
return "true", nil
}
return "false", nil
}
func (e boolElement) Int() (int64, error) {
if !e.valid {
return 0, fmt.Errorf("can't convert a nil to an int64")
}
if e.e == true {
return 1, nil
}
return 0, nil
}
func (e boolElement) Uint() (uint64, error) {
if !e.valid {
return 0, fmt.Errorf("can't convert a nil to an uint64")
}
if e.e == true {
return 1, nil
}
return 0, nil
}
func (e boolElement) Float() (float64, error) {
if !e.valid {
return 0, fmt.Errorf("can't convert a nil to a float64")
}
if e.e {
return 1.0, nil
}
return 0.0, nil
}
func (e boolElement) Bool() (bool, error) {
if !e.valid {
return false, fmt.Errorf("can't convert a nil to a boolean")
}
return bool(e.e), nil
}
func (e boolElement) Eq(elem Element) bool {
if e.valid != elem.IsValid() {
// xor
return false
}
if !e.valid && !elem.IsValid() {
// nil == nil is true
return true
}
b, err := elem.Bool()
if err != nil {
return false
}
return e.e == b
}
func (e boolElement) Neq(elem Element) bool {
if e.valid != elem.IsValid() {
return true
}
return !e.Eq(elem)
}
func (e boolElement) Less(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
if elem.IsNaN() {
return false
}
b, err := elem.Bool()
if err != nil {
return false
}
return !e.e && b
}
func (e boolElement) LessEq(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
if elem.IsNaN() {
return false
}
b, err := elem.Bool()
if err != nil {
return false
}
return !e.e || b
}
func (e boolElement) Greater(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
if elem.IsNaN() {
return false
}
b, err := elem.Bool()
if err != nil {
return false
}
return e.e && !b
}
func (e boolElement) GreaterEq(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
if elem.IsNaN() {
return false
}
b, err := elem.Bool()
if err != nil {
return false
}
return e.e || !b
} | series/type-bool.go | 0.532911 | 0.539772 | type-bool.go | starcoder |
package leetcode
import (
	"fmt"
	"strings"
)
// You are given two non-empty linked lists representing two non-negative integers. The digits are stored in reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
// You may assume the two numbers do not contain any leading zero, except the number 0 itself.
// Example
// Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
// Output: 7 -> 0 -> 8
// Explanation: 342 + 465 = 807.
// Definition for singly-linked list.
// Val holds one decimal digit (0-9) in this problem; Next is nil at the tail.
type ListNode struct {
	Val  int
	Next *ListNode
}
// genListNode builds a singly-linked list holding the values of array in
// order and returns its head (nil for an empty slice).
func genListNode(array []int) *ListNode {
	dummy := &ListNode{}
	tail := dummy
	for _, v := range array {
		tail.Next = &ListNode{Val: v}
		tail = tail.Next
	}
	return dummy.Next
}
// printListNode renders the list as bracketed, space-prefixed values, e.g.
// "[ 7 0 8]" (empty list renders as "[]").
func printListNode(l *ListNode) string {
	// Build with strings.Builder instead of repeated Sprintf concatenation,
	// which is quadratic in the list length.
	var b strings.Builder
	b.WriteString("[")
	for ; l != nil; l = l.Next {
		fmt.Fprintf(&b, " %d", l.Val)
	}
	b.WriteString("]")
	return b.String()
}
// addTwoNumbers sums two non-negative integers whose digits are stored in
// reverse order, one digit per node, and returns the sum in the same
// representation.
//
// Rewritten with a dummy head and a single loop. This also fixes a nil
// pointer dereference in the original: when l1 was nil but l2 was not,
// `curr` was never initialized before the l2 tail loop wrote curr.Next.
func addTwoNumbers(l1 *ListNode, l2 *ListNode) *ListNode {
	dummy := &ListNode{}
	curr := dummy
	carry := 0
	for l1 != nil || l2 != nil || carry > 0 {
		sum := carry
		if l1 != nil {
			sum += l1.Val
			l1 = l1.Next
		}
		if l2 != nil {
			sum += l2.Val
			l2 = l2.Next
		}
		curr.Next = &ListNode{Val: sum % 10}
		curr = curr.Next
		carry = sum / 10
	}
	return dummy.Next
}
func TestAddTwo () {
fmt.Println("TestLongestSubstring")
// l1 := genListNode([]int{2, 4, 3})
// l2 := genListNode([]int{5, 6, 4})
l1 := genListNode([]int{0})
l2 := genListNode([]int{7, 3})
fmt.Println(printListNode(l1))
fmt.Println(printListNode(l2))
l3 := addTwoNumbers(l1, l2)
fmt.Println(printListNode(l3))
} | add_two.go | 0.639849 | 0.492249 | add_two.go | starcoder |
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// UserSimulationDetails aggregates, for a single user, the outcome of an
// attack simulation and training campaign.
type UserSimulationDetails struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // Number of trainings assigned to a user in an attack simulation and training campaign.
    assignedTrainingsCount *int32
    // Number of trainings completed by a user in an attack simulation and training campaign.
    completedTrainingsCount *int32
    // Date and time of the compromising online action by a user in an attack simulation and training campaign.
    compromisedDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Number of trainings in progress by a user in an attack simulation and training campaign.
    inProgressTrainingsCount *int32
    // Flag representing if user was compromised in an attack simulation and training campaign.
    isCompromised *bool
    // Date and time when user reported delivered payload as phish in the attack simulation and training campaign.
    reportedPhishDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // List of simulation events of a user in the attack simulation and training campaign.
    simulationEvents []UserSimulationEventInfoable
    // User in an attack simulation and training campaign.
    simulationUser AttackSimulationUserable
    // List of training events of a user in the attack simulation and training campaign.
    trainingEvents []UserTrainingEventInfoable
}
// NewUserSimulationDetails instantiates a new userSimulationDetails and sets the default values.
func NewUserSimulationDetails() *UserSimulationDetails {
    m := &UserSimulationDetails{}
    m.SetAdditionalData(map[string]interface{}{})
    return m
}
// CreateUserSimulationDetailsFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parseNode argument is currently unused: there are no subtypes to discriminate.
func CreateUserSimulationDetailsFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewUserSimulationDetails(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// A nil receiver returns nil.
func (m *UserSimulationDetails) GetAdditionalData() map[string]interface{} {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetAssignedTrainingsCount gets the assignedTrainingsCount property value. Number of trainings assigned to a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetAssignedTrainingsCount()(*int32) {
if m == nil {
return nil
} else {
return m.assignedTrainingsCount
}
}
// GetCompletedTrainingsCount gets the completedTrainingsCount property value. Number of trainings completed by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetCompletedTrainingsCount()(*int32) {
if m == nil {
return nil
} else {
return m.completedTrainingsCount
}
}
// GetCompromisedDateTime gets the compromisedDateTime property value. Date and time of the compromising online action by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetCompromisedDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
if m == nil {
return nil
} else {
return m.compromisedDateTime
}
}
// GetFieldDeserializers returns the deserialization information for the current model.
// The returned map associates each JSON property name with a closure that parses the
// value from the supplied node and, when the parsed value is non-nil, stores it on the
// model through the matching setter; nil values leave the current field untouched.
func (m *UserSimulationDetails) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
    // Scalar properties: int32, time, and bool values.
    res["assignedTrainingsCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAssignedTrainingsCount(val)
        }
        return nil
    }
    res["completedTrainingsCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCompletedTrainingsCount(val)
        }
        return nil
    }
    res["compromisedDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCompromisedDateTime(val)
        }
        return nil
    }
    res["inProgressTrainingsCount"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetInProgressTrainingsCount(val)
        }
        return nil
    }
    res["isCompromised"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetBoolValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetIsCompromised(val)
        }
        return nil
    }
    res["reportedPhishDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetReportedPhishDateTime(val)
        }
        return nil
    }
    // Collection property: each parsed element is asserted into the concrete interface slice type.
    res["simulationEvents"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateUserSimulationEventInfoFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]UserSimulationEventInfoable, len(val))
            for i, v := range val {
                res[i] = v.(UserSimulationEventInfoable)
            }
            m.SetSimulationEvents(res)
        }
        return nil
    }
    // Single nested object property.
    res["simulationUser"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAttackSimulationUserFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSimulationUser(val.(AttackSimulationUserable))
        }
        return nil
    }
    // Collection property: same element-by-element assertion pattern as simulationEvents.
    res["trainingEvents"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateUserTrainingEventInfoFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]UserTrainingEventInfoable, len(val))
            for i, v := range val {
                res[i] = v.(UserTrainingEventInfoable)
            }
            m.SetTrainingEvents(res)
        }
        return nil
    }
    return res
}
// GetInProgressTrainingsCount gets the inProgressTrainingsCount property value. Number of trainings in progress by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetInProgressTrainingsCount()(*int32) {
    if m == nil {
        return nil
    }
    return m.inProgressTrainingsCount
}
// GetIsCompromised gets the isCompromised property value. Flag representing if user was compromised in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetIsCompromised()(*bool) {
    if m == nil {
        return nil
    }
    return m.isCompromised
}
// GetReportedPhishDateTime gets the reportedPhishDateTime property value. Date and time when user reported delivered payload as phish in the attack simulation and training campaign.
func (m *UserSimulationDetails) GetReportedPhishDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.reportedPhishDateTime
}
// GetSimulationEvents gets the simulationEvents property value. List of simulation events of a user in the attack simulation and training campaign.
func (m *UserSimulationDetails) GetSimulationEvents()([]UserSimulationEventInfoable) {
    if m == nil {
        return nil
    }
    return m.simulationEvents
}
// GetSimulationUser gets the simulationUser property value. User in an attack simulation and training campaign.
func (m *UserSimulationDetails) GetSimulationUser()(AttackSimulationUserable) {
    if m == nil {
        return nil
    }
    return m.simulationUser
}
// GetTrainingEvents gets the trainingEvents property value. List of training events of a user in the attack simulation and training campaign.
func (m *UserSimulationDetails) GetTrainingEvents()([]UserTrainingEventInfoable) {
    if m == nil {
        return nil
    }
    return m.trainingEvents
}
// Serialize writes every property of the current object to the supplied writer,
// returning the first error encountered (nil on success).
func (m *UserSimulationDetails) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    if err := writer.WriteInt32Value("assignedTrainingsCount", m.GetAssignedTrainingsCount()); err != nil {
        return err
    }
    if err := writer.WriteInt32Value("completedTrainingsCount", m.GetCompletedTrainingsCount()); err != nil {
        return err
    }
    if err := writer.WriteTimeValue("compromisedDateTime", m.GetCompromisedDateTime()); err != nil {
        return err
    }
    if err := writer.WriteInt32Value("inProgressTrainingsCount", m.GetInProgressTrainingsCount()); err != nil {
        return err
    }
    if err := writer.WriteBoolValue("isCompromised", m.GetIsCompromised()); err != nil {
        return err
    }
    if err := writer.WriteTimeValue("reportedPhishDateTime", m.GetReportedPhishDateTime()); err != nil {
        return err
    }
    // Collections are written only when present, widened element-by-element to Parsable.
    if events := m.GetSimulationEvents(); events != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(events))
        for i, v := range events {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        if err := writer.WriteCollectionOfObjectValues("simulationEvents", cast); err != nil {
            return err
        }
    }
    if err := writer.WriteObjectValue("simulationUser", m.GetSimulationUser()); err != nil {
        return err
    }
    if events := m.GetTrainingEvents(); events != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(events))
        for i, v := range events {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        if err := writer.WriteCollectionOfObjectValues("trainingEvents", cast); err != nil {
            return err
        }
    }
    // Any properties captured outside the OpenAPI description are flushed last.
    if err := writer.WriteAdditionalData(m.GetAdditionalData()); err != nil {
        return err
    }
    return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *UserSimulationDetails) SetAdditionalData(value map[string]interface{})() {
if m != nil {
m.additionalData = value
}
}
// SetAssignedTrainingsCount sets the assignedTrainingsCount property value. Number of trainings assigned to a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetAssignedTrainingsCount(value *int32)() {
if m != nil {
m.assignedTrainingsCount = value
}
}
// SetCompletedTrainingsCount sets the completedTrainingsCount property value. Number of trainings completed by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetCompletedTrainingsCount(value *int32)() {
if m != nil {
m.completedTrainingsCount = value
}
}
// SetCompromisedDateTime sets the compromisedDateTime property value. Date and time of the compromising online action by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetCompromisedDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.compromisedDateTime = value
}
}
// SetInProgressTrainingsCount sets the inProgressTrainingsCount property value. Number of trainings in progress by a user in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetInProgressTrainingsCount(value *int32)() {
if m != nil {
m.inProgressTrainingsCount = value
}
}
// SetIsCompromised sets the isCompromised property value. Flag representing if user was compromised in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetIsCompromised(value *bool)() {
if m != nil {
m.isCompromised = value
}
}
// SetReportedPhishDateTime sets the reportedPhishDateTime property value. Date and time when user reported delivered payload as phish in the attack simulation and training campaign.
func (m *UserSimulationDetails) SetReportedPhishDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.reportedPhishDateTime = value
}
}
// SetSimulationEvents sets the simulationEvents property value. List of simulation events of a user in the attack simulation and training campaign.
func (m *UserSimulationDetails) SetSimulationEvents(value []UserSimulationEventInfoable)() {
if m != nil {
m.simulationEvents = value
}
}
// SetSimulationUser sets the simulationUser property value. User in an attack simulation and training campaign.
func (m *UserSimulationDetails) SetSimulationUser(value AttackSimulationUserable)() {
if m != nil {
m.simulationUser = value
}
}
// SetTrainingEvents sets the trainingEvents property value. List of training events of a user in the attack simulation and training campaign.
func (m *UserSimulationDetails) SetTrainingEvents(value []UserTrainingEventInfoable)() {
if m != nil {
m.trainingEvents = value
}
} | models/user_simulation_details.go | 0.567577 | 0.464659 | user_simulation_details.go | starcoder |
package iso20022
// Additional restrictions on the financial instrument, related to the stipulation.
// FinancialInstrumentStipulations2 carries additional restrictions on the financial
// instrument, related to the stipulation. Field order is significant: it determines
// the XML element order produced by encoding/xml.
type FinancialInstrumentStipulations2 struct {
	// Type of stipulation expressing geographical constraints on a fixed income instrument. It is expressed with a state or country abbreviation and a minimum or maximum percentage. Example: CA 0-80 (minimum of 80 percent in Californian assets).
	Geographics *Max35Text `xml:"Geogcs,omitempty"`
	// Range of allowed yield.
	YieldRange *AmountOrPercentageRange `xml:"YldRg,omitempty"`
	// Range of assessment of securities credit and investment risk.
	Rating *Rating1 `xml:"Ratg,omitempty"`
	// Identification of a range of coupon numbers attached to its related financial instrument.
	CouponRange *AmountOrPercentageRange `xml:"CpnRg,omitempty"`
	// Indicates whether the financial instrument repays the principal amount in parts during the life cycle of the security.
	AmortisableIndicator *YesNoIndicator `xml:"AmtsblInd,omitempty"`
	// Reason for which money is raised through the issuance of a security.
	Purpose *Max256Text `xml:"Purp,omitempty"`
	// Identifies whether the issue is subject to alternative minimum taxation (used for municipal bonds).
	AlternativeMinimumTaxIndicator *YesNoIndicator `xml:"AltrntvMinTaxInd,omitempty"`
	// Indicates an instruction to reinvest dividends in the underlying security (or proceeds at maturity in a similar instrument) if the current rate is <rate> or better.
	AutoReinvestment *PercentageRate `xml:"AutoRinvstmt,omitempty"`
	// Indicates the conditions under which the order/trade is to be/was executed.
	TransactionConditions *TradeTransactionCondition2Code `xml:"TxConds,omitempty"`
	// Currency in which a security is issued or redenominated.
	Currency *CurrencyCode `xml:"Ccy,omitempty"`
	// Indicates an instruction to override an investment's default start and/or end date with a custom date.
	CustomDate *DateTimePeriodDetails1 `xml:"CstmDt,omitempty"`
	// Haircut or valuation factor on the security expressed as a percentage.
	Haircut *PercentageRate `xml:"Hrcut,omitempty"`
	// Identifies whether the lender is assured partial or full payment by a third party if the borrower defaults.
	InsuredIndicator *YesNoIndicator `xml:"InsrdInd,omitempty"`
	// Indicates an instruction or attribute giving the number of days to be included in the look-back period for the investment. E.g. some options allow exercise based on the underlying asset's optimal value over the look-back period.
	LookBack *Number `xml:"LookBck,omitempty"`
	// Indicates the maturity date.
	MaturityDate *ISOYearMonth `xml:"MtrtyDt,omitempty"`
	// Indicates the issue date.
	IssueDate *ISOYearMonth `xml:"IsseDt,omitempty"`
	// Identification of the issuer.
	IssuerIdentification *BICNonFIIdentifier `xml:"IssrId,omitempty"`
	// Identifies the issue size range.
	IssueSize *Number `xml:"IsseSz,omitempty"`
	// Indicates the minimum denomination of a security.
	MinimumDenomination *FinancialInstrumentQuantityChoice `xml:"MinDnmtn,omitempty"`
	// Maximum number of times the collateral can be substituted.
	MaximumSubstitution *Number `xml:"MaxSbstitn,omitempty"`
	// Indicates the minimum tradable increments of a security.
	MinimumIncrement *FinancialInstrumentQuantityChoice `xml:"MinIncrmt,omitempty"`
	// Indicates the periodic or regular cycle of interest payments.
	PaymentFrequency *Frequency1Code `xml:"PmtFrqcy,omitempty"`
	// Indicates the minimum tradable quantity of a security.
	MinimumQuantity *FinancialInstrumentQuantityChoice `xml:"MinQty,omitempty"`
	// Indicates a search criterion used when looking to buy a bond, particularly an MBS, issued in a particular year.
	Production *Max35Text `xml:"Pdctn,omitempty"`
	// Identifies if the securities is restricted or not (as per Rule 144).
	RestrictedIndicator *YesNoIndicator `xml:"RstrctdInd,omitempty"`
	// Indicates the frequency at which the bond is re-rated and therefore re-priced (bond attribute, particularly of floating rate and index linked instruments).
	PriceFrequency *Frequency1Code `xml:"PricFrqcy,omitempty"`
	// Indicates the market sector the security is classified as. E.g. pharmaceuticals, automobile, housing, etc.
	Sector *Max35Text `xml:"Sctr,omitempty"`
	// Indicates the maximum number of times collateral can be substituted.
	SubstitutionFrequency *Frequency1Code `xml:"SbstitnFrqcy,omitempty"`
	// Number of remaining times the collateral can be substituted.
	SubstitutionLeft *Number `xml:"SbstitnLft,omitempty"`
	// Indicates a search criterion when looking to buy an MBS that either is [yes] or is not [no] an entire pool.
	WholePoolIndicator *YesNoIndicator `xml:"WhlPoolInd,omitempty"`
	// Identifies the Benchmark source price (eg. BB Generic, BB Fairvalue, Brokertec..).
	PriceSource *Max35Text `xml:"PricSrc,omitempty"`
	// Date/time at which an interest bearing security becomes due and assets are to be repaid.
	ExpirationDate *ISODateTime `xml:"XprtnDt,omitempty"`
	// Amount for which a security can be over-allotted (as in greenshoe option).
	OverAllotmentAmount *ActiveCurrencyAndAmount `xml:"OverAlltmtAmt,omitempty"`
	// Percentage for which a security can be over-allotted (as in greenshoe option).
	OverAllotmentRate *PercentageRate `xml:"OverAlltmtRate,omitempty"`
	// Indicates a search criterion used when looking to buy a bond within a particular price range.
	PriceRange *AmountOrPercentageRange `xml:"PricRg,omitempty"`
	// Indicates whether the issuer has the right to pay the security prior to maturity. Also called RetractableIndicator.
	CallableIndicator *YesNoIndicator `xml:"CllblInd,omitempty"`
	// Indicates whether the interest bearing security is convertible into another type of security.
	ConvertibleIndicator *YesNoIndicator `xml:"ConvtblInd,omitempty"`
	// Indicates whether the holder has the right to ask for redemption of the security prior to final maturity. Also called RedeemableIndicator.
	PutableIndicator *YesNoIndicator `xml:"PutblInd,omitempty"`
	// Indicates whether an interest bearing instrument is deposited in a fund that will be used to pay debt service on refunded securities.
	PreFundedIndicator *YesNoIndicator `xml:"PreFnddInd,omitempty"`
	// Indicates whether an interest bearing instrument is being escrowed or collateralized either by direct obligations guaranteed by the US government, or by other types of securities. The maturity schedules of the securities in the escrow fund are determined in such a way to pay the maturity value, coupon, and premium payments (if any) of the refunded bonds.
	EscrowedIndicator *YesNoIndicator `xml:"EscrwdInd,omitempty"`
	// Indicates whether the security has no maturity date.
	PerpetualIndicator *YesNoIndicator `xml:"PerptlInd,omitempty"`
}
// SetGeographics stores the geographical constraint stipulation from a plain string.
func (f *FinancialInstrumentStipulations2) SetGeographics(value string) {
	v := Max35Text(value)
	f.Geographics = &v
}
// AddYieldRange allocates the YieldRange stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddYieldRange() *AmountOrPercentageRange {
	f.YieldRange = &AmountOrPercentageRange{}
	return f.YieldRange
}
// AddRating allocates the Rating stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddRating() *Rating1 {
	f.Rating = &Rating1{}
	return f.Rating
}
// AddCouponRange allocates the CouponRange stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddCouponRange() *AmountOrPercentageRange {
	f.CouponRange = &AmountOrPercentageRange{}
	return f.CouponRange
}
// SetAmortisableIndicator stores the amortisable flag from a plain string.
func (f *FinancialInstrumentStipulations2) SetAmortisableIndicator(value string) {
	v := YesNoIndicator(value)
	f.AmortisableIndicator = &v
}
// SetPurpose stores the issuance purpose from a plain string.
func (f *FinancialInstrumentStipulations2) SetPurpose(value string) {
	v := Max256Text(value)
	f.Purpose = &v
}
// SetAlternativeMinimumTaxIndicator stores the alternative-minimum-tax flag from a plain string.
func (f *FinancialInstrumentStipulations2) SetAlternativeMinimumTaxIndicator(value string) {
	v := YesNoIndicator(value)
	f.AlternativeMinimumTaxIndicator = &v
}
// SetAutoReinvestment stores the auto-reinvestment rate from a plain string.
func (f *FinancialInstrumentStipulations2) SetAutoReinvestment(value string) {
	v := PercentageRate(value)
	f.AutoReinvestment = &v
}
// SetTransactionConditions stores the trade transaction condition code from a plain string.
func (f *FinancialInstrumentStipulations2) SetTransactionConditions(value string) {
	v := TradeTransactionCondition2Code(value)
	f.TransactionConditions = &v
}
// SetCurrency stores the issue currency from a plain string.
func (f *FinancialInstrumentStipulations2) SetCurrency(value string) {
	v := CurrencyCode(value)
	f.Currency = &v
}
// AddCustomDate allocates the CustomDate stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddCustomDate() *DateTimePeriodDetails1 {
	f.CustomDate = &DateTimePeriodDetails1{}
	return f.CustomDate
}
// SetHaircut stores the haircut percentage from a plain string.
func (f *FinancialInstrumentStipulations2) SetHaircut(value string) {
	v := PercentageRate(value)
	f.Haircut = &v
}
// SetInsuredIndicator stores the insured flag from a plain string.
func (f *FinancialInstrumentStipulations2) SetInsuredIndicator(value string) {
	v := YesNoIndicator(value)
	f.InsuredIndicator = &v
}
// SetLookBack stores the look-back day count from a plain string.
func (f *FinancialInstrumentStipulations2) SetLookBack(value string) {
	v := Number(value)
	f.LookBack = &v
}
// SetMaturityDate stores the maturity date from a plain string.
func (f *FinancialInstrumentStipulations2) SetMaturityDate(value string) {
	v := ISOYearMonth(value)
	f.MaturityDate = &v
}
// SetIssueDate stores the issue date from a plain string.
func (f *FinancialInstrumentStipulations2) SetIssueDate(value string) {
	v := ISOYearMonth(value)
	f.IssueDate = &v
}
// SetIssuerIdentification stores the issuer BIC from a plain string.
func (f *FinancialInstrumentStipulations2) SetIssuerIdentification(value string) {
	v := BICNonFIIdentifier(value)
	f.IssuerIdentification = &v
}
// SetIssueSize stores the issue size from a plain string.
func (f *FinancialInstrumentStipulations2) SetIssueSize(value string) {
	v := Number(value)
	f.IssueSize = &v
}
// AddMinimumDenomination allocates the MinimumDenomination stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddMinimumDenomination() *FinancialInstrumentQuantityChoice {
	f.MinimumDenomination = &FinancialInstrumentQuantityChoice{}
	return f.MinimumDenomination
}
// SetMaximumSubstitution stores the maximum substitution count from a plain string.
func (f *FinancialInstrumentStipulations2) SetMaximumSubstitution(value string) {
	v := Number(value)
	f.MaximumSubstitution = &v
}
// AddMinimumIncrement allocates the MinimumIncrement stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddMinimumIncrement() *FinancialInstrumentQuantityChoice {
	f.MinimumIncrement = &FinancialInstrumentQuantityChoice{}
	return f.MinimumIncrement
}
// SetPaymentFrequency stores the interest payment frequency code from a plain string.
func (f *FinancialInstrumentStipulations2) SetPaymentFrequency(value string) {
	v := Frequency1Code(value)
	f.PaymentFrequency = &v
}
// AddMinimumQuantity allocates the MinimumQuantity stipulation and returns it for population.
func (f *FinancialInstrumentStipulations2) AddMinimumQuantity() *FinancialInstrumentQuantityChoice {
	f.MinimumQuantity = &FinancialInstrumentQuantityChoice{}
	return f.MinimumQuantity
}
// SetProduction stores the production-year search criterion from a plain string.
func (f *FinancialInstrumentStipulations2) SetProduction(value string) {
	v := Max35Text(value)
	f.Production = &v
}
// SetRestrictedIndicator stores the Rule 144 restriction flag from a plain string.
func (f *FinancialInstrumentStipulations2) SetRestrictedIndicator(value string) {
	v := YesNoIndicator(value)
	f.RestrictedIndicator = &v
}
// SetPriceFrequency stores the re-pricing frequency code from a plain string.
func (f *FinancialInstrumentStipulations2) SetPriceFrequency(value string) {
	v := Frequency1Code(value)
	f.PriceFrequency = &v
}
// SetSector stores the market sector classification from a plain string.
func (f *FinancialInstrumentStipulations2) SetSector(value string) {
	v := Max35Text(value)
	f.Sector = &v
}
// SetSubstitutionFrequency stores the substitution frequency code from a plain string.
func (f *FinancialInstrumentStipulations2) SetSubstitutionFrequency(value string) {
	v := Frequency1Code(value)
	f.SubstitutionFrequency = &v
}
// SetSubstitutionLeft stores the remaining substitution count from a plain string.
func (f *FinancialInstrumentStipulations2) SetSubstitutionLeft(value string) {
	v := Number(value)
	f.SubstitutionLeft = &v
}
// SetWholePoolIndicator stores the whole-pool search flag from a plain string.
func (f *FinancialInstrumentStipulations2) SetWholePoolIndicator(value string) {
	v := YesNoIndicator(value)
	f.WholePoolIndicator = &v
}
// SetPriceSource stores the benchmark price source from a plain string.
func (f *FinancialInstrumentStipulations2) SetPriceSource(value string) {
	v := Max35Text(value)
	f.PriceSource = &v
}
// SetExpirationDate stores the expiration date/time from a plain string.
func (f *FinancialInstrumentStipulations2) SetExpirationDate(value string) {
	v := ISODateTime(value)
	f.ExpirationDate = &v
}
func (f *FinancialInstrumentStipulations2) SetOverAllotmentAmount(value, currency string) {
f.OverAllotmentAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (f *FinancialInstrumentStipulations2) SetOverAllotmentRate(value string) {
f.OverAllotmentRate = (*PercentageRate)(&value)
}
func (f *FinancialInstrumentStipulations2) AddPriceRange() *AmountOrPercentageRange {
f.PriceRange = new(AmountOrPercentageRange)
return f.PriceRange
}
func (f *FinancialInstrumentStipulations2) SetCallableIndicator(value string) {
f.CallableIndicator = (*YesNoIndicator)(&value)
}
func (f *FinancialInstrumentStipulations2) SetConvertibleIndicator(value string) {
f.ConvertibleIndicator = (*YesNoIndicator)(&value)
}
func (f *FinancialInstrumentStipulations2) SetPutableIndicator(value string) {
f.PutableIndicator = (*YesNoIndicator)(&value)
}
func (f *FinancialInstrumentStipulations2) SetPreFundedIndicator(value string) {
f.PreFundedIndicator = (*YesNoIndicator)(&value)
}
func (f *FinancialInstrumentStipulations2) SetEscrowedIndicator(value string) {
f.EscrowedIndicator = (*YesNoIndicator)(&value)
}
func (f *FinancialInstrumentStipulations2) SetPerpetualIndicator(value string) {
f.PerpetualIndicator = (*YesNoIndicator)(&value)
} | data/train/go/dd70e9f33d046f048429ec3e23ac72fcb16fc09cFinancialInstrumentStipulations2.go | 0.915184 | 0.511168 | dd70e9f33d046f048429ec3e23ac72fcb16fc09cFinancialInstrumentStipulations2.go | starcoder |
package main
import "fmt"
const PieceStatusBits = 3
const BitsPerSquare = PieceStatusBits + 2
/*
Every board square is numbered this way:
0 1 2 3 4 5 6 7
8 9 10 11 12 13 14 15
16 17 ...
...
A uint64 in Board contains one bit for each of the 64 squares,
in that order.
The first PieceStatusBits bits for each square represent the current square status.
This status uses the values in PieceStatus.
The remaining bits are used for:
- one bit for storing the PieceStatus value (can the king do a castling move? etc.)
- one bit for storing the PieceColor value
*/
type Board [BitsPerSquare]uint64;
type Piece uint8
const (
Piece_Empty Piece = iota
Piece_Pawn
Piece_Rock
Piece_Knight
Piece_Bishop
Piece_King
Piece_Queen
)
var pieceNamesMap = map[Piece]string {
Piece_Empty : "Empty", Piece_Pawn : "Pawn", Piece_Rock : "Rock", Piece_Bishop : "Bishop", Piece_Knight : "Knight", Piece_Queen : "Queen", Piece_King : "King",
}
func (p Piece) String() string {
return pieceNamesMap[p]
}
type PieceStatus bool
const (
PieceStatus_Default PieceStatus = false // initial status: pawn can't be captured in en-passant, rock / king can do castling
PieceStatus_EnPassantAllowed = true // pawn can be captured using en-passant move
PieceStatus_CastlingNotAllowed = true // rock or king not allowed to do castling
)
type PieceColor bool
const (
PieceColor_White PieceColor = true
PieceColor_Black = false
)
type PieceInfo struct {
piece Piece
status PieceStatus
color PieceColor
}
var EmptyPieceInfo = PieceInfo{ Piece_Empty, PieceStatus_Default, PieceColor_White }
func (p PieceColor) String() string {
if p == PieceColor_White { return "White" }
return "Black"
}
type SquareColor bool
const (
SquareColor_White SquareColor = true
SquareColor_Black = false
)
// we assume background is white, otherwise the colors will look reverted
var pieceCharMap = map[PieceColor]map[Piece]string {
PieceColor_White :
{ Piece_Empty : ` `, Piece_Pawn : `♙`, Piece_Rock : `♖`, Piece_Knight : `♘`, Piece_Bishop : `♗`, Piece_King : `♔`, Piece_Queen : `♕`, },
PieceColor_Black :
{ Piece_Empty : ` `, Piece_Pawn : `♟`, Piece_Rock : `♜`, Piece_Knight : `♞`, Piece_Bishop : `♝`, Piece_King : `♚`, Piece_Queen : `♛`, },
}
var squareCharMap = map[SquareColor]string { SquareColor_White : ` `, SquareColor_Black : `▨`, }
func BoolToInt(b bool) uint64 {
if b {
return 1
}
return 0
}
func GetBitValue(bits uint64, pos uint64) uint64 {
return (bits >> pos) & 1
}
func SetBitValue(bits, pos, value uint64) uint64 {
return (bits &^ (1 << pos)) | (value << pos)
}
// GetBoardAt gives information about a piece in a given position
func GetBoardAt(board Board, pos Position) (info PieceInfo) {
var value, bitidx uint64
if !PositionInBoard(pos) { panic("wrong position") }
bitidx = uint64(pos.x + pos.y * 8)
// value[bit0] = board[0][bit pos], value[bit1] = board[1][bit pos], ...
for i := uint64(0); i < PieceStatusBits; i ++ {
value |= GetBitValue(board[i], bitidx) << i
}
info.piece = Piece(value)
info.status = 1 == GetBitValue(board[PieceStatusBits], bitidx)
info.color = 1 == GetBitValue(board[PieceStatusBits + 1], bitidx)
return
}
// SetBoardAt modifies a specific position of a board
func SetBoardAt(board *Board, pos Position, info PieceInfo) {
if !PositionInBoard(pos) { panic("wrong position") }
bitidx := uint64(pos.x + pos.y * 8)
for i := uint64(0); i < PieceStatusBits; i ++ {
(*board)[i] = SetBitValue((*board)[i], bitidx, GetBitValue(uint64(info.piece), i))
}
(*board)[PieceStatusBits] = SetBitValue((*board)[PieceStatusBits], bitidx, BoolToInt(bool(info.status)))
(*board)[PieceStatusBits + 1] = SetBitValue((*board)[PieceStatusBits + 1], bitidx, BoolToInt(bool(info.color)))
}
func PositionInBoard(pos Position) bool {
if pos.x < 0 || pos.x > 7 || pos.y < 0 || pos.y > 7 { return false }
return true
}
func DrawPiece(info PieceInfo, square SquareColor) {
printSquares := true
debugStatus := false
fmt.Print(" ")
if debugStatus {
if info.piece == Piece_Pawn && info.status == PieceStatus_EnPassantAllowed {
fmt.Print("P")
return
}
if info.piece == Piece_Rock && info.status == PieceStatus_CastlingNotAllowed {
fmt.Print("R")
return
}
if info.piece == Piece_King && info.status == PieceStatus_CastlingNotAllowed {
fmt.Print("K")
return
}
}
if info.piece == Piece_Empty {
if printSquares {
fmt.Print(squareCharMap[square])
} else {
fmt.Print(" ")
}
} else {
fmt.Print(pieceCharMap[info.color][info.piece])
}
}
func DrawBoard(board Board) {
squareColor := SquareColor_White
lineCount := 0
fmt.Println(" 0 1 2 3 4 5 6 7")
for y := 0; y < 8; y ++ {
fmt.Print(lineCount)
for x := 0; x < 8; x ++ {
info := GetBoardAt(board, Position{x, y})
DrawPiece(info, squareColor)
squareColor = !squareColor
}
squareColor = !squareColor
lineCount ++
fmt.Println("")
}
}
func GetPieces(board Board, piece Piece, color PieceColor) []Position {
posl := make([]Position, 0, 4)
for x := 0; x < 8; x ++ {
for y := 0; y < 8; y ++ {
pos := Position{x, y}
infoHere := GetBoardAt(board, pos)
if piece == infoHere.piece && color == infoHere.color {
posl = append(posl, pos)
}
}
}
return posl
}
func GetPiecesByColor(board Board, color PieceColor) []Position {
posl := make([]Position, 0, 4)
for x := 0; x < 8; x ++ {
for y := 0; y < 8; y ++ {
pos := Position{x, y}
infoHere := GetBoardAt(board, pos)
if color == infoHere.color && infoHere.piece != Piece_Empty {
posl = append(posl, pos)
}
}
}
return posl
}
func fillInitialBoardSide(board Board, piecesRow, pawnsRow int, color PieceColor, testBoard bool) Board {
for i := 0; i < 8; i ++ {
SetBoardAt(&board, Position{i, pawnsRow}, PieceInfo{ Piece_Pawn, PieceStatus_Default, color })
}
SetBoardAt(&board, Position{0, piecesRow}, PieceInfo{ Piece_Rock, PieceStatus_Default, color })
SetBoardAt(&board, Position{7, piecesRow}, PieceInfo{ Piece_Rock, PieceStatus_Default, color })
SetBoardAt(&board, Position{4, piecesRow}, PieceInfo{ Piece_King, PieceStatus_Default, color })
if !testBoard {
SetBoardAt(&board, Position{1, piecesRow}, PieceInfo{ Piece_Knight, PieceStatus_Default, color })
SetBoardAt(&board, Position{6, piecesRow}, PieceInfo{ Piece_Knight, PieceStatus_Default, color })
SetBoardAt(&board, Position{2, piecesRow}, PieceInfo{ Piece_Bishop, PieceStatus_Default, color })
SetBoardAt(&board, Position{5, piecesRow}, PieceInfo{ Piece_Bishop, PieceStatus_Default, color })
SetBoardAt(&board, Position{3, piecesRow}, PieceInfo{ Piece_Queen, PieceStatus_Default, color })
}
return board
}
func InitialBoard(testBoard bool) Board {
var board Board
board = fillInitialBoardSide(board, 0, 1, PieceColor_Black, testBoard)
board = fillInitialBoardSide(board, 7, 6, PieceColor_White, testBoard)
return board
} | src/chessAI/board.go | 0.688364 | 0.57087 | board.go | starcoder |
package gorgonia
import (
"fmt"
"math"
tf32 "github.com/chewxy/gorgonia/tensor/f32"
tf64 "github.com/chewxy/gorgonia/tensor/f64"
ti "github.com/chewxy/gorgonia/tensor/i"
"github.com/chewxy/gorgonia/tensor/types"
"github.com/gonum/graph"
)
func graphNodeToNode(in []graph.Node) (out Nodes) {
out = make(Nodes, len(in))
for i, n := range in {
out[i] = n.(*Node) // will panic if not. which is a good thng
}
return
}
func nodeToGraphNode(in []*Node) (out []graph.Node) {
out = make([]graph.Node, len(in))
for i, n := range in {
out[i] = n
}
return
}
func dtypeToDtype(t types.Dtype) Dtype {
if t >= types.MAXDTYPE || Dtype(t) >= Ptr {
panic("Unsupported Dtype")
}
return Dtype(t)
}
func dtypeToTensorDtype(t Dtype) types.Dtype {
if t >= Ptr || types.Dtype(t) >= types.MAXDTYPE {
panic("Unsupported Dtype")
}
return types.Dtype(t)
}
func tensorInfo(t types.Tensor) (dt Dtype, dim int) {
tdt := t.Dtype()
dt = dtypeToDtype(tdt)
dim = t.Dims()
return
}
func cloneNodes(node Nodes, replacements map[*Node]*Node) Nodes {
return nil
}
// takes any value and returns the Node equivalent
func anyToNode(val interface{}) *Node {
switch v := val.(type) {
case *Node:
return v
case types.Tensor:
case Tensor:
/*case Tuple:*/
default:
s := NewScalarValue(val)
return NewConstant(s)
}
return nil
}
func anyToValue(any interface{}) (val Value, err error) {
switch a := any.(type) {
case float64, float32, int, int64, int32, byte, bool:
return NewScalarValue(any), nil
case types.Tensor:
return Tensor{Tensor: a}, nil
case Value:
return a, nil
default:
err = NewError(NotYetImplemented, "value %v of %T not yet handled", any, any)
return
}
panic("Unreachable")
}
// valuesToInts will FORCIBLY cast floats to ints.
func valuesToInts(values []Value) (retVal []int, err error) {
retVal = make([]int, len(values))
for i, v := range values {
sv, ok := v.(Scalar)
if !ok {
err = NewError(typeError, "Expected values to be all Scalar Value. Got %v of %T instead", v, v)
return
}
var intV int
switch vt := sv.v.(type) {
case float64:
intV = int(vt)
case float32:
intV = int(vt)
case int:
intV = vt
default:
err = NewError(TypeError, "Expected ScalarValue to have Int type. Got %v of %v(%T) instead", sv.v, sv.t, sv.v)
return
}
retVal[i] = intV
}
return
}
// intRange returns the integers from start (inclusive) to end (exclusive),
// counting downward when start > end. intRange(2, 2) returns an empty slice.
func intRange(start, end int) []int {
	step := 1
	size := end - start
	if start > end {
		step = -1
		size = start - end
	}
	if size < 0 {
		panic("Cannot create an int range that is somehow negative in size")
	}
	out := make([]int, size)
	v := start
	for i := range out {
		out[i] = v
		v += step
	}
	return out
}
// ones returns a Value filled with 1s of the given Dtype. With no sizes it
// returns a scalar one; otherwise a tensor of the requested shape.
func ones(dt Dtype, sizes ...int) (retVal Value) {
	switch dt {
	case Float64:
		if len(sizes) == 0 {
			retVal = NewScalarValue(float64(1.0))
		} else {
			t := tf64.Ones(sizes...)
			retVal = FromTensor(t)
		}
	case Float32:
		if len(sizes) == 0 {
			// was float64(1.0): the scalar must match the requested dtype
			retVal = NewScalarValue(float32(1.0))
		} else {
			t := tf32.Ones(sizes...)
			retVal = FromTensor(t)
		}
	case Int:
		if len(sizes) == 0 {
			// was float64(1.0): the scalar must match the requested dtype
			retVal = NewScalarValue(int(1))
		} else {
			t := ti.Ones(sizes...)
			retVal = FromTensor(t)
		}
	default:
		// was Sprintf with a %v verb but no argument (go vet printf error)
		panic(fmt.Sprintf("Dtype of %v not yet implemented for ones()", dt))
	}
	return
}
func hasInf(a []float64) bool {
for _, v := range a {
if math.IsInf(v, 0) {
return true
}
}
return false
}
// hasNaN reports whether any element of the Value is NaN. Tensors are scanned
// elementwise, scalars are checked directly, and dual values check both the
// value and its derivative. Non-float scalars can never be NaN.
func hasNaN(v Value) bool {
	switch vt := v.(type) {
	case Tensor:
		switch vt.Dtype() {
		case Float64:
			T := vt.Tensor.(*tf64.Tensor)
			data := T.Data().([]float64)
			for _, datum := range data {
				if math.IsNaN(datum) {
					return true
				}
			}
			return false
		case Float32:
			// was asserting *tf64.Tensor here, which would panic for every
			// float32 tensor (and its Data() could never yield []float32)
			T := vt.Tensor.(*tf32.Tensor)
			data := T.Data().([]float32)
			for _, datum := range data {
				if math.IsNaN(float64(datum)) {
					return true
				}
			}
			return false
		default:
			err := nyi("hasNaN", vt.Dtype())
			panic(err)
		}
	case Scalar:
		switch f := vt.v.(type) {
		case float32:
			return math.IsNaN(float64(f))
		case float64:
			return math.IsNaN(f)
		default:
			return false
		}
	case *dualValue:
		return hasNaN(vt.Value) || hasNaN(vt.d)
	default:
		err := nyi("hasNaN", vt)
		panic(err)
	}
	// every branch above returns or panics; the old trailing
	// panic("Unreachable") was dead code and has been removed
} | utils.go | 0.572006 | 0.403743 | utils.go | starcoder |
package dfl
import (
"github.com/spatialcurrent/go-dfl/pkg/dfl/builder"
)
// BinaryOperator is a DFL Node that represents the binary operator of a left value and right value.
// This struct functions as an embedded struct for many comparator operations.
type BinaryOperator struct {
	Left Node // left-hand operand of the binary expression
	Right Node // right-hand operand of the binary expression
}
// Builder constructs a DFL builder for this expression, rendering the left
// operand, the operator token, and the right operand in order.
func (bo BinaryOperator) Builder(operator string, quotes []string, tabs int) builder.Builder {
	b := builder.New(quotes, tabs)
	b = b.Left(bo.Left)
	b = b.Op(operator)
	return b.Right(bo.Right)
}
// Dfl renders this binary expression as a DFL string. When pretty is true the
// expression is indented — unless either operand is a simple literal (string,
// int, []byte, or Null), in which case the compact single-line form is kept.
func (bo BinaryOperator) Dfl(operator string, quotes []string, pretty bool, tabs int) string {
	b := bo.Builder(operator, quotes, tabs)
	if !pretty {
		return b.Dfl()
	}
	b = b.Indent(tabs)
	// simple literal operands keep the compact rendering
	for _, operand := range []Node{bo.Left, bo.Right} {
		if lit, ok := operand.(*Literal); ok {
			switch lit.Value.(type) {
			case string, int, []byte, Null:
				return b.Dfl()
			}
		}
	}
	return b.Pretty(true).Tabs(tabs).Dfl()
}
// Sql renders this binary expression as a SQL string with no quoting.
// NOTE(review): the pretty parameter is ignored here — confirm whether
// pretty-printing was meant to be supported for SQL output.
func (bo BinaryOperator) Sql(operator string, pretty bool, tabs int) string {
	return builder.New([]string{}, tabs).Left(bo.Left).Op(operator).Right(bo.Right).Sql()
}
// Map returns a map representation of a binary expression with the given
// operator name and the Map() forms of the provided operand nodes.
func (bo BinaryOperator) Map(operator string, left Node, right Node) map[string]interface{} {
	return map[string]interface{}{
		"op": operator,
		"left": left.Map(),
		"right": right.Map(),
	}
}
// EvaluateLeftAndRight evaluates the value of the left node and right node given a context map (ctx) and function map (funcs).
// Returns a 4 value tuple of the (possibly updated) vars map, left value, right value, and error.
// On error, the operand values are nil — they are meaningless and must not be used.
func (bo BinaryOperator) EvaluateLeftAndRight(vars map[string]interface{}, ctx interface{}, funcs FunctionMap, quotes []string) (map[string]interface{}, interface{}, interface{}, error) {
	vars, lv, err := bo.Left.Evaluate(vars, ctx, funcs, quotes)
	if err != nil {
		// was returning false, false — booleans masquerading as operand values
		return vars, nil, nil, err
	}
	vars, rv, err := bo.Right.Evaluate(vars, ctx, funcs, quotes)
	if err != nil {
		return vars, nil, nil, err
	}
	return vars, lv, rv, nil
}
// Attributes returns a slice of all attributes used in the evaluation of this
// node, including children nodes. Values from the left and right operands are
// de-duplicated using a set; the result order is unspecified.
func (bo BinaryOperator) Attributes() []string {
	seen := map[string]struct{}{}
	for _, name := range bo.Left.Attributes() {
		seen[name] = struct{}{}
	}
	for _, name := range bo.Right.Attributes() {
		seen[name] = struct{}{}
	}
	out := make([]string, 0, len(seen))
	for name := range seen {
		out = append(out, name)
	}
	return out
}
// Variables returns a slice of all variables used in the evaluation of this
// node, including children nodes. Values from the left and right operands are
// de-duplicated using a set; the result order is unspecified.
func (bo BinaryOperator) Variables() []string {
	seen := map[string]struct{}{}
	for _, name := range bo.Left.Variables() {
		seen[name] = struct{}{}
	}
	for _, name := range bo.Right.Variables() {
		seen[name] = struct{}{}
	}
	out := make([]string, 0, len(seen))
	for name := range seen {
		out = append(out, name)
	}
	return out
} | pkg/dfl/BinaryOperator.go | 0.816736 | 0.552419 | BinaryOperator.go | starcoder |
package assert
import "reflect"
// valueEqual reports deep equality of two reflect.Values. Unlike calling
// reflect.DeepEqual directly, it also handles values obtained from unexported
// fields, whose Interface() method would panic.
func valueEqual(v1, v2 reflect.Value) bool {
	// Two invalid values are equal; an invalid and a valid one are not.
	if !v1.IsValid() || !v2.IsValid() {
		return v1.IsValid() == v2.IsValid()
	}
	// Fast path: delegate to DeepEqual when both values can be exported.
	if v1.CanInterface() && v2.CanInterface() {
		return reflect.DeepEqual(v1.Interface(), v2.Interface())
	}
	// Strip interface wrappers (derefInterface is defined elsewhere in this
	// package) and retry if either side was unwrapped.
	v1, d1 := derefInterface(v1)
	v2, d2 := derefInterface(v2)
	if d1 || d2 {
		return valueEqual(v1, v2)
	}
	if v1.Type() != v2.Type() {
		return false
	}
	// Kind-by-kind structural comparison for unexportable values.
	switch v1.Kind() {
	case reflect.Bool:
		return v1.Bool() == v2.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v1.Int() == v2.Int()
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return v1.Uint() == v2.Uint()
	case reflect.Float32, reflect.Float64:
		return v1.Float() == v2.Float()
	case reflect.Complex64, reflect.Complex128:
		return v1.Complex() == v2.Complex()
	case reflect.String:
		return v1.String() == v2.String()
	case reflect.Chan, reflect.UnsafePointer:
		return v1.Pointer() == v2.Pointer()
	case reflect.Func:
		// functions are only "equal" when both are nil
		return v1.IsNil() && v2.IsNil()
	case reflect.Ptr:
		if v1.IsNil() || v2.IsNil() {
			return v1.IsNil() && v2.IsNil()
		}
		// identical pointers short-circuit; otherwise compare pointees
		if v1.Pointer() == v2.Pointer() {
			return true
		}
		return valueEqual(v1.Elem(), v2.Elem())
	case reflect.Interface:
		if v1.IsNil() || v2.IsNil() {
			return v1.IsNil() == v2.IsNil()
		}
		return valueEqual(v1.Elem(), v2.Elem())
	case reflect.Slice:
		if v1.IsNil() != v2.IsNil() {
			return false
		}
		if v1.Len() != v2.Len() {
			return false
		}
		// same backing array and length implies equal contents
		if v1.Pointer() == v2.Pointer() {
			return true
		}
		fallthrough
	case reflect.Array:
		for i := 0; i < v1.Len(); i++ {
			if !valueEqual(v1.Index(i), v2.Index(i)) {
				return false
			}
		}
		return true
	case reflect.Map:
		if v1.IsNil() != v2.IsNil() {
			return false
		}
		if v1.Len() != v2.Len() {
			return false
		}
		if v1.Pointer() == v2.Pointer() {
			return true
		}
		for _, k := range v1.MapKeys() {
			if e1, e2 := v1.MapIndex(k), v2.MapIndex(k); !valueEqual(e1, e2) {
				return false
			}
		}
		return true
	case reflect.Struct:
		for i, n := 0, v1.NumField(); i < n; i++ {
			if !valueEqual(v1.Field(i), v2.Field(i)) {
				return false
			}
		}
		return true
	default: // reflect.Invalid
	}
	return false
}
// isNil reports whether a is nil, either literally or as a typed nil carried
// inside the interface (nil pointer, slice, map, chan, or func).
func isNil(a interface{}) bool {
	if a != nil {
		return isNilForValue(reflect.ValueOf(a))
	}
	return true
}
// isNilForValue reports whether the reflect.Value represents a nil value.
// Invalid values count as nil; pointer-like types are checked via Pointer(),
// and other nil-able kinds via IsNil(). isPointer and isNonTrivial are
// defined elsewhere in this package.
func isNilForValue(v reflect.Value) bool {
	if !v.IsValid() {
		return true
	}
	if isPointer(v.Type()) {
		return v.Pointer() == 0
	} else if k := v.Kind(); k != reflect.Array && k != reflect.Struct && isNonTrivial(v.Type()) {
		// arrays and structs can never be nil, so they are excluded here
		return v.IsNil()
	}
	return false
}
// isSameInValue reports whether e and a are equal, allowing values of
// different but convertible types (e.g. int vs uint) to compare equal when
// their underlying values match.
func isSameInValue(e, a interface{}) bool {
	switch {
	case reflect.DeepEqual(e, a):
		return true
	case e == nil || a == nil:
		return isNil(e) && isNil(a)
	default:
		return convertCompare(reflect.ValueOf(e), reflect.ValueOf(a))
	}
}
// convertCompare compares two reflect.Values after stripping interface
// wrappers (derefInterface is defined elsewhere in this package), trying the
// conversion in both directions since convertibility is not symmetric.
func convertCompare(v1, v2 reflect.Value) bool {
	v1, _ = derefInterface(v1)
	v2, _ = derefInterface(v2)
	if !v1.IsValid() || !v2.IsValid() {
		return isNilForValue(v1) && isNilForValue(v2)
	}
	return convertCompareB(v1, v2) || convertCompareB(v2, v1)
}
// convertCompareB dispatches on the kind of the target value t and compares f
// against it with the matching kind-specific comparison. Kinds without a
// specialized path fall back to a plain convert-and-compare.
func convertCompareB(f, t reflect.Value) bool {
	switch t.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return convertCompareInt(f, t)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return convertCompareUint(f, t)
	case reflect.Float32, reflect.Float64:
		return convertCompareFloat(f, t)
	case reflect.Complex64, reflect.Complex128:
		return convertCompareComplex(f, t)
	case reflect.String:
		return convertCompareString(f, t)
	case reflect.Ptr, reflect.UnsafePointer:
		return convertComparePtr(f, t)
	case reflect.Array, reflect.Slice:
		return convertCompareArray(f, t)
	case reflect.Map:
		return convertCompareMap(f, t)
	case reflect.Struct:
		return convertCompareStruct(f, t)
	}
	return convertCompareC(f, t)
}
// convertCompareInt compares f against a signed-integer value t. Mixed
// numeric kinds are delegated (with operands swapped) so that each numeric
// pairing is implemented exactly once.
func convertCompareInt(f, t reflect.Value) bool {
	switch f.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return f.Int() == t.Int()
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return convertCompareUint(t, f)
	case reflect.Float32, reflect.Float64:
		return convertCompareFloat(t, f)
	case reflect.Complex64, reflect.Complex128:
		return convertCompareComplex(t, f)
	}
	return convertCompareC(f, t)
}
// convertCompareUint compares f against an unsigned-integer value t. A
// negative signed integer can never equal an unsigned value, so that case is
// rejected before widening.
func convertCompareUint(f, t reflect.Value) bool {
	v := t.Uint()
	switch f.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		signed := f.Int()
		return signed >= 0 && uint64(signed) == v
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return f.Uint() == v
	case reflect.Float32, reflect.Float64:
		return convertCompareFloat(t, f)
	case reflect.Complex64, reflect.Complex128:
		return convertCompareComplex(t, f)
	}
	return convertCompareC(f, t)
}
// convertCompareFloat compares f against a floating-point value t, widening
// integer operands to float64 for the comparison.
func convertCompareFloat(f, t reflect.Value) bool {
	v := t.Float()
	switch f.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(f.Int()) == v
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return float64(f.Uint()) == v
	case reflect.Float32, reflect.Float64:
		return f.Float() == v
	case reflect.Complex64, reflect.Complex128:
		return convertCompareComplex(t, f)
	}
	return convertCompareC(f, t)
}
// convertCompareComplex compares f against a complex value t. A real-kinded f
// only matches when t's imaginary part is zero and the real parts agree.
func convertCompareComplex(f, t reflect.Value) bool {
	v := t.Complex()
	switch f.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return imag(v) == 0 && float64(f.Int()) == real(v)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return imag(v) == 0 && float64(f.Uint()) == real(v)
	case reflect.Float32, reflect.Float64:
		return imag(v) == 0 && f.Float() == real(v)
	case reflect.Complex64, reflect.Complex128:
		return f.Complex() == v
	}
	return convertCompareC(f, t)
}
// convertCompareString compares f against a string value t. A byte slice or
// rune slice matches when it holds the same content as the string.
func convertCompareString(f, t reflect.Value) bool {
	switch f.Kind() {
	case reflect.String:
		return f.String() == t.String()
	case reflect.Array, reflect.Slice:
		if e := f.Type().Elem().Kind(); e == reflect.Uint8 {
			return convertCompareArray(f, reflect.ValueOf([]byte(t.String())))
		} else if e == reflect.Int32 {
			// int32 element kind covers rune slices
			return convertCompareArray(f, reflect.ValueOf([]rune(t.String())))
		}
	}
	return convertCompareC(f, t)
}
// convertComparePtr compares f against a pointer-kinded value t by address.
// Two typed pointers of different types are never equal; a typed pointer only
// cross-matches an unsafe.Pointer with the same address.
func convertComparePtr(f, t reflect.Value) bool {
	v := t.Pointer()
	switch f.Kind() {
	case reflect.UnsafePointer:
		return f.Pointer() == v
	case reflect.Ptr:
		return t.Kind() == reflect.UnsafePointer && f.Pointer() == v // diff type pointers are NOT equal
	}
	return convertCompareC(f, t)
}
// convertCompareArray compares f (an array or non-nil slice) elementwise
// against t. A nil slice on either side skips the elementwise path and falls
// through to the generic convert-and-compare. Two empty sequences match when
// their element types are convertible.
func convertCompareArray(f, t reflect.Value) bool {
	if t.Kind() != reflect.Slice || !t.IsNil() {
		switch f.Kind() {
		case reflect.Slice:
			if f.IsNil() {
				break
			}
			// a non-nil slice is compared exactly like an array
			fallthrough
		case reflect.Array:
			if f.Len() != t.Len() {
				return false
			}
			if f.Len() == 0 {
				// convertible is defined elsewhere in this package
				return convertible(f.Type().Elem(), t.Type().Elem())
			}
			for i := 0; i < f.Len(); i++ {
				if !convertCompare(f.Index(i), t.Index(i)) {
					return false
				}
			}
			return true
		}
	}
	return convertCompareC(f, t)
}
// NOTE: Map keys must be exactly equal (both type and value), e.g. int(100) and uint(100) are
// different keys.
//
// convertCompareMap compares f (a non-nil map) key-by-key against t. Values
// are compared with convertCompare; keys must match exactly via valueEqual.
// Two empty maps match when their element types are convertible.
func convertCompareMap(f, t reflect.Value) bool {
	if !t.IsNil() && f.Kind() == reflect.Map && !f.IsNil() {
		// convertibleKeyTo is defined elsewhere in this package
		if f.Len() != t.Len() || !convertibleKeyTo(f.Type().Key(), t.Type().Key()) {
			return false
		}
		if f.Len() == 0 {
			return convertible(f.Type().Elem(), t.Type().Elem())
		}
		ks := t.MapKeys()
		// find reports whether k1 is present (exactly) among t's keys
		find := func(k1 reflect.Value) bool {
			for _, k2 := range ks {
				if valueEqual(k1, k2) {
					return true
				}
			}
			return false
		}
		for _, k := range f.MapKeys() {
			if !find(k) {
				return false
			}
			if !convertCompare(f.MapIndex(k), t.MapIndex(k)) {
				return false
			}
		}
		return true
	}
	return convertCompareC(f, t)
}
// convertCompareStruct compares two structs of the same type field-by-field
// with convertCompare; differing struct types fall back to the generic
// convert-and-compare.
func convertCompareStruct(f, t reflect.Value) bool {
	if f.Type() == t.Type() {
		for i := 0; i < f.NumField(); i++ {
			if !convertCompare(f.Field(i), t.Field(i)) {
				return false
			}
		}
		return true
	}
	return convertCompareC(f, t)
}
// convertCompareC is the generic fallback: convert f to t's type when the
// conversion is legal, then compare the converted value for deep equality.
func convertCompareC(f, t reflect.Value) bool {
	if f.Type().ConvertibleTo(t.Type()) {
		return valueEqual(f.Convert(t.Type()), t)
	}
	return false
} | assert/predict.go | 0.558809 | 0.583678 | predict.go | starcoder |
package three
import "github.com/go-gl/mathgl/mgl32"
// TextGeometry defines the geometry of 2D text.
type TextGeometry struct {
	Vertices []mgl32.Vec2 // screen-space triangle vertices, six per character
	UVs []mgl32.Vec2 // font-atlas texture coordinates matching Vertices
	Text string // the rendered string
	Position mgl32.Vec2 // anchor position used to build the vertices
	Size float32 // per-character quad size in pixels
	Font *Font // font whose atlas the UVs index into
}
// NewTextGeometry creates a new 2D text geometry for the given text, building
// the vertex and UV buffers up front.
func NewTextGeometry(text string, position mgl32.Vec2, size float32, font *Font) *TextGeometry {
	vertices, uvs := createTextVertices(text, position, size, font)
	return &TextGeometry{
		Vertices: vertices,
		UVs:      uvs,
		Text:     text,
		Size:     size,
		Position: position,
		Font:     font,
	}
}
// updateVertices regenerates the vertex and UV buffers for new text content,
// keeping the existing position, size, and font.
func (t *TextGeometry) updateVertices(text string) {
	t.Vertices, t.UVs = createTextVertices(text, t.Position, t.Size, t.Font)
}
// createTextVertices builds two screen-space triangles per character plus the
// matching font-atlas texture coordinates. The Y coordinate is flipped
// against the window height, as in the original implementation.
func createTextVertices(text string, position mgl32.Vec2, size float32, font *Font) (vertices []mgl32.Vec2, uvs []mgl32.Vec2) {
	originX := position.X()
	originY := float32(currentWindow.Height()) - position.Y()
	column := 0
	for _, char := range text {
		// was using the byte index from `range text`, which skips columns for
		// multi-byte (non-ASCII) runes; count characters explicitly instead
		i := float32(column)
		column++
		left := originX + i*size
		right := left + size
		upLeft := mgl32.Vec2{left, originY + size}
		upRight := mgl32.Vec2{right, originY + size}
		downRight := mgl32.Vec2{right, originY}
		downLeft := mgl32.Vec2{left, originY}
		vertices = append(vertices, upLeft, downLeft, upRight)
		vertices = append(vertices, downRight, upRight, downLeft)
		glyph := font.font.Glyphs().Find(string(char))
		atlasWidth := float32(font.font.Width)
		atlasHeight := float32(font.font.Height)
		// renamed from x/y: the originals shadowed the screen coordinates
		uvX := float32(glyph.X) / atlasWidth
		uvY := (atlasHeight - float32(glyph.Y)) / atlasHeight
		uvWidth := float32(glyph.Width) / atlasWidth
		uvHeight := float32(glyph.Height) / atlasHeight
		uvUpLeft := mgl32.Vec2{uvX, uvY}
		uvUpRight := mgl32.Vec2{uvX + uvWidth, uvY}
		uvDownRight := mgl32.Vec2{uvX + uvWidth, uvY - uvHeight}
		uvDownLeft := mgl32.Vec2{uvX, uvY - uvHeight}
		uvs = append(uvs, uvUpLeft, uvDownLeft, uvUpRight)
		uvs = append(uvs, uvDownRight, uvUpRight, uvDownLeft)
	}
	return
} | text_geometry.go | 0.799481 | 0.580203 | text_geometry.go | starcoder |
package ha
import (
"context"
"github.com/atomix/go-client/pkg/client/map"
"github.com/atomix/go-client/pkg/client/session"
"github.com/google/uuid"
"github.com/onosproject/onos-test/pkg/onit/env"
"github.com/stretchr/testify/assert"
"testing"
"time"
)
// TestRaftHA : integration test. Exercises Raft fault tolerance by killing
// one node per partition each round and verifying that map writes, reads, and
// watch events keep working while the cluster recovers.
func (s *TestSuite) TestRaftHA(t *testing.T) {
	partitions := env.Database().Partitions("protocol")
	group, err := partitions.Connect()
	assert.NoError(t, err)
	assert.NotNil(t, group)
	m, err := group.GetMap(context.Background(), "TestRaftHA", session.WithTimeout(5*time.Second))
	assert.NoError(t, err)
	// Watch the map so every Put below can be matched against an event.
	ch := make(chan *_map.Event)
	err = m.Watch(context.Background(), ch)
	assert.NoError(t, err)
	// Baseline sanity checks before any node is killed.
	key := uuid.New().String()
	entry, err := m.Put(context.Background(), key, []byte("foo"))
	assert.NoError(t, err)
	assert.Equal(t, key, entry.Key)
	assert.Equal(t, "foo", string(entry.Value))
	version := entry.Version
	event := <-ch
	assert.Equal(t, _map.EventInserted, event.Type)
	assert.Equal(t, key, event.Entry.Key)
	assert.Equal(t, "foo", string(event.Entry.Value))
	assert.Equal(t, version, event.Entry.Version)
	entry, err = m.Get(context.Background(), key)
	assert.NoError(t, err)
	assert.Equal(t, key, entry.Key)
	assert.Equal(t, "foo", string(entry.Value))
	assert.Equal(t, version, entry.Version)
	key = uuid.New().String()
	entry, err = m.Put(context.Background(), key, []byte("bar"))
	assert.NoError(t, err)
	assert.Equal(t, key, entry.Key)
	assert.Equal(t, "bar", string(entry.Value))
	event = <-ch
	assert.Equal(t, _map.EventInserted, event.Type)
	assert.Equal(t, key, event.Entry.Key)
	assert.Equal(t, "bar", string(event.Entry.Value))
	key = uuid.New().String()
	entry, err = m.Put(context.Background(), key, []byte("baz"))
	assert.NoError(t, err)
	assert.Equal(t, key, entry.Key)
	event = <-ch
	assert.Equal(t, _map.EventInserted, event.Type)
	assert.Equal(t, key, event.Entry.Key)
	assert.Equal(t, "baz", string(event.Entry.Value))
	// Fault-injection rounds: kill the i'th node of every partition, then
	// verify the map still serves reads/writes and delivers events. The test
	// ends when a partition has a single node left or i runs past its size.
	i := 0
	for {
		for _, partition := range partitions.List() {
			if len(partition.Nodes()) == 1 || len(partition.Nodes()) <= i {
				return
			}
			key := uuid.New().String()
			entry, err = m.Put(context.Background(), key, []byte(uuid.New().String()))
			assert.NoError(t, err)
			assert.Equal(t, key, entry.Key)
			t.Logf("Killing Raft node %s", partition.Nodes()[i].Name())
			err = partition.Nodes()[i].Kill()
			assert.NoError(t, err)
			event = <-ch
			assert.Equal(t, key, event.Entry.Key)
			entry, err = m.Get(context.Background(), key)
			assert.NoError(t, err)
			assert.Equal(t, key, entry.Key)
		}
		// was "Sleeping for 15 seconds", which contradicted the actual sleep
		t.Log("Sleeping for 10 seconds")
		time.Sleep(10 * time.Second)
		for range partitions.List() {
			key := uuid.New().String()
			entry, err = m.Put(context.Background(), key, []byte(uuid.New().String()))
			assert.NoError(t, err)
			assert.Equal(t, key, entry.Key)
			event = <-ch
			assert.Equal(t, key, event.Entry.Key)
			entry, err = m.Get(context.Background(), key)
			assert.NoError(t, err)
			assert.Equal(t, key, entry.Key)
		}
		t.Log("Waiting for pods to recover")
		for _, partition := range partitions.List() {
			err = partition.AwaitReady()
			assert.NoError(t, err)
		}
		i++
	}
} | test/ha/hatest.go | 0.533397 | 0.579192 | hatest.go | starcoder |
Package game contains the implementation of the state management and rules engine type.
The Game type manages the state and provides the rules engine interface. This
interface is described as the two actions a player may take each turn. Those are Place
and Move. A Place action is where a player takes a piece from their pool and sets it
at a particular coordinate on the board while Move is where the player updates the
location of a piece that has already been placed.
Basics
During each turn, each player may perform one of the actions; if the action they attempt
to perform generates an error, the action receiver function will return an error. If the
error is a violation of the game rules, a Rules Error will be returned. If the error is
due to a logic failure or a state corruption then other error types may be returned.
Features
The engine has implemented feature flags for rules beyond the base game. These rules
may be toggled on and off at the instantiation of the game type.
Types and Values
The Game type should act as the primary interface for the library if you want to just
provide a client or server wrapper around the the game. If you're looking to implement
your own game the rest of the types within the hive package are at your disposal.
Errors
The Game type will return two types of errors Rule and State. Rule errors are returned
when the player action made violates a game rule. A state error is returned when the
attempted interaction with the game state is invalid.
State Errors
- ErrGameNotOver : Returned when using the Winner interface and the game hasn't reached an end state.
- ErrUnknownPiece : Returned when attempting to place a piece that isn't recognized by the engine.
- ErrUnknownBoardError : Returned if there is an unexpected error while updating the state of the board.
Rule Errors
Returned when either a Place or a Move action violates a rule of the game. For more information see
the rules file. These errors should be very specific and clear when compared to the rules of the game.
*/
package game | game/doc.go | 0.841793 | 0.969757 | doc.go | starcoder |
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// Notification models a user notification resource, carrying its content,
// lifetime, grouping, and delivery-targeting settings.
type Notification struct {
    Entity
    // Sets how long (in seconds) this notification content will stay in each platform's notification viewer. For example, when the notification is delivered to a Windows device, the value of this property is passed on to ToastNotification.ExpirationTime, which determines how long the toast notification will stay in the user's Windows Action Center.
    displayTimeToLive *int32
    // Sets a UTC expiration date and time on a user notification using ISO 8601 format (for example, midnight UTC on Jan 1, 2019 would look like this: '2019-01-01T00:00:00Z'). When time is up, the notification is removed from the Microsoft Graph notification feed store completely and is no longer part of notification history. Max value is 30 days.
    expirationDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // The name of the group that this notification belongs to. It is set by the developer for the purpose of grouping notifications together.
    groupName *string
    // The payload carried by this notification (visual and/or raw content).
    payload PayloadTypesable
    // Indicates the priority of a raw user notification. Visual notifications are sent with high priority by default. Valid values are None, High and Low.
    priority *Priority
    // Represents the host name of the app to which the calling service wants to post the notification, for the given user. If targeting web endpoints (see targetPolicy.platformTypes), ensure that targetHostName is the same as the name used when creating a subscription on the client side within the application JSON property.
    targetHostName *string
    // Target policy object handles notification delivery policy for endpoint types that should be targeted (Windows, iOS, Android and WebPush) for the given user.
    targetPolicy TargetPolicyEndpointsable
}
// NewNotification instantiates a new notification and sets the default values.
func NewNotification() *Notification {
	return &Notification{
		Entity: *NewEntity(),
	}
}
// CreateNotificationFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is unused because notification has no derived types to discriminate between.
func CreateNotificationFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewNotification(), nil
}
// GetDisplayTimeToLive gets the displayTimeToLive property value: how long
// (in seconds) the notification content stays in each platform's viewer.
// Returns nil on a nil receiver.
func (m *Notification) GetDisplayTimeToLive() *int32 {
	if m == nil {
		return nil
	}
	return m.displayTimeToLive
}
// GetExpirationDateTime gets the expirationDateTime property value: the UTC
// instant at which the notification is removed from the feed store.
// Returns nil on a nil receiver.
func (m *Notification) GetExpirationDateTime() *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time {
	if m == nil {
		return nil
	}
	return m.expirationDateTime
}
// GetFieldDeserializers the deserialization information for the current model.
// Each entry maps a JSON property name to a closure that parses that property
// from the node and stores it on the model; entries inherited from the
// embedded Entity are included.
func (m *Notification) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    res["displayTimeToLive"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDisplayTimeToLive(val)
        }
        return nil
    }
    res["expirationDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetExpirationDateTime(val)
        }
        return nil
    }
    res["groupName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetGroupName(val)
        }
        return nil
    }
    res["payload"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreatePayloadTypesFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetPayload(val.(PayloadTypesable))
        }
        return nil
    }
    res["priority"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        // priority is serialized as an enum name; ParsePriority maps it back
        val, err := n.GetEnumValue(ParsePriority)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetPriority(val.(*Priority))
        }
        return nil
    }
    res["targetHostName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTargetHostName(val)
        }
        return nil
    }
    res["targetPolicy"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateTargetPolicyEndpointsFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTargetPolicy(val.(TargetPolicyEndpointsable))
        }
        return nil
    }
    return res
}
// GetGroupName gets the groupName property value: the developer-assigned name
// of the group this notification belongs to. Returns nil on a nil receiver.
func (m *Notification) GetGroupName() *string {
	if m == nil {
		return nil
	}
	return m.groupName
}
// GetPayload gets the payload property value: the notification content.
// Returns nil on a nil receiver.
func (m *Notification) GetPayload() PayloadTypesable {
	if m == nil {
		return nil
	}
	return m.payload
}
// GetPriority gets the priority property value of a raw user notification
// (None, High, or Low). Returns nil on a nil receiver.
func (m *Notification) GetPriority() *Priority {
	if m == nil {
		return nil
	}
	return m.priority
}
// GetTargetHostName gets the targetHostName property value: the host name of
// the app the notification is posted to. Returns nil on a nil receiver.
func (m *Notification) GetTargetHostName() *string {
	if m == nil {
		return nil
	}
	return m.targetHostName
}
// GetTargetPolicy gets the targetPolicy property value: the delivery policy
// for the targeted endpoint types. Returns nil on a nil receiver.
func (m *Notification) GetTargetPolicy() TargetPolicyEndpointsable {
	if m == nil {
		return nil
	}
	return m.targetPolicy
}
// Serialize serializes information the current object, writing the embedded
// Entity fields first and then each notification property. The priority enum
// is written as its string name. Returns the first write error encountered.
func (m *Notification) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    {
        err = writer.WriteInt32Value("displayTimeToLive", m.GetDisplayTimeToLive())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteTimeValue("expirationDateTime", m.GetExpirationDateTime())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("groupName", m.GetGroupName())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("payload", m.GetPayload())
        if err != nil {
            return err
        }
    }
    // priority is only written when set, as its enum name
    if m.GetPriority() != nil {
        cast := (*m.GetPriority()).String()
        err = writer.WriteStringValue("priority", &cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("targetHostName", m.GetTargetHostName())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("targetPolicy", m.GetTargetPolicy())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetDisplayTimeToLive sets the displayTimeToLive property value: how long
// (in seconds) the content stays in each platform's viewer. No-op on a nil
// receiver.
func (m *Notification) SetDisplayTimeToLive(value *int32) {
	if m == nil {
		return
	}
	m.displayTimeToLive = value
}
// SetExpirationDateTime sets the expirationDateTime property value: the UTC
// instant at which the notification expires. No-op on a nil receiver.
func (m *Notification) SetExpirationDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
	if m == nil {
		return
	}
	m.expirationDateTime = value
}
// SetGroupName sets the groupName property value: the developer-assigned
// grouping name. No-op on a nil receiver.
func (m *Notification) SetGroupName(value *string) {
	if m == nil {
		return
	}
	m.groupName = value
}
// SetPayload sets the payload property value: the notification content.
// No-op on a nil receiver.
func (m *Notification) SetPayload(value PayloadTypesable) {
	if m == nil {
		return
	}
	m.payload = value
}
// SetPriority sets the priority property value of a raw user notification
// (None, High, or Low). No-op on a nil receiver.
func (m *Notification) SetPriority(value *Priority) {
	if m == nil {
		return
	}
	m.priority = value
}
// SetTargetHostName sets the targetHostName property value: the host name of
// the app the notification is posted to. No-op on a nil receiver.
func (m *Notification) SetTargetHostName(value *string) {
	if m == nil {
		return
	}
	m.targetHostName = value
}
// SetTargetPolicy sets the targetPolicy property value: the delivery policy
// for the targeted endpoint types. No-op on a nil receiver.
func (m *Notification) SetTargetPolicy(value TargetPolicyEndpointsable) {
	if m == nil {
		return
	}
	m.targetPolicy = value
} | models/notification.go | 0.673943 | 0.420124 | notification.go | starcoder |
package descriptor
import (
"math"
"go.einride.tech/can"
)
// Signal describes a CAN signal.
type Signal struct {
	// Name of the signal.
	Name string
	// LongName of the signal.
	LongName string
	// Start bit of the signal within the frame payload.
	Start uint16
	// Length of the signal in bits.
	Length uint16
	// IsBigEndian is true if the signal is big-endian.
	IsBigEndian bool
	// IsSigned is true if the signal uses raw signed values.
	IsSigned bool
	// IsMultiplexer is true if the signal is the multiplexor of a multiplexed message.
	IsMultiplexer bool
	// IsMultiplexed is true if the signal is multiplexed.
	IsMultiplexed bool
	// MultiplexerValue is the value of the multiplexer when this signal is present.
	MultiplexerValue uint
	// Offset for the raw-to-physical (real-world) transform.
	Offset float64
	// Scale for the raw-to-physical (real-world) transform.
	Scale float64
	// Min real-world value (with Max, defines the clamping range; both zero means no range).
	Min float64
	// Max real-world value (with Min, defines the clamping range; both zero means no range).
	Max float64
	// Unit of the signal.
	Unit string
	// Description of the signal.
	Description string
	// ValueDescriptions of the signal, mapping raw values to human-readable text.
	ValueDescriptions []*ValueDescription
	// ReceiverNodes is the list of names of the nodes receiving the signal.
	ReceiverNodes []string
	// DefaultValue of the signal.
	DefaultValue int
}
// DecodedSignal holds the result of decoding a signal from a CAN frame.
type DecodedSignal struct {
	// Value is the physical value of the decoded signal.
	Value float64
	// Description is the textual value description of the decoded signal.
	Description string
	// Signal is a pointer to the descriptor of the decoded signal.
	Signal *Signal
}
// ValueDescription returns the textual description registered for the
// provided raw value, and whether one was found.
func (s *Signal) ValueDescription(value int) (string, bool) {
	for i := range s.ValueDescriptions {
		if vd := s.ValueDescriptions[i]; vd.Value == value {
			return vd.Description, true
		}
	}
	return "", false
}
// ToPhysical converts a raw signal value to its physical value by applying
// the signal's scale and offset, then clamping to [Min, Max].
func (s *Signal) ToPhysical(value float64) float64 {
	phys := value*s.Scale + s.Offset
	// Min == Max == 0 means no physical range was specified, so skip clamping.
	if s.Min != 0 || s.Max != 0 {
		phys = math.Max(math.Min(phys, s.Max), s.Min)
	}
	return phys
}
// FromPhysical converts a physical signal value to its raw value: the input
// is clamped to [Min, Max], the inverse scale/offset transform is applied,
// and the result is saturated to the signal's representable raw domain.
func (s *Signal) FromPhysical(physical float64) float64 {
	raw := physical
	// Min == Max == 0 means no physical range was specified, so skip clamping.
	if s.Min != 0 || s.Max != 0 {
		raw = math.Max(math.Min(raw, s.Max), s.Min)
	}
	raw = (raw - s.Offset) / s.Scale
	// Saturate to the raw value domain (signed or unsigned) of the signal.
	if s.IsSigned {
		return math.Max(float64(s.MinSigned()), math.Min(float64(s.MaxSigned()), raw))
	}
	return math.Max(0, math.Min(float64(s.MaxUnsigned()), raw))
}
// UnmarshalPhysical returns the physical value of the signal in the provided
// CAN frame, applying the scale/offset transform (with Min/Max clamping) via
// ToPhysical. One-bit signals decode directly to 0 or 1.
func (s *Signal) UnmarshalPhysical(d can.Data) float64 {
	start, length := uint8(s.Start), uint8(s.Length)
	if length == 1 {
		if d.Bit(start) {
			return 1
		}
		return 0
	}
	if s.IsSigned {
		var raw int64
		if s.IsBigEndian {
			raw = d.SignedBitsBigEndian(start, length)
		} else {
			raw = d.SignedBitsLittleEndian(start, length)
		}
		return s.ToPhysical(float64(raw))
	}
	var raw uint64
	if s.IsBigEndian {
		raw = d.UnsignedBitsBigEndian(start, length)
	} else {
		raw = d.UnsignedBitsLittleEndian(start, length)
	}
	return s.ToPhysical(float64(raw))
}
// Decode returns the physical value of the signal in the provided CAN frame.
//
// Unlike UnmarshalPhysical, the scale/offset transform is applied inline and
// the result is not clamped to the signal's Min/Max range.
func (s *Signal) Decode(d can.Data) float64 {
	start, length := uint8(s.Start), uint8(s.Length)
	if length == 1 {
		// One-bit signals decode directly to 0 or 1.
		if d.Bit(start) {
			return 1
		}
		return 0
	}
	if s.IsSigned {
		var raw int64
		if s.IsBigEndian {
			raw = d.SignedBitsBigEndian(start, length)
		} else {
			raw = d.SignedBitsLittleEndian(start, length)
		}
		return s.Offset + float64(raw)*s.Scale
	}
	var raw uint64
	if s.IsBigEndian {
		raw = d.UnsignedBitsBigEndian(start, length)
	} else {
		raw = d.UnsignedBitsLittleEndian(start, length)
	}
	return s.Offset + float64(raw)*s.Scale
}
// UnmarshalPhysicalPayload returns the physical value of the signal in the
// provided CAN payload, applying the scale/offset transform (with Min/Max
// clamping) via ToPhysical. One-bit signals decode directly to 0 or 1.
func (s *Signal) UnmarshalPhysicalPayload(p *can.Payload) float64 {
	if uint8(s.Length) == 1 {
		if p.Bit(s.Start) {
			return 1
		}
		return 0
	}
	if s.IsSigned {
		var raw int64
		if s.IsBigEndian {
			raw = p.SignedBitsBigEndian(s.Start, s.Length)
		} else {
			raw = p.SignedBitsLittleEndian(s.Start, s.Length)
		}
		return s.ToPhysical(float64(raw))
	}
	var raw uint64
	if s.IsBigEndian {
		raw = p.UnsignedBitsBigEndian(s.Start, s.Length)
	} else {
		raw = p.UnsignedBitsLittleEndian(s.Start, s.Length)
	}
	return s.ToPhysical(float64(raw))
}
// DecodePayload returns the physical value of the signal in the provided CAN
// payload.
//
// Unlike UnmarshalPhysicalPayload, the scale/offset transform is applied
// inline and the result is not clamped to the signal's Min/Max range.
func (s *Signal) DecodePayload(p *can.Payload) float64 {
	if uint8(s.Length) == 1 {
		// One-bit signals decode directly to 0 or 1.
		if p.Bit(s.Start) {
			return 1
		}
		return 0
	}
	if s.IsSigned {
		var raw int64
		if s.IsBigEndian {
			raw = p.SignedBitsBigEndian(s.Start, s.Length)
		} else {
			raw = p.SignedBitsLittleEndian(s.Start, s.Length)
		}
		return s.Offset + float64(raw)*s.Scale
	}
	var raw uint64
	if s.IsBigEndian {
		raw = p.UnsignedBitsBigEndian(s.Start, s.Length)
	} else {
		raw = p.UnsignedBitsLittleEndian(s.Start, s.Length)
	}
	return s.Offset + float64(raw)*s.Scale
}
// UnmarshalUnsigned returns the raw unsigned value of the signal in the
// provided CAN frame, honoring the signal's byte order.
func (s *Signal) UnmarshalUnsigned(d can.Data) uint64 {
	start, length := uint8(s.Start), uint8(s.Length)
	if !s.IsBigEndian {
		return d.UnsignedBitsLittleEndian(start, length)
	}
	return d.UnsignedBitsBigEndian(start, length)
}
// UnmarshalUnsignedPayload returns the raw unsigned value of the signal in
// the provided CAN payload, honoring the signal's byte order.
func (s *Signal) UnmarshalUnsignedPayload(p *can.Payload) uint64 {
	if !s.IsBigEndian {
		return p.UnsignedBitsLittleEndian(s.Start, s.Length)
	}
	return p.UnsignedBitsBigEndian(s.Start, s.Length)
}
// UnmarshalValueDescription returns the value description matching the raw
// value of the signal in the provided CAN data, and whether one was found.
func (s *Signal) UnmarshalValueDescription(d can.Data) (string, bool) {
	// Skip extracting the raw value when no descriptions are registered.
	if len(s.ValueDescriptions) == 0 {
		return "", false
	}
	if s.IsSigned {
		return s.ValueDescription(int(s.UnmarshalSigned(d)))
	}
	return s.ValueDescription(int(s.UnmarshalUnsigned(d)))
}
// UnmarshalValueDescriptionPayload returns the value description matching the
// raw value of the signal in the provided CAN payload, and whether one was
// found.
func (s *Signal) UnmarshalValueDescriptionPayload(p *can.Payload) (string, bool) {
	// Skip extracting the raw value when no descriptions are registered.
	if len(s.ValueDescriptions) == 0 {
		return "", false
	}
	if s.IsSigned {
		return s.ValueDescription(int(s.UnmarshalSignedPayload(p)))
	}
	return s.ValueDescription(int(s.UnmarshalUnsignedPayload(p)))
}
// UnmarshalSigned returns the raw signed value of the signal in the provided
// CAN frame, honoring the signal's byte order.
func (s *Signal) UnmarshalSigned(d can.Data) int64 {
	start, length := uint8(s.Start), uint8(s.Length)
	if !s.IsBigEndian {
		return d.SignedBitsLittleEndian(start, length)
	}
	return d.SignedBitsBigEndian(start, length)
}
// UnmarshalSignedPayload returns the raw signed value of the signal in the
// provided CAN payload, honoring the signal's byte order.
func (s *Signal) UnmarshalSignedPayload(p *can.Payload) int64 {
	if !s.IsBigEndian {
		return p.SignedBitsLittleEndian(s.Start, s.Length)
	}
	return p.SignedBitsBigEndian(s.Start, s.Length)
}
// UnmarshalBool returns the bool value of the signal in the provided CAN frame.
//
// Only the single start bit is read; Length is not consulted, so this is
// meaningful only for one-bit signals.
func (s *Signal) UnmarshalBool(d can.Data) bool {
	return d.Bit(uint8(s.Start))
}
// MarshalUnsigned sets the raw unsigned value of the signal in the provided
// CAN frame, honoring the signal's byte order.
func (s *Signal) MarshalUnsigned(d *can.Data, value uint64) {
	start, length := uint8(s.Start), uint8(s.Length)
	if !s.IsBigEndian {
		d.SetUnsignedBitsLittleEndian(start, length, value)
		return
	}
	d.SetUnsignedBitsBigEndian(start, length, value)
}
// MarshalSigned sets the raw signed value of the signal in the provided CAN
// frame, honoring the signal's byte order.
func (s *Signal) MarshalSigned(d *can.Data, value int64) {
	start, length := uint8(s.Start), uint8(s.Length)
	if !s.IsBigEndian {
		d.SetSignedBitsLittleEndian(start, length, value)
		return
	}
	d.SetSignedBitsBigEndian(start, length, value)
}
// MarshalBool sets the bool value of the signal in the provided CAN frame.
//
// Only the single start bit is written; Length is not consulted, so this is
// meaningful only for one-bit signals.
func (s *Signal) MarshalBool(d *can.Data, value bool) {
	d.SetBit(uint8(s.Start), value)
}
// MaxUnsigned returns the maximum unsigned value representable by the signal,
// i.e. 2^Length - 1.
//
// 2 << (Length-1) equals 2^Length; for Length == 64 the shift wraps a uint64
// to 0 and the subtraction yields math.MaxUint64, which is the correct
// maximum, so this form is valid for the full 1..64 bit range.
func (s *Signal) MaxUnsigned() uint64 {
	return (2 << (uint8(s.Length) - 1)) - 1
}
// MinSigned returns the minimum signed value representable by the signal,
// i.e. -(2^(Length-1)).
//
// The previous form, (2 << (Length-1) / 2) * -1, overflowed to 0 for 64-bit
// signals because 2 << 63 wraps an int64 to 0. Here, for Length == 64,
// int64(1) << 63 is math.MinInt64 and negating it wraps back to
// math.MinInt64, the correct minimum, so this form is valid for 1..64 bits.
func (s *Signal) MinSigned() int64 {
	return -(int64(1) << (uint8(s.Length) - 1))
}
// MaxSigned returns the maximum signed value representable by the signal,
// i.e. 2^(Length-1) - 1.
//
// The previous form, (2 << (Length-1) / 2) - 1, overflowed to -1 for 64-bit
// signals because 2 << 63 wraps an int64 to 0. Here, for Length == 64,
// int64(1) << 63 is math.MinInt64 and subtracting 1 wraps to math.MaxInt64,
// the correct maximum, so this form is valid for 1..64 bits.
func (s *Signal) MaxSigned() int64 {
	return (int64(1) << (uint8(s.Length) - 1)) - 1
}
// SaturatedCastSigned performs a saturated cast of an int64 to the value
// domain of the signal: values outside [MinSigned, MaxSigned] are clamped to
// the nearest bound.
func (s *Signal) SaturatedCastSigned(value int64) int64 {
	if lo := s.MinSigned(); value < lo {
		return lo
	}
	if hi := s.MaxSigned(); value > hi {
		return hi
	}
	return value
}
// SaturatedCastUnsigned performs a saturated cast of a uint64 to the value domain of the signal.
func (s *Signal) SaturatedCastUnsigned(value uint64) uint64 {
max := s.MaxUnsigned()
if value > max {
return max
}
return value
} | pkg/descriptor/signal.go | 0.726911 | 0.478773 | signal.go | starcoder |
package imagepipeline
import (
"bytes"
"context"
"image"
"github.com/disintegration/imaging"
)
// Image is the unit of work flowing through the image pipeline: the decoded
// pixel grid together with bookkeeping about the original and optimized bytes.
type Image struct {
	// previous is the state of the image before the last Set call, if any.
	previous *Image
	// originalSize is the size in bytes of the original raw image data.
	originalSize int
	// grid is the decoded pixel data of the image.
	grid image.Image
	// optimizedData holds the encoded bytes of the image; it is reset to nil
	// by Set when the grid changes and has not been re-encoded yet.
	optimizedData []byte
	// format is the encoding format of the image, as reported by image.Decode
	// or set by setOptimized.
	format string
}
// Job is a single image pipeline step: it receives an image, processes it,
// and returns the resulting image or an error.
type Job func(context.Context, *Image) (*Image, error)
// NewImageFromBytes decodes data into an Image. An error is returned if the
// bytes cannot be decoded by image.Decode.
func NewImageFromBytes(data []byte) (*Image, error) {
	decoded, format, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		return nil, err
	}
	result := &Image{
		grid:          decoded,
		format:        format,
		originalSize:  len(data),
		optimizedData: data,
	}
	return result, nil
}
// Previous returns the prior state of the image; within this file it is only
// populated by Set, so it is nil until the image is first modified.
func (i *Image) Previous() *Image {
	return i.previous
}
// Set replaces the image grid, remembering the current state as Previous and
// invalidating any cached optimized bytes.
func (i *Image) Set(grid image.Image) {
	snapshot := *i
	i.previous = &snapshot
	// The pixel data changed, so the cached encoded form is stale.
	i.optimizedData = nil
	i.grid = grid
}
// Width returns the width of the image in pixels.
func (i *Image) Width() int {
	bounds := i.grid.Bounds()
	return bounds.Dx()
}
// Height returns the height of the image in pixels.
func (i *Image) Height() int {
	bounds := i.grid.Bounds()
	return bounds.Dy()
}
// setOptimized caches the encoded bytes of the image and records their format.
func (i *Image) setOptimized(data []byte, format string) {
	i.optimizedData = data
	i.format = format
}
// encode serializes the pixel grid in the requested format and returns the
// encoded bytes. Any format other than PNG is encoded as JPEG.
func (i *Image) encode(format string) ([]byte, error) {
	codec := imaging.JPEG
	if format == ImageTypePNG {
		codec = imaging.PNG
	}
	var out bytes.Buffer
	if err := imaging.Encode(&out, i.grid, codec); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
// PNG returns the image encoded as PNG, reusing the cached optimized bytes
// when they are already in PNG form.
func (i *Image) PNG() ([]byte, error) {
	if i.format == ImageTypePNG && len(i.optimizedData) != 0 {
		return i.optimizedData, nil
	}
	return i.encode(ImageTypePNG)
}
// JPEG returns the image encoded as JPEG, reusing the cached optimized bytes
// when they are already in JPEG form.
func (i *Image) JPEG() ([]byte, error) {
	if i.format == ImageTypeJPEG && len(i.optimizedData) != 0 {
		return i.optimizedData, nil
	}
	return i.encode(ImageTypeJPEG)
}
// Bytes returns the bytes and format of image
func (i *Image) Bytes() ([]byte, string) {
return i.optimizedData, i.format
} | image.go | 0.794185 | 0.405302 | image.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.