code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package stats
import "math"
// ClassMetrics provides methods to calculate Precision, Recall, F1Score, Accuracy
// and other metrics useful to analyze the accuracy of a classifier.
type ClassMetrics struct {
TruePos int // The number of true positive results (correctly marked as positive)
TrueNeg int // The number of true negative results (correctly marked as negative)
FalsePos int // The number of false positive results (that should have been negative)
FalseNeg int // The number of false negative results (that should have been positive)
}
// NewMetricCounter returns a new ClassMetrics ready-to-use.
func NewMetricCounter() *ClassMetrics {
	// The zero value of every counter field is already 0, so the
	// zero-valued struct is a fully initialized instance.
	return &ClassMetrics{}
}
// Reset sets all the counters to zero.
func (c *ClassMetrics) Reset() {
	// Overwrite the whole struct with its zero value; all four counters
	// become 0 in one assignment.
	*c = ClassMetrics{}
}
// IncTruePos increments the true positive.
func (c *ClassMetrics) IncTruePos() {
c.TruePos++
}
// IncTrueNeg increments the true negative.
func (c *ClassMetrics) IncTrueNeg() {
c.TrueNeg++
}
// IncFalsePos increments the false positive.
func (c *ClassMetrics) IncFalsePos() {
c.FalsePos++
}
// IncFalseNeg increments the false negative.
func (c *ClassMetrics) IncFalseNeg() {
c.FalseNeg++
}
// ExpectedPos returns the sum of true positive and false negative
func (c *ClassMetrics) ExpectedPos() int {
return c.TruePos + c.FalseNeg
}
// Precision returns the precision metric, calculated as true positive / (true positive + false positive).
// When no positive predictions were made (TP+FP == 0) the division is 0/0,
// producing NaN, which zeroIfNaN is meant to normalize to 0.
func (c *ClassMetrics) Precision() float64 {
	return zeroIfNaN(float64(c.TruePos) / float64(c.TruePos+c.FalsePos))
}

// Recall returns the recall (true positive rate) metric, calculated as true positive / (true positive + false negative).
// When there are no actual positives (TP+FN == 0) the division is 0/0,
// producing NaN, which zeroIfNaN is meant to normalize to 0.
func (c *ClassMetrics) Recall() float64 {
	return zeroIfNaN(float64(c.TruePos) / float64(c.TruePos+c.FalseNeg))
}
// F1Score returns the harmonic mean of precision and recall, calculated as
// 2 * (precision * recall / (precision + recall)).
// When both precision and recall are 0 the division yields NaN, which
// zeroIfNaN is meant to normalize to 0.
func (c *ClassMetrics) F1Score() float64 {
	// Compute each underlying metric once instead of twice per call.
	p, r := c.Precision(), c.Recall()
	return zeroIfNaN(2.0 * ((p * r) / (p + r)))
}
// Specificity returns the specificity (selectivity, true negative rate) metric, calculated as true negative / (true negative + false positive).
func (c *ClassMetrics) Specificity() float64 {
return zeroIfNaN(float64(c.TrueNeg) / float64(c.TrueNeg+c.FalsePos))
}
// Accuracy returns the accuracy metric, calculated as (true positive + true negative) / (TP + TN + FP + FN).
func (c *ClassMetrics) Accuracy() float64 {
numerator := float64(c.TruePos) + float64(c.TrueNeg)
return zeroIfNaN(numerator / (numerator + float64(c.FalseNeg+c.FalsePos)))
}
// zeroIfNaN returns zero if the value is NaN otherwise the value.
func zeroIfNaN(value float64) float64 {
if value == math.NaN() {
return 0.0
}
return value
} | pkg/ml/stats/classmetrics.go | 0.894055 | 0.694601 | classmetrics.go | starcoder |
package tarjan
// Graph is a directed graph containing the vertex name and their Edges.
type Graph map[string]Edges
// Edges is a set of edges for a vertex.
type Edges map[string]struct{}
// SCC returns the strongly connected components of the given Graph.
//
// Note: iteration over the graph's map is non-deterministic, so the order of
// the returned components (and of vertices within a component) may vary
// between calls.
func SCC(g Graph) [][]string {
	t := tarjan{
		g:          g,
		indexTable: make(map[string]int, len(g)),
		lowLink:    make(map[string]int, len(g)),
		onStack:    make(map[string]bool, len(g)),
	}
	for v := range t.g {
		// An index of 0 means "not yet visited": strongConnect assigns
		// indices starting at 1.
		if t.indexTable[v] == 0 {
			t.strongConnect(v)
		}
	}
	return t.sccs
}
type tarjan struct {
g Graph
index int
indexTable map[string]int
lowLink map[string]int
onStack map[string]bool
stack []string
sccs [][]string
}
// strongConnect implements the pseudo-code from
// https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm#The_algorithm_in_pseudocode
//
// It performs a depth-first search rooted at v, assigning each vertex a DFS
// index and a low-link value (the smallest index reachable from it). A
// vertex whose low-link equals its own index is the root of an SCC.
func (t *tarjan) strongConnect(v string) {
	// Set the depth index for v to the smallest unused index.
	// t.index starts at 0, so the first assigned index is 1; this keeps the
	// zero value of indexTable free to mean "not yet visited".
	t.index++
	t.indexTable[v] = t.index
	t.lowLink[v] = t.index
	t.stack = append(t.stack, v)
	t.onStack[v] = true
	// Consider successors of v.
	for w := range t.g[v] {
		if t.indexTable[w] == 0 {
			// Successor w has not yet been visited; recur on it.
			t.strongConnect(w)
			t.lowLink[v] = min(t.lowLink[v], t.lowLink[w])
		} else if t.onStack[w] {
			// Successor w is in stack s and hence in the current SCC.
			t.lowLink[v] = min(t.lowLink[v], t.indexTable[w])
		}
	}
	// If v is a root node of an SCC, pop the stack and generate the SCC.
	if t.lowLink[v] == t.indexTable[v] {
		// Start a new strongly connected component.
		var (
			scc []string
			w   string
		)
		for {
			// Pop vertices until v itself is popped; together they form one SCC.
			w, t.stack = t.stack[len(t.stack)-1], t.stack[:len(t.stack)-1]
			t.onStack[w] = false
			// Add w to current strongly connected component.
			scc = append(scc, w)
			if w == v {
				break
			}
		}
		// Output the current strongly connected component.
		t.sccs = append(t.sccs, scc)
	}
}
// min returns the smaller of the two given ints.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
package filter
import (
"bytes"
"github.com/hidal-go/hidalgo/values"
)
type ValueFilter interface {
FilterValue(v values.Value) bool
}
type SortableFilter interface {
ValueFilter
FilterSortable(v values.Sortable) bool
// ValuesRange returns an optional range of value that matches the filter.
// It is used as an optimization for complex filters for backend to limit the range of keys that will be considered.
ValuesRange() *Range
}
var _ SortableFilter = Any{}
type Any struct{}
func (Any) FilterValue(v values.Value) bool {
return v != nil
}
func (Any) FilterSortable(v values.Sortable) bool {
return v != nil
}
func (Any) ValuesRange() *Range {
return nil
}
// EQ is a shorthand for Equal.
func EQ(v values.Value) SortableFilter {
return Equal{Value: v}
}
var _ SortableFilter = Equal{}
type Equal struct {
Value values.Value
}
func (f Equal) FilterValue(a values.Value) bool {
switch a := a.(type) {
case values.Bytes:
b, ok := f.Value.(values.Bytes)
if !ok {
return false
}
return bytes.Equal(a, b)
}
return f.Value == a
}
func (f Equal) FilterSortable(a values.Sortable) bool {
b, ok := f.Value.(values.Sortable)
if !ok {
return a == nil && f.Value == nil
}
switch a := a.(type) {
case values.Bytes:
b, ok := b.(values.Bytes)
if !ok {
return false
}
return bytes.Equal(a, b)
}
return f.Value == a
}
func (f Equal) ValuesRange() *Range {
b, ok := f.Value.(values.Sortable)
if !ok {
return nil
}
return &Range{
Start: GTE(b),
End: LTE(b),
}
}
// LT is a "less than" filter. Shorthand for Less.
func LT(v values.Sortable) *Less {
return &Less{Value: v}
}
// LTE is a "less than or equal" filter. Shorthand for Less.
func LTE(v values.Sortable) *Less {
return &Less{Value: v, Equal: true}
}
var _ SortableFilter = Less{}
type Less struct {
Value values.Sortable
Equal bool
}
func (f Less) FilterValue(v values.Value) bool {
a, ok := v.(values.Sortable)
if !ok && v != nil {
return false
}
return f.FilterSortable(a)
}
func (f Less) FilterSortable(v values.Sortable) bool {
if v == nil {
return true
}
c := values.Compare(v, f.Value)
return c == -1 || (f.Equal && c == 0)
}
func (f Less) ValuesRange() *Range {
return &Range{End: &f}
}
// GT is a "greater than" filter. Shorthand for Greater.
func GT(v values.Sortable) *Greater {
return &Greater{Value: v}
}
// GTE is a "greater than or equal" filter. Shorthand for Greater.
func GTE(v values.Sortable) *Greater {
return &Greater{Value: v, Equal: true}
}
var _ SortableFilter = Greater{}
type Greater struct {
Value values.Sortable
Equal bool
}
func (f Greater) FilterValue(v values.Value) bool {
a, ok := v.(values.Sortable)
if !ok && v != nil {
return false
}
return f.FilterSortable(a)
}
func (f Greater) FilterSortable(v values.Sortable) bool {
if v == nil {
return true
}
c := values.Compare(v, f.Value)
return c == +1 || (f.Equal && c == 0)
}
func (f Greater) ValuesRange() *Range {
return &Range{Start: &f}
}
var _ SortableFilter = Range{}
// Range represents a range of sortable values.
// If inclusive is set, the range is [start, end], if not, the range is (start, end).
type Range struct {
Start *Greater
End *Less
}
// isPrefix checks if the range describes a prefix. In this case Start.Value describes the prefix.
//
// A range is a prefix range when:
//   - Start is inclusive (GTE) and its value is a BinaryString, and
//   - End is the exclusive (LT) successor of that prefix (PrefixEnd), or
//     both PrefixEnd and End are nil (the prefix has no upper bound).
func (f Range) isPrefix() bool {
	if f.Start == nil || !f.Start.Equal {
		return false
	}
	s, ok := f.Start.Value.(values.BinaryString)
	if !ok {
		return false
	}
	end := s.PrefixEnd()
	if end == nil {
		// No key sorts after this prefix, so the range must be unbounded above.
		return f.End == nil
	}
	if f.End == nil || f.End.Equal {
		// The upper bound of a prefix range must exist and be exclusive.
		return false
	}
	return values.Compare(end, f.End.Value) == 0
}
// Prefix returns a common prefix of the range. Boolean flag indicates if prefix fully describes the range.
func (f Range) Prefix() (values.BinaryString, bool) {
if !f.isPrefix() {
// TODO: calculate common prefix
return nil, false
}
p, ok := f.Start.Value.(values.BinaryString)
return p, ok
}
func (f Range) FilterValue(v values.Value) bool {
a, ok := v.(values.Sortable)
if !ok && v != nil {
return false
}
return f.FilterSortable(a)
}
func (f Range) FilterSortable(v values.Sortable) bool {
if v == nil {
return f.Start != nil
}
if f.Start != nil && !f.Start.FilterSortable(v) {
return false
}
if f.End != nil && !f.End.FilterSortable(v) {
return false
}
return true
}
func (f Range) ValuesRange() *Range {
return &f
}
// And is a filter that matches only when every contained filter matches.
// An empty And matches any value.
type And []ValueFilter

// FilterValue implements ValueFilter; it short-circuits on the first
// contained filter that rejects v.
func (arr And) FilterValue(v values.Value) bool {
	for _, f := range arr {
		if !f.FilterValue(v) {
			return false
		}
	}
	return true
}

// Or is a filter that matches when at least one contained filter matches.
// An empty Or matches nothing.
type Or []ValueFilter

// FilterValue implements ValueFilter; it short-circuits on the first
// contained filter that accepts v.
func (arr Or) FilterValue(v values.Value) bool {
	for _, f := range arr {
		if f.FilterValue(v) {
			return true
		}
	}
	return false
}

// Not is a filter that inverts the result of the wrapped filter.
type Not struct {
	Filter ValueFilter
}

// FilterValue implements ValueFilter by negating the wrapped filter.
func (f Not) FilterValue(v values.Value) bool {
	return !f.Filter.FilterValue(v)
}
func Prefix(pref values.BinaryString) SortableFilter {
gt := GTE(pref)
end := pref.PrefixEnd()
if end == nil {
return *gt
}
return Range{
Start: gt,
End: LT(end),
}
} | filter/filters.go | 0.793826 | 0.548734 | filters.go | starcoder |
package dataframe
import (
"github.com/AdikaStyle/go-df/backend"
"github.com/AdikaStyle/go-df/conds"
"github.com/AdikaStyle/go-df/types"
)
type defaultJoinable struct {
df Dataframe
}
func newDefaultJoinable(df Dataframe) *defaultJoinable {
return &defaultJoinable{df: df}
}
// LeftJoin joins this dataframe with `with`. Right-side rows are hash-indexed
// by the join-key hint (on.ColumnsHint) and candidate matches are confirmed
// with on.Match.
func (this *defaultJoinable) LeftJoin(with Dataframe, on conds.JoinCondition) Dataframe {
	newHeaders := combineHeaders(this.df, with)
	joinedDf := this.df.constructNew(newHeaders)
	// Index right-side row ids by their join-key hint for O(1) lookup.
	index := make(map[string][]int)
	with.VisitRows(func(id int, row backend.Row) {
		indexKey := on.ColumnsHint(row)
		index[indexKey] = append(index[indexKey], id)
	})
	this.df.VisitRows(func(id int, left backend.Row) {
		indexKey := on.ColumnsHint(left)
		matches, found := index[indexKey]
		if found {
			// NOTE(review): if the key hint matches but on.Match rejects every
			// candidate, the left row is dropped entirely — a strict left join
			// would still emit it with missing right-side values. Confirm this
			// is intended.
			for _, match := range matches {
				right := with.getBackend().GetRow(match)
				if on.Match(left, right) {
					joinedRow := combineRows(left, right, false)
					joinedDf.getBackend().AppendRow(joinedRow)
				}
			}
		} else {
			// No key match: emit the left row with right-only columns marked
			// missing. Row 0 of `with` is used only to supply the right-side
			// column names. NOTE(review): this looks like it panics when
			// `with` has no rows — verify against GetRow's behavior.
			row := combineRows(left, with.getBackend().GetRow(0), true)
			joinedDf.getBackend().AppendRow(row)
		}
	})
	return joinedDf
}
func (this *defaultJoinable) RightJoin(with Dataframe, on conds.JoinCondition) Dataframe {
return with.LeftJoin(this.df, on)
}
// InnerJoin joins this dataframe with `with`, keeping only row pairs for
// which the join condition holds. This is a classic hash join: the smaller
// side is indexed by join key and the larger side probes the index.
func (this *defaultJoinable) InnerJoin(with Dataframe, on conds.JoinCondition) Dataframe {
	newHeaders := combineHeaders(this.df, with)
	joinedDf := this.df.constructNew(newHeaders)
	// Build the hash index on the smaller dataframe to keep memory low.
	var small, big Dataframe
	if this.df.GetRowCount() > with.GetRowCount() {
		big = this.df
		small = with
	} else {
		big = with
		small = this.df
	}
	index := make(map[string][]int)
	small.VisitRows(func(id int, row backend.Row) {
		indexKey := on.ColumnsHint(row)
		index[indexKey] = append(index[indexKey], id)
	})
	big.VisitRows(func(id int, row backend.Row) {
		indexKey := on.ColumnsHint(row)
		matches, found := index[indexKey]
		if found {
			for _, match := range matches {
				row2 := small.getBackend().GetRow(match)
				// The hint only narrows the candidates; Match decides.
				// NOTE(review): the argument order passed to Match swaps
				// depending on which side is bigger — confirm Match is
				// symmetric with respect to its arguments.
				if on.Match(row, row2) {
					joinedRow := combineRows(row, row2, false)
					joinedDf.getBackend().AppendRow(joinedRow)
				}
			}
		}
	})
	return joinedDf
}
func (this *defaultJoinable) OuterJoin(with Dataframe, on conds.JoinCondition) Dataframe {
panic("unimplemented")
}
// combineHeaders concatenates the headers of both dataframes, keeping the
// left side's order and dropping right-side headers whose Name already
// appeared on the left.
func combineHeaders(left Dataframe, right Dataframe) backend.Headers {
	var newHeaders backend.Headers
	// Track names already taken by the left side.
	dup := make(map[string]bool)
	for _, h := range left.GetHeaders() {
		newHeaders = append(newHeaders, h)
		dup[h.Name] = true
	}
	for _, h := range right.GetHeaders() {
		if _, found := dup[h.Name]; !found {
			newHeaders = append(newHeaders, h)
		}
	}
	return newHeaders
}
// combineRows merges the left and right rows into a single row. Columns
// present on both sides keep the left value. When rightMissing is true the
// right row contributes only its column names and every right-only column is
// filled with types.Missing (used for unmatched left rows in LeftJoin).
func combineRows(left backend.Row, right backend.Row, rightMissing bool) backend.Row {
	// Remember which columns the left row owns; they always win.
	dup := make(map[string]bool)
	row := make(backend.Row)
	for k, v := range left {
		dup[k] = true
		row[k] = v
	}
	for k, v := range right {
		if _, found := dup[k]; found {
			continue
		}
		if rightMissing {
			row[k] = types.Missing
		} else {
			row[k] = v
		}
	}
	return row
}
package parser
import (
"github.com/itchyny/gojq"
"github.com/pkg/errors"
)
// postProcessQuery processes query to allow the realization of the special `assertThat` function.
// Generally, functions defined via `def` may receive filters as arguments, but builtin functions will only ever see
// the concrete values. We therefore patch the AST to change every invocation of `assertThat` such that it passes
// the string representation of the first argument as the second argument. This allows us to give a more specific
// error message, stating what the predicate was that caused the assertion to be violated.
func postProcessQuery(query *gojq.Query) error {
	if query == nil {
		return nil
	}
	// Recurse into both operands of a binary query expression.
	if err := postProcessQuery(query.Left); err != nil {
		return err
	}
	if err := postProcessQuery(query.Right); err != nil {
		return err
	}
	if query.Term != nil {
		// Patch assertThat(filter) into assertThat(filter, "<filter text>")
		// unless the caller already supplied the second argument.
		if fn := query.Term.Func; fn != nil && fn.Name == "assertThat" && len(fn.Args) != 2 {
			if len(fn.Args) != 1 {
				return errors.Errorf("incorrect number of arguments for assertThat: %d, expected 1 or 2", len(fn.Args))
			}
			// Build a string-literal term holding the source text of the
			// first argument (the predicate being asserted).
			filterStr := &gojq.Query{
				Term: &gojq.Term{
					Type: gojq.TermTypeString,
					Str: &gojq.String{
						Str: query.Term.Func.Args[0].String(),
					},
				},
			}
			fn.Args = append(fn.Args, filterStr)
		}
		// Descend into every sub-query a term can carry: array contents,
		// unary operand, parenthesized query, label body and bind suffixes.
		if arr := query.Term.Array; arr != nil {
			if err := postProcessQuery(arr.Query); err != nil {
				return err
			}
		}
		if un := query.Term.Unary; un != nil {
			if err := postProcessQuery(un.Term.Query); err != nil {
				return err
			}
		}
		if err := postProcessQuery(query.Term.Query); err != nil {
			return err
		}
		if lbl := query.Term.Label; lbl != nil {
			if err := postProcessQuery(lbl.Body); err != nil {
				return err
			}
		}
		for _, suff := range query.Term.SuffixList {
			if bind := suff.Bind; bind != nil {
				if err := postProcessQuery(bind.Body); err != nil {
					return err
				}
			}
		}
	}
	// Function definitions (def f: ...) carry bodies that may themselves
	// invoke assertThat.
	for _, fd := range query.FuncDefs {
		if err := postProcessQuery(fd.Body); err != nil {
			return err
		}
	}
	return nil
}
package d2
import (
"strconv"
"strings"
"github.com/adamcolton/geom/angle"
"github.com/adamcolton/geom/calc/cmpr"
"github.com/adamcolton/geom/geomerr"
)
// Pt represets a two dimensional point.
type Pt D2
// Pt is defined on Pt to fulfill Point
func (pt Pt) Pt() Pt { return pt }
// V converts Pt to V
func (pt Pt) V() V { return V(pt) }
// Polar converts Pt to Polar
func (pt Pt) Polar() Polar { return D2(pt).Polar() }
// Angle returns the angle of the point relative to the origin
func (pt Pt) Angle() angle.Rad { return D2(pt).Angle() }
// Mag2 returns the square of the magnitude. For comparisions this can be more
// efficient as it avoids a sqrt call.
func (pt Pt) Mag2() float64 { return D2(pt).Mag2() }
// Mag returns the magnitude of the point relative to the origin
func (pt Pt) Mag() float64 { return D2(pt).Mag() }
// Subtract returns the difference between two points as V
func (pt Pt) Subtract(pt2 Pt) V {
return D2{
pt.X - pt2.X,
pt.Y - pt2.Y,
}.V()
}
// Add a V to a Pt
func (pt Pt) Add(v V) Pt {
return D2{
pt.X + v.X,
pt.Y + v.Y,
}.Pt()
}
// Distance returns the distance between to points
func (pt Pt) Distance(pt2 Pt) float64 {
return pt.Subtract(pt2).Mag()
}
// Multiply performs a scalar multiplication on the Pt
func (pt Pt) Multiply(scale float64) Pt {
return D2{pt.X * scale, pt.Y * scale}.Pt()
}
// Prec is the precision for the String method on F
var Prec = 4
// String fulfills Stringer, returns the vector as "(X, Y)"
func (pt Pt) String() string {
return strings.Join([]string{
"Pt(",
strconv.FormatFloat(pt.X, 'f', Prec, 64),
", ",
strconv.FormatFloat(pt.Y, 'f', Prec, 64),
")",
}, "")
}
// Min returns a Pt with the lowest X and the lowest Y.
func Min(pts ...Pt) Pt {
if len(pts) == 0 {
return Pt{}
}
m := pts[0]
for _, pt := range pts[1:] {
if pt.X < m.X {
m.X = pt.X
}
if pt.Y < m.Y {
m.Y = pt.Y
}
}
return m
}
// Max returns a Pt with the highest X and highest Y.
func Max(pts ...Pt) Pt {
if len(pts) == 0 {
return Pt{}
}
m := pts[0]
for _, pt := range pts[1:] {
if pt.X > m.X {
m.X = pt.X
}
if pt.Y > m.Y {
m.Y = pt.Y
}
}
return m
}
// MinMax takes any number of points and returns a min point with the lowest X
// and the lowest Y in the entire set and a max point with the highest X and
// highest Y in the set.
func MinMax(pts ...Pt) (Pt, Pt) {
if len(pts) == 0 {
return Pt{}, Pt{}
}
min, max := pts[0], pts[0]
for _, pt := range pts[1:] {
min, max = Min(min, pt), Max(max, pt)
}
return min, max
}
// AssertEqual fulfils geomtest.AssertEqualizer. It returns an error when
// actual is not a Pt, or when either coordinate differs from pt by more than
// the tolerance t; otherwise it returns nil.
func (pt Pt) AssertEqual(actual interface{}, t cmpr.Tolerance) error {
	if err := geomerr.NewTypeMismatch(pt, actual); err != nil {
		return err
	}
	pt2 := actual.(Pt)
	// Equality within tolerance: each component of the difference vector
	// must be (approximately) zero.
	v := pt.Subtract(pt2)
	if !t.Zero(v.X) || !t.Zero(v.Y) {
		return geomerr.NotEqual(pt, pt2)
	}
	return nil
}
package year2021
import (
"github.com/lanphiergm/adventofcodego/internal/utils"
)
// Chiton Part 1 computes the lowest total risk level for a path through the cave
func ChitonPart1(filename string) interface{} {
grid := parseChitonGrid(filename)
return relaxGrid(&grid)
}
// Chiton Part 2 computes the lowest total risk level for a path through the expanded cave
func ChitonPart2(filename string) interface{} {
grid := parseChitonGrid(filename)
grid = replicateGrid(&grid)
return relaxGrid(&grid)
}
// relaxGrid runs Dijkstra's algorithm over the risk grid, treating each
// cell's value as the cost of entering it, and returns the minimum total risk
// of a path from the top-left to the bottom-right corner.
func relaxGrid(grid *[][]int) interface{} {
	// distances holds the best known cost to reach each coordinate.
	distances := make(map[utils.Coord]int)
	distances[utils.Coord{X: 0, Y: 0}] = 0
	hasVisited := make(map[utils.Coord]bool)
	// queue is kept sorted by distance (see enqueueCoord), acting as a
	// priority queue with O(n) insertion.
	queue := make([]node, 0)
	queue = enqueueCoord(queue, utils.Coord{X: 0, Y: 0}, 0)
	for len(queue) > 0 {
		src := queue[0].Coord
		queue = queue[1:]
		// A coordinate may be queued several times with stale distances;
		// only the first (cheapest) dequeue is processed.
		if hasVisited[src] {
			continue
		}
		hasVisited[src] = true
		// Collect the in-bounds 4-neighborhood of src.
		adjacents := make([]utils.Coord, 0)
		if src.X > 0 { // left
			adjacents = append(adjacents, utils.Coord{X: src.X - 1, Y: src.Y})
		}
		if src.X < len((*grid)[0])-1 { // right
			adjacents = append(adjacents, utils.Coord{X: src.X + 1, Y: src.Y})
		}
		if src.Y > 0 { // up
			adjacents = append(adjacents, utils.Coord{X: src.X, Y: src.Y - 1})
		}
		if src.Y < len(*grid)-1 { // down
			adjacents = append(adjacents, utils.Coord{X: src.X, Y: src.Y + 1})
		}
		for _, dest := range adjacents {
			if hasVisited[dest] {
				continue
			}
			// Relax the edge: entering dest costs its grid value.
			dist := distances[src] + (*grid)[dest.Y][dest.X]
			if existingDist, found := distances[dest]; !found || dist < existingDist {
				distances[dest] = dist
				queue = enqueueCoord(queue, dest, dist)
			}
		}
	}
	// The answer is the best distance to the bottom-right corner.
	return distances[utils.Coord{X: len((*grid)[0]) - 1, Y: len(*grid) - 1}]
}
// node pairs a grid coordinate with its tentative distance; it is the
// element type of the sorted queue used by relaxGrid.
type node struct {
	Coord utils.Coord
	Dist  int
}

// enqueueCoord inserts the coordinate into the queue, keeping the queue
// sorted by ascending distance. The linear scan makes insertion O(n); a
// container/heap would scale better for large grids, but this is simple and
// adequate here.
func enqueueCoord(queue []node, coord utils.Coord, dist int) []node {
	newNode := node{Coord: coord, Dist: dist}
	// Find the first entry with a strictly larger distance; insert before it.
	insertAt := len(queue)
	for index, item := range queue {
		if item.Dist > dist {
			insertAt = index
			break
		}
	}
	if len(queue) == insertAt {
		// Not smaller than anything queued: append at the tail.
		return append(queue, newNode)
	}
	// Shift the tail right by one slot and drop the new node into the gap.
	queue = append(queue[:insertAt+1], queue[insertAt:]...)
	queue[insertAt] = newNode
	return queue
}
func parseChitonGrid(filename string) [][]int {
data := utils.ReadStrings(filename)
grid := make([][]int, len(data))
for i, row := range data {
grid[i] = make([]int, len(row))
for j, r := range row {
grid[i][j] = utils.Rtoi(r)
}
}
return grid
}
// replicateGrid expands the grid into a 5x5 tiling of itself: each tile's
// values are one higher than the tile above (or to the left of) it, and
// values wrap from 9 back to 1.
func replicateGrid(grid *[][]int) [][]int {
	lenX := len((*grid)[0])
	lenY := len(*grid)
	newGrid := make([][]int, lenY*5)
	for i := 0; i < lenY*5; i++ {
		newGrid[i] = make([]int, lenX*5)
	}
	// Fill the leftmost column of tiles top-to-bottom, then copy each tile
	// row rightwards, incrementing values at every step.
	for i := 0; i < 5; i++ {
		for y := 0; y < lenY; y++ {
			for x := 0; x < lenX; x++ {
				if i == 0 { // copy from original grid
					newGrid[y][x] = (*grid)[y][x]
				} else { // copy down from above
					newV := newGrid[(i-1)*lenY+y][x] + 1
					if newV == 10 {
						// Risk levels above 9 wrap around to 1.
						newV = 1
					}
					newGrid[i*lenY+y][x] = newV
				}
			}
		}
		// copy right
		for j := 1; j < 5; j++ {
			for y := 0; y < lenY; y++ {
				for x := 0; x < lenX; x++ {
					newV := newGrid[i*lenY+y][(j-1)*lenX+x] + 1
					if newV == 10 {
						newV = 1
					}
					newGrid[i*lenY+y][j*lenX+x] = newV
				}
			}
		}
	}
	return newGrid
}
package graphics
import (
"github.com/inkyblackness/shocked-client/opengl"
)
// ColorsPerPalette defines how many colors are per palette. This value is 256 to cover byte-based bitmaps.
const ColorsPerPalette = 256
// BytesPerRgba defines the byte count for an RGBA color value.
const BytesPerRgba = 4
// ColorProvider is a function to return the RGBA values for a certain palette index.
type ColorProvider func(index int) (byte, byte, byte, byte)
// PaletteTexture contains a palette stored as OpenGL texture.
type PaletteTexture struct {
gl opengl.OpenGl
colorProvider ColorProvider
handle uint32
}
// NewPaletteTexture creates a new PaletteTexture instance.
func NewPaletteTexture(gl opengl.OpenGl, colorProvider ColorProvider) *PaletteTexture {
tex := &PaletteTexture{
gl: gl,
colorProvider: colorProvider,
handle: gl.GenTextures(1)[0]}
tex.Update()
return tex
}
// Dispose implements the GraphicsTexture interface.
func (tex *PaletteTexture) Dispose() {
if tex.handle != 0 {
tex.gl.DeleteTextures([]uint32{tex.handle})
tex.handle = 0
}
}
// Handle returns the texture handle.
func (tex *PaletteTexture) Handle() uint32 {
return tex.handle
}
// Update reloads the palette.
func (tex *PaletteTexture) Update() {
gl := tex.gl
var palette [ColorsPerPalette * BytesPerRgba]byte
tex.loadColors(&palette)
gl.BindTexture(opengl.TEXTURE_2D, tex.handle)
gl.TexImage2D(opengl.TEXTURE_2D, 0, opengl.RGBA, ColorsPerPalette, 1, 0, opengl.RGBA, opengl.UNSIGNED_BYTE, palette[:])
gl.TexParameteri(opengl.TEXTURE_2D, opengl.TEXTURE_MAG_FILTER, opengl.NEAREST)
gl.TexParameteri(opengl.TEXTURE_2D, opengl.TEXTURE_MIN_FILTER, opengl.NEAREST)
gl.GenerateMipmap(opengl.TEXTURE_2D)
gl.BindTexture(opengl.TEXTURE_2D, 0)
}
// loadColors fills the palette buffer with the RGBA bytes of all
// ColorsPerPalette entries, querying the configured colorProvider once per
// palette index.
func (tex *PaletteTexture) loadColors(palette *[ColorsPerPalette * BytesPerRgba]byte) {
	for i := 0; i < ColorsPerPalette; i++ {
		r, g, b, a := tex.colorProvider(i)
		palette[i*BytesPerRgba+0] = r
		palette[i*BytesPerRgba+1] = g
		palette[i*BytesPerRgba+2] = b
		palette[i*BytesPerRgba+3] = a
	}
}
package osm
import (
"encoding/json"
"sort"
)
// Polygon returns true if the way should be considered a closed polygon area.
// OpenStreetMap doesn't have an intrinsic area data type. The algorithm used
// here considers a set of heuristics to determine what is most likely an area.
// The heuristics can be found here,
// https://wiki.openstreetmap.org/wiki/Overpass_turbo/Polygon_Features
// and are used by osmtogeojson and overpass turbo.
func (w *Way) Polygon() bool {
	if len(w.Nodes) <= 3 {
		// need more than 3 nodes to be polygon since first/last is repeated.
		return false
	}
	if w.Nodes[0].ID != w.Nodes[len(w.Nodes)-1].ID {
		// must be closed
		return false
	}
	// An explicit area tag overrides all other heuristics.
	if area := w.Tags.Find("area"); area == "no" {
		return false
	} else if area != "" {
		return true
	}
	// Apply the tag-based heuristics; an absent ("") or "no" value for a
	// condition's key never makes the way an area.
	for _, c := range polyConditions {
		v := w.Tags.Find(c.Key)
		if v == "" || v == "no" {
			continue
		}
		if c.Condition == conditionAll {
			// Any value for this key marks the way as an area.
			return true
		} else if c.Condition == conditionWhitelist {
			// Area only if the value is one of the listed values.
			// c.Values is sorted in init(), so binary search is valid.
			index := sort.SearchStrings(c.Values, v)
			if index != len(c.Values) && c.Values[index] == v {
				return true
			}
		} else if c.Condition == conditionBlacklist {
			// Area unless the value is one of the listed values.
			index := sort.SearchStrings(c.Values, v)
			if index == len(c.Values) || c.Values[index] != v {
				return true
			}
		}
	}
	return false
}
func init() {
err := json.Unmarshal(polygonJSON, &polyConditions)
if err != nil {
// This must be valid json
panic(err)
}
for _, p := range polyConditions {
sort.StringSlice(p.Values).Sort()
}
}
var polyConditions []polyCondition
type polyCondition struct {
Key string `json:"key"`
Condition conditionType `json:"polygon"`
Values []string `json:"values"`
}
type conditionType string
var (
conditionAll conditionType = "all"
conditionBlacklist conditionType = "blacklist"
conditionWhitelist conditionType = "whitelist"
)
// polygonJSON holds advanced conditions for when an osm way is a polygon.
// Sourced from: https://wiki.openstreetmap.org/wiki/Overpass_turbo/Polygon_Features
// Also used by node lib: https://github.com/tyrasd/osmtogeojson
var polygonJSON = []byte(`
[
{
"key": "building",
"polygon": "all"
},
{
"key": "highway",
"polygon": "whitelist",
"values": [
"services",
"rest_area",
"escape",
"elevator"
]
},
{
"key": "natural",
"polygon": "blacklist",
"values": [
"coastline",
"cliff",
"ridge",
"arete",
"tree_row"
]
},
{
"key": "landuse",
"polygon": "all"
},
{
"key": "waterway",
"polygon": "whitelist",
"values": [
"riverbank",
"dock",
"boatyard",
"dam"
]
},
{
"key": "amenity",
"polygon": "all"
},
{
"key": "leisure",
"polygon": "all"
},
{
"key": "barrier",
"polygon": "whitelist",
"values": [
"city_wall",
"ditch",
"hedge",
"retaining_wall",
"wall",
"spikes"
]
},
{
"key": "railway",
"polygon": "whitelist",
"values": [
"station",
"turntable",
"roundhouse",
"platform"
]
},
{
"key": "boundary",
"polygon": "all"
},
{
"key": "man_made",
"polygon": "blacklist",
"values": [
"cutline",
"embankment",
"pipeline"
]
},
{
"key": "power",
"polygon": "whitelist",
"values": [
"plant",
"substation",
"generator",
"transformer"
]
},
{
"key": "place",
"polygon": "all"
},
{
"key": "shop",
"polygon": "all"
},
{
"key": "aeroway",
"polygon": "blacklist",
"values": [
"taxiway"
]
},
{
"key": "tourism",
"polygon": "all"
},
{
"key": "historic",
"polygon": "all"
},
{
"key": "public_transport",
"polygon": "all"
},
{
"key": "office",
"polygon": "all"
},
{
"key": "building:part",
"polygon": "all"
},
{
"key": "military",
"polygon": "all"
},
{
"key": "ruins",
"polygon": "all"
},
{
"key": "area:highway",
"polygon": "all"
},
{
"key": "craft",
"polygon": "all"
},
{
"key": "golf",
"polygon": "all"
},
{
"key": "indoor",
"polygon": "all"
}
]`)
// Polygon returns true if the relation is of type multipolygon or boundary.
func (r *Relation) Polygon() bool {
t := r.Tags.Find("type")
return t == "multipolygon" || t == "boundary"
} | polygon.go | 0.672654 | 0.436682 | polygon.go | starcoder |
package object_storage
import (
"encoding/json"
)
// DataVectorResult Time series containing a single sample for each time series, all sharing the same timestamp
type DataVectorResult struct {
// The data points' labels
Metric *map[string]string `json:"metric,omitempty"`
Value *DataValue `json:"value,omitempty"`
}
// NewDataVectorResult instantiates a new DataVectorResult object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewDataVectorResult() *DataVectorResult {
this := DataVectorResult{}
return &this
}
// NewDataVectorResultWithDefaults instantiates a new DataVectorResult object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewDataVectorResultWithDefaults() *DataVectorResult {
this := DataVectorResult{}
return &this
}
// GetMetric returns the Metric field value if set, zero value otherwise.
func (o *DataVectorResult) GetMetric() map[string]string {
if o == nil || o.Metric == nil {
var ret map[string]string
return ret
}
return *o.Metric
}
// GetMetricOk returns a tuple with the Metric field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DataVectorResult) GetMetricOk() (*map[string]string, bool) {
if o == nil || o.Metric == nil {
return nil, false
}
return o.Metric, true
}
// HasMetric returns a boolean if a field has been set.
func (o *DataVectorResult) HasMetric() bool {
if o != nil && o.Metric != nil {
return true
}
return false
}
// SetMetric gets a reference to the given map[string]string and assigns it to the Metric field.
func (o *DataVectorResult) SetMetric(v map[string]string) {
o.Metric = &v
}
// GetValue returns the Value field value if set, zero value otherwise.
func (o *DataVectorResult) GetValue() DataValue {
if o == nil || o.Value == nil {
var ret DataValue
return ret
}
return *o.Value
}
// GetValueOk returns a tuple with the Value field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DataVectorResult) GetValueOk() (*DataValue, bool) {
if o == nil || o.Value == nil {
return nil, false
}
return o.Value, true
}
// HasValue returns a boolean if a field has been set.
func (o *DataVectorResult) HasValue() bool {
if o != nil && o.Value != nil {
return true
}
return false
}
// SetValue gets a reference to the given DataValue and assigns it to the Value field.
func (o *DataVectorResult) SetValue(v DataValue) {
o.Value = &v
}
func (o DataVectorResult) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.Metric != nil {
toSerialize["metric"] = o.Metric
}
if o.Value != nil {
toSerialize["value"] = o.Value
}
return json.Marshal(toSerialize)
}
type NullableDataVectorResult struct {
value *DataVectorResult
isSet bool
}
func (v NullableDataVectorResult) Get() *DataVectorResult {
return v.value
}
func (v *NullableDataVectorResult) Set(val *DataVectorResult) {
v.value = val
v.isSet = true
}
func (v NullableDataVectorResult) IsSet() bool {
return v.isSet
}
func (v *NullableDataVectorResult) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableDataVectorResult(val *DataVectorResult) *NullableDataVectorResult {
return &NullableDataVectorResult{value: val, isSet: true}
}
func (v NullableDataVectorResult) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableDataVectorResult) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | pkg/object_storage/model_data_vector_result.go | 0.80406 | 0.605828 | model_data_vector_result.go | starcoder |
package version
import (
"bytes"
"fmt"
"strings"
"text/scanner"
)
type constraintExpression struct {
units [][]constraintUnit // only supports or'ing a group of and'ed groups
comparators [][]Comparator // only supports or'ing a group of and'ed groups
}
func newConstraintExpression(phrase string, genFn comparatorGenerator) (constraintExpression, error) {
orParts, err := scanExpression(phrase)
if err != nil {
return constraintExpression{}, fmt.Errorf("unable to create constraint expression from=%q : %w", phrase, err)
}
orUnits := make([][]constraintUnit, len(orParts))
orComparators := make([][]Comparator, len(orParts))
for orIdx, andParts := range orParts {
andUnits := make([]constraintUnit, len(andParts))
andComparators := make([]Comparator, len(andParts))
for andIdx, part := range andParts {
unit, err := parseUnit(part)
if err != nil {
return constraintExpression{}, err
}
if unit == nil {
return constraintExpression{}, fmt.Errorf("unable to parse unit: %q", part)
}
andUnits[andIdx] = *unit
comparator, err := genFn(*unit)
if err != nil {
return constraintExpression{}, fmt.Errorf("failed to create comparator for '%s': %w", unit, err)
}
andComparators[andIdx] = comparator
}
orUnits[orIdx] = andUnits
orComparators[orIdx] = andComparators
}
return constraintExpression{
units: orUnits,
comparators: orComparators,
}, nil
}
// satisfied reports whether other meets the constraint: at least one OR
// group must have every one of its AND-ed units satisfied. Every unit is
// evaluated (no short-circuit) so that any comparison error surfaces.
func (c *constraintExpression) satisfied(other *Version) (bool, error) {
	anyGroupOK := false
	for gi, group := range c.comparators {
		groupOK := true
		for ui, cmp := range group {
			res, err := cmp.Compare(other)
			if err != nil {
				return false, fmt.Errorf("uncomparable %+v %+v: %w", cmp, other, err)
			}
			if !c.units[gi][ui].Satisfied(res) {
				groupOK = false
			}
		}
		if groupOK {
			anyGroupOK = true
		}
	}
	return anyGroupOK, nil
}
// scanExpression splits a constraint phrase into OR groups of AND-ed
// version/operator strings: "," separates AND terms, "||" separates OR
// groups, and any other run of tokens accumulates into a single
// version-with-operator value. Parenthetical grouping is rejected
// (not supported yet).
func scanExpression(phrase string) ([][]string, error) {
	var scnr scanner.Scanner
	var orGroups [][]string // all versions: a group of AND-ed groups OR-ed together
	var andGroup []string   // most current group of AND-ed versions
	var buf bytes.Buffer    // most current single version value
	var lastToken string

	// captureVersionOperatorPair flushes the accumulated token buffer into
	// the current AND group.
	captureVersionOperatorPair := func() {
		if buf.Len() > 0 {
			andGroup = append(andGroup, buf.String())
			buf.Reset()
		}
	}

	// captureAndGroup closes the current AND group and appends it to the
	// list of OR groups.
	captureAndGroup := func() {
		if len(andGroup) > 0 {
			orGroups = append(orGroups, andGroup)
			andGroup = nil
		}
	}

	scnr.Init(strings.NewReader(phrase))
	scnr.Error = func(*scanner.Scanner, string) {
		// The scanner invokes this callback on tokenization errors; the
		// default handler prints to stdout, so a no-op is installed to
		// suppress that output. Suppression is safe here because every
		// token — valid Go token or not — is simply accumulated into the
		// current version value (e.g. `3.e` is a float with no valid
		// exponent for the scanner, but a fine version component).
	}

	tokenRune := scnr.Scan()
	for tokenRune != scanner.EOF {
		currentToken := scnr.TokenText()
		switch {
		case currentToken == ",":
			captureVersionOperatorPair()
		case currentToken == "|" && lastToken == "|":
			captureVersionOperatorPair()
			captureAndGroup()
		case currentToken == "(" || currentToken == ")":
			return nil, fmt.Errorf("parenthetical expressions are not supported yet")
		case currentToken != "|":
			// WriteString avoids the []byte conversion/allocation the
			// previous buf.Write([]byte(currentToken)) performed per token.
			buf.WriteString(currentToken)
		}
		lastToken = currentToken
		tokenRune = scnr.Scan()
	}
	captureVersionOperatorPair()
	captureAndGroup()
	return orGroups, nil
}
package models
import (
"database/sql"
"database/sql/driver"
"encoding/json"
"errors"
"fmt"
)
var ErrExampleTypeInvalid = errors.New("ExampleType is invalid")
func init() {
var v ExampleType
if _, ok := interface{}(v).(fmt.Stringer); ok {
defExampleTypeNameToValue = map[string]ExampleType{
interface{}(testA).(fmt.Stringer).String(): testA,
interface{}(testB).(fmt.Stringer).String(): testB,
interface{}(testC).(fmt.Stringer).String(): testC,
interface{}(testD).(fmt.Stringer).String(): testD,
}
}
}
var defExampleTypeNameToValue = map[string]ExampleType{
"testA": testA,
"testB": testB,
"testC": testC,
"testD": testD,
}
var defExampleTypeValueToName = map[ExampleType]string{
testA: "testA",
testB: "testB",
testC: "testC",
testD: "testD",
}
// String is generated so ExampleType satisfies fmt.Stringer.
func (r ExampleType) String() string {
s, ok := defExampleTypeValueToName[r]
if !ok {
return fmt.Sprintf("ExampleType(%d)", r)
}
return s
}
// Validate verifies that value is predefined for ExampleType.
func (r ExampleType) Validate() error {
_, ok := defExampleTypeValueToName[r]
if !ok {
return ErrExampleTypeInvalid
}
return nil
}
// MarshalJSON is generated so ExampleType satisfies json.Marshaler.
func (r ExampleType) MarshalJSON() ([]byte, error) {
if s, ok := interface{}(r).(fmt.Stringer); ok {
return json.Marshal(s.String())
}
s, ok := defExampleTypeValueToName[r]
if !ok {
return nil, fmt.Errorf("ExampleType(%d) is invalid value", r)
}
return json.Marshal(s)
}
// UnmarshalJSON is generated so ExampleType satisfies json.Unmarshaler.
func (r *ExampleType) UnmarshalJSON(data []byte) error {
var s string
if err := json.Unmarshal(data, &s); err != nil {
return fmt.Errorf("ExampleType: should be a string, got %s", string(data))
}
v, ok := defExampleTypeNameToValue[s]
if !ok {
return fmt.Errorf("ExampleType(%q) is invalid value", s)
}
*r = v
return nil
}
// Value is generated so ExampleType satisfies db row driver.Valuer.
// Unknown enum values are written as SQL NULL (nil, nil) rather than
// rejected with an error — NOTE(review): confirm that silent NULL is the
// intended behavior for invalid values.
func (r ExampleType) Value() (driver.Value, error) {
	j, ok := defExampleTypeValueToName[r]
	if !ok {
		return nil, nil
	}
	return j, nil
}
// Value is generated so ExampleType satisfies db row driver.Scanner.
func (r *ExampleType) Scan(src interface{}) error {
switch v := src.(type) {
case string:
val, ok := defExampleTypeNameToValue[v]
if !ok {
return errors.New("ExampleType: can't unmarshal column data")
}
*r = val
return nil
case []byte:
var i ExampleType
is := string(v)
i = defExampleTypeNameToValue[is]
*r = i
return nil
case int, int8, int32, int64, uint, uint8, uint32, uint64:
ni := sql.NullInt64{}
err := ni.Scan(v)
if err != nil {
return errors.New("ExampleType: can't scan column data into int64")
}
*r = ExampleType(ni.Int64)
return nil
}
return errors.New("ExampleType: invalid type")
} | models/exampletype_enums.go | 0.617397 | 0.430686 | exampletype_enums.go | starcoder |
package vm
import (
"reflect"
"strings"
)
type theArrayVectorType struct{}
func (t *theArrayVectorType) String() string { return t.Name() }
func (t *theArrayVectorType) Type() ValueType { return TypeType }
func (t *theArrayVectorType) Unbox() interface{} { return reflect.TypeOf(t) }
func (lt *theArrayVectorType) Name() string { return "let-go.lang.ArrayVector" }
func (lt *theArrayVectorType) Box(bare interface{}) (Value, error) {
arr, ok := bare.([]Value)
if !ok {
return NIL, NewTypeError(bare, "can't be boxed as", lt)
}
return ArrayVector(arr), nil
}
// ArrayVectorType is the type of ArrayVectors
var ArrayVectorType *theArrayVectorType
func init() {
ArrayVectorType = &theArrayVectorType{}
}
// ArrayVector is boxed singly linked list that can hold other Values.
type ArrayVector []Value
func (l ArrayVector) Conj(val Value) Collection {
ret := make([]Value, len(l)+1)
copy(ret, l)
ret[len(ret)-1] = val
return ArrayVector(ret)
}
// Type implements Value
func (l ArrayVector) Type() ValueType { return ArrayVectorType }
// Unbox implements Value
func (l ArrayVector) Unbox() interface{} {
return []Value(l)
}
// First implements Seq
func (l ArrayVector) First() Value {
if len(l) == 0 {
return NIL
}
return l[0]
}
// More implements Seq
func (l ArrayVector) More() Seq {
if len(l) <= 1 {
return EmptyList
}
newl, _ := ListType.Box([]Value(l[1:]))
return newl.(*List)
}
// Next implements Seq
func (l ArrayVector) Next() Seq {
return l.More()
}
// Cons implements Seq
func (l ArrayVector) Cons(val Value) Seq {
ret := EmptyList
n := len(l) - 1
for i := range l {
ret = ret.Cons(l[n-i]).(*List)
}
return ret.Cons(val)
}
// Count implements Collection
func (l ArrayVector) Count() Value {
return Int(len(l))
}
func (l ArrayVector) RawCount() int {
return len(l)
}
// Empty implements Collection
func (l ArrayVector) Empty() Collection {
return make(ArrayVector, 0)
}
func NewArrayVector(v []Value) Value {
vk := make([]Value, len(v))
copy(vk, v)
return ArrayVector(vk)
}
func (l ArrayVector) ValueAt(key Value) Value {
return l.ValueAtOr(key, NIL)
}
func (l ArrayVector) ValueAtOr(key Value, dflt Value) Value {
if key == NIL {
return dflt
}
numkey, ok := key.(Int)
if !ok || numkey < 0 || int(numkey) >= len(l) {
return dflt
}
return l[int(numkey)]
}
func (l ArrayVector) String() string {
b := &strings.Builder{}
b.WriteRune('[')
n := len(l)
for i := range l {
b.WriteString(l[i].String())
if i < n-1 {
b.WriteRune(' ')
}
}
b.WriteRune(']')
return b.String()
} | pkg/vm/vector.go | 0.568416 | 0.517083 | vector.go | starcoder |
package view
import (
"image"
"github.com/mewmew/pgg/grid"
)
// A View is a visible portion of the screen.
type View struct {
// The width and height of the view.
Width, Height int
// The width and height of the view in number of columns and rows
// respectively.
cols, rows int
// The pixel offset between the top left point of the world and the view.
off image.Point
// The maximum valid pixel offset of the view.
max image.Point
}
// NewView returns a new view of the specified dimensions. The top left point
// of the world is assumed to be located at (0, 0) and the bottom right point of
// the world is specified by end.
func NewView(width, height int, end image.Point) (v *View) {
	v = &View{
		Width: width,
		Height: height,
		// Integer division: a partial trailing cell is not counted here;
		// Cols()/Rows() compensate with +1 when the view is offset.
		cols: width / grid.CellWidth,
		rows: height / grid.CellHeight,
		// max is the largest allowed scroll offset.
		// NOTE(review): the width+1/height+1 makes max one pixel smaller
		// than end-(width,height) — confirm whether end is inclusive.
		max: end.Sub(image.Pt(width+1, height+1)),
	}
	return v
}
// Move shifts the view by delta, clamping the resulting offset into
// [0, max] on each axis (min bound applied before max, matching the
// original sequential clamping).
func (v *View) Move(delta image.Point) {
	clamp := func(val, lo, hi int) int {
		if val < lo {
			val = lo
		}
		if val > hi {
			val = hi
		}
		return val
	}
	next := v.off.Add(delta)
	v.off = image.Pt(clamp(next.X, 0, v.max.X), clamp(next.Y, 0, v.max.Y))
}
// Col returns the top left column visible through the view.
func (v *View) Col() int {
return v.off.X / grid.CellWidth
}
// Row returns the top left row visible through the view.
func (v *View) Row() int {
return v.off.Y / grid.CellHeight
}
// Cols returns the number of columns visible through the view.
func (v *View) Cols() int {
if v.off.X != 0 {
// TODO(u): verify that views with a width of `n*grid.CellWidth + r`
// don't cause an index overflow in the draw loop logic.
return v.cols + 1
}
return v.cols
}
// Rows returns the number of rows visible through the view.
func (v *View) Rows() int {
if v.off.Y != 0 {
// TODO(u): verify that views with a height of `n*grid.CellHeight + r`
// don't cause an index overflow in the draw loop logic.
return v.rows + 1
}
return v.rows
}
// X returns the x offset to the grid columns visible through the view.
func (v *View) X() int {
return v.off.X % grid.CellWidth
}
// Y returns the y offset to the grid rows visible through the view.
func (v *View) Y() int {
return v.off.Y % grid.CellHeight
} | view/view.go | 0.710327 | 0.600364 | view.go | starcoder |
package binarysearchtree
// BinarySearchTree is a binary search tree of ints.
type BinarySearchTree struct {
root *Node
size int
}
// New creates a binary search tree of ints.
func New() *BinarySearchTree {
return &BinarySearchTree{}
}
// Insert adds a value to the binary search tree.
func (bts *BinarySearchTree) Insert(v int) {
newRoot, inserted := insertRecursive(nil, bts.root, v)
bts.root = newRoot
if inserted {
bts.size++
}
}
func insertRecursive(previous *Node, current *Node, v int) (node *Node, ok bool) {
if current == nil {
return &Node{
value: v,
parent: previous,
}, true
}
if v > current.Value() {
current.right, ok = insertRecursive(current, current.right, v)
} else if v < current.Value() {
current.left, ok = insertRecursive(current, current.left, v)
}
return current, ok
}
// Remove deletes node n from the binary search tree and decrements its size.
//
// Fixes over the previous implementation:
//   - removing the root (or any node whose parent is nil) no longer
//     dereferences a nil parent;
//   - removing a node with a single child splices the whole child subtree
//     into place instead of copying only the child's value (which silently
//     dropped the child's own descendants);
//   - the size counter is kept in sync with Insert.
func (bts *BinarySearchTree) Remove(n *Node) {
	if n == nil {
		return
	}
	if n.Left() != nil && n.Right() != nil {
		// Two children: overwrite with the in-order successor's value and
		// remove the successor, which has at most one child.
		successor := n.Right()
		for successor.Left() != nil {
			successor = successor.Left()
		}
		n.value = successor.Value()
		bts.Remove(successor)
		return
	}
	// Zero or one child: splice the child (possibly nil) into n's place.
	child := n.Left()
	if child == nil {
		child = n.Right()
	}
	p := n.Parent()
	if child != nil {
		child.parent = p
	}
	switch {
	case p == nil:
		bts.root = child
	case p.Left() == n:
		p.left = child
	default:
		p.right = child
	}
	bts.size--
}
// NodeHeight returns the number of edges from a node to its deepest
// descendent (the height of a node). A nil node has height -1, so a leaf
// has height 0.
func (bts *BinarySearchTree) NodeHeight(n *Node) int {
	return nodeHeightRecursive(n)
}

// nodeHeightRecursive computes the height of n. The nil guard fixes a
// crash in the previous version, which recursed into a nil child whenever
// a node had exactly one child.
func nodeHeightRecursive(n *Node) int {
	if n == nil {
		return -1
	}
	leftHeight := nodeHeightRecursive(n.Left())
	rightHeight := nodeHeightRecursive(n.Right())
	if leftHeight > rightHeight {
		return leftHeight + 1
	}
	return rightHeight + 1
}
// NodeDepth returns the number of edges from the root of the tree to a node (the depth of a node).
func (bts *BinarySearchTree) NodeDepth(n *Node) int {
return nodeDepthRecursive(n)
}
func nodeDepthRecursive(n *Node) int {
if n.Parent() == nil {
return 0
}
return nodeDepthRecursive(n.Parent()) + 1
}
// Find returns the node of the binary search tree that holds value v,
// or (nil, false) when the value is absent. The previous version returned
// found == true for absent values because it compared the exhausted nil
// cursor against nil (`return n, n == nil`).
func (bts *BinarySearchTree) Find(v int) (*Node, bool) {
	for n := bts.Root(); n != nil; {
		switch {
		case v < n.Value():
			n = n.Left()
		case v > n.Value():
			n = n.Right()
		default:
			return n, true
		}
	}
	return nil, false
}
// Root returns the root of the binary search tree.
func (bts *BinarySearchTree) Root() *Node {
return bts.root
}
// Height returns the number of edges between the root of the tree and its deepest descendent, i.e. the height of the root.
func (bts *BinarySearchTree) Height() int {
return bts.NodeHeight(bts.Root())
}
// Size returns the number of nodes in the binary search tree.
func (bts *BinarySearchTree) Size() int {
return bts.size
} | binarysearchtree/binarysearchtree.go | 0.908546 | 0.486027 | binarysearchtree.go | starcoder |
package canvas
import (
"encoding/json"
"golang.org/x/xerrors"
)
const backgroundChar = '-'
type Canvas struct {
Name string `json:"name,omitempty"`
Width uint `json:"width"`
Height uint `json:"height"`
Data []byte `json:"data,omitempty"`
}
func (c *Canvas) MarshalBinary() (data []byte, err error) {
data, err = json.Marshal(c)
if err != nil {
return data, xerrors.Errorf("failed to marshal canvas to json: %w", err)
}
return data, nil
}
// Split returns the canvas content as one string per row, lazily
// initializing the backing data with the background character.
func (c *Canvas) Split() []string {
	if len(c.Data) == 0 {
		c.initData(backgroundChar)
	}
	rows := make([]string, c.Height)
	for row := uint(0); row < c.Height; row++ {
		offset := row * c.Width
		rows[row] = string(c.Data[offset : offset+c.Width])
	}
	return rows
}
// DrawRect draws rect onto the canvas. fill paints the interior and
// outline traces the border; either may be "" to skip that part, and each
// must be at most one byte long.
//
// Errors: PointOutOfBound when the origin lies outside the canvas,
// ObjectTooLarge when the rectangle does not fit, BadPattern when a
// pattern is longer than one byte.
//
// All loops iterate upward: the previous version counted down with
// unsigned indexes, which wraps past zero (then panics or loops forever)
// for rectangles touching column 0 or of height 1; it also let
// `fillWidth -= 2` underflow for rects thinner than 3 cells.
func (c *Canvas) DrawRect(rect *Rectangle, fill string, outline string) error {
	if rect.Origin.X > c.Width || rect.Origin.Y > c.Height {
		return PointOutOfBound
	}
	if rect.Origin.X+rect.Width > c.Width || rect.Origin.Y+rect.Height > c.Height {
		return ObjectTooLarge
	}
	if len(fill) > 1 || len(outline) > 1 {
		return BadPattern
	}
	if rect.Width == 0 || rect.Height == 0 {
		return nil
	}
	if len(c.Data) == 0 {
		c.initData(backgroundChar)
	}
	// The fill rectangle starts as a copy of rect and shrinks by one cell
	// on each side when an outline is drawn.
	fillOrigin := rect.Origin
	fillWidth := rect.Width
	fillHeight := rect.Height
	if outline != "" {
		outlineChar := outline[0]
		// Horizontal border lines (top and bottom; identical when Height == 1).
		upperOffset := rect.Origin.Y * c.Width
		lowerOffset := (rect.Origin.Y + rect.Height - 1) * c.Width
		for x := rect.Origin.X; x < rect.Origin.X+rect.Width; x++ {
			c.Data[upperOffset+x] = outlineChar
			c.Data[lowerOffset+x] = outlineChar
		}
		// Vertical border lines; corner cells were already drawn above.
		leftOffset := rect.Origin.X
		rightOffset := rect.Origin.X + rect.Width - 1
		for y := rect.Origin.Y + 1; y+1 < rect.Origin.Y+rect.Height; y++ {
			yOffset := y * c.Width
			c.Data[yOffset+leftOffset] = outlineChar
			c.Data[yOffset+rightOffset] = outlineChar
		}
		// Shrink the fill area to the interior; a rect at most 2 cells
		// wide or tall has none.
		if fillWidth <= 2 || fillHeight <= 2 {
			fillWidth, fillHeight = 0, 0
		} else {
			fillOrigin.X++
			fillOrigin.Y++
			fillWidth -= 2
			fillHeight -= 2
		}
	}
	if fill != "" && fillWidth > 0 && fillHeight > 0 {
		fillChar := fill[0]
		for y := fillOrigin.Y; y < fillOrigin.Y+fillHeight; y++ {
			yOffset := y * c.Width
			for x := fillOrigin.X; x < fillOrigin.X+fillWidth; x++ {
				c.Data[yOffset+x] = fillChar
			}
		}
	}
	return nil
}
// FloodFill replaces the 4-connected region of cells matching the color at
// origin with the fill character. A fill pattern of exactly one byte is
// required; an uninitialized canvas is simply filled entirely.
//
// Fixes over the previous implementation:
//   - the neighbor loop used `return` instead of `continue` on a negative
//     coordinate, abandoning the remaining directions;
//   - an empty fill string panicked on fill[0] (now rejected as BadPattern);
//   - the bounds check used `>` so origin exactly at Width/Height slipped
//     through to an out-of-range index;
//   - recursion is replaced with an explicit stack, so large regions cannot
//     overflow the goroutine stack.
func (c *Canvas) FloodFill(origin *Point, fill string) error {
	if origin.X >= c.Width || origin.Y >= c.Height {
		return PointOutOfBound
	}
	if len(fill) != 1 {
		return BadPattern
	}
	fillChar := fill[0]
	if len(c.Data) == 0 {
		c.initData(fillChar)
		return nil
	}
	orgChar := c.get(origin.X, origin.Y)
	if orgChar == fillChar {
		return nil
	}
	// Iterative DFS over cells still holding the original color.
	stack := []Point{{X: origin.X, Y: origin.Y}}
	for len(stack) > 0 {
		p := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if c.get(p.X, p.Y) != orgChar {
			continue // already filled via another path
		}
		c.set(p.X, p.Y, fillChar)
		if p.X > 0 {
			stack = append(stack, Point{X: p.X - 1, Y: p.Y})
		}
		if p.X+1 < c.Width {
			stack = append(stack, Point{X: p.X + 1, Y: p.Y})
		}
		if p.Y > 0 {
			stack = append(stack, Point{X: p.X, Y: p.Y - 1})
		}
		if p.Y+1 < c.Height {
			stack = append(stack, Point{X: p.X, Y: p.Y + 1})
		}
	}
	return nil
}
func (c *Canvas) initData(v byte) {
c.Data = make([]byte, c.Width*c.Height)
for i := range c.Data {
c.Data[i] = v
}
}
func (c *Canvas) set(x, y uint, v byte) {
c.Data[y*c.Width+x] = v
}
func (c *Canvas) get(x, y uint) byte {
return c.Data[y*c.Width+x]
} | pkg/canvas/canvas.go | 0.80954 | 0.45944 | canvas.go | starcoder |
package gdual
// UpperTriToeplitz is a square, upper triangular Toeplitz matrix: each
// upper diagonal holds one repeated value, so only `order` floats are
// stored (val[i] is the value on the i-th upper diagonal).
type UpperTriToeplitz struct {
	order int
	val   []float64
}

// NewUpperTriToeplitz returns a zero matrix of the given order.
func NewUpperTriToeplitz(order int) *UpperTriToeplitz {
	return &UpperTriToeplitz{
		order: order,
		val:   make([]float64, order),
	}
}

// importUpperTriToeplitz wraps an existing diagonal slice without copying.
func importUpperTriToeplitz(val []float64) *UpperTriToeplitz {
	return &UpperTriToeplitz{
		order: len(val),
		val:   val,
	}
}

/* utility functions */

// get returns the value of diagonal i, or 0 when i is out of range.
// The guard rejects i == m.order (and negative i): val has exactly m.order
// entries, so the previous `i > m.order` check allowed an out-of-range
// index and panicked.
func (m *UpperTriToeplitz) get(i int) float64 {
	if i < 0 || i >= m.order {
		return 0.0
	}
	return m.val[i]
}

// set assigns val to diagonal i; out-of-range indexes are ignored
// (same off-by-one fix as get).
func (m *UpperTriToeplitz) set(i int, val float64) {
	if i < 0 || i >= m.order {
		return
	}
	m.val[i] = val
}

// Fill sets the given upper diagonal of the matrix to val.
func (m *UpperTriToeplitz) Fill(diagonal int, val float64) {
	m.set(diagonal, val)
}

// Reset sets every diagonal to val.
func (m *UpperTriToeplitz) Reset(val float64) {
	for i := 0; i < m.order; i++ {
		m.set(i, val)
	}
}

// Copy returns an independent deep copy of the matrix.
// (The previous local variable named `copy` shadowed the builtin.)
func (m *UpperTriToeplitz) Copy() *UpperTriToeplitz {
	out := NewUpperTriToeplitz(m.order)
	copy(out.val, m.val)
	return out
}
/* element-wise matrix operations */

// ElementAdd adds val to every stored diagonal value in place.
func (m *UpperTriToeplitz) ElementAdd(val float64) {
	for i := 0; i < m.order; i++ {
		sum := m.get(i) + val
		m.set(i, sum)
	}
}

// ElementSub subtracts val from every stored diagonal value in place.
func (m *UpperTriToeplitz) ElementSub(val float64) {
	for i := 0; i < m.order; i++ {
		difference := m.get(i) - val
		m.set(i, difference)
	}
}

// ElementMul multiplies every stored diagonal value by val in place.
func (m *UpperTriToeplitz) ElementMul(val float64) {
	for i := 0; i < m.order; i++ {
		product := m.get(i) * val
		m.set(i, product)
	}
}

// ElementDiv divides every stored diagonal value by val in place.
// No zero check: val == 0 yields Inf/NaN entries.
func (m *UpperTriToeplitz) ElementDiv(val float64) {
	for i := 0; i < m.order; i++ {
		quotient := m.get(i) / val
		m.set(i, quotient)
	}
}
/* matrix operations */
func (m *UpperTriToeplitz) Add(inp *UpperTriToeplitz) *UpperTriToeplitz {
out := NewUpperTriToeplitz(m.order)
for i := 0; i < m.order; i++ {
sum := m.get(i) + inp.get(i)
out.set(i, sum)
}
return out
}
func (m *UpperTriToeplitz) Sub(inp *UpperTriToeplitz) *UpperTriToeplitz {
out := NewUpperTriToeplitz(m.order)
for i := 0; i < m.order; i++ {
difference := m.get(i) - inp.get(i)
out.set(i, difference)
}
return out
}
func (m *UpperTriToeplitz) Mul(inp *UpperTriToeplitz) *UpperTriToeplitz {
out := NewUpperTriToeplitz(m.order)
for i := 0; i < m.order; i++ {
product := 0.0
for k := i; k >= 0; k-- {
product += m.get(i-k) * inp.get(k)
}
out.set(i, product)
}
return out
}
/*
shortcut borrowed from
blog.jliszka.org/2013/10/24/exact-numeric-nth-derivatives.html
a nilpotent matrix follows the form:
(I - N)^-1 = Σ(I + N + N^2 + ... N^n-1)
since the square upper triangular matrices we're using
follow the form a*I + D, where a is some constant and D
is a nilpotent matrix, we can use the above equation
to simplify our inverse calculation.
we calculate D by subtracting our input matrix by a*I.
in order to find (a*I + D)^-1, the formula is:
(a*I + D)^-1 = (1 / a*I) * Σ(I + N + N^2 + ... N^n-1)
*/

// Inv returns the inverse of m using the nilpotent-series shortcut above.
// NOTE(review): requires the main diagonal `a` to be non-zero; a == 0
// silently produces Inf/NaN entries via the divisions below — confirm
// callers guarantee this.
func (m *UpperTriToeplitz) Inv() *UpperTriToeplitz {
	// derive a*I
	a := m.get(0)
	A := NewUpperTriToeplitz(m.order)
	A.Fill(0, a)
	// derive the nilpotent matrix D
	D := m.Sub(A)
	// derive N from D and a (N aliases D, which Sub returned fresh, so the
	// in-place division is safe)
	N := D
	N.ElementDiv(-a)
	// initialize the identity matrix
	inv := NewUpperTriToeplitz(m.order)
	inv.Fill(0, 1.0)
	firstN := N.Copy()
	// accumulate I + N + N^2 + ...; terms beyond N^(order-1) vanish for a
	// nilpotent N of this order
	for i := 0; i < m.order; i++ {
		inv = inv.Add(N)
		if i != m.order-1 {
			N = firstN.Mul(N)
		}
	}
	// divide by a to find the true inverse
	inv.ElementDiv(a)
	return inv
}
// Div returns m * inp^-1 (right division).
func (m *UpperTriToeplitz) Div(inp *UpperTriToeplitz) *UpperTriToeplitz {
	inv := inp.Inv()
	return m.Mul(inv)
}

// Pow returns m raised to the n-th power by repeated multiplication.
// n <= 0 now yields the multiplicative identity; the previous version
// returned a copy of m for every n < 2, making Pow(0) wrong.
func (m *UpperTriToeplitz) Pow(n int) *UpperTriToeplitz {
	if n <= 0 {
		// NOTE(review): negative exponents are treated like 0 (identity);
		// combine with Inv() if m^-n is ever needed.
		ident := NewUpperTriToeplitz(m.order)
		ident.Fill(0, 1.0)
		return ident
	}
	out := m.Copy()
	for i := 1; i < n; i++ {
		out = out.Mul(m)
	}
	return out
}
/* standard matrix for testing and benchmarking purposes */

// Matrix is a dense square matrix of float64 values.
type Matrix struct {
	order int
	val   [][]float64
}

// NewMatrix returns a zero matrix of the given order.
func NewMatrix(order int) *Matrix {
	val := make([][]float64, order)
	for i := 0; i < order; i++ {
		val[i] = make([]float64, order)
	}
	return &Matrix{
		order: order,
		val:   val,
	}
}

// importMatrix builds a dense upper triangular Toeplitz matrix from the
// diagonal values in inp: inp[k] fills the k-th upper diagonal.
func importMatrix(inp []float64) *Matrix {
	order := len(inp)
	val := make([][]float64, order)
	for i := 0; i < order; i++ {
		val[i] = make([]float64, order)
		for j := i; j < order; j++ {
			val[i][j] = inp[j-i]
		}
	}
	return &Matrix{
		order: order,
		val:   val,
	}
}

/* utility functions */

// get returns element (i, j), or 0 when either index is out of range.
// The guards reject i == m.order / j == m.order (and negatives): the
// previous `i > m.order` checks allowed out-of-range indexes through and
// panicked.
func (m *Matrix) get(i, j int) float64 {
	if i < 0 || j < 0 || i >= m.order || j >= m.order {
		return 0.0
	}
	return m.val[i][j]
}

// set assigns val to element (i, j); out-of-range indexes are ignored
// (same off-by-one fix as get).
func (m *Matrix) set(i, j int, val float64) {
	if i < 0 || j < 0 || i >= m.order || j >= m.order {
		return
	}
	m.val[i][j] = val
}
// Fill sets the given upper diagonal of the matrix to val.
func (m *Matrix) Fill(diagonal int, val float64) {
	for i := diagonal; i < m.order; i++ {
		m.set(i-diagonal, i, val)
	}
}

// Reset sets every element to val.
func (m *Matrix) Reset(val float64) {
	for i := 0; i < m.order; i++ {
		for j := 0; j < m.order; j++ {
			m.set(i, j, val)
		}
	}
}

// Copy returns an independent deep copy of the matrix.
// (The local variable, previously named `copy`, shadowed the builtin.)
func (m *Matrix) Copy() *Matrix {
	dup := NewMatrix(m.order)
	for i := 0; i < m.order; i++ {
		for j := 0; j < m.order; j++ {
			dup.set(i, j, m.get(i, j))
		}
	}
	return dup
}
/* general matrix operations */

// Determinant returns the product of the main-diagonal entries.
// NOTE(review): this equals the true determinant only for triangular
// matrices (the form Matrix is used in here) — confirm before reusing it
// on general dense matrices.
func (m *Matrix) Determinant() float64 {
	det := 1.0
	for i := 0; i < m.order; i++ {
		det *= m.get(i, i)
	}
	return det
}
/* element-wise matrix operations */
func (m *Matrix) ElementAdd(val float64) {
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
sum := m.get(i, j) + val
m.set(i, j, sum)
}
}
}
func (m *Matrix) ElementSub(val float64) {
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
difference := m.get(i, j) - val
m.set(i, j, difference)
}
}
}
func (m *Matrix) ElementMul(val float64) {
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
product := m.get(i, j) * val
m.set(i, j, product)
}
}
}
func (m *Matrix) ElementDiv(val float64) {
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
quotient := m.get(i, j) / val
m.set(i, j, quotient)
}
}
}
/* matrix operations */
func (m *Matrix) Add(inp *Matrix) *Matrix {
out := NewMatrix(m.order)
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
sum := m.get(i, j) + inp.get(i, j)
out.set(i, j, sum)
}
}
return out
}
func (m *Matrix) Sub(inp *Matrix) *Matrix {
out := NewMatrix(m.order)
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
difference := m.get(i, j) - inp.get(i, j)
out.set(i, j, difference)
}
}
return out
}
func (m *Matrix) Mul(inp *Matrix) *Matrix {
out := NewMatrix(m.order)
for i := 0; i < m.order; i++ {
for j := 0; j < m.order; j++ {
product := 0.0
for k := 0; k < m.order; k++ {
product += m.get(i, k) * inp.get(k, j)
}
out.set(i, j, product)
}
}
return out
}
func (m *Matrix) Inv() *Matrix {
// derive a*I
a := m.get(0, 0)
A := NewMatrix(m.order)
A.Fill(0, a)
// derive the nilpotent matrix D
D := m.Sub(A)
// derive N from D and a
N := D
N.ElementDiv(-a)
// initialize the identity matrix
inv := NewMatrix(m.order)
inv.Fill(0, 1.0)
firstN := N.Copy()
for i := 0; i < m.order; i++ {
inv = inv.Add(N)
if i != m.order-1 {
N = firstN.Mul(N)
}
}
// divide by a to find the true inverse
inv.ElementDiv(a)
return inv
}
func (m *Matrix) Div(inp *Matrix) *Matrix {
inv := inp.Inv()
out := m.Mul(inv)
return out
}
func (m *Matrix) Pow(n int) *Matrix {
out := m.Copy()
for i := 0; i < n-1; i++ {
out = out.Mul(m)
}
return out
} | matrix.go | 0.864368 | 0.529872 | matrix.go | starcoder |
package prjn
import (
"github.com/emer/emergent/evec"
"github.com/emer/etable/etensor"
"github.com/goki/mat32"
)
// Circle implements a circular pattern of connectivity between two layers
// where the center moves in proportion to receiver position with offset
// and multiplier factors, and a given radius is used (with wrap-around
// optionally). A corresponding Gaussian bump of TopoWts is available as well.
// Makes for a good center-surround connectivity pattern.
// 4D layers are automatically flattened to 2D for this connection.
type Circle struct {
Radius int `desc:"radius of the circle, in units from center in sending layer"`
Start evec.Vec2i `desc:"starting offset in sending layer, for computing the corresponding sending center relative to given recv unit position"`
Scale mat32.Vec2 `desc:"scaling to apply to receiving unit position to compute sending center as function of recv unit position"`
AutoScale bool `desc:"auto-scale sending center positions as function of relative sizes of send and recv layers -- if Start is positive then assumes it is a border, subtracted from sending size"`
Wrap bool `desc:"if true, connectivity wraps around edges"`
Sigma float32 `desc:"gaussian sigma (width) as a proportion of the radius of the circle"`
MaxWt float32 `desc:"maximum weight value for GaussWts function -- multiplies values"`
SelfCon bool `desc:"if true, and connecting layer to itself (self projection), then make a self-connection from unit to itself"`
}
func NewCircle() *Circle {
cr := &Circle{}
cr.Defaults()
return cr
}
func (cr *Circle) Defaults() {
cr.Wrap = true
cr.Radius = 8
cr.Scale.SetScalar(1)
cr.Sigma = 0.5
cr.MaxWt = 1
}
func (cr *Circle) Name() string {
return "Circle"
}
func (cr *Circle) Connect(send, recv *etensor.Shape, same bool) (sendn, recvn *etensor.Int32, cons *etensor.Bits) {
sendn, recvn, cons = NewTensors(send, recv)
sNy, sNx, _, _ := etensor.Prjn2DShape(send, false)
rNy, rNx, _, _ := etensor.Prjn2DShape(recv, false)
rnv := recvn.Values
snv := sendn.Values
sNtot := send.Len()
sc := cr.Scale
if cr.AutoScale {
ssz := mat32.Vec2{float32(sNx), float32(sNy)}
if cr.Start.X >= 0 && cr.Start.Y >= 0 {
ssz.X -= float32(2 * cr.Start.X)
ssz.Y -= float32(2 * cr.Start.Y)
}
rsz := mat32.Vec2{float32(rNx), float32(rNy)}
sc = ssz.Div(rsz)
}
for ry := 0; ry < rNy; ry++ {
for rx := 0; rx < rNx; rx++ {
sctr := mat32.Vec2{float32(rx)*sc.X + float32(cr.Start.X), float32(ry)*sc.Y + float32(cr.Start.Y)}
for sy := 0; sy < sNy; sy++ {
for sx := 0; sx < sNx; sx++ {
sp := mat32.Vec2{float32(sx), float32(sy)}
if cr.Wrap {
sp.X = WrapMinDist(sp.X, float32(sNx-1), sctr.X)
sp.Y = WrapMinDist(sp.Y, float32(sNy-1), sctr.Y)
}
d := int(mat32.Round(sp.DistTo(sctr)))
if d <= cr.Radius {
ri := etensor.Prjn2DIdx(recv, false, ry, rx)
si := etensor.Prjn2DIdx(send, false, sy, sx)
off := ri*sNtot + si
if !cr.SelfCon && same && ri == si {
continue
}
cons.Values.Set(off, true)
rnv[ri]++
snv[si]++
}
}
}
}
}
return
}
// GaussWts returns gaussian weight value for given unit indexes in
// given send and recv layers according to Gaussian Sigma and MaxWt.
// Can be used for a Prjn.SetScalesFunc or SetWtsFunc
func (cr *Circle) GaussWts(si, ri int, send, recv *etensor.Shape) float32 {
sNy, sNx, _, _ := etensor.Prjn2DShape(send, false)
rNy, rNx, _, _ := etensor.Prjn2DShape(recv, false)
ry := ri / rNx // todo: this is not right for 4d!
rx := ri % rNx
sy := si / sNx
sx := si % sNx
fsig := cr.Sigma * float32(cr.Radius)
sc := cr.Scale
if cr.AutoScale {
ssz := mat32.Vec2{float32(sNx), float32(sNy)}
if cr.Start.X >= 0 && cr.Start.Y >= 0 {
ssz.X -= float32(2 * cr.Start.X)
ssz.Y -= float32(2 * cr.Start.Y)
}
rsz := mat32.Vec2{float32(rNx), float32(rNy)}
sc = ssz.Div(rsz)
}
sctr := mat32.Vec2{float32(rx)*sc.X + float32(cr.Start.X), float32(ry)*sc.Y + float32(cr.Start.Y)}
sp := mat32.Vec2{float32(sx), float32(sy)}
if cr.Wrap {
sp.X = WrapMinDist(sp.X, float32(sNx-1), sctr.X)
sp.Y = WrapMinDist(sp.Y, float32(sNy-1), sctr.Y)
}
wt := cr.MaxWt * evec.GaussVecDistNoNorm(sp, sctr, fsig)
return wt
} | prjn/circle.go | 0.742795 | 0.556882 | circle.go | starcoder |
package gouldian
/*
Endpoint is a composable function that abstract HTTP endpoint.
The function takes HTTP request and returns value of some type:
`Context => Output`.
↣ `Context` is a wrapper over HTTP request with additional context.
↣ `Output` is sum type that represents if it is matched on a given input
or not. The library uses `error` type to represent both valid and invalid
variants.
Any `Endpoint A` can be composed with `Endpoint B` into new `Endpoint C`.
It supports two combinators: and-then, or-else.
↣ Use `and-then` to build product Endpoint. The product type matches Input
if each composed function successfully matches it.
↣ Use `or-else` to build co-product Endpoint. The co-product is also known
as sum-type matches first successful function.
Endpoint life-cycle - each incoming HTTP request is wrapped with `Input`
and applied to an endpoint. A returned error-like results is checked
against successful Output or NoMatch error. All these machinery is handled
by the libray, you should only dare to declare Endpoint from ready made
primitives.
gouldian library delivers set of built-in endpoints to deal with HTTP
request processing.
*/
// Endpoint is a composable function over an HTTP request context; its
// error result encodes the outcome (NoMatch when the request is not
// recognized).
type Endpoint func(*Context) error

// Then builds product Endpoint: the result matches only when a matches
// first and then b matches. A non-nil result from a (including NoMatch)
// is returned without evaluating b.
func (a Endpoint) Then(b Endpoint) Endpoint {
	return func(http *Context) (err error) {
		if err = a(http); err == nil {
			return b(http)
		}
		return err
	}
}
// Or builds co-product Endpoint
func (a Endpoint) Or(b Endpoint) Endpoint {
return func(http *Context) (err error) {
switch err := a(http).(type) {
case NoMatch:
return b(http)
default:
return err
}
}
}
/*
Routable is endpoint with routing metadata
*/
type Routable func() ([]string, Endpoint)
/*
Router is data structure that holds routing information,
convertable to Endpoint
*/
type Router interface {
Endpoint() Endpoint
}
// NoMatch is returned by Endpoint if Context is not matched.
type NoMatch int
func (err NoMatch) Error() string {
return "No Match"
}
// ErrNoMatch constant
var ErrNoMatch error = NoMatch(255)
/*
Endpoints is sequence of Endpoints
*/
type Endpoints []Endpoint

/*
Join builds product endpoint from sequence: each endpoint is evaluated in
order and the first non-nil error stops the chain; nil is returned when
every endpoint succeeds.
*/
func (seq Endpoints) Join(ctx *Context) (err error) {
	for _, f := range seq {
		err = f(ctx)
		if err != nil {
			return err
		}
	}
	return nil
}
/*
Or builds co-product endpoint from sequence
*/
func (seq Endpoints) Or(ctx *Context) (err error) {
for _, f := range seq {
x := f(ctx)
switch err := x.(type) {
case NoMatch:
continue
default:
return err
}
}
return ErrNoMatch
} | endpoint.go | 0.797044 | 0.400779 | endpoint.go | starcoder |
package v2d
import "github.com/chewxy/math32"
type Transform interface {
// TransVec applies this Transform to a vector
TransVec(Vec) Vec
// TransRect applies this transform to a rectangle. Note that since rectangles always are axis
// aligned the transformed rectangle will fully enclose the original rectangle.
TransRect(Rect) Rect
}
type TransformRotateMove struct {
Sin, Cos float32
Around Vec
Offset Vec
}
// Rotate move creates a Transform that rotates with an angle theta about around and
// subsequently adds offset.
func RotateMove(theta float32, around, offset Vec) (t TransformRotateMove) {
t.Sin = math32.Sin(theta)
t.Cos = math32.Cos(theta)
t.Around = around
t.Offset = offset
return
}
// TransVec applies this Transform to a vector
func (t TransformRotateMove) TransVec(v Vec) (w Vec) {
v = v.Sub(t.Around)
w = V(v.X*t.Cos-v.Y*t.Sin, v.X*t.Sin+v.Y*t.Cos)
w = w.Add(t.Around)
w = w.Add(t.Offset)
return
}
// TransRect applies this transform to a rectangle. Note that since rectangles always are axis
// aligned the transformed rectangle will fully enclose the original rectangle.
func (t TransformRotateMove) TransRect(r Rect) (tr Rect) {
// simplest approach is to transform all 4 corners, and find min and
// max x, y coordinates.
// TODO: Since we know the angle we should be able to do something smarter? We know
// which corner will be where - but do that and check to this function to be sure...
c1 := r.Min
c2 := V(r.Max.X, r.Min.Y)
c3 := r.Max
c4 := V(r.Min.X, r.Max.Y)
c1 = t.TransVec(c1)
c2 = t.TransVec(c2)
c3 = t.TransVec(c3)
c4 = t.TransVec(c4)
tr.Min = V(math32.MaxFloat32, math32.MaxFloat32)
tr.Max = V(-math32.MaxFloat32, -math32.MaxFloat32)
// manually unrolled loops
if c1.X < tr.Min.X {
tr.Min.X = c1.X
}
if c2.X < tr.Min.X {
tr.Min.X = c2.X
}
if c3.X < tr.Min.X {
tr.Min.X = c3.X
}
if c4.X < tr.Min.X {
tr.Min.X = c4.X
}
if c1.Y < tr.Min.Y {
tr.Min.Y = c1.Y
}
if c2.Y < tr.Min.Y {
tr.Min.Y = c2.Y
}
if c3.Y < tr.Min.Y {
tr.Min.Y = c3.Y
}
if c4.Y < tr.Min.Y {
tr.Min.Y = c4.Y
}
if c1.X > tr.Max.X {
tr.Max.X = c1.X
}
if c2.X > tr.Max.X {
tr.Max.X = c2.X
}
if c3.X > tr.Max.X {
tr.Max.X = c3.X
}
if c4.X > tr.Max.X {
tr.Max.X = c4.X
}
if c1.Y > tr.Max.Y {
tr.Max.Y = c1.Y
}
if c2.Y > tr.Max.Y {
tr.Max.Y = c2.Y
}
if c3.Y > tr.Max.Y {
tr.Max.Y = c3.Y
}
if c4.Y > tr.Max.Y {
tr.Max.Y = c4.Y
}
return
} | trans.go | 0.767341 | 0.644463 | trans.go | starcoder |
package costmodel

import (
	costAnalyzerCloud "github.com/kubecost/cost-model/pkg/cloud"
	"github.com/kubecost/cost-model/pkg/util"
	"k8s.io/klog"
)

// NetworkUsageData contains the egress network usage vectors (zone, region
// and internet traffic) for a single pod in a cluster.
type NetworkUsageData struct {
	ClusterID             string
	PodName               string
	Namespace             string
	NetworkZoneEgress     []*util.Vector
	NetworkRegionEgress   []*util.Vector
	NetworkInternetEgress []*util.Vector
}

// NetworkUsageVector contains a network usage vector for egress network traffic
type NetworkUsageVector struct {
	ClusterID string
	PodName   string
	Namespace string
	Values    []*util.Vector
}
// GetNetworkUsageData performs a join of the results of zone, region, and
// internet usage queries to return a single map containing network usage for
// each "namespace,pod,cluster" key. The three result sets are merged with a
// shared helper instead of three copy-pasted loops.
func GetNetworkUsageData(zr interface{}, rr interface{}, ir interface{}, defaultClusterID string) (map[string]*NetworkUsageData, error) {
	zoneNetworkMap, err := getNetworkUsage(zr, defaultClusterID)
	if err != nil {
		return nil, err
	}
	regionNetworkMap, err := getNetworkUsage(rr, defaultClusterID)
	if err != nil {
		return nil, err
	}
	internetNetworkMap, err := getNetworkUsage(ir, defaultClusterID)
	if err != nil {
		return nil, err
	}
	usageData := make(map[string]*NetworkUsageData)
	// merge folds one usage map into usageData; set stores the vector list
	// into the field appropriate for that traffic class.
	merge := func(m map[string]*NetworkUsageVector, set func(*NetworkUsageData, []*util.Vector)) {
		for k, v := range m {
			existing, ok := usageData[k]
			if !ok {
				existing = &NetworkUsageData{
					ClusterID: v.ClusterID,
					PodName:   v.PodName,
					Namespace: v.Namespace,
				}
				usageData[k] = existing
			}
			set(existing, v.Values)
		}
	}
	merge(zoneNetworkMap, func(d *NetworkUsageData, vs []*util.Vector) { d.NetworkZoneEgress = vs })
	merge(regionNetworkMap, func(d *NetworkUsageData, vs []*util.Vector) { d.NetworkRegionEgress = vs })
	merge(internetNetworkMap, func(d *NetworkUsageData, vs []*util.Vector) { d.NetworkInternetEgress = vs })
	return usageData, nil
}
// GetNetworkCost computes the actual cost for NetworkUsageData based on data provided by the Provider.
// The three egress series may have different lengths; each output sample sums
// whichever series still have a value at that index.
func GetNetworkCost(usage *NetworkUsageData, cloud costAnalyzerCloud.Provider) ([]*util.Vector, error) {
	pricing, err := cloud.NetworkPricing()
	if err != nil {
		return nil, err
	}
	zone := usage.NetworkZoneEgress
	region := usage.NetworkRegionEgress
	internet := usage.NetworkInternetEgress
	var results []*util.Vector
	for i, n := 0, max(len(zone), len(region), len(internet)); i < n; i++ {
		var cost float64
		var timestamp float64
		// The timestamp of the last series consulted wins, matching the
		// original zone -> region -> internet ordering.
		if i < len(zone) {
			cost += zone[i].Value * pricing.ZoneNetworkEgressCost
			timestamp = zone[i].Timestamp
		}
		if i < len(region) {
			cost += region[i].Value * pricing.RegionNetworkEgressCost
			timestamp = region[i].Timestamp
		}
		if i < len(internet) {
			cost += internet[i].Value * pricing.InternetNetworkEgressCost
			timestamp = internet[i].Timestamp
		}
		results = append(results, &util.Vector{
			Value:     cost,
			Timestamp: timestamp,
		})
	}
	return results, nil
}
// getNetworkUsage converts a Prometheus query result into a map keyed by
// "namespace,pod,cluster" of raw egress usage vectors.
func getNetworkUsage(qr interface{}, defaultClusterID string) (map[string]*NetworkUsageVector, error) {
	ncdmap := make(map[string]*NetworkUsageVector)
	result, err := NewQueryResults(qr)
	if err != nil {
		return nil, err
	}
	for _, val := range result {
		podName, err := val.GetString("pod_name")
		if err != nil {
			return nil, err
		}
		namespace, err := val.GetString("namespace")
		if err != nil {
			return nil, err
		}
		// cluster_id is optional: fall back to the default when the label is
		// missing or empty. (The original silently ignored this error, which
		// trips go vet/staticcheck; treating the error as "label absent"
		// preserves the intended fallback behavior.)
		clusterID, err := val.GetString("cluster_id")
		if err != nil || clusterID == "" {
			klog.V(4).Info("Prometheus vector does not have cluster id")
			clusterID = defaultClusterID
		}
		key := namespace + "," + podName + "," + clusterID
		ncdmap[key] = &NetworkUsageVector{
			ClusterID: clusterID,
			Namespace: namespace,
			PodName:   podName,
			Values:    val.Values,
		}
	}
	return ncdmap, nil
}
// max returns the largest of x and any additional values.
func max(x int, rest ...int) int {
	for _, v := range rest {
		if v > x {
			x = v
		}
	}
	return x
}
package csg

import (
	"fmt"
	"strings"
)

// BSP holds a node in a BSP tree. A BSP tree is built from a collection of
// polygons by picking a polygon to split along. That polygon (and all other
// coplanar polygons) are added directly to that node and the other polygons
// are added to the front and/or back subtrees. This is not a leafy BSP tree
// since there is no distinction between internal and leaf nodes.
type BSP struct {
	Plane    *Plane   // splitting plane; nil for a freshly created empty node
	Polygons Polygons // polygons coplanar with Plane
	Front    *BSP     // subtree in front of Plane, or nil
	Back     *BSP     // subtree behind Plane, or nil
}
// Invert converts solid space to empty space and empty space to solid space
// by flipping every polygon, the splitting plane, both subtrees, and finally
// swapping front with back.
func (n *BSP) Invert() {
	for i := range n.Polygons {
		n.Polygons[i].Flip()
	}
	n.Plane.Flip()
	for _, child := range []*BSP{n.Front, n.Back} {
		if child != nil {
			child.Invert()
		}
	}
	n.Front, n.Back = n.Back, n.Front
}
// ClipPolygons recursively removes all polygons in `polygons` that are inside
// this BSP tree. The input is not modified; a new slice is returned.
func (n BSP) ClipPolygons(polygons Polygons) Polygons {
	// A node without a plane cannot clip anything; return a copy.
	if n.Plane == nil {
		return append(Polygons(nil), polygons...)
	}
	var front, back Polygons
	for _, p := range polygons {
		// Coplanar polygons are routed with the front/back halves here
		// (3rd and 4th destinations are front and back respectively).
		n.Plane.SplitPolygon(p, &front, &back, &front, &back)
	}
	if n.Front != nil {
		front = n.Front.ClipPolygons(front)
	}
	if n.Back != nil {
		back = n.Back.ClipPolygons(back)
	} else {
		// No back subtree means back-space is solid: those polygons are
		// inside the solid and are discarded.
		back = nil
	}
	return append(front, back...)
}
// ClipTo removes all polygons in this BSP tree that are inside the other BSP
// tree `bsp`, recursing through both subtrees.
func (n *BSP) ClipTo(bsp *BSP) {
	n.Polygons = bsp.ClipPolygons(n.Polygons)
	for _, child := range []*BSP{n.Front, n.Back} {
		if child != nil {
			child.ClipTo(bsp)
		}
	}
}
// AllPolygons returns a freshly allocated list of all polygons in this BSP
// tree, in pre-order (node, front subtree, back subtree).
func (n BSP) AllPolygons() Polygons {
	out := append(Polygons(nil), n.Polygons...)
	for _, child := range []*BSP{n.Front, n.Back} {
		if child != nil {
			out = append(out, child.AllPolygons()...)
		}
	}
	return out
}
// AddPolygons builds a BSP tree out of `polygons`. When called on an existing
// tree, the new polygons are filtered down to the bottom of the tree and become
// new nodes there. Each set of polygons is partitioned using the first polygon
// (no heuristic is used to pick a good split).
func (n *BSP) AddPolygons(polygons Polygons) {
	if len(polygons) == 0 {
		return
	}
	// A fresh node adopts the first polygon's plane as its splitting plane.
	if n.Plane == nil {
		p := polygons[0].Plane
		n.Plane = &p
	}
	var front, back Polygons
	for _, p := range polygons {
		// Coplanar polygons (both orientations) stay on this node; the rest
		// go to the front/back piles.
		n.Plane.SplitPolygon(p, &n.Polygons, &n.Polygons, &front, &back)
	}
	if len(front) > 0 {
		if n.Front == nil {
			n.Front = &BSP{}
		}
		n.Front.AddPolygons(front)
	}
	if len(back) > 0 {
		if n.Back == nil {
			n.Back = &BSP{}
		}
		n.Back.AddPolygons(back)
	}
}
func (n *BSP) print(level int, sb *strings.Builder) {
sb.WriteString(fmt.Sprintf("%*s%s:%+v\n", level*2, "", "plane", n.Plane.Normal))
if n.Front != nil {
n.Front.print(level+1, sb)
}
if n.Back != nil {
n.Back.print(level+1, sb)
}
}
func (n *BSP) String() string {
var sb strings.Builder
n.print(0, &sb)
return sb.String()
} | bsp.go | 0.747063 | 0.536191 | bsp.go | starcoder |
package math

import (
	"errors"
)

// Polygon is a mutable 2D polygon with lazily cached world-space vertices.
// Vertices are stored as a flat [x0, y0, x1, y1, ...] slice; the transformed
// copy is recomputed in TransformedVertices whenever a mutator sets dirty.
type Polygon struct {
	localVertices []float32  // untransformed vertices (x/y pairs)
	worldVertices []float32  // cached transformed vertices
	dirty         bool       // true when worldVertices must be recomputed
	origin        Vector2    // local origin used for rotation and scaling
	position      Vector2    // world-space translation
	rotation      float32    // rotation in degrees (see SetRotation)
	scalar        Vector2    // scale factors
	bounds        *Rectangle // cached bounding rectangle
}
// NewPolygon returns a polygon over the given flat [x0, y0, x1, y1, ...]
// vertex slice. At least three points (six floats) are required.
func NewPolygon(vertices []float32) (*Polygon, error) {
	if len(vertices) < 6 {
		return nil, errors.New("Polygon must contain at least three points.")
	}
	// Default the scale factors to identity. Leaving scalar at its zero
	// value made TransformedVertices multiply every vertex by zero until
	// SetScale was first called, collapsing the polygon onto its origin.
	return &Polygon{localVertices: vertices, dirty: true, scalar: Vector2{X: 1, Y: 1}}, nil
}
// Vertices returns the untransformed local vertices (not a copy).
func (p *Polygon) Vertices() []float32 {
	return p.localVertices
}
// TransformedVertices returns the vertices transformed by the polygon's
// origin, scale, rotation and position, caching the result until a mutator
// marks the polygon dirty.
func (p *Polygon) TransformedVertices() []float32 {
	if p.dirty == false {
		return p.worldVertices
	}
	p.dirty = false
	localVertices := p.localVertices
	if p.worldVertices == nil || len(p.worldVertices) < len(localVertices) {
		p.worldVertices = make([]float32, len(localVertices))
	}
	// rotation is stored in degrees (see SetRotation), so convert the angle
	// to radians *before* taking sin/cos. The original multiplied the
	// sin/cos results by DegreeToRadians instead, which scaled the rotation
	// matrix down by ~0.0175 and produced a wrong transform.
	cos := Cos(p.rotation * DegreeToRadians)
	sin := Sin(p.rotation * DegreeToRadians)
	for i := 0; i < len(localVertices); i += 2 {
		x := localVertices[i] - p.origin.X
		y := localVertices[i+1] - p.origin.Y
		if p.scalar.X != 1 || p.scalar.Y != 1 {
			x *= p.scalar.X
			y *= p.scalar.Y
		}
		if p.rotation != 0 {
			oldX := x
			x = cos*x - sin*y
			y = sin*oldX + cos*y
		}
		p.worldVertices[i] = p.position.X + x + p.origin.X
		p.worldVertices[i+1] = p.position.Y + y + p.origin.Y
	}
	return p.worldVertices
}
// SetOrigin sets the local origin used for rotation and scaling.
func (p *Polygon) SetOrigin(origin Vector2) {
	p.origin = origin
	p.dirty = true
}

// SetPosition sets the world-space translation.
func (p *Polygon) SetPosition(position Vector2) {
	p.position = position
	p.dirty = true
}

// Translate moves the polygon by vec.
func (p *Polygon) Translate(vec Vector2) {
	p.position = p.position.Add(vec)
	p.dirty = true
}

// SetRotation sets the rotation in degrees.
func (p *Polygon) SetRotation(degrees float32) {
	p.rotation = degrees
	p.dirty = true
}

// Rotate adds degrees to the current rotation.
func (p *Polygon) Rotate(degrees float32) {
	p.rotation += degrees
	p.dirty = true
}

// SetScale sets the scale factors.
func (p *Polygon) SetScale(scalar Vector2) {
	p.scalar = scalar
	p.dirty = true
}

// Scale multiplies the current scale factors by amount.
func (p *Polygon) Scale(amount float32) {
	p.scalar = p.scalar.Scale(amount)
	p.dirty = true
}

// Dirty forces the transformed-vertex cache to be rebuilt on next access.
func (p *Polygon) Dirty() {
	p.dirty = true
}
// Area returns the signed area of the transformed polygon via the shoelace
// formula; the sign depends on winding order.
func (p *Polygon) Area() float32 {
	vertices := p.TransformedVertices()
	n := len(vertices)
	var area float32
	for i := 0; i < n; i += 2 {
		x1, y1 := vertices[i], vertices[i+1]
		x2, y2 := vertices[(i+2)%n], vertices[(i+3)%n]
		area += x1 * y2
		area -= x2 * y1
	}
	area *= 0.5
	return area
}
// BoundingRectangle returns an axis-aligned bounding box of this polygon,
// reusing the cached Rectangle when one exists.
func (p *Polygon) BoundingRectangle() *Rectangle {
	vertices := p.TransformedVertices()
	minX := vertices[0]
	minY := vertices[1]
	maxX := vertices[0]
	maxY := vertices[1]
	for i := 0; i < len(vertices); i += 2 {
		if minX > vertices[i] {
			minX = vertices[i]
		}
		if minY > vertices[i+1] {
			minY = vertices[i+1]
		}
		if maxX < vertices[i] {
			maxX = vertices[i]
		}
		if maxY < vertices[i+1] {
			maxY = vertices[i+1]
		}
	}
	// NOTE(review): the two branches below disagree about the Rectangle
	// contract. Rect(minX, minY, maxX, maxY) is called with corner
	// coordinates, yet the reuse path writes maxX/maxY straight into
	// Width/Height without subtracting minX/minY. Unless Rect() treats its
	// last two arguments as a corner, one of the branches is wrong —
	// confirm against the Rect constructor.
	if p.bounds == nil {
		p.bounds = Rect(minX, minY, maxX, maxY)
	} else {
		p.bounds.X = minX
		p.bounds.Y = minY
		p.bounds.Width = maxX
		p.bounds.Height = maxY
	}
	return p.bounds
}
// Contains reports whether vec lies inside the transformed polygon, using an
// even-odd ray cast along +X. The original implementation ignored vec and
// tested p.position against itself, so it never answered the actual question.
func (p *Polygon) Contains(vec Vector2) bool {
	vertices := p.TransformedVertices()
	intersects := 0
	for i := 0; i < len(vertices); i += 2 {
		x1 := vertices[i]
		y1 := vertices[i+1]
		x2 := vertices[(i+2)%len(vertices)]
		y2 := vertices[(i+3)%len(vertices)]
		// Count edges crossed by a horizontal ray from vec toward +X.
		if ((y1 <= vec.Y && vec.Y < y2) || (y2 <= vec.Y && vec.Y < y1)) && vec.X < ((x2-x1)/(y2-y1)*(vec.Y-y1)+x1) {
			intersects++
		}
	}
	return (intersects & 1) == 1
}
func (p *Polygon) Position() Vector2 { return p.position }
func (p *Polygon) Origin() Vector2 { return p.origin }
func (p *Polygon) Rotation() float32 { return p.rotation }
func (p *Polygon) Scalar() Vector2 { return p.scalar } | polygon.go | 0.778944 | 0.703424 | polygon.go | starcoder |
package universe

import (
	"github.com/apache/arrow/go/v7/arrow/memory"
	"github.com/influxdata/flux"
	"github.com/influxdata/flux/array"
	"github.com/influxdata/flux/arrow"
)

// derivativeInt computes the per-unit rate of change of an int64 column,
// producing a float column. State is carried across column chunks.
type derivativeInt struct {
	t           int64   // timestamp of the previous valid row
	v           int64   // value of the previous valid row
	isValid     bool    // whether a previous valid value has been seen
	unit        float64 // time units per derivative interval
	nonNegative bool    // if set, emit null instead of negative derivatives
	initialized bool    // whether the first row has been consumed
}

// Type returns the output column type (always float).
func (d *derivativeInt) Type() flux.ColType {
	return flux.TFloat
}

// Do consumes one chunk of timestamps and values, returning the derivative
// for every row after the first initialized one.
func (d *derivativeInt) Do(ts *array.Int, in array.Array, mem memory.Allocator) array.Array {
	// Empty column chunk returns an empty array
	// and does not initialize the derivative.
	if in.Len() == 0 {
		return arrow.Empty(flux.TFloat)
	}
	i := 0
	// Initialize by reading the first value.
	vs := in.(*array.Int)
	if !d.initialized {
		d.t = ts.Value(i)
		if vs.IsValid(i) {
			d.v, d.isValid = vs.Value(i), true
		}
		d.initialized = true
		i++
	}
	// Initialize the size of the builder.
	b := array.NewFloatBuilder(mem)
	b.Resize(vs.Len() - i)
	// Process the rest of the rows.
	for l := vs.Len(); i < l; i++ {
		// If the current value is nil, append nil and skip to the
		// next point. We do not modify the previous value when we
		// see null and we do not update the timestamp.
		if vs.IsNull(i) {
			b.AppendNull()
			continue
		}
		t := ts.Value(i)
		// If we haven't yet seen a valid value, append nil and use
		// the current value as the previous for the next iteration.
		// to use the current value.
		if !d.isValid {
			b.AppendNull()
			d.t, d.v, d.isValid = t, vs.Value(i), true
			continue
		}
		// We have seen a valid value so retrieve it now.
		pv, cv := d.v, vs.Value(i)
		if d.nonNegative && pv > cv {
			// The previous value is greater than the current
			// value and non-negative was set.
			b.AppendNull()
		} else {
			// Do the derivative.
			elapsed := float64(t-d.t) / d.unit
			var diff float64
			if pv > cv {
				// Subtract smaller from larger before converting, then
				// negate (branch shared with the unsigned template, where
				// it avoids wrapping on unsigned subtraction).
				diff = -float64(pv - cv)
			} else {
				diff = float64(cv - pv)
			}
			b.Append(diff / elapsed)
		}
		d.t, d.v, d.isValid = t, cv, true
	}
	return b.NewArray()
}
// derivativeUint computes the per-unit rate of change of a uint64 column,
// producing a float column. State is carried across column chunks.
type derivativeUint struct {
	t           int64   // timestamp of the previous valid row
	v           uint64  // value of the previous valid row
	isValid     bool    // whether a previous valid value has been seen
	unit        float64 // time units per derivative interval
	nonNegative bool    // if set, emit null instead of negative derivatives
	initialized bool    // whether the first row has been consumed
}

// Type returns the output column type (always float).
func (d *derivativeUint) Type() flux.ColType {
	return flux.TFloat
}

// Do consumes one chunk of timestamps and values, returning the derivative
// for every row after the first initialized one.
func (d *derivativeUint) Do(ts *array.Int, in array.Array, mem memory.Allocator) array.Array {
	// Empty column chunk returns an empty array
	// and does not initialize the derivative.
	if in.Len() == 0 {
		return arrow.Empty(flux.TFloat)
	}
	i := 0
	// Initialize by reading the first value.
	vs := in.(*array.Uint)
	if !d.initialized {
		d.t = ts.Value(i)
		if vs.IsValid(i) {
			d.v, d.isValid = vs.Value(i), true
		}
		d.initialized = true
		i++
	}
	// Initialize the size of the builder.
	b := array.NewFloatBuilder(mem)
	b.Resize(vs.Len() - i)
	// Process the rest of the rows.
	for l := vs.Len(); i < l; i++ {
		// If the current value is nil, append nil and skip to the
		// next point. We do not modify the previous value when we
		// see null and we do not update the timestamp.
		if vs.IsNull(i) {
			b.AppendNull()
			continue
		}
		t := ts.Value(i)
		// If we haven't yet seen a valid value, append nil and use
		// the current value as the previous for the next iteration.
		// to use the current value.
		if !d.isValid {
			b.AppendNull()
			d.t, d.v, d.isValid = t, vs.Value(i), true
			continue
		}
		// We have seen a valid value so retrieve it now.
		pv, cv := d.v, vs.Value(i)
		if d.nonNegative && pv > cv {
			// The previous value is greater than the current
			// value and non-negative was set.
			b.AppendNull()
		} else {
			// Do the derivative.
			elapsed := float64(t-d.t) / d.unit
			var diff float64
			if pv > cv {
				// Avoid wrapping on unsigned subtraction.
				diff = -float64(pv - cv)
			} else {
				diff = float64(cv - pv)
			}
			b.Append(diff / elapsed)
		}
		d.t, d.v, d.isValid = t, cv, true
	}
	return b.NewArray()
}
// derivativeFloat computes the per-unit rate of change of a float64 column,
// producing a float column. State is carried across column chunks.
type derivativeFloat struct {
	t           int64   // timestamp of the previous valid row
	v           float64 // value of the previous valid row
	isValid     bool    // whether a previous valid value has been seen
	unit        float64 // time units per derivative interval
	nonNegative bool    // if set, emit null instead of negative derivatives
	initialized bool    // whether the first row has been consumed
}

// Type returns the output column type (always float).
func (d *derivativeFloat) Type() flux.ColType {
	return flux.TFloat
}

// Do consumes one chunk of timestamps and values, returning the derivative
// for every row after the first initialized one.
func (d *derivativeFloat) Do(ts *array.Int, in array.Array, mem memory.Allocator) array.Array {
	// Empty column chunk returns an empty array
	// and does not initialize the derivative.
	if in.Len() == 0 {
		return arrow.Empty(flux.TFloat)
	}
	i := 0
	// Initialize by reading the first value.
	vs := in.(*array.Float)
	if !d.initialized {
		d.t = ts.Value(i)
		if vs.IsValid(i) {
			d.v, d.isValid = vs.Value(i), true
		}
		d.initialized = true
		i++
	}
	// Initialize the size of the builder.
	b := array.NewFloatBuilder(mem)
	b.Resize(vs.Len() - i)
	// Process the rest of the rows.
	for l := vs.Len(); i < l; i++ {
		// If the current value is nil, append nil and skip to the
		// next point. We do not modify the previous value when we
		// see null and we do not update the timestamp.
		if vs.IsNull(i) {
			b.AppendNull()
			continue
		}
		t := ts.Value(i)
		// If we haven't yet seen a valid value, append nil and use
		// the current value as the previous for the next iteration.
		// to use the current value.
		if !d.isValid {
			b.AppendNull()
			d.t, d.v, d.isValid = t, vs.Value(i), true
			continue
		}
		// We have seen a valid value so retrieve it now.
		pv, cv := d.v, vs.Value(i)
		if d.nonNegative && pv > cv {
			// The previous value is greater than the current
			// value and non-negative was set.
			b.AppendNull()
		} else {
			// Do the derivative.
			elapsed := float64(t-d.t) / d.unit
			var diff float64
			if pv > cv {
				// Both branches are equivalent for floats; the split is
				// template residue from the integer kernels.
				diff = -float64(pv - cv)
			} else {
				diff = float64(cv - pv)
			}
			b.Append(diff / elapsed)
		}
		d.t, d.v, d.isValid = t, cv, true
	}
	return b.NewArray()
}
package main

import (
	"bytes"
	"fmt"
	"go/format"
	"io/ioutil"
	"log"
	"strings"
	"text/template"
)

// opcodePrototypes lists every opcode with its encoding spec. The slice
// order fixes the numeric opcode values: main assigns index+1, so reordering
// entries changes the generated bytecode numbering.
var opcodePrototypes = []opcodeProto{
	{"LoadScalarConst", "op dst:wslot value:scalarindex"},
	{"LoadStrConst", "op dst:wslot value:strindex"},
	{"Zero", "op dst:wslot"},
	{"Move", "op dst:wslot src:rslot"},
	{"Move8", "op dst:wslot src:rslot"},
	{"MoveResult2", "op dst:wslot"},
	{"Not", "op dst:wslot x:rslot"},
	{"IsNil", "op dst:wslot x:rslot"},
	{"IsNotNil", "op dst:wslot x:rslot"},
	{"IsNilInterface", "op dst:wslot x:rslot"},
	{"IsNotNilInterface", "op dst:wslot x:rslot"},
	{"Len", "op dst:wslot str:rslot"},
	{"Cap", "op dst:wslot str:rslot"},
	{"StrSlice", "op dst:wslot str:rslot from:rslot to:rslot"},
	{"StrSliceFrom", "op dst:wslot str:rslot from:rslot"},
	{"StrSliceTo", "op dst:wslot str:rslot to:rslot"},
	{"StrIndex", "op dst:wslot str:rslot index:rslot"},
	{"SliceIndexScalar8", "op dst:wslot slice:rslot index:rslot"},
	{"SliceIndexScalar64", "op dst:wslot slice:rslot index:rslot"},
	{"BytesSlice", "op dst:wslot str:rslot from:rslot to:rslot"},
	{"BytesSliceFrom", "op dst:wslot str:rslot from:rslot"},
	{"BytesSliceTo", "op dst:wslot str:rslot to:rslot"},
	{"SliceSetScalar8", "op slice:rslot index:rslot value:rslot"},
	{"SliceSetScalar64", "op slice:rslot index:rslot value:rslot"},
	{"Concat", "op dst:wslot s1:rslot s2:rslot"},
	{"StrEq", "op dst:wslot s1:rslot s2:rslot"},
	{"StrNotEq", "op dst:wslot s1:rslot s2:rslot"},
	{"StrGt", "op dst:wslot s1:rslot s2:rslot"},
	{"StrLt", "op dst:wslot s1:rslot s2:rslot"},
	{"IntNeg", "op dst:wslot x:rslot"},
	{"IntBitwiseNot", "op dst:wslot x:rslot"},
	{"ScalarEq", "op dst:wslot x:rslot y:rslot"},
	{"ScalarNotEq", "op dst:wslot x:rslot y:rslot"},
	{"IntGt", "op dst:wslot x:rslot y:rslot"},
	{"IntGtEq", "op dst:wslot x:rslot y:rslot"},
	{"IntLt", "op dst:wslot x:rslot y:rslot"},
	{"IntLtEq", "op dst:wslot x:rslot y:rslot"},
	{"IntAdd8", "op dst:wslot x:rslot y:rslot"},
	{"IntAdd64", "op dst:wslot x:rslot y:rslot"},
	{"IntSub8", "op dst:wslot x:rslot y:rslot"},
	{"IntSub64", "op dst:wslot x:rslot y:rslot"},
	{"IntMul8", "op dst:wslot x:rslot y:rslot"},
	{"IntMul64", "op dst:wslot x:rslot y:rslot"},
	{"IntDiv", "op dst:wslot x:rslot y:rslot"},
	{"IntMod", "op dst:wslot x:rslot y:rslot"},
	{"IntXor", "op dst:wslot x:rslot y:rslot"},
	{"IntOr", "op dst:wslot x:rslot y:rslot"},
	{"IntLshift", "op dst:wslot x:rslot y:rslot"},
	{"IntRshift", "op dst:wslot x:rslot y:rslot"},
	{"IntInc", "op x:rwslot"},
	{"IntDec", "op x:rwslot"},
	{"Jump", "op offset:offset"},
	{"JumpZero", "op offset:offset cond:rslot"},
	{"JumpNotZero", "op offset:offset cond:rslot"},
	{"JumpTable", "op value:rslot"},
	{"Call", "op dst:wslot fn:funcid"},
	{"CallRecur", "op dst:wslot"},
	{"CallVoid", "op fn:funcid"},
	{"CallNative", "op dst:wslot fn:nativefuncid"},
	{"CallVoidNative", "op fn:nativefuncid"},
	{"PushVariadicBoolArg", "op x:rslot"},
	{"PushVariadicScalarArg", "op x:rslot"},
	{"PushVariadicStrArg", "op x:rslot"},
	{"PushVariadicInterfaceArg", "op x:rslot"},
	{"VariadicReset", "op"},
	{"ReturnVoid", "op"},
	{"ReturnZero", "op"},
	{"ReturnOne", "op"},
	{"ReturnStr", "op x:rslot"},
	{"ReturnScalar", "op x:rslot"},
	{"Return", "op x:rslot"},
	{"FloatAdd64", "op dst:wslot x:rslot y:rslot"},
	{"FloatSub64", "op dst:wslot x:rslot y:rslot"},
	{"FloatMul64", "op dst:wslot x:rslot y:rslot"},
	{"FloatDiv64", "op dst:wslot x:rslot y:rslot"},
	{"FloatGt", "op dst:wslot x:rslot y:rslot"},
	{"FloatGtEq", "op dst:wslot x:rslot y:rslot"},
	{"FloatLt", "op dst:wslot x:rslot y:rslot"},
	{"FloatLtEq", "op dst:wslot x:rslot y:rslot"},
	{"FloatNeg", "op dst:wslot x:rslot"},
	{"ConvIntToFloat", "op dst:wslot x:rslot"},
}
// opcodeProto pairs an opcode name with its textual encoding spec
// (e.g. "op dst:wslot x:rslot"), parsed by decodeEnc.
type opcodeProto struct {
	name string
	enc  string
}

// encodingInfo is the decoded form of an encoding spec, ready for templating.
type encodingInfo struct {
	width  int    // total instruction width in bytes (1 opcode byte + args)
	parts  int    // number of whitespace-separated fields in the spec
	encdoc string // human-readable encoding, e.g. "dst:u8 x:u8"
	args   string // Go source for the []Argument literal body
	flags  string // Go expression for the opcode-level flags
}

// opcodeInfo is the per-opcode template input for opcodes.gen.go.
type opcodeInfo struct {
	Opcode    byte
	Name      string
	Enc       string
	EncString string
	Width     int
	Flags     string
	Args      string
}

// stackUnchanged appears unused in this file's visible code; kept as-is.
const stackUnchanged = ""

// fileTemplate renders the generated opcodes.gen.go file.
var fileTemplate = template.Must(template.New("opcodes.go").Parse(`// Code generated "gen_opcodes.go"; DO NOT EDIT.
package bytecode
const (
	OpInvalid Op = 0
	{{ range .Opcodes }}
	// Encoding: {{.EncString}}
	Op{{ .Name }} Op = {{.Opcode}}
	{{ end -}}
)
var opcodeInfoTable = [256]OpcodeInfo{
	OpInvalid: {Width: 1},
	{{ range .Opcodes -}}
	Op{{.Name}}: {
		Width: {{.Width}},
		Flags: {{.Flags}},
		Args: []Argument{ {{.Args}} },
	},
	{{ end }}
}
`))
// main generates opcodes.gen.go: each prototype is assigned a numeric opcode
// (its index + 1), its encoding spec is decoded, the file template is
// rendered and the gofmt-ed result is written to disk.
func main() {
	opcodes := make([]opcodeInfo, len(opcodePrototypes))
	for i, proto := range opcodePrototypes {
		opcode := byte(i + 1)
		encInfo := decodeEnc(proto.enc)
		var encString string
		if encInfo.encdoc == "" {
			encString = fmt.Sprintf("0x%02x (width=%d)", opcode, encInfo.width)
		} else {
			encString = fmt.Sprintf("0x%02x %s (width=%d)",
				opcode, encInfo.encdoc, encInfo.width)
		}
		opcodes[i] = opcodeInfo{
			Opcode:    opcode,
			Name:      proto.name,
			Enc:       proto.enc,
			EncString: encString,
			Width:     encInfo.width,
			Flags:     encInfo.flags,
			Args:      encInfo.args,
		}
	}
	var buf bytes.Buffer
	err := fileTemplate.Execute(&buf, map[string]interface{}{
		"Opcodes": opcodes,
	})
	if err != nil {
		log.Panicf("execute template: %v", err)
	}
	writeFile("opcodes.gen.go", buf.Bytes())
}
// decodeEnc parses an opcode encoding spec like "op dst:wslot x:rslot" into
// the width, argument metadata, doc string and flag expressions needed by
// the code-generation template. It panics on a malformed spec, which is
// acceptable for a build-time generator.
func decodeEnc(enc string) encodingInfo {
	fields := strings.Fields(enc)
	width := 1 // opcode is uint8
	opfield := fields[0]
	if opfield != "op" {
		// The original Sprintf here had two %s verbs but only one argument,
		// so the panic message rendered with %!s(MISSING); pass enc too.
		panic(fmt.Sprintf("parse %s: expected 'op', found '%s'", enc, opfield))
	}
	argfields := fields[1:]
	var encdocParts []string
	var argList []string
	var argFlagList []string
	hasDst := false
	argOffset := 1
	for i, f := range argfields {
		argFlagList = argFlagList[:0]
		parts := strings.Split(f, ":")
		var typ string
		if len(parts) == 2 {
			typ = parts[1]
		} else {
			panic(fmt.Sprintf("parse %s: can't decode %s field: expected 2 parts", enc, f))
		}
		argName := parts[0]
		argType := ""
		encType := ""
		argWidth := 0
		switch typ {
		case "wslot", "rwslot":
			// Destination slots must come first so the template's dst
			// conventions hold.
			if i != 0 {
				panic(fmt.Sprintf("parse %s: dst arg at i=%d", enc, i))
			}
			if typ == "wslot" {
				argFlagList = append(argFlagList, "FlagIsWrite")
			} else {
				argFlagList = append(argFlagList, "FlagIsWrite", "FlagIsRead")
			}
			hasDst = true
			argType = "ArgSlot"
			encType = "u8"
			argWidth = 1
		case "rslot":
			argFlagList = append(argFlagList, "FlagIsRead")
			argType = "ArgSlot"
			encType = "u8"
			argWidth = 1
		case "strindex":
			argType = "ArgStrConst"
			encType = "u8"
			argWidth = 1
		case "scalarindex":
			argType = "ArgScalarConst"
			encType = "u8"
			argWidth = 1
		case "offset":
			argType = "ArgOffset"
			encType = "i16"
			argWidth = 2
		case "funcid":
			argType = "ArgFuncID"
			encType = "u16"
			argWidth = 2
		case "nativefuncid":
			argType = "ArgNativeFuncID"
			encType = "u16"
			argWidth = 2
		default:
			panic(fmt.Sprintf("unknown op argument type: %s", typ))
		}
		argFlags := "0"
		if len(argFlagList) != 0 {
			argFlags = strings.Join(argFlagList, " | ")
		}
		encdocParts = append(encdocParts, argName+":"+encType)
		argList = append(argList, fmt.Sprintf("{Name: %q, Kind: %s, Offset: %d, Flags: %s}", argName, argType, argOffset, argFlags))
		width += argWidth
		argOffset += argWidth
	}
	var flagList []string
	if hasDst {
		flagList = append(flagList, "FlagHasDst")
	}
	flagsString := "0"
	if len(flagList) != 0 {
		flagsString = strings.Join(flagList, " | ")
	}
	argsString := ""
	if len(argList) != 0 {
		argsString = "\n" + strings.Join(argList, ",\n")
	}
	return encodingInfo{
		width:  width,
		encdoc: strings.Join(encdocParts, " "),
		parts:  len(fields),
		flags:  flagsString,
		args:   argsString,
	}
}
// writeFile formats data with gofmt and writes it to filename, panicking on
// any failure (acceptable for a build-time code generator).
func writeFile(filename string, data []byte) {
	formatted, err := format.Source(data)
	if err != nil {
		log.Panicf("gofmt: %v", err)
	}
	if err = ioutil.WriteFile(filename, formatted, 0666); err != nil {
		log.Panicf("write %s: %v", filename, err)
	}
}
package imageutil
import (
"image"
"image/color"
"math"
)
// Invert returns a copy of img with every color channel inverted. Alpha-only
// images are returned unchanged, and alpha channels of color images are
// preserved. For premultiplied formats (RGBA, RGBA64) channels are inverted
// relative to the alpha value so the result stays premultiplied.
// NOTE(review): image types other than the ones switched on below yield a
// nil result, as in the original — confirm callers never pass e.g.
// *image.Paletted.
func Invert(img ImageReader) ImageReader {
	var (
		invertedImage ImageReadWriter
		pp            PP
	)
	bounds := img.Bounds()
	switch img.(type) {
	case *image.Alpha, *image.Alpha16:
		return img
	case *image.Gray:
		invertedImage = image.NewGray(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.Gray)
			// 8-bit inversion is 255-y. The original used math.MaxInt8
			// (127), which wrapped for values above 127.
			c.Y = math.MaxUint8 - c.Y
			invertedImage.Set(pt.X, pt.Y, c)
		}
	case *image.Gray16:
		invertedImage = image.NewGray16(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.Gray16)
			// 16-bit inversion is 65535-y. The original used math.MaxInt8,
			// which is wrong for a uint16 channel.
			c.Y = math.MaxUint16 - c.Y
			invertedImage.Set(pt.X, pt.Y, c)
		}
	case *image.NRGBA:
		invertedImage = image.NewNRGBA(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.NRGBA)
			c.R = math.MaxUint8 - c.R
			c.G = math.MaxUint8 - c.G
			c.B = math.MaxUint8 - c.B
			invertedImage.Set(pt.X, pt.Y, c)
		}
	case *image.NRGBA64:
		invertedImage = image.NewNRGBA64(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.NRGBA64)
			c.R = math.MaxUint16 - c.R
			c.G = math.MaxUint16 - c.G
			c.B = math.MaxUint16 - c.B
			invertedImage.Set(pt.X, pt.Y, c)
		}
	case *image.RGBA:
		// Store the result in an RGBA image. The original allocated an
		// NRGBA target, which re-interpreted premultiplied values as
		// non-premultiplied on Set.
		invertedImage = image.NewRGBA(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.RGBA)
			// Premultiplied channels never exceed alpha, so A-x inverts
			// within the valid range.
			c.R = c.A - c.R
			c.G = c.A - c.G
			c.B = c.A - c.B
			invertedImage.Set(pt.X, pt.Y, c)
		}
	case *image.RGBA64:
		invertedImage = image.NewRGBA64(bounds)
		pp = func(pt image.Point) {
			c := img.At(pt.X, pt.Y).(color.RGBA64)
			c.R = c.A - c.R
			c.G = c.A - c.G
			c.B = c.A - c.B
			invertedImage.Set(pt.X, pt.Y, c)
		}
	}
	QuickRP(AllPointsRP(pp))(bounds)
	return invertedImage
}
// EdgesGray16 produces an edge-strength image from a single channel using
// box-average differences over the given radius. A radius below 1 yields an
// all-zero image.
func EdgesGray16(radius int, img Channel) *image.Gray16 {
	bounds := img.Bounds()
	edgeImage := image.NewGray16(bounds)
	if radius < 1 {
		return edgeImage
	}
	// Compute the horizontal and vertical averages.
	hGA := RowAverageGray16(radius, img)
	vGA := ColumnAverageGray16(radius, img)
	QuickRP(
		AllPointsRP(
			func(pt image.Point) {
				// Difference of averaged windows to the east/west and
				// north/south of the point; the stronger direction wins.
				e := float64(hGA.Gray16At(pt.X, pt.Y).Y)
				w := float64(hGA.Gray16At(pt.X-radius+1, pt.Y).Y)
				n := float64(vGA.Gray16At(pt.X, pt.Y).Y)
				s := float64(vGA.Gray16At(pt.X, pt.Y-radius+1).Y)
				edgeImage.Set(pt.X, pt.Y,
					color.Gray16{
						Y: uint16(math.Max(math.Abs(e-w), math.Abs(s-n))), //uint16((math.Abs(e-w) + math.Abs(s-n)) / 2.0),
					},
				)
			},
		),
	)(bounds)
	return edgeImage
}
func EdgesNRGBA64(radius int, img *image.NRGBA64) *image.NRGBA64 {
r, g, b, a := NRGBA64ToChannels(img)
r = EdgesGray16(radius, r)
g = EdgesGray16(radius, g)
b = EdgesGray16(radius, b)
return ChannelsToNRGBA64(r, g, b, a)
} | filter.go | 0.703142 | 0.485783 | filter.go | starcoder |
package channel
import (
"errors"
"fmt"
"io"
)
// ThousandOp holds ten HundredOp stages chained into one pipeline
// (1000 singleOps in total).
type ThousandOp struct {
	op1  *HundredOp
	op2  *HundredOp
	op3  *HundredOp
	op4  *HundredOp
	op5  *HundredOp
	op6  *HundredOp
	op7  *HundredOp
	op8  *HundredOp
	op9  *HundredOp
	op10 *HundredOp
}
// NewThousandOp returns a new ThousandOp with its ten HundredOp stages wired
// in sequence by buffered channels.
func NewThousandOp() *ThousandOp {
	o := &ThousandOp{
		op1:  NewHundredOp(),
		op2:  NewHundredOp(),
		op3:  NewHundredOp(),
		op4:  NewHundredOp(),
		op5:  NewHundredOp(),
		op6:  NewHundredOp(),
		op7:  NewHundredOp(),
		op8:  NewHundredOp(),
		op9:  NewHundredOp(),
		op10: NewHundredOp(),
	}
	stages := []*HundredOp{o.op1, o.op2, o.op3, o.op4, o.op5, o.op6, o.op7, o.op8, o.op9, o.op10}
	// Connect each stage's output to the next stage's input.
	for i := 0; i < len(stages)-1; i++ {
		ch := make(chan int, 10)
		stages[i].SetOut(ch)
		stages[i+1].SetIn(ch)
	}
	return o
}
// SetIn sets the input port of the ThousandOp (the first stage's input).
func (op *ThousandOp) SetIn(port <-chan int) {
	op.op1.SetIn(port)
}

// SetOut sets the output port of the ThousandOp (the last stage's output).
func (op *ThousandOp) SetOut(port chan<- int) {
	op.op10.SetOut(port)
}

// stages lists the ten internal stages in pipeline order.
func (op *ThousandOp) stages() []*HundredOp {
	return []*HundredOp{op.op1, op.op2, op.op3, op.op4, op.op5, op.op6, op.op7, op.op8, op.op9, op.op10}
}

// SetError sets the shared error port of every stage.
func (op *ThousandOp) SetError(port chan<- error) {
	for _, stage := range op.stages() {
		stage.SetError(port)
	}
}

// Run starts all internal stages.
func (op *ThousandOp) Run() {
	for _, stage := range op.stages() {
		stage.Run()
	}
}
// HundredOp holds ten tenOp stages chained into one pipeline
// (100 singleOps in total).
type HundredOp struct {
	op1  *tenOp
	op2  *tenOp
	op3  *tenOp
	op4  *tenOp
	op5  *tenOp
	op6  *tenOp
	op7  *tenOp
	op8  *tenOp
	op9  *tenOp
	op10 *tenOp
}
// NewHundredOp returns a new HundredOp with its ten tenOp stages wired in
// sequence by buffered channels.
func NewHundredOp() *HundredOp {
	o := &HundredOp{
		op1:  newTenOp(),
		op2:  newTenOp(),
		op3:  newTenOp(),
		op4:  newTenOp(),
		op5:  newTenOp(),
		op6:  newTenOp(),
		op7:  newTenOp(),
		op8:  newTenOp(),
		op9:  newTenOp(),
		op10: newTenOp(),
	}
	stages := []*tenOp{o.op1, o.op2, o.op3, o.op4, o.op5, o.op6, o.op7, o.op8, o.op9, o.op10}
	// Connect each stage's output to the next stage's input.
	for i := 0; i < len(stages)-1; i++ {
		ch := make(chan int, 10)
		stages[i].SetOut(ch)
		stages[i+1].SetIn(ch)
	}
	return o
}
// SetIn sets the input port of the HundredOp (the first stage's input).
func (op *HundredOp) SetIn(port <-chan int) {
	op.op1.SetIn(port)
}

// SetOut sets the output port of the HundredOp (the last stage's output).
func (op *HundredOp) SetOut(port chan<- int) {
	op.op10.SetOut(port)
}

// stages lists the ten internal stages in pipeline order.
func (op *HundredOp) stages() []*tenOp {
	return []*tenOp{op.op1, op.op2, op.op3, op.op4, op.op5, op.op6, op.op7, op.op8, op.op9, op.op10}
}

// SetError sets the shared error port of every stage.
func (op *HundredOp) SetError(port chan<- error) {
	for _, stage := range op.stages() {
		stage.SetError(port)
	}
}

// Run starts all internal stages.
func (op *HundredOp) Run() {
	for _, stage := range op.stages() {
		stage.Run()
	}
}
// tenOp chains ten singleOp workers into one pipeline.
type tenOp struct {
	op1  *singleOp
	op2  *singleOp
	op3  *singleOp
	op4  *singleOp
	op5  *singleOp
	op6  *singleOp
	op7  *singleOp
	op8  *singleOp
	op9  *singleOp
	op10 *singleOp
}
// newTenOp returns a tenOp with its ten singleOps wired in sequence by
// buffered channels.
func newTenOp() *tenOp {
	o := &tenOp{
		op1:  &singleOp{},
		op2:  &singleOp{},
		op3:  &singleOp{},
		op4:  &singleOp{},
		op5:  &singleOp{},
		op6:  &singleOp{},
		op7:  &singleOp{},
		op8:  &singleOp{},
		op9:  &singleOp{},
		op10: &singleOp{},
	}
	stages := []*singleOp{o.op1, o.op2, o.op3, o.op4, o.op5, o.op6, o.op7, o.op8, o.op9, o.op10}
	// Connect each worker's output to the next worker's input.
	for i := 0; i < len(stages)-1; i++ {
		ch := make(chan int, 10)
		stages[i].Out = ch
		stages[i+1].In = ch
	}
	return o
}
// SetIn wires the external input channel to the first op in the chain.
func (op *tenOp) SetIn(port <-chan int) {
	op.op1.In = port
}

// SetOut wires the external output channel to the last op in the chain.
func (op *tenOp) SetOut(port chan<- int) {
	op.op10.Out = port
}
// SetError points every internal op at the same shared error channel.
func (op *tenOp) SetError(port chan<- error) {
	op.op1.Error = port
	op.op2.Error = port
	op.op3.Error = port
	op.op4.Error = port
	op.op5.Error = port
	op.op6.Error = port
	op.op7.Error = port
	op.op8.Error = port
	op.op9.Error = port
	op.op10.Error = port
}
// Run starts every internal op; each one runs in its own goroutine and Run
// returns immediately.
func (op *tenOp) Run() {
	op.op1.Run()
	op.op2.Run()
	op.op3.Run()
	op.op4.Run()
	op.op5.Run()
	op.op6.Run()
	op.op7.Run()
	op.op8.Run()
	op.op9.Run()
	op.op10.Run()
}
// singleOp is a pipeline stage that increments every int it receives by one.
// In receives work, Out emits results, and Error carries failures plus a
// final io.EOF once the input channel is drained.
type singleOp struct {
	In    <-chan int
	Out   chan<- int
	Error chan<- error
}

// Run starts the stage in its own goroutine. When In is closed it closes Out
// and sends io.EOF on Error to signal completion.
func (op *singleOp) Run() {
	go func() {
		for v := range op.In {
			// Reject values outside the supported range.
			if v < 0 || v > 1000000 {
				op.Error <- errors.New("should not happen")
				continue
			}
			op.Out <- v + 1
		}
		close(op.Out)
		op.Error <- io.EOF // signal that we are done
	}()
}
// ErrorOp handles errors by reporting them. It drains a shared error channel
// and signals on Done once all producers have finished.
type ErrorOp struct {
	Error <-chan error
	Done  chan<- bool
}

// Run runs the ErrorOp with input from n components: non-EOF errors are
// printed, and after n io.EOF completion signals have been counted the loop
// stops and true is sent on Done.
func (op *ErrorOp) Run(n int) {
	i := 0
	go func() {
		for err := range op.Error {
			if err != io.EOF {
				fmt.Printf("ERROR: %s\n", err)
			} else {
				// Each producer sends exactly one io.EOF when it finishes.
				i++
				if i >= n {
					break
				}
			}
		}
		op.Done <- true
	}()
} | channel/channel.go | 0.584627 | 0.553867 | channel.go | starcoder |
package mqttp
// ConnAck The CONNACK Packet is the packet sent by the Server in response to a CONNECT Packet
// received from a Client. The first packet sent from the Server to the Client MUST
// be a CONNACK Packet [MQTT-3.2.0-1].
// If the Client does not receive a CONNACK Packet from the Server within a reasonable
// amount of time, the Client SHOULD close the Network Connection. A "reasonable" amount
// of time depends on the type of application and the communications infrastructure.
type ConnAck struct {
	header
	sessionPresent bool
	returnCode     ReasonCode
}

// Compile-time check that ConnAck implements the packet interface.
var _ IFace = (*ConnAck)(nil)

// newConnAck creates a new CONNACK packet
func newConnAck() *ConnAck {
	return &ConnAck{}
}

// NewConnAck creates a new CONNACK packet for the given protocol version and
// registers the type-specific size/encode/decode callbacks with the header.
func NewConnAck(v ProtocolVersion) *ConnAck {
	p := newConnAck()
	p.init(CONNACK, v, p.size, p.encodeMessage, p.decodeMessage)
	return p
}

// SessionPresent returns the session present flag value
func (msg *ConnAck) SessionPresent() bool {
	return msg.sessionPresent
}

// SetSessionPresent sets the value of the session present flag
func (msg *ConnAck) SetSessionPresent(v bool) {
	msg.sessionPresent = v
}

// ReturnCode returns the return code received for the CONNECT message.
func (msg *ConnAck) ReturnCode() ReasonCode {
	return msg.returnCode
}

// SetReturnCode sets the CONNACK reason code; codes that are not valid for
// the CONNACK packet type are rejected with ErrInvalidReturnCode.
func (msg *ConnAck) SetReturnCode(ret ReasonCode) error {
	if !ret.IsValidForType(msg.Type()) {
		return ErrInvalidReturnCode
	}
	msg.returnCode = ret
	return nil
}
// decodeMessage parses the CONNACK variable header from the buffer: the
// connect-acknowledge flags byte, the reason code, and (v5.0 only) the
// properties. It returns the number of bytes consumed; malformed input is
// reported via a ReasonCode error appropriate to the protocol version.
func (msg *ConnAck) decodeMessage(from []byte) (int, error) {
	offset := 0
	// [MQTT-3.2.2.1]: every bit except bit 0 (session present) is reserved
	// and must be zero.
	b := from[offset]
	if b&(^maskConnAckSessionPresent) != 0 {
		var rejectCode ReasonCode
		if msg.version == ProtocolV50 {
			rejectCode = CodeMalformedPacket
		} else {
			rejectCode = CodeRefusedServerUnavailable
		}
		return offset, rejectCode
	}
	msg.sessionPresent = b&maskConnAckSessionPresent != 0
	offset++
	// Second byte is the connect reason/return code.
	b = from[offset]
	msg.returnCode = ReasonCode(b)
	if !msg.returnCode.IsValidForType(msg.mType) {
		reason := CodeRefusedServerUnavailable
		if msg.version == ProtocolV50 {
			reason = CodeProtocolError
		}
		return offset, reason
	}
	offset++
	// v5 [MQTT-3.1.2.11] specifies properties in variable header
	if msg.version == ProtocolV50 {
		n, err := msg.properties.decode(msg.Type(), from[offset:])
		offset += n
		if err != nil {
			return offset, err
		}
	}
	return offset, nil
}
// encodeMessage writes the CONNACK variable header into to: the session
// present flags byte, the reason code, and (v5.0 only) the properties.
// It returns the number of bytes written.
func (msg *ConnAck) encodeMessage(to []byte) (int, error) {
	var flags byte
	if msg.sessionPresent {
		flags = 1
	}
	to[0] = flags
	to[1] = msg.returnCode.Value()
	offset := 2
	if msg.version != ProtocolV50 {
		return offset, nil
	}
	// V5.0 [MQTT-3.1.2.11]
	n, err := msg.properties.encode(to[offset:])
	return offset + n, err
}
// size returns the variable-header length of the CONNACK packet: the flags
// byte plus the reason code, and for v5.0 also the encoded properties.
func (msg *ConnAck) size() int {
	total := 2
	// v5.0 [MQTT-3.1.2.11]
	if msg.version == ProtocolV50 {
		total += int(msg.properties.FullLen())
	}
	return total
} | mqttp/connack.go | 0.730482 | 0.408395 | connack.go | starcoder |
package iso20022
// PaymentObligation2 is a payment obligation contracted between two financial
// institutions related to the financing of a commercial transaction
// (ISO 20022 generated message component).
type PaymentObligation2 struct {
	// Bank that has to pay under the obligation.
	ObligorBank *BICIdentification1 `xml:"OblgrBk"`
	// Bank that will be paid under the obligation.
	RecipientBank *BICIdentification1 `xml:"RcptBk"`
	// Payment obligation amount specified as an amount or percentage.
	PaymentObligationAmount *AmountOrPercentage2Choice `xml:"PmtOblgtnAmt"`
	// Charges related to the payment obligation.
	Charges []*Charges5 `xml:"Chrgs,omitempty"`
	// Date at which the obligation will expire.
	ExpiryDate *ISODate `xml:"XpryDt"`
	// Rules which apply to the BPO (Bank Payment Obligation).
	ApplicableRules *BPOApplicableRules1Choice `xml:"AplblRules,omitempty"`
	// Country of which the law governs the bank payment obligation.
	ApplicableLaw *CountryCode `xml:"AplblLaw,omitempty"`
	// Location and forum for dispute resolution.
	PlaceOfJurisdiction *Location2 `xml:"PlcOfJursdctn,omitempty"`
	// Payment processes required to transfer cash from the debtor to the creditor.
	PaymentTerms []*PaymentTerms4 `xml:"PmtTerms,omitempty"`
	// Instruction between two clearing agents stipulating the cash transfer characteristics between the two parties.
	SettlementTerms *SettlementTerms3 `xml:"SttlmTerms,omitempty"`
}
// AddObligorBank allocates and returns the ObligorBank component.
func (p *PaymentObligation2) AddObligorBank() *BICIdentification1 {
	p.ObligorBank = new(BICIdentification1)
	return p.ObligorBank
}

// AddRecipientBank allocates and returns the RecipientBank component.
func (p *PaymentObligation2) AddRecipientBank() *BICIdentification1 {
	p.RecipientBank = new(BICIdentification1)
	return p.RecipientBank
}

// AddPaymentObligationAmount allocates and returns the amount choice component.
func (p *PaymentObligation2) AddPaymentObligationAmount() *AmountOrPercentage2Choice {
	p.PaymentObligationAmount = new(AmountOrPercentage2Choice)
	return p.PaymentObligationAmount
}

// AddCharges appends a new Charges5 entry and returns it for population.
func (p *PaymentObligation2) AddCharges() *Charges5 {
	newValue := new(Charges5)
	p.Charges = append(p.Charges, newValue)
	return newValue
}

// SetExpiryDate stores value as the obligation's expiry date.
func (p *PaymentObligation2) SetExpiryDate(value string) {
	p.ExpiryDate = (*ISODate)(&value)
}

// AddApplicableRules allocates and returns the applicable-rules component.
func (p *PaymentObligation2) AddApplicableRules() *BPOApplicableRules1Choice {
	p.ApplicableRules = new(BPOApplicableRules1Choice)
	return p.ApplicableRules
}

// SetApplicableLaw stores value as the governing-law country code.
func (p *PaymentObligation2) SetApplicableLaw(value string) {
	p.ApplicableLaw = (*CountryCode)(&value)
}

// AddPlaceOfJurisdiction allocates and returns the jurisdiction component.
func (p *PaymentObligation2) AddPlaceOfJurisdiction() *Location2 {
	p.PlaceOfJurisdiction = new(Location2)
	return p.PlaceOfJurisdiction
}

// AddPaymentTerms appends a new PaymentTerms4 entry and returns it.
func (p *PaymentObligation2) AddPaymentTerms() *PaymentTerms4 {
	newValue := new(PaymentTerms4)
	p.PaymentTerms = append(p.PaymentTerms, newValue)
	return newValue
}

// AddSettlementTerms allocates and returns the settlement-terms component.
func (p *PaymentObligation2) AddSettlementTerms() *SettlementTerms3 {
	p.SettlementTerms = new(SettlementTerms3)
	return p.SettlementTerms
} | PaymentObligation2.go | 0.708515 | 0.518973 | PaymentObligation2.go | starcoder |
package model
import (
"database/sql"
"errors"
)
/*
| Table Name | Column Name | Position | Matches | Qty |
| ------------------------------------- | --------------------------------- | -------- | --------------------------------------- | --- |
| COLUMNS | TABLE_CATALOG | 1 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | TABLE_SCHEMA | 2 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | TABLE_NAME | 3 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | COLUMN_NAME | 4 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | ORDINAL_POSITION | 5 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | COLUMN_DEFAULT | 6 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | IS_NULLABLE | 7 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | DATA_TYPE | 8 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | CHARACTER_MAXIMUM_LENGTH | 9 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | CHARACTER_OCTET_LENGTH | 10 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | NUMERIC_PRECISION | 11 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | NUMERIC_PRECISION_RADIX | 12 | sql2003, pg, mssql, hsqldb, h2 | 5 |
| COLUMNS | NUMERIC_SCALE | 13 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | DATETIME_PRECISION | 14 | sql2003, pg, mssql, mariadb, hsqldb | 5 |
| COLUMNS | INTERVAL_TYPE | 15 | sql2003, pg, hsqldb | 3 |
| COLUMNS | INTERVAL_PRECISION | 16 | sql2003, pg, hsqldb | 3 |
| COLUMNS | CHARACTER_SET_CATALOG | 17 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | CHARACTER_SET_SCHEMA | 18 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | CHARACTER_SET_NAME | 19 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | COLLATION_CATALOG | 20 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | COLLATION_SCHEMA | 21 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | COLLATION_NAME | 22 | sql2003, pg, mssql, mariadb, hsqldb, h2 | 6 |
| COLUMNS | DOMAIN_CATALOG | 23 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | DOMAIN_SCHEMA | 24 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | DOMAIN_NAME | 25 | sql2003, pg, mssql, hsqldb | 4 |
| COLUMNS | UDT_CATALOG | 26 | sql2003, pg, hsqldb | 3 |
| COLUMNS | UDT_SCHEMA | 27 | sql2003, pg, hsqldb | 3 |
| COLUMNS | UDT_NAME | 28 | sql2003, pg, hsqldb | 3 |
| COLUMNS | SCOPE_CATALOG | 29 | sql2003, pg, hsqldb | 3 |
| COLUMNS | SCOPE_SCHEMA | 30 | sql2003, pg, hsqldb | 3 |
| COLUMNS | SCOPE_NAME | 31 | sql2003, pg, hsqldb | 3 |
| COLUMNS | MAXIMUM_CARDINALITY | 32 | sql2003, pg, hsqldb | 3 |
| COLUMNS | DTD_IDENTIFIER | 33 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IS_SELF_REFERENCING | 34 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IS_IDENTITY | 35 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_GENERATION | 36 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_START | 37 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_INCREMENT | 38 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_MAXIMUM | 39 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_MINIMUM | 40 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IDENTITY_CYCLE | 41 | sql2003, pg, hsqldb | 3 |
| COLUMNS | IS_GENERATED | 42 | sql2003, pg, mariadb, hsqldb | 4 |
| COLUMNS | GENERATION_EXPRESSION | 43 | sql2003, pg, mariadb, hsqldb | 4 |
| COLUMNS | IS_UPDATABLE | 44 | sql2003, pg, hsqldb | 3 |
| COLUMNS | COLUMN_TYPE | | mariadb, h2 | 2 |
*/
// Column contains details for one row of information_schema.COLUMNS, as
// selected by the query handed to Columns. Nullable database values are
// represented with database/sql Null* wrappers.
type Column struct {
	TableCatalog    sql.NullString `json:"tableCatalog"`
	TableSchema     sql.NullString `json:"tableSchema"`
	TableName       sql.NullString `json:"tableName"`
	ColumnName      sql.NullString `json:"columnName"`
	OrdinalPosition sql.NullInt32  `json:"ordinalPosition"`
	ColumnDefault   sql.NullString `json:"columnDefault"`
	IsNullable      sql.NullString `json:"isNullable"`
	DataType        sql.NullString `json:"dataType"`
	DomainCatalog   sql.NullString `json:"domainCatalog"`
	DomainSchema    sql.NullString `json:"domainSchema"`
	DomainName      sql.NullString `json:"domainName"`
	Comment         sql.NullString `json:"comment"`
}
// Columns returns a slice of Columns for the (tableSchema, tableName)
// parameters, using the caller-supplied query q which must take the schema
// and table name as its two positional arguments and produce columns in the
// order scanned below.
//
// NOTE(review): the receiver type `*m.DB` is package-qualified, which Go does
// not allow for method receivers — this cannot compile as written. It most
// likely should be a DB type declared in this package; confirm against the
// rest of the module.
func (db *m.DB) Columns(q, tableSchema, tableName string) ([]Column, error) {
	var d []Column
	if q == "" {
		return d, errors.New("No query provided to Columns")
	}
	rows, err := db.Query(q, tableSchema, tableName)
	if err != nil {
		return d, err
	}
	// NOTE(review): err is a plain local rather than a named result, so this
	// deferred assignment happens after the return values are copied and any
	// Close error is silently lost. Consider named results (d []Column, err
	// error), and checking rows.Err() after the loop.
	defer func() {
		if cerr := rows.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()
	for rows.Next() {
		var u Column
		// Scan order must match the column order produced by q (note it is
		// not the same as the struct's field order).
		err = rows.Scan(&u.TableCatalog,
			&u.TableSchema,
			&u.TableName,
			&u.ColumnName,
			&u.OrdinalPosition,
			&u.DataType,
			&u.IsNullable,
			&u.ColumnDefault,
			&u.DomainCatalog,
			&u.DomainSchema,
			&u.DomainName,
			&u.Comment,
		)
		if err != nil {
			return d, err
		} else {
			d = append(d, u)
		}
	}
	return d, err
} | model/columns.go | 0.569853 | 0.445891 | columns.go | starcoder |
package model
import (
"fmt"
"google.golang.org/protobuf/proto"
"gorm.io/gorm"
)
// GormQuiz is the persisted version of the Quiz proto. The serialized proto
// blob is the source of truth for quiz-level fields; questions are stored as
// separate rows so they can be loaded selectively.
type GormQuiz struct {
	gorm.Model
	// ProtoData contains the serialized Quiz proto
	ProtoData []byte
	// GormQuestions are the persisted questions in this quiz
	GormQuestions []GormQuestion
}
// GetQuiz returns a Quiz with the given ID, with questions pre-populated.
func (p *Persistence) GetQuiz(id int64) (*Quiz, error) {
	var gq GormQuiz
	// Preload pulls the question rows alongside the quiz row.
	if err := p.db.Preload("GormQuestions").First(&gq, id).Error; err != nil {
		return nil, err
	}
	return getQuizFromGormQuiz(&gq)
}

// GetQuizWithoutQuestions returns a Quiz with the given ID, without questions
// pre-populated (cheaper when only quiz-level fields are needed).
func (p *Persistence) GetQuizWithoutQuestions(id int64) (*Quiz, error) {
	var gq GormQuiz
	if err := p.db.First(&gq, id).Error; err != nil {
		return nil, err
	}
	return getQuizFromGormQuiz(&gq)
}
// GetQuizFromQuestionID returns the Quiz that contains the given question ID.
// Other questions are not pre-populated in the returned Quiz.
func (p *Persistence) GetQuizFromQuestionID(id uint) (*Quiz, error) {
	var question GormQuestion
	if err := p.db.First(&question, id).Error; err != nil {
		return nil, err
	}
	var quizRow GormQuiz
	if err := p.db.First(&quizRow, question.GormQuizID).Error; err != nil {
		return nil, err
	}
	return getQuizFromGormQuiz(&quizRow)
}
// CreateQuiz stores a quiz object to disk and returns the ID.
// The questions belonging to this quiz must be persisted separately.
// An ACL entry is also created allowing the creator write privileges.
// Both inserts happen in one transaction so a quiz never exists without
// its creator's ACL row.
func (p *Persistence) CreateQuiz(q *Quiz) (uint, error) {
	var resultingQuizID uint
	err := p.db.Transaction(func(tx *gorm.DB) error {
		// The first quizmaster is treated as the creator and must be a
		// logged-in user.
		if len(q.Quizmasters) == 0 || q.Quizmasters[0].GetUserId() == 0 {
			return fmt.Errorf("a quiz can only be created by a logged in user")
		}
		// Strip questions before serializing: they live in their own table.
		q1 := proto.Clone(q).(*Quiz)
		q1.Questions = nil
		b, err := proto.Marshal(q1)
		if err != nil {
			return err
		}
		gq := GormQuiz{ProtoData: b}
		err = tx.Create(&gq).Error
		if err != nil {
			return err
		}
		resultingQuizID = gq.ID
		// Grant the creator read+write access to the new quiz.
		initACL := AccessType{ReadAllowed: proto.Bool(true), WriteAllowed: proto.Bool(true)}
		bacl, err := proto.Marshal(&initACL)
		if err != nil {
			return err
		}
		gacl := GormAccessControl{
			QuizID:    resultingQuizID,
			UserID:    uint(q.Quizmasters[0].GetUserId()),
			ProtoData: bacl,
		}
		if err = tx.Create(&gacl).Error; err != nil {
			return err
		}
		return nil
	})
	return resultingQuizID, err
}
// SaveQuiz stores a quiz object to disk. The quiz's questions are stripped
// before persisting; they must be saved separately.
func (p *Persistence) SaveQuiz(q *Quiz) error {
	q1 := proto.Clone(q).(*Quiz)
	q1.Questions = nil
	gq, err := getGormQuizFromQuiz(q1)
	if err != nil {
		return err
	}
	// gq is already a *GormQuiz, so pass it directly: the previous &gq handed
	// GORM a **GormQuiz, unlike SaveQuizMetadata which passes *GormQuiz.
	return p.db.Save(gq).Error
}
// DeleteQuiz soft-deletes a quiz (gorm.Model's DeletedAt is set; the row stays).
func (p *Persistence) DeleteQuiz(qzid int64) error {
	var gq GormQuiz
	gq.ID = uint(qzid)
	return p.db.Delete(&gq).Error
}

// ReinstateQuiz un-deletes a quiz by clearing its DeletedAt marker.
func (p *Persistence) ReinstateQuiz(qzid int64) error {
	return p.db.Transaction(func(tx *gorm.DB) error {
		var gq GormQuiz
		// Unscoped so the soft-deleted row is visible to First and Save.
		if err := tx.Unscoped().First(&gq, uint(qzid)).Error; err != nil {
			return err
		}
		gq.DeletedAt.Valid = false
		return tx.Unscoped().Save(&gq).Error
	})
}
// SaveQuizMetadata stores the metadata (title and HTML description) of the
// quiz without affecting other fields. The read-modify-write of the stored
// proto runs inside a transaction so it is atomic.
func (p *Persistence) SaveQuizMetadata(qz *Quiz) error {
	return p.db.Transaction(func(tx *gorm.DB) error {
		var gq GormQuiz
		if err := tx.First(&gq, uint(qz.GetId())).Error; err != nil {
			return err
		}
		qzo, err := getQuizFromGormQuiz(&gq)
		if err != nil {
			return err
		}
		qzo.Title = proto.String(qz.GetTitle())
		qzo.HtmlDescription = proto.String(qz.GetHtmlDescription())
		gq2, err := getGormQuizFromQuiz(qzo)
		if err != nil {
			// Fix: this previously returned nil, silently swallowing the
			// marshalling error while persisting nothing.
			return err
		}
		return tx.Save(gq2).Error
	})
}
// GetAllQuizzes returns all the quizzes in the database (without questions
// populated).
func (p *Persistence) GetAllQuizzes() ([]*Quiz, error) {
	var rows []GormQuiz
	if err := p.db.Find(&rows).Error; err != nil {
		return nil, err
	}
	quizzes := make([]*Quiz, 0, len(rows))
	for i := range rows {
		q, err := getQuizFromGormQuiz(&rows[i])
		if err != nil {
			return nil, err
		}
		quizzes = append(quizzes, q)
	}
	return quizzes, nil
}
// RegisterParticipant adds a user to the quiz as a participant if needed and
// marks their registration complete with the given profile name.
// This is an atomic read-modify-write of the quiz proto inside a transaction.
func (p *Persistence) RegisterParticipant(qid int64, userID int64, profileName string) error {
	return p.db.Transaction(func(tx *gorm.DB) error {
		var gq GormQuiz
		if err := tx.First(&gq, qid).Error; err != nil {
			return err
		}
		qp, err := getQuizFromGormQuiz(&gq)
		if err != nil {
			return err
		}
		// Reuse the user's existing profile if one is already registered.
		var pp *ParticipantProfile
		for _, v := range qp.GetParticipants() {
			if v.GetUserId() == userID {
				pp = v
				break
			}
		}
		if pp == nil {
			// userID is already int64; no conversion needed.
			pp = &ParticipantProfile{UserId: proto.Int64(userID)}
			qp.Participants = append(qp.Participants, pp)
		}
		pp.ProfileName = proto.String(profileName)
		pp.CompletedRegistration = proto.Bool(true)
		b, err := proto.Marshal(qp)
		if err != nil {
			return err
		}
		gq.ProtoData = b
		return tx.Save(&gq).Error
	})
}
// getQuizFromGormQuiz deserializes the stored proto and overlays the values
// kept in relational columns (row IDs and any preloaded questions).
func getQuizFromGormQuiz(gq *GormQuiz) (*Quiz, error) {
	var q Quiz
	if err := proto.Unmarshal(gq.ProtoData, &q); err != nil {
		return nil, err
	}
	// Load the values from the other columns that might not
	// have been persisted in the proto.
	q.Id = proto.Int64(int64(gq.ID))
	for _, qn := range gq.GormQuestions {
		var qp Question
		if err := proto.Unmarshal(qn.ProtoData, &qp); err != nil {
			return nil, err
		}
		qp.Id = proto.Int64(int64(qn.ID))
		qp.QuizId = proto.Int64(int64(qn.GormQuizID))
		q.Questions = append(q.Questions, &qp)
	}
	return &q, nil
}
// getGormQuizFromQuiz serializes the quiz proto into a GormQuiz row, carrying
// the quiz's ID over to the primary key so Save updates the existing record.
func getGormQuizFromQuiz(qz *Quiz) (*GormQuiz, error) {
	b, err := proto.Marshal(qz)
	if err != nil {
		return nil, err
	}
	gq := GormQuiz{ProtoData: b}
	gq.ID = uint(qz.GetId())
	return &gq, nil
} | model/gormquiz.go | 0.64791 | 0.409044 | gormquiz.go | starcoder |
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package metrics provides tracking arbitrary metrics composed of
// values of comparable types.
package main
import (
"fmt"
"sort"
"sync"
)
// _Metric1 tracks metrics of values of some type.
// All methods are safe for concurrent use.
type _Metric1[T comparable] struct {
	mu sync.Mutex
	m  map[T]int
}

// Add adds another instance of some value.
func (m *_Metric1[T]) Add(v T) {
	m.mu.Lock()
	defer m.mu.Unlock()
	if m.m == nil {
		m.m = make(map[T]int)
	}
	m.m[v]++
}

// Count returns the number of instances we've seen of v.
func (m *_Metric1[T]) Count(v T) int {
	m.mu.Lock()
	defer m.mu.Unlock()
	return m.m[v]
}

// Metrics returns all the values we've seen, in an indeterminate order.
func (m *_Metric1[T]) Metrics() []T {
	// Fix: hold the lock while reading the map; the previous unlocked read
	// raced with concurrent Add calls.
	m.mu.Lock()
	defer m.mu.Unlock()
	return _Keys(m.m)
}
// key2 is the composite map key for pairs tracked by _Metric2.
type key2[T1, T2 comparable] struct {
	f1 T1
	f2 T2
}

// _Metric2 tracks metrics of pairs of values.
// All methods are safe for concurrent use.
type _Metric2[T1, T2 comparable] struct {
	mu sync.Mutex
	m  map[key2[T1, T2]]int
}

// Add adds another instance of some pair of values.
func (m *_Metric2[T1, T2]) Add(v1 T1, v2 T2) {
	m.mu.Lock()
	defer m.mu.Unlock()
	if m.m == nil {
		m.m = make(map[key2[T1, T2]]int)
	}
	m.m[key2[T1, T2]{v1, v2}]++
}

// Count returns the number of instances we've seen of v1/v2.
func (m *_Metric2[T1, T2]) Count(v1 T1, v2 T2) int {
	m.mu.Lock()
	defer m.mu.Unlock()
	return m.m[key2[T1, T2]{v1, v2}]
}

// Metrics returns all the values we've seen, in an indeterminate order.
func (m *_Metric2[T1, T2]) Metrics() (r1 []T1, r2 []T2) {
	// Fix: hold the lock while reading the map; the previous unlocked read
	// raced with concurrent Add calls.
	m.mu.Lock()
	defer m.mu.Unlock()
	for _, k := range _Keys(m.m) {
		r1 = append(r1, k.f1)
		r2 = append(r2, k.f2)
	}
	return r1, r2
}
// key3 is the composite map key for triplets tracked by _Metric3.
type key3[T1, T2, T3 comparable] struct {
	f1 T1
	f2 T2
	f3 T3
}

// _Metric3 tracks metrics of triplets of values.
// All methods are safe for concurrent use.
type _Metric3[T1, T2, T3 comparable] struct {
	mu sync.Mutex
	m  map[key3[T1, T2, T3]]int
}

// Add adds another instance of some triplet of values.
func (m *_Metric3[T1, T2, T3]) Add(v1 T1, v2 T2, v3 T3) {
	m.mu.Lock()
	defer m.mu.Unlock()
	if m.m == nil {
		m.m = make(map[key3[T1, T2, T3]]int)
	}
	m.m[key3[T1, T2, T3]{v1, v2, v3}]++
}

// Count returns the number of instances we've seen of v1/v2/v3.
func (m *_Metric3[T1, T2, T3]) Count(v1 T1, v2 T2, v3 T3) int {
	m.mu.Lock()
	defer m.mu.Unlock()
	return m.m[key3[T1, T2, T3]{v1, v2, v3}]
}

// Metrics returns all the values we've seen, in an indeterminate order.
func (m *_Metric3[T1, T2, T3]) Metrics() (r1 []T1, r2 []T2, r3 []T3) {
	// Fix: hold the lock while iterating the map; the previous unlocked
	// iteration raced with concurrent Add calls.
	m.mu.Lock()
	defer m.mu.Unlock()
	for k := range m.m {
		r1 = append(r1, k.f1)
		r2 = append(r2, k.f2)
		r3 = append(r3, k.f3)
	}
	return r1, r2, r3
}
// S is a comparable struct used to exercise the metric types with
// non-primitive keys.
type S struct{ a, b, c string }

// TestMetrics exercises _Metric1/2/3; failures panic since this harness does
// not use the testing package.
func TestMetrics() {
	m1 := _Metric1[string]{}
	if got := m1.Count("a"); got != 0 {
		panic(fmt.Sprintf("Count(%q) = %d, want 0", "a", got))
	}
	m1.Add("a")
	m1.Add("a")
	if got := m1.Count("a"); got != 2 {
		panic(fmt.Sprintf("Count(%q) = %d, want 2", "a", got))
	}
	if got, want := m1.Metrics(), []string{"a"}; !_SlicesEqual(got, want) {
		panic(fmt.Sprintf("Metrics = %v, want %v", got, want))
	}
	m2 := _Metric2[int, float64]{}
	m2.Add(1, 1)
	m2.Add(2, 2)
	m2.Add(3, 3)
	m2.Add(3, 3)
	// Metrics returns keys in indeterminate order, so sort before comparing.
	k1, k2 := m2.Metrics()
	sort.Ints(k1)
	w1 := []int{1, 2, 3}
	if !_SlicesEqual(k1, w1) {
		panic(fmt.Sprintf("_Metric2.Metrics first slice = %v, want %v", k1, w1))
	}
	sort.Float64s(k2)
	w2 := []float64{1, 2, 3}
	if !_SlicesEqual(k2, w2) {
		panic(fmt.Sprintf("_Metric2.Metrics first slice = %v, want %v", k2, w2))
	}
	m3 := _Metric3[string, S, S]{}
	m3.Add("a", S{"d", "e", "f"}, S{"g", "h", "i"})
	m3.Add("a", S{"d", "e", "f"}, S{"g", "h", "i"})
	m3.Add("a", S{"d", "e", "f"}, S{"g", "h", "i"})
	m3.Add("b", S{"d", "e", "f"}, S{"g", "h", "i"})
	if got := m3.Count("a", S{"d", "e", "f"}, S{"g", "h", "i"}); got != 3 {
		panic(fmt.Sprintf("Count(%v, %v, %v) = %d, want 3", "a", S{"d", "e", "f"}, S{"g", "h", "i"}, got))
	}
}

// main runs the test harness.
func main() {
	TestMetrics()
}
// _SlicesEqual reports whether two slices are equal: the same length and all
// elements equal. All floating point NaNs are considered equal.
func _SlicesEqual[Elem comparable](a, b []Elem) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] == b[i] {
			continue
		}
		// Unequal values still count as equal when both are NaN
		// (NaN is the only value for which x != x).
		if a[i] == a[i] || b[i] == b[i] {
			return false
		}
	}
	return true
}
// _Keys returns the keys of the map m.
// The keys will be in an indeterminate order.
// Callers are responsible for any locking around m.
func _Keys[K comparable, V any](m map[K]V) []K {
	r := make([]K, 0, len(m))
	for k := range m {
		r = append(r, k)
	}
	return r
} | test/typeparam/metrics.go | 0.803714 | 0.412412 | metrics.go | starcoder |
package io
import (
"encoding/csv"
"encoding/gob"
"fmt"
"io"
"math"
"os"
"strconv"
"golem/pkg/model"
mat "github.com/nlpodyssey/spago/pkg/mat32"
)
// DataRecord holds data for a single data point.
type DataRecord struct {
	// ContinuousFeatures contains the raw value of the continuous features
	// these are indexed according to the mapping from continuous feature to index
	// specified in the dataset metadata
	ContinuousFeatures mat.Matrix
	// CategoricalFeatures contain the category values for the categorical features.
	// Each column of the slice corresponds to a categorical feature. The mapping between
	// column indices and feature is specified in the dataset metadata
	CategoricalFeatures []int
	// Target contains the target value.
	// Float64 is used to represent valus for both continuous and categorical target types.
	Target mat.Float
}

// DataBatch holds a minibatch of data.
type DataBatch []*DataRecord

// Size returns the number of records in the batch.
func (d DataBatch) Size() int {
	return len(d)
}
// void is a zero-size placeholder stored for set members.
type void struct{}

// Void is the value stored for every member of a Set.
var Void = void{}

// Set is a collection of unique strings.
type Set map[string]void

// NewSet builds a Set containing the given values.
func NewSet(values ...string) Set {
	s := make(Set, len(values))
	for _, v := range values {
		s[v] = Void
	}
	return s
}
// DataParameters configures how a CSV dataset is read and batched.
type DataParameters struct {
	DataFile           string // path of the CSV file to load
	TargetColumn       string // header name of the target column
	CategoricalColumns Set    // header names treated as categorical features
	BatchSize          int    // maximum records per minibatch
}

// DataError reports a problem parsing one data row; Line is the 0-based
// record index (header excluded).
type DataError struct {
	Line  int
	Error string
}
// LoadData reads the train file and splits it into batches of at most BatchSize
// elements. When metaData is nil a new Metadata is built from the header and
// the data (averages, std deviations, category vocabularies); otherwise the
// existing metadata is applied and unknown categories are row errors.
// Row-level parse failures are collected in the returned []DataError rather
// than aborting the load. Continuous features (and a continuous target) are
// standardized to zero mean / unit variance before returning.
func LoadData(p DataParameters, metaData *model.Metadata) (*model.Metadata, *DataSet, []DataError, error) {
	var errors []DataError
	inputFile, err := os.Open(p.DataFile)
	if err != nil {
		return nil, nil, nil, fmt.Errorf("error opening file: %w", err)
	}
	reader := csv.NewReader(inputFile)
	reader.Comma = ','
	//First line is expected to be a header
	record, err := reader.Read()
	if err != nil {
		return nil, nil, nil, fmt.Errorf("error reading data header: %w", err)
	}
	newMetadata := false
	if metaData == nil {
		metaData = model.NewMetadata()
		newMetadata = true
		metaData.Columns = parseColumns(record, p)
		if err := setTargetColumn(p, metaData); err != nil {
			return nil, nil, nil, err
		}
		buildFeatureIndex(metaData)
	}
	var data []*DataRecord
	currentLine := 0
	targetType := metaData.Columns[metaData.TargetColumn].Type
	for record, err = reader.Read(); err == nil; record, err = reader.Read() {
		dataRecord := DataRecord{}
		targetValue, err := parseTarget(newMetadata, metaData, record[metaData.TargetColumn])
		if err != nil {
			errors = append(errors, DataError{
				Line:  currentLine,
				Error: err.Error(),
			})
			continue
		}
		dataRecord.Target = targetValue
		// Accumulate the target sum here; computeStatistics divides by the
		// record count later.
		if targetType == model.Continuous && newMetadata {
			metaData.Columns[metaData.TargetColumn].Average += float64(targetValue)
		}
		dataRecord.ContinuousFeatures = mat.NewEmptyVecDense(metaData.ContinuousFeaturesMap.Size())
		err = parseContinuousFeatures(metaData, record, dataRecord.ContinuousFeatures, newMetadata)
		if err != nil {
			errors = append(errors, DataError{
				Line:  currentLine,
				Error: err.Error(),
			})
			continue
		}
		dataRecord.CategoricalFeatures, err = parseCategoricalFeatures(metaData, newMetadata, record)
		if err != nil {
			errors = append(errors, DataError{
				Line:  currentLine,
				Error: err.Error(),
			})
			continue
		}
		data = append(data, &dataRecord)
		currentLine++
	}
	dataSet := NewDataSet(data, p.BatchSize)
	if newMetadata {
		computeStatistics(metaData, dataSet)
	}
	standardizeContinuousFeatures(metaData, dataSet)
	if targetType == model.Continuous {
		standardizeTarget(metaData, dataSet)
	}
	return metaData, dataSet, errors, nil
}
// standardizeTarget rescales every record's target to zero mean and unit
// variance using the target column's precomputed Average and StdDev.
func standardizeTarget(metadata *model.Metadata, set *DataSet) {
	set.ResetOrder(OriginalOrder)
	// The target column's stats do not change per record: look them up once
	// instead of inside the inner loop.
	targetColumn := metadata.Columns[metadata.TargetColumn]
	for batch := set.Next(); len(batch) > 0; batch = set.Next() {
		for _, d := range batch {
			d.Target = mat.Float((float64(d.Target) - targetColumn.Average) / targetColumn.StdDev)
		}
	}
}
// standardizeContinuousFeatures rescales every continuous feature of every
// record to zero mean and unit variance using each column's precomputed
// Average and StdDev.
func standardizeContinuousFeatures(metadata *model.Metadata, set *DataSet) {
	set.ResetOrder(OriginalOrder)
	for batch := set.Next(); len(batch) > 0; batch = set.Next() {
		for _, d := range batch {
			for column, index := range metadata.ContinuousFeaturesMap.ColumnToIndex {
				val := float64(d.ContinuousFeatures.At(index, 0))
				col := metadata.Columns[column]
				val = (val - col.Average) / col.StdDev
				d.ContinuousFeatures.Set(index, 0, mat.Float(val))
			}
		}
	}
}
// computeStatistics computes dataset-wide statistics
// (currently only mean and std deviation of each continuous feature and
// continuous target). It expects each column's Average field to already hold
// the running SUM accumulated during loading; the sums are converted to means
// here, then a second pass over the data accumulates squared deviations.
func computeStatistics(metadata *model.Metadata, set *DataSet) {
	set.ResetOrder(OriginalOrder)
	stdDevs := make([]float64, len(metadata.Columns))
	targetStdDev := 0.0
	dataCount := float64(set.Size())
	targetColumn := metadata.Columns[metadata.TargetColumn]
	// Convert accumulated sums into means. Columns that were never
	// accumulated (e.g. categorical ones) just stay at zero.
	for _, col := range metadata.Columns {
		col.Average = col.Average / dataCount
	}
	for batch := set.Next(); len(batch) > 0; batch = set.Next() {
		for _, d := range batch {
			for column, index := range metadata.ContinuousFeaturesMap.ColumnToIndex {
				diff := math.Pow(float64(d.ContinuousFeatures.At(index, 0))-metadata.Columns[column].Average, 2)
				stdDevs[column] += diff
			}
			targetStdDev += math.Pow(float64(d.Target)-targetColumn.Average, 2)
		}
	}
	// Population standard deviation (divide by N, not N-1).
	for i := range stdDevs {
		metadata.Columns[i].StdDev = math.Sqrt(stdDevs[i] / dataCount)
	}
	targetColumn.StdDev = math.Sqrt(targetStdDev / dataCount)
}
// parseColumns builds the column descriptors from the CSV header row,
// marking a column Categorical when its name appears in p.CategoricalColumns
// and Continuous otherwise.
func parseColumns(record []string, p DataParameters) []*model.Column {
	columns := make([]*model.Column, len(record))
	for i, name := range record {
		colType := model.Continuous
		if _, ok := p.CategoricalColumns[name]; ok {
			colType = model.Categorical
		}
		columns[i] = &model.Column{
			Name: name,
			Type: colType,
		}
	}
	return columns
}
// parseCategoricalFeatures extracts the categorical feature values of record,
// mapping each raw string to its integer category index. When newMetadata is
// true, previously unseen values are added to the vocabulary; otherwise an
// unknown value is reported as an error.
func parseCategoricalFeatures(metaData *model.Metadata, newMetadata bool, record []string) ([]int, error) {
	features := make([]int, metaData.CategoricalFeaturesMap.Size())
	for column, index := range metaData.CategoricalFeaturesMap.ColumnToIndex {
		cv := model.CategoricalValue{
			Column: column,
			Value:  record[column],
		}
		var valueIndex int
		if newMetadata {
			valueIndex = metaData.CategoricalValuesMap.ValueFor(cv)
		} else {
			var known bool
			valueIndex, known = metaData.CategoricalValuesMap.ValueToIndex[cv]
			if !known {
				return nil, fmt.Errorf("unknown value %s for categorical attribute %s", record[column], metaData.Columns[column].Name)
			}
		}
		features[index] = valueIndex
	}
	return features, nil
}
// parseContinuousFeatures parses the continuous feature values of record into
// the features vector (indexed per ContinuousFeaturesMap). When newMetadata
// is true it also accumulates each column's running sum in Average, which
// computeStatistics later converts to a mean.
func parseContinuousFeatures(metaData *model.Metadata, record []string, features mat.Matrix, newMetadata bool) error {
	for column, index := range metaData.ContinuousFeaturesMap.ColumnToIndex {
		value, err := strconv.ParseFloat(record[column], 64)
		if err != nil {
			return fmt.Errorf("error parsing feature %s: %w", metaData.Columns[column].Name, err)
		}
		features.Set(index, 0, mat.Float(value))
		if newMetadata {
			metaData.Columns[column].Average += value
		}
	}
	return nil
}
// parseTarget converts the raw target string into its numeric representation,
// choosing the parser by target type: categorical targets are looked up in
// (or, while building new metadata, added to) the label vocabulary, while
// continuous targets are parsed as floats.
func parseTarget(newMetadata bool, metaData *model.Metadata, target string) (mat.Float, error) {
	var parseFunc func(string) (mat.Float, error)
	switch metaData.TargetType() {
	case model.Categorical:
		if newMetadata {
			parseFunc = metaData.ParseOrAddCategoricalTarget
		} else {
			parseFunc = metaData.ParseCategoricalTarget
		}
	case model.Continuous:
		parseFunc = metaData.ParseContinuousTarget
	}
	targetValue, err := parseFunc(target)
	if err != nil {
		return 0, fmt.Errorf("unable to parse target value %s: %w", target, err)
	}
	return targetValue, nil
}
// buildFeatureIndex assigns every non-target column a dense index within its
// feature family (continuous or categorical), recording the mapping in the
// corresponding metadata map.
func buildFeatureIndex(metaData *model.Metadata) {
	nextContinuous, nextCategorical := 0, 0
	for i, col := range metaData.Columns {
		if i == metaData.TargetColumn {
			continue // the target is not a feature
		}
		if col.Type == model.Continuous {
			metaData.ContinuousFeaturesMap.Set(i, nextContinuous)
			nextContinuous++
		} else {
			metaData.CategoricalFeaturesMap.Set(i, nextCategorical)
			nextCategorical++
		}
	}
}
// setTargetColumn records the index of the configured target column in the
// metadata, failing if no header column matches p.TargetColumn.
func setTargetColumn(p DataParameters, metaData *model.Metadata) error {
	for i, col := range metaData.Columns {
		if col.Name == p.TargetColumn {
			metaData.TargetColumn = i
			return nil
		}
	}
	return fmt.Errorf("target column %s not found in data header", p.TargetColumn)
}
// SaveModel gob-encodes the given model onto writer.
func SaveModel(model *model.Model, writer io.Writer) error {
	if err := gob.NewEncoder(writer).Encode(model); err != nil {
		return fmt.Errorf("error encoding model: %w", err)
	}
	return nil
}
// LoadModel gob-decodes a model from input and returns it.
func LoadModel(input io.Reader) (*model.Model, error) {
	decoder := gob.NewDecoder(input)
	m := model.Model{}
	err := decoder.Decode(&m)
	if err != nil {
		return nil, fmt.Errorf("error decoding model: %w", err)
	}
	return &m, nil
} | pkg/io/io.go | 0.683314 | 0.464476 | io.go | starcoder |
package hangulsimilarity
import (
"regexp"
"strings"
)
// CompareBySyllables returns the similarity (0-100) of the given two strings
// based on the syllables they have in common, measured against the string
// with fewer syllables.
func CompareBySyllables(first, second string) float64 {
	first = strings.TrimSpace(first)
	second = strings.TrimSpace(second)
	syllableCount1, len1 := countBySyllables(first)
	syllableCount2, len2 := countBySyllables(second)
	// Always measure the shorter string against the longer one so the
	// denominator is the smaller syllable count.
	if len1 < len2 {
		return CompareBySyllables(second, first)
	}
	// Fix: guard against division by zero when the shorter string has no
	// syllables (this previously produced NaN).
	if len2 == 0 {
		return 0
	}
	common := 0
	for syllable, cnt2 := range syllableCount2 {
		if cnt1, ok := syllableCount1[syllable]; ok {
			common += min(cnt1, cnt2)
		}
	}
	return 100 * float64(common) / float64(len2)
}
// countBySyllables returns a map of syllable frequencies for the given
// sentence (after punctuation removal) along with the total syllable count.
func countBySyllables(sentence string) (map[string]int, int) {
	syllableMap := make(map[string]int)
	cnt := 0
	for _, segment := range strings.Split(cleanse(sentence), " ") {
		for _, syllable := range strings.Split(segment, "") {
			cnt++
			// The zero value of a missing map entry is 0, so a plain
			// increment replaces the previous exists-check branching.
			syllableMap[syllable]++
		}
	}
	return syllableMap, cnt
}
// min returns the smaller of the two given integers.
func min(first, second int) int {
	if second < first {
		return second
	}
	return first
}
// hangulCleansePattern matches the punctuation characters stripped before
// similarity comparison. Compiled once at package init instead of on every
// call to cleanse.
var hangulCleansePattern = regexp.MustCompile(`[.,!?~ㆍ:/\"\']`)

// cleanse returns a string with punctuation removed.
func cleanse(s string) string {
	return hangulCleansePattern.ReplaceAllString(s, "")
}
// CompareBySegments returns similarity of given two strings
// based on the common segments (space-separated words), as a percentage.
func CompareBySegments(first, second string) float64 {
	first = strings.TrimSpace(first)
	second = strings.TrimSpace(second)
	segmentCount1, len1 := countBySegments(first)
	segmentCount2, len2 := countBySegments(second)
	// Recurse with swapped arguments so the shorter string is always second.
	if len1 < len2 {
		return CompareBySegments(second, first)
	}
	common := 0
	for segment, cnt2 := range segmentCount2 {
		cnt1, exists := segmentCount1[segment]
		if !exists {
			continue
		}
		common += min(cnt1, cnt2)
	}
	return 100 * float64(common) / float64(len2)
}
// countBySegments returns a map containing segment counts of a given sentence and number of segments.
//
// Punctuation is stripped first; the sentence is then split on single spaces,
// so each space-separated word is one segment. Note an empty cleansed
// sentence still yields one (empty) segment, because strings.Split("", " ")
// returns a one-element slice.
func countBySegments(sentence string) (map[string]int, int) {
	segmentMap := make(map[string]int)
	var cnt int
	cleansed := cleanse(sentence)
	segments := strings.Split(cleansed, " ")
	for _, segment := range segments {
		cnt++
		if _, exists := segmentMap[segment]; !exists {
			segmentMap[segment] = 1
		} else {
			segmentMap[segment]++
		}
	}
	return segmentMap, cnt
} | week01/hangulSimilarity/src/hangulsimilarity/hangulSimilarity.go | 0.809653 | 0.431524 | hangulSimilarity.go | starcoder |
package mockrequire
import (
mockassert "github.com/derision-test/go-mockgen/testutil/assert"
"github.com/stretchr/testify/require"
)
// Called asserts that the mock function object was called at least once.
// A failure aborts the test immediately via t.FailNow.
func Called(t require.TestingT, mockFn interface{}, msgAndArgs ...interface{}) {
	if !mockassert.Called(t, mockFn, msgAndArgs...) {
		t.FailNow()
	}
}

// NotCalled asserts that the mock function object was not called.
func NotCalled(t require.TestingT, mockFn interface{}, msgAndArgs ...interface{}) {
	if !mockassert.NotCalled(t, mockFn, msgAndArgs...) {
		t.FailNow()
	}
}

// CalledOnce asserts that the mock function object was called exactly once.
func CalledOnce(t require.TestingT, mockFn interface{}, msgAndArgs ...interface{}) {
	if !mockassert.CalledOnce(t, mockFn, msgAndArgs...) {
		t.FailNow()
	}
}

// CalledN asserts that the mock function object was called exactly n times.
func CalledN(t require.TestingT, mockFn interface{}, n int, msgAndArgs ...interface{}) {
	if !mockassert.CalledN(t, mockFn, n, msgAndArgs...) {
		t.FailNow()
	}
}

// CalledWith asserts that the mock function object was called at least once with a set of
// arguments matching the given assertion function.
func CalledWith(t require.TestingT, mockFn interface{}, asserter CallInstanceAsserter, msgAndArgs ...interface{}) {
	if !mockassert.CalledWith(t, mockFn, asserter, msgAndArgs...) {
		t.FailNow()
	}
}

// NotCalledWith asserts that the mock function object was not called with a set of arguments
// matching the given assertion function.
func NotCalledWith(t require.TestingT, mockFn interface{}, asserter CallInstanceAsserter, msgAndArgs ...interface{}) {
	if !mockassert.NotCalledWith(t, mockFn, asserter, msgAndArgs...) {
		t.FailNow()
	}
}

// CalledOnceWith asserts that the mock function object was called exactly once with a set of
// arguments matching the given assertion function.
func CalledOnceWith(t require.TestingT, mockFn interface{}, asserter CallInstanceAsserter, msgAndArgs ...interface{}) {
	if !mockassert.CalledOnceWith(t, mockFn, asserter, msgAndArgs...) {
		t.FailNow()
	}
}

// CalledNWith asserts that the mock function object was called exactly n times with a set of
// arguments matching the given assertion function.
func CalledNWith(t require.TestingT, mockFn interface{}, n int, asserter CallInstanceAsserter, msgAndArgs ...interface{}) {
	if !mockassert.CalledNWith(t, mockFn, n, asserter, msgAndArgs...) {
		t.FailNow()
	}
} | testutil/require/require.go | 0.509276 | 0.413063 | require.go | starcoder |
package DG2D
import (
"math"
"github.com/notargets/gocfd/DG1D"
"github.com/notargets/gocfd/utils"
)
// Vandermonde2D builds the 2D Vandermonde matrix of order N at the points
// (r, s): one row per point, one column per 2D modal basis function.
func Vandermonde2D(N int, r, s utils.Vector) (V2D utils.Matrix) {
	a, b := RStoAB(r, s)
	V2D = utils.NewMatrix(r.Len(), (N+1)*(N+2)/2)
	col := 0
	for i := 0; i <= N; i++ {
		for j := 0; j <= N-i; j++ {
			V2D.SetCol(col, Simplex2DP(a, b, i, j))
			col++
		}
	}
	return
}
// Simplex2DPTerm evaluates the (i,j) 2D modal basis polynomial at the single
// point (r, s).
func Simplex2DPTerm(r, s float64, i, j int) (P float64) {
	av, bv := rsToab(r, s)
	a := utils.NewVector(1, []float64{av})
	b := utils.NewVector(1, []float64{bv})
	return Simplex2DP(a, b, i, j)[0]
}
// Simplex2DP evaluates the modal basis polynomial of order (i,j) on the
// simplex at the collapsed coordinates (a, b).
func Simplex2DP(a, b utils.Vector, i, j int) (P []float64) {
	h1 := DG1D.JacobiP(a, 0, 0, i)
	h2 := DG1D.JacobiP(b, float64(2*i+1), 0, j)
	bd := b.DataP
	sqrt2 := math.Sqrt(2)
	P = make([]float64, a.Len())
	for k := range P {
		P[k] = sqrt2 * h1[k] * h2[k] * utils.POW(1-bd[k], i)
	}
	return
}
// Purpose : Compute (x,y) nodes in equilateral triangle for
// polynomial of order N
//
// Equidistributed nodes are generated in barycentric form and then warped
// toward an LGL-like distribution along each edge, blended into the
// interior (warp & blend construction).
//
// NOTE(review): alpopt holds blend constants for N = 1..15; N == 0 would
// index alpopt[-1] and panic — callers presumably pass N >= 1. Confirm.
func Nodes2D(N int) (x, y utils.Vector) {
	var (
		alpha float64
		Np = (N + 1) * (N + 2) / 2
		L1, L2, L3 utils.Vector
		blend1, blend2, blend3, warp1, warp2, warp3, warpf1, warpf2, warpf3 []float64
	)
	L1, L2, L3, x, y =
		utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np)
	l1d, l2d, l3d, xd, yd := L1.DataP, L2.DataP, L3.DataP, x.DataP, y.DataP
	blend1, blend2, blend3, warp1, warp2, warp3 =
		make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np)
	// Optimized blend parameter per order; 5/3 is used beyond the table.
	alpopt := []float64{
		0.0000, 0.0000, 1.4152, 0.1001, 0.2751,
		0.9800, 1.0999, 1.2832, 1.3648, 1.4773,
		1.4959, 1.5743, 1.5770, 1.6223, 1.6258,
	}
	if N < 16 {
		alpha = alpopt[N-1]
	} else {
		alpha = 5. / 3.
	}
	// Create equidistributed nodes on equilateral triangle
	fn := 1. / float64(N)
	var sk int
	for n := 0; n < N+1; n++ {
		for m := 0; m < (N + 1 - n); m++ {
			l1d[sk] = float64(n) * fn
			l3d[sk] = float64(m) * fn
			sk++
		}
	}
	for i := range xd {
		// Third barycentric coordinate, then the (x, y) position.
		l2d[i] = 1 - l1d[i] - l3d[i]
		xd[i] = l3d[i] - l2d[i]
		yd[i] = (2*l1d[i] - l3d[i] - l2d[i]) / math.Sqrt(3)
		// Compute blending function at each node for each edge
		blend1[i] = 4 * l2d[i] * l3d[i]
		blend2[i] = 4 * l1d[i] * l3d[i]
		blend3[i] = 4 * l1d[i] * l2d[i]
	}
	// Amount of warp for each node, for each edge
	warpf1 = Warpfactor(N, L3.Copy().Subtract(L2))
	warpf2 = Warpfactor(N, L1.Copy().Subtract(L3))
	warpf3 = Warpfactor(N, L2.Copy().Subtract(L1))
	// Combine blend & warp
	for i := range warpf1 {
		warp1[i] = blend1[i] * warpf1[i] * (1 + utils.POW(alpha*l1d[i], 2))
		warp2[i] = blend2[i] * warpf2[i] * (1 + utils.POW(alpha*l2d[i], 2))
		warp3[i] = blend3[i] * warpf3[i] * (1 + utils.POW(alpha*l3d[i], 2))
	}
	// Accumulate deformations associated with each edge; the cos/sin factors
	// project each edge's warp along its outward direction.
	for i := range xd {
		xd[i] += warp1[i] + math.Cos(2*math.Pi/3)*warp2[i] + math.Cos(4*math.Pi/3)*warp3[i]
		yd[i] += math.Sin(2*math.Pi/3)*warp2[i] + math.Sin(4*math.Pi/3)*warp3[i]
	}
	return
}
// Warpfactor computes the 1D warp factor at the points rout: the displacement
// that moves equidistant nodes toward the Legendre-Gauss-Lobatto
// distribution, expressed through the Lagrange basis of the equidistant set.
func Warpfactor(N int, rout utils.Vector) (warpF []float64) {
	var (
		Nr = rout.Len()
		Pmat = utils.NewMatrix(N+1, Nr)
	)
	// Compute LGL and equidistant node distribution
	LGLr := DG1D.JacobiGL(0, 0, N)
	req := utils.NewVector(N + 1).Linspace(-1, 1)
	Veq := DG1D.Vandermonde1D(N, req)
	// Evaluate Lagrange polynomial at rout
	for i := 0; i < (N + 1); i++ {
		Pmat.M.SetRow(i, DG1D.JacobiP(rout, 0, 0, i))
	}
	Lmat := Veq.Transpose().LUSolve(Pmat)
	// Compute warp factor: interpolate the LGL-minus-equidistant offsets.
	warp := Lmat.Transpose().Mul(LGLr.Subtract(req).ToMatrix())
	// Scale factor: zerof masks interior points (|r| < 1 - 1e-10) so the
	// warp is damped to zero at the endpoints.
	zerof := rout.Copy().Apply(func(val float64) (res float64) {
		if math.Abs(val) < (1.0 - (1e-10)) {
			res = 1.
		}
		return
	})
	sf := zerof.Copy().ElMul(rout).Apply(func(val float64) (res float64) {
		res = 1 - val*val
		return
	})
	w2 := warp.Copy()
	// warp = warp/sf + warp*(zerof - 1): scaled in the interior, zeroed at
	// the masked endpoints.
	warp.ElDiv(sf.ToMatrix()).Add(w2.ElMul(zerof.AddScalar(-1).ToMatrix()))
	warpF = warp.DataP
	return
}
// GradVandermonde2D builds the gradient Vandermonde matrices for order N at
// the points (r, s): column k of V2Dr/V2Ds holds d/dr resp. d/ds of basis k.
func GradVandermonde2D(N int, r, s utils.Vector) (V2Dr, V2Ds utils.Matrix) {
	a, b := RStoAB(r, s)
	Np := (N + 1) * (N + 2) / 2
	V2Dr = utils.NewMatrix(r.Len(), Np)
	V2Ds = utils.NewMatrix(r.Len(), Np)
	col := 0
	for i := 0; i <= N; i++ {
		for j := 0; j <= N-i; j++ {
			ddr, dds := GradSimplex2DP(a, b, i, j)
			V2Dr.M.SetCol(col, ddr)
			V2Ds.M.SetCol(col, dds)
			col++
		}
	}
	return
}
func GradSimplex2DPTerm(r, s float64, i, j int) (ddr, dds float64) {
rr := utils.NewVector(1, []float64{r})
ss := utils.NewVector(1, []float64{s})
a, b := RStoAB(rr, ss)
ddrV, ddsV := GradSimplex2DP(a, b, i, j)
ddr, dds = ddrV[0], ddsV[0]
return
}
// GradSimplex2DP returns the (r,s)-derivatives of the modal basis polynomial
// of order (id, jd) on the 2D simplex, evaluated at the collapsed
// coordinates (a, b).
//
// Removed the leftover `_ = ad` blank assignment: ad is genuinely used in
// the s-derivative loop below.
func GradSimplex2DP(a, b utils.Vector, id, jd int) (ddr, dds []float64) {
	ad, bd := a.DataP, b.DataP
	fa := DG1D.JacobiP(a, 0, 0, id)
	dfa := DG1D.GradJacobiP(a, 0, 0, id)
	gb := DG1D.JacobiP(b, 2*float64(id)+1, 0, jd)
	dgb := DG1D.GradJacobiP(b, 2*float64(id)+1, 0, jd)
	// r-derivative
	// d/dr = da/dr d/da + db/dr d/db = (2/(1-s)) d/da = (2/(1-b)) d/da
	ddr = make([]float64, len(gb))
	for i := range ddr {
		ddr[i] = dfa[i] * gb[i]
		if id > 0 {
			ddr[i] *= utils.POW(0.5*(1-bd[i]), id-1)
		}
		// Normalize
		ddr[i] *= math.Pow(2, float64(id)+0.5)
	}
	// s-derivative
	// d/ds = ((1+a)/2)/((1-b)/2) d/da + d/db
	dds = make([]float64, len(gb))
	for i := range dds {
		dds[i] = 0.5 * dfa[i] * gb[i] * (1 + ad[i])
		if id > 0 {
			dds[i] *= utils.POW(0.5*(1-bd[i]), id-1)
		}
		tmp := dgb[i] * utils.POW(0.5*(1-bd[i]), id)
		if id > 0 {
			tmp -= 0.5 * float64(id) * gb[i] * utils.POW(0.5*(1-bd[i]), id-1)
		}
		dds[i] += fa[i] * tmp
		// Normalize
		dds[i] *= math.Pow(2, float64(id)+0.5)
	}
	return
}
// RStoAB maps (r,s) coordinates on the standard triangle to the collapsed
// (a,b) coordinates used by the tensor-product basis.
//
// Removed the stale commented-out duplicate of rsToab; the scalar transform
// is the single source of truth for the singular case s == 1.
func RStoAB(r, s utils.Vector) (a, b utils.Vector) {
	Np := r.Len()
	rd, sd := r.DataP, s.DataP
	ad, bd := make([]float64, Np), make([]float64, Np)
	for n, sval := range sd {
		ad[n], bd[n] = rsToab(rd[n], sval)
	}
	a, b = utils.NewVector(Np, ad), utils.NewVector(Np, bd)
	return
}
// rsToab maps a single (r, s) point to collapsed coordinates (a, b),
// handling the singular collapse line s == 1 explicitly.
func rsToab(r, s float64) (a, b float64) {
	b = s
	if s == 1 {
		a = -1
		return
	}
	a = 2*(1+r)/(1-s) - 1
	return
}
// function [r,s] = xytors(x,y)
// Purpose : Transfer from (x,y) in equilateral triangle
// to (r,s) coordinates in standard triangle
func XYtoRS(x, y utils.Vector) (r, s utils.Vector) {
	n := x.Len()
	r, s = utils.NewVector(n), utils.NewVector(n)
	xd, yd := x.DataP, y.DataP
	rd, sd := r.DataP, s.DataP
	sqrt3 := math.Sqrt(3)
	for i := range xd {
		// Barycentric coordinates of the point in the equilateral triangle.
		l1 := (sqrt3*yd[i] + 1) / 3
		l2 := (-3*xd[i] - sqrt3*yd[i] + 2) / 6
		l3 := (3*xd[i] - sqrt3*yd[i] + 2) / 6
		rd[i] = -l2 + l3 - l1
		sd[i] = -l2 - l3 + l1
	}
	return
}
// CalculateElementLocalGeometry maps the reference coordinates [R,S] to the
// physical element coordinates [X,Y] using the per-element vertex indices in
// EToV and the vertex positions VX, VY (linear blend of the three vertices).
func CalculateElementLocalGeometry(EToV utils.Matrix, VX, VY, R, S utils.Vector) (X, Y utils.Matrix) {
	/*
		For input values of vector field [R,S], transform them into element local [X,Y]
	*/
	// va, vb, vc are the vertex-index columns for the three triangle corners.
	va, vb, vc := EToV.Col(0), EToV.Col(1), EToV.Col(2)
	X = R.Copy().Add(S).Scale(-1).Outer(VX.SubsetIndex(va.ToIndex())).Add(
		R.Copy().AddScalar(1).Outer(VX.SubsetIndex(vb.ToIndex()))).Add(
		S.Copy().AddScalar(1).Outer(VX.SubsetIndex(vc.ToIndex()))).Scale(0.5)
	Y = R.Copy().Add(S).Scale(-1).Outer(VY.SubsetIndex(va.ToIndex())).Add(
		R.Copy().AddScalar(1).Outer(VY.SubsetIndex(vb.ToIndex()))).Add(
		S.Copy().AddScalar(1).Outer(VY.SubsetIndex(vc.ToIndex()))).Scale(0.5)
	return
} | DG2D/element_utils.go | 0.600423 | 0.637031 | element_utils.go | starcoder |
package querytee
import (
"encoding/json"
"fmt"
"math"
"github.com/go-kit/kit/log/level"
"github.com/pkg/errors"
"github.com/prometheus/common/model"
util_log "github.com/cortexproject/cortex/pkg/util/log"
)
// SamplesComparatorFunc helps with comparing different types of samples coming from /api/v1/query and /api/v1/query_range routes.
type SamplesComparatorFunc func(expected, actual json.RawMessage, tolerance float64) error

// SamplesResponse is the envelope of a query API response. Result is kept
// raw so it can be decoded according to ResultType.
type SamplesResponse struct {
	Status string
	Data struct {
		ResultType string
		Result json.RawMessage
	}
}
// NewSamplesComparator returns a SamplesComparator with the built-in
// comparators for the matrix, vector and scalar result types registered.
func NewSamplesComparator(tolerance float64) *SamplesComparator {
	return &SamplesComparator{
		tolerance: tolerance,
		sampleTypesComparator: map[string]SamplesComparatorFunc{
			"matrix": compareMatrix,
			"vector": compareVector,
			"scalar": compareScalar,
		},
	}
}
// SamplesComparator compares two query responses, dispatching to a
// registered comparator based on the result type.
type SamplesComparator struct {
	tolerance float64
	sampleTypesComparator map[string]SamplesComparatorFunc
}

// RegisterSamplesType registers a custom comparator for the given sample
// type, overriding any existing registration.
func (s *SamplesComparator) RegisterSamplesType(samplesType string, comparator SamplesComparatorFunc) {
	s.sampleTypesComparator[samplesType] = comparator
}
// Compare unmarshals both responses, verifies that status and result type
// match, and then delegates to the comparator registered for the result
// type.
func (s *SamplesComparator) Compare(expectedResponse, actualResponse []byte) error {
	var expected, actual SamplesResponse
	if err := json.Unmarshal(expectedResponse, &expected); err != nil {
		return errors.Wrap(err, "unable to unmarshal expected response")
	}
	if err := json.Unmarshal(actualResponse, &actual); err != nil {
		return errors.Wrap(err, "unable to unmarshal actual response")
	}
	if expected.Status != actual.Status {
		return fmt.Errorf("expected status %s but got %s", expected.Status, actual.Status)
	}
	if expected.Data.ResultType != actual.Data.ResultType {
		return fmt.Errorf("expected resultType %s but got %s", expected.Data.ResultType, actual.Data.ResultType)
	}
	comparator, ok := s.sampleTypesComparator[expected.Data.ResultType]
	if !ok {
		return fmt.Errorf("resultType %s not registered for comparison", expected.Data.ResultType)
	}
	return comparator(expected.Data.Result, actual.Data.Result, s.tolerance)
}
// compareMatrix compares two matrix (range query) results. Series are
// matched by label-set fingerprint, so ordering does not matter; each
// matched series must have the same sample count, and samples are compared
// position by position within tolerance.
func compareMatrix(expectedRaw, actualRaw json.RawMessage, tolerance float64) error {
	var expected, actual model.Matrix
	err := json.Unmarshal(expectedRaw, &expected)
	if err != nil {
		return err
	}
	err = json.Unmarshal(actualRaw, &actual)
	if err != nil {
		return err
	}
	if len(expected) != len(actual) {
		return fmt.Errorf("expected %d metrics but got %d", len(expected),
			len(actual))
	}
	// Index the actual series by fingerprint for order-independent lookup.
	metricFingerprintToIndexMap := make(map[model.Fingerprint]int, len(expected))
	for i, actualMetric := range actual {
		metricFingerprintToIndexMap[actualMetric.Metric.Fingerprint()] = i
	}
	for _, expectedMetric := range expected {
		actualMetricIndex, ok := metricFingerprintToIndexMap[expectedMetric.Metric.Fingerprint()]
		if !ok {
			return fmt.Errorf("expected metric %s missing from actual response", expectedMetric.Metric)
		}
		actualMetric := actual[actualMetricIndex]
		expectedMetricLen := len(expectedMetric.Values)
		actualMetricLen := len(actualMetric.Values)
		if expectedMetricLen != actualMetricLen {
			err := fmt.Errorf("expected %d samples for metric %s but got %d", expectedMetricLen,
				expectedMetric.Metric, actualMetricLen)
			// Log the timestamp ranges on both sides to help diagnose
			// whether one backend is missing head or tail samples.
			if expectedMetricLen > 0 && actualMetricLen > 0 {
				level.Error(util_log.Logger).Log("msg", err.Error(), "oldest-expected-ts", expectedMetric.Values[0].Timestamp,
					"newest-expected-ts", expectedMetric.Values[expectedMetricLen-1].Timestamp,
					"oldest-actual-ts", actualMetric.Values[0].Timestamp, "newest-actual-ts", actualMetric.Values[actualMetricLen-1].Timestamp)
			}
			return err
		}
		for i, expectedSamplePair := range expectedMetric.Values {
			actualSamplePair := actualMetric.Values[i]
			err := compareSamplePair(expectedSamplePair, actualSamplePair, tolerance)
			if err != nil {
				return errors.Wrapf(err, "sample pair not matching for metric %s", expectedMetric.Metric)
			}
		}
	}
	return nil
}
// compareVector compares two vector (instant query) results. Series are
// matched by label-set fingerprint, and each matched pair of samples is
// compared via compareSamplePair within tolerance.
func compareVector(expectedRaw, actualRaw json.RawMessage, tolerance float64) error {
	var expected, actual model.Vector
	err := json.Unmarshal(expectedRaw, &expected)
	if err != nil {
		return err
	}
	err = json.Unmarshal(actualRaw, &actual)
	if err != nil {
		return err
	}
	if len(expected) != len(actual) {
		return fmt.Errorf("expected %d metrics but got %d", len(expected),
			len(actual))
	}
	// Index the actual series by fingerprint for order-independent lookup.
	metricFingerprintToIndexMap := make(map[model.Fingerprint]int, len(expected))
	for i, actualMetric := range actual {
		metricFingerprintToIndexMap[actualMetric.Metric.Fingerprint()] = i
	}
	for _, expectedMetric := range expected {
		actualMetricIndex, ok := metricFingerprintToIndexMap[expectedMetric.Metric.Fingerprint()]
		if !ok {
			return fmt.Errorf("expected metric %s missing from actual response", expectedMetric.Metric)
		}
		actualMetric := actual[actualMetricIndex]
		err := compareSamplePair(model.SamplePair{
			Timestamp: expectedMetric.Timestamp,
			Value: expectedMetric.Value,
		}, model.SamplePair{
			Timestamp: actualMetric.Timestamp,
			Value: actualMetric.Value,
		}, tolerance)
		if err != nil {
			return errors.Wrapf(err, "sample pair not matching for metric %s", expectedMetric.Metric)
		}
	}
	return nil
}
// compareScalar compares two scalar results within the given tolerance.
func compareScalar(expectedRaw, actualRaw json.RawMessage, tolerance float64) error {
	var expected, actual model.Scalar
	if err := json.Unmarshal(expectedRaw, &expected); err != nil {
		return err
	}
	if err := json.Unmarshal(actualRaw, &actual); err != nil {
		return err
	}
	expectedPair := model.SamplePair{Timestamp: expected.Timestamp, Value: expected.Value}
	actualPair := model.SamplePair{Timestamp: actual.Timestamp, Value: actual.Value}
	return compareSamplePair(expectedPair, actualPair, tolerance)
}
// compareSamplePair checks that two sample pairs share a timestamp and that
// their values match within tolerance.
func compareSamplePair(expected, actual model.SamplePair, tolerance float64) error {
	if expected.Timestamp != actual.Timestamp {
		return fmt.Errorf("expected timestamp %v but got %v", expected.Timestamp, actual.Timestamp)
	}
	if compareSampleValue(expected.Value, actual.Value, tolerance) {
		return nil
	}
	return fmt.Errorf("expected value %s for timestamp %v but got %s", expected.Value, expected.Timestamp, actual.Value)
}
// compareSampleValue reports whether two sample values match.
//
// NaN compares equal to NaN. With a non-positive tolerance the comparison
// is exact at the bit level; otherwise values within tolerance are accepted.
func compareSampleValue(first, second model.SampleValue, tolerance float64) bool {
	f := float64(first)
	s := float64(second)
	if math.IsNaN(f) && math.IsNaN(s) {
		return true
	} else if tolerance <= 0 {
		// Bit comparison distinguishes +0 from -0; plain == would not.
		return math.Float64bits(f) == math.Float64bits(s)
	}
	return math.Abs(f-s) <= tolerance
} | tools/querytee/response_comparator.go | 0.7478 | 0.429908 | response_comparator.go | starcoder |
package vocabulary
import (
metrics "github.com/googleapis/gnostic/metrics"
)
// mapIntersection finds the intersection between two Vocabularies.
// This function takes a Vocabulary and checks if the words within
// the current Vocabulary already exist within the global Vocabulary.
// If the word exists in both structures it is added to a temp Vocabulary
// which replaces the old Vocabulary.
//
// NOTE(review): the accumulated count becomes previous-count + incoming
// count; the += on a freshly created temp map behaves as plain assignment
// unless a word repeats within v — confirm word uniqueness in the proto.
func mapIntersection(v *metrics.Vocabulary) {
	schemastemp := make(map[string]int)
	operationIDTemp := make(map[string]int)
	parametersTemp := make(map[string]int)
	propertiesTemp := make(map[string]int)
	for _, s := range v.Schemas {
		value, ok := schemas[s.Word]
		if ok {
			schemastemp[s.Word] += (value + int(s.Count))
		}
	}
	for _, op := range v.Operations {
		value, ok := operationID[op.Word]
		if ok {
			operationIDTemp[op.Word] += (value + int(op.Count))
		}
	}
	for _, param := range v.Parameters {
		value, ok := parameters[param.Word]
		if ok {
			parametersTemp[param.Word] += (value + int(param.Count))
		}
	}
	for _, prop := range v.Properties {
		value, ok := properties[prop.Word]
		if ok {
			propertiesTemp[prop.Word] += (value + int(prop.Count))
		}
	}
	// Replace the package-level accumulators with the narrowed maps.
	schemas = schemastemp
	operationID = operationIDTemp
	parameters = parametersTemp
	properties = propertiesTemp
}
// Intersection implements the intersection operation between multiple
// Vocabularies. It accepts a slice of Vocabularies and returns a single
// Vocabulary that contains only words found in all of them.
//
// NOTE(review): assumes vocabSlices is non-empty; vocabSlices[0] would
// otherwise panic — confirm callers guarantee this.
func Intersection(vocabSlices []*metrics.Vocabulary) *metrics.Vocabulary {
	// Reset the package-level accumulators, seed them with the first
	// vocabulary, then narrow via successive pairwise intersections.
	schemas = make(map[string]int)
	operationID = make(map[string]int)
	parameters = make(map[string]int)
	properties = make(map[string]int)
	unpackageVocabulary(vocabSlices[0])
	for i := 1; i < len(vocabSlices); i++ {
		mapIntersection(vocabSlices[i])
	}
	v := &metrics.Vocabulary{
		Properties: fillProtoStructure(properties),
		Schemas: fillProtoStructure(schemas),
		Operations: fillProtoStructure(operationID),
		Parameters: fillProtoStructure(parameters),
	}
	return v
} | metrics/vocabulary/intersection.go | 0.677794 | 0.418459 | intersection.go | starcoder |
package header
/**
* A CSeq header field in a request contains a single decimal sequence number
* and the request method. The CSeq header field serves to identify and order
* transactions within a dialog, to provide a means to uniquely identify
* transactions, and to differentiate between new requests and request
* retransmissions. Two CSeq header fields are considered equal if the
* sequence number and the request method are identical.
* <ul>
* <li>Method - The method part of CSeq is case-sensitive and MUST match that
* of the request.
* <li>Sequence number - The sequence number is chosen by the requesting
* client and is unique within a single value of Call-ID. The sequence number
* MUST be expressible as a 32-bit unsigned integer and MUST be less than
* 2**31. For non-REGISTER requests outside of a dialog, the sequence number
* value is arbitrary. Consecutive Requests that differ in method, headers or
* body, but have the same CallIdHeader must contain strictly monotonically
* increasing and contiguous sequence numbers; sequence numbers do not wrap
* around. Retransmissions of the same Request carry the same sequence number,
* but an INVITE Request with a different message body or different headers
* (a "re-invitation") acquires a new, higher sequence number. A server must
* echo the CSeqHeader from the Request in its Response. If the method value is
* missing in the received CSeqHeader, the server fills it in appropriately.
* ACK and CANCEL Requests must contain the same CSeqHeader sequence number
* (but not method) as the INVITE Request they refer to, while a BYE Request
* cancelling an invitation must have a higher sequence number. An user agent
* server must remember the highest sequence number for any INVITE Request
* with the same CallIdHeader. The server must respond to, and then discard,
* any INVITE Request with a lower sequence number.
* </ul>
* As long as a client follows the above guidelines, it may use any mechanism
* it would like to select CSeq header field values.
* <p>
* <b>Forked Requests:</b><br>
* Forked Requests must have the same CSeqHeader as there would be ambiguity
* otherwise between these forked Requests and later BYE Requests issued by the
* client user agent.
* <p>
* For Example:<br>
* <code>CSeq: 4711 INVITE</code>
*
*/
type CSeqHeader interface {
	Header
	/**
	 * Sets the method of CSeqHeader.
	 *
	 * @param method - the method of the Request of this CSeqHeader
	 * @throws ParseException which signals that an error has been reached
	 * unexpectedly while parsing the method value (surfaced in Go as the
	 * returned error value).
	 */
	SetMethod(method string) (ParseException error)
	/**
	 * Gets the method of CSeqHeader.
	 *
	 * @return method of CSeqHeader
	 */
	GetMethod() string
	/**
	 * Sets the sequence number value of the CSeqHeader. The sequence number
	 * MUST be expressible as a 32-bit unsigned integer and MUST be less than
	 * 2**31.
	 *
	 * @param sequenceNumber - the new sequence number of this CSeqHeader
	 * @throws InvalidArgumentException if supplied value is less than zero
	 * (surfaced in Go as the returned error value).
	 */
	SetSequenceNumber(sequenceNumber int) (InvalidArgumentException error)
	/**
	 * Gets the sequence number of this CSeqHeader.
	 *
	 * @return sequence number of the CSeqHeader
	 */
	GetSequenceNumber() int
} | sip/header/CSeqHeader.go | 0.894375 | 0.606265 | CSeqHeader.go | starcoder |
package mapval
import "fmt"
// Results the results of executing a schema.
// They are a flattened map (using dotted paths) of all the values []ValueResult representing the results
// of the IsDefs.
type Results struct {
	// Fields maps a dotted path string to the list of validation outcomes
	// recorded at that path.
	Fields map[string][]ValueResult
	// Valid is true until any recorded ValueResult is invalid.
	Valid bool
}

// NewResults creates a new Results object that is valid until a failing
// result is recorded.
func NewResults() *Results {
	return &Results{
		Fields: make(map[string][]ValueResult),
		Valid: true,
	}
}
// SimpleResult provides a convenient and simple method for creating a
// *Results object for a single validation, with the message formatted from
// msg and args. It is generally simpler than using SingleResult directly.
func SimpleResult(path Path, valid bool, msg string, args ...interface{}) *Results {
	return SingleResult(path, ValueResult{valid, fmt.Sprintf(msg, args...)})
}
// SingleResult returns a *Results object with a single validated value at
// the given path, using the provided ValueResult as its sole validation.
func SingleResult(path Path, result ValueResult) *Results {
	res := NewResults()
	res.record(path, result)
	return res
}
// merge records every value result from other into r under its original
// path. The path is parsed once per entry rather than once per value
// result (previously re-parsed inside the inner loop).
func (r *Results) merge(other *Results) {
	for path, valueResults := range other.Fields {
		parsed := MustParsePath(path)
		for _, valueResult := range valueResults {
			r.record(parsed, valueResult)
		}
	}
}
// mergeUnderPrefix records every value result from other into r, with each
// path prefixed by prefix. The path is parsed once per entry rather than
// once per value result (previously re-parsed inside the inner loop).
func (r *Results) mergeUnderPrefix(prefix Path, other *Results) {
	if len(prefix) == 0 {
		// If the prefix is empty, just use standard merge.
		// No need to add the dots.
		r.merge(other)
		return
	}
	for path, valueResults := range other.Fields {
		parsed := MustParsePath(path)
		for _, valueResult := range valueResults {
			r.record(prefix.Concat(parsed), valueResult)
		}
	}
}
// record appends result under path and updates the aggregate Valid flag.
//
// append on a nil slice allocates a fresh one, so the previous explicit
// missing-key branch was redundant; path.String() is also now computed once
// instead of up to four times.
func (r *Results) record(path Path, result ValueResult) {
	key := path.String()
	r.Fields[key] = append(r.Fields[key], result)
	if !result.Valid {
		r.Valid = false
	}
}
// EachResult executes the given callback once per value result. The
// callback returns true to keep iterating, or false to stop.
func (r Results) EachResult(f func(Path, ValueResult) bool) {
	for path, pathResults := range r.Fields {
		parsed := MustParsePath(path)
		for _, result := range pathResults {
			if !f(parsed, result) {
				return
			}
		}
	}
}
// DetailedErrors returns a new Results object consisting only of the failed
// validations.
func (r *Results) DetailedErrors() *Results {
	failed := NewResults()
	r.EachResult(func(path Path, vr ValueResult) bool {
		if !vr.Valid {
			failed.record(path, vr)
		}
		return true
	})
	return failed
}
// ValueResultError is used to represent an error validating an individual value.
type ValueResultError struct {
	path Path
	valueResult ValueResult
}

// Error returns the validation failure message with its path context
// included; it makes ValueResultError satisfy the error interface.
func (vre ValueResultError) Error() string {
	return fmt.Sprintf("@path '%s': %s", vre.path, vre.valueResult.Message)
}
// Errors returns a list of error objects, one per failed value validation.
// The returned slice is empty (not nil) when every validation passed.
func (r Results) Errors() []error {
	errors := make([]error, 0)
	r.EachResult(func(path Path, vr ValueResult) bool {
		if !vr.Valid {
			errors = append(errors, ValueResultError{path, vr})
		}
		return true
	})
	return errors
} | vendor/github.com/elastic/beats/libbeat/common/mapval/results.go | 0.833968 | 0.494385 | results.go | starcoder |
package imagex
import (
"github.com/xuzhuoxi/infra-go/graphicx"
"image"
"image/color"
"image/draw"
)
// PixelImage is a fixed-size image storing one packed uint32 per pixel in
// row-major order, with channels packed A,R,G,B per the original comment.
type PixelImage struct {
	//A,R,G,B
	Pix []uint32
	Width, Height int
}

// Max returns the image dimensions (exclusive upper bounds for x and y).
func (i *PixelImage) Max() (maxX, maxY int) {
	return i.Width, i.Height
}

// At returns the packed pixel value at (x, y).
func (i *PixelImage) At(x, y int) uint32 {
	return i.Pix[i.getIndex(x, y)]
}

// Set stores the packed pixel value at (x, y).
func (i *PixelImage) Set(x, y int, pixel uint32) {
	i.Pix[i.getIndex(x, y)] = pixel
}

// PixelAt returns the pixel at (x, y) as a graphicx.Pixel.
func (i *PixelImage) PixelAt(x, y int) graphicx.Pixel {
	return graphicx.Pixel(i.Pix[i.getIndex(x, y)])
}

// SetPixel stores the given graphicx.Pixel at (x, y).
func (i *PixelImage) SetPixel(x, y int, pixel graphicx.Pixel) {
	i.Pix[i.getIndex(x, y)] = uint32(pixel)
}

// IndexAt returns the packed pixel at a raw slice index (y*Width + x).
func (i *PixelImage) IndexAt(index int) uint32 {
	return i.Pix[index]
}

// IndexSet stores the packed pixel at a raw slice index.
func (i *PixelImage) IndexSet(index int, pixel uint32) {
	i.Pix[index] = pixel
}

// PixelIndexAt returns the pixel at a raw slice index as a graphicx.Pixel.
func (i *PixelImage) PixelIndexAt(index int) graphicx.Pixel {
	return graphicx.Pixel(i.Pix[index])
}

// IndexSetPixel stores the given graphicx.Pixel at a raw slice index.
func (i *PixelImage) IndexSetPixel(index int, pixel graphicx.Pixel) {
	i.Pix[index] = uint32(pixel)
}

// ForEachPixel invokes eachFunc once per coordinate, row by row.
func (i *PixelImage) ForEachPixel(eachFunc func(x, y int)) {
	var x, y int
	for y = 0; y < i.Height; y++ {
		for x = 0; x < i.Width; x++ {
			eachFunc(x, y)
		}
	}
}

// getIndex maps (x, y) to the row-major index into Pix.
func (i *PixelImage) getIndex(x, y int) int {
	return y*i.Width + x
}
//----------------------------------------------
// PixelImage64 is the 16-bit-per-channel counterpart of PixelImage: one
// packed uint64 per pixel in row-major order, channels packed A,R,G,B.
type PixelImage64 struct {
	//A,R,G,B
	Pix []uint64
	Width, Height int
}

// Max returns the image dimensions (exclusive upper bounds for x and y).
func (i *PixelImage64) Max() (maxX, maxY int) {
	return i.Width, i.Height
}

// At returns the packed pixel value at (x, y).
func (i *PixelImage64) At(x, y int) uint64 {
	return i.Pix[i.getIndex(x, y)]
}

// Set stores the packed pixel value at (x, y).
func (i *PixelImage64) Set(x, y int, pixel uint64) {
	i.Pix[i.getIndex(x, y)] = pixel
}

// PixelAt returns the pixel at (x, y) as a graphicx.Pixel64.
func (i *PixelImage64) PixelAt(x, y int) graphicx.Pixel64 {
	return graphicx.Pixel64(i.Pix[i.getIndex(x, y)])
}

// SetPixel stores the given graphicx.Pixel64 at (x, y).
func (i *PixelImage64) SetPixel(x, y int, pixel graphicx.Pixel64) {
	i.Pix[i.getIndex(x, y)] = uint64(pixel)
}

// IndexAt returns the packed pixel at a raw slice index (y*Width + x).
func (i *PixelImage64) IndexAt(index int) uint64 {
	return i.Pix[index]
}

// IndexSet stores the packed pixel at a raw slice index.
func (i *PixelImage64) IndexSet(index int, pixel uint64) {
	i.Pix[index] = pixel
}

// PixelIndexAt returns the pixel at a raw slice index as a graphicx.Pixel64.
func (i *PixelImage64) PixelIndexAt(index int) graphicx.Pixel64 {
	return graphicx.Pixel64(i.Pix[index])
}

// IndexSetPixel stores the given graphicx.Pixel64 at a raw slice index.
func (i *PixelImage64) IndexSetPixel(index int, pixel graphicx.Pixel64) {
	i.Pix[index] = uint64(pixel)
}

// ForEachPixel invokes eachFunc once per coordinate, row by row.
func (i *PixelImage64) ForEachPixel(eachFunc func(x, y int)) {
	var x, y int
	for y = 0; y < i.Height; y++ {
		for x = 0; x < i.Width; x++ {
			eachFunc(x, y)
		}
	}
}

// getIndex maps (x, y) to the row-major index into Pix.
func (i *PixelImage64) getIndex(x, y int) int {
	return y*i.Width + x
}
//----------------------------------------------
// NewPixelImage creates a width×height PixelImage with every pixel set to
// the given packed value. The fill loop is skipped for zero because the
// slice is already zero-initialized. (Dropped the redundant uint32
// conversion of an already-uint32 value.)
func NewPixelImage(width, height int, pixel uint32) *PixelImage {
	ln := width * height
	rs := &PixelImage{Width: width, Height: height, Pix: make([]uint32, ln)}
	if pixel > 0 {
		for index := 0; index < ln; index++ {
			rs.Pix[index] = pixel
		}
	}
	return rs
}
// NewPixelImage64 creates a width×height PixelImage64 with every pixel set
// to the given packed value. The fill loop is skipped for zero because the
// slice is already zero-initialized. (Dropped the redundant uint64
// conversion of an already-uint64 value.)
func NewPixelImage64(width, height int, pixel uint64) *PixelImage64 {
	ln := width * height
	rs := &PixelImage64{Width: width, Height: height, Pix: make([]uint64, ln)}
	if pixel > 0 {
		for index := 0; index < ln; index++ {
			rs.Pix[index] = pixel
		}
	}
	return rs
}
// Copy2PixelImage copies src into dst, packing each pixel as 8-bit A,R,G,B.
//
// color.Color.RGBA returns 16-bit channel values, so each channel must be
// shifted down to 8 bits BEFORE being placed in its byte. The previous
// packing ((a<<16)|(r<<8)|g|(b>>8)) left the low-order bytes of a, r and g
// overlapping the neighboring channels, corrupting them (e.g. alpha's low
// byte OR-ed into the red byte).
func Copy2PixelImage(src image.Image, dst *PixelImage) {
	size := src.Bounds().Size()
	var r, g, b, a uint32
	var pixel uint32
	for y := 0; y < size.Y; y++ {
		for x := 0; x < size.X; x++ {
			r, g, b, a = src.At(x, y).RGBA()
			pixel = ((a >> 8) << 24) | ((r >> 8) << 16) | ((g >> 8) << 8) | (b >> 8)
			dst.Set(x, y, pixel)
		}
	}
}
// Copy2PixelImage64 copies src into dst, packing each pixel with 16 bits
// per channel in A,R,G,B order. color.Color.RGBA already yields 16-bit
// values, so the channels are stored losslessly with no overlap.
func Copy2PixelImage64(src image.Image, dst *PixelImage64) {
	size := src.Bounds().Size()
	var r, g, b, a uint32
	var pixel uint64
	for y := 0; y < size.Y; y++ {
		for x := 0; x < size.X; x++ {
			r, g, b, a = src.At(x, y).RGBA()
			pixel = (uint64(a) << 48) | (uint64(r) << 32) | (uint64(g) << 16) | uint64(b)
			dst.Set(x, y, pixel)
		}
	}
}
// CopyPixel2Image copies src into the drawable dst, expanding each packed
// pixel into an 8-bit color.RGBA value.
//
// NOTE(review): graphicx.Pixel.RGBA() evidently yields uint8 channel values
// here (not the 16-bit color.Color method) — confirm in graphicx. Reusing a
// single *color.RGBA relies on dst.Set converting the color immediately, as
// the standard image/draw implementations do.
func CopyPixel2Image(src *PixelImage, dst draw.Image) {
	var setColor = &color.RGBA{}
	for y := 0; y < src.Height; y++ {
		for x := 0; x < src.Width; x++ {
			setColor.R, setColor.G, setColor.B, setColor.A = src.PixelAt(x, y).RGBA()
			dst.Set(x, y, setColor)
		}
	}
}
// CopyPixel642Image copies src into the drawable dst, expanding each packed
// pixel into a 16-bit color.RGBA64 value. A single reused *color.RGBA64
// relies on dst.Set converting the color immediately, as the standard
// image/draw implementations do.
func CopyPixel642Image(src *PixelImage64, dst draw.Image) {
	var setColor = &color.RGBA64{}
	for y := 0; y < src.Height; y++ {
		for x := 0; x < src.Width; x++ {
			setColor.R, setColor.G, setColor.B, setColor.A = src.PixelAt(x, y).RGBA()
			dst.Set(x, y, setColor)
		}
	}
} | imagex/pixel.go | 0.684791 | 0.566139 | pixel.go | starcoder |
package reshapes
/*
//TransposeChannelForward will take a nchw and change it to a nhwc and vice-versa. Will find the transpose of x and put it in y
func (o *Ops) TransposeChannelForward(handle *cudnn.Handler, x, y *tensor.Volume) error {
xfrmt, _, xdims, err := x.Properties()
if err != nil {
return err
}
_, _, ydims, err := y.Properties()
if err != nil {
return err
}
//ydims[3]==xdims[1]||ydims[1]==xdims[3] one of these being false is ok, but if they are both false then uhh oh
if ydims[0] != xdims[0] || !(ydims[3] == xdims[1] || ydims[1] == xdims[3]) {
return errors.New("Dims are not matching up N for both tensors need to be the same and channel dims need to be switched")
}
var fflg cudnn.TensorFormatFlag
switch xfrmt {
case fflg.NCHW():
return o.trans.Transpose(handle.XHandle(), o.nCHWtonHWC, x.TD(), x.Memer(), y.TD(), y.Memer())
case fflg.NHWC():
return o.trans.Transpose(handle.XHandle(), o.nHWCtonCHW, x.TD(), x.Memer(), y.TD(), y.Memer())
}
return errors.New("TransposeChannelXtoY - Passed Non supported tensor format")
}
//TransposeChannelBackward will take a nchw and change it to a nhwc and vice-versa. Will find the transpose of y and put it in x
func (o *Ops) TransposeChannelBackward(handle *cudnn.Handler, x, y *tensor.Volume) error {
xfrmt, _, xdims, err := x.Properties()
if err != nil {
return err
}
_, _, ydims, err := y.Properties()
if err != nil {
return err
}
//ydims[3]==xdims[1]||ydims[1]==xdims[3] one of these being false is ok, but if they are both false then uhh oh
if ydims[0] != xdims[0] || !(ydims[3] == xdims[1] || ydims[1] == xdims[3]) {
return errors.New("Dims are not matching up N for both tensors need to be the same and channel dims need to be switched")
}
var fflg cudnn.TensorFormatFlag
switch xfrmt {
case fflg.NCHW():
return o.trans.Transpose(handle.XHandle(), o.nCHWtonHWC, y.TD(), y.Memer(), x.TD(), x.Memer())
case fflg.NHWC():
return o.trans.Transpose(handle.XHandle(), o.nHWCtonCHW, y.TD(), y.Memer(), x.TD(), x.Memer())
}
return errors.New("TransposeChannelXtoY - Passed Non supported tensor format")
}
//TransposeChannel will take x and transpose it along the channel.
//The function works by creating a new volume and replacing x with it and deleting the old x.
func (o *Ops) TransposeChannel(handle *cudnn.Handler, x *tensor.Volume) error {
xfrmt, _, _, err := x.Properties()
if err != nil {
return err
}
y, err := o.gettransposevol(handle, x)
if err != nil {
return err
}
var fflg cudnn.TensorFormatFlag
switch xfrmt {
case fflg.NCHW():
err = o.trans.Transpose(handle.XHandle(), o.nCHWtonHWC, x.TD(), x.Memer(), y.TD(), y.Memer())
// x.Destroy()
*x = *y
//y.Destroy()
return err
case fflg.NHWC():
err = o.trans.Transpose(handle.XHandle(), o.nHWCtonCHW, x.TD(), x.Memer(), y.TD(), y.Memer())
// x.Destroy()
*x = *y
//y.Destroy()
return err
}
return errors.New("TransposeChannelXtoY - Passed Non supported tensor format")
}
//GetTransposeOutputProperties will get the volume of a transpose operation handled through this op
func (o *Ops) GetTransposeOutputProperties(handle *cudnn.Handler, x *tensor.Volume) (cudnn.TensorFormat, cudnn.DataType, []int32, []int32, error) {
frmt, dtype, dims, perm, err := o.trans.GetChannelTransposeOutputProperties(x.TD())
return cudnn.TensorFormat(frmt), cudnn.DataType(dtype), dims, perm, err
}
func (o *Ops) gettransposevol(handle *cudnn.Handler, x *tensor.Volume) (*tensor.Volume, error) {
frmt, dtype, dims, _, err := o.trans.GetChannelTransposeOutputProperties(x.TD())
if err != nil {
return nil, err
}
return tensor.Build(handle, cudnn.TensorFormat(frmt), cudnn.DataType(dtype), dims)
}
*/ | devices/gpu/nvidia/custom/reshapes/transpose.go | 0.549157 | 0.553566 | transpose.go | starcoder |
package visibility
import (
"regexp"
"strconv"
"strings"
cnv "github.com/urkk/metar/conversion"
)
// Unit is the unit of measurement a Distance value is expressed in.
type Unit string

const (
	// M - meters
	M = "M"
	// FT - feet
	FT = "FT"
	// SM - statute miles
	SM = "SM"
)

// Distance is a distance expressed in some Unit.
type Distance struct {
	// Value is the whole-number part. By default meters, or feet in US RVR.
	Value int
	// FractionValue is the fractional part; used only for horizontal
	// visibility reported in statute miles (e.g. "2 1/2SM").
	FractionValue float64
	// Unit is embedded so it can be set/read directly on the Distance.
	Unit
}

// BaseVisibility - the basis of visibility: one measurement.
type BaseVisibility struct {
	Distance
	AboveMax bool // more than reported value (P5000, P6SM...)
	BelowMin bool // less than reported value (M1/4SM, M0050...)
}

// Visibility is the prevailing visibility, optionally with a lower
// sector value (e.g. metric "4000 1500SW").
type Visibility struct {
	BaseVisibility
	// Sector visibility: the lower distance and its compass direction.
	LowerDistance  Distance
	LowerDirection string
}
// Meters converts the stored distance to meters according to its Unit.
// An unset or unrecognised unit is treated as meters already.
func (d *Distance) Meters() int {
	switch d.Unit {
	case FT:
		return cnv.FtToM(d.Value)
	case SM:
		return cnv.SMileToM(float64(d.Value) + d.FractionValue)
	default:
		// M and any unknown unit: the value is already in meters.
		return d.Value
	}
}
// Feet converts the stored distance to feet according to its Unit.
// An unset or unrecognised unit is treated as meters and converted.
func (d *Distance) Feet() int {
	switch d.Unit {
	case FT:
		return d.Value
	case SM:
		return cnv.SMileToFt(float64(d.Value) + d.FractionValue)
	default:
		// M and any unknown unit are meters.
		return cnv.MToFt(d.Value)
	}
}
// Miles converts the stored distance to statute miles according to its
// Unit. An unset or unrecognised unit is treated as meters and converted.
func (d *Distance) Miles() float64 {
	switch d.Unit {
	case FT:
		return float64(cnv.FtToSMile(int(d.Value)))
	case SM:
		return float64(d.Value) + d.FractionValue
	default:
		// M and any unknown unit are meters.
		return cnv.MToSMile(d.Value)
	}
}
// ParseVisibility identifies and parses the representation of horizontal
// visibility from the given tokens. It returns the number of tokens
// consumed (0 when the input does not start with a visibility group).
func (v *Visibility) ParseVisibility(input []string) (tokensused int) {
	inputstring := strings.Join(input, " ")
	// Metric form: optional P/M prefix, 4 digits of meters, optionally
	// followed by a second 4-digit sector value with a compass direction.
	metric := regexp.MustCompile(`^(P|M)?(\d{4})(\s|$)((\d{4})(NE|SE|NW|SW|N|E|S|W))?`)
	// In US and CA sector visibility reported in the remarks section.
	// (as VIS NW-SE 1/2; VIS NE 2 1/2 etc)
	imperial := regexp.MustCompile(`^(P|M)?(\d{1,2}|\d(\s)?)?((\d)/(\d))?SM`)
	switch {
	case metric.MatchString(inputstring):
		tokensused = 1
		v.Distance.Unit = M
		matches := metric.FindStringSubmatch(inputstring)
		// P = "more than reported", M = "less than reported".
		v.BelowMin = matches[1] == "M"
		v.AboveMax = matches[1] == "P"
		v.Distance.Value, _ = strconv.Atoi(matches[2])
		// Optional sector visibility consumed the second token.
		if matches[4] != "" {
			v.LowerDistance.Value, _ = strconv.Atoi(matches[5])
			v.LowerDistance.Unit = M
			v.LowerDirection = matches[6]
			tokensused++
		}
	case imperial.MatchString(inputstring):
		tokensused = 1
		matches := imperial.FindStringSubmatch(inputstring)
		v.BelowMin = matches[1] == "M"
		v.AboveMax = matches[1] == "P"
		// Whole-mile part (may carry a trailing space in "2 1/2SM").
		if matches[2] != "" {
			v.Distance.Value, _ = strconv.Atoi(strings.TrimSpace(matches[2]))
		}
		// Fractional part such as "1/2".
		if matches[5] != "" && matches[6] != "" {
			numerator, _ := strconv.Atoi(matches[5])
			denominator, _ := strconv.Atoi(matches[6])
			if denominator != 0 {
				v.Distance.FractionValue += float64(numerator) / float64(denominator)
			}
		}
		v.Distance.Unit = SM
		// A space after the whole part means the group spanned two tokens.
		if matches[3] == " " {
			tokensused++
		}
	default:
		return
	}
	return
} | visibility/visibility.go | 0.739328 | 0.474388 | visibility.go | starcoder |
package poly
import (
"math"
)
// Call evaluates the polynomial at x. A nil receiver is treated as the
// zero polynomial and evaluates to 0.
func (self *Poly) Call(x float64) float64 {
	if self == nil {
		return 0.
	}
	sum := 0.
	// Coefficients[i] is the coefficient of x^i.
	for i, c := range self.Coefficients {
		sum += c * math.Pow(x, float64(i))
	}
	return sum
}
// Eq reports whether the two polynomials are identical:
// P.Eq(Q) <=> P.Call(x) == Q.Call(x) for all real x.
func (self *Poly) Eq(other *Poly) bool {
	if self.Degre() != other.Degre() {
		return false
	}
	// Equal degrees (including both empty): compare coefficient-wise.
	for i, c := range self.Coefficients {
		if c != other.Coefficients[i] {
			return false
		}
	}
	return true
}
// Mult returns the product of the two polynomials using the standard
// coefficient convolution. A polynomial with no coefficients behaves as
// the zero polynomial and absorbs the product.
func (self *Poly) Mult(other *Poly) *Poly {
	// A null/empty polynomial always yields the null polynomial.
	if len(self.Coefficients) == 0 || len(other.Coefficients) == 0 {
		return &Poly{}
	}
	// deg(product) = deg(a) + deg(b), so it has len(a)+len(b)-1
	// coefficients. Bug fix: the previous version allocated
	// len(a)*len(b) slots, leaving trailing zero coefficients that
	// inflated the Degre() of the result (degre = len - 1).
	coeffs := make([]float64, len(self.Coefficients)+len(other.Coefficients)-1)
	for i, a := range self.Coefficients {
		for j, b := range other.Coefficients {
			coeffs[i+j] += a * b
		}
	}
	return &Poly{Coefficients: coeffs}
}
// Scalar_prod returns a new polynomial with every coefficient multiplied
// by x. An empty polynomial yields nil.
func (self *Poly) Scalar_prod(x float64) *Poly {
	if self.Degre() < 0 {
		return nil
	}
	scaled := make([]float64, len(self.Coefficients))
	for i, c := range self.Coefficients {
		scaled[i] = c * x
	}
	return &Poly{Coefficients: scaled}
}
// Scalar_prod_in_place multiplies every coefficient by x, mutating the
// receiver. An empty polynomial is left untouched.
func (self *Poly) Scalar_prod_in_place(x float64) {
	if self.Degre() < 0 {
		return
	}
	for i := range self.Coefficients {
		self.Coefficients[i] *= x
	}
}
// Add returns the sum of the two polynomials. The result has as many
// coefficients as the higher-degree operand.
func (self *Poly) Add(other *Poly) *Poly {
	// Order the operands so "longer" has the higher degree; on a tie the
	// choice does not matter since addition is commutative.
	longer, shorter := self, other
	if other.Degre() > self.Degre() {
		longer, shorter = other, self
	}
	nMax := longer.Degre() + 1 // degre = len - 1
	nMin := shorter.Degre() + 1
	sum := make([]float64, nMax)
	for i := 0; i < nMax; i++ {
		if i < nMin {
			sum[i] = longer.Coefficients[i] + shorter.Coefficients[i]
		} else {
			sum[i] = longer.Coefficients[i]
		}
	}
	return &Poly{Coefficients: sum}
}
// Minus subtracts the other polynomial from self and returns the result.
func (self *Poly) Minus(other *Poly) *Poly {
	// If other has the higher degree, compute (other - self) and negate,
	// so the loop below can assume self is at least as long.
	if self.Degre() < other.Degre() {
		P := other.Minus(self)
		P.Scalar_prod_in_place(-1.)
		return P
	}
	coeffs := make([]float64, len(self.Coefficients))
	for i := 0; i < len(self.Coefficients); i++ {
		if i < len(other.Coefficients) {
			coeffs[i] = self.Coefficients[i] - other.Coefficients[i]
		} else {
			coeffs[i] = self.Coefficients[i]
		}
	}
	return &Poly{Coefficients: coeffs}
} | simplemath.go | 0.796728 | 0.505737 | simplemath.go | starcoder |
package randomnames
// List of animals from https://gist.githubusercontent.com/atduskgreg/3cf8ef48cb0d29cf151bedad81553a54/raw/82f142562cf50b0f6fb8010f890b2f934093553e/animals.txt
import (
"math/rand"
"sync"
)
// init caches the list length so RandomAnimal does not recompute len on
// every call.
func init() {
	animalSize = len(Animals)
}
// RandomAnimal returns a pseudo-random animal from the list. It uses the
// package-level math/rand source, which is safe for concurrent use.
func RandomAnimal() string {
	return Animals[rand.Intn(animalSize)]
}
// SafeRandomAnimal returns a pseudo-random animal from the list goro-safely
func SafeRandomAnimal() string {
nameSafety.Lock()
s := RandomAnimal()
nameSafety.Unlock() // manual unlock so we don't incur the defer penalty
return s
}
var (
animalSafety sync.Mutex
animalSize int
Animals []string = []string{
"Aardvark",
"Aardwolf",
"Albatross",
"Alligator",
"Alpaca",
"Alpaca",
"Amphibian",
"Anaconda",
"Angelfish",
"Anglerfish",
"Ant",
"Anteater",
"Antelope",
"Antlion",
"Ape",
"Aphid",
"Arctic Fox",
"Arctic Wolf",
"Armadillo",
"Arrow crab",
"Asp",
"Ass",
"Baboon",
"Bactrian camel",
"Badger",
"Bald eagle",
"Bali cattle",
"Bandicoot",
"Barnacle",
"Barracuda",
"Basilisk",
"Bass",
"Bat",
"Beaked whale",
"Bear",
"Beaver",
"Bedbug",
"Bee",
"Beetle",
"Bird",
"Bison",
"Blackbird",
"Black panther",
"Black widow spider",
"Blue bird",
"Blue jay",
"Blue whale",
"Boa",
"Boar",
"Bobcat",
"Bobolink",
"Bonobo",
"Booby",
"Bovid",
"Box jellyfish",
"Buffalo",
"Bug",
"Butterfly",
"Buzzard",
"Camel",
"Canid",
"Canidae",
"Cape buffalo",
"Capybara",
"Cardinal",
"Caribou",
"Carp",
"Cat",
"Caterpillar",
"Catfish",
"Catshark",
"Cattle",
"Centipede",
"Cephalopod",
"Chameleon",
"Cheetah",
"Chickadee",
"Chicken",
"Chimpanzee",
"Chinchilla",
"Chipmunk",
"Cicada",
"Clam",
"Clownfish",
"Cobra",
"Cockroach",
"Cod",
"Condor",
"Constrictor",
"Coral",
"Cougar",
"Cow",
"Coyote",
"Crab",
"Crane",
"Crane fly",
"Crawdad",
"Crayfish",
"Cricket",
"Crocodile",
"Crow",
"Cuckoo",
"Damselfly",
"Deer",
"Dingo",
"Dinosaur",
"Dog",
"Dolphin",
"Donkey",
"Donkey",
"Donkey",
"Dormouse",
"Dove",
"Dragon",
"Dragonfly",
"Dromedary camel",
"Duck",
"Dung beetle",
"Eagle",
"Earthworm",
"Earwig",
"Echidna",
"Eel",
"Egret",
"Elephant",
"Elephant seal",
"Elk",
"Emu",
"English pointer",
"Ermine",
"Falcon",
"Fancy mouse",
"Fancy rat",
"Felidae",
"Ferret",
"Finch",
"Firefly",
"Fish",
"Flamingo",
"Flea",
"Fly",
"Flyingfish",
"Fowl",
"Fox",
"Frog",
"Fruit bat",
"Galliform",
"Gamefowl",
"Gayal",
"Gazelle",
"Gecko",
"Gerbil",
"Giant panda",
"Giant squid",
"Gibbon",
"Gila monster",
"Giraffe",
"Goat",
"Goldfish",
"Goose",
"Gopher",
"Gorilla",
"Grasshopper",
"Great blue heron",
"Great white shark",
"Grizzly bear",
"Ground shark",
"Ground sloth",
"Grouse",
"Guan",
"Guanaco",
"Guineafowl",
"Guinea pig",
"Gull",
"Guppy",
"Haddock",
"Halibut",
"Hammerhead shark",
"Hamster",
"Hare",
"Harrier",
"Hawk",
"Hedgehog",
"Hermit crab",
"Heron",
"Herring",
"Hippopotamus",
"Hookworm",
"Hornet",
"Horse",
"Hoverfly",
"Hummingbird",
"Humpback whale",
"Hyena",
"Iguana",
"Impala",
"Irukandji jellyfish",
"Jackal",
"Jaguar",
"Jay",
"Jellyfish",
"Junglefowl",
"Kangaroo",
"Kangaroo mouse",
"Kangaroo rat",
"Kingfisher",
"Kite",
"Kiwi",
"Koala",
"Koi",
"<NAME>",
"Krill",
"Lab rat",
"Ladybug",
"Lamprey",
"Landfowl",
"Land snail",
"Lark",
"Leech",
"Lemming",
"Lemur",
"Leopard",
"Leopon",
"Limpet",
"Lion",
"Lizard",
"Llama",
"Lobster",
"Locust",
"Loon",
"Louse",
"Lungfish",
"Lynx",
"Macaw",
"Mackerel",
"Magpie",
"Mammal",
"Manatee",
"Mandrill",
"<NAME>",
"Marlin",
"Marmoset",
"Marmot",
"Marsupial",
"Marten",
"Mastodon",
"Meadowlark",
"Meerkat",
"Mink",
"Minnow",
"Mite",
"Mockingbird",
"Mole",
"Mollusk",
"Mongoose",
"Monitor lizard",
"Monkey",
"Moose",
"Mosquito",
"Moth",
"Mountain goat",
"Mouse",
"Mule",
"Muskox",
"Narwhal",
"Newt",
"New World quail",
"Nightingale",
"Ocelot",
"Octopus",
"Old World quail",
"Opossum",
"Orangutan",
"Orca",
"Ostrich",
"Otter",
"Owl",
"Ox",
"Panda",
"Panther",
"Panthera hybrid",
"Parakeet",
"Parrot",
"Parrotfish",
"Partridge",
"Peacock",
"Peafowl",
"Pelican",
"Penguin",
"Perch",
"Peregrine falcon",
"Pheasant",
"Pig",
"Pigeon",
"Pike",
"Pilot whale",
"Pinniped",
"Piranha",
"Planarian",
"Platypus",
"Polar bear",
"Pony",
"Porcupine",
"Porpoise",
"Portuguese man o' war",
"Possum",
"Prairie dog",
"Prawn",
"Praying mantis",
"Primate",
"Ptarmigan",
"Puffin",
"Puma",
"Python",
"Quail",
"Quelea",
"Quokka",
"Rabbit",
"Raccoon",
"Rainbow trout",
"Rat",
"Rattlesnake",
"Raven",
"Ray",
"Red panda",
"Reindeer",
"Reptile",
"Rhinoceros",
"Right whale",
"Ringneck dove",
"Roadrunner",
"Rodent",
"Rook",
"Rooster",
"Roundworm",
"Saber-toothed cat",
"Sailfish",
"Salamander",
"Salmon",
"Sawfish",
"Scale insect",
"Scallop",
"Scorpion",
"Seahorse",
"Sea lion",
"Sea slug",
"Sea snail",
"Shark",
"Sheep",
"Shrew",
"Shrimp",
"Siamese fighting fish",
"Silkworm",
"Silverfish",
"Skink",
"Skunk",
"Sloth",
"Slug",
"Smelt",
"Snail",
"Snake",
"Snipe",
"Snow leopard",
"Society finch",
"Sockeye salmon",
"Sole",
"Sparrow",
"Sperm whale",
"Spider",
"Spider monkey",
"Spoonbill",
"Squid",
"Squirrel",
"Starfish",
"Star-nosed mole",
"Steelhead trout",
"Stingray",
"Stoat",
"Stork",
"Sturgeon",
"Sugar glider",
"Swallow",
"Swan",
"Swift",
"Swordfish",
"Swordtail",
"Tahr",
"Takin",
"Tapir",
"Tarantula",
"Tarsier",
"Tasmanian devil",
"Termite",
"Tern",
"Thrush",
"Tick",
"Tiger",
"Tiger shark",
"Tiglon",
"Toad",
"Tortoise",
"Toucan",
"Trapdoor spider",
"Tree frog",
"Trout",
"Tuna",
"Turkey",
"Turtle",
"Tyrannosaurus",
"Urial",
"Vampire bat",
"Vampire squid",
"Vicuna",
"Viper",
"Vole",
"Vulture",
"Wallaby",
"Walrus",
"Warbler",
"Wasp",
"Water Boa",
"Water buffalo",
"Weasel",
"Whale",
"Whippet",
"Whitefish",
"Whooping crane",
"Wildcat",
"Wildebeest",
"Wildfowl",
"Wolf",
"Wolverine",
"Wombat",
"Woodpecker",
"Worm",
"Wren",
"Xerinae",
"X-ray fish",
"Yak",
"Yellow perch",
"Zebra",
"Zebra finch",
}
) | animals.go | 0.555676 | 0.445771 | animals.go | starcoder |
package softwarebackend
import (
"image"
"image/color"
"image/draw"
"math"
)
// activateBlurTarget swaps in a fresh, transparent image with the same
// bounds as the current render target, stashing the original in blurSwap
// so drawBlurred can composite back onto it later.
func (b *SoftwareBackend) activateBlurTarget() {
	b.blurSwap = b.Image
	b.Image = image.NewRGBA(b.Image.Rect)
}
// drawBlurred box-blurs the offscreen blur target with radius size,
// restores the render target saved by activateBlurTarget, and composites
// the blurred layer over it.
func (b *SoftwareBackend) drawBlurred(size float64) {
	blurred := box3(b.Image, size)
	b.Image = b.blurSwap
	// image.ZP is deprecated; image.Point{} is the idiomatic zero point.
	draw.Draw(b.Image, b.Image.Rect, blurred, image.Point{}, draw.Over)
}
// box3 approximates a Gaussian blur by running three horizontal box
// blurs followed by three vertical ones. The non-integer radius is split
// into three integer radii whose distribution approximates it.
func box3(img *image.RGBA, size float64) *image.RGBA {
	size *= 1 - 1/(size+1) // this just seems to improve the accuracy
	fsize := math.Floor(size)
	sizea := int(fsize)
	sizeb := sizea
	sizec := sizea
	// Distribute the fractional part: past 1/3 one pass grows by a
	// pixel, past 2/3 a second pass does too.
	if size-fsize > 0.333333333 {
		sizeb++
	}
	if size-fsize > 0.666666666 {
		sizec++
	}
	img = box3x(img, sizea)
	img = box3x(img, sizeb)
	img = box3x(img, sizec)
	img = box3y(img, sizea)
	img = box3y(img, sizeb)
	img = box3y(img, sizec)
	return img
}
func box3x(img *image.RGBA, size int) *image.RGBA {
bounds := img.Bounds()
result := image.NewRGBA(bounds)
w, h := bounds.Dx(), bounds.Dy()
for y := 0; y < h; y++ {
if size >= w {
var r, g, b, a float64
for x := 0; x < w; x++ {
col := img.RGBAAt(x, y)
r += float64(col.R)
g += float64(col.G)
b += float64(col.B)
a += float64(col.A)
}
factor := 1.0 / float64(w)
col := color.RGBA{
R: uint8(math.Round(r * factor)),
G: uint8(math.Round(g * factor)),
B: uint8(math.Round(b * factor)),
A: uint8(math.Round(a * factor)),
}
for x := 0; x < w; x++ {
result.SetRGBA(x, y, col)
}
continue
}
var r, g, b, a float64
for x := 0; x <= size; x++ {
col := img.RGBAAt(x, y)
r += float64(col.R)
g += float64(col.G)
b += float64(col.B)
a += float64(col.A)
}
samples := size + 1
x := 0
for {
factor := 1.0 / float64(samples)
col := color.RGBA{
R: uint8(math.Round(r * factor)),
G: uint8(math.Round(g * factor)),
B: uint8(math.Round(b * factor)),
A: uint8(math.Round(a * factor)),
}
result.SetRGBA(x, y, col)
if x >= w-1 {
break
}
if left := x - size; left >= 0 {
col = img.RGBAAt(left, y)
r -= float64(col.R)
g -= float64(col.G)
b -= float64(col.B)
a -= float64(col.A)
samples--
}
x++
if right := x + size; right < w {
col = img.RGBAAt(right, y)
r += float64(col.R)
g += float64(col.G)
b += float64(col.B)
a += float64(col.A)
samples++
}
}
}
return result
}
// box3y applies a one-dimensional vertical box blur of radius size to
// img and returns a new image; the vertical mirror of box3x. A running
// per-column sum makes each output pixel O(1) instead of O(size).
func box3y(img *image.RGBA, size int) *image.RGBA {
	bounds := img.Bounds()
	result := image.NewRGBA(bounds)
	w, h := bounds.Dx(), bounds.Dy()
	for x := 0; x < w; x++ {
		// Degenerate case: the window spans the whole column, so every
		// pixel receives the column average.
		if size >= h {
			var r, g, b, a float64
			for y := 0; y < h; y++ {
				col := img.RGBAAt(x, y)
				r += float64(col.R)
				g += float64(col.G)
				b += float64(col.B)
				a += float64(col.A)
			}
			factor := 1.0 / float64(h)
			col := color.RGBA{
				R: uint8(math.Round(r * factor)),
				G: uint8(math.Round(g * factor)),
				B: uint8(math.Round(b * factor)),
				A: uint8(math.Round(a * factor)),
			}
			for y := 0; y < h; y++ {
				result.SetRGBA(x, y, col)
			}
			continue
		}
		// Prime the window with rows 0..size.
		var r, g, b, a float64
		for y := 0; y <= size; y++ {
			col := img.RGBAAt(x, y)
			r += float64(col.R)
			g += float64(col.G)
			b += float64(col.B)
			a += float64(col.A)
		}
		samples := size + 1
		y := 0
		for {
			factor := 1.0 / float64(samples)
			col := color.RGBA{
				R: uint8(math.Round(r * factor)),
				G: uint8(math.Round(g * factor)),
				B: uint8(math.Round(b * factor)),
				A: uint8(math.Round(a * factor)),
			}
			result.SetRGBA(x, y, col)
			if y >= h-1 {
				break
			}
			// Slide down one row: drop the row leaving the window, add
			// the one entering it (each only if inside the column).
			if top := y - size; top >= 0 {
				col = img.RGBAAt(x, top)
				r -= float64(col.R)
				g -= float64(col.G)
				b -= float64(col.B)
				a -= float64(col.A)
				samples--
			}
			y++
			if bottom := y + size; bottom < h {
				col = img.RGBAAt(x, bottom)
				r += float64(col.R)
				g += float64(col.G)
				b += float64(col.B)
				a += float64(col.A)
				samples++
			}
		}
	}
	return result
} | backend/softwarebackend/blur.go | 0.689515 | 0.50116 | blur.go | starcoder |
package fitness
import (
"math"
"math/rand"
"github.com/200sc/geva/env"
)
// FourPeaks represents a problem where there are four explicit
// maxima in the search space and two of the maxima can hide the
// other two.
func FourPeaks(t int) func(e *env.F) int {
	return func(e *env.F) int {
		// Sample a bitstring from e: each element is read as the
		// probability of drawing a 1 (rand.Float64() < *f).
		leadingOnes := 0
		for _, f := range *e {
			if rand.Float64() < *f {
				leadingOnes++
			} else {
				break
			}
		}
		// Count trailing zeroes; each comparison draws a fresh random
		// number, so head and tail are sampled independently.
		trailingZeroes := 0
		for i := len(*e) - 1; i >= 0; i-- {
			f := (*e)[i]
			if rand.Float64() > *f {
				trailingZeroes++
			} else {
				break
			}
		}
		base := int(math.Max(float64(leadingOnes), float64(trailingZeroes)))
		// Bonus of len(*e) when both end-runs exceed the threshold t.
		if trailingZeroes > t && leadingOnes > t {
			base += len(*e)
		}
		// NOTE(review): the value is inverted as ((2n-t) - base), which
		// suggests lower-is-better fitness — confirm against the caller.
		return ((2 * len(*e)) - t) - base
	}
}
// SixPeaks is the six-peaks variant of FourPeaks: the bonus is awarded
// when either the (leading ones, trailing zeroes) pair or the
// (leading zeroes, trailing ones) pair both exceed the threshold t.
func SixPeaks(t int) func(e *env.F) int {
	return func(e *env.F) int {
		leadingOnes, leadingZeroes, trailingOnes, trailingZeroes := bsEndlengths(e)
		// NOTE(review): base considers only leadingOnes/trailingZeroes,
		// not the mirrored runs — confirm this matches the intended
		// six-peaks definition.
		base := int(math.Max(float64(leadingOnes), float64(trailingZeroes)))
		if (trailingZeroes > t && leadingOnes > t) ||
			(trailingOnes > t && leadingZeroes > t) {
			base += len(*e)
		}
		return ((2 * len(*e)) - t) - base
	}
}
// bsEndlengths samples a bitstring from e (each element read as the
// probability of a 1) and returns the end-run lengths:
// (leading ones, leading zeroes, trailing ones, trailing zeroes).
// The sampled first/last bit decides which run of each pair is counted,
// so at most one of each pair is non-zero. Every comparison draws a
// fresh random number, so the two ends are sampled independently.
func bsEndlengths(e *env.F) (int, int, int, int) {
	leadingOnes := 0
	leadingZeroes := 0
	if rand.Float64() < e.Get(0) {
		// First bit sampled as 1: count the leading run of ones.
		leadingOnes++
		for i := 1; i < len(*e); i++ {
			if rand.Float64() < e.Get(i) {
				leadingOnes++
			} else {
				break
			}
		}
	} else {
		// First bit sampled as 0: count the leading run of zeroes.
		leadingZeroes++
		for i := 1; i < len(*e); i++ {
			if rand.Float64() > e.Get(i) {
				leadingZeroes++
			} else {
				break
			}
		}
	}
	trailingOnes := 0
	trailingZeroes := 0
	if rand.Float64() < e.Get(len(*e)-1) {
		// Last bit sampled as 1: count the trailing run of ones.
		trailingOnes++
		for i := len(*e) - 2; i > -1; i-- {
			if rand.Float64() < e.Get(i) {
				trailingOnes++
			} else {
				break
			}
		}
	} else {
		// Last bit sampled as 0: count the trailing run of zeroes.
		trailingZeroes++
		for i := len(*e) - 2; i > -1; i-- {
			if rand.Float64() > e.Get(i) {
				trailingZeroes++
			} else {
				break
			}
		}
	}
	return leadingOnes, leadingZeroes, trailingOnes, trailingZeroes
}
// Related problems:
// K-Coloring | eda/fitness/peaks.go | 0.564939 | 0.412796 | peaks.go | starcoder |
package search_in_rotated_sorted_array
/*
33. 搜索旋转排序数组
https://leetcode-cn.com/problems/search-in-rotated-sorted-array
假设按照升序排序的数组在预先未知的某个点上进行了旋转。
( 例如,数组 [0,1,2,4,5,6,7] 可能变为 [4,5,6,7,0,1,2] )。
搜索一个给定的目标值,如果数组中存在这个目标值,则返回它的索引,否则返回 -1 。
你可以假设数组中不存在重复的元素。
你的算法时间复杂度必须是 O(log n) 级别。
示例 1:
输入: nums = [4,5,6,7,0,1,2], target = 0
输出: 4
示例 2:
输入: nums = [4,5,6,7,0,1,2], target = 3
输出: -1
*/
// search locates target in an ascending array that was rotated at an
// unknown pivot, returning its index or -1. Classic closed-interval
// binary search ("template one"): every step compares against nums[0]
// to decide which rotated segment the target lies in.
func search(nums []int, target int) int {
	lo, hi := 0, len(nums)-1
	for lo <= hi {
		mid := lo + (hi-lo)/2
		if nums[mid] == target {
			return mid
		}
		if nums[0] <= target {
			// target is in the left (larger-valued) segment; move right
			// only while mid is also in that segment and below target.
			if nums[mid] >= nums[0] && nums[mid] < target {
				lo = mid + 1
			} else {
				hi = mid - 1
			}
		} else if nums[mid] < nums[0] && nums[mid] > target {
			// target is in the right segment and mid overshoots it.
			hi = mid - 1
		} else {
			lo = mid + 1
		}
	}
	return -1
}
// search1 solves the same rotated-array lookup with a half-open
// [lo, hi) interval ("template two"): the loop narrows until lo == hi
// and a final candidate check outside the loop handles the last index.
func search1(nums []int, target int) int {
	lo, hi := 0, len(nums)
	for lo < hi {
		mid := lo + (hi-lo)/2
		if nums[mid] == target {
			return mid
		}
		if nums[0] <= target {
			// target lies in the left (larger-valued) segment.
			if nums[mid] >= nums[0] && nums[mid] < target {
				lo = mid + 1
			} else {
				hi = mid
			}
		} else if nums[mid] < nums[0] && nums[mid] > target {
			hi = mid
		} else {
			lo = mid + 1
		}
	}
	if lo < len(nums) && nums[lo] == target {
		return lo
	}
	return -1
}
// search2 is the "template three" binary search over the rotated array:
// the loop keeps at least two candidates (left+1 < right) and both
// survivors are checked explicitly after the loop.
func search2(nums []int, target int) int {
	if len(nums) == 0 {
		return -1
	}
	left, right := 0, len(nums)-1
	for left+1 < right {
		mid := left + (right-left)/2
		switch {
		case nums[mid] == target:
			return mid
		case nums[0] <= target:
			// target lies in the left (larger-valued) segment.
			if nums[mid] >= nums[0] && nums[mid] < target {
				left = mid
			} else {
				right = mid
			}
		default:
			// target lies in the right (post-pivot) segment.
			if nums[mid] < nums[0] && nums[mid] > target {
				right = mid
			} else {
				left = mid
			}
		}
	}
	// The loop exits with up to two unchecked candidates.
	if nums[left] == target {
		return left
	}
	if nums[right] == target {
		return right
	}
	return -1
} | solutions/search-in-rotated-sorted-array/d.go | 0.745028 | 0.571348 | d.go | starcoder |
package graphics
import (
"fmt"
mgl "github.com/go-gl/mathgl/mgl32"
"github.com/inkyblackness/shocked-client/opengl"
)
// fillRectVertexShaderSource transforms the 2D vertex position by the
// projection matrix.
var fillRectVertexShaderSource = `
#version 150
precision mediump float;
in vec2 vertexPosition;
uniform mat4 projectionMatrix;
void main(void) {
gl_Position = projectionMatrix * vec4(vertexPosition, 0.0, 1.0);
}
`

// fillRectFragmentShaderSource writes the uniform fill color for every
// fragment.
var fillRectFragmentShaderSource = `
#version 150
precision mediump float;
uniform vec4 color;
out vec4 fragColor;
void main(void) {
fragColor = color;
}
`
// RectangleRenderer renders rectangular shapes with a solid-fill shader.
type RectangleRenderer struct {
	gl               opengl.OpenGl
	projectionMatrix *mgl.Mat4
	// program is the linked fill shader; vao captures its vertex layout.
	program uint32
	vao     *opengl.VertexArrayObject
	// vertexPositionBuffer holds the six vertices (two triangles) of the
	// most recently filled rectangle.
	vertexPositionBuffer uint32
	vertexPositionAttrib int32
	// Cached uniform locations of the fill shader.
	projectionMatrixUniform opengl.Matrix4Uniform
	colorUniform            opengl.Vector4Uniform
}
// NewRectangleRenderer returns a new instance of a RectangleRenderer.
// It links the fill shader program, allocates the vertex buffer and
// wires the vertex layout into a VAO. Panics if the shader fails to
// link.
func NewRectangleRenderer(gl opengl.OpenGl, projectionMatrix *mgl.Mat4) *RectangleRenderer {
	program, programErr := opengl.LinkNewStandardProgram(gl, fillRectVertexShaderSource, fillRectFragmentShaderSource)
	if programErr != nil {
		// Bug fix: the message previously said "BitmapTextureRenderer",
		// a copy/paste leftover that misattributed the failure.
		panic(fmt.Errorf("RectangleRenderer shader failed: %v", programErr))
	}
	renderer := &RectangleRenderer{
		gl:                      gl,
		projectionMatrix:        projectionMatrix,
		program:                 program,
		vao:                     opengl.NewVertexArrayObject(gl, program),
		vertexPositionBuffer:    gl.GenBuffers(1)[0],
		vertexPositionAttrib:    gl.GetAttribLocation(program, "vertexPosition"),
		projectionMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "projectionMatrix")),
		colorUniform:            opengl.Vector4Uniform(gl.GetUniformLocation(program, "color"))}
	// Record the vertex layout once; the VAO replays it on each use.
	renderer.vao.WithSetter(func(gl opengl.OpenGl) {
		gl.EnableVertexAttribArray(uint32(renderer.vertexPositionAttrib))
		gl.BindBuffer(opengl.ARRAY_BUFFER, renderer.vertexPositionBuffer)
		gl.VertexAttribOffset(uint32(renderer.vertexPositionAttrib), 2, opengl.FLOAT, false, 0, 0)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	})
	return renderer
}
// Dispose clears any open resources: the VAO, the shader program and
// the vertex buffer. The renderer must not be used afterwards.
func (renderer *RectangleRenderer) Dispose() {
	renderer.vao.Dispose()
	renderer.gl.DeleteProgram(renderer.program)
	renderer.gl.DeleteBuffers([]uint32{renderer.vertexPositionBuffer})
}
// Fill renders a rectangle filled with a solid color. The rectangle is
// uploaded as two triangles (six vertices) and drawn with the fill
// shader under the configured projection matrix.
func (renderer *RectangleRenderer) Fill(left, top, right, bottom float32, fillColor Color) {
	gl := renderer.gl
	{
		// Two triangles covering the rectangle.
		var vertices = []float32{
			left, top,
			right, top,
			left, bottom,
			left, bottom,
			right, top,
			right, bottom}
		gl.BindBuffer(opengl.ARRAY_BUFFER, renderer.vertexPositionBuffer)
		gl.BufferData(opengl.ARRAY_BUFFER, len(vertices)*4, vertices, opengl.STATIC_DRAW)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	}
	renderer.vao.OnShader(func() {
		renderer.projectionMatrixUniform.Set(gl, renderer.projectionMatrix)
		renderer.colorUniform.Set(gl, fillColor.AsVector())
		gl.DrawArrays(opengl.TRIANGLES, 0, 6)
	})
} | src/github.com/inkyblackness/shocked-client/graphics/RectangleRenderer.go | 0.843477 | 0.498596 | RectangleRenderer.go | starcoder |
package kamakiri
import (
"math"
"time"
)
const (
	// deg2Rad is the ratio of degrees to radians.
	deg2Rad = math.Pi / 180.0
	// epsilon is a small tolerance used for contact calculations.
	epsilon float64 = 0.000001
	// k is 1/3, precomputed as a constant.
	k float64 = 1.0 / 3
)
// World is an abstraction of the globals from the original Physac lib:
// solver tuning, fixed-step timing state and the body/contact pools.
type World struct {
	// Solver tuning knobs.
	CollisionIterations   int
	PenetrationAllowance  float64
	PenetrationCorrection float64
	// Fixed time-step bookkeeping (see RunStep).
	DeltaTime   time.Duration
	CurrentTime time.Duration
	Accumulator time.Duration
	StepsCount  uint
	// GravityForce is the global gravity vector (default {0, -9.81}).
	GravityForce XY
	// Pools of simulated bodies and per-step contacts.
	Bodies   []*Body
	Contacts []*Contact
}
// NewWorld returns a World initialised with the library defaults:
// 100 collision iterations, earth-like gravity {0, -9.81} and a fixed
// 1/600 s physics step. Timing counters start at their zero values.
func NewWorld() *World {
	return &World{
		CollisionIterations:   100,
		PenetrationAllowance:  0.05,
		PenetrationCorrection: 0.4,
		DeltaTime:             time.Second / 600,
		GravityForce:          XY{0.0, -9.81},
		Bodies:                make([]*Body, 0),
		Contacts:              make([]*Contact, 0),
	}
}
// Delta returns the fixed physics step DeltaTime expressed in
// milliseconds as a float64.
func (w *World) Delta() float64 {
	return float64(w.DeltaTime) / float64(time.Millisecond)
}
// RunStep advances the simulation by delta of wall-clock time, draining
// the accumulated time in fixed DeltaTime-sized physics steps so the
// simulation stays deterministic regardless of frame rate.
func (w *World) RunStep(delta time.Duration) {
	// Bank the elapsed time, then consume it one fixed step at a time.
	for w.Accumulator += delta; w.Accumulator >= w.DeltaTime; w.Accumulator -= w.DeltaTime {
		w.PhysicsStep()
	}
}
// PhysicsStep performs physics steps calculations (dynamics, collisions
// and position corrections) for one DeltaTime.
func (w *World) PhysicsStep() {
	// Update current steps count
	w.StepsCount++
	// Clear previous generated collisions information
	w.Contacts = w.Contacts[:0]
	// Reset physics bodies grounded state
	for _, body := range w.Bodies {
		body.IsGrounded = false
	}
	// Generate new collision information for every unordered body pair;
	// two static bodies (both with zero inverse mass) cannot collide.
	for i, bodyA := range w.Bodies {
		for j := i + 1; j < len(w.Bodies); j++ {
			bodyB := w.Bodies[j]
			if bodyA.InverseMass() == 0 && bodyB.InverseMass() == 0 {
				continue
			}
			contact := w.NewContact(bodyA, bodyB)
			contact.solve()
			if contact.Count > 0 {
				// Create a new contact with same information as
				// previously solved contact and add it to the contacts
				// pool last slot.
				// NOTE(review): w.NewContact presumably registers the
				// contact in w.Contacts, so both the probe contact and
				// this populated copy end up in the pool — confirm
				// against NewContact's implementation.
				contact2 := w.NewContact(bodyA, bodyB)
				contact2.Penetration = contact.Penetration
				contact2.Normal = contact.Normal
				contact2.Contacts = [2]XY{
					contact.Contacts[0],
					contact.Contacts[1],
				}
				contact2.Count = contact.Count
				contact2.Restitution = contact.Restitution
				contact2.DynamicFriction = contact.DynamicFriction
				contact2.StaticFriction = contact.StaticFriction
			}
		}
	}
	// Integrate forces to physics bodies
	for _, body := range w.Bodies {
		body.integrateForces()
	}
	// Initialize physics contacts to solve collisions
	for i := 0; i < len(w.Contacts); i++ {
		w.Contacts[i].initialize()
	}
	// Integrate physics collisions impulses iteratively; more iterations
	// give a stiffer, more accurate solve at higher cost.
	for i := 0; i < w.CollisionIterations; i++ {
		for j := 0; j < len(w.Contacts); j++ {
			w.Contacts[j].integrateImpulses()
		}
	}
	// Integrate velocity to physics bodies
	for _, body := range w.Bodies {
		body.integrateVelocity()
	}
	// Correct physics bodies positions based on contacts collision
	// information
	for i := 0; i < len(w.Contacts); i++ {
		w.Contacts[i].correctPositions()
	}
	// Clear accumulated forces/torque so the next step starts fresh.
	for _, body := range w.Bodies {
		body.Force = XY{0, 0}
		body.Torque = 0.0
	}
} | world.go | 0.83622 | 0.493348 | world.go | starcoder |
package coloralgorithms
/**
Shamelessly stolen from Apache commons math
*/
import (
"errors"
"time"
"math"
"math/rand"
)
// Represent a data point. The length of the array specifies the dimensions of the point
type Point []float64
// Just a cleaner name for a set of points
type Cluster []*Point
// A holder for a cluster and its center
type CentroidCluster struct {
Center *Point
Points *Cluster
}
// function used to calculate the distance between two points
type DistanceMeasure func(a, b *Point) (float64, error)
func Euclidean(a, b *Point) (float64, error) {
if len(*a) != len(*b) {
return 0, errors.New("Cannot calculate distance between points of different dimensions")
}
acc := float64(0)
for i,_ := range *a {
acc += math.Pow((*a)[i] - (*b)[i], 2.0)
}
return math.Sqrt(acc), nil
}
// EmptyClusterStrategy decides which point becomes the replacement
// center when a cluster ends up empty during an iteration.
type EmptyClusterStrategy func(Config, []*CentroidCluster) *Point

// GetPointFromLargestVarianceCluster returns a random point taken from
// the non-empty cluster whose member-to-center distances have the
// largest population variance; if no cluster beats zero variance it
// falls back to the first non-empty cluster.
func GetPointFromLargestVarianceCluster(config Config, clusters []*CentroidCluster) *Point {
	maxVariance := 0.0
	var selectedCluster *CentroidCluster = nil
	for _, cluster := range clusters {
		if len(*cluster.Points) > 0 {
			variance := PopulationVariance(calculateDistances(config, cluster.Points, cluster.Center))
			if variance > maxVariance {
				maxVariance = variance
				selectedCluster = cluster
			}
		}
	}
	// Fallback: every candidate cluster had zero variance.
	if selectedCluster == nil {
		for _, cluster := range clusters {
			if len(*cluster.Points) > 0 {
				selectedCluster = cluster
				break
			}
		}
	}
	// NOTE(review): a fresh time-seeded rand.Rand per call weakens
	// randomness under rapid successive calls; consider a shared source.
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	randomIndex := r.Intn(len(*selectedCluster.Points))
	selectedPoint := (*selectedCluster.Points)[randomIndex]
	return selectedPoint
}
// calculateDistances returns the distance from every member of cluster
// to point, using the configured distance measure.
func calculateDistances(config Config, cluster *Cluster, point *Point) []float64 {
	var distances []float64
	for _, member := range *cluster {
		// TODO: Handle error (kept from the original implementation).
		d, _ := config.Measure(member, point)
		distances = append(distances, d)
	}
	return distances
}
// Config bundles the tunables for a FindClusters run.
type Config struct {
	K             int   /* number of clusters */
	MaxIterations int32 /* a negative value means no max */
	Measure       DistanceMeasure
	EmptyStrategy EmptyClusterStrategy
}

// DEFAULT_CONFIG supplies the stock measure and empty-cluster strategy.
// NOTE(review): K is -1, which is not a usable cluster count — callers
// appear to be expected to override K before use; confirm.
var DEFAULT_CONFIG Config = Config{-1, math.MaxInt32, Euclidean, GetPointFromLargestVarianceCluster}
// FindClusters partitions points into config.K clusters using Lloyd's
// algorithm with k-means++ style seeding. It errors when there are fewer
// points than requested clusters. Iteration stops when no point changes
// cluster and no cluster had to be repaired, or when the iteration
// budget is exhausted.
func FindClusters(config Config, points []*Point) ([]*CentroidCluster, error) {
	if len(points) < config.K {
		return nil, errors.New("Too few datapoints")
	}
	clusters := chooseInitialCenters(config, points)
	// assignments[i] is the index of the cluster point i belongs to.
	assignments := make([]int, len(points))
	assignPointsToClusters(config, clusters, points, assignments)
	maxIterations := config.MaxIterations
	if maxIterations < 0 {
		// Bug fix: Config documents "a negative value means no max", but
		// a negative bound previously made the loop run zero times.
		// Cap at MaxInt32 instead (mirrors Apache Commons Math).
		maxIterations = math.MaxInt32
	}
	for i := int32(0); i < maxIterations; i++ {
		// Recompute each cluster's center; an empty cluster gets a
		// replacement point from the configured strategy.
		emptyCluster := false
		var newClusters []*CentroidCluster
		for _, cluster := range clusters {
			var newCenter *Point
			if len(*cluster.Points) == 0 {
				emptyCluster = true
				newCenter = config.EmptyStrategy(config, clusters)
			} else {
				newCenter = centroidOf(cluster.Points, cluster.Center, len(*cluster.Center))
			}
			newClusters = append(newClusters, &CentroidCluster{newCenter, &Cluster{}})
		}
		pointsThatChangedCluster := assignPointsToClusters(config, newClusters, points, assignments)
		clusters = newClusters
		// Converged: stable assignment and no empty cluster to repair.
		if pointsThatChangedCluster == 0 && !emptyCluster {
			return clusters, nil
		}
	}
	return clusters, nil
}
// chooseInitialCenters seeds config.K clusters with the k-means++
// strategy: the first center is uniform-random, and each subsequent
// center is drawn with probability proportional to its squared distance
// from the nearest already-chosen center.
func chooseInitialCenters(config Config, points []*Point) []*CentroidCluster {
	numPoints := len(points)
	taken := make([]bool, numPoints)
	var resultSet []*CentroidCluster
	random := rand.New(rand.NewSource(time.Now().UnixNano()))
	// First center: uniform over all points.
	firstPointIndex := random.Intn(numPoints)
	firstPoint := points[firstPointIndex]
	resultSet = append(resultSet, &CentroidCluster{firstPoint, &Cluster{}})
	taken[firstPointIndex] = true
	// minDistSquared[i] = squared distance from point i to its nearest
	// chosen center so far.
	minDistSquared := make([]float64, numPoints)
	for i := 0; i < numPoints; i++ {
		if i != firstPointIndex {
			// TODO: Handle error
			d, _ := config.Measure(firstPoint, points[i])
			minDistSquared[i] = d * d
		}
	}
	for len(resultSet) < config.K {
		distSqSum := float64(0)
		for i := 0; i < numPoints; i++ {
			if !taken[i] {
				distSqSum += minDistSquared[i]
			}
		}
		// Bug fix: the threshold must be uniform in [0, distSqSum); the
		// previous NormFloat64 drew from a standard normal (possibly
		// negative or > 1), which badly skewed the k-means++ sampling.
		r := random.Float64() * distSqSum
		nextPointIndex := -1
		sum := 0.0
		for i := 0; i < numPoints; i++ {
			if !taken[i] {
				sum += minDistSquared[i]
				if sum >= r {
					nextPointIndex = i
					break
				}
			}
		}
		// Floating-point round-off can leave the threshold unmet; fall
		// back to the last still-untaken point.
		if nextPointIndex == -1 {
			for i := numPoints - 1; i >= 0; i-- {
				if !taken[i] {
					nextPointIndex = i
					break
				}
			}
		}
		if nextPointIndex >= 0 {
			nextPoint := points[nextPointIndex]
			resultSet = append(resultSet, &CentroidCluster{nextPoint, &Cluster{}})
			taken[nextPointIndex] = true
			// Fold the new center into the nearest-center distances.
			if len(resultSet) < config.K {
				for j := 0; j < numPoints; j++ {
					if !taken[j] {
						// TODO: Handle error
						d, _ := config.Measure(nextPoint, points[j])
						dSqr := d * d
						if dSqr < minDistSquared[j] {
							minDistSquared[j] = dSqr
						}
					}
				}
			}
		} else {
			// No untaken point remains; stop early.
			break
		}
	}
	return resultSet
}
func centroidOf(cluster *Cluster, center *Point, dimensions int) *Point {
var centroid Point
for i := 0; i < dimensions; i++ {
centroid = append(centroid, 0.0)
}
for _, point := range *cluster {
for i := 0; i < dimensions; i++ {
centroid[i] += (*point)[i]
}
}
for i := 0; i < len(centroid); i++ {
centroid[i] = centroid[i] / float64(len(*cluster))
}
return ¢roid
}
// assignPointsToClusters moves every point into its nearest cluster, records
// the assignment, and reports how many points ended up in a different cluster
// than before.
func assignPointsToClusters(config Config, clusters []*CentroidCluster, points []*Point, assignments []int) int {
	changed := 0
	for i, p := range points {
		nearest := getNearestCluster(config, clusters, p)
		if nearest != assignments[i] {
			changed++
		}
		members := clusters[nearest].Points
		*members = append(*members, p)
		assignments[i] = nearest
	}
	return changed
}
// getNearestCluster returns the index of the cluster whose center is closest
// to the given point under the configured distance measure.
func getNearestCluster(config Config, clusters []*CentroidCluster, point *Point) int {
	best := 0
	bestDistance := math.MaxFloat64
	for i, cluster := range clusters {
		// TODO: Handle error
		d, _ := config.Measure(point, cluster.Center)
		if d < bestDistance {
			bestDistance = d
			best = i
		}
	}
	return best
}
// Quality summarizes how tight one cluster is.
type Quality struct {
	rootSquaredError float64 // mean distance from member points to the cluster center
	dataPoints       int     // number of points in the cluster
}
func CalculateClusterQuality(config Config, clusters []*CentroidCluster) []Quality {
var qualities []Quality
for _, cluster := range clusters {
clusterQuality := 0.0
for _, point := range *cluster.Points {
// TODO: Handle error
distance,_ := config.Measure(point, cluster.Center)
clusterQuality += math.Sqrt(distance * distance)
}
clusterQuality /= float64(len(*cluster.Points))
qualities = append(qualities, Quality{clusterQuality, len(*cluster.Points)})
}
return qualities
} | src/coloralgorithms/k-means.go | 0.580828 | 0.513425 | k-means.go | starcoder |
package p16
import (
c "s13g.com/euler/common"
)
// Solve answers both puzzle parts: part A after one dance, part B after a
// billion dances.
func Solve(input string) (string, string) {
	return solve(input, 1000000000)
}
// solve compiles the dance moves once, dances once for part A, finds the
// cycle length (iterations until the part-A order reappears), and then only
// performs loops modulo the cycle for part B.
func solve(input string, loops int) (string, string) {
	moves := parse(input)
	// One full dance gives the part-A answer.
	_, resultA := loopIt(1, "", moves)
	// Count dances until resultA shows up again.
	cycle, _ := loopIt(loops, resultA, moves)
	// Only the remainder of the cycle matters for part B.
	_, resultB := loopIt(loops%cycle, "", moves)
	return resultA, resultB
}
// loopIt performs up to num full dances over the programs a..p, stopping
// early if (after the first iteration) the arrangement equals match.
// It returns the number of completed-iteration index at the stop point and
// the final arrangement.
func loopIt(num int, match string, funcs []func(arr []rune) []rune) (int, string) {
	programs := []rune("abcdefghijklmnop")
	var i int
	for i = 0; i < num; i++ {
		for _, apply := range funcs {
			programs = apply(programs)
		}
		if i > 0 && toString(programs) == match {
			break
		}
	}
	return i, toString(programs)
}
// toString renders the program order as a string.
func toString(arr []rune) string {
	// A direct conversion is a single O(n) allocation; the original appended
	// one character at a time with +=, which is quadratic.
	return string(arr)
}
// partner swaps the positions of programs a and b (AoC "pA/B" move),
// mutating arr in place and returning it.
func partner(arr []rune, a rune, b rune) []rune {
	for i := range arr {
		switch arr[i] {
		case a:
			arr[i] = b
		case b:
			arr[i] = a
		}
	}
	return arr
}
// exchange swaps the programs at positions a and b (AoC "xA/B" move),
// mutating arr in place and returning it.
func exchange(arr []rune, a int, b int) []rune {
	tmp := arr[a]
	arr[a] = arr[b]
	arr[b] = tmp
	return arr
}
// spin rotates the order so the last num programs move to the front
// (AoC "sX" move). The result is a freshly allocated slice: the original
// appended one sub-slice of arr onto another, which can write into the
// input's spare backing-array capacity and return a slice aliasing it.
// num is reduced modulo len(arr), so a full rotation is also accepted.
func spin(arr []rune, num int) []rune {
	if len(arr) == 0 {
		return arr
	}
	num %= len(arr)
	out := make([]rune, 0, len(arr))
	out = append(out, arr[len(arr)-num:]...)
	return append(out, arr[:len(arr)-num]...)
}
func parse(input string) []func(arr []rune) []rune {
ops := c.SplitByCommaTrim(input)
result := make([]func(arr []rune) []rune, len(ops))
for i, move := range ops {
switch move[0] {
case 's':
x := c.ToIntOrPanic(move[1:])
result[i] = func(arr []rune) []rune {
return spin(arr, x)
}
case 'p':
parts := c.SplitTrim(move[1:], '/')
result[i] = func(arr []rune) []rune {
return partner(arr, rune(parts[0][0]), rune(parts[1][0]))
}
case 'x':
parts := c.SplitTrim(move[1:], '/')
a, b := c.ToIntOrPanic(parts[0]), c.ToIntOrPanic(parts[1])
result[i] = func(arr []rune) []rune {
return exchange(arr, a, b)
}
}
}
return result
} | go/aoc17/p16/p16.go | 0.579995 | 0.401394 | p16.go | starcoder |
package main
import (
"math"
"github.com/prometheus/client_golang/prometheus"
)
// VastAiPriceStatsCollector exposes aggregate on-demand price statistics for
// vast.ai GPU offers as Prometheus gauge vectors.
type VastAiPriceStatsCollector struct {
	ondemand_price_median_dollars *prometheus.GaugeVec // median price, per GPU model
	ondemand_price_p10_dollars *prometheus.GaugeVec // 10th percentile price, per GPU model
	ondemand_price_p90_dollars *prometheus.GaugeVec // 90th percentile price, per GPU model
	ondemand_price_per_100dlperf_median_dollars *prometheus.GaugeVec // median price per 100 DLPerf, all models
	ondemand_price_per_100dlperf_p10_dollars *prometheus.GaugeVec // 10th percentile price per 100 DLPerf, all models
	ondemand_price_per_100dlperf_p90_dollars *prometheus.GaugeVec // 90th percentile price per 100 DLPerf, all models
	gpu_count *prometheus.GaugeVec // number of GPUs offered, per GPU model
}
// newVastAiPriceStatsCollector constructs all gauge vectors with their metric
// names and label sets. Per-GPU metrics carry a gpu_name label; the DLPerf
// aggregates are labelled only by verified/rented status.
// NOTE(review): the p10/p90 metric names are inconsistent — the per-GPU pair
// uses "10th_percentile"/"90th_percentile" while the DLPerf pair uses
// "p10"/"p90". Renaming would break existing dashboards, so left as-is.
func newVastAiPriceStatsCollector() VastAiPriceStatsCollector {
	namespace := "vastai"
	labelNames := []string{"verified", "rented"}
	labelNamesWithGpu := []string{"gpu_name", "verified", "rented"}
	return VastAiPriceStatsCollector{
		ondemand_price_median_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_median_dollars",
			Help:      "Median on-demand price per GPU model",
		}, labelNamesWithGpu),
		ondemand_price_p10_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_10th_percentile_dollars",
			Help:      "10th percentile of on-demand prices per GPU model",
		}, labelNamesWithGpu),
		ondemand_price_p90_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_90th_percentile_dollars",
			Help:      "90th percentile of on-demand prices per GPU model",
		}, labelNamesWithGpu),
		ondemand_price_per_100dlperf_median_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_per_100dlperf_median_dollars",
			Help:      "Median on-demand price per 100 DLPerf points among all GPU models",
		}, labelNames),
		ondemand_price_per_100dlperf_p10_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_per_100dlperf_p10_dollars",
			Help:      "10th percentile of on-demand price per 100 DLPerf point among all GPU models",
		}, labelNames),
		ondemand_price_per_100dlperf_p90_dollars: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "ondemand_price_per_100dlperf_p90_dollars",
			Help:      "90th percentile of on-demand prices per 100 DLPerf points among all GPU models",
		}, labelNames),
		gpu_count: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: namespace,
			Name:      "gpu_count",
			Help:      "Number of GPUs offered on site",
		}, labelNamesWithGpu),
	}
}
// Describe implements prometheus.Collector by forwarding the description of
// every gauge vector to the channel.
func (e *VastAiPriceStatsCollector) Describe(ch chan<- *prometheus.Desc) {
	vecs := []*prometheus.GaugeVec{
		e.ondemand_price_median_dollars,
		e.ondemand_price_p10_dollars,
		e.ondemand_price_p90_dollars,
		e.ondemand_price_per_100dlperf_median_dollars,
		e.ondemand_price_per_100dlperf_p10_dollars,
		e.ondemand_price_per_100dlperf_p90_dollars,
		e.gpu_count,
	}
	for _, vec := range vecs {
		vec.Describe(ch)
	}
}
// Collect implements prometheus.Collector by forwarding the current metrics
// of every gauge vector to the channel.
func (e *VastAiPriceStatsCollector) Collect(ch chan<- prometheus.Metric) {
	vecs := []*prometheus.GaugeVec{
		e.ondemand_price_median_dollars,
		e.ondemand_price_p10_dollars,
		e.ondemand_price_p90_dollars,
		e.ondemand_price_per_100dlperf_median_dollars,
		e.ondemand_price_per_100dlperf_p10_dollars,
		e.ondemand_price_per_100dlperf_p90_dollars,
		e.gpu_count,
	}
	for _, vec := range vecs {
		vec.Collect(ch)
	}
}
// UpdateFrom refreshes every gauge from the cached offer list. If gpuNames is
// non-nil, per-GPU metrics are only updated for those models; the DLPerf
// aggregates are only computed when no filter is given. NaN statistics delete
// the corresponding gauge so stale values do not linger.
// NOTE(review): stats values are divided by 100 to obtain dollars, which
// suggests prices are stored in cents — confirm against OfferStats.
func (e *VastAiPriceStatsCollector) UpdateFrom(offerCache *OfferCache, gpuNames []string) {
	groupedOffers := offerCache.machines.groupByGpu()
	// updateMetrics pushes one OfferStats into the per-GPU gauges; needCount
	// controls whether gpu_count is also written (false for "any" rollups,
	// which would double-count).
	updateMetrics := func(labels prometheus.Labels, stats OfferStats, needCount bool) {
		if needCount {
			e.gpu_count.With(labels).Set(float64(stats.Count))
		}
		if !math.IsNaN(stats.Median) {
			e.ondemand_price_median_dollars.With(labels).Set(stats.Median / 100)
		} else {
			e.ondemand_price_median_dollars.Delete(labels)
		}
		if !math.IsNaN(stats.PercentileLow) && !math.IsNaN(stats.PercentileHigh) {
			e.ondemand_price_p10_dollars.With(labels).Set(stats.PercentileLow / 100)
			e.ondemand_price_p90_dollars.With(labels).Set(stats.PercentileHigh / 100)
		} else {
			e.ondemand_price_p10_dollars.Delete(labels)
			e.ondemand_price_p90_dollars.Delete(labels)
		}
	}
	filterByGpuName := gpuNames != nil
	isMyGpu := map[string]bool{}
	if filterByGpuName {
		for _, name := range gpuNames {
			isMyGpu[name] = true
		}
	}
	// Per-GPU metrics across the rented/verified label matrix.
	for gpuName, offers := range groupedOffers {
		if filterByGpuName && !isMyGpu[gpuName] {
			continue
		}
		stats := offers.stats3(false)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "yes", "rented": "yes"}, stats.Rented.Verified, true)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "no", "rented": "yes"}, stats.Rented.Unverified, true)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "any", "rented": "yes"}, stats.Rented.All, false)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "yes", "rented": "no"}, stats.Available.Verified, true)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "no", "rented": "no"}, stats.Available.Unverified, true)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "any", "rented": "no"}, stats.Available.All, false)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "yes", "rented": "any"}, stats.All.Verified, false)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "no", "rented": "any"}, stats.All.Unverified, false)
		updateMetrics(prometheus.Labels{"gpu_name": gpuName, "verified": "any", "rented": "any"}, stats.All.All, false)
	}
	// per-100-dlperf stats
	if !filterByGpuName {
		// Same shape as updateMetrics but for the DLPerf-normalized gauges;
		// there is no count gauge at this aggregation level.
		updateMetrics2 := func(labels prometheus.Labels, stats OfferStats) {
			if !math.IsNaN(stats.Median) {
				e.ondemand_price_per_100dlperf_median_dollars.With(labels).Set(stats.Median / 100)
			} else {
				e.ondemand_price_per_100dlperf_median_dollars.Delete(labels)
			}
			if !math.IsNaN(stats.PercentileLow) && !math.IsNaN(stats.PercentileHigh) {
				e.ondemand_price_per_100dlperf_p10_dollars.With(labels).Set(stats.PercentileLow / 100)
				e.ondemand_price_per_100dlperf_p90_dollars.With(labels).Set(stats.PercentileHigh / 100)
			} else {
				e.ondemand_price_per_100dlperf_p10_dollars.Delete(labels)
				e.ondemand_price_per_100dlperf_p90_dollars.Delete(labels)
			}
		}
		stats := offerCache.machines.stats3(true)
		updateMetrics2(prometheus.Labels{"verified": "yes", "rented": "yes"}, stats.Rented.Verified)
		updateMetrics2(prometheus.Labels{"verified": "no", "rented": "yes"}, stats.Rented.Unverified)
		updateMetrics2(prometheus.Labels{"verified": "any", "rented": "yes"}, stats.Rented.All)
		updateMetrics2(prometheus.Labels{"verified": "yes", "rented": "no"}, stats.Available.Verified)
		updateMetrics2(prometheus.Labels{"verified": "no", "rented": "no"}, stats.Available.Unverified)
		updateMetrics2(prometheus.Labels{"verified": "any", "rented": "no"}, stats.Available.All)
		updateMetrics2(prometheus.Labels{"verified": "yes", "rented": "any"}, stats.All.Verified)
		updateMetrics2(prometheus.Labels{"verified": "no", "rented": "any"}, stats.All.Unverified)
		updateMetrics2(prometheus.Labels{"verified": "any", "rented": "any"}, stats.All.All)
	}
}
package frame
import (
"math"
"github.com/spaolacci/murmur3"
)
// init registers comparison (Less) and seeded-hash (HashWithSeed) operations
// for slices of every builtin element type. Strings are hashed directly with
// murmur3; integer types up to 32 bits are widened and routed through hash32;
// 64-bit integers (and the platform-width uint/int/uintptr) go through
// hash64; floats are hashed over their IEEE-754 bit patterns so equal values
// hash equally.
func init() {
	RegisterOps(func(slice []string) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return murmur3.Sum32WithSeed([]byte(slice[i]), seed)
			},
		}
	})
	// uint is hashed as 64-bit so 32- and 64-bit platforms agree on width.
	RegisterOps(func(slice []uint) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(uint64(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []uint8) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []uint16) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []uint32) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []uint64) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(uint64(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []int) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(uint64(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []int8) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []int16) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []int32) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(uint32(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []int64) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(uint64(slice[i]), seed)
			},
		}
	})
	// Floats hash their bit patterns; note +0.0 and -0.0 have different bits.
	RegisterOps(func(slice []float32) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash32(math.Float32bits(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []float64) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(math.Float64bits(slice[i]), seed)
			},
		}
	})
	RegisterOps(func(slice []uintptr) Ops {
		return Ops{
			Less: func(i, j int) bool { return slice[i] < slice[j] },
			HashWithSeed: func(i int, seed uint32) uint32 {
				return hash64(uint64(slice[i]), seed)
			},
		}
	})
}
// hash32 hashes a 32-bit value by serializing it little-endian and running
// seeded murmur3 over the four bytes. (The previous comment attributed this
// to Bob Jenkins' integer hash, which the implementation does not use.)
func hash32(x, seed uint32) uint32 {
	var b [4]byte
	b[0] = byte(x)
	b[1] = byte(x >> 8)
	b[2] = byte(x >> 16)
	b[3] = byte(x >> 24)
	return murmur3.Sum32WithSeed(b[:], seed)
}
// hash64 hashes a 64-bit value by serializing it little-endian and running
// seeded murmur3 over the eight bytes, producing a 32-bit hash. (The previous
// comment claimed it "uses hash32", which it does not.)
func hash64(x uint64, seed uint32) uint32 {
	var b [8]byte
	b[0] = byte(x)
	b[1] = byte(x >> 8)
	b[2] = byte(x >> 16)
	b[3] = byte(x >> 24)
	b[4] = byte(x >> 32)
	b[5] = byte(x >> 40)
	b[6] = byte(x >> 48)
	b[7] = byte(x >> 56)
	return murmur3.Sum32WithSeed(b[:], seed)
}
package json
import (
"encoding/base64"
"errors"
"github.com/francoispqt/gojay"
"github.com/jexia/semaphore/pkg/specs/types"
)
// ErrUnknownType is returned when the given spec type is not recognized.
var ErrUnknownType = errors.New("unknown type")
// AddTypeKey encodes value under key into the encoder, dispatching on the
// spec type. Unknown types are silently skipped (original behavior,
// preserved; confirm callers never rely on an error here).
func AddTypeKey(encoder *gojay.Encoder, key string, typed types.Type, value interface{}) {
	switch typed {
	case types.Double:
		encoder.AddFloat64Key(key, Float64Empty(value))
	case types.Int64:
		encoder.AddInt64Key(key, Int64Empty(value))
	case types.Uint64:
		encoder.AddUint64Key(key, Uint64Empty(value))
	case types.Fixed64:
		encoder.AddUint64Key(key, Uint64Empty(value))
	case types.Int32:
		encoder.AddInt32Key(key, Int32Empty(value))
	case types.Uint32:
		encoder.AddUint32Key(key, Uint32Empty(value))
	case types.Fixed32:
		// BUG FIX: DecodeType produces a uint32 for Fixed32, but the original
		// called Uint64Empty, whose val.(uint64) assertion panics on uint32.
		encoder.AddUint32Key(key, Uint32Empty(value))
	case types.Float:
		encoder.AddFloat32Key(key, Float32Empty(value))
	case types.String:
		encoder.AddStringKey(key, StringEmpty(value))
	case types.Enum:
		encoder.AddStringKey(key, StringEmpty(value))
	case types.Bool:
		encoder.AddBoolKey(key, BoolEmpty(value))
	case types.Bytes:
		encoder.AddStringKey(key, BytesBase64Empty(value))
	case types.Sfixed32:
		encoder.AddInt32Key(key, Int32Empty(value))
	case types.Sfixed64:
		encoder.AddInt64Key(key, Int64Empty(value))
	case types.Sint32:
		encoder.AddInt32Key(key, Int32Empty(value))
	case types.Sint64:
		encoder.AddInt64Key(key, Int64Empty(value))
	}
}
// AddType encodes value as an array element into the encoder, dispatching on
// the spec type. nil encodes as JSON null so array positions are preserved.
// Unknown types are silently skipped (original behavior, preserved).
func AddType(encoder *gojay.Encoder, typed types.Type, value interface{}) {
	// do not skip NULL values while encoding array elements
	if value == nil {
		encoder.AddNull()
		return
	}
	switch typed {
	case types.Double:
		encoder.AddFloat64(Float64Empty(value))
	case types.Int64:
		encoder.AddInt64(Int64Empty(value))
	case types.Uint64:
		encoder.AddUint64(Uint64Empty(value))
	case types.Fixed64:
		encoder.AddUint64(Uint64Empty(value))
	case types.Int32:
		encoder.AddInt32(Int32Empty(value))
	case types.Uint32:
		encoder.AddUint32(Uint32Empty(value))
	case types.Fixed32:
		// BUG FIX: DecodeType produces a uint32 for Fixed32, but the original
		// called Uint64Empty, whose val.(uint64) assertion panics on uint32.
		encoder.AddUint32(Uint32Empty(value))
	case types.Float:
		encoder.AddFloat32(Float32Empty(value))
	case types.String:
		encoder.AddString(StringEmpty(value))
	case types.Enum:
		encoder.AddString(StringEmpty(value))
	case types.Bool:
		encoder.AddBool(BoolEmpty(value))
	case types.Bytes:
		encoder.AddString(BytesBase64Empty(value))
	case types.Sfixed32:
		encoder.AddInt32(Int32Empty(value))
	case types.Sfixed64:
		encoder.AddInt64(Int64Empty(value))
	case types.Sint32:
		encoder.AddInt32(Int32Empty(value))
	case types.Sint64:
		encoder.AddInt64(Int64Empty(value))
	}
}
// DecodeType decodes a single value of the given spec type from the decoder,
// returning the decoded Go value. Returns ErrUnknownType for unrecognized
// types. Note the concrete result types (uint32 for Fixed32, int64 for
// Sint64, ...) are what the Add* encoders above expect back.
func DecodeType(decoder *gojay.Decoder, prop types.Type) (interface{}, error) {
	switch prop {
	case types.Double:
		var value float64
		err := decoder.AddFloat64(&value)
		return value, err
	case types.Float:
		var value float32
		err := decoder.AddFloat32(&value)
		return value, err
	case types.Int64:
		var value int64
		err := decoder.AddInt64(&value)
		return value, err
	case types.Uint64:
		var value uint64
		err := decoder.AddUint64(&value)
		return value, err
	case types.Fixed64:
		var value uint64
		err := decoder.AddUint64(&value)
		return value, err
	case types.Int32:
		var value int32
		err := decoder.AddInt32(&value)
		return value, err
	case types.Uint32:
		var value uint32
		err := decoder.AddUint32(&value)
		return value, err
	case types.Fixed32:
		var value uint32
		err := decoder.AddUint32(&value)
		return value, err
	case types.String:
		var value string
		err := decoder.AddString(&value)
		return value, err
	case types.Bool:
		var value bool
		err := decoder.AddBool(&value)
		return value, err
	case types.Bytes:
		var raw string
		if err := decoder.AddString(&raw); err != nil {
			return nil, err
		}
		// BUG FIX: the original allocated len(raw) bytes, ignored the decoded
		// length returned by Decode, and returned the whole buffer — leaving
		// trailing zero bytes after the payload (base64 output is shorter
		// than its input). DecodeString sizes the result exactly.
		value, err := base64.StdEncoding.DecodeString(raw)
		return value, err
	case types.Sfixed32:
		var value int32
		err := decoder.AddInt32(&value)
		return value, err
	case types.Sfixed64:
		var value int64
		err := decoder.AddInt64(&value)
		return value, err
	case types.Sint32:
		var value int32
		err := decoder.AddInt32(&value)
		return value, err
	case types.Sint64:
		var value int64
		err := decoder.AddInt64(&value)
		return value, err
	}
	return nil, ErrUnknownType
}
// StringEmpty returns val as a string, or "" when val is nil.
// Panics if val is non-nil and not a string (original behavior).
func StringEmpty(val interface{}) string {
	if val != nil {
		return val.(string)
	}
	return ""
}
// BoolEmpty returns val as a bool, or false when val is nil.
// Panics if val is non-nil and not a bool (original behavior).
func BoolEmpty(val interface{}) bool {
	if val != nil {
		return val.(bool)
	}
	return false
}
// Int32Empty returns val as an int32, or 0 when val is nil.
// Panics if val is non-nil and not an int32 (original behavior).
func Int32Empty(val interface{}) int32 {
	if val != nil {
		return val.(int32)
	}
	return 0
}
// Uint32Empty returns val as a uint32, or 0 when val is nil.
// Panics if val is non-nil and not a uint32 (original behavior).
func Uint32Empty(val interface{}) uint32 {
	if val != nil {
		return val.(uint32)
	}
	return 0
}
// Int64Empty returns val as an int64, or 0 when val is nil.
// Panics if val is non-nil and not an int64 (original behavior).
func Int64Empty(val interface{}) int64 {
	if val != nil {
		return val.(int64)
	}
	return 0
}
// Uint64Empty returns val as a uint64, or 0 when val is nil.
// Panics if val is non-nil and not a uint64 (original behavior).
func Uint64Empty(val interface{}) uint64 {
	if val != nil {
		return val.(uint64)
	}
	return 0
}
// Float64Empty returns val widened to float64. Accepts float32 or float64;
// anything else (including nil) yields 0.
func Float64Empty(val interface{}) float64 {
	if f, ok := val.(float64); ok {
		return f
	}
	if f, ok := val.(float32); ok {
		return float64(f)
	}
	return 0
}
// Float32Empty returns val narrowed to float32. Accepts float32 or float64;
// anything else (including nil) yields 0.
func Float32Empty(val interface{}) float32 {
	if f, ok := val.(float32); ok {
		return f
	}
	if f, ok := val.(float64); ok {
		return float32(f)
	}
	return 0
}
// BytesBase64Empty returns val ([]byte) encoded as a standard base64 string,
// or "" when val is nil. Panics if val is non-nil and not a []byte
// (original behavior).
func BytesBase64Empty(val interface{}) string {
	if val != nil {
		return base64.StdEncoding.EncodeToString(val.([]byte))
	}
	return ""
}
package course
import "github.com/pkg/errors"
// Course is the aggregate root of the course domain: a titled course in some
// period, with a creator, collaborator/student membership sets, and numbered
// tasks.
type Course struct {
	id             string // unique course identifier
	title          string
	period         Period
	started        bool   // whether the course has started
	creatorID      string // academic who created the course
	collaborators  map[string]bool // membership set of collaborator ids
	students       map[string]bool // membership set of student ids
	tasks          map[int]*Task   // tasks keyed by their number
	nextTaskNumber int             // number to assign to the next task
}
// CreationParams carries everything needed to create (or extend) a Course.
type CreationParams struct {
	ID            string   // required: new course id
	Creator       Academic // required: academic creating the course
	Title         string   // required for NewCourse; optional override in Extend
	Period        Period   // required for NewCourse; optional override in Extend
	Started       bool
	Collaborators []string // initial collaborator ids
	Students      []string // initial student ids
}
// Sentinel validation errors returned by NewCourse and Extend; grouped so
// IsInvalidCourseParametersError can recognize them.
var (
	ErrEmptyCourseID    = errors.New("empty course id")
	ErrZeroCreator      = errors.New("empty course creator id")
	ErrEmptyCourseTitle = errors.New("empty course title")
	ErrZeroCoursePeriod = errors.New("zero course period")
)
func IsInvalidCourseParametersError(err error) bool {
return errors.Is(err, ErrEmptyCourseID) ||
errors.Is(err, ErrZeroCreator) ||
errors.Is(err, ErrEmptyCourseTitle) ||
errors.Is(err, ErrZeroCoursePeriod)
}
// NewCourse validates the creation parameters and constructs a new Course.
// It returns one of the ErrXxx sentinels for a missing required parameter,
// or the creator's own error when they are not allowed to create courses.
func NewCourse(params CreationParams) (*Course, error) {
	if params.ID == "" {
		return nil, ErrEmptyCourseID
	}
	if params.Creator.IsZero() {
		return nil, ErrZeroCreator
	}
	if err := params.Creator.canCreateCourse(); err != nil {
		return nil, err
	}
	if params.Title == "" {
		return nil, ErrEmptyCourseTitle
	}
	if params.Period.IsZero() {
		return nil, ErrZeroCoursePeriod
	}
	crs := &Course{
		id:        params.ID,
		creatorID: params.Creator.ID(),
		title:     params.Title,
		period:    params.Period,
		started:   params.Started,
		// Reuse unmarshalIDs for the membership sets instead of duplicating
		// the loop, keeping construction consistent with Extend and
		// UnmarshalFromDatabase.
		collaborators:  unmarshalIDs(params.Collaborators),
		students:       unmarshalIDs(params.Students),
		tasks:          make(map[int]*Task),
		nextTaskNumber: 1,
	}
	return crs, nil
}
// Extend creates a new Course based on this one: the new course keeps this
// course's title and the following period unless params override them, merges
// this course's collaborators/students with those in params, and copies all
// tasks with freshly compacted numbers (1..len). The creator must be allowed
// to create courses and must have teacher access to this course.
func (c *Course) Extend(params CreationParams) (*Course, error) {
	if params.ID == "" {
		return nil, ErrEmptyCourseID
	}
	if params.Creator.IsZero() {
		return nil, ErrZeroCreator
	}
	if err := params.Creator.canCreateCourse(); err != nil {
		return nil, err
	}
	if err := c.canAcademicEditWithAccess(params.Creator, TeacherAccess); err != nil {
		return nil, err
	}
	// Empty title/zero period mean "inherit": title from this course, period
	// from the one following this course's period.
	extendedCourseTitle := c.Title()
	if params.Title != "" {
		extendedCourseTitle = params.Title
	}
	extendedCoursePeriod := c.period.next()
	if !params.Period.IsZero() {
		extendedCoursePeriod = params.Period
	}
	crs := &Course{
		id:             params.ID,
		creatorID:      params.Creator.ID(),
		title:          extendedCourseTitle,
		period:         extendedCoursePeriod,
		started:        params.Started,
		collaborators:  unmarshalIDs(append(c.Collaborators(), params.Collaborators...)),
		students:       unmarshalIDs(append(c.Students(), params.Students...)),
		tasks:          make(map[int]*Task, len(c.tasks)),
		nextTaskNumber: len(c.tasks) + 1,
	}
	// Renumber the copied tasks sequentially starting at 1.
	for i, t := range c.tasksCopy() {
		number := i + 1
		crs.tasks[number] = t
		crs.tasks[number].number = number
	}
	return crs, nil
}
// MustNewCourse is like NewCourse but panics on invalid parameters; intended
// for tests and static initialization.
func MustNewCourse(params CreationParams) *Course {
	crs, err := NewCourse(params)
	if err == nil {
		return crs
	}
	panic(err)
}
// ID returns the course identifier.
func (c *Course) ID() string {
	return c.id
}

// Title returns the course title.
func (c *Course) Title() string {
	return c.title
}

// Period returns the period the course runs in.
func (c *Course) Period() Period {
	return c.period
}

// Started reports whether the course has started.
func (c *Course) Started() bool {
	return c.started
}

// CreatorID returns the id of the academic who created the course.
func (c *Course) CreatorID() string {
	return c.creatorID
}
// UnmarshallingParams is the persistence representation of a Course, used by
// UnmarshalFromDatabase to rebuild the aggregate without validation.
type UnmarshallingParams struct {
	ID            string
	Title         string
	Period        Period
	Started       bool
	CreatorID     string
	Collaborators []string
	Students      []string
	Tasks         []UnmarshallingTaskParams
}

// UnmarshallingTaskParams is the persistence representation of a single task.
type UnmarshallingTaskParams struct {
	Number      int
	Title       string
	Description string
	TaskType    TaskType
	Deadline    Deadline
	TestPoints  []TestPoint
	TestData    []TestData
}
// UnmarshalFromDatabase unmarshalls Course from the database.
// It should be used only for unmarshalling from the database!
// Using UnmarshalFromDatabase may put domain into the invalid state!
func UnmarshalFromDatabase(params UnmarshallingParams) *Course {
	tasks, lastNumber := unmarshalTasks(params.Tasks)
	return &Course{
		id:             params.ID,
		title:          params.Title,
		period:         params.Period,
		started:        params.Started,
		creatorID:      params.CreatorID,
		collaborators:  unmarshalIDs(params.Collaborators),
		students:       unmarshalIDs(params.Students),
		tasks:          tasks,
		nextTaskNumber: lastNumber + 1,
	}
}
// unmarshalIDs converts a list of identifiers into a membership set;
// duplicates collapse to one entry.
func unmarshalIDs(ids []string) map[string]bool {
	set := make(map[string]bool, len(ids))
	for _, id := range ids {
		set[id] = true
	}
	return set
}
func unmarshalTasks(taskParams []UnmarshallingTaskParams) (map[int]*Task, int) {
tasks := make(map[int]*Task, len(taskParams))
lastNumber := 0
for _, tp := range taskParams {
tasks[tp.Number] = &Task{
number: tp.Number,
title: tp.Title,
description: tp.Description,
taskType: tp.TaskType,
optional: taskOptional{
deadline: tp.Deadline,
testData: tp.TestData,
testPoints: tp.TestPoints,
},
}
if tp.Number > lastNumber {
lastNumber = tp.Number
}
}
return tasks, lastNumber
} | internal/domain/course/course.go | 0.554953 | 0.42185 | course.go | starcoder |
package main
// cell-based version
// * split region into CELL_SIZE x CELL_SIZE grid cells
// * pick node closest to center in each grid cell
// * iteratively find shortest paths between cell centers that are CELL_DISTANCE cells apart (Manhattan distance)
// * cut PADDING off path and retain the rest
// * also retain edges based on shortest paths from vertices that interface between existing and inferred maps
// * some post-processing steps to remove some edges
import (
"github.com/mitroadmaps/gomapinfer/common"
"encoding/json"
"fmt"
"io/ioutil"
"math"
"os"
"sync"
)
// CELL_SIZE is the width/height of one grid cell, in graph coordinate units.
const CELL_SIZE = 512

// PADDING is how much length to cut off each end of a shortest path before
// retaining its edges.
const PADDING = 1024

// CELL_DISTANCE is the Manhattan distance, in cells, between cell centers
// that get connected by shortest-path queries.
const CELL_DISTANCE = 10
// abs returns the absolute value of x.
func abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}
// manh returns the Manhattan distance between two grid cells.
func manh(c1 [2]int, c2 [2]int) int {
	dx := abs(c1[0] - c2[0])
	dy := abs(c1[1] - c2[1])
	return dx + dy
}
// main prunes an inferred road graph: it keeps only edges that lie on
// shortest paths between grid-cell centers (and on shortest paths from
// interface nodes), then removes short dead-end segments.
// Usage: centrality3 in-name [probs-name [iface-name]] out-name
func main() {
	var inName, probsName, ifaceName, outName string
	if len(os.Args) == 3 {
		inName = os.Args[1]
		outName = os.Args[2]
	} else if len(os.Args) == 4 {
		inName = os.Args[1]
		probsName = os.Args[2]
		outName = os.Args[3]
	} else if len(os.Args) == 5 {
		inName = os.Args[1]
		probsName = os.Args[2]
		ifaceName = os.Args[3]
		outName = os.Args[4]
	} else {
		fmt.Println("usage: centrality3 in-name [probs-name [iface-name]] out-name")
		return
	}
	graph, err := common.ReadGraph(inName)
	if err != nil {
		panic(err)
	}
	// edgeProbs holds a per-edge confidence (JSON int array); defaults to 50
	// for every edge when no probs file is given.
	var edgeProbs []int
	var ifaceNodes map[int]bool
	if probsName != "" {
		edgeProbs = func() []int {
			bytes, err := ioutil.ReadFile(probsName)
			if err != nil {
				panic(err)
			}
			var edgeProbs []int
			if err := json.Unmarshal(bytes, &edgeProbs); err != nil {
				panic(err)
			}
			return edgeProbs
		}()
	} else {
		for _ = range graph.Edges {
			edgeProbs = append(edgeProbs, 50)
		}
	}
	// ifaceNodes: node ids that interface between the existing and inferred
	// maps (JSON int array), loaded into a set.
	if ifaceName != "" {
		ifaceNodes = func() map[int]bool {
			bytes, err := ioutil.ReadFile(ifaceName)
			if err != nil {
				panic(err)
			}
			var ifaceList []int
			if err := json.Unmarshal(bytes, &ifaceList); err != nil {
				panic(err)
			}
			ifaceNodes := make(map[int]bool)
			for _, nodeID := range ifaceList {
				ifaceNodes[nodeID] = true
			}
			return ifaceNodes
		}()
	}
	rsGraph, edgeToRS, rsNodeMap := graph.GetRoadSegmentGraph()
	// compute edge lengths in rsGraph for shortest path
	// the length of each edge in the rs is scaled based on edgeProbs
	// (higher-probability edges appear shorter, so paths prefer them).
	rsLengths := make(map[int]float64)
	for _, rsEdge := range rsGraph.Edges {
		rs := edgeToRS[rsEdge.ID]
		var length float64 = 0
		for _, edge := range rs.Edges {
			length += edge.Segment().Length() * (2 - float64(edgeProbs[edge.ID]) / 100)
		}
		rsLengths[rsEdge.ID] = length
	}
	// convert ifaceNodes to nodes in rsGraph
	rsIfaceNodes := make(map[int]bool)
	for nodeID := range ifaceNodes {
		if rsNodeMap[nodeID] == nil {
			continue
		}
		rsIfaceNodes[rsNodeMap[nodeID].ID] = true
	}
	// find cell centers for rsGraph: bucket nodes into CELL_SIZE cells and,
	// for populated cells (>=8 nodes), pick the node closest to the cell's
	// geometric center.
	fmt.Println("finding cell centers")
	cellNodes := make(map[[2]int][]*common.Node)
	for _, node := range rsGraph.Nodes {
		cell := [2]int{
			int(math.Floor(node.Point.X / CELL_SIZE)),
			int(math.Floor(node.Point.Y / CELL_SIZE)),
		}
		cellNodes[cell] = append(cellNodes[cell], node)
	}
	cellCenters := make(map[[2]int]*common.Node)
	for cell, nodes := range cellNodes {
		if len(nodes) < 8 {
			continue
		}
		p := common.Point{
			(float64(cell[0]) + 0.5) * CELL_SIZE,
			(float64(cell[1]) + 0.5) * CELL_SIZE,
		}
		var bestNode *common.Node
		var bestDistance float64
		for _, node := range nodes {
			d := node.Point.Distance(p)
			if bestNode == nil || d < bestDistance {
				bestNode = node
				bestDistance = d
			}
		}
		cellCenters[cell] = bestNode
	}
	// get shortest paths
	fmt.Println("computing shortest paths between centers")
	nthreads := 6
	goodRSEdges := addBetweenClusters(rsGraph, rsLengths, cellCenters, nthreads)
	// postprocessing: add shortest path from iface nodes to goodEdges
	fmt.Println("adding shortest paths from iface nodes")
	addFromIface(rsGraph, rsLengths, goodRSEdges, rsIfaceNodes, nthreads)
	// convert goodRSEdges to edges in the original graph
	goodEdges := make(map[int]bool)
	for edgeID := range goodRSEdges {
		rs := edgeToRS[edgeID]
		for _, edge := range rs.Edges {
			goodEdges[edge.ID] = true
		}
	}
	// filter bad edges (everything not retained above)
	badEdges := make(map[int]bool)
	for _, edge := range graph.Edges {
		if !goodEdges[edge.ID] {
			badEdges[edge.ID] = true
		}
	}
	origEdges := len(graph.Edges)
	graph, nodeMap, _ := graph.FilterEdgesWithMaps(badEdges)
	fmt.Printf("filter from %d to %d edges\n", origEdges, len(graph.Edges))
	graph.MakeBidirectional()
	// update ifaceNodes for new nodes after filtering
	newIfaceNodes := make(map[int]bool)
	for nodeID := range ifaceNodes {
		if nodeMap[nodeID] == nil {
			continue
		}
		newIfaceNodes[nodeMap[nodeID].ID] = true
	}
	// more postprocessing: remove dead-end non-iface road segments
	// (short segments whose endpoint has only one outgoing edge).
	fmt.Println("removing dead-ends")
	roadSegments := graph.GetRoadSegments()
	badEdges = make(map[int]bool)
	for _, rs := range roadSegments {
		var deadEndNode *common.Node
		if len(rs.Src().Out) == 1 {
			deadEndNode = rs.Src()
		} else if len(rs.Dst().Out) == 1 {
			deadEndNode = rs.Dst()
		} else {
			continue
		}
		// Keep dead ends that touch an interface node or are long.
		if newIfaceNodes[deadEndNode.ID] || rs.Length() > CELL_SIZE {
			continue
		}
		for _, edge := range rs.Edges {
			badEdges[edge.ID] = true
		}
	}
	graph = graph.FilterEdges(badEdges)
	if err := graph.Write(outName); err != nil {
		panic(err)
	}
}
// addBetweenClusters computes, in parallel, shortest paths between every pair
// of cell centers exactly CELL_DISTANCE cells apart (Manhattan), trims
// PADDING off both ends of each path, and returns the set of edge IDs on the
// retained middle portions.
func addBetweenClusters(graph *common.Graph, edgeLengths map[int]float64, cellCenters map[[2]int]*common.Node, nthreads int) map[int]bool {
	type job struct {
		cell   [2]int
		center *common.Node
	}
	jobch := make(chan job)
	donech := make(chan map[int]bool)
	// Each worker accumulates its own good-edge set; the sets are merged
	// after all workers finish, so no locking is needed here.
	for i := 0; i < nthreads; i++ {
		go func() {
			goodEdges := make(map[int]bool)
			for job := range jobch {
				// One Dijkstra per source center, bounded so it reaches all
				// targets CELL_DISTANCE cells away with some slack.
				result := graph.ShortestPath(job.center, common.ShortestPathParams{
					MaxDistance: CELL_SIZE * 2 * CELL_DISTANCE * 1.25,
					EdgeLengths: edgeLengths,
				})
				for cell, center := range cellCenters {
					if manh(job.cell, cell) != CELL_DISTANCE {
						continue
					}
					if result.Remaining[center.ID] {
						continue
					}
					if _, ok := result.Distances[center.ID]; !ok {
						continue
					}
					path := result.GetFullPathTo(center)
					// Walk the path; keep only edges that are at least
					// PADDING away from both endpoints.
					var totalDistance float64 = 0
					for i := 0; i < len(path) - 1; i++ {
						totalDistance += path[i].Point.Distance(path[i + 1].Point)
					}
					var curDistance float64 = 0
					for i := 0; i < len(path) - 1; i++ {
						if curDistance >= PADDING {
							rsEdge := graph.FindEdge(path[i], path[i + 1])
							goodEdges[rsEdge.ID] = true
						}
						curDistance += path[i].Point.Distance(path[i + 1].Point)
						if curDistance >= totalDistance - PADDING {
							break
						}
					}
				}
			}
			donech <- goodEdges
		}()
	}
	count := 0
	for cell, center := range cellCenters {
		jobch <- job{cell, center}
		fmt.Printf("... %d/%d\n", count, len(cellCenters))
		count++
	}
	close(jobch)
	// Merge per-worker results.
	goodEdges := make(map[int]bool)
	for i := 0; i < nthreads; i++ {
		m := <- donech
		for k := range m {
			goodEdges[k] = true
		}
	}
	return goodEdges
}
// addFromIface connects every interface node to the already-retained network:
// for each iface node it finds the nearest node touching a good edge (within
// CELL_SIZE*2) and adds the connecting shortest path's edges to goodEdges.
// goodEdges is mutated in place; a mutex serializes reads/writes of the
// shared goodEdges/goodNodes maps across workers.
func addFromIface(graph *common.Graph, edgeLengths map[int]float64, goodEdges map[int]bool, ifaceNodes map[int]bool, nthreads int) {
	jobch := make(chan *common.Node)
	donech := make(chan bool)
	var mu sync.Mutex
	// goodNodes: endpoints of currently retained edges; grows as paths are
	// added, so later iface nodes can attach to earlier-added paths.
	goodNodes := make(map[int]bool)
	for edgeID := range goodEdges {
		edge := graph.Edges[edgeID]
		goodNodes[edge.Src.ID] = true
		goodNodes[edge.Dst.ID] = true
	}
	for i := 0; i < nthreads; i++ {
		go func() {
			for node := range jobch {
				// The Dijkstra itself runs unlocked; only the selection and
				// insertion below hold the mutex.
				result := graph.ShortestPath(node, common.ShortestPathParams{
					MaxDistance: CELL_SIZE * 2,
					EdgeLengths: edgeLengths,
				})
				mu.Lock()
				var bestDst *common.Node
				var bestDistance float64
				for otherID, distance := range result.Distances {
					if result.Remaining[otherID] || !goodNodes[otherID] {
						continue
					}
					if bestDst == nil || distance < bestDistance {
						bestDst = graph.Nodes[otherID]
						bestDistance = distance
					}
				}
				if bestDst == nil {
					mu.Unlock()
					continue
				}
				path := result.GetFullPathTo(bestDst)
				for i := 0; i < len(path) - 1; i++ {
					edge := graph.FindEdge(path[i], path[i + 1])
					goodEdges[edge.ID] = true
					goodNodes[edge.Src.ID] = true
					goodNodes[edge.Dst.ID] = true
				}
				mu.Unlock()
			}
			donech <- true
		}()
	}
	count := 0
	for nodeID := range ifaceNodes {
		jobch <- graph.Nodes[nodeID]
		if count % 10 == 0 {
			fmt.Printf("... %d/%d\n", count, len(ifaceNodes))
		}
		count++
	}
	close(jobch)
	for i := 0; i < nthreads; i++ {
		<- donech
	}
}
package magkal
import (
"math"
"../ahrs"
)
// Numerical constants used by the magnetometer calibration code.
const (
	Pi = math.Pi
	Small = 1e-9 // tolerance treated as effectively zero
	Big = 1e9 // value treated as effectively infinite
	Deg = Pi / 180 // degrees-to-radians conversion factor
	AvgMagField = 4390 // nominal magnetic field magnitude; units follow the sensor scale — TODO confirm
)

// MagKalState holds the current magnetometer calibration estimate.
// A raw reading m is corrected componentwise as K[i]*m[i] + L[i]
// (see the MM1/MM2/MM3 entries in updateLogMap).
type MagKalState struct {
	T float64 // Time when state last updated
	K [3]float64 // Scaling factor for magnetometer
	L [3]float64 // Offset for magnetometer
	LogMap map[string]interface{} // Map only for analysis/debugging
}
// NewMagKal builds a MagKalState initialized with the starting scale k and
// offset l, launches the calibration algorithm f in its own goroutine, and
// returns the channels used to feed measurements in and read updated
// states out.
func NewMagKal(k, l [3]float64, f func(MagKalState, chan ahrs.Measurement, chan MagKalState)) (cIn chan ahrs.Measurement, cOut chan MagKalState) {
	state := MagKalState{K: k, L: l, LogMap: map[string]interface{}{}}
	state.updateLogMap(ahrs.NewMeasurement(), state.LogMap)
	cIn = make(chan ahrs.Measurement)
	cOut = make(chan MagKalState)
	go f(state, cIn, cOut)
	return cIn, cOut
}
// updateLogMap fills p with the current value of every logged quantity:
// raw measurement fields, the calibration parameters K and L, and the
// corrected magnetometer readings MM_i = K[i]*M_i + L[i]. Used only for
// analysis/debugging output.
func (s *MagKalState) updateLogMap(m *ahrs.Measurement, p map[string]interface{}) {
	// Each entry maps a log key to a closure extracting that value from
	// the state and/or the measurement.
	var logMapFunc = map[string]func(s *MagKalState, m *ahrs.Measurement) float64{
		"Ta": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.T },
		// WValid is logged as 1/0 since all log values are float64.
		"WValid": func(s *MagKalState, m *ahrs.Measurement) float64 {
			if m.WValid {
				return 1
			}
			return 0
		},
		"T": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.T },
		"TW": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.TW },
		"W1": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.W1 },
		"W2": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.W2 },
		"W3": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.W3 },
		"A1": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.A1 },
		"A2": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.A2 },
		"A3": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.A3 },
		"B1": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.B1 },
		"B2": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.B2 },
		"B3": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.B3 },
		"M1": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.M1 },
		"M2": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.M2 },
		"M3": func(s *MagKalState, m *ahrs.Measurement) float64 { return m.M3 },
		"K1": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[0] },
		"K2": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[1] },
		"K3": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[2] },
		"L1": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.L[0] },
		"L2": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.L[1] },
		"L3": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.L[2] },
		// Corrected magnetometer readings under the current calibration.
		"MM1": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[0]*m.M1 + s.L[0] },
		"MM2": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[1]*m.M2 + s.L[1] },
		"MM3": func(s *MagKalState, m *ahrs.Measurement) float64 { return s.K[2]*m.M3 + s.L[2] },
	}
	for k := range logMapFunc {
		p[k] = logMapFunc[k](s, m)
	}
}
// NormDiff returns the Euclidean norm of the difference v1 - v2,
// i.e. a measure of how different the two 3-vectors are.
func NormDiff(v1, v2 *[3]float64) float64 {
	var sum float64
	for i := range v1 {
		d := v1[i] - v2[i]
		sum += d * d
	}
	return math.Sqrt(sum)
}
// NormVec returns the Euclidean norm (length) of a 3-vector.
func NormVec(v1 [3]float64) float64 {
	sum := v1[0]*v1[0] + v1[1]*v1[1] + v1[2]*v1[2]
	return math.Sqrt(sum)
}
// Package graph provides a graph data structure
// and graph functionality using ObjMetadata as
// vertices in the graph.
package graph
import (
"bytes"
"fmt"
"sigs.k8s.io/cli-utils/pkg/object"
)
// Graph contains a directed set of edges, implemented as
// an adjacency list (map key is the "from" vertex, the slice
// holds its "to" vertices).
type Graph struct {
	// map "from" vertex -> list of "to" vertices
	edges map[object.ObjMetadata][]object.ObjMetadata
}

// Edge encapsulates a pair of vertices describing a
// directed edge From -> To.
type Edge struct {
	From object.ObjMetadata
	To object.ObjMetadata
}
// New returns a pointer to an empty Graph data structure.
func New() *Graph {
	return &Graph{
		edges: map[object.ObjMetadata][]object.ObjMetadata{},
	}
}
// AddVertex ensures the ObjMetadata vertex v exists in the graph,
// giving it an empty adjacency list if it was not present before.
// Adding an existing vertex is a no-op.
func (g *Graph) AddVertex(v object.ObjMetadata) {
	if _, found := g.edges[v]; found {
		return
	}
	g.edges[v] = []object.ObjMetadata{}
}
// AddEdge adds an edge from one ObjMetadata vertex to another. The
// direction of the edge is "from" -> "to". Both endpoints are created
// if they do not already exist, and a duplicate edge is not added.
func (g *Graph) AddEdge(from object.ObjMetadata, to object.ObjMetadata) {
	// Ensure both endpoints exist. AddVertex is a no-op for vertices
	// that are already present, so this replaces the previously
	// duplicated inline existence checks.
	g.AddVertex(from)
	g.AddVertex(to)
	// Add edge "from" -> "to" to the adjacency list if it doesn't
	// already exist.
	if !g.isAdjacent(from, to) {
		g.edges[from] = append(g.edges[from], to)
	}
}
// GetEdges returns the slice of vertex pairs which are
// the directed edges of the graph.
func (g *Graph) GetEdges() []Edge {
	result := make([]Edge, 0)
	for src, dsts := range g.edges {
		for _, dst := range dsts {
			result = append(result, Edge{From: src, To: dst})
		}
	}
	return result
}
// isAdjacent reports whether a directed edge "from" -> "to" exists.
func (g *Graph) isAdjacent(from object.ObjMetadata, to object.ObjMetadata) bool {
	adj, found := g.edges[from]
	if !found {
		// Unknown "from" vertex: no edge can possibly exist.
		return false
	}
	// Scan the adjacency list for the "to" vertex.
	for _, candidate := range adj {
		if candidate == to {
			return true
		}
	}
	return false
}
// Size returns the number of vertices in the graph
// (each key of the adjacency map is one vertex; edges are not counted).
func (g *Graph) Size() int {
	return len(g.edges)
}
// removeVertex deletes the vertex r from the graph along with every
// edge pointing into it.
func (g *Graph) removeVertex(r object.ObjMetadata) {
	// Drop r from every adjacency list it appears in. AddEdge never
	// stores duplicates, so each list holds r at most once and the scan
	// can stop at the first match.
	for from, adj := range g.edges {
		for i := range adj {
			if adj[i] == r {
				g.edges[from] = removeObj(adj, i)
				break
			}
		}
	}
	// Drop the vertex itself (together with its outgoing edges).
	delete(g.edges, r)
}
// removeObj removes the element at index i from adj in O(1) by swapping
// it with the last element and truncating; element order is not
// preserved. The shortened slice is returned.
func removeObj(adj []object.ObjMetadata, i int) []object.ObjMetadata {
	last := len(adj) - 1
	adj[i], adj[last] = adj[last], adj[i]
	return adj[:last]
}
// Sort topologically sorts the graph, returning the vertices in layers:
// the first slice holds every vertex with no outgoing edges (no
// dependencies), the next holds the vertices that depended only on the
// first layer, and so on. A CyclicDependencyError carrying the remaining
// edges is returned if the graph contains a cycle.
// NOTE: sorting consumes the graph — vertices are removed as they are
// emitted, so the receiver is empty afterwards.
func (g *Graph) Sort() ([][]object.ObjMetadata, error) {
	sorted := [][]object.ObjMetadata{}
	for g.Size() > 0 {
		// Identify all the leaf vertices.
		leafVertices := []object.ObjMetadata{}
		for v, adj := range g.edges {
			if len(adj) == 0 {
				leafVertices = append(leafVertices, v)
			}
		}
		// No leaf vertices means cycle in the directed graph,
		// where remaining edges define the cycle.
		if len(leafVertices) == 0 {
			return [][]object.ObjMetadata{}, CyclicDependencyError{
				Edges: g.GetEdges(),
			}
		}
		// Remove all edges to leaf vertices.
		for _, v := range leafVertices {
			g.removeVertex(v)
		}
		sorted = append(sorted, leafVertices)
	}
	return sorted, nil
}
// CyclicDependencyError is returned when the directed graph contains a
// cycle, which makes a topological sort impossible. Edges holds the
// edges remaining in the graph when the cycle was detected.
type CyclicDependencyError struct {
	Edges []Edge
}
func (cde CyclicDependencyError) Error() string {
var errorBuf bytes.Buffer
errorBuf.WriteString("cyclic dependency")
for _, edge := range cde.Edges {
from := fmt.Sprintf("%s/%s", edge.From.Namespace, edge.From.Name)
to := fmt.Sprintf("%s/%s", edge.To.Namespace, edge.To.Name)
errorBuf.WriteString(fmt.Sprintf("\n\t%s -> %s", from, to))
}
return errorBuf.String()
} | pkg/object/graph/graph.go | 0.796807 | 0.468304 | graph.go | starcoder |
package board
// TetrisBlock holds a piece's occupancy pattern together with its
// orientation; the two are kept in sync by Rotate. Pattern is indexed
// [column][row] (len(Pattern) is the width, len(Pattern[0]) the height,
// per Rotate), with true marking an occupied cell.
type TetrisBlock struct {
	Label string
	Type BlockType
	Orientation BlockOrientation
	Colour BlockColour
	Pattern [][]bool
}
// BlockType identifies one of the 5x possible piece shapes.
type BlockType string

// Square is the 2x2 square Tetris block shape.
const Square BlockType = "Square"

// Pipe is the 1x4 straight Tetris block shape.
const Pipe BlockType = "Pipe"

// Tee is the T-shaped (3x2) Tetris block shape.
const Tee BlockType = "Tee"

// Elle is the L-shaped (3x2) Tetris block shape.
const Elle BlockType = "Elle"

// IElle is the mirrored (inverted) L-shaped Tetris block shape.
const IElle BlockType = "InvertedElle"
// Rotate turns the block 90 degrees, advancing its orientation through
// the cycle Up->Right->Down->Left->Up and rebuilding Pattern to match.
// A fresh pattern slice is allocated; the old one is not mutated.
func (blk *TetrisBlock) Rotate() {
	pattern := blk.Pattern
	// Rotate the pattern: a width x height grid becomes height x width,
	// with cell (w, h) moving to (height-h-1, w).
	patwidth := len(pattern)
	patheight := len(pattern[0])
	rpat := make([][]bool, patheight)
	for h := 0; h < patheight; h++ {
		rpat[h] = make([]bool, patwidth)
	}
	for w := 0; w < patwidth; w++ {
		for h := 0; h < patheight; h++ {
			rpat[patheight-h-1][w] = pattern[w][h]
		}
	}
	blk.Pattern = rpat
	// Advance the orientation one step around the cycle.
	switch blk.Orientation {
	case Up:
		blk.Orientation = Right
	case Right:
		blk.Orientation = Down
	case Down:
		blk.Orientation = Left
	case Left:
		blk.Orientation = Up
	}
}
// makeBlockPattern builds the occupancy grid for shape t in its default
// "Up" orientation. The grid is indexed [column][row]; true marks an
// occupied cell.
// NOTE(review): the orientation parameter o is currently ignored — the
// inline comment below says the pattern should be rotated when o != Up,
// but no rotation is performed, so every caller receives the Up pattern.
// Confirm whether this is intentional.
func makeBlockPattern(t BlockType, o BlockOrientation) (pattern [][]bool) {
	//Create a default "UP" and then rotate if not o==Up
	switch t {
	case Square:
		// 2x2 filled square.
		pattern = make([][]bool, 2)
		for i := range pattern {
			pattern[i] = make([]bool, 2)
		}
		pattern[0][0] = true
		pattern[0][1] = true
		pattern[1][0] = true
		pattern[1][1] = true
	case Pipe:
		// Single column of four cells.
		pattern = make([][]bool, 1)
		for i := range pattern {
			pattern[i] = make([]bool, 4)
		}
		pattern[0][0] = true
		pattern[0][1] = true
		pattern[0][2] = true
		pattern[0][3] = true
	case Tee:
		// Three-wide bar with the stem in the middle column.
		pattern = make([][]bool, 3)
		for i := range pattern {
			pattern[i] = make([]bool, 2)
		}
		pattern[0][0] = true
		pattern[1][0] = true
		pattern[1][1] = true
		pattern[2][0] = true
	case Elle:
		// Three-wide bar with the foot at the last column.
		pattern = make([][]bool, 3)
		for i := range pattern {
			pattern[i] = make([]bool, 2)
		}
		pattern[0][0] = true
		pattern[1][0] = true
		pattern[2][0] = true
		pattern[2][1] = true
	case IElle:
		// Three-wide bar with the foot at the first column (mirror of Elle).
		pattern = make([][]bool, 3)
		for i := range pattern {
			pattern[i] = make([]bool, 2)
		}
		pattern[0][0] = true
		pattern[0][1] = true
		pattern[1][0] = true
		pattern[2][0] = true
	}
	return pattern
}
// BlockOrientation describes how the block is applied to the board.
type BlockOrientation string

// Up is the default block orientation.
const Up BlockOrientation = "Up"

// Down is a block orientation.
// BUG FIX: the string values of Down/Left/Right were previously shifted
// one step around the rotation cycle ("Right"/"Down"/"Left"); they now
// match their constant names. In-package code compares these constants
// only symbolically (see Rotate), so rotation behavior is unchanged.
const Down BlockOrientation = "Down"

// Left is a block orientation.
const Left BlockOrientation = "Left"

// Right is a block orientation.
const Right BlockOrientation = "Right"

// BlockColour describes what colour the block has on the board.
type BlockColour string

// Red is a block colour.
const Red BlockColour = "Red"

// Blue is a block colour.
const Blue BlockColour = "Blue"

// Green is a block colour.
const Green BlockColour = "Green"

// Orange is a block colour.
const Orange BlockColour = "Orange"

// Purple is a block colour.
const Purple BlockColour = "Purple"
// MakeTetrisBlock creates a TetrisBlock with the given label, shape,
// colour and orientation.
// NOTE(review): the requested orientation o is stored on the block, but
// the pattern is always built for Up (makeBlockPattern is called with Up,
// and ignores its orientation argument anyway). For o != Up the
// Orientation field and Pattern therefore disagree — confirm whether any
// caller ever passes a non-Up orientation.
func (b *Board) MakeTetrisBlock(label string, t BlockType, c BlockColour, o BlockOrientation) TetrisBlock {
	return TetrisBlock{
		Label: label,
		Type: t,
		Orientation: o,
		Colour: c,
		Pattern: makeBlockPattern(t, Up)}
}
// TeeShape builds a default T Tetris block
func (b *Board) TeeShape(c BlockColour) (blk TetrisBlock) {
blk = b.MakeTetrisBlock("T", Tee, c, Up)
return blk
}
// ElleShape builds a default L Tetris block
func (b *Board) ElleShape(c BlockColour) (blk TetrisBlock) {
blk = b.MakeTetrisBlock("L", Elle, c, Up)
return blk
}
// IElleShape builds a default inverted-L Tetris block
func (b *Board) IElleShape(c BlockColour) (blk TetrisBlock) {
blk = b.MakeTetrisBlock("I", IElle, c, Up)
return blk
}
// PipeShape builds a default | Tetris block
func (b *Board) PipeShape(c BlockColour) (blk TetrisBlock) {
blk = b.MakeTetrisBlock("P", Pipe, c, Up)
return blk
}
// SquareShape builds a default [] Tetris block
func (b *Board) SquareShape(c BlockColour) (blk TetrisBlock) {
blk = b.MakeTetrisBlock("B", Square, c, Up)
return blk
} | 01-tetrisgo/pkg/board/block.go | 0.681197 | 0.495667 | block.go | starcoder |
package main
import (
"fmt"
t "github.com/wallberg/jbtracer"
)
// main renders a fixed scene — three spheres resting on a flattened-sphere
// floor between two angled walls — and writes the image to stdout in PPM
// format.
func main() {
	var material *t.Material
	// Configure the world
	world := t.NewWorld()
	world.Light = t.NewPointLight(t.White, t.NewPoint(-10, 10, -10))
	// Configure the camera: 300x150 pixels, pi/3 field of view, looking
	// from (0, 1.5, -5) toward (0, 1, 0) with +y as "up".
	camera := t.NewCamera(300, 150, t.Pi3)
	camera.Transform = t.ViewTransform(
		t.NewPoint(0, 1.5, -5),
		t.NewPoint(0, 1, 0),
		t.NewVector(0, 1, 0),
	)
	// Add the world objects
	// floor: a sphere squashed almost flat
	floor := t.NewSphere()
	floor.SetTransform(t.Scaling(10, 0.01, 10))
	material = t.NewMaterial()
	material.Color = t.NewColor(1, 0.9, 0.9)
	material.Specular = 0
	floor.SetMaterial(material)
	world.AddObject(floor)
	// left wall: the floor shape rotated upright and turned -pi/4 about y
	leftWall := t.NewSphere()
	leftWall.SetTransform(
		t.Translation(0, 0, 5).Multiply(
			t.Rotation(t.Axis_Y, -1*t.Pi4),
		).Multiply(
			t.Rotation(t.Axis_X, t.Pi2),
		).Multiply(
			t.Scaling(10, 0.01, 10),
		),
	)
	leftWall.SetMaterial(floor.Material())
	world.AddObject(leftWall)
	// right wall: mirror of the left wall (+pi/4 about y)
	rightWall := t.NewSphere()
	rightWall.SetTransform(
		t.Translation(0, 0, 5).Multiply(
			t.Rotation(t.Axis_Y, t.Pi4),
		).Multiply(
			t.Rotation(t.Axis_X, t.Pi2),
		).Multiply(
			t.Scaling(10, 0.01, 10),
		),
	)
	rightWall.SetMaterial(floor.Material())
	world.AddObject(rightWall)
	// middle sphere: unit sphere, green
	middle := t.NewSphere()
	middle.SetTransform(t.Translation(-0.5, 1, 0.5))
	material = t.NewMaterial()
	material.Color = t.NewColor(0.1, 1, 0.5)
	material.Diffuse = 0.7
	material.Specular = 0.3
	middle.SetMaterial(material)
	world.AddObject(middle)
	// right sphere: half-size
	right := t.NewSphere()
	right.SetTransform(
		t.Translation(1.5, 0.5, -0.5).Multiply(
			t.Scaling(0.5, 0.5, 0.5),
		),
	)
	material = t.NewMaterial()
	material.Color = t.NewColor(0.5, 1, 0.1)
	material.Diffuse = 0.7
	material.Specular = 0.3
	right.SetMaterial(material)
	world.AddObject(right)
	// left sphere: third-size
	left := t.NewSphere()
	left.SetTransform(t.Translation(-1.5, 0.33, -0.75).Multiply(
		t.Scaling(0.33, 0.33, 0.33),
	),
	)
	material = t.NewMaterial()
	material.Color = t.NewColor(1, 0.8, 0.1)
	material.Diffuse = 0.7
	material.Specular = 0.3
	left.SetMaterial(material)
	world.AddObject(left)
	// Render the result to a canvas
	canvas := camera.Render(world)
	// Output the PPM image file line by line to stdout
	ppm := canvas.NewPPM()
	for _, line := range *ppm {
		fmt.Print(line)
	}
}
package chans
import "github.com/goki/mat32"
// AKParams control an A-type K Ca channel
type AKParams struct {
Gbar float32 `def:"0.01" desc:"strength of AK current"`
Beta float32 `def:"0.01446,02039" desc:"multiplier for the beta term; 0.01446 for distal, 0.02039 for proximal dendrites"`
Dm float32 `def:"0.5,0.25" desc:"Dm factor: 0.5 for distal, 0.25 for proximal"`
Koff float32 `def:"1.8,1.5" desc:"offset for K, 1.8 for distal, 1.5 for proximal"`
Voff float32 `def:"1,11" desc:"voltage offset for alpha and beta functions: 1 for distal, 11 for proximal"`
Hf float32 `def:"0.1133,0.1112" desc:"h multiplier factor, 0.1133 for distal, 0.1112 for proximal"`
}
// Defaults sets Gbar and the distal-dendrite parameter preset.
func (ap *AKParams) Defaults() {
	ap.Gbar = 0.01
	ap.Distal()
}

// Distal sets the parameters for distal dendrites (Gbar untouched).
func (ap *AKParams) Distal() {
	ap.Beta = 0.01446
	ap.Dm = 0.5
	ap.Koff = 1.8
	ap.Voff = 1
	ap.Hf = 0.1133
}

// Proximal sets the parameters for proximal dendrites (Gbar untouched).
func (ap *AKParams) Proximal() {
	ap.Beta = 0.02039
	ap.Dm = 0.25
	ap.Koff = 1.5
	ap.Voff = 11
	ap.Hf = 0.1112
}
// AlphaFmVK returns the Alpha rate function of biological voltage vbio
// and the K factor (not normalized; vbio must not exceed 0).
func (ap *AKParams) AlphaFmVK(vbio, k float32) float32 {
	return mat32.FastExp(0.03707 * k * (vbio - ap.Voff))
}

// BetaFmVK returns the Beta rate function of vbio and the K factor
// (not normalized; vbio must not exceed 0).
func (ap *AKParams) BetaFmVK(vbio, k float32) float32 {
	return mat32.FastExp(ap.Beta * k * (vbio - ap.Voff))
}

// KFmV returns the K factor from vbio (not normalized; vbio must not
// exceed 0). Always negative: -Koff minus a sigmoid term in (0, 1).
func (ap *AKParams) KFmV(vbio float32) float32 {
	return -ap.Koff - 1.0/(1.0+mat32.FastExp((vbio+40)/5))
}

// HFmV returns the steady-state H gate value from vbio (not normalized;
// vbio must not exceed 0).
func (ap *AKParams) HFmV(vbio float32) float32 {
	return 1.0 / (1.0 + mat32.FastExp(ap.Hf*(vbio+56)))
}

// HTauFmV returns the H gate rate constant in msec from vbio (not
// normalized; vbio must not exceed 0), with a floor of 2 msec.
func (ap *AKParams) HTauFmV(vbio float32) float32 {
	tau := 0.26 * (vbio + 50)
	if tau < 2 {
		tau = 2
	}
	return tau
}

// MFmAlpha returns the steady-state M gate value from alpha.
func (ap *AKParams) MFmAlpha(alpha float32) float32 {
	return 1.0 / (1.0 + alpha)
}

// MTauFmAlphaBeta returns the M gate rate constant in msec from alpha
// and beta; the form guarantees a minimum of 1 msec.
func (ap *AKParams) MTauFmAlphaBeta(alpha, beta float32) float32 {
	return 1 + beta/(ap.Dm*(1+alpha)) // minimum of 1 msec
}
// DMHFmV returns the per-msec changes (dm, dh) of the M and H gate
// variables as a function of the normalized membrane potential v (0-1).
// The biological voltage is clamped to at most 0 before evaluating the
// rate functions, matching their "must not exceed 0" requirement.
func (s *AKParams) DMHFmV(v, m, h float32) (float32, float32) {
	vbio := VFmBio(v)
	if vbio > 0 {
		vbio = 0
	}
	// Rate quantities at this voltage.
	k := ap.KFmV(vbio)
	a := ap.AlphaFmVK(vbio, k)
	b := ap.BetaFmVK(vbio, k)
	mt := ap.MTauFmAlphaBeta(a, b)
	ht := ap.HTauFmV(vbio)
	// First-order relaxation of each gate toward its steady state.
	dm := (ap.MFmAlpha(a) - m) / mt
	dh := (ap.HFmV(vbio) - h) / ht
	return dm, dh
}
// Gak returns the AK net conductance from m, h gates
func (ap *AKParams) Gak(m, h float32) float32 {
return ap.Gbar * m * h
} | chans/ak.go | 0.855806 | 0.424949 | ak.go | starcoder |
package colexec
import "github.com/cockroachdb/cockroach/pkg/col/coldata"
// populateEqChains populates op.scratch.eqChains with indices of tuples from b
// that belong to the same groups. It returns the number of equality chains.
// Passed-in sel is updated to include tuples that are "heads" of the
// corresponding equality chains and op.ht.probeScratch.hashBuffer is adjusted
// accordingly. headToEqChainsID is a scratch space that must contain all
// zeroes and be of at least batchLength length.
//
// populateEqChains_false is the variant for a batch without a selection
// vector: positions 0..batchLength-1 index tuples directly.
const _ = "template_populateEqChains" // template-expansion marker (file appears generated) — do not remove
func populateEqChains_false(
	op *hashAggregator, batchLength int, sel []int, headToEqChainsID []int) int {
	eqChainsCount := 0
	for i, headID := range op.ht.probeScratch.headID[:batchLength] {
		// Since we're essentially probing the batch against itself, headID
		// cannot be 0, so we don't need to check that. What we have here is
		// the tuple at position i belongs to the same equality chain as the
		// tuple at position headID-1.
		// We will use a similar to keyID encoding for eqChains slot - all
		// tuples that should be included in eqChains[i] chain will have
		// eqChainsID = i + 1. headToEqChainsID is a mapping from headID to
		// eqChainsID that we're currently building in which eqChainsID
		// indicates that the current tuple is the head of its equality chain.
		if eqChainsID := headToEqChainsID[headID-1]; eqChainsID == 0 {
			// This tuple is the head of the new equality chain, so we include
			// it in updated selection vector. We also compact the hash buffer
			// accordingly.
			op.ht.probeScratch.hashBuffer[eqChainsCount] = op.ht.probeScratch.hashBuffer[i]
			sel[eqChainsCount] = i
			op.scratch.eqChains[eqChainsCount] = append(op.scratch.eqChains[eqChainsCount], i)
			eqChainsCount++
			headToEqChainsID[headID-1] = eqChainsCount
		} else {
			// Append this tuple to the existing chain (eqChainsID is 1-based).
			op.scratch.eqChains[eqChainsID-1] = append(op.scratch.eqChains[eqChainsID-1], i)
		}
	}
	return eqChainsCount
}
// populateEqChains_true is the variant for a batch with a selection
// vector: positions are mapped through sel before being recorded in the
// equality chains, and sel itself is compacted to hold the chain heads.
func populateEqChains_true(
	op *hashAggregator, batchLength int, sel []int, headToEqChainsID []int) int {
	eqChainsCount := 0
	for i, headID := range op.ht.probeScratch.headID[:batchLength] {
		// Since we're essentially probing the batch against itself, headID
		// cannot be 0, so we don't need to check that. What we have here is
		// the tuple at position i belongs to the same equality chain as the
		// tuple at position headID-1.
		// We will use a similar to keyID encoding for eqChains slot - all
		// tuples that should be included in eqChains[i] chain will have
		// eqChainsID = i + 1. headToEqChainsID is a mapping from headID to
		// eqChainsID that we're currently building in which eqChainsID
		// indicates that the current tuple is the head of its equality chain.
		if eqChainsID := headToEqChainsID[headID-1]; eqChainsID == 0 {
			// This tuple is the head of the new equality chain, so we include
			// it in updated selection vector. We also compact the hash buffer
			// accordingly.
			op.ht.probeScratch.hashBuffer[eqChainsCount] = op.ht.probeScratch.hashBuffer[i]
			sel[eqChainsCount] = sel[i]
			op.scratch.eqChains[eqChainsCount] = append(op.scratch.eqChains[eqChainsCount], sel[i])
			eqChainsCount++
			headToEqChainsID[headID-1] = eqChainsCount
		} else {
			// Append this tuple's selected position to the existing chain.
			op.scratch.eqChains[eqChainsID-1] = append(op.scratch.eqChains[eqChainsID-1], sel[i])
		}
	}
	return eqChainsCount
}
// populateEqChains populates op.scratch.eqChains with indices of tuples from b
// that belong to the same groups. It returns the number of equality chains as
// well as a selection vector that contains "heads" of each of the chains. The
// method assumes that op.ht.probeScratch.headID has been populated with keyIDs
// of all tuples.
// NOTE: selection vector of b is modified to include only heads of each of the
// equality chains.
// NOTE: op.ht.probeScratch.headID and op.ht.probeScratch.differs are reset.
func (op *hashAggregator) populateEqChains(
	b coldata.Batch,
) (eqChainsCount int, eqChainsHeadsSel []int) {
	batchLength := b.Length()
	// Scratch headID -> chainID mapping; must start zeroed.
	headIDToEqChainsID := op.scratch.intSlice[:batchLength]
	copy(headIDToEqChainsID, zeroIntColumn)
	sel := b.Selection()
	if sel != nil {
		eqChainsCount = populateEqChains_true(op, batchLength, sel, headIDToEqChainsID)
	} else {
		// Materialize a selection vector so chain heads can be recorded in it.
		b.SetSelection(true)
		sel = b.Selection()
		eqChainsCount = populateEqChains_false(op, batchLength, sel, headIDToEqChainsID)
	}
	return eqChainsCount, sel
}
package msgraph
// RatingNewZealandMoviesType enumerates the New Zealand movie rating
// levels used in device-management policies. (Generated code; upstream
// metadata marks every member as undocumented.)
type RatingNewZealandMoviesType int

const (
	// RatingNewZealandMoviesTypeVAllAllowed undocumented
	RatingNewZealandMoviesTypeVAllAllowed RatingNewZealandMoviesType = 0
	// RatingNewZealandMoviesTypeVAllBlocked undocumented
	RatingNewZealandMoviesTypeVAllBlocked RatingNewZealandMoviesType = 1
	// RatingNewZealandMoviesTypeVGeneral undocumented
	RatingNewZealandMoviesTypeVGeneral RatingNewZealandMoviesType = 2
	// RatingNewZealandMoviesTypeVParentalGuidance undocumented
	RatingNewZealandMoviesTypeVParentalGuidance RatingNewZealandMoviesType = 3
	// RatingNewZealandMoviesTypeVMature undocumented
	RatingNewZealandMoviesTypeVMature RatingNewZealandMoviesType = 4
	// RatingNewZealandMoviesTypeVAgesAbove13 undocumented
	RatingNewZealandMoviesTypeVAgesAbove13 RatingNewZealandMoviesType = 5
	// RatingNewZealandMoviesTypeVAgesAbove15 undocumented
	RatingNewZealandMoviesTypeVAgesAbove15 RatingNewZealandMoviesType = 6
	// RatingNewZealandMoviesTypeVAgesAbove16 undocumented
	RatingNewZealandMoviesTypeVAgesAbove16 RatingNewZealandMoviesType = 7
	// RatingNewZealandMoviesTypeVAgesAbove18 undocumented
	RatingNewZealandMoviesTypeVAgesAbove18 RatingNewZealandMoviesType = 8
	// RatingNewZealandMoviesTypeVRestricted undocumented
	RatingNewZealandMoviesTypeVRestricted RatingNewZealandMoviesType = 9
	// RatingNewZealandMoviesTypeVAgesAbove16Restricted undocumented
	RatingNewZealandMoviesTypeVAgesAbove16Restricted RatingNewZealandMoviesType = 10
)
// RatingNewZealandMoviesTypePAllAllowed returns a pointer to RatingNewZealandMoviesTypeVAllAllowed
func RatingNewZealandMoviesTypePAllAllowed() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAllAllowed
return &v
}
// RatingNewZealandMoviesTypePAllBlocked returns a pointer to RatingNewZealandMoviesTypeVAllBlocked
func RatingNewZealandMoviesTypePAllBlocked() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAllBlocked
return &v
}
// RatingNewZealandMoviesTypePGeneral returns a pointer to RatingNewZealandMoviesTypeVGeneral
func RatingNewZealandMoviesTypePGeneral() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVGeneral
return &v
}
// RatingNewZealandMoviesTypePParentalGuidance returns a pointer to RatingNewZealandMoviesTypeVParentalGuidance
func RatingNewZealandMoviesTypePParentalGuidance() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVParentalGuidance
return &v
}
// RatingNewZealandMoviesTypePMature returns a pointer to RatingNewZealandMoviesTypeVMature
func RatingNewZealandMoviesTypePMature() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVMature
return &v
}
// RatingNewZealandMoviesTypePAgesAbove13 returns a pointer to RatingNewZealandMoviesTypeVAgesAbove13
func RatingNewZealandMoviesTypePAgesAbove13() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAgesAbove13
return &v
}
// RatingNewZealandMoviesTypePAgesAbove15 returns a pointer to RatingNewZealandMoviesTypeVAgesAbove15
func RatingNewZealandMoviesTypePAgesAbove15() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAgesAbove15
return &v
}
// RatingNewZealandMoviesTypePAgesAbove16 returns a pointer to RatingNewZealandMoviesTypeVAgesAbove16
func RatingNewZealandMoviesTypePAgesAbove16() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAgesAbove16
return &v
}
// RatingNewZealandMoviesTypePAgesAbove18 returns a pointer to RatingNewZealandMoviesTypeVAgesAbove18
func RatingNewZealandMoviesTypePAgesAbove18() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAgesAbove18
return &v
}
// RatingNewZealandMoviesTypePRestricted returns a pointer to RatingNewZealandMoviesTypeVRestricted
func RatingNewZealandMoviesTypePRestricted() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVRestricted
return &v
}
// RatingNewZealandMoviesTypePAgesAbove16Restricted returns a pointer to RatingNewZealandMoviesTypeVAgesAbove16Restricted
func RatingNewZealandMoviesTypePAgesAbove16Restricted() *RatingNewZealandMoviesType {
v := RatingNewZealandMoviesTypeVAgesAbove16Restricted
return &v
} | v1.0/RatingNewZealandMoviesTypeEnum.go | 0.587943 | 0.574335 | RatingNewZealandMoviesTypeEnum.go | starcoder |
package command
import (
"context"
"errors"
"strings"
)
// Node kinds, stored in the low two bits of Node.kind (see Node.parse).
const (
	RootNode = iota // root of the command graph; consumes no input
	LiteralNode // matches a fixed keyword
	ArgumentNode // parsed by the node's Parser
)
// Graph is a directed graph with a root node, representing all commands and how they are parsed.
// Nodes reference each other by index into nodes.
type Graph struct {
	// List of all nodes. The first element is the root node
	nodes []*Node
}
// NewGraph allocates a command Graph containing only its root node.
func NewGraph() *Graph {
	g := &Graph{}
	g.nodes = []*Node{{g: g, kind: RootNode}}
	return g
}
// Execute parses cmd by walking the graph from the root, collecting one
// ParsedData per visited node, and invokes the Run handler of the node
// at which the input is exhausted. It returns an error if a node fails
// to parse, or if text remains after a node with no matching child.
func (g *Graph) Execute(ctx context.Context, cmd string) error {
	var args []ParsedData
	node := g.nodes[0] // root
	for {
		// parse command
		left, value, err := node.parse(cmd)
		if err != nil {
			return err
		}
		args = append(args, value)
		left = strings.TrimSpace(left)
		// Input fully consumed: run this node's handler.
		if len(left) == 0 {
			return node.Run(ctx, args)
		}
		// find next node
		next, err := node.next(left)
		if err != nil {
			return err
		}
		// next == 0 means no child matched the remaining text.
		if next == 0 {
			return errors.New("command contains extra text: " + left)
		}
		cmd = left
		node = g.nodes[next]
	}
}
// ParsedData is the value produced by parsing one node of a command.
type ParsedData interface{}

// HandlerFunc is invoked with the full list of parsed values when a
// command is executed.
type HandlerFunc func(ctx context.Context, args []ParsedData) error

// Node is the node of the Graph. There are 3 kinds of node: Root, Literal and Argument.
type Node struct {
	g *Graph // owning graph, for resolving child indices
	index int32
	kind byte // low two bits select Root/Literal/Argument (see parse)

	Name string
	Children []int32 // indices of child nodes in g.nodes
	SuggestionsType string
	Parser Parser
	Run HandlerFunc
}

// Literal is a Node that matches a fixed keyword.
type Literal Node

// Argument is a Node whose value is produced by its Parser.
type Argument Node
// parse consumes this node's portion of cmd, returning the remaining
// input and the parsed value. Root nodes consume nothing and yield a nil
// value; literal nodes strip their keyword (panicking if cmd does not
// start with it, since next already performed the match); argument nodes
// delegate to their Parser.
func (n *Node) parse(cmd string) (left string, value ParsedData, err error) {
	switch n.kind & 0x03 {
	case RootNode:
		left = cmd
		value = nil
		err = nil
	case LiteralNode:
		if !strings.HasPrefix(cmd, n.Name) {
			panic("expect " + cmd + " prefixed with " + n.Name)
		}
		left = strings.TrimPrefix(cmd, n.Name)
		value = LiteralData(n.Name)
	case ArgumentNode:
		left, value, err = n.Parser.Parse(cmd)
	default:
		panic("unreachable")
	}
	return
}
// next chooses which child node should parse the remaining input left.
// It returns 0 (used as "no match", since index 0 is the root and can
// never be a child) when the node has no children or no literal child
// matches the next word. The first child's kind decides the strategy:
// for literal children the next word is matched against each child's
// Name; for an argument child that child is taken unconditionally.
// NOTE(review): this assumes all children of a node share one kind —
// confirm that graph construction enforces it.
func (n *Node) next(left string) (next int32, err error) {
	if len(n.Children) == 0 {
		return 0, nil
	}
	// look up the first child's type
	switch n.g.nodes[n.Children[0]].kind & 0x03 {
	case RootNode:
		panic("root node can't be child")
	default:
		panic("unreachable")
	case LiteralNode:
		// Extract the next whitespace-delimited word and match it
		// against the literal children by name.
		_, value, err := StringParser(0).Parse(strings.TrimSpace(left))
		if err != nil {
			return 0, err
		}
		literal := value.(string)
		for _, i := range n.Children {
			if n.g.nodes[i].Name == literal {
				next = i
				break
			}
		}
	case ArgumentNode:
		next = n.Children[0]
	}
	return
}
// unhandledCmd always fails with "unhandled function"; presumably the
// default Run handler for nodes without one — confirm at call sites.
func unhandledCmd(_ context.Context, _ []ParsedData) error {
	return errors.New("unhandled function")
}
type LiteralData string
&compiler.Ast{
Pos: Position{Filename: "", Offset: 0, Line: 1, Column: 1},
Modules: []*compiler.Module{
&compiler.Module{
Pos: Position{Filename: "", Offset: 0, Line: 1, Column: 1},
Name: "wasi_unstable",
End: "\n",
ImportSection: compiler.ImportSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
TypeSection: compiler.TypeSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
Functions: []*compiler.Function{
&compiler.Function{
Pos: Position{Filename: "", Offset: 21, Line: 2, Column: 1},
Start: "\n",
Type: "extern",
Name: "fd_write",
Parameters: []compiler.FuncParameter{
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 38, Line: 3, Column: 17},
Name: "a",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 39, Line: 3, Column: 18},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 46, Line: 3, Column: 25},
Name: "b",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 47, Line: 3, Column: 26},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 54, Line: 3, Column: 33},
Name: "c",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 55, Line: 3, Column: 34},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 62, Line: 3, Column: 41},
Name: "d",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 63, Line: 3, Column: 42},
Name: "int",
},
},
},
ReturnType: &compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 69, Line: 3, Column: 48},
Name: "int",
},
EndReturn: "\n",
End: "\n",
},
},
},
&compiler.Module{
Pos: Position{Filename: "", Offset: 0, Line: 1, Column: 1},
Name: "wasi_unstable",
End: "\n",
ImportSection: compiler.ImportSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
TypeSection: compiler.TypeSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
Functions: []*compiler.Function{
&compiler.Function{
Pos: Position{Filename: "", Offset: 21, Line: 2, Column: 1},
Start: "\n",
Type: "extern",
Name: "fd_write",
Parameters: []compiler.FuncParameter{
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 38, Line: 3, Column: 17},
Name: "a",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 39, Line: 3, Column: 18},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 46, Line: 3, Column: 25},
Name: "b",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 47, Line: 3, Column: 26},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 54, Line: 3, Column: 33},
Name: "c",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 55, Line: 3, Column: 34},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 62, Line: 3, Column: 41},
Name: "d",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 63, Line: 3, Column: 42},
Name: "int",
},
},
},
ReturnType: &compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 69, Line: 3, Column: 48},
Name: "int",
},
EndReturn: "\n",
End: "\n",
},
},
},
&compiler.Module{
Pos: Position{Filename: "", Offset: 0, Line: 1, Column: 1},
Name: "wasi_unstable",
End: "\n",
ImportSection: compiler.ImportSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
TypeSection: compiler.TypeSection{
Pos: Position{Filename: "", Offset: 0, Line: 0, Column: 0},
},
Functions: []*compiler.Function{
&compiler.Function{
Pos: Position{Filename: "", Offset: 21, Line: 2, Column: 1},
Start: "\n",
Type: "extern",
Name: "fd_write",
Parameters: []compiler.FuncParameter{
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 38, Line: 3, Column: 17},
Name: "a",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 39, Line: 3, Column: 18},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 46, Line: 3, Column: 25},
Name: "b",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 47, Line: 3, Column: 26},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 54, Line: 3, Column: 33},
Name: "c",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 55, Line: 3, Column: 34},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 62, Line: 3, Column: 41},
Name: "d",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 63, Line: 3, Column: 42},
Name: "int",
},
},
},
ReturnType: &compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 69, Line: 3, Column: 48},
Name: "int",
},
EndReturn: "\n",
End: "\n",
},
},
},
&compiler.Module{
Pos: Position{Filename: "", Offset: 0, Line: 1, Column: 1},
Name: "main",
End: "\n",
ImportSection: compiler.ImportSection{
Pos: Position{Filename: "", Offset: 12, Line: 2, Column: 1},
Start: &"\n",
Imports: []*compiler.Import{
&compiler.Import{
Pos: Position{Filename: "", Offset: 13, Line: 3, Column: 1},
Url: "github.com/pyros2097/wasi_unstable",
End: &"\n",
},
&compiler.Import{
Pos: Position{Filename: "", Offset: 57, Line: 4, Column: 1},
Url: "github.com/pyros2097/wasi_unstable",
End: &"\n",
},
&compiler.Import{
Pos: Position{Filename: "", Offset: 101, Line: 5, Column: 1},
Url: "github.com/pyros2097/wasi_unstable",
End: &"\n",
},
},
End: &"\n",
},
TypeSection: compiler.TypeSection{
Pos: Position{Filename: "", Offset: 146, Line: 7, Column: 1},
Start: &"\n",
Types: []*compiler.Type{
&compiler.Type{
Pos: Position{Filename: "", Offset: 147, Line: 8, Column: 1},
Name: "string",
Alias: "i32",
End: "\n",
},
&compiler.Type{
Pos: Position{Filename: "", Offset: 163, Line: 9, Column: 1},
Name: "ff",
Alias: "i32",
End: "\n",
},
},
End: &"\n",
},
Functions: []*compiler.Function{
&compiler.Function{
Pos: Position{Filename: "", Offset: 180, Line: 11, Column: 1},
Start: "\n",
Type: "proc",
Name: "add",
Parameters: []compiler.FuncParameter{
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 190, Line: 12, Column: 10},
Name: "a",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 191, Line: 12, Column: 11},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 198, Line: 12, Column: 18},
Name: "b",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 199, Line: 12, Column: 19},
Name: "int",
},
},
},
ReturnType: &compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 205, Line: 12, Column: 25},
Name: "int",
},
EndReturn: "\n",
Body: []*compiler.Block{
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 213, Line: 13, Column: 3},
Reference: &"a",
},
Operator: &"+",
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 217, Line: 13, Column: 7},
Reference: &"b",
},
},
},
End: &"\n",
},
},
End: "\n",
},
&compiler.Function{
Pos: Position{Filename: "", Offset: 220, Line: 15, Column: 1},
Start: "\n",
Type: "method",
Name: "add",
Parameters: []compiler.FuncParameter{
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 232, Line: 16, Column: 12},
Name: "a",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 233, Line: 16, Column: 13},
Name: "int",
},
},
compiler.FuncParameter{
Pos: Position{Filename: "", Offset: 240, Line: 16, Column: 20},
Name: "b",
Type: compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 241, Line: 16, Column: 21},
Name: "int",
},
},
},
ReturnType: &compiler.FuncParameterType{
Pos: Position{Filename: "", Offset: 247, Line: 16, Column: 27},
Name: "int",
},
EndReturn: "\n",
Body: []*compiler.Block{
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 255, Line: 17, Column: 3},
Reference: &"a",
},
Operator: &"+",
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 259, Line: 17, Column: 7},
Reference: &"b",
},
},
},
End: &"\n",
},
},
End: "\n",
},
&compiler.Function{
Pos: Position{Filename: "", Offset: 262, Line: 19, Column: 1},
Start: "\n",
Type: "proc",
Name: "main",
EndReturn: "\n",
Body: []*compiler.Block{
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 277, Line: 21, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 280, Line: 21, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 281, Line: 21, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 284, Line: 21, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 289, Line: 22, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 292, Line: 22, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 293, Line: 22, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 296, Line: 22, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 301, Line: 23, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 304, Line: 23, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 305, Line: 23, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 308, Line: 23, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 313, Line: 24, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 316, Line: 24, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 317, Line: 24, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 320, Line: 24, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 325, Line: 25, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 328, Line: 25, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 329, Line: 25, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 332, Line: 25, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
&compiler.Block{
Exp: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 337, Line: 26, Column: 3},
Reference: &"add",
},
Right: &compiler.Expression{
Left: &compiler.Literal{
Pos: Position{Filename: "", Offset: 340, Line: 26, Column: 6},
Params: []*compiler.Literal{
&compiler.Literal{
Pos: Position{Filename: "", Offset: 341, Line: 26, Column: 7},
Int: &1,
},
&compiler.Literal{
Pos: Position{Filename: "", Offset: 344, Line: 26, Column: 10},
Int: &3,
},
},
},
},
},
End: &"\n",
},
},
End: "\n",
},
},
},
},
}
package tile
import (
"reflect"
"runtime"
"sync/atomic"
"unsafe"
)
// Iterator represents an iterator function.
type Iterator = func(Point, Tile)

// pageFn is a callback receiving a single page of the grid.
type pageFn = func(*page)

// indexFn translates a page coordinate into an index within Grid.pages.
type indexFn = func(x, y int16) int

// pointFn translates an index within Grid.pages back into a page coordinate.
type pointFn = func(i int) Point

// Grid represents a 2D tile map. Internally, a map is composed of 3x3 pages.
type Grid struct {
	pages      []page  // The pages of the map
	pageWidth  int16   // The max page width (in pages, not tiles)
	pageHeight int16   // The max page height (in pages, not tiles)
	observers  pubsub  // The map of observers
	Size       Point   // The map size, in tiles
	indexOf    indexFn // The page index function (row-major or z-order)
}
// NewGrid returns a new map of the specified size. The width and height must be both
// multiples of 3.
func NewGrid(width, height int16) *Grid {
	// Convert tile dimensions into page dimensions (one page = 3x3 tiles).
	width, height = width/3, height/3
	max := int32(width) * int32(height)
	pages := make([]page, max)
	m := &Grid{
		pages:      pages,
		pageWidth:  width,
		pageHeight: height,
		observers:  pubsub{},
		Size:       At(width*3, height*3),
	}
	// Function to calculate a point based on the index
	var pointAt func(i int) Point = func(i int) Point {
		return At(int16(i%int(width)), int16(i/int(width)))
	}
	m.indexOf = m.pointToFlat
	// If the map is square and page count is a power of 2, use z-curve filling instead
	// as this will speed up data access under certain conditions.
	if width == height && (width&(width-1)) == 0 {
		pointAt = deinterleavePoint
		m.indexOf = m.pointToZ
	}
	// Assign each page the tile coordinate of its top-left corner.
	for i := 0; i < int(max); i++ {
		pages[i].point = pointAt(i).MultiplyScalar(3)
	}
	return m
}

// pointToFlat maps a page coordinate to its row-major index within m.pages.
func (m *Grid) pointToFlat(x, y int16) int {
	return int(x) + int(m.pageWidth)*int(y)
}

// pointToZ maps a page coordinate to its z-order (Morton) index, used for
// square power-of-two grids.
func (m *Grid) pointToZ(x, y int16) int {
	return int(At(x, y).Interleave())
}
// Each invokes fn for every tile of every page in the map.
func (m *Grid) Each(fn Iterator) {
	for i := range m.pages {
		m.pages[i].Each(fn)
	}
}
// Within selects the tiles within a specified bounding box which is specified by
// north-west and south-east coordinates.
func (m *Grid) Within(nw, se Point, fn Iterator) {
	m.pagesWithin(nw, se, func(page *page) {
		page.Each(func(p Point, tile Tile) {
			// Pages can straddle the box edges, so filter tiles individually.
			if p.Within(nw, se) {
				fn(p, tile)
			}
		})
	})
}

// pagesWithin selects the pages within a specified bounding box which is specified
// by north-west and south-east coordinates.
func (m *Grid) pagesWithin(nw, se Point, fn pageFn) {
	// Clamp the south-east corner to the map bounds.
	if !se.WithinSize(m.Size) {
		se = At(m.Size.X-1, m.Size.Y-1)
	}
	for x := nw.X / 3; x <= se.X/3; x++ {
		for y := nw.Y / 3; y <= se.Y/3; y++ {
			fn(&m.pages[m.indexOf(x, y)])
		}
	}
}
// At returns the tile at a specified position, and whether the position lies
// inside the map.
func (m *Grid) At(x, y int16) (Tile, bool) {
	if x >= 0 && y >= 0 && x < m.Size.X && y < m.Size.Y {
		return m.pages[m.indexOf(x/3, y/3)].Get(x, y), true
	}
	return Tile{}, false
}

// WriteAt updates the entire tile value at a specific coordinate.
// Out-of-bounds writes are silently ignored.
func (m *Grid) WriteAt(x, y int16, tile Tile) {
	if x >= 0 && y >= 0 && x < m.Size.X && y < m.Size.Y {
		// SetTile reports whether the page is observed; only then notify.
		if m.pages[m.indexOf(x/3, y/3)].SetTile(x, y, tile) {
			// The first argument is the page's top-left (origin) coordinate.
			m.observers.Notify(At(x/3*3, y/3*3), At(x, y), tile)
		}
	}
}

// MergeAt updates the bits of tile at a specific coordinate. The bits are specified
// by the mask. The bits that need to be updated should be flipped on in the mask.
func (m *Grid) MergeAt(x, y int16, tile, mask Tile) {
	if x >= 0 && y >= 0 && x < m.Size.X && y < m.Size.Y {
		if v, ok := m.pages[m.indexOf(x/3, y/3)].SetBits(x, y, tile, mask); ok {
			m.observers.Notify(At(x/3*3, y/3*3), At(x, y), v)
		}
	}
}

// Neighbors iterates over the direct neighbouring tiles
func (m *Grid) Neighbors(x, y int16, fn Iterator) {
	// First we need to figure out which pages contain the neighboring tiles and
	// then load them. In the best-case we need to load only a single page. In
	// the worst-case: we need to load 3 pages.
	nX, nY := x/3, (y-1)/3 // North
	eX, eY := (x+1)/3, y/3 // East
	sX, sY := x/3, (y+1)/3 // South
	wX, wY := (x-1)/3, y/3 // West
	// Get the North
	if y > 0 {
		fn(At(x, y-1), m.pages[m.indexOf(nX, nY)].Get(x, y-1))
	}
	// Get the East
	if eX < m.pageWidth {
		fn(At(x+1, y), m.pages[m.indexOf(eX, eY)].Get(x+1, y))
	}
	// Get the South
	if sY < m.pageHeight {
		fn(At(x, y+1), m.pages[m.indexOf(sX, sY)].Get(x, y+1))
	}
	// Get the West
	if x > 0 {
		fn(At(x-1, y), m.pages[m.indexOf(wX, wY)].Get(x-1, y))
	}
}

// View creates a new view of the map.
func (m *Grid) View(rect Rect, fn Iterator) *View {
	view := &View{
		Grid:  m,
		Inbox: make(chan Update, 8),
		// (-1,-1,-1,-1) appears to act as a "no previous area" sentinel for
		// Resize — confirm against the View implementation.
		rect: NewRect(-1, -1, -1, -1),
	}
	// Call the resize method
	view.Resize(rect, fn)
	return view
}
// -----------------------------------------------------------------------------

// Tile represents a packed tile information, it must fit on 6 bytes.
type Tile [6]byte

// -----------------------------------------------------------------------------

// page represents a 3x3 tile page each page should neatly fit on a cache
// line and speed things up.
type page struct {
	lock  int32   // Page spin-lock, 4 bytes
	flags uint16  // Page flags, 2 bytes (bit 0 = "observed")
	point Point   // Page X, Y coordinate, 4 bytes
	tiles [9]Tile // Page tiles, 54 bytes
}
// Bounds returns the bounding box for the tile page.
func (p *page) Bounds() Rect {
	return Rect{p.point, At(p.point.X+3, p.point.Y+3)}
}

// SetTile updates the tile at a specific coordinate. It reports whether the
// page is currently observed (i.e. whether the caller should notify).
func (p *page) SetTile(x, y int16, tile Tile) bool {
	i := (y%3)*3 + (x % 3)
	// Synchronize the update from this point on
	p.Lock()
	p.tiles[i] = tile
	notify := p.flags&1 != 0
	p.Unlock()
	// Return whether tile is observed or not
	return notify
}

// SetBits updates certain tile bits at a specific coordinate. Only the bits
// set in mask are taken from tile; the remaining bits keep their current
// value. It returns the merged tile and whether the page is observed.
func (p *page) SetBits(x, y int16, tile, mask Tile) (Tile, bool) {
	// Widen the 6-byte tile and mask into uint64s (little-endian order) so
	// the merge can be done with plain integer bit operations.
	t := uint64(tile[0]) | uint64(tile[1])<<8 | uint64(tile[2])<<16 |
		uint64(tile[3])<<24 | uint64(tile[4])<<32 | uint64(tile[5])<<40
	m := uint64(mask[0]) | uint64(mask[1])<<8 | uint64(mask[2])<<16 |
		uint64(mask[3])<<24 | uint64(mask[4])<<32 | uint64(mask[5])<<40
	i := (y%3)*3 + (x % 3)
	// Get the tile and do the binary merge
	p.Lock()
	b := &p.tiles[i]
	v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 |
		uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40
	v = (v &^ m) | (t & m)
	// Write the merged result back
	b[0] = byte(v)
	b[1] = byte(v >> 8)
	b[2] = byte(v >> 16)
	b[3] = byte(v >> 24)
	b[4] = byte(v >> 32)
	b[5] = byte(v >> 40)
	merged, notify := *b, p.flags&1 != 0
	p.Unlock()
	// Return the merged tile data and whether tile is observed or not
	return merged, notify
}
// Get gets a tile at a specific coordinate.
func (p *page) Get(x, y int16) (tile Tile) {
	i := (y%3)*3 + (x % 3)
	p.Lock()
	tile = p.tiles[i]
	p.Unlock()
	return
}

// Each iterates over all of the tiles in the page.
func (p *page) Each(fn Iterator) {
	// Copy the tiles under the lock so fn runs without holding it.
	p.Lock()
	tiles := p.tiles
	p.Unlock()
	x, y := p.point.X, p.point.Y
	fn(Point{x, y}, tiles[0])         // NW
	fn(Point{x + 1, y}, tiles[1])     // N
	fn(Point{x + 2, y}, tiles[2])     // NE
	fn(Point{x, y + 1}, tiles[3])     // W
	fn(Point{x + 1, y + 1}, tiles[4]) // C
	fn(Point{x + 2, y + 1}, tiles[5]) // E
	fn(Point{x, y + 2}, tiles[6])     // SW
	fn(Point{x + 1, y + 2}, tiles[7]) // S
	fn(Point{x + 2, y + 2}, tiles[8]) // SE
}

// SetObserved sets the observed flag (bit 0 of flags) on the page.
func (p *page) SetObserved(observed bool) {
	p.Lock()
	defer p.Unlock()
	if observed {
		p.flags = p.flags | 1
	} else {
		p.flags = p.flags &^ 1
	}
}

// Lock locks the spin lock. Note: this needs to be named Lock() so go vet will
// complain if the page is copied around.
func (p *page) Lock() {
	// Spin until the CAS succeeds, yielding to the scheduler between tries.
	for !atomic.CompareAndSwapInt32(&p.lock, 0, 1) {
		runtime.Gosched()
	}
}

// Unlock unlocks the page. Note: this needs to be named Unlock() so go vet will
// complain if the page is copied around.
func (p *page) Unlock() {
	atomic.StoreInt32(&p.lock, 0)
}
// Data returns a buffer to the tile data, without allocations.
func (p *page) Data() []byte {
var out reflect.SliceHeader
out.Data = reflect.ValueOf(&p.tiles).Pointer()
out.Len = tileDataSize
out.Cap = tileDataSize
return *(*[]byte)(unsafe.Pointer(&out))
} | grid.go | 0.822902 | 0.460592 | grid.go | starcoder |
package Character
import "github.com/golang/The-Lagorinth/Items"
import "github.com/golang/The-Lagorinth/Spells"
import "github.com/golang/The-Lagorinth/Point"
import "github.com/golang/The-Lagorinth/Labyrinth"
import "math/rand"
// NPC is a game character (monster or human) living inside the labyrinth.
type NPC struct {
	Location    *Point.Point // current tile position
	Symbol      string       // glyph used to draw the character on the map
	Name        string
	Orientation *Point.Point // facing direction as a (dx, dy) offset from Location
	Weapon      *Items.Weapon
	Armor       *Items.Armor
	DmgMultuplier float32 // multiplier applied to every weapon hit
	Defence, Evasion, CritChance int
	CurrentHealth, MaxHealth, HealthRegen float32
	CurrentMana, MaxMana, ManaRegen float32
	VisionRadious int // sight radius — presumably in tiles; confirm with renderer
	IsStunned bool // set from projectile hits; effect handled by the game loop
	BuffList map[int]*Spell.Buff
	IsHuman bool
	TrapHandling int // skill used when dealing with traps — confirm usage
}
// moveTowardsHero determines if a character can move towards the player.
// It reports whether the player stands on a tile directly adjacent to the
// character (up, down, left or right) and, if so, returns the player's
// coordinates; otherwise an empty point is returned.
//
// The four previously copy-pasted direction checks are folded into a single
// loop over neighbour offsets; the check order (up, down, left, right) and
// the out-of-bounds behaviour are unchanged.
func (npc *NPC) moveTowardsHero(labyrinth *Labyrinth.Labyrinth) (bool, *Point.Point) {
	// Neighbour offsets in the original check order: up, down, left, right.
	offsets := [4][2]int{{-1, 0}, {1, 0}, {0, -1}, {0, 1}}
	for _, d := range offsets {
		x, y := npc.Location.X+d[0], npc.Location.Y+d[1]
		// Out-of-bounds tiles can never hold the player (the old code read
		// an empty string for them, which never equals CharSymbol).
		if !labyrinth.IsInBondaries(x, y) {
			continue
		}
		if labyrinth.Labyrinth[x][y] == Labyrinth.CharSymbol {
			return true, &Point.Point{x, y, nil}
		}
	}
	return false, &Point.Point{}
}
//makeDecisionWhereToMove determines to which empty tile a character should move to.
//With 80% probability the character keeps walking in its current facing
//direction when that tile is free; otherwise, if the way ahead is blocked,
//it picks a random walkable neighbour.
//Return true if such a tile exists and its coordinates.
func (npc *NPC) makeDecisionWhereToMove(labyrinth *Labyrinth.Labyrinth) (bool, *Point.Point) {
	frontTile := labyrinth.Labyrinth[npc.Location.X+npc.Orientation.X][npc.Location.Y+npc.Orientation.Y]
	if frontTile != Labyrinth.Wall && frontTile != Labyrinth.Monster && frontTile != Labyrinth.Treasure {
		if rand.Intn(100) < 80 {
			return true, &Point.Point{npc.Location.X + npc.Orientation.X, npc.Location.Y + npc.Orientation.Y, nil}
		}
	} else {
		// The way ahead is blocked: collect every walkable neighbour
		// (up, down, left, right) and pick one at random.
		direction := make([]Point.Point, 0, 4)
		upTile := labyrinth.Labyrinth[npc.Location.X-1][npc.Location.Y]
		if upTile != Labyrinth.Wall && upTile != Labyrinth.Monster && upTile != Labyrinth.Treasure {
			direction = append(direction, Point.Point{npc.Location.X - 1, npc.Location.Y, nil})
		}
		downTile := labyrinth.Labyrinth[npc.Location.X+1][npc.Location.Y]
		if downTile != Labyrinth.Wall && downTile != Labyrinth.Monster && downTile != Labyrinth.Treasure {
			direction = append(direction, Point.Point{npc.Location.X + 1, npc.Location.Y, nil})
		}
		leftTile := labyrinth.Labyrinth[npc.Location.X][npc.Location.Y-1]
		if leftTile != Labyrinth.Wall && leftTile != Labyrinth.Monster && leftTile != Labyrinth.Treasure {
			direction = append(direction, Point.Point{npc.Location.X, npc.Location.Y - 1, nil})
		}
		rightTile := labyrinth.Labyrinth[npc.Location.X][npc.Location.Y+1]
		if rightTile != Labyrinth.Wall && rightTile != Labyrinth.Monster && rightTile != Labyrinth.Treasure {
			direction = append(direction, Point.Point{npc.Location.X, npc.Location.Y + 1, nil})
		}
		if len(direction) != 0 {
			return true, &direction[rand.Intn(len(direction))]
		}
	}
	// Reached when the front tile is free but the 80% roll failed, or when
	// the character is completely boxed in: no move this turn.
	return false, &Point.Point{-1, -1, nil}
}
// Move decides the character's next position: step onto the player when
// adjacent, otherwise try a regular movement decision, and finally stay in
// place when no move is possible.
func (npc *NPC) Move(labyrinth *Labyrinth.Labyrinth) *Point.Point {
	if nextToHero, heroLoc := npc.moveTowardsHero(labyrinth); nextToHero {
		return heroLoc
	}
	if decided, dest := npc.makeDecisionWhereToMove(labyrinth); decided {
		return dest
	}
	return npc.Location
}

// ChangeOrientation points the character towards the tile (x2, y2) by
// storing the offset from the current location.
func (npc *NPC) ChangeOrientation(x2 int, y2 int) {
	npc.Orientation.X = x2 - npc.Location.X
	npc.Orientation.Y = y2 - npc.Location.Y
}
// EquipWeapon equips the given weapon by storing a reference to it.
func (npc *NPC) EquipWeapon(newWeapon *Items.Weapon) {
	npc.Weapon = newWeapon
}

// UnequipWeapon drops the reference to the currently equipped weapon, if any.
func (npc *NPC) UnequipWeapon() {
	// The previous nil-guard was redundant: assigning nil unconditionally
	// has exactly the same effect.
	npc.Weapon = nil
}
//EquipArmor sets a reference for the armor argument.
//Also adds the armor field values to those of the character.
//NOTE(review): a nil newArmor would panic here — confirm callers never pass nil.
func (npc *NPC) EquipArmor(newArmor *Items.Armor) {
	npc.Armor = newArmor
	npc.CurrentHealth += newArmor.Health
	npc.MaxHealth += newArmor.Health
	npc.CurrentMana += newArmor.Mana
	npc.MaxMana += newArmor.Mana
	npc.Evasion += newArmor.Evasion
	npc.HealthRegen += newArmor.HealthRegen
	npc.ManaRegen += newArmor.ManaRegen
}

//UnequipArmor removes the reference for the equipped armor.
//Also removes the armor field values from those of the character.
//Calling it with no armor equipped is a no-op.
func (npc *NPC) UnequipArmor() {
	if npc.Armor != nil {
		npc.CurrentHealth -= npc.Armor.Health
		npc.MaxHealth -= npc.Armor.Health
		npc.CurrentMana -= npc.Armor.Mana
		npc.MaxMana -= npc.Armor.Mana
		npc.Evasion -= npc.Armor.Evasion
		npc.HealthRegen -= npc.Armor.HealthRegen
		npc.ManaRegen -= npc.Armor.ManaRegen
		npc.Armor = nil
	}
}
//DoDamage return the damage the character will deal.
//The weapon damage is scaled by the character's damage multiplier and
//doubled on a critical hit, rolled against the combined crit chance of
//character and weapon.
func (npc *NPC) DoDamage() float32 {
	if rand.Intn(100) < npc.CritChance+npc.Weapon.BonusCritChance {
		return 2 * npc.DmgMultuplier * npc.Weapon.Damage()
	}
	return npc.DmgMultuplier * npc.Weapon.Damage()
}

//CombinedDefence return the sum of the armor's defense with the character's defense.
//NOTE(review): panics when no armor is equipped — confirm that is impossible.
func (npc *NPC) CombinedDefence() float32 {
	return float32(npc.Defence + npc.Armor.Defence)
}

//TakeDamage subtracts the received argument from the character health points.
//The argument's value is lowered by the defense of the character; when the
//defence fully absorbs the hit, health is left untouched.
func (npc *NPC) TakeDamage(damage float32) {
	var damageTaken float32 = damage - npc.CombinedDefence()
	if damageTaken > 0 {
		npc.CurrentHealth = npc.CurrentHealth - damageTaken
	}
}
// RegenHealth applies one tick of health regeneration, clamped to MaxHealth.
func (npc *NPC) RegenHealth() {
	npc.CurrentHealth += npc.HealthRegen
	if npc.CurrentHealth > npc.MaxHealth {
		npc.CurrentHealth = npc.MaxHealth
	}
}

// RegenMana applies one tick of mana regeneration, clamped to MaxMana.
func (npc *NPC) RegenMana() {
	npc.CurrentMana += npc.ManaRegen
	if npc.CurrentMana > npc.MaxMana {
		npc.CurrentMana = npc.MaxMana
	}
}
//ApplyBuff receives a argument Buff and adds its field values to those of the character.
//Note that the buff's mana cost is applied as a reduction of mana regen.
func (npc *NPC) ApplyBuff(buff *Spell.Buff) {
	npc.HealthRegen += buff.BonusHealthRegen
	npc.DmgMultuplier += buff.BonusDamageMultiplier
	npc.Weapon.BonusDmg += buff.BonusDamage
	npc.Defence += buff.BonusDefence
	npc.Evasion += buff.BonusEvasion
	npc.CritChance += buff.BonusCritChance
	npc.ManaRegen -= buff.ManaCostPerTurn
}

//RemoveBuff receives a argument Buff and subtracts its field values from those
//of the character, exactly undoing ApplyBuff.
func (npc *NPC) RemoveBuff(buff *Spell.Buff) {
	npc.HealthRegen -= buff.BonusHealthRegen
	npc.DmgMultuplier -= buff.BonusDamageMultiplier
	npc.Weapon.BonusDmg -= buff.BonusDamage
	npc.Defence -= buff.BonusDefence
	npc.Evasion -= buff.BonusEvasion
	npc.CritChance -= buff.BonusCritChance
	npc.ManaRegen += buff.ManaCostPerTurn
}

//Regenerate call the RegenMana() and RegenHealth() functions.
func (npc *NPC) Regenerate() {
	npc.RegenMana()
	npc.RegenHealth()
}
//ProjectileToTheFace takes one argument - Projectile.
//Stuns the character if the WillStun flag is true.
//Applies a buff to the character if such a buff exists.
func (npc *NPC) ProjectileToTheFace(projectile *Spell.Projectile) {
npc.IsStunned = projectile.WillStun
if projectile.Buff != nil {
npc.ApplyBuff(projectile.Buff)
}
} | Characters/character.go | 0.668664 | 0.41834 | character.go | starcoder |
// Package regression contains a simple Thiel-Sen estimator for linear regression.
package regression
import (
"container/heap"
"sort"
)
// LinearRegression returns the slope and intercept of a line fitted with the
// Theil-Sen estimator: the median of the slopes defined by every pair of
// points with distinct x-coordinates.
//
// It panics when xData and yData have different lengths and returns (0, 0)
// for empty input.
func LinearRegression(xData, yData []float64) (float64, float64) {
	if len(xData) != len(yData) {
		panic("x and y must have the same length!")
	}
	if len(xData) == 0 {
		return 0, 0
	}
	lns := lines(make([]line, 0, len(xData)*len(xData)))
	for i, x1 := range xData {
		for j, x2 := range xData {
			if i == j || x1 == x2 {
				continue
			}
			a := (yData[j] - yData[i]) / (x2 - x1)
			// y = a*x + b ==> b = y - a*x
			lns = append(lns, line{a: a, b: yData[j] - a*x2})
		}
	}
	m := lns.Median()
	return m.a, m.b
}

// line represents a candidate regression line y = a*x + b.
type line struct {
	a, b float64
}

// lines is a slice of candidate lines ordered by slope. It implements both
// sort.Interface and heap.Interface (min-heap on the slope).
type lines []line

func (ln lines) Len() int           { return len(ln) }
func (ln lines) Less(i, j int) bool { return ln[i].a < ln[j].a }
func (ln lines) Swap(i, j int)      { ln[i], ln[j] = ln[j], ln[i] }

// Pop removes and returns the LAST element, as heap.Interface requires.
// The previous implementation removed the first element, which corrupted
// every heap.Pop and therefore returned a wrong "median".
func (ln *lines) Pop() interface{} {
	old := *ln
	n := len(old)
	x := old[n-1]
	*ln = old[:n-1]
	return x
}

func (ln *lines) Push(x interface{}) { *ln = append(*ln, x.(line)) }

// Median returns the median line, ordered by slope (the upper median for
// even lengths). This sorts the underlying slice.
func (ln lines) Median() line {
	if len(ln) == 0 {
		return line{}
	}
	sort.Sort(ln)
	return ln[len(ln)/2]
}
// floats is a min-heap of float64 values implementing heap.Interface.
type floats []float64

func (fh floats) Len() int           { return len(fh) }
func (fh floats) Less(i, j int) bool { return fh[i] < fh[j] }
func (fh floats) Swap(i, j int)      { fh[i], fh[j] = fh[j], fh[i] }

// Pop removes and returns the LAST element, as heap.Interface requires.
// The previous implementation removed the first element, which broke
// heap.Pop and produced wrong medians.
func (fh *floats) Pop() interface{} {
	old := *fh
	n := len(old)
	x := old[n-1]
	*fh = old[:n-1]
	return x
}

func (fh *floats) Push(x interface{}) { *fh = append(*fh, x.(float64)) }

// Median returns the median value of the data (the upper median for even
// lengths), or 0 for empty input. The input slice is not modified.
func Median(x []float64) float64 {
	if len(x) == 0 {
		return 0
	}
	if sort.Float64sAreSorted(x) {
		return x[len(x)/2]
	}
	// Push everything onto a min-heap, discard the smaller half; the next
	// pop yields the element at index len(x)/2 of the sorted order.
	h := new(floats)
	heap.Init(h)
	for _, f := range x {
		heap.Push(h, f)
	}
	for i := 0; i < len(x)/2; i++ {
		heap.Pop(h)
	}
	return heap.Pop(h).(float64)
}
package sphinx
import "github.com/xlab/pocketsphinx-go/pocketsphinx"
/*
* Fast integer logarithmic addition operations.
*
* In evaluating HMM models, probability values are often kept in log
* domain, to avoid overflow. To enable these logprob values to be
* held in int32 variables without significant loss of precision, a
* logbase of (1+epsilon) (where epsilon < 0.01 or so) is used. This
* module maintains this logbase (B).
*
* However, maintaining probabilities in log domain creates a problem
* when adding two probability values. This problem can be solved by
* table lookup. Note that:
*
* - \f$ b^z = b^x + b^y \f$
* - \f$ b^z = b^x(1 + b^{y-x}) = b^y(1 + e^{x-y}) \f$
* - \f$ z = x + log_b(1 + b^{y-x}) = y + log_b(1 + b^{x-y}) \f$
*
* So:
*
* - when \f$ y > x, z = y + logadd\_table[-(x-y)] \f$
* - when \f$ x > y, z = x + logadd\_table[-(y-x)] \f$
* - where \f$ logadd\_table[n] = log_b(1 + b^{-n}) \f$
*
* The first entry in <i>logadd_table</i> is
* simply \f$ log_b(2.0) \f$, for
* the case where \f$ y = x \f$ and thus
* \f$ z = log_b(2x) = log_b(2) + x \f$. The last entry is zero,
* where \f$ log_b(x+y) = x = y \f$ due to loss of precision.
*
* Since this table can be quite large particularly for small
* logbases, an option is provided to compress it by dropping the
* least significant bits of the table.
*/
// LogMath integer log math computation class.
type LogMath struct {
m *pocketsphinx.Logmath
}
// LogMath returns a retained copy of underlying reference to pocketsphinx.Logmath.
func (l *LogMath) LogMath() *pocketsphinx.Logmath {
return pocketsphinx.LogmathRetain(l.m)
}
// WriteTo writes a log table to a file.
func (l LogMath) WriteTo(filename String) bool {
ret := pocketsphinx.LogmathWrite(l.m, filename.S())
return ret == 0
}
// GetTableShape gets the log table size and dimensions.
func (l LogMath) GetTableShape() (size, width, shift uint32, ok bool) {
ret := pocketsphinx.LogmathGetTableShape(l.m, &size, &width, &shift)
ok = ret == 0
return
}
// GetBase gets the log base.
func (l LogMath) GetBase() float64 {
return pocketsphinx.LogmathGetBase(l.m)
}
// GetZero gets the smallest possible value represented in this base.
func (l LogMath) GetZero() int32 {
return pocketsphinx.LogmathGetZero(l.m)
}
// GetWidth gets the width of the values in a log table.
func (l LogMath) GetWidth() int32 {
return pocketsphinx.LogmathGetWidth(l.m)
}
// GetShift gets the shift of the values in a log table.
func (l LogMath) GetShift() int32 {
return pocketsphinx.LogmathGetShift(l.m)
}
// AddExact adds two values in log space exactly and slowly (without using
// the precomputed add table).
func (l LogMath) AddExact(p, q int32) int32 {
	return pocketsphinx.LogmathAddExact(l.m, p, q)
}
// Add adds two values in log space using the add table,
// i.e. it returns log(exp(p)+exp(q)).
func (l LogMath) Add(p, q int32) int32 {
	return pocketsphinx.LogmathAdd(l.m, p, q)
}
// Log converts a linear floating point number to an integer log in base B.
func (l LogMath) Log(p float64) int32 {
	return pocketsphinx.LogmathLog(l.m, p)
}
// Exp converts an integer log in base B back to a linear floating point value.
func (l LogMath) Exp(p int32) float64 {
	return pocketsphinx.LogmathExp(l.m, p)
}
// LnToLog converts a natural log (in floating point) to an integer log in base B.
func (l LogMath) LnToLog(p float64) int32 {
	return pocketsphinx.LogmathLnToLog(l.m, p)
}
// LogToLn converts an integer log in base B to a natural log (in floating point).
func (l LogMath) LogToLn(p int32) float64 {
	return pocketsphinx.LogmathLogToLn(l.m, p)
}
// Log10ToLog converts a base-10 log (in floating point) to an integer log in base B.
func (l LogMath) Log10ToLog(p float64) int32 {
	return pocketsphinx.LogmathLog10ToLog(l.m, p)
}
// LogToLog10 converts an integer log in base B to a base-10 log (in floating point).
func (l LogMath) LogToLog10(p int32) float64 {
	return pocketsphinx.LogmathLogToLog10(l.m, p)
}
// Log10ToLogFloat converts a base-10 log (in floating point) to a float log in base B.
func (l LogMath) Log10ToLogFloat(p float64) float32 {
	return pocketsphinx.LogmathLog10ToLogFloat(l.m, p)
}
// LogFloatToLog10 converts a float log in base B to a base-10 log.
func (l LogMath) LogFloatToLog10(p float32) float64 {
	return pocketsphinx.LogmathLogFloatToLog10(l.m, p)
}
// Destroy releases the underlying pocketsphinx log-math object and clears
// the handle so subsequent calls are no-ops. It reports whether the free
// succeeded; an already-nil handle counts as success.
func (l *LogMath) Destroy() bool {
	if l.m != nil {
		ret := pocketsphinx.LogmathFree(l.m)
		l.m = nil
		return ret == 0
	}
	return true
}
// Retain increments the reference count of the underlying log-math object.
func (l *LogMath) Retain() {
	l.m = pocketsphinx.LogmathRetain(l.m)
}
package cemi
import (
"io"
"github.com/vapourismo/knx-go/knx/util"
)
// APCI is the Application-layer Protocol Control Information.
// Only the low 4 bits are meaningful; see the constants below.
type APCI uint8
// IsGroupCommand determines if the APCI indicates a group command, i.e.
// GroupValueRead (0), GroupValueResponse (1) or GroupValueWrite (2).
func (apci APCI) IsGroupCommand() bool {
	return apci < 3
}
// These are usable APCI values. The first three (0-2) are the group
// communication services; the remainder are point-to-point and broadcast
// management services.
const (
	GroupValueRead         APCI = 0
	GroupValueResponse     APCI = 1
	GroupValueWrite        APCI = 2
	IndividualAddrWrite    APCI = 3
	IndividualAddrRequest  APCI = 4
	IndividualAddrResponse APCI = 5
	AdcRead                APCI = 6
	AdcResponse            APCI = 7
	MemoryRead             APCI = 8
	MemoryResponse         APCI = 9
	MemoryWrite            APCI = 10
	UserMessage            APCI = 11
	MaskVersionRead        APCI = 12
	MaskVersionResponse    APCI = 13
	Restart                APCI = 14
	Escape                 APCI = 15
)
// An AppData contains application data in a transport unit.
type AppData struct {
	Numbered  bool   // true if SeqNumber is valid (numbered data packet)
	SeqNumber uint8  // transport-layer sequence number (low 4 bits used)
	Command   APCI   // application-layer service code
	Data      []byte // APDU payload; its first byte shares bits with Command when packed
}
// Size retrieves the packed size: the length octet and TPCI octet plus the
// payload length clamped to the range [1, 255].
func (app *AppData) Size() uint {
	n := uint(len(app.Data))
	switch {
	case n > 255:
		n = 255
	case n < 1:
		n = 1
	}
	return 2 + n
}
// Pack into a transport data unit including its leading length byte.
//
// Layout: buffer[0] holds the clamped data length, buffer[1] carries the
// TPCI bits (numbered flag, sequence number, upper two Command bits) and
// buffer[2] mixes the lower two Command bits with the first payload byte.
// NOTE(review): buffer[1] is only OR-ed into, so this assumes the caller
// supplies a zeroed buffer (ControlData.Pack assigns instead) — confirm
// against the util.Packable contract.
func (app *AppData) Pack(buffer []byte) {
	// Clamp the encoded length to [1, 255] exactly as Size does.
	dataLength := len(app.Data)
	if dataLength > 255 {
		dataLength = 255
	} else if dataLength < 1 {
		dataLength = 1
	}
	buffer[0] = byte(dataLength)
	// Numbered data packets set bit 6 and carry the sequence number in bits 2-5.
	if app.Numbered {
		buffer[1] |= 1<<6 | (app.SeqNumber&15)<<2
	}
	// Upper two bits of the 4-bit APCI go into the low bits of buffer[1].
	buffer[1] |= byte(app.Command>>2) & 3
	copy(buffer[2:], app.Data)
	// Lower two APCI bits overlay the top two bits of the first data byte.
	buffer[2] &= 63
	buffer[2] |= byte(app.Command&3) << 6
}
// A ControlData encodes control information in a transport unit.
type ControlData struct {
	Numbered  bool  // true if SeqNumber is valid (numbered control packet)
	SeqNumber uint8 // transport-layer sequence number (low 4 bits used)
	Command   uint8 // control command (low 2 bits used)
}
// Size retrieves the packed size, which is always 2 bytes
// (length octet plus TPCI octet; control units carry no payload).
func (ControlData) Size() uint {
	return 2
}
// Pack into a transport data unit including its leading length byte.
// The length octet is zero; the TPCI octet sets bit 7 (control unit),
// the command in the low two bits and, for numbered packets, bit 6 plus
// the sequence number in bits 2-5.
func (control *ControlData) Pack(buffer []byte) {
	buffer[0] = 0
	b := byte(1<<7 | (control.Command & 3))
	if control.Numbered {
		b |= 1<<6 | (control.SeqNumber&15)<<2
	}
	buffer[1] = b
}
// A TransportUnit is responsible to transport data; any util.Packable
// value (here AppData or ControlData) qualifies.
type TransportUnit interface {
	util.Packable
}
// unpackTransportUnit parses the given data in order to extract the transport unit that it encodes.
func unpackTransportUnit(data []byte, unit *TransportUnit) (uint, error) {
if len(data) < 2 {
return 0, io.ErrUnexpectedEOF
}
// Does unit contain control information?
if (data[1] & (1 << 7)) == 1<<7 {
control := &ControlData{
Numbered: (data[1] & (1 << 6)) == 1<<6,
SeqNumber: (data[1] >> 2) & 15,
Command: data[1] & 3,
}
*unit = control
return 2, nil
}
dataLength := int(data[0])
if len(data) < 3 || dataLength+2 < len(data) {
return 0, io.ErrUnexpectedEOF
}
app := &AppData{
Numbered: (data[1] & (1 << 6)) == 1<<6,
SeqNumber: (data[1] >> 2) & 15,
Command: APCI((data[1]&3)<<2 | data[2]>>6),
Data: make([]byte, dataLength),
}
copy(app.Data, data[2:])
app.Data[0] &= 63
*unit = app
return uint(dataLength) + 2, nil
} | knx/cemi/tpdu.go | 0.677261 | 0.420124 | tpdu.go | starcoder |
package rewrite
import (
"fmt"
"reflect"
"github.com/monnoroch/go-inject"
)
// AnnotationsMapping maps annotations to be replaced to the annotations
// that replace them.
type AnnotationsMapping map[inject.Annotation]inject.Annotation
// RewriteAnnotations generates a module that takes all of the input module's
// providers and replaces the annotations they use according to the
// annotationsToRewrite map.
func RewriteAnnotations(
	module inject.Module,
	annotationsToRewrite AnnotationsMapping,
) inject.DynamicModule {
	return rewriteAnnotationsModule{
		module: module,
		annotationsToRewrite: annotationsToRewrite,
	}
}
// rewriteAnnotationsModule is the dynamic module returned by
// RewriteAnnotations; it lazily rewrites the wrapped module's providers.
type rewriteAnnotationsModule struct {
	module               inject.Module
	annotationsToRewrite AnnotationsMapping
}
func (self rewriteAnnotationsModule) Providers() ([]inject.Provider, error) {
providers, err := inject.Providers(self.module)
if err != nil {
return nil, err
}
annotationsToRewrite := map[reflect.Type]reflect.Type{}
for from, to := range self.annotationsToRewrite {
annotationsToRewrite[reflect.TypeOf(from)] = reflect.TypeOf(to)
}
newProviders := make([]inject.Provider, len(providers))
for index, provider := range providers {
if !provider.IsValid() {
return nil, fmt.Errorf("invalid provider %v", provider)
}
function := provider.Function()
functionType := function.Type()
providerArgumentTypes := make([]reflect.Type, functionType.NumIn())
for inputIndex := 0; inputIndex < functionType.NumIn(); inputIndex += 2 {
annotationType := functionType.In(inputIndex + 1)
if rewrittenType, ok := annotationsToRewrite[annotationType]; ok {
annotationType = rewrittenType
}
providerArgumentTypes[inputIndex] = functionType.In(inputIndex)
providerArgumentTypes[inputIndex+1] = annotationType
}
valueType := functionType.Out(0)
annotationType := functionType.Out(1)
if rewrittenType, ok := annotationsToRewrite[annotationType]; ok {
annotationType = rewrittenType
}
returnTypes := []reflect.Type{valueType, annotationType}
// Provider with an error.
if functionType.NumOut() == 3 {
returnTypes = append(returnTypes, reflect.TypeOf((*error)(nil)).Elem())
}
newProviders[index] = inject.NewProvider(reflect.MakeFunc(
reflect.FuncOf(
providerArgumentTypes,
returnTypes,
false,
),
func(arguments []reflect.Value) []reflect.Value {
newArguments := make([]reflect.Value, functionType.NumIn())
for inputIndex := 0; inputIndex < functionType.NumIn(); inputIndex += 2 {
newArguments[inputIndex] = arguments[inputIndex]
newArguments[inputIndex+1] = reflect.Zero(functionType.In(inputIndex + 1))
}
resutls := function.Call(newArguments)
resutls[1] = reflect.Zero(annotationType)
return resutls
},
)).Cached(provider.IsCached())
}
return newProviders, nil
} | rewrite/annotations.go | 0.775477 | 0.417568 | annotations.go | starcoder |
package geom
import (
"fmt"
"regexp"
"strconv"
"strings"
)
// Vec2 is a two-element integer vector.
type Vec2 struct {
	X int
	Y int
}
// Vec3 is a three-element integer vector.
type Vec3 struct {
	X int
	Y int
	Z int
}
// Vec4 is a four-element integer vector. Note that W is listed first.
type Vec4 struct {
	W int
	X int
	Y int
	Z int
}
// String formats the vector as "(x,y)".
func (v Vec2) String() string {
	return "(" + strconv.Itoa(v.X) + "," + strconv.Itoa(v.Y) + ")"
}
// String formats the vector as "(x,y,z)".
func (v Vec3) String() string {
	return "(" + strconv.Itoa(v.X) + "," + strconv.Itoa(v.Y) + "," + strconv.Itoa(v.Z) + ")"
}
// Abs returns the same vector, but with negative coordinates replaced by
// their positive values.
func (v Vec3) Abs() Vec3 {
	abs := func(n int) int {
		if n < 0 {
			return -n
		}
		return n
	}
	return Vec3{abs(v.X), abs(v.Y), abs(v.Z)}
}
// Sum returns x+y+z.
func (v Vec3) Sum() int {
	return v.X + v.Y + v.Z
}
// AbsSum returns |x| + |y| + |z| (the L1 norm, a.k.a. Manhattan length).
func (v Vec3) AbsSum() int {
	return v.Abs().Sum()
}
// Add adds two vectors component-wise, returning the result.
func (v Vec3) Add(w Vec3) Vec3 {
	return Vec3{v.X + w.X, v.Y + w.Y, v.Z + w.Z}
}
// Sub subtracts a vector from this one component-wise, returning the result.
func (v Vec3) Sub(w Vec3) Vec3 {
	return Vec3{v.X - w.X, v.Y - w.Y, v.Z - w.Z}
}
// Neg negates a vector.
func (v Vec3) Neg() Vec3 {
	return Vec3{-v.X, -v.Y, -v.Z}
}
// Sgn replaces each element of the vector with -1, 0, or 1, depending on
// its sign.
func (v Vec3) Sgn() Vec3 {
	sgn := func(n int) int {
		switch {
		case n < 0:
			return -1
		case n > 0:
			return 1
		}
		return 0
	}
	return Vec3{sgn(v.X), sgn(v.Y), sgn(v.Z)}
}
// Mul returns the vector multiplied by a scalar.
func (v Vec3) Mul(factor int) Vec3 {
	return Vec3{X: v.X * factor, Y: v.Y * factor, Z: v.Z * factor}
}
// vec3regex matches the textual form "<x=…, y=…, z=…>" with signed integer
// coordinates and optional spaces after the commas.
var vec3regex = regexp.MustCompile(`<x=(-?[0-9]+), *y=(-?[0-9]+), *z=(-?[0-9]+)>`)
// ParseVec3 parses a string vec3 in format "<x=17,y=42,z=-1>".
// Surrounding whitespace is ignored; an error names the offending
// coordinate or reports the whole string as unparseable.
func ParseVec3(s string) (Vec3, error) {
	s = strings.TrimSpace(s)
	parts := vec3regex.FindStringSubmatch(s)
	if parts == nil {
		return Vec3{}, fmt.Errorf("ParseVec3: weird input: %q", s)
	}
	var v Vec3
	var err error
	if v.X, err = strconv.Atoi(parts[1]); err != nil {
		return Vec3{}, fmt.Errorf("cannot parse x coordinate %q (in vector %q)", parts[1], s)
	}
	if v.Y, err = strconv.Atoi(parts[2]); err != nil {
		return Vec3{}, fmt.Errorf("cannot parse y coordinate %q (in vector %q)", parts[2], s)
	}
	if v.Z, err = strconv.Atoi(parts[3]); err != nil {
		return Vec3{}, fmt.Errorf("cannot parse z coordinate %q (in vector %q)", parts[3], s)
	}
	return v, nil
}
// ParseVec3Lines parses Vec3s, one per line, stopping at the first line
// that fails to parse.
func ParseVec3Lines(s string) ([]Vec3, error) {
	lines := strings.Split(strings.TrimSpace(s), "\n")
	result := make([]Vec3, 0, len(lines))
	for _, line := range lines {
		v, err := ParseVec3(line)
		if err != nil {
			return nil, err
		}
		result = append(result, v)
	}
	return result, nil
}
// Hash3 computes a cheap, order-sensitive hash of a slice of Vec3s
// (polynomial accumulation with multiplier 31; not cryptographic).
func Hash3(vecs []Vec3) uint {
	h := uint(1)
	for _, v := range vecs {
		for _, c := range [3]int{v.X, v.Y, v.Z} {
			h = h*31 + uint(c)
		}
	}
	return h
}
// Abs returns the same vector, but with negative coordinates replaced by
// their positive values.
func (v Vec2) Abs() Vec2 {
	abs := func(n int) int {
		if n < 0 {
			return -n
		}
		return n
	}
	return Vec2{abs(v.X), abs(v.Y)}
}
// Sum returns x+y.
func (v Vec2) Sum() int {
	return v.X + v.Y
}
// AbsSum returns |x| + |y| (the L1 norm, a.k.a. Manhattan length).
func (v Vec2) AbsSum() int {
	return v.Abs().Sum()
}
// Add adds two vectors component-wise, returning the result.
func (v Vec2) Add(w Vec2) Vec2 {
	return Vec2{v.X + w.X, v.Y + w.Y}
}
// Sub subtracts a vector from this one component-wise, returning the result.
func (v Vec2) Sub(w Vec2) Vec2 {
	return Vec2{v.X - w.X, v.Y - w.Y}
}
// Neg negates a vector.
func (v Vec2) Neg() Vec2 {
	return Vec2{-v.X, -v.Y}
}
// Sgn replaces each element of the vector with -1, 0, or 1, depending on
// its sign.
func (v Vec2) Sgn() Vec2 {
	sgn := func(n int) int {
		switch {
		case n < 0:
			return -1
		case n > 0:
			return 1
		}
		return 0
	}
	return Vec2{sgn(v.X), sgn(v.Y)}
}
// Mul returns the vector multiplied by a scalar.
func (v Vec2) Mul(factor int) Vec2 {
	return Vec2{X: v.X * factor, Y: v.Y * factor}
}
// Min2 returns the component-wise minimum of two vectors in both X and Y.
func Min2(a, b Vec2) Vec2 {
	r := a
	if b.X < r.X {
		r.X = b.X
	}
	if b.Y < r.Y {
		r.Y = b.Y
	}
	return r
}
// Max2 returns the component-wise maximum of two vectors in both X and Y.
// (The previous comment incorrectly said "minimum".)
func Max2(a, b Vec2) Vec2 {
	if a.X < b.X {
		a.X = b.X
	}
	if a.Y < b.Y {
		a.Y = b.Y
	}
	return a
}
// Add adds two vectors component-wise, returning the result.
func (v Vec4) Add(w Vec4) Vec4 {
	return Vec4{v.W + w.W, v.X + w.X, v.Y + w.Y, v.Z + w.Z}
}
// Dirs4 are the four axis-aligned length-1 Vec2 offsets,
// in the order (0,-1), (1,0), (0,1), (-1,0).
var Dirs4 = []Vec2{
	{0, -1},
	{1, 0},
	{0, 1},
	{-1, 0},
}
// Neighbors4 returns the four orthogonally adjacent positions of a Vec2
// position, in the order (-1,0), (0,+1), (+1,0), (0,-1) relative to pos.
func Neighbors4(pos Vec2) []Vec2 {
	deltas := [4]Vec2{{-1, 0}, {0, 1}, {1, 0}, {0, -1}}
	out := make([]Vec2, 0, len(deltas))
	for _, d := range deltas {
		out = append(out, Vec2{pos.X + d.X, pos.Y + d.Y})
	}
	return out
}
// Dirs8 are the eight unit offsets (orthogonal and diagonal) from a Vec2
// to its neighbors, starting at (0,-1) and going around the ring.
var Dirs8 = []Vec2{
	{0, -1},
	{1, -1},
	{1, 0},
	{1, 1},
	{0, 1},
	{-1, 1},
	{-1, 0},
	{-1, -1},
}
// Neighbors8 returns the eight orthogonally and diagonally adjacent
// positions of a Vec2 position, in the same ring order as Dirs8.
func Neighbors8(pos Vec2) []Vec2 {
	deltas := [8]Vec2{
		{0, -1}, {1, -1}, {1, 0}, {1, 1},
		{0, 1}, {-1, 1}, {-1, 0}, {-1, -1},
	}
	out := make([]Vec2, 0, len(deltas))
	for _, d := range deltas {
		out = append(out, Vec2{pos.X + d.X, pos.Y + d.Y})
	}
	return out
}
// Dirs6 are the six axis-aligned length-1 Vec3 offsets.
// (The previous comment incorrectly said "four cardinal".)
var Dirs6 = []Vec3{
	{0, -1, 0},
	{1, 0, 0},
	{0, 1, 0},
	{-1, 0, 0},
	{0, 0, -1},
	{0, 0, 1},
}
// Neighbors6 returns the six orthogonally adjacent positions of a Vec3
// position (note: its order differs from Dirs6).
func Neighbors6(pos Vec3) []Vec3 {
	deltas := [6]Vec3{
		{-1, 0, 0}, {0, 1, 0}, {1, 0, 0},
		{0, -1, 0}, {0, 0, -1}, {0, 0, 1},
	}
	out := make([]Vec3, 0, len(deltas))
	for _, d := range deltas {
		out = append(out, Vec3{pos.X + d.X, pos.Y + d.Y, pos.Z + d.Z})
	}
	return out
}
// Dirs26 are the 26 unit offsets from a Vec3 to its neighbors: the z=0
// ring of 8, then the 9 offsets at z=-1, then the 9 offsets at z=+1.
var Dirs26 = []Vec3{
	{0, -1, 0},
	{1, -1, 0},
	{1, 0, 0},
	{1, 1, 0},
	{0, 1, 0},
	{-1, 1, 0},
	{-1, 0, 0},
	{-1, -1, 0},
	{0, 0, -1},
	{0, -1, -1},
	{1, -1, -1},
	{1, 0, -1},
	{1, 1, -1},
	{0, 1, -1},
	{-1, 1, -1},
	{-1, 0, -1},
	{-1, -1, -1},
	{0, 0, 1},
	{0, -1, 1},
	{1, -1, 1},
	{1, 0, 1},
	{1, 1, 1},
	{0, 1, 1},
	{-1, 1, 1},
	{-1, 0, 1},
	{-1, -1, 1},
}
// Neighbors26 returns the 26 orthogonally and diagonally adjacent positions
// of a Vec3 position, in the same order as Dirs26: the ring at dz=0, then
// (0,0,-1) and its ring, then (0,0,+1) and its ring.
func Neighbors26(pos Vec3) []Vec3 {
	ring := [8][2]int{
		{0, -1}, {1, -1}, {1, 0}, {1, 1},
		{0, 1}, {-1, 1}, {-1, 0}, {-1, -1},
	}
	out := make([]Vec3, 0, 26)
	for _, dz := range [3]int{0, -1, 1} {
		if dz != 0 {
			out = append(out, Vec3{pos.X, pos.Y, pos.Z + dz})
		}
		for _, d := range ring {
			out = append(out, Vec3{pos.X + d[0], pos.Y + d[1], pos.Z + dz})
		}
	}
	return out
}
// Dirs80 are the 80 (3^4 - 1) nonzero unit offsets from a Vec4 to its
// neighbors; populated by init below.
var Dirs80 []Vec4
func init() {
for w := -1; w <= 1; w++ {
for x := -1; x <= 1; x++ {
for y := -1; y <= 1; y++ {
for z := -1; z <= 1; z++ {
if w == 0 && x == 0 && y == 0 && z == 0 {
continue
}
Dirs80 = append(Dirs80, Vec4{W: w, X: x, Y: y, Z: z})
}
}
}
}
}
// Neighbors80 returns the 80 orthogonally and diagonally adjacent positions of a Vec4 position.
func Neighbors80(pos Vec4) []Vec4 {
result := make([]Vec4, 80)
for i, v := range Dirs80 {
result[i] = v.Add(pos)
}
return result
} | geom/geom.go | 0.870115 | 0.440048 | geom.go | starcoder |
package slice
import "errors"
// MinByte returns the minimum value of a byte slice or an error in case of
// a nil or empty slice.
func MinByte(a []byte) (byte, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinFloat32 returns the minimum value of a float32 slice or an error in
// case of a nil or empty slice.
func MinFloat32(a []float32) (float32, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinFloat64 returns the minimum value of a float64 slice or an error in
// case of a nil or empty slice.
func MinFloat64(a []float64) (float64, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinInt returns the minimum value of an int slice or an error in case of
// a nil or empty slice.
func MinInt(a []int) (int, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinInt8 returns the minimum value of an int8 slice or an error in case
// of a nil or empty slice.
func MinInt8(a []int8) (int8, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinInt16 returns the minimum value of an int16 slice or an error in case
// of a nil or empty slice.
func MinInt16(a []int16) (int16, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinInt32 returns the minimum value of an int32 slice or an error in case
// of a nil or empty slice.
func MinInt32(a []int32) (int32, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinInt64 returns the minimum value of an int64 slice or an error in case
// of a nil or empty slice.
func MinInt64(a []int64) (int64, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinRune returns the minimum value of a rune slice or an error in case of
// a nil or empty slice.
func MinRune(a []rune) (rune, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUint returns the minimum value of a uint slice or an error in case of
// a nil or empty slice.
func MinUint(a []uint) (uint, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUint8 returns the minimum value of a uint8 slice or an error in case
// of a nil or empty slice.
func MinUint8(a []uint8) (uint8, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUint16 returns the minimum value of a uint16 slice or an error in
// case of a nil or empty slice.
func MinUint16(a []uint16) (uint16, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUint32 returns the minimum value of a uint32 slice or an error in
// case of a nil or empty slice.
func MinUint32(a []uint32) (uint32, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUint64 returns the minimum value of a uint64 slice or an error in
// case of a nil or empty slice.
func MinUint64(a []uint64) (uint64, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
// MinUintptr returns the minimum value of a uintptr slice or an error in
// case of a nil or empty slice.
func MinUintptr(a []uintptr) (uintptr, error) {
	if len(a) == 0 {
		return 0, errors.New("Cannot get the minimum of a nil or empty slice")
	}
	m := a[0]
	for _, v := range a[1:] {
		if v < m {
			m = v
		}
	}
	return m, nil
}
package proverb
import (
"math/rand"
"time"
)
// Proverb represents a particular proverb with a corresponding link to learn
// more.
type Proverb struct {
	Link    string `json:"link"`    // URL with more context (usually a talk timestamp)
	Content string `json:"content"` // the proverb text itself
}
// NewProverbStore initializes a ProverbStore.
func NewInMemProverbStore() *InMemProverbStore {
rand.Seed(time.Now().Unix())
return &InMemProverbStore{
proverbs: []Proverb{
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=2m48s",
Content: "Don't communicate by sharing memory, share memory by communicating.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=3m42s",
Content: "Concurrency is not parallelism.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=4m20s",
Content: "Channels orchestrate; mutexes serialize.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=5m17s",
Content: "The bigger the interface, the weaker the abstraction.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=6m25s",
Content: "Make the zero value useful.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=7m36s",
Content: "interface{} says nothing.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=8m43s",
Content: "Gofmt's style is no one's favorite, yet gofmt is everyone's favorite.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=9m28s",
Content: "A little copying is better than a little dependency.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=11m10s",
Content: "Syscall must always be guarded with build tags.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=11m53s",
Content: "Cgo must always be guarded with build tags.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=12m37s",
Content: "Cgo is not Go.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=13m49s",
Content: "With the unsafe package there are no guarantees.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=14m35s",
Content: "Clear is better than clever.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=15m22s",
Content: "Reflection is never clear.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=16m13s",
Content: "Errors are values.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=17m25s",
Content: "Don't just check errors, handle them gracefully.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=18m09s",
Content: "Design the architecture, name the components, document the details.",
},
{
Link: "https://www.youtube.com/watch?v=PAAkCSZUG1c&t=19m07s",
Content: "Documentation is for users.",
},
{
Link: "https://github.com/golang/go/wiki/CodeReviewComments#dont-panic",
Content: "Don't panic.",
},
},
}
}
// InMemProverbStore provides in-memory access to the Go Proverbs.
type InMemProverbStore struct {
	proverbs []Proverb
}
// Random returns a uniformly random proverb, using the global math/rand
// source (seeded in NewInMemProverbStore).
func (p *InMemProverbStore) Random() Proverb {
	return p.proverbs[rand.Intn(len(p.proverbs))]
}
package main
import "fmt"
func main() {
	// Unlike arrays, slices are typed only by the elements they contain
	// (not the number of elements). To create a zero-valued slice, use
	// the builtin `make`. Here we make a slice of `string`s of length `3`
	// (initialized to zero values).
	s := make([]string, 3)
	fmt.Println("emp:", s)
	// We can set and get values just like with arrays.
	s[0] = "a"
	s[1] = "b"
	s[2] = "c"
	fmt.Println("set:", s)
	fmt.Println("get:", s[2])
	// `len` returns the length of the slice.
	fmt.Println("len:", len(s))
	// Beyond these basic operations, slices support many more that make
	// them richer than arrays. One is `append`, which returns a slice
	// containing one or more new values. Note that we need to assign the
	// return value of append, just as we would with a new slice value.
	s = append(s, "d")
	s = append(s, "e", "f")
	fmt.Println("apd:", s)
	// Slices can be copied with `copy`. Here we create an empty slice `c`
	// of the same length as `s` and copy the contents of `s` into `c`.
	c := make([]string, len(s))
	copy(c, s)
	fmt.Println("cpy:", c)
	// Slices support a range operator with the syntax `slice[low:high]`.
	// For example, this returns a slice of the elements `s[2]`, `s[3]`,
	// and `s[4]`.
	l := s[2:5]
	fmt.Println("sl1:", l)
	// This returns the elements up to (but excluding) `s[5]`.
	l = s[:5]
	fmt.Println("sl2:", l)
	// And this returns the elements from `s[2]` onward.
	l = s[2:]
	fmt.Println("sl3:", l)
	// We can declare and initialize a slice variable on a single line too.
	t := []string{"g", "h", "i"}
	fmt.Println("dcl:", t)
	// Slices can be composed into multi-dimensional structures. Unlike
	// with arrays, the length of the inner slices can vary.
	twoD := make([][]int, 3)
	for i := 0; i < 3; i++ {
		innerLen := i + 1
		twoD[i] = make([]int, innerLen)
		for j := 0; j < innerLen; j++ {
			twoD[i][j] = i + j
		}
	}
	fmt.Println("2d: ", twoD)
}
package zopfli
import (
"math"
)
// lengthsToSymbols converts a series of Huffman tree bit lengths to the
// canonical bit values of the symbols (RFC 1951, section 3.2.2). A length
// of zero means the symbol is unused and its value stays zero. It panics
// if any length exceeds maxBits.
func lengthsToSymbols(lengths []uint, maxBits uint) (symbols []uint) {
	n := len(lengths)
	// make zero-initializes these, so no explicit clearing loop is needed.
	blCount := make([]uint, maxBits+1)
	nextCode := make([]uint, maxBits+1)
	symbols = make([]uint, n)
	// 1) Count the number of codes for each code length.
	// Let blCount[N] be the number of codes of length N, N >= 1.
	for i := 0; i < n; i++ {
		if lengths[i] > maxBits {
			panic("length is too large")
		}
		blCount[lengths[i]]++
	}
	// 2) Find the numerical value of the smallest code for each code length.
	var code uint
	blCount[0] = 0
	for bits := uint(1); bits <= maxBits; bits++ {
		code = (code + blCount[bits-1]) << 1
		nextCode[bits] = code
	}
	// 3) Assign numerical values to all codes, using consecutive values for
	// all codes of the same length with the base values determined at step 2.
	for i := 0; i < n; i++ {
		// Named `l` to avoid shadowing the builtin len.
		if l := lengths[i]; l != 0 {
			symbols[i] = nextCode[l]
			nextCode[l]++
		}
	}
	return symbols
}
// CalculateEntropy returns, for each symbol, its theoretical bit length
// according to the entropy of the given symbol counts. The result is
// similar to CalculateBitLengths, but the values are fractional and so
// cannot encode a tree as specified by DEFLATE. A zero count is charged
// as if its count were 1, since a requested symbol will appear at least
// once anyway. Panics if a computed bit length is negative or NaN.
func CalculateEntropy(count []float64) (bitLengths []float64) {
	var sum float64
	for _, c := range count {
		sum += c
	}
	var log2sum float64
	if sum == 0 {
		log2sum = math.Log2(float64(len(count)))
	} else {
		log2sum = math.Log2(sum)
	}
	bitLengths = make([]float64, len(count))
	for i, c := range count {
		if c == 0 {
			bitLengths[i] = log2sum
		} else {
			bitLengths[i] = math.Log2(sum / c)
		}
		if !(bitLengths[i] >= 0) {
			panic("bit length is not positive")
		}
	}
	return bitLengths
}
package check
import (
"bufio"
"fmt"
_ "log"
)
// Err panics if the given error is non-nil; a nil error is a no-op.
// (The log.Fatal path mentioned previously is disabled in favor of panic.)
func Err(err error) {
	if err == nil {
		return
	}
	// log.Fatal(err)
	panic(err)
}
// Scanner panics if the given *bufio.Scanner stopped with an error
// (scanner.Err() non-nil); a clean EOF is a no-op.
func Scanner(scanner *bufio.Scanner) {
	err := scanner.Err()
	if err == nil {
		return
	}
	// log.Fatal(err)
	panic(err)
}
// Tint is a test type for int asserts.
type Tint int

// Assert checks that `result` equals `expected`, panicking with a
// descriptive message on mismatch.
func (result Tint) Assert(expected Tint) {
	if result == expected {
		return
	}
	panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
}
// Tfloat64 is a test type for float64 asserts.
type Tfloat64 float64

// Assert checks that `result` equals `expected`, panicking with a
// descriptive message on mismatch.
func (result Tfloat64) Assert(expected Tfloat64) {
	if result == expected {
		return
	}
	panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
}
// Tstring is a test type for string asserts.
type Tstring string

// Assert checks that `result` equals `expected`, panicking with a
// descriptive message on mismatch.
func (result Tstring) Assert(expected Tstring) {
	if result == expected {
		return
	}
	panic(fmt.Sprintf("Expected '%v', got '%v'\n", expected, result))
}
// TintL is a test type for []int asserts.
type TintL []int

// Assert checks that `result` equals `expected` element-wise, panicking
// with a descriptive message on any mismatch. A length mismatch now panics
// with the same message; previously expected[i] was indexed blindly, which
// produced a bare index-out-of-range panic when result was longer.
func (result TintL) Assert(expected TintL) {
	if len(result) != len(expected) {
		panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
	}
	for i, elem := range result {
		if elem != expected[i] {
			panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
		}
	}
}
// Tfloat64L is a test type for []float64 asserts.
type Tfloat64L []float64

// Assert checks that `result` equals `expected` element-wise, panicking
// with a descriptive message on any mismatch. A length mismatch now panics
// with the same message; previously expected[i] was indexed blindly, which
// produced a bare index-out-of-range panic when result was longer.
func (result Tfloat64L) Assert(expected Tfloat64L) {
	if len(result) != len(expected) {
		panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
	}
	for i, elem := range result {
		if elem != expected[i] {
			panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
		}
	}
}
// TstringL is a test type for []string asserts.
type TstringL []string

// Assert checks that `result` equals `expected` element-wise, panicking
// with a descriptive message on any mismatch. A length mismatch now panics
// with the same message; previously expected[i] was indexed blindly, which
// produced a bare index-out-of-range panic when result was longer.
func (result TstringL) Assert(expected TstringL) {
	if len(result) != len(expected) {
		panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
	}
	for i, elem := range result {
		if elem != expected[i] {
			panic(fmt.Sprintf("Expected %v, got %v\n", expected, result))
		}
	}
}
package cork
import (
"reflect"
"time"
)
// DecodeReflect decodes a reflect.Value value from the Reader, dispatching
// on the value's kind. A cNil marker in the stream is consumed and leaves
// v untouched. For struct decoding, the exported-field metadata is cached
// in the package-level cache `c`, keyed by reflect.Type.
// NOTE(review): most branches call v.Set*/Set, which panics unless v is
// settable (addressable) — callers appear to guarantee this; confirm.
func (r *Reader) DecodeReflect(v reflect.Value) {
	b := r.peekOne()
	if b == cNil {
		r.readOne()
		return
	}
	t := v.Type()
	k := v.Kind()
	// First let's check to see if this is
	// a nil pointer, and if it is then we
	// will create a new value for the
	// underlying type, and set the pointer
	// to this value.
	if v.Kind() == reflect.Ptr && v.IsNil() {
		n := reflect.New(t.Elem())
		v.Set(n)
	}
	// Next let's check to see if the type
	// implements either the Selfer or Corker
	// interfaces, and if it does then decode
	// it directly. Caching the interface
	// detection speeds up the decoding.
	if c.Selfable(t) {
		n := reflect.New(t.Elem())
		r.DecodeSelfer(n.Interface().(Selfer))
		v.Set(n)
		return
	}
	if c.Corkable(t) {
		n := reflect.New(t.Elem())
		r.DecodeCorker(n.Interface().(Corker))
		v.Set(n)
		return
	}
	// It wasn't a self describing interface
	// so let's now see if it is a string or
	// a byte slice, and if it is, then
	// decode it immediately.
	switch t {
	case typeStr:
		var x string
		r.DecodeString(&x)
		v.SetString(x)
		return
	case typeBit:
		var x []byte
		r.DecodeBytes(&x)
		v.SetBytes(x)
		return
	case typeTime:
		var x time.Time
		r.DecodeTime(&x)
		v.Set(reflect.ValueOf(x))
		return
	}
	// Otherwise let's switch over all of the
	// possible types that this item can be
	// and decode it into the correct type.
	// For structs, we will cache the struct
	// fields, so that we do not have to parse
	// these for every item that we process.
	switch k {
	case reflect.Ptr:
		v.Elem()
		r.DecodeReflect(v.Elem())
	case reflect.Map:
		r.decodeMap(v)
	case reflect.Slice:
		r.decodeArr(v)
	case reflect.Bool:
		var x bool
		r.DecodeBool(&x)
		v.SetBool(x)
	case reflect.String:
		var x string
		r.DecodeString(&x)
		v.SetString(x)
	case reflect.Int:
		var x int
		r.DecodeInt(&x)
		v.SetInt(int64(x))
	case reflect.Int8:
		var x int8
		r.DecodeInt8(&x)
		v.SetInt(int64(x))
	case reflect.Int16:
		var x int16
		r.DecodeInt16(&x)
		v.SetInt(int64(x))
	case reflect.Int32:
		var x int32
		r.DecodeInt32(&x)
		v.SetInt(int64(x))
	case reflect.Int64:
		var x int64
		r.DecodeInt64(&x)
		v.SetInt(x)
	case reflect.Uint:
		var x uint
		r.DecodeUint(&x)
		v.SetUint(uint64(x))
	case reflect.Uint8:
		var x uint8
		r.DecodeUint8(&x)
		v.SetUint(uint64(x))
	case reflect.Uint16:
		var x uint16
		r.DecodeUint16(&x)
		v.SetUint(uint64(x))
	case reflect.Uint32:
		var x uint32
		r.DecodeUint32(&x)
		v.SetUint(uint64(x))
	case reflect.Uint64:
		var x uint64
		r.DecodeUint64(&x)
		v.SetUint(x)
	case reflect.Float32:
		var x float32
		r.DecodeFloat32(&x)
		v.SetFloat(float64(x))
	case reflect.Float64:
		var x float64
		r.DecodeFloat64(&x)
		v.SetFloat(x)
	case reflect.Complex64:
		var x complex64
		r.DecodeComplex64(&x)
		v.SetComplex(complex128(x))
	case reflect.Complex128:
		var x complex128
		r.DecodeComplex128(&x)
		v.SetComplex(x)
	case reflect.Interface:
		var x interface{}
		r.DecodeInterface(&x)
		// A decoded nil yields an invalid reflect.Value, which must not be Set.
		if reflect.ValueOf(x).IsValid() {
			v.Set(reflect.ValueOf(x))
		}
	case reflect.Struct:
		// Build and cache the exported-field list for this struct type on
		// first use; newField returning nil skips unusable fields.
		if !c.Has(t) {
			tot := 0
			fls := make([]*field, t.NumField())
			for i := 0; i < t.NumField(); i++ {
				if f := newField(t.Field(i)); f != nil {
					fls[tot] = f
					tot++
				}
			}
			c.Set(t, fls[:tot])
		}
		x := c.Get(t)
		s := r.decodeMapLen()
		for i := 0; i < s; i++ {
			var k string
			r.DecodeString(&k)
			for _, f := range x {
				if k == f.Name() {
					if f := v.FieldByIndex(f.indx); f.CanSet() {
						if v.CanAddr() {
							r.DecodeReflect(f.Addr())
						} else {
							r.DecodeReflect(f)
						}
					}
					// NOTE(review): `continue` keeps scanning the remaining
					// fields after a match; `break` would be the expected
					// exit — behavior only differs if two fields share a name.
					continue
				}
			}
		}
	}
}
package ast
import "fmt"
// Inspect traverses the AST in depth-first order: it starts by calling
// f(node); node must be non-nil. If f returns true, Inspect invokes f
// recursively for each of the non-nil children of node, followed by a
// call of f(nil).
func Inspect(node Node, f func(Node) bool) {
	Walk(inspector(f), node)
}
// Visitor is the interface for visitors traversing the AST. The Visit
// method is called on every node encountered by Walk. If the returned
// visitor v is non-nil, Walk visits each of the children of node with
// the visitor v, followed by a call of v.Visit(nil).
type Visitor interface {
	Visit(node Node) (v Visitor)
}

// inspector adapts a boolean callback to the Visitor interface.
type inspector func(Node) bool

// Visit implements Visitor: it returns the receiver (continuing the
// traversal) while the callback reports true, and nil to stop descent.
func (f inspector) Visit(node Node) Visitor {
	if f(node) {
		return f
	}
	return nil
}
// Walk traverses an AST in depth-first order: it starts by calling
// v.Visit(node); node must be non-nil. If the visitor returned by
// v.Visit(node) is not nil, Walk is invoked recursively with that
// visitor for each of the non-nil children of node, followed by a
// call of v.Visit(nil).
//
// Walk panics if it encounters a node type it does not know about.
func Walk(v Visitor, node Node) {
	if v = v.Visit(node); v == nil {
		return
	}
	// Walk children; the case order below defines the traversal order
	// within each node type.
	switch n := node.(type) {
	case *Document:
		for _, def := range n.Defs {
			Walk(v, def)
		}
	case *OperationDefinition:
		if n.VarDefns != nil {
			Walk(v, n.VarDefns)
		}
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		Walk(v, n.SelSet)
	case *SelectionSet:
		for _, s := range n.Sels {
			Walk(v, s)
		}
	case *Field:
		if n.Als != nil {
			Walk(v, n.Als)
		}
		if n.Args != nil {
			Walk(v, n.Args)
		}
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		if n.SelSet != nil {
			Walk(v, n.SelSet)
		}
	case *Alias:
		// leaf node: no children to walk
	case *Arguments:
		for _, a := range n.Args {
			Walk(v, a)
		}
	case *Argument:
		Walk(v, n.Val)
	case *FragmentSpread:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
	case *InlineFragment:
		if n.TypeCond != nil {
			Walk(v, n.TypeCond)
		}
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		Walk(v, n.SelSet)
	case *FragmentDefinition:
		Walk(v, n.TypeCond)
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		Walk(v, n.SelSet)
	case *TypeCondition:
		Walk(v, n.NamedTyp)
	case *Variable, *LiteralValue, *NameValue:
		// leaf nodes: no children to walk
	case *ListValue:
		for _, val := range n.Vals {
			Walk(v, val)
		}
	case *ObjectValue:
		for _, obj := range n.ObjFields {
			Walk(v, obj)
		}
	case *ObjectField:
		Walk(v, n.Val)
	case *VariableDefinitions:
		for _, vd := range n.VarDefns {
			Walk(v, vd)
		}
	case *VariableDefinition:
		Walk(v, n.Var)
		Walk(v, n.Typ)
		if n.DeflVal != nil {
			Walk(v, n.DeflVal)
		}
	case *DefaultValue:
		Walk(v, n.Val)
	case *NamedType:
		// leaf node: no children to walk
	case *ListType:
		Walk(v, n.Typ)
	case *Directives:
		for _, d := range n.Directs {
			Walk(v, d)
		}
	case *Directive:
		if n.Args != nil {
			Walk(v, n.Args)
		}
	case *Schema:
		for _, iface := range n.Interfaces {
			Walk(v, iface)
		}
		for _, scalar := range n.Scalars {
			Walk(v, scalar)
		}
		for _, input := range n.InputObjects {
			Walk(v, input)
		}
		for _, typ := range n.Types {
			Walk(v, typ)
		}
		for _, extend := range n.Extends {
			Walk(v, extend)
		}
		for _, direct := range n.Directives {
			Walk(v, direct)
		}
		for _, schema := range n.Schemas {
			Walk(v, schema)
		}
		for _, enum := range n.Enums {
			Walk(v, enum)
		}
		for _, union := range n.Unions {
			Walk(v, union)
		}
	case *InterfaceDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		for _, fd := range n.FieldDefns {
			Walk(v, fd)
		}
	case *FieldDefinition:
		if n.ArgDefns != nil {
			Walk(v, n.ArgDefns)
		}
		Walk(v, n.Typ)
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
	case *ArgumentsDefinition:
		for _, input := range n.InputValDefns {
			Walk(v, input)
		}
	case *InputValueDefinition:
		Walk(v, n.Typ)
		if n.DeflVal != nil {
			Walk(v, n.DeflVal)
		}
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
	case *ScalarDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
	case *InputObjectDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		for _, input := range n.InputValDefns {
			Walk(v, input)
		}
	case *TypeDefinition:
		if n.Implements != nil {
			Walk(v, n.Implements)
		}
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		for _, fd := range n.FieldDefns {
			Walk(v, fd)
		}
	case *ImplementsInterfaces:
		for _, namdTyp := range n.NamedTyps {
			Walk(v, namdTyp)
		}
	case *ExtendDefinition:
		if n.TypDefn != nil {
			Walk(v, n.TypDefn)
		}
	case *DirectiveDefinition:
		if n.Args != nil {
			Walk(v, n.Args)
		}
		if n.Locs != nil {
			Walk(v, n.Locs)
		}
	case *DirectiveLocations:
		for _, l := range n.Locs {
			Walk(v, l)
		}
	case *DirectiveLocation:
		// leaf node: no children to walk
	case *SchemaDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		for _, o := range n.OperDefns {
			Walk(v, o)
		}
	case *OperationTypeDefinition:
		if n.NamedTyp != nil {
			Walk(v, n.NamedTyp)
		}
	case *EnumDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		for _, e := range n.EnumVals {
			Walk(v, e)
		}
	case *EnumValue:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
	case *UnionDefinition:
		if n.Directs != nil {
			Walk(v, n.Directs)
		}
		if n.Members != nil {
			Walk(v, n.Members)
		}
	case *UnionMembers:
		if n.NamedTyp != nil {
			Walk(v, n.NamedTyp)
		}
		for _, m := range n.Members {
			Walk(v, m)
		}
	case *UnionMember:
		if n.NamedTyp != nil {
			Walk(v, n.NamedTyp)
		}
	default:
		panic(fmt.Sprintf("ast.Walk: unexpected node type %T", n))
	}
	// Signal to the visitor that all children of node have been visited.
	v.Visit(nil)
}
* This source code is part of the near-RT RIC (RAN Intelligent Controller)
* platform project (RICP).
*/
/*
Package sdlgo provides a lightweight, high-speed interface for accessing shared data storage.
Shared Data Layer (SDL) is a concept where applications can use and share data using a common
storage. The storage must be optimised for very high transactional throughput and very low
latency. Sdlgo is a library which provides applications an API to read and write data
to a common storage using key-value paradigm. In addition to this, sdlgo provides an
event mechanism that can be used to notify listeners that data was changed.
This SDL version assumes that the DBAAS service provided by O-RAN community is
working as a storage backend.
All functions except receiving of notifications are safe for concurrent usage by
multiple goroutines.
Namespace
Namespace concept in a shared data layer connection is to isolate data write and read operations
to happen within particular namespace.
SDL instance
There are two ways to create SDL instance, the first preferable option is to create so called
SDL multi-namespace instance with `sdlgo.NewSyncStorage` call. The second option is to create
SDL instance with `sdlgo.NewSdlInstance` call. Latter SDL instance creation method is deprecated
and it should not be used anymore in any new application implementations, it is left to SDL API
to guarantee backward compatibility for the old application implementations.
The difference between multi-namespace `SyncStorage` SDL instance and the old one is that in
`SyncStorage` case namespace is not defined at instance creation time, but it is defined when
SDL read and write APIs are called. This means that with SDL `SyncStorage` instance it is much
easier to write and read data from different namespaces in a single application client compared
to the old SDL API where you needed to create own SDL instance for each namespace going to be
used later to write and read data.
Database connection
When `SyncStorage` instance is created, it also creates database backend connection, this means
that sdlgo shall open a tcp connection to backend database. Below is example how to create SDL
`SyncStorage` instance and what also connects to database backend under the hood:
sdl := sdlgo.NewSyncStorage()
For the backend database connection a circuit breaker design is used. If the connection fails,
an error is returned immediately to application. Restoration of the connection happens
automatically by SDL and application should retry the operation again after a while.
Database service is discovered by using DBAAS* environment variables. For simple standalone
DBAAS case there are needed two environment variables: DBAAS_SERVICE_HOST and
DBAAS_SERVICE_PORT. If not set, localhost and port 6379 are used by default.
Keys and data
Clients save key-value pairs. Keys are always strings. The types of the values must be of a basic
type, i.e. string, integer or byte array or slice. This means that the internal structures, like
protobufs or JSON objects, must be serialised to a byte array or slice before passing it to SDL.
Clients are responsible for managing the keys within a namespace.
Some examples on how to set the data using different kind of input parameters:
Basic usage, keys and values are given as own parameters (with mixed data types)
	err := s.Set("example-namespace", "key1", "value1", "key2", 2)
Keys and values inside a slice (again with mixed types, thus empty interface used as a type)
	exampleSlice := []interface{}{"key1", "value1", "key2", 2}
	err := s.Set("example-namespace", exampleSlice)
Data stored to a byte array
	data := make([]byte, 3)
data[0] = 1
data[1] = 2
data[2] = 3
s.Set("key", data)
Keys and values stored into a map (byte array "data" used from previous example)
	mapData := map[string]interface{}{
"key1" : "data",
"key2" : 2,
"key3" : data,
}
When data is read from SDL storage, a map is returned where the requested key works as map key.
If the key was not found, the value for the given key is nil. It is possible to request several
key with one Get() call.
Groups
SDL groups are unordered collections of members where each member is unique. Using the SDL API
it is possible to add/remove members from a group, remove the whole group or do queries like the
size of a group and if member belongs to a group. Like key-value storage, groups are per namespace.
Events
Events are a publish-subscribe pattern to indicate interested parties that there has been a change
in data. Delivery of the events are not guaranteed. In SDL, events are happening via channels and
channels are per namespace. It is possible to publish several kinds of events through one channel.
In order to publish changes to SDL data, the publisher need to call an API function that supports
publishing. E.g.
err := sdl.SetAndPublish("example-namespace", []string{
"channel1", "event1", "channel2", "event2"}, "key", "value",
)
This example will publish event1 to channel1 and event2 in channel2 after writing the data.
When subscribing the channels, the application needs to first create an SDL instance for the desired
namespace. The subscription happens using the SubscribeChannel() API function. The parameters for
the function takes a callback function and one or many channels to be subscribed. When an event is
received for the given channel, the given callback function shall be called with one or many events.
It is possible to make several subscriptions for different channels using different callback
functions if different kind of handling is required.
sdl := sdlgo.NewSyncStorage()
cb1 := func(channel string, event ...string) {
fmt.Printf("cb1: Received %s from channel %s\n", event, channel)
}
cb2 := func(channel string, event ...string) {
fmt.Printf("cb2: Received %s from channel %s\n", event, channel)
}
sdl.SubscribeChannel("example-namespace", cb1, "channel1", "channel2")
sdl.SubscribeChannel("example-namespace", cb2, "channel3")
This example subscribes three channels from "example-namespace" and assigns cb1 for channel1 and channel2
whereas channel3 is assigned to cb2.
The callbacks are called from a context of a goroutine that is listening for the events. When
application receives events, the preferred way to do the required processing of an event (e.g. read
from SDL) is to do it in another context, e.g. by triggering applications own goroutine using Go
channels. By doing like this, blocking the receive routine and possibly loosing events, can be
avoided.
*/
package sdlgo | doc.go | 0.822225 | 0.735855 | doc.go | starcoder |
package metric
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Millimeter
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// MMToCM converts millimeters to centimeters.
func MMToCM(v Millimeter) Centimeter {
	return Centimeter(v * 0.1)
}

// MMToM converts millimeters to meters.
func MMToM(v Millimeter) Meter {
	return Meter(v * 0.001)
}

// MMToKM converts millimeters to kilometers.
func MMToKM(v Millimeter) Kilometer {
	return Kilometer(v * 1.0E-6)
}

// MMToIN converts millimeters to inches.
func MMToIN(v Millimeter) Inch {
	return Inch(v * 0.039370078740157)
}

// MMToFT converts millimeters to feet.
func MMToFT(v Millimeter) Foot {
	return Foot(v * 0.0032808398950131)
}

// MMToYD converts millimeters to yards.
func MMToYD(v Millimeter) Yard {
	return Yard(v * 0.0010936132983377)
}

// MMToMI converts millimeters to miles.
func MMToMI(v Millimeter) Mile {
	return Mile(v * 6.2137119223733E-7)
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Centimeters
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// CMToMM converts centimeters to millimeters.
func CMToMM(cm Centimeter) Millimeter { return Millimeter(cm * 10) }

// CMToM converts centimeters to meters.
// Fixed: 1 cm = 0.01 m. The previous factor (0.001) was the mm→m factor,
// inconsistent with MToCM's factor of 100 elsewhere in this package.
func CMToM(cm Centimeter) Meter { return Meter(cm * 0.01) }

// CMToKM converts centimeters to kilometers.
// Fixed: 1 cm = 1.0E-5 km. The previous factor (1.0E-6) was the mm→km
// factor, inconsistent with KMToCM's factor of 100000.
func CMToKM(cm Centimeter) Kilometer { return Kilometer(cm * 1.0E-5) }

// CMToIN converts centimeters to inches.
func CMToIN(cm Centimeter) Inch { return Inch(cm * 0.39370078740157) }

// CMToFT converts centimeters to feet.
func CMToFT(cm Centimeter) Foot { return Foot(cm * 0.032808398950131) }

// CMToYD converts centimeters to yards.
func CMToYD(cm Centimeter) Yard { return Yard(cm * 0.010936132983377) }

// CMToMI converts centimeters to miles.
func CMToMI(cm Centimeter) Mile { return Mile(cm * 6.2137119223733E-6) }
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Meters
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// MToMM converts meters to millimeters.
func MToMM(v Meter) Millimeter {
	return Millimeter(v * 1000)
}

// MToCM converts meters to centimeters.
func MToCM(v Meter) Centimeter {
	return Centimeter(v * 100)
}

// MToKM converts meters to kilometers.
func MToKM(v Meter) Kilometer {
	return Kilometer(v * 0.001)
}

// MToIN converts meters to inches.
func MToIN(v Meter) Inch {
	return Inch(v * 39.370078740157)
}

// MToFT converts meters to feet.
func MToFT(v Meter) Foot {
	return Foot(v * 3.2808398950131)
}

// MToYD converts meters to yards.
func MToYD(v Meter) Yard {
	return Yard(v * 1.0936132983377)
}

// MToMI converts meters to miles.
func MToMI(v Meter) Mile {
	return Mile(v * 0.00062137119223733)
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Kilometers
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// KMToMM converts kilometers to millimeters.
func KMToMM(km Kilometer) Millimeter { return Millimeter(km * 1000000) }

// KMToCM converts kilometers to centimeters.
func KMToCM(km Kilometer) Centimeter { return Centimeter(km * 100000) }

// KMToM converts kilometers to meters.
func KMToM(km Kilometer) Meter { return Meter(km * 1000) }

// KMToIN converts kilometers to inches.
// Factor is 1000 × the m→in factor used by MToIN (previously truncated to 39370).
func KMToIN(km Kilometer) Inch { return Inch(km * 39370.078740157) }

// KMToFT converts kilometers to feet.
// Factor is 1000 × the m→ft factor used by MToFT (previously truncated to 3281).
func KMToFT(km Kilometer) Foot { return Foot(km * 3280.8398950131) }

// KMToYD converts kilometers to yards.
// Fixed: 1 km = 1093.6132983377 yd (1000 × the m→yd factor used by MToYD).
// The previous constant, 1072.666666666667, was simply wrong.
func KMToYD(km Kilometer) Yard { return Yard(km * 1093.6132983377) }

// KMToMI converts kilometers to miles.
// Factor is 1000 × the m→mi factor used by MToMI (previously rounded to 0.6214).
func KMToMI(km Kilometer) Mile { return Mile(km * 0.62137119223733) }
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Inches
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// INToMM converts inches to millimeters.
func INToMM(v Inch) Millimeter {
	return Millimeter(v * 25.4)
}

// INToCM converts inches to centimeters.
func INToCM(v Inch) Centimeter {
	return Centimeter(v * 2.54)
}

// INToM converts inches to meters.
func INToM(v Inch) Meter {
	return Meter(v * 0.0254)
}

// INToKM converts inches to kilometers.
func INToKM(v Inch) Kilometer {
	return Kilometer(v * 2.54E-5)
}

// INToFT converts inches to feet.
func INToFT(v Inch) Foot {
	return Foot(v * 0.083333333333333)
}

// INToYD converts inches to yards.
func INToYD(v Inch) Yard {
	return Yard(v * 0.027777777777778)
}

// INToMI converts inches to miles.
func INToMI(v Inch) Mile {
	return Mile(v * 1.5782828282828E-5)
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Foot
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// FTToMM converts feet to millimeters.
func FTToMM(v Foot) Millimeter {
	return Millimeter(v * 304.8)
}

// FTToCM converts feet to centimeters.
func FTToCM(v Foot) Centimeter {
	return Centimeter(v * 30.48)
}

// FTToM converts feet to meters.
func FTToM(v Foot) Meter {
	return Meter(v * 0.3048)
}

// FTToKM converts feet to kilometers.
func FTToKM(v Foot) Kilometer {
	return Kilometer(v * 0.0003048)
}

// FTToIN converts feet to inches.
func FTToIN(v Foot) Inch {
	return Inch(v * 12)
}

// FTToYD converts feet to yards.
func FTToYD(v Foot) Yard {
	return Yard(v * 0.33333333333333)
}

// FTToMI converts feet to miles.
func FTToMI(v Foot) Mile {
	return Mile(v * 0.00018939393939394)
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Yard
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// YDToMM converts yards to millimeters.
func YDToMM(v Yard) Millimeter {
	return Millimeter(v * 914.4)
}

// YDToCM converts yards to centimeters.
func YDToCM(v Yard) Centimeter {
	return Centimeter(v * 91.44)
}

// YDToM converts yards to meters.
func YDToM(v Yard) Meter {
	return Meter(v * 0.9144)
}

// YDToKM converts yards to kilometers.
func YDToKM(v Yard) Kilometer {
	return Kilometer(v * 0.0009144)
}

// YDToIN converts yards to inches.
func YDToIN(v Yard) Inch {
	return Inch(v * 36)
}

// YDToFT converts yards to feet.
func YDToFT(v Yard) Foot {
	return Foot(v * 3)
}

// YDToMI converts yards to miles.
func YDToMI(v Yard) Mile {
	return Mile(v * 0.00056818181818182)
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Mile
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// MIToMM converts miles to millimeters.
func MIToMM(mi Mile) Millimeter { return Millimeter(mi * 1609344) }
// MIToCM converts miles to centimeters.
func MIToCM(mi Mile) Centimeter { return Centimeter(mi * 160934.4) }
// MIToM converts miles to meters.
func MIToM(mi Mile) Meter { return Meter(mi * 1609.344) }
// MIToKM converts miles to kilometers.
func MIToKM(mi Mile) Kilometer { return Kilometer(mi * 1.609344) }
// MIToIN converts miles to inches.
func MIToIN(mi Mile) Inch { return Inch(mi * 63360) }
// MIToFT converts miles to feet.
func MIToFT(mi Mile) Foot { return Foot(mi * 5280) }
// MIToYD converts miles to yards.
func MIToYD(mi Mile) Yard { return Yard(mi * 1760) } | ex_02.02-unit_conversion/metric/length_func.go | 0.773131 | 0.437103 | length_func.go | starcoder |
package imagediff
import (
"errors"
"image"
"image/color"
)
// SimpleImageComparer considers pixels to be the same when their RGBA values are equal
type SimpleImageComparer struct {
	ignoreColor    *color.NRGBA // pixels of this color are skipped; only consulted when useignoreColor is true
	useignoreColor bool         // set to true by SetIgnoreColor; gates the ignoreColor check
	DiffColor      color.NRGBA  // color painted onto the output image where pixels differ
}
// NewSimpleImageComparer creates a new SimpleImageComparer whose
// difference highlight color defaults to opaque red.
func NewSimpleImageComparer() *SimpleImageComparer {
	c := &SimpleImageComparer{}
	c.DiffColor = color.NRGBA{R: 255, G: 0, B: 0, A: 255}
	return c
}
// SetIgnoreColor tells the comparer to skip any pixel of the specified
// color in either input image during comparison.
func (comparer *SimpleImageComparer) SetIgnoreColor(c *color.NRGBA) {
	comparer.useignoreColor = true
	comparer.ignoreColor = c
}
//CompareImages compares 2 images, returns the number of different pixels and an output image that highlights the differences
func (comparer *SimpleImageComparer) CompareImages(img1 image.Image, img2 image.Image) (int, *image.NRGBA, error) {
img1Bounds := img1.Bounds()
img2Bounds := img2.Bounds()
XBoundsDifferent := img1Bounds.Max.X-img1Bounds.Min.X != img2Bounds.Max.X-img2Bounds.Min.X
YBoundsDifferent := img1Bounds.Max.Y-img1Bounds.Min.Y != img2Bounds.Max.Y-img2Bounds.Min.Y
if XBoundsDifferent || YBoundsDifferent {
return 0, nil, errors.New("Images not same size")
}
//Determine if each pixel is same or different
numDifferentPixel := 0
diffImage := image.NewNRGBA(img1Bounds)
var outputPixel color.NRGBA
same := false
for y := img1Bounds.Min.Y; y < img1Bounds.Max.Y; y++ {
for x := img1Bounds.Min.X; x < img1Bounds.Max.X; x++ {
Pixel1rgba := img1.At(x, y)
Pixel2rgba := img2.At(x, y)
P1NRGBA := color2nrgba(Pixel1rgba)
P2NRGBA := color2nrgba(Pixel2rgba)
r1 := P1NRGBA.R
g1 := P1NRGBA.G
b1 := P1NRGBA.B
a1 := P1NRGBA.A
r2 := P2NRGBA.R
g2 := P2NRGBA.G
b2 := P2NRGBA.B
a2 := P2NRGBA.A
if comparer.useignoreColor && isIgnorePixel(P1NRGBA.R, P1NRGBA.G, P1NRGBA.B, P1NRGBA.A, P2NRGBA.R, P2NRGBA.G, P2NRGBA.B, P2NRGBA.A, comparer.ignoreColor) {
//These pixels should be ignored
diffImage.SetNRGBA(x, y, niceOutputPixel(*comparer.ignoreColor))
continue
} else {
same = r1 == r2 && g1 == g2 && b1 == b2 && a1 == a2
}
if !same {
//These 2 pixels have different RGBA values
numDifferentPixel++
outputPixel = comparer.DiffColor
} else {
//These 2 pixels are exactly the same
outputPixel = niceOutputPixel(P1NRGBA)
}
diffImage.SetNRGBA(x, y, outputPixel)
}
}
return numDifferentPixel, diffImage, nil
} | imagediff/SimpleImageComparer.go | 0.823506 | 0.567817 | SimpleImageComparer.go | starcoder |
package export
import "github.com/prometheus/client_golang/prometheus"
// IngestionRealtimeIndexingExporter contains all the Prometheus metrics that
// are possible to gather from the Druid realtime indexing (ingestion) service.
// (The previous comment incorrectly referred to the Jetty service.)
type IngestionRealtimeIndexingExporter struct {
	TaskRunTime *prometheus.HistogramVec `description:"milliseconds taken to run a task"`
	TaskActionLogTime *prometheus.HistogramVec `description:"milliseconds taken to log a task action to the audit log"`
	TaskActionRunTime *prometheus.HistogramVec `description:"milliseconds taken to execute a task action"`
	SegmentAddedBytes *prometheus.SummaryVec `description:"size in bytes of new segments created"`
	SegmentMovedBytes *prometheus.SummaryVec `description:"size in bytes of segments moved/archived via the Move Task"`
	SegmentNukedBytes *prometheus.SummaryVec `description:"size in bytes of segments deleted via the Kill Task"`
	TaskSuccessCount *prometheus.GaugeVec `description:"number of successful tasks per emission period. This metric is only available if the TaskCountStatsMonitor module is included"`
	TaskFailedCount *prometheus.GaugeVec `description:"number of failed tasks per emission period. This metric is only available if the TaskCountStatsMonitor module is included"`
	TaskRunningCount *prometheus.GaugeVec `description:"number of current running tasks. This metric is only available if the TaskCountStatsMonitor module is included"`
	TaskPendingCount *prometheus.GaugeVec `description:"number of current pending tasks. This metric is only available if the TaskCountStatsMonitor module is included"`
	TaskWaitingCount *prometheus.GaugeVec `description:"number of current waiting tasks. This metric is only available if the TaskCountStatsMonitor module is included"`
}
// NewIngestionRealtimeIndexingExporter returns a new realtime-indexing
// exporter with every metric constructed and registered on the default
// Prometheus registry. (The previous comment incorrectly said "Jetty".)
//
// NOTE(review): taskId is used as a label on several metrics, which can
// produce unbounded label cardinality over time — confirm this is
// acceptable for the target Prometheus deployment. Also, MustRegister
// panics on duplicate registration, so only one exporter instance may be
// created per process.
func NewIngestionRealtimeIndexingExporter() *IngestionRealtimeIndexingExporter {
	re := &IngestionRealtimeIndexingExporter{
		TaskRunTime: prometheus.NewHistogramVec(prometheus.HistogramOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_runtime",
			Help:      "milliseconds taken to run a task",
			Buckets:   []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000},
		}, []string{"dataSource", "taskId", "taskType", "taskStatus"}),
		TaskActionLogTime: prometheus.NewHistogramVec(prometheus.HistogramOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_action_logtime",
			Help:      "milliseconds taken to log a task action to the audit log",
			Buckets:   []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000},
		}, []string{"dataSource", "taskId", "taskType"}),
		TaskActionRunTime: prometheus.NewHistogramVec(prometheus.HistogramOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_action_runtime",
			Help:      "milliseconds taken to execute a task action",
			Buckets:   []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000},
		}, []string{"dataSource", "taskId", "taskType"}),
		SegmentAddedBytes: prometheus.NewSummaryVec(prometheus.SummaryOpts{
			Namespace:  "druid",
			Subsystem:  "realtime_indexing",
			Name:       "segment_added_bytes",
			Help:       "size in bytes of new segments created",
			Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
		}, []string{"dataSource", "taskId", "taskType"}),
		SegmentMovedBytes: prometheus.NewSummaryVec(prometheus.SummaryOpts{
			Namespace:  "druid",
			Subsystem:  "realtime_indexing",
			Name:       "segment_moved_bytes",
			Help:       "size in bytes of segments moved/archived via the Move Task",
			Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
		}, []string{"dataSource", "taskId", "taskType"}),
		SegmentNukedBytes: prometheus.NewSummaryVec(prometheus.SummaryOpts{
			Namespace:  "druid",
			Subsystem:  "realtime_indexing",
			Name:       "segment_nuked_bytes",
			Help:       "size in bytes of segments deleted via the Kill Task",
			Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
		}, []string{"dataSource", "taskId", "taskType"}),
		TaskSuccessCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_success_count",
			Help:      "number of successful tasks per emission period. This metric is only available if the TaskCountStatsMonitor module is included",
		}, []string{"dataSource"}),
		TaskFailedCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_failed_count",
			Help:      "number of failed tasks per emission period. This metric is only available if the TaskCountStatsMonitor module is included",
		}, []string{"dataSource"}),
		TaskRunningCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_running_count",
			Help:      "number of current running tasks. This metric is only available if the TaskCountStatsMonitor module is included",
		}, []string{"dataSource"}),
		TaskPendingCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_pending_count",
			Help:      "number of current pending tasks. This metric is only available if the TaskCountStatsMonitor module is included",
		}, []string{"dataSource"}),
		TaskWaitingCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{
			Namespace: "druid",
			Subsystem: "realtime_indexing",
			Name:      "task_waiting_count",
			Help:      "number of current waiting tasks. This metric is only available if the TaskCountStatsMonitor module is included",
		}, []string{"dataSource"}),
	}
	// register all the prometheus metrics
	prometheus.MustRegister(re.SegmentAddedBytes)
	prometheus.MustRegister(re.SegmentMovedBytes)
	prometheus.MustRegister(re.SegmentNukedBytes)
	prometheus.MustRegister(re.TaskActionLogTime)
	prometheus.MustRegister(re.TaskActionRunTime)
	prometheus.MustRegister(re.TaskFailedCount)
	prometheus.MustRegister(re.TaskPendingCount)
	prometheus.MustRegister(re.TaskWaitingCount)
	prometheus.MustRegister(re.TaskSuccessCount)
	prometheus.MustRegister(re.TaskRunningCount)
	prometheus.MustRegister(re.TaskRunTime)
	return re
}
// SetTaskRunTime records a task run duration sample (milliseconds)
// against the task_runtime histogram with the given labels.
func (re *IngestionRealtimeIndexingExporter) SetTaskRunTime(labels map[string]string, val float64) {
	obs := re.TaskRunTime.With(labels)
	obs.Observe(val)
}

// SetTaskActionLogTime records a sample (milliseconds) of the time taken
// to log a task action to the audit log.
func (re *IngestionRealtimeIndexingExporter) SetTaskActionLogTime(labels map[string]string, val float64) {
	obs := re.TaskActionLogTime.With(labels)
	obs.Observe(val)
}

// SetTaskActionRunTime records a sample (milliseconds) of the time taken
// to execute a task action.
func (re *IngestionRealtimeIndexingExporter) SetTaskActionRunTime(labels map[string]string, val float64) {
	obs := re.TaskActionRunTime.With(labels)
	obs.Observe(val)
}

// SetSegmentAddedBytes records the size in bytes of a newly created segment.
func (re *IngestionRealtimeIndexingExporter) SetSegmentAddedBytes(labels map[string]string, val float64) {
	obs := re.SegmentAddedBytes.With(labels)
	obs.Observe(val)
}

// SetSegmentMovedBytes records the size in bytes of a segment moved or
// archived via the Move Task.
func (re *IngestionRealtimeIndexingExporter) SetSegmentMovedBytes(labels map[string]string, val float64) {
	obs := re.SegmentMovedBytes.With(labels)
	obs.Observe(val)
}

// SetSegmentNukedBytes records the size in bytes of a segment deleted via
// the Kill Task.
func (re *IngestionRealtimeIndexingExporter) SetSegmentNukedBytes(labels map[string]string, val float64) {
	obs := re.SegmentNukedBytes.With(labels)
	obs.Observe(val)
}
// SetTaskSuccessCount .
func (re *IngestionRealtimeIndexingExporter) SetTaskSuccessCount(source string, val float64) {
re.TaskSuccessCount.With(prometheus.Labels{"dataSource": source}).Add(val)
}
// SetTaskFailedCount .
func (re *IngestionRealtimeIndexingExporter) SetTaskFailedCount(source string, val float64) {
re.TaskFailedCount.With(prometheus.Labels{"dataSource": source}).Add(val)
}
// SetTaskRunningCount .
func (re *IngestionRealtimeIndexingExporter) SetTaskRunningCount(source string, val float64) {
re.TaskRunningCount.With(prometheus.Labels{"dataSource": source}).Add(val)
}
// SetTaskPendingCount .
func (re *IngestionRealtimeIndexingExporter) SetTaskPendingCount(source string, val float64) {
re.TaskPendingCount.With(prometheus.Labels{"dataSource": source}).Add(val)
}
// SetTaskWaitingCount .
func (re *IngestionRealtimeIndexingExporter) SetTaskWaitingCount(source string, val float64) {
re.TaskWaitingCount.With(prometheus.Labels{"dataSource": source}).Add(val)
} | pkg/export/ingestion_realtime_indexing.go | 0.728748 | 0.431644 | ingestion_realtime_indexing.go | starcoder |
package bytealg
const (
	// Index can search any valid length of string.
	// NOTE(review): int(-1) >> 31 is an arithmetic shift and evaluates to -1,
	// not a maximum length. If the intent was "no limit", callers comparing
	// len(s) <= MaxLen will never take the fast path — possibly
	// int(^uint(0) >> 1) was intended. Confirm against the callers.
	MaxLen = int(-1) >> 31

	MaxBruteForce = MaxLen
)
// Compare two byte slices.
// Returns -1 if the first differing byte is lower in a, or 1 if the first
// differing byte is greater in b. If the byte slices are equal, returns 0.
// If the lengths are different and there are no differing bytes in the
// common prefix, compares based on length.
func Compare(a, b []byte) int {
	// Fixed: the loop previously compared a[0] and b[0] on every
	// iteration, so only the first byte ever decided the result.
	for i := 0; i < len(a) && i < len(b); i++ {
		switch {
		case a[i] < b[i]:
			return -1
		case a[i] > b[i]:
			return 1
		}
	}
	// No differing byte in the common prefix: order by length.
	switch {
	case len(a) > len(b):
		return 1
	case len(a) < len(b):
		return -1
	default:
		return 0
	}
}
// Count returns the number of bytes in b that are equal to c.
// A straightforward loop is used; there is no intrinsic for this.
func Count(b []byte, c byte) int {
	total := 0
	for i := 0; i < len(b); i++ {
		if b[i] == c {
			total++
		}
	}
	return total
}
// CountString returns the number of bytes in s that are equal to c.
// Kept separate from Count because the compiler does not currently
// generate zero-copy byte/string conversions.
func CountString(s string, c byte) int {
	total := 0
	for _, v := range []byte(s) {
		if v == c {
			total++
		}
	}
	return total
}
// Cutover is not reachable in TinyGo, but must exist because it is
// referenced. MaxLen and MaxBruteForce are set so callers take a different
// path; reaching this function indicates a bug.
func Cutover(n int) int {
	panic("cutover is unreachable")
}
// Equal reports whether a and b have the same length and contents.
// It is equivalent to bytes.Equal.
func Equal(a, b []byte) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}
// Index returns the byte index of the first occurrence of b within a, or -1
// if a does not contain b. An empty b matches at index 0.
func Index(a, b []byte) int {
	// Brute-force search with the byte comparison inlined.
search:
	for i := 0; i+len(b) <= len(a); i++ {
		for j := 0; j < len(b); j++ {
			if a[i+j] != b[j] {
				continue search
			}
		}
		return i
	}
	return -1
}
// IndexByte returns the index of the first occurrence of c in b, or -1 if
// c is not present.
func IndexByte(b []byte, c byte) int {
	for i := 0; i < len(b); i++ {
		if b[i] == c {
			return i
		}
	}
	return -1
}
// IndexByteString returns the index of the first occurrence of c in s, or
// -1 if c is not present.
func IndexByteString(s string, c byte) int {
	// range over []byte(s) yields byte indices; the conversion is elided.
	for i, v := range []byte(s) {
		if v == c {
			return i
		}
	}
	return -1
}
// IndexString returns the index of the first occurrence of sub in str, or
// -1 if sub is not present. An empty sub matches at index 0.
func IndexString(str, sub string) int {
	n := len(sub)
	for i := 0; i+n <= len(str); i++ {
		if str[i:i+n] == sub {
			return i
		}
	}
	return -1
}
// An implementation of Conway's Game of Life.
package main
import (
"bytes"
"fmt"
"math/rand"
"time"
)
// Field represents a two-dimensional field of cells.
type Field struct {
	s    [][]bool
	w, h int
}

// NewField returns an empty field of the specified width and height.
func NewField(w, h int) *Field {
	cells := make([][]bool, h)
	for row := range cells {
		cells[row] = make([]bool, w)
	}
	return &Field{s: cells, w: w, h: h}
}

// Set sets the state of the specified cell to the given value.
func (f *Field) Set(x, y int, b bool) {
	f.s[y][x] = b
}

// Alive reports whether the specified cell is alive.
// Out-of-range coordinates wrap toroidally: an x of -1 means width-1.
func (f *Field) Alive(x, y int) bool {
	x = (x + f.w) % f.w
	y = (y + f.h) % f.h
	return f.s[y][x]
}

// Next returns the state of the specified cell at the next time step,
// following Conway's rules: exactly three live neighbors turns a cell on,
// exactly two keeps its current state, anything else turns it off.
func (f *Field) Next(x, y int) bool {
	neighbors := 0
	for dy := -1; dy <= 1; dy++ {
		for dx := -1; dx <= 1; dx++ {
			if (dx != 0 || dy != 0) && f.Alive(x+dx, y+dy) {
				neighbors++
			}
		}
	}
	return neighbors == 3 || neighbors == 2 && f.Alive(x, y)
}
// Life stores the state of a round of Conway's Game of Life.
type Life struct {
	a, b *Field
	w, h int
}

// NewLife returns a new Life game state with a random initial state.
func NewLife(w, h int) *Life {
	field := NewField(w, h)
	// Seed roughly a quarter of the cells at random positions.
	for n := 0; n < w*h/4; n++ {
		field.Set(rand.Intn(w), rand.Intn(h), true)
	}
	return &Life{a: field, b: NewField(w, h), w: w, h: h}
}

// Step advances the game by one instant, recomputing and updating all cells.
func (l *Life) Step() {
	// Write the successor generation into b, then swap the two fields.
	for y := 0; y < l.h; y++ {
		for x := 0; x < l.w; x++ {
			l.b.Set(x, y, l.a.Next(x, y))
		}
	}
	l.a, l.b = l.b, l.a
}

// String returns the game board as a string, '*' for live cells.
func (l *Life) String() string {
	var buf bytes.Buffer
	for y := 0; y < l.h; y++ {
		for x := 0; x < l.w; x++ {
			if l.a.Alive(x, y) {
				buf.WriteByte('*')
			} else {
				buf.WriteByte(' ')
			}
		}
		buf.WriteByte('\n')
	}
	return buf.String()
}
// main runs a 40x15 board for 300 generations, redrawing at roughly 30 FPS.
func main() {
	l := NewLife(40, 15)
	for i := 0; i < 300; i++ {
		l.Step()
		fmt.Print("\x0c", l) // "\x0c" (form feed) clears the terminal before printing the field.
		time.Sleep(time.Second / 30)
	}
}
package store
type (
	// Strings is a column of string values.
	Strings struct {
		Values []string
	}
	// Ints is a column of int values.
	Ints struct {
		Values []int
	}
	// Bools is a column of bool values.
	Bools struct {
		Values []bool
	}
	// KVs is a column of key/value string pairs; Keys[i] pairs with Values[i].
	KVs struct {
		Keys   []string
		Values []string
	}
)
func (s *Strings) Init(size int) {
if cap(s.Values) < size {
s.Values = make([]string, size)
} else {
s.Values = s.Values[:size]
}
}
func (s *Strings) Final(size int) {
s.Values = s.Values[:size]
}
func (s *Strings) Ptrs(index int, ptrs []interface{}) {
ptrs[0] = &s.Values[index]
}
func (s *Strings) Realloc(count int) int {
if c := cap(s.Values); c == count {
values := make([]string, 2*c)
copy(values, s.Values)
s.Values = values
return 2 * c
} else if c > count {
s.Values = s.Values[:c]
return c
}
panic("unexpected capacity of Strings")
}
func (s *Strings) Clear() {
if s.Values != nil {
s.Values = s.Values[:0]
}
}
func (s *Ints) Init(size int) {
if cap(s.Values) < size {
s.Values = make([]int, size)
} else {
s.Values = s.Values[:size]
}
}
func (s *Ints) Final(size int) {
s.Values = s.Values[:size]
}
func (s *Ints) Ptrs(index int, ptrs []interface{}) {
ptrs[0] = &s.Values[index]
}
func (s *Ints) Realloc(count int) int {
if c := cap(s.Values); c == count {
values := make([]int, 2*c)
copy(values, s.Values)
s.Values = values
return 2 * c
} else if c > count {
s.Values = s.Values[:c]
return c
}
panic("unexpected capacity of Ints")
}
func (s *Ints) Clear() {
if s.Values != nil {
s.Values = s.Values[:0]
}
}
func (s *Bools) Init(size int) {
if cap(s.Values) < size {
s.Values = make([]bool, size)
} else {
s.Values = s.Values[:size]
}
}
func (s *Bools) Final(size int) {
s.Values = s.Values[:size]
}
func (s *Bools) Ptrs(index int, ptrs []interface{}) {
ptrs[0] = &s.Values[index]
}
func (s *Bools) Realloc(count int) int {
if c := cap(s.Values); c == count {
values := make([]bool, 2*c)
copy(values, s.Values)
s.Values = values
return 2 * c
} else if c > count {
s.Values = s.Values[:c]
return c
}
panic("unexpected capacity of Bools")
}
func (s *Bools) Clear() {
if s.Values != nil {
s.Values = s.Values[:0]
}
}
func (s *KVs) Init(size int) {
if cap(s.Values) < size {
s.Keys = make([]string, size)
s.Values = make([]string, size)
} else {
s.Keys = s.Keys[:size]
s.Values = s.Values[:size]
}
}
func (s *KVs) Final(size int) {
s.Keys = s.Keys[:size]
s.Values = s.Values[:size]
}
func (s *KVs) Ptrs(index int, ptrs []interface{}) {
ptrs[0] = &s.Keys[index]
ptrs[1] = &s.Values[index]
}
func (s *KVs) Realloc(count int) int {
if c := cap(s.Values); c == count {
keys := make([]string, 2*c)
copy(keys, s.Keys)
s.Keys = keys
values := make([]string, 2*c)
copy(values, s.Values)
s.Values = values
return 2 * c
} else if c > count {
s.Keys = s.Keys[:c]
s.Values = s.Values[:c]
return c
}
panic("unexpected capacity of KVs")
} | store/store.go | 0.5564 | 0.452234 | store.go | starcoder |
package texture
import (
g2dcol "github.com/jphsd/graphics2d/color"
"image"
"image/color"
"image/draw"
"math"
)
// Image holds the data to support a continuous bicubic interpolation over an image.
type Image struct {
	Image        *image.NRGBA
	Max          []float64
	LastX, LastY int
	HSL          bool
}

// NewImage sets up a new field with the supplied image. The pixels are
// copied into an NRGBA image whose bounds start at {0, 0}.
func NewImage(img image.Image) *Image {
	bounds := img.Bounds()
	w, h := bounds.Dx(), bounds.Dy()
	dst := image.NewNRGBA(image.Rect(0, 0, w, h))
	draw.Draw(dst, dst.Bounds(), img, bounds.Min, draw.Src)
	return &Image{
		Image: dst,
		Max:   []float64{float64(w), float64(h)},
		LastX: w - 1,
		LastY: h - 1,
	}
}
const (
	// epsilon is added before Floor to absorb floating-point error for
	// coordinates sitting just below an integer boundary.
	epsilon = 0.0000001
)

// Eval2 implements the Field interface. Coordinates outside the image return
// the constant {0, 0, 0, 1}; in-range samples are bicubically interpolated
// and rescaled from [0,1] to [-1,1].
func (f *Image) Eval2(x, y float64) []float64 {
	if x < 0 || x >= f.Max[0] || y < 0 || y >= f.Max[1] {
		return []float64{0, 0, 0, 1}
	}
	ix := int(math.Floor(x + epsilon))
	iy := int(math.Floor(y + epsilon))
	patch := f.getValues(ix, iy)
	v := BiCubic(x-float64(ix), y-float64(iy), patch)
	return []float64{v[0]*2 - 1, v[1]*2 - 1, v[2]*2 - 1, v[3]*2 - 1}
}

// getValues gathers the 4x4 patch of pixel values around (x, y) needed for
// bicubic interpolation: entry [i][j] is the pixel at (x+j-1, y+i-1).
func (f *Image) getValues(x, y int) [][][]float64 {
	patch := make([][][]float64, 4)
	for i := 0; i < 4; i++ {
		patch[i] = make([][]float64, 4)
		for j := 0; j < 4; j++ {
			patch[i][j] = f.getValue(x+j-1, y+i-1)
		}
	}
	return patch
}
// getValue returns the channel values of the pixel at (x, y), clamping
// out-of-range coordinates to the nearest edge pixel. With HSL set the
// result is {H, S, L, A}; otherwise it is the NRGBA channels normalized
// to [0,1]. (The rescale to [-1,1] happens in Eval2, not here.)
func (f *Image) getValue(x, y int) []float64 {
	var col color.Color
	// Edge handling: clamp each coordinate into [0, Last*].
	if x < 0 {
		if y < 0 {
			col = f.Image.At(0, 0)
		} else if y > f.LastY {
			col = f.Image.At(0, f.LastY)
		} else {
			col = f.Image.At(0, y)
		}
	} else if x > f.LastX {
		if y < 0 {
			col = f.Image.At(f.LastX, 0)
		} else if y > f.LastY {
			col = f.Image.At(f.LastX, f.LastY)
		} else {
			col = f.Image.At(f.LastX, y)
		}
	} else if y < 0 {
		col = f.Image.At(x, 0)
	} else if y > f.LastY {
		col = f.Image.At(x, f.LastY)
	} else {
		col = f.Image.At(x, y)
	}
	if f.HSL {
		// HSLA
		hsl := g2dcol.NewHSL(col)
		return []float64{hsl.H, hsl.S, hsl.L, hsl.A}
	}
	// NRGBA: the ok result is ignored; f.Image is an *image.NRGBA, whose At
	// returns color.NRGBA, so the assertion holds for in-image pixels.
	c, _ := col.(color.NRGBA)
	// Widen each 8-bit channel to 16 bits (v<<8|v) and normalize by 0xffff.
	r := uint32(c.R)
	r = r<<8 | r
	rv := float64(r)
	rv /= 0xffff
	g := uint32(c.G)
	g = g<<8 | g
	gv := float64(g)
	gv /= 0xffff
	b := uint32(c.B)
	b = b<<8 | b
	bv := float64(b)
	bv /= 0xffff
	a := uint32(c.A)
	a = a<<8 | a
	av := float64(a)
	av /= 0xffff
	// Channel values are in [0,1] here; Eval2 performs the [-1,1] scaling.
	return []float64{rv, gv, bv, av}
}
// Cubic calculates f(t) for t in [0,1] given the values of f at t = -1, 0,
// 1, 2 in p, fitted to the cubic f(t) = at^3 + bt^2 + ct + d. The result is
// clamped to [0,1] because the polynomial can over/undershoot.
func Cubic(t float64, p []float64) float64 {
	v := p[1] + 0.5*t*(p[2]-p[0]+t*(2.0*p[0]-5.0*p[1]+4.0*p[2]-p[3]+t*(3.0*(p[1]-p[2])+p[3]-p[0])))
	switch {
	case v < 0:
		return 0
	case v > 1:
		return 1
	default:
		return v
	}
}
// BiCubic uses Cubic to calculate the value of f(u,v) for u,v in range [0,1].
// NOTE(review): Cubic(v, p[i][j]) interpolates over p[i][j]'s last axis,
// which getValues fills with the four channel values of one pixel — i.e. the
// v interpolation appears to span channels rather than pixel rows. Confirm
// against upstream before changing; this rewrite preserves the behavior.
func BiCubic(u, v float64, p [][][]float64) []float64 {
	res := make([]float64, 4)
	for i := 0; i < 4; i++ {
		col := []float64{
			Cubic(v, p[i][0]),
			Cubic(v, p[i][1]),
			Cubic(v, p[i][2]),
			Cubic(v, p[i][3]),
		}
		res[i] = Cubic(u, col)
	}
	return res
}
// NewRGBA renders the texture into a new RGBA image of the given size,
// sampling src starting at (ox, oy) and stepping by (dx, dy) per pixel.
func NewRGBA(width, height int, src ColorField, ox, oy, dx, dy float64) *image.RGBA {
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	// Coordinates are accumulated (not multiplied out) to match the
	// original floating-point stepping exactly.
	for r, y := 0, oy; r < height; r, y = r+1, y+dy {
		for c, x := 0, ox; c < width; c, x = c+1, x+dx {
			img.Set(c, r, src.Eval2(x, y))
		}
	}
	return img
}
package main
import "fmt"
// Color is a red-black tree node color.
type Color string

const (
	R Color = "R" // red
	// NOTE(review): B inherits the untyped string "B" rather than Color;
	// comparisons against Color values still work via implicit conversion.
	B = "B" // black
)

// Tree is a persistent red-black tree supporting insertion.
type Tree interface {
	ins(x int) Tree
}

// E is the empty leaf.
type E struct{}

// ins on a leaf produces a fresh red node holding x with empty children.
func (_ E) ins(x int) Tree {
	return T{R, E{}, x, E{}}
}

func (_ E) String() string {
	return "E"
}

// T is an interior node: color, left subtree, value, right subtree.
type T struct {
	cl Color
	le Tree
	aa int
	ri Tree
}
// balance performs the classic Okasaki red-black rebalance: a black node
// with a red child that itself has a red child (four possible shapes:
// left-left, left-right, right-left, right-right) is rewritten into a red
// parent with two black children, restoring the no-red-red invariant.
// Any other shape is returned unchanged.
func (t T) balance() Tree {
	// Only black nodes are rebalanced.
	if t.cl != B {
		return t
	}
	// Probe the children and grandchildren; the *IsT flags record which
	// are interior nodes (as opposed to E leaves).
	le, leIsT := t.le.(T)
	ri, riIsT := t.ri.(T)
	var lele, leri, rile, riri T
	var leleIsT, leriIsT, rileIsT, ririIsT bool
	if leIsT {
		lele, leleIsT = le.le.(T)
	}
	if leIsT {
		leri, leriIsT = le.ri.(T)
	}
	if riIsT {
		rile, rileIsT = ri.le.(T)
	}
	if riIsT {
		riri, ririIsT = ri.ri.(T)
	}
	// Each case destructures down to the same four subtrees a, b, c, d and
	// values x < y < z, then rebuilds (R (B a x b) y (B c z d)).
	switch {
	case leIsT && leleIsT && le.cl == R && lele.cl == R:
		// left-left: red left child with red left grandchild.
		_, t2, z, d := t.destruct()
		_, t3, y, c := t2.(T).destruct()
		_, a, x, b := t3.(T).destruct()
		return T{R, T{B, a, x, b}, y, T{B, c, z, d}}
	case leIsT && leriIsT && le.cl == R && leri.cl == R:
		// left-right: red left child with red right grandchild.
		_, t2, z, d := t.destruct()
		_, a, x, t3 := t2.(T).destruct()
		_, b, y, c := t3.(T).destruct()
		return T{R, T{B, a, x, b}, y, T{B, c, z, d}}
	case riIsT && rileIsT && ri.cl == R && rile.cl == R:
		// right-left: red right child with red left grandchild.
		_, a, x, t2 := t.destruct()
		_, t3, z, d := t2.(T).destruct()
		_, b, y, c := t3.(T).destruct()
		return T{R, T{B, a, x, b}, y, T{B, c, z, d}}
	case riIsT && ririIsT && ri.cl == R && riri.cl == R:
		// right-right: red right child with red right grandchild.
		_, a, x, t2 := t.destruct()
		_, b, y, t3 := t2.(T).destruct()
		_, c, z, d := t3.(T).destruct()
		return T{R, T{B, a, x, b}, y, T{B, c, z, d}}
	default:
		return t
	}
}
// ins inserts x into the subtree rooted at t, rebalancing on the way back
// up. Duplicate values are ignored.
func (t T) ins(x int) Tree {
	if x < t.aa {
		return T{t.cl, t.le.ins(x), t.aa, t.ri}.balance()
	}
	if x > t.aa {
		return T{t.cl, t.le, t.aa, t.ri.ins(x)}.balance()
	}
	return t
}

// destruct returns the node's fields: color, left, value, right.
func (t T) destruct() (Color, Tree, int, Tree) {
	return t.cl, t.le, t.aa, t.ri
}

// String renders the node as T(color, left, value, right).
func (t T) String() string {
	return fmt.Sprintf("T(%s, %v, %d, %v)", t.cl, t.le, t.aa, t.ri)
}

// insert inserts x into tr and recolors the resulting root black, which
// preserves the red-black root invariant.
func insert(tr Tree, x int) Tree {
	switch root := tr.ins(x).(type) {
	case T:
		_, a, y, b := root.destruct()
		return T{B, a, y, b}
	case E:
		return E{}
	default:
		return nil
	}
}

// main builds a tree from the values 1..16 and prints its structure.
func main() {
	var tr Tree = E{}
	for v := 1; v <= 16; v++ {
		tr = insert(tr, v)
	}
	fmt.Println(tr)
}
package compare
import (
"bytes"
"time"
)
// Comparable is implemented by types that define a total order among their
// values. Given c = a.Compare(b):
// c < 0 if a < b;
// c == 0 if a == b; and
// c > 0 if a > b.
type Comparable interface {
	Compare(Comparable) int
}

// CompString is a string ordered lexicographically by bytes, with ties in
// the common prefix broken by length.
type CompString string

// Compare implements Comparable for CompString.
func (a CompString) Compare(b Comparable) int {
	s, u := string(a), string(b.(CompString))
	n := len(s)
	if len(u) < n {
		n = len(u)
	}
	for i := 0; i < n; i++ {
		if s[i] != u[i] {
			if s[i] < u[i] {
				return -1
			}
			return 1
		}
	}
	switch {
	case len(s) < len(u):
		return -1
	case len(s) > len(u):
		return 1
	}
	return 0
}
type CompInt int
func (a CompInt) Compare(b Comparable) int {
aAsserted := int(a)
bAsserted := int(b.(CompInt))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompInt8 int8
func (a CompInt8) Compare(b Comparable) int {
aAsserted := int8(a)
bAsserted := int8(b.(CompInt8))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompInt16 int16
func (a CompInt16) Compare(b Comparable) int {
aAsserted := int16(a)
bAsserted := int16(b.(CompInt16))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompInt32 int32
func (a CompInt32) Compare(b Comparable) int {
aAsserted := int32(a)
bAsserted := int32(b.(CompInt32))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompInt64 int64
func (a CompInt64) Compare(b Comparable) int {
aAsserted := int64(a)
bAsserted := int64(b.(CompInt64))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompUint uint
func (a CompUint) Compare(b Comparable) int {
aAsserted := uint(a)
bAsserted := uint(b.(CompUint))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompUint8 uint8
func (a CompUint8) Compare(b Comparable) int {
aAsserted := uint8(a)
bAsserted := uint8(b.(CompUint8))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompUint16 uint16
func (a CompUint16) Compare(b Comparable) int {
aAsserted := uint16(a)
bAsserted := uint16(b.(CompUint16))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompUint32 uint32
func (a CompUint32) Compare(b Comparable) int {
aAsserted := uint32(a)
bAsserted := uint32(b.(CompUint32))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompUint64 uint64
func (a CompUint64) Compare(b Comparable) int {
aAsserted := uint64(a)
bAsserted := uint64(b.(CompUint64))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompFloat32 float32
func (a CompFloat32) Compare(b Comparable) int {
aAsserted := float32(a)
bAsserted := float32(b.(CompFloat32))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompFloat64 float64
func (a CompFloat64) Compare(b Comparable) int {
aAsserted := float64(a)
bAsserted := float64(b.(CompFloat64))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompByte byte
func (a CompByte) Compare(b Comparable) int {
aAsserted := byte(a)
bAsserted := byte(b.(CompByte))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompRune rune
func (a CompRune) Compare(b Comparable) int {
aAsserted := rune(a)
bAsserted := rune(b.(CompRune))
switch {
case aAsserted > bAsserted:
return 1
case aAsserted < bAsserted:
return -1
default:
return 0
}
}
type CompTime time.Time
func (a CompTime) Compare(b Comparable) int {
aAsserted := time.Time(a)
bAsserted := time.Time(b.(CompTime))
switch {
case aAsserted.After(bAsserted):
return 1
case aAsserted.Before(bAsserted):
return -1
default:
return 0
}
}
type CompBytes []byte
func (a CompBytes) Compare(b Comparable) int {
aAsserted := []byte(a)
bAsserted := []byte(b.(CompBytes))
return bytes.Compare(aAsserted, bAsserted)
} | util/compare/comparator.go | 0.792424 | 0.686498 | comparator.go | starcoder |
package dna
import (
"log"
)
// Count returns the number of each base present in the input sequence:
// one counter per uppercase base (A, C, G, T, N), one per lowercase
// (masked) base, and one for gaps. Bases outside these cases (Dot, Nil)
// are not counted.
func Count(seq []Base) (ACount int, CCount int, GCount int, TCount int, NCount int, aCount int, cCount int, gCount int, tCount int, nCount int, gapCount int) {
	for _, b := range seq {
		switch b {
		case A:
			ACount++
		case C:
			CCount++
		case G:
			GCount++
		case T:
			TCount++
		case N:
			NCount++
		case LowerA:
			aCount++
		case LowerC:
			cCount++
		case LowerG:
			gCount++
		case LowerT:
			tCount++
		case LowerN:
			nCount++
		case Gap:
			gapCount++
		}
	}
	// Bug fix: the original return listed NCount in the nCount position,
	// so the lowercase-n count was reported as the uppercase-N count.
	return ACount, CCount, GCount, TCount, NCount, aCount, cCount, gCount, tCount, nCount, gapCount
}
// CountMask returns the number of bases that are masked/unmasked (lowercase/uppercase) in the input sequence.
func CountMask(seq []Base) (unmaskedCount int, maskedCount int, gapCount int) {
ACount, CCount, GCount, TCount, NCount, aCount, cCount, gCount, tCount, nCount, gapCount := Count(seq)
unmaskedCount = ACount + CCount + GCount + TCount + NCount
maskedCount = aCount + cCount + gCount + tCount + nCount
return unmaskedCount, maskedCount, gapCount
}
// CountGaps returns the number of gaps present in the input sequence.
func CountGaps(seq []Base) int {
var gapCount int
for i := range seq {
if seq[i] == Gap {
gapCount++
}
}
return gapCount
}
// baseDist is a helper function for Dist that returns 1 if input bases do not match.
func baseDist(a Base, b Base) int {
if a == b {
return 0
}
return 1
}
// Dist returns the number of bases that do not match between the input sequences.
// Input sequences must be the same length.
func Dist(a []Base, b []Base) int {
if len(a) != len(b) {
log.Panicf("input sequence lengths are different")
}
var sum int
for i := range a {
sum = sum + baseDist(a[i], b[i])
}
return sum
}
// IsLower returns true if the input base is lowercase.
// NOTE(review): LowerN is not treated as lowercase here, although Count and
// CountMask do count lowercase n among the masked bases — confirm whether
// that omission is intentional.
func IsLower(b Base) bool {
	switch b {
	case LowerA:
		return true
	case LowerG:
		return true
	case LowerC:
		return true
	case LowerT:
		return true
	default:
		return false
	}
}
// DefineBase returns false if the input base is an N, Gap, Dot, or Nil.
func DefineBase(b Base) bool {
switch b {
case A:
return true
case C:
return true
case G:
return true
case T:
return true
case N:
return false
case LowerA:
return true
case LowerC:
return true
case LowerG:
return true
case LowerT:
return true
case LowerN:
return false
case Gap:
return false
case Dot:
return false
case Nil:
return false
default:
return false
}
}
// CountBase returns the number of the designated base present in the input sequence.
func CountBase(seq []Base, b Base) int {
return CountBaseInterval(seq, b, 0, len(seq))
}
// CountBaseInterval returns the number of the designated base present in the input range of the sequence.
func CountBaseInterval(seq []Base, b Base, start int, end int) int {
var answer int
if start < 0 || end > len(seq) {
return answer
}
for i := start; i < end; i++ {
if seq[i] == b {
answer++
}
}
return answer
} | dna/examine.go | 0.674479 | 0.555737 | examine.go | starcoder |
package main
import (
"math"
)
// CalculateGammaEpsilon calculates the gamma and epsilon values for a byte
// slice of newline-separated binary strings. Gamma is built from the most
// common bit of each column, epsilon from the least common (ties count as
// least common). Every line, including the last, must end with '\n'.
func CalculateGammaEpsilon(input []byte) (int, int) {
	// Line width: offset of the first newline.
	// (The original loop also incremented the range variable with i++,
	// which was dead code — range reassigns i each iteration.)
	size := 0
	for i, char := range input {
		if char == '\n' {
			size = i
			break
		}
	}
	posCount := make([]int, size)
	lineCount := 0
	pos := 0
	for _, char := range input {
		switch char {
		case '\n':
			lineCount++
			pos = 0
		case '1':
			posCount[pos]++
			pos++
		case '0':
			pos++
		}
	}
	half := lineCount / 2
	gamma := 0
	eps := 0
	for i, count := range posCount {
		bit := int(math.Pow(2, float64(size-i-1)))
		if count > half {
			// Most common bit in this column is 1.
			gamma += bit
		} else {
			eps += bit
		}
	}
	return gamma, eps
}
// GetMcbLcb returns the most common and least common bit characters at
// column pos across the input strings. A tie resolves to (49, 48),
// i.e. ('1', '0').
func GetMcbLcb(input []string, pos int) (byte, byte) {
	ones := 0
	for _, row := range input {
		if row[pos] == 49 {
			ones++
		}
	}
	if ones*2 == len(input) {
		return 49, 48
	}
	if ones > len(input)/2 {
		return 49, 48
	}
	return 48, 49
}

// CalculateOxAndCo2 takes a string slice of binary values and calculates the
// oxygen and CO2 ratings by repeatedly filtering the candidates on the
// most common (oxygen) or least common (CO2) bit of each column, then
// converting the surviving value to an integer.
func CalculateOxAndCo2(input []string) (int, int) {
	size := len(input[0])
	oxyCandidates := input
	co2Candidates := input
	for pos := 0; pos < size; pos++ {
		if len(oxyCandidates) != 1 {
			mcb, _ := GetMcbLcb(oxyCandidates, pos)
			kept := make([]string, 0)
			for _, row := range oxyCandidates {
				if row[pos] == mcb {
					kept = append(kept, row)
				}
			}
			oxyCandidates = kept
		}
		if len(co2Candidates) != 1 {
			_, lcb := GetMcbLcb(co2Candidates, pos)
			kept := make([]string, 0)
			for _, row := range co2Candidates {
				if row[pos] == lcb {
					kept = append(kept, row)
				}
			}
			co2Candidates = kept
		}
	}
	oxy := 0
	co2 := 0
	for i := 0; i < size; i++ {
		if oxyCandidates[0][i] == 49 {
			oxy += int(math.Pow(2, float64(size-i-1)))
		}
		if co2Candidates[0][i] == 49 {
			co2 += int(math.Pow(2, float64(size-i-1)))
		}
	}
	return oxy, co2
}
package matrix
// Map applies f to every element of the matrix and returns the result
func Map(m Matrix, f Mapper) Matrix {
n := New(m.Rows, m.Columns, nil)
for i := 0; i < m.Rows; i++ {
for j := 0; j < m.Columns; j++ {
val := m.Data[i][j]
n.Data[i][j] = f(val, i, j)
}
}
return n
}
// Fold accumulates the values in a matrix according to a Folder function
func Fold(m Matrix, f Folder, accumulator float64) float64 {
for i := 0; i < m.Rows; i++ {
for j := 0; j < m.Columns; j++ {
accumulator = f(accumulator, m.Data[i][j], i, j)
}
}
return accumulator
}
// Transpose returns the transposed version of the matrix
func Transpose(m Matrix) Matrix {
return Map(New(m.Columns, m.Rows, nil),
func(val float64, x, y int) float64 {
return m.Data[y][x]
})
}
// Multiply does scalar multiplication
func Multiply(m Matrix, a float64) Matrix {
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] * a
})
}
// Divide does scalar division
func Divide(m Matrix, a float64) Matrix {
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] / a
})
}
// Sum gives the sum of the elements in the matrix
func Sum(m Matrix) float64 {
return m.Fold(func(accumulator, val float64, x, y int) float64 {
return accumulator + val
}, 0)
}
// AddMatrix adds 2 matrices together
func AddMatrix(m, n Matrix) Matrix {
if m.Rows != n.Rows || m.Columns != n.Columns {
panic("matrix: can't add different sized matricies")
}
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] + n.Data[x][y]
})
}
// Add does scalar addition
func Add(m Matrix, n float64) Matrix {
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] + n
})
}
// SubtractMatrix subtracts 2 matrices
func SubtractMatrix(m, n Matrix) Matrix {
if m.Rows != n.Rows || m.Columns != n.Columns {
panic("matrix: can't subtract different sized matricies")
}
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] - n.Data[x][y]
})
}
// Subtract does scalar subtraction
func Subtract(m Matrix, n float64) Matrix {
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] - n
})
}
// HadamardProduct does Hadamard Product (entrywise)
func HadamardProduct(m Matrix, n Matrix) Matrix {
if m.Columns != n.Columns || m.Rows != n.Rows {
panic("matrix: matricies must have the same shape")
}
return Map(New(m.Rows, m.Columns, nil), func(val float64, x, y int) float64 {
return m.Data[x][y] * n.Data[x][y]
})
}
// DotProduct does matrix product
func DotProduct(m, n Matrix) Matrix {
if m.Columns != n.Rows {
panic("matrix: rows must match with columns of matricies")
}
return Map(New(m.Rows, n.Columns, nil), func(_ float64, x, y int) float64 {
sum := 0.0
for i := 0; i < n.Rows; i++ {
sum += m.Data[x][i] * n.Data[i][y]
}
return sum
})
} | matrix/funcs.go | 0.918945 | 0.738528 | funcs.go | starcoder |
package v1_0
// init registers the RESOLUTION.md documentation of the TOSCA common 1.0
// profile under its profile path. The markdown is embedded verbatim as a
// raw Go string; backticks inside it are spliced in via concatenation.
func init() {
	Profile["/tosca/common/1.0/js/RESOLUTION.md"] = `
Topology Resolution
===================
This is where we create the flat topology: relationships from templates to capabilities (the
"sockets", if you will) in other node templates. We call this "resolving" the topology.
Resolution is handled via the **tosca.resolve** JavaScript embedded in the Clout. This allows you
to re-resolve an existing compiled Clout according to varying factors.
Loops
-----
Puccini allows for relationship loops: for example, two node templates can have ` + "`" + `DependsOn` + "`" + `
relationships with each other. Semantically, this is not a problem. However, practically it could
be a problem for some orchestrators.
A good orchestrator should know what to do. For example, mutually dependent resources could be
provisioned simultaneously. Whether or not orchestrators can deal with such loops is beyond the
scope of Puccini and TOSCA.
Capability Occurrences
----------------------
For capabilities we take into account the ` + "`" + `occurrences` + "`" + ` field, which limits the number of times a
capability may be be used for relationships.
There's no elaboration in the TOSCA specification on what ` + "`" + `occurrences` + "`" + ` means. Our interpretation is
that it does *not* relate to the capacity of our actual resources. While it may be possible for an
orchestrator to provision an extra node to allow for more capacity, that would also change the
topology by creating additional relationships, and generally it would be an overly simplistic
strategy for scaling. TOSCA's role, and thus Puccini's, should merely be to validate the design.
Thus requirements-and-capabilities should have nothing to do with resource provisioning.
That said, ` + "`" + `occurrences` + "`" + ` does introduce a subtle restriction on how requirements are satisfied. It
means that some capabilities might have a *minimum* number of relationships attached to them, or
else a problem is reported. Likewise, some capabilities will allow for a *maximum* number of
possible relationships. Allocating these restricted slots in such a way that all requirements can be
satisfied while keeping all minimums fulfilled would require a non-trivial algorithm.
Currently, Puccini's algorithm is not ideal. It does report problems, and it does try to prioritize
some capabilities with ` + "`" + `occurrences` + "`" + ` restrictions over others. However, it still iterates
requirements one at a time, meaning that it may very well miss on finding a problem-free topology.
It at least guarantees that the results will be consistent by ensuring a reproducible order of
iteration via alphanumeric sorting.
A better algorithm would require either 1) trying various sort orders until one succeeds, or 2)
finding a more sophisticated way to prioritize certain pairs of requirements-and-capabilities.
Both approaches are difficult.
`
}
// This file contains the definitions of the math elementary
// (builtin) functions.
package lisp1_5
import (
"math/big"
)
// Arithmetic.

// mathFunc applies the binary big.Int operation fn to the first two list
// elements of expr and wraps the result as a number atom.
func (c *Context) mathFunc(expr *Expr, fn func(*big.Int, *big.Int) *big.Int) *Expr {
	return atomExpr(number(fn(c.getNumber(Car(expr)), c.getNumber(Car(Cdr(expr))))))
}

// getNumber returns expr's numeric value, reporting an error if expr is not
// a number atom.
func (c *Context) getNumber(expr *Expr) *big.Int {
	if !expr.isNumber() {
		errorf("expect number; have %s", expr)
	}
	return expr.atom.num
}
// add returns a+b in a freshly allocated value.
func add(a, b *big.Int) *big.Int {
	sum := new(big.Int)
	return sum.Add(a, b)
}

// div returns a/b, reporting an error on division by zero.
func div(a, b *big.Int) *big.Int {
	if b.Cmp(&zero) == 0 {
		errorf("division by zero")
	}
	q := new(big.Int)
	return q.Div(a, b)
}

// mul returns a*b in a freshly allocated value.
func mul(a, b *big.Int) *big.Int {
	prod := new(big.Int)
	return prod.Mul(a, b)
}

// rem returns a%b, reporting an error when b is zero.
func rem(a, b *big.Int) *big.Int {
	if b.Cmp(&zero) == 0 {
		errorf("rem by zero")
	}
	r := new(big.Int)
	return r.Rem(a, b)
}

// sub returns a-b in a freshly allocated value.
func sub(a, b *big.Int) *big.Int {
	diff := new(big.Int)
	return diff.Sub(a, b)
}

// The builtin arithmetic functions all delegate to mathFunc.
func (c *Context) addFunc(name *token, expr *Expr) *Expr { return c.mathFunc(expr, add) }
func (c *Context) divFunc(name *token, expr *Expr) *Expr { return c.mathFunc(expr, div) }
func (c *Context) mulFunc(name *token, expr *Expr) *Expr { return c.mathFunc(expr, mul) }
func (c *Context) remFunc(name *token, expr *Expr) *Expr { return c.mathFunc(expr, rem) }
func (c *Context) subFunc(name *token, expr *Expr) *Expr { return c.mathFunc(expr, sub) }
// Comparison.

// boolFunc applies the big.Int predicate fn to the first two list elements
// of expr and wraps the result as a truth value.
func (c *Context) boolFunc(expr *Expr, fn func(*big.Int, *big.Int) bool) *Expr {
	return truthExpr(fn(c.getNumber(Car(expr)), c.getNumber(Car(Cdr(expr)))))
}

// The predicates compare with the arguments mirrored through Cmp;
// e.g. ge(a, b) reports a >= b, i.e. b <= a.
func ge(a, b *big.Int) bool { return b.Cmp(a) <= 0 }
func gt(a, b *big.Int) bool { return b.Cmp(a) < 0 }
func le(a, b *big.Int) bool { return b.Cmp(a) >= 0 }
func lt(a, b *big.Int) bool { return b.Cmp(a) > 0 }
func ne(a, b *big.Int) bool { return b.Cmp(a) != 0 }

// The builtin comparison functions all delegate to boolFunc.
func (c *Context) geFunc(name *token, expr *Expr) *Expr { return c.boolFunc(expr, ge) }
func (c *Context) gtFunc(name *token, expr *Expr) *Expr { return c.boolFunc(expr, gt) }
func (c *Context) leFunc(name *token, expr *Expr) *Expr { return c.boolFunc(expr, le) }
func (c *Context) ltFunc(name *token, expr *Expr) *Expr { return c.boolFunc(expr, lt) }
func (c *Context) neFunc(name *token, expr *Expr) *Expr { return c.boolFunc(expr, ne) }
// Logic. These are implemented here because they are variadic.
func (c *Context) andFunc(name *token, expr *Expr) *Expr {
if expr == nil {
return truthExpr(true)
}
if !Car(expr).isTrue() {
return truthExpr(false)
}
return c.andFunc(name, Cdr(expr))
}
func (c *Context) orFunc(name *token, expr *Expr) *Expr {
if expr == nil {
return truthExpr(false)
}
if Car(expr).isTrue() {
return truthExpr(true)
}
return c.orFunc(name, Cdr(expr))
} | lisp1_5/math.go | 0.573081 | 0.422266 | math.go | starcoder |
package three
import "github.com/gopherjs/gopherjs/js"
// Image wraps the underlying JavaScript image object; it is used as the
// Image field of Texture.
type Image struct {
	*js.Object
}
// Texture wraps a three.js Texture object; each field is bound to the
// corresponding JavaScript property via its js tag.
type Texture struct {
	*js.Object
	Id     int    `js:"id"`
	UUID   string `js:"uuid"`
	Name   string `js:"name"`
	Image  *Image `js:"image"`
	// Array of user-specified mipmaps (optional).
	Mipmaps        *js.Object    `js:"mipmaps"`
	WrapS          WrappingMode  `js:"wrapS"`
	WrapT          WrappingMode  `js:"wrapT"`
	MagFilter      MinMagFilter  `js:"magFilter"`
	MinFilter      MinMagFilter  `js:"minFilter"`
	Anisotropy     int           `js:"anisotropy"`
	Format         TextureFormat `js:"format"`
	InternalFormat string        `js:"internalFormat"`
	// Bug fix: this tag was a copy-paste of the previous field's
	// `js:"internalFormat"`, binding Type to the wrong JS property.
	Type TextureType `js:"type"`
	// How much a single repetition of the texture is offset from the beginning, in each direction U and V. Typical range is 0.0 to 1.0.
	Offset Vector2 `js:"offset"`
	// How many times the texture is repeated across the surface, in each direction U and V. If repeat is set greater than 1 in either direction, the
	Repeat Vector2 `js:"repeat"`
	// How much the texture is rotated around the center point, in radians. Positive values are counter-clockwise. Default is 0.
	Rotation float64 `js:"rotation"`
	// The point around which rotation occurs. A value of (0.5, 0.5) corresponds to the center of the texture. Default is (0, 0), the lower left.
	Center           Vector2 `js:"center"`
	MatrixAutoUpdate bool    `js:"matrixAutoUpdate"`
	// Matrix Matrix3 `js:"matrix"`
	GenerateMipmaps  bool            `js:"generateMipmaps"`
	PremultiplyAlpha bool            `js:"premultiplyAlpha"`
	FlipY            bool            `js:"flipY"`
	UnpackAlignment  int             `js:"unpackAlignment"`
	Encoding         TextureEncoding `js:"encoding"`
	Version          int             `js:"version"`
	NeedsUpdate      bool            `js:"needsUpdate"`
	UserData         *js.Object      `js:"userData"`
}
// TextureParameters collects the constructor arguments for NewTexture.
// Zero values are replaced with three.js defaults by NewTexture.
type TextureParameters struct {
	// The source image for the texture.
	Image *Image
	// How the image is applied to the object. An object type of THREE.UVMapping is the default, where the U,V coordinates are used to apply the map.
	Mapping MappingMode
	// This defines how the texture is wrapped horizontally and corresponds to U in UV mapping.
	WrapS WrappingMode
	// This defines how the texture is wrapped vertically and corresponds to V in UV mapping.
	WrapT WrappingMode
	// How the texture is sampled when a texel covers more than one pixel. The default is THREE.LinearFilter, which takes the four closest texels and bilinearly interpolates among them.
	MagFilter MinMagFilter
	// How the texture is sampled when a texel covers less than one pixel. The default is THREE.LinearMipmapLinearFilter, which uses mipmapping and a trilinear filter.
	MinFilter MinMagFilter
	// The pixel format of the texture data.
	Format TextureFormat
	// The data type of a texel (e.g. unsigned byte, float).
	Type TextureType
	// The number of samples taken along the axis through the pixel that has the highest density of texels. By default, this value is 1. A higher value gives a less blurry result than a basic mipmap, at the cost of more texture samples being used. Use renderer.getMaxAnisotropy() to find the maximum valid anisotropy value for the GPU; this value is usually a power of 2
	Anisotropy int
	// The color-space encoding of the texture data.
	Encoding TextureEncoding
}
// NewTexture constructs a three.js Texture from params, substituting the
// library defaults for any zero or invalid parameter values first.
func NewTexture(params TextureParameters) *Texture {
	// Anisotropy of 0 means "unset"; the minimum meaningful value is 1.
	if params.Anisotropy == 0 {
		params.Anisotropy = 1
	}
	// Coerce each out-of-range enum parameter to its default. The calls are
	// independent of one another (each touches only its own field).
	params.Mapping.clampDefault()
	params.WrapS.clampDefault()
	params.WrapT.clampDefault()
	params.MagFilter.clampDefault(true)
	params.MinFilter.clampDefault(false)
	params.Format.clampDefault()
	params.Type.clampDefault()
	params.Encoding.clampDefault()
	obj := three.Get("Texture").New(
		params.Image,
		params.Mapping,
		params.WrapS,
		params.WrapT,
		params.MagFilter,
		params.MinFilter,
		params.Format,
		params.Type,
		params.Anisotropy,
		params.Encoding,
	)
	return &Texture{Object: obj}
}
// UpdateMatrix recomputes the texture's uv-transform matrix from the
// .offset, .repeat, .rotation, and .center properties.
func (t *Texture) UpdateMatrix() {
	t.Object.Call("updateMatrix")
}
// Clone returns a new Texture wrapping a copy of the underlying object.
func (t *Texture) Clone() *Texture {
	copied := t.Object.Call("clone")
	return &Texture{Object: copied}
}
// ToJSON serializes the texture via the three.js toJSON method.
func (t *Texture) ToJSON() interface{} {
	return t.Call("toJSON")
}
// Dispose releases the resources held by the underlying texture object.
func (t *Texture) Dispose() {
	t.Call("dispose")
}
func (t *Texture) TransformUV(uv Vector2) Vector2 {
return Vector2{
Object: t.Object.Call("transformUV", uv),
}
} | materials_texture.go | 0.84891 | 0.435001 | materials_texture.go | starcoder |
package table
import "math/rand"
// Clone returns a new Table with the same contents as this one.
func (m Table[K, V]) Clone() Table[K, V] {
	// Pre-size the destination to avoid rehashing while copying.
	n := make(map[K]V, len(m))
	for k, v := range m {
		n[k] = v
	}
	return n
}
// Add adds a new element to the Table. An existing value under the same
// key is overwritten.
func (m Table[K, V]) Add(k K, v V) {
	m[k] = v
}
// Remove removes the element from the Table. Removing a missing key is a
// no-op (delete on an absent key does nothing).
func (m Table[K, V]) Remove(k K) {
	delete(m, k)
}
// MapKey returns a new map with the result of applying the function to each
// key, keeping values unchanged. If f maps two keys to the same result, one
// of the colliding values is kept (which one is unspecified).
func (m Table[K, V]) MapKey(f func(K, V) K) map[K]V {
	// Pre-size for the common case where f is injective.
	r := make(map[K]V, len(m))
	for k, v := range m {
		r[f(k, v)] = v
	}
	return r
}
// MapValue returns a new map with the result of applying the function to
// each value, keeping keys unchanged.
func (m Table[K, V]) MapValue(f func(K, V) V) map[K]V {
	// The result has exactly len(m) entries, so pre-size it.
	r := make(map[K]V, len(m))
	for k, v := range m {
		r[k] = f(k, v)
	}
	return r
}
// ReduceKey folds every key of the Table into a single key value, starting
// from initial and combining with f. Iteration order is unspecified, so f
// should be order-independent for deterministic results.
func (m Table[K, V]) ReduceKey(f func(K, K) K, initial K) K {
	acc := initial
	for key := range m {
		acc = f(acc, key)
	}
	return acc
}
// ReduceValue folds every value of the Table into a single value, starting
// from initial and combining with f. Iteration order is unspecified, so f
// should be order-independent for deterministic results.
func (m Table[K, V]) ReduceValue(f func(V, V) V, initial V) V {
	acc := initial
	for _, val := range m {
		acc = f(acc, val)
	}
	return acc
}
// Any reports whether at least one element in the Table satisfies the
// predicate. An empty Table yields false.
func (m Table[K, V]) Any(f func(K, V) bool) bool {
	for key, val := range m {
		if f(key, val) {
			return true
		}
	}
	return false
}
// All reports whether every element in the Table satisfies the predicate.
// An empty Table (vacuously) yields true.
func (m Table[K, V]) All(f func(K, V) bool) bool {
	for key, val := range m {
		if !f(key, val) {
			return false
		}
	}
	return true
}
// Count returns how many elements of the Table satisfy the predicate.
func (m Table[K, V]) Count(f func(K, V) bool) int {
	total := 0
	for key, val := range m {
		if f(key, val) {
			total++
		}
	}
	return total
}
// Filter returns a new map containing only the elements that satisfy the
// predicate.
func (m Table[K, V]) Filter(f func(K, V) bool) map[K]V {
	kept := make(map[K]V)
	for key, val := range m {
		if f(key, val) {
			kept[key] = val
		}
	}
	return kept
}
// Map applies the function to each element in the Table and returns the
// results as a new map. If f produces duplicate keys, one of the colliding
// entries is kept (which one is unspecified).
func (m Table[K, V]) Map(f func(K, V) (K, V)) map[K]V {
	// Pre-size for the common case where f keeps keys distinct. Use fresh
	// names for f's results instead of reassigning the range variables.
	r := make(map[K]V, len(m))
	for k, v := range m {
		nk, nv := f(k, v)
		r[nk] = nv
	}
	return r
}
// Random returns a random element from the Table.
func (m Table[K, V]) Random() (rk K, rv V) {
l := rand.Intn(len(m))
i := 0
for k, v := range m {
if i == l {
return k, v
}
i++
}
return rk, rv
} | collections/table/methods.go | 0.825941 | 0.478468 | methods.go | starcoder |
package fmt
import (
"os"
)
// Printf formats according to a format specifier and writes to standard output.
// It returns the number of bytes written and any write error encountered.
func Printf(format string, a ...interface{}) (n int, err error) {
	n, err = Fprintf(os.Stdout, format, a...)
	return n, err
}
// Print formats using the default formats for its operands and writes to standard output.
// Spaces are added between operands when neither is a string.
// It returns the number of bytes written and any write error encountered.
func Print(a ...interface{}) (n int, err error) {
	n, err = Fprint(os.Stdout, a...)
	return n, err
}
// Println formats using the default formats for its operands and writes to standard output.
// Spaces are always added between operands and a newline is appended.
// It returns the number of bytes written and any write error encountered.
func Println(a ...interface{}) (n int, err error) {
	n, err = Fprintln(os.Stdout, a...)
	return n, err
}
// Scan scans text read from standard input, storing successive
// space-separated values into successive arguments. Newlines count
// as space. It returns the number of items successfully scanned.
// If that is less than the number of arguments, err will report why.
func Scan(a ...interface{}) (n int, err error) {
	n, err = Fscan(os.Stdin, a...)
	return n, err
}
// Scanln is similar to Scan, but stops scanning at a newline and
// after the final item there must be a newline or EOF.
func Scanln(a ...interface{}) (n int, err error) {
	n, err = Fscanln(os.Stdin, a...)
	return n, err
}
// Scanf scans text read from standard input, storing successive
// space-separated values into successive arguments as determined by
// the format. It returns the number of items successfully scanned.
// If that is less than the number of arguments, err will report why.
// Newlines in the input must match newlines in the format.
// The one exception: the verb %c always scans the next rune in the
// input, even if it is a space (or tab etc.) or newline.
func Scanf(format string, a ...interface{}) (n int, err error) {
return Fscanf(os.Stdin, format, a...)
} | src/fmt/stdio.go | 0.761804 | 0.49347 | stdio.go | starcoder |
package values
import (
"reflect"
)
// Cached reflect.Type values used as conversion targets when comparing
// operands of mixed numeric kinds.
var (
	int64Type = reflect.TypeOf(int64(0))
	float64Type = reflect.TypeOf(float64(0))
)
// Equal returns a bool indicating whether a == b after conversion: both
// operands pass through ToLiquid, and mixed numeric kinds are widened to a
// common kind by joinKind before comparing.
func Equal(a, b interface{}) bool { // nolint: gocyclo
	a, b = ToLiquid(a), ToLiquid(b)
	if a == nil || b == nil {
		// Only nil equals nil.
		return a == b
	}
	ra, rb := reflect.ValueOf(a), reflect.ValueOf(b)
	switch joinKind(ra.Kind(), rb.Kind()) {
	case reflect.Array, reflect.Slice:
		// Sequences are equal when they have equal length and
		// pairwise-equal elements (recursively).
		if ra.Len() != rb.Len() {
			return false
		}
		for i := 0; i < ra.Len(); i++ {
			if !Equal(ra.Index(i).Interface(), rb.Index(i).Interface()) {
				return false
			}
		}
		return true
	case reflect.Bool:
		return ra.Bool() == rb.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		// Widen both sides to int64 so e.g. int8(1) == int64(1).
		return ra.Convert(int64Type).Int() == rb.Convert(int64Type).Int()
	case reflect.Float32, reflect.Float64:
		// Widen both sides to float64; joinKind also routes int-vs-float here.
		return ra.Convert(float64Type).Float() == rb.Convert(float64Type).Float()
	case reflect.String:
		return ra.String() == rb.String()
	case reflect.Ptr:
		// joinKind only yields Ptr when both operands are pointers; two nil
		// pointers of any type compare equal, otherwise fall back to
		// interface equality (identity of pointer and type).
		if rb.Kind() == reflect.Ptr && (ra.IsNil() || rb.IsNil()) {
			return ra.IsNil() == rb.IsNil()
		}
		return a == b
	default:
		// Incomparable or same non-special kinds: plain interface equality.
		return a == b
	}
}
// Less reports whether a sorts before b, after converting both operands
// with ToLiquid and widening mixed numeric kinds via joinKind. Operands of
// incomparable kinds (including nil) are never Less.
func Less(a, b interface{}) bool {
	a, b = ToLiquid(a), ToLiquid(b)
	if a == nil || b == nil {
		return false
	}
	va, vb := reflect.ValueOf(a), reflect.ValueOf(b)
	switch joinKind(va.Kind(), vb.Kind()) {
	case reflect.Bool:
		// false sorts before true.
		return vb.Bool() && !va.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return va.Convert(int64Type).Int() < vb.Convert(int64Type).Int()
	case reflect.Float32, reflect.Float64:
		return va.Convert(float64Type).Float() < vb.Convert(float64Type).Float()
	case reflect.String:
		return va.String() < vb.String()
	}
	return false
}
// joinKind computes the common kind under which two values of kinds a and b
// should be compared, or reflect.Invalid when no common kind exists.
// Arrays and slices join to Slice; integer kinds join to Int64; any
// int/float mix joins to Float64.
func joinKind(a, b reflect.Kind) reflect.Kind { // nolint: gocyclo
	if a == b {
		return a
	}
	switch a {
	case reflect.Array, reflect.Slice:
		switch b {
		case reflect.Array, reflect.Slice:
			return reflect.Slice
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		switch {
		case isIntKind(b):
			return reflect.Int64
		case isFloatKind(b):
			return reflect.Float64
		}
	case reflect.Float32, reflect.Float64:
		if isIntKind(b) || isFloatKind(b) {
			return reflect.Float64
		}
	}
	return reflect.Invalid
}
func isIntKind(k reflect.Kind) bool {
switch k {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return true
default:
return false
}
}
func isFloatKind(k reflect.Kind) bool {
switch k {
case reflect.Float32, reflect.Float64:
return true
default:
return false
}
} | values/compare.go | 0.729327 | 0.520374 | compare.go | starcoder |
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// AccessReviewInstance models a single recurrence of an access review
// schedule definition (instances are created for each recurrence of a
// review definition; see the definition property below).
type AccessReviewInstance struct {
    // Embedded base model.
    Entity
    // Returns the collection of reviewers who were contacted to complete this review. While the reviewers and fallbackReviewers properties of the accessReviewScheduleDefinition might specify group owners or managers as reviewers, contactedReviewers returns their individual identities. Supports $select. Read-only.
    contactedReviewers []AccessReviewReviewerable
    // Each user reviewed in an accessReviewInstance has a decision item representing if they were approved, denied, or not yet reviewed.
    decisions []AccessReviewInstanceDecisionItemable
    // There is exactly one accessReviewScheduleDefinition associated with each instance. It is the parent schedule for the instance, where instances are created for each recurrence of a review definition and each group selected to review by the definition.
    definition AccessReviewScheduleDefinitionable
    // DateTime when review instance is scheduled to end.The DatetimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
    endDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Collection of errors in an access review instance lifecycle. Read-only.
    errors []AccessReviewErrorable
    // This collection of reviewer scopes is used to define the list of fallback reviewers. These fallback reviewers will be notified to take action if no users are found from the list of reviewers specified. This could occur when either the group owner is specified as the reviewer but the group owner does not exist, or manager is specified as reviewer but a user's manager does not exist. Supports $select.
    fallbackReviewers []AccessReviewReviewerScopeable
    // This collection of access review scopes is used to define who the reviewers are. Supports $select. For examples of options for assigning reviewers, see Assign reviewers to your access review definition using the Microsoft Graph API.
    reviewers []AccessReviewReviewerScopeable
    // Created based on scope and instanceEnumerationScope at the accessReviewScheduleDefinition level. Defines the scope of users reviewed in a group. Supports $select and $filter (contains only). Read-only.
    scope AccessReviewScopeable
    // If the instance has multiple stages, this returns the collection of stages. A new stage will only be created when the previous stage ends. The existence, number, and settings of stages on a review instance are created based on the accessReviewStageSettings on the parent accessReviewScheduleDefinition.
    stages []AccessReviewStageable
    // DateTime when review instance is scheduled to start. May be in the future. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
    startDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Specifies the status of an accessReview. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, and AutoReviewed. Supports $select, $orderby, and $filter (eq only). Read-only.
    status *string
}
// NewAccessReviewInstance instantiates a new accessReviewInstance and sets the default values.
func NewAccessReviewInstance()(*AccessReviewInstance) {
    return &AccessReviewInstance{
        Entity: *NewEntity(),
    }
}
// CreateAccessReviewInstanceFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is accepted for factory-signature compatibility but is not
// inspected: this model is always constructed as AccessReviewInstance.
func CreateAccessReviewInstanceFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewAccessReviewInstance(), nil
}
// GetContactedReviewers gets the contactedReviewers property value: the individual identities of the reviewers contacted to complete this review. Read-only.
func (m *AccessReviewInstance) GetContactedReviewers()([]AccessReviewReviewerable) {
    if m == nil {
        return nil
    }
    return m.contactedReviewers
}
// GetDecisions gets the decisions property value: one decision item per reviewed user, recording approved, denied, or not yet reviewed.
func (m *AccessReviewInstance) GetDecisions()([]AccessReviewInstanceDecisionItemable) {
    if m == nil {
        return nil
    }
    return m.decisions
}
// GetDefinition gets the definition property value: the parent accessReviewScheduleDefinition this instance was created from.
func (m *AccessReviewInstance) GetDefinition()(AccessReviewScheduleDefinitionable) {
    if m == nil {
        return nil
    }
    return m.definition
}
// GetEndDateTime gets the endDateTime property value: when the review instance is scheduled to end (ISO 8601, UTC). Read-only.
func (m *AccessReviewInstance) GetEndDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.endDateTime
}
// GetErrors gets the errors property value: errors collected over the instance lifecycle. Read-only.
func (m *AccessReviewInstance) GetErrors()([]AccessReviewErrorable) {
    if m == nil {
        return nil
    }
    return m.errors
}
// GetFallbackReviewers gets the fallbackReviewers property value: reviewer scopes notified when no users are found from the primary reviewers list.
func (m *AccessReviewInstance) GetFallbackReviewers()([]AccessReviewReviewerScopeable) {
    if m == nil {
        return nil
    }
    return m.fallbackReviewers
}
// GetFieldDeserializers returns the deserialization information for the
// current model: a map from JSON field name to a function that parses that
// field from a parse node into m. Base fields come from the embedded Entity.
func (m *AccessReviewInstance) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    // Collection-valued fields parse into []Parsable and are down-cast
    // element-by-element to their concrete model interface.
    res["contactedReviewers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewReviewerFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewerable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewReviewerable)
            }
            m.SetContactedReviewers(res)
        }
        return nil
    }
    res["decisions"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewInstanceDecisionItemFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewInstanceDecisionItemable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewInstanceDecisionItemable)
            }
            m.SetDecisions(res)
        }
        return nil
    }
    res["definition"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAccessReviewScheduleDefinitionFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDefinition(val.(AccessReviewScheduleDefinitionable))
        }
        return nil
    }
    res["endDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetEndDateTime(val)
        }
        return nil
    }
    res["errors"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewErrorFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewErrorable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewErrorable)
            }
            m.SetErrors(res)
        }
        return nil
    }
    res["fallbackReviewers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewReviewerScopeFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewerScopeable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewReviewerScopeable)
            }
            m.SetFallbackReviewers(res)
        }
        return nil
    }
    res["reviewers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewReviewerScopeFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewerScopeable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewReviewerScopeable)
            }
            m.SetReviewers(res)
        }
        return nil
    }
    res["scope"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAccessReviewScopeFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetScope(val.(AccessReviewScopeable))
        }
        return nil
    }
    res["stages"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessReviewStageFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewStageable, len(val))
            for i, v := range val {
                res[i] = v.(AccessReviewStageable)
            }
            m.SetStages(res)
        }
        return nil
    }
    // Scalar fields: absent values leave the current field untouched.
    res["startDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetStartDateTime(val)
        }
        return nil
    }
    res["status"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetStatus(val)
        }
        return nil
    }
    return res
}
// GetReviewers gets the reviewers property value: the access review scopes defining who the reviewers are.
func (m *AccessReviewInstance) GetReviewers()([]AccessReviewReviewerScopeable) {
    if m == nil {
        return nil
    }
    return m.reviewers
}
// GetScope gets the scope property value: the scope of users reviewed in a group, derived from the parent definition. Read-only.
func (m *AccessReviewInstance) GetScope()(AccessReviewScopeable) {
    if m == nil {
        return nil
    }
    return m.scope
}
// GetStages gets the stages property value: the collection of stages for a multi-stage instance; a new stage is only created when the previous one ends.
func (m *AccessReviewInstance) GetStages()([]AccessReviewStageable) {
    if m == nil {
        return nil
    }
    return m.stages
}
// GetStartDateTime gets the startDateTime property value: when the review instance is scheduled to start (ISO 8601, UTC); may be in the future. Read-only.
func (m *AccessReviewInstance) GetStartDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.startDateTime
}
// GetStatus gets the status property value. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, AutoReviewed. Read-only.
func (m *AccessReviewInstance) GetStatus()(*string) {
    if m == nil {
        return nil
    }
    return m.status
}
// Serialize writes the current object to the serialization writer: base
// Entity fields first, then each property of this model. Collection-valued
// properties are only written when non-nil.
func (m *AccessReviewInstance) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    if m.GetContactedReviewers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetContactedReviewers()))
        for i, v := range m.GetContactedReviewers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("contactedReviewers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetDecisions() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetDecisions()))
        for i, v := range m.GetDecisions() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("decisions", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("definition", m.GetDefinition())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteTimeValue("endDateTime", m.GetEndDateTime())
        if err != nil {
            return err
        }
    }
    if m.GetErrors() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetErrors()))
        for i, v := range m.GetErrors() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("errors", cast)
        if err != nil {
            return err
        }
    }
    if m.GetFallbackReviewers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetFallbackReviewers()))
        for i, v := range m.GetFallbackReviewers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("fallbackReviewers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetReviewers() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetReviewers()))
        for i, v := range m.GetReviewers() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("reviewers", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("scope", m.GetScope())
        if err != nil {
            return err
        }
    }
    if m.GetStages() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetStages()))
        for i, v := range m.GetStages() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("stages", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteTimeValue("startDateTime", m.GetStartDateTime())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("status", m.GetStatus())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetContactedReviewers sets the contactedReviewers property value. Returns the collection of reviewers who were contacted to complete this review. While the reviewers and fallbackReviewers properties of the accessReviewScheduleDefinition might specify group owners or managers as reviewers, contactedReviewers returns their individual identities. Supports $select. Read-only.
func (m *AccessReviewInstance) SetContactedReviewers(value []AccessReviewReviewerable)() {
if m != nil {
m.contactedReviewers = value
}
}
// SetDecisions sets the decisions property value. Each user reviewed in an accessReviewInstance has a decision item representing if they were approved, denied, or not yet reviewed.
func (m *AccessReviewInstance) SetDecisions(value []AccessReviewInstanceDecisionItemable)() {
if m != nil {
m.decisions = value
}
}
// SetDefinition sets the definition property value. There is exactly one accessReviewScheduleDefinition associated with each instance. It is the parent schedule for the instance, where instances are created for each recurrence of a review definition and each group selected to review by the definition.
func (m *AccessReviewInstance) SetDefinition(value AccessReviewScheduleDefinitionable)() {
if m != nil {
m.definition = value
}
}
// SetEndDateTime sets the endDateTime property value. DateTime when review instance is scheduled to end.The DatetimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
func (m *AccessReviewInstance) SetEndDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.endDateTime = value
}
}
// SetErrors sets the errors property value. Collection of errors in an access review instance lifecycle. Read-only.
func (m *AccessReviewInstance) SetErrors(value []AccessReviewErrorable)() {
if m != nil {
m.errors = value
}
}
// SetFallbackReviewers sets the fallbackReviewers property value. This collection of reviewer scopes is used to define the list of fallback reviewers. These fallback reviewers will be notified to take action if no users are found from the list of reviewers specified. This could occur when either the group owner is specified as the reviewer but the group owner does not exist, or manager is specified as reviewer but a user's manager does not exist. Supports $select.
func (m *AccessReviewInstance) SetFallbackReviewers(value []AccessReviewReviewerScopeable)() {
if m != nil {
m.fallbackReviewers = value
}
}
// SetReviewers sets the reviewers property value. This collection of access review scopes is used to define who the reviewers are. Supports $select. For examples of options for assigning reviewers, see Assign reviewers to your access review definition using the Microsoft Graph API.
func (m *AccessReviewInstance) SetReviewers(value []AccessReviewReviewerScopeable)() {
if m != nil {
m.reviewers = value
}
}
// SetScope sets the scope property value. Created based on scope and instanceEnumerationScope at the accessReviewScheduleDefinition level. Defines the scope of users reviewed in a group. Supports $select and $filter (contains only). Read-only.
func (m *AccessReviewInstance) SetScope(value AccessReviewScopeable)() {
if m != nil {
m.scope = value
}
}
// SetStages sets the stages property value. If the instance has multiple stages, this returns the collection of stages. A new stage will only be created when the previous stage ends. The existence, number, and settings of stages on a review instance are created based on the accessReviewStageSettings on the parent accessReviewScheduleDefinition.
func (m *AccessReviewInstance) SetStages(value []AccessReviewStageable)() {
if m != nil {
m.stages = value
}
}
// SetStartDateTime sets the startDateTime property value. DateTime when review instance is scheduled to start. May be in the future. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
func (m *AccessReviewInstance) SetStartDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.startDateTime = value
}
}
// SetStatus sets the status property value. Specifies the status of an accessReview. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, and AutoReviewed. Supports $select, $orderby, and $filter (eq only). Read-only.
func (m *AccessReviewInstance) SetStatus(value *string)() {
if m != nil {
m.status = value
}
} | models/access_review_instance.go | 0.710427 | 0.420272 | access_review_instance.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.