code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package matrix
import (
"errors"
"reflect"
)
var (
errPositiveNumberRequired = errors.New("positive number required")
errIndexOutOfRange = errors.New("index out of range")
errDimensionMismatch = errors.New("matrices dimensions do not match")
errIsNotSquare = errors.New("must be a n x n matrix")
errRowsMustBeSameSize = errors.New("rows must contain the same number of elements")
)
// Matrix defines a two-dimensional matrix comprised of
// rows and columns.
type Matrix struct {
data [][]float64
}
// New creates a new matrix.
func New(data [][]float64) (*Matrix, error) {
var numRow = len(data)
var numCol int
if numRow == 0 {
return nil, errPositiveNumberRequired
}
if len(data[0]) == 0 {
return nil, errPositiveNumberRequired
}
numCol = len(data[0])
for _, row := range data {
if len(row) != numCol {
return nil, errRowsMustBeSameSize
}
}
return &Matrix{data: data}, nil
}
// NewIdentity creates a new n x n identity matrix.
func NewIdentity(n int) (*Matrix, error) {
if n <= 0 {
return nil, errPositiveNumberRequired
}
var data = make([][]float64, n)
for i := 0; i < len(data); i++ {
data[i] = make([]float64, n)
for j := 0; j < len(data[i]); j++ {
if i == j {
data[i][j] = 1
} else {
data[i][j] = 0
}
}
}
matrix, err := New(data)
if err != nil {
return nil, err
}
return matrix, nil
}
// GetRowCount determines the total number of rows in the matrix.
func (m *Matrix) GetRowCount() int {
return len(m.data)
}
// GetRow gets the indexed row.
func (m *Matrix) GetRow(index int) ([]float64, error) {
if index < m.GetRowCount() {
return m.data[index], nil
}
return nil, errIndexOutOfRange
}
// GetColumnCount determines the number of columns in the matrix.
func (m *Matrix) GetColumnCount() int {
return len(m.data[0])
}
// GetColumn gets the indexed column.
func (m *Matrix) GetColumn(index int) ([]float64, error) {
var column []float64
rowCount := m.GetRowCount()
if index < 0 || index > rowCount {
return column, errIndexOutOfRange
}
for i := 0; i < rowCount; i++ {
column = append(column, m.data[i][index])
}
return column, nil
}
// GetElement retrieves an element from the matrix.
func (m *Matrix) GetElement(i, j int) (float64, error) {
if (i < 0 || i >= m.GetRowCount()) || (j < 0 || j >= m.GetColumnCount()) {
return 0, errIndexOutOfRange
}
return m.data[i][j], nil
}
// SetElement sets an element in the matrix.
func (m *Matrix) SetElement(i, j int, val float64) error {
if (i < 0 || i >= m.GetRowCount()) || (j < 0 || j >= m.GetColumnCount()) {
return errIndexOutOfRange
}
m.data[i][j] = val
return nil
}
// Scale executes scalar multiplication on a matrix.
func (m *Matrix) Scale(val float64) {
for i := 0; i < m.GetRowCount(); i++ {
for j := 0; j < m.GetColumnCount(); j++ {
m.data[i][j] = val * m.data[i][j]
}
}
}
// Transpose executes a transposition on the matrix.
func (m *Matrix) Transpose() {
var numRow = m.GetRowCount()
var numCol = m.GetColumnCount()
var data = make([][]float64, numCol)
for i := 0; i < numCol; i++ {
data[i] = make([]float64, numRow)
for j := 0; j < numRow; j++ {
data[i][j] = m.data[j][i]
}
}
m.data = data
}
// Determinant calculates the determinant of the matrix.
// Must be an n x n matrix.
func (m *Matrix) Determinant() (float64, error) {
if !m.IsSquare() {
return 0, errIsNotSquare
}
var numCol = m.GetColumnCount()
var numRow = m.GetRowCount()
var determinant float64
var diagLeft float64
var diagRight float64
for j := 0; j < numCol; j++ {
diagLeft = m.data[0][j]
diagRight = m.data[0][j]
for i := 0; i < numRow; i++ {
diagRight *= m.data[i][(((j+i)%numCol)+numCol)%numCol]
diagLeft *= m.data[i][(((j-i)%numCol)+numCol)%numCol]
}
determinant += diagRight - diagLeft
}
return determinant, nil
}
// IsSquare determines if the matrix is an n x n matrix.
func (m *Matrix) IsSquare() bool {
return m.GetRowCount() == m.GetColumnCount()
}
// IsEqual determines if a matrix is equal to another matrix.
func (m *Matrix) IsEqual(b *Matrix) bool {
return reflect.DeepEqual(m.data, b.data)
}
func (m *Matrix) IsSameSize(b *Matrix) bool {
return m.GetRowCount() == b.GetRowCount() &&
m.GetColumnCount() == b.GetColumnCount()
}
// Add adds two matrices together.
func (m *Matrix) Add(b *Matrix) error {
if !m.IsSameSize(b) {
return errDimensionMismatch
}
for i := 0; i < m.GetRowCount(); i++ {
for j := 0; j < m.GetColumnCount(); j++ {
current, err := m.GetElement(i, j)
if err != nil {
return err
}
additive, err := b.GetElement(i, j)
if err != nil {
return err
}
m.SetElement(i, j, current+additive)
}
}
return nil
} | matrix.go | 0.667473 | 0.497986 | matrix.go | starcoder |
package vbo
import (
"time"
"github.com/golang/geo/s1"
"github.com/golang/geo/s2"
)
const (
earthRadius float64 = 6367000.0
)
var gateWidth = s1.ChordAngleFromAngle(s1.Angle(12.5 / earthRadius))
func processLaps(f File) (laps []Lap) {
startLine := s2.PointFromLatLng(s2.LatLngFromDegrees(f.Start.Lat, f.Start.Lng))
rows := f.Data.Rows
latIndex := f.Columns[Latitude]
longIndex := f.Columns[Longitude]
latLng := func(r DataRow) s2.LatLng {
s1Lat := r.GetValue(latIndex).(float64)
s1Lng := r.GetValue(longIndex).(float64)
return s2.LatLngFromDegrees(s1Lat/60, s1Lng*-1/60)
}
startFraction := func(i int) time.Duration {
if i <= 1 || i == len(rows)-1 {
return 0
}
a := s2.PointFromLatLng(latLng(rows[i-1]))
b := s2.PointFromLatLng(latLng(rows[i]))
x := s2.Project(startLine, a, b)
sf := s2.DistanceFraction(x, a, b)
sf = 1.0 - sf
duration := float64(f.duration(i-1, i)) * sf
return time.Duration(duration)
}
duration := func(s int, e int) time.Duration {
sd := startFraction(s)
ed := startFraction(e)
return f.duration(s, e) + sd - ed
}
start := 0
end := 0
var s s2.Point
var e s2.Point
minDistance := gateWidth
partial := true
firstLap := true
inRange := false
startedInRange := false
nearStartLine := false
for i, v := range rows {
if i == 0 {
e = s2.PointFromLatLng(latLng(v))
comp := s2.CompareDistance(
startLine,
e,
gateWidth)
if comp < 1 {
inRange = true
nearStartLine = true
startedInRange = true
minDistance = s1.ChordAngleFromAngle(startLine.Distance(e))
}
continue
}
s = e
e = s2.PointFromLatLng(latLng(v))
inRange = s2.IsDistanceLess(
startLine,
s,
e,
gateWidth)
if inRange {
d, updated := s2.UpdateMinDistance(
startLine,
s,
e,
minDistance)
if updated {
end = i
minDistance = d
startedInRange = false
}
nearStartLine = true
} else if nearStartLine { // previously near the start line
if firstLap {
partial = !startedInRange
}
laps = append(laps, newLap(f, start, end, partial, duration))
start = end
partial = false
firstLap = false
minDistance = gateWidth
nearStartLine = false
}
}
if nearStartLine && start != end {
if firstLap {
partial = !startedInRange
}
laps = append(laps, newLap(f, start, end, partial, duration))
start = end
}
if start != (len(rows) - 1) {
laps = append(laps, newLap(f, start, len(rows)-1, true, duration))
}
return
}
func newLap(f File, s int, e int, partial bool, duration func(a, b int) time.Duration) Lap {
return Lap{startIndex: s, endIndex: e, Partial: partial, LapTime: jsonDuration(duration(s, e).Round(10 * time.Millisecond))}
}
func Distance(f *File) float64 {
//latLons := make([]s2.LatLng, len(file.data.rows))
latIndex := f.Columns[Latitude]
longIndex := f.Columns[Longitude]
latLng := func(r DataRow) s2.LatLng {
s1Lat := r.GetValue(latIndex).(float64)
s1Lng := r.GetValue(longIndex).(float64)
return s2.LatLngFromDegrees(s1Lat/60, s1Lng*-1/60)
}
var distance float64
for i, v := range f.Data.Rows {
if i == 0 {
continue
}
distance = distance + latLng(v).Distance(latLng(f.Data.Rows[i-1])).Radians()
//latLons = append(latLons, v.latLon)
}
//polyline := s2.PolylineFromLatLngs(latLons)
return distance * (earthRadius / 1000)
} | pkg/vbo/s2.go | 0.594434 | 0.429011 | s2.go | starcoder |
Package oparse parses simple expressions in orismologer protos.
Basic arithmetic, variables, function calls, string literals, nested expressions, and string
concatenation are supported.
Based on the version originally published at:
https://github.com/alecthomas/participle/blob/master/_examples/expr/main.go
*/
package oparse
import (
"errors"
"fmt"
"log"
"math"
"strings"
"github.com/alecthomas/participle"
"github.com/golang/glog"
)
// Operator represents an arithmetic (or string interpolation) operator, eg: +.
type Operator int
const (
// OpMul represents a multiplication symbol (*).
OpMul Operator = iota
// OpDiv represents a division symbol (/).
OpDiv
// OpAdd represents an addition symbol (+).
OpAdd
// OpSub represents a subtraction symbol (-).
OpSub
)
var operatorMap = map[string]Operator{"+": OpAdd, "-": OpSub, "*": OpMul, "/": OpDiv}
// Capture implements Participle's Capture interface.
func (o *Operator) Capture(s []string) error {
*o = operatorMap[s[0]]
return nil
}
// Arg captures a function argument as an identifier optionally followed by a comma.
type Arg struct {
Value Expression `@@` // nolint: govet
Separator *string `[ "," ]`
}
// Function captures a function call as an identifier followed by a matched pair of brackets which
// contain 0 or more arguments.
type Function struct {
Name string `@Ident`
Open string `"("`
Args []*Arg `{ @@ }`
Close string `")"`
}
// Value captures a value, which is either a literal of some kind (eg: a string or a number) or
// something that evaluates to one (eg: a function call, or a nested expression).
type Value struct {
// NB: All numeric values will be represented as floats, to simplify parsing.
Number *float64 `@(Float|Int)`
StrLiteral *string `| @(String|Char)`
Function *Function `| @@`
Variable *string `| @Ident`
Subexpression *Expression `| "(" @@ ")"`
}
// Factor captures a base and an exponent.
type Factor struct {
Base *Value `@@`
Exponent *Value `[ "^" @@ ]`
}
// OpFactor captures a multiplication or division operator followed by a factor.
type OpFactor struct {
Operator Operator `@("*" | "/")`
Factor *Factor `@@`
}
// Term captures a Factor followed by an OpFactor.
type Term struct {
Left *Factor `@@`
Right []*OpFactor `{ @@ }`
}
// OpTerm captures a plus or minus operator followed by a term.
type OpTerm struct {
Operator Operator `@("+" | "-")`
Term *Term `@@`
}
// Expression is the top level node in the grammar AST. It represents the complete expression to be
// parsed and evaluated.
type Expression struct {
Left *Term `@@`
Right []*OpTerm `{ @@ }`
}
// Functions for displaying parsed expressions. Useful for debugging.
func (o Operator) String() string {
switch o {
case OpMul:
return "*"
case OpDiv:
return "/"
case OpSub:
return "-"
case OpAdd:
return "+"
}
glog.Error("Got unsupported operator while parsing expression")
return "?"
}
func (f *Function) String() string {
var args []string
for _, arg := range f.Args {
args = append(args, arg.Value.String())
}
return fmt.Sprintf("%v(%v)", f.Name, strings.Join(args, ", "))
}
func (v *Value) String() string {
switch {
case v.Number != nil:
return fmt.Sprintf("%g", *v.Number)
case v.StrLiteral != nil:
return fmt.Sprintf("%q", *v.StrLiteral)
case v.Variable != nil:
return *v.Variable
case v.Function != nil:
return v.Function.String()
case v.Subexpression != nil:
return "(" + v.Subexpression.String() + ")"
default:
return ""
}
}
func (f *Factor) String() string {
out := f.Base.String()
if f.Exponent != nil {
out += " ^ " + f.Exponent.String()
}
return out
}
func (o *OpFactor) String() string {
return fmt.Sprintf("%s %s", o.Operator, o.Factor)
}
func (t *Term) String() string {
out := []string{t.Left.String()}
for _, r := range t.Right {
out = append(out, r.String())
}
return strings.Join(out, " ")
}
func (o *OpTerm) String() string {
return fmt.Sprintf("%s %s", o.Operator, o.Term)
}
func (e *Expression) String() string {
out := []string{e.Left.String()}
for _, r := range e.Right {
out = append(out, r.String())
}
return strings.Join(out, " ")
}
// Functions for actually evaluating parsed expressions.
func (o Operator) eval(l, r interface{}) (interface{}, error) {
_, lIsInt := l.(int)
_, rIsInt := r.(int)
// Because of earlier handling we can assume that all numeric values are represented as floats.
// ie: We should never get an int here.
if lIsInt || rIsInt {
log.Fatal("Evaluated parser output contained an int. That should not have happened.")
}
lFloat, lIsFloat := l.(float64)
rFloat, rIsFloat := r.(float64)
_, lIsString := l.(string)
_, rIsString := r.(string)
if lIsFloat && rIsFloat {
// Accept loss in precision in exchange for simpler code by always using floats for arithmetic.
switch o {
case OpMul:
return lFloat * rFloat, nil
case OpDiv:
if rFloat == 0 {
return nil, errors.New("division by 0")
}
return lFloat / rFloat, nil
case OpAdd:
return lFloat + rFloat, nil
case OpSub:
return lFloat - rFloat, nil
}
return nil, errors.New(fmt.Sprintf("unsupported float operator: %v", o))
}
if lIsString || rIsString {
if o == OpAdd {
return fmt.Sprint(l) + fmt.Sprint(r), nil
}
return nil, fmt.Errorf("unsupported string operator (use '+' for concatenation): %v", o)
}
return nil, errors.New("unsupported type (only floats and strings are supported)")
}
func (f *Function) eval(ctx Context, caller FunctionCaller) (interface{}, error) {
var args []interface{}
for _, arg := range f.Args {
argEval, err := arg.Value.eval(ctx, caller)
if err != nil {
return nil, err
}
args = append(args, argEval)
}
result, err := caller(f.Name, args...)
if err != nil {
return nil, err
}
// Convert any int output to float, to simplify parsing.
resultInt, resultIsInt := result.(int)
if resultIsInt {
return float64(resultInt), nil
}
return result, nil
}
func (v *Value) eval(ctx Context, caller FunctionCaller) (interface{}, error) {
switch {
case v.Number != nil:
return *v.Number, nil
case v.StrLiteral != nil:
return *v.StrLiteral, nil
case v.Variable != nil:
value, ok := ctx[*v.Variable]
if !ok {
return nil, errors.New("no such variable " + *v.Variable)
}
// Attempt to cast to float, then string, then fail.
valueInt, ok := value.(int)
if ok {
return float64(valueInt), nil
}
valueFloat, ok := value.(float64)
if ok {
return valueFloat, nil
}
valueString, ok := value.(string)
if ok {
return valueString, nil
}
return nil, fmt.Errorf("could not cast variable `%v` to float or string", *v.Variable)
case v.Function != nil:
return v.Function.eval(ctx, caller)
case v.Subexpression != nil:
return v.Subexpression.eval(ctx, caller)
default:
return nil, nil
}
}
func (f *Factor) eval(ctx Context, caller FunctionCaller) (interface{}, error) {
b, err := f.Base.eval(ctx, caller)
if err != nil {
return nil, err
}
if f.Exponent != nil {
exponentEval, err := f.Exponent.eval(ctx, caller)
if err != nil {
return nil, err
}
return math.Pow(b.(float64), exponentEval.(float64)), nil
}
return b, nil
}
func (t *Term) eval(ctx Context, caller FunctionCaller) (interface{}, error) {
n, err := t.Left.eval(ctx, caller)
if err != nil {
return nil, err
}
for _, r := range t.Right {
rFactorEval, err := r.Factor.eval(ctx, caller)
if err != nil {
return nil, err
}
n, err = r.Operator.eval(n, rFactorEval)
if err != nil {
return nil, err
}
}
return n, nil
}
func (e *Expression) eval(ctx Context, caller FunctionCaller) (interface{}, error) {
l, err := e.Left.eval(ctx, caller)
if err != nil {
return nil, err
}
for _, r := range e.Right {
rEval, err := r.Term.eval(ctx, caller)
if err != nil {
return nil, err
}
l, err = r.Operator.eval(l, rEval)
if err != nil {
return nil, err
}
}
return l, nil
}
// Functions for returning information about expressions.
func (f *Function) identifiers() (variables []string, functions []string) {
functions = append(functions, f.Name)
for _, arg := range f.Args {
argVars, argFuncs := arg.Value.Identifiers()
variables = append(variables, argVars...)
functions = append(functions, argFuncs...)
}
return variables, functions
}
func (v *Value) identifiers() (variables []string, functions []string) {
switch {
case v.Variable != nil:
variables = append(variables, *v.Variable)
case v.Function != nil:
return v.Function.identifiers()
case v.Subexpression != nil:
return v.Subexpression.Identifiers()
}
return variables, functions
}
func (f *Factor) identifiers() (variables []string, functions []string) {
variables, functions = f.Base.identifiers()
if f.Exponent != nil {
expVars, expFuncs := f.Exponent.identifiers()
variables = append(variables, expVars...)
functions = append(functions, expFuncs...)
}
return variables, functions
}
func (t *Term) identifiers() (variables []string, functions []string) {
variables, functions = t.Left.identifiers()
for _, r := range t.Right {
rFactorVars, rFactorFuncs := r.Factor.identifiers()
variables = append(variables, rFactorVars...)
functions = append(functions, rFactorFuncs...)
}
return variables, functions
}
// Identifiers returns the names of the variables and functions in the given expression.
func (e *Expression) Identifiers() (variables []string, functions []string) {
if e.Left != nil { // Can be nil if the expression is empty (ie: "").
variables, functions = e.Left.identifiers()
}
for _, r := range e.Right {
opTermVars, opTermFuncs := r.Term.identifiers()
variables = append(variables, opTermVars...)
functions = append(functions, opTermFuncs...)
}
return variables, functions
}
// Context maps variable names to the values they should be replaced by in expressions.
type Context map[string]interface{}
/*
FunctionCaller defines a function which can call another function given its name as a string and any
arguments.
*/
type FunctionCaller func(string, ...interface{}) (interface{}, error)
/*
Parse is a convenience function which parses a string and returns the resulting expression, which
can then be evaluated.
*/
func Parse(input string) (*Expression, error) {
expression := &Expression{}
parser, err := participle.Build(expression)
if err != nil {
return nil, fmt.Errorf("could not build parser (try checking the grammar): %v", err)
}
if err = parser.ParseString(input, expression); err != nil {
return nil, fmt.Errorf("could not parse string %q: %v", input, err)
}
return expression, nil
}
/*
Eval is a convenience function which evaluates a parsed expression and returns the result.
The ctx parameter is a map containing variable definitions. Note that all numeric variable values
are cast to float64.
*/
func Eval(expression *Expression, ctx Context, caller FunctionCaller) (interface{}, error) {
result, err := expression.eval(ctx, caller)
if err != nil {
return nil, fmt.Errorf("could not evaluate expression `%v`: %v", expression, err)
}
glog.Infof("Evaluated expression: %v = %v", expression, result)
return result, nil
} | oparse/parser.go | 0.899022 | 0.656108 | parser.go | starcoder |
package gps
import (
"errors"
"strconv"
"strings"
"time"
)
var (
errEmptyNMEASentence = errors.New("cannot parse empty NMEA sentence")
errUnknownNMEASentence = errors.New("unsupported NMEA sentence type")
errInvalidGGASentence = errors.New("invalid GGA NMEA sentence")
errInvalidRMCSentence = errors.New("invalid RMC NMEA sentence")
)
// Parser for GPS NMEA sentences.
type Parser struct {
}
// Fix is a GPS location fix
type Fix struct {
// Valid if the fix was valid.
Valid bool
// Time that the fix was taken, in UTC time.
Time time.Time
// Latitude is the decimal latitude. Negative numbers indicate S.
Latitude float32
// Longitude is the decimal longitude. Negative numbers indicate E.
Longitude float32
// Altitude is only returned for GGA sentences.
Altitude int32
// Satellites is the number of visible satellites, but is only returned for GGA sentences.
Satellites int16
// Speed based on reported movement. Only returned for RMC sentences.
Speed float32
// Heading based on reported movement. Only returned for RMC sentences.
Heading float32
}
// NewParser returns a GPS NMEA Parser.
func NewParser() Parser {
return Parser{}
}
// Parse parses a NMEA sentence looking for fix info.
func (parser *Parser) Parse(sentence string) (fix Fix, err error) {
if sentence == "" {
err = errEmptyNMEASentence
return
}
typ := sentence[3:6]
switch typ {
case "GGA":
fields := strings.Split(sentence, ",")
if len(fields) != 15 {
err = errInvalidGGASentence
return
}
fix.Altitude = findAltitude(fields[9])
fix.Satellites = findSatellites(fields[7])
fix.Longitude = findLongitude(fields[4], fields[5])
fix.Latitude = findLatitude(fields[2], fields[3])
fix.Time = findTime(fields[1])
fix.Valid = (fix.Altitude != -99999) && (fix.Satellites > 0)
case "RMC":
fields := strings.Split(sentence, ",")
if len(fields) != 13 {
err = errInvalidRMCSentence
return
}
fix.Longitude = findLongitude(fields[5], fields[6])
fix.Latitude = findLatitude(fields[3], fields[4])
fix.Time = findTime(fields[1])
fix.Speed = findSpeed(fields[7])
fix.Heading = findHeading(fields[8])
fix.Valid = (len(fields[2]) > 0 && fields[2][0:1] == "A")
default:
err = errUnknownNMEASentence
}
return
}
// findTime returns the time from an NMEA sentence:
// $--GGA,hhmmss.ss,,,,,,,,,,,,,*xx
func findTime(val string) time.Time {
if len(val) < 6 {
return time.Time{}
}
h, _ := strconv.ParseInt(val[0:2], 10, 8)
m, _ := strconv.ParseInt(val[2:4], 10, 8)
s, _ := strconv.ParseInt(val[4:6], 10, 8)
ms, _ := strconv.ParseInt(val[7:10], 10, 16)
t := time.Date(0, 0, 0, int(h), int(m), int(s), int(ms), time.UTC)
return t
}
// findAltitude returns the altitude from an NMEA sentence:
// $--GGA,,,,,,,,,25.8,,,,,*63
func findAltitude(val string) int32 {
if len(val) > 0 {
var v, _ = strconv.ParseFloat(val, 32)
return int32(v)
}
return -99999
}
// findLatitude returns the Latitude from an NMEA sentence:
// $--GGA,,ddmm.mmmmm,x,,,,,,,,,,,*hh
func findLatitude(val, hemi string) float32 {
if len(val) > 8 {
var dd = val[0:2]
var mm = val[2:]
var d, _ = strconv.ParseFloat(dd, 32)
var m, _ = strconv.ParseFloat(mm, 32)
var v = float32(d + (m / 60))
if hemi == "S" {
v *= -1
}
return v
}
return 0.0
}
// findLatitude returns the longitude from an NMEA sentence:
// $--GGA,,,,dddmm.mmmmm,x,,,,,,,,,*hh
func findLongitude(val, hemi string) float32 {
if len(val) > 8 {
var ddd = val[0:3]
var mm = val[3:]
var d, _ = strconv.ParseFloat(ddd, 32)
var m, _ = strconv.ParseFloat(mm, 32)
var v = float32(d + (m / 60))
if hemi == "W" {
v *= -1
}
return v
}
return 0.0
}
// findSatellites returns the satellites from an NMEA sentence:
// $--GGA,,,,,,,nn,,,,,,,*hh
func findSatellites(val string) (n int16) {
if len(val) > 0 {
var nn = val
var v, _ = strconv.ParseInt(nn, 10, 32)
n = int16(v)
return n
}
return 0
}
// findSpeed returns the speed from an RMC NMEA sentence.
func findSpeed(val string) float32 {
if len(val) > 0 {
var v, _ = strconv.ParseFloat(val, 32)
return float32(v)
}
return 0
}
// findHeading returns the speed from an RMC NMEA sentence.
func findHeading(val string) float32 {
if len(val) > 0 {
var v, _ = strconv.ParseFloat(val, 32)
return float32(v)
}
return 0
} | gps/gpsparser.go | 0.644561 | 0.409044 | gpsparser.go | starcoder |
package day10
import (
"fmt"
"math"
"github.com/nlowe/aoc2019/challenge"
"github.com/spf13/cobra"
)
const symAsteroid = '#'
var A = &cobra.Command{
Use: "10a",
Short: "Day 10, Problem A",
Run: func(_ *cobra.Command, _ []string) {
fmt.Printf("Answer: %d\n", a(challenge.FromFile()))
},
}
type asteroid struct {
x float64
y float64
}
func (a asteroid) distanceTo(other asteroid) float64 {
return math.Sqrt(math.Pow(a.x-other.x, 2) + math.Pow(a.y-other.y, 2))
}
func (a asteroid) angleTo(other asteroid) float64 {
theta := math.Atan2(other.x-a.x, a.y-other.y)
if theta < 0 {
return theta + 2*math.Pi
}
return theta
}
func (a asteroid) slopeTo(other asteroid) float64 {
if a.y == other.y {
return math.Inf(1)
} else if a.x == other.x {
return 0
}
return math.Abs(a.x-other.x) / math.Abs(a.y-other.y)
}
func (a asteroid) canSee(other asteroid, all []asteroid) bool {
if a.x == other.x && a.y == other.y {
return false
}
for _, blocker := range all {
if (blocker.x == a.x || blocker.x == other.x) && (blocker.y == a.y || blocker.y == other.y) {
continue
} else if blocker.x < math.Min(a.x, other.x) || blocker.x > math.Max(a.x, other.x) {
continue
} else if blocker.y < math.Min(a.y, other.y) || blocker.y > math.Max(a.y, other.y) {
continue
}
if a.slopeTo(blocker) == a.slopeTo(other) {
return a.distanceTo(other) < a.distanceTo(blocker)
}
}
return true
}
func a(challenge *challenge.Input) int {
asteroids := makeMap(challenge)
_, result, _ := findStation(asteroids)
return result
}
func findStation(asteroids []asteroid) (station asteroid, best int, targets []asteroid) {
for _, a := range asteroids {
seen := 0
var inSight []asteroid
for _, other := range asteroids {
if a.canSee(other, asteroids) {
inSight = append(inSight, other)
seen++
}
}
if seen > best {
station = a
best = seen
targets = inSight
}
}
return
}
func makeMap(challenge *challenge.Input) []asteroid {
var asteroids []asteroid
y := 0
for line := range challenge.Lines() {
for x, v := range line {
if v == symAsteroid {
asteroids = append(asteroids, asteroid{float64(x), float64(y)})
}
}
y++
}
return asteroids
} | challenge/day10/a.go | 0.696887 | 0.435181 | a.go | starcoder |
package q
import (
"fmt"
"hash/fnv"
"github.com/aunum/gold/pkg/v1/common/num"
"github.com/aunum/log"
"github.com/k0kubun/pp"
"gorgonia.org/tensor"
)
// Table is the qualtiy table which stores the quality of an action by state.
type Table interface {
// GetMax returns the action with the max Q value for a given state hash.
GetMax(state uint32) (action int, qValue float32, err error)
// Get the Q value for the given state and action.
Get(state uint32, action int) (float32, error)
// Set the q value of the action taken for a given state.
Set(state uint32, action int, value float32) error
// Clear the table.
Clear() error
// Pretty print the table.
Print()
}
// MemTable is an in memory Table with a row for every state, and a column for every action. State is
// held as a hash of observations.
type MemTable struct {
actionSpaceSize int
table map[uint32][]float32
}
// NewMemTable returns a new MemTable with the dimensions defined by the observation and
// action space sizes.
func NewMemTable(actionSpaceSize int) Table {
return &MemTable{
actionSpaceSize: actionSpaceSize,
table: map[uint32][]float32{},
}
}
// GetMax returns the action with the max Q value for a given state hash.
func (m *MemTable) GetMax(state uint32) (action int, qValue float32, err error) {
qv, ok := m.table[state]
if !ok {
log.Debug("state does not exist yet: ", state)
return 0, 0.0, nil
}
// fmt.Println("state exists! ", state)
action, qValue = num.MaxF32(qv)
return
}
// Get the Q value for the given state and action.
func (m *MemTable) Get(state uint32, action int) (float32, error) {
qv, ok := m.table[state]
if !ok {
return 0.0, nil
}
if len(qv) < action+1 {
return 0.0, fmt.Errorf("action %d outside of action space size %d", action, m.actionSpaceSize)
}
return qv[action], nil
}
// Set the quality of the action taken for a given state.
func (m *MemTable) Set(state uint32, action int, qValue float32) error {
qv, ok := m.table[state]
if !ok {
qv = make([]float32, m.actionSpaceSize)
}
qv[action] = qValue
m.table[state] = qv
return nil
}
// Clear the table.
func (m *MemTable) Clear() error {
m.table = map[uint32][]float32{}
return nil
}
// Print the table with a pretty printer.
func (m *MemTable) Print() {
for state, values := range m.table {
fmt.Println("-----")
fmt.Printf("---\nstate: %d\nqvalues: %s\n", state, pp.Sprint(values))
fmt.Println("-----")
}
}
// HashState observations into an integer value. Note: this requires observations to always
// occur in the same order.
func HashState(observations *tensor.Dense) uint32 {
h := fnv.New32a()
s := fmt.Sprintf("%v", observations)
h.Write([]byte(s))
return h.Sum32()
} | pkg/v1/agent/q/table.go | 0.724188 | 0.458227 | table.go | starcoder |
package sigmo
import (
"fmt"
"log"
)
func Add(a Atom, b Atom) Atom {
if (a.t == "int" || a.t == "float") && (b.t == "int" || b.t == "float") {
sum := a.AsFloat() + b.AsFloat()
if a.t == "int" && b.t == "int" {
return Atom{t: "int", value: int(sum)}
}
return Atom{t: "float", value: sum}
}
return Atom{t: "error", value: "Non-numeric value being added"}
}
func Negate(a Atom) Atom {
if a.t == "float" {
return Atom{t: "float", value: -a.value.(float64)}
} else if a.t == "int" {
return Atom{t: "int", value: -a.value.(int)}
}
return Atom{t: "error", value: "Non-numeric value cannot be negated"}
}
func Multiply(a Atom, b Atom) Atom {
if (a.t == "int" || a.t == "float") && (b.t == "int" || b.t == "float") {
prod := a.AsFloat() * b.AsFloat()
if a.t == "int" && b.t == "int" {
return Atom{t: "int", value: int(prod)}
}
return Atom{t: "float", value: prod}
}
return Atom{t: "error", value: "Non-numeric value being multiplied"}
}
func Divide(a Atom, b Atom) Atom {
if (a.t == "int" || a.t == "float") && (b.t == "int" || b.t == "float") {
div := a.AsFloat() / b.AsFloat()
if a.t == "int" && b.t == "int" {
return Atom{t: "int", value: int(div)}
}
return Atom{t: "float", value: div}
}
return Atom{t: "error", value: "Non-numeric value being divided"}
}
func Compare(a Value, b Value) bool {
if a.Type() != b.Type() {
return false
}
if a.Type() == "nil" {
return a == b
}
return a.Value() == b.Value()
}
func (n Atom) AsFloat() float64 {
if n.t == "float" {
return n.value.(float64)
} else if n.t == "int" {
return float64(n.value.(int))
}
log.Fatal("Non-numeric type cannot be converted to float.")
return -1
}
func CompareNum(a Atom, b Atom) int {
an := a.AsFloat()
bn := b.AsFloat()
if an == bn {
return 0
} else if an > bn {
return 1
} else {
return -1
}
}
func Boolean(n Value) bool {
if n.Type() == "list" {
return len(n.(*List).children) > 0
}
if n.Type() == "hash" {
h := n.(*Hash)
return len(h.vals)+len(h.sym_vals) > 0
}
a := n.(Atom)
switch a.t {
case "string":
return len(a.value.(string)) > 0
case "int":
return a.value.(int) != 0
case "float":
return a.value.(float64) != 0.0
case "bool":
return a == TRUE
case "function":
return true
default:
return false
}
}
func NestedReplace(n Value, subs *map[string]Value) []Value {
switch n.Type() {
case "identifier":
if v, ok := (*subs)[n.Value().(string)]; ok {
return []Value{v}
}
case "expansion":
if v, ok := (*subs)[n.Value().(string)]; ok {
if v.Type() != "list" {
log.Fatal(fmt.Sprintf("Cannot expand value of type '%s'", v.Type()))
}
out := []Value{}
out = append(out, v.(*List).children...)
return out
}
case "list":
o := n.(*List)
l := &List{Quoted: o.Quoted}
for _, c := range o.children {
l.children = append(l.children, NestedReplace(c, subs)...)
}
return []Value{l}
}
return []Value{n}
} | util.go | 0.557845 | 0.556038 | util.go | starcoder |
package typ
var (
Void = Type{Kind: KindVoid}
Any = Type{Kind: KindAny}
Typ = Type{Kind: KindTyp}
Num = Type{Kind: KindNum}
Bool = Type{Kind: KindBool}
Int = Type{Kind: KindInt}
Real = Type{Kind: KindReal}
Char = Type{Kind: KindChar}
Str = Type{Kind: KindStr}
Raw = Type{Kind: KindRaw}
UUID = Type{Kind: KindUUID}
Time = Type{Kind: KindTime}
Span = Type{Kind: KindSpan}
Expr = Type{Kind: KindExpr}
Sym = Type{Kind: KindSym}
Dyn = Type{Kind: KindDyn}
Call = Type{Kind: KindCall}
Tag = Type{Kind: KindTag}
)
func Opt(t Type) Type { return Type{t.Kind | KindOpt, t.Info} }
func Rec(fs []Param) Type { return Type{KindRec, &Info{Params: fs}} }
func List(t Type) Type { return cont(KindList, t) }
func Dict(t Type) Type { return cont(KindDict, t) }
func Idxr(t Type) Type { return cont(KindIdxr, t) }
func Keyr(t Type) Type { return cont(KindKeyr, t) }
func Cont(t Type) Type { return cont(KindCont, t) }
func cont(k Kind, el Type) Type {
if el == Void || el == Any {
return Type{Kind: k}
}
return Type{k, &Info{Params: []Param{{Type: el}}}}
}
func Ref(n string) Type { return Type{KindRef, &Info{Ref: n}} }
func Sch(n string) Type { return Type{KindSch, &Info{Ref: n}} }
func Bits(n string) Type { return Type{KindBits, &Info{Ref: n}} }
func Enum(n string) Type { return Type{KindEnum, &Info{Ref: n}} }
func Obj(n string) Type { return Type{KindObj, &Info{Ref: n}} }
func Func(n string, ps []Param) Type { return Type{KindFunc, &Info{Ref: n, Params: ps}} }
func Form(n string, ps []Param) Type { return Type{KindForm, &Info{Ref: n, Params: ps}} }
func VarKind(id uint64) Kind { return KindVar | Kind(id<<SlotSize) }
func Var(id uint64, alts ...Type) Type {
t := Type{VarKind(id), &Info{}}
if len(alts) != 0 {
ps := make([]Param, 0, len(alts))
for _, a := range alts {
ps = append(ps, Param{Type: a})
}
t.Params = ps
}
return t
}
// IsOpt returns whether t is an optional type and not any.
func (t Type) IsOpt() bool {
return t.Kind&KindOpt != 0 && t.Kind&MaskRef != 0
}
// Deopt returns the non-optional type of t if t is a optional type and not any,
// otherwise t is returned as is.
func (t Type) Deopt() (_ Type, ok bool) {
if ok = t.IsOpt(); ok {
t.Kind &^= KindOpt
}
return t, ok
}
// Elem returns a generalized element type for container types and void otherwise.
func (t Type) Elem() Type {
switch t.Kind & MaskElem {
case KindCont, KindIdxr, KindKeyr, KindList, KindDict, KindExpr:
if !t.HasParams() {
return Any
}
return t.Params[0].Type
case KindRec, KindObj:
// TODO consider an attempt to unify field types
return Any
}
return Void
}
// Last returns the last element type if t is a list or dict type otherwise t is returned as is.
func (t Type) Last() Type {
el := t.Elem()
for el != Void && el != Any {
t, el = el, el.Elem()
}
return t
}
// Ordered returns whether type t supports ordering.
func (t Type) Ordered() bool {
if t.Kind&KindNum != 0 {
return true
}
switch t.Kind & MaskRef {
case KindChar, KindStr, KindEnum, KindTime:
return true
}
return false
}
// Resolved returns whether t is fully resolved
func (t Type) Resolved() bool {
switch t.Kind & MaskRef {
case KindBits, KindEnum: // check that consts were resolved
return t.HasConsts()
case KindList, KindDict: // check elem type
return t.Elem().Resolved()
case KindObj, KindRec, KindFunc, KindForm: // check that params were resolved
if !t.HasParams() {
return false
}
for _, p := range t.Params {
if !p.Type.Resolved() {
return false
}
}
case KindSch, KindRef, KindVar, KindAlt:
return false
}
return true
} | typ/decl.go | 0.515376 | 0.603786 | decl.go | starcoder |
// Package table produces a string that represents slice of structs data in a text table
package table
import (
"errors"
"fmt"
"reflect"
)
type bd struct {
H rune // BOX DRAWINGS HORIZONTAL
V rune // BOX DRAWINGS VERTICAL
VH rune // BOX DRAWINGS VERTICAL AND HORIZONTAL
HU rune // BOX DRAWINGS HORIZONTAL AND UP
HD rune // BOX DRAWINGS HORIZONTAL AND DOWN
VL rune // BOX DRAWINGS VERTICAL AND LEFT
VR rune // BOX DRAWINGS VERTICAL AND RIGHT
DL rune // BOX DRAWINGS DOWN AND LEFT
DR rune // BOX DRAWINGS DOWN AND RIGHT
UL rune // BOX DRAWINGS UP AND LEFT
UR rune // BOX DRAWINGS UP AND RIGHT
}
type Format map[string]string
var m = map[string]bd{
"ascii": bd{'-', '|', '+', '+', '+', '+', '+', '+', '+', '+', '+'},
"box-drawing": bd{'─', '│', '┼', '┴', '┬', '┤', '├', '┐', '┌', '┘', '└'},
}
// Output formats slice of structs data and writes to standard output.(Using box drawing characters)
func Output(slice interface{}) {
fmt.Println(Table(slice))
}
// FormattedOutput formats slice of structs data according to map of formats,
// and writes to standard output.(Using box drawing characters)
func FormattedOutput(slice interface{}, format Format) {
fmt.Println(FormattedTable(slice, format))
}
// OutputA formats slice of structs data and writes to standard output.(Using standard ascii characters)
func OutputA(slice interface{}) {
fmt.Println(AsciiTable(slice))
}
// Table formats slice of structs data and returns the resulting string.(Using box drawing characters)
func Table(slice interface{}) string {
coln, colw, rows, err := parse(slice, Format{})
if err != nil {
return err.Error()
}
table := table(coln, colw, rows, m["box-drawing"])
return table
}
// Table formats slice of structs data according to map of formats, and returns the resulting string.(Using box drawing characters)
func FormattedTable(slice interface{}, format Format) string {
coln, colw, rows, err := parse(slice, format)
if err != nil {
return err.Error()
}
table := table(coln, colw, rows, m["box-drawing"])
return table
}
// AsciiTable formats slice of structs data and returns the resulting string.(Using standard ascii characters)
func AsciiTable(slice interface{}) string {
coln, colw, rows, err := parse(slice, Format{})
if err != nil {
return err.Error()
}
table := table(coln, colw, rows, m["ascii"])
return table
}
func parse(slice interface{}, format Format) (
coln []string, // name of columns
colw []int, // width of columns
rows [][]string, // rows of content
err error,
) {
// s is a slice of structs
s, err := sliceconv(slice)
if err != nil {
return
}
for i, u := range s {
// u is a single struct
v := reflect.ValueOf(u)
t := reflect.TypeOf(u)
if v.Kind() == reflect.Ptr {
v = v.Elem()
t = t.Elem()
}
if v.Kind() != reflect.Struct {
err = errors.New("warning: table: items of slice should be on struct value")
return
}
var row []string
fields := DeepFields(v.Interface())
m := 0 // count of skipped fields
for n := 0; n < len(fields); n++ {
field := fields[n]
cn := field.typeof.Name
cf, ok := format[cn]
if !ok {
// cf = "%+v"
m++
continue
}
// skip if column format string is set but empty
if len(cf) == 0 {
m++
continue
}
cv := fmt.Sprintf(cf, field.valueof.Interface())
if i == 0 {
coln = append(coln, cn)
colw = append(colw, len(cn))
}
if colw[n-m] < len(cv) {
colw[n-m] = len(cv)
}
row = append(row, cv)
}
rows = append(rows, row)
}
return coln, colw, rows, nil
}
type field struct {
typeof reflect.StructField
valueof reflect.Value
}
// https://stackoverflow.com/a/24333912/1264797
// https://play.golang.org/p/PfmfkNUVkJu
func DeepFields(iface interface{}) (fields []field) {
ifv := reflect.ValueOf(iface)
ift := reflect.TypeOf(iface)
for i := 0; i < ift.NumField(); i++ {
typeof := ift.Field(i)
valueof := ifv.Field(i)
// filter out unexported
if typeof.PkgPath != "" {
continue
}
if valueof.Kind() == reflect.Ptr {
valueof = valueof.Elem()
}
switch valueof.Kind() {
case reflect.Struct:
fields = append(fields, DeepFields(valueof.Interface())...)
default:
fields = append(fields, field{typeof, valueof})
}
}
return fields
}
func table(coln []string, colw []int, rows [][]string, b bd) (table string) {
head := [][]rune{[]rune{b.DR}, []rune{b.V}, []rune{b.VR}}
bttm := []rune{b.UR}
for i, v := range colw {
head[0] = append(head[0], []rune(repeat(v+2, b.H)+string(b.HD))...)
head[1] = append(head[1], []rune(" "+coln[i]+repeat(v-len(coln[i])+1, ' ')+string(b.V))...)
head[2] = append(head[2], []rune(repeat(v+2, b.H)+string(b.VH))...)
bttm = append(bttm, []rune(repeat(v+2, b.H)+string(b.HU))...)
}
head[0][len(head[0])-1] = b.DL
head[2][len(head[2])-1] = b.VL
bttm[len(bttm)-1] = b.UL
var body [][]rune
for _, r := range rows {
row := []rune{b.V}
for i, v := range colw {
// handle non-ascii character
l := length([]rune(r[i]))
row = append(row, []rune(" "+r[i]+repeat(v-l+1, ' ')+string(b.V))...)
}
body = append(body, row)
}
for _, v := range head {
table += string(v) + "\n"
}
for _, v := range body {
table += string(v) + "\n"
}
table += string(bttm)
return table
}
func sliceconv(slice interface{}) ([]interface{}, error) {
v := reflect.ValueOf(slice)
if v.Kind() != reflect.Slice {
return nil, errors.New("warning: sliceconv: param \"slice\" should be on slice value")
}
l := v.Len()
r := make([]interface{}, l)
for i := 0; i < l; i++ {
r[i] = v.Index(i).Interface()
}
return r, nil
}
func repeat(time int, char rune) string {
var s = make([]rune, time)
for i := range s {
s[i] = char
}
return string(s)
}
func length(r []rune) int {
// CJK(Chinese, Japanese, Korean)
type cjk struct {
from rune
to rune
}
// References:
// - [Unicode Table](http://www.tamasoft.co.jp/en/general-info/unicode.html)
// - [汉字 Unicode 编码范围](http://www.qqxiuzi.cn/zh/hanzi-unicode-bianma.php)
var a = []cjk{
{0x2E80, 0x9FD0}, // Chinese, Hiragana, Katakana, ...
{0xAC00, 0xD7A3}, // Hangul
{0xF900, 0xFACE}, // Kanji
{0xFE00, 0xFE6C}, // Fullwidth
{0xFF00, 0xFF60}, // Fullwidth again
{0x20000, 0x2FA1D}, // Extension
// More? PRs are aways welcome here.
}
length := len(r)
l:
for _, v := range r {
for _, c := range a {
if v >= c.from && v <= c.to {
length++
continue l
}
}
}
return length
} | table.go | 0.664214 | 0.428054 | table.go | starcoder |
package evaltest
import (
"fmt"
"math"
"reflect"
"regexp"
"src.elv.sh/pkg/eval"
)
// ApproximatelyThreshold defines the threshold for matching float64 values when
// using Approximately.
const ApproximatelyThreshold = 1e-15
// Approximately can be passed to Case.Puts to match a float64 within the
// threshold defined by ApproximatelyThreshold.
type Approximately struct{ F float64 }
func matchFloat64(a, b, threshold float64) bool {
if math.IsNaN(a) && math.IsNaN(b) {
return true
}
if math.IsInf(a, 0) && math.IsInf(b, 0) &&
math.Signbit(a) == math.Signbit(b) {
return true
}
return math.Abs(a-b) <= threshold
}
// MatchingRegexp can be passed to Case.Puts to match a any string that matches
// a regexp pattern. If the pattern is not a valid regexp, the test will panic.
type MatchingRegexp struct{ Pattern string }
func matchRegexp(p, s string) bool {
matched, err := regexp.MatchString(p, s)
if err != nil {
panic(err)
}
return matched
}
type errorMatcher interface{ matchError(error) bool }
// AnyError is an error that can be passed to Case.Throws to match any non-nil
// error.
var AnyError = anyError{}
// An errorMatcher for any error.
type anyError struct{}
func (anyError) Error() string { return "any error" }
func (anyError) matchError(e error) bool { return e != nil }
// An errorMatcher for exceptions.
type exc struct {
reason error
stacks []string
}
func (e exc) Error() string {
if len(e.stacks) == 0 {
return fmt.Sprintf("exception with reason %v", e.reason)
}
return fmt.Sprintf("exception with reason %v and stacks %v", e.reason, e.stacks)
}
func (e exc) matchError(e2 error) bool {
if e2, ok := e2.(eval.Exception); ok {
return matchErr(e.reason, e2.Reason()) &&
(len(e.stacks) == 0 ||
reflect.DeepEqual(e.stacks, getStackTexts(e2.StackTrace())))
}
return false
}
func getStackTexts(tb *eval.StackTrace) []string {
texts := []string{}
for tb != nil {
ctx := tb.Head
texts = append(texts, ctx.Source[ctx.From:ctx.To])
tb = tb.Next
}
return texts
}
// ErrorWithType returns an error that can be passed to the Case.Throws to match
// any error with the same type as the argument.
func ErrorWithType(v error) error { return errWithType{v} }
// An errorMatcher for any error with the given type.
type errWithType struct{ v error }
func (e errWithType) Error() string { return fmt.Sprintf("error with type %T", e.v) }
func (e errWithType) matchError(e2 error) bool {
return reflect.TypeOf(e.v) == reflect.TypeOf(e2)
}
// ErrorWithMessage returns an error that can be passed to Case.Throws to match
// any error with the given message.
func ErrorWithMessage(msg string) error { return errWithMessage{msg} }
// An errorMatcher for any error with the given message.
type errWithMessage struct{ msg string }
func (e errWithMessage) Error() string { return "error with message " + e.msg }
func (e errWithMessage) matchError(e2 error) bool {
return e2 != nil && e.msg == e2.Error()
}
// CmdExit returns an error that can be passed to Case.Throws to match an
// eval.ExternalCmdExit ignoring the Pid field.
func CmdExit(v eval.ExternalCmdExit) error { return errCmdExit{v} }
// An errorMatcher for an ExternalCmdExit error that ignores the `Pid` member.
// We only match the command name and exit status because at run time we
// cannot know the correct value for `Pid`.
type errCmdExit struct{ v eval.ExternalCmdExit }
func (e errCmdExit) Error() string {
return e.v.Error()
}
func (e errCmdExit) matchError(gotErr error) bool {
if gotErr == nil {
return false
}
ge := gotErr.(eval.ExternalCmdExit)
return e.v.CmdName == ge.CmdName && e.v.WaitStatus == ge.WaitStatus
}
type errOneOf struct{ errs []error }
func OneOfErrors(errs ...error) error { return errOneOf{errs} }
func (e errOneOf) Error() string { return fmt.Sprint("one of", e.errs) }
func (e errOneOf) matchError(gotError error) bool {
for _, want := range e.errs {
if matchErr(want, gotError) {
return true
}
}
return false
} | pkg/eval/evaltest/matchers.go | 0.703957 | 0.416144 | matchers.go | starcoder |
package memmetrics
import (
"fmt"
"time"
hdrhistogram "github.com/HdrHistogram/hdrhistogram-go"
//"github.com/codahale/hdrhistogram"
"github.com/mailgun/timetools"
)
// HDRHistogram is a tiny wrapper around github.com/codahale/hdrhistogram that provides convenience functions for measuring http latencies
type HDRHistogram struct {
// lowest trackable value
low int64
// highest trackable value
high int64
// significant figures
sigfigs int
h *hdrhistogram.Histogram
}
func NewHDRHistogram(low, high int64, sigfigs int) (h *HDRHistogram, err error) {
defer func() {
if msg := recover(); msg != nil {
err = fmt.Errorf("%s", msg)
}
}()
return &HDRHistogram{
low: low,
high: high,
sigfigs: sigfigs,
h: hdrhistogram.New(low, high, sigfigs),
}, nil
}
// Returns latency at quantile with microsecond precision
func (h *HDRHistogram) LatencyAtQuantile(q float64) time.Duration {
return time.Duration(h.ValueAtQuantile(q)) * time.Microsecond
}
// Records latencies with microsecond precision
func (h *HDRHistogram) RecordLatencies(d time.Duration, n int64) error {
return h.RecordValues(int64(d/time.Microsecond), n)
}
func (h *HDRHistogram) Reset() {
h.h.Reset()
}
func (h *HDRHistogram) ValueAtQuantile(q float64) int64 {
return h.h.ValueAtQuantile(q)
}
func (h *HDRHistogram) RecordValues(v, n int64) error {
return h.h.RecordValues(v, n)
}
func (h *HDRHistogram) Merge(other *HDRHistogram) error {
if other == nil {
return fmt.Errorf("other is nil")
}
h.h.Merge(other.h)
return nil
}
type rhOptSetter func(r *RollingHDRHistogram) error
func RollingClock(clock timetools.TimeProvider) rhOptSetter {
return func(r *RollingHDRHistogram) error {
r.clock = clock
return nil
}
}
// RollingHistogram holds multiple histograms and rotates every period.
// It provides resulting histogram as a result of a call of 'Merged' function.
type RollingHDRHistogram struct {
idx int
lastRoll time.Time
period time.Duration
bucketCount int
low int64
high int64
sigfigs int
buckets []*HDRHistogram
clock timetools.TimeProvider
}
func NewRollingHDRHistogram(low, high int64, sigfigs int, period time.Duration, bucketCount int, options ...rhOptSetter) (*RollingHDRHistogram, error) {
rh := &RollingHDRHistogram{
bucketCount: bucketCount,
period: period,
low: low,
high: high,
sigfigs: sigfigs,
}
for _, o := range options {
if err := o(rh); err != nil {
return nil, err
}
}
if rh.clock == nil {
rh.clock = &timetools.RealTime{}
}
buckets := make([]*HDRHistogram, rh.bucketCount)
for i := range buckets {
h, err := NewHDRHistogram(low, high, sigfigs)
if err != nil {
return nil, err
}
buckets[i] = h
}
rh.buckets = buckets
return rh, nil
}
func (r *RollingHDRHistogram) Append(o *RollingHDRHistogram) error {
if r.bucketCount != o.bucketCount || r.period != o.period || r.low != o.low || r.high != o.high || r.sigfigs != o.sigfigs {
return fmt.Errorf("can't merge")
}
for i := range r.buckets {
if err := r.buckets[i].Merge(o.buckets[i]); err != nil {
return err
}
}
return nil
}
func (r *RollingHDRHistogram) Reset() {
r.idx = 0
r.lastRoll = r.clock.UtcNow()
for _, b := range r.buckets {
b.Reset()
}
}
func (r *RollingHDRHistogram) rotate() {
r.idx = (r.idx + 1) % len(r.buckets)
r.buckets[r.idx].Reset()
}
func (r *RollingHDRHistogram) Merged() (*HDRHistogram, error) {
m, err := NewHDRHistogram(r.low, r.high, r.sigfigs)
if err != nil {
return m, err
}
for _, h := range r.buckets {
if m.Merge(h); err != nil {
return nil, err
}
}
return m, nil
}
func (r *RollingHDRHistogram) getHist() *HDRHistogram {
if r.clock.UtcNow().Sub(r.lastRoll) >= r.period {
r.rotate()
r.lastRoll = r.clock.UtcNow()
}
return r.buckets[r.idx]
}
func (r *RollingHDRHistogram) RecordLatencies(v time.Duration, n int64) error {
return r.getHist().RecordLatencies(v, n)
}
func (r *RollingHDRHistogram) RecordValues(v, n int64) error {
return r.getHist().RecordValues(v, n)
} | src/github.com/mailgun/oxy/memmetrics/histogram.go | 0.754915 | 0.489809 | histogram.go | starcoder |
package model
func predict1(features []float64) float64 {
if (features[2] < 0.5) || (features[2] == -1) {
if (features[1] < 0.5) || (features[1] == -1) {
if (features[0] < 0.5) || (features[0] == -1) {
if (features[7] < 0.108870044) || (features[7] == -1) {
if (features[9] < 0.264320642) || (features[9] == -1) {
if (features[6] < 0.741639555) || (features[6] == -1) {
if (features[13] < 0.338543236) || (features[13] == -1) {
return 6.96567678
} else {
return 1.07602775
}
} else {
return 62.2323074
}
} else {
if (features[15] < 0.0908528864) || (features[15] == -1) {
if (features[3] < 0.366575122) || (features[3] == -1) {
return 4.76929235
} else {
return 0.694447219
}
} else {
if (features[9] < 0.922377348) || (features[9] == -1) {
return 72.3714905
} else {
return -3.25134134
}
}
}
} else {
if (features[14] < 0.877792954) || (features[14] == -1) {
if (features[16] < 0.594482124) || (features[16] == -1) {
if (features[7] < 0.711652517) || (features[7] == -1) {
return 46.916111
} else {
return 24.1145916
}
} else {
if (features[15] < 0.559699059) || (features[15] == -1) {
return 29.9923706
} else {
return 9.65826797
}
}
} else {
if (features[6] < 0.872805834) || (features[6] == -1) {
if (features[14] < 0.94291389) || (features[14] == -1) {
return -0.145070225
} else {
return 4.8073535
}
} else {
if (features[3] < 0.788281798) || (features[3] == -1) {
return 63.1950874
} else {
return 0.808762372
}
}
}
}
} else {
return 8.99846268
}
} else {
return 1.31035149
}
} else {
return 89.6473465
}
} | examples/xgboost/XGBRegressor/booster1.go | 0.518546 | 0.479077 | booster1.go | starcoder |
package main
import (
"flag"
"fmt"
"strings"
"github.com/alexchao26/advent-of-code-go/util"
)
func main() {
var part int
flag.IntVar(&part, "part", 1, "part 1 or 2")
flag.Parse()
fmt.Println("Running part", part)
if part == 1 {
ans := part1(util.ReadFile("./input.txt"))
fmt.Println("Output:", ans)
} else {
ans := part2(util.ReadFile("./input.txt"))
fmt.Println("Output:", ans)
}
}
func part1(input string) int {
opcodeComputer := parseInput(input)
for !opcodeComputer.tick() {
// instruction 28 of my input is the only one that accesses register zero
// it is comparing reg 0 to reg 5. so to break out of loops asap, set
// reg 0 to the value found in reg 5 when 28 is hit for the first time
if opcodeComputer.registers[opcodeComputer.instructionPointer] == 28 {
break
}
}
return opcodeComputer.registers[5]
}
func part2(input string) int {
opcodeComputer := parseInput(input)
// similar idea for part 2 but now we need to find the previous state of
// register 5 when register 5 REPEATS itself. this is a brute force solution
// using a map to store previous reg5 values, and stores the previous reg5
var lastReg5 int
comparedRegister5s := map[int]bool{}
for !opcodeComputer.tick() {
if opcodeComputer.registers[opcodeComputer.instructionPointer] == 28 {
reg5 := opcodeComputer.registers[5]
if comparedRegister5s[reg5] {
break
}
comparedRegister5s[reg5] = true
lastReg5 = reg5
}
}
return lastReg5
}
type opcodeComputer struct {
instructions []instruction
registers [6]int
instructionPointer int // an index the stores the index for which instruction to run
}
type instruction struct {
name string
abcValues [3]int
}
// literal opcode computer implementation, unoptimized
func (o *opcodeComputer) tick() (done bool) {
if o.registers[o.instructionPointer] >= len(o.instructions) {
fmt.Println("Out of range instruction, terminating...")
return true
}
instIndex := o.registers[o.instructionPointer]
inst := o.instructions[instIndex]
// fmt.Println(strings.Repeat(" ", instIndex) + strconv.Itoa(instIndex))
opcodeFunc := opcodeNamesToFuncs[inst.name]
o.registers = opcodeFunc(o.registers, inst.abcValues)
// increment value @ instructionPointer, validate that it's still in range
o.registers[o.instructionPointer]++
if o.registers[o.instructionPointer] >= len(o.instructions) {
return true
}
return false
}
func parseInput(input string) opcodeComputer {
lines := strings.Split(input, "\n")
var instructionPointer int
fmt.Sscanf(lines[0], "#ip %d", &instructionPointer)
var instructions []instruction
for _, l := range lines[1:] {
var inst instruction
fmt.Sscanf(l, "%4s %d %d %d", &inst.name, &inst.abcValues[0], &inst.abcValues[1], &inst.abcValues[2])
instructions = append(instructions, inst)
}
return opcodeComputer{
instructions: instructions,
instructionPointer: instructionPointer,
}
}
var opcodeNamesToFuncs = map[string]opcodeFunc{
"addr": addr, "addi": addi,
"mulr": mulr, "muli": muli,
"banr": banr, "bani": bani,
"borr": borr, "bori": bori,
"setr": setr, "seti": seti,
"gtir": gtir, "gtri": gtri, "gtrr": gtrr,
"eqir": eqir, "eqri": eqri, "eqrr": eqrr,
}
type opcodeFunc func([6]int, [3]int) [6]int
func addr(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] + registers[abcValues[1]]
return registers
}
func addi(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] + abcValues[1]
return registers
}
func mulr(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] * registers[abcValues[1]]
return registers
}
func muli(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] * abcValues[1]
return registers
}
func banr(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] & registers[abcValues[1]]
return registers
}
func bani(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] & abcValues[1]
return registers
}
func borr(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] | registers[abcValues[1]]
return registers
}
func bori(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]] | abcValues[1]
return registers
}
func setr(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = registers[abcValues[0]]
return registers
}
func seti(registers [6]int, abcValues [3]int) [6]int {
registers[abcValues[2]] = abcValues[0]
return registers
}
func gtir(registers [6]int, abcValues [3]int) [6]int {
if abcValues[0] > registers[abcValues[1]] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
}
func gtri(registers [6]int, abcValues [3]int) [6]int {
if registers[abcValues[0]] > abcValues[1] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
}
func gtrr(registers [6]int, abcValues [3]int) [6]int {
if registers[abcValues[0]] > registers[abcValues[1]] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
}
func eqir(registers [6]int, abcValues [3]int) [6]int {
if abcValues[0] == registers[abcValues[1]] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
}
func eqri(registers [6]int, abcValues [3]int) [6]int {
if registers[abcValues[0]] == abcValues[1] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
}
func eqrr(registers [6]int, abcValues [3]int) [6]int {
if registers[abcValues[0]] == registers[abcValues[1]] {
registers[abcValues[2]] = 1
} else {
registers[abcValues[2]] = 0
}
return registers
} | 2018/day21/main.go | 0.522202 | 0.432303 | main.go | starcoder |
package goconsider
// Settings contain all the parameters for the analysis.
type Settings struct {
// Phrases describe all the texts the linter should look for
Phrases []Phrase
}
// Phrase describes an expression, with optional alternatives, that the linter flags.
type Phrase struct {
// Synonyms are one or more expressions that have the same meaning and proposed alternatives.
Synonyms []string
// Alternatives are zero, one, or more expressions that are provided as replacement.
Alternatives []string
// References are one or more resources that can help understand why the phrase is flagged, or
// at least give examples of other (larger) peer groups that considered rewording.
// Ideally, a reference is a stable link.
References []string
}
// DefaultSettings return a settings instance for common use.
func DefaultSettings() Settings {
settings := minimalSettings()
settings = forEnglish(settings)
return settings
}
func minimalSettings() Settings {
return Settings{}
}
func synonyms(s ...string) func(Phrase) Phrase {
return func(p Phrase) Phrase {
p.Synonyms = append(p.Synonyms, s...)
return p
}
}
func alternatives(s ...string) func(Phrase) Phrase {
return func(p Phrase) Phrase {
p.Alternatives = append(p.Alternatives, s...)
return p
}
}
func references(s ...string) func(Phrase) Phrase {
return func(p Phrase) Phrase {
p.References = append(p.References, s...)
return p
}
}
func phraseWith(mod ...func(Phrase) Phrase) Phrase {
var p Phrase
for _, m := range mod {
p = m(p)
}
return p
}
func forEnglish(settings Settings) Settings {
add := func(p Phrase) {
settings.Phrases = append(settings.Phrases, p)
}
const linuxKernel = "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=49decddd39e5f6132ccd7d9fdc3d7c470b0061bb"
const twitter = "https://www.cnet.com/news/twitter-engineers-replace-racially-loaded-tech-terms-like-master-slave/"
const google = "https://developers.google.com/style/inclusive-documentation"
const googlePronouns = "https://developers.google.com/style/pronouns#gender-neutral-pronouns"
add(phraseWith(synonyms("master"),
alternatives("primary", "leader", "main"),
references(linuxKernel, twitter),
))
add(phraseWith(synonyms("slave", "slaves"),
alternatives("secondary", "follower", "replica", "standby"),
references(linuxKernel, twitter),
))
add(phraseWith(synonyms("whitelist", "whitelists"),
alternatives("allowlist", "passlist"),
references(linuxKernel, twitter),
))
add(phraseWith(synonyms("blacklist", "blacklists"),
alternatives("denylist", "blocklist"),
references(linuxKernel, twitter),
))
add(phraseWith(synonyms("grandfathered"),
alternatives("legacy status"),
references(twitter),
))
add(phraseWith(synonyms("guy", "guys"),
alternatives("people", "folks", "you all"),
references(twitter),
))
add(phraseWith(synonyms("he", "his", "him"),
alternatives("their", "them"),
references(googlePronouns, twitter),
))
add(phraseWith(synonyms("man hour", "man hours"),
alternatives("person hours", "engineer hours"),
references(google, twitter),
))
add(phraseWith(synonyms("dummy"),
alternatives("placeholder", "sample"),
references(google, twitter),
))
add(phraseWith(synonyms("sanity check", "sanity checks"),
alternatives("quick check"),
references(google, twitter),
))
return settings
} | settings.go | 0.698124 | 0.450843 | settings.go | starcoder |
package gozxing
type EncodeHintType int
const (
/**
* Specifies what degree of error correction to use, for example in QR Codes.
* Type depends on the encoder. For example for QR codes it's type
* {@link com.google.zxing.qrcode.decoder.ErrorCorrectionLevel ErrorCorrectionLevel}.
* For Aztec it is of type {@link Integer}, representing the minimal percentage of error correction words.
* For PDF417 it is of type {@link Integer}, valid values being 0 to 8.
* In all cases, it can also be a {@link String} representation of the desired value as well.
* Note: an Aztec symbol should have a minimum of 25% EC words.
*/
EncodeHintType_ERROR_CORRECTION = iota
/**
* Specifies what character encoding to use where applicable (type {@link String})
*/
EncodeHintType_CHARACTER_SET
/**
* Specifies the matrix shape for Data Matrix (type {@link com.google.zxing.datamatrix.encoder.SymbolShapeHint})
*/
EncodeHintType_DATA_MATRIX_SHAPE
/**
* Specifies a minimum barcode size (type {@link Dimension}). Only applicable to Data Matrix now.
*
* @deprecated use width/height params in
* {@link com.google.zxing.datamatrix.DataMatrixWriter#encode(String, BarcodeFormat, int, int)}
*/
EncodeHintType_MIN_SIZE
/**
* Specifies a maximum barcode size (type {@link Dimension}). Only applicable to Data Matrix now.
*
* @deprecated without replacement
*/
EncodeHintType_MAX_SIZE
/**
* Specifies margin, in pixels, to use when generating the barcode. The meaning can vary
* by format; for example it controls margin before and after the barcode horizontally for
* most 1D formats. (Type {@link Integer}, or {@link String} representation of the integer value).
*/
EncodeHintType_MARGIN
/**
* Specifies whether to use compact mode for PDF417 (type {@link Boolean}, or "true" or "false"
* {@link String} value).
*/
EncodeHintType_PDF417_COMPACT
/**
* Specifies what compaction mode to use for PDF417 (type
* {@link com.google.zxing.pdf417.encoder.Compaction Compaction} or {@link String} value of one of its
* enum values).
*/
EncodeHintType_PDF417_COMPACTION
/**
* Specifies the minimum and maximum number of rows and columns for PDF417 (type
* {@link com.google.zxing.pdf417.encoder.Dimensions Dimensions}).
*/
EncodeHintType_PDF417_DIMENSIONS
/**
* Specifies the required number of layers for an Aztec code.
* A negative number (-1, -2, -3, -4) specifies a compact Aztec code.
* 0 indicates to use the minimum number of layers (the default).
* A positive number (1, 2, .. 32) specifies a normal (non-compact) Aztec code.
* (Type {@link Integer}, or {@link String} representation of the integer value).
*/
EncodeHintType_AZTEC_LAYERS
/**
* Specifies the exact version of QR code to be encoded.
* (Type {@link Integer}, or {@link String} representation of the integer value).
*/
EncodeHintType_QR_VERSION
/**
* Specifies the QR code mask pattern to be used. Allowed values are
* 0..QRCode.NUM_MASK_PATTERNS-1. By default the code will automatically select
* the optimal mask pattern.
* (Type {@link Integer}, or {@link String} representation of the integer value).
*/
EncodeHintType_QR_MASK_PATTERN
/**
* Specifies whether the data should be encoded to the GS1 standard (type {@link Boolean}, or "true" or "false"
* {@link String } value).
*/
EncodeHintType_GS1_FORMAT
)
func (this EncodeHintType) String() string {
switch this {
case EncodeHintType_ERROR_CORRECTION:
return "ERROR_CORRECTION"
case EncodeHintType_CHARACTER_SET:
return "CHARACTER_SET"
case EncodeHintType_DATA_MATRIX_SHAPE:
return "DATA_MATRIX_SHAPE"
case EncodeHintType_MIN_SIZE:
return "MIN_SIZE"
case EncodeHintType_MAX_SIZE:
return "MAX_SIZE"
case EncodeHintType_MARGIN:
return "MARGIN"
case EncodeHintType_PDF417_COMPACT:
return "PDF417_COMPACT"
case EncodeHintType_PDF417_COMPACTION:
return "PDF417_COMPACTION"
case EncodeHintType_PDF417_DIMENSIONS:
return "PDF417_DIMENSIONS"
case EncodeHintType_AZTEC_LAYERS:
return "AZTEC_LAYERS"
case EncodeHintType_QR_VERSION:
return "QR_VERSION"
case EncodeHintType_QR_MASK_PATTERN:
return "QR_MASK_PATTERN"
case EncodeHintType_GS1_FORMAT:
return "GS1_FORMAT"
}
return ""
} | encode_hint_type.go | 0.874774 | 0.595669 | encode_hint_type.go | starcoder |
package cmd
import (
"fmt"
"github.com/jaredbancroft/aoc2020/pkg/helpers"
"github.com/spf13/cobra"
)
// day6Cmd represents the day6 command
var day6Cmd = &cobra.Command{
Use: "day6",
Short: "Advent of Code 2020 - Day 6: Custom Customs",
Long: `
Advent of Code 2020
--- Day 6: Custom Customs ---
As your flight approaches the regional airport where you'll switch to a much larger plane,
customs declaration forms are distributed to the passengers.
The form asks a series of 26 yes-or-no questions marked a through z. All you need to do
is identify the questions for which anyone in your group answers "yes". Since your group
is just you, this doesn't take very long.
However, the person sitting next to you seems to be experiencing a language barrier and
asks if you can help. For each of the people in their group, you write down the questions
for which they answer "yes", one per line. For example:
abcx
abcy
abcz
In this group, there are 6 questions to which anyone answered "yes": a, b, c, x, y, and z.
(Duplicate answers to the same question don't count extra; each question counts at most once.)
Another group asks for your help, then another, and eventually you've collected answers from
every group on the plane (your puzzle input). Each group's answers are separated by a blank
line, and within each group, each person's answers are on a single line. For example:
abc
a
b
c
ab
ac
a
a
a
a
b
This list represents answers from five groups:
The first group contains one person who answered "yes" to 3 questions: a, b, and c.
The second group contains three people; combined, they answered "yes" to 3 questions: a, b, and c.
The third group contains two people; combined, they answered "yes" to 3 questions: a, b, and c.
The fourth group contains four people; combined, they answered "yes" to only 1 question, a.
The last group contains one person who answered "yes" to only 1 question, b.
In this example, the sum of these counts is 3 + 3 + 3 + 1 + 1 = 11.
For each group, count the number of questions to which anyone answered "yes". What is the
sum of those counts?
--- Part Two ---
As you finish the last group's customs declaration, you notice that you misread one word in
the instructions:
You don't need to identify the questions to which anyone answered "yes"; you need to identify
the questions to which everyone answered "yes"!
Using the same example as above:
abc
a
b
c
ab
ac
a
a
a
a
b
This list represents answers from five groups:
In the first group, everyone (all 1 person) answered "yes" to 3 questions: a, b, and c.
In the second group, there is no question to which everyone answered "yes".
In the third group, everyone answered yes to only 1 question, a. Since some people did not answer "yes" to b or c, they don't count.
In the fourth group, everyone answered yes to only 1 question, a.
In the fifth group, everyone (all 1 person) answered "yes" to 1 question, b.
In this example, the sum of these counts is 3 + 0 + 1 + 1 + 1 = 6.
For each group, count the number of questions to which everyone answered "yes". What is the sum of those counts?`,
RunE: func(cmd *cobra.Command, args []string) error {
answers, _ := helpers.ReadGroupStringFile(input)
count := 0
count2 := 0
unique := make(map[rune]int)
for _, answerGroup := range answers {
groupSize := len(answerGroup)
for _, answer := range answerGroup {
for _, ans := range answer {
unique[ans]++
}
}
count = count + len(unique)
for _, v := range unique {
if v == groupSize {
count2++
}
}
unique = make(map[rune]int)
}
fmt.Println("Part1: ", count)
fmt.Println("Part2: ", count2)
return nil
},
}
func init() {
rootCmd.AddCommand(day6Cmd)
} | cmd/day6.go | 0.647575 | 0.538194 | day6.go | starcoder |
package mods
import (
"image"
"image/color"
"github.com/oakmound/oak/render/mod"
)
func HighlightOff(c color.Color, thickness, xOff, yOff int) mod.Mod {
return func(img image.Image) *image.RGBA {
bds := img.Bounds()
w := bds.Max.X + thickness*2 + xOff
h := bds.Max.Y + thickness*2 + yOff
newRgba := image.NewRGBA(image.Rect(0, 0, w, h))
highlight := image.NewRGBA(image.Rect(0, 0, w, h))
for x := thickness; x < w-thickness; x++ {
for y := thickness; y < h-thickness; y++ {
newRgba.Set(x, y, img.At(x-thickness, y-thickness))
}
}
for x := thickness; x < w-thickness; x++ {
for y := thickness; y < h-thickness; y++ {
if _, _, _, a := newRgba.At(x, y).RGBA(); a > 0 {
for x2 := x - thickness; x2 <= x+thickness; x2++ {
for y2 := y - thickness; y2 <= y+thickness; y2++ {
highlight.Set(x2, y2, c)
}
}
}
}
}
for x := 0; x < w; x++ {
for y := 0; y < h; y++ {
hc := highlight.At(x, y)
if _, _, _, a := hc.RGBA(); a != 0 {
if _, _, _, a2 := newRgba.At(x+xOff, y+yOff).RGBA(); a2 == 0 {
newRgba.Set(x+xOff, y+yOff, hc)
}
}
}
}
return newRgba
}
}
func InnerHighlightOff(c color.Color, thickness, xOff, yOff int) mod.Mod {
return func(img image.Image) *image.RGBA {
bds := img.Bounds()
w := bds.Max.X
h := bds.Max.Y
newRgba := image.NewRGBA(image.Rect(0, 0, w, h))
highlight := image.NewRGBA(image.Rect(0, 0, w, h))
for x := thickness; x < w-thickness; x++ {
for y := thickness; y < h-thickness; y++ {
newRgba.Set(x, y, img.At(x-thickness, y-thickness))
}
}
for x := thickness; x < w-thickness; x++ {
for y := thickness; y < h-thickness; y++ {
if _, _, _, a := newRgba.At(x, y).RGBA(); a == 0 {
for x2 := x - thickness; x2 <= x+thickness; x2++ {
for y2 := y - thickness; y2 <= y+thickness; y2++ {
highlight.Set(x2, y2, c)
}
}
}
}
}
for x := 0; x < w; x++ {
for y := 0; y < h; y++ {
hc := highlight.At(x, y)
if _, _, _, a := hc.RGBA(); a != 0 {
if _, _, _, a2 := newRgba.At(x+xOff, y+yOff).RGBA(); a2 != 0 {
newRgba.Set(x+xOff, y+yOff, hc) // todo overlay instead
}
}
}
}
return newRgba
}
}
func InnerHighlight(c color.Color, thickness int) mod.Mod {
return InnerHighlightOff(c, thickness, 0, 0)
}
func Highlight(c color.Color, thickness int) mod.Mod {
return HighlightOff(c, thickness, 0, 0)
}
type Filter func(color.Color) color.Color
func Inset(fn Filter, dir Dir) mod.Mod {
return func(img image.Image) *image.RGBA {
bds := img.Bounds()
w := bds.Max.X
h := bds.Max.Y
newRgba := image.NewRGBA(image.Rect(0, 0, w, h))
for x := 0; x < w; x++ {
for y := 0; y < h; y++ {
// todo: depth
_, _, _, a := img.At(x+dir.X(), y+dir.Y()).RGBA()
if a == 0 {
newRgba.Set(x, y, fn(img.At(x, y)))
} else {
newRgba.Set(x, y, img.At(x, y))
}
}
}
return newRgba
}
}
// Darker produces a darker color by f percentage (0 to 1) difference
func Darker(c color.Color, f float64) color.Color {
r, g, b, a := c.RGBA()
diff := uint32(65535 * f)
r -= diff
g -= diff
b -= diff
// Don't touch alpha
if r > 65535 {
r = 0
}
if g > 65535 {
g = 0
}
if b > 65535 {
b = 0
}
return color.RGBA64{uint16(r), uint16(g), uint16(b), uint16(a)}
}
// Fade produces a color with more transparency by f percentage (0 to 1)
func Fade(c color.Color, f float64) color.Color {
r, g, b, a := c.RGBA()
diff := uint32(65535 * f)
r -= diff
g -= diff
b -= diff
a -= diff
if r > 65535 {
r = 0
}
if g > 65535 {
g = 0
}
if b > 65535 {
b = 0
}
if a > 65535 {
a = 0
}
return color.RGBA64{uint16(r), uint16(g), uint16(b), uint16(a)}
} | entities/x/mods/highlight.go | 0.535341 | 0.536434 | highlight.go | starcoder |
package querier
import (
"math"
"sort"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/thanos-io/thanos/pkg/store/storepb"
"github.com/cortexproject/cortex/pkg/querier/series"
)
func convertMatchersToLabelMatcher(matchers []*labels.Matcher) []storepb.LabelMatcher {
var converted []storepb.LabelMatcher
for _, m := range matchers {
var t storepb.LabelMatcher_Type
switch m.Type {
case labels.MatchEqual:
t = storepb.LabelMatcher_EQ
case labels.MatchNotEqual:
t = storepb.LabelMatcher_NEQ
case labels.MatchRegexp:
t = storepb.LabelMatcher_RE
case labels.MatchNotRegexp:
t = storepb.LabelMatcher_NRE
}
converted = append(converted, storepb.LabelMatcher{
Type: t,
Name: m.Name,
Value: m.Value,
})
}
return converted
}
// Implementation of storage.SeriesSet, based on individual responses from store client.
type blockQuerierSeriesSet struct {
series []*storepb.Series
warnings storage.Warnings
// next response to process
next int
currSeries storage.Series
}
func (bqss *blockQuerierSeriesSet) Next() bool {
bqss.currSeries = nil
if bqss.next >= len(bqss.series) {
return false
}
currLabels := bqss.series[bqss.next].Labels
currChunks := bqss.series[bqss.next].Chunks
bqss.next++
// Merge chunks for current series. Chunks may come in multiple responses, but as soon
// as the response has chunks for a new series, we can stop searching. Series are sorted.
// See documentation for StoreClient.Series call for details.
for bqss.next < len(bqss.series) && storepb.CompareLabels(currLabels, bqss.series[bqss.next].Labels) == 0 {
currChunks = append(currChunks, bqss.series[bqss.next].Chunks...)
bqss.next++
}
bqss.currSeries = newBlockQuerierSeries(currLabels, currChunks)
return true
}
func (bqss *blockQuerierSeriesSet) At() storage.Series {
return bqss.currSeries
}
func (bqss *blockQuerierSeriesSet) Err() error {
return nil
}
func (bqss *blockQuerierSeriesSet) Warnings() storage.Warnings {
return bqss.warnings
}
// newBlockQuerierSeries makes a new blockQuerierSeries. Input labels must be already sorted by name.
func newBlockQuerierSeries(lbls []storepb.Label, chunks []storepb.AggrChunk) *blockQuerierSeries {
sort.Slice(chunks, func(i, j int) bool {
return chunks[i].MinTime < chunks[j].MinTime
})
return &blockQuerierSeries{labels: storepb.LabelsToPromLabelsUnsafe(lbls), chunks: chunks}
}
type blockQuerierSeries struct {
labels labels.Labels
chunks []storepb.AggrChunk
}
func (bqs *blockQuerierSeries) Labels() labels.Labels {
return bqs.labels
}
func (bqs *blockQuerierSeries) Iterator() chunkenc.Iterator {
if len(bqs.chunks) == 0 {
// should not happen in practice, but we have a unit test for it
return series.NewErrIterator(errors.New("no chunks"))
}
its := make([]chunkenc.Iterator, 0, len(bqs.chunks))
for _, c := range bqs.chunks {
ch, err := chunkenc.FromData(chunkenc.EncXOR, c.Raw.Data)
if err != nil {
return series.NewErrIterator(errors.Wrapf(err, "failed to initialize chunk from XOR encoded raw data (series: %v min time: %d max time: %d)", bqs.Labels(), c.MinTime, c.MaxTime))
}
it := ch.Iterator(nil)
its = append(its, it)
}
return newBlockQuerierSeriesIterator(bqs.Labels(), its)
}
func newBlockQuerierSeriesIterator(labels labels.Labels, its []chunkenc.Iterator) *blockQuerierSeriesIterator {
return &blockQuerierSeriesIterator{labels: labels, iterators: its, lastT: math.MinInt64}
}
// blockQuerierSeriesIterator implements a series iterator on top
// of a list of time-sorted, non-overlapping chunks.
type blockQuerierSeriesIterator struct {
// only used for error reporting
labels labels.Labels
iterators []chunkenc.Iterator
i int
lastT int64
}
func (it *blockQuerierSeriesIterator) Seek(t int64) bool {
// We generally expect the chunks already to be cut down
// to the range we are interested in. There's not much to be gained from
// hopping across chunks so we just call next until we reach t.
for {
ct, _ := it.At()
if ct >= t {
return true
}
if !it.Next() {
return false
}
}
}
func (it *blockQuerierSeriesIterator) At() (int64, float64) {
if it.i >= len(it.iterators) {
return 0, 0
}
t, v := it.iterators[it.i].At()
it.lastT = t
return t, v
}
func (it *blockQuerierSeriesIterator) Next() bool {
if it.i >= len(it.iterators) {
return false
}
if it.iterators[it.i].Next() {
return true
}
if it.iterators[it.i].Err() != nil {
return false
}
for {
it.i++
if it.i >= len(it.iterators) {
return false
}
// we must advance iterator first, to see if it has any samples.
// Seek will call At() as its first operation.
if !it.iterators[it.i].Next() {
if it.iterators[it.i].Err() != nil {
return false
}
// Found empty iterator without error, skip it.
continue
}
// Chunks are guaranteed to be ordered but not generally guaranteed to not overlap.
// We must ensure to skip any overlapping range between adjacent chunks.
return it.Seek(it.lastT + 1)
}
}
func (it *blockQuerierSeriesIterator) Err() error {
if it.i >= len(it.iterators) {
return nil
}
err := it.iterators[it.i].Err()
if err != nil {
return promql.ErrStorage{Err: errors.Wrapf(err, "cannot iterate chunk for series: %v", it.labels)}
}
return nil
} | vendor/github.com/cortexproject/cortex/pkg/querier/block.go | 0.705785 | 0.404302 | block.go | starcoder |
package idxfile
/*
== Original (version 1) pack-*.idx files have the following format:
- The header consists of 256 4-byte network byte order
integers. N-th entry of this table records the number of
objects in the corresponding pack, the first byte of whose
object name is less than or equal to N. This is called the
'first-level fan-out' table.
- The header is followed by sorted 24-byte entries, one entry
per object in the pack. Each entry is:
4-byte network byte order integer, recording where the
object is stored in the packfile as the offset from the
beginning.
20-byte object name.
- The file is concluded with a trailer:
A copy of the 20-byte SHA1 checksum at the end of
corresponding packfile.
20-byte SHA1-checksum of all of the above.
Pack Idx file:
-- +--------------------------------+
fanout | fanout[0] = 2 (for example) |-.
table +--------------------------------+ |
| fanout[1] | |
+--------------------------------+ |
| fanout[2] | |
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
| fanout[255] = total objects |---.
-- +--------------------------------+ | |
main | offset | | |
index | object name 00XXXXXXXXXXXXXXXX | | |
table +--------------------------------+ | |
| offset | | |
| object name 00XXXXXXXXXXXXXXXX | | |
+--------------------------------+<+ |
.-| offset | |
| | object name 01XXXXXXXXXXXXXXXX | |
| +--------------------------------+ |
| | offset | |
| | object name 01XXXXXXXXXXXXXXXX | |
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
| | offset | |
| | object name FFXXXXXXXXXXXXXXXX | |
--| +--------------------------------+<--+
trailer | | packfile checksum |
| +--------------------------------+
| | idxfile checksum |
| +--------------------------------+
.-------.
|
Pack file entry: <+
packed object header:
1-byte size extension bit (MSB)
type (next 3 bit)
size0 (lower 4-bit)
n-byte sizeN (as long as MSB is set, each 7-bit)
size0..sizeN form 4+7+7+..+7 bit integer, size0
is the least significant part, and sizeN is the
most significant part.
packed object data:
If it is not DELTA, then deflated bytes (the size above
is the size before compression).
If it is REF_DELTA, then
20-byte base object name SHA1 (the size above is the
size of the delta data that follows).
delta data, deflated.
If it is OFS_DELTA, then
n-byte offset (see below) interpreted as a negative
offset from the type-byte of the header of the
ofs-delta entry (the size above is the size of
the delta data that follows).
delta data, deflated.
offset encoding:
n bytes with MSB set in all but the last one.
The offset is then the number constructed by
concatenating the lower 7 bit of each byte, and
for n >= 2 adding 2^7 + 2^14 + ... + 2^(7*(n-1))
to the result.
== Version 2 pack-*.idx files support packs larger than 4 GiB, and
have some other reorganizations. They have the format:
- A 4-byte magic number '\377tOc' which is an unreasonable
fanout[0] value.
- A 4-byte version number (= 2)
- A 256-entry fan-out table just like v1.
- A table of sorted 20-byte SHA1 object names. These are
packed together without offset values to reduce the cache
footprint of the binary search for a specific object name.
- A table of 4-byte CRC32 values of the packed object data.
This is new in v2 so compressed data can be copied directly
from pack to pack during repacking without undetected
data corruption.
- A table of 4-byte offset values (in network byte order).
These are usually 31-bit pack file offsets, but large
offsets are encoded as an index into the next table with
the msbit set.
- A table of 8-byte offset entries (empty for pack files less
than 2 GiB). Pack files are organized with heavily used
objects toward the front, so most object references should
not need to refer to this table.
- The same trailer as a v1 pack file:
A copy of the 20-byte SHA1 checksum at the end of
corresponding packfile.
20-byte SHA1-checksum of all of the above.
From:
https://www.kernel.org/pub/software/scm/git/docs/v1.7.5/technical/pack-protocol.txt
*/ | vendor/srcd.works/go-git.v4/plumbing/format/idxfile/doc.go | 0.539711 | 0.6602 | doc.go | starcoder |
package ode
// #cgo LDFLAGS: -lode
// #define dDOUBLE
// #include <ode/ode.h>
import "C"
import (
"unsafe"
)
// Initialization flags
const (
ManualThreadCleanupIFlag = C.dInitFlagManualThreadCleanup
)
// Allocation flags
const (
BasicDataAFlag = C.dAllocateFlagBasicData
CollisionDataAFlag = C.dAllocateFlagCollisionData
AllAFlag = C.dAllocateMaskAll
)
// Short constructor aliases for convenience
var (
V3 = NewVector3
V4 = NewVector4
M3 = NewMatrix3
M4 = NewMatrix4
Q = NewQuaternion
BB = NewAABB
)
// NearCallback is a callback type for handling potential object collisions.
type NearCallback func(data interface{}, obj1, obj2 Geom)
type nearCallbackData struct {
data interface{}
fn NearCallback
}
var nearCallbackDataMap = NewCGOMap()
//export nearCallback
func nearCallback(data unsafe.Pointer, obj1, obj2 C.dGeomID) {
cbData := nearCallbackDataMap.Get(int(uintptr(data))).(*nearCallbackData)
cbData.fn(cbData.data, cToGeom(obj1), cToGeom(obj2))
}
// round num up to nearest multiple of align
func alignNum(num, align int) int {
return (num + (align - 1)) &^ (align - 1)
}
// Vector represents a double precision vector.
type Vector []float64
// NewVector returns a new Vector instance.
func NewVector(size, align int, vals ...float64) Vector {
alignSize := alignNum(size, align)
v := make(Vector, size, alignSize)
copy(v, vals)
return v
}
func (v Vector) convertC(c *C.dReal, toC bool) {
for i := range v {
if toC {
*c = C.dReal(v[i])
} else {
v[i] = float64(*c)
}
c = (*C.dReal)(unsafe.Pointer(uintptr(unsafe.Pointer(c)) + unsafe.Sizeof(*c)))
}
}
func (v Vector) toC(c *C.dReal) {
v.convertC(c, true)
}
func (v Vector) fromC(c *C.dReal) {
v.convertC(c, false)
}
// Vector3 represents a 3 component vector.
type Vector3 Vector
func cToVector3(a *C.dReal) Vector3 {
vec := NewVector3()
Vector(vec).fromC(a)
return vec
}
// NewVector3 returns a new Vector3 instance.
func NewVector3(vals ...float64) Vector3 {
return Vector3(NewVector(3, 4, vals...))
}
// Vector4 represents a 4 component vector.
type Vector4 Vector
// NewVector4 returns a new Vector4 instance.
func NewVector4(vals ...float64) Vector4 {
return Vector4(NewVector(4, 4, vals...))
}
// Quaternion represents a quaternion.
type Quaternion Vector
// NewQuaternion returns a new Quaternion instance.
func NewQuaternion(vals ...float64) Quaternion {
return Quaternion(NewVector(4, 1, vals...))
}
// AABB represents an axis-aligned bounding box.
type AABB Vector
// NewAABB returns a new AABB instance.
func NewAABB(vals ...float64) AABB {
return AABB(NewVector(6, 1, vals...))
}
// Matrix represents a double precision matrix.
type Matrix [][]float64
// NewVector returns a new Matrix instance.
func NewMatrix(numRows, numCols, align int, vals ...float64) Matrix {
mat := make(Matrix, numRows)
numAlignCols := alignNum(numCols, align)
elts := make([]float64, numAlignCols*numRows)
for i := range mat {
mat[i], elts = elts[:numCols:numAlignCols], elts[numAlignCols:]
n := numCols
if len(vals) < numCols {
n = len(vals)
}
copy(mat[i], vals[:n])
vals = vals[n:]
}
return mat
}
func (m Matrix) convertC(c *C.dReal, toC bool) {
for i := range m {
for j := 0; j < cap(m[i]); j++ {
if j < len(m[i]) {
if toC {
*c = C.dReal(m[i][j])
} else {
m[i][j] = float64(*c)
}
}
c = (*C.dReal)(unsafe.Pointer(uintptr(unsafe.Pointer(c)) + unsafe.Sizeof(*c)))
}
}
}
func (m Matrix) toC(c *C.dReal) {
m.convertC(c, true)
}
func (m Matrix) fromC(c *C.dReal) {
m.convertC(c, false)
}
// Matrix3 represents a 3x3 matrix.
type Matrix3 Matrix
// NewMatrix3 returns a new Matrix3 instance.
func NewMatrix3(vals ...float64) Matrix3 {
return Matrix3(NewMatrix(3, 3, 4, vals...))
}
// Matrix4 represents a 4x4 matrix.
type Matrix4 Matrix
// NewMatrix4 returns a new Matrix4 instance.
func NewMatrix4(vals ...float64) Matrix4 {
return Matrix4(NewMatrix(4, 4, 4, vals...))
}
// VertexList represents a list of 3D vertices.
type VertexList Matrix
// NewVertexList returns a new VertexList instance.
func NewVertexList(size int, vals ...float64) VertexList {
return VertexList(NewMatrix(size, 3, 4, vals...))
}
// PlaneList represents a list of plane definitions.
type PlaneList Matrix
// NewPlaneList returns a new PlaneList instance.
func NewPlaneList(size int, vals ...float64) PlaneList {
return PlaneList(NewMatrix(size, 4, 1, vals...))
}
// TriVertexIndexList represents a list of triangle vertex indices.
type TriVertexIndexList [][]uint32
// NewTriVertexIndexList returns a new TriVertexIndexList instance.
func NewTriVertexIndexList(size int, indices ...uint32) TriVertexIndexList {
list := make(TriVertexIndexList, size)
elts := make([]uint32, 3*size)
for i := range list {
list[i], elts = elts[:3], elts[3:]
n := 3
if len(indices) < 3 {
n = len(indices)
}
copy(list[i], indices[:n])
indices = indices[n:]
}
return list
}
// PolygonList represents a list of polygon definitions
type PolygonList []C.uint
func GetConfiguration() string {
return C.GoString(C.dGetConfiguration())
}
// Init initializes ODE.
func Init(initFlags, allocFlags int) {
C.dInitODE2(C.uint(initFlags))
C.dAllocateODEDataForThread(C.uint(allocFlags))
}
// Close releases ODE resources.
func Close() {
C.dCloseODE()
}
// CleanupAllDataForThread manually releases ODE resources for the current thread.
func CleanupAllDataForThread() {
C.dCleanupODEAllDataForThread()
}
func btoi(b bool) int {
if b {
return 1
}
return 0
} | ode.go | 0.690455 | 0.452173 | ode.go | starcoder |
package gol
import (
"strconv"
"sync"
"time"
"uk.ac.bris.cs/gameoflife/util"
)
type distributorChannels struct {
events chan<- Event
ioCommand chan<- ioCommand
ioIdle <-chan bool
ioFileName chan<- string
ioOutput chan<- uint8
ioInput <-chan uint8
keyPresses <-chan rune
}
// Sends the file name to io.go so the world can be initialised
func sendFileName(fileName string, ioCommand chan<- ioCommand, ioFileName chan<- string) {
ioCommand <- ioInput
ioFileName <- fileName
}
// Returns the world with its initial values filled
func initialiseWorld(height int, width int, ioInput <-chan uint8, events chan<- Event) [][]byte {
world := make([][]byte, height)
for y := range world { // Create an array of bytes for each row
world[y] = make([]byte, width)
}
for y, row := range world {
for x := range row {
cell := <-ioInput
world[y][x] = cell // Add each cell to the row
if cell == 255 { // If the cell is alive send a cellFlipped event
events <- CellFlipped{
CompletedTurns: 0,
Cell: util.Cell{
X: x,
Y: y,
},
}
}
}
}
events <- TurnComplete{
CompletedTurns: 0,
}
return world
}
// Returns a slice of channels, that will each be used to communicate a section of the world between the distributor and a worker
func createPartChannels(numOfThreads int) []chan [][]byte{
var parts []chan [][]byte
for i := 0; i < numOfThreads; i++ {
parts = append(parts, make(chan [][]byte))
}
return parts
}
// Returns a slice containing the height of each section that each worker will process
func calcSectionHeights(height int, threads int) []int {
heightOfParts := make([]int, threads)
for i := range heightOfParts{
heightOfParts[i] = 0
}
partAssigning := 0
for i := 0; i < height; i++ {
heightOfParts[partAssigning] += 1
if partAssigning == len(heightOfParts) - 1 {
partAssigning = 0
} else {
partAssigning += 1
}
}
return heightOfParts
}
// Returns a slice containing the initial y-values of the parts of the world that each worker will process
func calcStartYValues(sectionHeights []int) []int {
startYValues := make([]int, len(sectionHeights))
totalHeightAssigned := 0
for i, height := range sectionHeights {
startYValues[i] = totalHeightAssigned
totalHeightAssigned += height
}
return startYValues
}
// Returns the neighbours of a cell at given coordinates
func getNeighbours(world [][]byte, row int, column int) []byte {
rowAbove, rowBelow := row - 1, row + 1
if row == 0 {
rowAbove = len(world[0]) - 1
} else if row == len(world[0]) - 1 {
rowBelow = 0
}
columnLeft, columnRight := column - 1, column + 1
if column == 0 {
columnLeft = len(world[0]) - 1
} else if column == len(world[0]) - 1 {
columnRight = 0
}
neighbours := []byte{world[rowAbove][columnLeft], world[rowAbove][column], world[rowAbove][columnRight],
world[row][columnLeft], world[row][columnRight], world[rowBelow][columnLeft], world[rowBelow][column],
world[rowBelow][columnRight]}
return neighbours
}
// Returns the number of live neighbours from a set of neighbours
func calcLiveNeighbours(neighbours []byte) int {
liveNeighbours := 0
for _, neighbour := range neighbours {
if neighbour == 255 {
liveNeighbours += 1
}
}
return liveNeighbours
}
// Returns the new value of a cell given its current value and number of live neighbours
func calcValue(item byte, liveNeighbours int) byte {
calculatedValue := byte(0)
if item == 255 {
if liveNeighbours == 2 || liveNeighbours == 3 {
calculatedValue = byte(255)
}
} else {
if liveNeighbours == 3 {
calculatedValue = byte(255)
}
}
return calculatedValue
}
// Returns the next state of part of a world given the current state
func calcNextState(world [][]byte, events chan<- Event, startY int, turn int) [][]byte {
var nextWorld [][]byte
for y, row := range world[1:len(world) - 1] { // Loops over each row apart from the top and bottom row
nextWorld = append(nextWorld, []byte{})
for x, element := range row {
neighbours := getNeighbours(world, y + 1, x)
liveNeighbours := calcLiveNeighbours(neighbours)
value := calcValue(element, liveNeighbours)
nextWorld[y] = append(nextWorld[y], value)
if value != world[y + 1][x] { // If the value of the cell has changed send a cell flipped event
events <- CellFlipped{
CompletedTurns: turn,
Cell: util.Cell{
X: x,
Y: y + startY,
},
}
}
}
}
return nextWorld
}
// Takes part of an image, calculates the next stage, and passes it back
func worker(part chan [][]byte, events chan<- Event, startY int, turns int) {
for turn := 0; turn < turns; turn++ {
thePart := <-part
nextPart := calcNextState(thePart, events, startY, turn)
part <- nextPart
}
}
// Reports the number of alive cells every 2 seconds
func ticker(twoSecondTicker *time.Ticker, mutexTurnsWorld *sync.Mutex, completedTurns *int, world *[][]byte, events chan<- Event) {
for {
<-twoSecondTicker.C
mutexTurnsWorld.Lock()
events <- AliveCellsCount{
CompletedTurns: *completedTurns,
CellsCount: calcNumAliveCells(*world),
}
mutexTurnsWorld.Unlock()
}
}
// Receives key presses from the user and performs the appropriate action
func handleKeyPresses(keyPresses <-chan rune, mutexTurnsWorld *sync.Mutex, world *[][]byte, fileName string,
completedTurns *int, ioCommand chan<- ioCommand, ioFileName chan<- string, ioOutput chan<- uint8,
events chan<- Event, stop chan<- bool, pause chan<- bool) {
paused := false
for {
key := <-keyPresses
switch key {
case 115: // Save
mutexTurnsWorld.Lock()
writeFile(*world, fileName, *completedTurns, ioCommand, ioFileName, ioOutput, events)
mutexTurnsWorld.Unlock()
case 113: // Stop
stop <- true
case 112: // Pause/Resume
pause <- true
var newState State
if paused {
newState = Continuing
paused = false
} else {
newState = Paused
paused = true
}
mutexTurnsWorld.Lock()
events <- StateChange{*completedTurns, newState}
mutexTurnsWorld.Unlock()
}
}
}
// Performs the specified number of turns of the world
func performAllTurns(turns int, stop <-chan bool, pause <-chan bool, parts []chan [][]byte, startYValues []int,
sectionHeights []int, world *[][]byte, threads int, mutexTurnsWorld *sync.Mutex, completedTurns *int, events chan<- Event) {
// For each turn, pass part of the board to each worker, process it, then put it back together and repeat
turnsLoop:
for turn := 0; turn < turns; turn++ {
select {
case <-stop:
break turnsLoop
case <-pause:
select {
case <-stop:
break turnsLoop
case <-pause:
}
default: // If no keys have been pressed just move onto performing the next turn of the world
}
for i, part := range parts { // Send the next part to each worker
startY := startYValues[i]
endY := startY + sectionHeights[i]
worldPart := getPart(*world, threads, i, startY, endY)
part <- worldPart
}
var nextWorld [][]byte
for _, part := range parts { // Collect each part from each worker and build the next state of the world
nextWorld = append(nextWorld, <-part...)
}
mutexTurnsWorld.Lock()
*world = nextWorld
*completedTurns = turn + 1 // turn + 1 because we are at the end of the turn (e.g. end of turn 0 means completed 1 turn)
mutexTurnsWorld.Unlock()
events <- TurnComplete{
CompletedTurns: *completedTurns,
}
}
}
// Returns the number of alive cells in a world
func calcNumAliveCells(world [][]byte) int {
total := 0
for _, row := range world {
for _, element := range row {
if element == 255 {
total += 1
}
}
}
return total
}
// Returns part of a world given the number of threads, the part number, the startY, and the endY
func getPart(world [][]byte, threads int, partNum int, startY int, endY int) [][]byte {
var worldPart [][]byte
if threads == 1 { // Having 1 thread is a special case as the top and bottom row will come from the same part
worldPart = append(worldPart, world[len(world) - 1])
worldPart = append(worldPart, world...)
worldPart = append(worldPart, world[0])
} else {
if partNum == 0 { // If it is the first part add the bottom row of the world as the top row
worldPart = append(worldPart, world[len(world)-1])
worldPart = append(worldPart, world[:endY + 1]...)
} else if partNum == threads - 1 { // If it is the last part add the top row of the world as the bottom row
worldPart = append(worldPart, world[startY - 1:]...)
worldPart = append(worldPart, world[0])
} else {
worldPart = append(worldPart, world[startY - 1:endY+1]...)
}
}
return worldPart
}
// Returns a slice of alive cells
func getAliveCells(world [][]byte) []util.Cell {
var aliveCells []util.Cell
for y, row := range world {
for x, element := range row {
if element == 255 {
aliveCells = append(aliveCells, util.Cell{X: x, Y: y})
}
}
}
return aliveCells
}
// Writes to a file and sends the correct event
func writeFile(world [][]byte, fileName string, turns int, ioCommand chan<- ioCommand, ioFileName chan<- string,
ioOutputChannel chan<- uint8, events chan<- Event) {
outputFileName := fileName + "x" + strconv.Itoa(turns)
ioCommand <- ioOutput
ioFileName <- outputFileName
for _, row := range world {
for _, element := range row {
ioOutputChannel <- element
}
}
events <- ImageOutputComplete{ // implements Event
CompletedTurns: turns,
Filename: outputFileName,
}
}
// Distributor divides the work between workers and interacts with other goroutines.
func distributor(p Params, c distributorChannels) {
fileName := strconv.Itoa(p.ImageWidth) + "x" + strconv.Itoa(p.ImageHeight)
sendFileName(fileName, c.ioCommand, c.ioFileName)
world := initialiseWorld(p.ImageHeight, p.ImageWidth, c.ioInput, c.events)
parts := createPartChannels(p.Threads)
sectionHeights := calcSectionHeights(p.ImageHeight, p.Threads)
startYValues := calcStartYValues(sectionHeights)
for i, part := range parts { // Starts the workers ready to receive parts to calculate the next state
go worker(part, c.events, startYValues[i], p.Turns)
}
var completedTurns int
mutexTurnsWorld := &sync.Mutex{}
twoSecondTicker := time.NewTicker(2 * time.Second)
go ticker(twoSecondTicker, mutexTurnsWorld, &completedTurns, &world, c.events) // Runs the ticker
stop := make(chan bool)
pause := make(chan bool)
go handleKeyPresses(c.keyPresses, mutexTurnsWorld, &world, fileName, &completedTurns, c.ioCommand, c.ioFileName,
c.ioOutput, c.events, stop, pause) // Handles key presses for the user
performAllTurns(p.Turns, stop, pause, parts, startYValues, sectionHeights, &world, p.Threads, mutexTurnsWorld,
&completedTurns, c.events)
twoSecondTicker.Stop() // The ticker stops running once all turns have been performed
mutexTurnsWorld.Lock()
aliveCells := getAliveCells(world)
c.events <- FinalTurnComplete{ // Send a final turn complete event to the events channel
CompletedTurns: completedTurns,
Alive: aliveCells,
}
writeFile(world, fileName, completedTurns, c.ioCommand, c.ioFileName, c.ioOutput, c.events)
c.ioCommand <- ioCheckIdle // Make sure that the Io has finished any output before exiting.
<-c.ioIdle
c.events <- StateChange{completedTurns, Quitting}
mutexTurnsWorld.Unlock()
close(c.events) // Close the channel to stop the SDL goroutine gracefully. Removing may cause deadlock.
} | gol/distributor.go | 0.593845 | 0.44083 | distributor.go | starcoder |
package packet
/*
Motion Packet
The motion packet gives physics data for all the cars being driven.
There is additional data for the car being driven with the goal of being able to drive a motion platform setup.
N.B. For the normalised vectors below, to convert to float values divide by 32767.0f – 16-bit signed values are used to pack the data and on the assumption that direction values are always between -1.0f and 1.0f.
Frequency: Rate as specified in menus
Size: 1464 bytes (Packet size updated in Beta 3)
Version: 1
*/
type CarMotionData struct {
WorldPositionX float32 // World space X position
WorldPositionY float32 // World space Y position
WorldPositionZ float32 // World space Z position
WorldVelocityX float32 // Velocity in world space X
WorldVelocityY float32 // Velocity in world space Y
WorldVelocityZ float32 // Velocity in world space Z
WorldForwardDirX int16 // World space forward X direction (normalised)
WorldForwardDirY int16 // World space forward Y direction (normalised)
WorldForwardDirZ int16 // World space forward Z direction (normalised)
WorldRightDirX int16 // World space right X direction (normalised)
WorldRightDirY int16 // World space right Y direction (normalised)
WorldRightDirZ int16 // World space right Z direction (normalised)
GForceLateral float32 // Lateral G-Force component
GForceLongitudinal float32 // Longitudinal G-Force component
GForceVertical float32 // Vertical G-Force component
Yaw float32 // Yaw angle in radians
Pitch float32 // Pitch angle in radians
Roll float32 // Roll angle in radians
}
type MotionDataPacket struct {
Header PacketHeader
Payload [22]CarMotionData
// Extra player car ONLY data:
SuspensionPosition [4]float32 // Note: All wheel arrays have the following order:
SuspensionVelocity [4]float32 // RL, RR, FL, FR
SuspensionAcceleration [4]float32 // RL, RR, FL, FR
WheelSpeed [4]float32 // Speed of each wheel
WheelSlip [4]float32 // Slip ratio for each wheel
LocalVelocityX float32 // Velocity in local space
LocalVelocityY float32 // Velocity in local space
LocalVelocityZ float32 // Velocity in local space
AngularVelocityX float32 // Angular velocity x-component
AngularVelocityY float32 // Angular velocity y-component
AngularVelocityZ float32 // Angular velocity z-component
AngularAccelerationX float32 // Angular velocity x-component
AngularAccelerationY float32 // Angular velocity y-component
AngularAccelerationZ float32 // Angular velocity z-component
FrontWheelsAngle float32 // Current front wheels angle in radians
} | golang/pkg/common/packet/motion.go | 0.5083 | 0.564158 | motion.go | starcoder |
package types
import (
"fmt"
"strconv"
)
// PlmnID is a globally unique network identifier (Public Land Mobile Network)
type PlmnID uint32
// EnbID is an eNodeB Identifier
type EnbID uint32
// CellID is a node-local cell identifier
type CellID uint8
// ECI is a E-UTRAN Cell Identifier
type ECI uint32
// GEnbID is a Globally eNodeB identifier
type GEnbID uint64
// ECGI is E-UTRAN Cell Global Identifier
type ECGI uint64
// CRNTI is a cell-specific UE identifier
type CRNTI uint32
// MSIN is Mobile Subscriber Identification Number
type MSIN uint32
// IMSI is International Mobile Subscriber Identity
type IMSI uint64
const (
mask28 = 0xfffffff
mask20 = 0xfffff00
lowest24 = 0x0ffffff
maskSecondNibble = 0x00000f0
maskSeventhNibble = 0xf000000
maskThirteenthNibble = 0xf000000000000
)
// EncodePlmnID encodes MCC and MNC strings into a PLMNID hex string
func EncodePlmnID(mcc string, mnc string) string {
if len(mnc) == 2 {
return string(mcc[1]) + string(mcc[0]) + "F" + string(mcc[2]) + string(mnc[1]) + string(mnc[0])
} else {
return string(mcc[1]) + string(mcc[0]) + string(mnc[2]) + string(mcc[2]) + string(mnc[1]) + string(mnc[0])
}
}
// DecodePlmnID decodes MCC and MNC strings from PLMNID hex string
func DecodePlmnID(plmnID string) (mcc string, mnc string) {
if plmnID[2] == 'f' || plmnID[2] == 'F' {
return string(plmnID[1]) + string(plmnID[0]) + string(plmnID[3]),
string(plmnID[5]) + string(plmnID[4])
} else {
return string(plmnID[1]) + string(plmnID[0]) + string(plmnID[3]),
string(plmnID[5]) + string(plmnID[4]) + string(plmnID[2])
}
}
// ToPlmnID encodes the specified MCC and MNC strings into a numeric PLMNID
func ToPlmnID(mcc string, mnc string) PlmnID {
s := EncodePlmnID(mcc, mnc)
n, err := strconv.ParseUint(s, 16, 32)
if err != nil {
return 0
}
return PlmnID(n)
}
// PlmnIDFromHexString converts string form of PLMNID in its hex form into a numeric one suitable for APIs
func PlmnIDFromHexString(plmnID string) PlmnID {
n, err := strconv.ParseUint(plmnID, 16, 32)
if err != nil {
return 0
}
return PlmnID(n)
}
// PlmnIDFromString converts string form of PLMNID given as a simple MCC-MCN catenation into a numeric one suitable for APIs
func PlmnIDFromString(plmnID string) PlmnID {
return ToPlmnID(plmnID[0:3], plmnID[3:])
}
// PlmnIDToString generates the MCC-MCN catenation format from the specified numeric PLMNID
func PlmnIDToString(plmnID PlmnID) string {
hexString := fmt.Sprintf("%x", plmnID)
mcc, mnc := DecodePlmnID(hexString)
return mcc + mnc
}
// ToECI produces ECI from the specified components
func ToECI(enbID EnbID, cid CellID) ECI {
if cid&maskSecondNibble == 0 {
return ECI(uint(enbID)<<4 | uint(cid)) // Unclear whether this clause is needed
}
return ECI(uint(enbID)<<8 | uint(cid))
}
// ToECGI produces ECGI from the specified components
func ToECGI(plmnID PlmnID, eci ECI) ECGI {
if uint(eci)&maskSeventhNibble == 0 {
return ECGI(uint(plmnID)<<24 | (uint(eci) & mask28)) // Unclear whether this clause is needed
}
return ECGI(uint(plmnID)<<28 | (uint(eci) & mask28))
}
// ToGEnbID produces GEnbID from the specified components
func ToGEnbID(plmnID PlmnID, enbID EnbID) GEnbID {
return GEnbID(uint(plmnID)<<28 | (uint(enbID) << 8 & mask20))
}
// GetPlmnID extracts PLMNID from the specified ECGI, GEnbID or IMSI
func GetPlmnID(id uint64) PlmnID {
if id&maskThirteenthNibble == 0 {
return PlmnID(id >> 24)
}
return PlmnID(id >> 28)
}
// GetCellID extracts Cell ID from the specified ECGI or GEnbID
func GetCellID(id uint64) CellID {
if id&maskThirteenthNibble == 0 {
return CellID(id & 0xf)
}
return CellID(id & 0xff)
}
// GetEnbID extracts Enb ID from the specified ECGI or GEnbID
func GetEnbID(id uint64) EnbID {
if id&maskThirteenthNibble == 0 {
return EnbID((id & mask20) >> 4)
}
return EnbID((id & mask20) >> 8)
}
// GetECI extracts ECI from the specified ECGI or GEnbID
func GetECI(id uint64) ECI {
if id&maskThirteenthNibble == 0 {
return ECI(id & lowest24)
}
return ECI(id & mask28)
}
const (
// AzimuthKey - used in topo device attributes
AzimuthKey = "azimuth"
// ArcKey - used in topo device attributes
ArcKey = "arc"
// LatitudeKey - used in topo device attributes
LatitudeKey = "latitude"
// LongitudeKey - used in topo device attributes
LongitudeKey = "longitude"
// EcidKey - used in topo device attributes
EcidKey = "ecid"
// PlmnIDKey - used in topo device attributes
PlmnIDKey = "plmnid"
// GrpcPortKey - used in topo device attributes
GrpcPortKey = "grpcport"
// AddressKey ...
AddressKey = "address"
)
const (
// E2NodeType - used in topo device type
E2NodeType = "E2Node"
// E2NodeVersion100 - used in topo device version
E2NodeVersion100 = "1.0.0"
) | go/onos/ransim/types/types.go | 0.63861 | 0.516778 | types.go | starcoder |
package collections
import (
"fmt"
"sync"
)
type Vector struct {
sync.Mutex
elementCount int
capacityIncrement int
elementData []interface{}
}
func NewVector() *Vector {
inst := &Vector{
elementData: make([]interface{}, 0),
}
return inst
}
func (v *Vector) Size() int {
return v.elementCount
}
func (v *Vector) Capacity() int {
return cap(v.elementData)
}
func (v *Vector) IsEmpty() bool {
return v.elementCount == 0
}
func (v *Vector) AddElement(obj interface{}) {
v.Lock()
defer v.Unlock()
v.elementCount++
v.elementData = append(v.elementData, obj)
}
func (v *Vector) RemoveElementAt(index int) {
v.Lock()
defer v.Unlock()
if index >= v.elementCount {
panic(fmt.Sprintf("array index out of bounds (%d >= %d)", index, v.elementCount))
}
if index < 0 {
panic(fmt.Sprintf("array index out of bounds (%d)", index))
}
l := v.Size()
elementData := v.elementData
if index == 0 {
if l > 1 {
v.elementData = elementData[1:]
} else {
v.elementData = make([]interface{}, 0)
}
} else if index == l-1 {
v.elementData = elementData[:index]
} else {
v.elementData = append(elementData[:index], elementData[index+1:l]...)
}
v.elementCount--
}
func (v *Vector) ElementAt(index int) interface{} {
if index >= v.elementCount {
panic(fmt.Sprintf("array index out of bounds (%d >= %d)", index, v.elementCount))
}
return v.elementData[index]
}
func (v *Vector) FirstElement() interface{} {
if v.IsEmpty() {
panic("no such element")
}
return v.elementData[0]
}
func (v *Vector) LastElement() interface{} {
if v.IsEmpty() {
panic("no such element")
}
return v.elementData[v.elementCount-1]
}
func (v *Vector) SetElementAt(obj interface{}, index int) {
v.Lock()
defer v.Unlock()
if index >= v.elementCount {
panic(fmt.Sprintf("array index out of bounds (%d >= %d)", index, v.elementCount))
}
v.elementData[index] = obj
}
func (v *Vector) InsertElementAt(obj interface{}, index int) {
v.Lock()
defer v.Unlock()
if index > v.elementCount {
panic(fmt.Sprintf("array index out of bounds (%d > %d)", index, v.elementCount))
}
l := v.Size()
elementData := v.elementData
if index == 0 {
v.elementData = append([]interface{}{obj}, elementData...)
} else if index == l-1 {
v.elementData = append(elementData, obj)
} else {
afterPart := elementData[index:l]
elementData = append(elementData[:index], obj)
v.elementData = append(elementData, afterPart...)
}
v.elementCount++
}
func (v *Vector) RemoveElement(obj interface{}) bool {
v.Lock()
i := v.IndexOf(obj, 0)
v.Unlock()
if i >= 0 {
v.RemoveElementAt(i)
return true
}
return false
}
func (v *Vector) RemoveAllElements() {
v.Lock()
defer v.Unlock()
for i := range v.elementData {
v.elementData[i] = nil
}
v.elementData = make([]interface{}, 0)
v.elementCount = 0
}
func (v *Vector) Clone() *Vector {
inst := NewVector()
elementData := make([]interface{}, 0)
elementData = append(elementData, v.elementData...)
inst.elementData = elementData
inst.elementCount = len(elementData)
return inst
}
func (v *Vector) Contains(obj interface{}) bool {
return v.IndexOf(obj, 0) >= 0
}
func (v *Vector) IndexOf(obj interface{}, index int) int {
for i := index; i < v.elementCount; i++ {
if v.elementData[i] == obj {
return i
}
}
return -1
}
func (v *Vector) LastIndexOf(obj interface{}, index int) int {
if index >= v.elementCount {
panic(fmt.Sprintf("array index out of bounds (%d >= %d)", index, v.elementCount))
}
for i := index; i >= 0; i-- {
if v.elementData[i] == obj {
return i
}
}
return -1
}
func (v *Vector) Get(index int) interface{} {
return v.ElementAt(index)
}
func (v *Vector) Set(index int, obj interface{}) interface{} {
oldValue := v.Get(index)
v.SetElementAt(obj, index)
return oldValue
}
func (v *Vector) Add(obj interface{}) bool {
v.AddElement(obj)
return true
}
func (v *Vector) Remove(index int) interface{} {
oldValue := v.Get(index)
v.RemoveElementAt(index)
return oldValue
}
func (v *Vector) Elements() enumerationI {
return NewEnumeration(v)
} | collections/vector.go | 0.517327 | 0.474144 | vector.go | starcoder |
package main
import (
"fmt"
"image/color"
"math"
"os"
"p4l/gifhelper"
"p4l/vec"
"strconv"
)
// Universe holds our bodies and the universe's parameters and
type Universe struct {
bodies []Body
width float64
g float64 // Universal gravitational constant = 6.674E-11 Nm^2/kg^2
}
// Body represents any mass in our universe
type Body struct {
name string
color color.RGBA
radius float64
mass float64
pos vec.Vec //Really a Point
vel vec.Vec
accel vec.Vec
}
// G is the universal gravitation constant, obtained from the initial Universe
var G float64
// SimulateGravity is our simulation engine. It takes an initial state, the size of the time interval for
// each "tick" and the number of simulation steps to run
func SimulateGravity(initialUniverse Universe, numGens int, t float64) []Universe {
G = initialUniverse.g
timePoints := make([]Universe, numGens+1)
timePoints[0] = initialUniverse
for i := 1; i < numGens; i++ {
timePoints[i] = UpdateUniverse(timePoints[i-1], t)
}
return timePoints
}
// UpdateUniverse takes a current universe, and returns a new universe calculated after time t has elapsed
func UpdateUniverse(curUni Universe, t float64) Universe {
newUni := CopyUniverse(curUni)
for i, body := range newUni.bodies {
accel := ComputeNetAccel(curUni, body)
// v2 = v1 + accl * t
vel := body.vel.Plus(accel.Mul(t))
// p2 = p1 + v*t + 1/2a*t^2
pos := body.pos.Plus(vel.Mul(t)).Plus(accel.Mul(.5 * math.Pow(t, 2)))
newUni.bodies[i].accel = accel
newUni.bodies[i].vel = vel
newUni.bodies[i].pos = pos
}
// need to update the body and put it into []body
return newUni
}
// CopyUniverse creates a deepcopy of a universe. (values, not references)
func CopyUniverse(curUni Universe) (newUni Universe) {
newUni.width = curUni.width
newUni.g = curUni.g
newUni.bodies = make([]Body, len(curUni.bodies))
_ = copy(newUni.bodies, curUni.bodies)
return
}
// ComputeNetAccel calculates the new acceleration on a body based on the gravitation effects of
// all other bodies in the universe
func ComputeNetAccel(curUni Universe, b Body) vec.Vec {
netAccel := vec.Newc(0, 0, 0)
for _, b2 := range curUni.bodies {
if b2 != b {
accel := ComputeAccl(b, b2)
netAccel = netAccel.Plus(accel)
}
}
return netAccel
}
// ComputeAccl compute the acceleration vector between two bodies
func ComputeAccl(b, b2 Body) vec.Vec {
// Create a new vector which represents the distance and direction from b to b2
// and get its spherical coordinates (distance and direction)
dist, theta, phi := vec.Newp(b.pos, b2.pos).AsSpherical()
accl := (G * b2.mass) / math.Pow(dist, 2)
// Calculate the acceleration on b caused by b2
// The acceleration is in the same direction as b2
return vec.Newd(accl, theta, phi)
}
func check(e error) {
if e != nil {
panic(e)
}
}
// WriteUniverse1 writes out the bodies in a Universe on a single line
func WriteUniverse1(resFile *os.File, i int, iUni Universe) {
for _, body := range iUni.bodies {
fmt.Fprintf(resFile, "%6d,%10s,%v,%v,%v\n", i, body.name, body.pos, body.vel, body.accel)
}
}
func main() {
// Jupiter System Model from Phillip
jupiter := Body{
name: "Jupiter",
color: color.RGBA{223, 227, 202, 255},
radius: 71000000,
mass: 1.898e27,
pos: vec.Newc(2000000000, 2000000000, 0),
vel: vec.Newc(0, 0, 0),
accel: vec.Newc(0, 0, 0),
}
io := Body{
name: "Io",
color: color.RGBA{249, 249, 165, 255},
radius: 1821000,
mass: 8.9319e22,
pos: vec.Newc(2000000000-421600000, 2000000000, 0),
vel: vec.Newc(0, -17320, 0),
accel: vec.Newc(0, 0, 0),
}
europa := Body{
name: "Europa",
color: color.RGBA{132, 83, 52, 255},
radius: 1569000,
mass: 4.7998e22,
pos: vec.Newc(2000000000, 2000000000+670900000, 0),
vel: vec.Newc(-13740, 0, 0),
accel: vec.Newc(0, 0, 0),
}
ganymede := Body{
name: "Ganymede",
color: color.RGBA{76, 0, 153, 255},
radius: 2631000,
mass: 1.4819e23,
pos: vec.Newc(2000000000+1070400000, 2000000000, 0),
vel: vec.Newc(0, 10870, 0),
accel: vec.Newc(0, 0, 0),
}
callisto := Body{
name: "Callisto",
color: color.RGBA{0, 153, 76, 255},
radius: 2410000,
mass: 1.0759e23,
pos: vec.Newc(2000000000, 2000000000-1882700000, 0),
vel: vec.Newc(8200, 0, 0),
accel: vec.Newc(0, 0, 0),
}
jupiterSystem := Universe{
width: 4000000000,
bodies: []Body{jupiter, io, europa, ganymede, callisto},
}
/* Earth/Moon Model
Gravitational constant G = 6.67408e-11
Name Colour Radius Mass PosX PosY VelX VelY AccX AccY
Earth green 6.371e6 5.97237e24 -4.6e6 0.0 0.0 -12.14 0.0 0.0
Moon grey 1.7374e6 7.342e22 3.80e8 0.0 0.0 1.022e3 0.0 0.0
*/
/*
earth := Body{
name: "earth",
color: color.RGBA{0, 255, 0, 255}, // green
radius: 6.371e6,
mass: 5.97237e24,
pos: vec.Newc(-4.6e6, 0.0, 0),
vel: vec.Newc(0, -12.14, 0),
accel: vec.Newc(0, 0, 0),
}
moon := Body{
name: "moon",
color: color.RGBA{64, 64, 64, 255}, // grey,
radius: 1.7374e6,
mass: 7.342e22,
pos: vec.Newc(3.80e8, 0.0, 0),
vel: vec.Newc(0, 1.022e3, 0),
accel: vec.Newc(0, 0, 0),
}
earthSystem := Universe{
bodies: []Body{earth, moon},
width: 100000000,
g: 6.67408e-11,
}
*/
fmt.Println("Starting gravity simulator...")
// numGens is in os.Args[1], time is in os.Args[2],
// width of the drawing is in os.Args[3], os.Args[4] is to draw every nth image
numGens, err1 := strconv.Atoi(os.Args[1])
if err1 != nil {
panic(err1)
}
time, err2 := strconv.ParseFloat(os.Args[2], 64)
if err2 != nil {
panic(err2)
}
canvasWidth, err3 := strconv.Atoi(os.Args[3])
if err3 != nil {
panic(err3)
}
frequency, err4 := strconv.Atoi(os.Args[4])
if err4 != nil {
panic(err4)
}
timePoints := SimulateGravity(jupiterSystem, numGens, time)
fmt.Println("Simulation Run Successfully...")
images := AnimateSystem(timePoints, canvasWidth, frequency)
fmt.Println("Simulation Images Generated...")
filename := "Universe_Animation"
gifhelper.ImagesToGIF(images, filename)
fmt.Println("Animation Generated...")
resFileName := filename + ".txt"
resFile, err := os.Create(resFileName)
check(err)
// Range over each universe
fmt.Fprintln(resFile, "----START---")
for i, iUni := range timePoints {
if i%frequency == 0 {
WriteUniverse1(resFile, i, iUni)
}
}
fmt.Fprintln(resFile, "----END---")
err = resFile.Close()
check(err)
fmt.Println("Data File Written... Done!")
} | gravity/model.go | 0.640636 | 0.494751 | model.go | starcoder |
package tests
import (
"fmt"
"reflect"
)
type numberTest struct {
expected float64
}
func (nt *numberTest) Run(input interface{}) error {
castedNumber, ok := castNumber(input)
if !ok {
return fmt.Errorf("input (%#v %v) isn't a number", input, reflect.TypeOf(input))
}
if castedNumber != nt.expected {
return fmt.Errorf("%v is not equal to %v", castedNumber, nt.expected)
}
return nil
}
func (nt *numberTest) IsMetaTest() bool {
return false
}
func NewNumberTest(expected float64) Test {
return &numberTest{
expected: expected,
}
}
type gtTest struct {
greaterThan float64
}
func (nt *gtTest) Run(input interface{}) error {
castedNumber, ok := castNumber(input)
if !ok {
return fmt.Errorf("input (%#v %v) isn't a number", input, reflect.TypeOf(input))
}
if castedNumber <= nt.greaterThan {
return fmt.Errorf("%v is not bigger than %v", castedNumber, nt.greaterThan)
}
return nil
}
func (nt *gtTest) IsMetaTest() bool {
return false
}
func NewGreaterThanTest(expected float64) Test {
return >Test{
greaterThan: expected,
}
}
type ltTest struct {
lessThan float64
}
func (lt *ltTest) Run(input interface{}) error {
castedNumber, ok := castNumber(input)
if !ok {
return fmt.Errorf("input (%#v %v) isn't a number", input, reflect.TypeOf(input))
}
if castedNumber >= lt.lessThan {
return fmt.Errorf("%v is not less than %v", castedNumber, lt.lessThan)
}
return nil
}
func (lt *ltTest) IsMetaTest() bool {
return false
}
func NewLessThanTest(expected float64) Test {
return <Test{
lessThan: expected,
}
}
type gteTest struct {
graterThanEqual float64
}
func (nt *gteTest) Run(input interface{}) error {
castedNumber, ok := castNumber(input)
if !ok {
return fmt.Errorf("input (%#v %v) isn't a number", input, reflect.TypeOf(input))
}
if castedNumber < nt.graterThanEqual {
return fmt.Errorf("%v is not bigger than or equal to %v", castedNumber, nt.graterThanEqual)
}
return nil
}
func (nt *gteTest) IsMetaTest() bool {
return false
}
func NewGreaterThanEqualTest(expected float64) Test {
return >eTest{
graterThanEqual: expected,
}
}
type lteTest struct {
lessThanEqual float64
}
func (nt *lteTest) Run(input interface{}) error {
castedNumber, ok := castNumber(input)
if !ok {
return fmt.Errorf("input (%#v %v) isn't a number", input, reflect.TypeOf(input))
}
if castedNumber > nt.lessThanEqual {
return fmt.Errorf("%v is not less than or equal to %v", castedNumber, nt.lessThanEqual)
}
return nil
}
func (nt *lteTest) IsMetaTest() bool {
return false
}
func NewLessThanEqualTest(expected float64) Test {
return <eTest{
lessThanEqual: expected,
}
} | number.go | 0.819244 | 0.446495 | number.go | starcoder |
package forecast
type AttributeType string
// Enum values for AttributeType
const (
AttributeTypeString AttributeType = "string"
AttributeTypeInteger AttributeType = "integer"
AttributeTypeFloat AttributeType = "float"
AttributeTypeTimestamp AttributeType = "timestamp"
)
func (enum AttributeType) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum AttributeType) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type DatasetType string
// Enum values for DatasetType
const (
DatasetTypeTargetTimeSeries DatasetType = "TARGET_TIME_SERIES"
DatasetTypeRelatedTimeSeries DatasetType = "RELATED_TIME_SERIES"
DatasetTypeItemMetadata DatasetType = "ITEM_METADATA"
)
func (enum DatasetType) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum DatasetType) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type Domain string
// Enum values for Domain
const (
DomainRetail Domain = "RETAIL"
DomainCustom Domain = "CUSTOM"
DomainInventoryPlanning Domain = "INVENTORY_PLANNING"
DomainEc2Capacity Domain = "EC2_CAPACITY"
DomainWorkForce Domain = "WORK_FORCE"
DomainWebTraffic Domain = "WEB_TRAFFIC"
DomainMetrics Domain = "METRICS"
)
func (enum Domain) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum Domain) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type EvaluationType string
// Enum values for EvaluationType
const (
EvaluationTypeSummary EvaluationType = "SUMMARY"
EvaluationTypeComputed EvaluationType = "COMPUTED"
)
func (enum EvaluationType) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum EvaluationType) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type FeaturizationMethodName string
// Enum values for FeaturizationMethodName
const (
FeaturizationMethodNameFilling FeaturizationMethodName = "filling"
)
func (enum FeaturizationMethodName) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum FeaturizationMethodName) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type FilterConditionString string
// Enum values for FilterConditionString
const (
FilterConditionStringIs FilterConditionString = "IS"
FilterConditionStringIsNot FilterConditionString = "IS_NOT"
)
func (enum FilterConditionString) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum FilterConditionString) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
}
type ScalingType string
// Enum values for ScalingType
const (
ScalingTypeAuto ScalingType = "Auto"
ScalingTypeLinear ScalingType = "Linear"
ScalingTypeLogarithmic ScalingType = "Logarithmic"
ScalingTypeReverseLogarithmic ScalingType = "ReverseLogarithmic"
)
func (enum ScalingType) MarshalValue() (string, error) {
return string(enum), nil
}
func (enum ScalingType) MarshalValueBuf(b []byte) ([]byte, error) {
b = b[0:0]
return append(b, enum...), nil
} | service/forecast/api_enums.go | 0.732496 | 0.442757 | api_enums.go | starcoder |
package horizon
import (
"fmt"
)
// FindShortestPath Find shortest path between two obserations (not necessary GPS points).
/*
NOTICE: this function snaps point to nearest edges simply (without multiple 'candidates' for each observation)
gpsMeasurements - Two observations
statesRadiusMeters - maximum radius to search nearest polylines
*/
func (matcher *MapMatcher) FindShortestPath(source, target *GPSMeasurement, statesRadiusMeters float64) (MatcherResult, error) {
closestSource, _ := matcher.engine.s2Storage.NearestNeighborsInRadius(source.Point, statesRadiusMeters, 1)
if len(closestSource) == 0 {
// @todo need to handle this case properly...
return MatcherResult{}, fmt.Errorf("Can't find closest edge for 'source' point")
}
if len(closestSource) > 1 {
// actually it's impossible if NearestNeighborsInRadius() has been implemented correctly
return MatcherResult{}, fmt.Errorf("More than 1 edge for 'source' point")
}
closestTarget, _ := matcher.engine.s2Storage.NearestNeighborsInRadius(target.Point, statesRadiusMeters, 1)
if len(closestTarget) == 0 {
// @todo need to handle this case properly...
return MatcherResult{}, fmt.Errorf("Can't find closest edge for 'target' point")
}
if len(closestTarget) > 1 {
// actually it's impossible if NearestNeighborsInRadius() has been implemented correctly
return MatcherResult{}, fmt.Errorf("More than 1 edge for 'target' point")
}
s2polylineSource := matcher.engine.s2Storage.edges[closestSource[0].edgeID]
s2polylineTarget := matcher.engine.s2Storage.edges[closestTarget[0].edgeID]
// Find vertex for 'source' point
m, n := s2polylineSource.Source, s2polylineSource.Target
edgeSource := matcher.engine.edges[m][n]
if edgeSource == nil {
return MatcherResult{}, fmt.Errorf("Edge 'source' not found in graph")
}
_, fractionSource := calcProjection(*edgeSource.Polyline, source.Point)
choosenSourceVertex := n
if fractionSource > 0.5 {
choosenSourceVertex = m
} else {
choosenSourceVertex = n
}
// Find vertex for 'target' point
m, n = s2polylineTarget.Source, s2polylineTarget.Target
edgeTarget := matcher.engine.edges[m][n]
if edgeTarget == nil {
return MatcherResult{}, fmt.Errorf("Edge 'target' not found in graph")
}
_, fractionTarget := calcProjection(*edgeTarget.Polyline, target.Point)
choosenTargetVertex := n
if fractionTarget > 0.5 {
choosenTargetVertex = m
} else {
choosenTargetVertex = n
}
ans, path := matcher.engine.graph.ShortestPath(choosenSourceVertex, choosenTargetVertex)
if ans == -1.0 {
return MatcherResult{}, fmt.Errorf("Path not found")
}
edges := []Edge{}
result := MatcherResult{
Observations: make([]*ObservationResult, 2),
Probability: 100.0,
}
for i := 1; i < len(path); i++ {
s := path[i-1]
t := path[i]
edge := matcher.engine.edges[s][t]
edges = append(edges, *edge)
result.Path = append(result.Path, *edge.Polyline...)
}
result.Observations[0] = &ObservationResult{
Observation: source,
MatchedEdge: edges[0],
}
result.Observations[1] = &ObservationResult{
Observation: target,
MatchedEdge: edges[len(edges)-1],
}
return result, nil
} | map_matcher_simple_sp.go | 0.67822 | 0.537588 | map_matcher_simple_sp.go | starcoder |
package block
import (
"github.com/df-mc/dragonfly/server/block/cube"
"github.com/df-mc/dragonfly/server/block/model"
"github.com/df-mc/dragonfly/server/item"
"github.com/df-mc/dragonfly/server/world"
"github.com/df-mc/dragonfly/server/world/sound"
"github.com/go-gl/mathgl/mgl64"
)
// WoodDoor is a block that can be used as an openable 1x2 barrier.
type WoodDoor struct {
transparent
bass
// Wood is the type of wood of the door. This field must have one of the values found in the material
// package.
Wood WoodType
// Facing is the direction the door is facing.
Facing cube.Direction
// Open is whether the door is open.
Open bool
// Top is whether the block is the top or bottom half of a door
Top bool
// Right is whether the door hinge is on the right side
Right bool
}
// FlammabilityInfo ...
func (d WoodDoor) FlammabilityInfo() FlammabilityInfo {
if !d.Wood.Flammable() {
return newFlammabilityInfo(0, 0, false)
}
return newFlammabilityInfo(0, 0, true)
}
// Model ...
func (d WoodDoor) Model() world.BlockModel {
return model.Door{Facing: d.Facing, Open: d.Open, Right: d.Right}
}
// NeighbourUpdateTick ...
func (d WoodDoor) NeighbourUpdateTick(pos, _ cube.Pos, w *world.World) {
if d.Top {
if _, ok := w.Block(pos.Side(cube.FaceDown)).(WoodDoor); !ok {
w.BreakBlock(pos)
}
} else {
if solid := w.Block(pos.Side(cube.FaceDown)).Model().FaceSolid(pos.Side(cube.FaceDown), cube.FaceUp, w); !solid {
w.BreakBlock(pos)
} else if _, ok := w.Block(pos.Side(cube.FaceUp)).(WoodDoor); !ok {
w.BreakBlock(pos)
}
}
}
// UseOnBlock handles the directional placing of doors
func (d WoodDoor) UseOnBlock(pos cube.Pos, face cube.Face, _ mgl64.Vec3, w *world.World, user item.User, ctx *item.UseContext) bool {
pos, face, used := firstReplaceable(w, pos, face, d)
if !used {
return false
}
if face != cube.FaceUp {
return false
}
if solid := w.Block(pos.Side(cube.FaceDown)).Model().FaceSolid(pos.Side(cube.FaceDown), cube.FaceUp, w); !solid {
return false
}
if !replaceableWith(w, pos.Side(cube.FaceUp), d) {
return false
}
d.Facing = user.Facing()
left := w.Block(pos.Side(d.Facing.RotateLeft().Face()))
right := w.Block(pos.Side(d.Facing.RotateRight().Face()))
if door, ok := left.(WoodDoor); ok {
if door.Wood == d.Wood {
d.Right = true
}
}
// The side the door hinge is on can be affected by the blocks to the left and right of the door. In particular,
// opaque blocks on the right side of the door with transparent blocks on the left side result in a right sided
// door hinge.
if diffuser, ok := right.(LightDiffuser); !ok || diffuser.LightDiffusionLevel() != 0 {
if diffuser, ok := left.(LightDiffuser); ok && diffuser.LightDiffusionLevel() == 0 {
d.Right = true
}
}
ctx.IgnoreAABB = true
place(w, pos, d, user, ctx)
place(w, pos.Side(cube.FaceUp), WoodDoor{Wood: d.Wood, Facing: d.Facing, Top: true, Right: d.Right}, user, ctx)
return placed(ctx)
}
// Activate ...
func (d WoodDoor) Activate(pos cube.Pos, _ cube.Face, w *world.World, _ item.User) bool {
d.Open = !d.Open
w.PlaceBlock(pos, d)
otherPos := pos.Side(cube.Face(boolByte(!d.Top)))
other := w.Block(otherPos)
if door, ok := other.(WoodDoor); ok {
door.Open = d.Open
w.PlaceBlock(otherPos, door)
}
w.PlaySound(pos.Vec3Centre(), sound.Door{})
return true
}
// BreakInfo ...
func (d WoodDoor) BreakInfo() BreakInfo {
return newBreakInfo(3, alwaysHarvestable, axeEffective, oneOf(d))
}
// CanDisplace ...
func (d WoodDoor) CanDisplace(l world.Liquid) bool {
_, water := l.(Water)
return water
}
// SideClosed ...
func (d WoodDoor) SideClosed(cube.Pos, cube.Pos, *world.World) bool {
return false
}
// EncodeItem ...
func (d WoodDoor) EncodeItem() (name string, meta int16) {
switch d.Wood {
case OakWood():
return "minecraft:wooden_door", 0
case SpruceWood():
return "minecraft:spruce_door", 0
case BirchWood():
return "minecraft:birch_door", 0
case JungleWood():
return "minecraft:jungle_door", 0
case AcaciaWood():
return "minecraft:acacia_door", 0
case DarkOakWood():
return "minecraft:dark_oak_door", 0
case CrimsonWood():
return "minecraft:crimson_door", 0
case WarpedWood():
return "minecraft:warped_door", 0
}
panic("invalid wood type")
}
// EncodeBlock ...
func (d WoodDoor) EncodeBlock() (name string, properties map[string]interface{}) {
direction := 3
switch d.Facing {
case cube.South:
direction = 1
case cube.West:
direction = 2
case cube.East:
direction = 0
}
switch d.Wood {
case OakWood():
return "minecraft:wooden_door", map[string]interface{}{"direction": int32(direction), "door_hinge_bit": d.Right, "open_bit": d.Open, "upper_block_bit": d.Top}
default:
return "minecraft:" + d.Wood.String() + "_door", map[string]interface{}{"direction": int32(direction), "door_hinge_bit": d.Right, "open_bit": d.Open, "upper_block_bit": d.Top}
}
}
// allDoors returns a list of all door types
func allDoors() (doors []world.Block) {
for _, w := range WoodTypes() {
for i := cube.Direction(0); i <= 3; i++ {
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: false, Top: false, Right: false})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: false, Top: true, Right: false})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: true, Top: true, Right: false})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: true, Top: false, Right: false})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: false, Top: false, Right: true})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: false, Top: true, Right: true})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: true, Top: true, Right: true})
doors = append(doors, WoodDoor{Wood: w, Facing: i, Open: true, Top: false, Right: true})
}
}
return
} | server/block/wood_door.go | 0.623606 | 0.416559 | wood_door.go | starcoder |
package hermit2
import (
"fmt"
"github.com/ungerik/go3d/vec2"
)
// PointTangent contains a point and a tangent at that point.
// This is a helper sub-struct for T.
type PointTangent struct {
Point vec2.T
Tangent vec2.T
}
// T holds the data to define a hermit spline.
type T struct {
A PointTangent
B PointTangent
}
// Parse parses T from a string. See also String()
func Parse(s string) (r T, err error) {
_, err = fmt.Sscan(s,
&r.A.Point[0], &r.A.Point[1],
&r.A.Tangent[0], &r.A.Tangent[1],
&r.B.Point[0], &r.B.Point[1],
&r.B.Tangent[0], &r.B.Tangent[1],
)
return r, err
}
// String formats T as string. See also Parse().
func (herm *T) String() string {
return fmt.Sprintf("%s %s %s %s",
herm.A.Point.String(), herm.A.Tangent.String(),
herm.B.Point.String(), herm.B.Tangent.String(),
)
}
// Point returns a point on a hermit spline at t (0,1).
func (herm *T) Point(t float32) vec2.T {
return Point(&herm.A.Point, &herm.A.Tangent, &herm.B.Point, &herm.B.Tangent, t)
}
// Tangent returns a tangent on a hermit spline at t (0,1).
func (herm *T) Tangent(t float32) vec2.T {
return Tangent(&herm.A.Point, &herm.A.Tangent, &herm.B.Point, &herm.B.Tangent, t)
}
// Length returns the length of a hermit spline from A.Point to t (0,1).
func (herm *T) Length(t float32) float32 {
return Length(&herm.A.Point, &herm.A.Tangent, &herm.B.Point, &herm.B.Tangent, t)
}
// Point returns a point on a hermit spline at t (0,1).
func Point(pointA, tangentA, pointB, tangentB *vec2.T, t float32) vec2.T {
t2 := t * t
t3 := t2 * t
f := 2*t3 - 3*t2 + 1
result := pointA.Scaled(f)
f = t3 - 2*t2 + t
tAf := tangentA.Scaled(f)
result.Add(&tAf)
f = t3 - t2
tBf := tangentB.Scaled(f)
result.Add(&tBf)
f = -2*t3 + 3*t2
pAf := pointB.Scaled(f)
result.Add(&pAf)
return result
}
// Tangent returns a tangent on a hermit spline at t (0,1).
func Tangent(pointA, tangentA, pointB, tangentB *vec2.T, t float32) vec2.T {
t2 := t * t
t3 := t2 * t
f := 2*t3 - 3*t2
result := pointA.Scaled(f)
f = t3 - 2*t2 + 1
tAf := tangentA.Scaled(f)
result.Add(&tAf)
f = t3 - t2
tBf := tangentB.Scaled(f)
result.Add(&tBf)
f = -2*t3 + 3*t2
pAf := pointB.Scaled(f)
result.Add(&pAf)
return result
}
// Length returns the length of a hermit spline from pointA to t (0,1).
func Length(pointA, tangentA, pointB, tangentB *vec2.T, t float32) float32 {
sqrT := t * t
t1 := sqrT * 0.5
t2 := sqrT * t * 1.0 / 3.0
t3 := sqrT*sqrT + 1.0/4.0
f := 2*t3 - 3*t2 + t
result := pointA.Scaled(f)
f = t3 - 2*t2 + t1
tAf := tangentA.Scaled(f)
result.Add(&tAf)
f = t3 - t2
tBf := tangentB.Scaled(f)
result.Add(&tBf)
f = -2*t3 + 3*t2
pBf := pointB.Scaled(f)
result.Add(&pBf)
return result.Length()
} | hermit2/hermit2.go | 0.841858 | 0.663546 | hermit2.go | starcoder |
package aws
import (
"bytes"
"context"
"fmt"
"net/url"
"sort"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/benthosdev/benthos/v4/internal/batch/policy"
"github.com/benthosdev/benthos/v4/internal/bloblang/field"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component"
"github.com/benthosdev/benthos/v4/internal/component/output"
"github.com/benthosdev/benthos/v4/internal/docs"
sess "github.com/benthosdev/benthos/v4/internal/impl/aws/session"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
"github.com/benthosdev/benthos/v4/internal/metadata"
ooutput "github.com/benthosdev/benthos/v4/internal/old/output"
"github.com/benthosdev/benthos/v4/internal/old/output/writer"
)
func init() {
err := bundle.AllOutputs.Add(bundle.OutputConstructorFromSimple(func(c ooutput.Config, nm bundle.NewManagement) (output.Streamed, error) {
sthree, err := newAmazonS3Writer(c.AWSS3, nm)
if err != nil {
return nil, err
}
w, err := ooutput.NewAsyncWriter("aws_s3", c.AWSS3.MaxInFlight, sthree, nm.Logger(), nm.Metrics())
if err != nil {
return nil, err
}
return ooutput.NewBatcherFromConfig(c.AWSS3.Batching, w, nm, nm.Logger(), nm.Metrics())
}), docs.ComponentSpec{
Name: "aws_s3",
Version: "3.36.0",
Summary: `
Sends message parts as objects to an Amazon S3 bucket. Each object is uploaded
with the path specified with the ` + "`path`" + ` field.`,
Description: output.Description(true, false, `
In order to have a different path for each object you should use function
interpolations described [here](/docs/configuration/interpolation#bloblang-queries), which are
calculated per message of a batch.
### Metadata
Metadata fields on messages will be sent as headers, in order to mutate these values (or remove them) check out the [metadata docs](/docs/configuration/metadata).
### Tags
The tags field allows you to specify key/value pairs to attach to objects as tags, where the values support
[interpolation functions](/docs/configuration/interpolation#bloblang-queries):
`+"```yaml"+`
output:
aws_s3:
bucket: TODO
path: ${!count("files")}-${!timestamp_unix_nano()}.tar.gz
tags:
Key1: Value1
Timestamp: ${!meta("Timestamp")}
`+"```"+`
### Credentials
By default Benthos will use a shared credentials file when connecting to AWS
services. It's also possible to set them explicitly at the component level,
allowing you to transfer data across accounts. You can find out more
[in this document](/docs/guides/cloud/aws).
### Batching
It's common to want to upload messages to S3 as batched archives, the easiest
way to do this is to batch your messages at the output level and join the batch
of messages with an
`+"[`archive`](/docs/components/processors/archive)"+` and/or
`+"[`compress`](/docs/components/processors/compress)"+` processor.
For example, if we wished to upload messages as a .tar.gz archive of documents
we could achieve that with the following config:
`+"```yaml"+`
output:
aws_s3:
bucket: TODO
path: ${!count("files")}-${!timestamp_unix_nano()}.tar.gz
batching:
count: 100
period: 10s
processors:
- archive:
format: tar
- compress:
algorithm: gzip
`+"```"+`
Alternatively, if we wished to upload JSON documents as a single large document
containing an array of objects we can do that with:
`+"```yaml"+`
output:
aws_s3:
bucket: TODO
path: ${!count("files")}-${!timestamp_unix_nano()}.json
batching:
count: 100
processors:
- archive:
format: json_array
`+"```"+``),
Config: docs.FieldComponent().WithChildren(
docs.FieldString("bucket", "The bucket to upload messages to."),
docs.FieldString(
"path", "The path of each message to upload.",
`${!count("files")}-${!timestamp_unix_nano()}.txt`,
`${!meta("kafka_key")}.json`,
`${!json("doc.namespace")}/${!json("doc.id")}.json`,
).IsInterpolated(),
docs.FieldString(
"tags", "Key/value pairs to store with the object as tags.",
map[string]string{
"Key1": "Value1",
"Timestamp": `${!meta("Timestamp")}`,
},
).IsInterpolated().Map(),
docs.FieldString("content_type", "The content type to set for each object.").IsInterpolated(),
docs.FieldString("content_encoding", "An optional content encoding to set for each object.").IsInterpolated().Advanced(),
docs.FieldString("cache_control", "The cache control to set for each object.").Advanced().IsInterpolated(),
docs.FieldString("content_disposition", "The content disposition to set for each object.").Advanced().IsInterpolated(),
docs.FieldString("content_language", "The content language to set for each object.").Advanced().IsInterpolated(),
docs.FieldString("website_redirect_location", "The website redirect location to set for each object.").Advanced().IsInterpolated(),
docs.FieldObject("metadata", "Specify criteria for which metadata values are attached to objects as headers.").WithChildren(metadata.ExcludeFilterFields()...),
docs.FieldString("storage_class", "The storage class to set for each object.").HasOptions(
"STANDARD", "REDUCED_REDUNDANCY", "GLACIER", "STANDARD_IA", "ONEZONE_IA", "INTELLIGENT_TIERING", "DEEP_ARCHIVE",
).IsInterpolated().Advanced(),
docs.FieldString("kms_key_id", "An optional server side encryption key.").Advanced(),
docs.FieldString("server_side_encryption", "An optional server side encryption algorithm.").AtVersion("3.63.0").Advanced(),
docs.FieldBool("force_path_style_urls", "Forces the client API to use path style URLs, which helps when connecting to custom endpoints.").Advanced(),
docs.FieldInt("max_in_flight", "The maximum number of messages to have in flight at a given time. Increase this to improve throughput."),
docs.FieldString("timeout", "The maximum period to wait on an upload before abandoning it and reattempting.").Advanced(),
policy.FieldSpec(),
).WithChildren(sess.FieldSpecs()...).ChildDefaultAndTypesFromStruct(ooutput.NewAmazonS3Config()),
Categories: []string{
"Services",
"AWS",
},
})
if err != nil {
panic(err)
}
}
type s3TagPair struct {
key string
value *field.Expression
}
type amazonS3Writer struct {
conf ooutput.AmazonS3Config
path *field.Expression
tags []s3TagPair
contentType *field.Expression
contentEncoding *field.Expression
cacheControl *field.Expression
contentDisposition *field.Expression
contentLanguage *field.Expression
websiteRedirectLocation *field.Expression
storageClass *field.Expression
metaFilter *metadata.ExcludeFilter
session *session.Session
uploader *s3manager.Uploader
timeout time.Duration
log log.Modular
}
func newAmazonS3Writer(conf ooutput.AmazonS3Config, mgr interop.Manager) (*amazonS3Writer, error) {
var timeout time.Duration
if tout := conf.Timeout; len(tout) > 0 {
var err error
if timeout, err = time.ParseDuration(tout); err != nil {
return nil, fmt.Errorf("failed to parse timeout period string: %v", err)
}
}
a := &amazonS3Writer{
conf: conf,
log: mgr.Logger(),
timeout: timeout,
}
var err error
if a.path, err = mgr.BloblEnvironment().NewField(conf.Path); err != nil {
return nil, fmt.Errorf("failed to parse path expression: %v", err)
}
if a.contentType, err = mgr.BloblEnvironment().NewField(conf.ContentType); err != nil {
return nil, fmt.Errorf("failed to parse content type expression: %v", err)
}
if a.contentEncoding, err = mgr.BloblEnvironment().NewField(conf.ContentEncoding); err != nil {
return nil, fmt.Errorf("failed to parse content encoding expression: %v", err)
}
if a.cacheControl, err = mgr.BloblEnvironment().NewField(conf.CacheControl); err != nil {
return nil, fmt.Errorf("failed to parse cache control expression: %v", err)
}
if a.contentDisposition, err = mgr.BloblEnvironment().NewField(conf.ContentDisposition); err != nil {
return nil, fmt.Errorf("failed to parse content disposition expression: %v", err)
}
if a.contentLanguage, err = mgr.BloblEnvironment().NewField(conf.ContentLanguage); err != nil {
return nil, fmt.Errorf("failed to parse content language expression: %v", err)
}
if a.websiteRedirectLocation, err = mgr.BloblEnvironment().NewField(conf.WebsiteRedirectLocation); err != nil {
return nil, fmt.Errorf("failed to parse website redirect location expression: %v", err)
}
if a.metaFilter, err = conf.Metadata.Filter(); err != nil {
return nil, fmt.Errorf("failed to construct metadata filter: %w", err)
}
if a.storageClass, err = mgr.BloblEnvironment().NewField(conf.StorageClass); err != nil {
return nil, fmt.Errorf("failed to parse storage class expression: %v", err)
}
a.tags = make([]s3TagPair, 0, len(conf.Tags))
for k, v := range conf.Tags {
vExpr, err := mgr.BloblEnvironment().NewField(v)
if err != nil {
return nil, fmt.Errorf("failed to parse tag expression for key '%v': %v", k, err)
}
a.tags = append(a.tags, s3TagPair{
key: k,
value: vExpr,
})
}
sort.Slice(a.tags, func(i, j int) bool {
return a.tags[i].key < a.tags[j].key
})
return a, nil
}
func (a *amazonS3Writer) ConnectWithContext(ctx context.Context) error {
if a.session != nil {
return nil
}
sess, err := a.conf.GetSession(func(c *aws.Config) {
c.S3ForcePathStyle = aws.Bool(a.conf.ForcePathStyleURLs)
})
if err != nil {
return err
}
a.session = sess
a.uploader = s3manager.NewUploader(sess)
a.log.Infof("Uploading message parts as objects to Amazon S3 bucket: %v\n", a.conf.Bucket)
return nil
}
func (a *amazonS3Writer) WriteWithContext(wctx context.Context, msg *message.Batch) error {
if a.session == nil {
return component.ErrNotConnected
}
ctx, cancel := context.WithTimeout(
wctx, a.timeout,
)
defer cancel()
return writer.IterateBatchedSend(msg, func(i int, p *message.Part) error {
metadata := map[string]*string{}
_ = a.metaFilter.Iter(p, func(k, v string) error {
metadata[k] = aws.String(v)
return nil
})
var contentEncoding *string
if ce := a.contentEncoding.String(i, msg); len(ce) > 0 {
contentEncoding = aws.String(ce)
}
var cacheControl *string
if ce := a.cacheControl.String(i, msg); len(ce) > 0 {
cacheControl = aws.String(ce)
}
var contentDisposition *string
if ce := a.contentDisposition.String(i, msg); len(ce) > 0 {
contentDisposition = aws.String(ce)
}
var contentLanguage *string
if ce := a.contentLanguage.String(i, msg); len(ce) > 0 {
contentLanguage = aws.String(ce)
}
var websiteRedirectLocation *string
if ce := a.websiteRedirectLocation.String(i, msg); len(ce) > 0 {
websiteRedirectLocation = aws.String(ce)
}
uploadInput := &s3manager.UploadInput{
Bucket: &a.conf.Bucket,
Key: aws.String(a.path.String(i, msg)),
Body: bytes.NewReader(p.Get()),
ContentType: aws.String(a.contentType.String(i, msg)),
ContentEncoding: contentEncoding,
CacheControl: cacheControl,
ContentDisposition: contentDisposition,
ContentLanguage: contentLanguage,
WebsiteRedirectLocation: websiteRedirectLocation,
StorageClass: aws.String(a.storageClass.String(i, msg)),
Metadata: metadata,
}
// Prepare tags, escaping keys and values to ensure they're valid query string parameters.
if len(a.tags) > 0 {
tags := make([]string, len(a.tags))
for j, pair := range a.tags {
tags[j] = url.QueryEscape(pair.key) + "=" + url.QueryEscape(pair.value.String(i, msg))
}
uploadInput.Tagging = aws.String(strings.Join(tags, "&"))
}
if a.conf.KMSKeyID != "" {
uploadInput.ServerSideEncryption = aws.String("aws:kms")
uploadInput.SSEKMSKeyId = &a.conf.KMSKeyID
}
// NOTE: This overrides the ServerSideEncryption set above. We need this to preserve
// backwards compatibility, where it is allowed to only set kms_key_id in the config and
// the ServerSideEncryption value of "aws:kms" is implied.
if a.conf.ServerSideEncryption != "" {
uploadInput.ServerSideEncryption = &a.conf.ServerSideEncryption
}
if _, err := a.uploader.UploadWithContext(ctx, uploadInput); err != nil {
return err
}
return nil
})
}
func (a *amazonS3Writer) CloseAsync() {
}
func (a *amazonS3Writer) WaitForClose(time.Duration) error {
return nil
} | internal/impl/aws/output_s3.go | 0.658198 | 0.414484 | output_s3.go | starcoder |
package bigtable
import (
"cloud.google.com/go/bigtable"
"github.com/wolffcm/flux/ast"
"github.com/wolffcm/flux/plan"
"github.com/wolffcm/flux/semantic"
"github.com/wolffcm/flux/stdlib/universe"
"time"
)
func AddFilterToNode(queryNode plan.Node, filterNode plan.Node) (plan.Node, bool) {
querySpec := queryNode.ProcedureSpec().(*FromBigtableProcedureSpec)
filterSpec := filterNode.ProcedureSpec().(*universe.FilterProcedureSpec)
switch body := filterSpec.Fn.Fn.Block.Body.(type) {
case *semantic.BinaryExpression:
switch body.Operator {
case ast.EqualOperator:
// Look for a Single Row filter
if isRRowKey(body.Left) {
if name, ok := body.Right.(*semantic.StringLiteral); ok {
querySpec.RowSet = bigtable.SingleRow(name.Value)
return queryNode, true
}
}
// Look for a Family filter
if isRFamily(body.Left) {
if family, ok := body.Right.(*semantic.StringLiteral); ok {
querySpec.Filter = bigtable.ChainFilters(querySpec.Filter, bigtable.FamilyFilter(family.Value))
return queryNode, true
}
}
case ast.GreaterThanEqualOperator:
// Look for an Infinite Range filter (>=)
if isRRowKey(body.Left) {
if name, ok := body.Right.(*semantic.StringLiteral); ok {
querySpec.RowSet = bigtable.InfiniteRange(name.Value)
return queryNode, true
}
}
// Filter from startTime with no upper bound
if isRTime(body.Left) {
if startTime, ok := body.Right.(*semantic.DateTimeLiteral); ok {
querySpec.Filter = bigtable.ChainFilters(querySpec.Filter, bigtable.TimestampRangeFilter(startTime.Value, time.Time{}))
return queryNode, true
}
}
case ast.LessThanOperator:
// Filter to endTime with no lower bound
if isRTime(body.Left) {
if endTime, ok := body.Right.(*semantic.DateTimeLiteral); ok {
querySpec.Filter = bigtable.ChainFilters(querySpec.Filter, bigtable.TimestampRangeFilter(time.Time{}, endTime.Value))
return queryNode, true
}
}
}
case *semantic.LogicalExpression:
// Look for a Range filter
if begin, end, ok := getRange(body); ok {
querySpec.RowSet = bigtable.NewRange(begin, end)
return queryNode, true
}
// Look for Timestamp Range filter
if startTime, endTime, ok := getTimeRange(body); ok {
querySpec.Filter = bigtable.ChainFilters(querySpec.Filter, bigtable.TimestampRangeFilter(startTime, endTime))
return queryNode, true
}
// Look for a Prefix filter
case *semantic.CallExpression:
if prefix, ok := getPrefix(body); ok {
querySpec.RowSet = bigtable.PrefixRange(prefix)
return queryNode, true
}
}
return filterNode, false
}
func AddLimitToNode(queryNode plan.Node, limitNode plan.Node) (plan.Node, bool) {
querySpec := queryNode.ProcedureSpec().(*FromBigtableProcedureSpec)
limitSpec := limitNode.ProcedureSpec().(*universe.LimitProcedureSpec)
if limitSpec.Offset != 0 {
return limitNode, false
}
querySpec.ReadOptions = append(querySpec.ReadOptions, bigtable.LimitRows(limitSpec.N))
return queryNode, true
}
func getRange(logic *semantic.LogicalExpression) (string, string, bool) {
if logic.Operator == ast.AndOperator {
if left, ok := logic.Left.(*semantic.BinaryExpression); ok {
if right, ok := logic.Right.(*semantic.BinaryExpression); ok {
if isRRowKey(left.Left) && isRRowKey(right.Left) {
if left.Operator == ast.GreaterThanEqualOperator && right.Operator == ast.LessThanOperator {
if leftVal, ok := left.Right.(*semantic.StringLiteral); ok {
if rightVal, ok := right.Right.(*semantic.StringLiteral); ok {
return leftVal.Value, rightVal.Value, ok
}
}
} else if left.Operator == ast.LessThanOperator && right.Operator == ast.GreaterThanEqualOperator {
if leftVal, ok := left.Right.(*semantic.StringLiteral); ok {
if rightVal, ok := right.Right.(*semantic.StringLiteral); ok {
return rightVal.Value, leftVal.Value, ok
}
}
}
}
}
}
}
return "", "", false
}
func getTimeRange(logic *semantic.LogicalExpression) (time.Time, time.Time, bool) {
if logic.Operator == ast.AndOperator {
if left, ok := logic.Left.(*semantic.BinaryExpression); ok {
if right, ok := logic.Right.(*semantic.BinaryExpression); ok {
if isRTime(left.Left) && isRTime(right.Left) {
if left.Operator == ast.GreaterThanEqualOperator && right.Operator == ast.LessThanOperator {
if leftVal, ok := left.Right.(*semantic.DateTimeLiteral); ok {
if rightVal, ok := right.Right.(*semantic.DateTimeLiteral); ok {
return leftVal.Value, rightVal.Value, ok
}
}
} else if left.Operator == ast.LessThanOperator && right.Operator == ast.GreaterThanEqualOperator {
if leftVal, ok := left.Right.(*semantic.DateTimeLiteral); ok {
if rightVal, ok := right.Right.(*semantic.DateTimeLiteral); ok {
return rightVal.Value, leftVal.Value, ok
}
}
}
}
}
}
}
return time.Time{}, time.Time{}, false
}
func getPrefix(callExpression *semantic.CallExpression) (string, bool) {
if callee, ok := callExpression.Callee.(*semantic.MemberExpression); ok && callee.Property == "hasPrefix" {
var rowKey bool
prefix := ""
for _, prop := range callExpression.Arguments.Properties {
if key, ok := prop.Key.(*semantic.Identifier); ok {
if isRRowKey(prop.Value) {
rowKey = true
} else if key.Name == "prefix" {
if val, ok := prop.Value.(*semantic.StringLiteral); ok {
prefix = val.Value
}
}
}
}
return prefix, prefix != "" && rowKey
}
return "", false
}
// helper function to identify `r.rowKey`
func isRRowKey(i interface{}) bool {
if exp, ok := i.(*semantic.MemberExpression); ok {
if obj, ok := exp.Object.(*semantic.IdentifierExpression); ok && obj.Name == "r" {
return exp.Property == "rowKey"
}
}
return false
}
// helper function to identify `r.family`
func isRFamily(i interface{}) bool {
if exp, ok := i.(*semantic.MemberExpression); ok {
if obj, ok := exp.Object.(*semantic.IdentifierExpression); ok && obj.Name == "r" {
return exp.Property == "family"
}
}
return false
}
// helper function to identify `r._time`
func isRTime(i interface{}) bool {
if exp, ok := i.(*semantic.MemberExpression); ok {
if obj, ok := exp.Object.(*semantic.IdentifierExpression); ok && obj.Name == "r" {
return exp.Property == "_time"
}
}
return false
} | stdlib/experimental/bigtable/bigtable_rewrite.go | 0.595728 | 0.446193 | bigtable_rewrite.go | starcoder |
package types
import (
"bytes"
"fmt"
"io"
"strings"
"github.com/MakeNowJust/heredoc"
"github.com/spf13/cobra"
"k8s.io/kubernetes/pkg/kubectl/cmd/templates"
kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/kubectl/genericclioptions"
)
type concept struct {
Name string
Abbreviation string
Description string
}
var concepts = []concept{
{
"Containers",
"",
heredoc.Doc(`
A definition of how to run one or more processes inside of a portable Linux
environment. Containers are started from an Image and are usually isolated
from other containers on the same machine.
`),
},
{
"Image",
"",
heredoc.Doc(`
A layered Linux filesystem that contains application code, dependencies,
and any supporting operating system libraries. An image is identified by
a name that can be local to the current cluster or point to a remote Docker
registry (a storage server for images).
`),
}, {
"Pods",
"pod",
heredoc.Doc(`
A set of one or more containers that are deployed onto a Node together and
share a unique IP and Volumes (persistent storage). Pods also define the
security and runtime policy for each container.
`),
}, {
"Labels",
"",
heredoc.Doc(`
Labels are key value pairs that can be assigned to any resource in the
system for grouping and selection. Many resources use labels to identify
sets of other resources.
`),
}, {
"Volumes",
"",
heredoc.Doc(`
Containers are not persistent by default - on restart their contents are
cleared. Volumes are mounted filesystems available to Pods and their
containers which may be backed by a number of host-local or network
attached storage endpoints. The simplest volume type is EmptyDir, which
is a temporary directory on a single machine. Administrators may also
allow you to request a Persistent Volume that is automatically attached
to your pods.
`),
}, {
"Nodes",
"node",
heredoc.Doc(`
Machines set up in the cluster to run containers. Usually managed
by administrators and not by end users.
`),
}, {
"Services",
"svc",
heredoc.Doc(`
A name representing a set of pods (or external servers) that are
accessed by other pods. The service gets an IP and a DNS name, and can be
exposed externally to the cluster via a port or a Route. It's also easy
to consume services from pods because an environment variable with the
name <SERVICE>_HOST is automatically injected into other pods.
`),
}, {
"Routes",
"route",
heredoc.Doc(`
A route is an external DNS entry (either a top level domain or a
dynamically allocated name) that is created to point to a service so that
it can be accessed outside the cluster. The administrator may configure
one or more Routers to handle those routes, typically through an Apache
or HAProxy load balancer / proxy.
`),
},
{
"Replication Controllers",
"rc",
heredoc.Doc(`
A replication controller maintains a specific number of pods based on a
template that match a set of labels. If pods are deleted (because the
node they run on is taken out of service) the controller creates a new
copy of that pod. A replication controller is most commonly used to
represent a single deployment of part of an application based on a
built image.
`),
},
{
"Deployment Configuration",
"dc",
heredoc.Doc(`
Defines the template for a pod and manages deploying new images or
configuration changes whenever those change. A single deployment
configuration is usually analogous to a single micro-service. Can support
many different deployment patterns, including full restart, customizable
rolling updates, and fully custom behaviors, as well as pre- and post-
hooks. Each deployment is represented as a replication controller.
`),
},
{
"Build Configuration",
"bc",
heredoc.Doc(`
Contains a description of how to build source code and a base image into a
new image - the primary method for delivering changes to your application.
Builds can be source based and use builder images for common languages like
Java, PHP, Ruby, or Python, or be Docker based and create builds from a
Dockerfile. Each build configuration has web-hooks and can be triggered
automatically by changes to their base images.
`),
},
{
"Builds",
"build",
heredoc.Doc(`
Builds create a new image from source code, other images, Dockerfiles, or
binary input. A build is run inside of a container and has the same
restrictions normal pods have. A build usually results in an image pushed
to a Docker registry, but you can also choose to run a post-build test that
does not push an image.
`),
},
{
"Image Streams and Image Stream Tags",
"is,istag",
heredoc.Doc(`
An image stream groups sets of related images under tags - analogous to a
branch in a source code repository. Each image stream may have one or
more tags (the default tag is called "latest") and those tags may point
at external Docker registries, at other tags in the same stream, or be
controlled to directly point at known images. In addition, images can be
pushed to an image stream tag directly via the integrated Docker
registry.
`),
},
{
"Secrets",
"secret",
heredoc.Doc(`
The secret resource can hold text or binary secrets for delivery into
your pods. By default, every container is given a single secret which
contains a token for accessing the API (with limited privileges) at
/var/run/secrets/kubernetes.io/serviceaccount. You can create new
secrets and mount them in your own pods, as well as reference secrets
from builds (for connecting to remote servers) or use them to import
remote images into an image stream.
`),
},
{
"Projects",
"project",
heredoc.Doc(`
All of the above resources (except Nodes) exist inside of a project.
Projects have a list of members and their roles, like viewer, editor,
or admin, as well as a set of security controls on the running pods, and
limits on how many resources the project can use. The names of each
resource are unique within a project. Developers may request projects
be created, but administrators control the resources allocated to
projects.
`),
},
}
func writeConcept(w io.Writer, c concept) {
fmt.Fprintf(w, "* %s", c.Name)
if len(c.Abbreviation) > 0 {
fmt.Fprintf(w, " [%s]", c.Abbreviation)
}
fmt.Fprintln(w, ":")
for _, s := range strings.Split(c.Description, "\n") {
fmt.Fprintf(w, " %s\n", s)
}
}
var (
typesLong = templates.LongDesc(`
Concepts and Types
Kubernetes and OpenShift help developers and operators build, test, and deploy
applications in a containerized cloud environment. Applications may be composed
of all of the components below, although most developers will be concerned with
Services, Deployments, and Builds for delivering changes.
Concepts:
%[1]sFor more, see https://docs.openshift.com`)
typesExample = templates.Examples(`
# View all projects you have access to
%[1]s get projects
# See a list of all services in the current project
%[1]s get svc
# Describe a deployment configuration in detail
%[1]s describe dc mydeploymentconfig
# Show the images tagged into an image stream
%[1]s describe is ruby-centos7`)
)
func NewCmdTypes(fullName string, f kcmdutil.Factory, streams genericclioptions.IOStreams) *cobra.Command {
buf := &bytes.Buffer{}
for _, c := range concepts {
writeConcept(buf, c)
}
cmd := &cobra.Command{
Use: "types",
Short: "An introduction to concepts and types",
Long: fmt.Sprintf(typesLong, buf.String()),
Example: fmt.Sprintf(typesExample, fullName),
Run: kcmdutil.DefaultSubCommandRun(streams.Out),
}
return cmd
} | pkg/oc/cli/types/types.go | 0.636353 | 0.550849 | types.go | starcoder |
package unit
import (
"fmt"
"math"
"reflect"
"strings"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
)
type NumericDeltaOption struct {
Value float64
}
type SameTypeOption struct {
Value bool
}
type SamePointerOption struct {
Value bool
}
type UseEqualMethodOption struct {
Value bool
}
type IgnoreUnexportedOption struct {
Value interface{}
}
type IgnoreFieldsOption struct {
Type interface{}
Fields []string
}
type EqualComparator struct {
numericDelta float64
sameType bool
samePointer bool
useEqualMethod bool
ignoreUnexported []interface{}
ignoresFields []IgnoreFieldsOption
}
func NewEqualComparator(options ...interface{}) *EqualComparator {
result := &EqualComparator{
ignoreUnexported: []interface{}{},
ignoresFields: []IgnoreFieldsOption{},
}
for i, option := range options {
switch option.(type) {
case NumericDeltaOption:
typedOption := option.(NumericDeltaOption)
if typedOption.Value < 0 {
err := NewErrorf(
"Variable 'options[%d].Value' must be greater or equal to 0, actual: %f",
i,
typedOption.Value,
)
panic(err)
}
result.numericDelta = option.(NumericDeltaOption).Value
case SameTypeOption:
result.sameType = option.(SameTypeOption).Value
case SamePointerOption:
result.samePointer = option.(SamePointerOption).Value
case UseEqualMethodOption:
result.useEqualMethod = option.(UseEqualMethodOption).Value
case IgnoreUnexportedOption:
typedOption := option.(IgnoreUnexportedOption)
if reflect.ValueOf(typedOption.Value).Kind() != reflect.Struct {
err := NewInvalidKindError(
fmt.Sprintf("options[%d].Value", i),
typedOption.Value,
reflect.Struct,
)
panic(err)
}
result.ignoreUnexported = append(result.ignoreUnexported, typedOption.Value)
case IgnoreFieldsOption:
typedOption := option.(IgnoreFieldsOption)
typedOptionTypeValue := reflect.ValueOf(typedOption.Type)
if typedOptionTypeValue.Kind() != reflect.Struct {
err := NewInvalidKindError(fmt.Sprintf("options[%d].Type", i), typedOption.Type, reflect.Struct)
panic(err)
}
typedOptionTypeValueType := typedOptionTypeValue.Type()
for j, fieldName := range typedOption.Fields {
if _, ok := typedOptionTypeValueType.FieldByName(fieldName); !ok {
err := NewErrorf(
"Variable 'options[%d].Fields[%d]' contains unknown field name: '%s'",
i,
j,
fieldName,
)
panic(err)
}
}
result.ignoresFields = append(result.ignoresFields, typedOption)
default:
err := NewErrorf("Variable 'options[%d]' has unknown type: %T", i, option)
panic(err)
}
}
return result
}
func (c *EqualComparator) Compare(x interface{}, y interface{}) bool {
return cmp.Equal(x, y, c.buildCmpOptions(x, y)...)
}
func (c *EqualComparator) Diff(x interface{}, y interface{}) string {
return strings.Replace(cmp.Diff(x, y, c.buildCmpOptions(x, y)...), "\u00a0", " ", -1)
}
func (c *EqualComparator) compareAny(x interface{}, y interface{}) bool {
if c.sameType && reflect.TypeOf(x) != reflect.TypeOf(y) {
return false
}
if c.useEqualMethod {
if xEqualer, ok := x.(Equaler); ok {
return xEqualer.Equal(y)
}
if yEqualer, ok := y.(Equaler); ok {
return yEqualer.Equal(x)
}
}
xValue := reflect.ValueOf(x)
yValue := reflect.ValueOf(y)
switch xValue.Kind() {
case reflect.Bool:
if yValue.Kind() != reflect.Bool {
return false
}
return xValue.Bool() == yValue.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
switch yValue.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return math.Abs(float64(xValue.Int())-float64(yValue.Int())) <= c.numericDelta
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return math.Abs(float64(xValue.Int())-float64(yValue.Uint())) <= c.numericDelta
case reflect.Float32, reflect.Float64:
return math.Abs(float64(xValue.Int())-yValue.Float()) <= c.numericDelta
default:
return false
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
switch yValue.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return math.Abs(float64(xValue.Uint())-float64(yValue.Int())) <= c.numericDelta
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return math.Abs(float64(xValue.Uint())-float64(yValue.Uint())) <= c.numericDelta
case reflect.Float32, reflect.Float64:
return math.Abs(float64(xValue.Uint())-yValue.Float()) <= c.numericDelta
default:
return false
}
case reflect.Float32, reflect.Float64:
switch yValue.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return math.Abs(xValue.Float()-float64(yValue.Int())) <= c.numericDelta
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return math.Abs(xValue.Float()-float64(yValue.Uint())) <= c.numericDelta
case reflect.Float32, reflect.Float64:
return math.Abs(xValue.Float()-yValue.Float()) <= c.numericDelta
default:
return false
}
case reflect.Uintptr:
if yValue.Kind() != reflect.Uintptr {
return false
}
return xValue.Uint() == yValue.Uint()
case reflect.Complex64, reflect.Complex128:
switch yValue.Kind() {
case reflect.Complex64, reflect.Complex128:
return xValue.Complex() == yValue.Complex()
}
return false
case reflect.String:
if yValue.Kind() != reflect.String {
return false
}
return xValue.String() == yValue.String()
case reflect.Func:
if yValue.Kind() != reflect.Func {
return false
}
return xValue.Pointer() == yValue.Pointer()
case reflect.Ptr:
if yValue.Kind() != reflect.Ptr {
return false
}
if c.samePointer {
return xValue.Pointer() == yValue.Pointer()
}
}
return false
}
func (c *EqualComparator) buildCmpOptions(x, y interface{}) []cmp.Option {
return []cmp.Option{
c.buildEqualComparerOption(),
c.buildAllowAllUnexported(x, y),
c.buildIgnoreAllUnexported(),
c.buildIgnoreFieldOption(),
}
}
func (c *EqualComparator) buildIgnoreFieldOption() cmp.Option {
result := cmp.Options{}
for _, ignoreFields := range c.ignoresFields {
result = append(result, cmpopts.IgnoreFields(ignoreFields.Type, ignoreFields.Fields...))
}
return result
}
func (c *EqualComparator) buildEqualComparerOption() cmp.Option {
return cmp.FilterValues(
func(x, y interface{}) bool {
xKind := reflect.ValueOf(x).Kind()
yKind := reflect.ValueOf(y).Kind()
switch xKind {
case
reflect.Bool,
reflect.Int,
reflect.Int8,
reflect.Int16,
reflect.Int32,
reflect.Int64,
reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Uintptr,
reflect.Float32,
reflect.Float64,
reflect.Complex64,
reflect.Complex128,
reflect.String,
reflect.Func:
switch yKind {
case
reflect.Bool,
reflect.Int,
reflect.Int8,
reflect.Int16,
reflect.Int32,
reflect.Int64,
reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Uintptr,
reflect.Float32,
reflect.Float64,
reflect.Complex64,
reflect.Complex128,
reflect.String,
reflect.Func:
return true
}
}
if c.samePointer && xKind == reflect.Ptr && yKind == reflect.Ptr {
return true
}
if c.useEqualMethod {
if _, ok := x.(Equaler); ok {
return true
}
if _, ok := y.(Equaler); ok {
return true
}
}
return false
},
cmp.Comparer(c.compareAny),
)
}
func (c *EqualComparator) buildAllowAllUnexported(allowedTypes ...interface{}) cmp.Option {
reflectTypes := make(map[reflect.Type]struct{})
for _, allowedType := range allowedTypes {
c.fetchUsedTypes(reflect.ValueOf(allowedType), reflectTypes)
}
for _, ignoredType := range c.ignoreUnexported {
delete(reflectTypes, reflect.TypeOf(ignoredType))
}
var unexported []interface{}
for reflectType := range reflectTypes {
unexported = append(unexported, reflect.New(reflectType).Elem().Interface())
}
return cmp.AllowUnexported(unexported...)
}
func (c *EqualComparator) buildIgnoreAllUnexported() cmp.Option {
return cmpopts.IgnoreUnexported(c.ignoreUnexported...)
}
func (c *EqualComparator) fetchUsedTypes(value reflect.Value, result map[reflect.Type]struct{}) {
if !value.IsValid() {
return
}
switch value.Kind() {
case reflect.Ptr:
if !value.IsNil() {
c.fetchUsedTypes(value.Elem(), result)
}
case reflect.Interface:
if !value.IsNil() {
c.fetchUsedTypes(value.Elem(), result)
}
case reflect.Slice, reflect.Array:
for i := 0; i < value.Len(); i++ {
c.fetchUsedTypes(value.Index(i), result)
}
case reflect.Map:
for _, k := range value.MapKeys() {
c.fetchUsedTypes(value.MapIndex(k), result)
}
case reflect.Struct:
if _, ok := result[value.Type()]; ok {
return
}
result[value.Type()] = struct{}{}
for i := 0; i < value.NumField(); i++ {
c.fetchUsedTypes(value.Field(i), result)
}
}
} | unit/equal_comparator.go | 0.594787 | 0.456289 | equal_comparator.go | starcoder |
package datadog
import (
"encoding/json"
"fmt"
)
// Series A metric to submit to Datadog. See [Datadog metrics](https://docs.datadoghq.com/developers/metrics/#custom-metrics-properties).
type Series struct {
// The name of the host that produced the metric.
Host *string `json:"host,omitempty"`
// If the type of the metric is rate or count, define the corresponding interval.
Interval NullableInt64 `json:"interval,omitempty"`
// The name of the timeseries.
Metric string `json:"metric"`
// Points relating to a metric. All points must be tuples with timestamp and a scalar value (cannot be a string). Timestamps should be in POSIX time in seconds, and cannot be more than ten minutes in the future or more than one hour in the past.
Points [][]*float64 `json:"points"`
// A list of tags associated with the metric.
Tags *[]string `json:"tags,omitempty"`
// The type of the metric either `count`, `gauge`, or `rate`.
Type *string `json:"type,omitempty"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
UnparsedObject map[string]interface{} `json:-`
}
// NewSeries instantiates a new Series object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewSeries(metric string, points [][]*float64) *Series {
this := Series{}
this.Metric = metric
this.Points = points
var type_ string = "gauge"
this.Type = &type_
return &this
}
// NewSeriesWithDefaults instantiates a new Series object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewSeriesWithDefaults() *Series {
this := Series{}
var type_ string = "gauge"
this.Type = &type_
return &this
}
// GetHost returns the Host field value if set, zero value otherwise.
func (o *Series) GetHost() string {
if o == nil || o.Host == nil {
var ret string
return ret
}
return *o.Host
}
// GetHostOk returns a tuple with the Host field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Series) GetHostOk() (*string, bool) {
if o == nil || o.Host == nil {
return nil, false
}
return o.Host, true
}
// HasHost returns a boolean if a field has been set.
func (o *Series) HasHost() bool {
if o != nil && o.Host != nil {
return true
}
return false
}
// SetHost gets a reference to the given string and assigns it to the Host field.
func (o *Series) SetHost(v string) {
o.Host = &v
}
// GetInterval returns the Interval field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *Series) GetInterval() int64 {
if o == nil || o.Interval.Get() == nil {
var ret int64
return ret
}
return *o.Interval.Get()
}
// GetIntervalOk returns a tuple with the Interval field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *Series) GetIntervalOk() (*int64, bool) {
if o == nil {
return nil, false
}
return o.Interval.Get(), o.Interval.IsSet()
}
// HasInterval returns a boolean if a field has been set.
func (o *Series) HasInterval() bool {
if o != nil && o.Interval.IsSet() {
return true
}
return false
}
// SetInterval gets a reference to the given NullableInt64 and assigns it to the Interval field.
func (o *Series) SetInterval(v int64) {
o.Interval.Set(&v)
}
// SetIntervalNil sets the value for Interval to be an explicit nil
func (o *Series) SetIntervalNil() {
o.Interval.Set(nil)
}
// UnsetInterval ensures that no value is present for Interval, not even an explicit nil
func (o *Series) UnsetInterval() {
o.Interval.Unset()
}
// GetMetric returns the Metric field value
func (o *Series) GetMetric() string {
if o == nil {
var ret string
return ret
}
return o.Metric
}
// GetMetricOk returns a tuple with the Metric field value
// and a boolean to check if the value has been set.
func (o *Series) GetMetricOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Metric, true
}
// SetMetric sets field value
func (o *Series) SetMetric(v string) {
o.Metric = v
}
// GetPoints returns the Points field value
func (o *Series) GetPoints() [][]*float64 {
if o == nil {
var ret [][]*float64
return ret
}
return o.Points
}
// GetPointsOk returns a tuple with the Points field value
// and a boolean to check if the value has been set.
func (o *Series) GetPointsOk() (*[][]*float64, bool) {
if o == nil {
return nil, false
}
return &o.Points, true
}
// SetPoints sets field value
func (o *Series) SetPoints(v [][]*float64) {
o.Points = v
}
// GetTags returns the Tags field value if set, zero value otherwise.
func (o *Series) GetTags() []string {
if o == nil || o.Tags == nil {
var ret []string
return ret
}
return *o.Tags
}
// GetTagsOk returns a tuple with the Tags field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Series) GetTagsOk() (*[]string, bool) {
if o == nil || o.Tags == nil {
return nil, false
}
return o.Tags, true
}
// HasTags returns a boolean if a field has been set.
func (o *Series) HasTags() bool {
if o != nil && o.Tags != nil {
return true
}
return false
}
// SetTags gets a reference to the given []string and assigns it to the Tags field.
func (o *Series) SetTags(v []string) {
o.Tags = &v
}
// GetType returns the Type field value if set, zero value otherwise.
func (o *Series) GetType() string {
if o == nil || o.Type == nil {
var ret string
return ret
}
return *o.Type
}
// GetTypeOk returns a tuple with the Type field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Series) GetTypeOk() (*string, bool) {
if o == nil || o.Type == nil {
return nil, false
}
return o.Type, true
}
// HasType returns a boolean if a field has been set.
func (o *Series) HasType() bool {
if o != nil && o.Type != nil {
return true
}
return false
}
// SetType gets a reference to the given string and assigns it to the Type field.
func (o *Series) SetType(v string) {
o.Type = &v
}
func (o Series) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.UnparsedObject != nil {
return json.Marshal(o.UnparsedObject)
}
if o.Host != nil {
toSerialize["host"] = o.Host
}
if o.Interval.IsSet() {
toSerialize["interval"] = o.Interval.Get()
}
if true {
toSerialize["metric"] = o.Metric
}
if true {
toSerialize["points"] = o.Points
}
if o.Tags != nil {
toSerialize["tags"] = o.Tags
}
if o.Type != nil {
toSerialize["type"] = o.Type
}
return json.Marshal(toSerialize)
}
func (o *Series) UnmarshalJSON(bytes []byte) (err error) {
raw := map[string]interface{}{}
required := struct {
Metric *string `json:"metric"`
Points *[][]*float64 `json:"points"`
}{}
all := struct {
Host *string `json:"host,omitempty"`
Interval NullableInt64 `json:"interval,omitempty"`
Metric string `json:"metric"`
Points [][]*float64 `json:"points"`
Tags *[]string `json:"tags,omitempty"`
Type *string `json:"type,omitempty"`
}{}
err = json.Unmarshal(bytes, &required)
if err != nil {
return err
}
if required.Metric == nil {
return fmt.Errorf("Required field metric missing")
}
if required.Points == nil {
return fmt.Errorf("Required field points missing")
}
err = json.Unmarshal(bytes, &all)
if err != nil {
err = json.Unmarshal(bytes, &raw)
if err != nil {
return err
}
o.UnparsedObject = raw
return nil
}
o.Host = all.Host
o.Interval = all.Interval
o.Metric = all.Metric
o.Points = all.Points
o.Tags = all.Tags
o.Type = all.Type
return nil
} | api/v1/datadog/model_series.go | 0.876291 | 0.468122 | model_series.go | starcoder |
package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"time"
"github.com/arran4/golang-ical"
"github.com/avast/retry-go"
"github.com/kelvins/sunrisesunset"
)
var version = "<dev>"
// ProductID identifies this software in User-Agents and iCal fields.
const ProductID = "github.com/cdzombak/wxcal"
// CalendarForecastPeriod represents one period (daytime or nighttime) of a forecast entry on the calendar.
type CalendarForecastPeriod struct {
IsPopulated bool
Name string
ShortForecast string
DetailedForecast string
Temperature int
TemperatureUnit string
}
// SummaryLine returns a brief, <1 line summary of the forecast period.
func (p CalendarForecastPeriod) SummaryLine() string {
if !p.IsPopulated {
return ""
}
sf := strings.Replace(p.ShortForecast, "Slight ", "", -1)
sf = strings.Replace(sf, " then ", "; ", -1)
sf = strings.Replace(sf, "Areas Of ", "", -1)
return fmt.Sprintf("%dº%s %s", p.Temperature, p.TemperatureUnit, sf)
}
// CalendarForecastDay represents one day's forecast entry on the calendar.
type CalendarForecastDay struct {
Start time.Time
DaytimePeriod CalendarForecastPeriod
NighttimePeriod CalendarForecastPeriod
Sunrise time.Time
Sunset time.Time
}
// SummaryLine returns a brief, 1 line summary of the day's forecast.
func (d CalendarForecastDay) SummaryLine() string {
daySummary := d.DaytimePeriod.SummaryLine()
nightSummary := d.NighttimePeriod.SummaryLine()
if len(daySummary) > 0 && len(nightSummary) > 0 {
return fmt.Sprintf("%s | %s", daySummary, nightSummary)
} else if len(nightSummary) > 0 {
return fmt.Sprintf("%s: %s", d.NighttimePeriod.Name, nightSummary)
} else {
return daySummary
}
}
// DetailedForecast returns a more detailed version of the day's forecast.
func (d CalendarForecastDay) DetailedForecast() string {
if d.DaytimePeriod.IsPopulated && d.NighttimePeriod.IsPopulated {
return fmt.Sprintf("%s\\n\\nOvernight: %s", d.DaytimePeriod.DetailedForecast, d.NighttimePeriod.DetailedForecast)
} else if d.NighttimePeriod.IsPopulated {
return fmt.Sprintf("%s: %s", d.NighttimePeriod.Name, d.NighttimePeriod.DetailedForecast)
} else {
return d.DaytimePeriod.DetailedForecast
}
}
// DatesEqual returns true if the two given times are on the same day; false otherwise.
func DatesEqual(date1, date2 time.Time) bool {
y1, m1, d1 := date1.Date()
y2, m2, d2 := date2.Date()
return y1 == y2 && m1 == m2 && d1 == d2
}
// CalendarForecast represents a collection of daily forecasts, to be rendered to calendar entries.
type CalendarForecast []CalendarForecastDay
// IndexForTime returns the index of the CalendarForecastDay for the given date, or -1 if the forecast calendar
// does not yet include the given date. The boolean return value indicates whether the date was found.
func (cf CalendarForecast) IndexForTime(t time.Time) (int, bool) {
for i, p := range cf {
if DatesEqual(p.Start, t) {
return i, true
}
}
return -1, false
}
func buildCalendarID(calLocation string, calDomain string, lat float64, lon float64) string {
calLocation = strings.Replace(calLocation, " ", "-", -1)
calLocation = strings.Replace(calLocation, ",", "", -1)
return fmt.Sprintf("%s{%.2f,%.2f}@%s",
strings.ToLower(calLocation),
lat, lon,
strings.ToLower(calDomain))
}
func mustInt(x json.Number) int {
xi64, err := x.Int64()
if err != nil {
panic(err)
}
return int(xi64)
}
// Main implements the wxcal program.
func Main(calLocation string, calDomain string, lat float64, lon float64, evtTitlePrefix string, icalOutfile string) error {
var forecastResp *ForecastResponse
err := retry.Do(
func() (err error) {
forecastResp, err = GetForecast(lat, lon)
return
},
retry.Attempts(3),
retry.Delay(20*time.Second),
)
if err != nil {
return fmt.Errorf("failed to get forecast: %w", err)
}
// build a structure summarizing the data as we'll use it to build a calendar:
cf := CalendarForecast{}
for _, forecastPeriod := range forecastResp.Properties.ForecastPeriods {
calDay := CalendarForecastDay{}
i, existed := cf.IndexForTime(forecastPeriod.StartTime)
if existed {
calDay = cf[i]
}
calDay.Start = time.Date(forecastPeriod.StartTime.Year(), forecastPeriod.StartTime.Month(), forecastPeriod.StartTime.Day(), 0, 0, 0, 0, forecastPeriod.StartTime.Location())
calPeriod := CalendarForecastPeriod{
IsPopulated: true,
Name: forecastPeriod.Name,
ShortForecast: forecastPeriod.ShortForecast,
DetailedForecast: forecastPeriod.DetailedForecast,
Temperature: mustInt(forecastPeriod.Temperature),
TemperatureUnit: forecastPeriod.TemperatureUnit,
}
if forecastPeriod.Daytime {
calDay.DaytimePeriod = calPeriod
} else {
calDay.NighttimePeriod = calPeriod
}
if calDay.Sunrise.IsZero() || calDay.Sunset.IsZero(){
_, offsetSec := forecastPeriod.StartTime.Zone()
p := sunrisesunset.Parameters{
Latitude: lat,
Longitude: lon,
UtcOffset: float64(offsetSec)/3600.0,
Date: time.Date(forecastPeriod.StartTime.Year(), forecastPeriod.StartTime.Month(), forecastPeriod.StartTime.Day(), 0, 0, 0, 0, time.UTC),
}
sunrise, sunset, err := p.GetSunriseSunset()
if err == nil {
calDay.Sunrise = sunrise
calDay.Sunset = sunset
} else {
log.Printf("error calculating sunrise/sunset: %s", err)
}
}
if existed {
cf[i] = calDay
} else {
cf = append(cf, calDay)
}
}
forecastLink := fmt.Sprintf("https://forecast.weather.gov/MapClick.php?textField1=%.2f&textField2=%.2f", lat, lon)
calID := buildCalendarID(calLocation, calDomain, lat, lon)
cal := ics.NewCalendar()
cal.SetName(fmt.Sprintf("%s Weather", calLocation))
cal.SetXWRCalName(fmt.Sprintf("%s Weather", calLocation))
cal.SetDescription(fmt.Sprintf("Weather forecast for the next week in %s, provided by weather.gov.", calLocation))
cal.SetXWRCalDesc(fmt.Sprintf("Weather forecast for the next week in %s, provided by weather.gov.", calLocation))
cal.SetLastModified(forecastResp.Updated)
cal.SetMethod(ics.MethodPublish)
cal.SetProductId(fmt.Sprintf("-//%s//EN", ProductID))
cal.SetVersion("2.0")
cal.SetXPublishedTTL("PT1H")
cal.SetRefreshInterval("PT1H")
for _, d := range cf {
event := cal.AddEvent(fmt.Sprintf("%s-%s", d.Start.Format("20060102"), calID))
event.SetDtStampTime(time.Now())
event.SetModifiedAt(forecastResp.Updated)
event.SetAllDayStartAt(d.Start)
event.SetAllDayEndAt(d.Start) // one-day all-day event ends the same day it started
event.SetLocation(calLocation)
event.SetURL(forecastLink)
evtSummary := d.SummaryLine()
if len(evtTitlePrefix) > 0 {
evtSummary = fmt.Sprintf("%s %s", evtTitlePrefix, evtSummary)
}
event.SetSummary(evtSummary)
event.SetDescription(fmt.Sprintf("%s\\n\\nSunrise: %s\\nSunset: %s\\n\\nForecast Detail: %s",
d.DetailedForecast(),
d.Sunrise.Format("3:04:05 PM"),
d.Sunset.Format("3:04:05 PM"),
forecastLink,
))
}
err = ioutil.WriteFile(icalOutfile, []byte(cal.Serialize()), 0644)
if err != nil {
return fmt.Errorf("failed to write output file '%s': %w", icalOutfile, err)
}
return nil
}
func main() {
var calLocation = flag.String("calLocation", "", "The name of the calendar's location (eg. \"Ann Arbor, MI\") (required)")
var calendarDomain = flag.String("calDomain", "", "The calendar's domain (eg. \"ical.dzombak.com\") (required)")
var evtTitlePrefix = flag.String("evtTitlePrefix", "", "An optional prefix to be inserted before each event's title")
var lat = flag.Float64("lat", 42.27, "The forecast location's latitude (eg. \"42.27\")")
var lon = flag.Float64("lon", -83.74, "The forecast location's longitude (eg. \"-83.74\")")
var icalOutfile = flag.String("icalFile", "", "Path/filename for iCal output file (required)")
var printVersion = flag.Bool("version", false, "Print version and exit")
flag.Parse()
if *printVersion {
fmt.Println(version)
os.Exit(0)
}
if *calLocation == "" || *calendarDomain == "" || *icalOutfile == "" {
flag.PrintDefaults()
os.Exit(1)
}
if err := Main(*calLocation, *calendarDomain, *lat, *lon, *evtTitlePrefix, *icalOutfile); err != nil {
log.Fatalf(err.Error())
}
} | main.go | 0.674694 | 0.434941 | main.go | starcoder |
package proto
import (
"encoding/binary"
"fmt"
"github.com/pkg/errors"
"time"
)
var ErrNotFound = errors.New("not found")
//PutStringWithUInt8Len converts the string to slice of bytes. The first byte of resulting slice contains the length of the string.
func PutStringWithUInt8Len(buf []byte, s string) {
sl := uint8(len(s))
buf[0] = sl
copy(buf[1:], s)
}
//StringWithUInt8Len reads a string from given slice of bytes. The first byte of slice should contain the length of the following string.
//Function fails then the length of slice is less then 1 byte or the length of remaining slice is less then the length value from first byte.
func StringWithUInt8Len(buf []byte) (string, error) {
if l := len(buf); l < 1 {
return "", fmt.Errorf("not enought data, expected not less then %d, received %d", 1, l)
}
s := uint8(buf[0])
buf = buf[1:]
if l := len(buf); l < int(s) {
return "", fmt.Errorf("not enough data to read sting of lenght %d, recieved only %d bytes", s, l)
}
r := string(buf[:s])
return r, nil
}
// PutStringWithUInt16Len writes to the buffer `buf` two bytes of the string `s` length followed with the bytes of the string `s`.
func PutStringWithUInt16Len(buf []byte, s string) {
sl := uint16(len(s))
binary.BigEndian.PutUint16(buf, sl)
copy(buf[2:], s)
}
// StringWithUInt16Len reads a string from the buffer `buf`.
func StringWithUInt16Len(buf []byte) (string, error) {
if l := len(buf); l < 2 {
return "", fmt.Errorf("not enough data, expected not less than %d, received %d", 2, l)
}
s := binary.BigEndian.Uint16(buf[0:2])
buf = buf[2:]
if l := len(buf); l < int(s) {
return "", fmt.Errorf("not enough data to read string of length %d, received only %d bytes", s, l)
}
r := string(buf[:s])
return r, nil
}
// PutStringWithUInt32Len writes to the buffer `buf` four bytes of the string's `s` length followed with the bytes of string itself.
func PutStringWithUInt32Len(buf []byte, s string) {
sl := uint32(len(s))
binary.BigEndian.PutUint32(buf, sl)
copy(buf[4:], s)
}
// StringWithUInt32Len reads a string from the buffer `buf`.
func StringWithUInt32Len(buf []byte) (string, error) {
if l := len(buf); l < 4 {
return "", fmt.Errorf("not enough data, expected not less than %d, received %d", 4, l)
}
s := binary.BigEndian.Uint32(buf[0:4])
buf = buf[4:]
if l := len(buf); l < int(s) {
return "", fmt.Errorf("not enough data to read string of length %d, received only %d bytes", s, l)
}
r := string(buf[:s])
return r, nil
}
//PutBytesWithUInt16Len prepends given buf with 2 bytes of it's length.
func PutBytesWithUInt16Len(buf []byte, data []byte) {
sl := uint16(len(data))
binary.BigEndian.PutUint16(buf, sl)
copy(buf[2:], data)
}
// BytesWithUInt16Len reads from buf an array of bytes of length encoded in first 2 bytes.
func BytesWithUInt16Len(buf []byte) ([]byte, error) {
if l := len(buf); l < 2 {
return nil, fmt.Errorf("not enough data, expected not less than %d, received %d", 2, l)
}
s := binary.BigEndian.Uint16(buf[0:2])
buf = buf[2:]
if l := len(buf); l < int(s) {
return nil, fmt.Errorf("not enough data to read array of bytes of length %d, received only %d bytes", s, l)
}
r := make([]byte, s)
copy(r, buf[:s])
return r, nil
}
//PutBytesWithUInt32Len prepends given buf with 4 bytes of it's length.
func PutBytesWithUInt32Len(buf []byte, data []byte) {
sl := uint32(len(data))
binary.BigEndian.PutUint32(buf, sl)
copy(buf[4:], data)
}
// BytesWithUInt32Len reads from buf an array of bytes of length encoded in first 4 bytes.
func BytesWithUInt32Len(buf []byte) ([]byte, error) {
if l := len(buf); l < 4 {
return nil, fmt.Errorf("not enough data, expected not less than %d, received %d", 4, l)
}
s := binary.BigEndian.Uint32(buf[0:4])
buf = buf[4:]
if l := len(buf); l < int(s) {
return nil, fmt.Errorf("not enough data to read array of bytes of length %d, received only %d bytes", s, l)
}
r := make([]byte, s)
copy(r, buf[:s])
return r, nil
}
func PutBool(buf []byte, b bool) {
if b {
buf[0] = 1
} else {
buf[0] = 0
}
}
func Bool(buf []byte) (bool, error) {
if l := len(buf); l < 1 {
return false, errors.New("failed to unmarshal Bool, empty buffer received")
}
switch buf[0] {
case 0:
return false, nil
case 1:
return true, nil
default:
return false, fmt.Errorf("invalid bool value %d", buf[0])
}
}
func NewTimestampFromTime(t time.Time) uint64 {
return NewTimestampFromUnixNano(t.UnixNano())
}
func NewTimestampFromUnixNano(nano int64) uint64 {
return uint64(nano / 1000000)
}
func NewTimestampFromUSeconds(seconds uint64) uint64 {
return seconds * 1000
}
func NetworkStrFromScheme(scheme Scheme) string {
prefix := "waves"
return prefix + string(scheme)
} | pkg/proto/common.go | 0.754553 | 0.534916 | common.go | starcoder |
package io
import (
"encoding/binary"
"math"
)
func NewBuffer(cap int) *Buffer {
return &Buffer{
buf: make([]byte, cap),
pos: 0,
}
}
func NewBufferWithData(data []byte) *Buffer {
return &Buffer{
buf: data,
pos: 0,
}
}
type Buffer struct {
buf []byte
pos int
}
func (b *Buffer) Cap() int {
return len(b.buf)
}
func (b *Buffer) Pos(p int) {
b.pos = p
}
func (b *Buffer) GetPos() int {
return b.pos
}
func (b *Buffer) SetBuf(buf []byte) {
b.buf = buf
b.pos = 0
}
func (b *Buffer) Reset() {
b.pos = 0
}
func (b *Buffer) Buf() []byte {
return b.buf
}
func (b *Buffer) Data() []byte {
return b.buf[:b.pos]
}
func (b *Buffer) WriteByteSlice(v []byte) {
n := b.PutByteSlice(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteString(s string) {
n := b.PutString(b.pos, s)
b.pos += n
}
func (b *Buffer) WriteByte(v byte) {
b.buf[b.pos] = v
b.pos++
}
func (b *Buffer) WriteBytes(v []byte) {
n := copy(b.buf[b.pos:], v)
if n < len(v) {
panic("buffer full`")
}
b.pos += n
}
func (b *Buffer) WriteVarInt(v int) {
n := b.PutVarInt(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteIntAsUVarInt(v int) {
n := b.PutIntAsUVarInt(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteVarInt64(v int64) {
n := b.PutVarInt64(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteUVarInt64(v uint64) {
n := b.PutUVarInt64(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteFloat64(v float64) {
n := b.PutFloat64(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteFixedUInt64(v uint64) {
n := b.PutFixedUInt64(b.pos, v)
b.pos += n
}
func (b *Buffer) WriteVarUintSlice(v []int) {
b.WriteIntAsUVarInt(len(v))
for _, i := range v {
b.WriteIntAsUVarInt(i)
}
}
func (b *Buffer) Free() []byte {
return b.buf[b.pos:]
}
func (b *Buffer) Available() int {
return len(b.buf) - b.pos
}
func (b *Buffer) PutByte(pos int, v byte) {
b.buf[pos] = v
}
func (b *Buffer) PutByteSlice(pos int, v []byte) int {
n := b.PutIntAsUVarInt(pos, len(v))
c := copy(b.buf[pos+n:], v)
if c != len(v) {
panic("buffer full")
}
return n + c
}
func (b *Buffer) PutString(pos int, s string) int {
n := b.PutIntAsUVarInt(pos, len(s))
c := copy(b.buf[pos+n:], s)
if c != len(s) {
panic("buffer full")
}
return n + c
}
func (b *Buffer) PutVarInt(pos int, v int) int {
return binary.PutVarint(b.buf[pos:], int64(v))
}
func (b *Buffer) PutIntAsUVarInt(pos int, v int) int {
return binary.PutUvarint(b.buf[pos:], uint64(v))
}
func (b *Buffer) PutVarInt64(pos int, v int64) int {
return binary.PutVarint(b.buf[pos:], v)
}
func (b *Buffer) PutUVarInt64(pos int, v uint64) int {
return binary.PutUvarint(b.buf[pos:], v)
}
func (b *Buffer) PutFloat64(pos int, v float64) int {
binary.LittleEndian.PutUint64(b.buf[pos:], math.Float64bits(v))
return 8
}
func (b *Buffer) PutFixedUInt64(pos int, v uint64) int {
binary.LittleEndian.PutUint64(b.buf[pos:], v)
return 8
}
func (b *Buffer) ReadByte() byte {
r := b.buf[b.pos]
b.pos++
return r
}
func (b *Buffer) ReadBytes() []byte {
r, n := b.Bytes(b.pos)
b.pos += n
return r
}
func (b *Buffer) ReadVarInt() int {
r, n := b.VarInt(b.pos)
b.pos += n
return r
}
func (b *Buffer) ReadUVarInt() uint {
r, n := b.UVarInt(b.pos)
b.pos += n
return r
}
func (b *Buffer) ReadUVarIntAsInt() int {
r, n := b.UVarInt(b.pos)
b.pos += n
return int(r)
}
func (b *Buffer) ReadFixedUInt64() uint64 {
r, n := b.UInt64(b.pos)
b.pos += n
return r
}
func (b *Buffer) Byte(pos int) byte {
return b.buf[pos]
}
func (b *Buffer) Bytes(pos int) ([]byte, int) {
l, n := binary.Uvarint(b.buf[pos:])
if n <= 0 {
panic("invalid Uvarint")
}
start := pos + n
end := start + int(l)
return b.buf[start:end], int(l) + n
}
func (b *Buffer) VarInt(pos int) (int, int) {
r, n := binary.Varint(b.buf[pos:])
if n <= 0 {
panic("invalid Varint")
}
return int(r), n
}
func (b *Buffer) UVarInt(pos int) (uint, int) {
r, n := binary.Uvarint(b.buf[pos:])
if n <= 0 {
panic("invalid Uvarint")
}
return uint(r), n
}
func (b *Buffer) UInt64(pos int) (uint64, int) {
r := binary.LittleEndian.Uint64(b.buf[pos:])
return r, 8
}
func (b *Buffer) UVarIntAsInt(pos int) (int, int) {
r, n := binary.Uvarint(b.buf[pos:])
if n <= 0 {
panic("invalid Uvarint")
}
return int(r), n
}
func (b *Buffer) Grow(cap int) {
n := make([]byte, cap)
copy(n, b.buf)
b.buf = n
}
// SizeVarint returns the varint encoding size of an integer.
func SizeUVarint(x uint64) int {
switch {
case x < 1<<7:
return 1
case x < 1<<14:
return 2
case x < 1<<21:
return 3
case x < 1<<28:
return 4
case x < 1<<35:
return 5
case x < 1<<42:
return 6
case x < 1<<49:
return 7
case x < 1<<56:
return 8
case x < 1<<63:
return 9
}
return 10
} | internal/storage/io/buffer.go | 0.613005 | 0.493653 | buffer.go | starcoder |
package advent
import (
"fmt"
"github.com/davidparks11/advent2021/internal/coordinate"
)
type seaCucumber struct {
dailyProblem
}
func NewSeaCumber() Problem {
return &seaCucumber{
dailyProblem{
day: 25,
},
}
}
func (s *seaCucumber) Solve() interface{} {
input := s.GetInputLines()
var results []int
results = append(results, s.cucumberSteps(input))
return results
}
/*
This is it: the bottom of the ocean trench, the last place the sleigh keys could be. Your submarine's experimental antenna still isn't boosted enough to detect the keys, but they must be here. All you need to do is reach the seafloor and find them.
At least, you'd touch down on the seafloor if you could; unfortunately, it's completely covered by two large herds of sea cucumbers, and there isn't an open space large enough for your submarine.
You suspect that the Elves must have done this before, because just then you discover the phone number of a deep-sea marine biologist on a handwritten note taped to the wall of the submarine's cockpit.
"Sea cucumbers? Yeah, they're probably hunting for food. But don't worry, they're predictable critters: they move in perfectly straight lines, only moving forward when there's space to do so. They're actually quite polite!"
You explain that you'd like to predict when you could land your submarine.
"Oh that's easy, they'll eventually pile up and leave enough space for-- wait, did you say submarine? And the only place with that many sea cucumbers would be at the very bottom of the Mariana--" You hang up the phone.
There are two herds of sea cucumbers sharing the same region; one always moves east (>), while the other always moves south (v). Each location can contain at most one sea cucumber; the remaining locations are empty (.). The submarine helpfully generates a map of the situation (your puzzle input). For example:
v...>>.vv>
.vv>>.vv..
>>.>v>...v
>>v>>.>.v.
v>v.vv.v..
>.>>..v...
.vv..>.>v.
v.v..>>v.v
....v..v.>
Every step, the sea cucumbers in the east-facing herd attempt to move forward one location, then the sea cucumbers in the south-facing herd attempt to move forward one location. When a herd moves forward, every sea cucumber in the herd first simultaneously considers whether there is a sea cucumber in the adjacent location it's facing (even another sea cucumber facing the same direction), and then every sea cucumber facing an empty location simultaneously moves into that location.
So, in a situation like this:
...>>>>>...
After one step, only the rightmost sea cucumber would have moved:
...>>>>.>..
After the next step, two sea cucumbers move:
...>>>.>.>.
During a single step, the east-facing herd moves first, then the south-facing herd moves. So, given this situation:
..........
.>v....v..
.......>..
..........
After a single step, of the sea cucumbers on the left, only the south-facing sea cucumber has moved (as it wasn't out of the way in time for the east-facing cucumber on the left to move), but both sea cucumbers on the right have moved (as the east-facing sea cucumber moved out of the way of the south-facing sea cucumber):
..........
.>........
..v....v>.
..........
Due to strong water currents in the area, sea cucumbers that move off the right edge of the map appear on the left edge, and sea cucumbers that move off the bottom edge of the map appear on the top edge. Sea cucumbers always check whether their destination location is empty before moving, even if that destination is on the opposite side of the map:
Initial state:
...>...
.......
......>
v.....>
......>
.......
..vvv..
After 1 step:
..vv>..
.......
>......
v.....>
>......
.......
....v..
After 2 steps:
....v>.
..vv...
.>.....
......>
v>.....
.......
.......
After 3 steps:
......>
..v.v..
..>v...
>......
..>....
v......
.......
After 4 steps:
>......
..v....
..>.v..
.>.v...
...>...
.......
v......
To find a safe place to land your submarine, the sea cucumbers need to stop moving. Again consider the first example:
Initial state:
v...>>.vv>
.vv>>.vv..
>>.>v>...v
>>v>>.>.v.
v>v.vv.v..
>.>>..v...
.vv..>.>v.
v.v..>>v.v
....v..v.>
...
After 58 steps:
..>>v>vv..
..v.>>vv..
..>>v>>vv.
..>>>>>vv.
v......>vv
v>v....>>v
vvv.....>>
>vv......>
.>v.vv.v..
In this example, the sea cucumbers stop moving after 58 steps.
Find somewhere safe to land your submarine. What is the first step on which no sea cucumbers move?
*/
func (s *seaCucumber) cucumberSteps(input []string) int {
heard := s.parseInput(input)
count := 1
for ; !heard.step(); count++ {}
return count
}
type cucumber rune
func (c cucumber) nextPosition() coordinate.Point {
if c == south {
return coordinate.Point{X: 0, Y: 1}
}
return coordinate.Point{X: 1, Y: 0}
}
const (
south cucumber = 'v'
east cucumber = '>'
)
type cucumberHeard struct {
length, width int
cucumbers map[coordinate.Point]cucumber
}
func (c *cucumberHeard) String() string {
str := fmt.Sprintf("length: %d, width %d\n", c.length, c.width)
for y := 0; y < c.length; y++ {
for x := 0; x < c.width; x++ {
if r, found := c.cucumbers[coordinate.Point{Y: y, X: x}]; found {
str += string(r)
} else {
str += "."
}
}
str += "\n"
}
return str
}
func (c *cucumberHeard) step() bool {
done := true
newPositions := make(map[coordinate.Point]cucumber)
for p, current := range c.cucumbers {
if current == south {
continue
}
if nextPos := c.nextPosition(p); c.isFree(current, nextPos, nil) {
newPositions[nextPos] = current
done = false
} else {
newPositions[p] = current
}
}
for p, current := range c.cucumbers {
if current == east {
continue
}
if nextPos := c.nextPosition(p); c.isFree(current, nextPos, newPositions) {
newPositions[nextPos] = current
done = false
} else {
newPositions[p] = current
}
}
c.cucumbers = newPositions
return done
}
func (c *cucumberHeard) nextPosition(p coordinate.Point) coordinate.Point {
delta := c.cucumbers[p].nextPosition()
next := coordinate.Point{X: p.X + delta.X, Y: p.Y + delta.Y}
if next.Y == c.length {
next.Y = 0
}
if next.X == c.width {
next.X = 0
}
return next
}
func (c *cucumberHeard) isFree(current cucumber, p coordinate.Point, newPositions map[coordinate.Point]cucumber) bool {
if current == east {
if _, found := c.cucumbers[p]; found {
return false
}
} else {
if other, found := c.cucumbers[p]; found && other == south {
return false
}
if other, found := newPositions[p]; found && other == east{
return false
}
}
return true
}
func (c *seaCucumber) parseInput(input []string) (heard *cucumberHeard) {
heard = &cucumberHeard{
length: len(input),
width: len(input[0]),
cucumbers: make(map[coordinate.Point]cucumber),
}
for y, line := range input {
for x, char := range line {
if char == rune(south) || char == rune(east) {
heard.cucumbers[coordinate.Point{X: x, Y: y}] = cucumber(char)
}
}
}
return heard
} | internal/advent/day25.go | 0.717903 | 0.42471 | day25.go | starcoder |
package jsonschema
import (
"github.com/json-iterator/go"
"strconv"
)
// AllOf MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema.
// An instance validates successfully against this keyword if it validates successfully against all schemas defined by this keyword's value.
type AllOf []*Schema
// NewAllOf creates a new AllOf validator
func NewAllOf() Validator {
return &AllOf{}
}
// Validate implements the validator interface for AllOf
func (a AllOf) Validate(propPath string, data interface{}, errs *[]ValError) {
for _, sch := range a {
sch.Validate(propPath, data, errs)
}
}
// JSONProp implements JSON property name indexing for AllOf
func (a AllOf) JSONProp(name string) interface{} {
idx, err := strconv.Atoi(name)
if err != nil {
return nil
}
if idx > len(a) || idx < 0 {
return nil
}
return a[idx]
}
// JSONChildren implements the JSONContainer interface for AllOf
func (a AllOf) JSONChildren() (res map[string]JSONPather) {
res = map[string]JSONPather{}
for i, sch := range a {
res[strconv.Itoa(i)] = sch
}
return
}
// AnyOf MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema.
// An instance validates successfully against this keyword if it validates successfully against at
// least one schema defined by this keyword's value.
type AnyOf []*Schema
// NewAnyOf creates a new AnyOf validator
func NewAnyOf() Validator {
return &AnyOf{}
}
// Validate implements the validator interface for AnyOf
func (a AnyOf) Validate(propPath string, data interface{}, errs *[]ValError) {
for _, sch := range a {
test := &[]ValError{}
sch.Validate(propPath, data, test)
if len(*test) == 0 {
return
}
}
AddError(errs, propPath, data, "did Not match any specified AnyOf schemas")
}
// JSONProp implements JSON property name indexing for AnyOf
func (a AnyOf) JSONProp(name string) interface{} {
idx, err := strconv.Atoi(name)
if err != nil {
return nil
}
if idx > len(a) || idx < 0 {
return nil
}
return a[idx]
}
// JSONChildren implements the JSONContainer interface for AnyOf
func (a AnyOf) JSONChildren() (res map[string]JSONPather) {
res = map[string]JSONPather{}
for i, sch := range a {
res[strconv.Itoa(i)] = sch
}
return
}
// OneOf MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema.
// An instance validates successfully against this keyword if it validates successfully against exactly one schema defined by this keyword's value.
type OneOf []*Schema
// NewOneOf creates a new OneOf validator
func NewOneOf() Validator {
return &OneOf{}
}
// Validate implements the validator interface for OneOf
func (o OneOf) Validate(propPath string, data interface{}, errs *[]ValError) {
matched := false
for _, sch := range o {
test := &[]ValError{}
sch.Validate(propPath, data, test)
if len(*test) == 0 {
if matched {
AddError(errs, propPath, data, "matched more than one specified OneOf schemas")
return
}
matched = true
}
}
if !matched {
AddError(errs, propPath, data, "did not match any of the specified OneOf schemas")
}
}
// JSONProp implements JSON property name indexing for OneOf
func (o OneOf) JSONProp(name string) interface{} {
idx, err := strconv.Atoi(name)
if err != nil {
return nil
}
if idx > len(o) || idx < 0 {
return nil
}
return o[idx]
}
// JSONChildren implements the JSONContainer interface for OneOf
func (o OneOf) JSONChildren() (res map[string]JSONPather) {
res = map[string]JSONPather{}
for i, sch := range o {
res[strconv.Itoa(i)] = sch
}
return
}
// Not MUST be a valid JSON Schema.
// An instance is valid against this keyword if it fails to validate successfully against the schema defined
// by this keyword.
type Not Schema
// NewNot creates a new Not validator
func NewNot() Validator {
return &Not{}
}
// Validate implements the validator interface for Not
func (n *Not) Validate(propPath string, data interface{}, errs *[]ValError) {
sch := Schema(*n)
test := &[]ValError{}
sch.Validate(propPath, data, test)
if len(*test) == 0 {
// TODO - make this error actually make sense
AddError(errs, propPath, data, "cannot match schema")
}
}
// JSONProp implements JSON property name indexing for Not
func (n Not) JSONProp(name string) interface{} {
return Schema(n).JSONProp(name)
}
// JSONChildren implements the JSONContainer interface for Not
func (n Not) JSONChildren() (res map[string]JSONPather) {
if n.Ref != "" {
s := Schema(n)
return map[string]JSONPather{"$ref": &s}
}
return Schema(n).JSONChildren()
}
// UnmarshalJSON implements the jsoniter.Unmarshaler interface for Not
func (n *Not) UnmarshalJSON(data []byte) error {
var sch Schema
if err := jsoniter.Unmarshal(data, &sch); err != nil {
return err
}
*n = Not(sch)
return nil
}
// MarshalJSON implements jsoniter.Marshaller for Not
func (n Not) MarshalJSON() ([]byte, error) {
return jsoniter.Marshal(Schema(n))
} | keywords_booleans.go | 0.767472 | 0.406744 | keywords_booleans.go | starcoder |
package mocks
import (
"io"
"github.com/stretchr/testify/mock"
"github.com/ZupIT/ritchie-cli/pkg/api"
"github.com/ZupIT/ritchie-cli/pkg/env"
"github.com/ZupIT/ritchie-cli/pkg/formula"
"github.com/ZupIT/ritchie-cli/pkg/formula/creator/template"
"github.com/ZupIT/ritchie-cli/pkg/git"
"github.com/ZupIT/ritchie-cli/pkg/rtutorial"
)
type EnvFinderMock struct {
mock.Mock
}
func (e *EnvFinderMock) Find() (env.Holder, error) {
args := e.Called()
return args.Get(0).(env.Holder), args.Error(1)
}
type DetailManagerMock struct {
mock.Mock
}
func (d *DetailManagerMock) LatestTag(repo formula.Repo) string {
args := d.Called(repo)
return args.String(0)
}
type TutorialFindSetterMock struct {
mock.Mock
}
func (t *TutorialFindSetterMock) Find() (rtutorial.TutorialHolder, error) {
args := t.Called()
return args.Get(0).(rtutorial.TutorialHolder), args.Error(1)
}
func (t *TutorialFindSetterMock) Set(tutorial string) (rtutorial.TutorialHolder, error) {
args := t.Called(tutorial)
return args.Get(0).(rtutorial.TutorialHolder), args.Error(1)
}
type GitRepositoryMock struct {
mock.Mock
}
func (g *GitRepositoryMock) Zipball(info git.RepoInfo, version string) (io.ReadCloser, error) {
args := g.Called(info, version)
return args.Get(0).(io.ReadCloser), args.Error(1)
}
func (g *GitRepositoryMock) Tags(info git.RepoInfo) (git.Tags, error) {
args := g.Called(info)
return args.Get(0).(git.Tags), args.Error(1)
}
func (g *GitRepositoryMock) LatestTag(info git.RepoInfo) (git.Tag, error) {
args := g.Called(info)
return args.Get(0).(git.Tag), args.Error(1)
}
type InputURLMock struct {
mock.Mock
}
func (i *InputURLMock) URL(name, defaultValue string) (string, error) {
args := i.Called(name, defaultValue)
return args.String(0), args.Error(1)
}
type InputBoolMock struct {
mock.Mock
}
func (i *InputBoolMock) Bool(name string, items []string, helper ...string) (bool, error) {
args := i.Called(name, items, helper)
return args.Bool(0), args.Error(1)
}
type InputListMock struct {
mock.Mock
}
func (i *InputListMock) List(name string, items []string, helper ...string) (string, error) {
args := i.Called(name, items, helper)
return args.String(0), args.Error(1)
}
type InputMultiselectMock struct {
mock.Mock
}
func (i *InputMultiselectMock) Multiselect(input formula.Input) ([]string, error) {
args := i.Called(input)
return args.Get(0).([]string), args.Error(1)
}
type InputDefaultTextMock struct {
mock.Mock
}
func (i *InputDefaultTextMock) Text(input formula.Input) (string, error) {
args := i.Called(input)
return args.String(0), args.Error(1)
}
type InputIntMock struct {
mock.Mock
}
func (i *InputIntMock) Int(name string, helper ...string) (int64, error) {
args := i.Called(name, helper)
return args.Get(0).(int64), args.Error(1)
}
type InputPasswordMock struct {
mock.Mock
}
func (i *InputPasswordMock) Password(label string, helper ...string) (string, error) {
args := i.Called(label, helper)
return args.String(0), args.Error(1)
}
type InputTextMock struct {
mock.Mock
}
func (i *InputTextMock) Text(name string, required bool, helper ...string) (string, error) {
args := i.Called(name, required, helper)
return args.String(0), args.Error(1)
}
type InputTextValidatorMock struct {
mock.Mock
}
func (i *InputTextValidatorMock) Text(name string, validate func(interface{}) error, helper ...string) (string, error) {
args := i.Called(name, validate)
return args.String(0), validate(args.String(0))
}
type InputPathMock struct {
mock.Mock
}
func (i *InputPathMock) Read(text string) (string, error) {
args := i.Called(text)
return args.String(0), args.Error(1)
}
type FormCreator struct {
mock.Mock
}
func (f *FormCreator) Create(cf formula.Create) error {
args := f.Called(cf)
return args.Error(0)
}
func (f *FormCreator) Build(info formula.BuildInfo) error {
args := f.Called(info)
return args.Error(0)
}
type WorkspaceForm struct {
mock.Mock
}
func (w *WorkspaceForm) Add(workspace formula.Workspace) error {
args := w.Called(workspace)
return args.Error(0)
}
func (w *WorkspaceForm) Delete(workspace formula.Workspace) error {
args := w.Called(workspace)
return args.Error(0)
}
func (w *WorkspaceForm) List() (formula.Workspaces, error) {
args := w.Called()
return args.Get(0).(formula.Workspaces), args.Error(1)
}
func (w *WorkspaceForm) Validate(workspace formula.Workspace) error {
args := w.Called(workspace)
return args.Error(0)
}
func (w *WorkspaceForm) CurrentHash(formulaPath string) (string, error) {
args := w.Called(formulaPath)
return args.String(0), args.Error(1)
}
func (w *WorkspaceForm) PreviousHash(formulaPath string) (string, error) {
args := w.Called(formulaPath)
return args.String(0), args.Error(1)
}
func (w *WorkspaceForm) UpdateHash(formulaPath string, hash string) error {
args := w.Called(formulaPath, hash)
return args.Error(0)
}
type RepoManager struct {
mock.Mock
}
func (r *RepoManager) Add(repo formula.Repo) error {
args := r.Called(repo)
return args.Error(0)
}
func (r *RepoManager) List() (formula.Repos, error) {
args := r.Called()
return args.Get(0).(formula.Repos), args.Error(1)
}
func (r *RepoManager) Update(name formula.RepoName, version formula.RepoVersion) error {
args := r.Called(name, version)
return args.Error(0)
}
func (r *RepoManager) Delete(name formula.RepoName) error {
args := r.Called(name)
return args.Error(0)
}
func (r *RepoManager) SetPriority(name formula.RepoName, priority int) error {
args := r.Called(name, priority)
return args.Error(0)
}
func (r *RepoManager) Create(repo formula.Repo) error {
args := r.Called(repo)
return args.Error(0)
}
func (r *RepoManager) Write(repos formula.Repos) error {
args := r.Called(repos)
return args.Error(0)
}
func (r *RepoManager) LatestTag(repo formula.Repo) string {
args := r.Called(repo)
return args.String(0)
}
type DirManager struct {
mock.Mock
}
func (d *DirManager) Exists(path string) bool {
args := d.Called(path)
return args.Bool(0)
}
func (d *DirManager) IsDir(dir string) bool {
args := d.Called(dir)
return args.Bool(0)
}
type FileManager struct {
mock.Mock
}
func (f *FileManager) Exists(path string) bool {
args := f.Called(path)
return args.Bool(0)
}
func (f *FileManager) Read(path string) ([]byte, error) {
args := f.Called(path)
return args.Get(0).([]byte), args.Error(1)
}
func (f *FileManager) Write(path string, content []byte) error {
args := f.Called(path, content)
return args.Error(0)
}
func (f *FileManager) Create(path string, data io.ReadCloser) error {
args := f.Called(path, data)
return args.Error(0)
}
func (f *FileManager) Remove(path string) error {
args := f.Called(path)
return args.Error(0)
}
func (f *FileManager) List(file string) ([]string, error) {
args := f.Called(file)
return args.Get(0).([]string), args.Error(1)
}
func (f *FileManager) ListNews(oldPath, newPath string) ([]string, error) {
args := f.Called(oldPath, newPath)
return args.Get(0).([]string), args.Error(1)
}
func (f *FileManager) Copy(src, dst string) error {
args := f.Called(src, dst)
return args.Error(0)
}
func (f *FileManager) Append(path string, content []byte) error {
args := f.Called(path, content)
return args.Error(0)
}
func (f *FileManager) Move(oldPath, newPath string, files []string) error {
args := f.Called(oldPath, newPath, files)
return args.Error(0)
}
type TreeManager struct {
mock.Mock
}
func (t *TreeManager) Tree() (map[formula.RepoName]formula.Tree, error) {
args := t.Called()
return args.Get(0).(map[formula.RepoName]formula.Tree), args.Error(1)
}
func (t *TreeManager) MergedTree(core bool) formula.Tree {
args := t.Called(core)
return args.Get(0).(formula.Tree)
}
func (t *TreeManager) TreeByRepo(repoName formula.RepoName) (formula.Tree, error) {
args := t.Called(repoName)
return args.Get(0).(formula.Tree), args.Error(1)
}
func (t *TreeManager) Generate(repoPath string) (formula.Tree, error) {
args := t.Called(repoPath)
return args.Get(0).(formula.Tree), args.Error(1)
}
func (t *TreeManager) Check() []api.CommandID {
args := t.Called()
return args.Get(0).([]api.CommandID)
}
type TemplateManagerMock struct {
mock.Mock
}
func (tm *TemplateManagerMock) Languages() ([]string, error) {
args := tm.Called()
return args.Get(0).([]string), args.Error(1)
}
func (tm *TemplateManagerMock) LangTemplateFiles(lang string) ([]template.File, error) {
args := tm.Called(lang)
return args.Get(0).([]template.File), args.Error(1)
}
func (tm *TemplateManagerMock) ResolverNewPath(oldPath, newDir, lang, workspacePath string) (string, error) {
args := tm.Called(oldPath, newDir, lang, workspacePath)
return args.String(0), args.Error(1)
}
func (tm *TemplateManagerMock) Validate() error {
args := tm.Called()
return args.Error(0)
}
type WorkspaceMock struct {
mock.Mock
}
func (w *WorkspaceMock) Add(workspace formula.Workspace) error {
args := w.Called(workspace)
return args.Error(0)
}
func (w *WorkspaceMock) List() (formula.Workspaces, error) {
args := w.Called()
return args.Get(0).(formula.Workspaces), args.Error(1)
} | internal/mocks/mocks.go | 0.651798 | 0.411525 | mocks.go | starcoder |
package waveform
import (
"bytes"
"errors"
"fmt"
"image/color"
"io"
"math"
"time"
svg "github.com/ajstarks/svgo/float"
"github.com/go-audio/audio"
"github.com/go-audio/wav"
"github.com/hajimehoshi/go-mp3"
)
// Option image option
type Option struct {
// Resolution specifies the resolution of the
// Required.
Resolution int
// Width specifies the width of the resulting image.
// Default: Resolution * 5
Width int
// Height specifies the height of the resulting image.
// Default: 540
Height int
// Background specifies the color of the background.
// Default: none (transparent)
Background color.Color
// Color specifies the color of the waveform.
// Default: color.Black
Color color.Color
}
// bound sample value upper and lower boundary
type bound struct {
Upper float64
Lower float64
}
type float64Reader interface {
// readNSamples reads the next n samples, and return the values in float64 slice.
readNSamples(buf []float64) ([]float64, error)
}
type mp3Decoder struct {
*mp3.Decoder
buf []byte
}
func (d *mp3Decoder) readNSamples(buf []float64) ([]float64, error) {
if d.buf == nil || cap(d.buf) < len(buf)*4 {
d.buf = make([]byte, len(buf)*4)
}
totalSamples := 0
for totalSamples < len(buf) {
expectBytes := len(d.buf) - totalSamples*4
read, err := d.Read(d.buf[:expectBytes])
if err != nil && err != io.EOF {
return nil, fmt.Errorf("an error occurred while decoding mp3: %w", err)
}
if err == io.EOF {
break
}
if read%4 != 0 {
return nil, errors.New("expected multiple of 4 bytes to be read")
}
// 16 bit 2 channels
src := d.buf[:read]
for i := 0; i < read/4; i++ {
buf[totalSamples+i] = float64(int16(uint16(src[i*4]) | uint16(src[i*4+1])<<8))
}
totalSamples += read / 4
}
return buf[:totalSamples], nil
}
type wavDecoder struct {
*wav.Decoder
buf *audio.IntBuffer
}
func (d *wavDecoder) readNSamples(buf []float64) ([]float64, error) {
numCh := int(d.NumChans)
if d.buf == nil {
d.buf = &audio.IntBuffer{
Data: make([]int, len(buf)*numCh),
}
}
read, err := d.PCMBuffer(d.buf)
if err != nil {
return nil, err
}
// workaround: https://github.com/motoki317/go-waveform/issues/1
read = min(read, len(d.buf.Data))
src := d.buf.Data[:read]
for i := 0; i < read/numCh; i++ {
buf[i] = float64(src[i*numCh])
}
return buf[:read/numCh], nil
}
type svgWriter struct {
s *svg.SVG
sample float64Reader
sampleLength int
bound *bound
option *Option
}
func (s *svgWriter) write() error {
n := s.option.Resolution
batchRead := int(float64(s.sampleLength)/float64(n) + 0.5)
if n > s.sampleLength {
n = s.sampleLength
batchRead = 1
}
rectWidth := float64(2)
width := float64(n * 5)
height := 540.
if s.option.Width != 0 {
width = float64(s.option.Width)
rectWidth = width / float64(n) * 0.4
}
if s.option.Height > 0 {
height = float64(s.option.Height)
}
s.s.Start(width, height)
if s.option.Background != nil {
s.s.Rect(0, 0, width, height, `fill="`+colorToHex(s.option.Background)+`"`)
}
floor := (s.bound.Upper + s.bound.Lower) / 2
sampleHeight := s.bound.Upper - s.bound.Lower
lineCol := s.option.Color
if lineCol == nil {
lineCol = color.Black
}
i := 0
buf := make([]float64, batchRead)
for i < s.sampleLength {
expectBytes := min(batchRead, s.sampleLength-i)
read, err := s.sample.readNSamples(buf[:expectBytes])
if err != nil {
return err
}
if len(read) == 0 {
break
}
min, max := getMinMax(floor, read)
x := float64(i) / float64(s.sampleLength) * width
y := (min - s.bound.Lower) / sampleHeight * height
h := (max - min) / sampleHeight * height
s.s.Rect(x, y, rectWidth, h, `fill="`+colorToHex(lineCol)+`"`)
i += len(read)
}
return nil
}
func outputWaveformImage(sample float64Reader, sampleLength int, bound *bound, option *Option) (r io.Reader, err error) {
defer func() {
if r := recover(); r != nil {
if e, ok := r.(error); ok {
err = e
} else {
err = fmt.Errorf("recovered: %v", r)
}
}
}()
b := bytes.NewBuffer(make([]byte, 0))
s := svg.New(b)
writer := &svgWriter{
s: s,
sample: sample,
sampleLength: sampleLength,
bound: bound,
option: option,
}
if err = writer.write(); err != nil {
return
}
writer.s.End()
return b, nil
}
func getMinMax(floor float64, s []float64) (min, max float64) {
max, min = floor, floor
for _, y := range s {
if y > floor {
if y > max {
max = y
}
} else {
if y < min {
min = y
}
}
}
return
}
// OutputWaveformImageMp3 outputs waveform image from *mp3.Decoder.
func OutputWaveformImageMp3(data *mp3.Decoder, option *Option) (r io.Reader, err error) {
d := &mp3Decoder{
Decoder: data,
}
return outputWaveformImage(d, int(data.Length()/4), &bound{
Upper: 32767,
Lower: -32768,
}, option)
}
// OutputWaveformImageWav outputs waveform image from *wav.Decoder.
func OutputWaveformImageWav(data *wav.Decoder, option *Option) (r io.Reader, err error) {
d := &wavDecoder{
Decoder: data,
}
data.ReadInfo()
dur, err := data.Duration()
if err != nil {
return nil, err
}
byteLen := int((float64(dur) * float64(data.AvgBytesPerSec)) / float64(time.Second))
if data.BitDepth < 8 || data.NumChans == 0 {
return nil, fmt.Errorf("failed to retrieve correct bit depth / num channels (%d, %d)", data.BitDepth, data.NumChans)
}
return outputWaveformImage(d, byteLen/int(data.BitDepth/8)/int(data.NumChans), &bound{
Upper: math.Pow(2, float64(data.BitDepth-1)) - 1,
Lower: -math.Pow(2, float64(data.BitDepth-1)),
}, option)
} | image.go | 0.707708 | 0.407687 | image.go | starcoder |
package ewgraph
import (
"math"
ipq "github.com/DmitryBogomolov/algorithms/indexpriorityqueue"
)
func scanMinimumSpanningTreeVertexPrim(
wgr EdgeWeightedGraph, marked []bool, edgeTo []int, distTo []float64, verticesQueue ipq.IndexPriorityQueue,
vertexID int,
) {
marked[vertexID] = true
weights := wgr.AdjacentWeights(vertexID)
for i, adjacentVertexID := range wgr.AdjacentVertices(vertexID) {
weight := weights[i]
if !marked[adjacentVertexID] && weight < distTo[adjacentVertexID] {
edgeTo[adjacentVertexID] = vertexID
distTo[adjacentVertexID] = weight
verticesQueue.Insert(adjacentVertexID, weight)
}
}
}
func processMinimumSpanningTreePrim(
wgr EdgeWeightedGraph, marked []bool, edgeTo []int, distTo []float64, verticesQueue ipq.IndexPriorityQueue,
startVertexID int,
) {
distTo[startVertexID] = 0
verticesQueue.Insert(startVertexID, 0)
for verticesQueue.Size() > 0 {
_, vertexID := verticesQueue.Remove()
scanMinimumSpanningTreeVertexPrim(wgr, marked, edgeTo, distTo, verticesQueue, vertexID)
}
}
// BuildMinimumSpanningTreePrim computes minimum spanning tree using Prim's algorithm.
// https://algs4.cs.princeton.edu/43mst/PrimMST.java.html
func BuildMinimumSpanningTreePrim(wgr EdgeWeightedGraph) EdgeWeightedGraph {
numVertices := wgr.NumVertices()
marked := make([]bool, numVertices)
edgeTo := make([]int, numVertices)
distTo := make([]float64, numVertices)
verticesQueue := ipq.New(func(lhs, rhs interface{}) bool {
return lhs.(float64) < rhs.(float64)
})
for vertexID := 0; vertexID < numVertices; vertexID++ {
edgeTo[vertexID] = -1
distTo[vertexID] = math.MaxFloat64
}
for vertexID := 0; vertexID < numVertices; vertexID++ {
if !marked[vertexID] {
processMinimumSpanningTreePrim(wgr, marked, edgeTo, distTo, verticesQueue, vertexID)
}
}
adjacency := make([][]int, numVertices)
weights := make([][]float64, numVertices)
numEdges := 0
for vertexID := 0; vertexID < numVertices; vertexID++ {
otherVertexID := edgeTo[vertexID]
if otherVertexID != -1 {
addWeightedEdge(adjacency, weights, vertexID, otherVertexID, distTo[vertexID])
numEdges++
}
}
return NewImplEdgeWeightedGraph(numVertices, numEdges, adjacency, weights)
} | graph/ewgraph/minimum_spanning_tree_prim.go | 0.742515 | 0.404978 | minimum_spanning_tree_prim.go | starcoder |
package project
import "github.com/Smadarl/orb"
// Geometry is a helper to project any geomtry.
func Geometry(g orb.Geometry, proj orb.Projection) orb.Geometry {
if g == nil {
return nil
}
switch g := g.(type) {
case orb.Point:
return Point(g, proj)
case orb.MultiPoint:
return MultiPoint(g, proj)
case orb.LineString:
return LineString(g, proj)
case orb.MultiLineString:
return MultiLineString(g, proj)
case orb.Ring:
return Ring(g, proj)
case orb.Polygon:
return Polygon(g, proj)
case orb.MultiPolygon:
return MultiPolygon(g, proj)
case orb.Collection:
return Collection(g, proj)
case orb.Bound:
return Bound(g, proj)
}
panic("geometry type not supported")
}
// Point is a helper to project an a point
func Point(p orb.Point, proj orb.Projection) orb.Point {
return proj(p)
}
// MultiPoint is a helper to project an entire multi point.
func MultiPoint(mp orb.MultiPoint, proj orb.Projection) orb.MultiPoint {
for i := range mp {
mp[i] = proj(mp[i])
}
return mp
}
// LineString is a helper to project an entire line string.
func LineString(ls orb.LineString, proj orb.Projection) orb.LineString {
return orb.LineString(MultiPoint(orb.MultiPoint(ls), proj))
}
// MultiLineString is a helper to project an entire multi linestring.
func MultiLineString(mls orb.MultiLineString, proj orb.Projection) orb.MultiLineString {
for i := range mls {
mls[i] = LineString(mls[i], proj)
}
return mls
}
// Ring is a helper to project an entire ring.
func Ring(r orb.Ring, proj orb.Projection) orb.Ring {
return orb.Ring(LineString(orb.LineString(r), proj))
}
// Polygon is a helper to project an entire polygon.
func Polygon(p orb.Polygon, proj orb.Projection) orb.Polygon {
for i := range p {
p[i] = Ring(p[i], proj)
}
return p
}
// MultiPolygon is a helper to project an entire multi polygon.
func MultiPolygon(mp orb.MultiPolygon, proj orb.Projection) orb.MultiPolygon {
for i := range mp {
mp[i] = Polygon(mp[i], proj)
}
return mp
}
// Collection is a helper to project a rectangle.
func Collection(c orb.Collection, proj orb.Projection) orb.Collection {
for i := range c {
c[i] = Geometry(c[i], proj)
}
return c
}
// Bound is a helper to project a rectangle.
func Bound(bound orb.Bound, proj orb.Projection) orb.Bound {
min := proj(bound.Min)
return orb.Bound{Min: min, Max: min}.Extend(proj(bound.Max))
} | project/helpers.go | 0.798108 | 0.498657 | helpers.go | starcoder |
package sliceop
// Prefill - prefil array with values
func Prefill(size int, symbol string) (output []string) {
output = make([]string, size)
for i := 0; i < size; i++ {
output[i] = symbol
}
return output
}
// Map - maps array of strings with func
func Map(f func(input string) string, input ...string) (output []string) {
if len(input) == 0 {
return input
}
for _, key := range input {
output = append(output, f(key))
}
return output
}
// Includes - whether key includes array
func Includes(input []string, key string) bool {
for _, k := range input {
if k == key {
return true
}
}
return false
}
// NotIncludes - whether key is not included in array
func NotIncludes(input []string, key string) bool {
return !Includes(input, key)
}
// Reject - reject specified keys
func Reject(input []string, toReject ...string) (output []string) {
if len(toReject) == 0 {
return input
}
for _, key := range input {
if NotIncludes(toReject, key) {
output = append(output, key)
}
}
return output
}
// Select - select and return specified keys
func Select(input []string, toSelect ...string) (output []string) {
if len(toSelect) == 0 {
return input
}
for _, key := range input {
if Includes(toSelect, key) {
output = append(output, key)
}
}
return output
}
// Unique - returns unique values for given input
func Unique(input ...string) (output []string) {
if len(input) == 0 {
return input
}
for _, key := range input {
if NotIncludes(output, key) {
output = append(output, key)
}
}
return output
}
// Intersection - finds matching values of both input types
func Intersection(u1, u2 []string) (output []string) {
if len(u1) > len(u2) {
for _, t := range u1 {
if Includes(u2, t) {
output = append(output, t)
}
}
} else {
for _, t := range u2 {
if Includes(u1, t) {
output = append(output, t)
}
}
}
return output
}
// CountFunc - counts the number of elements yielding a true value.
func CountFunc(f func(input string) bool, input ...string) (sum int) {
for _, key := range input {
if f(key) {
sum++
}
}
return sum
}
// Compact - removes empty values from given array
func Compact(input ...string) (output []string) {
for _, key := range input {
if key != "" {
output = append(output, key)
}
}
return output
} | sliceop.go | 0.684053 | 0.409398 | sliceop.go | starcoder |
package statistics
import (
"math"
)
type PearsonIIIDistribution struct {
Mean float64 `json:"mean"`
StandardDeviation float64 `json:"standarddeviation"`
Skew float64 `json:"skew"`
}
func (d PearsonIIIDistribution) InvCDF(probability float64) float64 {
if probability > 1 {
panic("nope")
}
if probability <= 0 {
panic("nope")
}
noSkew := .00001
if math.Abs(d.Skew) < noSkew {
z := zeroSkewDistribution(d)
return z.InvCDF(probability)
} else {
if d.Skew > 0 {
g := positiveSkewDistribution(d)
return g.InvCDF(probability)
} else {
g := negativeSkewDistribution(d)
return -g.InvCDF(1 - probability)
}
}
}
func (d PearsonIIIDistribution) CDF(value float64) float64 {
noSkew := .00001
if math.Abs(d.Skew) < noSkew {
z := zeroSkewDistribution(d)
return z.CDF(value)
} else {
if d.Skew > 0 {
g := positiveSkewDistribution(d)
return g.CDF(value)
} else {
g := negativeSkewDistribution(d)
return 1 - g.CDF(-value)
}
}
}
func (d PearsonIIIDistribution) PDF(value float64) float64 {
noSkew := .00001
if math.Abs(d.Skew) < noSkew {
z := zeroSkewDistribution(d)
return z.PDF(value)
} else {
if d.Skew > 0 {
g := positiveSkewDistribution(d)
return g.PDF(value)
} else {
g := negativeSkewDistribution(d)
return -g.PDF(value)
}
}
}
func (n PearsonIIIDistribution) CentralTendency() float64 {
return n.Mean
}
func zeroSkewDistribution(d PearsonIIIDistribution) ContinuousDistribution {
return NormalDistribution{Mean: d.Mean, StandardDeviation: d.StandardDeviation}
}
func negativeSkewDistribution(d PearsonIIIDistribution) ContinuousDistribution {
shift := -d.Mean + 2.0*d.StandardDeviation/d.Skew
alpha := 4.0 / (d.Skew * d.Skew)
beta := .5 * d.StandardDeviation * d.Skew
beta = -beta
return ShiftedGammaDistribution{Alpha: alpha, Beta: beta, Shift: shift}
}
func positiveSkewDistribution(d PearsonIIIDistribution) ContinuousDistribution {
alpha := 4.0 / (d.Skew * d.Skew)
beta := .5 * d.StandardDeviation * d.Skew
shift := d.Mean - 2.0*d.StandardDeviation/d.Skew
return ShiftedGammaDistribution{Alpha: alpha, Beta: beta, Shift: shift}
} | statistics/pearsonIIIDistribution.go | 0.832543 | 0.45302 | pearsonIIIDistribution.go | starcoder |
package gokalman
import (
"errors"
"github.com/gonum/matrix/mat64"
"github.com/gonum/stat"
)
// NewChiSquare runs the Chi square tests from the MonteCarlo runs. These runs
// and the KF used are the ones tested via Chi square. The KF provided must be a
// pure predictor Vanilla KF and will be used to compute the intermediate steps
// of both the NEES and NIS tests.
// Returns NEESmeans, NISmeans and an error if applicable
// TODO: Change order of parameters.
func NewChiSquare(kf LDKF, runs MonteCarloRuns, controls []*mat64.Vector, withNEES, withNIS bool) ([]float64, []float64, error) {
if !withNEES && !withNIS {
return nil, nil, errors.New("Chi Square requires either NEES or NIS or both")
}
numRuns := runs.runs
steps := len(runs.Runs[0].Estimates)
NISsamples := make(map[int][]float64)
NEESsamples := make(map[int][]float64)
if len(controls) == 1 {
ctrlSize, _ := controls[0].Dims()
controls = make([]*mat64.Vector, steps)
// Populate with zero controls
for k := 0; k < steps; k++ {
controls[k] = mat64.NewVector(ctrlSize, nil)
}
} else if len(controls) != steps {
return nil, nil, errors.New("must provide as much control vectors as steps, or just one control vector")
}
for rNo, run := range runs.Runs {
kf.Reset()
for k, mcTruth := range run.Estimates {
est, err := kf.Update(mcTruth.Measurement(), controls[k])
if err != nil {
panic(err)
}
if withNEES {
if NEESsamples[k] == nil {
NEESsamples[k] = make([]float64, numRuns)
}
var PInv mat64.Dense
PInv.Inverse(est.Covariance()) // XXX: Pinv is OK
var nees, nees0, nees1 mat64.Vector
nees0.SubVec(mcTruth.State(), est.State())
// fmt.Printf("d=%v\n", mat64.Formatted(&nees0, mat64.Prefix(" ")))
nees1.MulVec(&PInv, &nees0)
nees.MulVec(nees0.T(), &nees1)
NEESsamples[k][rNo] = nees.At(0, 0) // Will be just a scalar.
}
if withNIS {
if NISsamples[k] == nil {
NISsamples[k] = make([]float64, numRuns)
}
// Compute the actual NIS.
var Pyy, Pyy0, PyyInv mat64.Dense
H := kf.GetMeasurementMatrix()
Pyy0.Mul(est.PredCovariance(), H.T())
Pyy.Mul(H, &Pyy0)
Pyy.Add(&Pyy, kf.GetNoise().MeasurementMatrix())
// This corresponds to the pure prediction: H*Pkp1_minus*H' + Rtrue;
PyyInv.Inverse(&Pyy)
var nis, nis0 mat64.Vector
nis0.MulVec(&PyyInv, est.Innovation())
nis.MulVec(est.Innovation().T(), &nis0)
NISsamples[k][rNo] = nis.At(0, 0) // Will be just be a scalar.
}
}
}
// Let's compute the means for each step.
NISmeans := make([]float64, steps)
NEESmeans := make([]float64, steps)
for k := 0; k < steps; k++ {
if withNEES {
NEESmeans[k] = stat.Mean(NEESsamples[k], nil)
}
if withNIS {
NISmeans[k] = stat.Mean(NISsamples[k], nil)
}
}
return NISmeans, NEESmeans, nil
} | chisquare.go | 0.584627 | 0.6305 | chisquare.go | starcoder |
package unionfind
type UnionFind struct {
data map[interface{}]*Node
}
func NewUnionFind() *UnionFind {
union := UnionFind{data: make(map[interface{}]*Node)}
return &union
}
// Get parent node of the union searching by value of its member
// Return union's parent Node
// O(m) time, O(1) space (in the worst case O(m), where m is the number of Union calls)
func (union *UnionFind) FindInSet(value interface{}) *Node {
var node *Node
var exists bool
node, exists = union.data[value]
if !exists {
node = NewNode(value, 1, nil)
node.parent = node
union.data[value] = node
}
// path compression, remap child nodes directly to the head
if node.parent != node {
node.parent = union.FindInSet(node.parent.GetValue())
}
return node.parent
}
// Check if UnionFind has union for specific value
// Return true if union exists
// O(1) time, O(1) space
func (union *UnionFind) Has(value interface{}) bool {
if _, exists := union.data[value]; exists {
return true
}
return false
}
// Join two unions using any values belonging to them
// Return new union's parent Node
// O(m*a(n)) time, O(m) space (approximately O(m), where m is the number of calls, a(n) is the inverse Ackermann function)
func (union *UnionFind) Union(value1 interface{}, value2 interface{}) *Node {
node1 := union.FindInSet(value1)
node2 := union.FindInSet(value2)
if node1 != node2 {
if node1.GetRank() >= node2.GetRank() {
node2.parent = node1
node1.rank += node2.GetRank()
return node1
}
node1.parent = node2
node2.rank += node1.GetRank()
return node2
}
return node1
}
type Node struct {
parent *Node
rank int
value interface{}
}
func NewNode(value interface{}, rank int, parent *Node) *Node {
node := new(Node)
node.value = value
node.rank = rank
return node
}
// Get Node rank
// Returns rank
// O(1) time, O(1) space
func (node *Node) GetRank() int {
return node.rank
}
// Get Node value
// Returns value
// O(1) time, O(1) space
func (node *Node) GetValue() interface{} {
return node.value
} | unionfind/unionfind.go | 0.835819 | 0.430806 | unionfind.go | starcoder |
package main
import "strings"
/*
iven an m x n grid of characters board and a string word, return true if word exists in the grid.
The word can be constructed from letters of sequentially adjacent cells, where adjacent cells are horizontally or vertically neighboring.
The same letter cell may not be used more than once.
Example 1:
Input: board = [["A","B","C","E"],["S","F","C","S"],["A","D","E","E"]], word = "ABCCED"
Output: true
Example 2:
Input: board = [["A","B","C","E"],["S","F","C","S"],["A","D","E","E"]], word = "SEE"
Output: true
Example 3:
Input: board = [["A","B","C","E"],["S","F","C","S"],["A","D","E","E"]], word = "ABCB"
Output: false
Constraints:
m == board.length
n = board[i].length
1 <= m, n <= 6
1 <= word.length <= 15
board and word consists of only lowercase and uppercase English letters.
*/
func dfs(board [][]byte, rowIndex int, columnIndex int, currentLetterIndex int, word string) bool {
/*
ASFBA
board =
[["A","B","C","E"],
["S","F","C","S"],
["A","D","E","E"]]
word = "ABCCED"
*/
if currentLetterIndex == len(word) {
return true
}
rows := len(board)
columns := len(board[0])
if rowIndex < 0 || rowIndex >= rows || columnIndex < 0 || columnIndex >= columns {
return false
}
letterThatIWant := word[currentLetterIndex]
currentBoardLetter := board[rowIndex][columnIndex]
if currentBoardLetter != letterThatIWant {
return false
}
board[rowIndex][columnIndex] = ' '
wordHasBeenFound := dfs(board, rowIndex-1, columnIndex, currentLetterIndex+1, word) ||
dfs(board, rowIndex+1, columnIndex, currentLetterIndex+1, word) ||
dfs(board, rowIndex, columnIndex-1, currentLetterIndex+1, word) ||
dfs(board, rowIndex, columnIndex+1, currentLetterIndex+1, word)
board[rowIndex][columnIndex] = currentBoardLetter
return wordHasBeenFound
}
func exist(board [][]byte, word string) bool {
for rowIndex, row := range board {
for columnIndex, letter := range row {
if strings.HasPrefix(word, string(letter)) && dfs(board, rowIndex, columnIndex, 0, word) {
return true
}
}
}
return false
}
func main() {
} | golang/algorithms/others/word_search/main.go | 0.592431 | 0.429728 | main.go | starcoder |
// Package parser contains logic for parsing Herd-style observations.
package parser
import (
"bufio"
"fmt"
"io"
"strings"
"github.com/c4-project/c4t/internal/subject/obs"
)
// Parse parses an observation from r into o using i.
func Parse(i Impl, r io.Reader, o *obs.Obs) error {
p := parser{impl: i, o: o}
return p.parse(r)
}
// parser holds the state for a Herdtools parser.
type parser struct {
// impl tells us how to perform the Herd/Litmus-specific parts of the parsing set-up.
impl Impl
// o is the observation we're creating.
o *obs.Obs
// tt is the test type, if any.
tt TestType
// state is the current state of the parsing FSA.
state state
// nstates is the number of states left to read if we're in psState.
nstates uint64
}
// parse parses r into this parser.
func (p *parser) parse(r io.Reader) error {
if p.impl == nil {
return ErrNoImpl
}
if err := p.parseLines(r); err != nil {
return err
}
return p.checkFinalState()
}
// parseLine processes the lines of a Herdtools observation from reader r.
func (p *parser) parseLines(r io.Reader) error {
s := bufio.NewScanner(r)
lineno := 1
for s.Scan() {
if err := p.parseLine(s.Text()); err != nil {
return fmt.Errorf("line %d (%q): %w", lineno, s.Text(), err)
}
lineno++
}
return s.Err()
}
// parseLine processes a single line of a Herdtools observation.
func (p *parser) parseLine(line string) error {
fields := strings.Fields(line)
switch p.state {
case psEmpty:
return p.processEmpty(fields)
case psPreTest:
return p.processPreTest(fields)
case psPreamble:
return p.processPreamble(fields)
case psState:
return p.processState(fields)
case psSummary:
return p.processSummary(fields)
case psPostamble:
// TODO(@MattWindsor91): do something with this?
return nil
default:
return fmt.Errorf("%w: %d", ErrBadState, p.state)
}
}
func (p *parser) processEmpty(fields []string) error {
if err := p.afterBegin(); err != nil {
return err
}
return p.processPreTest(fields)
}
func (p *parser) processPreTest(fields []string) error {
nf := len(fields)
if nf == 0 {
return nil
}
if fields[0] != "Test" {
return p.processPreTestImplHooks(fields)
}
if nf != 3 {
return fmt.Errorf("%w: expected three fields, got %d", ErrBadTestType, nf)
}
return p.processTestType(fields)
}
func (p *parser) processTestType(fields []string) error {
var err error
if p.tt, err = parseTestType(fields[2]); err != nil {
return err
}
p.o.Flags |= p.tt.Flags()
return p.afterPreTest()
}
// processPreTestImplHooks handles passing a pre-test line to the implementation to scan for flags.
func (p *parser) processPreTestImplHooks(fields []string) error {
f, err := p.impl.ParsePreTestLine(fields)
if err != nil {
return err
}
p.o.Flags |= f
return nil
}
func (p *parser) processPreamble(fields []string) error {
nstates, ok, err := p.impl.ParseStateCount(fields)
if err != nil {
return err
}
if !ok {
// Skip this line.
return nil
}
return p.afterPreamble(nstates)
}
func (p *parser) processState(fields []string) error {
sl, err := p.impl.ParseStateLine(p.tt, fields)
if err != nil {
return err
}
if err := p.processStateLine(sl); err != nil {
return err
}
return p.afterStateLine()
}
func (p *parser) processSummary(fields []string) error {
// Herd and Litmus themselves always follow the final state line with a summary;
// the leniency here, as often is the case, is mainly for rmem and other herd-a-likes.
nf := len(fields)
// Some summary lines might be 'Flag (description)'.
if nf == 0 {
return nil
}
f, ok := parseFlag(fields[0])
if !ok {
// We want to catch possible mismatches between state count and state lines.
return p.errorIfStateLine(fields)
}
// Making sure not to override any partiality flags already parsed.
p.o.Flags |= f
return p.afterSummary()
}
func (p *parser) errorIfStateLine(fields []string) error {
if _, err := p.impl.ParseStateLine(p.tt, fields); err != nil {
// Intentional
return nil
}
return fmt.Errorf("%w: possible extraneous state line", ErrBadSummary)
}
// parseFlag parses the summary flag f as an observation flag.
func parseFlag(f string) (flag obs.Flag, ok bool) {
ok = true
switch f {
case "Ok":
flag = obs.Sat
case "No":
flag = obs.Unsat
case "Undef":
flag = obs.Undef
default:
ok = false
}
return
} | internal/serviceimpl/backend/herdstyle/parser/parser.go | 0.6705 | 0.459561 | parser.go | starcoder |
package utils
// IncludesString if the list contains the string
func IncludesString(list []string, a string) bool {
for _, b := range list {
if b == a {
return true
}
}
return false
}
// IncludesInt if the list contains the Int
func IncludesInt(list []int, a int) bool {
for _, b := range list {
if b == a {
return true
}
}
return false
}
// NextIndex returns the index of the element that comes after the given number
func NextIndex(numbers []int, currentNumber int) int {
for index, number := range numbers {
if number > currentNumber {
return index
}
}
return len(numbers) - 1
}
// PrevIndex returns the index that comes before the given number, cycling if we reach the end
func PrevIndex(numbers []int, currentNumber int) int {
end := len(numbers) - 1
for i := end; i >= 0; i-- {
if numbers[i] < currentNumber {
return i
}
}
return 0
}
// UnionInt returns the union of two int arrays
func UnionInt(a, b []int) []int {
m := make(map[int]bool)
for _, item := range a {
m[item] = true
}
for _, item := range b {
if _, ok := m[item]; !ok {
// this does not mutate the original a slice
// though it does mutate the backing array I believe
// but that doesn't matter because if you later want to append to the
// original a it must see that the backing array has been changed
// and create a new one
a = append(a, item)
}
}
return a
}
// DifferenceInt returns the difference of two int arrays
func DifferenceInt(a, b []int) []int {
result := []int{}
m := make(map[int]bool)
for _, item := range b {
m[item] = true
}
for _, item := range a {
if _, ok := m[item]; !ok {
result = append(result, item)
}
}
return result
}
// NextIntInCycle returns the next int in a slice, returning to the first index if we've reached the end
func NextIntInCycle(sl []int, current int) int {
for i, val := range sl {
if val == current {
if i == len(sl)-1 {
return sl[0]
}
return sl[i+1]
}
}
return sl[0]
}
// PrevIntInCycle returns the prev int in a slice, returning to the first index if we've reached the end
func PrevIntInCycle(sl []int, current int) int {
for i, val := range sl {
if val == current {
if i > 0 {
return sl[i-1]
}
return sl[len(sl)-1]
}
}
return sl[len(sl)-1]
}
func StringArraysOverlap(strArrA []string, strArrB []string) bool {
for _, first := range strArrA {
for _, second := range strArrB {
if first == second {
return true
}
}
}
return false
}
func Uniq(values []string) []string {
added := make(map[string]bool)
result := make([]string, 0, len(values))
for _, value := range values {
if added[value] {
continue
}
added[value] = true
result = append(result, value)
}
return result
}
func Limit(values []string, limit int) []string {
if len(values) > limit {
return values[:limit]
}
return values
}
func Reverse(values []string) []string {
result := make([]string, len(values))
for i, val := range values {
result[len(values)-i-1] = val
}
return result
}
func LimitStr(value string, limit int) string {
n := 0
for i := range value {
if n >= limit {
return value[:i]
}
n++
}
return value
} | pkg/utils/slice.go | 0.732974 | 0.435661 | slice.go | starcoder |
package subsampler
import (
"errors"
"jpeg2000/data"
)
type Subsampler interface {
Subsample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer)
Supersample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer)
ToProtobuf() data.Subsampling
}
func ScaleLayers(y1, u1, v1 data.Layer, xScale, yScale int) (data.Layer, data.Layer, data.Layer) {
ySizeX, ySizeY := y1.GetDimensions()
uSizeX, uSizeY := u1.GetDimensions()
vSizeX, vSizeY := v1.GetDimensions()
if xScale*uSizeX != ySizeX {
panic("Invalid X size for U layer")
}
if xScale*vSizeX != ySizeX {
panic("Invalid X size for V layer")
}
if yScale*uSizeY != ySizeY {
panic("Invalid Y size for U layer")
}
if yScale*vSizeY != ySizeY {
panic("Invalid Y size for V layer")
}
y2 := y1.Copy()
u2 := u1.ScaleInteger(xScale, yScale)
v2 := v1.ScaleInteger(xScale, yScale)
return y2, u2, v2
}
type Subsampler410 struct{}
func (s *Subsampler410) Subsample(y1, u1, v1 data.Layer) (data.Layer, data.Layer, data.Layer) {
ySizeX, ySizeY := y1.GetDimensions()
uSizeX, uSizeY := u1.GetDimensions()
vSizeX, vSizeY := v1.GetDimensions()
if ySizeX != uSizeX || ySizeX != vSizeX {
panic("X dimensions aren't equal")
}
if ySizeY != uSizeY || ySizeY != vSizeY {
panic("Y dimensions aren't equal")
}
sizeX := ySizeX
sizeY := ySizeY
y2 := data.NewLayer(sizeX, sizeY)
u2 := data.NewLayer(sizeX/4, sizeY/2)
v2 := data.NewLayer(sizeX/4, sizeY/2)
for j := 0; j < sizeY; j++ {
for i := 0; i < sizeX; i++ {
y2[j][i] = y1[j][i]
}
}
for j := 0; j < sizeY/2; j++ {
for i := 0; i < sizeX/4; i++ {
u := float32(0)
u += u1[2*j+0][4*i+0]
u += u1[2*j+0][4*i+1]
u += u1[2*j+0][4*i+2]
u += u1[2*j+0][4*i+3]
u += u1[2*j+1][4*i+0]
u += u1[2*j+1][4*i+1]
u += u1[2*j+1][4*i+2]
u += u1[2*j+1][4*i+3]
u2[j][i] = u / 8
v := float32(0)
v += v1[2*j+0][4*i+0]
v += v1[2*j+0][4*i+1]
v += v1[2*j+0][4*i+2]
v += v1[2*j+0][4*i+3]
v += v1[2*j+1][4*i+0]
v += v1[2*j+1][4*i+1]
v += v1[2*j+1][4*i+2]
v += v1[2*j+1][4*i+3]
v2[j][i] = v / 8
}
}
return y2, u2, v2
}
func (s *Subsampler410) Supersample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer) {
return ScaleLayers(y, u, v, 4, 2)
}
func (s *Subsampler410) ToProtobuf() data.Subsampling {
return data.Subsampling_SUBSAMPLING_410
}
type Subsampler420 struct{}
func (s *Subsampler420) Subsample(y1, u1, v1 data.Layer) (data.Layer, data.Layer, data.Layer) {
ySizeX, ySizeY := y1.GetDimensions()
uSizeX, uSizeY := u1.GetDimensions()
vSizeX, vSizeY := v1.GetDimensions()
if ySizeX != uSizeX || ySizeX != vSizeX {
panic("X dimensions aren't equal")
}
if ySizeY != uSizeY || ySizeY != vSizeY {
panic("Y dimensions aren't equal")
}
sizeX := ySizeX
sizeY := ySizeY
y2 := data.NewLayer(sizeX, sizeY)
u2 := data.NewLayer(sizeX/2, sizeY/2)
v2 := data.NewLayer(sizeX/2, sizeY/2)
for j := 0; j < sizeY; j++ {
for i := 0; i < sizeX; i++ {
y2[j][i] = y1[j][i]
}
}
for j := 0; j < sizeY/2; j++ {
for i := 0; i < sizeX/2; i++ {
u := float32(0)
u += u1[2*j+0][2*i+0]
u += u1[2*j+1][2*i+0]
u += u1[2*j+1][2*i+1]
u += u1[2*j+0][2*i+1]
u2[j][i] = u / 4
v := float32(0)
v += v1[2*j+0][2*i+0]
v += v1[2*j+1][2*i+0]
v += v1[2*j+1][2*i+1]
v += v1[2*j+0][2*i+1]
v2[j][i] = v / 4
}
}
return y2, u2, v2
}
func (s *Subsampler420) Supersample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer) {
return ScaleLayers(y, u, v, 2, 2)
}
func (s *Subsampler420) ToProtobuf() data.Subsampling {
return data.Subsampling_SUBSAMPLING_420
}
type Subsampler422 struct{}
func (s *Subsampler422) Subsample(y1, u1, v1 data.Layer) (data.Layer, data.Layer, data.Layer) {
ySizeX, ySizeY := y1.GetDimensions()
uSizeX, uSizeY := u1.GetDimensions()
vSizeX, vSizeY := v1.GetDimensions()
if ySizeX != uSizeX || ySizeX != vSizeX {
panic("X dimensions aren't equal")
}
if ySizeY != uSizeY || ySizeY != vSizeY {
panic("Y dimensions aren't equal")
}
sizeX := ySizeX
sizeY := ySizeY
y2 := data.NewLayer(sizeX, sizeY)
u2 := data.NewLayer(sizeX/2, sizeY)
v2 := data.NewLayer(sizeX/2, sizeY)
for j := 0; j < sizeY; j++ {
for i := 0; i < sizeX; i++ {
y2[j][i] = y1[j][i]
}
}
for j := 0; j < sizeY; j++ {
for i := 0; i < sizeX/2; i++ {
u := float32(0)
u += u1[j][2*i+0]
u += u1[j][2*i+1]
u2[j][i] = u / 2
v := float32(0)
v += v1[j][2*i+0]
v += v1[j][2*i+1]
v2[j][i] = v / 2
}
}
return y2, u2, v2
}
func (s *Subsampler422) Supersample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer) {
return ScaleLayers(y, u, v, 2, 1)
}
func (s *Subsampler422) ToProtobuf() data.Subsampling {
return data.Subsampling_SUBSAMPLING_422
}
type Subsampler444 struct{}
func (s *Subsampler444) Subsample(y1, u1, v1 data.Layer) (data.Layer, data.Layer, data.Layer) {
ySizeX, ySizeY := y1.GetDimensions()
uSizeX, uSizeY := u1.GetDimensions()
vSizeX, vSizeY := v1.GetDimensions()
if ySizeX != uSizeX || ySizeX != vSizeX {
panic("X dimensions aren't equal")
}
if ySizeY != uSizeY || ySizeY != vSizeY {
panic("Y dimensions aren't equal")
}
y2 := y1.Copy()
u2 := u1.Copy()
v2 := v1.Copy()
return y2, u2, v2
}
func (s *Subsampler444) Supersample(y, u, v data.Layer) (data.Layer, data.Layer, data.Layer) {
return y.Copy(), u.Copy(), v.Copy()
}
func (s *Subsampler444) ToProtobuf() data.Subsampling {
return data.Subsampling_SUBSAMPLING_444
}
func FromCommandLine(arg string) (Subsampler, error) {
switch arg {
case "410":
return &Subsampler410{}, nil
case "420":
return &Subsampler420{}, nil
case "422":
return &Subsampler422{}, nil
case "444":
return &Subsampler444{}, nil
default:
return nil, errors.New("Unrecognized subsampling format")
}
}
func FromProtobuf(d data.Subsampling) (Subsampler, error) {
switch d {
case data.Subsampling_SUBSAMPLING_410:
return &Subsampler410{}, nil
case data.Subsampling_SUBSAMPLING_420:
return &Subsampler420{}, nil
case data.Subsampling_SUBSAMPLING_422:
return &Subsampler422{}, nil
case data.Subsampling_SUBSAMPLING_444:
return &Subsampler444{}, nil
default:
return nil, errors.New("Unknown subsampling format from protobuf")
}
} | labo-2/jpeg2000/subsampler/subsampler.go | 0.541894 | 0.426023 | subsampler.go | starcoder |
package continuous
import (
"github.com/jtejido/stats"
"github.com/jtejido/stats/err"
"math"
"math/rand"
)
// Johnson SL Distribution (Semi-bounded)
// https://reference.wolfram.com/language/ref/JohnsonDistribution.html
type JohnsonSL struct {
gamma, delta, location, scale float64 // γ, δ, location μ, and scale σ
src rand.Source
}
func NewJohnsonSL(gamma, delta, location, scale float64) (*JohnsonSL, error) {
return NewJohnsonSLWithSource(gamma, delta, location, scale, nil)
}
func NewJohnsonSLWithSource(gamma, delta, location, scale float64, src rand.Source) (*JohnsonSL, error) {
if delta <= 0 && scale <= 0 {
return nil, err.Invalid()
}
return &JohnsonSL{gamma, delta, location, scale, src}, nil
}
// γ ∈ (-∞,∞)
// δ ∈ (0,∞)
// μ ∈ (-∞,∞)
// σ ∈ (0,∞)
func (j *JohnsonSL) Parameters() stats.Limits {
return stats.Limits{
"γ": stats.Interval{math.Inf(-1), math.Inf(1), true, true},
"δ": stats.Interval{0, math.Inf(1), true, true},
"μ": stats.Interval{math.Inf(-1), math.Inf(1), true, true},
"σ": stats.Interval{0, math.Inf(1), true, true},
}
}
// x ∈ (μ,∞)
func (j *JohnsonSL) Support() stats.Interval {
return stats.Interval{j.location, math.Inf(1), true, true}
}
func (j *JohnsonSL) Probability(x float64) float64 {
if x > j.location {
return j.delta / (math.Exp(.5*math.Pow(j.gamma+j.delta*math.Log((x-j.location)/j.scale), 2)) * (math.Sqrt(2*math.Pi) * (x - j.location)))
}
return 0
}
func (j *JohnsonSL) Distribution(x float64) float64 {
if x >= j.location && x <= j.location+j.scale {
return .5 * (math.Erfc(-(j.gamma + j.delta*math.Log((x-j.location)/j.scale)/math.Sqrt(2))))
} else if x > j.location+j.scale {
return .5 * (1 + math.Erf(j.gamma+j.delta*math.Log((x-j.location)/j.scale)/math.Sqrt(2)))
}
return 0
}
func (j *JohnsonSL) Mean() float64 {
return j.location + math.Exp((1-2*j.gamma*j.delta)/(2*(j.delta*j.delta)))*j.scale
}
func (j *JohnsonSL) Variance() float64 {
return math.Exp((1-2*j.gamma*j.delta)/(j.delta*j.delta)) * (-1 + math.Exp(1/(j.delta*j.delta))) * (j.scale * j.scale)
}
func (j *JohnsonSL) Median() float64 {
return j.location + j.scale/math.Exp(j.gamma/j.delta)
}
func (j *JohnsonSL) ExKurtosis() float64 {
return math.Exp(2/(j.delta*j.delta)) * (3 + math.Exp(1/(j.delta*j.delta))*(2+math.Exp(1/(j.delta*j.delta))))
}
func (j *JohnsonSL) Entropy() float64 {
stats.NotImplementedError()
return math.NaN()
}
func (j *JohnsonSL) Inverse(q float64) float64 {
if q <= 0 {
return j.location
}
if q >= 1 {
return math.Inf(1)
}
return j.location + math.Exp((-j.gamma-math.Sqrt(2)*math.Erfcinv(2*q))/j.delta)*j.scale
}
func (j *JohnsonSL) Rand() float64 {
var rnd float64
if j.src != nil {
rnd = rand.New(j.src).Float64()
} else {
rnd = rand.Float64()
}
return j.Inverse(rnd)
} | dist/continuous/johnson_sl.go | 0.808899 | 0.455683 | johnson_sl.go | starcoder |
package cache
import (
"fmt"
"math"
"columbia.github.com/privatekube/dpfscheduler/pkg/scheduler/util"
columbiav1 "columbia.github.com/privatekube/privacyresource/pkg/apis/columbia.github.com/v1"
"k8s.io/klog"
)
type StreamingCounter struct {
LaplaceNoise float64
Budget *columbiav1.PrivacyBudget
T int
n_bits int
current_index int
counts map[int]int
alpha map[int]int
n_alpha map[int]float64
}
type StreamingCounterOptions struct {
LaplaceNoise float64
MaxNumberOfTicks int
}
/// NewStreamingCounter builds a new counter.
/// We could also load the counter from a previous set of blocks (persisted in etcd) in case we need to rebuild the cache.
func NewStreamingCounter(options *StreamingCounterOptions) *StreamingCounter {
// The budget is lazily initialized depending on the type of the blocks (RDP or not)
// All the counters are initialized to zero
return &StreamingCounter{
LaplaceNoise: options.LaplaceNoise,
T: options.MaxNumberOfTicks,
n_bits: int(math.Ceil(math.Log2(float64(options.MaxNumberOfTicks)))),
current_index: 1,
counts: make(map[int]int),
alpha: make(map[int]int),
n_alpha: make(map[int]float64),
}
}
/// Feed extends the stream with `n_ticks` new bits, where `n_new_users` bits are 1
/// Feed is called only inside cache.AddBlock and cache.blockLock.Lock() ensures that there is no race condition
func (counter *StreamingCounter) Feed(n_new_users int, n_ticks int) error {
if n_new_users > n_ticks {
// The block loader will ensure that this error does not happen
// If you want to use fewer ticks, you should use a different counter algorithm
return fmt.Errorf("there are more 1 bits than the length of the stream")
}
if (counter.current_index-1)+n_ticks > counter.T {
return fmt.Errorf("this counter is full: the maximum stream length has been reached")
}
for t := counter.current_index; t < counter.current_index+n_ticks; t++ {
i := 0
for bin_digit(t, i) == 0 {
i++
}
// We simulate n_ticks where only the `n_new_users` ticks are 1
if t-counter.current_index < n_new_users {
counter.alpha[i] = 1
} else {
counter.alpha[i] = 0
}
// All the j < i are added to the new (larger) psum and erased
// alpha[i] is the psum of the 2^i last items before t
for j := 0; j < i; j++ {
counter.alpha[i] += counter.alpha[j]
counter.alpha[j] = 0
counter.n_alpha[j] = 0.0
}
// Sanitize and release the psum
counter.n_alpha[i] = float64(counter.alpha[i]) + util.LaplaceNoise(counter.LaplaceNoise)
// Add psums of the remaining items
c := 0.0
for j := 0; j < counter.n_bits; j++ {
if bin_digit(t, j) == 1 {
c += counter.n_alpha[j]
}
}
// Map to the closest credible value. No stream consistency here.
counter.counts[t] = int(math.Ceil(math.Max(0, c)))
println("Count", t, counter.counts[t])
}
counter.current_index += n_ticks
return nil
}
/// bin_digit returns the ith digit of t in binary, 0th digit is the least significant.
func bin_digit(t int, i int) int {
return (t >> i) % 2
}
/// GetLaplaceBudgetAs returns the budget spent by the counter for a given block, either in RDP or epsilon-delta DP
func (counter *StreamingCounter) GetLaplaceBudgetAs(target *columbiav1.PrivacyBudget) columbiav1.PrivacyBudget {
// Cache the result of the computation assuming all the blocks have the same tracking set of RDP orders
if counter.Budget != nil {
return *counter.Budget
}
var budget columbiav1.PrivacyBudget
if target.IsEpsDelType() {
budget = columbiav1.NewPrivacyBudget(float(counter.n_bits)/counter.LaplaceNoise, 0, false)
} else {
// See extended paper for the RDP curve, which corresponds to the sum of log T curves of the Laplace mechanism
b := make(columbiav1.RenyiBudget, 0, len(target.Renyi))
target.Copy()
for i := range target.Renyi {
alpha := target.Renyi[i].Alpha
epsilon := float(counter.n_bits) * laplaceRDP(counter.LaplaceNoise, alpha)
if !math.IsInf(epsilon, 0) && !math.IsNaN(epsilon) {
b = append(b, columbiav1.RenyiBudgetBlock{
Alpha: alpha,
Epsilon: epsilon,
})
}
}
budget = columbiav1.PrivacyBudget{
EpsDel: nil,
Renyi: b,
}
}
counter.Budget = &budget
return budget
}
/// laplaceRDP computes the RDP curve of the Laplace mechanism for alpha > 1
/// Ref: Table II of the RDP paper (https://arxiv.org/pdf/1702.07476.pdf)
func laplaceRDP(l float64, alpha float64) float64 {
a := alpha * math.Exp((alpha-1)/l) / (2*alpha - 1)
b := (alpha - 1) * math.Exp(-1*alpha/l) / (2*alpha - 1)
return math.Log(a+b) / (alpha - 1)
}
/// count returns the DP count of 1s in the stream at tick `t`. We don't actually use all the ticks.
func (counter *StreamingCounter) count(t int) int {
return counter.counts[t]
}
/// CountLast returns the DP count for the last tick along with the value of that tick.
/// In our case, we can release and store this result into the newly-appended block.
func (counter *StreamingCounter) CountLast() (int, int) {
// current_index starts at 1
return counter.count(counter.current_index - 1), counter.current_index - 1
}
/// UpdateCount computes the DP count (if possible). DPCount is the cumulative count.
/// It updates the state of the counter and writes the result back into the block.
func (counter *StreamingCounter) UpdateCount(block *columbiav1.PrivateDataBlock) {
n_new_users, n_ticks, count_index, err := getNonDPCounts(block)
if err != nil {
klog.Info("Failed to retrieve counts from this block:", err)
return
}
err = counter.Feed(n_new_users, n_ticks)
if err != nil {
klog.Info("Failed to update the counter:", err)
return
}
dp_count, tick := counter.CountLast()
// Pay for the result we just got
budget := counter.GetLaplaceBudgetAs(&block.Spec.InitialBudget).Copy()
block.Status.CommittedBudgetMap["counter"] = budget
block.Status.PendingBudget = block.Status.PendingBudget.Minus(budget).NonNegative()
// Store the DP count in the block's dimensions, along with the corresponding tick
block.Spec.Dimensions[count_index] = columbiav1.Dimension{
Attribute: "DPCount",
NumericValue: util.ToDecimal(dp_count),
StringValue: fmt.Sprintf("%d", tick),
}
}
/// getNonDPCounts tries to read the number of new newers from a block's metadata
/// `count_index` is the index in the `Dimensions` list that contains the non-DP count. We will use it to overwrite the count with DPCount instead.
func getNonDPCounts(block *columbiav1.PrivateDataBlock) (int, int, int, error) {
n_new_users, n_ticks, count_index := -1, -1, -1
var err error
for i, dim := range block.Spec.Dimensions {
if dim.Attribute == "NNewUsers" {
n_new_users = int(dim.NumericValue.IntPart())
count_index = i
}
if dim.Attribute == "NTicks" {
n_ticks = int(dim.NumericValue.IntPart())
}
}
// If the block doesn't have any user count attributes, the variables remain at -1
if n_new_users < 0 || n_ticks < 0 {
err = fmt.Errorf("this block doesn't have a user count")
}
if n_new_users > n_ticks {
n_new_users = n_ticks
klog.Warning("there are more new users than ticks. You should increase the number of ticks (e.g. every millisecond instead of every second). Instead of crashing, we truncated the number of new users for this block: n_new_users := n_ticks")
}
return n_new_users, n_ticks, count_index, err
} | system/dpfscheduler/pkg/scheduler/cache/counter.go | 0.78016 | 0.409516 | counter.go | starcoder |
// http://play.golang.org/p/xy-wyPrsjz
// Declare a struct type that represents a request for a customer invoice. Include a CustomerID and InvoiceID field. Define
// tags that can be used to validate the request. Define tags that specify both the length and range for the ID to be valid.
// Declare a function named validate that accepts values of any type and processes the tags. Display the resutls of the validation.
package main
import (
"fmt"
"reflect"
"strconv"
"strings"
"unicode/utf8"
)
// Customer represents a customer
type Customer struct {
CustomerID int `length:"3" range:"100-300"`
InvoiceID int `length:"5" range:"60000-99999"`
}
// main is the entry point for the application.
func main() {
// Declare a variable of type Customer.
customer := Customer{
CustomerID: 202,
InvoiceID: 76545,
}
// Validate the value and display the results.
validate(&customer)
}
// validate performs data validation on any struct type value.
func validate(value interface{}) {
// Retrieve the value that the interface contains or points to.
val := reflect.ValueOf(value).Elem()
// Iterate over the fields of the struct value.
for i := 0; i < val.NumField(); i++ {
// Retrieve the field information.
typeField := val.Type().Field(i)
// Get the value as an int, string and the length.
field := typeField.Name
value := int(val.Field(i).Int())
stringValue := strconv.Itoa(value)
valueLength := utf8.RuneCountInString(stringValue)
// Test the length first
length, _ := strconv.Atoi(typeField.Tag.Get("length"))
if valueLength != length {
fmt.Printf("Invalid Length: Field[%s] Value[%d] - Len[%d] - ExpLen[%d]\n", field, value, valueLength, length)
continue
}
// Test the range.
r := strings.Split(typeField.Tag.Get("range"), "-")
front, _ := strconv.Atoi(r[0])
end, _ := strconv.Atoi(r[1])
if value < front || value > end {
fmt.Printf("Invalid Range: Field[%s] Value[%d] - Front[%d] - End[%d]\n", field, value, front, end)
continue
}
fmt.Printf("VALID: Field[%s] Value[%d]\n", field, value)
}
} | 12-reflection/exercises/exercise1/exercise1.go | 0.662906 | 0.429429 | exercise1.go | starcoder |
package maps
import (
"fmt"
"github.com/mbark/advent-of-code-2021/util"
)
type Cuboid struct {
From Coordinate3D
To Coordinate3D
}
func (c Cuboid) Coordinates() []Coordinate3D {
var coordinates []Coordinate3D
for x := c.From.X; x <= c.To.X; x++ {
for y := c.From.Y; y <= c.To.Y; y++ {
for z := c.From.Z; z <= c.To.Z; z++ {
coordinates = append(coordinates, Coordinate3D{
X: x, Y: y, Z: z,
})
}
}
}
return coordinates
}
func (c Cuboid) Contains(co Cuboid) bool {
return c.From.X <= co.From.X && c.To.X >= co.To.X &&
c.From.Y <= co.From.Y && c.To.Y >= co.To.Y &&
c.From.Z <= co.From.Z && c.To.Z >= co.To.Z
}
func (c Cuboid) String() string {
return fmt.Sprintf("(x=%d..%d,y=%d..%d,z=%d..%d)",
c.From.X, c.To.X, c.From.Y, c.To.Y, c.From.Z, c.To.Z)
}
func (c Cuboid) Size() int {
return util.AbsInt(1 *
(c.To.X - c.From.X) *
(c.To.Y - c.From.Y) *
(c.To.Z - c.From.Z))
}
func (c Cuboid) Subdivide(co Cuboid) ([]Cuboid, *Cuboid, []Cuboid) {
if !c.IsOverlapping(co) {
return []Cuboid{c}, nil, []Cuboid{co}
}
xvals := []int{
util.MinInt(c.From.X, co.From.X),
util.MaxInt(c.From.X, co.From.X),
util.MinInt(c.To.X, co.To.X),
util.MaxInt(c.To.X, co.To.X),
}
yvals := []int{
util.MinInt(c.From.Y, co.From.Y),
util.MaxInt(c.From.Y, co.From.Y),
util.MinInt(c.To.Y, co.To.Y),
util.MaxInt(c.To.Y, co.To.Y),
}
zvals := []int{
util.MinInt(c.From.Z, co.From.Z),
util.MaxInt(c.From.Z, co.From.Z),
util.MinInt(c.To.Z, co.To.Z),
util.MaxInt(c.To.Z, co.To.Z),
}
var cCuboids []Cuboid
var sharedCuboid *Cuboid
var coCuboids []Cuboid
for xi := 0; xi < len(xvals)-1; xi++ {
for yi := 0; yi < len(yvals)-1; yi++ {
for zi := 0; zi < len(zvals)-1; zi++ {
cuboid := Cuboid{
From: Coordinate3D{
X: xvals[xi],
Y: yvals[yi],
Z: zvals[zi],
},
To: Coordinate3D{
X: xvals[xi+1],
Y: yvals[yi+1],
Z: zvals[zi+1],
},
}
switch {
case c.Contains(cuboid) && co.Contains(cuboid):
sharedCuboid = &cuboid
case !c.Contains(cuboid) && co.Contains(cuboid):
coCuboids = append(coCuboids, cuboid)
case c.Contains(cuboid) && !co.Contains(cuboid):
cCuboids = append(cCuboids, cuboid)
}
}
}
}
return cCuboids, sharedCuboid, coCuboids
}
func (c Cuboid) IsOverlapping(co Cuboid) bool {
minx := util.MaxInt(c.From.X, co.From.X)
miny := util.MaxInt(c.From.Y, co.From.Y)
minz := util.MaxInt(c.From.Z, co.From.Z)
return c.From.X <= minx && c.To.X >= minx &&
co.From.X <= minx && co.To.X > minx &&
c.From.Y <= miny && c.To.Y >= miny &&
co.From.Y <= miny && co.To.Y > miny &&
c.From.Z <= minz && c.To.Z >= minz &&
co.From.Z <= minz && co.To.Z > minz
}
func (c Cuboid) Overlapping(co Cuboid) *Cuboid {
if !c.IsOverlapping(co) {
return nil
}
return &Cuboid{
From: Coordinate3D{
X: util.MaxInt(co.From.X, c.From.X),
Y: util.MaxInt(co.From.Y, c.From.Y),
Z: util.MaxInt(co.From.Z, c.From.Z),
},
To: Coordinate3D{
X: util.MinInt(co.To.X, c.To.X),
Y: util.MinInt(co.To.Y, c.To.Y),
Z: util.MinInt(co.To.Z, c.To.Z),
},
}
} | maps/cube.go | 0.690663 | 0.487124 | cube.go | starcoder |
package unencrypted_asset
import (
"github.com/threagile/threagile/model"
)
func Category() model.RiskCategory {
return model.RiskCategory{
Id: "unencrypted-asset",
Title: "Unencrypted Technical Assets",
Description: "Devido à classificação de confidencialidade do próprio ativo técnico e / ou os ativos de dados processados " +
"Este ativo técnico deve ser criptografado.A classificação de risco depende do próprio ativo técnico da sensibilidade e dos ativos de dados armazenados.",
Impact: "Se este risco for ignorado, os invasores poderão acessar dados não criptografados quando comprometer com êxito componentes sensíveis.",
ASVS: "V6 - Stored Cryptography Verification Requirements",
CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Cryptographic_Storage_Cheat_Sheet.html",
Action: "Encryption of Technical Asset",
Mitigation: "Aplique criptografia ao ativo técnico.",
Check: "As recomendações do cheat sheet e do ASVS/CSVS referenciado são aplicadas?",
Function: model.Operations,
STRIDE: model.InformationDisclosure,
DetectionLogic: "Ativos técnicos não criptografados no escopo (excluindo " + model.ReverseProxy.String() +
", " + model.LoadBalancer.String() + ", " + model.WAF.String() + ", " + model.IDS.String() +
", " + model.IPS.String() + "e componentes incorporados como " + model.Library.String() + ") " +
"armazenar ativos de dados classificados pelo menos como " + model.Confidential.String() + " ou " + model.Critical.String() + ". " +
"Para ativos técnicos que armazenam ativos de dados classificados como " + model.StrictlyConfidential.String() + " ou " + model.MissionCritical.String() + " a " +
"criptografia deve ser do tipo " + model.DataWithEnduserIndividualKey.String() + ".",
RiskAssessment: "Dependendo da classificação de confidencialidade dos ativos de dados armazenados ou de alto risco.",
FalsePositives: "Quando todos os dados confidenciais armazenados dentro do ativo já estiver totalmente criptografado no documento ou no nível de dados.",
ModelFailurePossibleReason: false,
CWE: 311,
}
}
func SupportedTags() []string {
return []string{}
}
// check for technical assets that should be encrypted due to their confidentiality
func GenerateRisks() []model.Risk {
risks := make([]model.Risk, 0)
for _, id := range model.SortedTechnicalAssetIDs() {
technicalAsset := model.ParsedModelRoot.TechnicalAssets[id]
if !technicalAsset.OutOfScope && !IsEncryptionWaiver(technicalAsset) &&
(technicalAsset.HighestConfidentiality() >= model.Confidential ||
technicalAsset.HighestIntegrity() >= model.Critical) {
verySensitive := technicalAsset.HighestConfidentiality() == model.StrictlyConfidential ||
technicalAsset.HighestIntegrity() == model.MissionCritical
requiresEnduserKey := verySensitive && technicalAsset.Technology.IsUsuallyStoringEnduserData()
if technicalAsset.Encryption == model.NoneEncryption {
impact := model.MediumImpact
if verySensitive {
impact = model.HighImpact
}
risks = append(risks, createRisk(technicalAsset, impact, requiresEnduserKey))
} else if requiresEnduserKey &&
(technicalAsset.Encryption == model.Transparent || technicalAsset.Encryption == model.DataWithSymmetricSharedKey || technicalAsset.Encryption == model.DataWithAsymmetricSharedKey) {
risks = append(risks, createRisk(technicalAsset, model.MediumImpact, requiresEnduserKey))
}
}
}
return risks
}
// Simple routing assets like 'Reverse Proxy' or 'Load Balancer' usually don't have their own storage and thus have no
// encryption requirement for the asset itself (though for the communication, but that's a different rule)
func IsEncryptionWaiver(asset model.TechnicalAsset) bool {
return asset.Technology == model.ReverseProxy || asset.Technology == model.LoadBalancer ||
asset.Technology == model.WAF || asset.Technology == model.IDS || asset.Technology == model.IPS ||
asset.Technology.IsEmbeddedComponent()
}
func createRisk(technicalAsset model.TechnicalAsset, impact model.RiskExploitationImpact, requiresEnduserKey bool) model.Risk {
title := "<b>Unencrypted Technical Asset</b> named <b>" + technicalAsset.Title + "</b>"
if requiresEnduserKey {
title += " missing enduser-individual encryption with " + model.DataWithEnduserIndividualKey.String()
}
risk := model.Risk{
Category: Category(),
Severity: model.CalculateSeverity(model.Unlikely, impact),
ExploitationLikelihood: model.Unlikely,
ExploitationImpact: impact,
Title: title,
MostRelevantTechnicalAssetId: technicalAsset.Id,
DataBreachProbability: model.Improbable,
DataBreachTechnicalAssetIDs: []string{technicalAsset.Id},
}
risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id
return risk
} | risks/built-in/unencrypted-asset/unencrypted-asset-rule.go | 0.557364 | 0.47658 | unencrypted-asset-rule.go | starcoder |
package math
import (
"math"
)
// Pi is math.Pi but as a float32 type
const Pi = float32(math.Pi)
const Pi2 = Pi * 2
const Sqrt2 = float32(math.Sqrt2)
const MaxFloat32 = math.MaxFloat32
const RadiansToDegrees = 180 / Pi
const DegreeToRadians = Pi / 180
const RadFull = Pi * 2
const DegFull = 360
const NanoToSec = 1 / 1000000000
const NORMALIZATION_TOLERANCE = 0.00001
// redefinition of math functions for float32
func Abs(x float32) float32 { return float32(math.Abs(float64(x))) }
func Acos(x float32) float32 { return float32(math.Acos(float64(x))) }
func Acosh(x float32) float32 { return float32(math.Acosh(float64(x))) }
func Asin(x float32) float32 { return float32(math.Asin(float64(x))) }
func Asinh(x float32) float32 { return float32(math.Asinh(float64(x))) }
func Atan(x float32) float32 { return float32(math.Atan(float64(x))) }
func Atan2(y, x float32) float32 { return float32(math.Atan2(float64(y), float64(x))) }
func Atanh(x float32) float32 { return float32(math.Atanh(float64(x))) }
func Cbrt(x float32) float32 { return float32(math.Cbrt(float64(x))) }
func Ceil(x float32) float32 { return float32(math.Ceil(float64(x))) }
func Copysign(x, y float32) float32 { return float32(math.Copysign(float64(x), float64(y))) }
func Cos(x float32) float32 { return float32(math.Cos(float64(x))) }
func Cosh(x float32) float32 { return float32(math.Cosh(float64(x))) }
func Dim(x, y float32) float32 { return float32(math.Dim(float64(x), float64(y))) }
func Erf(x float32) float32 { return float32(math.Erf(float64(x))) }
func Erfc(x float32) float32 { return float32(math.Erfc(float64(x))) }
func Exp(x float32) float32 { return float32(math.Exp(float64(x))) }
func Exp2(x float32) float32 { return float32(math.Exp2(float64(x))) }
func Expm1(x float32) float32 { return float32(math.Expm1(float64(x))) }
func Floatbits(f float32) uint32 { return math.Float32bits(f) }
func Floatfrombits(b uint32) float32 { return math.Float32frombits(b) }
func Floor(x float32) float32 { return float32(math.Floor(float64(x))) }
func Frexp(f float32) (float32, int) { f2, e := math.Frexp(float64(f)); return float32(f2), e }
func Gamma(x float32) float32 { return float32(math.Gamma(float64(x))) }
func Hypot(p, q float32) float32 { return float32(math.Hypot(float64(p), float64(q))) }
func Ilogb(x float32) int { return math.Ilogb(float64(x)) }
func Inf(sign int) float32 { return float32(math.Inf(sign)) }
func IsInf(f float32, sign int) bool { return math.IsInf(float64(f), sign) }
func IsNaN(f float32) bool { return math.IsNaN(float64(f)) }
func J0(x float32) float32 { return float32(math.J0(float64(x))) }
func J1(x float32) float32 { return float32(math.J1(float64(x))) }
func Jn(n int, x float32) float32 { return float32(math.Jn(n, float64(x))) }
func Ldexp(frac float32, exp int) float32 { return float32(math.Ldexp(float64(frac), exp)) }
func Lgamma(x float32) (float32, int) {
lgamma, sign := math.Lgamma(float64(x))
return float32(lgamma), sign
}
func Log(x float32) float32 { return float32(math.Log(float64(x))) }
func Log10(x float32) float32 { return float32(math.Log10(float64(x))) }
func Log1p(x float32) float32 { return float32(math.Log1p(float64(x))) }
func Log2(x float32) float32 { return float32(math.Log2(float64(x))) }
func Logb(x float32) float32 { return float32(math.Logb(float64(x))) }
//func Max(x, y float32) float32 { return float32(math.Max(float64(x), float64(y))) }
//func Min(x, y float32) float32 { return float32(math.Min(float64(x), float64(y))) }
func Mod(x, y float32) float32 { return float32(math.Mod(float64(x), float64(y))) }
func Modf(f float32) (float32, float32) { x, y := math.Modf(float64(f)); return float32(x), float32(y) }
func NaN() float32 { return float32(math.NaN()) }
func Nextafter(x, y float32) float32 { return float32(math.Nextafter(float64(x), float64(y))) }
func Pow(x, y float32) float32 { return float32(math.Pow(float64(x), float64(y))) }
func Pow10(e int) float32 { return float32(math.Pow10(e)) }
func Remainder(x, y float32) float32 { return float32(math.Remainder(float64(x), float64(y))) }
func Signbit(x float32) bool { return math.Signbit(float64(x)) }
func Sin(x float32) float32 { return float32(math.Sin(float64(x))) }
func Sincos(x float32) (float32, float32) {
sin, cos := math.Sincos(float64(x))
return float32(sin), float32(cos)
}
func Sinh(x float32) float32 { return float32(math.Sinh(float64(x))) }
func Sqrt(x float32) float32 { return float32(math.Sqrt(float64(x))) }
func Tan(x float32) float32 { return float32(math.Tan(float64(x))) }
func Tanh(x float32) float32 { return float32(math.Tanh(float64(x))) }
func Trunc(x float32) float32 { return float32(math.Trunc(float64(x))) }
func Y0(x float32) float32 { return float32(math.Y0(float64(x))) }
func Y1(x float32) float32 { return float32(math.Y1(float64(x))) }
func Yn(n int, x float32) float32 { return float32(math.Yn(n, float64(x))) }
func NextPowerOfTwo(value int) int {
if value == 0 {
return 1
}
value--
value |= value >> 1
value |= value >> 2
value |= value >> 4
value |= value >> 8
value |= value >> 16
return value + 1
}
func IsPowerOfTwo(value int) bool {
return value != 0 && (value&value-1) == 0
}
func ToRadians(degrees float32) float32 {
return degrees * DegreeToRadians
}
func ToDegrees(radians float32) float32 {
return radians * RadiansToDegrees
}
func Clampi(value, min, max int) int {
if value < min {
return min
}
if value > max {
return max
}
return value
}
func Clampf(value, min, max float32) float32 {
if value < min {
return min
}
if value > max {
return max
}
return value
}
func Min(a, b float32) float32 {
if a < b {
return a
}
return b
}
func Max(a, b float32) float32 {
if a > b {
return a
}
return b
} | math.go | 0.827689 | 0.797596 | math.go | starcoder |
package schema
const RUMV3Schema = `{
"$id": "docs/spec/rum_v3_metadata.json",
"title": "Metadata",
"description": "Metadata concerning the other objects in the stream.",
"type": [
"object"
],
"properties": {
"se": {
"$id": "docs/spec/rum_v3_service.json",
"title": "Service",
"type": [
"object"
],
"properties": {
"a": {
"description": "Name and version of the Elastic APM agent",
"type": [
"object"
],
"properties": {
"n": {
"description": "Name of the Elastic APM agent, e.g. \"Python\"",
"type": [
"string"
],
"minLength": 1,
"maxLength": 1024
},
"ve": {
"description": "Version of the Elastic APM agent, e.g.\"1.0.0\"",
"type": [
"string"
],
"maxLength": 1024
}
},
"required": [
"n",
"ve"
]
},
"fw": {
"description": "Name and version of the web framework used",
"type": [
"object",
"null"
],
"properties": {
"n": {
"type": [
"string",
"null"
],
"maxLength": 1024
},
"ve": {
"type": [
"string",
"null"
],
"maxLength": 1024
}
}
},
"la": {
"description": "Name and version of the programming language used",
"type": [
"object",
"null"
],
"properties": {
"n": {
"type": [
"string"
],
"maxLength": 1024
},
"ve": {
"type": [
"string",
"null"
],
"maxLength": 1024
}
},
"required": [
"n"
]
},
"n": {
"description": "Immutable name of the service emitting this event",
"type": [
"string"
],
"pattern": "^[a-zA-Z0-9 _-]+$",
"minLength": 1,
"maxLength": 1024
},
"en": {
"description": "Environment name of the service, e.g. \"production\" or \"staging\"",
"type": [
"string",
"null"
],
"maxLength": 1024
},
"ru": {
"description": "Name and version of the language runtime running this service",
"type": [
"object",
"null"
],
"properties": {
"n": {
"type": [
"string"
],
"maxLength": 1024
},
"ve": {
"type": [
"string"
],
"maxLength": 1024
}
},
"required": [
"n",
"ve"
]
},
"ve": {
"description": "Version of the service emitting this event",
"type": [
"string",
"null"
],
"maxLength": 1024
}
},
"required": [
"a",
"n"
]
}
},
"required": [
"se"
]
}` | model/metadata/generated/schema/rum_v3_metadata.go | 0.709019 | 0.510802 | rum_v3_metadata.go | starcoder |
package loco
import (
"math"
"strconv"
"github.com/golang/geo/s2"
)
func FindCellIDs(area s2.Rect) (cellUnion s2.CellUnion) {
cellQueue := make([]s2.CellID, 0)
c := s2.CellIDFromFacePosLevel(0, 0, 0).ChildBeginAtLevel(0)
endCellId := s2.CellIDFromFacePosLevel(5, 0, 0).ChildEndAtLevel(0)
for c != endCellId {
if containsGeodataToFind(c, area) {
cellQueue = append(cellQueue, c)
}
}
return s2.CellUnion{}
}
// Merge continuous cells in CellUnion and return a list of merged GeohashRanges.
func MergeCells(cellUnion s2.CellUnion) (ranges []GeohashRange) {
ranges = make([]GeohashRange, 0)
var cellId s2.CellID
for _, cellId = range cellUnion {
currentRange := GeohashRange{RangeMin: cellId.RangeMin(), RangeMax: cellId.RangeMax()}
wasMerged := false
for _, r := range ranges {
if wasMerged = r.TryMerge(¤tRange); wasMerged == true {
break
}
}
if !wasMerged {
ranges = append(ranges, currentRange)
}
}
return
}
// Generate a geohash for the given latitude & longitude
func GenerateGeohashFromLatLng(lat float64, lng float64) (geohash int64) {
p := s2.PointFromLatLng(s2.LatLngFromDegrees(lat, lng))
return GenerateGeohash(p)
}
// Generate a geohash for the supplied point
func GenerateGeohash(p s2.Point) (geohash int64) {
ll := s2.LatLngFromPoint(p)
geohash = int64(s2.CellIDFromLatLng(ll))
return
}
// Generate a DynamoDB hashkey of the given length for the supplied geohash
func GenerateHashKey(geohash int64, hashKeyLength int) (hashkey int64) {
if geohash < 0 {
// Counteract "-" at beginning of geohash
hashKeyLength = hashKeyLength + 1
}
geohashString := strconv.FormatInt(geohash, 10)
denominator := round(math.Pow(10, float64(len(geohashString)-hashKeyLength)))
hashkey = int64(round(float64(geohash) / denominator))
return
}
func processQueue(queue []s2.CellID, r s2.Rect) (cells []s2.CellID) {
cellIds := make([]s2.CellID, 0)
for _, c := range queue {
if !c.IsValid() {
break
}
cellIds = append(cellIds, processChildren(c, r, queue)...)
}
return
}
func processChildren(parent s2.CellID, r s2.Rect, queue []s2.CellID) (cellIds []s2.CellID) {
children := make([]s2.CellID, 0)
for c := parent.ChildBegin(); c != parent.ChildEnd(); c = c.Next() {
if containsGeodataToFind(c, r) {
children = append(children, c)
}
}
cellIds = make([]s2.CellID, 0)
switch len(children) {
case 1:
case 2:
for _, child := range children {
if child.IsLeaf() {
cellIds = append(cellIds, child)
} else {
queue = append(queue, child)
}
}
break
case 3:
cellIds = append(cellIds, children...)
break
case 4:
cellIds = append(cellIds, parent)
break
default:
}
return
}
func containsGeodataToFind(c s2.CellID, r s2.Rect) (intersects bool) {
for edgeID := 0; edgeID <= 3; edgeID += 1 {
if r.ContainsLatLng(s2.LatLngFromPoint(s2.CellFromCellID(c).Vertex(edgeID))) {
return true
}
}
return false
}
func round(f float64) float64 {
return math.Floor(f + .5)
} | loco/helpers.go | 0.717606 | 0.467089 | helpers.go | starcoder |
package design
import (
design "goa.design/goa/design"
)
const (
// FormatDate describes RFC3339 date values.
FormatDate = design.FormatDate
// FormatDateTime describes RFC3339 date time values.
FormatDateTime = design.FormatDateTime
// FormatUUID describes RFC4122 UUID values.
FormatUUID = design.FormatUUID
// FormatEmail describes RFC5322 email addresses.
FormatEmail = design.FormatEmail
// FormatHostname describes RFC1035 Internet hostnames.
FormatHostname = design.FormatHostname
// FormatIPv4 describes RFC2373 IPv4 address values.
FormatIPv4 = design.FormatIPv4
// FormatIPv6 describes RFC2373 IPv6 address values.
FormatIPv6 = design.FormatIPv6
// FormatIP describes RFC2373 IPv4 or IPv6 address values.
FormatIP = design.FormatIP
// FormatURI describes RFC3986 URI values.
FormatURI = design.FormatURI
// FormatMAC describes IEEE 802 MAC-48, EUI-48 or EUI-64 MAC address values.
FormatMAC = design.FormatMAC
// FormatCIDR describes RFC4632 and RFC4291 CIDR notation IP address values.
FormatCIDR = design.FormatCIDR
// FormatRegexp describes regular expression syntax accepted by RE2.
FormatRegexp = design.FormatRegexp
// FormatJSON describes JSON text.
FormatJSON = design.FormatJSON
// FormatRFC1123 describes RFC1123 date time values.
FormatRFC1123 = design.FormatRFC1123
)
const (
// NoStreamKind represents no payload or result stream in method.
NoStreamKind = design.NoStreamKind
// ClientStreamKind represents client sends a streaming payload to method.
ClientStreamKind = design.ClientStreamKind
// ServerStreamKind represents server sends a streaming result from method.
ServerStreamKind = design.ServerStreamKind
// BidirectionalStreamKind represents both client and server streams payload
// and result respectively.
BidirectionalStreamKind = design.BidirectionalStreamKind
)
const (
// DefaultView is the name of the default result type view.
DefaultView = design.DefaultView
)
const (
// OAuth2Kind identifies a "OAuth2" security scheme.
OAuth2Kind = design.OAuth2Kind
// BasicAuthKind means "basic" security scheme.
BasicAuthKind = design.BasicAuthKind
// APIKeyKind means "apiKey" security scheme.
APIKeyKind = design.APIKeyKind
// JWTKind means an "apiKey" security scheme, with support for
// TokenPath and Scopes.
JWTKind = design.JWTKind
// NoKind means to have no security for this endpoint.
NoKind = design.NoKind
)
const (
// AuthorizationCodeFlowKind identifies a OAuth2 authorization code
// flow.
AuthorizationCodeFlowKind = design.AuthorizationCodeFlowKind
// ImplicitFlowKind identifiers a OAuth2 implicit flow.
ImplicitFlowKind = design.ImplicitFlowKind
// PasswordFlowKind identifies a Resource Owner Password flow.
PasswordFlowKind = design.PasswordFlowKind
// ClientCredentialsFlowKind identifies a OAuth Client Credentials flow.
ClientCredentialsFlowKind = design.ClientCredentialsFlowKind
)
const (
// BooleanKind represents a boolean.
BooleanKind = design.BooleanKind
// IntKind represents a signed integer.
IntKind = design.IntKind
// Int32Kind represents a signed 32-bit integer.
Int32Kind = design.Int32Kind
// Int64Kind represents a signed 64-bit integer.
Int64Kind = design.Int64Kind
// UIntKind represents an unsigned integer.
UIntKind = design.UIntKind
// UInt32Kind represents an unsigned 32-bit integer.
UInt32Kind = design.UInt32Kind
// UInt64Kind represents an unsigned 64-bit integer.
UInt64Kind = design.UInt64Kind
// Float32Kind represents a 32-bit floating number.
Float32Kind = design.Float32Kind
// Float64Kind represents a 64-bit floating number.
Float64Kind = design.Float64Kind
// StringKind represents a JSON string.
StringKind = design.StringKind
// BytesKind represent a series of bytes (binary data).
BytesKind = design.BytesKind
// ArrayKind represents a JSON array.
ArrayKind = design.ArrayKind
// ObjectKind represents a JSON object.
ObjectKind = design.ObjectKind
// MapKind represents a JSON object where the keys are not known in
// advance.
MapKind = design.MapKind
// UserTypeKind represents a user type.
UserTypeKind = design.UserTypeKind
// ResultTypeKind represents a result type.
ResultTypeKind = design.ResultTypeKind
// AnyKind represents an unknown type.
AnyKind = design.AnyKind
)
const (
// Boolean is the type for a JSON boolean.
Boolean = design.Boolean
// Int is the type for a signed integer.
Int = design.Int
// Int32 is the type for a signed 32-bit integer.
Int32 = design.Int32
// Int64 is the type for a signed 64-bit integer.
Int64 = design.Int64
// UInt is the type for an unsigned integer.
UInt = design.UInt
// UInt32 is the type for an unsigned 32-bit integer.
UInt32 = design.UInt32
// UInt64 is the type for an unsigned 64-bit integer.
UInt64 = design.UInt64
// Float32 is the type for a 32-bit floating number.
Float32 = design.Float32
// Float64 is the type for a 64-bit floating number.
Float64 = design.Float64
// String is the type for a JSON string.
String = design.String
// Bytes is the type for binary data.
Bytes = design.Bytes
// Any is the type for an arbitrary JSON value (interface{} in Go).
Any = design.Any
) | http/design/aliases.go | 0.611498 | 0.413181 | aliases.go | starcoder |
package slippy
import (
"math"
"errors"
"github.com/go-spatial/geom"
)
// MaxZoom is the lowest zoom (furthest in)
const MaxZoom = 22
// NewTile returns a Tile of Z,X,Y passed in
func NewTile(z, x, y uint) *Tile {
return &Tile{
Z: z,
X: x,
Y: y,
}
}
// Tile describes a slippy tile.
type Tile struct {
// zoom
Z uint
// column
X uint
// row
Y uint
}
// NewTileMinMaxer returns the smallest tile which fits the
// geom.MinMaxer. Note: it assumes the values of ext are
// EPSG:4326 (lng/lat)
func NewTileMinMaxer(ext geom.MinMaxer) *Tile {
upperLeft := NewTileLatLon(MaxZoom, ext.MaxY(), ext.MinX())
point := &geom.Point{ext.MaxX(), ext.MinY()}
var ret *Tile
for z := uint(MaxZoom); int(z) >= 0 && ret == nil; z-- {
upperLeft.RangeFamilyAt(z, func(tile *Tile) error {
if tile.Extent4326().Contains(point) {
ret = tile
return errors.New("stop iter")
}
return nil
})
}
return ret
}
// NewTileLatLon instantiates a tile containing the coordinate with the specified zoom
func NewTileLatLon(z uint, lat, lon float64) *Tile {
x := Lon2Tile(z, lon)
y := Lat2Tile(z, lat)
return &Tile{
Z: z,
X: x,
Y: y,
}
}
func minmax(a, b uint) (uint, uint) {
if a > b {
return b, a
}
return a, b
}
// FromBounds returns a list of tiles that make up the bound given. The bounds should be defined as the following lng/lat points [4]float64{west,south,east,north}
func FromBounds(bounds *geom.Extent, z uint) []Tile {
if bounds == nil {
return nil
}
minx, maxx := minmax(Lon2Tile(z, bounds[0]), Lon2Tile(z, bounds[2]))
miny, maxy := minmax(Lat2Tile(z, bounds[1]), Lat2Tile(z, bounds[3]))
// tiles := make([]Tile, (maxx-minx)*(maxy-miny))
var tiles []Tile
for x := minx; x <= maxx; x++ {
for y := miny; y <= maxy; y++ {
tiles = append(tiles, Tile{Z: z, X: x, Y: y})
}
}
return tiles
}
// ZXY returns back the z,x,y of the tile
func (t Tile) ZXY() (uint, uint, uint) { return t.Z, t.X, t.Y }
// Extent3395 returns the tile's extent in EPSG:3395 (aka World Mercator) projection
func (t Tile) Extent3395() *geom.Extent {
return geom.NewExtent(
[2]float64{Tile2WebX(t.Z, t.X), Tile2WebY(t.Z, t.Y+1)},
[2]float64{Tile2WebX(t.Z, t.X+1), Tile2WebY(t.Z, t.Y)},
)
}
// Extent4326 returns the tile's extent in EPSG:4326 (aka lat/long)
func (t Tile) Extent4326() *geom.Extent {
return geom.NewExtent(
[2]float64{Tile2Lon(t.Z, t.X), Tile2Lat(t.Z, t.Y+1)},
[2]float64{Tile2Lon(t.Z, t.X+1), Tile2Lat(t.Z, t.Y)},
)
}
// RangeFamilyAt calls f on every tile vertically related to t at the specified zoom
// TODO (ear7h): sibling support
func (t Tile) RangeFamilyAt(zoom uint, f func(*Tile) error) error {
// handle ancestors and self
if zoom <= t.Z {
mag := t.Z - zoom
arg := NewTile(zoom, t.X>>mag, t.Y>>mag)
return f(arg)
}
// handle descendants
mag := zoom - t.Z
delta := uint(math.Exp2(float64(mag)))
leastX := t.X << mag
leastY := t.Y << mag
for x := leastX; x < leastX+delta; x++ {
for y := leastY; y < leastY+delta; y++ {
err := f(NewTile(zoom, x, y))
if err != nil {
return err
}
}
}
return nil
} | vendor/github.com/go-spatial/geom/slippy/tile.go | 0.789153 | 0.51312 | tile.go | starcoder |
package data
import (
"regexp"
"github.com/go-playground/validator"
)
func (user *User) Validate() error {
validate := validator.New()
err1 := validate.RegisterValidation("email", validateEmail)
err2 := validate.RegisterValidation("dateofbirth", validateDateOfBirth)
err3 := validate.RegisterValidation("isStatusType", validateIsStatusType)
if err1 != nil || err2 != nil || err3 != nil {
panic("Validator connexions failed")
}
return validate.Struct(user)
}
func validateEmail(fl validator.FieldLevel) bool {
re := regexp.MustCompile(`([a-z0-9][-a-z0-9_\+\.]*[a-z0-9])@([a-z0-9][-a-z0-9\.]*[a-z0-9]\.(arpa|root|aero|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cu|cv|cx|cy|cz|de|dj|dk|dm|do|dz|ec|ee|eg|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|sk|sl|sm|sn|so|sr|st|su|sv|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|um|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)|([0-9]{1,3}\.{3}[0-9]{1,3}))`)
matches := re.FindAllString(fl.Field().String(), -1)
return len(matches) == 1
}
func validateDateOfBirth(fl validator.FieldLevel) bool {
re := regexp.MustCompile(`[0-9]{1,2}/[0-9]{1,2}/[0-9]{4}`)
matches := re.FindAllString(fl.Field().String(), -1)
return len(matches) == 1
}
// validates the status type is valid
func validateIsStatusType(fieldLevel validator.FieldLevel) bool {
statusType := fieldLevel.Field().String()
switch statusType {
case string(Online), string(Offline), string(InGame):
return true
}
return false
} | pkg/data/validate.go | 0.525856 | 0.498718 | validate.go | starcoder |
package techan
import "github.com/sdcoffey/big"
type relativeVigorIndexIndicator struct {
numerator Indicator
denominator Indicator
}
// NewRelativeVigorIndexIndicator returns an Indicator which returns the index of the relative vigor of the prices of
// a sercurity. Relative Vigor Index is simply the difference of the previous four days' close and open prices divided
// by the difference between the previous four days high and low prices. A more in-depth explanation of relative vigor
// index can be found here: https://www.fidelity.com/learning-center/trading-investing/technical-analysis/technical-indicator-guide/relative-vigor-index
func NewRelativeVigorIndexIndicator(series *TimeSeries) Indicator {
return relativeVigorIndexIndicator{
numerator: NewDifferenceIndicator(NewClosePriceIndicator(series), NewOpenPriceIndicator(series)),
denominator: NewDifferenceIndicator(NewHighPriceIndicator(series), NewLowPriceIndicator(series)),
}
}
func (rvii relativeVigorIndexIndicator) Calculate(index int) big.Decimal {
if index < 3 {
return big.ZERO
}
two := big.NewFromString("2")
a := rvii.numerator.Calculate(index)
b := rvii.numerator.Calculate(index - 1).Mul(two)
c := rvii.numerator.Calculate(index - 2).Mul(two)
d := rvii.numerator.Calculate(index - 3)
num := (a.Add(b).Add(c).Add(d)).Div(big.NewFromString("6"))
e := rvii.denominator.Calculate(index)
f := rvii.denominator.Calculate(index - 1).Mul(two)
g := rvii.denominator.Calculate(index - 2).Mul(two)
h := rvii.denominator.Calculate(index - 3)
denom := (e.Add(f).Add(g).Add(h)).Div(big.NewFromString("6"))
return num.Div(denom)
}
type relativeVigorIndexSignalLine struct {
relativeVigorIndex Indicator
}
// NewRelativeVigorSignalLine returns an Indicator intended to be used in conjunction with Relative vigor index, which
// returns the average value of the last 4 indices of the RVI indicator.
func NewRelativeVigorSignalLine(series *TimeSeries) Indicator {
return relativeVigorIndexSignalLine{
relativeVigorIndex: NewRelativeVigorIndexIndicator(series),
}
}
func (rvsn relativeVigorIndexSignalLine) Calculate(index int) big.Decimal {
if index < 3 {
return big.ZERO
}
rvi := rvsn.relativeVigorIndex.Calculate(index)
i := rvsn.relativeVigorIndex.Calculate(index - 1).Mul(big.NewFromString("2"))
j := rvsn.relativeVigorIndex.Calculate(index - 2).Mul(big.NewFromString("2"))
k := rvsn.relativeVigorIndex.Calculate(index - 3)
return (rvi.Add(i).Add(j).Add(k)).Div(big.NewFromString("6"))
} | indicator_relative_vigor_index.go | 0.769167 | 0.612947 | indicator_relative_vigor_index.go | starcoder |
package method
import (
"log"
"math"
"gonum.org/v1/gonum/optimize"
)
// Linear prototype of Linear regression
// this could be used for either Linear regression
// and logistic regression
type Linear struct {
Features [][]float64
Theta []float64
Output []float64
LearningRate float64
Hypothesis LinearHypothesis
Result *optimize.Result
}
// LogisticRegression inherits Liner
type LogisticRegression struct {
Linear
TrueDegree float64
}
// LinearRegression inherits Liner
type LinearRegression struct {
Linear
}
// LinearHypothesis prototype for hypothesis
type LinearHypothesis func(X, theta []float64) float64
// LinearSetting prototype for setting
type LinearSetting struct {
MajorIteration int
Threshod float64
}
func sigmoid(z float64) float64 {
return 1 / (1 + math.Exp(-z))
}
// LinearDefaultSetting returns default
// setting for Linear regression
func LinearDefaultSetting() *LinearSetting {
return &LinearSetting{
MajorIteration: 1e5,
Threshod: 1e-12,
}
}
/***********************
* LOGISTIC REGRESSION *
***********************/
// NewLogisticRegression return new pointer of
// LogisticRegression struct with default Linear
// hypothesis ax+b
func NewLogisticRegression() *LogisticRegression {
lr := &LogisticRegression{}
lr.Hypothesis = func(X, theta []float64) float64 {
hypothesis := 0.0
for key, x := range X {
hypothesis += theta[key] * x
}
return hypothesis
}
lr.LearningRate = 1
lr.TrueDegree = 0.5
return lr
}
func (l *LogisticRegression) calculateCost(X []float64, y float64) float64 {
h := sigmoid(l.Hypothesis(X, l.Theta))
return -y*math.Log(h) - (1-y)*math.Log(1-h)
}
// Minimize start training of hypothesis
// Minimize start training of hypothesis
func (l *LogisticRegression) Minimize(setting *LinearSetting) *optimize.Result {
var s *optimize.Settings
if setting != nil {
s = &optimize.Settings{
GradientThreshold: setting.Threshod,
MajorIterations: setting.MajorIteration,
Converger: &optimize.FunctionConverge{
Absolute: 1e-12,
Iterations: 1e5,
},
}
}
prob := optimize.Problem{
Func: l.Func,
Grad: l.Grad,
}
meth := &optimize.BFGS{}
result, err := optimize.Minimize(prob, l.Theta, s, meth)
if err != nil {
log.Fatal(err)
}
if err = result.Status.Err(); err != nil {
log.Fatal(err)
}
l.Result = result
l.Theta = result.X
return result
}
// Func returns cost of theta
func (l *LogisticRegression) Func(theta []float64) float64 {
m := float64(len(l.Features))
sum := 0.0
for i, X := range l.Features {
sum += l.calculateCost(X, l.Output[i])
}
return (1 / m) * sum
}
// Grad updates initil thetas to minimum
func (l *LogisticRegression) Grad(grad, theta []float64) {
m := float64(len(l.Features))
for j := range theta {
sum := 0.0
for i, x := range l.Features {
sum += (sigmoid(l.Hypothesis(x, theta)) - l.Output[i]) * x[j]
}
grad[j] = l.LearningRate / m * sum
}
}
// Predict start training of hypothesis
func (l *LogisticRegression) Predict(X []float64) bool {
if l.TrueDegree == 0 {
return sigmoid(l.Hypothesis(X, l.Theta)) >= 0.5
}
return sigmoid(l.Hypothesis(X, l.Theta)) >= l.TrueDegree
}
/***********************
* Linear REGRESSION *
***********************/
// NewLinearRegression return new pointer of
// LogisticRegression struct with default Linear
// hypothesis ax+b
func NewLinearRegression() *LinearRegression {
lr := &LinearRegression{}
lr.Hypothesis = func(X, theta []float64) float64 {
hypothesis := 0.0
for key, x := range X {
hypothesis += theta[key] * x
}
return hypothesis
}
lr.LearningRate = 1
return lr
}
func (l *LinearRegression) calculateCost(x []float64, y float64) float64 {
cost := l.Hypothesis(x, l.Theta) - y
return math.Pow(cost, 2)
}
// Minimize start training of hypothesis
func (l *LinearRegression) Minimize(setting *LinearSetting) *optimize.Result {
var s *optimize.Settings
if setting != nil {
s = &optimize.Settings{
GradientThreshold: setting.Threshod,
MajorIterations: setting.MajorIteration,
Converger: &optimize.FunctionConverge{
Absolute: 1e-12,
Iterations: 1e5,
},
}
}
prob := optimize.Problem{
Func: l.Func,
Grad: l.Grad,
}
meth := &optimize.BFGS{}
result, err := optimize.Minimize(prob, l.Theta, s, meth)
if err != nil {
log.Fatal(err)
}
if err = result.Status.Err(); err != nil {
log.Fatal(err)
}
l.Theta = result.X
l.Result = result
return result
}
// Func return cost
func (l *LinearRegression) Func(theta []float64) float64 {
sum := 0.0
for i, x := range l.Features {
sum += l.calculateCost(x, l.Output[i])
}
m := float64(len(l.Features))
return 1 / (2 * m) * sum
}
// Grad updates initil thetas to minimum
func (l *LinearRegression) Grad(grad, theta []float64) {
m := float64(len(l.Features))
for j := range theta {
sum := 0.0
for i, x := range l.Features {
sum += (l.Hypothesis(x, theta) - l.Output[i]) * x[j]
}
grad[j] = l.LearningRate / m * sum
}
}
// Predict start training of hypothesis
func (l *LinearRegression) Predict(X []float64) float64 {
return l.Hypothesis(X, l.Theta)
} | method/linear.go | 0.674372 | 0.617743 | linear.go | starcoder |
package gohome
import (
"github.com/PucklaMotzer09/GLSLGenerator"
"strings"
)
// 3D
const (
ShaderVersion = "110"
)
var (
Attributes3D = []glslgen.Variable{
glslgen.Variable{"vec3", "highp", "vertex"},
glslgen.Variable{"vec3", "highp", "normal"},
glslgen.Variable{"vec2", "highp", "texCoord"},
glslgen.Variable{"vec3", "highp", "tangent"},
}
AttributesInstanced3D = []glslgen.Variable{
glslgen.Variable{"mat4", "highp", "transformMatrix3D"},
}
UniformModuleVertex3D = glslgen.Module{
Uniforms: []glslgen.Variable{
glslgen.Variable{"mat4", "highp", "viewMatrix3D"},
glslgen.Variable{"mat4", "highp", "inverseViewMatrix3D"},
glslgen.Variable{"mat4", "highp", "projectionMatrix3D"},
},
}
UniformNormalModuleVertex3D = glslgen.Module{
Uniforms: []glslgen.Variable{
glslgen.Variable{"mat4", "highp", "transformMatrix3D"},
},
}
CalculatePositionModule3D = glslgen.Module{
Name: "calculatePosition",
Body: "gl_Position = projectionMatrix3D*viewMatrix3D*transformMatrix3D*vec4(vertex,1.0);",
}
SetOutputsModuleVertex3D = glslgen.Module{
Name: "setOutputs",
Body: `fragViewMatrix3D = viewMatrix3D;
fragInverseViewMatrix3D = inverseViewMatrix3D;`,
}
SetOutputsNormalModuleVertex3D = glslgen.Module{
Name: "setOutputsNormal",
Body: `fragPos = (viewMatrix3D*transformMatrix3D*vec4(vertex,1.0)).xyz;
fragNormal = (viewMatrix3D*transformMatrix3D*vec4(normal,0.0)).xyz;
vec3 norm = normalize(fragNormal);
vec3 tang = normalize((viewMatrix3D*transformMatrix3D*vec4(tangent,0.0)).xyz);
vec3 bitang = normalize(cross(norm,tang));
fragToTangentSpace = mat3(
tang.x,bitang.x,norm.x,
tang.y,bitang.y,norm.y,
tang.z,bitang.z,norm.z
);`,
}
SetOutputsNoUVModuleVertex3D = glslgen.Module{
Name: "setOutputsNoUV",
Body: `fragPos = (transformMatrix3D*vec4(vertex,1.0)).xyz;
fragNormal = (transformMatrix3D*vec4(normal,0.0)).xyz;`,
}
SetOutputTexCoordModuleVertex3D = glslgen.Module{
Name: "setOutputTexCoord",
Body: `fragTexCoord = texCoord;`,
}
GlobalsFragment3D = []glslgen.Variable{
glslgen.Variable{"float", "const highp", "shadowDistance = 50.0"},
glslgen.Variable{"float", "const highp", "transitionDistance = 5.0"},
glslgen.Variable{"float", "const highp", "bias = 0.005"},
glslgen.Variable{"vec4", "highp", "finalDiffuseColor"},
glslgen.Variable{"vec4", "highp", "finalSpecularColor"},
glslgen.Variable{"vec4", "highp", "finalAmbientColor"},
glslgen.Variable{"vec3", "highp", "norm"},
glslgen.Variable{"vec3", "highp", "viewDir"},
}
InputsFragment3D = []glslgen.Variable{
glslgen.Variable{"vec3", "highp", "fragPos"},
glslgen.Variable{"vec3", "highp", "fragNormal"},
glslgen.Variable{"mat4", "highp", "fragViewMatrix3D"},
glslgen.Variable{"mat4", "highp", "fragInverseViewMatrix3D"},
}
InputsNormalFragment3D = []glslgen.Variable{
glslgen.Variable{"vec2", "highp", "fragTexCoord"},
glslgen.Variable{"mat3", "highp", "fragToTangentSpace"},
}
LightMakrosFragment3D = []glslgen.Makro{
glslgen.Makro{"MAX_POINT_LIGHTS", "5"},
glslgen.Makro{"MAX_DIRECTIONAL_LIGHTS", "2"},
glslgen.Makro{"MAX_SPOT_LIGHTS", "1"},
glslgen.Makro{"degToRad(deg)", "(deg/180.0*3.14159265359)"},
glslgen.Makro{"MAX_SPECULAR_EXPONENT", "50.0"},
glslgen.Makro{"MIN_SPECULAR_EXPONENT", "5.0"},
}
InitialiseModuleFragment3D = glslgen.Module{
Name: "initialise",
Body: `finalDiffuseColor = vec4(1.0,1.0,1.0,1.0);
finalSpecularColor = vec4(0.0);
finalAmbientColor = vec4(0.0);`,
}
InitialiseNormalModuleFragment3D = glslgen.Module{
Name: "initialiseNormal",
Body: `norm = normalize(fragToTangentSpace*fragNormal);
viewDir = normalize(fragToTangentSpace*(fragPos*-1.0));`,
}
InitialiseNoUVModuleFragment3D = glslgen.Module{
Name: "initialiseNoUV",
Body: `norm = normalize(fragNormal);
vec3 camPos = (fragInverseViewMatrix3D*vec4(0.0,0.0,0.0,1.0)).xyz;
viewDir = camPos - fragPos;`,
}
LightUniformsModule3D = glslgen.Module{
Structs: []glslgen.Struct{
glslgen.Struct{
"Attentuation",
[]glslgen.Variable{
glslgen.Variable{"float", "highp", "constant"},
glslgen.Variable{"float", "highp", "linear"},
glslgen.Variable{"float", "highp", "quadratic"},
},
},
glslgen.Struct{
"PointLight",
[]glslgen.Variable{
glslgen.Variable{"vec3", "highp", "position"},
glslgen.Variable{"vec3", "highp", "diffuseColor"},
glslgen.Variable{"vec3", "highp", "specularColor"},
glslgen.Variable{"Attentuation", "", "attentuation"},
},
},
glslgen.Struct{
"DirectionalLight",
[]glslgen.Variable{
glslgen.Variable{"vec3", "highp", "direction"},
glslgen.Variable{"vec3", "highp", "diffuseColor"},
glslgen.Variable{"vec3", "highp", "specularColor"},
glslgen.Variable{"mat4", "highp", "lightSpaceMatrix"},
glslgen.Variable{"bool", "", "castsShadows"},
glslgen.Variable{"ivec2", "", "shadowMapSize"},
glslgen.Variable{"float", "highp", "shadowDistance"},
},
},
glslgen.Struct{
"SpotLight",
[]glslgen.Variable{
glslgen.Variable{"vec3", "highp", "position"},
glslgen.Variable{"vec3", "highp", "direction"},
glslgen.Variable{"vec3", "highp", "diffuseColor"},
glslgen.Variable{"vec3", "highp", "specularColor"},
glslgen.Variable{"float", "highp", "innerCutOff"},
glslgen.Variable{"float", "highp", "outerCutOff"},
glslgen.Variable{"Attentuation", "", "attentuation"},
glslgen.Variable{"mat4", "highp", "lightSpaceMatrix"},
glslgen.Variable{"bool", "", "castsShadows"},
glslgen.Variable{"ivec2", "", "shadowMapSize"},
},
},
},
Uniforms: []glslgen.Variable{
glslgen.Variable{"int", "", "numPointLights"},
glslgen.Variable{"int", "", "numDirectionalLights"},
glslgen.Variable{"int", "", "numSpotLights"},
glslgen.Variable{"vec3", "highp", "ambientLight"},
glslgen.Variable{"PointLight", "", "pointLights[MAX_POINT_LIGHTS]"},
glslgen.Variable{"DirectionalLight", "", "directionalLights[MAX_POINT_LIGHTS]"},
glslgen.Variable{"sampler2D", "highp", "directionalLightsshadowmap[MAX_DIRECTIONAL_LIGHTS]"},
glslgen.Variable{"SpotLight", "", "spotLights[MAX_SPOT_LIGHTS]"},
glslgen.Variable{"sampler2D", "highp", "spotLightsshadowmap[MAX_SPOT_LIGHTS]"},
},
Functions: []glslgen.Function{
glslgen.Function{
"vec3 diffuseLighting(vec3 lightDir, vec3 diffuse)",
`float diff = max(dot(norm,lightDir),0.0);
diffuse *= diff;
return diffuse;`,
},
glslgen.Function{
"vec3 specularLighting(vec3 lightDir, vec3 specular)",
`vec3 reflectDir = reflect(-lightDir, norm);
vec3 halfwayDir = normalize(lightDir + viewDir);
float spec = max(pow(max(dot(norm,halfwayDir),0.0),calculateShinyness(material.shinyness)),0.0);
specular *= spec;
return specular;`,
},
glslgen.Function{
"void calculatePointLights()",
` for (int i = 0;i<MAX_POINT_LIGHTS;i++)
{
if(i>=numPointLights)
break;
calculatePointLight(pointLights[i],i);
}`,
},
glslgen.Function{
"void calculateDirectionalLights()",
`
for (int i = 0;i<MAX_DIRECTIONAL_LIGHTS;i++)
{
if(i>=numDirectionalLights)
break;
calculateDirectionalLight(directionalLights[i],i);
}`,
},
glslgen.Function{
"void calculateSpotLights()",
`for(int i=0;i<MAX_SPOT_LIGHTS ; i++)
{
if(i>=numSpotLights)
break;
calculateSpotLight(spotLights[i],i);
}`,
},
glslgen.Function{
"void calculateAllLights()",
`calculatePointLights();
calculateDirectionalLights();
calculateSpotLights();`,
},
glslgen.Function{
"float calcAttentuation(vec3 lightPosition,Attentuation attentuation)",
`float distance = distance(lightPosition,fragPos);
float attent = 1.0/(attentuation.quadratic*distance*distance + attentuation.linear*distance + attentuation.constant);
return attent;`,
},
glslgen.Function{
"float calculateShinyness(float shinyness)",
"return max(MAX_SPECULAR_EXPONENT*(pow(max(shinyness,0.0),-3.0)-1.0)+MIN_SPECULAR_EXPONENT,0.0);",
},
},
Name: "calculateLights",
Body: `finalDiffuseColor = vec4(0.0,0.0,0.0,1.0);
finalAmbientColor = vec4(ambientLight,1.0);
calculateAllLights();`,
}
LightCalcSpotAmountNormalModule3D = glslgen.Module{
Functions: []glslgen.Function{
glslgen.Function{
"float calcSpotAmount(vec3 lightDir,vec3 lightDirection,SpotLight pl)",
`float theta = dot(lightDir, normalize(fragToTangentSpace*lightDirection));
float spotAmount = 0.0;
float outerCutOff = cos(degToRad(pl.outerCutOff));
float innerCutOff = cos(degToRad(pl.innerCutOff));
float epsilon = innerCutOff - outerCutOff;
spotAmount = clamp((theta - outerCutOff) / epsilon, 0.0, 1.0);
return spotAmount;`,
},
},
}
LightCalcSpotAmountNoUVModule3D = glslgen.Module{
Functions: []glslgen.Function{
glslgen.Function{
"float calcSpotAmount(vec3 lightDir,vec3 lightDirection,SpotLight pl)",
`float theta = dot(lightDir, lightDirection);
float spotAmount = 0.0;
float outerCutOff = cos(degToRad(pl.outerCutOff));
float innerCutOff = cos(degToRad(pl.innerCutOff));
float epsilon = innerCutOff - outerCutOff;
spotAmount = clamp((theta - outerCutOff) / epsilon, 0.0, 1.0);
return spotAmount;`,
},
},
}
LightsAndShadowsFunctionsNoUV3D = glslgen.Module{
Functions: []glslgen.Function{
glslgen.Function{
"float calcShadow(sampler2D shadowMap,mat4 lightSpaceMatrix,float shadowdistance,bool distanceTransition,ivec2 shadowMapSize)",
`float distance = 0.0;
if(distanceTransition)
{
distance = length(fragPos);
distance = distance - (shadowdistance - transitionDistance);
distance = distance / transitionDistance;
distance = clamp(1.0-distance,0.0,1.0);
}
vec4 fragPosLightSpace = lightSpaceMatrix*vec4(fragPos,1.0);
vec3 projCoords = clamp((fragPosLightSpace.xyz / fragPosLightSpace.w)*0.5+0.5,-1.0,1.0);
float currentDepth = projCoords.z-bias;
float shadowresult = 0.0;
float closestDepth = texture2D(shadowMap, projCoords.xy).r;
vec2 texelSize = 1.0 / vec2(shadowMapSize);
for(int x = -1; x <= 1; ++x)
{
for(int y = -1; y <= 1; ++y)
{
float pcfDepth = texture2D(shadowMap, projCoords.xy + vec2(x, y) * texelSize).r;
shadowresult += currentDepth > pcfDepth ? 0.0 : 1.0;
}
}
shadowresult /= 9.0;
if(distanceTransition)
{
shadowresult = 1.0 - (1.0-shadowresult)*distance;
}
return shadowresult;`,
},
},
Name: "lightsAndShadowCalculationNoUV",
}
LightsAndShadowsFunctions3D = glslgen.Module{
Functions: []glslgen.Function{
glslgen.Function{
"float calcShadow(sampler2D shadowMap,mat4 lightSpaceMatrix,float shadowdistance,bool distanceTransition,ivec2 shadowMapSize)",
`float distance = 0.0;
if(distanceTransition)
{
distance = length(fragPos);
distance = distance - (shadowdistance - transitionDistance);
distance = distance / transitionDistance;
distance = clamp(1.0-distance,0.0,1.0);
}
vec4 fragPosLightSpace = lightSpaceMatrix*fragInverseViewMatrix3D*vec4(fragPos,1.0);
vec3 projCoords = clamp((fragPosLightSpace.xyz / fragPosLightSpace.w)*0.5+0.5,-1.0,1.0);
float currentDepth = projCoords.z-bias;
float shadowresult = 0.0;
float closestDepth = texture2D(shadowMap, projCoords.xy).r;
vec2 texelSize = 1.0 / vec2(shadowMapSize);
for(int x = -1; x <= 1; ++x)
{
for(int y = -1; y <= 1; ++y)
{
float pcfDepth = texture2D(shadowMap, projCoords.xy + vec2(x, y) * texelSize).r;
shadowresult += currentDepth > pcfDepth ? 0.0 : 1.0;
}
}
shadowresult /= 9.0;
if(distanceTransition)
{
shadowresult = 1.0 - (1.0-shadowresult)*distance;
}
return shadowresult;`,
},
},
Name: "lightsAndShadowCalculation",
}
calcPointLightFunc = glslgen.Function{
"void calculatePointLight(PointLight pl,int index)",
`vec3 lightPosition = (fragViewMatrix3D*vec4(pl.position,1.0)).xyz;
vec3 lightDir = normalize(fragToTangentSpace*(lightPosition - fragPos));
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
diffuse *= attent;
specular *= attent;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
}
LightsAndShadowsCalculationModule3D = glslgen.Module{
Functions: []glslgen.Function{
calcPointLightFunc,
glslgen.Function{
"void calculateDirectionalLight(DirectionalLight dl,int index)",
`vec3 lightDirection = (fragViewMatrix3D*vec4(dl.direction*-1.0,0.0)).xyz;
vec3 lightDir = normalize(fragToTangentSpace*lightDirection);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,dl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,dl.specularColor);
// Shadow
float shadow = dl.castsShadows ? calcShadow(directionalLightsshadowmap[index],dl.lightSpaceMatrix,dl.shadowDistance,true,dl.shadowMapSize) : 1.0;
diffuse *= shadow;
specular *= shadow;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
glslgen.Function{
"void calculateSpotLight(SpotLight pl,int index)",
`vec3 lightPosition = (fragViewMatrix3D*vec4(pl.position,1.0)).xyz;
vec3 lightDirection = (fragViewMatrix3D*vec4(pl.direction*-1.0,0.0)).xyz;
vec3 lightDir = normalize(fragToTangentSpace*(lightPosition-fragPos));
// Spotamount
float spotAmount = calcSpotAmount(lightDir,lightDirection,pl);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
// Shadow
float shadow = pl.castsShadows ? calcShadow(spotLightsshadowmap[index],pl.lightSpaceMatrix,50.0,false,pl.shadowMapSize) : 1.0;
// float shadow = 1.0;
diffuse *= attent * spotAmount * shadow;
specular *= attent * spotAmount * shadow;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
},
}
LightCalculationModel3D = glslgen.Module{
Functions: []glslgen.Function{
calcPointLightFunc,
glslgen.Function{
"void calculateDirectionalLight(DirectionalLight dl,int index)",
`vec3 lightDirection = (fragViewMatrix3D*vec4(dl.direction*-1.0,0.0)).xyz;
vec3 lightDir = normalize(fragToTangentSpace*lightDirection);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,dl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,dl.specularColor);
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
glslgen.Function{
"void calculateSpotLight(SpotLight pl,int index)",
`vec3 lightPosition = (fragViewMatrix3D*vec4(pl.position,1.0)).xyz;
vec3 lightDirection = (fragViewMatrix3D*vec4(pl.direction*-1.0,0.0)).xyz;
vec3 lightDir = normalize(fragToTangentSpace*(lightPosition-fragPos));
// Spotamount
float spotAmount = calcSpotAmount(lightDir,lightDirection,pl);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
diffuse *= attent * spotAmount;
specular *= attent * spotAmount;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
},
}
calcPointLightNoUVFunc = glslgen.Function{
"void calculatePointLight(PointLight pl,int index)",
`vec3 lightPosition = pl.position;
vec3 lightDir = normalize(lightPosition - fragPos);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
diffuse *= attent;
specular *= attent;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
}
LightsAndShadowsCalculationNoUVModule3D = glslgen.Module{
Functions: []glslgen.Function{
calcPointLightNoUVFunc,
glslgen.Function{
"void calculateDirectionalLight(DirectionalLight dl,int index)",
`vec3 lightDirection = -dl.direction;
vec3 lightDir = normalize(lightDirection);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,dl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,dl.specularColor);
// Shadow
float shadow = dl.castsShadows ? calcShadow(directionalLightsshadowmap[index],dl.lightSpaceMatrix,dl.shadowDistance,true,dl.shadowMapSize) : 1.0;
diffuse *= shadow;
specular *= shadow;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
glslgen.Function{
"void calculateSpotLight(SpotLight pl,int index)",
`vec3 lightPosition = pl.position;
vec3 lightDirection = -pl.direction;
vec3 lightDir = normalize(lightPosition-fragPos);
// Spotamount
float spotAmount = calcSpotAmount(lightDir,lightDirection,pl);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
// Shadow
float shadow = pl.castsShadows ? calcShadow(spotLightsshadowmap[index],pl.lightSpaceMatrix,50.0,false,pl.shadowMapSize) : 1.0;
// float shadow = 1.0;
diffuse *= attent * spotAmount * shadow;
specular *= attent * spotAmount * shadow;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
},
}
LightCalculationNoUVModule3D = glslgen.Module{
Functions: []glslgen.Function{
calcPointLightNoUVFunc,
glslgen.Function{
"void calculateDirectionalLight(DirectionalLight dl,int index)",
`vec3 lightDirection = -dl.direction;
vec3 lightDir = normalize(lightDirection);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,dl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,dl.specularColor);
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
glslgen.Function{
"void calculateSpotLight(SpotLight pl,int index)",
`vec3 lightPosition = pl.position;
vec3 lightDirection = -pl.direction;
vec3 lightDir = normalize(lightPosition-fragPos);
// Spotamount
float spotAmount = calcSpotAmount(lightDir,lightDirection,pl);
// Diffuse
vec3 diffuse = diffuseLighting(lightDir,pl.diffuseColor);
// Specular
vec3 specular = specularLighting(lightDir,pl.specularColor);
// Attentuation
float attent = calcAttentuation(lightPosition,pl.attentuation);
diffuse *= attent * spotAmount;
specular *= attent * spotAmount;
finalDiffuseColor += vec4(diffuse,0.0);
finalSpecularColor += vec4(specular,0.0);`,
},
},
}
MaterialModule3D = glslgen.Module{
Structs: []glslgen.Struct{
glslgen.Struct{
Name: "Material",
Variables: []glslgen.Variable{
glslgen.Variable{"vec3", "highp", "diffuseColor"},
glslgen.Variable{"vec3", "highp", "specularColor"},
glslgen.Variable{"bool", "", "DiffuseTextureLoaded"},
glslgen.Variable{"bool", "", "SpecularTextureLoaded"},
glslgen.Variable{"bool", "", "NormalMapLoaded"},
glslgen.Variable{"float", "highp", "shinyness"},
glslgen.Variable{"float", "highp", "transparency"},
},
},
},
Uniforms: []glslgen.Variable{
glslgen.Variable{"Material", "", "material"},
},
Name: "materialCalculation",
Body: `finalDiffuseColor *= vec4(material.diffuseColor,material.transparency);
finalSpecularColor *= vec4(material.specularColor,0.0);
finalAmbientColor *= vec4(material.diffuseColor,0.0);`,
}
DiffuseTextureModule3D = glslgen.Module{
Uniforms: []glslgen.Variable{
glslgen.Variable{"sampler2D", "highp", "materialdiffuseTexture"},
},
Name: "diffuseTextureCalculation",
Body: `vec4 texDifCol;
if(material.DiffuseTextureLoaded)
texDifCol = texture2D(materialdiffuseTexture,fragTexCoord);
else
texDifCol = vec4(1.0);
finalDiffuseColor *= texDifCol;
finalAmbientColor *= texDifCol;`,
}
SpecularTextureModule3D = glslgen.Module{
Uniforms: []glslgen.Variable{
glslgen.Variable{"sampler2D", "highp", "materialspecularTexture"},
},
Name: "specularTextureCalculation",
Body: `vec4 texSpecCol;
if(material.SpecularTextureLoaded)
texSpecCol = texture2D(materialspecularTexture,fragTexCoord);
else
texSpecCol = vec4(1.0);
finalSpecularColor *= texSpecCol;`,
}
NormalMapModule3D = glslgen.Module{
Uniforms: []glslgen.Variable{
glslgen.Variable{"sampler2D", "highp", "materialnormalMap"},
},
Name: "normalMapCalculation",
Body: `if(material.NormalMapLoaded)
norm = normalize(2.0*(texture2D(materialnormalMap,fragTexCoord)).xyz-1.0);`,
}
FinalModuleFragment3D = glslgen.Module{
Name: "finalCalculation",
Body: `if(finalDiffuseColor.a < 0.1)
discard;
gl_FragColor = finalDiffuseColor + finalSpecularColor + finalAmbientColor;`,
}
)
func LoadGeneratedShader3D(shader_type uint8, flags uint32) Shader {
n, v, f := GenerateShader3D(shader_type, flags)
return ls(n, v, f)
}
func GenerateShaderSource3D() {
n, v, f := GenerateShader3D(SHADER_TYPE_3D, 0)
ls(n, v, f)
n, v, f = GenerateShader3D(SHADER_TYPE_3D, SHADER_FLAG_NOUV)
ls(n, v, f)
n, v, f = GenerateShader3D(SHADER_TYPE_3D, SHADER_FLAG_INSTANCED)
ls(n, v, f)
n, v, f = GenerateShader3D(SHADER_TYPE_3D, SHADER_FLAG_INSTANCED|SHADER_FLAG_NOUV)
ls(n, v, f)
}
func ls(n, v, f string) Shader {
return ResourceMgr.LoadShaderSource(n, v, f, "", "", "", "")
}
const (
SHADER_FLAG_INSTANCED uint32 = (1 << 0)
SHADER_FLAG_NOUV uint32 = (1 << 1)
SHADER_FLAG_NO_SHADOWS uint32 = (1 << 2)
SHADER_FLAG_NO_LIGHTING uint32 = (1 << 3)
SHADER_FLAG_NO_DIFTEX uint32 = (1 << 4)
SHADER_FLAG_NO_SPECTEX uint32 = (1 << 5)
SHADER_FLAG_NO_NORMAP uint32 = (1 << 6)
NUM_FLAGS_3D = 7
SHADER_FLAG_NO_KEYCOLOR uint32 = (1 << 1)
SHADER_FLAG_NO_MODCOLOR uint32 = (1 << 2)
SHADER_FLAG_NO_FLIP uint32 = (1 << 3)
SHADER_FLAG_NO_TEXTURE_REGION uint32 = (1 << 4)
SHADER_FLAG_NO_DEPTH uint32 = (1 << 5)
SHADER_FLAG_NO_TEXTURE uint32 = (1 << 6)
SHADER_FLAG_DEPTHMAP uint32 = (1 << 7)
NUM_FLAGS_2D = 8
)
var (
FLAG_NAMES_3D = [NUM_FLAGS_3D]string{
"Instanced",
"NoUV",
"NoShadows",
"NoLighting",
"NoDiftex",
"NoSpectex",
"NoNormap",
}
FLAG_NAMES_2D = [NUM_FLAGS_2D]string{
"Instanced",
"NoKeyColor",
"NoModColor",
"NoFlip",
"NoTextureRegion",
"NoDepth",
"NoTexture",
"DepthMap",
}
)
func GetShaderName3D(flags uint32) string {
var n = "3D"
for i := uint32(0); i < NUM_FLAGS_3D; i++ {
if flags&(1<<i) != 0 {
n += " " + FLAG_NAMES_3D[i]
}
}
return n
}
func GenerateShader3D(shader_type uint8, flags uint32) (n, v, f string) {
if shader_type == SHADER_TYPE_3D {
n, v, f = generateShader3D(flags)
} else {
n, v, f = generateShaderShape3D()
}
return
}
func generateShader3D(flags uint32) (n, v, f string) {
if flags&SHADER_FLAG_NO_LIGHTING != 0 {
flags |= SHADER_FLAG_NO_SHADOWS
}
startFlags := flags
if !Render.HasFunctionAvailable("INSTANCED") {
flags &= ^SHADER_FLAG_INSTANCED
}
if flags&SHADER_FLAG_NOUV != 0 {
flags |= SHADER_FLAG_NO_DIFTEX | SHADER_FLAG_NO_SPECTEX | SHADER_FLAG_NO_NORMAP
}
rname := Render.GetName()
if rname == "OpenGLES2" || rname == "WebGL" {
flags |= SHADER_FLAG_NO_SHADOWS | SHADER_FLAG_NO_NORMAP
}
var vertex glslgen.VertexGenerator
var fragment glslgen.FragmentGenerator
if rname == "WebGL" {
vertex.SetVersion("WebGL")
fragment.SetVersion("WebGL")
} else if strings.Contains(rname, "OpenGLES") {
vertex.SetVersion("100")
fragment.SetVersion("100")
} else {
vertex.SetVersion(ShaderVersion)
fragment.SetVersion(ShaderVersion)
}
vertex.AddAttributes(Attributes3D)
if flags&SHADER_FLAG_INSTANCED != 0 {
vertex.AddAttributes(AttributesInstanced3D)
}
vertex.AddOutputs(InputsFragment3D)
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
vertex.AddOutputs(InputsNormalFragment3D)
}
if rname == "OpenGLES2" || rname == "WebGL" {
vertex.AddOutput(glslgen.Variable{"vec2", "highp", "fragTexCoord"})
}
vertex.AddModule(UniformModuleVertex3D)
if flags&SHADER_FLAG_INSTANCED == 0 {
vertex.AddModule(UniformNormalModuleVertex3D)
}
vertex.AddModule(CalculatePositionModule3D)
vertex.AddModule(SetOutputsModuleVertex3D)
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
vertex.AddModule(SetOutputsNormalModuleVertex3D)
} else {
vertex.AddModule(SetOutputsNoUVModuleVertex3D)
}
if flags&SHADER_FLAG_NOUV == 0 {
vertex.AddModule(SetOutputTexCoordModuleVertex3D)
}
if flags&SHADER_FLAG_NO_LIGHTING == 0 {
fragment.AddMakros(LightMakrosFragment3D)
}
fragment.AddGlobals(GlobalsFragment3D)
fragment.AddInputs(InputsFragment3D)
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
fragment.AddInputs(InputsNormalFragment3D)
}
if flags&SHADER_FLAG_NOUV == 0 && (rname == "OpenGLES2" || rname == "WebGL") {
fragment.AddInput(glslgen.Variable{"vec2", "highp", "fragTexCoord"})
}
fragment.AddModule(InitialiseModuleFragment3D)
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
fragment.AddModule(InitialiseNormalModuleFragment3D)
} else {
fragment.AddModule(InitialiseNoUVModuleFragment3D)
}
if flags&(SHADER_FLAG_NO_NORMAP|SHADER_FLAG_NOUV) == 0 {
fragment.AddModule(NormalMapModule3D)
}
if flags&SHADER_FLAG_NO_LIGHTING == 0 {
fragment.AddModule(LightUniformsModule3D)
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
fragment.AddModule(LightCalcSpotAmountNormalModule3D)
} else {
fragment.AddModule(LightCalcSpotAmountNoUVModule3D)
}
if flags&SHADER_FLAG_NO_SHADOWS == 0 {
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
fragment.AddModule(LightsAndShadowsFunctions3D)
fragment.AddModule(LightsAndShadowsCalculationModule3D)
} else {
fragment.AddModule(LightsAndShadowsFunctionsNoUV3D)
fragment.AddModule(LightsAndShadowsCalculationNoUVModule3D)
}
} else {
if flags&SHADER_FLAG_NOUV == 0 && (rname != "OpenGLES2" && rname != "WebGL") {
fragment.AddModule(LightCalculationModel3D)
} else {
fragment.AddModule(LightCalculationNoUVModule3D)
}
}
}
fragment.AddModule(MaterialModule3D)
if flags&SHADER_FLAG_NO_DIFTEX == 0 {
fragment.AddModule(DiffuseTextureModule3D)
}
if flags&SHADER_FLAG_NO_SPECTEX == 0 {
fragment.AddModule(SpecularTextureModule3D)
}
fragment.AddModule(FinalModuleFragment3D)
v = vertex.String()
f = fragment.String()
n = GetShaderName3D(startFlags)
return
} | src/gohome/shadermodules3dopengl.go | 0.552298 | 0.610773 | shadermodules3dopengl.go | starcoder |
package heap
type Heap[T comparable] struct {
nodes []T // The array that stores the heap's nodes.
orderCriteria func(T, T) bool // Determines how to compare two nodes in the heap.
}
/*
Creates an empty heap.
The sort function determines whether this is a min-heap or max-heap.
For comparable data types, > makes a max-heap, < makes a min-heap.
*/
func HeapInit[T comparable](sort func(T, T) bool) *Heap[T] {
heap := &Heap[T]{}
heap.orderCriteria = sort
return heap
}
/*
Creates a heap from an array. The order of the array does not matter;
the elements are Inserted into the heap in the order determined by the
sort function. For comparable data types, '>' makes a max-heap,
'<' makes a min-heap.
*/
func HeapSliceInit[T comparable](slice []T, sort func(T, T) bool) *Heap[T] {
heap := &Heap[T]{}
heap.orderCriteria = sort
heap.configureHeap(&slice)
return heap
}
/*
Configures the max-heap or min-heap from an array, in a bottom-up manner.
Performance: This runs pretty much in O(n).
*/
func (self *Heap[T]) configureHeap(slice *[]T) {
self.nodes = *slice
for i := len(self.nodes)/2 - 1; i >= 0; i -= 1 {
self.shiftDown(i)
}
}
func (self *Heap[T]) IsEmpty() bool {
if nodes := self.nodes; nodes != nil {
return len(nodes) == 0
}
return true
}
func (self *Heap[T]) Count() int {
if !self.IsEmpty() {
return len(self.nodes)
}
return 0
}
/*
Returns the index of the parent of the element at index i.
The element at index 0 is the root of the tree and has no parent.
*/
func (self *Heap[T]) parentIndex(index int) int {
return (index - 1) / 2
}
/*
Returns the index of the left child of the element at index i.
Note that this index can be greater than the heap size, in which case
there is no left child.
*/
func (self *Heap[T]) leftChildIndex(index int) int {
return 2*index + 1
}
/*
Returns the index of the right child of the element at index i.
Note that this index can be greater than the heap size, in which case
there is no right child.
*/
func (self *Heap[T]) rightChildIndex(index int) int {
return 2*index + 2
}
/*
Returns the maximum value in the heap (for a max-heap) or the minimum
value (for a min-heap).
*/
func (self *Heap[T]) Peek() (T, bool) {
if self.IsEmpty() {
var element T
return element, false
}
element := self.nodes[0]
return element, true
}
/*
Adds a new value to the heap. This reorders the heap so that the max-heap
or min-heap property still holds. Performance: O(log n).
*/
func (self *Heap[T]) Insert(value T) {
self.nodes = append(self.nodes, value)
self.shiftUp(self.Count() - 1)
}
/*
Adds a sequence of values to the heap. This reorders the heap so that
the max-heap or min-heap property still holds. Performance: O(log n).
*/
func (self *Heap[T]) InsertSequence(sequence ...T) {
for _, value := range sequence {
self.Insert(value)
}
}
/*
Allows you to change an element. This reorders the heap so that
the max-heap or min-heap property still holds.
*/
func (self *Heap[T]) Replace(index int, value T) {
if index >= self.Count() {
return
}
self.PopAt(index)
self.Insert(value)
}
/*
Removes the root node from the heap. For a max-heap, this is the maximum
value; for a min-heap it is the minimum value. Performance: O(log n).
*/
func (self *Heap[T]) Pop() (T, bool) {
if self.IsEmpty() {
var value T
return value, false
}
if self.Count() == 1 {
value := self.nodes[0]
self.nodes = self.nodes[1:]
return value, true
} else {
value := self.nodes[0]
self.nodes[0] = self.nodes[len(self.nodes)-1]
self.nodes = self.nodes[:len(self.nodes)-1]
self.shiftDown(0)
return value, true
}
}
/*
Removes an arbitrary node from the heap. Performance: O(log n).
Note that you need to know the node's index.
*/
func (self *Heap[T]) PopAt(index int) (T, bool) {
if index >= self.Count() {
var value T
return value, false
}
size := self.Count() - 1
if index != size {
self.nodes[index], self.nodes[size] = self.nodes[size], self.nodes[index]
self.shiftDown(index, size)
self.shiftUp(index)
}
value := self.nodes[len(self.nodes)-1]
self.nodes = self.nodes[:len(self.nodes)-1]
return value, true
}
/*
Takes a child node and looks at its parents; if a parent is not larger
(max-heap) or not smaller (min-heap) than the child, we exchange them.
*/
func (self *Heap[T]) shiftUp(index int) {
childIndex := index
child := self.nodes[childIndex]
parentIndex := self.parentIndex(childIndex)
for childIndex > 0 && self.orderCriteria(child, self.nodes[parentIndex]) {
self.nodes[childIndex] = self.nodes[parentIndex]
childIndex = parentIndex
parentIndex = self.parentIndex(childIndex)
}
self.nodes[childIndex] = child
}
/*
Looks at a parent node and makes sure it is still larger (max-heap) or
smaller (min-heap) than its childeren.
*/
func (self *Heap[T]) shiftDown(indicies ...int) {
if len(indicies) == 1 {
self.shiftDown(indicies[0], self.Count())
return
}
index := indicies[0]
endIndex := indicies[1]
leftChildIndex := self.leftChildIndex(index)
rightChildIndex := leftChildIndex + 1
/*
Figure out which comes first if we order them by the sort function:
the parent, the left child, or the right child. If the parent comes
first, we're done. If not, that element is out-of-place and we make
it "float down" the tree until the heap property is restored.
*/
first := index
if leftChildIndex < endIndex && self.orderCriteria(self.nodes[leftChildIndex], self.nodes[first]) {
first = leftChildIndex
}
if rightChildIndex < endIndex && self.orderCriteria(self.nodes[rightChildIndex], self.nodes[first]) {
first = rightChildIndex
}
if first == index {
return
}
self.nodes[index], self.nodes[first] = self.nodes[first], self.nodes[index]
self.shiftDown(first, endIndex)
}
/*
Get the index of a node in the heap. Performance: O(n).
*/
func (self *Heap[T]) Search(node T) int {
for index, n := range self.nodes {
if n == node {
return index
}
}
return -1
}
/*
Removes the first occurrence of a node from the heap. Performance: O(n).
*/
func (self *Heap[T]) PopNode(node T) (T, bool) {
if index := self.Search(node); index != -1 {
return self.PopAt(index)
}
var value T
return value, false
}
func (self *Heap[T]) IndexOf(node T) int {
if !self.IsEmpty() {
for index, n := range self.nodes {
if n == node {
return index
}
}
}
return -1
} | Heap/heap.go | 0.846768 | 0.700011 | heap.go | starcoder |
package rabbitmonit
import (
"strconv"
"github.com/c-datculescu/rabbit-hole"
)
/*
QueueProperties offers a broader set of operations than rabbithole.QueueInfo including warnings,
errors and statistics
*/
type QueueProperties struct {
Stats QueueStat
Error QueueAlert
Warning QueueAlert
QueueInfo rabbithole.QueueInfo
Client *rabbithole.Client
}
/*
QueueStat holds a set of queue related statistics
*/
type QueueStat struct {
NonPersistentMessagesCount int // the number of non-persistent messages in the queue
RdyReduced string // ready messages in k, m, g etc
UnackReduced string // unacked messages in k, m, g etc
TransReduced string // non-durable messages in k, m, g etc
ConsumerReduced string // consumers in k, m, g etc
Utilisation float64 // the utilisation converted to float64 and rounded to 2 decimals
EnqueueDequeueDiff float32 // the difference between enqueue and dequeue
}
/*
QueueAlert holds various alert flags
*/
type QueueAlert struct {
State bool // the status of the queue. will be true if the state is not "running"
NonDurable bool // the durability of the queue. will be true if the queue is not durable (will not survive a server restart)
Rdy bool // the number of rady messages. 1-100 = warning, >100 = error
Unack bool // the number of unacknowledged messages. > ∑ consumer qos = error
Listener bool // the number of consumers. 1-3 = warning, 0 = error
Utilisation bool // the consumer utilisation. < 70 = warning, < 30 = error
Intake bool // the diff between in and out. ∑ in, out > 1 = warning
ConsumptionLow bool // the diff between enqueue/dequeue is too large
NonDurableMsg bool // the number of non-durable messages. will be true if the number of non-durable messages > 1
Has bool // identifies whether we have errors/warnings at all
}
/*
Calculate performs various additional calculations based on the details provided by the api
for the queues, initialising and calculating the warnings, alerts and stats
*/
func (qp *QueueProperties) Calculate() {
qp.Stats = QueueStat{}
qp.Error = QueueAlert{}
qp.Warning = QueueAlert{}
qp.calculateStats().
alertState().
alertDurable().
alertRdy().
alertListener().
alertUtilisation().
alertIntake().
alertNonDurableMessages().
alertUnackMessages()
}
func (qp *QueueProperties) calculateStats() *QueueProperties {
var util float64
switch qp.QueueInfo.ConsumerUtilisation.(type) {
case string:
util, _ = strconv.ParseFloat(qp.QueueInfo.ConsumerUtilisation.(string), 64)
default:
util = qp.QueueInfo.ConsumerUtilisation.(float64)
}
qp.Stats.Utilisation = util
qp.Stats.RdyReduced = reduceInt(qp.QueueInfo.MessagesRdy)
qp.Stats.TransReduced = reduceInt(qp.Stats.NonPersistentMessagesCount)
qp.Stats.UnackReduced = reduceInt(qp.QueueInfo.MessagesUnack)
qp.Stats.ConsumerReduced = reduceInt(qp.QueueInfo.Consumers)
return qp
}
/*
alertRdy calculates whether it should raise an alert or warning when there are messages in the queue that are in
ready status
threshold for warnings is 0
threshold for error is 100
*/
func (qp *QueueProperties) alertRdy() *QueueProperties {
if qp.QueueInfo.MessagesRdy > 100 {
qp.Error.Rdy = true
qp.Error.Has = true
} else if qp.QueueInfo.MessagesRdy > 0 {
qp.Warning.Rdy = true
qp.Warning.Has = true
}
return qp
}
/*
alertListener calculates whether there should be an alert/warning raised for the number of listeners on the current
queue
threshold for alert is 0 listeners and more than 0 ready messages
threshold for warning is 3 listeners and more than 0 ready messages
*/
func (qp *QueueProperties) alertListener() *QueueProperties {
if qp.QueueInfo.Consumers == 0 && qp.QueueInfo.MessagesRdy > 0 {
qp.Error.Listener = true
qp.Error.Has = true
} else if qp.QueueInfo.Consumers <= 3 && qp.QueueInfo.MessagesRdy > 0 {
qp.Warning.Has = true
qp.Warning.Listener = true
}
return qp
}
/*
alertUtilisation calculates whether there should be an alert/warning raised for the utilisation on the current queue
utilisation is important to give an overview of how many messages end up being dispatched
utlisation is only usable though for high traffic queues
threshold for alert is utilisation less than 30 and more than 0 messages ready in the queue
threshold for warning is utilisation less than 70 and messages ready in the queue
*/
func (qp *QueueProperties) alertUtilisation() *QueueProperties {
var consumerUtilisation float64
switch qp.QueueInfo.ConsumerUtilisation.(type) {
case string:
if qp.QueueInfo.ConsumerUtilisation == "" {
qp.QueueInfo.ConsumerUtilisation = "0"
}
consumerUtilisation, _ = strconv.ParseFloat(qp.QueueInfo.ConsumerUtilisation.(string), 64)
case float64:
consumerUtilisation = qp.QueueInfo.ConsumerUtilisation.(float64)
}
if consumerUtilisation < 30 && qp.QueueInfo.MessagesRdy > 0 {
qp.Error.Has = true
qp.Error.Utilisation = true
} else if consumerUtilisation < 70 && qp.QueueInfo.MessagesRdy > 0 {
qp.Warning.Has = true
qp.Warning.Utilisation = true
}
return qp
}
/*
alertIntake should be deprecated and replaced by a diff between enqueue rate and dequeue rate
@todo replace intake alert with enqueue/dequeue rate difference
*/
func (qp *QueueProperties) alertIntake() *QueueProperties {
if qp.QueueInfo.MessagesRdyDetails.Rate > 1 {
qp.Error.Has = true
qp.Warning.Intake = true
}
return qp
}
/*
alertNonDurableMessages raises an alert if the queue contains non-durable messages.
non-durable messages can be lost on a server restart
*/
func (qp *QueueProperties) alertNonDurableMessages() *QueueProperties {
qp.Stats.NonPersistentMessagesCount = qp.QueueInfo.Messages - qp.QueueInfo.MessagesPersistent
if qp.Stats.NonPersistentMessagesCount > 0 {
qp.Error.Has = true
qp.Error.NonDurableMsg = true
}
return qp
}
/*
alertUnackMessages raises an alert by analysing individual properties of consumers on queue as well as unack messages
the threshold for alert is sum of consumer prefetch count is lower than the number of unack messages in the queue
*/
func (qp *QueueProperties) alertUnackMessages() *QueueProperties {
consumers, err := qp.Client.ConsumersIn(qp.QueueInfo.Vhost)
if err != nil {
return qp
}
var total int
for _, consumer := range consumers {
if qp.QueueInfo.Name == consumer.Queue.Name && qp.QueueInfo.Vhost == consumer.Queue.Vhost {
total += consumer.PrefetchCount
}
}
if qp.QueueInfo.MessagesUnack > total {
qp.Error.Has = true
qp.Error.Unack = true
}
return qp
}
/*
alertState raises an alert if the state of the queue is not "running"
*/
func (qp *QueueProperties) alertState() *QueueProperties {
if qp.QueueInfo.State != "running" {
qp.Error.Has = true
qp.Error.State = true
}
return qp
}
/*
alertDurable raises an alert if the queue is not durable
*/
func (qp *QueueProperties) alertDurable() *QueueProperties {
if !qp.QueueInfo.Durable {
qp.Error.Has = true
qp.Error.NonDurable = true
}
return qp
}
/*
alertConsumptionLow raises an alert/warning when consumption rate vs ingestion rate exceeds certain values
threshold for alert is rate difference bigger than 10 and consuption is less than publishing
threshold for alert is rate difference bigger than 5 and consuption is less than publishing
*/
func (qp *QueueProperties) alertConsumptionLow() *QueueProperties {
rate := qp.QueueInfo.MessageStats.PublishDetails.Rate - qp.QueueInfo.MessageStats.DeliverDetails.Rate
qp.Stats.EnqueueDequeueDiff = rate
lowerThan := qp.QueueInfo.MessageStats.DeliverDetails.Rate < qp.QueueInfo.MessageStats.PublishDetails.Rate
if lowerThan && rate > 10 {
qp.Error.Has = true
qp.Error.ConsumptionLow = true
} else if lowerThan && rate > 5 {
qp.Warning.Has = true
qp.Warning.ConsumptionLow = true
}
return qp
} | queue.go | 0.52074 | 0.432243 | queue.go | starcoder |
package turfgo
// LineDiff take two lines and gives an array of lines by subracting second from first. Single coordinate overlaps are ignored.
// Line should not have duplicate values.
func LineDiff(firstLine *LineString, secondLine *LineString) []*LineString {
diffSegments := []*LineString{}
fPoints := firstLine.Points
sPoints := secondLine.Points
for i := 0; i < len(fPoints)-1; i++ {
if !containLocationPair(sPoints, fPoints[i], fPoints[i+1]) {
diffSegments = append(diffSegments, NewLineString([]*Point{fPoints[i], fPoints[i+1]}))
}
}
return reduceDiffSegment(diffSegments)
}
// LineDiffPercentage take two lines and give the percentage of difference between first and second line with respect to first line.
// Single coordinate overlaps are ignored. Line should not have duplicate values.
func LineDiffPercentage(firstLine *LineString, secondLine *LineString) float64 {
totalPoints := len(firstLine.Points)
if totalPoints == 0 {
return 0
}
diff := LineDiff(firstLine, secondLine)
if len(diff) == 1 && totalPoints == len(diff[0].Points) {
return 100
}
diffPoints := 0
for _, line := range diff {
isStartingSegment := isEqualLocation(firstLine.Points[0], line.Points[0])
isEndingSegment := isEqualLocation(firstLine.Points[len(firstLine.Points)-1], line.Points[len(line.Points)-1])
diffPoints += len(line.Points)
if isStartingSegment || isEndingSegment {
diffPoints--
} else {
diffPoints -= 2
}
}
return (float64(diffPoints) / float64(totalPoints)) * 100
}
func reduceDiffSegment(segments []*LineString) []*LineString {
if len(segments) == 0 {
return segments
}
result := []*LineString{}
previousSeg := segments[0]
for i := 1; i < len(segments); i++ {
currentSeg := segments[i]
pLen := len(previousSeg.Points)
previousSegLastPoint := previousSeg.Points[pLen-1]
currentSegFirstPoint := currentSeg.Points[0]
if isEqualLocation(previousSegLastPoint, currentSegFirstPoint) {
mergedPoints := append(previousSeg.Points, currentSeg.Points[1])
previousSeg = NewLineString(mergedPoints)
} else {
result = append(result, previousSeg)
previousSeg = currentSeg
}
}
result = append(result, previousSeg)
return result
}
func containLocationPair(points []*Point, point1, point2 *Point) bool {
for i := 0; i < len(points)-1; i++ {
if isEqualLocation(point1, points[i]) && isEqualLocation(point2, points[i+1]) {
return true
}
}
return false
} | transformation.go | 0.730963 | 0.61341 | transformation.go | starcoder |
package redis
import (
"context"
"fmt"
"strconv"
"time"
"github.com/go-redis/redis/v7"
"github.com/benthosdev/benthos/v4/internal/bloblang/field"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component/processor"
"github.com/benthosdev/benthos/v4/internal/docs"
bredis "github.com/benthosdev/benthos/v4/internal/impl/redis/old"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
oprocessor "github.com/benthosdev/benthos/v4/internal/old/processor"
"github.com/benthosdev/benthos/v4/internal/tracing"
)
//------------------------------------------------------------------------------
func init() {
err := bundle.AllProcessors.Add(func(conf oprocessor.Config, mgr bundle.NewManagement) (processor.V1, error) {
p, err := newRedisProc(conf.Redis, mgr)
if err != nil {
return nil, err
}
return processor.NewV2BatchedToV1Processor("redis", p, mgr.Metrics()), nil
}, docs.ComponentSpec{
Name: "redis",
Type: docs.TypeProcessor,
Status: docs.StatusStable,
Categories: []string{
"Integration",
},
Summary: `
Performs actions against Redis that aren't possible using a
` + "[`cache`](/docs/components/processors/cache)" + ` processor. Actions are
performed for each message of a batch, where the contents are replaced with the
result.`,
Description: `
## Operators
### ` + "`keys`" + `
Returns an array of strings containing all the keys that match the pattern specified by the ` + "`key` field" + `.
### ` + "`scard`" + `
Returns the cardinality of a set, or ` + "`0`" + ` if the key does not exist.
### ` + "`sadd`" + `
Adds a new member to a set. Returns ` + "`1`" + ` if the member was added.
### ` + "`incrby`" + `
Increments the number stored at ` + "`key`" + ` by the message content. If the
key does not exist, it is set to ` + "`0`" + ` before performing the operation.
Returns the value of ` + "`key`" + ` after the increment.`,
Config: docs.FieldComponent().WithChildren(
bredis.ConfigDocs().Add(
docs.FieldString("operator", "The [operator](#operators) to apply.").HasOptions("scard", "sadd", "incrby", "keys").HasDefault(""),
docs.FieldString("key", "A key to use for the target operator.").IsInterpolated().HasDefault(""),
docs.FieldInt("retries", "The maximum number of retries before abandoning a request.").Advanced().HasDefault(3),
docs.FieldString("retry_period", "The time to wait before consecutive retry attempts.").Advanced().HasDefault("500ms"),
)...,
),
Examples: []docs.AnnotatedExample{
{
Title: "Querying Cardinality",
Summary: `
If given payloads containing a metadata field ` + "`set_key`" + ` it's possible
to query and store the cardinality of the set for each message using a
` + "[`branch` processor](/docs/components/processors/branch)" + ` in order to
augment rather than replace the message contents:`,
Config: `
pipeline:
processors:
- branch:
processors:
- redis:
url: TODO
operator: scard
key: ${! meta("set_key") }
result_map: 'root.cardinality = this'
`,
},
{
Title: "Running Total",
Summary: `
If we have JSON data containing number of friends visited during covid 19:
` + "```json" + `
{"name":"ash","month":"feb","year":2019,"friends_visited":10}
{"name":"ash","month":"apr","year":2019,"friends_visited":-2}
{"name":"bob","month":"feb","year":2019,"friends_visited":3}
{"name":"bob","month":"apr","year":2019,"friends_visited":1}
` + "```" + `
We can add a field that contains the running total number of friends visited:
` + "```json" + `
{"name":"ash","month":"feb","year":2019,"friends_visited":10,"total":10}
{"name":"ash","month":"apr","year":2019,"friends_visited":-2,"total":8}
{"name":"bob","month":"feb","year":2019,"friends_visited":3,"total":3}
{"name":"bob","month":"apr","year":2019,"friends_visited":1,"total":4}
` + "```" + `
Using the ` + "`incrby`" + ` operator:
`,
Config: `
pipeline:
processors:
- branch:
request_map: |
root = this.friends_visited
meta name = this.name
processors:
- redis:
url: TODO
operator: incrby
key: ${! meta("name") }
result_map: 'root.total = this'
`,
},
},
})
if err != nil {
panic(err)
}
}
//------------------------------------------------------------------------------
type redisProc struct {
log log.Modular
key *field.Expression
operator redisOperator
client redis.UniversalClient
retries int
retryPeriod time.Duration
}
func newRedisProc(conf oprocessor.RedisConfig, mgr interop.Manager) (*redisProc, error) {
var retryPeriod time.Duration
if tout := conf.RetryPeriod; len(tout) > 0 {
var err error
if retryPeriod, err = time.ParseDuration(tout); err != nil {
return nil, fmt.Errorf("failed to parse retry period string: %v", err)
}
}
client, err := conf.Config.Client()
if err != nil {
return nil, err
}
key, err := mgr.BloblEnvironment().NewField(conf.Key)
if err != nil {
return nil, fmt.Errorf("failed to parse key expression: %v", err)
}
r := &redisProc{
log: mgr.Logger(),
key: key,
retries: conf.Retries,
retryPeriod: retryPeriod,
client: client,
}
if r.operator, err = getRedisOperator(conf.Operator); err != nil {
return nil, err
}
return r, nil
}
type redisOperator func(r *redisProc, key string, part *message.Part) error
func newRedisKeysOperator() redisOperator {
return func(r *redisProc, key string, part *message.Part) error {
res, err := r.client.Keys(key).Result()
for i := 0; i <= r.retries && err != nil; i++ {
r.log.Errorf("Keys command failed: %v\n", err)
<-time.After(r.retryPeriod)
res, err = r.client.Keys(key).Result()
}
if err != nil {
return err
}
iRes := make([]interface{}, 0, len(res))
for _, v := range res {
iRes = append(iRes, v)
}
part.SetJSON(iRes)
return nil
}
}
func newRedisSCardOperator() redisOperator {
return func(r *redisProc, key string, part *message.Part) error {
res, err := r.client.SCard(key).Result()
for i := 0; i <= r.retries && err != nil; i++ {
r.log.Errorf("SCard command failed: %v\n", err)
<-time.After(r.retryPeriod)
res, err = r.client.SCard(key).Result()
}
if err != nil {
return err
}
part.Set(strconv.AppendInt(nil, res, 10))
return nil
}
}
func newRedisSAddOperator() redisOperator {
return func(r *redisProc, key string, part *message.Part) error {
res, err := r.client.SAdd(key, part.Get()).Result()
for i := 0; i <= r.retries && err != nil; i++ {
r.log.Errorf("SAdd command failed: %v\n", err)
<-time.After(r.retryPeriod)
res, err = r.client.SAdd(key, part.Get()).Result()
}
if err != nil {
return err
}
part.Set(strconv.AppendInt(nil, res, 10))
return nil
}
}
func newRedisIncrByOperator() redisOperator {
return func(r *redisProc, key string, part *message.Part) error {
valueInt, err := strconv.Atoi(string(part.Get()))
if err != nil {
return err
}
res, err := r.client.IncrBy(key, int64(valueInt)).Result()
for i := 0; i <= r.retries && err != nil; i++ {
r.log.Errorf("incrby command failed: %v\n", err)
<-time.After(r.retryPeriod)
res, err = r.client.IncrBy(key, int64(valueInt)).Result()
}
if err != nil {
return err
}
part.Set(strconv.AppendInt(nil, res, 10))
return nil
}
}
func getRedisOperator(opStr string) (redisOperator, error) {
switch opStr {
case "keys":
return newRedisKeysOperator(), nil
case "sadd":
return newRedisSAddOperator(), nil
case "scard":
return newRedisSCardOperator(), nil
case "incrby":
return newRedisIncrByOperator(), nil
}
return nil, fmt.Errorf("operator not recognised: %v", opStr)
}
func (r *redisProc) ProcessBatch(ctx context.Context, spans []*tracing.Span, msg *message.Batch) ([]*message.Batch, error) {
newMsg := msg.Copy()
_ = newMsg.Iter(func(index int, part *message.Part) error {
key := r.key.String(index, newMsg)
if err := r.operator(r, key, part); err != nil {
r.log.Debugf("Operator failed for key '%s': %v", key, err)
return err
}
return nil
})
return []*message.Batch{newMsg}, nil
}
func (r *redisProc) Close(ctx context.Context) error {
return r.client.Close()
} | internal/impl/redis/processor.go | 0.730097 | 0.592195 | processor.go | starcoder |
package main
// Window : Sliding Window data type
type Window struct {
length int // length of the stack
mirror []int // unordered list of values
size int // size of the sliding window
stack []int // ordered list of values
}
// AddDelay : adds a delay value to the stack
func (w *Window) AddDelay(delay int) {
// check if stack of values is full
if w.full() {
w.cut()
}
// no values have been stored yet or the value goes to the end of the stack
if w.length < 1 || w.stack[w.length-1] <= delay {
w.stack = append(w.stack, delay)
w.copy(delay)
return
}
// check if the value goes to the beginning of the stack
if w.stack[0] > delay {
w.stack = append([]int{delay}, w.stack...)
w.copy(delay)
return
}
// find the offset position and insert the value in the middle
offset := w.offset(delay)
w.stack = append(w.stack[:offset], append([]int{delay}, w.stack[offset:]...)...)
w.copy(delay)
}
// mirror : mirror the stack values in their order of arrival
func (w *Window) copy(val int) {
w.mirror = append(w.mirror, val)
w.length++
}
// cut : cut the oldest value from the stack
func (w *Window) cut() {
target := w.mirror[0]
w.mirror = w.mirror[1:]
for key, val := range w.stack {
if val == target {
w.stack = append(w.stack[:key], w.stack[key+1:]...)
break
}
}
w.length--
}
// full : check if the window is full
func (w *Window) full() bool {
if w.length < w.size {
return false
}
return true
}
// Median : calculate the median value from the stack of values
func (w *Window) Median() int {
// check if there is only 1 or less values in the stack
if w.length < 2 {
return -1
}
// calculate the median from an odd number of values
if w.length%2 == 1 {
l := w.length + 1
idx := l/2 - 1
return w.stack[idx]
}
// calculate the median from the even number of values
idx := w.length/2 - 1
val := w.stack[idx]
val += w.stack[idx+1]
return val / 2
}
// offset : find the offset for a delay value in the stack
func (w *Window) offset(val int) int {
for key, v := range w.stack {
if v > val || key == w.length-1 {
return key
}
}
return -1
}
// Size : set the size of the sliding window
func (w *Window) Size(val int) {
w.size = val
}
// NewSlidingWindow : Create a new Sliding Window object
func NewSlidingWindow() *Window {
win := &Window{}
win.length = 0
return win
} | sliding-window.go | 0.693265 | 0.433981 | sliding-window.go | starcoder |
package meetingtime
import (
"errors"
"time"
)
// Schedule defines a regular schedule for a meeting
type Schedule struct {
Type ScheduleType // Type of recurrence
First time.Time // Time and date of first meeting
Frequency uint // How frequently this meeting occurs. For a daily meeting, 2 would mean every other day.
}
// ScheduleType specifies the way in which this schedule recurs
type ScheduleType uint8
const (
// Daily specifies a meeting that recurs daily.
Daily ScheduleType = iota
// Weekly specifies a meeting that recurs weekly.
Weekly
// Monthly specifies a meeting that recurs monthly.
Monthly
// MonthlyByWeekday specifies a meeting that recurs on the nth weekday of the month (2nd Wednesday, for example), based on the first meeting date.
MonthlyByWeekday
// Yearly specifes a meeting that recurs yearly.
Yearly
)
// NewDailySchedule creates a schedule recurring every n days
func NewDailySchedule(first time.Time, n uint) Schedule {
return Schedule{Type: Daily, First: first, Frequency: n}
}
// NewWeeklySchedule creates a schedule recurring on the same day every n weeks
func NewWeeklySchedule(first time.Time, n uint) Schedule {
return Schedule{Type: Weekly, First: first, Frequency: n}
}
// NewMonthlySchedule creates a schedule recurring on the specified day in the month, every n months.
func NewMonthlySchedule(first time.Time, n uint) Schedule {
return Schedule{Type: Monthly, First: first, Frequency: n}
}
// NewMonthlyScheduleByWeekday creates a schedule recurring every month on the same day of the week as the first meeting (for example, the 2nd Wednesday).
func NewMonthlyScheduleByWeekday(first time.Time) Schedule {
return Schedule{Type: MonthlyByWeekday, First: first, Frequency: 1}
}
// NewYearlySchedule creates a schedule recurring every n years
func NewYearlySchedule(first time.Time, n uint) Schedule {
return Schedule{Type: Yearly, First: first, Frequency: n}
}
/*
Next returns the time of the next meeting after the given time.
*/
func (s Schedule) Next(t time.Time) (time.Time, error) {
var err error
c := s.First
for c.Before(t) || c.Equal(t) {
c, err = s.increment(c)
if err != nil {
return time.Time{}, err
}
}
return c, nil
}
/*
Previous returns the time of the closest meeting before the given time.
If the given time is before the first meeting, ErrNoEarlierMeetings will be returned.
*/
func (s Schedule) Previous(t time.Time) (time.Time, error) {
if t.Before(s.First) || t.Equal(s.First) {
return time.Time{}, ErrNoEarlierMeetings
}
var err error
c := s.First
prev := s.First
for c.Before(t) {
prev = c
c, err = s.increment(c)
if err != nil {
return time.Time{}, err
}
}
return prev, nil
}
func (s *Schedule) increment(t time.Time) (time.Time, error) {
if s.Type == Daily {
return t.AddDate(0, 0, int(s.Frequency)), nil
}
if s.Type == Weekly {
return t.AddDate(0, 0, 7*int(s.Frequency)), nil
}
if s.Type == Monthly {
return t.AddDate(0, int(s.Frequency), 0), nil
}
if s.Type == MonthlyByWeekday {
// Identify the weekday and index
weekday, n := GetWeekdayAndIndex(s.First)
c := t.AddDate(0, 0, 1)
w, cn := GetWeekdayAndIndex(c)
for w != weekday || n != cn {
c = c.AddDate(0, 0, 1)
w, cn = GetWeekdayAndIndex(c)
}
return c, nil
}
if s.Type == Yearly {
return t.AddDate(int(s.Frequency), 0, 0), nil
}
return time.Time{}, errors.New("not implemented")
}
// GetWeekdayAndIndex returns the Weekday of a given time, along with the count of that particular
// day in the month. For example: a time on October 12th 2016, would return Wednesday and 2, since
// that date is the second Wednesday in the month.
func GetWeekdayAndIndex(t time.Time) (weekday time.Weekday, n int) {
// Identify the weekday and index
weekday = t.Weekday()
n = 0
d := t.AddDate(0, 0, -t.Day())
for d.Before(t) {
d = d.AddDate(0, 0, 1)
if d.Weekday() == weekday {
n++
}
}
return weekday, n
} | schedule.go | 0.673192 | 0.686697 | schedule.go | starcoder |
package walk
// Kata (Japanese) is a form of movement (in martial arts)
type Kata []GoTo // Japanese
// ========================================================
// From returns the Here (or nil) reached from e by steps
func (steps Kata) From(e *Here) (*Here, Distance) {
var dist, dnow Distance
goal := e
for _, step := range steps {
if goal == nil {
return nil, dist
}
goal, dnow = step.from(goal)
dist += dnow
}
return goal, dist
}
// ========================================================
// Grab returns the Trail reached from e by steps
// until Here is nil or same
// Note: Grab may be useful in debugging, as it returns a full trace
// To Grab is not intended for regular use - Don't be greedy :-)
func (steps Kata) Grab(e *Here) (Trail, Distance) {
var dist, dnow Distance
var goal = e
last := goal
goals := make(Trail, 0, len(steps))
for _, step := range steps {
last = goal
goal, dnow = step.from(goal)
if goal == nil || goal == last {
continue
}
goals = append(goals, goal)
dist += dnow
}
return goals, dist
}
// ========================================================
// Haul returns the Trail From e by repeating steps
// until Here has been seen before, or becomes nil
func (steps Kata) Haul(e *Here) (Trail, Distance) {
var seen = make(map[*Here]bool)
var dist, dnow Distance
var goal = e
goals := make(Trail, 0, len(steps)*8)
for {
goal, dnow = steps.From(goal)
if goal == nil || seen[goal] {
return goals, dist
}
seen[goal] = true
goals = append(goals, goal)
dist += dnow
}
}
// ========================================================
// Iterator
// Walker returns an iterator repeating Kata.From(e) ...
func (steps Kata) Walker(e *Here) Walk {
var seen = make(map[*Here]bool)
var curr = e
var kata = steps
var move Walk = func() *Here {
seen[curr] = true
if curr == nil {
return nil
}
curr, _ = kata.From(curr)
if seen[curr] {
return nil
} // already seen
return curr
}
return move
} | walk/kata.go | 0.675336 | 0.460653 | kata.go | starcoder |
package table
import (
"encoding/json"
"fmt"
"reflect"
"sort"
"github.com/go-gota/gota/dataframe"
gota "github.com/go-gota/gota/series"
"github.com/gojek/merlin/pkg/transformer/spec"
"github.com/gojek/merlin/pkg/transformer/types/series"
)
type Table struct {
dataFrame *dataframe.DataFrame
}
func NewTable(df *dataframe.DataFrame) *Table {
return &Table{dataFrame: df}
}
func New(se ...*series.Series) *Table {
ss := make([]gota.Series, 0, 0)
for _, gs := range se {
ss = append(ss, *gs.Series())
}
df := dataframe.New(ss...)
return &Table{dataFrame: &df}
}
func NewRaw(columnValues map[string]interface{}) (*Table, error) {
newColumns, err := createSeries(columnValues, 1)
if err != nil {
return nil, err
}
return New(newColumns...), nil
}
// Row return a table containing only the specified row
// It's similar to GetRow, however it will panic if the specified row doesn't exists in the table
// Intended to be used as built-in function in expression
func (t *Table) Row(row int) *Table {
result, err := t.GetRow(row)
if err != nil {
panic(err)
}
return result
}
// Col return a series containing the column specified by colName
// It's similar to GetColumn, however it will panic if the specified column doesn't exists in the table
// Intended to be used as built-in function in expression
func (t *Table) Col(colName string) *series.Series {
result, err := t.GetColumn(colName)
if err != nil {
panic(err)
}
return result
}
// GetRow return a table containing only the specified row
func (t *Table) GetRow(row int) (*Table, error) {
if row < 0 || row >= t.dataFrame.Nrow() {
return nil, fmt.Errorf("invalid row number, expected: 0 <= row < %d, got: %d", t.dataFrame.Nrow(), row)
}
subsetDataframe := t.dataFrame.Subset(row)
if subsetDataframe.Err != nil {
return nil, subsetDataframe.Err
}
return &Table{&subsetDataframe}, nil
}
// GetColumn return a series containing the column specified by colName
func (t *Table) GetColumn(colName string) (*series.Series, error) {
s := t.dataFrame.Col(colName)
if s.Err != nil {
return nil, s.Err
}
return series.NewSeries(&s), nil
}
// NRow return number of row in the table
func (t *Table) NRow() int {
return t.dataFrame.Nrow()
}
// ColumnNames return slice string containing the column names
func (t *Table) ColumnNames() []string {
return t.dataFrame.Names()
}
// Columns return slice of series containing all column values
func (t *Table) Columns() []*series.Series {
columnNames := t.ColumnNames()
columns := make([]*series.Series, len(columnNames))
for idx, columnName := range columnNames {
columns[idx], _ = t.GetColumn(columnName)
}
return columns
}
// DataFrame return internal representation of table
func (t *Table) DataFrame() *dataframe.DataFrame {
return t.dataFrame
}
// Copy create a separate copy of the table
func (t *Table) Copy() *Table {
df := t.dataFrame.Copy()
return NewTable(&df)
}
// Concat add all column from tbl to this table with restriction that the number of row in tbl is equal with this table
func (t *Table) Concat(tbl *Table) (*Table, error) {
if t.DataFrame().Nrow() != tbl.DataFrame().Nrow() {
return nil, fmt.Errorf("different number of row")
}
leftDf := *t.DataFrame()
rightDf := *tbl.DataFrame()
for _, col := range rightDf.Names() {
leftDf = leftDf.Mutate(rightDf.Col(col))
}
t.dataFrame = &leftDf
return t, nil
}
// DropColumns drop all columns specified in "columns" argument
// It will return error if "columns" contains column not existing in the table
func (t *Table) DropColumns(columns []string) error {
df := t.dataFrame.Drop(columns)
if df.Err != nil {
return df.Err
}
t.dataFrame = &df
return nil
}
// SelectColumns perform reordering of columns and potentially drop column
// It will return error if "columns" contains column not existing in the table
func (t *Table) SelectColumns(columns []string) error {
df := t.dataFrame.Select(columns)
if df.Err != nil {
return df.Err
}
t.dataFrame = &df
return nil
}
// RenameColumns rename multiple column name using the mapping given by "columnMap"
// It will return error if "columnMap" contains column not existing in the table
func (t *Table) RenameColumns(columnMap map[string]string) error {
df := t.dataFrame
columns := df.Names()
renamedSeries := make([]gota.Series, len(columns))
// check all column in columnMap exists in the original table
for colName, _ := range columnMap {
found := false
for _, origCol := range columns {
if colName == origCol {
found = true
break
}
}
if !found {
return fmt.Errorf("unable to rename column: unknown column: %s", colName)
}
}
// rename columns
for idx, column := range columns {
col := df.Col(column)
newColName, ok := columnMap[column]
if !ok {
newColName = column
}
col.Name = newColName
renamedSeries[idx] = col
}
newDf := dataframe.New(renamedSeries...)
t.dataFrame = &newDf
return nil
}
// Sort sort the table using rule specified in sortRules
// It will return error if "sortRules" contains column not existing in the table
func (t *Table) Sort(sortRules []*spec.SortColumnRule) error {
df := t.dataFrame
orders := make([]dataframe.Order, len(sortRules))
for idx, sortRule := range sortRules {
orders[idx] = dataframe.Order{
Colname: sortRule.Column,
Reverse: sortRule.Order == spec.SortOrder_DESC,
}
}
newDf := df.Arrange(orders...)
if newDf.Err != nil {
return newDf.Err
}
t.dataFrame = &newDf
return nil
}
// UpdateColumnsRaw add or update existing column with values specified in columnValues map
func (t *Table) UpdateColumnsRaw(columnValues map[string]interface{}) error {
origColumns := t.Columns()
newColumns, err := createSeries(columnValues, t.NRow())
if err != nil {
return err
}
combinedColumns := make([]*series.Series, 0)
combinedColumns = append(combinedColumns, newColumns...)
for _, origColumn := range origColumns {
origColumnName := origColumn.Series().Name
_, ok := columnValues[origColumnName]
if ok {
continue
}
combinedColumns = append(combinedColumns, origColumn)
}
newT := New(combinedColumns...)
t.dataFrame = newT.dataFrame
return nil
}
// LeftJoin perform left join with the right table on the specified joinColumn
// Return new table containing the join result
func (t *Table) LeftJoin(right *Table, joinColumns []string) (*Table, error) {
df := t.dataFrame.LeftJoin(*right.dataFrame, joinColumns...)
if df.Err != nil {
return nil, df.Err
}
return NewTable(&df), nil
}
// RightJoin perform right join with the right table on the specified joinColumn
// Return new table containing the join result
func (t *Table) RightJoin(right *Table, joinColumns []string) (*Table, error) {
df := t.dataFrame.RightJoin(*right.dataFrame, joinColumns...)
if df.Err != nil {
return nil, df.Err
}
return NewTable(&df), nil
}
// InnerJoin perform inner join with the right table on the specified joinColumn
// Return new table containing the join result
func (t *Table) InnerJoin(right *Table, joinColumns []string) (*Table, error) {
df := t.dataFrame.InnerJoin(*right.dataFrame, joinColumns...)
if df.Err != nil {
return nil, df.Err
}
return NewTable(&df), nil
}
// OuterJoin perform outer join with the right table on the specified joinColumn
// Return new table containing the join result
func (t *Table) OuterJoin(right *Table, joinColumns []string) (*Table, error) {
df := t.dataFrame.OuterJoin(*right.dataFrame, joinColumns...)
if df.Err != nil {
return nil, df.Err
}
return NewTable(&df), nil
}
// CrossJoin perform cross join with the right table on the specified joinColumn
// Return new table containing the join result
func (t *Table) CrossJoin(right *Table) (*Table, error) {
df := t.dataFrame.CrossJoin(*right.dataFrame)
if df.Err != nil {
return nil, df.Err
}
return NewTable(&df), nil
}
func (t *Table) String() string {
jsonTable, _ := tableToJsonSplitFormat(t)
jsonStr, _ := json.Marshal(jsonTable)
return fmt.Sprintf("%v", string(jsonStr))
}
func getLength(value interface{}) int {
colValueVal := reflect.ValueOf(value)
switch colValueVal.Kind() {
case reflect.Slice:
return colValueVal.Len()
default:
s, ok := value.(*series.Series)
if ok {
return s.Series().Len()
}
return 1
}
}
func broadcastScalar(colMap map[string]interface{}, length int) map[string]interface{} {
// we don't need to broadcast if all column has length = 1
if length == 1 {
return colMap
}
for k, v := range colMap {
val := v
colValueVal := reflect.ValueOf(v)
switch colValueVal.Kind() {
case reflect.Slice:
if colValueVal.Len() > 1 {
continue
}
val = colValueVal.Index(0)
default:
s, ok := v.(*series.Series)
if ok {
if s.Series().Len() > 1 {
continue
}
val = s.Get(0)
}
}
values := make([]interface{}, length)
for i := range values {
values[i] = val
}
colMap[k] = values
}
return colMap
}
func createSeries(columnValues map[string]interface{}, maxLength int) ([]*series.Series, error) {
// ensure all values in columnValues has length either 1 or maxLenght
for k, v := range columnValues {
valueLength := getLength(v)
// check that length is either 1 or maxLength
if valueLength != 1 && maxLength != 1 && valueLength != maxLength {
return nil, fmt.Errorf("columns %s has different dimension", k)
}
if valueLength > maxLength {
maxLength = valueLength
}
}
columnValues = broadcastScalar(columnValues, maxLength)
ss := make([]*series.Series, 0)
colNames := make([]string, len(columnValues))
// get all column name in colMap as slice
i := 0
for k := range columnValues {
colNames[i] = k
i++
}
sort.Strings(colNames)
for _, colName := range colNames {
s, err := series.NewInferType(columnValues[colName], colName)
if err != nil {
return nil, err
}
ss = append(ss, s)
}
return ss, nil
} | api/pkg/transformer/types/table/table.go | 0.71123 | 0.449272 | table.go | starcoder |
import hp "container/heap"
/*
1. Priority Queue Using Heap
2. Graph and dijkstra algorithm
3. Represent cell (i,j) as a node with id = i*C + j,
4. Map relationship between cell(i,j) to cell(i+1,j) and cell(i,j+1) as diff of heights
5. Use Dijkstra's algorithm to find the short parth between 0 to R*C - 1
6. the edge cost is modified from usual dijkstras of total cost
edgeCost = cost + abs(heights[i][j] - heights[i'][j']) to
edgeCost = Max(cost, abs(heights[i][j] - heights[i'][j'])
// adopted dijkstras from here
https://dev.to/douglasmakey/implementation-of-dijkstra-using-heap-in-go-6e3
*/
type path struct {
value int
nodes []int
}
type minPath []path
func (h minPath) Len() int { return len(h) }
func (h minPath) Less(i, j int) bool { return h[i].value < h[j].value }
func (h minPath) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h *minPath) Push(x interface{}) {
*h = append(*h, x.(path))
}
func (h *minPath) Pop() interface{} {
old := *h
n := len(old)
x := old[n-1]
*h = old[0 : n-1]
return x
}
type heap struct {
values *minPath
}
func newHeap() *heap {
return &heap{values: &minPath{}}
}
func (h *heap) push(p path) {
hp.Push(h.values, p)
}
func (h *heap) pop() path {
i := hp.Pop(h.values)
return i.(path)
}
type edge struct {
node int
weight int
}
type graph struct {
nodes map[int][]edge
}
func newGraph() *graph {
return &graph{nodes: make(map[int][]edge)}
}
func (g *graph) addEdge(origin, destiny int, weight int) {
//fmt.Println(origin, destiny, weight)
g.nodes[origin] = append(g.nodes[origin], edge{node: destiny, weight: weight})
g.nodes[destiny] = append(g.nodes[destiny], edge{node: origin, weight: weight})
}
func (g *graph) getEdges(node int) []edge {
return g.nodes[node]
}
func Max(x,y int) int {
if x < y {return y}
return x
}
func (g *graph) getPath(origin, destiny int) (int, []int) {
h := newHeap()
h.push(path{value: 0, nodes: []int{origin}})
visited := make(map[int]bool)
for len(*h.values) > 0 {
// Find the nearest yet to visit node
p := h.pop()
node := p.nodes[len(p.nodes)-1]
if visited[node] {
continue
}
if node == destiny {
return p.value, p.nodes
}
for _, e := range g.getEdges(node) {
if !visited[e.node] {
// We calculate the total spent so far plus the cost and the path of getting here
h.push(path{value: Max(p.value, e.weight), nodes: append([]int{}, append(p.nodes, e.node)...)})
}
}
visited[node] = true
}
return 0, nil
}
func Diff(x, y int) int {
ret := x - y
if ret < 0 {return -ret}
return ret
}
func minimumEffortPath(heights [][]int) int {
R := len(heights)
if R == 0 {return 0}
graph := newGraph()
C:= len(heights[0])
for i,_:= range(heights) {
for j:=0; j < C; j++ {
src := i * C + j
if i < R-1 {
dst := (i+1)*C + j
graph.addEdge(src,dst,Diff(heights[i][j], heights[i+1][j]) )
}
if j < C-1 {
dst := i*C + j+1
graph.addEdge(src,dst,Diff(heights[i][j], heights[i][j+1]) )
}
}
}
ret,_ := graph.getPath(0,R*C-1)
//fmt.Println(nodes)
return ret
} | submissions/1631.Path_With_Minimum_Effort.go | 0.572962 | 0.506469 | 1631.Path_With_Minimum_Effort.go | starcoder |
package series
import (
"fmt"
"math"
"strconv"
"strings"
)
type stringElement struct {
e string
valid bool
}
// Strings with NaN will be treated as just strings with NaN
func (e *stringElement) Set(value interface{}) error {
e.valid = true
if value == nil {
e.valid = false
return nil
}
switch value.(type) {
case string:
if value.(string) == Nil {
e.valid = false
} else {
e.e = string(value.(string))
}
case int:
e.e = strconv.Itoa(value.(int))
case int64:
e.e = strconv.FormatInt(value.(int64), 10)
case uint64:
e.e = strconv.FormatUint(value.(uint64), 10)
case float32:
e.e = strconv.FormatFloat(float64(value.(float32)), 'f', 6, 64)
case float64:
e.e = strconv.FormatFloat(value.(float64), 'f', 6, 64)
case bool:
b := value.(bool)
if b {
e.e = "true"
} else {
e.e = "false"
}
case NaNElement:
e.e = "NaN"
case Element:
if value.(Element).IsValid() {
v, err := value.(Element).String()
if err != nil {
e.valid = false
return err
}
e.e = v
} else {
e.valid = false
return nil
}
default:
e.valid = false
return fmt.Errorf("Unsupported type '%T' conversion to a string", value)
}
return nil
}
func (e stringElement) Copy() Element {
return &stringElement{e.e, e.valid}
}
// Returns true if the string is parsed as NaN, missing, or fails to be parsed as a float
func (e stringElement) IsNaN() bool {
if !e.valid {
return true
}
f, err := strconv.ParseFloat(e.e, 64)
if err == nil {
return math.IsNaN(f)
}
return true
}
func (e stringElement) IsValid() bool {
return e.valid
}
// Returns true if the string is parsed as Inf, -Inf or +Inf.
func (e stringElement) IsInf(sign int) bool {
switch strings.ToLower(e.e) {
case "inf", "-inf", "+inf":
f, err := strconv.ParseFloat(e.e, 64)
if err != nil {
return false
}
return math.IsInf(f, sign)
}
return false
}
func (e stringElement) Type() Type {
return String
}
func (e stringElement) Val() ElementValue {
if !e.IsValid() {
return nil
}
return string(e.e)
}
func (e stringElement) String() (string, error) {
if !e.IsValid() {
return "", fmt.Errorf("can't convert nil to string")
}
return string(e.e), nil
}
func (e stringElement) Int() (int64, error) {
if !e.IsValid() {
return 0, fmt.Errorf("can't convert nil to int64")
}
return strconv.ParseInt(e.e, 10, 64)
}
func (e stringElement) Uint() (uint64, error) {
if !e.IsValid() {
return 0, fmt.Errorf("can't convert nil to uint64")
}
return strconv.ParseUint(e.e, 10, 64)
}
func (e stringElement) Float() (float64, error) {
if !e.IsValid() {
return math.NaN(), fmt.Errorf("can't convert nil to float64")
}
f, err := strconv.ParseFloat(e.e, 64)
if err != nil {
return math.NaN(), nil
}
return f, nil
}
func (e stringElement) Bool() (bool, error) {
if !e.valid {
return false, fmt.Errorf("can't convert nil to Bool")
}
switch strings.ToLower(e.e) {
case "true", "t", "1":
return true, nil
case "false", "f", "0":
return false, nil
}
return false, fmt.Errorf("can't convert String '%v' to bool", e.e)
}
func (e stringElement) Eq(elem Element) bool {
if e.valid != elem.IsValid() {
// xor
return false
}
if !e.valid && !elem.IsValid() {
// nil == nil is true
return true
}
s, err := elem.String()
if err != nil {
return false
}
return e.e == s
}
func (e stringElement) Neq(elem Element) bool {
return !e.Eq(elem)
}
func (e stringElement) Less(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
s, err := elem.String()
if err != nil {
return false
}
return e.e < s
}
func (e stringElement) LessEq(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
s, err := elem.String()
if err != nil {
return false
}
return e.e <= s
}
func (e stringElement) Greater(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
s, err := elem.String()
if err != nil {
return false
}
return e.e > s
}
func (e stringElement) GreaterEq(elem Element) bool {
if !e.valid || !elem.IsValid() {
// really should be an error
return false
}
s, err := elem.String()
if err != nil {
return false
}
return e.e >= s
} | series/type-string.go | 0.66628 | 0.519399 | type-string.go | starcoder |
package main
import (
"math"
)
func lines(car *Car, newAngle float32) [4][2]float32 {
relx1 := car.width / 2.0
rely1 := float32(0)
relx3 := float32(0)
rely3 := car.height / 2.0
relx2 := -relx1
rely2 := -rely1
relx4 := -relx3
rely4 := -rely3
vectors := [4]*Vector{
{relx1, rely1},
{relx2, rely2},
{relx3, rely3},
{relx4, rely4},
}
for _, vector := range vectors {
*vector = ((*vector).Rotate(car.angle))
*vector = ((*vector).Add(car.position))
}
angle := car.angle + newAngle
k1 := float32(math.Tan(math.Pi/2 + float64(angle)))
d1 := -k1*vectors[0].x + vectors[0].y
k2 := float32(math.Tan(math.Pi/2 + float64(angle)))
d2 := -k2*vectors[1].x + vectors[1].y
k3 := float32(math.Tan(float64(angle)))
d3 := -k3*vectors[2].x + vectors[2].y
k4 := float32(math.Tan(float64(angle)))
d4 := -k4*vectors[3].x + vectors[3].y
return [4][2]float32{
{k1, d1},
{k2, d2},
{k3, d3},
{k4, d4},
}
}
func col(car *Car, car2 *Car) {
newAngle := float32(0)
car1Angle := car.angle
car2Angle := car2.angle
for car1Angle > float32(math.Pi/2.0) {
car1Angle -= float32(math.Pi / 2.0)
}
for car2Angle > float32(math.Pi/2.0) {
car2Angle -= float32(math.Pi / 2.0)
}
if car1Angle+car2Angle < math.Pi/16.0 {
newAngle = float32(45)
} else if car1Angle < math.Pi/32.0 || car2Angle < math.Pi/32.0 {
newAngle = (car1Angle + car2Angle) / 2.0
}
c1Lines := lines(car, newAngle)
c2Lines := lines(car2, newAngle)
linePairs := [4][2][2]float32{
{c2Lines[0], c2Lines[2]},
{c2Lines[2], c2Lines[1]},
{c2Lines[1], c2Lines[3]},
{c2Lines[3], c2Lines[0]},
}
for _, c1Line := range c1Lines {
for _, linePair := range linePairs {
c2Line := linePair[0]
if math.Abs(float64(c1Line[0]-c2Line[0])) < 0.001 {
continue
}
x1 := (c2Line[1] - c1Line[1]) / (c1Line[0] - c2Line[0])
y1 := c1Line[0]*x1 + c1Line[1]
c2Line = linePair[1]
if math.Abs(float64(c1Line[0]-c2Line[0])) < 0.001 {
continue
}
x2 := (c2Line[1] - c1Line[1]) / (c1Line[0] - c2Line[0])
y2 := c1Line[0]*x2 + c1Line[1]
dx := math.Abs(float64(x1 - x2))
dy := math.Abs(float64(y1 - y2))
d := math.Sqrt(dx*dx + dy*dy)
if d < 2 {
car.owner.Vibrate()
car2.owner.Vibrate()
carAbs := Vector{(x1 + x2) / 2.0, (y1 + y2) / 2.0}
carRel := carAbs.Sub(car.position)
car2Rel := carAbs.Sub(car2.position)
carRel = carRel.Rotate(-newAngle)
car2Rel = car2Rel.Rotate(-newAngle)
car1Vel :=
car.velocity
car2Vel := car2.velocity
car1AngVel := car.angularVelocity
car2AngVel := car2.angularVelocity
car.force = car.force.MulScalar(-2)
car2.force = car.force.MulScalar(-2)
car.velocity = car2Vel
car2.velocity = car1Vel
car.torque *= -1
car2.torque *= -1
car.angularVelocity = car2AngVel
car2.angularVelocity = car1AngVel
forceOn1 := car2.force
forceOn2 := car.force
car.AddForce(forceOn1, car.RelativeToWorld(carRel))
car2.AddForce(forceOn2, car2.RelativeToWorld(car2Rel))
return
}
}
}
}
func (r *Racer) HandleCollisions() {
collisionHandled := [][2]*Car{}
for _, car := range r.cars {
for _, car2 := range r.cars {
if car == car2 {
continue
}
for _, collisions := range collisionHandled {
if (car == collisions[0] && car2 == collisions[1]) || (car2 ==
collisions[0] && car == collisions[1]) {
continue
}
}
if car2.position.Sub(car.position).Length() < (car.size+
car2.size)/2 {
collisionHandled = append(collisionHandled, [2]*Car{car,
car2})
col(car, car2)
}
}
}
} | collision.go | 0.657428 | 0.487124 | collision.go | starcoder |
package crypto
// Go/crypto/elliptic only supports NIST curves. This file implements the SECG
// curve secp256k1 as described in https://www.secg.org/sec2-v2.pdf. The API in this
// implementation is consistent with the crypto/elliptic package API.
// This implementation is Go based and is not using optimized methods for points
// arithmetic or big number arithmetic. It should evolve in the future to address
// performance optimization
// This implementation does not include any security against side-channel attacks.
import (
goec "crypto/elliptic"
"math/big"
)
// A SECCurve represents a short-form Weierstrass curve with a=0
// It embeds CurveParams from crypto/elliptic which implements NIST curves (a=-3)
// The functions involving the parameter `a` are re-written
type SECCurve struct {
goec.CurveParams
}
// affineFromJacobian reverses the Jacobian transform.
// If the point is ∞ it returns 0, 0.
// (taken from Go/crypto/elliptic)
func (curve *SECCurve) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
if z.Sign() == 0 {
return new(big.Int), new(big.Int)
}
zinv := new(big.Int).ModInverse(z, curve.P)
zinvsq := new(big.Int).Mul(zinv, zinv)
xOut = new(big.Int).Mul(x, zinvsq)
xOut.Mod(xOut, curve.P)
zinvsq.Mul(zinvsq, zinv)
yOut = new(big.Int).Mul(y, zinvsq)
yOut.Mod(yOut, curve.P)
return
}
// Add returns the sum of (x1,y1) and (x2,y2)
// (taken from Go/crypto/elliptic)
func (curve *SECCurve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
z := new(big.Int).SetInt64(1)
return curve.affineFromJacobian(curve.addJacobian(x1, y1, z, x2, y2, z))
}
// addJacobian takes two points in Jacobian coordinates, (x1, y1, z1) and
// (x2, y2, z2) and returns their sum, also in Jacobian form.
// (taken from Go/crypto/elliptic)
func (curve *SECCurve) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big.Int, *big.Int) {
// See https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-3.html#addition-add-2007-bl
x3, y3, z3 := new(big.Int), new(big.Int), new(big.Int)
if z1.Sign() == 0 {
x3.Set(x2)
y3.Set(y2)
z3.Set(z2)
return x3, y3, z3
}
if z2.Sign() == 0 {
x3.Set(x1)
y3.Set(y1)
z3.Set(z1)
return x3, y3, z3
}
z1z1 := new(big.Int).Mul(z1, z1)
z1z1.Mod(z1z1, curve.P)
z2z2 := new(big.Int).Mul(z2, z2)
z2z2.Mod(z2z2, curve.P)
u1 := new(big.Int).Mul(x1, z2z2)
u1.Mod(u1, curve.P)
u2 := new(big.Int).Mul(x2, z1z1)
u2.Mod(u2, curve.P)
h := new(big.Int).Sub(u2, u1)
xEqual := h.Sign() == 0
if h.Sign() == -1 {
h.Add(h, curve.P)
}
i := new(big.Int).Lsh(h, 1)
i.Mul(i, i)
j := new(big.Int).Mul(h, i)
s1 := new(big.Int).Mul(y1, z2)
s1.Mul(s1, z2z2)
s1.Mod(s1, curve.P)
s2 := new(big.Int).Mul(y2, z1)
s2.Mul(s2, z1z1)
s2.Mod(s2, curve.P)
r := new(big.Int).Sub(s2, s1)
if r.Sign() == -1 {
r.Add(r, curve.P)
}
yEqual := r.Sign() == 0
if xEqual && yEqual {
return curve.doubleJacobian(x1, y1, z1)
}
r.Lsh(r, 1)
v := new(big.Int).Mul(u1, i)
x3.Set(r)
x3.Mul(x3, x3)
x3.Sub(x3, j)
x3.Sub(x3, v)
x3.Sub(x3, v)
x3.Mod(x3, curve.P)
y3.Set(r)
v.Sub(v, x3)
y3.Mul(y3, v)
s1.Mul(s1, j)
s1.Lsh(s1, 1)
y3.Sub(y3, s1)
y3.Mod(y3, curve.P)
z3.Add(z1, z2)
z3.Mul(z3, z3)
z3.Sub(z3, z1z1)
z3.Sub(z3, z2z2)
z3.Mul(z3, h)
z3.Mod(z3, curve.P)
return x3, y3, z3
}
// Double returns 2*(x,y)
// (taken from Go/crypto/elliptic)
func (curve *SECCurve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
z1 := new(big.Int).SetInt64(1)
return curve.affineFromJacobian(curve.doubleJacobian(x1, y1, z1))
}
// (taken from github.com/ThePiachu/GoBit/blob/master/bitelliptic )
// It is the only difference between NIST curves and SECG curves implementations
// doubleJacobian takes a point in Jacobian coordinates, (x, y, z), and
// returns its double, also in Jacobian form.
func (curve *SECCurve) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) {
if z.Sign() == 0 {
return new(big.Int), new(big.Int), new(big.Int)
}
a := new(big.Int).Mul(x, x) //X1²
b := new(big.Int).Mul(y, y) //Y1²
c := new(big.Int).Mul(b, b) //B²
d := new(big.Int).Add(x, b) //X1+B
d.Mul(d, d) //(X1+B)²
d.Sub(d, a) //(X1+B)²-A
d.Sub(d, c) //(X1+B)²-A-C
d.Mul(d, big.NewInt(2)) //2*((X1+B)²-A-C)
e := new(big.Int).Mul(big.NewInt(3), a) //3*A
f := new(big.Int).Mul(e, e) //E²
x3 := new(big.Int).Mul(big.NewInt(2), d) //2*D
x3.Sub(f, x3) //F-2*D
x3.Mod(x3, curve.P)
y3 := new(big.Int).Sub(d, x3) //D-X3
y3.Mul(e, y3) //E*(D-X3)
y3.Sub(y3, new(big.Int).Mul(big.NewInt(8), c)) //E*(D-X3)-8*C
y3.Mod(y3, curve.P)
z3 := new(big.Int).Mul(y, z) //Y1*Z1
z3.Mul(big.NewInt(2), z3) //3*Y1*Z1
z3.Mod(z3, curve.P)
return x3, y3, z3
}
// ScalarMult returns k*(Bx,By) where k is a number in big-endian form.
// k must be larger than 0
func (curve *SECCurve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
i := 0
for i < len(k) && k[i] == 0 {
i++
}
if i == len(k) {
return new(big.Int), new(big.Int)
}
mask := byte(0x80)
for (k[i] & mask) == 0 {
mask >>= 1
}
mask >>= 1
Bz := new(big.Int).SetInt64(1)
x, y, z := Bx, By, Bz
for ; i < len(k); i++ {
for mask != 0 {
x, y, z = curve.doubleJacobian(x, y, z)
if (k[i] & mask) != 0 {
x, y, z = curve.addJacobian(Bx, By, Bz, x, y, z)
}
mask >>= 1
}
mask = 0x80
}
return curve.affineFromJacobian(x, y, z)
}
// ScalarBaseMult returns k*G, where G is the base point of the group and k is
// an integer in big-endian form.
// (taken from Go/crypto/elliptic)
func (curve *SECCurve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
return curve.ScalarMult(curve.Gx, curve.Gy, k)
}
// secp256k1 returns a SEC curve which implements secp256k1
func secp256k1() *SECCurve {
// See SEC 2 section 2.7.1
secp256k1Curve := new(SECCurve)
secp256k1Curve.P, _ = new(big.Int).SetString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F", 16)
secp256k1Curve.N, _ = new(big.Int).SetString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 16)
secp256k1Curve.B, _ = new(big.Int).SetString("0000000000000000000000000000000000000000000000000000000000000007", 16)
secp256k1Curve.Gx, _ = new(big.Int).SetString("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", 16)
secp256k1Curve.Gy, _ = new(big.Int).SetString("<KEY>", 16)
secp256k1Curve.BitSize = 256
return secp256k1Curve
} | crypto/internal/crypto/secg.go | 0.921397 | 0.645399 | secg.go | starcoder |
package impl
import (
"fmt"
"github.com/gdamore/tcell"
)
//GradientRaySampler sample a ray given it depth and some other properties
//TODO: remove outOfRangeStyle property
type GradientRaySampler struct {
//styles to be used to render a wall (i.e: the color used in a range of distance).
wallStyles []tcell.Style
//styles used to render the floor
backgroundStyles []tcell.Style
//the depth-ranges: each distance to be sampled will be in a range. The depthRanges set the upper-limit of a range at its index.
depthRanges []float64
//the ordered-background-ranges to which apply the background-colors. The number is the upper-limit of a range at its index.
backgroundRanges []float32
//the ordered background-colors to be applied by background-ranges.
backgroundRangesColors []tcell.Style
}
//CreateRaySamplerForAnsiColorTerminal create the Gradients
func CreateRaySamplerForAnsiColorTerminal(first float64, multiplicator float64, maxLimit float64, wallStartColor int, wallEndColor int, screenHeight int, backgroundRange []float32, backgroundColors []int) (g RaySampler, err error) {
if first < 0.0 {
return nil, fmt.Errorf("Gradient ray-sampler 'first' value cannot be negative")
}
if multiplicator <= 0.0 {
return nil, fmt.Errorf("Gradient-ray-sampler 'multiplicator' value cannot be negative or 0")
}
if maxLimit <= 0.0 {
return nil, fmt.Errorf("Gradient ray-sampler 'maxLimit' value cannot be negative or 0")
}
if wallStartColor < 0.0 {
return nil, fmt.Errorf("Gradient ray-sampler 'wallStartColor' value cannot be negative")
}
if wallEndColor < 0.0 {
return nil, fmt.Errorf("Gradient ray-sampler 'wallEndColor' value cannot be negative")
}
if screenHeight < 0 {
return nil, fmt.Errorf("Gradient ray-sampler 'screenHeight' value cannot be negative")
}
if len(backgroundRange) == 0 {
return nil, fmt.Errorf("Gradient ray-sampler 'backgroundRange' array cannot be empty")
}
previousBackgroundRangeValue := backgroundRange[0]
for i := 1; i < len(backgroundRange); i++ {
if previousBackgroundRangeValue > backgroundRange[i] {
return nil, fmt.Errorf("Gradient ray-sampler 'backgroundRange' must be ordered from smallest to biggest value")
}
previousBackgroundRangeValue = backgroundRange[i]
}
if len(backgroundRange)+1 != len(backgroundColors) {
return nil, fmt.Errorf("Gradient ray-sampler 'backgroundColors' length must be 'backgroundRange' length + 1")
}
result := &GradientRaySampler{
backgroundRanges: backgroundRange,
}
currentLimit := first
for currentLimit < maxLimit {
result.depthRanges = append(result.depthRanges, currentLimit)
currentLimit *= multiplicator
}
//result.depthRanges = append(result.depthRanges, math.Inf(1))
result.wallStyles = result.getColorArrayFromDepthRange(wallStartColor, wallEndColor)
result.backgroundRangesColors = make([]tcell.Style, len(backgroundColors))
for index, element := range backgroundColors {
result.backgroundRangesColors[index] = tcell.StyleDefault.Background(tcell.Color(element))
}
result.setBackgroundStyles(screenHeight)
return result, nil
}
func (raySampler *GradientRaySampler) getColorArrayFromDepthRange(startColor, endColor int) []tcell.Style {
styles := make([]tcell.Style, len(raySampler.depthRanges)+1)
if startColor > endColor {
colorStep := (startColor - endColor) / len(raySampler.depthRanges)
for i := 0; i < len(raySampler.depthRanges); i++ {
styles[i] = tcell.StyleDefault.Background(tcell.Color(endColor + colorStep*i))
}
styles[len(raySampler.depthRanges)] = tcell.StyleDefault.Background(tcell.Color(endColor))
} else {
colorStep := (endColor - startColor) / len(raySampler.depthRanges)
for i := 0; i < len(raySampler.depthRanges); i++ {
styles[i] = tcell.StyleDefault.Background(tcell.Color(startColor + colorStep*i))
}
styles[len(raySampler.depthRanges)] = tcell.StyleDefault.Background(tcell.Color(startColor))
}
return styles
}
//setBackgroundStyles reset the background-style and runes based from the new screen height.
func (raySampler *GradientRaySampler) setBackgroundStyles(screenHeight int) {
raySampler.backgroundStyles = make([]tcell.Style, screenHeight)
currentBackgroundRange := 0
for i := 0; i < screenHeight; i++ {
if i > int(float32(screenHeight)*raySampler.backgroundRanges[currentBackgroundRange]) && currentBackgroundRange < len(raySampler.backgroundRanges)-1 {
currentBackgroundRange++
}
raySampler.backgroundStyles[i] = raySampler.backgroundRangesColors[currentBackgroundRange]
}
}
//GetBackgroundRune returns the rune used for the background at a specific row number.
func (raySampler *GradientRaySampler) GetBackgroundRune(rowIndex int) rune {
return ' '
}
//GetWallRune returns the rune used for the wall at a specific row number.
func (raySampler *GradientRaySampler) GetWallRune(rowIndex int) rune {
return ' '
}
//GetBackgroundStyle returns the style used for the background at a specific row number.
func (raySampler *GradientRaySampler) GetBackgroundStyle(rowIndex int) tcell.Style {
return raySampler.backgroundStyles[rowIndex]
}
//GetWallStyleFromDistance returns the wall's style for a given distance.
func (raySampler *GradientRaySampler) GetWallStyleFromDistance(distance float64) tcell.Style {
rangeNumber := 0
for rangeNumber < len(raySampler.depthRanges) {
if distance < raySampler.depthRanges[rangeNumber] {
break
}
rangeNumber++
}
return raySampler.wallStyles[rangeNumber]
} | client/render/impl/raysampler_default.go | 0.522446 | 0.541227 | raysampler_default.go | starcoder |
package mathgl
import (
"math"
)
type Vec2d [2]float64
type Vec3d [3]float64
type Vec4d [4]float64
func (v1 Vec2d) Add(v2 Vec2d) Vec2d {
return Vec2d{v1[0] + v2[0], v1[1] + v2[1]}
}
func (v1 Vec3d) Add(v2 Vec3d) Vec3d {
return Vec3d{v1[0] + v2[0], v1[1] + v2[1], v1[2] + v2[2]}
}
func (v1 Vec4d) Add(v2 Vec4d) Vec4d {
return Vec4d{v1[0] + v2[0], v1[1] + v2[1], v1[2] + v2[2], v1[3] + v2[3]}
}
func (v1 Vec2d) Sub(v2 Vec2d) Vec2d {
return Vec2d{v1[0] - v2[0], v1[1] - v2[1]}
}
func (v1 Vec3d) Sub(v2 Vec3d) Vec3d {
return Vec3d{v1[0] - v2[0], v1[1] - v2[1], v1[2] - v2[2]}
}
func (v1 Vec4d) Sub(v2 Vec4d) Vec4d {
return Vec4d{v1[0] - v2[0], v1[1] - v2[1], v1[2] - v2[2], v1[3] - v2[3]}
}
func (v1 Vec2d) Mul(c float64) Vec2d {
return Vec2d{v1[0] * c, v1[1] * c}
}
func (v1 Vec3d) Mul(c float64) Vec3d {
return Vec3d{v1[0] * c, v1[1] * c, v1[2] * c}
}
func (v1 Vec4d) Mul(c float64) Vec4d {
return Vec4d{v1[0] * c, v1[1] * c, v1[2] * c, v1[3] * c}
}
func (v1 Vec2d) Dot(v2 Vec2d) float64 {
return v1[0]*v2[0] + v1[1]*v2[1]
}
func (v1 Vec3d) Dot(v2 Vec3d) float64 {
return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2]
}
func (v1 Vec4d) Dot(v2 Vec4d) float64 {
return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2] + v1[3]*v2[3]
}
func (v1 Vec2d) Len() float64 {
return float64(math.Sqrt(float64(v1[0]*v1[0] + v1[1]*v1[1])))
}
func (v1 Vec3d) Len() float64 {
return float64(math.Sqrt(float64(v1[0]*v1[0] + v1[1]*v1[1] + v1[2]*v1[2])))
}
func (v1 Vec4d) Len() float64 {
return float64(math.Sqrt(float64(v1[0]*v1[0] + v1[1]*v1[1] + v1[2]*v1[2] + v1[3]*v1[3])))
}
func (v1 Vec2d) Normalize() Vec2d {
l := 1.0 / math.Sqrt(float64(v1[0]*v1[0]+v1[1]*v1[1]))
return Vec2d{float64(float64(v1[0]) * l), float64(float64(v1[1]) * l)}
}
func (v1 Vec3d) Normalize() Vec3d {
l := 1.0 / math.Sqrt(float64(v1[0]*v1[0]+v1[1]*v1[1]+v1[2]*v1[2]))
return Vec3d{float64(float64(v1[0]) * l), float64(float64(v1[1]) * l), float64(float64(v1[2]) * l)}
}
func (v1 Vec4d) Normalize() Vec4d {
l := 1.0 / math.Sqrt(float64(v1[0]*v1[0]+v1[1]*v1[1]+v1[2]*v1[2]+v1[3]*v1[3]))
return Vec4d{float64(float64(v1[0]) * l), float64(float64(v1[1]) * l), float64(float64(v1[2]) * l), float64(float64(v1[3]) * l)}
}
func (v1 Vec3d) Cross(v2 Vec3d) Vec3d {
return Vec3d{v1[1]*v2[2] - v1[2]*v2[1], v1[2]*v2[0] - v1[0]*v2[2], v1[0]*v2[1] - v1[1]*v2[0]}
}
func (v1 Vec2d) ApproxEqual(v2 Vec2d) bool {
for i := range v1 {
if !FloatEqual(v1[i], v2[i]) {
return false
}
}
return true
}
func (v1 Vec3d) ApproxEqual(v2 Vec3d) bool {
for i := range v1 {
if !FloatEqual(v1[i], v2[i]) {
return false
}
}
return true
}
func (v1 Vec4d) ApproxEqual(v2 Vec4d) bool {
for i := range v1 {
if !FloatEqual(v1[i], v2[i]) {
return false
}
}
return true
}
func (v1 Vec2d) ApproxEqualTheshold(v2 Vec2d, threshold float64) bool {
for i := range v1 {
if !FloatEqualThreshold(v1[i], v2[i], threshold) {
return false
}
}
return true
}
func (v1 Vec3d) ApproxEqualTheshold(v2 Vec3d, threshold float64) bool {
for i := range v1 {
if !FloatEqualThreshold(v1[i], v2[i], threshold) {
return false
}
}
return true
}
func (v1 Vec4d) ApproxEqualTheshold(v2 Vec4d, threshold float64) bool {
for i := range v1 {
if !FloatEqualThreshold(v1[i], v2[i], threshold) {
return false
}
}
return true
}
func (v1 Vec2d) ApproxFuncEqual(v2 Vec2d, eq func(float64, float64) bool) bool {
for i := range v1 {
if !eq(v1[i], v2[i]) {
return false
}
}
return true
}
func (v1 Vec3d) ApproxFuncEqual(v2 Vec3d, eq func(float64, float64) bool) bool {
for i := range v1 {
if !eq(v1[i], v2[i]) {
return false
}
}
return true
}
func (v1 Vec4d) ApproxFuncEqual(v2 Vec4d, eq func(float64, float64) bool) bool {
for i := range v1 {
if !eq(v1[i], v2[i]) {
return false
}
}
return true
} | vectord.go | 0.767298 | 0.714111 | vectord.go | starcoder |
package outputs
import (
"sort"
"time"
"barista.run/bar"
"barista.run/timing"
)
// Repeat creates a TimedOutput from a function by repeatedly calling it at
// different times.
type Repeat func(time.Time) bar.Output
// Every repeats the output at a fixed interval.
func (r Repeat) Every(interval time.Duration) bar.TimedOutput {
return &repeating{r, repeatEvery{interval, timing.Now()}}
}
// AtNext repeats the output at multiples of the given duration. e.g.
// AtNext(time.Minute) will repeat the output at 13:00:00, 13:01:00, and so on,
// regardless of the first output time.
func (r Repeat) AtNext(interval time.Duration) bar.TimedOutput {
return &repeating{r, repeatAtNext{interval}}
}
// At repeats the output at the specified fixed points in time.
func (r Repeat) At(times ...time.Time) bar.TimedOutput {
sort.Slice(times, func(i, j int) bool {
return times[i].Before(times[j])
})
return &repeating{r, repeatAtTimes{times}}
}
type timer interface {
after(time.Time) time.Time
before(time.Time) time.Time
}
type repeatEvery struct {
interval time.Duration
start time.Time
}
func (r repeatEvery) after(now time.Time) time.Time {
count := now.Sub(r.start) / r.interval
return r.start.Add((count + 1) * r.interval)
}
func (r repeatEvery) before(now time.Time) time.Time {
count := now.Sub(r.start) / r.interval
return r.start.Add(count * r.interval)
}
type repeatAtNext struct {
interval time.Duration
}
func (r repeatAtNext) after(now time.Time) time.Time {
return now.Add(r.interval + 1).Truncate(r.interval)
}
func (r repeatAtNext) before(now time.Time) time.Time {
return now.Truncate(r.interval)
}
type repeatAtTimes struct {
times []time.Time
}
func (r repeatAtTimes) after(now time.Time) time.Time {
for i, t := range r.times {
if t.After(now) {
r.times = r.times[i:]
return t
}
}
return time.Time{}
}
func (r repeatAtTimes) before(now time.Time) time.Time {
var result time.Time
for _, t := range r.times {
if t.After(now) {
break
}
result = t
}
return result
}
type repeating struct {
outputFunc func(time.Time) bar.Output
timer
}
func (r *repeating) Segments() []*bar.Segment {
t := r.before(timing.Now())
if t.IsZero() {
return nil
}
o := r.outputFunc(t)
if o == nil {
return nil
}
return o.Segments()
}
func (r *repeating) NextRefresh() time.Time {
return r.after(timing.Now())
}
// resetStartTime is used by Group to ensure that all timed outputs that repeat
// at a fixed interval start their timers together. Perfectly aligning the start
// times for fixed-interval outputs reduces the total number of refresh events,
// by having a single update where timers overlap.
func resetStartTime(out bar.Output, start time.Time) bar.Output {
r, ok := out.(*repeating)
if !ok {
return out
}
e, ok := r.timer.(repeatEvery)
if !ok {
return out
}
return &repeating{r.outputFunc, repeatEvery{e.interval, start}}
} | outputs/timed.go | 0.764628 | 0.559952 | timed.go | starcoder |
package interpolation
import (
"fmt"
"sort"
"github.com/hashicorp/hil"
"github.com/hashicorp/hil/ast"
)
// interpolationFuncHas returns if the key exists in the provided map
func interpolationFuncHas() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap, ast.TypeString},
ReturnType: ast.TypeString,
Variadic: false,
Callback: func(inputs []interface{}) (interface{}, error) {
mapInput := inputs[0].(map[string]ast.Variable)
_, ok := mapInput[inputs[1].(string)]
if ok {
return "true", nil
}
return "false", nil
},
}
}
// interpolationFuncMap accepts a variable number of arguments in key/value pairs
// and converts them to a map
func interpolationFuncMap() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{},
ReturnType: ast.TypeMap,
Variadic: true,
VariadicType: ast.TypeAny,
Callback: func(inputs []interface{}) (interface{}, error) {
result := make(map[string]ast.Variable)
if len(inputs)%2 != 0 {
return nil, fmt.Errorf("requires an even number of arguments, got %d", len(inputs))
}
for i := 0; i < len(inputs); i += 2 {
key, ok := inputs[i].(string)
if !ok {
return nil, fmt.Errorf("argument %d represents a key in the map, but it is not a string", i+1)
}
val := inputs[i+1]
nativeVar, err := hil.InterfaceToVariable(val)
if err != nil {
return nil, err
}
result[key] = nativeVar
}
return result, nil
},
}
}
// interpolationFuncKeys returns the keys of the provided map sorted in dictionary order
func interpolationFuncKeys() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap},
ReturnType: ast.TypeList,
Variadic: false,
Callback: func(inputs []interface{}) (interface{}, error) {
mapInput := inputs[0].(map[string]ast.Variable)
keys := make([]string, 0, len(mapInput)+1)
result := make([]ast.Variable, 0, len(mapInput)+1)
for key := range mapInput {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
nativeKey, err := hil.InterfaceToVariable(key)
if err != nil {
return nil, err
}
result = append(result, nativeKey)
}
return result, nil
},
}
}
// interpolationFuncValues extracts the values from a map
func interpolationFuncValues() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap},
ReturnType: ast.TypeList,
Variadic: false,
Callback: func(inputs []interface{}) (interface{}, error) {
mapInput := inputs[0].(map[string]ast.Variable)
values := make([]interface{}, 0, len(mapInput)+1)
result := make([]ast.Variable, 0, len(mapInput)+1)
for _, val := range mapInput {
values = append(values, val)
}
for _, val := range values {
nativeValue, err := hil.InterfaceToVariable(val)
if err != nil {
return nil, err
}
result = append(result, nativeValue)
}
return result, nil
},
}
}
// interpolationFuncMerge will merge multiple maps into a single map. Last reference of a key will always win.
func interpolationFuncMerge() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap},
ReturnType: ast.TypeMap,
Variadic: true,
VariadicType: ast.TypeMap,
Callback: func(inputs []interface{}) (interface{}, error) {
result := make(map[string]ast.Variable)
for _, input := range inputs {
for k, v := range input.(map[string]ast.Variable) {
result[k] = v
}
}
return result, nil
},
}
}
// interpolationFuncPick will pick the values of the provided keys, and create a new map
func interpolationFuncPick() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap},
ReturnType: ast.TypeMap,
Variadic: true,
VariadicType: ast.TypeString,
Callback: func(inputs []interface{}) (interface{}, error) {
inMap := inputs[0].(map[string]ast.Variable)
result := make(map[string]ast.Variable)
for i := 1; i < len(inputs); i++ {
key := inputs[i].(string)
if val, ok := inMap[key]; ok {
result[key] = val
}
}
return result, nil
},
}
}
// interpolationFuncOmit will return a map that has omitted the keys provided
func interpolationFuncOmit() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeMap},
ReturnType: ast.TypeMap,
Variadic: true,
VariadicType: ast.TypeString,
Callback: func(inputs []interface{}) (interface{}, error) {
inMap := inputs[0].(map[string]ast.Variable)
for i := 1; i < len(inputs); i++ {
key := inputs[i].(string)
delete(inMap, key)
}
return inMap, nil
},
}
} | internal/interpolation/maps.go | 0.773815 | 0.401336 | maps.go | starcoder |
package cron
import (
"fmt"
"math"
"strconv"
"strings"
)
var (
allYears = [3]uint64{math.MaxUint64, math.MaxUint64, math.MaxUint64}
)
// MustParse returns a new Expression pointer.
// It expects a well-formed cron expression.
// If a malformed cron expression is supplied, it will `panic`.
func MustParse(spec string) *Expression {
expr, err := Parse(spec)
if err != nil {
panic(err)
}
return expr
}
// Parse returns a new Expression pointer.
// An error is returned if a malformed cron expression is supplied.
func Parse(spec string) (*Expression, error) {
cron := strings.TrimSpace(spec)
if len(cron) == 0 {
return nil, fmt.Errorf("empty spec string")
}
// Handle named cron expression
if strings.HasPrefix(cron, "@") {
return parseNamedExpression(cron)
}
// Handle normalize cron expression
expr := &Expression{expression: spec}
fields := strings.Split(cron, " ")
// remove empty fields
for i := len(fields) - 1; i >= 0; i-- {
if len(fields[i]) == 0 {
copy(fields[i:], fields[i+1:])
fields = fields[:len(fields)-1]
}
}
fieldCount := len(fields)
if fieldCount < 5 {
return nil, fmt.Errorf("missing field(s)")
}
field := 0
parser := 0
// second field (optional)
if fieldCount == 5 {
expr.seconds = startBit // 0 second
parser++ // set minute parser to the first
}
for field < fieldCount && parser < len(fieldParsers) {
if err := fieldParsers[parser].parse(expr, fields[field]); err != nil {
return nil, err
}
field++
parser++
}
// padding years to all values
if fieldCount < 7 {
expr.years = allYears
}
// special handling for day of week
// 7->0
if expr.daysOfWeek&(startBit>>7) != 0 {
expr.daysOfWeek |= startBit
}
if expr.lastWeekdaysOfWeek&(startBit>>7) != 0 {
expr.lastWeekdaysOfWeek |= startBit
}
// expand to 5 week
mask := uint64(0xfe00000000000000)
daysOfWeek := expr.daysOfWeek & mask
lastWeekdaysOfWeek := expr.lastWeekdaysOfWeek
for i := 0; i < 35; i += 7 {
expr.daysOfWeek |= daysOfWeek >> i
expr.lastWeekdaysOfWeek |= lastWeekdaysOfWeek >> i
}
// sun to bit 1
expr.daysOfWeek >>= 1
expr.lastWeekdaysOfWeek >>= 1
return expr, nil
}
func parseNamedExpression(spec string) (*Expression, error) {
switch spec {
case "@yearly", "@annually":
return &Expression{
expression: spec, //0 0 0 1 1 * *
seconds: startBit,
minutes: startBit,
hours: startBit,
daysOfMonth: startBit >> 1,
months: startBit >> 1,
daysOfWeek: weeksMask,
years: allYears,
}, nil
case "@monthly":
return &Expression{
expression: spec, // 0 0 0 1 * * *
seconds: startBit,
minutes: startBit,
hours: startBit,
daysOfMonth: startBit >> 1,
months: monthsMask,
daysOfWeek: weeksMask,
years: allYears,
}, nil
case "@weekly":
return &Expression{
expression: spec, // 0 0 0 * * 0 *
seconds: startBit,
minutes: startBit,
hours: startBit,
daysOfMonth: daysMask,
months: monthsMask,
daysOfWeek: genWeekdayBits([7]bool{0: true}),
years: allYears,
}, nil
case "@daily", "@midnight":
return &Expression{
expression: spec, // 0 0 0 * * * *
seconds: startBit,
minutes: startBit,
hours: startBit,
daysOfMonth: daysMask,
months: monthsMask,
daysOfWeek: weeksMask,
years: allYears,
}, nil
case "@hourly":
return &Expression{
expression: spec, // 0 0 * * * * *
seconds: startBit,
minutes: startBit,
hours: hoursMask,
daysOfMonth: daysMask,
months: monthsMask,
daysOfWeek: weeksMask,
years: allYears,
}, nil
}
return nil, fmt.Errorf("unrecognized name of cron expression: %s", spec)
}
var fieldParsers = []fieldParser{
{
"second",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.seconds |= startBit >> i
}
},
0, 59,
atoi,
nil,
},
{
"minute",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.minutes |= startBit >> i
}
},
0, 59,
atoi,
nil,
},
{
"hour",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.hours |= startBit >> i
}
},
0, 23,
atoi,
nil,
},
{
"day of month",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.daysOfMonth |= startBit >> i
}
},
1, 31,
atoi,
parseSpecDomEntry,
},
{
"month",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.months |= startBit >> i
}
},
1, 12,
atomi,
nil,
},
{
"day of week",
func(expr *Expression, begin, end, step int) {
for i := begin; i <= end; i += step {
expr.daysOfWeek |= startBit >> i
}
},
0, 7,
atowi,
parseSpecDowEntry,
},
{
"year",
func(expr *Expression, begin, end, step int) {
for i := begin - 1970; i <= end-1970; i += step {
expr.years[i>>6] |= startBit >> (i & 0x3f)
}
},
1970, 2099,
atoi,
nil,
},
}
const errPattern = "syntax error in %s field: '%s'"
type fieldParser struct {
name string
populateTo func(expr *Expression, begin, end, step int)
min, max int
atoi func(string) (int, bool)
specEntryParser func(expr *Expression, entry string, atoi func(string) (int, bool)) bool
}
func (fp *fieldParser) parse(expr *Expression, field string) error {
idx := strings.IndexByte(field, ',')
if idx == -1 {
return fp.parseEntry(expr, field)
}
entrys := strings.Split(field, ",")
for _, entry := range entrys {
err := fp.parseEntry(expr, entry)
if err != nil {
return err
}
}
return nil
}
func (fp *fieldParser) parseStep(expr *Expression, entry string, step int) bool {
if entry == "*" { // min-max
fp.populateTo(expr, fp.min, fp.max, step)
return true
}
n, ok := fp.atoi(entry)
if ok { // n-max
if !fp.isValid(n) {
return false
}
fp.populateTo(expr, n, fp.max, step)
return true
}
// standard begin-end
idx := strings.IndexByte(entry, '-')
begin, ok := fp.atoi(entry[:idx])
if !ok || !fp.isValid(begin) {
return false
}
end, ok := fp.atoi(entry[idx+1:])
if !ok || !fp.isValid(end) {
return false
}
fp.populateTo(expr, begin, end, step)
return true
}
func (fp *fieldParser) parseEntry(expr *Expression, entry string) error {
if entry == "*" {
fp.populateTo(expr, fp.min, fp.max, 1)
return nil
}
n, ok := fp.atoi(entry)
if ok { // one value
if !fp.isValid(n) {
return fmt.Errorf(errPattern, fp.name, entry)
}
fp.populateTo(expr, n, n, 1)
return nil
}
// step /
idx := strings.IndexByte(entry, '/')
if idx != -1 {
step, ok := fp.atoi(entry[idx+1:])
if !ok || step < 1 || step > (fp.max-fp.min) {
return fmt.Errorf(errPattern, fp.name, entry)
}
if !fp.parseStep(expr, entry[:idx], step) {
return fmt.Errorf(errPattern, fp.name, entry)
}
return nil
}
// span
idx = strings.IndexByte(entry, '-')
if idx != -1 {
if !fp.parseStep(expr, entry, 1) {
return fmt.Errorf(errPattern, fp.name, entry)
}
return nil
}
if fp.specEntryParser == nil || !fp.specEntryParser(expr, entry, fp.atoi) {
return fmt.Errorf(errPattern, fp.name, entry)
}
return nil
}
func (fp *fieldParser) isValid(n int) bool {
return n >= fp.min && n <= fp.max
}
func parseSpecDomEntry(expr *Expression, entry string, atoi func(string) (int, bool)) bool {
const min, max = int(1), int(31)
if entry == "?" {
expr.daysOfMonth |= daysMask
return true
}
if entry == "LW" {
expr.lastWorkdayOfMonth = true
return true
}
if entry == "L" {
expr.lastDayOfMonth = true
return true
}
if strings.HasSuffix(entry, "W") {
n, ok := atoi(entry[:len(entry)-1])
if !ok || n < min || n > max {
return false
}
expr.workdaysOfMonth |= startBit >> n
return true
}
return false
}
func parseSpecDowEntry(expr *Expression, entry string, atoi func(string) (int, bool)) bool {
const min, max = int(0), int(7)
if entry == "?" {
expr.daysOfWeek |= weeksMask << 1
return true
}
if strings.HasSuffix(entry, "L") {
n, ok := atoi(entry[:len(entry)-1])
if !ok || n < min || n > max {
return false
}
expr.lastWeekdaysOfWeek |= startBit >> n
return true
}
idx := strings.IndexByte(entry, '#')
if idx != -1 {
weekday, ok := atowi(entry[:idx])
if !ok || weekday < min || weekday > max {
return false
}
ith, ok := atowi(entry[idx+1:])
if !ok || ith < 1 || ith > 5 {
return false
}
if weekday == 7 {
weekday = 0
}
n := (ith-1)*7 + weekday
expr.ithWeekdaysOfWeek |= startBit >> (n + 1) // sun is bit 1
return true
}
return false
}
func genWeekdayBits(weekdays [7]bool) uint64 {
var v uint64
i := 1 // day start from 1
for k := 0; k < 5; k++ {
for _, b := range weekdays {
if b {
v |= startBit >> i
}
i++
}
}
return v
}
func atoi(s string) (int, bool) {
i, err := strconv.Atoi(s)
return i, err == nil
}
func atowi(s string) (int, bool) {
switch strings.ToLower(s) {
case `0`, `sun`, `sunday`:
return 0, true
case `1`, `mon`, `monday`:
return 1, true
case `2`, `tue`, `tuesday`:
return 2, true
case `3`, `wed`, `wednesday`:
return 3, true
case `4`, `thu`, `thursday`:
return 4, true
case `5`, `fri`, `friday`:
return 5, true
case `6`, `sat`, `saturday`:
return 6, true
case `7`:
return 7, true
default:
return 0, false
}
}
func atomi(s string) (int, bool) {
switch strings.ToLower(s) {
case `1`, `jan`, `january`:
return 1, true
case `2`, `feb`, `february`:
return 2, true
case `3`, `mar`, `march`:
return 3, true
case `4`, `apr`, `april`:
return 4, true
case `5`, `may`:
return 5, true
case `6`, `jun`, `june`:
return 6, true
case `7`, `jul`, `july`:
return 7, true
case `8`, `aug`, `august`:
return 8, true
case `9`, `sep`, `september`:
return 9, true
case `10`, `oct`, `october`:
return 10, true
case `11`, `nov`, `november`:
return 11, true
case `12`, `dec`, `december`:
return 12, true
default:
return 0, false
}
} | cron/parser.go | 0.695855 | 0.403361 | parser.go | starcoder |
package main
import (
"korok.io/korok/game"
"korok.io/korok"
"korok.io/korok/engi"
"korok.io/korok/gfx"
"korok.io/korok/asset"
"korok.io/korok/hid/input"
"korok.io/korok/math/f32"
)
// A face surround with 4 blocks!
type Block struct {
engi.Entity
}
func NewBlock() Block {
e := korok.Entity.New()
b := Block{e}
korok.Sprite.NewComp(e)
korok.Transform.NewComp(e)
return b
}
func (b *Block) SetTexture(tex gfx.Tex2D) {
korok.Sprite.Comp(b.Entity).SetSprite(tex)
}
func (b *Block) SetPosition(x, y float32) {
korok.Transform.Comp(b.Entity).SetPosition(f32.Vec2{x, y})
}
func (b *Block) SetSize(w, h float32) {
korok.Sprite.Comp(b.Entity).SetSize(w, h)
}
type Face struct {
engi.Entity
up, down, left, right Block
}
func NewFace() *Face {
e := korok.Entity.New()
f := &Face{Entity:e}
korok.Sprite.NewComp(f.Entity)
korok.Transform.NewComp(f.Entity)
return f
}
func (f *Face) SetTexture(tex gfx.Tex2D) {
korok.Sprite.Comp(f.Entity).SetSprite(tex)
}
func (f *Face) SetPosition(x, y float32) {
korok.Transform.Comp(f.Entity).SetPosition(f32.Vec2{x, y})
}
func (f *Face) SetSize(w, h float32) {
korok.Sprite.Comp(f.Entity).SetSize(w, h)
}
func (f *Face) LoadBlock(up, down, left, right Block) {
xf := korok.Transform.Comp(f.Entity)
b1 := korok.Transform.Comp(up.Entity)
b2 := korok.Transform.Comp(down.Entity)
b3 := korok.Transform.Comp(left.Entity)
b4 := korok.Transform.Comp(right.Entity)
xf.LinkChildren(b1, b2, b3, b4)
b1.SetPosition(f32.Vec2{0, 100})
b2.SetPosition(f32.Vec2{0, -100})
b3.SetPosition(f32.Vec2{-100, 0})
b4.SetPosition(f32.Vec2{100, 0})
}
type MainScene struct {
face *Face
}
func (m *MainScene) Load() {
asset.Texture.Load("face.png")
asset.Texture.Load("block.png")
input.RegisterButton("up", input.ArrowUp)
input.RegisterButton("down", input.ArrowDown)
input.RegisterButton("left", input.ArrowLeft)
input.RegisterButton("right", input.ArrowRight)
}
func (m *MainScene) OnEnter(g *game.Game) {
blockTex := asset.Texture.Get("block.png")
up, down, left, right := NewBlock(), NewBlock(), NewBlock(), NewBlock()
up.SetTexture(blockTex); up.SetSize(30, 30)
down.SetTexture(blockTex); down.SetSize(30, 30)
left.SetTexture(blockTex); left.SetSize(30, 30)
right.SetTexture(blockTex); right.SetSize(30, 30)
faceTex := asset.Texture.Get("face.png")
face := NewFace()
face.SetTexture(faceTex)
face.LoadBlock(up, down, left, right)
face.SetPosition(240, 160)
face.SetSize(50 ,50)
m.face = face
}
func (m *MainScene) Update(dt float32) {
if dt > 1 {
return
}
var x, y float32
if input.Button("up").Down() {
y = 50
}
if input.Button("down").Down() {
y = -50
}
if input.Button("left").Down() {
x = -50
}
if input.Button("right").Down() {
x = 50
}
xf := korok.Transform.Comp(m.face.Entity)
p := xf.Position()
x, y = x * dt + p[0] , y * dt + p[1]
xf.SetPosition(f32.Vec2{x, y})
}
func (m *MainScene) OnExit() {
}
func main() {
options := &korok.Options{
Title:"Node System",
Width:480,
Height:320,
}
korok.Run(options, &MainScene{})
} | src/demo/node/main.go | 0.536556 | 0.413181 | main.go | starcoder |
package model
import (
"time"
"github.com/GoogleCloudPlatform/heapster/store"
)
// latestTimestamp returns its largest time.Time argument
func latestTimestamp(first time.Time, second time.Time) time.Time {
if first.After(second) {
return first
}
return second
}
// newInfoType is an InfoType Constructor, which returns a new InfoType.
// Initial fields for the new InfoType can be provided as arguments.
// A nil argument results in a newly-allocated map for that field.
func newInfoType(metrics map[string]*store.TimeStore, labels map[string]string) InfoType {
if metrics == nil {
metrics = make(map[string]*store.TimeStore)
}
if labels == nil {
labels = make(map[string]string)
}
return InfoType{
Metrics: metrics,
Labels: labels,
}
}
// addContainerToMap creates or finds a ContainerInfo element under a map[string]*ContainerInfo
func addContainerToMap(container_name string, dict map[string]*ContainerInfo) *ContainerInfo {
var container_ptr *ContainerInfo
if val, ok := dict[container_name]; ok {
// A container already exists under that name, return the address
container_ptr = val
} else {
container_ptr = &ContainerInfo{
InfoType: newInfoType(nil, nil),
}
dict[container_name] = container_ptr
}
return container_ptr
}
// addTimePoints adds the values of two TimePoints as uint64.
// addTimePoints returns a new TimePoint with the added Value fields
// and the Timestamp of the first TimePoint.
func addTimePoints(tp1 store.TimePoint, tp2 store.TimePoint) store.TimePoint {
return store.TimePoint{
Timestamp: tp1.Timestamp,
Value: tp1.Value.(uint64) + tp2.Value.(uint64),
}
}
// popTPSlice pops the first element of a TimePoint Slice, removing it from the slice.
// popTPSlice receives a *[]TimePoint and returns its first element.
func popTPSlice(tps_ptr *[]store.TimePoint) *store.TimePoint {
if tps_ptr == nil {
return nil
}
tps := *tps_ptr
if len(tps) == 0 {
return nil
}
res := tps[0]
if len(tps) == 1 {
(*tps_ptr) = tps[0:0]
}
(*tps_ptr) = tps[1:]
return &res
}
// addMatchingTimeseries performs addition over two timeseries with unique timestamps.
// addMatchingTimeseries returns a []TimePoint of the resulting aggregated.
// Assumes time-descending order of both []TimePoint parameters and the return slice.
func addMatchingTimeseries(left []store.TimePoint, right []store.TimePoint) []store.TimePoint {
var cur_left *store.TimePoint
var cur_right *store.TimePoint
result := []store.TimePoint{}
// Merge timeseries into result until either one is empty
cur_left = popTPSlice(&left)
cur_right = popTPSlice(&right)
for cur_left != nil && cur_right != nil {
if cur_left.Timestamp.Equal(cur_right.Timestamp) {
result = append(result, addTimePoints(*cur_left, *cur_right))
cur_left = popTPSlice(&left)
cur_right = popTPSlice(&right)
} else if cur_left.Timestamp.After(cur_right.Timestamp) {
result = append(result, *cur_left)
cur_left = popTPSlice(&left)
} else {
result = append(result, *cur_right)
cur_right = popTPSlice(&right)
}
}
if cur_left == nil && cur_right != nil {
result = append(result, *cur_right)
} else if cur_left != nil && cur_right == nil {
result = append(result, *cur_left)
}
// Append leftover elements from non-empty timeseries
if len(left) > 0 {
result = append(result, left...)
} else if len(right) > 0 {
result = append(result, right...)
}
return result
} | model/util.go | 0.759315 | 0.455683 | util.go | starcoder |
package ratingutil
import (
"time"
"github.com/mashiike/rating"
)
//RatingPeriod constants
//can multiple float64
// PeriodDay * 3.0 => 3 days
const (
PeriodDay time.Duration = 24 * time.Hour
PeriodWeek = 7 * PeriodDay
PeriodMonth = 30 * PeriodDay
PeriodYear = 365 * PeriodDay
)
//Clock is a clock used in this package. The default is to use time.Now()
type Clock interface {
Now() time.Time
}
type defaultClock struct{}
func (c defaultClock) Now() time.Time {
return time.Now()
}
//Config is service configuration
type Config struct {
//Service Clock
Clock
Tau float64
//RatingPeriod is the fixed interval of Rating.
//All matches played between this interval are considered to occur simultaneously and are calculated.
//In RatingPeriod, the period in which the players play about 15 times is good.
RatingPeriod time.Duration
//It will return to the initial deviation if you have not played for about this period.
//This period is a guideline, and the time to return to the actual initial deviation is determined by the player's Volatility here.
//And initial Volatility is calculated based on this period.
PeriodToResetDeviation time.Duration
//Fighting prosperity strategy uses round robin by default
DefaultApplyStrategy ApplyStrategy
}
//NewConfig is default configuration
func NewConfig() *Config {
return &Config{
Clock: defaultClock{},
RatingPeriod: PeriodWeek,
PeriodToResetDeviation: PeriodYear,
Tau: 0.5,
DefaultApplyStrategy: AsRoundrobin,
}
}
//InitialVolatility calculates the initial rating fluctuation according to the setting
func (c *Config) InitialVolatility() float64 {
count := c.PeriodToResetDeviation.Seconds() / c.RatingPeriod.Seconds()
return rating.NewVolatility(50.0, count)
}
//WithClock is set clock to config
func (c *Config) WithClock(clock Clock) *Config {
c.Clock = clock
return c
}
//WithRatingPeriod is set RatingPeriod to config
func (c *Config) WithRatingPeriod(period time.Duration) *Config {
c.RatingPeriod = period
return c
}
//WithTau is set Tau to config
func (c *Config) WithTau(tau float64) *Config {
c.Tau = tau
return c
}
//WithApplyStrategy is set DefaultApplyStrategy to config
func (c *Config) WithApplyStrategy(strategy ApplyStrategy) *Config {
if strategy != nil {
c.DefaultApplyStrategy = strategy
}
return c
} | ratingutil/config.go | 0.765681 | 0.431285 | config.go | starcoder |
package file
import (
"fmt"
"io"
"path/filepath"
"github.com/turbinelabs/api"
"github.com/turbinelabs/cli/command"
"github.com/turbinelabs/codec"
tbnflag "github.com/turbinelabs/nonstdlib/flag"
"github.com/turbinelabs/rotor"
)
const fileDescription = `Watches the given JSON or YAML file and updates Clusters
stored in the Turbine Labs API at startup and whenever the file changes.
The file can be specified as a flag or as the only argument (but not both).
The structure of the JSON and YAML formats is equivalent. Each contains 0 or
more clusters identified by name, each containing 0 or more instances. For
example, as YAML:
- cluster: c1
instances:
- host: h1
port: 8000
metadata:
- key: stage
value: prod
Alternatively as JSON:
[
{
"cluster": "c1",
"instances": [
{
"host": "h1",
"port": 8000,
"metadata": [
{ "key": "stage", "value": "prod" }
]
}
]
}
]
Note that when updating the file, care should be taken to make the modification
atomic. In practice, this means writing the updated file to a temporary location and
then moving/renaming the file to the watched path. Alternatively, the watched path
may be a symbolic link that is replaced with a reference to the updated file.`
// Cmd creates the file based collector sub command
func Cmd(updaterFlags rotor.UpdaterFromFlags) *command.Cmd {
cmd := &command.Cmd{
Name: "file",
Summary: "file-based collector",
Usage: "[OPTIONS] <file>",
Description: fileDescription,
}
flags := tbnflag.Wrap(&cmd.Flags)
r := &fileRunner{
codecFlags: codec.NewFromFlags(flags),
updaterFlags: updaterFlags,
}
cmd.Runner = r
cmd.Flags.StringVar(&r.file, "filename", "", "The file from which to collect")
return cmd
}
type fileRunner struct {
file string
updaterFlags rotor.UpdaterFromFlags
codecFlags codec.FromFlags
}
func (r *fileRunner) Run(cmd *command.Cmd, args []string) command.CmdErr {
if err := r.updaterFlags.Validate(); err != nil {
return cmd.BadInput(err)
}
var file string
if r.file == "" {
if len(args) != 1 {
return cmd.BadInput("must specify filename as either flag or single argument")
}
file = filepath.Clean(args[0])
} else {
if len(args) != 0 {
return cmd.BadInput("cannot specify filename as both flag and argument")
}
file = filepath.Clean(r.file)
}
if err := r.codecFlags.Validate(); err != nil {
return cmd.BadInput(err)
}
updater, err := r.updaterFlags.Make()
if err != nil {
return cmd.Error(err)
}
collector := NewCollector(file, updater, mkParser(r.codecFlags.Make()))
if err := collector.Run(); err != nil {
return cmd.Error(err)
}
return command.NoError()
}
type fileCluster struct {
ClusterName string `json:"cluster"`
Instances api.Instances `json:"instances"`
}
func mkParser(codec codec.Codec) func(io.Reader) ([]api.Cluster, error) {
return func(reader io.Reader) ([]api.Cluster, error) {
fileClusters := []fileCluster{}
err := codec.Decode(reader, &fileClusters)
if err != nil {
return nil, err
}
clusters := make(map[string]*api.Cluster, len(fileClusters))
for _, fc := range fileClusters {
if _, exists := clusters[fc.ClusterName]; exists {
return nil, fmt.Errorf("duplicate cluster: %s", fc.ClusterName)
}
cluster := &api.Cluster{
Name: fc.ClusterName,
Instances: fc.Instances,
}
clusters[cluster.Name] = cluster
}
result := make([]api.Cluster, 0, len(clusters))
for _, cluster := range clusters {
result = append(result, *cluster)
}
return result, nil
}
} | plugins/file/file.go | 0.587943 | 0.442938 | file.go | starcoder |
package nulldate
import (
"time"
"github.com/lovung/date"
)
// NullDate is the nullable type for Date only.
// Support UTC timezone only
// Null if valid is true
type NullDate struct {
Date date.Date
Valid bool
}
// New creates a new Date
func New(t time.Time, valid bool) NullDate {
return NullDate{
Date: date.New(t),
Valid: valid,
}
}
// NewFrom creates a new Date that will be valid
func NewFrom(t time.Time) NullDate {
return NullDate{
Date: date.New(t),
Valid: true,
}
}
// NewDate from year, month and day
func NewDate(year int, month time.Month, day int) NullDate {
t := time.Date(year, month, day, 0, 0, 0, 0, time.UTC)
return NewFrom(t)
}
// NewZero creates the new zero (null) Date
func NewZero() NullDate {
return NullDate{
Date: date.NewZero(),
Valid: false,
}
}
// NewFromPtr creates a Date that be null if t is nil
func NewFromPtr(t *time.Time) NullDate {
if t == nil {
return NewZero()
}
return NullDate{
Date: date.New(*t),
Valid: true,
}
}
// NewFromStr creates a new Date from the RFC3339 Date - "2006-01-02"
func NewFromStr(s string) (NullDate, error) {
t, err := time.Parse(date.RFC3339Date, s)
if err != nil {
return NewZero(), err
}
return NewFrom(t), nil
}
// MustParse creates a new Date from the RFC3339 Date - "2006-01-02"
// Panic if wrong format
func MustParse(s string) NullDate {
t, err := time.Parse(date.RFC3339Date, s)
if err != nil {
panic(err)
}
return NewFrom(t)
}
// ToTime returns a time.Time to this Date's value
func (d NullDate) ToTime() time.Time {
if !d.Valid {
return time.Time{}
}
return d.Date.ToTime()
}
// SetValid changes this Date's value and also sets it to be non-null
func (d *NullDate) SetValid(t time.Time) {
d.Date = date.New(t)
d.Valid = true
}
// IsZero returns true for invalid Date's, for omitempty support
func (d NullDate) IsZero() bool {
return !d.Valid
}
func removeTime(t time.Time) time.Time {
year, month, day := t.Date()
return time.Date(year, month, day, 0, 0, 0, 0, time.UTC)
} | nulldate/null_date.go | 0.72487 | 0.431704 | null_date.go | starcoder |
package day12
import "math"
import "fmt"
var pairs = [][]int{
{0, 1},
{0, 2},
{0, 3},
{1, 2},
{1, 3},
{2, 3},
}
type Position struct {
X int
Y int
Z int
}
func NewPosition(x, y, z int) Position {
return Position{X: x, Y: y, Z: z}
}
type Velocity struct {
X int
Y int
Z int
}
func NewVelocity(x, y, z int) Velocity {
return Velocity{X: x, Y: y, Z: z}
}
type Moon struct {
Name string
Position Position
Velocity Velocity
}
func NewMoonOnlyPosition(x int, y int, z int) *Moon {
return &Moon{Position: Position{X: x, Y: y, Z: z}, Velocity: Velocity{X: 0, Y: 0, Z: 0}}
}
func (moon *Moon) GetPulledBy(puller *Moon) {
switch {
case moon.Position.X < puller.Position.X:
moon.Velocity.X++
case moon.Position.X > puller.Position.X:
moon.Velocity.X--
}
switch {
case moon.Position.Y < puller.Position.Y:
moon.Velocity.Y++
case moon.Position.Y > puller.Position.Y:
moon.Velocity.Y--
}
switch {
case moon.Position.Z < puller.Position.Z:
moon.Velocity.Z++
case moon.Position.Z > puller.Position.Z:
moon.Velocity.Z--
}
}
func (moon *Moon) Move() {
moon.Position.X += moon.Velocity.X
moon.Position.Y += moon.Velocity.Y
moon.Position.Z += moon.Velocity.Z
}
func (moon *Moon) Energy() (int, int) {
potential := int(math.Abs(float64(moon.Position.X)) + math.Abs(float64(moon.Position.Y)) + math.Abs(float64(moon.Position.Z)))
kinetic := int(math.Abs(float64(moon.Velocity.X)) + math.Abs(float64(moon.Velocity.Y)) + math.Abs(float64(moon.Velocity.Z)))
return potential, kinetic
}
type Moons []*Moon
func (moons Moons) TotalEnergy() int {
totalEnergy := 0
for i, moon := range moons {
p, k := moon.Energy()
fmt.Printf("moon %v %v P: %v K: %v P*K: %v\n", i, moon, p, k, p*k)
totalEnergy += p * k
}
return totalEnergy
}
func Tick(moons Moons, count int) Moons {
if count == 0 {
return moons
}
count -= 1
for _, pair := range pairs {
moons[pair[0]].GetPulledBy(moons[pair[1]])
moons[pair[1]].GetPulledBy(moons[pair[0]])
}
for _, moon := range moons {
moon.Move()
}
return Tick(moons, count)
} | day12/day12.go | 0.707809 | 0.425068 | day12.go | starcoder |
package model
// Options defines possible square values when enumerating valid grids.
var options = []Square{
SquareDragon,
SquareFire,
SquareAir,
}
type gridPredicate func(*Grid) bool
type gridsPredicate func([]*Grid) bool
// Enumerate enumerates all possible successors from a given grid.
func Enumerate(g *Grid) []*Grid {
return enumerate(g, all, stopNever)
}
// EnumerateLimited enumerates all possible successors from a given grid but stops as soon as there are more than one solution.
func EnumerateLimited(g *Grid) []*Grid {
return enumerate(g, all, stopWhenMultipleSolutions)
}
// EnumerateFilter enumerates all possible successors from a given grid and
// allows to filter only the wanted grids.
func EnumerateFilter(g *Grid, filter gridPredicate) []*Grid {
return enumerate(g, filter, stopNever)
}
// EnumerateSquare enumerates all possible grids if a specified square is undefined.
// Otherwise, the given grid is returned as single possible solution.
func EnumerateSquare(g *Grid, i int) []*Grid {
if g.Squarei(i) != SquareUndefined {
return []*Grid{g.Clone()}
}
result := make([]*Grid, 0)
for _, option := range options {
suc := g.Clone()
suc.SetSquarei(i, option)
if Validate(suc) {
result = append(result, suc)
}
}
return result
}
// IsDistinct returns true if there is exactly one solution to the given grid.
func IsDistinct(g *Grid) bool {
return len(enumerate(g, all, stopWhenMultipleSolutions)) == 1
}
func enumerate(g *Grid, filter gridPredicate, isEarlyStop gridsPredicate) []*Grid {
res := make([]*Grid, 0)
if !Validate(g) {
// skip further investigation because the state is invalid
return res
}
return enumRecur(g, res, 0, filter, isEarlyStop)
}
func enumRecur(g *Grid, res []*Grid, i int, filter gridPredicate, isEarlyStop gridsPredicate) []*Grid {
if isEarlyStop(res) {
// early exit is used to validate, if a distinct solution exists
return res
}
if isLeaf(g) {
if filter(g) {
// valid leaf node found, add grid to result
res = append(res, g)
}
return res
}
square := g.Squarei(i)
if square == SquareUndefined {
for _, option := range options {
suc := g.Clone()
suc.SetSquarei(i, option)
// check all neighbor square plus the square that was changed
if !ValidateIncr(suc, i, 1) {
continue
}
res = enumRecur(suc, res, i+1, filter, isEarlyStop)
}
} else {
suc := g.Clone()
res = enumRecur(suc, res, i+1, filter, isEarlyStop)
}
return res
}
func isLeaf(g *Grid) bool {
return !g.HasSquare(SquareUndefined)
}
func stopWhenMultipleSolutions(res []*Grid) bool {
return len(res) > 1
}
func stopNever(res []*Grid) bool {
return false
}
func all(g *Grid) bool {
return true
} | pkg/model/enumerate.go | 0.828245 | 0.445349 | enumerate.go | starcoder |
package mesh
import (
"github.com/galaco/kero/framework/graphics/adapter"
"github.com/go-gl/mathgl/mgl32"
)
type Mesh adapter.Mesh
// BasicMesh
type BasicMesh struct {
vertices []float32
normals []float32
uvs []float32
lightmapUVs []float32
tangents []float32
indices []uint32
}
// AddVertex
func (mesh *BasicMesh) AddVertex(vertex ...float32) {
mesh.vertices = append(mesh.vertices, vertex...)
}
// AddNormal
func (mesh *BasicMesh) AddNormal(normal ...float32) {
mesh.normals = append(mesh.normals, normal...)
}
// AddUV
func (mesh *BasicMesh) AddUV(uv ...float32) {
mesh.uvs = append(mesh.uvs, uv...)
}
// AddLightmapUV
func (mesh *BasicMesh) AddLightmapUV(uv ...float32) {
mesh.lightmapUVs = append(mesh.lightmapUVs, uv...)
}
// AddTangent
func (mesh *BasicMesh) AddTangent(tangent ...float32) {
mesh.tangents = append(mesh.tangents, tangent...)
}
// AddIndice
func (mesh *BasicMesh) AddIndice(indice ...uint32) {
mesh.indices = append(mesh.indices, indice...)
}
// Vertices
func (mesh *BasicMesh) Vertices() []float32 {
return mesh.vertices
}
// Normals
func (mesh *BasicMesh) Normals() []float32 {
return mesh.normals
}
// UVs
func (mesh *BasicMesh) UVs() []float32 {
return mesh.uvs
}
// LightmapUVs
func (mesh *BasicMesh) LightmapUVs() []float32 {
return mesh.lightmapUVs
}
// Tangents
func (mesh *BasicMesh) Tangents() []float32 {
return mesh.tangents
}
// Indices
func (mesh *BasicMesh) Indices() []uint32 {
return mesh.indices
}
// GenerateTangents
func (mesh *BasicMesh) GenerateTangents() {
//const vector<vec3> & points,
//const vector<vec3> & normals,
//const vector<int> & faces,
//const vector<vec2> & texCoords,
// vector<vec4> & tangents)
//{
//vector<vec3> tan1Accum;
tan1Accum := make([]float32, len(mesh.vertices))
//vector<vec3> tan2Accum;
tan2Accum := make([]float32, len(mesh.vertices))
tangents := make([]float32, len(mesh.vertices)+(len(mesh.vertices)/3))
//for( uint i = 0; i < points.size(); i++ ) {
//tan1Accum.push_back(vec3(0.0f));
//tan2Accum.push_back(vec3(0.0f));
//tangents.push_back(vec4(0.0f));
//}
// Compute the tangent vector
for i := uint(0); i < uint(len(mesh.vertices))-9; i += 9 {
rootIdx := i / 3
p1 := mgl32.Vec3{mesh.vertices[i], mesh.vertices[i+1], mesh.vertices[i+2]}
p2 := mgl32.Vec3{mesh.vertices[i+3], mesh.vertices[i+4], mesh.vertices[i+5]}
p3 := mgl32.Vec3{mesh.vertices[i+6], mesh.vertices[i+7], mesh.vertices[i+8]}
uvIdx := rootIdx * 2
tc1 := mgl32.Vec2{mesh.uvs[uvIdx], mesh.uvs[uvIdx+1]}
tc2 := mgl32.Vec2{mesh.uvs[uvIdx+2], mesh.uvs[uvIdx+3]}
tc3 := mgl32.Vec2{mesh.uvs[uvIdx+4], mesh.uvs[uvIdx+5]}
q1 := p2.Sub(p1)
q2 := p3.Sub(p1)
s1 := tc2.X() - tc1.X()
s2 := tc3.X() - tc1.X()
t1 := tc2.Y() - tc1.Y()
t2 := tc3.Y() - tc1.Y()
r := 1.0 / (s1*t2 - s2*t1)
tan1 := mgl32.Vec3{
(t2*q1.X() - t1*q2.X()) * r,
(t2*q1.Y() - t1*q2.Y()) * r,
(t2*q1.Z() - t1*q2.Z()) * r,
}
tan2 := mgl32.Vec3{
(s1*q2.X() - s2*q1.X()) * r,
(s1*q2.Y() - s2*q1.Y()) * r,
(s1*q2.Z() - s2*q1.Z()) * r,
}
tan1Accum[i] += tan1.X()
tan1Accum[i+1] += tan1.Y()
tan1Accum[i+2] += tan1.Z()
tan2Accum[i] += tan2.X()
tan2Accum[i+1] += tan2.Y()
tan2Accum[i+2] += tan2.Z()
}
for i := uint(0); i < uint(len(mesh.vertices))-2; i++ {
n := mgl32.Vec3{
mesh.normals[i],
mesh.normals[i+1],
mesh.normals[i+2],
}
t1 := mgl32.Vec3{
tan1Accum[i],
tan1Accum[i+1],
tan1Accum[i+2],
}
t2 := mgl32.Vec3{
tan2Accum[i],
tan2Accum[i+1],
tan2Accum[i+2],
}
//const vec3 &n = normals[i];
//vec3 &t1 = tan1Accum[i];
//vec3 &t2 = tan2Accum[i];
// Gram-Schmidt orthogonalize
//tangents[i] = vec4(glm::normalize( t1 - (glm::dot(n,t1) * n) ), 0.0f);
res := t1.Sub(n.Mul(n.Dot(t1))).Normalize()
tangents[i] = res.X()
tangents[i+1] = res.Y()
tangents[i+2] = res.Z()
// Store handedness in w
w := float32(1.0)
if n.Cross(t1).Dot(t2) < 0 {
w = -1.0
}
tangents[i+3] = w
//tangents[i] = (glm::dot( glm::cross(n,t1), t2 ) < 0.0f) ? -1.0f : 1.0f;
}
tan1Accum = nil
tan2Accum = nil
//tan1Accum.clear();
//tan2Accum.clear();
mesh.tangents = tangents
}
// NewMesh
func NewMesh() *BasicMesh {
return &BasicMesh{}
} | framework/graphics/mesh/mesh.go | 0.625209 | 0.459197 | mesh.go | starcoder |
package bytefmt
// from: github.com/cloudfoundry/bytefmt by Apache License
import (
"errors"
"strconv"
"strings"
"unicode"
)
const (
sizeByte = 1 << (10 * iota)
sizeKilo
sizeMega
sizeGiga
sizeTera
sizePeta
sizeExa
)
var errInvalidByteQuantity = errors.New("byte quantity must be a positive integer with a unit of measurement like M, MB, MiB, G, GiB, or GB")
// FormatSize returns a human-readable byte string of the form 10M, 12.5K, and so forth. The following units are available:
// E: Exabyte
// P: Petabyte
// T: Terabyte
// G: Gigabyte
// M: Megabyte
// K: Kilobyte
// B: Byte
// The unit that results in the smallest number greater than or equal to 1 is always chosen.
func FormatSize(bytes uint64) string {
unit := ""
value := float64(bytes)
switch {
case bytes >= sizeExa:
unit = "E"
value = value / sizeExa
case bytes >= sizePeta:
unit = "P"
value = value / sizePeta
case bytes >= sizeTera:
unit = "T"
value = value / sizeTera
case bytes >= sizeGiga:
unit = "G"
value = value / sizeGiga
case bytes >= sizeMega:
unit = "M"
value = value / sizeMega
case bytes >= sizeKilo:
unit = "K"
value = value / sizeKilo
case bytes >= sizeByte:
unit = "B"
case bytes == 0:
return "0"
}
result := strconv.FormatFloat(value, 'f', 1, 64)
result = strings.TrimSuffix(result, ".0")
return result + unit
}
// ParseSize parses a string formatted by FormatSize as bytes. Note binary-prefixed and SI prefixed units both mean a base-2 units
// KB = K = KiB = 1024
// MB = M = MiB = 1024 * K
// GB = G = GiB = 1024 * M
// TB = T = TiB = 1024 * G
// PB = P = PiB = 1024 * T
// EB = E = EiB = 1024 * P
func ParseSize(s string) (uint64, error) {
s = strings.TrimSpace(s)
s = strings.ToUpper(s)
i := strings.IndexFunc(s, unicode.IsLetter)
if i == -1 {
return 0, errInvalidByteQuantity
}
bytesString, multiple := s[:i], s[i:]
bytes, err := strconv.ParseFloat(bytesString, 64)
if err != nil || bytes <= 0 {
return 0, errInvalidByteQuantity
}
switch multiple {
case "E", "EB", "EIB":
return uint64(bytes * sizeExa), nil
case "P", "PB", "PIB":
return uint64(bytes * sizePeta), nil
case "T", "TB", "TIB":
return uint64(bytes * sizeTera), nil
case "G", "GB", "GIB":
return uint64(bytes * sizeGiga), nil
case "M", "MB", "MIB":
return uint64(bytes * sizeMega), nil
case "K", "KB", "KIB":
return uint64(bytes * sizeKilo), nil
case "B":
return uint64(bytes), nil
default:
return 0, errInvalidByteQuantity
}
} | bytefmt/bytefmt.go | 0.628977 | 0.439026 | bytefmt.go | starcoder |
package tree
type BinarySearchTreeNode struct {
Value int64
Times int64
Left *BinarySearchTreeNode
Right *BinarySearchTreeNode
}
type BinarySearchTree struct {
Root *BinarySearchTreeNode
}
func NewBinarySearchTree() *BinarySearchTree {
return new(BinarySearchTree)
}
func (tree *BinarySearchTree) Add(value int64) {
if tree.Root == nil {
tree.Root = &BinarySearchTreeNode{Value: value}
return
}
tree.Root.Add(value)
}
func (node *BinarySearchTreeNode) Add(value int64) {
if value < node.Value {
if node.Left == nil {
node.Left = &BinarySearchTreeNode{Value: value}
} else {
node.Left.Add(value)
}
} else if value > node.Value {
if node.Right == nil {
node.Right = &BinarySearchTreeNode{Value: value}
} else {
node.Right.Add(value)
}
} else {
node.Times = node.Times + 1
}
}
func (tree *BinarySearchTree) FindMinValue() *BinarySearchTreeNode {
if tree.Root == nil {
return nil
}
return tree.Root.FindMinValue()
}
func (node *BinarySearchTreeNode) FindMinValue() *BinarySearchTreeNode {
if node.Left == nil {
return node
}
return node.Left.FindMinValue()
}
func (tree *BinarySearchTree) FindMaxValue() *BinarySearchTreeNode {
if tree.Root == nil {
return nil
}
return tree.Root.FindMaxValue()
}
func (node *BinarySearchTreeNode) FindMaxValue() *BinarySearchTreeNode {
if node.Right == nil {
return node
}
return node.Right.FindMaxValue()
}
func (tree *BinarySearchTree) Find(value int64) *BinarySearchTreeNode {
if tree.Root == nil {
return nil
}
return tree.Root.Find(value)
}
func (node *BinarySearchTreeNode) Find(value int64) *BinarySearchTreeNode {
if value == node.Value {
return node
} else if value < node.Value {
if node.Left == nil {
return nil
}
return node.Left.Find(value)
} else {
if node.Right == nil {
return nil
}
return node.Right.Find(value)
}
}
func (tree *BinarySearchTree) FindParent(value int64) *BinarySearchTreeNode {
if tree.Root == nil {
return nil
}
if tree.Root.Value == value {
return nil
}
return tree.Root.FindParent(value)
}
func (node *BinarySearchTreeNode) FindParent(value int64) *BinarySearchTreeNode {
if value < node.Value {
leftTree := node.Left
if leftTree == nil {
return nil
}
if leftTree.Value == value {
return node
} else {
return leftTree.FindParent(value)
}
} else {
rightTree := node.Right
if rightTree == nil {
return nil
}
if rightTree.Value == value {
return node
} else {
return rightTree.FindParent(value)
}
}
}
func (tree *BinarySearchTree) Delete(value int64) {
if tree.Root == nil {
return
}
node := tree.Root.Find(value)
if node == nil {
return
}
parent := tree.Root.FindParent(value)
if parent == nil && node.Left == nil && node.Right == nil {
tree.Root = nil
return
} else if node.Left == nil && node.Right == nil {
if parent.Left != nil && value == parent.Left.Value {
parent.Left = nil
} else {
parent.Right = nil
}
return
} else if node.Left != nil && node.Right != nil {
minNode := node.Right
for minNode.Left != nil {
minNode = minNode.Left
}
tree.Delete(minNode.Value)
node.Value = minNode.Value
node.Times = minNode.Times
} else {
if parent == nil {
if node.Left != nil {
tree.Root = node.Left
} else {
tree.Root = node.Right
}
return
}
if node.Left != nil {
if parent.Left != nil && value == parent.Left.Value {
parent.Left = node.Left
} else {
parent.Right = node.Left
}
} else {
if parent.Left != nil && value == parent.Left.Value {
parent.Left = node.Right
} else {
parent.Right = node.Right
}
}
}
}
func (tree *BinarySearchTree) MidOder(visit func(value interface{})) {
tree.Root.MidOrder(visit)
}
func (node *BinarySearchTreeNode) MidOrder(visit func(value interface{})) {
if node == nil {
return
}
node.Left.MidOrder(visit)
for i := 0; i <= int(node.Times); i++ {
visit(node)
}
node.Right.MidOrder(visit)
} | basic_ds/tree/BinarySearchTree.go | 0.705278 | 0.420659 | BinarySearchTree.go | starcoder |
package main
import (
"fmt"
)
// tag::board[]
// Code in this file likely performs quite a few unnecessary copy operations on data. Performance
// doesn't matter much here, though.
// Field is a field of a bingo board.
type Field struct {
val int
marked bool
}
// Board is a bingo board.
type Board struct {
fields [][]Field
last int
}
// Convert an int slice into a field slice, initialising all fields as unmarked.
func fieldsFromInts(ints []int) []Field {
fields := make([]Field, 0, len(ints))
for _, val := range ints {
newField := Field{
val: val,
marked: false,
}
fields = append(fields, newField)
}
return fields
}
// Determine whether a set of fields is a winning set, i.e. whether all fields in the set (actually
// a slice) are marked.
func winningSet(fields []Field) bool {
for _, f := range fields {
if !f.marked {
return false
}
}
return true
}
// IsComplete determines whether a board has as many rows as cols. Such a board is complete since
// bingo boards are square.
func (b Board) IsComplete() bool {
if len(b.fields) == 0 {
return false
}
// A square board is considered complete.
return len(b.fields) == len(b.fields[0])
}
// AddRow adds a row to a board.
func (b *Board) AddRow(input []int) error {
if len(input) == 0 {
// Ignore empty lines as a convenience feature.
return nil
}
if len(b.fields) > 0 && len(input) != len(b.fields[0]) {
return fmt.Errorf("cannot process row of length %d, require %d", len(input), len(b.fields))
}
fields := fieldsFromInts(input)
b.fields = append(b.fields, fields)
return nil
}
// Row gets the row with the specified index. If the index is out of range, an empty slice is
// returned.
func (b Board) Row(idx int) []Field {
if idx < 0 || idx+1 > len(b.fields) {
return []Field{}
}
// This is easy.
result := make([]Field, len(b.fields))
_ = copy(result, b.fields[idx])
return result
}
// Col gets the column with the specified index. If the index is out of range, an empty slice is
// returned.
func (b Board) Col(idx int) []Field {
if idx < 0 || idx+1 > len(b.fields) {
return []Field{}
}
// This is less easy.
result := make([]Field, 0, len(b.fields))
for _, row := range b.fields {
result = append(result, row[idx])
}
return result
}
// Mark marks a number and returns whether the board had the number. All occurrences are marked.
func (b *Board) Mark(num int) bool {
found := false
for rowIdx := range b.fields {
for fieldIdx := range b.fields {
field := &b.fields[rowIdx][fieldIdx]
if field.val == num {
field.marked = true
found = true
}
}
}
if found {
b.last = num
}
return found
}
// Sum sums up all numbers. The value of `marked` determines whether marked or unmarked unes are
// summed up.
func (b Board) Sum(marked bool) int {
sum := 0
for _, row := range b.fields {
for _, field := range row {
if field.marked == marked {
sum += field.val
}
}
}
return sum
}
// Score determines whether this is a winning board by returning the score. A non-winning board has
// -1 score. That way, we can distinguish winning with a zero from non-winning boards.
func (b Board) Score() int {
for idx := 0; idx < len(b.fields); idx++ {
if winningSet(b.Row(idx)) || winningSet(b.Col(idx)) {
score := b.last * b.Sum(false)
return score
}
}
return -1
}
// Pretty makes a pretty string representation for this board. A marked field is followed by the
// letter "X". An unmarked field is represented by its number alone followed by a space.
func (b Board) Pretty() string {
// Hard-code formatting helper strings.
pre := "> "
post := " <"
sep := " | "
marker := "X"
clear := " "
formatter := "%-4s"
// Actually build the representation.
result := ""
for _, row := range b.fields {
result += pre
for colIdx, field := range row {
fieldRep := fmt.Sprintf("%d", field.val)
if field.marked {
fieldRep += marker
} else {
fieldRep += clear
}
fieldRep = fmt.Sprintf(formatter, fieldRep)
if colIdx > 0 {
result += sep
}
result += fieldRep
}
result += post + "\n"
}
return result
}
// end::board[] | day04/go/razziel89/board.go | 0.69285 | 0.466056 | board.go | starcoder |
// Interface to ws2811 chip (neopixel driver). Make sure that you have
// ws2811.h and pwm.h in a GCC include path (e.g. /usr/local/include) and
// libws2811.a in a GCC library path (e.g. /usr/local/lib).
// See https://github.com/jgarff/rpi_ws281x for instructions
package ws2811
import (
"os"
"os/signal"
"syscall"
"github.com/pkg/errors"
)
const (
// DefaultDmaNum is the default DMA number. Usually, this is 5 ob the Raspberry Pi (NOW CHANGE TO 10)
DefaultDmaNum = 10
// RpiPwmChannels is the number of PWM leds in the Raspberry Pi
RpiPwmChannels = 2
// TargetFreq is the target frequency. It is usually 800kHz (800000), and an go as low as 400000
TargetFreq = 800000
// DefaultGpioPin is the default pin on the Raspberry Pi where the signal will be available. Note
// that it is the BCM (Broadcom Pin Number) and the "Pin" 18 is actually the physical pin 12 of the
// Raspberry Pi.
DefaultGpioPin = 18
// DefaultLedCount is the default number of LEDs on the stripe.
DefaultLedCount = 32
// DefaultBrightness is the default maximum brightness of the LEDs. The brightness value can be between 0 and 255.
// If the brightness is too low, the LEDs remain dark. If the brightness is too high, the system needs too much
// current.
DefaultBrightness = 96 // Safe value between 0 and 255.
)
// StateDesc is a map from a return state to its string description.
var StateDesc = map[int]string{
0: "Success",
-1: "Generic failure",
-2: "Out of memory",
-3: "Hardware revision is not supported",
-4: "Memory lock failed",
-5: "mmap() failed",
-6: "Unable to map registers into userspace",
-7: "Unable to initialize GPIO",
-8: "Unable to initialize PWM",
-9: "Failed to create mailbox device",
-10: "DMA error",
-11: "Selected GPIO not possible",
-12: "Unable to initialize PCM",
-13: "Unable to initialize SPI",
-14: "SPI transfer error",
}
// HwDesc is the Hardware Description
type HwDesc struct {
Type uint32
Version uint32
PeriphBase uint32
VideocoreBase uint32
Desc string
}
// ChannelOption is the list of channel options
type ChannelOption struct {
// GpioPin is the GPIO Pin with PWM alternate function, 0 if unused
GpioPin int
// Invert inverts output signal
Invert bool
// LedCount is the number of LEDs, 0 if channel is unused
LedCount int
// StripeType is the strip color layout -- one of WS2811StripXXX constants
StripeType int
// Brightness is the maximum brightness of the LEDs. Value between 0 and 255
Brightness int
// WShift is the white shift value
WShift int
// RShift is the red shift value
RShift int
// GShift is the green shift value
GShift int
// BShift is blue shift value
BShift int
// Gamma is the gamma correction table
Gamma []byte
// Must capture Interrupt and SIGTERM signals to handle program exit
CaptureExit bool
// Must the LEDs be cleared on exit
ClearOnExit bool
}
// Option is the list of device options
type Option struct {
// RenderWaitTime is the time in µs before the next render can run
RenderWaitTime int
// Frequency is the required output frequency
Frequency int
// DmaNum is the number of a DMA _not_ already in use
DmaNum int
// Channels are channel options
Channels []ChannelOption
}
// DefaultOptions defines sensible default options for MakeWS2811
var DefaultOptions = Option{
Frequency: TargetFreq,
DmaNum: DefaultDmaNum,
Channels: []ChannelOption{
{
GpioPin: DefaultGpioPin,
LedCount: DefaultLedCount,
Brightness: DefaultBrightness,
StripeType: WS2812Strip,
Invert: false,
Gamma: nil,
},
},
}
// Leds returns the LEDs array of a given channel
func (ws2811 *WS2811) Leds(channel int) []uint32 {
return ws2811.leds[channel]
}
// SetLedsSync wait for the frame to finish and replace all the LEDs
func (ws2811 *WS2811) SetLedsSync(channel int, leds []uint32) error {
if err := ws2811.Wait(); err != nil {
return errors.WithMessage(err, "Error setting LEDs")
}
l := len(leds)
if l > len(ws2811.leds[channel]) {
return errors.New("Error: Too many LEDs")
}
for i := 0; i < l; i++ {
ws2811.leds[channel][i] = leds[i]
}
return nil
}
// StatusDesc returns the description of a status code
func StatusDesc(code int) string {
desc, ok := StateDesc[code]
if ok {
return desc
}
return "Unknown"
}
// SetupExit captures Interrupt and SIGTERM signals to handle program exit
func (ws2811 *WS2811) SetupExit(channel int, clear bool) {
signalChan := make(chan os.Signal, 1)
signal.Notify(signalChan, os.Interrupt, syscall.SIGTERM)
go func() {
for range signalChan {
if (clear) {
ws2811.ClearAll(channel)
ws2811.Render()
}
ws2811.Fini()
os.Exit(1)
}
}()
}
// SetAll sets all the leds for matrix to the specified color
func (ws2811 *WS2811) SetAll(channel int, color uint32) {
for led := 0; led < len(ws2811.Leds(channel)); led++ {
ws2811.Leds(channel)[led] = color
}
}
// ClearAll clears all the leds (sets to 0x0000000) for matrix
func (ws2811 *WS2811) ClearAll(channel int) {
ws2811.SetAll(channel, 0)
}
// WaitRender first waits and then renders
func (ws2811 *WS2811) WaitRender() (error) {
if err := ws2811.Wait(); err != nil {
return errors.WithMessage(err, "Error waiting LEDs")
}
if err := ws2811.Render(); err != nil {
return errors.WithMessage(err, "Error rendering LEDs")
}
return nil
}
var gamma8 = []byte{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5,
5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10,
10, 10, 11, 11, 11, 12, 12, 13, 13, 13, 14, 14, 15, 15, 16, 16,
17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 24, 24, 25,
25, 26, 27, 27, 28, 29, 29, 30, 31, 32, 32, 33, 34, 35, 35, 36,
37, 38, 39, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 50,
51, 52, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 66, 67, 68,
69, 70, 72, 73, 74, 75, 77, 78, 79, 81, 82, 83, 85, 86, 87, 89,
90, 92, 93, 95, 96, 98, 99, 101, 102, 104, 105, 107, 109, 110, 112, 114,
115, 117, 119, 120, 122, 124, 126, 127, 129, 131, 133, 135, 137, 138, 140, 142,
144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 167, 169, 171, 173, 175,
177, 180, 182, 184, 186, 189, 191, 193, 196, 198, 200, 203, 205, 208, 210, 213,
215, 218, 220, 223, 225, 228, 231, 233, 236, 239, 241, 244, 247, 249, 252, 255,
}
// 4 color R, G, B and W ordering
const (
// SK6812StripRGBW is the RGBW Mode
SK6812StripRGBW = 0x18100800
// SK6812StripRBGW is the StripRBGW Mode
SK6812StripRBGW = 0x18100008
// SK6812StripGRBW is the StripGRBW Mode
SK6812StripGRBW = 0x18081000
// SK6812StrioGBRW is the StrioGBRW Mode
SK6812StrioGBRW = 0x18080010
// SK6812StrioBRGW is the StrioBRGW Mode
SK6812StrioBRGW = 0x18001008
// SK6812StripBGRW is the StripBGRW Mode
SK6812StripBGRW = 0x18000810
// SK6812ShiftWMask is the Shift White Mask
SK6812ShiftWMask = 0xf0000000
)
// 3 color R, G and B ordering
const (
// WS2811StripRGB is the RGB Mode
WS2811StripRGB = 0x100800
// WS2811StripRBG is the RBG Mode
WS2811StripRBG = 0x100008
// WS2811StripGRB is the GRB Mode
WS2811StripGRB = 0x081000
// WS2811StripGBR is the GBR Mode
WS2811StripGBR = 0x080010
// WS2811StripBRG is the BRG Mode
WS2811StripBRG = 0x001008
// WS2811StripBGR is the BGR Mode
WS2811StripBGR = 0x000810
)
// Predefined fixed LED types
const (
// WS2812Strip is the WS2812 Mode
WS2812Strip = WS2811StripGRB
// SK6812Strip is the SK6812 Mode
SK6812Strip = WS2811StripGRB
// SK6812WStrip is the SK6812W Mode
SK6812WStrip = SK6812StripGRBW
) | ws2811.go | 0.587943 | 0.419648 | ws2811.go | starcoder |
package pb
import (
"fmt"
"github.com/golang/protobuf/ptypes/wrappers"
"reflect"
"strings"
)
// Strips a pointer, or pointer(-to-pointer)^* to base object if needed
func indirect(reflectValue reflect.Value) reflect.Value {
for reflectValue.Kind() == reflect.Ptr {
reflectValue = reflectValue.Elem()
}
return reflectValue
}
// Strips a pointer, slice, pointer-to-slice, slice-of-pointers,
// pointer-to-slice-of-pointers-to-pointers, etc... to base type if needed
func indirectType(reflectType reflect.Type) reflect.Type {
for reflectType.Kind() == reflect.Ptr || reflectType.Kind() == reflect.Slice {
reflectType = reflectType.Elem()
}
return reflectType
}
//----- Types, and zero values to avoid having to recreate them every time
var pStringValueType = reflect.TypeOf(&wrappers.StringValue{})
var pStringValueZeroValue = reflect.Zero(pStringValueType)
var pUInt32ValueType = reflect.TypeOf(&wrappers.UInt32Value{})
var pUInt32ValueZeroValue = reflect.Zero(pUInt32ValueType)
var exString = ""
var pStringType = reflect.TypeOf(&exString)
var pStringZeroValue = reflect.Zero(pStringType)
var exUInt32 = uint32(0)
var pUInt32Type = reflect.TypeOf(&exUInt32)
var pUInt32ZeroValue = reflect.Zero(pUInt32Type)
type typeToType struct {
from reflect.Type
to reflect.Type
}
// Cache for different conversions, dest fields in order, -1 if no dest
var fieldMapsByType = make(map[typeToType][]int)
// Convert Copies data between fields at ORM and service levels.
// Works under the assumption that any WKT fields in proto map to * fields at ORM.
func Convert(source interface{}, dest interface{}) error {
// If dest object is unaddressable, that won't work. Unfortunately, a code
// error that will only be caught at runtime
toObject := indirect(reflect.ValueOf(dest))
if toObject.CanAddr() == false {
return fmt.Errorf("Dest, type %s, is unaddressable", reflect.TypeOf(dest))
}
if indirectType(reflect.TypeOf(source)).Kind() != reflect.Struct {
return fmt.Errorf("Cannot convert a non-struct")
}
destType := toObject.Type()
fromObject := indirect(reflect.ValueOf(source))
fromType := fromObject.Type()
// Check for mapping, populate mapping if not already present
fieldMap, exists := fieldMapsByType[typeToType{fromType, destType}]
if !exists {
for i := 0; i < fromType.NumField(); i++ {
found := false
for j := 0; j < destType.NumField(); j++ {
if strings.EqualFold(fromType.Field(i).Name, destType.Field(j).Name) {
found = true
fieldMap = append(fieldMap, j)
break
}
}
// Store -1 if no dest corresponds to field
if !found {
fieldMap = append(fieldMap, -1)
}
}
fieldMapsByType[typeToType{fromType, destType}] = fieldMap
}
for i := 0; i < fromType.NumField(); i++ {
if fieldMap[i] == -1 {
continue
}
to := toObject.Field(fieldMap[i])
if to.IsValid() {
fromFieldDesc := fromType.Field(i)
fromData := fromObject.Field(i)
switch fromFieldDesc.Type {
case to.Type(): // Matching type
to.Set(fromData)
case pStringValueType: // WKT *StringValue{} --> *string
if fromData.IsNil() || !fromData.IsValid() {
to.Set(pStringZeroValue)
} else {
value := fromData.Elem().Field(0).String()
to.Set(reflect.ValueOf(&value))
}
case pStringType: // *string --> WKT *StringValue{}
if fromData.IsNil() || !fromData.IsValid() {
to.Set(pStringValueZeroValue)
} else {
strValue := fromData.Elem().String()
to.Set(reflect.ValueOf(&wrappers.StringValue{strValue}))
}
case pUInt32ValueType: // WKT *UInt32Value{} --> *uint32
if fromData.IsNil() || !fromData.IsValid() {
to.Set(pUInt32ZeroValue)
} else {
value := uint32(fromData.Elem().Field(0).Uint())
to.Set(reflect.ValueOf(&value))
}
case pUInt32Type: // *uint32 --> WKT *UInt32Value{}
if fromData.IsNil() || !fromData.IsValid() {
to.Set(pUInt32ValueZeroValue)
} else {
intValue := uint32(fromData.Elem().Uint())
to.Set(reflect.ValueOf(&wrappers.UInt32Value{intValue}))
}
//Additional WKTs to be used should be included here
default:
kind := fromFieldDesc.Type.Kind()
if kind == reflect.Slice &&
indirectType(fromFieldDesc.Type).Kind() == reflect.Struct &&
indirectType(to.Type()).Kind() == reflect.Struct { // Copy slice one at a time
len := fromData.Len()
to.Set(reflect.MakeSlice(to.Type(), len, len))
for k := 0; k < len; k++ {
dest := to.Index(k)
if dest.Kind() == reflect.Ptr {
dest.Set(reflect.New(indirectType(dest.Type())).Elem().Addr())
}
err := Convert(fromData.Index(k).Interface(), dest.Addr().Interface())
if err != nil {
fmt.Printf("%s", err.Error())
}
}
} else if kind == reflect.Struct && !fromData.IsNil() { // A nested struct
err := Convert(fromData.Interface(), to.Addr().Interface())
if err != nil {
fmt.Printf("%s", err.Error())
}
} else if kind == reflect.Int32 && to.Type().Kind() == reflect.Int32 { // Probably an enum
to.Set(reflect.ValueOf(int32(fromData.Int())))
}
}
}
}
return nil
} | pb/converter.go | 0.594434 | 0.452475 | converter.go | starcoder |
package MCTS
import (
"math/rand"
"sort"
)
// Uct is an Upper Confidence Bound Tree search through game stats for an optimal move, given a starting game state.
func Uct(state GameState, iterations uint, simulations uint, ucbC float64, playerId uint64, scorer Scorer) Move {
// Find the best move given a fixed number of state explorations.
var root = newTreeNode(nil, nil, state, ucbC)
for i := 0; i < int(iterations); i++ {
// Start at the top of the tree again.
var node = root
// Select. Find the node we wish to explore next.
// While we have complete nodes, dig deeper for a new state to explore.
for len(node.untriedMoves) == 0 && len(node.children) > 0 {
// This node has no more moves to try but it does have children.
// Move the focus to its most promising child.
node = node.selectChild()
}
// Expand.
// Can we explore more about this particular state? Are there untried moves?
if len(node.untriedMoves) > 0 {
node = node.makeRandomUntriedMove() // This creates a new child node with cloned game state.
}
// Simulation.
// From the new child, make many simulated random steps to get a fuzzy idea of how good
// the move that created the child is.
var simulatedState = node.state.Clone()
for j := 0; j < int(simulations); j++ {
// Randomize any part of the game state that is unkonwn to all the players (e.g. facedown cards).
simulatedState.RandomizeUnknowns()
// What moves can further the game state?
var availableMoves = simulatedState.AvailableMoves()
// Is the game over?
if len(availableMoves) == 0 {
break
}
// Pick a random move (could be any player).
var randomIndex = rand.Intn(len(availableMoves))
var move = availableMoves[randomIndex]
simulatedState.MakeMove(move)
}
// Backpropagate.
// Our simulated state may be good or bad in the eyes of our player of interest.
var outcome = scorer(playerId, simulatedState)
node.addOutcome(outcome) // Will internally propogate up the tree.
}
// The best move to take is going to be the root nodes most visited child.
sort.Sort(byVisits(root.children))
return root.children[0].move // Descending by visits.
} | uct.go | 0.736874 | 0.631395 | uct.go | starcoder |
package ldclient
import (
"log"
"os"
"sync"
)
// FeatureStore is an interface describing a structure that maintains the live collection of features and related objects.
// It is used by LaunchDarkly when streaming mode is enabled, and stores data returned
// by the streaming API. Custom FeatureStore implementations can be passed to the
// LaunchDarkly client via a custom Config object. LaunchDarkly provides two FeatureStore
// implementations: one backed by an in-memory map, and one backed by Redis.
// Implementations must be thread-safe.
type FeatureStore interface {
Get(kind VersionedDataKind, key string) (VersionedData, error)
All(kind VersionedDataKind) (map[string]VersionedData, error)
Init(map[VersionedDataKind]map[string]VersionedData) error
Delete(kind VersionedDataKind, key string, version int) error
Upsert(kind VersionedDataKind, item VersionedData) error
Initialized() bool
}
// InMemoryFeatureStore is a memory based FeatureStore implementation, backed by a lock-striped map.
type InMemoryFeatureStore struct {
allData map[VersionedDataKind]map[string]VersionedData
isInitialized bool
sync.RWMutex
logger Logger
}
// NewInMemoryFeatureStore creates a new in-memory FeatureStore instance.
func NewInMemoryFeatureStore(logger Logger) *InMemoryFeatureStore {
if logger == nil {
logger = log.New(os.Stderr, "[LaunchDarkly InMemoryFeatureStore]", log.LstdFlags)
}
return &InMemoryFeatureStore{
allData: make(map[VersionedDataKind]map[string]VersionedData),
isInitialized: false,
logger: logger,
}
}
// Get returns an individual object of a given type from the store
func (store *InMemoryFeatureStore) Get(kind VersionedDataKind, key string) (VersionedData, error) {
store.RLock()
defer store.RUnlock()
if store.allData[kind] == nil {
store.allData[kind] = make(map[string]VersionedData)
}
item := store.allData[kind][key]
if item == nil {
store.logger.Printf("WARN: Key: %s not found in \"%s\".", key, kind)
return nil, nil
} else if item.IsDeleted() {
store.logger.Printf("WARN: Attempted to get deleted item in \"%s\". Key: %s", kind, key)
return nil, nil
} else {
return item, nil
}
}
// All returns all the objects of a given kind from the store
func (store *InMemoryFeatureStore) All(kind VersionedDataKind) (map[string]VersionedData, error) {
store.RLock()
defer store.RUnlock()
ret := make(map[string]VersionedData)
for k, v := range store.allData[kind] {
if !v.IsDeleted() {
ret[k] = v
}
}
return ret, nil
}
// Delete removes an item of a given kind from the store
func (store *InMemoryFeatureStore) Delete(kind VersionedDataKind, key string, version int) error {
store.Lock()
defer store.Unlock()
if store.allData[kind] == nil {
store.allData[kind] = make(map[string]VersionedData)
}
items := store.allData[kind]
item := items[key]
if item == nil || item.GetVersion() < version {
deletedItem := kind.MakeDeletedItem(key, version)
items[key] = deletedItem
}
return nil
}
// Init populates the store with a complete set of versioned data
func (store *InMemoryFeatureStore) Init(allData map[VersionedDataKind]map[string]VersionedData) error {
store.Lock()
defer store.Unlock()
store.allData = make(map[VersionedDataKind]map[string]VersionedData)
for k, v := range allData {
items := make(map[string]VersionedData)
for k1, v1 := range v {
items[k1] = v1
}
store.allData[k] = items
}
store.isInitialized = true
return nil
}
// Upsert inserts or replaces an item in the store unless there it already contains an item with an equal or larger version
func (store *InMemoryFeatureStore) Upsert(kind VersionedDataKind, item VersionedData) error {
store.Lock()
defer store.Unlock()
if store.allData[kind] == nil {
store.allData[kind] = make(map[string]VersionedData)
}
items := store.allData[kind]
old := items[item.GetKey()]
if old == nil || old.GetVersion() < item.GetVersion() {
items[item.GetKey()] = item
}
return nil
}
// Initialized returns whether the store has been initialized with data
func (store *InMemoryFeatureStore) Initialized() bool {
store.RLock()
defer store.RUnlock()
return store.isInitialized
} | vendor/gopkg.in/launchdarkly/go-client.v4/feature_store.go | 0.672654 | 0.41253 | feature_store.go | starcoder |
package longpalsubstr
// Don't want to convert to float64, so let's avoid using math.Min.
// Instead, we'll create our own min() that will return lowest value between two integers.
func min(x, y int) int {
if x < y {
return x
} else {
return y
}
}
// Return an output similiar to Python's enumerate().
// Return two values: the current slice position and the current max value.
func enumerate(T []int) (int, int) {
var position, max int
for i, val := range T {
if val > curMax {
cur, curMax = i, val
}
}
return position, max
}
func longestPalindrome(s string) string {
// Handle case like input "a" or "".
if len(s) <= 1 {
return s
}
// We are going to preprocess the string from "abc" to "$a$b$c$" or "abcd" to "$a$b$c$d$".
// The resulting string will be 2*N + 1.
// This is so that our apporach works for both odd and even inputs.
// Since strings are immutable in Go, we'll create an entirely new string.
newStr, T := make([]string, len(s)*2+1), make([]int, len(s)*2+1)
orgStrCount := 0
for i := 0; i < len(newStr); i++ {
if i%2 == 0 {
newStr[i] = "$"
} else {
newStr[i] = string(s[orgStrCount])
orgStrCount++
}
}
start, end, i, newCenter := 0, 0, 0, 0
// We are evaluating four cases when we're picking a new center:
// 1. Bad - Current position is entirely contained within the current palindrome.
// 2. Bad - Current position is at the end of the input. We should break the loop in this case.
// 3. Good - Current position is proper suffix of current middle's palindrome and its left side mirror is a proper prefix.
// 4. Bad - Current position is proper suffix of current palindrome, but its left side mirror extends beyond the left side of the current middle's palindrome. Selecting this as the new middle would not extend at all.
// i = current center
for i < len(newStr) {
// Start and end positions represent length of current palindrome at center "i".
for start > 0 && end < len(newStr)-1 && newStr[start-1] == newStr[end+1] {
start--
end++
}
// If end - start = 0, then +1 gives us a floor of 1.
// If end - start > 0, then +1 accounts for a floor of 3.
T[i] = end - start + 1
// Case 2, as mentioned above.
if end == len(newStr)-1 {
break
}
// Set possible new center based on start + end.
// If even, add 1 to move off of '$' (our preprocess character).
// If odd, add 0 since we're already on an original character.
if i%2 == 0 {
newCenter = end + 1
} else {
newCenter = end
}
// Is there a better center?
// Evaluate as we mirror the left side of the current middle in our solution array.
for j := i + 1; j <= end; j++ {
// Pick either left side T[i-(j-i)] of current middle "i" or "j" to the end.
// (end-j)*2+1 This handles case where left side mirror extends beyond right edge of current palindrome
T[j] = min(T[i-(j-i)], (end-j)*2+1)
// Evaluate the following criteria:
// Does point to potential new enter (j) plus half of the left side mirror (we're already accounting for the other half in "j") equal the end of current palindrome?
// If yes, then we've found case 3 mentioned above.
if j+T[i-(j-i)]/2 == end {
newCenter = j
break
}
}
// Set "i" as the new center.
i = newCenter
// Move backward from "i" based on length of current center to determine "start".
start = i - T[i]/2
// Move backward from "i" based on length of current center to determine "end".
end = i + T[i]/2
}
centerIndex, maxLen := enumerate(T)
// centerIndex/2
// Remember that T is based on preprocessed string.
// We need to halve the index to get the actual position in the original input string.
// maxLen/2/2
// We then we want to move back half of the length.
// Again, need to halve it to get the actual length of the original input string.
start = centerIndex/2 - maxLen/2/2
// Substring from start to start plus length of result (again halved to convert to size of input string).
return s[start : start+maxLen/2]
} | longpalsubstr/longpalsubstr.go | 0.790449 | 0.58347 | longpalsubstr.go | starcoder |
package talibcdl
type candleSetting struct {
rangeType rangeType
avgPeriod int
factor float64
}
var (
// real body is long when it's longer than the average of the 10 previous
// candles' real body
settingBodyLong = candleSetting{rangeTypeRealBody, 10, 1.0}
// real body is very long when it's longer than 3 times the average of the 10
// previous candles' real body
settingBodyVeryLong = candleSetting{rangeTypeRealBody, 10, 3.0}
// real body is short when it's shorter than the average of the 10 previous
// candles' real bodies
settingBodyShort = candleSetting{rangeTypeRealBody, 10, 1.0}
// real body is like doji's body when it's shorter than 10% the average of the
// 10 previous candles' high-low range
settingBodyDoji = candleSetting{rangeTypeHighLow, 10, 0.1}
// shadow is long when it's longer than the real body
settingShadowLong = candleSetting{rangeTypeRealBody, 0, 1.0}
// shadow is very long when it's longer than 2 times the real body
settingShadowVeryLong = candleSetting{rangeTypeRealBody, 0, 2.0}
// shadow is short when it's shorter than half the average of the 10 previous
// candles' sum of shadows
settingShadowShort = candleSetting{rangeTypeShadows, 10, 1.0}
// shadow is very short when it's shorter than 10% the average of the 10
// previous candles' high-low range
settingShadowVeryShort = candleSetting{rangeTypeHighLow, 10, 0.1}
// when measuring distance between parts of candles or width of gaps
// "near" means "<= 20% of the average of the 5 previous candles' high-low range"
settingNear = candleSetting{rangeTypeHighLow, 5, 0.2}
// when measuring distance between parts of candles or width of gaps
// "far" means ">= 60% of the average of the 5 previous candles' high-low range"
settingFar = candleSetting{rangeTypeHighLow, 5, 0.6}
// when measuring distance between parts of candles or width of gaps
// "equal" means "<= 5% of the average of the 5 previous candles' high-low range"
settingEqual = candleSetting{rangeTypeHighLow, 5, 0.05}
)
type rangeType int
const (
rangeTypeRealBody rangeType = iota
rangeTypeHighLow
rangeTypeShadows
)
func (rt rangeType) rangeOf(s enhancedSeries, i int) float64 {
switch rt {
case rangeTypeRealBody:
return s.realBody(i)
case rangeTypeHighLow:
return s.highLowRange(i)
case rangeTypeShadows:
return s.upperShadow(i) + s.lowerShadow(i)
default:
return 0
}
}
func intMax(a, b int) int {
if a > b {
return a
}
return b
}
func intMin(a, b int) int {
if a < b {
return a
}
return b
}
const DefaultFloat64 = -4e+37 | global.go | 0.502197 | 0.51251 | global.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.