/*
Package xtrace provides the ability to generate a trace of wrapped errors from xerrors. This is facilitated through the
Tracer type, the output of which can be customized with a TraceFormatter. For more information on how to wrap errors,
see https://godoc.org/golang.org/x/xerrors.
Basic Usage
The following example will print a trace of all of the wrapped errors to stderr.
package main
import (
"errors"
"github.com/ollien/xtrace"
"golang.org/x/xerrors"
)
func main() {
baseErr := errors.New("aw shucks, something broke")
err2 := xerrors.Errorf("things went wrong!: %w", baseErr)
traceErr := xtrace.Trace(err2)
if traceErr != nil {
panic("can not trace")
}
// aw shucks, something broke
// things went wrong!
// github.com/ollien/xtrace.ExampleTracer_Format
// /home/nick/Documents/code/xtrace/example.go:12
}
If more customization is desired, one can use a Tracer. One of Tracer's key features is its compatibility with fmt.
// ...
tracer, err := xtrace.NewTracer(err2)
if err != nil {
panic("can not make tracer")
}
fmt.Printf("%v", tracer)
// aw shucks, something broke
// things went wrong!
You can also add %+v for more detailed information.
// ...
fmt.Printf("%+v", tracer)
// aw shucks, something broke
// things went wrong!
// github.com/ollien/xtrace.ExampleTracer_Format
// /home/nick/Documents/code/xtrace/example.go:18
Using fmt is not required, though. You may instead read the errors one at a time from the trace with the ReadNext and
Read functions.
// ...
output, err := tracer.ReadNext()
if err != nil {
panic("can not read from tracer")
}
fmt.Println(output)
// aw shucks, something broke
Customization
All output of a Tracer can be customized. By default, the Tracer will ensure that all messages end in a newline. If you
want more customization than that, then you can create your own TraceFormatter.
For instance, to make all of your errors in all caps, you can use the following TraceFormatter.
type capsFormatter struct{}
func (formatter capsFormatter) FormatTrace(previous []string, message string) string {
return strings.ToUpper(message)
}
You can then set a Tracer's TraceFormatter like so
tracer, err := NewTracer(err, Formatter(capsFormatter{}))
*/
package xtrace
package diff
import (
"image"
"math"
"go.skia.org/infra/go/metrics2"
)
const (
	// CombinedMetric is the id of the metric combining pixel percentage and RGBA distance.
	CombinedMetric = "combined"
	// PercentMetric is the id of the metric reporting the percentage of differing pixels.
	PercentMetric = "percent"
	// PixelMetric is the id of the metric reporting the absolute number of differing pixels.
	PixelMetric = "pixel"
)

// MetricFn is the signature a custom diff metric has to implement.
type MetricFn func(*DiffMetrics, *image.NRGBA, *image.NRGBA) float32

// metrics contains the custom diff metrics, keyed by metric id.
var metrics = map[string]MetricFn{
	CombinedMetric: CombinedDiffMetric,
	PercentMetric:  percentDiffMetric,
	PixelMetric:    pixelDiffMetric,
}

// diffMetricIds contains the ids of all diff metrics.
var diffMetricIds []string

func init() {
	// Extract the ids of the diffmetrics once so later lookups are cheap.
	diffMetricIds = make([]string, 0, len(metrics))
	for k := range metrics {
		diffMetricIds = append(diffMetricIds, k)
	}
}

// GetDiffMetricIDs returns the ids of the available diff metrics.
func GetDiffMetricIDs() []string {
	return diffMetricIds
}
// ComputeDiffMetrics computes and returns the diff metrics between two given images.
func ComputeDiffMetrics(leftImg *image.NRGBA, rightImg *image.NRGBA) *DiffMetrics {
	defer metrics2.FuncTimer().Stop()
	// PixelDiff fills in the base fields (NumDiffPixels, PixelDiffPercent,
	// MaxRGBADiffs, ...). The error is deliberately ignored here — presumably
	// PixelDiff still returns a usable result; TODO confirm its failure modes.
	ret, _ := PixelDiff(leftImg, rightImg)

	// Calculate every registered metric and attach the results by metric id.
	diffs := make(map[string]float32, len(diffMetricIds))
	for _, id := range diffMetricIds {
		diffs[id] = metrics[id](ret, leftImg, rightImg)
	}
	ret.Diffs = diffs
	return ret
}

// CombinedDiffMetric returns a value in [0, 1] that represents how large
// the diff is between two images. Implements the MetricFn signature.
// TODO(lovisolo): Remove references to this function outside of this file; make code depend on the
// fields in diff.DiffMetrics instead (specifically, dm.Diffs[CombinedMetric]).
func CombinedDiffMetric(dm *DiffMetrics, _ *image.NRGBA, _ *image.NRGBA) float32 {
	// Turn maxRGBA into a percent by taking the root mean square difference from
	// [0, 0, 0, 0].
	sum := 0.0
	for _, c := range dm.MaxRGBADiffs {
		sum += float64(c) * float64(c)
	}
	normalizedRGBA := math.Sqrt(sum/float64(len(dm.MaxRGBADiffs))) / 255.0
	// We take the sqrt of (pixelDiffPercent * normalizedRGBA) to straighten out
	// the curve, i.e. think about what a plot of x^2 would look like in the
	// range [0, 1].
	return float32(math.Sqrt(float64(dm.PixelDiffPercent) * normalizedRGBA))
}
// percentDiffMetric returns pixel percent as the metric. Implements the MetricFn signature.
func percentDiffMetric(basic *DiffMetrics, _, _ *image.NRGBA) float32 {
return basic.PixelDiffPercent
}
// pixelDiffMetric returns the number of different pixels as the metric. Implements the MetricFn signature.
func pixelDiffMetric(basic *DiffMetrics, _, _ *image.NRGBA) float32 {
return float32(basic.NumDiffPixels)
} | golden/go/diff/metrics.go | 0.780495 | 0.419351 | metrics.go | starcoder |
package trial
import (
"fmt"
"reflect"
"strconv"
)
// Input is the input value given to the trial test function.
type Input struct {
	value reflect.Value // reflected view of the wrapped value
}

// newInput wraps an arbitrary value in an Input.
func newInput(i interface{}) Input {
	return Input{value: reflect.ValueOf(i)}
}

// String returns the input formatted as a string. It panics for composite
// kinds (struct, pointer, slice, map, array, chan) that have no obvious
// scalar string representation.
func (in Input) String() string {
	switch in.value.Kind() {
	case reflect.Struct, reflect.Ptr, reflect.Slice, reflect.Map, reflect.Array, reflect.Chan:
		panic("unsupported string conversion " + in.value.Kind().String())
	default:
		return fmt.Sprintf("%v", in.Interface())
	}
}
// Bool returns the input as a bool. String inputs are parsed with
// strconv.ParseBool; any other non-bool kind panics.
func (in Input) Bool() bool {
	if in.value.Kind() != reflect.String {
		return in.value.Bool()
	}
	parsed, err := strconv.ParseBool(in.value.String())
	if err != nil {
		panic("invalid bool " + in.value.Interface().(string))
	}
	return parsed
}

// Int returns the input as an int. String inputs are parsed with
// strconv.Atoi; any other non-integer kind panics.
func (in Input) Int() int {
	if in.value.Kind() != reflect.String {
		return int(in.value.Int())
	}
	parsed, err := strconv.Atoi(in.value.String())
	if err != nil {
		panic("invalid int " + in.value.Interface().(string))
	}
	return parsed
}
// Uint returns the input as a uint. Int kinds are converted, string inputs
// are parsed with strconv.ParseUint, and anything else is read as a uint.
func (in Input) Uint() uint {
	kind := in.value.Kind()
	if kind == reflect.Int {
		return uint(in.value.Int())
	}
	if kind == reflect.String {
		parsed, err := strconv.ParseUint(in.value.String(), 10, 64)
		if err != nil {
			panic("invalid uint " + in.value.Interface().(string))
		}
		return uint(parsed)
	}
	return uint(in.value.Uint())
}

// Interface returns the current value of input, or nil when the Input holds
// no value at all.
func (in Input) Interface() interface{} {
	//TODO: check for nil
	if in.value.Kind() == reflect.Invalid {
		return nil
	}
	return in.value.Interface()
}

// Float64 returns the input as a float64. Int kinds are converted, string
// inputs are parsed with strconv.ParseFloat, and anything else is read as a
// float.
func (in Input) Float64() float64 {
	kind := in.value.Kind()
	if kind == reflect.Int {
		return float64(in.value.Int())
	}
	if kind == reflect.String {
		parsed, err := strconv.ParseFloat(in.value.String(), 64)
		if err != nil {
			panic("invalid float64 " + in.value.Interface().(string))
		}
		return parsed
	}
	return in.value.Float()
}
// Slice returns the input value of the index of a slice/array. panics if non slice value
func (in Input) Slice(i int) Input {
// use reflect to access any slice type []int, etc
v := in.value.Index(i)
if v.Kind() == reflect.Interface {
return Input{value: reflect.ValueOf(v.Interface())}
}
return Input{value: v}
}
// Map returns the value for the provided key, panics on non map value
func (in Input) Map(key interface{}) Input {
// use reflection to access any map type map[string]string, etc
return Input{value: in.value.MapIndex(reflect.ValueOf(key))}
} | input.go | 0.587588 | 0.509398 | input.go | starcoder |
package matrix
import "math/rand"
/*
A sparse matrix based on go's map datastructure.
*/
type SparseMatrix struct {
	matrix
	// elements maps a flattened index (i*step + j + offset) to its value;
	// absent keys read as zero.
	elements map[int]float64
	// offset to start of matrix s.t. idx = i*cols + j + offset
	// offset = starting row * step + starting col
	offset int
	// step is the row width of the backing storage, analogous to dense step.
	// For a submatrix view step may exceed cols.
	step int
}
// Get returns the element at row i, column j. Indices wrap modulo the matrix
// dimensions, so negative indices count back from the end. Entries missing
// from the backing map read as zero.
func (A *SparseMatrix) Get(i, j int) float64 {
	i = i % A.rows
	if i < 0 {
		// Go's % keeps the dividend's sign, so shift a negative remainder up
		// into [0, rows). The previous code computed A.rows - i, which for
		// negative i lands outside the matrix.
		i = A.rows + i
	}
	j = j % A.cols
	if j < 0 {
		j = A.cols + j
	}
	return A.elements[i*A.step+j+A.offset]
}
/*
Looks up an element given its element index. Absent elements read as 0.
*/
func (A *SparseMatrix) GetIndex(index int) float64 {
	x, ok := A.elements[index]
	if !ok {
		return 0
	}
	return x
}

/*
Turn an element index into a row number.
*/
func (A *SparseMatrix) GetRowIndex(index int) (i int) {
	// Divide by step (the backing row width), not cols: for a submatrix view
	// step and cols differ, and dividing by cols reported the wrong row.
	// This now matches GetRowColIndex.
	i = (index - A.offset) / A.step
	return
}

/*
Turn an element index into a column number.
*/
func (A *SparseMatrix) GetColIndex(index int) (j int) {
	// Reduce modulo step for the same reason as GetRowIndex.
	j = (index - A.offset) % A.step
	return
}
/*
Turn an element index into a row and column number.
*/
func (A *SparseMatrix) GetRowColIndex(index int) (i int, j int) {
	rel := index - A.offset
	i = rel / A.step
	j = rel % A.step
	return
}
// Set stores v at row i, column j. Indices wrap modulo the matrix dimensions,
// so negative indices count back from the end. Storing zero removes the key
// so the backing map stays sparse.
func (A *SparseMatrix) Set(i int, j int, v float64) {
	i = i % A.rows
	if i < 0 {
		// Shift a negative remainder into [0, rows); see Get for details.
		// The previous A.rows - i produced an out-of-range index.
		i = A.rows + i
	}
	j = j % A.cols
	if j < 0 {
		j = A.cols + j
	}
	// v == 0 results in removal of key from underlying map
	if v == 0 {
		delete(A.elements, i*A.step+j+A.offset)
	} else {
		A.elements[i*A.step+j+A.offset] = v
	}
}
/*
Sets an element given its index. A zero value deletes the entry so the
backing map stays sparse.
*/
func (A *SparseMatrix) SetIndex(index int, v float64) {
	if v != 0 {
		A.elements[index] = v
		return
	}
	// v == 0 results in removal of key from underlying map
	delete(A.elements, index)
}
/*
A channel that will carry the indices of non-zero elements. The channel is
fed from a separate goroutine and closed after the last index, so it is safe
to range over. Indices outside this (possibly sub-) matrix are skipped.
*/
func (A *SparseMatrix) Indices() (out chan int) {
	//maybe thread the populating?
	out = make(chan int)
	go func(o chan int) {
		for index := range A.elements {
			i, j := A.GetRowColIndex(index)
			// Only emit indices inside this view: a shared backing map may
			// hold entries belonging to a parent matrix.
			if 0 <= i && i < A.rows && 0 <= j && j < A.cols {
				o <- index
			}
		}
		close(o)
	}(out)
	return
}
/*
Get a matrix representing a subportion of A. Changes to the new matrix will be
reflected in A, since the backing element map is shared.
*/
func (A *SparseMatrix) GetMatrix(i, j, rows, cols int) (subMatrix *SparseMatrix) {
	if i < 0 || j < 0 || i+rows > A.rows || j+cols > A.cols {
		// Clamp the requested window to A's bounds.
		i = maxInt(0, i)
		j = maxInt(0, j)
		rows = minInt(A.rows-i, rows)
		// Fixed: this line previously assigned the clamped column count to
		// rows, clobbering the row count and leaving cols unclamped.
		cols = minInt(A.cols-j, cols)
	}
	subMatrix = new(SparseMatrix)
	subMatrix.rows = rows
	subMatrix.cols = cols
	// idx = i'*step + j' + offset' still addresses the shared element map.
	subMatrix.offset = (i+A.offset/A.step)*A.step + (j + A.offset%A.step)
	subMatrix.step = A.step
	subMatrix.elements = A.elements
	return
}
/*
Gets a reference to a column vector (an A.rows x 1 view of column j).
*/
func (A *SparseMatrix) GetColVector(j int) *SparseMatrix {
	// Fixed: the width argument was j+1, which returned a j+1-column-wide
	// submatrix instead of a single-column vector.
	return A.GetMatrix(0, j, A.rows, 1)
}
/*
Gets a reference to a row vector (a 1 x A.cols view of row i).
*/
func (A *SparseMatrix) GetRowVector(i int) *SparseMatrix {
	return A.GetMatrix(i, 0, 1, A.cols)
}
/*
Creates a new matrix [A B], i.e. B's columns appended to the right of A's.
Both operands must have the same number of rows.
*/
func (A *SparseMatrix) Augment(B *SparseMatrix) (*SparseMatrix, error) {
	if A.rows != B.rows {
		return nil, ErrorDimensionMismatch
	}
	C := ZerosSparse(A.rows, A.cols+B.cols)
	for index, value := range A.elements {
		r, c := A.GetRowColIndex(index)
		C.Set(r, c, value)
	}
	for index, value := range B.elements {
		r, c := B.GetRowColIndex(index)
		// B's columns land to the right of A's.
		C.Set(r, c+A.cols, value)
	}
	return C, nil
}

/*
Creates a new matrix [A;B], where A is above B. Both operands must have the
same number of columns.
*/
func (A *SparseMatrix) Stack(B *SparseMatrix) (*SparseMatrix, error) {
	if A.cols != B.cols {
		return nil, ErrorDimensionMismatch
	}
	C := ZerosSparse(A.rows+B.rows, A.cols)
	for index, value := range A.elements {
		r, c := A.GetRowColIndex(index)
		C.Set(r, c, value)
	}
	for index, value := range B.elements {
		r, c := B.GetRowColIndex(index)
		// B's rows land below A's.
		C.Set(r+A.rows, c, value)
	}
	return C, nil
}
/*
Returns a copy with all zeros above the diagonal (the lower triangle).
*/
func (A *SparseMatrix) L() *SparseMatrix {
	lower := ZerosSparse(A.rows, A.cols)
	for index, value := range A.elements {
		r, c := A.GetRowColIndex(index)
		if r >= c {
			lower.Set(r, c, value)
		}
	}
	return lower
}

/*
Returns a copy with all zeros below the diagonal (the upper triangle).
*/
func (A *SparseMatrix) U() *SparseMatrix {
	upper := ZerosSparse(A.rows, A.cols)
	for index, value := range A.elements {
		r, c := A.GetRowColIndex(index)
		if r <= c {
			upper.Set(r, c, value)
		}
	}
	return upper
}

// Copy returns a new SparseMatrix with the same shape and an independent
// element map.
func (A *SparseMatrix) Copy() *SparseMatrix {
	dup := ZerosSparse(A.rows, A.cols)
	for index, value := range A.elements {
		dup.elements[index] = value
	}
	return dup
}
// ZerosSparse returns a rows x cols sparse matrix with no non-zero entries.
func ZerosSparse(rows int, cols int) *SparseMatrix {
	Z := new(SparseMatrix)
	Z.rows = rows
	Z.cols = cols
	Z.offset = 0
	Z.step = cols
	Z.elements = map[int]float64{}
	return Z
}

/*
Creates a matrix and puts a standard normal in n random elements, with replacement.
*/
func NormalsSparse(rows int, cols int, n int) *SparseMatrix {
	N := ZerosSparse(rows, cols)
	for count := 0; count < n; count++ {
		// With replacement: the same cell may be drawn (and overwritten) twice.
		N.Set(rand.Intn(rows), rand.Intn(cols), rand.NormFloat64())
	}
	return N
}

/*
Create a sparse matrix using the provided map as its backing.
*/
func MakeSparseMatrix(elements map[int]float64, rows int, cols int) *SparseMatrix {
	M := ZerosSparse(rows, cols)
	M.elements = elements
	return M
}
/*
Convert this sparse matrix into a dense matrix.
*/
func (A *SparseMatrix) DenseMatrix() *DenseMatrix {
B := Zeros(A.rows, A.cols)
for index, value := range A.elements {
i, j := A.GetRowColIndex(index)
B.Set(i, j, value)
}
return B
}
func (A *SparseMatrix) SparseMatrix() *SparseMatrix {
return A.Copy()
}
func (A *SparseMatrix) String() string { return String(A) } | sparse.go | 0.690559 | 0.551453 | sparse.go | starcoder |
package cronzilla
import (
"context"
"sync"
"time"
)
// Wrangler is a goro-safe aggregator for Tasks.
type Wrangler struct {
	// tasks maps task name (string) to wrangledTask.
	tasks sync.Map
}

// wrangledTask pairs a Task with its error channel and the cancel func that
// stops its run loop.
type wrangledTask struct {
	task       *Task
	errorChan  chan error
	cancelfunc context.CancelFunc
}

// List will return a string array of Task names.
func (w *Wrangler) List() []string {
	tasks := make([]string, 0)
	w.tasks.Range(func(name, wtask interface{}) bool {
		tasks = append(tasks, name.(string))
		return true
	})
	return tasks
}
// ListStale will return a string array of Task names that have completed or crashed.
func (w *Wrangler) ListStale() []string {
	stale := make([]string, 0)
	w.tasks.Range(func(name, wtask interface{}) bool {
		if wtask.(wrangledTask).task.IsDone() {
			stale = append(stale, name.(string))
		}
		return true
	})
	return stale
}

// Close will cancel all of the tasks being wrangled. The Wrangler may be
// reused after Close is called.
func (w *Wrangler) Close() {
	w.tasks.Range(func(name, wtask interface{}) bool {
		w.tasks.Delete(name)
		wtask.(wrangledTask).cancelfunc()
		return true
	})
}

// Clean will remove completed or crashed tasks, returning how many were removed.
func (w *Wrangler) Clean() int {
	removed := 0
	w.tasks.Range(func(name, wtask interface{}) bool {
		if wtask.(wrangledTask).task.IsDone() {
			removed++
			w.tasks.Delete(name)
		}
		return true
	})
	return removed
}

// Count returns the current number of tasks being wrangled.
func (w *Wrangler) Count() int {
	total := 0
	w.tasks.Range(func(name, wtask interface{}) bool {
		total++
		return true
	})
	return total
}

// CountStale returns the current number of wrangled tasks that have completed
// or crashed.
func (w *Wrangler) CountStale() int {
	stale := 0
	w.tasks.Range(func(name, wtask interface{}) bool {
		if wtask.(wrangledTask).task.IsDone() {
			stale++
		}
		return true
	})
	return stale
}
// Delete will cancel and remove the named task from the Wrangler.
func (w *Wrangler) Delete(name string) {
	if existing, ok := w.tasks.Load(name); ok {
		existing.(wrangledTask).cancelfunc()
		w.tasks.Delete(name)
	}
}

// Exists returns bool if the specified Task exists.
func (w *Wrangler) Exists(name string) bool {
	_, found := w.tasks.Load(name)
	return found
}

// AddEvery will include the named task to run every so often, returning an
// error channel to listen on.
func (w *Wrangler) AddEvery(name string, todo TaskFunc, every time.Duration) <-chan error {
	errs := make(chan error, 1)
	w.add(name, todo, every, time.Time{}, errs)
	return errs
}

// AddAt will include the named task to run specifically at the specified time,
// once, returning an error channel to listen on.
func (w *Wrangler) AddAt(name string, todo TaskFunc, at time.Time) <-chan error {
	errs := make(chan error, 1)
	w.add(name, todo, 0, at, errs)
	return errs
}
// add does the heavy lifting of creating a Task, wrangledTask, dealing with dupes, and running
func (w *Wrangler) add(name string, todo TaskFunc, every time.Duration, at time.Time, errorChan chan error) {
task := Task{
Todo: todo,
Every: every,
At: at,
}
ctx, cancelfunc := context.WithCancel(context.Background())
wt := wrangledTask{
task: &task,
errorChan: errorChan,
cancelfunc: cancelfunc,
}
old, loaded := w.tasks.LoadOrStore(name, wt)
if loaded {
old.(wrangledTask).cancelfunc()
w.tasks.Delete(name)
w.tasks.Store(name, wt)
}
go task.Run(ctx, errorChan)
} | wrangler.go | 0.526343 | 0.414306 | wrangler.go | starcoder |
package query
import "github.com/google/gapid/test/robot/search"
// Builder is the type used to allow fluent construction of search queries.
// Builder values are immutable: every method constructs and returns a new
// Builder wrapping a new expression.
type Builder struct {
	e *search.Expression
}

// Expression creates a builder from a search expression.
func Expression(e *search.Expression) Builder {
	return Builder{e: e}
}

// Value creates a builder from a value.
// The type of expression will be inferred from the type of the value.
func Value(value interface{}) Builder {
	return Expression(exprValue(value))
}

// Expression returns the content of the builder as a search expression.
func (b Builder) Expression() *search.Expression {
	return b.e
}

// Query returns the content of the builder as a completed search query.
func (b Builder) Query() *search.Query {
	return &search.Query{Expression: b.Expression()}
}

// Bool builds a boolean literal search expression.
func Bool(value bool) Builder {
	return Expression(exprBool(value))
}

// String builds a string literal search expression.
func String(value string) Builder {
	return Expression(exprString(value))
}

// Signed builds a signed integer literal search expression.
func Signed(value int64) Builder {
	return Expression(exprSigned(value))
}

// Unsigned builds an unsigned integer literal search expression.
func Unsigned(value uint64) Builder {
	return Expression(exprUnsigned(value))
}

// Double builds a floating point literal search expression.
func Double(value float64) Builder {
	return Expression(exprDouble(value))
}

// Name builds a root name lookup search expression.
func Name(name string) Builder {
	return Expression(exprName(name))
}

// And builds a search expression that is the "and" of the lhs and rhs.
func (lhs Builder) And(rhs Builder) Builder {
	return Expression(exprAnd(lhs.Expression(), rhs.Expression()))
}

// Or builds a search expression that is the "or" of the lhs and rhs.
func (lhs Builder) Or(rhs Builder) Builder {
	return Expression(exprOr(lhs.Expression(), rhs.Expression()))
}

// Equal builds a search expression that compares the lhs and rhs for equality.
func (lhs Builder) Equal(rhs Builder) Builder {
	return Expression(exprEqual(lhs.Expression(), rhs.Expression()))
}

// Less builds a search expression that tests whether the lhs is less than the rhs.
func (lhs Builder) Less(rhs Builder) Builder {
	return Expression(exprLess(lhs.Expression(), rhs.Expression()))
}

// LessOrEqual builds a search expression that tests whether the lhs is less than or equal to the rhs.
func (lhs Builder) LessOrEqual(rhs Builder) Builder {
	return Expression(exprLessOrEqual(lhs.Expression(), rhs.Expression()))
}

// Greater builds a search expression that tests whether the lhs is greater than the rhs.
func (lhs Builder) Greater(rhs Builder) Builder {
	return Expression(exprGreater(lhs.Expression(), rhs.Expression()))
}

// GreaterOrEqual builds a search expression that tests whether the lhs is greater than or equal to the rhs.
func (lhs Builder) GreaterOrEqual(rhs Builder) Builder {
	return Expression(exprGreaterOrEqual(lhs.Expression(), rhs.Expression()))
}

// Subscript builds a search expression that applies the key as a subscript to the value.
func (value Builder) Subscript(key Builder) Builder {
	return Expression(exprSubscript(value.Expression(), key.Expression()))
}

// Regex builds a search expression that tests whether value matches the supplied regex pattern.
func (value Builder) Regex(pattern string) Builder {
	return Expression(exprRegex(value.Expression(), pattern))
}

// Member builds a search expression that looks up the named member of the object.
func (object Builder) Member(name string) Builder {
	return Expression(exprMember(object.Expression(), name))
}
// Not builds a search expression that applies a boolean not to the supplied rhs.
func Not(rhs Builder) Builder {
return Expression(exprNot(rhs.Expression()))
} | test/robot/search/query/builder.go | 0.911906 | 0.714342 | builder.go | starcoder |
package queue
import "time"
// References is a dictionary of additional SQL columns to set.
// Normally this dictionary contains references to other entities
// e.g. datasource_id, table_id, schedule_id, etc.
// So, the SQL table can have the `DELETE CASCADE` setting.
type References map[string]interface{}

// GetNamesAndValues returns a slice of column names and a slice of values accordingly.
// It's to easily use it in the SQL builder. A nil receiver yields nil slices.
func (a References) GetNamesAndValues() (keys []string, values []interface{}) {
	if a == nil {
		return keys, values
	}
	l := len(a)
	keys, values = make([]string, 0, l), make([]interface{}, 0, l)
	for k, v := range a {
		keys = append(keys, k)
		values = append(values, v)
	}
	return keys, values
}

// Progress is a serialized status object that indicates the status of the task.
// For example, it can be an object that contains the amount of processed bytes.
// The actual underlying type depends on the task type and consumers must serialize the value.
type Progress []byte

// Spec is a serialized specification object used to set the task parameters.
// The actual underlying type depends on the task type and consumers must serialize the value.
type Spec []byte

// TaskType is a type which specifies what kind of task it is.
type TaskType string

// String returns a string value of the task type.
func (t TaskType) String() string {
	return string(t)
}

// TaskStatus : Current state of the task
type TaskStatus string

// List of TaskStatus
const (
	// Waiting task is waiting for being picked up
	Waiting TaskStatus = "waiting"
	// Running task is currently running
	Running TaskStatus = "running"
	// Cancelled task is canceled by the user
	// nolint: misspell // the initial spelling was with double 'l' now we have to stick with it
	Cancelled TaskStatus = "cancelled"
	// Finished task is successfully finished
	Finished TaskStatus = "finished"
	// Failed task is failed with an error
	Failed TaskStatus = "failed"
)

// TaskBase contains basic fields for a task.
type TaskBase struct {
	// Queue is the queue to which the task belongs
	Queue string
	// Type holds a task type identifier
	Type TaskType
	// Spec contains the task specification based on type of the task.
	Spec Spec
}

// TaskEnqueueRequest contains fields required for adding a task to the queue.
type TaskEnqueueRequest struct {
	TaskBase
	// References contain names and values for additional
	// SQL columns to set external references for a task for easy clean up
	References References
}

// TaskScheduleRequest contains fields required for scheduling a task.
type TaskScheduleRequest struct {
	TaskBase
	// CronSchedule is the schedule expression in cron syntax that defines
	// when the task should be executed.
	CronSchedule string
	// References contain names and values for additional
	// SQL columns to set external references for a schedule for easy clean up
	References References
}
// Task represents a task in the queue.
type Task struct {
	TaskBase
	// ID is the id of the task
	ID string
	// Status is the current status of the task
	Status TaskStatus
	// Progress is used for workers to update the status of the task.
	// For example, bytes processed.
	// The actual underlying type is specific for each task type.
	Progress Progress
	// CreatedAt is when the task was initially put in the queue
	CreatedAt time.Time
	// UpdatedAt is when the task was last updated
	UpdatedAt time.Time
	// StartedAt is when a worker picked up the task
	StartedAt *time.Time
	// FinishedAt is when the task was finished being processed
	FinishedAt *time.Time
	// LastHeartbeatAt provides a way to ensure the task is still being processed and hasn't failed.
	LastHeartbeatAt *time.Time
}
}
package ols
import (
"github.com/gonum/floats"
"github.com/gonum/matrix/mat64"
)
// Model handles the ordinary least squares regression. It holds the design
// matrix x (with an optional leading intercept column of ones) and the
// dependent variable y.
type Model struct {
	x            *mat64.Dense
	y            *mat64.Vector // one-dimensional
	hasIntercept bool
	cols         int // columns of x, including the intercept column when present
	rows         int
}

// NewModel creates a new model with an intercept by default.
func NewModel(x, y mat64.Matrix) *Model {
	return NewModelWithIntercept(x, y, true)
}

// NewModelWithIntercept creates a new OLS model with or without an intercept.
// x or y can be nil, just needs to be updated before training.
func NewModelWithIntercept(x, y mat64.Matrix, intercept bool) *Model {
	m := new(Model)
	m.hasIntercept = intercept
	m.SetX(x)
	m.SetY(y)
	return m
}

// SetX replaces the independent variables in the model. When the model has an
// intercept, a leading column of ones is prepended to the data.
func (m *Model) SetX(data mat64.Matrix) {
	if data == nil {
		return
	}
	rows, cols := data.Dims()
	// offset is 1 when column 0 is reserved for the intercept.
	offset := 0
	if m.hasIntercept {
		offset = 1
	}
	m.cols = cols + offset
	m.rows = rows
	m.x = mat64.NewDense(rows, cols+offset, nil)
	for c := 0; c < cols+offset; c++ {
		if c == 0 && m.hasIntercept {
			// Fill the intercept column with ones.
			intercept := make([]float64, rows)
			floats.AddConst(1.0, intercept)
			m.x.SetCol(0, intercept)
		} else {
			m.x.SetCol(c, column(data, c-offset))
		}
	}
}

// SetY replaces the dependent variable.
// NOTE: only uses first column of the matrix.
func (m *Model) SetY(y mat64.Matrix) {
	if y == nil {
		return
	}
	rows, _ := y.Dims()
	m.y = mat64.NewVector(rows, nil)
	for r := 0; r < rows; r++ {
		m.y.SetVec(r, y.At(r, 0))
	}
}

// Dims returns the dimensions of the design matrix: the number of rows
// (observations) followed by the number of columns (independent variables
// plus the intercept column, when present).
func (m *Model) Dims() (int, int) {
	return m.x.Dims()
}

// Train solves the normal equations, returning the coefficient estimates
// (XᵀX)⁻¹Xᵀy as a cols x 1 matrix.
func (m *Model) Train() *mat64.Dense {
	xt := m.x.T()
	sq := mat64.NewDense(m.cols, m.cols, nil)
	sq.Mul(xt, m.x)
	sq.Inverse(sq)
	res := mat64.NewDense(m.cols, 1, nil)
	res.Product(sq, xt, m.y)
	return res
}
func column(m mat64.Matrix, c int) []float64 {
rows, _ := m.Dims()
col := make([]float64, rows)
mat64.Col(col, c, m)
return col
}
func sum(data []float64) float64 {
return floats.Sum(data)
} | ols.go | 0.786254 | 0.541348 | ols.go | starcoder |
package graph
import (
hash "github.com/wednesday-solutions/golang-algorithm-club/pkg/datastructures/hashtable"
queue "github.com/wednesday-solutions/golang-algorithm-club/pkg/datastructures/queue"
)
/*
Using Adjacency list (Object oriented model) - https://en.wikipedia.org/wiki/Adjacency_list
*/
type (
	// Graph A Graph, need a start vertex pointer for the methods
	Graph struct {
		start *Vertex
	}
	// Interface BFS and DFS traversal methods on the graph
	Interface interface {
		TraverseBreadthFirst() string
		TraverseDepthFirst() string
	}
	// Vertex represents each vertex in the graph
	Vertex struct {
		value string
		edges []Edge
	}
	// Edge Only one end of the edge is stored
	Edge struct {
		end *Vertex
	}
)

/*
 * 1. Get the hashed value of the current vertex value.
 * 2. If not visited, append to result, add the edge vertices to the visit queue
 * 3. Insert each edge to the hashmap with value = false and add to the visit queue
 * 4. dequeue and recurse.
 */

// TraverseBreadthFirst Traverse the graph breadth first from the starting vertex
func (graph *Graph) TraverseBreadthFirst() string {
	hash.ResetTable()
	return graph.start.traverseBreadthFirst("", queue.NewQueue())
}

// traverseBreadthFirst appends ver's value to result, enqueues its unseen
// neighbours, then recurses on the next dequeued vertex until the queue is
// empty.
func (ver *Vertex) traverseBreadthFirst(result string, VisitQueue queue.Queue) string {
	hashValue := hash.GetValueFromBucket(ver.value)
	// NOTE(review): this guard is satisfied by any non-nil value (including
	// true, the "visited" marker); de-duplication appears to rely on the ""
	// check before enqueueing below — confirm GetValueFromBucket's sentinel
	// values ("" for absent vs nil).
	if hashValue == false || hashValue != nil {
		hash.Insert(ver.value, true)
		result = result + ver.value
		for _, vertexEdge := range ver.edges {
			vertexEdgeValue := vertexEdge.end.value
			hashValueOfVertexEdge := hash.GetValueFromBucket(vertexEdgeValue)
			// Only enqueue vertices never seen before ("" means absent).
			if hashValueOfVertexEdge == "" {
				hash.Insert(vertexEdgeValue, false)
				VisitQueue.Enqueue(vertexEdge.end)
			}
		}
	}
	dequedValue := VisitQueue.Dequeue()
	if nextVertex, ok := dequedValue.(*Vertex); ok {
		return nextVertex.traverseBreadthFirst(result, VisitQueue)
	}
	// Queue exhausted: traversal is complete.
	return result
}

/*
 * 1. Get the hashed value of the current vertex value, append the result if it has not been visited.
 * 2. Go through edges and recurse
 */

// TraverseDepthFirst Traverse the graph Depth first from the starting vertex
func (graph *Graph) TraverseDepthFirst() string {
	hash.ResetTable()
	return graph.start.traverseDepthFirst("")
}
func (ver *Vertex) traverseDepthFirst(result string) string {
hashValue := hash.GetValueFromBucket(ver.value)
if hashValue == false || hashValue != nil {
hash.Insert(ver.value, true)
result = result + ver.value
for _, vertexEdge := range ver.edges {
vertexEdgeValue := vertexEdge.end.value
hashValueOfVertexEdge := hash.GetValueFromBucket(vertexEdgeValue)
if hashValueOfVertexEdge == "" {
hash.Insert(vertexEdgeValue, false)
result = vertexEdge.end.traverseDepthFirst(result)
}
}
}
return result
} | pkg/datastructures/graph/main.go | 0.789964 | 0.529142 | main.go | starcoder |
package csv
import (
beerproto "github.com/beerproto/beerproto_go"
)
// toTemperature wraps a Celsius reading in a TemperatureType. A nil value
// yields a zero-valued TemperatureType (unit only), not nil.
func toTemperature(value *float64) *beerproto.TemperatureType {
	if value == nil {
		return &beerproto.TemperatureType{
			Unit: beerproto.TemperatureUnitType_C,
		}
	}
	return &beerproto.TemperatureType{
		Unit:  beerproto.TemperatureUnitType_C,
		Value: *value,
	}
}

// toTemperatureRangeType builds a Celsius temperature range from low/high bounds.
func toTemperatureRangeType(low, high *float64) *beerproto.TemperatureRangeType {
	return &beerproto.TemperatureRangeType{
		Minimum: toTemperature(low),
		Maximum: toTemperature(high),
	}
}

// toTimeType wraps a duration in minutes.
// NOTE(review): dereferences value without a nil check, unlike the other
// converters — confirm callers never pass nil.
func toTimeType(value *int64) *beerproto.TimeType {
	return &beerproto.TimeType{
		Unit:  beerproto.TimeType_MIN,
		Value: *value,
	}
}

// toTimeTypeDays wraps a duration in days (same nil caveat as toTimeType).
func toTimeTypeDays(value *int64) *beerproto.TimeType {
	return &beerproto.TimeType{
		Unit:  beerproto.TimeType_DAY,
		Value: *value,
	}
}

// toConcentrationType wraps a concentration in mg/l; nil in, nil out.
func toConcentrationType(value *float64) *beerproto.ConcentrationType {
	if value == nil {
		return nil
	}
	return &beerproto.ConcentrationType{
		Unit:  beerproto.ConcentrationUnitType_MGL,
		Value: *value,
	}
}

// toVolumeType wraps a volume using the caller-supplied unit; nil in, nil out.
func toVolumeType(value *float64, t beerproto.VolumeType_VolumeUnitType) *beerproto.VolumeType {
	if value == nil {
		return nil
	}
	return &beerproto.VolumeType{
		Unit:  t,
		Value: *value,
	}
}

// toMassType wraps a mass in kilograms; nil in, nil out.
func toMassType(value *float64) *beerproto.MassType {
	if value == nil {
		return nil
	}
	return &beerproto.MassType{
		Unit:  beerproto.MassUnitType_KG,
		Value: *value,
	}
}

// toSpecificVolumeType wraps a specific volume in l/kg; nil in, nil out.
func toSpecificVolumeType(value *float64) *beerproto.SpecificVolumeType {
	if value == nil {
		return nil
	}
	return &beerproto.SpecificVolumeType{
		Unit:  beerproto.SpecificVolumeType_LKG,
		Value: *value,
	}
}

// toSpecificHeatType wraps a specific heat reading (CALGC unit); nil in, nil out.
func toSpecificHeatType(value *float64) *beerproto.SpecificHeatType {
	if value == nil {
		return nil
	}
	return &beerproto.SpecificHeatType{
		Unit:  beerproto.SpecificHeatUnitType_CALGC,
		Value: *value,
	}
}

// toPercent wraps a percentage value; nil in, nil out.
func toPercent(value *float64) *beerproto.PercentType {
	if value == nil {
		return nil
	}
	return &beerproto.PercentType{
		Value: *value,
		Unit:  beerproto.PercentType_PERCENT_SIGN,
	}
}

// toPercentRangeType builds a percentage range from low/high bounds.
func toPercentRangeType(low, high *float64) *beerproto.PercentRangeType {
	return &beerproto.PercentRangeType{
		Minimum: toPercent(low),
		Maximum: toPercent(high),
	}
}
// toGravity wraps a specific-gravity reading (SG unit); nil in, nil out.
func toGravity(value *float64) *beerproto.GravityType {
	if value == nil {
		return nil
	}
	return &beerproto.GravityType{
		Value: *value,
		Unit:  beerproto.GravityUnitType_SG,
	}
}

// averagePercent returns the midpoint of low and high as a PercentType.
// Returns nil when low is nil; when high is nil or zero, low is used as-is.
func averagePercent(low, high *float64) *beerproto.PercentType {
	if low == nil {
		return nil
	}
	if high == nil || *high == 0 {
		return &beerproto.PercentType{
			Value: *low,
			Unit:  beerproto.PercentType_PERCENT_SIGN,
		}
	}
	// Both operands are already float64, so the previous float64(...)
	// conversions were redundant.
	avg := (*low + *high) / 2
	return &beerproto.PercentType{
		Value: avg,
		Unit:  beerproto.PercentType_PERCENT_SIGN,
	}
}
// total returns the midpoint of low and high — despite its name it computes
// an average, not a sum (the name is kept for caller compatibility).
// Returns 0 when low is nil, and *low when high is nil.
func total(low, high *float64) float64 {
	if low == nil {
		return 0
	}
	if high == nil {
		return *low
	}
	// Both operands are already float64; the previous float64(...)
	// conversions were redundant.
	return (*low + *high) / 2
}
func toColor(value *float64) *beerproto.ColorType {
if value == nil {
return nil
}
return &beerproto.ColorType{
Unit: beerproto.ColorUnitType_EBC,
Value: *value,
}
}
func toDiastaticPowerType(value *float64) *beerproto.DiastaticPowerType {
if value == nil {
return nil
}
return &beerproto.DiastaticPowerType{
Unit: beerproto.DiastaticPowerUnitType_WK,
Value: *value,
}
}
func toAcidityType(value *float64) *beerproto.AcidityType {
if value == nil {
return nil
}
return &beerproto.AcidityType{
Unit: beerproto.AcidityUnitType_PH,
Value: *value,
}
}
func toViscosityType(value *float64) *beerproto.ViscosityType {
if value == nil {
return nil
}
return &beerproto.ViscosityType{
Unit: beerproto.ViscosityUnitType_MPAS,
Value: *value,
}
} | .github/actions/csvbeer/src/csv/units.go | 0.806548 | 0.567757 | units.go | starcoder |
package lazyledger
import (
"bytes"
"crypto/sha256"
"crypto/rand"
"math"
"math/big"
"gitlab.com/NebulousLabs/merkletree"
"github.com/musalbas/rsmt2d"
)
// ProbabilisticBlock represents a block designed for the Probabilistic Validity Rule.
// Roots and the extended data square are computed lazily and cached; a block can
// also be imported header-only, in which case the roots come from the header.
type ProbabilisticBlock struct {
	prevHash []byte // hash of the previous block in the chain
	messages []Message // the block's payload (empty for header-only blocks)
	rowRoots [][]byte // row Merkle roots as imported from a header (nil for locally built blocks)
	columnRoots [][]byte // column Merkle roots as imported from a header (nil for locally built blocks)
	cachedRowRoots [][]byte // lazily computed row roots (see computeRoots)
	cachedColumnRoots [][]byte // lazily computed column roots (see computeRoots)
	squareWidth int // extended-square width carried by the header (header-only blocks)
	headerOnly bool // true when the block was imported without messages
	cachedEds *rsmt2d.ExtendedDataSquare // lazily computed extended data square (see eds)
	messageSize int // padded size, in bytes, of each message share
	validated bool // whether the block has been marked valid
	sampleRequest *SampleRequest // last sample request issued by RequestSamples
	provenDependencies map[string]bool // leaf hashes verified via VerifyDependency
}
// SampleRequest identifies the shares a light client wants proofs for:
// a flat share index plus the axis (0 = row, 1 = column) to prove along.
type SampleRequest struct {
	Indexes []int // flat indexes into the extended data square
	Axes []int // per-index axis selector: 0 proves along the row, 1 along the column
}

// SampleResponse carries one Merkle proof per requested sample.
type SampleResponse struct {
	Proofs [][][]byte
}
// NewProbabilisticBlock returns a new probabilistic block chained onto
// prevHash; messageSize is the padded share size used for all messages.
func NewProbabilisticBlock(prevHash []byte, messageSize int) Block {
	return &ProbabilisticBlock{
		prevHash: prevHash,
		messageSize: messageSize,
		provenDependencies: make(map[string]bool),
	}
}
// ImportProbabilisticBlockHeader imports a received probabilistic block without
// the messages. The roots and square width are taken on trust from the header.
func ImportProbabilisticBlockHeader(prevHash []byte, rowRoots [][]byte, columnRoots [][]byte, squareWidth int, messageSize int, validated bool) Block {
	return &ProbabilisticBlock{
		prevHash: prevHash,
		rowRoots: rowRoots,
		columnRoots: columnRoots,
		squareWidth: squareWidth,
		headerOnly: true,
		messageSize: messageSize,
		validated: validated,
		provenDependencies: make(map[string]bool),
	}
}
// ImportProbabilisticBlock imports a received probabilistic block together
// with its messages; roots are recomputed lazily from the messages.
func ImportProbabilisticBlock(prevHash []byte, messages []Message, messageSize int, validated bool) Block {
	return &ProbabilisticBlock{
		prevHash: prevHash,
		messages: messages,
		messageSize: messageSize,
		validated: validated,
		provenDependencies: make(map[string]bool),
	}
}
// SquareWidth returns the width of the coded data square of the block.
// Header-only blocks carry the width explicitly; otherwise it is taken
// from the (lazily computed) extended data square.
func (pb *ProbabilisticBlock) SquareWidth() int {
	if pb.headerOnly {
		return pb.squareWidth
	}
	// Idiom: no else after a returning if branch.
	return int(pb.eds().Width())
}
// AddMessage adds a message to the block and invalidates all cached
// derived state (extended data square and roots) so it is recomputed
// on next access.
func (pb *ProbabilisticBlock) AddMessage(message Message) {
	pb.messages = append(pb.messages, message)
	pb.cachedEds = nil
	pb.cachedRowRoots = nil
	pb.cachedColumnRoots = nil
}
// messagesBytes marshals every message into its fixed-size padded wire
// form (pb.messageSize bytes each), preserving message order.
func (pb *ProbabilisticBlock) messagesBytes() [][]byte {
	messagesBytes := make([][]byte, len(pb.messages))
	for index, message := range pb.messages {
		messagesBytes[index] = message.MarshalPadded(pb.messageSize)
	}
	return messagesBytes
}
// eds lazily computes (and caches) the Reed-Solomon extended data square
// over the block's padded messages. The share count is first padded up to
// the next perfect square with 0xFF-filled shares.
func (pb *ProbabilisticBlock) eds() *rsmt2d.ExtendedDataSquare {
	if pb.cachedEds == nil {
		data := pb.messagesBytes()
		// Number of filler shares needed to reach the next perfect square.
		missingShares := int(math.Pow(math.Ceil(math.Sqrt(float64(len(data)))), 2)) - len(data)
		for i := 0; i < missingShares; i++ {
			paddingShare := make([]byte, pb.messageSize)
			for j := range paddingShare {
				paddingShare[j] = 0xFF // ensures it is treated like a redundancy share
			}
			data = append(data, paddingShare)
		}
		// NOTE(review): the error from ComputeExtendedDataSquare is
		// deliberately dropped, as in the original; on failure cachedEds
		// stays nil and the computation is retried on the next call.
		pb.cachedEds, _ = rsmt2d.ComputeExtendedDataSquare(data, rsmt2d.RSGF8)
	}
	return pb.cachedEds
}
// RowRoots returns the Merkle roots of the rows of the block.
// Header-imported roots take precedence; otherwise roots are computed
// lazily from the extended data square and cached.
func (pb *ProbabilisticBlock) RowRoots() [][]byte {
	if pb.rowRoots != nil {
		return pb.rowRoots
	}
	if pb.cachedRowRoots == nil {
		pb.computeRoots()
	}
	return pb.cachedRowRoots
}

// ColumnRoots returns the Merkle roots of the columns of the block.
// Header-imported roots take precedence; otherwise roots are computed
// lazily from the extended data square and cached.
func (pb *ProbabilisticBlock) ColumnRoots() [][]byte {
	if pb.columnRoots != nil {
		return pb.columnRoots
	}
	if pb.cachedColumnRoots == nil {
		pb.computeRoots()
	}
	return pb.cachedColumnRoots
}
// computeRoots builds a namespaced Merkle tree over every row and column
// of the extended data square and caches the resulting roots.
// The hasher's "coded mode" is switched on for the second (parity) half
// of each axis; the exact on/off sequencing matters to the flagger, so
// keep the order of setCodedMode calls intact when modifying this.
func (pb *ProbabilisticBlock) computeRoots() {
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	rowRoots := make([][]byte, pb.SquareWidth())
	columnRoots := make([][]byte, pb.SquareWidth())
	var rowTree *merkletree.Tree
	var columnTree *merkletree.Tree
	var rowData [][]byte
	var columnData [][]byte
	for i := 0; i < pb.SquareWidth(); i++ {
		// Rows/columns in the lower half of the square are parity data.
		if i >= pb.SquareWidth() / 2 {
			fh.(*flagDigest).setCodedMode(true)
		}
		rowTree = merkletree.New(fh)
		columnTree = merkletree.New(fh)
		rowData = pb.eds().Row(uint(i))
		columnData = pb.eds().Column(uint(i))
		for j := 0; j < pb.SquareWidth(); j++ {
			// Shares past the halfway point of each axis are parity too.
			if j >= pb.SquareWidth() / 2 {
				fh.(*flagDigest).setCodedMode(true)
			}
			rowTree.Push(rowData[j])
			columnTree.Push(columnData[j])
		}
		// Reset for the next (possibly non-parity) axis.
		fh.(*flagDigest).setCodedMode(false)
		rowRoots[i] = rowTree.Root()
		columnRoots[i] = columnTree.Root()
	}
	pb.cachedRowRoots = rowRoots
	pb.cachedColumnRoots = columnRoots
}
// RequestSamples draws n uniformly random share samples from the extended
// data square using crypto/rand: each sample is a flat index in
// [0, width^2) plus a random axis (0 = row, 1 = column). The request is
// remembered on the block so ProcessSamplesResponse can match the reply.
func (pb *ProbabilisticBlock) RequestSamples(n int) (*SampleRequest, error) {
	indexes := make([]int, n)
	axes := make([]int, n)
	for i := 0; i < n; i++ {
		val, err := rand.Int(rand.Reader, big.NewInt(int64(math.Pow(float64(pb.SquareWidth()), 2))))
		if err != nil {
			return nil, err
		}
		indexes[i] = int(val.Int64())
		val, err = rand.Int(rand.Reader, big.NewInt(2))
		if err != nil {
			return nil, err
		}
		axes[i] = int(val.Int64())
	}
	pb.sampleRequest = &SampleRequest{
		Indexes: indexes,
		Axes: axes,
	}
	return pb.sampleRequest, nil
}
// RespondSamples answers a light client's sample request by producing one
// Merkle proof per requested share, proving it against the requested axis
// (row or column). The hasher's coded mode is toggled to match the parity
// half of the square exactly as in computeRoots; preserve the sequencing.
func (pb *ProbabilisticBlock) RespondSamples(request *SampleRequest) *SampleResponse {
	var proofs [][][]byte
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	for x, index := range request.Indexes {
		r, c := pb.shareIndexToCoordinates(index)
		// Add Merkle proof to response
		var data [][]byte
		tree := merkletree.New(fh)
		if request.Axes[x] == 0 { // row
			data = pb.eds().Row(uint(r))
			tree.SetIndex(uint64(c))
			if r >= pb.SquareWidth() / 2 {
				fh.(*flagDigest).setCodedMode(true)
			}
		} else { // column
			data = pb.eds().Column(uint(c))
			tree.SetIndex(uint64(r))
			if c >= pb.SquareWidth() / 2 {
				fh.(*flagDigest).setCodedMode(true)
			}
		}
		for j, share := range data {
			// Second half of the axis holds parity shares.
			if j >= pb.SquareWidth() / 2 {
				fh.(*flagDigest).setCodedMode(true)
			}
			tree.Push(share)
		}
		// Reset coded mode before the next sample.
		fh.(*flagDigest).setCodedMode(false)
		_, proof, _, _ := tree.Prove()
		proofs = append(proofs, proof)
	}
	return &SampleResponse{
		Proofs: proofs,
	}
}
// ProcessSamplesResponse verifies the proofs returned for the block's most
// recent sample request. It returns false if the proof count does not match
// the request or any single proof fails verification against the stored
// row/column roots.
func (pb *ProbabilisticBlock) ProcessSamplesResponse(response *SampleResponse) bool {
	if len(response.Proofs) != len(pb.sampleRequest.Indexes) {
		return false
	}
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	for x, index := range pb.sampleRequest.Indexes {
		r, c := pb.shareIndexToCoordinates(index)
		var root []byte
		var result bool
		// A share in the parity half of either axis was hashed in coded mode.
		if r >= pb.SquareWidth() / 2 || c >= pb.SquareWidth() / 2 {
			fh.(*flagDigest).setCodedMode(true)
		}
		if pb.sampleRequest.Axes[x] == 0 { // row
			root = pb.RowRoots()[r]
			result = merkletree.VerifyProof(fh, root, response.Proofs[x], uint64(c), uint64(pb.SquareWidth()))
		} else { // column
			root = pb.ColumnRoots()[c]
			result = merkletree.VerifyProof(fh, root, response.Proofs[x], uint64(r), uint64(pb.SquareWidth()))
		}
		fh.(*flagDigest).setCodedMode(false)
		if !result {
			return false
		}
	}
	return true
}
// Digest computes the hash of the block: SHA-256 over the previous block
// hash followed by every row root and every column root.
func (pb *ProbabilisticBlock) Digest() []byte {
	hasher := sha256.New()
	hasher.Write(pb.prevHash)
	// Use the accessors rather than the raw fields: for locally built
	// blocks pb.rowRoots/pb.columnRoots are nil (roots live in the
	// cached* fields), so ranging the raw fields would silently omit
	// the roots from the digest.
	for _, root := range pb.RowRoots() {
		hasher.Write(root)
	}
	for _, root := range pb.ColumnRoots() {
		hasher.Write(root)
	}
	return hasher.Sum(nil)
}
// Valid returns true if the block has been marked valid (set at import time).
func (pb *ProbabilisticBlock) Valid() bool {
	return pb.validated
}

// PrevHash returns the hash of the previous block.
func (pb *ProbabilisticBlock) PrevHash() []byte {
	return pb.prevHash
}

// Messages returns the block's messages (nil for header-only blocks).
func (pb *ProbabilisticBlock) Messages() []Message {
	return pb.messages
}
// indexToCoordinates maps a flat message index onto (row, column) within
// the ORIGINAL (non-extended) quadrant, whose width is SquareWidth()/2.
func (pb *ProbabilisticBlock) indexToCoordinates(index int) (row, column int) {
	row = index / (pb.SquareWidth() / 2)
	column = index % (pb.SquareWidth() / 2)
	return
}

// shareIndexToCoordinates maps a flat share index onto (row, column)
// within the full EXTENDED square, whose width is SquareWidth().
func (pb *ProbabilisticBlock) shareIndexToCoordinates(index int) (row, column int) {
	row = index / pb.SquareWidth()
	column = index % pb.SquareWidth()
	return
}
// ApplicationProof creates a Merkle proof for all of the messages in a block for an application namespace.
// All proofs are created from row roots only.
// It returns the half-open message index range [proofStart, proofEnd), one range
// proof per covered row, and either the covered messages (namespace present) or
// the leaf hashes of the covered range (namespace absent).
func (pb *ProbabilisticBlock) ApplicationProof(namespace [namespaceSize]byte) (int, int, [][][]byte, *[]Message, [][]byte) {
	var proofStart int
	var proofEnd int
	var found bool
	// First pass: find the range of messages matching the namespace.
	// NOTE(review): contiguity of the resulting range assumes messages
	// are sorted by namespace — confirm against the block builder.
	for index, message := range pb.messages {
		if message.Namespace() == namespace {
			if !found {
				found = true
				proofStart = index
			}
			proofEnd = index + 1
		}
	}
	var inRange bool
	if !found {
		var prevMessage Message
		// We need to generate a proof for an absence of relevant messages.
		// The range covers adjacent message pairs whose namespaces bracket
		// the requested one (in either sort direction).
		for index, message := range pb.messages {
			if index != 0 {
				prevNs := prevMessage.Namespace()
				currentNs := message.Namespace()
				if ((bytes.Compare(prevNs[:], namespace[:]) < 0 && bytes.Compare(namespace[:], currentNs[:]) < 0) ||
					(bytes.Compare(prevNs[:], namespace[:]) > 0 && bytes.Compare(namespace[:], currentNs[:]) > 0)) {
					if !inRange {
						inRange = true
						proofStart = index
					}
					proofEnd = index + 1
				}
			}
			prevMessage = message
		}
	}
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	var proofs [][][]byte
	if found || inRange {
		// Map the flat message index range onto (row, column) coordinates
		// of the original (left) half of the data square.
		proofStartRow, proofStartColumn := pb.indexToCoordinates(proofStart)
		proofEndRow, proofEndColumn := pb.indexToCoordinates(proofEnd)
		if proofEndColumn == 0 {
			// proofEnd is exclusive: an end column of 0 means the range
			// actually ends at the last column of the previous row.
			proofEndRow -= 1
			proofEndColumn = pb.SquareWidth() / 2
		}
		for i := 0; i < pb.SquareWidth() / 2; i++ {
			if i >= proofStartRow && i <= proofEndRow {
				// This row needs Merkle proofs
				var startColumn int
				var endColumn int
				if i == proofStartRow {
					startColumn = proofStartColumn
				} else {
					startColumn = 0
				}
				if i == proofEndRow {
					endColumn = proofEndColumn
				} else {
					endColumn = pb.SquareWidth() / 2
				}
				rowProof, _ := merkletree.BuildRangeProof(startColumn, endColumn, NewCodedAxisSubtreeHasher(pb.eds().Row(uint(i)), fh))
				proofs = append(proofs, rowProof)
			}
		}
	}
	proofMessages := pb.messages[proofStart:proofEnd]
	if found {
		return proofStart, proofEnd, proofs, &proofMessages, nil
	}
	// Absence proof: return leaf hashes of the bracketing messages
	// instead of the messages themselves.
	var hashes [][]byte
	for _, message := range proofMessages {
		ndf := NewNamespaceDummyFlagger()
		fh := NewFlagHasher(ndf, sha256.New())
		hashes = append(hashes, leafSum(fh, message.MarshalPadded(pb.messageSize)))
		fh.Reset()
	}
	return proofStart, proofEnd, proofs, nil, hashes
}
// VerifyApplicationProof verifies a Merkle proof for all of the messages in a block for an application namespace.
// It checks one range proof per covered row against the block's row roots,
// and additionally checks completeness: the subtrees outside the proven
// range must not contain the namespace (their namespace bounds must lie
// strictly below / above it).
func (pb *ProbabilisticBlock) VerifyApplicationProof(namespace [namespaceSize]byte, proofStart int, proofEnd int, proofs [][][]byte, messages *[]Message, hashes [][]byte) bool {
	// Verify Merkle proofs
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	var lh merkletree.LeafHasher
	// Presence proofs carry the messages; absence proofs carry leaf hashes.
	if messages != nil {
		lh = NewPaddedMessageLeafHasher(messages, fh, pb.messageSize)
	} else {
		lh = NewHashLeafHasher(hashes)
	}
	proofStartRow, proofStartColumn := pb.indexToCoordinates(proofStart)
	proofEndRow, proofEndColumn := pb.indexToCoordinates(proofEnd)
	if proofEndColumn == 0 {
		// proofEnd is exclusive: wrap back to the end of the previous row.
		proofEndRow -= 1
		proofEndColumn = pb.SquareWidth() / 2
	}
	proofNum := 0
	for i := 0; i < pb.SquareWidth() / 2; i++ {
		if i >= proofStartRow && i <= proofEndRow {
			// This row has Merkle proofs
			var startColumn int
			var endColumn int
			if i == proofStartRow {
				startColumn = proofStartColumn
			} else {
				startColumn = 0
			}
			if i == proofEndRow {
				endColumn = proofEndColumn
			} else {
				endColumn = pb.SquareWidth() / 2
			}
			// Verify proof
			result, err := merkletree.VerifyRangeProof(lh, fh, startColumn, endColumn, proofs[proofNum], pb.RowRoots()[i])
			if !result || err != nil {
				return false
			}
			// Verify completeness
			var leafIndex uint64
			var leftSubtrees [][]byte
			var rightSubtrees [][]byte
			proof := proofs[proofNum]
			// Consume proof nodes covering leaves before the range start;
			// these are the subtrees to the left of the proven range.
			consumeUntil := func(end uint64) error {
				for leafIndex != end && len(proof) > 0 {
					subtreeSize := nextSubtreeSize(leafIndex, end)
					leftSubtrees = append(leftSubtrees, proof[0])
					proof = proof[1:]
					leafIndex += uint64(subtreeSize)
				}
				return nil
			}
			if err := consumeUntil(uint64(startColumn)); err != nil {
				return false
			}
			rightSubtrees = proof
			// Left subtrees must end strictly below the namespace...
			for _, subtree := range leftSubtrees {
				_, max := dummyNamespacesFromFlag(subtree)
				if bytes.Compare(max, namespace[:]) >= 0 {
					return false
				}
			}
			// ...and right subtrees must begin strictly above it.
			for _, subtree := range rightSubtrees {
				min, _ := dummyNamespacesFromFlag(subtree)
				if bytes.Compare(min, namespace[:]) <= 0 {
					return false
				}
			}
			proofNum += 1
		}
	}
	return true
}
// ProveDependency builds a single-leaf range proof for the message at the
// given flat index, proven against its row root. It returns the leaf hash
// of the padded message together with the proof.
func (pb *ProbabilisticBlock) ProveDependency(index int) ([]byte, [][]byte, error) {
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	r, c := pb.indexToCoordinates(index)
	proof, err := merkletree.BuildRangeProof(c, c + 1, NewCodedAxisSubtreeHasher(pb.eds().Row(uint(r)), fh))
	if err != nil {
		return nil, nil, err
	}
	return leafSum(fh, pb.messages[index].MarshalPadded(pb.messageSize)), proof, nil
}

// VerifyDependency checks a single-leaf proof produced by ProveDependency
// against the block's row roots. On success the leaf hash is recorded in
// provenDependencies so DependencyProven can answer later queries.
func (pb *ProbabilisticBlock) VerifyDependency(index int, hash []byte, proof [][]byte) bool {
	ndf := NewNamespaceDummyFlagger()
	fh := NewFlagHasher(ndf, sha256.New())
	lh := NewHashLeafHasher([][]byte{hash})
	r, c := pb.indexToCoordinates(index)
	result, err := merkletree.VerifyRangeProof(lh, fh, c, c + 1, proof, pb.RowRoots()[r])
	if result && err == nil {
		pb.provenDependencies[string(hash)] = true
		return true
	}
	return false
}
func (pb *ProbabilisticBlock) DependencyProven(hash []byte) bool {
if value, ok := pb.provenDependencies[string(hash)]; ok {
return value
}
return false
} | probabilisticblock.go | 0.768473 | 0.406243 | probabilisticblock.go | starcoder |
package wave
import (
"encoding/binary"
"fmt"
"io"
)
// Writer creates a writer for wave files encapsulating an io.Writer.
// It supports 8, 16 and 32 bit integer and 32 bit float formats.
type Writer struct {
	W io.Writer // destination stream; the header is written at construction
	H Header // the RIFF/WAVE header that was written
	ctr int // number of samples written so far
	numSamples int // declared total; writes beyond this are rejected
}
// NewWriter creates a wave writer encapsulating a provided io.Writer.
// NewWriter() attempts to first write the wave header to the provided writer and
// samples can be subsequently written through `WriteInt()` and `WriteFloat()` functions.
func NewWriter(w io.Writer, channels, samplesPerSec, bitsPerSample, numSamples int) (*Writer, error) {
	// Size of the data chunk: samples * channels * bytes-per-sample.
	subChunk2Size := uint32(numSamples * channels * bitsPerSample / 8)
	h := Header{
		RiffID: [4]byte{'R', 'I', 'F', 'F'},
		// RIFF chunk size: 36 header bytes after "RIFF<size>" plus the data.
		// (subChunk2Size is already uint32; the redundant conversion is gone.)
		DataSize: 36 + subChunk2Size,
		RiffType: [4]byte{'W', 'A', 'V', 'E'},
		FmtChunkID: [4]byte{'f', 'm', 't', ' '},
		FmtChunkSize: 16,
		AudioFmt: 1, // PCM
		Channels: uint16(channels),
		SamplesPerSec: uint32(samplesPerSec),
		BytesPerSec: uint32(samplesPerSec * channels * bitsPerSample / 8),
		BlockAlign: uint16(channels * bitsPerSample / 8),
		BitsPerSample: uint16(bitsPerSample),
		DataChunkID: [4]byte{'d', 'a', 't', 'a'},
		DataChunkSize: subChunk2Size,
	}
	if err := binary.Write(w, binary.LittleEndian, &h); err != nil {
		return nil, fmt.Errorf("error writing wave header in NewWriter: %s", err)
	}
	return &Writer{w, h, 0, numSamples}, nil
}
// WriteInt writes one multi-channel sample to the wave file. Each element
// of the slice corresponds to one channel and is cast down to the bit
// depth declared when the Writer was created, then written little-endian
// to the underlying io.Writer. Writing more than the declared number of
// samples is an error.
func (w *Writer) WriteInt(samples []int64) error {
	if len(samples) != int(w.H.Channels) {
		return fmt.Errorf("number of samples != channels in WriteInt: want %d: got %d", w.H.Channels, len(samples))
	}
	if w.ctr+1 > w.numSamples {
		return fmt.Errorf("overflow error: attempting to write too many samples: already wrote %d", w.ctr)
	}
	var writeErr error
	switch w.H.BitsPerSample {
	case 8:
		out := make([]int8, w.H.Channels)
		for i, s := range samples {
			out[i] = int8(s)
		}
		writeErr = binary.Write(w.W, binary.LittleEndian, out)
	case 16:
		out := make([]int16, w.H.Channels)
		for i, s := range samples {
			out[i] = int16(s)
		}
		writeErr = binary.Write(w.W, binary.LittleEndian, out)
	case 32:
		out := make([]int32, w.H.Channels)
		for i, s := range samples {
			out[i] = int32(s)
		}
		writeErr = binary.Write(w.W, binary.LittleEndian, out)
	default:
		return fmt.Errorf("unrecognized bitsPerSample: %d", w.H.BitsPerSample)
	}
	if writeErr != nil {
		return fmt.Errorf("error writing sample in WriteInt:%s", writeErr)
	}
	w.ctr++
	return nil
}
// WriteFloat writes samples to the wave file. In the []float64 slice passed to WriteInt,
// each slice element should correspond to a channel in the sample. These are simply
// cast to 32 bit floats and written to the underlying io.Writer. If the number of
// samples written exceeds the declared number of samples, an error is raised.
func (w *Writer) WriteFloat(samples []float64) error {
if len(samples) != int(w.H.Channels) {
return fmt.Errorf("number of samples != channels in WriteInt: want %d: got %d", w.H.Channels, len(samples))
}
if w.ctr+1 > w.numSamples {
return fmt.Errorf("overflow error: attempting to write too many samples: already wrote %d", w.ctr)
}
if w.H.BitsPerSample != 32 {
return fmt.Errorf("only 32 bit floats are supported. bitsPerSample in Header is set to: %d", w.H.BitsPerSample)
}
wsamples := make([]float32, w.H.Channels)
for j, sample := range samples {
wsamples[j] = float32(sample)
}
if err := binary.Write(w.W, binary.LittleEndian, wsamples); err != nil {
return fmt.Errorf("error writing sample in WriteFloat: %s", err)
}
w.ctr++
return nil
} | writer.go | 0.783077 | 0.405302 | writer.go | starcoder |
package slice
// MapString returns a new slice holding f applied to each element of input.
func MapString(input []string, f func(string) string) []string {
	out := make([]string, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapInt returns a new slice holding f applied to each element of input.
func MapInt(input []int, f func(int) int) []int {
	out := make([]int, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapInt8 returns a new slice holding f applied to each element of input.
func MapInt8(input []int8, f func(int8) int8) []int8 {
	out := make([]int8, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapInt16 returns a new slice holding f applied to each element of input.
func MapInt16(input []int16, f func(int16) int16) []int16 {
	out := make([]int16, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapInt32 returns a new slice holding f applied to each element of input.
func MapInt32(input []int32, f func(int32) int32) []int32 {
	out := make([]int32, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapInt64 returns a new slice holding f applied to each element of input.
func MapInt64(input []int64, f func(int64) int64) []int64 {
	out := make([]int64, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapUint returns a new slice holding f applied to each element of input.
func MapUint(input []uint, f func(uint) uint) []uint {
	out := make([]uint, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapUint8 returns a new slice holding f applied to each element of input.
func MapUint8(input []uint8, f func(uint8) uint8) []uint8 {
	out := make([]uint8, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapUint16 returns a new slice holding f applied to each element of input.
func MapUint16(input []uint16, f func(uint16) uint16) []uint16 {
	out := make([]uint16, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapUint32 returns a new slice holding f applied to each element of input.
func MapUint32(input []uint32, f func(uint32) uint32) []uint32 {
	out := make([]uint32, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapFloat32 returns a new slice holding f applied to each element of input.
func MapFloat32(input []float32, f func(float32) float32) []float32 {
	out := make([]float32, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapFloat64 returns a new slice holding f applied to each element of input.
func MapFloat64(input []float64, f func(float64) float64) []float64 {
	out := make([]float64, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapComplex64 returns a new slice holding f applied to each element of input.
func MapComplex64(input []complex64, f func(complex64) complex64) []complex64 {
	out := make([]complex64, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}

// MapComplex128 returns a new slice holding f applied to each element of input.
func MapComplex128(input []complex128, f func(complex128) complex128) []complex128 {
	out := make([]complex128, 0, len(input))
	for _, v := range input {
		out = append(out, f(v))
	}
	return out
}
package edge
import (
"fmt"
"strconv"
"strings"
"github.com/emicklei/dot"
)
// Attribute is a function that applies a property to an edge.
type Attribute func(*dot.Edge)

// Label sets the edge caption as an HTML-like table placed at the tail
// ("taillabel"). When num > 0 it is rendered as a small bold figure above
// the text; an all-whitespace text row is omitted. If neither part is
// present, no attribute is set at all.
func Label(num int, text string) Attribute {
	return func(el *dot.Edge) {
		// render tracks whether the table got at least one row.
		var render = false
		var sb strings.Builder
		sb.WriteString(`<table border="0">`)
		if num > 0 {
			render = true
			sb.WriteString(`<tr><td><font color="#1f6c7c" point-size="9"><b>`)
			sb.WriteString(strconv.Itoa(num))
			sb.WriteString("</b></font></td></tr>")
		}
		if lab := strings.TrimSpace(text); len(lab) > 0 {
			render = true
			sb.WriteString("<tr><td>")
			sb.WriteString(lab)
			sb.WriteString("</td></tr>")
		}
		sb.WriteString("</table>")
		if render {
			el.Attr("taillabel", dot.HTML(sb.String()))
		}
	}
}
// LabelDistance adjusts the distance that the headlabel (taillabel) is
// from the head (tail) node.
func LabelDistance(dist float32) Attribute {
	return func(e *dot.Edge) {
		e.Attr("labeldistance", fmt.Sprintf("%.2f", dist))
	}
}

// LabelAngle, along with labeldistance, determines where the headlabel
// (taillabel) is placed with respect to the head (tail) in polar
// coordinates. The origin is the point where the edge touches the node;
// the 0-degree ray runs back along the edge, positive angles rotate
// counterclockwise, negative angles clockwise.
func LabelAngle(angle float32) Attribute {
	return func(e *dot.Edge) {
		e.Attr("labelangle", fmt.Sprintf("%.2f", angle))
	}
}

// MinLen sets the minimum edge length (rank difference between head and
// tail). Non-positive values are ignored.
func MinLen(length float32) Attribute {
	return func(e *dot.Edge) {
		if length > 0 {
			e.Attr("minlen", fmt.Sprintf("%.2f", length))
		}
	}
}
// FontName specifies the font used for edge text.
func FontName(name string) Attribute {
	return func(e *dot.Edge) {
		e.Attr("fontname", name)
	}
}

// FontSize specifies the font size, in points, used for edge text.
func FontSize(size float32) Attribute {
	return func(e *dot.Edge) {
		e.Attr("fontsize", fmt.Sprintf("%.2f", size))
	}
}
// Dir sets the edge direction; accepted values: both, forward, back, none.
// Blank or whitespace-only values are ignored.
func Dir(dir string) Attribute {
	return func(e *dot.Edge) {
		if strings.TrimSpace(dir) == "" {
			return
		}
		e.Attr("dir", dir)
	}
}

// Dashed renders the edge line dashed when enabled.
func Dashed(dashed bool) Attribute {
	return func(e *dot.Edge) {
		if !dashed {
			return
		}
		e.Attr("style", "dashed")
	}
}
// Color sets the color of the edge line; a blank value falls back to a
// translucent slate-gray default.
func Color(color string) Attribute {
	return func(e *dot.Edge) {
		c := "#708090ff" // default
		if strings.TrimSpace(color) != "" {
			c = color
		}
		e.Attr("color", c)
	}
}
// Highlight thickens the edge line and arrowhead when enabled; when
// disabled it applies the thinner default widths.
func Highlight(enable bool) Attribute {
	return func(e *dot.Edge) {
		penWidth, arrowSize := "0.6", "0.6"
		if enable {
			penWidth, arrowSize = "1.2", "0.9"
		}
		e.Attr("penwidth", penWidth)
		e.Attr("arrowsize", arrowSize)
	}
}
// New add to dot.Graph a new connection line between two components.
func New(g *dot.Graph, fromNodeID, toNodeID string, attrs ...Attribute) error {
n1, ok := g.FindNodeById(fromNodeID)
if !ok {
return fmt.Errorf("node with id=%s not found", fromNodeID)
}
n2, ok := g.FindNodeById(toNodeID)
if !ok {
return fmt.Errorf("node with id=%s not found", toNodeID)
}
el := g.Edge(n1, n2)
FontName("Fira Mono")(&el)
FontSize(8)(&el)
Highlight(false)(&el)
for _, opt := range attrs {
opt(&el)
}
return nil
} | pkg/edge/edge.go | 0.774071 | 0.460168 | edge.go | starcoder |
package bitmap
var (
	tA = [8]byte{1, 2, 4, 8, 16, 32, 64, 128} // tA[i]: mask with only bit i set (used to test/set)
	tB = [8]byte{254, 253, 251, 247, 239, 223, 191, 127} // tB[i]: mask with every bit except i set (used to clear)
)
// dataOrCopy returns d itself when c is false, or a fresh copy of d
// when c is true.
func dataOrCopy(d []byte, c bool) []byte {
	if !c {
		return d
	}
	out := make([]byte, len(d))
	copy(out, d)
	return out
}
// NewSlice creates a new byteslice with length l (in bits).
// The actual size in bits might be up to 7 bits larger because
// they are stored in a byteslice.
func NewSlice(l int) []byte {
	// (l+7)/8 rounds up to whole bytes, replacing the remainder branch.
	return make([]byte, (l+7)/8)
}
// Get returns the value of bit i from map m.
// It doesn't check the bounds of the slice.
func Get(m []byte, i int) bool {
	return m[i/8]&tA[i%8] != 0
}

// Set sets bit i of map m to value v.
// It doesn't check the bounds of the slice.
func Set(m []byte, i int, v bool) {
	byteIdx, bitIdx := i/8, i%8
	if v {
		m[byteIdx] |= tA[bitIdx]
	} else {
		m[byteIdx] &= tB[bitIdx]
	}
}
// GetBit returns the value of bit i of byte b.
// The bit index must be between 0 and 7.
func GetBit(b byte, i int) bool {
	return b&tA[i] != 0
}

// SetBit sets bit i of byte b to value v and returns the result.
// The bit index must be between 0 and 7.
func SetBit(b byte, i int, v bool) byte {
	if !v {
		return b & tB[i]
	}
	return b | tA[i]
}

// SetBitRef sets bit i of the byte pointed to by b to value v.
func SetBitRef(b *byte, i int, v bool) {
	*b = SetBit(*b, i, v)
}
// Len returns the length (in bits) of the provided byteslice.
// It will always be a multiple of 8 bits.
func Len(m []byte) int {
	return 8 * len(m)
}
// Bitmap is a byteslice with bitmap functions.
// Creating one from existing data is as simple as bitmap := Bitmap(data).
type Bitmap []byte

// New creates a new Bitmap instance with length l (in bits), rounded up
// to whole bytes as in NewSlice.
func New(l int) Bitmap {
	return NewSlice(l)
}

// Len wraps around the Len function.
func (b Bitmap) Len() int {
	return Len(b)
}

// Get wraps around the Get function.
func (b Bitmap) Get(i int) bool {
	return Get(b, i)
}

// Set wraps around the Set function.
func (b Bitmap) Set(i int, v bool) {
	Set(b, i, v)
}

// Data returns the data of the bitmap.
// If copy is false the actual underlying slice will be returned.
func (b Bitmap) Data(copy bool) []byte {
	return dataOrCopy(b, copy)
}
package e2e
import (
"math"
io_prometheus_client "github.com/prometheus/client_model/go"
)
// getValue extracts a single float64 from a metric: the value for gauges
// and counters, the sample sum for histograms and summaries, and 0 for
// anything else.
func getValue(m *io_prometheus_client.Metric) float64 {
	// switch replaces the if/else-if chain (indent-error-flow idiom).
	switch {
	case m.GetGauge() != nil:
		return m.GetGauge().GetValue()
	case m.GetCounter() != nil:
		return m.GetCounter().GetValue()
	case m.GetHistogram() != nil:
		return m.GetHistogram().GetSampleSum()
	case m.GetSummary() != nil:
		return m.GetSummary().GetSampleSum()
	default:
		return 0
	}
}
// sumValues adds up getValue over every metric in the family.
func sumValues(family *io_prometheus_client.MetricFamily) float64 {
	var total float64
	for _, metric := range family.Metric {
		total += getValue(metric)
	}
	return total
}
func EqualsSingle(expected float64) func(float64) bool {
return func(v float64) bool {
return v == expected || (math.IsNaN(v) && math.IsNaN(expected))
}
}
// Equals is an isExpected function for WaitSumMetrics that returns true if given single sum is equals to given value.
func Equals(value float64) func(sums ...float64) bool {
return func(sums ...float64) bool {
if len(sums) != 1 {
panic("equals: expected one value")
}
return sums[0] == value || math.IsNaN(sums[0]) && math.IsNaN(value)
}
}
// Greater is an isExpected function for WaitSumMetrics that returns true
// if the given single sum is strictly greater than value. It panics when
// given other than exactly one sum.
func Greater(value float64) func(sums ...float64) bool {
	return func(sums ...float64) bool {
		if len(sums) != 1 {
			panic("greater: expected one value")
		}
		s := sums[0]
		return s > value
	}
}

// Less is an isExpected function for WaitSumMetrics that returns true
// if the given single sum is strictly less than value. It panics when
// given other than exactly one sum.
func Less(value float64) func(sums ...float64) bool {
	return func(sums ...float64) bool {
		if len(sums) != 1 {
			panic("less: expected one value")
		}
		s := sums[0]
		return s < value
	}
}
// EqualsAmongTwo is an isExpected function for WaitSumMetrics that returns
// true if the first sum equals the second. It panics unless exactly two
// sums are given.
// NOTE: Be careful on scrapes in between of process that changes two
// metrics. Those are usually not atomic.
func EqualsAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("equalsAmongTwo: expected two values")
	}
	a, b := sums[0], sums[1]
	return a == b
}

// GreaterAmongTwo is an isExpected function for WaitSumMetrics that returns
// true if the first sum is greater than the second. It panics unless
// exactly two sums are given.
// NOTE: Be careful on scrapes in between of process that changes two
// metrics. Those are usually not atomic.
func GreaterAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("greaterAmongTwo: expected two values")
	}
	a, b := sums[0], sums[1]
	return a > b
}

// LessAmongTwo is an isExpected function for WaitSumMetrics that returns
// true if the first sum is smaller than the second. It panics unless
// exactly two sums are given.
// NOTE: Be careful on scrapes in between of process that changes two
// metrics. Those are usually not atomic.
func LessAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("lessAmongTwo: expected two values")
	}
	a, b := sums[0], sums[1]
	return a < b
}
package hdrcolor
// XyzToLms converts from CIE XYZ-space to LMS-space (using D65-LMS matrix).
// The conversion is a fixed 3x3 linear transform; coefficients are kept
// exactly as-is to preserve bit-for-bit results.
func XyzToLms(x, y, z float64) (l, m, s float64) {
	l = 0.4002*x + 0.7075*y - 0.0807*z
	m = -0.228*x + 1.1500*y + 0.0612*z
	s = 0.0000*x + 0.0000*y + 0.9184*z
	return
}

// LmsToXyz converts from LMS-space (using D65-LMS matrix) to CIE XYZ-space.
// Approximate inverse of XyzToLms (matrix coefficients are rounded).
func LmsToXyz(l, m, s float64) (x, y, z float64) {
	x = 1.8501*l - 1.1385*m + 0.2384*s
	y = 0.3668*l + 0.6438*m - 0.0107*s
	z = 0.0000*l + 0.0000*m + 1.0889*s
	return
}
// XyzToLmsMcat02 converts from CIE XYZ-space to LMS-space (using CIE CAT02 matrix).
func XyzToLmsMcat02(x, y, z float64) (l, m, s float64) {
	l = 0.7328*x + 0.4296*y - 0.1624*z
	m = -0.7036*x + 1.6974*y + 0.0061*z
	s = 0.0030*x + 0.0136*y + 0.9834*z
	return
}

// LmsMcat02ToXyz converts from LMS-space (using CIE CAT02 matrix) to CIE XYZ-space.
// Approximate inverse of XyzToLmsMcat02 (matrix coefficients are rounded).
func LmsMcat02ToXyz(l, m, s float64) (x, y, z float64) {
	x = 1.0961*l - 0.2789*m + 0.1827*s
	y = 0.4544*l + 0.4736*m + 0.0721*s
	z = -0.0096*l - 0.0057*m + 1.0153*s
	return
}
// XyzToLmsMhpe converts from CIE XYZ-space to LMS-space (using Hunt-Pointer-Estevez matrix).
func XyzToLmsMhpe(x, y, z float64) (l, m, s float64) {
	l = 0.38971*x + 0.68898*y - 0.07868*z
	m = -0.22981*x + 1.18340*y + 0.04641*z
	s = 0.00000*x + 0.00000*y + 1.00000*z
	return
}

// LmsMhpeToXyz converts from LMS-space (using Hunt-Pointer-Estevez matrix) to CIE XYZ-space.
// Approximate inverse of XyzToLmsMhpe (matrix coefficients are rounded).
func LmsMhpeToXyz(l, m, s float64) (x, y, z float64) {
	x = 1.91020*l - 1.11212*m + 0.20191*s
	y = 0.37095*l + 0.62905*m - 0.00001*s
	z = 0.00000*l + 0.00000*m + 1.00000*s
	return
}
// LmsToIpt converts from LMS-space to IPT-space.
func LmsToIpt(l, m, s float64) (i, p, t float64) {
i = 0.4000*l + 0.4000*m + 0.2000*s
p = 4.4550*l - 4.8510*m + 0.3960*s
t = 0.8056*l + 0.3572*m - 1.1628*s
return
}
// IptToLms converts from IPT-space to LMS-space (inverse of LmsToIpt).
func IptToLms(i, p, t float64) (l, m, s float64) {
	l = 1*i + 0.0976*p + 0.2052*t
	m = 1*i - 0.1139*p + 0.1332*t
	s = 1*i + 0.0326*p - 0.6769*t
	return
} | hdrcolor/converter.go | 0.740268 | 0.643287 | converter.go | starcoder
package gotorch
// #cgo CFLAGS: -I ${SRCDIR}/cgotorch
// #cgo LDFLAGS: -L ${SRCDIR}/cgotorch -Wl,-rpath ${SRCDIR}/cgotorch -lcgotorch
// #cgo LDFLAGS: -L ${SRCDIR}/cgotorch/libtorch/lib -Wl,-rpath ${SRCDIR}/cgotorch/libtorch/lib -lc10 -ltorch -ltorch_cpu
// #include "cgotorch.h"
import "C"
import (
"unsafe"
)
// Add torch.add: returns a new tensor combining a and other scaled by alpha
// (presumably a + alpha*other, mirroring torch.add — confirm in the cgotorch
// C.Add binding).
func Add(a, other Tensor, alpha float32) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Add(C.Tensor(*a.T), C.Tensor(*other.T),
		C.float(alpha), &t)))
	// Register a GC finalizer so the underlying libtorch tensor is freed.
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Add torch.add: method form of the package-level Add.
func (a *Tensor) Add(other Tensor, alpha float32) Tensor {
	return Add(*a, other, alpha)
}

// AddI adds in-place (torch.add_); presumably the returned tensor aliases a —
// confirm in the cgotorch C.Add_ binding.
func (a *Tensor) AddI(other Tensor, alpha float32) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Add_(
		C.Tensor(*a.T),
		C.Tensor(*other.T),
		C.float(alpha),
		&t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Eq wraps torch.eq, which does element-wise comparison between two tensors and returns
// a tensor of the same size as the operands.
func Eq(a, other Tensor) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Eq(C.Tensor(*a.T), C.Tensor(*other.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Eq torch.eq: method form of the package-level Eq.
func (a Tensor) Eq(other Tensor) Tensor {
	return Eq(a, other)
}

// Equal compares two tensors by their content and reports whether they match
// (C.Equal writes a non-zero int64 on equality).
func Equal(a, b Tensor) bool {
	var r int64
	MustNil(unsafe.Pointer(C.Equal(C.Tensor(*a.T), C.Tensor(*b.T), (*C.int64_t)(&r))))
	return r != 0
}

// ExpandAs torch.expand_as: expands a to the size of other.
func ExpandAs(a, other Tensor) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.ExpandAs(C.Tensor(*a.T), C.Tensor(*other.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// ExpandAs torch.expand_as: method form of the package-level ExpandAs.
func (a Tensor) ExpandAs(other Tensor) Tensor {
	return ExpandAs(a, other)
}
// Flatten torch.flatten: collapses the dimensions from startDim through
// endDim (inclusive) into a single dimension.
func Flatten(a Tensor, startDim, endDim int64) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Flatten(C.Tensor(*a.T), C.int64_t(startDim), C.int64_t(endDim), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// IndexSelect torch.index_select: selects entries of a along dim at the
// positions given by index.
func IndexSelect(a Tensor, dim int64, index Tensor) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.IndexSelect(C.Tensor(*a.T), C.int64_t(dim), C.Tensor(*index.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// IndexSelect torch.index_select: method form of the package-level IndexSelect.
func (a Tensor) IndexSelect(dim int64, index Tensor) Tensor {
	return IndexSelect(a, dim, index)
}

// Item torch.item: returns the value of the tensor as a float32
// (presumably requires a single-element tensor, as torch.item does — confirm
// in the cgotorch C.Item binding).
func (a Tensor) Item() float32 {
	var t float32
	MustNil(unsafe.Pointer(C.Item(C.Tensor(*a.T), (*C.float)(&t))))
	return t
}
// LeakyRelu returns leaky relu of the tensor according to negativeSlope.
func LeakyRelu(t Tensor, negativeSlope float64) Tensor {
	return t.LeakyRelu(negativeSlope)
}

// LeakyRelu returns leaky relu of the tensor according to negativeSlope.
func (a *Tensor) LeakyRelu(negativeSlope float64) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.LeakyRelu(C.Tensor(*a.T), C.double(negativeSlope), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// LogSoftmax returns log softmax of the input tensor along dimension dim.
func LogSoftmax(t Tensor, dim int64) Tensor {
	return t.LogSoftmax(dim)
}

// LogSoftmax returns log softmax of the current tensor along dimension dim.
func (a Tensor) LogSoftmax(dim int64) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.LogSoftmax(C.Tensor(*a.T), C.int64_t(dim), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Mean returns the mean of the input tensor (torch.mean).
func Mean(t Tensor) Tensor {
	return t.Mean()
}

// Mean torch.mean: reduces the tensor to its mean value.
func (a Tensor) Mean() Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Mean(C.Tensor(*a.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// MM multiplies the two input tensors. NOTE(review): the name suggests
// torch.mm (matrix multiplication) rather than element-wise multiplication —
// confirm in the cgotorch C.MM binding.
func MM(a, b Tensor) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.MM(C.Tensor(*a.T), C.Tensor(*b.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Relu returns relu of the tensor.
func (a *Tensor) Relu() Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Relu(C.Tensor(*a.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Relu returns relu of the tensor.
func Relu(t Tensor) Tensor {
	return t.Relu()
}

// Sigmoid returns sigmoid of the current tensor.
func Sigmoid(t Tensor) Tensor {
	return t.Sigmoid()
}

// Sigmoid returns sigmoid of the current tensor.
func (a Tensor) Sigmoid() Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Sigmoid(C.Tensor(*a.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}
// Stack concatenates a sequence of tensors along a new dimension.
// Panics with a descriptive message when tensors is empty (previously this
// failed with a bare index-out-of-range panic on &CT[0]).
func Stack(tensors []Tensor, dim int64) Tensor {
	if len(tensors) == 0 {
		panic("gotorch: Stack requires at least one tensor")
	}
	// Pre-size the C handle slice; the backing array is handed to C.Stack.
	ct := make([]C.Tensor, 0, len(tensors))
	for _, t := range tensors {
		ct = append(ct, C.Tensor(*t.T))
	}
	p := (*C.Tensor)(unsafe.Pointer(&ct[0]))
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Stack(p, C.int64_t(len(ct)), C.int64_t(dim), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}
// Squeeze torch.squeeze. With no dim, all size-1 dimensions are removed; with
// a single dim, only that dimension is squeezed. More than one dim panics.
func Squeeze(t Tensor, dim ...int64) Tensor {
	if len(dim) > 1 {
		panic("Squeeze only accepts 0-1 dim as input")
	}
	return t.Squeeze(dim...)
}
// Squeeze tensor.squeeze. With no dim, every size-1 dimension is removed;
// with a single dim, only that dimension is squeezed; anything else panics.
func (a Tensor) Squeeze(dim ...int64) Tensor {
	var t C.Tensor
	switch len(dim) {
	case 0:
		MustNil(unsafe.Pointer(C.Squeeze(C.Tensor(*a.T), &t)))
	case 1:
		MustNil(unsafe.Pointer(C.SqueezeWithDim(C.Tensor(*a.T), C.int64_t(dim[0]), &t)))
	default:
		panic("Squeeze only accepts 0-1 dim as input")
	}
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}
// Sum returns the sum of all elements in the input tensor.
func Sum(a Tensor) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Sum(C.Tensor(*a.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// SumByDim torch.sum: sums along dim; keepDim keeps the reduced dimension
// with size 1 instead of dropping it.
func SumByDim(a Tensor, dim int64, keepDim bool) Tensor {
	// The C ABI takes the flag as an int8.
	k := 0
	if keepDim {
		k = 1
	}
	var t C.Tensor
	MustNil(unsafe.Pointer(C.SumByDim(C.Tensor(*a.T), C.int64_t(dim), C.int8_t(k), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// SumByDim torch.sum: method form of the package-level SumByDim.
func (a Tensor) SumByDim(dim int64, keepDim bool) Tensor {
	return SumByDim(a, dim, keepDim)
}

// Tanh returns tanh of the current tensor.
func Tanh(t Tensor) Tensor {
	return t.Tanh()
}

// Tanh returns tanh of the current tensor.
func (a Tensor) Tanh() Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Tanh(C.Tensor(*a.T), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}
// TopK torch.topk: returns the k largest (or smallest, when largest=false)
// elements of a along dim, plus their indices; sorted requests the results
// in sorted order.
func TopK(a Tensor, k, dim int64, largest, sorted bool) (Tensor, Tensor) {
	var values, indices C.Tensor
	// The C ABI takes the boolean flags as int8.
	l := 0
	if largest {
		l = 1
	}
	s := 0
	if sorted {
		s = 1
	}
	MustNil(unsafe.Pointer(C.TopK(C.Tensor(*a.T), C.int64_t(k), C.int64_t(dim),
		C.int8_t(l), C.int8_t(s), &values, &indices)))
	// BUG FIX: unlike every other tensor-returning function in this file,
	// no finalizers were registered here, so the underlying libtorch tensors
	// were never released.
	SetTensorFinalizer((*unsafe.Pointer)(&values))
	SetTensorFinalizer((*unsafe.Pointer)(&indices))
	return Tensor{(*unsafe.Pointer)(&values)}, Tensor{(*unsafe.Pointer)(&indices)}
}
// Transpose torch.transpose: returns a tensor with dim0 and dim1 swapped.
func Transpose(a Tensor, dim0, dim1 int64) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.Transpose(C.Tensor(*a.T), C.int64_t(dim0), C.int64_t(dim1), &t)))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}

// Transpose torch.transpose: method form of the package-level Transpose.
func (a Tensor) Transpose(dim0, dim1 int64) Tensor {
	return Transpose(a, dim0, dim1)
}

// View returns a new Tensor with the same data but of a different shape.
// NOTE(review): shape must be non-empty — &shape[0] panics on an empty slice.
func View(a Tensor, shape []int64) Tensor {
	var t C.Tensor
	MustNil(unsafe.Pointer(C.View(C.Tensor(*a.T), &t, (*C.int64_t)(unsafe.Pointer(&shape[0])), C.int64_t(len(shape)))))
	SetTensorFinalizer((*unsafe.Pointer)(&t))
	return Tensor{(*unsafe.Pointer)(&t)}
}
// View returns a new Tensor with the same data but of a different shape
// (method form of the package-level View).
func (a Tensor) View(shape []int64) Tensor {
	return View(a, shape)
} | tensor_ops.go | 0.796134 | 0.440349 | tensor_ops.go | starcoder
package binary
import "encoding/binary"
// Byte is the byte implementation of Component.
var Byte byteComponent

// Ubyte is the unsigned byte implementation of Component.
var Ubyte ubyteComponent

// Short is the signed short implementation of Component.
var Short shortComponent

// Ushort is the unsigned short implementation of Component.
var Ushort ushortComponent

// Uint is the unsigned int implementation of Component.
var Uint uintComponent

// Float is the float implementation of Component.
var Float floatComponent

// ubyteComponent reads and writes unsigned 8-bit element data.
type ubyteComponent struct{}

// ushortComponent reads and writes little-endian unsigned 16-bit element data.
type ushortComponent struct{}

// uintComponent reads and writes little-endian unsigned 32-bit element data.
type uintComponent struct{}
// Scalar reads a single unsigned byte from the front of b.
func (ubyteComponent) Scalar(b []byte) uint8 {
	return b[0]
}

// PutScalar stores a single unsigned byte at the front of b.
func (ubyteComponent) PutScalar(b []byte, v uint8) {
	b[0] = v
}

// Vec2 reads a 2-component unsigned byte vector from b.
func (ubyteComponent) Vec2(b []byte) (v [2]uint8) {
	for i := range v {
		v[i] = b[i]
	}
	return
}

// PutVec2 stores a 2-component unsigned byte vector into b.
func (ubyteComponent) PutVec2(b []byte, v [2]uint8) {
	for i, e := range v {
		b[i] = e
	}
}

// Vec3 reads a 3-component unsigned byte vector from b.
func (ubyteComponent) Vec3(b []byte) (v [3]uint8) {
	for i := range v {
		v[i] = b[i]
	}
	return
}

// PutVec3 stores a 3-component unsigned byte vector into b.
func (ubyteComponent) PutVec3(b []byte, v [3]uint8) {
	for i, e := range v {
		b[i] = e
	}
}

// Vec4 reads a 4-component unsigned byte vector from b.
func (ubyteComponent) Vec4(b []byte) (v [4]uint8) {
	for i := range v {
		v[i] = b[i]
	}
	return
}

// PutVec4 stores a 4-component unsigned byte vector into b.
func (ubyteComponent) PutVec4(b []byte, v [4]uint8) {
	for i, e := range v {
		b[i] = e
	}
}

// Mat2 reads a column-major 2x2 unsigned byte matrix from b; each column
// starts on a 4-byte boundary (column stride 4).
func (ubyteComponent) Mat2(b []byte) (v [2][2]uint8) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			v[r][c] = b[c*4+r]
		}
	}
	return
}

// PutMat2 stores a column-major 2x2 unsigned byte matrix into b
// (column stride 4).
func (ubyteComponent) PutMat2(b []byte, v [2][2]uint8) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			b[c*4+r] = v[r][c]
		}
	}
}

// Mat3 reads a column-major 3x3 unsigned byte matrix from b
// (column stride 4).
func (ubyteComponent) Mat3(b []byte) (v [3][3]uint8) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			v[r][c] = b[c*4+r]
		}
	}
	return
}

// PutMat3 stores a column-major 3x3 unsigned byte matrix into b
// (column stride 4).
func (ubyteComponent) PutMat3(b []byte, v [3][3]uint8) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			b[c*4+r] = v[r][c]
		}
	}
}

// Mat4 reads a column-major 4x4 unsigned byte matrix from b
// (column stride 4).
func (ubyteComponent) Mat4(b []byte) (v [4][4]uint8) {
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			v[r][c] = b[c*4+r]
		}
	}
	return
}

// PutMat4 stores a column-major 4x4 unsigned byte matrix into b
// (column stride 4).
func (ubyteComponent) PutMat4(b []byte, v [4][4]uint8) {
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			b[c*4+r] = v[r][c]
		}
	}
}
// getUint16 decodes a little-endian uint16 from the front of b.
func getUint16(b []byte) uint16 {
	return binary.LittleEndian.Uint16(b)
}

// Scalar reads a single little-endian uint16 from the front of b.
func (ushortComponent) Scalar(b []byte) uint16 {
	return getUint16(b)
}

// PutScalar stores a single little-endian uint16 at the front of b.
func (ushortComponent) PutScalar(b []byte, v uint16) {
	binary.LittleEndian.PutUint16(b, v)
}

// Vec2 reads a 2-component uint16 vector from b.
func (ushortComponent) Vec2(b []byte) (v [2]uint16) {
	for i := range v {
		v[i] = getUint16(b[2*i:])
	}
	return
}

// PutVec2 stores a 2-component uint16 vector into b.
func (ushortComponent) PutVec2(b []byte, v [2]uint16) {
	for i, e := range v {
		binary.LittleEndian.PutUint16(b[2*i:], e)
	}
}

// Vec3 reads a 3-component uint16 vector from b.
func (ushortComponent) Vec3(b []byte) (v [3]uint16) {
	for i := range v {
		v[i] = getUint16(b[2*i:])
	}
	return
}

// PutVec3 stores a 3-component uint16 vector into b.
func (ushortComponent) PutVec3(b []byte, v [3]uint16) {
	for i, e := range v {
		binary.LittleEndian.PutUint16(b[2*i:], e)
	}
}

// Vec4 reads a 4-component uint16 vector from b.
func (ushortComponent) Vec4(b []byte) (v [4]uint16) {
	for i := range v {
		v[i] = getUint16(b[2*i:])
	}
	return
}

// PutVec4 stores a 4-component uint16 vector into b.
func (ushortComponent) PutVec4(b []byte, v [4]uint16) {
	for i, e := range v {
		binary.LittleEndian.PutUint16(b[2*i:], e)
	}
}

// Mat2 reads a column-major 2x2 uint16 matrix from b; columns start 4 bytes
// apart (column stride 4).
func (ushortComponent) Mat2(b []byte) (v [2][2]uint16) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			v[r][c] = getUint16(b[c*4+r*2:])
		}
	}
	return
}

// PutMat2 stores a column-major 2x2 uint16 matrix into b (column stride 4).
func (ushortComponent) PutMat2(b []byte, v [2][2]uint16) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			binary.LittleEndian.PutUint16(b[c*4+r*2:], v[r][c])
		}
	}
}

// Mat3 reads a column-major 3x3 uint16 matrix from b; columns start 8 bytes
// apart (column stride 8: 6 data bytes plus 2 padding bytes).
func (ushortComponent) Mat3(b []byte) (v [3][3]uint16) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			v[r][c] = getUint16(b[c*8+r*2:])
		}
	}
	return
}

// PutMat3 stores a column-major 3x3 uint16 matrix into b (column stride 8).
func (ushortComponent) PutMat3(b []byte, v [3][3]uint16) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			binary.LittleEndian.PutUint16(b[c*8+r*2:], v[r][c])
		}
	}
}

// Mat4 reads a column-major 4x4 uint16 matrix from b (column stride 8).
func (ushortComponent) Mat4(b []byte) (v [4][4]uint16) {
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			v[r][c] = getUint16(b[c*8+r*2:])
		}
	}
	return
}

// PutMat4 stores a column-major 4x4 uint16 matrix into b (column stride 8).
func (ushortComponent) PutMat4(b []byte, v [4][4]uint16) {
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			binary.LittleEndian.PutUint16(b[c*8+r*2:], v[r][c])
		}
	}
}
// getUint32 decodes a little-endian uint32 from the front of b.
func getUint32(b []byte) uint32 {
	return binary.LittleEndian.Uint32(b)
}

// Scalar reads a single little-endian uint32 from the front of b.
func (uintComponent) Scalar(b []byte) uint32 {
	return getUint32(b)
}

// PutScalar stores a single little-endian uint32 at the front of b.
func (uintComponent) PutScalar(b []byte, v uint32) {
	binary.LittleEndian.PutUint32(b, v)
}

// Vec2 reads a 2-component uint32 vector from b.
func (uintComponent) Vec2(b []byte) (v [2]uint32) {
	for i := range v {
		v[i] = getUint32(b[4*i:])
	}
	return
}

// PutVec2 stores a 2-component uint32 vector into b.
func (uintComponent) PutVec2(b []byte, v [2]uint32) {
	for i, e := range v {
		binary.LittleEndian.PutUint32(b[4*i:], e)
	}
}

// Vec3 reads a 3-component uint32 vector from b.
func (uintComponent) Vec3(b []byte) (v [3]uint32) {
	for i := range v {
		v[i] = getUint32(b[4*i:])
	}
	return
}

// PutVec3 stores a 3-component uint32 vector into b.
func (uintComponent) PutVec3(b []byte, v [3]uint32) {
	for i, e := range v {
		binary.LittleEndian.PutUint32(b[4*i:], e)
	}
}

// Vec4 reads a 4-component uint32 vector from b.
func (uintComponent) Vec4(b []byte) (v [4]uint32) {
	for i := range v {
		v[i] = getUint32(b[4*i:])
	}
	return
}

// PutVec4 stores a 4-component uint32 vector into b.
func (uintComponent) PutVec4(b []byte, v [4]uint32) {
	for i, e := range v {
		binary.LittleEndian.PutUint32(b[4*i:], e)
	}
}

// Mat2 reads a column-major 2x2 uint32 matrix from b; columns start 8 bytes
// apart (column stride 8).
func (uintComponent) Mat2(b []byte) (v [2][2]uint32) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			v[r][c] = getUint32(b[c*8+r*4:])
		}
	}
	return
}

// PutMat2 stores a column-major 2x2 uint32 matrix into b (column stride 8).
func (uintComponent) PutMat2(b []byte, v [2][2]uint32) {
	for c := 0; c < 2; c++ {
		for r := 0; r < 2; r++ {
			binary.LittleEndian.PutUint32(b[c*8+r*4:], v[r][c])
		}
	}
}

// Mat3 reads a column-major 3x3 uint32 matrix from b (column stride 12).
func (uintComponent) Mat3(b []byte) (v [3][3]uint32) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			v[r][c] = getUint32(b[c*12+r*4:])
		}
	}
	return
}

// PutMat3 stores a column-major 3x3 uint32 matrix into b (column stride 12).
func (uintComponent) PutMat3(b []byte, v [3][3]uint32) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			binary.LittleEndian.PutUint32(b[c*12+r*4:], v[r][c])
		}
	}
}

// Mat4 reads a column-major 4x4 uint32 matrix from b (column stride 16).
func (uintComponent) Mat4(b []byte) (v [4][4]uint32) {
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			v[r][c] = getUint32(b[c*16+r*4:])
		}
	}
	return
}
// PutMat4 stores a column-major 4x4 uint32 matrix into b, little-endian,
// with each column starting 16 bytes apart.
func (uintComponent) PutMat4(b []byte, v [4][4]uint32) {
	binary.LittleEndian.PutUint32(b, v[0][0])
	binary.LittleEndian.PutUint32(b[4:], v[1][0])
	binary.LittleEndian.PutUint32(b[8:], v[2][0])
	binary.LittleEndian.PutUint32(b[12:], v[3][0])
	binary.LittleEndian.PutUint32(b[16:], v[0][1])
	binary.LittleEndian.PutUint32(b[20:], v[1][1])
	binary.LittleEndian.PutUint32(b[24:], v[2][1])
	binary.LittleEndian.PutUint32(b[28:], v[3][1])
	binary.LittleEndian.PutUint32(b[32:], v[0][2])
	binary.LittleEndian.PutUint32(b[36:], v[1][2])
	binary.LittleEndian.PutUint32(b[40:], v[2][2])
	binary.LittleEndian.PutUint32(b[44:], v[3][2])
	binary.LittleEndian.PutUint32(b[48:], v[0][3])
	binary.LittleEndian.PutUint32(b[52:], v[1][3])
	binary.LittleEndian.PutUint32(b[56:], v[2][3])
	binary.LittleEndian.PutUint32(b[60:], v[3][3])
} | binary/binary.go | 0.691289 | 0.431225 | binary.go | starcoder
package accessors
import (
"encoding/json"
"fmt"
"log"
"math"
"math/rand"
"strconv"
"time"
"github.com/kellydunn/golang-geo"
)
// nearbyEnemyCap is how many enemies should exist within the requested
// radius of the player; the pool is topped up to this count on every dump.
const nearbyEnemyCap = 300

// DumpDatabase returns a JSON array of all enemies within radius of the
// player's position (units follow WithinRadius / GreatCircleDistance —
// presumably kilometers; confirm), topping up the enemy pool first so the
// area always holds nearbyEnemyCap enemies. Database and parse failures
// panic via log.Panic, matching the rest of this file.
func (ag *AccessorGroup) DumpDatabase(userLatitude float64, userLongitude float64, radius float64) (string, error) {
	currentEnemyCount, err := ag.CountNearbyEnemies(userLatitude, userLongitude, radius)
	if err != nil {
		log.Panic(err)
	}
	if currentEnemyCount < nearbyEnemyCap {
		// Add enemies
		ag.AddEnemies(userLatitude, userLongitude, radius, currentEnemyCount, nearbyEnemyCap)
	}

	rows, err := ag.DB.Query("SELECT * FROM enemies")
	if err != nil {
		log.Panic(err)
	}
	defer rows.Close()

	columns, err := rows.Columns()
	if err != nil {
		log.Panic(err)
	}

	count := len(columns)
	tableData := make([]map[string]string, 0)
	values := make([]interface{}, count)
	valuePtrs := make([]interface{}, count)
	for rows.Next() {
		for i := 0; i < count; i++ {
			valuePtrs[i] = &values[i]
		}
		// BUG FIX: the Scan error was previously ignored.
		if err := rows.Scan(valuePtrs...); err != nil {
			log.Panic(err)
		}
		entry := make(map[string]string)
		for i, col := range columns {
			val := values[i]
			if val != nil {
				entry[col] = fmt.Sprintf("%s", string(val.([]byte))) // Save the data as a string
			}
		}
		// BUG FIX: the original tested "latitude" twice and never verified
		// that longitude was present before parsing it.
		if len(entry["latitude"]) == 0 || len(entry["longitude"]) == 0 {
			continue // skip rows with missing coordinates
		}
		latitude, err := strconv.ParseFloat(entry["latitude"], 64)
		if err != nil {
			log.Panic(err)
		}
		longitude, err := strconv.ParseFloat(entry["longitude"], 64)
		if err != nil {
			log.Panic(err)
		}
		// Only return enemies that are close to the player.
		if WithinRadius(latitude, longitude, userLatitude, userLongitude, radius) {
			tableData = append(tableData, entry)
		}
	}

	jsonData, err := json.Marshal(tableData)
	if err != nil {
		log.Panic(err)
	}
	return string(jsonData), nil
}
// CountNearbyEnemies returns how many enemies in the database lie within
// radius of the given position. Rows with missing coordinates are skipped;
// database and parse failures panic via log.Panic, matching DumpDatabase.
func (ag *AccessorGroup) CountNearbyEnemies(userLatitude float64, userLongitude float64, radius float64) (int, error) {
	enemyCount := 0
	rows, err := ag.DB.Query("SELECT * FROM enemies")
	if err != nil {
		log.Panic(err)
	}
	defer rows.Close()

	columns, err := rows.Columns()
	if err != nil {
		log.Panic(err)
	}

	count := len(columns)
	values := make([]interface{}, count)
	valuePtrs := make([]interface{}, count)
	for rows.Next() {
		for i := 0; i < count; i++ {
			valuePtrs[i] = &values[i]
		}
		// BUG FIX: the Scan error was previously ignored.
		if err := rows.Scan(valuePtrs...); err != nil {
			log.Panic(err)
		}
		entry := make(map[string]string)
		for i, col := range columns {
			val := values[i]
			if val != nil {
				entry[col] = fmt.Sprintf("%s", string(val.([]byte))) // Save the data as a string
			}
		}
		// BUG FIX: the original tested "latitude" twice and never verified
		// that longitude was present before parsing it.
		if len(entry["latitude"]) == 0 || len(entry["longitude"]) == 0 {
			continue // skip rows with missing coordinates
		}
		latitude, err := strconv.ParseFloat(entry["latitude"], 64)
		if err != nil {
			log.Panic(err)
		}
		longitude, err := strconv.ParseFloat(entry["longitude"], 64)
		if err != nil {
			log.Panic(err)
		}
		// Only count enemies that are close to the player.
		if WithinRadius(latitude, longitude, userLatitude, userLongitude, radius) {
			enemyCount++
		}
	}
	return enemyCount, nil
}
// WithinRadius reports whether the two coordinates are strictly closer than
// radius to each other along the great circle. Units are whatever
// geo.Point.GreatCircleDistance returns — presumably kilometers; confirm.
func WithinRadius(lat1 float64, lon1 float64, lat2 float64, lon2 float64, radius float64) bool {
	p1 := geo.NewPoint(lat1, lon1)
	p2 := geo.NewPoint(lat2, lon2)
	// Idiom fix: return the comparison directly instead of if/else true/false.
	return p1.GreatCircleDistance(p2) < radius
}
// AddEnemies inserts enough randomly placed enemies around the player to
// bring the count up to enemyCap. Each enemy is placed uniformly within
// roughly radius of the player: radius/111 converts kilometers to degrees of
// latitude, and the sqrt sample makes the placement area-uniform inside the
// circle. NOTE(review): longitude offsets are not corrected for latitude, so
// the circle is distorted away from the equator — confirm this is acceptable.
func (ag *AccessorGroup) AddEnemies(userLatitude float64, userLongitude float64, radius float64, currentEnemyCount int, enemyCap int) {
	iterations := enemyCap - currentEnemyCount
	// Reseed the global RNG from the wall clock on every call.
	rand.Seed(time.Now().UTC().UnixNano())
	for i := 0; i < iterations; i++ {
		// Random point in a disc: r = R*sqrt(u), angle t = 2*pi*u.
		w := radius / 111 * math.Sqrt(rand.Float64())
		t := 2 * math.Pi * rand.Float64()
		x := w * math.Cos(t)
		y := w * math.Sin(t)
		randomLatitude := userLatitude + x
		randomLongitude := userLongitude + y
		_, err := ag.DB.Exec("INSERT INTO enemies (latitude, longitude) VALUES (?,?)", randomLatitude, randomLongitude)
		if err != nil {
			log.Panic(err)
		}
	}
}
// DeleteEnemy removes the enemy with the given id from the enemies table.
// It always returns ("Success", nil); a database failure panics via log.Panic.
func (ag *AccessorGroup) DeleteEnemy(enemyID int) (string, error) {
	_, err := ag.DB.Exec("DELETE FROM enemies WHERE id=?", enemyID)
	if err != nil {
		log.Panic(err)
	}
	return "Success", nil
} | accessors/database.go | 0.583559 | 0.441673 | database.go | starcoder
package testh
import (
"reflect"
"testing"
)
// AssertEqual fails the test immediately when expected and got are not equal
// (compared with ==).
func AssertEqual(msg string, expected, got interface{}, t *testing.T) {
	if expected == got {
		return
	}
	t.Errorf("%s. Expected value: %v, Got: %v", msg, expected, got)
	t.FailNow()
}
// AssertNotNil fails the test immediately when v is nil.
// NOTE(review): an interface holding a typed nil pointer compares non-nil
// here and therefore passes — confirm that is acceptable to callers.
func AssertNotNil(msg string, v interface{}, t *testing.T) {
	if v != nil {
		return
	}
	t.Errorf("%s. Got: %v", msg, v)
	t.FailNow()
}
// AssertDeepEqual fails the test immediately when expected and got are not
// deeply equal (per reflect.DeepEqual).
func AssertDeepEqual(msg string, expected, got interface{}, t *testing.T) {
	if reflect.DeepEqual(expected, got) {
		return
	}
	t.Errorf("%s. Expected value: %v, Got: %v", msg, expected, got)
	t.FailNow()
}
// AssertNoErr fails the test immediately when err is non-nil.
func AssertNoErr(msg string, err error, t *testing.T) {
	if err == nil {
		return
	}
	t.Errorf("%s. Got Err: %v", msg, err)
	t.FailNow()
}
// AssertErr fails the test immediately when err is nil.
func AssertErr(msg string, err error, t *testing.T) {
	if err != nil {
		return
	}
	t.Errorf("%s. Got no Err: %v", msg, err)
	t.FailNow()
}
// AssertContainsObj fails the test immediately unless arr has at least one
// element equal to v (compared with ==).
func AssertContainsObj(msg string, arr []interface{}, v interface{}, t *testing.T) {
	found := false
	for _, item := range arr {
		if item == v {
			found = true
			break
		}
	}
	if !found {
		t.Errorf("%s. The array: %v does not contain object: %v", msg, arr, v)
		t.FailNow()
	}
}
// AssertDeepContainsObj fails the test immediately unless arr has at least
// one element deeply equal to v (per reflect.DeepEqual).
func AssertDeepContainsObj(msg string, arr []interface{}, v interface{}, t *testing.T) {
	found := false
	for _, item := range arr {
		if reflect.DeepEqual(item, v) {
			found = true
			break
		}
	}
	if !found {
		t.Errorf("%s. The array: %v does not contain object: %v", msg, arr, v)
		t.FailNow()
	}
}
// AssertContainsAny fails the test immediately unless at least one element
// of x is also present in y (membership via == on interface values; using
// unhashable elements such as slices will panic).
func AssertContainsAny(msg string, x []interface{}, y []interface{}, t *testing.T) {
	// Build a set of y's elements for O(1) membership checks.
	set := make(map[interface{}]struct{}, len(y))
	for _, item := range y {
		set[item] = struct{}{}
	}
	for _, item := range x {
		if _, found := set[item]; found {
			return
		}
	}
	t.Errorf("%s. The array: %v does not contain any of the array's object: %v", msg, x, y)
	t.FailNow()
}
// AssertContainsAll fails the test immediately unless every element of y is
// also present in x (membership via == on interface values; unhashable
// elements such as slices will panic when used as map keys).
func AssertContainsAll(msg string, x []interface{}, y []interface{}, t *testing.T) {
	// Build a set of x's elements for O(1) membership checks.
	v := make(map[interface{}]struct{})
	for _, xv := range x {
		v[xv] = struct{}{}
	}
	for _, yv := range y {
		if _, ok := v[yv]; !ok {
			t.Errorf("%s. The array: %v does not contain all of the array's object: %v. Missing object: %v", msg, x, y, yv)
			t.FailNow()
		}
	}
} | testh/assert.go | 0.592431 | 0.479808 | assert.go | starcoder
package genworldvoronoi
import (
"math"
"math/rand"
"github.com/Flokey82/go_gens/vectors"
opensimplex "github.com/ojrac/opensimplex-go"
)
// Map bundles the sphere mesh with all per-region, per-triangle, and
// per-side data produced by the world generator.
type Map struct {
	BaseObject
	t_flow         []float64      // Triangle flow intensity (rainfall)
	t_downflow_s   []int          // Triangle mapping to side through which water flows downhill.
	order_t        []int          // Triangles in uphill order of elevation.
	s_flow         []float64      // Flow intensity through sides
	r_windvec      []Vertex       // Point / region wind vector
	r_plate        []int          // Point / region to plate mapping
	r_territory    []int          // Point / region mapping to territory (political)
	PlateVectors   []vectors.Vec3 // Plate tectonics / movement vectors
	PlateIsOcean   map[int]bool   // Plate was chosen to be an ocean plate
	plate_r        []int          // Plate seed points / regions
	cities_r       []int          // City seed points / regions
	NumPlates      int            // Number of generated plates
	NumPoints      int            // Number of generated points / regions
	NumCities      int            // Number of generated cities (regions)
	NumTerritories int            // Number of generated territories
	QuadGeom       *QuadGeometry  // Quad geometry generated from the mesh (?)
}
// NewMap creates a world map on a sphere approximated by numPoints jittered
// mesh points, splits it into numPlates tectonic plates, and runs the full
// generation pipeline (see generateMap). The same seed yields the same world.
func NewMap(seed int64, numPlates, numPoints int, jitter float64) (*Map, error) {
	result, err := MakeSphere(seed, numPoints, jitter)
	if err != nil {
		return nil, err
	}
	mesh := result.mesh

	// Allocate all per-region/per-triangle/per-side buffers up front.
	m := &Map{
		PlateIsOcean: make(map[int]bool),
		BaseObject: BaseObject{
			r_xyz:            result.r_xyz,
			r_latLon:         result.r_latLon,
			r_elevation:      make([]float64, mesh.numRegions),
			r_moisture:       make([]float64, mesh.numRegions),
			r_flux:           make([]float64, mesh.numRegions),
			r_pool:           make([]float64, mesh.numRegions),
			r_rainfall:       make([]float64, mesh.numRegions),
			r_downhill:       make([]int, mesh.numRegions),
			r_drainage:       make([]int, mesh.numRegions),
			t_pool:           make([]float64, mesh.numTriangles),
			t_elevation:      make([]float64, mesh.numTriangles),
			t_moisture:       make([]float64, mesh.numTriangles),
			r_waterbodies:    make([]int, mesh.numRegions),
			r_waterbody_size: make(map[int]int),
			r_lake_size:      make(map[int]int),
			seed:             seed,
			rand:             rand.New(rand.NewSource(seed)),
			noise:            opensimplex.New(seed),
			mesh:             result.mesh,
		},
		t_downflow_s:   make([]int, mesh.numTriangles),
		order_t:        make([]int, mesh.numTriangles),
		t_flow:         make([]float64, mesh.numTriangles),
		s_flow:         make([]float64, mesh.numSides),
		r_windvec:      make([]Vertex, mesh.numRegions),
		NumPlates:      numPlates,
		NumPoints:      numPoints,
		NumTerritories: 10, // default number of political territories
		NumCities:      50, // default number of cities
		QuadGeom:       NewQuadGeometry(),
	}
	m.QuadGeom.setMesh(mesh)
	m.generateTriangleCenters()
	m.generateMap()
	return m, nil
}
// generateMap runs the generation pipeline in order: plates, elevation,
// rainfall, hydrology, settlements, and finally the render geometry.
// Commented-out calls are disabled/experimental steps kept for reference.
func (m *Map) generateMap() {
	// Plates.
	m.generatePlates()
	m.assignOceanPlates()

	// Elevation.
	m.assignRegionElevation()

	// River / moisture.
	// m.assignRegionMoisture()
	m.assignRainfall(10)
	// m.assignFlux()

	// Hydrology (based on regions) - EXPERIMENTAL
	m.assignDownhill()
	m.assignFlux()
	//m.makeItRain()
	// m.getRivers(9000.1)
	// m.r_elevation = m.rErode(0.05)

	// Place cities and territories in regions.
	m.rPlaceNCities(m.NumCities)
	m.rPlaceNTerritories(m.NumTerritories)

	// Hydrology (based on triangles)
	m.assignTriangleValues()
	// m.assignDownflow()
	// m.assignFlow()

	// Quad geometry update.
	m.QuadGeom.setMap(m.mesh, m)
}
// Plates

// pickRandomRegions picks n random points/regions from the given mesh.
// The RNG is reset first, so the selection is deterministic per seed; fewer
// than n regions are returned only when the mesh has fewer regions.
func (m *Map) pickRandomRegions(mesh *TriangleMesh, n int) []int {
	m.resetRand()
	chosen_r := make(map[int]bool) // set of already-chosen regions
	for len(chosen_r) < n && len(chosen_r) < mesh.numRegions {
		chosen_r[m.rand.Intn(mesh.numRegions)] = true
	}
	return convToArray(chosen_r)
}
// getCentroidOfTriangle returns the normalized centroid of the triangle
// spanned by the three points a, b, and c (each an xyz coordinate slice).
func getCentroidOfTriangle(a, b, c []float64) vectors.Vec3 {
	var sum [3]float64
	for _, p := range [][]float64{a, b, c} {
		sum[0] += p[0]
		sum[1] += p[1]
		sum[2] += p[2]
	}
	return vectors.Vec3{
		X: sum[0] / 3,
		Y: sum[1] / 3,
		Z: sum[2] / 3,
	}.Normalize()
}
// const Infinity = 1.0

// assignDistanceField calculates the distance from any point in seeds_r to all other points, but
// don't go past any point in stop_r. Regions never reached keep distance -1.
func (m *Map) assignDistanceField(seeds_r []int, stop_r map[int]bool) []float64 {
	// Reset the RNG so the randomized expansion order is deterministic per seed.
	m.resetRand()
	mesh := m.mesh
	numRegions := mesh.numRegions

	// -1 marks "not yet reached".
	r_distance := make([]float64, numRegions)
	for i := range r_distance {
		r_distance[i] = -1 // was: Infinity
	}

	var queue []int
	for _, r := range seeds_r {
		queue = append(queue, r)
		r_distance[r] = 0
	}

	// Random search adapted from breadth first search: each step expands a
	// random queued region instead of the oldest one; the half-swap moves the
	// displaced entry into the picked slot so nothing is lost.
	var out_r []int
	for queue_out := 0; queue_out < len(queue); queue_out++ {
		pos := queue_out + m.rand.Intn(len(queue)-queue_out)
		current_r := queue[pos]
		queue[pos] = queue[queue_out]
		for _, neighbor_r := range mesh.r_circulate_r(out_r, current_r) {
			if r_distance[neighbor_r] == -1 && !stop_r[neighbor_r] {
				r_distance[neighbor_r] = r_distance[current_r] + 1
				queue = append(queue, neighbor_r)
			}
		}
	}

	// TODO: possible enhancement: keep track of which seed is closest
	// to this point, so that we can assign variable mountain/ocean
	// elevation to each seed instead of them always being +1/-1
	return r_distance
}
// assignDistanceField2 is assignDistanceField with per-region compression
// applied: positive compression subtracts from the step distance and negative
// compression adds to it, each normalized by the strongest compression of its
// sign. Unreached regions keep distance -1.
func (m *Map) assignDistanceField2(seeds_r []int, stop_r map[int]bool, compression map[int]float64) []float64 {
	// Toggles for the two compression adjustments below.
	enableNegativeCompression := true
	enablePositiveCompression := true

	m.resetRand()
	mesh := m.mesh
	numRegions := mesh.numRegions

	// -1 marks "not yet reached".
	r_distance := make([]float64, numRegions)
	for i := range r_distance {
		r_distance[i] = -1 // was: Infinity
	}

	var queue []int
	for _, r := range seeds_r {
		queue = append(queue, r)
		r_distance[r] = 0
	}

	// Extreme compression values, used to normalize the adjustments to [0, 1].
	maxComp := 0.0
	minComp := 0.0
	for _, comp := range compression {
		if comp > maxComp {
			maxComp = comp
		}
		if comp < minComp {
			minComp = comp
		}
	}

	// Random search adapted from breadth first search.
	var out_r []int
	for queue_out := 0; queue_out < len(queue); queue_out++ {
		pos := queue_out + m.rand.Intn(len(queue)-queue_out)
		current_r := queue[pos]
		current_comp := compression[current_r]
		current_dist := r_distance[current_r]
		queue[pos] = queue[queue_out]
		for _, neighbor_r := range mesh.r_circulate_r(out_r, current_r) {
			if r_distance[neighbor_r] == -1 && !stop_r[neighbor_r] {
				r_distance[neighbor_r] = current_dist + 1
				if current_comp > 0 && enablePositiveCompression {
					// Positive compression shortens the effective distance.
					r_distance[neighbor_r] -= current_comp / maxComp
				} else if current_comp < 0 && enableNegativeCompression {
					// Negative compression lengthens it (both terms negative,
					// so the ratio added here is positive).
					r_distance[neighbor_r] += current_comp / minComp
				}
				queue = append(queue, neighbor_r)
			}
		}
	}

	// TODO: possible enhancement: keep track of which seed is closest
	// to this point, so that we can assign variable mountain/ocean
	// elevation to each seed instead of them always being +1/-1
	return r_distance
}
// persistence controls how quickly successive noise octaves decay.
const persistence = 2.0 / 3.0

// amplitudes holds the per-octave weights used by fbm_noise; filled in init.
var amplitudes []float64

func init() {
	// Five octaves with geometrically decaying amplitude persistence^i.
	amplitudes = make([]float64, 5)
	for i := range amplitudes {
		amplitudes[i] = math.Pow(persistence, float64(i))
	}
}
// fbm_noise evaluates fractal Brownian motion noise at (nx, ny, nz): a
// weighted sum of len(amplitudes) octaves of simplex noise, divided by the
// total weight so the output stays in the range of a single octave.
func (m *Map) fbm_noise(nx, ny, nz float64) float64 {
	sum := 0.0
	sumOfAmplitudes := 0.0
	for octave := 0; octave < len(amplitudes); octave++ {
		frequency := 1 << octave // each octave doubles the sampling frequency
		sum += amplitudes[octave] * m.noise.Eval3(nx*float64(frequency), ny*float64(frequency), nz*float64(frequency))
		sumOfAmplitudes += amplitudes[octave]
	}
	return sum / sumOfAmplitudes
} | genworldvoronoi/genworldvoronoi.go | 0.521959 | 0.481149 | genworldvoronoi.go | starcoder
package fps
import (
"math"
"github.com/go-gl/glfw/v3.2/glfw"
"github.com/go-gl/mathgl/mgl32"
)
// FPS moves in the view direction while the viewing direction can be changed.
type FPS struct {
	width  int        // viewport width in pixels
	height int        // viewport height in pixels
	theta  float32    // vertical viewing angle in degrees, clamped to (0, 180) by Rotate
	phi    float32    // horizontal viewing angle in degrees, wrapped to [0, 360) by Rotate
	dir    mgl32.Vec3 // normalized view direction, derived from theta/phi in Update
	speed  float32    // movement speed
	Pos    mgl32.Vec3 // camera position in world space
	Target mgl32.Vec3 // point the camera looks at (Pos + dir)
	Up     mgl32.Vec3 // camera up vector
	Right  mgl32.Vec3 // camera right vector
	Fov    float32    // field of view in degrees
	Near   float32    // near clipping plane distance
	Far    float32    // far clipping plane distance
}
// MakeDefault creates a FPS camera with the viewport of width and height and a position.
// It uses a field of view of 45 degrees and near and far planes at 0.1 and
// 1000.0 respectively (the original comment incorrectly said 100.0).
func MakeDefault(width, height int, pos mgl32.Vec3, speed float32) FPS {
	return Make(width, height, pos, speed, 45, 0.1, 1000.0)
}

// NewDefault creates a reference to a FPS camera with the viewport of width and height and a position.
// It uses a field of view of 45 degrees and near and far planes at 0.1 and
// 1000.0 respectively (the original comment incorrectly said 100.0).
func NewDefault(width, height int, pos mgl32.Vec3, speed float32) *FPS {
	return New(width, height, pos, speed, 45, 0.1, 1000.0)
}
// Make creates a FPS camera with the viewport of width and height, positioned at pos.
// The supplied field of view (degrees) and near/far plane distances are stored for the
// projection matrices. The camera initially looks along +Z (theta = phi = 90).
func Make(width, height int, pos mgl32.Vec3, speed, fov, near, far float32) FPS {
	dir := mgl32.Vec3{0.0, 0.0, 1.0}
	camera := FPS{
		width:  width,
		height: height,
		theta:  90.0,
		phi:    90.0,
		dir:    dir,
		speed:  speed,
		Pos:    pos,
		Target: pos.Add(dir),
		Up:     mgl32.Vec3{0, 1, 0},
		Right:  mgl32.Vec3{1, 0, 0},
		Fov:    fov,
		Near:   near,
		Far:    far,
	}
	// Derive dir/Target/Up/Right consistently from the initial angles.
	camera.Update()
	return camera
}
// New creates a reference to a FPS camera with the viewport of width and height,
// positioned at pos, with the supplied field of view (degrees) and near/far planes.
func New(width, height int, pos mgl32.Vec3, speed, fov, near, far float32) *FPS {
	camera := Make(width, height, pos, speed, fov, near, far)
	return &camera
}
// Update recalculates the viewing direction and the derived Target, Right and
// Up vectors from the current theta/phi angles.
// Call it every time after calling Rotate or Zoom.
func (camera *FPS) Update() {
	// Spherical coordinates (with inverted y), converted to radians.
	polar := float64(mgl32.DegToRad(camera.theta))
	azimuth := float64(mgl32.DegToRad(camera.phi))
	sinT := math.Sin(polar)
	dir := mgl32.Vec3{
		float32(sinT * math.Cos(azimuth)),
		-float32(math.Cos(polar)),
		float32(sinT * math.Sin(azimuth)),
	}
	camera.dir = dir.Normalize()
	camera.Target = camera.Pos.Add(camera.dir)
	// Rebuild the orthonormal camera basis from the world up vector.
	look := camera.dir.Mul(-1)
	camera.Right = mgl32.Vec3{0.0, 1.0, 0.0}.Cross(look).Normalize()
	camera.Up = look.Cross(camera.Right)
}
// Rotate adds delta angles in degrees to the theta (vertical) and phi
// (horizontal) angles, clamping theta to (0, 180) and wrapping phi into
// [0, 360).
func (camera *FPS) Rotate(theta, phi float32) {
	camera.theta += theta
	camera.phi += phi
	// Clamp the vertical angle just short of the poles.
	switch {
	case camera.theta > 179.9:
		camera.theta = 179.9
	case camera.theta < 0.01:
		camera.theta = 0.01
	}
	// Wrap the horizontal angle by one revolution.
	switch {
	case camera.phi < 0:
		camera.phi += 360
	case camera.phi >= 360:
		camera.phi -= 360
	}
}
// Zoom changes the radius of the camera to the target point.
// NOTE(review): intentionally a no-op here — an FPS camera keeps its target
// at a fixed unit distance (Pos + dir); the method exists to satisfy the
// shared camera callback interface.
func (camera *FPS) Zoom(distance float32) {}
// GetPos returns the position of the camera in world space.
func (camera *FPS) GetPos() mgl32.Vec3 {
	return camera.Pos
}
// GetView returns the view matrix of the camera: a look-at matrix from Pos
// toward Target with the current Up vector.
func (camera *FPS) GetView() mgl32.Mat4 {
	return mgl32.LookAtV(camera.Pos, camera.Target, camera.Up)
}
// GetPerspective returns the perspective projection of the camera, built from
// its field of view, viewport aspect ratio and near/far planes.
func (camera *FPS) GetPerspective() mgl32.Mat4 {
	return mgl32.Perspective(
		mgl32.DegToRad(camera.Fov),
		float32(camera.width)/float32(camera.height),
		camera.Near,
		camera.Far,
	)
}
// GetOrtho returns the orthographic projection of the camera. The half-extent
// d is chosen as Far*tan(Fov/2), i.e. the half-height of the perspective
// frustum at the far plane, so the orthographic volume covers a comparable
// region.
func (camera *FPS) GetOrtho() mgl32.Mat4 {
	angle := camera.Fov * math.Pi / 180.0
	// The intermediate dfar/d pair collapsed into a single variable.
	d := float32(math.Tan(float64(angle/2.0))) * camera.Far
	return mgl32.Ortho(-d, d, -d, d, camera.Near, camera.Far)
}
// GetViewPerspective returns the combined projection-view matrix P*V.
func (camera *FPS) GetViewPerspective() mgl32.Mat4 {
	return camera.GetPerspective().Mul4(camera.GetView())
}
// SetPos moves the camera to pos and updates Target so the viewing direction
// is preserved. Contrary to Rotate, no Update call is needed afterwards: the
// direction vectors are unchanged and Target is recomputed here directly.
func (camera *FPS) SetPos(pos mgl32.Vec3) {
	camera.Pos = pos
	camera.Target = camera.Pos.Add(camera.dir)
}
// OnCursorPosMove is a callback handler that is called every time the cursor
// moves. Vertical cursor movement rotates theta, horizontal movement rotates
// phi; both deltas are halved. Always returns false (the event is not
// consumed).
func (camera *FPS) OnCursorPosMove(x, y, dx, dy float64) bool {
	// The original double negation on the horizontal delta cancels out:
	// Rotate receives (-dy/2, +dx/2).
	camera.Rotate(float32(-dy)/2.0, float32(dx)/2.0)
	return false
}
// OnMouseButtonPress is a callback handler that is called every time a mouse
// button is pressed or released. Button events are ignored by this camera;
// it always returns false.
func (camera *FPS) OnMouseButtonPress(leftPressed, rightPressed bool) bool {
	return false
}
// OnMouseScroll is a callback handler that is called every time the mouse
// wheel moves. Scroll events are ignored by this camera; it always returns
// false.
func (camera *FPS) OnMouseScroll(x, y float64) bool {
	return false
}
// OnKeyPress is a callback handler that is called every time a keyboard key
// is pressed. W/S move the camera along the view direction and A/D strafe
// along the right vector, each by one speed step. Always returns false.
func (camera *FPS) OnKeyPress(key, action, mods int) bool {
	forward := camera.dir.Mul(camera.speed)
	strafe := camera.Right.Mul(camera.speed)
	switch key {
	case int(glfw.KeyW):
		camera.Pos = camera.Pos.Add(forward)
	case int(glfw.KeyS):
		camera.Pos = camera.Pos.Sub(forward)
	case int(glfw.KeyA):
		camera.Pos = camera.Pos.Sub(strafe)
	case int(glfw.KeyD):
		camera.Pos = camera.Pos.Add(strafe)
	}
	return false
}
// OnResize is a callback handler that is called every time the window is
// resized. It stores the new viewport dimensions used by GetPerspective.
// Always returns false.
func (camera *FPS) OnResize(width, height int) bool {
	camera.width, camera.height = width, height
	return false
} | pkg/scene/camera/fps/fps.go | 0.927773 | 0.690729 | fps.go | starcoder
package man

// apispecPage is the markdown-formatted manual page for the "akita apispec"
// command. NOTE(review): the "<bt>" markers appear to stand in for literal
// backticks — confirm with the renderer that consumes this page.
var apispecPage = `
# === apispec ===
# Description
Upload traces to the Akita Cloud or use traces already stored on Akita Cloud to generate your OpenAPI3 specification.
# Examples
## akita apispec --service my-service --traces ./mytrace.har
Generates a spec from a local trace file and outputs it to stdout.
## akita apispec --service my-service --traces ./trace1.har --traces akita://my-service:trace:trace2
Generates a spec from a combination of local trace file and trace file on Akita cloud.
# Required Flags
## --traces []location
The locations to read traces from. Can be a mix of AkitaURI and local file paths.
When specifying a local file, Akita reads the HAR file and uploads it to the Akita cloud.
When specifying an AkitaURI, the format is "akita://{SERVICE}:trace:{NAME}", where "SERVICE" is the name of your service and "NAME" is the name of the trace on Akita Cloud.
# Optional Flags
## --out location
The location to store the spec. Can be an AkitaURI or a local file.
If unspecified, defaults to a new spec on Akita Cloud. Note that you must also set <bt>--service<bt>.
When specifying a local file, Akita writes the spec to the file. Note that you must also set <bt>--service<bt> when outputting to a local file.
To specify <bt>stdout<bt>, use <bt>--out="-"<bt>.
When specifying an AkitaURI, the format is "akita://{SERVICE}:spec" or "akita://{SERVICE}:spec:{NAME}", where "SERVICE" is the name of your service and "NAME" is the name of the spec to create. A spec name will be generated if "NAME" is not provided.
## --service string
Akita cloud service to use to generate the spec. Only needed if --out is not specified or is not an AkitaURI.
## --cluster string
Akita cloud cluster to use to generate the spec (alias for 'service'). Only needed if --out is not specified or is not an AkitaURI.
## --format {yaml|json}
Output format for the OpenAPI 3 specification. Supports 'yaml' and 'json'.
Default is 'yaml'.
## --from-time string
## --to-time string
If provided, only trace events occurring in the given time range will be used to build the spec. Expected format is 'YYYY-MM-DD hh:mm:ss'. If desired, the 'hh:mm:ss' or the ':ss' can be omitted, in which case the start of the day or minute is used. The client's local time is assumed. If a given time occurs during a transition to or from daylight saving time, then one side of the transition is arbitrarily chosen.
## --tags []string
Add tags to the spec.
You may specify a comma separated list of "key=value" pairs (e.g. <bt>--tags a=b,c=d<bt>) or multiple separate flags (e.g. <bt>--tags a=b --tags c=d<bt>)
## --path-parameters []path-prefix
A path prefix is composed of components separated by "/". There are 3 types of components:
1. A concrete path value
2. A path parameter of the form <bt>{parameter_name}<bt>
3. A placeholder <bt>^<bt> to indicate that the component must retain the value in the trace verbatim and NOT get generalized. It behaves like a wildcard when matching path prefixes to paths.
Paths in the trace that match this path prefix are updated to use path parameters and respect placeholders that are specified.
If a path matches multiple prefixes, Akita selects the longest matching prefix.
Example 1: Simple prefix match
<bt>--path-parameters="/v1/{my_param}"<bt>
|Akita inferred endpoint|Post-processed endpoint|
|---|---|
|/v1/foo|/v1/{my_param}|
|/v1/x/y|/v1/{my_param}/y|
|/v1/{arg2}/z|/v1/{my_param}/z|
Example 2: Longest prefix match
<bt>--path-parameters="/v1/{my_param},/v1/{my_param}/123/{other_param}"<bt>
|Akita inferred endpoint|Post-processed endpoint|
|---|---|
|/v1/foo|/v1/{my_param}|
|/v1/x/123/abc|/v1/{my_param}/123/{other_param}|
|/v1/x/456/def|/v1/{my_param}/456/def|
Example 3: Akita inferred path retained
<bt>--path-parameters="/v1/foo/{param}/bar"<bt>
|Akita inferred endpoint|Post-processed endpoint|
|---|---|
|/v1/foo/x|/v1/foo/x|
|/v1/foo/baz/bar|/v1/foo/{param}/bar|
|/v1/xyz/baz/bar|/v1/xyz/baz/bar|
|/v1/{arg2}/x/bar|/v1/{arg2}/x/bar|
In this example, the endpoint /v1/{arg2}/x/bar will remain if the trace contains requests that match that endpoint with concrete path arguments in the second position that are not "foo", e.g. /v1/123/x/bar. To force the removal of the path parameter, use the placeholder ("^") component.
Example 4: Placeholder component
<bt>--path-parameters="/v1/^/{param}/bar"<bt>
|Akita inferred endpoint|Post-processed endpoint|
|---|---|
|/v1/foo/x|/v1/foo/x|
|/v1/foo/baz/bar|/v1/foo/{param}/bar|
|/v1/xyz/baz/bar|/v1/xyz/{param}/bar|
|/v1/{arg2}/x/bar|/v1/123/{param}/bar|
## --path-exclusions []string
Removes HTTP paths matching regular expressions.
For example, to filter out requests fetching files with png or jpg extensions, you can specify <bt>--path-exclusions ".*\.png" --path-exclusions ".*\.jpg"<bt>
## --infer-field-relations bool
If true, enables analysis to determine related fields in your API.
## --include-trackers bool
By default, Akita automatically filters out requests to common third-party trackers in the trace.
Set this flag to true to include them.
# GitHub Integration Flags
The following flags are needed to enable GitHub integration.
## --github-branch string
Name of github branch that this spec belongs to.
## --github-commit string
SHA of github commit that this spec belongs to.
## --github-pr int
GitHub PR number that this spec belongs to.
## --github-repo string
GitHub repo name of the form <repo_owner>/<repo_name> that this spec belongs to.
# GitLab Integration Flags
## --gitlab-mr string
GitLab merge request IID (note not ID).
For more detail on IID vs ID, see https://docs.gitlab.com/ee/api/#id-vs-iid
## --gitlab-project string
Gitlab project ID or URL-encoded path.
For more detail, see https://docs.gitlab.com/ee/api/README.html#namespaced-path-encoding
## --gitlab-branch string
Name of gitlab branch that this spec belongs to.
## --gitlab-commit string
SHA of gitlab commit that this spec belongs to.
` | cmd/internal/man/apispec_page.go | 0.849254 | 0.428114 | apispec_page.go | starcoder |
package planner
import (
"sort"
"github.com/open-policy-agent/opa/ast"
)
// funcstack implements a simple map structure used to keep track of virtual
// document => planned function names. The structure supports Push and Pop
// operations so that the planner can shadow planned functions when 'with'
// statements are found.
type funcstack struct {
	stack []map[string]string // scope stack; the innermost (active) scope is the last element
	gen   int                 // bumped on every Push/Pop — presumably lets callers detect scope changes
}
// newFuncstack returns a funcstack containing a single empty (global) scope.
func newFuncstack() *funcstack {
	return &funcstack{
		// gofmt -s form: the inner map's element type is elided.
		stack: []map[string]string{{}},
		gen:   0,
	}
}
// Add records value as the planned function name for key in the innermost
// (current) scope.
//
// The receiver is a pointer for consistency with Push/Pop; the previous value
// receiver only worked because the underlying map is shared by the copy.
func (p *funcstack) Add(key, value string) {
	p.stack[len(p.stack)-1][key] = value
}
// Get returns the planned function name for key in the innermost scope and
// whether an entry was present.
//
// The receiver is a pointer for consistency with the other funcstack methods.
func (p *funcstack) Get(key string) (string, bool) {
	value, ok := p.stack[len(p.stack)-1][key]
	return value, ok
}
// Push makes funcs the new innermost scope (shadowing existing entries) and
// advances the generation counter.
func (p *funcstack) Push(funcs map[string]string) {
	p.stack = append(p.stack, funcs)
	p.gen++
}
// Pop removes and returns the innermost scope, advancing the generation
// counter.
func (p *funcstack) Pop() map[string]string {
	top := len(p.stack) - 1
	popped := p.stack[top]
	p.stack = p.stack[:top]
	p.gen++
	return popped
}
// ruletrie implements a simple trie structure for organizing rules that may be
// planned. The trie nodes are keyed by the rule path. The ruletrie supports
// Push and Pop operations that allow the planner to shadow subtrees when 'with'
// statements are found.
type ruletrie struct {
	children map[ast.Value][]*ruletrie // per key, a stack of nodes; a nil top entry marks a shadowed subtree
	rules    []*ast.Rule               // rules terminating at this node
}
// newRuletrie returns an empty ruletrie node.
func newRuletrie() *ruletrie {
	return &ruletrie{
		children: map[ast.Value][]*ruletrie{},
	}
}
// Arity returns the number of arguments of the rules stored at this node, or
// 0 when no rules are present.
func (t *ruletrie) Arity() int {
	if rules := t.Rules(); len(rules) > 0 {
		return len(rules[0].Head.Args)
	}
	return 0
}
// Rules returns the rules stored at this node. It is safe to call on a nil
// receiver, in which case it returns nil.
func (t *ruletrie) Rules() []*ast.Rule {
	if t == nil {
		return nil
	}
	return t.rules
}
// Push shadows the subtree at key by appending a nil entry to the child stack
// held by key's parent node. It is a no-op when the parent path does not
// exist (or is itself shadowed).
func (t *ruletrie) Push(key ast.Ref) {
	node := t
	for i := 0; i < len(key)-1; i++ {
		node = node.Get(key[i].Value)
		if node == nil {
			return
		}
	}
	elem := key[len(key)-1]
	node.children[elem.Value] = append(node.children[elem.Value], nil)
}
// Pop undoes a matching Push, removing the most recent (possibly nil) child
// entry at key. It is a no-op when the parent path does not exist (or is
// itself shadowed).
func (t *ruletrie) Pop(key ast.Ref) {
	node := t
	for i := 0; i < len(key)-1; i++ {
		node = node.Get(key[i].Value)
		if node == nil {
			return
		}
	}
	elem := key[len(key)-1]
	sl := node.children[elem.Value]
	node.children[elem.Value] = sl[:len(sl)-1]
}
// Insert creates (or finds) the node at key and returns it, materializing
// any missing intermediate nodes along the way.
func (t *ruletrie) Insert(key ast.Ref) *ruletrie {
	node := t
	for _, term := range key {
		next := node.Get(term.Value)
		if next == nil {
			next = newRuletrie()
			node.children[term.Value] = append(node.children[term.Value], next)
		}
		node = next
	}
	return node
}
// Lookup returns the node at key, or nil when the path does not exist or is
// shadowed.
func (t *ruletrie) Lookup(key ast.Ref) *ruletrie {
	node := t
	for _, term := range key {
		if node = node.Get(term.Value); node == nil {
			return nil
		}
	}
	return node
}
// LookupOrInsert returns the node at key, creating it if it does not exist.
func (t *ruletrie) LookupOrInsert(key ast.Ref) *ruletrie {
	node := t.Lookup(key)
	if node == nil {
		node = t.Insert(key)
	}
	return node
}
// Children returns the keys of this node's visible (non-shadowed) children in
// sorted order.
func (t *ruletrie) Children() []ast.Value {
	keys := make([]ast.Value, 0, len(t.children))
	for key := range t.children {
		// Skip keys whose top stack entry is nil (shadowed subtrees).
		if t.Get(key) != nil {
			keys = append(keys, key)
		}
	}
	sort.Slice(keys, func(i, j int) bool {
		return keys[i].Compare(keys[j]) < 0
	})
	return keys
}
// Get returns the current (top of stack) child node for k. It returns nil on
// a nil receiver, an unknown key, or a shadowed (nil top entry) subtree.
func (t *ruletrie) Get(k ast.Value) *ruletrie {
	if t == nil {
		return nil
	}
	if nodes := t.children[k]; len(nodes) > 0 {
		return nodes[len(nodes)-1]
	}
	return nil
} | vendor/github.com/open-policy-agent/opa/internal/planner/rules.go | 0.564819 | 0.619299 | rules.go | starcoder
package xprocess_schema
import (
"reflect"
"strconv"
)
// Converter converts a string form value into a reflect.Value of the
// corresponding kind. The zero (invalid) reflect.Value signals a failed
// conversion.
type Converter func(string) reflect.Value

var (
	// invalidValue is the zero reflect.Value, returned by every converter
	// when parsing fails.
	invalidValue = reflect.Value{}
	// NOTE(review): despite the "...Type" suffix these are reflect.Kind
	// values (not reflect.Types); they key the builtinConverters map below.
	boolType    = reflect.Bool
	float32Type = reflect.Float32
	float64Type = reflect.Float64
	intType     = reflect.Int
	int8Type    = reflect.Int8
	int16Type   = reflect.Int16
	int32Type   = reflect.Int32
	int64Type   = reflect.Int64
	stringType  = reflect.String
	uintType    = reflect.Uint
	uint8Type   = reflect.Uint8
	uint16Type  = reflect.Uint16
	uint32Type  = reflect.Uint32
	uint64Type  = reflect.Uint64
)

// Default converters for basic types, keyed by reflect.Kind.
var builtinConverters = map[reflect.Kind]Converter{
	boolType:    convertBool,
	float32Type: convertFloat32,
	float64Type: convertFloat64,
	intType:     convertInt,
	int8Type:    convertInt8,
	int16Type:   convertInt16,
	int32Type:   convertInt32,
	int64Type:   convertInt64,
	stringType:  convertString,
	uintType:    convertUint,
	uint8Type:   convertUint8,
	uint16Type:  convertUint16,
	uint32Type:  convertUint32,
	uint64Type:  convertUint64,
}
// convertBool parses value as a boolean. The HTML-form checkbox value "on"
// is treated as true; anything else goes through strconv.ParseBool.
func convertBool(value string) reflect.Value {
	if value == "on" {
		return reflect.ValueOf(true)
	}
	v, err := strconv.ParseBool(value)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(v)
}

// convertFloat32 parses value as a 32-bit float.
func convertFloat32(value string) reflect.Value {
	v, err := strconv.ParseFloat(value, 32)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(float32(v))
}

// convertFloat64 parses value as a 64-bit float.
func convertFloat64(value string) reflect.Value {
	v, err := strconv.ParseFloat(value, 64)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(v)
}
// convertInt parses value as a base-10 platform-width signed integer.
func convertInt(value string) reflect.Value {
	v, err := strconv.ParseInt(value, 10, 0)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(int(v))
}

// convertInt8 parses value as a base-10 8-bit signed integer.
func convertInt8(value string) reflect.Value {
	v, err := strconv.ParseInt(value, 10, 8)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(int8(v))
}

// convertInt16 parses value as a base-10 16-bit signed integer.
func convertInt16(value string) reflect.Value {
	v, err := strconv.ParseInt(value, 10, 16)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(int16(v))
}

// convertInt32 parses value as a base-10 32-bit signed integer.
func convertInt32(value string) reflect.Value {
	v, err := strconv.ParseInt(value, 10, 32)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(int32(v))
}

// convertInt64 parses value as a base-10 64-bit signed integer.
func convertInt64(value string) reflect.Value {
	v, err := strconv.ParseInt(value, 10, 64)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(v)
}
// convertString wraps value in a reflect.Value unchanged; string conversion
// cannot fail.
func convertString(value string) reflect.Value {
	return reflect.ValueOf(value)
}
// convertUint parses value as a base-10 platform-width unsigned integer.
func convertUint(value string) reflect.Value {
	v, err := strconv.ParseUint(value, 10, 0)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(uint(v))
}

// convertUint8 parses value as a base-10 8-bit unsigned integer.
func convertUint8(value string) reflect.Value {
	v, err := strconv.ParseUint(value, 10, 8)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(uint8(v))
}

// convertUint16 parses value as a base-10 16-bit unsigned integer.
func convertUint16(value string) reflect.Value {
	v, err := strconv.ParseUint(value, 10, 16)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(uint16(v))
}

// convertUint32 parses value as a base-10 32-bit unsigned integer.
func convertUint32(value string) reflect.Value {
	v, err := strconv.ParseUint(value, 10, 32)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(uint32(v))
}
// convertUint64 parses value as a base-10 64-bit unsigned integer.
func convertUint64(value string) reflect.Value {
	v, err := strconv.ParseUint(value, 10, 64)
	if err != nil {
		return invalidValue
	}
	return reflect.ValueOf(v)
} | xprocess_schema/converter.go | 0.613931 | 0.427875 | converter.go | starcoder
package ast
// ParseNode represents a function to parse ast nodes: it consumes input via
// the given Parser and returns the parsed node or an error.
type ParseNode func(p *Parser) (*Node, error)
// Node is a simple node in a tree with double linked lists instead of slices to
// keep track of its siblings and children. A node is either a value or a
// parent node.
type Node struct {
	// Type of the node; an index into TypeStrings (see TypeString).
	Type int
	// TypeStrings contains all the string representations of the available types.
	TypeStrings []string
	// Value of the node. Only meaningful if it has no children.
	Value string
	// Parent is the parent node, or nil for a root/detached node.
	Parent *Node
	// PreviousSibling is the previous sibling of the node, or nil if first.
	PreviousSibling *Node
	// NextSibling is the next sibling of the node, or nil if last.
	NextSibling *Node
	// FirstChild is the first child of the node, or nil for a value node.
	FirstChild *Node
	// LastChild is the last child of the node, or nil for a value node.
	LastChild *Node
}
// TypeString returns the string representation of the type, i.e.
// TypeStrings[Type]. It returns "UNKNOWN" when Type is out of range or
// TypeStrings is empty.
func (n *Node) TypeString() string {
	if n.Type < 0 || n.Type >= len(n.TypeStrings) {
		return "UNKNOWN"
	}
	return n.TypeStrings[n.Type]
}
// IsParent returns whether the node has children and thus is not a value node.
func (n *Node) IsParent() bool {
	return n.FirstChild != nil
}
// Children returns all the children of the node in order, or nil when the
// node has none.
func (n *Node) Children() []*Node {
	var children []*Node
	for child := n.FirstChild; child != nil; child = child.NextSibling {
		children = append(children, child)
	}
	return children
}
// Remove detaches the node from its parent and siblings and returns it.
// Its own children stay attached, so the whole subtree moves with it.
func (n *Node) Remove() *Node {
	if n.Parent != nil {
		// Unhook n from the parent's first-child reference.
		if n.Parent.FirstChild == n {
			n.Parent.FirstChild = n.NextSibling
		}
		// Unhook n from the parent's last-child reference.
		if n.Parent.LastChild == n {
			n.Parent.LastChild = n.PreviousSibling
		}
		n.Parent = nil
	}
	if n.PreviousSibling != nil {
		// Set the next sibling of the previous sibling to the next.
		n.PreviousSibling.NextSibling = n.NextSibling
	}
	if n.NextSibling != nil {
		// Set the previous sibling of the next sibling to the previous.
		n.NextSibling.PreviousSibling = n.PreviousSibling
	}
	n.NextSibling = nil
	n.PreviousSibling = nil
	return n
}
// Adopt moves all of other's children (recursively, one at a time) to the end
// of n's children, preserving their order. other is left childless.
func (n *Node) Adopt(other *Node) {
	if other.FirstChild == nil {
		// Nothing left to adopt.
		return
	}
	n.SetLast(other.FirstChild.Remove())
	n.Adopt(other)
}
// SetPrevious inserts the given node as the previous sibling of n, removing
// it from its current position first.
//
// Fixes two bugs in the previous implementation: (1) the "already has a
// previous sibling" branch assigned sibling.PreviousSibling AFTER overwriting
// n.PreviousSibling, producing a self-referencing cycle; (2) the branch did
// not return, so the code below it re-linked the nodes and unconditionally
// clobbered the parent's FirstChild even when n was not the first child.
func (n *Node) SetPrevious(sibling *Node) {
	sibling.Remove()
	sibling.Parent = n.Parent
	if prev := n.PreviousSibling; prev != nil {
		// n already has a previous sibling: splice sibling between them.
		prev.NextSibling = sibling
		sibling.PreviousSibling = prev
		sibling.NextSibling = n
		n.PreviousSibling = sibling
		return
	}
	// n was the first child: sibling becomes the new first child.
	n.PreviousSibling = sibling
	sibling.NextSibling = n
	if n.Parent != nil {
		n.Parent.FirstChild = sibling
	}
}
// SetNext inserts the given node as the next sibling of n, removing it from
// its current position first.
//
// Fixes a bug in the previous implementation: in the "already has a next
// sibling" branch, sibling.NextSibling was assigned AFTER n.NextSibling had
// already been overwritten with sibling, producing a self-referencing cycle
// instead of linking sibling to the old next node.
func (n *Node) SetNext(sibling *Node) {
	sibling.Remove()
	sibling.Parent = n.Parent
	if next := n.NextSibling; next != nil {
		// n already has a next sibling: splice sibling between them.
		next.PreviousSibling = sibling
		sibling.NextSibling = next
		sibling.PreviousSibling = n
		n.NextSibling = sibling
		return
	}
	// n was the last child: sibling becomes the new last child.
	sibling.PreviousSibling = n
	n.NextSibling = sibling
	if n.Parent != nil {
		n.Parent.LastChild = sibling
	}
}
// SetFirst inserts the given node as the first child of the node, removing it
// from its current position first.
func (n *Node) SetFirst(child *Node) {
	child.Remove()
	// Delegate to SetPrevious when a first child already exists.
	if n.FirstChild != nil {
		n.FirstChild.SetPrevious(child)
		return
	}
	// No children present: child becomes both first and last child.
	child.Parent = n
	n.FirstChild = child
	n.LastChild = child
}
// SetLast inserts the given node as the last child of the node, removing it
// from its current position first.
func (n *Node) SetLast(child *Node) {
	child.Remove()
	// Delegate to SetNext when a last child already exists.
	if n.FirstChild != nil {
		n.LastChild.SetNext(child)
		return
	}
	// No children present: child becomes both first and last child.
	child.Parent = n
	n.FirstChild = child
	n.LastChild = child
} | ast/node.go | 0.780746 | 0.590573 | node.go | starcoder
package should
import "github.com/smartystreets/assertions"
// Package-level aliases for the assertion functions of
// github.com/smartystreets/assertions, allowing the more readable
// GoConvey-style spelling should.Equal, should.BeNil, etc.
var (
	// Equality and identity.
	Equal          = assertions.ShouldEqual
	NotEqual       = assertions.ShouldNotEqual
	AlmostEqual    = assertions.ShouldAlmostEqual
	NotAlmostEqual = assertions.ShouldNotAlmostEqual
	Resemble       = assertions.ShouldResemble
	NotResemble    = assertions.ShouldNotResemble
	PointTo        = assertions.ShouldPointTo
	NotPointTo     = assertions.ShouldNotPointTo
	BeNil          = assertions.ShouldBeNil
	NotBeNil       = assertions.ShouldNotBeNil
	BeTrue         = assertions.ShouldBeTrue
	BeFalse        = assertions.ShouldBeFalse
	BeZeroValue    = assertions.ShouldBeZeroValue
	// Numeric comparison and ranges.
	BeGreaterThan          = assertions.ShouldBeGreaterThan
	BeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo
	BeLessThan             = assertions.ShouldBeLessThan
	BeLessThanOrEqualTo    = assertions.ShouldBeLessThanOrEqualTo
	BeBetween              = assertions.ShouldBeBetween
	NotBeBetween           = assertions.ShouldNotBeBetween
	BeBetweenOrEqual       = assertions.ShouldBeBetweenOrEqual
	NotBeBetweenOrEqual    = assertions.ShouldNotBeBetweenOrEqual
	// Collections and membership.
	Contain       = assertions.ShouldContain
	NotContain    = assertions.ShouldNotContain
	ContainKey    = assertions.ShouldContainKey
	NotContainKey = assertions.ShouldNotContainKey
	BeIn          = assertions.ShouldBeIn
	NotBeIn       = assertions.ShouldNotBeIn
	BeEmpty       = assertions.ShouldBeEmpty
	NotBeEmpty    = assertions.ShouldNotBeEmpty
	HaveLength    = assertions.ShouldHaveLength
	// Strings.
	StartWith           = assertions.ShouldStartWith
	NotStartWith        = assertions.ShouldNotStartWith
	EndWith             = assertions.ShouldEndWith
	NotEndWith          = assertions.ShouldNotEndWith
	BeBlank             = assertions.ShouldBeBlank
	NotBeBlank          = assertions.ShouldNotBeBlank
	ContainSubstring    = assertions.ShouldContainSubstring
	NotContainSubstring = assertions.ShouldNotContainSubstring
	EqualWithout        = assertions.ShouldEqualWithout
	EqualTrimSpace      = assertions.ShouldEqualTrimSpace
	// Panics.
	Panic        = assertions.ShouldPanic
	NotPanic     = assertions.ShouldNotPanic
	PanicWith    = assertions.ShouldPanicWith
	NotPanicWith = assertions.ShouldNotPanicWith
	// Types and interfaces.
	HaveSameTypeAs    = assertions.ShouldHaveSameTypeAs
	NotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs
	Implement         = assertions.ShouldImplement
	NotImplement      = assertions.ShouldNotImplement
	// Time.
	HappenBefore         = assertions.ShouldHappenBefore
	HappenOnOrBefore     = assertions.ShouldHappenOnOrBefore
	HappenAfter          = assertions.ShouldHappenAfter
	HappenOnOrAfter      = assertions.ShouldHappenOnOrAfter
	HappenBetween        = assertions.ShouldHappenBetween
	HappenOnOrBetween    = assertions.ShouldHappenOnOrBetween
	NotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween
	HappenWithin         = assertions.ShouldHappenWithin
	NotHappenWithin      = assertions.ShouldNotHappenWithin
	BeChronological      = assertions.ShouldBeChronological
) | vendor/github.com/smartystreets/assertions/should/should.go | 0.649579 | 0.605187 | should.go | starcoder
package main
import (
"math"
"github.com/unixpickle/model3d/model2d"
"github.com/unixpickle/model3d/model3d"
)
// BoardSolid builds the solid for the puzzle board: one segment solid for
// every unit grid edge of the (size x size) board that is NOT occupied by one
// of the given digits, plus a surrounding border slab with the play area cut
// out of it.
func BoardSolid(a *Args, digits []Digit, size int) model3d.Solid {
	// Collect every horizontal and vertical unit edge of the grid.
	segments := map[Segment]bool{}
	for x := 0; x <= size; x++ {
		for y := 0; y <= size; y++ {
			l := Location{y, x}
			if x < size {
				segments[NewSegment(l, Location{y, x + 1})] = true
			}
			if y < size {
				segments[NewSegment(l, Location{y + 1, x})] = true
			}
		}
	}
	// Remove the edges occupied by the digits.
	for _, d := range digits {
		for _, s := range d {
			delete(segments, s)
		}
	}
	// Each remaining edge is rendered as a single-segment "digit" solid.
	var solids model3d.JoinedSolid
	for s := range segments {
		solids = append(solids, DigitSolid(a, Digit{s}))
	}
	// Border frame: an outer slab minus the (slightly enlarged) play area.
	border := a.BoardBorder + a.SegmentThickness/2
	solids = append(solids, &model3d.SubtractedSolid{
		Positive: &model3d.Rect{
			MinVal: model3d.XYZ(-border, -border, -a.BoardThickness),
			MaxVal: model3d.Coord3D{X: float64(size) + border, Y: float64(size) + border,
				Z: a.SegmentDepth},
		},
		Negative: &model3d.Rect{
			MinVal: model3d.Coord3D{
				X: -a.SegmentThickness / 2,
				Y: -a.SegmentThickness / 2,
			},
			MaxVal: model3d.Coord3D{
				X: float64(size) + a.SegmentThickness/2,
				Y: float64(size) + a.SegmentThickness/2,
				// Small epsilon so the subtraction fully pierces the slab top.
				Z: a.SegmentDepth + 1e-5,
			},
		},
	})
	return solids
}
// DigitSolid builds the extruded solid for a digit (a set of unit segments).
// Endpoints used by only one segment (tips) are pulled inward, while
// endpoints where two collinear segments meet are pushed outward so the
// pieces overlap and fuse; the combined 2D outline is then inset and
// extruded to SegmentDepth.
func DigitSolid(a *Args, d Digit) model3d.Solid {
	// points counts how many of the digit's segments touch each location.
	points := map[Location]int{}
	segmentSet := map[Segment]bool{}
	for _, s := range d {
		segmentSet[s] = true
		for _, l := range s {
			points[l] += 1
		}
	}
	var segments2d model2d.JoinedSolid
	for _, s := range d {
		p1 := model3d.Coord2D{X: float64(s[0][0]), Y: float64(s[0][1])}
		p2 := model3d.Coord2D{X: float64(s[1][0]), Y: float64(s[1][1])}
		// Move tips inward and connected points outward.
		// (s[1].Reflect(s[0]) is the collinear continuation of s beyond s[0].)
		if points[s[0]] == 1 {
			p1 = p1.Add(p2.Sub(p1).Normalize().Scale(a.SegmentTipInset))
		} else if segmentSet[NewSegment(s[0], s[1].Reflect(s[0]))] {
			p1 = p1.Sub(p2.Sub(p1).Normalize().Scale(a.SegmentJointOutset))
		}
		if points[s[1]] == 1 {
			p2 = p2.Add(p1.Sub(p2).Normalize().Scale(a.SegmentTipInset))
		} else if segmentSet[NewSegment(s[1], s[0].Reflect(s[1]))] {
			p2 = p2.Sub(p1.Sub(p2).Normalize().Scale(a.SegmentJointOutset))
		}
		segments2d = append(segments2d, &pointedSegment{
			Args:     a,
			P1:       p1,
			P2:       p2,
			Vertical: s[0][0] == s[1][0],
		})
	}
	// Mesh the union of segments, then inset the outline by SegmentInset
	// (presumably print-fit clearance — confirm against Args' docs).
	mesh2d := model2d.MarchingSquaresSearch(segments2d, 0.005, 8)
	collider2d := model2d.MeshToCollider(mesh2d)
	solid2d := model2d.NewColliderSolidInset(collider2d, a.SegmentInset)
	return model3d.ProfileSolid(solid2d, 0, a.SegmentDepth)
}
// pointedSegment is a 2D solid for a single digit segment running from P1 to
// P2, with ends that taper to points (see Contains).
type pointedSegment struct {
	Args *Args
	P1   model2d.Coord
	P2   model2d.Coord
	// Vertical reports whether P1.X == P2.X, i.e. the segment runs parallel
	// to the y axis; it selects which axis gets the thickness padding.
	Vertical bool
}
// Min returns the lower corner of the segment's bounding box: the
// component-wise minimum of the endpoints, padded by half the segment
// thickness perpendicular to the segment's direction.
func (p *pointedSegment) Min() model2d.Coord {
	c := p.P1.Min(p.P2)
	half := p.Args.SegmentThickness / 2
	if p.Vertical {
		c.X -= half
	} else {
		c.Y -= half
	}
	return c
}
// Max returns the upper corner of the segment's bounding box: the
// component-wise maximum of the endpoints, padded by half the segment
// thickness perpendicular to the segment's direction.
func (p *pointedSegment) Max() model2d.Coord {
	c := p.P1.Max(p.P2)
	half := p.Args.SegmentThickness / 2
	if p.Vertical {
		c.X += half
	} else {
		c.Y += half
	}
	return c
}
// Contains reports whether c lies inside the pointed segment: within the
// thickness-padded bounding box, except near the two endpoints where the
// allowed side clearance shrinks linearly toward zero, producing tapered
// (pointed) tips.
func (p *pointedSegment) Contains(c model2d.Coord) bool {
	if !model2d.InBounds(p, c) {
		return false
	}
	tip := p.Args.SegmentThickness / 2
	axis := p.P1.Sub(p.P2).Normalize()
	// Distance along the segment axis to the nearer endpoint.
	tipDist := math.Min(
		math.Abs(axis.Dot(c)-axis.Dot(p.P1)),
		math.Abs(axis.Dot(c)-axis.Dot(p.P2)),
	)
	if tipDist < tip {
		// Within the taper zone: the closer to the endpoint, the less side
		// clearance is allowed.
		tipInset := tip - tipDist
		sideDist := math.Abs(c.Y - p.P1.Y)
		if p.Vertical {
			sideDist = math.Abs(c.X - p.P1.X)
		}
		// Add a small epsilon so that segments touching at a
		// 90 degree angle definitely intersect.
		if sideDist+tipInset > tip+1e-5 {
			return false
		}
	}
	return true
} | examples/toys/number_puzzle/solid.go | 0.649801 | 0.464112 | solid.go | starcoder
package schema

// ModelSchema is the embedded JSON Schema ("docs/spec/spans/span.json")
// describing a single span event; presumably used to validate incoming span
// payloads — confirm at the call sites.
const ModelSchema = `{
"$id": "docs/spec/spans/span.json",
"type": "object",
"description": "An event captured by an agent occurring in a monitored service",
"allOf": [
{ "$id": "doc/spec/timestamp_epoch.json",
"title": "Timestamp Epoch",
"description": "Object with 'timestamp' property.",
"type": ["object"],
"properties": {
"timestamp": {
"description": "Recorded time of the event, UTC based and formatted as microseconds since Unix epoch",
"type": ["integer", "null"]
}
} },
{
"properties": {
"id": {
"description": "Hex encoded 64 random bits ID of the span.",
"type": "string",
"maxLength": 1024
},
"transaction_id": {
"type": ["string", "null"],
"description": "Hex encoded 64 random bits ID of the correlated transaction.",
"maxLength": 1024
},
"trace_id": {
"description": "Hex encoded 128 random bits ID of the correlated trace.",
"type": "string",
"maxLength": 1024
},
"parent_id": {
"description": "Hex encoded 64 random bits ID of the parent transaction or span.",
"type": "string",
"maxLength": 1024
},
"start": {
"type": ["number", "null"],
"description": "Offset relative to the transaction's timestamp identifying the start of the span, in milliseconds"
},
"subtype": {
"type": ["string", "null"],
"description": "A further sub-division of the type (e.g. postgresql, elasticsearch)",
"maxLength": 1024
},
"action": {
"type": ["string", "null"],
"description": "The specific kind of event within the sub-type represented by the span (e.g. query, connect)",
"maxLength": 1024
},
"context": {
"type": ["object", "null"],
"description": "Any other arbitrary data captured by the agent, optionally provided by the user",
"properties": {
"db": {
"type": ["object", "null"],
"description": "An object containing contextual data for database spans",
"properties": {
"instance": {
"type": ["string", "null"],
"description": "Database instance name"
},
"statement": {
"type": ["string", "null"],
"description": "A database statement (e.g. query) for the given database type"
},
"type": {
"type": ["string", "null"],
"description": "Database type. For any SQL database, \"sql\". For others, the lower-case database category, e.g. \"cassandra\", \"hbase\", or \"redis\""
},
"user": {
"type": ["string", "null"],
"description": "Username for accessing database"
}
}
},
"http": {
"type": ["object", "null"],
"description": "An object containing contextual data of the related http request.",
"properties": {
"url": {
"type": ["string", "null"],
"description": "The raw url of the correlating http request."
},
"status_code": {
"type": ["integer", "null"],
"description": "The status code of the http request."
},
"method": {
"type": ["string", "null"],
"maxLength": 1024,
"description": "The method of the http request."
}
}
},
"tags": {
"$id": "doc/spec/tags.json",
"title": "Tags",
"type": ["object", "null"],
"description": "A flat mapping of user-defined tags with string, boolean or number values.",
"patternProperties": {
"^[^.*\"]*$": {
"type": ["string", "boolean", "number", "null"],
"maxLength": 1024
}
},
"additionalProperties": false
},
"service": {
"description": "Service related information can be sent per event. Provided information will override the more generic information from metadata, non provided fields will be set according to the metadata information.",
"properties": {
"agent": {
"description": "Name and version of the Elastic APM agent",
"type": [
"object",
"null"
],
"properties": {
"name": {
"description": "Name of the Elastic APM agent, e.g. \"Python\"",
"type": [
"string",
"null"
],
"maxLength": 1024
},
"version": {
"description": "Version of the Elastic APM agent, e.g.\"1.0.0\"",
"type": [
"string",
"null"
],
"maxLength": 1024
}
}
},
"name": {
"description": "Immutable name of the service emitting this event",
"type": [
"string",
"null"
],
"pattern": "^[a-zA-Z0-9 _-]+$",
"maxLength": 1024
}
}
}
}
},
"duration": {
"type": "number",
"description": "Duration of the span in milliseconds"
},
"name": {
"type": "string",
"description": "Generic designation of a span in the scope of a transaction",
"maxLength": 1024
},
"stacktrace": {
"type": ["array", "null"],
"description": "List of stack frames with variable attributes (eg: lineno, filename, etc)",
"items": {
"$id": "docs/spec/stacktrace_frame.json",
"title": "Stacktrace",
"type": "object",
"description": "A stacktrace frame, contains various bits (most optional) describing the context of the frame",
"properties": {
"abs_path": {
"description": "The absolute path of the file involved in the stack frame",
"type": ["string", "null"]
},
"colno": {
"description": "Column number",
"type": ["integer", "null"]
},
"context_line": {
"description": "The line of code part of the stack frame",
"type": ["string", "null"]
},
"filename": {
"description": "The relative filename of the code involved in the stack frame, used e.g. to do error checksumming",
"type": "string"
},
"function": {
"description": "The function involved in the stack frame",
"type": ["string", "null"]
},
"library_frame": {
"description": "A boolean, indicating if this frame is from a library or user code",
"type": ["boolean", "null"]
},
"lineno": {
"description": "The line number of code part of the stack frame, used e.g. to do error checksumming",
"type": ["integer", "null"]
},
"module": {
"description": "The module to which frame belongs to",
"type": ["string", "null"]
},
"post_context": {
"description": "The lines of code after the stack frame",
"type": ["array", "null"],
"minItems": 0,
"items": {
"type": "string"
}
},
"pre_context": {
"description": "The lines of code before the stack frame",
"type": ["array", "null"],
"minItems": 0,
"items": {
"type": "string"
}
},
"vars": {
"description": "Local variables for this stack frame",
"type": ["object", "null"],
"properties": {}
}
},
"required": ["filename"]
},
"minItems": 0
},
"type": {
"type": "string",
"description": "Keyword of specific relevance in the service's domain (eg: 'db.postgresql.query', 'template.erb', etc)",
"maxLength": 1024
},
"sync": {
"type": ["boolean", "null"],
"description": "Indicates whether the span was executed synchronously or asynchronously."
}
},
"required": ["duration", "name", "type", "id","trace_id", "parent_id"]
},
{ "anyOf":[
{"required": ["timestamp"], "properties": {"timestamp": { "type": "integer" }}},
{"required": ["start"], "properties": {"start": { "type": "number" }}}
]
}
]
}
` | model/span/generated/schema/span.go | 0.85289 | 0.595316 | span.go | starcoder |
package when
import (
"time"
)
// abs returns the absolute value of the duration v.
func abs(v time.Duration) time.Duration {
	if v >= 0 {
		return v
	}
	return -v
}
// Timedelta represents a duration between two dates.
// All fields are optional and default to 0. You can initialize any type of
// timedelta by specifying field values which you want to use.
// Each field stores a count of its own unit (Days == 2 means two days);
// Duration() combines them into a single time.Duration.
type Timedelta struct {
	Days, Seconds, Microseconds, Milliseconds, Minutes, Hours, Weeks time.Duration
}
// Add returns the field-wise sum t+t2 as a new Timedelta.
func (t *Timedelta) Add(t2 *Timedelta) Timedelta {
	sum := *t
	sum.Days += t2.Days
	sum.Seconds += t2.Seconds
	sum.Microseconds += t2.Microseconds
	sum.Milliseconds += t2.Milliseconds
	sum.Minutes += t2.Minutes
	sum.Hours += t2.Hours
	sum.Weeks += t2.Weeks
	return sum
}
// Subtract returns the field-wise difference t-t2 as a new Timedelta.
func (t *Timedelta) Subtract(t2 *Timedelta) Timedelta {
	diff := *t
	diff.Days -= t2.Days
	diff.Seconds -= t2.Seconds
	diff.Microseconds -= t2.Microseconds
	diff.Milliseconds -= t2.Milliseconds
	diff.Minutes -= t2.Minutes
	diff.Hours -= t2.Hours
	diff.Weeks -= t2.Weeks
	return diff
}
// Abs returns a Timedelta in which every field of t has been replaced by its
// absolute value.
func (t *Timedelta) Abs() Timedelta {
	result := *t
	result.Days = abs(result.Days)
	result.Seconds = abs(result.Seconds)
	result.Microseconds = abs(result.Microseconds)
	result.Milliseconds = abs(result.Milliseconds)
	result.Minutes = abs(result.Minutes)
	result.Hours = abs(result.Hours)
	result.Weeks = abs(result.Weeks)
	return result
}
// Duration collapses all fields into a single time.Duration, which can be
// added directly to a time.Time.
func (t *Timedelta) Duration() time.Duration {
	total := (t.Weeks*7 + t.Days) * 24 * time.Hour
	total += t.Hours * time.Hour
	total += t.Minutes * time.Minute
	total += t.Seconds * time.Second
	total += t.Milliseconds * time.Millisecond
	total += t.Microseconds * time.Microsecond
	return total
}
// String returns a string representing the Timedelta's duration in the form "72h3m0.5s".
func (t *Timedelta) String() string {
return t.Duration().String()
} | vendor/github.com/zoumo/logdog/pkg/when/timedelta.go | 0.823719 | 0.490785 | timedelta.go | starcoder |
package proofs
import (
"fmt"
"sort"
ics23 "github.com/confio/ics23/go"
sdkmaps "github.com/KiraCore/cosmos-sdk/store/rootmulti/internal/maps"
)
// TendermintSpec constrains the format from ics23-tendermint (crypto/merkle SimpleProof)
var TendermintSpec = &ics23.ProofSpec{
	LeafSpec: &ics23.LeafOp{
		Prefix:       []byte{0}, // leaves are domain-separated with a 0x00 prefix
		Hash:         ics23.HashOp_SHA256,
		PrehashValue: ics23.HashOp_SHA256,
		Length:       ics23.LengthOp_VAR_PROTO,
	},
	InnerSpec: &ics23.InnerSpec{
		ChildOrder:      []int32{0, 1},
		MinPrefixLength: 1,
		MaxPrefixLength: 1,  // fixed prefix + one child
		ChildSize:       32, // (no length byte)
		Hash:            ics23.HashOp_SHA256,
	},
}
/*
CreateMembershipProof will produce a CommitmentProof that the given key (and queried value) exists in the iavl tree.
If the key doesn't exist in the tree, this will return an error.
*/
func CreateMembershipProof(data map[string][]byte, key []byte) (*ics23.CommitmentProof, error) {
	exist, err := createExistenceProof(data, key)
	if err != nil {
		return nil, err
	}
	return &ics23.CommitmentProof{
		Proof: &ics23.CommitmentProof_Exist{Exist: exist},
	}, nil
}
/*
CreateNonMembershipProof will produce a CommitmentProof that the given key doesn't exist in the iavl tree.
If the key exists in the tree, this will return an error.

The proof contains existence proofs for key's nearest neighbors: the greatest
key below it (Left) and the smallest key above it (Right). Either side is
omitted when key falls before the first or after the last key of the map.
*/
func CreateNonMembershipProof(data map[string][]byte, key []byte) (*ics23.CommitmentProof, error) {
	// ensure this key is not in the store
	if _, ok := data[string(key)]; ok {
		return nil, fmt.Errorf("cannot create non-membership proof if key is in map")
	}

	keys := SortedKeys(data)
	// rightidx is the position where key would be inserted in the sorted keys.
	rightidx := sort.SearchStrings(keys, string(key))

	var err error
	nonexist := &ics23.NonExistenceProof{
		Key: key,
	}

	// include left proof unless key is left of entire map
	if rightidx >= 1 {
		leftkey := keys[rightidx-1]
		nonexist.Left, err = createExistenceProof(data, []byte(leftkey))
		if err != nil {
			return nil, err
		}
	}

	// include right proof unless key is right of entire map
	if rightidx < len(keys) {
		rightkey := keys[rightidx]
		nonexist.Right, err = createExistenceProof(data, []byte(rightkey))
		if err != nil {
			return nil, err
		}
	}

	proof := &ics23.CommitmentProof{
		Proof: &ics23.CommitmentProof_Nonexist{
			Nonexist: nonexist,
		},
	}
	return proof, nil
}
func createExistenceProof(data map[string][]byte, key []byte) (*ics23.ExistenceProof, error) {
value, ok := data[string(key)]
if !ok {
return nil, fmt.Errorf("cannot make existence proof if key is not in map")
}
_, ics23, _ := sdkmaps.SimpleProofsFromMap(data)
proof := ics23[string(key)]
if proof == nil {
return nil, fmt.Errorf("returned no proof for key")
}
return ConvertExistenceProof(proof, key, value)
} | store/rootmulti/internal/proofs/create.go | 0.582016 | 0.407982 | create.go | starcoder |
package bls381
import (
"math/bits"
)
// GT target group of the pairing
type GT = e12

// lineEvaluation holds the three sparse e2 coefficients produced by
// evaluating a line function at a G1 point during the Miller loop.
type lineEvaluation struct {
	r0 e2
	r1 e2
	r2 e2
}
// FinalExponentiation computes the final expo x**(p**6-1)(p**2+1)(p**4 - p**2 +1)/r
// of the product of z and any extra arguments.
func FinalExponentiation(z *GT, _z ...*GT) GT {
	var acc GT
	acc.Set(z)
	for _, extra := range _z {
		acc.Mul(&acc, extra)
	}
	acc.FinalExponentiation(&acc)
	return acc
}
// FinalExponentiation sets z to the final expo x**((p**12 - 1)/r), returns z
func (z *GT) FinalExponentiation(x *GT) *GT {
	// cf https://eprint.iacr.org/2016/130.pdf
	var result GT
	result.Set(x)
	var t [6]GT

	// easy part
	t[0].Conjugate(&result)
	result.Inverse(&result)
	t[0].Mul(&t[0], &result)
	result.FrobeniusSquare(&t[0]).
		Mul(&result, &t[0])

	// hard part (up to permutation): addition chain built from repeated
	// exponentiation by the curve parameter (expt) and Frobenius maps.
	// The exact order of operations below must be preserved.
	t[0].InverseUnitary(&result).Square(&t[0])
	t[5].expt(&result)
	t[1].CyclotomicSquare(&t[5])
	t[3].Mul(&t[0], &t[5])

	t[0].expt(&t[3])
	t[2].expt(&t[0])
	t[4].expt(&t[2])

	t[4].Mul(&t[1], &t[4])
	t[1].expt(&t[4])
	t[3].InverseUnitary(&t[3])
	t[1].Mul(&t[3], &t[1])
	t[1].Mul(&t[1], &result)

	t[0].Mul(&t[0], &result)
	t[0].FrobeniusCube(&t[0])

	t[3].InverseUnitary(&result)
	t[4].Mul(&t[3], &t[4])
	t[4].Frobenius(&t[4])

	t[5].Mul(&t[2], &t[5])
	t[5].FrobeniusSquare(&t[5])

	t[5].Mul(&t[5], &t[0])
	t[5].Mul(&t[5], &t[4])
	t[5].Mul(&t[5], &t[1])

	result.Set(&t[5])
	z.Set(&result)
	return z
}
// MillerLoop Miller loop
//
// Returns 1 if either input is the point at infinity. Line evaluations are
// produced concurrently by preCompute and consumed here in order; each receive
// on ch guarantees evaluations[j] has been filled in.
func MillerLoop(P G1Affine, Q G2Affine) *GT {
	var result GT
	result.SetOne()

	if P.IsInfinity() || Q.IsInfinity() {
		return &result
	}

	ch := make(chan struct{}, 10)
	var evaluations [68]lineEvaluation
	go preCompute(&evaluations, &Q, &P, ch)

	j := 0
	// Square-and-multiply over the bits of loopCounter: one line evaluation
	// per doubling step, plus one per addition step (loopCounter[i] == 1).
	for i := len(loopCounter) - 2; i >= 0; i-- {
		result.Square(&result)
		<-ch
		result.mulAssign(&evaluations[j])
		j++

		if loopCounter[i] == 1 {
			<-ch
			result.mulAssign(&evaluations[j])
			j++
		}
	}

	return &result
}
// lineEval computes the evaluation of the line through Q, R (on the twist) at P
// Q, R are in jacobian coordinates
func lineEval(Q, R *G2Jac, P *G1Affine, result *lineEvaluation) {
	// converts _Q and _R to projective coords
	var _Q, _R g2Proj
	_Q.FromJacobian(Q)
	_R.FromJacobian(R)

	// Cross products of the projective coordinates give the raw line
	// coefficients (a sparse element of the target field).
	result.r1.Mul(&_Q.y, &_R.z)
	result.r0.Mul(&_Q.z, &_R.x)
	result.r2.Mul(&_Q.x, &_R.y)

	_Q.z.Mul(&_Q.z, &_R.y)
	_Q.x.Mul(&_Q.x, &_R.z)
	_Q.y.Mul(&_Q.y, &_R.x)

	result.r1.Sub(&result.r1, &_Q.z)
	result.r0.Sub(&result.r0, &_Q.x)
	result.r2.Sub(&result.r2, &_Q.y)

	// Scale by P's affine coordinates to complete the evaluation at P.
	result.r1.MulByElement(&result.r1, &P.X)
	result.r0.MulByElement(&result.r0, &P.Y)
}
// multiplies a result of a line evaluation to the current pairing result, taking care of mapping it
// back to the original. The line evaluation l is f(P) where div(f)=(P')+(Q')+(-P'-Q')-3(O), the support
// being on the twist.
func (z *GT) mulAssign(l *lineEvaluation) *GT {
	var a, b, c GT
	// Multiply z by each sparse coefficient of l (mapped back through the
	// v/w non-residue inverses), then sum the three partial products.
	a.mulByVWNRInv(z, &l.r1)
	b.mulByV2NRInv(z, &l.r0)
	c.mulByWNRInv(z, &l.r2)
	z.Add(&a, &b).Add(z, &c)

	return z
}
// precomputes the line evaluations used during the Miller loop.
//
// Runs in its own goroutine; each send on ch tells MillerLoop that the next
// entry of evaluations is ready. The channel is closed when all entries are done.
func preCompute(evaluations *[68]lineEvaluation, Q *G2Affine, P *G1Affine, ch chan struct{}) {
	var Q1, Q2, Qbuf G2Jac
	Q1.FromAffine(Q)
	Q2.FromAffine(Q)
	Qbuf.FromAffine(Q)

	j := 0

	for i := len(loopCounter) - 2; i >= 0; i-- {
		// Doubling step.
		Q1.Set(&Q2)
		Q2.Double(&Q1).Neg(&Q2)
		lineEval(&Q1, &Q2, P, &evaluations[j]) // f(P), div(f) = 2(Q1)+(-2Q2)-3(O)
		ch <- struct{}{}
		Q2.Neg(&Q2)
		j++

		if loopCounter[i] == 1 {
			// Addition step with the fixed base point Q.
			lineEval(&Q2, &Qbuf, P, &evaluations[j]) // f(P), div(f) = (Q2)+(Q)+(-Q2-Q)-3(O)
			ch <- struct{}{}
			Q2.AddMixed(Q)
			j++
		}
	}

	close(ch)
}
// mulByV2NRInv set z to x*(y*v^2*(1,1)^{-1}) and return z
//
// Sparse multiplication: each e6 coefficient of x is rotated one slot and
// scaled by y, the wrapped-around slot picking up the non-residue inverse.
func (z *GT) mulByV2NRInv(x *GT, y *e2) *GT {
	var result GT
	var yNRInv e2
	yNRInv.MulByNonResidueInv(y)

	result.C0.B0.Mul(&x.C0.B1, y)
	result.C0.B1.Mul(&x.C0.B2, y)
	result.C0.B2.Mul(&x.C0.B0, &yNRInv)

	result.C1.B0.Mul(&x.C1.B1, y)
	result.C1.B1.Mul(&x.C1.B2, y)
	result.C1.B2.Mul(&x.C1.B0, &yNRInv)

	z.Set(&result)
	return z
}
// mulByVWNRInv set z to x*(y*v*w*(1,1)^{-1}) and return z
//
// Sparse multiplication: multiplying by v*w swaps the C0/C1 towers and rotates
// the e2 slots; wrapped-around slots are scaled by the non-residue inverse.
func (z *GT) mulByVWNRInv(x *GT, y *e2) *GT {
	var result GT
	var yNRInv e2
	yNRInv.MulByNonResidueInv(y)

	result.C0.B0.Mul(&x.C1.B1, y)
	result.C0.B1.Mul(&x.C1.B2, y)
	result.C0.B2.Mul(&x.C1.B0, &yNRInv)

	result.C1.B0.Mul(&x.C0.B2, y)
	result.C1.B1.Mul(&x.C0.B0, &yNRInv)
	result.C1.B2.Mul(&x.C0.B1, &yNRInv)

	z.Set(&result)
	return z
}
// mulByWNRInv set z to x*(y*w*(1,1)^{-1}) and return z
//
// Sparse multiplication: multiplying by w swaps the C0/C1 towers; all but one
// slot pick up the non-residue inverse scaling.
func (z *GT) mulByWNRInv(x *GT, y *e2) *GT {
	var result GT
	var yNRInv e2
	yNRInv.MulByNonResidueInv(y)

	result.C0.B0.Mul(&x.C1.B2, y)
	result.C0.B1.Mul(&x.C1.B0, &yNRInv)
	result.C0.B2.Mul(&x.C1.B1, &yNRInv)

	result.C1.B0.Mul(&x.C0.B0, &yNRInv)
	result.C1.B1.Mul(&x.C0.B1, &yNRInv)
	result.C1.B2.Mul(&x.C0.B2, &yNRInv)

	z.Set(&result)
	return z
}
// expt set z to x^t in GT and return z
func (z *GT) expt(x *GT) *GT {
const tAbsVal uint64 = 15132376222941642752 // negative
var result GT
result.Set(x)
l := bits.Len64(tAbsVal) - 2
for i := l; i >= 0; i-- {
result.CyclotomicSquare(&result)
if tAbsVal&(1<<uint(i)) != 0 {
result.Mul(&result, x)
}
}
result.Conjugate(&result) // because tAbsVal is negative
z.Set(&result)
return z
} | bls381/pairing.go | 0.718397 | 0.446796 | pairing.go | starcoder |
package main
import (
"fmt"
"os"
"path/filepath"
"io/ioutil"
"strings"
"math"
"regexp"
)
// changeSeats applies one round of the part-2 seating rules to every seat in
// dataString, a flattened lineNum x lineLength grid:
//   - an empty seat ('L') with no visible occupied seats becomes occupied
//   - an occupied seat ('#') seeing at least 5 occupied seats becomes empty
// It prints the resulting grid line by line and returns the new grid plus a
// flag indicating whether any seat changed this round.
// (The write-only seatMap local from the original was dead code and removed.)
func changeSeats(dataString string, lineLength int, lineNum int) (string, bool) {
	byteData := []byte(dataString) // snapshot read by countFirstOccupied
	newData := []byte(dataString)  // next-round grid, updated in place
	didChange := false

	for i, c := range byteData {
		// Grid coordinates of index i. NOTE: i/lineLength is already integer
		// division, so the Floor is a no-op; it is kept only so the file's
		// "math" import remains used.
		x, y := i%lineLength, int(math.Floor(float64(i/lineLength)))

		if c == 'L' && countFirstOccupied(x, y, lineLength, lineNum, byteData) == 0 {
			newData[i] = '#'
			didChange = true
		} else if c == '#' && countFirstOccupied(x, y, lineLength, lineNum, byteData) >= 5 {
			newData[i] = 'L'
			didChange = true
		}

		// At the end of each row, print the row of the new grid.
		if x == lineLength-1 {
			fmt.Printf("%s\n", string(newData[i-lineLength+1:i+1]))
		}
	}
	fmt.Printf("---------------------\n")
	return string(newData), didChange
}
// isOccupied reports whether the seat byte represents an occupied seat ('#').
func isOccupied(seat byte) bool {
	return seat == '#'
}
// isSeat reports whether the byte is a seat at all, empty ('L') or occupied ('#').
func isSeat(seat byte) bool {
	return seat == 'L' || seat == '#'
}
// getIndex converts (x, y) grid coordinates into a flat slice index.
// lineNum is not used in the computation but kept for signature consistency.
func getIndex(lineLength int, lineNum int, x int, y int) int {
	return y*lineLength + x
}
// countFirstOccupied returns how many of the eight "first visible seats" from
// (x, y) are occupied. In each of the eight compass directions it walks
// outward until it hits the first seat (empty or occupied) or falls off the
// grid; only that first seat can contribute to the count.
//
// The original implementation repeated the same walk eight times with
// hand-written bounds; this version drives a single walk with a table of
// direction vectors, which is behaviorally identical.
func countFirstOccupied(x int, y int, lineLength int, lineNum int, byteData []byte) int {
	// Direction vectors (dx, dy): N, S, E, W, NW, SE, SW, NE.
	directions := [8][2]int{
		{0, -1}, {0, 1}, {1, 0}, {-1, 0},
		{-1, -1}, {1, 1}, {-1, 1}, {1, -1},
	}

	count := 0
	for _, d := range directions {
		dx, dy := d[0], d[1]
		// Walk outward until the grid edge or the first seat.
		for sx, sy := x+dx, y+dy; sx >= 0 && sx < lineLength && sy >= 0 && sy < lineNum; sx, sy = sx+dx, sy+dy {
			seat := byteData[getIndex(lineLength, lineNum, sx, sy)]
			if isOccupied(seat) {
				count++
			}
			if isSeat(seat) {
				break // first seat in this direction blocks further sight
			}
		}
	}
	return count
}
func main() {
// Read input file
inputData, err := ioutil.ReadFile(filepath.Join(os.Args[1]))
if err != nil {
fmt.Println("Error: ", err)
return
}
// Split full file into array of individual lines to calculate lineLength and lineNum, then print the original seat map
dataArray := strings.Split(string(inputData), "\n")
lineLength := len(dataArray[0])
lineNum := len(dataArray)
for line := range dataArray {
fmt.Printf("%s\n", dataArray[line])
}
fmt.Printf("---------------------\n")
// The program will end when the seat map is no longer changing
changing := true
// We need a raw array of the data, not split into separate lines
dataString := strings.Replace(string(inputData), "\n", "", -1)
// First call to changeSeats, passing in the original data
nextDataString, changing := changeSeats(dataString, lineLength, lineNum)
// Continue calling changeSeats until the map does not change
for changing {
nextDataString, changing = changeSeats(nextDataString, lineLength, lineNum)
}
// Count occupied seats identified by the # character
re := regexp.MustCompile(`#`)
totalCount := len(re.FindAllStringIndex(nextDataString, -1))
fmt.Printf("Occupied Seats: %d\n", totalCount)
} | day11/part2/part2.go | 0.616128 | 0.404155 | part2.go | starcoder |
package trees
import (
"errors"
"fmt"
"math"
"sort"
"strconv"
"strings"
"github.com/sjwhitworth/golearn/base"
)
// Supported impurity criteria for the regressor.
const (
	MAE string = "mae" // mean absolute error
	MSE string = "mse" // mean squared error
)
// RNode - Node struct for Decision Tree Regressor
// It holds the information for each split
// Which feature to use, threshold, left prediction and right prediction
type regressorNode struct {
	Left      *regressorNode // subtree for rows with feature < Threshold (nil when that side is a leaf)
	Right     *regressorNode // subtree for rows with feature >= Threshold (nil when that side is a leaf)
	Threshold float64        // split value compared against Feature
	Feature   int64          // index of the feature used for this split
	LeftPred  float64        // prediction returned when descending left stops here
	RightPred float64        // prediction returned when descending right stops here
	// isNodeNeeded is false when no split improved impurity, telling the
	// parent not to attach this node.
	isNodeNeeded bool
}
// CARTDecisionTreeRegressor - Tree struct for Decision Tree Regressor
// It contains the rootNode, as well as the hyperparameters chosen by user.
// Also keeps track of splits used at tree level.
type CARTDecisionTreeRegressor struct {
	RootNode  *regressorNode // root of the fitted tree (set by Fit)
	criterion string         // impurity criterion: "mae" or "mse" (lowercased)
	maxDepth  int64          // maximum tree depth; -1 grows until nodes are pure
	// triedSplits records (feature, threshold) pairs already used on the
	// current path so they are not reused deeper down.
	triedSplits [][]float64
}
// average returns the arithmetic mean of y.
// Note: an empty slice yields NaN (0/0), matching the original behavior.
func average(y []float64) float64 {
	var sum float64
	for _, v := range y {
		sum += v
	}
	return sum / float64(len(y))
}
// meanAbsoluteError returns the mean absolute deviation of the targets y from
// the constant prediction yBar.
func meanAbsoluteError(y []float64, yBar float64) float64 {
	// Accumulator renamed from "error", which shadowed the builtin error type.
	total := 0.0
	for _, target := range y {
		total += math.Abs(target - yBar)
	}
	return total / float64(len(y))
}
// computeMaeImpurityAndAverage returns the MAE impurity of y around its mean,
// along with that mean (used as the node's prediction).
func computeMaeImpurityAndAverage(y []float64) (float64, float64) {
	mean := average(y)
	return meanAbsoluteError(y, mean), mean
}
// meanSquaredError returns the mean squared deviation of the targets y from
// the constant prediction yBar.
func meanSquaredError(y []float64, yBar float64) float64 {
	// Accumulator renamed from "error" (shadowed the builtin error type).
	total := 0.0
	for _, target := range y {
		d := target - yBar
		// d*d replaces math.Pow(d, 2): same result, no general-power overhead.
		total += d * d
	}
	return total / float64(len(y))
}
// computeMseImpurityAndAverage returns the MSE impurity of y around its mean,
// along with that mean (used as the node's prediction).
func computeMseImpurityAndAverage(y []float64) (float64, float64) {
	mean := average(y)
	return meanSquaredError(y, mean), mean
}
// calculateRegressionLoss computes the impurity and mean of y under the given
// criterion ("mae" or "mse"). An unknown criterion now yields an error instead
// of panicking, honoring the declared error return (the original panicked and
// could never return a non-nil error).
func calculateRegressionLoss(y []float64, criterion string) (float64, float64, error) {
	switch criterion {
	case MAE:
		loss, avg := computeMaeImpurityAndAverage(y)
		return loss, avg, nil
	case MSE:
		loss, avg := computeMseImpurityAndAverage(y)
		return loss, avg, nil
	default:
		return 0, 0, fmt.Errorf("invalid impurity function %q, choose from %q or %q", criterion, MAE, MSE)
	}
}
// regressorCreateSplit partitions data (and the matching targets y) on one
// feature: rows with feature value below threshold go left, the rest go right.
func regressorCreateSplit(data [][]float64, feature int64, y []float64, threshold float64) ([][]float64, [][]float64, []float64, []float64) {
	var (
		left, right   [][]float64
		lefty, righty []float64
	)

	for i, example := range data {
		if example[feature] < threshold {
			left = append(left, example)
			lefty = append(lefty, y[i])
		} else {
			right = append(right, example)
			righty = append(righty, y[i])
		}
	}

	return left, right, lefty, righty
}
// NewDecisionTreeRegressor returns a regressor configured with the given
// impurity criterion ("mae"/"mse", case-insensitive) and maximum depth
// (-1 grows the tree until nodes are pure).
func NewDecisionTreeRegressor(criterion string, maxDepth int64) *CARTDecisionTreeRegressor {
	return &CARTDecisionTreeRegressor{
		criterion: strings.ToLower(criterion),
		maxDepth:  maxDepth,
	}
}
// Re order data based on a feature for optimizing code
// Helps in updating splits without reiterating entire dataset
//
// Sorts featureVal ascending via NewSlice (defined elsewhere in this package;
// presumably it records the sort permutation in Idx — verify there) and
// applies the same permutation to data and y.
func regressorReOrderData(featureVal []float64, data [][]float64, y []float64) ([][]float64, []float64) {
	s := NewSlice(featureVal)
	sort.Sort(s)

	indexes := s.Idx

	var dataSorted [][]float64
	var ySorted []float64

	// Rebuild data and y in the sorted order.
	for _, index := range indexes {
		dataSorted = append(dataSorted, data[index])
		ySorted = append(ySorted, y[index])
	}

	return dataSorted, ySorted
}
// regressorUpdateSplit shifts rows from the front of right to the end of left
// while their feature value is below the (increased) threshold. Because the
// data is sorted by this feature, this updates the split without
// re-partitioning the whole dataset.
func regressorUpdateSplit(left [][]float64, leftY []float64, right [][]float64, rightY []float64, feature int64, threshold float64) ([][]float64, []float64, [][]float64, []float64) {
	// Guard len(right) > 0: if a threshold ever exceeded every remaining
	// value, the original loop indexed an empty slice and panicked.
	for len(right) > 0 && right[0][feature] < threshold {
		left = append(left, right[0])
		right = right[1:]
		leftY = append(leftY, rightY[0])
		rightY = rightY[1:]
	}

	return left, leftY, right, rightY
}
// Fit - Build the tree using the data
// Creates empty root node and builds tree by calling regressorBestSplit
func (tree *CARTDecisionTreeRegressor) Fit(X base.FixedDataGrid) error {
	var emptyNode regressorNode
	var err error

	// Convert the grid into numeric feature rows and a target vector.
	data := regressorConvertInstancesToProblemVec(X)
	y, err := regressorConvertInstancesToLabelVec(X)
	if err != nil {
		return err
	}

	// Recursively grow the tree from an empty root at depth 0.
	emptyNode, err = regressorBestSplit(*tree, data, y, emptyNode, tree.criterion, tree.maxDepth, 0)
	if err != nil {
		return err
	}

	tree.RootNode = &emptyNode
	return nil
}
// Builds the tree by iteratively finding the best split.
// Recursive function - stops if maxDepth is reached or nodes are pure
func regressorBestSplit(tree CARTDecisionTreeRegressor, data [][]float64, y []float64, upperNode regressorNode, criterion string, maxDepth int64, depth int64) (regressorNode, error) {
	// Ensure that we have not reached maxDepth. maxDepth =-1 means split until nodes are pure
	depth++
	if depth > maxDepth && maxDepth != -1 {
		return upperNode, nil
	}

	numFeatures := len(data[0])
	var bestLoss, origLoss float64
	var err error
	// Impurity of the unsplit node; LeftPred temporarily holds its mean target.
	origLoss, upperNode.LeftPred, err = calculateRegressionLoss(y, criterion)
	if err != nil {
		return upperNode, err
	}

	bestLoss = origLoss
	bestLeft, bestRight, bestLefty, bestRighty := data, data, y, y
	numData := len(data)
	bestLeftLoss, bestRightLoss := bestLoss, bestLoss
	upperNode.isNodeNeeded = true
	var leftN, rightN regressorNode

	// Iterate over all features
	for i := 0; i < numFeatures; i++ {
		featureVal := getFeature(data, int64(i))
		unique := findUnique(featureVal)
		sort.Float64s(unique)
		// Sort rows by this feature so later thresholds only move rows
		// from the front of right to the end of left.
		sortData, sortY := regressorReOrderData(featureVal, data, y)
		firstTime := true
		var left, right [][]float64
		var leftY, rightY []float64

		// Candidate thresholds are midpoints between consecutive unique values.
		for j := 0; j < len(unique)-1; j++ {
			threshold := (unique[j] + unique[j+1]) / 2
			// Skip splits already used higher up the tree.
			if validate(tree.triedSplits, int64(i), threshold) {
				if firstTime {
					left, right, leftY, rightY = regressorCreateSplit(sortData, int64(i), sortY, threshold)
					firstTime = false
				} else {
					left, leftY, right, rightY = regressorUpdateSplit(left, leftY, right, rightY, int64(i), threshold)
				}

				var leftLoss, rightLoss float64
				var leftPred, rightPred float64
				leftLoss, leftPred, _ = calculateRegressionLoss(leftY, criterion)
				rightLoss, rightPred, _ = calculateRegressionLoss(rightY, criterion)
				// Size-weighted impurity of the two children.
				subLoss := (leftLoss * float64(len(left)) / float64(numData)) + (rightLoss * float64(len(right)) / float64(numData))

				if subLoss < bestLoss {
					bestLoss = subLoss
					bestLeft, bestRight = left, right
					bestLefty, bestRighty = leftY, rightY
					upperNode.Threshold, upperNode.Feature = threshold, int64(i)
					upperNode.LeftPred, upperNode.RightPred = leftPred, rightPred
					bestLeftLoss, bestRightLoss = leftLoss, rightLoss
				}
			}
		}
	}

	// No split improved on the unsplit impurity: mark this node unnecessary.
	if bestLoss == origLoss {
		upperNode.isNodeNeeded = false
		return upperNode, nil
	}

	// Recurse into impure children, recording the chosen split so it is not reused.
	if bestLoss > 0 {
		if bestLeftLoss > 0 {
			tree.triedSplits = append(tree.triedSplits, []float64{float64(upperNode.Feature), upperNode.Threshold})
			leftN, err = regressorBestSplit(tree, bestLeft, bestLefty, leftN, criterion, maxDepth, depth)
			if err != nil {
				return upperNode, err
			}
			if leftN.isNodeNeeded == true {
				upperNode.Left = &leftN
			}
		}

		if bestRightLoss > 0 {
			tree.triedSplits = append(tree.triedSplits, []float64{float64(upperNode.Feature), upperNode.Threshold})
			rightN, err = regressorBestSplit(tree, bestRight, bestRighty, rightN, criterion, maxDepth, depth)
			if err != nil {
				return upperNode, err
			}
			if rightN.isNodeNeeded == true {
				upperNode.Right = &rightN
			}
		}
	}

	return upperNode, nil
}
// String renders the fitted tree as an indented, human-readable outline by
// delegating to regressorPrintTreeFromNode.
func (tree *CARTDecisionTreeRegressor) String() string {
	return regressorPrintTreeFromNode(*tree.RootNode, "")
}
// regressorPrintTreeFromNode recursively renders the subtree rooted at tree,
// showing each node's feature/threshold and the predictions at leaf sides.
// spacing grows by one space per level of depth.
// Uses strings.Builder instead of the original quadratic string concatenation.
func regressorPrintTreeFromNode(tree regressorNode, spacing string) string {
	var b strings.Builder

	b.WriteString(spacing + "Feature ")
	b.WriteString(strconv.FormatInt(tree.Feature, 10))
	b.WriteString(" < ")
	b.WriteString(fmt.Sprintf("%.3f", tree.Threshold))
	b.WriteString("\n")

	if tree.Left == nil {
		b.WriteString(spacing + "---> True" + "\n")
		b.WriteString(" " + spacing + "PREDICT ")
		b.WriteString(fmt.Sprintf("%.3f", tree.LeftPred) + "\n")
	}
	if tree.Right == nil {
		b.WriteString(spacing + "---> False" + "\n")
		b.WriteString(" " + spacing + "PREDICT ")
		b.WriteString(fmt.Sprintf("%.3f", tree.RightPred) + "\n")
	}

	if tree.Left != nil {
		b.WriteString(spacing + "---> True" + "\n")
		b.WriteString(regressorPrintTreeFromNode(*tree.Left, spacing+" "))
	}
	if tree.Right != nil {
		b.WriteString(spacing + "---> False" + "\n")
		b.WriteString(regressorPrintTreeFromNode(*tree.Right, spacing+" "))
	}

	return b.String()
}
// regressorPredictSingle walks the tree for one example: descend left while
// the split feature is below the node threshold, right otherwise, and return
// the prediction stored at the side where descent stops.
func regressorPredictSingle(tree regressorNode, instance []float64) float64 {
	if instance[tree.Feature] < tree.Threshold {
		if tree.Left == nil {
			return tree.LeftPred
		}
		return regressorPredictSingle(*tree.Left, instance)
	}
	if tree.Right == nil {
		return tree.RightPred
	}
	return regressorPredictSingle(*tree.Right, instance)
}
// Predict returns one prediction per row of X_test, converting the grid into
// numeric rows and evaluating each against the fitted tree.
func (tree *CARTDecisionTreeRegressor) Predict(X_test base.FixedDataGrid) []float64 {
	test := regressorConvertInstancesToProblemVec(X_test)
	return regressorPredictFromNode(*tree.RootNode, test)
}
// regressorPredictFromNode predicts every row in test against the subtree
// rooted at tree, one call to regressorPredictSingle per row.
func regressorPredictFromNode(tree regressorNode, test [][]float64) []float64 {
	// Pre-size the result slice; also renames the original i_pred local,
	// which used non-Go underscore naming.
	preds := make([]float64, 0, len(test))
	for i := range test {
		preds = append(preds, regressorPredictSingle(tree, test[i]))
	}
	return preds
}
// regressorConvertInstancesToProblemVec extracts the numeric non-class
// attributes of X into a dense [][]float64, one row per instance.
// Helper for Fit and Predict.
func regressorConvertInstancesToProblemVec(X base.FixedDataGrid) [][]float64 {
	// Allocate problem array
	_, rows := X.Size()
	problemVec := make([][]float64, rows)

	// Retrieve numeric non-class Attributes
	numericAttrs := base.NonClassFloatAttributes(X)
	numericAttrSpecs := base.ResolveAttributes(X, numericAttrs)

	// Convert each row
	X.MapOverRows(numericAttrSpecs, func(row [][]byte, rowNo int) (bool, error) {
		// Allocate a new row
		probRow := make([]float64, len(numericAttrSpecs))

		// Read out the row (`for i, _ := range` fixed to idiomatic `for i := range`).
		for i := range numericAttrSpecs {
			probRow[i] = base.UnpackBytesToFloat(row[i])
		}

		// Add the row
		problemVec[rowNo] = probRow
		return true, nil
	})

	return problemVec
}
// Helper function to convert base.FixedDataGrid into required format. Called in Fit, Predict
func regressorConvertInstancesToLabelVec(X base.FixedDataGrid) ([]float64, error) {
// Get the class Attributes
classAttrs := X.AllClassAttributes()
// Only support 1 class Attribute
if len(classAttrs) != 1 {
return []float64{0}, errors.New(fmt.Sprintf("%d ClassAttributes (1 expected)", len(classAttrs)))
}
// ClassAttribute must be numeric
if _, ok := classAttrs[0].(*base.FloatAttribute); !ok {
return []float64{0}, errors.New(fmt.Sprintf("%s: ClassAttribute must be a FloatAttribute", classAttrs[0]))
}
// Allocate return structure
_, rows := X.Size()
labelVec := make([]float64, rows)
// Resolve class Attribute specification
classAttrSpecs := base.ResolveAttributes(X, classAttrs)
X.MapOverRows(classAttrSpecs, func(row [][]byte, rowNo int) (bool, error) {
labelVec[rowNo] = base.UnpackBytesToFloat(row[0])
return true, nil
})
return labelVec, nil
} | trees/cart_regressor.go | 0.768429 | 0.586049 | cart_regressor.go | starcoder |
package e2e
import (
"math"
io_prometheus_client "github.com/prometheus/client_model/go"
)
// getMetricValue extracts a single float from a metric sample: the value for
// gauges and counters, the sample sum for histograms and summaries, and 0 for
// anything else.
func getMetricValue(m *io_prometheus_client.Metric) float64 {
	switch {
	case m.GetGauge() != nil:
		return m.GetGauge().GetValue()
	case m.GetCounter() != nil:
		return m.GetCounter().GetValue()
	case m.GetHistogram() != nil:
		return m.GetHistogram().GetSampleSum()
	case m.GetSummary() != nil:
		return m.GetSummary().GetSampleSum()
	default:
		return 0
	}
}
// getMetricCount extracts the sample count of a histogram or summary metric,
// or 0 for any other metric type.
func getMetricCount(m *io_prometheus_client.Metric) float64 {
	switch {
	case m.GetHistogram() != nil:
		return float64(m.GetHistogram().GetSampleCount())
	case m.GetSummary() != nil:
		return float64(m.GetSummary().GetSampleCount())
	default:
		return 0
	}
}
// getValues maps every metric to its scalar value using opts.GetValue.
func getValues(metrics []*io_prometheus_client.Metric, opts MetricsOptions) []float64 {
	out := make([]float64, 0, len(metrics))
	for _, metric := range metrics {
		out = append(out, opts.GetValue(metric))
	}
	return out
}
// filterMetrics returns only the metrics whose label sets satisfy every
// matcher in opts.LabelMatchers. With no matchers configured, the input is
// returned unchanged.
func filterMetrics(metrics []*io_prometheus_client.Metric, opts MetricsOptions) []*io_prometheus_client.Metric {
	// If no label matcher is configured, then no filtering should be done.
	if len(opts.LabelMatchers) == 0 {
		return metrics
	}
	if len(metrics) == 0 {
		return metrics
	}

	filtered := make([]*io_prometheus_client.Metric, 0, len(metrics))
	for _, m := range metrics {
		// Collect the metric's label pairs into a map for matcher lookups.
		metricLabels := map[string]string{}
		for _, lp := range m.GetLabel() {
			metricLabels[lp.GetName()] = lp.GetValue()
		}

		// Every matcher must match; a label missing from the metric is
		// matched against the empty string.
		matches := true
		for _, matcher := range opts.LabelMatchers {
			if !matcher.Matches(metricLabels[matcher.Name]) {
				matches = false
				break
			}
		}

		if !matches {
			continue
		}

		filtered = append(filtered, m)
	}
	return filtered
}
// sumValues returns the sum of values (0 for an empty or nil slice).
func sumValues(values []float64) float64 {
	var total float64
	for _, v := range values {
		total += v
	}
	return total
}
// EqualsSingle returns a predicate reporting whether its argument equals
// expected, with NaN treated as equal to NaN.
func EqualsSingle(expected float64) func(float64) bool {
	return func(v float64) bool {
		if math.IsNaN(expected) {
			return math.IsNaN(v)
		}
		return v == expected
	}
}
// Equals is an isExpected function for WaitSumMetrics: true when the single
// provided sum equals value (NaN matches NaN). Panics unless given exactly
// one sum.
func Equals(value float64) func(sums ...float64) bool {
	return func(sums ...float64) bool {
		if len(sums) != 1 {
			panic("equals: expected one value")
		}
		s := sums[0]
		if math.IsNaN(s) && math.IsNaN(value) {
			return true
		}
		return s == value
	}
}
// Greater is an isExpected function for WaitSumMetrics: true when the single
// provided sum is strictly greater than value. Panics unless given exactly
// one sum.
func Greater(value float64) func(sums ...float64) bool {
	return func(sums ...float64) bool {
		if len(sums) != 1 {
			panic("greater: expected one value")
		}
		s := sums[0]
		return s > value
	}
}
// Less is an isExpected function for WaitSumMetrics: true when the single
// provided sum is strictly less than value. Panics unless given exactly
// one sum.
func Less(value float64) func(sums ...float64) bool {
	return func(sums ...float64) bool {
		if len(sums) != 1 {
			panic("less: expected one value")
		}
		s := sums[0]
		return s < value
	}
}
// EqualsAmongTwo is an isExpected function for WaitSumMetrics that reports
// whether the first of exactly two sums equals the second.
// NOTE: Be careful on scrapes in between of process that changes two metrics.
// Those are usually not atomic.
func EqualsAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("equalsAmongTwo: expected two values")
	}
	first, second := sums[0], sums[1]
	return first == second
}
// GreaterAmongTwo is an isExpected function for WaitSumMetrics that reports
// whether the first of exactly two sums is strictly greater than the second.
// NOTE: Be careful on scrapes in between of process that changes two metrics.
// Those are usually not atomic.
func GreaterAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("greaterAmongTwo: expected two values")
	}
	first, second := sums[0], sums[1]
	return first > second
}
// LessAmongTwo is an isExpected function for WaitSumMetrics that reports
// whether the first of exactly two sums is strictly smaller than the second.
// NOTE: Be careful on scrapes in between of process that changes two metrics.
// Those are usually not atomic.
func LessAmongTwo(sums ...float64) bool {
	if len(sums) != 2 {
		panic("lessAmongTwo: expected two values")
	}
	first, second := sums[0], sums[1]
	return first < second
}
package main
import (
"container/heap"
"math"
)
/*
You are given an array points representing integer coordinates of some points on a 2D-plane,
where points[i] = [xi, yi].
The cost of connecting two points [xi, yi] and [xj, yj] is the manhattan distance between them:
|xi - xj| + |yi - yj|, where |val| denotes the absolute value of val.
Return the minimum cost to make all points connected.
All points are connected if there is exactly one simple path between any two points.
Example 1:
Input: points = [[0,0],[2,2],[3,10],[5,2],[7,0]]
Output: 20
Explanation:
We can connect the points as shown above to get the minimum cost of 20.
Notice that there is a unique path between every pair of points.
Example 2:
Input: points = [[3,12],[-2,5],[-4,1]]
Output: 18
Example 3:
Input: points = [[0,0],[1,1],[1,0],[-1,1]]
Output: 4
Example 4:
Input: points = [[-1000000,-1000000],[1000000,1000000]]
Output: 4000000
Example 5:
Input: points = [[0,0]]
Output: 0
Constraints:
1 <= points.length <= 1000
-106 <= xi, yi <= 106
All pairs (xi, yi) are distinct.
*/
// Point is an integer coordinate on the 2D plane.
type Point struct {
	X int
	Y int
}
// Edge pairs a candidate point with the cost of connecting it to the
// partially built tree; used as the priority-queue entry in Prim's algorithm.
type Edge struct {
	Point Point
	Cost  int
}
// PriorityQueue is a min-heap of edges ordered by Cost, implementing
// container/heap.Interface.
type PriorityQueue []*Edge

func (pq PriorityQueue) Len() int { return len(pq) }

// Less orders by ascending cost, making the heap a min-heap.
func (pq PriorityQueue) Less(i, j int) bool {
	return pq[i].Cost < pq[j].Cost
}

func (pq PriorityQueue) Swap(i, j int) {
	pq[i], pq[j] = pq[j], pq[i]
}

// Push appends x (a *Edge) to the queue; called only via heap.Push.
func (pq *PriorityQueue) Push(x interface{}) {
	item := x.(*Edge)
	*pq = append(*pq, item)
}

// Pop removes and returns the last element (the minimum, after heap.Pop has
// swapped it to the end); the slot is nil'd so the edge is not retained.
func (pq *PriorityQueue) Pop() interface{} {
	old := *pq
	n := len(old)
	item := old[n-1]
	old[n-1] = nil
	*pq = old[0 : n-1]
	return item
}
// manhattanDistance returns |x1-x2| + |y1-y2| for the two points.
func manhattanDistance(from, to Point) int {
	dx := math.Abs(float64(from.X - to.X))
	dy := math.Abs(float64(from.Y - to.Y))
	return int(dx) + int(dy)
}
// minCostConnectPoints returns the minimum total manhattan-distance cost to
// connect all points into a single tree, using Prim's algorithm over a
// min-heap of candidate edges.
//
// Complexity (corrected from the original O(n log n) claim): each popped
// point pushes up to n candidate edges, so the heap can hold O(n^2) entries —
// roughly O(n^2 log n) time and O(n^2) space, where n is the number of points.
func minCostConnectPoints(points [][]int) int {
	if len(points) == 0 {
		return 0
	}

	// Seed the frontier with the first point at zero cost.
	startingPoint := &Edge{
		Point: Point{X: points[0][0], Y: points[0][1]},
		Cost:  0,
	}
	priorityQueue := PriorityQueue{startingPoint}

	cost := 0
	visitedPoints := make(map[Point]bool)

	// Grow the tree until every point has been absorbed.
	for len(priorityQueue) > 0 && len(visitedPoints) < len(points) {
		edge := heap.Pop(&priorityQueue).(*Edge)
		// Stale entry: this point was already reached via a cheaper edge.
		if visitedPoints[edge.Point] {
			continue
		}

		visitedPoints[edge.Point] = true
		cost += edge.Cost

		// Offer an edge from the newly added point to every unvisited point.
		for _, point := range points {
			p := Point{X: point[0], Y: point[1]}

			if visitedPoints[p] {
				continue
			}

			heap.Push(&priorityQueue, &Edge{
				Point: p,
				Cost:  manhattanDistance(edge.Point, p),
			})
		}
	}

	return cost
}
// main is intentionally empty; this file only provides the solution function.
func main() {
}
package weather_data
import (
"github.com/AlexanderFadeev/ood/lab2/signal"
)
// Setter mutates the basic weather measurements; each setter also emits the
// corresponding change signal (see the weatherData implementation).
type Setter interface {
	SetTemperature(float64)
	SetPressure(float64)
	SetHumidity(float64)
	SetValues(temperature, pressure, humidity float64)
}

// SetterPro extends Setter with wind measurements.
type SetterPro interface {
	Setter
	SetWind(speed, direction float64)
	SetValuesPro(temperature, pressure, humidity, speed, direction float64)
}

// Getter exposes read access to the basic weather measurements.
type Getter interface {
	GetTemperature() float64
	GetPressure() float64
	GetHumidity() float64
}

// GetterPro extends Getter with wind measurements.
type GetterPro interface {
	Getter
	GetWind() (speed, direction float64)
}

// Signal allows subscribing slots to change notifications for the basic
// measurements. The meaning of priority is defined by the signal package —
// presumably it orders slot invocation; confirm there.
type Signal interface {
	DoOnTemperatureChange(slot FloatSlot, priority uint) signal.Connection
	DoOnPressureChange(slot FloatSlot, priority uint) signal.Connection
	DoOnHumidityChange(slot FloatSlot, priority uint) signal.Connection
}

// SignalPro extends Signal with wind change notifications.
type SignalPro interface {
	Signal
	DoOnWindChange(slot WindSlot, priority uint) signal.Connection
}

// WeatherData combines mutation, read access, and change signals for the
// basic measurements.
type WeatherData interface {
	Setter
	Getter
	Signal
}

// WeatherDataPro is the full interface, including wind support.
type WeatherDataPro interface {
	SetterPro
	GetterPro
	SignalPro
}
// weatherData is the concrete WeatherDataPro implementation: plain value
// storage plus one change signal per measurement.
type weatherData struct {
	temperature float64
	pressure    float64
	humidity    float64
	// wind state; speed and direction are always updated together via SetWind
	windSpeed     float64
	windDirection float64
	// change signals fired by the corresponding setters
	onTemperatureChange FloatSignal
	onPressureChange    FloatSignal
	onHumidityChange    FloatSignal
	onWindChange        WindSignal
}
// New returns a WeatherDataPro with all measurements zeroed and a fresh
// signal adapter wired for every notification channel.
func New() WeatherDataPro {
	return &weatherData{
		onTemperatureChange: newFloatSignalAdapter(),
		onPressureChange:    newFloatSignalAdapter(),
		onHumidityChange:    newFloatSignalAdapter(),
		onWindChange:        newWindSignalAdapter(),
	}
}
// SetTemperature stores the new temperature and notifies subscribers.
func (wd *weatherData) SetTemperature(value float64) {
	wd.temperature = value
	wd.onTemperatureChange.Emit(value)
}

// SetPressure stores the new pressure and notifies subscribers.
func (wd *weatherData) SetPressure(value float64) {
	wd.pressure = value
	wd.onPressureChange.Emit(value)
}

// SetHumidity stores the new humidity and notifies subscribers.
func (wd *weatherData) SetHumidity(value float64) {
	wd.humidity = value
	wd.onHumidityChange.Emit(value)
}

// SetWind stores both wind components and emits a single combined event.
func (wd *weatherData) SetWind(speed float64, direction float64) {
	wd.windSpeed = speed
	wd.windDirection = direction
	wd.onWindChange.Emit(WindInfo{speed, direction})
}

// SetValues updates temperature, pressure and humidity, in that order; each
// individual setter fires its own signal.
func (wd *weatherData) SetValues(temperature, pressure, humidity float64) {
	wd.SetTemperature(temperature)
	wd.SetPressure(pressure)
	wd.SetHumidity(humidity)
}

// SetValuesPro updates all five measurements (basic values first, wind last).
func (wd *weatherData) SetValuesPro(temperature, pressure, humidity, speed, direction float64) {
	wd.SetValues(temperature, pressure, humidity)
	wd.SetWind(speed, direction)
}
// GetTemperature returns the last stored temperature.
func (wd *weatherData) GetTemperature() float64 {
	return wd.temperature
}

// GetPressure returns the last stored pressure.
func (wd *weatherData) GetPressure() float64 {
	return wd.pressure
}

// GetHumidity returns the last stored humidity.
func (wd *weatherData) GetHumidity() float64 {
	return wd.humidity
}

// GetWind returns the last stored wind speed and direction.
func (wd *weatherData) GetWind() (float64, float64) {
	return wd.windSpeed, wd.windDirection
}
// DoOnTemperatureChange subscribes slot to temperature updates and returns
// the connection handle for later disconnection.
func (wd *weatherData) DoOnTemperatureChange(slot FloatSlot, priority uint) signal.Connection {
	return wd.onTemperatureChange.Connect(slot, priority)
}

// DoOnPressureChange subscribes slot to pressure updates.
func (wd *weatherData) DoOnPressureChange(slot FloatSlot, priority uint) signal.Connection {
	return wd.onPressureChange.Connect(slot, priority)
}

// DoOnHumidityChange subscribes slot to humidity updates.
func (wd *weatherData) DoOnHumidityChange(slot FloatSlot, priority uint) signal.Connection {
	return wd.onHumidityChange.Connect(slot, priority)
}

// DoOnWindChange subscribes slot to combined wind (speed, direction) updates.
func (wd *weatherData) DoOnWindChange(slot WindSlot, priority uint) signal.Connection {
	return wd.onWindChange.Connect(slot, priority)
}
package condition
import (
"bytes"
"errors"
"fmt"
"regexp"
"github.com/Jeffail/benthos/lib/log"
"github.com/Jeffail/benthos/lib/metrics"
"github.com/Jeffail/benthos/lib/types"
)
//------------------------------------------------------------------------------
// init registers the "text" condition type and its user-facing documentation
// string with the package-level Constructors registry.
func init() {
	Constructors[TypeText] = TypeSpec{
		constructor: NewText,
		description: `
Text is a condition that checks the contents of a message part as plain text
against a logical operator and an argument.
Available logical operators are:
### ` + "`equals_cs`" + `
Checks whether the part equals the argument (case sensitive.)
### ` + "`equals`" + `
Checks whether the part equals the argument under unicode case-folding (case
insensitive.)
### ` + "`contains_cs`" + `
Checks whether the part contains the argument (case sensitive.)
### ` + "`contains`" + `
Checks whether the part contains the argument under unicode case-folding (case
insensitive.)
### ` + "`prefix_cs`" + `
Checks whether the part begins with the argument (case sensitive.)
### ` + "`prefix`" + `
Checks whether the part begins with the argument under unicode case-folding
(case insensitive.)
### ` + "`suffix_cs`" + `
Checks whether the part ends with the argument (case sensitive.)
### ` + "`suffix`" + `
Checks whether the part ends with the argument under unicode case-folding (case
insensitive.)
### ` + "`regexp_partial`" + `
Checks whether any section of the message part matches a regular expression (RE2
syntax).
### ` + "`regexp_exact`" + `
Checks whether the message part exactly matches a regular expression (RE2
syntax).`,
	}
}
//------------------------------------------------------------------------------
// Errors for the text condition.
var (
	// ErrInvalidTextOperator is returned when an unrecognized operator name
	// is supplied in the configuration.
	ErrInvalidTextOperator = errors.New("invalid text operator type")
)
// TextConfig is a configuration struct containing fields for the text
// condition.
type TextConfig struct {
	Operator string `json:"operator" yaml:"operator"` // one of the operator names documented in init()
	Part     int    `json:"part" yaml:"part"`         // index of the message part to test
	Arg      string `json:"arg" yaml:"arg"`           // right-hand argument for the operator
}
// NewTextConfig returns a TextConfig with default values: a case-sensitive
// equality check on the first message part with an empty argument.
func NewTextConfig() TextConfig {
	conf := TextConfig{}
	conf.Operator = "equals_cs"
	return conf
}
//------------------------------------------------------------------------------
// textOperator is a predicate evaluated against the raw bytes of a message
// part.
type textOperator func(c []byte) bool

// textEqualsOperator builds a case-sensitive equality check against arg.
func textEqualsOperator(arg []byte) textOperator {
	return func(content []byte) bool {
		return bytes.Equal(content, arg)
	}
}

// textEqualsFoldOperator builds a unicode case-folded (case-insensitive)
// equality check against arg.
func textEqualsFoldOperator(arg []byte) textOperator {
	return func(content []byte) bool {
		return bytes.EqualFold(content, arg)
	}
}

// textContainsOperator builds a case-sensitive substring check for arg.
func textContainsOperator(arg []byte) textOperator {
	return func(content []byte) bool {
		return bytes.Contains(content, arg)
	}
}

// textContainsFoldOperator builds a case-insensitive substring check for arg.
// The argument is lowered once, up front.
func textContainsFoldOperator(arg []byte) textOperator {
	loweredArg := bytes.ToLower(arg)
	return func(content []byte) bool {
		return bytes.Contains(bytes.ToLower(content), loweredArg)
	}
}

// textPrefixOperator builds a case-sensitive prefix check for arg.
func textPrefixOperator(arg []byte) textOperator {
	return func(content []byte) bool {
		return bytes.HasPrefix(content, arg)
	}
}

// textPrefixFoldOperator builds a case-insensitive prefix check for arg.
func textPrefixFoldOperator(arg []byte) textOperator {
	loweredArg := bytes.ToLower(arg)
	return func(content []byte) bool {
		return bytes.HasPrefix(bytes.ToLower(content), loweredArg)
	}
}

// textSuffixOperator builds a case-sensitive suffix check for arg.
func textSuffixOperator(arg []byte) textOperator {
	return func(content []byte) bool {
		return bytes.HasSuffix(content, arg)
	}
}

// textSuffixFoldOperator builds a case-insensitive suffix check for arg.
func textSuffixFoldOperator(arg []byte) textOperator {
	loweredArg := bytes.ToLower(arg)
	return func(content []byte) bool {
		return bytes.HasSuffix(bytes.ToLower(content), loweredArg)
	}
}
// textRegexpPartialOperator matches when any section of the content matches
// the RE2 pattern held in arg. Compilation errors are returned to the caller.
func textRegexpPartialOperator(arg []byte) (textOperator, error) {
	compiled, err := regexp.Compile(string(arg))
	if err != nil {
		return nil, err
	}
	return func(c []byte) bool {
		return compiled.Match(c)
	}, nil
}

// textRegexpExactOperator matches only when the whole content is one match of
// the RE2 pattern held in arg.
func textRegexpExactOperator(arg []byte) (textOperator, error) {
	compiled, err := regexp.Compile(string(arg))
	if err != nil {
		return nil, err
	}
	return func(c []byte) bool {
		// Previously this compared len(Find(c)) == len(c), which wrongly
		// returned true for empty content when the pattern matched nothing
		// (nil match has length 0 == len("")). Require an actual match that
		// starts at 0 and spans the whole content.
		loc := compiled.FindIndex(c)
		return loc != nil && loc[0] == 0 && loc[1] == len(c)
	}, nil
}
// strToTextOperator maps an operator name from configuration to a compiled
// textOperator closure over arg. Unknown names yield ErrInvalidTextOperator.
func strToTextOperator(str, arg string) (textOperator, error) {
	argBytes := []byte(arg)
	switch str {
	case "equals_cs":
		return textEqualsOperator(argBytes), nil
	case "equals":
		return textEqualsFoldOperator(argBytes), nil
	case "contains_cs":
		return textContainsOperator(argBytes), nil
	case "contains":
		return textContainsFoldOperator(argBytes), nil
	case "prefix_cs":
		return textPrefixOperator(argBytes), nil
	case "prefix":
		return textPrefixFoldOperator(argBytes), nil
	case "suffix_cs":
		return textSuffixOperator(argBytes), nil
	case "suffix":
		return textSuffixFoldOperator(argBytes), nil
	case "regexp_partial":
		return textRegexpPartialOperator(argBytes)
	case "regexp_exact":
		return textRegexpExactOperator(argBytes)
	default:
		return nil, ErrInvalidTextOperator
	}
}
//------------------------------------------------------------------------------
// Text is a condition that checks message text against logical operators.
type Text struct {
	stats    metrics.Type
	operator textOperator // compiled predicate built from config at construction
	part     int          // index of the message part to inspect

	// metrics counters for skipped/applied checks
	mSkippedEmpty metrics.StatCounter
	mSkipped      metrics.StatCounter
	mSkippedOOB   metrics.StatCounter
	mApplied      metrics.StatCounter
}
// NewText returns a Text condition. The configured operator/argument pair is
// compiled once up front; an unknown operator name yields an error.
func NewText(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	op, err := strToTextOperator(conf.Text.Operator, conf.Text.Arg)
	if err != nil {
		return nil, fmt.Errorf("operator '%v': %v", conf.Text.Operator, err)
	}
	return &Text{
		stats:    stats,
		operator: op,
		part:     conf.Text.Part,

		mSkippedEmpty: stats.GetCounter("condition.text.skipped.empty_message"),
		mSkipped:      stats.GetCounter("condition.text.skipped"),
		mSkippedOOB:   stats.GetCounter("condition.text.skipped.out_of_bounds"),
		mApplied:      stats.GetCounter("condition.text.applied"),
	}, nil
}
//------------------------------------------------------------------------------
// Check attempts to check a message part against a configured condition.
// Empty messages and out-of-bounds part indices are counted as skips and
// evaluate to false.
func (c *Text) Check(msg types.Message) bool {
	index := c.part
	lParts := msg.Len()
	if lParts == 0 {
		c.mSkippedEmpty.Incr(1)
		c.mSkipped.Incr(1)
		return false
	}
	// NOTE(review): presumably msg.Get(index).Get() yields nil when index is
	// out of range — that is what the mSkippedOOB counter implies; confirm
	// against the types.Message implementation.
	msgPart := msg.Get(index).Get()
	if msgPart == nil {
		c.mSkippedOOB.Incr(1)
		c.mSkipped.Incr(1)
		return false
	}
	c.mApplied.Incr(1)
	return c.operator(msgPart)
}
//------------------------------------------------------------------------------ | lib/processor/condition/text.go | 0.757705 | 0.482246 | text.go | starcoder |
package paunch
import (
"math"
)
// Collider is an object that represents a shape that can be tested for
// collision against another Collider.
type Collider interface {
	// The on* methods report whether this collider overlaps the given
	// primitive shape; Collides dispatches to them by concrete type.
	onPoint(*point) bool
	onBounding(*bounding) bool
	onLine(*line) bool
	onPolygon(*polygon) bool

	// Move moves the Collider object the specified distance (a relative
	// offset, as opposed to SetPosition).
	Move(x, y float64)

	// SetPosition sets the (absolute) position of the Collider.
	SetPosition(x, y float64)

	// Position returns the x, y coordinates of the Collider object's
	// current position.
	Position() (float64, float64)

	// DistanceToTangentPoint returns the x, y coordinates of the nearest point
	// tangent to the Collider object. This method is useful for position
	// correction when objects have sunk into each other.
	DistanceToTangentPoint(float64, float64, Direction) (float64, float64)
}
// NewCollider creates a new Collider object. The supplied coordinates should
// be in an "x1, y1, x2, y2..." format. Colliders work differently internally
// depending on the shape the coordinate describes. Collision detection is
// faster for singular points and bounding boxes than with lines and polygons.
// It returns nil for empty or odd-length coordinate lists.
func NewCollider(coords []float64) Collider {
	if len(coords) == 0 || len(coords)%2 != 0 {
		return nil
	}
	// Two values: a single point.
	if len(coords) == 2 {
		return newPoint(coords[0], coords[1])
	}
	// Four values: a line segment.
	if len(coords) == 4 {
		return newLine(newPoint(coords[0], coords[1]), newPoint(coords[2], coords[3]))
	}
	// Eight values: possibly an axis-aligned rectangle. It is one exactly
	// when the four corners use only two distinct x values and two distinct
	// y values; then the cheaper bounding-box collider can be used.
	if len(coords) == 8 {
		// Check to see that the coordinates have two unique x and y values
		var uniqueX, uniqueY []float64
		for i, val := range coords {
			taken := false
			if i%2 == 0 {
				for _, val2 := range uniqueX {
					if val == val2 {
						taken = true
					}
				}
				if !taken {
					uniqueX = append(uniqueX, val)
				}
			} else {
				for _, val2 := range uniqueY {
					if val == val2 {
						taken = true
					}
				}
				if !taken {
					uniqueY = append(uniqueY, val)
				}
			}
		}
		if len(uniqueX) == 2 && len(uniqueY) == 2 {
			// Normalize so the first corner is the minimum and the second
			// the maximum.
			return newBounding(newPoint(math.Min(uniqueX[0], uniqueX[1]), math.Min(uniqueY[0], uniqueY[1])),
				newPoint(math.Max(uniqueX[0], uniqueX[1]), math.Max(uniqueY[0], uniqueY[1])))
		}
	}
	// Anything else becomes a general polygon of the listed vertices.
	points := make([]*point, len(coords)/2)
	for i := 0; i < len(coords); i += 2 {
		points[i/2] = newPoint(coords[i], coords[i+1])
	}
	return newPolygon(points)
}
// Collides checks if two Collider-satisfying objects are overlapping.
func Collides(collider1, collider2 Collider) bool {
switch collider2.(type) {
case *point:
return collider1.onPoint(collider2.(*point))
case *bounding:
return collider1.onBounding(collider2.(*bounding))
case *line:
return collider1.onLine(collider2.(*line))
case *polygon:
return collider1.onPolygon(collider2.(*polygon))
default:
return false
}
} | collider.go | 0.817101 | 0.704478 | collider.go | starcoder |
package p1865
// FindSumPairs keeps nums2 indexed in a balanced tree (keyed by value with a
// duplicate count per node) so Count can look up complements quickly.
type FindSumPairs struct {
	nums1 []int
	nums2 []int
	root  *Node // AVL tree over the values of nums2
}
// Constructor builds a FindSumPairs, inserting every nums2 value into the
// AVL tree.
func Constructor(nums1 []int, nums2 []int) FindSumPairs {
	var root *Node
	for _, num := range nums2 {
		root = Insert(root, num)
	}
	return FindSumPairs{nums1, nums2, root}
}
// Add increments nums2[index] by val, keeping the tree in sync by removing
// the old value and inserting the new one.
func (this *FindSumPairs) Add(index int, val int) {
	this.root = Delete(this.root, this.nums2[index])
	this.nums2[index] += val
	this.root = Insert(this.root, this.nums2[index])
}
// Count returns the number of (i, j) pairs with nums1[i] + nums2[j] == tot by
// looking up the complement of each nums1 value in the tree.
func (this *FindSumPairs) Count(tot int) int {
	total := 0
	for _, num := range this.nums1 {
		if num >= tot {
			continue // complement would be <= 0; cannot appear in nums2
		}
		if node := Search(this.root, tot-num); node != nil {
			total += node.cnt
		}
	}
	return total
}
/**
 * Your FindSumPairs object will be instantiated and called as such:
 * obj := Constructor(nums1, nums2);
 * obj.Add(index,val);
 * param_2 := obj.Count(tot);
 */
/**
 * Node is a node of an AVL tree keyed by integer value, with a duplicate
 * count so equal keys share a single node.
 */
type Node struct {
	key         int // the stored value
	height      int // subtree height (a leaf has height 1)
	cnt         int // number of duplicates of key held by this node
	left, right *Node
}
// Height returns the node's height, treating a nil node as height 0.
func (node *Node) Height() int {
	if node != nil {
		return node.height
	}
	return 0
}
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// NewNode allocates a leaf node for key with count 1 and height 1.
// A composite literal replaces the previous new(Node) + field assignments,
// which is the idiomatic Go construction form.
func NewNode(key int) *Node {
	return &Node{
		key:    key,
		height: 1,
		cnt:    1,
	}
}
// rightRotate performs a right rotation around y and returns the new subtree
// root x (y's former left child). Heights of the two moved nodes are
// recomputed afterwards.
func rightRotate(y *Node) *Node {
	x := y.left
	t2 := x.right
	// Rotate: x becomes the root, y its right child, and t2 (x's former
	// right subtree) is re-attached as y's left subtree.
	x.right = y
	y.left = t2
	// Update heights: y first, since x's new height depends on it.
	y.height = max(y.left.Height(), y.right.Height()) + 1
	x.height = max(x.left.Height(), x.right.Height()) + 1
	return x
}

// leftRotate performs a left rotation around x and returns the new subtree
// root y (x's former right child).
func leftRotate(x *Node) *Node {
	y := x.right
	t2 := y.left
	// Rotate: y becomes the root, x its left child, and t2 (y's former left
	// subtree) is re-attached as x's right subtree.
	y.left = x
	x.right = t2
	// Update heights: x first, since y's new height depends on it.
	x.height = max(x.left.Height(), x.right.Height()) + 1
	y.height = max(y.left.Height(), y.right.Height()) + 1
	return y
}
// GetBalance returns the balance factor: left height minus right height.
// A nil node balances to 0.
func (node *Node) GetBalance() int {
	if node != nil {
		return node.left.Height() - node.right.Height()
	}
	return 0
}
// Insert adds key into the AVL subtree rooted at node and returns the new
// subtree root. A duplicate key increments the existing node's cnt instead
// of adding a node. The tree is rebalanced on the way back up the recursion.
func Insert(node *Node, key int) *Node {
	if node == nil {
		return NewNode(key)
	}
	if node.key == key {
		node.cnt++
		return node
	}
	if node.key > key {
		node.left = Insert(node.left, key)
	} else {
		node.right = Insert(node.right, key)
	}
	node.height = max(node.left.Height(), node.right.Height()) + 1
	balance := node.GetBalance()
	// Left-Left case: the new key landed in the left child's left subtree.
	if balance > 1 && key < node.left.key {
		return rightRotate(node)
	}
	// Right-Right case.
	if balance < -1 && key > node.right.key {
		return leftRotate(node)
	}
	// Left-Right case: rotate the left child first, then this node.
	if balance > 1 && key > node.left.key {
		node.left = leftRotate(node.left)
		return rightRotate(node)
	}
	// Right-Left case: rotate the right child first, then this node.
	if balance < -1 && key < node.right.key {
		node.right = rightRotate(node.right)
		return leftRotate(node)
	}
	return node
}
// MinValueNode returns the leftmost (minimum-key) node of the subtree.
// The subtree must be non-nil.
func MinValueNode(root *Node) *Node {
	node := root
	for node.left != nil {
		node = node.left
	}
	return node
}
// Delete removes one occurrence of key from the AVL subtree rooted at root
// and returns the new subtree root. A node is only physically removed once
// its duplicate count (cnt) drops to zero; the tree is then rebalanced on
// the way back up the recursion.
func Delete(root *Node, key int) *Node {
	if root == nil {
		return nil
	}
	if key < root.key {
		root.left = Delete(root.left, key)
	} else if key > root.key {
		root.right = Delete(root.right, key)
	} else {
		root.cnt--
		if root.cnt > 0 {
			// At least one duplicate remains; keep the node in place.
			return root
		}
		if root.left == nil || root.right == nil {
			// Zero or one child: splice the (possibly nil) child in.
			tmp := root.left
			if root.left == nil {
				tmp = root.right
			}
			root = tmp
		} else {
			// Two children: copy in the in-order successor, then delete it
			// from the right subtree.
			tmp := MinValueNode(root.right)
			root.key = tmp.key
			root.cnt = tmp.cnt
			// make sure tmp node deleted after call delete on root.right
			tmp.cnt = 1
			root.right = Delete(root.right, tmp.key)
		}
	}
	if root == nil {
		// The subtree became empty; nothing to rebalance.
		return root
	}
	root.height = max(root.left.Height(), root.right.Height()) + 1
	balance := root.GetBalance()
	// Left-Left case.
	if balance > 1 && root.left.GetBalance() >= 0 {
		return rightRotate(root)
	}
	// Left-Right case.
	if balance > 1 && root.left.GetBalance() < 0 {
		root.left = leftRotate(root.left)
		return rightRotate(root)
	}
	// Right-Right case.
	if balance < -1 && root.right.GetBalance() <= 0 {
		return leftRotate(root)
	}
	// Right-Left case.
	if balance < -1 && root.right.GetBalance() > 0 {
		root.right = rightRotate(root.right)
		return leftRotate(root)
	}
	return root
}
func Search(root *Node, key int) *Node {
if root == nil {
return root
}
if root.key == key {
return root
}
if root.key > key {
return Search(root.left, key)
}
return Search(root.right, key)
} | src/leetcode/set1000/set1000/set1800/set1860/p1865/solution.go | 0.543106 | 0.559049 | solution.go | starcoder |
package accounting
import (
"encoding/json"
)
// ExchangeRateResponse A response to the request for an exchange rate value. It represents the exchange rate from the source currency to the target currency.
type ExchangeRateResponse struct {
	// Designates if the response is a success ('OK') or failure ('ERR').
	Result string `json:"@result"`
	// The exchange rate between the 2 currencies
	ExchangeRate float32 `json:"exchangeRate"`
	// The ISO 4217 currency code that represents the source currency of the exchange rate.
	SourceCurrencyCode string `json:"sourceCurrencyCode"`
	// The ISO 4217 currency code that represents the target currency of the exchange rate.
	TargetCurrencyCode string `json:"targetCurrencyCode"`
}

// NewExchangeRateResponse instantiates a new ExchangeRateResponse object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewExchangeRateResponse(result string, exchangeRate float32, sourceCurrencyCode string, targetCurrencyCode string) *ExchangeRateResponse {
	this := ExchangeRateResponse{}
	this.Result = result
	this.ExchangeRate = exchangeRate
	this.SourceCurrencyCode = sourceCurrencyCode
	this.TargetCurrencyCode = targetCurrencyCode
	return &this
}

// NewExchangeRateResponseWithDefaults instantiates a new ExchangeRateResponse object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewExchangeRateResponseWithDefaults() *ExchangeRateResponse {
	this := ExchangeRateResponse{}
	return &this
}

// GetResult returns the Result field value
func (o *ExchangeRateResponse) GetResult() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Result
}

// GetResultOk returns a tuple with the Result field value
// and a boolean to check if the value has been set.
func (o *ExchangeRateResponse) GetResultOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Result, true
}

// SetResult sets field value
func (o *ExchangeRateResponse) SetResult(v string) {
	o.Result = v
}

// GetExchangeRate returns the ExchangeRate field value
func (o *ExchangeRateResponse) GetExchangeRate() float32 {
	if o == nil {
		var ret float32
		return ret
	}
	return o.ExchangeRate
}

// GetExchangeRateOk returns a tuple with the ExchangeRate field value
// and a boolean to check if the value has been set.
func (o *ExchangeRateResponse) GetExchangeRateOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.ExchangeRate, true
}

// SetExchangeRate sets field value
func (o *ExchangeRateResponse) SetExchangeRate(v float32) {
	o.ExchangeRate = v
}

// GetSourceCurrencyCode returns the SourceCurrencyCode field value
func (o *ExchangeRateResponse) GetSourceCurrencyCode() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.SourceCurrencyCode
}

// GetSourceCurrencyCodeOk returns a tuple with the SourceCurrencyCode field value
// and a boolean to check if the value has been set.
func (o *ExchangeRateResponse) GetSourceCurrencyCodeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.SourceCurrencyCode, true
}

// SetSourceCurrencyCode sets field value
func (o *ExchangeRateResponse) SetSourceCurrencyCode(v string) {
	o.SourceCurrencyCode = v
}

// GetTargetCurrencyCode returns the TargetCurrencyCode field value
func (o *ExchangeRateResponse) GetTargetCurrencyCode() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.TargetCurrencyCode
}

// GetTargetCurrencyCodeOk returns a tuple with the TargetCurrencyCode field value
// and a boolean to check if the value has been set.
func (o *ExchangeRateResponse) GetTargetCurrencyCodeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.TargetCurrencyCode, true
}

// SetTargetCurrencyCode sets field value
func (o *ExchangeRateResponse) SetTargetCurrencyCode(v string) {
	o.TargetCurrencyCode = v
}

// MarshalJSON serializes all four (always-required) fields. The generated
// `if true { ... }` dead conditionals have been removed; the emitted JSON is
// unchanged (map keys are marshaled in sorted order, as before).
func (o ExchangeRateResponse) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"@result":            o.Result,
		"exchangeRate":       o.ExchangeRate,
		"sourceCurrencyCode": o.SourceCurrencyCode,
		"targetCurrencyCode": o.TargetCurrencyCode,
	}
	return json.Marshal(toSerialize)
}
// NullableExchangeRateResponse wraps an ExchangeRateResponse pointer together
// with an isSet flag, so that an explicit JSON null can be distinguished from
// an absent value.
type NullableExchangeRateResponse struct {
	value *ExchangeRateResponse
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableExchangeRateResponse) Get() *ExchangeRateResponse {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableExchangeRateResponse) Set(val *ExchangeRateResponse) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableExchangeRateResponse) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableExchangeRateResponse) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableExchangeRateResponse returns a wrapper already marked as set.
func NewNullableExchangeRateResponse(val *ExchangeRateResponse) *NullableExchangeRateResponse {
	return &NullableExchangeRateResponse{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableExchangeRateResponse) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set.
func (v *NullableExchangeRateResponse) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package vesper
import (
"strings"
)
// IsArray returns true if the object is an array (its Type is ArrayType).
func IsArray(obj *Object) bool {
	return obj.Type == ArrayType
}
// ArrayEqual - return true of the two arrays are equal, i.e. the same length
// and all the elements are also equal (compared with Equal).
func ArrayEqual(v1 *Object, v2 *Object) bool {
	a := v1.elements
	b := v2.elements
	if len(a) != len(b) {
		return false
	}
	for i, e := range a {
		if !Equal(e, b[i]) {
			return false
		}
	}
	return true
}
// arrayToString renders the array as "[e1 e2 ...]" using each element's
// String method, with single spaces between elements.
func arrayToString(a *Object) string {
	var sb strings.Builder
	sb.WriteString("[")
	for i, e := range a.elements {
		if i > 0 {
			sb.WriteString(" ")
		}
		sb.WriteString(e.String())
	}
	sb.WriteString("]")
	return sb.String()
}
// MakeArray - create a new <array> object of the specified size, with all
// elements initialized to the specified value.
func MakeArray(size int, init *Object) *Object {
	elements := make([]*Object, size)
	for i := range elements {
		elements[i] = init
	}
	return ArrayFromElementsNoCopy(elements)
}
// Array - create a new <array> object from the given element objects.
func Array(elements ...*Object) *Object {
	return ArrayFromElements(elements, len(elements))
}

// ArrayFromElements - return a new <array> object from the given slice of elements. The slice is copied.
func ArrayFromElements(elements []*Object, count int) *Object {
	copied := make([]*Object, count)
	copy(copied, elements[:count])
	return ArrayFromElementsNoCopy(copied)
}

// ArrayFromElementsNoCopy - create a new <array> object from the given slice of elements. The slice is NOT copied.
func ArrayFromElementsNoCopy(elements []*Object) *Object {
	return &Object{Type: ArrayType, elements: elements}
}

// CopyArray - return a copy of the <array> object
func CopyArray(a *Object) *Object {
	return ArrayFromElements(a.elements, len(a.elements))
}
// ToArray - convert the object to an <array>, if possible
func ToArray(obj *Object) (*Object, error) {
switch obj.Type {
case ArrayType:
return obj, nil
case ListType:
return listToArray(obj), nil
case StructType:
return structToArray(obj), nil
case StringType:
return stringToArray(obj), nil
}
return nil, Error(ArgumentErrorKey, "to-array expected <array>, <list>, <struct>, or <string>, got a ", obj.Type)
} | array.go | 0.712632 | 0.46563 | array.go | starcoder |
package stl
import (
"fmt"
"math"
gl "github.com/fogleman/fauxgl"
pb "github.com/gmlewis/stldice/v4/stl2svx/proto"
)
// STL represents a converted STL file to a mesh.
type STL struct {
	// MBB is the minimum bounding box for the entire STL file.
	MBB gl.Box
	// Mesh is the mesh of triangles.
	Mesh *gl.Mesh
	// The voxel dimensions for the whole model (computed in New from the
	// bounding box and the voxels-per-millimeter scale).
	ModelDimX, ModelDimY, ModelDimZ int
	// The voxel dimensions for each subregion.
	DimX, DimY, DimZ int
	// MMPV is the millimeters per voxel of the model.
	MMPV float64
	// SubregionScale is the size in millimeters of a subregion's largest
	// voxel dimension.
	SubregionScale float64
}
// New parses a pb.STLFile and returns an STL.
// dim represents the number of voxels in the widest dimension.
// nX, nY, nZ represent the number of subdivisions in each dimension.
// It returns an error when the requested subdivision would produce a
// zero-sized region in any dimension.
func New(p *pb.STLFile, dim, nX, nY, nZ int64) (*STL, error) {
	// Convert every protobuf triangle into a mesh triangle.
	var tris []*gl.Triangle
	for _, t := range p.GetTriangles() {
		tris = append(tris, gl.NewTriangleForPoints(
			gl.V(t.V1.X, t.V1.Y, t.V1.Z),
			gl.V(t.V2.X, t.V2.Y, t.V2.Z),
			gl.V(t.V3.X, t.V3.Y, t.V3.Z)))
	}
	mesh := gl.NewTriangleMesh(tris)
	mbb := mesh.BoundingBox()
	// scale is the largest extent of the bounding box, in millimeters.
	scale := mbb.Max.X - mbb.Min.X
	if dy := mbb.Max.Y - mbb.Min.Y; dy > scale {
		scale = dy
	}
	if dz := mbb.Max.Z - mbb.Min.Z; dz > scale {
		scale = dz
	}
	vpmm := float64(dim) / scale // voxels per millimeter
	mmpv := 1.0 / vpmm           // millimeters per voxel
	// Voxel dimensions of the whole model, rounded up so the model fits.
	modelDimInMM := mbb.Size()
	newModelDimX := int(math.Ceil(modelDimInMM.X * vpmm))
	newModelDimY := int(math.Ceil(modelDimInMM.Y * vpmm))
	newModelDimZ := int(math.Ceil(modelDimInMM.Z * vpmm))
	// Voxel dimensions of each subregion (integer division truncates).
	dimX := newModelDimX / int(nX)
	dimY := newModelDimY / int(nY)
	dimZ := newModelDimZ / int(nZ)
	if dimX == 0 || dimY == 0 || dimZ == 0 {
		return nil, fmt.Errorf("too many divisions: region dimensions = (%v,%v,%v)", dimX, dimY, dimZ)
	}
	// A subregion's scale is set by its largest voxel dimension.
	maxDim := dimX
	if dimY > maxDim {
		maxDim = dimY
	}
	if dimZ > maxDim {
		maxDim = dimZ
	}
	subregionScale := float64(maxDim) * mmpv
	return &STL{
		MBB:  mbb,
		Mesh: mesh,
		ModelDimX: newModelDimX, ModelDimY: newModelDimY, ModelDimZ: newModelDimZ,
		DimX: dimX, DimY: dimY, DimZ: dimZ,
		MMPV:           mmpv,
		SubregionScale: subregionScale,
	}, nil
}
package query
import (
"context"
"errors"
"fmt"
"sort"
"strconv"
"strings"
"time"
"github.com/Peripli/service-manager/pkg/query/parser"
"github.com/antlr/antlr4/runtime/Go/antlr"
"github.com/Peripli/service-manager/pkg/util"
)
const (
	// Separator is the separator between queries of different type
	Separator string = "and"
)

// CriterionType is a type of criteria to be applied when querying
type CriterionType string

const (
	// FieldQuery denotes that the query should be executed on the entity's fields
	FieldQuery CriterionType = "fieldQuery"
	// LabelQuery denotes that the query should be executed on the entity's labels
	LabelQuery CriterionType = "labelQuery"
	// ResultQuery is used to further process the result (ordering and limiting)
	ResultQuery CriterionType = "resultQuery"
	// ExistQuery denotes that the query should test for the existence of any record in a given sub-query
	ExistQuery CriterionType = "existQuery"
)

// OperatorType represents the type of the query operator
type OperatorType string

const (
	// UnivariateOperator denotes that the operator expects exactly one variable on the right side
	UnivariateOperator OperatorType = "univariate"
	// MultivariateOperator denotes that the operator expects more than one variable on the right side
	MultivariateOperator OperatorType = "multivariate"
)

// OrderType is the type of the order in which result is presented
type OrderType string

const (
	// AscOrder orders result in ascending order
	AscOrder OrderType = "ASC"
	// DescOrder orders result in descending order
	DescOrder OrderType = "DESC"
)

const (
	// OrderBy should be used as a left operand in a Criterion that orders the result
	OrderBy string = "orderBy"
	// Limit should be used as a left operand in a Criterion that caps the number of results
	Limit string = "limit"
)

var (
	// Operators returns the supported query operators
	Operators = []Operator{
		EqualsOperator, NotEqualsOperator,
		GreaterThanOperator, LessThanOperator,
		GreaterThanOrEqualOperator, LessThanOrEqualOperator,
		InOperator, NotInOperator, EqualsOrNilOperator,
	}
	// CriteriaTypes returns the supported query criteria types
	CriteriaTypes = []CriterionType{FieldQuery, LabelQuery, ExistQuery}
)
// Operator is a query operator
type Operator interface {
	// String returns the text representation of the operator
	String() string
	// Type returns the type of the operator (univariate or multivariate)
	Type() OperatorType
	// IsNullable returns true if the operator allows results with null value in the RHS
	IsNullable() bool
	// IsNumeric returns true if the operator works only with numbers
	IsNumeric() bool
}
// Criterion is a single part of a query criteria
type Criterion struct {
	// LeftOp is the left operand in the query
	LeftOp string
	// Operator is the query operator
	Operator Operator
	// RightOp is the right operand in the query which can be multivariate
	RightOp []string
	// Type is the type of the query (field, label, result, or exist)
	Type CriterionType
}
// ByField constructs a new criterion for field querying
func ByField(operator Operator, leftOp string, rightOp ...string) Criterion {
	return NewCriterion(leftOp, operator, rightOp, FieldQuery)
}

// ByNotExists constructs a criterion asserting that the given sub-query
// matches no records.
func ByNotExists(subQuery string) Criterion {
	return NewCriterion("", NotExistsSubquery, []string{subQuery}, ExistQuery)
}

// ByExists constructs a criterion asserting that the given sub-query matches
// at least one record.
func ByExists(subQuery string) Criterion {
	return NewCriterion("", ExistsSubquery, []string{subQuery}, ExistQuery)
}

// ByLabel constructs a new criterion for label querying
func ByLabel(operator Operator, leftOp string, rightOp ...string) Criterion {
	return NewCriterion(leftOp, operator, rightOp, LabelQuery)
}

// OrderResultBy constructs a new criterion for result order
func OrderResultBy(field string, orderType OrderType) Criterion {
	return NewCriterion(OrderBy, NoOperator, []string{field, string(orderType)}, ResultQuery)
}

// LimitResultBy constructs a new criterion that limits the number of results
func LimitResultBy(limit int) Criterion {
	limitString := strconv.Itoa(limit)
	return NewCriterion(Limit, NoOperator, []string{limitString}, ResultQuery)
}

// NewCriterion assembles a Criterion from its parts; prefer the typed helper
// constructors above.
func NewCriterion(leftOp string, operator Operator, rightOp []string, criteriaType CriterionType) Criterion {
	return Criterion{LeftOp: leftOp, Operator: operator, RightOp: rightOp, Type: criteriaType}
}
// Validate the criterion fields. It returns a *util.UnsupportedQueryError
// (or a plain error for a missing right operand) when the criterion is not
// well-formed.
func (c Criterion) Validate() error {
	if len(c.RightOp) == 0 {
		return errors.New("missing right operand")
	}
	if c.Type == ResultQuery {
		// Result criteria (limit / orderBy) have their own rules and skip
		// the operator checks below.
		if c.LeftOp == Limit {
			limit, err := strconv.Atoi(c.RightOp[0])
			if err != nil {
				return fmt.Errorf("could not convert string to int: %s", err.Error())
			}
			if limit < 0 {
				return &util.UnsupportedQueryError{Message: fmt.Sprintf("limit (%d) is invalid. Limit should be positive number", limit)}
			}
		}
		if c.LeftOp == OrderBy {
			if len(c.RightOp) < 2 {
				return &util.UnsupportedQueryError{Message: "order by result expects field name and order type"}
			}
		}
		return nil
	}
	// A univariate operator cannot take multiple right operands.
	if len(c.RightOp) > 1 && c.Operator.Type() == UnivariateOperator {
		return &util.UnsupportedQueryError{Message: fmt.Sprintf("multiple values %s received for single value operation %s", c.RightOp, c.Operator)}
	}
	if c.Operator.IsNullable() && c.Type != FieldQuery {
		return &util.UnsupportedQueryError{Message: "nullable operations are supported only for field queries"}
	}
	// Numeric operators accept numbers or datetimes on the right side.
	if c.Operator.IsNumeric() && !isNumeric(c.RightOp[0]) && !isDateTime(c.RightOp[0]) {
		return &util.UnsupportedQueryError{Message: fmt.Sprintf("%s is numeric operator, but the right operand %s is not numeric or datetime", c.Operator, c.RightOp[0])}
	}
	// The separator keyword must not appear in/around the left operand; it
	// would make the textual query ambiguous when re-parsed.
	if strings.Contains(c.LeftOp, fmt.Sprintf(" %s ", Separator)) ||
		strings.Contains(c.LeftOp, fmt.Sprintf("%s ", Separator)) ||
		strings.Contains(c.LeftOp, fmt.Sprintf(" %s", Separator)) ||
		c.LeftOp == Separator {
		return &util.UnsupportedQueryError{Message: fmt.Sprintf("separator %s is not allowed in %s with left operand \"%s\".", Separator, c.Type, c.LeftOp)}
	}
	// New lines are forbidden in values, except for exist sub-queries.
	for _, op := range c.RightOp {
		if strings.ContainsRune(op, '\n') && c.Type != ExistQuery {
			return &util.UnsupportedQueryError{Message: fmt.Sprintf("%s with key \"%s\" has value \"%s\" contaning forbidden new line character", c.Type, c.LeftOp, op)}
		}
	}
	return nil
}
// validateCriteria validates every criterion, rejects duplicate label query
// keys, and finally runs the whole-set checks. The original also built a
// map of field-query operands that was never read; that dead work is removed.
func validateCriteria(criteria []Criterion) error {
	// Count occurrences of each label-query left operand so duplicates can
	// be rejected below.
	labelQueryLeftOperands := make(map[string]int)
	for _, criterion := range criteria {
		if criterion.Type == LabelQuery {
			labelQueryLeftOperands[criterion.LeftOp]++
		}
	}
	for _, c := range criteria {
		// disallow duplicate label queries
		if c.Type == LabelQuery && labelQueryLeftOperands[c.LeftOp] > 1 {
			return &util.UnsupportedQueryError{Message: fmt.Sprintf("duplicate label query key: %s", c.LeftOp)}
		}
		if err := c.Validate(); err != nil {
			return err
		}
	}
	return validateWholeCriteria(criteria...)
}
// criteriaCtxKey is the private context key type under which the criteria
// slice is stored; being unexported, no other package can collide with it.
type criteriaCtxKey struct{}
// AddCriteria adds the given criteria to the context and returns an error if
// the resulting criteria set is not valid.
func AddCriteria(ctx context.Context, newCriteria ...Criterion) (context.Context, error) {
	currentCriteria := CriteriaForContext(ctx)
	// Copy before appending: appending directly to the slice stored in the
	// context could share — and clobber — the backing array of a sibling
	// context derived from the same parent.
	criteria := make([]Criterion, 0, len(currentCriteria)+len(newCriteria))
	criteria = append(criteria, currentCriteria...)
	criteria = append(criteria, newCriteria...)
	if err := validateCriteria(criteria); err != nil {
		return nil, err
	}
	return context.WithValue(ctx, criteriaCtxKey{}, criteria), nil
}
// CriteriaForContext returns the criteria stored in the given context, or an
// empty slice when none have been set.
func CriteriaForContext(ctx context.Context) []Criterion {
	criteria, ok := ctx.Value(criteriaCtxKey{}).([]Criterion)
	if !ok {
		return []Criterion{}
	}
	return criteria
}
// ContextWithCriteria returns a new context carrying exactly the given
// criteria, replacing any previously stored set.
func ContextWithCriteria(ctx context.Context, criteria ...Criterion) (context.Context, error) {
	err := validateCriteria(criteria)
	if err != nil {
		return nil, err
	}
	return context.WithValue(ctx, criteriaCtxKey{}, criteria), nil
}
// Parse parses the given query expression and builds criteria of the
// provided type. An empty expression yields an empty criteria slice.
func Parse(criterionType CriterionType, expression string) ([]Criterion, error) {
	if expression == "" {
		return []Criterion{}, nil
	}
	// The listener collects criteria while walking the parse tree; it is
	// also registered as the parser's error listener so syntax errors are
	// captured in parsingListener.err rather than printed to stderr.
	parsingListener := &queryListener{criteriaType: criterionType}
	input := antlr.NewInputStream(expression)
	lexer := parser.NewQueryLexer(input)
	lexer.RemoveErrorListeners()
	stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	p := parser.NewQueryParser(stream)
	p.RemoveErrorListeners()
	p.AddErrorListener(parsingListener)
	antlr.ParseTreeWalkerDefault.Walk(parsingListener, p.Expression())
	if parsingListener.err != nil {
		return nil, parsingListener.err
	}
	criteria := parsingListener.result
	if err := validateCriteria(criteria); err != nil {
		return nil, err
	}
	// Sort by left operand for deterministic output.
	sort.Slice(criteria, func(i, j int) bool {
		return criteria[i].LeftOp < criteria[j].LeftOp
	})
	return criteria, nil
}
// RetrieveFromCriteria searches for the value (rightOp) of a given key
// (leftOp) in a set of criteria. It returns the first match, or "" when the
// key is absent. A matching criterion with no right operand is skipped
// instead of panicking on the out-of-range index.
func RetrieveFromCriteria(key string, criteria ...Criterion) string {
	for _, criterion := range criteria {
		if criterion.LeftOp == key && len(criterion.RightOp) > 0 {
			return criterion.RightOp[0]
		}
	}
	return ""
}
// isNumeric reports whether str parses as a number. A single ParseFloat
// suffices: every string accepted by strconv.Atoi is also accepted by
// strconv.ParseFloat, so the previous Atoi pre-check was redundant.
func isNumeric(str string) bool {
	_, err := strconv.ParseFloat(str, 64)
	return err == nil
}
// isDateTime reports whether str is a valid RFC 3339 timestamp.
func isDateTime(str string) bool {
	if _, err := time.Parse(time.RFC3339, str); err != nil {
		return false
	}
	return true
}
func validateWholeCriteria(criteria ...Criterion) error {
isLimited := false
for _, criterion := range criteria {
if criterion.LeftOp == Limit {
if isLimited {
return fmt.Errorf("zero/one limit criterion expected but multiple provided")
}
isLimited = true
}
}
return nil
} | pkg/query/selection.go | 0.767254 | 0.404037 | selection.go | starcoder |
Package g711 implements encoding and decoding of G711 PCM sound data.
G.711 is an ITU-T standard for audio companding.
For usage details please see the code snippets in the cmd folder.
*/
package g711
import (
"errors"
"io"
)
// Format identifiers accepted by the encoder/decoder constructors.
const (
	// Input and output formats
	Alaw = iota // Alaw G711 encoded PCM data
	Ulaw        // Ulaw G711 encoded PCM data
	Lpcm        // Lpcm 16bit signed linear data
)
// Decoder reads G711 PCM data and decodes it to 16bit 8000Hz LPCM
type Decoder struct {
	decode func([]byte) []byte // decoding function (DecodeAlaw or DecodeUlaw, set by the constructors)
	source io.Reader           // source data
}
// Encoder encodes 16bit 8000Hz LPCM data to G711 PCM or
// directly transcodes between A-law and u-law
type Encoder struct {
	input       int                 // input format (Alaw, Ulaw or Lpcm)
	encode      func([]byte) []byte // encoding function, used when input is Lpcm
	transcode   func([]byte) []byte // transcoding function, used for Alaw<->Ulaw input
	destination io.Writer           // output data
}
// NewAlawDecoder returns a pointer to a Decoder that implements an io.Reader.
// It takes as input the source data Reader.
func NewAlawDecoder(reader io.Reader) (*Decoder, error) {
	if reader == nil {
		return nil, errors.New("io.Reader is nil")
	}
	return &Decoder{decode: DecodeAlaw, source: reader}, nil
}
// NewUlawDecoder returns a pointer to a Decoder that implements an io.Reader.
// It takes as input the source data Reader.
func NewUlawDecoder(reader io.Reader) (*Decoder, error) {
	if reader == nil {
		return nil, errors.New("io.Reader is nil")
	}
	return &Decoder{decode: DecodeUlaw, source: reader}, nil
}
// NewAlawEncoder returns a pointer to an Encoder that implements an io.Writer.
// It takes as input the destination data Writer and the input encoding format,
// which must be Ulaw or Lpcm.
func NewAlawEncoder(writer io.Writer, input int) (*Encoder, error) {
	if writer == nil {
		return nil, errors.New("io.Writer is nil")
	}
	if input != Ulaw && input != Lpcm {
		// Lowercased message: error strings should not be capitalized
		// (Go convention, staticcheck ST1005).
		return nil, errors.New("invalid input format")
	}
	w := Encoder{
		input:       input,
		encode:      EncodeAlaw,
		transcode:   Ulaw2Alaw,
		destination: writer,
	}
	return &w, nil
}
// NewUlawEncoder returns a pointer to an Encoder that implements an io.Writer.
// It takes as input the destination data Writer and the input encoding format,
// which must be Alaw or Lpcm.
func NewUlawEncoder(writer io.Writer, input int) (*Encoder, error) {
	if writer == nil {
		return nil, errors.New("io.Writer is nil")
	}
	if input != Alaw && input != Lpcm {
		// Lowercased message: error strings should not be capitalized
		// (Go convention, staticcheck ST1005).
		return nil, errors.New("invalid input format")
	}
	w := Encoder{
		input:       input,
		encode:      EncodeUlaw,
		transcode:   Alaw2Ulaw,
		destination: writer,
	}
	return &w, nil
}
// Reset discards the Decoder state. This permits reusing a Decoder rather than allocating a new one.
func (r *Decoder) Reset(reader io.Reader) error {
	if reader != nil {
		r.source = reader
		return nil
	}
	return errors.New("io.Reader is nil")
}
// Reset discards the Encoder state. This permits reusing an Encoder rather than allocating a new one.
func (w *Encoder) Reset(writer io.Writer) error {
	if writer != nil {
		w.destination = writer
		return nil
	}
	return errors.New("io.Writer is nil")
}
// Read decodes G711 data. Reads up to len(p) bytes into p, returns the number
// of bytes read and any error encountered.
func (r *Decoder) Read(p []byte) (i int, err error) {
	if len(p) == 0 {
		return
	}
	// Each G711 byte expands to two LPCM bytes, so request half as many.
	b := make([]byte, len(p)/2)
	i, err = r.source.Read(b)
	// Decode only the bytes actually read; the previous code decoded the
	// whole buffer, including bytes past a short read.
	copy(p, r.decode(b[:i]))
	i *= 2 // Report back the number of LPCM bytes stored in p
	return
}
// Write encodes G711 Data. Writes len(p) bytes from p to the underlying data stream,
// returns the number of bytes written from p (0 <= n <= len(p)) and any error encountered
// that caused the write to stop early.
func (w *Encoder) Write(p []byte) (i int, err error) {
if len(p) == 0 {
return
}
if w.input == Lpcm { // Encode LPCM data to G711
i, err = w.destination.Write(w.encode(p))
if err == nil && len(p)%2 != 0 {
err = errors.New("Odd number of LPCM bytes, incomplete frame")
}
i *= 2 // Report back the correct number of bytes written from p
} else { // Trans-code
i, err = w.destination.Write(w.transcode(p))
}
return
} | g711.go | 0.683631 | 0.645288 | g711.go | starcoder |
package dsp
import "math"
// NCO is a Numeric Controlled Oscillator: a phase accumulator advanced by a
// fixed increment per sample. Based on the GNURadio implementation, whose
// sample-generating methods advance the phase once per produced sample.
type NCO struct {
	phase          float32 // current phase, radians
	phaseIncrement float32 // phase advance per step, radians/step
}

// MakeNCO returns a new NCO with zero phase and zero frequency.
func MakeNCO() *NCO {
	return &NCO{
		phase:          0,
		phaseIncrement: 0,
	}
}

// SetPhase in Radians
func (nco *NCO) SetPhase(angle float32) {
	nco.phase = angle
}

// AdjustPhase Increments / decrements current phase. In radians
func (nco *NCO) AdjustPhase(deltaAngle float32) {
	nco.phase += deltaAngle
}

// SetFrequency Sets the Phase Increment in Radians / step
func (nco *NCO) SetFrequency(rate float32) {
	nco.phaseIncrement = rate
}

// AdjustFrequency Increments / Decrements the phase increment. In radians / step
func (nco *NCO) AdjustFrequency(deltaRate float32) {
	nco.phaseIncrement += deltaRate
}

// Step makes a single step in NCO
func (nco *NCO) Step() {
	nco.phase += nco.phaseIncrement
}

// StepN makes N steps in NCO
func (nco *NCO) StepN(n int) {
	nco.phase += nco.phaseIncrement * float32(n)
}

// GetPhase returns the current phase value in radians
func (nco *NCO) GetPhase() float32 {
	return nco.phase
}

// GetPhaseIncrement returns the phase increment value in radians / step
func (nco *NCO) GetPhaseIncrement() float32 {
	return nco.phaseIncrement
}

// Float32Sin computes n successive sine samples, advancing the oscillator
// phase by one step per sample. (The previous version never stepped the
// phase, so every "wave" sample was identical.)
func (nco *NCO) Float32Sin(n int, amplitude float32) []float32 {
	d := make([]float32, n)
	for i := 0; i < n; i++ {
		d[i] = float32(math.Sin(float64(nco.phase))) * amplitude
		nco.Step()
	}
	return d
}

// Float32Cos computes n successive cosine samples, advancing the oscillator
// phase by one step per sample.
func (nco *NCO) Float32Cos(n int, amplitude float32) []float32 {
	d := make([]float32, n)
	for i := 0; i < n; i++ {
		d[i] = float32(math.Cos(float64(nco.phase))) * amplitude
		nco.Step()
	}
	return d
}

// Complex64SinCos computes n successive complex samples with sine in the
// real part and cosine in the imaginary part (as the method name implies),
// advancing the oscillator phase by one step per sample.
func (nco *NCO) Complex64SinCos(n int, amplitude float32) []complex64 {
	d := make([]complex64, n)
	for i := 0; i < n; i++ {
		sin, cos := math.Sincos(float64(nco.phase))
		d[i] = complex(float32(sin)*amplitude, float32(cos)*amplitude)
		nco.Step()
	}
	return d
}
package assert
import (
"fmt"
"path/filepath"
"reflect"
"regexp"
"runtime"
"testing"
"time"
)
// True asserts that the given value is the boolean true.
func True(t *testing.T, actual interface{}) {
	if actual == true {
		return
	}
	Failf(t, "Expected: true\nReceived: %v", actual)
}
// Eq asserts the values are equal, using reflect.DeepEqual to test equality.
func Eq(t *testing.T, expected interface{}, actual interface{}) {
	if expected == nil && actual == nil {
		return
	}
	if reflect.DeepEqual(expected, actual) {
		return
	}
	Failf(t, "Values are not equal\nExpected: %v\nReceived: %v", expected, actual)
}
// EqItems asserts the given slices have the same elements (regardless of
// their order). Duplicates are significant: each element must occur the same
// number of times in both slices.
func EqItems(t *testing.T, expected interface{}, actual interface{}) {
	var exp = reflect.ValueOf(expected)
	var act = reflect.ValueOf(actual)
	if exp.Type() != act.Type() {
		Failf(t, "Types are not equal\nExpected: %v\nReceived: %v", exp.Type(), act.Type())
	}
	if exp.Len() != act.Len() {
		Failf(t, "Lengths are not equal\nExpected: %v (%d elements)\nReceived: %v (%d elements)", exp, exp.Len(), act, act.Len())
	}
	if exp.Len() == 0 {
		return
	}
	// make a map[elem-type]int = number of occurrences of each element
	// we use reflection to create a dynamically typed map
	var keyType = exp.Index(0).Type()
	var valueType = reflect.TypeOf(int(0))
	var mapType = reflect.MapOf(keyType, valueType)
	merged := reflect.MakeMapWithSize(mapType, exp.Len())
	// count the number of expected occurrences (increment per element)
	for i := 0; i < exp.Len(); i++ {
		var existing = merged.MapIndex(exp.Index(i))
		if existing.IsValid() {
			merged.SetMapIndex(exp.Index(i), reflect.ValueOf(int(existing.Int())+1)) // increase by one
		} else {
			merged.SetMapIndex(exp.Index(i), reflect.ValueOf(int(1)))
		}
	}
	// count the number of actual occurrences (decrement per element; a
	// missing key means actual contains an element expected does not)
	for i := 0; i < act.Len(); i++ {
		var existing = merged.MapIndex(act.Index(i))
		if !existing.IsValid() {
			Failf(t, "Unexpected item %d: %v found in %v, expecting %v", i, act.Index(i), act, exp)
		}
		merged.SetMapIndex(act.Index(i), reflect.ValueOf(int(existing.Int())-1)) // decrease by one
	}
	// check that all counters balanced out to zero, i.e. every expected
	// element was actually found the right number of times
	for _, k := range merged.MapKeys() {
		var existing = merged.MapIndex(k)
		if existing.Int() != 0 {
			Failf(t, "Expected %v more of item %v", existing.Int(), k)
		}
	}
}
// NotEq asserts the given values are not equal, using reflect.DeepEqual to
// test equality.
func NotEq(t *testing.T, notThisValue interface{}, actual interface{}) {
	if !reflect.DeepEqual(notThisValue, actual) {
		return
	}
	Failf(t, "Expected a value other than %v", notThisValue)
}
// Err asserts the error is not nil.
func Err(t *testing.T, err error) {
	if err != nil {
		return
	}
	Failf(t, "Expected error hasn't occurred: %v", err)
}
// NoErr asserts the error is nil.
func NoErr(t *testing.T, err error) {
	if err == nil {
		return
	}
	Failf(t, "Unexpected error occurred: %v", err)
}
// NotNil verifies that the given value is not nil. Unlike a bare
// reflect.Value.IsNil call, it neither panics on an untyped nil
// (reflect.ValueOf(nil) is the zero Value) nor on values of non-nilable
// kinds such as ints and structs, which simply pass.
func NotNil(t *testing.T, actual interface{}) {
	if actual == nil {
		Failf(t, "Unexpected nil")
		return
	}
	act := reflect.ValueOf(actual)
	switch act.Kind() {
	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
		if act.IsNil() {
			Failf(t, "Unexpected nil, type %v", act.Type())
		}
	}
}
// Failf formats its arguments and fails immediately.
func Failf(t *testing.T, format string, args ...interface{}) {
	msg := fmt.Sprintf(format, args...)
	Fail(t, msg)
}
// Fail reports the given text together with an abbreviated call stack and
// fails immediately. When t is nil (use outside a test), the message is
// printed to stdout instead of failing a test.
func Fail(t *testing.T, text string) {
	stackString := "Call stack:\n"
	// Walk up the call stack starting at our caller, skipping frames inside
	// this file and stopping once the Go testing runtime is reached.
	for idx := 1; ; idx++ {
		_, file, line, ok := runtime.Caller(idx)
		if !ok {
			break
		}
		_, filename := filepath.Split(file)
		if filename == "assert.go" {
			continue
		}
		if filename == "testing.go" {
			break
		}
		stackString += fmt.Sprintf("%v:%v\n", filename, line)
	}
	if t != nil {
		t.Fatal(text, "\n", stackString)
	} else {
		fmt.Print(text, "\n", stackString)
	}
}
// MustMatch checks the value's string form against the given regular expression.
func MustMatch(t *testing.T, match *regexp.Regexp, value interface{}) {
	str := fmt.Sprint(value)
	if match.MatchString(str) {
		return
	}
	Failf(t, "Doesn't match regexp\nExpected: '%s'\nReceived: '%s'", match.String(), str)
}
// MustPanic ensures that the caller's context will panic and that the panic
// will match the given regular expression. It must be invoked via defer,
// because recover only has effect inside a deferred function:
//	func() {
//		defer MustPanic(t, regexp.MustCompile("+*"))
//		panic("some text")
//	}
func MustPanic(t *testing.T, match *regexp.Regexp) {
	if r := recover(); r != nil {
		// convert panic result to string; only string and error panics are
		// understood, anything else fails as an unknown panic result
		var str string
		switch x := r.(type) {
		case string:
			str = x
		case error:
			str = x.Error()
		default:
			Failf(t, "Unknown panic result '%v' for an expected panic: %s", r, match.String())
		}
		if !match.MatchString(str) {
			Failf(t, "Errors are not equal\nExpected: panic '%s'\nReceived: '%s'", match.String(), str)
		}
	} else {
		// recover returned nil: the protected code did not panic at all
		Failf(t, "Expected panic hasn't occurred: %s", match.String())
	}
}
// StringChannelExpect asserts that the given channel delivers the expected
// string before the timeout elapses.
func StringChannelExpect(t *testing.T, expected string, channel chan string, timeout time.Duration) {
	deadline := time.After(timeout)
	select {
	case got := <-channel:
		Eq(t, expected, got)
	case <-deadline:
		Failf(t, "Waiting for '%s' on the channel timed out after %v", expected, timeout)
	}
}
func StringChannelMustTimeout(t *testing.T, channel chan string, timeout time.Duration) {
select {
case received := <-channel:
Failf(t, "Received an unexpected value '%s' on the channel. Instead, it was expected to time out after %v", received, timeout)
case <-time.After(timeout):
}
} | test/assert/assert.go | 0.655667 | 0.504089 | assert.go | starcoder |
package codegen
import "github.com/pulumi/pulumi/pkg/v3/codegen/schema"
// visitTypeClosure invokes visitor on t and, recursively, on every type
// reachable from it: array/map element types, object properties, union
// members, and input/optional wrappers. The seen set prevents revisiting a
// type, which also guards against cycles in the schema type graph.
func visitTypeClosure(t schema.Type, visitor func(t schema.Type), seen Set) {
	if seen.Has(t) {
		return
	}
	seen.Add(t)
	visitor(t)
	switch st := t.(type) {
	case *schema.ArrayType:
		visitTypeClosure(st.ElementType, visitor, seen)
	case *schema.MapType:
		visitTypeClosure(st.ElementType, visitor, seen)
	case *schema.ObjectType:
		for _, p := range st.Properties {
			visitTypeClosure(p.Type, visitor, seen)
		}
	case *schema.UnionType:
		for _, e := range st.ElementTypes {
			visitTypeClosure(e, visitor, seen)
		}
	case *schema.InputType:
		visitTypeClosure(st.ElementType, visitor, seen)
	case *schema.OptionalType:
		visitTypeClosure(st.ElementType, visitor, seen)
	}
}
// VisitTypeClosure invokes visitor on every type transitively reachable from
// the given properties, visiting each distinct type exactly once.
func VisitTypeClosure(properties []*schema.Property, visitor func(t schema.Type)) {
	seen := Set{}
	for _, prop := range properties {
		visitTypeClosure(prop.Type, visitor, seen)
	}
}
// SimplifyInputUnion strips the InputType wrapper from each member of a
// union type, preserving the union's other attributes. Non-union types are
// returned unchanged.
func SimplifyInputUnion(t schema.Type) schema.Type {
	union, ok := t.(*schema.UnionType)
	if !ok {
		return t
	}
	elements := make([]schema.Type, 0, len(union.ElementTypes))
	for _, et := range union.ElementTypes {
		if input, ok := et.(*schema.InputType); ok {
			et = input.ElementType
		}
		elements = append(elements, et)
	}
	return &schema.UnionType{
		ElementTypes:  elements,
		DefaultType:   union.DefaultType,
		Discriminator: union.Discriminator,
		Mapping:       union.Mapping,
	}
}
// RequiredType unwraps the OptionalType enclosing the Property's type, if any.
func RequiredType(p *schema.Property) schema.Type {
	optional, ok := p.Type.(*schema.OptionalType)
	if !ok {
		return p.Type
	}
	return optional.ElementType
}
// OptionalType wraps the Property's type in an OptionalType if it is not already optional.
func OptionalType(p *schema.Property) schema.Type {
	switch typ := p.Type.(type) {
	case *schema.OptionalType:
		return typ
	default:
		return &schema.OptionalType{ElementType: p.Type}
	}
}
// UnwrapType removes any outer OptionalTypes and InputTypes from t,
// peeling wrappers until a bare type remains.
func UnwrapType(t schema.Type) schema.Type {
	for {
		if input, ok := t.(*schema.InputType); ok {
			t = input.ElementType
			continue
		}
		optional, ok := t.(*schema.OptionalType)
		if !ok {
			return t
		}
		t = optional.ElementType
	}
}
// IsNOptionalInput reports whether t is an InputType, possibly nested inside
// any number of OptionalType wrappers.
func IsNOptionalInput(t schema.Type) bool {
	for {
		if _, ok := t.(*schema.InputType); ok {
			return true
		}
		optional, ok := t.(*schema.OptionalType)
		if !ok {
			return false
		}
		t = optional.ElementType
	}
}
// resolvedType deeply strips InputType wrappers from t, recursing through
// optional, array, map, and union types. When plainObjects is true,
// input-shaped object types are replaced by their plain shape as well.
// Unchanged subtrees are returned as-is to avoid needless allocation.
func resolvedType(t schema.Type, plainObjects bool) schema.Type {
	switch typ := t.(type) {
	case *schema.InputType:
		// Drop the wrapper entirely and keep resolving the element.
		return resolvedType(typ.ElementType, plainObjects)
	case *schema.OptionalType:
		e := resolvedType(typ.ElementType, plainObjects)
		if e == typ.ElementType {
			return typ
		}
		return &schema.OptionalType{ElementType: e}
	case *schema.ArrayType:
		e := resolvedType(typ.ElementType, plainObjects)
		if e == typ.ElementType {
			return typ
		}
		return &schema.ArrayType{ElementType: e}
	case *schema.MapType:
		e := resolvedType(typ.ElementType, plainObjects)
		if e == typ.ElementType {
			return typ
		}
		return &schema.MapType{ElementType: e}
	case *schema.ObjectType:
		// Input-shaped objects are only unwrapped when plainObjects is set.
		if !plainObjects || !typ.IsInputShape() {
			return typ
		}
		return typ.PlainShape
	case *schema.UnionType:
		// Rebuild the union only if at least one member actually changed.
		elems, changed := make([]schema.Type, len(typ.ElementTypes)), false
		for i, e := range typ.ElementTypes {
			elems[i] = resolvedType(e, plainObjects)
			changed = changed || elems[i] != e
		}
		if !changed {
			return typ
		}
		return &schema.UnionType{
			ElementTypes:  elems,
			DefaultType:   typ.DefaultType,
			Discriminator: typ.Discriminator,
			Mapping:       typ.Mapping,
		}
	default:
		return t
	}
}
// PlainType deeply removes any InputTypes from t, with the exception of argument structs
// (input-shaped object types are kept as-is). Use ResolvedType to
// unwrap argument structs as well.
func PlainType(t schema.Type) schema.Type {
	return resolvedType(t, false)
}

// ResolvedType deeply removes any InputTypes from t, additionally replacing
// input-shaped object types with their plain shapes.
func ResolvedType(t schema.Type) schema.Type {
	return resolvedType(t, true)
}
package tf
import (
"bytes"
"encoding/binary"
"io"
"reflect"
"sort"
"github.com/pkg/errors"
tensorflow "github.com/tensorflow/tensorflow/tensorflow/go"
)
func nDTensorType(t reflect.Type, n int) reflect.Type {
for i := 0; i < n; i++ {
t = reflect.SliceOf(t)
}
return t
}
func nDimensionalTensor(t reflect.Type, n int) interface{} {
return reflect.New(nDTensorType(t, n)).Interface()
}
// DecodeStringND recursively decodes an n-dimensional string tensor of the
// given shape from r. An empty shape decodes a single scalar string;
// otherwise a slice of type t with shape[0] children is built, each child
// decoded with the remaining shape. Strings are consumed from r in
// row-major order, matching EncodeStringND.
func DecodeStringND(t reflect.Type, shape []int64, r io.Reader) (reflect.Value, error) {
	if len(shape) == 0 {
		str, err := DecodeString(r)
		if err != nil {
			return reflect.Value{}, err
		}
		return reflect.ValueOf(str), nil
	}
	n := int(shape[0])
	childType := t.Elem()
	childShape := shape[1:]
	slice := reflect.MakeSlice(t, n, n)
	for i := 0; i < n; i++ {
		v, err := DecodeStringND(childType, childShape, r)
		if err != nil {
			return reflect.Value{}, err
		}
		slice.Index(i).Set(v)
	}
	return slice, nil
}
// EncodeStringND recursively writes every string contained in val to w in
// row-major order, using the length-prefixed format of EncodeString.
func EncodeStringND(w io.Writer, val reflect.Value) error {
	if val.Kind() == reflect.String {
		return EncodeString(w, val.Interface().(string))
	}
	for i, n := 0, val.Len(); i < n; i++ {
		if err := EncodeStringND(w, val.Index(i)); err != nil {
			return err
		}
	}
	return nil
}
// EncodeTensor writes a tensor to w. Layout: data type, then shape, then —
// for String tensors — the individual length-prefixed strings, or otherwise
// an int64 length followed by the raw tensor contents. See DecodeTensor.
// (The previous comment mentioned gob.Encoder; the function writes to a
// plain io.Writer.)
func EncodeTensor(w io.Writer, val *tensorflow.Tensor) error {
	dataType := val.DataType()
	if err := EncodeDataType(w, dataType); err != nil {
		return err
	}
	shape := val.Shape()
	if err := EncodeInt64Array(w, shape); err != nil {
		return err
	}
	if dataType == tensorflow.String {
		// String tensors have no fixed-size binary form; serialize each
		// string individually in row-major order.
		goVal := val.Value()
		if err := EncodeStringND(w, reflect.ValueOf(goVal)); err != nil {
			return err
		}
		return nil
	}
	// Buffer the contents first so the byte length can be written ahead of
	// the data.
	var buf bytes.Buffer
	valLen, err := val.WriteContentsTo(&buf)
	if err != nil {
		return err
	}
	length := int64(buf.Len())
	if valLen != length {
		return errors.Errorf("expected tensor to write %d bytes, only wrote %d", valLen, length)
	}
	if err := binary.Write(w, binary.LittleEndian, length); err != nil {
		return err
	}
	n, err := buf.WriteTo(w)
	if err != nil {
		return err
	}
	if n != length {
		return errors.Errorf("expected to write %d bytes; only wrote %d", length, n)
	}
	return nil
}
// DecodeTensor reads a tensor written by EncodeTensor from r and returns it.
// (The previous comment mentioned gob.Decoder; the function reads from a
// plain io.Reader.)
func DecodeTensor(r io.Reader) (*tensorflow.Tensor, error) {
	dataType, err := DecodeDataType(r)
	if err != nil {
		return nil, err
	}
	shape, err := DecodeInt64Array(r)
	if err != nil {
		return nil, errors.Wrap(err, "shape")
	}
	var val *tensorflow.Tensor
	if dataType == tensorflow.String {
		// Rebuild the n-dimensional Go string value, then wrap it in a
		// tensor; string tensors have no fixed-size binary form.
		t := nDTensorType(reflect.TypeOf(""), len(shape))
		strND, err := DecodeStringND(t, shape, r)
		if err != nil {
			return nil, err
		}
		goTensor := reflect.Indirect(strND).Interface()
		val, err = tensorflow.NewTensor(goTensor)
		if err != nil {
			return nil, err
		}
		return val, nil
	} else {
		// Non-string tensors: an int64 byte count followed by the raw
		// contents; the LimitedReader enforces exactly that many bytes.
		var length int64
		if err := binary.Read(r, binary.LittleEndian, &length); err != nil {
			return nil, err
		}
		lr := io.LimitedReader{R: r, N: length}
		val, err = tensorflow.ReadTensor(dataType, shape, &lr)
		if err != nil {
			return nil, errors.Wrapf(err, "error reading (length %d, datatype %+v, shape %+v, remaining %d)", length, dataType, shape, lr.N)
		}
		if lr.N != 0 {
			return nil, errors.Errorf("only read %d bytes; wanted %d", length-lr.N, length)
		}
	}
	return val, nil
}
// EncodeTensorMap writes a map[string]*tensorflow.Tensor to w as an int64
// count followed by key/tensor pairs. Keys are written in sorted order so
// the output is deterministic. See DecodeTensorMap.
func EncodeTensorMap(w io.Writer, m map[string]*tensorflow.Tensor) error {
	count := int64(len(m))
	if err := binary.Write(w, binary.LittleEndian, count); err != nil {
		return err
	}
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		if err := EncodeString(w, k); err != nil {
			return err
		}
		if err := EncodeTensor(w, m[k]); err != nil {
			return err
		}
	}
	return nil
}
// DecodeTensorMap reads a map[string]*tensorflow.Tensor from r as written by
// EncodeTensorMap, and returns it.
func DecodeTensorMap(r io.Reader) (map[string]*tensorflow.Tensor, error) {
	var numFeeds int64
	if err := binary.Read(r, binary.LittleEndian, &numFeeds); err != nil {
		return nil, errors.Wrap(err, "numFeeds")
	}
	result := map[string]*tensorflow.Tensor{}
	for i := int64(0); i < numFeeds; i++ {
		key, err := DecodeString(r)
		if err != nil {
			return nil, err
		}
		val, err := DecodeTensor(r)
		if err != nil {
			return nil, errors.Wrapf(err, "tensor value, i %d, key %q", i, key)
		}
		result[key] = val
	}
	return result, nil
}
// DecodeString reads a length-prefixed string from r.
// Format is: int64(len) + body.
func DecodeString(r io.Reader) (string, error) {
	var strLen int64
	if err := binary.Read(r, binary.LittleEndian, &strLen); err != nil {
		return "", errors.Wrap(err, "length")
	}
	strBody := make([]byte, strLen)
	// io.ReadFull retries until the buffer is filled. A bare r.Read may
	// legally return fewer bytes than requested even when more data is
	// available (e.g. on network or buffered readers), which the previous
	// code treated as a hard failure.
	if _, err := io.ReadFull(r, strBody); err != nil {
		return "", errors.Wrap(err, "body")
	}
	return string(strBody), nil
}
// EncodeString writes body to w as int64(len) followed by the raw bytes.
func EncodeString(w io.Writer, body string) error {
	length := int64(len(body))
	if err := binary.Write(w, binary.LittleEndian, length); err != nil {
		return errors.Wrap(err, "length")
	}
	_, err := w.Write([]byte(body))
	if err != nil {
		return errors.Wrap(err, "body")
	}
	return nil
}
// DecodeStringArray reads a string slice from r.
// Format is: int64(num strings) + repeated (string).
func DecodeStringArray(r io.Reader) ([]string, error) {
	var count int64
	if err := binary.Read(r, binary.LittleEndian, &count); err != nil {
		return nil, err
	}
	var result []string
	for ; count > 0; count-- {
		str, err := DecodeString(r)
		if err != nil {
			return nil, err
		}
		result = append(result, str)
	}
	return result, nil
}
// EncodeStringArray writes a string slice to w.
// Format is: int64(num strings) + repeated (string).
func EncodeStringArray(w io.Writer, arr []string) error {
	count := int64(len(arr))
	if err := binary.Write(w, binary.LittleEndian, count); err != nil {
		return err
	}
	for _, s := range arr {
		if err := EncodeString(w, s); err != nil {
			return err
		}
	}
	return nil
}
// DecodeInt64Array decodes a int64 array from the bytes.
// Format is: int64(num) + repeated (int64)
func DecodeInt64Array(r io.Reader) ([]int64, error) {
var count int64
if err := binary.Read(r, binary.LittleEndian, &count); err != nil {
return nil, err
}
var arr []int64
for i := int64(0); i < count; i++ {
var v int64
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
return nil, err
}
arr = append(arr, v)
}
return arr, nil
}
// EncodeInt64Array encodes a int64 array.
// Format is: int64(num) + repeated (int64)
func EncodeInt64Array(w io.Writer, arr []int64) error {
if err := binary.Write(w, binary.LittleEndian, int64(len(arr))); err != nil {
return err
}
for _, v := range arr {
if err := binary.Write(w, binary.LittleEndian, v); err != nil {
return err
}
}
return nil
}
// EncodeDataType writes the data type to w as a little-endian int64.
func EncodeDataType(w io.Writer, dt tensorflow.DataType) error {
	return binary.Write(w, binary.LittleEndian, int64(dt))
}
// DecodeDataType decodes the data type from the reader.
func DecodeDataType(r io.Reader) (tensorflow.DataType, error) {
var dt int64
if err := binary.Read(r, binary.LittleEndian, &dt); err != nil {
return 0, err
}
return tensorflow.DataType(dt), nil
} | tf/tensor.go | 0.714628 | 0.416856 | tensor.go | starcoder |
package en
import "github.com/rannoch/cldr"
var currencies = []cldr.Currency{
{Currency: "ADP", DisplayName: "Andorran Peseta", Symbol: ""},
{Currency: "AED", DisplayName: "United Arab Emirates Dirham", Symbol: ""},
{Currency: "AFA", DisplayName: "Afghan Afghani (1927–2002)", Symbol: ""},
{Currency: "AFN", DisplayName: "Afghan Afghani", Symbol: ""},
{Currency: "ALK", DisplayName: "Albanian Lek (1946–1965)", Symbol: ""},
{Currency: "ALL", DisplayName: "Albanian Lek", Symbol: ""},
{Currency: "AMD", DisplayName: "Armenian Dram", Symbol: ""},
{Currency: "ANG", DisplayName: "Netherlands Antillean Guilder", Symbol: ""},
{Currency: "AOA", DisplayName: "Angolan Kwanza", Symbol: ""},
{Currency: "AOK", DisplayName: "Angolan Kwanza (1977–1991)", Symbol: ""},
{Currency: "AON", DisplayName: "Angolan New Kwanza (1990–2000)", Symbol: ""},
{Currency: "AOR", DisplayName: "Angolan Readjusted Kwanza (1995–1999)", Symbol: ""},
{Currency: "ARA", DisplayName: "Argentine Austral", Symbol: ""},
{Currency: "ARL", DisplayName: "Argentine Peso Ley (1970–1983)", Symbol: ""},
{Currency: "ARM", DisplayName: "Argentine Peso (1881–1970)", Symbol: ""},
{Currency: "ARP", DisplayName: "Argentine Peso (1983–1985)", Symbol: ""},
{Currency: "ARS", DisplayName: "Argentine Peso", Symbol: ""},
{Currency: "ATS", DisplayName: "Austrian Schilling", Symbol: ""},
{Currency: "AUD", DisplayName: "Australian Dollar", Symbol: ""},
{Currency: "AWG", DisplayName: "Aruban Florin", Symbol: ""},
{Currency: "AZM", DisplayName: "Azerbaijani Manat (1993–2006)", Symbol: ""},
{Currency: "AZN", DisplayName: "Azerbaijani Manat", Symbol: ""},
{Currency: "BAD", DisplayName: "Bosnia-Herzegovina Dinar (1992–1994)", Symbol: ""},
{Currency: "BAM", DisplayName: "Bosnia-Herzegovina Convertible Mark", Symbol: ""},
{Currency: "BAN", DisplayName: "Bosnia-Herzegovina New Dinar (1994–1997)", Symbol: ""},
{Currency: "BBD", DisplayName: "Barbadian Dollar", Symbol: ""},
{Currency: "BDT", DisplayName: "Bangladeshi Taka", Symbol: ""},
{Currency: "BEC", DisplayName: "Belgian Franc (convertible)", Symbol: ""},
{Currency: "BEF", DisplayName: "Belgian Franc", Symbol: ""},
{Currency: "BEL", DisplayName: "Belgian Franc (financial)", Symbol: ""},
{Currency: "BGL", DisplayName: "Bulgarian Hard Lev", Symbol: ""},
{Currency: "BGM", DisplayName: "Bulgarian Socialist Lev", Symbol: ""},
{Currency: "BGN", DisplayName: "Bulgarian Lev", Symbol: ""},
{Currency: "BGO", DisplayName: "Bulgarian Lev (1879–1952)", Symbol: ""},
{Currency: "BHD", DisplayName: "Bahraini Dinar", Symbol: ""},
{Currency: "BIF", DisplayName: "Burundian Franc", Symbol: ""},
{Currency: "BMD", DisplayName: "Bermudan Dollar", Symbol: ""},
{Currency: "BND", DisplayName: "Brunei Dollar", Symbol: ""},
{Currency: "BOB", DisplayName: "Bolivian Boliviano", Symbol: ""},
{Currency: "BOL", DisplayName: "Bolivian Boliviano (1863–1963)", Symbol: ""},
{Currency: "BOP", DisplayName: "Bolivian Peso", Symbol: ""},
{Currency: "BOV", DisplayName: "Bolivian Mvdol", Symbol: ""},
{Currency: "BRB", DisplayName: "Brazilian New Cruzeiro (1967–1986)", Symbol: ""},
{Currency: "BRC", DisplayName: "Brazilian Cruzado (1986–1989)", Symbol: ""},
{Currency: "BRE", DisplayName: "Brazilian Cruzeiro (1990–1993)", Symbol: ""},
{Currency: "BRL", DisplayName: "Brazilian Real", Symbol: "R$"},
{Currency: "BRN", DisplayName: "Brazilian New Cruzado (1989–1990)", Symbol: ""},
{Currency: "BRR", DisplayName: "Brazilian Cruzeiro (1993–1994)", Symbol: ""},
{Currency: "BRZ", DisplayName: "Brazilian Cruzeiro (1942–1967)", Symbol: ""},
{Currency: "BSD", DisplayName: "Bahamian Dollar", Symbol: ""},
{Currency: "BTN", DisplayName: "Bhutanese Ngultrum", Symbol: ""},
{Currency: "BUK", DisplayName: "Burmese Kyat", Symbol: ""},
{Currency: "BWP", DisplayName: "Botswanan Pula", Symbol: ""},
{Currency: "BYB", DisplayName: "Belarusian New Ruble (1994–1999)", Symbol: ""},
{Currency: "BYR", DisplayName: "Belarusian Ruble", Symbol: "BYR"},
{Currency: "BZD", DisplayName: "Belize Dollar", Symbol: ""},
{Currency: "CAD", DisplayName: "Canadian Dollar", Symbol: ""},
{Currency: "CDF", DisplayName: "Congolese Franc", Symbol: ""},
{Currency: "CHE", DisplayName: "WIR Euro", Symbol: ""},
{Currency: "CHF", DisplayName: "Swiss Franc", Symbol: ""},
{Currency: "CHW", DisplayName: "WIR Franc", Symbol: ""},
{Currency: "CLE", DisplayName: "Chilean Escudo", Symbol: ""},
{Currency: "CLF", DisplayName: "Chilean Unit of Account (UF)", Symbol: ""},
{Currency: "CLP", DisplayName: "Chilean Peso", Symbol: ""},
{Currency: "CNX", DisplayName: "Chinese People’s Bank Dollar", Symbol: ""},
{Currency: "CNY", DisplayName: "Chinese Yuan", Symbol: ""},
{Currency: "COP", DisplayName: "Colombian Peso", Symbol: ""},
{Currency: "COU", DisplayName: "Colombian Real Value Unit", Symbol: ""},
{Currency: "CRC", DisplayName: "Costa Rican Colón", Symbol: ""},
{Currency: "CSD", DisplayName: "Serbian Dinar (2002–2006)", Symbol: ""},
{Currency: "CSK", DisplayName: "Czechoslovak Hard Koruna", Symbol: ""},
{Currency: "CUC", DisplayName: "Cuban Convertible Peso", Symbol: ""},
{Currency: "CUP", DisplayName: "Cuban Peso", Symbol: ""},
{Currency: "CVE", DisplayName: "Cape Verdean Escudo", Symbol: ""},
{Currency: "CYP", DisplayName: "Cypriot Pound", Symbol: ""},
{Currency: "CZK", DisplayName: "Czech Republic Koruna", Symbol: ""},
{Currency: "DDM", DisplayName: "East German Mark", Symbol: ""},
{Currency: "DEM", DisplayName: "German Mark", Symbol: ""},
{Currency: "DJF", DisplayName: "Djiboutian Franc", Symbol: ""},
{Currency: "DKK", DisplayName: "Danish Krone", Symbol: ""},
{Currency: "DOP", DisplayName: "Dominican Peso", Symbol: ""},
{Currency: "DZD", DisplayName: "Algerian Dinar", Symbol: ""},
{Currency: "ECS", DisplayName: "Ecuadorian Sucre", Symbol: ""},
{Currency: "ECV", DisplayName: "Ecuadorian Unit of Constant Value", Symbol: ""},
{Currency: "EEK", DisplayName: "Estonian Kroon", Symbol: ""},
{Currency: "EGP", DisplayName: "Egyptian Pound", Symbol: ""},
{Currency: "ERN", DisplayName: "Eritrean Nakfa", Symbol: ""},
{Currency: "ESA", DisplayName: "Spanish Peseta (A account)", Symbol: ""},
{Currency: "ESB", DisplayName: "Spanish Peseta (convertible account)", Symbol: ""},
{Currency: "ESP", DisplayName: "Spanish Peseta", Symbol: ""},
{Currency: "ETB", DisplayName: "Ethiopian Birr", Symbol: ""},
{Currency: "EUR", DisplayName: "Euro", Symbol: ""},
{Currency: "FIM", DisplayName: "Finnish Markka", Symbol: ""},
{Currency: "FJD", DisplayName: "Fijian Dollar", Symbol: ""},
{Currency: "FKP", DisplayName: "Falkland Islands Pound", Symbol: ""},
{Currency: "FRF", DisplayName: "French Franc", Symbol: ""},
{Currency: "GBP", DisplayName: "British Pound", Symbol: ""},
{Currency: "GEK", DisplayName: "Georgian Kupon Larit", Symbol: ""},
{Currency: "GEL", DisplayName: "Georgian Lari", Symbol: ""},
{Currency: "GHC", DisplayName: "Ghanaian Cedi (1979–2007)", Symbol: ""},
{Currency: "GHS", DisplayName: "Ghanaian Cedi", Symbol: ""},
{Currency: "GIP", DisplayName: "Gibraltar Pound", Symbol: ""},
{Currency: "GMD", DisplayName: "Gambian Dalasi", Symbol: ""},
{Currency: "GNF", DisplayName: "Guinean Franc", Symbol: ""},
{Currency: "GNS", DisplayName: "Guinean Syli", Symbol: ""},
{Currency: "GQE", DisplayName: "Equatorial Guinean Ekwele", Symbol: ""},
{Currency: "GRD", DisplayName: "Greek Drachma", Symbol: ""},
{Currency: "GTQ", DisplayName: "Guatemalan Quetzal", Symbol: ""},
{Currency: "GWE", DisplayName: "Portuguese Guinea Escudo", Symbol: ""},
{Currency: "GWP", DisplayName: "Guinea-Bissau Peso", Symbol: ""},
{Currency: "GYD", DisplayName: "Guyanaese Dollar", Symbol: ""},
{Currency: "HKD", DisplayName: "Hong Kong Dollar", Symbol: ""},
{Currency: "HNL", DisplayName: "Honduran Lempira", Symbol: ""},
{Currency: "HRD", DisplayName: "Croatian Dinar", Symbol: ""},
{Currency: "HRK", DisplayName: "Croatian Kuna", Symbol: ""},
{Currency: "HTG", DisplayName: "Haitian Gourde", Symbol: ""},
{Currency: "HUF", DisplayName: "Hungarian Forint", Symbol: ""},
{Currency: "IDR", DisplayName: "Indonesian Rupiah", Symbol: ""},
{Currency: "IEP", DisplayName: "Irish Pound", Symbol: ""},
{Currency: "ILP", DisplayName: "Israeli Pound", Symbol: ""},
{Currency: "ILR", DisplayName: "Israeli Sheqel (1980–1985)", Symbol: ""},
{Currency: "ILS", DisplayName: "Israeli New Sheqel", Symbol: ""},
{Currency: "INR", DisplayName: "Indian Rupee", Symbol: ""},
{Currency: "IQD", DisplayName: "Iraqi Dinar", Symbol: ""},
{Currency: "IRR", DisplayName: "Iranian Rial", Symbol: ""},
{Currency: "ISJ", DisplayName: "Icelandic Króna (1918–1981)", Symbol: ""},
{Currency: "ISK", DisplayName: "Icelandic Króna", Symbol: ""},
{Currency: "ITL", DisplayName: "Italian Lira", Symbol: ""},
{Currency: "JMD", DisplayName: "Jamaican Dollar", Symbol: ""},
{Currency: "JOD", DisplayName: "Jordanian Dinar", Symbol: ""},
{Currency: "JPY", DisplayName: "Japanese Yen", Symbol: "¥"},
{Currency: "KES", DisplayName: "Kenyan Shilling", Symbol: ""},
{Currency: "KGS", DisplayName: "Kyrgystani Som", Symbol: "SOʻM"},
{Currency: "KHR", DisplayName: "Cambodian Riel", Symbol: ""},
{Currency: "KMF", DisplayName: "Comorian Franc", Symbol: ""},
{Currency: "KPW", DisplayName: "North Korean Won", Symbol: ""},
{Currency: "KRH", DisplayName: "South Korean Hwan (1953–1962)", Symbol: ""},
{Currency: "KRO", DisplayName: "South Korean Won (1945–1953)", Symbol: ""},
{Currency: "KRW", DisplayName: "South Korean Won", Symbol: ""},
{Currency: "KWD", DisplayName: "Kuwaiti Dinar", Symbol: ""},
{Currency: "KYD", DisplayName: "Cayman Islands Dollar", Symbol: ""},
{Currency: "KZT", DisplayName: "Kazakhstani Tenge", Symbol: "₸"},
{Currency: "LAK", DisplayName: "Laotian Kip", Symbol: ""},
{Currency: "LBP", DisplayName: "Lebanese Pound", Symbol: ""},
{Currency: "LKR", DisplayName: "Sri Lankan Rupee", Symbol: ""},
{Currency: "LRD", DisplayName: "Liberian Dollar", Symbol: ""},
{Currency: "LSL", DisplayName: "Lesotho Loti", Symbol: ""},
{Currency: "LTL", DisplayName: "Lithuanian Litas", Symbol: ""},
{Currency: "LTT", DisplayName: "Lithuanian Talonas", Symbol: ""},
{Currency: "LUC", DisplayName: "Luxembourgian Convertible Franc", Symbol: ""},
{Currency: "LUF", DisplayName: "Luxembourgian Franc", Symbol: ""},
{Currency: "LUL", DisplayName: "Luxembourg Financial Franc", Symbol: ""},
{Currency: "LVL", DisplayName: "Latvian Lats", Symbol: ""},
{Currency: "LVR", DisplayName: "Latvian Ruble", Symbol: ""},
{Currency: "LYD", DisplayName: "Lib<NAME>", Symbol: ""},
{Currency: "MAD", DisplayName: "Moroc<NAME>", Symbol: ""},
{Currency: "MAF", DisplayName: "Moroccan Franc", Symbol: ""},
{Currency: "MCF", DisplayName: "Monegasque Franc", Symbol: ""},
{Currency: "MDC", DisplayName: "Moldovan Cupon", Symbol: ""},
{Currency: "MDL", DisplayName: "Mol<NAME>", Symbol: ""},
{Currency: "MGA", DisplayName: "Malagasy Ariary", Symbol: ""},
{Currency: "MGF", DisplayName: "Malagasy Franc", Symbol: ""},
{Currency: "MKD", DisplayName: "Macedonian Denar", Symbol: ""},
{Currency: "MKN", DisplayName: "Macedonian Denar (1992–1993)", Symbol: ""},
{Currency: "MLF", DisplayName: "Malian Franc", Symbol: ""},
{Currency: "MMK", DisplayName: "Myanmar Kyat", Symbol: ""},
{Currency: "MNT", DisplayName: "Mongolian Tugrik", Symbol: ""},
{Currency: "MOP", DisplayName: "Macanese Pataca", Symbol: ""},
{Currency: "MRO", DisplayName: "Mauritanian Ouguiya", Symbol: ""},
{Currency: "MTL", DisplayName: "Maltese Lira", Symbol: ""},
{Currency: "MTP", DisplayName: "Maltese Pound", Symbol: ""},
{Currency: "MUR", DisplayName: "Mauritian Rupee", Symbol: ""},
{Currency: "MVP", DisplayName: "Maldivian Rupee (1947–1981)", Symbol: ""},
{Currency: "MVR", DisplayName: "Maldivian Rufiyaa", Symbol: ""},
{Currency: "MWK", DisplayName: "Malawian Kwacha", Symbol: ""},
{Currency: "MXN", DisplayName: "Mexican Peso", Symbol: "MX$"},
{Currency: "MXP", DisplayName: "Mexican Silver Peso (1861–1992)", Symbol: ""},
{Currency: "MXV", DisplayName: "Mexican Investment Unit", Symbol: ""},
{Currency: "MYR", DisplayName: "Malaysian Ringgit", Symbol: ""},
{Currency: "MZE", DisplayName: "Mozambican Escudo", Symbol: ""},
{Currency: "MZM", DisplayName: "Mozambican Metical (1980–2006)", Symbol: ""},
{Currency: "MZN", DisplayName: "Mozambican Metical", Symbol: ""},
{Currency: "NAD", DisplayName: "Namibian Dollar", Symbol: ""},
{Currency: "NGN", DisplayName: "Nigerian Naira", Symbol: ""},
{Currency: "NIC", DisplayName: "Nicaraguan Córdoba (1988–1991)", Symbol: ""},
{Currency: "NIO", DisplayName: "Nicaraguan Córdoba", Symbol: ""},
{Currency: "NLG", DisplayName: "Dutch Guilder", Symbol: ""},
{Currency: "NOK", DisplayName: "Norwegian Krone", Symbol: ""},
{Currency: "NPR", DisplayName: "Nepalese Rupee", Symbol: ""},
{Currency: "NZD", DisplayName: "New Zealand Dollar", Symbol: ""},
{Currency: "OMR", DisplayName: "Omani Rial", Symbol: ""},
{Currency: "PAB", DisplayName: "Panamanian Balboa", Symbol: ""},
{Currency: "PEI", DisplayName: "Peruvian Inti", Symbol: ""},
{Currency: "PEN", DisplayName: "Peruvian Nuevo Sol", Symbol: ""},
{Currency: "PES", DisplayName: "Peruvian Sol (1863–1965)", Symbol: ""},
{Currency: "PGK", DisplayName: "Papua New Guinean Kina", Symbol: ""},
{Currency: "PHP", DisplayName: "Philippine Peso", Symbol: ""},
{Currency: "PKR", DisplayName: "Pakistani Rupee", Symbol: ""},
{Currency: "PLN", DisplayName: "Polish Zloty", Symbol: ""},
{Currency: "PLZ", DisplayName: "Polish Zloty (1950–1995)", Symbol: ""},
{Currency: "PTE", DisplayName: "Portuguese Escudo", Symbol: ""},
{Currency: "PYG", DisplayName: "Paraguayan Guarani", Symbol: ""},
{Currency: "QAR", DisplayName: "Qatari Rial", Symbol: ""},
{Currency: "RHD", DisplayName: "Rhodesian Dollar", Symbol: ""},
{Currency: "ROL", DisplayName: "Romanian Leu (1952–2006)", Symbol: ""},
{Currency: "RON", DisplayName: "Romanian Leu", Symbol: ""},
{Currency: "RSD", DisplayName: "Serbian Dinar", Symbol: ""},
{Currency: "RUB", DisplayName: "Russian Ruble", Symbol: "₽"},
{Currency: "RUR", DisplayName: "Russian Ruble (1991–1998)", Symbol: ""},
{Currency: "RWF", DisplayName: "Rwandan Franc", Symbol: ""},
{Currency: "SAR", DisplayName: "Saudi Riyal", Symbol: ""},
{Currency: "SBD", DisplayName: "Solomon Islands Dollar", Symbol: ""},
{Currency: "SCR", DisplayName: "Seychellois Rupee", Symbol: ""},
{Currency: "SDD", DisplayName: "Sudanese Dinar (1992–2007)", Symbol: ""},
{Currency: "SDG", DisplayName: "Sudanese Pound", Symbol: ""},
{Currency: "SDP", DisplayName: "Sudanese Pound (1957–1998)", Symbol: ""},
{Currency: "SEK", DisplayName: "Swedish Krona", Symbol: ""},
{Currency: "SGD", DisplayName: "Singapore Dollar", Symbol: ""},
{Currency: "SHP", DisplayName: "St. Helena Pound", Symbol: ""},
{Currency: "SIT", DisplayName: "Slovenian Tolar", Symbol: ""},
{Currency: "SKK", DisplayName: "Slovak Koruna", Symbol: ""},
{Currency: "SLL", DisplayName: "Sierra Leonean Leone", Symbol: ""},
{Currency: "SOS", DisplayName: "Somali Shilling", Symbol: ""},
{Currency: "SRD", DisplayName: "Surinamese Dollar", Symbol: ""},
{Currency: "SRG", DisplayName: "Surinamese Guilder", Symbol: ""},
{Currency: "SSP", DisplayName: "South Sudanese Pound", Symbol: ""},
{Currency: "STD", DisplayName: "São Tomé & Príncipe Dobra", Symbol: ""},
{Currency: "SUR", DisplayName: "Soviet Rouble", Symbol: ""},
{Currency: "SVC", DisplayName: "Salvadoran Colón", Symbol: ""},
{Currency: "SYP", DisplayName: "Syrian Pound", Symbol: ""},
{Currency: "SZL", DisplayName: "Swazi Lilangeni", Symbol: ""},
{Currency: "THB", DisplayName: "Thai Baht", Symbol: ""},
{Currency: "TJR", DisplayName: "Tajikistani Ruble", Symbol: ""},
{Currency: "TJS", DisplayName: "Tajikistani Somoni", Symbol: ""},
{Currency: "TMM", DisplayName: "Turkmenistani Manat (1993–2009)", Symbol: ""},
{Currency: "TMT", DisplayName: "Turkmenistani Manat", Symbol: ""},
{Currency: "TND", DisplayName: "Tunisian Dinar", Symbol: ""},
{Currency: "TOP", DisplayName: "Tongan Paʻanga", Symbol: ""},
{Currency: "TPE", DisplayName: "Timorese Escudo", Symbol: ""},
{Currency: "TRL", DisplayName: "Turkish Lira (1922–2005)", Symbol: ""},
{Currency: "TRY", DisplayName: "Turkish Lira", Symbol: ""},
{Currency: "TTD", DisplayName: "Trinidad & Tobago Dollar", Symbol: ""},
{Currency: "TWD", DisplayName: "New Taiwan Dollar", Symbol: ""},
{Currency: "TZS", DisplayName: "Tanzanian Shilling", Symbol: ""},
{Currency: "UAH", DisplayName: "Ukrainian Hryvnia", Symbol: "₴"},
{Currency: "UAK", DisplayName: "Ukrainian Karbovanets", Symbol: ""},
{Currency: "UGS", DisplayName: "Ugandan Shilling (1966–1987)", Symbol: ""},
{Currency: "UGX", DisplayName: "Ugandan Shilling", Symbol: ""},
{Currency: "USD", DisplayName: "US Dollar", Symbol: "$"},
{Currency: "USN", DisplayName: "US Dollar (Next day)", Symbol: ""},
{Currency: "USS", DisplayName: "US Dollar (Same day)", Symbol: ""},
{Currency: "UYI", DisplayName: "Uruguayan Peso (Indexed Units)", Symbol: ""},
{Currency: "UYP", DisplayName: "Uruguayan Peso (1975–1993)", Symbol: ""},
{Currency: "UYU", DisplayName: "Uruguayan Peso", Symbol: ""},
{Currency: "UZS", DisplayName: "Uzbekistani Som", Symbol: "SOʻM"},
{Currency: "VEB", DisplayName: "Venezuelan Bolívar (1871–2008)", Symbol: ""},
{Currency: "VEF", DisplayName: "Venezuelan Bolívar", Symbol: ""},
{Currency: "VND", DisplayName: "Vietnamese Dong", Symbol: ""},
{Currency: "VNN", DisplayName: "Vietnamese Dong (1978–1985)", Symbol: ""},
{Currency: "VUV", DisplayName: "Vanuatu Vatu", Symbol: ""},
{Currency: "WST", DisplayName: "Samoan Tala", Symbol: ""},
{Currency: "XAF", DisplayName: "Central African CFA Franc", Symbol: ""},
{Currency: "XAG", DisplayName: "Silver", Symbol: ""},
{Currency: "XAU", DisplayName: "Gold", Symbol: ""},
{Currency: "XBA", DisplayName: "European Composite Unit", Symbol: ""},
{Currency: "XBB", DisplayName: "European Monetary Unit", Symbol: ""},
{Currency: "XBC", DisplayName: "European Unit of Account (XBC)", Symbol: ""},
{Currency: "XBD", DisplayName: "European Unit of Account (XBD)", Symbol: ""},
{Currency: "XCD", DisplayName: "East Caribbean Dollar", Symbol: ""},
{Currency: "XDR", DisplayName: "Special Drawing Rights", Symbol: ""},
{Currency: "XEU", DisplayName: "European Currency Unit", Symbol: ""},
{Currency: "XFO", DisplayName: "French Gold Franc", Symbol: ""},
{Currency: "XFU", DisplayName: "French UIC-Franc", Symbol: ""},
{Currency: "XOF", DisplayName: "West African CFA Franc", Symbol: ""},
{Currency: "XPD", DisplayName: "Palladium", Symbol: ""},
{Currency: "XPF", DisplayName: "CFP Franc", Symbol: ""},
{Currency: "XPT", DisplayName: "Platinum", Symbol: ""},
{Currency: "XRE", DisplayName: "RINET Funds", Symbol: ""},
{Currency: "XSU", DisplayName: "Sucre", Symbol: ""},
{Currency: "XTS", DisplayName: "Testing Currency Code", Symbol: ""},
{Currency: "XUA", DisplayName: "ADB Unit of Account", Symbol: ""},
{Currency: "XXX", DisplayName: "Unknown Currency", Symbol: ""},
{Currency: "YDD", DisplayName: "Yemeni Dinar", Symbol: ""},
{Currency: "YER", DisplayName: "Yemeni Rial", Symbol: ""},
{Currency: "YUD", DisplayName: "Yugoslavian Hard Dinar (1966–1990)", Symbol: ""},
{Currency: "YUM", DisplayName: "Yugoslavian New Dinar (1994–2002)", Symbol: ""},
{Currency: "YUN", DisplayName: "Yugoslavian Convertible Dinar (1990–1992)", Symbol: ""},
{Currency: "YUR", DisplayName: "Yugoslavian Reformed Dinar (1992–1993)", Symbol: ""},
{Currency: "ZAL", DisplayName: "South African Rand (financial)", Symbol: ""},
{Currency: "ZAR", DisplayName: "South African Rand", Symbol: ""},
{Currency: "ZMK", DisplayName: "Zambian Kwacha (1968–2012)", Symbol: ""},
{Currency: "ZMW", DisplayName: "Zambian Kwacha", Symbol: ""},
{Currency: "ZRN", DisplayName: "Zairean New Zaire (1993–1998)", Symbol: ""},
{Currency: "ZRZ", DisplayName: "Zairean Zaire (1971–1993)", Symbol: ""},
{Currency: "ZWD", DisplayName: "Zimbabwean Dollar (1980–2008)", Symbol: ""},
{Currency: "ZWL", DisplayName: "Zimbabwean Dollar (2009)", Symbol: ""},
{Currency: "ZWR", DisplayName: "Zimbabwean Dollar (2008)", Symbol: ""},
} | resources/locales/en/currency.go | 0.522202 | 0.458167 | currency.go | starcoder |
package missing_waf
import (
"github.com/threagile/threagile/model"
)
// Category returns the static risk-category metadata for the missing-WAF
// rule (id "missing-waf", CWE-1008, STRIDE: Tampering).
// NOTE(review): the Description/Mitigation/Check texts are Portuguese —
// presumably a translation of the upstream Threagile rule; these are
// runtime strings and are left untouched here.
func Category() model.RiskCategory {
	return model.RiskCategory{
		Id:    "missing-waf",
		Title: "Missing Web Application Firewall (WAF)",
		Description: "Para ter uma primeira linha de defesa de filtragem, as arquiteturas de segurança com serviços da Web ou aplicativos da Web devem incluir um WAF na frente deles. " +
			"Mesmo que um WAF não seja um substituto para a segurança (todos os componentes devem ser seguros, mesmo sem um WAF), ele adiciona outra camada de defesa ao geral " +
			"sistema atrasando alguns ataques e tendo um alerta de ataque mais fácil através dele.",
		Impact:     "Se esse risco não for mitigado, os invasores poderão aplicar testes de padrão de ataque padrão em grande velocidade, sem qualquer filtragem.",
		ASVS:       "V1 - Architecture, Design and Threat Modeling Requirements",
		CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Virtual_Patching_Cheat_Sheet.html",
		Action:     "Web Application Firewall (WAF)",
		Mitigation: "Considere colocar um Web Application Firewall (WAF) na frente dos serviços da web e / ou aplicativos da web. Para ambientes de nuvem, muitos provedores de nuvem oferecem " +
			"WAFs pré-configurados. Até mesmo proxies reversos podem ser aprimorados por um componente WAF por meio de plug-ins ModSecurity.",
		Check:          "Existe um Firewall de aplicativo da Web (WAF)?",
		Function:       model.Operations,
		STRIDE:         model.Tampering,
		DetectionLogic: "Serviços da Web e / ou aplicativos da Web dentro do escopo acessados através de um limite de confiança da rede sem um Firewall de aplicativo da Web (WAF) na frente deles.",
		RiskAssessment: "A classificação de risco depende da sensibilidade do próprio ativo técnico e dos ativos de dados processados e armazenados.",
		FalsePositives: "Os destinos acessíveis apenas por WAFs ou proxies reversos contendo um componente WAF (como ModSecurity) podem ser considerados " +
			"como falsos positivos após revisão individual.",
		ModelFailurePossibleReason: false,
		CWE:                        1008,
	}
}
// SupportedTags returns the model tags this rule reacts to; the missing-WAF
// rule is tag-independent, so the list is empty.
func SupportedTags() []string {
	return []string{}
}
// GenerateRisks flags every in-scope web application or web service that is
// reachable across a network trust boundary via a web protocol from a source
// that is not itself a WAF. At most one risk is created per asset.
func GenerateRisks() []model.Risk {
	risks := make([]model.Risk, 0)
	for _, asset := range model.ParsedModelRoot.TechnicalAssets {
		if asset.OutOfScope {
			continue
		}
		if !asset.Technology.IsWebApplication() && !asset.Technology.IsWebService() {
			continue
		}
		for _, access := range model.IncomingTechnicalCommunicationLinksMappedByTargetId[asset.Id] {
			sourceTech := model.ParsedModelRoot.TechnicalAssets[access.SourceId].Technology
			if access.IsAcrossTrustBoundaryNetworkOnly() &&
				access.Protocol.IsPotentialWebAccessProtocol() &&
				sourceTech != model.WAF {
				risks = append(risks, createRisk(asset))
				break // one risk per asset is enough
			}
		}
	}
	return risks
}
func createRisk(technicalAsset model.TechnicalAsset) model.Risk {
title := "<b>Missing Web Application Firewall (WAF)</b> risk at <b>" + technicalAsset.Title + "</b>"
likelihood := model.Unlikely
impact := model.LowImpact
if technicalAsset.HighestConfidentiality() == model.StrictlyConfidential ||
technicalAsset.HighestIntegrity() == model.MissionCritical ||
technicalAsset.HighestAvailability() == model.MissionCritical {
impact = model.MediumImpact
}
risk := model.Risk{
Category: Category(),
Severity: model.CalculateSeverity(likelihood, impact),
ExploitationLikelihood: likelihood,
ExploitationImpact: impact,
Title: title,
MostRelevantTechnicalAssetId: technicalAsset.Id,
DataBreachProbability: model.Improbable,
DataBreachTechnicalAssetIDs: []string{technicalAsset.Id},
}
risk.SyntheticId = risk.Category.Id + "@" + technicalAsset.Id
return risk
} | risks/built-in/missing-waf/missing-waf-rule.go | 0.522689 | 0.438064 | missing-waf-rule.go | starcoder |
package latlong
import (
"bytes"
"errors"
"fmt"
"math"
"unicode"
geohash "github.com/TomiHiltunen/geohash-golang"
"github.com/golang/geo/s2"
)
// Rect is a rectangle of latlng. It embeds an s2.Rect, a latitude/longitude
// bounding rectangle on the sphere, so all s2.Rect methods are available.
type Rect struct {
	s2.Rect
}
// MarshalJSON is a marshaler for JSON.
// It encodes the rect's four vertices as a JSON array of points; each
// vertex carries a precision of one tenth of the rect's angular size.
func (rect *Rect) MarshalJSON() ([]byte, error) {
	v := []Point{
		{
			lat: NewAngleFromS1Angle(rect.Rect.Vertex(0).Lat, rect.Rect.Size().Lat/10),
			lng: NewAngleFromS1Angle(rect.Rect.Vertex(0).Lng, rect.Rect.Size().Lng/10)},
		{
			lat: NewAngleFromS1Angle(rect.Rect.Vertex(1).Lat, rect.Rect.Size().Lat/10),
			lng: NewAngleFromS1Angle(rect.Rect.Vertex(1).Lng, rect.Rect.Size().Lng/10)},
		{
			lat: NewAngleFromS1Angle(rect.Rect.Vertex(2).Lat, rect.Rect.Size().Lat/10),
			lng: NewAngleFromS1Angle(rect.Rect.Vertex(2).Lng, rect.Rect.Size().Lng/10)},
		{
			lat: NewAngleFromS1Angle(rect.Rect.Vertex(3).Lat, rect.Rect.Size().Lat/10),
			lng: NewAngleFromS1Angle(rect.Rect.Vertex(3).Lng, rect.Rect.Size().Lng/10)},
	}
	bs := make([][]byte, 0, len(v))
	for i := range v {
		b, err := v[i].MarshalJSON()
		if err != nil {
			// The previous version shadowed the named error return here and
			// only broke out of the loop, emitting truncated JSON with a nil
			// error on failure. Propagate the error instead.
			return nil, err
		}
		bs = append(bs, b)
	}
	var bb []byte
	bb = append(bb, '[')
	bb = append(bb, bytes.Join(bs, []byte(","))...)
	bb = append(bb, ']')
	return bb, nil
}
// NewRect builds a Rect from a center latitude/longitude and the
// latitude/longitude precision (size), all in degrees.
func NewRect(latitude, longitude, latprec, longprec float64) *Rect {
	center := s2.LatLngFromDegrees(latitude, longitude)
	size := s2.LatLngFromDegrees(latprec, longprec)
	r := new(Rect)
	r.Rect = s2.RectFromCenterSize(center, size)
	return r
}
// NewRectGridLocator is from Grid Locator.
// https://en.wikipedia.org/wiki/Maidenhead_Locator_System
// Characters are consumed in (longitude, latitude) pairs, alternating letter
// levels and digit levels; decoding stops at the first character that does
// not match the expected class. The returned rect is centered in the decoded
// cell with that cell's size as its precision.
func NewRectGridLocator(gl string) *Rect {
	// Start from the south-west corner of the whole earth.
	latitude := float64(-90)
	longitude := float64(-180)
	// Cell sizes one division step above the first level, so the first
	// iteration's division yields the level-1 cell size.
	latprec := float64(10) * 24
	lonprec := float64(20) * 24
loop:
	for i, c := range gl {
		// Locators are case-insensitive.
		c = unicode.ToUpper(c)
		switch i % 4 {
		case 0: // longitude letter level
			if unicode.IsUpper(c) {
				lonprec /= 24
				longitude += lonprec * float64(c-'A')
			} else {
				break loop
			}
		case 1: // latitude letter level
			if unicode.IsUpper(c) {
				latprec /= 24
				latitude += latprec * float64(c-'A')
			} else {
				break loop
			}
		case 2: // longitude digit level
			if unicode.IsDigit(c) {
				lonprec /= 10
				longitude += lonprec * float64(c-'0')
			} else {
				break loop
			}
		case 3: // latitude digit level
			if unicode.IsDigit(c) {
				latprec /= 10
				latitude += latprec * float64(c-'0')
			} else {
				break loop
			}
		}
	}
	// Shift from the cell's corner to its center.
	return NewRect(latitude+latprec/2, longitude+lonprec/2, latprec, lonprec)
}
// Center returns the center LatLng of the rect as a Point whose precision
// is half of the rect's angular size in each dimension.
func (rect Rect) Center() *Point {
	return &Point{
		lat: NewAngleFromS1Angle(rect.Rect.Center().Lat, rect.Rect.Size().Lat/2),
		lng: NewAngleFromS1Angle(rect.Rect.Center().Lng, rect.Rect.Size().Lng/2)}
}
// PrecString is Precision String(): a human-readable description of the
// rect's latitude/longitude error in degrees, localized via Config.Lang
// ("ja" for Japanese, English otherwise).
func (rect Rect) PrecString() string {
	latErr := rect.Size().Lat.Degrees()
	lngErr := rect.Size().Lng.Degrees()
	if Config.Lang == "ja" {
		return fmt.Sprintf("緯度誤差%f度、経度誤差%f度", latErr, lngErr)
	}
	return fmt.Sprintf("lat. error %fdeg., long. error %fdeg.", latErr, lngErr)
}
// GridLocator is from Grid Locator.
// https://en.wikipedia.org/wiki/Maidenhead_Locator_System
// The locator is extended level by level (longitude letter, latitude letter,
// longitude digit, latitude digit, repeating) until the next level would be
// finer than the rect's own size, then truncated to an even length so the
// result is always complete (lon, lat) pairs.
func (rect *Rect) GridLocator() string {
	// floaterr compensates for accumulated floating-point error in the
	// precision comparisons below.
	const floaterr = 1 + 1E-11
	var gl []rune
	// Shift to positive ranges: latitude 0..180, longitude 0..360.
	latitude := rect.Center().Lat().Degrees() + 90
	longitude := rect.Center().Lng().Degrees() + 180
	// Cell sizes one division step above the first level.
	latprec := float64(10) * 24
	lonprec := float64(20) * 24
loop:
	for i := 0; ; i++ {
		switch i % 4 {
		case 0: // longitude letter level
			lonprec /= 24
			if lonprec*floaterr < rect.Size().Lng.Degrees() {
				break loop
			}
			c := math.Floor(longitude / lonprec)
			gl = append(gl, rune(byte(c)+'A'))
			longitude -= c * lonprec
		case 1: // latitude letter level
			latprec /= 24
			if latprec*floaterr < rect.Size().Lat.Degrees() {
				break loop
			}
			c := math.Floor(latitude / latprec)
			gl = append(gl, rune(byte(c)+'A'))
			latitude -= c * latprec
		case 2: // longitude digit level
			lonprec /= 10
			if lonprec*floaterr < rect.Size().Lng.Degrees() {
				break loop
			}
			c := math.Floor(longitude / lonprec)
			gl = append(gl, rune(byte(c)+'0'))
			longitude -= c * lonprec
		case 3: // latitude digit level
			latprec /= 10
			if latprec*floaterr < rect.Size().Lat.Degrees() {
				break loop
			}
			c := math.Floor(latitude / latprec)
			gl = append(gl, rune(byte(c)+'0'))
			latitude -= c * latprec
		}
	}
	// Drop a trailing unpaired character so the locator length stays even.
	l := len(gl)
	if l%2 == 1 {
		gl = gl[:l-1]
	}
	return string(gl)
}
// NewRectGeoHash creates a Rect from a GeoHash string (http://geohash.org/).
// The rect is centered on the decoded cell with the cell's size as its
// precision. An error is returned if the hash cannot be decoded.
func NewRectGeoHash(geoHash string) (*Rect, error) {
	bb := geohash.Decode(geoHash)
	if bb == nil {
		// Error string lowercased per Go convention (staticcheck ST1005).
		return nil, errors.New("geohash decode error")
	}
	return NewRect(
		bb.Center().Lat(), bb.Center().Lng(),
		bb.NorthEast().Lat()-bb.SouthWest().Lat(),
		bb.NorthEast().Lng()-bb.SouthWest().Lng()), nil
}
// geoHash encodes the rect's center as a geohash string with the given
// number of characters of precision.
func (rect *Rect) geoHash(precision int) string {
	return geohash.EncodeWithPrecision(rect.Center().Lat().Degrees(), rect.Center().Lng().Degrees(), precision)
}
// GeoHash5 returns the 5-character GeoHash string of the rect's center.
func (rect *Rect) GeoHash5() string {
	return rect.geoHash(5)
}
// GeoHash6 returns the 6-character GeoHash string of the rect's center.
func (rect *Rect) GeoHash6() string {
	return rect.geoHash(6)
}
// GeoHash returns a GeoHash string whose length is chosen from the rect's
// own size: the number of latitude and longitude bits needed to reach the
// rect's resolution is converted to a character count (each geohash
// character encodes 5 bits, alternating longitude/latitude), and the
// shorter (coarser) of the two candidate lengths is used.
func (rect *Rect) GeoHash() string {
	// floaterr absorbs floating-point rounding in the bits-to-length
	// conversion below.
	const floaterr = 1 + 5E-10
	geohashlatbits := -math.Log2(rect.Size().Lat.Degrees()/45) + 2 // div by 180 = 45 * 2^2
	geohashlngbits := -math.Log2(rect.Size().Lng.Degrees()/45) + 3 // div by 360 = 45 * 2^3
	geohashlat2len, geohashlatlen2mod := math.Modf(geohashlatbits / 5 * floaterr)
	var geohashlatlen int
	// NOTE(review): the 0.4/0.6 thresholds presumably reflect the uneven
	// split of lat/lng bits across odd-length geohashes — confirm.
	if geohashlatlen2mod >= 0.4 {
		geohashlatlen = int(geohashlat2len)*2 + 1
	} else {
		geohashlatlen = int(geohashlat2len) * 2
	}
	geohashlng2len, geohashlnglen2mod := math.Modf(geohashlngbits / 5 * floaterr)
	var geohashlnglen int
	if geohashlnglen2mod >= 0.6 {
		geohashlnglen = int(geohashlng2len)*2 + 1
	} else {
		geohashlnglen = int(geohashlng2len) * 2
	}
	// Use the smaller of the two lengths so the cell covers the rect.
	if geohashlatlen < geohashlnglen {
		return rect.geoHash(geohashlatlen)
	}
	return rect.geoHash(geohashlnglen)
}
// S2Rect returns the embedded s2.Rect value.
func (rect *Rect) S2Rect() s2.Rect {
	return rect.Rect
}
// S2Region is a getter for s2.Region; it exposes the rect as the generic
// s2.Region interface.
func (rect *Rect) S2Region() s2.Region {
	return rect.S2Rect()
}
package aiplatform
import (
context "context"
cmpopts "github.com/google/go-cmp/cmp/cmpopts"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protocmp "google.golang.org/protobuf/testing/protocmp"
assert "gotest.tools/v3/assert"
strings "strings"
testing "testing"
time "time"
)
// PipelineServiceTestSuite drives resource-oriented conformance tests
// against a PipelineServiceServer implementation. The structure appears to
// be generated AIP test scaffolding — avoid hand-editing beyond comments.
type PipelineServiceTestSuite struct {
	// T is the parent *testing.T under which all suite subtests run.
	T *testing.T
	// Server to test.
	Server PipelineServiceServer
}
// TestPipelineJob runs the PipelineJob test suite as a subtest of fx.T,
// injecting the context and the server under test into options first.
func (fx PipelineServiceTestSuite) TestPipelineJob(ctx context.Context, options PipelineJobTestSuiteConfig) {
	fx.T.Run("PipelineJob", func(t *testing.T) {
		options.ctx = ctx
		options.service = fx.Server
		options.test(t)
	})
}
// TestTrainingPipeline runs the TrainingPipeline test suite as a subtest of
// fx.T, injecting the context and the server under test into options first.
func (fx PipelineServiceTestSuite) TestTrainingPipeline(ctx context.Context, options TrainingPipelineTestSuiteConfig) {
	fx.T.Run("TrainingPipeline", func(t *testing.T) {
		options.ctx = ctx
		options.service = fx.Server
		options.test(t)
	})
}
// PipelineJobTestSuiteConfig configures the PipelineJob resource tests.
type PipelineJobTestSuiteConfig struct {
	// ctx is the context for all RPCs; injected by the suite.
	ctx context.Context
	// service is the server under test; injected by the suite.
	service PipelineServiceServer
	// currParent tracks which entry of Parents to hand out next —
	// presumably advanced by nextParent as tests consume parents; confirm
	// against the nextParent implementation.
	currParent int
	// The parents to use when creating resources.
	// At least one parent needs to be set. Depending on methods available on the resource,
	// more may be required. If insufficient number of parents are
	// provided the test will fail.
	Parents []string
	// Create should return a resource which is valid to create, i.e.
	// all required fields set.
	Create func(parent string) *PipelineJob
	// Patterns of tests to skip.
	// For example if a service has a Get method:
	// Skip: ["Get"] will skip all tests for Get.
	// Skip: ["Get/persisted"] will only skip the subtest called "persisted" of Get.
	Skip []string
}
// test runs all PipelineJob resource subtests against the configured service.
func (fx *PipelineJobTestSuiteConfig) test(t *testing.T) {
	t.Run("Create", fx.testCreate)
	t.Run("Get", fx.testGet)
	t.Run("List", fx.testList)
}
// testCreate exercises CreatePipelineJob: argument validation, population of
// output-only fields, persistence of the created resource, and rejection of
// requests missing required fields or carrying invalid resource references.
func (fx *PipelineJobTestSuiteConfig) testCreate(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no parent is provided.
	t.Run("missing parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
			Parent:      "",
			PipelineJob: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
			Parent:      "invalid resource name",
			PipelineJob: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Field create_time should be populated when the resource is created.
	t.Run("create time", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
			Parent:      parent,
			PipelineJob: fx.Create(parent),
		})
		assert.NilError(t, err)
		assert.Check(t, time.Since(msg.CreateTime.AsTime()) < time.Second)
	})
	// The created resource should be persisted and reachable with Get.
	t.Run("persisted", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
			Parent:      parent,
			PipelineJob: fx.Create(parent),
		})
		assert.NilError(t, err)
		persisted, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: msg.Name,
		})
		assert.NilError(t, err)
		assert.DeepEqual(t, msg, persisted, protocmp.Transform())
	})
	// The method should fail with InvalidArgument if the resource has any
	// required fields and they are not provided.
	t.Run("required fields", func(t *testing.T) {
		fx.maybeSkip(t)
		t.Run(".runtime_config.gcs_output_directory", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetRuntimeConfig()
			if container == nil {
				t.Skip("not reachable")
			}
			// Clear the required field via proto reflection, then expect
			// the create call to be rejected.
			fd := container.ProtoReflect().Descriptor().Fields().ByName("gcs_output_directory")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
				Parent:      parent,
				PipelineJob: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".encryption_spec.kms_key_name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetEncryptionSpec()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("kms_key_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
				Parent:      parent,
				PipelineJob: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
	})
	// The method should fail with InvalidArgument if the resource has any
	// resource references and they are invalid.
	t.Run("resource references", func(t *testing.T) {
		fx.maybeSkip(t)
		t.Run(".network", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			container.Network = "invalid resource name"
			_, err := fx.service.CreatePipelineJob(fx.ctx, &CreatePipelineJobRequest{
				Parent:      parent,
				PipelineJob: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
	})
}
// testGet exercises GetPipelineJob: name validation, retrieval of an existing
// resource, NotFound for a missing one, and rejection of wildcard-only names.
func (fx *PipelineJobTestSuiteConfig) testGet(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no name is provided.
	t.Run("missing name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: "",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name is not valid.
	t.Run("invalid name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Resource should be returned without errors if it exists.
	t.Run("exists", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		msg, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: created.Name,
		})
		assert.NilError(t, err)
		assert.DeepEqual(t, msg, created, protocmp.Transform())
	})
	// Method should fail with NotFound if the resource does not exist.
	t.Run("not found", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		// Append a suffix to a known-good name to get a syntactically valid
		// name that does not exist.
		_, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: created.Name + "notfound",
		})
		assert.Equal(t, codes.NotFound, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name only contains wildcards ('-')
	t.Run("only wildcards", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetPipelineJob(fx.ctx, &GetPipelineJobRequest{
			Name: "projects/-/locations/-/pipelineJobs/-",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
}
// testList exercises ListPipelineJobs: request validation, isolation of
// results by parent, pagination (last page, more pages, page-by-page
// traversal), and exclusion of deleted resources.
func (fx *PipelineJobTestSuiteConfig) testList(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page token is not valid.
	t.Run("invalid page token", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:    parent,
			PageToken: "invalid page token",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page size is negative.
	t.Run("negative page size", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:   parent,
			PageSize: -10,
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Seed a pristine parent with a known number of resources for the
	// pagination subtests below.
	const resourcesCount = 15
	parent := fx.nextParent(t, true)
	parentMsgs := make([]*PipelineJob, resourcesCount)
	for i := 0; i < resourcesCount; i++ {
		parentMsgs[i] = fx.create(t, parent)
	}
	// If parent is provided the method must only return resources
	// under that parent.
	t.Run("isolation", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:   parent,
			PageSize: 999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs,
			response.PipelineJobs,
			cmpopts.SortSlices(func(a, b *PipelineJob) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// If there are no more resources, next_page_token should not be set.
	t.Run("last page", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:   parent,
			PageSize: resourcesCount,
		})
		assert.NilError(t, err)
		assert.Equal(t, "", response.NextPageToken)
	})
	// If there are more resources, next_page_token should be set.
	t.Run("more pages", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:   parent,
			PageSize: resourcesCount - 1,
		})
		assert.NilError(t, err)
		assert.Check(t, response.NextPageToken != "")
	})
	// Listing resource one by one should eventually return all resources.
	t.Run("one by one", func(t *testing.T) {
		fx.maybeSkip(t)
		msgs := make([]*PipelineJob, 0, resourcesCount)
		var nextPageToken string
		for {
			response, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
				Parent:    parent,
				PageSize:  1,
				PageToken: nextPageToken,
			})
			assert.NilError(t, err)
			assert.Equal(t, 1, len(response.PipelineJobs))
			msgs = append(msgs, response.PipelineJobs...)
			nextPageToken = response.NextPageToken
			if nextPageToken == "" {
				break
			}
		}
		assert.DeepEqual(
			t,
			parentMsgs,
			msgs,
			cmpopts.SortSlices(func(a, b *PipelineJob) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// Method should not return deleted resources.
	t.Run("deleted", func(t *testing.T) {
		fx.maybeSkip(t)
		const deleteCount = 5
		for i := 0; i < deleteCount; i++ {
			_, err := fx.service.DeletePipelineJob(fx.ctx, &DeletePipelineJobRequest{
				Name: parentMsgs[i].Name,
			})
			assert.NilError(t, err)
		}
		response, err := fx.service.ListPipelineJobs(fx.ctx, &ListPipelineJobsRequest{
			Parent:   parent,
			PageSize: 9999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs[deleteCount:],
			response.PipelineJobs,
			cmpopts.SortSlices(func(a, b *PipelineJob) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
}
// nextParent returns the parent resource name to create test resources
// under. When pristine is true it advances the cursor so the returned
// parent has not been used by earlier tests. Fails the test if not
// enough parents were configured.
func (fx *PipelineJobTestSuiteConfig) nextParent(t *testing.T, pristine bool) string {
	if pristine {
		fx.currParent++
	}
	idx := fx.currParent
	if idx >= len(fx.Parents) {
		t.Fatal("need at least", idx+1, "parents")
	}
	return fx.Parents[idx]
}
// peekNextParent returns the parent after the current one without
// advancing the cursor. Fails the test if no further parent exists.
func (fx *PipelineJobTestSuiteConfig) peekNextParent(t *testing.T) string {
	idx := fx.currParent + 1
	if idx >= len(fx.Parents) {
		t.Fatal("need at least", idx+1, "parents")
	}
	return fx.Parents[idx]
}
// maybeSkip skips the current (sub)test when its full name contains any of
// the configured .Skip patterns.
func (fx *PipelineJobTestSuiteConfig) maybeSkip(t *testing.T) {
	name := t.Name()
	for _, pattern := range fx.Skip {
		if strings.Contains(name, pattern) {
			t.Skip("skipped because of .Skip")
		}
	}
}
// create creates a valid PipelineJob under parent using the configured
// Create factory, failing the test immediately on error.
func (fx *PipelineJobTestSuiteConfig) create(t *testing.T, parent string) *PipelineJob {
	t.Helper()
	req := &CreatePipelineJobRequest{
		Parent:      parent,
		PipelineJob: fx.Create(parent),
	}
	created, err := fx.service.CreatePipelineJob(fx.ctx, req)
	assert.NilError(t, err)
	return created
}
// TrainingPipelineTestSuiteConfig configures the generated conformance tests
// for the TrainingPipeline resource. Parents and Create must be populated by
// the caller before the suite runs.
type TrainingPipelineTestSuiteConfig struct {
	// ctx is the context used for every service call made by the suite.
	ctx context.Context
	// service is the PipelineService implementation under test.
	service PipelineServiceServer
	// currParent is the index into Parents currently in use; advanced by
	// nextParent when a pristine parent is requested.
	currParent int
	// The parents to use when creating resources.
	// At least one parent needs to be set. Depending on methods available on the resource,
	// more may be required. If insufficient number of parents are
	// provided the test will fail.
	Parents []string
	// Create should return a resource which is valid to create, i.e.
	// all required fields set.
	Create func(parent string) *TrainingPipeline
	// Patterns of tests to skip.
	// For example if a service has a Get method:
	// Skip: ["Get"] will skip all tests for Get.
	// Skip: ["Get/persisted"] will only skip the subtest called "persisted" of Get.
	Skip []string
}
// test runs every method-level test group in a fixed, deterministic order.
func (fx *TrainingPipelineTestSuiteConfig) test(t *testing.T) {
	for _, tc := range []struct {
		name string
		fn   func(*testing.T)
	}{
		{"Create", fx.testCreate},
		{"Get", fx.testGet},
		{"List", fx.testList},
	} {
		t.Run(tc.name, tc.fn)
	}
}
// testCreate exercises CreateTrainingPipeline: argument validation,
// population of output-only fields, persistence of the created resource,
// and rejection of requests missing any of the resource's required fields.
// Each "required fields" subtest clears one field via proto reflection and
// expects the create call to fail with InvalidArgument.
func (fx *TrainingPipelineTestSuiteConfig) testCreate(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no parent is provided.
	t.Run("missing parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
			Parent:           "",
			TrainingPipeline: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
			Parent:           "invalid resource name",
			TrainingPipeline: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Field create_time should be populated when the resource is created.
	t.Run("create time", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
			Parent:           parent,
			TrainingPipeline: fx.Create(parent),
		})
		assert.NilError(t, err)
		assert.Check(t, time.Since(msg.CreateTime.AsTime()) < time.Second)
	})
	// The created resource should be persisted and reachable with Get.
	t.Run("persisted", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
			Parent:           parent,
			TrainingPipeline: fx.Create(parent),
		})
		assert.NilError(t, err)
		persisted, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: msg.Name,
		})
		assert.NilError(t, err)
		assert.DeepEqual(t, msg, persisted, protocmp.Transform())
	})
	// The method should fail with InvalidArgument if the resource has any
	// required fields and they are not provided.
	t.Run("required fields", func(t *testing.T) {
		fx.maybeSkip(t)
		t.Run(".display_name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("display_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.filter_split.training_filter", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetFilterSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("training_filter")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.filter_split.validation_filter", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetFilterSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("validation_filter")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.filter_split.test_filter", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetFilterSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("test_filter")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.predefined_split.key", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetPredefinedSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("key")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.timestamp_split.key", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetTimestampSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("key")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.stratified_split.key", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetStratifiedSplit()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("key")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.gcs_destination.output_uri_prefix", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetGcsDestination()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("output_uri_prefix")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.bigquery_destination.output_uri", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig().GetBigqueryDestination()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("output_uri")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".input_data_config.dataset_id", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetInputDataConfig()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("dataset_id")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".training_task_definition", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("training_task_definition")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".training_task_inputs", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("training_task_inputs")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.display_name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("display_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.container_spec.image_uri", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetContainerSpec()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("image_uri")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.parameters", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("parameters")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.parameters.sampled_shapley_attribution.path_count", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec().GetParameters().GetSampledShapleyAttribution()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("path_count")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.parameters.integrated_gradients_attribution.step_count", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec().GetParameters().GetIntegratedGradientsAttribution()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("step_count")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.parameters.xrai_attribution.step_count", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec().GetParameters().GetXraiAttribution()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("step_count")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.metadata", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("metadata")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.metadata.inputs", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec().GetMetadata()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("inputs")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.explanation_spec.metadata.outputs", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetExplanationSpec().GetMetadata()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("outputs")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".model_to_upload.encryption_spec.kms_key_name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg.GetModelToUpload().GetEncryptionSpec()
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("kms_key_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
				Parent:           parent,
				TrainingPipeline: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
	})
}
// testGet exercises GetTrainingPipeline: name validation, retrieval of an
// existing resource, NotFound for a missing one, and rejection of
// wildcard-only names.
func (fx *TrainingPipelineTestSuiteConfig) testGet(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no name is provided.
	t.Run("missing name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: "",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name is not valid.
	t.Run("invalid name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Resource should be returned without errors if it exists.
	t.Run("exists", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		msg, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: created.Name,
		})
		assert.NilError(t, err)
		assert.DeepEqual(t, msg, created, protocmp.Transform())
	})
	// Method should fail with NotFound if the resource does not exist.
	t.Run("not found", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		// Append a suffix to a known-good name to get a syntactically valid
		// name that does not exist.
		_, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: created.Name + "notfound",
		})
		assert.Equal(t, codes.NotFound, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name only contains wildcards ('-')
	t.Run("only wildcards", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetTrainingPipeline(fx.ctx, &GetTrainingPipelineRequest{
			Name: "projects/-/locations/-/trainingPipelines/-",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
}
// testList exercises ListTrainingPipelines: request validation, isolation of
// results by parent, pagination (last page, more pages, page-by-page
// traversal), and exclusion of deleted resources.
func (fx *TrainingPipelineTestSuiteConfig) testList(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page token is not valid.
	t.Run("invalid page token", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:    parent,
			PageToken: "invalid page token",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page size is negative.
	t.Run("negative page size", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:   parent,
			PageSize: -10,
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Seed a pristine parent with a known number of resources for the
	// pagination subtests below.
	const resourcesCount = 15
	parent := fx.nextParent(t, true)
	parentMsgs := make([]*TrainingPipeline, resourcesCount)
	for i := 0; i < resourcesCount; i++ {
		parentMsgs[i] = fx.create(t, parent)
	}
	// If parent is provided the method must only return resources
	// under that parent.
	t.Run("isolation", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:   parent,
			PageSize: 999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs,
			response.TrainingPipelines,
			cmpopts.SortSlices(func(a, b *TrainingPipeline) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// If there are no more resources, next_page_token should not be set.
	t.Run("last page", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:   parent,
			PageSize: resourcesCount,
		})
		assert.NilError(t, err)
		assert.Equal(t, "", response.NextPageToken)
	})
	// If there are more resources, next_page_token should be set.
	t.Run("more pages", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:   parent,
			PageSize: resourcesCount - 1,
		})
		assert.NilError(t, err)
		assert.Check(t, response.NextPageToken != "")
	})
	// Listing resource one by one should eventually return all resources.
	t.Run("one by one", func(t *testing.T) {
		fx.maybeSkip(t)
		msgs := make([]*TrainingPipeline, 0, resourcesCount)
		var nextPageToken string
		for {
			response, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
				Parent:    parent,
				PageSize:  1,
				PageToken: nextPageToken,
			})
			assert.NilError(t, err)
			assert.Equal(t, 1, len(response.TrainingPipelines))
			msgs = append(msgs, response.TrainingPipelines...)
			nextPageToken = response.NextPageToken
			if nextPageToken == "" {
				break
			}
		}
		assert.DeepEqual(
			t,
			parentMsgs,
			msgs,
			cmpopts.SortSlices(func(a, b *TrainingPipeline) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// Method should not return deleted resources.
	t.Run("deleted", func(t *testing.T) {
		fx.maybeSkip(t)
		const deleteCount = 5
		for i := 0; i < deleteCount; i++ {
			_, err := fx.service.DeleteTrainingPipeline(fx.ctx, &DeleteTrainingPipelineRequest{
				Name: parentMsgs[i].Name,
			})
			assert.NilError(t, err)
		}
		response, err := fx.service.ListTrainingPipelines(fx.ctx, &ListTrainingPipelinesRequest{
			Parent:   parent,
			PageSize: 9999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs[deleteCount:],
			response.TrainingPipelines,
			cmpopts.SortSlices(func(a, b *TrainingPipeline) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
}
// nextParent returns the parent resource name to create test resources
// under. When pristine is true it advances the cursor so the returned
// parent has not been used by earlier tests. Fails the test if not
// enough parents were configured.
func (fx *TrainingPipelineTestSuiteConfig) nextParent(t *testing.T, pristine bool) string {
	if pristine {
		fx.currParent++
	}
	idx := fx.currParent
	if idx >= len(fx.Parents) {
		t.Fatal("need at least", idx+1, "parents")
	}
	return fx.Parents[idx]
}
// peekNextParent returns the parent after the current one without
// advancing the cursor. Fails the test if no further parent exists.
func (fx *TrainingPipelineTestSuiteConfig) peekNextParent(t *testing.T) string {
	idx := fx.currParent + 1
	if idx >= len(fx.Parents) {
		t.Fatal("need at least", idx+1, "parents")
	}
	return fx.Parents[idx]
}
// maybeSkip skips the current (sub)test when its full name contains any of
// the configured .Skip patterns.
func (fx *TrainingPipelineTestSuiteConfig) maybeSkip(t *testing.T) {
	name := t.Name()
	for _, pattern := range fx.Skip {
		if strings.Contains(name, pattern) {
			t.Skip("skipped because of .Skip")
		}
	}
}
func (fx *TrainingPipelineTestSuiteConfig) create(t *testing.T, parent string) *TrainingPipeline {
t.Helper()
created, err := fx.service.CreateTrainingPipeline(fx.ctx, &CreateTrainingPipelineRequest{
Parent: parent,
TrainingPipeline: fx.Create(parent),
})
assert.NilError(t, err)
return created
} | proto/gen/googleapis/cloud/aiplatform/v1/pipeline_service_aiptest.pb.go | 0.606149 | 0.47591 | pipeline_service_aiptest.pb.go | starcoder |
package fakebackend
import "github.com/go-latex/latex/font"
func init() {
kernsDb = dbKerns{
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "regular"}, s1: ".", s2: "2"}: -0.5,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "regular"}, s1: ".", s2: "2"}: -0.625,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 10, Type: "rm"}, s1: ".", s2: "2"}: -0.5,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "default", Size: 12, Type: "rm"}, s1: ".", s2: "2"}: -0.625,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "it", Size: 10, Type: "it"}, s1: ".", s2: "2"}: -0.5,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "A", s2: "V"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "V", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "A", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "é", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "V", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "é", s2: "V"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "h", s2: "e"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "e", s2: "h"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "l", s2: "e"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "e", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "l", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "l", s2: "o"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "o", s2: "l"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "é", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "f", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "i", s2: "f"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: " ", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "i", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "i", s2: "s"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "s", s2: "i"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: " ", s2: "s"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "s", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "A", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sigma", s2: "A"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "a", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sigma", s2: "a"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "é", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sigma", s2: "é"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: " ", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sigma", s2: " "}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sum", s2: "\\sigma"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "\\sigma", s2: "\\sum"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "1", s2: "."}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: ".", s2: "1"}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: "2", s2: "."}: 0,
kernKey{font: font.Font{Name: "it", Size: 12, Type: "it"}, s1: ".", s2: "2"}: -0.625,
}
} | internal/fakebackend/fakebackend_kerns_gen.go | 0.51879 | 0.592313 | fakebackend_kerns_gen.go | starcoder |
package rel
import (
"math"
"reflect"
)
func indirect(rv reflect.Value) interface{} {
if rv.Kind() == reflect.Ptr {
if rv.IsNil() {
return nil
}
rv = rv.Elem()
}
return rv.Interface()
}
// must panics when err is non-nil. It is a convenience guard for
// operations that are not expected to fail.
func must(err error) {
	if err == nil {
		return
	}
	panic(err)
}
type isZeroer interface {
IsZero() bool
}
// isZero shallowly check wether a field in struct is zero or not
func isZero(value interface{}) bool {
var (
zero bool
)
switch v := value.(type) {
case nil:
zero = true
case bool:
zero = v == false
case string:
zero = v == ""
case int:
zero = v == 0
case int8:
zero = v == 0
case int16:
zero = v == 0
case int32:
zero = v == 0
case int64:
zero = v == 0
case uint:
zero = v == 0
case uint8:
zero = v == 0
case uint16:
zero = v == 0
case uint32:
zero = v == 0
case uint64:
zero = v == 0
case uintptr:
zero = v == 0
case float32:
zero = v == 0
case float64:
zero = v == 0
case isZeroer:
zero = v.IsZero()
default:
zero = isDeepZero(reflect.ValueOf(value), 0)
}
return zero
}
// modified from https://golang.org/src/reflect/value.go?s=33807:33835#L1077
func isDeepZero(rv reflect.Value, depth int) bool {
if depth < 0 {
return true
}
switch rv.Kind() {
case reflect.Bool:
return !rv.Bool()
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return rv.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return rv.Uint() == 0
case reflect.Float32, reflect.Float64:
return math.Float64bits(rv.Float()) == 0
case reflect.Complex64, reflect.Complex128:
c := rv.Complex()
return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
case reflect.Array:
for i := 0; i < rv.Len(); i++ {
if !isDeepZero(rv.Index(i), depth-1) {
return false
}
}
return true
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.UnsafePointer:
return rv.IsNil()
case reflect.Slice:
return rv.IsNil() || rv.Len() == 0
case reflect.String:
return rv.Len() == 0
case reflect.Struct:
for i := 0; i < rv.NumField(); i++ {
if !isDeepZero(rv.Field(i), depth-1) {
return false
}
}
return true
default:
return true
}
} | util.go | 0.519765 | 0.450299 | util.go | starcoder |
package nn
import (
mat "github.com/nlpodyssey/spago/pkg/mat32"
"github.com/nlpodyssey/spago/pkg/ml/ag"
)
// Affine performs an affine transformation over an arbitrary (odd) number
// of nodes: y = b + W1x1 + W2x2 + ... + WnXn.
// The first node is the bias b, added to the output as-is. The remaining
// nodes form (W, x) pairs whose products are accumulated; after the first
// pair, any pair whose x is nil is skipped.
func Affine(g *ag.Graph, xs ...ag.Node) ag.Node {
	if len(xs)%2 == 0 {
		panic("nn: the number of arguments of the affine transformation should be odd")
	}
	// Start from b + W1x1; the first pair is always applied.
	out := g.Add(xs[0], g.Mul(xs[1], xs[2]))
	for i := 3; i+1 < len(xs); i += 2 {
		if xs[i+1] == nil {
			continue // pair without an input: skip it
		}
		out = g.Add(out, g.Mul(xs[i], xs[i+1]))
	}
	return out
}
// BiLinear performs a bilinear transformation of the type (x_1 W x_2).
func BiLinear(g *ag.Graph, w, x1, x2 ag.Node) ag.Node {
	left := g.Mul(g.T(x1), w) // x1^T W
	return g.Mul(left, x2)
}
// BiAffine performs a biaffine transformation:
// y = (x1 W x2) + u^T x1 + v^T x2 + b.
func BiAffine(g *ag.Graph, w, u, v, b, x1, x2 ag.Node) ag.Node {
	y := BiLinear(g, w, x1, x2)
	y = g.Add(y, g.Mul(g.T(u), x1))
	y = g.Add(y, g.Mul(g.T(v), x2))
	return g.Add(y, b)
}
// Conv2D performs a 2D convolution of x with kernel w, using the given
// strides. It panics when the strides do not evenly cover the input.
func Conv2D(g *ag.Graph, w, x ag.Node, xStride, yStride int) ag.Node {
	wRows := w.Value().Rows()
	wCols := w.Value().Columns()
	if (x.Value().Rows()-wRows)%xStride != 0 {
		panic("Incompatible stride value for rows")
	}
	if (x.Value().Columns()-wCols)%yStride != 0 {
		panic("Incompatible stride value for columns")
	}
	outRows := (x.Value().Rows()-wRows)/xStride + 1
	outCols := (x.Value().Columns()-wCols)/yStride + 1
	products := make([]ag.Node, 0, outRows*outCols)
	for i := 0; i < outRows; i++ {
		for j := 0; j < outCols; j++ {
			// Dot product between the kernel and the current input window.
			window := g.View(x, i*xStride, j*yStride, wRows, wCols)
			products = append(products, g.Dot(window, w))
		}
	}
	return g.Reshape(g.Concat(products...), outRows, outCols)
}
// Separate returns a matrix of Node(s), represented as a slice of slices,
// containing the elements extracted from the input.
// The dimensions of the resulting matrix are the same as the input's.
func Separate(g *ag.Graph, x ag.Node) [][]ag.Node {
	rows, cols := x.Value().Dims()
	ys := make([][]ag.Node, rows)
	for i := 0; i < rows; i++ {
		ys[i] = make([]ag.Node, cols)
		for j := 0; j < cols; j++ {
			ys[i][j] = g.At(x, i, j)
		}
	}
	return ys
}
// SeparateVec returns a slice of Node(s) containing the elements extracted
// from the input; its length equals the number of input elements.
// You can think of this method as the inverse of the ag.Concat operator.
func SeparateVec(g *ag.Graph, x ag.Node) []ag.Node {
	ys := make([]ag.Node, x.Value().Size())
	for i := range ys {
		ys[i] = g.AtVec(x, i)
	}
	return ys
}
// SplitVec splits the x Node into multiple chunks.
// It panics if the x Node is not a vector.
// TODO: optimize, this is extremely inefficient!
func SplitVec(g *ag.Graph, x ag.Node, chunks int) []ag.Node {
size := int(mat.Ceil(mat.Float(x.Value().Size()) / mat.Float(chunks)))
lastSize := x.Value().Size() % chunks
ys := make([]ag.Node, chunks)
for c := 0; c < chunks; c++ {
length := 0
if c == chunks-1 && lastSize > 0 {
length = lastSize
} else {
length = size
}
tmp := make([]ag.Node, length)
for i := 0; i < length; i++ {
tmp[i] = g.AtVec(x, i+c*size)
}
ys[c] = g.Concat(tmp...)
}
return ys
} | pkg/ml/nn/transforms.go | 0.717408 | 0.575707 | transforms.go | starcoder |
package storetestcases
import (
"context"
"fmt"
"testing"
"github.com/stratumn/go-chainscript"
"github.com/stratumn/go-chainscript/chainscripttest"
"github.com/stratumn/go-core/postgresstore"
"github.com/stratumn/go-core/store"
"github.com/stratumn/go-core/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// initBatch creates a new batch on the adapter, failing the test
// immediately when the batch cannot be created.
func initBatch(t *testing.T, a store.Adapter) store.Batch {
	batch, err := a.NewBatch(context.Background())
	require.NoError(t, err, "a.NewBatch()")
	require.NotNil(t, batch, "Batch should not be nil")
	return batch
}
// TestBatch runs all tests for the store.Batch interface
func (f Factory) TestBatch(t *testing.T) {
	ctx := context.Background()
	a := f.initAdapter(t)
	defer f.freeAdapter(a)
	// Initialize the adapter with a few links with specific map ids
	// (6 links spread round-robin over map0, map1, map2).
	for i := 0; i < 6; i++ {
		link := chainscripttest.NewLinkBuilder(t).WithRandomData().WithMapID(fmt.Sprintf("map%d", i%3)).Build()
		_, err := a.CreateLink(ctx, link)
		require.NoError(t, err, "a.CreateLink()")
	}
	t.Run("CreateLink should not write to underlying store", func(t *testing.T) {
		ctx = context.Background()
		b := initBatch(t, a)
		link := chainscripttest.RandomLink(t)
		linkHash, err := b.CreateLink(ctx, link)
		assert.NoError(t, err, "b.CreateLink()")
		// The link must stay buffered in the batch: the adapter should not
		// see it before Write is called.
		found, err := a.GetSegment(ctx, linkHash)
		assert.NoError(t, err, "a.GetSegment()")
		assert.Nil(t, found, "Link should not be found in adapter until Write is called")
	})
	t.Run("CreateLink should handle previous link in batch", func(t *testing.T) {
		ctx := context.Background()
		b := initBatch(t, a)
		l1 := chainscripttest.RandomLink(t)
		lh1, err := b.CreateLink(ctx, l1)
		require.NoError(t, err, "b.CreateLink()")
		// l2 references l1 as its parent even though l1 only exists in the
		// batch buffer, not yet in the underlying store.
		l2 := chainscripttest.NewLinkBuilder(t).
			WithRandomData().
			WithParent(t, l1).
			WithProcess(l1.Meta.Process.Name).
			WithMapID(l1.Meta.MapId).
			Build()
		lh2, err := b.CreateLink(ctx, l2)
		require.NoError(t, err, "b.CreateLink()")
		err = b.Write(ctx)
		require.NoError(t, err, "b.Write()")
		// After Write, both links must be visible in the adapter.
		for _, lh := range []chainscript.LinkHash{lh1, lh2} {
			found, err := a.GetSegment(ctx, lh)
			assert.NoError(t, err, "a.GetSegment()")
			require.NotNil(t, found, "a.GetSegment()")
		}
	})
	t.Run("CreateLink should reject links after failure", func(t *testing.T) {
		ctx := context.Background()
		b := initBatch(t, a)
		// Only the postgres batch actually enforces that at the moment.
		// Bufferedbatch fails when Write() is called.
		_, ok := b.(*postgresstore.Batch)
		if !ok {
			t.Skip("Test not applicable to current batch implementation")
		}
		// A link whose parent is unknown to both batch and store should be
		// rejected, and the batch should then refuse any further link.
		parentNotInStore := chainscripttest.RandomLink(t)
		invalidLink := chainscripttest.NewLinkBuilder(t).
			WithRandomData().
			WithParent(t, parentNotInStore).
			WithProcess(parentNotInStore.Meta.Process.Name).
			WithMapID(parentNotInStore.Meta.MapId).
			Build()
		_, err := b.CreateLink(ctx, invalidLink)
		assert.Error(t, err)
		validLink := chainscripttest.RandomLink(t)
		_, err = b.CreateLink(ctx, validLink)
		assert.EqualError(t, err, store.ErrBatchFailed.Error())
	})
	t.Run("Write should write to the underlying store", func(t *testing.T) {
		ctx = context.Background()
		b := initBatch(t, a)
		link := chainscripttest.RandomLink(t)
		linkHash, err := b.CreateLink(ctx, link)
		assert.NoError(t, err, "b.CreateLink()")
		err = b.Write(ctx)
		assert.NoError(t, err, "b.Write()")
		found, err := a.GetSegment(ctx, linkHash)
		assert.NoError(t, err, "a.GetSegment()")
		require.NotNil(t, found, "a.GetSegment()")
		chainscripttest.LinksEqual(t, link, found.Link)
	})
	t.Run("Finding segments should find in both batch and underlying store", func(t *testing.T) {
		ctx = context.Background()
		b := initBatch(t, a)
		var segs *types.PaginatedSegments
		var err error
		// Baseline: count the segments already present in the adapter.
		segs, err = b.FindSegments(ctx, &store.SegmentFilter{Pagination: store.Pagination{Limit: store.DefaultLimit}})
		assert.NoError(t, err, "b.FindSegments()")
		require.NotNil(t, segs)
		assert.Len(t, segs.Segments, segs.TotalCount)
		adapterLinksCount := len(segs.Segments)
		// Buffer two more links in the batch only, then expect FindSegments
		// to surface both buffered and stored segments.
		_, err = b.CreateLink(ctx, chainscripttest.RandomLink(t))
		require.NoError(t, err, "b.CreateLink()")
		_, err = b.CreateLink(ctx, chainscripttest.RandomLink(t))
		require.NoError(t, err, "b.CreateLink()")
		segs, err = b.FindSegments(ctx, &store.SegmentFilter{Pagination: store.Pagination{Limit: store.DefaultLimit}})
		assert.NoError(t, err, "b.FindSegments()")
		require.NotNil(t, segs)
		assert.Len(t, segs.Segments, adapterLinksCount+2, "Invalid number of segments found")
	})
	t.Run("Finding maps should find in both batch and underlying store", func(t *testing.T) {
		ctx = context.Background()
		b := initBatch(t, a)
		// Baseline: count the map ids already present in the adapter.
		mapIDs, err := b.GetMapIDs(ctx, &store.MapFilter{Pagination: store.Pagination{Limit: store.DefaultLimit}})
		assert.NoError(t, err, "b.GetMapIDs()")
		adapterMapIdsCount := len(mapIDs)
		// Add two links with new map ids to the batch buffer only.
		for _, mapID := range []string{"map42", "map43"} {
			link := chainscripttest.NewLinkBuilder(t).WithMapID(mapID).Build()
			_, err = b.CreateLink(ctx, link)
			require.NoError(t, err, "b.CreateLink()")
		}
		mapIDs, err = b.GetMapIDs(ctx, &store.MapFilter{Pagination: store.Pagination{Limit: store.DefaultLimit}})
		assert.NoError(t, err, "b.GetMapIDs()")
		assert.Equal(t, adapterMapIdsCount+2, len(mapIDs), "Invalid number of maps")
		// The result must contain the three seeded maps plus the two
		// buffered ones; compare as a set since ordering is unspecified.
		want := map[string]interface{}{"map0": nil, "map1": nil, "map2": nil, "map42": nil, "map43": nil}
		got := make(map[string]interface{}, len(mapIDs))
		for _, mapID := range mapIDs {
			got[mapID] = nil
		}
		for mapID := range want {
			_, exist := got[mapID]
			assert.True(t, exist, "Missing map: %s", mapID)
		}
	})
}
package main
import (
"fmt"
"io"
"os"
"strconv"
"strings"
"time"
// Local package
"github.com/caltechlibrary/cli"
"github.com/caltechlibrary/datatools"
"github.com/caltechlibrary/datatools/reldate"
)
var (
description = `
%s is a small command line utility which returns the relative date in
YYYY-MM-DD format. This is helpful when scripting various time
relationships. The difference in time returned are determined by
the time increments provided.
Time increments are a positive or negative integer. Time unit can be
either day(s), week(s), month(s), or year(s). Weekday names are
case insentive (e.g. Monday and monday). They can be abbreviated
to the first three letters of the name, e.g. Sunday can be Sun, Monday
can be Mon, Tuesday can be Tue, Wednesday can be Wed, Thursday can
be Thu, Friday can be Fri or Saturday can be Sat.
`
examples = `
If today was 2014-08-03 and you wanted the date three days in the past try–
%s 3 days
The output would be
2014-08-06
TIME UNITS
Supported time units are
+ day(s)
+ week(s)
+ year(s)
Specifying a date to calucate from
%s handles dates in the YYYY-MM-DD format (e.g. March 1, 2014 would be
2014-03-01). By default reldate uses today as the date to calculate relative
time from. If you use the –from option you can it will calculate the
relative date from that specific date.
%s --from=2014-08-03 3 days
Will yield
2014-08-06
NEGATIVE INCREMENTS
Command line arguments traditionally start with a dash which we also use to
denote a nagative number. To tell the command line process that to not treat
negative numbers as an “option” precede your time increment and time unit
with a double dash.
%s --from=2014-08-03 -- -3 days
Will yield
2014-07-31
RELATIVE WEEK DAYS
You can calculate a date from a weekday name (e.g. Saturday, Monday, Tuesday)
knowning a day (e.g. 2015-02-10 or the current date of the week) occurring in
a week. A common case would be wanting to figure out the Monday date of a week
containing 2015-02-10. The week is presumed to start on Sunday (i.e. 0) and
finish with Saturday (e.g. 6).
%s --from=2015-02-10 Monday
will yield
2015-02-09
As that is the Monday of the week containing 2015-02-10. Weekday names case
insensitive and can be the first three letters of the English names or full
English names (e.g. Monday, monday, Mon, mon).
`
// Standard Options
showHelp bool
showVersion bool
showLicense bool
showExamples bool
outputFName string
generateMarkdownDocs bool
quiet bool
newLine bool
eol string
// Application Options
endOfMonthFor bool
relativeTo string
relativeT time.Time
)
func assertOk(eout io.Writer, e error, failMsg string) {
if e != nil {
fmt.Fprintf(eout, " %s, %s", failMsg, e)
os.Exit(1)
}
}
func main() {
const (
relativeToUsage = "Date the relative time is calculated from."
endOfMonthUsage = "Display the end of month day. E.g. 2012-02-29"
)
app := cli.NewCli(datatools.Version)
appName := app.AppName()
// Add Help Docs
app.AddHelp("license", []byte(fmt.Sprintf(datatools.LicenseText, appName, datatools.Version)))
app.AddHelp("description", []byte(fmt.Sprintf(description, appName)))
app.AddHelp("examples", []byte(fmt.Sprintf(examples, appName, appName, appName, appName, appName)))
// Document non-option Params
app.AddParams("[TIME_DESCRPTION]")
// Standard Options
app.BoolVar(&showHelp, "h,help", false, "display help")
app.BoolVar(&showLicense, "l,license", false, "display license")
app.BoolVar(&showVersion, "v,version", false, "display version")
app.BoolVar(&showExamples, "examples", false, "display example(s)")
app.BoolVar(&quiet, "quiet", false, "suppress error messages")
app.BoolVar(&newLine, "nl,newline", true, "if true add a trailing newline")
app.BoolVar(&generateMarkdownDocs, "generate-markdown-docs", false, "generate markdown documentation")
// App Specific Options
app.StringVar(&relativeTo, "f,from", relativeTo, relativeToUsage)
app.BoolVar(&endOfMonthFor, "e,end-of-month", endOfMonthFor, endOfMonthUsage)
// Parse env and options
app.Parse()
args := app.Args()
// Setup IO
var err error
app.Eout = os.Stderr
/* NOTE: this command does not read from stdin
app.In, err = cli.Open(inputFName, os.Stdin)
cli.ExitOnError(app.Eout, err, quiet)
defer cli.CloseFile(inputFName, app.In)
*/
app.Out, err = cli.Create(outputFName, os.Stdout)
cli.ExitOnError(app.Eout, err, quiet)
defer cli.CloseFile(outputFName, app.Out)
// Process Options
if generateMarkdownDocs {
app.GenerateMarkdownDocs(app.Out)
os.Exit(0)
}
if showHelp || showExamples {
if len(args) > 0 {
fmt.Fprintln(app.Out, app.Help(args...))
} else {
app.Usage(app.Out)
}
os.Exit(0)
}
if showLicense {
fmt.Fprintln(app.Out, app.License())
os.Exit(0)
}
if showVersion {
fmt.Fprintln(app.Out, app.Version())
os.Exit(0)
}
if newLine {
eol = "\n"
}
argc := app.NArg()
argv := app.Args()
var (
unitString string
)
if argc < 1 && endOfMonthFor == false {
cli.ExitOnError(app.Eout, fmt.Errorf("Missing time increment and units (e.g. +2 days) or weekday name (e.g. Monday, Mon)."), quiet)
} else if argc > 2 {
cli.ExitOnError(app.Eout, fmt.Errorf("Too many command line arguments."), quiet)
}
relativeT = time.Now()
if relativeTo != "" {
relativeT, err = time.Parse(reldate.YYYYMMDD, relativeTo)
assertOk(app.Eout, err, "Cannot parse the from date.\n")
}
if endOfMonthFor == true {
fmt.Fprintf(app.Out, "%s%s", reldate.EndOfMonth(relativeT), eol)
os.Exit(0)
}
timeInc := 0
if argc == 2 {
unitString = strings.ToLower(argv[1])
timeInc, err = strconv.Atoi(argv[0])
assertOk(app.Eout, err, "Time increment should be a positive or negative integer.\n")
} else {
// We may have a weekday string
unitString = strings.ToLower(argv[0])
}
t, err := reldate.RelativeTime(relativeT, timeInc, unitString)
assertOk(app.Eout, err, "Did not understand command.")
fmt.Fprintf(app.Out, "%s%s", t.Format(reldate.YYYYMMDD), eol)
} | plugins/data/transform/datatools/cmd/reldate/reldate.go | 0.521227 | 0.445469 | reldate.go | starcoder |
package display
import (
"fmt"
"image"
"image/color"
"strconv"
"unicode/utf8"
)
// Cursor models the known or unknown states of a cursor.
type Cursor struct {
// Position is the position of the cursor.
// Negative values indicate that the X or Y position is not known,
// so the next position change must be relative to the beginning of the
// same line or possibly the origin.
Position image.Point
// Foreground is the foreground color for subsequent text.
// Transparent indicates that the color is unknown, so the next text must
// be preceded by an SGR (set graphics) ANSI sequence to set it.
Foreground color.RGBA
// Foreground is the foreground color for subsequent text.
// Transparent indicates that the color is unknown, so the next text must
// be preceded by an SGR (set graphics) ANSI sequence to set it.
Background color.RGBA
// Visibility indicates whether the cursor is visible.
Visibility Visibility
}
// Visibility represents the visibility of a Cursor.
type Visibility int
const (
// Hidden represents a hidden cursor.
Hidden Visibility = iota + 1
// Visible represents a normal cursor.
Visible
)
func (v Visibility) String() string {
switch v {
case 0:
return "Unknown"
case Hidden:
return "Hidden"
case Visible:
return "Visible"
default:
return fmt.Sprintf("Invalid<%d>", int(v))
}
}
var (
// Lost indicates that the cursor position is unknown.
Lost = image.Point{-1, -1}
// Start is a cursor state that makes no assumptions about the cursor's
// position or colors, necessitating a seek from origin and explicit color
// settings for the next text.
Start = Cursor{
Position: Lost,
Foreground: Transparent,
Background: Transparent,
}
// Reset is a cursor state indicating that the cursor is at the origin
// and that the foreground color is white (7), background black (0).
// This is the state cur.Reset() returns to, and the state for which
// cur.Reset() will append nothing to the buffer.
Reset = Cursor{
Position: image.ZP,
Foreground: Colors[7],
Background: Colors[0],
}
)
// Hide hides the cursor.
func (c Cursor) Hide(buf []byte) ([]byte, Cursor) {
if c.Visibility != Hidden {
c.Visibility = Hidden
buf = append(buf, "\033[?25l"...)
}
return buf, c
}
// Show reveals the cursor.
func (c Cursor) Show(buf []byte) ([]byte, Cursor) {
if c.Visibility != Visible {
c.Visibility = Visible
buf = append(buf, "\033[?25h"...)
}
return buf, c
}
// Clear erases the whole display; implicitly invalidates the cursor position
// since its behavior is inconsistent across terminal implementations.
func (c Cursor) Clear(buf []byte) ([]byte, Cursor) {
c.Position = Lost
return append(buf, "\033[2J"...), c
}
// ClearLine erases the current line.
func (c Cursor) ClearLine(buf []byte) ([]byte, Cursor) {
return append(buf, "\033[2K"...), c
}
// Reset returns the terminal to default white on black colors.
func (c Cursor) Reset(buf []byte) ([]byte, Cursor) {
if c.Foreground != Colors[7] || c.Background != Colors[0] {
//lint:ignore SA4005 broken check
c.Foreground, c.Background = Colors[7], Colors[0]
buf = append(buf, "\033[m"...)
}
return buf, c
}
// Home seeks the cursor to the origin, using display absolute coordinates.
func (c Cursor) Home(buf []byte) ([]byte, Cursor) {
c.Position = image.ZP
return append(buf, "\033[H"...), c
}
func (c Cursor) recover(buf []byte, to image.Point) ([]byte, Cursor) {
if c.Position == Lost {
// If the cursor position is completely unknown, move relative to
// screen origin. This mode must be avoided to render relative to
// cursor position inline with a scrolling log, by setting the cursor
// position relative to an arbitrary origin before rendering.
return c.jumpTo(buf, to)
}
if c.Position.X == -1 {
// If only horizontal position is unknown, return to first column and
// march forward. Rendering a non-ASCII cell of unknown or
// indeterminate width may invalidate the column number. For example, a
// skin tone emoji may or may not render as a single column glyph.
buf = append(buf, "\r"...)
c.Position.X = 0
// Continue...
}
return buf, c
}
func (c Cursor) jumpTo(buf []byte, to image.Point) ([]byte, Cursor) {
buf = append(buf, "\033["...)
buf = strconv.AppendInt(buf, int64(to.Y+1), 10)
buf = append(buf, ";"...)
buf = strconv.AppendInt(buf, int64(to.X+1), 10)
buf = append(buf, "H"...)
c.Position = to
return buf, c
}
func (c Cursor) linedown(buf []byte, n int) ([]byte, Cursor) {
// Use \r\n to advance cursor Y on the chance it will advance the
// display bounds.
buf = append(buf, "\r\n"...)
for m := n - 1; m > 0; m-- {
buf = append(buf, "\n"...)
}
c.Position.X = 0
c.Position.Y += n
return buf, c
}
func (c Cursor) up(buf []byte, n int) ([]byte, Cursor) {
buf = append(buf, "\033["...)
buf = strconv.AppendInt(buf, int64(n), 10)
buf = append(buf, "A"...)
c.Position.Y -= n
return buf, c
}
func (c Cursor) down(buf []byte, n int) ([]byte, Cursor) {
buf = append(buf, "\033["...)
buf = strconv.AppendInt(buf, int64(n), 10)
buf = append(buf, "B"...)
c.Position.Y += n
return buf, c
}
func (c Cursor) left(buf []byte, n int) ([]byte, Cursor) {
buf = append(buf, "\033["...)
buf = strconv.AppendInt(buf, int64(n), 10)
buf = append(buf, "D"...)
c.Position.X -= n
return buf, c
}
func (c Cursor) right(buf []byte, n int) ([]byte, Cursor) {
buf = append(buf, "\033["...)
buf = strconv.AppendInt(buf, int64(n), 10)
buf = append(buf, "C"...)
c.Position.X += n
return buf, c
}
// Go moves the cursor to another position, preferring to use relative motion,
// using line relative if the column is unknown, using display origin relative
// only if the line is also unknown. If the column is unknown, use "\r" to seek
// to column 0 of the same line.
func (c Cursor) Go(buf []byte, to image.Point) ([]byte, Cursor) {
buf, c = c.recover(buf, to)
if to.X == 0 && to.Y == c.Position.Y+1 {
buf, c = c.Reset(buf)
buf = append(buf, "\r\n"...)
c.Position.X = 0
c.Position.Y++
} else if to.X == 0 && c.Position.X != 0 {
buf, c = c.Reset(buf)
buf = append(buf, "\r"...)
c.Position.X = 0
// In addition to scrolling back to the first column generally, this
// has the effect of resetting the column if writing a multi-byte
// string invalidates the cursor's horizontal position. For example, a
// skin tone emoji may or may not render as a single column glyph.
}
if n := to.Y - c.Position.Y; n > 0 {
buf, c = c.linedown(buf, n)
} else if n < 0 {
buf, c = c.up(buf, -n)
}
if n := to.X - c.Position.X; n > 0 {
buf, c = c.right(buf, n)
} else if n < 0 {
buf, c = c.left(buf, -n)
}
return buf, c
}
// TODO: func (c Cursor) Write(buf, p []byte) ([]byte, Cursor)
// WriteGlyph appends the given string's UTF8 bytes into the given
// buffer, invalidating the cursor if the string COULD HAVE rendered
// to more than one glyph; otherwise the cursor's X is advanced by 1.
func (c Cursor) WriteGlyph(buf []byte, s string) ([]byte, Cursor) {
buf = append(buf, s...)
if n := utf8.RuneCountInString(s); n == 1 {
c.Position.X++
} else {
// Invalidate cursor column to force position reset
// before next draw, if the string drawn might be longer
// than one cell wide or simply empty.
c.Position.X = -1
}
return buf, c
} | internal/cops/display/cursor.go | 0.71889 | 0.521045 | cursor.go | starcoder |
package imageutil
import (
"image"
"image/color"
)
func RowAverageGray16(radius int, img Channel) *image.Gray16 {
bounds := img.Bounds()
resultBounds := image.Rect(bounds.Min.X-radius+1, bounds.Min.Y, bounds.Max.X, bounds.Max.Y)
resultImg := image.NewGray16(resultBounds)
QuickRowsRP(
RowsRP(1, func(rect image.Rectangle) {
y := rect.Min.Y
n := 0
d := 0
// Heads.
x := resultBounds.Min.X
for ; x <= bounds.Min.X; x++ {
n += int(img.Gray16At(x+radius-1, y).Y)
d++
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
// Middle.
for ; x <= bounds.Max.X-radius; x++ {
n += int(img.Gray16At(x+radius-1, y).Y)
n -= int(img.Gray16At(x-1, y).Y)
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
// Tails.
for ; x < bounds.Max.X; x++ {
n -= int(img.Gray16At(x-1, y).Y)
d--
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
}),
)(bounds)
return resultImg
}
func ColumnAverageGray16(radius int, img Channel) *image.Gray16 {
bounds := img.Bounds()
resultBounds := image.Rect(bounds.Min.X, bounds.Min.Y-radius+1, bounds.Max.X, bounds.Max.Y)
resultImg := image.NewGray16(resultBounds)
QuickColumnsRP(
ColumnsRP(1, func(rect image.Rectangle) {
x := rect.Min.X
n := 0
d := 0
// Heads.
y := resultBounds.Min.Y
for ; y <= bounds.Min.Y; y++ {
n += int(img.Gray16At(x, y+radius-1).Y)
d++
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
// Middle.
for ; y <= bounds.Max.Y-radius; y++ {
n += int(img.Gray16At(x, y+radius-1).Y)
n -= int(img.Gray16At(x, y-1).Y)
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
// Tails.
for ; y < bounds.Max.Y; y++ {
n -= int(img.Gray16At(x, y-1).Y)
d--
resultImg.Set(x, y, color.Gray16{
Y: uint16(n / d),
})
}
}),
)(bounds)
return resultImg
}
func AverageGray16(rect image.Rectangle, img Channel) color.Gray16 {
// Only use the area of the rectangle that overlaps with the image bounds.
rect = rect.Intersect(img.Bounds())
// Determine whether or not there's any area over which to determine an
// average.
d := uint64(rect.Dx() * rect.Dy())
if d == 0 {
return color.Gray16{}
}
var y uint64
AllPointsRP(
func(pt image.Point) {
y += uint64(img.Gray16At(pt.X, pt.Y).Y)
},
)(rect)
return color.Gray16{
Y: uint16(y / d),
}
}
func AverageNRGBA64(rect image.Rectangle, img *image.NRGBA64) color.NRGBA64 {
// Only use the area of the rectangle that overlaps with the image bounds.
rect = rect.Intersect(img.Bounds())
// Determine whether or not there's any area over which to determine an
// average.
d := uint64(rect.Dx() * rect.Dy())
if d == 0 {
return color.NRGBA64{}
}
var r, g, b, a uint64
AllPointsRP(
func(pt image.Point) {
c := img.NRGBA64At(pt.X, pt.Y)
r += uint64(c.R)
g += uint64(c.G)
b += uint64(c.B)
a += uint64(c.A)
},
)(rect)
return color.NRGBA64{
R: uint16(r / d),
G: uint16(g / d),
B: uint16(b / d),
A: uint16(a / d),
}
} | average.go | 0.800458 | 0.526586 | average.go | starcoder |
package doboz
const (
HASH_TABLE_SIZE = 1 << 20
CHILD_COUNT = DICTIONARY_SIZE * 2
INVALID_POSITION = -1
REBASE_THRESHOLD = (MaxInt - DICTIONARY_SIZE + 1) / DICTIONARY_SIZE * DICTIONARY_SIZE // must be a multiple of DICTIONARY_SIZE!
)
type Dictionary struct {
// Buffer
buffer []byte // pointer to the beginning of the buffer inside which we look for matches
bufferBase int // bufferBase > buffer, relative positions are necessary to support > 2 GB buffers
matchableBufferLength int
absolutePosition int // position from the beginning of buffer
// Cyclic dictionary
hashTable []int // relative match positions to bufferBase
children []int // children of the binary tree nodes (relative match positions to bufferBase)
}
func (d *Dictionary) SetBuffer(buffer []byte) {
// Set the buffer
d.buffer = buffer
d.absolutePosition = 0
// Compute the matchable buffer length
if len(d.buffer) > TAIL_LENGTH+MIN_MATCH_LENGTH {
d.matchableBufferLength = len(d.buffer) - (TAIL_LENGTH + MIN_MATCH_LENGTH)
} else {
d.matchableBufferLength = 0
}
// Since we always store 32-bit positions in the dictionary, we need relative positions in order to support buffers larger then 2 GB
// This can be possible, because the difference between any two positions stored in the dictionary never exceeds the size of the dictionary
// We don't store larger (64-bit) positions, because that can significantly degrade performance
// Initialize the relative position base pointer
d.bufferBase = 0
// Initialize if necessary
if d.hashTable == nil {
d.initialize()
}
// Clear the hash table
for i := 0; i < HASH_TABLE_SIZE; i++ {
d.hashTable[i] = INVALID_POSITION
}
}
// Finds match candidates at the current buffer position and slides the matching window to the next character
// Call findMatches/update with increasing positions
// The match candidates are stored in the supplied array, ordered by their length (ascending)
// The return value is the number of match candidates in the array
func (d *Dictionary) FindMatches(matchCandidates []Match) int {
// Check whether we can find matches at this position
if d.absolutePosition >= d.matchableBufferLength {
// Slide the matching window with one character
d.absolutePosition++
return 0
}
// Compute the maximum match length
maxMatchLength := min(len(d.buffer)-TAIL_LENGTH-d.absolutePosition, MAX_MATCH_LENGTH)
// Compute the position relative to the beginning of bufferBase_
// All other positions (including the ones stored in the hash table and the binary trees) are relative too
// From now on, we can safely ignore this position technique
position := d.computeRelativePosition()
// Compute the minimum match position
minMatchPosition := 0
if position >= DICTIONARY_SIZE {
minMatchPosition = position - DICTIONARY_SIZE + 1
}
// Compute the hash value for the current string
hashValue := Hash(d.buffer, d.bufferBase+position) % HASH_TABLE_SIZE
// Get the position of the first match from the hash table
matchPosition := d.hashTable[hashValue]
// Set the current string as the root of the binary tree corresponding to the hash table entry
d.hashTable[hashValue] = position
// Compute the current cyclic position in the dictionary
cyclicInputPosition := position % DICTIONARY_SIZE
// Initialize the references to the leaves of the new root's left and right subtrees
leftSubtreeLeaf := cyclicInputPosition * 2
rightSubtreeLeaf := cyclicInputPosition*2 + 1
// Initialize the match lenghts of the lower and upper bounds of the current string (lowMatch < match < highMatch)
// We use these to avoid unneccesary character comparisons at the beginnings of the strings
lowMatchLength := 0
highMatchLength := 0
// Initialize the longest match length
longestMatchLength := 0
// Find matches
// We look for the current string in the binary search tree and we rebuild the tree at the same time
// The deeper a node is in the tree, the lower is its position, so the root is the string with the highest position (lowest offset)
// We count the number of match attempts, and exit if it has reached a certain threshold
matchCount := 0
// Match candidates are matches which are longer than any previously encountered ones
matchCandidateCount := 0
for {
// Check whether the current match position is valid
if matchPosition < minMatchPosition || matchCount == MAX_MATCH_CANDIDATE_COUNT {
// We have checked all valid matches, so finish the new tree and exit
d.children[leftSubtreeLeaf] = INVALID_POSITION
d.children[rightSubtreeLeaf] = INVALID_POSITION
break
}
matchCount++
// Compute the cyclic position of the current match in the dictionary
cyclicMatchPosition := matchPosition % DICTIONARY_SIZE
// Use the match lengths of the low and high bounds to determine the number of characters that surely match
matchLength := min(lowMatchLength, highMatchLength)
// Determine the match length
for matchLength < maxMatchLength && d.buffer[d.bufferBase+position+matchLength] == d.buffer[d.bufferBase+matchPosition+matchLength] {
matchLength++
}
// Check whether this match is the longest so far
matchOffset := position - matchPosition
if matchLength > longestMatchLength && matchLength >= MIN_MATCH_LENGTH {
longestMatchLength = matchLength
// Add the current best match to the list of good match candidates
if matchCandidates != nil {
matchCandidates[matchCandidateCount].Length = matchLength
matchCandidates[matchCandidateCount].Offset = matchOffset
matchCandidateCount++
}
// If the match length is the maximum allowed value, the current string is already inserted into the tree: the current node
if matchLength == maxMatchLength {
// Since the current string is also the root of the tree, delete the current node
d.children[leftSubtreeLeaf] = d.children[cyclicMatchPosition*2]
d.children[rightSubtreeLeaf] = d.children[cyclicMatchPosition*2+1]
break
}
}
// Compare the two strings
if d.buffer[d.bufferBase+position+matchLength] < d.buffer[d.bufferBase+matchPosition+matchLength] {
// Insert the matched string into the right subtree
d.children[rightSubtreeLeaf] = matchPosition
// Go left
rightSubtreeLeaf = cyclicMatchPosition * 2
matchPosition = d.children[rightSubtreeLeaf]
// Update the match length of the high bound
highMatchLength = matchLength
} else {
// Insert the matched string into the left subtree
d.children[leftSubtreeLeaf] = matchPosition
// Go right
leftSubtreeLeaf = cyclicMatchPosition*2 + 1
matchPosition = d.children[leftSubtreeLeaf]
// Update the match length of the low bound
lowMatchLength = matchLength
}
}
// Slide the matching window with one character
d.absolutePosition++
return matchCandidateCount
}
// Slides the matching window to the next character without looking for matches, but it still has to update the dictionary
func (d *Dictionary) Skip() {
d.FindMatches(nil)
}
func (d *Dictionary) Position() int {
return d.absolutePosition
}
func (d *Dictionary) initialize() {
// Create the hash table
d.hashTable = make([]int, HASH_TABLE_SIZE)
// Create the tree nodes
// The number of nodes is equal to the size of the dictionary, and every node has two children
d.children = make([]int, CHILD_COUNT)
}
// Increments the match window position with one character
func (d *Dictionary) computeRelativePosition() int {
position := d.absolutePosition - d.bufferBase
// Check whether the current position has reached the rebase threshold
if position == REBASE_THRESHOLD {
// Rebase
rebaseDelta := REBASE_THRESHOLD - DICTIONARY_SIZE
d.bufferBase += rebaseDelta
position -= rebaseDelta
// Rebase the hash entries
for i := 0; i < HASH_TABLE_SIZE; i++ {
if d.hashTable[i] >= rebaseDelta {
d.hashTable[i] = d.hashTable[i] - rebaseDelta
} else {
d.hashTable[i] = INVALID_POSITION
}
}
// Rebase the binary tree nodes
for i := 0; i < CHILD_COUNT; i++ {
if d.children[i] >= rebaseDelta {
d.children[i] = d.children[i] - rebaseDelta
} else {
d.children[i] = INVALID_POSITION
}
}
}
return position
} | dictionary.go | 0.807461 | 0.531878 | dictionary.go | starcoder |
package math3d
// Matrix represents a 4x4 matrix
type Matrix struct {
a, b, c, d,
e, f, g, h,
i, j, k, l,
m, n, o, p float64
}
// MultiplyPoint returns point multiplied by the matrix
func (mat *Matrix) MultiplyPoint(point *Vector3) *Vector3 {
x := mat.a*point.X + mat.b*point.Y + mat.c*point.Z + mat.d
y := mat.e*point.X + mat.f*point.Y + mat.g*point.Z + mat.h
z := mat.i*point.X + mat.j*point.Y + mat.k*point.Z + mat.l
h := mat.m*point.X + mat.n*point.Y + mat.o*point.Z + mat.p
return &Vector3{X: x / h, Y: y / h, Z: z / h}
}
// MultiplyVector returns vector multiplied by the matrix
func (mat *Matrix) MultiplyVector(vector *Vector3) *Vector3 {
x := mat.a*vector.X + mat.b*vector.Y + mat.c*vector.Z
y := mat.e*vector.X + mat.f*vector.Y + mat.g*vector.Z
z := mat.i*vector.X + mat.j*vector.Y + mat.k*vector.Z
return &Vector3{X: x, Y: y, Z: z}
}
// ComposeMatrix composes the two matrices multiplying them
func (mat *Matrix) ComposeMatrix(mat2 *Matrix) *Matrix {
a := mat.a*mat2.a + mat.b*mat2.e + mat.c*mat2.i + mat.d*mat2.m
b := mat.a*mat2.b + mat.b*mat2.f + mat.c*mat2.j + mat.d*mat2.n
c := mat.a*mat2.c + mat.b*mat2.g + mat.c*mat2.k + mat.d*mat2.o
d := mat.a*mat2.d + mat.b*mat2.h + mat.c*mat2.l + mat.d*mat2.p
e := mat.e*mat2.a + mat.f*mat2.e + mat.g*mat2.i + mat.h*mat2.m
f := mat.e*mat2.b + mat.f*mat2.f + mat.g*mat2.j + mat.h*mat2.n
g := mat.e*mat2.c + mat.f*mat2.g + mat.g*mat2.k + mat.h*mat2.o
h := mat.e*mat2.d + mat.f*mat2.h + mat.g*mat2.l + mat.h*mat2.p
i := mat.i*mat2.a + mat.j*mat2.e + mat.k*mat2.i + mat.l*mat2.m
j := mat.i*mat2.b + mat.j*mat2.f + mat.k*mat2.j + mat.l*mat2.n
k := mat.i*mat2.c + mat.j*mat2.g + mat.k*mat2.k + mat.l*mat2.o
l := mat.i*mat2.d + mat.j*mat2.h + mat.k*mat2.l + mat.l*mat2.p
m := mat.m*mat2.a + mat.n*mat2.e + mat.o*mat2.i + mat.p*mat2.m
n := mat.m*mat2.b + mat.n*mat2.f + mat.o*mat2.j + mat.p*mat2.n
o := mat.m*mat2.c + mat.n*mat2.g + mat.o*mat2.k + mat.p*mat2.o
p := mat.m*mat2.d + mat.n*mat2.h + mat.o*mat2.l + mat.p*mat2.p
return &Matrix{a: a, b: b, c: c, d: d,
e: e, f: f, g: g, h: h,
i: i, j: j, k: k, l: l,
m: m, n: n, o: o, p: p}
} | math3d/matrix.go | 0.839208 | 0.78403 | matrix.go | starcoder |
package interpreter
import (
"strconv"
"errors"
"fmt"
)
type Node struct {
Keyword string
ID int
Parameter []IParameter
NextNode, PreviousNode INode
Trace string
Scope *Scope
}
type INode interface {
Execute(state *XiiState) error
Previous() INode
Next() INode
Init(nodes []INode) error
GetKeyword() string
GetID() int
GetTrace() string
GetScope() *Scope
}
type NumberDeclarationNode struct {
Node
}
func (node *NumberDeclarationNode) Execute(state *XiiState) error {
return nil
}
type LiteralDeclarationNode struct {
Node
}
func (node *LiteralDeclarationNode) Execute(state *XiiState) error {
return nil
}
type Passer struct {
Name string
Type string
}
type FunctionDeclarationNode struct {
Node
Parameters []Passer
nextAfterEnd INode
}
func (node *FunctionDeclarationNode) Init(nodes []INode) error {
nextEnd := findNextEndNode(node)
if nextEnd == nil {
return errors.New("A function node requires a matching end node")
}
node.nextAfterEnd = nextEnd.Next()
return nil
}
func (node *FunctionDeclarationNode) Execute(state *XiiState) error {
if state.PassingArea == nil {
state.NextNode = node.nextAfterEnd
} else {
for k, v := range state.PassingArea {
node.GetScope().SetVar(k, v)
}
state.PassingArea = nil
}
return nil
}
type CallNode struct {
Node
Passers map[string]IParameter
}
func (node *CallNode) Execute(state *XiiState) error {
fun := node.GetScope().GetFunctionNode(node.Parameter[0].GetRaw())
if fun == nil {
return errors.New("Tried to call non-existing function")
}
state.PassingArea = make(map[string]interface{}, 0)
for k, v := range node.Passers {
state.PassingArea[k] = v.GetValue(node.GetScope())
}
state.NextNode = fun
state.FunctionStack.Push(node)
return nil
}
type OutputNode struct {
Node
}
func (node *OutputNode) Execute(state *XiiState) error {
for i, n := range node.Parameter {
_, ok := n.(VariableParameter)
if i != 0 && !ok {
state.StdOut.WriteRune(' ')
}
state.StdOut.WriteString(n.GetText(node.GetScope()))
}
state.StdOut.WriteRune('\n')
state.StdOut.Flush()
return nil
}
type InputNode struct {
Node
}
func (node *InputNode) Execute(state *XiiState) error {
if len(node.Parameter) != 1 {
return errors.New("in: No parameter name given (or too many)")
}
varname := node.Parameter[0].GetRaw()
variable := node.GetScope().GetVar(varname)
if variable == nil {
return errors.New("Tried to 'in' not existing variable")
}
_, ok1 := variable.(string)
_, ok2 := variable.(float64)
if ok1 || ok2 {
var text string
fmt.Scanln(&text)
if ok1 {
node.GetScope().SetVar(varname, text)
} else if ok2 {
for {
num, err := strconv.ParseFloat(text, 64)
if err == nil {
node.GetScope().SetVar(varname, num)
break
} else {
fmt.Println("Please retry: " + err.Error())
fmt.Scanln(&text)
}
}
}
} else {
return errors.New("in: Unknown variable cannot be read into")
}
return nil
}
type LoopNode struct {
Node
nextAfterEndNode INode
expression *Expression
}
func (node *LoopNode) Init(nodes []INode) error {
nextEnd := findNextEndNode(node)
if nextEnd == nil {
return errors.New("A loop node requires a matching end node")
}
node.nextAfterEndNode = nextEnd.Next()
exp, err := NewExpression(node.Parameter)
if err != nil {
return err
}
node.expression = exp
return nil
}
func (node *LoopNode) Execute(state *XiiState) error {
res, err := Evaluate(node, node.expression)
if err != nil {
return err
}
if res == 0 {
state.NextNode = node.nextAfterEndNode
}
return nil
}
type ConditionNode struct {
Node
nextEndNode INode
expression *Expression
}
func (node *ConditionNode) Init(nodes []INode) error {
nextEnd := findNextEndNode(node)
if nextEnd == nil {
return errors.New("A condition node requires a matching end node")
}
node.nextEndNode = nextEnd.Next()
exp, err := NewExpression(node.Parameter)
if err != nil {
return err
}
node.expression = exp
return nil
}
func (node *ConditionNode) Execute(state *XiiState) error {
res, err := Evaluate(node, node.expression)
if err != nil {
return err
}
if res == 0 {
state.NextNode = node.nextEndNode
}
return nil
}
type BlockEndNode struct {
Node
companionNode INode
endsFunction bool
}
func (node *BlockEndNode) Init(nodes []INode) error {
companion := node.Previous()
counter := 1
for {
switch companion.(type) {
case (*ConditionNode):
counter--
if counter == 0 {
return nil
}
case (*LoopNode):
counter--
if counter == 0 {
node.companionNode = companion
return nil
}
case (*FunctionDeclarationNode):
counter--
if counter == 0 {
node.endsFunction = true
return nil
}
case (*BlockEndNode):
counter++
}
companion = companion.Previous()
if companion == nil {
return errors.New("end Node without condition/loop")
}
}
}
func (node *BlockEndNode) Execute(state *XiiState) error {
if node.companionNode != nil {
state.NextNode = node.companionNode
}
if node.endsFunction {
state.NextNode = state.FunctionStack.Pop().Next()
}
return nil
}
type SetNode struct {
Node
expression *Expression
}
func (node *SetNode) Init(nodes []INode) error {
if len(node.Parameter) < 2 || node.Parameter[0].GetRaw() != "=" {
return errors.New("set: Invalid set syntax")
}
exp, err := NewExpression(node.Parameter[1:])
if err != nil {
return err
}
node.expression = exp
return nil
}
func (node *SetNode) Execute(state *XiiState) error {
varname := node.Keyword
variable := node.GetScope().GetVar(varname)
_, ok := variable.(float64)
if !ok {
_, ok = variable.(string)
if !ok {
return errors.New("set: Can't set not existing variable")
}
var setval string
for i, v := range node.Parameter {
if i > 0 {
setval += " "
}
setval += v.GetText(node.GetScope())
}
node.GetScope().SetVar(varname, setval)
return nil
}
res, err := Evaluate(node, node.expression)
if err != nil {
return err
}
node.GetScope().SetVar(varname, res)
return nil
}
func (node *Node) Execute(state *XiiState) error {
return errors.New("No-Op Node executed")
}
func (node *Node) Previous() INode {
return node.PreviousNode
}
func (node *Node) Next() INode {
return node.NextNode
}
func (node *Node) GetKeyword() string {
return node.Keyword
}
func (node *Node) GetID() int {
return node.ID
}
func (node *Node) Init(nodes []INode) error {
return nil
}
func (node *Node) String() string {
return fmt.Sprintf("{{%d/%s : %s}}", node.ID, node.Keyword, node.Parameter)
}
func (node *Node) GetTrace() string {
return node.Trace
}
func (node *Node) GetScope() *Scope {
return node.Scope
}
func findNextEndNode(node INode) INode {
nextEnd := node.Next()
counter := 1
_, ok := nextEnd.(*BlockEndNode)
for {
if ok {
counter--
if counter == 0 {
break
}
}
_, ok2 := nextEnd.(*LoopNode)
_, ok3 := nextEnd.(*ConditionNode)
_, ok4 := nextEnd.(*FunctionDeclarationNode)
if ok2 || ok3 || ok4 {
counter++
}
nextEnd = nextEnd.Next()
if nextEnd == nil {
return nil
}
_, ok = nextEnd.(*BlockEndNode)
}
return nextEnd
} | interpreter/nodes.go | 0.623835 | 0.445409 | nodes.go | starcoder |
package main
import (
"math"
"math/rand"
)
// vec3 is a 3-component float64 vector used for points, directions and
// colors.
type vec3 struct {
	x float64
	y float64
	z float64
}

// Neg returns a new vector with every component negated.
func (a *vec3) Neg() *vec3 {
	return &vec3{-a.x, -a.y, -a.z}
}

// AddAssign adds b to a in place.
func (a *vec3) AddAssign(b *vec3) {
	a.x += b.x
	a.y += b.y
	a.z += b.z
}

// SubAssign subtracts b from a in place.
func (a *vec3) SubAssign(b *vec3) {
	a.x -= b.x
	a.y -= b.y
	a.z -= b.z
}

// MulAssign scales a by t in place.
func (a *vec3) MulAssign(t float64) {
	a.x *= t
	a.y *= t
	a.z *= t
}

// DivAssign divides a by t in place (implemented as multiply by 1/t).
func (a *vec3) DivAssign(t float64) {
	a.MulAssign(1.0 / t)
}

// Length returns the Euclidean length of the vector.
func (a *vec3) Length() float64 {
	return math.Sqrt(a.LengthSquared())
}

// LengthSquared returns the squared Euclidean length (avoids the sqrt).
func (a *vec3) LengthSquared() float64 {
	return a.x*a.x + a.y*a.y + a.z*a.z
}

// NearZero reports whether every component is smaller than 1e-8 in
// magnitude.
func (a *vec3) NearZero() bool {
	const s = 1e-8
	return (math.Abs(a.x) < s) &&
		(math.Abs(a.y) < s) &&
		(math.Abs(a.z) < s)
}
// Utility functions: non-mutating vector arithmetic returning new vectors.

// Vec3_Sub returns a - b.
func Vec3_Sub(a *vec3, b *vec3) *vec3 {
	return &vec3{
		a.x - b.x,
		a.y - b.y,
		a.z - b.z}
}

// Vec3_Add returns a + b.
func Vec3_Add(a *vec3, b *vec3) *vec3 {
	return &vec3{
		a.x + b.x,
		a.y + b.y,
		a.z + b.z}
}

// Vec3_AddMultiple returns the sum of all given vectors.
func Vec3_AddMultiple(vecs ...*vec3) *vec3 {
	res := vec3{0, 0, 0}
	for _, v := range vecs {
		res.AddAssign(v)
	}
	return &res
}

// Vec3_SubMultiple returns a copy of original with all given vectors
// subtracted from it.
func Vec3_SubMultiple(original *vec3, vecs ...*vec3) *vec3 {
	res := *original
	for _, v := range vecs {
		res.SubAssign(v)
	}
	return &res
}

// Vec3_Mul returns the component-wise (Hadamard) product of a and b.
func Vec3_Mul(a *vec3, b *vec3) *vec3 {
	return &vec3{
		a.x * b.x,
		a.y * b.y,
		a.z * b.z,
	}
}

// Vec3_FMul returns a scaled by the scalar t.
func Vec3_FMul(a *vec3, t float64) *vec3 {
	return &vec3{
		a.x * t,
		a.y * t,
		a.z * t,
	}
}

// Vec3_FDiv returns a divided by the scalar t.
func Vec3_FDiv(a *vec3, t float64) *vec3 {
	return Vec3_FMul(a, 1/t)
}

// Vec3_Dot returns the dot product of a and b.
func Vec3_Dot(a *vec3, b *vec3) float64 {
	return a.x*b.x + a.y*b.y + a.z*b.z
}

// Vec3_Cross returns the cross product u x v.
func Vec3_Cross(u *vec3, v *vec3) *vec3 {
	return &vec3{
		u.y*v.z - u.z*v.y,
		u.z*v.x - u.x*v.z,
		u.x*v.y - u.y*v.x,
	}
}

// Vec3_LengthSquared returns the squared length of a.
// Note: duplicates the vec3.LengthSquared method.
func Vec3_LengthSquared(a *vec3) float64 {
	return a.x*a.x + a.y*a.y + a.z*a.z
}

// Vec3_UnitVector returns v scaled to unit length.
func Vec3_UnitVector(v *vec3) *vec3 {
	return Vec3_FDiv(v, v.Length())
}
// Vec3_Random returns a vector with each component uniform in [0, 1).
func Vec3_Random() *vec3 {
	return &vec3{rand.Float64(), rand.Float64(), rand.Float64()}
}

// Vec3_RandomBetween returns a vector with each component uniform in
// [min, max).
func Vec3_RandomBetween(min, max float64) *vec3 {
	return &vec3{
		RandomFloatBetween(min, max),
		RandomFloatBetween(min, max),
		RandomFloatBetween(min, max)}
}

// Vec3_RandomInUnitSphere returns a uniformly distributed point inside the
// unit sphere, using rejection sampling over the enclosing cube.
func Vec3_RandomInUnitSphere() *vec3 {
	for {
		p := Vec3_RandomBetween(-1.0, 1.0)
		if p.LengthSquared() < 1.0 {
			return p
		}
	}
}

// Vec3_RandomUnitVector returns a random vector of unit length.
func Vec3_RandomUnitVector() *vec3 {
	return Vec3_UnitVector(Vec3_RandomInUnitSphere())
}

// Vec3_RandomInHemisphere returns a random in-sphere vector lying in the
// hemisphere around normal (flipped if it points the wrong way).
func Vec3_RandomInHemisphere(normal *vec3) *vec3 {
	inUnitSphere := Vec3_RandomInUnitSphere()
	if Vec3_Dot(inUnitSphere, normal) > 0.0 {
		return inUnitSphere
	}
	return inUnitSphere.Neg()
}

// Vec3_Reflect mirrors v about the surface normal n: v - 2*dot(v,n)*n.
func Vec3_Reflect(v, n *vec3) *vec3 {
	t := Vec3_FMul(n, 2.0*Vec3_Dot(v, n))
	return Vec3_Sub(v, t)
}

// Vec3_Refract refracts uv through a surface with normal n per Snell's
// law; etaiOverEtat is the ratio of refractive indices.
func Vec3_Refract(uv *vec3, n *vec3, etaiOverEtat float64) *vec3 {
	cosTheta := math.Min(Vec3_Dot(uv.Neg(), n), 1.0)
	t := Vec3_FMul(n, cosTheta)
	// component of the refracted ray perpendicular to n
	rOutPerp := Vec3_FMul(Vec3_Add(uv, t), etaiOverEtat)
	// component parallel to n; Abs guards against tiny negative values
	// caused by floating-point rounding
	parallelMul := -math.Sqrt(math.Abs(1.0 - rOutPerp.LengthSquared()))
	rOutParallel := Vec3_FMul(n, parallelMul)
	rOutPerp.AddAssign(rOutParallel)
	return rOutPerp
}
// Vec3_RandomInUnitDisk returns a uniformly distributed point inside the
// unit disk in the z=0 plane, using rejection sampling.
func Vec3_RandomInUnitDisk() *vec3 {
	for {
		p := vec3{
			RandomFloatBetween(-1.0, 1.0),
			RandomFloatBetween(-1.0, 1.0),
			0.0}
		if p.LengthSquared() < 1.0 {
			return &p
		}
	}
}
type point3 = vec3 | vec3.go | 0.868367 | 0.493775 | vec3.go | starcoder |
package chainutils
import (
"fmt"
"github.com/Rjected/lit/coinparam"
)
// GetParamFromName gets coin params from a human-readable network name
// (e.g. the names declared in coinparam). Returns an error for
// unsupported names. Note: the lookup map is rebuilt on every call.
func GetParamFromName(name string) (coinType *coinparam.Params, err error) {
	// create map for that O(1) access
	coinMap := map[string]*coinparam.Params{
		coinparam.BitcoinParams.Name:           &coinparam.BitcoinParams,
		coinparam.VertcoinParams.Name:          &coinparam.VertcoinParams,
		// coinparam.LitecoinParams.Name:       &coinparam.LitecoinParams,
		coinparam.TestNet3Params.Name:          &coinparam.TestNet3Params,
		coinparam.VertcoinTestNetParams.Name:   &coinparam.VertcoinTestNetParams,
		coinparam.LiteCoinTestNet4Params.Name:  &coinparam.LiteCoinTestNet4Params,
		coinparam.RegressionNetParams.Name:     &coinparam.RegressionNetParams,
		coinparam.VertcoinRegTestParams.Name:   &coinparam.VertcoinRegTestParams,
		coinparam.LiteRegNetParams.Name:        &coinparam.LiteRegNetParams,
	}
	// grab from map
	var found bool
	if coinType, found = coinMap[name]; !found {
		err = fmt.Errorf("Coin not found when trying to get from name, maybe it's not supported yet")
		return
	}
	return
}
// GetParamFromHDCoinType gets coin params from a hdCoinType
func GetParamFromHDCoinType(hdCoinType uint32) (coinType *coinparam.Params, err error) {
// create map for that O(1) access
coinMap := map[uint32]*coinparam.Params{
coinparam.BitcoinParams.HDCoinType: &coinparam.BitcoinParams,
coinparam.VertcoinParams.HDCoinType: &coinparam.VertcoinParams,
// coinparam.LitecoinParams.HDCoinType: &coinparam.LitecoinParams,
coinparam.TestNet3Params.HDCoinType: &coinparam.TestNet3Params,
coinparam.VertcoinTestNetParams.HDCoinType: &coinparam.VertcoinTestNetParams,
coinparam.LiteCoinTestNet4Params.HDCoinType: &coinparam.LiteCoinTestNet4Params,
coinparam.RegressionNetParams.HDCoinType: &coinparam.RegressionNetParams,
coinparam.VertcoinRegTestParams.HDCoinType: &coinparam.VertcoinRegTestParams,
coinparam.LiteRegNetParams.HDCoinType: &coinparam.LiteRegNetParams,
}
// grab from map
var found bool
if coinType, found = coinMap[hdCoinType]; !found {
err = fmt.Errorf("Coin not found when trying to get from hdCoinType, maybe it's not supported yet")
return
}
return
} | chainutils/coins.go | 0.572484 | 0.437523 | coins.go | starcoder |
package matpi
import (
"fmt"
"image"
"image/color"
"image/png"
"math"
"os"
"github.com/Konstantin8105/errors"
"gonum.org/v1/gonum/mat"
)
// isEqual reports whether two RGBA colors have identical channel values.
// color.RGBA contains only comparable uint8 fields, so direct struct
// comparison is equivalent to the previous field-by-field check and more
// idiomatic.
func isEqual(c1, c2 color.RGBA) bool {
	return c1 == c2
}
// Config is the configuration of a matrix picture: which color to use for
// positive, negative and zero entries, and how many pixels each matrix
// entry occupies.
type Config struct {
	// color of positive value
	PositiveColor color.RGBA
	// color of negative value
	NegativeColor color.RGBA
	// color of zero value
	ZeroColor color.RGBA
	// scale of picture: each matrix entry becomes a Scale x Scale square
	Scale int
}

// NewConfig returns the default configuration: red for positive, blue for
// negative, yellow for zero, one pixel per matrix entry.
func NewConfig() *Config {
	return &Config{
		PositiveColor: color.RGBA{255, 0, 0, 0xff},    // RED
		NegativeColor: color.RGBA{25, 181, 254, 0xff}, // BLUE
		ZeroColor:     color.RGBA{255, 255, 0, 0xff},  // YELLOW
		Scale:         1,
	}
}
// Convert matrix 'gonum.mat.Matrix' to PNG picture file with filename.
// Color of picture pixel in according to `config`.
func Convert(m mat.Matrix, filename string, config *Config) error {
// check input data
et := errors.New("check input data")
if config == nil {
_ = et.Add(fmt.Errorf("configuration is nil"))
} else {
if config.Scale < 0 {
_ = et.Add(fmt.Errorf("not acceptable scale less zero: %d", config.Scale))
}
if config.Scale == 0 {
_ = et.Add(fmt.Errorf("not acceptable zero scale"))
}
if isEqual(config.ZeroColor, config.PositiveColor) && isEqual(config.ZeroColor, config.NegativeColor) {
_ = et.Add(fmt.Errorf("colors is not ok"))
}
}
if filename == "" {
_ = et.Add(fmt.Errorf("filename is empty"))
}
if m == nil {
_ = et.Add(fmt.Errorf("matrix is nil"))
}
if et.IsError() {
return et
}
// generate picture
r, c := m.Dims()
img := image.NewRGBA(image.Rect(0, 0, r*config.Scale, c*config.Scale))
for i := 0; i < r; i++ {
for j := 0; j < c; j++ {
p := m.At(i, j)
// color identification
var color color.RGBA
switch {
case p > math.SmallestNonzeroFloat64:
color = config.PositiveColor
case p < -math.SmallestNonzeroFloat64:
color = config.NegativeColor
default:
color = config.ZeroColor
}
// iteration by pixels
for k := 0; k < config.Scale; k++ {
for g := 0; g < config.Scale; g++ {
img.Set(i*config.Scale+k, j*config.Scale+g, color)
}
}
}
}
file, err := os.Create(filename)
if err != nil {
return err
}
defer func() { _ = file.Close() }()
return png.Encode(file, img)
} | matpi.go | 0.685529 | 0.406185 | matpi.go | starcoder |
package publish
import (
"math"
"time"
"bk-bscp/cmd/bscp-connserver/modules/session"
)
const (
	// stepCount is how many steps a publish run is divided into
	// (each step covers roughly 1% of the targets).
	stepCount = 100
	// stepWait is the pause inserted between consecutive steps.
	stepWait = 600 * time.Millisecond
	// minStepUnitSize is the minimum number of targets in a single step.
	minStepUnitSize = 50
)

// RateController is publishing rate controller define.
type RateController interface {
	// Arrange arranges the target nodes for step publishing.
	Arrange(targets []*session.Session)

	// Next return the next targets slice to publish.
	Next() []*session.Session
}

// StepPubUnit is a single step-publishing unit: the sessions to publish to
// and how long to wait before the following step.
type StepPubUnit struct {
	targets []*session.Session
	wait    time.Duration
}

// SimpleRateController is a simple rate controller that hands out targets
// in fixed-size steps with a fixed pause between them.
type SimpleRateController struct {
	// steps slice for publishing.
	steps []*StepPubUnit

	// time duration to wait before the next slice.
	wait time.Duration

	// steps slice index of the next step to hand out.
	index int
}

// NewSimpleRateController creates new SimpleRateController.
func NewSimpleRateController() *SimpleRateController {
	return &SimpleRateController{}
}
// arrange splits targets into publishing units of at most unitSize
// sessions each. Every unit except the final one carries a wait of
// stepWait; the last unit has no wait attached.
func (s *SimpleRateController) arrange(targets []*session.Session, unitSize int) {
	// peel off full-size units while more than one unit of work remains
	for len(targets) > unitSize {
		s.steps = append(s.steps, &StepPubUnit{targets: targets[:unitSize], wait: stepWait})
		targets = targets[unitSize:]
	}
	// the remainder (possibly a full unit) becomes the final, wait-free step
	if len(targets) > 0 {
		s.steps = append(s.steps, &StepPubUnit{targets: targets})
	}
}
// Arrange arranges the targets with simple rate controller mode: steps of
// roughly 1% of the targets, but never smaller than minStepUnitSize.
func (s *SimpleRateController) Arrange(targets []*session.Session) {
	if len(targets) == 0 {
		return
	}

	// 1% grain for one step.
	unitSize := int(math.Ceil(float64(len(targets)) / float64(stepCount)))
	if unitSize < minStepUnitSize {
		unitSize = minStepUnitSize
	}
	s.arrange(targets, unitSize)
}

// Next returns the next targets slice for publishing, sleeping the wait
// attached to the previous step before handing out the current one.
// It returns nil once all steps are consumed.
// NOTE(review): index is mutated without synchronization — confirm this is
// only called from a single goroutine.
func (s *SimpleRateController) Next() []*session.Session {
	if len(s.steps) == 0 {
		return nil
	}

	if s.index >= len(s.steps) {
		// no more steps.
		return nil
	}
	step := s.steps[s.index]

	// the first step is handed out immediately, without any wait
	if s.index == 0 {
		s.index++
		return step.targets
	}
	time.Sleep(s.steps[s.index-1].wait)
	s.index++

	return step.targets
}
// You can implement your own rate controller based on load information here.
package rock_garden
import (
"github.com/golang/geo/r2"
"github.com/golang/geo/r3"
"image"
"image/color"
"math"
)
// Light is a point light source with a position and a scalar intensity.
type Light struct {
	Pos   r3.Vector
	Color float64
}

// Scene holds the ripple interval of the rock garden and its light sources.
type Scene struct {
	Interval float64
	Lights   []Light
}

// New creates a Scene with the given ripple interval and a single default
// light placed above the far corner of the 3x3 segment grid.
func New(interval float64) *Scene {
	return &Scene{
		Interval: interval,
		Lights: []Light{
			{
				Pos: r3.Vector{
					X: 3 * segmentWidth,
					Y: 0,
					Z: 3 * segmentWidth,
				},
				Color: 2.0,
			},
		},
	}
}

// ambient is the base light level applied to every pixel.
const ambient float64 = 0.5

// segmentWidth is the side length of one grid segment in pixels; the scene
// is a 3x3 grid of segments.
const segmentWidth float64 = 32

// dimen is the total scene size in pixels (3 segments per axis).
var dimen = r2.Point{
	X: segmentWidth * 3.0,
	Y: segmentWidth * 3.0,
}
// Norm computes the surface normal at ground position p. The scene is a
// 3x3 grid of segments: the center segment (1,1) is flat (normal +Z);
// every other segment gets a ripple normal derived from the distance to an
// anchor point, which is p clamped to the center segment's boundary.
// The ripple height follows cos(2*pi*offset) with wavelength s.Interval.
// NOTE(review): if p lies outside the 3x3 grid the anchor stays at its
// zero value {0,0}; callers (Render) never pass such points — confirm
// before reusing elsewhere.
func (s Scene) Norm(p r2.Point) r3.Vector {
	var n r3.Vector
	// which of the 3x3 segments p falls into (0..2 on each axis)
	var sections = struct {
		X int
		Y int
	}{
		X: int(p.X / segmentWidth),
		Y: int(p.Y / segmentWidth),
	}
	if sections.X == 1 && sections.Y == 1 {
		// center segment: flat, normal straight up
		n.Z = 1.0
	} else {
		// anchor = p clamped component-wise to [segmentWidth, 2*segmentWidth],
		// spelled out per segment below
		var anchor r2.Point
		if sections.X == 0 && sections.Y == 0 {
			anchor.X = segmentWidth
			anchor.Y = segmentWidth
		} else if sections.X == 0 && sections.Y == 1 {
			anchor.X = segmentWidth
			anchor.Y = p.Y
		} else if sections.X == 0 && sections.Y == 2 {
			anchor.X = segmentWidth
			anchor.Y = 2 * segmentWidth
		} else if sections.X == 1 && sections.Y == 0 {
			anchor.X = p.X
			anchor.Y = segmentWidth
		} else if sections.X == 1 && sections.Y == 2 {
			anchor.X = p.X
			anchor.Y = 2 * segmentWidth
		} else if sections.X == 2 && sections.Y == 0 {
			anchor.X = 2 * segmentWidth
			anchor.Y = segmentWidth
		} else if sections.X == 2 && sections.Y == 1 {
			anchor.X = 2 * segmentWidth
			anchor.Y = p.Y
		} else if sections.X == 2 && sections.Y == 2 {
			anchor.X = 2 * segmentWidth
			anchor.Y = 2 * segmentWidth
		}
		// direction and distance from the anchor to p
		v := p.Sub(anchor)
		d := v.Normalize()
		norm := v.Norm()
		// fractional position within one ripple wavelength, in (-1, 0]
		offset := norm/s.Interval - math.Ceil(norm/s.Interval)
		// y = cos(2 * pi * offset)
		// \delta y = -sin(2 * pi * offset) * 2 * pi
		// 2D normal of the ripple profile, rotated into 3D along d
		localN := r2.Point{
			X: 1,
			Y: -math.Sin(2*math.Pi*offset) * 2 * math.Pi,
		}.Normalize().Ortho()
		n.X = d.X * localN.X
		n.Y = d.Y * localN.X
		n.Z = localN.Y
	}
	return n
}
// Color returns the base surface color at p. Currently a constant light
// gray; the p parameter is unused but kept so per-position coloring can be
// added without changing callers.
func (s Scene) Color(p r2.Point) r3.Vector {
	return r3.Vector{
		X: 0.8,
		Y: 0.8,
		Z: 0.8,
	}
}
// Render rasterizes the scene into an RGBA image. For every pixel it
// computes the surface normal, sums the diffuse contribution of each
// light, and scales the base color by diffuse + ambient lighting.
func (s Scene) Render() image.Image {
	img := image.NewRGBA(image.Rect(0, 0, int(dimen.X), int(dimen.Y)))
	for i := 0; i < int(dimen.X); i++ {
		// fixed: the inner loop bound was dimen.X, which only worked
		// because the scene happens to be square; the vertical extent is
		// dimen.Y
		for j := 0; j < int(dimen.Y); j++ {
			p := r2.Point{
				X: float64(i),
				Y: float64(j),
			}
			// the 3D surface point at z = 0
			v := r3.Vector{
				X: p.X,
				Y: p.Y,
				Z: 0,
			}
			n := s.Norm(p)
			// accumulate diffuse lighting over all lights
			d := 0.0
			for _, l := range s.Lights {
				lightDir := l.Pos.Sub(v).Normalize()
				diff := max(0, n.Dot(lightDir))
				diffuse := diff * l.Color
				d += diffuse
			}
			c := s.Color(p)
			img.SetRGBA(i, j, vector2Color(c.Mul(d+ambient)))
		}
	}
	return img
}
// vector2Color converts a color vector with components in [0, 1] to an
// opaque RGBA value, clamping each channel to [0, 255].
func vector2Color(vector r3.Vector) color.RGBA {
	return color.RGBA{
		R: uint8(max(0, min(255, 255.0*vector.X))),
		G: uint8(max(0, min(255, 255.0*vector.Y))),
		B: uint8(max(0, min(255, 255.0*vector.Z))),
		A: 255,
	}
}
// min returns the smaller of two float64 values.
// (Removed dataset metadata fused onto the closing line of max.)
func min(a, b float64) float64 {
	if a < b {
		return a
	}
	return b
}

// max returns the larger of two float64 values.
func max(a, b float64) float64 {
	if a > b {
		return a
	}
	return b
}
package yelp
import (
"fmt"
"github.com/guregu/null"
)
// GeneralOptions includes a set of standard query options for using the search API.
// They are used along with a location based option to complete a search.
// Optional fields use null types so that unset values are omitted from the query.
type GeneralOptions struct {
	Term           string     // Search term (e.g. "food", "restaurants"). If term isn’t included we search everything.
	Limit          null.Int   // Number of business results to return
	Offset         null.Int   // Offset the list of returned business results by this amount
	Sort           null.Int   // Sort mode: 0=Best matched (default), 1=Distance, 2=Highest Rated. If the mode is 1 or 2 a search may retrieve an additional 20 businesses past the initial limit of the first 20 results. This is done by specifying an offset and limit of 20. Sort by distance is only supported for a location or geographic search. The rating sort is not strictly sorted by the rating value, but by an adjusted rating value that takes into account the number of ratings, similar to a bayesian average. This is so a business with 1 rating of 5 stars doesn’t immediately jump to the top.
	CategoryFilter string     // Category to filter search results with. See the list of supported categories. The category filter can be a list of comma delimited categories. For example, 'bars,french' will filter by Bars and French. The category identifier should be used (for example 'discgolf', not 'Disc Golf').
	RadiusFilter   null.Float // Search radius in meters. If the value is too large, a AREA_TOO_LARGE error may be returned. The max value is 40000 meters (25 miles).
	DealsFilter    null.Bool  // Whether to exclusively search for businesses with deals
}
// getParameters will reflect over the values of the given
// struct, and provide a type appropriate set of querystring parameters
// that match the defined values.
func (o *GeneralOptions) getParameters() (params map[string]string, err error) {
ps := make(map[string]string)
if o.Term != "" {
ps["term"] = o.Term
}
if o.Limit.Valid {
ps["limit"] = fmt.Sprintf("%v", o.Limit.Int64)
}
if o.Offset.Valid {
ps["offset"] = fmt.Sprintf("%v", o.Offset.Int64)
}
if o.Sort.Valid {
ps["sort"] = fmt.Sprintf("%v", o.Sort.Int64)
}
if o.CategoryFilter != "" {
ps["category_filter"] = o.CategoryFilter
}
if o.RadiusFilter.Valid {
ps["radius_filter"] = fmt.Sprintf("%v", o.RadiusFilter.Float64)
}
if o.DealsFilter.Valid {
ps["deals_filter"] = fmt.Sprintf("%v", o.DealsFilter.Bool)
}
return ps, nil
} | vendor/github.com/JustinBeckwith/go-yelp/yelp/general_options.go | 0.681939 | 0.460107 | general_options.go | starcoder |
package gopacket
// LayerClass is a set of LayerTypes, used for grabbing one of a number of
// different types from a packet.
type LayerClass interface {
	// Contains returns true if the given layer type should be considered part
	// of this layer class.
	Contains(LayerType) bool
	// LayerTypes returns the set of all layer types in this layer class.
	// Note that this may not be a fast operation on all LayerClass
	// implementations.
	LayerTypes() []LayerType
}

// Contains implements LayerClass: a single LayerType is the class
// containing exactly itself.
func (l LayerType) Contains(a LayerType) bool {
	return l == a
}

// LayerTypes implements LayerClass, returning the one-element set.
func (l LayerType) LayerTypes() []LayerType {
	return []LayerType{l}
}
// LayerClassSlice implements a LayerClass with a slice indexed by
// LayerType: s[t] is true iff t is in the class. Lookup is O(1); memory is
// proportional to the largest contained type.
type LayerClassSlice []bool

// Contains returns true if the given layer type should be considered part
// of this layer class.
func (s LayerClassSlice) Contains(t LayerType) bool {
	return int(t) < len(s) && s[t]
}

// LayerTypes returns all layer types in this LayerClassSlice.
// Because of LayerClassSlice's implementation, this could be quite slow:
// it scans the entire slice.
func (s LayerClassSlice) LayerTypes() (all []LayerType) {
	for i := 0; i < len(s); i++ {
		if s[i] {
			all = append(all, LayerType(i))
		}
	}
	return
}

// NewLayerClassSlice creates a new LayerClassSlice by creating a slice of
// size max(types) and setting slice[t] to true for each type t. Note, if
// you implement your own LayerType and give it a high value, this WILL create
// a very large slice.
func NewLayerClassSlice(types []LayerType) LayerClassSlice {
	var max LayerType
	for _, typ := range types {
		if typ > max {
			max = typ
		}
	}
	t := make([]bool, int(max+1))
	for _, typ := range types {
		t[typ] = true
	}
	return t
}
// LayerClassMap implements a LayerClass with a map, trading some lookup
// speed for memory proportional to the number of contained types rather
// than their maximum value.
type LayerClassMap map[LayerType]bool

// Contains returns true if the given layer type should be considered part
// of this layer class.
func (m LayerClassMap) Contains(t LayerType) bool {
	return m[t]
}

// LayerTypes returns all layer types in this LayerClassMap.
// Order is unspecified (map iteration order).
func (m LayerClassMap) LayerTypes() (all []LayerType) {
	for t := range m {
		all = append(all, t)
	}
	return
}

// NewLayerClassMap creates a LayerClassMap and sets map[t] to true for each
// type in types.
func NewLayerClassMap(types []LayerType) LayerClassMap {
	m := LayerClassMap{}
	for _, typ := range types {
		m[typ] = true
	}
	return m
}
// NewLayerClass creates a LayerClass, attempting to be smart about which type
// it creates based on which types are passed in.
func NewLayerClass(types []LayerType) LayerClass {
for _, typ := range types {
if typ > maxLayerType {
// NewLayerClassSlice could create a very large object, so instead create
// a map.
return NewLayerClassMap(types)
}
}
return NewLayerClassSlice(types)
} | vendor/github.com/google/gopacket/layerclass.go | 0.840619 | 0.456168 | layerclass.go | starcoder |
package yeelight
import (
"context"
"time"
)
// SetName method is used to name the device. The name will be stored on the device and reported in the discovery response.
func (c Client) SetName(ctx context.Context, name string) error {
	return c.rawWithOk(ctx, MethodSetName, name)
}

// SetColorTemperature method is used to change the color temperature of a smart LED.
// "value" is the target color temperature. The type is integer and the range is 1700 ~ 6500 (k).
func (c Client) SetColorTemperature(ctx context.Context, value int, effect Effect, duration time.Duration) error {
	return c.setColorTemperature(ctx, MethodSetColorTemperature, value, effect, duration)
}

// SetBackgroundColorTemperature method is used to change the color temperature of a smart LED's background light.
// "value" is the target color temperature. The type is integer and the range is 1700 ~ 6500 (k).
func (c Client) SetBackgroundColorTemperature(ctx context.Context, value int, effect Effect, duration time.Duration) error {
	return c.setColorTemperature(ctx, MethodSetBgColorTemperature, value, effect, duration)
}

// SetRGB method is used to change the color of a smart LED.
func (c Client) SetRGB(ctx context.Context, value int, effect Effect, duration time.Duration) error {
	return c.setRGB(ctx, MethodSetRGB, value, effect, duration)
}

// SetBackgroundRGB method is used to change the color of a smart LED's background light.
// "value" is the target color, whose type is integer. It should be expressed as a decimal integer ranging from 0 to 16777215 (hex: 0xFFFFFF).
func (c Client) SetBackgroundRGB(ctx context.Context, value int, effect Effect, duration time.Duration) error {
	return c.setRGB(ctx, MethodSetBgRGB, value, effect, duration)
}

// SetHSV method is used to change the color of a smart LED.
// "hue" is the target hue value, whose type is integer. It should be expressed as a decimal integer ranging from 0 to 359.
// "sat" is the target saturation value, whose type is integer. Its range is 0 to 100.
func (c Client) SetHSV(ctx context.Context, hue int, sat int, effect Effect, duration time.Duration) error {
	return c.setHSV(ctx, MethodSetHSV, hue, sat, effect, duration)
}

// SetBackgroundHSV method is used to change the color of a smart LED's background light.
// "hue" is the target hue value, whose type is integer. It should be expressed as a decimal integer ranging from 0 to 359.
// "sat" is the target saturation value, whose type is integer. Its range is 0 to 100.
func (c Client) SetBackgroundHSV(ctx context.Context, hue int, sat int, effect Effect, duration time.Duration) error {
	return c.setHSV(ctx, MethodSetBgHSV, hue, sat, effect, duration)
}

// SetBright method is used to change the brightness of a smart LED.
// "brightness" is the target brightness. The type is integer and ranges from 1 to 100.
// The brightness is a percentage instead of an absolute value. 100 means maximum brightness while 1 means the minimum brightness.
func (c Client) SetBright(ctx context.Context, brightness int, effect Effect, duration time.Duration) error {
	return c.setBright(ctx, MethodSetBright, brightness, effect, duration)
}

// SetBackgroundBright method is used to change the brightness of a smart LED's background light.
// "brightness" is the target brightness. The type is integer and ranges from 1 to 100.
// The brightness is a percentage instead of an absolute value. 100 means maximum brightness while 1 means the minimum brightness.
func (c Client) SetBackgroundBright(ctx context.Context, brightness int, effect Effect, duration time.Duration) error {
	return c.setBright(ctx, MethodSetBgBright, brightness, effect, duration)
}
// SetDefault method is used to save the current state of the smart LED in persistent memory.
// So if the user powers off and then powers on the smart LED again (hard power reset), the smart LED will show the last saved state.
func (c Client) SetDefault(ctx context.Context) error {
	return c.rawWithOk(ctx, MethodSetDefault)
}

// SetBackgroundDefault method is used to save the current state of the background light in persistent memory.
// So if the user powers off and then powers on the smart LED again (hard power reset), the smart LED will show the last saved state.
func (c Client) SetBackgroundDefault(ctx context.Context) error {
	return c.rawWithOk(ctx, MethodBgSetDefault)
}

// SetMusic method is used to start or stop music mode on a device. Under music mode, no property will be reported and no message quota is checked.
// "musicHost" is the IP address of the music server.
// "port" is the TCP port the music application is listening on.
func (c Client) SetMusic(ctx context.Context, on bool, musicHost string, port int) error {
	if on {
		return c.rawWithOk(ctx, MethodSetMusic, 1, musicHost, port)
	}
	return c.rawWithOk(ctx, MethodSetMusic, 0)
}
// setColorTemperature validates the duration and temperature, then issues
// the given method with (value, effect, duration-in-ms) parameters.
func (c Client) setColorTemperature(ctx context.Context, method string, value int, effect Effect, duration time.Duration) error {
	if err := ValidateDuration(duration); err != nil {
		return err
	}

	if err := ValidateColorTemperature(value); err != nil {
		return err
	}

	return c.rawWithOk(ctx, method, value, effect, duration.Milliseconds())
}

// setRGB validates the duration and RGB value, then issues the given
// method with (value, effect, duration-in-ms) parameters.
func (c Client) setRGB(ctx context.Context, method string, value int, effect Effect, duration time.Duration) error {
	if err := ValidateDuration(duration); err != nil {
		return err
	}

	if err := ValidateRGB(value); err != nil {
		return err
	}

	return c.rawWithOk(ctx, method, value, effect, duration.Milliseconds())
}

// setHSV validates the duration, hue and saturation, then issues the given
// method with (hue, sat, effect, duration-in-ms) parameters.
func (c Client) setHSV(ctx context.Context, method string, hue int, sat int, effect Effect, duration time.Duration) error {
	if err := ValidateDuration(duration); err != nil {
		return err
	}

	if err := ValidateHue(hue); err != nil {
		return err
	}

	if err := ValidateSat(sat); err != nil {
		return err
	}

	return c.rawWithOk(ctx, method, hue, sat, effect, duration.Milliseconds())
}
func (c Client) setBright(ctx context.Context, method string, brightness int, effect Effect, duration time.Duration) error {
if err := ValidateDuration(duration); err != nil {
return err
}
if err := ValidateBright(brightness); err != nil {
return err
}
return c.rawWithOk(ctx, method, brightness, effect, duration.Milliseconds())
} | set.go | 0.830216 | 0.438605 | set.go | starcoder |
package slices
import (
"golang.org/x/exp/constraints"
"golang.org/x/exp/slices"
)
// This file contains the new functions that do not live in the official slices package.
// Some reports whether at least one element of the slice satisfies test.
// It is false for an empty slice.
func Some[T any](slice []T, test func(T) bool) bool {
	for i := range slice {
		if test(slice[i]) {
			return true
		}
	}
	return false
}
// Every reports whether all elements of the slice satisfy test.
// It is vacuously true for an empty slice.
func Every[T any](slice []T, test func(T) bool) bool {
	for i := range slice {
		if !test(slice[i]) {
			return false
		}
	}
	return true
}
// Map applies f to each element.
// Produces a new slice, leaves the input slice untouched.
func Map[T any, V any](slice []T, f func(T) V) []V {
	result := make([]V, 0, len(slice))
	for _, value := range slice {
		result = append(result, f(value))
	}
	return result
}

// MapWithIndex applies f to each element together with its index.
// Produces a new slice, leaves the input slice untouched.
func MapWithIndex[T any, V any](slice []T, f func(T, int) V) []V {
	result := make([]V, 0, len(slice))
	for i, value := range slice {
		result = append(result, f(value, i))
	}
	return result
}

// TryMap is Map for fallible mappers: it stops and returns the first
// error encountered, discarding partial results.
func TryMap[T any, V any](slice []T, f func(T) (V, error)) ([]V, error) {
	result := make([]V, 0, len(slice))
	for _, value := range slice {
		output, err := f(value)
		if err != nil {
			return nil, err
		}
		result = append(result, output)
	}
	return result, nil
}

// TryMapWithIndex is MapWithIndex for fallible mappers: it stops and
// returns the first error encountered, discarding partial results.
func TryMapWithIndex[T any, V any](slice []T, f func(T, int) (V, error)) ([]V, error) {
	result := make([]V, 0, len(slice))
	for i, value := range slice {
		output, err := f(value, i)
		if err != nil {
			return nil, err
		}
		result = append(result, output)
	}
	return result, nil
}
// FlatMap maps each element to a slice and concatenates the results.
// Produces a new slice, leaves the input slice untouched.
func FlatMap[T any, V any](slice []T, f func(T) []V) []V {
	// impossible to know how long this slice will be in the end but the length
	// of the original slice is the lower bound
	result := make([]V, 0, len(slice))
	for _, value := range slice {
		result = append(result, f(value)...)
	}
	return result
}

// FlatMapWithIndex is FlatMap with the element index passed to f.
func FlatMapWithIndex[T any, V any](slice []T, f func(T, int) []V) []V {
	// impossible to know how long this slice will be in the end but the length
	// of the original slice is the lower bound
	result := make([]V, 0, len(slice))
	for i, value := range slice {
		result = append(result, f(value, i)...)
	}
	return result
}

// Flatten concatenates all sub-slices into one new slice.
// The initial capacity is only the number of sub-slices (a lower bound);
// append grows it as needed.
func Flatten[T any](slice [][]T) []T {
	result := make([]T, 0, len(slice))
	for _, subSlice := range slice {
		result = append(result, subSlice...)
	}
	return result
}

// MapInPlace replaces each element with f applied to it, mutating the
// input slice.
func MapInPlace[T any](slice []T, f func(T) T) {
	for i, value := range slice {
		slice[i] = f(value)
	}
}
// Filter keeps the elements that satisfy test, preserving order.
// Produces a new slice, leaves the input slice untouched.
func Filter[T any](slice []T, test func(T) bool) []T {
	result := make([]T, 0)
	for _, element := range slice {
		if test(element) {
			result = append(result, element)
		}
	}
	return result
}

// FilterWithIndex is Filter with the element index passed to f.
// Produces a new slice, leaves the input slice untouched.
func FilterWithIndex[T any](slice []T, f func(T, int) bool) []T {
	result := make([]T, 0, len(slice))
	for i, value := range slice {
		if f(value, i) {
			result = append(result, value)
		}
	}
	return result
}

// TryFilter is Filter for fallible predicates: it stops and returns the
// first error encountered, discarding partial results.
func TryFilter[T any](slice []T, test func(T) (bool, error)) ([]T, error) {
	result := make([]T, 0)
	for _, element := range slice {
		ok, err := test(element)
		if err != nil {
			return nil, err
		}
		if ok {
			result = append(result, element)
		}
	}
	return result, nil
}

// TryFilterWithIndex is TryFilter with the element index passed to test.
func TryFilterWithIndex[T any](slice []T, test func(T, int) (bool, error)) ([]T, error) {
	result := make([]T, 0)
	for i, element := range slice {
		ok, err := test(element, i)
		if err != nil {
			return nil, err
		}
		if ok {
			result = append(result, element)
		}
	}
	return result, nil
}

// FilterInPlace keeps matching elements by compacting them into the front
// of the same backing array.
// Mutates original slice. Intended usage is to reassign the slice result to the input slice.
func FilterInPlace[T any](slice []T, test func(T) bool) []T {
	newLength := 0
	for _, element := range slice {
		if test(element) {
			slice[newLength] = element
			newLength++
		}
	}
	return slice[:newLength]
}
// Reverse returns a new slice with the elements in opposite order; the
// input slice is left untouched.
func Reverse[T any](slice []T) []T {
	n := len(slice)
	result := make([]T, n)
	for i, v := range slice {
		result[n-1-i] = v
	}
	return result
}

// ReverseInPlace reverses the slice by swapping elements from both ends
// toward the middle.
func ReverseInPlace[T any](slice []T) {
	left, right := 0, len(slice)-1
	for left < right {
		slice[left], slice[right] = slice[right], slice[left]
		left++
		right--
	}
}
// FilterMap maps each element and keeps only the results for which the
// mapper returned ok. Produces a new slice, leaves the input slice untouched.
func FilterMap[T any, E any](slice []T, test func(T) (E, bool)) []E {
	result := make([]E, 0, len(slice))
	for _, element := range slice {
		mapped, ok := test(element)
		if ok {
			result = append(result, mapped)
		}
	}
	return result
}

// FilterMapWithIndex is FilterMap with the element index passed to test.
func FilterMapWithIndex[T any, E any](slice []T, test func(T, int) (E, bool)) []E {
	result := make([]E, 0, len(slice))
	for i, element := range slice {
		mapped, ok := test(element, i)
		if ok {
			result = append(result, mapped)
		}
	}
	return result
}

// TryFilterMap is FilterMap for fallible mappers: it stops and returns
// the first error encountered, discarding partial results.
func TryFilterMap[T any, E any](slice []T, test func(T) (E, bool, error)) ([]E, error) {
	result := make([]E, 0, len(slice))
	for _, element := range slice {
		mapped, ok, err := test(element)
		if err != nil {
			return nil, err
		}
		if ok {
			result = append(result, mapped)
		}
	}
	return result, nil
}

// TryFilterMapWithIndex is TryFilterMap with the element index passed to test.
func TryFilterMapWithIndex[T any, E any](slice []T, test func(T, int) (E, bool, error)) ([]E, error) {
	result := make([]E, 0, len(slice))
	for i, element := range slice {
		mapped, ok, err := test(element, i)
		if err != nil {
			return nil, err
		}
		if ok {
			result = append(result, mapped)
		}
	}
	return result, nil
}
// Prepends items to the beginning of a slice.
// E.g. Prepend([]int{1,2}, 3, 4) = []int{3,4,1,2}
// Returns a new slice; the input slice is never written to (append copies
// into the freshly allocated variadic backing array). The old comment
// claiming it "mutates the original slice" was incorrect.
func Prepend[T any](slice []T, values ...T) []T {
	return append(values, slice...)
}

// Removes the element at the given index. Intended usage is to reassign the result to the input slice.
func Remove[T any](slice []T, index int) []T {
	return slices.Delete(slice, index, index+1)
}

// Removes the element at the 'fromIndex' and then inserts it at 'toIndex'.
// Operates on the input slice. Expected use is to reassign the result to the input slice.
func Move[T any](slice []T, fromIndex int, toIndex int) []T {
	item := slice[fromIndex]
	slice = Remove(slice, fromIndex)
	return slices.Insert(slice, toIndex, item)
}

// Swaps two elements at the given indices.
// Operates on the input slice.
func Swap[T any](slice []T, index1 int, index2 int) {
	slice[index1], slice[index2] = slice[index2], slice[index1]
}

// Similar to Append but we leave the original slice untouched and return a new slice
func Concat[T any](slice []T, values ...T) []T {
	newSlice := make([]T, 0, len(slice)+len(values))
	newSlice = append(newSlice, slice...)
	newSlice = append(newSlice, values...)
	return newSlice
}

// ContainsFunc reports whether any element satisfies f (delegates to IndexFunc).
func ContainsFunc[T any](slice []T, f func(T) bool) bool {
	return IndexFunc(slice, f) != -1
}
// Pops item from the end of the slice and returns it, along with the updated slice.
// The returned slice shares the input's backing array; intended usage is to
// reassign the slice result to the input slice. Panics on an empty slice.
func Pop[T any](slice []T) (T, []T) {
	index := len(slice) - 1
	value := slice[index]
	slice = slice[0:index]
	return value, slice
}

// Shifts item from the beginning of the slice and returns it, along with the updated slice.
// The returned slice shares the input's backing array; intended usage is to
// reassign the slice result to the input slice. Panics on an empty slice.
func Shift[T any](slice []T) (T, []T) {
	value := slice[0]
	slice = slice[1:]
	return value, slice
}
// Partition splits slice into two new slices: elements satisfying test,
// then the rest, with relative order preserved in both.
func Partition[T any](slice []T, test func(T) bool) ([]T, []T) {
	passed := make([]T, 0, len(slice))
	failed := make([]T, 0, len(slice))
	for _, value := range slice {
		if !test(value) {
			failed = append(failed, value)
			continue
		}
		passed = append(passed, value)
	}
	return passed, failed
}
// MaxBy returns the largest value produced by applying f to each element.
// The zero value of V is returned for an empty slice.
func MaxBy[T any, V constraints.Ordered](slice []T, f func(T) V) V {
	var best V
	if len(slice) == 0 {
		return best
	}
	best = f(slice[0])
	for _, element := range slice[1:] {
		if candidate := f(element); candidate > best {
			best = candidate
		}
	}
	return best
}
// MinBy returns the smallest value produced by applying f to each element.
// The zero value of V is returned for an empty slice.
func MinBy[T any, V constraints.Ordered](slice []T, f func(T) V) V {
	var best V
	if len(slice) == 0 {
		return best
	}
	best = f(slice[0])
	for _, element := range slice[1:] {
		if candidate := f(element); candidate < best {
			best = candidate
		}
	}
	return best
}
// Find returns the first element satisfying f, and whether one was found.
// The zero value of T is returned when nothing matches.
func Find[T any](slice []T, f func(T) bool) (T, bool) {
	for _, candidate := range slice {
		if f(candidate) {
			return candidate, true
		}
	}
	var none T
	return none, false
}
// FindMap combines find-and-transform: f is applied to each element in
// order, and the first mapped value it accepts is returned. This is useful
// when the mapping reuses information obtained while matching.
// The zero value of V is returned when no element is accepted.
func FindMap[T any, V any](slice []T, f func(T) (V, bool)) (V, bool) {
	for _, element := range slice {
		mapped, ok := f(element)
		if ok {
			return mapped, true
		}
	}
	var none V
	return none, false
}
// ForEach invokes f on every element of slice, in order.
func ForEach[T any](slice []T, f func(T)) {
	for i := range slice {
		f(slice[i])
	}
}
// ForEachWithIndex invokes f on every (element, index) pair, in order.
func ForEachWithIndex[T any](slice []T, f func(T, int)) {
	for i := range slice {
		f(slice[i], i)
	}
}
func TryForEach[T any](slice []T, f func(T) error) error {
for _, element := range slice {
if err := f(element); err != nil {
return err
}
}
return nil
}
func TryForEachWithIndex[T any](slice []T, f func(T, int) error) error {
for i, element := range slice {
if err := f(element, i); err != nil {
return err
}
}
return nil
}
// Sum returns the result of adding all values of i together with +=;
// the zero value of T is returned for an empty slice.
func Sum[T constraints.Ordered](i []T) T {
	var total T
	for _, value := range i {
		total += value
	}
	return total
}
// zero returns the zero value of type T.
func zero[T any]() T {
	var value T
	return value
} | vendor/github.com/jesseduffield/generics/slices/slices.go | 0.791741 | 0.554712 | slices.go | starcoder
package lsh
import (
"bytes"
"encoding/gob"
"errors"
"gonum.org/v1/gonum/blas/blas64"
"math"
"math/rand"
"sync"
"time"
)
// Validation errors used by the hasher.
// NOTE(review): the sites that return dimensionsNumberErr are not visible in
// this chunk.
var (
	dimensionsNumberErr     = errors.New("dimensions number must be a positive integer")
	hasherEmptyInstancesErr = errors.New("hasher must contain at least one instance")
)

// plane represents a hyperplane by its normal vector n and offset d,
// i.e. the set of points x with dot(x, n) == d.
type plane struct {
	n blas64.Vector
	d float64
}

// getProductSign reports which side of the plane vec lies on:
// true when dot(vec, n) - d is negative, false otherwise.
func (p *plane) getProductSign(vec blas64.Vector) bool {
	prod := blas64.Dot(vec, p.n) - p.d
	prodSign := math.Signbit(prod) // NOTE: returns true if product < 0
	return prodSign
}

// treeNode is a node of a binary space-partitioning tree: each node holds a
// splitting plane, with children for either side of the plane.
type treeNode struct {
	left  *treeNode
	right *treeNode
	plane *plane
}
// traverse walks the plane tree from node, setting bit `depth` of hash and
// descending left whenever the vector falls on the negative side of the
// node's plane, otherwise descending right, until a leaf is reached.
func traverse(node *treeNode, hash uint64, inpVec blas64.Vector, depth int) uint64 {
	for node != nil && node.plane != nil {
		if node.plane.getProductSign(inpVec) {
			hash |= 1 << depth
			node = node.left
		} else {
			node = node.right
		}
		depth++
	}
	return hash
}

// getHash calculates the LSH code of vec for this tree.
func (node *treeNode) getHash(vec blas64.Vector) uint64 {
	return traverse(node, 0, vec, 0)
}
// HasherConfig carries the forest-building parameters.
type HasherConfig struct {
	NTrees          int  // number of trees in the forest
	KMinVecs        int  // stop splitting once a side holds <= KMinVecs vectors
	Dims            int  // dimensionality of the indexed vectors
	isAngularMetric bool // normalize vectors when building/querying (angular distance)
}

// Hasher holds NTrees independent random-projection trees.
type Hasher struct {
	mutex  sync.RWMutex // guards trees
	Config HasherConfig
	trees  []*treeNode
}

// NewHasher creates a Hasher with an empty forest of the configured size;
// call build to populate it.
func NewHasher(config HasherConfig) *Hasher {
	return &Hasher{
		Config: config,
		trees:  make([]*treeNode, config.NTrees),
	}
}

// safeHashesHolder is a mutex-protected map used to collect per-tree hashes
// from concurrent goroutines.
type safeHashesHolder struct {
	sync.Mutex
	v map[int]uint64
}
// planeByPoints builds the plane through the midpoint of the two given
// points, normal to the vector from the midpoint to the second point
// (i.e. the perpendicular-bisector hyperplane).
func planeByPoints(points []blas64.Vector, ndims int) *plane {
	planeCoefs := &plane{}
	// Midpoint: 0.5*p0 + 0.5*p1.
	centerPoint := NewVec(make([]float64, ndims))
	for _, p := range points {
		blas64.Axpy(0.5, p, centerPoint)
	}
	// Normal: p1 - midpoint.
	planeCoefs.n = NewVec(make([]float64, ndims))
	blas64.Copy(points[1], planeCoefs.n)
	blas64.Axpy(-1.0, centerPoint, planeCoefs.n)
	// Offset so the plane passes through the midpoint.
	planeCoefs.d = blas64.Dot(centerPoint, planeCoefs.n)
	return planeCoefs
}

// getRandomPlane picks two vectors at distinct random indices of vecs and
// returns the perpendicular-bisector plane between them; for the angular
// metric the pair is normalized first.
// NOTE(review): the selection loop only advances on a previously unseen
// index, so it does not terminate when vecs has fewer than two entries;
// growTree only calls it with len(vecs) >= 2 — confirm no other callers.
func getRandomPlane(vecs [][]float64, isAngular bool) *plane {
	randIndeces := make(map[int]bool)
	randVecs := make([]blas64.Vector, 2)
	norms := make([]float64, 2)
	ndims := len(vecs[0])
	var i int = 0
	maxPoints := 2
	for i < maxPoints && i < len(vecs)*3 {
		idx := rand.Intn(len(vecs))
		if _, has := randIndeces[idx]; !has {
			randIndeces[idx] = true
			randVecs[i] = NewVec(vecs[idx])
			norms[i] = blas64.Nrm2(randVecs[i])
			i++
		}
	}
	// Order the pair so the longer vector comes second (it defines the normal).
	if norms[0] > norms[1] {
		randVecs[0], randVecs[1] = randVecs[1], randVecs[0]
		norms[0], norms[1] = norms[1], norms[0]
	}
	// NOTE: normalize vectors when dealing with the angular distance metric,
	// so the bisector separates by direction rather than magnitude.
	if isAngular {
		normedVecs := make([]blas64.Vector, len(randVecs))
		for i, vec := range randVecs {
			normedVec := NewVec(make([]float64, ndims))
			norm := norms[i]
			if norm > tol {
				blas64.Axpy(1/norm, vec, normedVec)
			}
			normedVecs[i] = normedVec
		}
		return planeByPoints(normedVecs, ndims)
	}
	return planeByPoints(randVecs, ndims)
}
// growTree recursively splits vecs with random planes, storing the chosen
// plane at each node. Recursion stops past depth 63 (hashes are packed one
// bit per level into a uint64) or when fewer than 2 vectors remain.
func growTree(vecs [][]float64, node *treeNode, depth int, config HasherConfig) {
	if depth > 63 || len(vecs) < 2 { // NOTE: depth <= 63 since we will use 8 byte int to store a hash
		return
	}
	node.plane = getRandomPlane(vecs, config.isAngularMetric)
	// Partition the vectors by the side of the plane they fall on.
	var l, r [][]float64
	for _, v := range vecs {
		inpVec := NewVec(v)
		sign := node.plane.getProductSign(inpVec)
		if !sign {
			r = append(r, v)
			continue
		}
		l = append(l, v)
	}
	depth++
	// Only keep splitting sides that still hold more than KMinVecs vectors.
	if len(r) > config.KMinVecs {
		node.right = &treeNode{}
		growTree(r, node.right, depth, config)
	}
	if len(l) > config.KMinVecs {
		node.left = &treeNode{}
		growTree(l, node.left, depth, config)
	}
}

// buildTree creates the set of planes which will be used to calculate a hash.
// NOTE(review): rand.Seed is deprecated since Go 1.20, and this function is
// invoked concurrently from build — near-simultaneous calls reseed the shared
// source with near-identical UnixNano values. Consider seeding once at
// startup (or relying on Go 1.20+ auto-seeding) instead.
func buildTree(vecs [][]float64, config HasherConfig) *treeNode {
	rand.Seed(time.Now().UnixNano())
	tree := &treeNode{}
	growTree(vecs, tree, 0, config)
	return tree
}
// build constructs NTrees random-projection trees concurrently and swaps the
// finished forest into the hasher under the write lock.
func (hasher *Hasher) build(vecs [][]float64) {
	hasher.mutex.Lock()
	defer hasher.mutex.Unlock()
	trees := make([]*treeNode, hasher.Config.NTrees)
	wg := sync.WaitGroup{}
	wg.Add(len(trees))
	for i := 0; i < hasher.Config.NTrees; i++ {
		go func(i int, wg *sync.WaitGroup) {
			defer wg.Done()
			tmpTree := buildTree(vecs, hasher.Config)
			// Each goroutine writes a distinct slice index, so no extra
			// synchronization is needed here.
			trees[i] = tmpTree
		}(i, &wg)
	}
	wg.Wait()
	hasher.trees = trees
}
// getHashes returns the LSH code of inpVec for every tree in the forest,
// keyed by tree index.
func (hasher *Hasher) getHashes(inpVec []float64) map[int]uint64 {
	hasher.mutex.RLock()
	defer hasher.mutex.RUnlock()
	vec := NewVec(make([]float64, len(inpVec)))
	copy(vec.Data, inpVec)
	// NOTE: normalize the vector for the angular metric, since normalized
	// vectors were used for plane generation in that case.
	if hasher.Config.isAngularMetric {
		normed := NewVec(make([]float64, len(inpVec)))
		norm := blas64.Nrm2(vec)
		if norm > tol {
			blas64.Axpy(1/norm, vec, normed)
			blas64.Copy(normed, vec)
		}
	}
	hashes := &safeHashesHolder{v: make(map[int]uint64)}
	wg := sync.WaitGroup{}
	wg.Add(len(hasher.trees))
	for i, tree := range hasher.trees {
		go func(i int, tree *treeNode, hashes *safeHashesHolder) {
			defer wg.Done()
			// Compute the hash BEFORE taking the lock: holding the mutex
			// around getHash serialized the goroutines, defeating the point
			// of hashing the trees concurrently. getHash only reads vec and
			// the (immutable after build) tree, so concurrent computation is
			// safe; the lock now only guards the map write.
			h := tree.getHash(vec)
			hashes.Lock()
			hashes.v[i] = h
			hashes.Unlock()
		}(i, tree, hashes)
	}
	wg.Wait()
	return hashes.v
}
// dump encodes the Hasher as a gob byte slice.
// NOTE(review): gob serializes only exported fields, so the unexported
// `trees` forest is NOT encoded — only Config survives a dump/load round
// trip. Verify this is intended.
func (hasher *Hasher) dump() ([]byte, error) {
	hasher.mutex.RLock()
	defer hasher.mutex.RUnlock()
	if len(hasher.trees) == 0 {
		return nil, hasherEmptyInstancesErr
	}
	buf := &bytes.Buffer{}
	enc := gob.NewEncoder(buf)
	err := enc.Encode(hasher)
	if err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

// load decodes a Hasher previously produced by dump into the receiver,
// replacing its exported fields under the write lock.
func (hasher *Hasher) load(inp []byte) error {
	hasher.mutex.Lock()
	defer hasher.mutex.Unlock()
	buf := &bytes.Buffer{}
	buf.Write(inp)
	dec := gob.NewDecoder(buf)
	err := dec.Decode(&hasher)
	if err != nil {
		return err
	}
	return nil
} | lsh/hasher.go | 0.68742 | 0.438364 | hasher.go | starcoder
package main
import (
"log"
"time"
)
// Visibility classifies a tile's exposure to the player.
type Visibility int

const (
	Unseen  Visibility = iota // never been visible
	Visible                   // visible this recomputation
	Seen                      // visible on some earlier recomputation
)

// VisionMap records, per tile, the value of Current at the moment the tile
// was last visible. Current is bumped once per recomputation, so a tile is
// Visible iff its stamp equals Current and Unseen iff its stamp is still 0.
type VisionMap struct {
	Columns int
	Rows    int
	Current int64   // monotonically increasing generation counter
	Map     []int64 // row-major stamps: index y*Columns + x
}

// VisibilityAt translates the generation stamp stored at (x, y) into a
// Visibility value.
func (vision VisionMap) VisibilityAt(x int, y int) Visibility {
	switch vision.lastSeenAt(x, y) {
	case vision.Current:
		return Visible
	case 0:
		return Unseen
	default:
		return Seen
	}
}

// lastSeenAt returns the generation stamp recorded for tile (x, y).
func (vision VisionMap) lastSeenAt(x int, y int) int64 {
	return vision.Map[y*vision.Columns+x]
}
// UpdateVision recomputes tile visibility within viewDistance of the player,
// stamping every tile the player can currently see with a fresh generation.
func (vision *VisionMap) UpdateVision(viewDistance int, world *World) {
	defer timeMe(time.Now(), "VisionMap.UpdateVision")
	playerX := world.Player.X
	playerY := world.Player.Y
	// Clamp the scan window to the map bounds.
	// NOTE(review): despite the original comment about going "beyond the
	// min/max", the window is exactly viewDistance wide — confirm intended.
	minX := max(playerX-viewDistance, 0)
	maxX := min(playerX+viewDistance, vision.Columns)
	minY := max(playerY-viewDistance, 0)
	maxY := min(playerY+viewDistance, vision.Rows)
	vision.Current++
	current := vision.Current
	for y := minY; y < maxY; y++ {
		for x := minX; x < maxX; x++ {
			previousVision := vision.lastSeenAt(x, y)
			// CheckVision may already have stamped this tile while tracing
			// an earlier line; skip it in that case.
			if previousVision == current {
				continue
			}
			newVision := previousVision
			if vision.CheckVision(playerX, playerY, x, y, world) {
				newVision = current
			}
			vision.Map[y*vision.Columns+x] = newVision
		}
	}
}

// CheckVision traces a line from the player to the candidate tile, stamping
// every tile along the way as currently visible up to and including the
// first wall. It returns false only when a wall strictly precedes another
// cell on the line, i.e. the candidate is hidden behind a wall.
func (vision *VisionMap) CheckVision(playerX int, playerY int, candidateX int, candidateY int, world *World) bool {
	cells := PlotLine(playerX, playerY, candidateX, candidateY)
	foundWall := false
	for _, cell := range cells {
		if foundWall {
			return false
		}
		// Either a wall or on the way to a wall, so we can see it.
		vision.Map[cell.Y*vision.Columns+cell.X] = vision.Current
		tile := world.CurrentLevel.GetTile(cell.X, cell.Y)
		if tile.IsWall() {
			foundWall = true
		}
	}
	return true
}
// NewVisionMap allocates a VisionMap of the given dimensions with every
// tile initially unseen (stamp 0).
func NewVisionMap(columns int, rows int) *VisionMap {
	vm := VisionMap{
		Columns: columns,
		Rows:    rows,
		Map:     make([]int64, columns*rows),
	}
	return &vm
}
// PlotLine returns the grid cells of the line from (x0, y0) to (x1, y1)
// using Bresenham's algorithm: the segment is first mapped into octant 0
// (shallow, increasing x and y), traced, and each cell mapped back.
func PlotLine(x0 int, y0 int, x1 int, y1 int) []Position {
	octant := computeOctant(x0, y0, x1, y1)
	x0, y0 = toOctantZero(octant, x0, y0)
	x1, y1 = toOctantZero(octant, x1, y1)
	dx := x1 - x0
	dy := y1 - y0
	// d is the scaled Bresenham error term; y steps when it goes positive.
	d := 2*dy - dx
	y := y0
	coordinates := make([]Position, 0)
	for x := x0; x <= x1; x++ {
		correctedX, correctedY := fromOctantZero(octant, x, y)
		coordinates = append(coordinates, Position{X: correctedX, Y: correctedY})
		if d > 0 {
			y++
			d -= (2 * dx)
		}
		d += (2 * dy)
	}
	return coordinates
}
// computeOctant returns which of the eight octants (0-7, counterclockwise
// starting at shallow down-right) the segment from (x0, y0) to (x1, y1)
// falls in, based on the signs and relative magnitudes of its deltas.
func computeOctant(x0 int, y0 int, x1 int, y1 int) int {
	dx, dy := x1-x0, y1-y0
	switch {
	case dx > 0 && dy > 0:
		if dy < dx {
			return 0
		}
		return 1
	case dx > 0: // dy <= 0
		if -dy < dx {
			return 7
		}
		return 6
	case dy > 0: // dx <= 0
		if dy < -dx {
			return 3
		}
		return 2
	default: // dx <= 0 && dy <= 0
		if -dy < -dx {
			return 4
		}
		return 5
	}
}
// toOctantZero maps a point from the given octant into octant 0 so that
// Bresenham can always iterate with increasing x and shallow slope.
// fromOctantZero (below) applies the inverse transform.
func toOctantZero(octant int, x int, y int) (int, int) {
	switch octant {
	case 0:
		return x, y
	case 1:
		return y, x
	case 2:
		return y, -x
	case 3:
		return -x, y
	case 4:
		return -x, -y
	case 5:
		return -y, -x
	case 6:
		return -y, x
	case 7:
		return x, -y
	}
	log.Fatalf("Received invalid octant, %v for (%v,%v)", octant, x, y)
	return x, y // Unreachable
}

// fromOctantZero maps a point in octant 0 back into the given octant;
// it is the inverse of toOctantZero for each octant value.
func fromOctantZero(octant int, x int, y int) (int, int) {
	switch octant {
	case 0:
		return x, y
	case 1:
		return y, x
	case 2:
		return -y, x
	case 3:
		return -x, y
	case 4:
		return -x, -y
	case 5:
		return -y, -x
	case 6:
		return y, -x
	case 7:
		return x, -y
	}
	log.Fatalf("Received invalid octant, %v for (%v,%v)", octant, x, y)
	return x, y // Unreachable
} | example/muncher/fov.go | 0.648911 | 0.495789 | fov.go | starcoder
package fsm
import (
mtx "github.com/skelterjohn/go.matrix"
"strconv"
"strings"
)
// StateMachine models a finite state machine with
// a set of states and transitions.
type StateMachine struct {
	States      []State
	Transitions []Transition
}

// State is a machine state. It may carry a comma-separated list of
// proposition labels and may be marked as an initial state.
type State struct {
	ID      int
	Label   string // comma-separated propositions, "" when unlabeled
	Initial bool
}

// Transition is a directed arc from one state ID to another.
type Transition struct {
	from int
	to   int
}
// Parse parses the lines of a .fsm file (std-in) and returns the described
// StateMachine together with the list of property computations to run on it.
// Recognized sections: "STATES n", "INIT" (state IDs, one per line),
// "ARCS" ("from:to" pairs until a blank line), "LABEL name" (state IDs),
// and "PROPERTIES" (every remaining line, verbatim).
// Malformed numbers are silently treated as 0 (conversion errors ignored).
func Parse(lines []string) (StateMachine, []string) {
	states := []State{}
	transitions := []Transition{}
	computations := []string{}
	var numStates int
	for i, line := range lines {
		if len(line) > 5 && line[0:6] == "STATES" {
			// "STATES n": allocate n unlabeled states with IDs 0..n-1.
			strStates := strings.Split(line, " ")[1]
			numStates, _ = strconv.Atoi(strStates)
			states = createStates(numStates)
		} else if len(line) > 3 && line[0:4] == "INIT" {
			// Following integer lines name the initial states.
			curLine := i + 1
			for isInt(lines[curLine]) {
				state, _ := strconv.Atoi(lines[curLine])
				states = markInitial(states, state)
				curLine++
			}
		} else if len(line) > 3 && line[0:4] == "ARCS" {
			// "from:to" lines, until a blank line; out-of-range IDs are dropped.
			curLine := i + 1
			for curLine < len(lines) && lines[curLine] != "" {
				from, _ := strconv.Atoi(strings.Split(lines[curLine], ":")[0])
				to, _ := strconv.Atoi(strings.Split(lines[curLine], ":")[1])
				if from < numStates && to < numStates {
					transitions = append(transitions, Transition{from, to})
				}
				curLine++
			}
		} else if len(line) > 4 && line[0:5] == "LABEL" {
			// "LABEL name" followed by the IDs of the states carrying it.
			curLine := i
			label := strings.Split(lines[curLine], " ")[1]
			for curLine+1 < len(lines) && isInt(lines[curLine+1]) {
				state, _ := strconv.Atoi(lines[curLine+1])
				states = labelState(states, label, state)
				curLine++
			}
		} else if len(line) > 9 && line[0:10] == "PROPERTIES" {
			// Everything after PROPERTIES is a computation request.
			curLine := i + 1
			for curLine < len(lines) {
				computations = append(computations, lines[curLine])
				curLine++
			}
		}
	}
	return StateMachine{states, transitions}, computations
}
// ToMatrix returns the machine's adjacency matrix as a sparse matrix:
// entry (to, from) is 1 when a transition from `from` to `to` exists.
func (sm *StateMachine) ToMatrix() *mtx.SparseMatrix {
	elems := make(map[int]float64)
	matrix := mtx.MakeSparseMatrix(elems, len(sm.States), len(sm.States))
	for _, t := range sm.Transitions {
		matrix.Set(t.to, t.from, 1)
	}
	return matrix
}
// HasLabel reports whether label is one of the comma-separated labels
// attached to the state.
func (s *State) HasLabel(label string) bool {
	labels := strings.Split(s.Label, ",")
	for _, l := range labels {
		if l == label {
			// Previously returned the tautological `l == label` here;
			// inside this branch the comparison is always true.
			return true
		}
	}
	return false
}
// createStates returns states with IDs 0..numStates-1, all unlabeled and
// non-initial.
func createStates(numStates int) []State {
	states := make([]State, 0, numStates)
	for i := 0; i < numStates; i++ {
		states = append(states, State{ID: i})
	}
	return states
}

// markInitial flags the state with ID initState as an initial state.
func markInitial(states []State, initState int) []State {
	for i := range states {
		if states[i].ID == initState {
			states[i].Initial = true
		}
	}
	return states
}

// labelState appends label to the comma-separated label list of the state
// with the given ID.
func labelState(states []State, label string, state int) []State {
	for i := range states {
		if states[i].ID != state {
			continue
		}
		if states[i].Label == "" {
			states[i].Label = label
		} else {
			states[i].Label = states[i].Label + "," + label
		}
	}
	return states
}
// isInt reports whether state parses as a base-10 integer.
func isInt(state string) bool {
	_, err := strconv.Atoi(state)
	return err == nil
}
// Satisfies reports (as 1 or 0) whether every initial state of the machine
// is marked in the result row vector res, i.e. res[0][i] == 1 for each
// initial state with index i.
func (fsm StateMachine) Satisfies(res *mtx.SparseMatrix) int {
	for i, state := range fsm.States {
		if state.Initial {
			if !(res.Get(0, i) == 1) {
				return 0
			}
		}
	}
	return 1
} | fsm/fsm.go | 0.60743 | 0.512754 | fsm.go | starcoder
package lunar
import (
"time"
c "github.com/rtovey/astro-lib/common"
o "github.com/rtovey/astro-lib/orbit"
s "github.com/rtovey/astro-lib/solar"
t "github.com/rtovey/astro-lib/time"
)
// Mean orbital elements of the Moon at the reference epoch, all in degrees.
// NOTE(review): the values and algorithm shape appear to follow
// Duffett-Smith's "Practical Astronomy" (1980.0 epoch) — confirm source.
const (
	lunarMeanLongitudeAtEpoch          = 318.351648 // degrees
	lunarMeanLongitudeOfPerigeeAtEpoch = 36.340410  // degrees
	lunarMeanLongitudeOfNodeAtEpoch    = 318.510107 // degrees
	lunarOrbitInclination              = 5.145396   // degrees
)
// Position computes the Moon's geocentric ecliptic coordinates for the
// given date by applying a chain of periodic corrections (evection, annual
// equation, equation of centre, variation) to the mean lunar elements.
// All intermediate quantities are preserved in Debug for inspection.
func Position(date time.Time) LunarPosition {
	D := t.EpochTime(date)
	// The Sun's anomaly and longitude feed several lunar correction terms.
	solarPosition := s.Position(date)
	Ms := solarPosition.Debug.M
	Ls := solarPosition.Ecliptic.Longitude
	l := lunarLongitude(D)
	Mm := lunarAnomaly(l, D)
	N := lunarLongitudeOfNode(D)
	// Periodic corrections to the anomaly.
	Ev := evectionCorrection(l, Ls, Mm)
	Ae := annualEquation(Ms)
	A3 := thirdCorrection(Ms)
	MMm := lunarCorrectedAnomaly(Mm, Ev, Ae, A3)
	Ec := centreEquationCorrection(MMm)
	A4 := fourthCorrection(MMm)
	ll := lunarCorrectedLongitude(l, Ev, Ec, Ae, A4)
	V := lunarVariation(ll, Ls)
	lll := lunarOrbitalLongitude(ll, V)
	NN := lunarCorrectedLongitudeOfNode(N, Ms)
	// Project the orbital longitude through the node and inclination to
	// obtain ecliptic latitude and longitude.
	y := yCord(lll, NN)
	x := xCord(lll, NN)
	ec := o.Ecliptic{
		Latitude:  lunarEclipticLatitude(lll, NN),
		Longitude: lunarEclipticLongitude(x, y, NN),
	}
	debug := LunarPositionDebug{
		date: date,
		D:    D,
		Ms:   Ms,
		Ls:   Ls,
		l:    l,
		Mm:   Mm,
		N:    N,
		Ev:   Ev,
		Ae:   Ae,
		A3:   A3,
		MMm:  MMm,
		Ec:   Ec,
		A4:   A4,
		ll:   ll,
		V:    V,
		lll:  lll,
		NN:   NN,
		y:    y,
		x:    x,
	}
	return LunarPosition{Ecliptic: ec, Debug: debug}
}
// lunarLongitude returns the Moon's mean longitude (degrees) D days after the epoch.
func lunarLongitude(D float64) float64 {
	l := 13.1763966*D + lunarMeanLongitudeAtEpoch
	return c.NormaliseAngle(l)
}

// lunarAnomaly returns the Moon's mean anomaly (degrees).
func lunarAnomaly(l float64, D float64) float64 {
	M := l - 0.111404*D - lunarMeanLongitudeOfPerigeeAtEpoch
	return c.NormaliseAngle(M)
}

// lunarLongitudeOfNode returns the mean longitude of the ascending node (degrees).
// NOTE(review): this uses c.AdjustTo360 while the helpers above use
// c.NormaliseAngle — confirm the two normalization helpers agree here.
func lunarLongitudeOfNode(D float64) float64 {
	N := lunarMeanLongitudeOfNodeAtEpoch - (0.0529539 * D)
	return c.AdjustTo360(N)
}

// lunarCorrectedLongitudeOfNode corrects the node longitude for the Sun's anomaly Ms.
func lunarCorrectedLongitudeOfNode(N float64, Ms float64) float64 {
	NN := N - (0.16 * c.Sind(Ms))
	return NN
}

// evectionCorrection is the evection term, driven by the Moon/Sun elongation l-L.
func evectionCorrection(l float64, L float64, m float64) float64 {
	C := l - L
	return 1.2739 * c.Sind(2*C-m)
}

// annualEquation is the annual-equation correction from the Sun's anomaly.
func annualEquation(M float64) float64 {
	return 0.1858 * c.Sind(M)
}

// thirdCorrection is the small third correction term (A3).
func thirdCorrection(M float64) float64 {
	return 0.37 * c.Sind(M)
}

// lunarCorrectedAnomaly applies the evection, annual and third corrections to the anomaly.
func lunarCorrectedAnomaly(m float64, Ev float64, Ae float64, A3 float64) float64 {
	return m + Ev - Ae - A3
}

// centreEquationCorrection is the equation-of-centre term for the corrected anomaly.
func centreEquationCorrection(mm float64) float64 {
	return 6.2886 * c.Sind(mm)
}

// fourthCorrection is the small fourth correction term (A4).
func fourthCorrection(mm float64) float64 {
	return 0.214 * c.Sind(2.0*mm)
}

// lunarCorrectedLongitude applies all longitude corrections to the mean longitude.
func lunarCorrectedLongitude(l float64, Ev float64, Ec float64, Ae float64, A4 float64) float64 {
	return l + Ev + Ec - Ae + A4
}

// lunarVariation is the variation term, from twice the corrected elongation.
func lunarVariation(ll float64, L float64) float64 {
	return 0.6583 * c.Sind(2.0*(ll-L))
}

// lunarOrbitalLongitude is the Moon's true orbital longitude.
func lunarOrbitalLongitude(ll float64, V float64) float64 {
	return ll + V
}

// yCord and xCord project the orbital longitude relative to the node onto
// the ecliptic plane, accounting for the orbit's inclination.
func yCord(lll float64, NN float64) float64 {
	y := c.Sind(lll-NN) * c.Cosd(lunarOrbitInclination)
	return y
}

func xCord(lll float64, NN float64) float64 {
	x := c.Cosd(lll - NN)
	return x
}

// lunarEclipticLongitude recovers the ecliptic longitude from the projected
// coordinates, restoring the node offset.
func lunarEclipticLongitude(x float64, y float64, NN float64) float64 {
	Lm := c.Atan2d(y, x) + NN
	return Lm
}

// lunarEclipticLatitude is the Moon's ecliptic latitude from the inclination.
func lunarEclipticLatitude(lll float64, NN float64) float64 {
	Bm := c.Asind(c.Sind(lll-NN) * c.Sind(lunarOrbitInclination))
	return Bm
} | lunar/lunarPosition.go | 0.763484 | 0.490358 | lunarPosition.go | starcoder
package rb
import (
"fmt"
"github.com/pkg/errors"
"github.com/alexander-yu/stream/quantile/order"
)
// Color represents the color of the node.
type Color bool
// The only allowed colors are red and black.
const (
Red Color = true
Black Color = false
)
func (c Color) String() string {
switch c {
case Black:
return "Black"
default:
return "Red"
}
}
// Node represents a node in a red-black tree.
type Node struct {
	left  *Node
	right *Node
	val   float64
	color Color
	size  int // number of nodes in the subtree rooted here
}

// NewNode instantiates a red Node of size 1 holding the provided value.
func NewNode(val float64) *Node {
	return &Node{
		val:   val,
		color: Red,
		size:  1,
	}
}
// Left returns the left child of the node; it errors on a nil receiver.
func (n *Node) Left() (order.Node, error) {
	if n != nil {
		return n.left, nil
	}
	return nil, errors.New("tried to retrieve child of nil node")
}

// Right returns the right child of the node; it errors on a nil receiver.
func (n *Node) Right() (order.Node, error) {
	if n != nil {
		return n.right, nil
	}
	return nil, errors.New("tried to retrieve child of nil node")
}

// Size returns the number of nodes in the subtree rooted at n;
// a nil subtree has size 0.
func (n *Node) Size() int {
	if n != nil {
		return n.size
	}
	return 0
}

// Value returns the value stored at the node.
func (n *Node) Value() float64 {
	return n.val
}

// Color returns the color of the node; nil nodes count as black.
func (n *Node) Color() Color {
	if n != nil {
		return n.color
	}
	return Black
}

// TreeString renders the subtree rooted at n sideways; empty for nil.
func (n *Node) TreeString() string {
	if n != nil {
		return n.treeString("", "", true)
	}
	return ""
}
// add inserts val into the subtree rooted at n (duplicates descend left)
// and returns the new subtree root after red-black rebalancing.
func (n *Node) add(val float64) *Node {
	if n == nil {
		return NewNode(val)
	} else if val <= n.val {
		n.left = n.left.add(val)
	} else {
		n.right = n.right.add(val)
	}
	return n.addBalance()
}
// remove deletes val from the subtree rooted at n using left-leaning
// red-black deletion: red links are pushed down the search path
// (moveRedLeft/moveRedRight) so the node actually removed is never a
// 2-node. Returns the new subtree root.
func (n *Node) remove(val float64) *Node {
	// Avoid restructuring the tree at all when the value is absent.
	if !n.contains(val) {
		return n
	}
	if val < n.val {
		if n.left.Color() == Black && n.left.left.Color() == Black {
			n = n.moveRedLeft()
		}
		n.left = n.left.remove(val)
	} else {
		if n.left.Color() == Red {
			n = n.rotateRight()
		}
		if val == n.val && n.right == nil {
			return nil
		}
		if n.right.Color() == Black && n.right.left.Color() == Black {
			n = n.moveRedRight()
		}
		if val == n.val {
			// Replace with the successor (minimum of the right subtree),
			// then delete that minimum.
			x := n.right.min()
			n.val = x.val
			n.right = n.right.removeMin()
		} else {
			n.right = n.right.remove(val)
		}
	}
	return n.removeBalance()
}

// removeMin deletes the smallest value in the subtree rooted at n.
func (n *Node) removeMin() *Node {
	if n.left == nil {
		return nil
	}
	if n.left.Color() == Black && n.left.left.Color() == Black {
		n = n.moveRedLeft()
	}
	n.left = n.left.removeMin()
	return n.removeBalance()
}

// min returns the leftmost (smallest-valued) node of the subtree.
func (n *Node) min() *Node {
	if n.left == nil {
		return n
	}
	return n.left.min()
}

// contains reports whether val occurs in the subtree rooted at n.
func (n *Node) contains(val float64) bool {
	for n != nil {
		if val == n.val {
			return true
		} else if val < n.val {
			n = n.left
		} else {
			n = n.right
		}
	}
	return false
}
/*****************
 * Rotations
 *****************/

// addBalance restores the left-leaning red-black invariants after an
// insertion (lean reds left, lift red-red chains, split 4-nodes) and
// recomputes the subtree size.
func (n *Node) addBalance() *Node {
	if n.left.Color() == Black && n.right.Color() == Red {
		n = n.rotateLeft()
	}
	if n.left.Color() == Red && n.left.left.Color() == Red {
		n = n.rotateRight()
	}
	if n.left.Color() == Red && n.right.Color() == Red {
		n.flipColors()
	}
	n.size = n.left.Size() + n.right.Size() + 1
	return n
}

// removeBalance restores the invariants on the way back up a deletion.
func (n *Node) removeBalance() *Node {
	if n.right.Color() == Red {
		n = n.rotateLeft()
	}
	if n.left.Color() == Red && n.left.left.Color() == Red {
		n = n.rotateRight()
	}
	if n.left.Color() == Red && n.right.Color() == Red {
		n.flipColors()
	}
	n.size = n.left.Size() + n.right.Size() + 1
	return n
}

// rotateLeft rotates a right-leaning red link to the left, preserving
// order, the color at the subtree root, and subtree sizes.
func (n *Node) rotateLeft() *Node {
	x := n.right
	n.right = x.left
	x.left = n
	x.color = x.left.color
	x.left.color = Red
	x.size = n.size
	n.size = n.left.Size() + n.right.Size() + 1
	return x
}

// rotateRight is the mirror image of rotateLeft.
func (n *Node) rotateRight() *Node {
	x := n.left
	n.left = x.right
	x.right = n
	x.color = x.right.color
	x.right.color = Red
	x.size = n.size
	n.size = n.left.Size() + n.right.Size() + 1
	return x
}

// flipColors inverts the colors of the node and both of its children
// (splitting or merging a temporary 4-node).
func (n *Node) flipColors() {
	n.color = !n.color
	n.left.color = !n.left.color
	n.right.color = !n.right.color
}

// moveRedLeft pushes a red link down toward the left child so that the
// deletion path never descends into a 2-node.
func (n *Node) moveRedLeft() *Node {
	n.flipColors()
	if n.right.left.Color() == Red {
		n.right = n.right.rotateRight()
		n = n.rotateLeft()
		n.flipColors()
	}
	return n
}

// moveRedRight pushes a red link down toward the right child.
func (n *Node) moveRedRight() *Node {
	n.flipColors()
	if n.left.left.Color() == Red {
		n = n.rotateRight()
		n.flipColors()
	}
	return n
}
/*******************
 * Order Statistics
 *******************/

// Select returns the node with the kth smallest value (0-based) in the
// subtree rooted at the node, or nil when k is out of range.
func (n *Node) Select(k int) order.Node {
	if n == nil {
		return nil
	}
	size := n.left.Size()
	if k < size {
		return n.left.Select(k)
	} else if k > size {
		// Skip the left subtree and this node itself.
		return n.right.Select(k - size - 1)
	}
	return n
}

// Rank returns the number of values strictly less than val that are
// contained in the subtree rooted at the node.
func (n *Node) Rank(val float64) int {
	if n == nil {
		return 0
	} else if val < n.val {
		return n.left.Rank(val)
	} else if val > n.val {
		return 1 + n.left.Size() + n.right.Rank(val)
	}
	return n.left.Size()
}
/*******************
 * Pretty-printing
 *******************/

// treeString recursively prints out a subtree rooted at the node in a sideways format, as below:
//       │       ┌── 7.000000
//       │   ┌── 6.000000
//       │   │   └── 5.000000
//       └── 4.000000
//           │   ┌── 3.000000
//           └── 2.000000
//               └── 1.000000
//                   └── 1.000000
func (n *Node) treeString(prefix string, result string, isTail bool) string {
	// isTail indicates whether or not the current node's parent branch needs to be represented
	// as a "tail", i.e. its branch needs to hang in the string representation, rather than branch upwards.
	if isTail {
		// If true, then we need to print the subtree like this:
		// │  ┌── [n.right.treeString()]
		// └── [n.val]
		//    └── [n.left.treeString()]
		if n.right != nil {
			result = n.right.treeString(fmt.Sprintf("%s│   ", prefix), result, false)
		}
		result = fmt.Sprintf("%s%s└── %f\n", result, prefix, n.val)
		if n.left != nil {
			result = n.left.treeString(fmt.Sprintf("%s    ", prefix), result, true)
		}
	} else {
		// If false, then we need to print the subtree like this:
		//    ┌── [n.right.treeString()]
		// ┌── [n.val]
		// │  └── [n.left.treeString()]
		if n.right != nil {
			result = n.right.treeString(fmt.Sprintf("%s    ", prefix), result, false)
		}
		result = fmt.Sprintf("%s%s┌── %f\n", result, prefix, n.val)
		if n.left != nil {
			result = n.left.treeString(fmt.Sprintf("%s│   ", prefix), result, true)
		}
	}
	return result
} | quantile/ost/rb/node.go | 0.875162 | 0.505615 | node.go | starcoder
package plaid
import (
"encoding/json"
)
// AccountIdentity describes a financial account together with the identity
// (owner) data returned by Plaid's Identity/Assets products.
type AccountIdentity struct {
	// Plaid's unique, case-sensitive identifier for the account. It can
	// change when the institution's data cannot be reconciled (e.g. the
	// account name changes) or when a new access_token is generated from
	// the same credentials; accounts that disappear are likely closed.
	AccountId string `json:"account_id"`
	Balances AccountBalance `json:"balances"`
	// The last 2-4 alphanumeric characters of the official account number.
	// May be non-unique across an Item's accounts and may differ from the
	// mask the bank displays to the user.
	Mask NullableString `json:"mask"`
	// The name of the account, either assigned by the user or by the financial institution itself
	Name string `json:"name"`
	// The official name of the account as given by the financial institution
	OfficialName NullableString `json:"official_name"`
	Type AccountType `json:"type"`
	Subtype NullableAccountSubtype `json:"subtype"`
	// Micro-deposit verification status for Auth Items; one of
	// `pending_automatic_verification`, `pending_manual_verification`,
	// `automatically_verified`, `manually_verified`,
	// `verification_expired`, `verification_failed`.
	VerificationStatus *string `json:"verification_status,omitempty"`
	// Owner data for the account. Multiple owners on a single account are
	// represented in the same Owner object, not in multiple entries.
	Owners []Owner `json:"owners"`
}
// NewAccountIdentity instantiates an AccountIdentity with every required
// field set. The argument list tracks the set of API-required properties
// and will change if that set changes.
func NewAccountIdentity(accountId string, balances AccountBalance, mask NullableString, name string, officialName NullableString, type_ AccountType, subtype NullableAccountSubtype, owners []Owner) *AccountIdentity {
	return &AccountIdentity{
		AccountId:    accountId,
		Balances:     balances,
		Mask:         mask,
		Name:         name,
		OfficialName: officialName,
		Type:         type_,
		Subtype:      subtype,
		Owners:       owners,
	}
}

// NewAccountIdentityWithDefaults instantiates an AccountIdentity with only
// defaulted properties set; API-required fields are left at their zero
// values and must be populated by the caller.
func NewAccountIdentityWithDefaults() *AccountIdentity {
	return &AccountIdentity{}
}
// GetAccountId returns the AccountId field value.
func (o *AccountIdentity) GetAccountId() string {
	if o != nil {
		return o.AccountId
	}
	var ret string
	return ret
}

// GetAccountIdOk returns a pointer to the AccountId field value and a
// boolean reporting whether the receiver was usable.
func (o *AccountIdentity) GetAccountIdOk() (*string, bool) {
	if o != nil {
		return &o.AccountId, true
	}
	return nil, false
}

// SetAccountId sets the AccountId field value.
func (o *AccountIdentity) SetAccountId(v string) {
	o.AccountId = v
}

// GetBalances returns the Balances field value.
func (o *AccountIdentity) GetBalances() AccountBalance {
	if o != nil {
		return o.Balances
	}
	var ret AccountBalance
	return ret
}

// GetBalancesOk returns a pointer to the Balances field value and a
// boolean reporting whether the receiver was usable.
func (o *AccountIdentity) GetBalancesOk() (*AccountBalance, bool) {
	if o != nil {
		return &o.Balances, true
	}
	return nil, false
}

// SetBalances sets the Balances field value.
func (o *AccountIdentity) SetBalances(v AccountBalance) {
	o.Balances = v
}
// GetMask returns the Mask field value; the zero string is returned when
// the nullable value is explicit nil.
func (o *AccountIdentity) GetMask() string {
	if o != nil {
		if v := o.Mask.Get(); v != nil {
			return *v
		}
	}
	var ret string
	return ret
}

// GetMaskOk returns the nullable Mask value and whether it has been set.
// NOTE: if the value is an explicit nil, `nil, true` is returned.
func (o *AccountIdentity) GetMaskOk() (*string, bool) {
	if o != nil {
		return o.Mask.Get(), o.Mask.IsSet()
	}
	return nil, false
}

// SetMask sets the Mask field value.
func (o *AccountIdentity) SetMask(v string) {
	o.Mask.Set(&v)
}

// GetName returns the Name field value.
func (o *AccountIdentity) GetName() string {
	if o != nil {
		return o.Name
	}
	var ret string
	return ret
}

// GetNameOk returns a pointer to the Name field value and a boolean
// reporting whether the receiver was usable.
func (o *AccountIdentity) GetNameOk() (*string, bool) {
	if o != nil {
		return &o.Name, true
	}
	return nil, false
}

// SetName sets the Name field value.
func (o *AccountIdentity) SetName(v string) {
	o.Name = v
}

// GetOfficialName returns the OfficialName field value; the zero string is
// returned when the nullable value is explicit nil.
func (o *AccountIdentity) GetOfficialName() string {
	if o != nil {
		if v := o.OfficialName.Get(); v != nil {
			return *v
		}
	}
	var ret string
	return ret
}

// GetOfficialNameOk returns the nullable OfficialName value and whether it
// has been set. NOTE: if the value is an explicit nil, `nil, true` is returned.
func (o *AccountIdentity) GetOfficialNameOk() (*string, bool) {
	if o != nil {
		return o.OfficialName.Get(), o.OfficialName.IsSet()
	}
	return nil, false
}

// SetOfficialName sets the OfficialName field value.
func (o *AccountIdentity) SetOfficialName(v string) {
	o.OfficialName.Set(&v)
}
// GetType returns the Type field value.
func (o *AccountIdentity) GetType() AccountType {
	if o != nil {
		return o.Type
	}
	var ret AccountType
	return ret
}

// GetTypeOk returns a pointer to the Type field value and a boolean
// reporting whether the receiver was usable.
func (o *AccountIdentity) GetTypeOk() (*AccountType, bool) {
	if o != nil {
		return &o.Type, true
	}
	return nil, false
}

// SetType sets the Type field value.
func (o *AccountIdentity) SetType(v AccountType) {
	o.Type = v
}

// GetSubtype returns the Subtype field value; the zero AccountSubtype is
// returned when the nullable value is explicit nil.
func (o *AccountIdentity) GetSubtype() AccountSubtype {
	if o != nil {
		if v := o.Subtype.Get(); v != nil {
			return *v
		}
	}
	var ret AccountSubtype
	return ret
}

// GetSubtypeOk returns the nullable Subtype value and whether it has been
// set. NOTE: if the value is an explicit nil, `nil, true` is returned.
func (o *AccountIdentity) GetSubtypeOk() (*AccountSubtype, bool) {
	if o != nil {
		return o.Subtype.Get(), o.Subtype.IsSet()
	}
	return nil, false
}

// SetSubtype sets the Subtype field value.
func (o *AccountIdentity) SetSubtype(v AccountSubtype) {
	o.Subtype.Set(&v)
}
// GetVerificationStatus returns the VerificationStatus field value if set, zero value otherwise.
func (o *AccountIdentity) GetVerificationStatus() string {
	if o == nil || o.VerificationStatus == nil {
		return ""
	}
	return *o.VerificationStatus
}

// GetVerificationStatusOk returns a tuple with the VerificationStatus field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *AccountIdentity) GetVerificationStatusOk() (*string, bool) {
	if o == nil || o.VerificationStatus == nil {
		return nil, false
	}
	return o.VerificationStatus, true
}

// HasVerificationStatus returns a boolean if a field has been set.
func (o *AccountIdentity) HasVerificationStatus() bool {
	return o != nil && o.VerificationStatus != nil
}

// SetVerificationStatus gets a reference to the given string and assigns it to the VerificationStatus field.
func (o *AccountIdentity) SetVerificationStatus(v string) {
	o.VerificationStatus = &v
}

// GetOwners returns the Owners field value
func (o *AccountIdentity) GetOwners() []Owner {
	if o == nil {
		return nil
	}
	return o.Owners
}

// GetOwnersOk returns a tuple with the Owners field value
// and a boolean to check if the value has been set.
func (o *AccountIdentity) GetOwnersOk() (*[]Owner, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Owners, true
}

// SetOwners sets field value
func (o *AccountIdentity) SetOwners(v []Owner) {
	o.Owners = v
}
// MarshalJSON serializes the account identity. All required fields are
// always emitted; verification_status is only included when it is set.
func (o AccountIdentity) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"account_id":    o.AccountId,
		"balances":      o.Balances,
		"mask":          o.Mask.Get(),
		"name":          o.Name,
		"official_name": o.OfficialName.Get(),
		"type":          o.Type,
		"subtype":       o.Subtype.Get(),
		"owners":        o.Owners,
	}
	if o.VerificationStatus != nil {
		toSerialize["verification_status"] = o.VerificationStatus
	}
	return json.Marshal(toSerialize)
}
// NullableAccountIdentity distinguishes between an AccountIdentity that is
// absent and one that has been explicitly set (possibly to nil).
type NullableAccountIdentity struct {
	value *AccountIdentity
	isSet bool
}

// Get returns the wrapped value; may be nil.
func (v NullableAccountIdentity) Get() *AccountIdentity {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableAccountIdentity) Set(val *AccountIdentity) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set has been called since the last Unset.
func (v NullableAccountIdentity) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableAccountIdentity) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableAccountIdentity returns a wrapper that is already marked as set.
func NewNullableAccountIdentity(val *AccountIdentity) *NullableAccountIdentity {
	return &NullableAccountIdentity{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value; a nil value encodes as JSON null.
func (v NullableAccountIdentity) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableAccountIdentity) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | plaid/model_account_identity.go | 0.810479 | 0.456107 | model_account_identity.go | starcoder |
package daycount
import (
	"fmt"
	"sort"
	"time"
)
// Default is the day count convention used by Fraction and Days when the
// basis argument is left empty.
var Default = "30E360"
// dateDiffFunc computes a convention-specific day count between two dates.
type dateDiffFunc func(date1, date2 time.Time) float64

// conventions is a map structure that contains the information
// to calculate the days between two dates and convert it into
// a day count fraction: Numerator counts the accrued days,
// Denominator counts the days in the full period.
// https://www.isda.org/2008/12/22/30-360-day-count-conventions
var conventions = map[string]struct {
	Numerator dateDiffFunc
	Denominator dateDiffFunc
}{
	// ISDA 30E/360 (includes the last-day-of-February adjustment)
	"30E360": {
		Numerator: days30e360,
		Denominator: days30e360,
	},
	// 30E/360 Eurobond basis (31sts clamped to 30, no February rule)
	"EUROBOND": {
		Numerator: eurobond,
		Denominator: eurobond,
	},
	// 30/360 US bond basis
	"BONDBASIS": {
		Numerator: bondbasis,
		Denominator: bondbasis,
	},
	// actual days accrued over a 30E/360 count of the period
	"ACT360": {
		Numerator: act,
		Denominator: days30e360,
	},
	// actual days accrued over actual days in the period
	"ACTACT": {
		Numerator: act,
		Denominator: act,
	},
}
// Implemented returns the names of the implemented day count conventions.
// The result is sorted so that output is deterministic across calls
// (map iteration order in Go is randomized).
func Implemented() []string {
	list := make([]string, 0, len(conventions))
	for conv := range conventions {
		list = append(list, conv)
	}
	sort.Strings(list)
	return list
}
// Fraction returns the fraction of coupon that has been accrued between date1 and date3.
// date1: last coupon payment, starting date for interest accrual
// date2: date through which interest rate is being accrued (settlement dates for bonds)
// date3: next coupon payment
// An empty basis selects the Default convention; an unknown basis is an error.
func Fraction(date1, date2, date3 time.Time, basis string) (float64, error) {
	if basis == "" {
		basis = Default
	}
	conv, ok := conventions[basis]
	if !ok {
		return 0.0, fmt.Errorf("day count convention %s not implemented", basis)
	}
	accrued := conv.Numerator(date1, date2)
	period := conv.Denominator(date1, date3)
	return accrued / period, nil
}
// Days counts the days between two dates under the given convention.
// An empty basis selects the Default convention; an unknown basis is an error.
func Days(date1, date2 time.Time, basis string) (float64, error) {
	if basis == "" {
		basis = Default
	}
	conv, ok := conventions[basis]
	if !ok {
		return 0.0, fmt.Errorf("day count convention %s not implemented", basis)
	}
	return conv.Numerator(date1, date2), nil
}
// days30360 is the helper function to calculate the days between two dates for the 30/360 methods
func days30360(d1, d2 time.Time, day1, day2 int) float64 {
return 360.0*float64(d2.Year()-d1.Year()) + 30.0*float64(d2.Month()-d1.Month()) + float64(day2-day1)
}
// isLastDayOfFeb checks if time is the last day of February
func isLastDayofFeb(d time.Time) bool {
if d.Month() == 2 {
if d.YearDay() == time.Date(d.Year(), 3, 0, 0, 0, 0, 0, d.Location()).YearDay() {
return true
}
}
return false
}
func days30e360(date1, date2 time.Time) float64 {
day1, day2 := date1.Day(), date2.Day()
if day1 == 31 || isLastDayofFeb(date1) {
day1 = 30
}
// FIXME: if date2 is last day of Feb, we should ensure that date2 is not termination date
if day2 == 31 || isLastDayofFeb(date2) {
day2 = 30
}
return days30360(date1, date2, day1, day2)
}
func eurobond(date1, date2 time.Time) float64 {
day1, day2 := date1.Day(), date2.Day()
if day1 == 31 {
day1 = 30
}
if day2 == 31 {
day2 = 30
}
return days30360(date1, date2, day1, day2)
}
func bondbasis(date1, date2 time.Time) float64 {
day1, day2 := date1.Day(), date2.Day()
if day1 == 31 {
day1 = 30
}
if day2 == 31 && day1 >= 30 {
day2 = 30
}
return days30360(date1, date2, day1, day2)
}
func act(date1, date2 time.Time) float64 {
return date2.Sub(date1).Hours() / 24.0
} | daycount.go | 0.667256 | 0.491151 | daycount.go | starcoder |
package sqlbatch
import (
"fmt"
"database/sql"
)
// Command format for sending a batch of sql commands.
// Query is the sql query to execute (required).
// ArgsFunc is called before execution for query arguments (optional).
// It will be passed the results gathered so far, so later commands can
// reference values produced by earlier commands in the batch.
// Args are query parameters (optional). Ignored if ArgsFunc is non-nil.
// Init is the default value passed to first iteration of ReadAll.
// Following iterations will use the previous memo returned by ReadAll.
// ReadAll is the read function for reading all rows (optional).
// ReadOne is the read function for reading at most one row (optional).
// If ReadOne is non-nil, ReadAll is ignored.
// Affect is the number of rows that should be affected.
// If Affect is zero (default), it is not checked and the command is run
// as a query rather than an exec.
// If Affect is negative, no rows should be affected.
// If Affect is positive, that should be the number of affected rows.
type Command struct {
	Query string
	ArgsFunc func([]interface{}) []interface{}
	Args []interface{}
	Init interface{}
	ReadAll func(memo interface{}, fn func(...interface{}) error) (interface{}, error)
	ReadOne func(fn func(...interface{}) error) (interface{}, error)
	Affect int64
}
// Batch executes a batch of commands in a single transaction.
// It returns the per-command results and an error. Results include
// the value returned by ReadAll or ReadOne for each command at the
// matching index; on error, the results gathered so far are returned
// alongside the error.
func Batch(tx *sql.Tx, commands []Command) ([]interface{}, error) {
	results := make([]interface{}, len(commands))
	for i, command := range commands {
		args := command.Args
		if command.ArgsFunc != nil {
			// ArgsFunc may consult results produced by earlier commands.
			args = command.ArgsFunc(results)
		}
		if command.Affect != 0 {
			if err := execCommand(tx, command, args); err != nil {
				return results, err
			}
			continue
		}
		result, err := queryCommand(tx, command, args)
		if err != nil {
			return results, err
		}
		results[i] = result
	}
	return results, nil
}

// execCommand runs a command that is expected to affect rows and verifies
// the affected row count against command.Affect (negative means "none").
func execCommand(tx *sql.Tx, command Command, args []interface{}) error {
	result, err := tx.Exec(command.Query, args...)
	if err != nil {
		return err
	}
	affected, err := result.RowsAffected()
	if err != nil {
		return err
	}
	expected := command.Affect
	if expected < 0 {
		expected = 0
	}
	if expected != affected {
		return fmt.Errorf(expectedDifferentAffectedRows, expected, affected, command.Query)
	}
	return nil
}

// queryCommand runs a command as a query and feeds the rows to ReadOne or
// ReadAll. Extracting this into its own function fixes a defer-in-loop bug:
// previously `defer rows.Close()` inside Batch's loop kept every result set
// open until the entire batch finished.
func queryCommand(tx *sql.Tx, command Command, args []interface{}) (interface{}, error) {
	rows, err := tx.Query(command.Query, args...)
	if err != nil {
		return nil, err
	}
	// This deferred Close fires when the command finishes, not at the end
	// of the whole batch; closing sql.Rows twice is safe.
	defer rows.Close()
	var result interface{}
	if command.ReadOne != nil {
		if rows.Next() {
			result, err = command.ReadOne(rows.Scan)
			if err != nil {
				return nil, err
			}
		}
	} else if command.ReadAll != nil {
		memo := command.Init
		for rows.Next() {
			memo, err = command.ReadAll(memo, rows.Scan)
			if err != nil {
				return nil, err
			}
		}
		result = memo
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return result, rows.Close()
}
const expectedDifferentAffectedRows = "Expected to affect %v rows, but %v rows affected for query: `%v`" | sqlbatch.go | 0.565899 | 0.416559 | sqlbatch.go | starcoder |
package comb
import "fmt"
// Token accepts the shortest given token. At least one token must
// be provided. If more than one token is given, then a trie is used
// to check for membership.
func Token(tokens ...string) Parser {
	converted := make([][]rune, 0, len(tokens))
	for _, s := range tokens {
		converted = append(converted, []rune(s))
	}
	return TokenRunes(converted...)
}
// TokenRunes is like Token, but takes multiple rune slices.
// It panics when given no tokens or an empty token.
func TokenRunes(tokens ...[]rune) Parser {
	if len(tokens) == 0 {
		panic("at least one token must be specified")
	}
	for _, tok := range tokens {
		if len(tok) == 0 {
			panic("token cannot be empty")
		}
	}
	if len(tokens) > 1 {
		return manyTokens(tokens)
	}
	return singleToken(tokens[0])
}
// singleToken builds a parser that matches exactly one literal token,
// comparing the input rune by rune.
func singleToken(runes []rune) Parser {
	return ParserFunc(func(s Scanner) (Result, Scanner) {
		next := s
		for _, want := range runes {
			got, advanced, err := next.Next()
			if err != nil {
				return Failed(err), advanced
			}
			if got != want {
				return Failed(tokenError(s.Between(advanced))), advanced
			}
			next = advanced
		}
		return Result{
			Runes: s.Between(next),
		}, next
	})
}
// manyTokens builds a parser over a trie of tokens; it consumes runes until
// it reaches an accepting node (shortest match wins) or falls off the trie.
func manyTokens(tokens [][]rune) Parser {
	root := buildTrie(tokens)
	return ParserFunc(func(s Scanner) (Result, Scanner) {
		node := root
		next := s
		for !node.accept {
			r, advanced, err := next.Next()
			if err != nil {
				return Failed(err), advanced
			}
			next = advanced
			node = node.find(r)
			if node == nil {
				return Failed(tokenError(s.Between(next))), next
			}
		}
		return Result{
			Runes: s.Between(next),
		}, next
	})
}
// tokenError builds a lazy error describing a prefix that matches no token.
func tokenError(runes []rune) error {
	return errorFunc(func() string {
		return fmt.Sprintf("'%s' is not a prefix of any token", string(runes))
	})
}
// tokenTrie is a rune-keyed trie; accept marks nodes at which a complete
// token ends.
type tokenTrie struct {
	children map[rune]*tokenTrie
	accept   bool
}

// buildTrie constructs a trie containing every token in the given set.
func buildTrie(tokens [][]rune) *tokenTrie {
	root := &tokenTrie{}
	for _, tok := range tokens {
		root.add(tok)
	}
	return root
}

// add inserts one token into the trie, creating intermediate nodes as
// needed and marking the final node as accepting.
func (t *tokenTrie) add(runes []rune) {
	node := t
	for _, r := range runes {
		if node.children == nil {
			node.children = make(map[rune]*tokenTrie)
		}
		child, ok := node.children[r]
		if !ok {
			child = &tokenTrie{}
			node.children[r] = child
		}
		node = child
	}
	node.accept = true
}
func (t *tokenTrie) find(r rune) *tokenTrie {
return t.children[r]
} | token.go | 0.699152 | 0.443841 | token.go | starcoder |
Render an SDF
SDF3 -> STL file
SDF2 -> DXF file
SDF2 -> SVG file
*/
//-----------------------------------------------------------------------------
package sdf
import (
"fmt"
"sync"
)
//-----------------------------------------------------------------------------
// RenderSTL renders an SDF3 as an STL file (uses octree sampling).
// Errors are reported on stdout rather than returned.
func RenderSTL(
	s SDF3, //sdf3 to render
	meshCells int, //number of cells on the longest axis. e.g 200
	path string, //path to filename
) {
	// work out the sampling resolution to use
	bbSize := s.BoundingBox().Size()
	resolution := bbSize.MaxComponent() / float64(meshCells)
	cells := bbSize.DivScalar(resolution).ToV3i()
	fmt.Printf("rendering %s (%dx%dx%d, resolution %.2f)\n", path, cells[0], cells[1], cells[2], resolution)
	// write the triangles to an STL file
	// (the writer runs concurrently, consuming triangles from `output`)
	var wg sync.WaitGroup
	output, err := WriteSTL(&wg, path)
	if err != nil {
		fmt.Printf("%s", err)
		return
	}
	// run marching cubes to generate the triangle mesh
	marchingCubesOctree(s, resolution, output)
	// stop the STL writer reading on the channel
	// (closing must happen before wg.Wait, or Wait would block forever)
	close(output)
	// wait for the file write to complete
	wg.Wait()
}
// RenderSTLSlow renders an SDF3 as an STL file (uses uniform grid sampling).
// Errors are reported on stdout rather than returned.
func RenderSTLSlow(
	s SDF3, //sdf3 to render
	meshCells int, //number of cells on the longest axis. e.g 200
	path string, //path to filename
) {
	// derive a sampling region covering the bounding box with a whole
	// number of equally sized cells
	box := s.BoundingBox()
	boxSize := box.Size()
	inc := boxSize.MaxComponent() / float64(meshCells)
	gridSize := boxSize.DivScalar(inc).Ceil().AddScalar(1)
	cells := gridSize.ToV3i()
	sampleBox := NewBox3(box.Center(), gridSize.MulScalar(inc))
	fmt.Printf("rendering %s (%dx%dx%d)\n", path, cells[0], cells[1], cells[2])
	// sample the SDF on the uniform grid, triangulate, and save
	if err := SaveSTL(path, marchingCubes(s, sampleBox, inc)); err != nil {
		fmt.Printf("%s", err)
	}
}
//-----------------------------------------------------------------------------
// RenderDXF renders an SDF2 as a DXF file. (uses quadtree sampling)
// Errors are reported on stdout rather than returned.
func RenderDXF(
	s SDF2, //sdf2 to render
	meshCells int, //number of cells on the longest axis. e.g 200
	path string, //path to filename
) {
	// work out the sampling resolution to use
	bbSize := s.BoundingBox().Size()
	resolution := bbSize.MaxComponent() / float64(meshCells)
	cells := bbSize.DivScalar(resolution).ToV2i()
	fmt.Printf("rendering %s (%dx%d, resolution %.2f)\n", path, cells[0], cells[1], resolution)
	// write the line segments to a DXF file
	// (the writer runs concurrently, consuming segments from `output`)
	var wg sync.WaitGroup
	output, err := WriteDXF(&wg, path)
	if err != nil {
		fmt.Printf("%s", err)
		return
	}
	// run marching squares to generate the line segments
	marchingSquaresQuadtree(s, resolution, output)
	// stop the DXF writer reading on the channel
	// (closing must happen before wg.Wait, or Wait would block forever)
	close(output)
	// wait for the file write to complete
	wg.Wait()
}
// RenderDXFSlow renders an SDF2 as a DXF file. (uses uniform grid sampling)
// Errors are reported on stdout rather than returned.
func RenderDXFSlow(
	s SDF2, //sdf2 to render
	meshCells int, //number of cells on the longest axis. e.g 200
	path string, //path to filename
) {
	// derive a sampling region covering the bounding box with a whole
	// number of equally sized cells
	box := s.BoundingBox()
	boxSize := box.Size()
	inc := boxSize.MaxComponent() / float64(meshCells)
	gridSize := boxSize.DivScalar(inc).Ceil().AddScalar(1)
	cells := gridSize.ToV2i()
	sampleBox := NewBox2(box.Center(), gridSize.MulScalar(inc))
	fmt.Printf("rendering %s (%dx%d)\n", path, cells[0], cells[1])
	// sample the SDF on the uniform grid, trace segments, and save
	if err := SaveDXF(path, marchingSquares(s, sampleBox, inc)); err != nil {
		fmt.Printf("%s", err)
	}
}
//-----------------------------------------------------------------------------
// RenderSVG renders an SDF2 as an SVG file. (uses quadtree sampling)
func RenderSVG(
	s SDF2, // sdf2 to render
	meshCells int, // number of cells on the longest axis. e.g 200
	path string, // path to filename
	lineStyle string, // SVG line style
) error {
	// work out the sampling resolution to use
	bbSize := s.BoundingBox().Size()
	resolution := bbSize.MaxComponent() / float64(meshCells)
	cells := bbSize.DivScalar(resolution).ToV2i()
	fmt.Printf("rendering %s (%dx%d, resolution %.2f)\n", path, cells[0], cells[1], resolution)
	// write the line segments to an SVG file
	// (the writer runs concurrently, consuming segments from `output`)
	var wg sync.WaitGroup
	output, err := WriteSVG(&wg, path, lineStyle)
	if err != nil {
		return err
	}
	// run marching squares to generate the line segments
	marchingSquaresQuadtree(s, resolution, output)
	// stop the SVG writer reading on the channel
	// (closing must happen before wg.Wait, or Wait would block forever)
	close(output)
	// wait for the file write to complete
	wg.Wait()
	return nil
}
// RenderSVGSlow renders an SDF2 as an SVG file. (uses uniform grid sampling)
func RenderSVGSlow(
	s SDF2, // sdf2 to render
	meshCells int, // number of cells on the longest axis. e.g 200
	path string, // path to filename
	lineStyle string, // SVG line style
) error {
	// derive a sampling region covering the bounding box with a whole
	// number of equally sized cells
	box := s.BoundingBox()
	boxSize := box.Size()
	inc := boxSize.MaxComponent() / float64(meshCells)
	gridSize := boxSize.DivScalar(inc).Ceil().AddScalar(1)
	cells := gridSize.ToV2i()
	sampleBox := NewBox2(box.Center(), gridSize.MulScalar(inc))
	fmt.Printf("rendering %s (%dx%d)\n", path, cells[0], cells[1])
	// sample the SDF on the uniform grid, trace segments, and save
	return SaveSVG(path, lineStyle, marchingSquares(s, sampleBox, inc))
}
//----------------------------------------------------------------------------- | sdf/render.go | 0.635901 | 0.429848 | render.go | starcoder |
package client
import (
"encoding/json"
"time"
)
type (
Time struct {
time.Time
}
)
// Date returns the Time corresponding to the supplied parameters
// by wrapping time.Date.
func Date(year int, month time.Month, day, hour, min, sec, nsec int, loc *time.Location) Time {
return Time{time.Date(year, month, day, hour, min, sec, nsec, loc)}
}
// Now returns the current local time.
func Now() Time {
return Time{time.Now()}
}
// IsZero returns true if the value is nil or time is zero.
func (t *Time) IsZero() bool {
if t == nil {
return true
}
return t.Time.IsZero()
}
// Before reports whether the time instant t is before u.
func (t Time) Before(u Time) bool {
return t.Time.Before(u.Time)
}
// Equal reports whether the time instant t is equal to u.
func (t Time) Equal(u Time) bool {
return t.Time.Equal(u.Time)
}
// Unix returns the local time corresponding to the given Unix time
// by wrapping time.Unix.
func Unix(sec int64, nsec int64) Time {
return Time{time.Unix(sec, nsec)}
}
// Rfc3339Copy returns a copy of the Time at second-level precision.
func (t Time) Rfc3339Copy() Time {
copied, _ := time.Parse(time.RFC3339, t.Format(time.RFC3339))
return Time{copied}
}
// UnmarshalJSON implements the json.Unmarshaller interface.
func (t *Time) UnmarshalJSON(b []byte) error {
if len(b) == 4 && string(b) == "null" {
t.Time = time.Time{}
return nil
}
var str string
json.Unmarshal(b, &str)
pt, err := time.Parse(time.RFC3339, str)
if err != nil {
return err
}
t.Time = pt.Local()
return nil
}
// UnmarshalQueryParameter converts from a URL query parameter value to an object
func (t *Time) UnmarshalQueryParameter(str string) error {
if len(str) == 0 {
t.Time = time.Time{}
return nil
}
// Tolerate requests from older clients that used JSON serialization to build query params
if len(str) == 4 && str == "null" {
t.Time = time.Time{}
return nil
}
pt, err := time.Parse(time.RFC3339, str)
if err != nil {
return err
}
t.Time = pt.Local()
return nil
}
// MarshalJSON implements the json.Marshaler interface.
func (t Time) MarshalJSON() ([]byte, error) {
if t.IsZero() {
// Encode unset/nil objects as JSON's "null".
return []byte("null"), nil
}
return json.Marshal(t.UTC().Format(time.RFC3339))
}
// MarshalQueryParameter converts to a URL query parameter value
func (t Time) MarshalQueryParameter() (string, error) {
if t.IsZero() {
// Encode unset/nil objects as an empty string
return "", nil
}
return t.UTC().Format(time.RFC3339), nil
} | deps/github.com/YakLabs/k8s-client/time.go | 0.827445 | 0.45042 | time.go | starcoder |
// Define custom patterns (implementing the siegfried.Pattern interface) for the different patterns allowed by the PRONOM spec.
package pronom
import (
"bytes"
"github.com/richardlehane/siegfried/internal/bytematcher/patterns"
"github.com/richardlehane/siegfried/internal/persist"
)
// init registers the Range loader so persisted signatures can be decoded
// back into Range patterns.
func init() {
	patterns.Register(rangeLoader, loadRange)
}

const (
	// rangeLoader is the persistence tag that identifies a saved Range.
	rangeLoader byte = iota + 8
)
// Range is a pattern matching any byte sequence that sorts between From and
// To, inclusive at both ends.
type Range struct {
	From, To []byte
}

// Test reports whether the start of b falls within the range. It returns
// the match lengths (nil for no match) and the number of bytes that could
// be examined (0 when b is too short).
func (r Range) Test(b []byte) ([]int, int) {
	if len(b) < len(r.From) || len(b) < len(r.To) {
		return nil, 0
	}
	inRange := bytes.Compare(r.From, b[:len(r.From)]) <= 0 &&
		bytes.Compare(r.To, b[:len(r.To)]) >= 0
	if inRange {
		return []int{len(r.From)}, 1
	}
	return nil, 1
}

// TestR is like Test but anchored to the end of b.
func (r Range) TestR(b []byte) ([]int, int) {
	if len(b) < len(r.From) || len(b) < len(r.To) {
		return nil, 0
	}
	inRange := bytes.Compare(r.From, b[len(b)-len(r.From):]) <= 0 &&
		bytes.Compare(r.To, b[len(b)-len(r.To):]) >= 0
	if inRange {
		return []int{len(r.From)}, 1
	}
	return nil, 1
}
// Equals reports whether pat is a Range with identical From and To bounds.
func (r Range) Equals(pat patterns.Pattern) bool {
	other, ok := pat.(Range)
	return ok && bytes.Equal(other.From, r.From) && bytes.Equal(other.To, r.To)
}
// Length returns the minimum and maximum byte length of the pattern
// (always equal for a range).
func (r Range) Length() (int, int) {
	return len(r.From), len(r.From)
}

// NumSequences reports how many distinct byte sequences the range covers,
// or 0 when the range cannot (or should not) be enumerated: lengths other
// than 1 or 2, or a two-byte range whose leading bytes differ by more than 1.
func (r Range) NumSequences() int {
	switch l := len(r.From); {
	case l < 1 || l > 2:
		return 0
	case l == 2:
		if r.To[0]-r.From[0] > 1 {
			return 0
		}
		return 256*int(r.To[0]-r.From[0]) + int(r.To[1]) - int(r.From[1]) + 1
	default:
		return int(r.To[0]-r.From[0]) + 1
	}
}
// Sequences expands the range into the explicit list of byte sequences it
// covers. The slice length always equals NumSequences(); when the range is
// not enumerable (see NumSequences) the returned slice is empty.
func (r Range) Sequences() []patterns.Sequence {
	num := r.NumSequences()
	seqs := make([]patterns.Sequence, num)
	if num < 1 {
		return seqs
	}
	if len(r.From) == 2 {
		if r.From[0] == r.To[0] {
			// Both bounds share the leading byte: only the second byte varies.
			for i := 0; i < num; i++ {
				seqs[i] = patterns.Sequence{r.From[0], r.From[1] + byte(i)}
			}
			return seqs
		}
		// The bounds span two adjacent leading bytes (NumSequences returned
		// non-zero, so To[0]-From[0] == 1 here): first emit From[1]..0xff
		// under From[0], then 0x00..To[1] under To[0].
		max := 256 - int(r.From[1])
		for i := 0; i < max; i++ {
			seqs[i] = patterns.Sequence{r.From[0], r.From[1] + byte(i)}
		}
		// i counts the low byte from zero while max walks the output index.
		for i := 0; max < num; max++ {
			seqs[max] = patterns.Sequence{r.To[0], byte(0 + i)}
			i++
		}
		return seqs
	}
	// Single-byte range: enumerate From[0] through To[0] inclusive.
	for i := 0; i < num; i++ {
		seqs[i] = patterns.Sequence{r.From[0] + byte(i)}
	}
	return seqs
}
// String renders the range in the signature debug notation.
func (r Range) String() string {
	return "r " + patterns.Stringify(r.From) + " - " + patterns.Stringify(r.To)
}

// Save persists the range: the rangeLoader tag first, then both bounds,
// mirroring the order loadRange reads them back.
func (r Range) Save(ls *persist.LoadSaver) {
	ls.SaveByte(rangeLoader)
	ls.SaveBytes(r.From)
	ls.SaveBytes(r.To)
}
func loadRange(ls *persist.LoadSaver) patterns.Pattern {
return Range{
ls.LoadBytes(),
ls.LoadBytes(),
}
} | pkg/pronom/patterns.go | 0.659624 | 0.479321 | patterns.go | starcoder |
package render
import (
"math"
mgl "github.com/go-gl/mathgl/mgl32"
"github.com/inkyblackness/hacked/ui/opengl"
)
// orientationViewVertexShaderSource transforms each vertex and also forwards
// the depth of the model origin (zCenter) so the fragment shader can tell
// which side of the origin a fragment lies on.
var orientationViewVertexShaderSource = `
#version 150
precision mediump float;
in vec3 vertexPosition;
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
out vec3 position;
out float zCenter;
void main(void) {
	gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(vertexPosition, 1.0);
	zCenter = (projectionMatrix * viewMatrix * modelMatrix * vec4(0.0, 0.0, 0.0, 1.0)).z;
	position = gl_Position.xyz;
}
`

// orientationViewFragmentShaderSource colors a fragment with foregroundColor
// when its depth is on the near side of the model origin (position.z <=
// zCenter) and with backgroundColor otherwise.
var orientationViewFragmentShaderSource = `
#version 150
precision mediump float;
uniform vec4 foregroundColor;
uniform vec4 backgroundColor;
in vec3 position;
in float zCenter;
out vec4 fragColor;
void main(void) {
	if (position.z <= zCenter)
	{
		fragColor = foregroundColor;
	}
	else
	{
		fragColor = backgroundColor;
	}
}
`
// OrientationView is a control that displays how an object is oriented.
// This is still not working properly. The orientation arrow is not properly rotated,
// and I suspect the typical issue of three-angle rotation versus quaternion rotation.
// Though I doubt the original engine used quaternions, it remains too confusing for me.
type OrientationView struct {
	// context provides the OpenGL instance and the view/projection matrices.
	context Context
	// program is the linked shader program used for all drawing.
	program uint32
	vao *opengl.VertexArrayObject
	// vertexPositionBuffer holds the static arrow + ring geometry.
	vertexPositionBuffer uint32
	vertexPositionAttrib int32
	modelMatrixUniform opengl.Matrix4Uniform
	viewMatrixUniform opengl.Matrix4Uniform
	projectionMatrixUniform opengl.Matrix4Uniform
	foregroundColorUniform opengl.Vector4Uniform
	backgroundColorUniform opengl.Vector4Uniform
	// baseOrientation is a fixed transform applied to everything drawn.
	baseOrientation mgl.Mat4
	// rotation holds the per-axis components used as rotation axes in Render.
	rotation mgl.Vec3
	// xRingVerticesStart, yRingVerticesStart and zRingVerticesStart are
	// offsets (in float32s) into the vertex buffer where each rotation
	// ring begins; vertices is the total float32 count.
	xRingVerticesStart int
	yRingVerticesStart int
	zRingVerticesStart int
	vertices int
}
// NewOrientationView returns a new instance.
// It compiles and links the shader program, uploads the static vertex
// buffer (three axis segments plus three rotation rings) and records where
// each ring starts inside that buffer.
func NewOrientationView(context Context, baseOrientation mgl.Mat4, rotation mgl.Vec3) *OrientationView {
	gl := context.OpenGL
	program, programErr := opengl.LinkNewStandardProgram(gl, orientationViewVertexShaderSource, orientationViewFragmentShaderSource)
	if programErr != nil {
		// A shader that fails to link is a programming error, not a
		// recoverable runtime condition.
		panic(opengl.NamedShaderError{Name: "OrientationViewShader", Nested: programErr})
	}
	view := &OrientationView{
		context: context,
		program: program,
		vao: opengl.NewVertexArrayObject(gl, program),
		vertexPositionBuffer: gl.GenBuffers(1)[0],
		vertexPositionAttrib: gl.GetAttribLocation(program, "vertexPosition"),
		modelMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "modelMatrix")),
		viewMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "viewMatrix")),
		projectionMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "projectionMatrix")),
		foregroundColorUniform: opengl.Vector4Uniform(gl.GetUniformLocation(program, "foregroundColor")),
		backgroundColorUniform: opengl.Vector4Uniform(gl.GetUniformLocation(program, "backgroundColor")),
		baseOrientation: baseOrientation,
		rotation: rotation,
	}
	{
		gl.BindBuffer(opengl.ARRAY_BUFFER, view.vertexPositionBuffer)
		var vertices []float32
		radius := 0.5
		// Three axis line segments of length 0.4, each starting at the origin
		// (X, Y, Z in that order) — these form the orientation arrow.
		vertices = append(vertices, 0.0, 0.0, 0.0)
		vertices = append(vertices, 0.4, 0.0, 0.0)
		vertices = append(vertices, 0.0, 0.0, 0.0)
		vertices = append(vertices, 0.0, 0.4, 0.0)
		vertices = append(vertices, 0.0, 0.0, 0.0)
		vertices = append(vertices, 0.0, 0.0, 0.4)
		// Each ring is a circle of the given radius sampled every 4 degrees,
		// plus one extra vertex to close the loop. The start offsets are
		// recorded so renderRing can address each ring individually.
		view.xRingVerticesStart = len(vertices)
		for angle := 0.0; angle < float64(toRad(360.0)); angle += float64(toRad(4.0)) {
			vertices = append(vertices, 0.0, float32(radius*math.Cos(angle)), float32(radius*math.Sin(angle)))
		}
		vertices = append(vertices, 0.0, float32(radius*math.Cos(0)), float32(radius*math.Sin(0)))
		view.yRingVerticesStart = len(vertices)
		for angle := 0.0; angle < float64(toRad(360.0)); angle += float64(toRad(4.0)) {
			vertices = append(vertices, float32(radius*math.Cos(angle)), 0.0, float32(radius*math.Sin(angle)))
		}
		vertices = append(vertices, float32(radius*math.Cos(0)), 0.0, float32(radius*math.Sin(0)))
		view.zRingVerticesStart = len(vertices)
		for angle := 0.0; angle < float64(toRad(360.0)); angle += float64(toRad(4.0)) {
			vertices = append(vertices, float32(radius*math.Cos(angle)), float32(radius*math.Sin(angle)), 0.0)
		}
		vertices = append(vertices, float32(radius*math.Cos(0)), float32(radius*math.Sin(0)), 0.0)
		view.vertices = len(vertices)
		gl.BufferData(opengl.ARRAY_BUFFER, len(vertices)*4, vertices, opengl.STATIC_DRAW)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	}
	view.vao.WithSetter(func(gl opengl.OpenGL) {
		gl.EnableVertexAttribArray(uint32(view.vertexPositionAttrib))
		gl.BindBuffer(opengl.ARRAY_BUFFER, view.vertexPositionBuffer)
		gl.VertexAttribOffset(uint32(view.vertexPositionAttrib), 3, opengl.FLOAT, false, 0, 0)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	})
	return view
}
// Dispose cleans up the GL resources owned by the view: the vertex array
// object, the vertex buffer and the shader program, in that order.
func (view *OrientationView) Dispose() {
	view.vao.Dispose()
	view.context.OpenGL.DeleteBuffers([]uint32{view.vertexPositionBuffer})
	view.context.OpenGL.DeleteProgram(view.program)
}
func toRad(degree float32) float32 {
return (degree * math.Pi * 2.0) / 360.0
}
// Render renders the orientation view for given orientation.
// orientation holds per-axis angles in degrees; each angle rotates about an
// axis scaled by the corresponding component of view.rotation.
// NOTE(review): per the type comment, the combined three-angle rotation is
// suspected to be wrong — confirm the rotation order against the engine.
func (view *OrientationView) Render(orientation mgl.Vec3) {
	gl := view.context.OpenGL
	view.vao.OnShader(func() {
		view.projectionMatrixUniform.Set(gl, &view.context.ProjectionMatrix)
		view.viewMatrixUniform.Set(gl, view.context.ViewMatrix)
		// Draw order: far ring halves, then the arrow, then the near ring
		// halves — layering the arrow between the two ring passes.
		view.renderRings(false)
		view.renderArrow(
			mgl.Ident4().
				Mul4(mgl.HomogRotate3D(toRad(orientation[0]), mgl.Vec3{view.rotation[0], 0.0, 0.0})).
				Mul4(mgl.HomogRotate3D(toRad(orientation[1]), mgl.Vec3{0.0, view.rotation[1], 0.0})).
				Mul4(mgl.HomogRotate3D(toRad(orientation[2]), mgl.Vec3{0.0, 0.0, view.rotation[2]})))
		view.renderRings(true)
	})
}
// renderRings draws the three rotation rings. With front == true only the
// foreground (near-side) half of each ring is visible; with front == false
// only the background half — the other half is drawn fully transparent.
func (view *OrientationView) renderRings(front bool) {
	hidden := [4]float32{0.0, 0.0, 0.0, 0.0}
	fore := func(color [4]float32) [4]float32 {
		if front {
			return color
		}
		return hidden
	}
	back := func(color [4]float32) [4]float32 {
		if front {
			return hidden
		}
		return color
	}
	// Z-rotation ring (blue)
	view.renderRing(view.zRingVerticesStart, view.vertices,
		fore([4]float32{0.0, 0.0, 1.0, 1.0}), back([4]float32{0.0, 0.0, 0.8, 0.7}))
	// Y-rotation ring (green)
	view.renderRing(view.yRingVerticesStart, view.zRingVerticesStart,
		fore([4]float32{0.0, 1.0, 0.0, 1.0}), back([4]float32{0.0, 0.8, 0.0, 0.7}))
	// X-rotation ring (red)
	view.renderRing(view.xRingVerticesStart, view.yRingVerticesStart,
		fore([4]float32{1.0, 0.0, 0.0, 1.0}), back([4]float32{0.8, 0.0, 0.0, 0.7}))
}
// renderRing draws the vertex range [start, end) — offsets in float32s into
// the packed vertex buffer, hence the division by 3 — as line segments.
// Only the base orientation is applied: the rings do not rotate with the
// displayed object.
func (view *OrientationView) renderRing(start, end int, foregroundColor, backgroundColor [4]float32) {
	gl := view.context.OpenGL
	modelMatrix := mgl.Ident4().Mul4(view.baseOrientation)
	view.foregroundColorUniform.Set(gl, &foregroundColor)
	view.backgroundColorUniform.Set(gl, &backgroundColor)
	view.modelMatrixUniform.Set(gl, &modelMatrix)
	gl.DrawArrays(opengl.LINES, int32(start)/3, int32(end-start)/3)
}
func (view *OrientationView) renderArrow(rotation mgl.Mat4) {
gl := view.context.OpenGL
modelMatrix := mgl.Ident4().Mul4(view.baseOrientation).Mul4(rotation)
view.foregroundColorUniform.Set(gl, &[4]float32{0.0, 0.0, 1.0, 1.0})
view.backgroundColorUniform.Set(gl, &[4]float32{0.0, 0.0, 0.6, 1.0})
view.modelMatrixUniform.Set(gl, &modelMatrix)
gl.DrawArrays(opengl.LINES, 4, 2)
modelMatrix = mgl.Ident4().Mul4(view.baseOrientation).Mul4(rotation)
view.foregroundColorUniform.Set(gl, &[4]float32{0.0, 1.0, 0.0, 1.0})
view.backgroundColorUniform.Set(gl, &[4]float32{0.0, 0.6, 0.0, 1.0})
view.modelMatrixUniform.Set(gl, &modelMatrix)
gl.DrawArrays(opengl.LINES, 2, 2)
modelMatrix = mgl.Ident4().Mul4(view.baseOrientation).Mul4(rotation)
view.foregroundColorUniform.Set(gl, &[4]float32{1.0, 0.0, 0.0, 1.0})
view.backgroundColorUniform.Set(gl, &[4]float32{0.6, 0.0, 0.0, 1.0})
view.modelMatrixUniform.Set(gl, &modelMatrix)
gl.DrawArrays(opengl.LINES, 0, 2)
} | editor/render/OrientationView.go | 0.809765 | 0.629945 | OrientationView.go | starcoder |
package graph
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// AccessReviewInstance models one occurrence of an access review
// (Microsoft Graph entity; fields map to the wire schema).
type AccessReviewInstance struct {
	Entity
	// Returns the collection of reviewers who were contacted to complete this review. While the reviewers and fallbackReviewers properties of the accessReviewScheduleDefinition might specify group owners or managers as reviewers, contactedReviewers returns their individual identities. Supports $select. Read-only.
	contactedReviewers []AccessReviewReviewer;
	// Each principal reviewed in an accessReviewInstance has a decision item representing if they were approved, denied, or not yet reviewed.
	decisions []AccessReviewInstanceDecisionItem;
	// DateTime when review instance is scheduled to end.The DatetimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
	endDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time;
	// This collection of reviewer scopes is used to define the list of fallback reviewers. These fallback reviewers will be notified to take action if no users are found from the list of reviewers specified. This could occur when either the group owner is specified as the reviewer but the group owner does not exist, or manager is specified as reviewer but a user's manager does not exist. Supports $select.
	fallbackReviewers []AccessReviewReviewerScope;
	// This collection of access review scopes is used to define who the reviewers are. Supports $select. For examples of options for assigning reviewers, see Assign reviewers to your access review definition using the Microsoft Graph API.
	reviewers []AccessReviewReviewerScope;
	// Created based on scope and instanceEnumerationScope at the accessReviewScheduleDefinition level. Defines the scope of users reviewed in a group. Supports $select and $filter (contains only). Read-only.
	scope *AccessReviewScope;
	// DateTime when review instance is scheduled to start. May be in the future. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
	startDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time;
	// Specifies the status of an accessReview. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, and AutoReviewed. Supports $select, $orderby, and $filter (eq only). Read-only.
	status *string;
}
// NewAccessReviewInstance instantiates a new accessReviewInstance and sets the default values.
func NewAccessReviewInstance()(*AccessReviewInstance) {
    return &AccessReviewInstance{
        Entity: *NewEntity(),
    }
}
// GetContactedReviewers gets the contactedReviewers property value. Returns the collection of reviewers who were contacted to complete this review. While the reviewers and fallbackReviewers properties of the accessReviewScheduleDefinition might specify group owners or managers as reviewers, contactedReviewers returns their individual identities. Supports $select. Read-only.
func (m *AccessReviewInstance) GetContactedReviewers()([]AccessReviewReviewer) {
    // Nil-receiver guard with early return; no else after a terminating branch.
    if m == nil {
        return nil
    }
    return m.contactedReviewers
}
// GetDecisions gets the decisions property value. Each principal reviewed in an accessReviewInstance has a decision item representing if they were approved, denied, or not yet reviewed.
func (m *AccessReviewInstance) GetDecisions()([]AccessReviewInstanceDecisionItem) {
    if m == nil {
        return nil
    }
    return m.decisions
}
// GetEndDateTime gets the endDateTime property value. DateTime when review instance is scheduled to end. ISO 8601 format, always in UTC (e.g. 2014-01-01T00:00:00Z). Supports $select. Read-only.
func (m *AccessReviewInstance) GetEndDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.endDateTime
}
// GetFallbackReviewers gets the fallbackReviewers property value. Reviewer scopes used to define the list of fallback reviewers, who are notified when no users are found from the list of reviewers specified (e.g. missing group owner or manager). Supports $select.
func (m *AccessReviewInstance) GetFallbackReviewers()([]AccessReviewReviewerScope) {
    if m == nil {
        return nil
    }
    return m.fallbackReviewers
}
// GetReviewers gets the reviewers property value. Access review scopes used to define who the reviewers are. Supports $select.
func (m *AccessReviewInstance) GetReviewers()([]AccessReviewReviewerScope) {
    if m == nil {
        return nil
    }
    return m.reviewers
}
// GetScope gets the scope property value. Created based on scope and instanceEnumerationScope at the accessReviewScheduleDefinition level. Defines the scope of users reviewed in a group. Supports $select and $filter (contains only). Read-only.
func (m *AccessReviewInstance) GetScope()(*AccessReviewScope) {
    if m == nil {
        return nil
    }
    return m.scope
}
// GetStartDateTime gets the startDateTime property value. DateTime when review instance is scheduled to start; may be in the future. ISO 8601 format, always in UTC. Supports $select. Read-only.
func (m *AccessReviewInstance) GetStartDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.startDateTime
}
// GetStatus gets the status property value. Status of the accessReview. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, AutoReviewed. Supports $select, $orderby, and $filter (eq only). Read-only.
func (m *AccessReviewInstance) GetStatus()(*string) {
    if m == nil {
        return nil
    }
    return m.status
}
// GetFieldDeserializers returns the deserialization functions for this model,
// keyed by JSON property name. It extends the base Entity deserializers with
// the accessReviewInstance-specific properties; each closure parses one field
// from the ParseNode and stores it via the corresponding setter.
func (m *AccessReviewInstance) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    // Object collections are parsed as []Parsable and converted element-by-element
    // into concrete value slices before being stored.
    res["contactedReviewers"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewAccessReviewReviewer() })
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewer, len(val))
            for i, v := range val {
                res[i] = *(v.(*AccessReviewReviewer))
            }
            m.SetContactedReviewers(res)
        }
        return nil
    }
    res["decisions"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewAccessReviewInstanceDecisionItem() })
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewInstanceDecisionItem, len(val))
            for i, v := range val {
                res[i] = *(v.(*AccessReviewInstanceDecisionItem))
            }
            m.SetDecisions(res)
        }
        return nil
    }
    // Scalar fields: a nil parsed value leaves the model untouched.
    res["endDateTime"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetEndDateTime(val)
        }
        return nil
    }
    res["fallbackReviewers"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewAccessReviewReviewerScope() })
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewerScope, len(val))
            for i, v := range val {
                res[i] = *(v.(*AccessReviewReviewerScope))
            }
            m.SetFallbackReviewers(res)
        }
        return nil
    }
    res["reviewers"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewAccessReviewReviewerScope() })
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessReviewReviewerScope, len(val))
            for i, v := range val {
                res[i] = *(v.(*AccessReviewReviewerScope))
            }
            m.SetReviewers(res)
        }
        return nil
    }
    res["scope"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewAccessReviewScope() })
        if err != nil {
            return err
        }
        if val != nil {
            m.SetScope(val.(*AccessReviewScope))
        }
        return nil
    }
    res["startDateTime"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetStartDateTime(val)
        }
        return nil
    }
    res["status"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetStatus(val)
        }
        return nil
    }
    return res
}
// IsNil reports whether the receiver pointer is nil.
func (m *AccessReviewInstance) IsNil()(bool) {
    isNil := m == nil
    return isNil
}
// Serialize writes the current object to the given SerializationWriter:
// first the base Entity properties, then each accessReviewInstance property.
// Collection properties are skipped when nil; scalar properties are written
// unconditionally (the writer is expected to handle nil values).
func (m *AccessReviewInstance) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    if m.GetContactedReviewers() != nil {
        cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetContactedReviewers()))
        for i, v := range m.GetContactedReviewers() {
            // temp copies the loop variable so &temp is a distinct, stable
            // address per element (the slice holds values, not pointers).
            temp := v
            cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp)
        }
        err = writer.WriteCollectionOfObjectValues("contactedReviewers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetDecisions() != nil {
        cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetDecisions()))
        for i, v := range m.GetDecisions() {
            temp := v
            cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp)
        }
        err = writer.WriteCollectionOfObjectValues("decisions", cast)
        if err != nil {
            return err
        }
    }
    // Bare blocks below are generator-emitted scoping blocks for scalar writes.
    {
        err = writer.WriteTimeValue("endDateTime", m.GetEndDateTime())
        if err != nil {
            return err
        }
    }
    if m.GetFallbackReviewers() != nil {
        cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetFallbackReviewers()))
        for i, v := range m.GetFallbackReviewers() {
            temp := v
            cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp)
        }
        err = writer.WriteCollectionOfObjectValues("fallbackReviewers", cast)
        if err != nil {
            return err
        }
    }
    if m.GetReviewers() != nil {
        cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetReviewers()))
        for i, v := range m.GetReviewers() {
            temp := v
            cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp)
        }
        err = writer.WriteCollectionOfObjectValues("reviewers", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("scope", m.GetScope())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteTimeValue("startDateTime", m.GetStartDateTime())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("status", m.GetStatus())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetContactedReviewers sets the contactedReviewers property value. Returns the collection of reviewers who were contacted to complete this review. While the reviewers and fallbackReviewers properties of the accessReviewScheduleDefinition might specify group owners or managers as reviewers, contactedReviewers returns their individual identities. Supports $select. Read-only.
func (m *AccessReviewInstance) SetContactedReviewers(value []AccessReviewReviewer)() {
if m != nil {
m.contactedReviewers = value
}
}
// SetDecisions sets the decisions property value. Each principal reviewed in an accessReviewInstance has a decision item representing if they were approved, denied, or not yet reviewed.
func (m *AccessReviewInstance) SetDecisions(value []AccessReviewInstanceDecisionItem)() {
if m != nil {
m.decisions = value
}
}
// SetEndDateTime sets the endDateTime property value. DateTime when review instance is scheduled to end.The DatetimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
func (m *AccessReviewInstance) SetEndDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.endDateTime = value
}
}
// SetFallbackReviewers sets the fallbackReviewers property value. This collection of reviewer scopes is used to define the list of fallback reviewers. These fallback reviewers will be notified to take action if no users are found from the list of reviewers specified. This could occur when either the group owner is specified as the reviewer but the group owner does not exist, or manager is specified as reviewer but a user's manager does not exist. Supports $select.
func (m *AccessReviewInstance) SetFallbackReviewers(value []AccessReviewReviewerScope)() {
if m != nil {
m.fallbackReviewers = value
}
}
// SetReviewers sets the reviewers property value. This collection of access review scopes is used to define who the reviewers are. Supports $select. For examples of options for assigning reviewers, see Assign reviewers to your access review definition using the Microsoft Graph API.
func (m *AccessReviewInstance) SetReviewers(value []AccessReviewReviewerScope)() {
if m != nil {
m.reviewers = value
}
}
// SetScope sets the scope property value. Created based on scope and instanceEnumerationScope at the accessReviewScheduleDefinition level. Defines the scope of users reviewed in a group. Supports $select and $filter (contains only). Read-only.
func (m *AccessReviewInstance) SetScope(value *AccessReviewScope)() {
if m != nil {
m.scope = value
}
}
// SetStartDateTime sets the startDateTime property value. DateTime when review instance is scheduled to start. May be in the future. The DateTimeOffset type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z. Supports $select. Read-only.
func (m *AccessReviewInstance) SetStartDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.startDateTime = value
}
}
// SetStatus sets the status property value. Specifies the status of an accessReview. Possible values: Initializing, NotStarted, Starting, InProgress, Completing, Completed, AutoReviewing, and AutoReviewed. Supports $select, $orderby, and $filter (eq only). Read-only.
func (m *AccessReviewInstance) SetStatus(value *string)() {
if m != nil {
m.status = value
}
} | models/microsoft/graph/access_review_instance.go | 0.727104 | 0.424949 | access_review_instance.go | starcoder |
package travel
import (
"time"
)
// YearDay returns the day of the year (1-366) of the wrapped time.
func (t Travel) YearDay() int {
    return t.t.YearDay()
}
// EnglishMonth returns the English month name, e.g. "January".
func (t Travel) EnglishMonth() string {
    return t.t.Month().String()
}
// EnglishWeekDay returns the English weekday name, e.g. "Monday".
func (t Travel) EnglishWeekDay() string {
    return t.t.Weekday().String()
}
// WeekDayISO returns the weekday as an int (Sunday = 0, per time.Weekday).
func (t Travel) WeekDayISO() int {
    return int(t.t.Weekday())
}
// WeekOfYear returns the ISO 8601 week number of the wrapped time.
func (t Travel) WeekOfYear() int {
    _, week := t.t.ISOWeek()
    return week
}
// Unix returns the time as a Unix timestamp in seconds.
func (t Travel) Unix() int64 {
    return t.t.Unix()
}
// Nano returns the time as a Unix timestamp in nanoseconds.
func (t Travel) Nano() int64 {
    return t.t.UnixNano()
}
// Year returns the year formatted via the package intToYear helper.
func (t Travel) Year() string {
    return intToYear(t.t.Year())
}
// Month returns the month formatted via the package intToMonth helper.
func (t Travel) Month() string {
    return intToMonth(int(t.t.Month()))
}
// Day returns the day of month formatted via the package intToDay helper.
func (t Travel) Day() string {
    return intToDay(t.t.Day())
}
// Hour returns the hour formatted via the package intToHour helper.
func (t Travel) Hour() string {
    return intToHour(t.t.Hour())
}
// Minute returns the minute formatted via the package intToMinute helper.
func (t Travel) Minute() string {
    return intToMinute(t.t.Minute())
}
// Second returns the second formatted via the package intToSecond helper.
func (t Travel) Second() string {
    return intToSecond(t.t.Second())
}
// Milli returns the millisecond component (0-999) of the wrapped time.
func (t Travel) Milli() int {
    return t.t.Nanosecond() / 1000000
}
// Micro returns the microsecond component (0-999999) of the wrapped time.
func (t Travel) Micro() int {
    return t.t.Nanosecond() / 1000
}
// Format renders the wrapped time in the given layout, converted to t.Local.
// NOTE(review): when Local is UTC the code falls back to Asia/Shanghai.
// SetLocation is invoked on the value receiver's copy, so the override only
// affects this single call — confirm SetLocation mutates t.Local in place.
func (t Travel) Format(format string) string {
    if t.Local.String() == "UTC" {
        t.SetLocation("Asia/Shanghai")
    }
    return t.t.In(t.Local).Format(format)
}
// ToDateFormat renders the date portion as "2006-01-02" in t.Local,
// with the same UTC -> Asia/Shanghai fallback as Format.
func (t Travel) ToDateFormat() string {
    if t.Local.String() == "UTC" {
        t.SetLocation("Asia/Shanghai")
    }
    return t.t.In(t.Local).Format("2006-01-02")
}
// ToDateTimeFormat renders the time using the package-level TimeLayout
// (defined elsewhere in the package) in t.Local, with the same fallback.
func (t Travel) ToDateTimeFormat() string {
    if t.Local.String() == "UTC" {
        t.SetLocation("Asia/Shanghai")
    }
    return t.t.In(t.Local).Format(TimeLayout)
}
// StartOfYear returns a Travel set to Jan 1 00:00:00 of the wrapped time's year.
// Note: the boundary is built by re-parsing a formatted string, so any
// sub-second precision is dropped and ParseInLocation errors are ignored.
func (t Travel) StartOfYear() Travel {
    tm, _ := time.ParseInLocation(TimeLayout, t.Year()+"-01-01 00:00:00", t.t.Location())
    t.t = tm
    return t
}
// EndOfYear returns a Travel set to Dec 31 23:59:59 of the wrapped time's year.
func (t Travel) EndOfYear() Travel {
    tm, _ := time.ParseInLocation(TimeLayout, t.Year()+"-12-31 23:59:59", t.t.Location())
    t.t = tm
    return t
}
// StartOfMonth returns a Travel set to the 1st of the month at 00:00:00.
func (t Travel) StartOfMonth() Travel {
    tm, _ := time.ParseInLocation(TimeLayout, t.Year()+"-"+t.Month()+"-01 00:00:00", t.t.Location())
    t.t = tm
    return t
}
// EndOfMonth returns a Travel set to the last day of the month at 23:59:59.
// It computes the first day of the next month at 23:59:59 and steps back one
// day, which handles varying month lengths without a day-count table.
func (t Travel) EndOfMonth() Travel {
    t1 := t.AddMonth().StartOfMonth()
    tm, _ := time.ParseInLocation(TimeLayout, t1.Year()+"-"+t1.Month()+"-"+t1.Day()+" 23:59:59", t.t.Location())
    t.t = tm.AddDate(0, 0, -1)
    return t
}
// StartOfDay returns a Travel set to 00:00:00 of the wrapped time's day.
func (t Travel) StartOfDay() Travel {
    tm, _ := time.ParseInLocation(TimeLayout, t.Year()+"-"+t.Month()+"-"+t.Day()+" 00:00:00", t.t.Location())
    t.t = tm
    return t
}
// EndOfDay returns a Travel set to 23:59:59 of the wrapped time's day.
func (t Travel) EndOfDay() Travel {
    tm, _ := time.ParseInLocation(TimeLayout, t.Year()+"-"+t.Month()+"-"+t.Day()+" 23:59:59", t.t.Location())
    t.t = tm
    return t
}
// StartOfWeek returns a Travel set to Monday 00:00:00 of the wrapped time's
// week, computed by offsetting from the WeekDayISO value (Sunday = 0).
func (t Travel) StartOfWeek() Travel {
    day := 1 - t.WeekDayISO()
    n := t.t.AddDate(0, 0, day)
    tm, _ := time.ParseInLocation(TimeLayout, intToYear(n.Year())+"-"+intToMonth(int(n.Month()))+"-"+intToDay(n.Day())+" 00:00:00", t.t.Location())
    t.t = tm
    return t
}
// EndOfWeek returns a Travel set to Sunday 23:59:59 of the wrapped time's week
// (day offset 7 - WeekDayISO, so a Sunday input jumps to the following Sunday —
// NOTE(review): confirm this off-by-week behavior on Sundays is intended).
func (t Travel) EndOfWeek() Travel {
    day := 7 - t.WeekDayISO()
    n := t.t.AddDate(0, 0, day)
    tm, _ := time.ParseInLocation(TimeLayout, intToYear(n.Year())+"-"+intToMonth(int(n.Month()))+"-"+intToDay(n.Day())+" 23:59:59", t.t.Location())
    t.t = tm
    return t
}
func (t Travel) Clone() Travel {
return t
}
func (t Travel) ToTime() time.Time {
return t.t
} | format.go | 0.504883 | 0.677657 | format.go | starcoder |
package num
import (
"database/sql/driver"
"encoding/json"
"fmt"
"regexp"
"strconv"
)
// Number wraps an arbitrary value and provides numeric conversion helpers.
type Number struct {
	value interface{} // underlying value; nil when unset
}
// Make creates a Number with no underlying value set.
func Make() *Number {
	return Of(nil)
}
// Of wraps the given value in a new Number.
func Of(value interface{}) *Number {
	n := Number{value: value}
	return &n
}
// Set replaces the stored value with <value> and returns the previous value.
func (n *Number) Set(value interface{}) (old interface{}) {
	old, n.value = n.value, value
	return old
}
// ToFixed formats the value as a decimal string with the given number of
// decimal places (e.g. places=2 yields "1.50").
func (n Number) ToFixed(places int) string {
	return fmt.Sprintf(fmt.Sprintf("%%.%df", places), n.Float64())
}
// Float is an alias of Float64.
func (n Number) Float() float64 {
	return n.Float64()
}
// Float64 converts and returns the value as float64.
// Known float/complex types are converted directly (complex values collapse to
// their real part); any other value is formatted with %v and parsed, so a
// value that does not parse as a float (e.g. a bool or arbitrary string)
// makes this method panic.
func (n Number) Float64() float64 {
	if n.value == nil {
		return 0.0
	}
	switch n.value.(type) {
	case float64:
		return n.value.(float64)
	case float32:
		return float64(n.value.(float32))
	case complex128:
		return real(n.value.(complex128))
	case complex64:
		return float64(real(n.value.(complex64)))
	}
	value, err := strconv.ParseFloat(fmt.Sprintf("%v", n.value), 64)
	if err != nil {
		panic(err.Error())
	}
	return value
}
// Float32 converts and returns the value as float32.
// float64/float32 are converted directly; any other value is formatted with
// %v and parsed with 32-bit precision, panicking if it does not parse.
// Note: unlike Float64, complex values are not special-cased here and fall
// through to the parse path.
func (n Number) Float32() float32 {
	if n.value == nil {
		return 0.0
	}
	switch n.value.(type) {
	case float64:
		return float32(n.value.(float64))
	case float32:
		return n.value.(float32)
	}
	value, err := strconv.ParseFloat(fmt.Sprintf("%v", n.value), 32)
	if err != nil {
		panic(err.Error())
	}
	return float32(value)
}
// Complex is an alias of Complex128.
func (n Number) Complex() complex128 {
	return n.Complex128()
}
// Complex128 converts and returns the value as complex128.
// complex128/complex64 values are returned directly. Strings are matched
// against two formats: "a+bi" (optionally wrapped in spaces/parens) and
// "(a,b)". Anything else falls back to a real-only complex built from
// Float64 (which may panic on unparseable input).
func (n Number) Complex128() complex128 {
	value, ok := n.value.(complex128)
	if ok {
		return value
	}
	value64, ok := n.value.(complex64)
	if ok {
		return complex128(value64)
	}
	valueStr, ok := n.value.(string)
	if ok {
		// Form "1.56+2.48i"; leading/trailing spaces and parens are tolerated.
		re := regexp.MustCompile(`[ \()]*([0-9\.]+)[ ]*\+[ ]*([0-9\.]+)i[ \)]*`)
		matched := re.FindStringSubmatch(valueStr)
		if len(matched) > 0 {
			return complex(Of(matched[1]).Float64(), Of(matched[2]).Float64())
		}
		// Form "(1.56,2.48)" — real and imaginary parts as a pair.
		re = regexp.MustCompile(`\([ ]*([0-9\.]+)[ ]*,[ ]*([0-9\.]+)[ ]*\)`)
		matched = re.FindStringSubmatch(valueStr)
		if len(matched) > 0 {
			return complex(Of(matched[1]).Float64(), Of(matched[2]).Float64())
		}
	}
	return complex(n.Float64(), 0)
}
// Complex64 converts and returns the value as complex64, mirroring
// Complex128 but with 32-bit component parsing; see Complex128 for the
// accepted string formats.
func (n Number) Complex64() complex64 {
	value, ok := n.value.(complex64)
	if ok {
		return value
	}
	value128, ok := n.value.(complex128)
	if ok {
		return complex64(value128)
	}
	valueStr, ok := n.value.(string)
	if ok {
		// Form "1.56+2.48i".
		re := regexp.MustCompile(`[ \()]*([0-9\.]+)[ ]*\+[ ]*([0-9\.]+)i[ \)]*`)
		matched := re.FindStringSubmatch(valueStr)
		if len(matched) > 0 {
			return complex(Of(matched[1]).Float32(), Of(matched[2]).Float32())
		}
		// Form "(1.56,2.48)".
		re = regexp.MustCompile(`\([ ]*([0-9\.]+)[ ]*,[ ]*([0-9\.]+)[ ]*\)`)
		matched = re.FindStringSubmatch(valueStr)
		if len(matched) > 0 {
			return complex(Of(matched[1]).Float32(), Of(matched[2]).Float32())
		}
	}
	return complex(n.Float32(), 0.0)
}
// Int64 returns the value as int64; non-int64 values go through Int
// (which rounds the float representation to the nearest integer).
func (n Number) Int64() int64 {
	if v, ok := n.value.(int64); ok {
		return v
	}
	return int64(n.Int())
}
// Int32 returns the value as int32; non-int32 values go through Int.
func (n Number) Int32() int32 {
	if v, ok := n.value.(int32); ok {
		return v
	}
	return int32(n.Int())
}
// Int16 returns the value as int16; non-int16 values go through Int.
func (n Number) Int16() int16 {
	if v, ok := n.value.(int16); ok {
		return v
	}
	return int16(n.Int())
}
// Int8 returns the value as int8; non-int8 values go through Int.
func (n Number) Int8() int8 {
	if v, ok := n.value.(int8); ok {
		return v
	}
	return int8(n.Int())
}
// Int returns the value as int. A stored int is returned directly; anything
// else is converted via Float64 and rounded to the nearest integer
// (ties to even, per the %.0f format).
func (n Number) Int() int {
	if n.value == nil {
		return 0
	}
	if v, ok := n.value.(int); ok {
		return v
	}
	rounded, _ := strconv.Atoi(fmt.Sprintf("%.0f", n.Float64()))
	return rounded
}
// Uint64 returns the value as uint64; non-uint64 values go through Int,
// so negative inputs wrap per Go integer conversion rules.
func (n Number) Uint64() uint64 {
	if v, ok := n.value.(uint64); ok {
		return v
	}
	return uint64(n.Int())
}
// Uint32 returns the value as uint32; non-uint32 values go through Int.
func (n Number) Uint32() uint32 {
	if v, ok := n.value.(uint32); ok {
		return v
	}
	return uint32(n.Int())
}
// Uint16 returns the value as uint16; non-uint16 values go through Int.
func (n Number) Uint16() uint16 {
	if v, ok := n.value.(uint16); ok {
		return v
	}
	return uint16(n.Int())
}
// Uint8 returns the value as uint8; non-uint8 values go through Int.
func (n Number) Uint8() uint8 {
	if v, ok := n.value.(uint8); ok {
		return v
	}
	return uint8(n.Int())
}
// Uint returns the value as uint; non-uint values go through Int.
func (n Number) Uint() uint {
	if v, ok := n.value.(uint); ok {
		return v
	}
	return uint(n.Int())
}
// Uintptr returns the value as uintptr; non-uintptr values go through Int.
func (n Number) Uintptr() uintptr {
	if v, ok := n.value.(uintptr); ok {
		return v
	}
	return uintptr(n.Int())
}
// IsSet reports whether an underlying value is present (non-nil).
func (n Number) IsSet() bool {
	return !n.IsNil()
}
// IsNil reports whether the underlying value is nil.
func (n Number) IsNil() bool {
	return n.value == nil
}
// IsInt reports whether the underlying value has a built-in signed or
// unsigned integer type.
func (n Number) IsInt() bool {
	switch n.value.(type) {
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		return true
	}
	return false
}
// IsFloat reports whether the underlying value is float32 or float64.
func (n Number) IsFloat() bool {
	switch n.value.(type) {
	case float32, float64:
		return true
	}
	return false
}
// IsComplex reports whether the underlying value is complex64 or complex128.
func (n Number) IsComplex() bool {
	switch n.value.(type) {
	case complex64, complex128:
		return true
	}
	return false
}
// Scan implements the sql.Scanner interface; it replaces the receiver's
// entire value with whatever the database driver supplies.
func (n *Number) Scan(src interface{}) error {
	*n = *Of(src)
	return nil
}
// Value implements the driver.Valuer interface, returning the raw
// underlying value unchanged.
func (n *Number) Value() (driver.Value, error) {
	return n.value, nil
}
// MarshalJSON encodes the underlying value directly.
func (n *Number) MarshalJSON() ([]byte, error) {
	return json.Marshal(n.value)
}
// UnmarshalJSON decodes JSON into the Number.
// NOTE(review): this only accepts JSON numbers (decoded as float64); JSON
// strings/objects return an error, and integers beyond float64 precision
// lose exactness — confirm this asymmetry with MarshalJSON is intended.
func (n *Number) UnmarshalJSON(data []byte) error {
	var v float64
	err := json.Unmarshal(data, &v)
	if err != nil {
		return err
	}
	*n = *Of(v)
	return nil
}
package neuralnet
import (
"fmt"
"math"
)
// SingleLayerNN is a feed-forward neural network with one hidden layer.
type SingleLayerNN struct {
	structure *NNStructure // layer sizes and activation/error functions
	wts       WeightVector // all weights packed into one flat vector (see dm/mk)
}
// PackedWts returns the network's flat weight vector.
func (nn *SingleLayerNN) PackedWts() []float64 {
	return nn.wts
}
// ExpectedPackedWeightsCount returns how many weights the structure requires.
func (nn *SingleLayerNN) ExpectedPackedWeightsCount() int {
	return nn.structure.ExpectedPackedWeightsCount()
}
// dm returns the packed-vector index of the first-layer weight connecting
// input dimension d to hidden unit m. First-layer weights occupy the leading
// D*M[0] entries, with M[0] consecutive weights per input dimension.
// Panics on out-of-range indexes.
func (nn *SingleLayerNN) dm(d int, m int) int {
	if !(d < nn.structure.D && m < nn.structure.M[0]) {
		panic(fmt.Sprintf("invalid indexes %d %d", d, m))
	}
	return m + d*nn.structure.M[0]
}
// mk returns the packed-vector index of the second-layer weight connecting
// hidden unit m to output k. Second-layer weights start after the D*M[0]
// first-layer block, with M[0] consecutive weights per output unit.
// Panics on out-of-range indexes.
func (nn *SingleLayerNN) mk(m int, k int) int {
	if !(m < nn.structure.M[0] && k < nn.structure.K) {
		panic(fmt.Sprintf("invalid indexes %d %d", m, k))
	}
	return nn.structure.M[0]*nn.structure.D + m + k*nn.structure.M[0]
}
// a_j computes the hidden layer's pre-activation values for input x;
// panics if x does not have exactly D components.
func (nn *SingleLayerNN) a_j(x XVector) []float64 {
	if len(x) != nn.structure.D {
		panic(fmt.Sprintf("invalid length of x: %d != %d", len(x), nn.structure.D))
	}
	activations := make([]float64, nn.structure.M[0])
	for j := range activations {
		for i, xi := range x {
			activations[j] += nn.wts[nn.dm(i, j)] * xi
		}
	}
	return activations
}
// mapOverVector applies f element-wise to vs and returns the new vector.
func mapOverVector(vs []float64, f func(float64) float64) []float64 {
	out := make([]float64, len(vs))
	for i := range vs {
		out[i] = f(vs[i])
	}
	return out
}
// z_j applies the hidden-layer activation H element-wise to the
// pre-activations a_j.
func (nn *SingleLayerNN) z_j(a_j []float64) []float64 {
	return mapOverVector(a_j, nn.structure.H)
}
// a_k computes the output layer's pre-activation values from the hidden
// activations z_j using the second-layer weights.
func (nn *SingleLayerNN) a_k(z_j []float64) []float64 {
	out := make([]float64, nn.structure.K)
	for k := range out {
		for j := range z_j {
			out[k] += nn.wts[nn.mk(j, k)] * (z_j)[j]
		}
	}
	return out
}
// z_k applies the output activation Sigma element-wise to the output
// pre-activations a_k.
func (nn *SingleLayerNN) z_k(a_k []float64) []float64 {
	return mapOverVector(a_k, nn.structure.Sigma)
}
// Hidden returns the hidden-layer activations for input x.
func (nn *SingleLayerNN) Hidden(x XVector) []float64 {
	return nn.z_j(nn.a_j(x))
}
// Predict runs a full forward pass and returns the network output for x.
func (nn *SingleLayerNN) Predict(x XVector) YVector {
	return nn.z_k(nn.a_k(nn.z_j(nn.a_j(x))))
}
// ErfValue evaluates the structure's error function for input x against
// target t; panics if t does not have exactly K components.
func (nn *SingleLayerNN) ErfValue(x XVector, t YVector) float64 {
	if len(t) != nn.structure.K {
		panic(fmt.Sprintf("invalid length of t: %d != %d", len(t), nn.structure.K))
	}
	prediction := nn.Predict(x)
	return nn.structure.ErrorFunction(prediction, t)
}
// Gradient computes the error gradient with respect to every packed weight
// for a single training example (x, t) via backpropagation.
func (nn *SingleLayerNN) Gradient(x XVector, t YVector) WeightVector {
	gradient := make([]float64, nn.structure.ExpectedPackedWeightsCount())
	// Forward pass: keep each intermediate layer for the backward step.
	a_j := nn.a_j(x)
	z_j := nn.z_j(a_j)
	a_k := nn.a_k(z_j)
	y := nn.z_k(a_k)
	// Output deltas simplify to (y - t) assuming the output activation is the
	// canonical link for the error function.
	delta_k := make([]float64, nn.structure.K)
	for k := range delta_k {
		delta_k[k] = y[k] - t[k] // Assuming canonical link function is used...
	}
	// Hidden deltas: backpropagate output deltas through the second-layer
	// weights and scale by the hidden activation's derivative H'(a_j).
	delta_j := make([]float64, nn.structure.M[0])
	for j := range delta_j {
		for k := range delta_k {
			delta_j[j] += nn.wts[nn.mk(j, k)] * delta_k[k]
		}
		delta_j[j] *= nn.structure.H_prim(a_j[j])
	}
	// First-layer gradients: outer product delta_j * x.
	for j, dj := range delta_j {
		for i, xi := range x {
			gradient[nn.dm(i, j)] = dj * xi
		}
	}
	// Second-layer gradients: outer product delta_k * z_j.
	for k, dk := range delta_k {
		for j, zj := range z_j {
			gradient[nn.mk(j, k)] = dk * zj
		}
	}
	return gradient
}
// perturbed returns w + eta*p component-wise, leaving both inputs unchanged.
func perturbed(w []float64, p []float64, eta float64) []float64 {
	out := make([]float64, len(w))
	for i := range out {
		out[i] = w[i] + p[i]*eta
	}
	return out
}
// ssqdiff returns the sum of squared component-wise differences between a
// and b. Squaring via d*d instead of math.Pow(d, 2) avoids the general
// exp/log power path for a fixed integer exponent.
func ssqdiff(a YVector, b YVector) float64 {
	ssq := 0.0
	for i := range a {
		d := a[i] - b[i]
		ssq += d * d
	}
	return ssq
}
func crossentropy(y YVector, t YVector) float64 {
result := 0.0
for i := range t {
result += -(t[i]*math.Log(y[i]) + (1-t[i])*math.Log(1-y[i]))
}
return result
} | src/neuralnet/singlelayer_nn.go | 0.755817 | 0.448607 | singlelayer_nn.go | starcoder |
package util
import (
"fmt"
"math/rand"
"strconv"
"strings"
"time"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
)
const (
	// Kitchen24 is a short time format; like time.Kitchen but with 24-hour notation.
	Kitchen24 = "15:04"
	// YearDay is a time format that just cares about the day and month.
	YearDay = "Jan_2"
	// DefaultBaseAnnotation is the default annotation prefix for configuration overrides.
	DefaultBaseAnnotation = "chaos.alpha.kubernetes.io"
)
// TimePeriod represents a time period with a single beginning and end.
// Only the time-of-day portion of From and To is meaningful; NewTimePeriod
// normalizes both onto day zero via TimeOfDay.
type TimePeriod struct {
	From time.Time // normalized start time of day
	To time.Time // normalized end time of day
}
// NewTimePeriod returns a TimePeriod whose bounds are normalized to the
// time-of-day of the given start and end times.
func NewTimePeriod(from, to time.Time) TimePeriod {
	period := TimePeriod{
		From: TimeOfDay(from),
		To:   TimeOfDay(to),
	}
	return period
}
// Includes returns true iff the given pointInTime's time of day is included
// in time period tp. Endpoints are exclusive (strict After/Before). When
// From is after To the period wraps around midnight; when From equals To
// only that exact instant matches.
func (tp TimePeriod) Includes(pointInTime time.Time) bool {
	isAfter := TimeOfDay(pointInTime).After(tp.From)
	isBefore := TimeOfDay(pointInTime).Before(tp.To)
	if tp.From.Before(tp.To) {
		return isAfter && isBefore
	}
	// Overnight period (e.g. 22:00-06:00): inside if after From OR before To.
	if tp.From.After(tp.To) {
		return isAfter || isBefore
	}
	return TimeOfDay(pointInTime).Equal(tp.From)
}
// String renders tp as "HH:MM-HH:MM" using the Kitchen24 layout.
func (tp TimePeriod) String() string {
	return fmt.Sprintf("%s-%s", tp.From.Format(Kitchen24), tp.To.Format(Kitchen24))
}
// ParseWeekdays takes a comma-separated list of abbreviated weekdays
// (e.g. "sat,sun") and turns them into a slice of time.Weekday. Matching is
// case-insensitive, whitespace is ignored, and unrecognized entries are
// silently dropped.
func ParseWeekdays(weekdays string) []time.Weekday {
	parsed := []time.Weekday{}
	for _, field := range strings.Split(weekdays, ",") {
		switch strings.TrimSpace(strings.ToLower(field)) {
		case "sun":
			parsed = append(parsed, time.Sunday)
		case "mon":
			parsed = append(parsed, time.Monday)
		case "tue":
			parsed = append(parsed, time.Tuesday)
		case "wed":
			parsed = append(parsed, time.Wednesday)
		case "thu":
			parsed = append(parsed, time.Thursday)
		case "fri":
			parsed = append(parsed, time.Friday)
		case "sat":
			parsed = append(parsed, time.Saturday)
		}
	}
	return parsed
}
// ParseTimePeriods takes a comma-separated list of "begin-end" time ranges
// in Kitchen24 format and turns them into a slice of TimePeriods. Whitespace
// is ignored and blank entries are skipped; a malformed range or time yields
// an error.
func ParseTimePeriods(timePeriods string) ([]TimePeriod, error) {
	result := []TimePeriod{}
	for _, entry := range strings.Split(timePeriods, ",") {
		if strings.TrimSpace(entry) == "" {
			continue
		}
		bounds := strings.Split(entry, "-")
		if len(bounds) != 2 {
			return nil, fmt.Errorf("Invalid time range '%v': must contain exactly one '-'", entry)
		}
		begin, err := time.Parse(Kitchen24, strings.TrimSpace(bounds[0]))
		if err != nil {
			return nil, err
		}
		end, err := time.Parse(Kitchen24, strings.TrimSpace(bounds[1]))
		if err != nil {
			return nil, err
		}
		result = append(result, NewTimePeriod(begin, end))
	}
	return result, nil
}
// ParseDays parses a comma-separated list of days in YearDay format
// (e.g. "Apr 1, Sep 24") into time.Time values, skipping blank entries
// and returning an error on the first unparseable day.
func ParseDays(days string) ([]time.Time, error) {
	result := []time.Time{}
	for _, entry := range strings.Split(days, ",") {
		trimmed := strings.TrimSpace(entry)
		if trimmed == "" {
			continue
		}
		parsed, err := time.Parse(YearDay, trimmed)
		if err != nil {
			return nil, err
		}
		result = append(result, parsed)
	}
	return result, nil
}
// ParseFrequency parses a "frequency" annotation of the form
// "<number> / <period>" (e.g. "1/day") and converts it into the chance of
// occurrence within any given interval (e.g. ~0.0007 for 1/day over 1 minute).
// Recognized periods: minute, hour, day, week.
func ParseFrequency(text string, interval time.Duration) (float64, error) {
	parseablePeriods := map[string]time.Duration{
		"minute": 1 * time.Minute,
		"hour":   1 * time.Hour,
		"day":    24 * time.Hour,
		"week":   24 * 7 * time.Hour,
	}
	parts := strings.SplitN(text, "/", 2)
	// Guard against input without a '/': indexing parts[1] would panic.
	if len(parts) != 2 {
		return 0, fmt.Errorf("invalid frequency %q: expected \"<number>/<period>\"", text)
	}
	for i, p := range parts {
		parts[i] = strings.TrimSpace(p)
	}
	frequency, err := strconv.ParseFloat(parts[0], 64)
	if err != nil {
		return 0, err
	}
	period, ok := parseablePeriods[parts[1]]
	if !ok {
		return 0, fmt.Errorf("unknown time period, %v", parts[1])
	}
	// Scale the per-period frequency down to the fraction covered by interval.
	chance := (float64(interval) / float64(period)) * frequency
	return chance, nil
}
// TimeOfDay normalizes the given point in time by returning a time object that represents the same
// time of day of the given time but on the very first day (day 0).
func TimeOfDay(pointInTime time.Time) time.Time {
return time.Date(0, 0, 0, pointInTime.Hour(), pointInTime.Minute(), pointInTime.Second(), pointInTime.Nanosecond(), time.UTC)
}
// FormatDays takes a slice of times and returns the matching slice of
// strings in YearDay format (e.g. ["Apr 1", "Sep 24"]).
func FormatDays(days []time.Time) []string {
	formatted := make([]string, len(days))
	for i, d := range days {
		formatted[i] = d.Format(YearDay)
	}
	return formatted
}
// FormatAnnotation joins an annotation prefix and name with a '/'.
func FormatAnnotation(prefix, name string) string {
	parts := []string{prefix, name}
	return strings.Join(parts, "/")
}
// NewNamespace returns a new namespace instance for testing purposes,
// labeled with env=<name>.
func NewNamespace(name string) v1.Namespace {
	labels := map[string]string{"env": name}
	return v1.Namespace{
		ObjectMeta: metav1.ObjectMeta{
			Name:   name,
			Labels: labels,
		},
	}
}
// RandomPodSubSlice returns a random subslice of up to count pods.
// Note: it shuffles the given slice IN PLACE (using the unseeded global
// math/rand source) and returns a view into it, not a copy.
func RandomPodSubSlice(pods []v1.Pod, count int) []v1.Pod {
	if count > len(pods) {
		count = len(pods)
	}
	rand.Shuffle(len(pods), func(i, j int) { pods[i], pods[j] = pods[j], pods[i] })
	return pods[0:count]
}
// PodBuilder accumulates the attributes of a test pod; use the With* methods
// to customize it and Build to produce the v1.Pod.
type PodBuilder struct {
	Name string // pod name
	Namespace string // pod namespace
	Phase v1.PodPhase // status phase reported by the built pod
	CreationTime *time.Time // optional creation timestamp; nil leaves it unset
	OwnerReference *metav1.OwnerReference // optional single owner reference
	Labels map[string]string // pod labels
	Annotations map[string]string // pod annotations
}
// NewPodBuilder returns a PodBuilder for a running pod in the given
// namespace, pre-populated with a "chaos" annotation and "app" label derived
// from the pod name.
func NewPodBuilder(namespace string, name string) PodBuilder {
	builder := PodBuilder{
		Name:        name,
		Namespace:   namespace,
		Phase:       v1.PodRunning,
		Annotations: map[string]string{"chaos": name},
		Labels:      map[string]string{"app": name},
	}
	return builder
}
// Build materializes the builder into a v1.Pod. Optional fields
// (CreationTime, OwnerReference) are only applied when they were set.
func (b PodBuilder) Build() v1.Pod {
	selfLink := fmt.Sprintf("/api/v1/namespaces/%s/pods/%s", b.Namespace, b.Name)
	pod := v1.Pod{
		TypeMeta: metav1.TypeMeta{
			APIVersion: "v1",
			Kind:       "Pod",
		},
		ObjectMeta: metav1.ObjectMeta{
			Namespace:   b.Namespace,
			Name:        b.Name,
			Labels:      b.Labels,
			Annotations: b.Annotations,
			SelfLink:    selfLink,
		},
		Status: v1.PodStatus{Phase: b.Phase},
	}
	if b.CreationTime != nil {
		pod.ObjectMeta.CreationTimestamp = metav1.Time{Time: *b.CreationTime}
	}
	if b.OwnerReference != nil {
		pod.ObjectMeta.OwnerReferences = []metav1.OwnerReference{*b.OwnerReference}
	}
	return pod
}
// WithPhase returns a copy of the builder with the pod phase set.
func (b PodBuilder) WithPhase(phase v1.PodPhase) PodBuilder {
	b.Phase = phase
	return b
}

// WithCreationTime returns a copy of the builder with the creation
// timestamp set. (Parameter renamed from "time", which shadowed the
// time package inside the method body.)
func (b PodBuilder) WithCreationTime(t time.Time) PodBuilder {
	b.CreationTime = &t
	return b
}

// WithOwnerReference returns a copy of the builder with the owner
// reference set.
func (b PodBuilder) WithOwnerReference(ownerReference metav1.OwnerReference) PodBuilder {
	b.OwnerReference = &ownerReference
	return b
}

// WithOwnerUID returns a copy of the builder with an owner reference
// carrying the given UID and a fixed test kind.
func (b PodBuilder) WithOwnerUID(owner types.UID) PodBuilder {
	b.OwnerReference = &metav1.OwnerReference{UID: owner, Kind: "testkind"}
	return b
}

// WithAnnotations returns a copy of the builder with the annotation map
// replaced (the map is NOT copied; the builder aliases it).
func (b PodBuilder) WithAnnotations(annotations map[string]string) PodBuilder {
	b.Annotations = annotations
	return b
}

// WithLabels returns a copy of the builder with the label map replaced
// (the map is NOT copied; the builder aliases it).
func (b PodBuilder) WithLabels(labels map[string]string) PodBuilder {
	b.Labels = labels
	return b
}
func (b PodBuilder) WithFrequency(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "frequency"}, "/")
b.Annotations[annotation] = text
return b
}
func (b PodBuilder) WithMinimumAge(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "minimum-age"}, "/")
b.Annotations[annotation] = text
return b
}
func (b PodBuilder) WithTimezone(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "timezone"}, "/")
b.Annotations[annotation] = text
return b
}
func (b PodBuilder) WithExcludedWeekdays(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "excluded-weekdays"}, "/")
b.Annotations[annotation] = text
return b
}
func (b PodBuilder) WithExcludedTimesOfDay(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "excluded-times-of-day"}, "/")
b.Annotations[annotation] = text
return b
}
func (b PodBuilder) WithExcludedDaysOfYear(text string) PodBuilder {
annotation := strings.Join([]string{DefaultBaseAnnotation, "excluded-days-of-year"}, "/")
b.Annotations[annotation] = text
return b
} | util/util.go | 0.743634 | 0.511351 | util.go | starcoder |
package main
// Vector3 represents a 3-component vector of unsigned integers.
type Vector3 struct {
	x, y, z uint
}

// Set stores the three components.
// BUG FIX: the setters use pointer receivers; with the previous value
// receivers every mutation was applied to a copy and silently discarded.
func (v *Vector3) Set(x, y, z uint) {
	v.x = x
	v.y = y
	v.z = z
}

// Set8 stores the three components from 8-bit values.
func (v *Vector3) Set8(x, y, z uint8) {
	v.x = uint(x)
	v.y = uint(y)
	v.z = uint(z)
}

// SetByte unpacks a single byte into the three components: bits 4-5 go to
// x, bits 2-3 to y, bits 0-1 to z (the inverse of GetByte). The parameter
// was renamed from "byte", which shadowed the builtin type.
func (v *Vector3) SetByte(value uint8) {
	b := uint(value)
	v.x = b >> 4 & 0x3
	v.y = b >> 2 & 0x3
	v.z = b & 0x3
}

// Get returns the three components.
func (v Vector3) Get() (uint, uint, uint) {
	return v.x, v.y, v.z
}

// Get8 returns the three components truncated to 8 bits.
func (v Vector3) Get8() (uint8, uint8, uint8) {
	return uint8(v.x), uint8(v.y), uint8(v.z)
}

// GetByte keeps the low two bits of each component and packs them into a
// byte as xxyyzz (x in bits 4-5, y in bits 2-3, z in bits 0-1).
func (v Vector3) GetByte() uint8 {
	x := v.x & 0x3
	y := v.y & 0x3
	z := v.z & 0x3
	return uint8(x<<4 | y<<2 | z)
}

// Add returns the component-wise sum of the two vectors.
func (v1 Vector3) Add(v2 Vector3) Vector3 {
	return Vector3{v1.x + v2.x, v1.y + v2.y, v1.z + v2.z}
}

// Sub returns the component-wise difference of the two vectors.
// Components are unsigned, so underflow wraps around.
func (v1 Vector3) Sub(v2 Vector3) Vector3 {
	return Vector3{v1.x - v2.x, v1.y - v2.y, v1.z - v2.z}
}

// Mod returns each component reduced modulo m.
func (v Vector3) Mod(m uint) Vector3 {
	return Vector3{v.x % m, v.y % m, v.z % m}
}

// Mask returns each component bitwise-ANDed with m.
func (v Vector3) Mask(m uint) Vector3 {
	return Vector3{v.x & m, v.y & m, v.z & m}
}

// ShiftL returns each component shifted left by b bits.
func (v Vector3) ShiftL(b uint) Vector3 {
	return Vector3{v.x << b, v.y << b, v.z << b}
}

// ShiftR returns each component shifted right by b bits.
func (v Vector3) ShiftR(b uint) Vector3 {
	return Vector3{v.x >> b, v.y >> b, v.z >> b}
}
/**/
// Byte3 represents a 3-component vector stored in bytes.
type Byte3 struct {
	x, y, z uint8
}

// Set stores the three components, truncating each to 8 bits.
// BUG FIX: the setters use pointer receivers; with the previous value
// receivers every mutation was applied to a copy and silently discarded.
func (v *Byte3) Set(x, y, z uint) {
	v.x = uint8(x)
	v.y = uint8(y)
	v.z = uint8(z)
}

// Set8 stores the three 8-bit components directly.
func (v *Byte3) Set8(x, y, z uint8) {
	v.x = x
	v.y = y
	v.z = z
}

// SetByte unpacks a single byte into the three components: bits 4-5 go to
// x, bits 2-3 to y, bits 0-1 to z (the inverse of GetByte). The parameter
// was renamed from "byte", which shadowed the builtin type.
func (v *Byte3) SetByte(value uint8) {
	v.x = value >> 4 & 0x3
	v.y = value >> 2 & 0x3
	v.z = value & 0x3
}

// Get returns the three components widened to uint.
func (v Byte3) Get() (uint, uint, uint) {
	return uint(v.x), uint(v.y), uint(v.z)
}

// Get8 returns the three 8-bit components.
func (v Byte3) Get8() (uint8, uint8, uint8) {
	return v.x, v.y, v.z
}

// GetByte keeps the low two bits of each component and packs them into a
// byte as xxyyzz (x in bits 4-5, y in bits 2-3, z in bits 0-1).
func (v Byte3) GetByte() uint8 {
	x := v.x & 0x3
	y := v.y & 0x3
	z := v.z & 0x3
	return x<<4 | y<<2 | z
}
/**/
// Bool3 represents three boolean flags packed into the low three bits of a
// byte (x=bit 2, y=bit 1, z=bit 0).
type Bool3 struct {
	x, y, z bool
}

// SetByte unpacks the low three bits of value into the flags.
// BUG FIX: pointer receiver; with the previous value receiver the mutation
// was applied to a copy and silently discarded. The parameter was renamed
// from "byte", which shadowed the builtin type.
func (v *Bool3) SetByte(value uint8) {
	v.x = (value & 0x4) != 0
	v.y = (value & 0x2) != 0
	v.z = (value & 0x1) != 0
}

// GetByte packs the flags back into the low three bits of a byte
// (the inverse of SetByte).
func (v Bool3) GetByte() uint8 {
	b := 0
	if v.x {
		b |= 0x4
	}
	if v.y {
		b |= 0x2
	}
	if v.z {
		b |= 0x1
	}
	return uint8(b)
}
package types
import (
"fmt"
"net"
"reflect"
"sort"
"strings"
"github.com/Sirupsen/logrus"
v1 "k8s.io/api/core/v1"
)
const (
	// v6AddrLabelKey is the node label carrying the node's IPv6 address.
	// The address is stored with ":" replaced by "-" (see Node.IPV6, which
	// reverses that substitution).
	v6AddrLabelKey = "rdei.io/node-addr-v6"
)
// NodesEqual returns a boolean value indicating whether the contents of the
// two passed NodesLists are equivalent. Uses reflect.DeepEqual, so element
// order matters — sort both lists first (see Node.SortConstituents).
// The logger parameter is currently unused; kept for signature stability.
func NodesEqual(a, b NodesList, logger logrus.FieldLogger) bool {
	return reflect.DeepEqual(a, b)
}
// NodeEqual returns a boolean value indicating whether two nodes are EQUAL
// (deep equality over all fields, including endpoint ordering).
func NodeEqual(a, b Node) bool {
	return reflect.DeepEqual(a, b)
}
// NodesList is a sortable array of nodes, ordered by node name.
type NodesList []Node

func (n NodesList) Len() int           { return len(n) }
func (n NodesList) Swap(i, j int)      { n[i], n[j] = n[j], n[i] }
func (n NodesList) Less(i, j int) bool { return n[i].Name < n[j].Name }

// Copy returns a shallow copy of the list (the Node values are copied, but
// any slices/maps inside them are still shared with the original).
func (n NodesList) Copy() NodesList {
	out := make(NodesList, len(n))
	copy(out, n)
	return out
}
// The Node represents the subset of information about a kube node that is
// relevant for the configuration of the ipvs load balancer. Upon instantiation
// it only contains the set of information retrieved from a kube node. Its
// AddEndpointsForConfig([]v1.Endpoints, *clusterConfig) function will add kube
// endpoints in, filtering on the basis of whether they're associated with that
// particular node.
type Node struct {
	Name string `json:"name"`
	Addresses []string `json:"addresses"`
	Unschedulable bool `json:"unschedulable"`
	Ready bool `json:"ready"`
	Labels map[string]string `json:"labels"`
	// an internal type used to extract the v6 address from a nodelabel, set by a boot process
	AddressV6 string
	// addressTotals maps service idents (see MakeIdent) to the cluster-wide
	// endpoint address count; populated by SetTotals.
	addressTotals map[string]int
	// localTotals maps service idents to the endpoint address count local to
	// this node; derived from Endpoints in SetTotals.
	localTotals map[string]int
	Endpoints []Endpoints `json:"endpoints"`
}
// GetLocalServicePropability computes the likelihood that any traffic for the
// service ends up on this particular node, as the ratio of this node's
// endpoint addresses to the cluster-wide total for the service port.
// Returns 0 when SetTotals has not recorded any totals for the ident.
// (The misspelling "Propability" is kept for API compatibility; the logger
// parameter is currently unused.)
func (n *Node) GetLocalServicePropability(namespace, service, portName string, logger logrus.FieldLogger) float64 {
	ident := MakeIdent(namespace, service, portName)
	// logger.Infof("WAT local=%v total=%v", n.localTotals, n.addressTotals)
	if tot, ok := n.addressTotals[ident]; !ok || tot == 0 {
		return 0.0
	} else if _, ok := n.localTotals[ident]; !ok {
		return 0.0
	}
	return float64(n.localTotals[ident]) / float64(n.addressTotals[ident])
}
// SetTotals records the cluster-wide address totals and recomputes this
// node's local per-service address counts from its own Endpoints.
func (n *Node) SetTotals(totals map[string]int) {
	n.addressTotals = totals
	n.localTotals = map[string]int{}
	// ranging over the Endpoints *of this node*
	for _, ep := range n.Endpoints {
		for _, subset := range ep.Subsets {
			for _, port := range subset.Ports {
				ident := MakeIdent(ep.Namespace, ep.Service, port.Name)
				n.localTotals[ident] += len(subset.Addresses)
			}
		}
	}
}
// SortConstituents sorts all the sub-elements of a given node.
// Required for DeepEqual when checking node equality; nodes may actually
// have the same elements, but in a different array order.
func (n *Node) SortConstituents() {
	sort.Strings(n.Addresses) // idiomatic replacement for sort.Sort(sort.StringSlice(...))
	sort.Sort(EndpointsList(n.Endpoints))
	for _, e := range n.Endpoints {
		sort.Sort(Subsets(e.Subsets))
		for _, s := range e.Subsets {
			sort.Sort(Addresses(s.Addresses))
			sort.Sort(Ports(s.Ports))
		}
	}
}
// NewNode builds a Node from a kube node object, extracting its name,
// internal addresses, schedulability, readiness and labels.
func NewNode(kubeNode *v1.Node) Node {
	return Node{
		Name:          kubeNode.Name,
		Addresses:     addresses(kubeNode),
		Unschedulable: kubeNode.Spec.Unschedulable,
		Ready:         isInReadyState(kubeNode),
		Labels:        kubeNode.GetLabels(),
		Endpoints:     []Endpoints{},
	}
}
// IPV4 returns the node's first IPv4 address, or "" if it has none.
// Unparseable addresses yield a nil net.IP, whose To4() is nil, so they
// are skipped safely.
func (n *Node) IPV4() string {
	for _, addr := range n.Addresses {
		if ip := net.ParseIP(addr); ip.To4() != nil {
			return ip.String()
		}
	}
	return ""
}

// IPV6 returns the node's IPv6 address as recorded in the v6 address label
// (where ":" is stored as "-"), or "" when the label is absent.
func (n *Node) IPV6() string {
	if v6Addr, ok := n.Labels[v6AddrLabelKey]; ok {
		// strings.ReplaceAll is the idiomatic form of strings.Replace(..., -1).
		return strings.ReplaceAll(v6Addr, "-", ":")
	}
	return ""
}
// IsEligibleBackendV4 reports whether this node can serve as an IPv4 backend.
func (n *Node) IsEligibleBackendV4(labels map[string]string, ip string, ignoreCordon bool) (bool, string) {
	return n.IsEligibleBackend(labels, ip, ignoreCordon, false)
}

// IsEligibleBackendV6 reports whether this node can serve as an IPv6 backend.
func (n *Node) IsEligibleBackendV6(labels map[string]string, ip string, ignoreCordon bool) (bool, string) {
	return n.IsEligibleBackend(labels, ip, ignoreCordon, true)
}

// IsEligibleBackend checks all eligibility rules for the node and returns the
// verdict together with a human-readable reason string.
func (n *Node) IsEligibleBackend(labels map[string]string, ip string, ignoreCordon, v6 bool) (bool, string) {
	if len(n.Addresses) == 0 {
		return false, fmt.Sprintf("node %s does not have an IP address", n.Name)
	}
	if n.Unschedulable && !ignoreCordon {
		return false, fmt.Sprintf("node %s has unschedulable set. saw %v", n.IPV4(), n.Unschedulable)
	}
	if !n.Ready {
		return false, fmt.Sprintf("node %s is not in a ready state.", n.IPV4())
	}
	if !n.hasLabels(labels) {
		return false, fmt.Sprintf("node %s missing required labels: want: '%v'. saw: '%v'", n.IPV4(), labels, n.Labels)
	}
	// The load balancer's own address must not be used as a backend.
	if !v6 && n.IPV4() == ip {
		return false, fmt.Sprintf("node %s matches ip address %s", n.IPV4(), ip)
	}
	// BUG FIX: this message previously claimed an address match; the actual
	// reason for rejection is that the node carries no IPv6 address label.
	if v6 && n.IPV6() == "" {
		return false, fmt.Sprintf("node %s has no IPv6 address label", n.IPV4())
	}
	return true, fmt.Sprintf("node %s is eligible", n.IPV4())
}

// hasLabels returns true if the set of labels on the Node contains the
// key/value pairs expressed in the input, l.
func (n *Node) hasLabels(l map[string]string) bool {
	for wantKey, wantValue := range l {
		if hasValue, ok := n.Labels[wantKey]; !ok || hasValue != wantValue {
			return false
		}
	}
	return true
}
// HasServiceRunning checks if the node has any endpoints (pods) running for
// the given service port.
func (n *Node) HasServiceRunning(namespace, service, portName string) bool {
	for _, endpoint := range n.Endpoints {
		if endpoint.Namespace != namespace || endpoint.Service != service {
			continue
		}
		for _, subset := range endpoint.Subsets {
			// BUG FIX: an empty subset previously triggered an immediate
			// "return false", skipping any remaining subsets/endpoints that
			// might still match. Skip just the empty subset instead.
			if len(subset.Addresses) == 0 {
				continue
			}
			for _, port := range subset.Ports {
				if port.Name == portName {
					return true
				}
			}
		}
	}
	return false
}
// GetPortNumber retrieves the numeric port for the named service port on
// this node's endpoints. Returns 0 when no match is found.
func (n *Node) GetPortNumber(namespace, service, portName string) int {
	for _, endpoint := range n.Endpoints {
		if endpoint.Namespace != namespace || endpoint.Service != service {
			continue
		}
		for _, subset := range endpoint.Subsets {
			for _, port := range subset.Ports {
				if port.Name == portName {
					return port.Port
				}
			}
		}
	}
	return 0
}
// GetPodIPs collects the pod IPs backing the named service port on this
// node. Subsets that do not expose the named port are skipped entirely.
func (n *Node) GetPodIPs(namespace, service, portName string) []string {
	podIps := []string{}
	for _, endpoint := range n.Endpoints {
		if endpoint.Namespace != namespace || endpoint.Service != service {
			continue
		}
		for _, subset := range endpoint.Subsets {
			hasPort := false
			for _, port := range subset.Ports {
				if port.Name == portName {
					hasPort = true
					break
				}
			}
			if !hasPort {
				continue
			}
			for _, address := range subset.Addresses {
				podIps = append(podIps, address.PodIP)
			}
		}
	}
	return podIps
}
// isInReadyState reports whether the kube node has a "Ready" condition
// whose status is "True".
func isInReadyState(n *v1.Node) bool {
	for _, c := range n.Status.Conditions {
		if c.Type == "Ready" && c.Status == "True" {
			return true
		}
	}
	return false
}
// addresses extracts the non-empty InternalIP addresses from a kube node.
func addresses(n *v1.Node) []string {
	out := []string{}
	for _, addr := range n.Status.Addresses {
		if addr.Type != "InternalIP" || addr.Address == "" {
			continue
		}
		out = append(out, addr.Address)
	}
	return out
}
// EndpointMeta identifies the service an Endpoints object belongs to.
type EndpointMeta struct {
	Namespace string `json:"namespace"`
	Service string `json:"name"`
}
// Endpoints mirrors a kube Endpoints object: service identity plus subsets.
type Endpoints struct {
	EndpointMeta `json:"metadata"`
	Subsets []Subset `json:"subsets"`
}
// EndpointsList is sortable by namespace, then by service name.
type EndpointsList []Endpoints
func (e EndpointsList) Len() int { return len(e) }
func (e EndpointsList) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e EndpointsList) Less(i, j int) bool {
	if e[i].Namespace != e[j].Namespace {
		return e[i].Namespace < e[j].Namespace
	}
	return e[i].Service < e[j].Service
}
// CopyFilterForNode returns a new Endpoints struct that is a deep copy of the
// instance, with endpoints filtered to only those addresses that are matching
// the input node.
//
// NOTE(review): this is an unimplemented stub — it currently returns a
// shallow copy with NO filtering applied (the node argument is ignored).
// Callers must not rely on the documented filtering behavior yet.
func (e *Endpoints) CopyFilterForNode(node string) Endpoints {
	// TODO
	return *e
}
// Subset mirrors a kube endpoint subset: the backing addresses and the
// ports they expose.
type Subset struct {
	// TotalAddresses is the total # of addresses for this subset in the cluster.
	TotalAddresses int `json:"totalAddresses"`
	Addresses []Address `json:"addresses"`
	Ports []Port `json:"ports"`
}
// custom sort for arr of subsets
// NOTE(review): Less orders by address COUNT, so subsets with equally many
// addresses have no defined relative order — verify this is deterministic
// enough for the DeepEqual comparison done after SortConstituents.
type Subsets []Subset
func (s Subsets) Len() int { return len(s) }
func (s Subsets) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s Subsets) Less(i, j int) bool { return len(s[i].Addresses) < len(s[j].Addresses) }
// NewSubset converts a kube EndpointSubset into the local Subset type.
// Addresses without a node name or a target reference are skipped.
func NewSubset(s v1.EndpointSubset) Subset {
	var (
		addrs []Address
		ports []Port
	)
	for _, addr := range s.Addresses {
		if addr.NodeName == nil || addr.TargetRef == nil {
			continue
		}
		addrs = append(addrs, Address{
			PodIP:    addr.IP,
			NodeName: *addr.NodeName,
			Kind:     addr.TargetRef.Kind,
		})
	}
	for _, port := range s.Ports {
		ports = append(ports, Port{
			Name:     port.Name,
			Port:     int(port.Port),
			Protocol: string(port.Protocol),
		})
	}
	return Subset{Addresses: addrs, Ports: ports}
}
type Address struct {
PodIP string `json:"ip"`
NodeName string `json:"nodeName"`
Kind string `json:"kind"`
}
type Addresses []Address
func (a Addresses) Len() int { return len(a) }
func (a Addresses) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a Addresses) Less(i, j int) bool { return len(a[i].NodeName) < len(a[j].NodeName) }
type Port struct {
Name string `json:"name"`
Port int `json:"port"`
Protocol string `json:"protocol"`
}
type Ports []Port
func (p Ports) Len() int { return len(p) }
func (p Ports) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p Ports) Less(i, j int) bool { return len(p[i].Name) < len(p[j].Name) }
// MakeIdent standardizes a string construction used in packages nodes and
// watcher: "namespace/service:portName".
func MakeIdent(namespace, service, portName string) string {
	return namespace + "/" + service + ":" + portName
}
package geojsongeos
import (
"fmt"
"github.com/paulsmith/gogeos/geos"
"github.com/venicegeo/geojson-go/geojson"
)
// parseCoord converts an [x, y] pair into a GEOS coordinate.
func parseCoord(input []float64) geos.Coord {
	return geos.NewCoord(input[0], input[1])
}

// parseCoordArray converts a list of [x, y] pairs into GEOS coordinates.
// Returns nil for empty input (matching the previous behavior); otherwise
// the result slice is preallocated to avoid repeated growth.
func parseCoordArray(input [][]float64) []geos.Coord {
	if len(input) == 0 {
		return nil
	}
	result := make([]geos.Coord, 0, len(input))
	for _, pair := range input {
		result = append(result, parseCoord(pair))
	}
	return result
}
// GeosFromGeoJSON takes a GeoJSON object and returns a GEOS geometry.
// BUG FIX: errors from the per-element constructors (geos.NewPoint,
// geos.NewLineString, ...) inside the Multi* loops were previously
// overwritten on the next iteration and silently dropped; they are now
// propagated immediately. Empty polygon ring lists, which previously
// caused an index panic, now return an error.
func GeosFromGeoJSON(input interface{}) (*geos.Geometry, error) {
	switch gt := input.(type) {
	case *geojson.Point:
		return geos.NewPoint(parseCoord(gt.Coordinates))
	case *geojson.LineString:
		return geos.NewLineString(parseCoordArray(gt.Coordinates)...)
	case *geojson.Polygon:
		// The first ring is the shell; any remaining rings are holes.
		rings := make([][]geos.Coord, 0, len(gt.Coordinates))
		for _, ring := range gt.Coordinates {
			rings = append(rings, parseCoordArray(ring))
		}
		if len(rings) == 0 {
			return nil, fmt.Errorf("polygon has no rings")
		}
		return geos.NewPolygon(rings[0], rings[1:]...)
	case *geojson.MultiPoint:
		points := make([]*geos.Geometry, 0, len(gt.Coordinates))
		for _, coord := range gt.Coordinates {
			point, err := geos.NewPoint(parseCoord(coord))
			if err != nil {
				return nil, err
			}
			points = append(points, point)
		}
		return geos.NewCollection(geos.MULTIPOINT, points...)
	case *geojson.MultiLineString:
		lineStrings := make([]*geos.Geometry, 0, len(gt.Coordinates))
		for _, coords := range gt.Coordinates {
			lineString, err := geos.NewLineString(parseCoordArray(coords)...)
			if err != nil {
				return nil, err
			}
			lineStrings = append(lineStrings, lineString)
		}
		return geos.NewCollection(geos.MULTILINESTRING, lineStrings...)
	case *geojson.GeometryCollection:
		geometries := make([]*geos.Geometry, 0, len(gt.Geometries))
		for _, member := range gt.Geometries {
			geometry, err := GeosFromGeoJSON(member)
			if err != nil {
				return nil, err
			}
			geometries = append(geometries, geometry)
		}
		return geos.NewCollection(geos.GEOMETRYCOLLECTION, geometries...)
	case *geojson.MultiPolygon:
		polygons := make([]*geos.Geometry, 0, len(gt.Coordinates))
		for _, polygonCoords := range gt.Coordinates {
			rings := make([][]geos.Coord, 0, len(polygonCoords))
			for _, ringCoords := range polygonCoords {
				rings = append(rings, parseCoordArray(ringCoords))
			}
			if len(rings) == 0 {
				return nil, fmt.Errorf("polygon has no rings")
			}
			polygon, err := geos.NewPolygon(rings[0], rings[1:]...)
			if err != nil {
				return nil, err
			}
			polygons = append(polygons, polygon)
		}
		return geos.NewCollection(geos.MULTIPOLYGON, polygons...)
	case *geojson.Feature:
		return GeosFromGeoJSON(gt.Geometry)
	case *geojson.FeatureCollection:
		geometries := make([]*geos.Geometry, 0, len(gt.Features))
		for _, feature := range gt.Features {
			geometry, err := GeosFromGeoJSON(feature)
			if err != nil {
				return nil, err
			}
			geometries = append(geometries, geometry)
		}
		return geos.NewCollection(geos.GEOMETRYCOLLECTION, geometries...)
	case map[string]interface{}:
		return GeosFromGeoJSON(geojson.FromMap(gt))
	default:
		return nil, fmt.Errorf("Unexpected type in GeosFromGeoJSON: %T\n", gt)
	}
}
// GeoJSONFromGeos takes a GEOS geometry and returns a GeoJSON object.
// BUG FIX: the error from the recursive GeoJSONFromGeos call in the
// MULTIPOLYGON case was previously ignored before the type assertion; it is
// now propagated. The function is also restructured with early returns
// instead of a single shared err variable.
func GeoJSONFromGeos(input *geos.Geometry) (interface{}, error) {
	gType, err := input.Type()
	if err != nil {
		return nil, err
	}
	switch gType {
	case geos.POINT:
		xval, err := input.X()
		if err != nil {
			return nil, err
		}
		yval, err := input.Y()
		if err != nil {
			return nil, err
		}
		return geojson.NewPoint([]float64{xval, yval}), nil
	case geos.LINESTRING:
		coords, err := input.Coords()
		if err != nil {
			return nil, err
		}
		return geojson.NewLineString(arrayFromCoords(coords)), nil
	case geos.POLYGON:
		// Shell ring first, then any holes.
		var coordinates [][][]float64
		ring, err := input.Shell()
		if err != nil {
			return nil, err
		}
		coords, err := ring.Coords()
		if err != nil {
			return nil, err
		}
		coordinates = append(coordinates, arrayFromCoords(coords))
		holes, err := input.Holes()
		if err != nil {
			return nil, err
		}
		for _, hole := range holes {
			coords, err := hole.Coords()
			if err != nil {
				return nil, err
			}
			coordinates = append(coordinates, arrayFromCoords(coords))
		}
		return geojson.NewPolygon(coordinates), nil
	case geos.MULTIPOINT:
		count, err := input.NGeometry()
		if err != nil {
			return nil, err
		}
		var coordinates [][]float64
		for inx := 0; inx < count; inx++ {
			point, err := input.Geometry(inx)
			if err != nil {
				return nil, err
			}
			coords, err := point.Coords()
			if err != nil {
				return nil, err
			}
			coordinates = append(coordinates, arrayFromPoints(coords))
		}
		return geojson.NewMultiPoint(coordinates), nil
	case geos.MULTILINESTRING:
		count, err := input.NGeometry()
		if err != nil {
			return nil, err
		}
		var coordinates [][][]float64
		for inx := 0; inx < count; inx++ {
			lineString, err := input.Geometry(inx)
			if err != nil {
				return nil, err
			}
			coords, err := lineString.Coords()
			if err != nil {
				return nil, err
			}
			coordinates = append(coordinates, arrayFromCoords(coords))
		}
		return geojson.NewMultiLineString(coordinates), nil
	case geos.MULTIPOLYGON:
		count, err := input.NGeometry()
		if err != nil {
			return nil, err
		}
		var coordinates [][][][]float64
		for inx := 0; inx < count; inx++ {
			polygon, err := input.Geometry(inx)
			if err != nil {
				return nil, err
			}
			polygonIfc, err := GeoJSONFromGeos(polygon)
			if err != nil {
				return nil, err
			}
			gjPolygon, ok := polygonIfc.(*geojson.Polygon)
			if !ok {
				return nil, fmt.Errorf("Expected Polygon, received %T", polygonIfc)
			}
			coordinates = append(coordinates, gjPolygon.Coordinates)
		}
		return geojson.NewMultiPolygon(coordinates), nil
	case geos.GEOMETRYCOLLECTION:
		count, err := input.NGeometry()
		if err != nil {
			return nil, err
		}
		var geometries []interface{}
		for inx := 0; inx < count; inx++ {
			member, err := input.Geometry(inx)
			if err != nil {
				return nil, err
			}
			geometryIfc, err := GeoJSONFromGeos(member)
			if err != nil {
				return nil, err
			}
			geometries = append(geometries, geometryIfc)
		}
		return geojson.NewGeometryCollection(geometries), nil
	default:
		return nil, fmt.Errorf("Unimplemented %v", gType)
	}
}
// arrayFromCoords converts GEOS coordinates to a list of [x, y] pairs.
// Returns nil for empty input (matching the previous behavior); otherwise
// the result slice is preallocated to avoid repeated growth.
func arrayFromCoords(input []geos.Coord) [][]float64 {
	if len(input) == 0 {
		return nil
	}
	result := make([][]float64, 0, len(input))
	for _, coord := range input {
		result = append(result, []float64{coord.X, coord.Y})
	}
	return result
}
// arrayFromPoints returns the FIRST coordinate of input as an [x, y] pair.
// Callers pass single-coordinate lists (members of a MULTIPOINT); any extra
// coordinates are ignored. Panics on empty input.
func arrayFromPoints(input []geos.Coord) []float64 {
	return []float64{input[0].X, input[0].Y}
}
// PointCloud returns a geos.MULTIPOINT containing every point found in the
// input geometry (recursively extracted via getPointSlice).
func PointCloud(input *geos.Geometry) (*geos.Geometry, error) {
	points, err := getPointSlice(input)
	if err != nil {
		return nil, err
	}
	return geos.NewCollection(geos.MULTIPOINT, points...)
}
// getPointSlice returns all points contained in the geometry, recursing
// into collections, polygons (shell plus holes) and line strings until it
// reaches individual points. Unsupported geometry types yield an error.
func getPointSlice(input *geos.Geometry) ([]*geos.Geometry, error) {
	var (
		geom *geos.Geometry
		points,
		currPoints []*geos.Geometry
		geomType geos.GeometryType
		count int
		err error
	)
	if geomType, err = input.Type(); err != nil {
		return nil, err
	}
	switch geomType {
	case geos.MULTIPOLYGON, geos.GEOMETRYCOLLECTION, geos.MULTILINESTRING, geos.MULTIPOINT:
		// Recurse into each member geometry of the collection.
		if count, err = input.NGeometry(); err != nil {
			return nil, err
		}
		for inx := 0; inx < count; inx++ {
			if geom, err = input.Geometry(inx); err != nil {
				return nil, err
			}
			if currPoints, err = getPointSlice(geom); err != nil {
				return nil, err
			}
			points = append(points, currPoints...)
		}
	case geos.POLYGON:
		// A polygon contributes its shell ring plus all hole rings.
		var (
			ring *geos.Geometry
			holes []*geos.Geometry
		)
		if ring, err = input.Shell(); err != nil {
			return nil, err
		}
		if currPoints, err = getPointSlice(ring); err != nil {
			return nil, err
		}
		points = append(points, currPoints...)
		if holes, err = input.Holes(); err != nil {
			return nil, err
		}
		for _, ring = range holes {
			if currPoints, err = getPointSlice(ring); err != nil {
				return nil, err
			}
			points = append(points, currPoints...)
		}
	case geos.POINT:
		// Base case: a point is returned as-is.
		points = append(points, input)
	case geos.LINESTRING, geos.LINEARRING:
		// A line contributes each of its vertices as a point.
		if count, err = input.NPoint(); err != nil {
			return nil, err
		}
		for inx := 0; inx < count; inx++ {
			if geom, err = input.Point(inx); err != nil {
				return nil, err
			}
			points = append(points, geom)
		}
	default:
		return nil, fmt.Errorf("Cannot create point cloud from geometry type %v", geomType)
	}
	return points, nil
}
package thwbbst
// Wbbst - Top Heavy Weight Balanced Binary Search Tree.
// The tree is encoded by an array with uint32 indices; the item data type is
// unspecified and defined by the implementation.
// This interface can be implemented with any alternative type of BST
// implementation, but this compact storage format is very well suited to
// scapegoat and weight-balanced trees, and the extra data in the search data
// structure will be required for each implementation. See the tree32
// implementation in /pkg in this repository.
type Wbbst interface {
	// Comparator functions. The name indicates the truth value from the first
	// argument to the second (IsLeft is true if the first arg is left of the
	// second). These are part of the interface to allow implementations with
	// different data blob types — for example a 64-bit hash of which only the
	// first 32 bits are compared — or other compact, complex values such as
	// vectors and bit matrixes.
	IsLeft(interface{}, interface{}) bool
	IsRight(interface{}, interface{}) bool
	// Find returns the index location of a piece of data, or an error if it
	// is not found.
	Find(interface{}) (uint32, error)
	// AddRow adds a new row to the bottom of the tree (or errors if the row
	// would extend beyond 32 bits of index). Implementations must increment
	// their depth and append a string of array elements whose length is two
	// to the power of the new depth (1<<Depth).
	AddRow() error
	// Insert inserts a new node in the tree, balancing if necessary, and
	// returns the position of insertion.
	Insert(interface{}) uint32
	// DeleteByIndex removes the item at the index and rebalances. Presumably
	// zero is the sentinel for no allocation. The sentinel is entirely
	// implementation driven, but by default Go zeroes all new variables it
	// creates (avoiding potential security vulnerabilities as well as
	// 'may be used uninitialised' bugs). If something other than zero must be
	// used as a sentinel, AddRow() must be reimplemented to suit.
	DeleteByIndex(uint32)
	// DeleteByData searches for a node with the specified data, removes it
	// and rebalances.
	DeleteByData(interface{}) error
	// Walking functions. These take an index and return the correct index of
	// the data on that path. (These are the only functions this package's
	// reference implementation defines directly, as everything else depends
	// on the comparators.)
	WalkUp(uint32) (uint32, error)
	WalkLeft(uint32) (uint32, error)
	WalkRight(uint32) (uint32, error)
}
package slicy
// SliceOfByte wraps a []byte and offers a fluent, JavaScript-style API
// (Filter, Map, Push, Pop, ...). Mutating methods modify the wrapped slice
// in place and return the receiver so calls can be chained.
type SliceOfByte struct {
	items []byte
}

// NewFromByteSlice creates an instance of SliceOfByte wrapping the given
// slice (the slice is NOT copied) and returns a reference to it.
func NewFromByteSlice(items []byte) *SliceOfByte {
	return &SliceOfByte{items}
}

// Filter keeps only the items for which filterFunc returns true.
// BUG FIX: the filtered result was previously built and then discarded,
// making Filter a no-op; it is now assigned back to s.items.
func (s *SliceOfByte) Filter(filterFunc func(byte) bool) *SliceOfByte {
	var newItems []byte
	for _, value := range s.items {
		if filterFunc(value) {
			newItems = append(newItems, value)
		}
	}
	s.items = newItems
	return s
}

// Map replaces every item with mapFunc(item).
func (s *SliceOfByte) Map(mapFunc func(byte) byte) *SliceOfByte {
	for index, value := range s.items {
		s.items[index] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. Panics on an empty slice.
func (s *SliceOfByte) Shift() *SliceOfByte {
	s.items = s.items[1:]
	return s
}

// Unshift prepends the given item to the slice.
func (s *SliceOfByte) Unshift(item byte) *SliceOfByte {
	s.items = append([]byte{item}, s.items...)
	return s
}

// Append adds the given item to the end of the slice (alias of Push).
func (s *SliceOfByte) Append(item byte) *SliceOfByte {
	return s.Push(item)
}

// Concat appends all items of another slice to the end of the current slice.
func (s *SliceOfByte) Concat(items []byte) *SliceOfByte {
	s.items = append(s.items, items...)
	return s
}

// Push adds the given item to the end of the slice.
func (s *SliceOfByte) Push(item byte) *SliceOfByte {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it.
// Panics on an empty slice.
func (s *SliceOfByte) Pop() byte {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items
// (vacuously true for an empty slice).
func (s *SliceOfByte) Every(checkerFunc func(byte) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfByte) Some(checkerFunc func(byte) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfByte) Includes(item byte) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the current slice.
func (s *SliceOfByte) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of byte.
func (s *SliceOfByte) GetSlice() *[]byte {
	return &s.items
}
// SliceOfInt wraps an []int and offers a fluent, JavaScript-style API
// (Filter, Map, Push, Pop, ...). Mutating methods modify the wrapped slice
// in place and return the receiver so calls can be chained.
type SliceOfInt struct {
	items []int
}

// NewFromIntSlice creates an instance of SliceOfInt wrapping the given
// slice (the slice is NOT copied) and returns a reference to it.
func NewFromIntSlice(items []int) *SliceOfInt {
	return &SliceOfInt{items}
}

// Filter keeps only the items for which filterFunc returns true.
// BUG FIX: the filtered result was previously built and then discarded,
// making Filter a no-op; it is now assigned back to s.items.
func (s *SliceOfInt) Filter(filterFunc func(int) bool) *SliceOfInt {
	var newItems []int
	for _, value := range s.items {
		if filterFunc(value) {
			newItems = append(newItems, value)
		}
	}
	s.items = newItems
	return s
}

// Map replaces every item with mapFunc(item).
func (s *SliceOfInt) Map(mapFunc func(int) int) *SliceOfInt {
	for index, value := range s.items {
		s.items[index] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. Panics on an empty slice.
func (s *SliceOfInt) Shift() *SliceOfInt {
	s.items = s.items[1:]
	return s
}

// Unshift prepends the given item to the slice.
func (s *SliceOfInt) Unshift(item int) *SliceOfInt {
	s.items = append([]int{item}, s.items...)
	return s
}

// Append adds the given item to the end of the slice (alias of Push).
func (s *SliceOfInt) Append(item int) *SliceOfInt {
	return s.Push(item)
}

// Concat appends all items of another slice to the end of the current slice.
func (s *SliceOfInt) Concat(items []int) *SliceOfInt {
	s.items = append(s.items, items...)
	return s
}

// Push adds the given item to the end of the slice.
func (s *SliceOfInt) Push(item int) *SliceOfInt {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it.
// Panics on an empty slice.
func (s *SliceOfInt) Pop() int {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items
// (vacuously true for an empty slice).
func (s *SliceOfInt) Every(checkerFunc func(int) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInt) Some(checkerFunc func(int) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfInt) Includes(item int) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the current slice.
func (s *SliceOfInt) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of int.
func (s *SliceOfInt) GetSlice() *[]int {
	return &s.items
}
// SliceOfInt8 wraps an []int8 and offers a fluent, JavaScript-style API
// (Filter, Map, Push, Pop, ...). Mutating methods modify the wrapped slice
// in place and return the receiver so calls can be chained.
type SliceOfInt8 struct {
	items []int8
}

// NewFromInt8Slice creates an instance of SliceOfInt8 wrapping the given
// slice (the slice is NOT copied) and returns a reference to it.
func NewFromInt8Slice(items []int8) *SliceOfInt8 {
	return &SliceOfInt8{items}
}

// Filter keeps only the items for which filterFunc returns true.
// BUG FIX: the filtered result was previously built and then discarded,
// making Filter a no-op; it is now assigned back to s.items.
func (s *SliceOfInt8) Filter(filterFunc func(int8) bool) *SliceOfInt8 {
	var newItems []int8
	for _, value := range s.items {
		if filterFunc(value) {
			newItems = append(newItems, value)
		}
	}
	s.items = newItems
	return s
}

// Map replaces every item with mapFunc(item).
func (s *SliceOfInt8) Map(mapFunc func(int8) int8) *SliceOfInt8 {
	for index, value := range s.items {
		s.items[index] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. Panics on an empty slice.
func (s *SliceOfInt8) Shift() *SliceOfInt8 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends the given item to the slice.
func (s *SliceOfInt8) Unshift(item int8) *SliceOfInt8 {
	s.items = append([]int8{item}, s.items...)
	return s
}

// Append adds the given item to the end of the slice (alias of Push).
func (s *SliceOfInt8) Append(item int8) *SliceOfInt8 {
	return s.Push(item)
}

// Concat appends all items of another slice to the end of the current slice.
func (s *SliceOfInt8) Concat(items []int8) *SliceOfInt8 {
	s.items = append(s.items, items...)
	return s
}

// Push adds the given item to the end of the slice.
func (s *SliceOfInt8) Push(item int8) *SliceOfInt8 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it.
// Panics on an empty slice.
func (s *SliceOfInt8) Pop() int8 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items
// (vacuously true for an empty slice).
func (s *SliceOfInt8) Every(checkerFunc func(int8) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInt8) Some(checkerFunc func(int8) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfInt8) Includes(item int8) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}
// Len returns the length of items in current slice
func (s *SliceOfInt8) Len() int {
return len(s.items)
}
// GetSlice returns a pointer to the final slice of int8
func (s *SliceOfInt8) GetSlice() *[]int8 {
return &s.items
}
// SliceOfInt16 wraps a slice of int16 and exposes chainable helper methods on it.
type SliceOfInt16 struct {
	items []int16
}

// NewFromInt16Slice creates a SliceOfInt16 backed by the given slice and returns
// a reference to it. The slice is aliased, not copied.
func NewFromInt16Slice(items []int16) *SliceOfInt16 {
	return &SliceOfInt16{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfInt16) Filter(filterFunc func(int16) bool) *SliceOfInt16 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfInt16) Map(mapFunc func(int16) int16) *SliceOfInt16 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfInt16) Shift() *SliceOfInt16 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfInt16) Unshift(item int16) *SliceOfInt16 {
	s.items = append([]int16{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfInt16) Append(item int16) *SliceOfInt16 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfInt16) Concat(items []int16) *SliceOfInt16 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfInt16) Push(item int16) *SliceOfInt16 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfInt16) Pop() int16 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfInt16) Every(checkerFunc func(int16) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInt16) Some(checkerFunc func(int16) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfInt16) Includes(item int16) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfInt16) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of int16.
func (s *SliceOfInt16) GetSlice() *[]int16 {
	return &s.items
}
// SliceOfInt32 wraps a slice of int32 and exposes chainable helper methods on it.
type SliceOfInt32 struct {
	items []int32
}

// NewFromInt32Slice creates a SliceOfInt32 backed by the given slice and returns
// a reference to it. The slice is aliased, not copied.
func NewFromInt32Slice(items []int32) *SliceOfInt32 {
	return &SliceOfInt32{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfInt32) Filter(filterFunc func(int32) bool) *SliceOfInt32 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfInt32) Map(mapFunc func(int32) int32) *SliceOfInt32 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfInt32) Shift() *SliceOfInt32 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfInt32) Unshift(item int32) *SliceOfInt32 {
	s.items = append([]int32{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfInt32) Append(item int32) *SliceOfInt32 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfInt32) Concat(items []int32) *SliceOfInt32 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfInt32) Push(item int32) *SliceOfInt32 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfInt32) Pop() int32 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfInt32) Every(checkerFunc func(int32) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInt32) Some(checkerFunc func(int32) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfInt32) Includes(item int32) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfInt32) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of int32.
func (s *SliceOfInt32) GetSlice() *[]int32 {
	return &s.items
}
// SliceOfInt64 wraps a slice of int64 and exposes chainable helper methods on it.
type SliceOfInt64 struct {
	items []int64
}

// NewFromInt64Slice creates a SliceOfInt64 backed by the given slice and returns
// a reference to it. The slice is aliased, not copied.
func NewFromInt64Slice(items []int64) *SliceOfInt64 {
	return &SliceOfInt64{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfInt64) Filter(filterFunc func(int64) bool) *SliceOfInt64 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfInt64) Map(mapFunc func(int64) int64) *SliceOfInt64 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfInt64) Shift() *SliceOfInt64 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfInt64) Unshift(item int64) *SliceOfInt64 {
	s.items = append([]int64{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfInt64) Append(item int64) *SliceOfInt64 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfInt64) Concat(items []int64) *SliceOfInt64 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfInt64) Push(item int64) *SliceOfInt64 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfInt64) Pop() int64 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfInt64) Every(checkerFunc func(int64) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInt64) Some(checkerFunc func(int64) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfInt64) Includes(item int64) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfInt64) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of int64.
func (s *SliceOfInt64) GetSlice() *[]int64 {
	return &s.items
}
// SliceOfFloat32 wraps a slice of float32 and exposes chainable helper methods on it.
type SliceOfFloat32 struct {
	items []float32
}

// NewFromFloat32Slice creates a SliceOfFloat32 backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromFloat32Slice(items []float32) *SliceOfFloat32 {
	return &SliceOfFloat32{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfFloat32) Filter(filterFunc func(float32) bool) *SliceOfFloat32 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfFloat32) Map(mapFunc func(float32) float32) *SliceOfFloat32 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfFloat32) Shift() *SliceOfFloat32 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfFloat32) Unshift(item float32) *SliceOfFloat32 {
	s.items = append([]float32{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfFloat32) Append(item float32) *SliceOfFloat32 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfFloat32) Concat(items []float32) *SliceOfFloat32 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfFloat32) Push(item float32) *SliceOfFloat32 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfFloat32) Pop() float32 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfFloat32) Every(checkerFunc func(float32) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfFloat32) Some(checkerFunc func(float32) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item. Comparison uses
// ==, so NaN items are never matched.
func (s *SliceOfFloat32) Includes(item float32) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfFloat32) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of float32.
func (s *SliceOfFloat32) GetSlice() *[]float32 {
	return &s.items
}
// SliceOfFloat64 wraps a slice of float64 and exposes chainable helper methods on it.
type SliceOfFloat64 struct {
	items []float64
}

// NewFromFloat64Slice creates a SliceOfFloat64 backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromFloat64Slice(items []float64) *SliceOfFloat64 {
	return &SliceOfFloat64{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfFloat64) Filter(filterFunc func(float64) bool) *SliceOfFloat64 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfFloat64) Map(mapFunc func(float64) float64) *SliceOfFloat64 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfFloat64) Shift() *SliceOfFloat64 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfFloat64) Unshift(item float64) *SliceOfFloat64 {
	s.items = append([]float64{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfFloat64) Append(item float64) *SliceOfFloat64 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfFloat64) Concat(items []float64) *SliceOfFloat64 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfFloat64) Push(item float64) *SliceOfFloat64 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfFloat64) Pop() float64 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfFloat64) Every(checkerFunc func(float64) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfFloat64) Some(checkerFunc func(float64) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item. Comparison uses
// ==, so NaN items are never matched.
func (s *SliceOfFloat64) Includes(item float64) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfFloat64) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of float64.
func (s *SliceOfFloat64) GetSlice() *[]float64 {
	return &s.items
}
// SliceOfUint wraps a slice of uint and exposes chainable helper methods on it.
type SliceOfUint struct {
	items []uint
}

// NewFromUintSlice creates a SliceOfUint backed by the given slice and returns
// a reference to it. The slice is aliased, not copied.
func NewFromUintSlice(items []uint) *SliceOfUint {
	return &SliceOfUint{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfUint) Filter(filterFunc func(uint) bool) *SliceOfUint {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfUint) Map(mapFunc func(uint) uint) *SliceOfUint {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfUint) Shift() *SliceOfUint {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfUint) Unshift(item uint) *SliceOfUint {
	s.items = append([]uint{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfUint) Append(item uint) *SliceOfUint {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfUint) Concat(items []uint) *SliceOfUint {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfUint) Push(item uint) *SliceOfUint {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfUint) Pop() uint {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfUint) Every(checkerFunc func(uint) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfUint) Some(checkerFunc func(uint) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfUint) Includes(item uint) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfUint) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of uint.
func (s *SliceOfUint) GetSlice() *[]uint {
	return &s.items
}
// SliceOfUint8 wraps a slice of uint8 and exposes chainable helper methods on it.
type SliceOfUint8 struct {
	items []uint8
}

// NewFromUint8Slice creates a SliceOfUint8 backed by the given slice and returns
// a reference to it. The slice is aliased, not copied.
func NewFromUint8Slice(items []uint8) *SliceOfUint8 {
	return &SliceOfUint8{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfUint8) Filter(filterFunc func(uint8) bool) *SliceOfUint8 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfUint8) Map(mapFunc func(uint8) uint8) *SliceOfUint8 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfUint8) Shift() *SliceOfUint8 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfUint8) Unshift(item uint8) *SliceOfUint8 {
	s.items = append([]uint8{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfUint8) Append(item uint8) *SliceOfUint8 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfUint8) Concat(items []uint8) *SliceOfUint8 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfUint8) Push(item uint8) *SliceOfUint8 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfUint8) Pop() uint8 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfUint8) Every(checkerFunc func(uint8) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfUint8) Some(checkerFunc func(uint8) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfUint8) Includes(item uint8) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfUint8) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of uint8.
func (s *SliceOfUint8) GetSlice() *[]uint8 {
	return &s.items
}
// SliceOfUint16 wraps a slice of uint16 and exposes chainable helper methods on it.
type SliceOfUint16 struct {
	items []uint16
}

// NewFromUint16Slice creates a SliceOfUint16 backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromUint16Slice(items []uint16) *SliceOfUint16 {
	return &SliceOfUint16{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfUint16) Filter(filterFunc func(uint16) bool) *SliceOfUint16 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfUint16) Map(mapFunc func(uint16) uint16) *SliceOfUint16 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfUint16) Shift() *SliceOfUint16 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfUint16) Unshift(item uint16) *SliceOfUint16 {
	s.items = append([]uint16{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfUint16) Append(item uint16) *SliceOfUint16 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfUint16) Concat(items []uint16) *SliceOfUint16 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfUint16) Push(item uint16) *SliceOfUint16 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfUint16) Pop() uint16 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfUint16) Every(checkerFunc func(uint16) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfUint16) Some(checkerFunc func(uint16) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfUint16) Includes(item uint16) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfUint16) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of uint16.
func (s *SliceOfUint16) GetSlice() *[]uint16 {
	return &s.items
}
// SliceOfUint32 wraps a slice of uint32 and exposes chainable helper methods on it.
type SliceOfUint32 struct {
	items []uint32
}

// NewFromUint32Slice creates a SliceOfUint32 backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromUint32Slice(items []uint32) *SliceOfUint32 {
	return &SliceOfUint32{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfUint32) Filter(filterFunc func(uint32) bool) *SliceOfUint32 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfUint32) Map(mapFunc func(uint32) uint32) *SliceOfUint32 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfUint32) Shift() *SliceOfUint32 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfUint32) Unshift(item uint32) *SliceOfUint32 {
	s.items = append([]uint32{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfUint32) Append(item uint32) *SliceOfUint32 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfUint32) Concat(items []uint32) *SliceOfUint32 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfUint32) Push(item uint32) *SliceOfUint32 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfUint32) Pop() uint32 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfUint32) Every(checkerFunc func(uint32) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfUint32) Some(checkerFunc func(uint32) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfUint32) Includes(item uint32) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfUint32) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of uint32.
func (s *SliceOfUint32) GetSlice() *[]uint32 {
	return &s.items
}
// SliceOfUint64 wraps a slice of uint64 and exposes chainable helper methods on it.
type SliceOfUint64 struct {
	items []uint64
}

// NewFromUint64Slice creates a SliceOfUint64 backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromUint64Slice(items []uint64) *SliceOfUint64 {
	return &SliceOfUint64{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfUint64) Filter(filterFunc func(uint64) bool) *SliceOfUint64 {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfUint64) Map(mapFunc func(uint64) uint64) *SliceOfUint64 {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfUint64) Shift() *SliceOfUint64 {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfUint64) Unshift(item uint64) *SliceOfUint64 {
	s.items = append([]uint64{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfUint64) Append(item uint64) *SliceOfUint64 {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfUint64) Concat(items []uint64) *SliceOfUint64 {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfUint64) Push(item uint64) *SliceOfUint64 {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfUint64) Pop() uint64 {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}

// Every reports whether checkerFunc returns true for all items. It returns
// true for an empty slice (vacuous truth).
func (s *SliceOfUint64) Every(checkerFunc func(uint64) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfUint64) Some(checkerFunc func(uint64) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item.
func (s *SliceOfUint64) Includes(item uint64) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the slice.
func (s *SliceOfUint64) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of uint64.
func (s *SliceOfUint64) GetSlice() *[]uint64 {
	return &s.items
}
// SliceOfString wraps a slice of string and exposes chainable helper methods on it.
type SliceOfString struct {
	items []string
}

// NewFromStringSlice creates a SliceOfString backed by the given slice and
// returns a reference to it. The slice is aliased, not copied.
func NewFromStringSlice(items []string) *SliceOfString {
	return &SliceOfString{items: items}
}

// Filter keeps only the items for which filterFunc returns true and returns
// the receiver for chaining.
func (s *SliceOfString) Filter(filterFunc func(string) bool) *SliceOfString {
	kept := s.items[:0] // filter in place, reusing the backing array
	for _, value := range s.items {
		if filterFunc(value) {
			kept = append(kept, value)
		}
	}
	// Fix: the original built the filtered slice but never stored it, making
	// Filter a no-op. Persist the result on the receiver.
	s.items = kept
	return s
}

// Map replaces every item with the value mapFunc returns for it.
func (s *SliceOfString) Map(mapFunc func(string) string) *SliceOfString {
	for i, value := range s.items {
		s.items[i] = mapFunc(value)
	}
	return s
}

// Shift removes the first item from the slice. It panics if the slice is empty.
func (s *SliceOfString) Shift() *SliceOfString {
	s.items = s.items[1:]
	return s
}

// Unshift prepends item to the slice.
func (s *SliceOfString) Unshift(item string) *SliceOfString {
	s.items = append([]string{item}, s.items...)
	return s
}

// Append adds item to the end of the slice.
func (s *SliceOfString) Append(item string) *SliceOfString {
	s.items = append(s.items, item)
	return s
}

// Concat appends all of items to the end of the slice.
func (s *SliceOfString) Concat(items []string) *SliceOfString {
	s.items = append(s.items, items...)
	return s
}

// Push adds item to the end of the slice (alias of Append).
func (s *SliceOfString) Push(item string) *SliceOfString {
	s.items = append(s.items, item)
	return s
}

// Pop removes the last item from the slice and returns it. It panics if the
// slice is empty.
func (s *SliceOfString) Pop() string {
	last := len(s.items) - 1
	popped := s.items[last]
	s.items = s.items[:last]
	return popped
}
// Every gets a checker function and runs that on every single string items and returns true if the function returne true for all of the them
func (s *SliceOfString) Every(checkerFunc func(string) bool) bool {
for _, value := range s.items {
if checkerFunc(value) == false {
return false
}
}
return true
}
// Some gets a checker function and runs that on every single string items and returns true if the function returne true for one or more of the them
func (s *SliceOfString) Some(checkerFunc func(string) bool) bool {
for _, value := range s.items {
if checkerFunc(value) {
return true
}
}
return false
}
// Includes returns true if the slice contains given item
func (s *SliceOfString) Includes(item string) bool {
for _, value := range s.items {
if value == item {
return true
}
}
return false
}
// Len returns the length of items in current slice
func (s *SliceOfString) Len() int {
return len(s.items)
}
// GetSlice returns a pointer to the final slice of string
func (s *SliceOfString) GetSlice() *[]string {
return &s.items
}
// SliceOfInterface is a chainable wrapper around a []interface{}
// providing JavaScript-Array-style helpers.
type SliceOfInterface struct {
	items []interface{}
}

// NewFromInterfaceSlice creates an instance of SliceOfInterface
// wrapping the given backing slice and returns a reference to it.
func NewFromInterfaceSlice(items []interface{}) *SliceOfInterface {
	return &SliceOfInterface{items}
}

// Filter keeps only the items for which filterFunc returns true and
// returns the receiver for chaining.
// Bug fix: the previous version built the filtered slice but never
// assigned it back to s.items, so Filter was a no-op.
func (s *SliceOfInterface) Filter(filterFunc func(interface{}) bool) *SliceOfInterface {
	var newItems []interface{}
	for _, value := range s.items {
		if filterFunc(value) {
			newItems = append(newItems, value)
		}
	}
	s.items = newItems
	return s
}

// Map replaces every item in place with the result of mapFunc applied
// to it and returns the receiver for chaining.
func (s *SliceOfInterface) Map(mapFunc func(interface{}) interface{}) *SliceOfInterface {
	for index, value := range s.items {
		s.items[index] = mapFunc(value)
	}
	return s
}

// Shift removes the first interface{} item from the slice.
// NOTE: s.items[1:] panics on an empty slice; callers must check Len().
func (s *SliceOfInterface) Shift() *SliceOfInterface {
	s.items = s.items[1:]
	return s
}

// Unshift adds the given interface{} to the start of the slice.
func (s *SliceOfInterface) Unshift(item interface{}) *SliceOfInterface {
	s.items = append([]interface{}{item}, s.items...)
	return s
}

// Append adds the given item to the end of the slice.
func (s *SliceOfInterface) Append(item interface{}) *SliceOfInterface {
	s.items = append(s.items, item)
	return s
}

// Concat appends all the items of another slice to the end of the
// current slice.
func (s *SliceOfInterface) Concat(items []interface{}) *SliceOfInterface {
	s.items = append(s.items, items...)
	return s
}

// Push pushes the interface{} item at the end of the slice;
// behaviorally identical to Append.
func (s *SliceOfInterface) Push(item interface{}) *SliceOfInterface {
	s.items = append(s.items, item)
	return s
}

// Pop deletes the interface{} item from the end of the slice and
// returns it. It panics on an empty slice; callers must check Len().
func (s *SliceOfInterface) Pop() interface{} {
	poppedItem := s.items[len(s.items)-1]
	s.items = s.items[:len(s.items)-1]
	return poppedItem
}

// Every reports whether checkerFunc returns true for all of the items.
// An empty slice yields true (vacuous truth).
func (s *SliceOfInterface) Every(checkerFunc func(interface{}) bool) bool {
	for _, value := range s.items {
		if !checkerFunc(value) {
			return false
		}
	}
	return true
}

// Some reports whether checkerFunc returns true for at least one item.
func (s *SliceOfInterface) Some(checkerFunc func(interface{}) bool) bool {
	for _, value := range s.items {
		if checkerFunc(value) {
			return true
		}
	}
	return false
}

// Includes reports whether the slice contains the given item,
// compared with ==; item types must be comparable or this panics,
// matching the original behavior.
func (s *SliceOfInterface) Includes(item interface{}) bool {
	for _, value := range s.items {
		if value == item {
			return true
		}
	}
	return false
}

// Len returns the number of items in the current slice.
func (s *SliceOfInterface) Len() int {
	return len(s.items)
}

// GetSlice returns a pointer to the underlying slice of interface{};
// mutations through the pointer are visible to the receiver.
func (s *SliceOfInterface) GetSlice() *[]interface{} {
	return &s.items
}
package main
import (
"math"
"github.com/gazed/vu"
"github.com/gazed/vu/math/lin"
)
// cam controls the main game level camera.
//
// pitch and yaw hold the target orientation accumulated by look();
// update() eases the actual camera toward these targets each frame,
// which is what smooths the mouse movement.
type cam struct {
	pitch float64 // used to smooth camera.
	yaw float64 // used to smooth camera.
}
// implement the rest of the lens interface.
// Each handler moves along a single camera-relative axis by dt*run,
// rotated into world space through the direction quaternion q by move().
func (c *cam) back(bod *vu.Ent, dt, run float64, q *lin.Q) { c.move(bod, 0, 0, dt*run, q) }
func (c *cam) forward(bod *vu.Ent, dt, run float64, q *lin.Q) { c.move(bod, 0, 0, dt*-run, q) }
func (c *cam) left(bod *vu.Ent, dt, run float64, q *lin.Q) { c.move(bod, dt*-run, 0, 0, q) }
func (c *cam) right(bod *vu.Ent, dt, run float64, q *lin.Q) { c.move(bod, dt*run, 0, 0, q) }
// Handle movement assuming there is a physics body associated with the camera.
// This attempts to smooth out movement by adding a higher initial velocity push
// and then capping movement once max accelleration is reached.
//
// x, y, z are camera-relative offsets; dir rotates them into world space
// via lin.MultSQ before they are applied as a push on the physics body.
// Without a physics body the entity is moved directly.
func (c *cam) move(bod *vu.Ent, x, y, z float64, dir *lin.Q) {
	if body := bod.Body(); body != nil {
		boost := 40.0 // kick into high gear from stop.
		maxAccel := 10.0 // limit accelleration.
		sx, _, sz := body.Speed()
		if x != 0 {
			switch {
			case sx == 0.0:
				// apply push in the current direction.
				dx, dy, dz := lin.MultSQ(x*boost, 0, 0, dir)
				body.Push(dx, dy, dz)
			case math.Abs(sx) < maxAccel && math.Abs(sz) < maxAccel:
				// Below the acceleration cap: apply an unboosted push.
				dx, dy, dz := lin.MultSQ(x, 0, 0, dir)
				body.Push(dx, dy, dz)
			}
		}
		if z != 0 {
			switch {
			case sz == 0.0:
				// Starting from rest: boosted push for responsiveness.
				dx, dy, dz := lin.MultSQ(0, 0, z*boost, dir)
				body.Push(dx, dy, dz)
			case math.Abs(sx) < maxAccel && math.Abs(sz) < maxAccel:
				dx, dy, dz := lin.MultSQ(0, 0, z, dir)
				body.Push(dx, dy, dz)
			}
		}
	} else {
		// No physics body: move the entity directly.
		bod.Move(x, y, z, dir)
	}
}
// look changes the view left/right for changes in the x direction
// and up/down for changes in the y direction.
//
// xdiff/ydiff are per-frame mouse deltas in pixels; each is capped to
// +-limit before being scaled by spin and dt and accumulated into the
// target yaw/pitch (the camera is eased toward those targets elsewhere).
func (c *cam) look(spin, dt, xdiff, ydiff float64) {
	limit := 20.0 // pixels
	if xdiff != 0 {
		switch { // cap movement amount.
		case xdiff > limit:
			xdiff = limit
		case xdiff < -limit:
			xdiff = -limit
		}
		// xdiff is already a float64; the redundant conversion is removed.
		c.yaw += dt * -xdiff * spin
	}
	if ydiff != 0 {
		switch { // cap movement amount.
		case ydiff > limit:
			ydiff = limit
		case ydiff < -limit:
			ydiff = -limit
		}
		c.pitch = c.updatePitch(c.pitch, ydiff, spin, dt)
	}
}
// updatePitch applies the vertical mouse delta to pitch and clamps the
// result to plus/minus 90 degrees.
func (c *cam) updatePitch(pitch, ydiff, spin, dt float64) float64 {
	const limit = 90.0 // degrees
	return math.Max(-limit, math.Min(limit, pitch+dt*ydiff*spin))
}
// reset zeroes both the target pitch/yaw and the camera's current
// orientation so the view returns to its starting direction.
func (c *cam) reset(camera *vu.Camera) {
	c.pitch = 0
	c.yaw = 0
	camera.SetPitch(0)
	camera.SetYaw(0)
}
// update eases the camera's actual pitch and yaw toward the target
// values accumulated by look, moving a fixed fraction of the remaining
// difference each call so mouse movement appears smooth.
func (c *cam) update(camera *vu.Camera) {
	fraction := 0.25 // portion of the remaining gap closed per update.
	pitch := camera.Pitch
	if !lin.Aeq(pitch, c.pitch) {
		pitch = (c.pitch-pitch)*fraction + pitch
		camera.SetPitch(pitch)
	}
	yaw := camera.Yaw
	if !lin.Aeq(yaw, c.yaw) {
		yaw = (c.yaw-yaw)*fraction + yaw
		camera.SetYaw(yaw)
	}
}
package thermo
// shomateCarbonMonoxide returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for carbon monoxide, selecting the
// coefficient set for the temperature range containing T
// (split at 1300 — presumably kelvin; confirm against NIST WebBook).
func shomateCarbonMonoxide(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	if T <= 1300 {
		return 25.56759, 6.09613, 4.054656, -2.671301, 0.131021, -118.0089, 227.3665, -110.5271
	}
	return 35.15070, 1.300095, -0.205921, 0.013550, -3.282780, -127.8375, 231.7120, -110.5271
}
// shomateSteam returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for water vapor, selecting the coefficient
// set for the temperature range containing T (split at 1700 —
// presumably kelvin; confirm against NIST WebBook).
func shomateSteam(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	if T <= 1700 {
		return 30.092, 6.832514, 6.793435, -2.53448, 0.082139, -250.881, 223.3967, -241.8264
	}
	return 41.96426, 8.622053, -1.499780, 0.098119, -11.15764, -272.1797, 219.7809, -241.8264
}
// shomateHydrogen returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for hydrogen, selecting the coefficient set
// for the temperature range containing T (splits at 1000 and 2500 —
// presumably kelvin; confirm against NIST WebBook).
// Cleanup: switch cases are evaluated in order, so the lower-bound
// guard "1000 < T &&" in the middle case was dead and has been removed.
func shomateHydrogen(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	switch {
	case T <= 1000:
		return 33.066178, -11.363417, 11.432816, -2.772874, -0.158558, -9.980797, 172.707974, 0
	case T <= 2500:
		return 18.563083, 12.257357, -2.859786, 0.268238, 1.977990, -1.147438, 156.288133, 0.0
	default:
		return 43.413560, -4.293079, 1.272428, -0.096876, -20.533862, -38.515158, 162.081354, 0.0
	}
}
// shomateCarbonDioxide returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for carbon dioxide, selecting the
// coefficient set for the temperature range containing T
// (split at 1200 — presumably kelvin; confirm against NIST WebBook).
func shomateCarbonDioxide(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	if T <= 1200 {
		return 24.99735, 55.18696, -33.69137, 7.948387, -0.136638, -403.6075, 228.2431, -393.5224
	}
	return 58.16639, 2.720074, -0.492289, 0.038844, -6.447293, -425.9186, 263.6125, -393.5224
}
// shomateMethane returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for methane, selecting the coefficient set
// for the temperature range containing T (split at 1300 — presumably
// kelvin; confirm against NIST WebBook).
func shomateMethane(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	if T <= 1300 {
		return -0.703029, 108.4773, -42.52157, 5.862788, 0.678565, -76.84376, 158.7163, -74.87310
	}
	return 85.81217, 11.26467, -2.114146, 0.138190, -26.42221, -153.5327, 224.4143, -74.87310
}
// shomateNitrogen returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for nitrogen, selecting the coefficient set
// for the temperature range containing T (splits at 500 and 2000 —
// presumably kelvin; confirm against NIST WebBook).
// Cleanup: switch cases are evaluated in order, so the lower-bound
// guard "T > 500 &&" in the middle case was dead and has been removed.
func shomateNitrogen(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	switch {
	case T <= 500:
		return 28.98641, 1.853978, -9.647459, 16.63537, 0.000117, -8.671914, 226.4168, 0.0
	case T <= 2000:
		return 19.50583, 19.88705, -8.598535, 1.369784, 0.527601, -4.935202, 212.3900, 0.0
	default:
		return 35.51872, 1.128728, -0.196103, 0.014662, -4.553760, -18.97091, 224.9810, 0.0
	}
}
// shomateOxygen returns the Shomate equation coefficients
// (A, B, C, D, E, F, G, H) for oxygen, selecting the coefficient set
// for the temperature range containing T (splits at 700 and 2000 —
// presumably kelvin; confirm against NIST WebBook).
// Cleanup: switch cases are evaluated in order, so the lower-bound
// guard "T > 700 &&" in the middle case was dead and has been removed.
func shomateOxygen(T float64) (float64, float64, float64, float64, float64, float64, float64, float64) {
	switch {
	case T <= 700:
		return 31.32234, -20.23531, 57.86644, -36.50624, -0.007374, -8.903471, 246.7945, 0.0
	case T <= 2000:
		return 30.03235, 8.772972, -3.988133, 0.788313, -0.741599, -11.32468, 236.1663, 0.0
	default:
		return 20.91111, 10.72071, -2.020498, 0.146449, 9.245722, 5.337651, 237.6185, 0.0
	}
}
package parser
import "strconv"
// isValidSetCommand reports whether a SET command has at least a key and
// a value; additional trailing arguments are allowed through.
// The command name itself is unused.
func isValidSetCommand(command string, arguments []string) bool {
	return len(arguments) >= 2
}
// isValidGetCommand reports whether a GET command has exactly one
// argument, the key.
func isValidGetCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidSetexCommand reports whether a SETEX command has at least a
// key, an integer expiry (arguments[1]), and a value.
func isValidSetexCommand(command string, arguments []string) bool {
	if len(arguments) < 3 {
		return false
	}
	if _, err := strconv.ParseInt(arguments[1], 10, 64); err != nil {
		return false
	}
	return true
}
// isValidSaddCommand reports whether an SADD command has a key and at
// least one member.
func isValidSaddCommand(command string, arguments []string) bool {
	return len(arguments) >= 2
}
// isValidSetnxCommand reports whether a SETNX command has exactly a key
// and a value.
// Bug fix: the previous version returned len(arguments) < 2, which
// accepted exactly the invalid arities and rejected every valid one.
func isValidSetnxCommand(command string, arguments []string) bool {
	return len(arguments) == 2
}
// isValidZaddCommand reports whether a ZADD command has a key followed
// by complete score/member pairs: at least 3 arguments, with an odd
// total (key plus pairs).
func isValidZaddCommand(command string, arguments []string) bool {
	return len(arguments) >= 3 && len(arguments)%2 == 1
}
// isValidHsetCommand reports whether an HSET command has exactly a key,
// a field, and a value.
func isValidHsetCommand(command string, arguments []string) bool {
	return len(arguments) == 3
}
// isValidLsetCommand reports whether an LSET command has exactly a key,
// an integer index (arguments[1]), and a value.
func isValidLsetCommand(command string, arguments []string) bool {
	if len(arguments) != 3 {
		return false
	}
	if _, err := strconv.ParseInt(arguments[1], 10, 64); err != nil {
		return false
	}
	return true
}
// isValidLpushCommand reports whether an LPUSH command has a key and at
// least one value.
// Bug fix: the previous version returned len(arguments) < 2, the exact
// inverse of the intended check.
func isValidLpushCommand(command string, arguments []string) bool {
	return len(arguments) >= 2
}
// isValidMsetCommand reports whether an MSET command consists of one or
// more complete key/value pairs (an even, non-zero argument count).
// Bug fix: the previous version returned len(arguments)%2 == 1, which
// accepted only odd counts — i.e. only incomplete pairs — contradicting
// its own comment about equal numbers of keys and values.
func isValidMsetCommand(command string, arguments []string) bool {
	return len(arguments) >= 2 && len(arguments)%2 == 0
}
// isValidHMSetCommand reports whether an HMSET command has a key
// followed by complete field/value pairs: at least 3 arguments with an
// odd total (key plus pairs).
func isValidHMSetCommand(command string, arguments []string) bool {
	return len(arguments) >= 3 && len(arguments)%2 == 1
}
// isValidHgetCommand reports whether an HGET command has exactly a key
// and a field.
func isValidHgetCommand(command string, arguments []string) bool {
	return len(arguments) == 2
}
// isValidHkeysCommand reports whether an HKEYS command has exactly one
// argument, the key.
func isValidHkeysCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidLpopCommand reports whether an LPOP command has exactly one
// argument, the key.
// Bug fix: the previous version returned len(arguments) != 1, the exact
// inverse of the intended check.
func isValidLpopCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidLindexCommand reports whether an LINDEX command has exactly a
// key and an integer index.
// Bug fix: the previous version returned len(arguments) != 2, the exact
// inverse of the intended check. The index is now also validated as an
// integer, consistent with isValidLsetCommand.
func isValidLindexCommand(command string, arguments []string) bool {
	if len(arguments) != 2 {
		return false
	}
	_, err := strconv.ParseInt(arguments[1], 10, 64)
	return err == nil
}
// isValidGetSetCommand reports whether a GETSET command has exactly a
// key and a value.
// Bug fix: the previous version returned len(arguments) != 2, the exact
// inverse of the intended check.
func isValidGetSetCommand(command string, arguments []string) bool {
	return len(arguments) == 2
}
// isValidHgetAllCommand reports whether an HGETALL command has exactly
// one argument, the key.
// Bug fix: the previous version returned len(arguments) != 1, the exact
// inverse of the intended check.
func isValidHgetAllCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidHlenCommand reports whether an HLEN command has exactly one
// argument, the key.
func isValidHlenCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidHmgetCommand reports whether an HMGET command has a key and at
// least one field.
func isValidHmgetCommand(command string, arguments []string) bool {
	return len(arguments) >= 2
}
// isValidPingCommand reports whether a PING command has zero or one
// arguments (an optional echo message).
func isValidPingCommand(command string, arguments []string) bool {
	return len(arguments) <= 1
}
// isValidHexistsCommand reports whether an HEXISTS command has exactly
// a key and a field.
func isValidHexistsCommand(command string, arguments []string) bool {
	return len(arguments) == 2
}
// isValidLlenCommand reports whether an LLEN command has exactly one
// argument, the key.
func isValidLlenCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidMgetCommand reports whether an MGET command has at least one
// key.
func isValidMgetCommand(command string, arguments []string) bool {
	return len(arguments) >= 1
}
// isValidStrLenCommand reports whether a STRLEN command has exactly one
// argument, the key.
func isValidStrLenCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidZcardCommand reports whether a ZCARD command has exactly one
// argument, the key.
func isValidZcardCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// isValidExistsCommand reports whether an EXISTS command has at least
// one key.
func isValidExistsCommand(command string, arguments []string) bool {
	return len(arguments) >= 1
}
// isValidKeysCommand reports whether a KEYS command has exactly one
// argument, the match pattern.
func isValidKeysCommand(command string, arguments []string) bool {
	return len(arguments) == 1
}
// Commands lists all the functions this cache would support
var Commands = map[string]func(commandKey string, arguments []string) bool{
"SET": isValidSetCommand,
"SADD": isValidSaddCommand,
"SETEX": isValidSetexCommand,
"SETNX": isValidSetnxCommand,
"ZADD": isValidZaddCommand,
"HSET": isValidHsetCommand,
"LSET": isValidLsetCommand,
"LPUSH": isValidLpushCommand,
"MSET": isValidMsetCommand,
"HMSET": isValidHMSetCommand,
"GET": isValidGetCommand,
"HGET": isValidHgetCommand,
"HKEYS": isValidHkeysCommand,
"LPOP": isValidLpopCommand,
"LINDEX": isValidLindexCommand,
"GETSET": isValidGetSetCommand,
"HGETALL": isValidHgetAllCommand,
"HLEN": isValidHlenCommand,
"HMGET": isValidHmgetCommand,
"PING": isValidPingCommand,
"HEXISTS": isValidHexistsCommand,
"EXISTS": isValidExistsCommand,
"LLEN": isValidLlenCommand,
"MGET": isValidMgetCommand,
"STRLEN": isValidStrLenCommand,
"ZCARD": isValidZcardCommand,
"KEYS": isValidKeysCommand,
"HSTRLEN": isValidStrLenCommand,
} | internal/parser/commands.go | 0.553023 | 0.422088 | commands.go | starcoder |
// MyCircularDeque is a bounded double-ended queue backed by a doubly
// linked list with head and tail sentinel nodes.
type MyCircularDeque struct {
	Size     int   // current number of stored items
	Capacity int   // maximum number of items
	Head     *Node // sentinel before the first element
	Tail     *Node // sentinel after the last element
}

// Node is a doubly linked list element holding one deque value.
type Node struct {
	Val  int
	Pre  *Node
	Next *Node
}

/** Initialize your data structure here. Set the size of the deque to be k. */
func Constructor(k int) MyCircularDeque {
	head := &Node{}
	tail := &Node{}
	head.Next = tail
	tail.Pre = head
	return MyCircularDeque{Size: 0, Capacity: k, Head: head, Tail: tail}
}

/** Adds an item at the front of Deque. Return true if the operation is successful. */
func (d *MyCircularDeque) InsertFront(value int) bool {
	if d.IsFull() {
		return false
	}
	first := d.Head.Next
	node := &Node{Val: value, Pre: d.Head, Next: first}
	d.Head.Next = node
	first.Pre = node
	d.Size++
	return true
}

/** Adds an item at the rear of Deque. Return true if the operation is successful. */
func (d *MyCircularDeque) InsertLast(value int) bool {
	if d.IsFull() {
		return false
	}
	last := d.Tail.Pre
	node := &Node{Val: value, Pre: last, Next: d.Tail}
	last.Next = node
	d.Tail.Pre = node
	d.Size++
	return true
}

/** Deletes an item from the front of Deque. Return true if the operation is successful. */
func (d *MyCircularDeque) DeleteFront() bool {
	if d.IsEmpty() {
		return false
	}
	first := d.Head.Next
	d.Head.Next = first.Next
	first.Next.Pre = d.Head
	d.Size--
	return true
}

/** Deletes an item from the rear of Deque. Return true if the operation is successful. */
func (d *MyCircularDeque) DeleteLast() bool {
	if d.IsEmpty() {
		return false
	}
	last := d.Tail.Pre
	d.Tail.Pre = last.Pre
	last.Pre.Next = d.Tail
	d.Size--
	return true
}

/** Get the front item from the deque, or -1 if the deque is empty. */
func (d *MyCircularDeque) GetFront() int {
	if d.IsEmpty() {
		return -1
	}
	return d.Head.Next.Val
}

/** Get the last item from the deque, or -1 if the deque is empty. */
func (d *MyCircularDeque) GetRear() int {
	if d.IsEmpty() {
		return -1
	}
	return d.Tail.Pre.Val
}

/** Checks whether the circular deque is empty or not. */
func (d *MyCircularDeque) IsEmpty() bool {
	return d.Size == 0
}

/** Checks whether the circular deque is full or not. */
func (d *MyCircularDeque) IsFull() bool {
	return d.Size == d.Capacity
}

/**
 * Your MyCircularDeque object will be instantiated and called as such:
 * obj := Constructor(k);
 * param_1 := obj.InsertFront(value);
 * param_2 := obj.InsertLast(value);
 * param_3 := obj.DeleteFront();
 * param_4 := obj.DeleteLast();
 * param_5 := obj.GetFront();
 * param_6 := obj.GetRear();
 * param_7 := obj.IsEmpty();
 * param_8 := obj.IsFull();
 */
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.