code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package input
import (
"errors"
"fmt"
"sync"
"sync/atomic"
"time"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// init registers the sequence input type: its constructor, documentation,
// config sanitisation function and field specs.
func init() {
	Constructors[TypeSequence] = TypeSpec{
		constructor: NewSequence,
		Status:      docs.StatusBeta,
		Summary: `
Reads messages from a sequence of child inputs, starting with the first and once
that input gracefully terminates starts consuming from the next, and so on.`,
		Description: `
This input is useful for consuming from inputs that have an explicit end but
must not be consumed in parallel.`,
		Footnotes: `
## Examples
A common use case might be to generate a message at the end of our main input:
` + "```yaml" + `
input:
sequence:
inputs:
- csv:
paths: [ ./dataset.csv ]
- bloblang:
count: 1
mapping: 'root = {"status":"finished"}'
` + "```" + `
With this config once the records within ` + "`./dataset.csv`" + ` are exhausted
our final payload ` + "`" + `{"status":"finished"}` + "`" + ` will be routed
through the pipeline.`,
		// sanitiseConfigFunc reduces a full config to only the child input
		// configs, sanitising each child in turn and failing on the first
		// child that cannot be sanitised.
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			inputsSanit := make([]interface{}, 0, len(conf.Sequence.Inputs))
			for _, in := range conf.Sequence.Inputs {
				sanit, err := SanitiseConfig(in)
				if err != nil {
					return nil, err
				}
				inputsSanit = append(inputsSanit, sanit)
			}
			return map[string]interface{}{
				"inputs": inputsSanit,
			}, nil
		},
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("inputs", "An array of inputs to read from sequentially."),
		},
		Categories: []Category{
			CategoryUtility,
		},
	}
}
//------------------------------------------------------------------------------
// SequenceConfig contains configuration values for the Sequence input type.
type SequenceConfig struct {
	// Inputs is the ordered list of child input configs that will be consumed
	// one after another.
	Inputs []Config `json:"inputs" yaml:"inputs"`
}

// NewSequenceConfig creates a new SequenceConfig with default values (an
// empty list of child inputs).
func NewSequenceConfig() SequenceConfig {
	return SequenceConfig{
		Inputs: []Config{},
	}
}
//------------------------------------------------------------------------------
// Sequence is an input type that reads from a sequence of inputs, starting with
// the first, and when it ends gracefully it moves onto the next, and so on.
type Sequence struct {
	running int32 // 1 while running, 0 after CloseAsync; accessed atomically

	conf SequenceConfig

	// targetMut guards target and remaining.
	targetMut sync.Mutex
	target    Type             // currently active child input; nil between inputs
	remaining []sequenceTarget // child configs not yet started, in order

	// Wrapper dependencies are retained so each child input can be
	// constructed lazily with the same manager/logger/metrics.
	wrapperMgr   types.Manager
	wrapperLog   log.Modular
	wrapperStats metrics.Type

	stats metrics.Type
	log   log.Modular

	transactions chan types.Transaction

	closeChan  chan struct{} // closed to request shutdown
	closedChan chan struct{} // closed once the read loop has fully terminated
}

// sequenceTarget pairs a child input config with its original index, used for
// error reporting.
type sequenceTarget struct {
	index  int
	config Config
}
// NewSequence creates a new Sequence input type. It requires at least one
// child input config, eagerly constructs the first child so config errors
// surface immediately, and starts the background read loop.
func NewSequence(
	conf Config,
	mgr types.Manager,
	log log.Modular,
	stats metrics.Type,
) (Type, error) {
	if len(conf.Sequence.Inputs) == 0 {
		return nil, errors.New("requires at least one child input")
	}

	// Record each child config along with its original index for error
	// messages.
	targets := make([]sequenceTarget, 0, len(conf.Sequence.Inputs))
	for i, c := range conf.Sequence.Inputs {
		targets = append(targets, sequenceTarget{
			index:  i,
			config: c,
		})
	}

	rdr := &Sequence{
		running:      1,
		conf:         conf.Sequence,
		remaining:    targets,
		wrapperLog:   log,
		wrapperStats: stats,
		wrapperMgr:   mgr,
		log:          log.NewModule(".sequence"),
		stats:        metrics.Namespaced(stats, "sequence"),
		transactions: make(chan types.Transaction),
		closeChan:    make(chan struct{}),
		closedChan:   make(chan struct{}),
	}

	// Construct the first child input now so a broken config fails fast.
	if target, err := rdr.createNextTarget(); err != nil {
		return nil, err
	} else if target == nil {
		// Unreachable in practice since at least one input was verified
		// above, but guarded for safety.
		return nil, errors.New("failed to initialize first input")
	}

	go rdr.loop()
	return rdr, nil
}
//------------------------------------------------------------------------------
// getTarget returns the currently active child input, or nil when no child is
// active, under the target mutex.
func (r *Sequence) getTarget() Type {
	r.targetMut.Lock()
	defer r.targetMut.Unlock()
	return r.target
}
// createNextTarget attempts to construct the next child input in the
// sequence. It returns (nil, nil) once all children have been exhausted. On
// construction failure the failing config is left at the head of the
// remaining list so the caller may retry, and the error is annotated with
// the child's original index.
func (r *Sequence) createNextTarget() (Type, error) {
	var target Type
	var err error

	r.targetMut.Lock()
	r.target = nil // Clear the old (finished) input while holding the lock.
	if len(r.remaining) > 0 {
		if target, err = New(
			r.remaining[0].config,
			r.wrapperMgr,
			r.wrapperLog,
			r.wrapperStats,
		); err == nil {
			// Only consume the config once construction has succeeded.
			r.remaining = r.remaining[1:]
		} else {
			err = fmt.Errorf("failed to initialize input index %v: %w", r.remaining[0].index, err)
		}
	}
	if target != nil {
		r.target = target
	}
	r.targetMut.Unlock()

	return target, err
}
// loop is the main read loop: it forwards transactions from the active child
// input, moves to the next child whenever the current one terminates
// gracefully, and shuts down when all children are exhausted or CloseAsync is
// called.
func (r *Sequence) loop() {
	defer func() {
		// Ensure any active child is fully shut down before signalling our
		// own closure. WaitForClose is retried indefinitely, blocking until
		// the child confirms shutdown.
		if t := r.getTarget(); t != nil {
			t.CloseAsync()
			err := t.WaitForClose(time.Second)
			for ; err != nil; err = t.WaitForClose(time.Second) {
			}
		}
		close(r.transactions)
		close(r.closedChan)
	}()

	target := r.getTarget()

runLoop:
	for atomic.LoadInt32(&r.running) == 1 {
		if target == nil {
			var err error
			if target, err = r.createNextTarget(); err != nil {
				// Construction failed; the failing config remains queued, so
				// wait a second (or until shutdown) and retry it.
				r.log.Errorf("Unable to start next sequence: %v\n", err)
				select {
				case <-time.After(time.Second):
				case <-r.closeChan:
					return
				}
				continue runLoop
			} else if target == nil {
				// No children left: graceful end of the whole sequence.
				r.log.Infoln("Exhausted all sequence inputs, shutting down.")
				return
			}
		}

		var tran types.Transaction
		var open bool
		select {
		case tran, open = <-target.TransactionChan():
			if !open {
				// The child terminated gracefully; move on to the next one.
				target.CloseAsync() // For good measure.
				target = nil
				continue runLoop
			}
		case <-r.closeChan:
			return
		}

		select {
		case r.transactions <- tran:
		case <-r.closeChan:
			return
		}
	}
}
// TransactionChan returns a transactions channel for consuming messages from
// this input type.
func (r *Sequence) TransactionChan() <-chan types.Transaction {
	return r.transactions
}

// Connected returns a boolean indicating whether this input is currently
// connected to its target.
func (r *Sequence) Connected() bool {
	target := r.getTarget()
	if target == nil {
		return false
	}
	return target.Connected()
}
// CloseAsync shuts down the Sequence input and stops processing requests.
// It is safe to call multiple times; only the first call closes the channel.
func (r *Sequence) CloseAsync() {
	if !atomic.CompareAndSwapInt32(&r.running, 1, 0) {
		return // Already closing.
	}
	close(r.closeChan)
}
// WaitForClose blocks until the Sequence input has closed down, or returns
// types.ErrTimeout once the timeout elapses.
func (r *Sequence) WaitForClose(timeout time.Duration) error {
	select {
	case <-time.After(timeout):
		return types.ErrTimeout
	case <-r.closedChan:
		return nil
	}
}
//------------------------------------------------------------------------------
package ctxerr
import (
"strconv"
"strings"
"github.com/maxatome/go-testdeep/internal/util"
)
// Path defines a structure depth path, typically used to mark a
// position during a deep traversal in case of error.
type Path []pathLevel

// pathLevelKind enumerates the kinds of path levels declared below.
type pathLevelKind uint8

// pathLevel is one step of a Path.
type pathLevel struct {
	Content  string // field name, index, key, function name or custom text
	Pointers int    // number of pointer dereferences attached to this level
	Kind     pathLevelKind
}

const (
	levelStruct pathLevelKind = iota // struct field access: .Content
	levelArray                       // array/slice index: [Content]
	levelMap                         // map key: [Content]
	levelFunc                        // function call: Content(...)
	levelCustom                      // raw text, e.g. the root label
)
// NewPath returns a new Path initialized with "root" root node.
func NewPath(root string) Path {
	rootLevel := pathLevel{
		Kind:    levelCustom,
		Content: root,
	}
	return Path{rootLevel}
}
// Len returns the number of levels, excluding pointers ones.
func (p Path) Len() int {
	return len(p)
}

// Equal returns true if "p" and "o" are equal, false otherwise.
func (p Path) Equal(o Path) bool {
	if len(p) != len(o) {
		return false
	}
	for i, level := range p {
		if level != o[i] {
			return false
		}
	}
	return true
}
// addLevel returns a copy of "p" with "level" appended. The copy reserves
// room for exactly one extra level so the append never mutates a backing
// array shared with other paths.
func (p Path) addLevel(level pathLevel) Path {
	// Renamed from "new", which shadowed the builtin of the same name.
	levels := make(Path, len(p), len(p)+1)
	copy(levels, p)
	return append(levels, level)
}
// Copy returns a new Path, exact but independent copy of "p". A nil path
// stays nil.
func (p Path) Copy() Path {
	if p == nil {
		return nil
	}
	// Renamed from "new", which shadowed the builtin of the same name.
	c := make(Path, len(p))
	copy(c, p)
	return c
}
// AddField adds a level corresponding to a struct field. If the previous
// level carries pointers, one pointer is absorbed, since a Go selector on a
// pointer dereferences automatically (ptr.Field).
func (p Path) AddField(field string) Path {
	if p == nil {
		return nil
	}
	// Renamed from "new", which shadowed the builtin of the same name.
	c := p.addLevel(pathLevel{
		Kind:    levelStruct,
		Content: field,
	})
	if len(c) > 1 && c[len(c)-2].Pointers > 0 {
		c[len(c)-2].Pointers--
	}
	return c
}
// AddArrayIndex adds a level corresponding to an array index.
func (p Path) AddArrayIndex(index int) Path {
	if p == nil {
		return nil
	}
	level := pathLevel{
		Kind:    levelArray,
		Content: strconv.Itoa(index),
	}
	return p.addLevel(level)
}

// AddMapKey adds a level corresponding to a map key.
func (p Path) AddMapKey(key interface{}) Path {
	if p == nil {
		return nil
	}
	level := pathLevel{
		Kind:    levelMap,
		Content: util.ToString(key),
	}
	return p.addLevel(level)
}
// AddPtr adds "num" pointers levels to the last level of the path.
func (p Path) AddPtr(num int) Path {
	if p == nil {
		return nil
	}
	// Renamed from "new", which shadowed the builtin of the same name.
	c := p.Copy()
	// len(c) > 0 is guaranteed here: Copy returns nil only for a nil path,
	// and non-nil paths always carry at least the root level from NewPath.
	c[len(c)-1].Pointers += num
	return c
}
// AddFunctionCall adds a level corresponding to a function call.
func (p Path) AddFunctionCall(fn string) Path {
	if p == nil {
		return nil
	}
	level := pathLevel{
		Kind:    levelFunc,
		Content: fn,
	}
	return p.addLevel(level)
}

// AddCustomLevel adds a custom level.
func (p Path) AddCustomLevel(custom string) Path {
	if p == nil {
		return nil
	}
	level := pathLevel{
		Kind:    levelCustom,
		Content: custom,
	}
	return p.addLevel(level)
}
// String renders the path in Go-like syntax, e.g. DATA[0].Field or
// fn(*(DATA.Ptr)). Function levels wrap everything built so far in a call;
// pointer counts on the previous level force parentheses so the dereference
// binds correctly.
func (p Path) String() string {
	if len(p) == 0 {
		return ""
	}

	var str string
	for i, level := range p {
		var ptrs string
		if level.Pointers > 0 {
			ptrs = strings.Repeat("*", level.Pointers)
		}

		if level.Kind == levelFunc {
			// A function call wraps the whole path built so far.
			str = ptrs + level.Content + "(" + str + ")"
		} else {
			if i > 0 && p[i-1].Pointers > 0 {
				// Last level contains pointer(s), protect them
				str = ptrs + "(" + str + ")"
			} else {
				str = ptrs + str
			}

			switch level.Kind {
			case levelStruct:
				str += "." + level.Content
			case levelArray, levelMap:
				str += "[" + level.Content + "]"
			default:
				str += level.Content
			}
		}
	}
	return str
}
/*
func setPtrs(buf []byte, num int) {
for i := 0; i < num; i++ {
buf[i] = '*'
}
}
func (p Path) String() string {
if len(p) == 0 {
return ""
}
size := 0
for i, level := range p {
size += level.Pointers + len(level.Content)
if level.Kind == levelFunc || (i > 0 && p[i-1].Pointers > 0) {
size += 2 // () ⇒ content(x) || (x)content
}
switch level.Kind {
case levelStruct:
size++ // "."
case levelArray, levelMap:
size += 2 // []
}
}
buf := make([]byte, size)
curLen := 0
for i, level := range p {
if level.Kind == levelFunc {
// **content(prev)
levelLen := level.Pointers + len(level.Content) + 1
copy(buf[levelLen:], buf[:curLen])
setPtrs(buf, level.Pointers)
copy(buf[level.Pointers:], []byte(level.Content))
buf[levelLen-1] = '('
curLen += levelLen
buf[curLen] = ')'
curLen++
} else {
if i > 0 && p[i-1].Pointers > 0 {
// **(prev)content
copy(buf[level.Pointers+1:], buf[:curLen])
setPtrs(buf, level.Pointers)
buf[level.Pointers] = '('
curLen += level.Pointers + 1
buf[curLen] = ')'
curLen++
} else {
// **prevcontent
if level.Pointers > 0 {
copy(buf[level.Pointers:], buf[:curLen])
setPtrs(buf, level.Pointers)
curLen += level.Pointers
}
}
switch level.Kind {
case levelStruct:
buf[curLen] = '.'
curLen++
copy(buf[curLen:], []byte(level.Content))
curLen += len(level.Content)
case levelArray, levelMap:
buf[curLen] = '['
curLen++
copy(buf[curLen:], []byte(level.Content))
curLen += len(level.Content)
buf[curLen] = ']'
curLen++
default:
copy(buf[curLen:], []byte(level.Content))
curLen += len(level.Content)
}
}
}
return string(buf)
}
*/
package geom
import (
"github.com/water-vapor/euclidea-solver/configs"
"github.com/water-vapor/euclidea-solver/pkg/hashset"
"math"
)
// Point is a object containing its two coordinates
type Point struct {
	hashset.Serializable
	x, y float64
}

// NewPoint creates a point from coordinates
func NewPoint(x, y float64) *Point {
	return &Point{x: x, y: y}
}

// GetCoords returns its coordinates, should be only used for debugging
func (pt *Point) GetCoords() (float64, float64) {
	return pt.x, pt.y
}

// Serialize returns the hash of a point. Each coordinate is scaled by
// configs.HashPrecision and rounded, so nearly-equal points hash
// identically; the two rounded values are combined into one int64 via a
// prime multiplier.
func (pt *Point) Serialize() interface{} {
	ptx := int64(math.Round(pt.x * configs.HashPrecision))
	pty := int64(math.Round(pt.y * configs.HashPrecision))
	return ptx*configs.Prime + pty
}
// OnLine checks if the point is on a line (delegates to Line.ContainsPoint).
func (pt *Point) OnLine(l *Line) bool {
	return l.ContainsPoint(pt)
}

// OnHalfLine checks if the point is on a half line.
func (pt *Point) OnHalfLine(h *HalfLine) bool {
	return h.ContainsPoint(pt)
}

// InHalfLineRange checks if the point is in the coordinate range of a half line.
// This function should only be used when the point is guaranteed to be on the
// line which the half line belongs to.
func (pt *Point) InHalfLineRange(h *HalfLine) bool {
	return h.PointInRange(pt)
}

// OnSegment checks if the point is on a segment.
func (pt *Point) OnSegment(s *Segment) bool {
	return s.ContainsPoint(pt)
}

// InSegmentRange checks if the point is in the coordinate range of a segment.
// This function should only be used when the point is guaranteed to be on the
// line which the segment belongs to.
func (pt *Point) InSegmentRange(s *Segment) bool {
	return s.PointInRange(pt)
}

// OnCircle checks if the point is on a circle.
func (pt *Point) OnCircle(c *Circle) bool {
	return c.ContainsPoint(pt)
}
// DistanceToLine calculates the perpendicular distance from the point to a
// line given in general form a*x + b*y + c = 0. math.Hypot(a, b) replaces
// math.Sqrt(a*a+b*b) for better numerical robustness: it avoids overflow and
// underflow of the intermediate squares.
func (pt *Point) DistanceToLine(l *Line) float64 {
	return math.Abs(l.a*pt.x+l.b*pt.y+l.c) / math.Hypot(l.a, l.b)
}
// Equal checks equality of two points with tolerance: both coordinate
// differences must be strictly below configs.Tolerance.
func (pt *Point) Equal(pt2 *Point) bool {
	return math.Abs(pt.x-pt2.x) < configs.Tolerance && math.Abs(pt.y-pt2.y) < configs.Tolerance
}
package common
import (
"time"
)
// Data is a generic string-keyed map with typed accessor helpers.
type Data map[string]interface{}

// Get returns the raw value stored under key (nil if absent).
func (d Data) Get(key string) interface{} { return d[key] }

// Set stores value under key.
func (d Data) Set(key string, value interface{}) { d[key] = value }

// Clear sets the value under key to nil. NOTE(review): the key itself stays
// present in the map (this is not delete(d, key)) — presumably intentional,
// but worth confirming against callers.
func (d Data) Clear(key string) { d[key] = nil }
// Typed accessors: each delegates to an Assert*/MustAssert* helper (defined
// elsewhere in this package) to coerce the raw value; behaviour for missing
// or mistyped values is defined by those helpers.
func (d Data) GetBool(key string) bool                    { return MustAssertBool(d.Get(key)) }
func (d Data) GetData(key string) Data                    { return AssertData(d.Get(key)) }
func (d Data) GetDataSlice(key string) []Data             { return AssertDataSlice(d.Get(key)) }
func (d Data) GetFloat64(key string) float64              { return AssertFloat64(d.Get(key)) }
func (d Data) GetInt(key string) int                      { return AssertInt(d.Get(key)) }
func (d Data) GetInt32(key string) int32                  { return AssertInt32(d.Get(key)) }
func (d Data) GetInt32Slice(key string) []int32           { return AssertInt32Slice(d.Get(key)) }
func (d Data) GetInt64(key string) int64                  { return AssertInt64(d.Get(key)) }
func (d Data) GetInterfaceSlice(key string) []interface{} { return AssertInterfaceSlice(d.Get(key)) }
func (d Data) GetMap(key string) map[string]interface{}   { return AssertMap(d.Get(key)) }
func (d Data) GetMapData(key string) Data                 { return AssertMapData(d.Get(key)) }
func (d Data) GetStr(key string) string                   { return AssertStr(d.Get(key)) }

// GetStrInt parses the string stored under key as an integer; parse failures
// are swallowed and reported as 0.
func (d Data) GetStrInt(key string) int {
	x, err := Atoi(d.GetStr(key))
	if err != nil {
		return 0
	}
	return x
}

func (d Data) GetStrSlice(key string) []string { return AssertStrSlice(d.Get(key)) }
func (d Data) GetTime(key string) time.Time    { return AssertTime(d.Get(key)) }
func (d Data) GetInnerValue(keys ...string) (v interface{}) {
inner := d
for i, k := range keys {
if i == len(keys)-1 {
v = inner.Get(k)
break
}
inner = inner.GetMap(k)
}
return
}
func (d Data) SetInnerValue(v interface{}, keys ...string) {
inner := d
for i, k := range keys {
if i == len(keys)-1 {
inner.Set(k, v)
return
}
inner = inner.GetMap(k)
}
}
func (d Data) GetInnerStr(keys ...string) string {
return AssertStr(d.GetInnerValue(keys...))
}
func (d Data) GetInnerData(keys ...string) Data {
return AssertMapData(d.GetInnerValue(keys...))
} | common/data.go | 0.535341 | 0.576333 | data.go | starcoder |
package igloo
import (
"github.com/hajimehoshi/ebiten/v2"
)
// SpriteOptions determines the starting state of our sprite.
type SpriteOptions struct {
	Image     *ebiten.Image
	Transform *Transform
	// Value of 0 will use the image width
	Width float64
	// Value of 0 will use the image height
	Height float64
	// Defaults to top left
	Anchor Vec2
}

// Sprite represents a renderable element in the world.
type Sprite struct {
	Image     *ebiten.Image
	Transform *Transform

	// dirty flagging values
	anchor Vec2
	width  float64
	height float64

	// draw cache, refreshed on the next Draw when isDirty (or the transform
	// or camera) reports dirty.
	isDirty bool
	inView  bool // result of the last camera visibility check
	options *ebiten.DrawImageOptions
	geom    ebiten.GeoM // cached world-space transform matrix
}
// IsDirty reports whether internal state has changed since the last draw;
// when true the next drawing attempt refreshes cached drawing values.
func (s *Sprite) IsDirty() bool {
	return s.isDirty
}

// Clean resets our dirty state, automatically called when drawing.
func (s *Sprite) Clean() {
	s.isDirty = false
}

// Anchor determines our rotation point.
func (s *Sprite) Anchor() Vec2 {
	return s.anchor
}

// SetAnchor changes the rotation point, marking the sprite dirty when the
// value actually changes.
// (0, 0) rotates around the top left,
// (0.5, 0.5) rotates around the center,
// (1, 1) rotates around the bottom right.
func (s *Sprite) SetAnchor(anchor Vec2) {
	if s.anchor != anchor {
		s.anchor = anchor
		s.isDirty = true
	}
}

// Width returns our drawing width.
func (s *Sprite) Width() float64 {
	return s.width
}

// SetWidth changes the drawing width, marking the sprite dirty on change.
func (s *Sprite) SetWidth(width float64) {
	if s.width != width {
		s.width = width
		s.isDirty = true
	}
}

// Height returns our drawing height.
func (s *Sprite) Height() float64 {
	return s.height
}

// SetHeight changes the drawing height, marking the sprite dirty on change.
func (s *Sprite) SetHeight(height float64) {
	if s.height != height {
		s.height = height
		s.isDirty = true
	}
}

// Size returns the drawing width and height.
func (s *Sprite) Size() (float64, float64) {
	return s.width, s.height
}
// createGeoM builds the world-space transform matrix for the sprite: scale
// from image size to draw size, offset by the anchor, rotate, then translate
// to the transform's position. The order matters: each operation applies on
// top of the previous ones.
func (s *Sprite) createGeoM() ebiten.GeoM {
	geom := ebiten.GeoM{}

	intWidth, intHeight := s.Image.Size()
	imageWidth := float64(intWidth)
	imageHeight := float64(intHeight)

	// Only scale when the draw size differs from the source image size.
	if imageWidth != s.width || imageHeight != s.height {
		geom.Scale(s.width/imageWidth, s.height/imageHeight)
	}

	// Shift so the anchor point sits at the origin before rotating.
	if s.anchor != Vec2Zero {
		geom.Translate(
			-s.width*s.anchor.X,
			-s.height*s.anchor.Y,
		)
	}

	if s.Transform.Rotation() != 0 {
		geom.Rotate(s.Transform.Rotation())
	}

	geom.Translate(s.Transform.X(), s.Transform.Y())
	return geom
}
// Draw will render the sprite onto the canvas. The world matrix is rebuilt
// only when the transform or sprite is dirty; the visibility check and screen
// matrix are additionally refreshed when the camera is dirty. Otherwise
// cached values are reused, and sprites outside the camera view are skipped.
func (s *Sprite) Draw(canvas Canvaser, camera Camera) {
	transformDirty := s.Transform.IsDirty()

	if transformDirty || s.IsDirty() {
		s.geom = s.createGeoM()
	}

	if transformDirty || s.IsDirty() || camera.IsDirty() {
		// Recompute the world-space top-left corner for the culling test.
		anchorOffset := s.anchor.Mul(Vec2{X: s.width, Y: s.height})
		topLeft := s.Transform.Position().Sub(anchorOffset)
		s.inView = camera.IsInView(topLeft, s.width, s.height)

		if s.inView {
			screenGeom := camera.WorldToScreen(s.geom)
			s.options = &ebiten.DrawImageOptions{
				GeoM: screenGeom,
			}
		}
	}

	if s.inView {
		canvas.DrawImage(s.Image, s.options)
	}

	s.Clean()
}
// NewSprite will create a basic sprite from image and transform.
// Anchor defaults to (0,0) and size will default to the image size.
func NewSprite(options SpriteOptions) *Sprite {
w, h := options.Image.Size()
if options.Width <= 0 {
options.Width = float64(w)
}
if options.Height <= 0 {
options.Height = float64(h)
}
return &Sprite{
Image: options.Image,
Transform: options.Transform,
anchor: options.Anchor,
width: options.Width,
height: options.Height,
isDirty: true, // start dirty
inView: false,
geom: ebiten.GeoM{},
}
} | sprite.go | 0.801781 | 0.457985 | sprite.go | starcoder |
package palettor
import (
"image/color"
"sort"
)
// A Palette represents the dominant colors extracted from an image, as a
// mapping from color to the weight of that color's cluster. The weight can be
// used as an approximation for that color's relative dominance in an image.
type Palette struct {
	colorWeights map[color.Color]float64
	converged    bool
	iterations   int
}

// PaletteCentroid is a Palette variant keyed by centroid name rather than by
// a concrete color value.
type PaletteCentroid struct {
	colorWeights map[string]float64
	converged    bool
	iterations   int
}

// Entry is a color and its weight in a Palette.
type Entry struct {
	Color  color.Color `json:"color"`
	Weight float64     `json:"weight"`
}

// Entries returns a slice of Entry structs, sorted by ascending weight.
func (p *Palette) Entries() []Entry {
	entries := make([]Entry, 0, p.Count())
	// Loop variables are named c/w rather than "color", which previously
	// shadowed the image/color package identifier.
	for c, w := range p.colorWeights {
		entries = append(entries, Entry{c, w})
	}
	sort.Sort(byWeight(entries))
	return entries
}

// Colors returns a slice of the colors that comprise a Palette.
func (p *Palette) Colors() []color.Color {
	var colors []color.Color
	for c := range p.colorWeights {
		colors = append(colors, c)
	}
	return colors
}

// Colors returns a slice of the centroid names that comprise a
// PaletteCentroid, excluding the sentinel "none" entry.
func (p *PaletteCentroid) Colors() []string {
	var colors []string
	for c := range p.colorWeights {
		if c != "none" {
			colors = append(colors, c)
		}
	}
	return colors
}

// Converged returns a bool indicating whether a stable set of dominant
// colors was found before the maximum number of iterations was reached.
func (p *Palette) Converged() bool {
	return p.converged
}

// Count returns the number of colors in a Palette.
func (p *Palette) Count() int {
	return len(p.colorWeights)
}

// Iterations returns the number of iterations required to extract the colors
// of a Palette.
func (p *Palette) Iterations() int {
	return p.iterations
}

// Weight returns the weight of a color in a Palette as a float in the range
// [0, 1], or 0 if a given color is not found.
func (p *Palette) Weight(c color.Color) float64 {
	return p.colorWeights[c]
}

// Weight returns the weight of a centroid in a PaletteCentroid as a float in
// the range [0, 1], or 0 if the centroid is not found.
func (p *PaletteCentroid) Weight(c string) float64 {
	return p.colorWeights[c]
}

// byWeight implements sort.Interface, ordering entries by ascending weight.
type byWeight []Entry

func (a byWeight) Len() int           { return len(a) }
func (a byWeight) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a byWeight) Less(i, j int) bool { return a[i].Weight < a[j].Weight }
package binary
import (
"errors"
"github.com/matrixorigin/matrixone/pkg/container/nulls"
"github.com/matrixorigin/matrixone/pkg/container/types"
"github.com/matrixorigin/matrixone/pkg/container/vector"
"github.com/matrixorigin/matrixone/pkg/encoding"
"github.com/matrixorigin/matrixone/pkg/vectorize/extract"
"github.com/matrixorigin/matrixone/pkg/vm/process"
)
/*
// when implicit cast from varchar to date is ready, get rid of this
func ExtractFromString(vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
left, right := vectors[0], vectors[1]
resultType := types.Type{Oid: types.T_uint32, Size: 4}
resultElementSize := int(resultType.Size)
switch {
case left.IsScalar() && right.IsScalar():
if left.ConstVectorIsNull() || right.ConstVectorIsNull() {
return proc.AllocScalarNullVector(resultType), nil
}
leftValues, rightValues := left.Col.(*types.Bytes), right.Col.(*types.Bytes)
resultVector := vector.NewConst(resultType)
resultValues := make([]uint32, 1)
unit := string(leftValues.Data)
inputDate, err := types.ParseDate(string(rightValues.Get(0)))
if err != nil {
return nil, errors.New("invalid input")
}
resultValues, err = extract.ExtractFromDate(unit, []types.Date{inputDate}, resultValues)
if err != nil {
return nil, errors.New("invalid input")
}
vector.SetCol(resultVector, resultValues)
return resultVector, nil
case left.IsScalar() && !right.IsScalar():
if left.ConstVectorIsNull() {
return proc.AllocScalarNullVector(resultType), nil
}
leftValues, rightValues := left.Col.(*types.Bytes), right.Col.(*types.Bytes)
unit := string(leftValues.Data)
resultValues, err := proc.AllocVector(resultType, int64(resultElementSize) * int64(len(rightValues.Lengths)))
if
result, resultNsp, err := extract.ExtractFromInputBytes(unit, rightValues, right.Nsp, )
default:
return nil, errors.New("invalid input")
}
}
*/
// ExtractFromDate extracts a date part (named by the first, scalar string
// vector, e.g. the unit) from the second vector of date values, producing a
// uint32 result vector. Supported shapes are scalar/scalar and scalar/vector;
// anything else is rejected.
func ExtractFromDate(vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
	left, right := vectors[0], vectors[1]
	resultType := types.Type{Oid: types.T_uint32, Size: 4}
	resultElementSize := int(resultType.Size)
	leftValues, rightValues := vector.MustBytesCols(left), vector.MustTCols[types.Date](right)
	switch {
	case left.IsScalar() && right.IsScalar():
		// Scalar unit + scalar date -> scalar result.
		if left.ConstVectorIsNull() || right.ConstVectorIsNull() {
			return proc.AllocScalarNullVector(resultType), nil
		}
		resultVector := vector.NewConst(resultType)
		resultValues := make([]uint32, 1)
		unit := string(leftValues.Get(0))
		results, err := extract.ExtractFromDate(unit, rightValues, resultValues)
		if err != nil {
			// NOTE(review): the underlying error detail is discarded here,
			// whereas the vector branch below returns it verbatim — worth
			// unifying.
			return nil, errors.New("invalid input")
		}
		vector.SetCol(resultVector, results)
		return resultVector, nil
	case left.IsScalar() && !right.IsScalar():
		// Scalar unit + date column -> result column of the same length.
		if left.ConstVectorIsNull() {
			return proc.AllocScalarNullVector(resultType), nil
		}
		resultVector, err := proc.AllocVector(resultType, int64(resultElementSize*len(rightValues)))
		if err != nil {
			return nil, err
		}
		resultValues := encoding.DecodeUint32Slice(resultVector.Data)
		resultValues = resultValues[:len(rightValues)]
		unit := string(leftValues.Get(0))
		results, err := extract.ExtractFromDate(unit, rightValues, resultValues)
		if err != nil {
			return nil, err
		}
		// Propagate nulls from the input column.
		nulls.Set(resultVector.Nsp, right.Nsp)
		vector.SetCol(resultVector, results)
		return resultVector, nil
	default:
		return nil, errors.New("invalid input")
	}
}
// ExtractFromDatetime extracts a part (named by the first, scalar string
// vector) from the second vector of datetime values, producing a varchar
// result vector. Supported shapes are scalar/scalar and scalar/vector;
// anything else is rejected.
func ExtractFromDatetime(vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
	left, right := vectors[0], vectors[1]
	resultType := types.Type{Oid: types.T_varchar, Size: 24}
	resultElementSize := int(resultType.Size)
	leftValues, rightValues := vector.MustBytesCols(left), vector.MustTCols[types.Datetime](right)
	switch {
	case left.IsScalar() && right.IsScalar():
		// Scalar unit + scalar datetime -> scalar result.
		if left.ConstVectorIsNull() || right.ConstVectorIsNull() {
			return proc.AllocScalarNullVector(resultType), nil
		}
		resultVector := vector.NewConst(resultType)
		resultValues := &types.Bytes{
			Data:    make([]byte, 0),
			Offsets: make([]uint32, 1),
			Lengths: make([]uint32, 1),
		}
		unit := string(leftValues.Get(0))
		results, err := extract.ExtractFromDatetime(unit, rightValues, resultValues)
		if err != nil {
			// NOTE(review): the underlying error detail is discarded here but
			// returned verbatim in the vector branch below — worth unifying.
			return nil, errors.New("invalid input")
		}
		vector.SetCol(resultVector, results)
		return resultVector, nil
	case left.IsScalar() && !right.IsScalar():
		// Scalar unit + datetime column -> result column of the same length.
		if left.ConstVectorIsNull() {
			return proc.AllocScalarNullVector(resultType), nil
		}
		// NOTE(review): this allocation is sized from the fixed element size,
		// yet the string results are built in the separate Bytes container
		// below and attached via SetCol — confirm the allocation is needed.
		resultVector, err := proc.AllocVector(resultType, int64(resultElementSize*len(rightValues)))
		if err != nil {
			return nil, err
		}
		resultValues := &types.Bytes{
			Data:    make([]byte, 0),
			Offsets: make([]uint32, len(rightValues)),
			Lengths: make([]uint32, len(rightValues)),
		}
		unit := string(leftValues.Get(0))
		results, err := extract.ExtractFromDatetime(unit, rightValues, resultValues)
		if err != nil {
			return nil, err
		}
		// Propagate nulls from the input column.
		nulls.Set(resultVector.Nsp, right.Nsp)
		vector.SetCol(resultVector, results)
		return resultVector, nil
	default:
		return nil, errors.New("invalid input")
	}
}
package tada
import (
"time"
)
// valueContainer holds a single container (column or label level): its
// values, parallel null flags, an optional string cache, a name and an id.
type valueContainer struct {
	slice  interface{}
	isNull []bool
	cache  []string // presumably a lazily-built string rendering of slice — confirm
	name   string
	id     string
}

// valueContainerAlias is the exported-field mirror of valueContainer used for
// (de)serialization; note the cache is deliberately omitted.
type valueContainerAlias struct {
	Slice  interface{} `json:"slice"`
	IsNull []bool      `json:"isNull"`
	Name   string      `json:"name"`
	ID     string      `json:"id"`
}

// tadaID is the prefix used for auto-generated container ids.
var tadaID = "tadaID_"

// A Series is a single column of data with one or more levels of aligned labels.
type Series struct {
	values     *valueContainer
	labels     []*valueContainer
	sharedData bool // true when the data is shared with another Series/DataFrame
	err        error
}

// A SeriesIterator iterates over the rows in a Series.
type SeriesIterator struct {
	current int
	s       *Series
}

// A SeriesMutator is used to change Series values in place.
type SeriesMutator struct {
	series *Series
}

// A DataFrame is one or more columns of data with one or more levels of aligned labels.
// A DataFrame is analogous to a spreadsheet.
type DataFrame struct {
	labels        []*valueContainer
	values        []*valueContainer
	name          string
	err           error
	colLevelNames []string
}

// dataFrameAlias is the exported-field mirror of DataFrame used for
// (de)serialization.
type dataFrameAlias struct {
	Labels        []*valueContainer `json:"labels"`
	Values        []*valueContainer `json:"values"`
	Name          string            `json:"name"`
	ColLevelNames []string          `json:"colLevelNames"`
}

// A DataFrameIterator iterates over the rows in a DataFrame.
type DataFrameIterator struct {
	current int
	df      *DataFrame
}

// A DataFrameMutator is used to change DataFrame values in place.
type DataFrameMutator struct {
	dataframe *DataFrame
}
// A GroupedSeries is a collection of row positions sharing the same group key.
// A GroupedSeries has a reference to an underlying Series, which is used for reduce operations.
type GroupedSeries struct {
	orderedKeys []string
	rowIndices  [][]int // one slice of row positions per group key
	labels      []*valueContainer
	series      *Series
	aligned     bool
	err         error
}

// GroupedSeriesIterator iterates over all Series in the group.
type GroupedSeriesIterator struct {
	current    int
	rowIndices [][]int
	s          *Series
}

// A GroupedDataFrame is a collection of row positions sharing the same group key.
// A GroupedDataFrame has a reference to an underlying DataFrame, which is used for reduce operations.
type GroupedDataFrame struct {
	orderedKeys []string
	rowIndices  [][]int
	labels      []*valueContainer
	df          *DataFrame
	aligned     bool
	err         error
}

// GroupedDataFrameIterator iterates over all DataFrames in the group.
type GroupedDataFrameIterator struct {
	current    int
	rowIndices [][]int
	df         *DataFrame
}

// Matrix is an interface which is compatible with gonum's mat.Matrix interface.
type Matrix interface {
	Dims() (r, c int)
	At(i, j int) float64
	T() Matrix
}

// floatValueContainer pairs float64 values with null flags and their original
// row positions (index).
type floatValueContainer struct {
	slice  []float64
	isNull []bool
	index  []int
}

// stringValueContainer is the string analogue of floatValueContainer.
type stringValueContainer struct {
	slice  []string
	isNull []bool
	index  []int
}

// dateTimeValueContainer is the time.Time analogue of floatValueContainer.
type dateTimeValueContainer struct {
	slice  []time.Time
	isNull []bool
	index  []int
}
// A Sorter supplies details to the Sort() function.
// `Name` specifies the container (either label or column name) to sort.
// If `Descending` is true, values are sorted in descending order.
// `DType` specifies the data type to which values will be coerced before they are sorted (default: float64).
// Null values are always sorted to the bottom.
type Sorter struct {
	Name       string
	Descending bool
	DType      DType
}

// An Element is one {value, null status} pair in either a Series or DataFrame.
type Element struct {
	Val    interface{}
	IsNull bool
}

// NullFiller fills every row with a null value and changes the row status to not-null.
// If multiple fields are provided, resolves in the following order:
// 1) `FillForward` - fills with the last valid value,
// 2) `FillBackward` - fills with the next valid value,
// 3) `FillZero` - fills with the zero type of the slice,
// 4) `FillFloat` - coerces to float64 and fills with the value provided.
type NullFiller struct {
	FillForward  bool
	FillBackward bool
	FillZero     bool
	FillFloat    float64
}

// A FilterFn is an anonymous function supplied to a Filter or Where function.
// The function will be called on every val in the container.
type FilterFn func(value interface{}) bool

// An ApplyFn is an anonymous function supplied to an Apply function to convert one slice to another.
// The function input will be a slice, and it must return a slice of equal length (though the type may be different).
// isNull contains the null status of every row in the input slice.
// The null status of a row may be changed by setting that row's isNull element within the function body.
type ApplyFn func(slice interface{}, isNull []bool) (equalLengthSlice interface{})

// A ReduceFn is an anonymous function supplied to a Reduce function
// to reduce a slice of values to one value and one null status per group.
// isNull contains the null status of every value in the group.
type ReduceFn func(slice interface{}, isNull []bool) (value interface{}, null bool)

// DType is a DataType that may be used in Sort() or Cast().
type DType int

const (
	// String -> string
	String DType = iota
	// Float64 -> float64
	Float64
	// DateTime -> time.Time
	DateTime // always tz-aware
	// Time -> civil.Time
	Time
	// Date -> civil.Date
	Date
)
// A JoinOption configures a lookup or merge function.
// Available lookup options: JoinOptionHow, JoinOptionLeftOn, JoinOptionRightOn
type JoinOption func(*joinConfig)
// A joinConfig configures a lookup or merge function.
// All lookup/merge functions accept zero or more modifiers that alter the default read config, which is:
// left join, no specified join keys (so automatically uses shared label names as keys)
type joinConfig struct {
	how string // join strategy, e.g. "left"
	leftOn []string // join keys on the left side; empty means auto-detect shared labels
	rightOn []string // join keys on the right side; empty means auto-detect shared labels
}
// Resampler supplies logic for the Resample() function.
// Only the first `By` field that is selected (i.e., not left nil) is used - any others are ignored
// (if `ByWeek` is selected, it may be modified by `StartOfWeek`).
// `ByYear` truncates the timestamp by year.
// `ByMonth` truncates the timestamp by month.
// `ByDay` truncates the timestamp by day.
// `ByWeek` returns the first day of the most recent week (starting on `StartOfWeek`) relative to timestamp.
// Otherwise, truncates the timestamp `ByDuration`.
// If `Location` is not provided, time.UTC is used as the default location.
type Resampler struct {
	ByYear bool // truncate to the start of the year
	ByMonth bool // truncate to the start of the month
	ByDay bool // truncate to the start of the day
	ByWeek bool // truncate to the start of the most recent week
	StartOfWeek time.Weekday // first day of the week; only used with ByWeek
	ByDuration time.Duration // fallback truncation unit when no By* flag is set
	Location *time.Location // timezone for truncation; defaults to time.UTC when nil
}
// Binner supplies logic for the Bin() function.
// If `AndLess` is true, a bin is added that ranges between negative infinity and the first bin value.
// If `AndMore` is true, a bin is added that ranges between the last bin value and positive infinity.
// If `Labels` is not nil, then category names correspond to labels, and the number of labels must be one less than the number of bin values.
// Otherwise, category names are auto-generated from the range of the bin intervals.
type Binner struct {
	AndLess bool // add an open bin below the first bin edge
	AndMore bool // add an open bin above the last bin edge
	Labels []string // optional category names; length must be len(bin edges) - 1
}
// A StructTransposer is a row-oriented representation of a DataFrame
// that can be randomly shuffled or transposed into a column-oriented struct representation of a DataFrame.
// It is useful for intuitive row-oriented testing.
type StructTransposer [][]interface{}
// clocker interface for generating random numbers
var clock clocker = realClock{} | types.go | 0.590897 | 0.566019 | types.go | starcoder |
package sqlbuilder
// InsertStatement represents an INSERT statement.
type InsertStatement struct {
	columns ColumnList // target columns; must be parallel to values
	values []literal // literal values to insert; must be parallel to columns
	into Table // destination table
	err error // first error recorded by a builder method; surfaced by ToSql
}
// Insert creates a new INSERT statement targeting the given table.
// An error is recorded (and later surfaced by ToSql) when the table is nil
// or is not a natural table.
func Insert(into Table) *InsertStatement {
	if into == nil {
		return &InsertStatement{err: newError("table is nil.")}
	}
	if _, natural := into.(*table); !natural {
		return &InsertStatement{err: newError("table is not natural table.")}
	}
	stmt := &InsertStatement{
		into:    into,
		columns: make(ColumnList, 0),
		values:  make([]literal, 0),
	}
	return stmt
}
// Columns sets the target columns for the insert, replacing any columns set
// earlier via Columns() or Set(). Every column must belong to the target
// table; otherwise an error is recorded for ToSql to report.
func (b *InsertStatement) Columns(columns ...Column) *InsertStatement {
	if b.err != nil {
		return b
	}
	for i := range columns {
		if b.into.hasColumn(columns[i]) {
			continue
		}
		b.err = newError("column not found in table.")
		return b
	}
	b.columns = ColumnList(columns)
	return b
}
// Values sets the VALUES clause, replacing any values set earlier via
// Values() or Set().
func (b *InsertStatement) Values(values ...interface{}) *InsertStatement {
	if b.err != nil {
		return b
	}
	lits := make([]literal, 0, len(values))
	for _, v := range values {
		lits = append(lits, toLiteral(v))
	}
	b.values = lits
	return b
}
// Set appends a column and its value together.
// Set cannot be combined with Columns() or Values() in one statement.
func (b *InsertStatement) Set(column Column, value interface{}) *InsertStatement {
	if b.err != nil {
		return b
	}
	if b.into.hasColumn(column) {
		b.columns = append(b.columns, column)
		b.values = append(b.values, toLiteral(value))
		return b
	}
	b.err = newError("column not found in FROM.")
	return b
}
// ToSql generates query string, placeholder arguments, and returns err on errors.
func (b *InsertStatement) ToSql() (query string, args []interface{}, err error) {
bldr := newBuilder()
defer func() {
query, args, err = bldr.Query(), bldr.Args(), bldr.Err()
}()
if b.err != nil {
bldr.SetError(b.err)
return
}
// INSERT
bldr.Append("INSERT")
// INTO Table
bldr.Append(" INTO ")
bldr.AppendItem(b.into)
// (COLUMN)
if len(b.columns) == 0 {
b.columns = b.into.Columns()
}
bldr.Append(" ( ")
bldr.AppendItem(b.columns)
bldr.Append(" )")
// VALUES
if len(b.columns) != len(b.values) {
bldr.SetError(newError("%d values needed, but got %d.", len(b.columns), len(b.values)))
return
}
for i := range b.columns {
if !b.columns[i].acceptType(b.values[i]) {
bldr.SetError(newError("%s column not accept %T.",
b.columns[i].config().Type().String(),
b.values[i].Raw()))
return
}
}
bldr.Append(" VALUES ( ")
values := make([]serializable, len(b.values))
for i := range values {
values[i] = b.values[i]
}
bldr.AppendItems(values, ", ")
bldr.Append(" )")
return
} | insert.go | 0.587115 | 0.507446 | insert.go | starcoder |
package util
import (
"fmt"
"math"
"time"
)
const InfDuration = time.Duration(math.MaxInt64)
// DoubleToTime converts an epoch timestamp in seconds to Time.
// Overflow is not considered. A zero input maps to the zero Time.
func DoubleToTime(t float64) time.Time {
	if t == 0 {
		return time.Time{}
	}
	whole, frac := math.Modf(t)
	nanos := int64(frac * 1e9)
	return time.Unix(int64(whole), nanos)
}
func TimeToDouble(t time.Time) float64 {
return float64(t.UnixNano()) / 1e9
}
// DoubleToDuration converts a float64 number of seconds to Duration.
// +/-InfDuration is returned in the event of overflow.
func DoubleToDuration(d float64) time.Duration {
	if d < 0 {
		return -DoubleToDuration(-d)
	}
	// The comparison is kept in this exact form so that NaN (for which the
	// condition is false) also maps to InfDuration, as before.
	if d <= float64(math.MaxInt64)/1e9 {
		return time.Duration(d * 1e9)
	}
	return InfDuration
}
// FormatDate renders t as an ISO-8601 calendar date, e.g. "2021-03-09".
func FormatDate(t time.Time) string {
	return t.Format("2006-01-02")
}
// FormatDateCasual renders t in a human-friendly form, e.g. "Tue, Mar 9".
func FormatDateCasual(t time.Time) string {
	return t.Format("Mon, Jan 2")
}
// FormatTime renders the clock time of t in 24-hour form, e.g. "15:04:05".
func FormatTime(t time.Time) string {
	return t.Format("15:04:05")
}
// FormatTimeCasual renders the clock time of t in 12-hour form, e.g. "3:04pm".
func FormatTimeCasual(t time.Time) string {
	return t.Format("3:04pm")
}
// FormatDatetime renders the date and 24-hour clock time of t,
// e.g. "2021-03-09 15:04:05".
func FormatDatetime(t time.Time) string {
	return t.Format("2006-01-02 15:04:05")
}
// FormatDuration renders d as days/hours/minutes, e.g. "1d2h3m".
// Negative durations are prefixed with "-" and InfDuration renders as
// "forever". Seconds are truncated.
func FormatDuration(d time.Duration) string {
	switch {
	case d < 0:
		return "-" + FormatDuration(-d)
	case d == InfDuration:
		return "forever"
	}
	days := d / (24 * time.Hour)
	rem := d - days*24*time.Hour
	hours := rem / time.Hour
	rem -= hours * time.Hour
	minutes := rem / time.Minute
	return fmt.Sprintf("%dd%dh%dm", days, hours, minutes)
}
// FormatDurationWhole renders d using only its nonzero day/hour/minute
// components, e.g. "2h5m". Durations under a minute render as "0m",
// negative values get a "-" prefix, and InfDuration renders as "forever".
func FormatDurationWhole(d time.Duration) string {
	switch {
	case d < 0:
		return "-" + FormatDurationWhole(-d)
	case d == InfDuration:
		return "forever"
	case d < time.Minute:
		return "0m"
	}
	days := d / (24 * time.Hour)
	d -= days * 24 * time.Hour
	hours := d / time.Hour
	d -= hours * time.Hour
	minutes := d / time.Minute
	var out string
	if days > 0 {
		out += fmt.Sprintf("%dd", days)
	}
	if hours > 0 {
		out += fmt.Sprintf("%dh", hours)
	}
	if minutes > 0 {
		out += fmt.Sprintf("%dm", minutes)
	}
	return out
}
// FormatDurationHM renders d as hours and minutes only, e.g. "26h3m".
// Negative values get a "-" prefix and InfDuration renders as "forever".
func FormatDurationHM(d time.Duration) string {
	switch {
	case d < 0:
		return "-" + FormatDurationHM(-d)
	case d == InfDuration:
		return "forever"
	}
	hours := d / time.Hour
	minutes := (d - hours*time.Hour) / time.Minute
	return fmt.Sprintf("%dh%dm", hours, minutes)
}
// FormatCountdown renders d as "H:MM:SS". Negative durations clamp to
// "0:00:00" and InfDuration renders as "forever".
func FormatCountdown(d time.Duration) string {
	if d < 0 {
		return "0:00:00"
	}
	if d == InfDuration {
		return "forever"
	}
	hours := d / time.Hour
	d -= hours * time.Hour
	minutes := d / time.Minute
	seconds := (d - minutes*time.Minute) / time.Second
	return fmt.Sprintf("%d:%02d:%02d", hours, minutes, seconds)
}
func FormatDurationNonNegative(d time.Duration) string {
if d < 0 {
return FormatDuration(0)
}
return FormatDuration(d)
} | util/time.go | 0.743168 | 0.435061 | time.go | starcoder |
package plugin
// State represents the current plugin state. States are immutable
// (to prevent predefined state edition).
type State struct {
	code byte // State code
	desc string // State name
	raw interface{} // Additional and optional content (error or string)
}
// Predefined states.
var (
	// State code formats
	// +---------+-----------------+--------------------------------------------------------------+
	// | Range   | Description     | Behaviours                                                   |
	// +---------+-----------------+--------------------------------------------------------------+
	// | 1x ~ 1F | OK states       | Nothing                                                      |
	// | 2x ~ DF | Custom states   | Nothing                                                      |
	// | Ex ~ EF | Error states    | The scheduler notifies the server                            |
	// | Fx ~ FF | Critical states | The scheduler tries to restart the plugin & notifies server  |
	// +---------+-----------------+--------------------------------------------------------------+
	NilState State
	IdleState = State{0x11, "currently idle", nil}
	InSessionState = State{0x12, "currently in session", nil}
	PausedState = State{0x13, "currently paused", nil}
	GPIODisconnectionState = State{0xE1, "GPIO has been disconnected", nil}
	GPIOFailureState = State{0xE2, "GPIO reading has failed", nil}
	AggregationFailureState = State{0xE3, "data aggregation has failed", nil}
	ConversionFailureState = State{0xE4, "data conversion has failed", nil}
	GPIOPanicState = State{0xF1, "GPIO critical error", nil}
	HandledPanicState = State{0xF2, "panic handled", nil}
)
// NewState creates a new custom state. An optional first raw value may be
// supplied; any further raw values are ignored.
func NewState(code byte, desc string, raw ...interface{}) State {
	s := State{code: code, desc: desc}
	if len(raw) > 0 {
		s.raw = raw[0]
	}
	return s
}
// Code returns the state code.
func (state State) Code() byte { return state.code }
// Desc returns the state description.
func (state State) Desc() string { return state.desc }
// Raw returns the state's optional raw content (error or string).
func (state State) Raw() interface{} { return state.raw }
// Equal reports whether o is a State with the same code as state
// (only state codes are compared).
func (state State) Equal(o interface{}) bool {
	other, ok := o.(State)
	return ok && other.code == state.code
}
// AddRaw add a raw to an existing state.
func (state State) AddRaw(raw interface{}) State {
return State{state.code, state.desc, raw}
} | plugin/state.go | 0.70069 | 0.414129 | state.go | starcoder |
package geomfn
import (
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/errors"
"github.com/twpayne/go-geom"
)
// Scale returns a modified Geometry whose coordinates are multiplied by the factors.
// If there are missing dimensions in factors, the corresponding dimensions are not scaled.
func Scale(geometry *geo.Geometry, factors []float64) (*geo.Geometry, error) {
	// Empty geometries are returned unchanged.
	if geometry.Empty() {
		return geometry, nil
	}
	g, err := geometry.AsGeomT()
	if err != nil {
		return nil, err
	}
	// nil origin: scale about the coordinate-system origin.
	g, err = scale(g, factors, nil)
	if err != nil {
		return nil, err
	}
	return geo.NewGeometryFromGeomT(g)
}
// ScaleRelativeToOrigin returns a modified Geometry whose coordinates are multiplied by the factors relative to the origin.
// Both factor and origin must be Point geometries with the same number of dimensions.
func ScaleRelativeToOrigin(
	geometry *geo.Geometry, factor *geo.Geometry, origin *geo.Geometry,
) (*geo.Geometry, error) {
	// Empty geometries are returned unchanged.
	if geometry.Empty() {
		return geometry, nil
	}
	g, err := geometry.AsGeomT()
	if err != nil {
		return nil, err
	}
	factorG, err := factor.AsGeomT()
	if err != nil {
		return nil, err
	}
	_, ok := factorG.(*geom.Point)
	if !ok {
		return nil, errors.Newf("the scaling factor must be a Point")
	}
	originG, err := origin.AsGeomT()
	if err != nil {
		return nil, err
	}
	_, ok = originG.(*geom.Point)
	if !ok {
		return nil, errors.Newf("the false origin must be a Point")
	}
	// Factor and origin points must agree on dimensionality.
	if factorG.Stride() != originG.Stride() {
		err := geom.ErrStrideMismatch{
			Got: factorG.Stride(),
			Want: originG.Stride(),
		}
		return nil, errors.Wrap(err, "number of dimensions for the scaling factor and origin must be equal")
	}
	// The factor point's flat coordinates become the per-dimension factors.
	g, err = scale(g, factorG.FlatCoords(), originG)
	if err != nil {
		return nil, err
	}
	return geo.NewGeometryFromGeomT(g)
}
// scale applies the per-dimension factors (relative to origin, which may be
// nil) to any geom type, recursing into GeometryCollections. The returned
// geometry keeps the input's layout, ends, and SRID.
func scale(g geom.T, factors []float64, origin geom.T) (geom.T, error) {
	if geomCollection, ok := g.(*geom.GeometryCollection); ok {
		return scaleCollection(geomCollection, factors, origin)
	}
	newCoords, err := scaleCoords(g, factors, origin)
	if err != nil {
		return nil, err
	}
	// Rebuild the same concrete type around the scaled flat coordinates.
	switch t := g.(type) {
	case *geom.Point:
		g = geom.NewPointFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.LineString:
		g = geom.NewLineStringFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.Polygon:
		g = geom.NewPolygonFlat(t.Layout(), newCoords, t.Ends()).SetSRID(g.SRID())
	case *geom.MultiPoint:
		g = geom.NewMultiPointFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.MultiLineString:
		g = geom.NewMultiLineStringFlat(t.Layout(), newCoords, t.Ends()).SetSRID(g.SRID())
	case *geom.MultiPolygon:
		g = geom.NewMultiPolygonFlat(t.Layout(), newCoords, t.Endss()).SetSRID(g.SRID())
	default:
		return nil, geom.ErrUnsupportedType{Value: g}
	}
	return g, nil
}
// scaleCoords multiplies g's coordinates relative to origin's coordinates.
// If origin is nil, g's coordinates are just multiplied by the factors.
// Note: M coordinates are not affected; they are copied through unchanged.
func scaleCoords(g geom.T, factors []float64, origin geom.T) ([]float64, error) {
	stride := g.Stride()
	if stride < len(factors) {
		err := geom.ErrStrideMismatch{
			Got:  len(factors),
			Want: stride,
		}
		return nil, errors.Wrap(err, "number of factors exceed number of dimensions")
	}
	// With no explicit origin, scale about the zero origin.
	var originCoords []float64
	if origin != nil {
		originCoords = origin.FlatCoords()
	} else {
		originCoords = make([]float64, len(factors))
	}
	// NOTE(review): X/Y below always read factors[0] and factors[1]; a
	// factors slice with fewer than two elements would panic here — confirm
	// callers always supply at least two factors.
	coords := g.FlatCoords()
	newCoords := make([]float64, len(coords))
	for i := 0; i < len(coords); i += stride {
		newCoords[i] = coords[i]*factors[0] - originCoords[0]
		newCoords[i+1] = coords[i+1]*factors[1] - originCoords[1]
		z := g.Layout().ZIndex()
		if z != -1 {
			// Z is scaled only when a Z factor was provided; otherwise it is
			// copied through unchanged.
			newCoords[i+z] = coords[i+z]
			if len(factors) > z {
				newCoords[i+z] *= factors[z]
				newCoords[i+z] -= originCoords[z]
			}
		}
		// m coords are only copied over, never scaled.
		// BUG FIX: the original wrote factors[m] here, which both scaled M by
		// the wrong value and could index past the end of factors.
		m := g.Layout().MIndex()
		if m != -1 {
			newCoords[i+m] = coords[i+m]
		}
	}
	return newCoords, nil
}
// scaleCollection iterates through a GeometryCollection and calls scale() on each item.
func scaleCollection(
geomCollection *geom.GeometryCollection, factors []float64, origin geom.T,
) (*geom.GeometryCollection, error) {
res := geom.NewGeometryCollection()
for _, subG := range geomCollection.Geoms() {
subGeom, err := scale(subG, factors, origin)
if err != nil {
return nil, err
}
if err := res.Push(subGeom); err != nil {
return nil, err
}
}
return res, nil
} | pkg/geo/geomfn/scale.go | 0.827306 | 0.460653 | scale.go | starcoder |
package analyzer
import (
"fmt"
"math"
"strings"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/go-mysql-server/sql/expression"
"github.com/dolthub/go-mysql-server/sql/plan"
)
// orderTables returns an access order for the tables provided, attempting to minimize total query cost.
// It brute-forces every permutation of the tables, so it is only suitable for
// small join counts.
// NOTE(review): tableNames and indexes are sized by len(tablesByName) but
// filled by iterating tables — this assumes both have the same length;
// confirm at the call site.
func orderTables(tables []NameableNode, tablesByName map[string]NameableNode, joinIndexes joinIndexesByTable) []string {
	tableNames := make([]string, len(tablesByName))
	indexes := make([]int, len(tablesByName))
	for i, table := range tables {
		tableNames[i] = strings.ToLower(table.Name())
		indexes[i] = i
	}
	// generate all permutations of table order
	accessOrders := permutations(indexes)
	// Track the cheapest ordering seen so far; lowestCost is also passed to
	// the estimator so it can bail out early once it exceeds the best cost.
	lowestCost := int64(math.MaxInt64)
	lowestCostIdx := 0
	for i, accessOrder := range accessOrders {
		cost := estimateTableOrderCost(tableNames, tablesByName, accessOrder, joinIndexes, lowestCost)
		if cost < lowestCost {
			lowestCost = cost
			lowestCostIdx = i
		}
	}
	// Translate the winning index permutation back into table names.
	cheapestOrder := make([]string, len(tableNames))
	for i, j := range accessOrders[lowestCostIdx] {
		cheapestOrder[i] = tableNames[j]
	}
	return cheapestOrder
}
// buildJoinTree builds a join plan for the tables in the access order given, using the join expressions given.
// Returns nil when no tree covering all tables can be assembled.
func buildJoinTree(
	tableOrder []string,
	joinConds []*joinCond,
) *joinSearchNode {
	rootNodes := searchJoins(nil, &joinSearchParams{
		tables:    tableOrder,
		joinConds: joinConds,
	})
	for _, tree := range rootNodes {
		// The search function here can return valid sub trees that don't have all the tables in the full join, so we need
		// to check them for validity as an entire tree
		if isValidJoinTree(tree) {
			return tree
		}
	}
	return nil
}
// estimateTableOrderCost estimates the cost of the table ordering given. Lower numbers are better. Bails out and
// returns cost so far if cost exceeds lowest found so far. We could do this better if we had table and key statistics.
func estimateTableOrderCost(
	tables []string,
	tableNodes map[string]NameableNode,
	accessOrder []int,
	joinIndexes joinIndexesByTable,
	lowestCost int64,
) int64 {
	cost := int64(1)
	// Columns of all tables accessed so far; an index is usable only if its
	// key columns are covered by this schema.
	var availableSchemaForKeys sql.Schema
	for i, idx := range accessOrder {
		// Early exit: this ordering is already no better than the best found.
		if cost >= lowestCost {
			return cost
		}
		table := tables[idx]
		availableSchemaForKeys = append(availableSchemaForKeys, tableNodes[table].Schema()...)
		indexes := joinIndexes[table]
		// If this table is part of a left or a right join, assert that tables are in the correct order. No table
		// referenced in the join condition can precede this one in that case.
		for _, idx := range indexes {
			if (idx.joinType == plan.JoinTypeLeft && idx.joinPosition == plan.JoinTypeLeft) ||
				(idx.joinType == plan.JoinTypeRight && idx.joinPosition == plan.JoinTypeRight) {
				for j := 0; j < i; j++ {
					otherTable := tables[accessOrder[j]]
					if colsIncludeTable(idx.comparandCols, otherTable) {
						// Ordering violates the outer-join side; reject it outright.
						return math.MaxInt64
					}
				}
			}
		}
		if i == 0 || indexes.getUsableIndex(availableSchemaForKeys) == nil {
			// TODO: estimate number of rows in table
			cost *= 1000
		} else {
			// TODO: estimate number of rows from index lookup based on cardinality
			cost += 1
		}
	}
	return cost
}
// colsIncludeTable returns whether any of the columns given belongs to the
// table given (table is expected to be lower case).
func colsIncludeTable(cols []*expression.GetField, table string) bool {
	for i := range cols {
		if strings.ToLower(cols[i].Table()) == table {
			return true
		}
	}
	return false
}
// permutations returns every ordering of the slice given, generated by
// recursive element swaps. The first result is always the input order, and
// the input slice is restored before returning.
func permutations(a []int) (res [][]int) {
	var permute func(depth int)
	permute = func(depth int) {
		if depth >= len(a) {
			snapshot := make([]int, len(a))
			copy(snapshot, a)
			res = append(res, snapshot)
			return
		}
		permute(depth + 1)
		for j := depth + 1; j < len(a); j++ {
			a[depth], a[j] = a[j], a[depth]
			permute(depth + 1)
			a[j], a[depth] = a[depth], a[j]
		}
	}
	permute(0)
	return res
}
// joinSearchParams is a simple struct to track available tables and join conditions during a join search
type joinSearchParams struct {
	tables []string // all table names, in the access order being searched
	usedTableIndexes []int // indexes into tables already placed in the tree
	joinConds []*joinCond // all join conditions available to the search
	usedJoinCondsIndexes []int // indexes into joinConds already consumed
}
// copy returns a copy of js with independent used-index bookkeeping; the
// tables and joinConds slices themselves are shared with the original.
func (js *joinSearchParams) copy() *joinSearchParams {
	tablesUsed := append([]int(nil), js.usedTableIndexes...)
	condsUsed := append([]int(nil), js.usedJoinCondsIndexes...)
	return &joinSearchParams{
		tables:               js.tables,
		usedTableIndexes:     tablesUsed,
		joinConds:            js.joinConds,
		usedJoinCondsIndexes: condsUsed,
	}
}
// tableIndexUsed reports whether table index i has already been placed in the tree.
func (js *joinSearchParams) tableIndexUsed(i int) bool {
	return indexOfInt(i, js.usedTableIndexes) >= 0
}
// joinCondIndexUsed reports whether join condition index i has already been consumed.
func (js *joinSearchParams) joinCondIndexUsed(i int) bool {
	return indexOfInt(i, js.usedJoinCondsIndexes) >= 0
}
// A joinSearchNode is a simplified type representing a join tree node, which is either an internal node (a join) or a
// leaf node (a table). The top level node in a join tree is always an internal node. Every internal node has both a
// left and a right child.
type joinSearchNode struct {
	table string // empty if this is an internal node
	joinCond *joinCond // nil if this is a leaf node
	parent *joinSearchNode // nil if this is the root node
	left *joinSearchNode // nil if this is a leaf node
	right *joinSearchNode // nil if this is a leaf node
	params *joinSearchParams // search params that assembled this node
}
// childTargetNode is a sentinel node used to mark the left or right branch of
// a node as being targeted for assignment (see targetLeft / targetRight /
// withChild). Compared by pointer identity only.
var childTargetNode = &joinSearchNode{}
// tableOrder returns the order of the tables in this part of the tree, using
// an in-order traversal. A nil node yields no tables.
func (n *joinSearchNode) tableOrder() []string {
	switch {
	case n == nil:
		return nil
	case n.isLeaf():
		return []string{n.table}
	}
	order := append([]string(nil), n.left.tableOrder()...)
	return append(order, n.right.tableOrder()...)
}
// isLeaf returns whether this node is a table node (internal join nodes have
// an empty table name).
func (n *joinSearchNode) isLeaf() bool {
	return len(n.table) > 0
}
// joinConditionSatisfied returns whether all the tables mentioned in this join node are present in descendants,
// recursively for the entire subtree.
func (n *joinSearchNode) joinConditionSatisfied() bool {
	if n.isLeaf() {
		return true
	}
	joinCondTables := findTables(n.joinCond.cond)
	childTables := n.tableOrder()
	// TODO: case sensitivity
	if !containsAll(joinCondTables, childTables) {
		return false
	}
	return n.left.joinConditionSatisfied() && n.right.joinConditionSatisfied()
}
// copy returns a shallow copy of this node (children and parent are shared),
// with a deep copy of its search params.
func (n *joinSearchNode) copy() *joinSearchNode {
	nn := *n
	nn.params = nn.params.copy()
	return &nn
}
// targetLeft returns a copy of this node with the left child marked for replacement by withChild
func (n *joinSearchNode) targetLeft() *joinSearchNode {
	nn := n.copy()
	nn.left = childTargetNode
	return nn
}
// targetRight returns a copy of this node with the right child marked for replacement by withChild
func (n *joinSearchNode) targetRight() *joinSearchNode {
	nn := n.copy()
	nn.right = childTargetNode
	return nn
}
// withChild returns a copy of this node with the previously marked child
// (see targetLeft, targetRight) replaced by the node given. Panics when
// neither child was marked.
func (n *joinSearchNode) withChild(child *joinSearchNode) *joinSearchNode {
	nn := n.copy()
	switch childTargetNode {
	case nn.left:
		nn.left = child
	case nn.right:
		nn.right = child
	default:
		panic("withChild couldn't find a child to assign")
	}
	return nn
}
// accumulateAllUsed rolls up joinSearchParams from this node and all descendants, combining their used tallies
// into a single params value. Returns empty params for a nil node or one without params.
func (n *joinSearchNode) accumulateAllUsed() *joinSearchParams {
	if n == nil || n.params == nil {
		return &joinSearchParams{}
	}
	if n.isLeaf() {
		return n.params
	}
	leftParams := n.left.accumulateAllUsed()
	rightParams := n.right.accumulateAllUsed()
	result := n.params.copy()
	// TODO: eliminate duplicates from these lists, or use sets
	result.usedJoinCondsIndexes = append(result.usedJoinCondsIndexes, leftParams.usedJoinCondsIndexes...)
	result.usedJoinCondsIndexes = append(result.usedJoinCondsIndexes, rightParams.usedJoinCondsIndexes...)
	result.usedTableIndexes = append(result.usedTableIndexes, leftParams.usedTableIndexes...)
	result.usedTableIndexes = append(result.usedTableIndexes, rightParams.usedTableIndexes...)
	return result
}
// String renders the subtree for debugging: leaves print their table name,
// internal nodes print their join condition (plus any used-index bookkeeping)
// with their children indented beneath.
func (n *joinSearchNode) String() string {
	if n == nil {
		return "nil"
	}
	if n == childTargetNode {
		return "childTargetNode"
	}
	if n.isLeaf() {
		return n.table
	}
	usedJoins := ""
	if n.params != nil && len(n.params.usedJoinCondsIndexes) > 0 {
		usedJoins = fmt.Sprintf("%v", n.params.usedJoinCondsIndexes)
	}
	usedTables := ""
	if n.params != nil && len(n.params.usedTableIndexes) > 0 {
		usedTables = fmt.Sprintf("%v", n.params.usedTableIndexes)
	}
	tp := sql.NewTreePrinter()
	if len(usedTables)+len(usedJoins) > 0 {
		_ = tp.WriteNode("%s (usedJoins = %v, usedTables = %v)", n.joinCond.cond, usedJoins, usedTables)
	} else {
		_ = tp.WriteNode("%s", n.joinCond.cond)
	}
	_ = tp.WriteChildren(n.left.String(), n.right.String())
	return tp.String()
}
// searchJoins is the recursive helper function for buildJoinTree. It returns all possible join trees that satisfy the
// search parameters given. It calls itself recursively to generate subtrees as well. All nodes returned are valid
// subtrees (join conditions and table sub ordering satisfied), but may not be valid as an entire tree. Callers should
// verify this themselves using isValidJoinTree() on the result.
func searchJoins(parent *joinSearchNode, params *joinSearchParams) []*joinSearchNode {
	// Our goal is to construct all possible child nodes for the parent given. Every permutation of a legal subtree should
	// go into this list.
	children := make([]*joinSearchNode, 0)
	// If we have a parent to assign them to, consider returning tables as nodes. Otherwise, skip them.
	if parent != nil {
		// Find all tables mentioned in join nodes up to the root of the tree. We can't add any tables that aren't in this
		// list
		var validChildTables []string
		n := parent
		for n != nil {
			validChildTables = append(validChildTables, findTables(n.joinCond.cond)...)
			n = n.parent
		}
		// Tables are valid to return if they are mentioned in a join condition higher in the tree.
		for i, table := range parent.params.tables {
			if indexOf(table, validChildTables) < 0 || parent.params.tableIndexUsed(i) {
				continue
			}
			paramsCopy := params.copy()
			paramsCopy.usedTableIndexes = append(paramsCopy.usedTableIndexes, i)
			childNode := &joinSearchNode{
				table:  table,
				params: paramsCopy,
				parent: parent.copy(),
			}
			// Only keep the leaf if placing it preserves the required table order.
			if parent.withChild(childNode).tableOrderCorrect() {
				children = append(children, childNode)
			}
		}
	}
	// now for each of the available join nodes
	for i, cond := range params.joinConds {
		if params.joinCondIndexUsed(i) {
			continue
		}
		paramsCopy := params.copy()
		paramsCopy.usedJoinCondsIndexes = append(paramsCopy.usedJoinCondsIndexes, i)
		candidate := &joinSearchNode{
			joinCond: cond,
			parent:   parent,
			params:   paramsCopy,
		}
		// For each of the left and right branch, find all possible children, add all valid subtrees to the list
		candidate = candidate.targetLeft()
		leftChildren := searchJoins(candidate, paramsCopy)
		// pay attention to variable shadowing in this block
		for _, left := range leftChildren {
			if !isValidJoinSubTree(left) {
				continue
			}
			candidate := candidate.withChild(left).targetRight()
			// Fold the left subtree's used tables/conds in before searching right.
			candidate.params = candidate.accumulateAllUsed()
			rightChildren := searchJoins(candidate, paramsCopy)
			for _, right := range rightChildren {
				if !isValidJoinSubTree(right) {
					continue
				}
				candidate := candidate.withChild(right)
				if isValidJoinSubTree(candidate) {
					children = append(children, candidate)
				}
			}
		}
	}
	return children
}
// tableOrderCorrect returns whether the tables in this subtree appear in a
// valid order, i.e. in strictly increasing positions of the search params'
// table list.
func (n *joinSearchNode) tableOrderCorrect() bool {
	last := -1
	for _, tbl := range n.tableOrder() {
		pos := indexOf(tbl, n.params.tables)
		if pos <= last {
			return false
		}
		last = pos
	}
	return true
}
// isValidJoinSubTree returns whether the node given satisfies all the constraints of a join subtree. Subtrees are not
// necessarily complete join plans, since they may not contain all tables. Use isValidJoinTree to verify that.
func isValidJoinSubTree(node *joinSearchNode) bool {
	// Two constraints define a valid tree:
	// 1) An in-order traversal has tables in the correct order
	// 2) The conditions for all internal nodes can be satisfied by their child columns
	return node.tableOrderCorrect() && node.joinConditionSatisfied()
}
// isValidJoinTree returns whether the join node given is a valid subtree and contains all the tables in the join.
func isValidJoinTree(node *joinSearchNode) bool {
	return isValidJoinSubTree(node) && strArraysEqual(node.tableOrder(), node.params.tables)
}
// containsAll reports whether every needle appears somewhere in haystack.
func containsAll(needles []string, haystack []string) bool {
	for i := range needles {
		if indexOf(needles[i], haystack) < 0 {
			return false
		}
	}
	return true
}
// strArraysEqual reports whether a and b hold the same strings in the same
// order.
func strArraysEqual(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i, s := range a {
		if b[i] != s {
			return false
		}
	}
	return true
}
// indexOf returns the position of the first occurrence of str within strs,
// or -1 when absent.
func indexOf(str string, strs []string) int {
	for i := range strs {
		if strs[i] == str {
			return i
		}
	}
	return -1
}
// indexOfInt returns the position of the first occurrence of i within is,
// or -1 when absent.
func indexOfInt(i int, is []int) int {
	for j, k := range is {
		if k == i {
			return j
		}
	}
	return -1
}
package ast
import (
"fmt"
"lo/environment"
"lo/parseerror"
"lo/token"
"reflect"
)
// Interpreter walks a parsed AST and executes it, holding variable state in
// its Environment.
type Interpreter struct {
	Environment environment.Environment // variable bindings for the running program
}
// NewInterpreter creates a new interpreter with a fresh, empty environment.
func NewInterpreter() *Interpreter {
	env := environment.NewEnvironment()
	return &Interpreter{Environment: env}
}
// Interpret evaluates each statement in order.
func (i Interpreter) Interpret(stmts []Stmt) {
	for _, stmt := range stmts {
		i.evaluate(stmt)
	}
}
// String renders the interpreter for debugging.
// BUG FIX: the original returned i.String(), recursing into itself forever
// (stack overflow on any non-nil receiver); format the receiver's state
// instead.
func (i *Interpreter) String() string {
	if i == nil {
		return "nil"
	}
	return fmt.Sprintf("Interpreter{Environment: %v}", i.Environment)
}
// VisitAssignExpression evaluates the right-hand side, assigns it to the
// named variable in the environment, and returns the assigned value.
func (i Interpreter) VisitAssignExpression(e *AssignExpr) interface{} {
	value := i.evaluate(e.Value)
	i.Environment.Assign(e.Name, value)
	return value
}
// VisitBinaryExpression evaluates both operands and applies the binary
// operator. Arithmetic and comparison operators require float64 operands;
// PLUS also supports string concatenation, and returns a RunTimeError value
// for any other operand combination.
// NOTE(review): the check*NumberOperand(s) return values are discarded here,
// so a failed check does not abort evaluation and the subsequent type
// assertions may still panic — confirm whether these should return the error.
func (i Interpreter) VisitBinaryExpression(e *BinaryExpr) interface{} {
	left := i.evaluate(e.Left)
	right := i.evaluate(e.Right)
	switch e.Operator.Type {
	case token.MINUS:
		// BUG FIX: the original checked only the right operand here, yet uses
		// both sides as float64 below; check both like SLASH/STAR do.
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) - right.(float64)
	case token.SLASH:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) / right.(float64)
	case token.STAR:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) * right.(float64)
	case token.GREATER:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) > right.(float64)
	case token.GREATEREQUAL:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) >= right.(float64)
	case token.LESS:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) < right.(float64)
	case token.LESSEQUAL:
		i.checkTwoNumberOperands(e.Operator, left, right)
		return left.(float64) <= right.(float64)
	case token.BANGEQUAL:
		return !i.isEqual(left, right)
	case token.EQUALEQUAL:
		return i.isEqual(left, right)
	case token.PLUS:
		// PLUS is overloaded: numeric addition or string concatenation.
		typeLeft := reflect.TypeOf(left).String()
		typeRight := reflect.TypeOf(right).String()
		if (typeLeft == "float64" || typeLeft == "float32" || typeLeft == "int") && (typeRight == "float64" || typeRight == "float32" || typeRight == "int") {
			return left.(float64) + right.(float64)
		} else if typeLeft == "string" && typeRight == "string" {
			return left.(string) + right.(string)
		}
		return &parseerror.RunTimeError{
			Token:   e.Operator,
			Message: fmt.Sprintf("Operand %v and %v must be numbers or strings", left, right)}
	}
	return nil
}
// checkOneNumberOperand returns nil when the operand is a float or int, and
// a RunTimeError otherwise.
func (i Interpreter) checkOneNumberOperand(operator token.Token, operand interface{}) error {
	switch reflect.TypeOf(operand).String() {
	case "float64", "float32", "int":
		return nil
	}
	return &parseerror.RunTimeError{Token: operator, Message: fmt.Sprintf("Operand %v must be a number", operand)}
}
// checkTwoNumberOperands returns nil when both operands are floats or ints,
// and a RunTimeError otherwise.
func (i Interpreter) checkTwoNumberOperands(operator token.Token, left interface{}, right interface{}) error {
	leftKind := reflect.TypeOf(left).String()
	rightKind := reflect.TypeOf(right).String()
	numeric := func(kind string) bool {
		return kind == "float64" || kind == "float32" || kind == "int"
	}
	if numeric(leftKind) && numeric(rightKind) {
		return nil
	}
	return &parseerror.RunTimeError{Token: operator, Message: fmt.Sprintf("Operand %v and %v must be a number", left, right)}
}
// isEqual returns true if the two objects are the same. Two nils are equal;
// a single nil is never equal to a non-nil value.
func (i Interpreter) isEqual(left interface{}, right interface{}) bool {
	switch {
	case left == nil && right == nil:
		return true
	case left == nil:
		return false
	}
	return reflect.DeepEqual(left, right)
}
// VisitCallExpression is not implemented yet; it currently returns an empty
// string for every call expression.
func (i Interpreter) VisitCallExpression(e *CallExpr) interface{} {
	return ""
}
// VisitGetExpression is not implemented yet; it currently returns an empty
// string for every property access.
func (i Interpreter) VisitGetExpression(e *GetExpr) interface{} {
	return ""
}
// VisitGroupExpression returns the result of the expression inside the
// parentheses.
func (i Interpreter) VisitGroupExpression(e *GroupExpr) interface{} {
	return i.evaluate(e.Expression)
}
// evaluate is a helper that revisits the interpreter via the visitor's
// Accept dispatch.
func (i Interpreter) evaluate(e Expr) interface{} {
	return e.Accept(i)
}
// VisitLiteralExpression returns the runtime value the parser took.
func (i Interpreter) VisitLiteralExpression(e *LiteralExpr) interface{} {
	return e.Object
}
// VisitLogicalExpression is not implemented yet; it currently returns an
// empty string for every logical expression.
func (i Interpreter) VisitLogicalExpression(e *LogicalExpr) interface{} {
	return ""
}
// VisitSetExpression is not implemented yet; it currently returns an empty
// string for every property assignment.
func (i Interpreter) VisitSetExpression(e *SetExpr) interface{} {
	return ""
}
// VisitThisExpression is not implemented yet; it currently returns an empty
// string for every `this` expression.
func (i Interpreter) VisitThisExpression(e *ThisExpr) interface{} {
	return ""
}
// VisitUnaryExpression evaluates the operand and applies the unary operator:
// MINUS negates a number, BANG logically negates the operand's truthiness.
func (i Interpreter) VisitUnaryExpression(e *UnaryExpr) interface{} {
	right := i.evaluate(e.Right)
	switch e.Operator.Type {
	case token.MINUS:
		value := -right.(float64)
		return value
	case token.BANG:
		// BUG FIX: the original passed the constant true here, making BANG
		// always return false; negate the evaluated operand instead.
		return !i.isTruthy(right)
	}
	return nil
}
// isTruthy treats nil and boolean false as falsey; every other value is
// truthy.
func (i Interpreter) isTruthy(obj interface{}) bool {
	if obj == nil {
		return false
	}
	if reflect.TypeOf(obj).Kind() == reflect.Bool {
		return obj.(bool)
	}
	return true
}
// VisitVariableExpression ...
func (i Interpreter) VisitVariableExpression(e *VariableExpr) interface{} {
return i.Environment.Get(e.Name)
}
// VisitPrintStmt evaluates the print statement's expression and writes the
// value's default formatting to stdout, followed by a newline.
func (i Interpreter) VisitPrintStmt(e *PrintStmt) interface{} {
	value := i.evaluate(e.Expression)
	// fmt.Println already applies fmt.Sprint-style default formatting, so
	// the previous fmt.Sprint round-trip was redundant.
	fmt.Println(value)
	return nil
}
// VisitExpressionStmt evaluates an expression statement for its side
// effects and discards the result.
func (i Interpreter) VisitExpressionStmt(e *ExpressionStmt) interface{} {
	i.evaluate(e.Expression)
	return nil
}
// VisitVarStmt declares a variable in the current environment. When an
// initializer is present it is evaluated first; otherwise the variable is
// defined with a nil value.
func (i Interpreter) VisitVarStmt(e *VarStmt) interface{} {
	var value interface{}
	if e.Initializer != nil {
		value = i.evaluate(e.Initializer)
	}
	i.Environment.Define(e.Name.Lexeme, value)
	return nil
}
package srec19
import (
"encoding/binary"
"errors"
"sync"
)
//record stores the fields of one line/record of the input file.
//NOTE(review): the original comment said "ihex32", but parseRecord only
//accepts "S3"-type records (Motorola S-record, 32-bit address) — confirm
//which format name is intended.
type record struct {
	//rwm is used to coordinate multithreaded reading/writing when applying
	//offsets or calculating checksums.
	//NOTE(review): despite the name it is a plain sync.Mutex, not a
	//sync.RWMutex.
	rwm sync.Mutex
	//byteCount contains the hex string value that defines the number of data bytes contained in the record, including address & checksum.
	byteCount string
	//addressField contains the 4 bytes (8 hex characters) of the 32-bit address.
	addressField string
	//recordType defines which kind of data is contained within the record (data, address offset, eof-marker, etc.).
	recordType string
	//data contains byteCount number of hex strings, each representing a single byte (e.g. FF -> 255).
	data []string
	//checksum contains a single byte hex string with the checksum computed from the individual fields of the record.
	checksum string
}
//parseRecord parses a single S-record line into a record struct.
//Only "S3" records (data record with 32-bit address) are accepted.
func parseRecord(line string) (*record, error) {
	r := record{}
	// Minimum valid length: start token + type (2) + byte count (2) +
	// 32-bit address (8) + checksum (2) = 14 characters. The original code
	// indexed into the line before validating its length and panicked on
	// short input.
	if len(line) < 14 {
		return &r, errors.New("entry has no begin line symbol or is too short")
	}
	r.recordType = line[1:2]
	if line[0] != beginLineToken[0] || r.recordType != "3" {
		return &r, errors.New("entry has no begin line symbol or is too short")
	}
	r.byteCount = line[2:4]
	r.addressField = line[4:12]
	// Data bytes sit between the address field and the trailing 2-char checksum.
	for i := 12; i < len(line)-3; i += 2 {
		r.data = append(r.data, line[i:i+2])
	}
	r.checksum = line[len(line)-2:]
	return &r, nil
}
//validateChecksumsRoutine validates the checksum of every record in recs
//and reports the first invalid checksum (or conversion error) by sending
//false on c; nothing is sent when all checksums are valid.
func validateChecksumsRoutine(wg *sync.WaitGroup, c chan bool, recs []*record) {
	defer wg.Done()
	for _, r := range recs {
		ok, err := r.validateChecksum()
		if err != nil || !ok {
			c <- false
			return
		}
	}
}
//validateChecksum verifies the checksum of a single record.
//The checksum is valid when the unsigned 8-bit sum of the byte count, the
//four address bytes, every data byte and the checksum byte itself equals
//0xFF (255). Higher-order bits of the running sum are intentionally
//discarded by letting the uint8 overflow, so only the least significant
//byte is compared.
func (r *record) validateChecksum() (bool, error) {
	// Collect every hex byte pair that participates in the checksum. This
	// replaces six near-identical copy-pasted summation stanzas (whose
	// comments had drifted — three of them claimed to handle the "second
	// byte" of the address).
	fields := make([]string, 0, len(r.data)+6)
	fields = append(fields,
		r.byteCount,
		r.addressField[0:2],
		r.addressField[2:4],
		r.addressField[4:6],
		r.addressField[6:8],
	)
	fields = append(fields, r.data...)
	fields = append(fields, r.checksum)

	var sum uint8
	for _, f := range fields {
		b, err := hexToByte(f)
		if err != nil {
			return false, err
		}
		sum += b
	}
	return sum == 255, nil
}
//calcDataRoutine converts every data record in recs into absolute-addressed
//data bytes and sends the combined result on c. On a conversion error an
//empty slice is sent instead. Exactly one value is ever sent and the
//channel is always closed.
//Bug fix: the original kept looping after an error, sent the empty slice
//AND later the accumulated slice — a second send that could block this
//goroutine forever against a single-read receiver.
func calcDataRoutine(c chan []dataByte, recs []*record) {
	var ds []dataByte
	for _, r := range recs {
		if r.recordType != dataRecordToken {
			continue
		}
		d, err := r.calcDataEntries()
		if err != nil {
			c <- []dataByte{}
			close(c)
			return
		}
		ds = append(ds, d...)
	}
	c <- ds
	close(c)
}
//calcDataEntries calculates all data entries with their final addresses.
func (r *record) calcDataEntries() ([]dataByte, error) {
var dataBytes []dataByte
var err error
var val byte
var bs []byte
var lineAddress uint32
for i, s := range r.data {
//convert hexString to byte
val, err = hexToByte(s)
if err != nil {
return dataBytes, err
}
//add index position to the address of the line to get the individual byte position
bs, err = hexToByteSlice(r.addressField)
if err != nil {
return dataBytes, err
}
//convert bytes to uint32, add index-value
lineAddress = binary.BigEndian.Uint32(bs)
lineAddress = lineAddress + uint32(i)
//construct data with final uint32 address and a byte as value
dataBytes = append(dataBytes, dataByte{address: lineAddress, value: val})
}
return dataBytes, nil
} | srec19/record.go | 0.584983 | 0.525308 | record.go | starcoder |
package opencv
import (
"bytes"
"fmt"
"image"
"image/jpeg"
"gopkg.in/sensorbee/opencv.v0/bridge"
"gopkg.in/sensorbee/sensorbee.v0/data"
)
var (
	// imagePath is the pre-compiled data.Map path used to pull the raw
	// image bytes out of incoming tuples.
	imagePath = data.MustCompilePath("image")
)
// TypeImageFormat is an ID of image format type.
type TypeImageFormat int

const (
	typeUnknownFormat TypeImageFormat = iota
	// TypeCVMAT is OpenCV cv::Mat_<cv::Vec3b> format
	TypeCVMAT
	// TypeCVMAT4b is OpenCV cv::Mat_<cv::Vec4b> format
	TypeCVMAT4b
	// TypeJPEG is JPEG format
	TypeJPEG
)

// formatNames maps each known image format ID to its canonical name.
var formatNames = map[TypeImageFormat]string{
	TypeCVMAT:   "cvmat",
	TypeCVMAT4b: "cvmat4b",
	TypeJPEG:    "jpeg",
}

// String returns the canonical name of the format, or "unknown" for
// unrecognized values.
func (t TypeImageFormat) String() string {
	if name, ok := formatNames[t]; ok {
		return name
	}
	return "unknown"
}

// GetTypeImageFormat resolves a format name back to its ID, returning the
// unknown-format sentinel for unrecognized names.
func GetTypeImageFormat(str string) TypeImageFormat {
	for id, name := range formatNames {
		if name == str {
			return id
		}
	}
	return typeUnknownFormat
}
// RawData is represented of `cv::Mat_<cv::Vec3b>` structure.
// It carries the pixel bytes together with the format tag and dimensions
// needed to reinterpret them.
type RawData struct {
	Format TypeImageFormat
	Width  int
	Height int
	Data   []byte
}

// ToRawData converts MatVec3b to RawData, copying out the width, height and
// raw pixel bytes through the cgo bridge.
func ToRawData(m bridge.MatVec3b) RawData {
	w, h, data := m.ToRawData()
	return RawData{
		Format: TypeCVMAT,
		Width:  w,
		Height: h,
		Data:   data,
	}
}

// ToMatVec3b converts RawData to MatVec3b. Returned MatVec3b is required to
// delete after using. Only TypeCVMAT data can be converted.
func (r *RawData) ToMatVec3b() (bridge.MatVec3b, error) {
	if r.Format != TypeCVMAT {
		return bridge.MatVec3b{}, fmt.Errorf("'%v' cannot convert to 'MatVec3b'",
			r.Format)
	}
	return bridge.ToMatVec3b(r.Width, r.Height, r.Data), nil
}

// toRawMap serializes a MatVec3b into a data.Map with format/width/height
// metadata alongside the raw pixel blob.
func toRawMap(m *bridge.MatVec3b) data.Map {
	r := ToRawData(*m)
	return data.Map{
		"format": data.String(r.Format.String()), // = cv::Mat_<cv::Vec3b> = "cvmat"
		"width":  data.Int(r.Width),
		"height": data.Int(r.Height),
		"image":  data.Blob(r.Data),
	}
}
// ConvertMapToRawData returns RawData from data.Map. This function is a
// utility method for other plug-ins. The map must contain "width", "height",
// "image" and "format" entries; an unrecognized format name is an error.
func ConvertMapToRawData(dm data.Map) (RawData, error) {
	var width int64
	if w, err := dm.Get(widthPath); err != nil {
		return RawData{}, err
	} else if width, err = data.ToInt(w); err != nil {
		return RawData{}, err
	}
	var height int64
	if h, err := dm.Get(heightPath); err != nil {
		return RawData{}, err
	} else if height, err = data.ToInt(h); err != nil {
		return RawData{}, err
	}
	var img []byte
	if b, err := dm.Get(imagePath); err != nil {
		return RawData{}, err
	} else if img, err = data.ToBlob(b); err != nil {
		return RawData{}, err
	}
	var format TypeImageFormat
	if f, err := dm.Get(formatPath); err != nil {
		return RawData{}, err
	} else if fmtStr, err := data.AsString(f); err != nil {
		return RawData{}, err
	} else {
		format = GetTypeImageFormat(fmtStr)
		if format == typeUnknownFormat {
			return RawData{}, fmt.Errorf("'%v' is not supported", fmtStr)
		}
	}
	return RawData{
		Format: format,
		Width:  int(width),
		Height: int(height),
		Data:   img,
	}, nil
}
// ConvertToDataMap serializes the RawData into a data.Map carrying the
// format name, dimensions and raw image blob. This is a utility method for
// other plug-ins.
func (r *RawData) ConvertToDataMap() data.Map {
	m := data.Map{}
	m["format"] = data.String(r.Format.String())
	m["width"] = data.Int(r.Width)
	m["height"] = data.Int(r.Height)
	m["image"] = data.Blob(r.Data)
	return m
}
// ToImage converts the raw pixel data to an image.Image, mapping OpenCV's
// BGR(A) channel order into RGBA. Only the cvmat formats are convertible.
func (r *RawData) ToImage() (image.Image, error) {
	rgba := image.NewRGBA(image.Rect(0, 0, r.Width, r.Height))
	if r.Format == TypeCVMAT {
		// 3 source bytes per pixel (B, G, R); alpha forced to opaque.
		for i, j := 0, 0; i < len(rgba.Pix); i, j = i+4, j+3 {
			rgba.Pix[i+0] = r.Data[j+2]
			rgba.Pix[i+1] = r.Data[j+1]
			rgba.Pix[i+2] = r.Data[j+0]
			rgba.Pix[i+3] = 0xFF
		}
	} else if r.Format == TypeCVMAT4b {
		// 4 source bytes per pixel (B, G, R, A).
		// Bug fix: advance the source index by 4 (was 3), which skewed every
		// pixel after the first and read the wrong byte as alpha.
		for i, j := 0, 0; i < len(rgba.Pix); i, j = i+4, j+4 {
			rgba.Pix[i+0] = r.Data[j+2]
			rgba.Pix[i+1] = r.Data[j+1]
			rgba.Pix[i+2] = r.Data[j+0]
			rgba.Pix[i+3] = r.Data[j+3]
		}
	} else {
		return nil, fmt.Errorf("'%v' cannot convert to image", r.Format)
	}
	return rgba, nil
}
// ToJpegData convert JPGE format image bytes.
func (r *RawData) ToJpegData(quality int) ([]byte, error) {
if r.Format == TypeJPEG {
return r.Data, nil
}
rgba, err := r.ToImage()
if err != nil {
return []byte{}, err
}
w := bytes.NewBuffer([]byte{})
err = jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
return w.Bytes(), err
} | raw_data.go | 0.628521 | 0.427217 | raw_data.go | starcoder |
package loggregator
import (
"fmt"
"github.com/rcrowley/go-metrics"
"strconv"
"strings"
"time"
)
//go:generate Counterfeiter github.com/rcrowley/go-metrics.Counter
//go:generate Counterfeiter github.com/rcrowley/go-metrics.Gauge
//go:generate Counterfeiter github.com/rcrowley/go-metrics.GaugeFloat64
//go:generate Counterfeiter github.com/rcrowley/go-metrics.Meter
//go:generate Counterfeiter github.com/rcrowley/go-metrics.Histogram
//go:generate Counterfeiter github.com/rcrowley/go-metrics.Timer
//go:generate Counterfeiter github.com/rcrowley/go-metrics.EWMA
// convertCounter maps a go-metrics Counter onto a single "counter" data point.
func convertCounter(counter metrics.Counter, name string, currentTime int64) *dataPoint {
	return &dataPoint{
		Name:      name,
		Value:     float64(counter.Count()),
		Timestamp: currentTime,
		Type:      "counter",
	}
}
// convertGauge maps a go-metrics Gauge (int64-valued) onto a "gauge" data point.
func convertGauge(gauge metrics.Gauge, name string, currentTime int64) *dataPoint {
	return &dataPoint{
		Name:      name,
		Value:     float64(gauge.Value()),
		Timestamp: currentTime,
		Type:      "gauge",
	}
}
// convertGaugeFloat64 maps a go-metrics GaugeFloat64 onto a "gauge" data point.
// (The float64 conversion is presumably a no-op since Value() should already
// return float64 — harmless either way.)
func convertGaugeFloat64(gauge metrics.GaugeFloat64, name string, currentTime int64) *dataPoint {
	return &dataPoint{
		Name:      name,
		Value:     float64(gauge.Value()),
		Timestamp: currentTime,
		Type:      "gauge",
	}
}
// convertMeter flattens a go-metrics Meter into loggregator data points:
// a cumulative count plus 1/5/15-minute and mean rate gauges.
func convertMeter(meter metrics.Meter, name string, currentTime int64) []*dataPoint {
	points := []*dataPoint{
		{
			Name:      namer(name, "count"),
			Value:     float64(meter.Count()),
			Timestamp: currentTime,
			Type:      "counter",
		},
	}
	rates := []struct {
		suffix string
		value  float64
	}{
		{"rate.1-minute", float64(meter.Rate1())},
		{"rate.5-minute", float64(meter.Rate5())},
		{"rate.15-minute", float64(meter.Rate15())},
		{"rate.mean", float64(meter.RateMean())},
	}
	for _, r := range rates {
		points = append(points, &dataPoint{
			Name:      namer(name, r.suffix),
			Value:     r.value,
			Timestamp: currentTime,
			Type:      "gauge",
		})
	}
	return points
}
// convertHistogram flattens a go-metrics Histogram into loggregator data
// points: a cumulative sample count plus gauges for mean, stddev, sum,
// variance, max, min and a fixed set of percentiles.
func convertHistogram(histogram metrics.Histogram, name string, currentTime int64) []*dataPoint {
	points := []*dataPoint{
		{
			Name:      namer(name, "count"),
			Value:     float64(histogram.Count()),
			Timestamp: currentTime,
			Type:      "counter",
		},
		{
			Name:      namer(name, "mean"),
			Value:     float64(histogram.Mean()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "stddev"),
			Value:     float64(histogram.StdDev()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "sum"),
			Value:     float64(histogram.Sum()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "variance"),
			Value:     float64(histogram.Variance()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "max"),
			Value:     float64(histogram.Max()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "min"),
			Value:     float64(histogram.Min()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
	}
	// The decimal point is stripped from each percentile when building its
	// metric name, e.g. 99.9 becomes "999thPercentile".
	percentiles := []float64{75, 95, 98, 99, 99.9}
	for i, v := range histogram.Percentiles(percentiles) {
		percentileName := strings.Replace(strconv.FormatFloat(percentiles[i], 'f', -1, 64), ".", "", -1)
		points = append(points, &dataPoint{
			Name:      namer(name, fmt.Sprintf("%sthPercentile", percentileName)),
			Value:     float64(v),
			Timestamp: currentTime,
			Type:      "gauge",
		})
	}
	return points
}
// convertTimer flattens a go-metrics Timer into loggregator data points:
// a cumulative count, 1/5/15-minute and mean rates, duration statistics
// (mean/stddev/sum/variance/max/min) and selected duration percentiles.
// Durations are scaled from their native resolution into timeUnit and
// annotated with the matching unit name.
func convertTimer(timer metrics.Timer, name string, currentTime int64, timeUnit time.Duration) []*dataPoint {
	// Resolve the human-readable unit label; an unsupported timeUnit leaves
	// the label empty.
	unit := ""
	switch {
	case timeUnit == time.Second:
		unit = "seconds"
	case timeUnit == time.Millisecond:
		unit = "milliseconds"
	case timeUnit == time.Microsecond:
		unit = "microseconds"
	case timeUnit == time.Nanosecond:
		unit = "nanoseconds"
	}
	points := []*dataPoint{
		{
			Name:      namer(name, "count"),
			Value:     float64(timer.Count()),
			Timestamp: currentTime,
			Type:      "counter",
		},
		{
			Name:      namer(name, "rate.1-minute"),
			Value:     float64(timer.Rate1()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "rate.5-minute"),
			Value:     float64(timer.Rate5()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "rate.15-minute"),
			Value:     float64(timer.Rate15()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		{
			Name:      namer(name, "rate.mean"),
			Value:     float64(timer.RateMean()),
			Timestamp: currentTime,
			Type:      "gauge",
		},
		// Duration statistics below are divided by timeUnit to convert from
		// the timer's native resolution into the requested unit.
		{
			Name:      namer(name, "duration.mean"),
			Value:     timer.Mean() / float64(timeUnit),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
		{
			Name:      namer(name, "duration.stddev"),
			Value:     timer.StdDev() / float64(timeUnit),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
		{
			Name:      namer(name, "duration.sum"),
			Value:     float64(timer.Sum() / int64(timeUnit)),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
		{
			Name:      namer(name, "duration.variance"),
			Value:     timer.Variance() / float64(timeUnit),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
		{
			Name:      namer(name, "duration.max"),
			Value:     float64(timer.Max() / int64(timeUnit)),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
		{
			Name:      namer(name, "duration.min"),
			Value:     float64(timer.Min() / int64(timeUnit)),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		},
	}
	// Same naming scheme as convertHistogram: the decimal point is stripped,
	// e.g. 99.9 becomes "999thPercentile".
	percentiles := []float64{75, 95, 98, 99, 99.9}
	for i, v := range timer.Percentiles(percentiles) {
		percentileName := strings.Replace(strconv.FormatFloat(percentiles[i], 'f', -1, 64), ".", "", -1)
		points = append(points, &dataPoint{
			Name:      namer(name, "duration", fmt.Sprintf("%sthPercentile", percentileName)),
			Value:     v / float64(timeUnit),
			Timestamp: currentTime,
			Type:      "gauge",
			Unit:      unit,
		})
	}
	return points
}
// namer joins metric name segments with "." to build a dotted metric path.
func namer(names ...string) string {
	var b strings.Builder
	for i, n := range names {
		if i > 0 {
			b.WriteByte('.')
		}
		b.WriteString(n)
	}
	return b.String()
}
package vader
import (
"log"
"math"
"strconv"
"strings"
)
// Normalize maps an unbounded valence score into [-1, 1] using the
// alpha-damped hyperbolic normalization, where Alpha approximates the
// maximum expected raw score.
func Normalize(score float64) float64 {
	normalized := score / math.Sqrt((score*score)+float64(Alpha))
	switch {
	case normalized < -1.0:
		return -1.0
	case normalized > 1.0:
		return 1.0
	default:
		return normalized
	}
}
//CleanWordsAndEmoticons splits text on whitespace and strips leading and
//trailing punctuation (plus curly quotes) from each token.
//When trimming leaves two or fewer characters, the original untrimmed token
//is kept instead, so contractions and most emoticons survive; it does not
//preserve punc-plus-letter emoticons (e.g. :D).
//Returns the list of cleaned words.
func CleanWordsAndEmoticons(text string) []string {
	words := strings.Fields(text)
	cleanWords := make([]string, 0, len(words))
	for _, word := range words {
		cleanWord := strings.TrimFunc(word, func(r rune) bool {
			// NOTE(review): byte(r) truncates runes above 0xFF, so non-ASCII
			// characters may accidentally match the punctuation regexp —
			// confirm the regexp is only expected to see ASCII here.
			return PunctuationRegexp.Match([]byte{byte(r)}) || r == '‘' || r == '’'
		})
		if len(cleanWord) <= 2 {
			cleanWords = append(cleanWords, word)
		} else {
			cleanWords = append(cleanWords, cleanWord)
		}
	}
	return cleanWords
}
//IsAllCapDiff reports whether at least one word in the input is NOT
//entirely upper-case.
//NOTE(review): the name suggests VADER's "ALL-CAPS differential" (some
//words capitalized, some not), but this returns true even when no word is
//upper-case at all — confirm this matches the intended scoring behavior.
func IsAllCapDiff(words []string) bool {
	for _, word := range words {
		if word != strings.ToUpper(word) {
			return true
		}
	}
	return false
}
// ReplacePercentages rewrites signed percentage tokens (e.g. "+2%", "-2%")
// into the placeholder words that exist in the lexicon, so they can be
// scored like ordinary terms.
func ReplacePercentages(text string) string {
	withPositives := PositivePercentageRegexp.ReplaceAllString(text, " xpositivepercentx ")
	return NegativePercentageRegexp.ReplaceAllString(withPositives, " xnegativepercentx ")
}
//MakeLexiconMap parses tab-separated lexicon file contents ("word\tscore"
//per line) into a word -> sentiment-score map.
//Blank or malformed lines (no tab-separated score field) are skipped; a
//score that fails to parse as a float remains fatal, as before.
func MakeLexiconMap(lexicon string) map[string]float64 {
	lexiconDict := make(map[string]float64)
	for _, line := range strings.Split(strings.TrimSuffix(lexicon, "\n"), "\n") {
		values := strings.Split(strings.TrimSpace(line), "\t")
		// Bug fix: guard against lines without a second field, which
		// previously caused an index-out-of-range panic.
		if len(values) < 2 {
			continue
		}
		measure, err := strconv.ParseFloat(values[1], 64)
		if err != nil {
			log.Fatal(err)
		}
		lexiconDict[values[0]] = measure
	}
	return lexiconDict
}
// MakeEmojiLexiconMap parses tab-separated emoji lexicon file contents
// ("emoji\tdescription" per line) into an emoji -> description map.
// Blank lines (e.g. a trailing newline) and lines without a tab are skipped.
func MakeEmojiLexiconMap(emojiLexicon string) map[string]string {
	emojiLexiconDict := make(map[string]string)
	for _, line := range strings.Split(emojiLexicon, "\n") {
		values := strings.Split(strings.TrimSpace(line), "\t")
		// Bug fix: input ending in "\n" produced a final empty line whose
		// values[1] access panicked with index out of range.
		if len(values) < 2 {
			continue
		}
		emojiLexiconDict[values[0]] = values[1]
	}
	return emojiLexiconDict
}
// ContainsNegation reports whether the input words contain a negation cue:
// any word from the package-level Negations list, the word "least" when it
// is not preceded by "at" or "very", or (when IncludeNt is enabled) any
// word containing the "n't" contraction.
func ContainsNegation(inputWords []string) bool {
	for i, word := range inputWords {
		for _, negWord := range Negations {
			if negWord == word {
				return true
			}
		}
		if word == "least" {
			// "at least" / "very least" are not negations.
			if i > 0 && inputWords[i-1] != "at" && inputWords[i-1] != "very" {
				return true
			}
		}
		if IncludeNt {
			if strings.Contains(word, "n't") {
				return true
			}
		}
	}
	return false
}
package cryptobin
// Aes selects the AES cipher.
func (this Cryptobin) Aes() Cryptobin {
	this.multiple = "Aes"
	return this
}
// Des selects the DES cipher.
func (this Cryptobin) Des() Cryptobin {
	this.multiple = "Des"
	return this
}
// TriDes selects the Triple-DES (3DES) cipher.
func (this Cryptobin) TriDes() Cryptobin {
	this.multiple = "TriDes"
	return this
}
// Twofish selects the Twofish cipher.
func (this Cryptobin) Twofish() Cryptobin {
	this.multiple = "Twofish"
	return this
}
// Blowfish selects the Blowfish cipher; an optional salt is stored in the
// config for key setup.
func (this Cryptobin) Blowfish(salt ...string) Cryptobin {
	this.multiple = "Blowfish"
	if len(salt) > 0 {
		this.config["salt"] = []byte(salt[0])
	}
	return this
}
// Tea selects the TEA cipher; an optional round count overrides the default.
func (this Cryptobin) Tea(rounds ...int) Cryptobin {
	this.multiple = "Tea"
	if len(rounds) > 0 {
		this.config["rounds"] = rounds[0]
	}
	return this
}
// Xtea selects the XTEA cipher.
func (this Cryptobin) Xtea() Cryptobin {
	this.multiple = "Xtea"
	return this
}
// Cast5 selects the CAST5 cipher.
func (this Cryptobin) Cast5() Cryptobin {
	this.multiple = "Cast5"
	return this
}
// SM4 selects the SM4 cipher.
func (this Cryptobin) SM4() Cryptobin {
	this.multiple = "SM4"
	return this
}
// Chacha20 selects the ChaCha20 stream cipher, storing its nonce and an
// optional initial counter value.
func (this Cryptobin) Chacha20(nonce string, counter ...uint32) Cryptobin {
	this.multiple = "Chacha20"
	this.config["nonce"] = []byte(nonce)
	if len(counter) > 0 {
		this.config["counter"] = counter[0]
	}
	return this
}
// Chacha20poly1305 selects the ChaCha20-Poly1305 AEAD, storing the nonce
// and additional authenticated data.
// nonce is 12 bytes.
func (this Cryptobin) Chacha20poly1305(nonce string, additional string) Cryptobin {
	this.multiple = "Chacha20poly1305"
	this.config["nonce"] = []byte(nonce)
	this.config["additional"] = []byte(additional)
	return this
}
// RC4 selects the RC4 stream cipher.
func (this Cryptobin) RC4() Cryptobin {
	this.multiple = "RC4"
	return this
}
// ==========
// Block cipher mode selectors.
// ECB selects Electronic Codebook mode.
func (this Cryptobin) ECB() Cryptobin {
	this.mode = "ECB"
	return this
}
// CBC selects Cipher Block Chaining mode.
func (this Cryptobin) CBC() Cryptobin {
	this.mode = "CBC"
	return this
}
// CFB selects Cipher Feedback mode.
func (this Cryptobin) CFB() Cryptobin {
	this.mode = "CFB"
	return this
}
// OFB selects Output Feedback mode.
func (this Cryptobin) OFB() Cryptobin {
	this.mode = "OFB"
	return this
}
// CTR selects Counter mode.
func (this Cryptobin) CTR() Cryptobin {
	this.mode = "CTR"
	return this
}
// GCM selects Galois/Counter mode, storing the nonce and additional
// authenticated data it requires.
func (this Cryptobin) GCM(nonce string, additional string) Cryptobin {
	this.mode = "GCM"
	this.config["nonce"] = []byte(nonce)
	this.config["additional"] = []byte(additional)
	return this
}
// ==========
// Padding selectors. (Comments translated from Chinese: 补码 = padding.)
// NoPadding disables padding.
func (this Cryptobin) NoPadding() Cryptobin {
	this.padding = ""
	return this
}
// ZeroPadding selects zero-byte padding.
func (this Cryptobin) ZeroPadding() Cryptobin {
	this.padding = "Zero"
	return this
}
// PKCS5Padding selects PKCS#5 padding.
func (this Cryptobin) PKCS5Padding() Cryptobin {
	this.padding = "PKCS5"
	return this
}
// PKCS7Padding selects PKCS#7 padding.
func (this Cryptobin) PKCS7Padding() Cryptobin {
	this.padding = "PKCS7"
	return this
}
// X923Padding selects ANSI X9.23 padding.
func (this Cryptobin) X923Padding() Cryptobin {
	this.padding = "X923"
	return this
}
// ISO10126Padding selects ISO 10126 padding.
func (this Cryptobin) ISO10126Padding() Cryptobin {
	this.padding = "ISO10126"
	return this
}
// ISO7816_4Padding selects ISO/IEC 7816-4 padding.
func (this Cryptobin) ISO7816_4Padding() Cryptobin {
	this.padding = "ISO7816_4"
	return this
}
// TBCPadding selects trailing-bit-complement padding.
func (this Cryptobin) TBCPadding() Cryptobin {
	this.padding = "TBC"
	return this
}
// PKCS1Padding selects PKCS#1 padding; the optional bt argument stores the
// PKCS#1 block type in the config.
func (this Cryptobin) PKCS1Padding(bt ...string) Cryptobin {
	this.padding = "PKCS1"
	if len(bt) > 0 {
		this.config["pkcs1_padding_bt"] = bt[0]
	}
	return this
}
// ==========
// SetIv sets the initialization vector (translated: 向量 = vector/IV).
func (this Cryptobin) SetIv(data string) Cryptobin {
	this.iv = []byte(data)
	return this
}
// SetKey sets the encryption key (translated: 密码 = key/password).
func (this Cryptobin) SetKey(data string) Cryptobin {
	this.key = []byte(data)
	return this
}
// ==========
// NoParse stores the raw input without any decoding
// (translated: 不做处理 = no processing).
func (this Cryptobin) NoParse() Cryptobin {
	this.parsedData = this.data
	return this
}
package geo
import (
"fmt"
"math"
)
// A Projector is a function that converts the given point to a different
// space, mutating the point in place.
type Projector func(p *Point)

// A Projection is a set of projectors to map forward and backwards to the
// projected space.
type Projection struct {
	Project Projector
	Inverse Projector
}
// mercatorPole is the projected extent of the spherical Mercator plane in
// meters (half the circumference at the equator).
const mercatorPole = 20037508.34

// Mercator projection, performs EPSG:3857, sometimes also described as
// EPSG:900913. The y value is clamped to the projection's pole limits.
var Mercator = Projection{
	Project: func(p *Point) {
		p.SetX(mercatorPole / 180.0 * p.Lng())

		y := math.Log(math.Tan((90.0+p.Lat())*math.Pi/360.0)) / math.Pi * mercatorPole
		// Clamp y so extreme latitudes stay within the square world extent.
		p.SetY(math.Max(-mercatorPole, math.Min(y, mercatorPole)))
	},
	Inverse: func(p *Point) {
		p.SetLng(p.X() * 180.0 / mercatorPole)
		p.SetLat(180.0 / math.Pi * (2*math.Atan(math.Exp((p.Y()/mercatorPole)*math.Pi)) - math.Pi/2.0))
	},
}
// MercatorScaleFactor returns the local scale distortion of the Mercator
// projection at the given latitude (the secant of the latitude).
// It panics when degreesLatitude is outside [-90, 90].
func MercatorScaleFactor(degreesLatitude float64) float64 {
	if degreesLatitude < -90.0 || degreesLatitude > 90.0 {
		panic(fmt.Sprintf("geo: latitude out of range, given %f", degreesLatitude))
	}
	radians := degreesLatitude / 180.0 * math.Pi
	return 1.0 / math.Cos(radians)
}
// BuildTransverseMercator builds a transverse Mercator projection
// that automatically recenters the longitude around the provided centerLng.
// Works correctly around the anti-meridian.
// http://en.wikipedia.org/wiki/Transverse_Mercator_projection
func BuildTransverseMercator(centerLng float64) Projection {
	return Projection{
		Project: func(p *Point) {
			// Normalize the recentered longitude into (-180, 180].
			// Bug fix: the wrap-around test was `lng < 180`, which shifted
			// every in-range longitude by +360 only for the second test to
			// undo it (and mapped exactly -180 to 180).
			lng := p.Lng() - centerLng
			if lng < -180 {
				lng += 360.0
			}
			if lng > 180 {
				lng -= 360.0
			}
			p.SetLng(lng)
			TransverseMercator.Project(p)
		},
		Inverse: func(p *Point) {
			TransverseMercator.Inverse(p)

			// Shift back to absolute longitude and normalize again.
			lng := p.Lng() + centerLng
			if lng < -180 {
				lng += 360.0
			}
			if lng > 180 {
				lng -= 360.0
			}
			p.SetLng(lng)
		},
	}
}
// TransverseMercator implements a default transverse Mercator projector
// that will only work well +-10 degrees around longitude 0.
// Use BuildTransverseMercator to recenter it on another meridian.
var TransverseMercator = Projection{
	Project: func(p *Point) {
		radLat := deg2rad(p.Lat())
		radLng := deg2rad(p.Lng())

		sincos := math.Sin(radLng) * math.Cos(radLat)
		p.SetX(0.5 * math.Log((1+sincos)/(1-sincos)) * EarthRadius)

		p.SetY(math.Atan(math.Tan(radLat)/math.Cos(radLng)) * EarthRadius)
	},
	Inverse: func(p *Point) {
		x := p.X() / EarthRadius
		y := p.Y() / EarthRadius

		lng := math.Atan(math.Sinh(x) / math.Cos(y))
		lat := math.Asin(math.Sin(y) / math.Cosh(x))

		p.SetLng(rad2deg(lng))
		p.SetLat(rad2deg(lat))
	},
}
// ScalarMercator converts from lng/lat float64 to x,y uint64 tile
// coordinates. This is the same as Google's world coordinates.
// Level is the default zoom level used when Project/Inverse are called
// without an explicit level argument.
var ScalarMercator struct {
	Level   uint64
	Project func(lng, lat float64, level ...uint64) (x, y uint64)
	Inverse func(x, y uint64, level ...uint64) (lng, lat float64)
}

// init wires the ScalarMercator function fields to the package-level
// implementations, defaulting to zoom level 31 when no level is passed.
func init() {
	ScalarMercator.Level = 31

	ScalarMercator.Project = func(lng, lat float64, level ...uint64) (x, y uint64) {
		l := ScalarMercator.Level
		if len(level) != 0 {
			l = level[0]
		}

		return scalarMercatorProject(lng, lat, l)
	}

	ScalarMercator.Inverse = func(x, y uint64, level ...uint64) (lng, lat float64) {
		l := ScalarMercator.Level
		if len(level) != 0 {
			l = level[0]
		}

		return scalarMercatorInverse(x, y, l)
	}
}
// scalarMercatorProject maps a lng/lat pair in degrees onto integer tile
// coordinates at the given zoom level (2^level tiles per axis), clamping
// the vertical coordinate at the poles.
func scalarMercatorProject(lng, lat float64, level uint64) (x, y uint64) {
	factor := uint64(1) << level
	maxtiles := float64(factor)

	// Horizontal: shift longitude from [-180, 180] into [0, 1] and scale.
	lng = lng/360.0 + 0.5
	x = uint64(lng * maxtiles)

	// Vertical: spherical Mercator, clamped near the poles ("top of the
	// world" problem) so y stays inside [0, factor-1].
	siny := math.Sin(lat * math.Pi / 180.0)
	switch {
	case siny < -0.9999:
		y = 0
	case siny > 0.9999:
		y = factor - 1
	default:
		// This formula was previously duplicated (with its result unused)
		// inside both clamped branches above; it is only needed here.
		lat = 0.5 + 0.5*math.Log((1.0+siny)/(1.0-siny))/(-2*math.Pi)
		y = uint64(lat * maxtiles)
	}

	return
}
// scalarMercatorInverse maps integer tile coordinates at the given zoom
// level back to a lng/lat pair in degrees.
func scalarMercatorInverse(x, y, level uint64) (lng, lat float64) {
	maxtiles := float64(uint64(1) << level)

	lng = 360.0 * (float64(x)/maxtiles - 0.5)
	lat = (2.0*math.Atan(math.Exp(math.Pi-(2*math.Pi)*(float64(y))/maxtiles)))*(180.0/math.Pi) - 90.0

	return lng, lat
}
package imagehash
import (
"errors"
"strconv"
)
// BitArray is an internal struct used by dhash to simplify appending bits to
// a byte array from left to right (most-significant bit first).
type BitArray struct {
	byteArray []byte // backing storage, filled one bit at a time
	max       int    // total number of bytes in byteArray
	mask0     byte   // 0x00 — OR-ing it in leaves the bit cleared
	mask1     byte   // 0x01 — shifted into position to set a bit
	arrayIdx  int    // index of the byte currently being filled
	bitIdx    uint   // next bit position within the current byte (7 = MSB)
}

// NewBitArray is a constructor function for the BitArray struct.
// The input, 'numBits', is the number of bits this byte array will hold,
// so it must be a positive multiple of 8.
func NewBitArray(numBits int) (*BitArray, error) {
	// Bug fix: also reject negative values (and zero), which previously
	// slipped past the `== 0 || % 8 != 0` check for multiples of -8 and
	// made `make` panic with a negative length.
	if numBits <= 0 || numBits%8 != 0 {
		return nil, errors.New("'numBits' must be a positive multiple of 8")
	}

	return &BitArray{
		byteArray: make([]byte, numBits/8),
		max:       numBits / 8,
		mask0:     0x00,
		mask1:     0x01,
		arrayIdx:  0,
		bitIdx:    7,
	}, nil
}

// AppendBit appends a 1 or a 0 to the byte array in the BitArray struct,
// filling each byte from its most-significant bit downwards.
// Valid input is an int of '1' or '0'; appending to a full array or with
// any other value returns an error.
func (ab *BitArray) AppendBit(bit int) error {
	if ab.arrayIdx == ab.max {
		return errors.New("cannot continue to append to a full byte array")
	}

	// Shift the mask (bit of 1 or 0) by the proper amount to fill the
	// byte up from left to right.
	switch bit {
	case 0:
		ab.byteArray[ab.arrayIdx] |= ab.mask0 << ab.bitIdx
	case 1:
		ab.byteArray[ab.arrayIdx] |= ab.mask1 << ab.bitIdx
	default:
		return errors.New("can only append with 1 or 0, but received: " + strconv.Itoa(bit))
	}

	if ab.bitIdx > 0 {
		// Move to the next bit position within the current byte.
		ab.bitIdx--
	} else {
		// The last bit of this byte is set; advance to the next byte.
		ab.arrayIdx++
		ab.bitIdx = 7
	}

	return nil
}

// GetArray returns the byte array in its current state. It can be called
// at any time, including before the array is full.
func (ab BitArray) GetArray() []byte {
	return ab.byteArray
}
package cios
import (
"encoding/json"
)
// RecordedDates struct for RecordedDates (generated OpenAPI model).
type RecordedDates struct {
	// List of days (of the month) on which data exists, e.g. [1, 20, 22, 23].
	Dates []int64 `json:"dates"`
}
// NewRecordedDates instantiates a new RecordedDates object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewRecordedDates(dates []int64, ) *RecordedDates {
	this := RecordedDates{}
	this.Dates = dates
	return &this
}
// NewRecordedDatesWithDefaults instantiates a new RecordedDates object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewRecordedDatesWithDefaults() *RecordedDates {
	this := RecordedDates{}
	return &this
}
// GetDates returns the Dates field value.
// A nil receiver yields the zero value.
func (o *RecordedDates) GetDates() []int64 {
	if o == nil {
		var ret []int64
		return ret
	}
	return o.Dates
}
// GetDatesOk returns a tuple with the Dates field value
// and a boolean to check if the value has been set.
func (o *RecordedDates) GetDatesOk() (*[]int64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Dates, true
}
// SetDates sets field value
func (o *RecordedDates) SetDates(v []int64) {
	o.Dates = v
}
// MarshalJSON serializes the model; the generated `if true` guard always
// includes the required "dates" field.
func (o RecordedDates) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if true {
		toSerialize["dates"] = o.Dates
	}
	return json.Marshal(toSerialize)
}
// NullableRecordedDates wraps a RecordedDates pointer together with an
// explicit "set" flag so that JSON null and absent values can be told apart
// (generated OpenAPI helper type).
type NullableRecordedDates struct {
	value *RecordedDates
	isSet bool
}
// Get returns the wrapped value (may be nil).
func (v NullableRecordedDates) Get() *RecordedDates {
	return v.value
}
// Set stores val and marks the wrapper as set.
func (v *NullableRecordedDates) Set(val *RecordedDates) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether a value (possibly nil) has been stored.
func (v NullableRecordedDates) IsSet() bool {
	return v.isSet
}
// Unset clears the value and the set flag.
func (v *NullableRecordedDates) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableRecordedDates constructs an already-set wrapper around val.
func NewNullableRecordedDates(val *RecordedDates) *NullableRecordedDates {
	return &NullableRecordedDates{value: val, isSet: true}
}
// MarshalJSON serializes the wrapped value (nil encodes as JSON null).
func (v NullableRecordedDates) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableRecordedDates) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package chunk
import (
"bytes"
"encoding/binary"
"fmt"
"hash/crc32"
)
// Chunk represents one of the many chunks of a png file.
type Chunk struct {
	id       int    // chunk index
	dataSize []byte // 4 bytes - big-endian length of the data field
	tipo     []byte // 4 bytes - [a-Z] letters only (chunk type, e.g. "IDAT")
	Data     []byte // dataSize bytes of payload
	crc      []byte // 4 bytes - CRC-32 over type + data
}

// GetDataSize returns the data-field size as an integer.
// Malformed chunks with fewer than 4 size bytes yield 0.
func (r *Chunk) GetDataSize() uint32 {
	// Guard against short buffers too, not only empty ones: the original
	// `len == 0` check let 1-3 byte slices panic inside BigEndian.Uint32.
	if len(r.dataSize) < 4 {
		return 0
	}
	return binary.BigEndian.Uint32(r.dataSize)
}

// SetDataSize sets the chunk's 4-byte dataSize field.
func (r *Chunk) SetDataSize(size []byte) {
	r.dataSize = size
}

// GetType returns the chunk type ("IHDR", "IDAT", ...), or "undefined" when
// the type field is empty.
func (r *Chunk) GetType() string {
	// Bug fix: the emptiness check previously inspected dataSize instead of
	// the type field itself, so a typed chunk with no size bytes reported
	// "undefined" and a type-less chunk with size bytes returned "".
	if len(r.tipo) == 0 {
		return "undefined"
	}
	return string(r.tipo)
}

// GetCRC returns the stored CRC as an integer.
// Malformed chunks with fewer than 4 CRC bytes yield 0.
func (r *Chunk) GetCRC() uint32 {
	if len(r.crc) < 4 {
		return 0
	}
	return binary.BigEndian.Uint32(r.crc)
}

// SetCRC sets the stored CRC bytes.
func (r *Chunk) SetCRC(ncrc []byte) {
	r.crc = ncrc
}

// CalcCRC recomputes the CRC-32 (IEEE) over the type and data fields,
// which is how PNG defines a chunk's checksum.
func (r *Chunk) CalcCRC() uint32 {
	if len(r.tipo) == 0 {
		return 0
	}
	return crc32.ChecksumIEEE(append(r.tipo, r.Data...))
}

// String returns a human-readable summary of the chunk: declared size,
// type, the stored CRC and the freshly recomputed CRC (CRC').
func (r Chunk) String() string {
	s := fmt.Sprint("Data size: ", r.GetDataSize()) + "\n"
	s += "Type: " + r.GetType() + "\n"
	s += "Data: [...]\n"
	s += fmt.Sprintf("CRC: %v", r.GetCRC()) + "\n"
	s += fmt.Sprintf("CRC': %v", r.CalcCRC()) + "\n"
	return s
}

// ToBytes serializes the chunk back to its on-disk layout:
// dataSize | type | data | crc.
func (r *Chunk) ToBytes() []byte {
	raw := []byte{}
	raw = append(raw, r.dataSize...)
	raw = append(raw, r.tipo...)
	raw = append(raw, r.Data...)
	raw = append(raw, r.crc...)
	return raw
}
// Parse decodes one chunk starting at *index in data and advances *index
// past it (4 size bytes + 4 type bytes + dataSize data bytes + 4 CRC bytes).
// NOTE(review): there is no bounds checking — truncated or corrupt input
// will panic with an out-of-range slice index; confirm callers validate
// lengths before calling.
func Parse(index *uint32, data []byte) Chunk {
	chunk := Chunk{}
	chunk.dataSize = append(chunk.dataSize, data[*index:*index+4]...)
	*index = *index + 4
	chunk.tipo = append(chunk.tipo, data[*index:*index+4]...)
	*index = *index + 4
	chunk.Data = append(chunk.Data, data[*index:*index+chunk.GetDataSize()]...)
	*index = *index + chunk.GetDataSize()
	chunk.crc = append(chunk.crc, data[*index:*index+4]...)
	*index = *index + 4
	return chunk
}
// CreateChunk builds a new chunk of the given type around data, computing
// both the 4-byte big-endian size field and the CRC.
func CreateChunk(data []byte, tipo []byte) Chunk {
	chunk := Chunk{tipo: tipo, Data: data}

	size := make([]byte, 4)
	binary.BigEndian.PutUint32(size, uint32(len(data)))
	chunk.dataSize = size

	crcBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(crcBytes, chunk.CalcCRC())
	chunk.crc = crcBytes

	return chunk
}
// BuildIDATChunks will create a bunch of IDAT chunks from a compressed rawBytes array
func BuildIDATChunks(bytesBuff *bytes.Buffer, chunkSize uint32) []Chunk {
defer bytesBuff.Reset()
var rawBytes []byte = bytesBuff.Bytes()
var pointer uint32 = 0
var chunks []Chunk
var length uint32 = uint32(len(rawBytes) - 1)
var tipo []byte = []byte("IDAT")
for {
if pointer >= length {
break
}
b := pointer
e := pointer + chunkSize
if e > length {
e = length + 1
}
chunks = append(chunks, CreateChunk(rawBytes[b:e], tipo))
pointer = e
}
return chunks
} | chunk/entry.go | 0.715821 | 0.412826 | entry.go | starcoder |
package osgb
import (
"errors"
"math"
)
var (
// ErrPointOutsidePolygon indicates the position is too far offshore to be transformed.
ErrPointOutsidePolygon = errors.New("point outside polygon")
// ErrPointOutsideTransformation indicates the position is completely outside the grid transformation extent
ErrPointOutsideTransformation = errors.New("point outside transformation limits")
)
const (
nEastIndices = 701
translationVectorFile02 = "data/OSTN02_OSGM02_GB.txt"
translationVectorFile15 = "data/OSTN15_OSGM15_GB.txt"
)
type geoidRegion uint8
const (
Region_OUTSIDE_BOUNDARY geoidRegion = 0 // 02
Region_UK_MAINLAND geoidRegion = 1 // 02,15
Region_SCILLY_ISLES geoidRegion = 2 // 02,15
Region_ISLE_OF_MAN geoidRegion = 3 // 02,15
Region_OUTER_HEBRIDES geoidRegion = 4 // 02,15
Region_ST_KILDA geoidRegion = 5 // 02
Region_SHETLAND_ISLES geoidRegion = 6 // 02,15
Region_ORKNEY_ISLES geoidRegion = 7 // 02,15
Region_FAIR_ISLE geoidRegion = 8 // 02
Region_FLANNAN_ISLES geoidRegion = 9 // 02
Region_NORTH_RONA geoidRegion = 10 // 02
Region_SULE_SKERRY geoidRegion = 11 // 02
Region_FOULA geoidRegion = 12 // 02
Region_REPUBLIC_OF_IRELAND geoidRegion = 13 // 02
Region_NORTHERN_IRELAND geoidRegion = 14 // 02
Region_OFFSHORE geoidRegion = 15 // 15
Region_OUTSIDE_TRANSFORMATION geoidRegion = 16 // 15
)
// CoordinateTransformer is used to convert between OSGB36/ODN and ETRS89 geodetic datums
type CoordinateTransformer interface {
	// ToNationalGrid converts a coordinate position from ETRS89 to OSGB36/ODN
	ToNationalGrid(c *ETRS89Coordinate) (*OSGB36Coordinate, error)
	// FromNationalGrid converts a coordinate position from OSGB36/ODN to ETRS89
	FromNationalGrid(c *OSGB36Coordinate) (*ETRS89Coordinate, error)
}

// transformer implements CoordinateTransformer on top of a loaded set of
// OSTN/OSGM translation-vector records.
type transformer struct {
	records []record
}
// ToNationalGrid converts an ETRS89 geodetic position into an OSGB36
// easting/northing with an ODN orthometric height.
func (tr *transformer) ToNationalGrid(c *ETRS89Coordinate) (*OSGB36Coordinate, error) {
	// Project the geodetic coordinate onto the national-grid plane first.
	phi := degreesToRadians(c.Lat)
	lambda := degreesToRadians(c.Lon)
	projected := nationalGridProjection.toPlaneCoord(phi, lambda, grs80Ellipsoid)

	// Apply the OSTN grid shift; the geoid region result is unused here.
	plane := &planeCoord{easting: projected.easting, northing: projected.northing}
	shifted, odnHeight, _, err := tr.toOSGB36(plane, c.Height)
	if err != nil {
		return nil, err
	}
	return &OSGB36Coordinate{
		Easting:  shifted.easting,
		Northing: shifted.northing,
		Height:   odnHeight,
	}, nil
}
// FromNationalGrid converts an OSGB36 easting/northing with an ODN height
// into an ETRS89 latitude/longitude with an ellipsoid height.
func (tr *transformer) FromNationalGrid(c *OSGB36Coordinate) (*ETRS89Coordinate, error) {
	grid := &planeCoord{easting: c.Easting, northing: c.Northing}
	shifted, height, err := tr.fromOSGB36(grid, c.Height)
	if err != nil {
		return nil, err
	}
	// Unproject the plane coordinate back to geodetic lat/lon.
	latRad, lonRad := nationalGridProjection.fromPlaneCoord(shifted, grs80Ellipsoid)
	return &ETRS89Coordinate{
		Lat:    radiansToDegrees(latRad),
		Lon:    radiansToDegrees(lonRad),
		Height: height,
	}, nil
}
// nearestGeoidRegion returns the geoid region of the grid corner closest to
// the position. t and u are the point's fractional offsets (in km) within
// its 1 km grid cell, measured from the s0 corner; the comparisons map each
// quadrant of the cell to one of the four corner records (s0 when both
// offsets are <= 0.5, s1 when only u is, s2 when both exceed 0.5, s3
// otherwise).
func nearestGeoidRegion(etrs89Coord *planeCoord, rs *shiftRecords) geoidRegion {
	dx := etrs89Coord.easting - float64(rs.s0.etrs89Easting)
	t := dx / 1000.0
	dy := etrs89Coord.northing - float64(rs.s0.etrs89Northing)
	u := dy / 1000.0
	if t <= 0.5 && u <= 0.5 {
		return rs.s0.geoidRegion
	} else if u <= 0.5 {
		return rs.s1.geoidRegion
	} else if t > 0.5 {
		return rs.s2.geoidRegion
	}
	return rs.s3.geoidRegion
}
// toOSGB36 applies the OSTN grid shift to an ETRS89 plane coordinate. It
// returns the shifted OSGB36 easting/northing, the ODN orthometric height
// (ellipsoid height minus the interpolated geoid height) and the geoid
// region of the nearest grid corner.
//
// The east shift, north shift and geoid height are each bilinearly
// interpolated from the four records surrounding the point; (t, u) are the
// fractional km offsets within the 1 km cell and (it, iu) their complements.
//
// NOTE(review): on error the geoidRegion result is Region_FOULA, which looks
// like an arbitrary placeholder rather than a meaningful sentinel — callers
// should only consult it when err is nil; confirm.
func (tr *transformer) toOSGB36(etrs89Coord *planeCoord, etrs89Height float64) (*planeCoord, float64, geoidRegion, error) {
	rs, err := tr.getShiftRecords(etrs89Coord)
	if err != nil {
		return nil, 0, Region_FOULA, err
	}
	dx := etrs89Coord.easting - float64(rs.s0.etrs89Easting)
	t := dx / 1000.0
	it := 1 - t
	dy := etrs89Coord.northing - float64(rs.s0.etrs89Northing)
	u := dy / 1000.0
	iu := 1 - u
	// Bilinear interpolation over the four surrounding grid records.
	shiftEast := it*iu*rs.s0.ostnEastShift +
		t*iu*rs.s1.ostnEastShift +
		t*u*rs.s2.ostnEastShift +
		it*u*rs.s3.ostnEastShift
	shiftNorth := it*iu*rs.s0.ostnNorthShift +
		t*iu*rs.s1.ostnNorthShift +
		t*u*rs.s2.ostnNorthShift +
		it*u*rs.s3.ostnNorthShift
	geoidHeight := it*iu*rs.s0.ostnGeoidHeight +
		t*iu*rs.s1.ostnGeoidHeight +
		t*u*rs.s2.ostnGeoidHeight +
		it*u*rs.s3.ostnGeoidHeight
	geoidRegion := nearestGeoidRegion(etrs89Coord, rs)
	return &planeCoord{
		easting:  etrs89Coord.easting + shiftEast,
		northing: etrs89Coord.northing + shiftNorth,
	}, etrs89Height - geoidHeight, geoidRegion, nil
}
// fromOSGB36 inverts the OSTN grid shift: given an OSGB36 plane coordinate
// and ODN height, it returns the ETRS89 plane coordinate and ellipsoid
// height. Because the shift depends on the (unknown) ETRS89 position, the
// inverse is found by fixed-point iteration: start from the OSGB36 position,
// interpolate the shift there, subtract it, and repeat until the estimate
// moves by no more than epsilon (0.1 mm).
//
// NOTE(review): there is no iteration cap — if the iteration failed to
// converge this would loop forever; confirm the grid guarantees convergence
// for all in-range inputs.
func (tr *transformer) fromOSGB36(osgb36Coord *planeCoord, odnHeight float64) (*planeCoord, float64, error) {
	etrs89Coord := &planeCoord{
		easting:  osgb36Coord.easting,
		northing: osgb36Coord.northing,
	}
	etrs89Height := odnHeight
	// Iteratively find the map coordinate shift.
	for {
		rs, err := tr.getShiftRecords(etrs89Coord)
		if err != nil {
			return nil, 0, err
		}
		dx := etrs89Coord.easting - float64(rs.s0.etrs89Easting)
		t := dx / 1000.0
		it := 1 - t
		dy := etrs89Coord.northing - float64(rs.s0.etrs89Northing)
		u := dy / 1000.0
		iu := 1 - u
		// Bilinear interpolation at the current ETRS89 estimate.
		shiftEast := it*iu*rs.s0.ostnEastShift +
			t*iu*rs.s1.ostnEastShift +
			t*u*rs.s2.ostnEastShift +
			it*u*rs.s3.ostnEastShift
		shiftNorth := it*iu*rs.s0.ostnNorthShift +
			t*iu*rs.s1.ostnNorthShift +
			t*u*rs.s2.ostnNorthShift +
			it*u*rs.s3.ostnNorthShift
		geoidHeight := it*iu*rs.s0.ostnGeoidHeight +
			t*iu*rs.s1.ostnGeoidHeight +
			t*u*rs.s2.ostnGeoidHeight +
			it*u*rs.s3.ostnGeoidHeight
		const epsilon = 0.0001
		newEasting := osgb36Coord.easting - shiftEast
		newNorthing := osgb36Coord.northing - shiftNorth
		newHeight := odnHeight + geoidHeight
		// Stop when the estimate is stable to within epsilon on all axes.
		if math.Abs(etrs89Coord.easting-newEasting) <= epsilon &&
			math.Abs(etrs89Coord.northing-newNorthing) <= epsilon &&
			math.Abs(etrs89Height-newHeight) <= epsilon {
			break
		}
		etrs89Coord.easting = newEasting
		etrs89Coord.northing = newNorthing
		etrs89Height = newHeight
	}
	return etrs89Coord, etrs89Height, nil
}
// NewOSTN02Transformer returns a transformer that uses OSTN02/OSGM02
func NewOSTN02Transformer() (CoordinateTransformer, error) {
records, err := readRecords(translationVectorFile02)
if err != nil {
return nil, err
}
return &transformer{
records: records,
}, nil
}
// NewOSTN15Transformer returns a transformer that uses OSTN15/OSGM15
func NewOSTN15Transformer() (CoordinateTransformer, error) {
records, err := readRecords(translationVectorFile15)
if err != nil {
return nil, err
}
return &transformer{
records: records,
}, nil
} | ostn_osgm_gb.go | 0.673192 | 0.429848 | ostn_osgm_gb.go | starcoder |
package dusl
import (
"fmt"
)
// Sparser stands for Superpermissive-Parser.
// The Sparse method converts an ambit into a syntax tree,
// the SparseUndent method converts an entire source into a syntax tree.
type Sparser interface {
	Sparse(ambit *Ambit) *Syntax
	SparseUndent(src *Source) *Syntax
}

// sparser is the default Sparser implementation: a spanner tokenizes an
// ambit into spans, and the embedded precedenceLevels drive how spans are
// combined into a tree.
type sparser struct {
	precedenceLevels
	spanner spannerI
}

// newSparser returns a Sparser using the given spanner and a copy of the
// given precedence table.
func newSparser(spanner spannerI, precedence *precedenceLevels) Sparser {
	return &sparser{spanner: spanner, precedenceLevels: *precedence}
}
// SparseUndent builds the block structure of the whole source via Undent,
// then parses every statement-node ambit in place and returns the tree root.
func (this *sparser) SparseUndent(source *Source) *Syntax {
	tree := Undent(source)
	this.sparseSQ(tree)
	return tree
}
// sparseSQ walks a statement-sequence tree: "SQ" nodes recurse on both
// children, "SN" nodes get their left ambit parsed into a syntax tree and
// recurse on the right; anything else (the empty tail) is left alone.
func (this *sparser) sparseSQ(node *Syntax) {
	switch node.Cat {
	case "SQ":
		this.sparseSQ(node.Left)
		this.sparseSQ(node.Right)
	case "SN":
		node.Left = this.Sparse(node.Left.Ambit)
		this.sparseSQ(node.Right)
	}
}
// Sparse returns the syntax tree constructed for the given ambit.
// The ambit is first tokenized by the configured spanner, then parsed
// starting at the lowest operator precedence level (1).
func (this *sparser) Sparse(ambit *Ambit) *Syntax {
	return this.sparse(ambit, this.spanner.span(ambit), 1)
}
// sparse recursively builds a syntax tree for ambit from its lexical spans,
// resolving operator precedence top-down. minPrecedence is the lowest
// precedence still allowed at this point in the tree. The function proceeds
// in stages:
//
//  1. trim surrounding whitespace; empty input yields an empty node;
//  2. a single span becomes a leaf, an ERR node for an operator that cannot
//     stand alone, or a bracketed sub-parse for a span with children;
//  3. otherwise, search for the lowest-precedence split point, trying in
//     order: leading/trailing juxtaposition ("JUXT" across whitespace,
//     "GLUE" when adjacent), prefix and suffix operators, then infix
//     operators scanned inwards from both ends. A candidate whose
//     precedence equals minPrecedence is taken immediately; otherwise the
//     best candidate found is used after the scan;
//  4. with no candidate at all, fall back to splitting at the second span.
func (this *sparser) sparse(ambit *Ambit, spans []*spanT, minPrecedence int) *Syntax {
	ambit, spans = trimSpans(ambit, spans)
	if len(spans) == 0 {
		return &Syntax{ Ambit: ambit }
	}
	// Single span: leaf, standalone operator, or bracketed sub-expression.
	if len(spans) == 1 {
		span := spans[0]
		lit := span.Lit
		if span.Children == nil {
			if span.Cat == "OP" {
				if this.precedenceEFE[lit] < minPrecedence {
					return &Syntax{ Cat: "ERR", Err: fmt.Sprintf("unexpected: %s", lit), Ambit: ambit }
				}
				return &Syntax{ Cat: "OP", Lit: lit, Ambit: ambit, OpAmbit: span.Ambit,
					Left: &Syntax{ Ambit: span.Ambit.CollapseLeft() },
					Right: &Syntax{ Ambit: span.Ambit.CollapseRight() } }
			}
			return &Syntax{ Cat: span.Cat, Lit: lit, Err: span.Err, Ambit: ambit, OpAmbit: span.Ambit }
		}
		// span.Cat == "BB"
		precedence, recognized := this.precedenceB[lit]
		if !recognized {
			return &Syntax{ Cat: "ERR", Err: fmt.Sprintf("unexpected: %s", lit), Ambit: ambit }
		}
		return &Syntax{ Cat: span.Cat, Lit: lit, Ambit: span.Ambit,
			Left: this.sparse(span.SubAmbit, span.Children, precedence),
			Right: &Syntax{ Ambit: span.Ambit.CollapseRight() } }
	}
	// Best-candidate bookkeeping: l is the last index; splitPrecedence starts
	// above every real precedence so any candidate improves on it.
	l, splitPrecedence, splitLoc, splitPrecLeft, splitPrecRight := len(spans)-1, maxPrecedence+1, -1, -1, -1
	// Leading non-operator span: juxtaposition with (LWA) or without (LA)
	// intervening whitespace.
	if span := spans[0]; span.Cat != "OP" {
		if ws := spans[1]; ws.Cat == "WS" { // implies: len(spans) >= 3
			lit := span.Lit
			prec := this.precedenceLWA[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkRightwardJuxtapositionCandidate(spans, 1, prec) {
					if prec == minPrecedence {
						return &Syntax{ Cat: "JUXT", Lit: " ", Ambit: ambit, OpAmbit: ws.Ambit,
							Left: this.sparse(ambit.SubtractRight(ws.Ambit), spans[:1], prec),
							Right: this.sparse(ambit.SubtractLeft(ws.Ambit), spans[2:], prec) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = 1, prec, prec, prec
				}
			}
		} else {
			lit := span.Lit
			prec := this.precedenceLA[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkRightwardJuxtapositionCandidate(spans, 0, prec) {
					if prec == minPrecedence {
						return &Syntax{ Cat: "GLUE", Lit: "", Ambit: ambit, OpAmbit: span.Ambit.CollapseRight(),
							Left: this.sparse(span.Ambit, spans[:1], prec),
							Right: this.sparse(ambit.SubtractLeft(span.Ambit), spans[1:], prec) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = 0, prec, prec, prec
				}
			}
		}
	}
	// Trailing non-operator span: mirror of the above (AWL / AL tables).
	if span := spans[l]; span.Cat != "OP" {
		if ws := spans[l-1]; ws.Cat == "WS" { // implies: len(spans) >= 3
			lit := span.Lit
			prec := this.precedenceAWL[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkLeftwardJuxtapositionCandidate(spans, l-1, prec) {
					if prec == minPrecedence {
						return &Syntax{ Cat: "JUXT", Lit: " ", Ambit: ambit, OpAmbit: ws.Ambit,
							Left: this.sparse(ambit.SubtractRight(ws.Ambit), spans[:l-1], prec),
							Right: this.sparse(ambit.SubtractLeft(ws.Ambit), spans[l:], prec) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = l-1, prec, prec, prec
				}
			}
		} else {
			lit := span.Lit
			prec := this.precedenceAL[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkLeftwardJuxtapositionCandidate(spans, l, prec) {
					if prec == minPrecedence {
						return &Syntax{ Cat: "GLUE", Lit: "", Ambit: ambit, OpAmbit: span.Ambit.CollapseLeft(),
							Left: this.sparse(ambit.SubtractRight(span.Ambit), spans[:l], prec),
							Right: this.sparse(span.Ambit, spans[l:], prec) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = l, prec, prec, prec
				}
			}
		}
	}
	// Leading operator used as a prefix operator (EFA table).
	// NOTE(review): this branch records a candidate only for prec strictly
	// greater than minPrecedence, while the suffix branch below uses >= —
	// confirm the asymmetry is intentional.
	if span := spans[0]; span.Cat == "OP" {
		lit := span.Lit
		prec := this.precedenceEFA[lit]
		if prec == minPrecedence {
			return &Syntax{ Cat: span.Cat, Lit: lit, Ambit: ambit, OpAmbit: span.Ambit,
				Left: &Syntax{ Ambit: span.Ambit.CollapseLeft() },
				Right: this.sparse(ambit.SubtractLeft(span.Ambit), spans[1:], prec) }
		}
		if prec > minPrecedence && prec < splitPrecedence {
			splitLoc, splitPrecedence, splitPrecRight = 0, prec, prec
		}
	}
	// Trailing operator used as a suffix operator (AFE table).
	if span := spans[l]; span.Cat == "OP" {
		lit := span.Lit
		prec := this.precedenceAFE[lit]
		if prec == minPrecedence {
			return &Syntax{ Cat: span.Cat, Lit: lit, Ambit: ambit, OpAmbit: span.Ambit,
				Left: this.sparse(ambit.SubtractRight(span.Ambit), spans[:l], prec),
				Right: &Syntax{ Ambit: span.Ambit.CollapseRight() } }
		}
		if prec >= minPrecedence && prec < splitPrecedence {
			splitLoc, splitPrecedence, splitPrecLeft = l, prec, prec
		}
	}
	// Infix operators, scanned inwards from both ends simultaneously:
	// AFB is left-associative (left side parses one level tighter),
	// BFA is right-associative (right side parses one level tighter).
	for indexLR := 1; indexLR < l; indexLR++ {
		if span := spans[indexLR]; span.Cat == "OP" {
			lit := span.Lit
			prec := this.precedenceAFB[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkInfixCandidate(spans, indexLR, prec, prec+1) {
					if prec == minPrecedence {
						return &Syntax{ Cat: span.Cat, Lit: lit, Ambit: ambit, OpAmbit: span.Ambit,
							Left: this.sparse(ambit.SubtractRight(span.Ambit), spans[:indexLR], prec+1),
							Right: this.sparse(ambit.SubtractLeft(span.Ambit), spans[indexLR+1:], prec) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = indexLR, prec, prec+1, prec
				}
			}
		}
		indexRL := l - indexLR
		if span := spans[indexRL]; span.Cat == "OP" {
			lit := span.Lit
			prec := this.precedenceBFA[lit]
			if prec >= minPrecedence && prec < splitPrecedence {
				if this.checkInfixCandidate(spans, indexRL, prec+1, prec) {
					if prec == minPrecedence {
						return &Syntax{ Cat: span.Cat, Lit: lit, Ambit: ambit, OpAmbit: span.Ambit,
							Left: this.sparse(ambit.SubtractRight(span.Ambit), spans[:indexRL], prec),
							Right: this.sparse(ambit.SubtractLeft(span.Ambit), spans[indexRL+1:], prec+1) }
					}
					splitLoc, splitPrecedence, splitPrecLeft, splitPrecRight = indexRL, prec, prec, prec+1
				}
			}
		}
	}
	// Use the best recorded candidate, if any.
	if splitLoc >= 0 {
		splitSpan := spans[splitLoc]
		cat, lit := splitSpan.Cat, splitSpan.Lit
		if cat == "WS" {
			cat, lit = "JUXT", " "
		}
		if splitLoc == 0 {
			if cat == "OP" {
				return &Syntax{ Cat: cat, Lit: lit, Ambit: ambit, OpAmbit: splitSpan.Ambit,
					Left: &Syntax{ Ambit: splitSpan.Ambit.CollapseLeft() },
					Right: this.sparse(ambit.SubtractLeft(splitSpan.Ambit), spans[1:], splitPrecRight) }
			} else {
				return &Syntax{ Cat: "GLUE", Lit: "", Ambit: ambit, OpAmbit: splitSpan.Ambit.CollapseRight(),
					Left: this.sparse(splitSpan.Ambit, spans[:1], splitPrecLeft),
					Right: this.sparse(ambit.SubtractLeft(splitSpan.Ambit), spans[1:], splitPrecRight) }
			}
		}
		if splitLoc == l {
			if cat == "OP" {
				return &Syntax{ Cat: cat, Lit: lit, Ambit: ambit, OpAmbit: splitSpan.Ambit,
					Left: this.sparse(ambit.SubtractRight(splitSpan.Ambit), spans[:l], splitPrecLeft),
					Right: &Syntax{ Ambit: splitSpan.Ambit.CollapseRight() } }
			} else {
				return &Syntax{ Cat: "GLUE", Lit: "", Ambit: ambit, OpAmbit: splitSpan.Ambit.CollapseLeft(),
					Left: this.sparse(ambit.SubtractRight(splitSpan.Ambit), spans[:l], splitPrecLeft),
					Right: this.sparse(splitSpan.Ambit, spans[l:], splitPrecRight) }
			}
		}
		return &Syntax{ Cat: cat, Lit: lit, Ambit: ambit, OpAmbit: splitSpan.Ambit,
			Left: this.sparse(ambit.SubtractRight(splitSpan.Ambit), spans[:splitLoc], splitPrecLeft),
			Right: this.sparse(ambit.SubtractLeft(splitSpan.Ambit), spans[splitLoc+1:], splitPrecRight) }
	}
	// No candidate at all: split after the first span, as JUXT when the
	// second span is whitespace, otherwise as GLUE.
	// NOTE(review): the GLUE branch subtracts firstSpan.Ambit on the right
	// but secondSpan.Ambit on the left — confirm the asymmetry is intended.
	firstSpan, secondSpan := spans[0], spans[1]
	if secondSpan.Cat == "WS" {
		return &Syntax{ Cat: "JUXT", Lit: " ", Ambit: ambit, OpAmbit: secondSpan.Ambit,
			Left: this.sparse(ambit.SubtractRight(secondSpan.Ambit), spans[:1], minPrecedence),
			Right: this.sparse(ambit.SubtractLeft(secondSpan.Ambit), spans[2:], minPrecedence) }
	}
	return &Syntax{ Cat: "GLUE", Lit: "", Ambit: ambit, OpAmbit: secondSpan.Ambit.CollapseLeft(),
		Left: this.sparse(ambit.SubtractRight(secondSpan.Ambit), spans[:1], minPrecedence),
		Right: this.sparse(ambit.SubtractLeft(firstSpan.Ambit), spans[1:], minPrecedence) }
}
// checkLeftwardJuxtapositionCandidate reports whether the span at index can
// take a juxtaposed operand on its left: the first non-whitespace span to
// the left must either be a non-operator, or an operator none of whose
// infix/prefix precedences (AFB, BFA, EFA) reaches minPrecLeft. Only
// whitespace to the left means there is no operand at all.
func (this *sparser) checkLeftwardJuxtapositionCandidate(spans []*spanT, index int, minPrecLeft int) bool {
	for i := index - 1; i >= 0; i-- {
		span := spans[i]
		if span.Cat == "WS" {
			continue
		}
		if span.Cat != "OP" {
			return true
		}
		lit := span.Lit
		return this.precedenceAFB[lit] < minPrecLeft &&
			this.precedenceBFA[lit] < minPrecLeft &&
			this.precedenceEFA[lit] < minPrecLeft
	}
	return false
}
// checkRightwardJuxtapositionCandidate is the rightward mirror of the
// leftward check: the first non-whitespace span to the right must either be
// a non-operator, or an operator none of whose infix/suffix precedences
// (AFB, BFA, AFE) reaches minPrecRight. Only whitespace to the right means
// there is no operand at all.
func (this *sparser) checkRightwardJuxtapositionCandidate(spans []*spanT, index int, minPrecRight int) bool {
	for i := index + 1; i < len(spans); i++ {
		span := spans[i]
		if span.Cat == "WS" {
			continue
		}
		if span.Cat != "OP" {
			return true
		}
		lit := span.Lit
		return this.precedenceAFB[lit] < minPrecRight &&
			this.precedenceBFA[lit] < minPrecRight &&
			this.precedenceAFE[lit] < minPrecRight
	}
	return false
}
// checkInfixCandidate reports whether the operator at spans[index] can act
// as an infix operator here: both sides must eventually provide an operand.
// Scanning leftwards, whitespace is skipped, a non-operator span or an
// operator whose EFE precedence reaches minPrecLeft terminates the scan
// successfully, and any other operator must have an AFE precedence of at
// least minPrecLeft (the threshold then tightens to that precedence);
// running off the edge means no left operand. The rightward scan mirrors
// this with the EFE and EFA tables.
func (this *sparser) checkInfixCandidate(spans []*spanT, index int, minPrecLeft int, minPrecRight int) bool {
	// Leftward scan for an operand boundary.
	indexRL := index - 1
	for indexRL >= 0 {
		span := spans[indexRL]
		if span.Cat != "WS" {
			if span.Cat != "OP" {
				break
			}
			lit := span.Lit
			prec := this.precedenceEFE[lit]
			if prec >= minPrecLeft {
				break
			}
			prec = this.precedenceAFE[lit]
			if prec < minPrecLeft {
				return false
			}
			minPrecLeft = prec
		}
		indexRL--
	}
	if indexRL < 0 {
		// Nothing usable to the left.
		return false
	}
	// Rightward scan for an operand boundary.
	l := len(spans) - 1
	indexLR := index + 1
	for indexLR <= l {
		span := spans[indexLR]
		if span.Cat != "WS" {
			if span.Cat != "OP" {
				break
			}
			lit := span.Lit
			prec := this.precedenceEFE[lit]
			if prec >= minPrecRight {
				break
			}
			prec = this.precedenceEFA[lit]
			if prec < minPrecRight {
				return false
			}
			minPrecRight = prec
		}
		indexLR++
	}
	if indexLR > l {
		// Nothing usable to the right.
		return false
	}
	return true
}
// trimSpans strips leading and trailing whitespace spans, shrinking the
// ambit to match.
func trimSpans(ambit *Ambit, spans []*spanT) (*Ambit, []*spanT) {
	return trimSpansLeft(trimSpansRight(ambit, spans))
}

// trimSpansLeft drops whitespace spans from the front, subtracting each from
// the ambit; returns nil spans when the input was all whitespace.
func trimSpansLeft(ambit *Ambit, spans []*spanT) (*Ambit, []*spanT) {
	for index, span := range spans {
		if span.Cat != "WS" {
			return ambit, spans[index:]
		}
		ambit = ambit.SubtractLeft(span.Ambit)
	}
	return ambit, nil
}

// trimSpansRight drops whitespace spans from the back, subtracting each from
// the ambit; returns nil spans when the input was all whitespace.
func trimSpansRight(ambit *Ambit, spans []*spanT) (*Ambit, []*spanT) {
	for index := len(spans) - 1; index >= 0; index-- {
		span := spans[index]
		if span.Cat != "WS" {
			return ambit, spans[:index+1]
		}
		ambit = ambit.SubtractRight(span.Ambit)
	}
	return ambit, nil
} | sparser.go | 0.708112 | 0.407245 | sparser.go | starcoder |
package binarysearchtree
import (
"errors"
)
// Node represents a binary-search-tree node holding an int value.
// TODO: add support for any type, not just int
type Node struct {
	Value       int   // Value the node stores
	left, right *Node // pointers to the left and right children of the key
}

// Insert adds newNode to the subtree rooted at n, preserving BST order.
// A value equal to an existing one is silently ignored: the tree holds
// unique values only.
func (n *Node) Insert(newNode *Node) {
	if n.Value < newNode.Value {
		if n.right == nil {
			n.right = newNode
		} else {
			n.right.Insert(newNode)
		}
	} else if n.Value > newNode.Value {
		if n.left == nil {
			n.left = newNode
		} else {
			n.left.Insert(newNode)
		}
	}
}

// FindMin returns the smallest value in the subtree, or an error when empty.
func (n *Node) FindMin() (int, error) {
	if n == nil {
		return 0, errors.New("Empty tree has no Min element.")
	}
	for n.left != nil {
		n = n.left
	}
	return n.Value, nil
}

// FindMax returns the biggest value in the subtree, or an error when empty.
func (n *Node) FindMax() (int, error) {
	if n == nil {
		return 0, errors.New("Empty tree has no Max element.")
	}
	for n.right != nil {
		n = n.right
	}
	return n.Value, nil
}

// Delete removes element from the subtree rooted at n, returning the new
// subtree root and whether a node was actually removed. Cases:
//   - leaf: drop it;
//   - one child: splice the child into the parent;
//   - two children: overwrite the value with the minimum of the right
//     subtree, then delete that minimum from the right subtree.
func (n *Node) Delete(element int) (*Node, bool) {
	if n == nil {
		return nil, false
	}
	deleted := false
	switch {
	case n.Value < element:
		n.right, deleted = n.right.Delete(element)
	case n.Value > element:
		n.left, deleted = n.left.Delete(element)
	case n.left != nil && n.right != nil:
		// Two children: replace with the in-order successor.
		successor, _ := n.right.FindMin()
		n.Value = successor
		n.right, _ = n.right.Delete(successor)
		deleted = true
	default:
		// Zero or one child: splice.
		if n.left == nil {
			n = n.right
		} else {
			n = n.left
		}
		deleted = true
	}
	return n, deleted
}

// Walk calls f on every value of the subtree in ascending (in-order) order.
func (n *Node) Walk(f func(int)) {
	if n == nil {
		return
	}
	n.left.Walk(f)
	f(n.Value)
	n.right.Walk(f)
}

// BinarySearchTree stores the root and the number of nodes in the tree.
type BinarySearchTree struct {
	root    *Node // pointer to root of the tree
	nodeNum int   // how many nodes are currently in the tree
}

// New returns an initial, empty tree.
func New() *BinarySearchTree {
	return &BinarySearchTree{}
}

// Nodes returns how many nodes are in the tree.
func (b *BinarySearchTree) Nodes() int {
	return b.nodeNum
}

// Insert adds element to the tree; duplicate values are ignored.
//
// Bug fix: the original unconditionally incremented nodeNum even though
// Node.Insert silently drops duplicate values, so Nodes() drifted out of
// sync with the real tree size whenever an existing value was re-inserted.
func (b *BinarySearchTree) Insert(element int) {
	if b.Contains(element) {
		return
	}
	node := &Node{Value: element}
	if b.root == nil {
		b.root = node
	} else {
		b.root.Insert(node)
	}
	b.nodeNum++
}

// Walk calls f on every value in ascending order.
func (b *BinarySearchTree) Walk(f func(int)) {
	b.root.Walk(f)
}

// Contains reports whether element exists in the tree.
func (b *BinarySearchTree) Contains(element int) bool {
	return b.Find(element) != nil
}

// IsEmpty reports whether the tree has no nodes.
func (b *BinarySearchTree) IsEmpty() bool {
	return b.nodeNum == 0
}

// Find returns the node whose value equals element, or nil when absent.
func (b *BinarySearchTree) Find(element int) *Node {
	n := b.root
	for n != nil {
		switch {
		case n.Value == element:
			return n
		case n.Value < element:
			n = n.right
		default:
			n = n.left
		}
	}
	return nil
}

// FindMin returns the smallest value in the tree, or an error when empty.
func (b *BinarySearchTree) FindMin() (int, error) {
	return b.root.FindMin()
}

// FindMax returns the biggest value in the tree, or an error when empty.
func (b *BinarySearchTree) FindMax() (int, error) {
	return b.root.FindMax()
}

// Delete removes the element from the tree, if present.
func (b *BinarySearchTree) Delete(element int) {
	root, deleted := b.root.Delete(element)
	b.root = root
	if deleted {
		b.nodeNum--
	}
}
package utils
import (
"fmt"
"time"
)
// StartOfDay returns midnight at the start of date's calendar day, in
// date's own time zone.
func StartOfDay(date time.Time) time.Time {
	return FloorDate(date)
}

// StartOfToday returns midnight at the start of the current day in tz.
// (The original flattened the date twice — StartOfDay(FloorDate(...)) —
// although FloorDate is idempotent; one pass suffices.)
func StartOfToday(tz *time.Location) time.Time {
	return StartOfDay(time.Now().In(tz))
}
// EndOfDay returns midnight at the end of date's calendar day, i.e. the
// start of the following day. A date that is already exactly midnight is
// first nudged forward one second so that it, too, maps to the next
// midnight.
// NOTE(review): == is struct identity (wall clock, monotonic reading and
// location pointer), not time.Equal — a midnight value carrying a monotonic
// reading would not match; confirm callers pass wall-clock-only values.
func EndOfDay(date time.Time) time.Time {
	floored := FloorDate(date)
	if floored == date {
		date = date.Add(1 * time.Second)
	}
	return CeilDate(date)
}

// EndOfToday returns the end of the current day in tz.
func EndOfToday(tz *time.Location) time.Time {
	return EndOfDay(time.Now().In(tz))
}
// StartOfThisWeek returns the Monday starting the current ISO week in tz.
func StartOfThisWeek(tz *time.Location) time.Time {
	return StartOfWeek(time.Now().In(tz))
}

// StartOfWeek returns the Monday (at midnight, in date's location) starting
// the ISO 8601 week that contains date.
func StartOfWeek(date time.Time) time.Time {
	year, week := date.ISOWeek()
	return firstDayOfISOWeek(year, week, date.Location())
}
func StartOfThisMonth(tz *time.Location) time.Time {
return StartOfMonth(time.Now().In(tz))
}
func StartOfMonth(date time.Time) time.Time {
return time.Date(date.Year(), date.Month(), 1, 0, 0, 0, 0, date.Location())
}
func StartOfThisYear(tz *time.Location) time.Time {
return StartOfYear(time.Now().In(tz))
}
func StartOfYear(date time.Time) time.Time {
return time.Date(date.Year(), time.January, 1, 0, 0, 0, 0, date.Location())
}
// FloorDate rounds date down to the start of the day and keeps the time zone
func FloorDate(date time.Time) time.Time {
return time.Date(date.Year(), date.Month(), date.Day(), 0, 0, 0, 0, date.Location())
}
// FloorDateHour rounds date down to the start of the current hour and keeps the time zone
func FloorDateHour(date time.Time) time.Time {
return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), 0, 0, 0, date.Location())
}
// CeilDate rounds date up to the next midnight, unless it is already exactly
// at the start of a day (struct-identical to its own floor), in which case
// it is returned unchanged.
func CeilDate(date time.Time) time.Time {
	start := FloorDate(date)
	if start == date {
		return start
	}
	return start.AddDate(0, 0, 1)
}
// SetLocation returns midnight of date's calendar day stamped with tz,
// without converting the instant.
// NOTE(review): the original comment claimed the time of day is preserved
// (19:00 UTC -> 19:00 CET), but the implementation zeroes hours, minutes,
// seconds and nanoseconds, so callers always receive midnight — confirm
// which behavior is intended.
func SetLocation(date time.Time, tz *time.Location) time.Time {
	return time.Date(date.Year(), date.Month(), date.Day(), 0, 0, 0, 0, tz)
}
// WithOffset shifts date by the zone-offset difference between the local
// zone and tz and re-interprets the result in tz — e.g. 19:00 UTC becomes
// 21:00 CET (or 22:00 CEST) when running with Local=UTC.
// NOTE(review): both offsets are sampled at time.Now(), not at date, so the
// result can be off by an hour for dates on the other side of a DST
// transition — confirm this is acceptable for callers.
func WithOffset(date time.Time, tz *time.Location) time.Time {
	now := time.Now()
	_, localOffset := now.Zone()
	_, targetOffset := now.In(tz).Zone()
	dateTz := date.Add(time.Duration((targetOffset - localOffset) * int(time.Second)))
	return time.Date(dateTz.Year(), dateTz.Month(), dateTz.Day(), dateTz.Hour(), dateTz.Minute(), dateTz.Second(), dateTz.Nanosecond(), dateTz.Location()).In(tz)
}
// SplitRangeByDays partitions [from, to) into consecutive [start, end)
// intervals, each at most 24 hours long, with interior boundaries aligned to
// midnight (in from's location). Returns an empty slice when from >= to.
func SplitRangeByDays(from time.Time, to time.Time) [][]time.Time {
	intervals := make([][]time.Time, 0)
	for t1 := from; t1.Before(to); {
		// End of the current slice: the next midnight, capped at to.
		t2 := StartOfDay(t1).AddDate(0, 0, 1)
		if t2.After(to) {
			t2 = to
		}
		intervals = append(intervals, []time.Time{t1, t2})
		t1 = t2
	}
	return intervals
}
func FmtWakatimeDuration(d time.Duration) string {
d = d.Round(time.Minute)
h := d / time.Hour
d -= h * time.Hour
m := d / time.Minute
return fmt.Sprintf("%d hrs %d mins", h, m)
}
// LocalTZOffset returns the time difference between server local time and UTC
func LocalTZOffset() time.Duration {
_, offset := time.Now().Zone()
return time.Duration(offset * int(time.Second))
}
// firstDayOfISOWeek returns the Monday (at midnight, in timezone) that
// starts ISO week `week` of ISO year `year`.
// Algorithm from https://stackoverflow.com/a/18632496: time.Date normalizes
// month 0 / day 0 to 30 November of year-1, a starting point safely before
// the target week; the loops then step one day at a time.
func firstDayOfISOWeek(year int, week int, timezone *time.Location) time.Time {
	date := time.Date(year, 0, 0, 0, 0, 0, 0, timezone)
	isoYear, isoWeek := date.ISOWeek()
	for date.Weekday() != time.Monday { // iterate back to Monday
		date = date.AddDate(0, 0, -1)
		isoYear, isoWeek = date.ISOWeek()
	}
	for isoYear < year { // iterate forward to the first day of the first week
		date = date.AddDate(0, 0, 1)
		isoYear, isoWeek = date.ISOWeek()
	}
	for isoWeek < week { // iterate forward to the first day of the given week
		date = date.AddDate(0, 0, 1)
		isoYear, isoWeek = date.ISOWeek()
	}
	return date
} | utils/date.go | 0.785432 | 0.545225 | date.go | starcoder |
Package parser contains a GraphQL parser. Based on GraphQL spec June 2018.
Lexer for Source Text - @spec 2.1
Lex() is a lexer function to convert a given search query into a list of tokens.
Based on a talk by <NAME>: Lexical Scanning in Go
https://www.youtube.com/watch?v=HxaD_trXwRE
The lexer's output is pushed into a channel which is consumed by the parser.
This design enables the concurrent processing of the input text by lexer and
parser.
Parser
Parse() is a parser which produces a parse tree from a given set of lexer tokens.
Based on an article by <NAME>: Top Down Operator Precedence
http://crockford.com/javascript/tdop/tdop.html
which is based on the ideas of <NAME> and his paper: Top Down Operator Precedence
http://portal.acm.org/citation.cfm?id=512931
https://tdop.github.io/
ParseWithRuntime() parses a given input and decorates the resulting parse tree
with runtime components which can be used to interpret the parsed query.
*/
package parser
/*
LexTokenID identifies the kind of a lexer token; see the Token* constants
below. The zero value is TokenError.
*/
type LexTokenID int
/*
Available lexer token types
*/
const (
TokenError LexTokenID = iota // Lexing error token with a message as val
TokenEOF // End-of-file token
// Punctuators - @spec 2.1.8
// GraphQL documents include punctuation in order to describe structure.
// GraphQL is a data description language and not a programming language,
// therefore GraphQL lacks the punctuation often used to describe mathematical expressions.
TokenPunctuator
// Names - @spec 2.1.9
// GraphQL Documents are full of named things: operations, fields, arguments, types,
// directives, fragments, and variables. All names must follow the same grammatical
// form. Names in GraphQL are case‐sensitive. That is to say name, Name, and NAME
// all refer to different names. Underscores are significant, which means
// other_name and othername are two different names. Names in GraphQL are limited
// to this ASCII subset of possible characters to support interoperation with as
// many other systems as possible.
TokenName
// Integer value - @spec 2.9.1
// An Integer number is specified without a decimal point or exponent (ex. 1).
TokenIntValue
// Float value - @spec 2.9.2
// A Float number includes either a decimal point (ex. 1.0) or an exponent
// (ex. 1e50) or both (ex. 6.0221413e23).
TokenFloatValue
// String Value - @spec 2.9.4
// Strings are sequences of characters wrapped in double‐quotes (").
// (ex. "Hello World"). White space and other otherwise‐ignored characters are
// significant within a string value. Unicode characters are allowed within String
// value literals, however SourceCharacter must not contain some ASCII control
// characters so escape sequences must be used to represent these characters.
TokenStringValue
)
/*
Available parser AST node types
*/
const (
NodeAlias = "Alias"
NodeArgument = "Argument"
NodeArguments = "Arguments"
NodeDefaultValue = "DefaultValue"
NodeDirective = "Directive"
NodeDirectives = "Directives"
NodeDocument = "Document"
NodeEnumValue = "EnumValue"
NodeEOF = "EOF"
NodeExecutableDefinition = "ExecutableDefinition"
NodeField = "Field"
NodeFragmentDefinition = "FragmentDefinition"
NodeFragmentName = "FragmentName"
NodeFragmentSpread = "FragmentSpread"
NodeInlineFragment = "InlineFragment"
NodeListValue = "ListValue"
NodeName = "Name"
NodeObjectField = "ObjectField"
NodeObjectValue = "ObjectValue"
NodeOperationDefinition = "OperationDefinition"
NodeOperationType = "OperationType"
NodeSelectionSet = "SelectionSet"
NodeType = "Type"
NodeTypeCondition = "TypeCondition"
NodeValue = "Value"
NodeVariable = "Variable"
NodeVariableDefinition = "VariableDefinition"
NodeVariableDefinitions = "VariableDefinitions"
)
/*
ValueNodes lists the AST node types whose node carries a significant value
(a name, literal, type, etc.) rather than acting purely as a structural
marker.
*/
var ValueNodes = []string{
	NodeAlias,
	NodeDefaultValue,
	NodeEnumValue,
	NodeFragmentName,
	NodeFragmentSpread,
	NodeName,
	NodeObjectField,
	NodeOperationType,
	NodeType,
	NodeTypeCondition,
	NodeValue,
	NodeVariable,
} | lang/graphql/parser/const.go | 0.735547 | 0.762866 | const.go | starcoder |
package sorted
import (
bin "encoding/binary"
"reflect"
"sort"
"github.com/kelindar/binary"
)
// IntsCodecAs returns an int slice codec with the specified precision and
// type. sliceType is the concrete slice type to decode into; sizeOfInt
// (bytes per element) is used only to pre-size the encoding buffer.
func IntsCodecAs(sliceType reflect.Type, sizeOfInt int) binary.Codec {
	return &intSliceCodec{
		sliceType: sliceType,
		sizeOfInt: sizeOfInt,
	}
}

// intSliceCodec encodes signed-integer slices as a byte-length prefix
// followed by signed varint deltas between consecutive (sorted) elements.
type intSliceCodec struct {
	sliceType reflect.Type
	sizeOfInt int
}

// EncodeTo encodes a value into the encoder. The slice is sorted in place
// first (this mutates the caller's slice), then each element is written as a
// signed varint delta from its predecessor.
// NOTE(review): errors from e.WriteUvarint/e.Write are not propagated — err
// is always nil; confirm the Encoder surfaces write failures elsewhere.
func (c *intSliceCodec) EncodeTo(e *binary.Encoder, rv reflect.Value) (err error) {
	sort.Sort(rv.Interface().(sort.Interface))
	prev := int64(0)
	temp := make([]byte, 10)
	bytes := make([]byte, 0, c.sizeOfInt*rv.Len())
	for i := 0; i < rv.Len(); i++ {
		curr := rv.Index(i).Int()
		diff := curr - prev
		bytes = append(bytes, temp[:bin.PutVarint(temp, diff)]...)
		prev = curr
	}
	e.WriteUvarint(uint64(len(bytes)))
	e.Write(bytes)
	return
}

// DecodeTo decodes into a reflect value from the decoder: it reads the byte
// length, then rebuilds the elements by accumulating the varint deltas,
// converting each running total to the slice's element type.
func (c *intSliceCodec) DecodeTo(d *binary.Decoder, rv reflect.Value) (err error) {
	var l uint64
	var b []byte
	if l, err = d.ReadUvarint(); err == nil && l > 0 {
		if b, err = d.Slice(int(l)); err == nil {
			// Create a new slice and figure out its element type
			elemType := c.sliceType.Elem()
			slice := reflect.MakeSlice(c.sliceType, 0, 64)
			// Iterate through and uncompress
			prev := int64(0)
			for i := 0; i < len(b); {
				diff, n := bin.Varint(b[i:])
				prev = prev + diff
				slice = reflect.Append(slice, reflect.ValueOf(prev).Convert(elemType))
				i += n
			}
			rv.Set(slice)
		}
	}
	return
}
// ------------------------------------------------------------------------------
// UintsCodecAs returns an uint slice codec with the specified precision and
// type; the unsigned counterpart of IntsCodecAs.
func UintsCodecAs(sliceType reflect.Type, sizeOfInt int) binary.Codec {
	return &uintSliceCodec{
		sliceType: sliceType,
		sizeOfInt: sizeOfInt,
	}
}

// uintSliceCodec encodes unsigned-integer slices as a byte-length prefix
// followed by unsigned varint deltas; sorting first guarantees the deltas
// are non-negative.
type uintSliceCodec struct {
	sliceType reflect.Type
	sizeOfInt int
}

// EncodeTo encodes a value into the encoder: sorts the slice in place
// (mutating the caller's slice), then writes each element as an unsigned
// varint delta from its predecessor.
// NOTE(review): encoder write errors are not propagated; err is always nil.
func (c *uintSliceCodec) EncodeTo(e *binary.Encoder, rv reflect.Value) (err error) {
	sort.Sort(rv.Interface().(sort.Interface))
	prev := uint64(0)
	temp := make([]byte, 10)
	bytes := make([]byte, 0, c.sizeOfInt*rv.Len())
	for i := 0; i < rv.Len(); i++ {
		curr := rv.Index(i).Uint()
		diff := curr - prev
		bytes = append(bytes, temp[:bin.PutUvarint(temp, diff)]...)
		prev = curr
	}
	e.WriteUvarint(uint64(len(bytes)))
	e.Write(bytes)
	return
}

// DecodeTo decodes into a reflect value from the decoder by accumulating the
// unsigned varint deltas back into absolute values.
func (c *uintSliceCodec) DecodeTo(d *binary.Decoder, rv reflect.Value) (err error) {
	var l uint64
	var b []byte
	if l, err = d.ReadUvarint(); err == nil && l > 0 {
		if b, err = d.Slice(int(l)); err == nil {
			// Create a new slice and figure out its element type
			elemType := c.sliceType.Elem()
			slice := reflect.MakeSlice(c.sliceType, 0, 64)
			// Iterate through and uncompress
			prev := uint64(0)
			for i := 0; i < len(b); {
				diff, n := bin.Uvarint(b[i:])
				prev = prev + diff
				slice = reflect.Append(slice, reflect.ValueOf(prev).Convert(elemType))
				i += n
			}
			rv.Set(slice)
		}
	}
	return
}
// ------------------------------------------------------------------------------
type timestampCodec struct{}
// EncodeTo encodes a value into the encoder.
// Timestamps are sorted ascending, then stored as an element count, a byte
// length, and a sequence of uvarint deltas between consecutive values.
func (c timestampCodec) EncodeTo(e *binary.Encoder, rv reflect.Value) (err error) {
	data := rv.Interface().(Timestamps)
	// Sort ascending so the deltas are small and non-negative.
	if !sort.IsSorted(Uint64s(data)) {
		sort.Sort(Uint64s(data))
	}
	temp := make([]byte, 10) // scratch; 10 bytes is the max uvarint size
	buffer := make([]byte, 0, 2*len(data)) // ~1-2 bytes per timestamp
	prev := uint64(0)
	for _, curr := range data {
		diff := curr - prev
		prev = curr
		buffer = append(buffer, temp[:bin.PutUvarint(temp, uint64(diff))]...)
	}
	// Write the size and the buffer
	e.WriteUvarint(uint64(len(data)))
	e.WriteUvarint(uint64(len(buffer)))
	e.Write(buffer)
	return
}
// DecodeTo decodes into a reflect value from the decoder.
// Reverses EncodeTo: reads the element count and byte length, then
// accumulates the uvarint deltas back into absolute timestamps.
func (timestampCodec) DecodeTo(d *binary.Decoder, rv reflect.Value) error {
	// Read the number of timestamps
	count, err := d.ReadUvarint()
	if err != nil {
		return err
	}
	// Read the size in bytes
	size, err := d.ReadUvarint()
	if err != nil {
		return err
	}
	// Read the timestamp buffer
	buffer, err := d.Slice(int(size))
	if err != nil {
		return err
	}
	// Read the timestamps
	slice := make(Timestamps, 0, count)
	prev := uint64(0)
	for i := 0; i < int(size); {
		diff, n := bin.Uvarint(buffer[i:])
		prev = prev + diff
		slice = append(slice, uint64(prev))
		i += n
	}
	rv.Set(reflect.ValueOf(slice))
	return nil
} | sorted/codecs.go | 0.772359 | 0.441252 | codecs.go | starcoder
package fp
// ZeroIntP returns true if v is zero, else false.
func ZeroIntP(v int) bool {
	return v == 0
}

// ZeroIntPPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroIntPPtr(v *int) bool {
	return *v == 0
}

// ZeroInt64P returns true if v is zero, else false.
func ZeroInt64P(v int64) bool {
	return v == 0
}

// ZeroInt64PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroInt64PPtr(v *int64) bool {
	return *v == 0
}
// ZeroInt32P returns true if v is zero, else false.
func ZeroInt32P(v int32) bool {
	return v == 0
}

// ZeroInt32PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroInt32PPtr(v *int32) bool {
	return *v == 0
}

// ZeroInt16P returns true if v is zero, else false.
func ZeroInt16P(v int16) bool {
	return v == 0
}

// ZeroInt16PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroInt16PPtr(v *int16) bool {
	return *v == 0
}

// ZeroInt8P returns true if v is zero, else false.
func ZeroInt8P(v int8) bool {
	return v == 0
}

// ZeroInt8PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroInt8PPtr(v *int8) bool {
	return *v == 0
}
// ZeroUintP returns true if v is zero, else false.
func ZeroUintP(v uint) bool {
	return v == 0
}

// ZeroUintPPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroUintPPtr(v *uint) bool {
	return *v == 0
}

// ZeroUint64P returns true if v is zero, else false.
func ZeroUint64P(v uint64) bool {
	return v == 0
}

// ZeroUint64PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroUint64PPtr(v *uint64) bool {
	return *v == 0
}

// ZeroUint32P returns true if v is zero, else false.
func ZeroUint32P(v uint32) bool {
	return v == 0
}

// ZeroUint32PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroUint32PPtr(v *uint32) bool {
	return *v == 0
}
// ZeroUint16P returns true if v is zero, else false.
func ZeroUint16P(v uint16) bool {
	return v == 0
}

// ZeroUint16PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroUint16PPtr(v *uint16) bool {
	return *v == 0
}

// ZeroUint8P returns true if v is zero, else false.
func ZeroUint8P(v uint8) bool {
	return v == 0
}

// ZeroUint8PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroUint8PPtr(v *uint8) bool {
	return *v == 0
}
// ZeroFloat32P returns true if v is zero, else false.
// Both +0 and -0 compare equal to zero.
func ZeroFloat32P(v float32) bool {
	return v == 0
}

// ZeroFloat32PPtr returns true if *v is zero, else false.
// The pointer must be non-nil; a nil pointer panics.
func ZeroFloat32PPtr(v *float32) bool {
	return *v == 0
}

// ZeroFloat64P returns true if v is zero, else false.
// Both +0 and -0 compare equal to zero.
func ZeroFloat64P(v float64) bool {
	return v == 0
}
// ZeroFloat64PPtr Returns true if num is zero, else false
// The pointer must be non-nil; dereferencing a nil pointer panics.
func ZeroFloat64PPtr(v *float64) bool {
	if *v == 0 {
		return true
	}
	return false
} | fp/zero.go | 0.773986 | 0.468487 | zero.go | starcoder
package cal
import "time"
// Holidays in Sweden
// Reference https://sv.wikipedia.org/wiki/Helgdagar_i_Sverige
// Days with the [2] notation, meaning days with reduced working hours
// haven't been added, as this is not regulated by law.
var (
	seNyarsdagen           = newYear.SetLabel("Nyarsdagen")
	seTrettondedagJul      = NewHoliday(time.January, 6).SetLabel("Trettondedag Jul")
	seLangfredagen         = goodFriday.SetLabel("Langfredagen")
	sePaskdagen            = NewHolidayFunc(calculatePaskdagen).SetLabel("Paskdagen")
	seAnnandagPask         = easterMonday.SetLabel("Annandag Pask")
	seForstaMaj            = NewHoliday(time.May, 1).SetLabel("Forsta Maj")
	seKristiHimmelfardsdag = NewHolidayFunc(calculateKristiHimmelfardsdag).SetLabel("Kristi Himmelfardsdag")
	sePingstdagen          = NewHolidayFunc(calculatePingstdagen).SetLabel("Pingstdagen")
	seNationaldagen        = NewHoliday(time.June, 6).SetLabel("Nationaldagen")
	seMidsommarafton       = NewHolidayFunc(calculateMidsommarafton).SetLabel("Midsommarafton")
	seMidsommardagen       = NewHolidayFunc(calculateMidsommardagen).SetLabel("Midsommardagen")
	seAllaHelgonsDag       = NewHolidayFunc(calculateAllaHelgonsDag).SetLabel("Alla helgons dag")
	seJulafton             = NewHoliday(time.December, 24).SetLabel("Julafton")
	seJuldagen             = christmas.SetLabel("Juldagen")
	// Restored label: the source contained the anonymization placeholder "<NAME>";
	// the variable name and the surrounding list identify Boxing Day ("Annandag Jul").
	seAnnandagJul = christmas2.SetLabel("Annandag Jul")
	seNewYearsEve = NewHoliday(time.December, 31)
)
// addSwedishHolidays adds all Swedish holidays to the Calendar
// NOTE(review): seNewYearsEve is the only entry registered without a label —
// confirm this is intentional.
func addSwedishHolidays(c *Calendar) {
	c.AddHoliday(
		seNyarsdagen,
		seTrettondedagJul,
		seLangfredagen,
		sePaskdagen,
		seAnnandagPask,
		seForstaMaj,
		seKristiHimmelfardsdag,
		sePingstdagen,
		seNationaldagen,
		seMidsommarafton,
		seMidsommardagen,
		seAllaHelgonsDag,
		seJulafton,
		seJuldagen,
		seAnnandagJul,
		seNewYearsEve,
	)
}
// calculatePaskdagen returns the month and day of Easter Sunday.
func calculatePaskdagen(year int, loc *time.Location) (time.Month, int) {
	d := calculateEaster(year, loc)
	return d.Month(), d.Day()
}
// calculateKristiHimmelfardsdag returns Ascension Day, which falls
// 39 days after Easter Sunday.
func calculateKristiHimmelfardsdag(year int, loc *time.Location) (time.Month, int) {
	d := calculateEaster(year, loc).AddDate(0, 0, 39)
	return d.Month(), d.Day()
}
// calculatePingstdagen returns Pentecost (Whit Sunday).
func calculatePingstdagen(year int, loc *time.Location) (time.Month, int) {
	easter := calculateEaster(year, loc)
	// 49 days after Easter Sunday (the seventh Sunday after Easter)
	em := easter.AddDate(0, 0, +49)
	return em.Month(), em.Day()
}
func calculateMidsommarafton(year int, loc *time.Location) (time.Month, int) {
t := time.Date(year, 6, 25, 0, 0, 0, 0, loc)
for i := -1; i > -6; i-- {
d := t.Add(time.Hour * 24 * time.Duration(i))
if d.Weekday() == time.Friday {
t = d
break
}
}
return t.Month(), t.Day()
}
func calculateMidsommardagen(year int, loc *time.Location) (time.Month, int) {
t := time.Date(year, 6, 26, 0, 0, 0, 0, loc)
for i := -1; i > -6; i-- {
d := t.Add(time.Hour * 24 * time.Duration(i))
if d.Weekday() == time.Saturday {
t = d
break
}
}
return t.Month(), t.Day()
}
// calculateAllaHelgonsDag returns All Saints' Day: the Saturday that falls
// between October 31 and November 6. The -7 bound covers the full 7-day
// window (six predecessors plus the November 6 starting day), so exactly one
// candidate is a Saturday.
func calculateAllaHelgonsDag(year int, loc *time.Location) (time.Month, int) {
	t := time.Date(year, 11, 6, 0, 0, 0, 0, loc)
	for i := -1; i > -7; i-- {
		d := t.Add(time.Hour * 24 * time.Duration(i))
		if d.Weekday() == time.Saturday {
			t = d
			break
		}
	}
	return t.Month(), t.Day()
} | v2/holiday_defs_se.go | 0.573678 | 0.431464 | holiday_defs_se.go | starcoder
package ptp
import (
"encoding/json"
"fmt"
"math"
"sort"
)
// Maximum number of decimal places used when formatting each legend label.
const (
	maxDecimalsFeedrate = 1
	maxDecimalsFanSpeed = 0
	maxDecimalsTemperature = 1
	maxDecimalsLayerHeight = 4
)
// bufferData locates one buffer section inside the encoded file.
type bufferData struct {
	Offset int `json:"offset"` // byte offset from the start of the file
	Size int `json:"size"` // section length in bytes
}
// legendHeader records the format version plus the offset and size of every
// buffer section in the encoded file.
type legendHeader struct {
	Version int `json:"version"`
	Position bufferData `json:"position"`
	Normal bufferData `json:"normal"`
	Index bufferData `json:"index"`
	ExtrusionWidth bufferData `json:"extrusionWidth"`
	LayerHeight bufferData `json:"layerHeight"`
	TravelPosition bufferData `json:"travelPosition"`
	ToolColor bufferData `json:"toolColor"`
	PathTypeColor bufferData `json:"pathTypeColor"`
	FeedrateColor bufferData `json:"feedrateColor"`
	FanSpeedColor bufferData `json:"fanSpeedColor"`
	TemperatureColor bufferData `json:"temperatureColor"`
	LayerHeightColor bufferData `json:"layerHeightColor"`
}
// getLegendHeader builds the header: the format version plus the size and
// byte offset of every buffer section. Sections are laid out back-to-back
// starting right after the fixed-size header, in this exact order.
// (Fix: the offset chain was semicolon-joined on shared lines, which is not
// gofmt-clean; it is now one statement per line.)
func (w *Writer) getLegendHeader() legendHeader {
	header := legendHeader{
		Version: int(w.version),
		Position: bufferData{Offset: 0, Size: w.bufferSizes["position"]},
		Normal: bufferData{Offset: 0, Size: w.bufferSizes["normal"]},
		Index: bufferData{Offset: 0, Size: w.bufferSizes["index"]},
		ExtrusionWidth: bufferData{Offset: 0, Size: w.bufferSizes["extrusionWidth"]},
		LayerHeight: bufferData{Offset: 0, Size: w.bufferSizes["layerHeight"]},
		TravelPosition: bufferData{Offset: 0, Size: w.bufferSizes["travelPosition"]},
		ToolColor: bufferData{Offset: 0, Size: w.bufferSizes["toolColor"]},
		PathTypeColor: bufferData{Offset: 0, Size: w.bufferSizes["pathTypeColor"]},
		FeedrateColor: bufferData{Offset: 0, Size: w.bufferSizes["feedrateColor"]},
		FanSpeedColor: bufferData{Offset: 0, Size: w.bufferSizes["fanSpeedColor"]},
		TemperatureColor: bufferData{Offset: 0, Size: w.bufferSizes["temperatureColor"]},
		LayerHeightColor: bufferData{Offset: 0, Size: w.bufferSizes["layerHeightColor"]},
	}
	offset := headerSize
	header.Position.Offset = offset
	offset += w.bufferSizes["position"]
	header.Normal.Offset = offset
	offset += w.bufferSizes["normal"]
	header.Index.Offset = offset
	offset += w.bufferSizes["index"]
	header.ExtrusionWidth.Offset = offset
	offset += w.bufferSizes["extrusionWidth"]
	header.LayerHeight.Offset = offset
	offset += w.bufferSizes["layerHeight"]
	header.TravelPosition.Offset = offset
	return header
}
// legendColors carries the endpoint colors used by the UI to interpolate
// gradient coloring for each numeric property.
type legendColors struct {
	MinFeedrateColor [3]float32 `json:"minFeedrateColor"`
	MaxFeedrateColor [3]float32 `json:"maxFeedrateColor"`
	MinFanSpeedColor [3]float32 `json:"minFanSpeedColor"`
	MaxFanSpeedColor [3]float32 `json:"maxFanSpeedColor"`
	MinTemperatureColor [3]float32 `json:"minTemperatureColor"`
	MaxTemperatureColor [3]float32 `json:"maxTemperatureColor"`
	MinLayerHeightColor [3]float32 `json:"minLayerHeightColor"`
	MaxLayerHeightColor [3]float32 `json:"maxLayerHeightColor"`
}
// getLegendColors collects the package-level min/max gradient colors into a
// single serializable struct.
func getLegendColors() legendColors {
	return legendColors{
		MinFeedrateColor: feedrateColorMin,
		MaxFeedrateColor: feedrateColorMax,
		MinFanSpeedColor: fanColorMin,
		MaxFanSpeedColor: fanColorMax,
		MinTemperatureColor: temperatureColorMin,
		MaxTemperatureColor: temperatureColorMax,
		MinLayerHeightColor: layerHeightColorMin,
		MaxLayerHeightColor: layerHeightColorMax,
	}
}
// legendEntry is a single (label, color) pair in a legend listing.
type legendEntry struct {
	Label string // human-readable label shown in the UI
	Color string // hex color string
}

// MarshalJSON encodes the entry as a two-element JSON array: [label, color].
func (l *legendEntry) MarshalJSON() ([]byte, error) {
	return json.Marshal([2]string{l.Label, l.Color})
}
// legend aggregates everything the UI needs to render legends and sliders.
type legend struct {
	Header legendHeader `json:"header"` // header data (version, buffer offsets and sizes)
	Colors legendColors `json:"colors"` // max/min colors for interpolated coloring
	Tool []legendEntry `json:"tool"` // legend of tools seen
	PathType []legendEntry `json:"pathType"` // legend of path types seen
	Feedrate []legendEntry `json:"feedrate"` // legend of feedrates -- needs gradation
	FanSpeed []legendEntry `json:"fanSpeed"` // legend of fan speeds -- possible gradation
	Temperature []legendEntry `json:"temperature"` // legend of temperatures -- needs gradation
	LayerHeight []legendEntry `json:"layerHeight"` // legend of layer heights -- needs gradation
	ZValues []float32 `json:"zValues"` // Z values for UI sliders
}
// removeDuplicateLegendEntries drops entries whose Label was already seen,
// preserving first-occurrence order. Needed because rounding can make
// distinct values format to the same label.
func removeDuplicateLegendEntries(legend []legendEntry) []legendEntry {
	seen := make(map[string]struct{}, len(legend))
	result := make([]legendEntry, 0, len(legend))
	for _, entry := range legend {
		if _, dup := seen[entry.Label]; dup {
			continue
		}
		seen[entry.Label] = struct{}{}
		result = append(result, entry)
	}
	return result
}
// getToolLegend builds one legend entry per tool seen, in ascending tool
// order, colored with the writer's configured per-tool colors.
func (w *Writer) getToolLegend() []legendEntry {
	toolsSeen := make([]int, 0, len(w.state.toolsSeen))
	for tool := range w.state.toolsSeen {
		toolsSeen = append(toolsSeen, tool)
	}
	// Sort for a deterministic legend order (map iteration is random).
	sort.Ints(toolsSeen)
	legend := make([]legendEntry, 0, len(w.state.toolsSeen))
	for _, tool := range toolsSeen {
		legend = append(legend, legendEntry{
			Label: fmt.Sprintf("Tool %d", tool),
			Color: floatsToHex(w.toolColors[tool][0], w.toolColors[tool][1], w.toolColors[tool][2]),
		})
	}
	return legend
}
// getPathTypeLegend builds one legend entry per path type seen, in the fixed
// PathType enumeration order.
func (w *Writer) getPathTypeLegend() []legendEntry {
	legend := make([]legendEntry, 0)
	for i := PathType(0); i < pathTypeCount; i++ {
		if _, ok := w.state.pathTypesSeen[i]; ok {
			name := pathTypeNames[i]
			// The brim path type is relabeled "Skirt" when the writer was
			// told the brim actually represents a skirt.
			if i == PathTypeBrim {
				if w.brimIsSkirt {
					name = "Skirt"
				} else {
					name = "Brim"
				}
			}
			legend = append(legend, legendEntry{
				Label: name,
				Color: pathTypeColorStrings[i],
			})
		}
	}
	return legend
}
// getFeedrateLegend builds legend entries for the feedrates seen: one entry
// per distinct value (up to six), otherwise a 7-entry gradient between the
// minimum and maximum feedrate.
// (Fix: when only one distinct feedrate was seen, max == min and the
// interpolation factor was 0/0 = NaN, producing NaN colors. Guarded the same
// way getTemperatureLegend already does.)
func (w *Writer) getFeedrateLegend() []legendEntry {
	feedratesSeen := make([]float32, 0, len(w.state.feedratesSeen))
	for feedrate := range w.state.feedratesSeen {
		feedratesSeen = append(feedratesSeen, feedrate)
	}
	// Sort for a deterministic legend order (map iteration is random).
	sortFloat32Slice(feedratesSeen)
	legend := make([]legendEntry, 0, len(feedratesSeen))
	if len(feedratesSeen) <= 6 {
		for _, feedrate := range feedratesSeen {
			var r, g, b float32
			if w.maxFeedrate == w.minFeedrate {
				// Single distinct feedrate: avoid a 0/0 NaN; use the max
				// color, mirroring getTemperatureLegend.
				r = feedrateColorMax[0]
				g = feedrateColorMax[1]
				b = feedrateColorMax[2]
			} else {
				t := (feedrate - w.minFeedrate) / (w.maxFeedrate - w.minFeedrate)
				r = lerp(feedrateColorMin[0], feedrateColorMax[0], t)
				g = lerp(feedrateColorMin[1], feedrateColorMax[1], t)
				b = lerp(feedrateColorMin[2], feedrateColorMax[2], t)
			}
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s mm/min", prepareFloatForJSON(feedrate, maxDecimalsFeedrate)),
				Color: floatsToHex(r, g, b),
			})
		}
	} else {
		// More than six distinct values implies max > min, so the division
		// below is safe.
		step := float32(math.Round(float64(w.maxFeedrate - w.minFeedrate) / 6))
		for i := 0; i < 6; i++ {
			feedrate := (float32(i) * step) + w.minFeedrate
			t := float32(i) / 5
			r := lerp(feedrateColorMin[0], feedrateColorMax[0], t)
			g := lerp(feedrateColorMin[1], feedrateColorMax[1], t)
			b := lerp(feedrateColorMin[2], feedrateColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s mm/min", prepareFloatForJSON(feedrate, maxDecimalsFeedrate)),
				Color: floatsToHex(r, g, b),
			})
		}
		legend = append(legend, legendEntry{
			Label: fmt.Sprintf("%s mm/min", prepareFloatForJSON(w.maxFeedrate, maxDecimalsFeedrate)),
			Color: floatsToHex(feedrateColorMax[0], feedrateColorMax[1], feedrateColorMax[2]),
		})
	}
	// de-duplicate legend entries with labels that are identical after rounding
	return removeDuplicateLegendEntries(legend)
}
// getFanSpeedLegend builds legend entries for the fan speeds seen (stored as
// 0-255 PWM values). Pure on/off usage is special-cased to "Off"/"On";
// otherwise each distinct speed is listed as a percentage, or a 7-entry
// gradient is produced when more than six speeds were seen.
func (w *Writer) getFanSpeedLegend() []legendEntry {
	fanSpeedsSeen := make([]int, 0, len(w.state.fanSpeedsSeen))
	for pwmValue := range w.state.fanSpeedsSeen {
		fanSpeedsSeen = append(fanSpeedsSeen, pwmValue)
	}
	// Sort for a deterministic legend order (map iteration is random).
	sort.Ints(fanSpeedsSeen)
	legend := make([]legendEntry, 0, len(fanSpeedsSeen))
	if len(fanSpeedsSeen) == 1 && fanSpeedsSeen[0] == 0 {
		legend = append(legend, legendEntry{
			Label: "Off",
			Color: floatsToHex(fanColorMin[0], fanColorMin[1], fanColorMin[2]),
		})
	} else if len(fanSpeedsSeen) == 1 && fanSpeedsSeen[0] == 255 {
		legend = append(legend, legendEntry{
			Label: "On",
			Color: floatsToHex(fanColorMax[0], fanColorMax[1], fanColorMax[2]),
		})
	} else if len(fanSpeedsSeen) == 2 &&
		((fanSpeedsSeen[0] == 0 && fanSpeedsSeen[1] == 255) ||
			(fanSpeedsSeen[0] == 255 && fanSpeedsSeen[1] == 0)) {
		legend = append(legend, legendEntry{
			Label: "Off",
			Color: floatsToHex(fanColorMin[0], fanColorMin[1], fanColorMin[2]),
		}, legendEntry{
			Label: "On",
			Color: floatsToHex(fanColorMax[0], fanColorMax[1], fanColorMax[2]),
		})
	} else if len(fanSpeedsSeen) <= 6 {
		for _, pwmValue := range fanSpeedsSeen {
			t := float32(pwmValue) / 255
			percent := float32(math.Max(0, math.Min(100, math.Round(float64(pwmValue) * 100) / 255)))
			r := lerp(fanColorMin[0], fanColorMax[0], t)
			g := lerp(fanColorMin[1], fanColorMax[1], t)
			b := lerp(fanColorMin[2], fanColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s%%", prepareFloatForJSON(percent, maxDecimalsFanSpeed)),
				Color: floatsToHex(r, g, b),
			})
		}
	} else {
		// NOTE(review): 255 / 6 is constant integer division (= 42), so
		// math.Round receives 42, not 42.5; steps top out at ~82% before
		// the final "100%" entry. Confirm whether 255.0/6 was intended.
		step := float32(math.Round(255 / 6))
		for i := 0; i < 6; i++ {
			pwmValue := float32(i) * step
			t := float32(i) / 5
			percent := float32(math.Round(float64(pwmValue) * 100 * 10 / 255) / 10)
			r := lerp(fanColorMin[0], fanColorMax[0], t)
			g := lerp(fanColorMin[1], fanColorMax[1], t)
			b := lerp(fanColorMin[2], fanColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s%%", prepareFloatForJSON(percent, maxDecimalsFanSpeed)),
				Color: floatsToHex(r, g, b),
			})
		}
		legend = append(legend, legendEntry{
			Label: "100%",
			Color: floatsToHex(fanColorMax[0], fanColorMax[1], fanColorMax[2]),
		})
	}
	// de-duplicate legend entries with labels that are identical after rounding
	return removeDuplicateLegendEntries(legend)
}
// getTemperatureLegend builds legend entries for the temperatures seen: one
// entry per distinct value (up to six), otherwise a 7-entry gradient between
// the minimum and maximum temperature.
func (w *Writer) getTemperatureLegend() []legendEntry {
	temperaturesSeen := make([]float32, 0, len(w.state.temperaturesSeen))
	for temperature := range w.state.temperaturesSeen {
		temperaturesSeen = append(temperaturesSeen, temperature)
	}
	// Sort for a deterministic legend order (map iteration is random).
	sortFloat32Slice(temperaturesSeen)
	legend := make([]legendEntry, 0, len(temperaturesSeen))
	if len(temperaturesSeen) <= 6 {
		for _, temperature := range temperaturesSeen {
			var r, g, b float32
			// Guard against 0/0 when only one distinct temperature was seen.
			if w.maxTemperature == w.minTemperature {
				r = temperatureColorMax[0]
				g = temperatureColorMax[1]
				b = temperatureColorMax[2]
			} else {
				t := (temperature - w.minTemperature) / (w.maxTemperature - w.minTemperature)
				r = lerp(temperatureColorMin[0], temperatureColorMax[0], t)
				g = lerp(temperatureColorMin[1], temperatureColorMax[1], t)
				b = lerp(temperatureColorMin[2], temperatureColorMax[2], t)
			}
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s °C", prepareFloatForJSON(temperature, maxDecimalsTemperature)),
				Color: floatsToHex(r, g, b),
			})
		}
	} else {
		// More than six distinct values implies max > min.
		step := float32(math.Round(float64(w.maxTemperature - w.minTemperature) / 6))
		for i := 0; i < 6; i++ {
			temperature := (float32(i) * step) + w.minTemperature
			t := float32(i) / 5
			r := lerp(temperatureColorMin[0], temperatureColorMax[0], t)
			g := lerp(temperatureColorMin[1], temperatureColorMax[1], t)
			b := lerp(temperatureColorMin[2], temperatureColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s °C", prepareFloatForJSON(temperature, maxDecimalsTemperature)),
				Color: floatsToHex(r, g, b),
			})
		}
		legend = append(legend, legendEntry{
			Label: fmt.Sprintf("%s °C", prepareFloatForJSON(w.maxTemperature, maxDecimalsTemperature)),
			Color: floatsToHex(temperatureColorMax[0], temperatureColorMax[1], temperatureColorMax[2]),
		})
	}
	// de-duplicate legend entries with labels that are identical after rounding
	return removeDuplicateLegendEntries(legend)
}
// getLayerHeightLegend builds legend entries for the layer heights seen: a
// single entry for one distinct value, one entry per value up to six (two or
// more distinct values guarantee max > min for the interpolation), otherwise
// a 7-entry gradient between the minimum and maximum layer height.
func (w *Writer) getLayerHeightLegend() []legendEntry {
	layerHeightsSeen := make([]float32, 0, len(w.state.layerHeightsSeen))
	for layerHeight := range w.state.layerHeightsSeen {
		layerHeightsSeen = append(layerHeightsSeen, layerHeight)
	}
	// Sort for a deterministic legend order (map iteration is random).
	sortFloat32Slice(layerHeightsSeen)
	legend := make([]legendEntry, 0, len(layerHeightsSeen))
	if len(layerHeightsSeen) == 1 {
		legend = []legendEntry{
			{
				Label: fmt.Sprintf("%s mm", prepareFloatForJSON(layerHeightsSeen[0], maxDecimalsLayerHeight)),
				Color: floatsToHex(layerHeightColorMax[0], layerHeightColorMax[1], layerHeightColorMax[2]),
			},
		}
	} else if len(layerHeightsSeen) <= 6 {
		for _, layerHeight := range layerHeightsSeen {
			t := (layerHeight - w.minLayerHeight) / (w.maxLayerHeight - w.minLayerHeight)
			r := lerp(layerHeightColorMin[0], layerHeightColorMax[0], t)
			g := lerp(layerHeightColorMin[1], layerHeightColorMax[1], t)
			b := lerp(layerHeightColorMin[2], layerHeightColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s mm", prepareFloatForJSON(layerHeight, maxDecimalsLayerHeight)),
				Color: floatsToHex(r, g, b),
			})
		}
	} else {
		// Step is rounded to 3 decimal places (millimeter resolution of 1 µm).
		step := float32(math.Round(float64(w.maxLayerHeight - w.minLayerHeight) * 1000 / 6) / 1000)
		for i := 0; i < 6; i++ {
			layerHeight := (float32(i) * step) + w.minLayerHeight
			t := float32(i) / 5
			r := lerp(layerHeightColorMin[0], layerHeightColorMax[0], t)
			g := lerp(layerHeightColorMin[1], layerHeightColorMax[1], t)
			b := lerp(layerHeightColorMin[2], layerHeightColorMax[2], t)
			legend = append(legend, legendEntry{
				Label: fmt.Sprintf("%s mm", prepareFloatForJSON(layerHeight, maxDecimalsLayerHeight)),
				Color: floatsToHex(r, g, b),
			})
		}
		legend = append(legend, legendEntry{
			Label: fmt.Sprintf("%s mm", prepareFloatForJSON(w.maxLayerHeight, maxDecimalsLayerHeight)),
			Color: floatsToHex(layerHeightColorMax[0], layerHeightColorMax[1], layerHeightColorMax[2]),
		})
	}
	// de-duplicate legend entries with labels that are identical after rounding
	return removeDuplicateLegendEntries(legend)
}
// getZValues returns the distinct Z heights seen, sorted ascending, for the
// UI layer sliders.
func (w *Writer) getZValues() []float32 {
	zSeen := make([]float32, 0, len(w.state.zSeen))
	for z := range w.state.zSeen {
		zSeen = append(zSeen, z)
	}
	sortFloat32Slice(zSeen)
	return zSeen
}
// getLegend assembles the complete legend structure and returns it as JSON.
// NOTE(review): the local variable shadows the legend type within this scope.
func (w *Writer) getLegend() ([]byte, error) {
	legend := legend{
		Header: w.getLegendHeader(),
		Colors: getLegendColors(),
		Tool: w.getToolLegend(),
		PathType: w.getPathTypeLegend(),
		Feedrate: w.getFeedrateLegend(),
		FanSpeed: w.getFanSpeedLegend(),
		Temperature: w.getTemperatureLegend(),
		LayerHeight: w.getLayerHeightLegend(),
		ZValues: w.getZValues(),
	}
	return json.Marshal(legend)
} | ptp/legend.go | 0.763572 | 0.467271 | legend.go | starcoder
package backoff
import (
"math/rand"
"time"
)
/*
ExponentialBackOff is an implementation of BackOff that increases
its back off period for each retry attempt using a randomization function
that grows exponentially.
Backoff() time is calculated using the following formula:
randomized_interval =
retry_interval * (random value in range [1 - randomization_factor, 1 + randomization_factor])
In other words BackOff() will sleep for times between the randomization factor
percentage below and above the retry interval.
For example, using 2 seconds as the base retry interval and 0.5 as the
randomization factor, the actual back off period used in the next retry
attempt will be between 1 and 3 seconds.
Note: max_interval caps the retry_interval and not the randomized_interval.
Example: The default retry_interval is .5 seconds, default randomization_factor
is 0.5, default multiplier is 1.5 and the max_interval is set to 25 seconds.
For 12 tries the sequence will sleep (values in seconds) (output from ExampleExpBackOffTimes) :
request# retry_interval randomized_interval
1 0.5 [0.25, 0.75]
2 0.75 [0.375, 1.125]
3 1.125 [0.562, 1.687]
4 1.687 [0.8435, 2.53]
5 2.53 [1.265, 3.795]
6 3.795 [1.897, 5.692]
7 5.692 [2.846, 8.538]
8 8.538 [4.269, 12.807]
9 12.807 [6.403, 19.210]
10 19.22 [9.611, 28.833]
11 25 [12.5, 37.5]
12 25 [12.5, 37.5]
Implementation is not thread-safe.
*/
// ExponentialBackOff holds the configuration and current state of the
// exponential backoff policy described above.
type ExponentialBackOff struct {
	// InitialInterval is the first retry interval (restored by Reset).
	InitialInterval time.Duration
	// currentInterval is the current un-randomized retry interval.
	currentInterval time.Duration
	// MaxInterval caps currentInterval, not the randomized value.
	MaxInterval time.Duration
	// RandomizationFactor sets the +/- jitter fraction around the interval.
	RandomizationFactor float64
	// Multiplier grows the interval after each BackOff call.
	Multiplier float64
}
// Default values for ExponentialBackOff.
const (
DefaultInitialInterval = 500 * time.Millisecond
DefaultRandomizationFactor = 0.5
DefaultMultiplier = 1.5
DefaultMaxInterval = 60 * time.Second
)
// NewExponential creates an instance of ExponentialBackOff using default values.
func NewExponential() *ExponentialBackOff {
	backOff := &ExponentialBackOff{
		InitialInterval:     DefaultInitialInterval,
		RandomizationFactor: DefaultRandomizationFactor,
		Multiplier:          DefaultMultiplier,
		MaxInterval:         DefaultMaxInterval,
	}
	// Reset seeds currentInterval from InitialInterval.
	backOff.Reset()
	return backOff
}
// Reset the interval back to the initial retry interval and restarts the timer.
func (b *ExponentialBackOff) Reset() {
	b.currentInterval = b.InitialInterval
}
// GetSleepTime returns the next randomized sleep duration without advancing
// the backoff state.
func (b *ExponentialBackOff) GetSleepTime() time.Duration {
	return getRandomValueFromInterval(b.RandomizationFactor, rand.Float64(), b.currentInterval)
}
// BackOff sleeps for the next randomized interval, then grows the interval
// for the following attempt.
func (b *ExponentialBackOff) BackOff() {
	time.Sleep(b.GetSleepTime())
	b.IncrementCurrentInterval()
}
// IncrementCurrentInterval multiplies the current interval by the multiplier,
// clamping the result at MaxInterval (which also guards against overflow of
// the multiplication).
func (b *ExponentialBackOff) IncrementCurrentInterval() {
	if float64(b.currentInterval) >= float64(b.MaxInterval)/b.Multiplier {
		b.currentInterval = b.MaxInterval
		return
	}
	b.currentInterval = time.Duration(float64(b.currentInterval) * b.Multiplier)
}
// Inverval returns the current un-randomized retry interval.
// NOTE(review): the name is misspelled (should be "Interval"); it is
// exported, so renaming would break callers — consider adding a correctly
// named method and deprecating this one.
func (b *ExponentialBackOff) Inverval() time.Duration {
	return b.currentInterval
}
// Returns a random value from the interval:
// [currentInterval - delta, currentInterval + delta], where
// delta = randomizationFactor * currentInterval.
func getRandomValueFromInterval(randomizationFactor, random float64, currentInterval time.Duration) time.Duration {
	var delta = randomizationFactor * float64(currentInterval)
	var minInterval = float64(currentInterval) - delta
	var maxInterval = float64(currentInterval) + delta
	// Get a random value from the range [minInterval, maxInterval].
	// The formula used below has a +1 because if the minInterval is 1 and the maxInterval is 3 then
	// we want a 33% chance for selecting either 1, 2 or 3.
	return time.Duration(minInterval + (random * (maxInterval - minInterval + 1)))
} | vendor/github.com/azr/backoff/exponential.go | 0.864996 | 0.527195 | exponential.go | starcoder
package pytest
import (
"context"
"errors"
"fmt"
"strings"
"time"
xpytest_proto "github.com/chainer/xpytest/proto"
)
// Pytest represents one pytest execution.
type Pytest struct {
	// PythonCmd is the interpreter used to launch "python -m pytest".
	PythonCmd string
	// MarkerExpression is passed to pytest's -m flag when non-empty.
	MarkerExpression string
	// Xdist is the pytest-xdist worker count; 0 disables the -n flag.
	Xdist int
	// Files are the test files to run; must be non-empty.
	Files []string
	// Executor runs the assembled command line; overridable for testing.
	Executor func(
		context.Context, []string, time.Duration, []string,
	) (*xpytest_proto.TestResult, error)
	// Retry is the maximum number of attempts while the run keeps failing.
	Retry int
	// Env holds extra environment variables for the subprocess.
	Env []string
	// Deadline is the per-attempt timeout; must be positive.
	Deadline time.Duration
}
// NewPytest creates a new Pytest object.
// The returned object uses the package-level Execute function as its Executor.
func NewPytest(pythonCmd string) *Pytest {
	return &Pytest{PythonCmd: pythonCmd, Executor: Execute}
}
// Execute builds pytest parameters and runs pytest.
// The run is retried (up to p.Retry attempts in total) while it keeps
// failing; a run that fails at first and later succeeds is reported with
// status FLAKY.
func (p *Pytest) Execute(
	ctx context.Context,
) (*Result, error) {
	var finalResult *Result
	// Always run at least once, then retry only while the result is FAILED.
	for trial := 0; trial == 0 || trial < p.Retry; trial++ {
		pr, err := p.execute(ctx)
		if err != nil {
			return nil, err
		}
		if trial == 0 {
			finalResult = pr
		} else if pr.Status == xpytest_proto.TestResult_SUCCESS {
			// A later success after an initial failure marks the run flaky.
			finalResult.Status = xpytest_proto.TestResult_FLAKY
		}
		finalResult.trial = trial
		if finalResult.Status != xpytest_proto.TestResult_FAILED {
			break
		}
	}
	return finalResult, nil
}
// execute performs a single pytest run: it assembles the command line from
// the configured options, validates Files and Deadline, and delegates the
// actual process execution to p.Executor.
func (p *Pytest) execute(
	ctx context.Context,
) (*Result, error) {
	// Build command-line arguments.
	args := []string{p.PythonCmd, "-m", "pytest"}
	if p.MarkerExpression != "" {
		args = append(args, "-m", p.MarkerExpression)
	}
	if p.Xdist > 0 {
		args = append(args, "-n", fmt.Sprintf("%d", p.Xdist))
	}
	if len(p.Files) == 0 {
		return nil, errors.New("Pytest.Files must not be empty")
	}
	args = append(args, p.Files...)
	// Check deadline.
	deadline := p.Deadline
	if deadline <= 0 {
		return nil, fmt.Errorf("Pytest.Deadline must be positive value")
	}
	// Execute pytest.
	r, err := p.Executor(ctx, args, deadline, p.Env)
	if err != nil {
		return nil, err
	}
	return newPytestResult(p, r), nil
}
// Result represents a pytest execution result.
type Result struct {
	// Status is the final status (possibly FLAKY after retries).
	Status xpytest_proto.TestResult_Status
	// Name is the first test file of the run.
	Name string
	// xdist is the number of pytest-xdist workers used (0 = disabled).
	xdist int
	// trial is the 0-based index of the last attempt.
	trial int
	// duration is the run time in seconds.
	duration float32
	// summary is the one-line pytest result (e.g. "3 passed in 0.12s").
	summary string
	stdout string
	stderr string
}
// newPytestResult converts a raw TestResult proto into a Result: it extracts
// pytest's final summary line from stdout and truncates very long outputs.
// (Fix: r.duration was read while still zero — it was only assigned after the
// summary string had been built, so the INTERNAL summary always reported
// "0 seconds". It is now set before first use.)
func newPytestResult(p *Pytest, tr *xpytest_proto.TestResult) *Result {
	r := &Result{}
	if len(p.Files) > 0 {
		r.Name = p.Files[0]
	}
	r.Status = tr.GetStatus()
	r.xdist = p.Xdist
	r.duration = tr.GetTime()
	result := ""
	if r.Status != xpytest_proto.TestResult_TIMEOUT {
		// pytest ends its report with a line like "=== 3 passed in 0.12s ===".
		lines := strings.Split(strings.TrimSpace(tr.Stdout), "\n")
		lastLine := lines[len(lines)-1]
		if strings.HasPrefix(lastLine, "=") {
			result = strings.Trim(lastLine, "= ")
		} else {
			// No pytest summary line: the process died unexpectedly.
			result = fmt.Sprintf("%s; %.0f seconds", r.Status, r.duration)
			r.Status = xpytest_proto.TestResult_INTERNAL
		}
	}
	r.summary = func() string {
		if r.Status == xpytest_proto.TestResult_TIMEOUT {
			return fmt.Sprintf("%.0f seconds", r.duration)
		}
		return result
	}()
	// shorten keeps the first and last 250 lines of very long outputs.
	shorten := func(s string) string {
		ss := strings.Split(s, "\n")
		if len(ss) > 500 {
			output := ss[0:250]
			output = append(output,
				fmt.Sprintf("...(%d lines skipped)...", len(ss)-500))
			output = append(output, ss[len(ss)-250:]...)
			return strings.Join(output, "\n")
		}
		return s
	}
	r.stdout = shorten(tr.Stdout)
	r.stderr = shorten(tr.Stderr)
	return r
}
// Summary returns a one-line summary of the test result (e.g.,
// "[SUCCESS] test_foo.py (123 passed in 4.56 seconds)").
func (r *Result) Summary() string {
	details := make([]string, 0, 3)
	if r.summary != "" {
		details = append(details, r.summary)
	}
	if r.xdist > 0 {
		details = append(details, fmt.Sprintf("%d procs", r.xdist))
	}
	if r.trial > 0 {
		details = append(details, fmt.Sprintf("%d trials", r.trial+1))
	}
	suffix := ""
	if joined := strings.Join(details, " * "); joined != "" {
		suffix = " (" + joined + ")"
	}
	return fmt.Sprintf("[%s] %s%s", r.Status, r.Name, suffix)
}
// Output returns the test result. This returns outputs from STDOUT/STDERR in
// addition to a one-line summary returned by Summary.
func (r *Result) Output() string {
	// On success, stdout is omitted to keep logs small.
	if r.Status == xpytest_proto.TestResult_SUCCESS {
		return strings.TrimSpace(r.Summary() + "\n" + r.stderr)
	}
	return strings.TrimSpace(r.Summary() + "\n" +
		strings.TrimSpace(r.stdout+"\n"+r.stderr))
} | pkg/pytest/pytest.go | 0.558086 | 0.400749 | pytest.go | starcoder
package nune
import (
"sync"
"github.com/vorduin/slices"
)
// handleZip processes an elementwise operation accordingly.
// The input is split into nCPU contiguous chunks, each combined by its own
// goroutine; the call blocks until every chunk has been written to out.
func handleZip[T Number](lhs, rhs, out []T, f func(T, T) T, nCPU int) {
	var wg sync.WaitGroup
	wg.Add(nCPU)
	for worker := 0; worker < nCPU; worker++ {
		lo := worker * len(lhs) / nCPU
		hi := (worker + 1) * len(lhs) / nCPU
		go func(a, b, dst []T) {
			defer wg.Done()
			for k := range a {
				dst[k] = f(a[k], b[k])
			}
		}(lhs[lo:hi], rhs[lo:hi], out[lo:hi])
	}
	wg.Wait()
}
// midwayBroadcast adjusts the first shape so that the second shape might
// be broadcastable to it. For example, for the shape [4, 1] to be
// broadcastable with the shape [3], this function returns that
// the broadcasting shape should be [4, 3].
// NOTE(review): when len(s1) >= len(s2) the returned slice aliases s1 and the
// expansion loop mutates the caller's slice in place — confirm intended.
func midwayBroadcast(s1, s2 []int) []int {
	var s []int
	if len(s1) < len(s2) {
		// Left-pad s1 with 1s so both shapes have the same rank.
		// assumes slices.WithLen returns a zeroed slice of the given length
		// — TODO confirm against github.com/vorduin/slices.
		s = slices.WithLen[int](len(s2))
		for i := 0; i < len(s2)-len(s1); i++ {
			s[i] = 1
		}
		copy(s[len(s2)-len(s1):], s1)
	} else {
		s = s1
	}
	// Expand size-1 axes of s to match s2 (NumPy-style broadcasting rule).
	for i := 0; i < len(s2); i++ {
		if s[i] != s2[i] && s[i] == 1 {
			s[i] = s2[i]
		}
	}
	return s
}
// Zip performs an elementwise operation
// between other and this Tensor.
// Both operands are broadcast towards a common shape when their shapes
// differ; the result is written in place into this Tensor's buffer. In
// interactive mode errors panic, otherwise they are recorded on t.Err.
// (Fix: on a conversion error the interactive branch panicked with t.Err,
// which is nil at that point — it now panics with o.Err.)
func (t Tensor[T]) Zip(other any, f func(T, T) T) Tensor[T] {
	if t.Err != nil {
		if EnvConfig.Interactive {
			panic(t.Err)
		} else {
			return t
		}
	}
	o := From[T](other)
	if o.Err != nil {
		if EnvConfig.Interactive {
			panic(o.Err)
		} else {
			t.Err = o.Err
			return t
		}
	}
	if !slices.Equal(t.shape, o.shape) {
		if s := midwayBroadcast(o.shape, t.shape); t.Broadable(s...) && !slices.Equal(s, t.shape) {
			t = t.Broadcast(s...)
		}
		if s := midwayBroadcast(t.Shape(), o.Shape()); o.Broadable(s...) && !slices.Equal(s, o.shape) {
			o = o.Broadcast(s...)
		}
		if !slices.Equal(t.shape, o.shape) {
			if EnvConfig.Interactive {
				panic(ErrNotBroadable)
			} else {
				t.Err = ErrNotBroadable
				return t
			}
		}
	}
	// TODO: Fix if the Tensor was permutated.
	handleZip(t.Ravel(), o.Ravel(), t.Ravel(), f, configCPU(t.Numel()))
	return t
}
// Add takes a value and performs elementwise addition
// between other and this Tensor.
// Operands are broadcast as needed; see Zip for error handling.
func (t Tensor[T]) Add(other any) Tensor[T] {
	return t.Zip(other, func(x, y T) T {
		return x + y
	})
}
// Sub takes a value and performs elementwise subtraction
// between other and this Tensor.
// Operands are broadcast as needed; see Zip for error handling.
func (t Tensor[T]) Sub(other any) Tensor[T] {
	return t.Zip(other, func(x, y T) T {
		return x - y
	})
}
// Mul takes a value and performs elementwise multiplication
// between other and this Tensor.
// Operands are broadcast as needed; see Zip for error handling.
func (t Tensor[T]) Mul(other any) Tensor[T] {
	return t.Zip(other, func(x, y T) T {
		return x * y
	})
}
// Div takes a value and performs elementwise division
// between other and this Tensor.
// Operands are broadcast as needed; see Zip for error handling.
// NOTE(review): division by zero panics for integer element types.
func (t Tensor[T]) Div(other any) Tensor[T] {
	return t.Zip(other, func(x, y T) T {
		return x / y
	})
} | zip.go | 0.602412 | 0.401717 | zip.go | starcoder
package opt
import (
"encoding/json"
"fmt"
)
// Maybe is a minimal optional-value container: it either holds a single
// value of type V ("defined") or holds nothing.
type Maybe[V any] struct {
	present bool
	held    V
}

// Some wraps value in a defined Maybe.
func Some[V any](value V) Maybe[V] {
	var m Maybe[V]
	m.present = true
	m.held = value
	return m
}

// None returns the empty Maybe, which carries no value.
func None[V any]() Maybe[V] {
	var empty Maybe[V]
	return empty
}

// FromPtr converts a pointer into a Maybe: a nil pointer becomes None and a
// non-nil pointer becomes Some of the pointed-to value.
func FromPtr[V any](ptr *V) Maybe[V] {
	if ptr == nil {
		return None[V]()
	}
	return Some(*ptr)
}

// IsDefined reports whether the Maybe holds a value.
func (m Maybe[V]) IsDefined() bool {
	return m.present
}

// Value returns the held value, or V's zero value when undefined.
func (m Maybe[V]) Value() V {
	return m.held
}

// AsPtr returns a pointer to the held value, or nil when undefined.
func (m Maybe[V]) AsPtr() *V {
	if !m.present {
		return nil
	}
	return &m.held
}

// OrElse returns the held value when defined, or valueIfUndefined otherwise.
func (m Maybe[V]) OrElse(valueIfUndefined V) V {
	if !m.present {
		return valueIfUndefined
	}
	return m.held
}

// String renders the value via its own fmt.Stringer implementation when it
// has one, via fmt's "%v" verb otherwise, and as "[none]" when undefined.
func (m Maybe[V]) String() string {
	if !m.present {
		return "[none]"
	}
	if s, ok := any(m.held).(fmt.Stringer); ok {
		return s.String()
	}
	return fmt.Sprintf("%v", m.held)
}

// MarshalJSON encodes the held value in its normal JSON form, or emits a
// JSON null when undefined.
func (m Maybe[V]) MarshalJSON() ([]byte, error) {
	if !m.present {
		return []byte("null"), nil
	}
	return json.Marshal(m.held)
}

// UnmarshalJSON decodes a JSON null into None, and any other valid JSON
// value into Some of the decoded V.
func (m *Maybe[V]) UnmarshalJSON(data []byte) error {
	var probe interface{}
	if err := json.Unmarshal(data, &probe); err != nil {
		return err
	}
	if probe == nil {
		*m = None[V]()
		return nil
	}
	var decoded V
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	*m = Some(decoded)
	return nil
}
package orm
import (
"bytes"
"context"
"github.com/goradd/goradd/pkg/orm/op"
"github.com/goradd/goradd/web/examples/gen/goradd/model"
"github.com/goradd/goradd/web/examples/gen/goradd/model/node"
)
// DrawTemplate writes the generated "Using a QueryBuilder and Nodes"
// tutorial page into buf, interleaving static HTML with live ORM queries
// whose results are rendered inline. The named return err is never set:
// bytes.Buffer writes cannot fail, so nil is always returned.
func (ctrl *QueryPanel) DrawTemplate(ctx context.Context, buf *bytes.Buffer) (err error) {
	buf.WriteString(`
<h1>Using a QueryBuilder and Nodes</h1>
<p>In the previous example, you saw how to load a single object using model.Load* functions.
In this example you will learn how to query for an array of objects using a <strong>QueryBuilder</strong>.</p>
<p>Begin a query using a call to model.Query* functions. For example, if you want to return Person objects,
you would make a call to model.QueryPeople(). If you stop there, you will be selecting all of the People in the
Person database.</p>
<h3>All People</h3>
<p>
`)
	// Render every person, unfiltered.
	for _, person := range model.QueryPeople(ctx).Load() {
		buf.WriteString(`<div>`)
		buf.WriteString(person.FirstName())
		buf.WriteString(` `)
		buf.WriteString(person.LastName())
		buf.WriteString(`</div>
`)
	}
	buf.WriteString(`</p>
<h2>Refining Queries with Statements and Nodes</h2>
<p>You can further refine your query by adding the following functions to the QueryBuilder returned by the Query* functions.
A brief description is given below, and further examples for some of them can be found in later pages.
</p>
<table>
<tr><td>Where</td><td>Where adds conditions to the query to filter the query. Multiple conditions will be
ANDed together.</td></tr>
<tr><td>OrderBy</td><td>OrderBy defines the fields that will be used to sort the resulting array slice.
Multiple fields can be specified, and you can sort in descending order by adding the Descending() function
to the specified field.</td></tr>
<tr><td>Limit</td><td>Limits the resulting values to a maximum number of values. You can also specify
which of the total selection will be the first one. You can use this to page through a big result set.</td></tr>
<tr><td>GroupBy</td><td>GroupBy reduces the result set to select only one of each distinct value in
the specified fields. This can be combined with an "Alias" mentioned below.
<tr><td>Alias</td><td>Alias creates a custom named value in each of the resulting slices. You can use this
to specify the result of a calculation. Certain grouping operations, like Sum, Count, or Max, can be combined with
a GroupBy function to calculate values for each group.</td></tr>
<tr><td>Join</td><td>Join will add a linked object to the query, one that is linked through a relationship
defined in the database. Join statements can be given a condition to restrict which linked objects are attached to
each returned main object.</td></tr>
<tr><td>Select</td><td>Normally all the fields in an object are returned, and you can use whichever ones
you want. If you would like to optimize your query so that only certain fields are returned from the database,
you can specify which fields to return in the Select function.</td></tr>
<tr><td>Expand</td><td>Normally when joining objects in a one-to-many or many-to-many relationship, an
array of objects are attached to each main object. In certain situations, you might want to instead have
multiple main objects be returned, with one of each of the sub-objects attached. You do this using the
Expand statement.</td></tr>
<tr><td>Distinct</td><td>Distinct will remove duplicates of the result set. It is similar to GroupBy, but you
cannot specify operations to perform in each group. </td></tr>
<tr><td>Having</td><td>"Where" conditions filter out items from the results using just the fields in the table.
"Having" is an additional filter that operates on the results after any calculations
are performed, which "Where" cannot do since "Where" filters before calculations are done. In other words, "Where"
controls which values are sent into calculations, and "Having" controls which items are shown based on the
results of the calculations.</td></tr>
</table>
<p>
Once your QueryBuilder is ready to execute, you have some options on what to do with the QueryBuilder:
</p>
<table>
<tr><td>Load</td><td>Load will run the query and return a slice of objects.</td></tr>
<tr><td>LoadI</td><td>Load will run the query and return a slice of interfaces.</td></tr>
<tr><td>Get</td><td>Get will return only one object, whatever object comes up first in the query.</td></tr>
<tr><td>Count</td><td>Count will return the number of records that would result from the query.</td></tr>
<tr><td>Delete</td><td>Delete deletes the selected records.</td></tr>
</table>
<p>
<div>People With Last Name of Smith</div>
`)
	// Filtered example: only people whose last name equals "Smith".
	for _, person := range model.QueryPeople(ctx).
		Where(op.Equal(node.Person().LastName(), "Smith")).
		Load() {
		buf.WriteString(`<div>`)
		buf.WriteString(person.FirstName())
		buf.WriteString(` `)
		buf.WriteString(person.LastName())
		buf.WriteString(`</div>
`)
	}
	buf.WriteString(`</p>
<p>
<div>People in Last/First order</div>
`)
	// Sorted example: order by last name, then first name.
	for _, person := range model.QueryPeople(ctx).
		OrderBy(node.Person().LastName(), node.Person().FirstName()).
		Load() {
		buf.WriteString(`<div>`)
		buf.WriteString(person.FirstName())
		buf.WriteString(` `)
		buf.WriteString(person.LastName())
		buf.WriteString(`</div>
`)
	}
	buf.WriteString(`</p>
`)
	buf.WriteString(`
`)
	return
}
package torch
// #include "gotorch.h"
import "C"
import (
"reflect"
"unsafe"
)
// Tensor wraps a C-side torch::tensor handle together with the Go-side
// record of the device it lives on.
type Tensor struct {
	tensor C.Tensor
	device GoDevice
}

// Tensors holds a batch of C tensor handles.
// NOTE(review): unreferenced in this chunk — confirm external use before removal.
type Tensors struct {
	tensors []C.Tensor
}

// ATensor wraps a C-side at::Tensor handle plus its device.
type ATensor struct {
	atensor C.ATensor
	device GoDevice
}

// GoTensor is a pure-Go view of tensor data: the flat float32 values plus
// the dimension sizes recorded when the view was created.
type GoTensor struct {
	value []float32
	dims []int
}
// toGo builds a Go-side view of an at::Tensor: a float32 slice aliasing the
// C-owned buffer (not a copy) plus the first two dimension sizes.
//
// NOTE(review): the slice header of tensor_value is rewritten to point at
// C memory, so the make() allocation is immediately discarded, and the
// resulting slice must not outlive the underlying ATensor. This direct
// reflect.SliceHeader manipulation is flagged by go vet — consider
// unsafe.Slice (Go 1.17+) instead.
// NOTE(review): only dims 0 and 1 are read — assumes a rank-2 tensor; confirm.
func (atensor ATensor) toGo() GoTensor {
	tensor_size := (int)(C.AtensorSize(atensor.atensor))
	tensor_value := make([]float32, tensor_size)
	h := (*reflect.SliceHeader)((unsafe.Pointer)(&tensor_value))
	h.Data = (uintptr)((unsafe.Pointer)(C.AtensorToVec(atensor.atensor)))
	h.Len = tensor_size
	h.Cap = tensor_size
	dim0 := (int)(C.AtensorDim(atensor.atensor, 0))
	dim1 := (int)(C.AtensorDim(atensor.atensor, 1))
	dims := []int{dim0, dim1}
	gotensor := GoTensor{}
	gotensor.value = tensor_value
	gotensor.dims = dims
	return gotensor
}
// Argmax returns the index of the first maximum element of the flattened
// tensor data, or 0 when the tensor is empty.
//
// Fix: the previous implementation tracked a running maximum initialised to
// zero, so an all-negative tensor always (incorrectly) reported index 0.
// Comparing against the value at the current best index removes that bias.
func (gotensor GoTensor) Argmax() (int) {
	maxIndex := 0
	for i, v := range gotensor.value {
		// Strict > keeps the first occurrence on ties, matching the old
		// behaviour for non-negative data.
		if v > gotensor.value[maxIndex] {
			maxIndex = i
		}
	}
	return maxIndex
}
// Size returns the tensor's length along dimension dim, via the C bridge.
func (tensor Tensor) Size(dim int) int {
	return int(C.tensor_size(tensor.tensor, C.int(dim)))
}
// Reshape returns a tensor holding this tensor's data under the given
// shape; the device association is carried over from the receiver.
// NOTE(review): &cshapes[0] panics when shapes is empty — confirm callers
// always pass at least one dimension.
func (tensor Tensor) Reshape(shapes []int) Tensor {
	// Convert the Go ints into a C int array for the cgo call.
	cshapes := make([]C.int, len(shapes))
	for i, shape := range shapes {
		cshapes[i] = C.int(shape)
	}
	ret_tensor := Tensor{}
	ret_tensor.tensor = C.tensor_reshape(tensor.tensor, &cshapes[0], C.int(len(shapes)))
	ret_tensor.device = tensor.device
	return ret_tensor
}
// Backward triggers the C-side backward (autograd) pass for this tensor.
func (tensor Tensor) Backward() {
	C.backward(tensor.tensor)
}

// Item returns the tensor's value as a float32 via the C bridge.
func (tensor Tensor) Item() float32 {
	return float32(C.tensor_item(tensor.tensor))
}
// View returns a tensor presenting this tensor's data under the given shape.
// NOTE(review): unlike Reshape, the device field is NOT copied onto the
// returned tensor — confirm whether that omission is intentional.
// NOTE(review): &cshapes[0] panics when shapes is empty.
func (tensor Tensor) View(shapes []int) Tensor {
	cshapes := make([]C.int, len(shapes))
	for i, shape := range shapes {
		cshapes[i] = C.int(shape)
	}
	ret_tensor := Tensor{}
	ret_tensor.tensor = C.tensor_view(tensor.tensor, &cshapes[0], C.int(len(shapes)))
	return ret_tensor
}
// To transfers the tensor to the given device and records that device on
// the returned tensor.
// NOTE(review): when device has neither cuda nor cpu set, the zero Tensor
// (nil C handle) is returned — confirm callers never pass an empty device.
func (tensor *Tensor) To(device GoDevice) Tensor {
	ret_tensor := Tensor{}
	if device.cuda != nil {
		ret_tensor.tensor = C.tensor_to_cuda(tensor.tensor, device.cuda)
	} else if device.cpu != nil {
		ret_tensor.tensor = C.tensor_to_cpu(tensor.tensor, device.cpu)
	}
	ret_tensor.device = device
	return ret_tensor
}
// Is_cuda reports whether the underlying C tensor lives on a CUDA device.
// The C bridge returns a non-zero int for true, so the comparison result is
// returned directly instead of the previous if/else returning literals.
func (tensor Tensor) Is_cuda() bool {
	return C.tensor_is_cuda(tensor.tensor) != 0
}
// Randn creates a new tensor of the given shape filled by the C-side Randn.
// NOTE(review): the returned Tensor's device field is left at its zero
// value; &cshapes[0] panics when shapes is empty.
func Randn(shapes []int) Tensor {
	cshapes := make([]C.int, len(shapes))
	for i, shape := range shapes {
		cshapes[i] = C.int(shape)
	}
	ret_tensor := Tensor{}
	ret_tensor.tensor = C.Randn(&cshapes[0], C.int(len(shapes)))
	return ret_tensor
}
//func tensor_device_check(tensor Tensor) {
// if C.tensor_is_cuda(tensor.tensor) != 0 {
// if tensor.device.cuda == nil {
// log.Fatal("Tensor is gpu, but model is cpu")
// }
// }
//} | go/tensor.go | 0.581303 | 0.443781 | tensor.go | starcoder |
package absorbingmarkovchain
import (
"github.com/RoaringBitmap/roaring"
)
// dGraph is a directed graph: a set of node IDs plus an adjacency function
// returning, for a node, the IDs it points to.
// NOTE(review): the transformations below appear to rely on Edges returning
// its targets in ascending order — confirm that invariant.
type dGraph struct {
	Nodes *roaring.Bitmap
	Edges func(from uint32) (to []uint32)
}
// addSelfLoops returns a view of the graph in which every node has an edge
// to itself. When a self-edge already exists the successor list is returned
// unchanged; otherwise the node ID is inserted at its sorted position.
func (gin dGraph) addSelfLoops() (gout dGraph) {
	gout.Edges = func(from uint32) (to []uint32) {
		to = gin.Edges(from)
		// uint32Exist apparently reports both presence and the scan offset,
		// so p doubles as the insertion point for the missing self-edge.
		if match, p := uint32Exist(to, from); !match {
			to = append(append(append(make([]uint32, 0, len(to)+1), to[:p]...), from), to[p:]...)
		}
		return
	}
	gout.Nodes = gin.Nodes
	return
}
// filterNodes returns a view of the graph with every node in blacklist
// removed, both from the node set and from every successor list.
//
// The Edges closure merges the (sorted) successor list against the sorted
// blacklist in a single pass, allocating a filtered copy only when at least
// one successor is actually blacklisted.
// NOTE(review): correctness depends on Edges returning ascending IDs and on
// uint32Exist's (found, offset) contract — confirm both before changing.
func (gin dGraph) filterNodes(blacklist *roaring.Bitmap) (gout dGraph) {
	blacklistArray := blacklist.ToArray()
	gout.Edges = func(from uint32) (to []uint32) {
		// A blacklisted source node has no edges at all.
		if blacklist.Contains(from) {
			return nil
		}
		to = gin.Edges(from)
		u, up := to, 0 //unfiltered to and unfiltered to position
		b, bp, bv := blacklistArray, 0, uint32(0) //blacklistArray, position in blacklistArray and value
		match := false
		// Scan for the first blacklist value that occurs among the successors.
		for bp, bv = range b {
			match, up = uint32Exist(u, bv)
			u = u[up:]
			if match {
				break
			}
		}
		if !match {
			// No successor is blacklisted: return the original slice as-is.
			return to
		}
		if len(u) == 1 {
			// The hit was the final successor; just drop it.
			return to[:len(to)-1]
		}
		// Copy the clean prefix (capacity-capped so appends reallocate),
		// then merge the remaining successors against the remaining blacklist.
		p := len(to) - len(u)
		to = append([]uint32{}, to...)[:p:p]
		u = u[1:]
		bp++
		b = b[bp:]
		for _, uv := range u {
			match, bp = uint32Exist(b, uv)
			b = b[bp:]
			if !match {
				to = append(to, uv)
			}
		}
		return
	}
	gout.Nodes = roaring.AndNot(gin.Nodes, blacklist)
	return
}
// normalizedIDs renumbers the graph's nodes into the dense range [0, n) and
// returns both the renumbered view and a translator between old and new IDs.
//
// NOTE(review): the Edges remapping consumes new2OldID left-to-right via
// uint32Search, so it assumes gin.Edges yields ascending IDs — confirm.
func (gin dGraph) normalizedIDs() (gout dGraph, t translator) {
	new2OldID := gin.Nodes.ToArray()
	l := len(new2OldID)
	gout.Edges = func(from uint32) (to []uint32) {
		// Shadow the mapping so the closure can consume it destructively.
		new2OldID := new2OldID
		to = append([]uint32{}, gin.Edges(new2OldID[from])...)
		for p, oldID := range to {
			new2OldID = new2OldID[uint32Search(new2OldID, oldID):]
			// The dense index is how far into the mapping we have advanced.
			to[p] = uint32(l - len(new2OldID))
		}
		return
	}
	gout.Nodes = roaring.NewBitmap()
	gout.Nodes.AddRange(0, uint64(len(new2OldID)))
	t = myTranslator(new2OldID)
	return
}
// wDGraph is a dGraph whose edges additionally carry float64 weights,
// looked up lazily through Weighter.
type wDGraph struct {
	dGraph
	Weighter func(from, to uint32) (weight float64, err error)
}
// normalizedWeights rescales every node's outgoing edge weights so that
// they sum to one, turning the weighted graph into a row-stochastic view.
func (gin wDGraph) normalizedWeights() (gout wDGraph, err error) {
	nodeCount := uint32(gin.Nodes.GetCardinality())
	// weightSum[i] is the sum of outgoing weights of the i-th node in
	// iteration order; fsum is presumably an accurate-summation helper.
	weightSum := make([]float64, 0, nodeCount)
	weights := make([]float64, 0, 1024)
	for i := gin.Nodes.Iterator(); i.HasNext(); {
		from := i.Next()
		weights = weights[:0]
		for _, to := range gin.Edges(from) {
			w, err := gin.Weighter(from, to)
			if err != nil {
				return gout, err
			}
			weights = append(weights, w)
		}
		weightSum = append(weightSum, fsum(weights))
	}
	// Map a node ID to its weightSum index: the identity suffices whenever
	// the node set is exactly [0, n); otherwise fall back to a translator.
	old2NewID := func(n uint32) (uint32, error) { //Identity function
		return n, nil
	}
	if m := nodeCount - 1; !gin.Nodes.Contains(m) || uint32(gin.Nodes.Rank(m)) != nodeCount { //m is not max of a set of type [0,n]
		old2NewID = newTranslator(gin.Nodes).ToNew
	}
	gout.Weighter = func(from, to uint32) (weight float64, err error) {
		if weight, err = gin.Weighter(from, to); err != nil {
			return
		}
		newID, err := old2NewID(from)
		if err != nil {
			return
		}
		weight /= weightSum[newID]
		return
	}
	gout.dGraph = gin.dGraph
	return
}
// addSelfLoops adds a self-edge to every node (via dGraph.addSelfLoops) and
// adjusts the weighter so each diagonal entry is decreased by one: a fresh
// self-edge weighs -1, an existing one keeps its weight minus 1.
// NOTE(review): presumably this builds the P-I matrix used to solve the
// absorbing-chain linear system — confirm against the callers.
func (gin wDGraph) addSelfLoops() (gout wDGraph) {
	gout.Weighter = func(from, to uint32) (weight float64, err error) {
		weight, err = gin.Weighter(from, to)
		if from != to {
			return
		}
		// Did the original graph already contain this self-edge?
		match, _ := uint32Exist(gin.Edges(from), from)
		switch {
		case match && err != nil:
			//do nothing
		case match:
			weight += -1
		default:
			weight = -1
		}
		return
	}
	gout.dGraph = gin.dGraph.addSelfLoops()
	return
}
/*func (gin wDGraph) normalizedIDs() (gout wDGraph, t translator) {
gout.dGraph, t = gin.dGraph.normalizedIDs()
gout.Weighter = func(from, to uint32) (weight float64, err error) {
if to, err = t.ToOld(to); err != nil {
return
}
if from, err = t.ToOld(from); err != nil {
return
}
if weight, err = gin.Weighter(from, to); err != nil {
return
}
return
}
return
}*/ | graphs.go | 0.533884 | 0.429609 | graphs.go | starcoder |
package mysql
import (
"fmt"
"strings"
sq "github.com/Masterminds/squirrel"
"github.com/eveisesi/skillz"
"xorm.io/builder"
)
// tableConf pairs a table name with the column list used when composing
// queries against it.
// NOTE(review): unreferenced in this chunk — confirm use elsewhere before removing.
type tableConf struct {
	table string
	columns []string
}
// BuildFilters applies the supplied operators to a squirrel SELECT builder,
// mapping each skillz operation onto the corresponding squirrel predicate or
// query modifier. Operators with an invalid operation are skipped.
//
// NOTE(review): the NotIn/Limit/Skip cases type-assert a.Value and panic on
// unexpected dynamic types — confirm callers always supply []interface{} /
// int64 there, mirroring BuildOperators.
func BuildFilters(s sq.SelectBuilder, operators ...*skillz.Operator) sq.SelectBuilder {
	for _, a := range operators {
		if !a.Operation.IsValid() {
			continue
		}
		switch a.Operation {
		case skillz.EqualOp:
			s = s.Where(sq.Eq{a.Column: a.Value})
		case skillz.NotEqualOp:
			s = s.Where(sq.NotEq{a.Column: a.Value})
		case skillz.GreaterThanEqualToOp:
			s = s.Where(sq.GtOrEq{a.Column: a.Value})
		case skillz.GreaterThanOp:
			s = s.Where(sq.Gt{a.Column: a.Value})
		case skillz.LessThanEqualToOp:
			s = s.Where(sq.LtOrEq{a.Column: a.Value})
		case skillz.LessThanOp:
			s = s.Where(sq.Lt{a.Column: a.Value})
		case skillz.InOp:
			// sq.Eq renders a slice value as an IN clause. Fix: dropped the
			// previous a.Value.(interface{}) assertion — it was a no-op that
			// additionally panicked when Value held a nil interface.
			s = s.Where(sq.Eq{a.Column: a.Value})
		case skillz.NotInOp:
			s = s.Where(sq.NotEq{a.Column: a.Value.([]interface{})})
		case skillz.LikeOp:
			s = s.Where(sq.Like{a.Column: fmt.Sprintf("%%%v%%", a.Value)})
		case skillz.OrderOp:
			s = s.OrderBy(fmt.Sprintf("%s %s", a.Column, a.Value))
		case skillz.LimitOp:
			s = s.Limit(uint64(a.Value.(int64)))
		case skillz.SkipOp:
			s = s.Offset(uint64(a.Value.(int64)))
		}
	}
	return s
}
// BuildOperators translates skillz operators into an xorm query builder —
// the xorm counterpart of BuildFilters. Invalid operations are skipped.
//
// NOTE(review): the In/NotIn/Like/Limit cases type-assert a.Value and panic
// on unexpected dynamic types; builder.Like is filled positionally — confirm
// {column, pattern} is the intended field order.
func BuildOperators(operators ...*skillz.Operator) *builder.Builder {
	b := builder.MySQL()
	for _, a := range operators {
		if !a.Operation.IsValid() {
			continue
		}
		switch a.Operation {
		case skillz.EqualOp:
			b = b.Where(builder.Eq{a.Column: a.Value})
		case skillz.NotEqualOp:
			b = b.Where(builder.Neq{a.Column: a.Value})
		case skillz.GreaterThanEqualToOp:
			b = b.Where(builder.Gte{a.Column: a.Value})
		case skillz.GreaterThanOp:
			b = b.Where(builder.Gt{a.Column: a.Value})
		case skillz.LessThanEqualToOp:
			b = b.Where(builder.Lte{a.Column: a.Value})
		case skillz.LessThanOp:
			b = b.Where(builder.Lt{a.Column: a.Value})
		case skillz.InOp:
			b = b.Where(builder.In(a.Column, a.Value.([]interface{})...))
		case skillz.NotInOp:
			b = b.Where(builder.NotIn(a.Column, a.Value.([]interface{})...))
		case skillz.LikeOp:
			b = b.Where(builder.Like{a.Column, a.Value.(string)})
		case skillz.OrderOp:
			b = b.OrderBy(fmt.Sprintf("%s %s", a.Column, a.Value))
		case skillz.LimitOp:
			b = b.Limit(a.Value.(int))
		}
	}
	return b
}
// OnDuplicateKeyStmt renders a MySQL "ON DUPLICATE KEY UPDATE" clause that
// overwrites each named column with its VALUES() counterpart. It returns
// the empty string when no columns are given.
func OnDuplicateKeyStmt(columns ...string) string {
	if len(columns) == 0 {
		return ""
	}
	var b strings.Builder
	b.WriteString("ON DUPLICATE KEY UPDATE ")
	for i, column := range columns {
		if i > 0 {
			b.WriteByte(',')
		}
		fmt.Fprintf(&b, "%[1]s = VALUES(%[1]s)", column)
	}
	return b.String()
}
package day17
import (
"bufio"
"fmt"
"io"
"regexp"
"strconv"
"strings"
)
// Point is a 2-D integer coordinate.
type Point struct {
	X, Y int
}

// Velocity is a 2-D integer velocity vector.
type Velocity struct {
	X, Y int
}

// Probe is a projectile with a current position and velocity.
type Probe struct {
	Position Point
	Velocity Velocity
}

// Target is the axis-aligned rectangle the probe must land in; all four
// bounds are inclusive.
type Target struct {
	MinX, MaxX int
	MinY, MaxY int
}

// Hit reports whether p lies inside the target rectangle (bounds inclusive).
func (t Target) Hit(p Point) bool {
	if p.X < t.MinX || p.X > t.MaxX {
		return false
	}
	return p.Y >= t.MinY && p.Y <= t.MaxY
}
// ParseInput reads the puzzle input ("target area: x=a..b, y=c..d") from r
// and returns the described target rectangle. Blank lines are skipped; a
// malformed line or an inverted range yields an error. When the input holds
// several target lines, the last one wins (unchanged from before).
func ParseInput(r io.Reader) (*Target, error) {
	// Fix: the ".." range separators are now escaped — the previous pattern's
	// bare dots matched ANY two characters. The x bounds also accept a
	// leading minus sign, matching what the y bounds already allowed.
	pattern := regexp.MustCompile(`^target area: x=(-?\d+)\.\.(-?\d+), y=(-?\d+)\.\.(-?\d+)$`)
	s := bufio.NewScanner(r)
	var t Target
	for s.Scan() {
		l := strings.TrimSpace(s.Text())
		if len(l) == 0 {
			continue
		}
		submatches := pattern.FindStringSubmatch(l)
		// A nil result has length 0, so one length check covers both cases.
		if len(submatches) != 5 {
			return nil, fmt.Errorf("invalid input: %s", l)
		}
		vals := make([]int, 0, 4)
		for _, match := range submatches[1:] {
			v, err := strconv.Atoi(match)
			if err != nil {
				return nil, err
			}
			vals = append(vals, v)
		}
		minX, maxX, minY, maxY := vals[0], vals[1], vals[2], vals[3]
		if minX > maxX || minY > maxY {
			return nil, fmt.Errorf("invalid input: %s", l)
		}
		t = Target{MinX: minX, MaxX: maxX, MinY: minY, MaxY: maxY}
	}
	if err := s.Err(); err != nil {
		return nil, err
	}
	return &t, nil
}
// CalculateHighestYPosition brute-forces the plausible initial velocities
// and returns the greatest apex height among trajectories that hit target.
// X is bounded by target.MaxX (any more overshoots on the first step); Y by
// |MinY| (a probe launched up at y comes back to height 0 with velocity
// -y-1, which overshoots once y >= |MinY|) — assumes MinY < 0.
func CalculateHighestYPosition(target *Target) int {
	highestY := 0
	for x := 0; x <= target.MaxX; x++ {
		for y := target.MinY; y < abs(target.MinY); y++ {
			p := Probe{Velocity: Velocity{X: x, Y: y}}
			maxY := 0
			// Simulate until the probe passes beyond the target box.
			for p.Position.X <= target.MaxX && p.Position.Y >= target.MinY {
				if p.Position.Y > maxY {
					maxY = p.Position.Y
				}
				if target.Hit(p.Position) {
					if maxY > highestY {
						highestY = maxY
					}
					break
				}
				// Horizontal velocity exhausted short of the target: stalled.
				if p.Velocity.X == 0 && p.Position.X < target.MinX {
					break
				}
				p.Step()
			}
		}
	}
	return highestY
}
// CalculateDistinctInitialVelocities counts how many distinct initial
// velocity vectors put the probe inside the target at some step. The search
// bounds mirror CalculateHighestYPosition; assumes MinY < 0 and MaxX > 0.
func CalculateDistinctInitialVelocities(target *Target) int {
	count := 0
	// Initial x velocity can't be greater than max target X
	for x := 0; x <= target.MaxX; x++ {
		// Initial y velocity can't be greater than min target Y.
		// If y velocity is positive, when it gets back to zero it's Y velocity will be at -y - 1.
		// This will overshoot the target if initial velocity y == target.MinY.
		for y := target.MinY; y < abs(target.MinY); y++ {
			p := Probe{Velocity: Velocity{X: x, Y: y}}
			for p.Position.X <= target.MaxX && p.Position.Y >= target.MinY {
				if target.Hit(p.Position) {
					count++
					break
				}
				// We won't reach MinX
				if p.Velocity.X == 0 && p.Position.X < target.MinX {
					break
				}
				p.Step()
			}
		}
	}
	return count
}
// Step advances the probe one tick: position moves by the current velocity,
// horizontal velocity decays towards zero by one (drag), and vertical
// velocity drops by one (gravity).
func (p *Probe) Step() {
	p.Position.X += p.Velocity.X
	p.Position.Y += p.Velocity.Y
	switch {
	case p.Velocity.X > 0:
		p.Velocity.X--
	case p.Velocity.X < 0:
		p.Velocity.X++
	}
	p.Velocity.Y--
}
// abs returns the absolute value of i.
func abs(i int) int {
	if i >= 0 {
		return i
	}
	return -i
}
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// UserExperienceAnalyticsWorkFromAnywhereDevicesSummary the user experience analytics Work From Anywhere metrics devices summary.
// NOTE(review): this model appears machine-generated (Microsoft Kiota) —
// prefer regenerating from the Graph metadata over hand-editing.
type UserExperienceAnalyticsWorkFromAnywhereDevicesSummary struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // The value of work from anywhere autopilot devices summary.
    autopilotDevicesSummary UserExperienceAnalyticsAutopilotDevicesSummaryable
    // The user experience analytics work from anywhere Cloud Identity devices summary.
    cloudIdentityDevicesSummary UserExperienceAnalyticsCloudIdentityDevicesSummaryable
    // The user experience work from anywhere Cloud management devices summary.
    cloudManagementDevicesSummary UserExperienceAnalyticsCloudManagementDevicesSummaryable
    // Total number of co-managed devices. Valid values -2147483648 to 2147483647
    coManagedDevices *int32
    // The count of intune devices that are not autopilot registerd. Valid values -2147483648 to 2147483647
    devicesNotAutopilotRegistered *int32
    // The count of intune devices not autopilot profile assigned. Valid values -2147483648 to 2147483647
    devicesWithoutAutopilotProfileAssigned *int32
    // The count of devices that are not cloud identity. Valid values -2147483648 to 2147483647
    devicesWithoutCloudIdentity *int32
    // The count of intune devices that are not autopilot registerd. Valid values -2147483648 to 2147483647
    intuneDevices *int32
    // Total count of tenant attach devices. Valid values -2147483648 to 2147483647
    tenantAttachDevices *int32
    // The total count of devices. Valid values -2147483648 to 2147483647
    totalDevices *int32
    // The count of Windows 10 devices that have unsupported OS versions. Valid values -2147483648 to 2147483647
    unsupportedOSversionDevices *int32
    // The count of windows 10 devices. Valid values -2147483648 to 2147483647
    windows10Devices *int32
    // The user experience analytics work from anywhere Windows 10 devices summary.
    windows10DevicesSummary UserExperienceAnalyticsWindows10DevicesSummaryable
    // The count of windows 10 devices that are Intune and Comanaged. Valid values -2147483648 to 2147483647
    windows10DevicesWithoutTenantAttach *int32
}
// NewUserExperienceAnalyticsWorkFromAnywhereDevicesSummary instantiates a new
// userExperienceAnalyticsWorkFromAnywhereDevicesSummary and sets the default
// values (an empty additional-data map).
func NewUserExperienceAnalyticsWorkFromAnywhereDevicesSummary()(*UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) {
    res := new(UserExperienceAnalyticsWorkFromAnywhereDevicesSummary)
    res.SetAdditionalData(map[string]interface{}{})
    return res
}
// CreateUserExperienceAnalyticsWorkFromAnywhereDevicesSummaryFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parseNode argument is accepted for factory-signature compatibility and
// is not consulted: this type has no subtypes to discriminate between.
func CreateUserExperienceAnalyticsWorkFromAnywhereDevicesSummaryFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewUserExperienceAnalyticsWorkFromAnywhereDevicesSummary(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// All getters below tolerate a nil receiver, returning nil in that case;
// the redundant else-after-return branches were dropped (Go review style).
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetAutopilotDevicesSummary gets the autopilotDevicesSummary property value. The value of work from anywhere autopilot devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetAutopilotDevicesSummary()(UserExperienceAnalyticsAutopilotDevicesSummaryable) {
    if m == nil {
        return nil
    }
    return m.autopilotDevicesSummary
}
// GetCloudIdentityDevicesSummary gets the cloudIdentityDevicesSummary property value. The user experience analytics work from anywhere Cloud Identity devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetCloudIdentityDevicesSummary()(UserExperienceAnalyticsCloudIdentityDevicesSummaryable) {
    if m == nil {
        return nil
    }
    return m.cloudIdentityDevicesSummary
}
// GetCloudManagementDevicesSummary gets the cloudManagementDevicesSummary property value. The user experience work from anywhere Cloud management devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetCloudManagementDevicesSummary()(UserExperienceAnalyticsCloudManagementDevicesSummaryable) {
    if m == nil {
        return nil
    }
    return m.cloudManagementDevicesSummary
}
// GetCoManagedDevices gets the coManagedDevices property value. Total number of co-managed devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetCoManagedDevices()(*int32) {
    if m == nil {
        return nil
    }
    return m.coManagedDevices
}
// GetDevicesNotAutopilotRegistered gets the devicesNotAutopilotRegistered property value. The count of intune devices that are not autopilot registerd. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetDevicesNotAutopilotRegistered()(*int32) {
    if m == nil {
        return nil
    }
    return m.devicesNotAutopilotRegistered
}
// GetDevicesWithoutAutopilotProfileAssigned gets the devicesWithoutAutopilotProfileAssigned property value. The count of intune devices not autopilot profile assigned. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetDevicesWithoutAutopilotProfileAssigned()(*int32) {
    if m == nil {
        return nil
    }
    return m.devicesWithoutAutopilotProfileAssigned
}
// GetDevicesWithoutCloudIdentity gets the devicesWithoutCloudIdentity property value. The count of devices that are not cloud identity. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetDevicesWithoutCloudIdentity()(*int32) {
    if m == nil {
        return nil
    }
    return m.devicesWithoutCloudIdentity
}
// GetFieldDeserializers returns the deserialization information for the
// current model: a map from wire field name to a handler that reads the
// value out of a parse node and stores it on m.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    // node is a local shorthand (type alias) for the kiota ParseNode interface.
    type node = i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode
    // int32Field builds a deserializer that reads an int32 node value and,
    // when present, stores it through assign. This replaces ten structurally
    // identical closures.
    int32Field := func(assign func(*int32)) func(node) error {
        return func(n node) error {
            val, err := n.GetInt32Value()
            if err != nil {
                return err
            }
            if val != nil {
                assign(val)
            }
            return nil
        }
    }
    res := make(map[string]func(node) error)
    res["autopilotDevicesSummary"] = func(n node) error {
        val, err := n.GetObjectValue(CreateUserExperienceAnalyticsAutopilotDevicesSummaryFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAutopilotDevicesSummary(val.(UserExperienceAnalyticsAutopilotDevicesSummaryable))
        }
        return nil
    }
    res["cloudIdentityDevicesSummary"] = func(n node) error {
        val, err := n.GetObjectValue(CreateUserExperienceAnalyticsCloudIdentityDevicesSummaryFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCloudIdentityDevicesSummary(val.(UserExperienceAnalyticsCloudIdentityDevicesSummaryable))
        }
        return nil
    }
    res["cloudManagementDevicesSummary"] = func(n node) error {
        val, err := n.GetObjectValue(CreateUserExperienceAnalyticsCloudManagementDevicesSummaryFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCloudManagementDevicesSummary(val.(UserExperienceAnalyticsCloudManagementDevicesSummaryable))
        }
        return nil
    }
    res["windows10DevicesSummary"] = func(n node) error {
        val, err := n.GetObjectValue(CreateUserExperienceAnalyticsWindows10DevicesSummaryFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetWindows10DevicesSummary(val.(UserExperienceAnalyticsWindows10DevicesSummaryable))
        }
        return nil
    }
    res["coManagedDevices"] = int32Field(m.SetCoManagedDevices)
    res["devicesNotAutopilotRegistered"] = int32Field(m.SetDevicesNotAutopilotRegistered)
    res["devicesWithoutAutopilotProfileAssigned"] = int32Field(m.SetDevicesWithoutAutopilotProfileAssigned)
    res["devicesWithoutCloudIdentity"] = int32Field(m.SetDevicesWithoutCloudIdentity)
    res["intuneDevices"] = int32Field(m.SetIntuneDevices)
    res["tenantAttachDevices"] = int32Field(m.SetTenantAttachDevices)
    res["totalDevices"] = int32Field(m.SetTotalDevices)
    res["unsupportedOSversionDevices"] = int32Field(m.SetUnsupportedOSversionDevices)
    res["windows10Devices"] = int32Field(m.SetWindows10Devices)
    res["windows10DevicesWithoutTenantAttach"] = int32Field(m.SetWindows10DevicesWithoutTenantAttach)
    return res
}
// GetIntuneDevices gets the intuneDevices property value. The count of intune devices that are not autopilot registerd. Valid values -2147483648 to 2147483647
// All getters below tolerate a nil receiver, returning nil in that case;
// the redundant else-after-return branches were dropped (Go review style).
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetIntuneDevices()(*int32) {
    if m == nil {
        return nil
    }
    return m.intuneDevices
}
// GetTenantAttachDevices gets the tenantAttachDevices property value. Total count of tenant attach devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetTenantAttachDevices()(*int32) {
    if m == nil {
        return nil
    }
    return m.tenantAttachDevices
}
// GetTotalDevices gets the totalDevices property value. The total count of devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetTotalDevices()(*int32) {
    if m == nil {
        return nil
    }
    return m.totalDevices
}
// GetUnsupportedOSversionDevices gets the unsupportedOSversionDevices property value. The count of Windows 10 devices that have unsupported OS versions. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetUnsupportedOSversionDevices()(*int32) {
    if m == nil {
        return nil
    }
    return m.unsupportedOSversionDevices
}
// GetWindows10Devices gets the windows10Devices property value. The count of windows 10 devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetWindows10Devices()(*int32) {
    if m == nil {
        return nil
    }
    return m.windows10Devices
}
// GetWindows10DevicesSummary gets the windows10DevicesSummary property value. The user experience analytics work from anywhere Windows 10 devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetWindows10DevicesSummary()(UserExperienceAnalyticsWindows10DevicesSummaryable) {
    if m == nil {
        return nil
    }
    return m.windows10DevicesSummary
}
// GetWindows10DevicesWithoutTenantAttach gets the windows10DevicesWithoutTenantAttach property value. The count of windows 10 devices that are Intune and Comanaged. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) GetWindows10DevicesWithoutTenantAttach()(*int32) {
    if m == nil {
        return nil
    }
    return m.windows10DevicesWithoutTenantAttach
}
// Serialize writes every property of the summary (object sub-summaries,
// int32 counters, and any additional data) to writer, in the same order as
// before. It returns the first write error encountered, or nil on success.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
	// Each write uses the idiomatic `if err := ...; err != nil` form instead
	// of a bare block holding a separate declaration and check.
	if err := writer.WriteObjectValue("autopilotDevicesSummary", m.GetAutopilotDevicesSummary()); err != nil {
		return err
	}
	if err := writer.WriteObjectValue("cloudIdentityDevicesSummary", m.GetCloudIdentityDevicesSummary()); err != nil {
		return err
	}
	if err := writer.WriteObjectValue("cloudManagementDevicesSummary", m.GetCloudManagementDevicesSummary()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("coManagedDevices", m.GetCoManagedDevices()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("devicesNotAutopilotRegistered", m.GetDevicesNotAutopilotRegistered()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("devicesWithoutAutopilotProfileAssigned", m.GetDevicesWithoutAutopilotProfileAssigned()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("devicesWithoutCloudIdentity", m.GetDevicesWithoutCloudIdentity()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("intuneDevices", m.GetIntuneDevices()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("tenantAttachDevices", m.GetTenantAttachDevices()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("totalDevices", m.GetTotalDevices()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("unsupportedOSversionDevices", m.GetUnsupportedOSversionDevices()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("windows10Devices", m.GetWindows10Devices()); err != nil {
		return err
	}
	if err := writer.WriteObjectValue("windows10DevicesSummary", m.GetWindows10DevicesSummary()); err != nil {
		return err
	}
	if err := writer.WriteInt32Value("windows10DevicesWithoutTenantAttach", m.GetWindows10DevicesWithoutTenantAttach()); err != nil {
		return err
	}
	if err := writer.WriteAdditionalData(m.GetAdditionalData()); err != nil {
		return err
	}
	return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetAdditionalData(value map[string]interface{})() {
	// Nil receiver is a silent no-op.
	if m == nil {
		return
	}
	m.additionalData = value
}
// SetAutopilotDevicesSummary sets the autopilotDevicesSummary property value. The value of work from anywhere autopilot devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetAutopilotDevicesSummary(value UserExperienceAnalyticsAutopilotDevicesSummaryable)() {
	if m == nil {
		return
	}
	m.autopilotDevicesSummary = value
}
// SetCloudIdentityDevicesSummary sets the cloudIdentityDevicesSummary property value. The user experience analytics work from anywhere Cloud Identity devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetCloudIdentityDevicesSummary(value UserExperienceAnalyticsCloudIdentityDevicesSummaryable)() {
	if m == nil {
		return
	}
	m.cloudIdentityDevicesSummary = value
}
// SetCloudManagementDevicesSummary sets the cloudManagementDevicesSummary property value. The user experience work from anywhere Cloud management devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetCloudManagementDevicesSummary(value UserExperienceAnalyticsCloudManagementDevicesSummaryable)() {
	if m == nil {
		return
	}
	m.cloudManagementDevicesSummary = value
}
// SetCoManagedDevices sets the coManagedDevices property value. Total number of co-managed devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetCoManagedDevices(value *int32)() {
	if m == nil {
		return
	}
	m.coManagedDevices = value
}
// SetDevicesNotAutopilotRegistered sets the devicesNotAutopilotRegistered property value. The count of Intune devices that are not autopilot registered. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetDevicesNotAutopilotRegistered(value *int32)() {
	if m == nil {
		return
	}
	m.devicesNotAutopilotRegistered = value
}
// SetDevicesWithoutAutopilotProfileAssigned sets the devicesWithoutAutopilotProfileAssigned property value. The count of Intune devices not autopilot profile assigned. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetDevicesWithoutAutopilotProfileAssigned(value *int32)() {
	if m == nil {
		return
	}
	m.devicesWithoutAutopilotProfileAssigned = value
}
// SetDevicesWithoutCloudIdentity sets the devicesWithoutCloudIdentity property value. The count of devices that are not cloud identity. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetDevicesWithoutCloudIdentity(value *int32)() {
	if m == nil {
		return
	}
	m.devicesWithoutCloudIdentity = value
}
// SetIntuneDevices sets the intuneDevices property value. The count of Intune devices that are not autopilot registered. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetIntuneDevices(value *int32)() {
	if m == nil {
		return
	}
	m.intuneDevices = value
}
// SetTenantAttachDevices sets the tenantAttachDevices property value. Total count of tenant attach devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetTenantAttachDevices(value *int32)() {
	if m == nil {
		return
	}
	m.tenantAttachDevices = value
}
// SetTotalDevices sets the totalDevices property value. The total count of devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetTotalDevices(value *int32)() {
	if m == nil {
		return
	}
	m.totalDevices = value
}
// SetUnsupportedOSversionDevices sets the unsupportedOSversionDevices property value. The count of Windows 10 devices that have unsupported OS versions. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetUnsupportedOSversionDevices(value *int32)() {
	if m == nil {
		return
	}
	m.unsupportedOSversionDevices = value
}
// SetWindows10Devices sets the windows10Devices property value. The count of windows 10 devices. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetWindows10Devices(value *int32)() {
	if m == nil {
		return
	}
	m.windows10Devices = value
}
// SetWindows10DevicesSummary sets the windows10DevicesSummary property value. The user experience analytics work from anywhere Windows 10 devices summary.
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetWindows10DevicesSummary(value UserExperienceAnalyticsWindows10DevicesSummaryable)() {
	if m == nil {
		return
	}
	m.windows10DevicesSummary = value
}
// SetWindows10DevicesWithoutTenantAttach sets the windows10DevicesWithoutTenantAttach property value. The count of windows 10 devices that are Intune and Comanaged. Valid values -2147483648 to 2147483647
func (m *UserExperienceAnalyticsWorkFromAnywhereDevicesSummary) SetWindows10DevicesWithoutTenantAttach(value *int32)() {
if m != nil {
m.windows10DevicesWithoutTenantAttach = value
}
} | models/user_experience_analytics_work_from_anywhere_devices_summary.go | 0.655005 | 0.418043 | user_experience_analytics_work_from_anywhere_devices_summary.go | starcoder |
package dotc
import (
"io"
"text/template"
)
// SliceStream implements code generation for streams of slices.
// It is a distinct named type over Slice so the templates below can
// dispatch on it while reusing Slice's fields and methods.
type SliceStream Slice
// Pointer simply proxies to Slice, needed because of template limitation.
func (s SliceStream) Pointer() bool {
	sl := Slice(s)
	return sl.Pointer()
}
// RawType simply proxies to Slice, needed because of template limitation.
func (s SliceStream) RawType() string {
	sl := Slice(s)
	return sl.RawType()
}
// Elem simply proxies to Slice, needed because of template limitation.
func (s SliceStream) Elem() Field {
	sl := Slice(s)
	return sl.Elem()
}
// StreamType provides the stream type of the struct.
func (s SliceStream) StreamType() string {
	f := Field{Type: s.Type}
	return f.ToStreamType()
}
// GenerateStream renders the stream implementation template to w.
func (s SliceStream) GenerateStream(w io.Writer) error {
	err := sliceStreamImpl.Execute(w, s)
	return err
}
// GenerateStreamTests renders the stream tests template to w.
func (s SliceStream) GenerateStreamTests(w io.Writer) error {
	err := sliceStreamTests.Execute(w, s)
	return err
}
// sliceStreamImpl is the text/template that emits the generated stream
// wrapper type for a slice: the {{.StreamType}} struct plus its Next,
// Latest, Update, Item, Splice and Move methods. The template body is
// emitted verbatim into generated Go source, so it must stay byte-exact.
var sliceStreamImpl = template.Must(template.New("slice_stream_impl").Parse(`
// {{.StreamType}} implements a stream of {{.Type}} values
type {{.StreamType}} struct {
	Stream streams.Stream
	Value {{.Type}}
}
// Next returns the next entry in the stream if there is one
func (s *{{.StreamType}}) Next() (*{{.StreamType}}, changes.Change) {
	if s.Stream == nil {
		return nil, nil
	}
	next, nextc := s.Stream.Next()
	if next == nil {
		return nil, nil
	}
	if nextVal, ok := s.Value.Apply(nil, nextc).({{.Type}}); ok {
		return &{{.StreamType}}{Stream: next, Value: nextVal}, nextc
	}
	return &{{.StreamType}}{Value: s.Value}, nil
}
// Latest returns the latest entry in the stream
func (s *{{.StreamType}}) Latest() *{{.StreamType}} {
	for n, _ := s.Next(); n != nil; n, _ = s.Next() {
		s = n
	}
	return s
}
// Update replaces the current value with the new value
func (s *{{.StreamType}}) Update(val {{.Type}}) *{{.StreamType}} {
	if s.Stream != nil {
		nexts := s.Stream.Append(changes.Replace{Before: s.Value, After: val})
		s = &{{.StreamType}}{Stream: nexts, Value: val}
	}
	return s
}
// Item returns the sub item stream
func (s *{{.StreamType}}) Item(index int) *{{.Elem.ToStreamType}} {
	return &{{.Elem.ToStreamType}}{Stream: streams.Substream(s.Stream, index), Value: ({{if .Pointer}}*{{end}}s.Value)[index]}
}
// Splice splices the items replacing Value[offset:offset+count] with replacement
func (s *{{.StreamType}}) Splice(offset, count int, replacement ...{{.ElemType}}) *{{.StreamType}} {
	after := {{.RawType}}(replacement)
	c := changes.Splice{Offset: offset, Before: s.Value.Slice(offset, count), After: {{if .Pointer}}&{{end}}after}
	str := s.Stream.Append(c)
	return &{{.StreamType}}{Stream: str, Value: s.Value.Splice(offset, count, replacement...)}
}
// Move shuffles Value[offset:offset+count] over by distance
func (s *{{.StreamType}}) Move(offset, count, distance int) *{{.StreamType}} {
	c := changes.Move{Offset: offset, Count: count, Distance: distance}
	str := s.Stream.Append(c)
	return &{{.StreamType}}{Stream: str, Value: s.Value.Move(offset, count, distance)}
}
`))
// sliceStreamTests is the text/template that emits Go test functions for a
// generated slice stream type: basic Update/Latest behaviour, Splice, Move
// and per-item (Item) sub-streams. The generated tests expect a
// valuesFor{{.StreamType}}() fixture to exist in the generated package.
// The template body is emitted verbatim, so it must stay byte-exact.
var sliceStreamTests = template.Must(template.New("slice_stream_tests").Parse(`
func TestStream{{.StreamType}}(t *testing.T) {
	s := streams.New()
	values := valuesFor{{.StreamType}}()
	strong := &{{.StreamType}}{Stream: s, Value: values[0]}
	strong = strong.Update(values[1])
	if !reflect.DeepEqual(strong.Value, values[1]) {
		t.Error("Update did not change value", strong.Value)
	}
	s, c := s.Next()
	if !reflect.DeepEqual(c, changes.Replace{Before: values[0], After: values[1]}) {
		t.Error("Unexpected change", c)
	}
	c = changes.Replace{Before: values[1], After: values[2]}
	s = s.Append(c)
	c = changes.Replace{Before: values[2], After: values[3]}
	s = s.Append(c)
	strong = strong.Latest()
	if !reflect.DeepEqual(strong.Value, values[3]) {
		t.Error("Unexpected value", strong.Value)
	}
	_, c = strong.Next()
	if c != nil {
		t.Error("Unexpected change on stream", c)
	}
	s = s.Append(changes.Replace{Before: values[3], After: changes.Nil})
	if strong, c = strong.Next(); c != nil {
		t.Error("Unexpected change on terminated stream", c)
	}
	s.Append(changes.Replace{Before: changes.Nil, After: values[3]})
	if _, c = strong.Next(); c != nil {
		t.Error("Unexpected change on terminated stream", c)
	}
}
func TestStream{{.StreamType}}Splice(t *testing.T) {
	s := streams.New()
	values := valuesFor{{.StreamType}}()
	strong := &{{.StreamType}}{Stream: s, Value: values[1]}
	strong1 := strong.Splice(0, strong.Value.Count(), {{if .Pointer}}*{{end}}values[2]...)
	if !reflect.DeepEqual(strong1.Value, values[2]) {
		t.Error("Splice did the unexpected", strong1.Value)
	}
}
func TestStream{{.StreamType}}Move(t *testing.T) {
	s := streams.New()
	values := valuesFor{{.StreamType}}()
	strong := &{{.StreamType}}{Stream: s, Value: values[1]}
	v2 := {{if .Pointer}}*{{end}}values[2]
	strong1 := strong.Splice(strong.Value.Count(), 0, v2[len(v2)-1])
	strong2 := strong1.Move(0, 1, 1)
	if reflect.DeepEqual(strong1.Value, strong2.Value) {
		t.Error("Move did the unexpected", strong1.Value, strong2.Value)
	}
	strong2 = strong2.Move(0, 1, 1)
	if !reflect.DeepEqual(strong1.Value, strong2.Value) {
		t.Error("Move did the unexpected", strong1.Value, strong2.Value)
	}
}
func TestStream{{.StreamType}}Item(t *testing.T) {
	s := streams.New()
	values := valuesFor{{.StreamType}}()
	strong := &{{.StreamType}}{Stream: s, Value: values[1]}
	item0 := strong.Item(0)
	if !reflect.DeepEqual(item0.Value, ({{if .Pointer}}*{{end}}values[1])[0]) {
		t.Error("Item() did the unexpected", item0.Value)
	}
	for kk := range values {
		item := {{if .Pointer}}*{{end}}values[kk]
		l := len(item) - 1
		if l < 0 {
			continue
		}
		item0 = item0.Update(item[l])
		if !reflect.DeepEqual(item0.Value, item[l]) {
			t.Error("Update did not take effect", item0.Value, item[l])
		}
		strong = strong.Latest()
		v := ({{if .Pointer}}*{{end}}strong.Value)[0]
		if !reflect.DeepEqual(v, item[l]) {
			t.Error("Update did not take effect", v, item[l])
		}
	}
	v := strong.Value.ApplyCollection(nil, changes.Splice{Before: strong.Value.Slice(0, 1), After: strong.Value.Slice(0, 0)})
	if !reflect.DeepEqual(v.Slice(0, 0), v) {
		t.Error("Could not slice away item", v)
	}
}
`))
package _4_binarytree
import (
"fmt"
)
// TreeNode is a single node of the binary search tree: a value and the
// roots of its left (smaller values) and right (larger values) subtrees.
type TreeNode struct {
	val   int
	left  *TreeNode
	right *TreeNode
}
// Tree is a binary search tree; a nil root means the tree is empty.
type Tree struct {
	root *TreeNode
}
// NewTreeNode returns a leaf node holding val (no children).
func NewTreeNode(val int) *TreeNode {
	node := TreeNode{val: val}
	return &node
}
// NewTree returns an empty binary search tree.
func NewTree() *Tree {
	return new(Tree)
}
// Insert adds val to the tree; duplicates are ignored.
//
// Fix: the previous version created the root and then still entered the
// search loop (which only terminated via the val == cur.val break). It now
// returns immediately after planting the root.
func (t *Tree) Insert(val int) {
	if t.root == nil {
		t.root = NewTreeNode(val)
		return
	}
	cur := t.root
	for {
		switch {
		case val > cur.val:
			if cur.right == nil {
				cur.right = NewTreeNode(val)
				return
			}
			cur = cur.right
		case val < cur.val:
			if cur.left == nil {
				cur.left = NewTreeNode(val)
				return
			}
			cur = cur.left
		default:
			// val already present; BST holds no duplicates.
			return
		}
	}
}
// Search reports whether val is present in the tree.
func (t *Tree) Search(val int) bool {
	// Standard BST descent; a nil root simply never enters the loop.
	node := t.root
	for node != nil {
		switch {
		case val > node.val:
			node = node.right
		case val < node.val:
			node = node.left
		default:
			return true
		}
	}
	return false
}
// Delete removes val from the tree if present.
//
// Fixes over the previous version:
//   - the single-child cases dereferenced father.left / father.right without
//     nil checks, which could panic when the target hung off the other side;
//   - the two-children case only stepped left ONCE when looking for the
//     in-order successor, so it could copy the wrong value when the right
//     subtree's left spine was deeper than one node;
//   - the successor's right subtree was discarded (set to nil) instead of
//     being re-attached, silently dropping nodes.
func (t *Tree) Delete(val int) {
	if t.root == nil {
		return
	}
	// Locate the node to delete and its parent.
	var father *TreeNode
	cur := t.root
	for cur != nil && cur.val != val {
		father = cur
		if val > cur.val {
			cur = cur.right
		} else {
			cur = cur.left
		}
	}
	if cur == nil {
		return // val not in the tree
	}
	// Cases 1 & 2: at most one child — splice that child into cur's place.
	if cur.left == nil || cur.right == nil {
		child := cur.left
		if child == nil {
			child = cur.right
		}
		switch {
		case father == nil:
			t.root = child
		case father.left == cur:
			father.left = child
		default:
			father.right = child
		}
		return
	}
	// Case 3: two children — copy the in-order successor's value into cur
	// (the leftmost node of the right subtree), then unlink the successor.
	succFather := cur
	succ := cur.right
	for succ.left != nil {
		succFather = succ
		succ = succ.left
	}
	cur.val = succ.val
	// succ has no left child; hook its right subtree into its parent.
	if succFather == cur {
		succFather.right = succ.right
	} else {
		succFather.left = succ.right
	}
}
// print writes the subtree rooted at n in preorder (node, left, right).
func print(n *TreeNode) {
	if n == nil {
		return
	}
	fmt.Printf("%d \t", n.val)
	print(n.left)
	print(n.right)
}
// Print writes the whole tree in preorder followed by a newline.
func (t *Tree)Print() {
	print(t.root)
	fmt.Println()
}
package lookslike
import (
"reflect"
"github.com/elastic/go-lookslike/internal/llreflect"
"github.com/elastic/go-lookslike/llpath"
)
// walkObserverInfo bundles everything a walkObserver receives for one
// visited node: the final path component (key), the node's value, the root
// map of the walk, and the full path from the root to the node.
type walkObserverInfo struct {
	key   llpath.PathComponent
	value interface{}
	root  map[string]interface{}
	path  llpath.Path
}
// walkObserver functions run once per object in the tree; returning a
// non-nil error aborts the walk.
type walkObserver func(info walkObserverInfo) error
// walk determines whether in is a map[string]interface{} or a slice and
// traverses it if so; any other value is treated as a scalar and the
// observer is invoked on it directly.
func walk(in interface{}, expandPaths bool, wo walkObserver) error {
	// Idiomatic type switch: bind the asserted value once instead of
	// switching on in.(type) and re-asserting in each case.
	switch v := in.(type) {
	case map[string]interface{}:
		return walkMap(v, expandPaths, wo)
	case []interface{}:
		return walkSlice(v, expandPaths, wo)
	default:
		return walkInterface(in, expandPaths, wo)
	}
}
// walkMap is a shorthand for walking a tree whose root is a map: the map
// serves as both the node to traverse and the walk's root.
func walkMap(m map[string]interface{}, expandPaths bool, wo walkObserver) error {
	var root llpath.Path
	return walkFullMap(m, m, root, expandPaths, wo)
}
// walkSlice walks the provided root slice; the walk root map is empty.
func walkSlice(s []interface{}, expandPaths bool, wo walkObserver) error {
	var root llpath.Path
	return walkFullSlice(s, map[string]interface{}{}, root, expandPaths, wo)
}
// walkInterface invokes the observer once on a scalar value, with empty
// key, path and root.
func walkInterface(s interface{}, expandPaths bool, wo walkObserver) error {
	info := walkObserverInfo{
		key:   llpath.PathComponent{},
		value: s,
		root:  map[string]interface{}{},
		path:  llpath.Path{},
	}
	return wo(info)
}
// walkFull visits o itself (invoking wo with its path) and then recurses
// into it when reflection shows it is a map or a slice. The observer is
// always called on a node BEFORE its children; callers rely on that order.
func walkFull(o interface{}, root map[string]interface{}, path llpath.Path, expandPaths bool, wo walkObserver) (err error) {
	lastPathComponent := path.Last()
	if lastPathComponent == nil {
		// In the case of a slice we can have an empty Path; synthesize an
		// empty component so the observer still gets a well-formed info.
		if _, ok := o.([]interface{}); ok {
			lastPathComponent = &llpath.PathComponent{}
		} else {
			panic("Attempted to traverse an empty Path on a map[string]interface{} in lookslike.walkFull, this should never happen.")
		}
	}
	// Observe the current node first (pre-order), aborting on error.
	err = wo(walkObserverInfo{*lastPathComponent, o, root, path})
	if err != nil {
		return err
	}
	// Recurse into containers; anything else is a leaf and we are done.
	switch reflect.TypeOf(o).Kind() {
	case reflect.Map:
		converted := llreflect.InterfaceToMap(o)
		err := walkFullMap(converted, root, path, expandPaths, wo)
		if err != nil {
			return err
		}
	case reflect.Slice:
		converted := llreflect.InterfaceToSliceOfInterfaces(o)
		for idx, v := range converted {
			// Each element extends the path positionally.
			newPath := path.ExtendSlice(idx)
			err := walkFull(v, root, newPath, expandPaths, wo)
			if err != nil {
				return err
			}
		}
	}
	return nil
}
// walkFullMap walks every entry of m, invoking the observer (via walkFull)
// for each value. When expandPaths is set, keys are parsed as dotted paths
// and concatenated onto p; otherwise each key extends p as a single map key.
func walkFullMap(m map[string]interface{}, root map[string]interface{}, p llpath.Path, expandPaths bool, wo walkObserver) error {
	for k, v := range m {
		var newPath llpath.Path
		if expandPaths {
			additionalPath, parseErr := llpath.ParsePath(k)
			if parseErr != nil {
				return parseErr
			}
			newPath = p.Concat(additionalPath)
		} else {
			newPath = p.ExtendMap(k)
		}
		if err := walkFull(v, root, newPath, expandPaths, wo); err != nil {
			return err
		}
	}
	return nil
}
func walkFullSlice(s []interface{}, root map[string]interface{}, p llpath.Path, expandPaths bool, wo walkObserver) (err error) {
for idx, v := range s {
var newPath llpath.Path
newPath = p.ExtendSlice(idx)
err = walkFull(v, root, newPath, expandPaths, wo)
if err != nil {
return err
}
}
return nil
} | vendor/github.com/elastic/go-lookslike/walk.go | 0.767864 | 0.442757 | walk.go | starcoder |
package three
// Package-level scratch values reused by the Plane methods below
// (SetFromCoplanarPoints, IntersectLine, ApplyMatrix4). NOTE(review):
// shared mutable state — presumably not safe for concurrent use; confirm.
var _vector1 = Vector3{}
var _vector2 = Vector3{}
var _normalMatrix = Matrix3{}
// NewPlane constructs a plane from a normal and a constant.
func NewPlane(normal Vector3, constant float64) *Plane {
	// normal is assumed to be normalized
	p := Plane{Normal: normal, Constant: constant}
	return &p
}
// Plane represents a plane in Hessian normal form: the set of points v
// with Normal·v + Constant == 0. Normal is assumed normalized (see
// NewPlane and Normalize).
type Plane struct {
	Normal   Vector3
	Constant float64
}
// Set returns a plane with the given normal and constant.
// NOTE(review): value receiver — the caller's plane is NOT modified; a
// modified copy is returned. Confirm this is intended (the three.js
// original mutates in place).
func (p Plane) Set(normal Vector3, constant float64) *Plane {
	p.Normal.Copy(normal)
	p.Constant = constant
	return &p
}
// SetComponents returns a plane built from the four scalar components
// (x, y, z form the normal; w is the constant). Value receiver: returns a
// modified copy, the receiver is untouched.
func (p Plane) SetComponents(x, y, z, w float64) *Plane {
	p.Normal.Set(x, y, z)
	p.Constant = w
	return &p
}
// SetFromNormalAndCoplanarPoint returns a plane with the given normal that
// passes through point (constant = -point·normal). Value receiver: returns
// a modified copy.
func (p Plane) SetFromNormalAndCoplanarPoint(normal, point Vector3) *Plane {
	p.Normal.Copy(normal)
	p.Constant = -point.Dot(p.Normal)
	return &p
}
// SetFromCoplanarPoints returns the plane through the three points a, b, c,
// with normal (c-b)×(a-b) normalized. Uses the package scratch vectors.
func (p Plane) SetFromCoplanarPoints(a, b, c Vector3) *Plane {
	normal := _vector1.SubVectors(c, b).Cross(*_vector2.SubVectors(a, b)).Normalize()
	// Q: should an error be thrown if normal is zero (e.g. degenerate plane)?
	p.SetFromNormalAndCoplanarPoint(*normal, a)
	return &p
}
// Clone returns a new plane with the same normal and constant.
func (p Plane) Clone() *Plane {
	c := NewPlane(p.Normal, p.Constant)
	return c.Copy(p)
}
// Copy returns a plane whose normal and constant are taken from plane.
// Value receiver: returns a modified copy, the receiver is untouched.
func (p Plane) Copy(plane Plane) *Plane {
	p.Normal.Copy(plane.Normal)
	p.Constant = plane.Constant
	return &p
}
// Normalize returns a copy of the plane scaled so its normal has unit
// length; the constant is scaled by the same factor so the plane is
// unchanged geometrically.
func (p Plane) Normalize() *Plane {
	// Note: will lead to a divide by zero if the plane is invalid
	// (zero-length normal).
	inverseNormalLength := 1.0 / p.Normal.Length()
	p.Normal.MultiplyScalar(inverseNormalLength)
	p.Constant *= inverseNormalLength
	return &p
}
// Negate returns a copy of the plane with both the normal and the constant
// negated (same plane, opposite orientation).
func (p Plane) Negate() *Plane {
	p.Normal.Negate()
	p.Constant *= -1
	return &p
}
// DistanceToPoint returns the signed distance from point to the plane.
func (p Plane) DistanceToPoint(point Vector3) float64 {
	d := p.Normal.Dot(point)
	return d + p.Constant
}
// DistanceToSphere returns the signed distance from the sphere's surface
// to the plane (centre distance minus radius).
func (p Plane) DistanceToSphere(sphere Sphere) float64 {
	centerDist := p.DistanceToPoint(sphere.Center)
	return centerDist - sphere.Radius
}
// ProjectPoint writes the orthogonal projection of point onto the plane
// into target and returns target.
func (p Plane) ProjectPoint(point, target Vector3) *Vector3 {
	t := target.Copy(p.Normal)
	t = t.MultiplyScalar(-p.DistanceToPoint(point))
	return t.Add(point)
}
// IntersectLine computes the intersection of the plane with the line
// segment, writing it into target. Returns nil when there is no
// intersection (line parallel and off-plane, or the hit lies outside the
// segment). Uses the package scratch vector _vector1.
func (p Plane) IntersectLine(line Line3, target Vector3) *Vector3 {
	direction := line.Delta(_vector1)
	denominator := p.Normal.Dot(*direction)
	if denominator == 0 {
		// line is coplanar, return origin
		if p.DistanceToPoint(line.Start) == 0 {
			return target.Copy(line.Start)
		}
		// Parallel and not on the plane: no intersection.
		// Unsure if p is the correct method to handle p case.
		return nil
	}
	t := -(line.Start.Dot(p.Normal) + p.Constant) / denominator
	if t < 0 || t > 1 {
		// Intersection lies outside the [start, end] segment.
		return nil
	}
	return target.Copy(*direction).MultiplyScalar(t).Add(line.Start)
}
// IntersectsLine reports whether the segment's endpoints lie strictly on
// opposite sides of the plane. Note: this tests if a line intersects the
// plane, not whether it (or its end-points) are coplanar with it.
func (p Plane) IntersectsLine(line Line3) bool {
	startSign := p.DistanceToPoint(line.Start)
	endSign := p.DistanceToPoint(line.End)
	if startSign < 0 && endSign > 0 {
		return true
	}
	return endSign < 0 && startSign > 0
}
// IntersectsBox reports whether the box intersects the plane (delegates to
// Box3.IntersectsPlane).
func (p Plane) IntersectsBox(box Box3) bool {
	hit := box.IntersectsPlane(p)
	return hit
}
// IntersectsSphere reports whether the sphere intersects the plane
// (delegates to Sphere.IntersectsPlane).
func (p Plane) IntersectsSphere(sphere Sphere) bool {
	hit := sphere.IntersectsPlane(p)
	return hit
}
// CoplanarPoint writes a point on the plane (the normal scaled by
// -constant) into target and returns target.
func (p Plane) CoplanarPoint(target Vector3) *Vector3 {
	t := target.Copy(p.Normal)
	return t.MultiplyScalar(-p.Constant)
}
// ApplyMatrix4 returns a copy of the plane transformed by matrix: the
// normal is transformed by the matrix's normal matrix and the constant is
// recomputed from a transformed coplanar point. Uses the package scratch
// values _normalMatrix and _vector1.
func (p Plane) ApplyMatrix4(matrix Matrix4) *Plane {
	normalMatrix := _normalMatrix.GetNormalMatrix(matrix)
	referencePoint := p.CoplanarPoint(_vector1).ApplyMatrix4(matrix)
	normal := p.Normal.ApplyMatrix3(*normalMatrix).Normalize()
	p.Constant = -referencePoint.Dot(*normal)
	return &p
}
// Translate returns a copy of the plane shifted by offset; only the
// constant changes since translation leaves the normal intact.
func (p Plane) Translate(offset Vector3) *Plane {
	shift := offset.Dot(p.Normal)
	p.Constant -= shift
	return &p
}
// Equals :
func (p Plane) Equals(plane Plane) bool {
return plane.Normal.Equals(p.Normal) && plane.Constant == p.Constant
} | server/three/plane.go | 0.850593 | 0.698034 | plane.go | starcoder |
package main
import (
"fmt"
"io/ioutil"
"strconv"
"strings"
)
/*
--- Day 2: Password Philosophy ---
Your flight departs in a few days from the coastal airport; the easiest way down to the coast from here is via toboggan.
The shopkeeper at the North Pole Toboggan Rental Shop is having a bad day. "Something's wrong with our computers; we can't log in!" You ask if you can take a look.
Their password database seems to be a little corrupted: some of the passwords wouldn't have been allowed by the Official Toboggan Corporate Policy that was in effect when they were chosen.
To try to debug the problem, they have created a list (your puzzle input) of passwords (according to the corrupted database) and the corporate policy when that password was set.
For example, suppose you have the following list:
1-3 a: abcde
1-3 b: cdefg
2-9 c: ccccccccc
Each line gives the password policy and then the password. The password policy indicates the lowest and highest number of times a given letter must appear for the password to be valid. For example, 1-3 a means that the password must contain a at least 1 time and at most 3 times.
In the above example, 2 passwords are valid. The middle password, cdefg, is not; it contains no instances of b, but needs at least 1. The first and third passwords are valid: they contain one a or nine c, both within the limits of their respective policies.
How many passwords are valid according to their policies?
Your puzzle answer was 418.
--- Part Two ---
While it appears you validated the passwords correctly, they don't seem to be what the Official Toboggan Corporate Authentication System is expecting.
The shopkeeper suddenly realizes that he just accidentally explained the password policy rules from his old job at the sled rental place down the street! The Official Toboggan Corporate Policy actually works a little differently.
Each policy actually describes two positions in the password, where 1 means the first character, 2 means the second character, and so on. (Be careful; Toboggan Corporate Policies have no concept of "index zero"!) Exactly one of these positions must contain the given letter. Other occurrences of the letter are irrelevant for the purposes of policy enforcement.
Given the same example list from above:
1-3 a: abcde is valid: position 1 contains a and position 3 does not.
1-3 b: cdefg is invalid: neither position 1 nor position 3 contains b.
2-9 c: ccccccccc is invalid: both position 2 and position 9 contain c.
How many passwords are valid according to the new interpretation of the policies?
Your puzzle answer was 616.
*/
// check panics when e is a non-nil error and is a no-op otherwise.
func check(e error) {
	if e == nil {
		return
	}
	panic(e)
}
// Entry is one parsed line of the input file: the policy bounds, the
// policy letter, and the password to validate.
type Entry struct {
	lowerBound int    // first policy number (min count in part 1, 1-based position in part 2)
	upperBound int    // second policy number (max count in part 1, 1-based position in part 2)
	letter     string // the policy letter
	password   string // the password being checked
}
// parseEntry parses a single "lo-hi letter: password" line into an Entry.
func parseEntry(line string) (*Entry, error) {
	fields := strings.Split(line, " ")
	if len(fields) != 3 {
		return nil, fmt.Errorf("Line should have 3 components, only %d are found", len(fields))
	}
	bounds := strings.Split(fields[0], "-")
	if len(bounds) != 2 {
		return nil, fmt.Errorf("Letter repetitions in invalid format. It should be in lowerBound-upperBound format. Format found: %s", fields[0])
	}
	lo, err := strconv.Atoi(bounds[0])
	if err != nil {
		return nil, fmt.Errorf("Lower bound is not an integer. Error: %s", err)
	}
	hi, err := strconv.Atoi(bounds[1])
	if err != nil {
		return nil, fmt.Errorf("Upper bound is not an integer. Error: %s", err)
	}
	entry := Entry{
		lowerBound: lo,
		upperBound: hi,
		letter:     strings.ReplaceAll(fields[1], ":", ""),
		password:   fields[2],
	}
	return &entry, nil
}
// readInputAsEntryArray reads the named input file and parses every line
// into an Entry.
//
// Fix: the previous version ignored its inputFileName parameter and always
// read the hardcoded "./input.txt"; it now reads the file it was asked for
// (callers passing "./input.txt" are unaffected).
func readInputAsEntryArray(inputFileName string) ([]*Entry, error) {
	dat, err := ioutil.ReadFile(inputFileName)
	if err != nil {
		return nil, err
	}
	lines := strings.Split(string(dat), "\n")
	result := make([]*Entry, 0, len(lines))
	for _, line := range lines {
		entry, err := parseEntry(line)
		if err != nil {
			return nil, fmt.Errorf("Invalid entry on input file. Entry: %s Error: %s", line, err)
		}
		result = append(result, entry)
	}
	return result, nil
}
// checkValidPasswordByRepetition reports whether the policy letter occurs
// between lowerBound and upperBound times (inclusive) in the password.
func checkValidPasswordByRepetition(entry *Entry) bool {
	n := strings.Count(entry.password, entry.letter)
	if n < entry.lowerBound {
		return false
	}
	return n <= entry.upperBound
}
// partOne prints how many entries satisfy the repetition-count policy.
func partOne(entries []*Entry) {
	validPasswords := 0
	for _, entry := range entries {
		if checkValidPasswordByRepetition(entry) {
			validPasswords++
		}
	}
	fmt.Println(validPasswords)
	// should print 418
}
// checkValidPasswordByPosition reports whether exactly one of the two
// 1-based policy positions holds the policy letter.
// NOTE: like the original, this indexes the password directly, so bounds
// larger than the password length panic — confirm inputs are well-formed.
func checkValidPasswordByPosition(entry *Entry) bool {
	firstOk := string(entry.password[entry.lowerBound-1]) == entry.letter
	secondOk := string(entry.password[entry.upperBound-1]) == entry.letter
	// (a || b) && !(a && b) is simply exclusive-or.
	return firstOk != secondOk
}
// partTwo prints how many entries satisfy the positional (XOR) policy.
func partTwo(entries []*Entry) {
	validPasswords := 0
	for _, entry := range entries {
		if checkValidPasswordByPosition(entry) {
			validPasswords++
		}
	}
	fmt.Println(validPasswords)
	// should print 616
}
// main loads ./input.txt and prints the part-two answer (panics on a
// read/parse failure via check).
func main() {
	entries, err := readInputAsEntryArray("./input.txt")
	check(err)
	partTwo(entries)
}
package advent2017
// ManhattanDistanceTo returns the Manhattan distance from spiral cell 1
// (the origin) to the given cell number.
func ManhattanDistanceTo(cellNumber int) int {
	return toCartesianCoordinate(cellNumber).manhattanDistanceFromOrigin()
}
// Point is an integer (x, y) coordinate on the spiral grid; cell 1 sits at
// the origin.
type Point struct {
	x, y int
}
// manhattanDistanceFromOrigin returns |x| + |y|.
func (p Point) manhattanDistanceFromOrigin() int {
	x, y := p.x, p.y
	if x < 0 {
		x = -x
	}
	if y < 0 {
		y = -y
	}
	return x + y
}
// toCartesianCoordinate converts a 1-based spiral cell number into its
// (x, y) coordinate, with cell 1 at the origin.
func toCartesianCoordinate(cellNumber int) Point {
	var point Point
	// Find the smallest odd ring side length l with l*l >= cellNumber;
	// breadth is the ring index (0 for the centre cell).
	l := 1
	var breadth int
	for l*l < cellNumber {
		l += 2
		breadth++
	}
	// The ring's last cell (value l*l) sits at its bottom-right corner.
	if breadth > 0 {
		point.x, point.y = breadth, -breadth
	}
	// Walk backwards along the ring from that corner until we reach
	// cellNumber: left along the bottom, up the left side, right along the
	// top, down the right side (each side is one cell shorter than l).
	current := l * l
	var moves int
	for current != cellNumber && moves < l-1 {
		//Move left
		point.x--
		moves++
		current--
	}
	moves = 0
	for current != cellNumber && moves < l-1 {
		//Move up
		point.y++
		moves++
		current--
	}
	moves = 0
	for current != cellNumber && moves < l-1 {
		//Move right
		point.x++
		moves++
		current--
	}
	moves = 0
	for current != cellNumber && moves < l-2 {
		//Move down
		point.y--
		moves++
		current--
	}
	return point
}
// valuesByLocation memoizes the value written at each spiral coordinate;
// it is (re)initialized by FirstCellLargerThan at the start of each call.
var valuesByLocation map[Point]int
// FirstCellLargerThan returns the first value written in the "stress test"
// spiral — where each new cell holds the sum of its already-written
// neighbours — that is strictly larger than target. Not safe for
// concurrent use (it rewrites the package-level valuesByLocation map).
func FirstCellLargerThan(target int) int {
	// Seed the spiral: cells (0,0) and (1,0) both hold 1; the walk resumes
	// at (1,0), the first cell of the l=3 ring.
	valuesByLocation = map[Point]int{
		Point{0, 0}: 1,
		Point{1, 0}: 1,
	}
	currentLocation := Point{1, 0}
	value := valuesByLocation[currentLocation]
	l := 3 // side length of the ring currently being filled
	for value <= target {
		// Fill one full ring: up the right side, left across the top, down
		// the left side, right across the bottom; stop early the moment a
		// value exceeds target.
		moves := 0
		for value <= target && moves < l-2 {
			//Move up
			currentLocation = Point{currentLocation.x, currentLocation.y + 1}
			value = calculateValueAt(currentLocation)
			valuesByLocation[currentLocation] = value
			moves++
		}
		moves = 0
		for value <= target && moves < l-1 {
			///Move left
			currentLocation = Point{currentLocation.x - 1, currentLocation.y}
			value = calculateValueAt(currentLocation)
			valuesByLocation[currentLocation] = value
			moves++
		}
		moves = 0
		for value <= target && moves < l-1 {
			///Move down
			currentLocation = Point{currentLocation.x, currentLocation.y - 1}
			value = calculateValueAt(currentLocation)
			valuesByLocation[currentLocation] = value
			moves++
		}
		moves = 0
		for value <= target && moves < l {
			///Move right
			currentLocation = Point{currentLocation.x + 1, currentLocation.y}
			value = calculateValueAt(currentLocation)
			valuesByLocation[currentLocation] = value
			moves++
		}
		if value <= target {
			// Ring finished without exceeding target; grow outward.
			l += 2
		}
	}
	return value
}
// calculateValueAt sums the memoized values of the eight cells surrounding
// point (unwritten neighbours contribute the map zero value, 0).
func calculateValueAt(point Point) int {
	sum := 0
	for _, neighbor := range getNeighbors(point) {
		sum += valuesByLocation[neighbor]
	}
	return sum
}
func getNeighbors(point Point) []Point {
neighbors := []Point{}
neighbors = append(neighbors, Point{point.x + 1, point.y}) // Right neighbor
neighbors = append(neighbors, Point{point.x + 1, point.y + 1}) // Upper right neighbor
neighbors = append(neighbors, Point{point.x, point.y + 1}) // Upper neighbor
neighbors = append(neighbors, Point{point.x - 1, point.y + 1}) // Upper left neighbor
neighbors = append(neighbors, Point{point.x - 1, point.y}) // Left neighbor
neighbors = append(neighbors, Point{point.x - 1, point.y - 1}) // Bottom left neighbor
neighbors = append(neighbors, Point{point.x, point.y - 1}) // Bottom neighbor
neighbors = append(neighbors, Point{point.x + 1, point.y - 1}) // Bottom right neighbor
return neighbors
} | advent2017/manhattan_distance.go | 0.812644 | 0.529203 | manhattan_distance.go | starcoder |
package msgraph
// RatingGermanyMoviesType is a string enum of movie rating levels for
// Germany (presumably the FSK age ratings — confirm against the Graph
// metadata); see the constants below for the valid values.
type RatingGermanyMoviesType string
const (
	// RatingGermanyMoviesTypeVAllAllowed allows all content.
	RatingGermanyMoviesTypeVAllAllowed RatingGermanyMoviesType = "AllAllowed"
	// RatingGermanyMoviesTypeVAllBlocked blocks all content.
	RatingGermanyMoviesTypeVAllBlocked RatingGermanyMoviesType = "AllBlocked"
	// RatingGermanyMoviesTypeVGeneral is the general-audience rating.
	RatingGermanyMoviesTypeVGeneral RatingGermanyMoviesType = "General"
	// RatingGermanyMoviesTypeVAgesAbove6 is the ages-6-and-up rating.
	RatingGermanyMoviesTypeVAgesAbove6 RatingGermanyMoviesType = "AgesAbove6"
	// RatingGermanyMoviesTypeVAgesAbove12 is the ages-12-and-up rating.
	RatingGermanyMoviesTypeVAgesAbove12 RatingGermanyMoviesType = "AgesAbove12"
	// RatingGermanyMoviesTypeVAgesAbove16 is the ages-16-and-up rating.
	RatingGermanyMoviesTypeVAgesAbove16 RatingGermanyMoviesType = "AgesAbove16"
	// RatingGermanyMoviesTypeVAdults is the adults-only rating.
	RatingGermanyMoviesTypeVAdults RatingGermanyMoviesType = "Adults"
)
// RatingGermanyMoviesTypePAllAllowed returns a pointer to RatingGermanyMoviesTypeVAllAllowed
func RatingGermanyMoviesTypePAllAllowed() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAllAllowed
return &v
}
// RatingGermanyMoviesTypePAllBlocked returns a pointer to RatingGermanyMoviesTypeVAllBlocked
func RatingGermanyMoviesTypePAllBlocked() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAllBlocked
return &v
}
// RatingGermanyMoviesTypePGeneral returns a pointer to RatingGermanyMoviesTypeVGeneral
func RatingGermanyMoviesTypePGeneral() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVGeneral
return &v
}
// RatingGermanyMoviesTypePAgesAbove6 returns a pointer to RatingGermanyMoviesTypeVAgesAbove6
func RatingGermanyMoviesTypePAgesAbove6() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAgesAbove6
return &v
}
// RatingGermanyMoviesTypePAgesAbove12 returns a pointer to RatingGermanyMoviesTypeVAgesAbove12
func RatingGermanyMoviesTypePAgesAbove12() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAgesAbove12
return &v
}
// RatingGermanyMoviesTypePAgesAbove16 returns a pointer to RatingGermanyMoviesTypeVAgesAbove16
func RatingGermanyMoviesTypePAgesAbove16() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAgesAbove16
return &v
}
// RatingGermanyMoviesTypePAdults returns a pointer to RatingGermanyMoviesTypeVAdults
func RatingGermanyMoviesTypePAdults() *RatingGermanyMoviesType {
v := RatingGermanyMoviesTypeVAdults
return &v
} | v1.0/RatingGermanyMoviesTypeEnum.go | 0.589007 | 0.545528 | RatingGermanyMoviesTypeEnum.go | starcoder |
package main
import (
"fmt"
"image"
"io"
)
// Canvas is a Width×Height raster of Color values stored row-major in Pix:
// the pixel at (x, y) lives at index x + y*Width.
type Canvas struct {
	Width  int
	Height int
	Pix    []Color
}
// NewCanvas returns a Canvas of the given dimensions with every pixel set to
// the zero Color.
func NewCanvas(width, height int) Canvas {
	return Canvas{
		Width:  width,
		Height: height,
		Pix:    make([]Color, width*height),
	}
}
// xyFloatToInt truncates the floating-point coordinates (fx, fy) to integer
// pixel coordinates and reports whether they fall inside a w×h raster.
//
// Conversion truncates toward zero, so negatives must be rejected explicitly:
// previously values in (-1, 0) truncated to 0 and were wrongly reported as
// inside the canvas.
func xyFloatToInt(fx, fy float64, w, h int) (x, y int, ok bool) {
	x = int(fx)
	y = int(fy)
	ok = fx >= 0 && fy >= 0 && x < w && y < h
	return
}
// PixelAt returns the color stored at the pixel containing (fx, fy), or the
// zero Color when the coordinates fall outside the canvas.
func (canvas Canvas) PixelAt(fx, fy float64) (c Color) {
	x, y, ok := xyFloatToInt(fx, fy, canvas.Width, canvas.Height)
	if ok {
		c = canvas.Pix[x+y*canvas.Width]
	}
	return
}
// SetPixelAt stores c at the pixel containing (fx, fy); coordinates outside
// the canvas are silently ignored.
func (canvas Canvas) SetPixelAt(fx, fy float64, c Color) {
	x, y, ok := xyFloatToInt(fx, fy, canvas.Width, canvas.Height)
	if !ok {
		return
	}
	canvas.Pix[x+y*canvas.Width] = c
}
// AddPixelAt adds c to the color already stored at the pixel containing
// (fx, fy).
// Warning: no bounds checking — out-of-range coordinates panic.
func (canvas Canvas) AddPixelAt(fx, fy float64, c Color) {
	offset := int(fx) + int(fy)*canvas.Width
	canvas.Pix[offset] = canvas.Pix[offset].Add(c)
}
// FastPixelAt returns the color at integer coordinates (x, y).
// No bounds checking is performed; out-of-range coordinates panic.
func (canvas Canvas) FastPixelAt(x, y int) Color {
	return canvas.Pix[x+y*canvas.Width]
}

// FastSetPixelAt stores c at integer coordinates (x, y).
// No bounds checking is performed; out-of-range coordinates panic.
func (canvas Canvas) FastSetPixelAt(x, y int, c Color) {
	canvas.Pix[x+y*canvas.Width] = c
}
// Fill sets every pixel on the canvas to c.
func (canvas Canvas) Fill(c Color) {
	for i := 0; i < len(canvas.Pix); i++ {
		canvas.Pix[i] = c
	}
}
// Mul scales every pixel on the canvas by the factor f.
func (canvas Canvas) Mul(f float64) {
	for i := 0; i < len(canvas.Pix); i++ {
		canvas.Pix[i] = canvas.Pix[i].Mul(f)
	}
}
// ToImage renders the canvas into a new RGBA image, converting each pixel
// through the given Interpolator.
func (canvas Canvas) ToImage(erp Interpolator) image.Image {
	bounds := image.Rect(0, 0, canvas.Width, canvas.Height)
	img := image.NewRGBA(bounds)
	for y := 0; y < canvas.Height; y++ {
		for x := 0; x < canvas.Width; x++ {
			img.Set(x, y, canvas.FastPixelAt(x, y).Erp(erp))
		}
	}
	return img
}
// WriteAsPPM exports the canvas to w in plain-text ("P3") PPM format.
// NOTE(review): the header declares 255 as the maximum component value,
// which assumes Color.RGBA() yields 8-bit components. If Color satisfies
// image/color.Color, RGBA() returns 16-bit (0-65535) premultiplied values —
// confirm which contract this project's Color type follows.
func (canvas Canvas) WriteAsPPM(w io.Writer) {
	fmt.Fprintf(w, "P3\n") // Magic
	fmt.Fprintf(w, "%d %d\n", canvas.Width, canvas.Height)
	fmt.Fprintf(w, "255\n") // Maximum value of a color component
	for y := 0; y < canvas.Height; y++ {
		for x := 0; x < canvas.Width; x++ {
			r, g, b, _ := canvas.Pix[x+y*canvas.Width].RGBA()
			fmt.Fprintf(w, "%d %d %d ", r, g, b) // In theory, each line should not exceed 70 characters
		}
		fmt.Fprintln(w)
	}
	fmt.Fprintln(w)
}
package specifics
import "math"
// EmptyStrSlice returns a non-nil string slice of length zero.
func EmptyStrSlice() []string {
	return []string{}
}
// StrSliceContains reports whether s is an element of slice.
func StrSliceContains(slice []string, s string) bool {
	for i := range slice {
		if slice[i] == s {
			return true
		}
	}
	return false
}
// StrSlicesMap returns a new slice holding lambda applied to each element
// of strarr, preserving order.
func StrSlicesMap(strarr []string, lambda func(s string) string) []string {
	mapped := make([]string, len(strarr))
	for i := range strarr {
		mapped[i] = lambda(strarr[i])
	}
	return mapped
}
// StrSlicesEqual reports whether a and b hold identical contents.
// A nil slice and a non-nil empty slice are considered unequal.
func StrSlicesEqual(a, b []string) bool {
	if (a == nil) != (b == nil) || len(a) != len(b) {
		return false
	}
	for i, v := range a {
		if v != b[i] {
			return false
		}
	}
	return true
}

// StrSlicesNestedEqual reports whether two slices of string slices hold
// identical contents, comparing inner slices with StrSlicesEqual.
func StrSlicesNestedEqual(a, b [][]string) bool {
	if (a == nil) != (b == nil) || len(a) != len(b) {
		return false
	}
	for i := range a {
		if !StrSlicesEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// StrSlicesPartition partitions slice into exactly k contiguous subslices
// that share slice's backing array. Leading subslices hold ceil(n/k)
// elements; later ones hold whatever remains, so trailing subslices may be
// empty. k must be >= 1.
//
// Bug fix: the previous version sized the first k-1 chunks at ceil(n/k)
// without checking how many elements actually remained, which panicked with
// an out-of-range slice expression for inputs such as n=5, k=4. Chunk sizes
// are now capped at the remaining element count; every previously
// non-panicking input produces identical output.
func StrSlicesPartition(slice []string, k int) [][]string {
	n := len(slice)
	// Ceiling division; equals n/k exactly when k divides n.
	chunkSize := int(math.Ceil(float64(n) / float64(k)))
	result := make([][]string, 0, k)
	index := 0
	for i := 0; i < k; i++ {
		size := chunkSize
		if remaining := n - index; size > remaining {
			size = remaining
		}
		result = append(result, slice[index:index+size])
		index += size
	}
	return result
}
// StrPtrSlicesToStrSlices dereferences a slice of string pointers into a
// slice of strings, substituting "" for nil entries.
func StrPtrSlicesToStrSlices(input []*string) []string {
	output := make([]string, len(input))
	for i := range input {
		// nil entries keep the zero value "".
		if input[i] != nil {
			output[i] = *input[i]
		}
	}
	return output
}
// StrSliceRemoveString returns a new slice with every occurrence of
// toRemove dropped. When no elements survive, the result is nil.
func StrSliceRemoveString(slice []string, toRemove string) []string {
	var kept []string
	for _, v := range slice {
		if v != toRemove {
			kept = append(kept, v)
		}
	}
	return kept
}
package graph
import (
"encoding/json"
"fmt"
"log"
"strconv"
)
const (
	//MaxVertices -- Defines the Max number of node/vertices a graph can have
	MaxVertices = 1000
)

//Data -- Used to decode map input data from json.
// Mapping keys are vertex IDs (as strings, per JSON object syntax) and the
// values are the lists of adjacent vertex IDs.
type Data struct {
	TotalNodes int              `json:"total_nodes"`
	Directed   bool             `json:"directed"`
	Mapping    map[string][]int `json:"mapping"`
}

//Node -- Node will be used as the linked list (like) structure to make the list portion of
//the adjacency lists. Y is the destination vertex of the edge.
type Node struct {
	Y       int
	Weight  int
	Next    *Node
	Payload interface{}
}

//Graph2 -- This is the graph structure depicted in The Algorithm Design Manual.
// Edges[v] is the head of vertex v's edge list; Degree[v] counts its edges.
type Graph2 struct {
	Edges     []*Node
	NVertices int
	NEdges    int
	Directed  bool
	Degree    []int
}
//New2 -- Returns an empty Graph2 (no vertices or edges yet) with capacity
//for MaxVertices nodes.
func New2(directed bool) *Graph2 {
	return &Graph2{
		Edges:    make([]*Node, MaxVertices),
		Directed: directed,
		Degree:   make([]int, MaxVertices),
	}
}
//ReadIn -- Decodes the JSON graph description in rawData and builds the
//corresponding Graph2. Malformed input terminates the program via log.Fatal.
func ReadIn(rawData []byte) *Graph2 {
	var data Data
	if err := json.Unmarshal(rawData, &data); err != nil {
		log.Fatal(err)
	}
	//Initialize the graph
	graph := New2(data.Directed)
	graph.NVertices = data.TotalNodes
	for nodeStr, edges := range data.Mapping {
		node, err := strconv.Atoi(nodeStr)
		if err != nil {
			log.Fatalf("Error occurred when node string (%s) to int: %s", nodeStr, err.Error())
		}
		for _, edge := range edges {
			InsertEdge(graph, node, edge, data.Directed)
		}
	}
	return graph
}
//InsertEdge -- Adds edge x->y to g. For undirected graphs the reverse edge
//y->x is inserted too, while NEdges is still only incremented once.
func InsertEdge(g *Graph2, x, y int, directed bool) {
	// New edge node becomes the head of x's adjacency list.
	g.Edges[x] = &Node{
		Y:    y,
		Next: g.Edges[x],
	}
	//Counts the number of edges of node x
	g.Degree[x]++
	if directed {
		g.NEdges++
	} else {
		// Insert the mirror edge as "directed" so recursion stops after
		// one extra insertion and the edge is counted exactly once.
		InsertEdge(g, y, x, true)
	}
}
//Print -- Writes an adjacency-list representation of g to stdout, one line
//per vertex.
// NOTE(review): the loop runs from 0 through NVertices inclusive, printing
// NVertices+1 rows — presumably vertices are 1-indexed in the input JSON;
// confirm against the producer of Data.Mapping.
func Print(g *Graph2) {
	fmt.Printf("Graph: \n")
	for i := 0; i <= g.NVertices; i++ {
		fmt.Printf("%d: ", i)
		node := g.Edges[i] // Head of linked list of edge nodes
		for node != nil {
			fmt.Printf(" %d", node.Y)
			node = node.Next
		}
		fmt.Printf("\n")
	}
}
package algorithm
import (
"fmt"
"reflect"
"github.com/parallaxsecond/parsec-client-go/interface/operations/psaalgorithm"
)
// AeadFactory builds Algorithm values for AEAD ciphers, either with the
// cipher's default authentication-tag length or with an explicitly
// shortened tag.
type AeadFactory interface {
	Aead(algType AeadAlgorithmType) *Algorithm
	AeadShortenedTag(algType AeadAlgorithmType, tagLength uint32) *Algorithm
}

// aeadFactory is the default stateless AeadFactory implementation.
type aeadFactory struct{}

// NewAead returns the default AeadFactory.
func NewAead() AeadFactory {
	return &aeadFactory{}
}
// Aead builds an AEAD Algorithm that uses algType's default tag length.
func (a *aeadFactory) Aead(algType AeadAlgorithmType) *Algorithm {
	return &Algorithm{
		variant: &AeadAlgorithm{
			variant: &AeadAlgorithmDefaultLengthTag{
				AeadAlg: algType,
			},
		},
	}
}
// AeadShortenedTag builds an AEAD Algorithm whose authentication tag is
// truncated to tagLength bytes.
func (a *aeadFactory) AeadShortenedTag(algType AeadAlgorithmType, tagLength uint32) *Algorithm {
	return &Algorithm{
		variant: &AeadAlgorithm{
			variant: &AeadAlgorithmShortenedTag{
				AeadAlg: algType,
				// Bug fix: tagLength was previously dropped, so every
				// shortened-tag algorithm serialized with TagLength 0.
				TagLength: tagLength,
			},
		},
	}
}
// AeadAlgorithmType identifies a PSA AEAD cipher; the numeric values mirror
// the wire enumeration.
type AeadAlgorithmType uint32

const (
	AeadAlgorithmNODEFAULTTAG     AeadAlgorithmType = 0
	AeadAlgorithmCCM              AeadAlgorithmType = 1
	AeadAlgorithmGCM              AeadAlgorithmType = 2
	AeadAlgorithmChacha20Poly1305 AeadAlgorithmType = 4
)

// AeadAlgorithm is the AEAD member of the Algorithm union; it wraps exactly
// one of the variant types below.
type AeadAlgorithm struct {
	// *AeadWithDefaultLengthTag
	// *AeadWithShortenedTag
	variant aeadAlgorithmVariant
}

// aeadAlgorithmVariant is implemented by every AEAD algorithm variant type.
type aeadAlgorithmVariant interface {
	toWire
	isAeadAlgorithmVariant()
}
// isAlgorithmVariant marks AeadAlgorithm as a valid Algorithm variant.
func (a AeadAlgorithm) isAlgorithmVariant() {}

// ToWireInterface serialises the wrapped variant to its protobuf form.
func (a *AeadAlgorithm) ToWireInterface() interface{} {
	return a.variant.ToWireInterface()
}

// GetAeadDefaultLengthTag returns the default-length-tag variant, or nil
// when the algorithm holds a different variant.
func (a *AeadAlgorithm) GetAeadDefaultLengthTag() *AeadAlgorithmDefaultLengthTag {
	if alg, ok := a.variant.(*AeadAlgorithmDefaultLengthTag); ok {
		return alg
	}
	return nil
}

// GetAeadShortenedTag returns the shortened-tag variant, or nil when the
// algorithm holds a different variant.
func (a *AeadAlgorithm) GetAeadShortenedTag() *AeadAlgorithmShortenedTag {
	if alg, ok := a.variant.(*AeadAlgorithmShortenedTag); ok {
		return alg
	}
	return nil
}
// AeadAlgorithmDefaultLengthTag is an AEAD cipher using its full-length
// authentication tag.
type AeadAlgorithmDefaultLengthTag struct {
	AeadAlg AeadAlgorithmType
}

func (a *AeadAlgorithmDefaultLengthTag) isAeadAlgorithmVariant() {}

// ToWireInterface serialises the algorithm to its protobuf form.
func (a *AeadAlgorithmDefaultLengthTag) ToWireInterface() interface{} {
	return &psaalgorithm.Algorithm{
		Variant: &psaalgorithm.Algorithm_Aead_{
			Aead: &psaalgorithm.Algorithm_Aead{
				Variant: &psaalgorithm.Algorithm_Aead_AeadWithDefaultLengthTag_{
					AeadWithDefaultLengthTag: psaalgorithm.Algorithm_Aead_AeadWithDefaultLengthTag(a.AeadAlg),
				},
			},
		},
	}
}
// AeadAlgorithmShortenedTag is an AEAD cipher whose authentication tag is
// truncated to TagLength bytes.
type AeadAlgorithmShortenedTag struct {
	AeadAlg   AeadAlgorithmType
	TagLength uint32
}

func (a *AeadAlgorithmShortenedTag) isAeadAlgorithmVariant() {}

// ToWireInterface serialises the algorithm to its protobuf form.
// NOTE(review): AeadAlg is cast to Algorithm_Aead_AeadWithDefaultLengthTag —
// presumably the generated wire enum is shared by both variants; confirm
// against the psaalgorithm protobuf definitions.
func (a *AeadAlgorithmShortenedTag) ToWireInterface() interface{} {
	return &psaalgorithm.Algorithm{
		Variant: &psaalgorithm.Algorithm_Aead_{
			Aead: &psaalgorithm.Algorithm_Aead{
				Variant: &psaalgorithm.Algorithm_Aead_AeadWithShortenedTag_{
					AeadWithShortenedTag: &psaalgorithm.Algorithm_Aead_AeadWithShortenedTag{
						AeadAlg:   psaalgorithm.Algorithm_Aead_AeadWithDefaultLengthTag(a.AeadAlg),
						TagLength: a.TagLength,
					},
				},
			},
		},
	}
}
// newAeadFromWire decodes a protobuf AEAD algorithm into the corresponding
// AeadAlgorithm variant, or returns an error for an unrecognised variant.
func newAeadFromWire(a *psaalgorithm.Algorithm_Aead) (*AeadAlgorithm, error) {
	switch linealg := a.Variant.(type) {
	case *psaalgorithm.Algorithm_Aead_AeadWithDefaultLengthTag_:
		return &AeadAlgorithm{
			variant: &AeadAlgorithmDefaultLengthTag{
				AeadAlg: AeadAlgorithmType(linealg.AeadWithDefaultLengthTag),
			},
		}, nil
	case *psaalgorithm.Algorithm_Aead_AeadWithShortenedTag_:
		return &AeadAlgorithm{
			variant: &AeadAlgorithmShortenedTag{
				AeadAlg:   AeadAlgorithmType(linealg.AeadWithShortenedTag.AeadAlg),
				TagLength: linealg.AeadWithShortenedTag.TagLength,
			},
		}, nil
	default:
		return nil, fmt.Errorf("unexpected type encountered decoding aead algorithm: %v", reflect.TypeOf(linealg))
	}
}
// Package code contains the bytecode instructions for the mtail virtual machine.
package code
// Opcode is a single instruction for the mtail virtual machine.
type Opcode int

const (
	Bad        Opcode = iota // Invalid instruction, indicates a bug in the generator.
	Stop                     // Stop the program, ending processing of this input.
	Match                    // Match a regular expression against input, and set the match register.
	Smatch                   // Match a regular expression against top of stack, and set the match register.
	Cmp                      // Compare two values on the stack and set the match register.
	Jnm                      // Jump if no match.
	Jm                       // Jump if match.
	Jmp                      // Unconditional jump
	Inc                      // Increment a variable value
	Dec                      // Decrement a variable value
	Strptime                 // Parse into the timestamp register
	Timestamp                // Return value of timestamp register onto TOS.
	Settime                  // Set timestamp register to value at TOS.
	Push                     // Push operand onto stack
	Capref                   // Push capture group reference at operand onto stack
	Str                      // Push string constant at operand onto stack
	Sset                     // Set a string variable value.
	Iset                     // Set a variable value
	Iadd                     // Add top values on stack and push to stack
	Isub                     // Subtract top value from second top value on stack, and push to stack.
	Imul                     // Multiply top values on stack and push to stack
	Idiv                     // Divide top value into second top on stack, and push
	Imod                     // Integer divide top value into second top on stack, and push remainder
	Ipow                     // Put second TOS to power of TOS, and push.
	And                      // Bitwise AND the 2 at top of stack, and push result
	Or                       // Bitwise OR the 2 at top of stack, and push result
	Xor                      // Bitwise XOR the 2 at top of stack, and push result
	Neg                      // Bitwise NOT the top of stack, and push result
	Not                      // Boolean NOT the top of stack, and push result
	Shl                      // Shift TOS left, push result
	Shr                      // Shift TOS right, push result
	Mload                    // Load metric at operand onto top of stack
	Dload                    // Pop `operand` keys and metric off stack, and push datum at metric[key,...] onto stack.
	Iget                     // Pop a datum off the stack, and push its integer value back on the stack.
	Fget                     // Pop a datum off the stack, and push its float value back on the stack.
	Sget                     // Pop a datum off the stack, and push its string value back on the stack.
	Tolower                  // Convert the string at the top of the stack to lowercase.
	Length                   // Compute the length of a string.
	Cat                      // string concatenation
	Setmatched               // Set "matched" flag
	Otherwise                // Only match if "matched" flag is false.
	Del                      // Pop `operand` keys and metric off stack, and remove the datum at metric[key,...] from memory
	Expire                   // Set the expiry duration of a datum, perfoming the same as del but after the expiry time passes.

	// Floating point ops
	Fadd
	Fsub
	Fmul
	Fdiv
	Fmod
	Fpow
	Fset // Floating point assignment

	Getfilename // Push input.Filename onto the stack.

	// Conversions
	I2f // int to float
	S2i // string to int
	S2f // string to float
	I2s // int to string
	F2s // float to string

	// Typed comparisons, behave the same as cmp but do no conversion.
	Icmp // integer compare
	Fcmp // floating point compare
	Scmp // string compare

	// String opcodes
	Subst

	lastOpcode
)
// opNames maps each Opcode to its assembler mnemonic, used by Opcode.String
// for disassembly and debug output.
//
// Bug fix: entries for Dec and Expire were missing, so those opcodes
// stringified as "". (Bad and lastOpcode are deliberately absent.)
var opNames = map[Opcode]string{
	Stop:        "stop",
	Match:       "match",
	Smatch:      "smatch",
	Cmp:         "cmp",
	Jnm:         "jnm",
	Jm:          "jm",
	Jmp:         "jmp",
	Inc:         "inc",
	Dec:         "dec",
	Strptime:    "strptime",
	Timestamp:   "timestamp",
	Settime:     "settime",
	Push:        "push",
	Capref:      "capref",
	Str:         "str",
	Sset:        "sset",
	Iset:        "iset",
	Iadd:        "iadd",
	Isub:        "isub",
	Imul:        "imul",
	Idiv:        "idiv",
	Imod:        "imod",
	Ipow:        "ipow",
	Shl:         "shl",
	Shr:         "shr",
	And:         "and",
	Or:          "or",
	Xor:         "xor",
	Not:         "not",
	Neg:         "neg",
	Mload:       "mload",
	Dload:       "dload",
	Iget:        "iget",
	Fget:        "fget",
	Sget:        "sget",
	Tolower:     "tolower",
	Length:      "length",
	Cat:         "cat",
	Setmatched:  "setmatched",
	Otherwise:   "otherwise",
	Del:         "del",
	Expire:      "expire",
	Fadd:        "fadd",
	Fsub:        "fsub",
	Fmul:        "fmul",
	Fdiv:        "fdiv",
	Fmod:        "fmod",
	Fpow:        "fpow",
	Fset:        "fset",
	Getfilename: "getfilename",
	I2f:         "i2f",
	S2i:         "s2i",
	S2f:         "s2f",
	I2s:         "i2s",
	F2s:         "f2s",
	Icmp:        "icmp",
	Fcmp:        "fcmp",
	Scmp:        "scmp",
	Subst:       "subst",
}
// String returns the assembler mnemonic for the opcode, or the empty string
// for opcodes missing from opNames.
func (o Opcode) String() string {
	return opNames[o]
}
package main
// RuneGrid contains the rendered text UI: a width×height matrix of runes
// stored row-major in cells (cells[y][x]).
type RuneGrid struct {
	width  int
	height int
	cells  [][]rune
}

// NewRuneGrid constructs a RuneGrid of the given dimensions with every cell
// initialised to the zero rune.
func NewRuneGrid(width, height int) RuneGrid {
	cells := make([][]rune, height)
	for row := range cells {
		cells[row] = make([]rune, width)
	}
	return RuneGrid{
		width:  width,
		height: height,
		cells:  cells,
	}
}
// RenderEditor renders the entire editor window to the grid, drawing the
// optional outer border first and then the current pane inside it.
func (grid *RuneGrid) RenderEditor(editor *Editor) {
	left, top := 0, 0
	right, bottom := grid.width-1, grid.height-1
	settings := editor.Settings()
	if settings.Borders && settings.OuterBorder {
		grid.DrawBox(left, top, right, bottom, '═', '║', '╔', '╗', '╚', '╝')
		// Shrink the drawable area to the interior of the border.
		left++
		top++
		right--
		bottom--
	}
	if editor.CurrentPane() == nil {
		return
	}
	grid.RenderPane(editor, left, top, right, bottom, editor.CurrentPane())
}
// RenderPane renders the Pane and its contents into the rectangle
// (x1,y1)-(x2,y2), scrolling first so the cursor stays visible.
func (grid *RuneGrid) RenderPane(editor *Editor, x1, y1, x2, y2 int, pane *Pane) {
	buffer := pane.Buffer()
	if buffer == nil {
		return
	}
	settings := editor.Settings()
	UpdateTopLine(settings, pane, y2-y1)
	grid.RenderBuffer(settings, x1, y1, x2, y2, buffer, pane.TopLine())
}
// UpdateTopLine sets the given Pane's TopLine based on the cursor position.
//
// Three cases, checked in order:
//  1. The two scroll-offset margins overlap (pane too short): centre the cursor.
//  2. Cursor within ScrollOffset lines of the top edge: scroll up.
//  3. Cursor within ScrollOffset lines of the bottom edge: scroll down.
// Otherwise TopLine is left unchanged.
// [TODO]: Move this into editor module and run it when resize event occurs or cursor is moved. - 2014-10-19 03:09pm
func UpdateTopLine(
	settings *Settings,
	pane *Pane,
	visibleHeight int,
) {
	_, line := pane.Cursor().Position()
	// Margins overlap: keep the cursor line centred, clamped to line 1.
	if settings.ScrollOffset*2 > visibleHeight {
		newLine := line - visibleHeight/2
		if newLine < 1 {
			newLine = 1
		}
		pane.SetTopLine(newLine)
		return
	}
	// Cursor too close to the top edge: scroll up.
	if pane.TopLine() > (line - settings.ScrollOffset) {
		pane.SetTopLine(line - settings.ScrollOffset)
		return
	}
	// Cursor too close to the bottom edge: scroll down.
	bottomLine := pane.TopLine() + visibleHeight
	if bottomLine < (line + settings.ScrollOffset) {
		pane.SetTopLine(line + settings.ScrollOffset - visibleHeight)
		return
	}
}
// RenderBuffer blits the buffer onto the grid.
//
// Buffer lines topLine..topLine+(y2-y1) are drawn into the rectangle
// (x1,y1)-(x2,y2); output is clipped at x2/y2 and tab characters are
// expanded to ShiftWidth spaces.
func (grid *RuneGrid) RenderBuffer(
	settings *Settings,
	x1, y1, x2, y2 int,
	buffer *Buffer,
	topLine int,
) {
	// Buffer lines are 1-indexed; clamp the scroll position.
	if topLine < 1 {
		topLine = 1
	}
	xPos := x1
	yPos := y1
	// [TODO]: Offset render by topline - 2014-10-19 05:20pm
	// NOTE(review): the error from GetLines is discarded — confirm a short
	// read is acceptable here.
	lines, _ := buffer.GetLines(topLine, topLine+y2-y1)
	for _, line := range lines {
		for _, r := range line {
			// Clip anything that would land outside the target rectangle.
			if xPos <= x2 && yPos <= y2 {
				if r == '\t' {
					// Expand tabs to ShiftWidth spaces.
					for i := 0; i < settings.ShiftWidth; i++ {
						grid.SetCell(xPos, yPos, ' ')
						xPos++
					}
				} else {
					grid.SetCell(xPos, yPos, rune(r))
					xPos++
				}
			}
		}
		// Move to the start of the next row.
		yPos++
		xPos = x1
	}
}
// SetCell writes r into cell (x, y); out-of-range coordinates are ignored.
func (grid *RuneGrid) SetCell(x, y int, r rune) {
	if grid.IsCellValid(x, y) {
		grid.cells[y][x] = r
	}
}
// IsCellValid reports whether (x, y) addresses a cell inside the grid.
//
// Bug fix: negative coordinates were previously accepted, which let SetCell
// index the backing slices out of range and panic.
func (grid *RuneGrid) IsCellValid(x, y int) bool {
	return x >= 0 && y >= 0 && x < grid.width && y < grid.height
}
// Size returns the width and height of the grid.
func (grid *RuneGrid) Size() (int, int) {
	return grid.width, grid.height
}

// Cells gets the cells of the grid. The returned matrix is the grid's
// backing storage, not a copy: mutations are visible to the grid.
func (grid *RuneGrid) Cells() [][]rune {
	return grid.cells
}
// DrawBox draws the rectangle (x1,y1)-(x2,y2) onto the grid.
//
// With no extra runes every edge and corner uses r. With one extra rune the
// vertical edges use rExtra[0]. With five extra runes the vertical edges use
// rExtra[0] and the corners use rExtra[1..4] in the order top-left,
// top-right, bottom-left, bottom-right. Any other count panics.
//
// Cleanup: removed six dead `var _ = …` statements that only suppressed
// unused-variable errors no longer possible (all six runes are used below).
func (grid *RuneGrid) DrawBox(x1, y1, x2, y2 int, r rune, rExtra ...rune) {
	if len(rExtra) != 0 && len(rExtra) != 1 && len(rExtra) != 5 {
		panic("rExtra must be 0, 1 or 5 arguments")
	}
	horizontal := r
	vertical := r
	topLeft := r
	topRight := r
	bottomLeft := r
	bottomRight := r
	if len(rExtra) > 0 {
		vertical = rExtra[0]
	}
	if len(rExtra) == 5 {
		topLeft = rExtra[1]
		topRight = rExtra[2]
		bottomLeft = rExtra[3]
		bottomRight = rExtra[4]
	}
	grid.DrawHorizontalLine(x1+1, x2-1, y1, horizontal)
	grid.DrawHorizontalLine(x1+1, x2-1, y2, horizontal)
	grid.DrawVerticalLine(x1, y1+1, y2-1, vertical)
	grid.DrawVerticalLine(x2, y1+1, y2-1, vertical)
	grid.SetCell(x1, y1, topLeft)
	grid.SetCell(x2, y1, topRight)
	grid.SetCell(x1, y2, bottomLeft)
	grid.SetCell(x2, y2, bottomRight)
}
// DrawHorizontalLine fills row y from column x1 through x2 (inclusive) with r.
func (grid *RuneGrid) DrawHorizontalLine(x1, x2, y int, r rune) {
	for col := x1; col <= x2; col++ {
		grid.SetCell(col, y, r)
	}
}
// DrawVerticalLine draws a vertical line
func (grid *RuneGrid) DrawVerticalLine(x, y1, y2 int, r rune) {
for y := y1; y <= y2; y++ {
grid.SetCell(x, y, r)
}
} | runegrid.go | 0.64646 | 0.542924 | runegrid.go | starcoder |
package stdlib
import (
"fmt"
"github.com/d5/tengo"
"github.com/d5/tengo/objects"
)
// FuncAR transforms a function of 'func()' signature
// into CallableFunc type.
func FuncAR(fn func()) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		fn()
		return objects.UndefinedValue, nil
	}
}

// FuncARI transforms a function of 'func() int' signature
// into CallableFunc type.
func FuncARI(fn func() int) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		return &objects.Int{Value: int64(fn())}, nil
	}
}

// FuncARI64 transforms a function of 'func() int64' signature
// into CallableFunc type.
func FuncARI64(fn func() int64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		return &objects.Int{Value: fn()}, nil
	}
}

// FuncAI64RI64 transforms a function of 'func(int64) int64' signature
// into CallableFunc type.
func FuncAI64RI64(fn func(int64) int64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		return &objects.Int{Value: fn(i1)}, nil
	}
}

// FuncAI64R transforms a function of 'func(int64)' signature
// into CallableFunc type.
func FuncAI64R(fn func(int64)) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		fn(i1)
		return objects.UndefinedValue, nil
	}
}
// FuncARB transforms a function of 'func() bool' signature
// into CallableFunc type.
func FuncARB(fn func() bool) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		if fn() {
			return objects.TrueValue, nil
		}
		return objects.FalseValue, nil
	}
}

// FuncARE transforms a function of 'func() error' signature
// into CallableFunc type.
func FuncARE(fn func() error) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		return wrapError(fn()), nil
	}
}

// FuncARS transforms a function of 'func() string' signature
// into CallableFunc type.
func FuncARS(fn func() string) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		s := fn()
		// Enforce the runtime string-length limit.
		if len(s) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: s}, nil
	}
}

// FuncARSE transforms a function of 'func() (string, error)' signature
// into CallableFunc type.
func FuncARSE(fn func() (string, error)) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		res, err := fn()
		if err != nil {
			return wrapError(err), nil
		}
		if len(res) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: res}, nil
	}
}

// FuncARYE transforms a function of 'func() ([]byte, error)' signature
// into CallableFunc type.
func FuncARYE(fn func() ([]byte, error)) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		res, err := fn()
		if err != nil {
			return wrapError(err), nil
		}
		// Enforce the runtime bytes-length limit.
		if len(res) > tengo.MaxBytesLen {
			return nil, objects.ErrBytesLimit
		}
		return &objects.Bytes{Value: res}, nil
	}
}

// FuncARF transforms a function of 'func() float64' signature
// into CallableFunc type.
func FuncARF(fn func() float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		return &objects.Float{Value: fn()}, nil
	}
}
// FuncARSs transforms a function of 'func() []string' signature
// into CallableFunc type.
func FuncARSs(fn func() []string) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		arr := &objects.Array{}
		for _, elem := range fn() {
			// Each element is subject to the string-length limit.
			if len(elem) > tengo.MaxStringLen {
				return nil, objects.ErrStringLimit
			}
			arr.Value = append(arr.Value, &objects.String{Value: elem})
		}
		return arr, nil
	}
}

// FuncARIsE transforms a function of 'func() ([]int, error)' signature
// into CallableFunc type.
func FuncARIsE(fn func() ([]int, error)) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 0 {
			return nil, objects.ErrWrongNumArguments
		}
		res, err := fn()
		if err != nil {
			return wrapError(err), nil
		}
		arr := &objects.Array{}
		for _, v := range res {
			arr.Value = append(arr.Value, &objects.Int{Value: int64(v)})
		}
		return arr, nil
	}
}

// FuncAIRIs transforms a function of 'func(int) []int' signature
// into CallableFunc type.
func FuncAIRIs(fn func(int) []int) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		res := fn(i1)
		arr := &objects.Array{}
		for _, v := range res {
			arr.Value = append(arr.Value, &objects.Int{Value: int64(v)})
		}
		return arr, nil
	}
}
// FuncAFRF transforms a function of 'func(float64) float64' signature
// into CallableFunc type.
func FuncAFRF(fn func(float64) float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		f1, ok := objects.ToFloat64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "float(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		return &objects.Float{Value: fn(f1)}, nil
	}
}

// FuncAIR transforms a function of 'func(int)' signature
// into CallableFunc type.
func FuncAIR(fn func(int)) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		fn(i1)
		return objects.UndefinedValue, nil
	}
}

// FuncAIRF transforms a function of 'func(int) float64' signature
// into CallableFunc type.
func FuncAIRF(fn func(int) float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		return &objects.Float{Value: fn(i1)}, nil
	}
}

// FuncAFRI transforms a function of 'func(float64) int' signature
// into CallableFunc type.
func FuncAFRI(fn func(float64) int) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		f1, ok := objects.ToFloat64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "float(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		return &objects.Int{Value: int64(fn(f1))}, nil
	}
}
// FuncAFFRF transforms a function of 'func(float64, float64) float64' signature
// into CallableFunc type.
func FuncAFFRF(fn func(float64, float64) float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		f1, ok := objects.ToFloat64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "float(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		f2, ok := objects.ToFloat64(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "float(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return &objects.Float{Value: fn(f1, f2)}, nil
	}
}

// FuncAIFRF transforms a function of 'func(int, float64) float64' signature
// into CallableFunc type.
func FuncAIFRF(fn func(int, float64) float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		f2, ok := objects.ToFloat64(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "float(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return &objects.Float{Value: fn(i1, f2)}, nil
	}
}

// FuncAFIRF transforms a function of 'func(float64, int) float64' signature
// into CallableFunc type.
func FuncAFIRF(fn func(float64, int) float64) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		f1, ok := objects.ToFloat64(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "float(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		i2, ok := objects.ToInt(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "int(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return &objects.Float{Value: fn(f1, i2)}, nil
	}
}
// FuncAFIRB transform a function of 'func(float64, int) bool' signature
// into CallableFunc type.
func FuncAFIRB(fn func(float64, int) bool) objects.CallableFunc {
return func(args ...objects.Object) (ret objects.Object, err error) {
if len(args) != 2 {
return nil, objects.ErrWrongNumArguments
}
f1, ok := objects.ToFloat64(args[0])
if !ok {
return nil, objects.ErrInvalidArgumentType{
Name: "first",
Expected: "float(compatible)",
Found: args[0].TypeName(),
}
}
i2, ok := objects.ToInt(args[1])
if !ok {
return nil, objects.ErrInvalidArgumentType{
Name: "second",
Expected: "int(compatible)",
Found: args[1].TypeName(),
}
}
if fn(f1, i2) {
return objects.TrueValue, nil
}
return objects.FalseValue, nil
}
}
// FuncAFRB transform a function of 'func(float64) bool' signature
// into CallableFunc type.
func FuncAFRB(fn func(float64) bool) objects.CallableFunc {
return func(args ...objects.Object) (ret objects.Object, err error) {
if len(args) != 1 {
return nil, objects.ErrWrongNumArguments
}
f1, ok := objects.ToFloat64(args[0])
if !ok {
return nil, objects.ErrInvalidArgumentType{
Name: "first",
Expected: "float(compatible)",
Found: args[0].TypeName(),
}
}
if fn(f1) {
return objects.TrueValue, nil
}
return objects.FalseValue, nil
}
}
// FuncASRS transform a function of 'func(string) string' signature into CallableFunc type.
// The result string is length-checked against the runtime string limit.
func FuncASRS(fn func(string) string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		s := fn(s1)
		// Refuse to build a string larger than the runtime permits.
		if len(s) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: s}, nil
	}
}
// FuncASRSs adapts a native function with signature 'func(string) []string'
// into a tengo CallableFunc. Each returned element is length-checked against
// the runtime string limit before conversion.
func FuncASRSs(fn func(string) []string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		s, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		out := &objects.Array{}
		for _, item := range fn(s) {
			if len(item) > tengo.MaxStringLen {
				return nil, objects.ErrStringLimit
			}
			out.Value = append(out.Value, &objects.String{Value: item})
		}
		return out, nil
	}
}
// FuncASRSE transform a function of 'func(string) (string, error)' signature into CallableFunc type.
// A non-nil native error is returned wrapped as a tengo error object rather
// than as a Go error.
func FuncASRSE(fn func(string) (string, error)) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		res, err := fn(s1)
		if err != nil {
			return wrapError(err), nil
		}
		// Refuse to build a string larger than the runtime permits.
		if len(res) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: res}, nil
	}
}
// FuncASRE adapts a native function with signature 'func(string) error' into a
// tengo CallableFunc. The call evaluates to tengo 'true' when the native
// function returns a nil error, otherwise to a wrapped error object.
func FuncASRE(fn func(string) error) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		s, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		return wrapError(fn(s)), nil
	}
}
// FuncASSRE adapts a native function with signature 'func(string, string) error'
// into a tengo CallableFunc. The call evaluates to tengo 'true' when the native
// function returns a nil error, otherwise to a wrapped error object.
func FuncASSRE(fn func(string, string) error) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		a, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		b, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return wrapError(fn(a, b)), nil
	}
}
// FuncASSRSs transform a function of 'func(string, string) []string' signature into CallableFunc type.
// Each element of the returned slice is length-checked against the runtime
// string limit before conversion.
func FuncASSRSs(fn func(string, string) []string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		s2, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				// Fixed: this error previously mis-reported the second
				// argument's name as "first".
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		arr := &objects.Array{}
		for _, res := range fn(s1, s2) {
			if len(res) > tengo.MaxStringLen {
				return nil, objects.ErrStringLimit
			}
			arr.Value = append(arr.Value, &objects.String{Value: res})
		}
		return arr, nil
	}
}
// FuncASSIRSs adapts a native function with signature
// 'func(string, string, int) []string' into a tengo CallableFunc. Each
// returned element is length-checked against the runtime string limit.
func FuncASSIRSs(fn func(string, string, int) []string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 3 {
			return nil, objects.ErrWrongNumArguments
		}
		a, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		b, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		n, ok := objects.ToInt(args[2])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "third",
				Expected: "int(compatible)",
				Found:    args[2].TypeName(),
			}
		}
		out := &objects.Array{}
		for _, item := range fn(a, b, n) {
			if len(item) > tengo.MaxStringLen {
				return nil, objects.ErrStringLimit
			}
			out.Value = append(out.Value, &objects.String{Value: item})
		}
		return out, nil
	}
}
// FuncASSRI transform a function of 'func(string, string) int' signature into CallableFunc type.
func FuncASSRI(fn func(string, string) int) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		s2, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				// Fixed: this error previously reported the type of args[0]
				// instead of the offending second argument.
				Found:    args[1].TypeName(),
			}
		}
		return &objects.Int{Value: int64(fn(s1, s2))}, nil
	}
}
// FuncASSRS adapts a native function with signature 'func(string, string) string'
// into a tengo CallableFunc. The result is length-checked against the runtime
// string limit.
func FuncASSRS(fn func(string, string) string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		a, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		b, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		result := fn(a, b)
		if len(result) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: result}, nil
	}
}
// FuncASSRB adapts a native function with signature 'func(string, string) bool'
// into a tengo CallableFunc that validates both arguments before delegating.
func FuncASSRB(fn func(string, string) bool) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		a, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		b, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		if !fn(a, b) {
			return objects.FalseValue, nil
		}
		return objects.TrueValue, nil
	}
}
// FuncASsSRS adapts a native function with signature 'func([]string, string) string'
// into a tengo CallableFunc. The first argument may be either a mutable or an
// immutable array whose elements are all string-convertible; the result is
// length-checked against the runtime string limit.
func FuncASsSRS(fn func([]string, string) string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		// Both array flavors expose the same underlying []objects.Object, so
		// extract it once and share the element-conversion loop.
		var elems []objects.Object
		switch arg0 := args[0].(type) {
		case *objects.Array:
			elems = arg0.Value
		case *objects.ImmutableArray:
			elems = arg0.Value
		default:
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "array",
				Found:    args[0].TypeName(),
			}
		}
		var ss1 []string
		for idx, a := range elems {
			as, ok := objects.ToString(a)
			if !ok {
				return nil, objects.ErrInvalidArgumentType{
					Name:     fmt.Sprintf("first[%d]", idx),
					Expected: "string(compatible)",
					Found:    a.TypeName(),
				}
			}
			ss1 = append(ss1, as)
		}
		s2, ok := objects.ToString(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "string(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		s := fn(ss1, s2)
		if len(s) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: s}, nil
	}
}
// FuncASI64RE transform a function of 'func(string, int64) error' signature
// into CallableFunc type. The call evaluates to tengo 'true' when the native
// function returns a nil error, otherwise to a wrapped error object.
func FuncASI64RE(fn func(string, int64) error) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		// NOTE(review): conversion is ToInt64 but the Expected string reads
		// "int(compatible)" — appears to follow the package-wide convention of
		// describing the tengo-level type; confirm before changing.
		i2, ok := objects.ToInt64(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "int(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return wrapError(fn(s1, i2)), nil
	}
}
// FuncAIIRE transform a function of 'func(int, int) error' signature
// into CallableFunc type. The call evaluates to tengo 'true' when the native
// function returns a nil error, otherwise to a wrapped error object.
func FuncAIIRE(fn func(int, int) error) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		i1, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		i2, ok := objects.ToInt(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "int(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		return wrapError(fn(i1, i2)), nil
	}
}
// FuncASIRS adapts a native function with signature 'func(string, int) string'
// into a tengo CallableFunc. The result is length-checked against the runtime
// string limit.
func FuncASIRS(fn func(string, int) string) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 2 {
			return nil, objects.ErrWrongNumArguments
		}
		str, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		n, ok := objects.ToInt(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "int(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		result := fn(str, n)
		if len(result) > tengo.MaxStringLen {
			return nil, objects.ErrStringLimit
		}
		return &objects.String{Value: result}, nil
	}
}
// FuncASIIRE transform a function of 'func(string, int, int) error' signature
// into CallableFunc type. The call evaluates to tengo 'true' when the native
// function returns a nil error, otherwise to a wrapped error object.
func FuncASIIRE(fn func(string, int, int) error) objects.CallableFunc {
	return func(args ...objects.Object) (ret objects.Object, err error) {
		if len(args) != 3 {
			return nil, objects.ErrWrongNumArguments
		}
		s1, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		i2, ok := objects.ToInt(args[1])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "second",
				Expected: "int(compatible)",
				Found:    args[1].TypeName(),
			}
		}
		i3, ok := objects.ToInt(args[2])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "third",
				Expected: "int(compatible)",
				Found:    args[2].TypeName(),
			}
		}
		return wrapError(fn(s1, i2, i3)), nil
	}
}
// FuncAYRIE adapts a native function with signature 'func([]byte) (int, error)'
// into a tengo CallableFunc. A non-nil native error is returned as a wrapped
// tengo error object.
func FuncAYRIE(fn func([]byte) (int, error)) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		data, ok := objects.ToByteSlice(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "bytes(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		n, err := fn(data)
		if err != nil {
			return wrapError(err), nil
		}
		return &objects.Int{Value: int64(n)}, nil
	}
}
// FuncASRIE adapts a native function with signature 'func(string) (int, error)'
// into a tengo CallableFunc. A non-nil native error is returned as a wrapped
// tengo error object.
func FuncASRIE(fn func(string) (int, error)) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		s, ok := objects.ToString(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "string(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		n, err := fn(s)
		if err != nil {
			return wrapError(err), nil
		}
		return &objects.Int{Value: int64(n)}, nil
	}
}
// FuncAIRSsE adapts a native function with signature 'func(int) ([]string, error)'
// into a tengo CallableFunc. A non-nil native error is returned as a wrapped
// tengo error object; each result element is length-checked against the
// runtime string limit.
func FuncAIRSsE(fn func(int) ([]string, error)) objects.CallableFunc {
	return func(args ...objects.Object) (objects.Object, error) {
		if len(args) != 1 {
			return nil, objects.ErrWrongNumArguments
		}
		n, ok := objects.ToInt(args[0])
		if !ok {
			return nil, objects.ErrInvalidArgumentType{
				Name:     "first",
				Expected: "int(compatible)",
				Found:    args[0].TypeName(),
			}
		}
		items, err := fn(n)
		if err != nil {
			return wrapError(err), nil
		}
		out := &objects.Array{}
		for _, item := range items {
			if len(item) > tengo.MaxStringLen {
				return nil, objects.ErrStringLimit
			}
			out.Value = append(out.Value, &objects.String{Value: item})
		}
		return out, nil
	}
}
// FuncAIRS transform a function of 'func(int) string' signature
// into CallableFunc type.
func FuncAIRS(fn func(int) string) objects.CallableFunc {
return func(args ...objects.Object) (ret objects.Object, err error) {
if len(args) != 1 {
return nil, objects.ErrWrongNumArguments
}
i1, ok := objects.ToInt(args[0])
if !ok {
return nil, objects.ErrInvalidArgumentType{
Name: "first",
Expected: "int(compatible)",
Found: args[0].TypeName(),
}
}
s := fn(i1)
if len(s) > tengo.MaxStringLen {
return nil, objects.ErrStringLimit
}
return &objects.String{Value: s}, nil
}
} | vendor/github.com/d5/tengo/stdlib/func_typedefs.go | 0.662469 | 0.427994 | func_typedefs.go | starcoder |
package main
import (
rl "github.com/gen2brain/raylib-go/raylib"
"math"
)
// RectWorld is a toroidal (edge-wrapping) cellular-automaton grid with
// double-buffered cell state: one buffer holds the current generation and the
// other receives the next generation.
type RectWorld struct {
	width    int32
	height   int32
	stateIdx int32     // index (0 or 1) of the buffer holding the current generation
	states   [2][]bool // two width*height cell buffers, indexed row-major
}
// NewRectWorld allocates a world of the given dimensions with both state
// buffers zeroed (all cells dead) and the first buffer selected as current.
func NewRectWorld(width, height int32) *RectWorld {
	world := &RectWorld{width: width, height: height}
	for i := range world.states {
		world.states[i] = make([]bool, width*height)
	}
	return world
}
// NB_WORKERS is the number of goroutines used to update the world in
// parallel; each worker processes every NB_WORKERS-th row.
const NB_WORKERS = 10
// UpdateRow recomputes the next-generation state of the single cell at (w, h)
// from its neighbour count and current state, writing into the back buffer.
// NOTE(review): despite the name this updates one cell, not a whole row — the
// per-row iteration lives in UpdateAsync.
func (world *RectWorld) UpdateRow(rules Rules, w, h int32) {
	nbNeighbours := world.NeighbourCount(w, h)
	newState := rules.StateEstimate(nbNeighbours, world.StateGet(w, h))
	world.SetNewState(w, h, newState)
}
// UpdateAsync computes the next generation for every NB_WORKERS-th row,
// starting at workerIndex, then signals completion by sending 1 on channel.
func (world *RectWorld) UpdateAsync(rules Rules, workerIndex int32, channel chan int32) {
	for row := workerIndex; row < world.height; row += NB_WORKERS {
		for col := int32(0); col < world.width; col++ {
			world.UpdateRow(rules, col, row)
		}
	}
	channel <- 1
}
// Update advances the world by one generation: NB_WORKERS goroutines each fill
// an interleaved set of rows in the back buffer, and once every worker has
// reported completion the buffers are swapped.
func (world *RectWorld) Update(rules Rules) {
	done := make(chan int32)
	for worker := int32(0); worker < NB_WORKERS; worker++ {
		go world.UpdateAsync(rules, worker, done)
	}
	// Each worker sends exactly one completion signal.
	for finished := int32(0); finished < NB_WORKERS; finished += <-done {
	}
	world.SwapStates()
}
// SetState sets a cell in the CURRENT generation (unlike SetNewState, which
// writes the back buffer). Coordinates are origin-centered: (0, 0) is the
// middle of the world, and out-of-range values wrap around the edges.
func (world *RectWorld) SetState(x, y int32, state bool) {
	halfWidth := world.width / 2
	halfHeight := world.height / 2
	world.setNewStateInternal(x+halfWidth, y+halfHeight, state, world.stateIdx)
}
// Draw renders the world centered on the origin: a red outline around the
// world bounds (inflated by one cell on every side) and one white 1x1
// rectangle per live cell.
func (world *RectWorld) Draw() {
	halfWidth := world.width / 2
	halfHeight := world.height / 2
	// Border rectangle, one cell larger than the world in each direction.
	rec := rl.Rectangle{X: -1 - float32(halfWidth), Y: -1 - float32(halfHeight), Width: float32(world.width) + 2, Height: float32(world.height) + 2}
	rl.DrawRectangleLinesEx(rec, 1, rl.Red)
	for w := int32(0); w < world.width; w++ {
		for h := int32(0); h < world.height; h++ {
			if world.StateGet(w, h) {
				rl.DrawRectangle(w-halfWidth, h-halfHeight, 1, 1, rl.White)
			}
		}
	}
}
// SwapStates flips which of the two buffers is considered the current
// generation (stateIdx toggles between 0 and 1).
func (world *RectWorld) SwapStates() {
	world.stateIdx ^= 1
}
// StateGet reports whether the cell at (x, y) is alive in the current
// generation; coordinates wrap around the world edges.
func (world *RectWorld) StateGet(x, y int32) bool {
	wrappedX := mod(x, world.width)
	wrappedY := mod(y, world.height)
	return world.states[world.stateIdx][wrappedY*world.width+wrappedX]
}
// SetNewState writes a cell's next-generation state into the back buffer; the
// value becomes visible only after SwapStates.
func (world *RectWorld) SetNewState(x, y int32, state bool) {
	world.setNewStateInternal(x, y, state, 1-world.stateIdx)
}
// setNewStateInternal writes a cell into the chosen buffer, wrapping the
// coordinates around the world edges.
func (world *RectWorld) setNewStateInternal(x, y int32, state bool, stateIdx int32) {
	cell := mod(y, world.height)*world.width + mod(x, world.width)
	world.states[stateIdx][cell] = state
}
// EstimateZoom returns the largest uniform scale factor at which the whole
// world fits inside a viewport of the given pixel dimensions.
func (world *RectWorld) EstimateZoom(width int, height int) float32 {
	horizontal := float64(width) / float64(world.width)
	vertical := float64(height) / float64(world.height)
	return float32(math.Min(horizontal, vertical))
}
func (world *RectWorld) NeighbourCount(x, y int32) int32 {
nw := boolToInt(world.StateGet(x-1, y-1))
n := boolToInt(world.StateGet(x, y-1))
ne := boolToInt(world.StateGet(x+1, y-1))
w := boolToInt(world.StateGet(x-1, y))
e := boolToInt(world.StateGet(x+1, y))
sw := boolToInt(world.StateGet(x-1, y+1))
s := boolToInt(world.StateGet(x, y+1))
se := boolToInt(world.StateGet(x+1, y+1))
return nw + n + ne + w + e + sw + s + se
} | rectworld.go | 0.624064 | 0.473414 | rectworld.go | starcoder |
package search
/*
What? Interview coaching from Googlers!
negativespace-22
Dropbox Interview – Design Hit Counter
It starts with a simple question – if you are building a website, how do you count the number of visitors for the past 1 minute?
“Design hit counter” problem has recently been asked by many companies including Dropbox and the question is harder than it seems to be. This week, we’ll uncover all the mysteries of the problem. A couple of topics are discussed including basic data structures design, various optimization, concurrency and distributed counter.
What’s special about this problem?
I always like to tell our readers why we select this question to analyze so that you’ll know exactly whether it’s worth your time to read. As an interviewer, I have a strong preference for questions that are not hard to solve in the simplest case but the discussion can go deeper and deeper by removing/adding specific conditions. And this question is exactly the case.
Also, the question doesn’t come from nowhere, but has real use cases. For many systems today, we need a system to track not only users numbers, but different types of request numbers in real time.
If you haven’t thought about this problem, spend some time working on it before reading following sections.
Simple case
Forget about all the hard problems like concurrency and scalability issue, let’s say we only have a single machine with no concurrent requests, how would you get the number of visitors for the past 1 minute?
Apparently, the simplest solution is to store all the visitors with the timestamps in the database. When someone asks for visitor number of the past minute, we just go over the database and do the filtering and counting. A little bit optimization is to order users by timestamp so that we won’t scan the whole table.
The solution is not efficient as the time complexity is O(N) where N is the number of visitors. If the website has a large volume, the function won’t be able to return the number immediately.
Let’s optimize
A couple of ways to think about this problem. Since the above approach returns not only visitor numbers, but also visitors for the past minute, which is not needed in the question. And this is something we can optimize. From a different angle, we only need numbers for the past minute instead of any time range, which is another area that we can improve potentially. In a nutshell, by removing unnecessary features, we can optimize our solution.
A straightforward idea is to only keep users from the past minute and as time passes by, we keep updating the list and its length. This allows us to get the number instantly. In essence, we reduce the cost of fetching the numbers, but have to keep updating the list.
We can use a queue or linked list to store only users from the past minute. We keep all the element in order and when the last user (the earliest user) has the time more than a minute, just remove it from the list and update the length.
Space optimization
There’s little room to improve the speed as we can return the visitor number in O(1) time. However, storing all the users from the past minute can be costly in terms of space. A simple optimization is to only keep the user timestamp in the list rather than the user object, which can save a lot of space especially when the user object is large.
If we want to further reduce the space usage, what approach would you take?
A good way to think about this is that to improve space complexity, what should we sacrifice? Since we still want to keep the time complexity O(1), one thing we can compromise is accuracy. If we can’t guarantee to return the most accurate number, can we use less space?
Instead of tracking users from the past minute, we can only track users from the past second. By doing this, we know exactly how many visitors are from the last second. To get visitor numbers for the past minute, we keep a queue/linked list of 60 spots representing the past 60 seconds. Each spot stores the visitor number of that second. So every second, we remove the last (the earliest) spot from the list and add a new one with the visitor number of past second. Visitor number of the past minute is the sum of the 60 spots.
The minute count can be off by the requests of the past second. And you can control the trade-off between accuracy and space by adjusting the unit, e.g. you can store users from the past 2 seconds and have 30 spots in the list.
How about concurrent requests?
In production systems, concurrency is the most common problems people face. If there can be multiple users visiting the site simultaneously, does the previous approach still work?
Part of. Apparently, the basic idea still holds. However, when two requests update the list simultaneously, there can be race conditions. It’s possible that the request that updated the list first may not be included eventually.
The most common solution is to use a lock to protect the list. Whenever someone wants to update the list (by either adding new elements or removing the tail), a lock will be placed on the container. After the operation finishes, the list will be unlocked.
This works pretty well when you don’t have a large volume of requests or performance is not a concern. Placing a lock can be costly at some times and when there are too many concurrent requests, the lock may potentially block the system and becomes the performance bottleneck.
Distribute the counter
When a single machine gets too many traffic and performance becomes an issue, it’s the perfect time to think of distributed solution. Distributed system significantly reduces the burden of a single machine by scaling the system to multiple nodes, but at the same time adding complexity.
Let’s say we distribute visit requests to multiple machines equally. I’d like to emphasize the importance of equal distribution first. If particular machines get much more traffic than the rest machines, the system doesn’t get to its full usage and it’s very important to take this into consideration when designing the system. In our case, we can get a hash of users email and distribute by the hash (it’s not a good idea to use email directly as some letter may appear much more frequent than the others).
To count the number, each machine works independently to count its own users from the past minute. When we request the global number, we just need to add all counters together.
Summary
One of the reasons I like this question is that the simplest solution can be a coding question and to solve concurrency and scalability issue, it becomes a system design question. Also, the question itself has a wide usage in production systems.
Again, the solution itself is not the most important thing in the post. What we’re focused on is to illustrate how to analyze the problem. For instance, trade-off is a great concept to be familiar with, and when we try to optimize one area, think about what else should be sacrificed. By thinking like this, it opens up a lot of doors for you.
By the way, if you want to have more guidance from experienced interviewers, you can check Gainlo that allows you to have mock interview with engineers from Google, Facebook etc..
The post is written by Gainlo - a platform that allows you to have mock interviews with employees from Google, Amazon etc..
*/ | search/hit_counter.go | 0.52074 | 0.87251 | hit_counter.go | starcoder |
package util
import (
"fmt"
"io"
)
// Graph models a dense directed graph of string-labeled nodes.
// Graph models a dense directed graph of string-labeled nodes.
type Graph struct {
	verts     map[string]int // vertex name -> vertex number; lazily initialized by V
	vertNames []string       // vertex number -> name
	edges     [][]bool       // adjacency matrix: edges[from][to]
	w         [][]int        // optional edge weights; allocated on first AddEdgeWV
}
// Len returns the number of vertices in the graph, including any created
// implicitly by V or the AddEdge/DelEdge name-based methods.
func (g *Graph) Len() int {
	return len(g.edges)
}
// V returns the vertex number of the vertex with the given name, creating the
// vertex if it does not exist yet. Creation grows both the adjacency matrix
// and (if in use) the weight matrix by one row and one column.
func (g *Graph) V(name string) int {
	if g.verts == nil {
		g.verts = make(map[string]int)
	}
	if v, ok := g.verts[name]; ok {
		return v
	}
	v := len(g.verts)
	g.verts[name] = v
	g.vertNames = append(g.vertNames, name)
	// Grow the adjacency matrix: one extra column per existing row, then a
	// fresh all-false row for the new vertex.
	for i := range g.edges {
		g.edges[i] = append(g.edges[i], false)
	}
	g.edges = append(g.edges, make([]bool, len(g.verts)))
	// Mirror the growth in the weight matrix when weights are in use.
	if g.w != nil {
		for i := range g.w {
			g.w[i] = append(g.w[i], 0)
		}
		g.w = append(g.w, make([]int, len(g.verts)))
	}
	return v
}
// Name returns the name of the vertex with the given number. The number must
// come from a previous V call; out-of-range values panic.
func (g *Graph) Name(v int) string {
	return g.vertNames[v]
}
// Names converts a list of vertex numbers to the corresponding names.
func (g *Graph) Names(vs []int) []string {
	out := make([]string, 0, len(vs))
	for _, v := range vs {
		out = append(out, g.vertNames[v])
	}
	return out
}
// AddEdge adds an edge between from and to (if it didn't exist already), creating the vertices if necessary.
func (g *Graph) AddEdge(from, to string) {
	g.AddEdgeV(g.V(from), g.V(to))
}
// AddEdgeV adds an edge between from and to (if it didn't exist already).
// Both vertex numbers must already exist (come from V).
func (g *Graph) AddEdgeV(fromV, toV int) {
	g.edges[fromV][toV] = true
}
// AddEdgeW adds a weighted edge between from and to, creating the vertices if necessary.
// If an edge already existed, its weight is updated.
func (g *Graph) AddEdgeW(from, to string, w int) {
	g.AddEdgeWV(g.V(from), g.V(to), w)
}
// AddEdgeWV adds a weighted edge between from and to. If an edge already existed, its weight is updated.
func (g *Graph) AddEdgeWV(fromV, toV, w int) {
	g.AddEdgeV(fromV, toV)
	// Lazily allocate the weight matrix the first time a weighted edge is added.
	if g.w == nil {
		g.w = make([][]int, len(g.verts))
		for i := range g.w {
			g.w[i] = make([]int, len(g.verts))
		}
	}
	g.w[fromV][toV] = w
}
// DelEdge removes an edge between from and to (if it existed), creating the vertices if necessary.
func (g *Graph) DelEdge(from, to string) {
	g.DelEdgeV(g.V(from), g.V(to))
}
// DelEdgeV removes an edge between from and to (if it existed).
func (g *Graph) DelEdgeV(fromV, toV int) {
	g.edges[fromV][toV] = false
}
// W returns the weight of an edge between two vertices. The call is valid
// only if your graph does have weights: if no weighted edge was ever added,
// g.w is nil and this panics with an index-out-of-range error.
func (g *Graph) W(fromV, toV int) int {
	return g.w[fromV][toV]
}
// Range calls the callback for each of the graph's vertex names, in vertex
// number order.
func (g *Graph) Range(cb func(name string)) {
	g.RangeV(func(v int) {
		cb(g.vertNames[v])
	})
}
// RangeV calls the callback for each of the graph's vertex numbers, in
// ascending order.
func (g *Graph) RangeV(cb func(v int)) {
	for v := range g.edges {
		cb(v)
	}
}
// NumSucc returns the number of successors of the given vertex, creating the
// vertex if it did not exist.
func (g *Graph) NumSucc(from string) int {
	return g.NumSuccV(g.V(from))
}
// NumSuccV returns the number of successors of the given vertex, i.e. the
// count of outgoing edges in its adjacency row.
func (g *Graph) NumSuccV(fromV int) int {
	count := 0
	for _, present := range g.edges[fromV] {
		if present {
			count++
		}
	}
	return count
}
// RangeSucc calls the callback for each of the given vertex's successors;
// returning false from the callback stops the iteration early.
func (g *Graph) RangeSucc(from string, cb func(to string) bool) {
	g.RangeSuccV(g.V(from), func(toV int) bool { return cb(g.vertNames[toV]) })
}
// RangeSuccV calls the callback for each of the given vertex's successors in
// ascending vertex order; returning false stops the iteration early.
func (g *Graph) RangeSuccV(fromV int, cb func(toV int) bool) {
	for toV, present := range g.edges[fromV] {
		if present && !cb(toV) {
			return
		}
	}
}
// NumPred returns the number of predecessors of the given vertex, creating
// the vertex if it did not exist.
func (g *Graph) NumPred(to string) int {
	return g.NumPredV(g.V(to))
}
// NumPredV returns the number of predecessors of the given vertex, i.e. the
// count of incoming edges in its adjacency column.
func (g *Graph) NumPredV(toV int) int {
	count := 0
	for fromV := range g.edges {
		if g.edges[fromV][toV] {
			count++
		}
	}
	return count
}
// RangePred calls the callback for each of the given vertex's predecessors;
// returning false from the callback stops the iteration early.
func (g *Graph) RangePred(to string, cb func(from string) bool) {
	g.RangePredV(g.V(to), func(fromV int) bool { return cb(g.vertNames[fromV]) })
}
// RangePredV calls the callback for each of the given vertex's predecessors in
// ascending vertex order; returning false stops the iteration early.
func (g *Graph) RangePredV(toV int, cb func(fromV int) bool) {
	for fromV := range g.edges {
		if g.edges[fromV][toV] && !cb(fromV) {
			return
		}
	}
}
// TopoSort returns the graph's vertices in topological order (which must exist).
// NOTE(review): this delegates to TopoSortV, which deletes edges as it runs,
// so the graph's edge set is consumed by the call.
func (g *Graph) TopoSort() []string {
	return g.Names(g.TopoSortV())
}
// TopoSortV returns the graph's vertices in topological order (which must exist).
//
// NOTE(review): this implements Kahn's algorithm destructively — edges are
// deleted from the graph as they are processed, so the edge set is consumed.
// If the graph contains a cycle, the vertices on it are silently omitted from
// the result rather than reported as an error.
func (g *Graph) TopoSortV() []int {
	// Seed the stack with every vertex that has no incoming edges.
	var stack []int
	g.RangeV(func(v int) {
		if g.NumPredV(v) == 0 {
			stack = append(stack, v)
		}
	})
	var order []int
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		order = append(order, n)
		// Remove n's outgoing edges; successors that become source vertices
		// are pushed for processing.
		g.RangeSuccV(n, func(m int) bool {
			g.DelEdgeV(n, m)
			if g.NumPredV(m) == 0 {
				stack = append(stack, m)
			}
			return true
		})
	}
	return order
}
// WriteDOT writes the graph out in GraphViz format. The `nodeAttr` and `edgeAttr` callback
// functions are optional, and can be used to add extra attributes to the node. If the callback
// returns a "label" attribute, it takes precedence over the usual node name / edge weight.
// Vertices are emitted as n0, n1, ... in vertex-number order. Only errors from
// the final closing brace are returned; intermediate write errors are
// currently ignored.
func (g *Graph) WriteDOT(w io.Writer, name string, nodeAttr func(v int) map[string]string, edgeAttr func(fromV, toV int) map[string]string) (err error) {
	fmt.Fprintf(w, "digraph %s {\n", name)
	// Node declarations: default label is the vertex name.
	g.RangeV(func(v int) {
		var attrs map[string]string
		if nodeAttr != nil {
			attrs = nodeAttr(v)
		}
		fmt.Fprintf(w, "  n%d [", v)
		writeAttrs(w, attrs, "label", fmt.Sprintf(`"%s"`, g.Name(v)))
		fmt.Fprintf(w, "];\n")
	})
	// Edge declarations: default label is the edge weight, when weights exist.
	g.RangeV(func(fromV int) {
		g.RangeSuccV(fromV, func(toV int) bool {
			var attrs map[string]string
			if edgeAttr != nil {
				attrs = edgeAttr(fromV, toV)
			}
			fmt.Fprintf(w, "  n%d -> n%d [", fromV, toV)
			if g.w != nil {
				writeAttrs(w, attrs, "label", fmt.Sprintf(`"%d"`, g.W(fromV, toV)))
			} else {
				writeAttrs(w, attrs)
			}
			fmt.Fprintf(w, "];\n")
			return true
		})
	})
	_, err = fmt.Fprintln(w, "}")
	return err
}
// writeAttrs writes a comma-separated attribute list to w. Map attributes are
// emitted first — in sorted key order, so that generated DOT output is
// deterministic (Go map iteration order is random) — followed by the extra
// alternating key/value pairs in xattr. An xattr pair is skipped when its key
// is already present in attr, letting callers supply overridable defaults.
func writeAttrs(w io.Writer, attr map[string]string, xattr ...string) error {
	keys := make([]string, 0, len(attr))
	for k := range attr {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	i := 0
	for _, k := range keys {
		if err := writeAttr(w, k, attr[k], i); err != nil {
			return err
		}
		i++
	}
	for x := 0; x+1 < len(xattr); x += 2 {
		if _, ok := attr[xattr[x]]; ok {
			continue
		}
		if err := writeAttr(w, xattr[x], xattr[x+1], i); err != nil {
			return err
		}
		i++
	}
	return nil
}
func writeAttr(w io.Writer, k, v string, i int) error {
if i > 0 {
_, err := fmt.Fprint(w, ",")
if err != nil {
return err
}
}
// TODO: better marshalling
_, err := fmt.Fprintf(w, "%s=%s", k, v)
if err != nil {
return err
}
return nil
} | util/graph.go | 0.822082 | 0.623807 | graph.go | starcoder |
package govaluate
import (
"errors"
"fmt"
)
// isoDateFormat is the complete ISO8601 layout (including nanoseconds) used
// as the default QueryDateFormat.
const isoDateFormat string = "2006-01-02T15:04:05.999999999Z0700"

// shortCircuitHolder is a sentinel value; presumably it marks a
// short-circuited evaluation stage — confirm at its use sites.
const shortCircuitHolder int = -1

// DUMMY_PARAMETERS is an empty parameter set. NOTE(review): the ALL_CAPS name
// is non-idiomatic Go, but it is exported so renaming would break callers.
var DUMMY_PARAMETERS = MapParameters(map[string]interface{}{})
/*
	EvaluableExpression represents a set of ExpressionTokens which, taken together,
	are an expression that can be evaluated down into a single value.
*/
type EvaluableExpression struct {

	/*
		Represents the query format used to output dates. Typically only used when creating SQL or Mongo queries from an expression.
		Defaults to the complete ISO8601 format, including nanoseconds.
	*/
	QueryDateFormat string

	/*
		Whether or not to safely check types when evaluating.
		If true, this library will return error messages when invalid types are used.
		If false, the library will panic when operators encounter types they can't use.

		This is exclusively for users who need to squeeze every ounce of speed out of the library as they can,
		and you should only set this to false if you know exactly what you're doing.
	*/
	ChecksTypes bool

	tokens           []ExpressionToken // parsed (and optimized) token stream
	evaluationStages *evaluationStage  // planned evaluation stages built from tokens
	functions        []string          // NOTE(review): not referenced in this file portion — confirm usage
	inputExpression  string            // original expression text as given by the caller
}
/*
	Parses a new EvaluableExpression from the given [expression] string.
	Returns an error if the given expression has invalid syntax.
*/
func NewEvaluableExpression(expression string) (*EvaluableExpression, error) {
	// No user-defined functions: delegate with an empty function table.
	return NewEvaluableExpressionWithFunctions(expression, map[string]ExpressionFunction{})
}
/*
	Similar to [NewEvaluableExpression], except that instead of a string, an already-tokenized expression is given.
	This is useful in cases where you may be generating an expression automatically, or using some other parser (e.g., to parse from a query language)
*/
func NewEvaluableExpressionFromTokens(tokens []ExpressionToken) (*EvaluableExpression, error) {

	expr := &EvaluableExpression{QueryDateFormat: isoDateFormat}

	// Validate the token stream before compiling it into stages.
	if err := checkBalance(tokens); err != nil {
		return nil, err
	}
	if err := checkExpressionSyntax(tokens); err != nil {
		return nil, err
	}

	var err error
	if expr.tokens, err = optimizeTokens(tokens); err != nil {
		return nil, err
	}
	if expr.evaluationStages, err = planStages(expr.tokens); err != nil {
		return nil, err
	}

	expr.ChecksTypes = true
	return expr, nil
}
/*
	Similar to [NewEvaluableExpression], except enables the use of user-defined functions.
	Functions passed into this will be available to the expression.
*/
func NewEvaluableExpressionWithFunctions(expression string, functions map[string]ExpressionFunction) (*EvaluableExpression, error) {

	var ret *EvaluableExpression
	var err error

	ret = new(EvaluableExpression)
	ret.QueryDateFormat = isoDateFormat
	ret.inputExpression = expression

	// NOTE(review): resetting this package-level variable makes construction
	// stateful and unsafe for concurrent use — parseTokens apparently appends
	// the function names it encounters to it, and they are copied into
	// ret.functions below. Confirm before relying on concurrent parsing.
	ExpressionFunctionNames = nil

	ret.tokens, err = parseTokens(expression, functions)
	if err != nil {
		return nil, err
	}
	ret.functions = ExpressionFunctionNames

	// Validate, then compile the token stream into evaluation stages.
	err = checkBalance(ret.tokens)
	if err != nil {
		return nil, err
	}

	err = checkExpressionSyntax(ret.tokens)
	if err != nil {
		return nil, err
	}

	ret.tokens, err = optimizeTokens(ret.tokens)
	if err != nil {
		return nil, err
	}

	ret.evaluationStages, err = planStages(ret.tokens)
	if err != nil {
		return nil, err
	}

	// Type checking defaults to on; callers may opt out via ChecksTypes.
	ret.ChecksTypes = true
	return ret, nil
}
/*
	Same as `Eval`, but automatically wraps a map of parameters into a
	`govaluate.Parameters` structure.
*/
func (this EvaluableExpression) Evaluate(parameters map[string]interface{}) (interface{}, error) {

	var wrapped Parameters
	if parameters != nil {
		wrapped = MapParameters(parameters)
	}
	return this.Eval(wrapped)
}
/*
	Runs the entire expression using the given [parameters].
	e.g., If the expression contains a reference to the variable "foo", it will be taken from `parameters.Get("foo")`.

	This function returns errors if the combination of expression and parameters cannot be run,
	such as if a variable in the expression is not present in [parameters].

	In all non-error circumstances, this returns the single value result of the expression and parameters given.
	e.g., if the expression is "1 + 1", this will return 2.0.
	e.g., if the expression is "foo + 1" and parameters contains "foo" = 2, this will return 3.0
*/
func (this EvaluableExpression) Eval(parameters Parameters) (interface{}, error) {

	// An expression with no compiled stages evaluates to nothing.
	if this.evaluationStages == nil {
		return nil, nil
	}

	// Sanitize caller-supplied parameters; fall back to the empty set.
	if parameters == nil {
		parameters = DUMMY_PARAMETERS
	} else {
		parameters = &sanitizedParameters{parameters}
	}
	return this.evaluateStage(this.evaluationStages, parameters)
}
// evaluateStage recursively evaluates one stage of the compiled plan,
// depth-first: the left branch is evaluated first, short-circuit rules may
// then skip the right branch, types are checked (when ChecksTypes is set),
// and finally the stage's operator is applied to both sides.
func (this EvaluableExpression) evaluateStage(stage *evaluationStage, parameters Parameters) (interface{}, error) {

	var left, right interface{}
	var err error

	if stage.leftStage != nil {
		left, err = this.evaluateStage(stage.leftStage, parameters)
		if err != nil {
			return nil, err
		}
	}

	// Short-circuiting: for some operators the left value alone either
	// decides the final result or forbids evaluating the right branch.
	if stage.isShortCircuitable() {
		switch stage.symbol {
		case AND:
			// false && x is false without evaluating x.
			if left == false {
				return false, nil
			}
		case OR:
			// true || x is true without evaluating x.
			if left == true {
				return true, nil
			}
		case COALESCE:
			// A non-nil left short-circuits the coalesce.
			if left != nil {
				return left, nil
			}

		case TERNARY_TRUE:
			if left == false {
				right = shortCircuitHolder
			}
		case TERNARY_FALSE:
			// NOTE(review): skips the right branch whenever left is non-nil;
			// this mirrors how the ternary stages thread values — confirm
			// against planStages before changing.
			if left != nil {
				right = shortCircuitHolder
			}
		}
	}

	if right != shortCircuitHolder && stage.rightStage != nil {
		right, err = this.evaluateStage(stage.rightStage, parameters)
		if err != nil {
			return nil, err
		}
	}

	if this.ChecksTypes {
		if stage.typeCheck == nil {

			err = typeCheck(stage.leftTypeCheck, left, stage.symbol, stage.typeErrorFormat)
			if err != nil {
				return nil, err
			}

			err = typeCheck(stage.rightTypeCheck, right, stage.symbol, stage.typeErrorFormat)
			if err != nil {
				return nil, err
			}
		} else {
			// special case where the type check needs to know both sides to determine if the operator can handle it
			if !stage.typeCheck(left, right) {
				errorMsg := fmt.Sprintf(stage.typeErrorFormat, left, stage.symbol.String())
				return nil, errors.New(errorMsg)
			}
		}
	}

	return stage.operator(left, right, parameters)
}
// typeCheck applies the given stage type check to value, returning a
// formatted error when the check fails. A nil check always passes.
func typeCheck(check stageTypeCheck, value interface{}, symbol OperatorSymbol, format string) error {

	if check == nil || check(value) {
		return nil
	}
	return errors.New(fmt.Sprintf(format, value, symbol.String()))
}
/*
	Returns the array of ExpressionTokens that make up this expression, in
	their optimized (post-parse) order.
*/
func (this EvaluableExpression) Tokens() []ExpressionToken {

	return this.tokens
}

/*
	Returns the original expression string used to create this EvaluableExpression.
*/
func (this EvaluableExpression) String() string {

	return this.inputExpression
}
/*
Returns an array representing the variables contained in this EvaluableExpression.
*/
func (this EvaluableExpression) Vars() []string {
var varlist []string
for _, val := range this.Tokens() {
if val.Kind == VARIABLE {
varlist = append(varlist, val.Value.(string))
}
}
return varlist
} | EvaluableExpression.go | 0.723114 | 0.542863 | EvaluableExpression.go | starcoder |
package shape
// Aligner aligns multiple shapes relative to the first shape given.
type Aligner struct{}

// HAlignCenter aligns shape[1:] to shape[0] center coordinates horizontally.
func (Aligner) HAlignCenter(shapes ...Shape) { hAlign(Center, shapes...) }

// HAlignTop aligns shape[1:] to shape[0] top coordinates horizontally.
func (Aligner) HAlignTop(shapes ...Shape) { hAlign(Top, shapes...) }

// HAlignBottom aligns shape[1:] to shape[0] bottom coordinates horizontally.
func (Aligner) HAlignBottom(shapes ...Shape) { hAlign(Bottom, shapes...) }
// hAlign adjusts the y coordinate of every shape in objects[1:] so that its
// top, bottom or vertical center lines up with the first shape, which acts
// as the anchor and is never moved.
func hAlign(adjust Alignment, objects ...Shape) {
	first := objects[0]
	_, y := first.Position()
	for _, shape := range objects[1:] {
		switch adjust {
		case Top:
			shape.SetY(y)
		case Bottom:
			shape.SetY(y + first.Height() - shape.Height())
		case Center:
			firstHigher := first.Height() > shape.Height()
			// Half the height difference centers the smaller shape.
			diff := intAbs(first.Height()-shape.Height()) / 2
			if shape, ok := shape.(*Label); ok {
				// labels are drawn from bottom left corner
				if firstHigher {
					diff += shape.Height()
				} else {
					diff -= shape.Height()
				}
			}
			// Move down when the anchor is taller, up when it is shorter.
			switch {
			case firstHigher:
				shape.SetY(y + diff)
			case !firstHigher:
				shape.SetY(y - diff)
			}
		}
	}
}
// VAlignCenter aligns shape[1:] to shape[0] center coordinates vertically.
func (Aligner) VAlignCenter(shapes ...Shape) { vAlign(Center, shapes...) }

// VAlignLeft aligns shape[1:] to shape[0] left coordinates vertically.
func (Aligner) VAlignLeft(shapes ...Shape) { vAlign(Left, shapes...) }

// VAlignRight aligns shape[1:] to shape[0] right coordinates vertically.
func (Aligner) VAlignRight(shapes ...Shape) { vAlign(Right, shapes...) }
// vAlign adjusts the x coordinate of every shape in objects[1:] so that its
// left, right or horizontal center lines up with the first shape, which acts
// as the anchor and is never moved.
func vAlign(adjust Alignment, objects ...Shape) {
	anchor := objects[0]
	x, _ := anchor.Position()
	for _, s := range objects[1:] {
		switch adjust {
		case Left:
			s.SetX(x)
		case Right:
			s.SetX(x + anchor.Width() - s.Width())
		case Center:
			offset := (anchor.Width() - s.Width()) / 2
			if anchor.Direction() == RL {
				// Right-to-left shapes grow leftwards from the anchor.
				offset = -(anchor.Width() + s.Width()) / 2
			}
			s.SetX(x + offset)
		}
	}
}
// Alignment selects which edge (or the center) shapes are aligned to.
type Alignment int

const (
	Top Alignment = iota
	Left
	Right
	Bottom
	Center
)

// Direction describes the orientation or flow of a shape.
type Direction int

const (
	Horizontal Direction = iota
	Vertical
	LR // left-to-right
	RL // right-to-left
)
package hierarchy
import (
"fmt"
math "github.com/barnex/fmath"
"github.com/go-gl/gl"
"github.com/ungerik/go3d/vec3"
)
// Pose state for the hierarchical crane model. The ang* values appear to be
// angles in degrees (see the clamp bounds used by the adj* functions —
// TODO confirm the units matrixStack's Rotate* methods expect); pos* values
// are translations and len*/width*/size* are segment dimensions.
var (
	posBase            = vec3.T{3.0, -5.0, -40.0}
	angBase    float32 = -45.0

	posBaseLeft          = vec3.T{2.0, 0.0, 0.0}
	posBaseRight         = vec3.T{-2.0, 0.0, 0.0}
	scaleBaseZ   float32 = 3.0

	angUpperArm  float32 = -33.75
	sizeUpperArm float32 = 9.0

	posLowerArm           = vec3.T{0.0, 0.0, 8.0}
	angLowerArm   float32 = 146.25
	lenLowerArm   float32 = 5.0
	widthLowerArm float32 = 1.5

	posWrist              = vec3.T{0.0, 0.0, 5.0}
	angWristRoll  float32 = 0.0
	angWristPitch float32 = 67.5
	lenWrist      float32 = 2.0
	widthWrist    float32 = 2.0

	posLeftFinger          = vec3.T{1.0, 0.0, 1.0}
	posRightFinger         = vec3.T{-1.0, 0.0, 1.0}
	angFingerOpen  float32 = 180.0
	lenFinger      float32 = 2.0
	widthFinger    float32 = 0.5
	angLowerFinger float32 = 45.0
)
// degToRad converts an angle expressed in degrees to radians.
func degToRad(degrees float32) (radians float32) {
	radians = degrees * math.Pi / 180.0
	return
}
// drawNode uploads the top of the matrix stack as the model-to-camera matrix
// and draws the shared mesh with it.
func drawNode(matStack *matrixStack) {
	flatMat := flattenMatrix(matStack.mat4())
	modelToCameraMatrixUnif.UniformMatrix4f(false, flatMat)
	gl.DrawElements(gl.TRIANGLES, len(indexData), gl.UNSIGNED_SHORT, uintptr(0))
}

// draw renders the whole crane: the two base blocks, then the arm hierarchy.
func draw() {
	matStack := NewMatrixStack()
	matStack.Translate(&posBase)
	// NOTE(review): angBase is passed raw here while drawLowerArm/drawWrist
	// wrap their angles in degToRad — confirm which unit RotateY expects.
	matStack.RotateY(angBase)

	// Draw left base
	{
		matStack.push()
		matStack.Translate(&posBaseLeft)
		matStack.ScaleVec3(&vec3.T{1.0, 1.0, scaleBaseZ})
		drawNode(matStack)
		matStack.pop()
	}

	// Draw right base
	{
		matStack.push()
		matStack.Translate(&posBaseRight)
		matStack.ScaleVec3(&vec3.T{1.0, 1.0, scaleBaseZ})
		drawNode(matStack)
		matStack.pop()
	}

	drawUpperArm(matStack)
}

// drawUpperArm draws the upper arm segment, then recurses into the lower arm.
func drawUpperArm(matStack *matrixStack) {
	matStack.push()
	// NOTE(review): angUpperArm is not converted with degToRad here, unlike
	// the sibling draw* functions below — verify this is intentional.
	matStack.RotateX(angUpperArm)

	{
		matStack.push()
		matStack.Translate(&vec3.T{0, 0, (sizeUpperArm / 2.0) - 1.0})
		matStack.ScaleVec3(&vec3.T{1, 1, sizeUpperArm / 2.0})
		drawNode(matStack)
		matStack.pop()
	}

	drawLowerArm(matStack)
	matStack.pop()
}

// drawLowerArm draws the lower arm segment, then recurses into the wrist.
func drawLowerArm(matStack *matrixStack) {
	matStack.push()
	matStack.Translate(&posLowerArm)
	matStack.RotateX(degToRad(angLowerArm))

	matStack.push()
	matStack.Translate(&vec3.T{0, 0, lenLowerArm / 2.0})
	matStack.ScaleVec3(&vec3.T{widthLowerArm / 2.0, widthLowerArm / 2.0, lenLowerArm / 2.0})
	drawNode(matStack)
	matStack.pop()

	drawWrist(matStack)
	matStack.pop()
}

// drawWrist applies the wrist roll and pitch, draws the wrist block, then
// recurses into the fingers.
func drawWrist(matStack *matrixStack) {
	matStack.push()
	matStack.Translate(&posWrist)
	matStack.RotateZ(degToRad(angWristRoll))
	matStack.RotateX(degToRad(angWristPitch))

	matStack.push()
	matStack.ScaleVec3(&vec3.T{widthWrist / 2.0, widthWrist / 2.0, lenWrist / 2.0})
	drawNode(matStack)
	matStack.pop()

	drawFingers(matStack)
	matStack.pop()
}
// drawFingers draws both fingers of the gripper. The left and right fingers
// are mirror images of each other: the opening angle and the lower-knuckle
// bend are simply negated, so the shared geometry lives in drawFinger.
func drawFingers(matStack *matrixStack) {
	// Left finger opens with +angFingerOpen and bends with -angLowerFinger.
	drawFinger(matStack, &posLeftFinger, degToRad(angFingerOpen), degToRad(-angLowerFinger))
	// Right finger is the mirror image.
	drawFinger(matStack, &posRightFinger, degToRad(-angFingerOpen), degToRad(angLowerFinger))
}

// drawFinger draws one finger: an upper segment rotated by openAng around Y
// at pos, and a lower segment attached at the upper segment's tip, bent by
// lowerAng. Angles are in radians.
func drawFinger(matStack *matrixStack, pos *vec3.T, openAng, lowerAng float32) {
	matStack.push()
	matStack.Translate(pos)
	matStack.RotateY(openAng)

	// Upper finger segment.
	matStack.push()
	matStack.Translate(&vec3.T{0, 0, lenFinger / 2.0})
	matStack.ScaleVec3(&vec3.T{widthFinger / 2.0, widthFinger / 2.0, lenFinger / 2.0})
	drawNode(matStack)
	matStack.pop()

	// Lower finger segment, hinged at the tip of the upper segment.
	matStack.push()
	matStack.Translate(&vec3.T{0, 0, lenFinger})
	matStack.RotateY(lowerAng)

	matStack.push()
	matStack.Translate(&vec3.T{0, 0, lenFinger / 2.0})
	matStack.ScaleVec3(&vec3.T{widthFinger / 2.0, widthFinger / 2.0, lenFinger / 2.0})
	drawNode(matStack)
	matStack.pop()

	matStack.pop()
	matStack.pop()
}
// NOTE(review): every adjuster below passes the stored angle through
// degToRad before clamping/wrapping and writes the RESULT back into the
// degree-valued package variable. That converts the stored angle from
// degrees to radians in place on every key press, which looks like a unit
// bug (compare with drawLowerArm, which converts at draw time). Confirm the
// intended units before changing.

// adjBase rotates the whole crane around Y by one standard increment,
// wrapping the angle to a full turn.
func adjBase(increment bool) {
	if increment {
		angBase += stdAngleInc
	} else {
		angBase -= stdAngleInc
	}
	angBase = math.Mod(degToRad(angBase), degToRad(360.0))
}

// adjUpperArm pitches the upper arm by one standard increment, clamped.
func adjUpperArm(increment bool) {
	if increment {
		angUpperArm += stdAngleInc
	} else {
		angUpperArm -= stdAngleInc
	}
	angUpperArm = clamp(degToRad(angUpperArm), degToRad(-90.0), 0.0)
}

// adjLowerArm bends the lower arm by one standard increment, clamped.
func adjLowerArm(increment bool) {
	if increment {
		angLowerArm += stdAngleInc
	} else {
		angLowerArm -= stdAngleInc
	}
	angLowerArm = clamp(degToRad(angLowerArm), 0.0, degToRad(146.25))
}

// adjWristPitch pitches the wrist by one standard increment, clamped.
func adjWristPitch(increment bool) {
	if increment {
		angWristPitch += stdAngleInc
	} else {
		angWristPitch -= stdAngleInc
	}
	angWristPitch = clamp(degToRad(angWristPitch), 0.0, degToRad(90.0))
}

// adjWristRoll rolls the wrist by one standard increment, wrapped.
func adjWristRoll(increment bool) {
	if increment {
		angWristRoll += stdAngleInc
	} else {
		angWristRoll -= stdAngleInc
	}
	angWristRoll = math.Mod(degToRad(angWristRoll), degToRad(360.0))
}

// adjFingerOpen opens/closes the gripper by the finer increment, clamped.
func adjFingerOpen(increment bool) {
	if increment {
		angFingerOpen += smallAngleInc
	} else {
		angFingerOpen -= smallAngleInc
	}
	angFingerOpen = clamp(degToRad(angFingerOpen), degToRad(9.0), degToRad(180.0))
}
const (
	// stdAngleInc is the per-keypress increment used by most joint adjusters.
	stdAngleInc = 11.25
	// smallAngleInc is the finer increment used for the finger opening angle.
	smallAngleInc = 9.0
)
// WritePose prints the current pose angles to stdout, one "name:\tvalue"
// line per joint, followed by a blank line.
func WritePose() {
	pose := []struct {
		name  string
		angle float32
	}{
		{"angBase", angBase},
		{"angUpperArm", angUpperArm},
		{"angLowerArm", angLowerArm},
		{"angWristPitch", angWristPitch},
		{"angWristRoll", angWristRoll},
		{"angFingerOpen", angFingerOpen},
	}
	for _, p := range pose {
		fmt.Printf("%s:\t%f\n", p.name, p.angle)
	}
	fmt.Printf("\n")
}
// clamp limits value to the inclusive range [minValue, maxValue].
func clamp(value, minValue, maxValue float32) float32 {
	switch {
	case value < minValue:
		return minValue
	case value > maxValue:
		return maxValue
	default:
		return value
	}
}
package crypto
import (
"bytes"
"github.com/NebulousLabs/Sia/encoding"
"github.com/NebulousLabs/merkletree"
)
const (
	// SegmentSize is the chunk size that is used when taking the Merkle root
	// of a file. 64 is chosen because bandwidth is scarce and it optimizes for
	// the smallest possible storage proofs. Using a larger base, even 256
	// bytes, would result in substantially faster hashing, but the bandwidth
	// tradeoff was deemed to be more important, as blockchain space is scarce.
	SegmentSize = 64
)

// MerkleTree wraps merkletree.Tree, changing some of the function definitions
// to assume sia-specific constants and return sia-specific types.
type MerkleTree struct {
	merkletree.Tree
}
// NewTree returns an empty MerkleTree, which can be used for getting Merkle
// roots and Merkle proofs on data. See merkletree.Tree for more details.
func NewTree() *MerkleTree {
	inner := merkletree.New(NewHash())
	return &MerkleTree{Tree: *inner}
}
// PushObject encodes and adds the hash of the encoded object to the tree as a
// leaf. Encoding uses the Sia encoding package, so any encodable value works.
func (t *MerkleTree) PushObject(obj interface{}) {
	t.Push(encoding.Marshal(obj))
}
// Root is a redefinition of merkletree.Tree.Root, returning a Hash instead of
// a []byte.
func (t *MerkleTree) Root() Hash {
	var h Hash
	copy(h[:], t.Tree.Root())
	return h
}
// CachedMerkleTree wraps merkletree.CachedTree, changing some of the function
// definitions to assume sia-specific constants and return sia-specific types.
type CachedMerkleTree struct {
	merkletree.CachedTree
}
// NewCachedTree returns a CachedMerkleTree, which can be used for getting
// Merkle roots and proofs from data that has cached subroots. See
// merkletree.CachedTree for more details.
func NewCachedTree(height uint64) *CachedMerkleTree {
	inner := merkletree.NewCachedTree(NewHash(), height)
	return &CachedMerkleTree{CachedTree: *inner}
}
// Prove is a redefinition of merkletree.CachedTree.Prove, so that Sia-specific
// types are used instead of the generic types used by the parent package. The
// base is not a return value because the base is used as input.
func (ct *CachedMerkleTree) Prove(base []byte, cachedHashSet []Hash) []Hash {
	// Assemble the proof set expected by the underlying cached tree: the
	// base element followed by every cached hash.
	cachedProofSet := make([][]byte, len(cachedHashSet)+1)
	cachedProofSet[0] = base
	for i := range cachedHashSet {
		cachedProofSet[i+1] = cachedHashSet[i][:]
	}
	_, proofSet, _, _ := ct.CachedTree.Prove(cachedProofSet)

	// Drop the base from the returned proof set and convert the remaining
	// entries back into Hash values.
	hashSet := make([]Hash, len(proofSet)-1)
	for i := range hashSet {
		copy(hashSet[i][:], proofSet[i+1])
	}
	return hashSet
}
// Push is a redefinition of merkletree.CachedTree.Push, with the added type
// safety of only accepting a hash.
func (ct *CachedMerkleTree) Push(h Hash) {
	ct.CachedTree.Push(h[:])
}
// Root is a redefinition of merkletree.CachedTree.Root, returning a Hash
// instead of a []byte.
func (ct *CachedMerkleTree) Root() Hash {
	var h Hash
	copy(h[:], ct.CachedTree.Root())
	return h
}
// CalculateLeaves calculates the number of leaves that would be pushed from
// data of size 'dataSize'. This is the segment count rounded up, with a
// minimum of one leaf for empty data.
func CalculateLeaves(dataSize uint64) uint64 {
	leaves := dataSize / SegmentSize
	if leaves == 0 || dataSize%SegmentSize != 0 {
		leaves++
	}
	return leaves
}
// MerkleRoot returns the Merkle root of the input data, hashing it in
// SegmentSize chunks.
func MerkleRoot(b []byte) Hash {
	tree := NewTree()
	for remaining := bytes.NewBuffer(b); remaining.Len() > 0; {
		tree.Push(remaining.Next(SegmentSize))
	}
	return tree.Root()
}
// MerkleProof builds a Merkle proof that the data at segment 'proofIndex' is a
// part of the Merkle root formed by 'b'. It returns the base segment along
// with the sibling hashes needed to recompute the root; both are nil when the
// input contains no data.
func MerkleProof(b []byte, proofIndex uint64) (base []byte, hashSet []Hash) {
	// Create the tree.
	t := NewTree()
	t.SetIndex(proofIndex)

	// Fill the tree with the data, one segment per leaf.
	buf := bytes.NewBuffer(b)
	for buf.Len() > 0 {
		t.Push(buf.Next(SegmentSize))
	}

	// Get the proof and convert it to a base + hash set.
	_, proof, _, _ := t.Prove()
	if len(proof) == 0 {
		// There's no proof, because there's no data. Return blank values.
		return nil, nil
	}

	// proof[0] is the base segment itself; the rest are sibling hashes.
	base = proof[0]
	hashSet = make([]Hash, len(proof)-1)
	for i, p := range proof[1:] {
		copy(hashSet[i][:], p)
	}
	return base, hashSet
}
// VerifySegment will verify that a segment, given the proof, is a part of a
// Merkle root.
func VerifySegment(base []byte, hashSet []Hash, numSegments, proofIndex uint64, root Hash) bool {
// convert base and hashSet to proofSet
proofSet := make([][]byte, len(hashSet)+1)
proofSet[0] = base
for i := range hashSet {
proofSet[i+1] = hashSet[i][:]
}
return merkletree.VerifyProof(NewHash(), root[:], proofSet, proofIndex, numSegments)
} | crypto/merkle.go | 0.73029 | 0.515498 | merkle.go | starcoder |
package values
import (
"fmt"
)
// UIntValue is a struct that holds a uint value.
type UIntValue struct {
	// value is the wrapped unsigned integer.
	value uint
}

// IsEqualTo returns true if the value is equal to the expected value, else false.
// The expected value must be an unsigned integer type, or NewUIntValue panics.
func (i UIntValue) IsEqualTo(expected interface{}) bool {
	return i.equals(NewUIntValue(expected))
}

// IsGreaterThan returns true if the value is greater than the expected value, else false.
func (i UIntValue) IsGreaterThan(expected interface{}) bool {
	return i.greaterThan(NewUIntValue(expected))
}

// IsGreaterOrEqualTo returns true if the value is greater than or equal to the expected value, else false.
func (i UIntValue) IsGreaterOrEqualTo(expected interface{}) bool {
	return i.greaterOrEqual(NewUIntValue(expected))
}

// IsLessThan returns true if the value is less than the expected value, else false.
func (i UIntValue) IsLessThan(expected interface{}) bool {
	return !i.IsGreaterOrEqualTo(expected)
}

// IsLessOrEqualTo returns true if the value is less than or equal to the expected value, else false.
func (i UIntValue) IsLessOrEqualTo(expected interface{}) bool {
	return !i.IsGreaterThan(expected)
}

// Value returns the actual value of the structure as a uint.
func (i UIntValue) Value() interface{} {
	return i.value
}

// greaterThan reports i.value > expected.value.
func (i UIntValue) greaterThan(expected UIntValue) bool {
	return i.value > expected.value
}

// greaterOrEqual reports i.value >= expected.value.
func (i UIntValue) greaterOrEqual(expected UIntValue) bool {
	return i.value >= expected.value
}

// equals reports i.value == expected.value.
func (i UIntValue) equals(expected UIntValue) bool {
	return i.value == expected.value
}
// NewUIntValue creates and returns a UIntValue struct initialed with the given value
func NewUIntValue(value interface{}) UIntValue {
switch v := value.(type) {
case uint:
return UIntValue{value: value.(uint)}
case uint8:
return UIntValue{value: uint(value.(uint8))}
case uint16:
return UIntValue{value: uint(value.(uint16))}
case uint32:
return UIntValue{value: uint(value.(uint32))}
case uint64:
return UIntValue{value: uint(value.(uint64))}
default:
panic(fmt.Sprintf("expected uint value type but got %T type", v))
}
} | internal/pkg/values/uint_value.go | 0.826362 | 0.731346 | uint_value.go | starcoder |
package operator
import (
"github.com/matrixorigin/matrixone/pkg/container/types"
"github.com/matrixorigin/matrixone/pkg/container/vector"
"github.com/matrixorigin/matrixone/pkg/vm/process"
"golang.org/x/exp/constraints"
)
// If operator supported format like that
// If(<boolean operator>, <value operator>, <value operator>)
//
// Each IfXxx variable below is the typed evaluator for the IF operator for
// one result type; each delegates to the generic implementation with the
// matching result type parameter.
var (
	IfBool = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[bool](vs, proc, types.Type{Oid: types.T_bool})
	}

	IfUint8 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[uint8](vs, proc, types.Type{Oid: types.T_uint8})
	}

	IfUint16 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[uint16](vs, proc, types.Type{Oid: types.T_uint16})
	}

	IfUint32 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[uint32](vs, proc, types.Type{Oid: types.T_uint32})
	}

	IfUint64 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[uint64](vs, proc, types.Type{Oid: types.T_uint64})
	}

	IfInt8 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[int8](vs, proc, types.Type{Oid: types.T_int8})
	}

	IfInt16 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[int16](vs, proc, types.Type{Oid: types.T_int16})
	}

	IfInt32 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[int32](vs, proc, types.Type{Oid: types.T_int32})
	}

	IfInt64 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[int64](vs, proc, types.Type{Oid: types.T_int64})
	}

	IfFloat32 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[float32](vs, proc, types.Type{Oid: types.T_float32})
	}

	IfFloat64 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[float64](vs, proc, types.Type{Oid: types.T_float64})
	}

	IfDecimal64 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[types.Decimal64](vs, proc, types.Type{Oid: types.T_decimal64})
	}

	IfDecimal128 = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[types.Decimal128](vs, proc, types.Type{Oid: types.T_decimal128})
	}

	IfDate = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[types.Date](vs, proc, types.Type{Oid: types.T_date})
	}

	IfDateTime = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[types.Datetime](vs, proc, types.Type{Oid: types.T_datetime})
	}

	// String-typed results go through the dedicated string implementation.
	IfVarchar = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifForString(vs, proc, types.Type{Oid: types.T_varchar})
	}

	IfChar = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifForString(vs, proc, types.Type{Oid: types.T_char})
	}

	IfTimestamp = func(vs []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
		return ifGeneral[types.Timestamp](vs, proc, types.Type{Oid: types.T_timestamp})
	}
)
// IfTypeCheckFn reports whether an IF invocation is well-typed: exactly three
// inputs, the first being a boolean condition, and both branch types either
// matching the result type or being the untyped T_any.
func IfTypeCheckFn(inputTypes []types.T, _ []types.T, ret types.T) bool {
	if len(inputTypes) != 3 || inputTypes[0] != types.T_bool {
		return false
	}
	for _, branch := range inputTypes[1:] {
		if branch != ret && branch != types.T_any {
			return false
		}
	}
	return true
}
// IfRet is the set of fixed-size result types the generic IF implementation
// supports.
type IfRet interface {
	constraints.Integer | constraints.Float | bool | types.Date | types.Datetime |
		types.Decimal64 | types.Decimal128 | types.Timestamp
}

// ifGeneral evaluates IF for fixed-size result types by delegating to
// cwGeneral (the shared case-when implementation).
func ifGeneral[T IfRet](vs []*vector.Vector, proc *process.Process, ret types.Type) (*vector.Vector, error) {
	return cwGeneral[T](vs, proc, ret)
}

// ifForString evaluates IF for char/varchar results by delegating to cwString.
func ifForString(vs []*vector.Vector, proc *process.Process, typ types.Type) (*vector.Vector, error) {
	return cwString(vs, proc, typ)
}
// Package alignfor provides functions that align elements.
package alignfor
import (
"fmt"
"image"
"strings"
"github.com/mum4k/termdash/align"
"github.com/mum4k/termdash/internal/runewidth"
"github.com/mum4k/termdash/internal/wrap"
)
// hAlign aligns the given area in the rectangle horizontally, returning a
// rectangle of the same size shifted along the x axis.
func hAlign(rect image.Rectangle, ar image.Rectangle, h align.Horizontal) (image.Rectangle, error) {
	// gap is the horizontal offset of the area's left edge from rect's.
	var gap int
	switch h {
	case align.HorizontalLeft:
		gap = 0
	case align.HorizontalCenter:
		gap = (rect.Dx() - ar.Dx()) / 2
	case align.HorizontalRight:
		gap = rect.Dx() - ar.Dx()
	default:
		return image.ZR, fmt.Errorf("unsupported horizontal alignment %v", h)
	}

	return image.Rect(
		rect.Min.X+gap,
		ar.Min.Y,
		rect.Min.X+gap+ar.Dx(),
		ar.Max.Y,
	), nil
}
// vAlign aligns the given area in the rectangle vertically, returning a
// rectangle of the same size shifted along the y axis.
func vAlign(rect image.Rectangle, ar image.Rectangle, v align.Vertical) (image.Rectangle, error) {
	// gap is the vertical offset of the area's top edge from rect's.
	var gap int
	switch v {
	case align.VerticalTop:
		gap = 0
	case align.VerticalMiddle:
		gap = (rect.Dy() - ar.Dy()) / 2
	case align.VerticalBottom:
		gap = rect.Dy() - ar.Dy()
	default:
		return image.ZR, fmt.Errorf("unsupported vertical alignment %v", v)
	}

	return image.Rect(
		ar.Min.X,
		rect.Min.Y+gap,
		ar.Max.X,
		rect.Min.Y+gap+ar.Dy(),
	), nil
}
// Rectangle aligns the area within the rectangle returning the
// aligned area. The area must fall within the rectangle.
func Rectangle(rect image.Rectangle, ar image.Rectangle, h align.Horizontal, v align.Vertical) (image.Rectangle, error) {
	if !ar.In(rect) {
		return image.ZR, fmt.Errorf("cannot align area %v inside rectangle %v, the area falls outside of the rectangle", ar, rect)
	}

	// Align horizontally first, then vertically on the intermediate result.
	hAligned, err := hAlign(rect, ar, h)
	if err != nil {
		return image.ZR, err
	}
	return vAlign(rect, hAligned, v)
}
// Text aligns the text within the given rectangle, returns the start point for the text.
// For the purposes of the alignment this assumes that text will be trimmed if
// it overruns the rectangle.
// This only supports a single line of text, the text must not contain non-printable characters,
// allows empty text.
func Text(rect image.Rectangle, text string, h align.Horizontal, v align.Vertical) (image.Point, error) {
	if strings.ContainsRune(text, '\n') {
		return image.ZP, fmt.Errorf("the provided text contains a newline character: %q", text)
	}

	if text != "" {
		if err := wrap.ValidText(text); err != nil {
			return image.ZP, fmt.Errorf("the provided text contains non printable character(s): %s", err)
		}
	}

	// Width in cells (wide runes can occupy two cells), capped at the
	// rectangle width since overflow is assumed to be trimmed.
	cells := runewidth.StringWidth(text)
	var textLen int
	if cells < rect.Dx() {
		textLen = cells
	} else {
		textLen = rect.Dx()
	}

	// A one-cell-high rectangle representing the (possibly trimmed) text,
	// which is then aligned like any other area.
	textRect := image.Rect(
		rect.Min.X,
		rect.Min.Y,
		// For the purposes of aligning the text, assume that it will be
		// trimmed to the available space.
		rect.Min.X+textLen,
		rect.Min.Y+1,
	)

	aligned, err := Rectangle(rect, textRect, h, v)
	if err != nil {
		return image.ZP, err
	}
	// The text starts at the top-left corner of the aligned rectangle.
	return image.Point{aligned.Min.X, aligned.Min.Y}, nil
}
package iqfeed
import (
"strings"
"time"
)
// FundamentalMsg cannot be customized and is used to provide detail of a particular matched symbol.
// Field order mirrors the comma-separated wire format consumed by UnMarshall.
// NOTE(review): the field name "ExchaangeID" is misspelled, but renaming it
// would break the public API.
type FundamentalMsg struct {
	Symbol             string    // The Symbol ID to match with watch request
	ExchaangeID        string    // Deprecated: Use Listed Market (field 45 below) instead
	PE                 float64   // Price/earnings ratio
	AvgVolume          int       // Average daily volume (4 week avg)
	Fifty2WkHigh       float64   // Highest price of the last 52 weeks. For futures, this is the contract High.
	Fifty2WkLow        float64   // Lowest price of the last 52 weeks. For futures, this is the contract Low.
	CalYearHigh        float64   // High price for the current calendar year.
	CalyearLow         float64   // Low price for the current calendar year.
	DivYield           float64   // The annual dividends per share paid by the company divided by the current market price per share of stock sent as a percentage.
	DivAmt             float64   // The current quarter actual dividend
	DivRate            float64   // The annualized amount at which a dividend is expected to be paid by a company.
	PayDate            time.Time // Date on which a company made its last dividend payment (MM/DD/YYYY).
	ExDivDate          time.Time // The actual date in which a stock goes ex-dividend, typically about 3 weeks before the dividend is paid to shareholders of record. Also the amount of the dividend is reflected in a reduction of the share price on this date. (MM/DD/YYYY).
	Reserved1          string    // Reserved field.
	Reserved2          string    // Reserved field.
	Reserved3          string    // Reserved field.
	ShortInterest      int       // The total number of shares of a security that have been sold short by customers and securities firms that have not been repurchased to settle outstanding short positions in the market.
	Reserved4          string    // Reserved field.
	CurrentYrEPS       float64   // The portion of a company's profit allocated to each outstanding share of common stock.
	NextYrEPS          float64   // The total amount of earnings per share a company is estimated to accumulate over the next four quarters of the current fiscal year.
	FiveYrGrowthPct    float64   // Earnings Per Share growth rate over a five year period.
	FiscalYrEnd        int       // The two digit month that the fiscal year ends for a company.
	Reserved5          string    // Reserved field.
	CompanyName        string    // Company name or contract description
	RootOptionSymbol   []string  // A list of root option symbols, there may be more than one.
	PctHeldByInst      float64   // A percentage of outstanding shares held by banks and institutions.
	Beta               float64   // A coefficient measuring a stock’s relative volatility. It is the covariance of a stock in relation to the rest of the stock market. 30 day historical volatility.
	Leaps              string    // Long term equity anticipation securities.
	CurrentAssets      float64   // The amount of total current assets held by a company as of a specific date in Millions (lastADate).
	CurrentLiabilities float64   // The amount of total current liabilities held by a company as of a specific date in Millions (lastADate).
	BalSheetDate       time.Time // Last date that a company issued their quarterly report. (MM/DD/YYYY).
	LongTermDebt       float64   // The amount of long term debt held by a company as of a specific date in Millions(lastADate).
	ComShrOutstanding  float64   // The amount of common shares outstanding.
	Reserved6          string    // Reserved field.
	SplitFactor1       string    // A float a space, then MM/DD/YYYY
	SplitFactor2       string    // A float a space, then MM/DD/YYYY
	Reserved7          string    // Reserved field.
	Reserved8          string    // Reserved field.
	FormatCode         string    // Display format code, See: Price Format Codes http://www.iqfeed.net/dev/api/docs/PriceFormatCodes.cfm.
	Precision          int       // Number of decimal digits.
	SIC                int       // Federally designed numbering system identifying companies by industry. This 4 digit number corresponds to a specific industry.
	HistVolatility     float64   // 30-trading day volatility that it is calculated using Black-Scholes (https://en.wikipedia.org/wiki/Black%E2%80%93Scholes_model).
	SecurityType       string    // The security type code, See: Security Types (http://www.iqfeed.net/dev/api/docs/SecurityTypes.cfm).
	ListedMarket       string    // The listing market ID, See: Listed Markets
	Fifty2WkHighDate   time.Time // The date of the highest price of the last 52 weeks. For futures, this is the contract High Date. (MM/DD/YYYY)
	Fifty2WkLowDate    time.Time // The date of the lowest price of the last 52 weeks. For futures, this is the contract Low Date. (MM/DD/YYYY)
	CalYearHighDate    time.Time // Date at which the High price for the current calendar year occurred. (MM/DD/YYYY)
	CalYearLowDate     time.Time // Date at which the Low price for the current calendar year occurred. (MM/DD/YYYY)
	YrEndClose         float64   // Price of Year End Close. (Equities Only)
	MaturityDate       time.Time // Date of maturity for a bond.
	CouponRate         float64   // Interest rate for a bond.
	ExpirationDate     time.Time // IEOptions, Futures, FutureOptions, and SSFutures only
	StrikePrice        float64   // IEOptions only
	NAICS              int       // North American Industry Classification System (http://www.census.gov/eos/www/naics/)
	ExchangeRoot       string    // The root symbol that you can find this symbol listed under at the exchange.
}
// UnMarshall parses a comma-separated fundamental message into f.
// The field order is fixed by the feed protocol: each positional item is
// converted with the Get* helper functions defined elsewhere in this package,
// and date fields are interpreted in the supplied *time.Location.
//
// NOTE: items[0] through items[54] are indexed without a length check, so
// this method panics if d contains fewer than 55 comma-separated fields.
// The trailing comments show example values from a real message.
func (f *FundamentalMsg) UnMarshall(d []byte, loc *time.Location) {
	items := strings.Split(string(d), ",")
	f.Symbol = items[0] // APL,
	f.ExchaangeID = items[1] // 5,
	f.PE = GetFloatFromStr(items[2]) // 9.9,
	f.AvgVolume = GetIntFromStr(items[3]) // 53599000,
	f.Fifty2WkHigh = GetFloatFromStr(items[4]) // 134.5400,
	f.Fifty2WkLow = GetFloatFromStr(items[5]) // 92.0000,
	f.CalYearHigh = GetFloatFromStr(items[6]) // 105.8500,
	f.CalyearLow = GetFloatFromStr(items[7]) // 92.3900,
	f.DivYield = GetFloatFromStr(items[8]) // 2.2100,
	f.DivAmt = GetFloatFromStr(items[9]) // 0.5200,
	f.DivRate = GetFloatFromStr(items[10]) // 2.0800,
	f.PayDate = GetDateMMDDCCYY(items[11], loc) // 02/11/2016,
	f.ExDivDate = GetDateMMDDCCYY(items[12], loc) // 02/04/2016,
	f.Reserved1 = items[13] // ,
	f.Reserved2 = items[14] // ,
	f.Reserved3 = items[15] // ,
	f.ShortInterest = GetIntFromStr(items[16]) // 63543520,
	f.Reserved4 = items[17] // ,
	f.CurrentYrEPS = GetFloatFromStr(items[18]) // 9.46,
	f.NextYrEPS = GetFloatFromStr(items[19]) // ,
	f.FiveYrGrowthPct = GetFloatFromStr(items[20]) // 0.34,
	f.FiscalYrEnd = GetIntFromStr(items[21]) // 09,
	f.Reserved5 = items[22] // ,
	f.CompanyName = items[23] // APPLE,
	f.RootOptionSymbol = strings.Split(items[24], " ") // AAPL AAPL7,
	f.PctHeldByInst = GetFloatFromStr(items[25]) // 67.1,
	f.Beta = GetFloatFromStr(items[26]) // 1.35,
	f.Leaps = items[27] // ,
	f.CurrentAssets = GetFloatFromStr(items[28]) // 89378.0,
	f.CurrentLiabilities = GetFloatFromStr(items[29]) // 80610.0,
	f.BalSheetDate = GetDateMMDDCCYY(items[30], loc) // 12/31/2015,
	f.LongTermDebt = GetFloatFromStr(items[31]) // 53463.0,
	f.ComShrOutstanding = GetFloatFromStr(items[32]) // 5544583,
	f.Reserved6 = items[33] // 334220,
	f.SplitFactor1 = items[34] // 0.14 06/09/2014,
	f.SplitFactor2 = items[35] // 0.50 02/28/2005,
	f.Reserved7 = items[36] // ,
	f.Reserved8 = items[37] // 0,
	f.FormatCode = items[38] // 14,
	f.Precision = GetIntFromStr(items[39]) // 4,
	f.SIC = GetIntFromStr(items[40]) // 3571,
	f.HistVolatility = GetFloatFromStr(items[41]) // 36.98,
	f.SecurityType = items[42] // 1,
	f.ListedMarket = items[43] // 21,
	f.Fifty2WkHighDate = GetDateMMDDCCYY(items[44], loc) // 04/28/2015,
	f.Fifty2WkLowDate = GetDateMMDDCCYY(items[45], loc) // 08/24/2015,
	f.CalYearHighDate = GetDateMMDDCCYY(items[46], loc) // 01/05/2016,
	f.CalYearLowDate = GetDateMMDDCCYY(items[47], loc) // 01/28/2016,
	f.YrEndClose = GetFloatFromStr(items[48]) // 105.26,
	f.MaturityDate = GetDateMMDDCCYY(items[49], loc) // ,
	f.CouponRate = GetFloatFromStr(items[50]) // ,
	f.ExpirationDate = GetDateMMDDCCYY(items[51], loc) // ,
	f.StrikePrice = GetFloatFromStr(items[52]) // ,
	f.NAICS = GetIntFromStr(items[53]) // 334220,
	f.ExchangeRoot = items[54] // ,
}
package random
import (
"errors"
"math/rand"
)
// ProteinSequence returns a random protein sequence string of a given length and seed.
// All returned sequences start with M (Methionine) and end with * (stop codon),
// and the same (length, seed) pair always yields the same sequence.
func ProteinSequence(length int, seed int64) (string, error) {
	// A start codon (M) and a stop codon (*) are always emitted, so the
	// requested length must leave room for them. See:
	// https://en.wikipedia.org/wiki/Genetic_code#Start_and_stop_codons
	if length <= 2 {
		err := errors.New("The length needs to be greater than two because the random protein sequenced returned always contain a start and stop codon. Please select a higher length in RandomProteinSequence function")
		return "", err
	}
	// The 20 standard amino acid one-letter codes. The previous alphabet
	// contained 'J' (an ambiguity code, not a standard residue) instead of
	// 'K' (Lysine); this is corrected here.
	// https://en.wikipedia.org/wiki/Amino_acid#Table_of_standard_amino_acid_abbreviations_and_properties
	var aminoAcidsAlphabet = []rune("ACDEFGHIKLMNPQRSTVWY")
	// Use a local source rather than the deprecated global rand.Seed so this
	// function does not mutate shared RNG state.
	rng := rand.New(rand.NewSource(seed))
	randomSequence := make([]rune, length)
	for peptide := range randomSequence {
		switch peptide {
		case 0:
			// The start codon translates to Methionine, so sequences begin with M.
			randomSequence[peptide] = 'M'
		case length - 1:
			// * marks the stop codon that terminates translation.
			randomSequence[peptide] = '*'
		default:
			randomSequence[peptide] = aminoAcidsAlphabet[rng.Intn(len(aminoAcidsAlphabet))]
		}
	}
	return string(randomSequence), nil
}
// DNASequence returns a random DNA sequence string of a given length and seed.
// The same (length, seed) pair always yields the same sequence.
func DNASequence(length int, seed int64) (string, error) {
	var nucleicAcidsAlphabet = []rune("ACTG")
	alphabetLength := len(nucleicAcidsAlphabet)
	// Use a local source rather than the deprecated global rand.Seed so this
	// function does not mutate shared RNG state and is safe to call from
	// code that uses the global generator elsewhere.
	rng := rand.New(rand.NewSource(seed))
	randomSequence := make([]rune, length)
	for basepair := range randomSequence {
		randomSequence[basepair] = nucleicAcidsAlphabet[rng.Intn(alphabetLength)]
	}
	return string(randomSequence), nil
}
package czml
// Model describes a 3D model
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Model
//
// All pointer fields tagged omitempty are dropped from the JSON output when
// unset; only the Gltf reference (tag "uri", no omitempty) is always emitted.
type Model struct {
	Show *bool `json:"show,omitempty"`
	// Gltf references the model asset. NOTE(review): the field is named Gltf
	// but serializes under the key "uri" — confirm against the CZML Model
	// schema, which names this property "gltf".
	Gltf *Uri `json:"uri"`
	Scale *float64 `json:"scale,omitempty"`
	MinimumPixelSize *float64 `json:"minimumPixelSize,omitempty"`
	MaximumScale *float64 `json:"maximumScale,omitempty"`
	MinimumCone *float64 `json:"minimumCone,omitempty"`
	IncrementallyLoadTextures *bool `json:"incrementallyLoadTextures,omitempty"`
	RunAnimations *bool `json:"runAnimations,omitempty"`
	Shadows ShadowMode `json:"shadows,omitempty"`
	HeightReference *HeightReference `json:"heightReference,omitempty"`
	SilhouetteColor *Color `json:"silhouetteColor,omitempty"`
	SilhouetteSize *float64 `json:"silhouetteSize,omitempty"`
	Color *Color `json:"color,omitempty"`
	ColorBlendMode *ColorBlendMode `json:"colorBlendMode,omitempty"`
	ColorBlendAmount *float64 `json:"colorBlendAmount,omitempty"`
	DistanceDisplayCondition *DistanceDisplayCondition `json:"distanceDisplayCondition,omitempty"`
	NodeTransformations *NodeTransformations `json:"nodeTransformations,omitempty"`
	Articulations *Articulations `json:"articulations,omitempty"`
}
// Articulations is a mapping of keys to articulation values, where the keys are the name of the
// articulation, a single space, and the name of the stage. Values are left as
// interface{} so any CZML value form can be supplied.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Articulations
type Articulations map[string]interface{}

// NodeTransformations is a mapping of node names to node transformations.
// Values are left as interface{} so any CZML value form can be supplied.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/NodeTransformations
type NodeTransformations map[string]interface{}

// ColorBlendMode contains the mode of blending between a target color and an entity's source color.
// Either an explicit mode or a reference to another property may be given.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/ColorBlendMode
type ColorBlendMode struct {
	ColorBlendMode ColorBlendModeValue `json:"colorBlendMode,omitempty"`
	Reference ReferenceValue `json:"reference,omitempty"`
}

// ColorBlendModeValue is the mode of blending between a target color and an entity's source color.
// Valid values are `HIGHLIGHT`, `REPLACE`, and `MIX`
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/ColorBlendModeValue
type ColorBlendModeValue string
package ent
import (
"Joalarm/app/post/service/internal/data/ent/post"
"fmt"
"strings"
"time"
"entgo.io/ent/dialect/sql"
)
// Post is the model entity for the Post schema.
//
// Columns that are NULL in the database leave the corresponding field at its
// zero value (see assignValues). The embedded client config is excluded from
// JSON output via the "-" tag.
type Post struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// Content holds the value of the "content" field.
	Content string `json:"content,omitempty"`
	// Latitude holds the value of the "latitude" field.
	Latitude float64 `json:"latitude,omitempty"`
	// Longitude holds the value of the "longitude" field.
	Longitude float64 `json:"longitude,omitempty"`
	// Accuracy holds the value of the "accuracy" field.
	Accuracy float64 `json:"accuracy,omitempty"`
	// PostStatus holds the value of the "post_status" field.
	PostStatus int8 `json:"post_status,omitempty"`
	// Group holds the value of the "group" field.
	Group int `json:"group,omitempty"`
	// CountLikes holds the value of the "count_likes" field.
	CountLikes int `json:"count_likes,omitempty"`
	// CountComments holds the value of the "count_comments" field.
	CountComments int `json:"count_comments,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// DeletedAt holds the value of the "deleted_at" field.
	DeletedAt time.Time `json:"deleted_at,omitempty"`
}
// scanValues returns the types for scanning values from sql.Rows.
// Each requested column name is mapped to a nullable sql.Null* holder that
// matches the field's Go type; assignValues later unpacks these holders into
// the struct. An unrecognized column name is an error.
func (*Post) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case post.FieldLatitude, post.FieldLongitude, post.FieldAccuracy:
			values[i] = new(sql.NullFloat64)
		case post.FieldID, post.FieldPostStatus, post.FieldGroup, post.FieldCountLikes, post.FieldCountComments:
			values[i] = new(sql.NullInt64)
		case post.FieldContent:
			values[i] = new(sql.NullString)
		case post.FieldCreatedAt, post.FieldUpdatedAt, post.FieldDeletedAt:
			values[i] = new(sql.NullTime)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Post", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Post fields.
// The holders produced by scanValues are type-asserted back to their sql.Null*
// types; a NULL column (Valid == false) leaves the field at its zero value.
// NOTE(review): the id case formats the asserted holder (`value`) into its
// error message while every other case formats `values[i]` — harmless, but
// inconsistent.
func (po *Post) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case post.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			po.ID = int(value.Int64)
		case post.FieldContent:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field content", values[i])
			} else if value.Valid {
				po.Content = value.String
			}
		case post.FieldLatitude:
			if value, ok := values[i].(*sql.NullFloat64); !ok {
				return fmt.Errorf("unexpected type %T for field latitude", values[i])
			} else if value.Valid {
				po.Latitude = value.Float64
			}
		case post.FieldLongitude:
			if value, ok := values[i].(*sql.NullFloat64); !ok {
				return fmt.Errorf("unexpected type %T for field longitude", values[i])
			} else if value.Valid {
				po.Longitude = value.Float64
			}
		case post.FieldAccuracy:
			if value, ok := values[i].(*sql.NullFloat64); !ok {
				return fmt.Errorf("unexpected type %T for field accuracy", values[i])
			} else if value.Valid {
				po.Accuracy = value.Float64
			}
		case post.FieldPostStatus:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field post_status", values[i])
			} else if value.Valid {
				po.PostStatus = int8(value.Int64)
			}
		case post.FieldGroup:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field group", values[i])
			} else if value.Valid {
				po.Group = int(value.Int64)
			}
		case post.FieldCountLikes:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field count_likes", values[i])
			} else if value.Valid {
				po.CountLikes = int(value.Int64)
			}
		case post.FieldCountComments:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field count_comments", values[i])
			} else if value.Valid {
				po.CountComments = int(value.Int64)
			}
		case post.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				po.CreatedAt = value.Time
			}
		case post.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				po.UpdatedAt = value.Time
			}
		case post.FieldDeletedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field deleted_at", values[i])
			} else if value.Valid {
				po.DeletedAt = value.Time
			}
		}
	}
	return nil
}
// Update returns a builder for updating this Post.
// Note that you need to call Post.Unwrap() before calling this method if this Post
// was returned from a transaction, and the transaction was committed or rolled back.
func (po *Post) Update() *PostUpdateOne {
	client := &PostClient{config: po.config}
	return client.UpdateOne(po)
}
// Unwrap unwraps the Post entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the entity is not transactional.
func (po *Post) Unwrap() *Post {
	txDrv, ok := po.config.driver.(*txDriver)
	if !ok {
		panic("ent: Post is not a transactional entity")
	}
	po.config.driver = txDrv.drv
	return po
}
// String implements the fmt.Stringer, rendering the entity as
// "Post(id=..., content=..., ..., deleted_at=...)".
func (po *Post) String() string {
	var b strings.Builder
	fmt.Fprintf(&b, "Post(id=%v", po.ID)
	fmt.Fprintf(&b, ", content=%s", po.Content)
	fmt.Fprintf(&b, ", latitude=%v", po.Latitude)
	fmt.Fprintf(&b, ", longitude=%v", po.Longitude)
	fmt.Fprintf(&b, ", accuracy=%v", po.Accuracy)
	fmt.Fprintf(&b, ", post_status=%v", po.PostStatus)
	fmt.Fprintf(&b, ", group=%v", po.Group)
	fmt.Fprintf(&b, ", count_likes=%v", po.CountLikes)
	fmt.Fprintf(&b, ", count_comments=%v", po.CountComments)
	fmt.Fprintf(&b, ", created_at=%s", po.CreatedAt.Format(time.ANSIC))
	fmt.Fprintf(&b, ", updated_at=%s", po.UpdatedAt.Format(time.ANSIC))
	fmt.Fprintf(&b, ", deleted_at=%s", po.DeletedAt.Format(time.ANSIC))
	b.WriteString(")")
	return b.String()
}
// Posts is a parsable slice of Post.
type Posts []*Post
func (po Posts) config(cfg config) {
for _i := range po {
po[_i].config = cfg
}
} | app/post/service/internal/data/ent/post.go | 0.651466 | 0.410284 | post.go | starcoder |
package freespacetree
// Node represents a range of free space in the tree.
//
// Nodes form a binary search tree ordered by block position: ranges entirely
// below this node's range live under left, ranges above under right.
// NOTE(review): whether `to` is inclusive is ambiguous in this file —
// Allocate treats capacity as to-from (half-open) while AddNode merges at
// to == from-1 (inclusive); confirm the intended convention.
type Node struct {
	from uint64 // first block of the free range (returned by Allocate)
	to uint64 // upper bound of the free range; see inclusivity note above
	left *Node
	right *Node
}
// NewNode returns a pointer to a new Node with the specified range of free space.
func NewNode(from, to uint64) *Node {
	// left and right default to nil, so only the range needs to be set.
	return &Node{from: from, to: to}
}
// Allocate attempts to allocate a continuous range of blocks as specified, returning
// the first blockid in the allocation, and whether the space was successfully allocated.
//
// The tree is visited in order (left subtree, this node, right subtree), so
// the lowest-addressed fit wins. A successful allocation is taken from the
// front of the chosen node's range by advancing its from field; emptied nodes
// are not removed from the tree.
// NOTE(review): the fit test nd.to-nd.from >= blocks treats the range as
// half-open; if ranges are inclusive this under-counts capacity by one —
// confirm against the range convention (see Node).
func (nd *Node) Allocate(blocks uint64) (uint64, bool) {
	if nd.left != nil {
		blockid, found := nd.left.Allocate(blocks)
		if found {
			return blockid, found
		}
	}
	if nd.to-nd.from >= blocks {
		// Will fit in this node.
		blockid := nd.from
		nd.from += blocks
		return blockid, true
	}
	if nd.right != nil {
		blockid, found := nd.right.Allocate(blocks)
		if found {
			return blockid, found
		}
	}
	return 0, false
}
// Deallocate frees the continuous space referenced by the specified blockid and length,
// returning the (possibly new) root of the tree after the freed range has been
// merged in via AddNode.
func (nd *Node) Deallocate(blockid uint64, blocklength uint64) *Node {
	freed := NewNode(blockid, blockid+blocklength)
	return nd.AddNode(freed)
}
// AddNode adds an existing node into the tree, merging nodes if necessary and returning the new root of the tree.
// Merging cases are tried in order: node contained in nd, nd contained in
// node, node adjacent/overlapping on the left, adjacent/overlapping on the
// right, and finally a plain binary insert. When ranges merge, the merged
// node's children are cleared and re-inserted so ordering stays consistent.
func (nd *Node) AddNode(node *Node) *Node {
	// Detect node engulfed by nd
	if nd.from <= node.from && nd.to >= node.to {
		// Add node's children if any
		if node.left != nil {
			nd.AddNode(node.left)
		}
		if node.right != nil {
			nd.AddNode(node.right)
		}
		return nd // drop node
	}
	// Detect nd engulfed by node
	if node.from <= nd.from && node.to >= nd.to {
		// add our children to new node
		if nd.left != nil {
			node.AddNode(nd.left)
		}
		if nd.right != nil {
			node.AddNode(nd.right)
		}
		// drop nd, return new node
		nd.left = nil
		nd.right = nil
		return node
	}
	// Detect adjacent to left / overlaps left
	// NOTE(review): the adjacency test node.to == nd.from-1 assumes inclusive
	// ranges; with the half-open ranges produced by Deallocate it would merge
	// across one still-allocated block — confirm the range convention.
	if node.to == nd.from-1 || (node.from <= nd.from && node.to <= nd.to && node.to >= nd.from) {
		nd.from = node.from // extend nd and drop new node
		// Add node's children if any
		if node.left != nil {
			nd.AddNode(node.left)
		}
		if node.right != nil {
			nd.AddNode(node.right)
		}
		// Clear and re-add children
		left := nd.left
		right := nd.right
		nd.left = nil
		nd.right = nil
		if left != nil {
			nd.AddNode(left)
		}
		if right != nil {
			nd.AddNode(right)
		}
		// drop node
		node.left = nil
		node.right = nil
		return nd
	}
	// Detect adjacent to right / overlaps right
	// NOTE(review): the overlap clause requires node.to <= nd.from, which
	// together with node.from >= nd.from is only satisfiable for degenerate
	// ranges; node.to >= nd.to looks like the intended condition — verify.
	if node.from == nd.to+1 || (node.from >= nd.from && node.from <= nd.to && node.to <= nd.from) {
		nd.to = node.to // extend nd
		// Add node's children if any
		if node.left != nil {
			nd.AddNode(node.left)
		}
		if node.right != nil {
			nd.AddNode(node.right)
		}
		// Clear and re-add children
		left := nd.left
		right := nd.right
		nd.left = nil
		nd.right = nil
		if left != nil {
			nd.AddNode(left)
		}
		if right != nil {
			nd.AddNode(right)
		}
		// drop node
		node.left = nil
		node.right = nil
		return nd
	}
	// else, binary insert
	if node.to < nd.from {
		if nd.left == nil {
			nd.left = node
		} else {
			nd.left = nd.left.AddNode(node)
		}
	} else {
		if nd.right == nil {
			nd.right = node
		} else {
			nd.right = nd.right.AddNode(node)
		}
	}
	return nd
}
package keyproof
import (
"github.com/privacybydesign/gabi/big"
"github.com/privacybydesign/gabi/internal/common"
)
type (
	// expStepStructure proves one step of an exponentiation. Depending on the
	// secret bit it runs either the stepa or the stepb sub-proof for real and
	// simulates the other, hiding which branch is genuine (an OR-proof; see
	// commitmentsFromSecrets).
	expStepStructure struct {
		bitname string
		stepa   expStepAStructure
		stepb   expStepBStructure
	}

	// expStepCommit holds per-step commitment state. Exactly one branch is
	// committed honestly (isTypeA selects which); the other branch stores a
	// simulated proof together with its pre-chosen random challenge.
	expStepCommit struct {
		isTypeA    bool
		acommit    expStepACommit
		aproof     ExpStepAProof
		achallenge *big.Int
		bcommit    expStepBCommit
		bproof     ExpStepBProof
		bchallenge *big.Int
	}

	// ExpStepProof carries the proofs for both branches; the two challenges
	// must XOR to the session challenge (checked by verifyProofStructure).
	ExpStepProof struct {
		Achallenge *big.Int
		Aproof     ExpStepAProof
		Bchallenge *big.Int
		Bproof     ExpStepBProof
	}
)
// newExpStepStructure builds the structure for one exponentiation step,
// wiring the shared names into both the A and B sub-structures.
func newExpStepStructure(bitname, prename, postname, mulname, modname string, bitlen uint) expStepStructure {
	return expStepStructure{
		bitname: bitname,
		stepa:   newExpStepAStructure(bitname, prename, postname),
		stepb:   newExpStepBStructure(bitname, prename, postname, mulname, modname, bitlen),
	}
}
// commitmentsFromSecrets produces the commitments for this step's OR-proof.
// When the secret bit is 0, the A branch is committed honestly and the B
// branch is simulated with a pre-chosen random 256-bit challenge; when the
// bit is non-zero the roles are reversed. The simulated branch's commitments
// are reconstructed from its fake proof so both branches look identical to a
// verifier.
func (s *expStepStructure) commitmentsFromSecrets(g group, list []*big.Int, bases BaseLookup, secretdata SecretLookup) ([]*big.Int, expStepCommit) {
	var commit expStepCommit
	if secretdata.Secret(s.bitname).Cmp(big.NewInt(0)) == 0 {
		commit.isTypeA = true
		// prove a
		list, commit.acommit = s.stepa.commitmentsFromSecrets(g, list, bases, secretdata)
		// fake b
		commit.bchallenge = common.FastRandomBigInt(new(big.Int).Lsh(big.NewInt(1), 256))
		commit.bproof = s.stepb.fakeProof(g)
		list = s.stepb.commitmentsFromProof(g, list, commit.bchallenge, bases, commit.bproof)
	} else {
		commit.isTypeA = false
		// fake a
		commit.achallenge = common.FastRandomBigInt(new(big.Int).Lsh(big.NewInt(1), 256))
		commit.aproof = s.stepa.fakeProof(g)
		list = s.stepa.commitmentsFromProof(g, list, commit.achallenge, bases, commit.aproof)
		// prove b
		list, commit.bcommit = s.stepb.commitmentsFromSecrets(g, list, bases, secretdata)
	}
	return list, commit
}
// buildProof finishes the OR-proof for the session challenge. The honest
// branch receives challenge XOR simulated-challenge, so the two branch
// challenges always XOR to the session challenge; the simulated branch's
// pre-built proof and challenge are copied over unchanged.
func (s *expStepStructure) buildProof(g group, challenge *big.Int, commit expStepCommit, secretdata SecretLookup) ExpStepProof {
	var proof ExpStepProof
	if commit.isTypeA {
		// Build a proof
		proof.Achallenge = new(big.Int).Xor(challenge, commit.bchallenge)
		proof.Aproof = s.stepa.buildProof(g, proof.Achallenge, commit.acommit, secretdata)
		// Copy b proof
		proof.Bchallenge = commit.bchallenge
		proof.Bproof = commit.bproof
	} else {
		// Copy a proof
		proof.Achallenge = commit.achallenge
		proof.Aproof = commit.aproof
		// Build b proof
		proof.Bchallenge = new(big.Int).Xor(challenge, commit.achallenge)
		proof.Bproof = s.stepb.buildProof(g, proof.Bchallenge, commit.bcommit, secretdata)
	}
	return proof
}
// fakeProof simulates a full step proof: a random 256-bit A challenge, the
// matching B challenge (XOR with the session challenge), and fake proofs for
// both branches.
func (s *expStepStructure) fakeProof(g group, challenge *big.Int) ExpStepProof {
	achallenge := common.FastRandomBigInt(new(big.Int).Lsh(big.NewInt(1), 256))
	bchallenge := new(big.Int).Xor(challenge, achallenge)
	return ExpStepProof{
		Achallenge: achallenge,
		Bchallenge: bchallenge,
		Aproof:     s.stepa.fakeProof(g),
		Bproof:     s.stepb.fakeProof(g),
	}
}
// verifyProofStructure checks that both challenges are present, that they XOR
// to the session challenge, and that both branch proofs are well-formed.
func (s *expStepStructure) verifyProofStructure(challenge *big.Int, proof ExpStepProof) bool {
	if proof.Achallenge == nil || proof.Bchallenge == nil {
		return false
	}
	combined := new(big.Int).Xor(proof.Achallenge, proof.Bchallenge)
	if challenge.Cmp(combined) != 0 {
		return false
	}
	if !s.stepa.verifyProofStructure(proof.Aproof) {
		return false
	}
	return s.stepb.verifyProofStructure(proof.Bproof)
}
// commitmentsFromProof reconstructs the commitments of both branches from a
// proof, appending them to list in the same order the prover emitted them.
func (s *expStepStructure) commitmentsFromProof(g group, list []*big.Int, challenge *big.Int, bases BaseLookup, proof ExpStepProof) []*big.Int {
	result := s.stepa.commitmentsFromProof(g, list, proof.Achallenge, bases, proof.Aproof)
	result = s.stepb.commitmentsFromProof(g, result, proof.Bchallenge, bases, proof.Bproof)
	return result
}
func (s *expStepStructure) isTrue(secretdata SecretLookup) bool {
return s.stepa.isTrue(secretdata) || s.stepb.isTrue(secretdata)
}
func (s *expStepStructure) numRangeProofs() int {
return s.stepa.numRangeProofs() + s.stepb.numRangeProofs()
}
func (s *expStepStructure) numCommitments() int {
return s.stepa.numCommitments() + s.stepb.numCommitments()
} | keyproof/expstep.go | 0.565299 | 0.449453 | expstep.go | starcoder |
package scale
import (
"fmt"
"sort"
"github.com/gvallee/collective_profiler/tools/internal/pkg/unit"
)
/*
func intsScaleDown(unitType int, unitScale int, values []int) (int, int, []int) {
if unitScale == -1 {
// Unit not recognized, nothing we can do
return unitType, unitScale, values
}
newUnitScale := unitScale - 1
if !unit.IsValidScale(unitType, newUnitScale) {
// nothing we can do
return unitType, unitScale, values
}
values = intsCompute(DOWN, values)
return unitType, newUnitScale, values
}
*/
func intsScaleUp(unitType int, unitScale int, values []int) (int, int, []int) {
if unitScale == -1 {
// Unit not recognized, nothing we can do
return unitType, unitScale, values
}
newUnitScale := unitScale + 1
if !unit.IsValidScale(unitType, newUnitScale) {
// nothing we can do
return unitType, unitScale, values
}
values = intsCompute(UP, values)
return unitType, newUnitScale, values
}
func intsCompute(op int, values []int) []int {
var newValues []int
switch op {
case DOWN:
for _, val := range values {
newValues = append(newValues, val*1000)
}
case UP:
for _, val := range values {
newValues = append(newValues, val/1000)
}
}
return newValues
}
// Ints scales an array of Int
func Ints(unitID string, values []int) (string, []int, error) {
var sortedValues []int
if len(values) == 0 {
return "", nil, fmt.Errorf("map is empty")
}
// Copy and sort the values to figure out what can be done
sortedValues = append(sortedValues, values...)
sort.Ints(sortedValues)
// If all values are 0 nothing can be done
if allZerosInts(sortedValues) {
return unitID, values, nil
}
/* We deal with integers so this does not make much sense i think
if sortedValues[0] >= 0 && sortedValues[len(values)-1] <= 1 {
// We scale down all the values if possible
// Translate the human reading unit into something we can inteprete
unitType, unitScale := unit.FromString(unitID)
unitType, unitScale, newValues := intsScaleDown(unitType, unitScale, values)
newUnitID := unit.ToString(unitType, unitScale)
return Ints(newUnitID, newValues)
}
*/
if sortedValues[0] >= 1000 {
// We scale up the value if possible
// Translate the human reading unit into something we can inteprete
unitType, unitScale := unit.FromString(unitID)
unitType, unitScale, newValues := intsScaleUp(unitType, unitScale, values)
newUnitID := unit.ToString(unitType, unitScale)
if unit.IsMax(unitType, unitScale) {
return newUnitID, newValues, nil
}
return Ints(newUnitID, newValues)
}
// Nothing to do, just return the same
return unitID, values, nil
} | tools/internal/pkg/scale/scale_ints.go | 0.716516 | 0.442877 | scale_ints.go | starcoder |
package jigo
import (
"bytes"
"fmt"
"strconv"
)
var textFormat = "%s" // Changed to "%q" in tests for better error messages.

// NodeType identifies the kind of a parse-tree node.
type NodeType int

// Type returns the receiver; embedding a NodeType gives a node a default
// Type implementation.
func (t NodeType) Type() NodeType {
	return t
}

// Node is the interface implemented by every parse-tree node.
type Node interface {
	Type() NodeType
	String() string
	// Copy does a deep copy of the Node and all its components.
	Copy() Node
	Position() Pos // byte position of start of node in full original input string
}

// The defined node kinds. NodeList is the zero value.
const (
	NodeList NodeType = iota
	NodeText
	NodeVar
	NodeLookup
	NodeUnary
	NodeFloat
	NodeInteger
	NodeString
	NodeBool
	NodeAdd
	NodeMul
	NodeMapExpr
	NodeMapElem
	NodeIndexExpr
	NodeSet
	NodeIf
	NodeElseIf
	NodeFor
)

// This is a stack of nodes starting at a position. It has the default NodeType
// but should never end up in the AST; its use is in implementing order of
// operations for expressions.
type nodeStack struct {
	NodeType
	Pos
	Nodes []Node
}

func newStack(pos Pos) *nodeStack {
	return &nodeStack{Pos: pos}
}

func (n *nodeStack) len() int  { return len(n.Nodes) }
func (n *nodeStack) push(node Node) { n.Nodes = append(n.Nodes, node) }

// pop removes and returns the top node, or nil when the stack is empty.
func (n *nodeStack) pop() Node {
	var r Node
	if len(n.Nodes) > 0 {
		r = n.Nodes[len(n.Nodes)-1]
		n.Nodes = n.Nodes[:len(n.Nodes)-1]
	}
	return r
}
// ListNode holds a sequence of nodes.
type ListNode struct {
	NodeType
	Pos
	Nodes []Node // The element nodes in lexical order.
}

func newList(pos Pos) *ListNode {
	return &ListNode{NodeType: NodeList, Pos: pos}
}

func (l *ListNode) append(n Node) { l.Nodes = append(l.Nodes, n) }
func (l *ListNode) len() int      { return len(l.Nodes) }

// String concatenates the string forms of all elements.
func (l *ListNode) String() string {
	b := new(bytes.Buffer)
	for _, n := range l.Nodes {
		fmt.Fprint(b, n)
	}
	return b.String()
}

// CopyList deep-copies the list, preserving a nil receiver.
func (l *ListNode) CopyList() *ListNode {
	if l == nil {
		return l
	}
	n := newList(l.Pos)
	for _, elem := range l.Nodes {
		n.append(elem.Copy())
	}
	return n
}

// Copy implements Node via CopyList.
func (l *ListNode) Copy() Node { return l.CopyList() }
// TextNode holds plain text.
type TextNode struct {
	NodeType
	Pos
	Text []byte // The text; may span newlines.
}

func newText(pos Pos, text string) *TextNode {
	return &TextNode{NodeType: NodeText, Pos: pos, Text: []byte(text)}
}

// String formats the raw text through textFormat (%s, or %q in tests).
func (t *TextNode) String() string { return fmt.Sprintf(textFormat, t.Text) }

// Copy clones the node, duplicating the underlying byte slice.
func (t *TextNode) Copy() Node { return &TextNode{NodeText, t.Pos, append([]byte{}, t.Text...)} }
// VarNode represents a var print expr, ie {{ ... }}.
// It wraps the single expression to be printed.
type VarNode struct {
	NodeType
	Pos
	Node Node
}

func newVar(pos Pos) *VarNode {
	return &VarNode{NodeType: NodeVar, Pos: pos}
}

// String renders the expression inside {{ }} delimiters.
func (v *VarNode) String() string { return "{{ " + v.Node.String() + " }}" }

// Copy implements Node.
// NOTE(review): the inner Node is shared, not copied — this violates the
// deep-copy contract stated on Node.Copy, but cannot naively call
// v.Node.Copy() because newVar leaves Node nil; confirm intent.
func (v *VarNode) Copy() Node { return &VarNode{v.NodeType, v.Pos, v.Node} }
// A LookupNode is a variable lookup.
type LookupNode struct {
	NodeType
	Pos
	Name string
}

func newLookup(pos Pos, name string) *LookupNode {
	return &LookupNode{NodeType: NodeLookup, Pos: pos, Name: name}
}

// String returns the bare variable name.
func (l *LookupNode) String() string { return l.Name }

// Copy implements Node.
func (l *LookupNode) Copy() Node { return newLookup(l.Pos, l.Name) }

// StringNode holds a string literal.
type StringNode struct {
	NodeType
	Pos
	Value string
}

// Copy implements Node.
func (s *StringNode) Copy() Node { return &StringNode{s.NodeType, s.Pos, s.Value} }

// String renders the literal wrapped in double quotes (no escaping).
func (s *StringNode) String() string { return fmt.Sprintf(`"%s"`, s.Value) }
// BoolNode holds a boolean literal.
type BoolNode struct {
	NodeType
	Pos
	Value bool
}

// Copy implements Node.
func (s *BoolNode) Copy() Node { return &BoolNode{s.NodeType, s.Pos, s.Value} }

// String renders the literal as "true" or "false".
// The previous implementation used fmt.Sprintf(`%s`, s.Value); %s is not a
// valid verb for bool and produced "%!s(bool=true)".
func (s *BoolNode) String() string { return strconv.FormatBool(s.Value) }
// IntegerNode holds an integer literal.
type IntegerNode struct {
	NodeType
	Pos
	Value int64
}

// Copy implements Node.
func (i *IntegerNode) Copy() Node { return &IntegerNode{i.NodeType, i.Pos, i.Value} }

// String renders the literal in base 10.
func (i *IntegerNode) String() string { return strconv.FormatInt(i.Value, 10) }

// FloatNode holds a floating-point literal.
type FloatNode struct {
	NodeType
	Pos
	Value float64
}

// Copy implements Node.
func (f *FloatNode) Copy() Node { return &FloatNode{f.NodeType, f.Pos, f.Value} }

// String renders the literal via fmt's default float formatting.
func (f *FloatNode) String() string { return fmt.Sprint(f.Value) }

// UnaryNode applies a unary operator token to a value expression.
type UnaryNode struct {
	NodeType
	Pos
	Value Node
	Unary item
}

func newUnaryNode(val Node, unary item) *UnaryNode {
	return &UnaryNode{NodeUnary, val.Position(), val, unary}
}

// Copy implements Node.
// NOTE(review): Value is shared, not deep-copied — see the contract on
// Node.Copy.
func (u *UnaryNode) Copy() Node { return &UnaryNode{u.NodeType, u.Pos, u.Value, u.Unary} }

// String renders the operator immediately followed by its operand.
func (u *UnaryNode) String() string { return fmt.Sprintf("%s%s", u.Unary.val, u.Value) }
// newLiteral creates a new string, integer, float, or bool node depending on
// itemType. It panics on a value that fails to parse or on an unrecognized
// token type; callers are expected to pass lexer-validated tokens.
func newLiteral(pos Pos, typ itemType, val string) Node {
	switch typ {
	case tokenFloat:
		v, err := strconv.ParseFloat(val, 64)
		if err != nil {
			panic(err)
		}
		return &FloatNode{NodeFloat, pos, v}
	case tokenInteger:
		// FIXME: complex integer types? hex, octal, etc?
		v, err := strconv.ParseInt(val, 10, 64)
		if err != nil {
			panic(err)
		}
		return &IntegerNode{NodeInteger, pos, v}
	case tokenString:
		return &StringNode{NodeString, pos, val}
	case tokenBool:
		// Anything other than the exact string "true" silently becomes false.
		var v bool
		if val == "true" {
			v = true
		}
		return &BoolNode{NodeBool, pos, v}
	}
	panic(fmt.Sprint("unexpected literal type ", typ))
}
// AddExpr is a binary additive expression: lhs operator rhs.
type AddExpr struct {
	NodeType
	Pos
	lhs      Node
	rhs      Node
	operator item
}

func newAddExpr(lhs, rhs Node, operator item) *AddExpr {
	return &AddExpr{NodeAdd, lhs.Position(), lhs, rhs, operator}
}

// String renders the expression as "lhs op rhs".
func (a *AddExpr) String() string {
	return fmt.Sprintf("%s %s %s", a.lhs, a.operator.val, a.rhs)
}

// Copy implements Node. The operands are copied recursively so the result
// shares no nodes with the original, honoring the deep-copy contract of
// Node.Copy (previously the operands were shared).
func (a *AddExpr) Copy() Node {
	return newAddExpr(a.lhs.Copy(), a.rhs.Copy(), a.operator)
}
// MulExpr is a binary multiplicative expression: lhs operator rhs.
type MulExpr struct {
	NodeType
	Pos
	lhs      Node
	rhs      Node
	operator item
}

func newMulExpr(lhs, rhs Node, operator item) *MulExpr {
	return &MulExpr{NodeMul, lhs.Position(), lhs, rhs, operator}
}

// String renders the expression as "lhs op rhs".
func (m *MulExpr) String() string {
	return fmt.Sprintf("%s %s %s", m.lhs, m.operator.val, m.rhs)
}

// Copy implements Node. The operands are copied recursively so the result
// shares no nodes with the original, honoring the deep-copy contract of
// Node.Copy (previously the operands were shared).
func (m *MulExpr) Copy() Node {
	return newMulExpr(m.lhs.Copy(), m.rhs.Copy(), m.operator)
}
// complex literals

// MapExpr is a map literal: an ordered list of key/value pairs.
type MapExpr struct {
	NodeType
	Pos
	Elems []*MapElem
}

func newMapExpr(pos Pos) *MapExpr {
	return &MapExpr{NodeType: NodeMapExpr, Pos: pos}
}

func (m *MapExpr) len() int           { return len(m.Elems) }
func (m *MapExpr) append(n *MapElem)  { m.Elems = append(m.Elems, n) }

// String renders the literal as "{k: v, k: v}".
func (m *MapExpr) String() string {
	b := new(bytes.Buffer)
	b.WriteString("{")
	for i, n := range m.Elems {
		fmt.Fprint(b, n)
		if i != len(m.Elems)-1 {
			b.WriteString(", ")
		}
	}
	b.WriteString("}")
	return b.String()
}

// Copy implements Node by copying every element; a nil receiver is preserved.
func (m *MapExpr) Copy() Node {
	if m == nil {
		return m
	}
	n := newMapExpr(m.Pos)
	for _, elem := range m.Elems {
		n.append(elem.Copy().(*MapElem))
	}
	return n
}
// MapElem is a single key/value pair of a MapExpr.
type MapElem struct {
	NodeType
	Pos
	Key   Node
	Value Node
}

func newMapElem(lhs, rhs Node) *MapElem {
	return &MapElem{NodeMapElem, lhs.Position(), lhs, rhs}
}

// String renders the pair as "key: value".
func (m *MapElem) String() string {
	return fmt.Sprintf("%s: %s", m.Key, m.Value)
}

// Copy implements Node. Key and Value are copied recursively so the result
// shares no nodes with the original, honoring the deep-copy contract of
// Node.Copy (previously both were shared).
func (m *MapElem) Copy() Node {
	return newMapElem(m.Key.Copy(), m.Value.Copy())
}
// IndexExpr is an indexing expression: value[index].
type IndexExpr struct {
	NodeType
	Pos
	Value Node
	Index Node
}

func newIndexExpr(val, idx Node) *IndexExpr {
	return &IndexExpr{NodeIndexExpr, val.Position(), val, idx}
}

// String renders the expression as "value[index]".
func (i *IndexExpr) String() string {
	return fmt.Sprintf("%s[%s]", i.Value, i.Index)
}

// Copy implements Node. Value and Index are copied recursively so the result
// shares no nodes with the original, honoring the deep-copy contract of
// Node.Copy (previously both were shared).
func (i *IndexExpr) Copy() Node {
	return newIndexExpr(i.Value.Copy(), i.Index.Copy())
}
// block types

// SetNode is a {% set lhs = rhs %} assignment block.
type SetNode struct {
	NodeType
	Pos
	lhs Node
	rhs Node
}

func newSet(pos Pos, lhs, rhs Node) *SetNode {
	return &SetNode{NodeSet, pos, lhs, rhs}
}

// FIXME: environment needed to really recreate this as it requires block
// begin and end tags, which we don't technically know
func (s *SetNode) String() string { return fmt.Sprintf("{%% set %s = %s %%}", s.lhs, s.rhs) }

// Copy implements Node, deep-copying both sides of the assignment.
func (s *SetNode) Copy() Node {
	return newSet(s.Pos, s.lhs.Copy(), s.rhs.Copy())
}
// A ConditionalNode is a node that has a guard and a body. If the guard evals
// as True, then the body is rendered. Otherwise, it's a Noop. If's and ElseIf's
// are modeled this way, distinguished only by NodeType.
type ConditionalNode struct {
	NodeType
	Pos
	Guard Node
	Body  Node
}

func newIfCond(pos Pos) *ConditionalNode {
	return newConditional(pos, NodeIf)
}

func newElifCond(pos Pos) *ConditionalNode {
	return newConditional(pos, NodeElseIf)
}

func newConditional(pos Pos, typ NodeType) *ConditionalNode {
	return &ConditionalNode{NodeType: typ, Pos: pos}
}

// Copy implements Node, deep-copying guard and body.
func (c *ConditionalNode) Copy() Node {
	n := newConditional(c.Pos, c.NodeType)
	n.Guard = c.Guard.Copy()
	n.Body = c.Body.Copy()
	return n
}

// String renders the opening tag ({% if %} or {% elif %}) followed by the body.
func (c *ConditionalNode) String() string {
	b := new(bytes.Buffer)
	switch c.NodeType {
	case NodeIf:
		fmt.Fprintf(b, "{%% if %s %%}", c.Guard)
	case NodeElseIf:
		fmt.Fprintf(b, "{%% elif %s %%}", c.Guard)
	}
	fmt.Fprint(b, c.Body)
	return b.String()
}
// IfBlockNode represents a full {% if %}... block. The if and elif bodies are
// modeled using the ConditionalNode, and are evaluated in order to determine
// which body to render. `Else` will be a ListNode, but can be nil if no such
// clause is present.
type IfBlockNode struct {
NodeType
Pos
Conditionals []Node
Else Node
}
func newIf(pos Pos) *IfBlockNode {
return &IfBlockNode{NodeType: NodeIf, Pos: pos, Conditionals: make([]Node, 0, 1)}
}
func (i *IfBlockNode) String() string {
b := new(bytes.Buffer)
for _, c := range i.Conditionals {
fmt.Fprint(b, c)
}
if i.Else != nil {
fmt.Fprintf(b, "{%% else %%}%s", i.Else)
}
fmt.Fprint(b, "{% endif %}")
return b.String()
}
func (i *IfBlockNode) Copy() Node {
n := newIf(i.Pos)
n.Conditionals = make([]Node, len(i.Conditionals))
for _, e := range i.Conditionals {
n.Conditionals = append(n.Conditionals, e.Copy())
}
if i.Else != nil {
n.Else = i.Else.Copy()
}
return n
}
type ForNode struct {
NodeType
Pos
ForExpr Node
InExpr Node
Body Node
}
func newFor(pos Pos) *ForNode {
return &ForNode{NodeType: NodeFor, Pos: pos}
}
// FIXME: This should use the environment's begin and end tags, which we
// don't have down at this level...
func (f *ForNode) String() string {
return fmt.Sprintf("{%% for %s in %s %%}%s{%% endfor %%}", f.ForExpr, f.InExpr, f.Body)
}
func (f *ForNode) Copy() Node {
n := newFor(f.Pos)
n.ForExpr = f.ForExpr.Copy()
n.InExpr = f.InExpr.Copy()
n.Body = f.Body.Copy()
return n
}
type BlockNode struct {
NodeType
Pos
Name string
Body Node
}
func (b *BlockNode) String() string {
return fmt.Sprintf("{% block %s %}%s{% endblock %}", b.Name, b.Body)
}
func (b *BlockNode) Copy() Node {
return &BlockNode{b.NodeType, b.Pos, b.Name, b.Body.Copy()}
}
type Import struct {
Name string
As string
}
type ExtendsNode struct {
NodeType
Pos
}
type PrintNode struct {
NodeType
Pos
}
type MacroNode struct {
NodeType
Pos
}
type IncludeNode struct {
NodeType
Pos
}
type FromNode struct {
NodeType
Pos
Module string
Imports []Import
}
type ImportNode struct {
NodeType
Pos
Module string
Body Import
}
type CallNode struct {
NodeType
Pos
} | ast.go | 0.601008 | 0.449695 | ast.go | starcoder |
// Package geogen provides utilities for generating various geospatial types.
package geogen
import (
"math"
"math/rand"
"sort"
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/geo/geopb"
"github.com/cockroachdb/cockroach/pkg/geo/geoprojbase"
"github.com/cockroachdb/errors"
"github.com/twpayne/go-geom"
)
var validShapeTypes = []geopb.ShapeType{
geopb.ShapeType_Point,
geopb.ShapeType_LineString,
geopb.ShapeType_Polygon,
geopb.ShapeType_MultiPoint,
geopb.ShapeType_MultiLineString,
geopb.ShapeType_MultiPolygon,
geopb.ShapeType_GeometryCollection,
}
// RandomCoord generates a random coord in the given bounds.
func RandomCoord(rng *rand.Rand, min float64, max float64) float64 {
return rng.Float64()*(max-min) + min
}
// RandomValidLinearRingCoords generates a flat float64 array of coordinates that represents
// a completely closed shape that can represent a simple LinearRing. This shape is always valid.
// A LinearRing must have at least 3 points. A point is added at the end to close the ring.
// Implements the algorithm in https://observablehq.com/@tarte0/generate-random-simple-polygon.
func RandomValidLinearRingCoords(
rng *rand.Rand, numPoints int, minX float64, maxX float64, minY float64, maxY float64,
) []geom.Coord {
if numPoints < 3 {
panic(errors.Newf("need at least 3 points, got %d", numPoints))
}
// Generate N random points, and find the center.
coords := make([]geom.Coord, numPoints+1)
var centerX, centerY float64
for i := 0; i < numPoints; i++ {
coords[i] = geom.Coord{
RandomCoord(rng, minX, maxX),
RandomCoord(rng, minY, maxY),
}
centerX += coords[i].X()
centerY += coords[i].Y()
}
centerX /= float64(numPoints)
centerY /= float64(numPoints)
// Sort by the angle of all the points relative to the center.
// Use ascending order of angle to get a CCW loop.
sort.Slice(coords[:numPoints], func(i, j int) bool {
angleI := math.Atan2(coords[i].Y()-centerY, coords[i].X()-centerX)
angleJ := math.Atan2(coords[j].Y()-centerY, coords[j].X()-centerX)
return angleI < angleJ
})
// Append the first coordinate to the end.
coords[numPoints] = coords[0]
return coords
}
// RandomPoint generates a random Point.
func RandomPoint(
rng *rand.Rand, minX float64, maxX float64, minY float64, maxY float64, srid geopb.SRID,
) *geom.Point {
return geom.NewPointFlat(geom.XY, []float64{
RandomCoord(rng, minX, maxX),
RandomCoord(rng, minY, maxY),
}).SetSRID(int(srid))
}
// RandomLineString generates a random LineString.
func RandomLineString(
rng *rand.Rand, minX float64, maxX float64, minY float64, maxY float64, srid geopb.SRID,
) *geom.LineString {
numCoords := 3 + rand.Intn(10)
randCoords := RandomValidLinearRingCoords(rng, numCoords, minX, maxX, minY, maxY)
// Extract a random substring from the LineString by truncating at the ends.
var minTrunc, maxTrunc int
// Ensure we always have at least two points.
for maxTrunc-minTrunc < 2 {
minTrunc, maxTrunc = rand.Intn(numCoords+1), rand.Intn(numCoords+1)
// Ensure maxTrunc >= minTrunc.
if minTrunc > maxTrunc {
minTrunc, maxTrunc = maxTrunc, minTrunc
}
}
return geom.NewLineString(geom.XY).MustSetCoords(randCoords[minTrunc:maxTrunc]).SetSRID(int(srid))
}
// RandomPolygon generates a random Polygon.
func RandomPolygon(
rng *rand.Rand, minX float64, maxX float64, minY float64, maxY float64, srid geopb.SRID,
) *geom.Polygon {
// TODO(otan): generate random holes inside the Polygon.
// Ideas:
// * We can do something like use 4 arbitrary points in the LinearRing to generate a BoundingBox,
// and re-use "PointInLinearRing" to generate N random points inside the 4 points to form
// a "sub" linear ring inside.
// * Generate a random set of polygons, see which ones they fully cover and use that.
return geom.NewPolygon(geom.XY).MustSetCoords([][]geom.Coord{
RandomValidLinearRingCoords(rng, 3+rng.Intn(10), minX, maxX, minY, maxY),
}).SetSRID(int(srid))
}
// RandomGeomT generates a random geom.T object within the given bounds and SRID.
func RandomGeomT(
rng *rand.Rand, minX float64, maxX float64, minY float64, maxY float64, srid geopb.SRID,
) geom.T {
shapeType := validShapeTypes[rng.Intn(len(validShapeTypes))]
switch shapeType {
case geopb.ShapeType_Point:
return RandomPoint(rng, minX, maxX, minY, maxY, srid)
case geopb.ShapeType_LineString:
return RandomLineString(rng, minX, maxX, minY, maxY, srid)
case geopb.ShapeType_Polygon:
return RandomPolygon(rng, minX, maxX, minY, maxY, srid)
case geopb.ShapeType_MultiPoint:
// TODO(otan): add empty points.
ret := geom.NewMultiPoint(geom.XY).SetSRID(int(srid))
num := 1 + rng.Intn(10)
for i := 0; i < num; i++ {
if err := ret.Push(RandomPoint(rng, minX, maxX, minY, maxY, srid)); err != nil {
panic(err)
}
}
return ret
case geopb.ShapeType_MultiLineString:
// TODO(otan): add empty LineStrings.
ret := geom.NewMultiLineString(geom.XY).SetSRID(int(srid))
num := 1 + rng.Intn(10)
for i := 0; i < num; i++ {
if err := ret.Push(RandomLineString(rng, minX, maxX, minY, maxY, srid)); err != nil {
panic(err)
}
}
return ret
case geopb.ShapeType_MultiPolygon:
// TODO(otan): add empty Polygons.
ret := geom.NewMultiPolygon(geom.XY).SetSRID(int(srid))
num := 1 + rng.Intn(10)
for i := 0; i < num; i++ {
if err := ret.Push(RandomPolygon(rng, minX, maxX, minY, maxY, srid)); err != nil {
panic(err)
}
}
return ret
case geopb.ShapeType_GeometryCollection:
ret := geom.NewGeometryCollection().SetSRID(int(srid))
num := 1 + rng.Intn(10)
for i := 0; i < num; i++ {
var shape geom.T
needShape := true
// Keep searching for a non GeometryCollection.
for needShape {
shape = RandomGeomT(rng, minX, maxX, minY, maxY, srid)
_, needShape = shape.(*geom.GeometryCollection)
}
if err := ret.Push(shape); err != nil {
panic(err)
}
}
return ret
}
panic(errors.Newf("unknown shape type: %v", shapeType))
}
// RandomGeometry generates a random Geometry with the given SRID.
func RandomGeometry(rng *rand.Rand, srid geopb.SRID) *geo.Geometry {
minX, maxX := -math.MaxFloat64, math.MaxFloat64
minY, maxY := -math.MaxFloat64, math.MaxFloat64
proj, ok := geoprojbase.Projections[srid]
if ok {
minX, maxX = proj.Bounds.MinX, proj.Bounds.MaxX
minY, maxY = proj.Bounds.MinY, proj.Bounds.MaxY
}
ret, err := geo.NewGeometryFromGeomT(RandomGeomT(rng, minX, maxX, minY, maxY, srid))
if err != nil {
panic(err)
}
return ret
}
// RandomGeography generates a random Geometry with the given SRID.
func RandomGeography(rng *rand.Rand, srid geopb.SRID) *geo.Geography {
// TODO(otan): generate geographies that traverse latitude/longitude boundaries.
minX, maxX := -180.0, 180.0
minY, maxY := -90.0, 90.0
ret, err := geo.NewGeographyFromGeomT(RandomGeomT(rng, minX, maxX, minY, maxY, srid))
if err != nil {
panic(err)
}
return ret
} | pkg/geo/geogen/geogen.go | 0.688783 | 0.604107 | geogen.go | starcoder |
package common
import (
"github.com/kaxap/gozxing"
)
type DefaultGridSampler struct{}
func NewDefaultGridSampler() GridSampler {
return DefaultGridSampler{}
}
func (s DefaultGridSampler) SampleGrid(image *gozxing.BitMatrix, dimensionX, dimensionY int,
p1ToX, p1ToY, p2ToX, p2ToY, p3ToX, p3ToY, p4ToX, p4ToY float64,
p1FromX, p1FromY, p2FromX, p2FromY, p3FromX, p3FromY, p4FromX, p4FromY float64) (*gozxing.BitMatrix, error) {
transform := PerspectiveTransform_QuadrilateralToQuadrilateral(
p1ToX, p1ToY, p2ToX, p2ToY, p3ToX, p3ToY, p4ToX, p4ToY,
p1FromX, p1FromY, p2FromX, p2FromY, p3FromX, p3FromY, p4FromX, p4FromY)
return s.SampleGridWithTransform(image, dimensionX, dimensionY, transform)
}
func (s DefaultGridSampler) SampleGridWithTransform(image *gozxing.BitMatrix,
dimensionX, dimensionY int, transform *PerspectiveTransform) (*gozxing.BitMatrix, error) {
if dimensionX <= 0 || dimensionY <= 0 {
return nil, gozxing.NewNotFoundException("dimensions X, Y = %v, %v", dimensionX, dimensionY)
}
bits, _ := gozxing.NewBitMatrix(dimensionX, dimensionY) // always success
points := make([]float64, 2*dimensionX)
for y := 0; y < dimensionY; y++ {
max := len(points)
iValue := float64(y) + 0.5
for x := 0; x < max; x += 2 {
points[x] = float64(x/2) + 0.5
points[x+1] = iValue
}
transform.TransformPoints(points)
// Quick check to see if points transformed to something inside the image;
// sufficient to check the endpoints
e := GridSampler_checkAndNudgePoints(image, points)
if e != nil {
return nil, gozxing.WrapNotFoundException(e)
}
for x := 0; x < max; x += 2 {
px := int(points[x])
py := int(points[x+1])
if px >= image.GetWidth() || py >= image.GetHeight() {
// cause of ArrayIndexOutOfBoundsException in image.Get(px, py)
// This feels wrong, but, sometimes if the finder patterns are misidentified, the resulting
// transform gets "twisted" such that it maps a straight line of points to a set of points
// whose endpoints are in bounds, but others are not. There is probably some mathematical
// way to detect this about the transformation that I don't know yet.
// This results in an ugly runtime exception despite our clever checks above -- can't have
// that. We could check each point's coordinates but that feels duplicative. We settle for
// catching and wrapping ArrayIndexOutOfBoundsException.
return nil, gozxing.NewNotFoundException()
}
if image.Get(px, py) {
// Black(-ish) pixel
bits.Set(x/2, y)
}
}
}
return bits, nil
} | common/default_grid_sampler.go | 0.793586 | 0.576929 | default_grid_sampler.go | starcoder |
package DG2D
import (
"fmt"
"math"
"github.com/notargets/gocfd/DG1D"
"github.com/notargets/gocfd/utils"
)
type RTElement struct {
N int // Order of element
Np int // Number of points in element
Nedge, Nint int // Number of Edge and Interior points
A utils.Matrix // Polynomial coefficient matrix, NpxNp
V [2]utils.Matrix // Vandermonde matrix for each direction r and s, [2]xNpxNp
Div, DivInt utils.Matrix // Divergence matrix, NpxNp for all, NintxNp Interior Points
R, S utils.Vector // Point locations defining element in [-1,1] Triangle, NpxNp
}
type RTPointType uint8
const (
All RTPointType = iota
InteriorR // R component of vector field
InteriorS // S component of vector field
Edge1 // Edge from vertex 0-1, Normal is [0,-1]
Edge2 // Edge from vertex 1-2, Normal is [1./sqrt(2),1./sqrt(2)]
Edge3 // Edge from vertex 2-0, Normal is [-1,0]
)
func NewRTElement(N int, R, S utils.Vector) (rt *RTElement) {
// We expect that there are points in R and S to match the dimension of dim(P(N-1))
/*
<---- Nint ----><---- Nint ----><---Nedge----><---Nedge----><---Nedge---->
Solution Points Edge 1 pts Edge 2 pts Edge 3 pts
<---- Nint ----><---- Nint ----><---Nedge----><---Nedge----><---Nedge---->
*/
var (
NN = N - 1
NpInterior = (NN + 1) * (NN + 2) / 2
)
if R.Len() != NpInterior || S.Len() != NpInterior {
panic("incorrect number of interior points supplied")
}
rt = &RTElement{
N: N,
R: R,
S: S,
Nint: N * (N + 1) / 2,
Nedge: N + 1,
}
rt.CalculateBasis()
return
}
func (rt *RTElement) ProjectFunctionOntoBasis(s1, s2 []float64) (sp []float64) {
var (
Np = len(s1)
)
sp = make([]float64, Np)
oosr2 := 1 / math.Sqrt(2)
for i := range s1 {
switch rt.GetTermType(i) {
case InteriorR:
// Unit vector is [1,0]
sp[i] = s1[i]
case InteriorS:
// Unit vector is [0,1]
sp[i] = s2[i]
case Edge1:
// Edge1: // Unit vector is [0,-1]
sp[i] = -s2[i]
case Edge2:
// Edge2: Unit vector is [1/sqrt(2), 1/sqrt(2)]
sp[i] = (s1[i] + s2[i]) * oosr2
case Edge3:
// Edge3: Unit vector is [-1,0]
sp[i] = -s1[i]
}
}
return
}
func (rt *RTElement) GetTermType(i int) (rtt RTPointType) {
var (
N = rt.N
Nint = N * (N + 1) / 2 // one order less than RT element in (P_k)2
Nedge = (N + 1)
)
switch {
case i < Nint:
// Unit vector is [1,0]
rtt = InteriorR
case i >= Nint && i < 2*Nint:
// Unit vector is [0,1]
rtt = InteriorS
case i >= 2*Nint && i < 2*Nint+Nedge:
// Edge1: Unit vector is [0,-1]
rtt = Edge1
case i >= 2*Nint+Nedge && i < 2*Nint+2*Nedge:
// Edge2: Unit vector is [1/sqrt(2), 1/sqrt(2)]
rtt = Edge2
case i >= 2*Nint+2*Nedge && i < 2*Nint+3*Nedge:
// Edge3: Unit vector is [-1,0]
rtt = Edge3
}
return
}
func (rt *RTElement) CalculateBasis() {
/*
This is constructed from the defining space of the RT element:
2
RT_k = [(P_k) ] + [ X ] P_k
= [ b1(r,s)_i + r * b3(r,s)_j ]
[ b2(r,s)_i + s * b3(r,s)_j ]
j := 1, (K+1)(K+2)/2
j := 1, (K+1) (highest order terms in polynomial)
The dimension of RT_k is (K+1)(K+3) and we can see from the above that the total
number of terms in the polynomial will be:
(K+1)(K+2) + K+1 = (K+1)(K+3)
The explanation for why the b3 polynomial sub-basis is partially consumed:
When multiplied by [ X ], the b3 polynomial produces terms redundant with
the b1 and b2 sub-bases. The redundancy is removed from the b3 sub-basis to
compensate, producing the correct overall dimension.
Another more physical way to think about it: The [ X ] * P_k term is tracing a
1D shell within the 2D space - the [ X ] "pointer vector" is permuted through
a 1D polynomial at high order to represent the outer surface of the shape.
Two groups of bases, two ways, a pair of polynomial types and a pair of geometric types:
1) The RT basis consists of two parts, the (P_k)2 basis and a P_k basis with (N+1) terms.
The dimension of the first part is 2 * (K+1)(K+2)/2 and the second is (K+1). The total number of terms
in the polynomial space is:
2*(K+1)(K+2)/2 + (K+1) = (K+3)(K+1)
2) The RT basis is also composed of two types of geometric bases, interior and exterior
points. The number of interior points is (K)(K+1)/2, and the number of edge points is 3*(K+1).
For each interior point, we have two basis vectors, [1,0] and [0,1]. The total degrees of freedom are:
2*(K)(K+1)/2 + 3(K+1) = (K+3)*(K+1)
The number of interior points matches a 2D Lagrangian element basis at order (K-1):
There are (K)(K+1)/2 interior points in this RT element, which matches the point count of a Lagrangian
element at order (K-1). This is very convenient and enabling for the DFR method, as it allows us to
represent the flux vector function of a solution at degree (K-1) on an RT element of order (K) by
simply transferring the values from the (K-1) solution element to the interior of the RT(K) element.
We then provide the flux values along the triangle edges of the RT(K) element, after which we can
calculate gradient, divergence, and curl using a polynomial of degree (K), yielding a gradient,
divergence, curl of order (K-1), which is exactly what we need for the solution at (K-1).
*/
/* Inputs:
(N)(N+2)/2 [r,s] points from the interior of the [-1,1] triangle
Outputs:
[R,S]: Coordinates of points within element
First (N)(N+1)/2 points: Interior points, excluding edges in [-1,1] triangle coordinates
Next (N)(N+1)/2 points: Duplicate of above
Next (N+1) points: Edge 1 locations
Next (N+1) points: Edge 2 locations
Next (N+1) points: Edge 3 locations
[V1, V2]: Vandermonde matrix for each of R and S directions:
V_ij = Psi_j([r,s]_i)
Element V_ij of the Vandermonde matrix is the basis function Psi_j evaluated at [r,s]_i
Since this is a vector valued element/basis, we have a interpolation matrix for each direction.
*/
var (
err error
N = rt.N
R, S = rt.R, rt.S
Np = (N + 1) * (N + 3)
Nint = N * (N + 1) / 2
P utils.Matrix
)
// Add the edge and additional interior (duplicated) points to complete the RT geometry2D
rt.R, rt.S = ExtendGeomToRT(N, R, S)
/*
Form the basis matrix by forming a dot product with unit vectors, matching the coordinate locations in R,S
*/
P = utils.NewMatrix(Np, Np)
rowEdge := make([]float64, Np)
oosr2 := 1 / math.Sqrt(2)
// Evaluate at geometric locations
var p0, p1 []float64
for ii, rr := range rt.R.DataP {
ss := rt.S.DataP[ii]
/*
First, evaluate the polynomial at the (r,s) coordinates
This is the same set that will be used for all dot products to form the basis matrix
*/
p0, p1 = rt.EvaluateRTBasis(rr, ss) // each of p1,p2 stores the polynomial terms for the R and S directions
// Implement dot product of (unit vector)_ii with each vector term in the polynomial evaluated at location ii
switch rt.GetTermType(ii) {
case InteriorR:
// Unit vector is [1,0]
P.M.SetRow(ii, p0)
case InteriorS:
// Unit vector is [0,1]
P.M.SetRow(ii, p1)
case Edge1:
for i := range rowEdge {
// Edge3: // Unit vector is [0,-1]
rowEdge[i] = -p1[i]
}
P.M.SetRow(ii, rowEdge)
case Edge2:
for i := range rowEdge {
// Edge1: Unit vector is [1/sqrt(2), 1/sqrt(2)]
rowEdge[i] = oosr2 * (p0[i] + p1[i])
}
P.M.SetRow(ii, rowEdge)
case Edge3:
for i := range rowEdge {
// Edge2: Unit vector is [-1,0]
rowEdge[i] = -p0[i]
}
P.M.SetRow(ii, rowEdge)
}
}
// Invert [P] = [A] to obtain the coefficients (columns) of polynomials (rows), each row is a polynomial
if rt.A, err = P.Inverse(); err != nil {
panic(err)
}
// Evaluate 2D polynomial basis at geometric locations, also evaluate derivatives Dr and Ds for R and S
P0, P1 := utils.NewMatrix(Np, Np), utils.NewMatrix(Np, Np)
Pdr0, Pds1 := utils.NewMatrix(Np, Np), utils.NewMatrix(Np, Np)
for ii, rr := range rt.R.DataP {
ss := rt.S.DataP[ii]
p0, p1 = rt.EvaluateRTBasis(rr, ss) // each of p1,p2 stores the polynomial terms for the R and S directions
P0.M.SetRow(ii, p0)
P1.M.SetRow(ii, p1)
p0, _ = rt.EvaluateRTBasis(rr, ss, Dr) // each of p1,p2 stores the polynomial terms for the R and S directions
_, p1 = rt.EvaluateRTBasis(rr, ss, Ds) // each of p1,p2 stores the polynomial terms for the R and S directions
Pdr0.M.SetRow(ii, p0)
Pds1.M.SetRow(ii, p1)
}
// Construct the Vandermonde matrices for each direction by multiplying coefficients of constrained basis
rt.V[0] = P0.Mul(rt.A)
rt.V[1] = P1.Mul(rt.A)
rt.Div = Pdr0.Mul(rt.A).Add(Pds1.Mul(rt.A))
rt.DivInt = utils.NewMatrix(Nint, Np)
for i := 0; i < Nint; i++ {
rt.DivInt.M.SetRow(i, rt.Div.Row(i).DataP)
}
rt.Np = Np
return
}
type DerivativeDirection uint8
const (
None DerivativeDirection = iota
Dr
Ds
)
func (rt *RTElement) EvaluateRTBasis(r, s float64, derivO ...DerivativeDirection) (p1, p2 []float64) {
var (
sk int
N = rt.N
Np = (N + 1) * (N + 3)
N2DBasis = (N + 1) * (N + 2) / 2 // Number of polynomial terms for each of R and S directions
deriv = None
tFunc func(r, s float64, i, j int) (val float64)
)
if len(derivO) != 0 {
deriv = derivO[0]
}
DrONTerm2D := func(r, s float64, i, j int) (val float64) {
val, _ = GradSimplex2DPTerm(r, s, i, j)
//fmt.Printf("Dr r,s,i,j,val = %8.5f,%8.5f,%d,%d,%8.5f,", r, s, i, j, val)
return
}
DsONTerm2D := func(r, s float64, i, j int) (val float64) {
_, val = GradSimplex2DPTerm(r, s, i, j)
//fmt.Printf("Ds r,s,i,j,val = %8.5f,%8.5f,%d,%d,%8.5f,", r, s, i, j, val)
return
}
switch deriv {
case None:
tFunc = Simplex2DPTerm
case Dr:
tFunc = DrONTerm2D
case Ds:
tFunc = DsONTerm2D
}
p1, p2 = make([]float64, Np), make([]float64, Np)
// Evaluate the full 2D polynomial basis first, once for each of two components
for i := 0; i <= N; i++ {
for j := 0; j <= (N - i); j++ {
val := tFunc(r, s, i, j)
p1[sk] = val
p2[sk+N2DBasis] = val
sk++
}
}
// Evaluate the term ([ X ]*(Pk)) at only the top N+1 terms (highest order) of the 2D polynomial
sk += N2DBasis // Skip to the beginning of the second polynomial group
for i := 0; i <= N; i++ {
j := N - i
val := tFunc(r, s, i, j)
switch deriv {
case None:
p1[sk] = val * r
p2[sk] = val * s
case Dr:
val2 := Simplex2DPTerm(r, s, i, j)
p1[sk] = val2 + val*r
p2[sk] = val * s
case Ds:
val2 := Simplex2DPTerm(r, s, i, j)
p1[sk] = val * r
p2[sk] = val2 + val*s
}
sk++
}
return
}
func ExtendGeomToRT(N int, rInt, sInt utils.Vector) (r, s utils.Vector) {
var (
NpEdge = N + 1
rData, sData = rInt.DataP, sInt.DataP
)
/*
Determine geometric locations of edge points, located at Gauss locations in 1D, projected onto the edges
*/
GQR, _ := DG1D.JacobiGQ(1, 1, N)
/*
// Equi-spaced edge
gqr := make([]float64, NpEdge)
space := 2. / float64(NpEdge+1)
start := -1.
for i := 0; i < NpEdge; i++ {
gqr[i] = start + space
start = gqr[i]
}
GQR := utils.NewVector(NpEdge, gqr)
*/
/*
Double the number of interior points to match each direction of the basis
*/
if N == 0 { // Special case: when N=0, the interior of the RT element is empty
rData, sData = []float64{}, []float64{}
} else {
rData = append(rData, rData...)
sData = append(sData, sData...)
}
// Calculate the triangle edges
GQRData := GQR.DataP
rEdgeData := make([]float64, NpEdge*3)
sEdgeData := make([]float64, NpEdge*3)
for i := 0; i < NpEdge; i++ {
gp := GQRData[i]
// Edge 1
rEdgeData[i] = gp
sEdgeData[i] = -1
// Edge 2 (hypotenuse)
gpT := 0.5 * (gp + 1)
rEdgeData[i+NpEdge] = 1 - 2*gpT
sEdgeData[i+NpEdge] = -1 + 2*gpT
// Edge 3
rEdgeData[i+2*NpEdge] = -1
sEdgeData[i+2*NpEdge] = -gp
}
rData = append(rData, rEdgeData...)
sData = append(sData, sEdgeData...)
r = utils.NewVector(len(rData), rData)
s = utils.NewVector(len(sData), sData)
return
}
func NodesEpsilon(N int) (R, S utils.Vector) {
/*
From the 2017 paper "Ainv Direct Flux Reconstruction Scheme for Advection Diffusion Problems on Triangular Grids"
This is a node set that is compatible with DFR in that it implements colocated solution and flux points for the
interior nodes, while enabling a set of face nodes for the N+1 degree flux polynomial
There are two node sets, one for N=3 and one for N=4. They were computed via an optimization, and are only
available for N=3 and N=4. Also, the convergence of N=3 is degraded for diffusion problems.
Therefore, only the N=4 points should be used for Viscous solutions, while the N=3 nodes are fine for inviscid
*/
var (
Np = (N + 1) * (N + 2) / 2
epsD []float64
)
switch N {
// Cases 3,4 from Romero and Jameson, Others from Williams and Shun
case 0:
epsD = []float64{
0.3333333333333333,
0.3333333333333333,
0.3333333333333333,
}
case 1:
epsD = []float64{
0.666666666666667, 0.166666666666667, 0.166666666666667,
0.166666666666667, 0.666666666666667, 0.166666666666667,
0.166666666666667, 0.166666666666667, 0.666666666666667,
}
case 2:
epsD = []float64{
0.816847572980440, 0.091576213509780, 0.091576213509780, 0.445948490915964, 0.445948490915964, 0.108103018168071,
0.091576213509780, 0.816847572980440, 0.091576213509780, 0.445948490915964, 0.108103018168071, 0.445948490915964,
0.091576213509780, 0.091576213509780, 0.816847572980440, 0.108103018168071, 0.445948490915964, 0.445948490915964,
}
case 3:
epsD = []float64{
0.3333333333333333, 0.055758983558155, 0.88848203288369, 0.055758983558155, 0.290285227512689, 0.6388573870878149, 0.290285227512689, 0.6388573870878149, 0.070857385399496, 0.070857385399496,
0.3333333333333333, 0.055758983558155, 0.055758983558155, 0.88848203288369, 0.070857385399496, 0.290285227512689, 0.6388573870878149, 0.070857385399496, 0.290285227512689, 0.6388573870878149,
0.3333333333333333, 0.88848203288369, 0.055758983558155, 0.055758983558155, 0.6388573870878149, 0.070857385399496, 0.070857385399496, 0.290285227512689, 0.6388573870878149, 0.290285227512689,
}
case 4:
epsD = []float64{
0.034681580220044, 0.9306368395599121, 0.034681580220044, 0.243071555674492, 0.513856888651016, 0.243071555674492, 0.473372556704605, 0.05325488659079003, 0.473372556704605, 0.200039998995093, 0.752666332493468, 0.200039998995093, 0.752666332493468, 0.047293668511439, 0.047293668511439,
0.034681580220044, 0.034681580220044, 0.9306368395599121, 0.243071555674492, 0.243071555674492, 0.513856888651016, 0.473372556704605, 0.473372556704605, 0.05325488659079003, 0.047293668511439, 0.200039998995093, 0.752666332493468, 0.047293668511439, 0.200039998995093, 0.752666332493468,
0.9306368395599121, 0.034681580220044, 0.034681580220044, 0.513856888651016, 0.243071555674492, 0.243071555674492, 0.05325488659079003, 0.473372556704605, 0.473372556704605, 0.752666332493468, 0.047293668511439, 0.047293668511439, 0.200039998995093, 0.752666332493468, 0.200039998995093,
}
case 5:
epsD = []float64{
0.943774095634672, 0.028112952182664, 0.028112952182664, 0.645721803061365, 0.177139098469317, 0.177139098469317, 0.405508595867433, 0.405508595867433, 0.188982808265134, 0.148565812270887, 0.148565812270887, 0.033533207700614, 0.817900980028499, 0.817900980028499, 0.033533207700614, 0.357196298615681, 0.357196298615681, 0.037824789609186, 0.604978911775132, 0.604978911775132, 0.037824789609186,
0.028112952182664, 0.943774095634672, 0.028112952182664, 0.177139098469317, 0.645721803061365, 0.177139098469317, 0.405508595867433, 0.188982808265134, 0.405508595867433, 0.817900980028499, 0.033533207700614, 0.148565812270887, 0.148565812270887, 0.033533207700614, 0.817900980028499, 0.604978911775132, 0.037824789609186, 0.357196298615681, 0.357196298615681, 0.037824789609186, 0.604978911775132,
0.028112952182664, 0.028112952182664, 0.943774095634672, 0.177139098469317, 0.177139098469317, 0.645721803061365, 0.188982808265134, 0.405508595867433, 0.405508595867433, 0.033533207700614, 0.817900980028499, 0.817900980028499, 0.033533207700614, 0.148565812270887, 0.148565812270887, 0.037824789609186, 0.604978911775132, 0.604978911775132, 0.037824789609186, 0.357196298615681, 0.357196298615681,
}
case 6:
epsD = []float64{
0.960045625755613, 0.019977187122193, 0.019977187122193, 0.736556464940005, 0.131721767529998, 0.131721767529998, 0.333333333333333, 0.485135346793461, 0.485135346793461, 0.029729306413079, 0.107951981846011, 0.107951981846011, 0.024136808036039, 0.867911210117951, 0.867911210117951, 0.024136808036039, 0.270840772921567, 0.270840772921567, 0.028286656697710, 0.700872570380723, 0.700872570380723, 0.028286656697710, 0.316549598844617, 0.316549598844617, 0.146795716949245, 0.536654684206138, 0.536654684206138, 0.146795716949245,
0.019977187122193, 0.960045625755613, 0.019977187122193, 0.131721767529998, 0.736556464940005, 0.131721767529998, 0.333333333333333, 0.485135346793461, 0.029729306413079, 0.485135346793461, 0.867911210117951, 0.024136808036039, 0.107951981846011, 0.107951981846011, 0.024136808036039, 0.867911210117951, 0.700872570380723, 0.028286656697710, 0.270840772921567, 0.270840772921567, 0.028286656697710, 0.700872570380723, 0.536654684206138, 0.146795716949245, 0.316549598844617, 0.316549598844617, 0.146795716949245, 0.536654684206138,
0.019977187122193, 0.019977187122193, 0.960045625755613, 0.131721767529998, 0.131721767529998, 0.736556464940005, 0.333333333333333, 0.029729306413079, 0.485135346793461, 0.485135346793461, 0.024136808036039, 0.867911210117951, 0.867911210117951, 0.024136808036039, 0.107951981846011, 0.107951981846011, 0.028286656697710, 0.700872570380723, 0.700872570380723, 0.028286656697710, 0.270840772921567, 0.270840772921567, 0.146795716949245, 0.536654684206138, 0.536654684206138, 0.146795716949245, 0.316549598844617, 0.316549598844617,
}
case 7:
epsD = []float64{
0.957657154441070, 0.021171422779465, 0.021171422779465, 0.798831205208225, 0.100584397395888, 0.100584397395888, 0.457923384576135, 0.271038307711932, 0.271038307711932, 0.440191258403832, 0.440191258403832, 0.119617483192335, 0.101763679498021, 0.101763679498021, 0.018256679074748, 0.879979641427232, 0.879979641427232, 0.018256679074748, 0.394033271669987, 0.394033271669987, 0.023404705466341, 0.582562022863673, 0.582562022863673, 0.023404705466341, 0.226245530909229, 0.226245530909229, 0.022223854547989, 0.751530614542782, 0.751530614542782, 0.022223854547989, 0.635737183263105, 0.635737183263105, 0.115183589115563, 0.249079227621332, 0.249079227621332, 0.115183589115563,
0.021171422779465, 0.957657154441070, 0.021171422779465, 0.100584397395888, 0.798831205208225, 0.100584397395888, 0.271038307711932, 0.457923384576135, 0.271038307711932, 0.440191258403832, 0.119617483192335, 0.440191258403832, 0.879979641427232, 0.018256679074748, 0.101763679498021, 0.101763679498021, 0.018256679074748, 0.879979641427232, 0.582562022863673, 0.023404705466341, 0.394033271669987, 0.394033271669987, 0.023404705466341, 0.582562022863673, 0.751530614542782, 0.022223854547989, 0.226245530909229, 0.226245530909229, 0.022223854547989, 0.751530614542782, 0.249079227621332, 0.115183589115563, 0.635737183263105, 0.635737183263105, 0.115183589115563, 0.249079227621332,
0.021171422779465, 0.021171422779465, 0.957657154441070, 0.100584397395888, 0.100584397395888, 0.798831205208225, 0.271038307711932, 0.271038307711932, 0.457923384576135, 0.119617483192335, 0.440191258403832, 0.440191258403832, 0.018256679074748, 0.879979641427232, 0.879979641427232, 0.018256679074748, 0.101763679498021, 0.101763679498021, 0.023404705466341, 0.582562022863673, 0.582562022863673, 0.023404705466341, 0.394033271669987, 0.394033271669987, 0.022223854547989, 0.751530614542782, 0.751530614542782, 0.022223854547989, 0.226245530909229, 0.226245530909229, 0.115183589115563, 0.249079227621332, 0.249079227621332, 0.115183589115563, 0.635737183263105, 0.635737183263105,
}
default:
panic(fmt.Errorf("Epsilon nodes not defined for N = %v\n", N))
}
eps := utils.NewMatrix(3, Np, epsD)
T := utils.NewMatrix(2, 3, []float64{
-1, 1, -1,
-1, -1, 1,
})
RS := T.Mul(eps)
R = RS.Row(0)
S = RS.Row(1)
return
}
func (rt *RTElement) GetInternalLocations(F utils.Vector) (Finternal []float64) {
var (
Nint = rt.Nint
)
Finternal = make([]float64, Nint)
for i := 0; i < Nint; i++ {
Finternal[i] = F.DataP[i]
}
return
}
func (rt *RTElement) GetEdgeLocations(F utils.Vector) (Fedge []float64) {
var (
Nint = rt.Nint
NedgeTot = rt.Nedge * 3
)
Fedge = make([]float64, NedgeTot)
for i := 0; i < NedgeTot; i++ {
Fedge[i] = F.DataP[i+2*Nint]
}
return
} | DG2D/RaviartThomasElement.go | 0.606615 | 0.618377 | RaviartThomasElement.go | starcoder |
package generator
// medusa applies the 3D Medusa solving technique: candidates joined by
// strong links (and the two candidates of bi-value cells) are painted with
// two alternating colours, blue and red, and a series of elimination rules
// is then applied to each colouring. res reports whether any candidate was
// removed; the method returns as soon as one rule fires.
func (g *Grid) medusa(verbose uint) (res bool) {
	// pairMaps[d] collects candidate pairs for digit d; these act as the
	// strong links of the colouring graph. NOTE(review): semantics assumed
	// from usage — confirm against unitPairs' definition.
	var pairMaps [10]map[pair]bool
	g.unitPairs(&pairMaps)
	// strongLinks maps each pair to a digit bitmask (type cell) recording
	// every digit for which that pair is strongly linked.
	strongLinks := make(map[pair]cell)
	for d := 1; d <= 9; d++ {
		for p := range pairMaps[d] {
			strongLinks[p] |= 1 << d
		}
	}
	// linkEnds indexes the links by endpoint so the colouring walk can
	// follow the graph from any point.
	linkEnds := make(map[point][]pair)
	for p := range strongLinks {
		linkEnds[p.left] = append(linkEnds[p.left], p)
		linkEnds[p.right] = append(linkEnds[p.right], p)
	}
	// used marks points already coloured on a previous pass so each
	// connected component of the link graph is processed only once
	// (colorGrid records every point it visits).
	used := make(map[point]bool)
	for p, c := range strongLinks {
		if used[p.left] || used[p.right] {
			continue
		}
		used[p.left] = true
		// Seed the colouring: paint the lowest linked digit of the left
		// endpoint blue, then propagate alternating colours through the
		// whole component.
		digit := c.lowestSetBit()
		var colors [rows][cols][10]color
		colors[p.left.r][p.left.c][digit] = blue
		g.colorGrid(digit, p.left, &colors, linkEnds, strongLinks, &used)
		// Twice in a cell. If the same color appears twice in a cell, that color can be removed from the whole puzzle.
		blueMoreThanOnce := false
		redMoreThanOnce := false
		for r := zero; r < rows; r++ {
			for c := zero; c < cols; c++ {
				blues := 0
				reds := 0
				// The loop variable c below shadows the column index c;
				// the range expression colors[r][c] is evaluated first, so
				// it still refers to the column.
				for _, c := range colors[r][c] {
					switch c {
					case blue:
						blues++
					case red:
						reds++
					}
				}
				blueMoreThanOnce = blueMoreThanOnce || blues > 1
				redMoreThanOnce = redMoreThanOnce || reds > 1
			}
		}
		if blueMoreThanOnce {
			g.removeColor(verbose, blue, &colors, "twice in a cell", &res)
		} else if redMoreThanOnce {
			g.removeColor(verbose, red, &colors, "twice in a cell", &res)
		}
		if res {
			return
		}
		// Twice in a unit. If the same color appears twice in a unit (box, column, or row) for the same digit, that color can be removed from the whole puzzle.
		blueMoreThanOnce = false
		redMoreThanOnce = false
		b, r := g.groupColors(&box, &colors)
		blueMoreThanOnce = blueMoreThanOnce || b
		redMoreThanOnce = redMoreThanOnce || r
		b, r = g.groupColors(&col, &colors)
		blueMoreThanOnce = blueMoreThanOnce || b
		redMoreThanOnce = redMoreThanOnce || r
		b, r = g.groupColors(&row, &colors)
		blueMoreThanOnce = blueMoreThanOnce || b
		redMoreThanOnce = redMoreThanOnce || r
		if blueMoreThanOnce {
			g.removeColor(verbose, blue, &colors, "twice in a unit", &res)
		} else if redMoreThanOnce {
			g.removeColor(verbose, red, &colors, "twice in a unit", &res)
		}
		if res {
			return
		}
		// Two colors in a cell. If a cell contains digits that are colored both blue and red, any non-colored digits can be removed.
		// (One of blue/red must be the true colouring, and either way this
		// cell is solved by a coloured digit.)
		for r := zero; r < rows; r++ {
			for c := zero; c < cols; c++ {
				blueFound := 0
				redFound := 0
				for d := 1; d <= 9; d++ {
					switch colors[r][c][d] {
					case blue:
						blueFound |= 1 << d
					case red:
						redFound |= 1 << d
					}
				}
				if blueFound != 0 && redFound != 0 {
					for d := 1; d <= 9; d++ {
						if blueFound&(1<<d) != 0 || redFound&(1<<d) != 0 {
							continue
						}
						if g.pt(point{r, c}).andNot(1 << d) {
							g.cellChange(&res, verbose, "3dMedusa (two colors in a cell): in %s, remove %d\n", point{r, c}, d)
						}
					}
				}
			}
		}
		if res {
			return
		}
		// Two colors elsewhere. In all cells C containing a digit X, if that cell can see a blue X and a red X, then X can be removed from the cell C.
		// Mark the cells that see each blue and red digit.
		// These influence arrays are also reused by the "cell emptied by
		// color" rule further below.
		var blueInfluence, redInfluence, immune [rows][cols][10]bool
		for r := zero; r < rows; r++ {
			for c := zero; c < cols; c++ {
				for d := 1; d <= 9; d++ {
					switch colors[r][c][d] {
					case blue:
						immune[r][c][d] = true // Cells that are part of the 3d medusa are not eligible for removal.
						coloredNeighbors(d, point{r, c}, &blueInfluence)
					case red:
						immune[r][c][d] = true // Cells that are part of the 3d medusa are not eligible for removal.
						coloredNeighbors(d, point{r, c}, &redInfluence)
					}
				}
			}
		}
		for r := zero; r < rows; r++ {
			for c := zero; c < cols; c++ {
				for d := 1; d <= 9; d++ {
					if !immune[r][c][d] {
						if blueInfluence[r][c][d] && redInfluence[r][c][d] {
							if g.pt(point{r, c}).andNot(1 << d) {
								g.cellChange(&res, verbose, "3dMedusa (two colors elsewhere): in %s, remove %d\n", point{r, c}, d)
							}
						}
					}
				}
			}
		}
		if res {
			return
		}
		// Two colors unit and cell. If a cell C containing a digit X can see another cell containing a colored X and in C there is a candidate with the opposite color, X can be removed from C.
		for r := zero; r < rows; r++ {
			for c := zero; c < cols; c++ {
				blueFound := 0
				redFound := 0
				// This immune array shadows the outer one on purpose: it
				// tracks only the coloured digits of the current cell.
				var immune [10]bool
				for d := 1; d <= 9; d++ {
					switch colors[r][c][d] {
					case blue:
						blueFound |= 1 << d
						immune[d] = true
					case red:
						redFound |= 1 << d
						immune[d] = true
					}
				}
				for d := 1; d <= 9; d++ {
					if !immune[d] && blueFound != 0 && canSeeColor(d, point{r, c}, red, &colors) {
						if g.pt(point{r, c}).andNot(1 << d) {
							g.cellChange(&res, verbose, "3dMedusa (two colors unit and cell): in %s, remove %d\n", point{r, c}, d)
						}
					} else if !immune[d] && redFound != 0 && canSeeColor(d, point{r, c}, blue, &colors) {
						if g.pt(point{r, c}).andNot(1 << d) {
							g.cellChange(&res, verbose, "3dMedusa (two colors unit and cell): in %s, remove %d\n", point{r, c}, d)
						}
					}
				}
			}
		}
		if res {
			return
		}
		// Cell emptied by color.
		// If every digit of an unsolved, completely uncoloured cell sees a
		// given colour, that colour would empty the cell, so it is false.
		// NOTE(review): the d loop ranges over all digits 1-9, including
		// digits that are not candidates of this cell — verify whether the
		// check should be restricted to the cell's candidates.
	outer:
		for r := zero; r < rows; r++ {
		inner:
			for c := zero; c < cols; c++ {
				if bitCount[g.cells[r][c]] == 1 {
					continue
				}
				seeBlue := true
				seeRed := true
				for d := 1; d <= 9; d++ {
					if colors[r][c][d] != black {
						continue inner
					}
					if !blueInfluence[r][c][d] {
						seeBlue = false
					}
					if !redInfluence[r][c][d] {
						seeRed = false
					}
				}
				if seeBlue {
					g.removeColor(verbose, blue, &colors, "cell emptied by color", &res)
					break outer
				}
				if seeRed {
					g.removeColor(verbose, red, &colors, "cell emptied by color", &res)
					break outer
				}
			}
		}
		if res {
			return
		}
	}
	return
}
// colorGrid recursively propagates the two-colouring from point p for the
// given digit: the opposite colour is assigned across each strong link
// incident to p, and, within a bi-value cell, to the cell's other
// candidate digit. Already-coloured entries terminate the recursion, so
// the walk visits each (point, digit) at most once. Every visited point is
// recorded in used.
func (g *Grid) colorGrid(digit int, p point, colors *[rows][cols][10]color, linkEnds map[point][]pair, strongLinks map[pair]cell, used *map[point]bool) {
	(*used)[p] = true
	currColor := colors[p.r][p.c][digit]
	// If the point is a bivalue (only two candidate digits in the cell), then color the other one the opposite color.
	cell := *g.pt(p)
	if bitCount[cell] == 2 {
		// o is the cell's other candidate digit.
		o := (cell &^ (1 << digit)).lowestSetBit()
		if colors[p.r][p.c][o] == black {
			colors[p.r][p.c][o] = flipColor(currColor)
			g.colorGrid(o, p, colors, linkEnds, strongLinks, used)
		}
	}
	for _, l := range linkEnds[p] {
		// Only follow links that are strong for this digit.
		if strongLinks[l]&(1<<digit) == 0 {
			continue
		}
		if p == l.left { // If we are at the left end of the link, process the right end.
			if colors[l.right.r][l.right.c][digit] == black {
				colors[l.right.r][l.right.c][digit] = flipColor(currColor)
				g.colorGrid(digit, l.right, colors, linkEnds, strongLinks, used)
			}
		} else { // Process the left end.
			if colors[l.left.r][l.left.c][digit] == black {
				colors[l.left.r][l.left.c][digit] = flipColor(currColor)
				g.colorGrid(digit, l.left, colors, linkEnds, strongLinks, used)
			}
		}
	}
}
// groupColors reports, for the given group of units (boxes, columns, or
// rows), whether blue and/or red is painted on more than one candidate of
// the same digit within any single unit. The two results are
// (blueMoreThanOnce, redMoreThanOnce) in that order.
func (g *Grid) groupColors(gr *group, colors *[rows][cols][10]color) (bool, bool) {
	blueMoreThanOnce := false
	redMoreThanOnce := false
	for _, ps := range gr.unit {
		// Count the coloured candidates of each digit within this unit.
		var blues, reds [10]int
		for _, p := range ps {
			for d := 1; d <= 9; d++ {
				if *g.pt(p)&(1<<d) != 0 {
					switch colors[p.r][p.c][d] {
					case blue:
						blues[d]++
					case red:
						reds[d]++
					}
				}
			}
		}
		for d := 1; d <= 9; d++ {
			// "Twice in a unit" fires as soon as a colour appears on two
			// candidates of the same digit, so the threshold is > 1. The
			// previous > 1 was written as > 2, which missed the
			// two-occurrence case and disagreed with the > 1 threshold the
			// caller uses for "twice in a cell".
			if blues[d] > 1 {
				blueMoreThanOnce = true
			}
			if reds[d] > 1 {
				redMoreThanOnce = true
			}
		}
	}
	return blueMoreThanOnce, redMoreThanOnce
}
// removeColor erases every candidate painted with colour cl from the grid,
// reporting each individual removal through cellChange with the supplied
// rule message.
func (g *Grid) removeColor(verbose uint, cl color, colors *[rows][cols][10]color, message string, res *bool) {
	for r := zero; r < rows; r++ {
		for c := zero; c < cols; c++ {
			for d, painted := range colors[r][c] {
				if painted != cl {
					continue
				}
				if g.pt(point{r, c}).andNot(1 << d) {
					g.cellChange(res, verbose, "3dMedusa (%s): in %s, remove %d\n", message, point{r, c}, d)
				}
			}
		}
	}
}
func canSeeColor(d int, curr point, c color, colors *[rows][cols][10]color) bool {
for _, u := range []*[9]point{&box.unit[boxOfPoint(curr)], &col.unit[curr.c], &row.unit[curr.r]} {
for _, p := range u {
if p == curr {
continue
}
if colors[p.r][p.c][d] == c {
return true
}
}
}
return false
} | generator/3dMedusa.go | 0.615088 | 0.460835 | 3dMedusa.go | starcoder |
package packet
import (
"encoding/json"
"fmt"
"io"
"math"
"github.com/google/uuid"
"github.com/tsatke/nbt"
"github.com/tsatke/mcserver/game/chat"
"github.com/tsatke/mcserver/game/id"
)
// Encoder is a decorating struct, which uses specialized algorithms to
// write data into an underlying io.Writer.
//
// All Write* methods report failure by panicking via panicIffErr rather
// than returning an error. NOTE(review): presumably the panic is recovered
// further up the packet-writing stack — confirm.
type Encoder struct {
	// W is the underlying writer into which data will eventually
	// be written.
	W io.Writer
}
// WriteVarInt writes the given int into the writer as a VarInt: seven data
// bits per byte, least-significant group first, with the high bit set on
// every byte except the last.
func (e Encoder) WriteVarInt(fieldName string, val int) {
	remaining := uint32(val)
	var buf []byte
	for {
		b := byte(remaining & 0b01111111)
		remaining >>= 7
		if remaining == 0 {
			buf = append(buf, b)
			break
		}
		buf = append(buf, b|0b10000000)
	}
	_write(e.W, fieldName, buf)
}
// WriteInt writes the given int32 with ByteOrder into the writer.
func (e Encoder) WriteInt(fieldName string, val int32) {
	buf := make([]byte, IntSize)
	ByteOrder.PutUint32(buf, uint32(val))
	_write(e.W, fieldName, buf)
}
// WriteString writes a VarInt into the writer, indicating the length of the given
// string. After that, the string is written as byte array.
// Note that the length prefix counts bytes (len(s)), not runes.
// See Encoder.WriteVarInt.
func (e Encoder) WriteString(fieldName, s string) {
	e.WriteVarInt(fieldName+" string length", len(s))
	_write(e.W, fieldName, []byte(s))
}
// WriteUshort writes the given uint16 with ByteOrder into the writer.
func (e Encoder) WriteUshort(fieldName string, val uint16) {
	buf := make([]byte, UnsignedShortSize)
	ByteOrder.PutUint16(buf, val)
	_write(e.W, fieldName, buf)
}
// WriteByte writes the given int8 into the writer as unsigned value
// (the two's-complement bit pattern is reinterpreted as a byte).
func (e Encoder) WriteByte(fieldName string, val int8) {
	_write(e.W, fieldName, []byte{byte(val)})
}
// WriteByteArray writes the given byte array into the writer.
// No length prefix is emitted; callers wanting one must write it first.
func (e Encoder) WriteByteArray(fieldName string, val []byte) {
	_write(e.W, fieldName, val)
}
// WriteUbyte writes the given byte into the writer.
func (e Encoder) WriteUbyte(fieldName string, val uint8) {
	_write(e.W, fieldName, []byte{val})
}
// WriteBoolean writes a single byte into the writer, 0x01 for true, 0x00 for false.
func (e Encoder) WriteBoolean(fieldName string, val bool) {
	b := byte(0x00)
	if val {
		b = 0x01
	}
	_write(e.W, fieldName, []byte{b})
}
// WriteLong writes the given int64 with ByteOrder into the writer.
func (e Encoder) WriteLong(fieldName string, val int64) {
	buf := make([]byte, LongSize)
	ByteOrder.PutUint64(buf, uint64(val))
	_write(e.W, fieldName, buf)
}
// WriteDouble writes the given float64 with ByteOrder into the writer
// as IEEE754 encoded value.
func (e Encoder) WriteDouble(fieldName string, val float64) {
	buf := make([]byte, DoubleSize)
	ByteOrder.PutUint64(buf, math.Float64bits(val))
	_write(e.W, fieldName, buf)
}
// WriteFloat writes the given float32 with ByteOrder into the writer
// as IEEE754 encoded value.
func (e Encoder) WriteFloat(fieldName string, val float32) {
	buf := make([]byte, FloatSize)
	ByteOrder.PutUint32(buf, math.Float32bits(val))
	_write(e.W, fieldName, buf)
}
// WriteUUID writes the 16 bytes of the UUID as plain bytes into the writer.
func (e Encoder) WriteUUID(fieldName string, val uuid.UUID) {
	_write(e.W, fieldName, val[:])
}
// WriteChat writes the json marshalled value of the given chat object
// into the writer as a string (length-prefixed, see WriteString).
// A marshalling failure panics via panicIffErr like any write failure.
func (e Encoder) WriteChat(fieldName string, val chat.Chat) {
	data, err := json.Marshal(val)
	panicIffErr(fieldName, err)
	e.WriteString(fieldName, string(data))
}
// WriteID writes the given ID into the writer as a string.
// See Encoder.WriteString.
func (e Encoder) WriteID(fieldName string, val id.ID) {
	e.WriteString(fieldName, val.String())
}
// WriteNBT writes an nbt tag into the writer, using a fresh nbt encoder
// configured with the package's ByteOrder.
func (e Encoder) WriteNBT(fieldName string, val nbt.Tag) {
	enc := nbt.NewEncoder(e.W, ByteOrder)
	panicIffErr(fieldName, enc.WriteTag(val))
}
// _write writes buf to w in full, converting any error or short write into
// a panic via panicIffErr. The short-write check is defensive: io.Writer
// requires a non-nil error when n < len(buf), but this guards against
// non-conforming writers.
func _write(w io.Writer, fieldName string, buf []byte) {
	n, err := w.Write(buf)
	if err != nil {
		panicIffErr(fieldName, fmt.Errorf("write: %w", err))
	}
	if n != len(buf) {
		panicIffErr(fieldName, fmt.Errorf("need to write %d bytes, but wrote %d", len(buf), n))
	}
}
package geojson_to_shape
import (
"github.com/skyhookml/skyhookml/skyhook"
"github.com/skyhookml/skyhookml/exec_ops"
"log"
"github.com/paulmach/go.geojson"
gomapinfer "github.com/mitroadmaps/gomapinfer/common"
)
// GeoJsonToShape converts the GeoJSON geometries of the "geojson" input
// into Shape data: for every geo-referenced image in the "images" input it
// collects the geometries intersecting that image's bounding box, projects
// their geo coordinates into the image's pixel space, and writes one shape
// item per image (keyed by the image item's key) into outputDataset.
func GeoJsonToShape(url string, outputDataset skyhook.Dataset, task skyhook.ExecTask) error {
	// Load all GeoJSON geometries.
	var geometries []*geojson.Geometry
	// addFeatures flattens a FeatureCollection into geometries, expanding
	// nested GeometryCollections with an explicit stack (q).
	addFeatures := func(collection *geojson.FeatureCollection) {
		var q []*geojson.Geometry
		for _, feature := range collection.Features {
			if feature.Geometry == nil {
				continue
			}
			q = append(q, feature.Geometry)
		}
		for len(q) > 0 {
			// Pop from the tail of the stack.
			geometry := q[len(q)-1]
			q = q[0 : len(q)-1]
			if geometry.Type != geojson.GeometryCollection {
				geometries = append(geometries, geometry)
				continue
			}
			// collection geometry, need to add all its children
			q = append(q, geometry.Geometries...)
		}
	}
	for _, item := range task.Items["geojson"][0] {
		data, err := item.LoadData()
		if err != nil {
			return err
		}
		addFeatures(data.(skyhook.GeoJsonData).Collection)
	}
	log.Printf("[geojson_to_shape] got %d geometries from GeoJSON files", len(geometries))
	// Loop over the images and find the geometries that intersect each one.
	// For now we do O(n^2) loop but later we could create a spatial index.
	for _, item := range task.Items["images"][0] {
		var metadata skyhook.GeoImageMetadata
		skyhook.JsonUnmarshal([]byte(item.Metadata), &metadata)
		bbox := metadata.GetBbox()
		rect := bbox.Rect()
		dims := [2]int{metadata.Width, metadata.Height}
		// fromGeo projects a geo coordinate pair into integer pixel
		// coordinates of this image. NOTE(review): coordinate ordering
		// (lon/lat) is assumed to match bbox.FromGeo — confirm.
		fromGeo := func(coordinate []float64) [2]int {
			p := bbox.FromGeo([2]float64{coordinate[0], coordinate[1]})
			return [2]int{
				int(p[0] * float64(dims[0])),
				int(p[1] * float64(dims[1])),
			}
		}
		var shapes []skyhook.Shape
		// handlePoint appends a point shape if the point lies inside the
		// image's geo rectangle.
		handlePoint := func(coordinate []float64) {
			p := gomapinfer.Point{coordinate[0], coordinate[1]}
			if !rect.Contains(p) {
				return
			}
			shapes = append(shapes, skyhook.Shape{
				Type:   skyhook.PointShape,
				Points: [][2]int{fromGeo(coordinate)},
			})
		}
		// handleLineString appends a polyline shape if its bounding box
		// intersects the image's geo rectangle.
		handleLineString := func(coordinates [][]float64) {
			bounds := gomapinfer.EmptyRectangle
			for _, coordinate := range coordinates {
				p := gomapinfer.Point{coordinate[0], coordinate[1]}
				bounds = bounds.Extend(p)
			}
			if !rect.Intersects(bounds) {
				return
			}
			points := make([][2]int, len(coordinates))
			for i := range points {
				points[i] = fromGeo(coordinates[i])
			}
			shapes = append(shapes, skyhook.Shape{
				Type:   skyhook.PolyLineShape,
				Points: points,
			})
		}
		// handlePolygon appends a polygon shape (exterior ring only) if its
		// bounding box intersects the image's geo rectangle.
		handlePolygon := func(coordinates [][][]float64) {
			// We do not support holes yet, so just use coordinates[0].
			// coordinates[0] is the exterior ring while coordinates[1:] specify
			// holes in the polygon that should be excluded.
			bounds := gomapinfer.EmptyRectangle
			for _, coordinate := range coordinates[0] {
				p := gomapinfer.Point{coordinate[0], coordinate[1]}
				bounds = bounds.Extend(p)
			}
			if !rect.Intersects(bounds) {
				return
			}
			points := make([][2]int, len(coordinates[0]))
			for i := range points {
				points[i] = fromGeo(coordinates[0][i])
			}
			shapes = append(shapes, skyhook.Shape{
				Type:   skyhook.PolygonShape,
				Points: points,
			})
		}
		// Dispatch each geometry to its handler; the Multi* variants simply
		// reuse the scalar handlers per member.
		for _, g := range geometries {
			if g.Type == geojson.GeometryPoint {
				handlePoint(g.Point)
			} else if g.Type == geojson.GeometryMultiPoint {
				for _, coordinate := range g.MultiPoint {
					handlePoint(coordinate)
				}
			} else if g.Type == geojson.GeometryLineString {
				handleLineString(g.LineString)
			} else if g.Type == geojson.GeometryMultiLineString {
				for _, coordinates := range g.MultiLineString {
					handleLineString(coordinates)
				}
			} else if g.Type == geojson.GeometryPolygon {
				handlePolygon(g.Polygon)
			} else if g.Type == geojson.GeometryMultiPolygon {
				for _, coordinates := range g.MultiPolygon {
					handlePolygon(coordinates)
				}
			}
		}
		// All shapes for this image go into a single frame.
		shapeData := skyhook.ShapeData{
			Shapes: [][]skyhook.Shape{shapes},
			Metadata: skyhook.ShapeMetadata{
				CanvasDims: dims,
			},
		}
		err := exec_ops.WriteItem(url, outputDataset, item.Key, shapeData)
		if err != nil {
			return err
		}
	}
	return nil
}
// init registers the geojson_to_shape operation with the Skyhook exec-op
// registry: a single merged task that converts a GeoJSON dataset plus a
// geo-image dataset into one shape dataset.
func init() {
	skyhook.AddExecOpImpl(skyhook.ExecOpImpl{
		Config: skyhook.ExecOpConfig{
			ID:          "geojson_to_shape",
			Name:        "GeoJSON to Shape",
			Description: "Convert from GeoJSON to Shape type given a Geo-Image dataset",
		},
		Inputs: []skyhook.ExecInput{
			{Name: "geojson", DataTypes: []skyhook.DataType{skyhook.GeoJsonType}},
			{Name: "images", DataTypes: []skyhook.DataType{skyhook.GeoImageType}},
		},
		Outputs: []skyhook.ExecOutput{{Name: "shapes", DataType: skyhook.ShapeType}},
		// No special resource requirements.
		Requirements: func(node skyhook.Runnable) map[string]int {
			return nil
		},
		GetTasks: exec_ops.SingleTask("merged"),
		// Prepare wires the node's "shapes" output dataset into
		// GeoJsonToShape via a SimpleExecOp.
		Prepare: func(url string, node skyhook.Runnable) (skyhook.ExecOp, error) {
			applyFunc := func(task skyhook.ExecTask) error {
				return GeoJsonToShape(url, node.OutputDatasets["shapes"], task)
			}
			return skyhook.SimpleExecOp{ApplyFunc: applyFunc}, nil
		},
		ImageName: "skyhookml/basic",
	})
}
package emulator
import (
"image/color"
"strconv"
"gopkg.in/yaml.v2"
)
// ColourMap resolves the colour to draw for the background, for sprites
// without an explicit mapping (default), and for individually mapped
// sprite addresses.
type ColourMap struct {
	backgroundColour color.RGBA
	defaultColour    color.RGBA
	// spriteColours maps a sprite address to its colour; nil when the map
	// was built via SimpleColourMap (see getSpriteColour's nil check).
	spriteColours map[int]color.RGBA
}

// ColourMapYaml is the on-disk YAML representation of a ColourMap.
// Colour values are pallette indices (see getPalletColour); sprite keys
// and range bounds are hexadecimal address strings.
type ColourMapYaml struct {
	BackgroundColour int            `yaml:"background"`
	DefaultColour    int            `yaml:"default"`
	SpriteColours    map[string]int `yaml:"sprites"`
	RangeColours     []colourRange  `yaml:"ranges"`
}

// colourRange assigns one pallette colour to an inclusive range of
// hexadecimal sprite addresses.
type colourRange struct {
	Colour int
	Start  string
	End    string
}

// pallettes holds the built-in colour pallettes, keyed by name and
// populated in init. Index 0 of every pallette is black.
var pallettes map[string][]color.RGBA

// Names of the built-in pallettes.
const PalletteSpectrum = "spectrum"
const PalletteC64 = "c64"
const PalletteVaporWave = "vapourwave"
// init populates the built-in pallettes. Every pallette has nine entries
// in the same order: black, white, red, green, blue, magenta, yellow,
// cyan, orange.
func init() {
	pallettes = map[string][]color.RGBA{
		PalletteSpectrum: {
			{R: 0, G: 0, B: 0, A: 255},       // black
			{R: 255, G: 255, B: 255, A: 255}, // white
			{R: 255, G: 0, B: 0, A: 255},     // red
			{R: 0, G: 255, B: 0, A: 255},     // green
			{R: 0, G: 0, B: 255, A: 255},     // blue
			{R: 255, G: 0, B: 255, A: 255},   // magenta
			{R: 255, G: 255, B: 0, A: 255},   // yellow
			{R: 0, G: 255, B: 255, A: 255},   // cyan
			{R: 255, G: 120, B: 0, A: 255},   // orange
		},
		PalletteC64: {
			{R: 0, G: 0, B: 0, A: 255},       // black
			{R: 255, G: 255, B: 255, A: 255}, // white
			{R: 255, G: 119, B: 119, A: 255}, // red
			{R: 0, G: 204, B: 85, A: 255},    // green
			{R: 0, G: 0, B: 170, A: 255},     // blue
			{R: 204, G: 68, B: 204, A: 255},  // magenta
			{R: 238, G: 238, B: 119, A: 255}, // yellow
			{R: 170, G: 255, B: 238, A: 255}, // cyan
			{R: 221, G: 136, B: 85, A: 255},  // orange
		},
		PalletteVaporWave: {
			{R: 0, G: 0, B: 0, A: 255},       // black
			{R: 255, G: 255, B: 255, A: 255}, // white
			{R: 255, G: 106, B: 138, A: 255}, // red
			{R: 33, G: 222, B: 138, A: 255},  // green
			{R: 134, G: 149, B: 232, A: 255}, // blue
			{R: 255, G: 106, B: 213, A: 255}, // magenta
			{R: 254, G: 222, B: 139, A: 255}, // yellow
			{R: 147, G: 208, B: 255, A: 255}, // cyan
			{R: 255, G: 165, B: 139, A: 255}, // orange
		},
	}
}
// SimpleColourMap builds a ColourMap with a single foreground and a single
// background colour taken from the named pallette; no per-sprite colours
// are configured (spriteColours stays nil).
func SimpleColourMap(fgIndex, bgIndex int, pallette string) *ColourMap {
	return &ColourMap{
		defaultColour:    getPalletColour(fgIndex, pallette),
		backgroundColour: getPalletColour(bgIndex, pallette),
	}
}
// LoadColourMap parses a YAML colour-map description (see ColourMapYaml)
// and resolves all pallette indices against the named pallette. Malformed
// hexadecimal addresses are skipped best-effort, matching the previous
// behaviour; only a YAML unmarshalling failure returns an error.
func LoadColourMap(yamlRaw []byte, pallette string) (*ColourMap, error) {
	mapYaml := &ColourMapYaml{}
	if err := yaml.Unmarshal(yamlRaw, mapYaml); err != nil {
		return nil, err
	}
	colourMap := &ColourMap{
		defaultColour:    getPalletColour(mapYaml.DefaultColour, pallette),
		backgroundColour: getPalletColour(mapYaml.BackgroundColour, pallette),
		spriteColours:    make(map[int]color.RGBA),
	}
	for _, colourRange := range mapYaml.RangeColours {
		// Parse with bitSize 64 (was 16): a 16-bit parse rejects any
		// address above 0x7FFF, silently dropping ranges in the upper half
		// of a 16-bit address space. 64 also matches the sprite-key
		// parsing below.
		startAddr, err := strconv.ParseInt(colourRange.Start, 16, 64)
		if err != nil {
			continue // best-effort: skip malformed entries
		}
		endAddr, err := strconv.ParseInt(colourRange.End, 16, 64)
		if err != nil {
			continue
		}
		for addr := startAddr; addr <= endAddr; addr++ {
			colourMap.spriteColours[int(addr)] = getPalletColour(colourRange.Colour, pallette)
		}
	}
	for hexKey, palIndex := range mapYaml.SpriteColours {
		addr, err := strconv.ParseInt(hexKey, 16, 64)
		if err != nil {
			continue
		}
		colourMap.spriteColours[int(addr)] = getPalletColour(palIndex, pallette)
	}
	return colourMap, nil
}
// getSpriteColour returns the colour configured for the given sprite
// address, falling back to the default colour when no mapping exists.
// A lookup on a nil map safely yields !ok, so no explicit nil check is
// needed for maps built by SimpleColourMap.
func (cmap ColourMap) getSpriteColour(spriteAddress int) *color.RGBA {
	if colour, ok := cmap.spriteColours[spriteAddress]; ok {
		return &colour
	}
	return cmap.getDefaultColour()
}
// getDefaultColour returns the foreground colour used for sprites without
// an explicit mapping. Note: with a value receiver the returned pointer
// refers to a field of a copy of the ColourMap, not the original.
func (cmap ColourMap) getDefaultColour() *color.RGBA {
	return &cmap.defaultColour
}

// getBackgroundColour returns the background colour. Same value-receiver
// caveat as getDefaultColour.
func (cmap ColourMap) getBackgroundColour() *color.RGBA {
	return &cmap.backgroundColour
}
// Get colour from pallette for given index
func getPalletColour(index int, pallette string) color.RGBA {
if index <= 8 && index > 0 {
return pallettes[pallette][index]
}
return color.RGBA{R: 0, G: 0, B: 0, A: 255}
} | pkg/emulator/colours.go | 0.702632 | 0.448124 | colours.go | starcoder |
package intellijel
import (
"github.com/jsleeio/go-eagle/pkg/format/eurorack"
"github.com/jsleeio/go-eagle/pkg/panel"
)
// based on https://intellijel.com/support/1u-technical-specifications/
const (
	// PanelHeight1U represents the total height of an Intellijel 1U panel, in
	// millimetres. NOTE(review): 1U tile heights differ between vendors;
	// these values follow the Intellijel spec linked above.
	PanelHeight1U = 39.65
	// MountingHolesLeftOffset represents the distance of the first mounting
	// hole from the left edge of the panel, in millimetres
	MountingHolesLeftOffset = eurorack.MountingHolesLeftOffset
	// MountingHoleTopY1U represents the Y value for the top row of 1U mounting
	// holes, in millimetres (measured from the panel's bottom edge)
	MountingHoleTopY1U = PanelHeight1U - 3.00
	// MountingHoleBottomY1U represents the Y value for the bottom row of 1U
	// mounting holes, in millimetres
	MountingHoleBottomY1U = 3.00
	// MountingHoleDiameter represents the diameter of a Eurorack system
	// mounting hole, in millimetres
	MountingHoleDiameter = eurorack.MountingHoleDiameter
	// HP represents horizontal pitch in a Eurorack frame, in millimetres
	HP = eurorack.HP
	// HorizontalFit indicates the panel tolerance adjustment for the format
	HorizontalFit = 0.25
	// CornerRadius indicates the corner radius for the format. Eurorack doesn't
	// believe in such things.
	CornerRadius = 0.0
	// RailHeightFromMountingHole is used to determine how much space exists.
	// See discussion in github.com/jsleeio/pkg/panel. 5mm is a good safe
	// figure for all known-used Eurorack rail types
	RailHeightFromMountingHole = eurorack.RailHeightFromMountingHole
)
// Intellijel implements the panel.Panel interface and encapsulates the physical
// characteristics of a Intellijel panel
type Intellijel struct {
	// HP is the panel width in Eurorack horizontal-pitch units.
	HP int
}

// NewIntellijel constructs a new Intellijel object with the given width
// in HP.
func NewIntellijel(hp int) *Intellijel {
	return &Intellijel{HP: hp}
}
// Width returns the width of a Intellijel panel, in millimetres
// (HP count times the Eurorack horizontal pitch).
func (i Intellijel) Width() float64 {
	return float64(i.HP) * HP
}
// Height returns the height of a Intellijel panel, in millimetres.
// The 1U height is fixed regardless of HP.
func (i Intellijel) Height() float64 {
	return PanelHeight1U
}

// MountingHoleDiameter returns the Intellijel system mounting hole size, in
// millimetres
func (i Intellijel) MountingHoleDiameter() float64 {
	return MountingHoleDiameter
}
// MountingHoles generates the four mounting hole locations of an
// Intellijel 1U panel: a top/bottom pair at the fixed left offset, and a
// mirrored pair aligned with the panel's last 3HP on the right.
func (i Intellijel) MountingHoles() []panel.Point {
	left := MountingHolesLeftOffset
	right := MountingHolesLeftOffset + HP*float64(i.HP-3)
	return []panel.Point{
		{X: left, Y: MountingHoleBottomY1U},
		{X: left, Y: MountingHoleTopY1U},
		{X: right, Y: MountingHoleBottomY1U},
		{X: right, Y: MountingHoleTopY1U},
	}
}
// HorizontalFit indicates the panel tolerance adjustment for the format.
// NOTE(review): presumably subtracted from the nominal width by the panel
// generator — confirm sign convention in the panel package.
func (i Intellijel) HorizontalFit() float64 {
	return HorizontalFit
}

// CornerRadius indicates the corner radius for the format
func (i Intellijel) CornerRadius() float64 {
	return CornerRadius
}

// RailHeightFromMountingHole is used to calculate space between rails
func (i Intellijel) RailHeightFromMountingHole() float64 {
	return RailHeightFromMountingHole
}

// MountingHoleTopY returns the Y coordinate for the top row of mounting
// holes
func (i Intellijel) MountingHoleTopY() float64 {
	return MountingHoleTopY1U
}

// MountingHoleBottomY returns the Y coordinate for the bottom row of
// mounting holes
func (i Intellijel) MountingHoleBottomY() float64 {
	return MountingHoleBottomY1U
}
// HeaderLocation returns the location of the header text. Intellijel 1U has
// mounting rails so this is typically aligned with the top mounting screw
// and horizontally centred.
func (i Intellijel) HeaderLocation() panel.Point {
	return panel.Point{X: i.Width() / 2.0, Y: i.MountingHoleTopY()}
}

// FooterLocation returns the location of the footer text. Intellijel 1U has
// mounting rails so this is typically aligned with the bottom mounting screw
// and horizontally centred.
func (i Intellijel) FooterLocation() panel.Point {
	return panel.Point{X: i.Width() / 2.0, Y: i.MountingHoleBottomY()}
}
package muxgo
import (
"encoding/json"
)
// InputSettings An array of objects that each describe an input file to be used to create the asset. As a shortcut, `input` can also be a string URL for a file when only one input file is used. See `input[].url` for requirements.
//
// All fields are pointers so an unset (omitted) field can be distinguished
// from an explicit zero value; unset fields are dropped from the JSON via
// `omitempty`.
type InputSettings struct {
	// The web address of the file that Mux should download and use. * For subtitles text tracks, the url is the location of subtitle/captions file. Mux supports [SubRip Text (SRT)](https://en.wikipedia.org/wiki/SubRip) and [Web Video Text Tracks](https://www.w3.org/TR/webvtt1/) format for ingesting Subtitles and Closed Captions. * For Watermarking or Overlay, the url is the location of the watermark image. * When creating clips from existing Mux assets, the url is defined with `mux://assets/{asset_id}` template where `asset_id` is the Asset Identifier for creating the clip from.
	Url *string `json:"url,omitempty"`
	OverlaySettings *InputSettingsOverlaySettings `json:"overlay_settings,omitempty"`
	// The time offset in seconds from the beginning of the video indicating the clip's starting marker. The default value is 0 when not included. This parameter is only applicable for creating clips when `input.url` has `mux://assets/{asset_id}` format.
	StartTime *float64 `json:"start_time,omitempty"`
	// The time offset in seconds from the beginning of the video, indicating the clip's ending marker. The default value is the duration of the video when not included. This parameter is only applicable for creating clips when `input.url` has `mux://assets/{asset_id}` format.
	EndTime *float64 `json:"end_time,omitempty"`
	// This parameter is required for the `text` track type.
	Type *string `json:"type,omitempty"`
	// Type of text track. This parameter only supports subtitles value. For more information on Subtitles / Closed Captions, [see this blog post](https://mux.com/blog/subtitles-captions-webvtt-hls-and-those-magic-flags/). This parameter is required for `text` track type.
	TextType *string `json:"text_type,omitempty"`
	// The language code value must be a valid [BCP 47](https://tools.ietf.org/html/bcp47) specification compliant value. For example, en for English or en-US for the US version of English. This parameter is required for text type and subtitles text type track.
	LanguageCode *string `json:"language_code,omitempty"`
	// The name of the track containing a human-readable description. This value must be unique across all text type and subtitles `text` type tracks. The hls manifest will associate a subtitle text track with this value. For example, the value should be \"English\" for subtitles text track with language_code as en. This optional parameter should be used only for `text` type and subtitles `text` type track. If this parameter is not included, Mux will auto-populate based on the `input[].language_code` value.
	Name *string `json:"name,omitempty"`
	// Indicates the track provides Subtitles for the Deaf or Hard-of-hearing (SDH). This optional parameter should be used for `text` type and subtitles `text` type tracks.
	ClosedCaptions *bool `json:"closed_captions,omitempty"`
	// This optional parameter should be used for `text` type and subtitles `text` type track.
	Passthrough *string `json:"passthrough,omitempty"`
}
// NewInputSettings instantiates a new InputSettings object.
// This constructor will assign default values to properties that have it
// defined, and makes sure properties required by API are set; the set of
// arguments will change when the set of required properties changes.
func NewInputSettings() *InputSettings {
	return &InputSettings{}
}
// NewInputSettingsWithDefaults instantiates a new InputSettings object.
// This constructor will only assign default values to properties that have
// it defined, but it doesn't guarantee that properties required by API are
// set.
func NewInputSettingsWithDefaults() *InputSettings {
	return &InputSettings{}
}
// GetUrl returns the Url field value if set, zero value otherwise.
func (o *InputSettings) GetUrl() string {
	if o != nil && o.Url != nil {
		return *o.Url
	}
	var ret string
	return ret
}

// GetUrlOk returns a tuple with the Url field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *InputSettings) GetUrlOk() (*string, bool) {
	if o != nil && o.Url != nil {
		return o.Url, true
	}
	return nil, false
}

// HasUrl returns a boolean if a field has been set.
func (o *InputSettings) HasUrl() bool {
	return o != nil && o.Url != nil
}

// SetUrl gets a reference to the given string and assigns it to the Url field.
func (o *InputSettings) SetUrl(v string) {
	o.Url = &v
}
// GetOverlaySettings returns the OverlaySettings field value if set, zero value otherwise.
func (o *InputSettings) GetOverlaySettings() InputSettingsOverlaySettings {
	if o != nil && o.OverlaySettings != nil {
		return *o.OverlaySettings
	}
	var ret InputSettingsOverlaySettings
	return ret
}

// GetOverlaySettingsOk returns a tuple with the OverlaySettings field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *InputSettings) GetOverlaySettingsOk() (*InputSettingsOverlaySettings, bool) {
	if o != nil && o.OverlaySettings != nil {
		return o.OverlaySettings, true
	}
	return nil, false
}

// HasOverlaySettings returns a boolean if a field has been set.
func (o *InputSettings) HasOverlaySettings() bool {
	return o != nil && o.OverlaySettings != nil
}

// SetOverlaySettings gets a reference to the given InputSettingsOverlaySettings and assigns it to the OverlaySettings field.
func (o *InputSettings) SetOverlaySettings(v InputSettingsOverlaySettings) {
	o.OverlaySettings = &v
}
// GetStartTime returns the StartTime field value if set, zero value otherwise.
func (o *InputSettings) GetStartTime() float64 {
	if o != nil && o.StartTime != nil {
		return *o.StartTime
	}
	var ret float64
	return ret
}

// GetStartTimeOk returns a tuple with the StartTime field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *InputSettings) GetStartTimeOk() (*float64, bool) {
	if o != nil && o.StartTime != nil {
		return o.StartTime, true
	}
	return nil, false
}

// HasStartTime returns a boolean if a field has been set.
func (o *InputSettings) HasStartTime() bool {
	return o != nil && o.StartTime != nil
}

// SetStartTime gets a reference to the given float64 and assigns it to the StartTime field.
func (o *InputSettings) SetStartTime(v float64) {
	o.StartTime = &v
}
// GetEndTime returns the EndTime field value if set, zero value otherwise.
func (o *InputSettings) GetEndTime() float64 {
	if o != nil && o.EndTime != nil {
		return *o.EndTime
	}
	var ret float64
	return ret
}

// GetEndTimeOk returns a tuple with the EndTime field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *InputSettings) GetEndTimeOk() (*float64, bool) {
	if o != nil && o.EndTime != nil {
		return o.EndTime, true
	}
	return nil, false
}

// HasEndTime returns a boolean if a field has been set.
func (o *InputSettings) HasEndTime() bool {
	return o != nil && o.EndTime != nil
}

// SetEndTime gets a reference to the given float64 and assigns it to the EndTime field.
func (o *InputSettings) SetEndTime(v float64) {
	o.EndTime = &v
}
// GetType returns the Type field value if set, zero value otherwise.
func (o *InputSettings) GetType() string {
	if o != nil && o.Type != nil {
		return *o.Type
	}
	var ret string
	return ret
}

// GetTypeOk returns a tuple with the Type field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *InputSettings) GetTypeOk() (*string, bool) {
	if o != nil && o.Type != nil {
		return o.Type, true
	}
	return nil, false
}

// HasType returns a boolean if a field has been set.
func (o *InputSettings) HasType() bool {
	return o != nil && o.Type != nil
}

// SetType gets a reference to the given string and assigns it to the Type field.
func (o *InputSettings) SetType(v string) {
	o.Type = &v
}
// GetTextType returns the TextType field value if set, and the empty string
// otherwise.
func (o *InputSettings) GetTextType() string {
	if v, ok := o.GetTextTypeOk(); ok {
		return *v
	}
	return ""
}

// GetTextTypeOk returns the TextType field value if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *InputSettings) GetTextTypeOk() (*string, bool) {
	if o != nil && o.TextType != nil {
		return o.TextType, true
	}
	return nil, false
}

// HasTextType reports whether the TextType field has been set.
func (o *InputSettings) HasTextType() bool {
	return o != nil && o.TextType != nil
}

// SetTextType stores a reference to the given string in the TextType field.
func (o *InputSettings) SetTextType(v string) {
	o.TextType = &v
}
// GetLanguageCode returns the LanguageCode field value if set, and the empty
// string otherwise.
func (o *InputSettings) GetLanguageCode() string {
	if v, ok := o.GetLanguageCodeOk(); ok {
		return *v
	}
	return ""
}

// GetLanguageCodeOk returns the LanguageCode field value if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *InputSettings) GetLanguageCodeOk() (*string, bool) {
	if o != nil && o.LanguageCode != nil {
		return o.LanguageCode, true
	}
	return nil, false
}

// HasLanguageCode reports whether the LanguageCode field has been set.
func (o *InputSettings) HasLanguageCode() bool {
	return o != nil && o.LanguageCode != nil
}

// SetLanguageCode stores a reference to the given string in the LanguageCode field.
func (o *InputSettings) SetLanguageCode(v string) {
	o.LanguageCode = &v
}
// GetName returns the Name field value if set, and the empty string otherwise.
func (o *InputSettings) GetName() string {
	if v, ok := o.GetNameOk(); ok {
		return *v
	}
	return ""
}

// GetNameOk returns the Name field value if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *InputSettings) GetNameOk() (*string, bool) {
	if o != nil && o.Name != nil {
		return o.Name, true
	}
	return nil, false
}

// HasName reports whether the Name field has been set.
func (o *InputSettings) HasName() bool {
	return o != nil && o.Name != nil
}

// SetName stores a reference to the given string in the Name field.
func (o *InputSettings) SetName(v string) {
	o.Name = &v
}
// GetClosedCaptions returns the ClosedCaptions field value if set, and false
// otherwise.
func (o *InputSettings) GetClosedCaptions() bool {
	if v, ok := o.GetClosedCaptionsOk(); ok {
		return *v
	}
	return false
}

// GetClosedCaptionsOk returns the ClosedCaptions field value if set, nil
// otherwise, plus a boolean reporting whether the value has been set.
func (o *InputSettings) GetClosedCaptionsOk() (*bool, bool) {
	if o != nil && o.ClosedCaptions != nil {
		return o.ClosedCaptions, true
	}
	return nil, false
}

// HasClosedCaptions reports whether the ClosedCaptions field has been set.
func (o *InputSettings) HasClosedCaptions() bool {
	return o != nil && o.ClosedCaptions != nil
}

// SetClosedCaptions stores a reference to the given bool in the ClosedCaptions field.
func (o *InputSettings) SetClosedCaptions(v bool) {
	o.ClosedCaptions = &v
}
// GetPassthrough returns the Passthrough field value if set, and the empty
// string otherwise.
func (o *InputSettings) GetPassthrough() string {
	if v, ok := o.GetPassthroughOk(); ok {
		return *v
	}
	return ""
}

// GetPassthroughOk returns the Passthrough field value if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *InputSettings) GetPassthroughOk() (*string, bool) {
	if o != nil && o.Passthrough != nil {
		return o.Passthrough, true
	}
	return nil, false
}

// HasPassthrough reports whether the Passthrough field has been set.
func (o *InputSettings) HasPassthrough() bool {
	return o != nil && o.Passthrough != nil
}

// SetPassthrough stores a reference to the given string in the Passthrough field.
func (o *InputSettings) SetPassthrough(v string) {
	o.Passthrough = &v
}
// MarshalJSON serializes InputSettings, emitting only the fields whose
// pointers are non-nil so that unset optional fields are omitted from
// the output entirely (rather than appearing as null).
func (o InputSettings) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.Url != nil {
		toSerialize["url"] = o.Url
	}
	if o.OverlaySettings != nil {
		toSerialize["overlay_settings"] = o.OverlaySettings
	}
	if o.StartTime != nil {
		toSerialize["start_time"] = o.StartTime
	}
	if o.EndTime != nil {
		toSerialize["end_time"] = o.EndTime
	}
	if o.Type != nil {
		toSerialize["type"] = o.Type
	}
	if o.TextType != nil {
		toSerialize["text_type"] = o.TextType
	}
	if o.LanguageCode != nil {
		toSerialize["language_code"] = o.LanguageCode
	}
	if o.Name != nil {
		toSerialize["name"] = o.Name
	}
	if o.ClosedCaptions != nil {
		toSerialize["closed_captions"] = o.ClosedCaptions
	}
	if o.Passthrough != nil {
		toSerialize["passthrough"] = o.Passthrough
	}
	return json.Marshal(toSerialize)
}
// NullableInputSettings wraps an InputSettings pointer together with an
// explicit "set" flag, allowing a JSON null to be distinguished from an
// absent field.
type NullableInputSettings struct {
	value *InputSettings
	isSet bool
}

// Get returns the wrapped value; nil when unset or explicitly null.
func (v NullableInputSettings) Get() *InputSettings {
	return v.value
}

// Set stores val (which may be nil to represent JSON null) and marks the
// wrapper as set.
func (v *NullableInputSettings) Set(val *InputSettings) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (including an explicit null) has been stored.
func (v NullableInputSettings) IsSet() bool {
	return v.isSet
}

// Unset clears the wrapped value and the set flag.
func (v *NullableInputSettings) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableInputSettings returns a wrapper that is already marked set.
func NewNullableInputSettings(val *InputSettings) *NullableInputSettings {
	return &NullableInputSettings{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableInputSettings) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks the wrapper
// as set, even when src is JSON null.
func (v *NullableInputSettings) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package render
import (
"image"
"image/color"
"math"
"github.com/200sc/go-dist/colorrange"
)
// Todo:
// Our current concept of thickness expands out in both directions,
// so it's impossible to draw a even-pixel thick line. This is probably
// okay for an easy syntax like this but we might want to add in a
// "Line constructor" type object like our ray-casters
// so this behavior can be customized, i.e.-- if you take thickness as
// pixel thickness, do you expand out left or right, or center, and how
// are ties broken, etc. That would also help prevent the number of
// different functions for line-drawing from continually increasing.
// NewLine returns a one-pixel-wide line sprite from (x1,y1) to (x2,y2)
// with the given color. It is shorthand for NewThickLine with thickness 0.
func NewLine(x1, y1, x2, y2 float64, c color.Color) *Sprite {
	return NewThickLine(x1, y1, x2, y2, c, 0)
}
// NewThickLine returns a solid-color line sprite expanded by thickness
// pixels on each side (see the package note above on how thickness grows
// the line in both directions).
func NewThickLine(x1, y1, x2, y2 float64, c color.Color, thickness int) *Sprite {
	return NewLineColored(x1, y1, x2, y2, IdentityColorer(c), thickness)
}
// NewGradientLine returns a line sprite of the given thickness whose color
// interpolates linearly from c1 at the start point to c2 at the end point.
func NewGradientLine(x1, y1, x2, y2 float64, c1, c2 color.Color, thickness int) *Sprite {
	// colorrange.Linear maps a [0,1] percentile to a color between c1 and c2.
	colorer := colorrange.NewLinear(c1, c2).Percentile
	return NewLineColored(x1, y1, x2, y2, colorer, thickness)
}
// NewLineColored returns a line sprite whose per-pixel color is chosen by
// the given Colorer, expanded by thickness pixels on each side.
func NewLineColored(x1, y1, x2, y2 float64, colorer Colorer, thickness int) *Sprite {
	// Translate the segment so its bounding box starts at the origin;
	// the sprite itself is then positioned back at the original minimum
	// corner (shifted by thickness to leave room for the expanded edges).
	originX := math.Min(x1, x2)
	originY := math.Min(y1, y2)
	rgba := drawLineBetween(int(x1-originX), int(y1-originY), int(x2-originX), int(y2-originY), colorer, thickness)
	return NewSprite(originX-float64(thickness), originY-float64(thickness), rgba)
}
// DrawLine draws a one-pixel-wide line of color c onto rgba from (x1,y1)
// to (x2,y2).
func DrawLine(rgba *image.RGBA, x1, y1, x2, y2 int, c color.Color) {
	DrawThickLine(rgba, x1, y1, x2, y2, c, 0)
}
// DrawThickLine acts like DrawLine, but also takes the thickness of the line.
func DrawThickLine(rgba *image.RGBA, x1, y1, x2, y2 int, c color.Color, thickness int) {
	DrawLineColored(rgba, x1, y1, x2, y2, thickness, IdentityColorer(c))
}
// DrawGradientLine acts like DrawThickLine but interpolates the line's color
// from c1 at the start point to c2 at the end point.
func DrawGradientLine(rgba *image.RGBA, x1, y1, x2, y2 int, c1, c2 color.Color, thickness int) {
	colorer := colorrange.NewLinear(c1, c2).Percentile
	DrawLineColored(rgba, x1, y1, x2, y2, thickness, colorer)
}
// DrawLineColored acts like DrawThickLine, but takes in a custom colorer
// function for how it draws its line. It walks Bresenham's line algorithm
// from (x2,y2) toward (x1,y1), stamping a (2*thickness+1)-square of pixels
// at each step. Note that x2 and y2 are mutated inside the loop as the
// drawing cursor.
func DrawLineColored(rgba *image.RGBA, x1, y1, x2, y2, thickness int, colorer Colorer) {
	xDelta := math.Abs(float64(x2 - x1))
	yDelta := math.Abs(float64(y2 - y1))
	// xSlope/ySlope are the per-step cursor increments; the cursor starts
	// at (x2,y2), so it steps back toward (x1,y1).
	xSlope := -1
	x3 := x1
	if x2 < x1 {
		xSlope = 1
		x3 = x2
	}
	ySlope := -1
	y3 := y1
	if y2 < y1 {
		ySlope = 1
		y3 = y2
	}
	w := int(xDelta)
	h := int(yDelta)
	// progress maps a pixel position (relative to the minimum corner
	// (x3,y3)) to a [0,1] percentile along the line, averaging the
	// horizontal and vertical progress and flipping each axis when the
	// line runs in the negative direction.
	progress := func(x, y, w, h int) float64 {
		hprg := HorizontalProgress(x, y, w, h)
		vprg := VerticalProgress(x, y, w, h)
		if ySlope == -1 {
			vprg = 1 - vprg
		}
		if xSlope == -1 {
			hprg = 1 - hprg
		}
		return (hprg + vprg) / 2
	}
	// Standard Bresenham error accumulator: err tracks how far the drawn
	// pixels deviate from the ideal line, deciding when to step each axis.
	err := xDelta - yDelta
	var err2 float64
	for i := 0; true; i++ {
		// Stamp the thickness square centered on the current cursor.
		for xm := x2 - thickness; xm <= (x2 + thickness); xm++ {
			for ym := y2 - thickness; ym <= (y2 + thickness); ym++ {
				p := progress(xm-x3, ym-y3, w, h)
				rgba.Set(xm, ym, colorer(p))
			}
		}
		if x2 == x1 && y2 == y1 {
			break
		}
		err2 = 2 * err
		if err2 > -1*yDelta {
			err -= yDelta
			x2 += xSlope
		}
		if err2 < xDelta {
			err += xDelta
			y2 += ySlope
		}
	}
}
// drawLineBetween allocates an RGBA just large enough for the requested
// segment (plus thickness padding) and draws the line into it. Points and
// vertical lines are special-cased as simple filled rectangles; everything
// else is delegated to DrawLineColored.
func drawLineBetween(x1, y1, x2, y2 int, colorer Colorer, thickness int) *image.RGBA {
	// Bresenham's line-drawing algorithm from wikipedia
	xDelta := math.Abs(float64(x2 - x1))
	yDelta := math.Abs(float64(y2 - y1))
	if xDelta == 0 && yDelta == 0 {
		// Degenerate segment: a single point, drawn as a filled square at
		// full (1.0) color progress.
		width := 1 + 2*thickness
		rect := image.Rect(0, 0, width, width)
		rgba := image.NewRGBA(rect)
		for xm := 0; xm < width; xm++ {
			for ym := 0; ym < width; ym++ {
				rgba.Set(xm, ym, colorer(1.0))
			}
		}
		return rgba
	} else if xDelta == 0 {
		// Vertical line: fill a column rectangle with a top-to-bottom
		// gradient instead of running Bresenham.
		// NOTE(review): unlike the general case below, the height here has
		// no +1, so the final pixel row appears to be omitted — confirm
		// whether that is intentional.
		width := 1 + 2*thickness
		height := int(math.Floor(yDelta)) + 2*thickness
		rect := image.Rect(0, 0, width, height)
		rgba := image.NewRGBA(rect)
		for xm := 0; xm < width; xm++ {
			for ym := 0; ym < height; ym++ {
				rgba.Set(xm, ym, colorer(float64(ym)/float64(height)))
			}
		}
		return rgba
	}
	// Todo: document why we add one here
	// It has something to do with zero-height rgbas, but is always useful
	h := int(yDelta) + 1
	rect := image.Rect(0, 0, int(xDelta)+2*thickness, h+2*thickness)
	rgba := image.NewRGBA(rect)
	// Shift both endpoints by thickness so the stamped squares in
	// DrawLineColored stay inside the allocated bounds.
	x2 += thickness
	y2 += thickness
	x1 += thickness
	y1 += thickness
	DrawLineColored(rgba, x1, y1, x2, y2, thickness, colorer)
	return rgba
}
package Test
import (
"math"
"math/rand"
)
// Vector is a 3D vector with float64 components.
type Vector struct {
	X, Y, Z float64
}

// V is a shorthand constructor for a Vector with the given components.
func V(x, y, z float64) Vector {
	return Vector{X: x, Y: y, Z: z}
}
// RandomUnitVector returns a random unit-length vector with a uniformly
// distributed direction, produced by rejection sampling points from the
// cube [-1,1]^3.
func RandomUnitVector() Vector {
	for {
		v := Vector{
			rand.Float64()*2 - 1,
			rand.Float64()*2 - 1,
			rand.Float64()*2 - 1,
		}
		// Only accept points inside the unit sphere; normalizing a point
		// from the cube directly would bias directions toward the corners.
		if v.X*v.X+v.Y*v.Y+v.Z*v.Z <= 1 {
			return v.Normalize()
		}
	}
}
// VectorW promotes a to homogeneous coordinates with W = 1.
func (a Vector) VectorW() VectorW {
	return VectorW{a.X, a.Y, a.Z, 1}
}

// IsDegenerate reports whether any component of a is NaN or infinite.
func (a Vector) IsDegenerate() bool {
	nan := math.IsNaN(a.X) || math.IsNaN(a.Y) || math.IsNaN(a.Z)
	inf := math.IsInf(a.X, 0) || math.IsInf(a.Y, 0) || math.IsInf(a.Z, 0)
	return nan || inf
}
// Length returns the Euclidean (L2) norm of a.
func (a Vector) Length() float64 {
	return math.Sqrt(a.X*a.X + a.Y*a.Y + a.Z*a.Z)
}

// Less reports whether a orders before b lexicographically by X, then Y,
// then Z.
func (a Vector) Less(b Vector) bool {
	if a.X != b.X {
		return a.X < b.X
	}
	if a.Y != b.Y {
		return a.Y < b.Y
	}
	return a.Z < b.Z
}

// Distance returns the Euclidean distance between a and b.
func (a Vector) Distance(b Vector) float64 {
	return a.Sub(b).Length()
}

// LengthSquared returns the squared norm of a, avoiding the sqrt of Length.
func (a Vector) LengthSquared() float64 {
	return a.X*a.X + a.Y*a.Y + a.Z*a.Z
}

// DistanceSquared returns the squared distance between a and b.
func (a Vector) DistanceSquared(b Vector) float64 {
	return a.Sub(b).LengthSquared()
}
// Lerp linearly interpolates from a (t=0) to b (t=1).
func (a Vector) Lerp(b Vector, t float64) Vector {
	return a.Add(b.Sub(a).MulScalar(t))
}

// LerpDistance returns the point a fixed distance d from a along the
// direction toward b.
func (a Vector) LerpDistance(b Vector, d float64) Vector {
	return a.Add(b.Sub(a).Normalize().MulScalar(d))
}

// Dot returns the dot product of a and b.
func (a Vector) Dot(b Vector) float64 {
	return a.X*b.X + a.Y*b.Y + a.Z*b.Z
}

// Cross returns the cross product a × b.
func (a Vector) Cross(b Vector) Vector {
	x := a.Y*b.Z - a.Z*b.Y
	y := a.Z*b.X - a.X*b.Z
	z := a.X*b.Y - a.Y*b.X
	return Vector{x, y, z}
}

// Normalize returns a scaled to unit length. The components become
// non-finite if a is the zero vector (division by zero norm).
func (a Vector) Normalize() Vector {
	r := 1 / math.Sqrt(a.X*a.X+a.Y*a.Y+a.Z*a.Z)
	return Vector{a.X * r, a.Y * r, a.Z * r}
}
// Negate returns a with every component negated.
func (a Vector) Negate() Vector {
	return Vector{-a.X, -a.Y, -a.Z}
}

// Abs returns a with every component replaced by its absolute value.
func (a Vector) Abs() Vector {
	return Vector{math.Abs(a.X), math.Abs(a.Y), math.Abs(a.Z)}
}

// Add returns the component-wise sum a + b.
func (a Vector) Add(b Vector) Vector {
	return Vector{a.X + b.X, a.Y + b.Y, a.Z + b.Z}
}

// Sub returns the component-wise difference a - b.
func (a Vector) Sub(b Vector) Vector {
	return Vector{a.X - b.X, a.Y - b.Y, a.Z - b.Z}
}

// Mul returns the component-wise (Hadamard) product of a and b.
func (a Vector) Mul(b Vector) Vector {
	return Vector{a.X * b.X, a.Y * b.Y, a.Z * b.Z}
}

// Div returns the component-wise quotient a / b.
func (a Vector) Div(b Vector) Vector {
	return Vector{a.X / b.X, a.Y / b.Y, a.Z / b.Z}
}

// Mod returns the component-wise floored modulo of a by b, so each result
// component has the sign of the corresponding b component.
func (a Vector) Mod(b Vector) Vector {
	// as implemented in GLSL
	x := a.X - b.X*math.Floor(a.X/b.X)
	y := a.Y - b.Y*math.Floor(a.Y/b.Y)
	z := a.Z - b.Z*math.Floor(a.Z/b.Z)
	return Vector{x, y, z}
}
// AddScalar adds b to every component of a.
func (a Vector) AddScalar(b float64) Vector {
	return Vector{a.X + b, a.Y + b, a.Z + b}
}

// SubScalar subtracts b from every component of a.
func (a Vector) SubScalar(b float64) Vector {
	return Vector{a.X - b, a.Y - b, a.Z - b}
}

// MulScalar multiplies every component of a by b.
func (a Vector) MulScalar(b float64) Vector {
	return Vector{a.X * b, a.Y * b, a.Z * b}
}

// DivScalar divides every component of a by b.
func (a Vector) DivScalar(b float64) Vector {
	return Vector{a.X / b, a.Y / b, a.Z / b}
}

// Min returns the component-wise minimum of a and b.
func (a Vector) Min(b Vector) Vector {
	return Vector{math.Min(a.X, b.X), math.Min(a.Y, b.Y), math.Min(a.Z, b.Z)}
}

// Max returns the component-wise maximum of a and b.
func (a Vector) Max(b Vector) Vector {
	return Vector{math.Max(a.X, b.X), math.Max(a.Y, b.Y), math.Max(a.Z, b.Z)}
}
// Floor rounds each component down to the nearest integer.
func (a Vector) Floor() Vector {
	return Vector{math.Floor(a.X), math.Floor(a.Y), math.Floor(a.Z)}
}

// Ceil rounds each component up to the nearest integer.
func (a Vector) Ceil() Vector {
	return Vector{math.Ceil(a.X), math.Ceil(a.Y), math.Ceil(a.Z)}
}

// Round rounds each component to the nearest integer.
func (a Vector) Round() Vector {
	return a.RoundPlaces(0)
}

// RoundPlaces rounds each component to n decimal places using the
// package-level RoundPlaces helper.
func (a Vector) RoundPlaces(n int) Vector {
	x := RoundPlaces(a.X, n)
	y := RoundPlaces(a.Y, n)
	z := RoundPlaces(a.Z, n)
	return Vector{x, y, z}
}
// MinComponent returns the smallest of a's three components.
func (a Vector) MinComponent() float64 {
	return math.Min(a.Z, math.Min(a.X, a.Y))
}

// MaxComponent returns the largest of a's three components.
func (a Vector) MaxComponent() float64 {
	return math.Max(a.Z, math.Max(a.X, a.Y))
}

// Reflect mirrors the incident vector a about the surface normal n.
func (a Vector) Reflect(n Vector) Vector {
	return a.Sub(n.MulScalar(2 * n.Dot(a)))
}

// Perpendicular returns a unit vector orthogonal to a. The zero vector has
// no perpendicular and maps to the zero vector; vectors along the Z axis
// map to the Y axis.
func (a Vector) Perpendicular() Vector {
	switch {
	case a.X != 0 || a.Y != 0:
		return Vector{-a.Y, a.X, 0}.Normalize()
	case a.Z != 0:
		return Vector{0, 1, 0}
	default:
		return Vector{}
	}
}
// SegmentDistance returns the shortest distance from point p to the line
// segment with endpoints v and w.
func (p Vector) SegmentDistance(v Vector, w Vector) float64 {
	l2 := v.DistanceSquared(w)
	// Degenerate segment: v and w coincide, so measure to that point.
	if l2 == 0 {
		return p.Distance(v)
	}
	// t is the projection of p onto the infinite line through v and w,
	// expressed as a fraction of the segment (0 at v, 1 at w).
	t := p.Sub(v).Dot(w.Sub(v)) / l2
	if t < 0 {
		// Projection falls before v; nearest point is v itself.
		return p.Distance(v)
	}
	if t > 1 {
		// Projection falls past w; nearest point is w itself.
		return p.Distance(w)
	}
	// Otherwise measure to the projection point on the segment.
	return v.Add(w.Sub(v).MulScalar(t)).Distance(p)
}
// VectorW is a 4D homogeneous-coordinate vector.
type VectorW struct {
	X, Y, Z, W float64
}

// Vector drops the W component, returning the 3D part.
func (a VectorW) Vector() Vector {
	return Vector{a.X, a.Y, a.Z}
}

// Outside reports whether a lies outside the clip volume, i.e. any of
// X, Y, Z falls outside [-W, W].
func (a VectorW) Outside() bool {
	x, y, z, w := a.X, a.Y, a.Z, a.W
	return x < -w || x > w || y < -w || y > w || z < -w || z > w
}

// Dot returns the 4D dot product of a and b.
func (a VectorW) Dot(b VectorW) float64 {
	return a.X*b.X + a.Y*b.Y + a.Z*b.Z + a.W*b.W
}

// Add returns the component-wise sum a + b.
func (a VectorW) Add(b VectorW) VectorW {
	return VectorW{a.X + b.X, a.Y + b.Y, a.Z + b.Z, a.W + b.W}
}

// Sub returns the component-wise difference a - b.
func (a VectorW) Sub(b VectorW) VectorW {
	return VectorW{a.X - b.X, a.Y - b.Y, a.Z - b.Z, a.W - b.W}
}

// MulScalar multiplies every component of a by b.
func (a VectorW) MulScalar(b float64) VectorW {
	return VectorW{a.X * b, a.Y * b, a.Z * b, a.W * b}
}

// DivScalar divides every component of a by b.
func (a VectorW) DivScalar(b float64) VectorW {
	return VectorW{a.X / b, a.Y / b, a.Z / b, a.W / b}
}
package tasks
import (
"fmt"
"strings"
)
// ListRecord is a record in the doubly-linked priority list of tasks.
type ListRecord struct {
	prev *ListRecord
	next *ListRecord
	Task *Task
	// Estimation of Task potential to lead to the best solution.
	// This is a sum of Task.ActualDistance and Task.ProjectedDistance;
	// smaller values sort earlier in the list.
	Distance int
}
// List is a doubly-linked list of tasks kept sorted by ascending Distance.
type List struct {
	First *ListRecord
	Last  *ListRecord
	// insertionQueue is a scratch sorted list used to optimize bulk task
	// insertion: incoming tasks are pre-sorted there, then merged into
	// the main list in a single pass. See List.Insert.
	insertionQueue *List
}
// NewListQueue creates and returns a new, empty list.
func NewListQueue() *List {
	return &List{}
}
// Insert inserts new tasks into the list, keeping it sorted by ascending
// Distance. The tasks are first sorted into a scratch insertionQueue and
// then merged into the main list in one pass, so a bulk insert costs a
// single traversal of the list rather than one per task.
func (l *List) Insert(tasks []*Task) {
	// A quick path for an empty insertion
	if len(tasks) == 0 {
		return
	}
	// Initialize insertion queue if necessary (lazily, on first use)
	if l.insertionQueue == nil {
		l.insertionQueue = &List{}
	}
	// Populate insertion queue and defer clearing
	for _, task := range tasks {
		l.insertionQueue.rawInsert(task, task.Distance)
	}
	defer l.insertionQueue.clear()
	// A quick path for an empty list: adopt the queue's records wholesale
	if l.First == nil {
		l.First = l.insertionQueue.First
		l.Last = l.insertionQueue.Last
		return
	}
	insertion := l.insertionQueue.First
	// Check if new inserts have to go in the head of the list.
	// This is handled separately so the merge loop below can always
	// assume listRecord.prev is non-nil.
	for (insertion != nil) && (l.First.Distance >= insertion.Distance) {
		// Extract the record from the insertion queue
		newRecord := insertion
		insertion = insertion.next
		// Install it as a new head
		newRecord.next = l.First
		l.First.prev = newRecord
		l.First = newRecord
	}
	// Iterate over list records, inserting as needed. Both lists are
	// sorted, so this is a standard sorted-list merge.
	for listRecord := l.First; (listRecord != nil) && (insertion != nil); listRecord = listRecord.next {
		// Iterate over remaining insertion queue, if there is suitable insertions to go before the listRecord
		for (insertion != nil) && (listRecord.Distance >= insertion.Distance) {
			// Extract the record from the insertion queue
			newRecord := insertion
			insertion = insertion.next
			// Install it before the current list record
			listRecord.prev.next = newRecord
			newRecord.prev = listRecord.prev
			newRecord.next = listRecord
			listRecord.prev = newRecord
		}
	}
	// If some insertion records remain, they are all larger than the
	// current tail and should go to the end as one linked run.
	if insertion != nil {
		insertion.prev = l.Last
		l.Last.next = insertion
		l.Last = l.insertionQueue.Last
	}
}
// TrimTail removes every record at the tail of the list whose Distance is
// greater than or equal to the given threshold, walking backward from Last.
func (l *List) TrimTail(distance int) {
	// A quick path for an empty list
	if l.First == nil {
		return
	}
	// A quick path for a tail not suitable for trimming: the list is
	// sorted, so if the last record survives, every record does.
	if l.Last.Distance < distance {
		return
	}
	// A quick path for a full list trim: if even the first record is at
	// or past the threshold, everything goes.
	if l.First.Distance >= distance {
		l.clear()
		return
	}
	// Walk backward until the first surviving record and cut after it.
	// The trimmed records keep their links to each other, but nothing in
	// the list references them anymore.
	for checked := l.Last; checked != nil; checked = checked.prev {
		if checked.Distance < distance {
			l.Last = checked
			l.Last.next = nil
			break
		}
	}
}
// IsEmpty reports whether the list contains no records.
func (l *List) IsEmpty() bool {
	return l.First == nil
}
// PopFirst removes the first record from the list and returns its task.
// If the list is empty, it returns nil.
func (l *List) PopFirst() *Task {
	if l.First == nil {
		return nil
	}
	record := l.First
	l.First = record.next
	if l.First == nil {
		// The list is now empty; drop the stale tail reference so it does
		// not keep the popped record alive or mislead later operations.
		l.Last = nil
	} else {
		// Detach the new head from the popped record so backward walks
		// (e.g. TrimTail following prev pointers) cannot step past First.
		l.First.prev = nil
	}
	// Fully unlink the popped record so it does not retain the list.
	record.next = nil
	return record.Task
}
// String implements the fmt.Stringer interface, rendering the Distance of
// each record in order. Used mainly for testing.
func (l *List) String() string {
	var b strings.Builder
	b.WriteString("tasks.List:")
	for record := l.First; record != nil; record = record.next {
		fmt.Fprintf(&b, " %d", record.Distance)
	}
	return b.String()
}
// rawInsert inserts one record into the list without optimizations, using
// simple iteration to find the sorted position. It is used to populate
// insertionQueue for optimized bulk insertion into the main list.
func (l *List) rawInsert(task *Task, potential int) {
	record := &ListRecord{
		prev:     nil,
		next:     nil,
		Task:     task,
		Distance: potential,
	}
	// A quick path for an empty list
	if l.First == nil {
		l.First = record
		l.Last = record
		return
	}
	// Check if new insert have to go in the head of the list.
	// Handled separately so the loop below can assume checked.prev != nil.
	if l.First.Distance >= record.Distance {
		record.next = l.First
		l.First.prev = record
		l.First = record
		return
	}
	// Iterate over list records, seeking the suitable insert position
	for checked := l.First; checked != nil; checked = checked.next {
		if checked.Distance >= record.Distance {
			checked.prev.next = record
			record.prev = checked.prev
			record.next = checked
			checked.prev = record
			return
		}
	}
	// We have not found an insertion position, which means we have the smallest
	// available potential and have to insert in the end
	record.prev = l.Last
	l.Last.next = record
	l.Last = record
}
// clear drops all records from the list by resetting both end pointers.
// The insertionQueue scratch list is deliberately kept for reuse.
func (l *List) clear() {
	l.First = nil
	l.Last = nil
}
//nolint:lll // multiple regex is not possible broken lines
package csharp
import (
"regexp"
"github.com/ZupIT/horusec-devkit/pkg/enums/confidence"
"github.com/ZupIT/horusec-devkit/pkg/enums/severities"
engine "github.com/ZupIT/horusec-engine"
"github.com/ZupIT/horusec-engine/text"
)
// NewCommandInjection builds the HS-CSHARP-1 rule: flags code that creates a
// Process and assigns both StartInfo.FileName and StartInfo.Arguments, a
// pattern prone to OS command injection when either value is user-controlled.
func NewCommandInjection() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-1",
			Name:        "Command Injection",
			Description: "If a malicious user controls either the FileName or Arguments, he might be able to execute unwanted commands or add unwanted argument. This behavior would not be possible if input parameter are validate against a white-list of characters. For more information access: (https://security-code-scan.github.io/#SCS0001).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new Process\(\)`),
			regexp.MustCompile(`StartInfo.FileName`),
			regexp.MustCompile(`StartInfo.Arguments`),
		},
	}
}
// NewXPathInjection builds the HS-CSHARP-2 rule: flags XmlDocument loads
// combined with SelectNodes queries, where unfiltered user input could
// extend the XPath expression.
func NewXPathInjection() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-2",
			Name:        "XPath Injection",
			Description: "If the user input is not properly filtered, a malicious user could extend the XPath query. For more information access: (https://security-code-scan.github.io/#SCS0003).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new XmlDocument {XmlResolver = null}`),
			regexp.MustCompile(`Load\(.*\)`),
			regexp.MustCompile(`SelectNodes\(.*\)`),
		},
	}
}
// NewExternalEntityInjection builds the HS-CSHARP-3 rule: flags XML parser
// setups that disable DTD prohibition (ProhibitDtd = false) or load XML
// without it, leaving the reader open to XXE processing. The final
// expression matches a settings construction followed by a Load without an
// intervening "ProhibitDtd" token.
func NewExternalEntityInjection() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-3",
			Name:        "XML eXternal Entity Injection (XXE)",
			Description: "The XML parser is configured incorrectly. The operation could be vulnerable to XML eXternal Entity (XXE) processing. For more information access: (https://security-code-scan.github.io/#SCS0007).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new XmlReaderSettings\(\)`),
			regexp.MustCompile(`XmlReader.Create\(.*\)`),
			regexp.MustCompile(`new XmlDocument\(.*\)`),
			regexp.MustCompile(`Load\(.*\)`),
			regexp.MustCompile(`ProhibitDtd = false`),
			regexp.MustCompile(`(new XmlReaderSettings\(\))(([^P]|P[^r]|Pr[^o]|Pro[^h]|Proh[^i]|Prohi[^b]|Prohib[^i]|Prohibi[^t]|Prohibit[^D]|ProhibitD[^t]|ProhibitDt[^d])*)(\.Load\(.*\))`),
		},
	}
}
// NewPathTraversal builds the HS-CSHARP-4 rule: flags MVC actions that read
// files via Server.MapPath concatenated with request data and return them,
// a pattern vulnerable to directory traversal.
func NewPathTraversal() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-4",
			Name:        "Path Traversal",
			Description: "A path traversal attack (also known as directory traversal) aims to access files and directories that are stored outside the expected directory.By manipulating variables that reference files with “dot-dot-slash (../)” sequences and its variations or by using absolute file paths, it may be possible to access arbitrary files and directories stored on file system including application source code or configuration and critical system files. For more information access: (https://security-code-scan.github.io/#SCS0018).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`ActionResult`),
			regexp.MustCompile(`System.IO.File.ReadAllBytes\(Server.MapPath\(.*\) \+ .*\)`),
			regexp.MustCompile(`File\(.*, System.Net.Mime.MediaTypeNames.Application.Octet, .*\)`),
		},
	}
}
// NewSQLInjectionWebControls builds the HS-CSHARP-5 rule: flags SQL queries
// built through string concatenation when WebControls data-source types are
// in use, a classic SQL injection pattern.
func NewSQLInjectionWebControls() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-5",
			Name:        "SQL Injection WebControls",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0014).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`"Select .* From .* where .*" & .*`),
			regexp.MustCompile(`System\.Web\.UI\.WebControls\.SqlDataSource | System\.Web\.UI\.WebControls\.SqlDataSourceView | Microsoft\.Whos\.Framework\.Data\.SqlUtility`),
		},
	}
}
// NewWeakCipherOrCBCOrECBMode builds the HS-CSHARP-6 rule: flags encryption
// pipelines (CreateEncryptor/CryptoStream/BinaryWriter) that lack integrity
// protection. The first expression matches a "using" that never mentions
// the Org.BouncyCastle namespace.
func NewWeakCipherOrCBCOrECBMode() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-6",
			Name:        "Weak Cipher Mode",
			Description: "The cipher provides no way to detect that the data has been tampered with. If the cipher text can be controlled by an attacker, it could be altered without detection. The use of AES in CBC mode with a HMAC is recommended guaranteeing integrity and confidentiality. For more information access: (https://security-code-scan.github.io/#SCS0013).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(using)(([^O]|O[^r]|Or[^g]|Org[^.]|Org\.[^B]|Org\.B[^o]|Org\.Bo[^u]|Org\.Bou[^n]|Org\.Boun[^c]|Org\.Bounc[^y]|Org\.Bouncy[^C]|Org\.BouncyC[^a]|Org\.BouncyCa[^s]|Org\.BouncyCas[^t]|Org\.BouncyCast[^l]|Org\.BouncyCastl[^e])*)(\);)`),
			regexp.MustCompile(`CreateEncryptor\(.*\)`),
			regexp.MustCompile(`new CryptoStream\(.*\)`),
			regexp.MustCompile(`Write\(.*\)`),
			regexp.MustCompile(`new BinaryWriter\(.*\)`),
		},
	}
}
// NewFormsAuthenticationCookielessMode builds the HS-CSHARP-7 rule: flags
// Forms authentication <forms> elements that neither contain "cookieless"
// nor "UseCookies", meaning the auth token may travel in the URL.
func NewFormsAuthenticationCookielessMode() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-7",
			Name:        "Forms Authentication Cookieless Mode",
			Description: "Authentication cookies should not be sent in the URL. Doing so allows attackers to gain unauthorized access to authentication tokens (web server logs, referrer headers, and browser history) and more easily perform session fixation / hijacking attacks. For more information checkout the CWE-598 (https://cwe.mitre.org/data/definitions/598.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<authentication\s*mode\s*=\s*["|']Forms`),
			regexp.MustCompile(`(\<forms)((([^c]|c[^o]|co[^o]|coo[^k]|cook[^i]|cooki[^e]|cookie[^l]|cookiel[^e]|cookiele[^s]|cookieles[^s])*)|([^U]|U[^s]|Us[^e]|Use[^C]|UseC[^o]|UseCo[^o]|UseCoo[^k]|UseCook[^i]|UseCooki[^e]|UseCookie[^s])*)(\/\>)`),
		},
	}
}
// NewFormsAuthenticationCrossAppRedirects builds the HS-CSHARP-8 rule:
// flags Forms authentication configured with enableCrossAppRedirects=true,
// which permits unvalidated redirects via the returnUrl parameter.
func NewFormsAuthenticationCrossAppRedirects() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-8",
			Name:        "Forms Authentication Cross App Redirects",
			Description: "Enabling cross-application redirects can allow unvalidated redirect attacks via the returnUrl parameter during the login process. Disable cross-application redirects to by setting the enableCrossAppRedirects attribute to false. For more information checkout the CWE-601 (https://cwe.mitre.org/data/definitions/601.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<authentication\s*mode\s*=\s*["|']Forms`),
			regexp.MustCompile(`\<forms`),
			regexp.MustCompile(`enableCrossAppRedirects\s*=\s*["|']true`),
		},
	}
}
// NewFormsAuthenticationWeakCookieProtection builds the HS-CSHARP-9 rule:
// flags <forms> protection values other than All (None, Encryption-only, or
// Validation-only), which leave the auth cookie without full encryption
// plus MAC validation.
func NewFormsAuthenticationWeakCookieProtection() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-9",
			Name:        "Forms Authentication Weak Cookie Protection",
			Description: "Forms Authentication cookies must use strong encryption and message authentication code (MAC) validation to protect the cookie value from inspection and tampering. Configure the forms element’s protection attribute to All to enable cookie data validation and encryption. For more information checkout the CWE-565 (https://cwe.mitre.org/data/definitions/565.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<authentication\s*mode\s*=\s*["|']Forms`),
			regexp.MustCompile(`\<forms`),
			regexp.MustCompile(`protection\s*=\s*["|'](None|Encryption|Validation)`),
		},
	}
}
// NewFormsAuthenticationWeakTimeout builds the HS-CSHARP-10 rule: flags
// Forms authentication timeout values greater than 15 minutes.
//
// The timeout expression previously used `(1[6-9]|[2-9][0-9]*)`, which
// wrongly flagged short timeouts of 2-9 minutes and missed any value of
// 100 or more (the leading '1' matched neither alternative). The pattern
// below matches exactly 16-19, 20-99, and any value with three or more
// digits.
func NewFormsAuthenticationWeakTimeout() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-10",
			Name:        "Forms Authentication Weak Timeout",
			Description: "Excessive authentication timeout values provide attackers with a large window of opportunity to hijack user’s authentication tokens. For more information checkout the CWE-613 (https://cwe.mitre.org/data/definitions/613.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<authentication\s*mode\s*=\s*["|']Forms`),
			regexp.MustCompile(`\<forms`),
			regexp.MustCompile(`timeout\s*=\s*["|'](1[6-9]|[2-9][0-9]|[1-9][0-9]{2,})`),
		},
	}
}
// NewHeaderCheckingDisabled builds the HS-CSHARP-11 rule: flags httpRuntime
// elements with enableHeaderChecking="false", which opens the application
// to HTTP header injection (response splitting).
func NewHeaderCheckingDisabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-11",
			Name:        "Header Checking Disabled",
			Description: "Disabling the HTTP Runtime header checking protection opens the application up to HTTP Header Injection (aka Response Splitting) attacks. Enable the header checking protection by setting the httpRuntime element’s enableHeaderChecking attribute to true, which is the default value. For more information checkout the CWE-113 (https://cwe.mitre.org/data/definitions/113.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<httpRuntime`),
			regexp.MustCompile(`enableHeaderChecking\s*=\s*["|']false`),
		},
	}
}
// NewVersionHeaderEnabled builds the HS-CSHARP-12 rule: flags httpRuntime
// elements with enableVersionHeader="true", which discloses the ASP.NET
// framework version to clients.
func NewVersionHeaderEnabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-12",
			Name:        "Version Header Enabled",
			Description: "The Version HTTP response header sends the ASP.NET framework version to the client’s browser. This information can help an attacker identify vulnerabilities in the server’s framework version and should be disabled in production. Disable the version response header by setting the httpRuntime element’s enableVersionHeader attribute to false. For more information checkout the CWE-200 (https://cwe.mitre.org/data/definitions/200.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<httpRuntime`),
			regexp.MustCompile(`enableVersionHeader\s*=\s*["|']true`),
		},
	}
}
// NewEventValidationDisabled builds the HS-CSHARP-13 rule: flags pages
// elements with enableEventValidation="false", which permits forged post
// backs from controls not visible or enabled on the form.
func NewEventValidationDisabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-13",
			Name:        "Event Validation Disabled",
			Description: "Event validation prevents unauthorized post backs in web form applications. Disabling this feature can allow attackers to forge requests from controls not visible or enabled on a given web form. Enable event validation by setting the page element’s eventValidation attribute to true. For more information checkout the CWE-807 (https://cwe.mitre.org/data/definitions/807.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<pages`),
			regexp.MustCompile(`enableEventValidation\s*=\s*["|']false`),
		},
	}
}
// NewWeakSessionTimeout builds the HS-CSHARP-14 rule. It fires when a
// <sessionState> element declares a timeout value greater than 15 minutes.
//
// Fix: the original value pattern `(1[6-9]|[2-9][0-9]*)` also matched the
// single digits 2-9 (because `[0-9]*` allows zero trailing digits), so short
// timeouts such as timeout="5" were falsely reported as excessive. The
// pattern below matches 16-19, 20-99 and any value of three or more digits.
func NewWeakSessionTimeout() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-14",
			Name:        "Weak Session Timeout",
			Description: "If session data is used by the application for authentication, excessive timeout values provide attackers with a large window of opportunity to hijack user’s session tokens. Configure the session timeout value to meet your organization’s timeout policy. For more information checkout the CWE-613 (https://cwe.mitre.org/data/definitions/613.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		// AndMatch: both the element and the weak timeout value must be present.
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<sessionState`),
			regexp.MustCompile(`timeout\s*=\s*["|'](1[6-9]|[2-9][0-9]|[0-9]{3,})`),
		},
	}
}
// NewStateServerMode builds the HS-CSHARP-15 rule. It fires when a
// <sessionState> element sets mode="StateServer".
//
// Fix: the Name was "Weak Session Timeout", copy-pasted from HS-CSHARP-14;
// it now matches what this rule actually detects (see the Description and
// the mode=StateServer expression below).
func NewStateServerMode() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-15",
			Name:        "State Server Mode",
			Description: "The session StateServer mode transports session data insecurely to a remote server. The remote server also does not require system authentication to access the session data for an application. This risk depends entirely on the sensitivity of the data stored in the user’s session. If the session data is considered sensitive, consider adding an external control (e.g. IPSEC) that provides mutual authentication and transport security. For more information checkout the CWE-319 (https://cwe.mitre.org/data/definitions/319.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		// AndMatch: both the element and the StateServer mode must be present.
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<sessionState`),
			regexp.MustCompile(`mode\s*=\s*["|']StateServer`),
		},
	}
}
// NewJwtSignatureValidationDisabled builds the HS-CSHARP-16 rule. It fires
// when JWT bearer authentication is configured with TokenValidationParameters
// that set RequireExpirationTime, RequireSignedTokens or ValidateLifetime to
// false.
//
// Fix: the Description contained broken English ("can't was false"); the
// sentence now reads correctly. No pattern change.
func NewJwtSignatureValidationDisabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-16",
			Name:        "Jwt Signature Validation Disabled",
			Description: "Web service APIs relying on JSON Web Tokens (JWT) for authentication and authorization must sign each JWT with a private key or secret. Each web service endpoint must require JWT signature validation prior to decoding and using the token to access protected resources. The values RequireExpirationTime, RequireSignedTokens and ValidateLifetime must not be set to false. For more information checkout the CWE-347 (https://cwe.mitre.org/data/definitions/347.html) and CWE-613 (https://cwe.mitre.org/data/definitions/613.html) advisory.",
			Severity:    severities.Critical.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		// AndMatch: the auth setup, the JWT bearer registration, the
		// validation-parameter object and a disabled check must all appear.
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`AddAuthentication\(.*\)`),
			regexp.MustCompile(`AddJwtBearer`),
			regexp.MustCompile(`new TokenValidationParameters`),
			regexp.MustCompile(`(RequireExpirationTime\s*=\s*false|RequireSignedTokens\s*=\s*false|ValidateLifetime\s*=\s*false)`),
		},
	}
}
// NewInsecureHttpCookieTransport builds the HS-CSHARP-17 rule. It fires when
// a CookieOptions instance is created and its Secure property is assigned
// false (both patterns must match).
func NewInsecureHttpCookieTransport() *text.Rule {
	// AndMatch: both the constructor call and the insecure assignment must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`new\sCookieOptions\(\)`),
		regexp.MustCompile(`Secure\s*=\s*false`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-17",
			Name:        "Insecure Http Cookie Transport",
			Description: "Cookies containing authentication tokens, session tokens, and other state management credentials must be protected in transit across a network. Set the cookie options’ Secure property to true to prevent the browser from transmitting cookies over HTTP. For more information checkout the CWE-614 (https://cwe.mitre.org/data/definitions/614.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewHttpCookieAccessibleViaScript builds the HS-CSHARP-18 rule. It fires
// when a CookieOptions instance is created and its HttpOnly property is
// assigned false (both patterns must match).
func NewHttpCookieAccessibleViaScript() *text.Rule {
	// AndMatch: both the constructor call and the insecure assignment must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`new\sCookieOptions\(\)`),
		regexp.MustCompile(`HttpOnly\s*=\s*false`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-18",
			Name:        "Http Cookie Accessible Via Script",
			Description: "Cookies containing authentication tokens, session tokens, and other state management credentials should be protected from malicious JavaScript running in the browser. Setting the httpOnly attribute to false can allow attackers to inject malicious scripts into the site and extract authentication cookie values to a remote server. Configure the cookie options’ httpOnly property to true, which prevents cookie access from scripts running in the browser. For more information checkout the CWE-1004 (https://cwe.mitre.org/data/definitions/1004.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewDirectoryListingEnabled builds the HS-CSHARP-19 rule. It fires when a
// <directoryBrowse> element is present with its enabled attribute set to
// true (both patterns must match).
func NewDirectoryListingEnabled() *text.Rule {
	// AndMatch: both the element and the enabled=true attribute must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`\<directoryBrowse`),
		regexp.MustCompile(`enabled\s*=\s*['|"]true`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-19",
			Name:        "Directory Listing Enabled",
			Description: "Directory listing provides a complete index of the resources located in a web directory. Enabling directory listing can expose sensitive resources such as application binaries, configuration files, and static content that should not be exposed. Unless directory listing is required to meet the application’s functional requirements, disable the listing by setting the directoryBrowse element’s enabled attribute to false. For more information checkout the CWE-548 (https://cwe.mitre.org/data/definitions/548.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewLdapAuthenticationDisabled builds the HS-CSHARP-20 rule. It fires when
// a DirectoryEntry is constructed and AuthenticationTypes.Anonymous is used.
//
// Fix: the Description contained a duplicated word ("Set the the"); the
// sentence now reads correctly. No pattern change.
func NewLdapAuthenticationDisabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-20",
			Name:        "Ldap Authentication Disabled",
			Description: "Disabling LDAP Authentication configures insecure connections to the backend LDAP provider. Using the DirectoryEntry AuthenticationType property’s Anonymous or None option allows an anonymous or basic authentication connection to the LDAP provider. Set the DirectoryEntry AuthenticationType property to Secure, which requests Kerberos authentication under the security context of the calling thread or as a provider username and password. For more information checkout the CWE-287 (https://cwe.mitre.org/data/definitions/287.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		// AndMatch: both the constructor and the anonymous auth type must appear.
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new\sDirectoryEntry\(.*\)`),
			regexp.MustCompile(`AuthenticationTypes.Anonymous`),
		},
	}
}
// NewCertificateValidationDisabledAndMatch builds the HS-CSHARP-21 rule. It
// fires when a WebRequestHandler is created and a
// ServerCertificateValidationCallback that always returns true is attached
// (both patterns must match).
func NewCertificateValidationDisabledAndMatch() *text.Rule {
	// AndMatch: both the handler construction and the always-true callback
	// must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`new WebRequestHandler\(\)`),
		regexp.MustCompile(`ServerCertificateValidationCallback \+= \(.*\) => true;`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-21",
			Name:        "Certificate Validation Disabled",
			Description: "Disabling certificate validation is common in testing and development environments. Quite often, this is accidentally deployed to production, leaving the application vulnerable to man-in-the-middle attacks on insecure networks. For more information checkout the CWE-295 (https://cwe.mitre.org/data/definitions/295.html) advisory.",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewActionRequestValidationDisabled builds the HS-CSHARP-22 rule. It fires
// when an HTTP verb attribute (HttpGet/HttpPost/HttpPut/HttpDelete, with or
// without arguments) appears together with [ValidateInput(false)].
func NewActionRequestValidationDisabled() *text.Rule {
	// AndMatch: both the action attribute and the disabled validation must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`(\[HttpGet\(.*\)\]|\[HttpPost\(.*\)\]|\[HttpPut\(.*\)\]|\[HttpDelete\(.*\)\]|\[HttpGet\]|\[HttpPost\]|\[HttpPut\]|\[HttpDelete\])`),
		regexp.MustCompile(`\[ValidateInput\(false\)\]`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-22",
			Name:        "Action Request Validation Disabled",
			Description: "Request validation performs blacklist input validation for XSS payloads found in form and URL request parameters. Request validation has known bypass issues and does not prevent all XSS attacks, but it does provide a strong countermeasure for most payloads targeting a HTML context. For more information checkout the CWE-20 (https://cwe.mitre.org/data/definitions/20.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewXmlDocumentExternalEntityExpansion builds the HS-CSHARP-23 rule. It
// fires when an XmlDocument is created and an XmlResolver assignment that is
// not "null" appears (the second pattern's middle group matches any run of
// text that does not contain the word "null" before the closing semicolon).
func NewXmlDocumentExternalEntityExpansion() *text.Rule {
	// AndMatch: both the constructor and the non-null resolver must appear.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`new\sXmlDocument`),
		regexp.MustCompile(`(XmlResolver)(([^n]|n[^u]|nu[^l]|nul[^l])*)(;)`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-23",
			Name:        "Xml Document External Entity Expansion",
			Description: "XML External Entity (XXE) vulnerabilities occur when applications process untrusted XML data without disabling external entities and DTD processing. Processing untrusted XML data with a vulnerable parser can allow attackers to extract data from the server, perform denial of service attacks, and in some cases gain remote code execution. The XmlDocument class is vulnerable to XXE attacks when setting the XmlResolver property to resolve external entities. To prevent XmlDocument XXE attacks, set the XmlResolver property to null. For more information checkout the CWE-611 (https://cwe.mitre.org/data/definitions/611.html) advisory.",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type:        text.AndMatch,
		Expressions: expressions,
	}
}
// NewLdapInjectionFilterAssignment returns the HS-CSHARP-24 rule, which flags
// a DirectorySearcher .Filter assignment that is not routed through
// Encoder.LdapFilterEncode. All three patterns must match (AndMatch).
func NewLdapInjectionFilterAssignment() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-24",
			Name: "Ldap Injection Filter Assignment",
			Description: "LDAP Injection vulnerabilities occur when untrusted data is concatenated into a LDAP Path or Filter expression without properly escaping control characters. This can allow attackers to change the meaning of an LDAP query and gain access to resources for which they are not authorized. For more information checkout the CWE-90 (https://cwe.mitre.org/data/definitions/90.html) advisory.",
			Severity: severities.Medium.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new DirectoryEntry\(.*\)`),
			regexp.MustCompile(`new DirectorySearcher\(.*\)`),
			// The long alternation group emulates a negative lookahead (which
			// Go's RE2 engine does not support): it matches any run of text
			// between ".Filter" and ");" that does NOT contain the literal
			// substring "Encoder.LdapFilterEncode".
			regexp.MustCompile(`(\.Filter)(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r]|Encoder[^.]|Encoder\.[^L]|Encoder\.L[^d]|Encoder\.Ld[^a]|Encoder\.Lda[^p]|Encoder\.Ldap[^F]|Encoder\.LdapF[^i]|Encoder\.LdapFi[^l]|Encoder\.LdapFil[^t]|Encoder\.LdapFilt[^e]|Encoder\.LdapFilte[^r]|Encoder\.LdapFilter[^E]|Encoder\.LdapFilterE[^n]|Encoder\.LdapFilterEn[^c]|Encoder\.LdapFilterEnc[^o]|Encoder\.LdapFilterEnco[^d]|Encoder\.LdapFilterEncod[^e])*)(\);)`),
		},
	}
}
// NewSqlInjectionDynamicNHibernateQuery returns the HS-CSHARP-25 rule, which
// flags SQL strings built with concatenation that are executed via
// NHibernate's CreateQuery without a subsequent SetString parameter binding.
// Both patterns must match (AndMatch).
func NewSqlInjectionDynamicNHibernateQuery() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-25",
			Name: "Sql Injection: Dynamic NHibernate Query",
			Description: "Concatenating untrusted data into a dynamic SQL string and calling vulnerable NHibernate Framework methods can allow SQL Injection. To ensure calls to vulnerable NHibernate Framework methods are parameterized, pass positional or named parameters in the statement. The following NHibernate methods allow for raw SQL queries to be executed: CreateQuery CreateSqlQuery To ensure calls to vulnerable NHibernate methods are parameterized, use named parameters in the raw SQL query. Then, set the named parameter values when executing the query. For more information checkout the CWE-89 (https://cwe.mitre.org/data/definitions/89.html) advisory.",
			Severity: severities.Medium.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			// A quoted SQL keyword followed by string concatenation ("+").
			regexp.MustCompile(`(?i)["|'](SELECT|INSERT|UPDATE|DELETE).*\+`),
			// The alternation group emulates a negative lookahead: any text
			// after "CreateQuery(...);" that does NOT contain "SetString".
			regexp.MustCompile(`(CreateQuery\(.*\);)(([^S]|S[^e]|Se[^t]|Set[^S]|SetS[^t]|SetSt[^r]|SetStr[^i]|SetStri[^n]|SetStrin[^g])*)(;)`),
		},
	}
}
// NewLdapInjectionDirectorySearcher returns the HS-CSHARP-26 rule, which
// flags a DirectorySearcher construction whose arguments are not passed
// through Encoder.LdapFilterEncode. Both patterns must match (AndMatch).
func NewLdapInjectionDirectorySearcher() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-26",
			Name: "Ldap Injection Directory Searcher",
			Description: "LDAP Injection vulnerabilities occur when untrusted data is concatenated into a LDAP Path or Filter expression without properly escaping control characters. This can allow attackers to change the meaning of an LDAP query and gain access to resources for which they are not authorized. For more information checkout the CWE-90 (https://cwe.mitre.org/data/definitions/90.html) advisory.",
			Severity: severities.Medium.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new DirectoryEntry\(.*\)`),
			// The alternation group emulates a negative lookahead (RE2 has
			// none): text between "new DirectorySearcher" and ");" that does
			// NOT contain "Encoder.LdapFilterEncode".
			regexp.MustCompile(`(new DirectorySearcher)(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r]|Encoder[^.]|Encoder\.[^L]|Encoder\.L[^d]|Encoder\.Ld[^a]|Encoder\.Lda[^p]|Encoder\.Ldap[^F]|Encoder\.LdapF[^i]|Encoder\.LdapFi[^l]|Encoder\.LdapFil[^t]|Encoder\.LdapFilt[^e]|Encoder\.LdapFilte[^r]|Encoder\.LdapFilter[^E]|Encoder\.LdapFilterE[^n]|Encoder\.LdapFilterEn[^c]|Encoder\.LdapFilterEnc[^o]|Encoder\.LdapFilterEnco[^d]|Encoder\.LdapFilterEncod[^e])*)(\);)`),
		},
	}
}
// NewLdapInjectionPathAssignment returns the HS-CSHARP-27 rule, which flags
// a DirectoryEntry .Path assignment that is not routed through
// Encoder.LdapDistinguishedNameEncode. Both patterns must match (AndMatch).
func NewLdapInjectionPathAssignment() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-27",
			Name: "Ldap Injection Path Assignment",
			Description: "LDAP Injection vulnerabilities occur when untrusted data is concatenated into a LDAP Path or Filter expression without properly escaping control characters. This can allow attackers to change the meaning of an LDAP query and gain access to resources for which they are not authorized. For more information checkout the CWE-90 (https://cwe.mitre.org/data/definitions/90.html) advisory.",
			Severity: severities.Medium.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.AndMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new DirectoryEntry\(\)`),
			// The alternation group emulates a negative lookahead: text
			// between ".Path" and ");" that does NOT contain the substring
			// "Encoder.LdapDistinguishedNameEncode".
			regexp.MustCompile(`(\.Path)(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r]|Encoder[^.]|Encoder\.[^L]|Encoder\.L[^d]|Encoder\.Ld[^a]|Encoder\.Lda[^p]|Encoder\.Ldap[^D]|Encoder\.LdapD[^i]|Encoder\.LdapDi[^s]|Encoder\.LdapDis[^t]|Encoder\.LdapDist[^i]|Encoder\.LdapDisti[^n]|Encoder\.LdapDistin[^g]|Encoder\.LdapDisting[^u]|Encoder\.LdapDistingu[^i]|Encoder\.LdapDistingui[^s]|Encoder\.LdapDistinguis[^h]|Encoder\.LdapDistinguish[^e]|Encoder\.LdapDistinguishe[^d]|Encoder\.LdapDistinguished[^N]|Encoder\.LdapDistinguishedN[^a]|Encoder\.LdapDistinguishedNa[^m]|Encoder\.LdapDistinguishedNam[^e]|Encoder\.LdapDistinguishedName[^E]|Encoder\.LdapDistinguishedNameE[^n]|Encoder\.LdapDistinguishedNameEn[^c]|Encoder\.LdapDistinguishedNameEnc[^o]|Encoder\.LdapDistinguishedNameEnco[^d]|Encoder\.LdapDistinguishedNameEncod[^e])*)(\);)`),
		},
	}
}
// NewLDAPInjection returns the HS-CSHARP-28 rule. Unlike the AndMatch LDAP
// rules above, this is an OrMatch rule: either expression alone fires it.
// Each expression flags a DirectorySearcher/DirectoryEntry construction not
// routed through the corresponding OWASP Encoder method.
func NewLDAPInjection() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-28",
			Name: "LDAP Injection",
			Description: "The dynamic value passed to the LDAP query should be validated. For more information access: (https://security-code-scan.github.io/#SCS0031).",
			Severity: severities.High.ToString(),
			Confidence: confidence.High.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text after "new DirectorySearcher()"
			// that does NOT contain "Encoder.LdapFilterEncode".
			regexp.MustCompile(`(new DirectorySearcher\(\))(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r]|Encoder[^.]|Encoder\.[^L]|Encoder\.L[^d]|Encoder\.Ld[^a]|Encoder\.Lda[^p]|Encoder\.Ldap[^F]|Encoder\.LdapF[^i]|Encoder\.LdapFi[^l]|Encoder\.LdapFil[^t]|Encoder\.LdapFilt[^e]|Encoder\.LdapFilte[^r]|Encoder\.LdapFilter[^E]|Encoder\.LdapFilterE[^n]|Encoder\.LdapFilterEn[^c]|Encoder\.LdapFilterEnc[^o]|Encoder\.LdapFilterEnco[^d]|Encoder\.LdapFilterEncod[^e])*)(\)";)`),
			// Negative-lookahead emulation: text after "new DirectoryEntry()"
			// that does NOT contain "Encoder.LdapDistinguishedNameEncode".
			regexp.MustCompile(`(new DirectoryEntry\(\))(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r]|Encoder[^.]|Encoder\.[^L]|Encoder\.L[^d]|Encoder\.Ld[^a]|Encoder\.Lda[^p]|Encoder\.Ldap[^D]|Encoder\.LdapD[^i]|Encoder\.LdapDi[^s]|Encoder\.LdapDis[^t]|Encoder\.LdapDist[^i]|Encoder\.LdapDisti[^n]|Encoder\.LdapDistin[^g]|Encoder\.LdapDisting[^u]|Encoder\.LdapDistingu[^i]|Encoder\.LdapDistingui[^s]|Encoder\.LdapDistinguis[^h]|Encoder\.LdapDistinguish[^e]|Encoder\.LdapDistinguishe[^d]|Encoder\.LdapDistinguished[^N]|Encoder\.LdapDistinguishedN[^a]|Encoder\.LdapDistinguishedNa[^m]|Encoder\.LdapDistinguishedNam[^e]|Encoder\.LdapDistinguishedName[^E]|Encoder\.LdapDistinguishedNameE[^n]|Encoder\.LdapDistinguishedNameEn[^c]|Encoder\.LdapDistinguishedNameEnc[^o]|Encoder\.LdapDistinguishedNameEnco[^d]|Encoder\.LdapDistinguishedNameEncod[^e])*)(,.*";)`),
		},
	}
}
// NewSQLInjectionLinq builds the HS-CSHARP-29 rule. It fires (OrMatch, single
// expression) when a var assignment or ExecuteQuery call contains a SQL
// keyword followed by string concatenation.
//
// Fix: the Description contained a doubled period ("database.. For more");
// the sentence now reads correctly. No pattern change.
func NewSQLInjectionLinq() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-29",
			Name:        "SQL Injection LINQ",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0002).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(?i)(var|ExecuteQuery).*(=|\().*(SELECT|UPDATE|DELETE|INSERT).*\++`),
		},
	}
}
// NewInsecureDeserialization builds the HS-CSHARP-30 rule. It fires (OrMatch)
// when either a BinaryFormatter.Deserialize call or a JavaScriptSerializer
// constructed with an argument is found.
func NewInsecureDeserialization() *text.Rule {
	// OrMatch: any single expression is enough to trigger the rule.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`new\sBinaryFormatter\(\)\.Deserialize\(.*\)`),
		regexp.MustCompile(`new\sJavaScriptSerializer\(..*\)`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-30",
			Name:        "Insecure Deserialization",
			Description: "Arbitrary code execution, full application compromise or denial of service. An attacker may pass specially crafted serialized .NET object of specific class that will execute malicious code during the construction of the object. For more information access: (https://security-code-scan.github.io/#SCS0028).",
			Severity:    severities.Low.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type:        text.OrMatch,
		Expressions: expressions,
	}
}
// NewSQLInjectionEnterpriseLibraryData builds the HS-CSHARP-31 rule. It fires
// (OrMatch) on Enterprise Library Data calls: GetSqlStringCommand followed by
// ExecuteDataSet without an AddInParameter in between, or an ExecuteDataSet
// call with an inline SELECT statement.
//
// Fix: the Description was copy-pasted from the insecure-deserialization rule
// and talked about serialized .NET objects; it now describes SQL injection,
// consistent with the other SQL injection rules in this file.
func NewSQLInjectionEnterpriseLibraryData() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-31",
			Name:        "SQL Injection Enterprise Library Data",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0036).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text between GetSqlStringCommand
			// and ExecuteDataSet that does NOT contain "AddInParameter".
			regexp.MustCompile(`(GetSqlStringCommand\(.*\))(([^A]|A[^d]|Ad[^d]|Add[^I]|AddI[^n]|AddIn[^P]|AddInP[^a]|AddInPa[^r]|AddInPar[^a]|AddInPara[^m]|AddInParam[^e]|AddInParame[^t]|AddInParamet[^e]|AddInParamete[^r])*)(ExecuteDataSet\(.*\))`),
			regexp.MustCompile(`ExecuteDataSet\(CommandType.*, "(SELECT|select).*(FROM|from).*(WHERE|where).*"\)`),
		},
	}
}
// NewCQLInjectionCassandra builds the HS-CSHARP-32 rule. It fires (OrMatch)
// on Cassandra calls: a Prepare of an inline SELECT executed without a Bind
// in between, or a direct Execute of an inline SELECT string.
//
// Fix: the Description was copy-pasted from the insecure-deserialization rule
// and talked about serialized .NET objects; it now describes injection via
// unparameterized queries, consistent with the other injection rules.
func NewCQLInjectionCassandra() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-32",
			Name:        "CQL Injection Cassandra",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0038).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text between Prepare("SELECT...")
			// and Execute(...) that does NOT contain "Bind".
			regexp.MustCompile(`(Prepare\("(SELECT|select).*(FROM|from).*(WHERE|where).*\))(([^B]|B[^i]|Bi[^n]|Bin[^d])*)(Execute\(.*\))`),
			regexp.MustCompile(`Execute\("(SELECT|select).*(FROM|from).*(WHERE|where).*"\)`),
		},
	}
}
// NewPasswordComplexity returns the HS-CSHARP-33 rule. It fires (OrMatch)
// when a PasswordValidator is constructed empty, with an initializer that
// sets no requirements, with RequiredLength below 8, or with an initializer
// that never mentions RequiredLength at all.
func NewPasswordComplexity() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-33",
			Name: "Password Complexity",
			Description: "PasswordValidator should have at least two requirements for better security, the RequiredLength property must be set with a minimum value of 8. For more information access: (https://security-code-scan.github.io/#SCS0027).",
			Severity: severities.Low.ToString(),
			Confidence: confidence.Low.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			// Bare constructor: no requirements configured at all.
			regexp.MustCompile(`new\sPasswordValidator\(\)`),
			// Object initializer that closes without setting requirements.
			regexp.MustCompile(`new\sPasswordValidator(\n?\s*{)(\n*.*=.*,?)(\s|\n)*[^a-z]}`),
			// RequiredLength explicitly set to a single digit 0-7 (below 8).
			regexp.MustCompile(`new\sPasswordValidator(\n?\s*{)((\n|.*)*RequiredLength=[0-7][^\d])`),
			// Negative-lookahead emulation: initializer body that does NOT
			// contain "RequiredLength" before the closing brace.
			regexp.MustCompile(`(new\sPasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^d]|Required[^L]|RequiredL[^e]|RequiredLe[^n]|RequiredLen[^g]|RequiredLeng[^t]|RequiredLengt[^h])*)(})`),
		},
	}
}
// NewCookieWithoutSSLFlag returns the HS-CSHARP-34 rule. It fires (OrMatch)
// when cookies are configured without the Secure flag: requireSSL="false",
// a <forms> element with no requireSSL attribute, an HttpCookie whose
// .Secure is assigned false, or an HttpCookie initializer that never sets
// Secure.
func NewCookieWithoutSSLFlag() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-34",
			Name: "Cookie Without SSL Flag",
			Description: "It is recommended to specify the Secure flag to new cookie. The Secure flag is a directive to the browser to make sure that the cookie is not sent by unencrypted channel. For more information access: (https://security-code-scan.github.io/#SCS0008) and (https://cwe.mitre.org/data/definitions/614.html).",
			Severity: severities.Low.ToString(),
			Confidence: confidence.Low.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`requireSSL\s*=\s*['|"]false['|"]`),
			// Negative-lookahead emulation: a <forms .../> element whose body
			// does NOT contain "requireSSL".
			regexp.MustCompile(`(\<forms)(([^r]|r[^e]|re[^q]|req[^u]|requ[^i]|requi[^r]|requir[^e]|require[^S]|requireS[^S]|requireSS[^L])*)(\/\>)`),
			regexp.MustCompile(`(new\sHttpCookie\(.*\))(.*|\n)*(\.Secure\s*=\s*false)`),
			// Negative-lookahead emulation: an HttpCookie initializer that
			// does NOT mention "Secure" before the closing brace.
			regexp.MustCompile(`(new\sHttpCookie)(([^S]|S[^e]|Se[^c]|Sec[^u]|Secu[^r]|Secur[^e])*)(})`),
		},
	}
}
// NewCookieWithoutHttpOnlyFlag returns the HS-CSHARP-35 rule. It fires
// (OrMatch) when cookies lack the HttpOnly flag: httpOnlyCookies="false", an
// HttpCookie whose .HttpOnly is assigned false, or an HttpCookie initializer
// that never sets HttpOnly.
func NewCookieWithoutHttpOnlyFlag() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-35",
			Name: "Cookie Without HttpOnly Flag",
			Description: "It is recommended to specify the HttpOnly flag to new cookie. For more information access: (https://security-code-scan.github.io/#SCS0009) or (https://cwe.mitre.org/data/definitions/1004.html).",
			Severity: severities.Low.ToString(),
			Confidence: confidence.Low.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`httpOnlyCookies\s*=\s*['|"]false['|"]`),
			regexp.MustCompile(`(new\sHttpCookie\(.*\))(.*|\n)*(\.HttpOnly\s*=\s*false)`),
			// Negative-lookahead emulation: an HttpCookie initializer that
			// does NOT mention "HttpOnly" before the closing brace.
			regexp.MustCompile(`(new\sHttpCookie)(([^H]|H[^t]|Ht[^t]|Htt[^p]|Http[^O]|HttpO[^n]|HttpOn[^l]|HttpOnl[^y])*)(})`),
		},
	}
}
// NewNoInputVariable builds the HS-CSHARP-36 rule. It fires (OrMatch) when a
// string variable is assigned directly from a <%= ... %> server expression,
// or when anything is written into .innerHTML.
func NewNoInputVariable() *text.Rule {
	// OrMatch: any single expression is enough to trigger the rule.
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`\s*var\s+\w+\s*=\s*"\s*\<\%\s*=\s*\w+\%\>";`),
		regexp.MustCompile(`\.innerHTML\s*=\s*.+`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-36",
			Name:        "No input variable",
			Description: "The application appears to allow XSS through an unencrypted / unauthorized input variable. https://owasp.org/www-community/attacks/xss/. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.OrMatch,
		Expressions: expressions,
	}
}
// NewIdentityWeakPasswordComplexity returns the HS-CSHARP-37 rule. It fires
// (OrMatch) when a PasswordValidator is constructed empty, sets a
// RequiredLength of 0-7, or has an initializer that omits (or does not set
// to true) any one of: RequiredLength, RequireDigit, RequireLowercase,
// RequireNonLetterOrDigit, RequireUppercase.
func NewIdentityWeakPasswordComplexity() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-37",
			Name: "Identity Weak Password Complexity",
			Description: "Weak passwords can allow attackers to easily guess user passwords using wordlist or brute force attacks. Enforcing a strict password complexity policy mitigates these attacks by significantly increasing the time to guess a user’s valid password. For more information checkout the CWE-521 (https://cwe.mitre.org/data/definitions/521.html) advisory.",
			Severity: severities.Critical.ToString(),
			Confidence: confidence.High.ToString(),
		},
		Type: text.OrMatch,
		Expressions: []*regexp.Regexp{
			// Bare constructor: no complexity requirements configured.
			regexp.MustCompile(`new PasswordValidator\(\)`),
			// RequiredLength set to a single digit below 8.
			regexp.MustCompile(`RequiredLength = \b([0-7])\b`),
			// Each pattern below emulates a negative lookahead (RE2 has none):
			// an initializer body that does NOT contain the named property
			// (or the property assigned "= true") before the closing "};".
			regexp.MustCompile(`(new PasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^d]|Required[^L]|RequiredL[^e]|RequiredLe[^n]|RequiredLen[^g]|RequiredLeng[^t]|RequiredLengt[^h])*)(};)`),
			regexp.MustCompile(`(new PasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^D]|RequireD[^i]|RequireDi[^g]|RequireDig[^i]|RequireDigi[^t]|RequireDigit[^ ]|RequireDigit [^=]|RequireDigit =[^ ]|RequireDigit = [^t]|RequireDigit = t[^r]|RequireDigit = tr[^u]|RequireDigit = tru[^e])*)(};)`),
			regexp.MustCompile(`(new PasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^L]|RequireL[^o]|RequireLo[^w]|RequireLow[^e]|RequireLowe[^r]|RequireLower[^c]|RequireLowerc[^a]|RequireLowerca[^s]|RequireLowercas[^e]|RequireLowercase[^ ]|RequireLowercase [^=]|RequireLowercase =[^ ]|RequireLowercase = [^t]|RequireLowercase = t[^r]|RequireLowercase = tr[^u]|RequireLowercase = tru[^e])*)(};)`),
			regexp.MustCompile(`(new PasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^N]|RequireN[^o]|RequireNo[^n]|RequireNon[^L]|RequireNonL[^e]|RequireNonLe[^t]|RequireNonLet[^t]|RequireNonLett[^e]|RequireNonLette[^r]|RequireNonLetter[^O]|RequireNonLetterO[^r]|RequireNonLetterOr[^D]|RequireNonLetterOrD[^i]|RequireNonLetterOrDi[^g]|RequireNonLetterOrDig[^i]|RequireNonLetterOrDigi[^t]|RequireNonLetterOrDigit[^ ]|RequireNonLetterOrDigit [^=]|RequireNonLetterOrDigit =[^ ]|RequireNonLetterOrDigit = [^t]|RequireNonLetterOrDigit = t[^r]|RequireNonLetterOrDigit = tr[^u]|RequireNonLetterOrDigit = tru[^e])*)(};)`),
			regexp.MustCompile(`(new PasswordValidator)(([^R]|R[^e]|Re[^q]|Req[^u]|Requ[^i]|Requi[^r]|Requir[^e]|Require[^U]|RequireU[^p]|RequireUp[^p]|RequireUpp[^e]|RequireUppe[^r]|RequireUpper[^c]|RequireUpper[^c]|RequireUpperc[^a]|RequireUpperca[^s]|RequireUppercas[^e]|RequireUppercase[^ ]|RequireUppercase [^=]|RequireUppercase =[^ ]|RequireUppercase = [^t]|RequireUppercase = t[^r]|RequireUppercase = tr[^u]|RequireUppercase = tru[^e])*)(};)`),
		},
	}
}
// NewNoLogSensitiveInformationInConsole builds the HS-CSHARP-38 rule. It is
// a Regular (single-expression) rule that flags logger method calls
// (Verbose/Debug/Info/Warn/Erro/ForContext/FromLogContext/Seq) and
// Console.Write usage, case-insensitively.
func NewNoLogSensitiveInformationInConsole() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`(?i)(((Log|log).*\.(Verbose|Debug|Info|Warn|Erro|ForContext|FromLogContext|Seq))|(Console.Write))`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-38",
			Name:        "No Log Sensitive Information in console",
			Description: "The App logs information. Sensitive information should never be logged. For more information checkout the CWE-532 (https://cwe.mitre.org/data/definitions/532.html) advisory.",
			Severity:    severities.Info.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewOutputCacheConflict builds the HS-CSHARP-39 rule. It is a Regular rule
// that flags an [Authorize] attribute followed (possibly across lines) by an
// [OutputCache] attribute.
func NewOutputCacheConflict() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`(\[Authorize\])(.*|\n)*(\[OutputCache\])`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-39",
			Name:        "OutputCache Conflict",
			Description: "Having the annotation [OutputCache] will disable the annotation [Authorize] for the requests following the first one. For more information access: (https://security-code-scan.github.io/#SCS0019).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewOpenRedirect builds the HS-CSHARP-40 rule. It is a Regular rule that
// flags a String.IsNullOrEmpty check followed within a couple of lines by
// `return Redirect(...)` — i.e. a redirect guarded only by an emptiness test.
func NewOpenRedirect() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`String.IsNullOrEmpty.*\n?.*{?\n?.*return\sRedirect\(.*\);`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-40",
			Name:        "Open Redirect",
			Description: "Your site may be used in phishing attacks. An attacker may craft a trustworthy looking link to your site redirecting a victim to a similar looking malicious site: 'http://yourdomain.com?redirect=https://urdomain.com/login'. For more information access: (https://security-code-scan.github.io/#SCS0027).",
			Severity:    severities.Low.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewRequestValidationDisabledAttribute builds the HS-CSHARP-41 rule. It is
// a Regular rule that flags the [ValidateInput(false)] attribute anywhere in
// the scanned text.
func NewRequestValidationDisabledAttribute() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`\[ValidateInput\(false\)\]`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-41",
			Name:        "Request Validation Disabled (Attribute)",
			Description: "Request validation is disabled. Request validation allows the filtering of some XSS patterns submitted to the application. For more information access: (https://security-code-scan.github.io/#SCS0017).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewSQLInjectionOLEDB returns the HS-CSHARP-42 rule. It is a Regular rule
// that flags an OleDbConnection whose command is executed (ExecuteReader)
// without a Parameters.Add call in between.
func NewSQLInjectionOLEDB() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-42",
			Name: "SQL Injection OLE DB",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0020).",
			Severity: severities.High.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text between the connection
			// constructor and ".ExecuteReader(...)" that does NOT contain
			// "Parameters.Add".
			regexp.MustCompile(`(new OleDbConnection\(.*\))(([^P]|P[^a]|Pa[^r]|Par[^a]|Para[^m]|Param[^e]|Parame[^t]|Paramet[^e]|Paramete[^r]|Parameter[^s]|Parameters[^.]|Parameters\.[^A]|Parameters\.A[^d]|Parameters\.Ad[^d])*)(\.ExecuteReader\(.*\))`),
		},
	}
}
// NewRequestValidationDisabledConfigurationFile builds the HS-CSHARP-43
// rule. It is a Regular rule that flags validateRequest="false" (single or
// double quotes) in configuration files.
func NewRequestValidationDisabledConfigurationFile() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`validateRequest\s*=\s*['|"]false['|"]`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-43",
			Name:        "Request Validation Disabled (Configuration File)",
			Description: "The validateRequest which provides additional protection against XSS is disabled in configuration file. For more information access: (https://security-code-scan.github.io/#SCS0017) or (https://cwe.mitre.org/data/definitions/20.html).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewSQLInjectionMsSQLDataProvider returns the HS-CSHARP-44 rule. It is a
// Regular rule that flags a SqlCommand that reaches Open()/ExecuteReader()
// without a Parameters.AddWithValue call in between.
func NewSQLInjectionMsSQLDataProvider() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-44",
			Name: "SQL Injection MsSQL Data Provider",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0026).",
			Severity: severities.High.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text between "new SqlCommand(...)"
			// and Open()/ExecuteReader() that does NOT contain
			// "Parameters.AddWithValue".
			regexp.MustCompile(`(new SqlCommand\(.*\))(([^P]|P[^a]|Pa[^r]|Par[^a]|Para[^m]|Param[^e]|Parame[^t]|Paramet[^e]|Paramete[^r]|Parameter[^s]|Parameters[^.]|Parameters\.[^A]|Parameters\.A[^d]|Parameters\.Ad[^d]|Parameters\.Add[^W]|Parameters\.AddW[^i]|Parameters\.AddWi[^t]|Parameters\.AddWit[^h]|Parameters\.AddWith[^V]|Parameters\.AddWithV[^a]|Parameters\.AddWithVa[^l]|Parameters\.AddWithVal[^u]|Parameters\.AddWithValu[^e])*)(Open\(\)|ExecuteReader\(\))`),
		},
	}
}
// NewRequestValidationIsEnabledOnlyForPages builds the HS-CSHARP-45 rule. It
// is a Regular rule that flags requestValidationMode set to a value starting
// with a digit 0-3 (e.g. "2.0"), which restricts validation to pages only.
func NewRequestValidationIsEnabledOnlyForPages() *text.Rule {
	expressions := []*regexp.Regexp{
		regexp.MustCompile(`requestValidationMode\s*=\s*['|"][0-3][^\d].*['|"]`),
	}

	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-45",
			Name:        "Request validation is enabled only for pages",
			Description: "The requestValidationMode which provides additional protection against XSS is enabled only for pages, not for all HTTP requests in configuration file. For more information access: (https://security-code-scan.github.io/#SCS0030).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type:        text.Regular,
		Expressions: expressions,
	}
}
// NewSQLInjectionEntityFramework returns the HS-CSHARP-46 rule. It is a
// Regular rule that flags Database.ExecuteSqlCommand calls whose argument
// list does not include a SqlParameter.
func NewSQLInjectionEntityFramework() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-46",
			Name: "SQL Injection Entity Framework",
			Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database, please use SqlParameter to create query with parameters. For more information access: (https://security-code-scan.github.io/#SCS0035) or (https://cwe.mitre.org/data/definitions/89.html) .",
			Severity: severities.High.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Negative-lookahead emulation: text between ExecuteSqlCommand and
			// ");" that does NOT contain the substring "SqlParameter".
			regexp.MustCompile(`(Database\.ExecuteSqlCommand)(([^S]|S[^q]|Sq[^l]|Sql[^P]|SqlP[^a]|SqlPa[^r]|SqlPar[^a]|SqlPara[^m]|SqlParam[^e]|SqlParame[^t]|SqlParamet[^e]|SqlParamete[^r])*)(\);)`),
		},
	}
}
// NewViewStateNotEncrypted builds the HS-CSHARP-47 rule. It flags
// viewStateEncryptionMode set to Auto or Never (anything but Always) in the
// configuration file, which can leak sensitive ViewState data to the client.
func NewViewStateNotEncrypted() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-47",
			Name:        "View State Not Encrypted",
			Description: "The viewStateEncryptionMode is not set to Always in configuration file. Web Forms controls use hidden base64 encoded fields to store state information. If sensitive information is stored there it may be leaked to the client side. For more information access: (https://security-code-scan.github.io/#SCS0023) or (https://cwe.mitre.org/data/definitions/200.html).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Fix: ['|"] also matched a literal '|'; ['"] matches only quotes.
			regexp.MustCompile(`viewStateEncryptionMode\s*=\s*['"](Auto|Never)['"]`),
		},
	}
}
// NewSQLInjectionNhibernate builds the HS-CSHARP-48 rule, which flags
// CreateSQLQuery statements that never call SetParameter before ending.
func NewSQLInjectionNhibernate() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-48",
		Name:        "SQL Injection Nhibernate",
		Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0037).",
		Severity:    severities.High.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "SetParameter" (no RE2 lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(CreateSQLQuery)(([^S]|S[^e]|Se[^t]|Set[^P]|SetP[^a]|SetPa[^r]|SetPar[^a]|SetPara[^m]|SetParam[^e]|SetParame[^t]|SetParamet[^e]|SetParamete[^r])*)(\);)`),
		},
	}
}
// NewViewStateMacDisabled builds the HS-CSHARP-49 rule. It flags
// enableViewStateMac="false" in the configuration file, which lets an
// attacker tamper with ViewState undetected.
func NewViewStateMacDisabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-49",
			Name:        "View State MAC Disabled",
			Description: "The enableViewStateMac is disabled in configuration file. (This feature cannot be disabled starting .NET 4.5.1). The view state could be altered by an attacker. For more information access: (https://security-code-scan.github.io/#SCS0024) or (https://cwe.mitre.org/data/definitions/807.html).",
			Severity:    severities.High.ToString(),
			Confidence:  confidence.High.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Fix: ['|"] also matched a literal '|'; ['"] matches only quotes.
			regexp.MustCompile(`enableViewStateMac\s*=\s*['"]false['"]`),
		},
	}
}
// NewSQLInjectionNpgsql builds the HS-CSHARP-50 rule, which flags
// NpgsqlCommand usages executed without any Parameters.AddWithValue call.
func NewSQLInjectionNpgsql() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-50",
		Name:        "SQL Injection Npgsql",
		Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information access: (https://security-code-scan.github.io/#SCS0039).",
		Severity:    severities.High.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "Parameters.AddWithValue" between
		// command construction and execution (RE2 has no lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(NpgsqlCommand\(.*\))(([^P]|P[^a]|Pa[^r]|Par[^a]|Para[^m]|Param[^e]|Parame[^t]|Paramet[^e]|Paramete[^r]|Parameter[^s]|Parameters[^.]|Parameters\.[^A]|Parameters\.A[^d]|Parameters\.Ad[^d]|Parameters\.Add[^W]|Parameters\.AddW[^i]|Parameters\.AddWi[^t]|Parameters\.AddWit[^h]|Parameters\.AddWith[^V]|Parameters\.AddWithV[^a]|Parameters\.AddWithVa[^l]|Parameters\.AddWithVal[^u]|Parameters\.AddWithValu[^e])*)(ExecuteNonQuery\(.*\)|ExecuteReader\(.*\))`),
		},
	}
}
// NewCertificateValidationDisabled builds the HS-CSHARP-51 rule, which flags
// a ServerCertificateValidationCallback that unconditionally returns true.
func NewCertificateValidationDisabled() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-51",
		Name:        "Certificate Validation Disabled",
		Description: "Disabling certificate validation is often used to connect easily to a host that is not signed by a root certificate authority. As a consequence, this is vulnerable to Man-in-the-middle attacks since the client will trust any certificate. For more information access: (https://security-code-scan.github.io/#SCS0004).",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.High.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`ServicePointManager\.ServerCertificateValidationCallback \+= (.*) => true;`),
		},
	}
}
// NewWeakCipherAlgorithm builds the HS-CSHARP-52 rule. It flags data written
// with a DES.Create() cipher when no AesManaged replacement appears before
// the Write call.
func NewWeakCipherAlgorithm() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-52",
			Name:        "Weak cipher algorithm",
			Description: "Broken or deprecated ciphers have typically known weakness. A attacker might be able to brute force the secret key use for the encryption. The confidentiality and integrity of the information encrypted is at risk. For more information access: (https://security-code-scan.github.io/#SCS0010).",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Low.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Fix: the dot in "DES.Create" was unescaped and matched any
			// character; escape it for consistency with the DES\.Create
			// pattern used by HS-CSHARP-58.
			regexp.MustCompile(`(DES\.Create\(\))(([^A]|A[^e]|Ae[^s]|Aes[^M]|AesM[^a]|AesMa[^n]|AesMan[^a]|AesMana[^g]|AesManag[^e]|AesManage[^d])*)(Write\(.*\))`),
		},
	}
}
// NewNoUseHtmlRaw builds the HS-CSHARP-53 rule, which flags any use of the
// Html.Raw helper (raw, unencoded HTML output).
func NewNoUseHtmlRaw() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-53",
		Name:        "No use Html.Raw",
		Description: "The application uses the potentially dangerous Html.Raw construct in conjunction with a user-supplied variable. The recommendation is to avoid using HTML assembly, but if it is extremely necessary to allow Html, we suggest the following: support only a fixed subset of Html, after the user submits content, analyze the Html and filter it in a whitelist of allowed tags and attributes. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.High.ToString(),
		Confidence:  confidence.High.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`Html\.Raw\(`),
		},
	}
}
// NewNoLogSensitiveInformation builds the HS-CSHARP-54 rule, which flags
// <customErrors mode="Off"> (standard .NET error pages shown to clients).
func NewNoLogSensitiveInformation() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-54",
		Name:        "No log sensitive information debug mode",
		Description: "The application is configured to display standard .NET errors. This can provide the attacker with useful information and should not be used in a production application. https://docs.microsoft.com/en-us/aspnet/web-forms/overview/older-versions-getting-started/deploying-web-site-projects/displaying-a-custom-error-page-cs. For more information checkout the CWE-12 (https://cwe.mitre.org/data/definitions/12.html) advisory.",
		Severity:    severities.Low.ToString(),
		Confidence:  confidence.Low.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`<\s*customErrors\s+mode\s*=\s*\"Off\"\s*/?>`),
		},
	}
}
// NewNoReturnStringConcatInController builds the HS-CSHARP-55 rule, which
// flags controllers returning string concatenations (potential reflected XSS).
func NewNoReturnStringConcatInController() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-55",
		Name:        "No return string concat in controller",
		Description: "A potential Cross-Site Scripting (XSS) was found. The endpoint returns a variable from the client entry that has not been coded. Always encode untrusted input before output, regardless of validation or cleaning performed. https://docs.microsoft.com/en-us/aspnet/core/security/cross-site-scripting?view=aspnetcore-3.1. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Low.ToString(),
		Confidence:  confidence.Low.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(?:public\sclass\s.*Controller|.*\s+:\s+Controller)(?:\n*.*)*return\s+.*\".*\+`),
		},
	}
}
// NewSQLInjectionOdbcCommand builds the HS-CSHARP-56 rule, which flags
// OdbcCommand objects built via string concatenation and then executed.
func NewSQLInjectionOdbcCommand() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-56",
		Name:        "SQL Injection OdbcCommand",
		Description: "Malicious user might get direct read and/or write access to the database. If the database is poorly configured the attacker might even get Remote Code Execution (RCE) on the machine running the database. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.High.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`.*\s*new\sOdbcCommand\(.*\".*\+(?:.*\n*)*.ExecuteReader\(`),
		},
	}
}
// NewWeakHashingFunctionMd5OrSha1 builds the HS-CSHARP-57 rule, which flags
// instantiation of the MD5 or SHA1 crypto service providers.
func NewWeakHashingFunctionMd5OrSha1() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-57",
		Name:        "Weak hashing function md5 or sha1",
		Description: "MD5 or SHA1 have known collision weaknesses and are no longer considered strong hashing algorithms. For more information checkout the CWE-326 (https://cwe.mitre.org/data/definitions/326.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new\sSHA1CryptoServiceProvider\(`),
			regexp.MustCompile(`new\sMD5CryptoServiceProvider\(`),
		},
	}
}
// NewWeakHashingFunctionDESCrypto builds the HS-CSHARP-58 rule, which flags
// creation of DES or TripleDES cipher providers.
func NewWeakHashingFunctionDESCrypto() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-58",
		Name:        "Weak hashing function DES Crypto",
		Description: "DES Crypto have known collision weaknesses and are no longer considered strong hashing algorithms. For more information checkout the CWE-326 (https://cwe.mitre.org/data/definitions/326.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new\sTripleDESCryptoServiceProvider\(`),
			regexp.MustCompile(`new\sDESCryptoServiceProvider\(`),
			regexp.MustCompile(`TripleDES\.Create\(`),
			regexp.MustCompile(`DES\.Create\(`),
		},
	}
}
// NewNoUseCipherMode builds the HS-CSHARP-59 rule, which flags the insecure
// block cipher modes ECB, OFB, CTS and CFB.
func NewNoUseCipherMode() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-59",
		Name:        "No Use Cipher mode",
		Description: "This mode is not recommended because it opens the door to various security exploits. If the plain text to be encrypted contains substantial repetitions, it is possible that the cipher text will be broken one block at a time. You can also use block analysis to determine the encryption key. In addition, an active opponent can replace and exchange individual blocks without detection, which allows the blocks to be saved and inserted into the stream at other points without detection. ECB and OFB mode will produce the same result for identical blocks. The use of AES in CBC mode with an HMAC is recommended, ensuring integrity and confidentiality. https://docs.microsoft.com/en-us/visualstudio/code-quality/ca5358?view=vs-2019. For more information checkout the CWE-326 (https://cwe.mitre.org/data/definitions/326.html) and CWE-327 (https://cwe.mitre.org/data/definitions/327.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`CipherMode\.ECB`),
			regexp.MustCompile(`CipherMode\.OFB`),
			regexp.MustCompile(`CipherMode\.CTS`),
			regexp.MustCompile(`CipherMode\.CFB`),
		},
	}
}
// NewDebugBuildEnabled builds the HS-CSHARP-60 rule. It flags a
// <compilation ... debug="true"> setting in the configuration file.
func NewDebugBuildEnabled() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID:          "HS-CSHARP-60",
			Name:        "Debug Build Enabled",
			Description: "Binaries compiled in debug mode can leak detailed stack traces and debugging messages to attackers. Disable debug builds by setting the debug attribute to false. For more information checkout the CWE-11 (https://cwe.mitre.org/data/definitions/11.html) advisory.",
			Severity:    severities.Medium.ToString(),
			Confidence:  confidence.Medium.ToString(),
		},
		Type: text.Regular,
		Expressions: []*regexp.Regexp{
			// Fix: ['|"] also matched a literal '|'; ['"] matches only quotes.
			regexp.MustCompile(`\<compilation(\s|.)*debug\s*=\s*['"]true['"]`),
		},
	}
}
// NewVulnerablePackageReference builds the HS-CSHARP-61 rule.
// NOTE(review): the pattern is pinned to one exact package/version/framework
// string and will miss any other vulnerable reference — confirm this is the
// intended (test-fixture) scope.
func NewVulnerablePackageReference() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-61",
		Name:        "Vulnerable Package Reference",
		Description: "Dependencies on open source frameworks and packages introduce additional vulnerabilities into the runtime environment. Vulnerabilities in open source libraries are continuously discovered and documented in publicly available vulnerability databases. Attackers can recognize a package being used by an application, and leverage known vulnerabilities in the library to attack the application. For more information checkout the CWE-937 (https://cwe.mitre.org/data/definitions/937.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`<package id="bootstrap" version="3\.0\.0" targetFramework="net462"/>`),
		},
	}
}
// NewCorsAllowOriginWildCard builds the HS-CSHARP-62 rule, which flags a
// CORS policy that allows any origin.
func NewCorsAllowOriginWildCard() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-62",
		Name:        "Cors Allow Origin Wild Card",
		Description: "Cross-Origin Resource Sharing (CORS) allows a service to disable the browser’s Same-origin policy, which prevents scripts on an attacker-controlled domain from accessing resources and data hosted on a different domain. The CORS Access-Control-Allow-Origin HTTP header specifies the domain with permission to invoke a cross-origin service and view the response data. Configuring the Access-Control-Allow-Origin header with a wildcard (*) can allow code running on an attacker-controlled domain to view responses containing sensitive data. For more information checkout the CWE-942 (https://cwe.mitre.org/data/definitions/942.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.High.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`app\.UseCors\(builder => builder\.AllowAnyOrigin\(\)\);`),
		},
	}
}
// NewMissingAntiForgeryTokenAttribute builds the HS-CSHARP-63 rule, which
// flags HTTP verb attributes whose action lacks [ValidateAntiForgeryToken].
func NewMissingAntiForgeryTokenAttribute() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-63",
		Name:        "Missing Anti Forgery Token Attribute",
		Description: "Cross Site Request Forgery attacks occur when a victim authenticates to a target web site and then visits a malicious web page. The malicious web page then sends a fake HTTP request (GET, POST, etc.) back to the target website. The victim’s valid authentication cookie from the target web site is automatically included in the malicious request, sent to the target web site, and processed as a valid transaction under the victim’s identity. For more information checkout the CWE-352 (https://cwe.mitre.org/data/definitions/352.html) advisory.",
		Severity:    severities.Info.ToString(),
		Confidence:  confidence.Low.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "ValidateAntiForgeryToken" between
		// the verb attribute and the ActionResult (RE2 has no lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(\[HttpGet\]|\[HttpPost\]|\[HttpPut\]|\[HttpDelete\])(([^V]|V[^a]|Va[^l]|Val[^i]|Vali[^d]|Valid[^a]|Valida[^t]|Validat[^e]|Validate[^A]|ValidateA[^n]|ValidateAn[^t]|ValidateAnt[^i]|ValidateAnti[^F]|ValidateAntiF[^o]|ValidateAntiFo[^r]|ValidateAntiFor[^g]|ValidateAntiForg[^e]|ValidateAntiForge[^r]|ValidateAntiForger[^y]|ValidateAntiForgery[^T]|ValidateAntiForgeryT[^o]|ValidateAntiForgeryTo[^k]|ValidateAntiForgeryTok[^e]|ValidateAntiForgeryToke[^n])*)(ActionResult)`),
		},
	}
}
// NewUnvalidatedWebFormsRedirect builds the HS-CSHARP-64 rule, which flags
// Response.Redirect fed directly from a query-string parameter.
func NewUnvalidatedWebFormsRedirect() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-64",
		Name:        "Unvalidated Web Forms Redirect",
		Description: "Passing unvalidated redirect locations to the Response.Redirect method can allow attackers to send users to malicious web sites. This can allow attackers to perform phishing attacks and distribute malware to victims. For more information checkout the CWE-601 (https://cwe.mitre.org/data/definitions/601.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Low.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`Response\.Redirect\(Request\.QueryString\[".*"\]\)`),
		},
	}
}
// NewIdentityPasswordLockoutDisabled builds the HS-CSHARP-65 rule, which
// flags CheckPasswordSignInAsync called with lockoutOnFailure set to false.
func NewIdentityPasswordLockoutDisabled() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-65",
		Name:        "Identity Password Lockout Disabled",
		Description: "Password lockout mechanisms help prevent continuous brute force attacks again user accounts by disabling an account for a period of time after a number of invalid attempts. The ASP.NET Identity SignInManager protects against brute force attacks if the lockout parameter is set to true. For more information checkout the CWE-307 (https://cwe.mitre.org/data/definitions/307.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`CheckPasswordSignInAsync\(.*, .*, false\)`),
		},
	}
}
// NewRawInlineExpression builds the HS-CSHARP-66 rule, which flags the raw
// WebForms inline write syntax <%= ... %>.
func NewRawInlineExpression() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-66",
		Name:        "Raw Inline Expression",
		Description: "Data is written to the browser using a raw write: <%= var %>. This can result in Cross-Site Scripting (XSS) vulnerabilities if the data source is considered untrusted or dynamic (request parameters, database, web service, etc.). Instead of using a raw write, use the inline HTML encoded shortcut (<%: var %>) to automatically HTML encode data before writing it to the browser. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<\%=.*\%\>`),
		},
	}
}
// NewRawBindingExpression builds the HS-CSHARP-67 rule, which flags raw
// binding expressions <%# ... %> (the encoded form is <%#: ... %>).
func NewRawBindingExpression() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-67",
		Name:        "Raw Binding Expression",
		Description: "Data is written to the browser using a raw binding expression: <%# Item.Variable %>. This can result in Cross-Site Scripting (XSS) vulnerabilities if the data source is considered untrusted or dynamic (request parameters, database, web service, etc.). Instead of using a raw binding expression, use the HTML encoded binding shortcut (<%#: Item.Variable %>) to automatically HTML encode data before writing it to the browser. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`\<\%#[^:].*\%\>`),
		},
	}
}
// NewRawWriteLiteralMethod builds the HS-CSHARP-68 rule, which flags any use
// of the raw Razor WriteLiteral method.
func NewRawWriteLiteralMethod() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-68",
		Name:        "Raw Write Literal Method",
		Description: "Data is written to the browser using the raw WriteLiteral method. This can result in Cross-Site Scripting (XSS) vulnerabilities if the data source is considered untrusted or dynamic (request parameters, database, web service, etc.). Instead of using the raw WriteLiteral method, use a Razor helper that performs automatic HTML encoding before writing it to the browser. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`WriteLiteral\(`),
		},
	}
}
// NewUnencodedWebFormsProperty builds the HS-CSHARP-69 rule, which flags
// assignments to litDetails.Text with no HtmlEncode call before the ";".
func NewUnencodedWebFormsProperty() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-69",
		Name:        "Unencoded Web Forms Property",
		Description: "Data is written to the browser using a WebForms property that does not perform output encoding. This can result in Cross-Site Scripting (XSS) vulnerabilities if the data source is considered untrusted or dynamic (request parameters, database, web service, etc.). WebForms controls are often found in HTML contexts, but can also appear in other contexts such as JavaScript, HTML Attribute, or URL. Fixing the vulnerability requires the appropriate Web Protection Library (aka AntiXSS) context-specific method to encode the data before setting the WebForms property. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "HtmlEncode" (no RE2 lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(litDetails\.Text)(([^H]|H[^t]|Ht[^m]|Htm[^l]|Html[^E]|HtmlE[^n]|HtmlEn[^c]|HtmlEnc[^o]|HtmlEnco[^d]|HtmlEncod[^e])*)(;)`),
		},
	}
}
// NewUnencodedLabelText builds the HS-CSHARP-70 rule, which flags
// assignments to lblDetails.Text with no HtmlEncode call before the ";".
func NewUnencodedLabelText() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-70",
		Name:        "Unencoded Label Text",
		Description: "Data is written to the browser using the raw Label.Text method. This can result in Cross-Site Scripting (XSS) vulnerabilities if the data source is considered untrusted or dynamic (request parameters, database, web service, etc.). Label controls are often found in HTML contexts, but can also appear in other contexts such as JavaScript, HTML Attribute, or URL. Fixing the vulnerability requires the appropriate Web Protection Library (aka AntiXSS) context-specific method to encode the data before setting the Label.Text property. For more information checkout the CWE-79 (https://cwe.mitre.org/data/definitions/79.html) advisory.",
		Severity:    severities.Medium.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "HtmlEncode" (no RE2 lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(lblDetails\.Text)(([^H]|H[^t]|Ht[^m]|Htm[^l]|Html[^E]|HtmlE[^n]|HtmlEn[^c]|HtmlEnc[^o]|HtmlEnco[^d]|HtmlEncod[^e])*)(;)`),
		},
	}
}
// NewWeakRandomNumberGenerator builds the HS-CSHARP-71 rule, which flags use
// of the non-cryptographic System.Random generator.
func NewWeakRandomNumberGenerator() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-71",
		Name:        "Weak Random Number Generator",
		Description: "The use of a predictable random value can lead to vulnerabilities when used in certain security critical contexts. For more information access: (https://security-code-scan.github.io/#SCS0005) or (https://cwe.mitre.org/data/definitions/338.html).",
		Severity:    severities.Low.ToString(),
		Confidence:  confidence.High.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`new Random\(\)`),
		},
	}
}
// NewWeakRsaKeyLength builds the HS-CSHARP-72 rule, which flags
// RSACryptoServiceProvider constructed with no key size or one below 2048.
func NewWeakRsaKeyLength() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-72",
		Name:        "Weak Rsa Key Length",
		Description: "Due to advances in cryptanalysis attacks and cloud computing capabilities, the National Institute of Standards and Technology (NIST) deprecated 1024-bit RSA keys on January 1, 2011. The Certificate Authority Browser Forum, along with the latest version of all browsers, currently mandates a minimum key size of 2048-bits for all RSA keys. For more information checkout the CWE-326 (https://cwe.mitre.org/data/definitions/326.html) advisory.",
		Severity:    severities.Critical.ToString(),
		Confidence:  confidence.Medium.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// The second group matches "()" (implicit default) or a numeric
		// argument provably smaller than 2048.
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(new RSACryptoServiceProvider\()(\)|[0-9][^\d]|[0-9]{2}[^\d]|[0-9]{3}[^\d]|[0-1][0-9]{3}[^\d]|20[0-3][0-9]|204[0-7])`),
		},
	}
}
// NewXmlReaderExternalEntityExpansion builds the HS-CSHARP-73 rule, which
// flags XmlReaderSettings initializers that never mention "Prohibit".
func NewXmlReaderExternalEntityExpansion() *text.Rule {
	meta := engine.Metadata{
		ID:          "HS-CSHARP-73",
		Name:        "Xml Reader External Entity Expansion",
		Description: "XML External Entity (XXE) vulnerabilities occur when applications process untrusted XML data without disabling external entities and DTD processing. Processing untrusted XML data with a vulnerable parser can allow attackers to extract data from the server, perform denial of service attacks, and in some cases gain remote code execution. The XmlReaderSettings and XmlTextReader classes are vulnerable to XXE attacks when setting the DtdProcessing property to DtdProcessing.Parse or the ProhibitDtd property to false. To prevent XmlReader XXE attacks, avoid using the deprecated ProhibitDtd property. Set the DtdProcessing property to DtdProcessing.Prohibit. For more information checkout the CWE-611 (https://cwe.mitre.org/data/definitions/611.html) advisory.",
		Severity:    severities.High.ToString(),
		Confidence:  confidence.High.ToString(),
	}
	return &text.Rule{
		Metadata: meta,
		Type:     text.Regular,
		// Alternation emulates absence of "Prohibit" between the settings
		// construction and the closing brace (RE2 has no lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(new\sXmlReaderSettings)(([^P]|P[^r]|Pr[^o]|Pro[^h]|Proh[^i]|Prohi[^b]|Prohib[^i]|Prohibi[^t])*)(})`),
		},
	}
}
// NewLdapInjectionDirectoryEntry builds the HS-CSHARP-74 rule. It flags
// construction of a DirectoryEntry whose LDAP path interpolates a value ("{")
// with no Encoder.* escaping call before the terminating semicolon.
func NewLdapInjectionDirectoryEntry() *text.Rule {
	return &text.Rule{
		Metadata: engine.Metadata{
			ID: "HS-CSHARP-74",
			Name: "Ldap Injection Directory Entry",
			Description: "LDAP Injection vulnerabilities occur when untrusted data is concatenated into a LDAP Path or Filter expression without properly escaping control characters. This can allow attackers to change the meaning of an LDAP query and gain access to resources for which they are not authorized. Fixing the LDAP Injection Directory Entry vulnerability requires untrusted data to be encoded using the appropriate Web Protection Library (aka AntiXSS) LDAP encoding method: Encoder.LdapDistinguishedNameEncode(). For more information checkout the CWE-90 (https://cwe.mitre.org/data/definitions/90.html) advisory.",
			Severity: severities.High.ToString(),
			Confidence: confidence.Medium.ToString(),
		},
		Type: text.Regular,
		// The alternation emulates "no occurrence of Encoder" between the
		// match start and the terminating ";" (RE2 has no lookahead).
		Expressions: []*regexp.Regexp{
			regexp.MustCompile(`(new\sDirectoryEntry\(.*LDAP.*\{)(([^E]|E[^n]|En[^c]|Enc[^o]|Enco[^d]|Encod[^e]|Encode[^r])*)(;)`),
		},
	}
} | internal/services/engines/csharp/rules.go | 0.546012 | 0.421195 | rules.go | starcoder |
package qhull
import (
"fmt"
"github.com/engoengine/glm"
"github.com/EngoEngine/math"
)
const (
	// epsilonbase scales the point-cloud extent into the hull's distance
	// tolerance (see CalculateEpsilon).
	epsilonbase = 0.0001 // arbitrary value that will require testing
)
type (
	// Edge is a quickhull utility struct for edges. It is a half-edge:
	// each geometric edge is represented by two Edges that are Twins.
	Edge struct {
		// Tail is the index (into the point slice) of the vertex this
		// half-edge starts from.
		Tail int
		// Prev/Next walk the face loop; Twin is the opposite half-edge
		// on the adjacent face.
		Prev, Next, Twin *Edge
		// Face is the face this half-edge belongs to.
		Face *Face
	}
	// Face is a quickhull utility struct for faces (always triangles).
	Face struct {
		// Edges are the three half-edges of the triangle.
		Edges [3]*Edge
		// Faces are the three adjacent faces.
		Faces [3]*Face
		// Vertices are the indices of the triangle corners.
		Vertices [3]int
		// Conflicts lists points not yet absorbed into the hull that
		// are in front of this face.
		Conflicts []Conflict
		// tmp variables
		Visited bool
		Normal glm.Vec3
		Point glm.Vec3
	}
	// Conflict is a vertex that isn't inside the convex hull yet
	Conflict struct {
		// Distance is the point's distance from the face plane.
		Distance float32
		// Index is the point's index in the point slice.
		Index int
	}
)
// canSee reports whether point lies strictly on the positive side of the
// face plane, i.e. the side its Normal points toward.
func (f *Face) canSee(point *glm.Vec3) bool {
	toPoint := point.Sub(&f.Point)
	return toPoint.Dot(&f.Normal) > 0
}
// CleanVisited resets the Visited flag on every face so a fresh traversal
// can run over the convex hull.
func CleanVisited(faces []*Face) {
	for i := range faces {
		faces[i].Visited = false
	}
}
// FindHorizon finds the horizon of the conflict: the ring of half-edges that
// separates faces visible from point from hidden ones, starting the search
// at face (which is presumably visible from point — see findHorizon).
func FindHorizon(face *Face, point *glm.Vec3) []*Edge {
	return findHorizon(face, nil, point)
}
func findHorizon(face *Face, edge *Edge, point *glm.Vec3) []*Edge {
fmt.Println("face, edge", fmt.Sprintf("%p", face), fmt.Sprintf("%p", edge), " visited ", face.Visited)
if edge == nil {
e := face.Edges[0]
fmt.Println("nil 1")
edges := findHorizon(e.Twin.Face, e.Twin, point)
e = e.Next
fmt.Println("nil 2")
edges = append(edges, findHorizon(e.Twin.Face, e.Twin, point)...)
e = e.Next
fmt.Println("nil 3")
edges = append(edges, findHorizon(e.Twin.Face, e.Twin, point)...)
return edges
}
if face.Visited {
fmt.Println("visited")
return nil
}
if !face.canSee(point) {
fmt.Printf("add %p\n", edge.Twin)
return []*Edge{edge.Twin}
}
fmt.Println("setting visited")
face.Visited = true
e := edge.Next
fmt.Print("non-nil 1 ")
edges := findHorizon(e.Twin.Face, e.Twin, point)
fmt.Println(edges)
e = e.Next
fmt.Print("non-nil 2 ")
edges = append(edges, findHorizon(e.Twin.Face, e.Twin, point)...)
fmt.Println(edges)
return edges
}
// NextConflict returns the index of the face and conflict of the conflict
// with the highest distance from its associated plane, or (-1, -1) when no
// conflicts remain.
func NextConflict(faces []*Face) (int, int) {
	bestFace, bestConflict := -1, -1
	var bestDist float32
	for fi, face := range faces {
		for ci := range face.Conflicts {
			if d := face.Conflicts[ci].Distance; d > bestDist {
				bestDist = d
				bestFace, bestConflict = fi, ci
			}
		}
	}
	return bestFace, bestConflict
}
// FindExtremums returns the 6 indices and 6 vec3 of the extremums for each
// axis, formatted [minx, miny, minz, maxx, maxy, maxz].
//
// Fix: the min and max comparisons were each copy-pasted three times with
// identical conditions; the duplicates were redundant and have been removed
// (behavior is unchanged).
func FindExtremums(points []glm.Vec3) (extremumIndices [6]int, extremums [6]glm.Vec3) {
	// Seed with sentinel values so any real point replaces them.
	extremums = [6]glm.Vec3{
		{math.MaxFloat32, 0, 0}, {0, math.MaxFloat32, 0}, {0, 0, math.MaxFloat32},
		{-math.MaxFloat32, 0, 0}, {0, -math.MaxFloat32, 0}, {0, 0, -math.MaxFloat32},
	}
	for i := range points {
		for n := 0; n < 3; n++ {
			// Slots [0..2] track the minimum along axis n.
			if extremums[n][n] > points[i][n] {
				extremums[n] = points[i]
				extremumIndices[n] = i
			}
			// Slots [3..5] track the maximum along axis n.
			if extremums[3+n][n] < points[i][n] {
				extremums[3+n] = points[i]
				extremumIndices[3+n] = i
			}
		}
	}
	return
}
// CalculateEpsilon calculates the epsilon the algorithm should use given the
// extremums of the point cloud [minx, miny, minz, maxx, maxy, maxz]: the sum
// over the three axes of the larger absolute bound, scaled by epsilonbase*3.
func CalculateEpsilon(extremums [6]glm.Vec3) float32 {
	var extent float32
	for axis := 0; axis < 3; axis++ {
		lo := math.Abs(extremums[axis][axis])
		hi := math.Abs(extremums[3+axis][axis])
		extent += math.Max(lo, hi)
	}
	return epsilonbase * extent * 3
}
// BuildInitialTetrahedron builds the initial tetrahedron from the given 4
// indices into points, returning its four faces with edges, twins, adjacency
// and outward-facing winding fully wired up.
func BuildInitialTetrahedron(a, b, c, d int, points []glm.Vec3) []*Face {
	// Directed edge vectors between the four corners.
	ab := points[b].Sub(&points[a])
	ac := points[c].Sub(&points[a])
	cd := points[d].Sub(&points[c])
	ca := points[a].Sub(&points[c])
	ba := points[a].Sub(&points[b])
	bd := points[d].Sub(&points[b])
	dc := points[c].Sub(&points[d])
	db := points[b].Sub(&points[d])
	// The four triangular faces; each Normal is the cross product of two of
	// its edge vectors, each Point is one vertex on the face plane.
	f0 := &Face{Vertices: [3]int{a, b, c}, Normal: ac.Cross(&ab), Point: points[a]}
	f1 := &Face{Vertices: [3]int{c, d, a}, Normal: ca.Cross(&cd), Point: points[c]}
	f2 := &Face{Vertices: [3]int{b, a, d}, Normal: bd.Cross(&ba), Point: points[b]}
	f3 := &Face{Vertices: [3]int{d, c, b}, Normal: db.Cross(&dc), Point: points[d]}
	// Face adjacency, ordered to match each face's edge order below.
	f0.Faces = [3]*Face{f2, f3, f1}
	f1.Faces = [3]*Face{f3, f2, f0}
	f2.Faces = [3]*Face{f0, f1, f3}
	f3.Faces = [3]*Face{f1, f0, f2}
	// edges of f0
	e00 := &Edge{Tail: a, Face: f0}
	e01 := &Edge{Tail: b, Face: f0}
	e02 := &Edge{Tail: c, Face: f0}
	// edges of f1
	e10 := &Edge{Tail: c, Face: f1}
	e11 := &Edge{Tail: d, Face: f1}
	e12 := &Edge{Tail: a, Face: f1}
	// edges of f2
	e20 := &Edge{Tail: b, Face: f2}
	e21 := &Edge{Tail: a, Face: f2}
	e22 := &Edge{Tail: d, Face: f2}
	// edges of f3
	e30 := &Edge{Tail: d, Face: f3}
	e31 := &Edge{Tail: c, Face: f3}
	e32 := &Edge{Tail: b, Face: f3}
	// Connect the faces to the edges
	f0.Edges = [3]*Edge{e00, e01, e02}
	f1.Edges = [3]*Edge{e10, e11, e12}
	f2.Edges = [3]*Edge{e20, e21, e22}
	f3.Edges = [3]*Edge{e30, e31, e32}
	//Setup twin edges (each pair traverses the same geometric edge in
	// opposite directions, e.g. e00 is a->b and e20 is b->a).
	e00.Twin, e20.Twin = e20, e00
	e01.Twin, e31.Twin = e31, e01
	e02.Twin, e12.Twin = e12, e02
	e10.Twin, e30.Twin = e30, e10
	e11.Twin, e21.Twin = e21, e11
	e22.Twin, e32.Twin = e32, e22
	// Circular connect the edges
	// e0*
	e00.Next, e00.Prev = e01, e02
	e01.Next, e01.Prev = e02, e00
	e02.Next, e02.Prev = e00, e01
	// e1*
	e10.Next, e10.Prev = e11, e12
	e11.Next, e11.Prev = e12, e10
	e12.Next, e12.Prev = e10, e11
	// e2*
	e20.Next, e20.Prev = e21, e22
	e21.Next, e21.Prev = e22, e20
	e22.Next, e22.Prev = e20, e21
	// e3*
	e30.Next, e30.Prev = e31, e32
	e31.Next, e31.Prev = e32, e30
	e32.Next, e32.Prev = e30, e31
	return []*Face{f0, f1, f2, f3}
} | geo/internal/qhull/qhull.go | 0.643441 | 0.458773 | qhull.go | starcoder |
package fractal
import (
"image"
"image/color"
"math"
)
const (
	// N is the DCT block size (blocks are N x N = 8 x 8).
	N int = 8
	// QUALITY scales the quantization table; 0 yields a flat table of 1s.
	QUALITY int = 0
)
var (
	// zigZag8 lists the 64 coordinates of an 8x8 block in JPEG zig-zag scan
	// order. NOTE(review): not referenced anywhere in this file — confirm it
	// is used elsewhere before removing.
	zigZag8 = [64]struct {row, col int} {
		{0, 0},
		{0, 1}, {1, 0},
		{2, 0}, {1, 1}, {0, 2},
		{0, 3}, {1, 2}, {2, 1}, {3, 0},
		{4, 0}, {3, 1}, {2, 2}, {1, 3}, {0, 4},
		{0, 5}, {1, 4}, {2, 3}, {3, 2}, {4, 1}, {5, 0},
		{6, 0}, {5, 1}, {4, 2}, {3, 3}, {2, 4}, {1, 5}, {0, 6},
		{0, 7}, {1, 6}, {2, 5}, {3, 4}, {4, 3}, {5, 2}, {6, 1}, {7, 0},
		{7, 1}, {6, 2}, {5, 3}, {4, 4}, {3, 5}, {2, 6}, {1, 7},
		{2, 7}, {3, 6}, {4, 5}, {5, 4}, {6, 3}, {7, 2},
		{7, 3}, {6, 4}, {5, 5}, {4, 6}, {3, 7},
		{4, 7}, {5, 6}, {6, 5}, {7, 4},
		{7, 5}, {6, 6}, {5, 7},
		{6, 7}, {7, 6},
		{7, 7},
	}
	// quantum is the quantization table, filled by init.
	quantum [N][N]int
	// c is the DCT basis matrix and cT its transpose, filled by init.
	c, cT [N][N]float64
)
// init precomputes the quantization table and the DCT basis matrix c along
// with its transpose cT.
func init() {
	// Quantization table; with QUALITY == 0 every entry is 1.
	for row := 0; row < N; row++ {
		for col := 0; col < N; col++ {
			quantum[row][col] = (1 + ((1 + row + col) * QUALITY))
		}
	}
	// The first basis row is the constant 1/sqrt(N).
	for col := 0; col < N; col++ {
		c[0][col] = 1.0 / math.Sqrt(float64(N))
		cT[col][0] = c[0][col]
	}
	// Remaining rows are the cosine basis functions; cT mirrors c.
	for row := 1; row < N; row++ {
		for col := 0; col < N; col++ {
			cc, rr := float64(col), float64(row)
			c[row][col] = math.Sqrt(2.0 / 8.0) * math.Cos(((2.0 * cc + 1.0) * rr * math.Pi) / (2.0 * 8.0))
			cT[col][row] = c[row][col]
		}
	}
}
// round returns x rounded to the nearest integer, with halves rounded away
// from zero.
//
// Delegates to math.Round, which implements exactly this rule and avoids the
// floating-point error the previous Floor(x+0.5)/Ceil(x-0.5) forms can hit
// for values just below a half boundary.
func round(x float64) float64 {
	return math.Round(x)
}
// ForwardDCT computes the 8x8 forward discrete cosine transform of in,
// writing rounded integer coefficients to out. Samples are level-shifted by
// -128 before the transform (out = c * (in - 128) * cT).
func ForwardDCT(in *[N][N]uint8, out *[N][N]int) {
	// tmp = (in - 128) * cT
	var tmp [N][N]float64
	for row := 0; row < N; row++ {
		for col := 0; col < N; col++ {
			for k := 0; k < N; k++ {
				tmp[row][col] += float64(int(in[row][k]) - 128) * cT[k][col]
			}
		}
	}
	// out = round(c * tmp)
	for row := 0; row < N; row++ {
		for col := 0; col < N; col++ {
			sum := 0.0
			for k := 0; k < N; k++ {
				sum += c[row][k] * tmp[k][col]
			}
			out[row][col] = int(round(sum))
		}
	}
}
// InverseDCT applies the 8x8 inverse discrete cosine transform to in,
// restores the +128 level shift, clamps to [0, 255] and writes the rounded
// samples to out (out = clamp(cT * (in * c) + 128)).
func InverseDCT(in, out *[N][N]int) {
	// tmp = in * c
	var tmp [N][N]float64
	for row := 0; row < N; row++ {
		for col := 0; col < N; col++ {
			for k := 0; k < N; k++ {
				tmp[row][col] += float64(in[row][k]) * c[k][col]
			}
		}
	}
	// out = clamp(round(cT * tmp + 128))
	for row := 0; row < N; row++ {
		for col := 0; col < N; col++ {
			sum := 0.0
			for k := 0; k < N; k++ {
				sum += cT[row][k] * tmp[k][col]
			}
			sum += 128
			switch {
			case sum < 0:
				out[row][col] = 0
			case sum > 255:
				out[row][col] = 255
			default:
				out[row][col] = int(round(sum))
			}
		}
	}
}
// DCTCoder converts input into a grayscale image of quantized DCT
// coefficients: each 8x8 pixel block is transformed with ForwardDCT and
// every coefficient is stored as coeff/16 + 128 (mid-gray == zero).
// NOTE(review): assumes bounds start at (0, 0) and dimensions are
// multiples of 8; coefficients outside [-2048, 2032] would wrap when
// narrowed to uint8 — confirm the expected input range.
func DCTCoder(input image.Image) *image.Gray {
	in, dct, output := [8][8]uint8 {}, [8][8]int {}, image.NewGray(input.Bounds())
	width, height := input.Bounds().Max.X, input.Bounds().Max.Y
	for x := 0; x < width; x += 8 {
		for y := 0; y < height; y += 8 {
			// Gather one 8x8 block; RGBA returns 16-bit channels, so >>8
			// recovers the 8-bit value (red channel used as gray).
			for i := 0; i < 8; i++ {
				for j := 0; j < 8; j++ {
					c, _, _, _ := input.At(x+i, y+j).RGBA()
					in[i][j], dct[i][j] = uint8(c >> 8), 0
				}
			}
			ForwardDCT(&in, &dct)
			// Uniform quantization by 16, re-biased so zero maps to 128.
			for i := 0; i < 8; i++ {
				for j := 0; j < 8; j++ {
					output.SetGray(x+i, y+j, color.Gray {uint8((dct[i][j]/16)+128)})
				}
			}
		}
	}
	return output
}
// DCTDecoder reverses DCTCoder: each stored byte is expanded back to a
// coefficient via 16*(v-128), then every 8x8 block is reconstructed with
// InverseDCT (which clamps samples to [0, 255]). Because DCTCoder's
// division by 16 discards information, the result is an approximation of
// the original image, not an exact inverse.
func DCTDecoder(input image.Image) *image.Gray {
	in, idct, output := [8][8]int {}, [8][8]int {}, image.NewGray(input.Bounds())
	width, height := input.Bounds().Max.X, input.Bounds().Max.Y
	for x := 0; x < width; x += 8 {
		for y := 0; y < height; y += 8 {
			// De-quantize one stored block: byte -> 16*(value-128).
			for i := 0; i < 8; i++ {
				for j := 0; j < 8; j++ {
					r, _, _, _ := input.At(x+i, y+j).RGBA()
					in[i][j], idct[i][j] = 16 * (int(r >> 8) - 128), 0
				}
			}
			InverseDCT(&in, &idct)
			for i := 0; i < 8; i++ {
				for j := 0; j < 8; j++ {
					output.SetGray(x+i, y+j, color.Gray {uint8(idct[i][j])})
				}
			}
		}
	}
	return output
}
func DCTMap(input image.Image) *image.Gray {
bounds := input.Bounds()
output := image.NewGray(bounds)
width, height := bounds.Max.X, bounds.Max.Y
sx, sy := width/8, height/8
for x := 0; x < 8; x++ {
for y := 0; y < 8; y++ {
for i := 0; i < sx; i++ {
for j := 0; j < sy; j++ {
r, _, _, _ := input.At(x+i*8, y+j*8).RGBA()
output.SetGray(i+x*sx, j+y*sx, color.Gray {uint8(r >> 8)})
}
}
}
}
return output
}
func DCTIMap(input image.Image) *image.Gray {
bounds := input.Bounds()
output := image.NewGray(bounds)
width, height := bounds.Max.X, bounds.Max.Y
sx, sy := width/8, height/8
for x := 0; x < 8; x++ {
for y := 0; y < 8; y++ {
for i := 0; i < sx; i++ {
for j := 0; j < sy; j++ {
r, _, _, _ := input.At(i+x*sx, j+y*sx).RGBA()
output.SetGray(x+i*8, y+j*8, color.Gray {uint8(r >> 8)})
}
}
}
}
return output
}
func Paeth8(input image.Image) *image.Gray {
bounds := input.Bounds()
output := image.NewGray(bounds)
width, height := bounds.Max.X, bounds.Max.Y
abs := func(x int32) int32 {
if x < 0 {
return -x
} else {
return x
}
}
for y := 0; y < height; y++ {
for x := 0; x < width; x++ {
var a, b, c int32
if x > 0 {
z, _, _, _ := input.At(x - 1, y).RGBA()
a = int32(z >> 8)
if y > 0 {
z, _, _, _ = input.At(x - 1, y - 1).RGBA()
c = int32(z >> 8)
}
}
if y > 0 {
z, _, _, _ := input.At(x, y - 1).RGBA()
b = int32(z >> 8)
}
p := a + b - c
pa, pb, pc := abs(p - a), abs(p - b), abs(p - c)
if pa <= pb && pa <= pc {
p = a
} else if pb <= pc {
p = b
} else {
p = c
}
z, _, _, _ := input.At(x, y).RGBA()
d := int32(z >> 8)
output.SetGray(x, y, color.Gray {uint8((d-p)%256)})
}
}
return output
}
func IPaeth8(input image.Image) *image.Gray {
bounds := input.Bounds()
output := image.NewGray(bounds)
width, height := bounds.Max.X, bounds.Max.Y
abs := func(x int32) int32 {
if x < 0 {
return -x
} else {
return x
}
}
for y := 0; y < height; y++ {
for x := 0; x < width; x++ {
var a, b, c int32
if x > 0 {
z, _, _, _ := output.At(x - 1, y).RGBA()
a = int32(z >> 8)
if y > 0 {
z, _, _, _ = output.At(x - 1, y - 1).RGBA()
c = int32(z >> 8)
}
}
if y > 0 {
z, _, _, _ := output.At(x, y - 1).RGBA()
b = int32(z >> 8)
}
p := a + b - c
pa, pb, pc := abs(p - a), abs(p - b), abs(p - c)
if pa <= pb && pa <= pc {
p = a
} else if pb <= pc {
p = b
} else {
p = c
}
z, _, _, _ := input.At(x, y).RGBA()
d := int32(z >> 8)
output.SetGray(x, y, color.Gray {uint8((d + p) % 256)})
}
}
return output
} | _vendor/src/github.com/pointlander/compress/fractal/dct.go | 0.548674 | 0.599368 | dct.go | starcoder |
package bintree
import (
"strconv"
"strings"
)
// New allocates a binary tree node and initializes it with the optional
// first value (see Init).
func New(value ...interface{}) *Tree {
	t := &Tree{}
	return t.Init(value...)
}
// Tree is a binary tree node. Value holds the node's payload; left and
// right point to the child subtrees (nil means no child).
type Tree struct {
	Value interface{}
	left, right *Tree
}
// Init resets the node: both children are cleared and Value becomes the
// first element of value, or nil when none is supplied. It returns t so
// calls can be chained.
func (t *Tree) Init(value ...interface{}) *Tree {
	var v interface{}
	if len(value) > 0 {
		v = value[0]
	}
	t.Value = v
	t.left = nil
	t.right = nil
	return t
}
// Left returns the left child of t, or nil when there is none.
func (t *Tree) Left() *Tree {
	return t.left
}
// Right returns the right child of t, or nil when there is none.
func (t *Tree) Right() *Tree {
	return t.right
}
// insertLeft splices node in between t and t's current left child: node
// becomes the left child of t and inherits the previous left subtree
// (when one exists). The inserted node is returned.
func (t *Tree) insertLeft(node *Tree) *Tree {
	if t.left != nil {
		node.left = t.left
	}
	t.left = node
	return node
}
// InsertLeft wraps value in a fresh node and splices it in between t and
// t's current left child, returning the new node.
func (t *Tree) InsertLeft(value interface{}) *Tree {
	node := &Tree{Value: value}
	return t.insertLeft(node)
}
// insertRight splices node in between t and t's current right child: node
// becomes the right child of t and inherits the previous right subtree
// (when one exists). The inserted node is returned.
func (t *Tree) insertRight(node *Tree) *Tree {
	if t.right != nil {
		node.right = t.right
	}
	t.right = node
	return node
}
// InsertRight wraps value in a fresh node and splices it in between t and
// t's current right child, returning the new node.
func (t *Tree) InsertRight(value interface{}) *Tree {
	node := &Tree{Value: value}
	return t.insertRight(node)
}
// delete is an unimplemented placeholder for node removal; it currently
// does nothing.
func (t *Tree) delete() {
}
// PreOrder visits t's nodes root-left-right, calling f on each node.
func (t *Tree) PreOrder(f func(*Tree)) {
	t.order(f, "pre")
}
// InOrder visits t's nodes left-root-right, calling f on each node.
func (t *Tree) InOrder(f func(*Tree)) {
	t.order(f, "in")
}
// PostOrder visits t's nodes left-right-root, calling f on each node.
func (t *Tree) PostOrder(f func(*Tree)) {
	t.order(f, "post")
}
// order walks the tree rooted at t and invokes f on every node, with the
// position of the visit selected by orderType: "pre" (before the left
// subtree), "in" (between the subtrees) or "post" (after the right
// subtree). Any other orderType traverses without calling f.
func (t *Tree) order(f func(*Tree), orderType string) {
	var walk func(node *Tree)
	walk = func(node *Tree) {
		if node == nil {
			return
		}
		if orderType == "pre" {
			f(node)
		}
		walk(node.left)
		if orderType == "in" {
			f(node)
		}
		walk(node.right)
		if orderType == "post" {
			f(node)
		}
	}
	walk(t)
}
// LevelOrder visits t's nodes level by level, left to right, calling f on
// each. When onLevelChange is supplied, its first function is invoked once
// before each level's nodes are visited (the root level included).
func (t *Tree) LevelOrder(f func(*Tree), onLevelChange ...func()) {
	// Bucket nodes by depth via a pre-order walk; each level's slice keeps
	// left-to-right order. Level 0 is pre-seeded so the loop below always
	// starts.
	order := map[int][]*Tree{}
	order[0] = []*Tree{}
	var traverse func(bt *Tree, level int, order map[int][]*Tree)
	traverse = func(bt *Tree, level int, order map[int][]*Tree) {
		if bt == nil {
			return
		}
		order[level] = append(order[level], bt)
		traverse(bt.left, level+1, order)
		traverse(bt.right, level+1, order)
	}
	traverse(t, 0, order)
	// Emit consecutive levels until the first missing depth.
	for i := 0; order[i] != nil; i++ {
		if len(onLevelChange) > 0 {
			onLevelChange[0]()
		}
		for _, node := range order[i] {
			f(node)
		}
	}
}
// BuildTreeInt builds a binary tree of int values from a textual
// description. Each whitespace-separated token has the form
// "root.left.right" (e.g. "1.2.3"), where 0 stands for "no child";
// tokens may span multiple lines. The root of the resulting tree is the
// first field of the first token.
// NOTE(review): strconv.Atoi errors are ignored, so malformed fields
// silently become 0; and a token whose root field is 0 would dereference
// a nil node below — confirm inputs never use 0 as a node value.
func BuildTreeInt(s string) *Tree {
	// linkMap: node value -> [left value, right value] (0 = none).
	linkMap := map[int][2]int{}
	// treeMap: node value -> allocated node (value 0 stays nil).
	treeMap := map[int]*Tree{}
	rows := strings.Split(strings.TrimSpace(s), "\n")
	rootKey, _ := strconv.Atoi(strings.Split(strings.TrimSpace(rows[0]), ".")[0])
	// First pass: allocate one node per distinct non-zero value and
	// record parent/child links by value.
	for _, row := range rows {
		trees := strings.Split(row, " ")
		for _, tree := range trees {
			t := strings.Split(strings.TrimSpace(tree), ".")
			root, _ := strconv.Atoi(t[0])
			left, _ := strconv.Atoi(t[1])
			right, _ := strconv.Atoi(t[2])
			linkMap[root] = [2]int{left, right}
			if root != 0 {
				treeMap[root] = &Tree{Value: root}
			}
			if treeMap[left] == nil && left != 0 {
				treeMap[left] = &Tree{Value: left}
			}
			if treeMap[right] == nil && right != 0 {
				treeMap[right] = &Tree{Value: right}
			}
		}
	}
	// Second pass: wire the recorded links (index 0 resolves to nil).
	for k := range linkMap {
		leftIndex := linkMap[k][0]
		rightIndex := linkMap[k][1]
		treeMap[k].left = treeMap[leftIndex]
		treeMap[k].right = treeMap[rightIndex]
	}
	return treeMap[rootKey]
}
// Useful test functions for validating (mostly) string outputs match
// what is expected.
package assert
import (
"bytes"
"strings"
"testing"
)
// expectedError wraps an expected error message for exact comparison.
type expectedError struct {
	expected string
}

// NewExpectedError returns a matcher for the exact error message expect.
// NOTE(review): returns an unexported type from an exported constructor,
// which restricts callers to the method set; consider exporting the type.
func NewExpectedError(expect string) *expectedError {
	return &expectedError{expected: expect}
}

// Matches fails the test when actual is nil or when its message differs
// from the expected string; mismatches are reported character-precisely
// via CheckStringDivergence.
func (e *expectedError) Matches(t *testing.T, actual error) {
	if actual == nil {
		t.Fatalf("Unexpected success")
	}
	CheckStringDivergence(t, e.expected, actual.Error())
}
// ExpectedMessages holds a set of substrings that test output is expected
// (or expected not) to contain.
type ExpectedMessages struct {
	expected []string
}

// NewExpectedMessages builds an ExpectedMessages from the given substrings.
func NewExpectedMessages(expect ...string) *ExpectedMessages {
	return &ExpectedMessages{expected: expect}
}

// ContainedIn fails the test unless every expected substring appears in
// actual; an empty actual always fails.
func (e *ExpectedMessages) ContainedIn(t *testing.T, actual string) {
	if len(actual) == 0 {
		t.Fatalf("No output in which to search for expected message(s).")
		return
	}
	for _, want := range e.expected {
		if strings.Contains(actual, want) {
			continue
		}
		t.Fatalf("Actual output doesn't contain expected output:\n"+
			"Exp:\n%s\nAct:\n%v\n", want, actual)
	}
}

// NotContainedIn fails the test if any expected substring appears in
// actual; an empty actual always fails.
func (e *ExpectedMessages) NotContainedIn(t *testing.T, actual string) {
	if len(actual) == 0 {
		t.Fatalf("No output in which to search for expected message(s).")
		return
	}
	for _, unwanted := range e.expected {
		if strings.Contains(actual, unwanted) {
			t.Fatalf("Actual output contain unexpected output:\n"+
				"NotExp:\n%s\nAct:\n%v\n", unwanted, actual)
		}
	}
}

// ContainedInAny fails the test unless each expected substring appears in
// at least one of the actual strings; an empty slice always fails.
func (e *ExpectedMessages) ContainedInAny(t *testing.T, actual []string) {
	if len(actual) == 0 {
		t.Fatalf("No output in which to search for expected message(s).")
		return
	}
	anyContains := func(want string) bool {
		for _, act := range actual {
			if strings.Contains(act, want) {
				return true
			}
		}
		return false
	}
	for _, want := range e.expected {
		if !anyContains(want) {
			t.Fatalf("Actual output doesn't contain expected output:\n"+
				"Exp:\n%s\nAct:\n%v\n", want, actual)
		}
	}
}
// CheckStringDivergence compares expected and actual output and, when they
// differ, fails the test with a report pinpointing the first point of
// divergence (marked "###") plus a short window of text from each side.
// Intended for debugging large generated outputs.
// NOTE(review): iteration mixes runes and bytes — `range actOut` yields
// runes while expOutCopy is consumed one byte at a time — so positions and
// the dump windows may be misaligned on multi-byte UTF-8 input; confirm
// inputs are effectively ASCII.
func CheckStringDivergence(t *testing.T, expOut, actOut string) {
	if expOut == actOut {
		return
	}
	// expOutCopy is consumed from the front in lock-step with actOut.
	var expOutCopy = expOut
	var act bytes.Buffer
	// How many trailing characters of each side to show after divergence.
	var charsToDump = 10
	var expCharsToDump = 10
	var actCharsLeft, expCharsLeft int
	for index, char := range actOut {
		if len(expOutCopy) > 0 {
			if char == rune(expOutCopy[0]) {
				act.WriteByte(byte(char))
			} else {
				act.WriteString("###") // Mark point of divergence.
				// Shrink the dump windows so slicing below stays in range.
				expCharsLeft = len(expOutCopy)
				actCharsLeft = len(actOut) - index
				if expCharsLeft < charsToDump {
					expCharsToDump = expCharsLeft
				}
				if actCharsLeft < charsToDump {
					charsToDump = actCharsLeft
				}
				act.WriteString(actOut[index : index+charsToDump])
				break
			}
		} else {
			// Actual output is longer than expected: report the surplus.
			t.Logf("Expected output terminates early.\n")
			t.Fatalf("Exp:\n%s\nGot extra:\n%s\n",
				expOut[:index], act.String()[index:])
		}
		expOutCopy = expOutCopy[1:]
	}
	// Useful to print whole output first for reference (useful when debugging
	// when you don't want to have to construct the expected output up front).
	t.Logf("Actual output:\n%s\n--- ENDS ---\n", actOut)
	// After that we then print up to the point of divergence so it's easy to
	// work out what went wrong ...
	t.Fatalf("Unexpected output.\nGot:\n%s\nExp at ###:\n'%s ...'\n",
		act.String(), expOutCopy[:expCharsToDump])
}
package rule
import (
"github.com/lawrencewoodman/ddataset"
"github.com/lawrencewoodman/dlit"
"github.com/vlifesystems/rhkit/description"
"sort"
)
// InFV represents a rule determining if field is equal to
// any of the supplied values when represented as a string
type InFV struct {
field string
values []*dlit.Literal
}
func init() {
registerGenerator("InFV", generateInFV)
}
func NewInFV(field string, values []*dlit.Literal) *InFV {
if len(values) == 0 {
panic("NewInFV: Must contain at least one value")
}
return &InFV{field: field, values: values}
}
func makeInFVString(field string, values []*dlit.Literal) string {
return "in(" + field + "," + commaJoinValues(values) + ")"
}
func (r *InFV) String() string {
return makeInFVString(r.field, r.values)
}
func (r *InFV) Fields() []string {
return []string{r.field}
}
func (r *InFV) Values() []*dlit.Literal {
return r.values
}
func (r *InFV) IsTrue(record ddataset.Record) (bool, error) {
needle, ok := record[r.field]
if !ok {
return false, InvalidRuleError{Rule: r}
}
if needle.Err() != nil {
return false, IncompatibleTypesRuleError{Rule: r}
}
for _, v := range r.values {
if needle.String() == v.String() {
return true, nil
}
}
return false, nil
}
func (r *InFV) Overlaps(o Rule) bool {
switch x := o.(type) {
case *InFV:
oValues := x.Values()
oField := x.Fields()[0]
if r.field != oField {
return false
}
for _, v := range r.values {
for _, oV := range oValues {
if v.String() == oV.String() {
return true
}
}
}
}
return false
}
func generateInFV(
inputDescription *description.Description,
generationDesc GenerationDescriber,
) []Rule {
extra := 0
if len(generationDesc.Fields()) == 2 {
extra += 3
}
rules := make([]Rule, 0)
for _, field := range generationDesc.Fields() {
fd := inputDescription.Fields[field]
numValues := len(fd.Values)
if generationDesc.Deny("InFV", field) ||
(fd.Kind != description.String && fd.Kind != description.Number) ||
numValues <= 3 || numValues > (12+extra) {
continue
}
possibleLits := possibleValuesToLiterals(fd.Values)
maxNumLits := 5 + extra
if maxNumLits > numValues-2 {
// numvalues-2 is used because numValues-1 is equivalent to saying that
// a field is != 'value', which is generated by the NEFV rule
maxNumLits = numValues - 2
}
litCombinations := literalCombinations(possibleLits, 2, maxNumLits)
for _, compareValues := range litCombinations {
r := NewInFV(field, compareValues)
rules = append(rules, r)
}
}
return rules
}
func getMaskLiterals(mask string, values []*dlit.Literal) []*dlit.Literal {
r := []*dlit.Literal{}
for j, b := range mask {
if j >= len(values) {
break
}
if b == '1' {
v := values[j]
r = append(r, v)
}
}
return r
}
func literalCombinations(
values []*dlit.Literal,
min,
max int,
) [][]*dlit.Literal {
r := [][]*dlit.Literal{}
for i := 3; ; i++ {
mask := makeMask(len(values), i)
numOnBits := countNumOnBits(mask)
if len(mask) > len(values) {
break
}
if numOnBits >= min && numOnBits <= max && numOnBits <= len(values) {
r = append(r, getMaskLiterals(mask, values))
}
}
return r
}
func possibleValuesToLiterals(
values map[string]description.Value,
) []*dlit.Literal {
lits := []*dlit.Literal{}
keys := make([]string, len(values))
i := 0
for k := range values {
keys[i] = k
i++
}
// The keys are sorted to make it easier to test because maps aren't ordered
sort.Strings(keys)
for _, k := range keys {
if values[k].Num >= 2 {
lits = append(lits, values[k].Value)
}
}
return lits
} | rule/infv.go | 0.636918 | 0.451992 | infv.go | starcoder |
package experiments
import (
"flag"
"github.com/Yunpeng-J/HLF-2.2/core/ledger/kvledger/benchmark/chainmgmt"
)
// txConf captures the transaction related configurations
// numTotalTxs specifies the total transactions that should be executed and committed across chains
// numParallelTxsPerChain specifies the parallel transactions on each of the chains
// numWritesPerTx specifies the number of keys to write in each transaction
// numReadsPerTx specifies the number of keys to read in each transaction, Note: this parameters
// match the numWritesPerTx for normal benchmarks. This can be set to zero to make batch update measurements.
type txConf struct {
numTotalTxs int
numParallelTxsPerChain int
numWritesPerTx int
numReadsPerTx int
}
// dataConf captures the data related configurations
// numKVs specifies number of total key-values across chains
// kvSize specifies the size of a key-value (in bytes)
// useJSON specifies if the value stored is in JSON format
type dataConf struct {
numKVs int
kvSize int
useJSON bool
}
// configuration captures all the configurations for an experiment
// For details of individual configuration, see comments on the specific type
type configuration struct {
chainMgrConf *chainmgmt.ChainMgrConf
batchConf *chainmgmt.BatchConf
dataConf *dataConf
txConf *txConf
}
// emptyConf returns a an empty configuration (with nested structure only)
func emptyConf() *configuration {
conf := &configuration{}
conf.chainMgrConf = &chainmgmt.ChainMgrConf{}
conf.batchConf = &chainmgmt.BatchConf{}
conf.txConf = &txConf{}
conf.dataConf = &dataConf{}
return conf
}
// confFromTestParams consumes the parameters passed by an experiment
// and returns the configuration loaded with the parsed param values
func confFromTestParams(testParams []string) *configuration {
conf := emptyConf()
flags := flag.NewFlagSet("testParams", flag.ExitOnError)
// chainMgrConf
dataDir := flags.String("DataDir", conf.chainMgrConf.DataDir, "Dir for ledger data")
numChains := flags.Int("NumChains", conf.chainMgrConf.NumChains, "Number of chains")
// txConf
numParallelTxsPerChain := flags.Int("NumParallelTxPerChain",
conf.txConf.numParallelTxsPerChain, "Number of TxSimulators concurrently on each chain")
numTotalTxs := flags.Int("NumTotalTx",
conf.txConf.numTotalTxs, "Number of total transactions")
numWritesPerTx := flags.Int("NumWritesPerTx",
conf.txConf.numWritesPerTx, "number of keys written in each Tx")
numReadsPerTx := flags.Int("NumReadsPerTx",
conf.txConf.numReadsPerTx, "number of keys to read in each Tx")
// batchConf
batchSize := flags.Int("BatchSize",
conf.batchConf.BatchSize, "number of Txs in each batch")
// dataConf
numKVs := flags.Int("NumKVs",
conf.dataConf.numKVs, "the keys are named as key_0, key_1,... upto key_(NumKVs-1)")
kvSize := flags.Int("KVSize",
conf.dataConf.kvSize, "size of the key-value in bytes")
useJSON := flags.Bool("UseJSONFormat", conf.dataConf.useJSON, "should CouchDB use JSON for values")
flags.Parse(testParams)
conf.chainMgrConf.DataDir = *dataDir
conf.chainMgrConf.NumChains = *numChains
conf.txConf.numParallelTxsPerChain = *numParallelTxsPerChain
conf.txConf.numTotalTxs = *numTotalTxs
conf.txConf.numWritesPerTx = *numWritesPerTx
conf.txConf.numReadsPerTx = *numReadsPerTx
conf.batchConf.BatchSize = *batchSize
conf.dataConf.numKVs = *numKVs
conf.dataConf.kvSize = *kvSize
conf.dataConf.useJSON = *useJSON
return conf
} | core/ledger/kvledger/benchmark/experiments/conf.go | 0.752831 | 0.410461 | conf.go | starcoder |
package animation
/*
Contains definitions of various animation effects that can be applied. These
effects should implement interface Animation
*/
import (
"image/color"
"log"
"math"
"os"
"time"
colorful "github.com/lucasb-eyer/go-colorful"
)
// RGBAFromRGBHex converts a 24-bit hex color value into a color.RGBA value
func RGBAFromRGBHex(hexColor uint32) color.RGBA {
return color.RGBA{uint8(hexColor >> 16 & 0xff), uint8(hexColor >> 8 & 0xff), uint8(hexColor & 0xff), 0xff}
}
// InterpolateSolid transitions from one solid color (applied to all elements)
// to another solid color
type InterpolateSolid struct {
startColor, endColor colorful.Color
duration time.Duration
startTime time.Time
startOnCurrent bool // Capture the color of the first frame and use it as the start color?
captureNext bool
}
var fxlog = log.New(os.Stdout, "(EFFECT) ", 0)
// NewInterpolateSolidHexRGB creates an InterpolateSolid effect, given hex-encoded 24-bit RGB colors
func NewInterpolateSolidHexRGB(startColor, endColor uint32, duration time.Duration) *InterpolateSolid {
startRGBA := RGBAFromRGBHex(startColor)
endRGBA := RGBAFromRGBHex(endColor)
return &InterpolateSolid{startColor: colorful.MakeColor(startRGBA), endColor: colorful.MakeColor(endRGBA), duration: duration}
}
// NewInterpolateSolid creates an InterpolateSolid effect
func NewInterpolateSolid(startColor, endColor color.RGBA,
duration time.Duration) *InterpolateSolid {
return &InterpolateSolid{startColor: colorful.MakeColor(startColor), endColor: colorful.MakeColor(endColor), duration: duration}
}
// NewInterpolateToHexRGB interpolates from the current color of the universe (determined by sampling the first element)
// to the provided end color, specified as a 24-bit RGB hex value
func NewInterpolateToHexRGB(endColor uint32, duration time.Duration) *InterpolateSolid {
// Create a standard effect with arbitrary start color
effect := NewInterpolateSolidHexRGB(0x0, endColor, duration)
// ...then set the magic flag
effect.startOnCurrent = true
return effect
}
// Start starts the effect
func (effect *InterpolateSolid) Start(startTime time.Time) {
// fxlog.Printf("Setting start time %v", startTime)
effect.startTime = startTime
if effect.startOnCurrent {
effect.captureNext = true
}
}
// Frame generates an animation frame
func (effect *InterpolateSolid) Frame(buf []color.RGBA, frameTime time.Time) (output []color.RGBA, endSeq bool) {
//fxlog.Printf("Buf cap: %d len: %d\n", cap(buf), len(buf))
if frameTime.After(effect.startTime.Add(effect.duration)) {
// fxlog.Printf("Done at time %v (start time %v)\n", frameTime, effect.startTime)
return buf, true
}
// See if we need to find the current universe color and use it as the start color
if effect.captureNext {
sc := buf[0]
sc.A = 0xff // Avoid a 0 transparency (in the case of an uninitialized buffer) which makes go-colorful unhappy
effect.startColor = colorful.MakeColor(sc)
effect.captureNext = false // Clear the flag to prevent this from being done again
}
elapsed := frameTime.Sub(effect.startTime)
completion := elapsed.Seconds() / effect.duration.Seconds()
//fxlog.Printf("Frame at %2.2f%%", completion*100.0)
// currColorful := effect.startColor.BlendLab(effect.endColor, completion)
// currColorful := effect.startColor.BlendLuv(effect.endColor, completion)
currColorful := effect.startColor.BlendRgb(effect.endColor, completion)
currColor := colorfulToRGBA(currColorful)
for i := 0; i < len(buf); i++ {
buf[i] = currColor
}
return buf, false
}
func colorfulToRGBA(c colorful.Color) color.RGBA {
r, g, b := c.RGB255()
return color.RGBA{r, g, b, 0xff}
}
// Pulse is a repeating interpolation between two colors, in a pulsing fashion
type Pulse struct {
c1 colorful.Color
c2 colorful.Color
period time.Duration
startTime time.Time
singleCycle bool
}
// NewPulse creates a new pulse effect with the given parmeters. singleCycle indicates whether the effect should
// report completion after one cycle
func NewPulse(c1, c2 color.Color, period time.Duration, singleCycle bool) *Pulse {
return &Pulse{
c1: colorful.MakeColor(c1),
c2: colorful.MakeColor(c2),
period: period,
singleCycle: singleCycle,
}
}
// NewDimmingPulse creates a pulse between a color and a dimmer version of itself.
// dimmingRatio determines the amount of dimming - 0.0 means 'black', while 1.0
// means 'color c'. 'period' is the time for a full dimming/brightening cycle
func NewDimmingPulse(c color.Color, dimmingRatio float64, period time.Duration) *Pulse {
c1 := colorful.MakeColor(c)
black := colorful.Color{0.0, 0.0, 0.0}
// c2 := c1.BlendLuv(black, 1.0-dimmingRatio).Clamped()
c2 := c1.BlendRgb(black, 1.0-dimmingRatio).Clamped()
// fxlog.Printf("Pulse colors: c1=%v, c2=%v\n", c1, c2)
return &Pulse{
c1: c1,
c2: c2,
period: period,
}
}
// Start sets the start time of the pulse effect
func (effect *Pulse) Start(startTime time.Time) {
effect.startTime = startTime
}
// Frame generates a frame of the Pulse animation. It will always return 'false' for endSeq. It returns
// the passed-in buffer
func (effect *Pulse) Frame(buf []color.RGBA, frameTime time.Time) (output []color.RGBA, endSeq bool) {
// Use a sinusoidal pulse
elapsed := frameTime.Sub(effect.startTime)
phase := float64(elapsed%effect.period) / float64(effect.period)
position := 0.5 - (math.Cos(2*math.Pi*phase) / 2.0)
// color := effect.c1.BlendLuv(effect.c2, position).Clamped()
color := effect.c1.BlendRgb(effect.c2, position).Clamped()
rgba := colorfulToRGBA(color)
for idx := range buf {
buf[idx] = rgba
}
done := effect.singleCycle && elapsed > effect.period
return buf, done
}
// Solid is a simple static solid color
type Solid struct {
color color.RGBA
timed bool
startTime time.Time
duration time.Duration
}
// NewSolid creates a Solid effect for the given color
func NewSolid(color color.RGBA) *Solid {
return &Solid{
color: color,
timed: false,
}
}
// NewTimedSolid creates a solid effect that lasts a fixed amount of time
func NewTimedSolid(color color.RGBA, duration time.Duration) *Solid {
return &Solid{
color: color,
timed: true,
duration: duration,
}
}
// Start the Solid effect - NOP
func (effect *Solid) Start(startTime time.Time) {
effect.startTime = startTime
}
// Frame creates a frame of the Solid effect
func (effect *Solid) Frame(buf []color.RGBA, frameTime time.Time) (output []color.RGBA, endSeq bool) {
for idx := range buf {
buf[idx] = color.RGBA(effect.color)
}
done := effect.timed && frameTime.After(effect.startTime.Add(effect.duration))
return buf, done
} | vendor/github.com/TeamNorCal/animation/effects.go | 0.871475 | 0.409634 | effects.go | starcoder |
package dataframe
import (
"fmt"
"logarithmotechnia/vector"
"strconv"
"strings"
)
// InnerJoin joins df with `with`, keeping only rows whose join-key values
// occur in both dataframes; matching rows are combined pairwise, so
// duplicate keys multiply rows. Join columns come from the KeyOptionJoinBy
// option when present, otherwise from all column names shared by both
// dataframes; when none can be determined, df is returned unchanged.
func (df *Dataframe) InnerJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	// Index both sides by the join-key values.
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	dfTreeKeys := rootDfTree.getKeys()
	dfIndices := make([]int, 0)
	withIndices := make([]int, 0)
	// Pair each left row with each matching right row; keys absent on the
	// right are dropped entirely.
	for _, key := range dfTreeKeys {
		indicesForWith := rootWithTree.getIndicesFor(key)
		if indicesForWith == nil {
			continue
		}
		indicesForDf := rootDfTree.getIndicesFor(key)
		for _, idxDf := range indicesForDf {
			for _, idxWith := range indicesForWith {
				dfIndices = append(dfIndices, idxDf)
				withIndices = append(withIndices, idxWith)
			}
		}
	}
	// Drop the join columns from the right side ("-name" deselects) so
	// they are not duplicated in the result.
	removeColumns := make([]string, len(columns))
	for i, column := range columns {
		removeColumns[i] = "-" + column
	}
	newDf := df.ByIndices(dfIndices)
	newWIth := with.Select(removeColumns).ByIndices(withIndices)
	return newDf.BindColumns(newWIth)
}
// LeftJoin joins df with `with`, keeping every row of df. Rows whose key
// matches in `with` are paired with each matching right row; unmatched
// rows are paired with right-side index 0.
// NOTE(review): index 0 appears to act as an "empty"/NA row marker for
// ByIndices — confirm its semantics there.
// Join columns come from KeyOptionJoinBy or default to all shared names;
// with no join columns, df is returned unchanged.
func (df *Dataframe) LeftJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	dfTreeKeys := rootDfTree.getKeys()
	dfIndices := make([]int, 0)
	withIndices := make([]int, 0)
	for _, key := range dfTreeKeys {
		indicesForDf := rootDfTree.getIndicesFor(key)
		indicesForWith := rootWithTree.getIndicesFor(key)
		if indicesForWith == nil {
			// No match on the right: keep the left rows, right side is 0.
			for _, idxDf := range indicesForDf {
				dfIndices = append(dfIndices, idxDf)
				withIndices = append(withIndices, 0)
			}
		} else {
			// Cartesian pairing of matching left and right rows.
			for _, idxDf := range indicesForDf {
				for _, idxWith := range indicesForWith {
					dfIndices = append(dfIndices, idxDf)
					withIndices = append(withIndices, idxWith)
				}
			}
		}
	}
	// Deselect the join columns on the right to avoid duplicates.
	removeColumns := make([]string, len(columns))
	for i, column := range columns {
		removeColumns[i] = "-" + column
	}
	newDf := df.ByIndices(dfIndices)
	newWIth := with.Select(removeColumns).ByIndices(withIndices)
	return newDf.BindColumns(newWIth)
}
// RightJoin joins df with `with`, keeping every row of `with`. Rows whose
// key matches in df are paired with each matching left row; unmatched
// right rows are paired with left-side index 0 (see the NA note on
// LeftJoin). The join columns are kept from the right side, and the final
// Select restores a column order starting with df's columns.
func (df *Dataframe) RightJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	// Iterate the right side's keys: every right row survives.
	treeKeys := rootWithTree.getKeys()
	dfIndices := make([]int, 0)
	withIndices := make([]int, 0)
	for _, key := range treeKeys {
		indicesForDf := rootDfTree.getIndicesFor(key)
		indicesForWith := rootWithTree.getIndicesFor(key)
		if indicesForDf == nil {
			// No match on the left: keep right rows, left side is 0.
			for _, idxWith := range indicesForWith {
				dfIndices = append(dfIndices, 0)
				withIndices = append(withIndices, idxWith)
			}
		} else {
			for _, idxDf := range indicesForDf {
				for _, idxWith := range indicesForWith {
					dfIndices = append(dfIndices, idxDf)
					withIndices = append(withIndices, idxWith)
				}
			}
		}
	}
	// Join columns come from the right side; deselect them on the left.
	removeColumns := make([]string, len(columns))
	for i, column := range columns {
		removeColumns[i] = "-" + column
	}
	newDf := df.Select(removeColumns).ByIndices(dfIndices)
	newWIth := with.ByIndices(withIndices)
	joinedDf := newDf.BindColumns(newWIth)
	// Reorder so df's original column order leads the result.
	selectNames, _ := df.Names().Append(joinedDf.Names()).Unique().Strings()
	return joinedDf.Select(selectNames)
}
// FullJoin joins df with `with` over the union of both key sets: matched
// keys are paired pairwise, keys present on only one side are paired with
// index 0 on the other (see the NA note on LeftJoin). The join columns of
// both sides are merged with Coalesce so a key missing on the left is
// filled from the right.
func (df *Dataframe) FullJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	dfTreeKeys := rootDfTree.getKeys()
	withTreeKeys := rootWithTree.getKeys()
	dfIndices := make([]int, 0)
	withIndices := make([]int, 0)
	// Left pass: behaves like LeftJoin over df's keys.
	for _, key := range dfTreeKeys {
		indicesForDf := rootDfTree.getIndicesFor(key)
		indicesForWith := rootWithTree.getIndicesFor(key)
		if indicesForWith == nil {
			for _, idxDf := range indicesForDf {
				dfIndices = append(dfIndices, idxDf)
				withIndices = append(withIndices, 0)
			}
		} else {
			for _, idxDf := range indicesForDf {
				for _, idxWith := range indicesForWith {
					dfIndices = append(dfIndices, idxDf)
					withIndices = append(withIndices, idxWith)
				}
			}
		}
	}
	// Right pass: append right rows whose key never occurred on the left.
	for _, key := range withTreeKeys {
		indicesForDf := rootDfTree.getIndicesFor(key)
		if indicesForDf == nil {
			indicesForWith := rootWithTree.getIndicesFor(key)
			for _, idxWith := range indicesForWith {
				dfIndices = append(dfIndices, 0)
				withIndices = append(withIndices, idxWith)
			}
		}
	}
	removeColumns := make([]string, len(columns))
	for i, column := range columns {
		removeColumns[i] = "-" + column
	}
	newDf := df.ByIndices(dfIndices)
	newWIth := with.ByIndices(withIndices)
	// Merge each join column: take the left value, fall back to the right
	// (fills keys that only exist in `with`).
	coalesceColumns := make([]Column, len(columns))
	for i, column := range columns {
		coalesceColumns[i] = Column{column, newDf.Cn(column).Coalesce(newWIth.Cn(column))}
	}
	return newDf.Mutate(coalesceColumns).BindColumns(newWIth.Select(removeColumns))
}
// SemiJoin filters df down to rows whose join-key values also occur in
// `with`; no columns from `with` are added. Join columns come from the
// KeyOptionJoinBy option or default to all shared column names; when no
// join columns can be determined, df is returned unchanged.
func (df *Dataframe) SemiJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	// Index both sides by the join-key values.
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	dfIndices := make([]int, 0)
	// Keep every left row whose key also exists on the right.
	for _, key := range rootDfTree.getKeys() {
		if rootWithTree.getIndicesFor(key) == nil {
			continue
		}
		// Bulk-append the key's row indices (was an element-by-element loop).
		dfIndices = append(dfIndices, rootDfTree.getIndicesFor(key)...)
	}
	return df.ByIndices(dfIndices)
}
// AntiJoin filters df down to rows whose join-key values do NOT occur in
// `with` — the complement of SemiJoin. Join columns come from the
// KeyOptionJoinBy option or default to all shared column names; when no
// join columns can be determined, df is returned unchanged.
func (df *Dataframe) AntiJoin(with *Dataframe, options ...vector.Option) *Dataframe {
	conf := vector.MergeOptions(options)
	columns := df.determineColumns(conf, with)
	if len(columns) == 0 {
		return df
	}
	// Index both sides by the join-key values.
	rootDfTree := &joinNode{}
	rootWithTree := &joinNode{}
	fillJoinTree(df, rootDfTree, columns)
	fillJoinTree(with, rootWithTree, columns)
	dfIndices := make([]int, 0)
	// Keep every left row whose key is absent on the right.
	for _, key := range rootDfTree.getKeys() {
		if rootWithTree.getIndicesFor(key) != nil {
			continue
		}
		// Bulk-append the key's row indices (was an element-by-element loop).
		dfIndices = append(dfIndices, rootDfTree.getIndicesFor(key)...)
	}
	return df.ByIndices(dfIndices)
}
// determineColumns resolves the join columns for df against src. When the
// KeyOptionJoinBy option is set, only the requested columns that exist in
// BOTH dataframes are kept; otherwise every df column that also exists in
// src is used. The result may be empty when nothing matches.
func (df *Dataframe) determineColumns(conf vector.Configuration, src *Dataframe) []string {
	if conf.HasOption(vector.KeyOptionJoinBy) {
		var joinColumns []string
		for _, column := range conf.Value(vector.KeyOptionJoinBy).([]string) {
			if df.Names().Has(column) && src.Names().Has(column) {
				joinColumns = append(joinColumns, column)
			}
		}
		return joinColumns
	}
	joinColumns := []string{}
	for _, column := range df.columnNames {
		if src.Names().Has(column) {
			joinColumns = append(joinColumns, column)
		}
	}
	return joinColumns
}
type joinNode struct {
groupVal interface{}
groupMap map[interface{}]*joinNode
indices []int
values []interface{}
keyLen int
}
func (n *joinNode) getIndicesFor(key []interface{}) []int {
if len(key) == 0 {
return nil
}
node, ok := n.groupMap[key[0]]
if !ok {
return nil
}
if len(key) == 1 {
return node.indices
}
return node.getIndicesFor(key[1:])
}
func (n *joinNode) getKeys() [][]interface{} {
keys := [][]interface{}{}
if n.keyLen == 0 {
return keys
}
for _, val := range n.values {
if n.keyLen > 1 {
subKeys := n.groupMap[val].getKeys()
for _, subKey := range subKeys {
key := append([]interface{}{val}, subKey...)
keys = append(keys, key)
}
} else {
keys = append(keys, []interface{}{val})
}
}
return keys
}
func (n *joinNode) String() string {
return joinNodeToString(n, 0)
}
// joinNodeToString renders node and its children as an indented tree,
// one extra indent level per join column; lvl controls the indent depth.
func joinNodeToString(node *joinNode, lvl int) string {
	indent := strings.Repeat(" ", lvl)
	// Build into a strings.Builder instead of repeated string
	// concatenation, which copies the whole prefix on every +=.
	var b strings.Builder
	fmt.Fprintf(&b, "%sGroup: %v\n", indent, node.groupVal)
	fmt.Fprintf(&b, "%sValues: %v\n", indent, node.values)
	b.WriteString(indent + "Values array length: " + strconv.Itoa(len(node.values)) + "\n")
	fmt.Fprintf(&b, "%sIndices: %v\n", indent, node.indices)
	// Ranging over an empty slice is a no-op, so no length guard is needed.
	for _, value := range node.values {
		b.WriteString(joinNodeToString(node.groupMap[value], lvl+1) + "\n")
	}
	return b.String()
}
// fillJoinTree builds one trie level per join column for df, grouping row
// indices by the values of columns[0] and recursing on the remainder.
func fillJoinTree(df *Dataframe, node *joinNode, columns []string) {
	if len(columns) == 0 || node == nil {
		return
	}
	isAdditionalColumns := len(columns) > 1
	node.groupMap = map[interface{}]*joinNode{}
	node.keyLen = len(columns)
	column := columns[0]
	groups, values := df.Cn(column).Groups()
	node.values = values
	for i := 0; i < len(values); i++ {
		subNode := &joinNode{}
		subNode.groupVal = values[i]
		if node.indices == nil {
			// Top-level call: group positions already refer to df's rows.
			subNode.indices = groups[i]
		} else {
			// Recursive call: df is a filtered view, so translate the
			// group positions (apparently 1-based, hence idx-1 — confirm
			// against Groups()) back to the parent's row indices.
			subNode.indices = make([]int, len(groups[i]))
			for j, idx := range groups[i] {
				subNode.indices[j] = node.indices[idx-1]
			}
		}
		if isAdditionalColumns {
			fillJoinTree(df.Filter(groups[i]), subNode, columns[1:])
		}
		node.groupMap[values[i]] = subNode
	}
	if len(values) > 0 {
		// Interior nodes keep indices only on their children.
		node.indices = nil
	}
}
package pgbuilder
import (
"fmt"
"github.com/go-courier/sqlx/v2"
"github.com/go-courier/sqlx/v2/builder"
)
// RecordCollectionFrom builds a RecordCollection from a single model,
// using the model's non-zero fields plus any columns explicitly allowed
// to carry zero values.
func RecordCollectionFrom(db sqlx.DBExecutor, model builder.Model, columnsCouldBeZeroValue ...*builder.Column) *RecordCollection {
	t := db.T(model)

	zeroFields := make([]string, 0)
	for _, col := range columnsCouldBeZeroValue {
		zeroFields = append(zeroFields, col.FieldName)
	}

	fieldValues := sqlx.FieldValuesFromModel(t, model, zeroFields...)

	// Walk the table's columns in order, keeping only those the model
	// provided a value for.
	columnList := make([]*builder.Column, 0)
	recordValueList := make([]interface{}, 0)
	t.Columns.Range(func(col *builder.Column, idx int) {
		v, ok := fieldValues[col.FieldName]
		if !ok {
			return
		}
		columnList = append(columnList, col)
		recordValueList = append(recordValueList, v)
	})

	return RecordCollectionBy(func(rc *RecordCollection) {
		rc.SetRecordValues(recordValueList...)
	}, columnList...)
}
// RecordCollectionWith builds a RecordCollection over the given columns
// holding the single record recordValues.
func RecordCollectionWith(recordValues RecordValues, columns ...*builder.Column) *RecordCollection {
	return RecordCollectionBy(func(rc *RecordCollection) {
		rc.SetRecordValues(recordValues...)
	}, columns...)
}
// RecordCollectionBy builds a RecordCollection over columns and lets
// collect populate its records. It panics when any column is nil.
func RecordCollectionBy(collect func(rc *RecordCollection), columns ...*builder.Column) *RecordCollection {
	cols := &builder.Columns{}
	for i, col := range columns {
		if col == nil {
			panic(fmt.Errorf("invalid %d of columns", i))
		}
		cols.Add(col)
	}

	rc := &RecordCollection{
		Columns: cols,
		records: []RecordValues{},
	}
	collect(rc)
	return rc
}
// RecordValues holds the column values of a single record.
type RecordValues []interface{}

// RecordCollection is an ordered set of records that all share one
// column list.
type RecordCollection struct {
	records []RecordValues
	Columns *builder.Columns
}
// IsNil reports whether the collection is unusable: a nil receiver, no
// records, or a nil column expression.
func (vc *RecordCollection) IsNil() bool {
	return vc == nil || len(vc.records) == 0 || builder.IsNilExpr(vc.Columns)
}
// SetRecordValues appends one record to the collection.
//
// A single builder.SelectStatement is accepted as-is (INSERT ... SELECT
// style); any other record must supply exactly one value per column,
// otherwise SetRecordValues panics.
func (vc *RecordCollection) SetRecordValues(values ...interface{}) {
	isSubSelect := false
	if len(values) == 1 {
		_, isSubSelect = values[0].(builder.SelectStatement)
	}
	// Validate every non-subselect record. Previously the length check
	// only ran when len(values) == 1, letting mismatched multi-value
	// records through silently.
	if !isSubSelect && len(values) != vc.Columns.Len() {
		panic(fmt.Errorf("len of records is not matched, need %d, got %d", vc.Columns.Len(), len(values)))
	}
	vc.records = append(vc.records, values)
}
// WithExtendCol returns a copy of the collection with col appended to the
// column list and val appended to every record.
func (vc RecordCollection) WithExtendCol(col *builder.Column, val interface{}) *RecordCollection {
	columns := vc.Columns.Clone()
	columns.Add(col)

	records := make([]RecordValues, len(vc.records))
	for i := range records {
		// Copy each record before extending it: appending directly to
		// vc.records[i] could write into its backing array when spare
		// capacity exists, mutating the source collection's record.
		extended := make(RecordValues, 0, len(vc.records[i])+1)
		extended = append(extended, vc.records[i]...)
		records[i] = append(extended, val)
	}

	return &RecordCollection{
		Columns: columns,
		records: records,
	}
}
// AsAssignments flattens every record into column assignments, iterating
// records in order and columns within each record. Returns nil when the
// collection holds no records.
func (vc *RecordCollection) AsAssignments() builder.Assignments {
	if len(vc.records) == 0 {
		return nil
	}

	assignments := builder.Assignments{}
	for _, record := range vc.records {
		rec := record
		vc.Columns.Range(func(col *builder.Column, idx int) {
			assignments = append(assignments, col.ValueBy(rec[idx]))
		})
	}
	return assignments
}
func (vc *RecordCollection) Values() []interface{} {
if len(vc.records) == 0 {
return nil
}
values := make([]interface{}, 0)
for j := range vc.records {
recordValues := vc.records[j]
values = append(values, recordValues...)
}
return values
} | pgbuilder/record_collection.go | 0.641647 | 0.411879 | record_collection.go | starcoder |
package apivideosdk
import (
//"encoding/json"
)
// Account describes an api.video account: its usage quota plus two
// deprecated informational fields.
type Account struct {
	// Quota holds the account's usage quota.
	Quota *AccountQuota `json:"quota,omitempty"`
	// Deprecated: What features are enabled for your account. Choices include: app.dynamic_metadata - the ability to dynamically tag videos to better segment and understand your audiences, app.event_log - the ability to create and retrieve a log detailing how your videos were interacted with, player.white_label - the ability to customise your player, stats.player_events - the ability to see statistics about how your player is being used, transcode.mp4_support - the ability to reformat content into mp4 using the H264 codec.
	Features *[]string `json:"features,omitempty"`
	// Deprecated: Whether you are using your production or sandbox API key will impact what environment is displayed here, as well as stats and features information. If you use your sandbox key, the environment is \"sandbox.\" If you use your production key, the environment is \"production.\"
	Environment *string `json:"environment,omitempty"`
}
// NewAccount instantiates a new Account object. All optional fields
// start unset (nil); required fields would be taken as arguments here if
// the API defined any.
func NewAccount() *Account {
	return &Account{}
}

// NewAccountWithDefaults instantiates a new Account object, assigning
// default values only to the properties that define one. It does not
// guarantee that properties required by the API are set.
func NewAccountWithDefaults() *Account {
	return &Account{}
}
// GetQuota returns the Quota field value if set, the zero value otherwise.
func (o *Account) GetQuota() AccountQuota {
	if o != nil && o.Quota != nil {
		return *o.Quota
	}
	var zero AccountQuota
	return zero
}

// GetQuotaOk returns the Quota field value if set, nil otherwise, plus a
// boolean reporting whether the value has been set.
func (o *Account) GetQuotaOk() (*AccountQuota, bool) {
	if o != nil && o.Quota != nil {
		return o.Quota, true
	}
	return nil, false
}

// HasQuota reports whether the Quota field has been set.
func (o *Account) HasQuota() bool {
	return o != nil && o.Quota != nil
}

// SetQuota stores a reference to the given AccountQuota in the Quota field.
func (o *Account) SetQuota(v AccountQuota) {
	o.Quota = &v
}
// GetFeatures returns the Features field value if set, the zero value otherwise.
func (o *Account) GetFeatures() []string {
	if o != nil && o.Features != nil {
		return *o.Features
	}
	var zero []string
	return zero
}

// GetFeaturesOk returns the Features field value if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *Account) GetFeaturesOk() (*[]string, bool) {
	if o != nil && o.Features != nil {
		return o.Features, true
	}
	return nil, false
}

// HasFeatures reports whether the Features field has been set.
func (o *Account) HasFeatures() bool {
	return o != nil && o.Features != nil
}

// SetFeatures stores a reference to the given []string in the Features field.
func (o *Account) SetFeatures(v []string) {
	o.Features = &v
}
// GetEnvironment returns the Environment field value if set, the zero
// value otherwise.
func (o *Account) GetEnvironment() string {
	if o != nil && o.Environment != nil {
		return *o.Environment
	}
	var zero string
	return zero
}

// GetEnvironmentOk returns the Environment field value if set, nil
// otherwise, plus a boolean reporting whether the value has been set.
func (o *Account) GetEnvironmentOk() (*string, bool) {
	if o != nil && o.Environment != nil {
		return o.Environment, true
	}
	return nil, false
}

// HasEnvironment reports whether the Environment field has been set.
func (o *Account) HasEnvironment() bool {
	return o != nil && o.Environment != nil
}

// SetEnvironment stores a reference to the given string in the
// Environment field.
func (o *Account) SetEnvironment(v string) {
	o.Environment = &v
}
type NullableAccount struct {
value *Account
isSet bool
}
func (v NullableAccount) Get() *Account {
return v.value
}
func (v *NullableAccount) Set(val *Account) {
v.value = val
v.isSet = true
}
func (v NullableAccount) IsSet() bool {
return v.isSet
}
func (v *NullableAccount) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableAccount(val *Account) *NullableAccount {
return &NullableAccount{value: val, isSet: true}
} | model_account.go | 0.764188 | 0.445952 | model_account.go | starcoder |
package shadow
import (
"fmt"
"unsafe"
"github.com/chrsan/shadow/internal"
)
// Path wraps a Skia path owned by the internal context; ptr is the
// handle passed to every Skia* call and must be released via Dispose.
type Path struct {
	c *internal.Context
	ptr int32
}
// Dispose releases the underlying Skia path. It is safe to call on a nil
// receiver. Note: the original also assigned p = nil afterwards, which
// only cleared the local copy of the receiver and had no effect for
// callers; that no-op has been removed.
func (p *Path) Dispose() {
	if p == nil {
		return
	}
	p.c.SkiaPathDestroy(p.ptr)
}
// PathFillType selects the fill rule used when the path is filled.
type PathFillType uint8

const (
	// PathFillTypeWinding selects the (non-zero) winding fill rule.
	PathFillTypeWinding PathFillType = iota
	// PathFillTypeEvenOdd selects the even-odd fill rule.
	PathFillTypeEvenOdd
)

// FillType returns the path's current fill rule.
func (p *Path) FillType() PathFillType {
	return PathFillType(p.c.SkiaPathGetFillType(p.ptr))
}

// SetFillType sets the path's fill rule.
func (p *Path) SetFillType(ft PathFillType) {
	p.c.SkiaPathSetFillType(p.ptr, int32(ft))
}
// MoveTo starts a new contour at (x, y).
func (p *Path) MoveTo(x, y float32) {
	p.c.SkiaPathMoveTo(p.ptr, x, y)
}

// LineTo adds a line from the current point to (x, y).
func (p *Path) LineTo(x, y float32) {
	p.c.SkiaPathLineTo(p.ptr, x, y)
}

// QuadTo adds a quadratic curve with control point (x1, y1) ending at (x2, y2).
func (p *Path) QuadTo(x1, y1, x2, y2 float32) {
	p.c.SkiaPathQuadTo(p.ptr, x1, y1, x2, y2)
}

// ConicTo adds a conic curve with control point (x1, y1), end point (x2, y2)
// and the given weight.
func (p *Path) ConicTo(x1, y1, x2, y2, weight float32) {
	p.c.SkiaPathConicTo(p.ptr, x1, y1, x2, y2, weight)
}

// CubicTo adds a cubic curve with control points (x1, y1) and (x2, y2)
// ending at (x3, y3).
func (p *Path) CubicTo(x1, y1, x2, y2, x3, y3 float32) {
	p.c.SkiaPathCubicTo(p.ptr, x1, y1, x2, y2, x3, y3)
}

// Close closes the current contour.
func (p *Path) Close() {
	p.c.SkiaPathClose(p.ptr)
}

// Reset clears the path (delegates to Skia's reset semantics).
func (p *Path) Reset() {
	p.c.SkiaPathReset(p.ptr)
}

// Rewind clears the path (delegates to Skia's rewind semantics, which
// presumably keeps allocated storage — confirm against the Skia docs).
func (p *Path) Rewind() {
	p.c.SkiaPathRewind(p.ptr)
}
// NumPoints returns the number of points in the path.
func (p *Path) NumPoints() int {
	return int(p.c.SkiaPathCountPoints(p.ptr))
}

// Point returns the point at index idx. The value is copied out of the
// context's scratch stack before the frame is restored.
func (p *Path) Point(idx int) Point {
	st := p.c.StackSave()
	defer p.c.StackRestore(st)
	ptr := p.c.StackAlloc(8) // one Point (8 bytes — presumably two float32s)
	p.c.SkiaPathGetPoint(ptr, p.ptr, int32(idx))
	return *(*Point)(unsafe.Pointer(&p.c.Mem[ptr]))
}
// Points returns all points of the path, or nil when it has none.
// With peek=true the returned slice aliases the context's memory in the
// region freed by the deferred StackRestore, so it is only valid for
// immediate, read-only use before the next stack allocation. With
// peek=false a detached copy is returned. Panics when Skia reports a
// different count than expected.
func (p *Path) Points(peek bool) []Point {
	n := p.c.SkiaPathCountPoints(p.ptr)
	if n == 0 {
		return nil
	}
	st := p.c.StackSave()
	defer p.c.StackRestore(st)
	ptr := p.c.StackAlloc(n * 8) // 8 bytes per point
	m := p.c.SkiaPathGetPoints(p.ptr, ptr, n)
	if m != n {
		panic(fmt.Sprintf("expected %d points, got %d", n, m))
	}
	// Reinterpret the raw bytes as []Point without copying.
	rs := (*[1 << 29]Point)(unsafe.Pointer(&p.c.Mem[ptr]))[:n:n]
	if peek {
		return rs
	}
	ps := make([]Point, n)
	copy(ps, rs)
	return ps
}
// NumVerbs returns the number of verbs in the path.
func (p *Path) NumVerbs() int {
	return int(p.c.SkiaPathCountVerbs(p.ptr))
}

// PathVerb identifies one path-construction command, mirroring Skia's
// verb enumeration (values must stay in this order).
type PathVerb uint8

const (
	PathVerbMove PathVerb = iota
	PathVerbLine
	PathVerbQuad
	PathVerbConic
	PathVerbCubic
	PathVerbClose
	PathVerbDone
)
// Verbs returns all verbs of the path, or nil when it has none.
// With peek=true the returned slice aliases the context's memory in the
// region freed by the deferred StackRestore, so it is only valid for
// immediate, read-only use before the next stack allocation. With
// peek=false a detached copy is returned. Panics when Skia reports a
// different count than expected.
func (p *Path) Verbs(peek bool) []PathVerb {
	n := p.c.SkiaPathCountVerbs(p.ptr)
	if n == 0 {
		return nil
	}
	st := p.c.StackSave()
	defer p.c.StackRestore(st)
	ptr := p.c.StackAlloc(n) // one byte per verb
	m := p.c.SkiaPathGetVerbs(p.ptr, ptr, n)
	if m != n {
		panic(fmt.Sprintf("expected %d verbs, got %d", n, m))
	}
	// Reinterpret the raw bytes as []PathVerb without copying.
	rs := (*[1 << 29]PathVerb)(unsafe.Pointer(&p.c.Mem[ptr]))[:n:n]
	if peek {
		return rs
	}
	vs := make([]PathVerb, n)
	copy(vs, rs)
	return vs
}
// Bounds returns the path's bounding rectangle as reported by Skia's
// getBounds.
func (p *Path) Bounds() Rect {
	st := p.c.StackSave()
	defer p.c.StackRestore(st)
	ptr := p.c.StackAlloc(16) // one Rect (16 bytes — presumably four float32s)
	p.c.SkiaPathGetBounds(ptr, p.ptr)
	return *(*Rect)(unsafe.Pointer(&p.c.Mem[ptr]))
}

// ComputeTightBounds returns the path's bounding rectangle as reported
// by Skia's computeTightBounds, which may differ from Bounds for curves.
func (p *Path) ComputeTightBounds() Rect {
	st := p.c.StackSave()
	defer p.c.StackRestore(st)
	ptr := p.c.StackAlloc(16)
	p.c.SkiaPathComputeTightBounds(ptr, p.ptr)
	return *(*Rect)(unsafe.Pointer(&p.c.Mem[ptr]))
}
package editor
import (
"strings"
)
// basic represents a basic editor over a single line of runes.
// Valid positions are in range [0, len(e.buf)].
type basic struct {
	pos int    // cursor position, kept within [0, len(buf)]
	buf []rune // line contents
}
// move sets the cursor to the given position, clamped into the valid
// range [0, len(e.buf)].
func (e *basic) move(to int) {
	if to < 0 {
		to = 0
	} else if to > len(e.buf) {
		to = len(e.buf)
	}
	e.pos = to
}
// insert inserts s into the buffer at the given position.
// Given an invalid position, insert considers the position to be at the end of the buffer.
func (e *basic) insert(s []rune, at int) {
	// Clamp the insertion point into [0, len(buf)].
	switch {
	case at < 0:
		at = 0
	case at > len(e.buf):
		at = len(e.buf)
	}
	switch at {
	case 0:
		e.buf = append(s, e.buf...)
	case len(e.buf):
		e.buf = append(e.buf, s...)
	default:
		// The full slice expression e.buf[:at:at] caps capacity at `at`,
		// forcing append to allocate rather than clobber e.buf[at:].
		x := append(e.buf[:at:at], s...)
		e.buf = append(x, e.buf[at:]...)
	}
	// A cursor at or after the insertion point moves with the text.
	if at <= e.pos {
		e.pos += len(s)
	}
}
// max returns the larger of m and n.
func max(m, n int) int {
	if n > m {
		return n
	}
	return m
}

// min returns the smaller of m and n.
func min(m, n int) int {
	if n < m {
		return n
	}
	return m
}
// constrain clamps n into the closed interval [low, high].
func constrain(n, low, high int) int {
	switch {
	case n < low:
		return low
	case n > high:
		return high
	default:
		return n
	}
}
// delete deletes runes from the buffer [from, to].
// Given an invalid position, delete considers the position to be at the end of the buffer.
func (e *basic) delete(from, to int) {
	// Normalize to an ordered, in-bounds half-open range [left, right).
	left := constrain(min(from, to), 0, len(e.buf))
	right := constrain(max(from, to), 0, len(e.buf))
	switch {
	case left == 0:
		e.buf = e.buf[right:]
	case right == len(e.buf):
		e.buf = e.buf[:left]
	default:
		e.buf = append(e.buf[:left], e.buf[right:]...)
	}
	// Re-anchor the cursor: unchanged before the cut, shifted left after
	// it, and snapped to the cut point when it was inside the range.
	switch {
	case e.pos < left:
	case right < e.pos:
		e.pos = e.pos - (right - left)
	default:
		e.pos = left
	}
}
// slice returns a copy of the buffer between from and to (order does not
// matter). Out-of-range positions are clamped to the buffer bounds.
func (e *basic) slice(from, to int) []rune {
	left := constrain(min(from, to), 0, len(e.buf))
	right := constrain(max(from, to), 0, len(e.buf))
	out := make([]rune, right-left)
	copy(out, e.buf[left:right])
	return out
}
// index returns the position of the first occurrence of ch at or after
// start, or -1 when ch does not occur.
func (e *basic) index(ch rune, start int) int {
	for i := constrain(start, 0, len(e.buf)); i < len(e.buf); i++ {
		if e.buf[i] == ch {
			return i
		}
	}
	return -1
}

// lastIndex returns the position of the last occurrence of ch strictly
// before last, or -1 when ch does not occur.
func (e *basic) lastIndex(ch rune, last int) int {
	for i := constrain(last, 0, len(e.buf)) - 1; i >= 0; i-- {
		if e.buf[i] == ch {
			return i
		}
	}
	return -1
}
// indexFunc returns the first position at or after start where
// f(r) == truth, or -1 when there is none.
// indexFunc(f, start, true) == indexFunc(func(r) bool { return !f(r) }, start, false)
func (e *basic) indexFunc(f func(rune) bool, start int, truth bool) int {
	start = constrain(start, 0, len(e.buf))
	for i := start; i < len(e.buf); i++ {
		if f(e.buf[i]) == truth {
			return i
		}
	}
	return -1
}

// lastIndexFunc returns the last position strictly before last where
// f(r) == truth, or -1 when there is none.
// lastIndexFunc(f, start, true) == lastIndexFunc(func(r) bool { return !f(r) }, start, false)
func (e *basic) lastIndexFunc(f func(rune) bool, last int, truth bool) int {
	last = constrain(last, 0, len(e.buf))
	for i := last - 1; i >= 0; i-- {
		if f(e.buf[i]) == truth {
			return i
		}
	}
	return -1
}
// replace overwrites the buffer with s starting at position at, growing
// the buffer (space-padded) when the write extends past the end. A nil s
// is a no-op and the cursor is never moved.
func (e *basic) replace(s []rune, at int) {
	if s == nil {
		return
	}
	// Q. What should we do when `at` < 0 or len(e.buf) < `at`?
	switch {
	case len(e.buf) <= at:
		// Write starts past the end: pad with spaces, then append s.
		e.buf = append(e.buf, []rune(strings.Repeat(" ", at-len(e.buf)))...)
		e.buf = append(e.buf, s...)
	case at+len(s) <= 0:
		// Entirely before the buffer: nothing visible to write.
		// no-op
	case at < 0:
		// Partially before the buffer: keep only the overlapping tail of s.
		for i := 0; i < at+len(s); i++ {
			e.buf[i] = s[i-at]
		}
	case len(e.buf) < at+len(s):
		// Write overruns the end: grow to fit, then overwrite.
		v := make([]rune, at+len(s))
		copy(v, e.buf)
		for i := at; i < at+len(s); i++ {
			v[i] = s[i-at]
		}
		e.buf = v
	default:
		// Fully inside the buffer: overwrite in place.
		for i := at; i < at+len(s); i++ {
			e.buf[i] = s[i-at]
		}
	}
}
// siege surrounds the region between from and to with a bracket pair
// chosen by r. Besides literal pair characters, vim-surround-style
// aliases are accepted: b → (), B → {}, r → [], a → <>. Quote-like runes
// are duplicated on both sides; unrecognized runes leave the buffer
// untouched.
func (e *basic) siege(from int, to int, r rune) {
	var open, close rune
	switch r {
	case '\'', '"', '`', '@', '*', '+', '_', '|', '$':
		open, close = r, r
	case '(', ')', 'b':
		open, close = '(', ')'
	case '{', '}', 'B':
		open, close = '{', '}'
	case '[', ']', 'r':
		open, close = '[', ']'
	case '<', '>', 'a':
		open, close = '<', '>'
	default:
		return
	}
	// Insert at the right edge first so the `from` offset stays valid.
	e.insert([]rune{close}, to)
	e.insert([]rune{open}, from)
}
// wordForward moves the cursor to the start of the next word (vim "w"
// style). Character classes come from isWhitespace/isKeyword/isSymbol,
// defined elsewhere in this package.
func (e *basic) wordForward() {
	// Nothing to do at the end; one rune left just snaps to the end.
	switch n := len(e.buf) - e.pos; {
	case n < 1:
		return
	case n == 1:
		e.pos = len(e.buf)
		return
	}
	switch r := e.buf[e.pos]; {
	case isWhitespace(r):
		// On whitespace: jump to the first non-blank rune.
		if i := e.indexFunc(isWhitespace, e.pos+1, false); i > 0 {
			e.pos = i
			return
		}
	case isKeyword(r):
		// On a keyword rune: skip the keyword run, then any whitespace.
		if i := e.indexFunc(isKeyword, e.pos+1, false); i > 0 {
			if !isWhitespace(e.buf[i]) {
				e.pos = i
				return
			}
			if i := e.indexFunc(isWhitespace, i+1, false); i > 0 {
				e.pos = i
				return
			}
		}
	default:
		// On a symbol rune: skip the symbol run, then any whitespace.
		if i := e.indexFunc(isSymbol, e.pos+1, false); i > 0 {
			if isKeyword(e.buf[i]) {
				e.pos = i
				return
			}
			if i := e.indexFunc(isWhitespace, i+1, false); i > 0 {
				e.pos = i
				return
			}
		}
	}
	// No further word start: park the cursor at the end of the buffer.
	e.pos = len(e.buf)
}

// wordBackward moves the cursor to the start of the previous word (vim
// "b" style).
func (e *basic) wordBackward() {
	switch e.pos {
	case 0:
		return
	case 1:
		e.pos = 0
		return
	}
	n := e.pos - 1
	// Skip any whitespace immediately left of the cursor.
	switch r := e.buf[n]; {
	case isWhitespace(r):
		n = e.lastIndexFunc(isWhitespace, n, false)
		if n < 0 {
			e.pos = 0
			return
		}
	}
	// Walk back over the run of same-class runes to its first rune.
	switch r := e.buf[n]; {
	case isKeyword(r):
		if i := e.lastIndexFunc(isKeyword, n, false); i >= 0 {
			e.pos = i + 1
			return
		}
	default:
		if i := e.lastIndexFunc(isSymbol, n, false); i >= 0 {
			e.pos = i + 1
			return
		}
	}
	e.pos = 0
}

// wordForwardNonBlank moves to the start of the next whitespace-delimited
// word (vim "W" style: only blanks separate words).
func (e *basic) wordForwardNonBlank() {
	i := e.indexFunc(isWhitespace, e.pos, true)
	if i < 0 {
		e.pos = len(e.buf)
		return
	}
	i = e.indexFunc(isWhitespace, i+1, false)
	if i < 0 {
		e.pos = len(e.buf)
		return
	}
	e.pos = i
}

// wordBackwardNonBlank moves to the start of the previous
// whitespace-delimited word (vim "B" style).
func (e *basic) wordBackwardNonBlank() {
	i := e.lastIndexFunc(isWhitespace, e.pos, false)
	if i < 0 {
		e.pos = 0
		return
	}
	i = e.lastIndexFunc(isWhitespace, i, true)
	if i < 0 {
		e.pos = 0
		return
	}
	e.pos = i + 1
}

// wordEnd moves the cursor to the end of the current or next word (vim
// "e" style).
func (e *basic) wordEnd() {
	switch n := len(e.buf) - e.pos; {
	case n < 1:
		return
	case n == 1:
		e.pos = len(e.buf)
		return
	}
	// Step off the current rune first so repeated calls make progress.
	e.pos++
	switch r := e.buf[e.pos]; {
	case isWhitespace(r):
		// On whitespace: find the next word, then the end of its run.
		if i := e.indexFunc(isWhitespace, e.pos+1, false); i > 0 {
			switch r := e.buf[i]; {
			case isKeyword(r):
				if i := e.indexFunc(isKeyword, i+1, false); i > 0 {
					e.pos = i - 1
					return
				}
			default:
				if i := e.indexFunc(isSymbol, i+1, false); i > 0 {
					e.pos = i - 1
					return
				}
			}
		}
	case isKeyword(r):
		if i := e.indexFunc(isKeyword, e.pos+1, false); i > 0 {
			e.pos = i - 1
			return
		}
	default:
		if i := e.indexFunc(isSymbol, e.pos+1, false); i > 0 {
			e.pos = i - 1
			return
		}
	}
	// No later boundary: rest on the last rune of the buffer.
	e.pos = len(e.buf) - 1
}

// wordEndNonBlank moves the cursor to the end of the current or next
// whitespace-delimited word (vim "E" style).
func (e *basic) wordEndNonBlank() {
	switch n := len(e.buf) - e.pos; {
	case n < 1:
		return
	case n == 1:
		e.pos = len(e.buf)
		return
	}
	e.pos++
	switch r := e.buf[e.pos]; {
	case isWhitespace(r):
		if i := e.indexFunc(isWhitespace, e.pos+1, false); i > 0 {
			if i := e.indexFunc(isWhitespace, i+1, true); i > 0 {
				e.pos = i - 1
				return
			}
		}
	default:
		if i := e.indexFunc(isWhitespace, e.pos+1, true); i > 0 {
			e.pos = i - 1
			return
		}
	}
	e.pos = len(e.buf) - 1
}

// wordEndBackward moves the cursor to the end of the previous word (vim
// "ge" style).
func (e *basic) wordEndBackward() {
	switch n := e.pos; {
	case n < 1:
		return
	case n == 1:
		e.pos = 0
		return
	}
	switch r := e.buf[e.pos]; {
	case isWhitespace(r):
		// On whitespace: the previous non-blank rune is a word end.
		if i := e.lastIndexFunc(isWhitespace, e.pos, false); i > 0 {
			e.pos = i
			return
		}
	case isKeyword(r):
		// Walk before the current keyword run, skipping whitespace.
		if i := e.lastIndexFunc(isKeyword, e.pos, false); i > 0 {
			switch {
			case isWhitespace(e.buf[i]):
				if i := e.lastIndexFunc(isWhitespace, i, false); i > 0 {
					e.pos = i
					return
				}
			default:
				e.pos = i
				return
			}
		}
	default:
		// Same for a symbol run.
		if i := e.lastIndexFunc(isSymbol, e.pos, false); i > 0 {
			switch {
			case isWhitespace(e.buf[i]):
				if i := e.lastIndexFunc(isWhitespace, i, false); i > 0 {
					e.pos = i
					return
				}
			default:
				e.pos = i
				return
			}
		}
	}
	e.pos = 0
}

// wordEndBackwardNonBlank moves the cursor to the end of the previous
// whitespace-delimited word (vim "gE" style).
func (e *basic) wordEndBackwardNonBlank() {
	switch n := e.pos; {
	case n < 1:
		return
	case n == 1:
		e.pos = 0
		return
	}
	switch r := e.buf[e.pos]; {
	case isWhitespace(r):
		if i := e.lastIndexFunc(isWhitespace, e.pos, false); i > 0 {
			e.pos = i
			return
		}
	default:
		if i := e.lastIndexFunc(isWhitespace, e.pos, true); i > 0 {
			if i := e.lastIndexFunc(isWhitespace, i, false); i > 0 {
				e.pos = i
				return
			}
		}
	}
	e.pos = 0
}
package geogoth
// LineStringLength returns the length of a LineString feature, summing
// the distances (as computed by DistancePointPointDeg) between
// consecutive vertices.
func LineStringLength(feature *Feature) float64 {
	coords := (feature.Geom.Coordinates).([][]float64) // Convert interface to [][]float64

	// Collect the vertices once, pre-sizing the slice to the vertex count.
	lineCoords := make([][]float64, 0, len(coords))
	for i := range coords {
		y, x := GetTwoDimArrayCoordinates(feature, i) // Coordinates of LineString
		lineCoords = append(lineCoords, []float64{y, x})
	}

	var length float64
	for i := 0; i < len(lineCoords)-1; i++ {
		length += DistancePointPointDeg(lineCoords[i][0], lineCoords[i][1], lineCoords[i+1][0], lineCoords[i+1][1])
	}
	return length
}
// MultiLineStringLength returns the total length of a MultiLineString
// feature: the sum, over every component line, of the distances between
// its consecutive vertices.
func MultiLineStringLength(feature *Feature) float64 {
	multlinestr := (feature.Geom.Coordinates).([][][]float64)

	// Gather the vertex list of every component line.
	lines := make([][][]float64, 0, len(multlinestr))
	for i := range multlinestr {
		line := make([][]float64, 0, len(multlinestr[i]))
		for j := range multlinestr[i] {
			y, x := GetThreeDimArrayCoordinates(feature, i, j)
			line = append(line, []float64{y, x})
		}
		lines = append(lines, line)
	}

	var length float64
	for _, line := range lines {
		for j := 0; j+1 < len(line); j++ {
			length += DistancePointPointDeg(line[j][0], line[j][1], line[j+1][0], line[j+1][1])
		}
	}
	return length
}
// PolygonLength returns the perimeter of a Polygon feature: the sum,
// over every ring, of the distances between its consecutive vertices.
func PolygonLength(feature *Feature) float64 {
	polygon := (feature.Geom.Coordinates).([][][]float64)

	// Gather the vertex list of every ring.
	rings := make([][][]float64, 0, len(polygon))
	for i := range polygon {
		ring := make([][]float64, 0, len(polygon[i]))
		for j := range polygon[i] {
			y, x := GetThreeDimArrayCoordinates(feature, i, j)
			ring = append(ring, []float64{y, x})
		}
		rings = append(rings, ring)
	}

	var length float64
	for _, ring := range rings {
		for j := 0; j+1 < len(ring); j++ {
			length += DistancePointPointDeg(ring[j][0], ring[j][1], ring[j+1][0], ring[j+1][1])
		}
	}
	return length
}
// MultipolygonLength counts lenght of MultipolygonLength
func MultipolygonLength(feature *Feature) float64 {
var length float64
mpolyg := (feature.Geom.Coordinates).([][][][]float64)
mpolygCoords := make([][][]float64, 0) // Creates slice for coords of the MultiPolygon
mlineCoords := make([][]float64, 0) // Creates slice for coords of one line
for m := range mpolyg { // Finds coords of MultiPolygon
for p := range mpolyg[m] {
for i := range mpolyg[m][p] {
y, x := GetFourDimArrayCoordinates(feature, m, p, i)
mlineCoords = append(mlineCoords, []float64{y, x})
}
mpolygCoords = append(mpolygCoords, mlineCoords)
mlineCoords = nil // empty slice
}
}
for i := range mpolygCoords {
for j := 0; j < len(mpolygCoords[i])-1; j++ {
lengthTmp := DistancePointPointDeg(mpolygCoords[i][j][0], mpolygCoords[i][j][1], mpolygCoords[i][j+1][0], mpolygCoords[i][j+1][1])
length = length + lengthTmp
}
}
return length
} | geojson/lengths.go | 0.79732 | 0.554712 | lengths.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.