code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package geom
import (
"github.com/water-vapor/euclidea-solver/configs"
"github.com/water-vapor/euclidea-solver/pkg/hashset"
"math"
"math/rand"
)
// Segment is uniquely determined by its sorted endpoints
type Segment struct {
	hashset.Serializable
	// Canonical order established by NewSegment: ascending x, or ascending y
	// when the x coordinates are equal within configs.Tolerance.
	point1, point2 *Point
}
// NewSegment creates a segment from two points, storing them in canonical
// order: ascending x, or ascending y when the x coordinates are equal within
// configs.Tolerance. Panics if the two points coincide.
func NewSegment(pt1, pt2 *Point) *Segment {
	if pt1.Equal(pt2) {
		panic("Two points of segment is same!")
	}
	pt1First := false
	if math.Abs(pt1.x-pt2.x) < configs.Tolerance {
		// x coordinates equal within tolerance: order by y.
		pt1First = pt1.y < pt2.y
	} else if pt1.x < pt2.x {
		// Otherwise order by x. The else is the fix: previously this branch
		// also ran in the tolerance case, so a pair whose x values differed
		// by less than the tolerance could be re-ordered by x, contradicting
		// the ordering already chosen by the y comparison.
		pt1First = true
	}
	if pt1First {
		return &Segment{point1: pt1, point2: pt2}
	}
	return &Segment{point1: pt2, point2: pt1}
}
// NewSegmentFromDirection creates a segment with one point, a direction and length
//
// NOTE(review): this mutates the caller's direction vector via SetLength —
// confirm callers do not reuse the vector afterwards.
func NewSegmentFromDirection(start *Point, direction *Vector2D, length float64) *Segment {
	// Scale the direction to the requested length, then offset from start.
	direction.SetLength(length)
	pt2 := NewPoint(start.x+direction.x, start.y+direction.y)
	return NewSegment(start, pt2)
}
// GetEndPoints returns both end points of a segment
// (point1 first, i.e. the canonically smaller endpoint).
func (s *Segment) GetEndPoints() (*Point, *Point) {
	return s.point1, s.point2
}
// Serialize returns the hash of the segment
// Both endpoints' coordinates are rounded to configs.HashPrecision and
// combined via a polynomial in configs.Prime, so two segments with (nearly)
// identical endpoints serialize to the same value.
func (s *Segment) Serialize() interface{} {
	cx1 := int64(math.Round(s.point1.x * configs.HashPrecision))
	cy1 := int64(math.Round(s.point1.y * configs.HashPrecision))
	cx2 := int64(math.Round(s.point2.x * configs.HashPrecision))
	cy2 := int64(math.Round(s.point2.y * configs.HashPrecision))
	return ((cx1*configs.Prime+cy1)*configs.Prime+cx2)*configs.Prime + cy2
}
// PointInRange checks whether a point is in the coordinates range of the segment
// It relies on the canonical endpoint order from NewSegment: point1.x <=
// point2.x always, and point1.y <= point2.y for (near-)vertical segments.
func (s *Segment) PointInRange(pt *Point) bool {
	// range based test
	if pt.x < s.point1.x-configs.Tolerance || pt.x > s.point2.x+configs.Tolerance {
		return false
	}
	// test on y range only if line is vertical
	// (for non-vertical segments the x test above is sufficient given the
	// point is expected to lie on the segment's supporting line)
	if math.Abs(s.point1.x-s.point2.x) < configs.Tolerance {
		if pt.y < s.point1.y-configs.Tolerance || pt.y > s.point2.y+configs.Tolerance {
			return false
		}
	}
	return true
}
// ContainsPoint reports whether pt lies on the segment: it must fall inside
// the segment's coordinate range and on the segment's supporting line.
func (s *Segment) ContainsPoint(pt *Point) bool {
	return s.PointInRange(pt) && NewLineFromSegment(s).ContainsPoint(pt)
}
// IntersectLine returns intersections with a line
// (delegates to Line.IntersectSegment).
func (s *Segment) IntersectLine(l *Line) *Intersection {
	return l.IntersectSegment(s)
}

// IntersectHalfLine returns intersections with a half line
// (delegates to HalfLine.IntersectSegment).
func (s *Segment) IntersectHalfLine(h *HalfLine) *Intersection {
	return h.IntersectSegment(s)
}
// IntersectSegment returns intersections with a segment
func (s *Segment) IntersectSegment(s2 *Segment) *Intersection {
	// intersect as if it is a line
	intersection := s.IntersectLine(NewLineFromSegment(s2))
	// parallel, no solution, just return
	if intersection.SolutionNumber == 0 {
		return intersection
	}
	// checks whether the intersection is on s2, since we've extended it
	pt := intersection.Solutions[0]
	if pt.OnSegment(s2) {
		return intersection
	}
	// The line intersection misses s2 itself: report an empty intersection.
	return NewIntersection()
}
// IntersectCircle returns intersections with a circle
// (delegates to Circle.IntersectSegment).
func (s *Segment) IntersectCircle(c *Circle) *Intersection {
	return c.IntersectSegment(s)
}
// Length returns the Euclidean length of the segment.
func (s *Segment) Length() float64 {
	// math.Hypot avoids intermediate overflow/underflow for extreme
	// coordinates, unlike Sqrt(dx*dx + dy*dy).
	return math.Hypot(s.point2.x-s.point1.x, s.point2.y-s.point1.y)
}
// Bisector returns a line as the bisector of the segment
// The result passes through the midpoint and is perpendicular to the
// segment's direction.
func (s *Segment) Bisector() *Line {
	pt := NewPoint((s.point1.x+s.point2.x)/2, (s.point1.y+s.point2.y)/2)
	v := NewVector2DFromTwoPoints(s.point1, s.point2).NormalVector()
	return NewLineFromDirection(pt, v)
}
// GetRandomPoint returns a random point on the segment
func (s *Segment) GetRandomPoint() *Point {
v := NewVector2DFromTwoPoints(s.point1, s.point2)
v.SetLength(rand.Float64() * v.Length())
return NewPoint(s.point1.x+v.x, s.point1.y+v.y)
} | pkg/geom/segment.go | 0.845209 | 0.461017 | segment.go | starcoder |
package aabb
import (
"reflect"
)
// Tree is a dynamic AABB tree: a binary tree whose leaves hold objects and
// whose internal nodes hold the merged bounding box of their children.
type Tree struct {
	Root *treeNode
	// NodeIndexMap maps each inserted object to its leaf for O(1) removal.
	NodeIndexMap map[AABB]*treeNode
}

// NewTree returns an empty tree ready for Insert.
func NewTree() *Tree {
	return &Tree{
		NodeIndexMap: make(map[AABB]*treeNode),
	}
}

// IsEmpty reports whether the tree holds no objects.
func (tree *Tree) IsEmpty() bool {
	return tree.Root == nil
}
// Depth returns the number of levels in the tree (0 for an empty tree).
// As a side effect it refreshes every node's Depth field during the walk.
func (tree *Tree) Depth() int {
	// Guard the empty tree: the previous implementation pushed the nil root
	// onto the stack and dereferenced it, panicking.
	if tree.Root == nil {
		return 0
	}
	stack := newTreeNodeStack()
	stack.Push(tree.Root)
	var maxDepth int
	for !stack.Empty() {
		next := stack.Pop()
		if next.Left != nil {
			next.Left.Depth = next.Depth + 1
			stack.Push(next.Left)
		}
		if next.Right != nil {
			next.Right.Depth = next.Depth + 1
			stack.Push(next.Right)
		}
		if maxDepth < next.Depth+1 {
			maxDepth = next.Depth + 1
		}
	}
	return maxDepth
}
// Insert adds object as a new leaf. It panics if the object is already in
// the tree.
func (tree *Tree) Insert(object AABB) {
	if tree.NodeIndexMap[object] != nil {
		panic(ErrAlreadyInTree)
	}
	node := newTreeNode(object)
	tree.insertLeaf(node)
	tree.NodeIndexMap[object] = node
}

// Remove deletes object's leaf from the tree. It panics if the object was
// never inserted.
func (tree *Tree) Remove(object AABB) {
	node, ok := tree.NodeIndexMap[object]
	if !ok {
		panic(ErrtNotInTree)
	}
	tree.removeLeaf(node)
	delete(tree.NodeIndexMap, object)
}
// removeLeaf unlinks node from the tree, promoting its sibling into the
// parent's place and refreshing ancestor bounding boxes.
func (tree *Tree) removeLeaf(node *treeNode) {
	if node == tree.Root {
		tree.Root = nil
		return
	}
	parent := node.Parent
	grandParent := parent.Parent
	sibling := node.GetSibling()
	node.Parent = nil
	if grandParent == nil {
		// Parent was the root: the sibling becomes the new root.
		tree.Root = sibling
		sibling.Parent = nil
		return
	}
	// Splice the sibling into the parent's slot, then fix AABBs above it.
	parent.replaceWith(sibling)
	tree.fixUpwardsTree(grandParent)
}
// insertLeaf descends from the root using a surface-area heuristic to pick
// the best sibling for node, then splices a new parent in above that sibling
// and refreshes ancestor bounding boxes.
func (tree *Tree) insertLeaf(node *treeNode) {
	if tree.Root == nil {
		tree.Root = node
		return
	}
	currentNode := tree.Root
	for !currentNode.IsLeaf() {
		leftNode := currentNode.Left
		rightNode := currentNode.Right
		combinedAabb := Merge(currentNode, node)
		// Cost of making a new parent here versus pushing the node further
		// down into one of the children.
		newParentNodeCost := 2.0 * combinedAabb.SurfaceArea()
		minimumPushDownCost := 2.0 * (combinedAabb.SurfaceArea() - currentNode.AABB().SurfaceArea())
		// costFunc estimates the cost of descending into one child.
		costFunc := func(side *treeNode) float64 {
			if side.IsLeaf() {
				return Merge(node, side).SurfaceArea() + minimumPushDownCost
			} else {
				newAABB := Merge(node, side)
				return (newAABB.SurfaceArea() - side.AABB().SurfaceArea()) + minimumPushDownCost
			}
		}
		costLeft := costFunc(leftNode)
		costRight := costFunc(rightNode)
		// Stop descending when splitting here is cheaper than either child.
		if newParentNodeCost < costLeft && newParentNodeCost < costRight {
			break
		}
		if costLeft < costRight {
			currentNode = leftNode
		} else {
			currentNode = rightNode
		}
	}
	// Create a new parent holding the chosen sibling and the new leaf.
	sibling := currentNode
	oldParent := sibling.Parent
	newParent := newTreeNode(Merge(node, sibling))
	newParent.Parent = oldParent
	newParent.Left = sibling
	newParent.Right = node
	node.Parent = newParent
	sibling.Parent = newParent
	switch {
	case oldParent == nil:
		tree.Root = newParent
	case oldParent.Left == sibling:
		oldParent.Left = newParent
	case oldParent.Right == sibling:
		oldParent.Right = newParent
	}
	tree.fixUpwardsTree(node.Parent)
}
// fixUpwardsTree recomputes the merged bounding box of node and every
// ancestor up to the root. Callers only pass internal nodes (both children
// non-nil), which Merge requires.
func (tree *Tree) fixUpwardsTree(node *treeNode) {
	for node != nil {
		node.ObjectAABB = Merge(node.Left, node.Right)
		node = node.Parent
	}
}
// QueryOverlaps returns every stored object whose bounding box overlaps
// object's bounding box, excluding object itself. It panics when object is
// not a pointer (non-pointer values would break identity comparisons).
func (tree *Tree) QueryOverlaps(object AABB) []AABB {
	if reflect.ValueOf(object).Kind() != reflect.Ptr {
		panic(ErrNotAReference)
	}
	overlaps := make([]AABB, 0)
	if tree.IsEmpty() {
		return overlaps
	}
	stack := newTreeNodeStack()
	testAABB := object.AABB()
	stack.Push(tree.Root)
	for !stack.Empty() {
		node := stack.Pop()
		if Overlaps(node, testAABB) {
			if node.IsLeaf() && node.Object != object {
				overlaps = append(overlaps, node.Object)
				continue
			}
			// Internal node (or the query object's own leaf): descend.
			if node.Left != nil {
				stack.Push(node.Left)
			}
			if node.Right != nil {
				stack.Push(node.Right)
			}
		}
	}
	return overlaps
}
// treeNode is a node of the AABB tree. Leaves carry an Object; internal
// nodes carry only the merged bounding box of their children.
type treeNode struct {
	Object     AABB      `json:"-"`
	ObjectAABB *AABBData `json:"aabb"`
	Parent     *treeNode `json:"-"`
	Left       *treeNode `json:"left"`
	Right      *treeNode `json:"right"`
	Depth      int       `json:"depth"`
}

// newTreeNode wraps object in a leaf node. It panics when object is not a
// pointer, matching the identity requirements of Tree.NodeIndexMap.
func newTreeNode(object AABB) *treeNode {
	if reflect.ValueOf(object).Kind() != reflect.Ptr {
		panic(ErrNotAReference)
	}
	return &treeNode{
		Object:     object,
		ObjectAABB: object.AABB(),
	}
}

// IsLeaf reports whether the node has no children. Checking Left alone is
// sufficient: nodes always have either zero or two children.
func (node *treeNode) IsLeaf() bool {
	return node.Left == nil
}

// AABB returns the node's cached bounding box.
func (node *treeNode) AABB() *AABBData {
	return node.ObjectAABB
}
// GetSibling returns the other child of this node's parent. It panics when
// the node has no parent or when the parent does not link back to the node.
func (node *treeNode) GetSibling() *treeNode {
	parent := node.Parent
	if parent == nil {
		panic("node doesn't contain a parent")
	}
	if parent.Left == node {
		return parent.Right
	}
	if parent.Right == node {
		return parent.Left
	}
	panic("parent doesn't contain children, Tree is corrupted.")
}

// replaceWith splices other into the position node currently occupies under
// its parent. It panics when node has no parent or the parent link is broken.
func (node *treeNode) replaceWith(other *treeNode) {
	parent := node.Parent
	if parent == nil {
		panic("node doesn't contain a parent")
	}
	other.Parent = parent
	if parent.Left == node {
		parent.Left = other
		return
	}
	if parent.Right == node {
		parent.Right = other
		return
	}
	panic("parent doesn't contain children, Tree is corrupted.")
}
type treeNodeStack struct {
data []*treeNode
}
func newTreeNodeStack() *treeNodeStack {
return &treeNodeStack{
data: make([]*treeNode, 0),
}
}
func (stack *treeNodeStack) Push(node *treeNode) {
stack.data = append(stack.data, node)
}
func (stack *treeNodeStack) Pop() *treeNode {
next := stack.data[len(stack.data)-1]
stack.data = stack.data[:len(stack.data)-1]
return next
}
func (stack *treeNodeStack) Empty() bool {
return len(stack.data) == 0
} | resolv/aabb/tree.go | 0.698329 | 0.434581 | tree.go | starcoder |
package iso20022
// FinancialInstrumentAttributes68 : Description of the financial instrument.
// Generated-style ISO 20022 message component; field comments come from the
// message definition.
type FinancialInstrumentAttributes68 struct {
	// Identifies the financial instrument.
	SecurityIdentification *SecurityIdentification19 `xml:"SctyId"`
	// Quantity of entitled intermediate securities based on the balance of underlying securities.
	Quantity *DecimalNumber `xml:"Qty,omitempty"`
	// Specifies whether terms of the event allow resale of the rights.
	RenounceableEntitlementStatusType *RenounceableEntitlementStatusTypeFormat3Choice `xml:"RnncblEntitlmntStsTp,omitempty"`
	// Specifies how fractions resulting from derived securities will be processed or how prorated decisions will be rounding, if provided with a pro ration rate.
	FractionDisposition *FractionDispositionType25Choice `xml:"FrctnDspstn,omitempty"`
	// Quantity of intermediate securities awarded for a given quantity of underlying security.
	IntermediateSecuritiesToUnderlyingRatio *QuantityToQuantityRatio1 `xml:"IntrmdtSctiesToUndrlygRatio,omitempty"`
	// Last reported/known price of a financial instrument in a market.
	MarketPrice *AmountPrice2 `xml:"MktPric,omitempty"`
	// Date on which an order expires or at which a privilege or offer terminates.
	ExpiryDate *DateFormat30Choice `xml:"XpryDt"`
	// Date of the posting (credit or debit) to the account.
	PostingDate *DateFormat30Choice `xml:"PstngDt"`
	// Period during which intermediate or outturn securities are tradable in a secondary market.
	TradingPeriod *Period4 `xml:"TradgPrd,omitempty"`
	// Balance of uninstructed position.
	UninstructedBalance *BalanceFormat5Choice `xml:"UinstdBal,omitempty"`
	// Balance of instructed position.
	InstructedBalance *BalanceFormat5Choice `xml:"InstdBal,omitempty"`
}
// AddSecurityIdentification allocates and returns the SecurityIdentification component.
func (f *FinancialInstrumentAttributes68) AddSecurityIdentification() *SecurityIdentification19 {
	f.SecurityIdentification = new(SecurityIdentification19)
	return f.SecurityIdentification
}

// SetQuantity stores value as the Quantity.
func (f *FinancialInstrumentAttributes68) SetQuantity(value string) {
	f.Quantity = (*DecimalNumber)(&value)
}

// AddRenounceableEntitlementStatusType allocates and returns the RenounceableEntitlementStatusType component.
func (f *FinancialInstrumentAttributes68) AddRenounceableEntitlementStatusType() *RenounceableEntitlementStatusTypeFormat3Choice {
	f.RenounceableEntitlementStatusType = new(RenounceableEntitlementStatusTypeFormat3Choice)
	return f.RenounceableEntitlementStatusType
}

// AddFractionDisposition allocates and returns the FractionDisposition component.
func (f *FinancialInstrumentAttributes68) AddFractionDisposition() *FractionDispositionType25Choice {
	f.FractionDisposition = new(FractionDispositionType25Choice)
	return f.FractionDisposition
}

// AddIntermediateSecuritiesToUnderlyingRatio allocates and returns the IntermediateSecuritiesToUnderlyingRatio component.
func (f *FinancialInstrumentAttributes68) AddIntermediateSecuritiesToUnderlyingRatio() *QuantityToQuantityRatio1 {
	f.IntermediateSecuritiesToUnderlyingRatio = new(QuantityToQuantityRatio1)
	return f.IntermediateSecuritiesToUnderlyingRatio
}

// AddMarketPrice allocates and returns the MarketPrice component.
func (f *FinancialInstrumentAttributes68) AddMarketPrice() *AmountPrice2 {
	f.MarketPrice = new(AmountPrice2)
	return f.MarketPrice
}

// AddExpiryDate allocates and returns the ExpiryDate component.
func (f *FinancialInstrumentAttributes68) AddExpiryDate() *DateFormat30Choice {
	f.ExpiryDate = new(DateFormat30Choice)
	return f.ExpiryDate
}

// AddPostingDate allocates and returns the PostingDate component.
func (f *FinancialInstrumentAttributes68) AddPostingDate() *DateFormat30Choice {
	f.PostingDate = new(DateFormat30Choice)
	return f.PostingDate
}

// AddTradingPeriod allocates and returns the TradingPeriod component.
func (f *FinancialInstrumentAttributes68) AddTradingPeriod() *Period4 {
	f.TradingPeriod = new(Period4)
	return f.TradingPeriod
}

// AddUninstructedBalance allocates and returns the UninstructedBalance component.
func (f *FinancialInstrumentAttributes68) AddUninstructedBalance() *BalanceFormat5Choice {
	f.UninstructedBalance = new(BalanceFormat5Choice)
	return f.UninstructedBalance
}
func (f *FinancialInstrumentAttributes68) AddInstructedBalance() *BalanceFormat5Choice {
f.InstructedBalance = new(BalanceFormat5Choice)
return f.InstructedBalance
} | data/train/go/cf69142594cacb429dddb669bd9a425dcfd7951eFinancialInstrumentAttributes68.go | 0.848282 | 0.425128 | cf69142594cacb429dddb669bd9a425dcfd7951eFinancialInstrumentAttributes68.go | starcoder |
package nn
import (
"image"
"image/png"
"io"
"math"
"gonum.org/v1/gonum/mat"
"gonum.org/v1/gonum/stat/distuv"
)
// sigmoid is the logistic activation 1/(1+e^-x). The r and c arguments are
// unused; they exist so the function matches the (i, j, v) callback shape
// used with apply below.
func sigmoid(r, c int, x float64) float64 {
	return 1.0 / (1.0 + math.Exp(-x))
}

// sigmoidDx is the derivative of the sigmoid expressed in terms of the
// sigmoid's output value x: s'(t) = s(t) * (1 - s(t)).
func sigmoidDx(x float64) float64 {
	return x * (1.0 - x)
}
// sigmoidPrime computes the elementwise derivative m * (1 - m), where m is
// assumed to already hold sigmoid outputs.
// NOTE(review): the ones matrix is built rows x 1, so this assumes m is a
// column vector — confirm callers never pass wider matrices.
func sigmoidPrime(m mat.Matrix) mat.Matrix {
	rows, _ := m.Dims()
	o := make([]float64, rows)
	for i := range o {
		o[i] = 1
	}
	ones := mat.NewDense(rows, 1, o)
	return multiply(m, subtract(ones, m)) // m * (1 - m)
}
// dot returns the matrix product m x n.
func dot(m, n mat.Matrix) mat.Matrix {
	r, _ := m.Dims()
	_, c := n.Dims()
	o := mat.NewDense(r, c, nil)
	o.Product(m, n)
	return o
}

// apply maps fn over every element of m.
func apply(fn func(i, j int, v float64) float64, m mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	o := mat.NewDense(r, c, nil)
	o.Apply(fn, m)
	return o
}

// scale multiplies every element of m by the scalar s.
func scale(s float64, m mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	o := mat.NewDense(r, c, nil)
	o.Scale(s, m)
	return o
}

// multiply is the elementwise (Hadamard) product of m and n.
func multiply(m, n mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	o := mat.NewDense(r, c, nil)
	o.MulElem(m, n)
	return o
}

// add returns the elementwise sum m + n.
func add(m, n mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	o := mat.NewDense(r, c, nil)
	o.Add(m, n)
	return o
}

// subtract returns the elementwise difference m - n.
func subtract(m, n mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	o := mat.NewDense(r, c, nil)
	o.Sub(m, n)
	return o
}

// addScalar adds the scalar i to every element of m.
func addScalar(i float64, m mat.Matrix) mat.Matrix {
	r, c := m.Dims()
	a := make([]float64, r*c)
	for x := 0; x < r*c; x++ {
		a[x] = i
	}
	n := mat.NewDense(r, c, a)
	return add(m, n)
}
// randomArray returns size values drawn uniformly from
// [-1/sqrt(v), 1/sqrt(v)] — a conventional weight-initialization range
// where v is the fan-in of the layer.
func randomArray(size int, v float64) (data []float64) {
	dist := distuv.Uniform{
		Min: -1 / math.Sqrt(v),
		Max: 1 / math.Sqrt(v),
	}
	data = make([]float64, size)
	for i := 0; i < size; i++ {
		data[i] = dist.Rand()
	}
	return
}
// max returns the index and value of the largest element of v (argmax).
// For an empty slice it returns (-1, -Inf). The guard on maxI makes the
// first element always win, which is correct even when every value equals
// -math.MaxFloat64 or -Inf — cases the old fixed sentinel mishandled.
func max(v []float64) (int, float64) {
	maxI := -1
	maxV := math.Inf(-1)
	for i, x := range v {
		if maxI == -1 || x > maxV {
			maxV = x
			maxI = i
		}
	}
	return maxI, maxV
}
func dataFromFile(imgFile io.Reader) (pixels []float64) {
img, err := png.Decode(imgFile)
if err != nil {
panic(err)
}
// create a grayscale image
bounds := img.Bounds()
gray := image.NewGray(bounds)
for x := 0; x < bounds.Max.X; x++ {
for y := 0; y < bounds.Max.Y; y++ {
var rgba = img.At(x, y)
gray.Set(x, y, rgba)
}
}
// make a pixel array
pixels = make([]float64, len(gray.Pix))
for i := 0; i < len(gray.Pix); i++ {
//pixels[i] = (float64(255-gray.Pix[i]) / 255.0 * 0.99) + 0.01
pixels[i] = (float64(gray.Pix[i]) / 255.0 * 0.99) + 0.01
}
return
} | src/nn/utils.go | 0.765681 | 0.456834 | utils.go | starcoder |
package square
// Location describes one of a Square seller's business locations, as
// returned by the Locations API. Generated-style model: every field is
// optional and mirrors the JSON payload one-to-one.
type Location struct {
	// The Square-issued ID of the location.
	Id string `json:"id,omitempty"`
	// The name of the location. This information appears in the dashboard as the nickname.
	Name string `json:"name,omitempty"`
	Address *Address `json:"address,omitempty"`
	// The [IANA Timezone](https://www.iana.org/time-zones) identifier for the timezone of the location.
	Timezone string `json:"timezone,omitempty"`
	// The Square features that are enabled for the location. See `LocationCapability` for possible values. See [LocationCapability](#type-locationcapability) for possible values
	Capabilities []string `json:"capabilities,omitempty"`
	// The status of the location, either active or inactive. See [LocationStatus](#type-locationstatus) for possible values
	Status string `json:"status,omitempty"`
	// The time when the location was created, in RFC 3339 format.
	CreatedAt string `json:"created_at,omitempty"`
	// The ID of the merchant that owns the location.
	MerchantId string `json:"merchant_id,omitempty"`
	// The country of the location, in ISO 3166-1-alpha-2 format. See `Country` for possible values. See [Country](#type-country) for possible values
	Country string `json:"country,omitempty"`
	// The language associated with the location, in [BCP 47 format](https://tools.ietf.org/html/bcp47#appendix-A).
	LanguageCode string `json:"language_code,omitempty"`
	// The currency used for all transactions at this location, in ISO 4217 format. See `Currency` for possible values. See [Currency](#type-currency) for possible values
	Currency string `json:"currency,omitempty"`
	// The phone number of the location in human readable format.
	PhoneNumber string `json:"phone_number,omitempty"`
	// The business name of the location This is the name visible to the customers of the location. For example, this name appears on customer receipts.
	BusinessName string `json:"business_name,omitempty"`
	// The type of the location, either physical or mobile. See [LocationType](#type-locationtype) for possible values
	Type_ string `json:"type,omitempty"`
	// The website URL of the location.
	WebsiteUrl string `json:"website_url,omitempty"`
	BusinessHours *BusinessHours `json:"business_hours,omitempty"`
	// The email of the location. This email is visible to the customers of the location. For example, the email appears on customer receipts.
	BusinessEmail string `json:"business_email,omitempty"`
	// The description of the location.
	Description string `json:"description,omitempty"`
	// The Twitter username of the location without the '@' symbol.
	TwitterUsername string `json:"twitter_username,omitempty"`
	// The Instagram username of the location without the '@' symbol.
	InstagramUsername string `json:"instagram_username,omitempty"`
	// The Facebook profile URL of the location. The URL should begin with 'facebook.com/'.
	FacebookUrl string `json:"facebook_url,omitempty"`
	Coordinates *Coordinates `json:"coordinates,omitempty"`
	// The URL of the logo image for the location. The Seller must choose this logo in the Seller dashboard (Receipts section) for the logo to appear on transactions (such as receipts, invoices) that Square generates on behalf of the Seller. This image should have an aspect ratio close to 1:1 and is recommended to be at least 200x200 pixels.
	LogoUrl string `json:"logo_url,omitempty"`
	// The URL of the Point of Sale background image for the location.
	PosBackgroundUrl string `json:"pos_background_url,omitempty"`
	// The merchant category code (MCC) of the location, as standardized by ISO 18245. The MCC describes the kind of goods or services sold at the location.
	Mcc string `json:"mcc,omitempty"`
	// The URL of a full-format logo image for the location. The Seller must choose this logo in the Seller dashboard (Receipts section) for the logo to appear on transactions (such as receipts, invoices) that Square generates on behalf of the Seller. This image can have an aspect ratio of 2:1 or greater and is recommended to be at least 1280x648 pixels.
	FullFormatLogoUrl string `json:"full_format_logo_url,omitempty"`
}
// Package measurement export utility functions to manipulate/format performance profile sample values.
package measurement
import (
"fmt"
"strings"
"time"
"github.com/google/pprof/profile"
)
// ScaleProfiles updates the units in a set of profiles to make them
// compatible. It scales the profiles to the smallest unit to preserve
// data.
func ScaleProfiles(profiles []*profile.Profile) error {
	if len(profiles) == 0 {
		return nil
	}
	// Find the finest common period type across all profiles.
	periodTypes := make([]*profile.ValueType, 0, len(profiles))
	for _, p := range profiles {
		if p.PeriodType != nil {
			periodTypes = append(periodTypes, p.PeriodType)
		}
	}
	periodType, err := CommonValueType(periodTypes)
	if err != nil {
		return fmt.Errorf("period type: %v", err)
	}
	// Identify common sample types
	numSampleTypes := len(profiles[0].SampleType)
	for _, p := range profiles[1:] {
		if numSampleTypes != len(p.SampleType) {
			return fmt.Errorf("inconsistent samples type count: %d != %d", numSampleTypes, len(p.SampleType))
		}
	}
	sampleType := make([]*profile.ValueType, numSampleTypes)
	for i := 0; i < numSampleTypes; i++ {
		sampleTypes := make([]*profile.ValueType, len(profiles))
		for j, p := range profiles {
			sampleTypes[j] = p.SampleType[i]
		}
		sampleType[i], err = CommonValueType(sampleTypes)
		if err != nil {
			return fmt.Errorf("sample types: %v", err)
		}
	}
	// Rescale every profile's period and sample values to the common units.
	for _, p := range profiles {
		if p.PeriodType != nil && periodType != nil {
			period, _ := Scale(p.Period, p.PeriodType.Unit, periodType.Unit)
			p.Period, p.PeriodType.Unit = int64(period), periodType.Unit
		}
		ratios := make([]float64, len(p.SampleType))
		for i, st := range p.SampleType {
			if sampleType[i] == nil {
				// No common unit was found for this sample type: leave the
				// values unscaled.
				ratios[i] = 1
				continue
			}
			ratios[i], _ = Scale(1, st.Unit, sampleType[i].Unit)
			p.SampleType[i].Unit = sampleType[i].Unit
		}
		if err := p.ScaleN(ratios); err != nil {
			return fmt.Errorf("scale: %v", err)
		}
	}
	return nil
}
// CommonValueType returns the finest type from a set of compatible
// types.
func CommonValueType(ts []*profile.ValueType) (*profile.ValueType, error) {
	if len(ts) <= 1 {
		return nil, nil
	}
	minType := ts[0]
	for _, t := range ts[1:] {
		if !compatibleValueTypes(minType, t) {
			return nil, fmt.Errorf("incompatible types: %v %v", *minType, *t)
		}
		// A scale ratio < 1 means t is a finer (smaller) unit than minType.
		if ratio, _ := Scale(1, t.Unit, minType.Unit); ratio < 1 {
			minType = t
		}
	}
	// Return a copy so callers cannot mutate the slice's element.
	rcopy := *minType
	return &rcopy, nil
}
// compatibleValueTypes reports whether two value types can be scaled into
// one another: same base type (modulo a plural 's') and units within the
// same family (identical, both time, or both memory).
func compatibleValueTypes(v1, v2 *profile.ValueType) bool {
	if v1 == nil || v2 == nil {
		return true // No grounds to disqualify.
	}
	// Remove trailing 's' to permit minor mismatches.
	if t1, t2 := strings.TrimSuffix(v1.Type, "s"), strings.TrimSuffix(v2.Type, "s"); t1 != t2 {
		return false
	}
	return v1.Unit == v2.Unit ||
		(isTimeUnit(v1.Unit) && isTimeUnit(v2.Unit)) ||
		(isMemoryUnit(v1.Unit) && isMemoryUnit(v2.Unit))
}
// Scale a measurement from an unit to a different unit and returns
// the scaled value and the target unit. The returned target unit
// will be empty if uninteresting (could be skipped).
func Scale(value int64, fromUnit, toUnit string) (float64, string) {
	// Avoid infinite recursion on overflow.
	// The double check excludes math.MinInt64, whose negation overflows
	// back to itself (-value > 0 is false for it).
	if value < 0 && -value > 0 {
		v, u := Scale(-value, fromUnit, toUnit)
		return -v, u
	}
	if m, u, ok := memoryLabel(value, fromUnit, toUnit); ok {
		return m, u
	}
	if t, u, ok := timeLabel(value, fromUnit, toUnit); ok {
		return t, u
	}
	// Skip non-interesting units.
	switch toUnit {
	case "count", "sample", "unit", "minimum", "auto":
		return float64(value), ""
	default:
		return float64(value), toUnit
	}
}
// Label returns the label used to describe a certain measurement, choosing
// the display unit automatically.
func Label(value int64, unit string) string {
	return ScaledLabel(value, unit, "auto")
}

// ScaledLabel scales the passed-in measurement (if necessary) and returns
// the label used to describe a float measurement.
func ScaledLabel(value int64, fromUnit, toUnit string) string {
	scaled, unit := Scale(value, fromUnit, toUnit)
	text := strings.TrimSuffix(fmt.Sprintf("%.2f", scaled), ".00")
	switch text {
	case "0", "-0":
		// Collapse signed zero and plain zero to a bare "0" with no unit.
		return "0"
	}
	return text + unit
}
// isMemoryUnit reports whether unit names a recognized memory size unit.
// Matching is case-insensitive and ignores a trailing plural 's'.
func isMemoryUnit(unit string) bool {
	u := strings.TrimSuffix(strings.ToLower(unit), "s")
	switch u {
	case "byte", "b", "kilobyte", "kb", "megabyte", "mb", "gigabyte", "gb":
		return true
	default:
		return false
	}
}
// memoryLabel converts value, expressed in fromUnit, into toUnit and returns
// the scaled value together with its display unit. ok is false when fromUnit
// is not a memory unit. A toUnit of "minimum" or "auto" picks the largest
// unit that keeps the value at or above 1.
func memoryLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) {
	normalize := func(s string) string {
		return strings.TrimSuffix(strings.ToLower(s), "s")
	}
	from, to := normalize(fromUnit), normalize(toUnit)

	// Convert the input to bytes.
	var total int64
	switch from {
	case "byte", "b":
		total = value
	case "kilobyte", "kb":
		total = value * 1024
	case "megabyte", "mb":
		total = value * 1024 * 1024
	case "gigabyte", "gb":
		total = value * 1024 * 1024 * 1024
	default:
		return 0, "", false
	}

	// Pick a target unit automatically when requested.
	if to == "minimum" || to == "auto" {
		switch {
		case total < 1024:
			to = "b"
		case total < 1024*1024:
			to = "kb"
		case total < 1024*1024*1024:
			to = "mb"
		default:
			to = "gb"
		}
	}

	switch to {
	case "kb", "kbyte", "kilobyte":
		return float64(total) / 1024, "kB", true
	case "mb", "mbyte", "megabyte":
		return float64(total) / (1024 * 1024), "MB", true
	case "gb", "gbyte", "gigabyte":
		return float64(total) / (1024 * 1024 * 1024), "GB", true
	default:
		// Anything unrecognized falls back to plain bytes.
		return float64(total), "B", true
	}
}
// isTimeUnit reports whether unit names a recognized time unit. Matching is
// case-insensitive; a trailing plural 's' is stripped only for names longer
// than two characters so the unit "s" itself survives.
func isTimeUnit(unit string) bool {
	u := strings.ToLower(unit)
	if len(u) > 2 {
		u = strings.TrimSuffix(u, "s")
	}
	switch u {
	case "nanosecond", "ns", "microsecond", "millisecond", "ms",
		"s", "second", "sec", "hr", "day", "week", "year":
		return true
	}
	return false
}
// timeLabel converts value, expressed in fromUnit, into toUnit and returns
// the scaled value with its display unit. ok is false when fromUnit is not
// a recognized time unit. A toUnit of "minimum" or "auto" picks a unit that
// keeps the value in a readable range.
func timeLabel(value int64, fromUnit, toUnit string) (v float64, u string, ok bool) {
	fromUnit = strings.ToLower(fromUnit)
	if len(fromUnit) > 2 {
		fromUnit = strings.TrimSuffix(fromUnit, "s")
	}
	toUnit = strings.ToLower(toUnit)
	if len(toUnit) > 2 {
		toUnit = strings.TrimSuffix(toUnit, "s")
	}
	// Convert the input to a time.Duration.
	var d time.Duration
	switch fromUnit {
	case "nanosecond", "ns":
		d = time.Duration(value) * time.Nanosecond
	case "microsecond":
		d = time.Duration(value) * time.Microsecond
	case "millisecond", "ms":
		d = time.Duration(value) * time.Millisecond
	case "second", "sec", "s":
		d = time.Duration(value) * time.Second
	case "cycle":
		// Cycles have no absolute duration; pass the value through unlabeled.
		return float64(value), "", true
	default:
		return 0, "", false
	}
	if toUnit == "minimum" || toUnit == "auto" {
		switch {
		case d < 1*time.Microsecond:
			toUnit = "ns"
		case d < 1*time.Millisecond:
			toUnit = "us"
		case d < 1*time.Second:
			toUnit = "ms"
		case d < 1*time.Minute:
			toUnit = "sec"
		case d < 1*time.Hour:
			toUnit = "min"
		case d < 24*time.Hour:
			toUnit = "hour"
		case d < 15*24*time.Hour:
			toUnit = "day"
		case d < 120*24*time.Hour:
			toUnit = "week"
		default:
			toUnit = "year"
		}
	}
	var output float64
	dd := float64(d)
	switch toUnit {
	case "ns", "nanosecond":
		output, toUnit = dd/float64(time.Nanosecond), "ns"
	case "us", "microsecond":
		output, toUnit = dd/float64(time.Microsecond), "us"
	case "ms", "millisecond":
		output, toUnit = dd/float64(time.Millisecond), "ms"
	case "min", "minute":
		output, toUnit = dd/float64(time.Minute), "mins"
	case "hour", "hr":
		output, toUnit = dd/float64(time.Hour), "hrs"
	case "day":
		output, toUnit = dd/float64(24*time.Hour), "days"
	case "week", "wk":
		output, toUnit = dd/float64(7*24*time.Hour), "wks"
	case "year", "yr":
		// BUG FIX: the divisor previously contained a stray factor of 7
		// (365*7*24*time.Hour); a year is 365 days.
		output, toUnit = dd/float64(365*24*time.Hour), "yrs"
	default:
		fallthrough
	case "sec", "second", "s":
		output, toUnit = dd/float64(time.Second), "s"
	}
	return output, toUnit, true
}
// Package chunks provides facilities for representing, storing, and fetching content-addressed chunks of Noms data.
package chunks
import (
"bytes"
"github.com/liquidata-inc/dolt/go/store/d"
"github.com/liquidata-inc/dolt/go/store/hash"
)
// Chunk is a unit of stored data in noms
type Chunk struct {
	r    hash.Hash // content address: the hash of data
	data []byte
}

// EmptyChunk is the chunk of zero bytes.
var EmptyChunk = NewChunk([]byte{})

// Hash returns the chunk's content address.
func (c Chunk) Hash() hash.Hash {
	return c.r
}

// Data returns the chunk's raw bytes.
func (c Chunk) Data() []byte {
	return c.data
}

// ToChunk returns the chunk itself; it exists to satisfy interfaces whose
// other implementors must materialize a Chunk.
func (c Chunk) ToChunk() (Chunk, error) {
	return c, nil
}

// IsEmpty reports whether the chunk holds zero bytes.
func (c Chunk) IsEmpty() bool {
	return len(c.data) == 0
}

// NewChunk creates a new Chunk backed by data. This means that the returned Chunk has ownership of this slice of memory.
func NewChunk(data []byte) Chunk {
	r := hash.Of(data)
	return Chunk{r, data}
}

// NewChunkWithHash creates a new chunk with a known hash. The hash is not re-calculated or verified. This should obviously only be used in cases where the caller already knows the specified hash is correct.
func NewChunkWithHash(r hash.Hash, data []byte) Chunk {
	return Chunk{r, data}
}
// ChunkWriter wraps an io.WriteCloser, additionally providing the ability to grab the resulting Chunk for all data written through the interface. Calling Chunk() or Close() on an instance disallows further writing.
type ChunkWriter struct {
	buffer *bytes.Buffer // nil once the writer has been closed
	c      Chunk         // populated by Close()
}

// NewChunkWriter returns an open writer with an empty buffer.
func NewChunkWriter() *ChunkWriter {
	b := &bytes.Buffer{}
	return &ChunkWriter{
		buffer: b,
	}
}

// Write appends data to the chunk being built. It panics (via d.Panic) when
// called after Chunk() or Close(). The returned error is always nil.
func (w *ChunkWriter) Write(data []byte) (int, error) {
	if w.buffer == nil {
		d.Panic("Write() cannot be called after Hash() or Close().")
	}
	size, err := w.buffer.Write(data)
	d.Chk.NoError(err)
	return size, nil
}

// Chunk() closes the writer and returns the resulting Chunk.
func (w *ChunkWriter) Chunk() Chunk {
	d.Chk.NoError(w.Close())
	return w.c
}
// Close() closes computes the hash and Puts it into the ChunkSink Note: The Write() method never returns an error. Instead, like other noms interfaces, errors are reported via panic.
func (w *ChunkWriter) Close() error {
if w.buffer == nil {
return nil
}
w.c = NewChunk(w.buffer.Bytes())
w.buffer = nil
return nil
} | go/store/chunks/chunk.go | 0.787972 | 0.475118 | chunk.go | starcoder |
package Problem0385
import (
"strconv"
"github.com/aQuaYi/LeetCode-in-Go/kit"
)
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* type NestedInteger struct {
* }
*
* // Return true if this NestedInteger holds a single integer, rather than a nested list.
* func (n NestedInteger) IsInteger() bool {}
*
* // Return the single integer that this NestedInteger holds, if it holds a single integer
* // The result is undefined if this NestedInteger holds a nested list
* // So before calling this method, you should have a check
* func (n NestedInteger) GetInteger() int {}
*
* // Set this NestedInteger to hold a single integer.
* func (n *NestedInteger) SetInteger(value int) {}
*
* // Set this NestedInteger to hold a nested list and adds a nested integer to it.
* func (n *NestedInteger) Add(elem NestedInteger) {}
*
* // Return the nested list that this NestedInteger holds, if it holds a nested list
* // The list length is zero if this NestedInteger holds a single integer
* // You can access NestedInteger's List element directly if you want to modify it
* func (n NestedInteger) GetList() []*NestedInteger {}
*/
// NestedInteger is the judge-provided type, aliased from the shared kit
// package.
type NestedInteger = kit.NestedInteger

// deserialize parses the serialized nested-list syntax (e.g. "[123,[456]]")
// into a NestedInteger, using an explicit stack instead of recursion.
func deserialize(s string) *NestedInteger {
	if len(s) == 0 {
		return nil
	}
	// A bare integer with no brackets.
	if s[0] != '[' {
		return getValue(s)
	}
	stack := new(stackChars)
	var cur *NestedInteger
	// si marks the start of the current (possibly empty) integer token;
	// ci scans the input.
	si, ci := 0, 0
	for ; ci < len(s); ci++ {
		switch s[ci] {
		case '[':
			// Start a new list; park the enclosing list on the stack.
			if cur != nil {
				stack.Push(cur)
			}
			cur = new(NestedInteger)
			si = ci + 1
		case ']':
			// Close the current list, flushing any pending integer token,
			// then reattach it to its enclosing list if there is one.
			if ci > si {
				cur.Add(*getValue(s[si:ci]))
			}
			if !stack.Empty() {
				tmp := stack.Pop()
				tmp.Add(*cur)
				cur = tmp
			}
			si = ci + 1
		case ',':
			// A comma right after ']' separates lists, not integers.
			if s[ci-1] != ']' {
				cur.Add(*getValue(s[si:ci]))
			}
			si = ci + 1
		}
	}
	return cur
}
// getValue wraps the integer encoded in s in a NestedInteger.
// The strconv.Atoi error is deliberately ignored: the problem guarantees
// well-formed input, so s is always a valid (possibly negative) integer.
func getValue(s string) *NestedInteger {
	val, _ := strconv.Atoi(s)
	item := new(NestedInteger)
	item.SetInteger(val)
	return item
}
// stackChars is a LIFO stack of partially built NestedInteger lists used by
// the iterative parser above.
type stackChars struct {
	items []*NestedInteger
}

// Push places nb on top of the stack.
func (s *stackChars) Push(nb *NestedInteger) {
	s.items = append(s.items, nb)
}

// Pop removes and returns the top of the stack; it panics when empty.
func (s *stackChars) Pop() *NestedInteger {
	last := len(s.items) - 1
	top := s.items[last]
	s.items = s.items[:last]
	return top
}

// Empty reports whether the stack holds no elements.
func (s *stackChars) Empty() bool {
	return len(s.items) == 0
}
package sketchy
import (
"fmt"
"github.com/tdewolff/canvas"
"log"
"math"
)
// Primitive types

// Point is a simple point in 2D space
type Point struct {
	X float64
	Y float64
}

// Line is two points that form a line
type Line struct {
	P Point
	Q Point
}

// Curve A curve is a list of points, may be closed
type Curve struct {
	Points []Point
	Closed bool
}

// A Circle represented by a center point and radius
type Circle struct {
	Center Point
	Radius float64
}

// Rect is a simple rectangle
// (X, Y) is one corner; W and H extend from it.
type Rect struct {
	X float64
	Y float64
	W float64
	H float64
}

// A Triangle specified by vertices as points
type Triangle struct {
	A Point
	B Point
	C Point
}
// Point functions

// Tuple representation of a point, useful for debugging
func (p Point) String() string {
	return fmt.Sprintf("(%f, %f)", p.X, p.Y)
}

// Lerp is a linear interpolation between two points
// i = 0 yields p, i = 1 yields a.
func (p Point) Lerp(a Point, i float64) Point {
	return Point{
		X: Lerp(p.X, a.X, i),
		Y: Lerp(p.Y, a.Y, i),
	}
}

// IsEqual determines if two points are equal
// NOTE(review): exact float64 comparison — points differing only by rounding
// error compare unequal. Confirm callers do not need tolerance here.
func (p Point) IsEqual(q Point) bool {
	return p.X == q.X && p.Y == q.Y
}

// Draw renders the point on ctx as a circle path of radius s.
func (p Point) Draw(s float64, ctx *canvas.Context) {
	ctx.DrawPath(p.X, p.Y, canvas.Circle(s))
}
// Distance returns the Euclidean distance between two points.
//
// Uses direct multiplication instead of math.Pow(x, 2); Pow with an
// integral exponent reduces to the same multiply, so results are unchanged
// while the call overhead disappears.
func Distance(p Point, q Point) float64 {
	dx := q.X - p.X
	dy := q.Y - p.Y
	return math.Sqrt(dx*dx + dy*dy)
}

// SquaredDistance is the square of the distance between two points; useful
// when only relative comparisons are needed and the Sqrt can be skipped.
func SquaredDistance(p Point, q Point) float64 {
	dx := q.X - p.X
	dy := q.Y - p.Y
	return dx*dx + dy*dy
}
// Line functions

// String returns a "(px, py) -> (qx, qy)" representation of the line, useful for debugging.
func (l Line) String() string {
	return fmt.Sprintf("(%f, %f) -> (%f, %f)", l.P.X, l.P.Y, l.Q.X, l.Q.Y)
}

// IsEqual reports whether two lines are exactly equal. Note this is
// order-sensitive: P must match P and Q must match Q, so the same segment
// with swapped endpoints compares unequal.
func (l Line) IsEqual(k Line) bool {
	return l.P.IsEqual(k.P) && l.Q.IsEqual(k.Q)
}
// Angle returns the direction of the line from P to Q, in radians.
//
// Uses math.Atan2 instead of math.Atan(dy / dx): Atan2 handles dx == 0
// without dividing by zero, returns 0 (not NaN) for a degenerate line
// where P == Q, and preserves the quadrant, covering the full (-Pi, Pi]
// range. For lines pointing rightward (dx > 0) the result is identical to
// the previous Atan form; for leftward lines the angle now differs by Pi,
// which still describes the same perpendicular direction in
// PerpendicularAt (the computed endpoints simply swap).
func (l Line) Angle() float64 {
	dy := l.Q.Y - l.P.Y
	dx := l.Q.X - l.P.X
	return math.Atan2(dy, dx)
}
// Slope computes the slope of the line.
//
// Near-vertical lines (|dx| < Smol) return an infinity whose sign is
// derived from the signs of both dx and dy, so the infinity matches the
// direction of approach.
// NOTE(review): when dy == 0 as well (degenerate line) this still returns
// an infinity rather than NaN — confirm that is intended.
func (l Line) Slope() float64 {
	dy := l.Q.Y - l.P.Y
	dx := l.Q.X - l.P.X
	if math.Abs(dx) < Smol {
		if dx < 0 {
			if dy > 0 {
				return math.Inf(-1)
			} else {
				return math.Inf(1)
			}
		} else {
			if dy > 0 {
				return math.Inf(1)
			} else {
				return math.Inf(-1)
			}
		}
	}
	return dy / dx
}

// InvertedSlope returns the negative reciprocal of the slope (the slope of
// a perpendicular line); vertical lines map to 0.
func (l Line) InvertedSlope() float64 {
	slope := l.Slope()
	if math.IsInf(slope, 1) || math.IsInf(slope, -1) {
		return 0
	}
	return -1 / slope
}
// PerpendicularAt returns a line of the given length, perpendicular to l
// and centered on the point the given percentage of the way from P to Q.
func (l Line) PerpendicularAt(percentage float64, length float64) Line {
	angle := l.Angle()
	point := l.P.Lerp(l.Q, percentage)
	// Rotating the direction by 90 degrees swaps sin and cos; half the
	// requested length extends to each side of the anchor point.
	sinOffset := 0.5 * length * math.Sin(angle)
	cosOffset := 0.5 * length * math.Cos(angle)
	p := Point{
		X: NoTinyVals(point.X - sinOffset),
		Y: NoTinyVals(point.Y + cosOffset),
	}
	q := Point{
		X: NoTinyVals(point.X + sinOffset),
		Y: NoTinyVals(point.Y - cosOffset),
	}
	return Line{
		P: p,
		Q: q,
	}
}

// PerpendicularBisector returns the perpendicular of the given length
// through the line's midpoint.
func (l Line) PerpendicularBisector(length float64) Line {
	return l.PerpendicularAt(0.5, length)
}
// Lerp is an interpolation between the two points of a line: i=0 yields P,
// i=1 yields Q.
func (l Line) Lerp(i float64) Point {
	return Point{
		X: Lerp(l.P.X, l.Q.X, i),
		Y: Lerp(l.P.Y, l.Q.Y, i),
	}
}

// Draw strokes the line segment onto the canvas context.
func (l Line) Draw(ctx *canvas.Context) {
	ctx.MoveTo(l.P.X, l.P.Y)
	ctx.LineTo(l.Q.X, l.Q.Y)
	ctx.Stroke()
}
// Midpoint calculates the midpoint between two points.
func Midpoint(p Point, q Point) Point {
	return Point{X: (p.X + q.X) / 2, Y: (p.Y + q.Y) / 2}
}

// Midpoint calculates the midpoint of the line.
func (l Line) Midpoint() Point {
	return Midpoint(l.P, l.Q)
}

// Length calculates the length of the line.
func (l Line) Length() float64 {
	return Distance(l.P, l.Q)
}
// Intersects reports whether the two line segments intersect.
//
// The segments are parameterized as P + s*(Q-P) and the crossing point is
// solved via Cramer's rule; the segments intersect iff both parameters s
// and t fall within [0, 1]. Parallel (including collinear, overlapping)
// segments always report false because the determinant is zero.
func (l Line) Intersects(k Line) bool {
	a1 := l.Q.X - l.P.X
	b1 := k.P.X - k.Q.X
	c1 := k.P.X - l.P.X
	a2 := l.Q.Y - l.P.Y
	b2 := k.P.Y - k.Q.Y
	c2 := k.P.Y - l.P.Y
	d := a1*b2 - a2*b1
	if d == 0 {
		// lines are parallel
		return false
	}
	// Cramer's rule
	s := (c1*b2 - c2*b1) / d
	t := (a1*c2 - a2*c1) / d
	return s >= 0 && t >= 0 && s <= 1 && t <= 1
}
// ParallelTo reports whether the two lines are parallel (zero cross
// product of their direction vectors).
// NOTE(review): this uses an exact `== 0` float comparison while Slope
// uses the Smol tolerance, so nearly-parallel lines report false here —
// confirm the inconsistency is intentional.
func (l Line) ParallelTo(k Line) bool {
	a1 := l.Q.X - l.P.X
	b1 := k.P.X - k.Q.X
	a2 := l.Q.Y - l.P.Y
	b2 := k.P.Y - k.Q.Y
	d := a1*b2 - a2*b1
	return d == 0
}
// Curve functions

// Length calculates the total length of the line segments of a curve,
// including the closing segment when the curve is closed.
//
// Fix: a closed curve with zero points previously panicked on
// c.Points[0]; the closing segment is now only added when there are at
// least two points (a single-point closing segment contributed 0 anyway).
func (c *Curve) Length() float64 {
	result := 0.0
	n := len(c.Points)
	for i := 0; i < n-1; i++ {
		result += Distance(c.Points[i], c.Points[i+1])
	}
	if c.Closed && n > 1 {
		result += Distance(c.Points[0], c.Points[n-1])
	}
	return result
}
// Last returns the last point in a curve: the zero point for an empty
// curve, and — for a closed curve — the first point again, since the
// closing segment ends where the curve began.
func (c *Curve) Last() Point {
	n := len(c.Points)
	switch n {
	case 0:
		return Point{
			X: 0,
			Y: 0,
		}
	case 1:
		return c.Points[0]
	}
	if c.Closed {
		return c.Points[0]
	}
	return c.Points[n-1]
}

// LastLine returns the final segment of the curve. Empty and single-point
// curves yield degenerate zero-length lines; for a closed curve the final
// segment joins the last point back to the first.
func (c *Curve) LastLine() Line {
	n := len(c.Points)
	switch n {
	case 0:
		return Line{
			P: Point{X: 0, Y: 0},
			Q: Point{X: 0, Y: 0},
		}
	case 1:
		return Line{
			P: c.Points[0],
			Q: c.Points[0],
		}
	}
	if c.Closed {
		return Line{
			P: c.Points[n-1],
			Q: c.Points[0],
		}
	}
	return Line{
		P: c.Points[n-2],
		Q: c.Points[n-1],
	}
}

// AddPoint appends a new point (x, y) to the end of the curve.
func (c *Curve) AddPoint(x, y float64) {
	c.Points = append(c.Points, Point{X: x, Y: y})
}
// Lerp calculates a point a given percentage along a curve, measured by
// the accumulated length of its segments. Percentages outside [0, 1]
// terminate the program via log.Fatalf.
// NOTE(review): an empty curve panics on c.Points[0]; callers are assumed
// to provide at least one point.
func (c *Curve) Lerp(percentage float64) Point {
	var point Point
	if percentage < 0 || percentage > 1 {
		log.Fatalf("percentage in Lerp not between 0 and 1: %v\n", percentage)
	}
	// The endpoints are handled directly so float error in the distance
	// accumulation cannot shift them.
	if NoTinyVals(percentage) == 0 {
		return c.Points[0]
	}
	if math.Abs(percentage-1) < Smol {
		return c.Last()
	}
	totalDist := c.Length()
	targetDist := percentage * totalDist
	partialDist := 0.0
	var foundPoint bool
	n := len(c.Points)
	// Walk segments until the one containing targetDist is found, then
	// interpolate within that segment.
	for i := 0; i < n-1; i++ {
		dist := Distance(c.Points[i], c.Points[i+1])
		if partialDist+dist >= targetDist {
			remainderDist := targetDist - partialDist
			pct := remainderDist / dist
			point = c.Points[i].Lerp(c.Points[i+1], pct)
			foundPoint = true
			break
		}
		partialDist += dist
	}
	if !foundPoint {
		// The target lies on the closing segment of a closed curve.
		if c.Closed {
			dist := Distance(c.Points[n-1], c.Points[0])
			remainderDist := targetDist - partialDist
			pct := remainderDist / dist
			point = c.Points[n-1].Lerp(c.Points[0], pct)
		} else {
			panic("couldn't find curve lerp point")
		}
	}
	return point
}
// LineAt returns the curve segment containing the point a given
// percentage along the curve (by accumulated segment length), together
// with how far along that segment (0..1) the point lies.
//
// Fix: the out-of-range error message previously said "Lerp", making
// failures here look like they came from Curve.Lerp.
// NOTE(review): percentage 0 indexes c.Points[1], so at least two points
// are assumed — confirm callers guarantee this.
func (c *Curve) LineAt(percentage float64) (Line, float64) {
	var line Line
	var linePct float64
	if percentage < 0 || percentage > 1 {
		log.Fatalf("percentage in LineAt not between 0 and 1: %v\n", percentage)
	}
	if NoTinyVals(percentage) == 0 {
		return Line{P: c.Points[0], Q: c.Points[1]}, 0
	}
	if math.Abs(percentage-1) < Smol {
		return c.LastLine(), 1
	}
	totalDist := c.Length()
	targetDist := percentage * totalDist
	partialDist := 0.0
	var foundPoint bool
	n := len(c.Points)
	// Walk segments until the one containing targetDist is found.
	for i := 0; i < n-1; i++ {
		dist := Distance(c.Points[i], c.Points[i+1])
		if partialDist+dist >= targetDist {
			remainderDist := targetDist - partialDist
			linePct = remainderDist / dist
			line.P = c.Points[i]
			line.Q = c.Points[i+1]
			foundPoint = true
			break
		}
		partialDist += dist
	}
	if !foundPoint {
		// The target lies on the closing segment of a closed curve.
		if c.Closed {
			dist := Distance(c.Points[n-1], c.Points[0])
			remainderDist := targetDist - partialDist
			linePct = remainderDist / dist
			line.P = c.Points[n-1]
			line.Q = c.Points[0]
		} else {
			panic("couldn't find curve lerp point")
		}
	}
	return line, linePct
}
// PerpendicularAt returns a line of the given length, perpendicular to the
// curve at the point the given percentage along it.
func (c *Curve) PerpendicularAt(percentage float64, length float64) Line {
	line, linePct := c.LineAt(percentage)
	return line.PerpendicularAt(linePct, length)
}
// Draw strokes the curve's segments — and, when Closed, the segment
// joining the last point back to the first — onto the canvas context.
//
// Fix: a closed curve with zero points previously panicked indexing
// c.Points[n-1]; empty curves now draw nothing, and the closing segment is
// only emitted when it actually connects two distinct indices.
func (c *Curve) Draw(ctx *canvas.Context) {
	n := len(c.Points)
	if n == 0 {
		return
	}
	for i := 0; i < n-1; i++ {
		ctx.MoveTo(c.Points[i].X, c.Points[i].Y)
		ctx.LineTo(c.Points[i+1].X, c.Points[i+1].Y)
	}
	if c.Closed && n > 1 {
		ctx.MoveTo(c.Points[n-1].X, c.Points[n-1].Y)
		ctx.LineTo(c.Points[0].X, c.Points[0].Y)
	}
	ctx.Stroke()
}
// Circle functions

// Draw renders the circle onto the canvas context.
func (c *Circle) Draw(ctx *canvas.Context) {
	ctx.DrawPath(c.Center.X, c.Center.Y, canvas.Circle(c.Radius))
}

// ToCurve samples the circle into a closed Curve of `resolution` points
// spaced evenly in angle; the `false` argument to Linspace presumably
// excludes the endpoint so 0 and Tau are not duplicated — confirm against
// Linspace's definition.
func (c *Circle) ToCurve(resolution int) Curve {
	points := make([]Point, resolution)
	theta := Linspace(0, Tau, resolution, false)
	for i, t := range theta {
		x := c.Center.X + c.Radius*math.Cos(t)
		y := c.Center.Y + c.Radius*math.Sin(t)
		points[i] = Point{X: x, Y: y}
	}
	return Curve{Points: points, Closed: true}
}

// ContainsPoint reports whether p lies inside the circle or on its boundary.
func (c *Circle) ContainsPoint(p Point) bool {
	return Distance(c.Center, p) <= c.Radius
}

// PointOnEdge reports whether p lies on the circle's boundary, using the
// package's Equalf comparison (presumably tolerance-based).
func (c *Circle) PointOnEdge(p Point) bool {
	return Equalf(Distance(c.Center, p), c.Radius)
}
// Rect functions

// ContainsPoint determines if a point lies within a rectangle (boundary inclusive).
func (r *Rect) ContainsPoint(p Point) bool {
	return p.X >= r.X && p.X <= r.X+r.W && p.Y >= r.Y && p.Y <= r.Y+r.H
}

// Contains reports whether rect lies strictly inside r: all comparisons
// are strict, so a rect sharing an edge with r is not contained.
func (r *Rect) Contains(rect Rect) bool {
	a := Point{X: r.X, Y: r.Y}                         // r's minimum corner
	b := Point{X: r.X + r.W, Y: r.Y + r.H}             // r's maximum corner
	c := Point{X: rect.X, Y: rect.Y}                   // rect's minimum corner
	d := Point{X: rect.X + rect.W, Y: rect.Y + rect.H} // rect's maximum corner
	return a.X < c.X && a.Y < c.Y && b.X > d.X && b.Y > d.Y
}
// IsDisjoint reports whether r and rect share no area; the comparisons are
// strict, so rectangles that merely touch along an edge are NOT disjoint.
func (r *Rect) IsDisjoint(rect Rect) bool {
	return r.X > rect.X+rect.W || // r entirely to the right
		r.Y > rect.Y+rect.H || // r entirely above
		r.X+r.W < rect.X || // r entirely to the left
		r.Y+r.H < rect.Y // r entirely below
}

// Overlaps is the negation of IsDisjoint.
func (r *Rect) Overlaps(rect Rect) bool {
	return !r.IsDisjoint(rect)
}
// Intersects reports whether r and rect strictly overlap (they must share
// area; touching edges do not count).
//
// Bug fix: the Y-axis disjoint test previously compared each rectangle's
// maximum Y against the other's minimum Y in the wrong direction
// (`b.Y >= c.Y || d.Y >= a.Y`), a condition that holds for essentially
// every pair of rectangles, so the method returned false even for clearly
// overlapping inputs. The Y check now mirrors the X-axis check.
func (r *Rect) Intersects(rect Rect) bool {
	a := Point{X: r.X, Y: r.Y}                         // r's minimum corner
	b := Point{X: r.X + r.W, Y: r.Y + r.H}             // r's maximum corner
	c := Point{X: rect.X, Y: rect.Y}                   // rect's minimum corner
	d := Point{X: rect.X + rect.W, Y: rect.Y + rect.H} // rect's maximum corner
	if a.X >= d.X || c.X >= b.X {
		return false
	}
	if a.Y >= d.Y || c.Y >= b.Y {
		return false
	}
	return true
}
// Draw renders the rectangle onto the canvas context, positioning a W-by-H
// rectangle path at the rect's anchor (r.X, r.Y).
func (r *Rect) Draw(ctx *canvas.Context) {
	rect := canvas.Rectangle(r.W, r.H)
	ctx.DrawPath(r.X, r.Y, rect)
}
// Triangle functions

// Draw traces the closed triangle path onto the canvas context.
// NOTE(review): unlike the other Draw methods in this file, no
// ctx.Stroke() (or Fill) call is made here, so the caller must render the
// path itself — confirm this asymmetry is intentional.
func (t *Triangle) Draw(ctx *canvas.Context) {
	ctx.MoveTo(t.A.X, t.A.Y)
	ctx.LineTo(t.B.X, t.B.Y)
	ctx.LineTo(t.C.X, t.C.Y)
	ctx.Close()
}
// Area computes the triangle's area from its three side lengths using
// Heron's formula.
func (t *Triangle) Area() float64 {
	ab := Line{P: t.A, Q: t.B}.Length()
	bc := Line{P: t.B, Q: t.C}.Length()
	ca := Line{P: t.C, Q: t.A}.Length()
	semi := (ab + bc + ca) / 2
	return math.Sqrt(semi * (semi - ab) * (semi - bc) * (semi - ca))
}
func (t *Triangle) Perimeter() float64 {
a := Line{P: t.A, Q: t.B}.Length()
b := Line{P: t.B, Q: t.C}.Length()
c := Line{P: t.C, Q: t.A}.Length()
return a + b + c
}
func (t *Triangle) Centroid() Point {
x := (t.A.X + t.B.X + t.C.X) / 3
y := (t.A.Y + t.B.Y + t.C.Y) / 3
return Point{X: x, Y: y}
} | geometry.go | 0.795499 | 0.53206 | geometry.go | starcoder |
package oddsengine
import (
	"math"
	"sort"
	"strconv"
	"strings"
)
// Summary is a type which represents the averaged results of multiple
// conflicts.
type Summary struct {
	// TotalSimulations The number of simulations that have been ran
	TotalSimulations int `json:"totalSimulations"`
	// AverageRounds The number of rounds on average a conflict lasted.
	AverageRounds float64 `json:"averageRounds"`
	// AttackerWinPercentage The percentage of conflicts that the attacker won
	AttackerWinPercentage float64 `json:"attackerWinPercentage"`
	// DefenderWinPercentage The percentage of conflicts that the defender won
	DefenderWinPercentage float64 `json:"defenderWinPercentage"`
	// DrawPercentage The percentage of conflicts that was a draw
	DrawPercentage float64 `json:"drawPercentage"`
	// AAAHitsAverage The number of AAA hits per round on average
	AAAHitsAverage float64 `json:"aaaHitsAverage"`
	// KamikazeHitsAverage The number of kamikaze hits per round on average
	KamikazeHitsAverage float64 `json:"kamikazeHitsAverage"`
	// AttackerAvgIpcLoss The number of IPC's the attacker loses on average
	AttackerAvgIpcLoss float64 `json:"attackerAvgIpcLoss"`
	// DefenderAvgIpcLoss The number of IPC's the defender loses on average
	DefenderAvgIpcLoss float64 `json:"defenderAvgIpcLoss"`
	// FirstRoundResults is the array of first round data. Represents the
	// number of hits that an attacker and defender get on the first round,
	// the frequency of such a result, and the victory result of that conflict.
	FirstRoundResults FirstRoundResultCollection `json:"firstRoundResults"`
	// AttackerUnitsRemaining represents all the remaining units at the end
	// of conflict. The units are represented by a string and the number of
	// times that that formation remained at the end of the conflict is the
	// value
	AttackerUnitsRemaining map[string]int `json:"attackerUnitsRemaining"`
	// DefenderUnitsRemaining represents all the remaining units at the end
	// of conflict. The units are represented by a string and the number of
	// times that that formation remained at the end of the conflict is the
	// value
	DefenderUnitsRemaining map[string]int `json:"defenderUnitsRemaining"`
}
// generateSummary aggregates a slice of conflict profiles into a single
// Summary of averages, win percentages and remaining-unit tallies.
//
// Improvements: switch statements replace the if/else-if chains over
// Outcome, the remaining-unit tallies use the zero-value map idiom
// (m[k]++) instead of a lookup-then-branch, and an empty input now returns
// a zeroed Summary instead of producing NaN averages from division by 0.
func generateSummary(p []ConflictProfile) *Summary {
	var summary Summary
	summary.AttackerUnitsRemaining = map[string]int{}
	summary.DefenderUnitsRemaining = map[string]int{}
	summary.TotalSimulations = len(p)
	if len(p) == 0 {
		return &summary
	}
	var (
		totalRounds          float64
		totalAAAHits         float64
		totalKamikazeHits    float64
		totalAttackerWins    float64
		totalDefenderWins    float64
		totalDraw            float64
		totalAttackerIpcLoss float64
		totalDefenderIpcLoss float64
	)
	for _, profile := range p {
		// Outcome: 1 = attacker win, -1 = defender win, 0 = draw.
		// Remaining units are only tallied for the winning side.
		switch profile.Outcome {
		case 0:
			totalDraw++
		case 1:
			totalAttackerWins++
			key := formationSliceToString(profile.AttackerUnitsRemaining)
			summary.AttackerUnitsRemaining[key]++
		case -1:
			totalDefenderWins++
			key := formationSliceToString(profile.DefenderUnitsRemaining)
			summary.DefenderUnitsRemaining[key]++
		}
		firstRoundResult := FirstRoundResult{
			AttackerHits: profile.AttackerHits[0],
			DefenderHits: profile.DefenderHits[0],
			Frequency:    1,
		}
		// Mirrors the original classification: any outcome that is neither
		// a draw nor an attacker win counts as a defender win here.
		switch profile.Outcome {
		case 0:
			firstRoundResult.Draw = 1
		case 1:
			firstRoundResult.AttackerWin = 1
		default:
			firstRoundResult.DefenderWin = 1
		}
		summary.FirstRoundResults = summary.FirstRoundResults.Add(firstRoundResult)
		totalRounds += float64(profile.Rounds)
		totalAttackerIpcLoss += float64(profile.AttackerIpcLoss)
		totalDefenderIpcLoss += float64(profile.DefenderIpcLoss)
		totalAAAHits += float64(profile.AAAHits)
		totalKamikazeHits += float64(profile.KamikazeHits)
	}
	n := float64(len(p))
	summary.AttackerWinPercentage = round((totalAttackerWins/n)*100, 2)
	summary.DefenderWinPercentage = round((totalDefenderWins/n)*100, 2)
	summary.DrawPercentage = round((totalDraw/n)*100, 2)
	summary.AttackerAvgIpcLoss = round(totalAttackerIpcLoss/n, 2)
	summary.AAAHitsAverage = round(totalAAAHits/n, 2)
	summary.KamikazeHitsAverage = round(totalKamikazeHits/n, 2)
	summary.DefenderAvgIpcLoss = round(totalDefenderIpcLoss/n, 2)
	summary.AverageRounds = round(totalRounds/n, 2)
	return &summary
}
// formationSliceToString renders a slice of unit-count maps as a comma
// separated "unit:count" string, e.g. "inf:3,tank:1,sub:2".
//
// Bug fix: Go map iteration order is randomized, so the same formation
// previously produced differently-ordered strings from run to run, which
// split identical formations across multiple tally keys in
// generateSummary. Unit names within each map are now sorted so the
// output is deterministic; the order of the maps themselves is preserved.
func formationSliceToString(fs []map[string]int) string {
	var ss []string
	for _, f := range fs {
		units := make([]string, 0, len(f))
		for u := range f {
			units = append(units, u)
		}
		sort.Strings(units)
		for _, u := range units {
			ss = append(ss, u+":"+strconv.Itoa(f[u]))
		}
	}
	return strings.Join(ss, ",")
}
// round limits f to the given number of decimal places using
// round-half-up semantics (Floor(x + 0.5)).
func round(f float64, places int) float64 {
	factor := math.Pow(10, float64(places))
	return math.Floor(f*factor+0.5) / factor
}
package model
// Legend is the option set for a legend component.
// A legend shows the symbol, color and name of different series; users can
// click legend entries to toggle the display of a series in the chart.
// https://echarts.apache.org/en/option.html#legend
type Legend struct {
	// Component ID, not specified by default. If specified, it can be used to refer the component in option or API.
	ID string `json:"id,omitempty"`
	// Type of legend. Optional values:
	// "plain": Simple legend. (default)
	// "scroll": Scrollable legend. It helps when too many legend items needed to be shown.
	// NOTE(review): no omitempty on this tag, so an unset Type marshals as
	// "type":"" — confirm ECharts treats the empty string as the default.
	Type LegendType `json:"type"`
	// Whether to show the Legend, default true.
	// NOTE(review): Go's zero value is false and the tag has no omitempty,
	// so a zero-valued struct serializes "show":false — set explicitly.
	Show bool `json:"show"`
	// Distance between legend component and the left side of the container.
	// left value can be instant pixel value like 20; it can also be a percentage
	// value relative to container width like '20%'; and it can also be 'left', 'center', or 'right'.
	// If the left value is set to be 'left', 'center', or 'right', then the component
	// will be aligned automatically based on position.
	Left string `json:"left,omitempty"`
	// Distance between legend component and the top side of the container.
	// top value can be instant pixel value like 20; it can also be a percentage
	// value relative to container width like '20%'; and it can also be 'top', 'middle', or 'bottom'.
	// If the left value is set to be 'top', 'middle', or 'bottom', then the component
	// will be aligned automatically based on position.
	Top string `json:"top,omitempty"`
	// Distance between legend component and the right side of the container.
	// right value can be instant pixel value like 20; it can also be a percentage
	// value relative to container width like '20%'.
	// Adaptive by default.
	Right string `json:"right,omitempty"`
	// Distance between legend component and the bottom side of the container.
	// bottom value can be instant pixel value like 20; it can also be a percentage
	// value relative to container width like '20%'.
	// Adaptive by default.
	Bottom string `json:"bottom,omitempty"`
	// Data array of legend. An array item is usually a name representing string.
	// set Data as []string{} if you wants to hide the legend.
	Data []interface{} `json:"data,omitempty"`
	// The layout orientation of legend.
	// Options: 'horizontal', 'vertical'
	Orient string `json:"orient,omitempty"`
	// Legend color when not selected.
	InactiveColor string `json:"inactiveColor,omitempty"`
	// State table of selected legend.
	// example:
	// var selected = map[string]bool{}
	// selected["series1"] = true
	// selected["series2"] = false
	Selected map[string]bool `json:"selected,omitempty"`
	// Selected mode of legend, which controls whether series can be toggled displaying by clicking legends.
	// It is enabled by default, and you may set it to be false to disabled it.
	// Besides, it can be set to 'single' or 'multiple', for single selection and multiple selection.
	SelectedMode string `json:"selectedMode,omitempty"`
	// Legend space around content. The unit is px.
	// Default values for each position are 5.
	// And they can be set to different values with left, right, top, and bottom.
	// Examples:
	// 1. Set padding to be 5
	//    padding: 5
	// 2. Set the top and bottom paddings to be 5, and left and right paddings to be 10
	//    padding: [5, 10]
	// 3. Set each of the four paddings separately
	//    padding: [
	//        5,  // up
	//        10, // right
	//        5,  // down
	//        10, // left
	//    ]
	Padding interface{} `json:"padding,omitempty"`
	// Image width of legend symbol.
	ItemWidth int `json:"itemWidth,omitempty"`
	// Image height of legend symbol.
	ItemHeight int `json:"itemHeight,omitempty"`
	// Legend X position, right/left/center
	X string `json:"x,omitempty"`
	// Legend Y position, right/left/center
	Y string `json:"y,omitempty"`
	// Width of legend component. Adaptive by default.
	Width string `json:"width,omitempty"`
	// Height of legend component. Adaptive by default.
	Height string `json:"height,omitempty"`
	// Legend marker and text aligning.
	// By default, it automatically calculates from component location and orientation.
	// When left value of this component is 'right' and orient is 'vertical', it would be aligned to 'right'.
	// Options: auto/left/right
	Align string `json:"align,omitempty"`
	// Legend text style.
	TextStyle *TextStyle `json:"textStyle,omitempty"`
}
// LegendType identifies the layout behaviour of a legend component.
type LegendType string

const (
	// LegendTypePlain is the simple, non-scrolling legend (the default).
	LegendTypePlain LegendType = "plain"

	// LegendTypePain is a misspelled alias of LegendTypePlain, kept so
	// existing callers keep compiling.
	//
	// Deprecated: use LegendTypePlain instead.
	LegendTypePain = LegendTypePlain

	// LegendTypeScroll is a scrollable legend, useful when too many items
	// need to be shown. When "scroll" is used, these options allow detailed
	// configuration: scrollDataIndex, pageButtonItemGap, pageButtonGap,
	// pageButtonPosition, legend.pageFormatter, pageIcons, pageIconColor,
	// pageIconInactiveColor, pageIconSize, pageTextStyle, animation,
	// animationDurationUpdate.
	LegendTypeScroll LegendType = "scroll"
)
package selector
import (
"fmt"
ipld "github.com/ipfs/fs-repo-migrations/ipfs-10-to-11/_vendor/github.com/ipld/go-ipld-prime"
)
// Selector is the programmatic representation of an IPLD Selector Node
// and can be applied to traverse a given IPLD DAG.
type Selector interface {
	// Interests returns the segments we're likely interested in, **or nil**
	// if we're a high-cardinality or expression-based matcher and need all
	// segments proposed to us.
	Interests() []ipld.PathSegment
	// Explore performs one exploration step -- iteration comes from outside
	// (either the whole node, or by following suggestions of Interests).
	// It returns nil if there is no interest. You have to traverse to the
	// next node yourself (the selector doesn't do it for you, because you
	// might be considering multiple selection reasons at the same time).
	Explore(ipld.Node, ipld.PathSegment) Selector
	// Decide reports whether the given node is matched by this selector.
	Decide(ipld.Node) bool
}
// ParsedParent is created whenever you are parsing a selector node that may
// have child selector nodes that need to know it.
type ParsedParent interface {
	Link(s Selector) bool
}

// ParseContext tracks the progress when parsing a selector. parentStack
// holds the chain of enclosing parsed parents, most recently pushed first
// (see PushParent).
type ParseContext struct {
	parentStack []ParsedParent
}
// ParseSelector creates a Selector that can be traversed from an IPLD
// Selector node, starting from an empty parse context.
func ParseSelector(n ipld.Node) (Selector, error) {
	return ParseContext{}.ParseSelector(n)
}
// ParseSelector creates a Selector from an IPLD Selector Node with the
// given context. The node must be a single-entry map (a keyed union); the
// one key selects which selector body parser handles the value.
func (pc ParseContext) ParseSelector(n ipld.Node) (Selector, error) {
	if n.ReprKind() != ipld.ReprKind_Map {
		return nil, fmt.Errorf("selector spec parse rejected: selector is a keyed union and thus must be a map")
	}
	if n.Length() != 1 {
		return nil, fmt.Errorf("selector spec parse rejected: selector is a keyed union and thus must be single-entry map")
	}
	// Errors from the iterator/AsString are discarded: the map/length
	// checks above already guarantee a single string-keyed entry exists.
	kn, v, _ := n.MapIterator().Next()
	kstr, _ := kn.AsString()
	// Switch over the single key to determine which selector body comes next.
	// (This switch is where the keyed union discriminators concretely happen.)
	switch kstr {
	case SelectorKey_ExploreFields:
		return pc.ParseExploreFields(v)
	case SelectorKey_ExploreAll:
		return pc.ParseExploreAll(v)
	case SelectorKey_ExploreIndex:
		return pc.ParseExploreIndex(v)
	case SelectorKey_ExploreRange:
		return pc.ParseExploreRange(v)
	case SelectorKey_ExploreUnion:
		return pc.ParseExploreUnion(v)
	case SelectorKey_ExploreRecursive:
		return pc.ParseExploreRecursive(v)
	case SelectorKey_ExploreRecursiveEdge:
		return pc.ParseExploreRecursiveEdge(v)
	case SelectorKey_Matcher:
		return pc.ParseMatcher(v)
	default:
		return nil, fmt.Errorf("selector spec parse rejected: %q is not a known member of the selector union", kstr)
	}
}
// PushParent returns a new context whose parent stack has parent prepended
// (most recently pushed parent first); the receiver is left untouched.
func (pc ParseContext) PushParent(parent ParsedParent) ParseContext {
	parents := make([]ParsedParent, 0, len(pc.parentStack)+1)
	parents = append(append(parents, parent), pc.parentStack...)
	return ParseContext{parentStack: parents}
}
// SegmentIterator iterates either a list or a map, generating PathSegments
// instead of indexes or keys.
type SegmentIterator interface {
	Next() (pathSegment ipld.PathSegment, value ipld.Node, err error)
	Done() bool
}

// NewSegmentIterator generates a new iterator based on the node type:
// lists get a listSegmentIterator, every other kind gets a
// mapSegmentIterator.
func NewSegmentIterator(n ipld.Node) SegmentIterator {
	if n.ReprKind() == ipld.ReprKind_List {
		return listSegmentIterator{n.ListIterator()}
	}
	return mapSegmentIterator{n.MapIterator()}
}
// listSegmentIterator adapts an ipld.ListIterator to the SegmentIterator
// interface by converting integer indexes to PathSegments.
type listSegmentIterator struct {
	ipld.ListIterator
}

// Next returns the next list element with its index as a PathSegment.
func (lsi listSegmentIterator) Next() (pathSegment ipld.PathSegment, value ipld.Node, err error) {
	i, v, err := lsi.ListIterator.Next()
	return ipld.PathSegmentOfInt(i), v, err
}

// Done reports whether the underlying list iterator is exhausted.
func (lsi listSegmentIterator) Done() bool {
	return lsi.ListIterator.Done()
}
// mapSegmentIterator adapts an ipld.MapIterator to the SegmentIterator
// interface by converting string keys to PathSegments.
type mapSegmentIterator struct {
	ipld.MapIterator
}

// Next returns the next map entry with its key as a PathSegment. The
// AsString error is discarded; iteration errors from the underlying
// iterator are propagated.
func (msi mapSegmentIterator) Next() (pathSegment ipld.PathSegment, value ipld.Node, err error) {
	k, v, err := msi.MapIterator.Next()
	if err != nil {
		return ipld.PathSegment{}, v, err
	}
	kstr, _ := k.AsString()
	return ipld.PathSegmentOfString(kstr), v, err
}

// Done reports whether the underlying map iterator is exhausted.
func (msi mapSegmentIterator) Done() bool {
	return msi.MapIterator.Done()
}
package cryptoapis
import (
"encoding/json"
)
// ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend struct for ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend.
// All fields are required by the API (none carry omitempty).
type ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend struct {
	// Defines a Merkle tree root of a note commitment tree which uniquely identifies a note commitment tree state given the assumed security properties of the Merkle tree’s hash function.
	Anchor string `json:"anchor"`
	// Defines a value commitment to the value of the input note.
	Cv string `json:"cv"`
	// Represents a sequence of nullifiers of the input notes.
	Nullifier string `json:"nullifier"`
	// Represents the proof.
	Proof string `json:"proof"`
	// Represents the randomized validating key for spendAuthSig.
	Rk string `json:"rk"`
	// Used to prove knowledge of the spending key authorizing spending of an input note.
	SpendAuthSig string `json:"spendAuthSig"`
}
// NewListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend instantiates a new ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend(anchor string, cv string, nullifier string, proof string, rk string, spendAuthSig string) *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend {
	this := ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend{}
	this.Anchor = anchor
	this.Cv = cv
	this.Nullifier = nullifier
	this.Proof = proof
	this.Rk = rk
	this.SpendAuthSig = spendAuthSig
	return &this
}

// NewListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpendWithDefaults instantiates a new ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend object.
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set.
func NewListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpendWithDefaults() *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend {
	this := ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend{}
	return &this
}
// GetAnchor returns the Anchor field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetAnchor() string {
	if o == nil {
		return ""
	}
	return o.Anchor
}

// GetAnchorOk returns a tuple with the Anchor field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetAnchorOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Anchor, true
}

// SetAnchor sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetAnchor(v string) {
	o.Anchor = v
}

// GetCv returns the Cv field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetCv() string {
	if o == nil {
		return ""
	}
	return o.Cv
}

// GetCvOk returns a tuple with the Cv field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetCvOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Cv, true
}

// SetCv sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetCv(v string) {
	o.Cv = v
}

// GetNullifier returns the Nullifier field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetNullifier() string {
	if o == nil {
		return ""
	}
	return o.Nullifier
}

// GetNullifierOk returns a tuple with the Nullifier field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetNullifierOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Nullifier, true
}

// SetNullifier sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetNullifier(v string) {
	o.Nullifier = v
}

// GetProof returns the Proof field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetProof() string {
	if o == nil {
		return ""
	}
	return o.Proof
}

// GetProofOk returns a tuple with the Proof field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetProofOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Proof, true
}

// SetProof sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetProof(v string) {
	o.Proof = v
}

// GetRk returns the Rk field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetRk() string {
	if o == nil {
		return ""
	}
	return o.Rk
}

// GetRkOk returns a tuple with the Rk field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetRkOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Rk, true
}

// SetRk sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetRk(v string) {
	o.Rk = v
}

// GetSpendAuthSig returns the SpendAuthSig field value; the zero value on a nil receiver.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetSpendAuthSig() string {
	if o == nil {
		return ""
	}
	return o.SpendAuthSig
}

// GetSpendAuthSigOk returns a tuple with the SpendAuthSig field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) GetSpendAuthSigOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.SpendAuthSig, true
}

// SetSpendAuthSig sets field value.
func (o *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) SetSpendAuthSig(v string) {
	o.SpendAuthSig = v
}
// MarshalJSON serializes every field under its documented JSON key. The
// map literal replaces the generated `if true { ... }` guards; the emitted
// JSON is identical since encoding/json sorts map keys.
func (o ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]interface{}{
		"anchor":       o.Anchor,
		"cv":           o.Cv,
		"nullifier":    o.Nullifier,
		"proof":        o.Proof,
		"rk":           o.Rk,
		"spendAuthSig": o.SpendAuthSig,
	})
}
// NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend
// wraps the model with an explicit "set" flag so that absent, null and
// present values can be distinguished during (un)marshalling.
type NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend struct {
	value *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend
	isSet bool
}

// Get returns the wrapped value (nil when unset or explicitly null).
func (v NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) Get() *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) Set(val *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend wraps val in a set Nullable.
func NewNullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend(val *ListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) *NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend {
	return &NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (null when unset).
func (v NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableListUnspentTransactionOutputsByAddressRIBlockchainSpecificVShieldedSpend) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Internally a map holds elements in up to 255 bytes of key+value.
// When key or value or both are too large, it uses pointers to key+value
// instead. Test all the combinations.
package main
// seq fills a 1000-byte array with the arithmetic progression
// x, x+y, x+2y, ... with each term truncated to a byte.
func seq(x, y int) [1000]byte {
	var out [1000]byte
	for i := range out {
		out[i] = byte(x + i*y)
	}
	return out
}
// cmp panics if the two arrays differ at any index.
func cmp(x, y [1000]byte) {
	for i := range x {
		if x[i] != y[i] {
			panic("BUG mismatch")
		}
	}
}
func main() {
m := make(map[int][1000]byte)
m[1] = seq(11, 13)
m[2] = seq(2, 9)
m[3] = seq(3, 17)
cmp(m[1], seq(11, 13))
cmp(m[2], seq(2, 9))
cmp(m[3], seq(3, 17))
{
type T [1]byte
type V [1]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [100]byte
type V [1]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [1]byte
type V [100]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [1000]byte
type V [1]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [1]byte
type V [1000]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [1000]byte
type V [1000]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [200]byte
type V [1]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [1]byte
type V [200]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
{
type T [200]byte
type V [200]byte
m := make(map[T]V)
m[T{}] = V{1}
m[T{1}] = V{2}
if x, y := m[T{}][0], m[T{1}][0]; x != 1 || y != 2 {
println(x, y)
panic("bad map")
}
}
} | test/bigmap.go | 0.596433 | 0.504394 | bigmap.go | starcoder |
package storage
import (
"fmt"
"github.com/tendermint/iavl"
dbm "github.com/tendermint/tm-db"
"github.com/xlab/treeprint"
)
// RWTree provides an abstraction over IAVL that maintains separate read and write paths. Reads are routed to the most
// recently saved version of the tree - which provides immutable access. Writes are routed to a working tree that is
// mutable. On save the working tree is saved to DB, frozen, and replaces the previous immutable read tree.
type RWTree struct {
	// Working tree accumulating writes; only visible to readers after Save.
	tree *MutableTree
	// Read-only tree serving the previously saved state; embedded so reads go
	// straight through RWTree.
	*ImmutableTree
	// Have any writes occurred since last save (set by Set/Delete, cleared by Save).
	updated bool
}
// NewRWTree creates a concurrency safe version of an IAVL tree whereby reads are routed to the
// last saved tree. Writes must be serialised (as they are within a commit for example).
// Both the working tree and the initial (empty) read tree share the same backing DB.
func NewRWTree(db dbm.DB, cacheSize int) (*RWTree, error) {
	tree, err := NewMutableTree(db, cacheSize)
	return &RWTree{
		tree:          tree,
		ImmutableTree: &ImmutableTree{iavl.NewImmutableTree(db, cacheSize)},
	}, err
}
// Load loads the tree state at the given version from DB into both the working
// tree and the read tree, so reads immediately serve that version.
// `overwriting` is forwarded to MutableTree.Load (presumably allowing later
// versions to be discarded — confirm against MutableTree's docs).
// Versions must be positive; non-positive versions are rejected with an error.
func (rwt *RWTree) Load(version int64, overwriting bool) error {
	const errHeader = "RWTree.Load():"
	if version <= 0 {
		return fmt.Errorf("%s trying to load from non-positive version %d", errHeader, version)
	}
	err := rwt.tree.Load(version, overwriting)
	if err != nil {
		return fmt.Errorf("%s loading version %d: %v", errHeader, version, err)
	}
	// Set readTree at commit point == tree, so reads reflect the loaded version.
	rwt.ImmutableTree, err = rwt.tree.GetImmutable(version)
	if err != nil {
		return fmt.Errorf("%s loading version %d: %v", errHeader, version, err)
	}
	return nil
}
// Save persists the current write tree, making all accumulated writes visible
// through the read path. Returns the new root hash and version.
// On success the dirty flag is cleared.
func (rwt *RWTree) Save() ([]byte, int64, error) {
	// save state at a new version may still be orphaned before we save the version against the hash
	hash, version, err := rwt.tree.SaveVersion()
	if err != nil {
		return nil, 0, fmt.Errorf("could not save RWTree: %v", err)
	}
	// Take an immutable reference to the tree we just saved for querying
	rwt.ImmutableTree, err = rwt.tree.GetImmutable(version)
	if err != nil {
		return nil, 0, fmt.Errorf("RWTree.Save() could not obtain ImmutableTree read tree: %v", err)
	}
	rwt.updated = false
	return hash, version, nil
}
// Set writes key/value into the working tree and marks the tree dirty.
// The write is not visible to readers until Save. The returned bool comes from
// the underlying IAVL Set (presumably true when the key already existed —
// confirm against iavl's docs).
func (rwt *RWTree) Set(key, value []byte) bool {
	rwt.updated = true
	return rwt.tree.Set(key, value)
}

// Delete removes key from the working tree and marks the tree dirty.
// Returns the removed value and whether the key was present. Note the dirty
// flag is set even when the key did not exist.
func (rwt *RWTree) Delete(key []byte) ([]byte, bool) {
	rwt.updated = true
	return rwt.tree.Remove(key)
}

// Updated returns true if there have been any writes since the last save.
func (rwt *RWTree) Updated() bool {
	return rwt.updated
}
// GetImmutable returns an immutable snapshot of the given saved version.
func (rwt *RWTree) GetImmutable(version int64) (*ImmutableTree, error) {
	return rwt.tree.GetImmutable(version)
}

// IterateWriteTree iterates over the *working* tree (including unsaved writes),
// unlike the embedded read-tree iteration which only sees the last saved state.
func (rwt *RWTree) IterateWriteTree(start, end []byte, ascending bool, fn func(key []byte, value []byte) error) error {
	return rwt.tree.IterateWriteTree(start, end, ascending, fn)
}
// Dump renders both the read tree and the write tree as an ASCII tree,
// useful for debugging divergence between saved and pending state.
func (rwt *RWTree) Dump() string {
	tree := treeprint.New()
	AddTreePrintTree("ReadTree", tree, rwt)
	AddTreePrintTree("WriteTree", tree, rwt.tree)
	return tree.String()
}
func AddTreePrintTree(edge string, tree treeprint.Tree, rwt KVCallbackIterableReader) {
tree = tree.AddBranch(fmt.Sprintf("%q", edge))
rwt.Iterate(nil, nil, true, func(key []byte, value []byte) error {
tree.AddNode(fmt.Sprintf("%q -> %q", string(key), string(value)))
return nil
})
} | storage/rwtree.go | 0.724578 | 0.466481 | rwtree.go | starcoder |
package slice
import (
"fmt"
"math"
"math/rand"
)
// IndexOfUInt64 returns the position of the first occurrence of y in x,
// or -1 when y is not present.
func IndexOfUInt64(x []uint64, y uint64) int {
	for i := 0; i < len(x); i++ {
		if x[i] == y {
			return i
		}
	}
	return -1
}
// ContainsUInt64 reports whether y occurs anywhere in x.
func ContainsUInt64(x []uint64, y uint64) bool {
	for _, v := range x {
		if v == y {
			return true
		}
	}
	return false
}
// EqualsUInt64s reports whether x and y have the same length and the same
// elements in the same order.
func EqualsUInt64s(x []uint64, y []uint64) bool {
	if len(x) != len(y) {
		return false
	}
	for i, v := range x {
		if v != y[i] {
			return false
		}
	}
	return true
}
// CopyUInt64s returns a freshly allocated slice with the same contents as x;
// mutating the result never affects x.
func CopyUInt64s(x []uint64) []uint64 {
	c := make([]uint64, len(x))
	copy(c, x)
	return c
}
// CutUInt64s removes elements x[i:j] (i inclusive, j exclusive) and returns
// the shortened slice. Note the result shares x's backing array, so x is
// modified in place.
func CutUInt64s(x []uint64, i, j int) ([]uint64, error) {
	switch {
	case i < 0 || j > len(x):
		return x, fmt.Errorf("out of bound")
	case i >= j:
		return x, fmt.Errorf("%d must be smaller than %d", i, j)
	}
	return append(x[:i], x[j:]...), nil
}
// RemoveUInt64 removes the first occurrence of y from x (in place, sharing
// the backing array). If y is absent, x is returned unchanged.
func RemoveUInt64(x []uint64, y uint64) []uint64 {
	for i, v := range x {
		if v == y {
			return append(x[:i], x[i+1:]...)
		}
	}
	return x
}
// RemoveUInt64At removes the element at index from x (in place, sharing the
// backing array). Valid indexes are 0 <= index < len(x).
//
// Bug fix: the bound check previously used `index > len(x)`, which accepted
// index == len(x) and then panicked on the x[index+1:] slice expression.
func RemoveUInt64At(x []uint64, index int) ([]uint64, error) {
	if index < 0 || index >= len(x) {
		return x, fmt.Errorf("out of bound")
	}
	return append(x[:index], x[(index+1):]...), nil
}
// InsertUInt64At inserts y into x at index (0 <= index <= len(x); index ==
// len(x) appends). The backing array is reused when capacity allows.
func InsertUInt64At(x []uint64, y uint64, index int) ([]uint64, error) {
	if index < 0 || index > len(x) {
		return x, fmt.Errorf("out of bound")
	}
	out := append(x, 0)
	copy(out[index+1:], out[index:])
	out[index] = y
	return out, nil
}
// InsertUInt64sAt inserts all of y into x at index (0 <= index <= len(x)).
// Note: both x's and y's backing arrays may be reused by the result.
func InsertUInt64sAt(x []uint64, y []uint64, index int) ([]uint64, error) {
	if index < 0 || index > len(x) {
		return x, fmt.Errorf("out of bound")
	}
	tail := append(y, x[index:]...)
	return append(x[:index], tail...), nil
}
// PopFirstUInt64 returns the first element of x and the remaining slice
// (a view into x, not a copy). Errors on an empty slice.
func PopFirstUInt64(x []uint64) (uint64, []uint64, error) {
	if len(x) == 0 {
		return 0, nil, fmt.Errorf("no value to pop")
	}
	head, rest := x[0], x[1:]
	return head, rest, nil
}
// PopLastUInt64 returns the last element of x and the remaining slice
// (a view into x, not a copy). Errors on an empty slice.
func PopLastUInt64(x []uint64) (uint64, []uint64, error) {
	n := len(x)
	if n == 0 {
		return 0, nil, fmt.Errorf("no value to pop")
	}
	return x[n-1], x[:n-1], nil
}
// FilterUInt64s keeps only the elements for which filter returns true.
// Filtering is done in place: the result reuses x's backing array, so x's
// contents are overwritten.
func FilterUInt64s(x []uint64, filter func(uint64) bool) []uint64 {
	kept := x[:0]
	for _, v := range x {
		if filter(v) {
			kept = append(kept, v)
		}
	}
	return kept
}
// ReverseUInt64s reverses x in place and returns it for convenience.
func ReverseUInt64s(x []uint64) []uint64 {
	for l, r := 0, len(x)-1; l < r; l, r = l+1, r-1 {
		x[l], x[r] = x[r], x[l]
	}
	return x
}
// ShuffleUInt64s permutes x in place using a Fisher-Yates shuffle driven by
// the package-level math/rand source, and returns x.
func ShuffleUInt64s(x []uint64) []uint64 {
	for n := len(x) - 1; n > 0; n-- {
		m := rand.Intn(n + 1)
		x[n], x[m] = x[m], x[n]
	}
	return x
}
// MergeUInt64s concatenates the distinct elements of x then y, skipping any
// value listed in excludes. Order is preserved: all of x's survivors first,
// then y's.
func MergeUInt64s(x []uint64, y []uint64, excludes ...uint64) []uint64 {
	seen := make(map[uint64]bool, len(excludes))
	for _, ex := range excludes {
		seen[ex] = true
	}
	merged := make([]uint64, 0)
	for _, src := range [][]uint64{x, y} {
		for _, v := range src {
			if !seen[v] {
				seen[v] = true
				merged = append(merged, v)
			}
		}
	}
	return merged
}
// UniqueUInt64s returns a new slice containing x's elements with duplicates
// dropped, keeping the first occurrence of each value.
func UniqueUInt64s(x []uint64) []uint64 {
	seen := make(map[uint64]bool, len(x))
	out := make([]uint64, 0)
	for _, v := range x {
		if !seen[v] {
			seen[v] = true
			out = append(out, v)
		}
	}
	return out
}
// SumOfUInt64s returns the sum of all elements of x (0 for an empty slice).
// Overflow wraps, per Go's unsigned integer arithmetic.
func SumOfUInt64s(x []uint64) uint64 {
	var total uint64
	for _, v := range x {
		total += v
	}
	return total
}
// TransformUInt64s computes how to turn current into target: values whose
// occurrence count is higher in target go into add, values whose count is
// higher in current go into remove. The order of add/remove follows Go's
// (randomized) map iteration order, as in the original implementation.
func TransformUInt64s(target, current []uint64) (add, remove []uint64) {
	add = make([]uint64, 0)
	remove = make([]uint64, 0)
	// Positive count: present more often in target; negative: more often in current.
	counts := make(map[uint64]int)
	for _, v := range target {
		counts[v]++
	}
	for _, v := range current {
		counts[v]--
	}
	for v, c := range counts {
		switch {
		case c > 0:
			add = append(add, v)
		case c < 0:
			remove = append(remove, v)
		}
	}
	return
}
// ChunkUInt64s splits x into consecutive sub-slices of at most chunkSize
// elements; the final chunk may be shorter. The chunks alias x's backing
// array (no copying). A chunkSize below 1 yields nil.
func ChunkUInt64s(x []uint64, chunkSize int) (chunks [][]uint64) {
	if chunkSize < 1 {
		return
	}
	for i := 0; i < len(x); i += chunkSize {
		end := i + chunkSize
		// necessary check to avoid slicing beyond the slice's length
		if end > len(x) {
			end = len(x)
		}
		chunks = append(chunks, x[i:end])
	}
	return chunks
}
package trueskill
import (
"math"
"github.com/bigflood/go-trueskill/gaussian"
)
// TrueSkill holds the environment parameters of a TrueSkill rating system.
// Defaults are established by Init/InitWithMu.
type TrueSkill struct {
	Mu              float64 // mean of ratings
	Sigma           float64 // standard deviation of ratings
	Beta            float64 // skill-gap scale used in quality/rating updates (sigma/2 by default)
	Tau             float64 // dynamic factor
	DrawProbability float64 // prior probability of a draw (0.1 by default)
}
// Init initializes the environment with the conventional default mean of 25
// and returns ts for chaining.
func (ts *TrueSkill) Init() *TrueSkill {
	ts.InitWithMu(25)
	return ts
}

// InitWithMu derives every parameter from the given mean:
// sigma = mu/3, beta = sigma/2, tau = sigma/100, draw probability = 10%.
// Returns ts for chaining.
func (ts *TrueSkill) InitWithMu(mu float64) *TrueSkill {
	ts.Mu = mu
	ts.Sigma = ts.Mu / 3
	ts.Beta = ts.Sigma / 2
	ts.Tau = ts.Sigma / 100
	ts.DrawProbability = 0.1
	return ts
}
// CalcDrawProbability converts a draw margin into a draw probability for a
// game with `size` total players: 2*Phi(margin / (sqrt(size)*beta)) - 1.
func (ts *TrueSkill) CalcDrawProbability(drawMargin float64, size int) float64 {
	return ts.Cdf(drawMargin/(math.Sqrt(float64(size))*ts.Beta))*2 - 1
}

// CalcDrawMargin is the inverse of CalcDrawProbability: it converts a draw
// probability back into a performance-difference margin.
func (ts *TrueSkill) CalcDrawMargin(drawProbability float64, size int) float64 {
	return ts.Ppf((drawProbability+1)/2.) * math.Sqrt(float64(size)) * ts.Beta
}
// Cdf is the standard normal (mu=0, sigma=1) cumulative distribution function.
func (ts *TrueSkill) Cdf(x float64) float64 {
	return gaussian.Cdf(0, 1, x)
}

// Pdf is the standard normal probability density function.
func (ts *TrueSkill) Pdf(x float64) float64 {
	return gaussian.Pdf(0, 1, x)
}

// Ppf is the standard normal percent-point function (inverse CDF).
func (ts *TrueSkill) Ppf(x float64) float64 {
	return gaussian.Ppf(0, 1, x)
}
// Quality1vs1 returns the match quality (roughly, the draw likelihood) of a
// 1-vs-1 game between players with skill distributions a and b:
// exp(-(mu_a-mu_b)^2 / (2*(2*beta^2 + var_a + var_b))) * sqrt(2*beta^2 / v).
func (ts *TrueSkill) Quality1vs1(a, b gaussian.Gaussian) float64 {
	d := a.Mu() - b.Mu()
	beta22 := ts.Beta * ts.Beta * 2
	v := beta22 + a.Var() + b.Var()
	e := - 0.5 * d * d / v
	s := beta22 / v
	return math.Exp(e) * math.Sqrt(s)
}
// Rate1vs1 computes updated skill distributions for the winner a and the loser
// b of a 1-vs-1 game (or both players if draw is true). It runs one pass of
// message passing over the TrueSkill factor graph: dynamics (tau), performance
// noise (beta), the win/draw truncation factor, and back up to the skills.
func (ts *TrueSkill) Rate1vs1(a, b gaussian.Gaussian, draw bool) (gaussian.Gaussian, gaussian.Gaussian) {
	tau2 := ts.Tau * ts.Tau
	beta2 := ts.Beta * ts.Beta
	// prior factors: widen each skill by the dynamic variance tau^2
	a2 := gaussian.MuVariance(a.Mu(), a.Var()+tau2)
	b2 := gaussian.MuVariance(b.Mu(), b.Var()+tau2)
	// likelyhood factors: add the performance noise beta^2
	a3 := gaussian.MuVariance(a2.Mu(), a2.Var()+beta2)
	b3 := gaussian.MuVariance(b2.Mu(), b2.Var()+beta2)
	// sum: diff. of the two performances
	d := a3.Sub(b3)
	// truncate the difference according to the observed outcome
	t := ts.truncateFactor(d, draw)
	// sum factors (up): propagate the truncated message back to each performance
	a4 := b3.Add(t)
	b4 := a3.Sub(t)
	a5 := a4.Mul(a3)
	b5 := b4.Mul(b3)
	a6 := a5.Div(a3)
	b6 := b5.Div(b3)
	// likelyhood factors (up): re-add the performance noise on the way back
	a7 := gaussian.MuVariance(a6.Mu(), a6.Var()+beta2)
	b7 := gaussian.MuVariance(b6.Mu(), b6.Var()+beta2)
	// player skills: combine with the widened priors
	a8 := a7.Mul(a2)
	b8 := b7.Mul(b2)
	return a8, b8
}
// truncateFactor applies the outcome constraint to the performance-difference
// Gaussian d: it moment-matches the truncated distribution using the (v, w)
// correction terms and returns the resulting message (divided by d).
func (ts *TrueSkill) truncateFactor(d gaussian.Gaussian, draw bool) gaussian.Gaussian {
	sqrtPi := math.Sqrt(d.Pi)
	var v, w float64
	if draw {
		v, w = ts.vwDraw(d)
	} else {
		v, w = ts.vwWin(d)
	}
	denom := 1 - w
	pi, tau := d.Pi/denom, (d.Tau+sqrtPi*v)/denom
	t := gaussian.Gaussian{Pi: pi, Tau: tau}
	return t.Div(d)
}
// vwWin returns the (v, w) correction terms for a win/loss outcome, where d is
// the performance-difference Gaussian: v is the additive mean correction and w
// the multiplicative variance correction used by truncateFactor.
func (ts *TrueSkill) vwWin(d gaussian.Gaussian) (float64, float64) {
	sqrtPi := math.Sqrt(d.Pi)
	dm := ts.CalcDrawMargin(ts.DrawProbability, 2)
	x := (d.Mu() - dm) * sqrtPi
	cdf := ts.Cdf(x)
	pdf := ts.Pdf(x)
	// Standard truncated-normal moment corrections: v = pdf/cdf, w = v*(v+x).
	v := pdf / cdf
	w := v * (v + x)
	return v, w
}
func (ts *TrueSkill) vwDraw(d gaussian.Gaussian) (float64, float64) {
dm := ts.CalcDrawMargin(ts.DrawProbability, 2)
mu := d.Mu()
sqrtPi := math.Sqrt(d.Pi)
absMuSqrtPi := math.Abs(mu) * sqrtPi
dmSqrtPi := dm * sqrtPi
a := dmSqrtPi - absMuSqrtPi
b := -dmSqrtPi - absMuSqrtPi
pdfa := ts.Pdf(a)
pdfb := ts.Pdf(b)
denom := ts.Cdf(a) - ts.Cdf(b)
numer := pdfb - pdfa
v := numer / denom
if mu < 0 {
v = -v
}
w := (v * v) + (a*pdfa-b*pdfb)/denom
return v, w
} | trueskill.go | 0.80147 | 0.57332 | trueskill.go | starcoder |
package lo
// T2 creates a tuple from a list of values.
func T2[A any, B any](a A, b B) Tuple2[A, B] {
	return Tuple2[A, B]{A: a, B: b}
}

// T3 creates a tuple from a list of values.
func T3[A any, B any, C any](a A, b B, c C) Tuple3[A, B, C] {
	return Tuple3[A, B, C]{A: a, B: b, C: c}
}

// T4 creates a tuple from a list of values.
func T4[A any, B any, C any, D any](a A, b B, c C, d D) Tuple4[A, B, C, D] {
	return Tuple4[A, B, C, D]{A: a, B: b, C: c, D: d}
}

// T5 creates a tuple from a list of values.
func T5[A any, B any, C any, D any, E any](a A, b B, c C, d D, e E) Tuple5[A, B, C, D, E] {
	return Tuple5[A, B, C, D, E]{A: a, B: b, C: c, D: d, E: e}
}

// T6 creates a tuple from a list of values.
func T6[A any, B any, C any, D any, E any, F any](a A, b B, c C, d D, e E, f F) Tuple6[A, B, C, D, E, F] {
	return Tuple6[A, B, C, D, E, F]{A: a, B: b, C: c, D: d, E: e, F: f}
}

// T7 creates a tuple from a list of values.
func T7[A any, B any, C any, D any, E any, F any, G any](a A, b B, c C, d D, e E, f F, g G) Tuple7[A, B, C, D, E, F, G] {
	return Tuple7[A, B, C, D, E, F, G]{A: a, B: b, C: c, D: d, E: e, F: f, G: g}
}

// T8 creates a tuple from a list of values.
func T8[A any, B any, C any, D any, E any, F any, G any, H any](a A, b B, c C, d D, e E, f F, g G, h H) Tuple8[A, B, C, D, E, F, G, H] {
	return Tuple8[A, B, C, D, E, F, G, H]{A: a, B: b, C: c, D: d, E: e, F: f, G: g, H: h}
}

// T9 creates a tuple from a list of values.
func T9[A any, B any, C any, D any, E any, F any, G any, H any, I any](a A, b B, c C, d D, e E, f F, g G, h H, i I) Tuple9[A, B, C, D, E, F, G, H, I] {
	return Tuple9[A, B, C, D, E, F, G, H, I]{A: a, B: b, C: c, D: d, E: e, F: f, G: g, H: h, I: i}
}
// Unpack2 returns the individual values contained in tuple.
func Unpack2[A any, B any](tuple Tuple2[A, B]) (A, B) {
	return tuple.A, tuple.B
}

// Unpack3 returns the individual values contained in tuple.
func Unpack3[A any, B any, C any](tuple Tuple3[A, B, C]) (A, B, C) {
	return tuple.A, tuple.B, tuple.C
}

// Unpack4 returns the individual values contained in tuple.
func Unpack4[A any, B any, C any, D any](tuple Tuple4[A, B, C, D]) (A, B, C, D) {
	return tuple.A, tuple.B, tuple.C, tuple.D
}

// Unpack5 returns the individual values contained in tuple.
func Unpack5[A any, B any, C any, D any, E any](tuple Tuple5[A, B, C, D, E]) (A, B, C, D, E) {
	return tuple.A, tuple.B, tuple.C, tuple.D, tuple.E
}

// Unpack6 returns the individual values contained in tuple.
func Unpack6[A any, B any, C any, D any, E any, F any](tuple Tuple6[A, B, C, D, E, F]) (A, B, C, D, E, F) {
	return tuple.A, tuple.B, tuple.C, tuple.D, tuple.E, tuple.F
}

// Unpack7 returns the individual values contained in tuple.
func Unpack7[A any, B any, C any, D any, E any, F any, G any](tuple Tuple7[A, B, C, D, E, F, G]) (A, B, C, D, E, F, G) {
	return tuple.A, tuple.B, tuple.C, tuple.D, tuple.E, tuple.F, tuple.G
}

// Unpack8 returns the individual values contained in tuple.
func Unpack8[A any, B any, C any, D any, E any, F any, G any, H any](tuple Tuple8[A, B, C, D, E, F, G, H]) (A, B, C, D, E, F, G, H) {
	return tuple.A, tuple.B, tuple.C, tuple.D, tuple.E, tuple.F, tuple.G, tuple.H
}

// Unpack9 returns the individual values contained in tuple.
func Unpack9[A any, B any, C any, D any, E any, F any, G any, H any, I any](tuple Tuple9[A, B, C, D, E, F, G, H, I]) (A, B, C, D, E, F, G, H, I) {
	return tuple.A, tuple.B, tuple.C, tuple.D, tuple.E, tuple.F, tuple.G, tuple.H, tuple.I
}
// Zip2 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip2[A any, B any](a []A, b []B) []Tuple2[A, B] {
	size := Max[int]([]int{len(a), len(b)})
	result := make([]Tuple2[A, B], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		result = append(result, Tuple2[A, B]{
			A: _a,
			B: _b,
		})
	}
	return result
}
// Zip3 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip3[A any, B any, C any](a []A, b []B, c []C) []Tuple3[A, B, C] {
	size := Max[int]([]int{len(a), len(b), len(c)})
	result := make([]Tuple3[A, B, C], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		result = append(result, Tuple3[A, B, C]{
			A: _a,
			B: _b,
			C: _c,
		})
	}
	return result
}
// Zip4 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip4[A any, B any, C any, D any](a []A, b []B, c []C, d []D) []Tuple4[A, B, C, D] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d)})
	result := make([]Tuple4[A, B, C, D], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		result = append(result, Tuple4[A, B, C, D]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
		})
	}
	return result
}
// Zip5 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip5[A any, B any, C any, D any, E any](a []A, b []B, c []C, d []D, e []E) []Tuple5[A, B, C, D, E] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e)})
	result := make([]Tuple5[A, B, C, D, E], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		_e, _ := Nth[E](e, index)
		result = append(result, Tuple5[A, B, C, D, E]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
			E: _e,
		})
	}
	return result
}
// Zip6 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip6[A any, B any, C any, D any, E any, F any](a []A, b []B, c []C, d []D, e []E, f []F) []Tuple6[A, B, C, D, E, F] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f)})
	result := make([]Tuple6[A, B, C, D, E, F], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		_e, _ := Nth[E](e, index)
		_f, _ := Nth[F](f, index)
		result = append(result, Tuple6[A, B, C, D, E, F]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
			E: _e,
			F: _f,
		})
	}
	return result
}
// Zip7 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip7[A any, B any, C any, D any, E any, F any, G any](a []A, b []B, c []C, d []D, e []E, f []F, g []G) []Tuple7[A, B, C, D, E, F, G] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g)})
	result := make([]Tuple7[A, B, C, D, E, F, G], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		_e, _ := Nth[E](e, index)
		_f, _ := Nth[F](f, index)
		_g, _ := Nth[G](g, index)
		result = append(result, Tuple7[A, B, C, D, E, F, G]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
			E: _e,
			F: _f,
			G: _g,
		})
	}
	return result
}
// Zip8 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip8[A any, B any, C any, D any, E any, F any, G any, H any](a []A, b []B, c []C, d []D, e []E, f []F, g []G, h []H) []Tuple8[A, B, C, D, E, F, G, H] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g), len(h)})
	result := make([]Tuple8[A, B, C, D, E, F, G, H], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		_e, _ := Nth[E](e, index)
		_f, _ := Nth[F](f, index)
		_g, _ := Nth[G](g, index)
		_h, _ := Nth[H](h, index)
		result = append(result, Tuple8[A, B, C, D, E, F, G, H]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
			E: _e,
			F: _f,
			G: _g,
			H: _h,
		})
	}
	return result
}
// Zip9 creates a slice of grouped elements, the first of which contains the first elements
// of the given arrays, the second of which contains the second elements of the given arrays, and so on.
// When collections have different size, the Tuple attributes are filled with zero value
// (Nth yields the zero value for out-of-range indexes).
func Zip9[A any, B any, C any, D any, E any, F any, G any, H any, I any](a []A, b []B, c []C, d []D, e []E, f []F, g []G, h []H, i []I) []Tuple9[A, B, C, D, E, F, G, H, I] {
	size := Max[int]([]int{len(a), len(b), len(c), len(d), len(e), len(f), len(g), len(h), len(i)})
	result := make([]Tuple9[A, B, C, D, E, F, G, H, I], 0, size)
	for index := 0; index < size; index++ {
		_a, _ := Nth[A](a, index)
		_b, _ := Nth[B](b, index)
		_c, _ := Nth[C](c, index)
		_d, _ := Nth[D](d, index)
		_e, _ := Nth[E](e, index)
		_f, _ := Nth[F](f, index)
		_g, _ := Nth[G](g, index)
		_h, _ := Nth[H](h, index)
		_i, _ := Nth[I](i, index)
		result = append(result, Tuple9[A, B, C, D, E, F, G, H, I]{
			A: _a,
			B: _b,
			C: _c,
			D: _d,
			E: _e,
			F: _f,
			G: _g,
			H: _h,
			I: _i,
		})
	}
	return result
}
// Unzip2 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip2[A any, B any](tuples []Tuple2[A, B]) ([]A, []B) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
	}
	return r1, r2
}
// Unzip3 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip3[A any, B any, C any](tuples []Tuple3[A, B, C]) ([]A, []B, []C) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
	}
	return r1, r2, r3
}
// Unzip4 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip4[A any, B any, C any, D any](tuples []Tuple4[A, B, C, D]) ([]A, []B, []C, []D) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	r4 := make([]D, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
		r4 = append(r4, tuple.D)
	}
	return r1, r2, r3, r4
}
// Unzip5 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip5[A any, B any, C any, D any, E any](tuples []Tuple5[A, B, C, D, E]) ([]A, []B, []C, []D, []E) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	r4 := make([]D, 0, size)
	r5 := make([]E, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
		r4 = append(r4, tuple.D)
		r5 = append(r5, tuple.E)
	}
	return r1, r2, r3, r4, r5
}
// Unzip6 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip6[A any, B any, C any, D any, E any, F any](tuples []Tuple6[A, B, C, D, E, F]) ([]A, []B, []C, []D, []E, []F) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	r4 := make([]D, 0, size)
	r5 := make([]E, 0, size)
	r6 := make([]F, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
		r4 = append(r4, tuple.D)
		r5 = append(r5, tuple.E)
		r6 = append(r6, tuple.F)
	}
	return r1, r2, r3, r4, r5, r6
}
// Unzip7 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip7[A any, B any, C any, D any, E any, F any, G any](tuples []Tuple7[A, B, C, D, E, F, G]) ([]A, []B, []C, []D, []E, []F, []G) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	r4 := make([]D, 0, size)
	r5 := make([]E, 0, size)
	r6 := make([]F, 0, size)
	r7 := make([]G, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
		r4 = append(r4, tuple.D)
		r5 = append(r5, tuple.E)
		r6 = append(r6, tuple.F)
		r7 = append(r7, tuple.G)
	}
	return r1, r2, r3, r4, r5, r6, r7
}
// Unzip8 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip8[A any, B any, C any, D any, E any, F any, G any, H any](tuples []Tuple8[A, B, C, D, E, F, G, H]) ([]A, []B, []C, []D, []E, []F, []G, []H) {
	size := len(tuples)
	r1 := make([]A, 0, size)
	r2 := make([]B, 0, size)
	r3 := make([]C, 0, size)
	r4 := make([]D, 0, size)
	r5 := make([]E, 0, size)
	r6 := make([]F, 0, size)
	r7 := make([]G, 0, size)
	r8 := make([]H, 0, size)
	for _, tuple := range tuples {
		r1 = append(r1, tuple.A)
		r2 = append(r2, tuple.B)
		r3 = append(r3, tuple.C)
		r4 = append(r4, tuple.D)
		r5 = append(r5, tuple.E)
		r6 = append(r6, tuple.F)
		r7 = append(r7, tuple.G)
		r8 = append(r8, tuple.H)
	}
	return r1, r2, r3, r4, r5, r6, r7, r8
}
// Unzip9 accepts an array of grouped elements and creates an array regrouping the elements
// to their pre-zip configuration.
func Unzip9[A any, B any, C any, D any, E any, F any, G any, H any, I any](tuples []Tuple9[A, B, C, D, E, F, G, H, I]) ([]A, []B, []C, []D, []E, []F, []G, []H, []I) {
size := len(tuples)
r1 := make([]A, 0, size)
r2 := make([]B, 0, size)
r3 := make([]C, 0, size)
r4 := make([]D, 0, size)
r5 := make([]E, 0, size)
r6 := make([]F, 0, size)
r7 := make([]G, 0, size)
r8 := make([]H, 0, size)
r9 := make([]I, 0, size)
for _, tuple := range tuples {
r1 = append(r1, tuple.A)
r2 = append(r2, tuple.B)
r3 = append(r3, tuple.C)
r4 = append(r4, tuple.D)
r5 = append(r5, tuple.E)
r6 = append(r6, tuple.F)
r7 = append(r7, tuple.G)
r8 = append(r8, tuple.H)
r9 = append(r9, tuple.I)
}
return r1, r2, r3, r4, r5, r6, r7, r8, r9
} | vendor/github.com/samber/lo/tuples.go | 0.869715 | 0.846514 | tuples.go | starcoder |
package sections
import (
"github.com/edanko/dxf-go/core"
)
// Bit flags used by DXF group code 74 to classify a line-type element.
const absRotationBit = 0x1
const textStringBit = 0x2
const elementShapeBit = 0x4
// LineElement represents a single element (dash, dot, text or shape segment)
// in a LineType's pattern.
type LineElement struct {
	Length           float64 // dash length; sign conventions follow the DXF spec (confirm against the DXF reference)
	AbsoluteRotation bool    // group 74 bit 0x1: rotation is absolute rather than relative
	IsTextString     bool    // group 74 bit 0x2: element embeds a text string
	IsShape          bool    // group 74 bit 0x4: element embeds a shape
	ShapeNumber      int     // group 75: shape number, meaningful only when IsShape
	Scale            float64 // group 46: scale factor (defaults to 1.0 in NewLineType)
	RotationAngle    float64 // group 50
	XOffset          float64 // group 44
	YOffset          float64 // group 45
	Text             string  // group 9: embedded text, meaningful only when IsTextString
}
// Equals compares two LineElement objects for equality.
// Float fields are compared with core.FloatEquals (tolerance-based), all
// other fields with exact equality.
func (e LineElement) Equals(other LineElement) bool {
	return core.FloatEquals(e.Length, other.Length) &&
		e.AbsoluteRotation == other.AbsoluteRotation &&
		e.IsTextString == other.IsTextString &&
		e.IsShape == other.IsShape &&
		e.ShapeNumber == other.ShapeNumber &&
		core.FloatEquals(e.Scale, other.Scale) &&
		core.FloatEquals(e.RotationAngle, other.RotationAngle) &&
		core.FloatEquals(e.XOffset, other.XOffset) &&
		core.FloatEquals(e.YOffset, other.YOffset) &&
		e.Text == other.Text
}
// LineType is the parsed representation of a DXF LTYPE table entry.
type LineType struct {
	core.DxfParseable
	Name        string         // group 2
	Description string         // group 3
	Length      float64        // group 40: total pattern length
	Pattern     []*LineElement // one entry per group-49 element
}
// Equals compares two LineType objects for equality.
// Returns false when other is not a *LineType, when scalar fields differ,
// or when any pattern element differs (element-wise, in order).
func (ltype LineType) Equals(other core.DxfElement) bool {
	if otherLtype, ok := other.(*LineType); ok {
		if ltype.Name != otherLtype.Name ||
			ltype.Description != otherLtype.Description ||
			!core.FloatEquals(ltype.Length, otherLtype.Length) ||
			len(ltype.Pattern) != len(otherLtype.Pattern) {
			return false
		}
		for i, pattern1 := range ltype.Pattern {
			pattern2 := otherLtype.Pattern[i]
			if !pattern1.Equals(*pattern2) {
				return false
			}
		}
		return true
	}
	return false
}
// NewLineType creates a new LineType object from a slice of tags.
func NewLineType(tags core.TagSlice) (*LineType, error) {
ltype := new(LineType)
ltype.Pattern = make([]*LineElement, 0)
flags74 := 0
var lineElement *LineElement
ltype.Init(map[int]core.TypeParser{
2: core.NewStringTypeParserToVar(<ype.Name),
3: core.NewStringTypeParserToVar(<ype.Description),
40: core.NewFloatTypeParserToVar(<ype.Length),
49: core.NewFloatTypeParser(func(length float64) {
if lineElement != nil {
ltype.Pattern = append(ltype.Pattern, lineElement)
}
lineElement = new(LineElement)
lineElement.Scale = 1.0
lineElement.Length = length
}),
74: core.NewIntTypeParser(func(flags int) {
flags74 = flags
if flags74 > 0 {
lineElement.AbsoluteRotation = flags74&absRotationBit > 0
lineElement.IsTextString = flags74&textStringBit > 0
lineElement.IsShape = flags74&elementShapeBit > 0
}
}),
75: core.NewIntTypeParser(func(flags int) {
if flags74 == 0 {
core.Log.Print("WARNING! there should be no 75 Code tag if 74 value is 0\n")
} else if lineElement.IsTextString && flags != 0 {
core.Log.Print("WARNING! Tag 75 should be 0 if 74 is a TextString\n")
} else if lineElement.IsShape {
lineElement.ShapeNumber = flags
}
}),
46: core.NewFloatTypeParser(func(scale float64) {
lineElement.Scale = scale
}),
50: core.NewFloatTypeParser(func(angle float64) {
lineElement.RotationAngle = angle
}),
44: core.NewFloatTypeParser(func(xOffset float64) {
lineElement.XOffset = xOffset
}),
45: core.NewFloatTypeParser(func(yOffset float64) {
lineElement.YOffset = yOffset
}),
9: core.NewStringTypeParser(func(text string) {
lineElement.Text = text
}),
})
err := ltype.Parse(tags)
if lineElement != nil {
ltype.Pattern = append(ltype.Pattern, lineElement)
}
return ltype, err
}
// NewLineTypeTable parses the slice of tags into a table that maps the LineType name to
// the parsed LineType object.
func NewLineTypeTable(tags core.TagSlice) (Table, error) {
table := make(Table)
tableSlices, err := TableEntryTags(tags)
if err != nil {
return table, err
}
for _, slice := range tableSlices {
ltype, err := NewLineType(slice)
if err != nil {
return nil, err
}
table[ltype.Name] = ltype
}
return table, nil
} | sections/linetype.go | 0.727395 | 0.514034 | linetype.go | starcoder |
package seeder
import (
"github.com/golang/glog"
"reflect"
)
// MergeStructFields copies the non-zero exported fields of src onto dst.
// Both arguments must be (pointers to) values of the same struct type;
// nil inputs are ignored and a type mismatch is logged and skipped.
func MergeStructFields(dst, src interface{}) {
	if src == nil || dst == nil {
		return
	}
	from := reflect.ValueOf(src)
	to := reflect.ValueOf(dst)
	if from.Kind() == reflect.Ptr {
		from = from.Elem()
	}
	if to.Kind() == reflect.Ptr {
		to = to.Elem()
	}
	// Explicit test prior to mergeStruct so that mistyped nils will fail
	if from.Type() != to.Type() {
		glog.Error("type mismatch in ", from, ", out ", to)
		return
	}
	mergeStructFields(to, from)
}
// mergeStructFields copies every non-zero field of in onto out, field by
// field. Both values must be struct Values of the same type.
//
// Handling per field kind:
//   - scalar/string fields: overwritten when settable
//   - pointer fields: only *bool pointers are copied
//     (NOTE(review): CanSet is not checked here, unlike the scalar case —
//     confirm this asymmetry is intentional)
//   - map fields: merged key-by-key; the type assertion assumes
//     map[string]string and would panic on any other map type
//   - slice and nested-struct fields: intentionally skipped
func mergeStructFields(out, in reflect.Value) {
	for i := 0; i < in.NumField(); i++ {
		f := in.Type().Field(i)
		// zero-valued source fields never overwrite the destination
		if isZero(in.Field(i)) {
			continue
		}
		switch f.Type.Kind() {
		case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int, reflect.Int32, reflect.Int64,
			reflect.String, reflect.Uint, reflect.Uint32, reflect.Uint64:
			if out.Field(i).CanSet() {
				glog.V(3).Info("merging field ", f.Name, ", new value ", in.Field(i))
				out.Field(i).Set(in.Field(i))
			}
		case reflect.Ptr:
			if f.Type.Elem().Kind() == reflect.Bool {
				glog.V(3).Info("merging field ", f.Name, ", new value ", in.Field(i))
				out.Field(i).Set(in.Field(i))
			}
		case reflect.Map:
			if out.Field(i).CanSet() {
				glog.V(3).Info("merging field ", f.Name, ", new value ", in.Field(i))
				merged := mergeMaps(out.Field(i).Interface().(map[string]string), in.Field(i).Interface().(map[string]string))
				out.Field(i).Set(reflect.ValueOf(merged))
			}
		case reflect.Slice:
			continue
		case reflect.Struct:
			continue
		default:
			glog.Error("unsupported type encountered in merge of ", f.Name, ": ", f.Type.Kind())
		}
	}
}
// mergeMaps flattens the given maps into a single new map; later maps
// overwrite duplicate keys from earlier ones.
func mergeMaps(maps ...map[string]string) map[string]string {
	merged := map[string]string{}
	for _, src := range maps {
		for key, val := range src {
			merged[key] = val
		}
	}
	return merged
}
// MergeStringSlices returns the set union of all given string slices.
// Duplicates are removed; element order is unspecified (map iteration).
func MergeStringSlices(slices ...[]string) []string {
	seen := make(map[string]bool)
	for _, slice := range slices {
		for _, item := range slice {
			seen[item] = true
		}
	}
	merged := make([]string, 0, len(seen))
	for item := range seen {
		merged = append(merged, item)
	}
	return merged
}
// isZero reports whether v holds the zero value of its type.
// Mostly adapted from encoding/json's isEmptyValue.
func isZero(v reflect.Value) bool {
	switch v.Kind() {
	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
		return v.Len() == 0
	case reflect.Bool:
		return v.Bool() == false
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return v.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return v.Float() == 0
	case reflect.Interface, reflect.Ptr, reflect.Func:
		return v.IsNil()
	case reflect.Struct:
		// compare against a freshly-built zero value of the same type
		return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface())
	default:
		// invalid Values (e.g. from a nil interface) count as zero
		if !v.IsValid() {
			return true
		}
		return v.Interface() == reflect.Zero(v.Type()).Interface()
	}
}
package client
// JobSpec describes how the job execution will look like.
// (Generated client model; field comments link to the upstream Kubernetes docs.)
type V1JobSpec struct {
	// Specifies the duration in seconds relative to the startTime that the job may be active before the system tries to terminate it; value must be positive integer
	ActiveDeadlineSeconds int64 `json:"activeDeadlineSeconds,omitempty"`
	// Specifies the number of retries before marking this job failed. Defaults to 6
	BackoffLimit int32 `json:"backoffLimit,omitempty"`
	// Specifies the desired number of successfully finished pods the job should be run with. Setting to nil means that the success of any pod signals the success of all pods, and allows parallelism to have any positive value. Setting to 1 means that parallelism is limited to 1 and the success of that pod signals the success of the job. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
	Completions int32 `json:"completions,omitempty"`
	// manualSelector controls generation of pod labels and pod selectors. Leave `manualSelector` unset unless you are certain what you are doing. When false or unset, the system pick labels unique to this job and appends those labels to the pod template. When true, the user is responsible for picking unique labels and specifying the selector. Failure to pick a unique label may cause this and other jobs to not function correctly. However, You may see `manualSelector=true` in jobs that were created with the old `extensions/v1beta1` API. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/#specifying-your-own-pod-selector
	ManualSelector bool `json:"manualSelector,omitempty"`
	// Specifies the maximum desired number of pods the job should run at any given time. The actual number of pods running in steady state will be less than this number when ((.spec.completions - .status.successful) < .spec.parallelism), i.e. when the work left to do is less than max parallelism. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
	Parallelism int32 `json:"parallelism,omitempty"`
	// A label query over pods that should match the pod count. Normally, the system sets this field for you. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
	Selector *V1LabelSelector `json:"selector,omitempty"`
	// Describes the pod that will be created when executing a job. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
	Template *V1PodTemplateSpec `json:"template"`
} | vendor/github.com/kubernetes-client/go/kubernetes/client/v1_job_spec.go | 0.794505 | 0.47524 | v1_job_spec.go | starcoder |
package eaopt
import (
"errors"
"fmt"
"math/rand"
)
// A Speciator partitions a population into n smaller subpopulations. Each
// subpopulation shares the same random number generator inherited from the
// initial population.
type Speciator interface {
	// Apply splits indis into subpopulations; populationIndex identifies
	// which population is being split.
	Apply(indis Individuals, rng *rand.Rand, populationIndex int) ([]Individuals, error)
	// Validate checks the speciator's configuration for consistency.
	Validate() error
}

// SpecKMedoids (k-medoid clustering).
type SpecKMedoids struct {
	K             uint   // Number of medoids (i.e. number of clusters/species)
	MinPerCluster uint   // Minimum number of individuals per cluster (enforced by rebalancing)
	Metric        Metric // Dissimilarity measure between two individuals
	MaxIterations uint   // Upper bound on medoid-update iterations
}
// Apply SpecKMedoids. It runs k-medoids clustering over the population and
// returns K species. An error is returned if there are fewer than K
// individuals or if the clusters cannot be rebalanced to MinPerCluster.
func (spec SpecKMedoids) Apply(indis Individuals, rng *rand.Rand, populationIndex int) ([]Individuals, error) {
	// Check there are at least K Individuals
	if len(indis) < int(spec.K) {
		return nil, fmt.Errorf("SpecKMedoids: have %d individuals and need at least %d",
			len(indis), spec.K)
	}
	var (
		species = make([]Individuals, spec.K)
		medoids = make(Individuals, spec.K)
		dm      = newDistanceMemoizer(spec.Metric) // caches pairwise distances
	)
	// Initialize the clusters with the individuals having the lowest average
	// distances with the other individuals
	indis.SortByDistanceToMedoid(dm)
	copy(medoids, indis[:spec.K])
	// Add each medoid to a cluster
	for i, medoid := range medoids {
		species[i] = append(species[i], medoid)
	}
	// Keep track of the total distance from the medoid to each of the cluster's
	// members, this total will then be used to compare with different cluster
	// dispositions
	var total float64
	// Assign each individual that is not a medoid to the closest initial medoid
	for _, indi := range indis[spec.K:] {
		var i = indi.IdxOfClosest(medoids, dm)
		species[i] = append(species[i], indi)
		total += dm.GetDistance(medoids[i], indi)
	}
	var nIterations uint
	for nIterations < spec.MaxIterations {
		nIterations++
		var (
			newSpecies = make([]Individuals, len(species))
			newTotal   float64
		)
		// Recompute the new medoid inside each specie
		for i, specie := range species {
			specie.SortByDistanceToMedoid(dm)
			medoids[i] = specie[0]
			newSpecies[i] = append(newSpecies[i], specie[0])
		}
		// Reassign each individual to the closest new medoid
		for _, specie := range species {
			for _, indi := range specie[1:] {
				var i = indi.IdxOfClosest(medoids, dm)
				newSpecies[i] = append(newSpecies[i], indi)
				newTotal += dm.GetDistance(medoids[i], indi)
			}
		}
		// No more iterations are needed if the new total is worse
		if newTotal >= total {
			break
		}
		copy(species, newSpecies)
		total = newTotal
	}
	// Rebalance the species so that each one holds at least MinPerCluster
	// individuals (moves members between clusters; may return an error).
	err := rebalanceClusters(species, dm, spec.MinPerCluster)
	if err != nil {
		return nil, err
	}
	return species, nil
}
// Validate SpecKMedoids fields. It returns an error if K < 2, if no Metric
// is provided, or if MaxIterations < 1.
func (spec SpecKMedoids) Validate() error {
	if spec.K < 2 {
		return errors.New("k should be higher than 1")
	}
	if spec.Metric == nil {
		return errors.New("metric field has to be provided")
	}
	if spec.MaxIterations < 1 {
		// FIX: this message previously read "k should be higher than 0"
		// (copy-paste error) although the condition checks MaxIterations.
		return errors.New("MaxIterations should be higher than 0")
	}
	return nil
}
// SpecFitnessInterval speciates a population based on the fitness of each
// individual where each species contains m = n/k (rounded to the closest upper
// integer) individuals with similar fitnesses. For example, with 4 species, 30
// individuals would be split into 3 groups of 8 individuals and 1 group of 6
// individuals (3*8 + 1*6 = 30). More generally each group is of size
// min(n-i, m) where i is a multiple of m.
type SpecFitnessInterval struct {
	K uint // Number of intervals (i.e. number of species produced)
}
// Apply SpecFitnessInterval. It slices the (fitness-ordered) population into
// K contiguous groups of size ceil(n/K); the last group(s) may be smaller or
// empty when K does not divide n.
func (spec SpecFitnessInterval) Apply(indis Individuals, rng *rand.Rand, populationIndex int) ([]Individuals, error) {
	// Check there are at least K Individuals
	if len(indis) < int(spec.K) {
		return nil, fmt.Errorf("specFitnessInterval: have %d individuals and need at least %d",
			len(indis), spec.K)
	}
	var (
		species = make([]Individuals, spec.K)
		n       = len(indis)
		k       = int(spec.K)
		// FIX: the group size must be n/k rounded UP, as documented on the
		// type. The previous floor division could leave trailing individuals
		// out of every species (e.g. n=30, k=4 gave m=7 and dropped the last
		// 2 individuals).
		m = minInt((n+k-1)/k, n)
	)
	for i := range species {
		// clamp both bounds so trailing groups degrade to empty slices
		var a, b = minInt(i*m, n), minInt((i+1)*m, n)
		species[i] = indis[a:b]
	}
	return species, nil
}
// Validate SpecFitnessInterval fields.
func (spec SpecFitnessInterval) Validate() error {
if spec.K < 2 {
return errors.New("k should be higher than 1")
}
return nil
} | speciation.go | 0.663669 | 0.504089 | speciation.go | starcoder |
package brotli
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if
   it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue.
   The queue is a bounded max-heap-like structure of at most max_num_pairs
   entries whose first element has the largest cost_diff. */
func compareAndPushToQueueCommand(out []histogramCommand, cluster_size []uint32, idx1 uint32, idx2 uint32, max_num_pairs uint, pairs []histogramPair, num_pairs *uint) {
	var is_good_pair bool = false
	var p histogramPair
	p.idx2 = 0
	p.idx1 = p.idx2
	p.cost_combo = 0
	p.cost_diff = p.cost_combo
	if idx1 == idx2 {
		return
	}
	// normalize the pair so that idx1 < idx2
	if idx2 < idx1 {
		var t uint32 = idx2
		idx2 = idx1
		idx1 = t
	}
	p.idx1 = idx1
	p.idx2 = idx2
	// cost_diff starts with the cluster-size penalty minus the individual costs
	p.cost_diff = 0.5 * clusterCostDiff(uint(cluster_size[idx1]), uint(cluster_size[idx2]))
	p.cost_diff -= out[idx1].bit_cost_
	p.cost_diff -= out[idx2].bit_cost_
	if out[idx1].total_count_ == 0 {
		// an empty histogram merges for free: combined cost is the other one's
		p.cost_combo = out[idx2].bit_cost_
		is_good_pair = true
	} else if out[idx2].total_count_ == 0 {
		p.cost_combo = out[idx1].bit_cost_
		is_good_pair = true
	} else {
		var threshold float64
		if *num_pairs == 0 {
			threshold = 1e99
		} else {
			threshold = brotli_max_double(0.0, pairs[0].cost_diff)
		}
		// compute the actual cost of the merged histogram
		var combo histogramCommand = out[idx1]
		var cost_combo float64
		histogramAddHistogramCommand(&combo, &out[idx2])
		cost_combo = populationCostCommand(&combo)
		if cost_combo < threshold-p.cost_diff {
			p.cost_combo = cost_combo
			is_good_pair = true
		}
	}
	if is_good_pair {
		p.cost_diff += p.cost_combo
		if *num_pairs > 0 && histogramPairIsLess(&pairs[0], &p) {
			/* Replace the top of the queue if needed. */
			if *num_pairs < max_num_pairs {
				pairs[*num_pairs] = pairs[0]
				(*num_pairs)++
			}
			pairs[0] = p
		} else if *num_pairs < max_num_pairs {
			pairs[*num_pairs] = p
			(*num_pairs)++
		}
	}
}
/* histogramCombineCommand greedily merges histogram clusters while the best
   available merge improves the total bit cost, stopping once only
   max_clusters remain (or no beneficial merge exists). It returns the final
   number of clusters; symbols[] is remapped in place so every entry points
   at a surviving cluster index. */
func histogramCombineCommand(out []histogramCommand, cluster_size []uint32, symbols []uint32, clusters []uint32, pairs []histogramPair, num_clusters uint, symbols_size uint, max_clusters uint, max_num_pairs uint) uint {
	var cost_diff_threshold float64 = 0.0
	var min_cluster_size uint = 1
	var num_pairs uint = 0
	{
		/* We maintain a vector of histogram pairs, with the property that the pair
		   with the maximum bit cost reduction is the first. */
		var idx1 uint
		for idx1 = 0; idx1 < num_clusters; idx1++ {
			var idx2 uint
			for idx2 = idx1 + 1; idx2 < num_clusters; idx2++ {
				compareAndPushToQueueCommand(out, cluster_size, clusters[idx1], clusters[idx2], max_num_pairs, pairs[0:], &num_pairs)
			}
		}
	}
	for num_clusters > min_cluster_size {
		var best_idx1 uint32
		var best_idx2 uint32
		var i uint
		if pairs[0].cost_diff >= cost_diff_threshold {
			/* No more beneficial merges; from now on only force merges down
			   to max_clusters. */
			cost_diff_threshold = 1e99
			min_cluster_size = max_clusters
			continue
		}
		/* Take the best pair from the top of heap. */
		best_idx1 = pairs[0].idx1
		best_idx2 = pairs[0].idx2
		histogramAddHistogramCommand(&out[best_idx1], &out[best_idx2])
		out[best_idx1].bit_cost_ = pairs[0].cost_combo
		cluster_size[best_idx1] += cluster_size[best_idx2]
		/* Re-point every symbol of the absorbed cluster at the survivor. */
		for i = 0; i < symbols_size; i++ {
			if symbols[i] == best_idx2 {
				symbols[i] = best_idx1
			}
		}
		/* Drop the absorbed cluster from the active-cluster list. */
		for i = 0; i < num_clusters; i++ {
			if clusters[i] == best_idx2 {
				copy(clusters[i:], clusters[i+1:][:num_clusters-i-1])
				break
			}
		}
		num_clusters--
		{
			/* Remove pairs intersecting the just combined best pair. */
			var copy_to_idx uint = 0
			for i = 0; i < num_pairs; i++ {
				var p *histogramPair = &pairs[i]
				if p.idx1 == best_idx1 || p.idx2 == best_idx1 || p.idx1 == best_idx2 || p.idx2 == best_idx2 {
					/* Remove invalid pair from the queue. */
					continue
				}
				if histogramPairIsLess(&pairs[0], p) {
					/* Replace the top of the queue if needed. */
					var front histogramPair = pairs[0]
					pairs[0] = *p
					pairs[copy_to_idx] = front
				} else {
					pairs[copy_to_idx] = *p
				}
				copy_to_idx++
			}
			num_pairs = copy_to_idx
		}
		/* Push new pairs formed with the combined histogram to the heap. */
		for i = 0; i < num_clusters; i++ {
			compareAndPushToQueueCommand(out, cluster_size, best_idx1, clusters[i], max_num_pairs, pairs[0:], &num_pairs)
		}
	}
	return num_clusters
}
/* What is the bit cost of moving histogram from cur_symbol to candidate. */
func histogramBitCostDistanceCommand(histogram *histogramCommand, candidate *histogramCommand) float64 {
if histogram.total_count_ == 0 {
return 0.0
} else {
var tmp histogramCommand = *histogram
histogramAddHistogramCommand(&tmp, candidate)
return populationCostCommand(&tmp) - candidate.bit_cost_
}
} | vendor/github.com/andybalholm/brotli/cluster_command.go | 0.605333 | 0.447098 | cluster_command.go | starcoder |
package d2
import "fmt"
// Rectangler is the interface implemented by objects that can return a
// rectangular representation of themselves in 2D space.
type Rectangler interface {
	// Rectangle returns the object's axis-aligned bounding rectangle.
	Rectangle() Rectangle
}
// A Rectangle is defined by 2 points, Min and Max, represented by 2 Vec2.
// A well-formed Rectangle has Min[0] <= Max[0] and Min[1] <= Max[1].
type Rectangle struct {
	Min, Max Vec2 // opposite corners: Min is inclusive, Max is exclusive (see Contains)
}
// ZR is the zero Rectangle.
var ZR Rectangle

func init() {
	// allocate the vector of the ZR rectangle
	// NOTE(review): if Vec2 is an array type this init is redundant (the
	// package-level zero value already equals Vec2{0, 0}); it only matters
	// if Vec2 is slice-backed — confirm against the Vec2 definition.
	ZR = Rectangle{Min: Vec2{0, 0}, Max: Vec2{0, 0}}
}
// Rect is shorthand for Rectangle{Vec2(x0, y0), Vec2(x1, y1)}. The returned
// rectangle has minimum and maximum coordinates swapped if necessary so that
// it is well-formed.
func Rect(x0, y0, x1, y1 float32) Rectangle {
	if x1 < x0 {
		x0, x1 = x1, x0
	}
	if y1 < y0 {
		y0, y1 = y1, y0
	}
	return Rectangle{Vec2{x0, y0}, Vec2{x1, y1}}
}
// RectWH returns a rectangle whose origin is Vec2{x,y}, w and h are its width
// and height.
func RectWH(x, y, w, h float32) Rectangle {
	origin := Vec2{x, y}
	return Rectangle{
		Min: origin,
		Max: Vec2{origin[0] + w, origin[1] + h},
	}
}
// RectFromCircle returns the minimum rectangle that contains the circle of
// center c and radius r.
func RectFromCircle(c Vec2, r float32) Rectangle {
	d := r + r // diameter
	return RectWH(c[0]-r, c[1]-r, d, d)
}
// NewRect creates a zero Rectangle.
func NewRect() Rectangle {
	zero := Vec2{0, 0}
	return Rectangle{Min: zero, Max: zero}
}
// CopyRect allocates and returns a new Rectangle that is the copy of r.
func CopyRect(r Rectangle) Rectangle {
	dup := NewRect()
	dup.Min.Assign(r.Min)
	dup.Max.Assign(r.Max)
	return dup
}
// Center returns the center point of r (Min plus half the size).
func (r Rectangle) Center() Vec2 {
	half := r.Size().Scale(0.5)
	return half.Add(r.Min)
}
// Dx returns r's width (Max[0] - Min[0]).
func (r Rectangle) Dx() float32 {
	return r.Max[0] - r.Min[0]
}
// Dy returns r's height (Max[1] - Min[1]).
func (r Rectangle) Dy() float32 {
	return r.Max[1] - r.Min[1]
}
// Size returns r's width and height as a Vec2.
func (r Rectangle) Size() Vec2 {
	return Vec2{r.Dx(), r.Dy()}
}
// Add returns the rectangle r translated by v.
func (r Rectangle) Add(v Vec2) Rectangle {
	return Rectangle{
		Min: Vec2{r.Min[0] + v[0], r.Min[1] + v[1]},
		Max: Vec2{r.Max[0] + v[0], r.Max[1] + v[1]},
	}
}
// Sub returns the rectangle r translated by -v.
func (r Rectangle) Sub(v Vec2) Rectangle {
	return Rectangle{
		Min: Vec2{r.Min[0] - v[0], r.Min[1] - v[1]},
		Max: Vec2{r.Max[0] - v[0], r.Max[1] - v[1]},
	}
}
// Inset returns the rectangle r inset by n, which may be negative. If either
// of r's dimensions is less than 2*n then an empty rectangle near the center
// of r will be returned.
func (r Rectangle) Inset(n float32) Rectangle {
	for axis := 0; axis < 2; axis++ {
		if r.Max[axis]-r.Min[axis] < 2*n {
			// too thin along this axis: collapse to the midpoint
			mid := (r.Min[axis] + r.Max[axis]) / 2
			r.Min[axis], r.Max[axis] = mid, mid
		} else {
			r.Min[axis] += n
			r.Max[axis] -= n
		}
	}
	return r
}
// Intersect returns the largest rectangle contained by both r and s. If the
// two rectangles do not overlap then the zero rectangle will be returned.
func (r Rectangle) Intersect(s Rectangle) Rectangle {
	out := Rect(r.Min[0], r.Min[1], r.Max[0], r.Max[1])
	for axis := 0; axis < 2; axis++ {
		if out.Min[axis] < s.Min[axis] {
			out.Min[axis] = s.Min[axis]
		}
		if out.Max[axis] > s.Max[axis] {
			out.Max[axis] = s.Max[axis]
		}
	}
	// inverted bounds mean there was no overlap
	if out.Min[0] > out.Max[0] || out.Min[1] > out.Max[1] {
		return ZR
	}
	return out
}
// Union returns the smallest rectangle that contains both r and s.
func (r Rectangle) Union(s Rectangle) Rectangle {
	switch {
	case r.Empty():
		return s
	case s.Empty():
		return r
	}
	for axis := 0; axis < 2; axis++ {
		if s.Min[axis] < r.Min[axis] {
			r.Min[axis] = s.Min[axis]
		}
		if s.Max[axis] > r.Max[axis] {
			r.Max[axis] = s.Max[axis]
		}
	}
	return r
}
// Empty reports whether the rectangle contains no points, i.e. whether it
// has non-positive width or height.
func (r Rectangle) Empty() bool {
	return r.Min[0] >= r.Max[0] || r.Min[1] >= r.Max[1]
}
// Eq reports whether r and s contain the same set of points. All empty
// rectangles are considered equal. Non-empty corners are compared with
// Vec2.Approx, so this is a tolerance-based (not exact) comparison.
func (r Rectangle) Eq(s Rectangle) bool {
	return (r.Min.Approx(s.Min) && r.Max.Approx(s.Max)) ||
		r.Empty() && s.Empty()
}
// Overlaps reports whether r and s have a non-empty intersection.
func (r Rectangle) Overlaps(s Rectangle) bool {
	if r.Empty() || s.Empty() {
		return false
	}
	return r.Min[0] < s.Max[0] && s.Min[0] < r.Max[0] &&
		r.Min[1] < s.Max[1] && s.Min[1] < r.Max[1]
}
// Contains reports whether rectangle r contains point p. The interval is
// half-open: Min is inclusive, Max is exclusive.
func (r Rectangle) Contains(p Vec2) bool {
	return r.Min[0] <= p[0] && p[0] < r.Max[0] &&
		r.Min[1] <= p[1] && p[1] < r.Max[1]
}
// In reports whether Rectangle r is contained in s. An empty r is contained
// in every rectangle.
func (r Rectangle) In(s Rectangle) bool {
	if r.Empty() {
		return true
	}
	// Note that r.Max is an exclusive bound for r, so that r.In(s)
	// does not require that r.Max.In(s).
	return s.Min[0] <= r.Min[0] && s.Min[1] <= r.Min[1] &&
		r.Max[0] <= s.Max[0] && r.Max[1] <= s.Max[1]
}
// Canon returns the canonical version of r. The returned rectangle has
// minimum and maximum coordinates swapped if necessary so that it is
// well-formed.
func (r Rectangle) Canon() Rectangle {
	for axis := 0; axis < 2; axis++ {
		if r.Max[axis] < r.Min[axis] {
			r.Min[axis], r.Max[axis] = r.Max[axis], r.Min[axis]
		}
	}
	return r
}
// String returns a string representation of r.
func (r Rectangle) String() string {
return fmt.Sprintf("(Min:%v,Max:%v)", r.Min, r.Max)
} | f32/d2/rect.go | 0.909068 | 0.668899 | rect.go | starcoder |
package main
import (
"fmt"
)
// Limit is the greatest integer that may NOT be expressible as the sum of
// two abundant numbers: every integer greater than 28123 can be written as
// such a sum (Project Euler problem 23).
const Limit = 28123
// GetProperDivisors returns all positive divisors of n excluding n itself.
// Divisors are found in factor pairs (i, n/i), so the result is unsorted.
// For n <= 2 the result is just [1].
func GetProperDivisors(n int) []int {
	divisors := []int{1}
	for small, bound := 2, n; small < bound; small++ {
		if n%small != 0 {
			continue
		}
		divisors = append(divisors, small)
		bound = n / small
		if bound != small {
			divisors = append(divisors, bound)
		}
	}
	return divisors
}
// GetSumOfIntSlice returns the sum of all numbers in the given slice
// (0 for an empty or nil slice).
func GetSumOfIntSlice(ints []int) int {
	total := 0
	for _, v := range ints {
		total += v
	}
	return total
}
// IsPerfectNumber reports whether the sum of all proper divisors of n
// equals n (e.g. 6, 28).
func IsPerfectNumber(n int) bool {
	return GetSumOfDivisors(n) == n
}
// IsDeficientNumber reports whether the sum of all proper divisors of n
// is strictly less than n.
func IsDeficientNumber(n int) bool {
	return GetSumOfDivisors(n) < n
}
// IsAbundantNumber reports whether the sum of all proper divisors of n
// is strictly greater than n (the smallest abundant number is 12).
func IsAbundantNumber(n int) bool {
	return GetSumOfDivisors(n) > n
}
// GetSumOfDivisors returns sum of proper divisors of a given number.
func GetSumOfDivisors(n int) int {
divisors := GetProperDivisors(n)
sum := GetSumOfIntSlice(divisors)
return sum
}
// IsSumOfTwoAbundantNumbers reports whether n can be written as a + b where
// both a and b are keys of abundantNums (a and b may be equal).
func IsSumOfTwoAbundantNumbers(n int, abundantNums map[int]int) bool {
	// sum of two equal abundant numbers: n = 2a
	if n%2 == 0 {
		if _, ok := abundantNums[n/2]; ok {
			return true
		}
	}
	// sum of two (possibly distinct) abundant numbers: n = a + (n - a)
	for _, a := range abundantNums {
		if _, ok := abundantNums[n-a]; ok {
			return true
		}
	}
	return false
}
func main() {
abundantNums := map[int]int{}
sum := 0
for i := 1; i <= Limit; i++ {
if !IsSumOfTwoAbundantNumbers(i, abundantNums) {
sum += i
}
if IsAbundantNumber(i) {
abundantNums[i] = i
}
}
fmt.Println(sum)
} | p023/main.go | 0.689096 | 0.422624 | main.go | starcoder |
package opt
import (
"math"
"github.com/cpmech/gosl/fun"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/la"
"github.com/cpmech/gosl/plt"
"github.com/cpmech/gosl/utl"
)
// History holds history of optmization using directiors; e.g. for Debugging.
// Each iteration appends one entry to the parallel slices HistX/HistU/HistF/HistI.
type History struct {

	// data
	Ndim  int         // dimension of x-vector
	HistX []la.Vector // [it] history of x-values (position)
	HistU []la.Vector // [it] history of u-values (direction); entry 0 is nil (no direction at the start point)
	HistF []float64   // [it] history of f-values
	HistI []float64   // [it] index of iteration

	// configuration
	NptsI   int       // number of points for contour (grid resolution along i)
	NptsJ   int       // number of points for contour (grid resolution along j)
	RangeXi []float64 // {ximin, ximax} [may be nil for default]
	RangeXj []float64 // {xjmin, xjmax} [may be nil for default]
	GapXi   float64   // expand {ximin, ximax}
	GapXj   float64   // expand {xjmin, xjmax}

	// internal
	ffcn fun.Sv // f({x}) function used to draw contours
}
// NewHistory returns a new History object seeded with the initial point x0
// and its function value f0. ffcn is kept for contour plotting.
// NOTE: nMaxIt is currently unused by this constructor.
func NewHistory(nMaxIt int, f0 float64, x0 la.Vector, ffcn fun.Sv) (o *History) {
	o = &History{
		Ndim:  len(x0),
		NptsI: 41,
		NptsJ: 41,
		ffcn:  ffcn,
	}
	o.HistX = []la.Vector{x0.GetCopy()}
	o.HistU = []la.Vector{nil} // no direction at the starting point
	o.HistF = []float64{f0}
	o.HistI = []float64{0}
	return
}
// Append appends new x and u vectors (copied), the f-value, and the next
// iteration index to the history.
func (o *History) Append(fx float64, x, u la.Vector) {
	o.HistX = append(o.HistX, x.GetCopy())
	o.HistU = append(o.HistU, u.GetCopy())
	o.HistF = append(o.HistF, fx)
	o.HistI = append(o.HistI, float64(len(o.HistI)))
}
// Limits computes the component-wise range of all x-vectors in the history.
// Xmin[j] and Xmax[j] hold the minimum and maximum of coordinate j.
func (o *History) Limits() (Xmin []float64, Xmax []float64) {
	Xmin = make([]float64, o.Ndim)
	Xmax = make([]float64, o.Ndim)
	for j := 0; j < o.Ndim; j++ {
		Xmin[j] = math.MaxFloat64
		// FIX: start the running maximum at -MaxFloat64.
		// math.SmallestNonzeroFloat64 (used before) is a tiny POSITIVE
		// number, which produced a wrong maximum whenever every x[j] was
		// negative.
		Xmax[j] = -math.MaxFloat64
		for _, x := range o.HistX {
			Xmin[j] = utl.Min(Xmin[j], x[j])
			Xmax[j] = utl.Max(Xmax[j], x[j])
		}
	}
	return
}
// PlotC plots the contour of f over the (iDim, jDim) plane. All other
// coordinates are held fixed at xref's values. The plot range comes from
// RangeXi/RangeXj when set (length 2), otherwise from the history limits;
// GapXi/GapXj expand the range on both sides.
func (o *History) PlotC(iDim, jDim int, xref la.Vector) {

	// limits (only computed when an explicit range is missing)
	var Xmin, Xmax []float64
	if len(o.RangeXi) != 2 || len(o.RangeXj) != 2 {
		Xmin, Xmax = o.Limits()
	}

	// i-range
	var ximin, ximax float64
	if len(o.RangeXi) == 2 {
		ximin, ximax = o.RangeXi[0], o.RangeXi[1]
	} else {
		ximin, ximax = Xmin[iDim], Xmax[iDim]
	}

	// j-range
	var xjmin, xjmax float64
	if len(o.RangeXj) == 2 {
		xjmin, xjmax = o.RangeXj[0], o.RangeXj[1]
	} else {
		xjmin, xjmax = Xmin[jDim], Xmax[jDim]
	}

	// use gap
	ximin -= o.GapXi
	ximax += o.GapXi
	xjmin -= o.GapXj
	xjmax += o.GapXj

	// contour: evaluate f on a NptsI x NptsJ grid, varying only (iDim, jDim)
	xvec := xref.GetCopy()
	xx, yy, zz := utl.MeshGrid2dF(ximin, ximax, xjmin, xjmax, o.NptsI, o.NptsJ, func(r, s float64) float64 {
		xvec[iDim], xvec[jDim] = r, s
		return o.ffcn(xvec)
	})
	plt.ContourF(xx, yy, zz, nil)

	// labels
	plt.SetLabels(io.Sf("$x_{%d}$", iDim), io.Sf("$x_{%d}$", jDim), nil)
}
// PlotX plots the trajectory of x-points projected onto the (iDim, jDim)
// plane: one marker plus an arrow per step, and a star at the final point.
// Steps with a (near) zero direction vector are skipped.
func (o *History) PlotX(iDim, jDim int, xref la.Vector, argsArrow *plt.A) {

	// trajectory: point k is drawn with the direction stored at index k+1
	// (HistU[0] is nil — there is no direction at the starting point)
	x2d := la.NewVector(2)
	u2d := la.NewVector(2)
	for k := 0; k < len(o.HistX)-1; k++ {
		x := o.HistX[k]
		u := o.HistU[1+k]
		x2d[0], x2d[1] = x[iDim], x[jDim]
		u2d[0], u2d[1] = u[iDim], u[jDim]
		if u.Norm() > 1e-10 {
			plt.PlotOne(x2d[0], x2d[1], &plt.A{C: "y", M: "o", Z: 10, NoClip: true})
			plt.DrawArrow2d(x2d, u2d, false, 1, argsArrow)
		}
	}

	// final point
	l := len(o.HistX) - 1
	plt.PlotOne(o.HistX[l][iDim], o.HistX[l][jDim], &plt.A{C: "y", M: "*", Ms: 10, Z: 10, NoClip: true})

	// labels
	plt.SetLabels(io.Sf("$x_{%d}$", iDim), io.Sf("$x_{%d}$", jDim), nil)
}
// PlotF plots the convergence of F values versus iteration number and
// annotates the first and last values. args may be nil for defaults.
func (o *History) PlotF(args *plt.A) {
	if args == nil {
		args = &plt.A{C: plt.C(2, 0), M: ".", Ls: "-", Lw: 2, NoClip: true}
	}
	l := len(o.HistI) - 1
	plt.Plot(o.HistI, o.HistF, args)
	plt.Text(o.HistI[0], o.HistF[0], io.Sf("%.3f", o.HistF[0]), &plt.A{C: plt.C(0, 0), Fsz: 7, Ha: "left", Va: "top", NoClip: true})
	plt.Text(o.HistI[l], o.HistF[l], io.Sf("%.3f", o.HistF[l]), &plt.A{C: plt.C(0, 0), Fsz: 7, Ha: "right", Va: "bottom", NoClip: true})
	plt.Gll("$iteration$", "$f(x)$", nil)
	plt.HideTRborders()
}
// PlotAll2d plots contour using PlotC, trajectory using PlotX, and convergence
// on F values using PlotF for history data with ndim >= 2. The figure has two
// stacked subplots: (x0, x1) contour+trajectory on top, F-convergence below.
func (o *History) PlotAll2d(name string, xref la.Vector) {
	clr := "orange"
	argsArrow := &plt.A{C: clr, Scale: 40}
	argsF := &plt.A{C: clr, Lw: 3, L: name, NoClip: true}
	o.GapXi = 0.1
	o.GapXj = 0.1
	plt.SplotGap(0.25, 0.25)
	plt.Subplot(2, 1, 1)
	o.PlotC(0, 1, xref)
	o.PlotX(0, 1, xref, argsArrow)
	plt.Subplot(2, 1, 2)
	o.PlotF(argsF)
}
// PlotAll3d plots contour using PlotC, trajectory using PlotX, and convergence
// on F values using PlotF for history data with ndim >= 3. The 2x2 figure
// shows the (0,1), (1,2) and (2,0) coordinate planes plus the F-convergence.
func (o *History) PlotAll3d(name string, xref la.Vector) {
	clr := "orange"
	argsArrow := &plt.A{C: clr, Scale: 40}
	argsF := &plt.A{C: clr, Lw: 3, L: name, NoClip: true}
	o.GapXi = 0.1
	o.GapXj = 0.1
	plt.SplotGap(0.25, 0.25)
	plt.Subplot(2, 2, 1)
	o.PlotC(0, 1, xref)
	o.PlotX(0, 1, xref, argsArrow)
	plt.Subplot(2, 2, 2)
	o.PlotC(1, 2, xref)
	o.PlotX(1, 2, xref, argsArrow)
	plt.Subplot(2, 2, 3)
	o.PlotC(2, 0, xref)
	o.PlotX(2, 0, xref, argsArrow)
	plt.Subplot(2, 2, 4)
	o.PlotF(argsF)
}
// CompareHistory2d generates plots to compare two history data sets with
// ndim >= 2. Both trajectories are overlaid on hist1's contour (its plot
// range is widened to cover both histories) and both F-curves share one axis.
func CompareHistory2d(name1, name2 string, hist1, hist2 *History, xref1, xref2 la.Vector) {
	clr1 := "orange"
	clr2 := "#5a5252"
	argsArrow1 := &plt.A{C: clr1, Scale: 40}
	argsArrow2 := &plt.A{C: clr2, Scale: 10}
	argsF1 := &plt.A{C: clr1, Lw: 5, L: name1, NoClip: true}
	argsF2 := &plt.A{C: clr2, Lw: 2, L: name2, NoClip: true}
	Xmin1, Xmax1 := hist1.Limits()
	Xmin2, Xmax2 := hist2.Limits()
	// hist1 drives the plotting; widen its explicit range to the union
	hist1.RangeXi = make([]float64, 2)
	hist1.RangeXj = make([]float64, 2)
	hist1.GapXi = 0.1
	hist1.GapXj = 0.1
	plt.SplotGap(0.25, 0.25)
	plt.Subplot(2, 1, 1)
	hist1.RangeXi[0] = utl.Min(Xmin1[0], Xmin2[0])
	hist1.RangeXi[1] = utl.Max(Xmax1[0], Xmax2[0])
	hist1.RangeXj[0] = utl.Min(Xmin1[1], Xmin2[1])
	hist1.RangeXj[1] = utl.Max(Xmax1[1], Xmax2[1])
	hist1.PlotC(0, 1, xref1)
	hist1.PlotX(0, 1, xref1, argsArrow1)
	hist2.PlotX(0, 1, xref2, argsArrow2)
	plt.Subplot(2, 1, 2)
	hist1.PlotF(argsF1)
	hist2.PlotF(argsF2)
}
// CompareHistory3d generates plots to compare two history data sets with
// ndim >= 3: the (0,1), (1,2) and (2,0) planes plus the F-convergence, with
// hist1's plot range widened per-plane to cover both histories.
func CompareHistory3d(name1, name2 string, hist1, hist2 *History, xref1, xref2 la.Vector) {
	clr1 := "orange"
	clr2 := "#5a5252"
	argsArrow1 := &plt.A{C: clr1, Scale: 40}
	argsArrow2 := &plt.A{C: clr2, Scale: 10}
	argsF1 := &plt.A{C: clr1, Lw: 5, L: name1, NoClip: true}
	argsF2 := &plt.A{C: clr2, Lw: 2, L: name2, NoClip: true}
	Xmin1, Xmax1 := hist1.Limits()
	Xmin2, Xmax2 := hist2.Limits()
	// hist1 drives the plotting; its range is reassigned before each plane
	hist1.RangeXi = make([]float64, 2)
	hist1.RangeXj = make([]float64, 2)
	hist1.GapXi = 0.1
	hist1.GapXj = 0.1
	plt.SplotGap(0.25, 0.25)
	plt.Subplot(2, 2, 1)
	hist1.RangeXi[0] = utl.Min(Xmin1[0], Xmin2[0])
	hist1.RangeXi[1] = utl.Max(Xmax1[0], Xmax2[0])
	hist1.RangeXj[0] = utl.Min(Xmin1[1], Xmin2[1])
	hist1.RangeXj[1] = utl.Max(Xmax1[1], Xmax2[1])
	hist1.PlotC(0, 1, xref1)
	hist1.PlotX(0, 1, xref1, argsArrow1)
	hist2.PlotX(0, 1, xref2, argsArrow2)
	plt.Subplot(2, 2, 2)
	hist1.RangeXi[0] = utl.Min(Xmin1[1], Xmin2[1])
	hist1.RangeXi[1] = utl.Max(Xmax1[1], Xmax2[1])
	hist1.RangeXj[0] = utl.Min(Xmin1[2], Xmin2[2])
	hist1.RangeXj[1] = utl.Max(Xmax1[2], Xmax2[2])
	hist1.PlotC(1, 2, xref1)
	hist1.PlotX(1, 2, xref1, argsArrow1)
	hist2.PlotX(1, 2, xref2, argsArrow2)
	plt.Subplot(2, 2, 3)
	hist1.RangeXi[0] = utl.Min(Xmin1[2], Xmin2[2])
	hist1.RangeXi[1] = utl.Max(Xmax1[2], Xmax2[2])
	hist1.RangeXj[0] = utl.Min(Xmin1[0], Xmin2[0])
	hist1.RangeXj[1] = utl.Max(Xmax1[0], Xmax2[0])
	hist1.PlotC(2, 0, xref1)
	hist1.PlotX(2, 0, xref1, argsArrow1)
	hist2.PlotX(2, 0, xref2, argsArrow2)
	plt.Subplot(2, 2, 4)
	hist1.PlotF(argsF1)
	hist2.PlotF(argsF2)
} | opt/history.go | 0.579162 | 0.492066 | history.go | starcoder |
package models
import (
"fmt"
"github.com/ThinkiumGroup/go-common"
"github.com/ThinkiumGroup/go-common/trie"
)
type (
	// ShardDeltaMessage carries, from this shard to another shard, the list
	// of AccountDeltas produced here that must be applied on the other
	// shard. It includes the originating block header and the merkle proof.
	ShardDeltaMessage struct {
		ToChainID       common.ChainID  // destination shard
		FromBlockHeader *BlockHeader    // header of the block that produced the deltas
		Proof           []common.Hash   // merkle proof of the delta list against the header
		Deltas          []*AccountDelta // the deltas to apply on ToChainID
	}

	// DeltaRequestMessage asks a source chain to (re)send a range of deltas.
	DeltaRequestMessage struct {
		FromID common.ChainID // source chain of requested delta
		ToID   common.ChainID // target chain of requested delta
		Start  common.Height  // The starting height of the source chain where the requested delta is located
		Length int            // The number of delta requested, starting from start (including start)
	}

	// ShardTransaction wraps a transaction destined for another shard.
	ShardTransaction struct {
		ToChainID common.ChainID // destination shard
		Tx        *Transaction   // the wrapped transaction
	}
)
// GetChainID returns the destination chain of the message.
func (m *ShardDeltaMessage) GetChainID() common.ChainID {
	return m.ToChainID
}

// DestChainID returns the destination chain of the message (routing target).
func (m *ShardDeltaMessage) DestChainID() common.ChainID {
	return m.ToChainID
}
// String implements fmt.Stringer. It is nil-safe, matching the other
// message String methods in this file (e.g. DeltaRequestMessage.String).
func (m *ShardDeltaMessage) String() string {
	if m == nil {
		return "ShardDelta<nil>"
	}
	return fmt.Sprintf("{To:%d, From:%s, len(Deltas):%d}",
		m.ToChainID, m.FromBlockHeader.Summary(), len(m.Deltas))
}
// GetChainID returns the source chain whose deltas are requested.
func (m *DeltaRequestMessage) GetChainID() common.ChainID {
	return m.FromID
}

// DestChainID returns the chain the request is routed to (the delta source).
func (m *DeltaRequestMessage) DestChainID() common.ChainID {
	return m.FromID
}

// A returns the inclusive lower bound of the requested height range.
func (m *DeltaRequestMessage) A() common.Height {
	return m.Start
}

// B returns the exclusive upper bound of the requested height range
// (Start + Length).
func (m *DeltaRequestMessage) B() common.Height {
	return m.Start + common.Height(m.Length)
}

// String implements fmt.Stringer; nil-safe.
func (m *DeltaRequestMessage) String() string {
	if m == nil {
		return "DeltaReq<nil>"
	}
	return fmt.Sprintf("DeltaReq{From:%d To:%d Start:%d Length:%d}", m.FromID, m.ToID, m.Start, m.Length)
}
// GetChainID returns the destination chain of the wrapped transaction.
func (s *ShardTransaction) GetChainID() common.ChainID {
	return s.ToChainID
}
// LastBlockMessage announces the last block (chain + height) known locally.
type LastBlockMessage struct {
	BlockHeight // embedded ChainID + Height
}
// String implements fmt.Stringer. It is nil-safe for consistency with the
// other message String methods in this file (DeltaRequestMessage,
// LastHeightMessage), and uses early returns instead of if/else.
func (m *LastBlockMessage) String() string {
	if m == nil {
		return "LastBlock<nil>"
	}
	if m.Height.IsNil() {
		return fmt.Sprintf("LastBlock{ChainID:%d NONE}", m.ChainID)
	}
	return fmt.Sprintf("LastBlock{ChainID:%d Height:%d EpochNum:%d BlockNum:%d}",
		m.ChainID, m.Height, m.GetEpochNum(), m.GetBlockNum())
}
// LastHeightMessage reports the latest height of a chain together with the
// hash of the block at that height.
type LastHeightMessage struct {
	BlockHeight
	BlockHash common.Hash // hash of the block at Height
}
// NewLastHeightMessage builds a LastHeightMessage for the given chain, height
// and block hash.
func NewLastHeightMessage(chainId common.ChainID, height common.Height, hash common.Hash) *LastHeightMessage {
	return &LastHeightMessage{
		BlockHeight: BlockHeight{ChainID: chainId, Height: height},
		BlockHash:   hash,
	}
}
// String implements fmt.Stringer; safe on a nil receiver. Only the first 5
// bytes of the hash are printed to keep the line compact.
func (h *LastHeightMessage) String() string {
	if h == nil {
		return "LastHeight<nil>"
	}
	// Tag fixed from the misspelled "LastHeigth".
	return fmt.Sprintf("LastHeight{ChainID:%d Height:%s BlockHash:%x}", h.ChainID, &(h.Height), h.BlockHash[:5])
}
// SyncRequest asks ToNode to stream blocks of ChainID to the requester.
type SyncRequest struct {
	ChainID     common.ChainID
	NodeID      common.NodeID // Nodeid to request synchronization
	ToNode      common.NodeID // node the request is addressed to
	AllBlock    bool          // true: indicates synchronization from the first block, false: Indicates that synchronization starts from the current state
	StartHeight common.Height // first height to sync when AllBlock is false
	RpcAddr     string        // RPC address the requester can be reached at
	Timestamp   int           // request creation time (unit not evident here — confirm at callers)
}
// Source returns the node that issued the request.
func (s *SyncRequest) Source() common.NodeID {
	return s.NodeID
}
// GetChainID returns the chain the synchronization refers to.
func (s *SyncRequest) GetChainID() common.ChainID {
	return s.ChainID
}
// String implements fmt.Stringer.
func (s *SyncRequest) String() string {
	return fmt.Sprintf("SyncRequest{ChainID:%d NodeID:%s To:%s AllBlock:%t StartHeight:%d RpcAddr:%s Time:%d}",
		s.ChainID, s.NodeID, s.ToNode, s.AllBlock, s.StartHeight, s.RpcAddr, s.Timestamp)
}
// SyncFinish signals that a synchronization run for ChainID is complete.
type SyncFinish struct {
	ChainID   common.ChainID
	NodeID    common.NodeID // Nodeid to request synchronization
	EndHeight common.Height // last height delivered by the sync
	Timestamp int
}
// Source returns the node that ran the synchronization.
func (s *SyncFinish) Source() common.NodeID {
	return s.NodeID
}
// GetChainID returns the chain the synchronization referred to.
func (s *SyncFinish) GetChainID() common.ChainID {
	return s.ChainID
}
// String implements fmt.Stringer.
func (s *SyncFinish) String() string {
	// NodeID is formatted with %s to match SyncRequest.String; the previous
	// %d verb would dump the ID's raw byte values.
	return fmt.Sprintf("SyncFinish{ChainID:%d NodeID:%s EndHeight:%d}",
		s.ChainID, s.NodeID, s.EndHeight)
}
// Pack deltas generated by multiple blocks together. It is sent to the target chain at one time.
// Proof chain:root of the trie generated with deltas in block A (1)-> A.BalanceDeltaRoot (2)-> A.BlockHeader.Hash
// (3)-> current block B.HashHistory (4)-> B.BlockHeader.Hash
// (5)-> (block C in main chain which confirmed block B).HdsRoot (6)-> C.BlockHeader.Hash
type (
	// Proof.Proof(MerkleHash(Deltas)) == BlockHash of Height (1)(2)
	// HistoryProof.Proof(BlockHash of Height) == BlockHash of DeltasPack.ProofedHeight (3)(4)
	OneDeltas struct {
		// the height of the block A where delta generated
		Height common.Height
		// All deltas in a block corresponding to a shard to another shard
		Deltas []*AccountDelta
		// The proof of this group of delta to the hash of block A at Height (1)(2)
		Proof trie.ProofChain
		// The proof to HashHistory of block B (specified by DeltasPack) used in this transmission (3).
		// You can use this proof.Key() judge the authenticity of Height. When Height==DeltasPack.ProofedHeight,
		// this proof is nil. At this time, verify with ProofedHeight in DeltasPack.
		// (translated) Proof (3) up to the HashHistory of block B, the block used uniformly for this
		// transmission (designated by DeltasPack). proof.Key() can be used to check that Height is
		// genuine. When Height==DeltasPack.ProofedHeight this proof is nil; verify against
		// ProofedHeight in DeltasPack instead.
		HistoryProof trie.ProofChain
		// Proof from the HashHistory of block B to the Hash of block B (4).
		// When Height==DeltasPack.ProofedHeight, this proof is nil.
		// At this time, verify with ProofedHeight in DeltasPack.
		// (translated) Proof (4) from block B's HashHistory to block B's Hash. Nil when
		// Height==DeltasPack.ProofedHeight; verify against ProofedHeight in DeltasPack instead.
		ProofToB trie.ProofChain
	}

	// DeltasGroup is a sortable list of per-block delta bundles (see Less).
	DeltasGroup []*OneDeltas

	// ProofToMain.Proof(BlockHash of ProofedHeight) == BlockHash of MainHeight (5)(6)
	DeltasPack struct {
		FromID        common.ChainID  // source chain id
		ToChainID     common.ChainID  // target shard id
		ProofedHeight common.Height   // block B of source shard was confirmed by the main chain
		ProofToMain   trie.ProofChain // proof from B.Hash to C.Hash
		MainHeight    common.Height   // the height of main chain block C which packed and confirmed block B
		Pack          DeltasGroup     // deltas of each block from source chain
	}
)
// String implements fmt.Stringer; safe on a nil receiver.
func (o *OneDeltas) String() string {
	if o == nil {
		return "OD<nil>"
	}
	return fmt.Sprintf("OD{H:%d Dlts:%d}", o.Height, len(o.Deltas))
}
// Len implements sort.Interface.
func (g DeltasGroup) Len() int {
	return len(g)
}
// Swap implements sort.Interface.
func (g DeltasGroup) Swap(i, j int) {
	g[i], g[j] = g[j], g[i]
}
// Less implements sort.Interface: nil handling is delegated to
// common.PointerSliceLess, and two non-nil entries order by Height.
func (g DeltasGroup) Less(i, j int) bool {
	less, needCompare := common.PointerSliceLess(g, i, j)
	if !needCompare {
		return less
	}
	return g[i].Height < g[j].Height
}
// Summary returns a compact description of the group: "DG{}" when empty, the
// single height for one element, or the first-last height range otherwise.
// Nil elements print as an empty height.
func (g DeltasGroup) Summary() string {
	le := len(g)
	if le == 0 {
		return "DG{}"
	} else if le == 1 {
		s := ""
		if g[0] != nil {
			s = g[0].Height.String()
		}
		return fmt.Sprintf("DG{%s}", s)
	} else {
		s, e := "", ""
		if g[0] != nil {
			s = g[0].Height.String()
		}
		if g[le-1] != nil {
			e = g[le-1].Height.String()
		}
		// Tag fixed: the multi-element branch previously printed "OD{...}",
		// the OneDeltas tag, instead of the "DG" tag used by the other branches.
		return fmt.Sprintf("DG{%s-%s}", s, e)
	}
}
// GetChainID returns the target shard the pack is addressed to.
func (d *DeltasPack) GetChainID() common.ChainID {
	return d.ToChainID
}
// DestChainID returns the chain this pack should be routed to.
func (d *DeltasPack) DestChainID() common.ChainID {
	return d.ToChainID
}
func (d *DeltasPack) String() string {
if d == nil {
return "DeltasPack<nil>"
}
return fmt.Sprintf("DeltasPack{From:%d To:%d ProofHeight:%d MainHeight:%d Pack:%s}",
d.FromID, d.ToChainID, d.ProofedHeight, d.MainHeight, d.Pack)
} | models/dataevents.go | 0.578448 | 0.592313 | dataevents.go | starcoder |
package eql
import "fmt"
// mathAdd returns left + right. Both operands must be int or float64; the
// result is int only when both operands are ints, float64 otherwise.
func mathAdd(left, right operand) (interface{}, error) {
	switch l := left.(type) {
	case int:
		switch r := right.(type) {
		case int:
			return l + r, nil
		case float64:
			return float64(l) + r, nil
		}
	case float64:
		switch r := right.(type) {
		case int:
			return l + float64(r), nil
		case float64:
			return l + r, nil
		}
	}
	// Every non-numeric combination falls through to one shared error.
	return 0, fmt.Errorf(
		"math: +, incompatible type to add both operands must be numbers, left=%T, right=%T",
		left,
		right,
	)
}
// mathSub returns left - right. Both operands must be int or float64; the
// result is int only when both operands are ints, float64 otherwise.
func mathSub(left, right operand) (interface{}, error) {
	switch l := left.(type) {
	case int:
		switch r := right.(type) {
		case int:
			return l - r, nil
		case float64:
			return float64(l) - r, nil
		}
	case float64:
		switch r := right.(type) {
		case int:
			return l - float64(r), nil
		case float64:
			return l - r, nil
		}
	}
	// Every non-numeric combination falls through to one shared error.
	return 0, fmt.Errorf(
		"math: -, incompatible type to subtract both operands must be numbers, left=%T, right=%T",
		left,
		right,
	)
}
// mathMul returns left * right. Both operands must be int or float64; the
// result is int only when both operands are ints, float64 otherwise.
func mathMul(left, right operand) (interface{}, error) {
	switch l := left.(type) {
	case int:
		switch r := right.(type) {
		case int:
			return l * r, nil
		case float64:
			return float64(l) * r, nil
		}
	case float64:
		switch r := right.(type) {
		case int:
			return l * float64(r), nil
		case float64:
			return l * r, nil
		}
	}
	// Every non-numeric combination falls through to one shared error.
	return 0, fmt.Errorf(
		"math: *, incompatible type to multiply both operands must be numbers, left=%T, right=%T",
		left,
		right,
	)
}
// mathDiv returns left / right. Both operands must be int or float64; int/int
// performs truncating integer division, every other numeric pairing divides
// as float64. A zero divisor is rejected (even for floats, matching the
// original contract).
func mathDiv(left, right operand) (interface{}, error) {
	divByZero := func() (interface{}, error) {
		return 0, fmt.Errorf(
			"math: /, division by zero, left=%T, right=%T",
			left,
			right,
		)
	}
	switch l := left.(type) {
	case int:
		switch r := right.(type) {
		case int:
			if r == 0 {
				return divByZero()
			}
			return l / r, nil
		case float64:
			if r == 0 {
				return divByZero()
			}
			return float64(l) / r, nil
		}
	case float64:
		switch r := right.(type) {
		case int:
			if r == 0 {
				return divByZero()
			}
			return l / float64(r), nil
		case float64:
			if r == 0 {
				return divByZero()
			}
			return l / r, nil
		}
	}
	// Every non-numeric combination falls through to one shared error.
	return 0, fmt.Errorf(
		"math: /, incompatible type to divide both operands must be numbers, left=%T, right=%T",
		left,
		right,
	)
}
func mathMod(left, right operand) (interface{}, error) {
switch v := left.(type) {
case int:
switch rv := right.(type) {
case int:
if rv == 0 {
return 0, fmt.Errorf(
"math: %%, division by zero, left=%T, right=%T",
left,
right,
)
}
return v % rv, nil
default:
return 0, fmt.Errorf(
"math: %%, incompatible type to modulus both operands must be integers, left=%T, right=%T",
left,
right,
)
}
default:
return 0, fmt.Errorf(
"math: %%, incompatible type to modulus both operands must be integers, left=%T, right=%T",
left,
right,
)
}
} | x-pack/elastic-agent/pkg/eql/math.go | 0.68215 | 0.54583 | math.go | starcoder |
package common
import (
"encoding/binary"
"errors"
)
// ErrIrregularData reports data that decodes but is not minimally encoded.
var ErrIrregularData = errors.New("irregular data")

// ZeroCopySource reads primitives out of a byte slice without copying:
// NextBytes returns sub-slices of the underlying buffer.
type ZeroCopySource struct {
	s   []byte
	off uint64 // current reading index
}
// Len returns the number of bytes of the unread portion of the slice.
func (self *ZeroCopySource) Len() uint64 {
	if remaining := uint64(len(self.s)); remaining > self.off {
		return remaining - self.off
	}
	return 0
}
// Pos returns the current reading offset within the underlying slice.
func (self *ZeroCopySource) Pos() uint64 {
	return self.off
}
// Size returns the original length of the underlying byte slice.
// The returned value is always the same and is not affected by calls
// to any other method.
func (self *ZeroCopySource) Size() uint64 { return uint64(len(self.s)) }
// NextBytes returns the next n bytes as a sub-slice of the underlying buffer
// (no copy is made). If fewer than n bytes remain, the available tail is
// returned and eof is true. The offset always advances past the returned data.
func (self *ZeroCopySource) NextBytes(n uint64) (data []byte, eof bool) {
	m := uint64(len(self.s))
	// SafeAdd guards against off+n wrapping around uint64.
	end, overflow := SafeAdd(self.off, n)
	if overflow || end > m {
		end = m
		eof = true
	}
	data = self.s[self.off:end]
	self.off = end

	return
}
// Skip advances the read offset by n bytes without returning data; eof is
// true when fewer than n bytes remained (the offset then stops at the end).
func (self *ZeroCopySource) Skip(n uint64) (eof bool) {
	end, overflow := SafeAdd(self.off, n)
	if total := uint64(len(self.s)); overflow || end > total {
		end = total
		eof = true
	}
	self.off = end
	return
}
// NextByte returns the next byte, or (0, true) when the source is exhausted.
func (self *ZeroCopySource) NextByte() (data byte, eof bool) {
	if self.off >= uint64(len(self.s)) {
		return 0, true
	}

	b := self.s[self.off]
	self.off++
	return b, false
}
// NextUint8 reads one byte as a uint8; eof is true when the source is exhausted.
func (self *ZeroCopySource) NextUint8() (data uint8, eof bool) {
	var val byte
	val, eof = self.NextByte()
	return uint8(val), eof
}
// NextBool reads one byte as a bool. 0 decodes to false and 1 to true; any
// other value decodes to true with irregular set, so callers can reject
// non-canonical encodings.
func (self *ZeroCopySource) NextBool() (data bool, irregular bool, eof bool) {
	var val byte
	val, eof = self.NextByte()
	switch val {
	case 0:
		data = false
	case 1:
		data = true
	default:
		data, irregular = true, true
	}

	return
}
// BackUp backs up a number of bytes, so that the next call to NextXXX() returns data again
// that was already returned by the last call to NextXXX().
// The caller must not back up past data actually read: n greater than the
// current offset underflows off (no guard here).
func (self *ZeroCopySource) BackUp(n uint64) {
	self.off -= n
}
// NextUint16 reads a little-endian uint16; eof is true (and data is 0) when
// fewer than two bytes remain.
func (self *ZeroCopySource) NextUint16() (data uint16, eof bool) {
	buf, eof := self.NextBytes(UINT16_SIZE)
	if eof {
		return 0, true
	}
	return binary.LittleEndian.Uint16(buf), false
}
// NextUint32 reads a little-endian uint32; eof is true (and data is 0) when
// fewer than four bytes remain.
func (self *ZeroCopySource) NextUint32() (data uint32, eof bool) {
	var buf []byte
	buf, eof = self.NextBytes(UINT32_SIZE)
	if eof {
		return
	}
	return binary.LittleEndian.Uint32(buf), eof
}
// NextUint64 reads a little-endian uint64; eof is true (and data is 0) when
// fewer than eight bytes remain.
func (self *ZeroCopySource) NextUint64() (data uint64, eof bool) {
	var buf []byte
	buf, eof = self.NextBytes(UINT64_SIZE)
	if eof {
		return
	}
	return binary.LittleEndian.Uint64(buf), eof
}
// NextInt32 reads a little-endian int32 (reinterpreted from uint32 bits).
func (self *ZeroCopySource) NextInt32() (data int32, eof bool) {
	var val uint32
	val, eof = self.NextUint32()
	return int32(val), eof
}
// NextInt64 reads a little-endian int64 (reinterpreted from uint64 bits).
func (self *ZeroCopySource) NextInt64() (data int64, eof bool) {
	var val uint64
	val, eof = self.NextUint64()
	return int64(val), eof
}
// NextInt16 reads a little-endian int16 (reinterpreted from uint16 bits).
func (self *ZeroCopySource) NextInt16() (data int16, eof bool) {
	var val uint16
	val, eof = self.NextUint16()
	return int16(val), eof
}
// NextVarBytes reads a varuint length prefix followed by that many bytes.
// size is the total number of bytes consumed (prefix + payload); irregular
// reports a non-minimal length encoding.
func (self *ZeroCopySource) NextVarBytes() (data []byte, size uint64, irregular bool, eof bool) {
	var count uint64
	count, size, irregular, eof = self.NextVarUint()
	size += count
	if count > 0 {
		data, eof = self.NextBytes(count)
	}
	return
}
// NextAddress reads the next ADDR_LEN bytes into an Address. On eof the
// zero-valued Address is returned.
func (self *ZeroCopySource) NextAddress() (data Address, eof bool) {
	var buf []byte
	buf, eof = self.NextBytes(ADDR_LEN)
	if eof {
		return
	}
	copy(data[:], buf)

	return
}
// NextHash reads the next UINT256_SIZE bytes into a Uint256. On eof the
// zero-valued hash is returned.
func (self *ZeroCopySource) NextHash() (data Uint256, eof bool) {
	var buf []byte
	buf, eof = self.NextBytes(UINT256_SIZE)
	if eof {
		return
	}
	copy(data[:], buf)

	return
}
// NextString reads a var-length byte string and converts it to a string
// (this conversion copies; see NextVarBytes for the size/irregular meaning).
func (self *ZeroCopySource) NextString() (data string, size uint64, irregular bool, eof bool) {
	var val []byte
	val, size, irregular, eof = self.NextVarBytes()
	data = string(val)
	return
}
// NextVarUint reads a variable-length unsigned integer: a first byte below
// 0xFD is the value itself; 0xFD/0xFE/0xFF prefix a little-endian
// uint16/uint32/uint64. size is the number of bytes consumed; irregular
// reports a non-minimal encoding (value stored wider than necessary).
func (self *ZeroCopySource) NextVarUint() (data uint64, size uint64, irregular bool, eof bool) {
	var fb byte
	fb, eof = self.NextByte()
	if eof {
		return
	}

	switch fb {
	case 0xFD:
		val, e := self.NextUint16()
		if e {
			// BUG FIX: previously returned with eof == false here (and in the
			// 0xFE/0xFF cases), making a truncated input look like a
			// successful read of value 0 with size 0.
			eof = true
			return
		}
		data = uint64(val)
		size = 3
	case 0xFE:
		val, e := self.NextUint32()
		if e {
			eof = true
			return
		}
		data = uint64(val)
		size = 5
	case 0xFF:
		val, e := self.NextUint64()
		if e {
			eof = true
			return
		}
		data = uint64(val)
		size = 9
	default:
		data = uint64(fb)
		size = 1
	}

	irregular = size != getVarUintSize(data)

	return
}
// getVarUintSize returns the number of bytes (1, 3, 5 or 9) the minimal
// varuint encoding of value occupies, matching the markers used by NextVarUint.
func getVarUintSize(value uint64) uint64 {
	switch {
	case value < 0xfd:
		return 1
	case value <= 0xffff:
		return 3
	case value <= 0xFFFFFFFF:
		return 5
	default:
		return 9
	}
}
// NewReader returns a new ZeroCopySource reading from b.
func NewZeroCopySource(b []byte) *ZeroCopySource { return &ZeroCopySource{b, 0} } | common/zero_copy_source.go | 0.741019 | 0.405625 | zero_copy_source.go | starcoder |
package pg
import (
"bytes"
"database/sql/driver"
"encoding/hex"
"fmt"
"strconv"
"strings"
)
// StringArray adapts a []string to Postgres text[] columns via the
// sql.Scanner and driver.Valuer interfaces.
type StringArray struct {
	Strings []string
}
// Scan implements the sql.Scanner interface. []byte and string sources are
// parsed as Postgres array literals; a SQL NULL clears the array.
func (a *StringArray) Scan(src interface{}) error {
	switch src := src.(type) {
	case []byte:
		return a.scanBytes(src)
	case string:
		return a.scanBytes([]byte(src))
	case nil:
		// BUG FIX: this previously did `a = nil`, which only rebinds the
		// local receiver copy and has no effect on the caller's value
		// (staticcheck SA4006). Clear the slice instead.
		a.Strings = nil
		return nil
	}

	return fmt.Errorf("pq: cannot convert %T to StringArray", src)
}
// scanBytes parses src as a one-dimensional Postgres array literal and stores
// the elements in a.Strings. NULL elements are rejected (a string cannot be nil).
func (a *StringArray) scanBytes(src []byte) error {
	elems, err := scanLinearArray(src, []byte{','}, "StringArray")
	if err != nil {
		return err
	}
	if a != nil && a.Strings != nil && len(elems) == 0 {
		// Empty result: truncate in place so the existing backing array is reused.
		a.Strings = a.Strings[:0]
	} else {
		ss := make([]string, len(elems))
		for i, v := range elems {
			// string(nil) yields ""; the nil check afterwards rejects SQL NULLs.
			if ss[i] = string(v); v == nil {
				return fmt.Errorf("pq: parsing array element index %d: cannot convert nil to string", i)
			}
		}
		*a = StringArray{
			Strings: ss,
		}
	}

	return nil
}
// Value implements the driver.Valuer interface: the strings are rendered as
// a Postgres array literal with each element double-quoted and escaped.
func (a StringArray) Value() (driver.Value, error) {
	n := len(a.Strings)
	if n == 0 {
		return "{}", nil
	}

	// At least two braces, 2*n quote bytes and n-1 delimiters will be needed.
	buf := make([]byte, 1, 1+3*n)
	buf[0] = '{'
	for i, s := range a.Strings {
		if i > 0 {
			buf = append(buf, ',')
		}
		buf = appendArrayQuotedBytes(buf, []byte(s))
	}

	return string(append(buf, '}')), nil
}
// parseArray extracts the dimensions and raw elements of a Postgres array
// literal (e.g. `{a,"b,c",NULL}`); del is the element delimiter for the
// array's type. dims holds the length of each dimension; elems holds every
// element's raw bytes in row-major order, with nil standing for SQL NULL.
func parseArray(src, del []byte) (dims []int, elems [][]byte, err error) {
	var depth, i int

	if len(src) < 1 || src[0] != '{' {
		return nil, nil, fmt.Errorf("pq: unable to parse array; expected %q at offset %d", '{', 0)
	}

	// Consume the leading run of '{' to discover the nesting depth; an
	// immediate '}' means the array is empty.
Open:
	for i < len(src) {
		switch src[i] {
		case '{':
			depth++
			i++
		case '}':
			elems = make([][]byte, 0)
			goto Close
		default:
			break Open
		}
	}
	dims = make([]int, i)

	// Parse one element per pass: a nested '{', a quoted string (with
	// backslash escapes), or an unquoted token ending at the delimiter/'}'.
Element:
	for i < len(src) {
		switch src[i] {
		case '{':
			if depth == len(dims) {
				break Element
			}
			depth++
			dims[depth-1] = 0
			i++
		case '"':
			var elem = []byte{}
			var escape bool
			for i++; i < len(src); i++ {
				if escape {
					elem = append(elem, src[i])
					escape = false
				} else {
					switch src[i] {
					default:
						elem = append(elem, src[i])
					case '\\':
						escape = true
					case '"':
						elems = append(elems, elem)
						i++
						break Element
					}
				}
			}
		default:
			for start := i; i < len(src); i++ {
				if bytes.HasPrefix(src[i:], del) || src[i] == '}' {
					elem := src[start:i]
					if len(elem) == 0 {
						return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
					}
					// Unquoted NULL is the SQL null marker.
					if bytes.Equal(elem, []byte("NULL")) {
						elem = nil
					}
					elems = append(elems, elem)
					break Element
				}
			}
		}
	}

	// After each element: a delimiter continues the current dimension, '}'
	// closes one nesting level; anything else is malformed input.
	for i < len(src) {
		if bytes.HasPrefix(src[i:], del) && depth > 0 {
			dims[depth-1]++
			i += len(del)
			goto Element
		} else if src[i] == '}' && depth > 0 {
			dims[depth-1]++
			depth--
			i++
		} else {
			return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
		}
	}

Close:
	for i < len(src) {
		if src[i] == '}' && depth > 0 {
			depth--
			i++
		} else {
			return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
		}
	}
	if depth > 0 {
		err = fmt.Errorf("pq: unable to parse array; expected %q at offset %d", '}', i)
	}
	if err == nil {
		// Sanity check: every recorded dimension must divide the element count.
		for _, d := range dims {
			if (len(elems) % d) != 0 {
				err = fmt.Errorf("pq: multidimensional arrays must have elements with matching dimensions")
			}
		}
	}
	return
}
// scanLinearArray parses src as a one-dimensional array literal, rejecting
// multidimensional input; typ is used only for the error message.
func scanLinearArray(src, del []byte, typ string) (elems [][]byte, err error) {
	dims, elems, err := parseArray(src, del)
	if err != nil {
		return nil, err
	}
	if len(dims) > 1 {
		return nil, fmt.Errorf("pq: cannot convert ARRAY%s to %s", strings.Replace(fmt.Sprint(dims), " ", "][", -1), typ)
	}
	return elems, err
}
// parseBytea decodes a Postgres bytea value in either output format: the hex
// form ("\x" followed by hex digits) or the legacy escape form (backslash
// escapes: "\\" for a backslash, "\NNN" octal for other bytes).
func parseBytea(s []byte) (result []byte, err error) {
	if bytes.HasPrefix(s, []byte(`\x`)) {
		// bytea_output = hex: everything after the "\x" prefix is hex digits.
		result = make([]byte, hex.DecodedLen(len(s)-2))
		if _, err := hex.Decode(result, s[2:]); err != nil {
			return nil, err
		}
		return result, nil
	}

	// bytea_output = escape
	for len(s) > 0 {
		if s[0] != '\\' {
			// A run of raw bytes: consume up to the next backslash at once.
			i := bytes.IndexByte(s, '\\')
			if i == -1 {
				result = append(result, s...)
				break
			}
			result = append(result, s[:i]...)
			s = s[i:]
			continue
		}
		if len(s) >= 2 && s[1] == '\\' {
			// Escaped backslash.
			result = append(result, '\\')
			s = s[2:]
			continue
		}
		// Backslash followed by exactly three octal digits.
		if len(s) < 4 {
			return nil, fmt.Errorf("invalid bytea sequence %v", s)
		}
		r, err := strconv.ParseInt(string(s[1:4]), 8, 9)
		if err != nil {
			return nil, fmt.Errorf("could not parse bytea value: %s", err.Error())
		}
		result = append(result, byte(r))
		s = s[4:]
	}

	return result, nil
}
// appendArrayQuotedBytes appends v to b as a double-quoted array element,
// backslash-escaping any '"' or '\' bytes, and returns the extended slice.
func appendArrayQuotedBytes(b, v []byte) []byte {
	b = append(b, '"')
	for len(v) > 0 {
		i := bytes.IndexAny(v, `"\`)
		if i < 0 {
			b = append(b, v...)
			break
		}
		b = append(b, v[:i]...)
		b = append(b, '\\', v[i])
		v = v[i+1:]
	}
	return append(b, '"')
}
package network
import (
"encoding/json"
"fmt"
"math"
"os"
"time"
"github.com/gookit/color"
"gopkg.in/cheggaaa/pb.v1"
)
// Network contains the Layers, Weights, Biases of a neural network then the actual output values
// and the learning rate.
type Network struct {
	Layers  []Matrix  // activations per layer; Layers[0] is the input
	Weights []Matrix  // Weights[i] connects Layers[i] to Layers[i+1]
	Biases  []Matrix  // Biases[i] is added before the activation of Layers[i+1]
	Output  Matrix    // expected outputs used during training
	Rate    float64   // learning rate
	Errors  []float64 // sampled training errors, recorded by Train for dashboards
	Time    float64   // training duration in seconds, set by Train
}
// LoadNetwork returns a Network from a specified file.
// It panics when the file cannot be opened or its JSON cannot be decoded.
func LoadNetwork(fileName string) *Network {
	inF, err := os.Open(fileName)
	if err != nil {
		panic("Failed to load " + fileName + ".")
	}
	defer inF.Close()

	decoder := json.NewDecoder(inF)
	neuralNetwork := &Network{}
	err = decoder.Decode(neuralNetwork)
	if err != nil {
		panic(err)
	}

	return neuralNetwork
}
// CreateNetwork creates the network by generating the layers, weights and biases
func CreateNetwork(rate float64, input, output Matrix, hiddensNodes ...int) Network {
	// A zero-valued row is prepended to both input and output — presumably a
	// padding/offset row expected by the matrix helpers; TODO confirm why.
	input = append([][]float64{
		make([]float64, len(input[0])),
	}, input...)
	output = append([][]float64{
		make([]float64, len(output[0])),
	}, output...)

	// Create the layers arrays and add the input values
	inputMatrix := input
	layers := []Matrix{inputMatrix}
	// Generate the hidden layer
	for _, hiddenNodes := range hiddensNodes {
		layers = append(layers, CreateMatrix(len(input), hiddenNodes))
	}
	// Add the output values to the layers arrays
	layers = append(layers, output)

	// Generate the weights and biases
	weightsNumber := len(layers) - 1
	var weights []Matrix
	var biases []Matrix

	for i := 0; i < weightsNumber; i++ {
		rows, columns := Columns(layers[i]), Columns(layers[i+1])

		weights = append(weights, RandomMatrix(rows, columns))
		biases = append(biases, RandomMatrix(Rows(layers[i]), columns))
	}

	return Network{
		Layers:  layers,
		Weights: weights,
		Biases:  biases,
		Output:  output,
		Rate:    rate,
	}
}
// Save saves the neural network in a specified file which can be retrieved with LoadNetwork.
// It panics when the file cannot be created or the network cannot be encoded.
func (network Network) Save(fileName string) {
	outF, err := os.OpenFile(fileName, os.O_CREATE|os.O_RDWR, 0777)
	if err != nil {
		panic("Failed to save the network to " + fileName + ".")
	}
	defer outF.Close()

	encoder := json.NewEncoder(outF)
	err = encoder.Encode(network)
	if err != nil {
		panic(err)
	}
}
// FeedForward executes forward propagation for the given inputs in the network:
// each layer is layer·weights + biases passed through the sigmoid activation.
func (network *Network) FeedForward() {
	for i := 0; i < len(network.Layers)-1; i++ {
		layer, weights, biases := network.Layers[i], network.Weights[i], network.Biases[i]

		productMatrix := DotProduct(layer, weights)
		Sum(productMatrix, biases)
		ApplyFunction(productMatrix, Sigmoid)

		// Replace the output values
		network.Layers[i+1] = productMatrix
	}
}
// Predict returns the predicted value for a training example by setting it as
// the input layer and running a forward pass.
func (network *Network) Predict(input []float64) []float64 {
	network.Layers[0] = Matrix{input}
	network.FeedForward()
	return network.Layers[len(network.Layers)-1][0]
}
// FeedBackward executes back propagation to adjust the weights for all the layers:
// derivatives are computed from the output layer inwards, then applied at once.
func (network *Network) FeedBackward() {
	var derivatives []Derivative
	derivatives = append(derivatives, network.ComputeLastLayerDerivatives())

	// Compute the derivatives of the hidden layers
	for i := 0; i < len(network.Layers)-2; i++ {
		derivatives = append(derivatives, network.ComputeDerivatives(i, derivatives))
	}

	// Then adjust the weights and biases
	network.Adjust(derivatives)
}
// ComputeError returns the average of all the errors after the training:
// the mean element-wise difference between the expected output and the
// network's current last-layer activations.
func (network *Network) ComputeError() float64 {
	// Feed forward to compute the last layer's values
	network.FeedForward()
	lastLayer := network.Layers[len(network.Layers)-1]
	errors := Difference(network.Output, lastLayer)

	// Make the sum of all the errors
	var i int
	var sum float64
	for _, a := range errors {
		for _, e := range a {
			sum += e
			i++
		}
	}

	// Compute the average
	return sum / float64(i)
}
// Train trains the neural network with a given number of iterations by executing
// forward and back propagation
func (network *Network) Train(iterations int) {
// Initialize the start date
start := time.Now()
// Create the progress bar
bar := pb.New(iterations).Postfix(fmt.Sprintf(
" - %s",
color.FgBlue.Render("Training the neural network"),
))
bar.Format("(██░)")
bar.SetMaxWidth(60)
bar.ShowCounters = false
bar.Start()
// Train the network
for i := 0; i < iterations; i++ {
network.FeedForward()
network.FeedBackward()
// Append errors for dashboard data
if i%(iterations/20) == 0 {
network.Errors = append(
network.Errors,
// Round the error to two decimals
network.ComputeError(),
)
}
// Increment the progress bar
bar.Increment()
}
bar.Finish()
// Print the error
arrangedError := fmt.Sprintf("%.5f", network.ComputeError())
// Calculate elapsed date
elapsed := time.Since(start)
// Round the elapsed date at two decimals
network.Time = math.Floor(elapsed.Seconds()*100) / 100
fmt.Printf("The error rate is %s.\n", color.FgGreen.Render(arrangedError))
} | network/network.go | 0.768993 | 0.526586 | network.go | starcoder |
package distance
/*
This module provides common distance functions for measuring distance
between observations.
Minkowski Distance is one the most inclusive among one as other distances are only
a specific case of Minkowski Distance(Chebyshev Distance is not straightforward, though).
when p=1 in MinkowskiDistance, it becomes ManhattanDistance,
when p=2 in MinkowskiDistance, it becomes EuclideanDIstance,
when p goes infinity, it becomes ChebyshevDistance.
Since the ManhattanDistance and EuclideanDistance are very frequently used, they are
implemented separately.
*/
import (
"errors"
"math"
)
// LPNorm computes the l_p norm of vector for a given p >= 1.
// The error result is always nil and exists only for interface symmetry.
func LPNorm(vector []float64, p float64) (float64, error) {
	var total float64
	for _, component := range vector {
		total += math.Pow(math.Abs(component), p)
	}
	return math.Pow(total, 1/p), nil
}
// Manhattan computes the 1-norm distance (l_1 distance) between two equal-length
// vectors. The error result is always nil.
func Manhattan(firstVector, secondVector []float64) (float64, error) {
	var total float64
	for i, v := range firstVector {
		total += math.Abs(v - secondVector[i])
	}
	return total, nil
}
// Euclidean computes the 2-norm distance (l_2 distance) between two equal-length
// vectors. The error result is always nil.
func Euclidean(firstVector, secondVector []float64) (float64, error) {
	var sumOfSquares float64
	for i := range firstVector {
		diff := firstVector[i] - secondVector[i]
		sumOfSquares += diff * diff
	}
	return math.Sqrt(sumOfSquares), nil
}
// SquaredEuclidean Higher weight for the points that are far apart
// Not a real metric as it does not obey triangle inequality
// (computed as the square of Euclidean, so it round-trips through Sqrt).
func SquaredEuclidean(firstVector, secondVector []float64) (float64, error) {
	distance, err := Euclidean(firstVector, secondVector)
	return distance * distance, err
}
// Minkowski computes the p-norm distance (l_p distance) between two
// equal-length vectors. The error result is always nil.
func Minkowski(firstVector, secondVector []float64, p float64) (float64, error) {
	var total float64
	for i, v := range firstVector {
		total += math.Pow(math.Abs(v-secondVector[i]), p)
	}
	return math.Pow(total, 1/p), nil
}
// WeightedMinkowski computes the p-norm distance with per-component weights
// (weighted l_p distance). The error result is always nil.
func WeightedMinkowski(firstVector, secondVector, weightVector []float64, p float64) (float64, error) {
	var total float64
	for i, v := range firstVector {
		total += weightVector[i] * math.Pow(math.Abs(v-secondVector[i]), p)
	}
	return math.Pow(total, 1/p), nil
}
// Chebyshev computes the Chebyshev distance (maximum absolute component
// difference) between two equal-length vectors. The error result is always nil.
func Chebyshev(firstVector, secondVector []float64) (float64, error) {
	var largest float64
	for i, v := range firstVector {
		if d := math.Abs(v - secondVector[i]); d >= largest {
			largest = d
		}
	}
	return largest, nil
}
// Hamming computes the Hamming distance: the number of positions at which
// the two equal-length vectors differ. The error result is always nil.
func Hamming(firstVector, secondVector []float64) (float64, error) {
	var mismatches float64
	for i, v := range firstVector {
		if v != secondVector[i] {
			mismatches++
		}
	}
	return mismatches, nil
}
// BrayCurtis computes the Bray-Curtis distance between two equal-length
// vectors: sum|a-b| / sum|a+b|. The error result is always nil. Note the
// result is NaN when the denominator is zero (e.g. both vectors all-zero).
func BrayCurtis(firstVector, secondVector []float64) (float64, error) {
	var num, den float64
	for i, v := range firstVector {
		num += math.Abs(v - secondVector[i])
		den += math.Abs(v + secondVector[i])
	}
	return num / den, nil
}
// Canberra computes the Canberra distance between two equal-length vectors:
// sum of |a-b| / (|a|+|b|) per component. The error result is always nil.
// Note a component where both values are zero contributes NaN.
func Canberra(firstVector, secondVector []float64) (float64, error) {
	var total float64
	for i, v := range firstVector {
		total += math.Abs(v-secondVector[i]) / (math.Abs(v) + math.Abs(secondVector[i]))
	}
	return total, nil
}
// NormalizedIntersection computes the intersection between two histograms:
// the sum of per-bin minima divided by the larger of the two histogram totals.
// An error is returned when the bin counts differ.
func NormalizedIntersection(a, b []float64) (float64, error) {
	if len(a) != len(b) {
		return 0, errors.New("distance: histograms must be of equal bin size")
	}
	// The per-bin minima and both totals are accumulated in a single pass
	// (the totals were previously delegated to the sumOf helper).
	var intersection, totalA, totalB float64
	for i := range a {
		intersection += math.Min(a[i], b[i])
		totalA += a[i]
		totalB += b[i]
	}
	return intersection / math.Max(totalA, totalB), nil
}
// sumOf returns the sum of all elements in value.
func sumOf(value []float64) float64 {
	var total float64
	for _, v := range value {
		total += v
	}
	return total
}
package linear
import (
"time"
)
// Units for Acceleration values. Always multiply with a unit when setting the initial value like you would for
// time.Time. This prevents you from having to worry about the internal storage format.
// Each per-second-squared unit is derived by dividing the matching Velocity
// unit by time.Second.
const (
	NanometerPerSecondSquared  Acceleration = Acceleration(NanometerPerSecond) / Acceleration(time.Second)
	MicrometerPerSecondSquared Acceleration = Acceleration(MicrometerPerSecond) / Acceleration(time.Second)
	MillimeterPerSecondSquared Acceleration = Acceleration(MillimeterPerSecond) / Acceleration(time.Second)
	CentimeterPerSecondSquared Acceleration = Acceleration(CentimeterPerSecond) / Acceleration(time.Second)
	DecimeterPerSecondSquared  Acceleration = Acceleration(DecimeterPerSecond) / Acceleration(time.Second)
	MeterPerSecondSquared      Acceleration = Acceleration(MeterPerSecond) / Acceleration(time.Second)
	KilometerPerSecondSquared  Acceleration = Acceleration(KilometerPerSecond) / Acceleration(time.Second)
	FootPerSecondSquared       Acceleration = Acceleration(FootPerSecond) / Acceleration(time.Second)
)
// NanometersPerSecondSquared returns a as a floating point number of nanometers per second squared.
func (a Acceleration) NanometersPerSecondSquared() float64 {
	return float64(a / NanometerPerSecondSquared)
}
// MicrometersPerSecondSquared returns a as a floating point number of micrometers per second squared.
func (a Acceleration) MicrometersPerSecondSquared() float64 {
	return float64(a / MicrometerPerSecondSquared)
}
// MillimetersPerSecondSquared returns a as a floating point number of millimeters per second squared.
func (a Acceleration) MillimetersPerSecondSquared() float64 {
	return float64(a / MillimeterPerSecondSquared)
}
// CentimetersPerSecondSquared returns a as a floating point number of centimeters per second squared.
func (a Acceleration) CentimetersPerSecondSquared() float64 {
	return float64(a / CentimeterPerSecondSquared)
}
// DecimetersPerSecondSquared returns a as a floating point number of decimeters per second squared.
func (a Acceleration) DecimetersPerSecondSquared() float64 {
	return float64(a / DecimeterPerSecondSquared)
}
// MetersPerSecondSquared returns a as a floating point number of meters per second squared.
func (a Acceleration) MetersPerSecondSquared() float64 {
	return float64(a / MeterPerSecondSquared)
}
// KilometersPerSecondSquared returns a as a floating point number of kilometers per second squared.
func (a Acceleration) KilometersPerSecondSquared() float64 {
	return float64(a / KilometerPerSecondSquared)
}
// FeetPerSecondSquared returns a as a floating point number of feet per second squared.
func (a Acceleration) FeetPerSecondSquared() float64 {
	return float64(a / FootPerSecondSquared)
}
// Abs returns the absolute value of a as a copy.
func (a Acceleration) Abs() Acceleration {
	if a >= 0 {
		return a
	}
	return -a
}
// Mul returns the product of a * x as a new Acceleration.
func (a Acceleration) Mul(x float64) Acceleration {
	return a * Acceleration(x)
}
// Div returns the quotient of a / x as a new Acceleration.
func (a Acceleration) Div(x float64) Acceleration {
	return a / Acceleration(x)
}
// DivAcceleration returns the quotient of a / x as a dimensionless floating point number.
func (a Acceleration) DivAcceleration(x Acceleration) float64 {
	return float64(a / x)
}
// MulDuration returns the product of a * t as a Velocity.
func (a Acceleration) MulDuration(t time.Duration) Velocity {
return Velocity(float64(a) * float64(t))
} | linear/acceleration_generated.go | 0.947039 | 0.698895 | acceleration_generated.go | starcoder |
package tree
import (
"github.com/lasthyphen/dijetsnetgo1.2/ids"
"github.com/lasthyphen/dijetsnetgo1.2/snow/consensus/snowman"
)
type Tree interface {
// Add places the block in the tree
Add(snowman.Block)
// Get returns the block that was added to this tree whose parent and ID
// match the provided block. If non-exists, then false will be returned.
Get(snowman.Block) (snowman.Block, bool)
// Accept marks the provided block as accepted and rejects every conflicting
// block.
Accept(snowman.Block) error
}
type tree struct {
// parentID -> childID -> childBlock
nodes map[ids.ID]map[ids.ID]snowman.Block
}
func New() Tree {
return &tree{
nodes: make(map[ids.ID]map[ids.ID]snowman.Block),
}
}
func (t *tree) Add(blk snowman.Block) {
parentID := blk.Parent()
children, exists := t.nodes[parentID]
if !exists {
children = make(map[ids.ID]snowman.Block)
t.nodes[parentID] = children
}
blkID := blk.ID()
children[blkID] = blk
}
func (t *tree) Get(blk snowman.Block) (snowman.Block, bool) {
parentID := blk.Parent()
children := t.nodes[parentID]
blkID := blk.ID()
originalBlk, exists := children[blkID]
return originalBlk, exists
}
// Accept accepts blk, removes its generation from the tree, and rejects
// every sibling together with all of the siblings' descendants. The
// rejection is iterative (explicit stack) so deep subtrees cannot
// overflow the call stack.
func (t *tree) Accept(blk snowman.Block) error {
	// accept the provided block
	if err := blk.Accept(); err != nil {
		return err
	}
	// get the siblings of the block; after these deletes the accepted
	// block's generation is no longer tracked
	parentID := blk.Parent()
	children := t.nodes[parentID]
	blkID := blk.ID()
	delete(children, blkID)
	delete(t.nodes, parentID)
	// mark the siblings of the accepted block as rejectable
	childrenToReject := make([]snowman.Block, 0, len(children))
	for _, child := range children {
		childrenToReject = append(childrenToReject, child)
	}
	// reject all the rejectable blocks (pop from the tail of the stack)
	for len(childrenToReject) > 0 {
		i := len(childrenToReject) - 1
		child := childrenToReject[i]
		childrenToReject = childrenToReject[:i]
		// reject the block
		if err := child.Reject(); err != nil {
			return err
		}
		// mark the progeny of this block as being rejectable
		blkID := child.ID()
		children := t.nodes[blkID]
		for _, child := range children {
			childrenToReject = append(childrenToReject, child)
		}
		delete(t.nodes, blkID)
	}
	return nil
}
package raycaster
import (
"github.com/mattkimber/gorender/internal/geometry"
"github.com/mattkimber/gorender/internal/manifest"
"github.com/mattkimber/gorender/internal/sampler"
"github.com/mattkimber/gorender/internal/voxelobject"
"sync"
)
// RenderInfo holds one RenderSample per sub-pixel sample taken for a
// single output pixel.
type RenderInfo []RenderSample

// RenderSample is the shading information recorded for one ray sample.
type RenderSample struct {
	Collision bool
	Index byte
	Normal, AveragedNormal geometry.Vector3
	Depth, Occlusion int
	LightAmount float64
	Shadowing float64
}

// RayResult is the voxel coordinate hit by a cast ray, if any.
type RayResult struct {
	X, Y, Z byte
	HasGeometry bool
	Depth int
}

// RenderOutput is the grid of per-pixel sample sets produced by a raycast.
type RenderOutput [][]RenderInfo
// GetRaycastOutput casts one ray per sub-pixel sample through the voxel
// object for the given sprite and returns the shading samples. Columns
// are traced in parallel, one goroutine per sampler column, and the
// function blocks until all columns finish.
func GetRaycastOutput(object voxelobject.ProcessedVoxelObject, m manifest.Manifest, spr manifest.Sprite, sampler sampler.Samples) RenderOutput {
	size := object.Size
	// Handle slicing functionality: restrict the visible X range when the
	// object is rendered as a series of slices.
	minX, maxX := 0, object.Size.X
	if m.SliceLength > 0 && m.SliceThreshold > 0 && m.SliceThreshold < object.Size.X {
		midpoint := (object.Size.X / 2) - (m.SliceLength / 2)
		minX = midpoint - (m.SliceLength * spr.Slice)
		maxX = minX + m.SliceLength
		// Allow sprites to overlap to avoid edge transparency effects
		minX -= m.SliceOverlap
		maxX += m.SliceOverlap
		if minX < 0 {
			minX = 0
		}
		if maxX > 255 {
			maxX = 255
		}
	}
	// NOTE(review): maxX is only clamped to 255 inside the slicing branch;
	// if object.Size.X can exceed 255 without slicing, byte(maxX) below
	// wraps — confirm object sizes are bounded.
	bminX, bmaxX := byte(minX), byte(maxX)
	limits := geometry.Vector3{X: float64(size.X), Y: float64(size.Y), Z: float64(size.Z)}
	viewport := getViewportPlane(spr.Angle, m, spr.ZError, size, float64(spr.RenderElevationAngle))
	ray := geometry.Zero().Subtract(getRenderDirection(spr.Angle, float64(spr.RenderElevationAngle)))
	lighting := getLightingDirection(spr.Angle+float64(m.LightingAngle), float64(m.LightingElevation), spr.Flip)
	result := make(RenderOutput, len(sampler))
	wg := sync.WaitGroup{}
	wg.Add(sampler.Width())
	w, h := sampler.Width(), sampler.Height()
	for x := 0; x < w; x++ {
		// capture the loop variable for the goroutine below
		thisX := x
		go func() {
			result[thisX] = make([]RenderInfo, h)
			for y := 0; y < h; y++ {
				samples := sampler[thisX][y]
				result[thisX][y] = make(RenderInfo, len(samples))
				for i, s := range samples {
					raycastSample(viewport, s, ray, limits, object, spr, lighting, result, thisX, y, i, bminX, bmaxX)
				}
			}
			wg.Done()
		}()
	}
	wg.Wait()
	return result
}
// raycastSample casts the ray for a single sub-pixel sample s, and on a
// hit within the [minX, maxX] slice range records shading data (including
// a secondary shadow-feeler ray towards the light) into result[thisX][y][i].
func raycastSample(viewport geometry.Plane, s geometry.Vector2, ray geometry.Vector3, limits geometry.Vector3, object voxelobject.ProcessedVoxelObject, spr manifest.Sprite, lighting geometry.Vector3, result RenderOutput, thisX int, y int, i int, minX byte, maxX byte) {
	loc0 := viewport.BiLerpWithinPlane(s.X, s.Y)
	loc := getIntersectionWithBounds(loc0, ray, limits)
	rayResult := castFpRay(object, loc0, loc, ray, limits, spr.Flip)
	if rayResult.HasGeometry && rayResult.X >= minX && rayResult.X <= maxX {
		resultVec := geometry.Vector3{X: float64(rayResult.X), Y: float64(rayResult.Y), Z: float64(rayResult.Z)}
		shadowLoc := resultVec
		shadowVec := geometry.Zero().Subtract(lighting).Normalise()
		// step the shadow origin out of the hit voxel so the feeler ray does
		// not immediately self-intersect
		for {
			sx, sy, sz := byte(shadowLoc.X), byte(shadowLoc.Y), byte(shadowLoc.Z)
			if sx != rayResult.X || sy != rayResult.Y || sz != rayResult.Z {
				break
			}
			shadowLoc = shadowLoc.Add(shadowVec)
		}
		// Don't flip Y when calculating shadows, as it has been pre-flipped on input.
		shadowResult := castFpRay(object, shadowLoc, shadowLoc, shadowVec, limits, false).Depth
		setResult(&result[thisX][y][i], object.Elements[rayResult.X][rayResult.Y][rayResult.Z], lighting, rayResult.Depth, shadowResult)
	}
}
// setResult fills result with the shading data of the voxel element that
// was hit. shadowLength is the depth the shadow feeler ray travelled
// before hitting geometry; 0 means unshadowed.
func setResult(result *RenderSample, element voxelobject.ProcessedElement, lighting geometry.Vector3, depth int, shadowLength int) {
	if shadowLength > 0 && shadowLength < 10 {
		// fully shadowed when the occluder is very close
		result.Shadowing = 1.0
	} else if shadowLength > 0 && shadowLength < 80 {
		// NOTE(review): mixes the constants 70 and 80 — at shadowLength=10
		// this yields 70/80 rather than 1.0, so the falloff is discontinuous
		// at the boundary; confirm whether 70 or 80 is intended.
		result.Shadowing = float64(70-(shadowLength-10)) / 80.0
	}
	result.Collision = true
	result.Index = element.Index
	result.Depth = depth
	result.LightAmount = getLightingValue(element.AveragedNormal, lighting)
	result.Normal = element.Normal
	result.Occlusion = element.Occlusion
	result.AveragedNormal = element.AveragedNormal
}
func getLightingValue(normal, lighting geometry.Vector3) float64 {
return normal.Dot(lighting)
} | internal/raycaster/raycaster.go | 0.738575 | 0.425247 | raycaster.go | starcoder |
package iso20022
// CorporateActionPrice19 specifies prices related to a corporate action
// option. All members are optional and nil until populated via the
// corresponding Add* helper.
type CorporateActionPrice19 struct {
	// Indicates whether the price is an indicative price or a market price.
	IndicativeOrMarketPrice *IndicativeOrMarketPrice2Choice `xml:"IndctvOrMktPric,omitempty"`
	// 1. Price at which security will be purchased/sold if warrant is exercised, either as an actual amount or a percentage.
	// 2. Price at which a bond is converted to underlying security either as an actual amount or a percentage.
	// 3. Strike price of an option, represented either as an actual amount, a percentage or a number of points above an index.
	ExercisePrice *PriceFormat5Choice `xml:"ExrcPric,omitempty"`
	// Initial issue price of a financial instrument.
	IssuePrice *PriceFormat5Choice `xml:"IssePric,omitempty"`
	// Generic cash price received per product by the underlying security holder either as a percentage or an amount, for example, redemption price.
	GenericCashPriceReceivedPerProduct *PriceFormat21Choice `xml:"GncCshPricRcvdPerPdct,omitempty"`
	// Generic cash price paid per product by the underlying security holder either as a percentage or an amount, for example, reinvestment price.
	GenericCashPricePaidPerProduct *PriceFormat5Choice `xml:"GncCshPricPdPerPdct,omitempty"`
}
// AddIndicativeOrMarketPrice initialises the IndicativeOrMarketPrice
// choice and returns it so the caller can populate it.
func (c *CorporateActionPrice19) AddIndicativeOrMarketPrice() *IndicativeOrMarketPrice2Choice {
	choice := &IndicativeOrMarketPrice2Choice{}
	c.IndicativeOrMarketPrice = choice
	return choice
}
// AddExercisePrice initialises the ExercisePrice member and returns it
// so the caller can populate it.
func (c *CorporateActionPrice19) AddExercisePrice() *PriceFormat5Choice {
	price := &PriceFormat5Choice{}
	c.ExercisePrice = price
	return price
}
// AddIssuePrice initialises the IssuePrice member and returns it so the
// caller can populate it.
func (c *CorporateActionPrice19) AddIssuePrice() *PriceFormat5Choice {
	price := &PriceFormat5Choice{}
	c.IssuePrice = price
	return price
}
// AddGenericCashPriceReceivedPerProduct initialises the
// GenericCashPriceReceivedPerProduct member and returns it so the caller
// can populate it.
func (c *CorporateActionPrice19) AddGenericCashPriceReceivedPerProduct() *PriceFormat21Choice {
	price := &PriceFormat21Choice{}
	c.GenericCashPriceReceivedPerProduct = price
	return price
}
func (c *CorporateActionPrice19) AddGenericCashPricePaidPerProduct() *PriceFormat5Choice {
c.GenericCashPricePaidPerProduct = new(PriceFormat5Choice)
return c.GenericCashPricePaidPerProduct
} | CorporateActionPrice19.go | 0.838283 | 0.499878 | CorporateActionPrice19.go | starcoder |
package isometric
import (
"image"
"image/color"
"math"
"github.com/weqqr/panorama/game"
"github.com/weqqr/panorama/lm"
"github.com/weqqr/panorama/mesh"
"github.com/weqqr/panorama/raster"
"github.com/weqqr/panorama/world"
)
// Gamma is the exponent used for gamma-correct lighting blends.
const Gamma = 2.2

// BaseResolution is the pixel width of a single node tile.
const BaseResolution = 16

var (
	// YOffsetCoef is the vertical pixel offset contributed per node of height.
	YOffsetCoef = int(math.Round(BaseResolution * (1 + math.Sqrt2) / 4))
	// TileBlockWidth is the pixel width of a rendered map block.
	TileBlockWidth = world.MapBlockSize * BaseResolution
	// TileBlockHeight is the pixel height of a rendered map block.
	TileBlockHeight = BaseResolution/2*world.MapBlockSize - 1 + YOffsetCoef*world.MapBlockSize
)
// RenderableNode is the cache key for a rendered node sprite: the node
// name plus the light level and param2 orientation it was rendered with.
type RenderableNode struct {
	Name string
	Light float32
	Param2 uint8
}
// NodeRasterizer renders voxel nodes into small sprites, memoising the
// results keyed by RenderableNode.
type NodeRasterizer struct {
	cache map[RenderableNode]*raster.RenderBuffer
}
// NewNodeRasterizer returns a NodeRasterizer with an empty render cache.
func NewNodeRasterizer() NodeRasterizer {
	r := NodeRasterizer{}
	r.cache = map[RenderableNode]*raster.RenderBuffer{}
	return r
}
// cartesianToBarycentric converts point p into barycentric coordinates
// with respect to triangle (a, b, c) using the cross-product method.
func cartesianToBarycentric(p lm.Vector2, a, b, c lm.Vector2) lm.Vector3 {
	u := lm.Vec3(c.X-a.X, b.X-a.X, a.X-p.X)
	v := lm.Vec3(c.Y-a.Y, b.Y-a.Y, a.Y-p.Y)
	w := u.Cross(v)
	// w.Z is zero for degenerate (zero-area) triangles, which makes the
	// divisions below non-finite; callers reject those samples.
	return lm.Vec3(1-(w.X+w.Y)/w.Z, w.Y/w.Z, w.X/w.Z)
}
// sampleTriangle tests whether pixel (x, y), sampled at its centre, lies
// strictly inside triangle (a, b, c). On success it also returns the
// barycentric coordinates of the sample point.
func sampleTriangle(x, y int, a, b, c lm.Vector2) (bool, lm.Vector3) {
	centre := lm.Vec2(float32(x), float32(y)).Add(lm.Vec2(0.5, 0.5))
	bc := cartesianToBarycentric(centre, a, b, c)
	if bc.X > 0 && bc.Y > 0 && bc.Z > 0 {
		return true, bc
	}
	return false, lm.Vector3{}
}
// sampleTexture returns the texel at the given normalised texture
// coordinate as an RGBA vector with components in [0, 1]
// (nearest-neighbour sampling).
func sampleTexture(tex *image.NRGBA, texcoord lm.Vector2) lm.Vector4 {
	// NOTE(review): a texcoord component of exactly 1.0 indexes one past
	// the last texel; image.NRGBA returns the zero colour out of bounds,
	// so this cannot panic but may darken edge pixels — confirm intended.
	x := int(texcoord.X * float32(tex.Rect.Dx()))
	y := int(texcoord.Y * float32(tex.Rect.Dy()))
	c := tex.NRGBAAt(x, y)
	return lm.Vec4(float32(c.R)/255, float32(c.G)/255, float32(c.B)/255, float32(c.A)/255)
}
// SunLightDir is the normalised direction towards the sun.
var SunLightDir = lm.Vec3(-0.5, 1, -0.8).Normalize()

// SunLightIntensity scales lighting so the strongest axis contribution
// reaches 0.95.
var SunLightIntensity = 0.95 / SunLightDir.MaxComponent()

// Projection maps world space into the dimetric screen space used by the
// renderer.
var Projection = lm.DimetricProjection()
// drawTriangle projects triangle (a, b, c) with the dimetric projection
// and rasterises it into target, performing a depth test, Lambert-style
// shading scaled by the node light level, and optional texturing.
func drawTriangle(target *raster.RenderBuffer, tex *image.NRGBA, lighting float32, a, b, c mesh.Vertex) {
	originX := float32(target.Color.Bounds().Dx() / 2)
	originY := float32(target.Color.Bounds().Dy() / 2)
	origin := lm.Vec2(originX, originY)
	// project vertex positions into screen space
	a.Position = Projection.MulVec(a.Position)
	b.Position = Projection.MulVec(b.Position)
	c.Position = Projection.MulVec(c.Position)
	// scale to pixels, flip Y, and centre on the tile origin
	pa := a.Position.XY().Mul(lm.Vec2(1, -1)).MulScalar(BaseResolution * math.Sqrt2 / 2).Add(origin)
	pb := b.Position.XY().Mul(lm.Vec2(1, -1)).MulScalar(BaseResolution * math.Sqrt2 / 2).Add(origin)
	pc := c.Position.XY().Mul(lm.Vec2(1, -1)).MulScalar(BaseResolution * math.Sqrt2 / 2).Add(origin)
	// rasterise only within the triangle's bounding box
	bboxMin := pa.Min(pb).Min(pc)
	bboxMax := pa.Max(pb).Max(pc)
	for y := int(bboxMin.Y); y < int(bboxMax.Y)+1; y++ {
		for x := int(bboxMin.X); x < int(bboxMax.X)+1; x++ {
			pointIsInsideTriangle, barycentric := sampleTriangle(x, y, pa, pb, pc)
			if !pointIsInsideTriangle {
				continue
			}
			// interpolate depth and normal across the triangle
			pixelDepth := lm.Vec3(a.Position.Z, b.Position.Z, c.Position.Z).Dot(barycentric)
			normal := a.Normal.MulScalar(barycentric.X).
				Add(b.Normal.MulScalar(barycentric.Y)).
				Add(c.Normal.MulScalar(barycentric.Z))
			// NOTE: this shadows the lighting parameter (node light level),
			// combining it with the Lambert term and sun intensity.
			lighting := SunLightIntensity * lighting * lm.Clamp(lm.Abs(normal.Dot(SunLightDir))*0.8+0.2, 0.0, 1.0)
			var finalColor color.NRGBA
			if tex != nil {
				texcoord := a.Texcoord.MulScalar(barycentric.X).
					Add(b.Texcoord.MulScalar(barycentric.Y)).
					Add(c.Texcoord.MulScalar(barycentric.Z))
				rgba := sampleTexture(tex, texcoord)
				// blend in linear light (gamma-decode, light, re-encode)
				col := rgba.XYZ().PowScalar(Gamma).MulScalar(lighting).PowScalar(1.0/Gamma).ClampScalar(0.0, 1.0)
				finalColor = color.NRGBA{
					R: uint8(255 * col.X),
					G: uint8(255 * col.Y),
					B: uint8(255 * col.Z),
					A: uint8(255 * rgba.W),
				}
			} else {
				// untextured: flat grey shaded by the lighting term
				finalColor = color.NRGBA{
					R: uint8(255 * lighting),
					G: uint8(255 * lighting),
					B: uint8(255 * lighting),
					A: 255,
				}
			}
			// skip nearly transparent samples; lower depth values win the
			// depth test
			if finalColor.A > 10 {
				if pixelDepth > target.Depth.At(x, y) {
					continue
				}
				target.Color.SetNRGBA(x, y, finalColor)
				target.Depth.Set(x, y, pixelDepth)
			}
		}
	}
}
// transformToFaceDir rotates v according to a Minetest facedir value:
// bits 0-1 select the rotation about the vertical axis, bits 2-4 select
// the axis orientation.
func transformToFaceDir(v lm.Vector3, facedir uint8) lm.Vector3 {
	axis := (facedir >> 2) & 0x7
	dir := facedir & 0x3
	// Left click with screwdriver
	switch dir {
	case 0: // no-op
	case 1:
		v = v.RotateXZ(lm.Radians(-90))
	case 2:
		v = v.RotateXZ(lm.Radians(180))
	case 3:
		v = v.RotateXZ(lm.Radians(90))
	}
	// Right click with screwdriver
	switch axis {
	case 0: // no-op
	case 1:
		v = v.RotateYZ(lm.Radians(90))
	case 2:
		v = v.RotateYZ(lm.Radians(-90))
	case 3:
		v = v.RotateXY(lm.Radians(-90))
	case 4:
		v = v.RotateXY(lm.Radians(90))
	case 5:
		v = v.RotateXY(lm.Radians(180))
	}
	return v
}
// Render rasterises the given node into a sprite-sized RenderBuffer,
// caching and reusing results per (name, light, param2) combination.
// It returns nil for airlike, model-less or texture-less nodes.
// NOTE(review): the cache map is read and written without locking —
// confirm Render is never called from multiple goroutines.
func (r *NodeRasterizer) Render(node RenderableNode, nodeDef *game.NodeDefinition) *raster.RenderBuffer {
	if nodeDef.DrawType == game.DrawTypeAirlike || nodeDef.Model == nil || len(nodeDef.Textures) == 0 {
		return nil
	}
	if target, ok := r.cache[node]; ok {
		return target
	}
	rect := image.Rect(0, 0, BaseResolution, BaseResolution+BaseResolution/8)
	target := raster.NewRenderBuffer(rect)
	// draw every triangle of every sub-mesh, using the matching texture
	for j, mesh := range nodeDef.Model.Meshes {
		triangleCount := len(mesh.Vertices) / 3
		for i := 0; i < triangleCount; i++ {
			a := mesh.Vertices[i*3]
			b := mesh.Vertices[i*3+1]
			c := mesh.Vertices[i*3+2]
			// apply param2 orientation for facedir nodes
			if nodeDef.ParamType2 == game.ParamType2FaceDir {
				a.Position = transformToFaceDir(a.Position, node.Param2)
				b.Position = transformToFaceDir(b.Position, node.Param2)
				c.Position = transformToFaceDir(c.Position, node.Param2)
				a.Normal = transformToFaceDir(a.Normal, node.Param2)
				b.Normal = transformToFaceDir(b.Normal, node.Param2)
				c.Normal = transformToFaceDir(c.Normal, node.Param2)
			}
			// mirror X and Z to match the renderer's handedness
			a.Position.Z = -a.Position.Z
			b.Position.Z = -b.Position.Z
			c.Position.Z = -c.Position.Z
			a.Position.X = -a.Position.X
			b.Position.X = -b.Position.X
			c.Position.X = -c.Position.X
			drawTriangle(target, nodeDef.Textures[j], node.Light, a, b, c)
		}
	}
	r.cache[node] = target
	return target
}
// LinkedList is a singly linked list of ints. The value returned by
// Constructor acts as a sentinel head node: its val field is unused and
// its next field points at the first real element.
//
// This replaces the previous headless representation, under which
// AddAtHead was a no-op (it only reassigned the method's local copy of
// the receiver pointer), AddAtIndex dropped the node previously at the
// insertion index (it linked to node.next.next), and DeleteAtIndex(0)
// removed the wrong node.
type LinkedList struct {
	val  int
	next *LinkedList
}

// Constructor returns an empty list (a bare sentinel node).
func Constructor() LinkedList {
	return LinkedList{}
}

// GetNode returns the node holding the index-th value of the list, or
// nil if the list has fewer than index+1 elements.
func (l *LinkedList) GetNode(index int) *LinkedList {
	node := l.next
	for i := 0; i < index; i++ {
		if node == nil {
			return nil
		}
		node = node.next
	}
	return node
}

// Get returns the value of the index-th node, or -1 if index is invalid.
func (l *LinkedList) Get(index int) int {
	if index < 0 {
		return -1
	}
	node := l.GetNode(index)
	if node == nil {
		return -1
	}
	return node.val
}

// AddAtHead inserts a node with value val before the first element, so
// it becomes the new first element.
func (l *LinkedList) AddAtHead(val int) {
	l.next = &LinkedList{val: val, next: l.next}
}

// AddAtTail appends a node with value val after the last element.
func (l *LinkedList) AddAtTail(val int) {
	node := l
	for node.next != nil {
		node = node.next
	}
	node.next = &LinkedList{val: val}
}

// AddAtIndex inserts a node with value val before the index-th node.
// If index equals the length of the list the node is appended; if index
// is negative it is inserted at the head; if index is greater than the
// length nothing is inserted.
func (l *LinkedList) AddAtIndex(index int, val int) {
	if index < 0 {
		index = 0
	}
	prev := l
	for i := 0; i < index; i++ {
		prev = prev.next
		if prev == nil {
			// index > length: nothing to do
			return
		}
	}
	prev.next = &LinkedList{val: val, next: prev.next}
}

// DeleteAtIndex removes the index-th node if it exists; otherwise it
// does nothing.
func (l *LinkedList) DeleteAtIndex(index int) {
	if index < 0 {
		return
	}
	prev := l
	for i := 0; i < index; i++ {
		prev = prev.next
		if prev == nil {
			return
		}
	}
	if prev.next == nil {
		return
	}
	prev.next = prev.next.next
}
package structs
// PacketHeader is the common header prefixed to every F1 2018 UDP packet.
type PacketHeader struct {
	M_packetFormat uint16 // 2018
	M_packetVersion uint8 // Version of this packet type, all start from 1
	M_packetId uint8 // Identifier for the packet type, see below
	M_sessionUID uint64 // Unique identifier for the session
	M_sessionTime float32 // Session timestamp
	M_frameIdentifier uint32 // Identifier for the frame the data was retrieved on
	M_playerCarIndex uint8 // Index of player's car in the array
}
// MOTION PACKET:
// The motion packet gives physics data for all the cars being driven.
// There is additional data for the car being driven with the goal of being able to drive a motion platform setup.
// Frequency: Rate as specified in menus
// Size: 1341 bytes

// CarMotionData holds the physics state of a single car.
type CarMotionData struct {
	M_worldPositionX float32 // World space X position
	M_worldPositionY float32 // World space Y position
	M_worldPositionZ float32 // World space Z position
	M_worldVelocityX float32 // Velocity in world space X
	M_worldVelocityY float32 // Velocity in world space Y
	M_worldVelocityZ float32 // Velocity in world space Z
	M_worldForwardDirX int16 // World space forward X direction (normalised)
	M_worldForwardDirY int16 // World space forward Y direction (normalised)
	M_worldForwardDirZ int16 // World space forward Z direction (normalised)
	M_worldRightDirX int16 // World space right X direction (normalised)
	M_worldRightDirY int16 // World space right Y direction (normalised)
	M_worldRightDirZ int16 // World space right Z direction (normalised)
	M_gForceLateral float32 // Lateral G-Force component
	M_gForceLongitudinal float32 // Longitudinal G-Force component
	M_gForceVertical float32 // Vertical G-Force component
	M_yaw float32 // Yaw angle in radians
	M_pitch float32 // Pitch angle in radians
	M_roll float32 // Roll angle in radians
}
// PacketMotionData is the full motion packet: per-car physics data plus
// extra data for the player's car only.
type PacketMotionData struct {
	M_header PacketHeader // Header
	M_carMotionData [20]CarMotionData // Data for all cars on track
	// Extra player car ONLY data
	M_suspensionPosition [4]float32 // Note: All wheel arrays have the following order:
	M_suspensionVelocity [4]float32 // RL, RR, FL, FR
	M_suspensionAcceleration [4]float32 // RL, RR, FL, FR
	M_wheelSpeed [4]float32 // Speed of each wheel
	M_wheelSlip [4]float32 // Slip ratio for each wheel
	M_localVelocityX float32 // Velocity in local space
	M_localVelocityY float32 // Velocity in local space
	M_localVelocityZ float32 // Velocity in local space
	M_angularVelocityX float32 // Angular velocity x-component
	M_angularVelocityY float32 // Angular velocity y-component
	M_angularVelocityZ float32 // Angular velocity z-component
	M_angularAccelerationX float32 // Angular acceleration x-component
	M_angularAccelerationY float32 // Angular acceleration y-component
	M_angularAccelerationZ float32 // Angular acceleration z-component
	M_frontWheelsAngle float32 // Current front wheels angle in radians
}
// SESSION PACKET:
// The session packet includes details about the current session in progress.
// Frequency: 2 per second
// Size: 147 bytes

// MarshalZone describes one marshal zone and its current flag state.
type MarshalZone struct {
	M_zoneStart float32 // Fraction (0..1) of way through the lap the marshal zone starts
	M_zoneFlag int8 // -1 = invalid/unknown, 0 = none, 1 = green, 2 = blue, 3 = yellow, 4 = red
}
// PacketSessionData is the full session packet.
type PacketSessionData struct {
	M_header PacketHeader // Header
	M_weather uint8 // Weather - 0 = clear, 1 = light cloud, 2 = overcast, 3 = light rain, 4 = heavy rain, 5 = storm
	M_trackTemperature int8 // Track temp. in degrees celsius
	M_airTemperature int8 // Air temp. in degrees celsius
	M_totalLaps uint8 // Total number of laps in this race
	M_trackLength uint16 // Track length in metre
	M_sessionType uint8 // 0 = unknown, 1 = P1, 2 = P2, 3 = P3, 4 = Short P, 5 = Q1, 6 = Q2, 7 = Q3, 8 = Short Q, 9 = OSQ, 10 = R, 11 = R2, 12 = Time Trial
	M_trackId int8 // -1 for unknown, 0-21 for tracks, see appendix
	M_era uint8 // Era, 0 = modern, 1 = classic
	M_sessionTimeLeft uint16 // Time left in session in seconds
	M_sessionDuration uint16 // Session duration in seconds
	M_pitSpeedLimit uint8 // Pit speed limit in kilometres per hour
	M_gamePaused uint8 // Whether the game is paused
	M_isSpectating uint8 // Whether the player is spectating
	M_spectatorCarIndex uint8 // Index of the car being spectated
	M_sliProNativeSupport uint8 // SLI Pro support, 0 = inactive, 1 = active
	M_numMarshalZones uint8 // Number of marshal zones to follow
	M_marshalZones [21]MarshalZone // List of marshal zones – max 21
	M_safetyCarStatus uint8 // 0 = no safety car, 1 = full safety car, 2 = virtual safety car
	M_networkGame uint8 // 0 = offline, 1 = online
}
// LAP DATA PACKET:
// The lap data packet gives details of all the cars in the session.
// Frequency: Rate as specified in menus
// Size: 841 bytes

// LapData holds the lap/timing state of a single car.
type LapData struct {
	M_lastLapTime float32 // Last lap time in seconds
	M_currentLapTime float32 // Current time around the lap in seconds
	M_bestLapTime float32 // Best lap time of the session in seconds
	M_sector1Time float32 // Sector 1 time in seconds
	M_sector2Time float32 // Sector 2 time in seconds
	M_lapDistance float32 // Distance vehicle is around current lap in metres – could be negative if line hasn’t been crossed yet
	M_totalDistance float32 // Total distance travelled in session in metres – could be negative if line hasn’t been crossed yet
	M_safetyCarDelta float32 // Delta in seconds for safety car
	M_carPosition uint8 // Car race position
	M_currentLapNum uint8 // Current lap number
	M_pitStatus uint8 // 0 = none, 1 = pitting, 2 = in pit area
	M_sector uint8 // 0 = sector1, 1 = sector2, 2 = sector3
	M_currentLapInvalid uint8 // Current lap invalid - 0 = valid, 1 = invalid
	M_penalties uint8 // Accumulated time penalties in seconds to be added
	M_gridPosition uint8 // Grid position the vehicle started the race in
	M_driverStatus uint8 // Status of driver - 0 = in garage, 1 = flying lap, 2 = in lap, 3 = out lap, 4 = on track
	M_resultStatus uint8 // Result status - 0 = invalid, 1 = inactive, 2 = active, 3 = finished, 4 = disqualified, 5 = not classified, 6 = retired
}
// PacketLapData is the full lap data packet.
type PacketLapData struct {
	M_header PacketHeader // Header
	M_lapData [20]LapData // Lap data for all cars on track
}
// EVENT PACKET:
// This packet gives details of events that happen during the course of the race.
// Frequency: When the event occurs
// Size: 25 bytes

// PacketEventData is the full event packet.
type PacketEventData struct {
	M_header PacketHeader // Header
	M_eventStringCode [4]uint8 // Event string code, see above
}
// PARTICIPANTS PACKET:
// This is a list of participants in the race. If the vehicle is controlled by AI, then the name will be the driver name.
// If this is a multiplayer game, the names will be the Steam Id on PC, or the LAN name if appropriate.
// On Xbox One, the names will always be the driver name, on PS4 the name will be the LAN name if playing a LAN game, otherwise it will be the driver name.
// Frequency: Every 5 seconds
// Size: 1082 bytes

// ParticipantData describes one participant (driver/car) in the session.
type ParticipantData struct {
	M_aiControlled uint8 // Whether the vehicle is AI (1) or Human (0) controlled
	M_driverId uint8 // Driver id - see appendix
	M_teamId uint8 // Team id - see appendix
	M_raceNumber uint8 // Race number of the car
	M_nationality uint8 // Nationality of the driver
	M_name [48]byte // Name of participant in UTF-8 format – null terminated, Will be truncated with … (U+2026) if too long
}

// PacketParticipantsData is the full participants packet.
type PacketParticipantsData struct {
	M_header PacketHeader // Header
	M_numCars uint8 // Number of cars in the data
	M_participants [20]ParticipantData
}
// CAR SETUPS PACKET:
// This packet details the car setups for each vehicle in the session.
// Note that in multiplayer games, other player cars will appear as blank, you will only be able to see your car setup and AI cars.
// Frequency: Every 5 seconds
// Size: 841 bytes

// CarSetupData holds the setup of a single car.
type CarSetupData struct {
	M_frontWing uint8 // Front wing aero
	M_rearWing uint8 // Rear wing aero
	M_onThrottle uint8 // Differential adjustment on throttle (percentage)
	M_offThrottle uint8 // Differential adjustment off throttle (percentage)
	M_frontCamber float32 // Front camber angle (suspension geometry)
	M_rearCamber float32 // Rear camber angle (suspension geometry)
	M_frontToe float32 // Front toe angle (suspension geometry)
	M_rearToe float32 // Rear toe angle (suspension geometry)
	M_frontSuspension uint8 // Front suspension
	M_rearSuspension uint8 // Rear suspension
	M_frontAntiRollBar uint8 // Front anti-roll bar
	M_rearAntiRollBar uint8 // Rear anti-roll bar
	M_frontSuspensionHeight uint8 // Front ride height
	M_rearSuspensionHeight uint8 // Rear ride height
	M_brakePressure uint8 // Brake pressure (percentage)
	M_brakeBias uint8 // Brake bias (percentage)
	M_frontTyrePressure float32 // Front tyre pressure (PSI)
	M_rearTyrePressure float32 // Rear tyre pressure (PSI)
	M_ballast uint8 // Ballast
	M_fuelLoad float32 // Fuel load
}
// PacketCarSetupData is the full car setups packet.
type PacketCarSetupData struct {
	M_header PacketHeader // Header
	M_carSetups [20]CarSetupData
}
// CAR TELEMETRY PACKET:
// This packet details telemetry for all the cars in the race.
// It details various values that would be recorded on the car such as speed, throttle application, DRS etc.
// Frequency: Rate as specified in menus
// Size: 1085 bytes

// CarTelemetryData holds the live telemetry of a single car.
type CarTelemetryData struct {
	M_speed uint16 // Speed of car in kilometres per hour
	M_throttle uint8 // Amount of throttle applied (0 to 100)
	M_steer int8 // Steering (-100 (full lock left) to 100 (full lock right))
	M_brake uint8 // Amount of brake applied (0 to 100)
	M_clutch uint8 // Amount of clutch applied (0 to 100)
	M_gear int8 // Gear selected (1-8, N=0, R=-1)
	M_engineRPM uint16 // Engine RPM
	M_drs uint8 // 0 = off, 1 = on
	M_revLightsPercent uint8 // Rev lights indicator (percentage)
	M_brakesTemperature [4]uint16 // Brakes temperature (celsius)
	M_tyresSurfaceTemperature [4]uint16 // Tyres surface temperature (celsius)
	M_tyresInnerTemperature [4]uint16 // Tyres inner temperature (celsius)
	M_engineTemperature uint16 // Engine temperature (celsius)
	M_tyresPressure [4]float32 // Tyres pressure (PSI)
}
// PacketCarTelemetryData is the full car telemetry packet.
type PacketCarTelemetryData struct {
	M_header PacketHeader // Header
	M_carTelemetryData [20]CarTelemetryData
	M_buttonStatus uint32 // Bit flags specifying which buttons are being pressed currently - see appendices
}
// CAR STATUS PACKET:
// This packet details car statuses for all the cars in the race. It includes values such as the damage readings on the car.
// Frequency: 2 per second
// Size: 1061 bytes

// CarStatusData holds the status/damage state of a single car.
type CarStatusData struct {
	M_tractionControl uint8 // 0 (off) - 2 (high)
	M_antiLockBrakes uint8 // 0 (off) - 1 (on)
	M_fuelMix uint8 // Fuel mix - 0 = lean, 1 = standard, 2 = rich, 3 = max
	M_frontBrakeBias uint8 // Front brake bias (percentage)
	M_pitLimiterStatus uint8 // Pit limiter status - 0 = off, 1 = on
	M_fuelInTank float32 // Current fuel mass
	M_fuelCapacity float32 // Fuel capacity
	M_maxRPM uint16 // Cars max RPM, point of rev limiter
	M_idleRPM uint16 // Cars idle RPM
	M_maxGears uint8 // Maximum number of gears
	M_drsAllowed uint8 // 0 = not allowed, 1 = allowed, -1 = unknown
	M_tyresWear [4]uint8 // Tyre wear percentage
	M_tyreCompound uint8 // Modern - 0 = hyper soft, 1 = ultra soft, 2 = super soft, 3 = soft, 4 = medium, 5 = hard, 6 = super hard, 7 = inter, 8 = wet, Classic - 0-6 = dry, 7-8 = wet
	M_tyresDamage [4]uint8 // Tyre damage (percentage)
	M_frontLeftWingDamage uint8 // Front left wing damage (percentage)
	M_frontRightWingDamage uint8 // Front right wing damage (percentage)
	M_rearWingDamage uint8 // Rear wing damage (percentage)
	M_engineDamage uint8 // Engine damage (percentage)
	M_gearBoxDamage uint8 // Gear box damage (percentage)
	M_exhaustDamage uint8 // Exhaust damage (percentage)
	M_vehicleFiaFlags int8 // -1 = invalid/unknown, 0 = none, 1 = green, 2 = blue, 3 = yellow, 4 = red
	M_ersStoreEnergy float32 // ERS energy store in Joules
	M_ersDeployMode uint8 // ERS deployment mode, 0 = none, 1 = low, 2 = medium, 3 = high, 4 = overtake, 5 = hotlap
	M_ersHarvestedThisLapMGUK float32 // ERS energy harvested this lap by MGU-K
	M_ersHarvestedThisLapMGUH float32 // ERS energy harvested this lap by MGU-H
	M_ersDeployedThisLap float32 // ERS energy deployed this lap
}
// PacketCarStatusData is the full car status packet.
type PacketCarStatusData struct {
	M_header PacketHeader // Header
	M_carStatusData [20]CarStatusData
}
package ann
import (
"math/rand"
)
// Matrix is a dense, row-major matrix of float64 values.
type Matrix [][]float64

// Rows returns the number of rows in m.
func (m Matrix) Rows() int {
	return len(m)
}

// Cols returns the number of columns in m. It returns 0 for an empty
// matrix instead of panicking on the missing first row.
func (m Matrix) Cols() int {
	if len(m) == 0 {
		return 0
	}
	return len(m[0])
}
// Add adds each element of a to the corresponding element of m in place
// and returns m for chaining. It panics if the dimensions differ.
func (m Matrix) Add(a Matrix) Matrix {
	if a.Rows() != m.Rows() || a.Cols() != m.Cols() {
		panic("Can't add 2 different size matrices.")
	}
	for i := range m {
		for j := range m[i] {
			m[i][j] += a[i][j]
		}
	}
	return m
}
// Mult multiplies every element of m by x in place and returns m for
// chaining.
func (m Matrix) Mult(x float64) Matrix {
	for i := range m {
		for j := range m[i] {
			m[i][j] *= x
		}
	}
	return m
}
// ToArray flattens m into a 1d slice in row-major order.
func (m Matrix) ToArray() []float64 {
	out := make([]float64, 0, m.Rows()*m.Cols())
	for _, row := range m {
		out = append(out, row...)
	}
	return out
}
// NewMatrix allocates a rows x cols matrix filled with zeros.
func NewMatrix(rows, cols int) Matrix {
	m := make(Matrix, rows)
	for i := range m {
		m[i] = make([]float64, cols)
	}
	return m
}
// Transpose returns a new matrix that is the transpose of m.
func Transpose(m Matrix) Matrix {
	t := NewMatrix(m.Cols(), m.Rows())
	for i, row := range m {
		for j, v := range row {
			t[j][i] = v
		}
	}
	return t
}
// ToMatrix reshapes a flat row-major slice into a rows x cols Matrix.
// It panics if len(array) != rows*cols.
func ToMatrix(array []float64, rows int, cols int) Matrix {
	if len(array) != rows*cols {
		panic("Array length doesn't match rows and cols specified.")
	}
	m := NewMatrix(rows, cols)
	for i := range m {
		copy(m[i], array[i*cols:(i+1)*cols])
	}
	return m
}
// RandomWeightMatrix creates a rows x cols matrix with each element
// drawn uniformly from [-0.5, 0.5).
func RandomWeightMatrix(rows, cols int) Matrix {
	m := NewMatrix(rows, cols)
	for i := range m {
		for j := range m[i] {
			m[i][j] = rand.Float64() - 0.5
		}
	}
	return m
}
// Dot returns the matrix product of a and b as a new matrix.
// It panics if a.Cols() != b.Rows().
func Dot(a, b Matrix) Matrix {
	if a.Cols() != b.Rows() {
		panic("Rows != Cols")
	}
	out := NewMatrix(a.Rows(), b.Cols())
	for i := range out {
		for j := range out[i] {
			var sum float64
			for k, bRow := range b {
				sum += a[i][k] * bRow[j]
			}
			out[i][j] = sum
		}
	}
	return out
}
// Sub returns a new matrix whose elements are a minus the corresponding
// elements of b. It panics if the dimensions differ.
func Sub(a, b Matrix) Matrix {
	if a.Rows() != b.Rows() || a.Cols() != b.Cols() {
		panic("Can't subtract 2 different size matrices.")
	}
	out := NewMatrix(a.Rows(), a.Cols())
	for i, row := range a {
		for j, v := range row {
			out[i][j] = v - b[i][j]
		}
	}
	return out
}
// Calls `fn` on every element in `m` and returns a new Matrix
func Map(m Matrix, fn func(x float64) float64) Matrix {
matrix := NewMatrix(m.Rows(), m.Cols())
for i := 0; i < m.Rows(); i++ {
for j := 0; j < m.Cols(); j++ {
matrix[i][j] = fn(m[i][j])
}
}
return matrix
} | ann/matrix.go | 0.843219 | 0.560493 | matrix.go | starcoder |
package nl
import "github.com/MaxSlyugrov/cldr"
// calendar holds the CLDR date/time formatting data for the Dutch ("nl")
// locale.
// NOTE(review): generated data — the wide AM/PM names mix "voormiddag"
// with "p.m."; confirm against the upstream CLDR source before editing.
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date: cldr.CalendarDateFormat{Full: "EEEE d MMMM y", Long: "d MMMM y", Medium: "d MMM y", Short: "dd-MM-yy"},
		Time: cldr.CalendarDateFormat{Full: "HH:mm:ss zzzz", Long: "HH:mm:ss z", Medium: "HH:mm:ss", Short: "HH:mm"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "Jan.", Feb: "Feb.", Mar: "Mrt.", Apr: "Apr.", May: "Mei", Jun: "Jun.", Jul: "Jul.", Aug: "Aug.", Sep: "Sep.", Oct: "Okt.", Nov: "Nov.", Dec: "Dec."},
			Narrow: cldr.CalendarMonthFormatNameValue{Jan: "J", Feb: "F", Mar: "M", Apr: "A", May: "M", Jun: "J", Jul: "J", Aug: "A", Sep: "S", Oct: "O", Nov: "N", Dec: "D"},
			Short: cldr.CalendarMonthFormatNameValue{},
			Wide: cldr.CalendarMonthFormatNameValue{Jan: "Januari", Feb: "Februari", Mar: "Maart", Apr: "April", May: "Mei", Jun: "Juni", Jul: "Juli", Aug: "Augustus", Sep: "September", Oct: "Oktober", Nov: "November", Dec: "December"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "Zo", Mon: "Ma", Tue: "Di", Wed: "Wo", Thu: "Do", Fri: "Vr", Sat: "Za"},
			Narrow: cldr.CalendarDayFormatNameValue{Sun: "Z", Mon: "M", Tue: "D", Wed: "W", Thu: "D", Fri: "V", Sat: "Z"},
			Short: cldr.CalendarDayFormatNameValue{Sun: "Zo", Mon: "Ma", Tue: "Di", Wed: "Wo", Thu: "Do", Fri: "Vr", Sat: "Za"},
			Wide: cldr.CalendarDayFormatNameValue{Sun: "Zondag", Mon: "Maandag", Tue: "Dinsdag", Wed: "Woensdag", Thu: "Donderdag", Fri: "Vrijdag", Sat: "Zaterdag"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{AM: "a.m.", PM: "p.m."},
			Narrow: cldr.CalendarPeriodFormatNameValue{AM: "a.m.", PM: "p.m."},
			Short: cldr.CalendarPeriodFormatNameValue{},
			Wide: cldr.CalendarPeriodFormatNameValue{AM: "voormiddag", PM: "p.m."},
		},
	},
}
package tree
import (
"strconv"
"strings"
)
// Tree is a binary tree
type Tree struct {
Left *Tree
Value int
Right *Tree
}
const terminationSymbol = "#"
// NewFromPreOrderedSeq creates a new binary tree from a pre-ordered sequence
// of values. Terminations (nil children) are marked with the termination
// symbol "#". Returns nil for an empty sequence.
func NewFromPreOrderedSeq(values []string) *Tree {
	if len(values) == 0 {
		return nil
	}
	// Any values remaining after the first complete tree are discarded.
	root, _ := buildPreOrderedTree(values)
	return root
}
// buildNode creates a leaf node from a string value. It returns nil for the
// termination symbol, or for any value that does not parse as an integer.
func buildNode(value string) *Tree {
	// NOTE(review): strings.Compare(a, b) == 0 is equivalent to a == b; kept
	// as-is because this is the file's only use of the strings import.
	if strings.Compare(value, terminationSymbol) == 0 {
		return nil
	}
	v, err := strconv.Atoi(value)
	if err != nil {
		// Malformed input is treated the same as a termination marker.
		return nil
	}
	return &Tree{Value: v}
}
// buildPreOrderedTree consumes the head of values to build one subtree and
// returns that subtree together with the values left unconsumed.
func buildPreOrderedTree(values []string) (*Tree, []string) {
	node := buildNode(values[0])
	rest := values[1:]
	if node == nil {
		return nil, rest
	}
	// Pre-order layout: value, then the whole left subtree, then the right.
	node.Left, rest = buildPreOrderedTree(rest)
	node.Right, rest = buildPreOrderedTree(rest)
	return node, rest
}
// walkPreOrder traverses t in pre-order (value, left, right), sending each
// value on ch. A nil subtree sends the termination symbol so the tree shape
// is preserved in the stream. A receive on quit aborts the traversal.
func walkPreOrder(t *Tree, ch chan string, quit chan int) {
	if t == nil {
		// Honor quit for the termination send too, so the walker cannot
		// block forever on a receiver that has stopped reading.
		select {
		case ch <- terminationSymbol:
		case <-quit:
		}
		return
	}
	select {
	case ch <- strconv.Itoa(t.Value):
	case <-quit:
		return
	}
	walkPreOrder(t.Left, ch, quit)
	walkPreOrder(t.Right, ch, quit)
}
// WalkPreOrder traverses the binary tree in pre-order and sends the values in
// the provided channel. The channel is closed when the traversal finishes, so
// callers may range over it.
func WalkPreOrder(t *Tree, ch chan string, quit chan int) {
	walkPreOrder(t, ch, quit)
	close(ch)
}
// walkInOrder traverses t in in-order (left, value, right), sending each
// value on ch. A nil subtree sends the termination symbol. A receive on quit
// aborts the traversal.
func walkInOrder(t *Tree, ch chan string, quit chan int) {
	// The nil check must run before recursing: dereferencing t.Left on a nil
	// node panics. (Previously the recursion preceded this check.)
	if t == nil {
		select {
		case ch <- terminationSymbol:
		case <-quit:
		}
		return
	}
	walkInOrder(t.Left, ch, quit)
	select {
	case ch <- strconv.Itoa(t.Value):
	case <-quit:
		return
	}
	walkInOrder(t.Right, ch, quit)
}
// WalkInOrder traverses the binary tree in in-order and sends the values in
// the provided channel. The channel is closed when the traversal finishes.
func WalkInOrder(t *Tree, ch chan string, quit chan int) {
	walkInOrder(t, ch, quit)
	close(ch)
}
// walkPostOrder traverses t in post-order (left, right, value), sending each
// value on ch. A nil subtree sends the termination symbol. A receive on quit
// aborts the traversal.
func walkPostOrder(t *Tree, ch chan string, quit chan int) {
	// The nil check must run before recursing: dereferencing t.Left on a nil
	// node panics. (Previously the recursion preceded this check.)
	if t == nil {
		select {
		case ch <- terminationSymbol:
		case <-quit:
		}
		return
	}
	walkPostOrder(t.Left, ch, quit)
	walkPostOrder(t.Right, ch, quit)
	select {
	case ch <- strconv.Itoa(t.Value):
	case <-quit:
		return
	}
}
// WalkPostOrder traverses the binary tree in post-order and sends the values
// in the provided channel. The channel is closed when the traversal finishes.
func WalkPostOrder(t *Tree, ch chan string, quit chan int) {
	walkPostOrder(t, ch, quit)
	close(ch)
}
package circuit
import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"fmt"
"github.com/markkurossi/mpc/ot"
)
var (
verbose = false
)
// idxUnary returns the garbled-table row index selected by a single label:
// 1 if the label's select bit is set, 0 otherwise.
func idxUnary(l0 ot.Label) int {
	if l0.S() {
		return 1
	}
	return 0
}
// idx returns the garbled-table row index (0..3) selected by two labels:
// bit 1 comes from l0's select bit, bit 0 from l1's select bit.
func idx(l0, l1 ot.Label) int {
	var ret int
	if l0.S() {
		ret |= 0x2
	}
	if l1.S() {
		ret |= 0x1
	}
	return ret
}
// encrypt produces one garbled-table entry: the output label c encrypted
// under input labels a and b with the gate-unique tweak t, computed as
// pi(K) XOR K XOR c where K = makeK(a, b, t) and pi is the fixed-key AES
// permutation. data is a caller-provided scratch buffer.
func encrypt(alg cipher.Block, a, b, c ot.Label, t uint32,
	data *ot.LabelData) ot.Label {
	k := makeK(a, b, t)
	k.GetData(data)
	alg.Encrypt(data[:], data[:])
	var pi ot.Label
	pi.SetData(data)
	pi.Xor(k)
	pi.Xor(c)
	return pi
}
// decrypt inverts encrypt: given the input labels a and b, the tweak t, and
// the table entry c, it recovers the output label by re-deriving
// pi(K) XOR K and XORing it out of c.
func decrypt(alg cipher.Block, a, b ot.Label, t uint32, c ot.Label,
	data *ot.LabelData) ot.Label {
	k := makeK(a, b, t)
	k.GetData(data)
	alg.Encrypt(data[:], data[:])
	var crypted ot.Label
	crypted.SetData(data)
	c.Xor(crypted)
	c.Xor(k)
	return c
}
// makeK derives the per-entry key K = 2a XOR 4b XOR tweak(t). The doublings
// keep the two input labels linearly independent inside the key.
func makeK(a, b ot.Label, t uint32) ot.Label {
	a.Mul2()
	b.Mul4()
	a.Xor(b)
	a.Xor(ot.NewTweak(t))
	return a
}
// encryptHalf is the hash function for half gates: Hπ(x, i) is π(K) ⊕ K
// where K = 2x ⊕ i and π is the fixed-key AES permutation. data is a
// caller-provided scratch buffer.
func encryptHalf(alg cipher.Block, x ot.Label, i uint32,
	data *ot.LabelData) ot.Label {
	k := makeKHalf(x, i)
	k.GetData(data)
	alg.Encrypt(data[:], data[:])
	var pi ot.Label
	pi.SetData(data)
	pi.Xor(k)
	return pi
}
// makeKHalf derives the half-gate hash key K = 2x ⊕ i.
func makeKHalf(x ot.Label, i uint32) ot.Label {
	x.Mul2()
	x.Xor(ot.NewTweak(i))
	return x
}
// makeLabels creates a fresh wire: a random label L0 paired with
// L1 = L0 XOR r, where r is the circuit's global free-XOR offset.
func makeLabels(r ot.Label) (ot.Wire, error) {
	l0, err := ot.NewLabel(rand.Reader)
	if err != nil {
		return ot.Wire{}, err
	}
	l1 := l0
	l1.Xor(r)
	return ot.Wire{
		L0: l0,
		L1: l1,
	}, nil
}
// Garbled contains garbled circuit information.
type Garbled struct {
	// R is the global free-XOR offset; every wire's labels satisfy
	// L1 = L0 XOR R (see makeLabels).
	R ot.Label
	// Wires holds the label pair for each circuit wire.
	Wires []ot.Wire
	// Gates holds the garbled table rows for each gate.
	Gates [][]ot.Label
}
// Lambda returns the lambda value of the wire: the select bit of its
// zero label, as 0 or 1.
func (g *Garbled) Lambda(wire Wire) uint {
	if g.Wires[int(wire)].L0.S() {
		return 1
	}
	return 0
}
// SetLambda sets the lambda value of the wire: any non-zero val sets the
// select bit of the wire's zero label. Wires are stored by value, so the
// element is copied out, mutated, and written back.
func (g *Garbled) SetLambda(wire Wire, val uint) {
	w := g.Wires[int(wire)]
	// val != 0 collapses the previous if/else into one expression.
	w.L0.SetS(val != 0)
	g.Wires[int(wire)] = w
}
// Garble garbles the circuit under the given AES key, returning the global
// offset R, the label pair for every wire, and the garbled table rows for
// every gate.
func (c *Circuit) Garble(key []byte) (*Garbled, error) {
	// Create R with its select bit set so that XORing a label with R always
	// flips the label's select bit.
	r, err := ot.NewLabel(rand.Reader)
	if err != nil {
		return nil, err
	}
	r.SetS(true)

	garbled := make([][]ot.Label, c.NumGates)

	alg, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}

	// Wire labels.
	wires := make([]ot.Wire, c.NumWires)

	// Assign all input wires.
	for i := 0; i < c.Inputs.Size(); i++ {
		w, err := makeLabels(r)
		if err != nil {
			return nil, err
		}
		wires[i] = w
	}

	// Garble gates. The scratch buffer and the tweak counter are shared
	// across all gates.
	var data ot.LabelData
	var id uint32
	for i := 0; i < len(c.Gates); i++ {
		gate := &c.Gates[i]
		// Named `table` (not `data`) so the gate's result does not shadow
		// the scratch buffer passed by address on the same line.
		table, err := gate.garble(wires, alg, r, &id, &data)
		if err != nil {
			return nil, err
		}
		garbled[i] = table
	}

	return &Garbled{
		R:     r,
		Wires: wires,
		Gates: garbled,
	}, nil
}
// garble garbles the gate and returns its table rows. XOR/XNOR are free
// (no rows); AND uses the two-row half-gates construction; OR and INV use a
// classic garbled table with row reduction (the all-zero first row is
// dropped). idp is the circuit-wide tweak counter and data a reusable
// scratch buffer.
func (g *Gate) garble(wires []ot.Wire, enc cipher.Block, r ot.Label,
	idp *uint32, data *ot.LabelData) ([]ot.Label, error) {
	var a, b, c ot.Wire
	var table [4]ot.Label
	var start, count int
	// Inputs.
	switch g.Op {
	case XOR, XNOR, AND, OR:
		b = wires[g.Input1.ID()]
		fallthrough
	case INV:
		a = wires[g.Input0.ID()]
	default:
		return nil, fmt.Errorf("invalid gate type %s", g.Op)
	}
	// Output.
	switch g.Op {
	case XOR:
		// Free XOR: output labels are just the XOR of the input labels.
		l0 := a.L0
		l0.Xor(b.L0)
		l1 := l0
		l1.Xor(r)
		c = ot.Wire{
			L0: l0,
			L1: l1,
		}
	case XNOR:
		// Same as XOR with the output labels swapped.
		l0 := a.L0
		l0.Xor(b.L0)
		l1 := l0
		l1.Xor(r)
		c = ot.Wire{
			L0: l1,
			L1: l0,
		}
	case AND:
		// Half-gates AND: one garbler half gate and one evaluator half
		// gate, two table rows total (tg and te).
		pa := a.L0.S()
		pb := b.L0.S()
		j0 := *idp
		j1 := *idp + 1
		*idp = *idp + 2

		// First half gate.
		tg := encryptHalf(enc, a.L0, j0, data)
		tg.Xor(encryptHalf(enc, a.L1, j0, data))
		if pb {
			tg.Xor(r)
		}
		wg0 := encryptHalf(enc, a.L0, j0, data)
		if pa {
			wg0.Xor(tg)
		}

		// Second half gate.
		te := encryptHalf(enc, b.L0, j1, data)
		te.Xor(encryptHalf(enc, b.L1, j1, data))
		te.Xor(a.L0)
		we0 := encryptHalf(enc, b.L0, j1, data)
		if pb {
			we0.Xor(te)
			we0.Xor(a.L0)
		}

		// Combine halves
		l0 := wg0
		l0.Xor(we0)
		l1 := l0
		l1.Xor(r)

		c = ot.Wire{
			L0: l0,
			L1: l1,
		}
		table[0] = tg
		table[1] = te
		count = 2

	case OR, INV:
		// Row reduction creates labels below so that the first row is
		// all zero.

	default:
		panic("invalid gate type")
	}

	switch g.Op {
	case XOR, XNOR:
		// Free XOR.
	case AND:
		// Half AND garbled above.
	case OR:
		// a b c
		// -----
		// 0 0 0
		// 0 1 1
		// 1 0 1
		// 1 1 1
		id := *idp
		*idp = *idp + 1
		table[idx(a.L0, b.L0)] = encrypt(enc, a.L0, b.L0, c.L0, id, data)
		table[idx(a.L0, b.L1)] = encrypt(enc, a.L0, b.L1, c.L1, id, data)
		table[idx(a.L1, b.L0)] = encrypt(enc, a.L1, b.L0, c.L1, id, data)
		table[idx(a.L1, b.L1)] = encrypt(enc, a.L1, b.L1, c.L1, id, data)

		// Derive the output labels from row 0 so that row 0 becomes
		// all-zero after the XOR below and can be omitted (start = 1).
		l0Index := idx(a.L0, b.L0)
		c.L0 = table[0]
		c.L1 = table[0]
		if l0Index == 0 {
			c.L1.Xor(r)
		} else {
			c.L0.Xor(r)
		}
		for i := 0; i < 4; i++ {
			if i == l0Index {
				table[i].Xor(c.L0)
			} else {
				table[i].Xor(c.L1)
			}
		}
		start = 1
		count = 3

	case INV:
		// a b c
		// -----
		// 0    1
		// 1    0
		id := *idp
		*idp = *idp + 1
		table[idxUnary(a.L0)] = encrypt(enc, a.L0, ot.Label{}, c.L1, id, data)
		table[idxUnary(a.L1)] = encrypt(enc, a.L1, ot.Label{}, c.L0, id, data)

		// Same row reduction as OR, for the two-row unary table.
		l0Index := idxUnary(a.L0)
		c.L0 = table[0]
		c.L1 = table[0]
		if l0Index == 0 {
			c.L0.Xor(r)
		} else {
			c.L1.Xor(r)
		}
		for i := 0; i < 2; i++ {
			if i == l0Index {
				table[i].Xor(c.L1)
			} else {
				table[i].Xor(c.L0)
			}
		}
		start = 1
		count = 1

	default:
		return nil, fmt.Errorf("invalid operand %s", g.Op)
	}

	wires[g.Output.ID()] = c
	return table[start : start+count], nil
}
package openflow
import (
"fmt"
"os/exec"
"strings"
"github.com/kelda/kelda/counter"
"github.com/kelda/kelda/minion/ipdef"
"github.com/kelda/kelda/minion/ovsdb"
)
/* OpenFlow Pseudocode -- Please, for the love of God, keep this updated.
OpenFlow is extremely difficult to reason about -- especially when it's buried in Go code.
This comment aims to make it a bit easier to maintain by describing abstractly what the
OpenFlow code does, without the distraction of the go code required to implement it.
Interpreting the Psuedocode
---------------------------
The OpenFlow code is divided into a series of tables. Packets start at Table_0 and only
move to another table if explicitly instructed to by a `goto` statement.
Each table is composed of a series of if statements. Packets match either one or zero of
these statements. If they match zero they're dropped, if they match more than one then
the statement that appears first in the table is chosen.
Each if statement has one or more actions associated with it. Packets matching the
statement execute those actions in order. If one of those actions is a goto statement,
the packet is forwarded to the specified table and the process begins again.
Finally, note that some tables have loops which should be interpreted as duplicating the
inner if statements per loop element.
Registers
---------
The psuedocode currently uses three registers:
Reg0 -- Contains the OpenFlow port number of the patch port if the packet came from a
veth. Otherwise it contains zero.
Tables
------
// Table_0 initializes the registers and forwards to Table_1.
Table_0 { // Initial Table
for each db.Container {
if in_port=dbc.VethPort && dl_src=dbc.Mac {
reg0 <- dbc.PatchPort
goto Table_1
}
if in_port=dbc.PatchPort {
output:dbc.VethPort
}
}
if in_port=LOCAL {
goto Table_2
}
}
// Table_1 handles packets coming from a veth.
Table_1 {
// Send broadcasts to the gateway and patch port.
if arp,dl_dst=ff:ff:ff:ff:ff:ff {
output:LOCAL,reg0
}
// Send packets from the veth to the gateway.
if dl_dst=gwMac {
goto Table_3
}
// Everything else can be handled by OVN.
output:reg0
}
// Table_2 forwards packets coming from the LOCAL port.
Table_2 {
// If the gateway sends a broadcast, send it to all veths.
if dl_dst=ff:ff:ff:ff:ff:ff {
output:veth{1..n}
}
for each db.Container {
// The gateway may send unicast arps to the container.
if arp && dl_dst=dbc.mac {
output:veth
}
// Packets originated by the gateway (i.e. DNS) are allowed.
if ip && dl_dst=dbc.mac && nw_src=gwIP {
output:veth
}
for each toPub {
// Response packets have toPub as the source port.
[tcp|udp],dl_dst=dbc.mac,ip_dst=dbc.ip,tp_src=toPub,
actions=output:veth
}
for each fromPub {
// Inbound packets have toPub as the destination port.
[tcp|udp],dl_dst=dbc.mac,ip_dst=dbc.ip,tp_dst=fromPub,
actions=output:veth
}
}
}
// Table_3 forwards unicast packets going to LOCAL port, or drops them if they are
// disallowed.
Table_3 {
// Containers are allowed to send packets destined for the gateway.
if ip && nw_dst=gwIP {
output:LOCAL
}
// Containers are allowed to ARP the gateway.
if arp {
output:LOCAL
}
for each db.Container {
for each toPub {
// Outbound packets have fromPub as the destination port.
[tcp|udp],dl_src=dbc.mac,ip_src=dbc.ip,tp_dst=toPub,
actions=output:LOCAL
}
for each fromPub {
// Response packets have fromPub as the source port.
[tcp|udp],dl_src=dbc.mac,ip_src=dbc.ip,tp_src=fromPub,
actions=output:LOCAL
}
}
}
*/
// A Container that needs OpenFlow rules installed for it.
type Container struct {
	Veth  string // name of the container's veth interface
	Patch string // name of the container's OVS patch port
	Mac   string // the container's MAC address
	IP    string // the container's IP address

	// Set of ports going to and from the public internet.
	ToPub   map[int]struct{}
	FromPub map[int]struct{}
}
// container pairs a Container with the OpenFlow port numbers resolved from
// its veth and patch port names (see resolveContainers).
type container struct {
	Container

	vethPort  int // OpenFlow port number of the veth interface
	patchPort int // OpenFlow port number of the patch port
}
var c = counter.New("OpenFlow")
var staticFlows = []string{
// Table 0
"table=0,priority=1000,in_port=LOCAL,actions=resubmit(,2)",
// Table 1
"table=1,priority=1000,arp,dl_dst=ff:ff:ff:ff:ff:ff," +
"actions=output:LOCAL,output:NXM_NX_REG0[]",
fmt.Sprintf("table=1,priority=900,dl_dst=%s,actions=resubmit(,3)",
ipdef.GatewayMac),
"table=1,priority=800,actions=output:NXM_NX_REG0[]",
// Table 3
fmt.Sprintf("table=3,priority=1000,ip,nw_dst=%s,actions=output:LOCAL",
ipdef.GatewayIP),
"table=3,priority=900,arp,actions=output:LOCAL",
}
// replaceFlows computes the complete flow table for the given containers and
// replaces the flows installed on the Kelda bridge with it.
func replaceFlows(containers []Container) error {
	c.Inc("Replace Flows")

	ofports, err := openflowPorts()
	if err != nil {
		return err
	}

	flows := allFlows(resolveContainers(ofports, containers))

	// XXX: Due to a bug in `ovs-ofctl replace-flows`, certain flows are
	// replaced even if they do not differ. `diff-flows` already has a fix to
	// this problem, so for now we only run `replace-flows` when `diff-flows`
	// reports no changes. The `diff-flows` check should be removed once
	// `replace-flows` is fixed upstream.
	if ofctl("diff-flows", flows) != nil {
		c.Inc("Flows Changed")
		if err := ofctl("replace-flows", flows); err != nil {
			return fmt.Errorf("ovs-ofctl: %s", err)
		}
	}

	return nil
}
// AddFlows adds flows associated with the provided containers without touching
// flows that may already be installed.
func AddFlows(containers []Container) error {
	c.Inc("Add Flows")

	ofports, err := openflowPorts()
	if err != nil {
		return err
	}

	// Containers whose ports are not yet known to OVS are silently skipped
	// by resolveContainers.
	flows := allContainerFlows(resolveContainers(ofports, containers))
	if err := ofctl("add-flows", flows); err != nil {
		return fmt.Errorf("ovs-ofctl: %s", err)
	}

	return nil
}
// allContainerFlows concatenates the per-container OpenFlow rules of every
// container in the slice.
func allContainerFlows(containers []container) []string {
	var flows []string
	// `cntr` avoids shadowing the package-level counter variable `c`.
	for _, cntr := range containers {
		flows = append(flows, containerFlows(cntr)...)
	}
	return flows
}
// containerFlows builds the OpenFlow rules for one container: the table 0
// veth/patch plumbing, table 2 rules admitting gateway-originated traffic,
// and per-port table 2/3 rules for its public-internet connections.
// NOTE(review): the parameter name c shadows the package-level counter
// variable `c`; consider renaming when convenient.
func containerFlows(c container) []string {
	flows := []string{
		// Table 0
		fmt.Sprintf("table=0,in_port=%d,dl_src=%s,"+
			"actions=load:0x%x->NXM_NX_REG0[],resubmit(,1)",
			c.vethPort, c.Mac, c.patchPort),
		fmt.Sprintf("table=0,in_port=%d,actions=output:%d",
			c.patchPort, c.vethPort),

		// Table 2
		fmt.Sprintf("table=2,priority=900,arp,dl_dst=%s,action=output:%d",
			c.Mac, c.vethPort),
		fmt.Sprintf("table=2,priority=800,ip,dl_dst=%s,nw_src=%s,"+
			"action=output:%d", c.Mac, ipdef.GatewayIP, c.vethPort),
	}

	// Outbound connections: response packets carry the public port as the
	// source port (table 2, inbound to the veth) and outbound packets carry
	// it as the destination port (table 3, toward LOCAL).
	table2 := "table=2,priority=500,%s,dl_dst=%s,ip_dst=%s,tp_src=%d," +
		"actions=output:%d"
	table3 := "table=3,priority=500,%s,dl_src=%s,ip_src=%s,tp_dst=%d," +
		"actions=output:LOCAL"
	for to := range c.Container.ToPub {
		flows = append(flows,
			fmt.Sprintf(table2, "tcp", c.Mac, c.IP, to, c.vethPort),
			fmt.Sprintf(table2, "udp", c.Mac, c.IP, to, c.vethPort),
			fmt.Sprintf(table3, "tcp", c.Mac, c.IP, to),
			fmt.Sprintf(table3, "udp", c.Mac, c.IP, to))
	}

	// Inbound connections: the mirror image — the public port is the
	// destination on the way in and the source on the way out.
	table2 = "table=2,priority=500,%s,dl_dst=%s,ip_dst=%s,tp_dst=%d," +
		"actions=output:%d"
	table3 = "table=3,priority=500,%s,dl_src=%s,ip_src=%s,tp_src=%d," +
		"actions=output:LOCAL"
	for from := range c.Container.FromPub {
		flows = append(flows,
			fmt.Sprintf(table2, "tcp", c.Mac, c.IP, from, c.vethPort),
			fmt.Sprintf(table2, "udp", c.Mac, c.IP, from, c.vethPort),
			fmt.Sprintf(table3, "tcp", c.Mac, c.IP, from),
			fmt.Sprintf(table3, "udp", c.Mac, c.IP, from))
	}

	return flows
}
// allFlows returns the complete flow table: the static flows, the flows of
// every container, and a broadcast rule flooding gateway broadcasts to all
// veths.
// NOTE(review): the loop variable c shadows the package-level counter `c`.
func allFlows(containers []container) []string {
	var gatewayBroadcastActions []string
	for _, c := range containers {
		gatewayBroadcastActions = append(gatewayBroadcastActions,
			fmt.Sprintf("output:%d", c.vethPort))
	}
	flows := append(staticFlows, allContainerFlows(containers)...)
	return append(flows, "table=2,priority=1000,dl_dst=ff:ff:ff:ff:ff:ff,actions="+
		strings.Join(gatewayBroadcastActions, ","))
}
// resolveContainers maps each Container's veth and patch port names to their
// OpenFlow port numbers via portMap, dropping containers whose ports are not
// (yet) present in OVS.
func resolveContainers(portMap map[string]int, containers []Container) []container {
	var ofcs []container
	// `cntr` avoids shadowing the package-level counter variable `c`.
	for _, cntr := range containers {
		veth, okVeth := portMap[cntr.Veth]
		patch, okPatch := portMap[cntr.Patch]
		if !okVeth || !okPatch {
			continue
		}
		ofcs = append(ofcs, container{
			Container: cntr,
			patchPort: patch,
			vethPort:  veth,
		})
	}
	return ofcs
}
// openflowPorts queries ovsdb-server for the mapping from interface name to
// OpenFlow port number.
func openflowPorts() (map[string]int, error) {
	odb, err := ovsdb.Open()
	if err != nil {
		return nil, fmt.Errorf("ovsdb-server connection: %s", err)
	}
	defer odb.Disconnect()

	return odb.OpenFlowPorts()
}
var ofctl = func(action string, flows []string) error {
c.Inc("ovs-ofctl")
cmd := exec.Command("ovs-ofctl", "-O", "OpenFlow13", action,
ipdef.KeldaBridge, "/dev/stdin")
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
if err := cmd.Start(); err != nil {
return err
}
for _, f := range flows {
stdin.Write([]byte(f + "\n"))
}
stdin.Close()
return cmd.Wait()
} | minion/network/openflow/openflow.go | 0.5144 | 0.434161 | openflow.go | starcoder |
package pyfmt
import (
"errors"
"fmt"
"strconv"
"strings"
"unicode/utf8"
)
// flags holds the decoded pieces of a single format specification.
type flags struct {
	fillChar   rune   // padding rune; 0 when unset
	align      int    // alignment mode constant (left, right, padSign, center)
	sign       string // explicit sign flag ("+" or " "); empty for the default
	showRadix  bool   // '#' flag: show the radix prefix
	minWidth   string // minimum field width digits, as parsed
	precision  string // precision including the leading '.'
	renderVerb string // fmt verb used to render the value
	percent    bool   // '%' verb: render as a percentage
	empty      bool   // the specification was empty; use default formatting
}
// Render is the renderer used to render dispatched format strings into a buffer that's been set up
// beforehand.
type render struct {
	buf *buffer     // destination buffer
	val interface{} // the value being formatted

	flags // parsed format specification for the current element
}
// init attaches the renderer to its output buffer and resets all flags.
func (r *render) init(buf *buffer) {
	r.buf = buf
	r.clearFlags()
}
// clearFlags resets the format flags to their zero values.
func (r *render) clearFlags() {
	r.flags = flags{}
}
// Flag state machine
const (
alignState = iota
signState
radixState
zeroState
widthState
precisionState
verbState
endState
)
// validFlag reports whether b is one of the recognized format verbs:
// 'b', 'd', 'o', 'x', 'X', 'e', 'E', 'f', 'F', 'g', 'G', 'r', 't', 's', '%'.
func validFlag(b byte) bool {
	return strings.IndexByte("bdoxXeEfFgGrts%", b) >= 0
}

// isDigit reports whether d is an ASCII decimal digit.
func isDigit(d byte) bool {
	return d >= '0' && d <= '9'
}

// splitFlags splits a format specification into its component fields using a
// small state machine that walks the string left to right. Any input that
// does not fully match the grammar yields an error.
func splitFlags(flags string) (align, sign, radix, zeroPad, minWidth, precision, verb string, err error) {
	end := len(flags)
	if end == 0 {
		return
	}
	state := alignState
	for i := 0; i < end; {
		switch state {
		case alignState:
			// Alignment is either a bare align rune, or a fill rune
			// (possibly multi-byte) followed by an align rune.
			if flags[i] == '<' || flags[i] == '>' || flags[i] == '=' || flags[i] == '^' {
				i = 1
			}
			if end > 1 {
				_, size := utf8.DecodeRuneInString(flags)
				// Guard: a lone multi-byte rune may span to the end of the
				// string, in which case there is no align character to
				// inspect (previously this indexed out of range).
				if size < end && (flags[size] == '<' || flags[size] == '>' || flags[size] == '=' || flags[size] == '^') {
					i = size + 1
				}
			}
			align = flags[0:i]
			state = signState
		case signState:
			if flags[i] == '+' || flags[i] == '-' || flags[i] == ' ' {
				sign = flags[i : i+1]
				i++
			}
			state = radixState
		case radixState:
			if flags[i] == '#' {
				radix = flags[i : i+1]
				i++
			}
			state = zeroState
		case zeroState:
			if flags[i] == '0' {
				zeroPad = flags[i : i+1]
				i++
			}
			state = widthState
		case widthState:
			var j int
			for j = i; j < end; {
				if isDigit(flags[j]) {
					j++
				} else {
					break
				}
			}
			minWidth = flags[i:j]
			i = j
			state = precisionState
		case precisionState:
			if flags[i] == '.' {
				var j int
				for j = i + 1; j < end; {
					if isDigit(flags[j]) {
						j++
					} else {
						break
					}
				}
				precision = flags[i:j]
				i = j
			}
			state = verbState
		case verbState:
			if validFlag(flags[i]) {
				verb = flags[i : i+1]
				i++
			}
			state = endState
		default:
			// Get to this state when we've run out of other states. If we reach this, it means we've
			// gone too far, since we've passed the verb state, but aren't at the end of the string, so
			// error.
			err = errors.New("Could not decode format specification: " + flags)
			i = end + 1
		}
	}
	return
}
// parseFlags decodes a format specification string into the renderer's flag
// fields. An empty spec marks the renderer as empty (default formatting).
// Returns an error when the spec cannot be decoded.
func (r *render) parseFlags(flags string) error {
	r.renderVerb = "v"
	if flags == "" {
		r.empty = true
		return nil
	}
	align, sign, radix, zeroPad, minWidth, precision, verb, err := splitFlags(flags)
	if err != nil {
		return Error("Invalid flag pattern: {}, {}", flags, err)
	}
	if len(align) > 1 {
		// More than one byte: the first rune is a fill character preceding
		// the align character.
		var size int
		r.fillChar, size = utf8.DecodeRuneInString(align)
		align = align[size:]
	}
	if align != "" {
		switch align {
		case "<":
			r.align = left
		case ">":
			r.align = right
		case "=":
			r.align = padSign
		case "^":
			r.align = center
		default:
			panic("Unreachable, this should never happen.")
		}
	}
	if sign != "" {
		// "-" is the default behavior, ignore it.
		if sign != "-" {
			r.sign = sign
		}
	}
	if radix == "#" {
		r.showRadix = true
	}
	if zeroPad != "" {
		// "0" implies sign-aware zero padding unless overridden.
		if align == "" {
			r.align = padSign
		}
		if r.fillChar == 0 {
			r.fillChar = '0'
		}
	}
	if minWidth != "" {
		r.minWidth = minWidth
	}
	if precision != "" {
		r.precision = precision
	}
	if verb != "" {
		// Map pyfmt verbs onto fmt verbs.
		switch verb {
		case "b", "o", "x", "X", "e", "E", "f", "F", "g", "G":
			r.renderVerb = verb
		case "d":
			r.renderVerb = verb
			r.showRadix = false
		case "%":
			r.percent = true
			r.renderVerb = "f"
		case "r":
			r.renderVerb = "#v"
		case "t":
			r.renderVerb = "T"
		case "s":
			r.renderVerb = "+v"
		default:
			panic("Unreachable, this should never happen. Flag parsing regex is corrupted.")
		}
	}
	return nil
}
// render renders a single element by passing that element and the translated format string
// into the fmt formatter, then applies the alignment, radix-prefix, and
// percent transformations that fmt does not handle itself.
func (r *render) render() error {
	var prefix, radix string
	var width int64
	var err error
	if r.empty {
		// No format spec at all: plain default formatting.
		fmt.Fprint(r.buf, r.val)
		return nil
	}
	if r.percent {
		if err = r.setupPercent(); err != nil {
			return err
		}
	}
	if r.showRadix {
		// fmt's '#' only covers hex; 0b/0o prefixes are stitched in manually.
		if r.renderVerb == "x" || r.renderVerb == "X" {
			radix = "#"
		} else if r.renderVerb == "b" {
			prefix = "0b"
		} else if r.renderVerb == "o" {
			prefix = "0o"
		}
	}
	if r.minWidth == "" {
		width = 0
	} else {
		width, err = strconv.ParseInt(r.minWidth, 10, 64)
		if err != nil {
			return Error("Can't convert width {} to int", r.minWidth)
		}
	}
	// Only let Go handle the width for floating+complex types, elsewhere the alignment rules are
	// different.
	if r.renderVerb != "f" && r.renderVerb != "F" && r.renderVerb != "g" && r.renderVerb != "G" && r.renderVerb != "e" && r.renderVerb != "E" {
		r.minWidth = ""
	}
	str := fmt.Sprintf("%"+r.sign+radix+r.minWidth+r.precision+r.renderVerb, r.val)
	if prefix != "" {
		// Get rid of any prefix added by minWidth. We'll add this back in later when we
		// WriteAlignedString to the underlying buffer
		str = strings.TrimLeft(str, " ")
		if str != string(r.fillChar) {
			str = strings.TrimLeft(str, string(r.fillChar))
		}
		// Re-insert the sign in front of the 0b/0o prefix.
		if len(str) > 0 && str[0] == '-' {
			str = strings.Join([]string{"-", prefix, str[1:]}, "")
		} else if len(str) > 0 && str[0] == '+' {
			str = strings.Join([]string{"+", prefix, str[1:]}, "")
		} else if r.sign == " " {
			str = strings.Join([]string{" ", prefix, str}, "")
		} else {
			str = strings.Join([]string{prefix, str}, "")
		}
	}
	if r.renderVerb == "f" || r.renderVerb == "F" || r.renderVerb == "g" || r.renderVerb == "G" || r.renderVerb == "e" || r.renderVerb == "E" {
		// fmt already padded floats; normalize and restore the blank sign.
		str = strings.TrimSpace(str)
		if r.sign == " " && str[0] != '-' {
			str = " " + str
		}
	}
	if r.percent {
		str, err = transformPercent(str)
		if err != nil {
			return err
		}
	}
	if len(str) > 0 {
		// Emit the sign before the padding for left/padSign alignment so
		// zero padding lands between the sign and the digits.
		if str[0] != '(' && (r.align == left || r.align == padSign) {
			if str[0] == '-' {
				r.buf.WriteString("-")
				str = str[1:]
				width--
			} else if str[0] == '+' {
				r.buf.WriteString("+")
				str = str[1:]
				width--
			} else if str[0] == ' ' {
				r.buf.WriteString(" ")
				str = str[1:]
				width--
			} else {
				r.buf.WriteString(r.sign)
			}
		}
	}
	if r.showRadix && r.align == padSign {
		// Keep the two-character radix prefix outside the zero padding.
		r.buf.WriteString(str[0:2])
		r.buf.WriteAlignedString(str[2:], r.align, width-2, r.fillChar)
	} else {
		r.buf.WriteAlignedString(str, r.align, width, r.fillChar)
	}
	return nil
}
// setupPercent widens the precision by two digits so that transformPercent,
// which shifts the decimal point two places right, still has enough digits.
func (r *render) setupPercent() error {
	// Increase the precision by two, to make sure we have enough digits.
	if r.precision == "" {
		r.precision = ".8"
	} else {
		// r.precision includes the leading '.', hence [1:].
		precision, err := strconv.ParseInt(r.precision[1:], 10, 64)
		if err != nil {
			return err
		}
		r.precision = Must(".{}", precision+2)
	}
	return nil
}
// transformPercent converts an already-rendered decimal number into a
// percentage string by shifting the decimal point two places right and
// appending '%'. Negative values keep their sign; plain integers gain "00".
// NOTE(review): mantissa[0:2]/[2:] assume at least two fractional digits —
// guaranteed by setupPercent's minimum precision, but confirm for any other
// caller.
func transformPercent(p string) (string, error) {
	var sign string
	if p[0] == '-' {
		sign = "-"
		p = p[1:]
	}
	intPart, mantissa := split(p, '.')
	var suffix string
	if mantissa != "" {
		prefix, err := strconv.ParseInt(intPart, 10, 64)
		if err != nil {
			return "", Error("Couldn't parse format prefix from: {}", p)
		}
		if prefix == 0 {
			// Value < 1: the first two mantissa digits become the integer
			// part of the percentage; a leading zero is dropped.
			if mantissa[2:] != "" {
				suffix = "." + mantissa[2:]
			}
			if mantissa[0] == '0' {
				return strings.Join([]string{sign, mantissa[1:2], suffix, "%"}, ""), nil
			}
			return strings.Join([]string{sign, mantissa[0:2], suffix, "%"}, ""), nil
		} else if len(intPart) == 1 {
			if mantissa[2:] != "" {
				suffix = "." + mantissa[2:]
			}
			return strings.Join([]string{sign, intPart, mantissa[0:2], suffix, "%"}, ""), nil
		}
		if mantissa[2:] != "" {
			suffix = "." + mantissa[2:]
		}
		return strings.Join([]string{sign, intPart, mantissa[0:2], suffix, "%"}, ""), nil
	}
	// No decimal point: an integer scales by appending two zeros; anything
	// unparsable is passed through with just the '%' appended.
	if _, err := strconv.ParseInt(p, 10, 64); err != nil {
		return p + "%", nil
	}
	return p + "00%", nil
}
package simulation
import (
"fmt"
"math/rand"
"time"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/simapp/helpers"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/x/simulation"
hub "github.com/sentinel-official/hub/types"
node "github.com/sentinel-official/hub/x/node/simulation"
"github.com/sentinel-official/hub/x/plan/expected"
"github.com/sentinel-official/hub/x/plan/keeper"
"github.com/sentinel-official/hub/x/plan/types"
provider "github.com/sentinel-official/hub/x/provider/simulation"
)
// SimulateMsgAdd builds a MsgAdd for a randomly chosen provider with random
// price, validity, and bandwidth, and delivers it to the app.
func SimulateMsgAdd(ak expected.AccountKeeper, pk expected.ProviderKeeper) simulation.Operation {
	return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accounts []simulation.Account, chainID string) (
		simulation.OperationMsg, []simulation.FutureOperation, error) {
		providers := pk.GetProviders(ctx, 0, 0)
		if len(providers) == 0 {
			// Nothing to do until at least one provider exists.
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		rProvider := provider.RandomProvider(r, providers)
		rAccount, found := simulation.FindAccount(accounts, rProvider.Address)
		if !found {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		var (
			account  = ak.GetAccount(ctx, rAccount.Address)
			price    = sdk.NewCoins(sdk.NewCoin("stake", sdk.NewInt(r.Int63n(100)+1)))
			validity = time.Duration(r.Intn(24)+1) * time.Hour
			bytes    = sdk.NewInt(r.Int63n(1e12) + 1)
		)

		msg := types.NewMsgAdd(rProvider.Address, price, validity, bytes)
		// Include the concrete validation failure instead of discarding it.
		if err := msg.ValidateBasic(); err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, fmt.Errorf("expected msg to pass ValidateBasic: %s: %s", msg.GetSignBytes(), err)
		}

		tx := helpers.GenTx(
			[]sdk.Msg{msg},
			nil,
			helpers.DefaultGenTxGas,
			chainID,
			[]uint64{account.GetAccountNumber()},
			[]uint64{account.GetSequence()},
			rAccount.PrivKey,
		)

		_, _, err := app.Deliver(tx)
		if err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, err
		}

		return simulation.NewOperationMsg(msg, true, ""), nil, nil
	}
}
// SimulateMsgSetStatus flips a randomly chosen plan to a random status
// (active or inactive) on behalf of its provider and delivers the message.
func SimulateMsgSetStatus(ak expected.AccountKeeper, k keeper.Keeper) simulation.Operation {
	return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accounts []simulation.Account, chainID string) (
		simulation.OperationMsg, []simulation.FutureOperation, error) {
		plans := k.GetPlans(ctx, 0, 0)
		if len(plans) == 0 {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		rPlan := RandomPlan(r, plans)
		rAccount, found := simulation.FindAccount(accounts, rPlan.Provider)
		if !found {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		account := ak.GetAccount(ctx, rAccount.Address)

		var status hub.Status
		switch r.Intn(2) {
		case 0:
			status = hub.StatusActive
		case 1:
			status = hub.StatusInactive
		}

		msg := types.NewMsgSetStatus(rPlan.Provider, rPlan.ID, status)
		// Include the concrete validation failure instead of discarding it.
		if err := msg.ValidateBasic(); err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, fmt.Errorf("expected msg to pass ValidateBasic: %s: %s", msg.GetSignBytes(), err)
		}

		tx := helpers.GenTx(
			[]sdk.Msg{msg},
			nil,
			helpers.DefaultGenTxGas,
			chainID,
			[]uint64{account.GetAccountNumber()},
			[]uint64{account.GetSequence()},
			rAccount.PrivKey,
		)

		_, _, err := app.Deliver(tx)
		if err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, err
		}

		return simulation.NewOperationMsg(msg, true, ""), nil, nil
	}
}
// SimulateMsgAddNode attaches a random node of a plan's provider to a
// randomly chosen plan and delivers the message.
func SimulateMsgAddNode(ak expected.AccountKeeper, nk expected.NodeKeeper, k keeper.Keeper) simulation.Operation {
	return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accounts []simulation.Account, chainID string) (
		simulation.OperationMsg, []simulation.FutureOperation, error) {
		plans := k.GetPlans(ctx, 0, 0)
		if len(plans) == 0 {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		rPlan := RandomPlan(r, plans)
		rAccount, found := simulation.FindAccount(accounts, rPlan.Provider)
		if !found {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		account := ak.GetAccount(ctx, rAccount.Address)

		nodes := nk.GetNodesForProvider(ctx, rPlan.Provider, 0, 0)
		if len(nodes) == 0 {
			return simulation.NoOpMsg(types.ModuleName), nil, nil
		}

		rNode := node.RandomNode(r, nodes)

		msg := types.NewMsgAddNode(rPlan.Provider, rPlan.ID, rNode.Address)
		// Include the concrete validation failure instead of discarding it.
		if err := msg.ValidateBasic(); err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, fmt.Errorf("expected msg to pass ValidateBasic: %s: %s", msg.GetSignBytes(), err)
		}

		tx := helpers.GenTx(
			[]sdk.Msg{msg},
			nil,
			helpers.DefaultGenTxGas,
			chainID,
			[]uint64{account.GetAccountNumber()},
			[]uint64{account.GetSequence()},
			rAccount.PrivKey,
		)

		_, _, err := app.Deliver(tx)
		if err != nil {
			return simulation.NoOpMsg(types.ModuleName), nil, err
		}

		return simulation.NewOperationMsg(msg, true, ""), nil, nil
	}
}
func SimulateMsgRemoveNode(ak expected.AccountKeeper, nk expected.NodeKeeper, k keeper.Keeper) simulation.Operation {
return func(r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accounts []simulation.Account, chainID string) (
simulation.OperationMsg, []simulation.FutureOperation, error) {
plans := k.GetPlans(ctx, 0, 0)
if len(plans) == 0 {
return simulation.NoOpMsg(types.ModuleName), nil, nil
}
rPlan := RandomPlan(r, plans)
rAccount, found := simulation.FindAccount(accounts, rPlan.Provider)
if !found {
return simulation.NoOpMsg(types.ModuleName), nil, nil
}
account := ak.GetAccount(ctx, rAccount.Address)
nodes := nk.GetNodesForProvider(ctx, rPlan.Provider, 0, 0)
if len(nodes) == 0 {
return simulation.NoOpMsg(types.ModuleName), nil, nil
}
rNode := node.RandomNode(r, nodes)
msg := types.NewMsgRemoveNode(rPlan.Provider, rPlan.ID, rNode.Address)
if msg.ValidateBasic() != nil {
return simulation.NoOpMsg(types.ModuleName), nil, fmt.Errorf("expected msg to pass ValidateBasic: %s", msg.GetSignBytes())
}
tx := helpers.GenTx(
[]sdk.Msg{msg},
nil,
helpers.DefaultGenTxGas,
chainID,
[]uint64{account.GetAccountNumber()},
[]uint64{account.GetSequence()},
rAccount.PrivKey,
)
_, _, err := app.Deliver(tx)
if err != nil {
return simulation.NoOpMsg(types.ModuleName), nil, err
}
return simulation.NewOperationMsg(msg, true, ""), nil, nil
}
} | x/plan/simulation/msgs.go | 0.533641 | 0.410402 | msgs.go | starcoder |
package main
// deleteDuplicatesFromDestinationArray removes duplicate Destinations in
// place. Two entries are duplicates when their bounding boxes overlap by more
// than DUPLICATE_AREA_THRESHOLD of the smaller box's area, or when they share
// a fuzzy-matched terminal title and sit on roughly the same horizontal line.
// Of each duplicate pair, the entry with the smaller spelling distance
// survives. The passed-in slice pointer is reassigned to a freshly allocated
// slice when anything was removed.
// Run time O(n^2). n = number of Destinations.
func deleteDuplicatesFromDestinationArray(destsArrayPointer *[]Destination) {
	dests := *destsArrayPointer
	originalLength := len(dests)
	for i := 0; i < len(dests); i++ {
		destA := dests[i]
		areaA := destA.BBox.Dx() * destA.BBox.Dy()
		for j := i + 1; j < len(dests); j++ {
			destB := dests[j]
			// Compute destB's own area (previously this mistakenly used
			// destA's box) and take the per-pair minimum, so one small box
			// does not skew later comparisons.
			areaB := destB.BBox.Dx() * destB.BBox.Dy()
			smallerArea := areaA
			if areaB < smallerArea {
				smallerArea = areaB
			}
			// Duplicate if the intersection covers most of the smaller box,
			// OR if the same predicted terminal appears on ~the same
			// horizontal line (fuzzy matches of individual words from the
			// same location).
			var horizontalDuplicate bool
			if destA.TerminalTitle == destB.TerminalTitle {
				horizontalDuplicate = sameHorizontalLine(destA.BBox, destB.BBox)
			}
			intersection := destA.BBox.Intersect(destB.BBox)
			if float64(intersection.Dx())*float64(intersection.Dy()) > float64(smallerArea)*DUPLICATE_AREA_THRESHOLD || horizontalDuplicate {
				// Keep the better-spelled entry in slot i, and refresh the
				// cached copy so later comparisons use the survivor.
				if destA.SpellingDistance > destB.SpellingDistance {
					dests[i] = dests[j]
					destA = dests[i]
					areaA = destA.BBox.Dx() * destA.BBox.Dy()
				}
				// Delete slot j. Decrement j so the element shifted into
				// this index is checked on the next loop.
				copy(dests[j:], dests[j+1:])
				dests[len(dests)-1] = Destination{}
				dests = dests[:len(dests)-1]
				j--
			}
		}
	}
	// If duplicates were found, alloc a new slice and reassign the passed-in
	// slice pointer so callers do not see stale trailing elements.
	if len(dests) != originalLength {
		tmp := make([]Destination, len(dests))
		copy(tmp, dests)
		*destsArrayPointer = tmp
	}
}
// deleteDuplicatesFromRCArray removes duplicate RollCalls in place, using the
// same overlap criterion as deleteDuplicatesFromDestinationArray (without the
// spelling-distance tiebreak). The passed-in slice pointer is reassigned to a
// freshly allocated slice when anything was removed.
// Run time O(n^2). n = number of RollCalls.
func deleteDuplicatesFromRCArray(arrayPointer *[]RollCall) {
	dests := *arrayPointer
	originalLength := len(dests)
	for i := 0; i < len(dests); i++ {
		destA := dests[i]
		areaA := destA.BBox.Dx() * destA.BBox.Dy()
		for j := i + 1; j < len(dests); j++ {
			destB := dests[j]
			// Compute destB's own area (previously this mistakenly used
			// destA's box) and take the per-pair minimum.
			areaB := destB.BBox.Dx() * destB.BBox.Dy()
			smallerArea := areaA
			if areaB < smallerArea {
				smallerArea = areaB
			}
			// Compare intersection area to the smaller of the two boxes.
			intersection := destA.BBox.Intersect(destB.BBox)
			if float64(intersection.Dx())*float64(intersection.Dy()) > float64(smallerArea)*DUPLICATE_AREA_THRESHOLD {
				// Delete slot j. Decrement j so the element shifted into
				// this index is checked on the next loop.
				copy(dests[j:], dests[j+1:])
				dests[len(dests)-1] = RollCall{}
				dests = dests[:len(dests)-1]
				j--
			}
		}
	}
	// If duplicates were found, alloc a new slice and reassign the passed-in
	// slice pointer so callers do not see stale trailing elements.
	if len(dests) != originalLength {
		tmp := make([]RollCall, len(dests))
		copy(tmp, dests)
		*arrayPointer = tmp
	}
}
//Find duplicates by checking if intersecting rect shares >DUPLICATE_AREA_THRESHOLD of area of the smaller of the two rects.
//Passed in pointer to SeatsAvailable slice is reassigned to new slice.
//Same function as deleteDuplicatesFromDestinationArray
//Run time O(n^2). n = number of SeatsAvailable
func deleteDuplicatesFromSAArray(arrayPointer *[]SeatsAvailable) {
	dests := *arrayPointer
	originalLength := len(dests)
	for i := 0; i < len(dests); i++ {
		destA := dests[i]
		areaA := destA.BBox.Dx() * destA.BBox.Dy()
		for j := i + 1; j < len(dests); j++ {
			destB := dests[j]
			//BUG FIX: destB's area was previously computed from destA.BBox,
			//so destB's size never influenced the comparison.
			destBArea := destB.BBox.Dx() * destB.BBox.Dy()
			//BUG FIX: choose the smaller area per pair instead of letting the
			//value shrink monotonically across the inner loop.
			smallerArea := areaA
			if destBArea < smallerArea {
				smallerArea = destBArea
			}
			//Compare intersection image.Rectangle area to the smaller of destA and destB area
			intersection := destA.BBox.Intersect(destB.BBox)
			if float64(intersection.Dx())*float64(intersection.Dy()) > float64(smallerArea)*DUPLICATE_AREA_THRESHOLD {
				//If destA has no number found and destB found one (or destB found a larger
				//number), keep destB's data in slot i instead.
				if destA.Number == 0 && destB.Number != 0 || destB.Number > destA.Number {
					dests[i] = dests[j]
				}
				//Delete destB. Decrement j so the element shifted into index j is checked next loop.
				copy(dests[j:], dests[j+1:])
				dests[len(dests)-1] = SeatsAvailable{}
				dests = dests[:len(dests)-1]
				j--
			}
		}
	}
	//If duplicates were found, alloc a new SeatsAvailable slice and reassign the passed in slice pointer.
	if len(dests) != originalLength {
		tmp := make([]SeatsAvailable, len(dests))
		copy(tmp, dests)
		*arrayPointer = tmp
	}
}
//Delete self matches from the destination array: any Destination whose
//TerminalTitle equals the target terminal's own title is removed. If anything
//was removed, the passed in slice pointer is reassigned to a freshly
//allocated slice so stale elements are not visible when printing.
func deleteTerminalFromDestArray(arrayPointer *[]Destination, targetTerminal Terminal) {
	dests := *arrayPointer
	startLen := len(dests)
	for idx := 0; idx < len(dests); idx++ {
		if dests[idx].TerminalTitle != targetTerminal.Title {
			continue
		}
		//Shift the tail left over the match, zero the vacated slot, shrink,
		//and step back so the element now at idx is examined on the next pass.
		copy(dests[idx:], dests[idx+1:])
		dests[len(dests)-1] = Destination{}
		dests = dests[:len(dests)-1]
		idx--
	}
	if len(dests) != startLen {
		tmp := make([]Destination, len(dests))
		copy(tmp, dests)
		*arrayPointer = tmp
	}
}
//Delete destination matches where keyword (minY) is too low on slide
func deleteLowDestsFromDestArray(arrayPointer *[]Destination, sReference Slide) (err error) {
dests := *arrayPointer
originalLength := len(dests)
//Find and remove any matching dests
for i := 0; i < len(dests); i++ {
var destKeywordLow bool
if destKeywordLow, err = sReference.isYCoordinateWithinHeightPercentage(dests[i].BBox.Min.Y, DESTINATION_KEYWORD_VERTICAL_THRESHOLD); err != nil {
return
}
if !destKeywordLow {
copy(dests[i:], dests[i+1:])
dests[len(dests)-1] = Destination{}
dests = dests[:len(dests)-1]
i--
}
}
//If duplicates were found, alloc new Destination slice and reassign passed in slice pointer
if len(dests) != originalLength {
//Create new slice to copy elements over. Original slice will have updated length but old elements in memory (displayed when printing).
tmp := make([]Destination, len(dests))
for i := 0; i < len(dests); i++ {
tmp[i] = dests[i]
}
*arrayPointer = tmp
}
return
} | consolidate.go | 0.556159 | 0.436922 | consolidate.go | starcoder |
package siesta
import (
"encoding/binary"
)
// Decoder is able to decode a Kafka wire protocol message into actual data.
type Decoder interface {
	// Gets an int8 from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetInt8() (int8, error)
	// Gets an int16 from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetInt16() (int16, error)
	// Gets an int32 from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetInt32() (int32, error)
	// Gets an int64 from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetInt64() (int64, error)
	// Gets a length-prefixed []byte from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetBytes() ([]byte, error)
	// Gets a length-prefixed string from this decoder. Returns ErrEOF if the end of the stream is reached.
	GetString() (string, error)
	// Tells how many bytes are left unread in this decoder.
	Remaining() int
}
// BinaryDecoder implements Decoder and is able to decode a Kafka wire protocol message into actual data.
type BinaryDecoder struct {
	raw []byte // complete message being decoded
	pos int // index of the next unread byte in raw
}
// NewBinaryDecoder creates a new BinaryDecoder that will decode the given []byte,
// starting at position zero.
func NewBinaryDecoder(raw []byte) *BinaryDecoder {
	return &BinaryDecoder{raw: raw}
}
// GetInt8 reads one byte as an int8 and advances the position.
// Returns ErrEOF (and exhausts the decoder) if no bytes remain.
func (bd *BinaryDecoder) GetInt8() (int8, error) {
	if bd.Remaining() >= 1 {
		v := int8(bd.raw[bd.pos])
		bd.pos++
		return v, nil
	}
	bd.pos = len(bd.raw)
	return -1, ErrEOF
}
// GetInt16 reads two bytes as a big-endian int16 and advances the position.
// Returns ErrEOF (and exhausts the decoder) if fewer than 2 bytes remain.
func (bd *BinaryDecoder) GetInt16() (int16, error) {
	if bd.Remaining() >= 2 {
		v := int16(binary.BigEndian.Uint16(bd.raw[bd.pos:]))
		bd.pos += 2
		return v, nil
	}
	bd.pos = len(bd.raw)
	return -1, ErrEOF
}
// GetInt32 reads four bytes as a big-endian int32 and advances the position.
// Returns ErrEOF (and exhausts the decoder) if fewer than 4 bytes remain.
func (bd *BinaryDecoder) GetInt32() (int32, error) {
	if bd.Remaining() >= 4 {
		v := int32(binary.BigEndian.Uint32(bd.raw[bd.pos:]))
		bd.pos += 4
		return v, nil
	}
	bd.pos = len(bd.raw)
	return -1, ErrEOF
}
// GetInt64 reads eight bytes as a big-endian int64 and advances the position.
// Returns ErrEOF (and exhausts the decoder) if fewer than 8 bytes remain.
func (bd *BinaryDecoder) GetInt64() (int64, error) {
	if bd.Remaining() >= 8 {
		v := int64(binary.BigEndian.Uint64(bd.raw[bd.pos:]))
		bd.pos += 8
		return v, nil
	}
	bd.pos = len(bd.raw)
	return -1, ErrEOF
}
// GetBytes reads an int32 length prefix followed by that many bytes.
// A length of -1 yields a nil slice, 0 an empty non-nil slice. Returns
// ErrEOF on a bad prefix or when the declared length exceeds what remains.
// The returned slice aliases the decoder's underlying buffer.
func (bd *BinaryDecoder) GetBytes() ([]byte, error) {
	l, err := bd.GetInt32()
	if err != nil || l < -1 {
		return nil, ErrEOF
	}
	length := int(l)
	if length == -1 {
		return nil, nil
	}
	if length == 0 {
		return []byte{}, nil
	}
	if length > bd.Remaining() {
		bd.pos = len(bd.raw)
		return nil, ErrEOF
	}
	start := bd.pos
	bd.pos += length
	return bd.raw[start:bd.pos], nil
}
// GetString reads an int16 length prefix followed by that many bytes as a
// string. Lengths of -1 and 0 both yield the empty string. Returns ErrEOF on
// a bad prefix or when the declared length exceeds what remains.
func (bd *BinaryDecoder) GetString() (string, error) {
	l, err := bd.GetInt16()
	if err != nil || l < -1 {
		return "", ErrEOF
	}
	length := int(l)
	if length < 1 {
		return "", nil
	}
	if length > bd.Remaining() {
		bd.pos = len(bd.raw)
		return "", ErrEOF
	}
	start := bd.pos
	bd.pos += length
	return string(bd.raw[start:bd.pos]), nil
}
// Remaining tells how many bytes are left unread in this decoder.
func (bd *BinaryDecoder) Remaining() int {
	return len(bd.raw) - bd.pos
}
package input
import (
"github.com/gravestench/bitset"
)
// NewInputVector creates a new input vector whose key, modifier and mouse
// button bit sets are all freshly allocated and cleared.
func NewInputVector() *Vector {
	return (&Vector{
		KeyVector:         bitset.NewBitSet(),
		ModifierVector:    bitset.NewBitSet(),
		MouseButtonVector: bitset.NewBitSet(),
	}).Clear()
}
// Vector represents the state of keys, modifiers, and mouse buttons.
// It can be used to compare input states, and is intended to be used as such:
// * whatever manages system input keeps a "current" input vector and updates it
// * things that are listening for certain inputs will be compared using `Contains` and `Intersects` methods
type Vector struct {
	KeyVector *bitset.BitSet // one bit per Key
	ModifierVector *bitset.BitSet // one bit per Modifier
	MouseButtonVector *bitset.BitSet // one bit per MouseButton
}
// SetKey sets the corresponding key bit in the keys bitset.
// Returns the vector so calls can be chained.
func (iv *Vector) SetKey(key Key) *Vector {
	return iv.SetKeys([]Key{key}) // delegate to the batch variant
}
// SetKeys sets multiple key bits in the keys bitset and returns the vector
// for chaining. An empty or nil slice leaves the vector unchanged.
func (iv *Vector) SetKeys(keys []Key) *Vector {
	for _, k := range keys {
		iv.KeyVector.Set(int(k), true)
	}
	return iv
}
// SetModifier sets the corresponding modifier bit in the modifier bitset.
// Returns the vector so calls can be chained.
func (iv *Vector) SetModifier(mod Modifier) *Vector {
	return iv.SetModifiers([]Modifier{mod}) // delegate to the batch variant
}
// SetModifiers sets multiple modifier bits in the modifier bitset and returns
// the vector for chaining. An empty or nil slice leaves the vector unchanged.
func (iv *Vector) SetModifiers(mods []Modifier) *Vector {
	for _, m := range mods {
		iv.ModifierVector.Set(int(m), true)
	}
	return iv
}
// SetMouseButton sets the corresponding mouse button bit in the mouse button bitset.
// Returns the vector so calls can be chained.
func (iv *Vector) SetMouseButton(button MouseButton) *Vector {
	return iv.SetMouseButtons([]MouseButton{button}) // delegate to the batch variant
}
// SetMouseButtons sets multiple mouse button bits in the mouse button bitset
// and returns the vector for chaining. An empty or nil slice is a no-op.
func (iv *Vector) SetMouseButtons(buttons []MouseButton) *Vector {
	for _, b := range buttons {
		iv.MouseButtonVector.Set(int(b), true)
	}
	return iv
}
// Contains returns true if this input vector is a superset of the given input
// vector: all of other's keys and mouse buttons are set here, and the
// modifier sets are exactly equal.
func (iv *Vector) Contains(other *Vector) bool {
	hasKeys := iv.KeyVector.ContainsAll(other.KeyVector)
	hasButtons := iv.MouseButtonVector.ContainsAll(other.MouseButtonVector)
	// Modifiers are compared with Equals rather than ContainsAll so that,
	// for example, CTRL+X and CTRL+ALT+X never fire at the same time.
	sameMods := iv.ModifierVector.Equals(other.ModifierVector)
	return hasKeys && sameMods && hasButtons
}
// Intersects returns true if this input vector shares any set bit — key,
// modifier, or mouse button — with the given input vector.
func (iv *Vector) Intersects(other *Vector) bool {
	anyKeys := iv.KeyVector.Intersects(other.KeyVector)
	anyMods := iv.ModifierVector.Intersects(other.ModifierVector)
	anyButtons := iv.MouseButtonVector.Intersects(other.MouseButtonVector)
	return anyKeys || anyMods || anyButtons
}
// Clear sets all bits in this input vector to 0
func (iv *Vector) Clear() *Vector {
iv.KeyVector.Clear()
iv.ModifierVector.Clear()
iv.MouseButtonVector.Clear()
return iv
} | pkg/systems/input/input_vector.go | 0.659295 | 0.555918 | input_vector.go | starcoder |
package module_page
import (
"fmt"
"os"
"strings"
"github.com/charmbracelet/lipgloss"
"github.com/lucasb-eyer/go-colorful"
"golang.org/x/term"
)
const (
	// In real life situations we'd adjust the document to fit the width we've
	// detected. In the case of this example we're hardcoding the width, and
	// later using the detected width only to truncate in order to avoid jaggy
	// wrapping.
	width = 96 // fixed page width in cells
	columnWidth = 30 // width of list/history columns
)
// Style definitions.
var (
	// General.
	subtle = lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#383838"}
	highlight = lipgloss.AdaptiveColor{Light: "#874BFD", Dark: "#7D56F4"}
	special = lipgloss.AdaptiveColor{Light: "#43BF6D", Dark: "#73F59F"}
	divider = lipgloss.NewStyle().
		SetString("•").
		Padding(0, 1).
		Foreground(subtle).
		String()
	url = lipgloss.NewStyle().Foreground(special).Render
	// Tabs.
	activeTabBorder = lipgloss.Border{
		Top: "─",
		Bottom: " ",
		Left: "│",
		Right: "│",
		TopLeft: "╭",
		TopRight: "╮",
		BottomLeft: "┘",
		BottomRight: "└",
	}
	tabBorder = lipgloss.Border{
		Top: "─",
		Bottom: "─",
		Left: "│",
		Right: "│",
		TopLeft: "╭",
		TopRight: "╮",
		BottomLeft: "┴",
		BottomRight: "┴",
	}
	tab = lipgloss.NewStyle().
		Border(tabBorder, true).
		BorderForeground(highlight).
		Padding(0, 1)
	activeTab = tab.Copy().Border(activeTabBorder, true)
	tabGap = tab.Copy().
		BorderTop(false).
		BorderLeft(false).
		BorderRight(false)
	// Title.
	titleStyle = lipgloss.NewStyle().
		MarginLeft(1).
		MarginRight(5).
		Padding(0, 1).
		Italic(true).
		Foreground(lipgloss.Color("#FFF7DB")).
		SetString("Bubble SSH")
	descStyle = lipgloss.NewStyle().MarginTop(1)
	infoStyle = lipgloss.NewStyle().
		BorderStyle(lipgloss.NormalBorder()).
		BorderTop(true).
		BorderForeground(subtle)
	// Dialog.
	dialogBoxStyle = lipgloss.NewStyle().
		Border(lipgloss.RoundedBorder()).
		BorderForeground(lipgloss.Color("#874BFD")).
		Padding(1, 0).
		BorderTop(true).
		BorderLeft(true).
		BorderRight(true).
		BorderBottom(true)
	buttonStyle = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#FFF7DB")).
		Background(lipgloss.Color("#888B7E")).
		Padding(0, 3).
		MarginTop(1)
	activeButtonStyle = buttonStyle.Copy().
		Foreground(lipgloss.Color("#FFF7DB")).
		Background(lipgloss.Color("#F25D94")).
		MarginRight(2).
		Underline(true)
	// List.
	list = lipgloss.NewStyle().
		Border(lipgloss.NormalBorder(), false, true, false, false).
		BorderForeground(subtle).
		MarginRight(2).
		Height(8).
		Width(columnWidth + 1)
	listHeader = lipgloss.NewStyle().
		BorderStyle(lipgloss.NormalBorder()).
		BorderBottom(true).
		BorderForeground(subtle).
		MarginRight(2).
		Render
	listItem = lipgloss.NewStyle().PaddingLeft(2).Render
	checkMark = lipgloss.NewStyle().SetString("✓").
		Foreground(special).
		PaddingRight(1).
		String()
	listDone = func(s string) string {
		return checkMark + lipgloss.NewStyle().
			Strikethrough(true).
			Foreground(lipgloss.AdaptiveColor{Light: "#969B86", Dark: "#696969"}).
			Render(s)
	}
	// Paragraphs/History.
	historyStyle = lipgloss.NewStyle().
		Align(lipgloss.Left).
		Foreground(lipgloss.Color("#FAFAFA")).
		Background(highlight).
		Margin(1, 3, 0, 0).
		Padding(1, 2).
		Height(19).
		Width(columnWidth)
	// Status Bar.
	statusNugget = lipgloss.NewStyle().
		Foreground(lipgloss.Color("#FFFDF5")).
		Padding(0, 1)
	statusBarStyle = lipgloss.NewStyle().
		Foreground(lipgloss.AdaptiveColor{Light: "#343433", Dark: "#C1C6B2"}).
		Background(lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#353533"})
	statusStyle = lipgloss.NewStyle().
		Inherit(statusBarStyle).
		Foreground(lipgloss.Color("#FFFDF5")).
		Background(lipgloss.Color("#FF5F87")).
		Padding(0, 1).
		MarginRight(1)
	encodingStyle = statusNugget.Copy().
		Background(lipgloss.Color("#A550DF")).
		Align(lipgloss.Right)
	statusText = lipgloss.NewStyle().Inherit(statusBarStyle)
	fishCakeStyle = statusNugget.Copy().Background(lipgloss.Color("#6124DF"))
	// Page.
	// NOTE: docStyle is package-level mutable state — Page() reassigns it
	// (adding MaxWidth) when the terminal width can be detected.
	docStyle = lipgloss.NewStyle().Padding(1, 2, 1, 2)
)
// Page renders the landing page — a tab row plus a colour-blended title and
// description block — to stdout, constrained to the detected terminal width
// when one is available.
func Page() {
	physicalWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
	doc := strings.Builder{}
	// Tabs
	{
		row := lipgloss.JoinHorizontal(
			lipgloss.Top,
			activeTab.Render("Bubble SSH"),
		)
		// Pad the tab row out to the fixed page width with the gap border.
		gap := tabGap.Render(strings.Repeat(" ", max(0, width-lipgloss.Width(row)-2)))
		row = lipgloss.JoinHorizontal(lipgloss.Bottom, row, gap)
		doc.WriteString(row + "\n\n")
	}
	// Title
	{
		var (
			colors = colorGrid(1, 5)
			title strings.Builder
		)
		// Repeat the title once per gradient colour, indenting each copy a
		// little further for a staircase effect.
		for i, v := range colors {
			const offset = 2
			c := lipgloss.Color(v[0])
			fmt.Fprint(&title, titleStyle.Copy().MarginLeft(i*offset).Background(c))
			if i < len(colors)-1 {
				title.WriteRune('\n')
			}
		}
		desc := lipgloss.JoinVertical(lipgloss.Left,
			descStyle.Render("Bubble SSH - A convenient and fast ssh service installation assistant for Linux."),
			infoStyle.Render("From Sovea"+divider+url("https://github.com/sovea")),
		)
		row := lipgloss.JoinHorizontal(lipgloss.Top, title.String(), desc)
		doc.WriteString(row + "\n\n")
	}
	// Truncate to the real terminal width (when known) to avoid jaggy wrapping.
	if physicalWidth > 0 {
		docStyle = docStyle.MaxWidth(physicalWidth)
	}
	// Okay, let's print it
	fmt.Println(docStyle.Render(doc.String()))
}
// colorGrid returns a ySteps-by-xSteps grid of hex colour strings, produced
// by bilinear blending (in Luv space) between four fixed corner colours.
func colorGrid(xSteps, ySteps int) [][]string {
	topLeft, _ := colorful.Hex("#F25D94")
	topRight, _ := colorful.Hex("#EDFF82")
	bottomLeft, _ := colorful.Hex("#643AFF")
	bottomRight, _ := colorful.Hex("#14F9D5")
	// Blend down both edges first, then across each row.
	left := make([]colorful.Color, ySteps)
	right := make([]colorful.Color, ySteps)
	for i := 0; i < ySteps; i++ {
		t := float64(i) / float64(ySteps)
		left[i] = topLeft.BlendLuv(bottomLeft, t)
		right[i] = topRight.BlendLuv(bottomRight, t)
	}
	grid := make([][]string, ySteps)
	for row := 0; row < ySteps; row++ {
		grid[row] = make([]string, xSteps)
		for col := 0; col < xSteps; col++ {
			grid[row][col] = left[row].BlendLuv(right[row], float64(col)/float64(xSteps)).Hex()
		}
	}
	return grid
}
// max returns the larger of two ints.
func max(a, b int) int {
	if a < b {
		return b
	}
	return a
}
package exp
import (
"xelf.org/xelf/ast"
"xelf.org/xelf/bfr"
"xelf.org/xelf/knd"
"xelf.org/xelf/lit"
"xelf.org/xelf/typ"
)
// Exp is the common interface of all expressions with kind, type and source info.
type Exp interface {
Kind() knd.Kind
Resl() typ.Type
Source() ast.Src
String() string
Print(*bfr.P) error
Clone() Exp
}
// Lit is a literal expression with a literal value, which may include a type or spec.
type Lit struct {
Res typ.Type
lit.Val
Src ast.Src
}
func (a *Lit) Kind() knd.Kind { return knd.Lit }
func (a *Lit) Resl() typ.Type { return a.Res }
func (a *Lit) Source() ast.Src { return a.Src }
func (a *Lit) String() string {
if a.Val == nil {
return "null"
}
return a.Val.String()
}
func (a *Lit) Print(p *bfr.P) error {
if a.Val == nil {
return p.Fmt("null")
}
return a.Val.Print(p)
}
func (a *Lit) Clone() Exp { return &Lit{a.Res, a.Val, a.Src} }
// Sym is a symbol expression which caches the resolving environment and a relative name.
type Sym struct {
Type typ.Type
Sym string
Src ast.Src
Env Env
Rel string
}
func (s *Sym) Kind() knd.Kind { return knd.Sym }
func (s *Sym) Resl() typ.Type { return s.Type }
func (s *Sym) Source() ast.Src { return s.Src }
func (s *Sym) String() string { return s.Sym }
func (s *Sym) Print(p *bfr.P) error { return p.Fmt(s.Sym) }
func (s *Sym) Clone() Exp { return &Sym{s.Type, s.Sym, s.Src, nil, ""} }
// Tag is a named quasi expression that is resolved by its parent call.
type Tag struct {
Tag string
Exp Exp
Src ast.Src
}
func (t *Tag) Kind() knd.Kind { return knd.Tag }
func (t *Tag) Resl() typ.Type {
if t.Exp == nil {
return typ.Void
}
return t.Exp.Resl()
}
func (t *Tag) Source() ast.Src { return t.Src }
func (t *Tag) String() string { return bfr.String(t) }
func (t *Tag) Print(p *bfr.P) error {
p.Fmt(t.Tag)
if t.Exp == nil {
return p.Byte(';')
}
p.Byte(':')
return t.Exp.Print(p)
}
func (t *Tag) Clone() Exp { return &Tag{t.Tag, t.Exp.Clone(), t.Src} }
// Tupl is a quasi multi-expression that is resolved by its parent call or a program.
type Tupl struct {
Type typ.Type
Els []Exp
Src ast.Src
}
func (t *Tupl) Kind() knd.Kind { return knd.Tupl }
func (t *Tupl) Resl() typ.Type { return t.Type }
func (t *Tupl) Source() ast.Src { return t.Src }
func (t *Tupl) String() string { return bfr.String(t) }
func (t *Tupl) Print(p *bfr.P) error {
for i, e := range t.Els {
if i != 0 {
p.Byte(' ')
}
err := e.Print(p)
if err != nil {
return err
}
}
return nil
}
func (t *Tupl) Clone() Exp {
els := append(([]Exp)(nil), t.Els...)
for i, e := range els {
els[i] = e.Clone()
}
return &Tupl{t.Type, els, t.Src}
}
// Call is an executable expression that uses a spec to evaluate to a literal.
// It caches the resolved spec and environment.
type Call struct {
Sig typ.Type
Spec Spec
Args []Exp
Env Env
Src ast.Src
}
func (c *Call) Kind() knd.Kind { return knd.Call }
func (c *Call) Resl() (t typ.Type) {
res := SigRes(c.Sig)
if res == nil {
return typ.Void
}
return res.Type
}
func (c *Call) Source() ast.Src { return c.Src }
func (c *Call) String() string { return bfr.String(c) }
func (c *Call) Print(p *bfr.P) error {
p.Byte('(')
name := SigName(c.Sig)
if name != "" {
p.Fmt(name)
p.Byte(' ')
}
for i, a := range c.Args {
if i != 0 {
p.Byte(' ')
}
err := a.Print(p)
if err != nil {
return err
}
}
return p.Byte(')')
}
func (c *Call) Clone() Exp {
args := append(([]Exp)(nil), c.Args...)
for i, a := range args {
args[i] = a.Clone()
}
return &Call{c.Sig, c.Spec, args, nil, c.Src}
} | exp/exp.go | 0.512205 | 0.410225 | exp.go | starcoder |
package io
import (
"regexp"
"strconv"
"strings"
)
const (
	// ValueRegex matches any single supported literal: a quoted string,
	// an int, a float, a bool, or a bracketed list.
	ValueRegex = `(\".+\"|-?\d+(\.\d+)?|-?\d+|(true|false)|(\[.*\]))`
	// listRegex matches a non-empty homogeneous list of any supported element type.
	listRegex = `\[\s*(((\s*\".+\"\s*,\s*)*(\s*\".+\"\s*))|((\s*\d+\s*,\s*)*(\s*\d+\s*))|((\s*\d+(\.\d+)?\s*,\s*)*(\s*\d+(\.\d+)?\s*))|((\s*(true|false)\s*,\s*)*(\s*(true|false)\s*)))\]`
	// The per-type list regexes below make the element group optional,
	// so the empty list "[]" also matches.
	stringListRegex = `\[\s*((\s*\".+\"\s*,\s*)*(\s*\".+\"\s*))?\]`
	intListRegex = `\[\s*((\s*-?\d+\s*,\s*)*(\s*-?\d+\s*))?\]`
	floatListRegex = `\[\s*((\s*-?\d+(\.\d+)?\s*,\s*)*(\s*-?\d+(\.\d+)?\s*))?\]`
	boolListRegex = `\[\s*((\s*(true|false)\s*,\s*)*(\s*(true|false)\s*))?\]`
)
// ParseInt converts text to an int, panicking if conversion fails.
func ParseInt(text string) int {
	i, err := strconv.Atoi(text)
	if err != nil {
		panic(err)
	}
	return i
}
// ParseFloat converts text to a float64, panicking if conversion fails.
func ParseFloat(text string) float64 {
	f, err := strconv.ParseFloat(text, 64)
	if err != nil {
		panic(err)
	}
	return f
}
// ParseBool converts text to a bool (per strconv.ParseBool), panicking if
// conversion fails.
func ParseBool(text string) bool {
	b, err := strconv.ParseBool(text)
	if err != nil {
		panic(err)
	}
	return b
}
// ParseQuotedString strips the surrounding double quotes from a quoted
// string literal, panicking if text is not fully quoted.
// BUG FIX: the regex was `\"\w+\"` — unanchored and word-chars only — so
// quoted strings containing spaces or punctuation (which ValueRegex and the
// list regexes accept via `\".+\"`) caused a panic. It is now anchored and
// accepts any non-empty quoted content, consistent with ValueRegex.
func ParseQuotedString(text string) string {
	if matched, _ := regexp.MatchString(`^\".+\"$`, text); matched {
		return strings.Trim(text, "\"")
	}
	panic("Could not parse quoted string:\n" + text)
}
// ParseStringList parses text of the form `["a", "b"]` into a []string.
// An empty list "[]" yields a nil slice. Panics if text is not a valid
// string list. NOTE: elements containing commas are not supported, since
// the list body is split on ",".
func ParseStringList(text string) []string {
	if matched, _ := regexp.MatchString(stringListRegex, text); !matched {
		panic("Could not parse string list:\n" + text)
	}
	inner := strings.TrimSpace(strings.Trim(text, "[]"))
	// BUG FIX: "[]" matches stringListRegex but used to fall through to
	// ParseQuotedString("") and panic.
	if inner == "" {
		return nil
	}
	var result []string
	for _, item := range strings.Split(inner, ",") {
		result = append(result, ParseQuotedString(strings.TrimSpace(item)))
	}
	return result
}
// ParseIntList parses text of the form "[1, 2, -3]" into a []int.
// An empty list "[]" yields a nil slice. Panics if text is not a valid int list.
func ParseIntList(text string) []int {
	if matched, _ := regexp.MatchString(intListRegex, text); !matched {
		panic("Could not parse int list:\n" + text)
	}
	inner := strings.TrimSpace(strings.Trim(text, "[]"))
	// BUG FIX: "[]" matches intListRegex but used to fall through to
	// ParseInt("") and panic.
	if inner == "" {
		return nil
	}
	var result []int
	for _, item := range strings.Split(inner, ",") {
		result = append(result, ParseInt(strings.TrimSpace(item)))
	}
	return result
}
// ParseFloatList parses text of the form "[1.5, -2]" into a []float64.
// An empty list "[]" yields a nil slice. Panics if text is not a valid float list.
func ParseFloatList(text string) []float64 {
	if matched, _ := regexp.MatchString(floatListRegex, text); !matched {
		panic("Could not parse float list:\n" + text)
	}
	inner := strings.TrimSpace(strings.Trim(text, "[]"))
	// BUG FIX: "[]" matches floatListRegex but used to fall through to
	// ParseFloat("") and panic.
	if inner == "" {
		return nil
	}
	var result []float64
	for _, item := range strings.Split(inner, ",") {
		result = append(result, ParseFloat(strings.TrimSpace(item)))
	}
	return result
}
func ParseBoolList(text string) []bool {
if matched, _ := regexp.MatchString(boolListRegex, text); matched {
listString := strings.Trim(text, "[]")
items := strings.Split(listString, ",")
var result []bool
for _, item := range items {
cleanItem := strings.TrimSpace(item)
result = append(result, ParseBool(cleanItem))
}
return result
} else {
panic("Could not parse bool list:\n" + text)
}
} | src/ecs/io/valueParser.go | 0.517083 | 0.408159 | valueParser.go | starcoder |
package main
import (
"fmt"
"math/rand"
"sync/atomic"
"time"
)
// In this example our state is owned by a single
// goroutine. This guarantees the data is never
// corrupted by concurrent access. To read or write
// that state, other goroutines send messages to the
// owning goroutine and receive the corresponding
// replies. The `opLeer` (read op) and `opEscribir`
// (write op) structs encapsulate those requests and
// give the owning goroutine a way to respond.
type opLeer struct {
	llave int // key to read
	resp chan int // channel on which the owner replies with the value
}
// opEscribir is a write request: set key `llave` to `val`,
// then acknowledge on `resp`.
type opEscribir struct {
	llave int
	val int
	resp chan bool
}
func main() {
	// As before, we count how many operations are performed.
	var ops int64 = 0
	// The `lecturas` (reads) and `escrituras` (writes) channels are used
	// by other goroutines to issue read and write requests.
	lecturas := make(chan *opLeer)
	escrituras := make(chan *opEscribir)
	// This goroutine owns `estado` (the state), which in the previous
	// example was a shared map but is now private to this goroutine. It
	// repeatedly selects on the `lecturas` and `escrituras` channels,
	// answering requests as they arrive. A request is served by
	// performing the requested operation and then sending a value on the
	// reply channel `resp` to signal success (and to carry the requested
	// value, in the read case).
	go func() {
		var estado = make(map[int]int)
		for {
			select {
			case leer := <-lecturas:
				leer.resp <- estado[leer.llave]
			case escribir := <-escrituras:
				estado[escribir.llave] = escribir.val
				escribir.resp <- true
			}
		}
	}()
	// Start 100 goroutines that issue reads to the state-owning goroutine
	// via the `lecturas` channel. Each read builds an `opLeer`, sends it
	// on `lecturas`, and waits for the result on the provided `resp`
	// channel.
	for r := 0; r < 100; r++ {
		go func() {
			for {
				leer := &opLeer{
					llave: rand.Intn(5),
					resp: make(chan int)}
				lecturas <- leer
				<-leer.resp
				atomic.AddInt64(&ops, 1)
			}
		}()
	}
	// Also start 10 writers, using a similar mechanism.
	for w := 0; w < 10; w++ {
		go func() {
			for {
				escribir := &opEscribir{
					llave: rand.Intn(5),
					val: rand.Intn(100),
					resp: make(chan bool)}
				escrituras <- escribir
				<-escribir.resp
				atomic.AddInt64(&ops, 1)
			}
		}()
	}
	// Let the goroutines work for one second.
	time.Sleep(time.Second)
	// Finally, capture and report the `ops` count.
	opsFinal := atomic.LoadInt64(&ops)
	fmt.Println("ops:", opsFinal)
}
package model
/*
The model type is the tip of the modelling pyramid and contains a set of
subsidiary models - like one to model the skill hierarchy, another to model who
holds which skill etc. The type provides methods for CRUD operations like
adding a person or allocating a skill to a person. The model implements its
methods for the most part by delegating smaller operations to the subsidiary
models. It is the model that is responsible for propagating changes between
the subsidiary models, so that the subsidiary models in turn can have minimal
scope and coupling. The model methods do NOT check the legitimacy of the
parameters provided and will panic if they are wrong. For example if an email
address provided is one that is known to the system.
*/
type model struct {
	tree *tree // the skill hierarchy
	holdings *holdings // who holds which skill
	uiStates *uiStates // per-person (abstract) gui state
}
// newModel constructs a model with freshly initialised subsidiary models.
func newModel() *model {
	return &model{
		tree:     newTree(),
		holdings: newHoldings(),
		uiStates: newUiStates(),
	}
}
//---------------------------------------------------------------------------
// Add operations
//---------------------------------------------------------------------------
// addPerson registers a new person, propagating the change to the holdings
// and uiStates subsidiary models.
func (model *model) addPerson(emailName string) {
	model.holdings.notifyPersonAdded(emailName)
	model.uiStates.notifyPersonAdded(emailName)
}
// addRootSkillNode adds a new root node to the skill tree, notifies the
// holdings model, and returns the new skill's id.
func (model *model) addRootSkillNode(title string,
	description string) (skillId int) {
	skillNode, skillId := model.tree.addRootSkillNode(title, description)
	model.holdings.notifySkillAdded(skillNode)
	return
}
// addChildSkillNode adds a new child node under the given parent skill,
// notifies the holdings model, and returns the new skill's id.
func (model *model) addChildSkillNode(title string,
	description string, parent int) (skillId int) {
	skillNode, skillId := model.tree.addChildSkillNode(title,
		description, parent)
	model.holdings.notifySkillAdded(skillNode)
	return
}
// givePersonSkill records in the holdings model that the given person
// holds the given skill.
func (model *model) givePersonSkill(skill *skillNode, emailName string) {
	model.holdings.givePersonSkill(skill, emailName)
}
//---------------------------------------------------------------------------
// Query operations
//---------------------------------------------------------------------------
// treeIsEmpty reports whether the skill tree has no nodes at all.
func (model *model) treeIsEmpty() bool {
	return model.tree.treeIsEmpty()
}
// personExists reports whether the given email name is known to the model.
func (model *model) personExists(emailName string) bool {
	return model.holdings.personExists(emailName)
}
// skillExists reports whether the skill with the given id is known to the
// holdings model.
func (model *model) skillExists(skillId int) bool {
	return model.holdings.skillExists(model.tree.nodeFromUid[skillId])
}
// skillNode looks up the tree node for the given skill id.
func (model *model) skillNode(skillId int) (skillNode *skillNode) {
	return model.tree.nodeFromUid[skillId]
}
// personHasSkill reports whether the given person holds the given skill.
func (model *model) personHasSkill(skillId int, email string) (
	hasSkilll bool) {
	return model.holdings.personHasSkill(
		model.tree.nodeFromUid[skillId], email)
}
// someoneHasThisSkill reports whether at least one person holds the skill.
func (model *model) someoneHasThisSkill(skillNode *skillNode) bool {
	return model.holdings.someoneHasThisSkill(skillNode)
}
// titleOfSkill returns the title of the skill with the given id.
func (model *model) titleOfSkill(skillId int) (title string) {
	return model.tree.titleOfSkill(skillId)
}
// isCollapsed reports whether the given person has collapsed the given
// skill node in the (abstract) gui.
func (model *model) isCollapsed(email string, skillId int) (
	collapsed bool) {
	return model.uiStates.stateOfPerson[email].collapsed.Contains(skillId)
}
/*
The enumerateTree method provides a linear sequence of skill Uids which can be
used essentially as an iterator to render the skill tree. It is personalised
to a particular person in the sense that it will exclude skill nodes that that
person has collapsed in the (abstract) gui. Separate query methods are
available to get the extra data that might be needed for each row - like for
example its depth in the tree.
*/
func (model *model) enumerateTree(email string) (skillSeq []int) {
	blacklist := model.uiStates.stateOfPerson[email].collapsed
	return model.tree.enumerateTree(blacklist)
}
//---------------------------------------------------------------------------
// UiState operations (in model space)
//---------------------------------------------------------------------------
// toggleSkillCollapsed flips whether the given person has the given skill
// node collapsed in the (abstract) gui.
func (model *model) toggleSkillCollapsed(emailName string, skill *skillNode) {
	model.uiStates.stateOfPerson[emailName].toggleCollapsed(skill)
}
package bvh
import (
"container/heap"
"fmt"
"golang.org/x/exp/constraints"
)
// Node is a node of the BVH tree. A leaf carries the user value; an internal
// node has exactly two children whose boxes its own box encloses.
type Node[I constraints.Float, B interface {
	Union(B) B
	Surface() I
}, V any] struct {
	box B // bounding volume: the inserted box for a leaf, the union of the children's boxes otherwise
	Value V // user payload (set on leaves by Insert)
	parent *Node[I, B, V] // nil for the root
	children [2]*Node[I, B, V] // both nil for leaves
	isLeaf bool
}
// findAnotherChild returns the sibling of 'not'; it panics when 'not' is not
// one of n's children.
func (n *Node[I, B, V]) findAnotherChild(not *Node[I, B, V]) *Node[I, B, V] {
	switch not {
	case n.children[0]:
		return n.children[1]
	case n.children[1]:
		return n.children[0]
	}
	panic("unreachable, please make sure the 'not' is the n's child")
}
// findChildPointer returns the address of the children slot holding 'child';
// it panics when 'child' is not one of n's children.
func (n *Node[I, B, V]) findChildPointer(child *Node[I, B, V]) **Node[I, B, V] {
	switch child {
	case n.children[0]:
		return &n.children[0]
	case n.children[1]:
		return &n.children[1]
	}
	panic("unreachable, please make sure the 'not' is the n's child")
}
// each walks the subtree rooted at n, invoking foreach on every leaf value
// whose box passes test. A nil receiver is a no-op.
func (n *Node[I, B, V]) each(test func(bound B) bool, foreach func(v V)) {
	if n == nil {
		return
	}
	if !n.isLeaf {
		n.children[0].each(test, foreach)
		n.children[1].each(test, foreach)
		return
	}
	if test(n.box) {
		foreach(n.Value)
	}
}
// Tree is a dynamic bounding volume hierarchy. I is the scalar type used for
// surface areas, B the bounding-box type, and V the value stored on leaves.
type Tree[I constraints.Float, B interface {
	Union(B) B
	Surface() I
}, V any] struct {
	root *Node[I, B, V] // nil when the tree is empty
}
// Insert adds a leaf holding value with the given bounding box and returns
// the new leaf node. The insertion point is chosen by a branch-and-bound
// search for the sibling that minimises the total surface-area cost, after
// which ancestor boxes are refitted and tree rotations applied.
func (t *Tree[I, B, V]) Insert(leaf B, value V) (n *Node[I, B, V]) {
	n = &Node[I, B, V]{
		box: leaf,
		Value: value,
		parent: nil,
		children: [2]*Node[I, B, V]{nil, nil},
		isLeaf: true,
	}
	// Empty tree: the new leaf becomes the root.
	if t.root == nil {
		t.root = n
		return
	}
	// Stage 1: find the best sibling for the new leaf
	sibling := t.root
	bestCost := t.root.box.Union(leaf).Surface()
	parentTo := &t.root // the parent's children pointer which point to the sibling
	var queue searchHeap[I, Node[I, B, V]]
	// Seeding with a direct Push is fine: a one-element heap trivially
	// satisfies the heap property.
	queue.Push(searchItem[I, Node[I, B, V]]{pointer: t.root, parentTo: &t.root})
	leafCost := leaf.Surface()
	for queue.Len() > 0 {
		p := heap.Pop(&queue).(searchItem[I, Node[I, B, V]])
		// determine if node p has the best cost
		mergeSurface := p.pointer.box.Union(leaf).Surface()
		deltaCost := mergeSurface - p.pointer.box.Surface()
		cost := p.inheritedCost + mergeSurface
		if cost <= bestCost {
			bestCost = cost
			sibling = p.pointer
			parentTo = p.parentTo
		}
		// determine if it is worthwhile to explore the children of node p.
		inheritedCost := p.inheritedCost + deltaCost // lower bound
		if !p.pointer.isLeaf && inheritedCost+leafCost < bestCost {
			heap.Push(&queue, searchItem[I, Node[I, B, V]]{
				pointer: p.pointer.children[0],
				parentTo: &p.pointer.children[0],
				inheritedCost: inheritedCost,
			})
			heap.Push(&queue, searchItem[I, Node[I, B, V]]{
				pointer: p.pointer.children[1],
				parentTo: &p.pointer.children[1],
				inheritedCost: inheritedCost,
			})
		}
	}
	// Stage 2: create a new parent holding the chosen sibling and the new leaf
	*parentTo = &Node[I, B, V]{
		box: sibling.box.Union(leaf), // we will calculate in Stage3
		parent: sibling.parent,
		children: [2]*Node[I, B, V]{sibling, n},
		isLeaf: false,
	}
	n.parent = *parentTo
	sibling.parent = *parentTo
	// Stage 3: walk back up the tree refitting AABBs and applying rotations
	for p := *parentTo; p != nil; p = p.parent {
		p.box = p.children[0].box.Union(p.children[1].box)
		t.rotate(p)
	}
	return
}
// Delete removes leaf node n (a node previously returned by Insert) from
// the tree and returns its value. n's former parent is spliced out, the
// sibling takes its place, and every ancestor's box is refit (and possibly
// rotated) on the way back up.
func (t *Tree[I, B, V]) Delete(n *Node[I, B, V]) interface{} {
	if n.parent == nil {
		// n is the root
		t.root = nil
		return n.Value
	}
	sibling := n.parent.findAnotherChild(n)
	grand := n.parent.parent
	if grand == nil {
		// n's parent is root; the sibling's own box is unchanged, so no
		// refitting is required.
		t.root = sibling
		sibling.parent = nil
	} else {
		p := grand.findChildPointer(n.parent)
		*p = sibling
		sibling.parent = grand
		// Refit every ancestor including the root. The previous loop
		// condition (p.parent != nil) stopped one level short and left the
		// root's box stale after a deletion. rotate is a no-op on the root.
		for a := grand; a != nil; a = a.parent {
			a.box = a.children[0].box.Union(a.children[1].box)
			t.rotate(a)
		}
	}
	return n.Value
}
// rotate tries to shrink n's bounding box by swapping one of n's children
// with n's sibling (a dynamic-BVH "tree rotation"). It is a no-op for
// leaves and for the root. n's box is recomputed after a swap; n.parent's
// box is refit by the caller's upward loop.
func (t *Tree[I, B, V]) rotate(n *Node[I, B, V]) {
	if n.isLeaf || n.parent == nil {
		return
	}
	// trying to swap n's sibling and children
	sibling := n.parent.findAnotherChild(n)
	current := n.box.Surface()
	// If pairing children[1] with the sibling yields a smaller box than n's
	// current box, pull the sibling down and move children[0] up beside n.
	if n.children[1].box.Union(sibling.box).Surface() < current {
		// swap n.children[0] and sibling
		// NOTE: the tuple assignment relies on Go's evaluation order — the
		// left-hand operands (in particular n.children[0]) are evaluated
		// before any assignment takes place, so the parent pointer is set
		// on the child being moved up, not on the sibling just swapped in.
		t1 := [2]*Node[I, B, V]{n, n.children[0]}
		t2 := [2]*Node[I, B, V]{sibling, n.children[1]}
		n.parent.children, n.children, n.children[0].parent, sibling.parent = t1, t2, n.parent, n
		n.box = n.children[0].box.Union(n.children[1].box)
	} else if n.children[0].box.Union(sibling.box).Surface() < current {
		// swap n.children[1] and sibling (mirror of the branch above)
		t1 := [2]*Node[I, B, V]{n, n.children[1]}
		t2 := [2]*Node[I, B, V]{sibling, n.children[0]}
		n.parent.children, n.children, n.children[1].parent, sibling.parent = t1, t2, n.parent, n
		n.box = n.children[0].box.Union(n.children[1].box)
	}
}
// Find calls foreach with the value of every leaf whose bound satisfies
// test — typically a predicate built with TouchPoint or TouchBound.
// It is safe to call on an empty tree.
func (t *Tree[I, B, V]) Find(test func(bound B) bool, foreach func(v V)) {
	t.root.each(test, foreach)
}
// String renders the whole tree using the root node's String method.
// NOTE(review): t.root is nil for an empty tree; Node.String must tolerate
// a nil receiver for this to be safe — confirm.
func (t Tree[I, B, V]) String() string {
	return t.root.String()
}
// String renders the subtree rooted at n: a leaf prints its value, an
// internal node prints "{left, right}". A nil node prints as "<nil>" so
// that String is safe on an empty tree (Tree.String calls it through a
// possibly-nil root pointer, which previously panicked).
func (n *Node[I, B, V]) String() string {
	if n == nil {
		return "<nil>"
	}
	if n.isLeaf {
		return fmt.Sprint(n.Value)
	}
	return fmt.Sprintf("{%v, %v}", n.children[0], n.children[1])
}
// TouchPoint builds a Find predicate satisfied by any bound that contains
// the given point.
func TouchPoint[Vec any, B interface{ WithIn(Vec) bool }](point Vec) func(bound B) bool {
	pred := func(b B) bool {
		return b.WithIn(point)
	}
	return pred
}
// TouchBound builds a Find predicate satisfied by any bound that touches
// (overlaps) other.
func TouchBound[B interface{ Touch(B) bool }](other B) func(bound B) bool {
	pred := func(b B) bool {
		return b.Touch(other)
	}
	return pred
}
// searchHeap is a min-heap of searchItem ordered by inheritedCost, used by
// Insert's best-first search for the cheapest sibling.
type searchHeap[I constraints.Float, V any] []searchItem[I, V]

// searchItem is one candidate node in the sibling search.
type searchItem[I constraints.Float, V any] struct {
	pointer *V   // candidate node
	parentTo **V // the parent's child slot that points at the candidate
	inheritedCost I // accumulated box-growth cost of the candidate's ancestors
}
func (h searchHeap[I, V]) Len() int { return len(h) }
func (h searchHeap[I, V]) Less(i, j int) bool { return h[i].inheritedCost < h[j].inheritedCost }
func (h searchHeap[I, V]) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h *searchHeap[I, V]) Push(x interface{}) { *h = append(*h, x.(searchItem[I, V])) }
func (h *searchHeap[I, V]) Pop() interface{} {
old := *h
n := len(old)
x := old[n-1]
*h = old[0 : n-1]
return x
} | server/internal/bvh/bvh.go | 0.513425 | 0.403156 | bvh.go | starcoder |
package rmsprop
import (
"github.com/nlpodyssey/spago/gd"
"github.com/nlpodyssey/spago/mat"
"github.com/nlpodyssey/spago/nn"
)
// Compile-time check that *Config implements gd.MethodConfig.
var _ gd.MethodConfig = &Config[float32]{}

// Config provides configuration settings for an RMSProp optimizer.
type Config[T mat.DType] struct {
	gd.MethodConfig
	LR T      // learning rate
	Epsilon T // small constant added to the denominator for numerical stability
	Decay T   // decay rate of the moving average of squared gradients
}
// NewConfig returns a new RMSProp Config with the given learning rate,
// epsilon and decay hyper-parameters.
func NewConfig[T mat.DType](lr, epsilon, decay T) Config[T] {
	var c Config[T]
	c.LR = lr
	c.Epsilon = epsilon
	c.Decay = decay
	return c
}
// NewDefaultConfig returns a new Config with generically reasonable default
// values (LR 0.001, Epsilon 1e-8, Decay 0.95).
func NewDefaultConfig[T mat.DType]() Config[T] {
	return NewConfig[T](0.001, 1e-08, 0.95)
}
// Compile-time check that *RMSProp implements gd.Method.
var _ gd.Method[float32] = &RMSProp[float32]{}

// The RMSProp method is a variant of AdaGrad where the squared sum of previous gradients is replaced with a moving average.
// References:
// RMSProp: Divide the gradient by a running average of its recent magnitude
// http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
type RMSProp[T mat.DType] struct {
	Config[T]
}
// New returns a new RMSProp optimizer, initialized according to the given
// configuration.
func New[T mat.DType](c Config[T]) *RMSProp[T] {
	opt := new(RMSProp[T])
	opt.Config = c
	return opt
}
// Label returns the enumeration-like value which identifies this gradient
// descent method (gd.RMSProp).
func (o *RMSProp[_]) Label() int {
	return gd.RMSProp
}
// v is the index within the support's Data slice of the moving average of
// squared gradients.
const v = 0

// NewSupport returns a new support structure with the given dimensions.
// Data[v] holds the running average of squared gradients, initialized to
// zeros.
func (o *RMSProp[T]) NewSupport(r, c int) *nn.Payload[T] {
	return &nn.Payload[T]{
		Label: gd.RMSProp,
		Data: []mat.Matrix[T]{mat.NewEmptyDense[T](r, c)}, // v at index 0
	}
}
// Delta returns the difference between the current params and where the
// method wants it to be. The per-parameter support (the moving average of
// squared gradients) is fetched, or lazily created, via GetOrSetPayload.
func (o *RMSProp[T]) Delta(param nn.Param[T]) mat.Matrix[T] {
	return o.calcDelta(param.Grad(), gd.GetOrSetPayload[T](param, o).Data)
}
// calcDelta applies the RMSProp update rule, mutating supp[v] in place:
//
//	v     <- Decay*v + (1-Decay)*grads^2
//	delta <- LR * grads / (sqrt(v) + Epsilon)
func (o *RMSProp[T]) calcDelta(grads mat.Matrix[T], supp []mat.Matrix[T]) mat.Matrix[T] {
	supp[v].ProdScalarInPlace(o.Decay) // v *= decay
	buf := grads.Prod(grads)           // grads^2
	buf.ProdScalarInPlace(1.0 - o.Decay)
	supp[v].AddInPlace(buf) // v += (1-decay)*grads^2
	buf2 := supp[v].Sqrt()
	buf2.AddScalarInPlace(o.Epsilon) // sqrt(v) + eps
	delta := grads.Div(buf2)
	delta.ProdScalarInPlace(o.LR)
	return delta
}
package medium
import (
"math"
"unicode"
)
// https://leetcode.com/problems/add-two-numbers/description/
// You are given two non-empty linked lists representing two non-negative integers.
// The digits are stored in reverse order and each of their nodes contain a single digit.
// Add the two numbers and return it as a linked list.
// You may assume the two numbers do not contain any leading zero, except the number 0 itself.
// Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
// Output: 7 -> 0 -> 8
// ListNode is a singly linked list node holding one decimal digit.
type ListNode struct {
	Val  int
	Next *ListNode
}

// addTwoNumbers sums two non-negative integers whose digits are stored in
// reverse order, one digit per node, and returns the sum in the same
// representation. Example: (2 -> 4 -> 3) + (5 -> 6 -> 4) = 7 -> 0 -> 8.
func addTwoNumbers(l1 *ListNode, l2 *ListNode) *ListNode {
	head := &ListNode{}
	tail := head
	carry := 0
	for l1 != nil || l2 != nil {
		sum := carry
		if l1 != nil {
			sum += l1.Val
			l1 = l1.Next
		}
		if l2 != nil {
			sum += l2.Val
			l2 = l2.Next
		}
		tail.Next = &ListNode{Val: sum % 10}
		tail = tail.Next
		carry = sum / 10
	}
	// A leftover carry becomes one final high-order digit.
	if carry > 0 {
		tail.Next = &ListNode{Val: carry}
	}
	return head.Next
}
// https://leetcode.com/problems/longest-substring-without-repeating-characters/description/
// Given a string, find the length of the longest substring without repeating characters.
// Examples:
// Given "abcabcbb", the answer is "abc", which the length is 3.
// Given "bbbbb", the answer is "b", with the length of 1.
// Given "pwwkew", the answer is "wke", with the length of 3. Note that the answer must be a substring, "pwke" is a subsequence and not a substring.
// lengthOfLongestSubstring returns the length of the longest substring of s
// that contains no repeated characters, using a sliding window over runes.
func lengthOfLongestSubstring(s string) int {
	runes := []rune(s)
	seen := make(map[rune]bool, len(runes))
	best, left := 0, 0
	for right := 0; right < len(runes); {
		if seen[runes[right]] {
			// Shrink the window from the left until the duplicate is gone.
			delete(seen, runes[left])
			left++
			continue
		}
		seen[runes[right]] = true
		right++
		if width := right - left; width > best {
			best = width
		}
	}
	return best
}
// https://leetcode.com/problems/string-to-integer-atoi/description/
// Implement atoi to convert a string to an integer.
// Hint: Carefully consider all possible input cases. If you want a challenge, please do not see below and ask yourself what are the possible input cases.
// Notes: It is intended for this problem to be specified vaguely (ie, no given input specs). You are responsible to gather all the input requirements up front.
// Requirements for atoi:
// The function first discards as many whitespace characters as necessary until the first non-whitespace character is found. Then, starting from this character, takes an optional initial plus or minus sign followed by as many numerical digits as possible, and interprets them as a numerical value.
// The string can contain additional characters after those that form the integral number, which are ignored and have no effect on the behavior of this function.
// If the first sequence of non-whitespace characters in str is not a valid integral number, or if no such sequence exists because either str is empty or it contains only whitespace characters, no conversion is performed.
// If no valid conversion could be performed, a zero value is returned. If the correct value is out of the range of representable values, INT_MAX (2147483647) or INT_MIN (-2147483648) is returned.
// myAtoi converts str to a 32-bit signed integer following the classic
// atoi contract: skip leading whitespace, accept an optional '+'/'-' sign,
// read consecutive decimal digits, ignore everything after them, and clamp
// the result to [math.MinInt32, math.MaxInt32]. Input with no leading
// numeric sequence yields 0.
//
// This replaces the previous two-pass implementation whose overflow guard
// (`math.MaxInt32/10 < 10`) was a constant-false expression, so overflow
// during digit accumulation was never detected there.
func myAtoi(str string) int {
	i, n := 0, len(str)
	// Skip leading whitespace (ASCII whitespace; matches the previous
	// unicode.IsSpace handling for single-byte characters).
	for i < n && unicode.IsSpace(rune(str[i])) {
		i++
	}
	// Optional sign. A second sign (e.g. "+-12") means no digits follow
	// the first one, so the digit loop below yields 0.
	sign := 1
	if i < n && (str[i] == '+' || str[i] == '-') {
		if str[i] == '-' {
			sign = -1
		}
		i++
	}
	result := 0
	for ; i < n && str[i] >= '0' && str[i] <= '9'; i++ {
		d := int(str[i] - '0')
		// Clamp before the accumulator can exceed the int32 range; this
		// also keeps the arithmetic safe on 32-bit platforms.
		if result > (math.MaxInt32-d)/10 {
			if sign < 0 {
				return math.MinInt32
			}
			return math.MaxInt32
		}
		result = result*10 + d
	}
	return sign * result
}
package iso20022
// Set of elements used to provide information on the dates related to the
// underlying individual transaction. The XML tags follow the ISO 20022
// element names for this message component.
type TransactionDates2 struct {

	// Point in time when the payment order from the initiating party meets the processing conditions of the account servicing agent. This means that the account servicing agent has received the payment order and has applied checks such as authorisation, availability of funds.
	AcceptanceDateTime *ISODateTime `xml:"AccptncDtTm,omitempty"`

	// Identifies when an amount of money should have contractually been credited or debited the account versus when the amount of money was actually settled (debited/credited) on the cash account.
	TradeActivityContractualSettlementDate *ISODate `xml:"TradActvtyCtrctlSttlmDt,omitempty"`

	// Date on which the trade was executed.
	TradeDate *ISODate `xml:"TradDt,omitempty"`

	// Date on which the amount of money ceases to be available to the agent that owes it and when the amount of money becomes available to the agent to which it is due.
	InterbankSettlementDate *ISODate `xml:"IntrBkSttlmDt,omitempty"`

	// Start date of the underlying transaction, such as a treasury transaction, an investment plan.
	StartDate *ISODate `xml:"StartDt,omitempty"`

	// End date of the underlying transaction, such as a treasury transaction, an investment plan.
	EndDate *ISODate `xml:"EndDt,omitempty"`

	// Date and time of the underlying transaction.
	TransactionDateTime *ISODateTime `xml:"TxDtTm,omitempty"`

	// Proprietary date related to the underlying transaction.
	Proprietary []*ProprietaryDate2 `xml:"Prtry,omitempty"`
}
func (t *TransactionDates2) SetAcceptanceDateTime(value string) {
t.AcceptanceDateTime = (*ISODateTime)(&value)
}
func (t *TransactionDates2) SetTradeActivityContractualSettlementDate(value string) {
t.TradeActivityContractualSettlementDate = (*ISODate)(&value)
}
func (t *TransactionDates2) SetTradeDate(value string) {
t.TradeDate = (*ISODate)(&value)
}
func (t *TransactionDates2) SetInterbankSettlementDate(value string) {
t.InterbankSettlementDate = (*ISODate)(&value)
}
func (t *TransactionDates2) SetStartDate(value string) {
t.StartDate = (*ISODate)(&value)
}
func (t *TransactionDates2) SetEndDate(value string) {
t.EndDate = (*ISODate)(&value)
}
func (t *TransactionDates2) SetTransactionDateTime(value string) {
t.TransactionDateTime = (*ISODateTime)(&value)
}
func (t *TransactionDates2) AddProprietary() *ProprietaryDate2 {
newValue := new (ProprietaryDate2)
t.Proprietary = append(t.Proprietary, newValue)
return newValue
} | TransactionDates2.go | 0.806891 | 0.651895 | TransactionDates2.go | starcoder |
package lib
import (
"fmt"
"strconv"
"strings"
"github.com/dunelang/dune"
)
// init registers the "convert" native library together with its TypeScript
// declarations so scripts get typed access to the conversion helpers.
// NOTE(review): convert.toByte is implemented in Convert below but is not
// declared here — confirm whether it should be exposed in the .d.ts block.
func init() {
	dune.RegisterLib(Convert, `
declare namespace convert {
	export function toInt(v: string | number | runtime.FunctionInfo): number
	export function toFloat(v: string | number): number
	export function toString(v: any): string 
	export function toRune(v: any): string 
	export function toBool(v: string | number | boolean): boolean
	export function toBytes(v: string | byte[]): byte[]
}
`)
}
var Convert = []dune.NativeFunction{
{
Name: "convert.toByte",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
var r dune.Value
switch a.Type {
case dune.String:
default:
return dune.NullValue, fmt.Errorf("can't convert %v to byte", a.Type)
}
s := a.String()
if len(s) != 1 {
return dune.NullValue, fmt.Errorf("can't convert %v to int", a.Type)
}
return r, nil
},
},
{
Name: "convert.toRune",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
switch a.Type {
case dune.String:
s := a.String()
if len(s) != 1 {
return dune.NullValue, fmt.Errorf("can't convert %v to rune", s)
}
return dune.NewRune(rune(s[0])), nil
case dune.Int:
i := a.ToInt()
if i > 255 {
return dune.NullValue, fmt.Errorf("can't convert %v to rune", i)
}
return dune.NewRune(rune(i)), nil
default:
return dune.NullValue, fmt.Errorf("can't convert %v to byte", a.Type)
}
},
},
{
Name: "convert.toInt",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
var r dune.Value
switch a.Type {
case dune.Int:
r = a
case dune.Float:
r = dune.NewInt64(a.ToInt())
case dune.Rune:
r = dune.NewInt64(a.ToInt())
case dune.String:
s := strings.Trim(a.String(), " ")
i, err := strconv.ParseInt(s, 0, 64)
if err != nil {
return dune.NullValue, err
}
r = dune.NewInt64(i)
case dune.Func:
r = dune.NewInt64(a.ToInt())
default:
return dune.NullValue, fmt.Errorf("can't convert %v to int", a.Type)
}
return r, nil
},
},
{
Name: "convert.toFloat",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
switch a.Type {
case dune.Int:
return dune.NewFloat(a.ToFloat()), nil
case dune.Float:
return a, nil
case dune.String:
s := strings.Trim(a.String(), " ")
f, err := strconv.ParseFloat(s, 64)
if err != nil {
return dune.NullValue, err
}
return dune.NewFloat(f), nil
default:
return dune.NullValue, fmt.Errorf("can't convert %v to int", a.Type)
}
},
},
{
Name: "convert.toBool",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
var r dune.Value
switch a.Type {
case dune.Bool:
r = a
case dune.Int:
switch a.ToInt() {
case 0:
r = dune.FalseValue
case 1:
r = dune.TrueValue
default:
return dune.NullValue, fmt.Errorf("can't convert %v to bool", a.Type)
}
case dune.String:
s := a.String()
s = strings.Trim(s, " ")
switch s {
case "true", "1":
r = dune.TrueValue
case "false", "0":
r = dune.FalseValue
default:
return dune.NullValue, fmt.Errorf("can't convert %v to bool", s)
}
default:
return dune.NullValue, fmt.Errorf("can't convert %v to bool", a.Type)
}
return r, nil
},
},
{
Name: "convert.toString",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
return dune.NewString(a.String()), nil
},
},
{
Name: "convert.toBytes",
Arguments: 1,
Function: func(this dune.Value, args []dune.Value, vm *dune.VM) (dune.Value, error) {
a := args[0]
var r dune.Value
switch a.Type {
case dune.String:
r = dune.NewBytes(a.ToBytes())
case dune.Bytes:
r = a
default:
return dune.NullValue, fmt.Errorf("can't convert %v to int", a.Type)
}
return r, nil
},
},
} | lib/convert.go | 0.615897 | 0.450359 | convert.go | starcoder |
package valgo
// Int64Validator provides chainable validation rules for int64 values,
// operating on the shared validatorContext state.
type Int64Validator struct {
	*validatorContext
}
// IsInt64 starts a short-circuiting validation chain for an int64 on a
// fresh Validator: evaluation stops at the first failing rule.
func IsInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return NewValidator().IsInt64(value, nameAndTitle...)
}

// CheckInt64 starts a non-short-circuiting chain on a fresh Validator:
// every rule is evaluated and all failures are collected.
func CheckInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return NewValidator().CheckInt64(value, nameAndTitle...)
}

// IsInt64 starts a short-circuiting chain for an int64 on this validator.
func (v *validatorContext) IsInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return v.isInt64(true, value, nameAndTitle...)
}

// CheckInt64 starts a non-short-circuiting chain for an int64 on this
// validator.
func (v *validatorContext) CheckInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return v.isInt64(false, value, nameAndTitle...)
}

// IsInt64 starts a localized short-circuiting chain for an int64.
func (l *localized) IsInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return l.NewValidator().IsInt64(value, nameAndTitle...)
}

// CheckInt64 starts a localized non-short-circuiting chain for an int64.
func (l *localized) CheckInt64(value int64, nameAndTitle ...string) *Int64Validator {
	return l.NewValidator().CheckInt64(value, nameAndTitle...)
}
// isInt64 (re)initializes the chain state for a new int64 value.
// shortCircuit selects Is (stop at first failure) versus Check (evaluate
// all rules) semantics. nameAndTitle optionally supplies the field name
// and a human-readable title used in error templates.
func (v *validatorContext) isInt64(shortCircuit bool, value int64, nameAndTitle ...string) *Int64Validator {
	v.currentDataType = DataTypeInt64
	v.currentValue = value
	v.currentIndex += 1
	v.currentValid = true
	v.shortCircuit = shortCircuit
	sizeNameAndTitle := len(nameAndTitle)
	if sizeNameAndTitle > 0 {
		v.currentName = &nameAndTitle[0]
		if sizeNameAndTitle > 1 {
			v.currentTitle = &nameAndTitle[1]
		}
	}
	return &Int64Validator{v}
}
// EqualTo validates that the current value equals value. The rule is
// skipped once the chain has short-circuited; on failure it records an
// "equal_to" error. v.assert honors a preceding Not(), which is reset
// afterwards — the same pattern applies to every comparison rule below.
func (v *Int64Validator) EqualTo(value int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(IsEqualTo(v.currentValue, value)) {
		v.invalidate("equal_to", map[string]interface{}{
			"title": v.currentTitle,
			"value": value}, template...)
	}
	v.resetNegative()
	return v
}

// GreaterThan validates that the current value is strictly greater than
// value.
func (v *Int64Validator) GreaterThan(value int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(v.currentValue.(int64) > value) {
		v.invalidate("greater_than", map[string]interface{}{
			"title": v.currentTitle,
			"value": value}, template...)
	}
	v.resetNegative()
	return v
}

// GreaterOrEqualThan validates that the current value is greater than or
// equal to value.
func (v *Int64Validator) GreaterOrEqualThan(value int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(v.currentValue.(int64) >= value) {
		v.invalidate("greater_or_equal_than", map[string]interface{}{
			"title": v.currentTitle,
			"value": value}, template...)
	}
	v.resetNegative()
	return v
}

// LessThan validates that the current value is strictly less than value.
func (v *Int64Validator) LessThan(value int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(v.currentValue.(int64) < value) {
		v.invalidate("less_than", map[string]interface{}{
			"title": v.currentTitle,
			"value": value}, template...)
	}
	v.resetNegative()
	return v
}

// LessOrEqualThan validates that the current value is less than or equal
// to value.
func (v *Int64Validator) LessOrEqualThan(value int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(v.currentValue.(int64) <= value) {
		v.invalidate("less_or_equal_than", map[string]interface{}{
			"title": v.currentTitle,
			"value": value}, template...)
	}
	v.resetNegative()
	return v
}
// Not negates the next rule in the chain; the flag is cleared again by the
// rule's resetNegative call.
func (v *Int64Validator) Not() *Int64Validator {
	v.currentNegative = true
	return v
}
// Passing runs a custom validation function against the current chain. The
// function receives a CustomValidator bound to this chain's context and is
// expected to invalidate it on failure. Skipped when the chain has already
// short-circuited.
func (v *Int64Validator) Passing(
	function func(cv *CustomValidator, t ...string), template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	}
	customValidator := CustomValidator{
		validator: v.validatorContext,
	}
	function(&customValidator, template...)
	v.resetNegative()
	return v
}
// IsInt64InSlice reports whether value occurs anywhere in slice.
func IsInt64InSlice(value int64, slice []int64) bool {
	for i := range slice {
		if slice[i] == value {
			return true
		}
	}
	return false
}
// InSlice validates that the current value is one of the elements of
// slice; on failure it records an "in_slice" error carrying the offending
// value.
func (v *Int64Validator) InSlice(slice []int64, template ...string) *Int64Validator {
	if v.isShortCircuit() {
		return v
	} else if !v.assert(IsInt64InSlice(v.currentValue.(int64), slice)) {
		v.invalidate("in_slice", map[string]interface{}{
			"title": v.currentTitle,
			"value": v.currentValue}, template...)
	}
	v.resetNegative()
	return v
}
package brainfuck
import "fmt"
// Instruction is the state of a Brainfuck interpreter: the token stream,
// the current instruction pointer, the memory tape, and the '['/']'
// nesting level used for bracket matching.
type Instruction struct {
	ptr int        // index of the token being executed (-1 before the first Fetch)
	tokens []rune  // the program's operators
	tape *Tape     // memory tape plus input/output streams
	level int      // current loop-nesting depth
}
// NewInstruction builds an interpreter over tokens operating on tape, with
// the instruction pointer positioned just before the first token.
func NewInstruction(tokens []rune, tape *Tape) *Instruction {
	return &Instruction{
		ptr:    -1,
		tokens: tokens,
		tape:   tape,
		level:  0,
	}
}
// Fetch advances the instruction pointer to the next token and executes it.
// It returns false once the pointer moves past the final token, true
// otherwise. Execute may itself call Fetch recursively while handling '['
// and ']', so one top-level Fetch can run many tokens.
func (instr *Instruction) Fetch() bool {
	instr.ptr++
	if instr.ptr >= len(instr.tokens) {
		return false
	}
	instr.Execute()
	return true
}
// Execute modifies the memory tape or reads from input or writes to output
// depending on the operator.
// >: increments the data pointer (points to the next cell on the right).
// <: decrements the data pointer (points to the previous cell on the left).
// +: increases by one the byte at the data pointer.
// -: decreases by one the byte at the data pointer.
// .: output the byte at the data pointer, using the ASCII character encoding.
// ,: accept one byte of input, storing its value in the byte at the pointer.
// [: if the byte at the data pointer is zero, jump forward to the command
// after ']'; move forward to the next command otherwise.
// ]: if the byte at the data pointer is nonzero, jump back to the command
// after matching '['; move forward to the next command otherwise.
//
// NOTE(review): the '[' and ']' cases continue execution by calling Fetch
// recursively, so every loop iteration deepens the Go call stack — confirm
// this is acceptable for long-running programs.
func (instr *Instruction) Execute() {
	op := instr.tokens[instr.ptr]
	tape := instr.tape
	switch op {
	case '+':
		tape.data[tape.ptr]++
	case '-':
		tape.data[tape.ptr]--
	case '>':
		tape.ptr++
	case '<':
		tape.ptr--
	case '.':
		fmt.Fprintf(tape.out, "%c", instr.tape.data[tape.ptr])
	case ',':
		buf := make([]byte, 1)
		tape.in.Read(buf)
		tape.data[tape.ptr] = int8(buf[0])
	case '[':
		if tape.data[tape.ptr] != 0 {
			// Enter the loop body: record the nesting depth and keep going.
			instr.level++
			instr.Fetch()
		} else {
			// Skip forward to the matching ']' at this nesting depth,
			// adjusting level for any nested loops passed over, then resume
			// with the token after it.
			currentLevel := instr.level
			for {
				instr.ptr++
				if instr.tokens[instr.ptr] == '[' {
					instr.level++
				}
				if instr.tokens[instr.ptr] == ']' {
					if currentLevel == instr.level {
						break
					}
					instr.level--
				}
			}
			instr.Fetch()
		}
	case ']':
		if tape.data[tape.ptr] == 0 {
			// Leave the loop: drop one nesting level and continue.
			instr.level--
			instr.Fetch()
		} else {
			// Scan backward to the matching '[' at this nesting depth, then
			// resume with the token after it (the loop body's first token).
			currentLevel := instr.level
			for {
				instr.ptr--
				if instr.tokens[instr.ptr] == ']' {
					instr.level--
				}
				if instr.tokens[instr.ptr] == '[' {
					if currentLevel == instr.level {
						break
					}
					instr.level++
				}
			}
			instr.Fetch()
		}
	default:
		// Any other character is a no-op (Brainfuck treats it as a comment).
		return
	}
}
package reporting
import (
"fmt"
"strconv"
"time"
)
// HoursWorkedThisWeek returns the total hours documented per day since Sunday
// (ie work week starts Monday at 00:01)
// HoursWorkedThisWeek returns the total hours documented per day since Sunday
// (ie work week starts Monday at 00:01). The tracked data is keyed
// year -> month name -> day, per user.
func HoursWorkedThisWeek(filename, user string) float64 {
	data := getTrackedData(filename)[user]
	now := time.Now()
	return sumThisWeek(data, now)
}
// sumThisWeek totals the per-day values from Monday of t's week through t
// itself (on a Sunday, Weekday() is 0 and nothing is counted) and divides
// by 60 — presumably the stored values are minutes; confirm against the
// tracker that writes them.
func sumThisWeek(data Years, t time.Time) float64 {
	thisDay := t
	sum := 0
	// Walk backwards one day at a time, Weekday() days in total.
	for i := int(t.Weekday()); i > 0; i-- {
		y := strconv.Itoa(thisDay.Year())
		m := thisDay.Month().String() // uses month name - might "break" if user switches locales
		d := strconv.Itoa(thisDay.Day())
		if data[y] != nil {
			if data[y][m] != nil {
				sum += data[y][m][d]
			}
		}
		thisDay = thisDay.AddDate(0, 0, -1)
	}
	return float64(sum) / 60
}
// HoursWorkedThisMonth returns the total hours documented per day for the
// current month, excluding any day listed in daysOff (formatted
// YYYY-MM-DD).
func HoursWorkedThisMonth(filename, user string, daysOff []string) float64 {
	data := getTrackedData(filename)[user]
	now := time.Now()
	return sumThisMonth(data, now, daysOff)
}
// sumThisMonth totals the per-day values recorded in t's month, skipping
// days listed in daysOff (formatted YYYY-MM-DD), and divides by 60 to
// yield hours.
func sumThisMonth(data Years, t time.Time, daysOff []string) float64 {
	thisDay := t
	y := strconv.Itoa(thisDay.Year())
	m := thisDay.Month().String() // uses month name - might "break" if user switches locales
	days := data[y][m]
	sum := 0
	for key, d := range days {
		day, _ := strconv.Atoi(key)
		// Rebuild the YYYY-MM-DD form for the day-off comparison.
		if !isDayOff(daysOff, fmt.Sprintf("%s-%02d-%02d", y, int(thisDay.Month()), day)) {
			sum += d
		}
	}
	return float64(sum) / 60
}
// AvailableDaysThisMonth returns the total days that could be working days
func AvailableDaysThisMonth(t time.Time, daysOff []string) int {
totalDays := 0
thisDay := t
lastDay := thisDay.Day()
// lastDayMonth := time.Date(thisDay.Year(), time.Month(int(thisDay.Month())+1), 0, 0, 0, 0, 0, time.UTC)
// lastDay := lastDayMonth.Day()
for i := 1; i <= lastDay; i++ {
currDay := time.Date(thisDay.Year(), time.Month(int(thisDay.Month())), i, 0, 0, 0, 0, time.UTC)
if isDayOff(daysOff, currDay.Format("2006-01-02")) {
continue
}
if int(currDay.Weekday()) == 0 || int(currDay.Weekday()) == 6 {
continue
}
totalDays++
}
return totalDays
}
func isDayOff(daysOff []string, day string) bool {
for _, d := range daysOff {
if d == day {
return true
}
}
return false
} | pkg/reporting/simple.go | 0.705886 | 0.413773 | simple.go | starcoder |
package sampler
import (
"fmt"
"math/rand"
"github.com/pkg/errors"
)
// AliasSampler implements the Alias Method to sample from a discrete
// probability distribution. Initialized with the Vose Method, the
// sampler takes O(n) to initialize and O(1) to sample.
type AliasSampler struct {
	ProbabilityTable []float64 // probability of keeping column i on a draw
	AliasTable []int           // fallback index used when the biased coin fails
	Source *rand.Rand          // randomness used by Sample
}
// NewAliasSampler builds an AliasSampler over the given (unnormalized)
// weights using Vose's initialization; source supplies the randomness used
// by Sample. It returns a nil sampler and a non-nil error when weights is
// empty or cannot be normalized (the previous version returned a non-nil
// but unusable &AliasSampler{} alongside the error, contrary to Go
// convention).
func NewAliasSampler(source *rand.Rand, weights []float64) (*AliasSampler, error) {
	if len(weights) == 0 {
		return nil, fmt.Errorf("weights is an empty slice")
	}
	probabilityTable, aliasTable, err := VoseInitialization(weights)
	if err != nil {
		return nil, errors.Wrap(err, "cannot initialize the alias sampler")
	}
	return &AliasSampler{
		ProbabilityTable: probabilityTable,
		AliasTable:       aliasTable,
		Source:           source,
	}, nil
}
// Sample draws numSamples indices from the original distribution in O(1)
// per draw: pick a uniform column, then take either the column itself or
// its alias depending on a biased coin toss.
func (t *AliasSampler) Sample(numSamples int) []int {
	n := len(t.AliasTable)
	if n == 0 {
		return []int{}
	}
	out := make([]int, numSamples)
	for i := range out {
		col := t.Source.Intn(n)
		if t.Source.Float64() < t.ProbabilityTable[col] {
			out[i] = col
		} else {
			out[i] = t.AliasTable[col]
		}
	}
	return out
}
// VoseInitialization initialises the probability and alias tables using Vose's
// method. Vose's method runs in O(n) and is more numerically stable than
// alternatives. See http://www.keithschwarz.com/darts-dice-coins/ for more
// details.
func VoseInitialization(weights []float64) ([]float64, []int, error) {
	normalizedWeights, err := normalize(weights)
	if err != nil {
		return nil, nil, fmt.Errorf("cannot normalize input weights: %w", err)
	}
	// Partition indices by whether their scaled weight under- or overfills
	// one column of the table.
	small := make([]int, 0, len(normalizedWeights))
	large := make([]int, 0, len(normalizedWeights))
	for i, w := range normalizedWeights {
		if w < 1.0 {
			small = append(small, i)
		} else {
			large = append(large, i)
		}
	}
	aliasTable := make([]int, len(weights))
	probabilityTable := make([]float64, len(weights))
	var g, l int
	for (len(small) > 0) && (len(large) > 0) {
		l, small = small[0], small[1:]
		g, large = large[0], large[1:]
		aliasTable[l] = g
		probabilityTable[l] = normalizedWeights[l]
		// Move the excess mass of the large item into the small column.
		normalizedWeights[g] = (normalizedWeights[g] + normalizedWeights[l]) - 1.0
		if normalizedWeights[g] < 1.0 {
			small = append(small, g)
		} else {
			large = append(large, g)
		}
	}
	// Leftover columns hold probability 1 (their remaining mass deviates
	// from 1 only by floating-point error).
	for len(large) > 0 {
		g, large = large[0], large[1:]
		probabilityTable[g] = 1
	}
	for len(small) > 0 {
		l, small = small[0], small[1:]
		// BUG FIX: this previously wrote probabilityTable[g], clobbering the
		// last drained large index instead of finalizing the small one.
		probabilityTable[l] = 1
	}
	return probabilityTable, aliasTable, nil
}

// normalize scales weights so they average 1 (i.e. sum to len(weights)),
// as the alias-table construction requires. It rejects negative weights
// and an all-zero weight vector (which would otherwise produce NaNs).
func normalize(weights []float64) ([]float64, error) {
	var sum float64
	for _, w := range weights {
		if w < 0 {
			return nil, fmt.Errorf("found negative weight %v", w)
		}
		sum += w
	}
	if sum == 0 {
		return nil, fmt.Errorf("all weights are zero")
	}
	n := len(weights)
	normalizedWeights := make([]float64, n)
	for i, weight := range weights {
		normalizedWeights[i] = float64(n) * weight / sum
	}
	return normalizedWeights, nil
}
package config
/**
* Configuration for ACL entry resource. Maps one-to-one onto the NITRO API's
* "nsacl" resource; field comments below describe the corresponding ADC
* parameters. Fields after the read-only marker are populated by the ADC
* and never sent on create/update.
 */
type Nsacl struct {
	/**
	* Name for the extended ACL rule. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
	 */
	Aclname string `json:"aclname,omitempty"`
	/**
	* Action to perform on incoming IPv4 packets that match the extended ACL rule.
	Available settings function as follows:
	* ALLOW - The Citrix ADC processes the packet.
	* BRIDGE - The Citrix ADC bridges the packet to the destination without processing it.
	* DENY - The Citrix ADC drops the packet.
	 */
	Aclaction string `json:"aclaction,omitempty"`
	/**
	* Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.
	 */
	Td int `json:"td,omitempty"`
	/**
	* IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	 */
	Srcip bool `json:"srcip,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	 */
	Srcipop string `json:"srcipop,omitempty"`
	/**
	* IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example:10.102.29.30-10.102.29.189.
	 */
	Srcipval string `json:"srcipval,omitempty"`
	/**
	* Policy dataset which can have multiple IP ranges bound to it.
	 */
	Srcipdataset string `json:"srcipdataset,omitempty"`
	/**
	* Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90.
	 */
	Srcport bool `json:"srcport,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	 */
	Srcportop string `json:"srcportop,omitempty"`
	/**
	* Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90.
	 */
	Srcportval string `json:"srcportval,omitempty"`
	/**
	* IP address or range of IP addresses to match against the destination IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	 */
	Destip bool `json:"destip,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	 */
	Destipop string `json:"destipop,omitempty"`
	/**
	* IP address or range of IP addresses to match against the destination IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	 */
	Destipval string `json:"destipval,omitempty"`
	/**
	* Policy dataset which can have multiple IP ranges bound to it.
	 */
	Destipdataset string `json:"destipdataset,omitempty"`
	/**
	* Port number or range of port numbers to match against the destination port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90.
	Note: The destination port can be specified only for TCP and UDP protocols.
	 */
	Destport bool `json:"destport,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	 */
	Destportop string `json:"destportop,omitempty"`
	/**
	* Port number or range of port numbers to match against the destination port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90.
	Note: The destination port can be specified only for TCP and UDP protocols.
	 */
	Destportval string `json:"destportval,omitempty"`
	/**
	* Number of seconds, in multiples of four, after which the extended ACL rule expires. If you do not want the extended ACL rule to expire, do not specify a TTL value.
	 */
	Ttl int `json:"ttl,omitempty"`
	/**
	* MAC address to match against the source MAC address of an incoming IPv4 packet.
	 */
	Srcmac string `json:"srcmac,omitempty"`
	/**
	* Used to define range of Source MAC address. It takes string of 0 and 1, 0s are for exact match and 1s for wildcard. For matching first 3 bytes of MAC address, srcMacMask value "000000111111".
	 */
	Srcmacmask string `json:"srcmacmask,omitempty"`
	/**
	* Protocol to match against the protocol of an incoming IPv4 packet.
	 */
	Protocol string `json:"protocol,omitempty"`
	/**
	* Protocol to match against the protocol of an incoming IPv4 packet.
	 */
	Protocolnumber int `json:"protocolnumber,omitempty"`
	/**
	* ID of the VLAN. The Citrix ADC applies the ACL rule only to the incoming packets of the specified VLAN. If you do not specify a VLAN ID, the appliance applies the ACL rule to the incoming packets on all VLANs.
	 */
	Vlan int `json:"vlan,omitempty"`
	/**
	* ID of the VXLAN. The Citrix ADC applies the ACL rule only to the incoming packets of the specified VXLAN. If you do not specify a VXLAN ID, the appliance applies the ACL rule to the incoming packets on all VXLANs.
	 */
	Vxlan int `json:"vxlan,omitempty"`
	/**
	* ID of an interface. The Citrix ADC applies the ACL rule only to the incoming packets from the specified interface. If you do not specify any value, the appliance applies the ACL rule to the incoming packets of all interfaces.
	 */
	Interface string `json:"Interface,omitempty"`
	/**
	* Allow only incoming TCP packets that have the ACK or RST bit set, if the action set for the ACL rule is ALLOW and these packets match the other conditions in the ACL rule.
	 */
	Established bool `json:"established,omitempty"`
	/**
	* ICMP Message type to match against the message type of an incoming ICMP packet. For example, to block DESTINATION UNREACHABLE messages, you must specify 3 as the ICMP type.
	Note: This parameter can be specified only for the ICMP protocol.
	 */
	Icmptype int `json:"icmptype,omitempty"`
	/**
	* Code of a particular ICMP message type to match against the ICMP code of an incoming ICMP packet. For example, to block DESTINATION HOST UNREACHABLE messages, specify 3 as the ICMP type and 1 as the ICMP code.
	If you set this parameter, you must set the ICMP Type parameter.
	 */
	Icmpcode int `json:"icmpcode,omitempty"`
	/**
	* Priority for the extended ACL rule that determines the order in which it is evaluated relative to the other extended ACL rules. If you do not specify priorities while creating extended ACL rules, the ACL rules are evaluated in the order in which they are created.
	 */
	Priority int `json:"priority,omitempty"`
	/**
	* Enable or disable the extended ACL rule. After you apply the extended ACL rules, the Citrix ADC compares incoming packets against the enabled extended ACL rules.
	 */
	State string `json:"state,omitempty"`
	/**
	* Enable or disable logging of events related to the extended ACL rule. The log messages are stored in the configured syslog or auditlog server.
	 */
	Logstate string `json:"logstate,omitempty"`
	/**
	* Maximum number of log messages to be generated per second. If you set this parameter, you must enable the Log State parameter.
	 */
	Ratelimit int `json:"ratelimit,omitempty"`
	/**
	* Type of the acl ,default will be CLASSIC.
	Available options as follows:
	* CLASSIC - specifies the regular extended acls.
	* DFD - cluster specific acls,specifies hashmethod for steering of the packet in cluster .
	 */
	Type string `json:"type,omitempty"`
	/**
	* Specifies the type hashmethod to be applied, to steer the packet to the FP of the packet.
	 */
	Dfdhash string `json:"dfdhash,omitempty"`
	/**
	* If stateful option is enabled, transparent sessions are created for the traffic hitting this ACL and not hitting any other features like LB, INAT etc.
	 */
	Stateful string `json:"stateful,omitempty"`
	/**
	* New name for the extended ACL rule. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
	 */
	Newname string `json:"newname,omitempty"`

	//------- Read only Parameter ---------;

	Hits string `json:"hits,omitempty"`
	Kernelstate string `json:"kernelstate,omitempty"`
	Aclassociate string `json:"aclassociate,omitempty"`
}
// This file contains the code snippets included in "The Go image/draw package."
package main
import (
"image"
"image/color"
"image/draw"
)
// main runs each documentation snippet in turn.
func main() {
	Color()
	Rect()
	RectAndScroll()
	ConvAndCircle()
	Glyph()
}
// Color fills destination images with uniform colors. The OMIT comments
// delimit the snippets extracted into the accompanying article.
func Color() {
	c := color.RGBA{255, 0, 255, 255}
	r := image.Rect(0, 0, 640, 480)
	dst := image.NewRGBA(r)
	// ZERO OMIT
	// image.ZP is the zero point -- the origin.
	draw.Draw(dst, r, &image.Uniform{c}, image.ZP, draw.Src)
	// STOP OMIT
	// BLUE OMIT
	m := image.NewRGBA(image.Rect(0, 0, 640, 480))
	blue := color.RGBA{0, 0, 255, 255}
	draw.Draw(m, m.Bounds(), &image.Uniform{blue}, image.ZP, draw.Src)
	// STOP OMIT
	// RESET OMIT
	draw.Draw(m, m.Bounds(), image.Transparent, image.ZP, draw.Src)
	// STOP OMIT
}
// Rect draws a 200x200 source through a destination rectangle anchored at dp.
func Rect() {
	dst := image.NewRGBA(image.Rect(0, 0, 640, 480))
	sr := image.Rect(0, 0, 200, 200)
	src := image.Black
	dp := image.Point{100, 100}
	// RECT OMIT
	r := image.Rectangle{dp, dp.Add(sr.Size())}
	draw.Draw(dst, r, src, sr.Min, draw.Src)
	// STOP OMIT
}
// RectAndScroll demonstrates an alternative way to translate the source
// rectangle, then scrolls the image up by 20 pixels in place.
func RectAndScroll() {
	dst := image.NewRGBA(image.Rect(0, 0, 640, 480))
	sr := image.Rect(0, 0, 200, 200)
	src := image.Black
	dp := image.Point{100, 100}
	// RECT2 OMIT
	r := sr.Sub(sr.Min).Add(dp)
	draw.Draw(dst, r, src, sr.Min, draw.Src)
	// STOP OMIT
	m := dst
	// SCROLL OMIT
	b := m.Bounds()
	p := image.Pt(0, 20)
	// Note that even though the second argument is b,
	// the effective rectangle is smaller due to clipping.
	draw.Draw(m, b, m, b.Min.Add(p), draw.Src)
	dirtyRect := b.Intersect(image.Rect(b.Min.X, b.Max.Y-20, b.Max.X, b.Max.Y))
	// STOP OMIT
	_ = dirtyRect // noop
}
// ConvAndCircle copies an image into a freshly allocated RGBA, then draws
// src onto dst through a circular alpha mask.
func ConvAndCircle() {
	src := image.NewRGBA(image.Rect(0, 0, 640, 480))
	dst := image.NewRGBA(image.Rect(0, 0, 640, 480))
	// CONV OMIT
	b := src.Bounds()
	m := image.NewRGBA(b)
	draw.Draw(m, b, src, b.Min, draw.Src)
	// STOP OMIT
	p := image.Point{100, 100}
	r := 50
	// CIRCLE2 OMIT
	draw.DrawMask(dst, dst.Bounds(), src, image.ZP, &circle{p, r}, image.ZP, draw.Over)
	// STOP OMIT
}
// theGlyphImageForAFont is a stand-in for a real font's glyph atlas image.
func theGlyphImageForAFont() image.Image {
	return image.NewRGBA(image.Rect(0, 0, 640, 480))
}
// theBoundsFor is a stand-in returning the atlas bounds for a glyph index;
// the index is ignored in this example.
func theBoundsFor(index int) image.Rectangle {
	return image.Rect(0, 0, 32, 32)
}
// Glyph draws a font glyph in blue by using the glyph image as a draw mask.
func Glyph() {
	p := image.Point{100, 100}
	dst := image.NewRGBA(image.Rect(0, 0, 640, 480))
	glyphIndex := 42
	// GLYPH OMIT
	src := &image.Uniform{color.RGBA{0, 0, 255, 255}}
	mask := theGlyphImageForAFont()
	mr := theBoundsFor(glyphIndex)
	draw.DrawMask(dst, mr.Sub(mr.Min).Add(p), src, image.ZP, mask, mr.Min, draw.Over)
	// STOP OMIT
}
//CIRCLESTRUCT OMIT
// circle is an opaque circular alpha mask centered at p with radius r.
type circle struct {
	p image.Point
	r int
}
func (c *circle) ColorModel() color.Model {
	return color.AlphaModel
}
func (c *circle) Bounds() image.Rectangle {
	return image.Rect(c.p.X-c.r, c.p.Y-c.r, c.p.X+c.r, c.p.Y+c.r)
}
// At is opaque inside the circle and transparent outside; the +0.5 offsets
// sample each pixel at its center.
func (c *circle) At(x, y int) color.Color {
	xx, yy, rr := float64(x-c.p.X)+0.5, float64(y-c.p.Y)+0.5, float64(c.r)
	if xx*xx+yy*yy < rr*rr {
		return color.Alpha{255}
	}
	return color.Alpha{0}
}
//STOP OMIT
package h3go
import "math"
// BBox is a geographic bounding box whose coordinates are expressed in
// radians.
type BBox struct {
	north float64 // north latitude
	south float64 // south latitude
	east  float64 // east longitude
	west  float64 // west longitude
}

// bboxIsTransmeridian reports whether the bounding box crosses the
// antimeridian, which is the case exactly when its east edge lies west of
// its west edge.
func bboxIsTransmeridian(b *BBox) bool {
	return b.east < b.west
}
// bboxCenter gets the center of a bounding box and stores it in center.
func bboxCenter(bbox *BBox, center *GeoCoord) {
	center.lat = (bbox.north + bbox.south) / 2.0
	// If the bbox crosses the antimeridian, shift east 360 degrees so the
	// longitude midpoint is computed over a continuous range.
	east := bbox.east
	if bboxIsTransmeridian(bbox) {
		east = bbox.east + M_2PI
	}
	center.lon = constrainLng((east + bbox.west) / 2.0)
}
// bboxContains returns whether the bounding box contains the given point.
func bboxContains(box *BBox, point *GeoCoord) bool {
	// Latitude must always fall within the north/south band.
	if point.lat < box.south || point.lat > box.north {
		return false
	}
	if bboxIsTransmeridian(box) {
		// The longitude range wraps the antimeridian, so membership is the
		// union of the two half-ranges.
		return point.lon >= box.west || point.lon <= box.east
	}
	return point.lon >= box.west && point.lon <= box.east
}
// bboxEquals returns whether two bounding boxes are strictly equal,
// comparing every edge with exact floating-point equality.
func bboxEquals(a, b *BBox) bool {
	return a.north == b.north &&
		a.south == b.south &&
		a.east == b.east &&
		a.west == b.west
}
// _hexRadiusKm returns the radius of a given hexagon in Km, measured as the
// distance from the cell center to its first boundary vertex.
func _hexRadiusKm(h3Index H3Index) float64 {
	// There is probably a cheaper way to determine the radius of a
	// hexagon, but this way is conceptually simple
	var h3Center GeoCoord
	var h3Boundary GeoBoundary
	H3ToGeo(h3Index, &h3Center)
	H3ToGeoBoundary(h3Index, &h3Boundary)
	return PointDistKm(&h3Center, &h3Boundary.verts[0])
}
// bboxHexEstimate returns an estimated number of hexagons that fit within the
// cartesian-projected bounding box at the given resolution. The result is
// always at least 1.
func bboxHexEstimate(bbox *BBox, res int) int {
	// Get the area of the pentagon as the maximally-distorted area possible
	pentagons := make([]H3Index, 12)
	GetPentagonIndexes(res, &pentagons)
	pentagonRadiusKm := _hexRadiusKm(pentagons[0])
	// Area of a regular hexagon is 3/2*sqrt(3) * r * r
	// The pentagon has the most distortion (smallest edges) and shares its
	// edges with hexagons, so the most-distorted hexagons have this area,
	// shrunk by 20% off chance that the bounding box perfectly bounds a
	// pentagon.
	pentagonAreaKm2 := 0.8 * (2.59807621135 * pentagonRadiusKm * pentagonRadiusKm)
	// Then get the area of the bounding box of the geofence in question
	var p1, p2 GeoCoord
	p1.lat = bbox.north
	p1.lon = bbox.east
	p2.lat = bbox.south
	p2.lon = bbox.west
	d := PointDistKm(&p1, &p2)
	// Derived constant based on: https://math.stackexchange.com/a/1921940
	// Clamped to 3 as higher values tend to rapidly drag the estimate to zero.
	// NOTE(review): when p1.lat == p2.lat the divisor here is 0, giving Inf
	// (clamped to 3) or NaN when the longitudes are also equal -- confirm
	// callers never pass a degenerate box.
	a := d * d / math.Min(3.0, math.Abs((p1.lon-p2.lon)/(p1.lat-p2.lat)))
	// Divide the two to get an estimate of the number of hexagons needed
	estimate := int(math.Ceil(a / pentagonAreaKm2))
	if estimate == 0 {
		estimate = 1
	}
	return estimate
}
// lineHexEstimate returns an estimated number of hexagons that trace the
// cartesian-projected line
func lineHexEstimate(origin *GeoCoord, destination *GeoCoord, res int) int {
// Get the area of the pentagon as the maximally-distorted area possible
pentagons := make([]H3Index, 12)
GetPentagonIndexes(res, &pentagons)
pentagonRadiusKm := _hexRadiusKm(pentagons[0])
dist := PointDistKm(origin, destination)
estimate := int(math.Ceil(dist / (2 * pentagonRadiusKm)))
if estimate == 0 {
estimate = 1
}
return estimate
} | bbox.go | 0.913409 | 0.579311 | bbox.go | starcoder |
package gobls
import "bytes"
// BufferScanner enumerates newline terminated strings from a provided slice of
// bytes faster than bufio.Scanner and gobls.Scanner. This is particularly
// useful when a program already has the entire buffer in a slice of bytes.
// This structure uses newline as the line terminator, but returns neither the
// newline nor an optional carriage return from each discovered string.
type BufferScanner struct {
	buf         []byte
	left, right int  // [left, right) bounds the most recently scanned line
	started     bool // true once the first line has been scanned
	done, cr    bool // done: final line reached; cr: current line ended with '\r'
}

// NewBufferScanner returns a BufferScanner that enumerates newline terminated
// strings from buf.
func NewBufferScanner(buf []byte) Scanner {
	l := len(buf)
	if l == 0 {
		return &BufferScanner{done: true}
	}
	// Inspect the final byte for newline.
	l--
	if buf[l] != '\n' {
		return &BufferScanner{buf: buf}
	}
	// When buffer ends with newline, remove it, to simplify logic executed
	// for each loop.
	return &BufferScanner{buf: buf[:l]}
}

// Bytes returns the byte slice that was just scanned. It does not return the
// terminating newline character, nor any optional preceding carriage return
// character.
func (b *BufferScanner) Bytes() []byte {
	if b.cr {
		return b.buf[b.left : b.right-1]
	}
	return b.buf[b.left:b.right]
}

// Err returns nil because scanning from a slice of bytes will never cause an
// error.
func (b *BufferScanner) Err() error { return nil }

// Scan advances to the next line, returning true when a line is available and
// false once the buffer is exhausted.
//
// Bug fix: the previous implementation advanced past the prior line only when
// right > 0, so a buffer whose first line was empty (a leading '\n') left the
// scanner stuck on that newline forever. A started flag now distinguishes
// "before the first scan" from "just scanned an empty first line".
func (b *BufferScanner) Scan() bool {
	if b.done {
		// Exhausted: release the buffer and reset state.
		b.buf = nil
		b.cr = false
		b.left = 0
		b.right = 0
		return false
	}
	if b.started {
		// Trim the previous line and its terminating newline.
		b.left = b.right + 1
	} else {
		b.started = true
	}
	next := bytes.IndexByte(b.buf[b.left:], '\n')
	if next == -1 {
		// No further newlines: this is the final line.
		b.done = true
		b.right = len(b.buf)
	} else {
		b.right = b.left + next
	}
	// Is the final character a carriage return?
	b.cr = b.right > 0 && b.buf[b.right-1] == '\r'
	return true
}
// Text returns the string representation of the byte slice returned by the most
// recent Scan call. It does not return the terminating newline character, nor
// any optional preceding carriage return character.
func (b *BufferScanner) Text() string { return string(b.Bytes()) } | bufferScanner.go | 0.839701 | 0.451568 | bufferScanner.go | starcoder |
package duration
import (
"fmt"
"math"
"os"
"sort"
"github.com/kshedden/dstream/dstream"
"gonum.org/v1/plot"
"gonum.org/v1/plot/plotter"
"gonum.org/v1/plot/plotutil"
"gonum.org/v1/plot/vg"
)
// SurvfuncRight uses the method of Kaplan and Meier to estimate the
// survival distribution based on (possibly) right censored data. Construct
// a value with NewSurvfuncRight, optionally configure Weight and Entry, and
// call Done to perform the estimation.
type SurvfuncRight struct {
	// The data used to perform the estimation.
	data dstream.Dstream
	// The name of the variable containing the minimum of the
	// event time and entry time. The underlying data must have
	// float64 type.
	timeVar string
	// The name of a variable containing the status indicator,
	// which is 1 if the event occurred at the time given by
	// TimeVar, and 0 otherwise. This is optional, and is assumed
	// to be identically equal to 1 if not present.
	statusVar string
	// The name of a variable containing case weights, optional.
	weightVar string
	// The name of a variable containing entry times, optional.
	entryVar string
	// Times at which events occur, sorted.
	times []float64
	// Number of events at each time in Times.
	nEvents []float64
	// Number of people at risk just before each time in times
	nRisk []float64
	// The estimated survival function evaluated at each time in Times
	survProb []float64
	// The standard errors for the estimates in SurvProb.
	survProbSE []float64
	// Weighted event, observation, and entry counts keyed by time value,
	// accumulated by scanData.
	events map[float64]float64
	total map[float64]float64
	entry map[float64]float64
	// Column positions of the configured variables within the data stream;
	// -1 when not found or not configured. Resolved by init.
	timepos int
	statuspos int
	weightpos int
	entrypos int
}
// NewSurvfuncRight creates a new value for fitting a survival function to
// the given data; timevar and statusvar name the time and status columns.
// Call Done on the result to perform the estimation.
func NewSurvfuncRight(data dstream.Dstream, timevar, statusvar string) *SurvfuncRight {
	return &SurvfuncRight{
		data:      data,
		timeVar:   timevar,
		statusVar: statusvar,
	}
}
// Weight specifies the name of a case weight variable and returns the
// receiver so calls can be chained.
func (sf *SurvfuncRight) Weight(weight string) *SurvfuncRight {
	sf.weightVar = weight
	return sf
}
// Entry specifies the name of an entry time variable and returns the
// receiver so calls can be chained.
func (sf *SurvfuncRight) Entry(entry string) *SurvfuncRight {
	sf.entryVar = entry
	return sf
}
// Time returns the times at which the survival function changes.
func (sf *SurvfuncRight) Time() []float64 {
	return sf.times
}
// NumRisk returns the number of people at risk at each time point
// where the survival function changes, parallel to Time.
func (sf *SurvfuncRight) NumRisk() []float64 {
	return sf.nRisk
}
// SurvProb returns the estimated survival probabilities at the points
// where the survival function changes, parallel to Time.
func (sf *SurvfuncRight) SurvProb() []float64 {
	return sf.survProb
}
// SurvProbSE returns the standard errors of the estimated survival
// probabilities at the points where the survival function changes.
func (sf *SurvfuncRight) SurvProbSE() []float64 {
	return sf.survProbSE
}
// init resets the accumulator maps and resolves the column positions of the
// configured time, status, weight, and entry variables within the data
// stream. It panics when a required or configured variable cannot be found.
func (sf *SurvfuncRight) init() {
	sf.events = make(map[float64]float64)
	sf.total = make(map[float64]float64)
	sf.entry = make(map[float64]float64)
	sf.data.Reset()
	sf.timepos = -1
	sf.statuspos = -1
	sf.weightpos = -1
	sf.entrypos = -1
	for k, na := range sf.data.Names() {
		switch na {
		case sf.timeVar:
			sf.timepos = k
		case sf.statusVar:
			sf.statuspos = k
		case sf.weightVar:
			sf.weightpos = k
		case sf.entryVar:
			sf.entrypos = k
		}
	}
	if sf.timepos == -1 {
		panic("Time variable not found")
	}
	if sf.statuspos == -1 {
		panic("Status variable not found")
	}
	// Bug fix: this previously reported "Status variable not found" when it
	// was the configured weight variable that was missing.
	if sf.weightVar != "" && sf.weightpos == -1 {
		panic("Weight variable not found")
	}
	if sf.entryVar != "" && sf.entrypos == -1 {
		panic("Entry variable not found")
	}
}
// scanData makes one pass over the data stream, accumulating the weighted
// event count (events), observation count (total), and entry count (entry)
// for each distinct time value. It writes a message to stderr and exits the
// process when an entry time does not precede its event/censoring time.
func (sf *SurvfuncRight) scanData() {
	for j := 0; sf.data.Next(); j++ {
		time := sf.data.GetPos(sf.timepos).([]float64)
		var status []float64
		if sf.statuspos != -1 {
			status = sf.data.GetPos(sf.statuspos).([]float64)
		}
		var entry []float64
		if sf.entrypos != -1 {
			entry = sf.data.GetPos(sf.entrypos).([]float64)
		}
		var weight []float64
		if sf.weightpos != -1 {
			weight = sf.data.GetPos(sf.weightpos).([]float64)
		}
		for i, t := range time {
			// Default to unit weight when no weight variable is configured.
			w := float64(1)
			if sf.weightpos != -1 {
				w = weight[i]
			}
			// With no status variable, every observation counts as an event.
			if sf.statuspos == -1 || status[i] == 1 {
				sf.events[t] += w
			}
			sf.total[t] += w
			if sf.entrypos != -1 {
				if entry[i] >= t {
					// Bug fix: the message previously said the entry time
					// "is before" the event time, which is the opposite of
					// the condition that triggers this error.
					msg := fmt.Sprintf("Entry time %d in chunk %d is not before the event/censoring time\n",
						i, j)
					os.Stderr.WriteString(msg)
					os.Exit(1)
				}
				sf.entry[entry[i]] += w
			}
		}
	}
}
// rollback replaces each element of x with the sum of itself and all
// elements that follow it -- an in-place reverse cumulative sum.
func rollback(x []float64) {
	var running float64
	for i := len(x) - 1; i >= 0; i-- {
		running += x[i]
		x[i] = running
	}
}
// eventstats converts the accumulated per-time maps into the sorted times,
// nEvents, and nRisk slices. Risk set sizes are a reverse cumulative sum of
// the observation counts, reduced for late entries when an entry variable
// is configured.
func (sf *SurvfuncRight) eventstats() {
	// Get the sorted distinct times (event or censoring)
	sf.times = make([]float64, len(sf.total))
	var i int
	for t := range sf.total {
		sf.times[i] = t
		i++
	}
	sort.Float64s(sf.times)
	// Get the weighted event count and risk set size at each time
	// point (in same order as Times).
	sf.nEvents = make([]float64, len(sf.times))
	sf.nRisk = make([]float64, len(sf.times))
	for i, t := range sf.times {
		sf.nEvents[i] = sf.events[t]
		sf.nRisk[i] = sf.total[t]
	}
	rollback(sf.nRisk)
	// Adjust for entry times
	if sf.entrypos != -1 {
		entry := make([]float64, len(sf.times))
		for t, w := range sf.entry {
			// Locate the last observed time <= t. scanData guarantees each
			// entry time is strictly less than some observed time, so the
			// search result stays in range.
			ii := sort.SearchFloat64s(sf.times, t)
			if t < sf.times[ii] {
				ii--
			}
			if ii >= 0 {
				entry[ii] += w
			}
		}
		rollback(entry)
		for i := 0; i < len(sf.nRisk); i++ {
			sf.nRisk[i] -= entry[i]
		}
	}
}
// compress removes times where no events occurred, so the retained slices
// describe only the jump points of the survival function. The last time is
// always kept even when it carries no events.
func (sf *SurvfuncRight) compress() {
	var ix []int
	for i := 0; i < len(sf.times); i++ {
		// Only retain events, except for the last point,
		// which is retained even if there are no events.
		if sf.nEvents[i] > 0 || i == len(sf.times)-1 {
			ix = append(ix, i)
		}
	}
	if len(ix) < len(sf.times) {
		// Compact in place: ix is increasing, so each source index is at or
		// beyond its destination.
		for i, j := range ix {
			sf.times[i] = sf.times[j]
			sf.nEvents[i] = sf.nEvents[j]
			sf.nRisk[i] = sf.nRisk[j]
		}
		sf.times = sf.times[0:len(ix)]
		sf.nEvents = sf.nEvents[0:len(ix)]
		sf.nRisk = sf.nRisk[0:len(ix)]
	}
}
// fit computes the Kaplan-Meier product-limit estimates and their standard
// errors from the event counts and risk set sizes.
func (sf *SurvfuncRight) fit() {
	sf.survProb = make([]float64, len(sf.times))
	x := float64(1)
	for i := range sf.times {
		// Product-limit estimator: the product of conditional survival
		// probabilities at each event time.
		x *= 1 - sf.nEvents[i]/sf.nRisk[i]
		sf.survProb[i] = x
	}
	sf.survProbSE = make([]float64, len(sf.times))
	x = 0
	if sf.weightpos == -1 {
		for i := range sf.times {
			d := sf.nEvents[i]
			n := sf.nRisk[i]
			// Greenwood's formula for the variance of the estimator.
			x += d / (n * (n - d))
			sf.survProbSE[i] = math.Sqrt(x) * sf.survProb[i]
		}
	} else {
		for i := range sf.times {
			d := sf.nEvents[i]
			n := sf.nRisk[i]
			// NOTE(review): unlike the unweighted branch, the weighted SE is
			// not scaled by survProb -- presumably intentional, but confirm.
			x += d / (n * n)
			sf.survProbSE[i] = math.Sqrt(x)
		}
	}
}
// Done indicates that the survival function has been configured and can now
// be fit. It runs the full estimation pipeline and returns the receiver so
// calls can be chained.
func (sf *SurvfuncRight) Done() *SurvfuncRight {
	sf.init()
	sf.scanData()
	sf.eventstats()
	sf.compress()
	sf.fit()
	return sf
}
// SurvfuncRightPlotter is used to plot one or more survival functions.
type SurvfuncRightPlotter struct {
	pts []plotter.XYs
	plt *plot.Plot
	labels []string
	lines []*plotter.Line
	width vg.Length // plot width in inches (see Save)
	height vg.Length // plot height in inches (see Save)
}
// NewSurvfuncRightPlotter returns a default SurvfuncRightPlotter with a
// 4x4 inch plot area; it panics if the underlying plot cannot be created.
func NewSurvfuncRightPlotter() *SurvfuncRightPlotter {
	sp := &SurvfuncRightPlotter{
		width:  4,
		height: 4,
	}
	var err error
	sp.plt, err = plot.New()
	if err != nil {
		panic(err)
	}
	return sp
}
// Width sets the width of the survival function plot, in inches, and
// returns the receiver for chaining.
func (sp *SurvfuncRightPlotter) Width(w float64) *SurvfuncRightPlotter {
	sp.width = vg.Length(w)
	return sp
}
// Height sets the height of the survival function plot, in inches, and
// returns the receiver for chaining.
func (sp *SurvfuncRightPlotter) Height(h float64) *SurvfuncRightPlotter {
	sp.height = vg.Length(h)
	return sp
}
// Add plots a given survival function under the given legend label and
// returns the receiver for chaining. The step function is rendered by
// emitting two points per jump time: one carrying the previous probability
// forward, and one dropping to the new probability.
func (sp *SurvfuncRightPlotter) Add(sf *SurvfuncRight, label string) *SurvfuncRightPlotter {
	ti := sf.Time()
	pr := sf.SurvProb()
	m := len(ti)
	n := 2*m + 1
	pts := make(plotter.XYs, n)
	j := 0
	// Anchor the curve at (0, 1).
	pts[j].X = 0
	pts[j].Y = 1
	j++
	for i := range ti {
		pts[j].X = ti[i]
		pts[j].Y = pts[j-1].Y
		j++
		pts[j].X = ti[i]
		pts[j].Y = pr[i]
		j++
	}
	sp.pts = append(sp.pts, pts)
	sp.labels = append(sp.labels, label)
	line, err := plotter.NewLine(pts)
	if err != nil {
		panic(err)
	}
	// Cycle colors so each added curve is distinguishable.
	line.Color = plotutil.Color(len(sp.lines))
	sp.lines = append(sp.lines, line)
	return sp
}
// Plot constructs the plot from the curves added so far and returns the
// receiver for chaining. A legend is attached only when more than one curve
// is present.
func (sp *SurvfuncRightPlotter) Plot() *SurvfuncRightPlotter {
	sp.plt.Y.Min = 0
	sp.plt.Y.Max = 1
	sp.plt.X.Label.Text = "Time"
	sp.plt.Y.Label.Text = "Proportion alive"
	leg, err := plot.NewLegend()
	if err != nil {
		panic(err)
	}
	for i := range sp.lines {
		sp.plt.Add(sp.lines[i])
		leg.Add(sp.labels[i], sp.lines[i])
	}
	if len(sp.lines) > 1 {
		leg.Top = false
		leg.Left = true
		sp.plt.Legend = leg
	}
	return sp
}
// GetPlotStruct returns the underlying gonum/plot structure for this plot,
// allowing further customization by the caller.
func (sp *SurvfuncRightPlotter) GetPlotStruct() *plot.Plot {
	return sp.plt
}
// Save writes the plot to the given file.
func (sp *SurvfuncRightPlotter) Save(fname string) {
if err := sp.plt.Save(sp.width*vg.Inch, sp.height*vg.Inch, fname); err != nil {
panic(err)
}
} | duration/survfunc.go | 0.667906 | 0.537102 | survfunc.go | starcoder |
package export
import (
"github.com/opendroid/hk/logger"
"go.uber.org/zap"
"sort"
"strconv"
)
// BodyMassElement is a single observation in the various types of body-mass
// data.
type BodyMassElement struct {
	CreationDate int64 `json:"creation_timestamp_sec"` // record creation time, Unix seconds
	SourceName string `json:"source"` // originating source of the record
	Unit string `json:"unit,omitempty"` // measurement unit, if any
	Value float32 `json:"value"` // observed value
}
// BodyMassComposition groups the data slices related to body mass.
type BodyMassComposition struct {
	Mass []BodyMassElement `json:"mass"`
	LeanMass []BodyMassElement `json:"lean_body_mass"`
	BMI []BodyMassElement `json:"bmi"`
	FatPercentage []BodyMassElement `json:"fat_percent"`
}
// BodyMassData extracts the body-mass related records (mass, lean body
// mass, BMI, and body-fat percentage) from the health data, returning each
// series sorted by creation date. It returns nil when the receiver is nil.
func (h *HealthData) BodyMassData() *BodyMassComposition {
	if h == nil {
		return nil
	}
	var m BodyMassComposition
	// Pre-initialize every series so each JSON-encodes as [] rather than
	// null when empty. (Bug fix: LeanMass was previously the only series
	// left nil, inconsistent with its siblings.)
	m.Mass = make([]BodyMassElement, 0)
	m.LeanMass = make([]BodyMassElement, 0)
	m.BMI = make([]BodyMassElement, 0)
	m.FatPercentage = make([]BodyMassElement, 0)
	for _, d := range h.Records {
		switch RecordType(d.Type) {
		case BodyMass:
			m.Mass = appendMassData(m.Mass, d)
		case BodyMassIndex:
			m.BMI = appendMassData(m.BMI, d)
		case BodyFatPercentage:
			m.FatPercentage = appendMassData(m.FatPercentage, d)
		case LeanBodyMass:
			m.LeanMass = appendMassData(m.LeanMass, d)
		}
	}
	// Sort each series chronologically by creation date.
	sort.Slice(m.Mass, func(i, j int) bool { return m.Mass[i].CreationDate < m.Mass[j].CreationDate })
	sort.Slice(m.LeanMass, func(i, j int) bool { return m.LeanMass[i].CreationDate < m.LeanMass[j].CreationDate })
	sort.Slice(m.BMI, func(i, j int) bool { return m.BMI[i].CreationDate < m.BMI[j].CreationDate })
	sort.Slice(m.FatPercentage, func(i, j int) bool { return m.FatPercentage[i].CreationDate < m.FatPercentage[j].CreationDate })
	return &m
}
// appendMassData helper appends relevant components slice
func appendMassData(d []BodyMassElement, r Record) []BodyMassElement {
num, err := strconv.ParseFloat(r.Value, 32)
if err != nil {
logger.Error("appendData: strconv error",
zap.String("method", "appendMassData"),
zap.String("value", r.Value), zap.String("info", err.Error()))
}
e := BodyMassElement{
CreationDate: recordTime(r.CreationDate),
SourceName: r.SourceName,
Unit: r.Unit,
Value: float32(num),
}
return append(d, e)
} | export/bodymass.go | 0.594434 | 0.451145 | bodymass.go | starcoder |
package haystack
import (
"encoding/json"
"errors"
"fmt"
"math"
"strconv"
"strings"
)
// Number wraps a 64-bit floating point number and unit name.
type Number struct {
	val  float64
	unit string
}

// NewNumber creates a new Number. For unitless numbers, use an empty string
// unit: "".
func NewNumber(val float64, unit string) *Number {
	return &Number{val: val, unit: unit}
}

// newNumberFromStr creates a new Number from a string. The string
// representation must have a space between the number and unit.
func newNumberFromStr(str string) (*Number, error) {
	// Special non-finite spellings are matched exactly, with no unit.
	switch str {
	case "INF":
		return NewNumber(math.Inf(1), ""), nil
	case "-INF":
		return NewNumber(math.Inf(-1), ""), nil
	case "NaN":
		return NewNumber(math.NaN(), ""), nil
	}
	parts := strings.Split(str, " ")
	val, err := strconv.ParseFloat(parts[0], 64)
	if err != nil {
		return NewNumber(0.0, ""), err
	}
	unit := ""
	if len(parts) > 1 {
		unit = parts[1]
	}
	return NewNumber(val, unit), nil
}
// Float returns the numerical value
func (number *Number) Float() float64 {
	return number.val
}
// Unit returns the unit symbol; it is empty for unitless numbers
func (number *Number) Unit() string {
	return number.unit
}
// ToZinc represents the object as: "<val>[unit]", with no space between the
// value and the unit
func (number *Number) ToZinc() string {
	return number.toStr(false)
}
// MarshalJSON represents the object as: "n:<val> [unit]"
func (number *Number) MarshalJSON() ([]byte, error) {
	return json.Marshal("n:" + number.toStr(true))
}
// UnmarshalJSON interprets the json value: "n:<val> [unit]". On error the
// receiver is left unmodified.
func (number *Number) UnmarshalJSON(buf []byte) error {
	var jsonStr string
	if err := json.Unmarshal(buf, &jsonStr); err != nil {
		return err
	}
	newNumber, err := numberFromJSON(jsonStr)
	if err != nil {
		// Bug fix: numberFromJSON returns a nil *Number alongside a missing
		// "n:" prefix error; dereferencing it here previously panicked.
		return err
	}
	*number = *newNumber
	return nil
}
// numberFromJSON parses the JSON string form "n:<val> [unit]" into a Number.
// A nil Number is returned when the "n:" prefix is missing.
func numberFromJSON(jsonStr string) (*Number, error) {
	if !strings.HasPrefix(jsonStr, "n:") {
		return nil, errors.New("Input value does not begin with 'n:'")
	}
	numberStr := jsonStr[2:]
	return newNumberFromStr(numberStr)
}
// MarshalHayson represents the object as:
// "{"_kind":"number","val":<val>,["unit":<unit>]}". Non-finite values are
// encoded as the strings "INF", "-INF", and "NaN".
func (number *Number) MarshalHayson() ([]byte, error) {
	buf := strings.Builder{}
	buf.WriteString("{\"_kind\":\"number\",\"val\":")
	if math.IsInf(number.val, 1) {
		buf.WriteString("\"INF\"")
	} else if math.IsInf(number.val, -1) {
		buf.WriteString("\"-INF\"")
	} else if math.IsNaN(number.val) {
		buf.WriteString("\"NaN\"")
	} else {
		buf.WriteString(fmt.Sprintf("%g", number.val))
	}
	if number.unit != "" {
		buf.WriteString(",\"unit\":\"")
		buf.WriteString(number.unit)
		buf.WriteString("\"")
	}
	buf.WriteString("}")
	return []byte(buf.String()), nil
}
func (number *Number) toStr(spaceBeforeUnit bool) string {
if math.IsInf(number.val, 1) {
return "INF"
} else if math.IsInf(number.val, -1) {
return "-INF"
} else if math.IsNaN(number.val) {
return "NaN"
} else {
result := fmt.Sprintf("%g", number.val)
if number.unit != "" {
if spaceBeforeUnit {
result = result + " "
}
result = result + number.unit
}
return result
}
} | Number.go | 0.705684 | 0.412294 | Number.go | starcoder |
package handy
import (
"math/big"
)
// intToBigint converts a value of any built-in signed or unsigned integer
// type into a *big.Int. Values of any other type yield zero.
func intToBigint(i interface{}) *big.Int {
	result := big.NewInt(0)
	switch v := i.(type) {
	case int:
		return result.SetInt64(int64(v))
	case int8:
		return result.SetInt64(int64(v))
	case int16:
		return result.SetInt64(int64(v))
	case int32:
		return result.SetInt64(int64(v))
	case int64:
		return result.SetInt64(v)
	case uint:
		return result.SetUint64(uint64(v))
	case uint8:
		return result.SetUint64(uint64(v))
	case uint16:
		return result.SetUint64(uint64(v))
	case uint32:
		return result.SetUint64(uint64(v))
	case uint64:
		return result.SetUint64(v)
	}
	return result
}

// InArrayIntFlex returns true if "item" exists in "array".
// item and array may be of different integer kinds: both sides are promoted
// to big.Int before comparison, so e.g. an int item can be found in a
// []uint64 array. array should be a slice of any built-in integer type.
func InArrayIntFlex(item interface{}, array interface{}) bool {
	if array == nil || item == nil {
		return false
	}
	want := intToBigint(item)
	have := big.NewInt(0)
	// The two closures reuse a single big.Int scratch value for every
	// element comparison.
	matchSigned := func(v int64) bool {
		have.SetInt64(v)
		return want.Cmp(have) == 0
	}
	matchUnsigned := func(v uint64) bool {
		have.SetUint64(v)
		return want.Cmp(have) == 0
	}
	switch a := array.(type) {
	case []int:
		for _, v := range a {
			if matchSigned(int64(v)) {
				return true
			}
		}
	case []int8:
		for _, v := range a {
			if matchSigned(int64(v)) {
				return true
			}
		}
	case []int16:
		for _, v := range a {
			if matchSigned(int64(v)) {
				return true
			}
		}
	case []int32:
		for _, v := range a {
			if matchSigned(int64(v)) {
				return true
			}
		}
	case []int64:
		for _, v := range a {
			if matchSigned(v) {
				return true
			}
		}
	case []uint:
		for _, v := range a {
			if matchUnsigned(uint64(v)) {
				return true
			}
		}
	case []uint8:
		for _, v := range a {
			if matchUnsigned(uint64(v)) {
				return true
			}
		}
	case []uint16:
		for _, v := range a {
			if matchUnsigned(uint64(v)) {
				return true
			}
		}
	case []uint32:
		for _, v := range a {
			if matchUnsigned(uint64(v)) {
				return true
			}
		}
	case []uint64:
		for _, v := range a {
			if matchUnsigned(v) {
				return true
			}
		}
	}
	return false
}
// InArray searches for "item" in "array" and returns true if it's found.
// array must be a slice whose element type exactly matches the dynamic type
// of item; supported element types are the built-in integers, floats,
// string and bool. Any other combination returns false.
func InArray(array interface{}, item interface{}) bool {
	switch a := array.(type) {
	case []int:
		if v, ok := item.(int); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []int8:
		if v, ok := item.(int8); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []int16:
		if v, ok := item.(int16); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []int32: // Works for int32 and rune types
		if v, ok := item.(int32); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []int64:
		if v, ok := item.(int64); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []uint:
		if v, ok := item.(uint); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []uint8: // Works for uint8 and byte types
		if v, ok := item.(uint8); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []uint16:
		if v, ok := item.(uint16); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []uint32:
		if v, ok := item.(uint32); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []uint64:
		if v, ok := item.(uint64); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []float32:
		if v, ok := item.(float32); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []float64:
		if v, ok := item.(float64); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []string:
		if v, ok := item.(string); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	case []bool:
		if v, ok := item.(bool); ok {
			for _, x := range a {
				if x == v {
					return true
				}
			}
		}
	}
	return false
}
package stats
import (
"sync"
"time"
)
// TimeBucketCounter is a counter that records the approximate number of events over a
// recent interval; the length of this interval and the resolution are configurable. Used
// to measure the approximate processing rate for the digger.
type TimeBucketCounter struct {
sync.Mutex
headIncrement int64
resolution time.Duration
length time.Duration
maxSize int
buckets []int64
currTotal int64
}
// NewTimeBucketCounter creates a new TimeBucketCounter instance for the given resolution and
// length.
func NewTimeBucketCounter(
resolution time.Duration,
length time.Duration,
) *TimeBucketCounter {
return &TimeBucketCounter{
resolution: resolution,
length: length,
maxSize: int(length / resolution),
buckets: []int64{},
}
}
// Increment updates the counter for the argument count, assuming that the current time is now.
func (t *TimeBucketCounter) Increment(now time.Time, count int64) {
t.Lock()
defer t.Unlock()
if t.headIncrement == 0 {
t.headIncrement = now.UnixNano() / int64(t.resolution)
t.buckets = []int64{count}
t.currTotal = count
return
}
t.advance(now)
t.buckets[0] += count
t.currTotal += count
}
// Total gets the total count for this counter.
func (t *TimeBucketCounter) Total() int64 {
t.Lock()
defer t.Unlock()
return t.currTotal
}
// RatePerSec returns the average count per second for this counter.
func (t *TimeBucketCounter) RatePerSec() float64 {
t.Lock()
defer t.Unlock()
return float64(t.currTotal) / t.length.Seconds()
}
func (t *TimeBucketCounter) advance(now time.Time) {
newHead := now.UnixNano() / int64(t.resolution)
if newHead == t.headIncrement {
// Nothing to do
return
}
t.buckets = append(
make([]int64, newHead-t.headIncrement, newHead-t.headIncrement),
t.buckets...,
)
if len(t.buckets) > t.maxSize {
for i := t.maxSize; i < len(t.buckets); i++ {
t.currTotal -= t.buckets[i]
}
t.buckets = t.buckets[0:t.maxSize]
}
t.headIncrement = newHead
} | pkg/stats/bucket.go | 0.681621 | 0.403508 | bucket.go | starcoder |
package builtin
import (
"errors"
"github.com/kode4food/ale/data"
)
// Error messages
const (
ErrIndexOutOfBounds = "index out of bounds"
ErrPutRequiresPair = "put requires a key/value combination or a pair"
)
// First returns the first value in the sequence
var First = data.Applicative(func(args ...data.Value) data.Value {
	return args[0].(data.Sequence).First()
}, 1)
// Rest returns the sequence elements after the first value
var Rest = data.Applicative(func(args ...data.Value) data.Value {
	return args[0].(data.Sequence).Rest()
}, 1)
// Append adds a value to the end of the provided Appender; the Appender is
// the first argument and the value to add is the second
var Append = data.Applicative(func(args ...data.Value) data.Value {
	a := args[0].(data.Appender)
	s := args[1]
	return a.Append(s)
}, 2)
// Reverse returns a reversed copy of a Sequence
var Reverse = data.Applicative(func(args ...data.Value) data.Value {
	r := args[0].(data.Reverser)
	return r.Reverse()
}, 1)
// Length returns the element count of the provided Counted
var Length = data.Applicative(func(args ...data.Value) data.Value {
	s := args[0].(data.Counted)
	l := s.Count()
	return data.Integer(l)
}, 1)
// Nth returns the nth element of the provided sequence or, when a third
// argument is supplied, that default; otherwise an out-of-range index
// panics with ErrIndexOutOfBounds
var Nth = data.Applicative(func(args ...data.Value) data.Value {
	s := args[0].(data.Indexed)
	i := int(args[1].(data.Integer))
	if res, ok := s.ElementAt(i); ok {
		return res
	}
	if len(args) > 2 {
		return args[2]
	}
	panic(errors.New(ErrIndexOutOfBounds))
}, 2, 3)
// IsSeq returns whether the provided value is a sequence
var IsSeq = data.Applicative(func(args ...data.Value) data.Value {
	_, ok := args[0].(data.Sequence)
	return data.Bool(ok)
}, 1)
// IsEmpty returns whether the provided sequence is empty
var IsEmpty = data.Applicative(func(args ...data.Value) data.Value {
	s := args[0].(data.Sequence)
	return data.Bool(s.IsEmpty())
}, 1)
// IsCounted returns whether the provided value is a counted sequence
var IsCounted = data.Applicative(func(args ...data.Value) data.Value {
	_, ok := args[0].(data.Counted)
	return data.Bool(ok)
}, 1)
// IsIndexed returns whether the provided value is an indexed sequence
var IsIndexed = data.Applicative(func(args ...data.Value) data.Value {
	_, ok := args[0].(data.Indexed)
	return data.Bool(ok)
}, 1)
// IsReverser returns whether the value is a reversible sequence
var IsReverser = data.Applicative(func(args ...data.Value) data.Value {
_, ok := args[0].(data.Reverser)
return data.Bool(ok)
}, 1) | core/internal/builtin/sequences.go | 0.776114 | 0.554229 | sequences.go | starcoder |
package linq
import (
"reflect"
"strconv"
"time"
"github.com/screeningeagledreamlab/go-util"
)
// Predicate is a function that returns a boolean for an input.
type Predicate func(item interface{}) bool

// PredicateOfByte is a function that returns a boolean for an input.
type PredicateOfByte func(item byte) bool

// PredicateOfRune is a function that returns a boolean for an input.
type PredicateOfRune func(item rune) bool

// PredicateOfInt is a function that returns a boolean for an input.
type PredicateOfInt func(item int) bool

// PredicateOfInt64 is a function that returns a boolean for an input.
type PredicateOfInt64 func(item int64) bool

// PredicateOfFloat is a function that returns a boolean for an input.
type PredicateOfFloat func(item float64) bool

// PredicateOfString is a function that returns a boolean for an input.
type PredicateOfString func(item string) bool

// PredicateOfTime is a function that returns a boolean for an input.
type PredicateOfTime func(item time.Time) bool

// PredicateOfDuration is a function that returns a boolean for an input.
type PredicateOfDuration func(item time.Duration) bool

// MapAction is an action that returns a value for an input value, a.k.a. that maps the two.
type MapAction func(item interface{}) interface{}

// MapActionOfByte is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfByte func(item byte) byte

// MapActionOfRune is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfRune func(item rune) rune

// MapActionOfInt is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfInt func(item int) int

// MapActionOfInt64 is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfInt64 func(item int64) int64

// MapActionOfFloat is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfFloat func(item float64) float64

// MapActionOfString is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfString func(item string) string

// MapActionOfTime is an action that returns a value for an input value, a.k.a. that maps the two.
type MapActionOfTime func(item time.Time) time.Time

// MapActionOfDuration is an action that returns a value for an input value, a.k.a. that maps the two.
// Fixed: previously declared as func(time.Duration) time.Time, an obvious
// copy-paste defect; every other MapActionOfX maps X -> X.
type MapActionOfDuration func(item time.Duration) time.Duration
// ReturnsTrue is a pre-built predicate that accepts any value and always
// reports true.
func ReturnsTrue() Predicate {
	return func(interface{}) bool { return true }
}

// ReturnsTrueOfByte is a pre-built predicate that always reports true.
func ReturnsTrueOfByte() PredicateOfByte {
	return func(byte) bool { return true }
}

// ReturnsTrueOfRune is a pre-built predicate that always reports true.
func ReturnsTrueOfRune() PredicateOfRune {
	return func(rune) bool { return true }
}

// ReturnsTrueOfInt is a pre-built predicate that always reports true.
func ReturnsTrueOfInt() PredicateOfInt {
	return func(int) bool { return true }
}

// ReturnsTrueOfFloat is a pre-built predicate that always reports true.
func ReturnsTrueOfFloat() PredicateOfFloat {
	return func(float64) bool { return true }
}

// ReturnsTrueOfString is a pre-built predicate that always reports true.
func ReturnsTrueOfString() PredicateOfString {
	return func(string) bool { return true }
}

// ReturnsTrueOfTime is a pre-built predicate that always reports true.
func ReturnsTrueOfTime() PredicateOfTime {
	return func(time.Time) bool { return true }
}

// ReturnsTrueOfDuration is a pre-built predicate that always reports true.
func ReturnsTrueOfDuration() PredicateOfDuration {
	return func(time.Duration) bool { return true }
}
// ReturnsFalse is a pre-built predicate that accepts any value and always
// reports false.
func ReturnsFalse() Predicate {
	return func(interface{}) bool { return false }
}

// ReturnsFalseOfByte is a pre-built predicate that always reports false.
func ReturnsFalseOfByte() PredicateOfByte {
	return func(byte) bool { return false }
}

// ReturnsFalseOfRune is a pre-built predicate that always reports false.
func ReturnsFalseOfRune() PredicateOfRune {
	return func(rune) bool { return false }
}

// ReturnsFalseOfInt is a pre-built predicate that always reports false.
func ReturnsFalseOfInt() PredicateOfInt {
	return func(int) bool { return false }
}

// ReturnsFalseOfFloat is a pre-built predicate that always reports false.
func ReturnsFalseOfFloat() PredicateOfFloat {
	return func(float64) bool { return false }
}

// ReturnsFalseOfString is a pre-built predicate that always reports false.
func ReturnsFalseOfString() PredicateOfString {
	return func(string) bool { return false }
}

// ReturnsFalseOfTime is a pre-built predicate that always reports false.
func ReturnsFalseOfTime() PredicateOfTime {
	return func(time.Time) bool { return false }
}

// ReturnsFalseOfDuration is a pre-built predicate that always reports false.
func ReturnsFalseOfDuration() PredicateOfDuration {
	return func(time.Duration) bool { return false }
}
// DeepEqual is a pre-built predicate that compares shouldBe to input objects
// using reflect.DeepEqual (works for slices, maps, and structs).
func DeepEqual(shouldBe interface{}) Predicate {
	return func(value interface{}) bool {
		return reflect.DeepEqual(shouldBe, value)
	}
}

// EqualsOfByte is a pre-built predicate that compares shouldBe to input objects.
func EqualsOfByte(shouldBe byte) PredicateOfByte {
	return func(value byte) bool {
		return shouldBe == value
	}
}

// EqualsOfRune is a pre-built predicate that compares shouldBe to input objects.
func EqualsOfRune(shouldBe rune) PredicateOfRune {
	return func(value rune) bool {
		return shouldBe == value
	}
}

// EqualsOfInt is a pre-built predicate that compares shouldBe to input objects.
func EqualsOfInt(shouldBe int) PredicateOfInt {
	return func(value int) bool {
		return shouldBe == value
	}
}

// EqualsOfFloat is a pre-built predicate that compares shouldBe to input objects.
// Note: exact float64 comparison; no epsilon tolerance.
func EqualsOfFloat(shouldBe float64) PredicateOfFloat {
	return func(value float64) bool {
		return shouldBe == value
	}
}

// EqualsOfString is a pre-built predicate that compares shouldBe to input objects.
func EqualsOfString(shouldBe string) PredicateOfString {
	return func(value string) bool {
		return shouldBe == value
	}
}

// EqualsOfStringCaseInsenitive is a pre-built predicate that compares shouldBe
// to input objects ignoring case.
// NOTE(review): the exported name misspells "Insensitive"; renaming would
// break callers, so it is kept as-is.
func EqualsOfStringCaseInsenitive(shouldBe string) PredicateOfString {
	return func(value string) bool {
		return util.String.CaseInsensitiveEquals(shouldBe, value)
	}
}
// StringToInt is a pre-built map function. It converts a string item to an
// int, and yields nil when the item is not a string or not a valid integer.
func StringToInt(item interface{}) interface{} {
	str, ok := item.(string)
	if !ok {
		return nil
	}
	parsed, err := strconv.Atoi(str)
	if err != nil {
		return nil
	}
	return parsed
}
// StringToFloat is a pre-built map function. It converts a string item to a
// float64, and yields nil when the item is not a string or not a valid number.
func StringToFloat(item interface{}) interface{} {
	str, ok := item.(string)
	if !ok {
		return nil
	}
	parsed, err := strconv.ParseFloat(str, 64)
	if err != nil {
		return nil
	}
	return parsed
}
// stringable matches any value with a String() string method
// (equivalent to fmt.Stringer, declared locally to avoid the import).
type stringable interface {
	String() string
}

// ValueToString is a pre-built map function. It yields value.String() when
// the value has a String() string method, and "" otherwise.
func ValueToString(value interface{}) interface{} {
	s, ok := value.(stringable)
	if !ok {
		return ""
	}
	return s.String()
}
// Any returns true if the predicate holds for any object in the collection.
// A nil predicate matches every element. Returns false when target is nil or
// not a slice (after following any pointer indirections).
//
// Fixed: the original called reflect.TypeOf(nil).Kind() for a nil target,
// which panics with a nil pointer dereference.
func Any(target interface{}, predicate Predicate) bool {
	if target == nil {
		return false
	}
	v := reflect.ValueOf(target)
	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		return false
	}
	for x := 0; x < v.Len(); x++ {
		obj := v.Index(x).Interface()
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}
// AnyOfByte returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfByte(target []byte, predicate PredicateOfByte) bool {
	// Direct iteration: the original reflected over an already-typed slice,
	// paying reflection and boxing cost for no benefit.
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfRune returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfRune(target []rune, predicate PredicateOfRune) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfInt returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfInt(target []int, predicate PredicateOfInt) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfFloat returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfFloat(target []float64, predicate PredicateOfFloat) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfString returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfString(target []string, predicate PredicateOfString) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfTime returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfTime(target []time.Time, predicate PredicateOfTime) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}

// AnyOfDuration returns true if the predicate holds for any object in the
// collection. A nil predicate matches every element.
func AnyOfDuration(target []time.Duration, predicate PredicateOfDuration) bool {
	for _, obj := range target {
		if predicate == nil || predicate(obj) {
			return true
		}
	}
	return false
}
// All returns true if the predicate holds for all objects in the collection.
// A nil predicate is vacuously satisfied. Returns false when target is nil or
// not a slice (after following any pointer indirections).
//
// Fixed: the original panicked on a nil target (reflect.TypeOf(nil).Kind())
// and on a nil predicate (the Any family treats nil as match-all).
func All(target interface{}, predicate Predicate) bool {
	if target == nil {
		return false
	}
	v := reflect.ValueOf(target)
	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		return false
	}
	for x := 0; x < v.Len(); x++ {
		obj := v.Index(x).Interface()
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}
// AllOfInt returns true if the predicate holds for all objects in the
// collection. A nil predicate is vacuously satisfied (consistent with the
// AnyOf family; the original panicked on nil).
func AllOfInt(target []int, predicate PredicateOfInt) bool {
	// Direct iteration replaces the original's needless reflection over an
	// already-typed slice.
	for _, obj := range target {
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}

// AllOfFloat returns true if the predicate holds for all objects in the
// collection. A nil predicate is vacuously satisfied.
func AllOfFloat(target []float64, predicate PredicateOfFloat) bool {
	for _, obj := range target {
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}

// AllOfString returns true if the predicate holds for all objects in the
// collection. A nil predicate is vacuously satisfied.
func AllOfString(target []string, predicate PredicateOfString) bool {
	for _, obj := range target {
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}

// AllOfTime returns true if the predicate holds for all objects in the
// collection. A nil predicate is vacuously satisfied.
func AllOfTime(target []time.Time, predicate PredicateOfTime) bool {
	for _, obj := range target {
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}

// AllOfDuration returns true if the predicate holds for all objects in the
// collection. A nil predicate is vacuously satisfied.
func AllOfDuration(target []time.Duration, predicate PredicateOfDuration) bool {
	for _, obj := range target {
		if predicate != nil && !predicate(obj) {
			return false
		}
	}
	return true
}
// First returns the first object that satisfies a predicate, or nil if none
// does. A nil predicate matches the first element. Returns nil when target is
// nil or not a slice (after following pointer indirections).
//
// Fixed: the original returned the bool `false` (not nil) for a non-slice
// target, and panicked on a nil target.
func First(target interface{}, predicate Predicate) interface{} {
	if target == nil {
		return nil
	}
	v := reflect.ValueOf(target)
	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		return nil
	}
	for x := 0; x < v.Len(); x++ {
		obj := v.Index(x).Interface()
		if predicate == nil || predicate(obj) {
			return obj
		}
	}
	return nil
}
// FirstOfByte returns a pointer to a copy of the first object that satisfies
// the predicate, or nil if none does. A nil predicate matches the first
// element.
func FirstOfByte(target []byte, predicate PredicateOfByte) *byte {
	// Direct iteration replaces the original's needless reflection; the
	// returned pointer addresses a copy, matching the original behavior.
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// FirstOfInt returns a pointer to a copy of the first object that satisfies
// the predicate, or nil if none does.
func FirstOfInt(target []int, predicate PredicateOfInt) *int {
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// FirstOfFloat returns a pointer to a copy of the first object that satisfies
// the predicate, or nil if none does.
func FirstOfFloat(target []float64, predicate PredicateOfFloat) *float64 {
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// FirstOfString returns a pointer to a copy of the first object that
// satisfies the predicate, or nil if none does.
func FirstOfString(target []string, predicate PredicateOfString) *string {
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// FirstOfTime returns a pointer to a copy of the first object that satisfies
// the predicate, or nil if none does.
func FirstOfTime(target []time.Time, predicate PredicateOfTime) *time.Time {
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// FirstOfDuration returns a pointer to a copy of the first object that
// satisfies the predicate, or nil if none does.
func FirstOfDuration(target []time.Duration, predicate PredicateOfDuration) *time.Duration {
	for i := range target {
		obj := target[i]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}
// Last returns the last object that satisfies a predicate, or nil if none
// does. A nil predicate matches the last element. Returns nil when target is
// nil or not a slice (after following pointer indirections).
//
// Fixed: the original loop condition `x > 0` never examined index 0 (so a
// one-element slice always yielded nil), it returned the bool `false` for a
// non-slice target, and it panicked on a nil target.
func Last(target interface{}, predicate Predicate) interface{} {
	if target == nil {
		return nil
	}
	v := reflect.ValueOf(target)
	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		return nil
	}
	for x := v.Len() - 1; x >= 0; x-- {
		obj := v.Index(x).Interface()
		if predicate == nil || predicate(obj) {
			return obj
		}
	}
	return nil
}
// LastOfInt returns a pointer to a copy of the last object that satisfies the
// predicate, or nil if none does. A nil predicate matches the last element.
//
// Fixed (whole family): the original loops used `x > 0` and therefore never
// examined index 0; a one-element slice always yielded nil. Reflection over
// already-typed slices is also dropped.
func LastOfInt(target []int, predicate PredicateOfInt) *int {
	for x := len(target) - 1; x >= 0; x-- {
		obj := target[x]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// LastOfFloat returns a pointer to a copy of the last object that satisfies
// the predicate, or nil if none does.
func LastOfFloat(target []float64, predicate PredicateOfFloat) *float64 {
	for x := len(target) - 1; x >= 0; x-- {
		obj := target[x]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// LastOfString returns a pointer to a copy of the last object that satisfies
// the predicate, or nil if none does.
func LastOfString(target []string, predicate PredicateOfString) *string {
	for x := len(target) - 1; x >= 0; x-- {
		obj := target[x]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// LastOfTime returns a pointer to a copy of the last object that satisfies
// the predicate, or nil if none does.
func LastOfTime(target []time.Time, predicate PredicateOfTime) *time.Time {
	for x := len(target) - 1; x >= 0; x-- {
		obj := target[x]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}

// LastOfDuration returns a pointer to a copy of the last object that
// satisfies the predicate, or nil if none does.
func LastOfDuration(target []time.Duration, predicate PredicateOfDuration) *time.Duration {
	for x := len(target) - 1; x >= 0; x-- {
		obj := target[x]
		if predicate == nil || predicate(obj) {
			return &obj
		}
	}
	return nil
}
// Filter applies a predicate to a collection, returning the elements for
// which it holds. A nil predicate keeps every element (consistent with Any).
// Panics if target (after following pointer indirections) is not a slice.
func Filter(target interface{}, predicate Predicate) []interface{} {
	v := reflect.ValueOf(target)
	for v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		panic("cannot filter non-slice.")
	}
	// Pre-size to the input length: avoids repeated growth; at worst we
	// over-reserve by the number of rejected elements.
	values := make([]interface{}, 0, v.Len())
	for x := 0; x < v.Len(); x++ {
		obj := v.Index(x).Interface()
		if predicate == nil || predicate(obj) {
			values = append(values, obj)
		}
	}
	return values
}
// Select maps the values of the target collection to the mapFn.
func Select(target interface{}, mapFn MapAction) []interface{} {
t := reflect.TypeOf(target)
for t.Kind() == reflect.Ptr {
t = t.Elem()
}
v := reflect.ValueOf(target)
for v.Kind() == reflect.Ptr {
v = v.Elem()
}
if t.Kind() != reflect.Slice {
panic("cannot map non-slice.")
}
values := []interface{}{}
for x := 0; x < v.Len(); x++ {
obj := v.Index(x).Interface()
values = append(values, mapFn(obj))
}
return values
} | linq/linq.go | 0.854308 | 0.616965 | linq.go | starcoder |
package botutil
import (
"github.com/chippydip/go-sc2ai/api"
"github.com/chippydip/go-sc2ai/enums/ability"
"github.com/chippydip/go-sc2ai/enums/buff"
"github.com/chippydip/go-sc2ai/enums/unit"
)
// Unit combines the api Unit with its UnitTypeData and adds some additional
// convenience methods. Most predicate methods guard against a nil embedded
// *api.Unit and report false, so a zero-ish Unit is safe to query.
type Unit struct {
	ctx *UnitContext // owning context; used for issuing orders and observation lookups
	*api.UnitTypeData
	*api.Unit
}
// IsNil checks if the underlying Unit pointer is nil.
func (u Unit) IsNil() bool {
	return u.Unit == nil
}

// IsVisible checks if DisplayType is Visible. Reports false for a nil unit.
func (u Unit) IsVisible() bool {
	return u.Unit != nil && u.DisplayType == api.DisplayType_Visible
}

// IsSnapshot checks if DisplayType is Snapshot. Reports false for a nil unit.
func (u Unit) IsSnapshot() bool {
	return u.Unit != nil && u.DisplayType == api.DisplayType_Snapshot
}

// IsHidden checks if DisplayType is Hidden. Reports false for a nil unit.
func (u Unit) IsHidden() bool {
	return u.Unit != nil && u.DisplayType == api.DisplayType_Hidden
}

// HasAttribute checks if this unit has the specified attribute.
// Reports false for a nil unit.
func (u Unit) HasAttribute(attr api.Attribute) bool {
	if u.Unit == nil {
		return false
	}
	// Attributes come from the unit's type data; linear scan is fine for the
	// handful of attributes a unit type carries.
	for _, a := range u.Attributes {
		if a == attr {
			return true
		}
	}
	return false
}

// IsStructure checks if the unit is a building (has the Structure attribute).
func (u Unit) IsStructure() bool {
	return u.HasAttribute(api.Attribute_Structure)
}
// Pos2D returns the x/y location of the unit.
// NOTE(review): unlike the Is* predicates this does not guard against a nil
// embedded unit — u.Pos would be nil and this would panic; confirm callers
// only invoke it on valid units.
func (u Unit) Pos2D() api.Point2D {
	return u.Pos.ToPoint2D()
}

// IsStarted returns true if the unit has started building (is not ghost placement).
func (u Unit) IsStarted() bool {
	return u.Unit != nil && u.BuildProgress > 0
}

// IsBuilt returns true if the unit is done building (BuildProgress == 1).
func (u Unit) IsBuilt() bool {
	return u.Unit != nil && u.BuildProgress == 1
}

// IsIdle returns true if the unit has no orders.
func (u Unit) IsIdle() bool {
	return u.Unit != nil && len(u.Orders) == 0
}

// IsTownHall returns true if the unit is a Nexus/CC/OC/PF/Hatch/Lair/Hive.
func (u Unit) IsTownHall() bool {
	if u.Unit == nil {
		return false
	}
	switch u.UnitType {
	case unit.Protoss_Nexus,
		unit.Terran_CommandCenter,
		unit.Terran_OrbitalCommand,
		unit.Terran_PlanetaryFortress,
		unit.Zerg_Hatchery,
		unit.Zerg_Lair,
		unit.Zerg_Hive:
		return true
	}
	return false
}
// IsGasBuilding returns true if the unit is an Assimilator/Refinery/Extractor
// (including the rich-geyser variants).
func (u Unit) IsGasBuilding() bool {
	if u.Unit == nil {
		return false
	}
	switch u.UnitType {
	case unit.Protoss_Assimilator,
		unit.Protoss_AssimilatorRich,
		unit.Terran_Refinery,
		unit.Terran_RefineryRich,
		unit.Zerg_Extractor,
		unit.Zerg_ExtractorRich:
		return true
	}
	return false
}

// IsWorker returns true if the unit is a Probe/SCV/Drone (but not MULE).
func (u Unit) IsWorker() bool {
	if u.Unit == nil {
		return false
	}
	switch u.UnitType {
	case unit.Protoss_Probe,
		unit.Terran_SCV,
		unit.Zerg_Drone:
		return true
	}
	return false
}

// IsGathering returns true if the unit is currently gathering.
// The !IsIdle() check guarantees Orders[0] exists before indexing.
func (u Unit) IsGathering() bool {
	return u.Unit != nil && !u.IsIdle() && ability.Remap(u.Orders[0].AbilityId) == ability.Harvest_Gather
}

// IsCarryingResources returns true if the unit is carrying minerals or gas,
// detected via the carry buffs applied to harvesting workers.
func (u Unit) IsCarryingResources() bool {
	if u.Unit == nil {
		return false
	}
	for _, b := range u.BuffIds {
		switch b {
		case buff.CarryMineralFieldMinerals,
			buff.CarryHighYieldMineralFieldMinerals,
			buff.CarryHarvestableVespeneGeyserGas,
			buff.CarryHarvestableVespeneGeyserGasProtoss,
			buff.CarryHarvestableVespeneGeyserGasZerg:
			return true
		}
	}
	return false
}
// HasBuff reports whether the unit currently has the given buff applied.
// Reports false for a nil unit.
func (u Unit) HasBuff(buffID api.BuffID) bool {
	if u.Unit == nil {
		return false
	}
	for _, b := range u.BuffIds {
		if b == buffID {
			return true
		}
	}
	return false
}

// HasEnergy reports whether the unit has at least the given amount of energy.
// Reports false for a nil unit.
func (u Unit) HasEnergy(energy float32) bool {
	return u.Unit != nil && u.Energy >= energy
}

// GroundWeaponDamage returns damage per shot the unit can do to ground targets.
func (u Unit) GroundWeaponDamage() float32 {
	return u.weaponDamage(api.Weapon_Ground)
}

// AirWeaponDamage returns damage per shot the unit can do to air targets.
func (u Unit) AirWeaponDamage() float32 {
	return u.weaponDamage(api.Weapon_Air)
}

// WeaponDamage returns damage per shot the unit can do to the given target,
// choosing the air or ground weapon based on the target's flight state.
func (u Unit) WeaponDamage(target Unit) float32 {
	if target.IsFlying {
		return u.weaponDamage(api.Weapon_Air)
	}
	return u.weaponDamage(api.Weapon_Ground)
}

// weaponDamage returns the highest per-shot damage among the unit's weapons
// that can hit the given target type; 0 if no weapon applies.
func (u Unit) weaponDamage(weaponType api.Weapon_TargetType) float32 {
	maxDamage := float32(0)
	for _, weapon := range u.Weapons {
		if weapon.Type == weaponType || weapon.Type == api.Weapon_Any {
			if weapon.Damage > maxDamage {
				maxDamage = weapon.Damage
			}
		}
	}
	return maxDamage
}
// WeaponRange returns the maximum range to attack the target from. If
// the result is negative the target cannot be attacked.
func (u Unit) WeaponRange(target Unit) float32 {
	if target.IsNil() {
		return -1
	}
	weaponType := api.Weapon_Ground
	if target.IsFlying {
		weaponType = api.Weapon_Air
	}
	// Find the longest-range weapon that both matches the target type and
	// actually deals damage.
	maxRange := float32(-1)
	for _, weapon := range u.Weapons {
		if weapon.Type == weaponType || weapon.Type == api.Weapon_Any {
			if weapon.Damage > 0 && weapon.Range > maxRange {
				maxRange = weapon.Range
			}
		}
	}
	return maxRange
}

// IsInWeaponsRange returns true if the unit is within weapons range of the
// target. The gap parameter shrinks the effective distance (a positive gap
// lets the caller demand extra closeness). Unit radii are added to the
// weapon range, matching the game's edge-to-edge range model.
func (u Unit) IsInWeaponsRange(target Unit, gap float32) bool {
	if u.Unit == nil {
		return false
	}
	maxRange := u.WeaponRange(target)
	if maxRange < 0 {
		return false
	}
	dist := float32(u.Pos2D().Distance(target.Pos2D()))
	return dist-gap <= maxRange+u.Radius+target.Radius
}
// AttackTarget issues an attack order if the unit isn't already attacking the
// target. Targets that were never observed or cannot be targeted directly are
// attacked by position instead.
// NOTE(review): a nil target reaching the position branch would panic in
// Pos2D(); confirm callers pass valid targets.
func (u Unit) AttackTarget(target Unit) {
	if u.needsAttackTargetOrder(target) {
		if u.ctx.WasObserved(target.Tag) && target.CanBeTargeted() {
			u.OrderTarget(ability.Attack, target)
		} else {
			u.OrderPos(ability.Attack, target.Pos2D())
		}
	}
}

// needsAttackTargetOrder reports whether a new attack order is required:
// false only when the unit's current first order is already an attack on
// this exact target.
func (u Unit) needsAttackTargetOrder(target Unit) bool {
	if !u.IsIdle() && !target.IsNil() {
		if ability.Remap(u.Orders[0].AbilityId) == ability.Attack &&
			u.Orders[0].GetTargetUnitTag() == target.Tag {
			return false
		}
	}
	return true
}

// AttackMove issues an attack order if the unit isn't already attacking within tollerance of pos.
func (u Unit) AttackMove(pos api.Point2D, tollerance float32) {
	if u.needsAttackMoveOrder(pos, tollerance) {
		u.OrderPos(ability.Attack, pos)
	}
}

// needsAttackMoveOrder reports whether a new attack-move order is required.
// Distances are compared squared to avoid a sqrt.
func (u Unit) needsAttackMoveOrder(pos api.Point2D, tollerance float32) bool {
	if !u.IsIdle() {
		i := 0
		// If the first order is a targeted attack, examine the second order
		if tag := u.Orders[0].GetTargetUnitTag(); tag != 0 &&
			len(u.Orders) > 1 &&
			ability.Remap(u.Orders[0].AbilityId) == ability.Attack {
			i++
		}
		// If the non-specific order is an attack close enough to pos just use that
		if p := u.Orders[i].GetTargetWorldSpacePos(); p != nil &&
			ability.Remap(u.Orders[i].AbilityId) == ability.Attack &&
			p.ToPoint2D().Distance2(pos) <= tollerance*tollerance {
			return false // already attacking
		}
	} else if u.Pos2D().Distance2(pos) <= tollerance*tollerance {
		return false // close enough
	}
	return true
}
// MoveTo issues a move order if the unit isn't already moving to or within tollerance of pos.
func (u Unit) MoveTo(pos api.Point2D, tollerance float32) {
	if u.needsMoveToOrder(pos, tollerance) {
		u.OrderPos(ability.Move, pos)
	}
}

// needsMoveToOrder reports whether a new move order is required: false when
// the unit is already moving to (or standing within) tollerance of pos.
// Distances are compared squared to avoid a sqrt.
func (u Unit) needsMoveToOrder(pos api.Point2D, tollerance float32) bool {
	if !u.IsIdle() {
		if p := u.Orders[0].GetTargetWorldSpacePos(); p != nil &&
			ability.Remap(u.Orders[0].AbilityId) == ability.Move &&
			p.ToPoint2D().Distance2(pos) <= tollerance*tollerance {
			return false // already on the way
		}
	} else {
		if u.Pos2D().Distance2(pos) <= tollerance*tollerance {
			return false // already there
		}
	}
	return true
}

// CanBeTargeted returns true if the unit type can be targeted for attacks.
// Interceptors are the one unit type that cannot be targeted directly.
func (u Unit) CanBeTargeted() bool {
	return u.UnitType != unit.Protoss_Interceptor
}
package test
import (
"fmt"
"strings"
)
// test is the minimal subset of *testing.T this package needs; keeping it an
// interface allows substituting a fake in this package's own tests.
type test interface {
	Helper()
	Error(...interface{})
}

// Wrapper around testing.T.
type Assertion struct {
	t test
}

// Constructs and returns the wrapper. Panics if t is nil so misuse fails
// loudly at setup rather than at the first assertion.
func NewAssertion(t test) *Assertion {
	if t == nil {
		panic("nil test")
	}
	return &Assertion{t}
}
// buildMessage renders an assertion failure message. With no extra args the
// default message is returned. Otherwise args[0] must be a format string; a
// lone string is returned as-is and additional args are its operands.
// Callers in this package forward their variadic slice as a single value
// (buildMessage(msg, args)), so a solitary []interface{} is unwrapped first.
//
// Fixed: the original inverted the type assertion (treating a NON-string
// args[0] as the format and panicking whenever it actually WAS a string),
// and passed args[1:] to Sprintf without spreading it (`...`), formatting
// the slice itself as a single operand.
func buildMessage(message string, args ...interface{}) string {
	// Unwrap the caller's variadic slice when it was passed as one value.
	if len(args) == 1 {
		if inner, ok := args[0].([]interface{}); ok {
			args = inner
		}
	}
	if len(args) == 0 {
		return message
	}
	format, ok := args[0].(string)
	if !ok {
		panic("args[0] is not a string")
	}
	if len(args) == 1 {
		return format
	}
	// Guard against operand/verb count mismatches, ignoring literal "%%".
	if strings.Count(format, "%")-strings.Count(format, "%%") != len(args)-1 {
		panic("invalid format")
	}
	return fmt.Sprintf(format, args[1:]...)
}
// Checks that the provided boolean is true. Extra args form an optional
// custom failure message (format string plus operands). Returns success.
// NOTE(review): args is forwarded to buildMessage as a single value rather
// than spread with args...; buildMessage must unwrap it — verify.
func (assert *Assertion) True(actual bool, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage("expected: true, actual: false", args)
	if actual {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided boolean is false. Returns success.
func (assert *Assertion) False(actual bool, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage("expected: false, actual: true", args)
	if !actual {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided value is nil (via the package's isNil helper,
// which presumably also treats typed-nil pointers as nil — confirm).
func (assert *Assertion) Nil(actual interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("expected: nil, actual: %v", actual), args)
	if isNil(actual) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided value is not nil. Returns success.
func (assert *Assertion) NotNil(actual interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage("unexpected: nil, actual: nil", args)
	if !isNil(actual) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the two provided values are equal using ==.
// NOTE(review): == panics on uncomparable types (slices, maps, funcs); use
// a DeepEqual-style assertion for those.
func (assert *Assertion) Equals(expected interface{}, actual interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("expected: %v, actual: %v", expected, actual), args)
	if expected == actual {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the two provided values are not equal using !=.
func (assert *Assertion) NotEquals(unexpected interface{}, actual interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("unexpected: %v, actual: %v", unexpected, actual), args)
	if unexpected != actual {
		return true
	}
	assert.t.Error(message)
	return false
}
// Checks that the provided array is of the provided length. Length is taken
// via the package's getArrayLength helper. Returns success.
func (assert *Assertion) ArrayLength(array interface{}, expected int, args ...interface{}) bool {
	assert.t.Helper()
	var arrayLen = getArrayLength(array)
	var message = buildMessage(fmt.Sprintf("expected length: %v, actual: %v", expected, arrayLen), args)
	if expected == arrayLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided array is not of the provided length.
func (assert *Assertion) NotArrayLength(array interface{}, unexpected int, args ...interface{}) bool {
	assert.t.Helper()
	var arrayLen = getArrayLength(array)
	var message = buildMessage(fmt.Sprintf("unexpected length: %v, actual: %v", unexpected, arrayLen), args)
	if unexpected != arrayLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided slice is of the provided length.
func (assert *Assertion) SliceLength(slice interface{}, expected int, args ...interface{}) bool {
	assert.t.Helper()
	var sliceLen = getSliceLength(slice)
	var message = buildMessage(fmt.Sprintf("expected length: %v, actual: %v", expected, sliceLen), args)
	if expected == sliceLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided slice is not of the provided length.
func (assert *Assertion) NotSliceLength(slice interface{}, unexpected int, args ...interface{}) bool {
	assert.t.Helper()
	var sliceLen = getSliceLength(slice)
	var message = buildMessage(fmt.Sprintf("unexpected length: %v, actual: %v", unexpected, sliceLen), args)
	if unexpected != sliceLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map is of the provided length.
func (assert *Assertion) MapLength(_map interface{}, expected int, args ...interface{}) bool {
	assert.t.Helper()
	var mapLen = getMapLength(_map)
	var message = buildMessage(fmt.Sprintf("expected length: %v, actual: %v", expected, mapLen), args)
	if expected == mapLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map is not of the provided length.
func (assert *Assertion) NotMapLength(_map interface{}, unexpected int, args ...interface{}) bool {
	assert.t.Helper()
	var mapLen = getMapLength(_map)
	var message = buildMessage(fmt.Sprintf("unexpected length: %v, actual: %v", unexpected, mapLen), args)
	if unexpected != mapLen {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided string is of the provided length.
// Note: len(string) counts bytes, not runes.
func (assert *Assertion) StringLength(string string, expected int, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("expected length: %v, actual: %v", expected, len(string)), args)
	if expected == len(string) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided string is not of the provided length (in bytes).
func (assert *Assertion) NotStringLength(string string, unexpected int, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("unexpected length: %v, actual: %v", unexpected, len(string)), args)
	if unexpected != len(string) {
		return true
	}
	assert.t.Error(message)
	return false
}
// Checks that the provided array contains the provided element (via the
// package's arrayContains helper). Returns success.
func (assert *Assertion) ArrayContains(array interface{}, element interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("array %v does not contain %v", array, element), args)
	if arrayContains(array, element) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided array does not contain the provided element.
func (assert *Assertion) NotArrayContains(array interface{}, element interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("array %v contains %v", array, element), args)
	if !arrayContains(array, element) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided slice contains the provided element.
func (assert *Assertion) SliceContains(slice interface{}, element interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("slice %v does not contain %v", slice, element), args)
	if sliceContains(slice, element) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided slice does not contain the provided element.
func (assert *Assertion) NotSliceContains(slice interface{}, element interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("slice %v contains %v", slice, element), args)
	if !sliceContains(slice, element) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map contains the provided key.
func (assert *Assertion) MapHasKey(_map interface{}, key interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("map %v does not contain key %v", _map, key), args)
	if mapContainsKey(_map, key) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map does not contain the provided key.
func (assert *Assertion) NotMapHasKey(_map interface{}, key interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("map %v contains key %v", _map, key), args)
	if !mapContainsKey(_map, key) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map contains the provided value.
func (assert *Assertion) MapHasValue(_map interface{}, value interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("map %v does not contain value %v", _map, value), args)
	if mapContainsValue(_map, value) {
		return true
	}
	assert.t.Error(message)
	return false
}

// Checks that the provided map does not contain the provided value.
func (assert *Assertion) NotMapHasValue(_map interface{}, value interface{}, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("map %v contains value %v", _map, value), args)
	if !mapContainsValue(_map, value) {
		return true
	}
	assert.t.Error(message)
	return false
}
// Checks that the provided map contains the provided key-value pair.
func (assert *Assertion) MapHasPair(_map interface{}, key interface{}, value interface{}, args ...interface{}) bool {
assert.t.Helper()
var message = buildMessage(fmt.Sprintf("map %v does not contain the pair (%v, %v)", _map, key, value), args)
if mapContainsPair(_map, key, value) {
return true
}
assert.t.Error(message)
return false
}
// NotMapHasPair checks that the provided map does not contain the provided key-value pair.
func (assert *Assertion) NotMapHasPair(_map interface{}, key interface{}, value interface{}, args ...interface{}) bool {
	assert.t.Helper()
	// BUG FIX: the failure message previously read "does not contain the
	// pair", which is the opposite of what this assertion reports when it
	// fails (the map DOES contain the pair).
	var message = buildMessage(fmt.Sprintf("map %v contains the pair (%v, %v)", _map, key, value), args)
	if !mapContainsPair(_map, key, value) {
		return true
	}
	assert.t.Error(message)
	return false
}
// StringContains checks that the provided string contains the provided substring.
func (assert *Assertion) StringContains(haystack string, needle string, args ...interface{}) bool {
	assert.t.Helper()
	// Message fix: a space was missing between "substring" and the quoted
	// needle.
	var message = buildMessage(fmt.Sprintf("string \"%v\" does not contain the substring \"%v\"", haystack, needle), args)
	if strings.Contains(haystack, needle) {
		return true
	}
	assert.t.Error(message)
	return false
}

// NotStringContains checks that the provided string does not contain the provided substring.
func (assert *Assertion) NotStringContains(haystack string, needle string, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("string \"%v\" contains the substring \"%v\"", haystack, needle), args)
	if !strings.Contains(haystack, needle) {
		return true
	}
	assert.t.Error(message)
	return false
}
// StringHasPrefix checks that the provided string has the provided prefix.
func (assert *Assertion) StringHasPrefix(_string string, prefix string, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("string \"%v\" does not begin with \"%v\"", _string, prefix), args)
	if strings.HasPrefix(_string, prefix) {
		return true
	}
	assert.t.Error(message)
	return false
}

// NotStringHasPrefix checks that the provided string does not have the provided prefix.
func (assert *Assertion) NotStringHasPrefix(_string string, prefix string, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("string \"%v\" begins with \"%v\"", _string, prefix), args)
	if !strings.HasPrefix(_string, prefix) {
		return true
	}
	assert.t.Error(message)
	return false
}

// StringHasSuffix checks that the provided string has the provided suffix.
func (assert *Assertion) StringHasSuffix(_string string, suffix string, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("string \"%v\" does not end with \"%v\"", _string, suffix), args)
	if strings.HasSuffix(_string, suffix) {
		return true
	}
	assert.t.Error(message)
	return false
}

// NotStringHasSuffix checks that the provided string does not have the provided suffix.
func (assert *Assertion) NotStringHasSuffix(_string string, suffix string, args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage(fmt.Sprintf("string \"%v\" ends with \"%v\"", _string, suffix), args)
	if !strings.HasSuffix(_string, suffix) {
		return true
	}
	assert.t.Error(message)
	return false
}
// Panic checks that the provided function panics.
func (assert *Assertion) Panic(function func(), args ...interface{}) bool {
	assert.t.Helper()
	// fmt.Sprintf with no format arguments was redundant (staticcheck
	// S1039); the constant string is passed directly.
	var message = buildMessage("function did not panic", args)
	if panics(function) {
		return true
	}
	assert.t.Error(message)
	return false
}

// NotPanic checks that the provided function does not panic.
func (assert *Assertion) NotPanic(function func(), args ...interface{}) bool {
	assert.t.Helper()
	var message = buildMessage("function panicked", args)
	if !panics(function) {
		return true
	}
	assert.t.Error(message)
	return false
}
// Checks that the provided function panics with the provided value.
func (assert *Assertion) PanicValue(function func(), expected interface{}, args ...interface{}) bool {
assert.t.Helper()
var messagePanic = buildMessage(fmt.Sprintf("function did not panic"), args)
var messageValue = buildMessage(fmt.Sprintf("function did not panic with value %v", expected), args)
var panicked, value = panicsWithValue(function, expected)
if !panicked {
assert.t.Error(messagePanic)
} else if !value {
assert.t.Error(messageValue)
} else {
return true
}
return false
}
// Checks that the provided function panics without the provided value.
func (assert *Assertion) NotPanicValue(function func(), unexpected interface{}, args ...interface{}) bool {
assert.t.Helper()
var messagePanic = buildMessage(fmt.Sprintf("function did not panic"), args)
var messageValue = buildMessage(fmt.Sprintf("function panicked with value %v", unexpected), args)
var panicked, value = panicsWithValue(function, unexpected)
if !panicked {
assert.t.Error(messagePanic)
} else if value {
assert.t.Error(messageValue)
} else {
return true
}
return false
} | test/assertions.go | 0.809012 | 0.660966 | assertions.go | starcoder |
package compact
import (
"github.com/dnovikoff/tempai-core/tile"
)
// Mask packs a tile index and a 4-bit presence bitmask into one unsigned
// value: bits 0-3 hold one bit per tile copy, the remaining high bits hold
// the shifted tile.
type Mask uint

const (
	// FullMask has all four copy bits set (binary 1111).
	FullMask = 15
)

// MaskByCount returns a bitmask with the c lowest bits set (0 <= c <= 4).
func MaskByCount(c int) uint {
	return FullMask >> uint(4-c)
}

// NewMask builds a Mask for tile t carrying the given copy bits.
// Only the low 4 bits of mask are kept.
func NewMask(mask uint, t tile.Tile) Mask {
	m := Mask(shift(t)) << 4
	return m | Mask(mask&15)
}

// Tile returns the tile encoded in the high bits of the mask.
func (m Mask) Tile() tile.Tile {
	return tile.Tile(m>>4) + tile.TileBegin
}

// Mask returns the low 4 copy-presence bits.
func (m Mask) Mask() uint {
	return (uint(m) & 15)
}
// Count returns how many copy bits are set (the population count of the
// low 4 bits).
func (m Mask) Count() int {
	bits := m.Mask()
	n := 0
	for bits != 0 {
		bits &= bits - 1 // clear the lowest set bit
		n++
	}
	return n
}

// NaiveCount counts the set copy bits by inspecting each of the four bit
// positions individually.
func (m Mask) NaiveCount() int {
	cnt := 0
	for i := 0; i < 4; i++ {
		cnt += int((m >> uint(i)) & 1)
	}
	return cnt
}
// SetCount returns a mask for the same tile whose copy bits encode `in`
// copies (0-4) as a contiguous run of set bits. Values outside 0-4 fall
// through the switch and yield an empty copy mask.
func (m Mask) SetCount(in int) Mask {
	x := uint(0)
	switch in {
	case 0:
	case 1:
		x = 1
	case 2:
		x = 1 + 2
	case 3:
		x = 1 + 2 + 4
	case 4:
		x = 1 + 2 + 4 + 8
	}
	return NewMask(x, m.Tile())
}

// Instances collects a tile instance for every set copy bit.
func (m Mask) Instances() tile.Instances {
	ret := make(tile.Instances, 0, 4)
	m.Each(func(t tile.Instance) bool {
		ret = append(ret, t)
		return true
	})
	return ret
}

// InvertTiles returns the mask for the same tile with its copy bits flipped.
func (m Mask) InvertTiles() Mask {
	return NewMask(^m.Mask(), m.Tile())
}

// FirstCopyNative returns the lowest set copy bit, testing each bit in turn.
// It returns tile.NullCopy when no copy bit is set.
func (m Mask) FirstCopyNative() tile.CopyID {
	switch {
	case m&1 == 1:
		return 0
	case m&2 == 2:
		return 1
	case m&4 == 4:
		return 2
	case m&8 == 8:
		return 3
	}
	return tile.NullCopy
}

// FirstCopy returns the lowest set copy bit via a jump table over all 16
// possible 4-bit masks. It returns tile.NullCopy for an empty mask.
func (m Mask) FirstCopy() tile.CopyID {
	switch m & 15 {
	case 1, 3, 5, 7, 9, 11, 13, 15:
		return 0
	case 2, 6, 10, 14:
		return 1
	case 4, 12:
		return 2
	case 8:
		return 3
	}
	return tile.NullCopy
}

// First returns the instance for the lowest set copy bit, or
// tile.InstanceNull when the mask is empty.
func (m Mask) First() tile.Instance {
	c := m.FirstCopy()
	if c == tile.NullCopy {
		return tile.InstanceNull
	}
	return m.Tile().Instance(c)
}
// Each calls f with an instance for every set copy bit, in ascending copy
// order. Iteration stops early (returning false) as soon as f returns false;
// otherwise Each returns true.
func (m Mask) Each(f func(tile.Instance) bool) bool {
	t := m.Tile()
	for i := tile.CopyID(0); i < 4; i++ {
		if (m>>uint(i))&1 == 0 {
			continue
		}
		if !f(t.Instance(i)) {
			return false
		}
	}
	return true
}
// Check reports whether the copy bit at index is set.
// NOTE(review): index is not range-checked; a value above 3 would test the
// tile bits — confirm callers only pass 0-3.
func (m Mask) Check(index tile.CopyID) bool {
	return ((1 << uint(index)) & m) != 0
}

// SetIntBit returns the mask with the given copy bit set; the &15 guard
// keeps the tile bits untouched.
func (m Mask) SetIntBit(index uint) Mask {
	return m | ((1 << index) & 15)
}

// SetCopyBit returns the mask with the given copy bit set.
func (m Mask) SetCopyBit(cid tile.CopyID) Mask {
	return m.SetIntBit(uint(cid))
}

// UnsetIntBit returns the mask with the given copy bit cleared; the &15
// guard keeps the tile bits untouched.
func (m Mask) UnsetIntBit(index uint) Mask {
	mask := Mask(1<<index) & 15
	return m &^ mask
}

// Merge returns the union of both masks' copy bits, keeping m's tile.
func (m Mask) Merge(x Mask) Mask {
	return NewMask(x.Mask()|m.Mask(), m.Tile())
}

// Remove returns m with x's copy bits cleared, keeping m's tile.
func (m Mask) Remove(x Mask) Mask {
	return NewMask(m.Mask()&(^x.Mask()), m.Tile())
}

// UnsetCopyBit returns the mask with the given copy bit cleared.
func (m Mask) UnsetCopyBit(cid tile.CopyID) Mask {
	return m.UnsetIntBit(uint(cid))
}
func (m Mask) IsFull() bool {
return m.Mask() == FullMask
}
func (m Mask) IsEmpty() bool {
return m.Mask() == 0
} | compact/mask.go | 0.582135 | 0.55429 | mask.go | starcoder |
package metrics_metadata
// The metadata for a single retrieved metric timeseries
type MetricTimeSeries struct {
// Name of the MTS. Metric names are UTF-8 strings with a maximum length of 256 characters (1024 bytes).
Metric string `json:"metric,omitempty"`
// Metric type of the MTS for this metadata. The possible values are \"GAUGE\", \"COUNTER\", and \"CUMULATIVE_COUNTER\".
Type string `json:"type,omitempty"`
// Dimension metadata for the MTS, in the form of a JSON object (dictionary). Each property is a dimension name and dimension value. The section [Dimension Criteria](https://developers.signalfx.com/metrics/metrics_metadata_overview.html#_dimension_criteria) lists the requirements for dimensions.
Dimensions map[string]interface{} `json:"dimensions,omitempty"`
// Custom property metadata for the MTS, in the form of a JSON object (dictionary). Each property is a custom property name and value. The section [Custom Properties Criteria](https://developers.signalfx.com/metrics/metrics_metadata_overview.html#_custom_properties_criteria) lists the requirements for custom properties. Custom property metadata for the MTS, in the form of a JSON object (dictionary). Each property is a custom property name and value. Custom property names and values have these criteria: <br> **Name:** <br> * UTF-8 string, maximum length of 128 characters (512 bytes) * Must start with an uppercase or lowercase letter. The rest of the name can contain letters, numbers, underscores (`_`) and hyphens (`-`). * Must not start with the underscore character (`_`) <br> **Value:** * String: Maximum length 256 UTF-8 characters (1024 bytes) * Integer or float: Maximum length 8192 bits (1024 bytes)
CustomProperties map[string]string `json:"customProperties,omitempty"`
// Tag metadata for the MTS, in the form of a JSON array (a list) with one element for each tag. <br> Each tag is a UTF-8 string, starting with an uppercase or lowercase alphabetic character. The maximum length is expressed in characters; if a string consists solely of single-byte UTF-8 entities, 1024 characters are available.
Tags []string `json:"tags,omitempty"`
// The time that the MTS was created, in Unix time UTC-relative
Created int64 `json:"created,omitempty"`
// SignalFx ID of the user who created the MTS. If the value is \"AAAAAAAAAAA\", SignalFx created the MTS.
Creator string `json:"creator,omitempty"`
// The time that the MTS was last updated, in Unix time UTC-relative
LastUpdated int64 `json:"lastUpdated,omitempty"`
// SignalFx ID of the user who last updated the MTS. If the value is \"AAAAAAAAAAA\", SignalFx last updated the metric.
LastUpdatedBy string `json:"lastUpdatedBy,omitempty"`
} | metrics_metadata/model_metric_time_series.go | 0.861422 | 0.494812 | model_metric_time_series.go | starcoder |
package blockchain
import (
"math"
"github.com/incognitochain/incognito-chain/common"
)
// BuildKeccak256MerkleTree creates a merkle tree using Keccak256 hash func.
// This merkle tree is used for storing all beacon (and bridge) data to relay them to Ethereum.
// The tree is returned as a linear array: the first nextPoT entries hold the
// leaf hashes, later entries hold parent nodes, and the last entry is the root.
func BuildKeccak256MerkleTree(data [][]byte) [][]byte {
	// An empty input produces a single all-zero 32-byte root.
	if len(data) == 0 {
		emptyRoot := [32]byte{}
		return [][]byte{emptyRoot[:]}
	}
	// Calculate how many entries are required to hold the binary merkle
	// tree as a linear array and create an array of that size.
	nextPoT := NextPowerOfTwo(len(data))
	arraySize := nextPoT*2 - 1
	merkles := make([][]byte, arraySize)
	// Create the base data hashes and populate the array with them.
	for i, d := range data {
		h := common.Keccak256(d)
		merkles[i] = h[:]
	}
	// Start the array offset after the last data and adjusted to the
	// next power of two.
	offset := nextPoT
	for i := 0; i < arraySize-1; i += 2 {
		switch {
		// When there is no left child node, the parent is nil too.
		case merkles[i] == nil:
			merkles[offset] = nil
		// When there is no right child, the parent is generated by
		// hashing the concatenation of the left child with itself.
		case merkles[i+1] == nil:
			newHash := keccak256MerkleBranches(merkles[i], merkles[i])
			merkles[offset] = newHash
		// The normal case sets the parent node to the keccak256
		// of the concatentation of the left and right children.
		default:
			newHash := keccak256MerkleBranches(merkles[i], merkles[i+1])
			merkles[offset] = newHash
		}
		offset++
	}
	return merkles
}
// GetKeccak256MerkleRoot returns the merkle root (the last element of the
// linear tree array) for the given data.
func GetKeccak256MerkleRoot(data [][]byte) []byte {
	tree := BuildKeccak256MerkleTree(data)
	return tree[len(tree)-1]
}
// GetKeccak256MerkleProofFromTree extracts the merkle proof for leaf `id`
// from a linear tree array (as built by BuildKeccak256MerkleTree). It
// returns the sibling hashes from leaf to root and, for each, whether the
// sibling sits to the left of the proven node.
func GetKeccak256MerkleProofFromTree(merkles [][]byte, id int) ([][]byte, []bool) {
	height := uint(math.Log2(float64(len(merkles))))
	path := [][]byte{}
	left := []bool{}
	start := 0
	for level := uint(0); level < height; level++ {
		sibling := id ^ 1
		path = append(path, merkles[sibling])
		left = append(left, sibling < id)
		levelSize := 1 << (height - level)
		// Move id to its parent inside the next level's segment.
		id = (id-start)/2 + start + levelSize
		start += levelSize
	}
	return path, left
}
// keccak256MerkleBranches concatenates the 2 branches of a Merkle tree and hash it to create the parent node using Keccak256 hash function
func keccak256MerkleBranches(left []byte, right []byte) []byte {
// Concatenate the left and right nodes.
hash := append(left, right...)
newHash := common.Keccak256(hash)
return newHash[:]
} | blockchain/keccak256_merkle.go | 0.688364 | 0.474205 | keccak256_merkle.go | starcoder |
package generator
// wxyzWing removes candidates. A group consists of one "pivot" cell and 3 "wing" cells. The pivot must be able to see all of the wing cells. The group includes 4 digits, exactly one of which must be "unrestricted". A digit is restricted if every occurrance of the digit in the group can see every other occurrance.
func (g *Grid) wxyzWing(verbose uint) (res bool) {
for r := zero; r < rows; r++ {
for c := zero; c < cols; c++ {
p := point{r, c}
cell := g.cells[r][c]
if bitCount[cell] < 2 {
continue
}
var points []point
points = append(points, box.unit[boxOf(r, c)][:]...)
points = append(points, col.unit[c][:]...)
points = append(points, row.unit[r][:]...)
for p1i, p1 := range points {
if p == p1 {
continue
}
cell1 := *g.pt(p1)
b1 := boxOfPoint(p1)
for p2i, p2 := range points {
if p == p2 || p1 == p2 || p1i >= p2i {
continue
}
b2 := boxOfPoint(p2)
if b1 != b2 && p1.c != p2.c && p1.r != p2.c {
continue
}
cell2 := *g.pt(p2)
// p3 is the disjoint wing cell. It cannot see the other two wings cells.
for _, p3 := range points {
if p == p3 || p1 == p3 || p2 == p3 {
continue
}
b3 := boxOfPoint(p3)
// At least one of the wing cells must not be able to see the other wing cells.
if b1 == b3 || b2 == b3 ||
p1.c == p3.c || p2.c == p3.c ||
p1.r == p3.r || p2.r == p3.r {
continue
}
cell3 := *g.pt(p3)
// There must be a total of 4 digits in the group.
if bitCount[cell|cell1|cell2|cell3] != 4 {
continue
}
c1 := cell & cell1
c2 := cell & cell2
c3 := cell & cell3
// group := c1 | c2 | c3
unrestricted := (c1 | c2) & c3
// The pivot must have at least two digits in common with each wing cell and there must be exactly one unrestricted digit in the group.
if bitCount[c1] < 2 ||
bitCount[c2] < 2 ||
bitCount[c3] < 2 ||
bitCount[unrestricted] != 1 {
continue
}
overlap := influence[p.r][p.c].and(influence[p1.r][p1.c]).and(influence[p2.r][p2.r]).and(influence[p3.r][p3.c])
overlap.unset(p).unset(p1).unset(p2).unset(p3) // Remove the group members from the overlap so that they do not get digits cleared.
overlap.process(func(r, c uint8) {
if g.pt(point{r, c}).andNot(unrestricted) {
// fmt.Printf("p: %s (%s), p1: %s (%s), p2: %s (%s), p3: %s (%s), c1: %s, c2: %s, c3: %s, group: %s, unrestricted: %s\n", p, cell, p1, cell1, p2, cell2, p3, cell3, c1, c2, c3, group, unrestricted)
g.cellChange(&res, verbose, "wxyzWing: removing %s from (%d, %d) because of %s, %s, %s, %s\n", unrestricted, r, c, p, p1, p2, p3)
}
})
}
}
}
}
}
return
} | generator/wxyzWing.go | 0.667364 | 0.552962 | wxyzWing.go | starcoder |
package quantile
import (
"bytes"
"fmt"
"sort"
)
// SliceSummary is a GK-summary (Greenwald-Khanna quantile summary) with a
// slice backend. Entries are kept sorted by value; N is the total number of
// observations inserted.
type SliceSummary struct {
	Entries []Entry
	N int
}

// NewSliceSummary allocates a new, empty GK summary backed by a slice.
func NewSliceSummary() *SliceSummary {
	return &SliceSummary{}
}

// String renders the summary for debugging: each entry's value, G, Delta and
// the running min/max rank bounds, three entries per line.
func (s SliceSummary) String() string {
	var b bytes.Buffer
	b.WriteString("summary size: ")
	b.WriteString(fmt.Sprintf("%d", s.N))
	b.WriteRune('\n')
	gsum := 0
	for i, e := range s.Entries {
		gsum += e.G
		b.WriteString(fmt.Sprintf("v:%6.02f g:%05d d:%05d rmin:%05d rmax: %05d ", e.V, e.G, e.Delta, gsum, gsum+e.Delta))
		if i%3 == 2 {
			b.WriteRune('\n')
		}
	}
	return b.String()
}
// Insert inserts a new value v in the summary paired with t (the ID of the span it was reported from)
// NOTE(review): t is not used by this implementation — confirm whether it is
// kept only for interface compatibility.
func (s *SliceSummary) Insert(v float64, t uint64) {
	newEntry := Entry{
		V: v,
		G: 1,
		Delta: int(2 * EPSILON * float64(s.N)),
	}
	// Find the sorted insertion position for v.
	i := sort.Search(len(s.Entries), func(i int) bool { return v < s.Entries[i].V })
	// New minimum or maximum: rank is exact, so Delta is 0.
	if i == 0 || i == len(s.Entries) {
		newEntry.Delta = 0
	}
	// allocate one more
	s.Entries = append(s.Entries, Entry{})
	copy(s.Entries[i+1:], s.Entries[i:])
	s.Entries[i] = newEntry
	s.N++
	// Compress periodically (every 1/(2*EPSILON) insertions) to bound size.
	if s.N%int(1.0/float64(2.0*EPSILON)) == 0 {
		s.compress()
	}
}

// compress merges adjacent entries whose combined rank uncertainty stays
// within 2*EPSILON*N, shrinking the entries slice in place.
func (s *SliceSummary) compress() {
	epsN := int(2 * EPSILON * float64(s.N))
	var j, sum int
	for i := len(s.Entries) - 1; i >= 2; i = j - 1 {
		j = i - 1
		sum = s.Entries[j].G
		// Extend the merge window leftwards while the error bound holds.
		for j >= 1 && sum+s.Entries[i].G+s.Entries[i].Delta < epsN {
			j--
			sum += s.Entries[j].G
		}
		sum -= s.Entries[j].G
		j++
		if j < i {
			// Fold entries j..i into a single entry at position j.
			s.Entries[j].V = s.Entries[i].V
			s.Entries[j].G = sum + s.Entries[i].G
			s.Entries[j].Delta = s.Entries[i].Delta
			// copy the rest
			copy(s.Entries[j+1:], s.Entries[i+1:])
			// truncate to the numbers of removed elements
			s.Entries = s.Entries[:len(s.Entries)-(i-j)]
		}
	}
}

// Quantile returns an EPSILON estimate of the element at quantile 'q' (0 <= q <= 1)
func (s *SliceSummary) Quantile(q float64) float64 {
	if len(s.Entries) == 0 {
		return 0
	}
	// convert quantile to rank
	r := int(q*float64(s.N) + 0.5)
	var rmin int
	epsN := int(EPSILON * float64(s.N))
	for i := 0; i < len(s.Entries)-1; i++ {
		t := s.Entries[i]
		n := s.Entries[i+1]
		rmin += t.G
		if r+epsN < rmin+n.G+n.Delta {
			if r+epsN < rmin+n.G {
				return t.V
			}
			return n.V
		}
	}
	return s.Entries[len(s.Entries)-1].V
}

// Merge folds s2's entries into s, keeping the entries sorted by value, then
// compresses the result. s2 is not modified.
func (s *SliceSummary) Merge(s2 *SliceSummary) {
	if s2.N == 0 {
		return
	}
	if s.N == 0 {
		s.N = s2.N
		s.Entries = make([]Entry, 0, len(s2.Entries))
		s.Entries = append(s.Entries, s2.Entries...)
		return
	}
	pos := 0
	end := len(s.Entries) - 1
	// Grow once up front; the tail past `end` is scratch space for shifts.
	empties := make([]Entry, len(s2.Entries))
	s.Entries = append(s.Entries, empties...)
	for _, e := range s2.Entries {
		for pos <= end {
			if e.V > s.Entries[pos].V {
				pos++
				continue
			}
			copy(s.Entries[pos+1:end+2], s.Entries[pos:end+1])
			s.Entries[pos] = e
			pos++
			end++
			break
		}
		// e is larger than every remaining entry: append at the cursor.
		if pos > end {
			s.Entries[pos] = e
			pos++
		}
	}
	s.N += s2.N
	s.compress()
}
// Copy returns a deep copy of the summary: the entries slice is duplicated
// so later mutations of either summary do not affect the other.
func (s *SliceSummary) Copy() *SliceSummary {
	clone := NewSliceSummary()
	clone.N = s.N
	clone.Entries = make([]Entry, len(s.Entries))
	copy(clone.Entries, s.Entries)
	return clone
}
// BySlices returns a slice of Summary slices that represents weighted ranges of
// values
// e.g. [0, 1] : 3
// [1, 23] : 12 ...
// The number of intervals is related to the precision kept in the internal
// data structure to ensure epsilon*s.N precision on quantiles, but it's bounded.
// When the bounds of the interval are equal, the weight is the number of times
// that exact value was inserted in the summary.
func (s *SliceSummary) BySlices() []SummarySlice {
var slices []SummarySlice
if len(s.Entries) == 0 {
return slices
}
// by def in GK first val is always the min
fs := SummarySlice{
Start: s.Entries[0].V,
End: s.Entries[0].V,
Weight: 1,
}
slices = append(slices, fs)
last := fs.End
for _, cur := range s.Entries[1:] {
lastSlice := &slices[len(slices)-1]
if cur.V == lastSlice.Start && cur.V == lastSlice.End {
lastSlice.Weight += cur.G
continue
}
if cur.G == 1 {
last = cur.V
}
ss := SummarySlice{
Start: last,
End: cur.V,
Weight: cur.G,
}
slices = append(slices, ss)
last = cur.V
}
return slices
} | quantile/slice_summary.go | 0.687945 | 0.408159 | slice_summary.go | starcoder |
package assert
import (
"bytes"
"fmt"
"reflect"
"runtime"
"strings"
"testing"
)
// Per-type predicate registries, keyed by the operand's reflect.Type.
var (
	equals = make(map[reflect.Type]*Matcher)
	less = make(map[reflect.Type]*Matcher)
	greater = make(map[reflect.Type]*Matcher)
)

// Matcher pairs a predicate function (value, expectations... -> bool),
// stored as a reflect.Value, with a verb used in failure messages.
type Matcher struct {
	method reflect.Value
	verb string
}
func zeroValueOrReal(v interface{}, t reflect.Type) reflect.Value {
if v == nil {
return reflect.New(t).Elem()
}
return reflect.ValueOf(v)
}
// call invokes the matcher's predicate via reflection: the asserted value
// becomes the first argument and the expectations fill the rest. nil
// arguments are replaced with the zero value of the parameter type.
func (op *Matcher) call(value interface{}, expectations []interface{}) bool {
	input := make([]reflect.Value, op.method.Type().NumIn())
	input[0] = zeroValueOrReal(value, op.method.Type().In(0))
	for i, v := range expectations {
		input[i+1] = zeroValueOrReal(v, op.method.Type().In(i+1))
	}
	ret := op.method.Call(input)
	return ret[0].Bool()
}

// Assert is the checking function returned by With, bound to a *testing.T.
type Assert func(value interface{}, op *Matcher, expectations ...interface{})

// With binds a testing.T to an Assert function. On a failed match it prints
// a "Not true that ..." message decorated with the call site and stops the
// test via FailNow.
// NOTE(review): the message goes to stdout through fmt.Println rather than
// t.Error — confirm this is intentional.
func With(t *testing.T) Assert {
	return func(value interface{}, op *Matcher, expectations ...interface{}) {
		if !op.call(value, expectations) {
			msgs := []interface{}{"Not true that (", value, ") ", op.verb, " ("}
			msgs = append(msgs, expectations...)
			msgs = append(msgs, ")")
			fmt.Println(decorate(fmt.Sprint(msgs...)))
			t.FailNow()
		}
	}
}
// storeToTable registers op in table m, keyed by the type of the predicate's
// first parameter.
func storeToTable(m map[reflect.Type]*Matcher, f reflect.Value, op *Matcher) {
	t := f.Type().In(0)
	m[t] = op
}

// RegisterEqualsMatcher registers f as the equality predicate for the type
// of its first parameter.
func RegisterEqualsMatcher(f interface{}) {
	op := CreateMatcher(f, "equals to")
	storeToTable(equals, reflect.ValueOf(f), op)
}

// RegisterLessThanMatcher registers f as the less-than predicate for the
// type of its first parameter.
func RegisterLessThanMatcher(f interface{}) {
	op := CreateMatcher(f, "less than")
	storeToTable(less, reflect.ValueOf(f), op)
}

// RegisterGreaterThanMatcher registers f as the greater-than predicate for
// the type of its first parameter.
func RegisterGreaterThanMatcher(f interface{}) {
	op := CreateMatcher(f, "greater than")
	storeToTable(greater, reflect.ValueOf(f), op)
}

// CreateMatcher wraps the predicate function fv and a failure-message verb
// into a Matcher. It panics when fv is not a function.
func CreateMatcher(fv interface{}, verb string) *Matcher {
	f := reflect.ValueOf(fv)
	if f.Kind() != reflect.Func {
		panic("Operator is not a function.")
	}
	return &Matcher{
		method: f,
		verb: verb,
	}
}

// callInternal looks up the matcher for v's dynamic type in table m and
// invokes it with exp. It panics when the type has not been registered.
func callInternal(m map[reflect.Type]*Matcher, v interface{}, exp interface{}) bool {
	vt := reflect.TypeOf(v)
	op, found := m[vt]
	if !found {
		panic(fmt.Sprint("Type (", vt, ") not registered."))
	}
	return op.call(v, []interface{}{exp})
}
// Equals matches when the value equals the expectation, using a registered
// per-type predicate when one exists and falling back to `==`.
var Equals = CreateMatcher(func(v interface{}, exp interface{}) bool {
	vt := reflect.TypeOf(v)
	op, found := equals[vt]
	if found {
		return op.call(v, []interface{}{exp})
	}
	return v == exp
}, "equals to")

// NotEquals is the negation of Equals.
var NotEquals = CreateMatcher(func(v interface{}, exp interface{}) bool {
	vt := reflect.TypeOf(v)
	op, found := equals[vt]
	if found {
		return !op.call(v, []interface{}{exp})
	}
	return v != exp
}, "not equals to")

// LessThan matches when the value is strictly less than the expectation.
// Requires a registered less-than predicate for the value's type.
var LessThan = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return callInternal(less, v, exp)
}, "less than")

// AtMost matches when the value is not greater than the expectation.
var AtMost = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return !callInternal(greater, v, exp)
}, "less than or equals to")
// GreaterThan matches when the value is strictly greater than the
// expectation.
// BUG FIX: the verb was copy-pasted as "less than", producing a failure
// message that contradicted the assertion.
var GreaterThan = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return callInternal(greater, v, exp)
}, "greater than")

// AtLeast matches when the value is greater than or equal to the
// expectation (verb likewise corrected from "less than").
var AtLeast = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return !callInternal(less, v, exp)
}, "greater than or equals to")
// IsNegative matches values below zero (for types with a registered
// less-than predicate).
// NOTE(review): the predicate declares an unused `exp` parameter, so the
// reflective call expects one expectation; calling assert(v, IsNegative)
// without one would leave a zero reflect.Value argument — confirm callers.
var IsNegative = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return callInternal(less, v, 0)
}, "is negative")

// IsPositive matches values above zero (same caveat as IsNegative).
var IsPositive = CreateMatcher(func(v interface{}, exp interface{}) bool {
	return callInternal(greater, v, 0)
}, "is positive")

// IsNil matches untyped nil and nil-valued references.
// NOTE(review): reflect.Value.IsNil panics for non-nilable kinds (int,
// string, struct) — confirm this matcher is only used with pointer-like
// values.
var IsNil = CreateMatcher(func(v interface{}) bool {
	return v == nil || reflect.ValueOf(v).IsNil()
}, "is nil")

// IsNotNil matches non-nil interface values.
// NOTE(review): a typed nil pointer stored in the interface compares
// non-nil here, which is asymmetric with IsNil above — confirm intended.
var IsNotNil = CreateMatcher(func(v interface{}) bool {
	return v != nil
}, "is not nil")

// IsTrue matches the boolean true.
var IsTrue = CreateMatcher(func(v bool) bool {
	return v
}, "is true")

// IsFalse matches the boolean false.
var IsFalse = CreateMatcher(func(v bool) bool {
	return !v
}, "is false")

// IsEmpty matches values whose reflect.Len is zero (string, slice, map...).
var IsEmpty = CreateMatcher(func(v interface{}) bool {
	return reflect.ValueOf(v).Len() == 0
}, "is empty")

// Not wraps a matcher, inverting its result and prefixing its verb with
// "not " via a reflectively constructed function of the same signature.
func Not(op *Matcher) *Matcher {
	return &Matcher{
		method: reflect.MakeFunc(op.method.Type(), func(v []reflect.Value) []reflect.Value {
			return []reflect.Value{reflect.ValueOf(!op.method.Call(v)[0].Bool())}
		}),
		verb: "not " + op.verb,
	}
}
// init registers the built-in comparison predicates for the common scalar
// and slice types.
func init() {
	RegisterEqualsMatcher(func(v, exp bool) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp byte) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp int8) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp uint8) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp int16) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp uint16) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v int, exp int) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v uint, exp uint) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp int32) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp uint32) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp int64) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp uint64) bool {
		return v == exp
	})
	RegisterLessThanMatcher(func(v, exp byte) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp int8) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp uint8) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp int16) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp uint16) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v int, exp int) bool {
		return v < exp
	})
	// CONSISTENCY FIX: plain uint had an equality predicate but no less-than
	// predicate, so LessThan/AtLeast on uint panicked "Type not registered".
	RegisterLessThanMatcher(func(v uint, exp uint) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp int32) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp uint32) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp int64) bool {
		return v < exp
	})
	RegisterLessThanMatcher(func(v, exp uint64) bool {
		return v < exp
	})
	RegisterGreaterThanMatcher(func(v, exp byte) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp int8) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp uint8) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp int16) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp uint16) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v int, exp int) bool {
		return v > exp
	})
	// CONSISTENCY FIX: same uint gap on the greater-than side.
	RegisterGreaterThanMatcher(func(v uint, exp uint) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp int32) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp uint32) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp int64) bool {
		return v > exp
	})
	RegisterGreaterThanMatcher(func(v, exp uint64) bool {
		return v > exp
	})
	RegisterEqualsMatcher(func(v, exp string) bool {
		return v == exp
	})
	RegisterEqualsMatcher(func(v, exp []byte) bool {
		if len(v) != len(exp) {
			return false
		}
		for i, vv := range v {
			if vv != exp[i] {
				return false
			}
		}
		return true
	})
	RegisterEqualsMatcher(func(v, exp []string) bool {
		if len(v) != len(exp) {
			return false
		}
		for i, vv := range v {
			if vv != exp[i] {
				return false
			}
		}
		return true
	})
}
// getCaller walks up the call stack and returns the base file name and line
// of the first frame whose file path does not contain "assert".
// NOTE(review): the "assert" substring check would also skip unrelated user
// files that happen to have "assert" in their path — confirm acceptable.
func getCaller() (string, int) {
	stackLevel := 1
	for {
		_, file, line, ok := runtime.Caller(stackLevel)
		if strings.Contains(file, "assert") {
			stackLevel++
		} else {
			if ok {
				// Truncate file name at last file name separator.
				if index := strings.LastIndex(file, "/"); index >= 0 {
					file = file[index+1:]
				} else if index = strings.LastIndex(file, "\\"); index >= 0 {
					file = file[index+1:]
				}
			} else {
				// Stack exhausted: fall back to placeholder location.
				file = "???"
				line = 1
			}
			return file, line
		}
	}
}
// decorate prefixes the string with the file and line of the call site
// and inserts the final newline if needed and indentation tabs for formatting.
func decorate(s string) string {
file, line := getCaller()
buf := new(bytes.Buffer)
// Every line is indented at least one tab.
buf.WriteString(" ")
fmt.Fprintf(buf, "%s:%d: ", file, line)
lines := strings.Split(s, "\n")
if l := len(lines); l > 1 && lines[l-1] == "" {
lines = lines[:l-1]
}
for i, line := range lines {
if i > 0 {
// Second and subsequent lines are indented an extra tab.
buf.WriteString("\n\t\t")
}
buf.WriteString(line)
}
buf.WriteByte('\n')
return buf.String()
} | assert/assert.go | 0.633183 | 0.532729 | assert.go | starcoder |
package main
/*
https://leetcode.com/problems/flipping-an-image/, accessed 31 March 2019
Given a binary matrix A, we want to flip the image horizontally, then invert
it, and return the resulting image.
To flip an image horizontally means that each row of the image is reversed.
For example, flipping [1, 1, 0] horizontally results in [0, 1, 1].
To invert an image means that each 0 is replaced by 1, and each 1 is replaced
by 0. For example, inverting [0, 1, 1] results in [1, 0, 0].
Example 1:
Input: [[1,1,0],[1,0,1],[0,0,0]]
Output: [[1,0,0],[0,1,0],[1,1,1]]
Explanation: First reverse each row: [[0,1,1],[1,0,1],[0,0,0]].
Then, invert the image: [[1,0,0],[0,1,0],[1,1,1]]
*/
import "fmt"
// main demonstrates flipAndInvertImage on a fixed 4x4 binary matrix,
// printing the matrix before and after the transformation.
func main() {
	A := [][]int{{1, 1, 0, 0}, {1, 0, 0, 1}, {0, 1, 1, 1}, {1, 0, 1, 0}}
	fmt.Println("The initial setup is:")
	fmt.Println(A)
	A = flipAndInvertImage(A)
	fmt.Println("The flipped and inverted slice is:")
	fmt.Println(A)
}
// flipAndInvertImage reverses each row of the binary matrix A and inverts
// every value (0 <-> 1). The matrix is modified in place and also returned.
// Assumes all values are 0 or 1, as the problem statement guarantees.
//
// The original used a temp-variable swap plus if/else inversion with a
// special-case workaround to avoid double-inverting the middle element; the
// XOR form below does swap and inversion in one step and handles the middle
// element explicitly.
func flipAndInvertImage(A [][]int) [][]int {
	for _, row := range A {
		i, j := 0, len(row)-1
		for i < j {
			// Swap the mirrored pair and invert both values at once.
			row[i], row[j] = row[j]^1, row[i]^1
			i, j = i+1, j-1
		}
		if i == j {
			// Middle element of an odd-length row: invert only.
			row[i] ^= 1
		}
	}
	return A
}
package tile3d
// calcPadding returns the number of bytes required to round offset up to
// the next multiple of paddingUnit (0 when offset is already aligned).
func calcPadding(offset, paddingUnit uint32) uint32 {
	remainder := offset % paddingUnit
	if remainder == 0 {
		return 0
	}
	return paddingUnit - remainder
}
// paddingBytes writes paddingCode into bytes starting at srcLen, for as
// many bytes as needed to align srcLen to paddingUnit. The caller must
// ensure bytes is large enough to hold the padding.
func paddingBytes(bytes []byte, srcLen int, paddingUnit uint32, paddingCode byte) {
	padding := calcPadding(uint32(srcLen), paddingUnit)
	for i := 0; i < int(padding); i++ {
		bytes[(srcLen)+i] = paddingCode
	}
}

// createPaddingBytes appends paddingCode bytes to bytes until offset is
// aligned to paddingUnit, returning the (possibly reallocated) slice.
func createPaddingBytes(bytes []byte, offset, paddingUnit uint32, paddingCode byte) []byte {
	padding := calcPadding(offset, paddingUnit)
	if padding == 0 {
		return bytes
	}
	for i := 0; i < int(padding); i++ {
		bytes = append(bytes, paddingCode)
	}
	return bytes
}
// encodeZigZag maps a signed value (expected to fit in 16 bits) onto an
// unsigned one: 0, -1, 1, -2, 2, ... become 0, 1, 2, 3, 4, ...
func encodeZigZag(i int) uint16 {
	return uint16((i << 1) ^ (i >> 15))
}

// decodeZigZag is the inverse of encodeZigZag.
func decodeZigZag(encoded uint16) int {
	n := int(encoded)
	return (n >> 1) ^ -(n & 1)
}
func encodePolygonPoints(points [][2]int) (us, vs []uint16) {
us = make([]uint16, len(points))
vs = make([]uint16, len(points))
lastU := int(0)
lastV := int(0)
for i := 0; i < len(points); i++ {
u := points[i][0]
v := points[i][1]
us[i] = encodeZigZag(u - lastU)
vs[i] = encodeZigZag(v - lastV)
lastU = u
lastV = v
}
return
}
// encodePoints delta- and zig-zag-encodes 3D integer points into parallel
// u/v/height arrays, one delta stream per coordinate.
func encodePoints(points [][3]int) (us, vs, hs []uint16) {
	us = make([]uint16, len(points))
	vs = make([]uint16, len(points))
	hs = make([]uint16, len(points))
	prevU, prevV, prevH := 0, 0, 0
	for i, p := range points {
		us[i] = encodeZigZag(p[0] - prevU)
		vs[i] = encodeZigZag(p[1] - prevV)
		hs[i] = encodeZigZag(p[2] - prevH)
		prevU, prevV, prevH = p[0], p[1], p[2]
	}
	return
}
// decodePolygonPoints reverses encodePolygonPoints, accumulating the
// zig-zag-decoded deltas back into absolute 2D coordinates.
func decodePolygonPoints(us, vs []uint16) [][2]int {
	pos := make([][2]int, len(us))
	u, v := 0, 0
	for i := range us {
		u += decodeZigZag(us[i])
		v += decodeZigZag(vs[i])
		pos[i] = [2]int{u, v}
	}
	return pos
}
func decodePoints(us, vs, hs []uint16) [][3]int {
u := int(0)
v := int(0)
height := int(0)
pos := make([][3]int, len(us))
for i := 0; i < len(us); i++ {
u += decodeZigZag(us[i])
v += decodeZigZag(vs[i])
height += decodeZigZag(hs[i])
pos[i][0] = u
pos[i][1] = v
pos[i][2] = height
}
return pos
} | utils.go | 0.637482 | 0.453564 | utils.go | starcoder |
package operators
import (
"context"
"github.com/b97tsk/rx"
)
// DoAfter mirrors the source, but performs a side effect after each emission.
// The tap runs via defer, so it also fires if the sink panics.
func DoAfter(tap rx.Observer) rx.Operator {
	return func(source rx.Observable) rx.Observable {
		return func(ctx context.Context, sink rx.Observer) {
			source.Subscribe(ctx, func(n rx.Notification) {
				defer func() { tap(n) }()
				sink(n)
			})
		}
	}
}
// DoAfterNext mirrors the source, but performs a side effect after each value.
// Error and completion notifications pass through untouched.
func DoAfterNext(f func(interface{})) rx.Operator {
	return func(source rx.Observable) rx.Observable {
		return func(ctx context.Context, sink rx.Observer) {
			source.Subscribe(ctx, func(n rx.Notification) {
				if !n.HasValue {
					sink(n)
					return
				}
				defer f(n.Value)
				sink(n)
			})
		}
	}
}
// DoAfterError mirrors the source and, when the source throws an error,
// performs a side effect after mirroring this error.
func DoAfterError(f func(error)) rx.Operator {
	return func(source rx.Observable) rx.Observable {
		return func(ctx context.Context, sink rx.Observer) {
			source.Subscribe(ctx, func(n rx.Notification) {
				if !n.HasError {
					sink(n)
					return
				}
				defer f(n.Error)
				sink(n)
			})
		}
	}
}
// DoAfterComplete mirrors the source and, when the source completes, performs
// a side effect after mirroring this completion.
func DoAfterComplete(f func()) rx.Operator {
	return func(source rx.Observable) rx.Observable {
		return func(ctx context.Context, sink rx.Observer) {
			source.Subscribe(ctx, func(n rx.Notification) {
				// Completion is the notification with neither value nor error.
				if n.HasValue || n.HasError {
					sink(n)
					return
				}
				defer f()
				sink(n)
			})
		}
	}
}
// DoAfterErrorOrComplete mirrors the source and, when the source throws an
// error or completes, performs a side effect after mirroring this error or
// completion.
func DoAfterErrorOrComplete(f func()) rx.Operator {
return func(source rx.Observable) rx.Observable {
return func(ctx context.Context, sink rx.Observer) {
source.Subscribe(ctx, func(t rx.Notification) {
if !t.HasValue {
defer f()
}
sink(t)
})
}
}
} | operators/doAfter.go | 0.780997 | 0.418637 | doAfter.go | starcoder |
package cspacegen
import (
"errors"
"fmt"
"math/rand"
)
// Indexes of coordinates.
const (
X = 0 // x coordinate
Y = 1 // y coordinate
Z = 2 // z coordinate
)
// Default vales of parameters.
const (
DefaultSize = 10 // size
MaxFullness = 9 // maximum of fullness
)
const (
insideOffset = 0.33
)
// Point3D stores three coordinates x, y, z as slice.
type Point3D []float64
// Obstacle contains origin point, three sizes and offsets for its boundary and center points.
type Obstacle struct {
origin Point3D
size Point3D
offset []Point3D
centerPoint map[int]Point3D
}
// CSpace contains description, three sizes, start and finish points, slice of obstacles.
type CSpace struct {
description string
dimension Point3D
start, finish Point3D
obstacles []Obstacle
}
// NewPoint returns new Point3D by three float numbers.
func NewPoint(x, y, z float64) *Point3D {
p := make(Point3D, 3, 3)
p[X] = x
p[Y] = y
p[Z] = z
return &p
}
// String returns point coordinates as string.
func (p *Point3D) String() string {
return fmt.Sprintf("%.2f %.2f %.2f", (*p)[X], (*p)[Y], (*p)[Z])
}
func (p *Point3D) shift(s Point3D) *Point3D {
result := NewPoint(s[X], s[Y], s[Z])
(*result)[X] += (*p)[X]
(*result)[Y] += (*p)[Y]
(*result)[Z] += (*p)[Z]
return result
}
func (p *Point3D) scale(s Point3D) *Point3D {
result := NewPoint(s[X], s[Y], s[Z])
(*result)[X] *= (*p)[X]
(*result)[Y] *= (*p)[Y]
(*result)[Z] *= (*p)[Z]
return result
}
func zeroBoundaryOffsetSlice() []Point3D {
result := make([]Point3D, 8, 8)
for i := range result {
switch i {
case 0:
result[i] = *NewPoint(0, 0, 0)
case 1:
result[i] = *NewPoint(1, 0, 0)
case 2:
result[i] = *NewPoint(1, 1, 0)
case 3:
result[i] = *NewPoint(0, 1, 0)
case 4:
result[i] = *NewPoint(0, 1, 1)
case 5:
result[i] = *NewPoint(0, 0, 1)
case 6:
result[i] = *NewPoint(1, 0, 1)
case 7:
result[i] = *NewPoint(1, 1, 1)
}
}
return result
}
func zeroCenterOffset(i int) *Point3D {
var result *Point3D
switch i {
case 8:
result = NewPoint(0.5, 0.5, 0)
case 9:
result = NewPoint(0.5, 0, 0.5)
case 10:
result = NewPoint(1, 0.5, 0.5)
case 11:
result = NewPoint(0.5, 1, 0.5)
case 12:
result = NewPoint(0, 0.5, 0.5)
case 13:
result = NewPoint(0.5, 0.5, 1)
}
return result
}
// NewObstacle creates new obstacle with zero offsets.
func NewObstacle() *Obstacle {
origin := *NewPoint(0, 0, 0)
size := *NewPoint(1, 1, 1)
offset := zeroBoundaryOffsetSlice()
return &Obstacle{origin, size, offset, make(map[int]Point3D, 0)}
}
// NewObstacleByPoints creates new obstacle with zero offsets by origin point and sizes.
func NewObstacleByPoints(o, s *Point3D) *Obstacle {
return &Obstacle{*o, *s, zeroBoundaryOffsetSlice(), make(map[int]Point3D, 0)}
}
// volume returns value of obstacle volume.
func (o *Obstacle) volume() float64 {
return o.size[X] * o.size[Y] * o.size[Z]
}
// centerPointIndex calculates and returns index of center point by number.
func (o *Obstacle) centerPointIndex(i int) int {
index := 7
for j := 8; j < 14; j++ {
if _, ok := o.centerPoint[j]; ok {
index++
}
if index == i {
index = j
break
}
}
return index
}
// point returns obstacle edge or center point by number from 0 to 13.
func (o *Obstacle) point(i int) *Point3D {
if i < 8 {
return o.origin.shift(*o.size.scale(o.offset[i]))
}
index := o.centerPointIndex(i)
return o.origin.shift(*o.size.scale(o.centerPoint[index]))
}
// points returns obstacle edges as slice of 3D points.
func (o *Obstacle) points() []*Point3D {
size := 8 + len(o.centerPoint)
result := make([]*Point3D, size, size)
for i := range result {
result[i] = o.point(i)
}
return result
}
// Function checkInterfering returns true if obstacle interferes with a space around box,
// which has origin point p and sizes x, y, z.
func (o *Obstacle) checkInterfering(p Point3D, x, y, z, offset float64) bool {
checkX := p[X]+x+offset < o.origin[X] || p[X] > o.origin[X]+o.size[X]+offset
checkY := p[Y]+y+offset < o.origin[Y] || p[Y] > o.origin[Y]+o.size[Y]+offset
checkZ := p[Z]+z+offset < o.origin[Z] || p[Z] > o.origin[Z]+o.size[Z]+offset
return !(checkX || checkY || checkZ)
}
// Function checkPointInterfering returns true if obstacle interferes with a cubic space around point p.
func (o *Obstacle) checkPointInterfering(p Point3D, offset float64) bool {
return o.checkInterfering(p, 0, 0, 0, offset)
}
func (o *Obstacle) copy() *Obstacle {
result := NewObstacle()
for i := 0; i < 3; i++ {
result.origin[i] = o.origin[i]
result.size[i] = o.size[i]
}
for i := range o.offset {
result.offset[i] = o.offset[i]
}
return result
}
// GenerateCSpace returns new generated c-space by x y z sizes, fullness and seed for random generator.
func GenerateCSpace(x, y, z float64, f int, seed int64) (*CSpace, error) {
if x <= 0 || y <= 0 || z <= 0 {
return nil, errors.New("incorrect c-space dimensions")
}
if f < 0 || f > MaxFullness {
return nil, errors.New("incorrect c-space fullness")
}
// c-space dimensions
dimension := *NewPoint(x, y, z)
r := rand.New(rand.NewSource(seed))
edgeSize := x
if y <= x && y <= z {
edgeSize = y
}
if z <= x && z <= y {
edgeSize = z
}
// c-space fullness
fullness := fmt.Sprintf("fullness value %d", f)
quantity := 0
rate := 1.0 - float64(f)/(float64(MaxFullness)/0.9)
minQuantity := f / (MaxFullness / 9)
if minQuantity == 0 {
minQuantity = 1
}
switch {
case f > 0 && f <= MaxFullness/3:
// minimum fullness
quantity = minQuantity + r.Intn(2+minQuantity)
case f > MaxFullness/3 && f <= 2*MaxFullness/3:
// medium fullness
quantity = 1 + minQuantity + r.Intn(1+minQuantity)
case f > 2*MaxFullness/3 && f <= MaxFullness:
// maximum fullness
quantity = 2 + minQuantity + r.Intn(minQuantity)
default:
fullness = "empty"
rate = 1
}
//fmt.Println("Debug: rate", rate, "minQuantity", minQuantity, "quantity", quantity)
zeroPoint := *NewPoint(0, 0, 0)
edgePoint := *NewPoint(x, y, z)
var obstacles []Obstacle
// minimum obstacle size
minSize := 0.1 * edgeSize
minDistance := rate * minSize
//start and finish points
size := minSize - minDistance
start := *NewPoint(size*r.Float64(), size*r.Float64(), size*r.Float64())
finish := *NewPoint(x-size*r.Float64(), y-size*r.Float64(), z-size*r.Float64())
volume := x * y * z
freeVolume := volume
scaleRatio := 0.1 // means size increasing rate
iteration := 0
for freeVolume > rate*volume {
if iteration == 0 {
obstacles = make([]Obstacle, 0, quantity)
for len(obstacles) < quantity {
for true {
p := NewPoint((x-minSize)*r.Float64(), (y-minSize)*r.Float64(), (z-minSize)*r.Float64())
// new obstacle with origin point p
nextObstacle := NewObstacleByPoints(p, NewPoint(minSize, minSize, minSize))
if nextObstacle.checkPointInterfering(zeroPoint, minSize) {
continue
}
if nextObstacle.checkPointInterfering(edgePoint, minSize) {
continue
}
unsuitablePoint := false
for _, o := range obstacles {
if o.checkInterfering(*p, minSize, minSize, minSize, minSize) {
unsuitablePoint = true
break
}
}
if unsuitablePoint {
continue
} else {
obstacles = append(obstacles, *nextObstacle)
break
}
} // endless cycle
} // quantity cycle
} // iteration check
if rate > 0 && len(obstacles) == 0 {
return nil, errors.New("initiation failed")
}
number := r.Intn(len(obstacles))
c := obstacles[number].copy()
dimension := r.Intn(3)
if r.Intn(2) == 0 { // probability value is 1/2
scale := 1 + scaleRatio*r.Float64()
c.size[dimension] *= scale
if c.origin[dimension]+c.size[dimension] > edgePoint[dimension] {
c.size[dimension] = edgePoint[dimension] - c.origin[dimension]
}
} else {
scale := (1 - scaleRatio) + scaleRatio*r.Float64()
size := c.origin[dimension] * scale
c.size[dimension] += c.origin[dimension] - size
c.origin[dimension] = size
}
unsuitableChange := false
if c.checkPointInterfering(zeroPoint, minSize) || c.checkPointInterfering(edgePoint, minSize) {
unsuitableChange = true
} else {
for n, o := range obstacles {
if n != number && o.checkInterfering(c.origin, c.size[X], c.size[Y], c.size[Z], minDistance) {
unsuitableChange = true
break
}
}
}
iteration++
if !unsuitableChange {
// applying obstacle changes
obstacles[number] = *c
freeVolume = volume
for _, o := range obstacles {
freeVolume -= o.volume()
}
} else {
if iteration%5000 == 0 {
//fmt.Println("Debug: Generation iteration failed, start new initiation")
iteration = 0
}
}
} // freeVolume cycle
random := func() float64 {
return 0.5 + 0.5*r.Float64()
}
// set point offsets
for _, o := range obstacles {
// boundary points offsets
for i, point := range o.offset {
for index := range point {
if r.Intn(5) < 4 { // probability value is 4/5
if o.offset[i][index] < 0.5 {
if r.Intn(2) == 0 { // probability value is 1/2
o.offset[i][index] = random() * insideOffset
} else {
o.offset[i][index] = -random() * minDistance / (2 * o.size[index])
if o.origin[index]+o.size[index]*o.offset[i][index] < zeroPoint[index] {
o.offset[i][index] = (zeroPoint[index] - o.origin[index]) / o.size[index]
}
}
} else {
if r.Intn(2) == 0 { // probability value is 1/2
o.offset[i][index] = 1 - random()*insideOffset
} else {
o.offset[i][index] = 1 + random()*minDistance/(2*o.size[index])
if o.origin[index]+o.size[index]*o.offset[i][index] > edgePoint[index] {
o.offset[i][index] = (edgePoint[index] - o.origin[index]) / o.size[index]
}
}
}
} // random check
} // point cycle
} // offset cycle
// central points offsets
for i := 8; i < 14; i++ {
if r.Intn(5) < 3 { // probability value is 3/5
p := *zeroCenterOffset(i)
for index := 0; index < 3; index++ {
if r.Intn(3) < 2 { // probability value is 2/3
switch {
case p[index] == 0:
if r.Intn(2) == 0 { // probability value is 1/2
p[index] = random() * insideOffset
} else {
p[index] = -random() * minDistance / (2 * o.size[index])
if o.origin[index]+o.size[index]*p[index] < zeroPoint[index] {
p[index] = (zeroPoint[index] - o.origin[index]) / o.size[index]
}
}
case p[index] == 1:
if r.Intn(2) == 0 { // probability value is 1/2
p[index] = 1 + random()*minDistance/(2*o.size[index])
if o.origin[index]+o.size[index]*p[index] > edgePoint[index] {
p[index] = (edgePoint[index] - o.origin[index]) / o.size[index]
}
} else {
p[index] = 1 - random()*insideOffset
}
default:
p[index] = (1-insideOffset)/2 + random()*insideOffset
} // offset check
} // random check
} // index cycle
o.centerPoint[i] = p
} // random check
}
}
description := fmt.Sprintf("c-space %.2f x %.2f x %.2f, %s, seed %d", x, y, z, fullness, seed)
//fmt.Println("Debug: quantity of obstacles", len(obstacles))
return &CSpace{description, dimension, start, finish, obstacles}, nil
} | generator/cspacegen/cspacegen.go | 0.745306 | 0.642292 | cspacegen.go | starcoder |
package machine_learning
//----------------------------------------------------------------------------------------------------------------------
// PrecisionRecallF1 computes support-weighted average precision, recall and
// F1 score for a multi-class classification result.
//
// prediction and classes are parallel slices of class indices in
// [0, num_classes); the function panics if their lengths differ.
//
// Fixes over the previous version:
//   - a class with no predicted samples (tp+fp == 0), no true samples
//     (tp+fn == 0), or zero precision+recall previously produced NaN via
//     0/0 division, which poisoned all three weighted averages; such
//     ratios are now defined as 0 (the usual zero_division convention);
//   - empty input now returns (0, 0, 0) instead of NaN;
//   - the unused true-negative counter was removed.
func PrecisionRecallF1(prediction []int, classes []int, num_classes int) (float64, float64, float64) {
	if len(prediction) != len(classes) {
		panic("prediction and classes are not equal length")
	}
	// safeDiv treats x/0 as 0 instead of NaN/Inf.
	safeDiv := func(num, den float64) float64 {
		if den == 0 {
			return 0
		}
		return num / den
	}
	type classStats struct {
		tp, fp, fn, support   int
		precision, recall, f1 float64
	}
	stats := make([]classStats, num_classes)
	// First pass: per-class confusion counts and derived metrics.
	for j := 0; j < num_classes; j++ {
		s := &stats[j]
		for i := range prediction {
			if classes[i] == j {
				s.support++
			}
			switch {
			case classes[i] != j && prediction[i] == j:
				s.fp++
			case classes[i] == j && prediction[i] == j:
				s.tp++
			case classes[i] == j && prediction[i] != j:
				s.fn++
			}
		}
		s.precision = safeDiv(float64(s.tp), float64(s.tp+s.fp))
		s.recall = safeDiv(float64(s.tp), float64(s.tp+s.fn))
		s.f1 = safeDiv(2*s.precision*s.recall, s.precision+s.recall)
	}
	// Second pass: weight each class metric by its support and normalize.
	var avgPrecision, avgRecall, avgF1 float64
	for j := 0; j < num_classes; j++ {
		w := float64(stats[j].support)
		avgPrecision += stats[j].precision * w
		avgRecall += stats[j].recall * w
		avgF1 += stats[j].f1 * w
	}
	total := float64(len(classes))
	if total == 0 {
		return 0, 0, 0
	}
	return avgPrecision / total, avgRecall / total, avgF1 / total
}
package geom
import (
"math"
"math/rand"
)
// Bounds is an axis-aligned bounding box. Alongside the two corner points it
// caches the center, the bounding-sphere radius, and array copies of the
// corners so the ray test can index coordinates by axis number.
type Bounds struct {
	Min, Max Vec // opposite corners of the box
	Center Vec // midpoint of Min and Max (set by NewBounds)
	Radius float64 // distance from Center to Max (set by NewBounds)
	MinArray, MaxArray [3]float64 // corner coordinates indexed 0=X, 1=Y, 2=Z
}
// NewBounds constructs a bounding box from its two corner points,
// precomputing the center, bounding-sphere radius and the array forms of
// the corners used by the fast ray test.
func NewBounds(min, max Vec) *Bounds {
	c := min.Plus(max).Scaled(0.5)
	b := &Bounds{Min: min, Max: max, Center: c}
	b.Radius = max.Minus(c).Len()
	b.MinArray = min.Array()
	b.MaxArray = max.Array()
	return b
}
// MergeBounds returns the smallest bounding box enclosing both a and b.
func MergeBounds(a, b *Bounds) *Bounds {
	lo := a.Min.Min(b.Min)
	hi := a.Max.Max(b.Max)
	return NewBounds(lo, hi)
}
// Overlaps reports whether the two boxes intersect (touching faces count).
// It tests for a separating slab on each axis in turn.
func (b *Bounds) Overlaps(b2 *Bounds) bool {
	if b.Min.X > b2.Max.X || b.Max.X < b2.Min.X {
		return false
	}
	if b.Min.Y > b2.Max.Y || b.Max.Y < b2.Min.Y {
		return false
	}
	if b.Min.Z > b2.Max.Z || b.Max.Z < b2.Min.Z {
		return false
	}
	return true
}
// SurfaceArea returns the total area of the box's six faces.
func (b *Bounds) SurfaceArea() float64 {
	d := b.Max.Minus(b.Min).Abs()
	return d.X*d.Y*2 + d.Z*d.Y*2 + d.X*d.Z*2
}
// Split cuts the box in two along the given axis (0=X, 1=Y, 2=Z) at
// coordinate val, which becomes the shared face of the two halves.
func (b *Bounds) Split(axis int, val float64) (left, right *Bounds) {
	leftMax := b.Max.Array()
	leftMax[axis] = val
	rightMin := b.Min.Array()
	rightMin[axis] = val
	left = NewBounds(b.Min, ArrayToVec(leftMax))
	right = NewBounds(ArrayToVec(rightMin), b.Max)
	return left, right
}
// Check intersects the ray with the box using the slab method, returning
// whether the ray hits and the near/far parametric distances along it.
// https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-Bounds-intersection
// http://psgraphics.blogspot.com/2016/02/new-simple-ray-Bounds-test-from-andrew.html
func (b *Bounds) Check(r *Ray) (ok bool, near, far float64) {
	// tmin starts at the package-level bias (presumably a small epsilon to
	// avoid self-intersection at the ray origin — defined outside this file).
	tmin := bias
	tmax := math.Inf(1)
	for a := 0; a < 3; a++ {
		// Entry/exit distances for this axis' slab; InvArray holds the
		// precomputed reciprocal of the ray direction.
		t0 := (b.MinArray[a] - r.OrArray[a]) * r.InvArray[a]
		t1 := (b.MaxArray[a] - r.OrArray[a]) * r.InvArray[a]
		// A negative direction component swaps which plane is entered first.
		if r.InvArray[a] < 0 {
			t0, t1 = t1, t0
		}
		// Tighten the running intersection interval [tmin, tmax].
		if t0 > tmin {
			tmin = t0
		}
		if t1 < tmax {
			tmax = t1
		}
		// Interval became empty: the ray misses the box.
		if tmax < tmin {
			return false, tmin, tmax
		}
	}
	return true, tmin, tmax
}
// Contains reports whether point p lies inside or on the box.
func (b *Bounds) Contains(p Vec) bool {
	if p.X > b.Max.X || p.X < b.Min.X {
		return false
	}
	if p.Y > b.Max.Y || p.Y < b.Min.Y {
		return false
	}
	if p.Z > b.Max.Z || p.Z < b.Min.Z {
		return false
	}
	return true
}
// ShadowRay inscribes the Bounds within a sphere of Radius, places a disc of
// that radius at the box center oriented towards pt, chooses a random point
// on the disc, and returns a Ray from pt towards that point plus the
// fraction of the hemisphere the disc subtends (its solid-angle coverage).
// NOTE(review): the normal parameter is never used in this body — confirm
// whether callers rely on it or it can be dropped.
// https://marine.rutgers.edu/dmcs/ms552/2009/solidangle.pdf
func (b *Bounds) ShadowRay(pt Vec, normal Dir, rnd *rand.Rand) (*Ray, float64) {
	// Axis from the box center towards the shaded point.
	forward, _ := pt.Minus(b.Center).Unit()
	x, y := RandPointInCircle(b.Radius, rnd) // TODO: push center back along "forward" axis, away from pt
	// Build an orthonormal frame around forward to place the disc sample.
	right, _ := forward.Cross(Up)
	up, _ := right.Cross(forward)
	point := b.Center.Plus(right.Scaled(x)).Plus(up.Scaled(y))
	diff, _ := point.Minus(pt).Unit()
	ray := NewRay(pt, diff) // TODO: this should be a convenience method
	// Solid-angle coverage of a disc seen at distance dist:
	// https://en.wikipedia.org/wiki/Solid_angle#Sun_and_Moon
	dist := b.Center.Minus(pt).Len()
	theta := math.Atan2(b.Radius, dist)
	coverage := math.Max(0, math.Min(1, 1-math.Cos(theta)))
	return ray, coverage
} | pkg/geom/bounds.go | 0.769946 | 0.448849 | bounds.go | starcoder |
package features
import (
"fmt"
"strconv"
"strings"
)
// Feature is the name of a single nats-operator feature gate.
type Feature string
// FeatureMap is a mapping between features of nats-operator and their current
// status (true = enabled).
type FeatureMap map[Feature]bool
const (
	// ClusterScoped is used to indicate whether nats-operator should operate at the namespace or cluster level.
	ClusterScoped Feature = "ClusterScoped"
)
var (
	// defaultFeatureMap represents the default status of the nats-operator
	// feature gates; ParseFeatureMap starts from these values.
	defaultFeatureMap = map[Feature]bool{
		ClusterScoped: false,
	}
)
// ParseFeatureMap parses the specified string into a feature map.
// The input is a comma-separated list of "Feature=bool" pairs; features not
// mentioned keep their default status. An unknown feature name, a malformed
// pair, or a non-boolean value yields an error.
func ParseFeatureMap(str string) (FeatureMap, error) {
	// Start from the defaults so unspecified features keep their status.
	res := make(FeatureMap, len(defaultFeatureMap))
	for feature, status := range defaultFeatureMap {
		res[feature] = status
	}
	// Each comma-separated segment must be a single "key=value" pair.
	for _, pair := range strings.Split(str, ",") {
		if len(pair) == 0 {
			// Tolerate empty segments such as trailing commas.
			continue
		}
		parts := strings.Split(pair, "=")
		if len(parts) != 2 {
			return nil, fmt.Errorf("invalid key/value pair: %q", pair)
		}
		name, value := Feature(parts[0]), parts[1]
		if _, known := defaultFeatureMap[name]; !known {
			return nil, fmt.Errorf("invalid feature key: %q", parts[0])
		}
		enabled, err := strconv.ParseBool(value)
		if err != nil {
			return nil, fmt.Errorf("failed to parse %q as a boolean value", value)
		}
		res[name] = enabled
	}
	return res, nil
}
// IsEnabled returns a value indicating whether the specified feature is enabled.
func (m FeatureMap) IsEnabled(feature Feature) bool {
return m[feature]
} | nats/nats-operator/pkg/features/features.go | 0.726717 | 0.509947 | features.go | starcoder |
package dsl
import (
"bytes"
"regexp"
"strings"
)
// Scanner is a unified structure for scanner components which defines their port signature
type Scanner struct {
// Set is an IIP that contains valid characters. Supports special characters: \r, \n, \t.
// A regular expression character class can be passed like: "[a-zA-Z\s]".
Set <-chan string
// Type is an IIP that contains string token type associated with the set
Type <-chan string
// In is an incoming empty token
In <-chan Token
// Out is a processed token (either valid or invalid)
Out chan<- Token
}
// scanner is used to test Scanner components via a common interface:
// assign wires up the ports and Process runs the component.
type scanner interface {
	assign(Scanner)
	Process()
}
// assign binds all four ports at once for testing.
func (s *Scanner) assign(ports Scanner) {
	s.Set, s.Type, s.In, s.Out = ports.Set, ports.Type, ports.In, ports.Out
}
// readIIPs reads the configuration ports, Set first and then Type. The
// connections have to be buffered to avoid an ordering deadlock.
func (s *Scanner) readIIPs() (set string, tokenType string, ok bool) {
	set, ok = <-s.Set
	if !ok {
		return
	}
	tokenType, ok = <-s.Type
	return
}
// scanTok is a callback that scans a single token.
type scanTok func(Token) (Token, bool)
// handleTokens drains In, applies scan to every token, marks failed scans
// with the illegal token type, and forwards each result to Out.
func (s *Scanner) handleTokens(scan scanTok) {
	for incoming := range s.In {
		scanned, ok := scan(incoming)
		if !ok {
			scanned.Type = tokIllegal
		}
		s.Out <- scanned
	}
}
// ScanChars scans a token made of consecutive characters belonging to Set.
type ScanChars struct {
	Scanner
}
// Process reads IIPs and validates incoming tokens: it consumes the longest
// run of characters from the token position that belong to the set.
// A zero-length run means the token did not match.
func (s *ScanChars) Process() {
	set, tokenType, ok := s.readIIPs()
	if !ok {
		return
	}
	inSet := s.matcher(set)
	s.handleTokens(func(tok Token) (Token, bool) {
		var value bytes.Buffer
		// Consume bytes until NUL, end of data, or a character outside the set.
		for i := tok.Pos; i < len(tok.File.Data); i++ {
			r := rune(tok.File.Data[i])
			if r == rune(0) || !inSet(r) {
				break
			}
			value.WriteRune(r)
		}
		tok.Value = value.String()
		tok.Type = TokenType(tokenType)
		return tok, value.Len() > 0
	})
}
// matcher builds a predicate reporting whether a rune belongs to set.
// A set of the form "[...]" is compiled as a regexp character class;
// anything else is treated as a literal list of characters with the
// escapes \t, \r and \n expanded.
func (s *ScanChars) matcher(set string) func(r rune) bool {
	if set[0] == '[' && set[len(set)-1] == ']' {
		// A regexp class
		var reg *regexp.Regexp
		var err error
		reg, err = regexp.Compile(set)
		if err != nil {
			// TODO error handling
			// NOTE(review): an invalid class silently yields a matcher that
			// rejects everything; the caller never learns about the error.
			return func(r rune) bool {
				return false
			}
		}
		return func(r rune) bool {
			// NOTE(review): byte(r) truncates runes above 0xFF; Process feeds
			// single bytes, but confirm this if multi-byte input is expected.
			return reg.Match([]byte{byte(r)})
		}
	}
	// Replace special chars
	set = strings.ReplaceAll(set, `\t`, "\t")
	set = strings.ReplaceAll(set, `\r`, "\r")
	set = strings.ReplaceAll(set, `\n`, "\n")
	return func(r rune) bool {
		return strings.ContainsRune(set, r)
	}
}
// ScanKeyword scans a case-insensitive keyword that is not part of another word.
// If Set is an identifier, it makes sure the keyword is not a substring of another identifier.
// If Set is an operator, it makes sure the operator is followed by identifier or space.
// The keyword itself arrives on the Set IIP.
type ScanKeyword struct {
	Scanner
}
// Process reads IIPs and validates incoming tokens. Matching is
// case-insensitive; on success the token value is normalized to the
// upper-cased keyword.
func (s *ScanKeyword) Process() {
	word, tokenType, ok := s.readIIPs()
	if !ok {
		return
	}
	word = strings.ToUpper(word)
	wordLen := len(word)
	// identReg matches identifier characters (word chars and underscore).
	identReg := regexp.MustCompile(`[\w_]`)
	shouldNotBeFollowedBy := identReg
	isIdent := identReg.MatchString(word)
	if !isIdent {
		// For operator-like keywords, reject only when followed by another
		// operator character; identifiers and whitespace may follow.
		shouldNotBeFollowedBy = regexp.MustCompile(`[^\w\s]`)
	}
	s.handleTokens(func(tok Token) (Token, bool) {
		dataLen := len(tok.File.Data)
		if tok.Pos+wordLen > dataLen {
			// Data is too short
			return tok, false
		}
		tok.Value = string(tok.File.Data[tok.Pos : tok.Pos+wordLen])
		if strings.ToUpper(tok.Value) == word {
			// Potential match, should be followed by EOF or non-word character
			if tok.Pos+wordLen < dataLen {
				nextChar := tok.File.Data[tok.Pos+wordLen]
				if shouldNotBeFollowedBy.Match([]byte{nextChar}) {
					// This is not the whole word
					return tok, false
				}
			}
			// Checks passed, it's a match
			tok.Type = TokenType(tokenType)
			tok.Value = word
			return tok, true
		}
		// No match
		return tok, false
	})
}
// ScanComment scans a comment from hash till the end of line.
// The comment prefix character is taken from the Set IIP.
type ScanComment struct {
	Scanner
}
// Process reads IIPs and validates incoming tokens: a token matches when it
// starts with the configured prefix character, and its value extends to the
// end of the line (or a NUL byte, or end of data).
func (s *ScanComment) Process() {
	prefix, tokenType, ok := s.readIIPs()
	if !ok {
		return
	}
	s.handleTokens(func(tok Token) (Token, bool) {
		if tok.File.Data[tok.Pos] != prefix[0] {
			return tok, false
		}
		var comment bytes.Buffer
		for i := tok.Pos; i < len(tok.File.Data); i++ {
			r := rune(tok.File.Data[i])
			if r == rune(0) || r == '\n' || r == '\r' {
				break
			}
			comment.WriteRune(r)
		}
		tok.Value = comment.String()
		tok.Type = TokenType(tokenType)
		return tok, true
	})
}
// ScanQuoted scans a quoted string.
// The Set IIP lists the accepted quote characters.
type ScanQuoted struct {
	Scanner
}
// Process scans for quoted strings in the incoming tokens.
// The opening character must be one of the configured quote runes; inside
// the string a backslash escapes the next character (a doubled backslash is
// written as a single literal backslash).
func (s *ScanQuoted) Process() {
	quotes, tokenType, ok := s.readIIPs()
	if !ok {
		return
	}
	s.handleTokens(func(tok Token) (Token, bool) {
		// Find the quote char
		var q rune = 0
		for _, b := range quotes {
			if rune(tok.File.Data[tok.Pos]) == b {
				q = b
				break
			}
		}
		if q == 0 {
			return tok, false
		}
		var e rune = '\\'
		escaped := 0
		// The value includes the opening quote.
		buf := bytes.NewBufferString(string(q))
		dataLen := len(tok.File.Data)
		for i := tok.Pos + 1; i < dataLen; i++ {
			r := rune(tok.File.Data[i])
			if r == e {
				// Toggle the escape state; the first backslash of a pair is
				// consumed (not written), the second is written literally.
				escaped = (escaped + 1) % 2
				if escaped == 1 {
					continue
				}
			}
			buf.WriteRune(r)
			// An unescaped closing quote terminates the string.
			if r == q && escaped == 0 {
				break
			}
			escaped = 0
		}
		tok.Value = buf.String()
		tok.Type = TokenType(tokenType)
		return tok, true
	})
} | dsl/scanners.go | 0.502441 | 0.410313 | scanners.go | starcoder |
package BinarySearchTrees
import "fmt"
// Node is a single element of the binary search tree, holding an int key and
// links to its left (smaller) and right (greater-or-equal) subtrees.
type Node struct {
	left  *Node
	right *Node
	val   int
}

// BST is a binary search tree over int keys. Duplicate keys are allowed and
// are stored in the right subtree.
type BST struct {
	root *Node
}

// New returns an empty tree.
func New() BST {
	return BST{root: nil}
}

// Add inserts key into the tree.
func (t *BST) Add(key int) {
	fmt.Printf("Adding key: %d\n", key)
	t.root = t._addNode(key, t.root)
}

// _addNode recursively finds the insertion point below root and returns the
// (possibly new) subtree root.
func (t BST) _addNode(key int, root *Node) *Node {
	if root == nil {
		return &Node{val: key}
	}
	if key < root.val {
		root.left = t._addNode(key, root.left)
	} else {
		root.right = t._addNode(key, root.right)
	}
	return root
}

// Delete removes one occurrence of key from the tree, if present.
func (t *BST) Delete(key int) {
	fmt.Printf("Deleting key: %d\n", key)
	t.root = t._deleteNode(key, t.root)
}

// _deleteNode recursively removes key below root and returns the new subtree
// root. A node with two children is replaced by its in-order successor (the
// minimum of the right subtree).
func (t BST) _deleteNode(key int, root *Node) *Node {
	if root == nil {
		return nil
	}
	switch {
	case key < root.val:
		root.left = t._deleteNode(key, root.left)
	case key > root.val:
		root.right = t._deleteNode(key, root.right)
	default:
		// Zero or one child: splice the (possibly nil) child in.
		if root.left == nil {
			return root.right
		}
		if root.right == nil {
			return root.left
		}
		// Two children: swap values with the in-order successor, then delete
		// the key (now sitting at the successor's node) from the right subtree.
		successor := root.right
		for successor.left != nil {
			successor = successor.left
		}
		root.val, successor.val = successor.val, root.val
		root.right = t._deleteNode(key, root.right)
	}
	return root
}

// _searchNode reports whether key exists in the subtree rooted at root.
func (t BST) _searchNode(key int, root *Node) bool {
	if root == nil {
		return false
	}
	if key == root.val {
		return true
	}
	if key > root.val {
		return t._searchNode(key, root.right)
	}
	return t._searchNode(key, root.left)
}

// Search reports whether key is present in the tree.
func (t BST) Search(key int) bool {
	return t._searchNode(key, t.root)
}

// InOrder prints the keys in ascending order.
func (t BST) InOrder() {
	fmt.Println("InOrder Sequence...")
	t._inOrder(t.root)
	fmt.Println()
}

// PreOrder prints the keys in root-left-right order.
func (t BST) PreOrder() {
	fmt.Println("PreOrder Sequence...")
	t._preOrder(t.root)
	fmt.Println()
}

// PostOrder prints the keys in left-right-root order.
func (t BST) PostOrder() {
	fmt.Println("PostOrder Sequence...")
	t._postOrder(t.root)
	fmt.Println()
}

// _inOrder visits left, root, right. (Receiver changed from *BST to BST for
// consistency with the other traversal helpers; it reads no state via t.)
func (t BST) _inOrder(root *Node) {
	if root == nil {
		return
	}
	t._inOrder(root.left)
	fmt.Printf("%d ", root.val)
	t._inOrder(root.right)
}

// _preOrder visits root, left, right.
func (t BST) _preOrder(root *Node) {
	if root == nil {
		return
	}
	fmt.Printf("%d ", root.val)
	t._preOrder(root.left)
	t._preOrder(root.right)
}

// _postOrder visits left, right, root.
// BUG FIX: the original recursed into the subtrees via _preOrder, which
// printed each subtree's root first — the output was not a post-order
// traversal. It now recurses via _postOrder and prints the root last.
func (t BST) _postOrder(root *Node) {
	if root == nil {
		return
	}
	t._postOrder(root.left)
	t._postOrder(root.right)
	fmt.Printf("%d ", root.val)
}
package main
import (
"fmt"
)
// tag::metaverse[]
// Game describes the state of one two-player game.
// Pos1/Score1 belong to the player about to move; updateMetaverse swaps the
// pairs every turn so it can always treat player 1 as the mover.
type Game struct {
	Pos1, Pos2 int
	Score1, Score2 int
}
const (
	diracSize = 3 // number of faces on the Dirac die
	winScoreDirac = 21 // score at which a player wins
)
// Metaverse tracks how many universes there are for each game state.
type Metaverse map[Game]int
// dieRolls returns, for rolling a diracSize-faced die numRolls times and
// summing the faces, a map from each possible total to the number of
// distinct roll sequences that produce it (e.g. for a 3-sided die rolled
// three times: {3:1, 4:3, 5:6, 6:7, 7:6, 8:3, 9:1}).
//
// FIX: the initial single-roll distribution was hard-coded to {1,2,3},
// silently ignoring the diracSize parameter for any die other than a
// 3-sided one. It is now built from diracSize; results for diracSize == 3
// are unchanged.
func dieRolls(diracSize, numRolls int) map[int]int {
	// Distribution after the first roll: each face reachable one way.
	state := make(map[int]int, diracSize)
	for face := 1; face <= diracSize; face++ {
		state[face] = 1
	}
	// Convolve in one more die per remaining roll.
	for rollIdx := 1; rollIdx < numRolls; rollIdx++ {
		newState := map[int]int{}
		for dieRoll := 1; dieRoll <= diracSize; dieRoll++ {
			for val, count := range state {
				newState[val+dieRoll] += count
			}
		}
		state = newState
	}
	return state
}
// updateMetaverse advances every game state by one turn of the current
// player. We always assume it is player 1's turn; the position/score pairs
// are swapped in the successor state so each player moves alternately.
func updateMetaverse(meta Metaverse, dieRolls map[int]int, boardSize int) Metaverse {
	next := Metaverse{}
	for game, universes := range meta {
		// Branch into every possible roll outcome, weighted by how many
		// ways that roll total can occur.
		for roll, ways := range dieRolls {
			landed := (game.Pos1+roll-1)%boardSize + 1
			successor := Game{
				Pos1:   game.Pos2,
				Pos2:   landed,
				Score1: game.Score2,
				Score2: game.Score1 + landed,
			}
			next[successor] += universes * ways
		}
	}
	return next
}
// countAndRemoveWins removes every game whose most recent mover has reached
// the winning score and returns how many universes those games represent.
func countAndRemoveWins(meta *Metaverse) int {
	total := 0
	for game, universes := range *meta {
		// Players are swapped each turn, so the score updated last is
		// always Score2.
		if game.Score2 < winScoreDirac {
			continue
		}
		total += universes
		// Deleting from a map while iterating over it is not a problem in Go.
		delete(*meta, game)
	}
	return total
}
// buildMetaverse runs the full Dirac-dice simulation from the two players'
// starting positions and prints how many universes each player wins in.
// NOTE(review): numRolls and boardSize are package-level values defined
// outside this excerpt — confirm their values (AoC day 21 uses 3 and 10).
func buildMetaverse(pos1, pos2 int) {
	rollUpdate := dieRolls(diracSize, numRolls)
	// Debug print of the roll-total distribution.
	fmt.Println(rollUpdate)
	// Set up the starting metaverse. During each round, we will update the metaverse.
	meta := Metaverse{}
	startGame := Game{Pos1: pos1, Pos2: pos2, Score1: 0, Score2: 0}
	meta[startGame] = 1
	// Count how many victories there were for each player. Go's zero value for integers is zero, so
	// we don't need to initialise the map.
	numPlayers := 2
	wins := map[int]int{}
	// Update the metaverse until every game has finished (meta drained).
	for playerIdx := 0; len(meta) > 0; playerIdx = (playerIdx + 1) % numPlayers {
		meta = updateMetaverse(meta, rollUpdate, boardSize)
		wins[playerIdx] += countAndRemoveWins(&meta)
	}
	fmt.Println("Wins player 1:", wins[0])
	fmt.Println("Wins player 2:", wins[1])
}
// end::metaverse[] | day21/go/razziel89/metaverse.go | 0.544801 | 0.403156 | metaverse.go | starcoder |
package aoc2020
/*
https://adventofcode.com/2020/day/15
--- Day 15: Rambunctious Recitation ---
You catch the airport shuttle and try to book a new flight to your vacation island. Due to the storm, all direct flights have been cancelled, but a route is available to get around the storm. You take it.
While you wait for your flight, you decide to check in with the Elves back at the North Pole. They're playing a memory game and are ever so excited to explain the rules!
In this game, the players take turns saying numbers. They begin by taking turns reading from a list of starting numbers (your puzzle input). Then, each turn consists of considering the most recently spoken number:
If that was the first time the number has been spoken, the current player says 0.
Otherwise, the number had been spoken before; the current player announces how many turns apart the number is from when it was previously spoken.
So, after the starting numbers, each turn results in that player speaking aloud either 0 (if the last number is new) or an age (if the last number is a repeat).
For example, suppose the starting numbers are 0,3,6:
Turn 1: The 1st number spoken is a starting number, 0.
Turn 2: The 2nd number spoken is a starting number, 3.
Turn 3: The 3rd number spoken is a starting number, 6.
Turn 4: Now, consider the last number spoken, 6. Since that was the first time the number had been spoken, the 4th number spoken is 0.
Turn 5: Next, again consider the last number spoken, 0. Since it had been spoken before, the next number to speak is the difference between the turn number when it was last spoken (the previous turn, 4) and the turn number of the time it was most recently spoken before then (turn 1). Thus, the 5th number spoken is 4 - 1, 3.
Turn 6: The last number spoken, 3 had also been spoken before, most recently on turns 5 and 2. So, the 6th number spoken is 5 - 2, 3.
Turn 7: Since 3 was just spoken twice in a row, and the last two turns are 1 turn apart, the 7th number spoken is 1.
Turn 8: Since 1 is new, the 8th number spoken is 0.
Turn 9: 0 was last spoken on turns 8 and 4, so the 9th number spoken is the difference between them, 4.
Turn 10: 4 is new, so the 10th number spoken is 0.
(The game ends when the Elves get sick of playing or dinner is ready, whichever comes first.)
Their question for you is: what will be the 2020th number spoken? In the example above, the 2020th number spoken will be 436.
Here are a few more examples:
Given the starting numbers 1,3,2, the 2020th number spoken is 1.
Given the starting numbers 2,1,3, the 2020th number spoken is 10.
Given the starting numbers 1,2,3, the 2020th number spoken is 27.
Given the starting numbers 2,3,1, the 2020th number spoken is 78.
Given the starting numbers 3,2,1, the 2020th number spoken is 438.
Given the starting numbers 3,1,2, the 2020th number spoken is 1836.
Given your starting numbers, what will be the 2020th number spoken?
Your puzzle input is 1,12,0,20,8,16.
*/
import (
"fmt"
"strconv"
"strings"
goutils "github.com/simonski/goutils"
)
// AOC_2020_15 is the entrypoint for day 15. (The old comment said
// AOC_2020_14, which was a copy-paste slip.)
func AOC_2020_15(cli *goutils.CLI) {
	AOC_2020_15_part1_attempt1(cli)
	// AOC_2020_15_part2_attempt1(cli)
}
// AOC_2020_15_part1_attempt1 solves the puzzle for the fixed input: it prints
// the 2020th spoken number (part 1) and — despite the function's name — also
// the 30000000th (part 2).
func AOC_2020_15_part1_attempt1(cli *goutils.CLI) {
	sequence := "1,12,0,20,8,16"
	result := NextInDay15Part1Sequence(sequence, 2020)
	fmt.Printf("2020th in sequence %v is %v\n", sequence, result)
	result2 := NextInDay15Part1Sequence(sequence, 30000000)
	fmt.Printf("30000000th in sequence %v is %v\n", sequence, result2)
}
// NextInDay15Part1Sequence plays the memory game: after the comma-separated
// starting numbers are spoken, each turn speaks 0 if the previous number was
// new, or otherwise the gap between its last two occurrences. It returns the
// number spoken on turn `iterations`.
func NextInDay15Part1Sequence(sequence string, iterations int) int {
	ints := split_comma_separated_string_to_ints(sequence)
	// If the requested turn falls within the starting numbers themselves, the
	// answer is simply that starting number (the old code returned 0 here).
	if iterations >= 1 && iterations <= len(ints) {
		return ints[iterations-1]
	}
	store := NewValueStore()
	lastValue := -1
	iteration := 1
	// Speak the starting numbers first.
	for _, value := range ints {
		store.Increment(value, iteration)
		lastValue = value
		iteration++
	}
	speakValue := 0
	// The old `for iteration = iteration; ...` no-op initializer is gone.
	for ; iteration <= iterations; iteration++ {
		lastNumber := store.Get(lastValue)
		if lastNumber.Count == 1 {
			// The previous number had never been spoken before.
			speakValue = 0
		} else {
			// Spoken before: say how many turns apart its last two occurrences
			// were. Increment runs at strictly increasing iterations, so this
			// difference is always >= 1; the old `if speakValue == 0
			// { speakValue = 1 }` fixup was unreachable (and would have
			// violated the puzzle rules if it ever fired).
			speakValue = lastNumber.LastTurn - lastNumber.LastTurnBeforeThat
		}
		store.Increment(speakValue, iteration)
		lastValue = speakValue
	}
	return speakValue
}
// split_comma_separated_string_to_ints parses a comma-separated list such as
// "1,12,0" into its integer values. The Atoi error is deliberately ignored
// because the puzzle input is trusted; unparsable entries come out as 0.
func split_comma_separated_string_to_ints(sequence string) []int {
	splits := strings.Split(sequence, ",")
	// Pre-size the result: exactly one value per entry.
	results := make([]int, 0, len(splits))
	for _, entry := range splits {
		ivalue, _ := strconv.Atoi(entry) // trusted input; errors yield 0
		results = append(results, ivalue)
	}
	return results
}
// split_undecorated_string_to_ints expands a digit string such as "5432" into
// the slice of its single-digit values, e.g. [5, 4, 3, 2]. Non-digit bytes
// come out as 0 because the parse error is ignored.
func split_undecorated_string_to_ints(sequence string) []int {
	results := make([]int, len(sequence))
	for pos := 0; pos < len(sequence); pos++ {
		digit, _ := strconv.Atoi(sequence[pos : pos+1])
		results[pos] = digit
	}
	return results
}
// ValueData records the spoken-number bookkeeping for one value in the memory
// game.
type ValueData struct {
	Value              int // the number itself
	LastTurn           int // most recent turn it was spoken (0 = never)
	LastTurnBeforeThat int // the turn it was spoken before LastTurn (0 = at most once)
	Count              int // how many times it has been spoken
}
// NewValueData returns the bookkeeping record for a number that has not been
// spoken yet; all counters start at zero (Go's zero value).
func NewValueData(key int) *ValueData {
	return &ValueData{Value: key}
}
// ValueStore maps each spoken number to its bookkeeping record.
type ValueStore struct {
	Values map[int]*ValueData
}
// Increment records that key was spoken on the given iteration (turn): the
// previous LastTurn is demoted to LastTurnBeforeThat, LastTurn becomes the
// current iteration, and the spoken count is bumped. It returns the new count.
func (v *ValueStore) Increment(key int, iteration int) int {
	valueData := v.Get(key)
	valueData.LastTurnBeforeThat = valueData.LastTurn
	valueData.LastTurn = iteration
	valueData.Count++
	return valueData.Count
}
// Get returns the record for key, lazily creating (and storing) a fresh one
// the first time the key is seen. Rewritten to drop the else-after-return and
// the confusing re-shadowing of vd inside the branch.
func (v *ValueStore) Get(key int) *ValueData {
	if vd, exists := v.Values[key]; exists {
		return vd
	}
	vd := NewValueData(key)
	v.Values[key] = vd
	return vd
}
func NewValueStore() *ValueStore {
vs := ValueStore{Values: make(map[int]*ValueData)}
return &vs
} | app/aoc2020/aoc2020_15.go | 0.722233 | 0.826011 | aoc2020_15.go | starcoder |
package rfm95
// https://www.hoperf.com/data/upload/portal/20190801/RFM96W-V2.0.pdf
const (
	// FXOSC is the radio's oscillator frequency in Hertz.
	FXOSC = 32000000
	// SPIWriteMode is used to encode register addresses for SPI writes:
	// OR it into the address byte (sets the MSB) to request a write access.
	SPIWriteMode = 1 << 7
)
// FIFO
const (
RegFifo = 0x00 // FIFO read/write access
)
// Registers for Common settings
const (
RegOpMode = 0x01 // Operating mode & LoRa / FSK selection
RegBitrateMsb = 0x02 // Bit Rate setting, Most Significant Bits
RegBitrateLsb = 0x03 // Bit Rate setting, Least Significant Bits
RegFdevMsb = 0x04 // Frequency Deviation setting, Most Significant Bits
RegFdevLsb = 0x05 // Frequency Deviation setting, Least Significant Bits
RegFrfMsb = 0x06 // RF Carrier Frequency, Most Significant Bits
RegFrfMid = 0x07 // RF Carrier Frequency, Intermediate Bits
RegFrfLsb = 0x08 // RF Carrier Frequency, Least Significant Bits
)
// Registers for the Transmitter
const (
RegPaConfig = 0x09 // PA selection and Output Power control
RegPaRamp = 0x0A // Control of the PA ramp time, low phase noise PLL
RegOcp = 0x0B // Over Current Protection control
)
// Registers for the Receiver
const (
RegLna = 0x0C // LNA settings
RegRxConfig = 0x0D // AFC, AGC, ctrl
RegRssiConfig = 0x0E // RSSI
RegRssiCollision = 0x0F // RSSI Collision detector
RegRssiThresh = 0x10 // RSSI Threshold control
RegRssiValue = 0x11 // RSSI value in dBm
RegRxBw = 0x12 // Channel Filter BW Control
RegAfcBw = 0x13 // AFC Channel Filter BW
RegOokPeak = 0x14 // OOK demodulator
RegOokFix = 0x15 // Threshold of the OOK demodulator
RegOokAvg = 0x16 // Average of the OOK demodulator
RegAfcFei = 0x1A // AFC and FEI control
RegAfcMsb = 0x1B // Frequency correction value of the AFC, MSB
RegAfcLsb = 0x1C // Frequency correction value of the AFC, LSB
RegFeiMsb = 0x1D // Value of the calculated frequency error, MSB
RegFeiLsb = 0x1E // Value of the calculated frequency error, LSB
RegPreambleDetect = 0x1F // Settings of the Preamble Detector
RegRxTimeout1 = 0x20 // Timeout duration between Rx request and RSSI detection
RegRxTimeout2 = 0x21 // Timeout duration between RSSI detection and PayloadReady
RegRxTimeout3 = 0x22 // Timeout duration between RSSI detection and SyncAddress
RegRxDelay = 0x23 // Delay between Rx cycles
)
// RC Oscillator registers
const (
RegOsc = 0x24 // RC Oscillators Settings, CLK-OUT frequency
)
// Packet Handling registers
const (
RegPreambleMsb = 0x25 // Preamble length, MSB
RegPreambleLsb = 0x26 // Preamble length, LSB
RegSyncConfig = 0x27 // Sync Word Recognition control
RegSyncValue1 = 0x28 // Sync Word bytes 1 through 8
RegSyncValue2 = 0x29
RegSyncValue3 = 0x2A
RegSyncValue4 = 0x2B
RegSyncValue5 = 0x2C
RegSyncValue6 = 0x2D
RegSyncValue7 = 0x2E
RegSyncValue8 = 0x2F
RegPacketConfig1 = 0x30 // Packet mode settings
RegPacketConfig2 = 0x31 // Packet mode settings
RegPayloadLength = 0x32 // Payload length setting
RegNodeAdrs = 0x33 // Node address
RegBroadcastAdrs = 0x34 // Broadcast address
RegFifoThresh = 0x35 // Fifo threshold, Tx start condition
)
// Sequencer registers
const (
RegSeqConfig1 = 0x36 // Top level Sequencer settings
RegSeqConfig2 = 0x37 // Top level Sequencer settings
RegTimerResol = 0x38 // Timer 1 and 2 resolution control
RegTimer1Coef = 0x39 // Timer 1 setting
RegTimer2Coef = 0x3A // Timer 2 setting
)
// Service registers
const (
RegImageCal = 0x3B // Image calibration engine control
RegTemp = 0x3C // Temperature Sensor value
RegLowBat = 0x3D // Low Battery Indicator Settings
)
// Status registers
const (
RegIrqFlags1 = 0x3E // Status register: PLL Lock state, Timeout, RSSI
RegIrqFlags2 = 0x3F // Status register: FIFO handling flags, Low Battery
)
// IO control registers
const (
RegDioMapping1 = 0x40 // Mapping of pins DIO0 to DIO3
RegDioMapping2 = 0x41 // Mapping of pins DIO4 and DIO5, ClkOut frequency
)
// Version register
const (
RegVersion = 0x42 // Hope RF ID relating the silicon revision
)
// Additional registers
const (
RegPllHop = 0x44 // Control the fast frequency hopping mode
RegTcxo = 0x4B // TCXO or XTAL input setting
RegPaDac = 0x4D // Higher power settings of the PA
RegFormerTemp = 0x5B // Stored temperature during the former IQ Calibration
RegBitRateFrac = 0x5D // Fractional part in the Bit Rate division ratio
RegAgcRef = 0x61 // Adjustment of the AGC thresholds
RegAgcThresh1 = 0x62
RegAgcThresh2 = 0x63
RegAgcThresh3 = 0x64
RegPll = 0x70 // Control of the PLL bandwidth
)
// Skip RegFifo to avoid burst mode access.
const ConfigurationStart = RegOpMode
// resetConfiguration contains the register values after reset,
// according to data sheet section 6.
var resetConfiguration = []byte{
RegOpMode: 0x01,
RegBitrateMsb: 0x1A,
RegBitrateLsb: 0x0B,
RegFdevMsb: 0x00,
RegFdevLsb: 0x52,
RegFrfMsb: 0x6C,
RegFrfMid: 0x80,
RegFrfLsb: 0x00,
RegPaConfig: 0x4F,
RegPaRamp: 0x09,
RegOcp: 0x2B,
RegLna: 0x20,
RegRxConfig: 0x0E,
RegRssiConfig: 0x02,
RegRssiCollision: 0x0A,
RegRssiThresh: 0xFF,
RegRssiValue: 0x00,
RegRxBw: 0x15,
RegAfcBw: 0x0B,
RegOokPeak: 0x28,
RegOokFix: 0x0C,
RegOokAvg: 0x12,
0x17: 0x47, // reserved
0x18: 0x32, // reserved
0x19: 0x3E, // reserved
RegAfcFei: 0x00,
RegAfcMsb: 0x00,
RegAfcLsb: 0x00,
RegFeiMsb: 0x00,
RegFeiLsb: 0x00,
RegPreambleDetect: 0x40,
RegRxTimeout1: 0x00,
RegRxTimeout2: 0x00,
RegRxTimeout3: 0x00,
RegRxDelay: 0x00,
RegOsc: 0x07,
RegPreambleMsb: 0x00,
RegPreambleLsb: 0x03,
RegSyncConfig: 0x93,
RegSyncValue1: 0x55,
RegSyncValue2: 0x55,
RegSyncValue3: 0x55,
RegSyncValue4: 0x55,
RegSyncValue5: 0x55,
RegSyncValue6: 0x55,
RegSyncValue7: 0x55,
RegSyncValue8: 0x55,
RegPacketConfig1: 0x90,
RegPacketConfig2: 0x40,
RegPayloadLength: 0x40,
RegNodeAdrs: 0x00,
RegBroadcastAdrs: 0x00,
RegFifoThresh: 0x0F,
RegSeqConfig1: 0x00,
RegSeqConfig2: 0x00,
RegTimerResol: 0x00,
RegTimer1Coef: 0xF5,
RegTimer2Coef: 0x20,
RegImageCal: 0x82,
RegTemp: 0x00,
RegLowBat: 0x02,
RegIrqFlags1: 0x80,
RegIrqFlags2: 0x40,
RegDioMapping1: 0x00,
RegDioMapping2: 0x00,
RegVersion: 0x12,
// Omit additional registers to avoid undefined behavior.
}
// ResetConfiguration returns a copy of the register values after reset.
func ResetConfiguration() []byte {
	// resetConfiguration[:] would alias the package-level table, letting a
	// caller mutate it in place; hand out an actual copy instead.
	out := make([]byte, len(resetConfiguration))
	copy(out, resetConfiguration)
	return out
}
// defaultConfiguration contains the default (FSK) values,
// according to data sheet section 6.
var defaultConfiguration = []byte{
RegOpMode: 0x01,
RegBitrateMsb: 0x1A,
RegBitrateLsb: 0x0B,
RegFdevMsb: 0x00,
RegFdevLsb: 0x52,
RegFrfMsb: 0x6C,
RegFrfMid: 0x80,
RegFrfLsb: 0x00,
RegPaConfig: 0x4F,
RegPaRamp: 0x09,
RegOcp: 0x2B,
RegLna: 0x20,
RegRxConfig: 0x08,
RegRssiConfig: 0x02,
RegRssiCollision: 0x0A,
RegRssiThresh: 0xFF,
RegRssiValue: 0x00,
RegRxBw: 0x15,
RegAfcBw: 0x0B,
RegOokPeak: 0x28,
RegOokFix: 0x0C,
RegOokAvg: 0x12,
0x17: 0x47, // reserved
0x18: 0x32, // reserved
0x19: 0x3E, // reserved
RegAfcFei: 0x00,
RegAfcMsb: 0x00,
RegAfcLsb: 0x00,
RegFeiMsb: 0x00,
RegFeiLsb: 0x00,
RegPreambleDetect: 0x40,
RegRxTimeout1: 0x00,
RegRxTimeout2: 0x00,
RegRxTimeout3: 0x00,
RegRxDelay: 0x00,
RegOsc: 0x05,
RegPreambleMsb: 0x00,
RegPreambleLsb: 0x03,
RegSyncConfig: 0x93,
RegSyncValue1: 0x01,
RegSyncValue2: 0x01,
RegSyncValue3: 0x01,
RegSyncValue4: 0x01,
RegSyncValue5: 0x01,
RegSyncValue6: 0x01,
RegSyncValue7: 0x01,
RegSyncValue8: 0x01,
RegPacketConfig1: 0x90,
RegPacketConfig2: 0x40,
RegPayloadLength: 0x40,
RegNodeAdrs: 0x00,
RegBroadcastAdrs: 0x00,
RegFifoThresh: 0x1F,
RegSeqConfig1: 0x00,
RegSeqConfig2: 0x00,
RegTimerResol: 0x00,
RegTimer1Coef: 0x12,
RegTimer2Coef: 0x20,
RegImageCal: 0x02,
RegTemp: 0x00,
RegLowBat: 0x02,
RegIrqFlags1: 0x80,
RegIrqFlags2: 0x40,
RegDioMapping1: 0x00,
RegDioMapping2: 0x00,
RegVersion: 0x12,
}
// DefaultConfiguration returns a copy of the default (recommended) values.
func DefaultConfiguration() []byte {
	// defaultConfiguration[:] would alias the package-level table, letting a
	// caller mutate it in place; hand out an actual copy instead.
	out := make([]byte, len(defaultConfiguration))
	copy(out, defaultConfiguration)
	return out
}
// RegOpMode bit fields: bit 7 selects LoRa vs FSK/OOK operation, bits 6..5
// select the modulation type, and bits 2..0 the transceiver mode.
const (
	FskOokMode = 0 << 7
	LoRaMode   = 1 << 7

	ModulationTypeMask = 3 << 5
	ModulationTypeFSK  = 0 << 5
	ModulationTypeOOK  = 1 << 5

	ModeMask        = 7
	SleepMode       = 0
	StandbyMode     = 1
	FreqSynthModeTX = 2
	TransmitterMode = 3
	FreqSynthModeRX = 4
	ReceiverMode    = 5
)
// RegPaConfig
const (
PaBoost = 1 << 7
OutputPowerShift = 0
)
// RegPaRamp
const (
ModulationShapingNone = 0 << 5
ModulationShapingNarrow = 1 << 5
ModulationShapingWide = 2 << 5
PaRamp3_4ms = 0x0
PaRamp2ms = 0x1
PaRamp1ms = 0x2
PaRamp500μs = 0x3
PaRamp250μs = 0x4
PaRamp125μs = 0x5
PaRamp100μs = 0x6
PaRamp62μs = 0x7
PaRamp50μs = 0x8
PaRamp40μs = 0x9
PaRamp31μs = 0xA
PaRamp25μs = 0xB
PaRamp20μs = 0xC
PaRamp15μs = 0xD
PaRamp12μs = 0xE
PaRamp10μs = 0xF
)
// RegLna
const (
LnaGainMax = 1 << 5
LnaGainMax_6dB = 2 << 5
LnaGainMax_12dB = 3 << 5
LnaGainMax_24dB = 4 << 5
LnaGainMax_36dB = 5 << 5
LnaGainMax_48dB = 6 << 5
)
// RegRxConfig
const (
AfcAutoOn = 1 << 4
AgcAutoOn = 1 << 3
RxTriggerPreamble = 6 << 0
RxTriggerRSSI = 1 << 0
)
// RegRxBw
const (
RxBwMantShift = 3
RxBwMantMask = 3 << 3
RxBwMant16 = 0 << 3
RxBwMant20 = 1 << 3
RxBwMant24 = 2 << 3
RxBwExpShift = 0
RxBwExpMask = 7 << 0
)
// RegSyncConfig
const (
SyncOn = 1 << 4
SyncSizeShift = 0
)
// RegPacketConfig1
const (
FixedLength = 0 << 7
VariableLength = 1 << 7
DcFreeShift = 5
CrcOn = 1 << 4
CrcOff = 0 << 4
CrcAutoClearOff = 1 << 3
AddressFilteringShift = 1
)
// RegPacketConfig2
const (
PacketMode = 1 << 6
PayloadLengthMSBMask = 7
)
// RegFifoThresh
const (
TxStartCondition = 1 << 7
FifoThresholdShift = 0
)
// RegSeqConfig1
const (
SequencerStart = 1 << 7
SequencerStop = 1 << 6
IdleModeStandby = 0 << 5
IdleModeSleep = 1 << 5
FromStartToLowPower = 0 << 3
FromStartToRX = 1 << 3
FromStartToTX = 2 << 3
FromStartToTXOnFifoLevel = 3 << 3
)
// RegIrqFlags1
const (
ModeReady = 1 << 7
RxReady = 1 << 6
TxReady = 1 << 5
PllLock = 1 << 4
Rssi = 1 << 3
Timeout = 1 << 2
PreambleDetect = 1 << 1
SyncAddressMatch = 1 << 0
)
// RegIrqFlags2
const (
FifoFull = 1 << 7
FifoEmpty = 1 << 6
FifoLevel = 1 << 5
FifoOverrun = 1 << 4
PacketSent = 1 << 3
PayloadReady = 1 << 2
CrcOk = 1 << 1
LowBat = 1 << 0
)
// RegDioMapping1
const (
Dio0MappingShift = 6
Dio1MappingShift = 4
Dio2MappingShift = 2
Dio3MappingShift = 0
)
// RegDioMapping2
const (
Dio4MappingShift = 6
Dio5MappingShift = 4
MapPreambleDetect = 1 << 0
MapRssi = 0 << 0
)
// RegPaDac
const (
PaDacDefault = 0x04
PaDacPlus20dBm = 0x07
)
package randx
import (
"errors"
"github.com/hsiafan/glow/v2/mathx/intx"
"github.com/hsiafan/glow/v2/timex"
"math"
"math/rand"
)
// Rand is a rand with more useful methods: unbiased bounded draws over int,
// int32 and int64 ranges. Not suitable for cryptographic use.
type Rand struct {
	rand.Rand
}
// New returns a new Rand seeded with the current epoch-millisecond timestamp.
func New() *Rand {
	return NewWithSeed(timex.EpochMills())
}
// NewWithSeed returns a new Rand whose deterministic stream is derived from
// the given seed.
func NewWithSeed(seed int64) *Rand {
	src := rand.NewSource(seed)
	return &Rand{Rand: *rand.New(src)}
}
// Sentinel values used in the panics raised for invalid bounds.
var boundError = errors.New("bound less than or equals zero")
var boundRangeError = errors.New("low bound larger than/equals high bound")
var boundOverFlowError = errors.New("bound range overflows int")
// IntWithin returns a random value within the range [0, bound).
// It panics if bound is less than or equal to 0.
func (r *Rand) IntWithin(bound int) int {
	if bound <= 0 {
		panic(boundError)
	}
	// Delegate to the 32-bit draw when the bound fits; otherwise use 64 bits.
	if bound <= math.MaxInt32 {
		v := r.Int32Within(int32(bound))
		return int(v)
	}
	v := r.Int64Within(int64(bound))
	return int(v)
}
// IntBetween returns a random value within the range [low, high).
// It panics if low >= high, or if high-low overflows int.
func (r *Rand) IntBetween(low int, high int) int {
	if low >= high {
		panic(boundRangeError)
	}
	// high-low overflows int exactly when low is negative and high exceeds
	// MaxInt + low.
	if low < 0 && (intx.MaxInt+low) < high {
		panic(boundOverFlowError)
	}
	v := r.IntWithin(high - low)
	return low + v
}
// Int32Within returns a random int32 value within the range [0, bound).
// It panics if bound is less than or equal to 0. The draw is unbiased: for
// non-power-of-two bounds, rejection sampling is used instead of folding.
func (r *Rand) Int32Within(bound int32) int32 {
	if bound <= 0 {
		panic(boundError)
	}
	v := r.Int31()
	m := bound - 1
	if bound&m == 0 {
		// i.e., bound is a power of 2
		// Scale by multiplication so the result comes from the high-order bits
		// of the underlying pseudo-random number generator.
		// Linear congruential pseudo-random number generators are known to have short periods in the sequence of values of their low-order bits.
		v = int32((int64(bound) * int64(v)) >> 31)
	} else {
		// Rejection sampling: throw away draws at the "top" of the range so
		// that the random number is evenly distributed. u-v+m going negative
		// (int32 overflow) flags a draw from the incomplete final bucket.
		for u := v; ; u = r.Int31() {
			v = u % bound
			if u-v+m >= 0 {
				break
			}
		}
	}
	return v
}
// Int64Within return a random int64 value within range [0, bound).
// If bound is less than or equals with 0, panics with an error
func (r *Rand) Int64Within(bound int64) int64 {
if bound <= 0 {
panic(boundError)
}
v := r.Int63()
m := bound - 1
if bound&m == 0 {
// i.e., bound is a power of 2
// em.. just use the lower bits
v = v & m
} else {
// throws away numbers at the "top" of the range so that the random number is evenly distributed.
for u := v; ; u = r.Int63() {
v = u % bound
if u-v+m >= 0 {
break
}
}
}
return v
} | mathx/randx/rand.go | 0.67822 | 0.410225 | rand.go | starcoder |
package assert
import (
"fmt"
"strings"
)
// OnString is the result of calling ThatString on an Assertion.
// It provides assertion tests that are specific to strings.
type OnString struct {
	Assertion
	value string // the stringified value under test
}
// ThatString returns an OnString for string based assertions.
// string and []byte arguments are used directly; anything else is converted
// to a string using fmt.Sprint. The result supports string specific tests.
func (a Assertion) ThatString(value interface{}) OnString {
	s := ""
	switch v := value.(type) {
	case string:
		s = v
	case []byte:
		s = string(v)
	default:
		s = fmt.Sprint(value)
	}
	return OnString{Assertion: a, value: s}
}
// Equals asserts that the supplied string is equal to the expected string.
// On failure it reports where the two strings diverge: the trailing part of
// the actual value from the first difference, or the missing/extra tail.
func (o OnString) Equals(expect string) bool {
	return o.Compare(o.value, "==", expect).Test(func() bool {
		if o.value == expect {
			return true
		}
		// Walk both strings rune by rune so multi-byte characters line up.
		// The previous version indexed a []rune slice with a byte offset
		// (and sliced expect by the byte length of value), which misaligned
		// the diagnostics for any non-ASCII input.
		rv := []rune(o.value)
		re := []rune(expect)
		for i, c := range rv {
			if i >= len(re) {
				o.Printf("Longer\tby\t")
				o.Println(string(rv[i:]))
				return false
			}
			if c != re[i] {
				o.Printf("Differs\tfrom\t")
				o.Println(string(rv[i:]))
				return false
			}
		}
		o.Printf("Shorter\tby\t")
		o.Println(string(re[len(rv):]))
		return false
	}())
}
// NotEquals asserts that the supplied string is not equal to the test string.
func (o OnString) NotEquals(test string) bool {
return o.Compare(o.value, "!=", test).Test(o.value != test)
}
// Contains asserts that the supplied string contains substr.
func (o OnString) Contains(substr string) bool {
return o.Compare(o.value, "contains", substr).Test(strings.Contains(o.value, substr))
}
// DoesNotContain asserts that the supplied string does not contain substr.
func (o OnString) DoesNotContain(substr string) bool {
return o.Compare(o.value, "does not contain", substr).Test(!strings.Contains(o.value, substr))
}
// HasPrefix asserts that the supplied string start with substr.
func (o OnString) HasPrefix(substr string) bool {
return o.Compare(o.value, "starts with", substr).Test(strings.HasPrefix(o.value, substr))
}
// HasSuffix asserts that the supplied string ends with with substr.
func (o OnString) HasSuffix(substr string) bool {
return o.Compare(o.value, "ends with", substr).Test(strings.HasSuffix(o.value, substr))
} | core/assert/string.go | 0.643665 | 0.434041 | string.go | starcoder |
package geometry
import (
"github.com/dlespiau/dax"
"github.com/dlespiau/dax/math"
)
// Sphere describes a UV sphere: a radius, the number of vertical and
// horizontal segments, and the phi (azimuth) / theta (polar) start angles
// and sweep lengths in radians.
type Sphere struct {
	radius                  float32
	nVSegments, nHSegments  int
	phiStart, phiLength     float32
	thetaStart, thetaLength float32
}
// NewSphere allocates a complete sphere with the given radius and segment
// counts.
func NewSphere(radius float32, nVSegments, nHSegments int) *Sphere {
	sphere := &Sphere{}
	sphere.Init(radius, nVSegments, nHSegments)
	return sphere
}
// InitFull configures every sphere parameter explicitly, including partial
// phi/theta sweeps for sphere slices.
func (s *Sphere) InitFull(radius float32, nVSegments, nHSegments int,
	phiStart, phiLength, thetaStart, thetaLength float32) {
	*s = Sphere{
		radius:      radius,
		nVSegments:  nVSegments,
		nHSegments:  nHSegments,
		phiStart:    phiStart,
		phiLength:   phiLength,
		thetaStart:  thetaStart,
		thetaLength: thetaLength,
	}
}
// Init configures a complete sphere: phi sweeps the full 2*Pi around the
// axis, while theta only sweeps Pi from pole to pole. The previous version
// passed 2*Pi for the theta sweep as well, which traced the surface twice.
func (s *Sphere) Init(radius float32, nVSegments, nHSegments int) {
	const fullCircle float32 = 2 * float32(math.Pi)
	s.InitFull(radius, nVSegments, nHSegments, 0, fullCircle, 0, fullCircle/2)
}
func (s *Sphere) GetMesh() *dax.Mesh {
m := dax.NewMesh()
var positions, normals, uvs dax.AttributeBuffer
thetaEnd := s.thetaStart + s.thetaLength
vertexCount := (s.nVSegments + 1) * (s.nHSegments + 1)
positions.Init("position", vertexCount, 3)
normals.Init("normal", vertexCount, 3)
uvs.Init("uvs", vertexCount, 2)
index := 0
vertices := make([][]uint, s.nHSegments+1, s.nHSegments+1)
normal := math.Vec3{}
for y := 0; y <= s.nHSegments; y++ {
verticesRow := make([]uint, s.nVSegments+1, s.nVSegments+1)
v := float32(y) / float32(s.nHSegments)
for x := 0; x <= s.nVSegments; x++ {
u := float32(x) / float32(s.nVSegments)
px := -s.radius * math.Cos(s.phiStart+u*s.phiLength) * math.Sin(s.thetaStart+v*s.thetaLength)
py := s.radius * math.Cos(s.thetaStart+v*s.thetaLength)
pz := s.radius * math.Sin(s.phiStart+u*s.phiLength) * math.Sin(s.thetaStart+v*s.thetaLength)
normal.Set(px, py, pz)
normal.Normalize()
positions.SetXYZ(index, px, py, pz)
normals.SetXYZ(index, normal[0], normal[1], normal[2])
uvs.SetXY(index, u, 1-v)
verticesRow[x] = uint(index)
index++
}
vertices[y] = verticesRow
}
indices := make([]uint, vertexCount, vertexCount)
i := 0
for y := 0; y < s.nHSegments; y++ {
for x := 0; x < s.nVSegments; x++ {
v1 := vertices[y][x+1]
v2 := vertices[y][x]
v3 := vertices[y+1][x]
v4 := vertices[y+1][x+1]
if y != 0 || s.thetaStart > 0 {
indices[i] = v1
i++
indices[i] = v2
i++
indices[i] = v4
i++
}
if y != s.nHSegments-1 || thetaEnd < math.Pi {
indices[i] = v2
i++
indices[i] = v3
i++
indices[i] = v4
i++
}
}
}
m.AddIndices(indices)
m.AddAttributeBuffer(&positions)
m.AddAttributeBuffer(&normals)
m.AddAttributeBuffer(&uvs)
return m
} | geometry/sphere.go | 0.696681 | 0.445107 | sphere.go | starcoder |
package utils
import (
"image"
)
// ExcessMode specifies how excess space is dealt with for tools that may
// produce results with different dimensions to the input image.
type ExcessMode int
const (
// Ignore any "left over" space. So the resultant Rectangles may be smaller
// than the original given.
IGNORE ExcessMode = iota
// Add the excess to the right- and bottom-most Rectangles. So the resultant
// Rectangles will be the same size, but some edge Rectangles may be larger.
ADD
// Separate the excess into new Rectangles. So the resultant Rectangles will
// be the same size, but some edge Rectangles may be smaller.
SEPARATE
)
// chopRectangle is the "god" function for rectangle chopping. It takes a
// Rectangle, with the number of rows and columns to split into, along with
// the height and width of the rows and columns, and produces a list of
// Rectangles.
//
// The produced Rectangles are anchored at rect.Min. The previous version
// always anchored at the origin, which yielded wrong results for any rect
// whose Min is not (0, 0).
func chopRectangle(rect image.Rectangle, rows, cols, rowHeight, colWidth int, mode ExcessMode) []image.Rectangle {
	width := rect.Dx()
	height := rect.Dy()
	excessWidth := width % (cols * colWidth)
	excessHeight := height % (rows * rowHeight)
	// cell returns the sub-rectangle spanning the given local coordinates,
	// translated into rect's coordinate space.
	cell := func(x0, y0, x1, y1 int) image.Rectangle {
		return image.Rectangle{
			rect.Min.Add(image.Point{x0, y0}),
			rect.Min.Add(image.Point{x1, y1}),
		}
	}
	rs := make([]image.Rectangle, cols*rows)
	i := 0
	for col := 0; col < cols; col++ {
		localWidth := 0
		// If in the last column, fold the excess in.
		if mode == ADD && cols == col+1 {
			localWidth = excessWidth
		}
		for row := 0; row < rows; row++ {
			localHeight := 0
			// If in the last row, fold the excess in.
			if mode == ADD && rows == row+1 {
				localHeight = excessHeight
			}
			rs[i] = cell(col*colWidth, row*rowHeight,
				(col+1)*colWidth+localWidth, (row+1)*rowHeight+localHeight)
			i++
		}
	}
	if mode == SEPARATE {
		// Bottom row of excess.
		if excessHeight > 0 {
			for col := 0; col < cols; col++ {
				rs = append(rs, cell(col*colWidth, rows*rowHeight,
					(col+1)*colWidth, rows*rowHeight+excessHeight))
			}
		}
		// Rightmost column of excess.
		if excessWidth > 0 {
			for row := 0; row < rows; row++ {
				rs = append(rs, cell(cols*colWidth, row*rowHeight,
					cols*colWidth+excessWidth, (row+1)*rowHeight))
			}
		}
		// Bottom-right corner of excess.
		if excessHeight > 0 && excessWidth > 0 {
			rs = append(rs, cell(cols*colWidth, rows*rowHeight,
				cols*colWidth+excessWidth, rows*rowHeight+excessHeight))
		}
	}
	return rs
}
// ChopRectangle splits a Rectangle into the number of rows and columns given.
func ChopRectangle(rect image.Rectangle, rows, cols int, mode ExcessMode) []image.Rectangle {
	colWidth := rect.Dx() / cols
	rowHeight := rect.Dy() / rows
	return chopRectangle(rect, rows, cols, rowHeight, colWidth, mode)
}
// ChopRectangleToSizes splits a Rectangle into smaller Rectangles with the size
// given.
func ChopRectangleToSizes(rect image.Rectangle, rowHeight, colWidth int, mode ExcessMode) []image.Rectangle {
width := rect.Dx()
height := rect.Dy()
cols := width / colWidth
rows := height / rowHeight
return chopRectangle(rect, rows, cols, rowHeight, colWidth, mode)
} | utils/image.go | 0.786582 | 0.690781 | image.go | starcoder |
package hoi
import (
"fmt"
"github.com/mg/i"
)
// zip drives a set of Forward iterators in lock step, ending with the
// shortest stream.
type zip struct {
	itrs  []i.Forward // the underlying streams, advanced together
	err   error       // first error observed, if any
	atEnd bool        // latched once exhaustion has been detected
}
// The Zip iterator will zip together a collection of data streams, stopping
// after the shortest data stream is finished. Given e.g. the data streams
// [1,2,3], [5,6,7] and [10,11,12,13] Zip will provide access to the data
// stream [[1,5,10], [2,6,11], [3,7,12]]. Each Value() is a []interface{}
// holding one element per input iterator.
func Zip(itrs ...i.Forward) i.Forward {
	return &zip{itrs: itrs}
}
// Error returns the first error recorded by the iterator, if any.
func (z *zip) Error() error {
	return z.err
}

// SetError lets callers inject an error state into the iterator.
func (z *zip) SetError(err error) {
	z.err = err
}
// Value returns the current tuple: a slice with one value per input iterator.
// Calling it once the iterator is exhausted records an error and returns nil.
func (z *zip) Value() interface{} {
	if z.atEnd {
		z.err = fmt.Errorf("Calling Value() at end")
		return nil
	}
	ret := make([]interface{}, len(z.itrs))
	for idx, itr := range z.itrs {
		ret[idx] = itr.Value()
	}
	return ret
}
// Next advances every input iterator that is not already at its end, stopping
// early and recording the error if an underlying Next fails. Calling it once
// the zip itself is exhausted records and returns an error.
func (z *zip) Next() error {
	if z.atEnd {
		z.err = fmt.Errorf("Calling Next() at end")
		return z.err
	}
	for _, itr := range z.itrs {
		if !itr.AtEnd() {
			err := itr.Next()
			if err != nil {
				z.err = err
				break
			}
		}
	}
	return z.err
}
// AtEnd reports whether the zip is finished, which is as soon as ANY input
// iterator is exhausted (shortest-stream semantics).
func (z *zip) AtEnd() bool {
	for _, itr := range z.itrs {
		if itr.AtEnd() {
			z.atEnd = true
			return true
		}
	}
	return false
}
// ziplongest embeds zip but keeps going until the longest stream is finished.
type ziplongest struct {
	zip
}

// The ZipLongest iterator will zip together a collection of data streams, not
// stopping until the longest data stream is finished. Given e.g. the data
// streams [1,2,3], [5,6,7] and [10,11,12,13] ZipLongest will provide access
// to the data stream [[1,5,10], [2,6,11], [3,7,12], [nil, nil, 13]].
func ZipLongest(itrs ...i.Forward) i.Forward {
	return &ziplongest{zip{itrs: itrs}}
}
// Value returns the current tuple; inputs that are already exhausted
// contribute nil in their slot.
func (z *ziplongest) Value() interface{} {
	if z.atEnd {
		z.err = fmt.Errorf("Calling Value() at end")
		return nil
	}
	ret := make([]interface{}, len(z.itrs))
	for idx, itr := range z.itrs {
		if !itr.AtEnd() {
			ret[idx] = itr.Value()
		} else {
			ret[idx] = nil
		}
	}
	return ret
}
func (z *ziplongest) AtEnd() bool {
atEndCount := 0
for _, itr := range z.itrs {
if itr.AtEnd() {
atEndCount++
}
}
z.atEnd = atEndCount == len(z.itrs)
return z.atEnd
} | hoi/zip.go | 0.560253 | 0.405096 | zip.go | starcoder |
package gameoflife
// Cell states: a cell is either dead, newly born this generation, or was
// already alive.
const (
	CellStateDead    = 0
	CellStateNewCell = 1
	CellStateAlive   = 2
)

// CellState encodes the state of a single cell in the universe.
type CellState int
// isAlive reports whether the state counts as a living cell (newly born or
// surviving).
func (state CellState) isAlive() bool {
	return state == CellStateNewCell || state == CellStateAlive
}
// Universe is a Width x Height grid of cells. Cells is indexed
// [row][column]: it has Height rows of Width cells each.
type Universe struct {
	Width, Height int
	Cells         [][]CellState
}
// NewUniverse allocates a Width x Height universe with every cell dead.
func NewUniverse(width, height int) *Universe {
	cells := make([][]CellState, height)
	for row := range cells {
		cells[row] = make([]CellState, width)
		for col := range cells[row] {
			cells[row][col] = CellStateDead
		}
	}
	return &Universe{Width: width, Height: height, Cells: cells}
}
// NewUniverseFromLiveCoordinates builds a universe in which exactly the cells
// whose {row, column} coordinates appear in liveCells start out alive.
func NewUniverseFromLiveCoordinates(width, height int, liveCells [][2]int) *Universe {
	universe := NewUniverse(width, height)
	for row := 0; row < height; row++ {
		for col := 0; col < width; col++ {
			if contains(liveCells, [2]int{row, col}) {
				universe.Cells[row][col] = CellStateAlive
			}
		}
	}
	return universe
}
// Neighbours is the 3x3 neighbourhood around a cell (centre included).
type Neighbours [3][3]CellState

// NewNeighbours returns a neighbourhood with every slot dead. The zero value
// of [3][3]CellState is already CellStateDead (0), so the element-by-element
// initialisation loop of the previous version had no effect and is gone.
func NewNeighbours() *Neighbours {
	return &Neighbours{}
}
// numberOfAliveCells counts the living cells in the neighbourhood, centre
// cell included.
func (neighbours *Neighbours) numberOfAliveCells() int {
	alive := 0
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			if neighbours[i][j].isAlive() {
				alive++
			}
		}
	}
	return alive
}
// neighbourCoordinates returns the 3x3 grid of coordinates centred on (x, y),
// rows ordered x-1, x, x+1 and columns y-1, y, y+1.
func neighbourCoordinates(x, y int) [3][3][2]int {
	var coords [3][3][2]int
	for di := -1; di <= 1; di++ {
		for dj := -1; dj <= 1; dj++ {
			coords[di+1][dj+1] = [2]int{x + di, y + dj}
		}
	}
	return coords
}
// neighbours collects the 3x3 neighbourhood of the cell at (x, y), where x is
// the row index and y the column index; slots outside the grid stay dead.
//
// Fix: the old bounds test compared the row against Width and the column
// against Height, which reads out of range (or silently truncates) on any
// non-square universe. Rows are bounded by Height, columns by Width.
func (universe *Universe) neighbours(x, y int) *Neighbours {
	neighbours := NewNeighbours()
	for i, row := range neighbourCoordinates(x, y) {
		for j, coord := range row {
			nx, ny := coord[0], coord[1]
			if nx < 0 || ny < 0 || nx >= universe.Height || ny >= universe.Width {
				continue
			}
			neighbours[i][j] = universe.Cells[nx][ny]
		}
	}
	return neighbours
}
// RefreshUniverse advances the universe by one generation, applying the rules
// to every cell simultaneously.
//
// Fix: the previous version "copied" the grid with copy(dst, src) on a
// [][]CellState, which only duplicates the row headers — the scratch grid
// shared its rows with the live one, so writes leaked into the neighbour
// counts of cells processed later in the same generation.
func (universe *Universe) RefreshUniverse() {
	next := NewUniverse(universe.Width, universe.Height)
	for x, row := range universe.Cells {
		for y, state := range row {
			aliveNeighbours := universe.neighbours(x, y).numberOfAliveCells()
			// numberOfAliveCells includes the centre cell; exclude it.
			if state.isAlive() {
				aliveNeighbours--
			}
			next.Cells[x][y] = nextCellStatus(state, aliveNeighbours)
		}
	}
	universe.Cells = next.Cells
}
// nextCellStatus applies Conway's rules to a single cell given the number of
// living neighbours (excluding the cell itself).
func nextCellStatus(currentCellStatus CellState, numberOfAliveCellsExceptCurrent int) CellState {
	if currentCellStatus.isAlive() {
		// Survival with exactly two or three neighbours; otherwise death.
		if numberOfAliveCellsExceptCurrent == 2 || numberOfAliveCellsExceptCurrent == 3 {
			return CellStateAlive
		}
		return CellStateDead
	}
	// A dead cell with exactly three neighbours is born.
	if numberOfAliveCellsExceptCurrent == 3 {
		return CellStateNewCell
	}
	return CellStateDead
}
// contains reports whether coordinate appears in the list of coordinates.
func contains(coordinates [][2]int, coordinate [2]int) bool {
	for idx := range coordinates {
		if coordinates[idx] == coordinate {
			return true
		}
	}
	return false
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.