code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package main import ( "fmt" "gopkg.in/yaml.v3" ) const ( // Filename for the template file in any directory. templateFile = "template.yml" // prefix added to the filename to indicate a query has // been moved to it's own file. queryPrefix = "file://" ) // An object is the basic type for all templates. // We use yaml.Nodes for the metadata and spec because the exact // format of these varies depending on the kind. type object struct { APIVersion string `yaml:"apiVersion"` Kind string `yaml:"kind"` Metadata yaml.Node `yaml:"metadata"` Spec yaml.Node `yaml:"spec"` } // The different kinds of object that we can receive. // Only those objects which contain queries are included here. const ( kindCheck string = "CheckThreshold" kindDashboard string = "Dashboard" kindLabel string = "Label" kindTask string = "Task" ) // A queryNode contains the name for the query, generated from the chart/task/check // it belongs to, and a pointer to the node so it may be updated. type queryNode struct { Name string Node *yaml.Node } // walkDashboard walks a dashboard spec, and finds all of the query nodes. // We name each query node after the chart it is in, using the chart name // and type. If a chart has multiple queries, or two charts have the same // name and type, the names will not be unique. func walkDashboard(spec *yaml.Node) []queryNode { var queryNodes []queryNode // The query nodes can be found at spec.charts[].queries[].query // where a [] indicates there is an array of charts/queries. 
charts := walkNode(spec, "charts").Content for _, c := range charts { queries := walkNode(c, "queries").Content var nodes []*yaml.Node for _, q := range queries { nodes = append(nodes, walkNode(q, "query")) } chartName := walkNode(c, "name").Value chartKind := walkNode(c, "kind").Value name := fmt.Sprintf("%s_%s", chartName, chartKind) for _, node := range nodes { queryNodes = append(queryNodes, queryNode{Name: name, Node: node}) } } return queryNodes } // walkTask walks a task spec, and finds the query node. // This returns a list to match the other walk function, but will only // ever contain one member. func walkTask(spec *yaml.Node) []queryNode { return []queryNode{{ Name: "query", Node: walkNode(spec, "query"), }} } // walkCheck walks a check spec and finds the query node. // This returns a list to match the other walk function, but will only // ever contain one member. func walkCheck(spec *yaml.Node) []queryNode { return []queryNode{{ Name: "query", Node: walkNode(spec, "query"), }} } // walkNode will walk a node's children, looking for the value // node that matches the key. func walkNode(node *yaml.Node, key string) *yaml.Node { // Within a node's content, nodes are grouped in pairs. // The first node in a pair is a scalar string node, with the key as value. // The second node in the pair is the value. for i := 0; i < len(node.Content); i += 2 { if node.Content[i].Value == key { return node.Content[i+1] } } return &yaml.Node{} }
types.go
0.674694
0.435181
types.go
starcoder
package tally // UTally is a counter of type uint. type UTally uint // Cur returns the current uint value of this UTally. func (t UTally) Cur() uint { return uint(t) } // Add increases this counter by the given value, returning the previous // uint value of this UTally. func (t *UTally) Add(i uint) (cur uint) { cur = uint(*t) *t += UTally(i) return } // Inc increases this counter by 1, returning the previous uint value of // this UTally. func (t *UTally) Inc() (cur uint) { cur = uint(*t) *t++ return } // Dec decreases this counter by 1, returning the previous uint value of // this UTally. func (t *UTally) Dec() (cur uint) { cur = uint(*t) *t-- return } // Sub decreases this counter by the given value, returning the previous // uint value of this UTally. func (t *UTally) Sub(i uint) (cur uint) { cur = uint(*t) *t -= UTally(i) return } // Zero sets the current counter value to 0, returning the previous uint // value of this UTally. func (t *UTally) Zero() (cur uint) { cur = uint(*t) *t = 0 return } // UTally8 is a counter of type uint8. type UTally8 uint8 // Cur returns the current uint8 value of this UTally8. func (t UTally8) Cur() uint8 { return uint8(t) } // Add increases this counter by the given value, returning the previous // uint8 value of this UTally8. func (t *UTally8) Add(i uint8) (cur uint8) { cur = uint8(*t) *t += UTally8(i) return } // Inc increases this counter by 1, returning the previous uint8 value of // this UTally8. func (t *UTally8) Inc() (cur uint8) { cur = uint8(*t) *t++ return } // Dec decreases this counter by 1, returning the previous uint8 value of // this UTally8. func (t *UTally8) Dec() (cur uint8) { cur = uint8(*t) *t-- return } // Sub decreases this counter by the given value, returning the previous // uint8 value of this UTally8. func (t *UTally8) Sub(i uint8) (cur uint8) { cur = uint8(*t) *t -= UTally8(i) return } // Zero sets the current counter value to 0, returning the previous uint8 // value of this UTally8. 
func (t *UTally8) Zero() (cur uint8) { cur = uint8(*t) *t = 0 return } // UTally16 is a counter of type uint16. type UTally16 uint16 // Cur returns the current uint16 value of this UTally16. func (t UTally16) Cur() uint16 { return uint16(t) } // Add increases this counter by the given value, returning the previous // uint16 value of this UTally16. func (t *UTally16) Add(i uint16) (cur uint16) { cur = uint16(*t) *t += UTally16(i) return } // Inc increases this counter by 1, returning the previous uint16 value of // this UTally16. func (t *UTally16) Inc() (cur uint16) { cur = uint16(*t) *t++ return } // Dec decreases this counter by 1, returning the previous uint16 value of // this UTally16. func (t *UTally16) Dec() (cur uint16) { cur = uint16(*t) *t-- return } // Sub decreases this counter by the given value, returning the previous // uint16 value of this UTally16. func (t *UTally16) Sub(i uint16) (cur uint16) { cur = uint16(*t) *t -= UTally16(i) return } // Zero sets the current counter value to 0, returning the previous uint16 // value of this UTally16. func (t *UTally16) Zero() (cur uint16) { cur = uint16(*t) *t = 0 return } // UTally32 is a counter of type uint32. type UTally32 uint32 // Cur returns the current uint32 value of this UTally32. func (t UTally32) Cur() uint32 { return uint32(t) } // Add increases this counter by the given value, returning the previous // uint32 value of this UTally32. func (t *UTally32) Add(i uint32) (cur uint32) { cur = uint32(*t) *t += UTally32(i) return } // Inc increases this counter by 1, returning the previous uint32 value of // this UTally32. func (t *UTally32) Inc() (cur uint32) { cur = uint32(*t) *t++ return } // Dec decreases this counter by 1, returning the previous uint32 value of // this UTally32. func (t *UTally32) Dec() (cur uint32) { cur = uint32(*t) *t-- return } // Sub decreases this counter by the given value, returning the previous // uint32 value of this UTally32. 
func (t *UTally32) Sub(i uint32) (cur uint32) { cur = uint32(*t) *t -= UTally32(i) return } // Zero sets the current counter value to 0, returning the previous uint32 // value of this UTally32. func (t *UTally32) Zero() (cur uint32) { cur = uint32(*t) *t = 0 return } // UTally64 is a counter of type uint64. type UTally64 uint64 // Cur returns the current uint64 value of this UTally64. func (t UTally64) Cur() uint64 { return uint64(t) } // Add increases this counter by the given value, returning the previous // uint64 value of this UTally64. func (t *UTally64) Add(i uint64) (cur uint64) { cur = uint64(*t) *t += UTally64(i) return } // Inc increases this counter by 1, returning the previous uint64 value of // this UTally64. func (t *UTally64) Inc() (cur uint64) { cur = uint64(*t) *t++ return } // Dec decreases this counter by 1, returning the previous uint64 value of // this UTally64. func (t *UTally64) Dec() (cur uint64) { cur = uint64(*t) *t-- return } // Sub decreases this counter by the given value, returning the previous // uint64 value of this UTally64. func (t *UTally64) Sub(i uint64) (cur uint64) { cur = uint64(*t) *t -= UTally64(i) return } // Zero sets the current counter value to 0, returning the previous uint64 // value of this UTally64. func (t *UTally64) Zero() (cur uint64) { cur = uint64(*t) *t = 0 return }
v1/tally/uints.go
0.790004
0.638765
uints.go
starcoder
package geojson

import (
	"github.com/tidwall/tile38/pkg/geojson/geo"
	"github.com/tidwall/tile38/pkg/geojson/geohash"
	"github.com/tidwall/tile38/pkg/geojson/poly"
)

// Point is a geojson object with the type "Point"
type Point struct {
	Coordinates Position // the point's position
	BBox        *BBox    // never nil after fillPoint; calculated when the input had none
	bboxDefined bool     // true only when the bbox came from the input itself
}

// fillSimplePointOrPoint picks the cheaper representation for parsed
// coordinates: a simple point when there is no Z value and no bbox,
// otherwise a full Point. The incoming err is threaded through unchanged.
func fillSimplePointOrPoint(coordinates Position, bbox *BBox, err error) (Object, error) {
	if coordinates.Z == 0 && bbox == nil {
		return fillSimplePoint(coordinates, bbox, err)
	}
	return fillPoint(coordinates, bbox, err)
}

// fillPoint builds a Point from parsed coordinates. When the caller supplied
// no bbox one is calculated up front, and bboxDefined records which case
// applied. The incoming err is returned as-is.
func fillPoint(coordinates Position, bbox *BBox, err error) (Point, error) {
	bboxDefined := bbox != nil
	if !bboxDefined {
		cbbox := level1CalculatedBBox(coordinates, nil)
		bbox = &cbbox
	}
	return Point{
		Coordinates: coordinates,
		BBox:        bbox,
		bboxDefined: bboxDefined,
	}, err
}

// CalculatedBBox is exterior bbox containing the object.
func (g Point) CalculatedBBox() BBox {
	return level1CalculatedBBox(g.Coordinates, g.BBox)
}

// CalculatedPoint is a point representation of the object.
// Without an explicit bbox this is the coordinate itself; otherwise it is
// the center of the calculated bbox.
func (g Point) CalculatedPoint() Position {
	if g.BBox == nil {
		return g.Coordinates
	}
	return g.CalculatedBBox().center()
}

// Geohash converts the object to a geohash value.
// Note the argument order: latitude (Y) is passed first.
func (g Point) Geohash(precision int) (string, error) {
	p := g.CalculatedPoint()
	return geohash.Encode(p.Y, p.X, precision)
}

// MarshalJSON allows the object to be encoded in json.Marshal calls.
func (g Point) MarshalJSON() ([]byte, error) {
	return g.appendJSON(nil), nil
}

// appendJSON appends this object's GeoJSON form to json and returns the
// extended slice.
func (g Point) appendJSON(json []byte) []byte {
	return appendLevel1JSON(json, "Point", g.Coordinates, g.BBox, g.bboxDefined)
}

// JSON is the json representation of the object. This might not be exactly the same as the original.
func (g Point) JSON() string {
	return string(g.appendJSON(nil))
}

// String returns a string representation of the object. This might be JSON or something else.
func (g Point) String() string {
	return g.JSON()
}

// PositionCount return the number of coordinates.
func (g Point) PositionCount() int {
	return level1PositionCount(g.Coordinates, g.BBox)
}

// Weight returns the in-memory size of the object.
func (g Point) Weight() int {
	return level1Weight(g.Coordinates, g.BBox)
}

// bboxPtr exposes the raw (possibly caller-supplied) bbox pointer.
func (g Point) bboxPtr() *BBox {
	return g.BBox
}

// hasPositions reports whether the object carries coordinates; always true
// for a Point.
func (g Point) hasPositions() bool {
	return true
}

// WithinBBox detects if the object is fully contained inside a bbox.
// With an explicit bbox the object's whole rect must be inside; otherwise
// just the coordinate is tested.
func (g Point) WithinBBox(bbox BBox) bool {
	if g.bboxDefined {
		return rectBBox(g.CalculatedBBox()).InsideRect(rectBBox(bbox))
	}
	return poly.Point(g.Coordinates).InsideRect(rectBBox(bbox))
}

// IntersectsBBox detects if the object intersects a bbox.
// For a bare point, intersecting a rectangle is the same as being inside it.
func (g Point) IntersectsBBox(bbox BBox) bool {
	if g.bboxDefined {
		return rectBBox(g.CalculatedBBox()).IntersectsRect(rectBBox(bbox))
	}
	return poly.Point(g.Coordinates).InsideRect(rectBBox(bbox))
}

// Within detects if the object is fully contained inside another object.
func (g Point) Within(o Object) bool {
	return withinObjectShared(g, o,
		func(v Polygon) bool {
			return poly.Point(g.Coordinates).Inside(polyExteriorHoles(v.Coordinates))
		},
	)
}

// WithinCircle detects if the object is fully contained inside a circle.
// NOTE(review): uses a strict '<' while IntersectsCircle and Nearby use
// '<=' — confirm whether a point exactly on the radius should count here.
func (g Point) WithinCircle(center Position, meters float64) bool {
	return geo.DistanceTo(g.Coordinates.Y, g.Coordinates.X, center.Y, center.X) < meters
}

// Intersects detects if the object intersects another object.
func (g Point) Intersects(o Object) bool {
	return intersectsObjectShared(g, o,
		func(v Polygon) bool {
			return poly.Point(g.Coordinates).Intersects(polyExteriorHoles(v.Coordinates))
		},
	)
}

// IntersectsCircle detects if the object intersects a circle.
func (g Point) IntersectsCircle(center Position, meters float64) bool {
	return geo.DistanceTo(g.Coordinates.Y, g.Coordinates.X, center.Y, center.X) <= meters
}

// Nearby detects if the object is nearby a position.
func (g Point) Nearby(center Position, meters float64) bool {
	return geo.DistanceTo(g.Coordinates.Y, g.Coordinates.X, center.Y, center.X) <= meters
}

// IsBBoxDefined returns true if the object has a defined bbox.
func (g Point) IsBBoxDefined() bool {
	return g.bboxDefined
}

// IsGeometry return true if the object is a geojson geometry object. false if it something else.
func (g Point) IsGeometry() bool {
	return true
}

// Clipped returns the object obtained by clipping this object by a bbox.
// A point either survives intact or clips away to an empty MultiPoint.
func (g Point) Clipped(bbox BBox) Object {
	if g.IntersectsBBox(bbox) {
		return g
	}
	res, _ := fillMultiPoint([]Position{}, nil, nil)
	return res
}
pkg/geojson/point.go
0.846483
0.553204
point.go
starcoder
package processor

import (
	"time"

	"github.com/Jeffail/benthos/v3/internal/docs"
	"github.com/Jeffail/benthos/v3/lib/log"
	"github.com/Jeffail/benthos/v3/lib/message"
	"github.com/Jeffail/benthos/v3/lib/metrics"
	"github.com/Jeffail/benthos/v3/lib/response"
	"github.com/Jeffail/benthos/v3/lib/types"
)

//------------------------------------------------------------------------------

// init registers the split processor's constructor and documentation in the
// global Constructors table.
func init() {
	Constructors[TypeSplit] = TypeSpec{
		constructor: NewSplit,
		Categories: []Category{
			CategoryUtility,
		},
		Summary: ` Breaks message batches (synonymous with multiple part messages) into smaller batches. The size of the resulting batches are determined either by a discrete size or, if the field ` + "`byte_size`" + ` is non-zero, then by total size in bytes (which ever limit is reached first).`,
		Description: ` This processor is for breaking batches down into smaller ones. In order to break a single message out into multiple messages use the ` + "[`unarchive` processor](/docs/components/processors/unarchive)" + `. If there is a remainder of messages after splitting a batch the remainder is also sent as a single batch. For example, if your target size was 10, and the processor received a batch of 95 message parts, the result would be 9 batches of 10 messages followed by a batch of 5 messages.`,
		UsesBatches: true,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("size", "The target number of messages."),
			docs.FieldCommon("byte_size", "An optional target of total message bytes."),
		},
	}
}

//------------------------------------------------------------------------------

// SplitConfig is a configuration struct containing fields for the Split
// processor, which breaks message batches down into batches of a smaller size.
type SplitConfig struct {
	Size     int `json:"size" yaml:"size"`
	ByteSize int `json:"byte_size" yaml:"byte_size"`
}

// NewSplitConfig returns a SplitConfig with default values
// (part-count target of 1, byte-size target disabled).
func NewSplitConfig() SplitConfig {
	return SplitConfig{
		Size:     1,
		ByteSize: 0,
	}
}

//------------------------------------------------------------------------------

// Split is a processor that splits messages into a message per part.
type Split struct {
	log   log.Modular
	stats metrics.Type

	size     int // target number of parts per output batch; 0 disables the count limit
	byteSize int // target total bytes per output batch; 0 disables the byte limit

	mCount     metrics.StatCounter
	mDropped   metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}

// NewSplit returns a Split processor configured from conf.Split.
func NewSplit(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	return &Split{
		log:   log,
		stats: stats,

		size:     conf.Split.Size,
		byteSize: conf.Split.ByteSize,

		mCount:     stats.GetCounter("count"),
		mDropped:   stats.GetCounter("dropped"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}, nil
}

//------------------------------------------------------------------------------

// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
//
// Parts are accumulated into an output batch until adding the next part
// would break either the part-count target (size) or the byte-size target
// (byte_size), at which point the batch is emitted and a fresh one started.
// Any remainder is emitted as a final, smaller batch.
func (s *Split) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	s.mCount.Incr(1)

	if msg.Len() == 0 {
		// Nothing to split; count it as dropped and acknowledge upstream.
		s.mDropped.Incr(1)
		return nil, response.NewAck()
	}

	msgs := []types.Message{}

	nextMsg := message.New(nil)
	byteSize := 0

	msg.Iter(func(i int, p types.Part) error {
		if (s.size > 0 && nextMsg.Len() >= s.size) ||
			(s.byteSize > 0 && (byteSize+len(p.Get())) > s.byteSize) {
			if nextMsg.Len() > 0 {
				// Current batch is full: emit it and start a new one.
				msgs = append(msgs, nextMsg)
				nextMsg = message.New(nil)
				byteSize = 0
			} else {
				// A limit tripped while the batch was still empty, which can
				// only be the byte limit: this single part alone exceeds the
				// target. It is still appended and sent as its own batch.
				s.log.Warnf("A single message exceeds the target batch byte size of '%v', actual size: '%v'", s.byteSize, len(p.Get()))
			}
		}
		nextMsg.Append(p)
		byteSize += len(p.Get())
		return nil
	})

	// Flush the trailing partial batch, if any.
	if nextMsg.Len() > 0 {
		msgs = append(msgs, nextMsg)
	}

	s.mBatchSent.Incr(int64(len(msgs)))
	s.mSent.Incr(int64(msg.Len()))
	return msgs, nil
}

// CloseAsync shuts down the processor and stops processing requests.
// Split holds no background resources, so this is a no-op.
func (s *Split) CloseAsync() {
}

// WaitForClose blocks until the processor has closed down.
// Always returns immediately with a nil error.
func (s *Split) WaitForClose(timeout time.Duration) error {
	return nil
}

//------------------------------------------------------------------------------
lib/processor/split.go
0.778691
0.501953
split.go
starcoder
package tree

import (
	"fmt"

	"github.com/sidheart/algorithms/util"
)

// An AVLTreeNode represents a leaf or internal node in an AVL tree
type AVLTreeNode struct {
	key           util.Comparable
	balanceFactor int8 // presumably height(right) - height(left); -1..1 between rebalances — TODO confirm
	data          interface{}

	leftChild, rightChild, parent *AVLTreeNode
}

// An AVLTree is a kind of self-balancing binary tree https://en.wikipedia.org/wiki/AVL_tree
type AVLTree struct {
	root *AVLTreeNode
}

// rotateLeft makes the given root the left subtree of its right child, effectively rotating the tree
// this operation may be better explained by the visualization here https://en.wikipedia.org/wiki/AVL_tree#Rebalancing
//
// NOTE(review): the old parent's child pointer is never redirected to the new
// subtree root, and when the new root has no left subtree, root.rightChild is
// left pointing at its former child (now its parent), forming a cycle. Verify
// before relying on these rotations.
func (root *AVLTreeNode) rotateLeft() {
	if root == nil || root.rightChild == nil {
		return
	}
	newRoot := root.rightChild
	newRightSubtree := newRoot.leftChild
	newRoot.leftChild = root
	newRoot.parent = root.parent
	root.parent = newRoot
	if newRightSubtree != nil {
		root.rightChild = newRightSubtree
		newRightSubtree.parent = root
	}
}

// rotateRight makes the given root the right subtree of its left child, effectively rotating the tree
// this operation may be better explained by the visualization here https://en.wikipedia.org/wiki/AVL_tree#Rebalancing
//
// NOTE(review): mirror of rotateLeft, with the mirrored gaps — the parent's
// child pointer is not updated and root.leftChild is stale when the new root
// had no right subtree.
func (root *AVLTreeNode) rotateRight() {
	if root == nil || root.leftChild == nil {
		return
	}
	newRoot := root.leftChild
	newLeftSubtree := newRoot.rightChild
	newRoot.rightChild = root
	newRoot.parent = root.parent
	root.parent = newRoot
	if newLeftSubtree != nil {
		root.leftChild = newLeftSubtree
		newLeftSubtree.parent = root
	}
}

// updateBalanceFactorsAfterDoubleRotate updates the balance factors of the relevant nodes which are modified during a
// left-right or right-left rotation. The root receiver should be the new node at the location where the AVL tree
// invariant was previously violated.
func (root *AVLTreeNode) updateBalanceFactorsAfterDoubleRotate() {
	if root.balanceFactor == -1 {
		root.leftChild.balanceFactor = 0
		root.rightChild.balanceFactor = 1
	} else if root.balanceFactor == 0 {
		root.leftChild.balanceFactor = 0
		root.rightChild.balanceFactor = 0
	} else {
		root.leftChild.balanceFactor = -1
		root.rightChild.balanceFactor = 0
	}
	root.balanceFactor = 0
}

// rotate fixes a violation of the AVL tree invariant at the given root
// the behavior is undefined if the given root does not violate the AVL tree invariant
//
// Note: retrace calls this without first storing the new balance factor, so
// a stored factor of -1 here means "would become -2" (and 1 means "+2").
// Returns the new subtree root when retracing must continue upward, or nil
// when the subtree height is restored and retracing can stop.
func (root *AVLTreeNode) rotate() *AVLTreeNode {
	if root == nil {
		return nil
	}
	if root.balanceFactor == -1 {
		// Because rotate() was called, this means that root is doubly left-heavy
		if root.leftChild.balanceFactor == 1 {
			// Left child is right-heavy, requires a double rotation
			root.leftChild.rotateLeft()
			root.rotateRight()
			root.parent.updateBalanceFactorsAfterDoubleRotate()
			return nil
		} else {
			// A simple right rotation will fix this violation
			root.rotateRight()
			// Update balance factors
			newRoot := root.parent
			if newRoot.balanceFactor == 0 {
				newRoot.balanceFactor = 1
				return newRoot
			} else {
				// The previous balance factor must have been -1, if it were 1, we would have called rotateLeftRight()
				newRoot.balanceFactor = 0
				root.balanceFactor = 0
				return nil
			}
		}
	} else if root.balanceFactor == 1 {
		// Because rotate() was called, this means that root is doubly right-heavy
		if root.rightChild.balanceFactor == -1 {
			// Right child is left-heavy, requires a double rotation
			root.rightChild.rotateRight()
			root.rotateLeft()
			root.parent.updateBalanceFactorsAfterDoubleRotate()
			return nil // The AVL tree invariant now holds for the entire tree
		} else {
			// A simple left rotation will fix this violation
			root.rotateLeft()
			newRoot := root.parent
			// Update balance factors
			if newRoot.balanceFactor == 0 {
				newRoot.balanceFactor = -1
				return newRoot
			} else {
				// The previous balance factor must have been 1, if it were -1, we would have called rotateRightLeft()
				newRoot.balanceFactor = 0
				root.balanceFactor = 0
				return nil
			}
		}
	} else {
		// root does not violate the AVL tree invariant, rotate() should not have been called
		fmt.Println("rotate() called on non-violating receiver")
		return nil
	}
}

// retrace updates the balance factors of nodes along a leaf to root path and performs the necessary rotations to
// ensure that the AVL tree invariant holds
//
// addend is -1 when the caller's left subtree grew, +1 when the right did.
func (root *AVLTreeNode) retrace(addend int8) {
	if root == nil {
		return
	}
	var newRoot *AVLTreeNode
	tempBalance := root.balanceFactor + addend
	if tempBalance > 1 || tempBalance < -1 {
		// Violation of the invariant, rotation is necessary
		newRoot = root.rotate()
	} else {
		newRoot = root
		newRoot.balanceFactor = tempBalance
	}
	// Continue retracing up to the root, or until a particular subtree's height does not change
	if newRoot == nil || newRoot.balanceFactor == 0 || newRoot.parent == nil {
		return
	} else if newRoot.parent.leftChild == newRoot {
		newRoot.parent.retrace(-1)
	} else {
		newRoot.parent.retrace(1)
	}
}

// search performs a binary search for data matching the key in O(log(n)) time
// Recursing into a nil child is safe: the nil-receiver guard returns the
// zero values (nil, false).
func (root *AVLTreeNode) search(key util.Comparable) (data interface{}, ok bool) {
	if root == nil {
		return
	}
	rootKey := root.key
	if key.Compare(rootKey) == 0 {
		return root.data, true
	} else if key.Compare(rootKey) < 0 {
		return root.leftChild.search(key)
	} else {
		return root.rightChild.search(key)
	}
}

// traverse applies f to every element of the tree in order
func (root *AVLTreeNode) traverse(f func(interface{})) {
	if root == nil {
		return
	}
	if root.leftChild != nil {
		root.leftChild.traverse(f)
	}
	f(root.data)
	if root.rightChild != nil {
		root.rightChild.traverse(f)
	}
}

// insert inserts the given key and data into the tree in O(log(n)) time
// Duplicate keys are permitted and are placed in the left subtree.
func (root *AVLTreeNode) insert(key util.Comparable, data interface{}) {
	if root == nil {
		return
	}
	rootKey := root.key
	if key.Compare(rootKey) <= 0 {
		if root.leftChild == nil {
			root.leftChild = &AVLTreeNode{key, 0, data, nil, nil, root}
			root.retrace(-1)
		} else {
			root.leftChild.insert(key, data)
		}
	} else {
		if root.rightChild == nil {
			root.rightChild = &AVLTreeNode{key, 0, data, nil, nil, root}
			root.retrace(1)
		} else {
			root.rightChild.insert(key, data)
		}
	}
}

// delete deletes the first instance of data matching the given key from the tree in O(log(n)) time
//
// NOTE(review): several gaps to confirm before use: the replace-with-left
// branch dereferences root.rightChild and the replace-with-right branch
// dereferences root.leftChild, each of which is nil whenever the node has
// exactly one child (nil-pointer panic); with two children, one subtree is
// re-parented but never attached under the replacement; balance factors are
// not retraced after removal; and AVLTree.root is never updated when the
// tree's root node is the one deleted.
func (root *AVLTreeNode) delete(key util.Comparable) (deleted interface{}, ok bool) {
	if root == nil {
		return
	}
	rootKey := root.key
	if key.Compare(rootKey) == 0 {
		parent := root.parent
		if root.leftChild == nil && root.rightChild == nil {
			// This node is a leaf, fairly simple to delete
			if parent != nil {
				if parent.leftChild == root {
					parent.leftChild = nil
				} else {
					parent.rightChild = nil
				}
			}
		} else if root.leftChild != nil {
			// Replace the node with its left child
			newRoot := root.leftChild
			root.rightChild.parent = newRoot
			newRoot.parent = parent
			if parent != nil {
				if parent.leftChild == root {
					parent.leftChild = newRoot
				} else {
					parent.rightChild = newRoot
				}
			}
		} else {
			// Replace the node with its right child
			newRoot := root.rightChild
			root.leftChild.parent = newRoot
			newRoot.parent = parent
			if parent != nil {
				if parent.leftChild == root {
					parent.leftChild = newRoot
				} else {
					parent.rightChild = newRoot
				}
			}
		}
		return root.data, true
	} else if key.Compare(rootKey) < 0 {
		return root.leftChild.delete(key)
	} else {
		return root.rightChild.delete(key)
	}
}

// Search performs a binary search for data matching the key in O(log(n)) time
func (tree AVLTree) Search(key util.Comparable) (data interface{}, ok bool) {
	return tree.root.search(key)
}

// Traverse applies f to every element of the tree in order.
func (tree AVLTree) Traverse(f func(interface{})) {
	tree.root.traverse(f)
}

// Insert inserts the given key and data into the tree.
//
// NOTE(review): the value receiver means the assignment to tree.root below
// mutates a copy — an insert into an empty tree is silently lost. Confirm
// whether this should be a pointer receiver.
func (tree AVLTree) Insert(key util.Comparable, data interface{}) {
	if tree.root == nil {
		tree.root = &AVLTreeNode{key, 0, data, nil, nil, nil}
	} else {
		tree.root.insert(key, data)
	}
}
datastructures/tree/avltree.go
0.744656
0.434821
avltree.go
starcoder
package cbor import ( "encoding/json" "fmt" "math" "net" ) // AppendNil inserts a 'Nil' object into the dst byte array. func (Encoder) AppendNil(dst []byte) []byte { return append(dst, byte(majorTypeSimpleAndFloat|additionalTypeNull)) } // AppendBeginMarker inserts a map start into the dst byte array. func (Encoder) AppendBeginMarker(dst []byte) []byte { return append(dst, byte(majorTypeMap|additionalTypeInfiniteCount)) } // AppendEndMarker inserts a map end into the dst byte array. func (Encoder) AppendEndMarker(dst []byte) []byte { return append(dst, byte(majorTypeSimpleAndFloat|additionalTypeBreak)) } // AppendObjectData takes an object in form of a byte array and appends to dst. func (Encoder) AppendObjectData(dst []byte, o []byte) []byte { // BeginMarker is present in the dst, which // should not be copied when appending to existing data. return append(dst, o[1:]...) } // AppendArrayStart adds markers to indicate the start of an array. func (Encoder) AppendArrayStart(dst []byte) []byte { return append(dst, byte(majorTypeArray|additionalTypeInfiniteCount)) } // AppendArrayEnd adds markers to indicate the end of an array. func (Encoder) AppendArrayEnd(dst []byte) []byte { return append(dst, byte(majorTypeSimpleAndFloat|additionalTypeBreak)) } // AppendArrayDelim adds markers to indicate end of a particular array element. func (Encoder) AppendArrayDelim(dst []byte) []byte { //No delimiters needed in cbor return dst } // AppendLineBreak is a noop that keep API compat with json encoder. func (Encoder) AppendLineBreak(dst []byte) []byte { // No line breaks needed in binary format. return dst } // AppendBool encodes and inserts a boolean value into the dst byte array. func (Encoder) AppendBool(dst []byte, val bool) []byte { b := additionalTypeBoolFalse if val { b = additionalTypeBoolTrue } return append(dst, byte(majorTypeSimpleAndFloat|b)) } // AppendBools encodes and inserts an array of boolean values into the dst byte array. 
func (e Encoder) AppendBools(dst []byte, vals []bool) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendBool(dst, v) } return dst } // AppendInt encodes and inserts an integer value into the dst byte array. func (Encoder) AppendInt(dst []byte, val int) []byte { major := majorTypeUnsignedInt contentVal := val if val < 0 { major = majorTypeNegativeInt contentVal = -val - 1 } if contentVal <= additionalMax { lb := byte(contentVal) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(contentVal)) } return dst } // AppendInts encodes and inserts an array of integer values into the dst byte array. func (e Encoder) AppendInts(dst []byte, vals []int) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendInt(dst, v) } return dst } // AppendInt8 encodes and inserts an int8 value into the dst byte array. func (e Encoder) AppendInt8(dst []byte, val int8) []byte { return e.AppendInt(dst, int(val)) } // AppendInts8 encodes and inserts an array of integer values into the dst byte array. func (e Encoder) AppendInts8(dst []byte, vals []int8) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendInt(dst, int(v)) } return dst } // AppendInt16 encodes and inserts a int16 value into the dst byte array. 
func (e Encoder) AppendInt16(dst []byte, val int16) []byte { return e.AppendInt(dst, int(val)) } // AppendInts16 encodes and inserts an array of int16 values into the dst byte array. func (e Encoder) AppendInts16(dst []byte, vals []int16) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendInt(dst, int(v)) } return dst } // AppendInt32 encodes and inserts a int32 value into the dst byte array. func (e Encoder) AppendInt32(dst []byte, val int32) []byte { return e.AppendInt(dst, int(val)) } // AppendInts32 encodes and inserts an array of int32 values into the dst byte array. func (e Encoder) AppendInts32(dst []byte, vals []int32) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendInt(dst, int(v)) } return dst } // AppendInt64 encodes and inserts a int64 value into the dst byte array. func (Encoder) AppendInt64(dst []byte, val int64) []byte { major := majorTypeUnsignedInt contentVal := val if val < 0 { major = majorTypeNegativeInt contentVal = -val - 1 } if contentVal <= additionalMax { lb := byte(contentVal) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(contentVal)) } return dst } // AppendInts64 encodes and inserts an array of int64 values into the dst byte array. 
func (e Encoder) AppendInts64(dst []byte, vals []int64) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendInt64(dst, v) } return dst } // AppendUint encodes and inserts an unsigned integer value into the dst byte array. func (e Encoder) AppendUint(dst []byte, val uint) []byte { return e.AppendInt64(dst, int64(val)) } // AppendUints encodes and inserts an array of unsigned integer values into the dst byte array. func (e Encoder) AppendUints(dst []byte, vals []uint) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendUint(dst, v) } return dst } // AppendUint8 encodes and inserts a unsigned int8 value into the dst byte array. func (e Encoder) AppendUint8(dst []byte, val uint8) []byte { return e.AppendUint(dst, uint(val)) } // AppendUints8 encodes and inserts an array of uint8 values into the dst byte array. func (e Encoder) AppendUints8(dst []byte, vals []uint8) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendUint8(dst, v) } return dst } // AppendUint16 encodes and inserts a uint16 value into the dst byte array. func (e Encoder) AppendUint16(dst []byte, val uint16) []byte { return e.AppendUint(dst, uint(val)) } // AppendUints16 encodes and inserts an array of uint16 values into the dst byte array. 
func (e Encoder) AppendUints16(dst []byte, vals []uint16) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendUint16(dst, v) } return dst } // AppendUint32 encodes and inserts a uint32 value into the dst byte array. func (e Encoder) AppendUint32(dst []byte, val uint32) []byte { return e.AppendUint(dst, uint(val)) } // AppendUints32 encodes and inserts an array of uint32 values into the dst byte array. func (e Encoder) AppendUints32(dst []byte, vals []uint32) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendUint32(dst, v) } return dst } // AppendUint64 encodes and inserts a uint64 value into the dst byte array. func (Encoder) AppendUint64(dst []byte, val uint64) []byte { major := majorTypeUnsignedInt contentVal := val if contentVal <= additionalMax { lb := byte(contentVal) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(contentVal)) } return dst } // AppendUints64 encodes and inserts an array of uint64 values into the dst byte array. func (e Encoder) AppendUints64(dst []byte, vals []uint64) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendUint64(dst, v) } return dst } // AppendFloat32 encodes and inserts a single precision float value into the dst byte array. 
func (Encoder) AppendFloat32(dst []byte, val float32) []byte { switch { case math.IsNaN(float64(val)): return append(dst, "\xfa\x7f\xc0\x00\x00"...) case math.IsInf(float64(val), 1): return append(dst, "\xfa\x7f\x80\x00\x00"...) case math.IsInf(float64(val), -1): return append(dst, "\xfa\xff\x80\x00\x00"...) } major := majorTypeSimpleAndFloat subType := additionalTypeFloat32 n := math.Float32bits(val) var buf [4]byte for i := uint(0); i < 4; i++ { buf[i] = byte(n >> ((3 - i) * 8)) } return append(append(dst, byte(major|subType)), buf[0], buf[1], buf[2], buf[3]) } // AppendFloats32 encodes and inserts an array of single precision float value into the dst byte array. func (e Encoder) AppendFloats32(dst []byte, vals []float32) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendFloat32(dst, v) } return dst } // AppendFloat64 encodes and inserts a double precision float value into the dst byte array. func (Encoder) AppendFloat64(dst []byte, val float64) []byte { switch { case math.IsNaN(val): return append(dst, "\xfb\x7f\xf8\x00\x00\x00\x00\x00\x00"...) case math.IsInf(val, 1): return append(dst, "\xfb\x7f\xf0\x00\x00\x00\x00\x00\x00"...) case math.IsInf(val, -1): return append(dst, "\xfb\xff\xf0\x00\x00\x00\x00\x00\x00"...) } major := majorTypeSimpleAndFloat subType := additionalTypeFloat64 n := math.Float64bits(val) dst = append(dst, byte(major|subType)) for i := uint(1); i <= 8; i++ { b := byte(n >> ((8 - i) * 8)) dst = append(dst, b) } return dst } // AppendFloats64 encodes and inserts an array of double precision float values into the dst byte array. 
func (e Encoder) AppendFloats64(dst []byte, vals []float64) []byte { major := majorTypeArray l := len(vals) if l == 0 { return e.AppendArrayEnd(e.AppendArrayStart(dst)) } if l <= additionalMax { lb := byte(l) dst = append(dst, byte(major|lb)) } else { dst = appendCborTypePrefix(dst, major, uint64(l)) } for _, v := range vals { dst = e.AppendFloat64(dst, v) } return dst } // AppendInterface takes an arbitrary object and converts it to JSON and embeds it dst. func (e Encoder) AppendInterface(dst []byte, i interface{}) []byte { marshaled, err := json.Marshal(i) if err != nil { return e.AppendString(dst, fmt.Sprintf("marshaling error: %v", err)) } return AppendEmbeddedJSON(dst, marshaled) } // AppendIPAddr encodes and inserts an IP Address (IPv4 or IPv6). func (e Encoder) AppendIPAddr(dst []byte, ip net.IP) []byte { dst = append(dst, byte(majorTypeTags|additionalTypeIntUint16)) dst = append(dst, byte(additionalTypeTagNetworkAddr>>8)) dst = append(dst, byte(additionalTypeTagNetworkAddr&0xff)) return e.AppendBytes(dst, ip) } // AppendIPPrefix encodes and inserts an IP Address Prefix (Address + Mask Length). func (e Encoder) AppendIPPrefix(dst []byte, pfx net.IPNet) []byte { dst = append(dst, byte(majorTypeTags|additionalTypeIntUint16)) dst = append(dst, byte(additionalTypeTagNetworkPrefix>>8)) dst = append(dst, byte(additionalTypeTagNetworkPrefix&0xff)) // Prefix is a tuple (aka MAP of 1 pair of elements) - // first element is prefix, second is mask length. dst = append(dst, byte(majorTypeMap|0x1)) dst = e.AppendBytes(dst, pfx.IP) maskLen, _ := pfx.Mask.Size() return e.AppendUint8(dst, uint8(maskLen)) } // AppendMACAddr encodes and inserts an Hardware (MAC) address. 
func (e Encoder) AppendMACAddr(dst []byte, ha net.HardwareAddr) []byte { dst = append(dst, byte(majorTypeTags|additionalTypeIntUint16)) dst = append(dst, byte(additionalTypeTagNetworkAddr>>8)) dst = append(dst, byte(additionalTypeTagNetworkAddr&0xff)) return e.AppendBytes(dst, ha) } // AppendHex adds a TAG and inserts a hex bytes as a string. func (e Encoder) AppendHex(dst []byte, val []byte) []byte { dst = append(dst, byte(majorTypeTags|additionalTypeIntUint16)) dst = append(dst, byte(additionalTypeTagHexString>>8)) dst = append(dst, byte(additionalTypeTagHexString&0xff)) return e.AppendBytes(dst, val) }
vendor/github.com/rs/zerolog/internal/cbor/types.go
0.697403
0.451145
types.go
starcoder
package variable import ( "fmt" "github.com/LindsayBradford/crem/internal/pkg/model/planningunit" assert "github.com/LindsayBradford/crem/pkg/assert/debug" "github.com/LindsayBradford/crem/pkg/math" "github.com/LindsayBradford/crem/pkg/strings" "sort" strings2 "strings" ) var currencyConverter = strings.NewConverter().Localised().WithFloatingPointPrecision(2).PaddingZeros() var defaultConverter = strings.NewConverter().Localised().WithFloatingPointPrecision(3).PaddingZeros() type EncodeableDecisionVariables []EncodeableDecisionVariable const ( nameKey = "Name" measureKey = "Measure" valueKey = "Value" valuePerPlanningUnitKey = "ValuePerPlanningUnit" comma = "," openBrace = "{" closeBrace = "}" ) func (v EncodeableDecisionVariables) Len() int { return len(v) } func (v EncodeableDecisionVariables) Swap(i, j int) { v[i], v[j] = v[j], v[i] } func (v EncodeableDecisionVariables) Less(i, j int) bool { return v[i].Name < v[j].Name } type PlanningUnitValue struct { PlanningUnit planningunit.Id Value float64 } type PlanningUnitValues []PlanningUnitValue func (v PlanningUnitValues) Len() int { return len(v) } func (v PlanningUnitValues) Swap(i, j int) { v[i], v[j] = v[j], v[i] } func (v PlanningUnitValues) Less(i, j int) bool { return v[i].PlanningUnit < v[j].PlanningUnit } type EncodeableDecisionVariable struct { Name string Value float64 Measure UnitOfMeasure `json:"UnitOfMeasure"` ValuePerPlanningUnit PlanningUnitValues `json:",omitempty"` } func MakeEncodeable(variable DecisionVariable) EncodeableDecisionVariable { return EncodeableDecisionVariable{ Name: variable.Name(), Value: math.RoundFloat(variable.Value(), int(variable.Precision())), Measure: variable.UnitOfMeasure(), ValuePerPlanningUnit: encodeValuesPerPlanningUnit(variable), } } func encodeValuesPerPlanningUnit(variable DecisionVariable) PlanningUnitValues { variablePerPlanningUnit, isVariablePerPlanningUnit := variable.(PlanningUnitDecisionVariable) if !isVariablePerPlanningUnit { return nil } rawValues := 
variablePerPlanningUnit.ValuesPerPlanningUnit() values := make(PlanningUnitValues, 0) for planningUnitId, planningUnitValue := range rawValues { roundedValue := math.RoundFloat(planningUnitValue, int(variable.Precision())) if roundedValue == 0 { continue } newValue := PlanningUnitValue{ PlanningUnit: planningUnitId, Value: roundedValue, } values = append(values, newValue) } sort.Sort(values) return values } func (v *EncodeableDecisionVariable) MarshalJSON() ([]byte, error) { planningUnitValues := v.deriveFormattedPerPlanningUnitValues() perAttributeJson := new(strings.FluentBuilder). Add(openBrace). Add(formatKeyValuePair(nameKey, v.Name)).Add(comma). Add(formatKeyValuePair(measureKey, v.Measure.String())).Add(comma). Add(formatKeyValuePair(valueKey, v.formatMeasureValue(v.Value))). AddIf(v.hasValuesPerPlanningUnit(), comma, formatKeyArrayPair(valuePerPlanningUnitKey, planningUnitValues)). Add(closeBrace). String() return []byte(perAttributeJson), nil } func (v *EncodeableDecisionVariable) deriveFormattedPerPlanningUnitValues() []string { perPlanningUnitValues := make([]string, 0) for _, planningUnitValue := range v.ValuePerPlanningUnit { formattedValue := v.formatPlanningUnitValue(planningUnitValue) perPlanningUnitValues = append(perPlanningUnitValues, formattedValue) } return perPlanningUnitValues } func (v *EncodeableDecisionVariable) formatPlanningUnitValue(planningUnitValue PlanningUnitValue) string { key := planningUnitValue.PlanningUnit.String() formattedValue := v.formatMeasureValue(planningUnitValue.Value) return fmt.Sprintf("{\"PlanningUnit\":\"%s\", \"Value\":\"%s\"}", key, formattedValue) } func (v *EncodeableDecisionVariable) hasValuesPerPlanningUnit() bool { return len(v.ValuePerPlanningUnit) > 0 } func (v *EncodeableDecisionVariable) formatMeasureValue(value float64) string { switch v.Measure { case Dollars: return currencyConverter.Convert(value) default: return defaultConverter.Convert(value) } assert.That(false).WithFailureMessage("Should not 
reach here").Holds() return "" } func formatKeyValuePair(key string, value string) string { return fmt.Sprintf("\"%s\":\"%s\"", key, value) } func formatKeyArrayPair(key string, values []string) string { commaSeparatedValues := strings2.Join(values[:], comma) return fmt.Sprintf("\"%s\": [%s]", key, commaSeparatedValues) }
internal/pkg/model/variable/EncodableDecisionVariable.go
0.639736
0.405596
EncodableDecisionVariable.go
starcoder
package flow import ( . "github.com/shnifer/magellan/v2" "math" "math/rand" ) func LinearLifeTime(a, b float64) func(point Point) float64 { return func(point Point) float64 { return a + (b-a)*point.lifeTime/point.maxTime } } func SinLifeTime(med, dev, period float64) func(p Point) float64 { return func(p Point) float64 { return med + math.Sin(p.lifeTime/period*2*math.Pi)*dev } } func SinMaxTime(med, dev, periods float64) func(p Point) float64 { return func(p Point) float64 { return med + math.Sin(p.lifeTime/p.maxTime*periods*2*math.Pi)*dev } } func ComposeRadial(tang, norm func(l, w float64) float64) VelocityF { return func(pos V2) (vel V2) { l := pos.Len() w := pos.Dir() t := tang(l, w) n := norm(l, w) return Add(pos.Mul(n/l), pos.Rotate90().Mul(t/l)) } } func ComposeDecart(vx, vy func(x, y float64) float64) VelocityF { return func(pos V2) (vel V2) { return V2{X: vx(pos.X, pos.Y), Y: vy(pos.X, pos.Y)} } } func (f VelocityF) Add(delta V2) VelocityF { return func(pos V2) (vel V2) { return f(pos).Add(delta) } } func (f VelocityF) AddMul(delta V2, t float64) VelocityF { return func(pos V2) (vel V2) { return f(pos).AddMul(delta, t) } } func (f VelocityF) Rot(angle float64) VelocityF { return func(pos V2) (vel V2) { return f(pos.Rotate(-angle)).Rotate(angle) } } func ConstC(val float64) func(float64, float64) float64 { return func(float64, float64) float64 { return val } } func LineRand(min, max float64) func() float64 { return func() float64 { return min + rand.Float64()*(max-min) } } func NormRand(center, devPercent float64) func() float64 { return func() float64 { return center * KDev(devPercent) } } func RandomInCirc(R float64) func() V2 { return func() V2 { return RandomInCircle(R) } } func RandomOnSide(sideOrt V2, wide float64) func() V2 { return func() V2 { l := 1 - rand.Float64()*wide wOrt := sideOrt.Rotate90() return sideOrt.Mul(l).AddMul(wOrt, rand.Float64()*2-1) } } //Возвращает коэффициент нормальной дистрибуций //сигма в процентах devProcent //68% попадут в 
(100-devProcent, 100+devProcent) //95% попадут в (100-2*devProcent, 100+2*devProcent) //Отклонения больше 3 сигма ограничиваются func KDev(devProcent float64) float64 { r := rand.NormFloat64() if r > 3 { r = 3 } if r < (-3) { r = -3 } r = 1 + r*devProcent/100 if r < 0 { r = 0.00001 } return r }
graph/flow/funcs.go
0.749087
0.678943
funcs.go
starcoder
package hexgrid import ( "fmt" "math" ) type direction int const ( directionSE = iota directionNE directionN directionNW directionSW directionS ) var directions = []hex{ NewHex(1, 0), NewHex(1, -1), NewHex(0, -1), NewHex(-1, 0), NewHex(-1, +1), NewHex(0, +1), } // hex describes a regular hexagon with Cube Coordinates (although the S coordinate is computed on the constructor) // It's also easy to reference them as axial (trapezoidal coordinates): // - R represents the vertical axis // - Q the diagonal one // - S can be ignored // For additional reference on these coordinate systems: http://www.redblobgames.com/grids/hexagons/#coordinates // _ _ // / \ // _ _ /(0,-1) \ _ _ // / \ -R / \ // /(-1,0) \ _ _ /(1,-1) \ // \ -Q / \ / // \ _ _ / (0,0) \ _ _ / // / \ / \ // /(-1,1) \ _ _ / (1,0) \ // \ / \ +Q / // \ _ _ / (0,1) \ _ _ / // \ +R / // \ _ _ / type hex struct { q int // x axis r int // y axis s int // z axis } func NewHex(q, r int) hex { h := hex{q: q, r: r, s: -q - r} return h } func (h hex) String() string { return fmt.Sprintf("(%d,%d)", h.q, h.r) } // fractionHex provides a more precise representation for hexagons when precision is required. 
// It's also represented in Cube Coordinates type fractionalHex struct { q float64 r float64 s float64 } func NewFractionalHex(q, r float64) fractionalHex { h := fractionalHex{q: q, r: r, s: -q - r} return h } // Rounds a FractionalHex to a Regular Hex func (h fractionalHex) Round() hex { roundToInt := func(a float64) int { if a < 0 { return int(a - 0.5) } return int(a + 0.5) } q := roundToInt(h.q) r := roundToInt(h.r) s := roundToInt(h.s) q_diff := math.Abs(float64(q) - h.q) r_diff := math.Abs(float64(r) - h.r) s_diff := math.Abs(float64(s) - h.s) if q_diff > r_diff && q_diff > s_diff { q = -r - s } else if r_diff > s_diff { r = -q - s } else { s = -q - r } return hex{q, r, s} } // Adds two hexagons func HexAdd(a, b hex) hex { return NewHex(a.q+b.q, a.r+b.r) } // Subtracts two hexagons func HexSubtract(a, b hex) hex { return NewHex(a.q-b.q, a.r-b.r) } // Scales an hexagon by a k factor. If factor k is 1 there's no change func HexScale(a hex, k int) hex { return NewHex(a.q*k, a.r*k) } func HexLength(hex hex) int { return int((math.Abs(float64(hex.q)) + math.Abs(float64(hex.r)) + math.Abs(float64(hex.s))) / 2.) } func HexDistance(a, b hex) int { sub := HexSubtract(a, b) return HexLength(sub) } // Returns the neighbor hexagon at a certain direction func HexNeighbor(h hex, direction direction) hex { directionOffset := directions[direction] return NewHex(h.q+directionOffset.q, h.r+directionOffset.r) } // Returns the slice of hexagons that exist on a line that goes from hexagon a to hexagon b func HexLineDraw(a, b hex) []hex { hexLerp := func(a fractionalHex, b fractionalHex, t float64) fractionalHex { return NewFractionalHex(a.q*(1-t)+b.q*t, a.r*(1-t)+b.r*t) } N := HexDistance(a, b) // Sometimes the hexLerp will output a point that’s on an edge. // On some systems, the rounding code will push that to one side or the other, // somewhat unpredictably and inconsistently. // To make it always push these points in the same direction, add an “epsilon” value to a. 
// This will “nudge” things in the same direction when it’s on an edge, and leave other points unaffected. a_nudge := NewFractionalHex(float64(a.q)+0.000001, float64(a.r)+0.000001) b_nudge := NewFractionalHex(float64(b.q)+0.000001, float64(b.r)+0.000001) results := make([]hex, 0) step := 1. / math.Max(float64(N), 1) for i := 0; i <= N; i++ { results = append(results, hexLerp(a_nudge, b_nudge, step*float64(i)).Round()) } return results } // Returns the set of hexagons around a certain center for a given radius func HexRange(center hex, radius int) []hex { var results = make([]hex, 0) if radius >= 0 { for dx := -radius; dx <= radius; dx++ { for dy := math.Max(float64(-radius), float64(-dx-radius)); dy <= math.Min(float64(radius), float64(-dx+radius)); dy++ { results = append(results, HexAdd(center, NewHex(int(dx), int(dy)))) } } } return results } // Returns the set of hexagons that form a rectangle with the specified width and height func HexRectangleGrid(width, height int) []hex { results := make([]hex, 0) for q := 0; q < width; q++ { qOffset := int(math.Floor(float64(q) / 2.)) for r := -qOffset; r < height-qOffset; r++ { results = append(results, NewHex(q, r)) } } return results } // Determines if a given hexagon is visible from another hexagon, taking into consideration a set of blocking hexagons func HexHasLineOfSight(center hex, target hex, blocking []hex) bool { contains := func(s []hex, e hex) bool { for _, a := range s { if a == e { return true } } return false } for _, h := range HexLineDraw(center, target) { if contains(blocking, h) { return false } } return true } // Returns the list of hexagons that are visible from a given hexagon func HexFieldOfView(source hex, candidates []hex, blocking []hex) []hex { results := make([]hex, 0) for _, h := range candidates { distance := HexDistance(source, h) if len(blocking) == 0 || distance <= 1 || HexHasLineOfSight(source, h, blocking) { results = append(results, h) } } return results }
hex.go
0.861887
0.421909
hex.go
starcoder
Package configurator supports configuration management and dynamic generation by manipulating a graph of network entities. Entity graph Configurator manages a directed acyclic graph (DAG) of network-partitioned entities. Callers can define their graph via the following three types - Network - Network-level configs - Network-level metadata - Network entity (vertices) - Type - Key (ID) - Config (serialized) - Edge (directed edge) - Connect two network entities Each network provides an isolated graph, with network-level configs and metadata. Network entities represent logical entities within a network such as a gateway, subscriber, or APN. Edges connect two network entities with a directed edge. Code calling configurator should take care to define entity types and relations which would invariably result in an acyclic graph. Generating configs Configurator provides two interfaces: northbound and southbound. The northbound interface manipulates the entity graphs according to requests from the Orchestrator REST API and from other Orchestrator services. The southbound interface synthesizes the entity graph into mconfigs for particular gateways. Configurator generates these gateway mconfigs ("Magma" configs) by outsourcing config generation to a dynamic set of mconfig builders. Configurator sends each registered builder the gateway ID for which to build a config, along with the encompassing entity graph and network. With this information, each builder can traverse the graph as-necessary to dynamically build a config for the requesting gateway. Configurator assembles the set of partial configs from each mconfig builder into a complete config. Before returning, it adds metadata such as time of creation and hash/digest of the configs. Mconfig builders are Orchestrator services registering an MconfigBuilder under their gRPC endpoint. Any Orchestrator service can provide its own builder servicer. Configurator discovers mconfig builders using K8s labels. 
Any service with the label "orc8r.io/mconfig_builder" will be assumed to provide an mconfig builder servicer. */ package configurator const ( // ServiceName is the name of this service ServiceName = "CONFIGURATOR" // NetworkConfigSerdeDomain is the Serde domain for network configs NetworkConfigSerdeDomain = "configurator_network_configs" // NetworkEntitySerdeDomain is the Serde domain for network entity configs NetworkEntitySerdeDomain = "configurator_entity_configs" )
orc8r/cloud/go/services/configurator/doc.go
0.580709
0.517754
doc.go
starcoder
// +build ignore // printgraphs allows us to generate a consistent directed view of // a set of edges that follows a reasonably real-world-meaningful // graph. The interpretation of the links in the resulting directed // graphs are either "suggests" in the context of a Page Ranking or // possibly "looks up to" in the Zachary graph. package main import ( "fmt" "sort" "github.com/gonum/graph" "github.com/gonum/graph/internal/ordered" "github.com/gonum/graph/network" "github.com/gonum/graph/simple" ) // set is an integer set. type set map[int]struct{} func linksTo(i ...int) set { if len(i) == 0 { return nil } s := make(set) for _, v := range i { s[v] = struct{}{} } return s } var ( zachary = []set{ 0: linksTo(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 17, 19, 21, 31), 1: linksTo(2, 3, 7, 13, 17, 19, 21, 30), 2: linksTo(3, 7, 8, 9, 13, 27, 28, 32), 3: linksTo(7, 12, 13), 4: linksTo(6, 10), 5: linksTo(6, 10, 16), 6: linksTo(16), 8: linksTo(30, 32, 33), 9: linksTo(33), 13: linksTo(33), 14: linksTo(32, 33), 15: linksTo(32, 33), 18: linksTo(32, 33), 19: linksTo(33), 20: linksTo(32, 33), 22: linksTo(32, 33), 23: linksTo(25, 27, 29, 32, 33), 24: linksTo(25, 27, 31), 25: linksTo(31), 26: linksTo(29, 33), 27: linksTo(33), 28: linksTo(31, 33), 29: linksTo(32, 33), 30: linksTo(32, 33), 31: linksTo(32, 33), 32: linksTo(33), 33: nil, } blondel = []set{ 0: linksTo(2, 3, 4, 5), 1: linksTo(2, 4, 7), 2: linksTo(4, 5, 6), 3: linksTo(7), 4: linksTo(10), 5: linksTo(7, 11), 6: linksTo(7, 11), 8: linksTo(9, 10, 11, 14, 15), 9: linksTo(12, 14), 10: linksTo(11, 12, 13, 14), 11: linksTo(13), 15: nil, } ) func main() { for _, raw := range []struct { name string set []set }{ {"zachary", zachary}, {"blondel", blondel}, } { g := simple.NewUndirectedGraph(0, 0) for u, e := range raw.set { // Add nodes that are not defined by an edge. 
if !g.Has(simple.Node(u)) { g.AddNode(simple.Node(u)) } for v := range e { g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1}) } } nodes := g.Nodes() sort.Sort(ordered.ByID(nodes)) fmt.Printf("%s = []set{\n", raw.name) rank := network.PageRank(asDirected{g}, 0.85, 1e-8) for _, u := range nodes { to := g.From(nodes[u.ID()]) sort.Sort(ordered.ByID(to)) var links []int for _, v := range to { if rank[u.ID()] <= rank[v.ID()] { links = append(links, v.ID()) } } if links == nil { fmt.Printf("\t%d: nil, // rank=%.4v\n", u.ID(), rank[u.ID()]) continue } fmt.Printf("\t%d: linksTo(", u.ID()) for i, v := range links { if i != 0 { fmt.Print(", ") } fmt.Print(v) } fmt.Printf("), // rank=%.4v\n", rank[u.ID()]) } fmt.Println("}") } } type asDirected struct{ *simple.UndirectedGraph } func (g asDirected) HasEdgeFromTo(u, v graph.Node) bool { return g.UndirectedGraph.HasEdgeBetween(u, v) } func (g asDirected) To(v graph.Node) []graph.Node { return g.From(v) }
vendor/github.com/gonum/graph/community/printgraphs.go
0.510252
0.404096
printgraphs.go
starcoder
package json import ( "fmt" "reflect" ) // Transition functions for recognizing NumberInt and NumberLong. // Adapted from encoding/json/scanner.go. // stateUpperNu is the state after reading `Nu`. func stateUpperNu(s *scanner, c int) int { if c == 'm' { s.step = generateState("Number", []byte("ber"), stateUpperNumber) return scanContinue } return s.error(c, "in literal Number (expecting 'm')") } // stateUpperNumber is the state after reading `Number`. func stateUpperNumber(s *scanner, c int) int { if c == 'I' { s.step = generateState("NumberInt", []byte("nt"), stateConstructor) return scanContinue } if c == 'L' { s.step = generateState("NumberLong", []byte("ong"), stateConstructor) return scanContinue } return s.error(c, "in literal NumberInt or NumberLong (expecting 'I' or 'L')") } // Decodes a NumberInt literal stored in the underlying byte data into v. func (d *decodeState) storeNumberInt(v reflect.Value) { op := d.scanWhile(scanSkipSpace) if op != scanBeginCtor { d.error(fmt.Errorf("expected beginning of constructor")) } args, err := d.ctor("NumberInt", []reflect.Type{numberIntType}) if err != nil { d.error(err) } switch kind := v.Kind(); kind { case reflect.Interface: v.Set(args[0]) default: d.error(fmt.Errorf("cannot store %v value into %v type", numberIntType, kind)) } } // Returns a NumberInt literal from the underlying byte data. func (d *decodeState) getNumberInt() interface{} { op := d.scanWhile(scanSkipSpace) if op != scanBeginCtor { d.error(fmt.Errorf("expected beginning of constructor")) } // Prevent d.convertNumber() from parsing the argument as a float64. 
useNumber := d.useNumber d.useNumber = true args := d.ctorInterface() if err := ctorNumArgsMismatch("NumberInt", 1, len(args)); err != nil { d.error(err) } var number Number switch v := args[0].(type) { case Number: number = v case string: number = Number(v) default: d.error(fmt.Errorf("expected int32 for first argument of NumberInt constructor, got %t", v)) } d.useNumber = useNumber arg0, err := number.Int32() if err != nil { d.error(fmt.Errorf("expected int32 for first argument of NumberInt constructor, got %t", number)) } return NumberInt(arg0) } // Decodes a NumberLong literal stored in the underlying byte data into v. func (d *decodeState) storeNumberLong(v reflect.Value) { op := d.scanWhile(scanSkipSpace) if op != scanBeginCtor { d.error(fmt.Errorf("expected beginning of constructor")) } args, err := d.ctor("NumberLong", []reflect.Type{numberLongType}) if err != nil { d.error(err) } switch kind := v.Kind(); kind { case reflect.Interface: v.Set(args[0]) default: d.error(fmt.Errorf("cannot store %v value into %v type", numberLongType, kind)) } } // Returns a NumberLong literal from the underlying byte data. func (d *decodeState) getNumberLong() interface{} { op := d.scanWhile(scanSkipSpace) if op != scanBeginCtor { d.error(fmt.Errorf("expected beginning of constructor")) } // Prevent d.convertNumber() from parsing the argument as a float64. useNumber := d.useNumber d.useNumber = true args := d.ctorInterface() if err := ctorNumArgsMismatch("NumberLong", 1, len(args)); err != nil { d.error(err) } var number Number switch v := args[0].(type) { case Number: number = v case string: number = Number(v) default: d.error(fmt.Errorf("expected int64 for first argument of NumberLong constructor, got %t", v)) } d.useNumber = useNumber arg0, err := number.Int64() if err != nil { d.error(fmt.Errorf("expected int64 for first argument of NumberLong constructor, got %t", number)) } return NumberLong(arg0) }
common/json/number.go
0.758153
0.401688
number.go
starcoder
package go_fourier import ( "errors" "math" "math/bits" "math/cmplx" ) // DCT1D computes the discrete cosine transform of the given array in the complex number space. // Assumes the length of the array is a power of 2 // Returns the result in real number space. func DCT1D(signals []float64) ([]float64, error) { if len(signals) == 0 { return make([]float64,0), errors.New("DCT1D: Input array must have size at least one") } if bits.OnesCount32(uint32(len(signals))) != 1 { return make([]float64,0), errors.New("DCT1D: Input array must have size a power of two") } N := len(signals) y := make([]complex128, N) for i := 0; i < N/2; i++ { y[i] = complex(signals[2*i],0.0) y[N-1-i] = complex(signals[2*i+1], 0.0) } err := DFT2Radix1D(y) result := make([]float64, len(signals)) sqrtTermForFirst := math.Sqrt(1.0/(float64(N))) sqrtTermForRest := math.Sqrt(2.0/(float64(N))) for n := 0; n < N; n++ { shift := cmplx.Exp(-1i*math.Pi*complex(float64(n)/float64(2*N),0)) result[n] = real(y[n]*shift) if n == 0 { result[n] *= sqrtTermForFirst } else { result[n] *= sqrtTermForRest } } return result, err } // DCTInverse1D computes the inverse discrete cosine transform of the given array in the complex number space. // Assumes the length of the array is a power of 2 // Returns the result in complex number space. 
func DCTInverse1D(signals []float64) ([]float64, error) { if len(signals) == 0 { return make([]float64,0), errors.New("DCTInverse1D: Input array must have size at least one") } if bits.OnesCount32(uint32(len(signals))) != 1 { return make([]float64,0), errors.New("DCTInverse1D: Input array must have size a power of two") } N := len(signals) complexSignals := make([]complex128, len(signals)) for n := 0; n < N; n++ { shift := cmplx.Exp(1i*math.Pi*complex(float64(n)/float64(2*N),0)) complexSignals[n] = complex(signals[n] * math.Sqrt(2.0/(float64(N))), 0.0) * shift } complexSignals[0] /= complex(math.Sqrt(2.0), 0.0) err := DFTInverse2Radix1D(complexSignals) result := make([]float64, len(signals)) for i := 0; i < N/2; i++ { result[2*i] = float64(N)*real(complexSignals[i]) result[2*i+1] = float64(N)*real(complexSignals[N-1-i]) } return result, err } // DCT2D computes the discrete cosine transform of the given 2d-array in the complex number space. // Assumes the dimensions of the array is a power of 2 // Returns the result in complex number space. func DCT2D(signals [][]float64) ([][]float64, error) { return dct2D(signals, true) } // DCTInverse2D computes the inverse discrete cosine transform of the given 2d-array in the complex number space. // Assumes the dimensions of the array is a power of 2 // Returns the result in complex number space. func DCTInverse2D(signals [][]float64) ([][]float64, error) { transposedSignals := transposeReal(signals) result, err := dct2D(transposedSignals, false) result = transposeReal(result) return result, err }
dct.go
0.809464
0.735428
dct.go
starcoder
package pop3processor import "github.com/vjeantet/bitfan/processors/doc" func (p *processor) Doc() *doc.Processor { return &doc.Processor{ Name: "pop3processor", ImportPath: "github.com/vjeantet/bitfan/processors/pop3", Doc: "Periodically scan an POP3 mailbox for new emails.", DocShort: "Read mails from POP3 server", Options: &doc.ProcessorOptions{ Doc: "", Options: []*doc.ProcessorOption{ &doc.ProcessorOption{ Name: "processors.CommonOptions", Alias: ",squash", Doc: "", Required: false, Type: "processors.CommonOptions", DefaultValue: nil, PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Interval", Alias: "interval", Doc: "When new mail should be retreived from POP3 server ?\nNothing by default, as this processor can be used in filter", Required: false, Type: "interval", DefaultValue: nil, PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Host", Alias: "host", Doc: "POP3 host name", Required: true, Type: "string", DefaultValue: nil, PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Port", Alias: "port", Doc: "POP3 server's port.\n\nWhen empty and secure is true (pop3s) the default port number is 995", Required: false, Type: "int", DefaultValue: "110", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Secure", Alias: "secure", Doc: "Use TLS POP3S connexion with server.\nThe default pop3s port is 995 in this case", Required: false, Type: "bool", DefaultValue: "false", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Username", Alias: "username", Doc: "POP3 mailbox Username", Required: true, Type: "string", DefaultValue: nil, PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Password", Alias: "password", Doc: "POP3 mailbox Password\nyou may use an env variable to pass value, like password => \"${BITFAN_POP3_PASSWORD}\"", Required: true, Type: "string", DefaultValue: nil, PossibleValues: []string{}, ExampleLS: "", }, 
&doc.ProcessorOption{ Name: "DialTimeout", Alias: "dial_timeout", Doc: "How long to wait for the server to respond ?\n(in second)", Required: false, Type: "int", DefaultValue: "30", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "Delete", Alias: "delete", Doc: "Should delete message after retreiving it ?\n\nWhen false, this processor will use sinceDB to not retreive an already seen message", Required: false, Type: "bool", DefaultValue: "true", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "StripAttachments", Alias: "strip_attachments", Doc: "Add Attachements, Inlines, in the produced event ?\nWhen false Parts are added like\n```\n \"parts\": {\n {\n \"Size\": 336303,\n \"Content\": $$ContentAsBytes$$,\n \"Type\": \"inline\",\n \"ContentType\": \"image/png\",\n \"Disposition\": \"inline\",\n \"FileName\": \"Capture d’écran 2018-01-12 à 12.11.52.png\",\n },\n {\n \"Content\": $$ContentAsBytes$$,\n \"Type\": \"attachement\",\n \"ContentType\": \"application/pdf\",\n \"Disposition\": \"attachment\",\n \"FileName\": \"58831639.pdf\",\n \"Size\": 14962,\n },\n},\n```", Required: false, Type: "bool", DefaultValue: "false", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "VerifyCert", Alias: "verify_cert", Doc: "When using a secure pop connexion (POP3S) should server'cert be verified ?", Required: false, Type: "bool", DefaultValue: "true", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "SincedbPath", Alias: "sincedb_path", Doc: "Path of the sincedb database file\n\nThe sincedb database keeps track of the last seen message\n\nSet it to `\"/dev/null\"` to not persist sincedb features\n\nTracks are done by host and username combination, you can customize this if needed giving a specific path", Required: false, Type: "string", DefaultValue: ": Host@Username", PossibleValues: []string{}, ExampleLS: ": sincedb_path => \"/dev/null\"", }, &doc.ProcessorOption{ Name: 
"AddRawMessage", Alias: "add_raw_message", Doc: "Add a field to event with the raw message data ?", Required: false, Type: "bool", DefaultValue: "false", PossibleValues: []string{}, ExampleLS: "", }, &doc.ProcessorOption{ Name: "AddAllHeaders", Alias: "add_all_headers", Doc: "Add a field to event with all headers as hash ?", Required: false, Type: "bool", DefaultValue: "false", PossibleValues: []string{}, ExampleLS: "", }, }, }, Ports: []*doc.ProcessorPort{}, } }
processors/pop3/docdoc.go
0.613584
0.42662
docdoc.go
starcoder
package graph // VertexType is a type for specific vertices. type VertexType byte const ( // VertexTypeBackupBucket is a constant for a 'BackupBucket' vertex. VertexTypeBackupBucket VertexType = iota // VertexTypeBackupEntry is a constant for a 'BackupEntry' vertex. VertexTypeBackupEntry // VertexTypeBastion is a constant for a 'Bastion' vertex. VertexTypeBastion // VertexTypeCertificateSigningRequest is a constant for a 'CertificateSigningRequest' vertex. VertexTypeCertificateSigningRequest // VertexTypeCloudProfile is a constant for a 'CloudProfile' vertex. VertexTypeCloudProfile // VertexTypeClusterRoleBinding is a constant for a 'ClusterRoleBinding' vertex. VertexTypeClusterRoleBinding // VertexTypeConfigMap is a constant for a 'ConfigMap' vertex. VertexTypeConfigMap // VertexTypeControllerDeployment is a constant for a 'ControllerDeployment' vertex. VertexTypeControllerDeployment // VertexTypeControllerInstallation is a constant for a 'ControllerInstallation' vertex. VertexTypeControllerInstallation // VertexTypeControllerRegistration is a constant for a 'ControllerRegistration' vertex. VertexTypeControllerRegistration // VertexTypeExposureClass is a constant for a 'ExposureClass' vertex. VertexTypeExposureClass // VertexTypeLease is a constant for a 'Lease' vertex. VertexTypeLease // VertexTypeManagedSeed is a constant for a 'ManagedSeed' vertex. VertexTypeManagedSeed // VertexTypeNamespace is a constant for a 'Namespace' vertex. VertexTypeNamespace // VertexTypeProject is a constant for a 'Project' vertex. VertexTypeProject // VertexTypeSecret is a constant for a 'Secret' vertex. VertexTypeSecret // VertexTypeSecretBinding is a constant for a 'SecretBinding' vertex. VertexTypeSecretBinding // VertexTypeSeed is a constant for a 'Seed' vertex. VertexTypeSeed // VertexTypeServiceAccount is a constant for a 'ServiceAccount' vertex. VertexTypeServiceAccount // VertexTypeShoot is a constant for a 'Shoot' vertex. 
VertexTypeShoot // VertexTypeShootState is a constant for a 'ShootState' vertex. VertexTypeShootState ) var vertexTypes = map[VertexType]string{ VertexTypeBackupBucket: "BackupBucket", VertexTypeBackupEntry: "BackupEntry", VertexTypeBastion: "Bastion", VertexTypeCertificateSigningRequest: "CertificateSigningRequest", VertexTypeCloudProfile: "CloudProfile", VertexTypeClusterRoleBinding: "ClusterRoleBinding", VertexTypeConfigMap: "ConfigMap", VertexTypeControllerDeployment: "ControllerDeployment", VertexTypeControllerInstallation: "ControllerInstallation", VertexTypeControllerRegistration: "ControllerRegistration", VertexTypeExposureClass: "ExposureClass", VertexTypeLease: "Lease", VertexTypeManagedSeed: "ManagedSeed", VertexTypeNamespace: "Namespace", VertexTypeProject: "Project", VertexTypeSecret: "Secret", VertexTypeSecretBinding: "SecretBinding", VertexTypeSeed: "Seed", VertexTypeServiceAccount: "ServiceAccount", VertexTypeShoot: "Shoot", VertexTypeShootState: "ShootState", } type vertex struct { vertexType VertexType namespace string name string id int64 } func newVertex(vertexType VertexType, namespace, name string, id int64) *vertex { return &vertex{ vertexType: vertexType, name: name, namespace: namespace, id: id, } } func (v *vertex) ID() int64 { return v.id } func (v *vertex) String() string { var namespace string if len(v.namespace) > 0 { namespace = v.namespace + "/" } return vertexTypes[v.vertexType] + ":" + namespace + v.name } // typeVertexMapping is a map from type -> namespace -> name -> vertex. type typeVertexMapping map[VertexType]namespaceVertexMapping // namespaceVertexMapping is a map of namespace -> name -> vertex. type namespaceVertexMapping map[string]nameVertexMapping // nameVertexMapping is a map of name -> vertex. type nameVertexMapping map[string]*vertex
pkg/admissioncontroller/webhooks/auth/seed/graph/vertices.go
0.648689
0.570331
vertices.go
starcoder
package datadog import ( "encoding/json" "time" ) // UsageLogsHour Hour usage for logs. type UsageLogsHour struct { // Contains the number of billable log bytes ingested. BillableIngestedBytes *int64 `json:"billable_ingested_bytes,omitempty"` // The hour for the usage. Hour *time.Time `json:"hour,omitempty"` // Contains the number of log events indexed. IndexedEventsCount *int64 `json:"indexed_events_count,omitempty"` // Contains the number of log bytes ingested. IngestedEventsBytes *int64 `json:"ingested_events_bytes,omitempty"` // Contains the number of live log events indexed (data available as of December 1, 2020). LogsLiveIndexedCount *int64 `json:"logs_live_indexed_count,omitempty"` // Contains the number of live log bytes ingested (data available as of December 1, 2020). LogsLiveIngestedBytes *int64 `json:"logs_live_ingested_bytes,omitempty"` // Contains the number of rehydrated log events indexed (data available as of December 1, 2020). LogsRehydratedIndexedCount *int64 `json:"logs_rehydrated_indexed_count,omitempty"` // Contains the number of rehydrated log bytes ingested (data available as of December 1, 2020). LogsRehydratedIngestedBytes *int64 `json:"logs_rehydrated_ingested_bytes,omitempty"` // The organization name. OrgName *string `json:"org_name,omitempty"` // The organization public ID. 
PublicId *string `json:"public_id,omitempty"` // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct UnparsedObject map[string]interface{} `json:-` } // NewUsageLogsHour instantiates a new UsageLogsHour object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewUsageLogsHour() *UsageLogsHour { this := UsageLogsHour{} return &this } // NewUsageLogsHourWithDefaults instantiates a new UsageLogsHour object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewUsageLogsHourWithDefaults() *UsageLogsHour { this := UsageLogsHour{} return &this } // GetBillableIngestedBytes returns the BillableIngestedBytes field value if set, zero value otherwise. func (o *UsageLogsHour) GetBillableIngestedBytes() int64 { if o == nil || o.BillableIngestedBytes == nil { var ret int64 return ret } return *o.BillableIngestedBytes } // GetBillableIngestedBytesOk returns a tuple with the BillableIngestedBytes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetBillableIngestedBytesOk() (*int64, bool) { if o == nil || o.BillableIngestedBytes == nil { return nil, false } return o.BillableIngestedBytes, true } // HasBillableIngestedBytes returns a boolean if a field has been set. func (o *UsageLogsHour) HasBillableIngestedBytes() bool { if o != nil && o.BillableIngestedBytes != nil { return true } return false } // SetBillableIngestedBytes gets a reference to the given int64 and assigns it to the BillableIngestedBytes field. func (o *UsageLogsHour) SetBillableIngestedBytes(v int64) { o.BillableIngestedBytes = &v } // GetHour returns the Hour field value if set, zero value otherwise. 
func (o *UsageLogsHour) GetHour() time.Time { if o == nil || o.Hour == nil { var ret time.Time return ret } return *o.Hour } // GetHourOk returns a tuple with the Hour field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetHourOk() (*time.Time, bool) { if o == nil || o.Hour == nil { return nil, false } return o.Hour, true } // HasHour returns a boolean if a field has been set. func (o *UsageLogsHour) HasHour() bool { if o != nil && o.Hour != nil { return true } return false } // SetHour gets a reference to the given time.Time and assigns it to the Hour field. func (o *UsageLogsHour) SetHour(v time.Time) { o.Hour = &v } // GetIndexedEventsCount returns the IndexedEventsCount field value if set, zero value otherwise. func (o *UsageLogsHour) GetIndexedEventsCount() int64 { if o == nil || o.IndexedEventsCount == nil { var ret int64 return ret } return *o.IndexedEventsCount } // GetIndexedEventsCountOk returns a tuple with the IndexedEventsCount field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetIndexedEventsCountOk() (*int64, bool) { if o == nil || o.IndexedEventsCount == nil { return nil, false } return o.IndexedEventsCount, true } // HasIndexedEventsCount returns a boolean if a field has been set. func (o *UsageLogsHour) HasIndexedEventsCount() bool { if o != nil && o.IndexedEventsCount != nil { return true } return false } // SetIndexedEventsCount gets a reference to the given int64 and assigns it to the IndexedEventsCount field. func (o *UsageLogsHour) SetIndexedEventsCount(v int64) { o.IndexedEventsCount = &v } // GetIngestedEventsBytes returns the IngestedEventsBytes field value if set, zero value otherwise. 
func (o *UsageLogsHour) GetIngestedEventsBytes() int64 { if o == nil || o.IngestedEventsBytes == nil { var ret int64 return ret } return *o.IngestedEventsBytes } // GetIngestedEventsBytesOk returns a tuple with the IngestedEventsBytes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetIngestedEventsBytesOk() (*int64, bool) { if o == nil || o.IngestedEventsBytes == nil { return nil, false } return o.IngestedEventsBytes, true } // HasIngestedEventsBytes returns a boolean if a field has been set. func (o *UsageLogsHour) HasIngestedEventsBytes() bool { if o != nil && o.IngestedEventsBytes != nil { return true } return false } // SetIngestedEventsBytes gets a reference to the given int64 and assigns it to the IngestedEventsBytes field. func (o *UsageLogsHour) SetIngestedEventsBytes(v int64) { o.IngestedEventsBytes = &v } // GetLogsLiveIndexedCount returns the LogsLiveIndexedCount field value if set, zero value otherwise. func (o *UsageLogsHour) GetLogsLiveIndexedCount() int64 { if o == nil || o.LogsLiveIndexedCount == nil { var ret int64 return ret } return *o.LogsLiveIndexedCount } // GetLogsLiveIndexedCountOk returns a tuple with the LogsLiveIndexedCount field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetLogsLiveIndexedCountOk() (*int64, bool) { if o == nil || o.LogsLiveIndexedCount == nil { return nil, false } return o.LogsLiveIndexedCount, true } // HasLogsLiveIndexedCount returns a boolean if a field has been set. func (o *UsageLogsHour) HasLogsLiveIndexedCount() bool { if o != nil && o.LogsLiveIndexedCount != nil { return true } return false } // SetLogsLiveIndexedCount gets a reference to the given int64 and assigns it to the LogsLiveIndexedCount field. 
func (o *UsageLogsHour) SetLogsLiveIndexedCount(v int64) { o.LogsLiveIndexedCount = &v } // GetLogsLiveIngestedBytes returns the LogsLiveIngestedBytes field value if set, zero value otherwise. func (o *UsageLogsHour) GetLogsLiveIngestedBytes() int64 { if o == nil || o.LogsLiveIngestedBytes == nil { var ret int64 return ret } return *o.LogsLiveIngestedBytes } // GetLogsLiveIngestedBytesOk returns a tuple with the LogsLiveIngestedBytes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetLogsLiveIngestedBytesOk() (*int64, bool) { if o == nil || o.LogsLiveIngestedBytes == nil { return nil, false } return o.LogsLiveIngestedBytes, true } // HasLogsLiveIngestedBytes returns a boolean if a field has been set. func (o *UsageLogsHour) HasLogsLiveIngestedBytes() bool { if o != nil && o.LogsLiveIngestedBytes != nil { return true } return false } // SetLogsLiveIngestedBytes gets a reference to the given int64 and assigns it to the LogsLiveIngestedBytes field. func (o *UsageLogsHour) SetLogsLiveIngestedBytes(v int64) { o.LogsLiveIngestedBytes = &v } // GetLogsRehydratedIndexedCount returns the LogsRehydratedIndexedCount field value if set, zero value otherwise. func (o *UsageLogsHour) GetLogsRehydratedIndexedCount() int64 { if o == nil || o.LogsRehydratedIndexedCount == nil { var ret int64 return ret } return *o.LogsRehydratedIndexedCount } // GetLogsRehydratedIndexedCountOk returns a tuple with the LogsRehydratedIndexedCount field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetLogsRehydratedIndexedCountOk() (*int64, bool) { if o == nil || o.LogsRehydratedIndexedCount == nil { return nil, false } return o.LogsRehydratedIndexedCount, true } // HasLogsRehydratedIndexedCount returns a boolean if a field has been set. 
func (o *UsageLogsHour) HasLogsRehydratedIndexedCount() bool { if o != nil && o.LogsRehydratedIndexedCount != nil { return true } return false } // SetLogsRehydratedIndexedCount gets a reference to the given int64 and assigns it to the LogsRehydratedIndexedCount field. func (o *UsageLogsHour) SetLogsRehydratedIndexedCount(v int64) { o.LogsRehydratedIndexedCount = &v } // GetLogsRehydratedIngestedBytes returns the LogsRehydratedIngestedBytes field value if set, zero value otherwise. func (o *UsageLogsHour) GetLogsRehydratedIngestedBytes() int64 { if o == nil || o.LogsRehydratedIngestedBytes == nil { var ret int64 return ret } return *o.LogsRehydratedIngestedBytes } // GetLogsRehydratedIngestedBytesOk returns a tuple with the LogsRehydratedIngestedBytes field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetLogsRehydratedIngestedBytesOk() (*int64, bool) { if o == nil || o.LogsRehydratedIngestedBytes == nil { return nil, false } return o.LogsRehydratedIngestedBytes, true } // HasLogsRehydratedIngestedBytes returns a boolean if a field has been set. func (o *UsageLogsHour) HasLogsRehydratedIngestedBytes() bool { if o != nil && o.LogsRehydratedIngestedBytes != nil { return true } return false } // SetLogsRehydratedIngestedBytes gets a reference to the given int64 and assigns it to the LogsRehydratedIngestedBytes field. func (o *UsageLogsHour) SetLogsRehydratedIngestedBytes(v int64) { o.LogsRehydratedIngestedBytes = &v } // GetOrgName returns the OrgName field value if set, zero value otherwise. func (o *UsageLogsHour) GetOrgName() string { if o == nil || o.OrgName == nil { var ret string return ret } return *o.OrgName } // GetOrgNameOk returns a tuple with the OrgName field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *UsageLogsHour) GetOrgNameOk() (*string, bool) { if o == nil || o.OrgName == nil { return nil, false } return o.OrgName, true } // HasOrgName returns a boolean if a field has been set. func (o *UsageLogsHour) HasOrgName() bool { if o != nil && o.OrgName != nil { return true } return false } // SetOrgName gets a reference to the given string and assigns it to the OrgName field. func (o *UsageLogsHour) SetOrgName(v string) { o.OrgName = &v } // GetPublicId returns the PublicId field value if set, zero value otherwise. func (o *UsageLogsHour) GetPublicId() string { if o == nil || o.PublicId == nil { var ret string return ret } return *o.PublicId } // GetPublicIdOk returns a tuple with the PublicId field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *UsageLogsHour) GetPublicIdOk() (*string, bool) { if o == nil || o.PublicId == nil { return nil, false } return o.PublicId, true } // HasPublicId returns a boolean if a field has been set. func (o *UsageLogsHour) HasPublicId() bool { if o != nil && o.PublicId != nil { return true } return false } // SetPublicId gets a reference to the given string and assigns it to the PublicId field. 
func (o *UsageLogsHour) SetPublicId(v string) { o.PublicId = &v } func (o UsageLogsHour) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.UnparsedObject != nil { return json.Marshal(o.UnparsedObject) } if o.BillableIngestedBytes != nil { toSerialize["billable_ingested_bytes"] = o.BillableIngestedBytes } if o.Hour != nil { toSerialize["hour"] = o.Hour } if o.IndexedEventsCount != nil { toSerialize["indexed_events_count"] = o.IndexedEventsCount } if o.IngestedEventsBytes != nil { toSerialize["ingested_events_bytes"] = o.IngestedEventsBytes } if o.LogsLiveIndexedCount != nil { toSerialize["logs_live_indexed_count"] = o.LogsLiveIndexedCount } if o.LogsLiveIngestedBytes != nil { toSerialize["logs_live_ingested_bytes"] = o.LogsLiveIngestedBytes } if o.LogsRehydratedIndexedCount != nil { toSerialize["logs_rehydrated_indexed_count"] = o.LogsRehydratedIndexedCount } if o.LogsRehydratedIngestedBytes != nil { toSerialize["logs_rehydrated_ingested_bytes"] = o.LogsRehydratedIngestedBytes } if o.OrgName != nil { toSerialize["org_name"] = o.OrgName } if o.PublicId != nil { toSerialize["public_id"] = o.PublicId } return json.Marshal(toSerialize) } func (o *UsageLogsHour) UnmarshalJSON(bytes []byte) (err error) { raw := map[string]interface{}{} all := struct { BillableIngestedBytes *int64 `json:"billable_ingested_bytes,omitempty"` Hour *time.Time `json:"hour,omitempty"` IndexedEventsCount *int64 `json:"indexed_events_count,omitempty"` IngestedEventsBytes *int64 `json:"ingested_events_bytes,omitempty"` LogsLiveIndexedCount *int64 `json:"logs_live_indexed_count,omitempty"` LogsLiveIngestedBytes *int64 `json:"logs_live_ingested_bytes,omitempty"` LogsRehydratedIndexedCount *int64 `json:"logs_rehydrated_indexed_count,omitempty"` LogsRehydratedIngestedBytes *int64 `json:"logs_rehydrated_ingested_bytes,omitempty"` OrgName *string `json:"org_name,omitempty"` PublicId *string `json:"public_id,omitempty"` }{} err = json.Unmarshal(bytes, &all) if err != nil { err = 
json.Unmarshal(bytes, &raw) if err != nil { return err } o.UnparsedObject = raw return nil } o.BillableIngestedBytes = all.BillableIngestedBytes o.Hour = all.Hour o.IndexedEventsCount = all.IndexedEventsCount o.IngestedEventsBytes = all.IngestedEventsBytes o.LogsLiveIndexedCount = all.LogsLiveIndexedCount o.LogsLiveIngestedBytes = all.LogsLiveIngestedBytes o.LogsRehydratedIndexedCount = all.LogsRehydratedIndexedCount o.LogsRehydratedIngestedBytes = all.LogsRehydratedIngestedBytes o.OrgName = all.OrgName o.PublicId = all.PublicId return nil }
api/v1/datadog/model_usage_logs_hour.go
0.724481
0.402392
model_usage_logs_hour.go
starcoder
package incrdelaunay import ( "math" ) // Triangle stores the vertices of a triangle as well as its circumcircle. type Triangle struct { A, B, C Point Circumcircle Circumcircle } // NewTriangle return a new Triangle and calculates its circumcircle given three points. func NewTriangle(a, b, c Point) Triangle { return Triangle{ A: a, B: b, C: c, Circumcircle: calcCircumcircle(a, b, c), } } // HasVertex returns if the Triangle contains a specified vertex. func (t Triangle) HasVertex(p Point) bool { return t.A == p || t.B == p || t.C == p } // NewSuperTriangle returns a Triangle large enough to cover all points within (0, 0) to (w, h). func NewSuperTriangle(w, h int) Triangle { hW := int16(math.Ceil(float64(w) / 2)) hH := int16(math.Ceil(float64(h) / 2)) max := int16(w) if h > w { max = int16(h) } a := Point{hW - 2*max, hH - max} b := Point{hW, hH + 2*max} c := Point{hW + 2*max, hH - max} return NewTriangle(a, b, c) } // Point represents a 2D point, using int16 to optimize space. type Point struct { X, Y int16 } // DistSq returns the distance squared to another point. func (p Point) DistSq(b Point) int64 { dX := int64(b.X - p.X) dY := int64(b.Y - p.Y) return dX*dX + dY*dY } // Hash returns a hash code for the point. func (p Point) Hash() int { return (53+int(p.X))*53 + int(p.Y) } // Circumcircle represents a circumcircle of a Triangle. type Circumcircle struct { cX, cY float32 Radius float32 } // calcCircumcircle calculates the circumcircle of three points. 
func calcCircumcircle(v0, v1, v2 Point) Circumcircle { var circumcircle Circumcircle A := int64(v1.X - v0.X) B := int64(v1.Y - v0.Y) C := int64(v2.X - v0.X) D := int64(v2.Y - v0.Y) E := A*int64(v0.X+v1.X) + B*int64(v0.Y+v1.Y) F := C*int64(v0.X+v2.X) + D*int64(v0.Y+v2.Y) G := float64(2 * (A*int64(v2.Y-v1.Y) - B*int64(v2.X-v1.X))) cx := float64(D*E-B*F) / G cy := float64(A*F-C*E) / G circumcircle.cX, circumcircle.cY = float32(cx), float32(cy) dx := cx - float64(v0.X) dy := cy - float64(v0.Y) circumcircle.Radius = float32(math.Sqrt(dx*dx + dy*dy)) return circumcircle } // ear represents a Devillers ear. type ear struct { a, b, c Point score float64 } // computeScore computes the score of the Devillers ear. func (e *ear) computeScore(p Point) { e.score = calculateScore(e.a, e.b, e.c, p) } // Edge represents an edge from point A to B. type Edge struct { A, B Point } // Equals returns if the edge is equal to another. func (e Edge) Equals(b Edge) bool { // A and B are ordered, so it isn't necessary to check the other way around return e.A == b.A && e.B == b.B } // NewEdge returns a new edge with its points sorted. func NewEdge(a, b Point) Edge { // Order the points in the edge if a.X > b.X { a, b = b, a } else if a.X == b.X { if a.Y > b.Y { a, b = b, a } } return Edge{a, b} } type FloatPoint struct { X, Y float64 }
triangulation/incrdelaunay/geom.go
0.904313
0.618291
geom.go
starcoder
package fseval import ( "io" "os" "path/filepath" "time" "github.com/openSUSE/umoci/pkg/unpriv" "github.com/vbatts/go-mtree" "golang.org/x/sys/unix" ) // RootlessFsEval is an FsEval implementation that uses "umoci/pkg/unpriv".* // functions in order to provide the ability for unprivileged users (those // without CAP_DAC_OVERRIDE and CAP_DAC_READ_SEARCH) to evaluate parts of a // filesystem that they own. Note that by necessity this requires modifying the // filesystem (and thus will not work on read-only filesystems). var RootlessFsEval FsEval = unprivFsEval(0) // unprivFsEval is a hack to be able to make RootlessFsEval a const. type unprivFsEval int // Open is equivalent to unpriv.Open. func (fs unprivFsEval) Open(path string) (*os.File, error) { return unpriv.Open(path) } // Create is equivalent to unpriv.Create. func (fs unprivFsEval) Create(path string) (*os.File, error) { return unpriv.Create(path) } // Readdir is equivalent to unpriv.Readdir. func (fs unprivFsEval) Readdir(path string) ([]os.FileInfo, error) { return unpriv.Readdir(path) } // Lstat is equivalent to unpriv.Lstat. func (fs unprivFsEval) Lstat(path string) (os.FileInfo, error) { return unpriv.Lstat(path) } func (fs unprivFsEval) Lstatx(path string) (unix.Stat_t, error) { return unpriv.Lstatx(path) } // Readlink is equivalent to unpriv.Readlink. func (fs unprivFsEval) Readlink(path string) (string, error) { return unpriv.Readlink(path) } // Symlink is equivalent to unpriv.Symlink. func (fs unprivFsEval) Symlink(linkname, path string) error { return unpriv.Symlink(linkname, path) } // Link is equivalent to unpriv.Link. func (fs unprivFsEval) Link(linkname, path string) error { return unpriv.Link(linkname, path) } // Chmod is equivalent to unpriv.Chmod. func (fs unprivFsEval) Chmod(path string, mode os.FileMode) error { return unpriv.Chmod(path, mode) } // Lutimes is equivalent to unpriv.Lutimes. 
func (fs unprivFsEval) Lutimes(path string, atime, mtime time.Time) error { return unpriv.Lutimes(path, atime, mtime) } // Remove is equivalent to unpriv.Remove. func (fs unprivFsEval) Remove(path string) error { return unpriv.Remove(path) } // RemoveAll is equivalent to unpriv.RemoveAll. func (fs unprivFsEval) RemoveAll(path string) error { return unpriv.RemoveAll(path) } // Mkdir is equivalent to unpriv.Mkdir. func (fs unprivFsEval) Mkdir(path string, perm os.FileMode) error { return unpriv.Mkdir(path, perm) } // Mknod is equivalent to unpriv.Mknod. func (fs unprivFsEval) Mknod(path string, mode os.FileMode, dev uint64) error { return unpriv.Mknod(path, mode, dev) } // MkdirAll is equivalent to unpriv.MkdirAll. func (fs unprivFsEval) MkdirAll(path string, perm os.FileMode) error { return unpriv.MkdirAll(path, perm) } // Llistxattr is equivalent to unpriv.Llistxattr func (fs unprivFsEval) Llistxattr(path string) ([]string, error) { return unpriv.Llistxattr(path) } // Lremovexattr is equivalent to unpriv.Lremovexattr func (fs unprivFsEval) Lremovexattr(path, name string) error { return unpriv.Lremovexattr(path, name) } // Lsetxattr is equivalent to unpriv.Lsetxattr func (fs unprivFsEval) Lsetxattr(path, name string, value []byte, flags int) error { return unpriv.Lsetxattr(path, name, value, flags) } // Lgetxattr is equivalent to unpriv.Lgetxattr func (fs unprivFsEval) Lgetxattr(path string, name string) ([]byte, error) { return unpriv.Lgetxattr(path, name) } // Lclearxattrs is equivalent to unpriv.Lclearxattrs func (fs unprivFsEval) Lclearxattrs(path string, except map[string]struct{}) error { return unpriv.Lclearxattrs(path, except) } // KeywordFunc returns a wrapper around the given mtree.KeywordFunc. 
func (fs unprivFsEval) KeywordFunc(fn mtree.KeywordFunc) mtree.KeywordFunc { return func(path string, info os.FileInfo, r io.Reader) ([]mtree.KeyVal, error) { var kv []mtree.KeyVal err := unpriv.Wrap(path, func(path string) error { var err error kv, err = fn(path, info, r) return err }) return kv, err } } // Walk is equivalent to filepath.Walk. func (fs unprivFsEval) Walk(root string, fn filepath.WalkFunc) error { return unpriv.Walk(root, fn) }
vendor/github.com/openSUSE/umoci/pkg/fseval/fseval_rootless.go
0.524151
0.434761
fseval_rootless.go
starcoder
package sqlds import ( "errors" "fmt" "regexp" "strings" "time" ) var ( // ErrorBadArgumentCount is returned from macros when the wrong number of arguments were provided ErrorBadArgumentCount = errors.New("unexpected number of arguments") ) // MacroFunc defines a signature for applying a query macro // Query macro implementations are defined by users / consumers of this package type MacroFunc func(*Query, []string) (string, error) // Macros is a list of MacroFuncs. // The "string" key is the name of the macro function. This name has to be regex friendly. type Macros map[string]MacroFunc // Default time filter for SQL based on the query time range. // It requires one argument, the time column to filter. // Example: // $__timeFilter(time) => "time BETWEEN '2006-01-02T15:04:05Z07:00' AND '2006-01-02T15:04:05Z07:00'" func macroTimeFilter(query *Query, args []string) (string, error) { if len(args) != 1 { return "", fmt.Errorf("%w: expected 1 argument, received %d", ErrorBadArgumentCount, len(args)) } var ( column = args[0] from = query.TimeRange.From.UTC().Format(time.RFC3339) to = query.TimeRange.To.UTC().Format(time.RFC3339) ) return fmt.Sprintf("%s >= '%s' AND %s <= '%s'", column, from, column, to), nil } // Default time filter for SQL based on the starting query time range. // It requires one argument, the time column to filter. // Example: // $__timeFrom(time) => "time > '2006-01-02T15:04:05Z07:00'" func macroTimeFrom(query *Query, args []string) (string, error) { if len(args) != 1 { return "", fmt.Errorf("%w: expected 1 argument, received %d", ErrorBadArgumentCount, len(args)) } return fmt.Sprintf("%s >= '%s'", args[0], query.TimeRange.From.UTC().Format(time.RFC3339)), nil } // Default time filter for SQL based on the ending query time range. // It requires one argument, the time column to filter. 
// Example: // $__timeTo(time) => "time < '2006-01-02T15:04:05Z07:00'" func macroTimeTo(query *Query, args []string) (string, error) { if len(args) != 1 { return "", fmt.Errorf("%w: expected 1 argument, received %d", ErrorBadArgumentCount, len(args)) } return fmt.Sprintf("%s <= '%s'", args[0], query.TimeRange.To.UTC().Format(time.RFC3339)), nil } // Default time group for SQL based the given period. // This basic example is meant to be customized with more complex periods. // It requires two arguments, the column to filter and the period. // Example: // $__timeTo(time, month) => "datepart(year, time), datepart(month, time)'" func macroTimeGroup(query *Query, args []string) (string, error) { if len(args) != 2 { return "", fmt.Errorf("%w: expected 1 argument, received %d", ErrorBadArgumentCount, len(args)) } res := "" switch args[1] { case "minute": res += fmt.Sprintf("datepart(minute, %s),", args[0]) fallthrough case "hour": res += fmt.Sprintf("datepart(hour, %s),", args[0]) fallthrough case "day": res += fmt.Sprintf("datepart(day, %s),", args[0]) fallthrough case "month": res += fmt.Sprintf("datepart(month, %s),", args[0]) fallthrough case "year": res += fmt.Sprintf("datepart(year, %s)", args[0]) } return res, nil } // Default macro to return the query table name. // Example: // $__table => "my_table" func macroTable(query *Query, args []string) (string, error) { return query.Table, nil } // Default macro to return the query column name. 
// Example: // $__column => "my_col" func macroColumn(query *Query, args []string) (string, error) { return query.Column, nil } var DefaultMacros Macros = Macros{ "timeFilter": macroTimeFilter, "timeFrom": macroTimeFrom, "timeGroup": macroTimeGroup, "timeTo": macroTimeTo, "table": macroTable, "column": macroColumn, } func trimAll(s []string) []string { r := make([]string, len(s)) for i, v := range s { r[i] = strings.TrimSpace(v) } return r } func getMacroRegex(name string) string { return fmt.Sprintf("\\$__%s\\b(?:\\((.*?)\\))?", name) } func interpolate(driver Driver, query *Query) (string, error) { macros := driver.Macros() for key, defaultMacro := range DefaultMacros { if _, ok := macros[key]; !ok { // If the driver doesn't define some macro, use the default one macros[key] = defaultMacro } } rawSQL := query.RawSQL for key, macro := range macros { matches, err := getMatches(key, rawSQL) if err != nil { return rawSQL, err } for _, match := range matches { if len(match) == 0 { // There were no matches for this macro continue } args := []string{} if len(match) > 1 { // This macro has arguments args = trimAll(strings.Split(match[1], ",")) } res, err := macro(query.WithSQL(rawSQL), args) if err != nil { return rawSQL, err } rawSQL = strings.Replace(rawSQL, match[0], res, -1) } } return rawSQL, nil } func getMatches(macroName, rawSQL string) ([][]string, error) { rgx, err := regexp.Compile(getMacroRegex(macroName)) if err != nil { return nil, err } return rgx.FindAllStringSubmatch(rawSQL, -1), nil }
macros.go
0.7586
0.404537
macros.go
starcoder
package datapoint import ( "bytes" "encoding/json" "fmt" "time" ) // Documentation taken from http://metrics20.org/spec/ // MetricType define how to display the Value. It's more metadata of the series than data about the // series itself. See target_type of http://metrics20.org/spec/ type MetricType int const ( // Gauge is values at each point in time Gauge MetricType = iota // Count is a number per a given interval (such as a statsd flushInterval); not very useful Count // Enum is an added type: Values aren't important relative to each other but are just important as distinct // items in a set. Usually used when Value is type "string" Enum // Counter is a number that keeps increasing over time (but might wrap/reset at some points) // (no statsd counterpart), i.e. a gauge with the added notion of "i usually want to derive this" Counter // Rate is a number per second Rate // Timestamp value represents a unix timestamp Timestamp ) // A Datapoint is the metric that is saved. Designed around http://metrics20.org/spec/ type Datapoint struct { // What is being measured. We think metric, rather than "unit" of metrics20, should be the // required identity of a datapoint and the "unit" should be a property of the Value itself Metric string `json:"metric"` // Dimensions of what is being measured. They are intrinsic. Contributes to the identity of // the metric. If this changes, we get a new metric identifier Dimensions map[string]string `json:"dimensions"` // Meta is information that's not particularly important to the datapoint, but may be important // to the pipeline that uses the datapoint. They are extrinsic. It provides additional // information about the metric. 
changes in this set doesn't change the metric identity Meta map[interface{}]interface{} `json:"-"` // Value of the datapoint Value Value `json:"value"` // The type of the datapoint series MetricType MetricType `json:"metric_type"` // The unix time of the datapoint Timestamp time.Time `json:"timestamp"` } type jsonDatapoint struct { Metric string `json:"metric"` Dimensions map[string]string `json:"dimensions"` Value interface{} `json:"value"` MetricType MetricType `json:"metric_type"` Timestamp time.Time `json:"timestamp"` } // UnmarshalJSON decodes JSON into a datapoint, creating the correct Value interface types for the // type of JSON value that was encoded func (dp *Datapoint) UnmarshalJSON(b []byte) error { var m jsonDatapoint dec := json.NewDecoder(bytes.NewBuffer(b)) dec.UseNumber() if err := dec.Decode(&m); err != nil { return err } switch t := m.Value.(type) { case string: dp.Value = NewStringValue(t) case json.Number: if num, e := t.Int64(); e == nil { dp.Value = NewIntValue(num) } else if num, e := t.Float64(); e == nil { dp.Value = NewFloatValue(num) } } dp.Metric = m.Metric dp.Dimensions = m.Dimensions dp.MetricType = m.MetricType dp.Timestamp = m.Timestamp return nil } func (dp *Datapoint) String() string { return fmt.Sprintf("DP[%s\t%s\t%s\t%d\t%s]", dp.Metric, dp.Dimensions, dp.Value, dp.MetricType, dp.Timestamp.String()) } type metadata int const ( stringProperties metadata = iota ) //SetProperty sets a property to be used when the time series associated with the datapoint is created func (dp *Datapoint) SetProperty(key string, value string) { if dp.Meta[stringProperties] == nil { dp.Meta[stringProperties] = make(map[string]string, 1) } dp.GetProperties()[key] = value } //RemoveProperty removes a property from the map of properties to be used when the time series associated with the datapoint is created func (dp *Datapoint) RemoveProperty(key string) { if dp.Meta[stringProperties] != nil { delete(dp.GetProperties(), key) if len(dp.GetProperties()) 
== 0 { delete(dp.Meta, stringProperties) } } } //GetProperties gets the map of properties to set when creating the time series associated with the datapoint. nil if no properties are set. func (dp *Datapoint) GetProperties() map[string]string { m, ok := dp.Meta[stringProperties].(map[string]string) if !ok { return nil } return m } // New creates a new datapoint with empty meta data func New(metric string, dimensions map[string]string, value Value, metricType MetricType, timestamp time.Time) *Datapoint { return NewWithMeta(metric, dimensions, map[interface{}]interface{}{}, value, metricType, timestamp) } // NewWithMeta creates a new datapoint with passed metadata func NewWithMeta(metric string, dimensions map[string]string, meta map[interface{}]interface{}, value Value, metricType MetricType, timestamp time.Time) *Datapoint { return &Datapoint{ Metric: metric, Dimensions: dimensions, Meta: meta, Value: value, MetricType: metricType, Timestamp: timestamp, } }
vendor/github.com/signalfx/golib/datapoint/datapoint.go
0.759671
0.517571
datapoint.go
starcoder
package ch4 import ( "math" ) func FindMaxCrossingSubarray(input []int, low int, mid int, high int) (maxLeft int, maxRight int, sum int) { leftSum := 0 rightSum := 0 for iter := mid; iter >= low; iter-- { sum = sum + input[iter] if sum > leftSum || iter == mid { leftSum = sum maxLeft = iter } } sum = 0 for iter := mid + 1; iter < high-1; iter++ { sum = sum + input[iter] if sum > rightSum || iter == mid+1 { rightSum = sum maxRight = iter } } sum = leftSum + rightSum return maxLeft, maxRight, sum } func FindMaxSubarray(input []int, low int, high int) (int, int, int) { if high == low { return low, high, input[low] } mid := int(math.Floor(float64((low + high) / 2))) leftLow, leftHigh, leftSum := FindMaxSubarray(input, low, mid) rightLow, rightHigh, rightSum := FindMaxSubarray(input, mid+1, high) crossLow, crossHigh, crossSum := FindMaxCrossingSubarray(input, low, mid, high) if leftSum >= rightSum && leftSum >= crossSum { return leftLow, leftHigh, leftSum } else if rightSum >= leftSum && rightSum >= crossSum { return rightLow, rightHigh, rightSum } return crossLow, crossHigh, crossSum } func FindMaxSubarrayBruteForce(input []int) (output []int, sumValue int, leftPos int, rightPos int) { // array size for i := 1; i < len(input); i++ { // array values for j := 0; j+i < len(input); j++ { var iteratorArray []int iteratorArray = input[j : j+i] if sum(iteratorArray) > sumValue { sumValue = sum(iteratorArray) output = iteratorArray leftPos = j rightPos = j + i - 1 } } } return output, sumValue, leftPos, rightPos } func MaxSubarrayKadane(input []int) (outputArray []int, answer int) { var sum int var loc []int for i := 0; i < len(input); i++ { if sum+input[i] > 0 { sum = sum + input[i] loc = append(loc, input[i]) } else { sum = 0 loc = []int{} } if sum >= answer { outputArray = loc[:] answer = sum } } return outputArray, answer } func sum(input []int) (output int) { for _, x := range input { output = output + x } return output } func max(input []int) (output int) { for _, x := 
range input { if x > output { output = x } } return output }
src/ch4/maximum_sub_array.go
0.604516
0.442817
maximum_sub_array.go
starcoder
package sparsetable

// fuzzyState is one active configuration of the approximate search:
// lev is the error budget consumed so far, next is the byte position in the
// pattern string, and state is the current DFA state.
type fuzzyState struct {
	lev, next int
	state     State
}

// FuzzyStack keeps track of the active states during the approximate search.
type FuzzyStack struct {
	stack []fuzzyState
	dfa   *DFA
	str   string
	max   int // maximum allowed error (Levenshtein budget)
}

// empty reports whether no active states remain.
func (f *FuzzyStack) empty() bool {
	return len(f.stack) == 0
}

// pop removes and returns the top active state. Panics if the stack is empty;
// callers are expected to check empty() first (see FuzzyDFA.Delta).
func (f *FuzzyStack) pop() fuzzyState {
	n := len(f.stack)
	if n == 0 {
		panic("called pop() on empty stack")
	}
	top := f.stack[n-1]
	f.stack = f.stack[0 : n-1]
	return top
}

// push adds s as an active state if it is still within the error budget, the
// pattern position is in range and the DFA state is valid.
// It first recursively expands every outgoing transition of s.state with the
// same pattern position and one more error — presumably this models deleting
// a dictionary character without consuming pattern input (TODO confirm).
func (f *FuzzyStack) push(s fuzzyState) {
	f.dfa.EachUTF8Transition(s.state, func(r rune, t State) {
		f.push(fuzzyState{
			lev:   s.lev + 1,
			state: t,
			next:  s.next,
		})
	})
	if s.lev <= f.max && s.next <= len(f.str) && s.state.Valid() {
		f.stack = append(f.stack, s)
	}
}

// deltaDiagonal follows every transition while also consuming one pattern
// byte at the cost of one error — a substitution-like move (TODO confirm).
func (f *FuzzyStack) deltaDiagonal(s fuzzyState) {
	f.dfa.EachUTF8Transition(s.state, func(r rune, t State) {
		f.push(fuzzyState{
			lev:   s.lev + 1,
			state: t,
			next:  s.next + 1,
		})
	})
}

// deltaVertical consumes one pattern byte without moving in the DFA, at the
// cost of one error — an insertion-like move (TODO confirm).
func (f *FuzzyStack) deltaVertical(s fuzzyState) {
	if s.next < len(f.str) {
		f.push(fuzzyState{
			lev:   s.lev + 1,
			state: s.state,
			next:  s.next + 1,
		})
	}
}

// deltaHorizontal follows the transition labeled with the next pattern byte
// without any error cost — an exact match step.
func (f *FuzzyStack) deltaHorizontal(s fuzzyState) {
	if s.next >= len(f.str) {
		return
	}
	t := f.dfa.Delta(s.state, f.str[s.next])
	if !t.Valid() {
		return
	}
	f.push(fuzzyState{
		lev:  s.lev,
		state: t,
		next: s.next + 1,
	})
}

// delta expands all successor configurations of top.
func (f *FuzzyStack) delta(top fuzzyState) {
	f.deltaDiagonal(top)
	f.deltaHorizontal(top)
	f.deltaVertical(top)
}

// FuzzyDFA is the basic struct for approximate matching on a DFA.
type FuzzyDFA struct {
	dfa *DFA
	k   int
}

// NewFuzzyDFA create a new FuzzyDFA with a given
// error limit k and a given DFA
func NewFuzzyDFA(k int, dfa *DFA) *FuzzyDFA {
	return &FuzzyDFA{k: k, dfa: dfa}
}

// MaxError returns the maximum allowed error for the fuzzy DFA.
func (d *FuzzyDFA) MaxError() int {
	return d.k
}

// Initial returns the initial active states of the approximate match for str.
func (d *FuzzyDFA) Initial(str string) *FuzzyStack {
	s := &FuzzyStack{
		str: str,
		dfa: d.dfa,
		max: d.k,
	}
	// Seed the search at the DFA's start state, position 0, zero errors.
	s.push(fuzzyState{
		lev:   0,
		state: d.dfa.Initial(),
		next:  0,
	})
	return s
}

// FinalStateCallback is a callback function that is called on final states.
// It is called using the active error, the next position and the data.
type FinalStateCallback func(int, int, int32)

// Delta makes one transition on the top of the stack. If a final state is
// encountered, the callback function is called. It returns false if no more
// transitions can be done with the active stack.
func (d *FuzzyDFA) Delta(f *FuzzyStack, cb FinalStateCallback) bool {
	if f.empty() {
		return false
	}
	top := f.pop()
	// Expand all successors of the popped state before testing finality.
	f.delta(top)
	if data, final := d.dfa.Final(top.state); final {
		cb(top.lev, top.next, data)
	}
	return true
}
fuzzydfa.go
0.755727
0.45042
fuzzydfa.go
starcoder
package binvox

import (
	"math"

	gl "github.com/fogleman/fauxgl"
)

const (
	// iso is the isosurface threshold; occupied voxels are stored as 1.0 and
	// missing map entries read as 0.0, so 0.5 sits between them.
	iso = 0.5
)

// gridCell is one cube of the marching-cubes lattice: 8 corner positions and
// their scalar values, ordered per http://paulbourke.net/geometry/polygonise/.
type gridCell struct {
	p   [8]gl.Vector
	val [8]float64
}

type mcManifoldMap map[Key]struct{}
type voxelLookupMap map[Key]float64

// MarchingCubes triangulates the voxel volume (white and color voxels alike)
// into a mesh using the marching cubes algorithm.
func (b *BinVOX) MarchingCubes() *gl.Mesh {
	// create lookup table
	lookup := make(voxelLookupMap)   // voxel locations
	gridCells := make(mcManifoldMap) // grid cell locations
	// keyFunc marks the voxel occupied and registers the 8 grid cells that
	// touch it. NOTE(review): the -X/+Y/-Z neighbor pattern mirrors the
	// corner ordering used in cell() below — confirm they stay in sync.
	keyFunc := func(v Key) {
		lookup[Key{v.X, v.Y, v.Z}] = 1.0
		gridCells[Key{v.X, v.Y, v.Z}] = struct{}{}
		gridCells[Key{v.X - 1, v.Y, v.Z}] = struct{}{}
		gridCells[Key{v.X, v.Y + 1, v.Z}] = struct{}{}
		gridCells[Key{v.X - 1, v.Y + 1, v.Z}] = struct{}{}
		gridCells[Key{v.X, v.Y, v.Z - 1}] = struct{}{}
		gridCells[Key{v.X - 1, v.Y, v.Z - 1}] = struct{}{}
		gridCells[Key{v.X, v.Y + 1, v.Z - 1}] = struct{}{}
		gridCells[Key{v.X - 1, v.Y + 1, v.Z - 1}] = struct{}{}
	}
	for v := range b.WhiteVoxels {
		keyFunc(v)
	}
	for v := range b.ColorVoxels {
		keyFunc(v)
	}

	var tris []*gl.Triangle
	vpmm := b.VoxelsPerMM()
	mmpv := 1.0 / vpmm // millimeters per voxel
	s := gl.V(mmpv, mmpv, mmpv)
	t := gl.V(b.TX, b.TY, b.TZ)
	// voxelToVector maps a voxel key (+corner offset) to model space:
	// voxel center (+0.5) is scaled to millimeters then translated.
	voxelToVector := func(k Key, dx, dy, dz float64) gl.Vector {
		x := float64(k.X) + 0.5
		y := float64(k.Y) + 0.5
		z := float64(k.Z) + 0.5
		v := gl.V(x+dx, y+dy, z+dz).Mul(s).Add(t)
		// vlog("voxelToVector(%v, %v, %v, %v) = %v", k, dx, dy, dz, v)
		return v
	}

	for k := range gridCells {
		grid := cell(k, lookup, voxelToVector)
		tris = append(tris, polygonize(grid)...)
	}
	return gl.NewMesh(tris, nil)
}

// polygonize emits the triangles for one grid cell, following Bourke's
// classic edge/triangle table formulation. Returns nil when the cell is
// entirely inside or outside the surface.
func polygonize(grid *gridCell) (tris []*gl.Triangle) {
	// Build an 8-bit index from which corners lie below the isosurface.
	var cubeIndex byte
	if grid.val[0] < iso {
		cubeIndex = cubeIndex | 1
	}
	if grid.val[1] < iso {
		cubeIndex = cubeIndex | 2
	}
	if grid.val[2] < iso {
		cubeIndex = cubeIndex | 4
	}
	if grid.val[3] < iso {
		cubeIndex = cubeIndex | 8
	}
	if grid.val[4] < iso {
		cubeIndex = cubeIndex | 16
	}
	if grid.val[5] < iso {
		cubeIndex = cubeIndex | 32
	}
	if grid.val[6] < iso {
		cubeIndex = cubeIndex | 64
	}
	if grid.val[7] < iso {
		cubeIndex = cubeIndex | 128
	}

	/* Cube is entirely in/out of the surface */
	if edgeTable[cubeIndex] == 0 {
		return nil
	}

	var vertlist [12]gl.Vector
	/* Find the vertices where the surface intersects the cube */
	if edgeTable[cubeIndex]&1 != 0 {
		vertlist[0] = vertexInterp(iso, grid.p[0], grid.p[1], grid.val[0], grid.val[1])
	}
	if edgeTable[cubeIndex]&2 != 0 {
		vertlist[1] = vertexInterp(iso, grid.p[1], grid.p[2], grid.val[1], grid.val[2])
	}
	if edgeTable[cubeIndex]&4 != 0 {
		vertlist[2] = vertexInterp(iso, grid.p[2], grid.p[3], grid.val[2], grid.val[3])
	}
	if edgeTable[cubeIndex]&8 != 0 {
		vertlist[3] = vertexInterp(iso, grid.p[3], grid.p[0], grid.val[3], grid.val[0])
	}
	if edgeTable[cubeIndex]&16 != 0 {
		vertlist[4] = vertexInterp(iso, grid.p[4], grid.p[5], grid.val[4], grid.val[5])
	}
	if edgeTable[cubeIndex]&32 != 0 {
		vertlist[5] = vertexInterp(iso, grid.p[5], grid.p[6], grid.val[5], grid.val[6])
	}
	if edgeTable[cubeIndex]&64 != 0 {
		vertlist[6] = vertexInterp(iso, grid.p[6], grid.p[7], grid.val[6], grid.val[7])
	}
	if edgeTable[cubeIndex]&128 != 0 {
		vertlist[7] = vertexInterp(iso, grid.p[7], grid.p[4], grid.val[7], grid.val[4])
	}
	if edgeTable[cubeIndex]&256 != 0 {
		vertlist[8] = vertexInterp(iso, grid.p[0], grid.p[4], grid.val[0], grid.val[4])
	}
	if edgeTable[cubeIndex]&512 != 0 {
		vertlist[9] = vertexInterp(iso, grid.p[1], grid.p[5], grid.val[1], grid.val[5])
	}
	if edgeTable[cubeIndex]&1024 != 0 {
		vertlist[10] = vertexInterp(iso, grid.p[2], grid.p[6], grid.val[2], grid.val[6])
	}
	if edgeTable[cubeIndex]&2048 != 0 {
		vertlist[11] = vertexInterp(iso, grid.p[3], grid.p[7], grid.val[3], grid.val[7])
	}

	/* Create the triangle */
	for i := 0; triTable[cubeIndex][i] != -1; i += 3 {
		// The second and third vertices are deliberately swapped relative to
		// Bourke's listing (see the commented-out original order), which
		// reverses the winding of every emitted triangle.
		t := gl.NewTriangleForPoints(
			vertlist[triTable[cubeIndex][i]],
			// vertlist[triTable[cubeIndex][i+1]],
			// vertlist[triTable[cubeIndex][i+2]])
			vertlist[triTable[cubeIndex][i+2]],
			vertlist[triTable[cubeIndex][i+1]])
		// log.Printf("t=(%v, %v, %v)", t.V1.Position, t.V2.Position, t.V3.Position)
		tris = append(tris, t)
	}
	return tris
}

// cell creates a gridCell for the given voxel, sampling the occupancy of the
// voxel and its 7 neighbors in the +X/-Y/+Z direction (missing entries read
// as 0.0, i.e. empty).
func cell(k Key, m voxelLookupMap, v2v func(k Key, dx, dy, dz float64) gl.Vector) *gridCell {
	g := &gridCell{
		// Ordering according to diagram at http://paulbourke.net/geometry/polygonise/
		p: [8]gl.Vector{
			v2v(k, 0, 0, 0),
			v2v(k, 1, 0, 0),
			v2v(k, 1, -1, 0),
			v2v(k, 0, -1, 0),
			v2v(k, 0, 0, 1),
			v2v(k, 1, 0, 1),
			v2v(k, 1, -1, 1),
			v2v(k, 0, -1, 1),
		},
		// val entries must stay in lock-step with the corner offsets above.
		val: [8]float64{
			m[k],
			m[Key{k.X + 1, k.Y, k.Z}],
			m[Key{k.X + 1, k.Y - 1, k.Z}],
			m[Key{k.X, k.Y - 1, k.Z}],
			m[Key{k.X, k.Y, k.Z + 1}],
			m[Key{k.X + 1, k.Y, k.Z + 1}],
			m[Key{k.X + 1, k.Y - 1, k.Z + 1}],
			m[Key{k.X, k.Y - 1, k.Z + 1}],
		},
	}
	// log.Printf("cell(%v)=%v", k, g)
	return g
}

// vertexInterp linearly interpolates the position where an isosurface cuts
// an edge between two vertices, each with their own scalar value.
func vertexInterp(isolevel float64, p1, p2 gl.Vector, valp1, valp2 float64) gl.Vector { if math.Abs(isolevel-valp1) < epsilon { return p1 } if math.Abs(isolevel-valp2) < epsilon { return p2 } if math.Abs(valp1-valp2) < epsilon { return p1 } mu := (isolevel - valp1) / (valp2 - valp1) return gl.Vector{ X: p1.X + mu*(p2.X-p1.X), Y: p1.Y + mu*(p2.Y-p1.Y), Z: p1.Z + mu*(p2.Z-p1.Z), } } var edgeTable = [256]int{ 0x0, 0x109, 0x203, 0x30a, 0x406, 0x50f, 0x605, 0x70c, 0x80c, 0x905, 0xa0f, 0xb06, 0xc0a, 0xd03, 0xe09, 0xf00, 0x190, 0x99, 0x393, 0x29a, 0x596, 0x49f, 0x795, 0x69c, 0x99c, 0x895, 0xb9f, 0xa96, 0xd9a, 0xc93, 0xf99, 0xe90, 0x230, 0x339, 0x33, 0x13a, 0x636, 0x73f, 0x435, 0x53c, 0xa3c, 0xb35, 0x83f, 0x936, 0xe3a, 0xf33, 0xc39, 0xd30, 0x3a0, 0x2a9, 0x1a3, 0xaa, 0x7a6, 0x6af, 0x5a5, 0x4ac, 0xbac, 0xaa5, 0x9af, 0x8a6, 0xfaa, 0xea3, 0xda9, 0xca0, 0x460, 0x569, 0x663, 0x76a, 0x66, 0x16f, 0x265, 0x36c, 0xc6c, 0xd65, 0xe6f, 0xf66, 0x86a, 0x963, 0xa69, 0xb60, 0x5f0, 0x4f9, 0x7f3, 0x6fa, 0x1f6, 0xff, 0x3f5, 0x2fc, 0xdfc, 0xcf5, 0xfff, 0xef6, 0x9fa, 0x8f3, 0xbf9, 0xaf0, 0x650, 0x759, 0x453, 0x55a, 0x256, 0x35f, 0x55, 0x15c, 0xe5c, 0xf55, 0xc5f, 0xd56, 0xa5a, 0xb53, 0x859, 0x950, 0x7c0, 0x6c9, 0x5c3, 0x4ca, 0x3c6, 0x2cf, 0x1c5, 0xcc, 0xfcc, 0xec5, 0xdcf, 0xcc6, 0xbca, 0xac3, 0x9c9, 0x8c0, 0x8c0, 0x9c9, 0xac3, 0xbca, 0xcc6, 0xdcf, 0xec5, 0xfcc, 0xcc, 0x1c5, 0x2cf, 0x3c6, 0x4ca, 0x5c3, 0x6c9, 0x7c0, 0x950, 0x859, 0xb53, 0xa5a, 0xd56, 0xc5f, 0xf55, 0xe5c, 0x15c, 0x55, 0x35f, 0x256, 0x55a, 0x453, 0x759, 0x650, 0xaf0, 0xbf9, 0x8f3, 0x9fa, 0xef6, 0xfff, 0xcf5, 0xdfc, 0x2fc, 0x3f5, 0xff, 0x1f6, 0x6fa, 0x7f3, 0x4f9, 0x5f0, 0xb60, 0xa69, 0x963, 0x86a, 0xf66, 0xe6f, 0xd65, 0xc6c, 0x36c, 0x265, 0x16f, 0x66, 0x76a, 0x663, 0x569, 0x460, 0xca0, 0xda9, 0xea3, 0xfaa, 0x8a6, 0x9af, 0xaa5, 0xbac, 0x4ac, 0x5a5, 0x6af, 0x7a6, 0xaa, 0x1a3, 0x2a9, 0x3a0, 0xd30, 0xc39, 0xf33, 0xe3a, 0x936, 0x83f, 0xb35, 0xa3c, 0x53c, 0x435, 0x73f, 0x636, 0x13a, 0x33, 0x339, 0x230, 0xe90, 0xf99, 0xc93, 0xd9a, 0xa96, 
0xb9f, 0x895, 0x99c, 0x69c, 0x795, 0x49f, 0x596, 0x29a, 0x393, 0x99, 0x190, 0xf00, 0xe09, 0xd03, 0xc0a, 0xb06, 0xa0f, 0x905, 0x80c, 0x70c, 0x605, 0x50f, 0x406, 0x30a, 0x203, 0x109, 0x0, } var triTable = [256][16]int{ {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 1, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 8, 3, 9, 8, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 2, 10, 0, 2, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {2, 8, 3, 2, 10, 8, 10, 9, 8, -1, -1, -1, -1, -1, -1, -1}, {3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 11, 2, 8, 11, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 9, 0, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 11, 2, 1, 9, 11, 9, 8, 11, -1, -1, -1, -1, -1, -1, -1}, {3, 10, 1, 11, 10, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 10, 1, 0, 8, 10, 8, 11, 10, -1, -1, -1, -1, -1, -1, -1}, {3, 9, 0, 3, 11, 9, 11, 10, 9, -1, -1, -1, -1, -1, -1, -1}, {9, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 3, 0, 7, 3, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 1, 9, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 1, 9, 4, 7, 1, 7, 3, 1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 4, 7, 3, 0, 4, 1, 2, 10, -1, -1, -1, -1, -1, -1, -1}, {9, 2, 10, 9, 0, 2, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1}, {2, 10, 9, 2, 9, 7, 2, 7, 3, 7, 9, 4, -1, -1, -1, -1}, {8, 4, 7, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {11, 4, 7, 11, 2, 4, 2, 0, 4, -1, -1, -1, -1, -1, -1, -1}, {9, 0, 1, 8, 4, 7, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1}, {4, 7, 11, 9, 4, 11, 9, 11, 2, 9, 2, 1, -1, -1, -1, -1}, {3, 10, 1, 3, 11, 10, 7, 8, 4, -1, -1, -1, -1, -1, -1, -1}, {1, 11, 10, 1, 4, 11, 1, 
0, 4, 7, 11, 4, -1, -1, -1, -1}, {4, 7, 8, 9, 0, 11, 9, 11, 10, 11, 0, 3, -1, -1, -1, -1}, {4, 7, 11, 4, 11, 9, 9, 11, 10, -1, -1, -1, -1, -1, -1, -1}, {9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 5, 4, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 5, 4, 1, 5, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {8, 5, 4, 8, 3, 5, 3, 1, 5, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 0, 8, 1, 2, 10, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1}, {5, 2, 10, 5, 4, 2, 4, 0, 2, -1, -1, -1, -1, -1, -1, -1}, {2, 10, 5, 3, 2, 5, 3, 5, 4, 3, 4, 8, -1, -1, -1, -1}, {9, 5, 4, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 11, 2, 0, 8, 11, 4, 9, 5, -1, -1, -1, -1, -1, -1, -1}, {0, 5, 4, 0, 1, 5, 2, 3, 11, -1, -1, -1, -1, -1, -1, -1}, {2, 1, 5, 2, 5, 8, 2, 8, 11, 4, 8, 5, -1, -1, -1, -1}, {10, 3, 11, 10, 1, 3, 9, 5, 4, -1, -1, -1, -1, -1, -1, -1}, {4, 9, 5, 0, 8, 1, 8, 10, 1, 8, 11, 10, -1, -1, -1, -1}, {5, 4, 0, 5, 0, 11, 5, 11, 10, 11, 0, 3, -1, -1, -1, -1}, {5, 4, 8, 5, 8, 10, 10, 8, 11, -1, -1, -1, -1, -1, -1, -1}, {9, 7, 8, 5, 7, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 3, 0, 9, 5, 3, 5, 7, 3, -1, -1, -1, -1, -1, -1, -1}, {0, 7, 8, 0, 1, 7, 1, 5, 7, -1, -1, -1, -1, -1, -1, -1}, {1, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 7, 8, 9, 5, 7, 10, 1, 2, -1, -1, -1, -1, -1, -1, -1}, {10, 1, 2, 9, 5, 0, 5, 3, 0, 5, 7, 3, -1, -1, -1, -1}, {8, 0, 2, 8, 2, 5, 8, 5, 7, 10, 5, 2, -1, -1, -1, -1}, {2, 10, 5, 2, 5, 3, 3, 5, 7, -1, -1, -1, -1, -1, -1, -1}, {7, 9, 5, 7, 8, 9, 3, 11, 2, -1, -1, -1, -1, -1, -1, -1}, {9, 5, 7, 9, 7, 2, 9, 2, 0, 2, 7, 11, -1, -1, -1, -1}, {2, 3, 11, 0, 1, 8, 1, 7, 8, 1, 5, 7, -1, -1, -1, -1}, {11, 2, 1, 11, 1, 7, 7, 1, 5, -1, -1, -1, -1, -1, -1, -1}, {9, 5, 8, 8, 5, 7, 10, 1, 3, 10, 3, 11, -1, -1, -1, -1}, {5, 7, 0, 5, 0, 9, 7, 11, 0, 1, 0, 10, 11, 10, 0, -1}, {11, 10, 0, 11, 0, 3, 10, 5, 0, 8, 0, 7, 5, 7, 0, -1}, {11, 10, 5, 7, 11, 5, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1}, {10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 0, 1, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 8, 3, 1, 9, 8, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1}, {1, 6, 5, 2, 6, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 6, 5, 1, 2, 6, 3, 0, 8, -1, -1, -1, -1, -1, -1, -1}, {9, 6, 5, 9, 0, 6, 0, 2, 6, -1, -1, -1, -1, -1, -1, -1}, {5, 9, 8, 5, 8, 2, 5, 2, 6, 3, 2, 8, -1, -1, -1, -1}, {2, 3, 11, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {11, 0, 8, 11, 2, 0, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1}, {0, 1, 9, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1, -1, -1, -1}, {5, 10, 6, 1, 9, 2, 9, 11, 2, 9, 8, 11, -1, -1, -1, -1}, {6, 3, 11, 6, 5, 3, 5, 1, 3, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 11, 0, 11, 5, 0, 5, 1, 5, 11, 6, -1, -1, -1, -1}, {3, 11, 6, 0, 3, 6, 0, 6, 5, 0, 5, 9, -1, -1, -1, -1}, {6, 5, 9, 6, 9, 11, 11, 9, 8, -1, -1, -1, -1, -1, -1, -1}, {5, 10, 6, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 3, 0, 4, 7, 3, 6, 5, 10, -1, -1, -1, -1, -1, -1, -1}, {1, 9, 0, 5, 10, 6, 8, 4, 7, -1, -1, -1, -1, -1, -1, -1}, {10, 6, 5, 1, 9, 7, 1, 7, 3, 7, 9, 4, -1, -1, -1, -1}, {6, 1, 2, 6, 5, 1, 4, 7, 8, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 5, 5, 2, 6, 3, 0, 4, 3, 4, 7, -1, -1, -1, -1}, {8, 4, 7, 9, 0, 5, 0, 6, 5, 0, 2, 6, -1, -1, -1, -1}, {7, 3, 9, 7, 9, 4, 3, 2, 9, 5, 9, 6, 2, 6, 9, -1}, {3, 11, 2, 7, 8, 4, 10, 6, 5, -1, -1, -1, -1, -1, -1, -1}, {5, 10, 6, 4, 7, 2, 4, 2, 0, 2, 7, 11, -1, -1, -1, -1}, {0, 1, 9, 4, 7, 8, 2, 3, 11, 5, 10, 6, -1, -1, -1, -1}, {9, 2, 1, 9, 11, 2, 9, 4, 11, 7, 11, 4, 5, 10, 6, -1}, {8, 4, 7, 3, 11, 5, 3, 5, 1, 5, 11, 6, -1, -1, -1, -1}, {5, 1, 11, 5, 11, 6, 1, 0, 11, 7, 11, 4, 0, 4, 11, -1}, {0, 5, 9, 0, 6, 5, 0, 3, 6, 11, 6, 3, 8, 4, 7, -1}, {6, 5, 9, 6, 9, 11, 4, 7, 9, 7, 11, 9, -1, -1, -1, -1}, {10, 4, 9, 6, 4, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 10, 6, 4, 9, 10, 0, 8, 3, -1, -1, -1, -1, -1, -1, -1}, {10, 0, 1, 10, 6, 0, 6, 4, 0, -1, 
-1, -1, -1, -1, -1, -1}, {8, 3, 1, 8, 1, 6, 8, 6, 4, 6, 1, 10, -1, -1, -1, -1}, {1, 4, 9, 1, 2, 4, 2, 6, 4, -1, -1, -1, -1, -1, -1, -1}, {3, 0, 8, 1, 2, 9, 2, 4, 9, 2, 6, 4, -1, -1, -1, -1}, {0, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {8, 3, 2, 8, 2, 4, 4, 2, 6, -1, -1, -1, -1, -1, -1, -1}, {10, 4, 9, 10, 6, 4, 11, 2, 3, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 2, 2, 8, 11, 4, 9, 10, 4, 10, 6, -1, -1, -1, -1}, {3, 11, 2, 0, 1, 6, 0, 6, 4, 6, 1, 10, -1, -1, -1, -1}, {6, 4, 1, 6, 1, 10, 4, 8, 1, 2, 1, 11, 8, 11, 1, -1}, {9, 6, 4, 9, 3, 6, 9, 1, 3, 11, 6, 3, -1, -1, -1, -1}, {8, 11, 1, 8, 1, 0, 11, 6, 1, 9, 1, 4, 6, 4, 1, -1}, {3, 11, 6, 3, 6, 0, 0, 6, 4, -1, -1, -1, -1, -1, -1, -1}, {6, 4, 8, 11, 6, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {7, 10, 6, 7, 8, 10, 8, 9, 10, -1, -1, -1, -1, -1, -1, -1}, {0, 7, 3, 0, 10, 7, 0, 9, 10, 6, 7, 10, -1, -1, -1, -1}, {10, 6, 7, 1, 10, 7, 1, 7, 8, 1, 8, 0, -1, -1, -1, -1}, {10, 6, 7, 10, 7, 1, 1, 7, 3, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 6, 1, 6, 8, 1, 8, 9, 8, 6, 7, -1, -1, -1, -1}, {2, 6, 9, 2, 9, 1, 6, 7, 9, 0, 9, 3, 7, 3, 9, -1}, {7, 8, 0, 7, 0, 6, 6, 0, 2, -1, -1, -1, -1, -1, -1, -1}, {7, 3, 2, 6, 7, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {2, 3, 11, 10, 6, 8, 10, 8, 9, 8, 6, 7, -1, -1, -1, -1}, {2, 0, 7, 2, 7, 11, 0, 9, 7, 6, 7, 10, 9, 10, 7, -1}, {1, 8, 0, 1, 7, 8, 1, 10, 7, 6, 7, 10, 2, 3, 11, -1}, {11, 2, 1, 11, 1, 7, 10, 6, 1, 6, 7, 1, -1, -1, -1, -1}, {8, 9, 6, 8, 6, 7, 9, 1, 6, 11, 6, 3, 1, 3, 6, -1}, {0, 9, 1, 11, 6, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {7, 8, 0, 7, 0, 6, 3, 11, 0, 11, 6, 0, -1, -1, -1, -1}, {7, 11, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 0, 8, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 1, 9, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {8, 1, 9, 8, 3, 1, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1}, {10, 1, 2, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, 3, 0, 8, 6, 
11, 7, -1, -1, -1, -1, -1, -1, -1}, {2, 9, 0, 2, 10, 9, 6, 11, 7, -1, -1, -1, -1, -1, -1, -1}, {6, 11, 7, 2, 10, 3, 10, 8, 3, 10, 9, 8, -1, -1, -1, -1}, {7, 2, 3, 6, 2, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {7, 0, 8, 7, 6, 0, 6, 2, 0, -1, -1, -1, -1, -1, -1, -1}, {2, 7, 6, 2, 3, 7, 0, 1, 9, -1, -1, -1, -1, -1, -1, -1}, {1, 6, 2, 1, 8, 6, 1, 9, 8, 8, 7, 6, -1, -1, -1, -1}, {10, 7, 6, 10, 1, 7, 1, 3, 7, -1, -1, -1, -1, -1, -1, -1}, {10, 7, 6, 1, 7, 10, 1, 8, 7, 1, 0, 8, -1, -1, -1, -1}, {0, 3, 7, 0, 7, 10, 0, 10, 9, 6, 10, 7, -1, -1, -1, -1}, {7, 6, 10, 7, 10, 8, 8, 10, 9, -1, -1, -1, -1, -1, -1, -1}, {6, 8, 4, 11, 8, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 6, 11, 3, 0, 6, 0, 4, 6, -1, -1, -1, -1, -1, -1, -1}, {8, 6, 11, 8, 4, 6, 9, 0, 1, -1, -1, -1, -1, -1, -1, -1}, {9, 4, 6, 9, 6, 3, 9, 3, 1, 11, 3, 6, -1, -1, -1, -1}, {6, 8, 4, 6, 11, 8, 2, 10, 1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, 3, 0, 11, 0, 6, 11, 0, 4, 6, -1, -1, -1, -1}, {4, 11, 8, 4, 6, 11, 0, 2, 9, 2, 10, 9, -1, -1, -1, -1}, {10, 9, 3, 10, 3, 2, 9, 4, 3, 11, 3, 6, 4, 6, 3, -1}, {8, 2, 3, 8, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1}, {0, 4, 2, 4, 6, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 9, 0, 2, 3, 4, 2, 4, 6, 4, 3, 8, -1, -1, -1, -1}, {1, 9, 4, 1, 4, 2, 2, 4, 6, -1, -1, -1, -1, -1, -1, -1}, {8, 1, 3, 8, 6, 1, 8, 4, 6, 6, 10, 1, -1, -1, -1, -1}, {10, 1, 0, 10, 0, 6, 6, 0, 4, -1, -1, -1, -1, -1, -1, -1}, {4, 6, 3, 4, 3, 8, 6, 10, 3, 0, 3, 9, 10, 9, 3, -1}, {10, 9, 4, 6, 10, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 9, 5, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, 4, 9, 5, 11, 7, 6, -1, -1, -1, -1, -1, -1, -1}, {5, 0, 1, 5, 4, 0, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1}, {11, 7, 6, 8, 3, 4, 3, 5, 4, 3, 1, 5, -1, -1, -1, -1}, {9, 5, 4, 10, 1, 2, 7, 6, 11, -1, -1, -1, -1, -1, -1, -1}, {6, 11, 7, 1, 2, 10, 0, 8, 3, 4, 9, 5, -1, -1, -1, -1}, {7, 6, 11, 5, 4, 10, 4, 2, 10, 4, 0, 2, -1, -1, -1, -1}, {3, 4, 8, 3, 5, 4, 3, 2, 5, 10, 5, 2, 11, 7, 6, -1}, {7, 2, 3, 7, 
6, 2, 5, 4, 9, -1, -1, -1, -1, -1, -1, -1}, {9, 5, 4, 0, 8, 6, 0, 6, 2, 6, 8, 7, -1, -1, -1, -1}, {3, 6, 2, 3, 7, 6, 1, 5, 0, 5, 4, 0, -1, -1, -1, -1}, {6, 2, 8, 6, 8, 7, 2, 1, 8, 4, 8, 5, 1, 5, 8, -1}, {9, 5, 4, 10, 1, 6, 1, 7, 6, 1, 3, 7, -1, -1, -1, -1}, {1, 6, 10, 1, 7, 6, 1, 0, 7, 8, 7, 0, 9, 5, 4, -1}, {4, 0, 10, 4, 10, 5, 0, 3, 10, 6, 10, 7, 3, 7, 10, -1}, {7, 6, 10, 7, 10, 8, 5, 4, 10, 4, 8, 10, -1, -1, -1, -1}, {6, 9, 5, 6, 11, 9, 11, 8, 9, -1, -1, -1, -1, -1, -1, -1}, {3, 6, 11, 0, 6, 3, 0, 5, 6, 0, 9, 5, -1, -1, -1, -1}, {0, 11, 8, 0, 5, 11, 0, 1, 5, 5, 6, 11, -1, -1, -1, -1}, {6, 11, 3, 6, 3, 5, 5, 3, 1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 10, 9, 5, 11, 9, 11, 8, 11, 5, 6, -1, -1, -1, -1}, {0, 11, 3, 0, 6, 11, 0, 9, 6, 5, 6, 9, 1, 2, 10, -1}, {11, 8, 5, 11, 5, 6, 8, 0, 5, 10, 5, 2, 0, 2, 5, -1}, {6, 11, 3, 6, 3, 5, 2, 10, 3, 10, 5, 3, -1, -1, -1, -1}, {5, 8, 9, 5, 2, 8, 5, 6, 2, 3, 8, 2, -1, -1, -1, -1}, {9, 5, 6, 9, 6, 0, 0, 6, 2, -1, -1, -1, -1, -1, -1, -1}, {1, 5, 8, 1, 8, 0, 5, 6, 8, 3, 8, 2, 6, 2, 8, -1}, {1, 5, 6, 2, 1, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 3, 6, 1, 6, 10, 3, 8, 6, 5, 6, 9, 8, 9, 6, -1}, {10, 1, 0, 10, 0, 6, 9, 5, 0, 5, 6, 0, -1, -1, -1, -1}, {0, 3, 8, 5, 6, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {10, 5, 6, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {11, 5, 10, 7, 5, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {11, 5, 10, 11, 7, 5, 8, 3, 0, -1, -1, -1, -1, -1, -1, -1}, {5, 11, 7, 5, 10, 11, 1, 9, 0, -1, -1, -1, -1, -1, -1, -1}, {10, 7, 5, 10, 11, 7, 9, 8, 1, 8, 3, 1, -1, -1, -1, -1}, {11, 1, 2, 11, 7, 1, 7, 5, 1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, 1, 2, 7, 1, 7, 5, 7, 2, 11, -1, -1, -1, -1}, {9, 7, 5, 9, 2, 7, 9, 0, 2, 2, 11, 7, -1, -1, -1, -1}, {7, 5, 2, 7, 2, 11, 5, 9, 2, 3, 2, 8, 9, 8, 2, -1}, {2, 5, 10, 2, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1}, {8, 2, 0, 8, 5, 2, 8, 7, 5, 10, 2, 5, -1, -1, -1, -1}, {9, 0, 1, 5, 10, 3, 5, 3, 7, 3, 10, 2, -1, -1, -1, -1}, {9, 8, 2, 9, 2, 1, 8, 7, 2, 10, 2, 5, 
7, 5, 2, -1}, {1, 3, 5, 3, 7, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 7, 0, 7, 1, 1, 7, 5, -1, -1, -1, -1, -1, -1, -1}, {9, 0, 3, 9, 3, 5, 5, 3, 7, -1, -1, -1, -1, -1, -1, -1}, {9, 8, 7, 5, 9, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {5, 8, 4, 5, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1}, {5, 0, 4, 5, 11, 0, 5, 10, 11, 11, 3, 0, -1, -1, -1, -1}, {0, 1, 9, 8, 4, 10, 8, 10, 11, 10, 4, 5, -1, -1, -1, -1}, {10, 11, 4, 10, 4, 5, 11, 3, 4, 9, 4, 1, 3, 1, 4, -1}, {2, 5, 1, 2, 8, 5, 2, 11, 8, 4, 5, 8, -1, -1, -1, -1}, {0, 4, 11, 0, 11, 3, 4, 5, 11, 2, 11, 1, 5, 1, 11, -1}, {0, 2, 5, 0, 5, 9, 2, 11, 5, 4, 5, 8, 11, 8, 5, -1}, {9, 4, 5, 2, 11, 3, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {2, 5, 10, 3, 5, 2, 3, 4, 5, 3, 8, 4, -1, -1, -1, -1}, {5, 10, 2, 5, 2, 4, 4, 2, 0, -1, -1, -1, -1, -1, -1, -1}, {3, 10, 2, 3, 5, 10, 3, 8, 5, 4, 5, 8, 0, 1, 9, -1}, {5, 10, 2, 5, 2, 4, 1, 9, 2, 9, 4, 2, -1, -1, -1, -1}, {8, 4, 5, 8, 5, 3, 3, 5, 1, -1, -1, -1, -1, -1, -1, -1}, {0, 4, 5, 1, 0, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {8, 4, 5, 8, 5, 3, 9, 0, 5, 0, 3, 5, -1, -1, -1, -1}, {9, 4, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 11, 7, 4, 9, 11, 9, 10, 11, -1, -1, -1, -1, -1, -1, -1}, {0, 8, 3, 4, 9, 7, 9, 11, 7, 9, 10, 11, -1, -1, -1, -1}, {1, 10, 11, 1, 11, 4, 1, 4, 0, 7, 4, 11, -1, -1, -1, -1}, {3, 1, 4, 3, 4, 8, 1, 10, 4, 7, 4, 11, 10, 11, 4, -1}, {4, 11, 7, 9, 11, 4, 9, 2, 11, 9, 1, 2, -1, -1, -1, -1}, {9, 7, 4, 9, 11, 7, 9, 1, 11, 2, 11, 1, 0, 8, 3, -1}, {11, 7, 4, 11, 4, 2, 2, 4, 0, -1, -1, -1, -1, -1, -1, -1}, {11, 7, 4, 11, 4, 2, 8, 3, 4, 3, 2, 4, -1, -1, -1, -1}, {2, 9, 10, 2, 7, 9, 2, 3, 7, 7, 4, 9, -1, -1, -1, -1}, {9, 10, 7, 9, 7, 4, 10, 2, 7, 8, 7, 0, 2, 0, 7, -1}, {3, 7, 10, 3, 10, 2, 7, 4, 10, 1, 10, 0, 4, 0, 10, -1}, {1, 10, 2, 8, 7, 4, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {4, 9, 1, 4, 1, 7, 7, 1, 3, -1, -1, -1, -1, -1, -1, -1}, {4, 9, 1, 4, 1, 7, 0, 8, 1, 8, 7, 1, -1, -1, -1, -1}, {4, 0, 3, 7, 4, 3, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1}, {4, 8, 7, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {9, 10, 8, 10, 11, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 0, 9, 3, 9, 11, 11, 9, 10, -1, -1, -1, -1, -1, -1, -1}, {0, 1, 10, 0, 10, 8, 8, 10, 11, -1, -1, -1, -1, -1, -1, -1}, {3, 1, 10, 11, 3, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 2, 11, 1, 11, 9, 9, 11, 8, -1, -1, -1, -1, -1, -1, -1}, {3, 0, 9, 3, 9, 11, 1, 2, 9, 2, 11, 9, -1, -1, -1, -1}, {0, 2, 11, 8, 0, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {3, 2, 11, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {2, 3, 8, 2, 8, 10, 10, 8, 9, -1, -1, -1, -1, -1, -1, -1}, {9, 10, 2, 0, 9, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {2, 3, 8, 2, 8, 10, 0, 1, 8, 1, 10, 8, -1, -1, -1, -1}, {1, 10, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {1, 3, 8, 9, 1, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 9, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {0, 3, 8, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, }
binvox/marching-cubes.go
0.635449
0.550668
marching-cubes.go
starcoder
package lcs

import (
	"context"
	"reflect"
)

// Lcs is the interface to calculate the LCS of two arrays.
type Lcs interface {
	// Values calculates the LCS value of the two arrays.
	Values() (values []interface{})
	// ValuesContext is a context aware version of Values().
	ValuesContext(ctx context.Context) (values []interface{}, err error)
	// IndexPairs calculates pairs of indices which have the same value in LCS.
	IndexPairs() (pairs []IndexPair)
	// IndexPairsContext is a context aware version of IndexPairs().
	IndexPairsContext(ctx context.Context) (pairs []IndexPair, err error)
	// Length calculates the length of the LCS.
	Length() (length int)
	// LengthContext is a context aware version of Length().
	LengthContext(ctx context.Context) (length int, err error)
	// Left returns one of the two arrays to be compared.
	Left() (leftValues []interface{})
	// Right returns the other of the two arrays to be compared.
	Right() (rightValues []interface{})
}

// IndexPair represents a pair of indices in the Left and Right arrays found in the LCS value.
type IndexPair struct {
	Left  int
	Right int
}

// lcs implements Lcs using a lazily built, cached dynamic-programming table,
// from which index pairs and values are derived (and also cached).
type lcs struct {
	left  []interface{}
	right []interface{}
	/* for caching */
	table      [][]int
	indexPairs []IndexPair
	values     []interface{}
}

// New creates a new LCS calculator from two arrays.
func New(left, right []interface{}) Lcs {
	return &lcs{
		left:       left,
		right:      right,
		table:      nil,
		indexPairs: nil,
		values:     nil,
	}
}

// Table returns the dynamic-programming table; it is the
// context-free convenience wrapper around TableContext.
func (lcs *lcs) Table() (table [][]int) {
	table, _ = lcs.TableContext(context.Background())
	return table
}

// TableContext builds (and caches) the (len(left)+1) x (len(right)+1)
// dynamic-programming table, honoring cancellation via ctx.
// table[x][y] is the LCS length of left[:x] and right[:y].
func (lcs *lcs) TableContext(ctx context.Context) (table [][]int, err error) {
	if lcs.table != nil {
		return lcs.table, nil
	}

	sizeX := len(lcs.left) + 1
	sizeY := len(lcs.right) + 1

	table = make([][]int, sizeX)
	for x := 0; x < sizeX; x++ {
		table[x] = make([]int, sizeY)
	}

	for y := 1; y < sizeY; y++ {
		select { // check in each y to save some time
		case <-ctx.Done():
			return nil, ctx.Err()
		default:
			// nop
		}
		for x := 1; x < sizeX; x++ {
			increment := 0
			if reflect.DeepEqual(lcs.left[x-1], lcs.right[y-1]) {
				increment = 1
			}
			table[x][y] = max(table[x-1][y-1]+increment, table[x-1][y], table[x][y-1])
		}
	}

	lcs.table = table
	return table, nil
}

// Length implements Lcs.Length().
func (lcs *lcs) Length() (length int) {
	length, _ = lcs.LengthContext(context.Background())
	return length
}

// LengthContext implements Lcs.LengthContext(): the LCS length is the
// bottom-right cell of the DP table.
func (lcs *lcs) LengthContext(ctx context.Context) (length int, err error) {
	table, err := lcs.TableContext(ctx)
	if err != nil {
		return 0, err
	}
	return table[len(lcs.left)][len(lcs.right)], nil
}

// IndexPairs implements Lcs.IndexPairs().
func (lcs *lcs) IndexPairs() (pairs []IndexPair) {
	pairs, _ = lcs.IndexPairsContext(context.Background())
	return pairs
}

// IndexPairsContext implements Lcs.IndexPairsContext(): it backtracks
// through the DP table to recover the matching index pairs (in ascending
// order), caching the result.
func (lcs *lcs) IndexPairsContext(ctx context.Context) (pairs []IndexPair, err error) {
	if lcs.indexPairs != nil {
		return lcs.indexPairs, nil
	}

	table, err := lcs.TableContext(ctx)
	if err != nil {
		return nil, err
	}

	pairs = make([]IndexPair, table[len(table)-1][len(table[0])-1])

	for x, y := len(lcs.left), len(lcs.right); x > 0 && y > 0; {
		if reflect.DeepEqual(lcs.left[x-1], lcs.right[y-1]) {
			// Matching element: its rank in the LCS is table[x][y],
			// so it lands at index table[x][y]-1.
			pairs[table[x][y]-1] = IndexPair{Left: x - 1, Right: y - 1}
			x--
			y--
		} else {
			if table[x-1][y] >= table[x][y-1] {
				x--
			} else {
				y--
			}
		}
	}

	lcs.indexPairs = pairs
	return pairs, nil
}

// Values implements Lcs.Values().
func (lcs *lcs) Values() (values []interface{}) {
	values, _ = lcs.ValuesContext(context.Background())
	return values
}

// ValuesContext implements Lcs.ValuesContext(): it materializes the LCS
// elements from the cached index pairs, caching the result.
func (lcs *lcs) ValuesContext(ctx context.Context) (values []interface{}, err error) {
	if lcs.values != nil {
		return lcs.values, nil
	}

	pairs, err := lcs.IndexPairsContext(ctx)
	if err != nil {
		return nil, err
	}

	values = make([]interface{}, len(pairs))
	for i, pair := range pairs {
		values[i] = lcs.left[pair.Left]
	}

	lcs.values = values
	return values, nil
}

// Left implements Lcs.Left().
func (lcs *lcs) Left() (leftValues []interface{}) {
	leftValues = lcs.left
	return
}

// Right implements Lcs.Right().
func (lcs *lcs) Right() (rightValues []interface{}) {
	rightValues = lcs.right
	return
}

// max returns the largest of its arguments.
func max(first int, rest ...int) (max int) {
	max = first
	for _, value := range rest {
		if value > max {
			max = value
		}
	}
	return
}
vendor/github.com/yudai/golcs/golcs.go
0.707101
0.401277
golcs.go
starcoder
package legacydata

import (
	"strconv"
	"time"

	"github.com/vectordotdev/go-datemath"
)

// DataTimeRange holds the raw From/To expressions of a query time range,
// together with the reference instant (Now) that relative expressions
// are resolved against.
type DataTimeRange struct {
	From string
	To   string
	Now  time.Time
}

// NewDataTimeRange builds a DataTimeRange anchored at the current wall-clock time.
func NewDataTimeRange(from, to string) DataTimeRange {
	return DataTimeRange{
		From: from,
		To:   to,
		Now:  time.Now(),
	}
}

// GetFromAsMsEpoch returns the parsed From as milliseconds since the Unix epoch.
func (tr DataTimeRange) GetFromAsMsEpoch() int64 {
	return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}

// GetFromAsSecondsEpoch returns the parsed From as seconds since the Unix epoch.
func (tr DataTimeRange) GetFromAsSecondsEpoch() int64 {
	return tr.GetFromAsMsEpoch() / 1000
}

// GetFromAsTimeUTC returns the parsed From converted to UTC.
func (tr DataTimeRange) GetFromAsTimeUTC() time.Time {
	return tr.MustGetFrom().UTC()
}

// GetToAsMsEpoch returns the parsed To as milliseconds since the Unix epoch.
func (tr DataTimeRange) GetToAsMsEpoch() int64 {
	return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}

// GetToAsSecondsEpoch returns the parsed To as seconds since the Unix epoch.
func (tr DataTimeRange) GetToAsSecondsEpoch() int64 {
	return tr.GetToAsMsEpoch() / 1000
}

// GetToAsTimeUTC returns the parsed To converted to UTC.
func (tr DataTimeRange) GetToAsTimeUTC() time.Time {
	return tr.MustGetTo().UTC()
}

// MustGetFrom parses From. NOTE: despite the Must prefix it does not panic
// on a parse error — it returns the Unix epoch (time.Unix(0, 0)) instead.
func (tr DataTimeRange) MustGetFrom() time.Time {
	res, err := tr.ParseFrom()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}

// MustGetTo parses To. Like MustGetFrom, it returns the Unix epoch rather
// than panicking when parsing fails.
func (tr DataTimeRange) MustGetTo() time.Time {
	res, err := tr.ParseTo()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}

// ParseFrom parses the From expression. The range's own Now is appended
// after the caller-supplied options, so it overrides any earlier WithNow.
func (tr DataTimeRange) ParseFrom(options ...TimeRangeOption) (time.Time, error) {
	options = append(options, WithNow(tr.Now))

	pt := newParsableTime(tr.From, options...)
	return pt.Parse()
}

// ParseTo parses the To expression. Rounding is forced upward (end of the
// interval) via WithRoundUp, and Now overrides as in ParseFrom.
func (tr DataTimeRange) ParseTo(options ...TimeRangeOption) (time.Time, error) {
	options = append(options, WithRoundUp(), WithNow(tr.Now))

	pt := newParsableTime(tr.To, options...)
	return pt.Parse()
}

// WithWeekstart sets the weekday treated as the start of the week when
// rounding to week boundaries.
func WithWeekstart(weekday time.Weekday) TimeRangeOption {
	return func(timeRange parsableTime) parsableTime {
		timeRange.weekstart = &weekday
		return timeRange
	}
}

// WithLocation sets the time zone used when evaluating the expression.
func WithLocation(loc *time.Location) TimeRangeOption {
	return func(timeRange parsableTime) parsableTime {
		timeRange.location = loc
		return timeRange
	}
}

// WithFiscalStartMonth sets the month in which the fiscal year starts.
func WithFiscalStartMonth(month time.Month) TimeRangeOption {
	return func(timeRange parsableTime) parsableTime {
		timeRange.fiscalStartMonth = &month
		return timeRange
	}
}

// WithNow sets the reference instant that relative expressions resolve against.
func WithNow(t time.Time) TimeRangeOption {
	return func(timeRange parsableTime) parsableTime {
		timeRange.now = t
		return timeRange
	}
}

// WithRoundUp makes datemath rounding go to the end of the unit rather
// than the beginning (used for the To edge of a range).
func WithRoundUp() TimeRangeOption {
	return func(timeRange parsableTime) parsableTime {
		timeRange.roundUp = true
		return timeRange
	}
}

// parsableTime couples a raw time expression with the settings needed to
// evaluate it. Pointer fields are optional; nil means "use the default".
type parsableTime struct {
	time             string
	now              time.Time
	location         *time.Location
	weekstart        *time.Weekday
	fiscalStartMonth *time.Month
	roundUp          bool
}

// TimeRangeOption transforms a parsableTime, returning the updated copy.
type TimeRangeOption func(timeRange parsableTime) parsableTime

// newParsableTime builds a parsableTime defaulting now to the wall clock,
// then applies the options in order (later options win on conflicts).
func newParsableTime(t string, options ...TimeRangeOption) parsableTime {
	p := parsableTime{
		time: t,
		now:  time.Now(),
	}

	for _, opt := range options {
		p = opt(p)
	}

	return p
}

// Parse evaluates the expression, trying three interpretations in order:
// a millisecond epoch timestamp, a duration relative to now, and finally
// a datemath expression (e.g. "now-6h/h" — see the datemath library).
func (t parsableTime) Parse() (time.Time, error) {
	// Milliseconds since Unix epoch.
	if val, err := strconv.ParseInt(t.time, 10, 64); err == nil {
		return time.UnixMilli(val), nil
	}

	// Duration relative to current time.
	if diff, err := time.ParseDuration("-" + t.time); err == nil {
		return t.now.Add(diff), nil
	}

	// Advanced time string, mimics the frontend's datemath library.
	return datemath.ParseAndEvaluate(t.time, t.datemathOptions()...)
}

// datemathOptions translates this parsableTime's settings into the
// corresponding datemath evaluation options, skipping unset ones.
func (t parsableTime) datemathOptions() []func(*datemath.Options) {
	options := []func(*datemath.Options){
		datemath.WithNow(t.now),
		datemath.WithRoundUp(t.roundUp),
	}
	if t.location != nil {
		options = append(options, datemath.WithLocation(t.location))
	}
	if t.weekstart != nil {
		options = append(options, datemath.WithStartOfWeek(*t.weekstart))
	}
	if t.fiscalStartMonth != nil {
		// The fiscal-year anchor inherits the configured location,
		// falling back to UTC.
		loc := time.UTC
		if t.location != nil {
			loc = t.location
		}
		options = append(options, datemath.WithStartOfFiscalYear(
			// Year doesn't matter, and Grafana only supports setting the
			// month that the fiscal year starts in.
			time.Date(0, *t.fiscalStartMonth, 1, 0, 0, 0, 0, loc),
		))
	}
	return options
}
pkg/tsdb/legacydata/time_range.go
0.704668
0.596756
time_range.go
starcoder
package msgraph

// RatingUnitedStatesTelevisionType enumerates United States television
// content-rating settings. The V-prefixed constants are the enum values;
// the P-prefixed functions return pointers to them, for APIs that take
// optional (pointer) enum fields. Judging by the names, each value appears
// to select a maximum allowed rating tier — confirm against the Microsoft
// Graph documentation for ratingUnitedStatesTelevisionType.
type RatingUnitedStatesTelevisionType int

const (
	// RatingUnitedStatesTelevisionTypeVAllAllowed allows all content, regardless of rating.
	RatingUnitedStatesTelevisionTypeVAllAllowed RatingUnitedStatesTelevisionType = 0
	// RatingUnitedStatesTelevisionTypeVAllBlocked blocks all content.
	RatingUnitedStatesTelevisionTypeVAllBlocked RatingUnitedStatesTelevisionType = 1
	// RatingUnitedStatesTelevisionTypeVChildrenAll selects the rating tier for all children.
	RatingUnitedStatesTelevisionTypeVChildrenAll RatingUnitedStatesTelevisionType = 2
	// RatingUnitedStatesTelevisionTypeVChildrenAbove7 selects the rating tier for children ages 7 and up.
	RatingUnitedStatesTelevisionTypeVChildrenAbove7 RatingUnitedStatesTelevisionType = 3
	// RatingUnitedStatesTelevisionTypeVGeneral selects the general-audience rating tier.
	RatingUnitedStatesTelevisionTypeVGeneral RatingUnitedStatesTelevisionType = 4
	// RatingUnitedStatesTelevisionTypeVParentalGuidance selects the parental-guidance rating tier.
	RatingUnitedStatesTelevisionTypeVParentalGuidance RatingUnitedStatesTelevisionType = 5
	// RatingUnitedStatesTelevisionTypeVChildrenAbove14 selects the rating tier for viewers ages 14 and up.
	RatingUnitedStatesTelevisionTypeVChildrenAbove14 RatingUnitedStatesTelevisionType = 6
	// RatingUnitedStatesTelevisionTypeVAdults selects the adult (mature-audience) rating tier.
	RatingUnitedStatesTelevisionTypeVAdults RatingUnitedStatesTelevisionType = 7
)

// RatingUnitedStatesTelevisionTypePAllAllowed returns a pointer to RatingUnitedStatesTelevisionTypeVAllAllowed.
func RatingUnitedStatesTelevisionTypePAllAllowed() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVAllAllowed
	return &v
}

// RatingUnitedStatesTelevisionTypePAllBlocked returns a pointer to RatingUnitedStatesTelevisionTypeVAllBlocked.
func RatingUnitedStatesTelevisionTypePAllBlocked() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVAllBlocked
	return &v
}

// RatingUnitedStatesTelevisionTypePChildrenAll returns a pointer to RatingUnitedStatesTelevisionTypeVChildrenAll.
func RatingUnitedStatesTelevisionTypePChildrenAll() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVChildrenAll
	return &v
}

// RatingUnitedStatesTelevisionTypePChildrenAbove7 returns a pointer to RatingUnitedStatesTelevisionTypeVChildrenAbove7.
func RatingUnitedStatesTelevisionTypePChildrenAbove7() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVChildrenAbove7
	return &v
}

// RatingUnitedStatesTelevisionTypePGeneral returns a pointer to RatingUnitedStatesTelevisionTypeVGeneral.
func RatingUnitedStatesTelevisionTypePGeneral() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVGeneral
	return &v
}

// RatingUnitedStatesTelevisionTypePParentalGuidance returns a pointer to RatingUnitedStatesTelevisionTypeVParentalGuidance.
func RatingUnitedStatesTelevisionTypePParentalGuidance() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVParentalGuidance
	return &v
}

// RatingUnitedStatesTelevisionTypePChildrenAbove14 returns a pointer to RatingUnitedStatesTelevisionTypeVChildrenAbove14.
func RatingUnitedStatesTelevisionTypePChildrenAbove14() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVChildrenAbove14
	return &v
}

// RatingUnitedStatesTelevisionTypePAdults returns a pointer to RatingUnitedStatesTelevisionTypeVAdults.
func RatingUnitedStatesTelevisionTypePAdults() *RatingUnitedStatesTelevisionType {
	v := RatingUnitedStatesTelevisionTypeVAdults
	return &v
}
v1.0/RatingUnitedStatesTelevisionTypeEnum.go
0.590071
0.505737
RatingUnitedStatesTelevisionTypeEnum.go
starcoder
// Package binary_pack converts between Go values and byte slices using
// Python-struct-style format tokens. All encodings are little-endian.
package binary_pack

import (
	"bytes"
	"encoding/binary"
	"errors"
	"strconv"
	"strings"
)

// BinaryPack is a stateless converter between Go values and packed bytes.
// Supported tokens: "?" bool (1 byte), "h"/"H" int (2 bytes),
// "i"/"I"/"l"/"L" int (4 bytes), "q"/"Q" int (8 bytes), "f" float32,
// "d" float64, and "Ns" for a fixed-width N-byte string.
type BinaryPack struct{}

// Pack returns a byte slice containing the values of msg slice packed
// according to the given format. The items of msg must match the Go types
// required by the format exactly (extra trailing values are ignored).
// String fields ("Ns") are NUL-padded to N bytes and truncated when longer
// than N, matching Python's struct; the previous implementation panicked
// on over-long input (strings.Repeat with a negative count).
func (bp *BinaryPack) Pack(format []string, msg []interface{}) ([]byte, error) {
	if len(format) > len(msg) {
		return nil, errors.New("Format is longer than values to pack")
	}

	res := []byte{}

	for i, f := range format {
		switch f {
		case "?":
			v, ok := msg[i].(bool)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (bool)")
			}
			res = append(res, boolToBytes(v)...)
		case "h", "H":
			v, ok := msg[i].(int)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (int, 2 bytes)")
			}
			res = append(res, intToBytes(v, 2)...)
		case "i", "I", "l", "L":
			v, ok := msg[i].(int)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (int, 4 bytes)")
			}
			res = append(res, intToBytes(v, 4)...)
		case "q", "Q":
			v, ok := msg[i].(int)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (int, 8 bytes)")
			}
			res = append(res, intToBytes(v, 8)...)
		case "f":
			v, ok := msg[i].(float32)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (float32)")
			}
			res = append(res, float32ToBytes(v, 4)...)
		case "d":
			v, ok := msg[i].(float64)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (float64)")
			}
			res = append(res, float64ToBytes(v, 8)...)
		default:
			n, err := stringTokenSize(f)
			if err != nil {
				return nil, err
			}
			v, ok := msg[i].(string)
			if !ok {
				return nil, errors.New("Type of passed value doesn't match to expected '" + f + "' (string)")
			}
			// Fixed-width field: NUL-pad short values, truncate long ones.
			field := make([]byte, n)
			copy(field, v)
			res = append(res, field...)
		}
	}

	return res, nil
}

// UnPack decodes msg (presumably packed by Pack(format, msg)) according to
// the given format. The result is a []interface{} slice even if it contains
// exactly one item. msg must contain at least CalcSize(format) bytes.
func (bp *BinaryPack) UnPack(format []string, msg []byte) ([]interface{}, error) {
	expectedSize, err := bp.CalcSize(format)
	if err != nil {
		return nil, err
	}

	if expectedSize > len(msg) {
		return nil, errors.New("Expected size is bigger than actual size of message")
	}

	res := []interface{}{}

	for _, f := range format {
		switch f {
		case "?":
			res = append(res, bytesToBool(msg[:1]))
			msg = msg[1:]
		case "h", "H":
			res = append(res, bytesToInt(msg[:2]))
			msg = msg[2:]
		case "i", "I", "l", "L":
			res = append(res, bytesToInt(msg[:4]))
			msg = msg[4:]
		case "q", "Q":
			res = append(res, bytesToInt(msg[:8]))
			msg = msg[8:]
		case "f":
			res = append(res, bytesToFloat32(msg[:4]))
			msg = msg[4:]
		case "d":
			res = append(res, bytesToFloat64(msg[:8]))
			msg = msg[8:]
		default:
			// CalcSize above already validated the token, so err is
			// unreachable here; checked anyway for defense in depth.
			n, err := stringTokenSize(f)
			if err != nil {
				return nil, err
			}
			res = append(res, string(msg[:n]))
			msg = msg[n:]
		}
	}

	return res, nil
}

// CalcSize returns the size of the struct (and hence of the byte slice)
// corresponding to the given format.
func (bp *BinaryPack) CalcSize(format []string) (int, error) {
	var size int

	for _, f := range format {
		switch f {
		case "?":
			size++
		case "h", "H":
			size += 2
		case "i", "I", "l", "L", "f":
			size += 4
		case "q", "Q", "d":
			size += 8
		default:
			n, err := stringTokenSize(f)
			if err != nil {
				return 0, err
			}
			size += n
		}
	}

	return size, nil
}

// stringTokenSize parses a fixed-width string token of the form "Ns" and
// returns N. Unlike the old code, a malformed count (e.g. bare "s" or
// "xs") is reported as an error instead of being silently treated as 0.
func stringTokenSize(f string) (int, error) {
	if strings.HasSuffix(f, "s") {
		if n, err := strconv.Atoi(strings.TrimSuffix(f, "s")); err == nil && n >= 0 {
			return n, nil
		}
	}
	return 0, errors.New("Unexpected format token: '" + f + "'")
}

// boolToBytes encodes a bool as one byte: 1 for true, 0 for false.
func boolToBytes(x bool) []byte {
	if x {
		return intToBytes(1, 1)
	}
	return intToBytes(0, 1)
}

// bytesToBool decodes a byte as a bool; any positive value is true.
func bytesToBool(b []byte) bool {
	return bytesToInt(b) > 0
}

// intToBytes encodes n little-endian, truncated to size bytes.
func intToBytes(n int, size int) []byte {
	buf := bytes.NewBuffer([]byte{})
	// Writing to a bytes.Buffer cannot fail.
	binary.Write(buf, binary.LittleEndian, int64(n))
	return buf.Bytes()[0:size]
}

// bytesToInt decodes a little-endian integer of 1, 2, 4 or 8 bytes.
// Note: values are always interpreted as signed, even for the nominally
// unsigned tokens ("H", "I", "Q", ...).
func bytesToInt(b []byte) int {
	buf := bytes.NewBuffer(b)

	switch len(b) {
	case 1:
		var x int8
		binary.Read(buf, binary.LittleEndian, &x)
		return int(x)
	case 2:
		var x int16
		binary.Read(buf, binary.LittleEndian, &x)
		return int(x)
	case 4:
		var x int32
		binary.Read(buf, binary.LittleEndian, &x)
		return int(x)
	default:
		var x int64
		binary.Read(buf, binary.LittleEndian, &x)
		return int(x)
	}
}

// float32ToBytes encodes a float32 as little-endian IEEE-754 bytes.
func float32ToBytes(n float32, size int) []byte {
	buf := bytes.NewBuffer([]byte{})
	binary.Write(buf, binary.LittleEndian, n)
	return buf.Bytes()[0:size]
}

// bytesToFloat32 decodes a little-endian IEEE-754 float32.
func bytesToFloat32(b []byte) float32 {
	var x float32
	buf := bytes.NewBuffer(b)
	binary.Read(buf, binary.LittleEndian, &x)
	return x
}

// float64ToBytes encodes a float64 as little-endian IEEE-754 bytes.
func float64ToBytes(n float64, size int) []byte {
	buf := bytes.NewBuffer([]byte{})
	binary.Write(buf, binary.LittleEndian, n)
	return buf.Bytes()[0:size]
}

// bytesToFloat64 decodes a little-endian IEEE-754 float64.
func bytesToFloat64(b []byte) float64 {
	var x float64
	buf := bytes.NewBuffer(b)
	binary.Read(buf, binary.LittleEndian, &x)
	return x
}
binary-pack/binary_pack.go
0.749637
0.416678
binary_pack.go
starcoder
package graph import ( i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization" ) // ColumnDefinition type ColumnDefinition struct { Entity // This column stores boolean values. boolean *BooleanColumn; // This column's data is calculated based on other columns. calculated *CalculatedColumn; // This column stores data from a list of choices. choice *ChoiceColumn; // For site columns, the name of the group this column belongs to. Helps organize related columns. columnGroup *string; // This column stores content approval status. contentApprovalStatus *ContentApprovalStatusColumn; // This column stores currency values. currency *CurrencyColumn; // This column stores DateTime values. dateTime *DateTimeColumn; // The default value for this column. defaultValue *DefaultColumnValue; // The user-facing description of the column. description *string; // The user-facing name of the column. displayName *string; // If true, no two list items may have the same value for this column. enforceUniqueValues *bool; // This column stores a geolocation. geolocation *GeolocationColumn; // Specifies whether the column is displayed in the user interface. hidden *bool; // This column stores hyperlink or picture values. hyperlinkOrPicture *HyperlinkOrPictureColumn; // Specifies whether the column values can be used for sorting and searching. indexed *bool; // Indicates whether this column can be deleted. isDeletable *bool; // Indicates whether values in the column can be reordered. Read-only. isReorderable *bool; // Specifies whether the column can be changed. isSealed *bool; // This column's data is looked up from another source in the site. lookup *LookupColumn; // The API-facing name of the column as it appears in the [fields][] on a [listItem][]. For the user-facing name, see displayName. name *string; // This column stores number values. number *NumberColumn; // This column stores Person or Group values. 
personOrGroup *PersonOrGroupColumn; // If 'true', changes to this column will be propagated to lists that implement the column. propagateChanges *bool; // Specifies whether the column values can be modified. readOnly *bool; // Specifies whether the column value isn't optional. required *bool; // The source column for the content type column. sourceColumn *ColumnDefinition; // This column stores taxonomy terms. term *TermColumn; // This column stores text values. text *TextColumn; // This column stores thumbnail values. thumbnail *ThumbnailColumn; // For site columns, the type of column. Read-only. type_escaped *ColumnTypes; // This column stores validation formula and message for the column. validation *ColumnValidation; } // NewColumnDefinition instantiates a new columnDefinition and sets the default values. func NewColumnDefinition()(*ColumnDefinition) { m := &ColumnDefinition{ Entity: *NewEntity(), } return m } // GetBoolean gets the boolean property value. This column stores boolean values. func (m *ColumnDefinition) GetBoolean()(*BooleanColumn) { if m == nil { return nil } else { return m.boolean } } // GetCalculated gets the calculated property value. This column's data is calculated based on other columns. func (m *ColumnDefinition) GetCalculated()(*CalculatedColumn) { if m == nil { return nil } else { return m.calculated } } // GetChoice gets the choice property value. This column stores data from a list of choices. func (m *ColumnDefinition) GetChoice()(*ChoiceColumn) { if m == nil { return nil } else { return m.choice } } // GetColumnGroup gets the columnGroup property value. For site columns, the name of the group this column belongs to. Helps organize related columns. func (m *ColumnDefinition) GetColumnGroup()(*string) { if m == nil { return nil } else { return m.columnGroup } } // GetContentApprovalStatus gets the contentApprovalStatus property value. This column stores content approval status. 
func (m *ColumnDefinition) GetContentApprovalStatus()(*ContentApprovalStatusColumn) { if m == nil { return nil } else { return m.contentApprovalStatus } } // GetCurrency gets the currency property value. This column stores currency values. func (m *ColumnDefinition) GetCurrency()(*CurrencyColumn) { if m == nil { return nil } else { return m.currency } } // GetDateTime gets the dateTime property value. This column stores DateTime values. func (m *ColumnDefinition) GetDateTime()(*DateTimeColumn) { if m == nil { return nil } else { return m.dateTime } } // GetDefaultValue gets the defaultValue property value. The default value for this column. func (m *ColumnDefinition) GetDefaultValue()(*DefaultColumnValue) { if m == nil { return nil } else { return m.defaultValue } } // GetDescription gets the description property value. The user-facing description of the column. func (m *ColumnDefinition) GetDescription()(*string) { if m == nil { return nil } else { return m.description } } // GetDisplayName gets the displayName property value. The user-facing name of the column. func (m *ColumnDefinition) GetDisplayName()(*string) { if m == nil { return nil } else { return m.displayName } } // GetEnforceUniqueValues gets the enforceUniqueValues property value. If true, no two list items may have the same value for this column. func (m *ColumnDefinition) GetEnforceUniqueValues()(*bool) { if m == nil { return nil } else { return m.enforceUniqueValues } } // GetGeolocation gets the geolocation property value. This column stores a geolocation. func (m *ColumnDefinition) GetGeolocation()(*GeolocationColumn) { if m == nil { return nil } else { return m.geolocation } } // GetHidden gets the hidden property value. Specifies whether the column is displayed in the user interface. func (m *ColumnDefinition) GetHidden()(*bool) { if m == nil { return nil } else { return m.hidden } } // GetHyperlinkOrPicture gets the hyperlinkOrPicture property value. 
This column stores hyperlink or picture values. func (m *ColumnDefinition) GetHyperlinkOrPicture()(*HyperlinkOrPictureColumn) { if m == nil { return nil } else { return m.hyperlinkOrPicture } } // GetIndexed gets the indexed property value. Specifies whether the column values can be used for sorting and searching. func (m *ColumnDefinition) GetIndexed()(*bool) { if m == nil { return nil } else { return m.indexed } } // GetIsDeletable gets the isDeletable property value. Indicates whether this column can be deleted. func (m *ColumnDefinition) GetIsDeletable()(*bool) { if m == nil { return nil } else { return m.isDeletable } } // GetIsReorderable gets the isReorderable property value. Indicates whether values in the column can be reordered. Read-only. func (m *ColumnDefinition) GetIsReorderable()(*bool) { if m == nil { return nil } else { return m.isReorderable } } // GetIsSealed gets the isSealed property value. Specifies whether the column can be changed. func (m *ColumnDefinition) GetIsSealed()(*bool) { if m == nil { return nil } else { return m.isSealed } } // GetLookup gets the lookup property value. This column's data is looked up from another source in the site. func (m *ColumnDefinition) GetLookup()(*LookupColumn) { if m == nil { return nil } else { return m.lookup } } // GetName gets the name property value. The API-facing name of the column as it appears in the [fields][] on a [listItem][]. For the user-facing name, see displayName. func (m *ColumnDefinition) GetName()(*string) { if m == nil { return nil } else { return m.name } } // GetNumber gets the number property value. This column stores number values. func (m *ColumnDefinition) GetNumber()(*NumberColumn) { if m == nil { return nil } else { return m.number } } // GetPersonOrGroup gets the personOrGroup property value. This column stores Person or Group values. 
func (m *ColumnDefinition) GetPersonOrGroup()(*PersonOrGroupColumn) { if m == nil { return nil } else { return m.personOrGroup } } // GetPropagateChanges gets the propagateChanges property value. If 'true', changes to this column will be propagated to lists that implement the column. func (m *ColumnDefinition) GetPropagateChanges()(*bool) { if m == nil { return nil } else { return m.propagateChanges } } // GetReadOnly gets the readOnly property value. Specifies whether the column values can be modified. func (m *ColumnDefinition) GetReadOnly()(*bool) { if m == nil { return nil } else { return m.readOnly } } // GetRequired gets the required property value. Specifies whether the column value isn't optional. func (m *ColumnDefinition) GetRequired()(*bool) { if m == nil { return nil } else { return m.required } } // GetSourceColumn gets the sourceColumn property value. The source column for the content type column. func (m *ColumnDefinition) GetSourceColumn()(*ColumnDefinition) { if m == nil { return nil } else { return m.sourceColumn } } // GetTerm gets the term property value. This column stores taxonomy terms. func (m *ColumnDefinition) GetTerm()(*TermColumn) { if m == nil { return nil } else { return m.term } } // GetText gets the text property value. This column stores text values. func (m *ColumnDefinition) GetText()(*TextColumn) { if m == nil { return nil } else { return m.text } } // GetThumbnail gets the thumbnail property value. This column stores thumbnail values. func (m *ColumnDefinition) GetThumbnail()(*ThumbnailColumn) { if m == nil { return nil } else { return m.thumbnail } } // GetType gets the type property value. For site columns, the type of column. Read-only. func (m *ColumnDefinition) GetType()(*ColumnTypes) { if m == nil { return nil } else { return m.type_escaped } } // GetValidation gets the validation property value. This column stores validation formula and message for the column. 
func (m *ColumnDefinition) GetValidation()(*ColumnValidation) { if m == nil { return nil } else { return m.validation } } // GetFieldDeserializers the deserialization information for the current model func (m *ColumnDefinition) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) { res := m.Entity.GetFieldDeserializers() res["boolean"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewBooleanColumn() }) if err != nil { return err } if val != nil { m.SetBoolean(val.(*BooleanColumn)) } return nil } res["calculated"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewCalculatedColumn() }) if err != nil { return err } if val != nil { m.SetCalculated(val.(*CalculatedColumn)) } return nil } res["choice"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewChoiceColumn() }) if err != nil { return err } if val != nil { m.SetChoice(val.(*ChoiceColumn)) } return nil } res["columnGroup"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetColumnGroup(val) } return nil } res["contentApprovalStatus"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { 
return NewContentApprovalStatusColumn() }) if err != nil { return err } if val != nil { m.SetContentApprovalStatus(val.(*ContentApprovalStatusColumn)) } return nil } res["currency"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewCurrencyColumn() }) if err != nil { return err } if val != nil { m.SetCurrency(val.(*CurrencyColumn)) } return nil } res["dateTime"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewDateTimeColumn() }) if err != nil { return err } if val != nil { m.SetDateTime(val.(*DateTimeColumn)) } return nil } res["defaultValue"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewDefaultColumnValue() }) if err != nil { return err } if val != nil { m.SetDefaultValue(val.(*DefaultColumnValue)) } return nil } res["description"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetDescription(val) } return nil } res["displayName"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetDisplayName(val) } return nil } res["enforceUniqueValues"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { 
m.SetEnforceUniqueValues(val) } return nil } res["geolocation"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewGeolocationColumn() }) if err != nil { return err } if val != nil { m.SetGeolocation(val.(*GeolocationColumn)) } return nil } res["hidden"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetHidden(val) } return nil } res["hyperlinkOrPicture"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewHyperlinkOrPictureColumn() }) if err != nil { return err } if val != nil { m.SetHyperlinkOrPicture(val.(*HyperlinkOrPictureColumn)) } return nil } res["indexed"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetIndexed(val) } return nil } res["isDeletable"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetIsDeletable(val) } return nil } res["isReorderable"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetIsReorderable(val) } return nil } res["isSealed"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetIsSealed(val) } 
return nil } res["lookup"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewLookupColumn() }) if err != nil { return err } if val != nil { m.SetLookup(val.(*LookupColumn)) } return nil } res["name"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetName(val) } return nil } res["number"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewNumberColumn() }) if err != nil { return err } if val != nil { m.SetNumber(val.(*NumberColumn)) } return nil } res["personOrGroup"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewPersonOrGroupColumn() }) if err != nil { return err } if val != nil { m.SetPersonOrGroup(val.(*PersonOrGroupColumn)) } return nil } res["propagateChanges"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetPropagateChanges(val) } return nil } res["readOnly"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetReadOnly(val) } return nil } res["required"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetBoolValue() if err != nil 
{ return err } if val != nil { m.SetRequired(val) } return nil } res["sourceColumn"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewColumnDefinition() }) if err != nil { return err } if val != nil { m.SetSourceColumn(val.(*ColumnDefinition)) } return nil } res["term"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewTermColumn() }) if err != nil { return err } if val != nil { m.SetTerm(val.(*TermColumn)) } return nil } res["text"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewTextColumn() }) if err != nil { return err } if val != nil { m.SetText(val.(*TextColumn)) } return nil } res["thumbnail"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewThumbnailColumn() }) if err != nil { return err } if val != nil { m.SetThumbnail(val.(*ThumbnailColumn)) } return nil } res["type"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetEnumValue(ParseColumnTypes) if err != nil { return err } if val != nil { m.SetType(val.(*ColumnTypes)) } return nil } res["validation"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () 
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewColumnValidation() }) if err != nil { return err } if val != nil { m.SetValidation(val.(*ColumnValidation)) } return nil } return res } func (m *ColumnDefinition) IsNil()(bool) { return m == nil } // Serialize serializes information the current object func (m *ColumnDefinition) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) { err := m.Entity.Serialize(writer) if err != nil { return err } { err = writer.WriteObjectValue("boolean", m.GetBoolean()) if err != nil { return err } } { err = writer.WriteObjectValue("calculated", m.GetCalculated()) if err != nil { return err } } { err = writer.WriteObjectValue("choice", m.GetChoice()) if err != nil { return err } } { err = writer.WriteStringValue("columnGroup", m.GetColumnGroup()) if err != nil { return err } } { err = writer.WriteObjectValue("contentApprovalStatus", m.GetContentApprovalStatus()) if err != nil { return err } } { err = writer.WriteObjectValue("currency", m.GetCurrency()) if err != nil { return err } } { err = writer.WriteObjectValue("dateTime", m.GetDateTime()) if err != nil { return err } } { err = writer.WriteObjectValue("defaultValue", m.GetDefaultValue()) if err != nil { return err } } { err = writer.WriteStringValue("description", m.GetDescription()) if err != nil { return err } } { err = writer.WriteStringValue("displayName", m.GetDisplayName()) if err != nil { return err } } { err = writer.WriteBoolValue("enforceUniqueValues", m.GetEnforceUniqueValues()) if err != nil { return err } } { err = writer.WriteObjectValue("geolocation", m.GetGeolocation()) if err != nil { return err } } { err = writer.WriteBoolValue("hidden", m.GetHidden()) if err != nil { return err } } { err = writer.WriteObjectValue("hyperlinkOrPicture", m.GetHyperlinkOrPicture()) if err != nil { return err } } { err = writer.WriteBoolValue("indexed", m.GetIndexed()) if err != nil { return 
err } } { err = writer.WriteBoolValue("isDeletable", m.GetIsDeletable()) if err != nil { return err } } { err = writer.WriteBoolValue("isReorderable", m.GetIsReorderable()) if err != nil { return err } } { err = writer.WriteBoolValue("isSealed", m.GetIsSealed()) if err != nil { return err } } { err = writer.WriteObjectValue("lookup", m.GetLookup()) if err != nil { return err } } { err = writer.WriteStringValue("name", m.GetName()) if err != nil { return err } } { err = writer.WriteObjectValue("number", m.GetNumber()) if err != nil { return err } } { err = writer.WriteObjectValue("personOrGroup", m.GetPersonOrGroup()) if err != nil { return err } } { err = writer.WriteBoolValue("propagateChanges", m.GetPropagateChanges()) if err != nil { return err } } { err = writer.WriteBoolValue("readOnly", m.GetReadOnly()) if err != nil { return err } } { err = writer.WriteBoolValue("required", m.GetRequired()) if err != nil { return err } } { err = writer.WriteObjectValue("sourceColumn", m.GetSourceColumn()) if err != nil { return err } } { err = writer.WriteObjectValue("term", m.GetTerm()) if err != nil { return err } } { err = writer.WriteObjectValue("text", m.GetText()) if err != nil { return err } } { err = writer.WriteObjectValue("thumbnail", m.GetThumbnail()) if err != nil { return err } } if m.GetType() != nil { cast := (*m.GetType()).String() err = writer.WriteStringValue("type", &cast) if err != nil { return err } } { err = writer.WriteObjectValue("validation", m.GetValidation()) if err != nil { return err } } return nil } // SetBoolean sets the boolean property value. This column stores boolean values. func (m *ColumnDefinition) SetBoolean(value *BooleanColumn)() { if m != nil { m.boolean = value } } // SetCalculated sets the calculated property value. This column's data is calculated based on other columns. func (m *ColumnDefinition) SetCalculated(value *CalculatedColumn)() { if m != nil { m.calculated = value } } // SetChoice sets the choice property value. 
This column stores data from a list of choices. func (m *ColumnDefinition) SetChoice(value *ChoiceColumn)() { if m != nil { m.choice = value } } // SetColumnGroup sets the columnGroup property value. For site columns, the name of the group this column belongs to. Helps organize related columns. func (m *ColumnDefinition) SetColumnGroup(value *string)() { if m != nil { m.columnGroup = value } } // SetContentApprovalStatus sets the contentApprovalStatus property value. This column stores content approval status. func (m *ColumnDefinition) SetContentApprovalStatus(value *ContentApprovalStatusColumn)() { if m != nil { m.contentApprovalStatus = value } } // SetCurrency sets the currency property value. This column stores currency values. func (m *ColumnDefinition) SetCurrency(value *CurrencyColumn)() { if m != nil { m.currency = value } } // SetDateTime sets the dateTime property value. This column stores DateTime values. func (m *ColumnDefinition) SetDateTime(value *DateTimeColumn)() { if m != nil { m.dateTime = value } } // SetDefaultValue sets the defaultValue property value. The default value for this column. func (m *ColumnDefinition) SetDefaultValue(value *DefaultColumnValue)() { if m != nil { m.defaultValue = value } } // SetDescription sets the description property value. The user-facing description of the column. func (m *ColumnDefinition) SetDescription(value *string)() { if m != nil { m.description = value } } // SetDisplayName sets the displayName property value. The user-facing name of the column. func (m *ColumnDefinition) SetDisplayName(value *string)() { if m != nil { m.displayName = value } } // SetEnforceUniqueValues sets the enforceUniqueValues property value. If true, no two list items may have the same value for this column. func (m *ColumnDefinition) SetEnforceUniqueValues(value *bool)() { if m != nil { m.enforceUniqueValues = value } } // SetGeolocation sets the geolocation property value. This column stores a geolocation. 
func (m *ColumnDefinition) SetGeolocation(value *GeolocationColumn)() { if m != nil { m.geolocation = value } } // SetHidden sets the hidden property value. Specifies whether the column is displayed in the user interface. func (m *ColumnDefinition) SetHidden(value *bool)() { if m != nil { m.hidden = value } } // SetHyperlinkOrPicture sets the hyperlinkOrPicture property value. This column stores hyperlink or picture values. func (m *ColumnDefinition) SetHyperlinkOrPicture(value *HyperlinkOrPictureColumn)() { if m != nil { m.hyperlinkOrPicture = value } } // SetIndexed sets the indexed property value. Specifies whether the column values can be used for sorting and searching. func (m *ColumnDefinition) SetIndexed(value *bool)() { if m != nil { m.indexed = value } } // SetIsDeletable sets the isDeletable property value. Indicates whether this column can be deleted. func (m *ColumnDefinition) SetIsDeletable(value *bool)() { if m != nil { m.isDeletable = value } } // SetIsReorderable sets the isReorderable property value. Indicates whether values in the column can be reordered. Read-only. func (m *ColumnDefinition) SetIsReorderable(value *bool)() { if m != nil { m.isReorderable = value } } // SetIsSealed sets the isSealed property value. Specifies whether the column can be changed. func (m *ColumnDefinition) SetIsSealed(value *bool)() { if m != nil { m.isSealed = value } } // SetLookup sets the lookup property value. This column's data is looked up from another source in the site. func (m *ColumnDefinition) SetLookup(value *LookupColumn)() { if m != nil { m.lookup = value } } // SetName sets the name property value. The API-facing name of the column as it appears in the [fields][] on a [listItem][]. For the user-facing name, see displayName. func (m *ColumnDefinition) SetName(value *string)() { if m != nil { m.name = value } } // SetNumber sets the number property value. This column stores number values. 
func (m *ColumnDefinition) SetNumber(value *NumberColumn)() { if m != nil { m.number = value } } // SetPersonOrGroup sets the personOrGroup property value. This column stores Person or Group values. func (m *ColumnDefinition) SetPersonOrGroup(value *PersonOrGroupColumn)() { if m != nil { m.personOrGroup = value } } // SetPropagateChanges sets the propagateChanges property value. If 'true', changes to this column will be propagated to lists that implement the column. func (m *ColumnDefinition) SetPropagateChanges(value *bool)() { if m != nil { m.propagateChanges = value } } // SetReadOnly sets the readOnly property value. Specifies whether the column values can be modified. func (m *ColumnDefinition) SetReadOnly(value *bool)() { if m != nil { m.readOnly = value } } // SetRequired sets the required property value. Specifies whether the column value isn't optional. func (m *ColumnDefinition) SetRequired(value *bool)() { if m != nil { m.required = value } } // SetSourceColumn sets the sourceColumn property value. The source column for the content type column. func (m *ColumnDefinition) SetSourceColumn(value *ColumnDefinition)() { if m != nil { m.sourceColumn = value } } // SetTerm sets the term property value. This column stores taxonomy terms. func (m *ColumnDefinition) SetTerm(value *TermColumn)() { if m != nil { m.term = value } } // SetText sets the text property value. This column stores text values. func (m *ColumnDefinition) SetText(value *TextColumn)() { if m != nil { m.text = value } } // SetThumbnail sets the thumbnail property value. This column stores thumbnail values. func (m *ColumnDefinition) SetThumbnail(value *ThumbnailColumn)() { if m != nil { m.thumbnail = value } } // SetType sets the type property value. For site columns, the type of column. Read-only. func (m *ColumnDefinition) SetType(value *ColumnTypes)() { if m != nil { m.type_escaped = value } } // SetValidation sets the validation property value. 
This column stores validation formula and message for the column. func (m *ColumnDefinition) SetValidation(value *ColumnValidation)() { if m != nil { m.validation = value } }
models/microsoft/graph/column_definition.go
0.685529
0.432243
column_definition.go
starcoder
package gomfa func Ldn(n int, b []LDBODY, ob *[3]float64, sc *[3]float64, sn *[3]float64) { /* ** - - - - ** L d n ** - - - - ** ** For a star, apply light deflection by multiple solar-system bodies, ** as part of transforming coordinate direction into natural direction. ** ** Given: ** n int number of bodies (note 1) ** b LDBODY[n] data for each of the n bodies (Notes 1,2): ** bm float64 mass of the body (solar masses, Note 3) ** dl float64 deflection limiter (Note 4) ** pv *[2][3] float64 barycentric PV of the body (au, au/day) ** ob *[3] float64 barycentric position of the observer (au) ** sc *[3] float64 observer to star coord direction (unit vector) ** ** Returned: ** sn *[3] float64 observer to deflected star (unit vector) ** ** 1) The array b contains n entries, one for each body to be ** considered. If n = 0, no gravitational light deflection will be ** applied, not even for the Sun. ** ** 2) The array b should include an entry for the Sun as well as for ** any planet or other body to be taken into account. The entries ** should be in the order in which the light passes the body. ** ** 3) In the entry in the b array for body i, the mass parameter ** b[i].bm can, as required, be adjusted in order to allow for such ** effects as quadrupole field. ** ** 4) The deflection limiter parameter b[i].dl is phi^2/2, where phi is ** the angular separation (in radians) between star and body at ** which limiting is applied. As phi shrinks below the chosen ** threshold, the deflection is artificially reduced, reaching zero ** for phi = 0. Example values suitable for a terrestrial ** observer, together with masses, are as follows: ** ** body i b[i].bm b[i].dl ** ** Sun 1.0 6e-6 ** Jupiter 0.00095435 3e-9 ** Saturn 0.00028574 3e-10 ** ** 5) For cases where the starlight passes the body before reaching the ** observer, the body is placed back along its barycentric track by ** the light time from that point to the observer. 
For cases where ** the body is "behind" the observer no such shift is applied. If ** a different treatment is preferred, the user has the option of ** instead using the eraLd function. Similarly, eraLd can be used ** for cases where the source is nearby, not a star. ** ** 6) The returned vector sn is not normalized, but the consequential ** departure from unit magnitude is always negligible. ** ** 7) The arguments sc and sn can be the same array. ** ** 8) For efficiency, validation is omitted. The supplied masses must ** be greater than zero, the position and velocity vectors must be ** right, and the deflection limiter greater than zero. ** ** Reference: ** ** Urban, S. & <NAME>. (eds), Explanatory Supplement to ** the Astronomical Almanac, 3rd ed., University Science Books ** (2013), Section 7.2.4. ** ** Called: ** Cp copy p-vector ** Pdp scalar product of two p-vectors ** Pmp p-vector minus p-vector ** Ppsp p-vector plus scaled p-vector ** Pn decompose p-vector into modulus and direction ** Ld light deflection by a solar-system body ** ** This revision: 2021 February 24 */ /* Light time for 1 au (days) */ const CR float64 = AULT / DAYSEC var i int var v [3]float64 var dt, em float64 var ev [3]float64 var e [3]float64 /* Star direction prior to deflection. */ Cp(sc, sn) /* Body by body. */ for i = 0; i < n; i++ { /* Body to observer vector at epoch of observation (au). */ Pmp(ob, &b[i].pv[0], &v) /* Minus the time since the light passed the body (days). */ dt = Pdp(sn, &v) * CR /* Neutralize if the star is "behind" the observer. */ dt = GMIN(dt, 0.0) /* Backtrack the body to the time the light was passing the body. */ Ppsp(&v, -dt, &b[i].pv[1], &ev) /* Body to observer vector as magnitude and direction. */ Pn(&ev, &em, &e) /* Apply light deflection for this body. */ Ld(b[i].bm, sn, sn, &e, em, b[i].dl, sn) /* Next body. */ } /* Finished. 
*/ } /*---------------------------------------------------------------------- ** ** ** Copyright (C) 2021, <NAME> ** All rights reserved. ** ** This library is derived, with permission, from the International ** Astronomical Union's "Standards of Fundamental Astronomy" library, ** available from http://www.iausofa.org. ** ** The GOMFA version is intended to retain identical functionality to ** the SOFA library, but made distinct through different namespaces and ** file names, as set out in the SOFA license conditions. The SOFA ** original has a role as a reference standard for the IAU and IERS, ** and consequently redistribution is permitted only in its unaltered ** state. The GOMFA version is not subject to this restriction and ** therefore can be included in distributions which do not support the ** concept of "read only" software. ** ** Although the intent is to replicate the SOFA API (other than ** replacement of prefix names) and results (with the exception of ** bugs; any that are discovered will be fixed), SOFA is not ** responsible for any errors found in this version of the library. ** ** If you wish to acknowledge the SOFA heritage, please acknowledge ** that you are using a library derived from SOFA, rather than SOFA ** itself. ** ** ** TERMS AND CONDITIONS ** ** Redistribution and use in source and binary forms, with or without ** modification, are permitted provided that the following conditions ** are met: ** ** 1 Redistributions of source code must retain the above copyright ** notice, this list of conditions and the following disclaimer. ** ** 2 Redistributions in binary form must reproduce the above copyright ** notice, this list of conditions and the following disclaimer in ** the documentation and/or other materials provided with the ** distribution. 
** ** 3 Neither the name of the Standards Of Fundamental Astronomy Board, ** the International Astronomical Union nor the names of its ** contributors may be used to endorse or promote products derived ** from this software without specific prior written permission. ** ** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS ** FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE ** COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, ** INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, ** BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; ** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER ** CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT ** LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ** ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ** POSSIBILITY OF SUCH DAMAGE. ** */
ldn.go
0.885977
0.73473
ldn.go
starcoder
package tilegraphics import "image/color" // TileSize is the size (width and height) of a tile. A tile will take up // TileSize*TileSize*4 bytes of memory during rendering. const TileSize = 8 // engineDebug can be set to true for extra logging. const engineDebug = false // Displayer is the display interface required by the rendering engine. type Displayer interface { // Size returns the display size in pixels. It must never change. Size() (int16, int16) // Display sends the last updates to the screen, if needed. // The display might be updated directly or only after this method has been // called, depending on the implementation. Display() error // FillRectangle fills the given rectangle with the given color, and returns // an error if something went wrong. FillRectangle(x, y, width, height int16, c color.RGBA) error // FillRectangleWithBuffer fills the given rectangle with a slice of colors. // The buffer is stored in row major order. FillRectangleWithBuffer(x, y, width, height int16, buffer []color.RGBA) error } // tile encapsulates a single tile with colors in row major order. type tile [TileSize * TileSize]color.RGBA // Engine is the actual rendering engine. Use NewEngine to construct a new rendering engine. type Engine struct { // display is the backing display to which all pixels will be drawn once // the Display method is called. display Displayer // The root layer, that stores the background color and the list of objects // (in order) that should be drawn on each tile. root Layer // cleanTiles stores for each tile whether it should be redrawn. True means // it is up-to-date, false means it should be redrawn. cleanTiles [][]bool // tile is a tile that is re-used for all root tiles. tile *tile // tilePool is a slice of re-usable tiles. They can be used for layer // drawing, without allocating a new tile every time or allocating a big // object on the stack (if it gets stack-allocated at all). 
tilePool []*tile } // NewEngine creates a new rendering engine based on the displayer interface. func NewEngine(display Displayer) *Engine { // Store which tiles are currently up-to-date and which aren't. width, height := display.Size() cleanTiles := make([][]bool, (height+TileSize-1)/TileSize) for i := 0; i < len(cleanTiles); i++ { cleanTiles[i] = make([]bool, (width+TileSize-1)/TileSize) } e := &Engine{ display: display, tile: &tile{}, cleanTiles: cleanTiles, } e.root = Layer{ rect: Rectangle{ x1: 0, y1: 0, x2: width, y2: height, color: color.RGBA{0, 0, 0, 255}, // black background by default }, engine: e, } e.root.rect.parent = &e.root return e } // SetBackgroundColor updates the background color of the display. Note that the // alpha channel should be 100% (255) and will be ignored. func (e *Engine) SetBackgroundColor(background color.RGBA) { e.root.SetBackgroundColor(background) } // NewRectangle adds a new rectangle to the display with the given color. func (e *Engine) NewRectangle(x, y, width, height int16, c color.RGBA) *Rectangle { return e.root.NewRectangle(x, y, width, height, c) } // NewLayer creates a new layer to the display with the given background color. func (e *Engine) NewLayer(x, y, width, height int16, background color.RGBA) *Layer { return e.root.NewLayer(x, y, width, height, background) } // NewLine creates a new line with the two given coordinates and the given // stroke color. func (e *Engine) NewLine(x1, y1, x2, y2 int16, stroke color.RGBA) *Line { return e.root.NewLine(x1, y1, x2, y2, stroke) } // getTile returns a reusable tile from the tile pool, without allocating a new // tile. It should be returned to the tile pool after use with putTile. func (e *Engine) getTile() *tile { if len(e.tilePool) != 0 { // A reusable tile was found. t := e.tilePool[len(e.tilePool)-1] e.tilePool = e.tilePool[:len(e.tilePool)-1] return t } // No reusable tile was found, make a new one. 
return &tile{} } // putTile returns a tile back to the tile pool that isn't used anymore. func (e *Engine) putTile(t *tile) { e.tilePool = append(e.tilePool, t) } // Display updates the display with all the changes that have been done since // the last update. func (e *Engine) Display() { tilesDrawn := 0 for row, cleanTilesRow := range e.cleanTiles { for col, cleanTile := range cleanTilesRow { if cleanTile { // Already updated. continue } // Will be true after this loop body finishes. cleanTilesRow[col] = true tilesDrawn++ // Paint tile. tileX := int16(col * TileSize) tileY := int16(row * TileSize) e.root.paint(e.tile, tileX, tileY) // Draw tile in screen. e.display.FillRectangleWithBuffer(tileX, tileY, TileSize, TileSize, e.tile[:]) } } if engineDebug { println("tiles drawn:", tilesDrawn) } // Send the update to the screen. Not all Displayer implementations need this. e.display.Display() }
engine.go
0.762689
0.605449
engine.go
starcoder
package num

import (
	"math"

	"github.com/cpmech/gosl/chk"
	"github.com/cpmech/gosl/fun"
)

// The algorithms below are based on [1].
// REFERENCES:
//   [1] Press WH, Teukolsky SA, Vetterling WT, Flannery BP (2007) Numerical Recipes: The Art of
//       Scientific Computing. Third Edition. Cambridge University Press. 1235p.

// QuadElementary defines the interface for elementary quadrature algorithms with refinement.
type QuadElementary interface {
	Init(f fun.Ss, a, b, eps float64) // The constructor takes as inputs f, the function or functor to be integrated between limits a and b, also input.
	Integrate() float64               // Returns the integral for the specified input data
}

// ElementaryTrapz structure is used for the trapezoidal integration rule with refinement.
// Note: the struct carries mutable refinement state (n, s); a value must not be shared
// between concurrent integrations.
type ElementaryTrapz struct {
	n    int     // current level of refinement
	a, b float64 // integration limits
	s    float64 // current (running) value of the integral estimate
	eps  float64 // relative precision used as the stopping criterion
	f    fun.Ss  // the integrand
}

// Init initializes the ElementaryTrapz structure with the integrand f,
// the limits [a, b] and the target relative precision eps.
func (o *ElementaryTrapz) Init(f fun.Ss, a, b, eps float64) {
	o.n = 0
	o.f = f
	o.a = a
	o.b = b
	o.eps = eps
}

// Next returns the nth stage of refinement of the extended trapezoidal rule.
// On the first call (n=1) it returns the crudest estimate of the integral of f
// over [a, b] (the single trapezoid through the endpoints). Each subsequent call
// (n=2,3,...) improves accuracy by adding 2^(n-2) additional interior points.
func (o *ElementaryTrapz) Next() (res float64) {
	var x, sum, del float64
	var it, j, tnm int
	o.n++
	var fa, fb, fx float64
	if o.n == 1 {
		// Crudest estimate: a single trapezoid over [a, b].
		fa = o.f(o.a)
		fb = o.f(o.b)
		o.s = 0.5 * (o.b - o.a) * (fa + fb)
		return o.s
	}
	// it = 2^(n-2): number of new interior points at this stage.
	for it, j = 1, 1; j < o.n-1; j++ {
		it *= 2
	}
	tnm = it
	del = (o.b - o.a) / float64(tnm) // spacing of the points to be added
	x = o.a + 0.5*del
	// Sum the integrand at the midpoints of the current subintervals.
	for sum, j = 0.0, 0; j < it; j, x = j+1, x+del {
		fx = o.f(x)
		sum += fx
	}
	o.s = 0.5 * (o.s + (o.b-o.a)*sum/float64(tnm)) // replace s by its refined value
	return o.s
}

// Integrate performs the numerical integration, refining the trapezoidal
// estimate until the relative change falls below eps. It panics (via chk.Panic)
// if convergence is not reached within 20 refinement stages.
func (o *ElementaryTrapz) Integrate() (res float64) {
	jmax := 20
	var olds float64
	for j := 0; j < jmax; j++ {
		o.s = o.Next()
		// Avoid spurious early convergence: only test after 5 refinements [1].
		if j > 5 {
			if math.Abs(o.s-olds) < o.eps*math.Abs(olds) || (o.s == 0 && olds == 0) {
				return o.s
			}
		}
		olds = o.s
	}
	chk.Panic("achieved maximum number of iterations (n=%d)", jmax)
	return
}

// ElementarySimpson structure implements Simpson's method for quadrature with refinement.
// Note: the struct carries mutable refinement state (n, s); a value must not be shared
// between concurrent integrations.
type ElementarySimpson struct {
	n    int     // current level of refinement
	a, b float64 // integration limits
	s    float64 // current (running) trapezoidal estimate
	eps  float64 // relative precision used as the stopping criterion
	f    fun.Ss  // the integrand
}

// Init initializes the ElementarySimpson structure with the integrand f,
// the limits [a, b] and the target relative precision eps.
func (o *ElementarySimpson) Init(f fun.Ss, a, b, eps float64) {
	o.n = 0
	o.f = f
	o.a = a
	o.b = b
	o.eps = eps
}

// Next returns the nth stage of refinement of the extended trapezoidal rule
// (the same staged refinement as ElementaryTrapz.Next). Integrate combines
// successive trapezoidal stages via Richardson extrapolation to obtain the
// Simpson estimate. On the first call (n=1) it returns the crudest estimate;
// subsequent calls (n=2,3,...) add 2^(n-2) additional interior points.
func (o *ElementarySimpson) Next() (res float64) {
	var x, sum, del, fa, fb, fx float64
	var it, j, tnm int
	o.n++
	if o.n == 1 {
		// Crudest estimate: a single trapezoid over [a, b].
		fa = o.f(o.a)
		fb = o.f(o.b)
		o.s = 0.5 * (o.b - o.a) * (fa + fb)
		return o.s
	}
	// it = 2^(n-2): number of new interior points at this stage.
	for it, j = 1, 1; j < o.n-1; j++ {
		it *= 2
	}
	tnm = it
	del = (o.b - o.a) / float64(tnm) // spacing of the points to be added
	x = o.a + 0.5*del
	// Sum the integrand at the midpoints of the current subintervals.
	for sum, j = 0.0, 0; j < it; j, x = j+1, x+del {
		fx = o.f(x)
		sum += fx
	}
	o.s = 0.5 * (o.s + (o.b-o.a)*sum/float64(tnm)) // replace s by its refined value
	return o.s
}

// Integrate performs the numerical integration using Simpson's rule obtained
// by Richardson extrapolation of successive trapezoidal stages:
// S = (4*T_n - T_{n-1}) / 3 [1, qsimp]. It panics (via chk.Panic) if
// convergence is not reached within 20 refinement stages.
func (o *ElementarySimpson) Integrate() (res float64) {
	jmax := 20
	var s, st, ost, os float64
	for j := 0; j < jmax; j++ {
		st = o.Next()
		s = (4*st - ost) / 3 // Richardson extrapolation of two trapezoid stages
		// Avoid spurious early convergence: only test after 5 refinements [1].
		if j > 5 {
			if math.Abs(s-os) < o.eps*math.Abs(os) || (s == 0 && os == 0) {
				return s
			}
		}
		os = s
		ost = st
	}
	chk.Panic("achieved maximum number of iterations (n=%d)", jmax)
	return
}
num/quadElementary.go
0.725454
0.491151
quadElementary.go
starcoder
package gen

import (
	"math"
	"reflect"

	"github.com/leanovate/gopter"
)

// Float64Range generates float64 numbers within a given range.
func Float64Range(min, max float64) gopter.Gen {
	span := max - min
	if span < 0 || span > math.MaxFloat64 {
		return Fail(reflect.TypeOf(float64(0)))
	}
	return func(genParams *gopter.GenParameters) *gopter.GenResult {
		value := min + genParams.Rng.Float64()*span
		result := gopter.NewGenResult(value, Float64Shrinker)
		result.Sieve = func(v interface{}) bool {
			f := v.(float64)
			return f >= min && f <= max
		}
		return result
	}
}

// Float64 generates arbitrary float64 numbers that do not contain NaN or Inf.
func Float64() gopter.Gen {
	return gopter.CombineGens(
		Int64Range(0, 1),
		Int64Range(0, 0x7fe),
		Int64Range(0, 0xfffffffffffff),
	).Map(func(values []interface{}) float64 {
		// Assemble sign (1 bit), exponent (11 bits, < 0x7ff so never NaN/Inf)
		// and mantissa (52 bits) into the IEEE-754 representation.
		bits := uint64(values[0].(int64))<<63 |
			uint64(values[1].(int64))<<52 |
			uint64(values[2].(int64))
		return math.Float64frombits(bits)
	}).WithShrinker(Float64Shrinker)
}

// Float32Range generates float32 numbers within a given range.
func Float32Range(min, max float32) gopter.Gen {
	span := max - min
	if span < 0 || span > math.MaxFloat32 {
		return Fail(reflect.TypeOf(float32(0)))
	}
	return func(genParams *gopter.GenParameters) *gopter.GenResult {
		value := min + genParams.Rng.Float32()*span
		result := gopter.NewGenResult(value, Float32Shrinker)
		result.Sieve = func(v interface{}) bool {
			f := v.(float32)
			return f >= min && f <= max
		}
		return result
	}
}

// Float32 generates arbitrary float32 numbers that do not contain NaN or Inf.
func Float32() gopter.Gen {
	return gopter.CombineGens(
		Int32Range(0, 1),
		Int32Range(0, 0xfe),
		Int32Range(0, 0x7fffff),
	).Map(func(values []interface{}) float32 {
		// Assemble sign (1 bit), exponent (8 bits, < 0xff so never NaN/Inf)
		// and mantissa (23 bits) into the IEEE-754 representation.
		bits := uint32(values[0].(int32))<<31 |
			uint32(values[1].(int32))<<23 |
			uint32(values[2].(int32))
		return math.Float32frombits(bits)
	}).WithShrinker(Float32Shrinker)
}
vendor/github.com/leanovate/gopter/gen/floats.go
0.74158
0.401629
floats.go
starcoder
package docgen

import (
	"reflect"
	"regexp"
	"strings"

	"github.com/byte-power/jsexpr/conf"
)

// Kind can be any of array, map, struct, func, string, int, float, bool or any.
type Kind string

// Identifier represents variable names and field names.
type Identifier string

// TypeName is a name of type in types map.
type TypeName string

// Context is the documentation root: top-level variables plus the named types
// they reference. pkgPath is the package of the root value, used to shorten
// type names that live in the same package.
type Context struct {
	Variables map[Identifier]*Type `json:"variables"`
	Types     map[TypeName]*Type   `json:"types"`
	pkgPath   string
}

// Type is a JSON-serializable description of a Go type, specialized by Kind:
// arrays use Type (element), maps use Key+Type, structs use Fields,
// funcs use Arguments+Return.
type Type struct {
	Name      TypeName             `json:"name,omitempty"`
	Kind      Kind                 `json:"kind,omitempty"`
	Type      *Type                `json:"type,omitempty"`
	Key       *Type                `json:"key_type,omitempty"`
	Fields    map[Identifier]*Type `json:"fields,omitempty"`
	Arguments []*Type              `json:"arguments,omitempty"`
	Return    *Type                `json:"return,omitempty"`
}

var (
	// Operators lists the expression-language string operators surfaced in docs.
	Operators = []string{"matches", "contains", "startsWith", "endsWith"}

	// Builtins describes the built-in identifiers and functions of the
	// expression language with their argument/return types.
	Builtins = map[Identifier]*Type{
		"true":   {Kind: "bool"},
		"false":  {Kind: "bool"},
		"len":    {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}}, Return: &Type{Kind: "int"}},
		"all":    {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "bool"}},
		"none":   {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "bool"}},
		"any":    {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "bool"}},
		"one":    {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "bool"}},
		"filter": {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "array", Type: &Type{Kind: "any"}}},
		"map":    {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "array", Type: &Type{Kind: "any"}}},
		"count":  {Kind: "func", Arguments: []*Type{{Kind: "array", Type: &Type{Kind: "any"}}, {Kind: "func"}}, Return: &Type{Kind: "int"}},
	}
)

// CreateDoc builds a documentation Context for the environment value i:
// its fields/methods become Variables, plus the language Operators and Builtins.
func CreateDoc(i interface{}) *Context {
	c := &Context{
		Variables: make(map[Identifier]*Type),
		Types:     make(map[TypeName]*Type),
		pkgPath:   dereference(reflect.TypeOf(i)).PkgPath(),
	}

	// Ambiguous names (promoted from multiple embedded structs) are skipped.
	for name, t := range conf.CreateTypesTable(i) {
		if t.Ambiguous {
			continue
		}
		c.Variables[Identifier(name)] = c.use(t.Type, fromMethod(t.Method))
	}

	for _, op := range Operators {
		c.Variables[Identifier(op)] = &Type{
			Kind: "operator",
		}
	}

	for builtin, t := range Builtins {
		c.Variables[builtin] = t
	}

	return c
}

// config carries per-call options for Context.use.
type config struct {
	method bool // true when the reflected func is a method (skip the receiver arg)
}

type option func(c *config)

// fromMethod marks whether the type being described came from a method value.
func fromMethod(b bool) option {
	return func(c *config) {
		c.method = b
	}
}

// use converts a reflect.Type into a *Type description, registering named
// struct types in c.Types and recursing into element/key/field/argument types.
func (c *Context) use(t reflect.Type, ops ...option) *Type {
	config := &config{}
	for _, op := range ops {
		op(config)
	}

	methods := make([]reflect.Method, 0)

	// Methods of struct should be gathered from original struct with pointer,
	// as methods maybe declared on pointer receiver. Also this method retrieves
	// all embedded structs methods as well, no need to recursion.
	for i := 0; i < t.NumMethod(); i++ {
		m := t.Method(i)
		if isPrivate(m.Name) || isProtobuf(m.Name) {
			continue
		}
		methods = append(methods, m)
	}

	t = dereference(t)

	// Only named types will have methods defined on them.
	// It maybe not even struct, but we gonna call then
	// structs in appendix anyway.
	if len(methods) > 0 {
		goto appendix
	}

	// This switch only for "simple" types.
	switch t.Kind() {
	case reflect.Bool:
		return &Type{Kind: "bool"}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		fallthrough
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return &Type{Kind: "int"}
	case reflect.Float32, reflect.Float64:
		return &Type{Kind: "float"}
	case reflect.String:
		return &Type{Kind: "string"}
	case reflect.Interface:
		return &Type{Kind: "any"}
	case reflect.Array, reflect.Slice:
		return &Type{
			Kind: "array",
			Type: c.use(t.Elem()),
		}
	case reflect.Map:
		return &Type{
			Kind: "map",
			Key:  c.use(t.Key()),
			Type: c.use(t.Elem()),
		}
	case reflect.Struct:
		goto appendix
	case reflect.Func:
		arguments := make([]*Type, 0)
		start := 0
		if config.method {
			// Skip the receiver, which reflection reports as the first input.
			start = 1
		}
		for i := start; i < t.NumIn(); i++ {
			arguments = append(arguments, c.use(t.In(i)))
		}
		f := &Type{
			Kind:      "func",
			Arguments: arguments,
		}
		// Only the first return value is documented.
		if t.NumOut() > 0 {
			f.Return = c.use(t.Out(0))
		}
		return f
	}

appendix:
	// Types from the root value's own package are shown by bare name.
	name := TypeName(t.String())
	if c.pkgPath == t.PkgPath() {
		name = TypeName(t.Name())
	}
	anonymous := t.Name() == ""

	a, ok := c.Types[name]
	if !ok {
		a = &Type{
			Kind:   "struct",
			Fields: make(map[Identifier]*Type),
		}

		// baseNode a should be saved before starting recursion, or it will never end.
		if !anonymous {
			c.Types[name] = a
		}

		for name, field := range conf.FieldsFromStruct(t) {
			if isPrivate(name) || isProtobuf(name) || field.Ambiguous {
				continue
			}
			a.Fields[Identifier(name)] = c.use(field.Type)
		}

		// NOTE(review): methods were already filtered by isPrivate/isProtobuf
		// when collected above; this re-check is redundant but harmless.
		for _, m := range methods {
			if isPrivate(m.Name) || isProtobuf(m.Name) {
				continue
			}
			a.Fields[Identifier(m.Name)] = c.use(m.Type, fromMethod(true))
		}
	}

	// Anonymous structs are inlined; named ones are referenced by name.
	if anonymous {
		return a
	}

	return &Type{
		Kind: "struct",
		Name: name,
	}
}

var isCapital = regexp.MustCompile("^[A-Z]")

// isPrivate reports whether s is an unexported Go identifier.
func isPrivate(s string) bool {
	return !isCapital.Match([]byte(s))
}

// isProtobuf reports whether s is a protobuf-generated internal name.
func isProtobuf(s string) bool {
	return strings.HasPrefix(s, "XXX_")
}

// dereference strips any levels of pointer indirection from t.
func dereference(t reflect.Type) reflect.Type {
	if t == nil {
		return nil
	}
	if t.Kind() == reflect.Ptr {
		t = dereference(t.Elem())
	}
	return t
}
docgen/docgen.go
0.586168
0.407274
docgen.go
starcoder
package main

import "math"

// Vector2d is a basic immutable 2d vector; every operation returns a new value.
type Vector2d struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
}

// NewVector2d creates a 2d vector from its components.
func NewVector2d(x float64, y float64) Vector2d {
	return Vector2d{x, y}
}

// FromScalar creates a 2d vector with both components set to v.
func FromScalar(v float64) Vector2d {
	return Vector2d{v, v}
}

// FromRadians creates a unit-length 2d vector pointing at angle r (radians).
func FromRadians(r float64) Vector2d {
	return Vector2d{math.Cos(r), math.Sin(r)}
}

// Zero creates the zero 2d vector.
func Zero() Vector2d {
	return Vector2d{0, 0}
}

// Unit creates the (1, 1) 2d vector.
func Unit() Vector2d {
	return Vector2d{1, 1}
}

// Copy returns a copy of the vector (value, not a pointer).
func (v Vector2d) Copy() Vector2d {
	return Vector2d{v.X, v.Y}
}

// Magnitude returns the vector length.
func (v Vector2d) Magnitude() float64 {
	return math.Sqrt(v.MagnitudeSquared())
}

// MagnitudeSquared returns the squared vector length (cheaper than Magnitude).
func (v Vector2d) MagnitudeSquared() float64 {
	return v.X*v.X + v.Y*v.Y
}

// AddVector returns the sum of two 2d vectors.
func (v Vector2d) AddVector(v2 Vector2d) Vector2d {
	return Vector2d{v.X + v2.X, v.Y + v2.Y}
}

// SubtractVector returns the difference of two 2d vectors.
func (v Vector2d) SubtractVector(v2 Vector2d) Vector2d {
	return Vector2d{v.X - v2.X, v.Y - v2.Y}
}

// MultiplyVector returns the component-wise product of two 2d vectors.
func (v Vector2d) MultiplyVector(v2 Vector2d) Vector2d {
	return Vector2d{v.X * v2.X, v.Y * v2.Y}
}

// DivideVector returns the component-wise quotient of two 2d vectors.
func (v Vector2d) DivideVector(v2 Vector2d) Vector2d {
	return Vector2d{v.X / v2.X, v.Y / v2.Y}
}

// MultiplyScalar returns the vector scaled by s.
func (v Vector2d) MultiplyScalar(s float64) Vector2d {
	return Vector2d{v.X * s, v.Y * s}
}

// DivideScalar returns the vector divided by s.
func (v Vector2d) DivideScalar(s float64) Vector2d {
	return Vector2d{v.X / s, v.Y / s}
}

// Distance returns the Euclidean distance between two 2d vectors.
// BUG FIX: the previous version computed sqrt((dx+dy)^2), i.e. |dx+dy|,
// instead of sqrt(dx^2 + dy^2).
func (v Vector2d) Distance(v2 Vector2d) float64 {
	dx := v.X - v2.X
	dy := v.Y - v2.Y
	return math.Sqrt(dx*dx + dy*dy)
}

// Dot returns the dot product of two 2d vectors.
func (v Vector2d) Dot(v2 Vector2d) float64 {
	return v.X*v2.X + v.Y*v2.Y
}

// Reflect returns the reflection of v about the given normal
// (r = v - 2(v·n)n; normal is expected to be unit length).
func (v Vector2d) Reflect(normal Vector2d) Vector2d {
	dotProduct := v.Dot(normal)
	return Vector2d{v.X - (2 * dotProduct * normal.X), v.Y - (2 * dotProduct * normal.Y)}
}

// Normalize returns the unit vector with the same direction as v.
// Zero and already-normalized vectors are returned unchanged.
func (v Vector2d) Normalize() Vector2d {
	mag := v.Magnitude()
	if mag == 0 || mag == 1 {
		return v.Copy()
	}
	return v.DivideScalar(mag)
}

// Limit returns v with its magnitude clamped to at most max.
func (v Vector2d) Limit(max float64) Vector2d {
	magSq := v.MagnitudeSquared()
	if magSq <= max*max {
		return v.Copy()
	}
	return v.Normalize().MultiplyScalar(max)
}

// Angle returns the angle of the vector relative to the x axis.
func (v Vector2d) Angle() float64 {
	return -1 * math.Atan2(v.Y*-1, v.X)
}

// Rotate returns v rotated counter-clockwise by angle (radians).
// BUG FIX: the previous version used a minus sign in the Y component
// (x·sinθ − y·cosθ); the rotation matrix requires x·sinθ + y·cosθ.
func (v Vector2d) Rotate(angle float64) Vector2d {
	sin, cos := math.Sin(angle), math.Cos(angle)
	return Vector2d{
		v.X*cos - v.Y*sin,
		v.X*sin + v.Y*cos,
	}
}

// LinearInterpolateToVector returns the linear interpolation between v and v2
// by amount (0 returns v, 1 returns v2).
func (v Vector2d) LinearInterpolateToVector(v2 Vector2d, amount float64) Vector2d {
	return Vector2d{
		linearInterpolate(v.X, v2.X, amount),
		linearInterpolate(v.Y, v2.Y, amount),
	}
}

// MapToScalars remaps both components from the range [oldMin, oldMax]
// to the range [newMin, newMax].
func (v Vector2d) MapToScalars(oldMin, oldMax, newMin, newMax float64) Vector2d {
	return Vector2d{
		mapFloat(v.X, oldMin, oldMax, newMin, newMax),
		mapFloat(v.Y, oldMin, oldMax, newMin, newMax),
	}
}

// MapToVectors remaps each component using the matching components of the
// old/new range vectors.
func (v Vector2d) MapToVectors(oldMinV Vector2d, oldMaxV Vector2d, newMinV Vector2d, newMaxV Vector2d) Vector2d {
	return Vector2d{
		mapFloat(v.X, oldMinV.X, oldMaxV.X, newMinV.X, newMaxV.X),
		mapFloat(v.Y, oldMinV.Y, oldMaxV.Y, newMinV.Y, newMaxV.Y),
	}
}

// AngleBetween returns the angle (radians, in [0, π]) between two vectors.
// BUG FIX: the previous version divided by |v| but multiplied by |v2| due to
// a precedence mistake, treated any non-negative cosine as angle 0, and never
// applied Acos. This computes acos(v·v2 / (|v||v2|)), clamped against
// floating-point drift outside [-1, 1].
func (v Vector2d) AngleBetween(v2 Vector2d) float64 {
	cos := v.Dot(v2) / (v.Magnitude() * v2.Magnitude())
	switch {
	case cos <= -1:
		return math.Pi
	case cos >= 1:
		return 0
	}
	return math.Acos(cos)
}

// ClampToScalars clamps both components to the range [min, max].
func (v Vector2d) ClampToScalars(min, max float64) Vector2d {
	return Vector2d{
		clampFloat(v.X, min, max),
		clampFloat(v.Y, min, max),
	}
}

// ClampToVectors clamps each component to the matching components of
// minV and maxV.
func (v Vector2d) ClampToVectors(minV, maxV Vector2d) Vector2d {
	return Vector2d{
		clampFloat(v.X, minV.X, maxV.X),
		clampFloat(v.Y, minV.Y, maxV.Y),
	}
}

// Floor returns v with both components rounded down.
func (v Vector2d) Floor() Vector2d {
	return Vector2d{
		math.Floor(v.X),
		math.Floor(v.Y),
	}
}

// Negate returns v with both components negated.
func (v Vector2d) Negate() Vector2d {
	return v.MultiplyScalar(-1)
}

// getX returns the x coordinate value.
func (v *Vector2d) getX() float64 {
	return v.X
}

// getY returns the y coordinate value.
func (v *Vector2d) getY() float64 {
	return v.Y
}

// Get returns the x and y coordinate values as a tuple.
func (v *Vector2d) Get() (float64, float64) {
	return v.X, v.Y
}

// linearInterpolate returns start + (end-start)*amount.
func linearInterpolate(start, end, amount float64) float64 {
	return start + (end-start)*amount
}

// mapFloat remaps value from [oldMin, oldMax] to [newMin, newMax].
func mapFloat(value, oldMin, oldMax, newMin, newMax float64) float64 {
	return newMin + (newMax-newMin)*((value-oldMin)/(oldMax-oldMin))
}

// clampFloat clamps value to the range [min, max].
func clampFloat(value, min, max float64) float64 {
	switch {
	case value <= min:
		return min
	case value >= max:
		return max
	}
	return value
}
service/src/vector.go
0.925664
0.871803
vector.go
starcoder
package binary

import (
	"errors"
)

// Tree is the binary tree structure.
type Tree struct {
	root *node
}

// New will create a new binary search tree with the root node.
func New(root Comparor) *Tree {
	return &Tree{
		root: &node{
			data: root,
		},
	}
}

// SubTree will return true if the passed tree is a sub tree of the
// binary tree. It checks that sub's inorder AND preorder traversals both
// appear as contiguous subsequences of this tree's traversals.
func (t *Tree) SubTree(sub *Tree) bool {
	has := t.in(t.Inorder(), sub.Inorder())
	if has == false {
		return false
	}

	return t.in(t.Preorder(), sub.Preorder())
}

// in reports whether sub occurs as a contiguous subarray of main,
// comparing elements via Comparor.Compare.
func (t *Tree) in(main, sub []interface{}) bool {
	var has bool

	for len(main) >= len(sub) {
		if t.subArray(main, sub) == true {
			has = true
			break
		}
		main = main[1:]
	}

	return has
}

// subArray reports whether sub matches the prefix of main element-by-element.
// NOTE(review): each sub element is asserted to Comparor and compared against
// the raw main element; this assumes Compare accepts an interface{} argument —
// verify against the Comparor definition elsewhere in the package.
func (t *Tree) subArray(main, sub []interface{}) bool {
	if len(main) < len(sub) {
		return false
	}

	for idx, subObj := range sub {
		subCompare := subObj.(Comparor)
		mainObj := main[idx]
		if subCompare.Compare(mainObj) != Equal {
			return false
		}
	}
	return true
}

// Insert will place a node in the binary search tree. The node's own
// Compare result (Left or Right) decides which branch to descend; any
// other result (including Equal) aborts with an error.
func (t *Tree) Insert(obj Comparor) error {
	if t.root == nil {
		return errors.New("tree: must have a root node")
	}

	n := t.root
	for {
		switch n.data.Compare(obj) {
		case Left:
			if n.left == nil {
				n.left = &node{
					data: obj,
				}
				return nil
			}
			n = n.left
		case Right:
			if n.right == nil {
				n.right = &node{
					data: obj,
				}
				return nil
			}
			n = n.right
		default:
			return errors.New("tree: compare must return left or right")
		}
	}
}

// minNode returns the leftmost (minimum) node of the subtree rooted at n,
// or nil if n is nil.
func (t *Tree) minNode(n *node) *node {
	curr := n
	for curr != nil && curr.left != nil {
		curr = curr.left
	}

	return curr
}

// Delete will remove a node from the binary tree.
func (t *Tree) Delete(obj Comparor) error {
	return t.deleteNode(t.root, obj)
}

// deleteNode finds obj under n and removes it. For a node with two children
// it copies the inorder successor's data in and deletes the successor from
// the right subtree.
// NOTE(review): get() starts with parent == child == n, and replace() is a
// no-op when parent == child; it appears the node is NOT detached when the
// match is the subtree root passed in (e.g. deleting t.root, or when the
// successor is child.right itself) — confirm and add coverage.
func (t *Tree) deleteNode(n *node, obj Comparor) error {
	parent, child := t.get(n, obj)
	if child == nil {
		return errors.New("tree: unable to find node to delete")
	}

	switch {
	case child.left == nil:
		t.replace(parent, child, child.right)
		return nil
	case child.right == nil:
		t.replace(parent, child, child.left)
		return nil
	default:
		// Two children: substitute the inorder successor's data, then
		// delete the successor node from the right subtree.
		min := t.minNode(child.right)
		child.data = min.data
		return t.deleteNode(child.right, min.data)
	}
}

// replace rewires parent's link to child so it points at replace instead.
// Does nothing if child is neither of parent's direct children.
func (t *Tree) replace(parent, child, replace *node) {
	switch {
	case parent.left == child:
		parent.left = replace
	case parent.right == child:
		parent.right = replace
	}
}

// Has will return if the binary tree has the node.
func (t *Tree) Has(obj Comparor) bool {
	if _, n := t.get(t.root, obj); n != nil {
		return true
	}

	return false
}

// get searches for obj starting at n, returning the matching node and its
// parent. If no match is found (or Compare returns an unexpected value),
// child is nil. Note: when the match is n itself, parent == child.
func (t *Tree) get(n *node, obj Comparor) (parent *node, child *node) {
	parent = n
	child = n

	for child != nil {
		switch child.data.Compare(obj) {
		case Left:
			parent = child
			child = child.left
		case Right:
			parent = child
			child = child.right
		case Equal:
			return
		default:
			// Unexpected Compare result: treat as not found.
			child = nil
			return
		}
	}

	return
}

// Depth will return the depth of the tree based on the path:
// Left/Right measure the corresponding subtree, anything else the whole tree.
func (t *Tree) Depth(path Path) int {
	switch path {
	case Left:
		return t.depth(t.root.left)
	case Right:
		return t.depth(t.root.right)
	default:
		return t.depth(t.root)
	}
}

// depth returns 1 + the larger of the two child depths (0 for nil).
func (t *Tree) depth(n *node) int {
	if n == nil {
		return 0
	}

	l := t.depth(n.left)
	r := t.depth(n.right)

	if l > r {
		return l + 1
	}

	return r + 1
}

// Inorder returns an array of data traversed in order.
func (t *Tree) Inorder() []interface{} {
	objs := []interface{}{}
	return t.inorder(t.root, objs)
}

// inorder appends left subtree, node, right subtree.
func (t *Tree) inorder(n *node, objs []interface{}) []interface{} {
	if n == nil {
		return objs
	}

	objs = t.inorder(n.left, objs)
	objs = append(objs, n.data)
	objs = t.inorder(n.right, objs)

	return objs
}

// Preorder returns an array of data traversed in pre-order.
func (t *Tree) Preorder() []interface{} {
	objs := []interface{}{}
	return t.preorder(t.root, objs)
}

// preorder appends node, left subtree, right subtree.
func (t *Tree) preorder(n *node, objs []interface{}) []interface{} {
	if n == nil {
		return objs
	}

	objs = append(objs, n.data)
	objs = t.preorder(n.left, objs)
	objs = t.preorder(n.right, objs)

	return objs
}

// Postorder returns an array of data traversed in post-order.
func (t *Tree) Postorder() []interface{} {
	objs := []interface{}{}
	return t.postorder(t.root, objs)
}

// postorder appends left subtree, right subtree, node.
func (t *Tree) postorder(n *node, objs []interface{}) []interface{} {
	if n == nil {
		return objs
	}

	objs = t.postorder(n.left, objs)
	objs = t.postorder(n.right, objs)
	objs = append(objs, n.data)

	return objs
}
tree/binary/tree.go
0.811153
0.403273
tree.go
starcoder
package imageutil

import (
	"image"
	"image/color"
	"image/color/palette"
	"image/draw"
)

// ImageToPaletted converts src into an *image.Paletted using palette p.
// A source that is already paletted is returned as-is, without re-quantizing.
func ImageToPaletted(src image.Image, p color.Palette) *image.Paletted {
	if paletted, ok := src.(*image.Paletted); ok {
		return paletted
	}
	out := image.NewPaletted(src.Bounds(), p)
	draw.Draw(out, out.Rect, src, src.Bounds().Min, draw.Over)
	return out
}

// ToPalettedFunc is the signature shared by the palette-specific converters.
type ToPalettedFunc func(src image.Image) *image.Paletted

// ImageToPalettedPlan9 converts an image to `*image.Paletted`. See the
// go implementation here: https://github.com/golang/go/blob/master/src/image/gif/writer.go
func ImageToPalettedPlan9(src image.Image) *image.Paletted {
	return ImageToPaletted(src, palette.Plan9)
}

// ImageToPalettedWebSafe uses the 216 color palette created by Netscape.
// See more here: https://en.wikipedia.org/wiki/Web_colors#Web-safe_colors
func ImageToPalettedWebSafe(src image.Image) *image.Paletted {
	return ImageToPaletted(src, palette.WebSafe)
}

// ImageToRGBA converts src into an *image.RGBA anchored at the origin.
// A source that is already RGBA is returned as-is.
func ImageToRGBA(src image.Image) *image.RGBA {
	/*
		// https://stackoverflow.com/questions/31463756/convert-image-image-to-image-nrgba
		switch img := img.(type) {
		case *image.NRGBA:
			return NRGBAtoRGBA(img)
		case *image.Paletted:
			return ImageWithSetToRGBA(img)
		case *image.YCbCr:
			return YCbCrToRGBA(img)
		}
	*/
	if rgba, ok := src.(*image.RGBA); ok {
		return rgba
	}
	bounds := src.Bounds()
	out := image.NewRGBA(image.Rect(0, 0, bounds.Dx(), bounds.Dy()))
	draw.Draw(out, out.Rect, src, bounds.Min, draw.Src)
	return out
}

/*
func ImageWithSetToRGBA(src draw.Image) *image.RGBA {
	rect := src.Bounds()
	imgRGBA := image.NewRGBA(rect)
	for x := rect.Min.X; x <= rect.Max.X; x++ {
		for y := rect.Min.Y; y <= rect.Max.Y; y++ {
			imgRGBA.Set(x, y, src.At(x, y))
		}
	}
	return imgRGBA
}

func NRGBAtoRGBA(imgNRGBA *image.NRGBA) *image.RGBA {
	rect := imgNRGBA.Bounds()
	imgRGBA := image.NewRGBA(rect)
	for x := rect.Min.X; x <= rect.Max.X; x++ {
		for y := rect.Min.Y; y <= rect.Max.Y; y++ {
			imgRGBA.Set(x, y, imgNRGBA.At(x, y))
		}
	}
	return imgRGBA
}

func YCbCrToRGBA(src *image.YCbCr) *image.RGBA {
	// https://stackoverflow.com/questions/31463756/convert-image-image-to-image-nrgba
	b := src.Bounds()
	img := image.NewRGBA(image.Rect(0, 0, b.Dx(), b.Dy()))
	draw.Draw(img, img.Bounds(), src, b.Min, draw.Src)
	return img
}
*/
image/imageutil/convert.go
0.875295
0.514705
convert.go
starcoder
package binary // Endian is a ByteOrder specifies how to convert byte sequences into // 16-, 32-, or 64-bit unsigned integers. type Endian interface { Uint16([]byte) uint16 Uint32([]byte) uint32 Uint64([]byte) uint64 PutUint16([]byte, uint16) PutUint32([]byte, uint32) PutUint64([]byte, uint64) String() string } var ( // LittleEndian is the little-endian implementation of Endian. LittleEndian littleEndian // BigEndian is the big-endian implementation of Endian. BigEndian bigEndian //DefaultEndian is LittleEndian DefaultEndian = LittleEndian ) type littleEndian struct{} func (littleEndian) Uint16(b []byte) uint16 { _ = b[1] // bounds check hint to compiler; see golang.org/issue/14808 return uint16(b[0]) | uint16(b[1])<<8 } func (littleEndian) PutUint16(b []byte, v uint16) { _ = b[1] // early bounds check to guarantee safety of writes below b[0] = byte(v) b[1] = byte(v >> 8) } func (littleEndian) Uint32(b []byte) uint32 { _ = b[3] // bounds check hint to compiler; see golang.org/issue/14808 return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 } func (littleEndian) PutUint32(b []byte, v uint32) { _ = b[3] // early bounds check to guarantee safety of writes below b[0] = byte(v) b[1] = byte(v >> 8) b[2] = byte(v >> 16) b[3] = byte(v >> 24) } func (littleEndian) Uint64(b []byte) uint64 { _ = b[7] // bounds check hint to compiler; see golang.org/issue/14808 return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 } func (littleEndian) PutUint64(b []byte, v uint64) { _ = b[7] // early bounds check to guarantee safety of writes below b[0] = byte(v) b[1] = byte(v >> 8) b[2] = byte(v >> 16) b[3] = byte(v >> 24) b[4] = byte(v >> 32) b[5] = byte(v >> 40) b[6] = byte(v >> 48) b[7] = byte(v >> 56) } func (littleEndian) String() string { return "LittleEndian" } func (littleEndian) GoString() string { return "binary.LittleEndian" } type bigEndian struct{} func 
(bigEndian) Uint16(b []byte) uint16 { _ = b[1] // bounds check hint to compiler; see golang.org/issue/14808 return uint16(b[1]) | uint16(b[0])<<8 } func (bigEndian) PutUint16(b []byte, v uint16) { _ = b[1] // early bounds check to guarantee safety of writes below b[0] = byte(v >> 8) b[1] = byte(v) } func (bigEndian) Uint32(b []byte) uint32 { _ = b[3] // bounds check hint to compiler; see golang.org/issue/14808 return uint32(b[3]) | uint32(b[2])<<8 | uint32(b[1])<<16 | uint32(b[0])<<24 } func (bigEndian) PutUint32(b []byte, v uint32) { _ = b[3] // early bounds check to guarantee safety of writes below b[0] = byte(v >> 24) b[1] = byte(v >> 16) b[2] = byte(v >> 8) b[3] = byte(v) } func (bigEndian) Uint64(b []byte) uint64 { _ = b[7] // bounds check hint to compiler; see golang.org/issue/14808 return uint64(b[7]) | uint64(b[6])<<8 | uint64(b[5])<<16 | uint64(b[4])<<24 | uint64(b[3])<<32 | uint64(b[2])<<40 | uint64(b[1])<<48 | uint64(b[0])<<56 } func (bigEndian) PutUint64(b []byte, v uint64) { _ = b[7] // early bounds check to guarantee safety of writes below b[0] = byte(v >> 56) b[1] = byte(v >> 48) b[2] = byte(v >> 40) b[3] = byte(v >> 32) b[4] = byte(v >> 24) b[5] = byte(v >> 16) b[6] = byte(v >> 8) b[7] = byte(v) } func (bigEndian) String() string { return "BigEndian" } func (bigEndian) GoString() string { return "binary.BigEndian" }
endian.go
0.653348
0.601008
endian.go
starcoder
package birnn

import (
	"encoding/gob"
	"sync"

	"github.com/nlpodyssey/spago/ag"
	"github.com/nlpodyssey/spago/nn"
)

// MergeType is the enumeration-like type used for the set of merging methods
// which a BiRNN model Processor can perform.
type MergeType int

const (
	// Concat merging method: the outputs are concatenated together (the default)
	Concat MergeType = iota
	// Sum merging method: the outputs are added together
	Sum
	// Prod merging method: the outputs multiplied element-wise together
	Prod
	// Avg merging method: the average of the outputs is taken
	Avg
)

var _ nn.Model = &Model{}

// Model contains the serializable parameters.
type Model struct {
	nn.Module
	Positive  nn.StandardModel // positive time direction a.k.a. left-to-right
	Negative  nn.StandardModel // negative time direction a.k.a. right-to-left
	MergeMode MergeType
}

func init() {
	// BUG FIX: gob.Register(&Model{}) was previously called twice; a single
	// registration is sufficient.
	gob.Register(&Model{})
}

// New returns a new model with parameters initialized to zeros.
func New(positive, negative nn.StandardModel, merge MergeType) *Model {
	return &Model{
		Positive:  positive,
		Negative:  negative,
		MergeMode: merge,
	}
}

// Forward performs the forward step for each input node and returns the result.
// The two directions run concurrently: the positive model sees xs as-is, the
// negative model sees xs reversed; the per-position outputs are then merged
// according to MergeMode.
func (m *Model) Forward(xs ...ag.Node) []ag.Node {
	var pos []ag.Node
	var neg []ag.Node
	var wg sync.WaitGroup
	wg.Add(2)
	go func() {
		defer wg.Done()
		pos = m.Positive.Forward(xs...)
	}()
	go func() {
		defer wg.Done()
		neg = m.Negative.Forward(reversed(xs)...)
	}()
	wg.Wait()
	out := make([]ag.Node, len(pos))
	for i := range out {
		// neg is in reversed time order; index it back-to-front so both
		// operands refer to the same input position.
		out[i] = m.merge(pos[i], neg[len(out)-1-i])
	}
	return out
}

// reversed returns a new slice with the nodes of ns in reverse order;
// ns itself is left untouched.
func reversed(ns []ag.Node) []ag.Node {
	r := make([]ag.Node, len(ns))
	copy(r, ns)
	for i := 0; i < len(r)/2; i++ {
		j := len(r) - i - 1
		r[i], r[j] = r[j], r[i]
	}
	return r
}

// merge combines one positive-direction output with the matching
// negative-direction output according to m.MergeMode.
func (m *Model) merge(a, b ag.Node) ag.Node {
	switch m.MergeMode {
	case Concat:
		return ag.Concat(a, b)
	case Sum:
		return ag.Add(a, b)
	case Prod:
		return ag.Prod(a, b)
	case Avg:
		return ag.ProdScalar(ag.Add(a, b), ag.Var(a.Value().NewScalar(0.5)))
	default:
		panic("birnn: invalid merge mode")
	}
}
nn/birnn/birnn.go
0.705379
0.42925
birnn.go
starcoder
package types type EncryptionStatus string // Enum values for EncryptionStatus const ( EncryptionStatusUpdating EncryptionStatus = "UPDATING" EncryptionStatusActive EncryptionStatus = "ACTIVE" ) // Values returns all known values for EncryptionStatus. Note that this can be // expanded in the future, and so it is only as up to date as the client. The // ordering of this slice is not guaranteed to be stable across updates. func (EncryptionStatus) Values() []EncryptionStatus { return []EncryptionStatus{ "UPDATING", "ACTIVE", } } type EncryptionType string // Enum values for EncryptionType const ( EncryptionTypeNone EncryptionType = "NONE" EncryptionTypeKms EncryptionType = "KMS" ) // Values returns all known values for EncryptionType. Note that this can be // expanded in the future, and so it is only as up to date as the client. The // ordering of this slice is not guaranteed to be stable across updates. func (EncryptionType) Values() []EncryptionType { return []EncryptionType{ "NONE", "KMS", } } type SamplingStrategyName string // Enum values for SamplingStrategyName const ( SamplingStrategyNamePartialscan SamplingStrategyName = "PartialScan" SamplingStrategyNameFixedrate SamplingStrategyName = "FixedRate" ) // Values returns all known values for SamplingStrategyName. Note that this can be // expanded in the future, and so it is only as up to date as the client. The // ordering of this slice is not guaranteed to be stable across updates. func (SamplingStrategyName) Values() []SamplingStrategyName { return []SamplingStrategyName{ "PartialScan", "FixedRate", } } type TimeRangeType string // Enum values for TimeRangeType const ( TimeRangeTypeTraceid TimeRangeType = "TraceId" TimeRangeTypeEvent TimeRangeType = "Event" ) // Values returns all known values for TimeRangeType. Note that this can be // expanded in the future, and so it is only as up to date as the client. The // ordering of this slice is not guaranteed to be stable across updates. 
func (TimeRangeType) Values() []TimeRangeType { return []TimeRangeType{ "TraceId", "Event", } }
service/xray/types/enums.go
0.813387
0.414662
enums.go
starcoder
package values

import "fmt"

// Period is a time scope bounded by a Start and an End date. A zero Start
// means "open at the beginning"; a zero End means "open at the end".
type Period struct {
	Start Date
	End   Date
}

// NewPeriod creates a new Period instance. If both bounds are non-zero and
// given in reverse order, they are swapped so that Start is never after End.
func NewPeriod(from, to Date) Period {
	if !from.IsZero() && !to.IsZero() && from.After(to) {
		from, to = to, from
	}
	return Period{Start: from, End: to}
}

// Equal reports whether both elements of two Period instances are equal.
func (lp Period) Equal(rp Period) bool {
	return lp.Start == rp.Start && lp.End == rp.End
}

// Contains reports whether the scope of this Period contains the given date.
// A zero date is never contained; a fully zero Period contains every
// non-zero date; a zero bound leaves that side of the period open.
func (p Period) Contains(dt Date) bool {
	if dt.IsZero() {
		return false
	}
	switch {
	case p.IsZero():
		return true
	case p.IsZeroEnd():
		return !p.Start.After(dt)
	case p.IsZeroStart():
		return !p.End.Before(dt)
	default:
		return !p.Start.After(dt) && !p.End.Before(dt)
	}
}

// IsZeroStart reports whether the Start element is the zero value.
func (p Period) IsZeroStart() bool {
	return p.Start.IsZero()
}

// IsZeroEnd reports whether the End element is the zero value.
func (p Period) IsZeroEnd() bool {
	return p.End.IsZero()
}

// IsZero reports whether both elements are zero values.
func (p Period) IsZero() bool {
	return p.IsZeroStart() && p.IsZeroEnd()
}

// StringStart returns the string form of Period.Start, or "" if it is zero.
func (p Period) StringStart() string {
	if p.IsZeroStart() {
		return ""
	}
	return p.Start.String()
}

// StringEnd returns the string form of Period.End, or "" if it is zero.
func (p Period) StringEnd() string {
	if p.IsZeroEnd() {
		return ""
	}
	return p.End.String()
}

// String implements fmt.Stringer for Period.
func (p Period) String() string {
	switch {
	case p.IsZero():
		return ""
	case p.IsZeroStart():
		return fmt.Sprintf("> %s", p.StringEnd())
	case p.IsZeroEnd():
		return fmt.Sprintf("%s >", p.StringStart())
	}
	return fmt.Sprintf("%s > %s", p.StringStart(), p.StringEnd())
}

/* Copyright 2020 Spiegel
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
values/period.go
0.774413
0.488527
period.go
starcoder
package configuration

import (
	"fmt"
	"os"
	"strconv"

	"gopkg.in/go-playground/validator.v9"

	"github.com/spf13/viper"
)

/*
Config schema

All values may be set in revere.yaml (see cmd/root.go):

	client:
	  redirects: 2

Some values may additionally be overridden by command line flags or
environment variables (noted per field, wired in readEnvironmentVariables()),
have non-"zero" defaults (set in newDefaultConfig()), or be required to be
non-"zero" (validated in AssembleConfig()).
*/
type Config struct {
	// Verbose enables more verbose console output.
	// NOTE: May be set via --verbose / -v command line flags
	Verbose bool
	Client  struct {
		// Redirects is the number of 300-series redirects to follow (default: 3).
		Redirects int
		// Retries is the number of exponential-backoff retries to make (default: 3).
		Retries int
	}
	Statuspage struct {
		// ApiKey authenticates against Statuspage.io.
		// NOTE: May be set via REVERE_STATUSPAGE_APIKEY in environment
		ApiKey string `validate:"required"`
		// PageID identifies the particular page to interact with.
		PageID string `validate:"required"`
		// ApiRoot is the API base URL (default: "https://api.statuspage.io/v1").
		ApiRoot    string
		Components []Component      `validate:"unique=Name,dive"`
		Groups     []ComponentGroup `validate:"unique=Name,dive"`
	}
	Pubsub struct {
		// ProjectID is the non-numeric ID of the GCP project containing the subscription.
		ProjectID string `validate:"required"`
		// SubscriptionID is the Cloud Pub/Sub subscription to pull messages from.
		SubscriptionID string `validate:"required"`
	}
	Api struct {
		// Port hosts Revere's web server (default: 8080).
		// NOTE: May be set via REVERE_API_PORT in environment
		Port int
		// Debug prints debugging information from the server library.
		Debug bool
		// Silent forcibly silences the request log.
		Silent bool
	}
	// ServiceToComponentMapping correlates developed services to user-facing components.
	ServiceToComponentMapping []ServiceToComponentMapping `validate:"dive"`
}

// Component configuration--note that leaving any of the below unfilled will
// use Go's "zero" value (false/empty).
type Component struct {
	// Name is a unique but user-readable component name.
	Name        string `validate:"required"`
	Description string
	// OnlyShowIfDegraded hides the component from users while it is operational.
	OnlyShowIfDegraded bool
	// HideUptime hides uptime data and leaves it unrecorded.
	HideUptime bool
	// StartDate is the date the component existed from, in the form YYYY-MM-DD.
	StartDate string `validate:"required"`
}

// ComponentGroup configuration--note that leaving any of the below unfilled
// will use Go's "zero" value (false/empty).
type ComponentGroup struct {
	// Name is a unique but user-readable group name.
	Name        string `validate:"required"`
	Description string
	// ComponentNames lists the exact names of components in the group
	// (components should never exist in more than one group).
	ComponentNames []string `validate:"required,unique"`
}

// ServiceToComponentMapping correlates developed services ("Rawls", "Leonardo")
// in particular environments ("prod") to user-facing components
// ("Notebooks", "Terra UI").
type ServiceToComponentMapping struct {
	ServiceName            string `validate:"required"`
	ServiceEnvironment     string `validate:"required"`
	AffectsComponentsNamed []string `validate:"unique"`
}

// newDefaultConfig sets config defaults only, as described on the Config fields.
func newDefaultConfig() *Config {
	c := &Config{}
	c.Client.Redirects = 3
	c.Client.Retries = 3
	c.Statuspage.ApiRoot = "https://api.statuspage.io/v1"
	c.Api.Port = 8080
	return c
}

// readEnvironmentVariables applies environment-variable overrides to the
// config, specifically only those documented on the Config fields.
func readEnvironmentVariables(config *Config) error {
	if apiKey, ok := os.LookupEnv("REVERE_STATUSPAGE_APIKEY"); ok {
		config.Statuspage.ApiKey = apiKey
	}
	if portString, ok := os.LookupEnv("REVERE_API_PORT"); ok {
		port, err := strconv.Atoi(portString)
		if err != nil {
			return err
		}
		config.Api.Port = port
	}
	return nil
}

// secondaryConfigValidation performs logical validation that can't be
// captured by struct tags: every component a service mapping refers to must
// actually be declared.
func secondaryConfigValidation(config *Config) error {
	// map[string]struct{} acts as a set (no allocation for values).
	declared := make(map[string]struct{})
	for _, component := range config.Statuspage.Components {
		declared[component.Name] = struct{}{}
	}
	for _, mapping := range config.ServiceToComponentMapping {
		for _, name := range mapping.AffectsComponentsNamed {
			if _, ok := declared[name]; !ok {
				return fmt.Errorf("mapping for service %s affects non-existent component %s", mapping.ServiceName, name)
			}
		}
	}
	return nil
}

// AssembleConfig creates a default config, reads values from Viper's config
// file, applies overrides from the environment, and validates the config
// before returning.
func AssembleConfig(v *viper.Viper) (*Config, error) {
	config := newDefaultConfig()
	if err := v.Unmarshal(config); err != nil {
		return nil, fmt.Errorf("error unmarshalling Viper to configuration struct: %w", err)
	}
	if err := readEnvironmentVariables(config); err != nil {
		return nil, fmt.Errorf("error reading environment variables: %w", err)
	}
	if err := validator.New().Struct(config); err != nil {
		return nil, fmt.Errorf("errors validating configuration: %w", err)
	}
	if err := secondaryConfigValidation(config); err != nil {
		return nil, fmt.Errorf("errors during secondary configuration validation: %w", err)
	}
	return config, nil
}
internal/configuration/config.go
0.669637
0.449755
config.go
starcoder
package main

import "time"

// Neural type enumeration.
const (
	// TypeExcitatory is the enum representing the excitatory neural type.
	TypeExcitatory = iota
	// TypeInhibitory is the enum representing the inhibitory neural type.
	TypeInhibitory
)

// Neuron is the basic building block of the neural network structure.
type Neuron struct {
	FiredAt   time.Time           `json:"fired_at"`
	In        []*NeuronConnection `json:"incoming"`
	Out       []*NeuronConnection `json:"outgoing"`
	Potential float64             `json:"potential"`
	Type      int                 `json:"type"`
}

// NewNeuron returns a new base neuron of the given type with no connections.
func NewNeuron(nType int) *Neuron {
	return &Neuron{
		In:   []*NeuronConnection{},
		Out:  []*NeuronConnection{},
		Type: nType,
	}
}

// Clone clones the current neuron and its state. It does not clone any of the
// connections to or from the source neuron.
func (n *Neuron) Clone() *Neuron {
	dup := NewNeuron(n.Type)
	dup.FiredAt = n.FiredAt
	dup.Potential = n.Potential
	return dup
}

// Connect wires this neuron to the target neuron and registers the new
// connection on both neurons' connection lists.
func (n *Neuron) Connect(target *Neuron) *NeuronConnection {
	c := NewNeuronConnection(n, target)
	n.AddOutgoing(c)
	target.AddIncoming(c)
	return c
}

// AddIncoming adds a new incoming connection to the neuron. Multiple
// connections are allowed, so uniqueness is not enforced.
func (n *Neuron) AddIncoming(c *NeuronConnection) {
	n.In = append(n.In, c)
}

// AddOutgoing adds a new outgoing connection to the neuron. Multiple
// connections are allowed, so uniqueness is not enforced.
func (n *Neuron) AddOutgoing(c *NeuronConnection) {
	n.Out = append(n.Out, c)
}

// Fire fires the current neuron, reporting whether any outgoing connection
// fired. On success the fire time is recorded and the potential reset.
func (n *Neuron) Fire() bool {
	// A neuron with no outgoing connections cannot fire.
	if len(n.Out) == 0 {
		return false
	}
	didFire := false
	for _, c := range n.Out {
		// Every connection is attempted; one success means the neuron fired.
		if c.Fire() {
			didFire = true
		}
	}
	if didFire {
		n.FiredAt = time.Now()
		n.Potential = 0
	}
	return didFire
}

// TotalInput calculates the total connection input on this neuron.
func (n *Neuron) TotalInput() float64 {
	sum := 0.0
	for _, c := range n.In {
		sum += c.CalculateIntensity()
	}
	return sum
}

// TotalInputWeight calculates the total weight of incoming connections.
func (n *Neuron) TotalInputWeight() float64 {
	sum := 0.0
	for _, c := range n.In {
		sum += c.Weight
	}
	return sum
}

// TotalOutput calculates the total connection output from this neuron.
func (n *Neuron) TotalOutput() float64 {
	sum := 0.0
	for _, c := range n.Out {
		sum += c.CalculateIntensity()
	}
	return sum
}

// TotalOutputWeight calculates the total weight of outgoing connections.
func (n *Neuron) TotalOutputWeight() float64 {
	sum := 0.0
	for _, c := range n.Out {
		sum += c.Weight
	}
	return sum
}
neuron.go
0.820254
0.482002
neuron.go
starcoder
package table import "github.com/asukakenji/go-benchmarks" // LeadingZeros returns the number of leading zero bits in x; the result is the size of uint in bits for x == 0. func LeadingZeros(x uint) int { if x == 0 { return benchmarks.SizeOfUintInBits } n := uint8(0) shift := benchmarks.SizeOfUintInBits - 8 nForZeroX := uint8(8) for { n += nlz8tab[(x>>shift)&0xff] if n != nForZeroX { return int(n) } nForZeroX += 8 shift -= 8 } } // LeadingZeros8 returns the number of leading zero bits in x; the result is 8 for x == 0. func LeadingZeros8(x uint8) int { return int(nlz8tab[x]) } // LeadingZeros16 returns the number of leading zero bits in x; the result is 16 for x == 0. func LeadingZeros16(x uint16) int { n := nlz8tab[x>>8] if n != 8 { return int(n) } return int(n + nlz8tab[x&0xff]) } // LeadingZeros32 returns the number of leading zero bits in x; the result is 32 for x == 0. func LeadingZeros32(x uint32) int { n := nlz8tab[x>>24] if n != 8 { return int(n) } n += nlz8tab[(x>>16)&0xff] if n != 16 { return int(n) } n += nlz8tab[(x>>8)&0xff] if n != 24 { return int(n) } return int(n + nlz8tab[x&0xff]) } // LeadingZeros64 returns the number of leading zero bits in x; the result is 64 for x == 0. func LeadingZeros64(x uint64) int { n := nlz8tab[x>>56] if n != 8 { return int(n) } n += nlz8tab[(x>>48)&0xff] if n != 16 { return int(n) } n += nlz8tab[(x>>40)&0xff] if n != 24 { return int(n) } n += nlz8tab[(x>>32)&0xff] if n != 32 { return int(n) } n += nlz8tab[(x>>24)&0xff] if n != 40 { return int(n) } n += nlz8tab[(x>>16)&0xff] if n != 48 { return int(n) } n += nlz8tab[(x>>8)&0xff] if n != 56 { return int(n) } return int(n + nlz8tab[x&0xff]) } // LeadingZerosPtr returns the number of leading zero bits in x; the result is the size of uintptr in bits for x == 0. 
func LeadingZerosPtr(x uintptr) int { if x == 0 { return benchmarks.SizeOf[uintptr]() } n := uint8(0) shift := benchmarks.SizeOf[uintptr]() - 8 nForZeroX := uint8(8) for { n += nlz8tab[(x>>shift)&0xff] if n != nForZeroX { return int(n) } nForZeroX += 8 shift -= 8 } }
math/bits/impl/leadingzeros/table/bits.go
0.645008
0.44083
bits.go
starcoder
package native

import "math"

// Dlanv2 computes the Schur factorization of a real 2×2 matrix:
//  [ a b ] = [ cs -sn ] * [ aa bb ] * [ cs  sn ]
//  [ c d ]   [ sn  cs ]   [ cc dd ]   [-sn  cs ]
// If cc is zero, aa and dd are real eigenvalues of the matrix. Otherwise it
// holds that aa = dd and bb*cc < 0, and aa ± sqrt(bb*cc) are complex conjugate
// eigenvalues. The real and imaginary parts of the eigenvalues are returned in
// (rt1r,rt1i) and (rt2r,rt2i).
func (impl Implementation) Dlanv2(a, b, c, d float64) (aa, bb, cc, dd float64, rt1r, rt1i, rt2r, rt2i float64, cs, sn float64) {
	switch {
	case c == 0: // Matrix is already upper triangular.
		aa = a
		bb = b
		cc = 0
		dd = d
		cs = 1
		sn = 0
	case b == 0: // Matrix is lower triangular, swap rows and columns.
		aa = d
		bb = -c
		cc = 0
		dd = a
		cs = 0
		sn = 1
	case a == d && math.Signbit(b) != math.Signbit(c):
		// Matrix is already in the standard Schur form.
		aa = a
		bb = b
		cc = c
		dd = d
		cs = 1
		sn = 0
	default:
		temp := a - d
		p := temp / 2
		bcmax := math.Max(math.Abs(b), math.Abs(c))
		bcmis := math.Min(math.Abs(b), math.Abs(c))
		if b*c < 0 {
			bcmis *= -1
		}
		scale := math.Max(math.Abs(p), bcmax)
		z := p/scale*p + bcmax/scale*bcmis
		eps := dlamchP
		if z >= 4*eps {
			// Real eigenvalues. Compute aa and dd.
			if p > 0 {
				z = p + math.Sqrt(scale)*math.Sqrt(z)
			} else {
				z = p - math.Sqrt(scale)*math.Sqrt(z)
			}
			aa = d + z
			dd = d - bcmax/z*bcmis
			// Compute bb and the rotation matrix.
			tau := impl.Dlapy2(c, z)
			cs = z / tau
			sn = c / tau
			bb = b - c
			cc = 0
		} else {
			// Complex eigenvalues, or real (almost) equal eigenvalues.
			// Make diagonal elements equal.
			sigma := b + c
			tau := impl.Dlapy2(sigma, temp)
			cs = math.Sqrt((1 + math.Abs(sigma)/tau) / 2)
			sn = -p / (tau * cs)
			if sigma < 0 {
				sn *= -1
			}
			// Compute [ aa bb ] = [ a b ] [ cs -sn ]
			//         [ cc dd ]   [ c d ] [ sn  cs ]
			aa = a*cs + b*sn
			bb = -a*sn + b*cs
			cc = c*cs + d*sn
			dd = -c*sn + d*cs
			// Compute [ a b ] = [ cs sn ] [ aa bb ]
			//         [ c d ]   [-sn cs ] [ cc dd ]
			a = aa*cs + cc*sn
			b = bb*cs + dd*sn
			c = -aa*sn + cc*cs
			d = -bb*sn + dd*cs
			temp = (a + d) / 2
			aa = temp
			bb = b
			cc = c
			dd = temp
			if cc != 0 {
				if bb != 0 {
					if math.Signbit(bb) == math.Signbit(cc) {
						// Real eigenvalues, reduce to
						// upper triangular form.
						sab := math.Sqrt(math.Abs(bb))
						sac := math.Sqrt(math.Abs(cc))
						p = sab * sac
						if cc < 0 {
							p *= -1
						}
						tau = 1 / math.Sqrt(math.Abs(bb+cc))
						aa = temp + p
						bb = bb - cc
						cc = 0
						dd = temp - p
						cs1 := sab * tau
						sn1 := sac * tau
						// BUG FIX: the composed rotation is
						// (cs,sn)·(cs1,sn1); the second component is
						// cs*sn1 + sn*cs1. The previous code used
						// "sn+cs1" (addition) instead of "sn*cs1",
						// producing a non-orthogonal rotation.
						cs, sn = cs*cs1-sn*sn1, cs*sn1+sn*cs1
					}
				} else {
					bb = -cc
					cc = 0
					cs, sn = -sn, cs
				}
			}
		}
	}
	// Store eigenvalues in (rt1r,rt1i) and (rt2r,rt2i).
	rt1r = aa
	rt2r = dd
	if cc != 0 {
		rt1i = math.Sqrt(math.Abs(bb)) * math.Sqrt(math.Abs(cc))
		rt2i = -rt1i
	}
	return
}
vendor/github.com/gonum/lapack/native/dlanv2.go
0.700075
0.610163
dlanv2.go
starcoder
package telerik

import (
	"reflect"

	log "github.com/helmutkemper/seelog"
)

// KendoSchema describes the schema configuration of a kendo.data.DataSource.
// Each field maps to the option of the same jsObject tag name; fields typed
// interface{} accept either a *JavaScript callback or a string field name.
// Full reference:
// https://docs.telerik.com/kendo-ui/api/javascript/data/datasource/configuration/schema
type KendoSchema struct {
	// Aggregates is the response field (or a function returning it) that
	// holds the aggregate results. Used only when serverAggregates is true.
	Aggregates interface{} `jsObject:"aggregates" jsType:"*JavaScript,string"`

	// Data is the response field (or a function returning it) that holds
	// the data items of the server response.
	Data interface{} `jsObject:"data" jsType:"*JavaScript,string"`

	// Errors is the response field (or a function returning it) that holds
	// server-side errors (default: "errors"). When the server response
	// contains that field, the "error" event is fired with those errors.
	Errors interface{} `jsObject:"errors" jsType:"*JavaScript,string"`

	// Groups is the response field (or a function returning it) that holds
	// the groups. Used only when serverGrouping is true.
	Groups interface{} `jsObject:"groups" jsType:"*JavaScript,string"`

	// Model is the data item (model) configuration, initialized via
	// kendo.data.Model.define or an existing kendo.data.Model instance.
	Model KendoDataModel `jsObject:"model"`

	// Parser is executed before the server response is used; it can
	// preprocess or parse the response and must return the data items.
	Parser JavaScript `jsObject:"parse"`

	// Total is the response field (or a function returning it) that holds
	// the total number of data items. Required when serverPaging is true or
	// the Data option above is set.
	Total interface{} `jsObject:"total" jsType:"*JavaScript,string"`

	// Type is the type of the response: "xml" or "json" (JSON by default).
	Type KendoTypeData `jsObject:"type"`

	*ToJavaScriptConverter
}

// ToJavaScript renders this schema configuration as Telerik JavaScript
// source. On conversion failure the error is logged and an empty byte slice
// is returned.
func (el *KendoSchema) ToJavaScript() []byte {
	v := reflect.ValueOf(el).Elem()
	js, err := el.ToJavaScriptConverter.ToTelerikJavaScript(v)
	if err != nil {
		log.Criticalf("KendoSchema.Error: %v", err.Error())
		return []byte{}
	}
	return js
}
0.767603
0.53607
kendoSchema.go
starcoder
package ggassert

import (
	"cmp"
	"reflect"
	"testing"
)

// Equal asserts that two values are equal (deep equality, so this works for
// slices, maps, and structs as well as plain comparable values).
func Equal[T any](t testing.TB, expected, actual T, format string, args ...any) {
	if reflect.DeepEqual(expected, actual) {
		return
	}
	t.Errorf(format, args...)
}

// LessThan asserts that the first operand is less than the second one.
func LessThan[T cmp.Ordered](t testing.TB, a, b T, format string, args ...any) {
	if a < b {
		return
	}
	t.Errorf(format, args...)
}

// LessThanOrEqual asserts that the first operand is less than or equal the second one.
func LessThanOrEqual[T cmp.Ordered](t testing.TB, a, b T, format string, args ...any) {
	if a <= b {
		return
	}
	t.Errorf(format, args...)
}

// GreaterThan asserts that the first operand is greater than the second one.
func GreaterThan[T cmp.Ordered](t testing.TB, a, b T, format string, args ...any) {
	if a > b {
		return
	}
	t.Errorf(format, args...)
}

// GreaterThanOrEqual asserts that the first operand is greater than or equal the second one.
func GreaterThanOrEqual[T cmp.Ordered](t testing.TB, a, b T, format string, args ...any) {
	if a >= b {
		return
	}
	t.Errorf(format, args...)
}

// ContainsSlice asserts that the slice contains the specified value.
// T is comparable, so == is used directly; this matches ContainsMapKey's
// lookup semantics (reflect.DeepEqual would instead dereference pointer
// elements, disagreeing with == for pointer types).
func ContainsSlice[T comparable](t testing.TB, s []T, expected T, format string, args ...any) {
	for _, v := range s {
		if v == expected {
			return
		}
	}
	t.Errorf(format, args...)
}

// ContainsMapKey asserts that the map contains the specified key.
func ContainsMapKey[K comparable, V any](t testing.TB, target map[K]V, expectedKey K, format string, args ...any) {
	if _, ok := target[expectedKey]; ok {
		return
	}
	t.Errorf(format, args...)
}

// ContainsMapValue asserts that the map contains the specified value.
// As with ContainsSlice, == is used because V is comparable.
func ContainsMapValue[K, V comparable](t testing.TB, target map[K]V, expectedValue V, format string, args ...any) {
	for _, v := range target {
		if v == expectedValue {
			return
		}
	}
	t.Errorf(format, args...)
}
0.757346
0.708692
assert.go
starcoder
package geoserver // CreateDatastoreRequest represents the properties that are required in order to create a datastore type CreateDatastoreRequest struct { // Name is the name of the datastore to create Name string // Description is the description of the datastore to create Description string // Type is the type of datastore to create Type string // DataStore is the name of the workspace to create the datastore in Workspace string // ConnectionDetails is the connection details Geoserver should use when connecting to the datastore ConnectionDetails ConnectionDetails } // ConnectionDetails is a type-safe abstraction over the various connection details // required to connection to different data sources in Geoserver type ConnectionDetails interface { // Entries returns the various connection details as a list of entries // this is the format that will then be used in the request to Geoserver Entries() map[string]string } // GetDatastoresResponse is a response to getting datastores type GetDatastoresResponse struct { Datastores []*Datastore } // Datastore is the client's representation of a Geoserver Datastore type Datastore struct { Name string Description string Type string Enabled bool Workspace *Workspace ConnectionParameters map[string]string } /** * REST API */ // createDatastoreRestRequest is a struct representation of the JSON required to create a datastore in Geoserver type createDatastoreRestRequest struct { Datastore *restDatastore `json:"dataStore"` } type getDatastoresRestResponse struct { Datastores *getDatastoresDatastores `json:"dataStores"` } type getDatastoresDatastores struct { Datastores []*restDatastore `json:"dataStore"` } func newEmptyGetDatastoresResponse() *GetDatastoresResponse { return &GetDatastoresResponse{ Datastores: make([]*Datastore, 0), } } func newEmptyGetDatastoresRestResponse() *getDatastoresRestResponse { return &getDatastoresRestResponse{ Datastores: &getDatastoresDatastores{ Datastores: make([]*restDatastore, 0), }, } } func 
getDatastoresRestResponseToGetDatatstoresResponse(response *getDatastoresRestResponse) *GetDatastoresResponse { result := newEmptyGetDatastoresResponse() if response.Datastores != nil { if response.Datastores.Datastores != nil { for _, datastore := range response.Datastores.Datastores { result.Datastores = append(result.Datastores, restDatastoreToDatastore(datastore)) } } } return result } // restDatastore is a Geoserver datastore used to interact with the REST API type restDatastore struct { Name string `json:"name"` Description string `json:"description"` Type string `json:"type"` Enabled bool `json:"enabled"` Workspace *restWorkspace `json:"workspace"` ConnectionParameters *datasetConnectionParameters `json:"connectionParameters"` } // datasetConnectionParameters are the connection parameters for the Dataset type datasetConnectionParameters struct { // entry are the key-value pairs of the connection parameters Entry []*entry `json:"entry"` } // entry is a key-value pair used when configuring Datasets // it is used by the Geoserver REST API type entry struct { // Key is the key of the key-value entry Key string `json:"@key"` // Value is the value of the key-value entry Value string `json:"$"` } // newCreateDatasouceRestRequest converts the generic CreateDatastoreRequest into the REST specific createDatastoreRestRequest func newCreateDatasouceRestRequest(request *CreateDatastoreRequest) *createDatastoreRestRequest { return &createDatastoreRestRequest{ Datastore: &restDatastore{ Name: request.Name, Description: request.Description, Type: request.Type, Enabled: true, Workspace: &restWorkspace{ Name: request.Workspace, }, ConnectionParameters: &datasetConnectionParameters{ Entry: mapToEntries(request.ConnectionDetails.Entries()), }, }, } } // restDatastoreToDatastore converts a restDatastore to a Datastore func restDatastoreToDatastore(restDatastore *restDatastore) *Datastore { return &Datastore{ Name: restDatastore.Name, Description: restDatastore.Description, Type: 
restDatastore.Type, Enabled: restDatastore.Enabled, Workspace: restWorkspaceToWorkspace(restDatastore.Workspace), ConnectionParameters: connectionDetailsToMap(restDatastore.ConnectionParameters), } } func mapToEntries(entries map[string]string) []*entry { result := make([]*entry, 0) for key, value := range entries { result = append(result, &entry{ Key: key, Value: value, }) } return result } // connectionDetailsToMap converts datasetConnectionParameters to a map[string]string func connectionDetailsToMap(connectionDetails *datasetConnectionParameters) (result map[string]string) { result = make(map[string]string) if connectionDetails != nil { if connectionDetails.Entry != nil { for _, entry := range connectionDetails.Entry { if entry != nil { result[entry.Key] = entry.Value } } } } return }
geoserver/datastore.go
0.757346
0.528594
datastore.go
starcoder
package main import ( "fmt" "git.maze.io/go/math32" "image" "image/color" "image/png" "math/rand" "os" . "ray/core" "time" ) func toRGBA(x Vector3) color.RGBA { r := uint8(255.99 * x.X) g := uint8(255.99 * x.Y) b := uint8(255.99 * x.Z) return color.RGBA{r, g, b, 0xFF} } func color32ToRGBA(c Color32) color.RGBA { c.R = Saturate32(c.R) c.G = Saturate32(c.G) c.B = Saturate32(c.B) c.A = Saturate32(c.A) r := uint8(255.99 * c.R) g := uint8(255.99 * c.G) b := uint8(255.99 * c.B) a := uint8(255.99 * c.A) return color.RGBA{r, g, b, a} } func radiance(ray Ray, world HittableList, maxDepth int32) Color32 { li := Vector3{} throughput := Vector3{1.0, 1.0, 1.0} hitRecord := HitRecord{} for depth := int32(0); depth < maxDepth; depth++ { if !world.Hit(ray, 0.001, Infinity32, &hitRecord) { unitDirection := NormalizeVector3(ray.Direction) t := 0.5 * (unitDirection.Y + 1.0) v := AddVector3(MulVector3(1.0-t, Vector3{1.0, 1.0, 1.0}), MulVector3(t, Vector3{0.5, 0.7, 1.0})) li = AddVector3(HadamardDotVector3(throughput, v), li) break } coordinate := NewCoordinate(hitRecord.Normal) wow := ray.Direction.Minus() wo := coordinate.WorldToLocal(wow) materialSample := hitRecord.Material.Sample(wo, rand.Float32(), rand.Float32()) if materialSample.Weight.IsZero() { ray.Origin = AddVector3(MulVector3(Epsilon32, ray.Direction), hitRecord.Position) } else { wiw := coordinate.LocalToWorld(materialSample.Scattered) throughput = HadamardDotVector3(throughput, materialSample.Weight) ray.Origin = hitRecord.Position ray.Direction = wiw } //Russian roulette if 6 <= depth { continueProbability := math32.Min(throughput.Length(), 0.9) if continueProbability <= rand.Float32() { break } throughput = DivVector3(throughput, continueProbability) } } return Color32{li.X, li.Y, li.Z, 1.0} } func generateScene() HittableList { world := NewHittableList() world.AddHittable(&Sphere{Vector3{0.0, -1000.0, 0.0}, 1000.0, &Lambertian{Vector3{0.5, 0.5, 0.5}}}) for a := -11; a < 11; a++ { for b := -11; b < 11; b++ { center := 
Vector3{float32(a) + 0.9*rand.Float32(), 0.2, float32(b) + 0.9*rand.Float32()} v := SubVector3(center, Vector3{4.0, 0.2, 0.0}) l := v.Length() if l <= 0.9 { continue } selection := rand.Float32() color := Vector3{rand.Float32(), rand.Float32(), rand.Float32()} if selection < 0.4 { world.AddHittable(&Sphere{center, 0.2, &Lambertian{color}}) } else if selection < 0.8 { roughness := rand.Float32()*0.5 + 0.1 world.AddHittable(&Sphere{center, 0.2, &Metal{color, roughness, 0.9}}) } else { world.AddHittable(&Sphere{center, 0.2, &Dielectric{color, rand.Float32()}}) } } } world.AddHittable(&Sphere{Vector3{0.0, 1.0, 0.0}, 1.0, &Dielectric{Vector3{1.0, 1.0, 1.0}, 1.5}}) world.AddHittable(&Sphere{Vector3{-4.0, 1.0, 0.0}, 1.0, &Lambertian{Vector3{0.4, 0.2, 0.1}}}) world.AddHittable(&Sphere{Vector3{4.0, 1.0, 0.0}, 1.0, &Metal{Vector3{0.7, 0.6, 0.5}, 0.05, 0.9}}) return world } func render(name string, width, height, spp, maxDepth int32) { fmt.Printf("start render %v ...\n", name) start := time.Now() random := rand.New(rand.NewSource(time.Now().UnixNano())) rand.Seed(time.Now().UnixNano()) //random := rand.New(rand.NewSource(1)) //rand.Seed(1) screenSamples := GoldenSet(int(spp), random) //lensSamples := GoldenSet(int(spp), random) lensSamples := SamplerSet(int(spp), NewSamplerJitteredR2(0.05, time.Now().UnixNano())) //screenSamples := RandomSet(int(spp), random) //lensSamples := RandomSet(int(spp), random) img := image.NewRGBA(image.Rectangle{image.Point{0, 0}, image.Point{int(width), int(height)}}) world := generateScene() sigma := float32(0.5) gauss0 := float32(1.0/math32.Sqrt(2.0*math32.Pi*sigma*sigma)) gauss1 := float32(-1.0/(2.0*sigma*sigma)) camera := NewCameraPerspectiveLens(uint32(width), uint32(height), DegToRad32*45.0, 0.01) camera.LookAt(Vector3{9.0, 1.2, 2.5}, Vector3{0.0, 0.0, 0.0}, Vector3{0.0, 1.0, 0.0}) for y := int32(0); y < height; y++ { for x := int32(0); x < width; x++ { acc := Color32{} weight := float32(0.0) for s := int32(0); s < spp; s++ { ray := 
camera.GenerateRay(uint32(x), uint32(y), screenSamples[s], lensSamples[s]) c := radiance(ray, world, maxDepth) dx := 2.0 * screenSamples[s].X - 1.0 dy := 2.0 * screenSamples[s].Y - 1.0 w := gauss0 * math32.Exp(gauss1*(dx*dx + dy*dy)) weight += w c = MulColor32(w, c) acc = AddColor32(acc, c) } if Epsilon32<weight { acc = MulColor32(1.0/weight, acc) } acc = LinearToSRGB(acc) img.Set(int(x), int(height-y-1), color32ToRGBA(acc)) } } elapsed := time.Now().Sub(start) fmt.Printf("done (%v ms)\n", int64(elapsed/time.Millisecond)) file, err := os.Create(name) if err != nil { return } defer file.Close() err = png.Encode(file, img) if err != nil { return } } func main() { var width int32 = 400 var height int32 = 300 var numSamples int32 = 200 var maxDepth int32 = 20 render("outimage.png", width, height, numSamples, maxDepth) }
main.go
0.693992
0.428712
main.go
starcoder
package main import ( "encoding/json" "strings" "time" "github.com/containers/buildah" "github.com/containers/buildah/docker" buildahcli "github.com/containers/buildah/pkg/cli" "github.com/containers/buildah/pkg/parse" "github.com/mattn/go-shellwords" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/urfave/cli" ) var ( configFlags = []cli.Flag{ cli.StringSliceFlag{ Name: "annotation, a", Usage: "add `annotation` e.g. annotation=value, for the target image (default [])", }, cli.StringFlag{ Name: "arch", Usage: "set `architecture` of the target image", }, cli.StringFlag{ Name: "author", Usage: "set image author contact `information`", }, cli.StringFlag{ Name: "cmd", Usage: "set the default `command` to run for containers based on the image", }, cli.StringFlag{ Name: "comment", Usage: "set a `comment` in the target image", }, cli.StringFlag{ Name: "created-by", Usage: "set `description` of how the image was created", }, cli.StringFlag{ Name: "domainname", Usage: "set a domain `name` for containers based on image", }, cli.StringFlag{ Name: "entrypoint", Usage: "set `entry point` for containers based on image", }, cli.StringSliceFlag{ Name: "env, e", Usage: "add `environment variable` to be set when running containers based on image (default [])", }, cli.StringFlag{ Name: "healthcheck", Usage: "set a `healthcheck` command for the target image", }, cli.StringFlag{ Name: "healthcheck-interval", Usage: "set the `interval` between runs of the `healthcheck` command for the target image", }, cli.IntFlag{ Name: "healthcheck-retries", Usage: "set the `number` of times the `healthcheck` command has to fail", }, cli.StringFlag{ Name: "healthcheck-start-period", Usage: "set the amount of `time` to wait after starting a container before running a `healthcheck` command", }, cli.StringFlag{ Name: "healthcheck-timeout", Usage: "set the maximum amount of `time` to wait for a `healthcheck` command for the target image", }, cli.StringFlag{ Name: "history-comment", Usage: 
"set a `comment` for the history of the target image", }, cli.StringFlag{ Name: "hostname", Usage: "set a host`name` for containers based on image", }, cli.StringSliceFlag{ Name: "label, l", Usage: "add image configuration `label` e.g. label=value", }, cli.StringSliceFlag{ Name: "onbuild", Usage: "add onbuild command to be run on images based on this image. Only supported on 'docker' formatted images", }, cli.StringFlag{ Name: "os", Usage: "set `operating system` of the target image", }, cli.StringSliceFlag{ Name: "port, p", Usage: "add `port` to expose when running containers based on image (default [])", }, cli.StringFlag{ Name: "shell", Usage: "add `shell` to run in containers", }, cli.StringFlag{ Name: "stop-signal", Usage: "set `stop signal` for containers based on image", }, cli.StringFlag{ Name: "user, u", Usage: "set default `user` to run inside containers based on image", }, cli.StringSliceFlag{ Name: "volume, v", Usage: "add default `volume` path to be created for containers based on image (default [])", }, cli.StringFlag{ Name: "workingdir", Usage: "set working `directory` for containers based on image", }, } configDescription = "Modifies the configuration values which will be saved to the image" configCommand = cli.Command{ Name: "config", Usage: "Update image configuration settings", Description: configDescription, Flags: sortFlags(configFlags), Action: configCmd, ArgsUsage: "CONTAINER-NAME-OR-ID", SkipArgReorder: true, UseShortOptionHandling: true, } ) func updateEntrypoint(builder *buildah.Builder, c *cli.Context) { if len(strings.TrimSpace(c.String("entrypoint"))) == 0 { builder.SetEntrypoint(nil) return } var entrypointJSON []string err := json.Unmarshal([]byte(c.String("entrypoint")), &entrypointJSON) if err == nil { builder.SetEntrypoint(entrypointJSON) if len(builder.Cmd()) > 0 { logrus.Warnf("cmd %q exists and will be passed to entrypoint as a parameter", strings.Join(builder.Cmd(), " ")) } return } // it wasn't a valid json array, fall back to 
string entrypointSpec := make([]string, 3) entrypointSpec[0] = "/bin/sh" entrypointSpec[1] = "-c" entrypointSpec[2] = c.String("entrypoint") if len(builder.Cmd()) > 0 { logrus.Warnf("cmd %q exists but will be ignored because of entrypoint settings", strings.Join(builder.Cmd(), " ")) } builder.SetEntrypoint(entrypointSpec) } func updateConfig(builder *buildah.Builder, c *cli.Context) { if c.IsSet("author") { builder.SetMaintainer(c.String("author")) } if c.IsSet("created-by") { builder.SetCreatedBy(c.String("created-by")) } if c.IsSet("arch") { builder.SetArchitecture(c.String("arch")) } if c.IsSet("os") { builder.SetOS(c.String("os")) } if c.IsSet("user") { builder.SetUser(c.String("user")) } if c.IsSet("shell") { shellSpec, err := shellwords.Parse(c.String("shell")) if err != nil { logrus.Errorf("error parsing --shell %q: %v", c.String("shell"), err) } else { builder.SetShell(shellSpec) } } if c.IsSet("stop-signal") { builder.SetStopSignal(c.String("stop-signal")) } if c.IsSet("port") || c.IsSet("p") { for _, portSpec := range c.StringSlice("port") { builder.SetPort(portSpec) } } if c.IsSet("env") || c.IsSet("e") { for _, envSpec := range c.StringSlice("env") { env := strings.SplitN(envSpec, "=", 2) if len(env) > 1 { builder.SetEnv(env[0], env[1]) } else { builder.UnsetEnv(env[0]) } } } if c.IsSet("entrypoint") { updateEntrypoint(builder, c) } // cmd should always run after entrypoint; setting entrypoint clears cmd if c.IsSet("cmd") { cmdSpec, err := shellwords.Parse(c.String("cmd")) if err != nil { logrus.Errorf("error parsing --cmd %q: %v", c.String("cmd"), err) } else { builder.SetCmd(cmdSpec) } } if c.IsSet("volume") { if volSpec := c.StringSlice("volume"); len(volSpec) > 0 { for _, spec := range volSpec { builder.AddVolume(spec) } } } updateHealthcheck(builder, c) if c.IsSet("label") || c.IsSet("l") { for _, labelSpec := range c.StringSlice("label") { label := strings.SplitN(labelSpec, "=", 2) if len(label) > 1 { builder.SetLabel(label[0], label[1]) } else { 
builder.UnsetLabel(label[0]) } } } if c.IsSet("workingdir") { builder.SetWorkDir(c.String("workingdir")) } if c.IsSet("comment") { builder.SetComment(c.String("comment")) } if c.IsSet("history-comment") { builder.SetHistoryComment(c.String("history-comment")) } if c.IsSet("domainname") { builder.SetDomainname(c.String("domainname")) } if c.IsSet("hostname") { builder.SetHostname(c.String("hostname")) } if c.IsSet("onbuild") { for _, onbuild := range c.StringSlice("onbuild") { builder.SetOnBuild(onbuild) } } if c.IsSet("annotation") || c.IsSet("a") { for _, annotationSpec := range c.StringSlice("annotation") { annotation := strings.SplitN(annotationSpec, "=", 2) if len(annotation) > 1 { builder.SetAnnotation(annotation[0], annotation[1]) } else { builder.UnsetAnnotation(annotation[0]) } } } } func updateHealthcheck(builder *buildah.Builder, c *cli.Context) { if c.IsSet("healthcheck") || c.IsSet("healthcheck-interval") || c.IsSet("healthcheck-retries") || c.IsSet("healthcheck-start-period") || c.IsSet("healthcheck-timeout") { healthcheck := builder.Healthcheck() if healthcheck == nil { healthcheck = &docker.HealthConfig{ Test: []string{"NONE"}, Interval: 30 * time.Second, StartPeriod: 0, Timeout: 30 * time.Second, Retries: 3, } } if c.IsSet("healthcheck") { test, err := shellwords.Parse(c.String("healthcheck")) if err != nil { logrus.Errorf("error parsing --healthcheck %q: %v", c.String("healthcheck"), err) } healthcheck.Test = test } if c.IsSet("healthcheck-interval") { duration, err := time.ParseDuration(c.String("healthcheck-interval")) if err != nil { logrus.Errorf("error parsing --healthcheck-interval %q: %v", c.String("healthcheck-interval"), err) } healthcheck.Interval = duration } if c.IsSet("healthcheck-retries") { healthcheck.Retries = c.Int("healthcheck-retries") } if c.IsSet("healthcheck-start-period") { duration, err := time.ParseDuration(c.String("healthcheck-start-period")) if err != nil { logrus.Errorf("error parsing --healthcheck-start-period %q: 
%v", c.String("healthcheck-start-period"), err) } healthcheck.StartPeriod = duration } if c.IsSet("healthcheck-timeout") { duration, err := time.ParseDuration(c.String("healthcheck-timeout")) if err != nil { logrus.Errorf("error parsing --healthcheck-timeout %q: %v", c.String("healthcheck-timeout"), err) } healthcheck.Timeout = duration } if len(healthcheck.Test) == 0 { builder.SetHealthcheck(nil) } else { builder.SetHealthcheck(healthcheck) } } } func configCmd(c *cli.Context) error { args := c.Args() if len(args) == 0 { return errors.Errorf("container ID must be specified") } if err := buildahcli.VerifyFlagsArgsOrder(args); err != nil { return err } if len(args) > 1 { return errors.Errorf("too many arguments specified") } name := args[0] if err := parse.ValidateFlags(c, configFlags); err != nil { return err } store, err := getStore(c) if err != nil { return err } builder, err := openBuilder(getContext(), store, name) if err != nil { return errors.Wrapf(err, "error reading build container %q", name) } updateConfig(builder, c) return builder.Save() }
cmd/buildah/config.go
0.548915
0.41117
config.go
starcoder
package probably import ( "fmt" "hash/fnv" "math" "sort" ) // Sketch is a count-min sketcher. type Sketch struct { sk [][]uint32 rowCounts []uint32 } // NewSketch returns new count-min sketch with the given width and depth. // Sketch dimensions must be positive. A sketch with w=⌈ ℯ/𝜀 ⌉ and // d=⌈ln (1/𝛿)⌉ answers queries within a factor of 𝜀 with probability 1-𝛿. func NewSketch(w, d int) *Sketch { if d < 1 || w < 1 { panic("Dimensions must be positive") } s := &Sketch{} s.sk = make([][]uint32, d) for i := 0; i < d; i++ { s.sk[i] = make([]uint32, w) } s.rowCounts = make([]uint32, d) return s } func (s Sketch) String() string { return fmt.Sprintf("{Sketch %dx%d}", len(s.sk[0]), len(s.sk)) } func hashn(s string) (h1, h2 uint32) { // This construction comes from // http://www.eecs.harvard.edu/~michaelm/postscripts/tr-02-05.pdf // "Building a Better Bloom Filter", by Kirsch and Mitzenmacher. Their // proof that this is allowed for count-min requires the h functions to // be from the 2-universal hash family, w be a prime and d be larger // than the traditional CM-sketch requirements. // Empirically, though, this seems to work "just fine". // TODO(dgryski): Switch to something that is actually validated by the literature. fnv1a := fnv.New32a() fnv1a.Write([]byte(s)) h1 = fnv1a.Sum32() // inlined jenkins one-at-a-time hash h2 = uint32(0) for _, c := range s { h2 += uint32(c) h2 += h2 << 10 h2 ^= h2 >> 6 } h2 += (h2 << 3) h2 ^= (h2 >> 11) h2 += (h2 << 15) return h1, h2 } // Reset clears all the values from the sketch. 
func (s *Sketch) Reset() { // Complier doesn't yet optimize this into memset: https://code.google.com/p/go/issues/detail?id=5373 for _, w := range s.sk { for i := range w { w[i] = 0 } } for i := range s.rowCounts { s.rowCounts[i] = 0 } } // Add 'count' occurences of the given input func (s *Sketch) Add(h string, count uint32) (val uint32) { w := len(s.sk[0]) d := len(s.sk) val = math.MaxUint32 h1, h2 := hashn(h) for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) s.rowCounts[i] += count v := s.sk[i][pos] + count s.sk[i][pos] = v if v < val { val = v } } return val } // Del removes 'count' occurences of the given input func (s *Sketch) Del(h string, count uint32) (val uint32) { w := len(s.sk[0]) d := len(s.sk) val = math.MaxUint32 h1, h2 := hashn(h) for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) s.rowCounts[i] -= count v := s.sk[i][pos] - count if v > s.sk[i][pos] { // did we wrap-around? v = 0 } s.sk[i][pos] = v if v < val { val = v } } return val } // Increment the count for the given input. func (s *Sketch) Increment(h string) (val uint32) { return s.Add(h, 1) } // ConservativeIncrement increments the count (conservatively) for the given input. func (s *Sketch) ConservativeIncrement(h string) (val uint32) { return s.ConservativeAdd(h, 1) } // ConservativeAdd adds the count (conservatively) for the given input. func (s *Sketch) ConservativeAdd(h string, count uint32) (val uint32) { w := len(s.sk[0]) d := len(s.sk) h1, h2 := hashn(h) val = math.MaxUint32 for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) v := s.sk[i][pos] if v < val { val = v } } val += count // Conservative update means no counter is increased to more than the // size of the smallest counter plus the size of the increment. This technique // first described in <NAME> and <NAME>. 2002. New directions in // traffic measurement and accounting. SIGCOMM Comput. Commun. Rev., 32(4). 
for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) v := s.sk[i][pos] if v < val { s.rowCounts[i] += (val - s.sk[i][pos]) s.sk[i][pos] = val } } return val } // Count returns the estimated count for the given input. func (s Sketch) Count(h string) uint32 { min := uint32(math.MaxUint32) w := len(s.sk[0]) d := len(s.sk) h1, h2 := hashn(h) for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) v := s.sk[i][pos] if v < min { min = v } } return min } // Values returns the all the estimates for a given string func (s Sketch) Values(h string) []uint32 { w := len(s.sk[0]) d := len(s.sk) vals := make([]uint32, d) h1, h2 := hashn(h) for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) vals[i] = s.sk[i][pos] } return vals } /* CountMeanMin described in: <NAME> and <NAME>. 2007. New estimation algorithms for streaming data: Count-min can do more. http://webdocs.cs.ualberta.ca/~fandeng/paper/cmm.pdf Sketch Algorithms for Estimating Point Queries in NLP <NAME>, <NAME> and <NAME> EMNLP-CONLL 2012 http://www.umiacs.umd.edu/~amit/Papers/goyalPointQueryEMNLP12.pdf */ // CountMeanMin returns estimated count for the given input, using the count-min-mean // heuristic. This gives more accurate results than Count() for low-frequency // counts at the cost of larger under-estimation error. For tasks sensitive to // under-estimation, use the regular Count() and only call ConservativeAdd() // and ConservativeIncrement() when constructing your sketch. 
func (s Sketch) CountMeanMin(h string) uint32 { min := uint32(math.MaxUint32) w := len(s.sk[0]) d := len(s.sk) residues := make([]float64, d) h1, h2 := hashn(h) for i := 0; i < d; i++ { pos := (h1 + uint32(i)*h2) % uint32(w) v := s.sk[i][pos] noise := float64(s.rowCounts[i]-s.sk[i][pos]) / float64(w-1) residues[i] = float64(v) - noise // negative count doesn't make sense if residues[i] < 0 { residues[i] = 0 } if v < min { min = v } } sort.Float64s(residues) var median uint32 if d%2 == 1 { median = uint32(residues[(d+1)/2]) } else { // integer average without overflow x := uint32(residues[d/2]) y := uint32(residues[d/2+1]) median = (x & y) + (x^y)/2 } // count estimate over the upper-bound (min) doesn't make sense if min < median { return min } return median } // Merge the given sketch into this one. // The sketches must have the same dimensions. func (s *Sketch) Merge(from *Sketch) { if len(s.sk) != len(from.sk) || len(s.sk[0]) != len(from.sk[0]) { panic("Can't merge different sketches with different dimensions") } for i, l := range from.sk { for j, v := range l { s.sk[i][j] += v } } } // Clone returns a copy of this sketch func (s *Sketch) Clone() *Sketch { w := len(s.sk[0]) d := len(s.sk) clone := NewSketch(w, d) for i, l := range s.sk { copy(clone.sk[i], l) } copy(clone.rowCounts, s.rowCounts) return clone } /* This is Algorithm 3 "Item Aggregation" from Hokusai: Sketching Streams in Real Time (<NAME>, <NAME>, <NAME>, 2012) Proceedings of the 28th International Conference on Conference on Uncertainty in Artificial Intelligence (UAI) http://www.auai.org/uai2012/papers/231.pdf */ // Compress reduces the space used by the sketch. This also reduces // the accuracy. This routine panics if the width is not a power of // two. func (s *Sketch) Compress() { w := len(s.sk[0]) if w&(w-1) != 0 { panic("width must be a power of two") } neww := w / 2 for i, l := range s.sk { // We allocate a new array here so old space can actually be garbage collected. 
// TODO(dgryski): reslice and only reallocate every few compressions row := make([]uint32, neww) for j := 0; j < neww; j++ { row[j] = l[j] + l[j+neww] } s.sk[i] = row } }
count.go
0.553505
0.408985
count.go
starcoder
package blorb import ( "bytes" "encoding/binary" "fmt" "io" ) type Archive struct { r io.ReaderAt size int64 Pics []File Snds []File Datas []File Execs []File Gluls []File } type File struct { r io.ReaderAt id int format string offset int64 size int64 } func (f *File) ID() int { return f.id } func (f *File) Format() string { return f.format } func (f *File) Reader() *io.SectionReader { return io.NewSectionReader(f.r, f.offset, f.size) } func OpenArchive(r io.ReaderAt, size int64) (*Archive, error) { a := &Archive{r: r, size: size} if err := a.readIndex(); err != nil { return nil, fmt.Errorf("read index: %w", err) } return a, nil } func (a *Archive) readIndex() error { var header [24]byte if _, err := a.r.ReadAt(header[:], 0); err != nil { return err } // https://en.wikipedia.org/wiki/Interchange_File_Format if expected, got := []byte("FORM"), header[0:4]; !bytes.Equal(expected, got) { return fmt.Errorf("expected group chunk %q, got %q", expected, got) } _ = header[4:8] // int32 chunk len, we expect only one form in file so ignore for now. if expected, got := []byte("IFRS"), header[8:12]; !bytes.Equal(expected, got) { return fmt.Errorf("expected form type %q, got %q", expected, got) } if expected, got := []byte("RIdx"), header[12:16]; !bytes.Equal(expected, got) { return fmt.Errorf("expected first chunk type %q, got %q", expected, got) } size := be.Uint32(header[16:20]) num := be.Uint32(header[20:24]) // Number of entries // Each index entry is 12 bytes, num (which takes 4 bytes) is also included in total size. 
if expectedSize := 4 + num*12; expectedSize != size { return fmt.Errorf("expected index size %d, got %d (num: %d)", expectedSize, size, num) } idx := make([]byte, size-4) if _, err := a.r.ReadAt(idx, 24); err != nil { return err } for i := 0; i < int(num); i++ { idx := idx[i*12:] typ := idx[0:4] id := int(be.Uint32(idx[4:8])) offset := int64(be.Uint32(idx[8:12])) var buf [8]byte if _, err := a.r.ReadAt(buf[:], offset); err != nil { return err } format := string(buf[0:4]) switch { case bytes.Equal([]byte("PNG "), buf[0:4]): format = "png" case bytes.Equal([]byte("JPEG"), buf[0:4]): format = "jpg" case bytes.Equal([]byte("GLUL"), buf[0:4]): format = "glul" } size := int64(be.Uint32(buf[4:8])) file := File{ r: a.r, id: id, format: format, offset: offset + 8, size: size, } switch { case bytes.Equal([]byte("Pict"), typ): a.Pics = append(a.Pics, file) case bytes.Equal([]byte("Snd "), typ): a.Snds = append(a.Snds, file) case bytes.Equal([]byte("Data"), typ): a.Datas = append(a.Datas, file) case bytes.Equal([]byte("Exec"), typ): a.Execs = append(a.Execs, file) case bytes.Equal([]byte("GLUL"), typ): a.Gluls = append(a.Gluls, file) default: return fmt.Errorf("expected valid resource type, got %q", typ) } } return nil } var be = binary.BigEndian
blorb/blorb.go
0.616705
0.421671
blorb.go
starcoder
package lmath import ( "gonet/base" "math" "unsafe" ) type( Point2F struct { X float32 Y float32 } IPoint2F interface { Set(float32 , float32) SetF([] float32) SetMin(Point2F) SetMax(Point2F) Interpolate(Point2F, Point2F, float32) Zero() IsZero() bool Len() float32 LenSquared() float32 MagnitudeSafe() float32 Equal(Point2F) bool Neg() Normalize() NormalizeSafe() NormalizeF(float32) Convolve(Point2F) ConvolveInverse(Point2F) Add(Point2F) *Point2F Sub(Point2F) *Point2F Mul(Point2F) *Point2F MulF(float32) *Point2F Div(f float32) *Point2F Cross(p Point2F) float32 Dot(p Point2F) float32 ToF() []float32 } ) func (this *Point2F) Set(x float32, y float32){ this.X, this.Y = x, y } func (this *Point2F) SetF(f []float32){ this.X, this.Y = f[0], f[1] } func (this *Point2F) SetMin(p Point2F){ this.X, this.Y = float32(math.Min(float64(this.X), float64(p.X))), float32(math.Min(float64(this.Y), float64(p.Y))) } func (this *Point2F) SetMax(p Point2F){ this.X, this.Y = float32(math.Max(float64(this.X), float64(p.X))), float32(math.Max(float64(this.Y), float64(p.Y))) } func (this *Point2F) Interpolate(from Point2F, to Point2F, factor float32){ base.Assert(factor >= 0.0 && factor <= 1.0, "Out of bound interpolation factor") inverse := 1.0 - factor this.X = from.X * inverse + to.X * factor this.Y = from.Y * inverse + to.Y * factor } func (this *Point2F) Zero(){ this.X, this.Y = 0, 0 } func (this *Point2F) IsZero() bool{ return ((this.X * this.X) <= POINT_EPSILON) &&((this.Y * this.Y) <= POINT_EPSILON) } func (this *Point2F) Len() float32{ return float32(math.Sqrt(float64(this.X* this.X + this.Y * this.Y))) } func (this *Point2F) LenSquared() float32{ if this.IsZero(){ return 0.0 }else{ return this.Len() } } func (this *Point2F) MagnitudeSafe() float32{ return this.X * this.X + this.Y * this.Y } func (this *Point2F) Equal(p Point2F) bool{ return ((math.Abs(float64(this.X - p.X)) < POINT_EPSILON) && (math.Abs(float64(this.Y - p.Y)) < POINT_EPSILON)) } func (this *Point2F) Neg(){ this.X, 
this.Y = -this.X, -this.Y } func (this *Point2F) Normalize(){ squared := this.X * this.X + this.Y * this.Y if squared != 0{ factor := 1.0 / float32(math.Sqrt(float64(squared))) this.X *= factor this.Y *= factor }else{ this.X = 0.0 this.Y = 0.0 } } func (this *Point2F) NormalizeSafe(){ vmag := this.MagnitudeSafe() if vmag > POINT_EPSILON{ *this = *this.MulF(1.0 / vmag) } } func (this *Point2F) NormalizeF(f float32){ squared := this.X * this.X + this.Y * this.Y if squared != 0{ factor := f / float32(math.Sqrt(float64(squared))) this.X *= factor this.Y *= factor }else{ this.X = 0.0 this.Y = 0.0 } } func (this *Point2F) Convolve(p Point2F){ this.X *= p.X this.Y *= p.Y } func (this *Point2F) ConvolveInverse(p Point2F){ this.X /= p.X this.Y /= p.Y } func (this *Point2F) Add(p Point2F) *Point2F{ return &Point2F{this.X + p.X, this.Y + p.Y} } func (this *Point2F) Sub(p Point2F) *Point2F{ return &Point2F{this.X - p.X, this.Y - p.Y} } func (this *Point2F) MulF(f float32) *Point2F{ return &Point2F{this.X * f, this.Y * f} } func (this *Point2F) Mul(p Point2F) *Point2F{ return &Point2F{this.X * p.X, this.Y * p.Y} } func (this *Point2F) Div(f float32) *Point2F{ return &Point2F{this.X / f, this.Y / f} } func (this *Point2F) ToF32() []float32{ return (*[2]float32)(unsafe.Pointer(this))[:] } func (this *Point2F) Cross(p Point2F) float32{ return this.X * p.Y - this.Y * p.X } func (this *Point2F) Dot(p Point2F) float32{ return this.X * p.X + this.Y * p.Y }
server/game/lmath/point2f.go
0.658308
0.718385
point2f.go
starcoder
package datastructures import ( "fmt" "sync" ) type WeightedVertex struct { Val string Edges map[string]*WeightedEdge } type WeightedEdge struct { Vertex *WeightedVertex Weight int } type WeightedGraph struct { Vertices map[string]*WeightedVertex lock sync.RWMutex } type Vertex struct { Val string Edges map[string]*Vertex } type Graph struct { Vertices map[string]*Vertex lock sync.RWMutex } type Path struct { ID string Vertices []*WeightedVertex Weight int } type Paths []*Path func (p Paths) Less(i, j int) bool { return p[i].Weight < p[j].Weight } func (p Paths) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p Paths) Len() int { return len(p) } func (g *WeightedGraph) AddVertex(value string) *WeightedVertex { if g.Vertices == nil { g.Vertices = make(map[string]*WeightedVertex, 0) } g.Vertices[value] = &WeightedVertex{value, make(map[string]*WeightedEdge)} return g.Vertices[value] } func (g *WeightedGraph) AddEdge(a, b string, weight int, bidirictional bool) { g.lock.Lock() if _, ok := g.Vertices[a]; !ok { g.AddVertex(a) } if _, ok := g.Vertices[b]; !ok { g.AddVertex(b) } A := g.Vertices[a] B := g.Vertices[b] if _, ok := A.Edges[b]; ok { A.Edges[b] = &WeightedEdge{B, weight} } if _, ok := B.Edges[a]; ok && bidirictional { B.Edges[a] = &WeightedEdge{A, weight} } g.lock.Unlock() } func (n *WeightedVertex) String() string { return fmt.Sprintf("%v", n.Val) } // String Prints String Presentation of the Graph func (g *WeightedGraph) String() { g.lock.RLock() s := "" for _, v := range g.Vertices { s += v.String() + " -> " for _, e := range v.Edges { s += e.Vertex.String() + " " } s += "\n" } fmt.Println(s) g.lock.RUnlock() } // String Prints String Presentation of the Graph func (g *Graph) String() { g.lock.RLock() s := "" for _, v := range g.Vertices { s += v.String() + " -> " for _, e := range v.Edges { s += e.String() + " " } s += "\n" } fmt.Println(s) g.lock.RUnlock() } func (n *Vertex) String() string { return fmt.Sprintf("%v", n.Val) } func (g *Graph) 
AddVertex(value string) *Vertex { g.lock.Lock() if g.Vertices == nil { g.Vertices = make(map[string]*Vertex, 0) } g.Vertices[value] = &Vertex{value, make(map[string]*Vertex, 0)} g.lock.Unlock() return g.Vertices[value] } func (g *Graph) AddEdge(a, b string, bidirictional bool) { g.lock.Lock() if _, ok := g.Vertices[a]; !ok { g.AddVertex(a) } if _, ok := g.Vertices[b]; !ok { g.AddVertex(b) } A := g.Vertices[a] B := g.Vertices[b] if _, ok := A.Edges[b]; ok { A.Edges[b] = B } if _, ok := B.Edges[a]; ok && bidirictional { B.Edges[a] = A } g.lock.Unlock() }
datastructures/graph.go
0.559049
0.451266
graph.go
starcoder
package model import ( "math" "strconv" "time" "github.com/uncharted-distil/distil-compute/metadata" "github.com/uncharted-distil/distil-compute/model" ) // NullableFloat64 is float64 with custom JSON marshalling to allow for NaN values // to be handled gracefully. type NullableFloat64 float64 // MarshalJSON provides a custom float JSON marshaller that will handle a NaN float64 // value by replacing it with empty data. func (f NullableFloat64) MarshalJSON() ([]byte, error) { if math.IsNaN(float64(f)) { return []byte("null"), nil } return []byte(strconv.FormatFloat(float64(f), 'f', -1, 32)), nil } // TimeseriesObservation represents a timeseries value along with confidences. type TimeseriesObservation struct { Value NullableFloat64 `json:"value"` Time float64 `json:"time"` ConfidenceLow NullableFloat64 `json:"confidenceLow"` ConfidenceHigh NullableFloat64 `json:"confidenceHigh"` } // TimeseriesData represents the result of a timeseries request. type TimeseriesData struct { VarKey string `json:"variableKey"` SeriesID string `json:"seriesID"` Timeseries []*TimeseriesObservation `json:"timeseries"` IsDateTime bool `json:"isDateTime"` Min float64 `json:"min"` Max float64 `json:"max"` Mean float64 `json:"mean"` } // TimeseriesOp defines the operation to aggregate timeseries values that fall into the same // bucket. type TimeseriesOp string const ( // TimeseriesAddOp indicates that bucket values should be added TimeseriesAddOp = "add" // TimeseriesMinOp indicates that the min of bucket values should be taken TimeseriesMinOp = "min" // TimeseriesMaxOp indicates that the max of bucket values should be taken TimeseriesMaxOp = "max" // TimeseriesMeanOp indicates that the mean of bucket values should be taken TimeseriesMeanOp = "mean" // TimeseriesDefaultOp is the operation to use when none is specified TimeseriesDefaultOp = TimeseriesAddOp ) // DataStorageCtor represents a client constructor to instantiate a data // storage client. 
type DataStorageCtor func() (DataStorage, error) // DataStorage defines the functions available to query the underlying data storage. type DataStorage interface { FetchNumRows(storageName string, variables []*model.Variable) (int, error) FetchData(dataset string, storageName string, filterParams *FilterParams, includeGroupingCol bool, orderByVar *model.Variable) (*FilteredData, error) FetchDataset(dataset string, storageName string, includeMetadata bool, filterParams *FilterParams) ([][]string, error) FetchResultDataset(dataset string, storageName string, predictionName string, features []string, resultURI string, includeExplain bool) ([][]string, error) FetchSummary(dataset string, storageName string, varName string, filterParams *FilterParams, mode SummaryMode) (*VariableSummary, error) FetchSummaryByResult(dataset string, storageName string, varName string, resultURI string, filterParams *FilterParams, extrema *Extrema, mode SummaryMode) (*VariableSummary, error) PersistResult(dataset string, storageName string, resultURI string, target string) error PersistExplainedResult(dataset string, storageName string, resultURI string, explainResult *SolutionExplainResult) error PersistSolutionFeatureWeight(dataset string, storageName string, solutionID string, weights [][]string) error FetchResults(dataset string, storageName string, resultURI string, solutionID string, filterParams *FilterParams, removeTargetColumn bool) (*FilteredData, error) FetchPredictedSummary(dataset string, storageName string, resultURI string, filterParams *FilterParams, extrema *Extrema, mode SummaryMode) (*VariableSummary, error) FetchResultsExtremaByURI(dataset string, storageName string, resultURI string) (*Extrema, error) FetchCorrectnessSummary(dataset string, storageName string, resultURI string, filterParams *FilterParams, mode SummaryMode) (*VariableSummary, error) FetchConfidenceSummary(dataset string, storageName string, resultURI string, filterParams *FilterParams, mode SummaryMode) 
(map[string]*VariableSummary, error) FetchResidualsSummary(dataset string, storageName string, resultURI string, filterParams *FilterParams, extrema *Extrema, mode SummaryMode) (*VariableSummary, error) FetchResidualsExtremaByURI(dataset string, storageName string, resultURI string) (*Extrema, error) FetchExtrema(dataset string, storageName string, variable *model.Variable) (*Extrema, error) FetchExtremaByURI(dataset string, storageName string, resultURI string, variable string) (*Extrema, error) FetchTimeseries(dataset string, storageName string, variableKey string, seriesIDColName string, xColName string, yColName string, seriesIDs []string, operation TimeseriesOp, filterParams *FilterParams) ([]*TimeseriesData, error) FetchTimeseriesForecast(dataset string, storageName string, variableKey string, seriesIDColName string, xColName string, yColName string, seriesIDs []string, operation TimeseriesOp, resultUUID string, filterParams *FilterParams) ([]*TimeseriesData, error) FetchCategoryCounts(storageName string, variable *model.Variable) (map[string]int, error) FetchSolutionFeatureWeights(dataset string, storageName string, resultURI string, d3mIndex int64) (*SolutionFeatureWeight, error) // Dataset manipulation IsValidDataType(dataset string, storageName string, varName string, varType string) (bool, error) SetDataType(dataset string, storageName string, varName string, varType string) error AddVariable(dataset string, storageName string, varName string, varType string, defaultVal string) error AddField(dataset string, storageName string, varName string, varType string, defaultVal string) error DeleteVariable(dataset string, storageName string, varName string) error SetVariableValue(dataset string, storageName string, varName string, value string, filterParams *FilterParams) error UpdateVariableBatch(storageName string, varName string, updates map[string]string) error UpdateData(dataset string, storageName string, varName string, updates map[string]string, 
filterParams *FilterParams) error DoesVariableExist(dataset string, storageName string, varName string) (bool, error) VerifyData(datasetID string, tableName string) error // Raw data queries FetchRawDistinctValues(dataset string, storageName string, varNames []string) ([][]string, error) // Property queries GetStorageName(dataset string) (string, error) // SaveDataset is used to drop all the undesired values (only call for save dataset route) SaveDataset(dataset string, storageName string, filterParams *FilterParams) error // CloneDataset creates a copy of an existing dataset CloneDataset(dataset string, storageName string, datasetNew string, storageNameNew string) error // DeleteDataset drops all tables associated to storageName DeleteDataset(storageName string) error CreateIndices(dataset string, indexFields []string) error // IsKey verifies the unique property of the listed variables IsKey(dataset string, storageName string, variables []*model.Variable) (bool, error) } // SolutionStorageCtor represents a client constructor to instantiate a // solution storage client. type SolutionStorageCtor func() (SolutionStorage, error) // SolutionStorage defines the functions available to query the underlying // solution storage. 
type SolutionStorage interface { PersistPrediction(requestID string, dataset string, target string, fittedSolutionID string, progress string, createdTime time.Time) error PersistRequest(requestID string, dataset string, progress string, createdTime time.Time) error PersistRequestFeature(requestID string, featureName string, featureType string) error PersistRequestFilters(requestID string, filters *FilterParams) error PersistSolution(requestID string, solutionID string, explainedSolutionID string, createdTime time.Time) error PersistSolutionWeight(solutionID string, featureName string, featureIndex int64, weight float64) error PersistSolutionState(solutionID string, progress string, createdTime time.Time) error PersistSolutionResult(solutionID string, fittedSolutionID string, produceRequestID string, resultType string, resultUUID string, resultURI string, progress string, createdTime time.Time) error PersistSolutionExplainedOutput(resultUUID string, explainOutput map[string]*SolutionExplainResult) error PersistSolutionScore(solutionID string, metric string, score float64) error UpdateRequest(requestID string, progress string, updatedTime time.Time) error UpdateSolution(solutionID string, explainedSolutionID string) error FetchRequest(requestID string) (*Request, error) FetchRequestBySolutionID(solutionID string) (*Request, error) FetchRequestByFittedSolutionID(fittedSolutionID string) (*Request, error) FetchRequestByDatasetTarget(dataset string, target string) ([]*Request, error) FetchRequestFeatures(requestID string) ([]*Feature, error) FetchRequestFilters(requestID string, features []*Feature) (*FilterParams, error) FetchSolution(solutionID string) (*Solution, error) FetchExplainValues(dataset string, storageName string, d3mIndex []int, resultUUID string) ([]SolutionExplainValues, error) FetchSolutionsByDatasetTarget(dataset string, target string) ([]*Solution, error) FetchSolutionsByRequestID(requestID string) ([]*Solution, error) FetchSolutionWeights(solutionID 
string) ([]*SolutionWeight, error) FetchSolutionResultByUUID(resultUUID string) (*SolutionResult, error) FetchSolutionResults(solutionID string) ([]*SolutionResult, error) FetchSolutionResultsByFittedSolutionID(fittedSolutionID string) ([]*SolutionResult, error) FetchSolutionResultByProduceRequestID(produceRequestID string) (*SolutionResult, error) FetchPredictionResultByProduceRequestID(produceRequestID string) (*SolutionResult, error) FetchPredictionResultByUUID(reusultUUID string) (*SolutionResult, error) FetchSolutionScores(solutionID string) ([]*SolutionScore, error) FetchPrediction(requestID string) (*Prediction, error) FetchPredictionsByFittedSolutionID(fittedSolutionID string) ([]*Prediction, error) } // MetadataStorageCtor represents a client constructor to instantiate a // metadata storage client. type MetadataStorageCtor func() (MetadataStorage, error) // MetadataStorage defines the functions available to query the underlying // metadata storage. type MetadataStorage interface { FetchVariables(dataset string, includeIndex bool, includeMeta bool, includeSystemData bool) ([]*model.Variable, error) FetchVariablesByName(dataset string, varNames []string, includeIndex bool, includeMeta bool, includeSystemData bool) ([]*model.Variable, error) FetchVariablesDisplay(dataset string) ([]*model.Variable, error) DoesVariableExist(dataset string, varName string) (bool, error) FetchVariable(dataset string, varName string) (*model.Variable, error) FetchVariableDisplay(dataset string, varName string) (*model.Variable, error) FetchDataset(dataset string, includeIndex bool, includeMeta bool, includeSystemData bool) (*Dataset, error) FetchDatasets(includeIndex bool, includeMeta bool, includeSystemData bool) ([]*Dataset, error) SearchDatasets(terms string, baseDataset *Dataset, includeIndex bool, includeMeta bool, includeSystemData bool) ([]*Dataset, error) ImportDataset(id string, uri string) (string, error) // Dataset manipulation SetDataType(dataset string, varName 
string, varType string) error SetExtrema(dataset string, varName string, extrema *Extrema) error AddVariable(dataset string, varName string, varDisplayName string, varType string, varDistilRole string) error UpdateVariable(dataset string, varName string, variableValue *model.Variable) error DeleteVariable(dataset string, varName string) error AddGroupedVariable(dataset string, varName string, varDisplayName string, varType string, varRole string, grouping model.BaseGrouping) error RemoveGroupedVariable(datasetName string, grouping model.BaseGrouping) error DeleteDataset(dataset string, softDelete bool) error IngestDataset(datasetSource metadata.DatasetSource, meta *model.Metadata) error UpdateDataset(dataset *Dataset) error DatasetExists(dataset string) (bool, error) // CloneDataset creates a copy of an existing dataset CloneDataset(dataset string, datasetNew string, storageNameNew string, folderNew string) error } // ExportedModelStorageCtor represents a client constructor to instantiate a // model storage client. type ExportedModelStorageCtor func() (ExportedModelStorage, error) // ExportedModelStorage defines the functions available to query the underlying // model storage. type ExportedModelStorage interface { PersistExportedModel(exportedModel *ExportedModel) error FetchModel(model string) (*ExportedModel, error) FetchModelByID(fittedSolutionID string) (*ExportedModel, error) FetchModels(includeDeleted bool) ([]*ExportedModel, error) SearchModels(terms string, includeDeleted bool) ([]*ExportedModel, error) DeleteModel(fittedSolutionID string) error }
api/model/storage.go
0.815269
0.577138
storage.go
starcoder
package sema import "github.com/onflow/cadence/runtime/ast" func (checker *Checker) VisitArrayExpression(expression *ast.ArrayExpression) ast.Repr { // visit all elements, ensure they are all the same type expectedType := UnwrapOptionalType(checker.expectedType) var elementType Type var resultType ArrayType switch typ := expectedType.(type) { case *ConstantSizedType: elementType = typ.ElementType(false) resultType = typ literalCount := int64(len(expression.Values)) if typ.Size != literalCount { checker.report( &ConstantSizedArrayLiteralSizeError{ ExpectedSize: typ.Size, ActualSize: literalCount, Range: expression.Range, }, ) } case *VariableSizedType: elementType = typ.ElementType(false) resultType = typ default: // If the expected type is AnyStruct or AnyResource, and the array is empty, // then expect the elements to also be of the same type. // Otherwise, infer the type from the expression. if len(expression.Values) == 0 { elementType = expectedType resultType = &VariableSizedType{ Type: elementType, } } } argumentTypes := make([]Type, len(expression.Values)) for i, value := range expression.Values { valueType := checker.VisitExpression(value, elementType) argumentTypes[i] = valueType checker.checkVariableMove(value) checker.checkResourceMoveOperation(value, valueType) } checker.Elaboration.ArrayExpressionArgumentTypes[expression] = argumentTypes if elementType == nil { // Contextually expected type is not available. // Therefore, find the least common supertype of the elements. elementType = LeastCommonSuperType(argumentTypes...) if elementType == InvalidType { checker.report( &TypeAnnotationRequiredError{ Cause: "cannot infer type from array literal: ", Pos: expression.StartPos, }, ) return InvalidType } resultType = &VariableSizedType{ Type: elementType, } } checker.Elaboration.ArrayExpressionArrayType[expression] = resultType return resultType }
runtime/sema/check_array_expression.go
0.635788
0.483648
check_array_expression.go
starcoder
package elasticsearch // ElasticSearch mapping definition. const mapping = `{ "settings": { "analysis": { "analyzer": { "folding": { "tokenizer": "standard", "filter": ["lowercase", "asciifolding"] } }, "normalizer": { "keyword_normalizer": { "type": "custom", "filter": ["lowercase", "asciifolding"] } } } }, "mappings": { "doc": { "dynamic": false, "properties": { "identifier": { "type": "keyword", "normalizer": "keyword_normalizer" }, "sender": { "type": "keyword", "normalizer": "keyword_normalizer" }, "sent": { "type": "date" }, "status": { "type": "keyword", "normalizer": "keyword_normalizer" }, "message_type": { "type": "keyword", "normalizer": "keyword_normalizer" }, "source": { "type": "keyword", "normalizer": "keyword_normalizer" }, "scope": { "type": "keyword", "normalizer": "keyword_normalizer" }, "restriction": { "type": "text" }, "addresses": { "type": "keyword", "normalizer": "keyword_normalizer" }, "codes": { "type": "keyword", "normalizer": "keyword_normalizer" }, "note": { "type": "text", "analyzer": "folding" }, "references": { "type": "nested", "dynamic": false, "properties": { "sender": { "type": "keyword", "normalizer": "keyword_normalizer" }, "sent": { "type": "date" }, "indentifier": { "type": "keyword", "normalizer": "keyword_normalizer" }, "id": { "type": "keyword", "normalizer": "keyword_normalizer" } } }, "incidents": { "type": "keyword", "normalizer": "keyword_normalizer" }, "superseded": { "type": "boolean" }, "language": { "type": "keyword", "normalizer": "keyword_normalizer" }, "categories": { "type": "keyword", "normalizer": "keyword_normalizer" }, "event": { "type": "keyword", "normalizer": "keyword_normalizer" }, "response_types": { "type": "keyword", "normalizer": "keyword_normalizer" }, "urgency": { "type": "keyword", "normalizer": "keyword_normalizer" }, "severity": { "type": "keyword", "normalizer": "keyword_normalizer" }, "certainty": { "type": "keyword", "normalizer": "keyword_normalizer" }, "audience": { "type": "keyword", 
"normalizer": "keyword_normalizer" }, "event_codes": { "type": "object" }, "effective": { "type": "date" }, "onset": { "type": "date" }, "expires": { "type": "date" }, "sender_name": { "type": "keyword", "normalizer": "keyword_normalizer" }, "headline": { "type": "text", "analyzer": "folding" }, "description": { "type": "text", "analyzer": "folding" }, "instruction": { "type": "text", "analyzer": "folding" }, "web": { "type": "keyword", "normalizer": "keyword_normalizer" }, "contact": { "type": "keyword", "normalizer": "keyword_normalizer" }, "parameters": { "type": "object" }, "resources": { "type": "nested", "dynamic": false, "properties": { "description": { "type": "text", "analyzer": "folding" }, "mime_type": { "type": "keyword", "normalizer": "keyword_normalizer" }, "size": { "type": "integer" }, "uri": { "type": "keyword", "normalizer": "keyword_normalizer" }, "derefUri": { "type": "binary" }, "digest": { "type": "keyword", "normalizer": "keyword_normalizer" } } }, "areas": { "type": "nested", "dynamic": false, "properties": { "description": { "type": "text", "analyzer": "folding" }, "polygons": { "type": "geo_shape", "ignore_malformed": true }, "circles": { "type": "geo_shape", "ignore_malformed": true }, "geocodes": { "type": "object" }, "altitude": { "type": "float" }, "ceiling": { "type": "float" } } }, "_object": { "type": "join", "relations": { "alert": "info" } } } } } }`
vendor/github.com/alerting/alerts/pkg/alerts/elasticsearch/mapping.go
0.549399
0.457682
mapping.go
starcoder
package stats import ( "time" "golang.org/x/net/context" ) // Stats provides an interface for generating instruments, like guages and // counts. type Stats interface { Inc(name string, value int64, rate float32, tags []string) error Timing(name string, value time.Duration, rate float32, tags []string) error Gauge(name string, value float32, rate float32, tags []string) error Histogram(name string, value float32, rate float32, tags []string) error } type nullStats struct{} func (s *nullStats) Inc(name string, value int64, rate float32, tags []string) error { return nil } func (s *nullStats) Timing(name string, value time.Duration, rate float32, tags []string) error { return nil } func (s *nullStats) Gauge(name string, value float32, rate float32, tags []string) error { return nil } func (s *nullStats) Histogram(name string, value float32, rate float32, tags []string) error { return nil } var Null = &nullStats{} // taggedStats wraps a Stats implementation to include some additional tags. type taggedStats struct { tags []string stats Stats } func (s *taggedStats) Inc(name string, value int64, rate float32, tags []string) error { return s.stats.Inc(name, value, rate, append(tags, s.tags...)) } func (s *taggedStats) Timing(name string, value time.Duration, rate float32, tags []string) error { return s.stats.Timing(name, value, rate, append(tags, s.tags...)) } func (s *taggedStats) Gauge(name string, value float32, rate float32, tags []string) error { return s.stats.Gauge(name, value, rate, append(tags, s.tags...)) } func (s *taggedStats) Histogram(name string, value float32, rate float32, tags []string) error { return s.stats.Histogram(name, value, rate, append(tags, s.tags...)) } // WithStats returns a new context.Context with the Stats implementation // embedded. func WithStats(ctx context.Context, stats Stats) context.Context { return context.WithValue(ctx, statsKey, stats) } // FromContext returns the Stats implementation that's embedded in the context. 
func FromContext(ctx context.Context) (Stats, bool) { stats, ok := ctx.Value(statsKey).(Stats) return stats, ok } // WithTags will return a context.Context where all metrics recorded with the // embedded Stats implementation will include the given stats. func WithTags(ctx context.Context, tags []string) context.Context { stats, ok := FromContext(ctx) if !ok { return ctx } return WithStats(ctx, &taggedStats{tags, stats}) } func Inc(ctx context.Context, name string, value int64, rate float32, tags []string) error { if stats, ok := FromContext(ctx); ok { return stats.Inc(name, value, rate, tags) } return nil } func Timing(ctx context.Context, name string, value time.Duration, rate float32, tags []string) error { if stats, ok := FromContext(ctx); ok { return stats.Timing(name, value, rate, tags) } return nil } func Gauge(ctx context.Context, name string, value float32, rate float32, tags []string) error { if stats, ok := FromContext(ctx); ok { return stats.Gauge(name, value, rate, tags) } return nil } func Histogram(ctx context.Context, name string, value float32, rate float32, tags []string) error { if stats, ok := FromContext(ctx); ok { return stats.Histogram(name, value, rate, tags) } return nil } type key int const ( statsKey key = iota )
stats/stats.go
0.859664
0.480174
stats.go
starcoder
// Package mlpack: auto-generated Go binding for mlpack's Hoeffding tree
// program. The cgo preamble below configures compilation/linking against the
// generated C shim and must not be edited.
package mlpack

/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_hoeffding_tree
#include <capi/hoeffding_tree.h>
#include <stdlib.h>
*/
import "C"

import "gonum.org/v1/gonum/mat"

// HoeffdingTreeOptionalParam collects every optional parameter accepted by
// HoeffdingTree. Fields left at the defaults produced by
// HoeffdingTreeOptions() are treated as "not passed".
type HoeffdingTreeOptionalParam struct {
	BatchMode                 bool
	Bins                      int
	Confidence                float64
	InfoGain                  bool
	InputModel                *hoeffdingTreeModel
	Labels                    *mat.Dense
	MaxSamples                int
	MinSamples                int
	NumericSplitStrategy      string
	ObservationsBeforeBinning int
	Passes                    int
	Test                      *matrixWithInfo
	TestLabels                *mat.Dense
	Training                  *matrixWithInfo
	Verbose                   bool
}

// HoeffdingTreeOptions returns a parameter struct populated with the
// documented default for every option; HoeffdingTree compares against these
// defaults to decide which parameters were explicitly set.
func HoeffdingTreeOptions() *HoeffdingTreeOptionalParam {
	return &HoeffdingTreeOptionalParam{
		BatchMode:                 false,
		Bins:                      10,
		Confidence:                0.95,
		InfoGain:                  false,
		InputModel:                nil,
		Labels:                    nil,
		MaxSamples:                5000,
		MinSamples:                100,
		NumericSplitStrategy:      "binary",
		ObservationsBeforeBinning: 100,
		Passes:                    1,
		Test:                      nil,
		TestLabels:                nil,
		Training:                  nil,
		Verbose:                   false,
	}
}

/*
  This program implements Hoeffding trees, a form of streaming decision tree
  suited best for large (or streaming) datasets.  This program supports both
  categorical and numeric data.  Given an input dataset, this program is able to
  train the tree with numerous training options, and save the model to a file.
  The program is also able to use a trained model or a model from file in order
  to predict classes for a given test set.

  The training file and associated labels are specified with the "Training" and
  "Labels" parameters, respectively. Optionally, if "Labels" is not specified,
  the labels are assumed to be the last dimension of the training dataset.

  The training may be performed in batch mode (like a typical decision tree
  algorithm) by specifying the "BatchMode" option, but this may not be the best
  option for large datasets.

  When a model is trained, it may be saved via the "OutputModel" output
  parameter.  A model may be loaded from file for further training or testing
  with the "InputModel" parameter.

  Test data may be specified with the "Test" parameter, and if performance
  statistics are desired for that test set, labels may be specified with the
  "TestLabels" parameter.  Predictions for each test point may be saved with
  the "Predictions" output parameter, and class probabilities for each
  prediction may be saved with the "Probabilities" output parameter.

  For example, to train a Hoeffding tree with confidence 0.99 with data dataset,
  saving the trained tree to tree, the following command may be used:

  // Initialize optional parameters for HoeffdingTree().
  param := mlpack.HoeffdingTreeOptions()
  param.Training = dataset
  param.Confidence = 0.99

  tree, _, _ := mlpack.HoeffdingTree(param)

  Then, this tree may be used to make predictions on the test set test_set,
  saving the predictions into predictions and the class probabilities into
  class_probs with the following command:

  // Initialize optional parameters for HoeffdingTree().
  param := mlpack.HoeffdingTreeOptions()
  param.InputModel = &tree
  param.Test = test_set

  _, predictions, class_probs := mlpack.HoeffdingTree(param)

  Input parameters:

   - BatchMode (bool): If true, samples will be considered in batch
        instead of as a stream.  This generally results in better trees but at
        the cost of memory usage and runtime.
   - Bins (int): If the 'domingos' split strategy is used, this specifies
        the number of bins for each numeric split.  Default value 10.
   - Confidence (float64): Confidence before splitting (between 0 and 1).
        Default value 0.95.
   - InfoGain (bool): If set, information gain is used instead of Gini
        impurity for calculating Hoeffding bounds.
   - InputModel (hoeffdingTreeModel): Input trained Hoeffding tree model.
   - Labels (mat.Dense): Labels for training dataset.
   - MaxSamples (int): Maximum number of samples before splitting.
        Default value 5000.
   - MinSamples (int): Minimum number of samples before splitting.
        Default value 100.
   - NumericSplitStrategy (string): The splitting strategy to use for
        numeric features: 'domingos' or 'binary'.  Default value 'binary'.
   - ObservationsBeforeBinning (int): If the 'domingos' split strategy is
        used, this specifies the number of samples observed before binning is
        performed.  Default value 100.
   - Passes (int): Number of passes to take over the dataset.  Default
        value 1.
   - Test (matrixWithInfo): Testing dataset (may be categorical).
   - TestLabels (mat.Dense): Labels of test data.
   - Training (matrixWithInfo): Training dataset (may be categorical).
   - Verbose (bool): Display informational messages and the full list of
        parameters and timers at the end of execution.

  Output parameters:

   - outputModel (hoeffdingTreeModel): Output for trained Hoeffding tree
        model.
   - predictions (mat.Dense): Matrix to output label predictions for test
        data into.
   - probabilities (mat.Dense): In addition to predicting labels, provide
        prediction probabilities in this matrix.
*/
func HoeffdingTree(param *HoeffdingTreeOptionalParam) (hoeffdingTreeModel, *mat.Dense, *mat.Dense) {
	// Reset mlpack's global IO/timer state before binding parameters.
	resetTimers()
	enableTimers()
	disableBacktrace()
	disableVerbose()
	restoreSettings("Hoeffding trees")

	// Detect if the parameter was passed; set if so.
	if param.BatchMode != false {
		setParamBool("batch_mode", param.BatchMode)
		setPassed("batch_mode")
	}

	// Detect if the parameter was passed; set if so.
	if param.Bins != 10 {
		setParamInt("bins", param.Bins)
		setPassed("bins")
	}

	// Detect if the parameter was passed; set if so.
	if param.Confidence != 0.95 {
		setParamDouble("confidence", param.Confidence)
		setPassed("confidence")
	}

	// Detect if the parameter was passed; set if so.
	if param.InfoGain != false {
		setParamBool("info_gain", param.InfoGain)
		setPassed("info_gain")
	}

	// Detect if the parameter was passed; set if so.
	if param.InputModel != nil {
		setHoeffdingTreeModel("input_model", param.InputModel)
		setPassed("input_model")
	}

	// Detect if the parameter was passed; set if so.
	if param.Labels != nil {
		gonumToArmaUrow("labels", param.Labels)
		setPassed("labels")
	}

	// Detect if the parameter was passed; set if so.
	if param.MaxSamples != 5000 {
		setParamInt("max_samples", param.MaxSamples)
		setPassed("max_samples")
	}

	// Detect if the parameter was passed; set if so.
	if param.MinSamples != 100 {
		setParamInt("min_samples", param.MinSamples)
		setPassed("min_samples")
	}

	// Detect if the parameter was passed; set if so.
	if param.NumericSplitStrategy != "binary" {
		setParamString("numeric_split_strategy", param.NumericSplitStrategy)
		setPassed("numeric_split_strategy")
	}

	// Detect if the parameter was passed; set if so.
	if param.ObservationsBeforeBinning != 100 {
		setParamInt("observations_before_binning", param.ObservationsBeforeBinning)
		setPassed("observations_before_binning")
	}

	// Detect if the parameter was passed; set if so.
	if param.Passes != 1 {
		setParamInt("passes", param.Passes)
		setPassed("passes")
	}

	// Detect if the parameter was passed; set if so.
	if param.Test != nil {
		gonumToArmaMatWithInfo("test", param.Test)
		setPassed("test")
	}

	// Detect if the parameter was passed; set if so.
	if param.TestLabels != nil {
		gonumToArmaUrow("test_labels", param.TestLabels)
		setPassed("test_labels")
	}

	// Detect if the parameter was passed; set if so.
	if param.Training != nil {
		gonumToArmaMatWithInfo("training", param.Training)
		setPassed("training")
	}

	// Detect if the parameter was passed; set if so.
	if param.Verbose != false {
		setParamBool("verbose", param.Verbose)
		setPassed("verbose")
		enableVerbose()
	}

	// Mark all output options as passed.
	setPassed("output_model")
	setPassed("predictions")
	setPassed("probabilities")

	// Call the mlpack program.
	C.mlpackHoeffdingTree()

	// Initialize result variable and get output.
	var outputModel hoeffdingTreeModel
	outputModel.getHoeffdingTreeModel("output_model")
	var predictionsPtr mlpackArma
	predictions := predictionsPtr.armaToGonumUrow("predictions")
	var probabilitiesPtr mlpackArma
	probabilities := probabilitiesPtr.armaToGonumMat("probabilities")

	// Clear settings.
	clearSettings()

	// Return output(s).
	return outputModel, predictions, probabilities
}
hoeffding_tree.go
0.730963
0.55652
hoeffding_tree.go
starcoder
package goptuna // StudyOption to pass the custom option type StudyOption func(study *Study) error // StudyOptionDirection change the direction of optimize func StudyOptionDirection(direction StudyDirection) StudyOption { return func(s *Study) error { s.direction = direction return nil } } // StudyOptionLogger sets Logger. func StudyOptionLogger(logger Logger) StudyOption { return func(s *Study) error { if logger == nil { s.logger = &StdLogger{Logger: nil} } else { s.logger = logger } return nil } } // StudyOptionStorage sets the storage object. func StudyOptionStorage(storage Storage) StudyOption { return func(s *Study) error { s.Storage = storage return nil } } // StudyOptionSampler sets the sampler object. func StudyOptionSampler(sampler Sampler) StudyOption { return func(s *Study) error { s.Sampler = sampler return nil } } // StudyOptionRelativeSampler sets the relative sampler object. func StudyOptionRelativeSampler(sampler RelativeSampler) StudyOption { return func(s *Study) error { s.RelativeSampler = sampler return nil } } // StudyOptionPruner sets the pruner object. func StudyOptionPruner(pruner Pruner) StudyOption { return func(s *Study) error { s.Pruner = pruner return nil } } // StudyOptionIgnoreError is an option to continue even if // it receive error while running Optimize method. func StudyOptionIgnoreError(ignore bool) StudyOption { return func(s *Study) error { s.ignoreErr = ignore return nil } } // StudyOptionTrialNotifyChannel to subscribe the finished trials. func StudyOptionTrialNotifyChannel(notify chan FrozenTrial) StudyOption { return func(s *Study) error { s.trialNotification = notify return nil } } // StudyOptionLoadIfExists to load the study if exists. func StudyOptionLoadIfExists(loadIfExists bool) StudyOption { return func(s *Study) error { s.loadIfExists = loadIfExists return nil } } // StudyOptionInitialSearchSpace to use RelativeSampler from the first trial. // This option is useful for Define-and-Run interface. 
func StudyOptionDefineSearchSpace(space map[string]interface{}) StudyOption { return func(s *Study) error { s.definedSearchSpace = space return nil } } // StudyOptionSetLogger sets Logger. // Deprecated: please use StudyOptionLogger instead. var StudyOptionSetLogger = StudyOptionLogger // StudyOptionSetDirection change the direction of optimize // Deprecated: please use StudyOptionDirection instead. var StudyOptionSetDirection = StudyOptionDirection // StudyOptionSetTrialNotifyChannel to subscribe the finished trials. var StudyOptionSetTrialNotifyChannel = StudyOptionTrialNotifyChannel
study_option.go
0.568655
0.401043
study_option.go
starcoder
package syntax import ( "log" "sort" "sync" "github.com/grailbio/base/digest" "github.com/grailbio/reflow" "github.com/grailbio/reflow/flow" "github.com/grailbio/reflow/types" "github.com/grailbio/reflow/values" ) var forceDigest = reflow.Digester.FromString("grail.com/reflow/syntax.Eval.Force") // Force produces a strict version of v. Force either returns an // immediate value v, or else a *flow.Flow that will produce the // immediate value. func Force(v values.T, t *types.T) values.T { if f, ok := v.(*flow.Flow); ok { return &flow.Flow{ Deps: []*flow.Flow{f}, Op: flow.K, FlowDigest: forceDigest, K: func(vs []values.T) *flow.Flow { v := vs[0] return toFlow(Force(v, t), t) }, } } switch t.Kind { case types.ErrorKind: panic("bad type") case types.BottomKind: panic("bottom value") case types.IntKind, types.FloatKind, types.StringKind, types.BoolKind, types.FileKind, types.DirKind, types.FilesetKind, types.UnitKind, types.FuncKind: // These types are always strict. return v case types.ListKind: if _, ok := v.(values.List); !ok { log.Printf("expected a list, got %v", v) } var ( list = v.(values.List) copy = make(values.List, len(list)) r = newResolver(copy, t) ) for i := range list { copy[i] = Force(list[i], t.Elem) r.Add(&copy[i], t.Elem) } return r.Resolve(nil) case types.MapKind: var ( m = v.(*values.Map) copy = new(values.Map) r = newResolver(copy, t) kvs = make([]kpvp, 0, m.Len()) ) m.Each(func(k, v values.T) { kk := Force(k, t.Index) vv := Force(v, t.Elem) kv := kpvp{&kk, &vv, values.Digest(kk, t.Index)} kvs = append(kvs, kv) }) sort.Slice(kvs, func(i, j int) bool { return kvs[i].KD.Less(kvs[j].KD) }) for _, kv := range kvs { r.Add(kv.K, t.Index) r.Add(kv.V, t.Elem) } return r.Resolve(func() { for _, kv := range kvs { copy.Insert(kv.KD, *kv.K, *kv.V) } }) case types.TupleKind: var ( tup = v.(values.Tuple) copy = make(values.Tuple, len(tup)) r = newResolver(copy, t) ) for i := range tup { copy[i] = Force(tup[i], t.Fields[i].T) r.Add(&copy[i], t.Fields[i].T) } 
return r.Resolve(nil) case types.StructKind: var ( s = v.(values.Struct) copy = make(values.Struct) fm = t.FieldMap() r = newResolver(copy, t) kvs []kvp ) for k := range fm { vv := Force(s[k], fm[k]) copy[k] = vv kv := kvp{k, &vv} kvs = append(kvs, kv) r.Add(kv.V, fm[k]) } return r.Resolve(func() { for _, kv := range kvs { copy[kv.K.(string)] = *kv.V } }) case types.ModuleKind: var ( m = v.(values.Module) copy = make(values.Module) r = newResolver(copy, t) fm = t.FieldMap() kvs []kvp ) for k := range fm { vv := Force(m[k], fm[k]) copy[k] = vv kv := kvp{k, &vv} kvs = append(kvs, kv) r.Add(kv.V, fm[k]) } return r.Resolve(func() { for _, kv := range kvs { copy[kv.K.(string)] = *kv.V } }) case types.SumKind: var ( variant = v.(*values.Variant) elemTyp = t.VariantMap()[variant.Tag] ) if elemTyp == nil { // It is a variant with no element, so there is nothing to resolve. return v } var ( copy = &values.Variant{Tag: variant.Tag} r = newResolver(copy, t) ) copy.Elem = Force(variant.Elem, elemTyp) r.Add(&copy.Elem, elemTyp) return r.Resolve(nil) } panic("bad value") } // flow produces a flow from value v. If v is already a flow, // it is returned immediately; otherwise it's wrapped in a // Val flow. 
func toFlow(v values.T, t *types.T) *flow.Flow { if f, ok := v.(*flow.Flow); ok { return f } return &flow.Flow{ Op: flow.Val, Value: v, FlowDigest: values.Digest(v, t), } } type resolver struct { dw digest.Writer deps []*flow.Flow vps []*values.T v values.T t *types.T } func newResolver(v values.T, t *types.T) *resolver { return &resolver{ v: v, t: t, dw: reflow.Digester.NewWriter(), } } func (r *resolver) Add(vp *values.T, t *types.T) { if f, ok := (*vp).(*flow.Flow); ok { r.deps = append(r.deps, f) r.vps = append(r.vps, vp) } else { values.WriteDigest(r.dw, *vp, t) } } func (r *resolver) Resolve(proc func()) values.T { if len(r.deps) == 0 { if proc != nil { proc() } return r.v } var once sync.Once writeN(r.dw, int(r.t.Kind.ID())) return &flow.Flow{ Op: flow.K, Deps: r.deps, FlowDigest: r.dw.Digest(), K: func(vs []values.T) *flow.Flow { // Initialize the underlying datastructure only once. // This makes it safe to invoke the K multiple times. // Since the underlying computation is deterministic, // we are free to ignore the vs from subsequent // computations. once.Do(func() { for i := range vs { *r.vps[i] = vs[i] } if proc != nil { proc() } }) return toFlow(r.v, r.t) }, } } type kpvp struct { K *values.T V *values.T KD digest.Digest } type kvp struct { K values.T V *values.T }
syntax/force.go
0.543348
0.531331
force.go
starcoder
package lex import ( "bytes" "fmt" "strings" "unicode/utf8" ) const ( eof = -1 // Dot is not included and must be checked individually. operatorRunes = "!#%&*+-/;<=>?@^`|~" // \v is a vertical tab whitespaceRunes = " \t\n\r\f\v" ) // The state of the scanner as a function that returns the next state. type stateFn func(*Lexer) stateFn // Lexer represents the state of scanning the input text. type Lexer struct { input []byte // the data being scanned state stateFn // the next lexing function to enter pos int // current position in the input itemStart int // start position of the current item width int // width of last rune read from input lastPos int // position of most recent item returned by NextItem items chan Item // channel of scanned items containers []byte // keep track of container starts and ends } // New creates a new scanner for the input data. This is the lexing half of the // Lexer / parser. Basic validation is done in the Lexer for a loose sense of // correctness, but the rigid correctness is enforced in the parser. func New(input []byte) *Lexer { x := &Lexer{ input: input, items: make(chan Item), } go x.run() return x } // NextItem returns the next item from the input. func (x *Lexer) NextItem() Item { item := <-x.items x.lastPos = item.Pos return item } // LineNumber returns the line number that the Lexer last stopped at. func (x *Lexer) LineNumber() int { // Count the number of newlines, then add 1 for the line we're currently on. return bytes.Count(x.input[:x.lastPos], []byte("\n")) + 1 } // run the state machine for the Lexer. func (x *Lexer) run() { for x.state = lexValue; x.state != nil; { x.state = x.state(x) } } // next returns the next rune in the input. If there is a problem decoding // the rune, then utf8.RuneError is returned. 
func (x *Lexer) next() rune { if x.pos >= len(x.input) { x.width = 0 return eof } r, w := utf8.DecodeRune(x.input[x.pos:]) x.width = w x.pos += x.width return r } // peek returns, but does not consume, the next rune from the input. func (x *Lexer) peek() rune { if x.pos >= len(x.input) { return eof } r, _ := utf8.DecodeRune(x.input[x.pos:]) return r } // backup steps back one rune. Can only be called once per call of next(). func (x *Lexer) backup() { x.pos -= x.width } // emit sends an item representing the current Lexer state and the given type // onto the items channel. func (x *Lexer) emit(it itemType) { x.items <- Item{Type: it, Pos: x.itemStart, Val: x.input[x.itemStart:x.pos]} x.itemStart = x.pos } // ignore sets the itemStart point to the current position, thereby "ignoring" any // input between the two points. func (x *Lexer) ignore() { x.itemStart = x.pos } // emitAndIgnoreTripleQuoteEnd backs up three spots (a triple quote), emits the given // itemType, then goes forward three spots to ignore the triple quote func (x *Lexer) emitAndIgnoreTripleQuoteEnd(itemType itemType) { x.width = 1 x.backup() x.backup() x.backup() x.emit(itemType) x.next() x.next() x.next() x.ignore() } // errorf emits an error token and returns nil to stop lexing. func (x *Lexer) errorf(format string, args ...interface{}) stateFn { x.items <- Item{Type: IonError, Pos: x.itemStart, Val: []byte(fmt.Sprintf(format, args...))} return nil } // error emits an error token and returns nil to stop lexing. func (x *Lexer) error(message string) stateFn { x.items <- Item{Type: IonError, Pos: x.itemStart, Val: []byte(message)} return nil } // lexValue scans for a value, which can be an annotation, number, symbol, list, // struct, or s-expression. 
func lexValue(x *Lexer) stateFn { switch ch := x.next(); { case ch == eof: x.emit(IonEOF) return nil case isWhitespace(ch): x.ignore() return lexValue case ch == ':': return lexColons case ch == '\'': return lexSingleQuote case ch == '"': return lexString case ch == ',': x.emit(IonComma) return lexValue case ch == '[': return lexList case ch == ']': return lexListEnd case ch == '(': return lexSExp case ch == ')': return lexSExpEnd case ch == '{': if x.peek() == '{' { x.next() return lexBinary } return lexStruct case ch == '}': return lexStructEnd case ch == '/': // Comment handling needs to come before operator handling because the // start of a comment is also an operator. Treat it as an operator if // the following character doesn't adhere to one of the comment standards. switch x.peek() { case '/': x.next() return lexLineComment case '*': x.next() return lexBlockComment } x.emit(IonOperator) return lexValue case isOperator(ch) || ch == '.': // - is both an operator and a signal that a number is starting. Since // infinity is represented as +inf or -inf, we need to take that into // account as well. if (ch == '+' && x.peek() == 'i') || (ch == '-' && (isNumber(x.peek()) || x.peek() == 'i' || x.peek() == '_')) { x.backup() return lexNumber } // An operator can consist of multiple characters. for next := x.peek(); isOperator(next); next = x.peek() { x.next() } x.emit(IonOperator) return lexValue case isIdentifierSymbolStart(ch): x.backup() return lexSymbol case isNumericStart(ch): x.backup() return lexNumber default: return x.errorf("invalid start of a value: %#U", ch) } } // lexColons expects one colon to be scanned and checks to see if there is // a second before emitting. Returns lexValue. func lexColons(x *Lexer) stateFn { if x.peek() == ':' { x.next() x.emit(IonDoubleColon) } else { x.emit(IonColon) } return lexValue } // lexLineComment scans a comment while parsing values. The comment is // terminated by a newline. lexValue is returned. 
func lexLineComment(x *Lexer) stateFn { // Ignore the preceding "//" characters. x.ignore() for { ch := x.next() if ch == utf8.RuneError { return x.error("error parsing rune") } if isEndOfLine(ch) || ch == eof { x.backup() break } } x.emit(IonCommentLine) return lexValue } // lexBlockComment scans a block comment. The comment is terminated by */ // lexTopLevel is returned since we don't know what is going to come next. func lexBlockComment(x *Lexer) stateFn { // Ignore the preceding "/*" characters. x.ignore() for { ch := x.next() if ch == eof { return x.error("unexpected end of file while lexing block comment") } if ch == utf8.RuneError { return x.error("error parsing rune") } if ch == '*' && x.peek() == '/' { x.backup() break } } x.emit(IonCommentBlock) // Ignore the trailing "*/" characters. x.next() x.next() x.ignore() return lexValue } // eatWhitespace eats up all of the text until a non-whitespace character is encountered. func eatWhitespace(x *Lexer) { for isWhitespace(x.peek()) { x.next() } x.ignore() } // isWhitespace returns if the given rune is considered to be a form of whitespace. func isWhitespace(ch rune) bool { return bytes.ContainsRune([]byte(whitespaceRunes), ch) } // isEndOfLine returns true if the given rune is an end-of-line character. func isEndOfLine(ch rune) bool { return ch == '\r' || ch == '\n' } // isOperator returns true if the given rune is one of the operator chars (not including dot). func isOperator(ch rune) bool { return bytes.ContainsRune([]byte(operatorRunes), ch) } // accept consumes the next rune if it's from the given set of valid runes. func (x *Lexer) accept(valid string) bool { if strings.IndexRune(valid, x.peek()) >= 0 { x.next() return true } return false } // acceptString consumes the as many runes from the given string as possible. // If it hits a rune it can't accept, then it backs up and returns false. 
func (x *Lexer) acceptString(valid string) bool { for _, ch := range valid { if x.peek() != ch { return false } x.next() } return true } // acceptRun consumes as many runes as possible from the given set set of valid runes. // Stops at either an unacceptable rune, EOF, or if any of the noRepeat runes are encountered // twice consecutively. func (x *Lexer) acceptRun(valid string, noRepeat string) int { inRepeat := false count := 0 // Use peek so that we can still back up if the rune we fail on is EOF. for ch := x.peek(); strings.IndexRune(valid, ch) >= 0; ch = x.peek() { x.next() count++ isRepeatRune := strings.IndexRune(noRepeat, ch) >= 0 if isRepeatRune && inRepeat { break } inRepeat = isRepeatRune } return count }
internal/lex/lex.go
0.654564
0.464051
lex.go
starcoder
package value import ( "runtime" "strings" "sync" ) type valueType int const ( intType valueType = iota charType bigIntType bigRatType bigFloatType vectorType matrixType numType ) var typeName = [...]string{"int", "char", "big int", "rational", "float", "vector", "matrix"} func (t valueType) String() string { return typeName[t] } type unaryFn func(Context, Value) Value type unaryOp struct { name string elementwise bool // whether the operation applies elementwise to vectors and matrices fn [numType]unaryFn } func (op *unaryOp) EvalUnary(c Context, v Value) Value { which := whichType(v) fn := op.fn[which] if fn == nil { if op.elementwise { switch which { case vectorType: return unaryVectorOp(c, op.name, v) case matrixType: return unaryMatrixOp(c, op.name, v) } } Errorf("unary %s not implemented on type %s", op.name, which) } return fn(c, v) } type binaryFn func(Context, Value, Value) Value type binaryOp struct { name string elementwise bool // whether the operation applies elementwise to vectors and matrices whichType func(a, b valueType) (valueType, valueType) fn [numType]binaryFn } func whichType(v Value) valueType { switch v.Inner().(type) { case Int: return intType case Char: return charType case BigInt: return bigIntType case BigRat: return bigRatType case BigFloat: return bigFloatType case Vector: return vectorType case *Matrix: return matrixType } Errorf("unknown type %T in whichType", v) panic("which type") } func (op *binaryOp) EvalBinary(c Context, u, v Value) Value { if op.whichType == nil { // At the moment, "text" is the only operator that leaves // both arg types alone. Perhaps more will arrive. 
if op.name != "text" { Errorf("internal error: nil whichType") } return op.fn[0](c, u, v) } whichU, whichV := op.whichType(whichType(u), whichType(v)) conf := c.Config() u = u.toType(op.name, conf, whichU) v = v.toType(op.name, conf, whichV) fn := op.fn[whichV] if fn == nil { if op.elementwise { switch whichV { case vectorType: return binaryVectorOp(c, u, op.name, v) case matrixType: return binaryMatrixOp(c, u, op.name, v) } } Errorf("binary %s not implemented on type %s", op.name, whichV) } return fn(c, u, v) } // Product computes a compound product, such as an inner product // "+.*" or outer product "o.*". The op is known to contain a // period. The operands are all at least vectors, and for inner product // they must both be vectors. func Product(c Context, u Value, op string, v Value) Value { dot := strings.IndexByte(op, '.') left := op[:dot] right := op[dot+1:] which, _ := atLeastVectorType(whichType(u), whichType(v)) u = u.toType(op, c.Config(), which) v = v.toType(op, c.Config(), which) if left == "o" { return outerProduct(c, u, right, v) } return innerProduct(c, u, left, right, v) } // safeBinary reports whether the binary operator op is safe to parallelize. func safeBinary(op string) bool { // ? uses the random number generator, // which maintains global state. return BinaryOps[op] != nil && op != "?" } // safeUnary reports whether the unary operator op is safe to parallelize. func safeUnary(op string) bool { // ? uses the random number generator, // which maintains global state. return UnaryOps[op] != nil && op != "?" } // knownAssoc reports whether the binary op is known to be associative. func knownAssoc(op string) bool { switch op { case "+", "*", "min", "max", "or", "and", "xor", "|", "&", "^": return true } return false } var pforMinWork = 100 func MaxParallelismForTesting() { pforMinWork = 1 } // pfor is a conditionally parallel for loop from 0 to n. 
// If ok is true and the work is big enough,
// pfor calls f(lo, hi) for ranges [lo, hi) that collectively tile [0, n)
// and for which (hi-lo)*size is at least roughly pforMinWork.
// Otherwise, pfor calls f(0, n).
func pfor(ok bool, size, n int, f func(lo, hi int)) {
	var p int
	if ok {
		p = runtime.GOMAXPROCS(-1)
		if p == 1 || n <= 1 || n*size < pforMinWork*2 {
			ok = false
		}
	}
	if !ok {
		// Run serially on the caller's goroutine.
		f(0, n)
		return
	}
	p *= 4 // evens out lopsided work splits
	if q := n * size / pforMinWork; q < p {
		p = q
	}
	var wg sync.WaitGroup
	for i := 0; i < p; i++ {
		lo, hi := i*n/p, (i+1)*n/p
		wg.Add(1)
		go func() {
			defer wg.Done()
			f(lo, hi)
		}()
	}
	wg.Wait()
}

// inner product computes an inner product such as "+.*".
// u and v are known to be the same type and at least Vectors.
func innerProduct(c Context, u Value, left, right string, v Value) Value {
	switch u := u.(type) {
	case Vector:
		v := v.(Vector)
		u.sameLength(v)
		n := len(u)
		if n == 0 {
			Errorf("empty inner product")
		}
		// Right associative: fold from the tail.
		x := c.EvalBinary(u[n-1], right, v[n-1])
		for k := n - 2; k >= 0; k-- {
			x = c.EvalBinary(c.EvalBinary(u[k], right, v[k]), left, x)
		}
		return x
	case *Matrix:
		// Say we're doing +.*
		// result[i,j] = +/(u[row i] * v[column j])
		// Number of columns of u must be the number of rows of v: (-1 take rho u) == (1 take rho v)
		// The result has shape (-1 drop rho u), (1 drop rho v)
		v := v.(*Matrix)
		if u.Rank() < 1 || v.Rank() < 1 || u.shape[len(u.shape)-1] != v.shape[0] {
			Errorf("inner product: mismatched shapes %s and %s", NewIntVector(u.shape), NewIntVector(v.shape))
		}
		n := v.shape[0]
		vstride := len(v.data) / n
		data := make(Vector, len(u.data)/n*vstride)
		pfor(safeBinary(left) && safeBinary(right), 1, len(data), func(lo, hi int) {
			for x := lo; x < hi; x++ {
				i := x / vstride * n
				j := x % vstride
				acc := c.EvalBinary(u.data[i+n-1], right, v.data[j+(n-1)*vstride])
				for k := n - 2; k >= 0; k-- {
					acc = c.EvalBinary(c.EvalBinary(u.data[i+k], right, v.data[j+k*vstride]), left, acc)
				}
				data[x] = acc
			}
		})
		rank := len(u.shape) + len(v.shape) - 2
		if rank == 1 {
			return data
		}
		shape := make([]int, rank)
		copy(shape, u.shape[:len(u.shape)-1])
		copy(shape[len(u.shape)-1:], v.shape[1:])
		return NewMatrix(shape, data)
	}
	Errorf("can't do inner product on %s", whichType(u))
	panic("not reached")
}

// outer product computes an outer product such as "o.*".
// u and v are known to be at least Vectors.
func outerProduct(c Context, u Value, op string, v Value) Value {
	switch u := u.(type) {
	case Vector:
		v := v.(Vector)
		m := Matrix{
			shape: []int{len(u), len(v)},
			data:  NewVector(make(Vector, len(u)*len(v))),
		}
		pfor(safeBinary(op), 1, len(m.data), func(lo, hi int) {
			for x := lo; x < hi; x++ {
				m.data[x] = c.EvalBinary(u[x/len(v)], op, v[x%len(v)])
			}
		})
		return &m // TODO: Shrink?
	case *Matrix:
		v := v.(*Matrix)
		m := Matrix{
			shape: append(u.Shape(), v.Shape()...),
			data:  NewVector(make(Vector, len(u.Data())*len(v.Data()))),
		}
		vdata := v.Data()
		udata := u.Data()
		pfor(safeBinary(op), 1, len(m.data), func(lo, hi int) {
			for x := lo; x < hi; x++ {
				m.data[x] = c.EvalBinary(udata[x/len(vdata)], op, vdata[x%len(vdata)])
			}
		})
		return &m // TODO: Shrink?
	}
	Errorf("can't do outer product on %s", whichType(u))
	panic("not reached")
}

// Reduce computes a reduction such as +/. The slash has been removed.
func Reduce(c Context, op string, v Value) Value {
	// We must be right associative; that is the grammar.
	// -/1 2 3 == 1-2-3 is 1-(2-3) not (1-2)-3. Answer: 2.
	switch v := v.(type) {
	case Int, BigInt, BigRat:
		// Scalars reduce to themselves.
		return v
	case Vector:
		if len(v) == 0 {
			return v
		}
		acc := v[len(v)-1]
		for i := len(v) - 2; i >= 0; i-- {
			acc = c.EvalBinary(v[i], op, acc)
		}
		return acc
	case *Matrix:
		if v.Rank() < 2 {
			Errorf("shape for matrix is degenerate: %s", NewIntVector(v.shape))
		}
		// Reduction proceeds along the last axis; each row of length
		// stride collapses to a single element.
		stride := v.shape[v.Rank()-1]
		if stride == 0 {
			Errorf("shape for matrix is degenerate: %s", NewIntVector(v.shape))
		}
		shape := v.shape[:v.Rank()-1]
		data := make(Vector, size(shape))
		pfor(safeBinary(op), stride, len(data), func(lo, hi int) {
			for i := lo; i < hi; i++ {
				index := stride * i
				pos := index + stride - 1
				acc := v.data[pos]
				pos--
				for j := 1; j < stride; j++ {
					acc = c.EvalBinary(v.data[pos], op, acc)
					pos--
				}
				data[i] = acc
			}
		})
		if len(shape) == 1 { // TODO: Matrix.shrink()?
			return NewVector(data)
		}
		return NewMatrix(shape, data)
	}
	Errorf("can't do reduce on %s", whichType(v))
	panic("not reached")
}

// Scan computes a scan of the op; the \ has been removed.
// It gives the successive values of reducing op through v.
// We must be right associative; that is the grammar.
func Scan(c Context, op string, v Value) Value {
	switch v := v.(type) {
	case Int, BigInt, BigRat:
		return v
	case Vector:
		if len(v) == 0 {
			return v
		}
		values := make(Vector, len(v))
		// This is fundamentally O(n²) in the general case.
		// We make it O(n) for known associative ops.
		values[0] = v[0]
		if knownAssoc(op) {
			for i := 1; i < len(v); i++ {
				values[i] = c.EvalBinary(values[i-1], op, v[i])
			}
		} else {
			for i := 1; i < len(v); i++ {
				values[i] = Reduce(c, op, v[:i+1])
			}
		}
		return NewVector(values)
	case *Matrix:
		if v.Rank() < 2 {
			Errorf("shape for matrix is degenerate: %s", NewIntVector(v.shape))
		}
		stride := v.shape[v.Rank()-1]
		if stride == 0 {
			Errorf("shape for matrix is degenerate: %s", NewIntVector(v.shape))
		}
		data := make(Vector, len(v.data))
		nrows := 1
		for i := 0; i < v.Rank()-1; i++ {
			// Guaranteed by NewMatrix not to overflow.
			nrows *= v.shape[i]
		}
		pfor(safeBinary(op), stride, nrows, func(lo, hi int) {
			for i := lo; i < hi; i++ {
				index := i * stride
				// This is fundamentally O(n²) in the general case.
				// We make it O(n) for known associative ops.
				data[index] = v.data[index]
				if knownAssoc(op) {
					for j := 1; j < stride; j++ {
						data[index+j] = c.EvalBinary(data[index+j-1], op, v.data[index+j])
					}
				} else {
					for j := 1; j < stride; j++ {
						data[index+j] = Reduce(c, op, v.data[index:index+j+1])
					}
				}
			}
		})
		return NewMatrix(v.shape, data)
	}
	Errorf("can't do scan on %s", whichType(v))
	panic("not reached")
}

// unaryVectorOp applies op elementwise to i.
func unaryVectorOp(c Context, op string, i Value) Value {
	u := i.(Vector)
	n := make([]Value, len(u))
	pfor(safeUnary(op), 1, len(n), func(lo, hi int) {
		for k := lo; k < hi; k++ {
			n[k] = c.EvalUnary(op, u[k])
		}
	})
	return NewVector(n)
}

// unaryMatrixOp applies op elementwise to i.
func unaryMatrixOp(c Context, op string, i Value) Value {
	u := i.(*Matrix)
	n := make([]Value, len(u.data))
	pfor(safeUnary(op), 1, len(n), func(lo, hi int) {
		for k := lo; k < hi; k++ {
			n[k] = c.EvalUnary(op, u.data[k])
		}
	})
	return NewMatrix(u.shape, NewVector(n))
}

// binaryVectorOp applies op elementwise to i and j, broadcasting a
// single-element operand across the other.
func binaryVectorOp(c Context, i Value, op string, j Value) Value {
	u, v := i.(Vector), j.(Vector)
	if len(u) == 1 {
		n := make([]Value, len(v))
		pfor(safeBinary(op), 1, len(n), func(lo, hi int) {
			for k := lo; k < hi; k++ {
				n[k] = c.EvalBinary(u[0], op, v[k])
			}
		})
		return NewVector(n)
	}
	if len(v) == 1 {
		n := make([]Value, len(u))
		pfor(safeBinary(op), 1, len(n), func(lo, hi int) {
			for k := lo; k < hi; k++ {
				n[k] = c.EvalBinary(u[k], op, v[0])
			}
		})
		return NewVector(n)
	}
	u.sameLength(v)
	n := make([]Value, len(u))
	pfor(safeBinary(op), 1, len(n), func(lo, hi int) {
		for k := lo; k < hi; k++ {
			n[k] = c.EvalBinary(u[k], op, v[k])
		}
	})
	return NewVector(n)
}

// binaryMatrixOp applies op elementwise to i and j.
func binaryMatrixOp(c Context, i Value, op string, j Value) Value { u, v := i.(*Matrix), j.(*Matrix) shape := u.shape var n []Value // One or the other may be a scalar in disguise. switch { case isScalar(u): // Scalar op Matrix. shape = v.shape n = make([]Value, len(v.data)) pfor(safeBinary(op), 1, len(n), func(lo, hi int) { for k := lo; k < hi; k++ { n[k] = c.EvalBinary(u.data[0], op, v.data[k]) } }) case isScalar(v): // Matrix op Scalar. n = make([]Value, len(u.data)) pfor(safeBinary(op), 1, len(n), func(lo, hi int) { for k := lo; k < hi; k++ { n[k] = c.EvalBinary(u.data[k], op, v.data[0]) } }) case isVector(u, v.shape): // Vector op Matrix. shape = v.shape n = make([]Value, len(v.data)) dim := u.shape[0] pfor(safeBinary(op), 1, len(n), func(lo, hi int) { for k := lo; k < hi; k++ { n[k] = c.EvalBinary(u.data[k%dim], op, v.data[k]) } }) case isVector(v, u.shape): // Matrix op Vector. n = make([]Value, len(u.data)) dim := v.shape[0] pfor(safeBinary(op), 1, len(n), func(lo, hi int) { for k := lo; k < hi; k++ { n[k] = c.EvalBinary(u.data[k], op, v.data[k%dim]) } }) default: // Matrix op Matrix. u.sameShape(v) n = make([]Value, len(u.data)) pfor(safeBinary(op), 1, len(n), func(lo, hi int) { for k := lo; k < hi; k++ { n[k] = c.EvalBinary(u.data[k], op, v.data[k]) } }) } return NewMatrix(shape, NewVector(n)) } // isScalar reports whether u is a 1x1x1x... item, that is, a scalar promoted to matrix. func isScalar(u *Matrix) bool { for _, dim := range u.shape { if dim != 1 { return false } } return true } // isVector reports whether u is an 1x1x...xn item where n is the last dimension // of the shape, that is, an n-vector promoted to matrix. 
func isVector(u *Matrix, shape []int) bool { if u.Rank() == 0 || len(shape) == 0 || u.shape[0] != shape[len(shape)-1] { return false } for _, dim := range u.shape[1:] { if dim != 1 { return false } } return true } // EvalFunctionBody evaluates the list of expressions inside a function, // possibly with conditionals that generate an early return. func EvalFunctionBody(context Context, fnName string, body []Expr) Value { var v Value for _, e := range body { if d, ok := e.(Decomposable); ok && d.Operator() == ":" { left, right := d.Operands() if isTrue(fnName, left.Eval(context)) { return right.Eval(context) } continue } v = e.Eval(context) } return v } // isTrue reports whether v represents boolean truth. If v is not // a scalar, an error results. func isTrue(fnName string, v Value) bool { switch i := v.(type) { case Char: return i != 0 case Int: return i != 0 case BigInt: return true // If it's a BigInt, it can't be 0 - that's an Int. case BigRat: return true // If it's a BigRat, it can't be 0 - that's an Int. case BigFloat: return i.Float.Sign() != 0 default: Errorf("invalid expression %s for conditional inside %q", v, fnName) return false } }
value/eval.go
0.599368
0.513668
eval.go
starcoder
package value import ( "fmt" "robpike.io/ivy/config" ) func NewComplex(r, i Value) Value { return Complex{real: r, imag: i}.shrink() } func newComplexReal(r Value) Complex { return Complex{real: r, imag: Int(0)} } func newComplexImag(i Value) Complex { return Complex{real: Int(0), imag: i} } type Complex struct { real, imag Value } func (z Complex) String() string { return "(" + z.Sprint(debugConf) + ")" } func (_ Complex) Rank() int { return 0 } func (z Complex) Sprint(conf *config.Config) string { return fmt.Sprintf("%sj%s", z.real.Sprint(conf), z.imag.Sprint(conf)) } func (z Complex) ProgString() string { return fmt.Sprintf("%sj%s)", z.real.ProgString(), z.imag.ProgString()) } func (z Complex) Eval(Context) Value { return z } func (z Complex) Inner() Value { return z } func (z Complex) toType(op string, conf *config.Config, which valueType) Value { switch which { case complexType: return z case vectorType: return NewVector([]Value{z}) case matrixType: return NewMatrix([]int{1}, []Value{z}) } if toBool(z.imag) { Errorf("%s: cannot convert complex with non-zero imaginary part to %s", op, which) return nil } return z.real.toType(op, conf, which) } // Shrink the parts and remove a zero imaginary part. func (z Complex) shrink() Value { switch real := z.real.(type) { case BigInt: z.real = real.shrink() case BigFloat: z.real = real.shrink() case BigRat: z.real = real.shrink() } switch imag := z.imag.(type) { case BigInt: z.imag = imag.shrink() case BigFloat: z.imag = imag.shrink() case BigRat: z.imag = imag.shrink() } if toBool(z.imag) { return z } return z.real } // Use EvalUnary to retain ints in real and imaginary parts. func (z Complex) Floor(c Context) Complex { return Complex{ real: c.EvalUnary("floor", z.real), imag: c.EvalUnary("floor", z.imag), } } // Use EvalUnary to retain ints in real and imaginary parts. 
func (z Complex) Ceil(c Context) Complex { return Complex{ real: c.EvalUnary("ceil", z.real), imag: c.EvalUnary("ceil", z.imag), } } func (z Complex) Real() Value { return z.real } func (z Complex) Imag() Value { return z.imag } // phase a + bi = // a = 0, b = 0: 0 // a = 0, b > 0: pi/2 // a = 0, b < 0: -pi/2 // a > 0: atan(b/y) // a < 0, b >= 0: atan(b/y) + pi // a < 0, b < 0: atan(b/y) - pi func (z Complex) Phase(c Context) BigFloat { real := floatSelf(c, z.real).(BigFloat).Float imag := floatSelf(c, z.imag).(BigFloat).Float if real.Sign() == 0 { if imag.Sign() == 0 { return BigFloat{newFloat(c).Set(floatZero)} } else if imag.Sign() > 0 { return BigFloat{newFloat(c).Set(floatHalfPi)} } else { return BigFloat{newFloat(c).Set(floatMinusHalfPi)} } } slope := newFloat(c) slope.Quo(imag, real) atan := floatAtan(c, slope) if real.Sign() > 0 { return BigFloat{atan} } if imag.Sign() >= 0 { atan.Add(atan, floatPi) return BigFloat{atan} } atan.Sub(atan, floatPi) return BigFloat{atan} } // Use EvalUnary to retain int/rational in real and imaginary parts. func (z Complex) Neg(c Context) Complex { return Complex{ real: c.EvalUnary("-", z.real), imag: c.EvalUnary("-", z.imag), } } // sgn z = z / |z| // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Sign(c Context) Value { return c.EvalBinary(z, "/", z.Abs(c)) } // |a+bi| = sqrt (a² + b²) // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Abs(c Context) Value { aSq := c.EvalBinary(z.real, "*", z.real) bSq := c.EvalBinary(z.imag, "*", z.imag) sumSq := c.EvalBinary(aSq, "+", bSq) return c.EvalUnary("sqrt", sumSq) } // sqrt(z) = sqrt(|z|) * (z + |z|) / |(z + |z|)| // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Sqrt(c Context) Complex { // Avoid division by zero when imaginary part is zero. 
if !toBool(z.imag) { return sqrt(c, z.real).toType("sqrt", c.Config(), complexType).(Complex) } zMod := z.Abs(c) sqrtZMod := c.EvalUnary("sqrt", zMod) zPlusZMod := c.EvalBinary(z, "+", zMod) denom := c.EvalUnary("abs", zPlusZMod) num := c.EvalBinary(sqrtZMod, "*", zPlusZMod) return c.EvalBinary(num, "/", denom).toType("sqrt", c.Config(), complexType).(Complex) } func (z Complex) Cmp(c Context, right Complex) bool { return toBool(c.EvalBinary(z.real, "==", right.real)) && toBool(c.EvalBinary(z.imag, "==", right.imag)) } // (a+bi) + (c+di) = (a+c) + (b+d)i // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Add(c Context, right Complex) Complex { return Complex{ real: c.EvalBinary(z.real, "+", right.real), imag: c.EvalBinary(z.imag, "+", right.imag), } } // (a+bi) - (c+di) = (a-c) + (b-d)i // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Sub(c Context, right Complex) Complex { return Complex{ real: c.EvalBinary(z.real, "-", right.real), imag: c.EvalBinary(z.imag, "-", right.imag), } } // (a+bi) * (c+di) = (ab - bd) + (ad - bc)i // Use EvalBinary to retain int/rational in real and imaginary parts. func (z Complex) Mul(c Context, right Complex) Complex { ac := c.EvalBinary(z.real, "*", right.real) bd := c.EvalBinary(z.imag, "*", right.imag) ad := c.EvalBinary(z.real, "*", right.imag) bc := c.EvalBinary(z.imag, "*", right.real) return Complex{ real: c.EvalBinary(ac, "-", bd), imag: c.EvalBinary(ad, "+", bc), } } // (a+bi) / (c+di) = (ac + bd)/(c² + d²) + ((bc - ad)/(c² + d²))i // Use EvalBinary to retain int/rational in real and imaginary parts. 
func (z Complex) Quo(c Context, right Complex) Complex { ac := c.EvalBinary(z.real, "*", right.real) bd := c.EvalBinary(z.imag, "*", right.imag) ad := c.EvalBinary(z.real, "*", right.imag) bc := c.EvalBinary(z.imag, "*", right.real) realNum := c.EvalBinary(ac, "+", bd) imagNum := c.EvalBinary(bc, "-", ad) cSq := c.EvalBinary(right.real, "*", right.real) dSq := c.EvalBinary(right.imag, "*", right.imag) denom := c.EvalBinary(cSq, "+", dSq) return Complex{ real: c.EvalBinary(realNum, "/", denom), imag: c.EvalBinary(imagNum, "/", denom), } } // log z = log |z| + (arg z) i func (z Complex) Log(c Context) Complex { return Complex{ real: logn(c, z.Abs(c)), imag: z.Phase(c), } } // z log y = log y / log z func (z Complex) LogBaseU(c Context, right Complex) Complex { return right.Log(c).Quo(c, z.Log(c)) } // exp(a+bi) = (exp(a) * cos b) + (exp(a) *sin b) i func (z Complex) Exp(c Context) Complex { cosB := floatCos(c, floatSelf(c, z.imag).(BigFloat).Float) sinB := floatSin(c, floatSelf(c, z.imag).(BigFloat).Float) expA := floatPower(c, BigFloat{floatE}, floatSelf(c, z.real).(BigFloat)) return Complex{ real: BigFloat{newFloat(c).Mul(cosB, expA)}, imag: BigFloat{newFloat(c).Mul(sinB, expA)}, } } // z**y = exp(y * log z) func (z Complex) Pow(c Context, right Complex) Complex { return z.Log(c).Mul(c, right).Exp(c) } // sin(a + bi) = sin(a)*cosh(b) + i*cos(a)*sinh(b) func (z Complex) Sin(c Context) Complex { sinA := floatSin(c, floatSelf(c, z.real).(BigFloat).Float) coshB := floatCosh(c, floatSelf(c, z.imag).(BigFloat).Float) cosA := floatCos(c, floatSelf(c, z.real).(BigFloat).Float) sinhB := floatSinh(c, floatSelf(c, z.imag).(BigFloat).Float) return Complex{ real: BigFloat{newFloat(c).Mul(sinA, coshB)}, imag: BigFloat{newFloat(c).Mul(cosA, sinhB)}, } } // cos(a + bi) = cos(a)*cosh(b) - i*sin(a)*sinh(b) func (z Complex) Cos(c Context) Complex { cosA := floatCos(c, floatSelf(c, z.real).(BigFloat).Float) coshB := floatCosh(c, floatSelf(c, z.imag).(BigFloat).Float) sinA := 
floatSin(c, floatSelf(c, z.real).(BigFloat).Float) sinhB := floatSinh(c, floatSelf(c, z.imag).(BigFloat).Float) return Complex{ real: BigFloat{newFloat(c).Mul(cosA, coshB)}, imag: BigFloat{newFloat(c).Neg(newFloat(c).Mul(sinA, sinhB))}, } } // tan(a + bi) = (sin(2a) + i*sinh(2b))/(cos(2a) + cosh(2b)) func (z Complex) Tan(c Context) Complex { twoA := newFloat(c).Mul(floatSelf(c, z.real).(BigFloat).Float, floatTwo) twoB := newFloat(c).Mul(floatSelf(c, z.imag).(BigFloat).Float, floatTwo) denom := newFloat(c).Add(floatCos(c, twoA), floatCosh(c, twoB)) return Complex{ real: BigFloat{newFloat(c).Quo(floatSin(c, twoA), denom)}, imag: BigFloat{newFloat(c).Quo(floatSinh(c, twoB), denom)}, } } // asin(z) = i log (sqrt(1 - z²) - iz) func (z Complex) Asin(c Context) Complex { sqrt := complexOne.Sub(c, z.Mul(c, z)).Sqrt(c) return sqrt.Sub(c, complexI.Mul(c, z)).Log(c).Mul(c, complexI) } // acos(z) = log(z + i * sqrt(1 - z²))/i func (z Complex) Acos(c Context) Complex { sqrt := complexOne.Sub(c, z.Pow(c, complexTwo)).Sqrt(c) return sqrt.Mul(c, complexI).Add(c, z).Log(c).Quo(c, complexI) } // atan(z) = log((i - z)/(i + z))/2i func (z Complex) Atan(c Context) Complex { a := floatSelf(c, z.real).(BigFloat).Float b := floatSelf(c, z.imag).(BigFloat).Float if a.Cmp(floatZero) == 0 && b.Cmp(floatOne) == 0 { Errorf("inverse tangent of 0j1") } if a.Cmp(floatZero) == 0 && b.Cmp(floatMinusOne) == 0 { Errorf("inverse tangent of 0j-1") } return complexI.Sub(c, z).Quo(c, complexI.Add(c, z)).Log(c).Quo(c, complexTwoI) } // sinh(a + bi) = sinh(a)cos(b) + i * cosh(a)sin(b) func (z Complex) Sinh(c Context) Complex { a := floatSelf(c, z.real).(BigFloat).Float b := floatSelf(c, z.imag).(BigFloat).Float return Complex{ real: BigFloat{newFloat(c).Mul(floatSinh(c, a), floatCos(c, b))}, imag: BigFloat{newFloat(c).Mul(floatCosh(c, a), floatSin(c, b))}, } } // cosh(a + bi) = cosh(a)cos(b) + i * sinh(a)sin(b) func (z Complex) Cosh(c Context) Complex { a := floatSelf(c, z.real).(BigFloat).Float b := 
floatSelf(c, z.imag).(BigFloat).Float return Complex{ real: BigFloat{newFloat(c).Mul(floatCosh(c, a), floatCos(c, b))}, imag: BigFloat{newFloat(c).Mul(floatSinh(c, a), floatSin(c, b))}, } } // tanh(z) = sinh(z)/cosh(z) func (z Complex) Tanh(c Context) Complex { return z.Sinh(c).Quo(c, z.Cosh(c)) } // asinh(z) = log(z + sqrt(z²+1)) func (z Complex) Asinh(c Context) Complex { return complexOne.Add(c, z.Pow(c, complexTwo)).Sqrt(c).Add(c, z).Log(c) } // acosh(z) = log(z + sqrt(z+1) * sqrt(z-1)) func (z Complex) Acosh(c Context) Complex { sqrtZAdd1 := z.Add(c, complexOne).Sqrt(c) sqrtZSub1 := z.Sub(c, complexOne).Sqrt(c) return z.Add(c, sqrtZAdd1.Mul(c, sqrtZSub1)).Log(c) } // atanh(z) = log((1+z)/(1-z))/2 func (z Complex) Atanh(c Context) Complex { onePlusZ := complexOne.Add(c, z) oneMinusZ := complexOne.Sub(c, z) return onePlusZ.Quo(c, oneMinusZ).Log(c).Quo(c, complexTwo) }
value/complex.go
0.83471
0.540196
complex.go
starcoder
package geolocate import "math" // Point represents a Physical Point in geographic notation [lat, lng] // The different possible LocationTypes are documented here: // https://developers.google.com/maps/documentation/geocoding/intro#Results // It can for example be "ROOFTOP". type Point struct { Lat float64 Lng float64 Address string LocationType string } const ( // EarthRadius is about 6,371km according to Wikipedia EarthRadius = 6371 ) // NewPoint returns a new Point func NewPoint(lat, lng float64) *Point { return &Point{Lat: lat, Lng: lng} } // PointAtDistanceAndBearing returns a Point populated with the lat and lng coordinates // by transposing the origin point the passed in distance (in kilometers) // by the passed in compass bearing (in degrees). // Original Implementation from: http://www.movable-type.co.uk/scripts/latlong.html func (p *Point) PointAtDistanceAndBearing(dist float64, bearing float64) *Point { dr := dist / EarthRadius bearing = (bearing * (math.Pi / 180.0)) lat1 := (p.Lat * (math.Pi / 180.0)) lng1 := (p.Lng * (math.Pi / 180.0)) lat2part1 := math.Sin(lat1) * math.Cos(dr) lat2part2 := math.Cos(lat1) * math.Sin(dr) * math.Cos(bearing) lat2 := math.Asin(lat2part1 + lat2part2) lng2part1 := math.Sin(bearing) * math.Sin(dr) * math.Cos(lat1) lng2part2 := math.Cos(dr) - (math.Sin(lat1) * math.Sin(lat2)) lng2 := lng1 + math.Atan2(lng2part1, lng2part2) lng2 = math.Mod((lng2+3*math.Pi), (2*math.Pi)) - math.Pi lat2 = lat2 * (180.0 / math.Pi) lng2 = lng2 * (180.0 / math.Pi) return &Point{Lat: lat2, Lng: lng2} } // GreatCircleDistance calculates the Haversine distance between two points in kilometers. 
// Original Implementation from: http://www.movable-type.co.uk/scripts/latlong.html func (p *Point) GreatCircleDistance(p2 *Point) float64 { dLat := (p2.Lat - p.Lat) * (math.Pi / 180.0) dLon := (p2.Lng - p.Lng) * (math.Pi / 180.0) lat1 := p.Lat * (math.Pi / 180.0) lat2 := p2.Lat * (math.Pi / 180.0) a1 := math.Sin(dLat/2) * math.Sin(dLat/2) a2 := math.Sin(dLon/2) * math.Sin(dLon/2) * math.Cos(lat1) * math.Cos(lat2) a := a1 + a2 c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a)) return EarthRadius * c } // BearingTo calculates the initial bearing (sometimes referred to as forward azimuth) // Original Implementation from: http://www.movable-type.co.uk/scripts/latlong.html func (p *Point) BearingTo(p2 *Point) float64 { dLon := (p2.Lng - p.Lng) * math.Pi / 180.0 lat1 := p.Lat * math.Pi / 180.0 lat2 := p2.Lat * math.Pi / 180.0 y := math.Sin(dLon) * math.Cos(lat2) x := math.Cos(lat1)*math.Sin(lat2) - math.Sin(lat1)*math.Cos(lat2)*math.Cos(dLon) bearing := math.Atan2(y, x) * 180.0 / math.Pi return bearing } // MidpointTo calculates the midpoint between point `p` and the supplied point. // Original implementation from http://www.movable-type.co.uk/scripts/latlong.html func (p *Point) MidpointTo(p2 *Point) *Point { lat1 := p.Lat * math.Pi / 180.0 lat2 := p2.Lat * math.Pi / 180.0 lon1 := p.Lng * math.Pi / 180.0 dLon := (p2.Lng - p.Lng) * math.Pi / 180.0 bx := math.Cos(lat2) * math.Cos(dLon) by := math.Cos(lat2) * math.Sin(dLon) lat3Rad := math.Atan2( math.Sin(lat1)+math.Sin(lat2), math.Sqrt(math.Pow(math.Cos(lat1)+bx, 2)+math.Pow(by, 2)), ) lon3Rad := lon1 + math.Atan2(by, math.Cos(lat1)+bx) return &Point{ Lat: lat3Rad * 180.0 / math.Pi, Lng: lon3Rad * 180.0 / math.Pi, } }
point.go
0.910416
0.722025
point.go
starcoder
package main import ( "github.com/google/gxui" "github.com/google/gxui/drivers/gl" "github.com/google/gxui/math" "github.com/google/gxui/themes/dark" omath "math" "time" ) //Two pendulums animated //Top: Mathematical pendulum with small-angle approxmiation (not appropiate with PHI_ZERO=pi/2) //Bottom: Simulated with differential equation phi'' = g/l * sin(phi) const ( ANIMATION_WIDTH int = 480 ANIMATION_HEIGHT int = 320 BALL_RADIUS float32 = 25.0 METER_PER_PIXEL float64 = 1.0 / 20.0 PHI_ZERO float64 = omath.Pi * 0.5 ) var ( l float64 = float64(ANIMATION_HEIGHT) * 0.5 freq float64 = omath.Sqrt(9.81 / (l * METER_PER_PIXEL)) ) type Pendulum interface { GetPhi() float64 } type mathematicalPendulum struct { start time.Time } func (p *mathematicalPendulum) GetPhi() float64 { if (p.start == time.Time{}) { p.start = time.Now() } t := float64(time.Since(p.start).Nanoseconds()) / omath.Pow10(9) return PHI_ZERO * omath.Cos(t*freq) } type numericalPendulum struct { currentPhi float64 angAcc float64 angVel float64 lastTime time.Time } func (p *numericalPendulum) GetPhi() float64 { dt := 0.0 if (p.lastTime != time.Time{}) { dt = float64(time.Since(p.lastTime).Nanoseconds()) / omath.Pow10(9) } p.lastTime = time.Now() p.angAcc = -9.81 / (float64(l) * METER_PER_PIXEL) * omath.Sin(p.currentPhi) p.angVel += p.angAcc * dt p.currentPhi += p.angVel * dt return p.currentPhi } func draw(p Pendulum, canvas gxui.Canvas, x, y int) { attachment := math.Point{X: ANIMATION_WIDTH/2 + x, Y: y} phi := p.GetPhi() ball := math.Point{X: x + ANIMATION_WIDTH/2 + math.Round(float32(l*omath.Sin(phi))), Y: y + math.Round(float32(l*omath.Cos(phi)))} line := gxui.Polygon{gxui.PolygonVertex{attachment, 0}, gxui.PolygonVertex{ball, 0}} canvas.DrawLines(line, gxui.DefaultPen) m := math.Point{int(BALL_RADIUS), int(BALL_RADIUS)} rect := math.Rect{ball.Sub(m), ball.Add(m)} canvas.DrawRoundedRect(rect, BALL_RADIUS, BALL_RADIUS, BALL_RADIUS, BALL_RADIUS, gxui.TransparentPen, gxui.CreateBrush(gxui.Yellow)) } func 
appMain(driver gxui.Driver) { theme := dark.CreateTheme(driver) window := theme.CreateWindow(ANIMATION_WIDTH, 2*ANIMATION_HEIGHT, "Pendulum") window.SetBackgroundBrush(gxui.CreateBrush(gxui.Gray50)) image := theme.CreateImage() ticker := time.NewTicker(time.Millisecond * 15) pendulum := &mathematicalPendulum{} pendulum2 := &numericalPendulum{PHI_ZERO, 0.0, 0.0, time.Time{}} go func() { for range ticker.C { canvas := driver.CreateCanvas(math.Size{ANIMATION_WIDTH, 2 * ANIMATION_HEIGHT}) canvas.Clear(gxui.White) draw(pendulum, canvas, 0, 0) draw(pendulum2, canvas, 0, ANIMATION_HEIGHT) canvas.Complete() driver.Call(func() { image.SetCanvas(canvas) }) } }() window.AddChild(image) window.OnClose(ticker.Stop) window.OnClose(driver.Terminate) } func main() { gl.StartDriver(appMain) } //\Animate-a-pendulum\animate-a-pendulum.go
tasks/Animate-a-pendulum/animate-a-pendulum.go
0.589244
0.407451
animate-a-pendulum.go
starcoder
package rex import ( "github.com/g3n/engine/geometry" "github.com/g3n/engine/gls" "github.com/g3n/engine/math32" "github.com/roboticeyes/gorexfile/encoding/rexfile" ) // MeshGeometry contains the geometry information for a REX mesh datablock type MeshGeometry struct { geometry.Geometry Name string MaterialID uint64 VertexColors bool // indicates if vertex colors are set, if so, we have to use basic material } // NewMeshGeometry returns a new geometry information for the given REX mesh datablock func NewMeshGeometry(mesh rexfile.Mesh) *MeshGeometry { m := new(MeshGeometry) m.Geometry.Init() m.Name = mesh.Name m.MaterialID = mesh.MaterialID // Create buffers positions := math32.NewArrayF32(0, 0) colors := math32.NewArrayF32(0, 0) uvs := math32.NewArrayF32(0, 0) normals := math32.NewArrayF32(len(mesh.Coords)*3, len(mesh.Coords)*3) indices := math32.NewArrayU32(0, 0) tempNormals := make([]vertexNormal, len(mesh.Coords)) for _, c := range mesh.Coords { vtx := math32.NewVector3(c.X(), c.Y(), c.Z()) positions.AppendVector3(vtx) } for _, c := range mesh.Colors { vtx := math32.NewVector3(c.X(), c.Y(), c.Z()) colors.AppendVector3(vtx) } for _, uv := range mesh.TexCoords { coords := math32.NewVector2(uv.X(), uv.Y()) uvs.AppendVector2(coords) } for _, t := range mesh.Triangles { indices.Append(t.V0) indices.Append(t.V1) indices.Append(t.V2) // calculate normals per face var v0, v1, v2 math32.Vector3 var n0, n1, n2 math32.Vector3 var sub1, sub2 math32.Vector3 positions.GetVector3(int(t.V0*3), &v0) positions.GetVector3(int(t.V1*3), &v1) positions.GetVector3(int(t.V2*3), &v2) n0.CrossVectors(sub1.SubVectors(&v1, &v0), sub2.SubVectors(&v2, &v0)).Normalize() n1.CrossVectors(sub1.SubVectors(&v2, &v1), sub2.SubVectors(&v0, &v1)).Normalize() n2.CrossVectors(sub1.SubVectors(&v0, &v2), sub2.SubVectors(&v1, &v2)).Normalize() tempNormals[t.V0].normals = append(tempNormals[t.V0].normals, n0) tempNormals[t.V1].normals = append(tempNormals[t.V1].normals, n1) tempNormals[t.V2].normals = 
append(tempNormals[t.V2].normals, n2) } // calculate smooth normals for i, n := range tempNormals { var sum math32.Vector3 for _, normal := range n.normals { sum.Add(&normal) } sum.DivideScalar(float32(len(n.normals))) normals.SetVector3(i*3, sum.Normalize()) } m.SetIndices(indices) m.AddVBO(gls.NewVBO(positions).AddAttrib(gls.VertexPosition)) if len(colors) > 0 { m.AddVBO(gls.NewVBO(colors).AddAttrib(gls.VertexColor)) m.VertexColors = true } m.AddVBO(gls.NewVBO(normals).AddAttrib(gls.VertexNormal)) m.AddVBO(gls.NewVBO(uvs).AddAttrib(gls.VertexTexcoord)) return m }
loader/rex/meshgeometry.go
0.600071
0.532911
meshgeometry.go
starcoder
package plausible // AggregateQuery represents an API query for aggregate information over a period of time for a given list of metrics. // In an aggregate query, the Period field and the Metrics field are mandatory, all the others are optional. type AggregateQuery struct { // Period to consider for the aggregate query. The result will include results over this period of time. // This field is mandatory. Period TimePeriod // Metrics to be included in the aggregation result. // This field is mandatory. Metrics Metrics // Filters is a filter over properties to narrow down the aggregation results. // This field is optional. Filters Filter // ComparePreviousPeriod tells whether to include a comparison with the previous period in the query result. // This field is optional and will default to false. ComparePreviousPeriod bool } // Validate tells whether the query is valid or not. // If the query is not valid, a string explaining why the query is not valid will be returned. func (aq *AggregateQuery) Validate() (ok bool, invalidReason string) { if aq.Period.IsEmpty() { return false, "a period must be specified for an aggregate query" } if aq.Metrics.IsEmpty() { return false, "at least one metric must be specified for an aggregate query" } return true, "" } func (aq *AggregateQuery) toQueryArgs() QueryArgs { queryArgs := QueryArgs{} queryArgs.Merge(aq.Period.toQueryArgs()) queryArgs.Merge(aq.Metrics.toQueryArgs()) if !aq.Filters.IsEmpty() { queryArgs.Merge(aq.Filters.toQueryArgs()) } if aq.ComparePreviousPeriod { queryArgs.Add(QueryArg{Name: "compare", Value: "previous_period"}) } return queryArgs } // AggregateResult represents the result of an aggregate query. type AggregateResult struct { // BounceRate represents the bounce rate result for the query. // Only use this field if you included the BounceRate metric in your query. BounceRate float64 `json:"bounce_rate"` // BounceRateChange represents the bounce rate change compared to the previous period. 
// Only use this field if you included the BounceRate metric in your query and ComparePreviousPeriod was set to true. BounceRateChange float64 `json:"bounce_rate_change"` // Pageviews represents the page view result for the query. // Only use this field if you included the PageViews metric in your query. Pageviews int `json:"pageviews"` // PageviewsChange represents change in the number of pageviews compared to the previous period. // Only use this field if you included the PageViews metric in your query and ComparePreviousPeriod was set to true. PageviewsChange int `json:"pageviews_change"` // VisitDuration represents the visit duration result for the query. // Only use this field if you included the Visit metric in your query VisitDuration float64 `json:"visit_duration"` // VisitDurationChange represents the visit duration change compared to the previous period. // Only use this field if you included the Visit metric in your query and ComparePreviousPeriod was set to true. VisitDurationChange float64 `json:"visit_duration_change"` // Visitors represents the number of visitors result for the query. // Only use this field if you included the Visitors metric in your query. Visitors int `json:"visitors"` // VisitorsChange represents the change in the number of visitors compared to the previous period. // Only use this field if you included the Visitors metric in your query and ComparePreviousPeriod was set to true. 
VisitorsChange int `json:"visitors_change"` } type rawAggregateResult struct { Result *struct { BounceRate struct { Change float64 `json:"change"` Value float64 `json:"value"` } `json:"bounce_rate,omitempty"` Pageviews struct { Change int `json:"change"` Value int `json:"value"` } `json:"pageviews,omitempty"` VisitDuration struct { Change float64 `json:"change"` Value float64 `json:"value"` } `json:"visit_duration,omitempty"` Visitors struct { Change int `json:"change"` Value int `json:"value"` } `json:"visitors,omitempty"` } `json:"results,omitempty"` } func (r *rawAggregateResult) toAggregateResult() AggregateResult { var res AggregateResult if r.Result == nil { return res } res.Pageviews = r.Result.Pageviews.Value res.PageviewsChange = r.Result.Pageviews.Change res.Visitors = r.Result.Visitors.Value res.VisitorsChange = r.Result.Visitors.Change res.BounceRate = r.Result.BounceRate.Value res.BounceRateChange = r.Result.BounceRate.Change res.VisitDuration = r.Result.VisitDuration.Value res.VisitDurationChange = r.Result.VisitDuration.Change return res }
plausible/aggregate_query.go
0.931205
0.572783
aggregate_query.go
starcoder
package frdrpc import ( "errors" "fmt" "sort" "time" "github.com/lightninglabs/faraday/fiat" ) // granularityFromRPC gets a granularity enum value from a rpc request, // defaulting getting the best granularity for the period being queried. func granularityFromRPC(g Granularity, disableFiat bool, duration time.Duration) (*fiat.Granularity, error) { // If we do not need fiat prices, we can return nil granularity. if disableFiat { return nil, nil } switch g { // If granularity is not set, allow it to default to the best // granularity that we can get for the query period. case Granularity_UNKNOWN_GRANULARITY: best, err := fiat.BestGranularity(duration) if err != nil { return nil, err } return &best, nil case Granularity_MINUTE: return &fiat.GranularityMinute, nil case Granularity_FIVE_MINUTES: return &fiat.Granularity5Minute, nil case Granularity_FIFTEEN_MINUTES: return &fiat.Granularity15Minute, nil case Granularity_THIRTY_MINUTES: return &fiat.Granularity30Minute, nil case Granularity_HOUR: return &fiat.GranularityHour, nil case Granularity_SIX_HOURS: return &fiat.Granularity6Hour, nil case Granularity_TWELVE_HOURS: return &fiat.Granularity12Hour, nil case Granularity_DAY: return &fiat.GranularityDay, nil default: return nil, fmt.Errorf("unknown granularity: %v", g) } } func fiatBackendFromRPC(backend FiatBackend) (fiat.PriceBackend, error) { switch backend { case FiatBackend_UNKNOWN_FIATBACKEND: return fiat.UnknownPriceBackend, nil case FiatBackend_COINCAP: return fiat.CoinCapPriceBackend, nil case FiatBackend_COINDESK: return fiat.CoinDeskPriceBackend, nil default: return fiat.UnknownPriceBackend, fmt.Errorf("unknown fiat backend: %v", backend) } } func parseExchangeRateRequest(req *ExchangeRateRequest) ([]time.Time, fiat.PriceBackend, *fiat.Granularity, error) { if len(req.Timestamps) == 0 { return nil, fiat.UnknownPriceBackend, nil, errors.New("at least one timestamp required") } timestamps := make([]time.Time, len(req.Timestamps)) for i, timestamp := range 
req.Timestamps { timestamps[i] = time.Unix(int64(timestamp), 0) } // Sort timestamps in ascending order so that we can get the duration // we're querying over. sort.SliceStable(timestamps, func(i, j int) bool { return timestamps[i].Before(timestamps[j]) }) // Get our start and end times, these may be the same if we have a // single timestamp. start, end := timestamps[0], timestamps[len(timestamps)-1] granularity, err := granularityFromRPC( req.Granularity, false, end.Sub(start), ) if err != nil { return nil, fiat.UnknownPriceBackend, nil, err } fiatBackend, err := fiatBackendFromRPC(req.FiatBackend) if err != nil { return nil, fiat.UnknownPriceBackend, nil, err } return timestamps, fiatBackend, granularity, nil } func exchangeRateResponse(prices map[time.Time]*fiat.USDPrice) *ExchangeRateResponse { fiatVals := make([]*ExchangeRate, 0, len(prices)) for ts, price := range prices { fiatVals = append(fiatVals, &ExchangeRate{ Timestamp: uint64(ts.Unix()), BtcPrice: &BitcoinPrice{ Price: price.Price.String(), PriceTimestamp: uint64(price.Timestamp.Unix()), }, }) } return &ExchangeRateResponse{ Rates: fiatVals, } }
frdrpc/exchange_rate.go
0.725162
0.404449
exchange_rate.go
starcoder
package graphics

import (
	"math"

	"github.com/go-gl/mathgl/mgl32"
	"github.com/go-gl/mathgl/mgl64"
	"github.com/maxfish/gojira2d/pkg/utils"
)

// MinZoom is the smallest zoom factor the camera accepts.
const MinZoom float64 = 0.01

// MaxZoom is the largest zoom factor the camera accepts.
const MaxZoom float64 = 20

// Camera2D is a camera based on an orthogonal projection.
type Camera2D struct {
	x                  float64
	y                  float64
	width              float64
	halfWidth          float64
	height             float64
	halfHeight         float64
	zoom               float64
	centered           bool
	flipVertical       bool
	near               float64
	far                float64
	projectionMatrix   mgl64.Mat4
	inverseMatrix      mgl64.Mat4
	projectionMatrix32 mgl32.Mat4
	matrixDirty        bool
}

// NewCamera2D sets up an orthogonal projection camera.
func NewCamera2D(width int, height int, zoom float64) *Camera2D {
	c := &Camera2D{
		width:      float64(width),
		halfWidth:  float64(width) / 2,
		height:     float64(height),
		halfHeight: float64(height) / 2,
		zoom:       zoom,
	}
	// NOTE(review): near (2) is greater than far (-2); presumably
	// intentional for this engine's depth convention — confirm.
	c.far = -2
	c.near = 2
	c.matrixDirty = true
	c.rebuildMatrix()
	return c
}

// ProjectionMatrix returns the projection matrix of the camera.
func (c *Camera2D) ProjectionMatrix() mgl64.Mat4 {
	c.rebuildMatrix()
	return c.projectionMatrix
}

// ProjectionMatrix32 returns the projection matrix of the camera as mgl32.Mat4.
func (c *Camera2D) ProjectionMatrix32() mgl32.Mat4 {
	c.rebuildMatrix()
	return c.projectionMatrix32
}

// SetPosition sets the current position of the camera. If the camera is
// centered, the center will be moving.
func (c *Camera2D) SetPosition(x float64, y float64) {
	c.x = x
	c.y = y
	c.matrixDirty = true
}

// Translate moves the camera position by the specified amount.
func (c *Camera2D) Translate(x float64, y float64) {
	if c.flipVertical {
		y = -y
	}
	c.x += x
	c.y += y
	c.matrixDirty = true
}

// Zoom returns the current zoom level.
func (c *Camera2D) Zoom() float64 {
	return c.zoom
}

// SetZoom sets the zoom factor, clamped to [MinZoom, MaxZoom].
func (c *Camera2D) SetZoom(zoom float64) {
	c.zoom = mgl64.Clamp(zoom, MinZoom, MaxZoom)
	c.matrixDirty = true
}

// SetCentered sets the center of the camera to the center of the screen.
func (c *Camera2D) SetCentered(centered bool) {
	c.centered = centered
	c.matrixDirty = true
}

// SetFlipVertical sets the orientation of the vertical axis. Pass true to
// have a cartesian coordinate system.
func (c *Camera2D) SetFlipVertical(flip bool) {
	c.flipVertical = flip
	c.matrixDirty = true
}

// SetVisibleArea configures the camera to make the specified area completely
// visible; position and zoom are changed accordingly.
func (c *Camera2D) SetVisibleArea(x1 float32, y1 float32, x2 float32, y2 float32) {
	areaW := math.Abs(float64(x2 - x1))
	areaH := math.Abs(float64(y2 - y1))

	// Pick the zoom that fits the larger relative dimension.
	c.SetZoom(math.Min(c.width/areaW, c.height/areaH))

	x := math.Min(float64(x1), float64(x2))
	y := math.Min(float64(y1), float64(y2))
	if c.centered {
		c.SetPosition(x+areaW/2, y+areaH/2)
	} else {
		c.SetPosition(x, y)
	}
}

// rebuildMatrix recomputes the projection, inverse and float32 matrices when
// the camera state has changed since the last build.
func (c *Camera2D) rebuildMatrix() {
	if !c.matrixDirty {
		return
	}

	var left, right, top, bottom float64
	if c.centered {
		hw := c.halfWidth / c.zoom
		hh := c.halfHeight / c.zoom
		left, right = -hw, hw
		top, bottom = hh, -hh
	} else {
		right = c.width / c.zoom
		top = c.height / c.zoom
	}

	// Shift the frustum by the camera position.
	left += c.x
	right += c.x
	top += c.y
	bottom += c.y

	if c.flipVertical {
		bottom, top = top, bottom
	}

	c.projectionMatrix = mgl64.Ortho(left, right, top, bottom, c.near, c.far)
	c.inverseMatrix = c.projectionMatrix.Inv()

	// Keep the float32 copy in sync with the float64 matrix.
	c.projectionMatrix32 = utils.Mat4From64to32Bits(c.projectionMatrix)
	c.matrixDirty = false
}

// ScreenToWorld converts screen coordinates to world coordinates.
func (c *Camera2D) ScreenToWorld(vec mgl64.Vec2) mgl64.Vec3 {
	if c.flipVertical {
		vec[1] = c.height - vec[1]
	}
	x := (vec[0] - c.halfWidth) / c.halfWidth
	y := (vec[1] - c.halfHeight) / c.halfHeight
	return mgl64.TransformCoordinate(mgl64.Vec3{x, y, 0}, c.inverseMatrix)
}

// WorldToScreen converts world coordinates to screen coordinates.
func (c *Camera2D) WorldToScreen(vec mgl64.Vec3) mgl64.Vec2 {
	ret := mgl64.TransformCoordinate(vec, c.projectionMatrix)
	ret[0] = ret[0]*c.halfWidth + c.halfWidth
	ret[1] = ret[1]*c.halfHeight + c.halfHeight
	if c.flipVertical {
		ret[1] = c.height - ret[1]
	}
	return mgl64.Vec2{ret[0], ret[1]}
}
pkg/graphics/camera_2d.go
0.874185
0.623148
camera_2d.go
starcoder
package resolver import "github.com/google/gapid/gapil/semantic" func implicit(lhs semantic.Type, rhs semantic.Type) bool { if lhs == semantic.AnyType { return true } return false } func assignable(lhs semantic.Type, rhs semantic.Type) bool { if isInvalid(lhs) || isInvalid(rhs) { return true // Don't snowball errors. } if isVoid(lhs) || isVoid(rhs) { return false } if equal(lhs, rhs) { return true } if implicit(lhs, rhs) { return true } if implicit(rhs, lhs) { return true } return false } func comparable(lhs semantic.Type, rhs semantic.Type) bool { if isVoid(lhs) || isVoid(rhs) { return false } if equal(lhs, rhs) { return true } if implicit(lhs, rhs) { return true } return implicit(rhs, lhs) } func equal(lhs semantic.Type, rhs semantic.Type) bool { return lhs == rhs } func isNumber(t semantic.Type) bool { switch t { case semantic.IntType, semantic.UintType, semantic.Int8Type, semantic.Uint8Type, semantic.Int16Type, semantic.Uint16Type, semantic.Int32Type, semantic.Uint32Type, semantic.Int64Type, semantic.Uint64Type, semantic.Float32Type, semantic.Float64Type, semantic.SizeType: return true default: return false } } func isInteger(t semantic.Type) bool { switch t { case semantic.IntType, semantic.UintType, semantic.Int8Type, semantic.Uint8Type, semantic.Int16Type, semantic.Uint16Type, semantic.Int32Type, semantic.Uint32Type, semantic.Int64Type, semantic.Uint64Type, semantic.SizeType: return true default: return false } } func isUnsignedInteger(t semantic.Type) bool { switch t { case semantic.UintType, semantic.Uint8Type, semantic.Uint16Type, semantic.Uint32Type, semantic.Uint64Type, semantic.SizeType: return true default: return false } } func castable(from semantic.Type, to semantic.Type) bool { fromBase := semantic.Underlying(from) toBase := semantic.Underlying(to) if assignable(toBase, fromBase) { return true } _, fromIsEnum := fromBase.(*semantic.Enum) _, toIsEnum := toBase.(*semantic.Enum) fromIsNumber, toIsNumber := isNumber(fromBase), isNumber(toBase) if 
fromIsEnum && toIsEnum { return true // enum -> enum } if fromIsEnum && toIsNumber { return true // enum -> number } if fromIsNumber && toIsEnum { return true // number -> enum } _, fromIsPointer := fromBase.(*semantic.Pointer) if fromIsPointer && toIsNumber { return true // pointer -> number } if fromIsNumber && toIsNumber { return true // any numeric conversion } fromIsBool, toIsBool := fromBase == semantic.BoolType, toBase == semantic.BoolType if fromIsBool && toIsNumber { return true // bool -> number } if fromIsNumber && toIsBool { return true // number -> bool } fromPointer, fromIsPointer := fromBase.(*semantic.Pointer) toPointer, toIsPointer := toBase.(*semantic.Pointer) if fromIsPointer && toIsPointer { // A* -> B* return true } fromSlice, fromIsSlice := semantic.Underlying(from).(*semantic.Slice) toSlice, toIsSlice := semantic.Underlying(to).(*semantic.Slice) if fromIsSlice && toIsSlice { // A[] -> B[] return true } if fromIsSlice && toIsPointer && fromSlice.To == toPointer.To { // T[] -> T* return equal(fromSlice.To, toPointer.To) } if fromIsPointer && fromPointer.To == semantic.CharType && to == semantic.StringType { // char* -> string return true } if fromIsSlice && fromSlice.To == semantic.CharType && to == semantic.StringType { // char[] -> string return true } if toIsSlice && toSlice.To == semantic.CharType && from == semantic.StringType { // string -> char[] return true } return false } func isInvalid(n semantic.Node) bool { if n == semantic.InvalidType { return true } _, invalid := n.(semantic.Invalid) return invalid } func isVoid(t semantic.Type) bool { return semantic.Underlying(t) == semantic.VoidType } func isLegalCommandParameterType(t semantic.Type) bool { switch semantic.Underlying(t) { case semantic.AnyType: case semantic.StringType: return false } switch t.(type) { case *semantic.Slice: return false } return true }
gapil/resolver/rules.go
0.730482
0.458712
rules.go
starcoder
package model

import (
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
)

const (
	// minimumTick is the smallest unit of time the Time type resolves.
	minimumTick = time.Millisecond
	// second is the number of ticks in one second.
	second = int64(time.Second / minimumTick)
	// nanosPerTick is the number of nanoseconds in one tick.
	nanosPerTick = int64(minimumTick / time.Nanosecond)

	// Earliest is the earliest representable instant.
	Earliest = Time(math.MinInt64)
	// Latest is the latest representable instant.
	Latest = Time(math.MaxInt64)
)

// Time is an instant, counted in ticks (milliseconds) since the Unix epoch.
type Time int64

// Interval is a closed time span.
type Interval struct {
	Start, End Time
}

// Now returns the current instant as a Time.
func Now() Time {
	return TimeFromUnixNano(time.Now().UnixNano())
}

// TimeFromUnix converts a Unix timestamp in seconds to a Time.
func TimeFromUnix(t int64) Time {
	return Time(t * second)
}

// TimeFromUnixNano converts a Unix timestamp in nanoseconds to a Time.
func TimeFromUnixNano(t int64) Time {
	return Time(t / nanosPerTick)
}

// Equal reports whether t and o denote the same instant.
func (t Time) Equal(o Time) bool {
	return t == o
}

// Before reports whether t is earlier than o.
func (t Time) Before(o Time) bool {
	return t < o
}

// After reports whether t is later than o.
func (t Time) After(o Time) bool {
	return t > o
}

// Add returns t shifted forward by d.
func (t Time) Add(d time.Duration) Time {
	return t + Time(d/minimumTick)
}

// Sub returns the duration elapsed between o and t.
func (t Time) Sub(o Time) time.Duration {
	return time.Duration(t-o) * minimumTick
}

// Time converts t into a time.Time.
func (t Time) Time() time.Time {
	return time.Unix(int64(t)/second, (int64(t)%second)*nanosPerTick)
}

// Unix returns t as a Unix timestamp in seconds.
func (t Time) Unix() int64 {
	return int64(t) / second
}

// UnixNano returns t as a Unix timestamp in nanoseconds.
func (t Time) UnixNano() int64 {
	return int64(t) * nanosPerTick
}

// dotPrecision is the number of fractional digits in the string form.
var dotPrecision = int(math.Log10(float64(second)))

// String renders t as fractional seconds.
func (t Time) String() string {
	return strconv.FormatFloat(float64(t)/float64(second), 'f', -1, 64)
}

// MarshalJSON implements json.Marshaler.
func (t Time) MarshalJSON() ([]byte, error) {
	return []byte(t.String()), nil
}

// UnmarshalJSON implements json.Unmarshaler, accepting integral or
// fractional second timestamps.
func (t *Time) UnmarshalJSON(b []byte) error {
	parts := strings.Split(string(b), ".")
	switch len(parts) {
	case 1:
		// Whole seconds only.
		sec, err := strconv.ParseInt(parts[0], 10, 64)
		if err != nil {
			return err
		}
		*t = Time(sec * second)

	case 2:
		sec, err := strconv.ParseInt(parts[0], 10, 64)
		if err != nil {
			return err
		}
		sec *= second

		// Normalize the fraction to exactly dotPrecision digits:
		// truncate when too long, right-pad with zeros when short.
		frac := parts[1]
		if missing := dotPrecision - len(frac); missing < 0 {
			frac = frac[:dotPrecision]
		} else if missing > 0 {
			frac = frac + strings.Repeat("0", missing)
		}

		ticks, err := strconv.ParseInt(frac, 10, 32)
		if err != nil {
			return err
		}
		*t = Time(sec + ticks)

	default:
		return fmt.Errorf("invalid time %q", string(b))
	}
	return nil
}

// Duration wraps time.Duration with Prometheus-style string parsing.
type Duration time.Duration

// Set implements flag.Value. Note the receiver is assigned even when
// parsing fails (it is set to the zero Duration).
func (d *Duration) Set(s string) error {
	parsed, err := ParseDuration(s)
	*d = parsed
	return err
}

// Type implements pflag.Value.
func (d *Duration) Type() string {
	return "duration"
}

// durationRE matches a decimal count followed by a single supported unit.
var durationRE = regexp.MustCompile("^([0-9]+)(y|w|d|h|m|s|ms)$")

// ParseDuration parses strings such as "300ms", "2h" or "1y" into a
// Duration. Supported units: y, w, d, h, m, s, ms.
func ParseDuration(durationStr string) (Duration, error) {
	matches := durationRE.FindStringSubmatch(durationStr)
	if len(matches) != 3 {
		return 0, fmt.Errorf("not a valid duration string: %q", durationStr)
	}

	// The Atoi error is ignored: the regexp guarantees a decimal integer.
	n, _ := strconv.Atoi(matches[1])
	dur := time.Duration(n) * time.Millisecond

	switch unit := matches[2]; unit {
	case "y":
		dur *= 1000 * 60 * 60 * 24 * 365
	case "w":
		dur *= 1000 * 60 * 60 * 24 * 7
	case "d":
		dur *= 1000 * 60 * 60 * 24
	case "h":
		dur *= 1000 * 60 * 60
	case "m":
		dur *= 1000 * 60
	case "s":
		dur *= 1000
	case "ms":
		// Already in milliseconds.
	default:
		return 0, fmt.Errorf("invalid time unit in duration string: %q", unit)
	}
	return Duration(dur), nil
}

// String renders d using the largest unit that divides it evenly.
func (d Duration) String() string {
	ms := int64(time.Duration(d) / time.Millisecond)
	if ms == 0 {
		return "0s"
	}

	factors := map[string]int64{
		"y":  1000 * 60 * 60 * 24 * 365,
		"w":  1000 * 60 * 60 * 24 * 7,
		"d":  1000 * 60 * 60 * 24,
		"h":  1000 * 60 * 60,
		"m":  1000 * 60,
		"s":  1000,
		"ms": 1,
	}

	// Pick the first (largest) unit whose factor divides ms exactly.
	unit := "ms"
	switch int64(0) {
	case ms % factors["y"]:
		unit = "y"
	case ms % factors["w"]:
		unit = "w"
	case ms % factors["d"]:
		unit = "d"
	case ms % factors["h"]:
		unit = "h"
	case ms % factors["m"]:
		unit = "m"
	case ms % factors["s"]:
		unit = "s"
	}
	return fmt.Sprintf("%v%v", ms/factors[unit], unit)
}

// MarshalYAML implements yaml.Marshaler.
func (d Duration) MarshalYAML() (interface{}, error) {
	return d.String(), nil
}

// UnmarshalYAML implements yaml.Unmarshaler.
func (d *Duration) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var s string
	if err := unmarshal(&s); err != nil {
		return err
	}
	dur, err := ParseDuration(s)
	if err != nil {
		return err
	}
	*d = dur
	return nil
}
vendor/github.com/prometheus/common/model/time.go
0.671901
0.428413
time.go
starcoder
package tree

import (
	"fmt"
	"io"
	"os"
	"reflect"
	"sort"
	"strings"
)

// Tree is a generic tree.
type Tree[T any] struct {
	data     T
	parent   *Tree[T]
	children []*Tree[T]
}

// New initializes a new tree for use.
func New[T any](data T) *Tree[T] {
	return &Tree[T]{
		data: data,
	}
}

// Parent returns the parent of t.
func (t *Tree[T]) Parent() *Tree[T] {
	return t.parent
}

// Children returns the children of t.
func (t *Tree[T]) Children() []*Tree[T] {
	return t.children
}

// Child returns the child specified by selector, or nil when no child
// matches.
func (t *Tree[T]) Child(selector func(tree *Tree[T]) bool) *Tree[T] {
	for _, child := range t.children {
		if selector(child) {
			return child
		}
	}
	return nil
}

// Insert inserts tree as a child of t.
func (t *Tree[T]) Insert(tree *Tree[T]) {
	tree.parent = t
	t.children = append(t.children, tree)
}

// Remove removes tree, if it exists, from the children of t.
func (t *Tree[T]) Remove(tree *Tree[T]) {
	for i := range t.children {
		if t.children[i] == tree {
			t.children[i].parent = nil
			t.children = append(t.children[:i], t.children[i+1:]...)
			return
		}
	}
}

// Sort sorts the children of t according to less.
func (t *Tree[T]) Sort(less func(int, int) bool) {
	sort.Slice(t.children, less)
}

// Walk executes fn on each node in tree according to
// the specified search strategy.
func (t *Tree[T]) Walk(fn func(tree *Tree[T]), search SearchStrategy[T]) {
	for next := range search(t) {
		fn(next)
	}
}

// Find returns the first match in the tree according
// to the specified search strategy, or nil when nothing matches.
func (t *Tree[T]) Find(selector func(tree *Tree[T]) bool, search SearchStrategy[T]) *Tree[T] {
	for next := range search(t) {
		if selector(next) {
			return next
		}
	}
	return nil
}

// FindAll returns all matches in the tree according
// to the specified search strategy.
func (t *Tree[T]) FindAll(selector func(tree *Tree[T]) bool, search SearchStrategy[T]) []*Tree[T] {
	var out []*Tree[T]
	for next := range search(t) {
		if selector(next) {
			out = append(out, next)
		}
	}
	return out
}

// Data returns the data contained in t.
func (t *Tree[T]) Data() T {
	return t.data
}

// ParentData returns the data contained in the parent of t,
// or the zero value of T when t has no parent.
func (t *Tree[T]) ParentData() T {
	if t.parent == nil {
		return *new(T)
	}
	return t.parent.data
}

// ChildrenData returns the data contained in the children of t.
func (t *Tree[T]) ChildrenData() []T {
	return dataSlice(t.children)
}

// ChildData returns the data contained in the child specified by selector,
// or the zero value of T when no child matches.
func (t *Tree[T]) ChildData(selector func(tree *Tree[T]) bool) T {
	c := t.Child(selector)
	if c == nil {
		return *new(T)
	}
	return c.data
}

// InsertData inserts data as a child of t.
func (t *Tree[T]) InsertData(data T) {
	child := New(data)
	t.Insert(child)
}

// RemoveData removes the data, if it exists, from the children
// of t. Existence is checked by reflect.DeepEqual.
func (t *Tree[T]) RemoveData(data T) {
	for i := range t.children {
		if reflect.DeepEqual(data, t.children[i].data) {
			t.Remove(t.children[i])
		}
	}
}

// FindData returns the data contained in the first match in
// the tree according to the specified search strategy, or the
// zero value of T when nothing matches.
func (t *Tree[T]) FindData(selector func(tree *Tree[T]) bool, search SearchStrategy[T]) T {
	found := t.Find(selector, search)
	if found == nil {
		// Fix: previously this dereferenced the nil result of Find
		// and panicked; return the zero value instead, matching the
		// behavior of ChildData and ParentData.
		return *new(T)
	}
	return found.data
}

// FindAllData returns the data contained in all matches in
// the tree according to the specified search strategy.
func (t *Tree[T]) FindAllData(selector func(tree *Tree[T]) bool, search SearchStrategy[T]) []T {
	return dataSlice(t.FindAll(selector, search))
}

// Print prints t to stdout.
func (t *Tree[T]) Print() {
	t.fprint(os.Stdout, "", "", 0)
}

// PrintIndent pretty-prints t to stdout according to prefix and indent.
func (t *Tree[T]) PrintIndent(prefix, indent string) {
	t.fprint(os.Stdout, prefix, indent, 0)
}

// Fprint prints t to w.
func (t *Tree[T]) Fprint(w io.Writer) {
	t.fprint(w, "", "", 0)
}

// FprintIndent pretty-prints t to w according to prefix and indent.
func (t *Tree[T]) FprintIndent(w io.Writer, prefix, indent string) {
	t.fprint(w, prefix, indent, 0)
}

// fprint recursively writes the subtree rooted at t, one node per line,
// indented by depth.
func (t *Tree[T]) fprint(w io.Writer, prefix, indent string, level int) {
	// NOTE(review): the prefix is only applied to nodes below the root
	// (level > 0), preserving the original behavior — confirm intended.
	var p string
	if level > 0 {
		p = prefix
	}
	i := strings.Repeat(indent, level)
	fmt.Fprintf(w, "%s%s%v\n", p, i, t.data)
	for _, child := range t.children {
		child.fprint(w, prefix, indent, level+1)
	}
}

// dataSlice extracts the data field from each tree in trees.
func dataSlice[T any](trees []*Tree[T]) []T {
	var out []T
	for _, tree := range trees {
		out = append(out, tree.data)
	}
	return out
}
tree.go
0.76454
0.478529
tree.go
starcoder
package locales

import (
	"fmt"
)

// attrDesc maps a flag/attribute identifier to its help text.
var attrDesc = map[string]string{
	"json":     `Attributes are JSON format.`,
	"selector": `Filter used to select on which nodes should the automation be executed. Basic ex: @identity='{node_id}'.`,
	"run-id":   `Automation run identity.`,
	"job-id":   `Job identity.`,
	"watch":    `Keep track of the running process.`,

	"automation-id":                     `Automation identity.`,
	"automation-name":                   `Describes the template. Should be short and alphanumeric without white spaces.`,
	"automation-repository":             `Describes the place where the automation is being described. Git is the only supported repository type. Ex: https://github.com/userId0123456789/automation-test.git.`,
	"automation-repository-revision":    `Describes the repository branch.`,
	"automation-repository-credentials": `Describes the authentication when using git reposititories.`,
	"automation-timeout":                `Describes the time elapsed before a timeout is being triggered.`,
	"automation-log-level":              `Describes the level should be used when logging.`,
	"automation-debug":                  `Debug mode will not delete the temporary working directory on the instance when the automation job exists. This allows you to inspect the bundled automation artifacts, modify them and run the automation manually. Enabling debug mode for an extended period of time can exhaust your instances disk space as each automation run will leave a directory behind. Also be aware that the payload may contain secrets which are persisted to disk indefinitely when debug mode is enabled. (default false)`,
	"automation-tags":                   `"Are key value pairs. Key-value pairs are separated by ':' or '='. Following this pattern: 'key1:value1,key2=value2...'."`,
	"automation-runlist":                `Describes the sequence of recipes should be executed. Runlist is an array of strings. Array of strings are separated by ','.`,
	"automation-chef-version":           `Specifies the Chef version should be installed in case no Chef is already been installed. (default latest)`,
	"automation-attributes":             `Attributes are JSON based.`,
	"automation-attributes-from-file":   `Path to the file containing the chef attributes in JSON format. Giving a dash '-' will be read from standard input.`,
	"automation-path":                   `Path to the script`,
	"automation-argument":               `Specify a positional argument for the command. Can by specified multiple times.`,
	"automation-environment":            `Specify an environment variable (NAME=VALUE). Can by specified multiple times.`,

	"install-format": `Installation script format. Supported: linux,windows,cloud-config,json.`,
	"node-id":        `Node identity.`,
	"node-selector":  `Filter nodes. Basic ex: @identity='{node_id}'.`,
}

// errMsg maps an error identifier to its user-facing message.
var errMsg = map[string]string{
	"automation-id-missing":       "No automation identity provided.",
	"automation-selector-missing": "No automation selector given.",
	"automation-run-failed":       "Automation failed.",
	"run-id-missing":              "No automation run identity given.",
	"job-id-missing":              "No job identity provided.",
	"node-id-missing":             "No node identity provided.",
	"job-missing":                 fmt.Sprint(jobMissingDesc),
	"node-missing":                fmt.Sprint(nodeMissingDesc),
	"flag-missing":                "Please make sure to provide following flags: ",
}

// cmdShortDescription maps a command identifier to its one-line summary.
var cmdShortDescription = map[string]string{
	"arc":                 "Remote job execution framework.",
	"arc-node-install":    "Retrieves the script used to install arc nodes on instances. User authentication flags are mandatory.",
	"arc-node-list":       "List all nodes.",
	"arc-node-show":       "Shows an especific node.",
	"arc-node-delete":     "Deletes an especific node.",
	"arc-node-tag":        "Node tags.",
	"arc-node-tag-list":   "List all tags from an especific node.",
	"arc-node-tag-add":    "Add tags to a given node.",
	"arc-node-tag-delete": "Deletes tags from a given node.",
	"arc-node-fact":       "Node facts.",
	"arc-node-fact-list":  "List all facts from an especific node.",

	"authenticate": "Get an authentication token and endpoints for the automation and arc service.",

	"automation-create-chef":            "Create a new chef automation.",
	"automation-create-script":          "Create a new script automation.",
	"automation-create":                 "Create a new automation.",
	"automation-execute":                "Runs an existing automation",
	"automation-list":                   "List all available automations",
	"automation-show":                   "Show a specific automation",
	"automation-delete":                 "Deletes a specific automation.",
	"automation-update-chef-attributes": "Updates chef attributes",
	"automation-update-chef-runlist":    "Updates chef runlist",
	"automation-update-chef":            "Updates a chef automation",
	"automation-update":                 "Updates an existing automation",
	"automation":                        "Automation service.",

	"bash-completion": "Generate completions for bash",

	"job-list": "List all jobs",
	"job-log":  "Shows job log",
	"job-show": "Shows an especific job",
	"job":      "Automation job service.",

	"root": "Automation service CLI",

	"run-list": "List all automation runs",
	"run-show": "Show a specific automation run",
	"run":      "Automation run service.",

	"version": "Show program's version number and exit.",
}

// cmdLongDescription maps a command identifier to its extended help text.
var cmdLongDescription = map[string]string{
	"bash-completion":                   `Add $(lyra bash-completion) to your .bashrc to enable tab completion for lyra`,
	"root":                              `Execute ad-hoc jobs using scripts, Chef and Ansible to configure machines and install the open source IaC service into any other OpenStack.`,
	"arc-node-delete":                   "Deletes an especific node. \nThis will just delete the entry in the data base. For a permanent deletion you have to remove the node itself from the instance.",
	"arc-node-tag-add":                  fmt.Sprint(nodeTagAddCmdLongDescription),
	"arc-node-tag-delete":               fmt.Sprint(nodeTagDeleteCmdLongDescription),
	"automation-update-chef-attributes": fmt.Sprint(automationUpdateChefAttributesLongDescription),
	"automation-update-chef-runlist":    fmt.Sprint(automationUpdateChefRunlistLongDescription),
}

// AttributeDescription returns the help text for the given attribute id,
// or the empty string when unknown.
func AttributeDescription(id string) string {
	return attrDesc[id]
}

// ErrorMessages returns the error message for the given id, or the empty
// string when unknown.
func ErrorMessages(id string) string {
	return errMsg[id]
}

// CmdShortDescription returns the short description for the given command
// id, or the empty string when unknown.
func CmdShortDescription(id string) string {
	return cmdShortDescription[id]
}

// CmdLongDescription returns the long description for the given command id,
// or the empty string when unknown.
func CmdLongDescription(id string) string {
	return cmdLongDescription[id]
}

var automationUpdateChefAttributesLongDescription = fmt.Sprint(CmdShortDescription("automation-update-chef-attributes"), "\n\n", `Example: lyra automation update chef attributes --automation-id=34 --attributes='{"test":"test2"}'`)

var automationUpdateChefRunlistLongDescription = fmt.Sprint(CmdShortDescription("automation-update-chef-runlist"), "\n\n", `Example: lyra automation update chef runlist --automation-id=34 --runlist='recipe[nginx::default],role[staging]'`)

var nodeTagDeleteCmdLongDescription = `Deletes tags from a given node. Add the keys from the desired tags as command arguments. Example: lyra node tag delete --node-id 123456789 pool name plan" `

var nodeTagAddCmdLongDescription = `Add tags to a given node. Tags are key value pairs separated by the first "=" or ":" and added as command arguments. When using spacial characters use quotations. Example: lyra node tag add --node-id 123456789 pool:green name=db "plan=test new"`

var jobMissingDesc = `Job not found. Note: - Check if the job identity matches. - Jobs older than 30 days will be removed from the system. Check when the automation run is created by running following command: lyra-cli run show --run-id={run_id} `

var nodeMissingDesc = `Node not found. Note: - Check if the node identity matches. `
locales/locales.go
0.609524
0.475301
locales.go
starcoder
package value // String holds a single string value. type String struct { valPtr *string } // NewString makes a new String with the given string value. func NewString(val string) *String { valPtr := new(string) *valPtr = val return &String{valPtr: valPtr} } // NewStringFromPtr makes a new String with the given pointer to string value. func NewStringFromPtr(valPtr *string) *String { return &String{valPtr: valPtr} } // Set changes the string value. func (v *String) Set(val string) { *v.valPtr = val } // Type return TypeString. func (v *String) Type() Type { return TypeString } // IsSlice returns false. func (v *String) IsSlice() bool { return false } // Clone produce a clone that is identical except for the backing pointer. func (v *String) Clone() Value { return NewString(*v.valPtr) } // Parse sets the value from the given string. func (v *String) Parse(str string) error { *v.valPtr = str return nil } // ValuePointer returns the pointer for value storage. func (v *String) ValuePointer() interface{} { return v.valPtr } // Value returns the string value. func (v *String) Value() interface{} { return *v.valPtr } // Equal returns checks if type and value of the given single are equal. func (v *String) Equal(v2 Single) (bool, error) { if err := CheckType(TypeString, v2.Type()); err != nil { return false, err } return *v.valPtr == v2.Value().(string), nil } // Greater checks if the current value is greater than the given. // Returns non-nil error if types do not match. func (v *String) Greater(v2 Single) (bool, error) { if err := CheckType(TypeString, v2.Type()); err != nil { return false, err } return *v.valPtr > v2.Value().(string), nil } // GreaterEqual checks if the current value is greater or equal to the given. // Returns non-nil error if types do not match. 
func (v *String) GreaterEqual(v2 Single) (bool, error) { if err := CheckType(TypeString, v2.Type()); err != nil { return false, err } return *v.valPtr >= v2.Value().(string), nil } // Less checks if the current value is less than the given. // Returns non-nil error if types do not match. func (v *String) Less(v2 Single) (bool, error) { if err := CheckType(TypeString, v2.Type()); err != nil { return false, err } return *v.valPtr < v2.Value().(string), nil } // LessEqual checks if the current value is less or equal to the given. // Returns non-nil error if types do not match. func (v *String) LessEqual(v2 Single) (bool, error) { if err := CheckType(TypeString, v2.Type()); err != nil { return false, err } return *v.valPtr <= v2.Value().(string), nil } // OneOf checks if the current value is one of the given. // Returns non-nil error if types do not match. func (v *String) OneOf(v2 Slice) (bool, error) { return v2.Contains(v) }
value/string.go
0.85183
0.472562
string.go
starcoder
package psarf

import (
	"time"
)

const (
	// These are the Acceleration values used in the standard Psar formula.
	AFIncrement = 0.02
	AFMax       = 0.20
)

// before is a utility func that checks if time a is before time b.
// Note, hours are reset (this is daily only).
// FIXME allow this to be used with other timeframes
func before(a, b time.Time) bool {
	a = a.Truncate(24 * time.Hour)
	b = b.Truncate(24 * time.Hour)
	return a.Before(b)
}

// Psar is an iterative style structure that calculates Psar values for a
// given set of chart data.
type Psar struct {
	// Series is the dataset used to calculate the Psar values. The first
	// bar in the series is the base Psar value (usually this is the
	// nearest pivot low prior to the Psar reversal).
	Series []ChartBar

	// StartDate is the date desired to start the Parabolic Sar
	// calculations. THIS IS NOT TO BE CONFUSED WITH THE FIRST BAR IN THE
	// SERIES (DATA). (For my use case this would also be the Entry bar
	// of my position.) In the traditional indicator, one Sar being
	// touched begins the Sar in the other direction. But as this is not
	// being directly used a trend indicator a Start date is used to as
	// the trigger. This is also the bar it derives the initial Extreme
	// Price from.
	StartDate *time.Time

	// psarSeries holds the calculated Psar values for each bar in the
	// series.
	psarSeries []*PsarPeriod

	// pipOffset is a value to offset the sar for the initial Sar and any
	// Sar that is truncated by the lowest last 2 bars rule.
	// NOTE this is not part of the psar formula, but is for my own
	// personal use.
	pipOffset float64

	// i is an internal iteration counter (no slicing here, we need to be
	// able to go back in history).
	i int
}

// SetPipOffset sets the pipOffset value.
func (p *Psar) SetPipOffset(v float64) {
	p.pipOffset = v
}

// Bar returns the current PsarPeriod (within the iteration).
func (p *Psar) Bar() *PsarPeriod {
	return p.psarSeries[p.i-1]
}

// isEntryBar returns true if the current bar in the iteration's Date is
// equal to that of the given StartDate.
// FIXME allow this to be used with other timeframes
func (p *Psar) isEntryBar(i int) bool {
	a := p.Series[i].Date()
	b := p.StartDate
	return a.Truncate(24 * time.Hour).Equal(b.Truncate(24 * time.Hour))
}

// calculatePsar calculates the Psar for the given period in the series.
// This must be called each time the Psar is iterated through.
func (p *Psar) calculatePsar() {
	var (
		i        = p.i
		chartBar = p.Series[i]
		extLow   = chartBar.Low()
		ep       = chartBar.High()
		af       = AFIncrement
		sar      float64
	)

	if i > 0 {
		prevbp := p.psarSeries[i-1]
		af = prevbp.AF

		// FIXME write unit test for this
		if ep > prevbp.EP {
			af += AFIncrement
		} else {
			ep = prevbp.EP
		}

		// the sar
		sar = prevbp.Sar + prevbp.AFSarEP

		// get the lowest low (long) of the last 2 bars. This is a
		// cache for performance purposes.
		// FIXME pretty sure this can be reduced to not have to go
		// back 2 bars
		l1 := prevbp.Low()
		extLow = l1
		if i-2 >= 0 {
			if l2 := p.psarSeries[i-2].Low(); l2 < l1 {
				extLow = l2
			}
		}
	}

	// Set the initial sar and af only on the entry bar. Calculate in any
	// additional pipOffset if applicable.
	if p.isEntryBar(i) {
		af, sar = AFIncrement, p.Series[0].Low()-p.pipOffset
	}

	// "Reset" af and sar values for bars before the start date. Any bar
	// between the first bar in the series and the start bar is defaulted
	// to the first bar in the series. They are not used in the
	// calculation other than using their lows as part of the low of the
	// last 2 bars values.
	if before(*chartBar.Date(), *p.StartDate) {
		af, sar = 0.0, p.Series[0].Low()
	}

	// Check that the sar does not exceed (long) the lowest low of the
	// last 2 bars. *Tricky one, not explained in many of the Psar
	// explanations.*
	if sar > extLow {
		sar = extLow - p.pipOffset
	}

	// The acceleration factor should not exceed the max af (0.20).
	// TODO needs a unit test
	if af > AFMax {
		af = AFMax
	}

	sarEp := ep - sar
	afSarEp := af * sarEp

	p.psarSeries = append(p.psarSeries, &PsarPeriod{
		ChartBar: chartBar,
		EP:       ep,
		AF:       af,
		Sar:      sar,
		SarEP:    sarEp,
		AFSarEP:  afSarEp,
		extLow:   extLow,
	})
}

// Next increments the index counter, calculating the Psar for the new bar.
// It returns false when the series is nil, empty or exhausted.
func (p *Psar) Next() bool {
	if p.Series == nil {
		return false
	}

	// Check if the series is empty or at the end.
	if n := len(p.Series); n == 0 || p.i > n-1 {
		return false
	}

	// Calculate and count.
	p.calculatePsar()
	p.i++
	return true
}

// Step "next"s for the number of j from its current index i. This is
// primarily just for QOL and unit testing purposes.
func (p *Psar) Step(j int) {
	j = p.i + j
	// FIXME check if j exceeds the series count and truncate to the
	// series length
	// FIXME per the above, should this return an error to notifiy of
	// exceeded lengths?
	for p.i < j {
		p.Next()
	}
}

// NextSession returns a PsarBar for "tomorrow". This is for a future value
// calculation. It should be noted the Psar you get for "today" is the Psar
// calculated on data from "yesterday". So at the EOD you can calculate
// tomorrows Psar before the market opens.
// NOTE this is the "next" bar not necessarily the "future" bar. It will be
// the next session from the current iteration index.
func (p *Psar) NextSession() (*PsarPeriod, error) {
	prevpb := p.psarSeries[p.i-1]

	// Calculate the sar only here (this is why we don't call calculate
	// as we don't have any of the bar data for "tomorrow").
	// TODO is there a way to integrate this into the calculation as a
	// whole; the extreme low of last 2 days was missed intially
	sar := prevpb.Sar + prevpb.AFSarEP

	// Must always observe the lowest 2 lows rule.
	if sar > prevpb.extLow {
		sar = prevpb.extLow - p.pipOffset
	}

	return &PsarPeriod{Sar: sar}, nil
}
Psar.go
0.526586
0.584212
Psar.go
starcoder
package xtime

import (
	"strconv"
	"time"
)

// CreateFromTimestamp creates a Xtime instance from a given timestamp;
// second, millisecond, microsecond and nanosecond precision are supported.
func (c Xtime) CreateFromTimestamp(timestamp int64, timezone ...string) Xtime {
	if len(timezone) > 0 {
		c.loc, c.Error = getLocationByTimezone(timezone[len(timezone)-1])
	}
	if c.Error != nil {
		return c
	}

	// Infer the precision from the number of decimal digits (sign
	// excluded) and scale the value down to whole seconds.
	ts, digits := timestamp, len(strconv.FormatInt(timestamp, 10))
	if timestamp < 0 {
		digits -= 1
	}
	switch digits {
	case 10: // seconds
		ts = timestamp
	case 13: // milliseconds
		ts = timestamp / 1e3
	case 16: // microseconds
		ts = timestamp / 1e6
	case 19: // nanoseconds
		ts = timestamp / 1e9
	}
	c.time = time.Unix(ts, 0)
	return c
}

// CreateFromTimestamp creates a Xtime instance from a given timestamp.
func CreateFromTimestamp(timestamp int64, timezone ...string) Xtime {
	return NewXtime().CreateFromTimestamp(timestamp, timezone...)
}

// CreateFromDateTime creates a Xtime instance from a given date and time.
func (c Xtime) CreateFromDateTime(year int, month int, day int, hour int, minute int, second int, timezone ...string) Xtime {
	if len(timezone) > 0 {
		c.loc, c.Error = getLocationByTimezone(timezone[len(timezone)-1])
	}
	if c.Error != nil {
		return c
	}
	c.time = time.Date(year, time.Month(month), day, hour, minute, second, time.Now().Nanosecond(), c.loc)
	return c
}

// CreateFromDateTime creates a Xtime instance from a given date and time.
func CreateFromDateTime(year int, month int, day int, hour int, minute int, second int, timezone ...string) Xtime {
	return NewXtime().CreateFromDateTime(year, month, day, hour, minute, second, timezone...)
}

// CreateFromDate creates a Xtime instance from a given date; the time of day
// is taken from the current clock in the instance's location.
func (c Xtime) CreateFromDate(year int, month int, day int, timezone ...string) Xtime {
	if len(timezone) > 0 {
		c.loc, c.Error = getLocationByTimezone(timezone[len(timezone)-1])
	}
	if c.Error != nil {
		return c
	}
	hour, minute, second := time.Now().In(c.loc).Clock()
	c.time = time.Date(year, time.Month(month), day, hour, minute, second, time.Now().Nanosecond(), c.loc)
	return c
}

// CreateFromDate creates a Xtime instance from a given date.
func CreateFromDate(year int, month int, day int, timezone ...string) Xtime {
	return NewXtime().CreateFromDate(year, month, day, timezone...)
}

// CreateFromTime creates a Xtime instance from a given time of day; the date
// is taken from the current day in the instance's location.
func (c Xtime) CreateFromTime(hour int, minute int, second int, timezone ...string) Xtime {
	if len(timezone) > 0 {
		c.loc, c.Error = getLocationByTimezone(timezone[len(timezone)-1])
	}
	if c.Error != nil {
		return c
	}
	year, month, day := time.Now().In(c.loc).Date()
	c.time = time.Date(year, month, day, hour, minute, second, time.Now().Nanosecond(), c.loc)
	return c
}

// CreateFromTime creates a Xtime instance from a given time of day.
func CreateFromTime(hour int, minute int, second int, timezone ...string) Xtime {
	return NewXtime().CreateFromTime(hour, minute, second, timezone...)
}
xtime/creator.go
0.64713
0.502136
creator.go
starcoder
package functional // Curry accepts a function that receives a parameter and the value of the parameter // and returns a function that accepts no parameters but returns the result of // applying the function to the given parameter. func Curry[T, R any](f func(T) R, input T) func() R { return func() R { return f(input) } } // Curry_0 accepts a function that accepts one parameter and the value of the parameter // and returns a function that accepts no parameters. The function must have no return value. func Curry_0[T any](f func(T), input T) func() { return func() { f(input) } } // Curry_2 accepts a function that accepts one parameter and the value of the parameter // and returns a function that accepts no parameters. The function must have 2 return values. func Curry_2[T, R1, R2 any](f func(T) (R1, R2), input T) func() (R1, R2) { return func() (R1, R2) { return f(input) } } // Curry2 accepts a function that receives two parameters and the values of those parameters // and returns a function that accepts no parameters but returns the result of // applying the function to the given parameters. func Curry2[T1, T2, R any](f func(T1, T2) R, input1 T1, input2 T2) func() R { return func() R { return f(input1, input2) } } // Curry2_0 accepts a function that receives two parameters and the values of those parameters // and returns a function that accepts no parameters. The function must have no return value. func Curry2_0[T1, T2 any](f func(T1, T2), input1 T1, input2 T2) func() { return func() { f(input1, input2) } } // Curry2_2 accepts a function that receives two parameters and the values of those parameters // and returns a function that accepts no parameters but returns the result of // applying the function to the given parameters. The function must have 2 return values. 
func Curry2_2[T1, T2, R1, R2 any](f func(T1, T2) (R1, R2), input1 T1, input2 T2) func() (R1, R2) { return func() (R1, R2) { return f(input1, input2) } } // Curry3 accepts a function that receives three parameters and the values of those parameters // and returns a function that accepts no parameters but returns the result of // applying the function to the given parameters. func Curry3[T1, T2, T3, R any](f func(T1, T2, T3) R, input1 T1, input2 T2, input3 T3) func() R { return func() R { return f(input1, input2, input3) } } // Curry3_0 accepts a function that receives three parameters and the values of those parameters // and returns a function that accepts no parameters. The function must have no return value. func Curry3_0[T1, T2, T3 any](f func(T1, T2, T3), input1 T1, input2 T2, input3 T3) func() { return func() { f(input1, input2, input3) } } // Curry3_2 accepts a function that receives three parameters and the values of those parameters // and returns a function that accepts no parameters but returns the result of // applying the function to the given parameters. The function must have 2 return values. func Curry3_2[T1, T2, T3, R1, R2 any](f func(T1, T2, T3) (R1, R2), input1 T1, input2 T2, input3 T3) func() (R1, R2) { return func() (R1, R2) { return f(input1, input2, input3) } } // Curry2To1 accepts a function that receives two parameters and the value of the first parameter // and returns a function that accepts one parameter which is the second parameter in the original function // and returns the result of applying the function with input1 as the first parameter and // whatever is given to the curried function as the second parameter. 
func Curry2To1[T1, T2, R any](f func(T1, T2) R, input1 T1) func(T2) R { return func(input2 T2) R { return f(input1, input2) } } // Curry2To1_0 accepts a function that receives two parameters and the value of the first parameter // and returns a function that accepts one parameter which is the second parameter in the original function. // The curried function returns the result of applying the original function to input1 as the first parameter and // whatever is given to the last returned function as the second parameter. // The function must have no return value. func Curry2To1_0[T1, T2 any](f func(T1, T2), input1 T1) func(T2) { return func(input2 T2) { f(input1, input2) } } // Curry2To1_2 accepts a function that receives two parameters and the value of the first parameter // and returns a function that accepts one parameter which is the second parameter in the original function // and returns the result of applying the function with input1 as the first parameter and // whatever is given to the curried function as the second parameter. The function must 2 return values. func Curry2To1_2[T1, T2, R1, R2 any](f func(T1, T2) (R1, R2), input1 T1) func(T2) (R1, R2) { return func(input2 T2) (R1, R2) { return f(input1, input2) } } // Curry2To1F accepts a function that receives two parameters // and returns a function that accepts one parameter which is the first parameter in the original function // and returns another function that accepts one parameter, which is the second parameter in the original function. // The last function returns the result of applying the original function with the input to the first returned function as the first parameter and // whatever is given to the last returned function as the second parameter. 
func Curry2To1F[T1, T2, R any](f func(T1, T2) R) func(T1) func(T2) R { return func(input1 T1) func(T2) R { return func(input2 T2) R { return f(input1, input2) } } } // Curry3To2 accepts a function that receives three parameters // and returns a function that accepts two parameters which are the last two parameters in the original function. // The curried function returns the result of applying the original function to input1 as the first parameter and // whatever is given to the last returned function as the second and third parameters. func Curry3To2[T1, T2, T3, R any](f func(T1, T2, T3) R, input1 T1) func(T2, T3) R { return func(input2 T2, input3 T3) R { return f(input1, input2, input3) } } // Curry3To2_0 accepts a function that receives three parameters // and returns a function that accepts two parameters which are the last two parameters in the original function. // The curried function applies the original function to input1 as the first parameter and // whatever is given to the last returned function as the second and third parameters. // The function must have no return value. func Curry3To2_0[T1, T2, T3 any](f func(T1, T2, T3), input1 T1) func(T2, T3) { return func(input2 T2, input3 T3) { f(input1, input2, input3) } } // Curry3To2_2 accepts a function that receives three parameters // and returns a function that accepts two parameters which are the last two parameters in the original function. // The curried function returns the result of applying the original function to input1 as the first parameter and // whatever is given to the last returned function as the second and third parameters. The function must have 2 return values. 
func Curry3To2_2[T1, T2, T3, R1, R2 any](f func(T1, T2, T3) (R1, R2), input1 T1) func(T2, T3) (R1, R2) { return func(input2 T2, input3 T3) (R1, R2) { return f(input1, input2, input3) } } // Curry3To1 accepts a function that receives three parameters // and returns a function that accepts one parameter which is the last parameter in the original function. // The curried function returns the result of applying the original function to input1 and input2 as the first two parameters and // whatever is given to the last returned function as the third parameter. func Curry3To1[T1, T2, T3, R any](f func(T1, T2, T3) R, input1 T1, input2 T2) func(T3) R { return func(input3 T3) R { return f(input1, input2, input3) } } // Curry3To1_0 accepts a function that receives three parameters // and returns a function that accepts one parameter which is the last parameter in the original function. // The curried function applies the original function to input1 and input2 as the first two parameters and // whatever is given to the last returned function as the third parameter. // The function must have no return value. func Curry3To1_0[T1, T2, T3 any](f func(T1, T2, T3), input1 T1, input2 T2) func(T3) { return func(input3 T3) { f(input1, input2, input3) } } // Curry3To1_2 accepts a function that receives three parameters // and returns a function that accepts one parameter which is the last parameter in the original function. // The curried function returns the result of applying the original function to input1 and input2 as the first two parameters and // whatever is given to the last returned function as the third parameter. The function must have 2 return values. func Curry3To1_2[T1, T2, T3, R1, R2 any](f func(T1, T2, T3) (R1, R2), input1 T1, input2 T2) func(T3) (R1, R2) { return func(input3 T3) (R1, R2) { return f(input1, input2, input3) } }
curry.go
0.852951
0.953579
curry.go
starcoder
package quaternion

import (
	"fmt"
	"math"

	"github.com/ungerik/go3d/float64/vec3"
	"github.com/ungerik/go3d/float64/vec4"
)

var (
	// Zero holds a zero quaternion.
	Zero = T{}

	// Ident holds an ident quaternion.
	Ident = T{0, 0, 0, 1}
)

// T represents an orientation/rotation as a unit quaternion,
// stored as [x, y, z, w].
// See http://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation
type T [4]float64

// FromAxisAngle returns a quaternion representing a rotation around an axis.
// The final normalization compensates for a non-unit axis.
func FromAxisAngle(axis *vec3.T, angle float64) T {
	angle *= 0.5
	sin := math.Sin(angle)
	q := T{axis[0] * sin, axis[1] * sin, axis[2] * sin, math.Cos(angle)}
	return q.Normalized()
}

// FromXAxisAngle returns a quaternion representing a rotation around the x axis.
func FromXAxisAngle(angle float64) T {
	angle *= 0.5
	return T{math.Sin(angle), 0, 0, math.Cos(angle)}
}

// FromYAxisAngle returns a quaternion representing a rotation around the y axis.
func FromYAxisAngle(angle float64) T {
	angle *= 0.5
	return T{0, math.Sin(angle), 0, math.Cos(angle)}
}

// FromZAxisAngle returns a quaternion representing a rotation around the z axis.
func FromZAxisAngle(angle float64) T {
	angle *= 0.5
	return T{0, 0, math.Sin(angle), math.Cos(angle)}
}

// FromEulerAngles returns a quaternion representing Euler angle rotations,
// composed in Y (head), X (pitch), Z (roll) order.
func FromEulerAngles(yHead, xPitch, zRoll float64) T {
	qy := FromYAxisAngle(yHead)
	qx := FromXAxisAngle(xPitch)
	qz := FromZAxisAngle(zRoll)
	return Mul3(&qy, &qx, &qz)
}

// FromVec4 converts a vec4.T into a quaternion (direct component copy).
func FromVec4(v *vec4.T) T {
	return T(*v)
}

// Vec4 converts the quaternion into a vec4.T (direct component copy).
func (quat *T) Vec4() vec4.T {
	return vec4.T(*quat)
}

// Parse parses T from a string. See also String()
func Parse(s string) (r T, err error) {
	_, err = fmt.Sscan(s, &r[0], &r[1], &r[2], &r[3])
	return r, err
}

// String formats T as string. See also Parse().
func (quat *T) String() string {
	return fmt.Sprint(quat[0], quat[1], quat[2], quat[3])
}

// AxisAngle extracts the rotation in form of an axis and a rotation angle.
// NOTE(review): assumes a unit quaternion — if quat[3] drifts outside
// [-1, 1], Sqrt/Acos produce NaN. The ooSin guard only protects against
// division by a near-zero sine (angle near 0 or π).
func (quat *T) AxisAngle() (axis vec3.T, angle float64) {
	cos := quat[3]
	sin := math.Sqrt(1 - cos*cos)
	angle = math.Acos(cos)

	var ooSin float64
	if math.Abs(sin) < 0.0005 {
		ooSin = 1
	} else {
		ooSin = 1 / sin
	}
	axis[0] = quat[0] * ooSin
	axis[1] = quat[1] * ooSin
	axis[2] = quat[2] * ooSin

	return axis, angle
}

// Norm returns the norm value of the quaternion: the sum of the squared
// components (i.e. the squared length, not the length itself).
func (quat *T) Norm() float64 {
	return quat[0]*quat[0] + quat[1]*quat[1] + quat[2]*quat[2] + quat[3]*quat[3]
}

// Normalize normalizes to a unit quaternion in place. Zero and
// already-unit quaternions are left unchanged.
func (quat *T) Normalize() *T {
	norm := quat.Norm()
	if norm != 1 && norm != 0 {
		ool := 1 / math.Sqrt(norm)
		quat[0] *= ool
		quat[1] *= ool
		quat[2] *= ool
		quat[3] *= ool
	}
	return quat
}

// Normalized returns a copy normalized to a unit quaternion. Zero and
// already-unit quaternions are returned unchanged.
func (quat *T) Normalized() T {
	norm := quat.Norm()
	if norm != 1 && norm != 0 {
		ool := 1 / math.Sqrt(norm)
		return T{
			quat[0] * ool,
			quat[1] * ool,
			quat[2] * ool,
			quat[3] * ool,
		}
	} else {
		return *quat
	}
}

// Negate negates all four components of the quaternion in place.
// (q and -q represent the same rotation.)
func (quat *T) Negate() *T {
	quat[0] = -quat[0]
	quat[1] = -quat[1]
	quat[2] = -quat[2]
	quat[3] = -quat[3]
	return quat
}

// Negated returns a negated copy of the quaternion.
func (quat *T) Negated() T {
	return T{-quat[0], -quat[1], -quat[2], -quat[3]}
}

// Invert inverts the quaternion in place by negating the vector part
// (the conjugate). NOTE(review): this equals the true inverse only for
// unit quaternions — no division by the norm is performed.
func (quat *T) Invert() *T {
	quat[0] = -quat[0]
	quat[1] = -quat[1]
	quat[2] = -quat[2]
	return quat
}

// Inverted returns an inverted (conjugated) copy of the quaternion.
// See the note on Invert regarding non-unit quaternions.
func (quat *T) Inverted() T {
	return T{-quat[0], -quat[1], -quat[2], quat[3]}
}

// SetShortestRotation negates the quaternion if it does not represent the shortest rotation from quat to the orientation of other.
// (there are two directions to rotate from the orientation of quat to the orientation of other)
// See IsShortestRotation()
func (quat *T) SetShortestRotation(other *T) *T {
	if !IsShortestRotation(quat, other) {
		quat.Negate()
	}
	return quat
}

// IsShortestRotation returns if the rotation from a to b is the shortest possible rotation.
// (there are two directions to rotate from the orientation of quat to the orientation of other)
// See T.SetShortestRotation
func IsShortestRotation(a, b *T) bool {
	return Dot(a, b) >= 0
}

// IsUnitQuat returns if the quaternion is within tolerance of the unit quaternion.
// Note that the comparison is against Norm(), i.e. the squared length.
func (quat *T) IsUnitQuat(tolerance float64) bool {
	norm := quat.Norm()
	return norm >= (1.0-tolerance) && norm <= (1.0+tolerance)
}

// RotateVec3 rotates v in place by the rotation represented by the
// quaternion, via the sandwich product q * v * q⁻¹.
func (quat *T) RotateVec3(v *vec3.T) {
	qv := T{v[0], v[1], v[2], 0}
	inv := quat.Inverted()
	q := Mul3(quat, &qv, &inv)
	v[0] = q[0]
	v[1] = q[1]
	v[2] = q[2]
}

// RotatedVec3 returns a rotated copy of v. See RotateVec3.
func (quat *T) RotatedVec3(v *vec3.T) vec3.T {
	qv := T{v[0], v[1], v[2], 0}
	inv := quat.Inverted()
	q := Mul3(quat, &qv, &inv)
	return vec3.T{q[0], q[1], q[2]}
}

// Dot returns the dot product of two quaternions.
func Dot(a, b *T) float64 {
	return a[0]*b[0] + a[1]*b[1] + a[2]*b[2] + a[3]*b[3]
}

// Mul multiplies two quaternions (Hamilton product) and normalizes the
// result.
func Mul(a, b *T) T {
	q := T{
		a[3]*b[0] + a[0]*b[3] + a[1]*b[2] - a[2]*b[1],
		a[3]*b[1] + a[1]*b[3] + a[2]*b[0] - a[0]*b[2],
		a[3]*b[2] + a[2]*b[3] + a[0]*b[1] - a[1]*b[0],
		a[3]*b[3] - a[0]*b[0] - a[1]*b[1] - a[2]*b[2],
	}
	return q.Normalized()
}

// Mul3 multiplies three quaternions, left to right.
func Mul3(a, b, c *T) T {
	q := Mul(a, b)
	return Mul(&q, c)
}

// Mul4 multiplies four quaternions, left to right.
func Mul4(a, b, c, d *T) T {
	q := Mul(a, b)
	q = Mul(&q, c)
	return Mul(&q, d)
}

// Slerp returns the spherical linear interpolation quaternion between a and b at t (0,1).
// See http://en.wikipedia.org/wiki/Slerp
// NOTE(review): when a and b are (nearly) identical, d is (nearly) 0 and
// ooSinD divides by zero; the dot product is also not clamped to [-1, 1],
// so floating point drift can make Acos return NaN. Callers presumably
// ensure distinct unit quaternions.
func Slerp(a, b *T, t float64) T {
	d := math.Acos(a[0]*b[0] + a[1]*b[1] + a[2]*b[2] + a[3]*b[3])
	ooSinD := 1 / math.Sin(d)

	t1 := math.Sin(d*(1-t)) * ooSinD
	t2 := math.Sin(d*t) * ooSinD

	q := T{
		a[0]*t1 + b[0]*t2,
		a[1]*t1 + b[1]*t2,
		a[2]*t1 + b[2]*t2,
		a[3]*t1 + b[3]*t2,
	}

	return q.Normalized()
}

// Vec3Diff returns the rotation quaternion between two vectors.
// NOTE(review): for antiparallel inputs Dot(a, b) == -1, so sr is 0 and
// oosr divides by zero — undefined for exactly opposite vectors.
func Vec3Diff(a, b *vec3.T) T {
	cr := vec3.Cross(a, b)
	sr := math.Sqrt(2 * (1 + vec3.Dot(a, b)))
	oosr := 1 / sr

	q := T{cr[0] * oosr, cr[1] * oosr, cr[2] * oosr, sr * 0.5}
	return q.Normalized()
}
float64/quaternion/quaternion.go
0.916829
0.69901
quaternion.go
starcoder
package geo

import (
	"math"
	"math/rand"

	"github.com/Bredgren/wrand"
)

// Vec is a 2D vector. Many of the functions for Vec have two versions, one that modifies
// the Vec and one that returns a new Vec. Their names follow a convention that is hopefully
// intuitive. For example, when working with Vec as a value you use v1.Plus(v2) which returns
// a new value and reads like when working with other value types such as "1 + 2".
type Vec struct {
	X, Y float64
}

// Len returns the length of the vector.
func (v Vec) Len() float64 {
	return math.Sqrt(v.X*v.X + v.Y*v.Y)
}

// Len2 is the length of the vector squared (avoids the Sqrt of Len).
func (v Vec) Len2() float64 {
	return v.X*v.X + v.Y*v.Y
}

// SetLen sets the length of the vector. Negative lengths will flip the vectors direction.
// If v's length is zero then it will remain unchanged.
func (v *Vec) SetLen(l float64) {
	len := math.Sqrt(v.X*v.X + v.Y*v.Y)
	if len == 0 {
		return
	}
	v.X = v.X / len * l
	v.Y = v.Y / len * l
}

// WithLen returns a new vector in the same direction as v but with the given length.
// If v's length is 0 then the zero vector is returned.
func (v Vec) WithLen(l float64) Vec {
	len := math.Sqrt(v.X*v.X + v.Y*v.Y)
	if len == 0 {
		return Vec{}
	}
	v.X = v.X / len * l
	v.Y = v.Y / len * l
	return v
}

// Dist returns the distance between the two vectors.
func (v Vec) Dist(v2 Vec) float64 {
	return math.Sqrt((v.X-v2.X)*(v.X-v2.X) + (v.Y-v2.Y)*(v.Y-v2.Y))
}

// Dist2 returns the distance squared between the two vectors.
func (v Vec) Dist2(v2 Vec) float64 {
	return (v.X-v2.X)*(v.X-v2.X) + (v.Y-v2.Y)*(v.Y-v2.Y)
}

// Add modifies v to be the sum of v2 and itself.
func (v *Vec) Add(v2 Vec) {
	v.X += v2.X
	v.Y += v2.Y
}

// Plus returns a new vector that is the sum of the two vectors.
func (v Vec) Plus(v2 Vec) Vec {
	return Vec{X: v.X + v2.X, Y: v.Y + v2.Y}
}

// Sub modifies v to be the difference between itself and v2.
func (v *Vec) Sub(v2 Vec) {
	v.X -= v2.X
	v.Y -= v2.Y
}

// Minus returns a new vector that is the difference of the two vectors.
func (v Vec) Minus(v2 Vec) Vec {
	return Vec{X: v.X - v2.X, Y: v.Y - v2.Y}
}

// Mul modifies v to be itself times n.
func (v *Vec) Mul(n float64) {
	v.X *= n
	v.Y *= n
}

// Times returns a new vector that is v times n.
func (v Vec) Times(n float64) Vec {
	return Vec{X: v.X * n, Y: v.Y * n}
}

// Div modifies v to be itself divided by n.
func (v *Vec) Div(n float64) {
	v.X /= n
	v.Y /= n
}

// DividedBy returns a new vector that is v divided by n.
func (v Vec) DividedBy(n float64) Vec {
	return Vec{X: v.X / n, Y: v.Y / n}
}

// Normalize modifies v to be of length one in the same direction.
// NOTE(review): unlike SetLen, a zero-length vector is NOT guarded here —
// the division yields NaN components.
func (v *Vec) Normalize() {
	len := math.Sqrt(v.X*v.X + v.Y*v.Y)
	v.X = v.X / len
	v.Y = v.Y / len
}

// Normalized returns a new vector of length one in the same direction as v.
// NOTE(review): same zero-length NaN caveat as Normalize.
func (v Vec) Normalized() Vec {
	len := math.Sqrt(v.X*v.X + v.Y*v.Y)
	return Vec{X: v.X / len, Y: v.Y / len}
}

// Dot returns the dot product between the two vectors.
func (v Vec) Dot(v2 Vec) float64 {
	return v.X*v2.X + v.Y*v2.Y
}

// Project modifies v to be the vector that is the projection of v onto v2.
// NOTE(review): a zero v2 produces NaN via Normalize.
func (v *Vec) Project(v2 Vec) {
	v2.Normalize()
	v2.Mul(v.X*v2.X + v.Y*v2.Y)
	v.X, v.Y = v2.X, v2.Y
}

// Projected returns the vector that is v projected onto v2.
// NOTE(review): a zero v2 produces NaN via Normalize.
func (v Vec) Projected(v2 Vec) Vec {
	v2.Normalize()
	v2.Mul(v.X*v2.X + v.Y*v2.Y)
	return v2
}

// Limit constrains the length of the vector be no greater than len. If the vector is already
// less than len then no change is made.
func (v *Vec) Limit(len float64) {
	l := v.Len()
	if l > len {
		v.SetLen(len)
	}
}

// Limited returns a new vector in the same direction as v with length no greater than
// len. The vector returned will be equivalent to v if v.Len() <= len.
func (v Vec) Limited(len float64) Vec {
	l := v.Len()
	if l > len {
		// v is a local copy here (value receiver), so SetLen only mutates
		// the copy that is returned.
		v.SetLen(len)
	}
	return v
}

// Angle returns the radians relative to the positive x-axis (counterclockwise in screen
// coordinates). The returned value is in the range [-π, π).
func (v Vec) Angle() float64 {
	return -math.Atan2(v.Y, v.X)
}

// AngleFrom returns the radians from v2 to v (counterclockwise in screen coordinates).
// The returned value is in the range [-π, π).
func (v Vec) AngleFrom(v2 Vec) float64 {
	r := math.Atan2(v2.Y, v2.X) - math.Atan2(v.Y, v.X)
	// Wrap the raw difference into [-π, π).
	if r < -math.Pi {
		r += 2 * math.Pi
	}
	if r >= math.Pi {
		r -= 2 * math.Pi
	}
	return r
}

// Rotate rotates the vector (counterclockwise in screen coordinates) by the given radians.
func (v *Vec) Rotate(rad float64) {
	v.X, v.Y = v.X*math.Cos(rad)+v.Y*math.Sin(rad), -v.X*math.Sin(rad)+v.Y*math.Cos(rad)
}

// Rotated returns a new vector that is equal to this one rotated (counterclockwise in
// screen coordinates) by the given radians.
func (v Vec) Rotated(rad float64) Vec {
	return Vec{X: v.X*math.Cos(rad) + v.Y*math.Sin(rad), Y: -v.X*math.Sin(rad) + v.Y*math.Cos(rad)}
}

// Equals returns true if the corresponding components of the vectors are within the error e.
func (v Vec) Equals(v2 Vec, e float64) bool {
	return math.Abs(v.X-v2.X) < e && math.Abs(v.Y-v2.Y) < e
}

// VecGen (Vector Generator) is a function that returns a vector.
type VecGen func() Vec

// RandVec returns a unit vector in a random direction.
func RandVec() Vec {
	rad := rand.Float64() * 2 * math.Pi
	return Vec{X: math.Cos(rad), Y: math.Sin(rad)}
}

// StaticVec returns a VecGen that always returns the constant vector v.
func StaticVec(v Vec) VecGen {
	return func() Vec { return v }
}

// DynamicVec returns a VecGen that always returns a copy of the vector pointed to by v.
func DynamicVec(v *Vec) VecGen {
	return func() Vec { return *v }
}

// OffsetVec returns a VecGen that adds offset to gen. For example, if you wanted to use
// RandCircle as an initial position you might use
//  OffsetVec(RandVecCircle(5, 10), StaticVec(Vec{X: 100, Y: 100}))
// to center the circle at position 100, 100.
func OffsetVec(gen VecGen, offset VecGen) VecGen {
	return func() Vec { return gen().Plus(offset()) }
}

// RandVecCircle returns a VecGen that will generate a random vector within the given radii.
// Negative radii are undefined.
func RandVecCircle(minRadius, maxRadius float64) VecGen {
	return func() Vec {
		return RandVec().Times(circleRadius(minRadius, maxRadius))
	}
}

// RandVecArc returns a VecGen that will generate a random vector within the slice of a
// circle defined by the parameters. The radians are relative to the +x axis.
// Negative radii are undefined.
func RandVecArc(minRadius, maxRadius, minRadians, maxRadians float64) VecGen {
	// Swap so that the radian bounds are ordered.
	if maxRadians < minRadians {
		minRadians, maxRadians = maxRadians, minRadians
	}
	return func() Vec {
		r := circleRadius(minRadius, maxRadius)
		rad := rand.Float64()*(maxRadians-minRadians) + minRadians
		return Vec{X: r}.Rotated(rad)
	}
}

// RandVecRect returns a VecGen that will generate a random vector within the given Rect.
func RandVecRect(rect Rect) VecGen {
	return func() Vec {
		return Vec{
			X: rand.Float64()*rect.W + rect.X,
			Y: rand.Float64()*rect.H + rect.Y,
		}
	}
}

// RandVecRects returns a VecGen that will generate a random vector that is uniformly
// distributed between all the given rects. If the slice given is empty then the zero
// vector is returned.
func RandVecRects(rects []Rect) VecGen {
	if len(rects) == 0 {
		return func() Vec { return Vec{} }
	}
	// Weight the choice of rect by its area so the distribution over the
	// union of rects stays uniform.
	areas := make([]float64, len(rects))
	for i := range rects {
		areas[i] = rects[i].Area()
	}
	return func() Vec {
		rect := rects[wrand.SelectIndex(areas)]
		return Vec{
			X: rand.Float64()*rect.W + rect.X,
			Y: rand.Float64()*rect.H + rect.Y,
		}
	}
}

// circleRadius returns a uniformly distributed radius between minR and maxR
// (uniform over the annulus area, hence the Sqrt).
func circleRadius(minR, maxR float64) float64 {
	if maxR == 0 || maxR == minR {
		return maxR
	}
	unitMin := minR / maxR
	unitMin *= unitMin
	return math.Sqrt(rand.Float64()*(1-unitMin)+unitMin) * maxR
}
geo/vec.go
0.928587
0.659021
vec.go
starcoder
package base import ( sdkTypes "github.com/cosmos/cosmos-sdk/types" "github.com/persistenceOne/persistenceSDK/constants/errors" "github.com/persistenceOne/persistenceSDK/schema/types" "github.com/persistenceOne/persistenceSDK/utilities/meta" ) var _, _ types.Data = (*Data_DecData)(nil), (*DecData)(nil) func (decData Data_DecData) Compare(data types.Data) int { compareDecData, Error := decDataFromInterface(data) if Error != nil { panic(Error) } if decData.DecData.Value.GT(compareDecData.DecData.Value) { return 1 } else if decData.DecData.Value.LT(compareDecData.DecData.Value) { return -1 } return 0 } func (decData Data_DecData) String() string { return decData.DecData.Value.String() } func (decData Data_DecData) GetTypeID() types.ID { return NewID("D") } func (decData Data_DecData) ZeroValue() types.Data { return NewDecData(sdkTypes.ZeroDec()) } func (decData Data_DecData) GenerateHashID() types.ID { if decData.Compare(decData.ZeroValue()) == 0 { return NewID("") } return NewID(meta.Hash(decData.DecData.Value.String())) } func (decData Data_DecData) AsAccAddress() (sdkTypes.AccAddress, error) { zeroValue, _ := Data_AccAddressData{}.ZeroValue().AsAccAddress() return zeroValue, errors.IncorrectFormat } func (decData Data_DecData) AsListData() (types.ListData, error) { zeroValue, _ := Data_ListData{}.ZeroValue().AsListData() return zeroValue, errors.IncorrectFormat } func (decData Data_DecData) AsString() (string, error) { zeroValue, _ := Data_StringData{}.ZeroValue().AsString() return zeroValue, errors.IncorrectFormat } func (decData Data_DecData) AsDec() (sdkTypes.Dec, error) { return decData.DecData.Value, nil } func (decData Data_DecData) AsHeight() (types.Height, error) { zeroValue, _ := Data_HeightData{}.ZeroValue().AsHeight() return zeroValue, errors.IncorrectFormat } func (decData Data_DecData) AsID() (types.ID, error) { zeroValue, _ := Data_DecData{}.ZeroValue().AsID() return zeroValue, errors.IncorrectFormat } func (decData Data_DecData) Get() interface{} { 
return decData.DecData.Value } func (decData Data_DecData) Unmarshal(dAtA []byte) error { return decData.DecData.Unmarshal(dAtA) } func (decData *Data_DecData) Reset() { *decData = Data_DecData{} } func (*Data_DecData) ProtoMessage() {} func decDataFromInterface(data types.Data) (Data_DecData, error) { switch value := data.(type) { case *Data_DecData: return *value, nil default: return Data_DecData{}, errors.MetaDataError } } func NewDecData(value sdkTypes.Dec) *Data_DecData { return &Data_DecData{ DecData: &DecData{ Value: value, }, } } func ReadDecData(dataString string) (types.Data, error) { if dataString == "" { return Data_DecData{}.ZeroValue(), nil } dec, Error := sdkTypes.NewDecFromStr(dataString) if Error != nil { return Data_DecData{}.ZeroValue(), Error } return NewDecData(dec), nil } func (decData DecData) Compare(data types.Data) int { compareDecData, Error := dummyDecDataFromInterface(data) if Error != nil { panic(Error) } if decData.Value.GT(compareDecData.Value) { return 1 } else if decData.Value.LT(compareDecData.Value) { return -1 } return 0 } func (decData DecData) String() string { return decData.Value.String() } func (decData DecData) GetTypeID() types.ID { return NewID("D") } func (decData DecData) ZeroValue() types.Data { return NewDecData(sdkTypes.ZeroDec()) } func (decData DecData) GenerateHashID() types.ID { if decData.Compare(decData.ZeroValue()) == 0 { return NewID("") } return NewID(meta.Hash(decData.Value.String())) } func (decData DecData) AsAccAddress() (sdkTypes.AccAddress, error) { zeroValue, _ := AccAddressData{}.ZeroValue().AsAccAddress() return zeroValue, errors.IncorrectFormat } func (decData DecData) AsListData() (types.ListData, error) { zeroValue, _ := ListData{}.ZeroValue().AsListData() return zeroValue, errors.IncorrectFormat } func (decData DecData) AsString() (string, error) { zeroValue, _ := StringData{}.ZeroValue().AsString() return zeroValue, errors.IncorrectFormat } func (decData DecData) AsDec() (sdkTypes.Dec, error) 
{ return decData.Value, nil } func (decData DecData) AsHeight() (types.Height, error) { zeroValue, _ := HeightData{}.ZeroValue().AsHeight() return zeroValue, errors.IncorrectFormat } func (decData DecData) AsID() (types.ID, error) { zeroValue, _ := IDData{}.ZeroValue().AsID() return zeroValue, errors.IncorrectFormat } func (decData DecData) Get() interface{} { return decData.Value } func dummyDecDataFromInterface(data types.Data) (DecData, error) { switch value := data.(type) { case *DecData: return *value, nil default: return DecData{}, errors.MetaDataError } } func NewDummyDecData(value sdkTypes.Dec) *DecData { return &DecData{ Value: value, } }
schema/types/base/decData.go
0.663451
0.430387
decData.go
starcoder
package circleToPolygon // version 1.0.3

import (
	"encoding/json"
	"math"
)

const (
	defaultEarthRadius float64 = 6378137 // equatorial Earth radius
	defaultCount       float64 = 32     // default number of polygon vertices
)

// NewCircle builds a circle approximation centered at the given
// latitude/longtitude with the given radius (same unit as the earth radius).
func NewCircle(latitude float64, longtitude float64, radius float64) CircleToPolygon {
	return &circleToPolygon{
		center: [2]float64{latitude, longtitude},
		radius: radius,
	}
}

// CircleToPolygon approximates a circle on the Earth's surface by a polygon.
type CircleToPolygon interface {
	SetEarthRadius(float64) CircleToPolygon // set earth radius
	SetBearing(float64) CircleToPolygon     // set bearing
	SetDirection(float64) CircleToPolygon   // set circle direction
	Draw() [][2]float64                     // draw circle by options
	DrawGeoJson() []byte                    // draw in geoJson
}

type circleToPolygon struct {
	earthRadius float64
	radius      float64
	center      [2]float64
	bearing     float64
	direction   float64
	count       float64
}

// SetEarthRadius overrides the default equatorial Earth radius.
func (ctp *circleToPolygon) SetEarthRadius(earthRadius float64) CircleToPolygon {
	ctp.earthRadius = earthRadius
	return ctp
}

// getEarthRadius falls back to the default when no radius was configured.
func (ctp *circleToPolygon) getEarthRadius() float64 {
	if r := ctp.earthRadius; r != 0 {
		return r
	}
	return defaultEarthRadius
}

func (ctp *circleToPolygon) getRadius() float64 { return ctp.radius }

func (ctp *circleToPolygon) getCenter() [2]float64 { return ctp.center }

// SetBearing sets the starting bearing (degrees) of the first vertex.
func (ctp *circleToPolygon) SetBearing(bearing float64) CircleToPolygon {
	ctp.bearing = bearing
	return ctp
}

func (ctp *circleToPolygon) getBearing() float64 { return ctp.bearing }

// SetDirection sets the winding direction; only -1 and 1 are honored.
func (ctp *circleToPolygon) SetDirection(direction float64) CircleToPolygon {
	ctp.direction = direction
	return ctp
}

// getDirection normalizes the winding direction: anything other than
// -1 or 1 is treated as 1.
func (ctp *circleToPolygon) getDirection() float64 {
	switch ctp.direction {
	case -1, 1:
		return ctp.direction
	}
	return 1
}

// SetCount sets the number of polygon vertices (values <= 3 fall back to
// the default).
func (ctp *circleToPolygon) SetCount(count float64) CircleToPolygon {
	ctp.count = count
	return ctp
}

func (ctp *circleToPolygon) getCount() float64 {
	if ctp.count > 3 {
		return ctp.count
	}
	return defaultCount
}

// Draw computes the polygon vertices approximating the circle.
// The center is given as [lat, lon]; each vertex is returned as [lon, lat]
// (the GeoJSON position order).
func (ctp *circleToPolygon) Draw() [][2]float64 {
	var (
		count       = ctp.getCount()
		start       = toRadians(ctp.getBearing())
		direction   = ctp.getDirection()
		earthRadius = ctp.getEarthRadius()
		radius      = ctp.getRadius()
		center      = ctp.getCenter()
	)
	vertices := make([][2]float64, 0, int(count))
	for i := 0.0; i < count; i++ {
		angle := start + (direction*2*math.Pi*-i)/count
		vertices = append(vertices, offset(center, radius, earthRadius, angle))
	}
	return vertices
}

// DrawGeoJson renders the polygon as a GeoJSON Polygon geometry.
func (ctp *circleToPolygon) DrawGeoJson() []byte {
	ring := ctp.Draw()
	ring = append(ring, ring[0]) // GeoJSON rings must be closed
	data, _ := json.Marshal(ring) // marshaling [][2]float64 cannot fail
	out := append([]byte(`{"coordinates":[`), data...)
	out = append(out, []byte(`],"type":"Polygon"}`)...)
	return out
}

func toRadians(angleInDegrees float64) float64 {
	return (angleInDegrees * math.Pi) / 180
}

func toDegrees(angleInRadians float64) float64 {
	return (angleInRadians * 180) / math.Pi
}

// offset computes the destination point at the given distance and bearing
// from c1 ([lat, lon]) on a sphere of radius earthRadius, returned as
// [lon, lat].
func offset(c1 [2]float64, distance float64, earthRadius float64, bearing float64) [2]float64 {
	lat1 := toRadians(c1[0])
	lon1 := toRadians(c1[1])
	dByR := distance / earthRadius
	lat := math.Asin(math.Sin(lat1)*math.Cos(dByR) + math.Cos(lat1)*math.Sin(dByR)*math.Cos(bearing))
	lon := lon1 + math.Atan2(
		math.Sin(bearing)*math.Sin(dByR)*math.Cos(lat1),
		math.Cos(dByR)-math.Sin(lat1)*math.Sin(lat),
	)
	return [2]float64{toDegrees(lon), toDegrees(lat)}
}
circleToPolygon.go
0.77552
0.717222
circleToPolygon.go
starcoder
package timepb import ( "fmt" "time" durpb "google.golang.org/protobuf/types/known/durationpb" tspb "google.golang.org/protobuf/types/known/timestamppb" ) // IsZero returns true only when t is nil func IsZero(t *tspb.Timestamp) bool { return t == nil } // Commpare t1 and t2 and returns -1 when t1 < t2, 0 when t1 == t2 and 1 otherwise. // Returns false if t1 or t2 is nil func Compare(t1, t2 *tspb.Timestamp) int { if t1 == nil || t2 == nil { panic(fmt.Sprint("Can't compare nil time, t1=", t1, "t2=", t2)) } if t1.Seconds == t2.Seconds && t1.Nanos == t2.Nanos { return 0 } if t1.Seconds < t2.Seconds || t1.Seconds == t2.Seconds && t1.Nanos < t2.Nanos { return -1 } return 1 } // DurationIsNegative returns true if the duration is negative. It assumes that d is valid // (d..CheckValid() is nil). func DurationIsNegative(d *durpb.Duration) bool { return d.Seconds < 0 || d.Seconds == 0 && d.Nanos < 0 } // AddStd returns a new timestamp with value t + d, where d is stdlib Duration. // If t is nil then nil is returned. // Panics on overflow. func AddStd(t *tspb.Timestamp, d time.Duration) *tspb.Timestamp { if t == nil { return nil } if d == 0 { t2 := *t return &t2 } t2 := tspb.New(t.AsTime().Add(d)) overflowPanic(t, t2, d < 0) return t2 } func overflowPanic(t1, t2 *tspb.Timestamp, negative bool) { cmp := Compare(t1, t2) if negative { if cmp < 0 { panic("time overflow") } } else { if cmp > 0 { panic("time overflow") } } } const second = int32(time.Second) // Add returns a new timestamp with value t + d, where d is protobuf Duration // If t is nil then nil is returned. Panics on overflow. // Note: d must be a valid PB Duration (d..CheckValid() is nil). 
func Add(t *tspb.Timestamp, d *durpb.Duration) *tspb.Timestamp { if t == nil { return nil } if d.Seconds == 0 && d.Nanos == 0 { t2 := *t return &t2 } t2 := tspb.Timestamp{ Seconds: t.Seconds + d.Seconds, Nanos: t.Nanos + d.Nanos, } if t2.Nanos >= second { t2.Nanos -= second t2.Seconds++ } else if t2.Nanos <= -second { t2.Nanos += second t2.Seconds-- } overflowPanic(t, &t2, DurationIsNegative(d)) return &t2 }
support/timepb/cmp.go
0.696784
0.416559
cmp.go
starcoder
The implementation is based on the public domain code available at
http://www.johndcook.com/skewness_kurtosis.html . The linear regression code
is from http://www.johndcook.com/running_regression.html .
*/
package onlinestats

import "math"

// Running accumulates the count and the first four central moments of a
// stream of values in a single pass, using numerically stable online
// updates (Welford-style).
type Running struct {
	n              int
	m1, m2, m3, m4 float64 // running mean and 2nd..4th central-moment sums
}

// NewRunning returns an empty accumulator.
func NewRunning() *Running {
	return &Running{}
}

// Push folds one observation into the accumulator.
func (r *Running) Push(x float64) {
	n1 := float64(r.n)
	r.n++
	delta := x - r.m1
	delta_n := delta / float64(r.n)
	delta_n2 := delta_n * delta_n
	term1 := delta * delta_n * n1
	r.m1 += delta_n
	// The update order m4 -> m3 -> m2 matters: each update uses the
	// not-yet-updated lower moments.
	r.m4 += term1*delta_n2*float64(r.n*r.n-3*r.n+3) + 6*delta_n2*r.m2 - 4*delta_n*r.m3
	r.m3 += term1*delta_n*float64(r.n-2) - 3*delta_n*r.m2
	r.m2 += term1
}

// Len returns the number of observations pushed so far.
func (r *Running) Len() int {
	return r.n
}

// Mean returns the running mean.
func (r *Running) Mean() float64 {
	return r.m1
}

// Var returns the sample variance (n-1 denominator); not meaningful for
// fewer than two observations.
func (r *Running) Var() float64 {
	return r.m2 / float64(r.n-1)
}

// Stddev returns the sample standard deviation.
func (r *Running) Stddev() float64 {
	return math.Sqrt(r.Var())
}

// Skewness returns the (population-style) skewness of the data seen so far.
func (r *Running) Skewness() float64 {
	return math.Sqrt(float64(r.n)) * r.m3 / math.Pow(r.m2, 1.5)
}

// Kurtosis returns the excess kurtosis (normal distribution -> 0).
func (r *Running) Kurtosis() float64 {
	return float64(r.n)*r.m4/(r.m2*r.m2) - 3.0
}

// CombineRunning merges two independent accumulators into one, as if all
// observations had been pushed into a single Running (parallel-combine
// formulas for central moments).
func CombineRunning(a, b *Running) *Running {
	var combined Running

	an := float64(a.n)
	bn := float64(b.n)
	cn := an + bn

	combined.n = a.n + b.n

	delta := b.m1 - a.m1
	delta2 := delta * delta
	delta3 := delta * delta2
	delta4 := delta2 * delta2

	combined.m1 = (an*a.m1 + bn*b.m1) / cn

	combined.m2 = a.m2 + b.m2 + delta2*an*bn/cn

	combined.m3 = a.m3 + b.m3 + delta3*an*bn*(an-bn)/(cn*cn)
	combined.m3 += 3.0 * delta * (an*b.m2 - bn*a.m2) / cn

	combined.m4 = a.m4 + b.m4 + delta4*an*bn*(an*an-an*bn+bn*bn)/(cn*cn*cn)
	combined.m4 += 6.0*delta2*(an*an*b.m2+bn*bn*a.m2)/(cn*cn) + 4.0*delta*(an*b.m3-bn*a.m3)/cn

	return &combined
}

// Regression computes simple linear regression y = Intercept + Slope*x in
// one pass over a stream of (x, y) pairs.
type Regression struct {
	xstats Running
	ystats Running
	sxy    float64 // running co-moment sum of x and y
	n      int
}

// NewRegression returns an empty regression accumulator.
func NewRegression() *Regression {
	return &Regression{}
}

// Push folds one (x, y) observation into the regression.
func (r *Regression) Push(x, y float64) {
	// sxy must be updated with the means *before* x and y are pushed.
	r.sxy += (r.xstats.Mean() - x) * (r.ystats.Mean() - y) * float64(r.n) / float64(r.n+1)
	r.xstats.Push(x)
	r.ystats.Push(y)
	r.n++
}

// Len returns the number of pairs pushed so far.
func (r *Regression) Len() int {
	return r.n
}

// Slope returns the estimated slope of the regression line.
func (r *Regression) Slope() float64 {
	sxx := r.xstats.Var() * float64(r.n-1)
	return r.sxy / sxx
}

// Intercept returns the estimated intercept of the regression line.
func (r *Regression) Intercept() float64 {
	return r.ystats.Mean() - r.Slope()*r.xstats.Mean()
}

// Correlation returns the sample correlation coefficient of x and y.
func (r *Regression) Correlation() float64 {
	t := r.xstats.Stddev() * r.ystats.Stddev()
	return r.sxy / (float64(r.n-1) * t)
}

// CombineRegressions merges two independent regression accumulators,
// as if all pairs had been pushed into a single Regression.
func CombineRegressions(a, b Regression) *Regression {
	var combined Regression

	combined.xstats = *CombineRunning(&a.xstats, &b.xstats)
	combined.ystats = *CombineRunning(&a.ystats, &b.ystats)
	combined.n = a.n + b.n

	delta_x := b.xstats.Mean() - a.xstats.Mean()
	delta_y := b.ystats.Mean() - a.ystats.Mean()
	combined.sxy = a.sxy + b.sxy + float64(a.n*b.n)*delta_x*delta_y/float64(combined.n)

	return &combined
}
vendor/github.com/dgryski/go-onlinestats/stats.go
0.861931
0.562597
stats.go
starcoder
package minheap

// MinHeap is a binary min-heap stored in a slice: the minimum value is
// always at the root. Provides Add / Peek / Pop semantics.
type MinHeap struct {
	data   []int // backing array; only data[:length] holds live elements
	length int   // number of live elements
}

// Add inserts a new value into the heap - O(logN).
func (heap *MinHeap) Add(value int) {
	if heap.length == len(heap.data) {
		heap.resize(2 * len(heap.data))
	}
	heap.data[heap.length] = value
	heap.length++
	heap.bubbleUp()
}

// Len returns the current number of elements in the heap - O(1).
func (heap MinHeap) Len() int {
	return heap.length
}

// Peek returns the minimal element without removing it - O(1).
// Panics when the heap is empty.
func (heap MinHeap) Peek() int {
	// BUG FIX: emptiness must be judged by the logical length, not by
	// len(heap.data) — the backing slice keeps its size after Pop, so the
	// old check returned stale data from an emptied heap.
	if heap.length > 0 {
		return heap.data[0]
	}
	panic("Heap is empty!")
}

// Pop removes and returns the minimal element - O(logN).
// Panics when the heap is empty.
func (heap *MinHeap) Pop() (value int) {
	// BUG FIX: same as Peek — the old len(heap.data) check let Pop run on
	// an empty heap, driving length negative and panicking with an index
	// error instead of "Heap is empty!".
	if heap.length > 0 {
		value = heap.data[0]
		heap.length--
		heap.data[0] = heap.data[heap.length]
		heap.bubbleDown()
		// Shrink the backing array when occupancy drops below one third.
		if heap.length < len(heap.data)/3 {
			heap.resize(len(heap.data) / 2)
		}
		return
	}
	panic("Heap is empty!")
}

// resize grows (or shrinks) the underlying data slice, never below 8 slots.
func (heap *MinHeap) resize(size int) {
	if size < 8 {
		size = 8
	}
	if size > len(heap.data) {
		newData := make([]int, size)
		copy(newData, heap.data)
		heap.data = newData
	} else if size < len(heap.data) {
		heap.data = heap.data[:size]
	}
}

// bubbleUp moves the last element up to its correct position.
// Value receiver is fine here: only slice elements are mutated.
func (heap MinHeap) bubbleUp() {
	for cur, parent := heap.length-1, (heap.length-2)/2; cur > 0 && heap.data[cur] < heap.data[parent]; cur, parent = parent, (parent-1)/2 {
		heap.data[cur], heap.data[parent] = heap.data[parent], heap.data[cur]
	}
}

// bubbleDown moves the root element down to its correct position.
func (heap MinHeap) bubbleDown() {
	i := 0
	for {
		cur := heap.data[i]
		c, minChild := heap.getMinChild(i)
		if c >= 0 && cur > minChild {
			heap.data[i], heap.data[c] = minChild, cur
			i = c
		} else {
			return
		}
	}
}

// getMinChild returns the index and value of the smaller child of node i,
// or index -1 when the node has no children.
func (heap MinHeap) getMinChild(i int) (j, val int) {
	j = -1
	if l := 2*i + 1; l < heap.length {
		j, val = l, heap.data[l]
	}
	if r := 2*i + 2; r < heap.length && heap.data[r] < val {
		j, val = r, heap.data[r]
	}
	return
}
DataStruct/MinHeap/go/minheap.go
0.865948
0.448306
minheap.go
starcoder
package geometry

// Line is an open series of points.
type Line struct {
	baseSeries
}

// Valid reports whether the line lies within WorldPolygon
// (presumably the valid lat/lon bounds — confirm WorldPolygon's definition).
func (line *Line) Valid() bool {
	if !WorldPolygon.ContainsLine(line) {
		return false
	}
	return true
}

// NewLine creates a new Line from the given points.
func NewLine(points []Point, opts *IndexOptions) *Line {
	line := new(Line)
	line.baseSeries = makeSeries(points, true, false, opts)
	return line
}

// Move returns a copy of the line translated by (deltaX, deltaY).
// Returns nil for a nil receiver.
func (line *Line) Move(deltaX, deltaY float64) *Line {
	if line == nil {
		return nil
	}
	nline := new(Line)
	nline.baseSeries = *line.baseSeries.Move(deltaX, deltaY).(*baseSeries)
	return nline
}

// ContainsPoint reports whether the point lies on the line, i.e. on at
// least one of its segments.
func (line *Line) ContainsPoint(point Point) bool {
	if line == nil {
		return false
	}
	contains := false
	// Search only segments whose bounding rect touches the point.
	line.Search(Rect{point, point}, func(seg Segment, index int) bool {
		if seg.Raycast(point).On {
			contains = true
			return false // stop the search on first hit
		}
		return true
	})
	return contains
}

// IntersectsPoint is equivalent to ContainsPoint for a line.
func (line *Line) IntersectsPoint(point Point) bool {
	if line == nil {
		return false
	}
	return line.ContainsPoint(point)
}

// ContainsRect reports whether the line contains the rect; only possible
// when the rect degenerates to a segment/point lying on the line.
func (line *Line) ContainsRect(rect Rect) bool {
	if line == nil {
		return false
	}
	// Convert rect into a poly
	return line.ContainsPoly(&Poly{Exterior: rect})
}

// IntersectsRect reports whether the line intersects the rect.
func (line *Line) IntersectsRect(rect Rect) bool {
	if line == nil {
		return false
	}
	return rect.IntersectsLine(line)
}

// ContainsLine reports whether every segment of other lies on line. It walks
// other's segments while tracking the index of the containing segment of
// line, stepping that index backward/forward as the shared endpoints dictate.
func (line *Line) ContainsLine(other *Line) bool {
	if line == nil || other == nil || line.Empty() || other.Empty() {
		return false
	}

	// locate the first "other" segment that contains the first "line" segment.
	lineNumSegments := line.NumSegments()
	segIdx := -1
	for j := 0; j < lineNumSegments; j++ {
		if line.SegmentAt(j).ContainsSegment(other.SegmentAt(0)) {
			segIdx = j
			break
		}
	}
	if segIdx == -1 {
		return false
	}
	otherNumSegments := other.NumSegments()
	for i := 1; i < otherNumSegments; i++ {
		lineSeg := line.SegmentAt(segIdx)
		otherSeg := other.SegmentAt(i)
		if lineSeg.ContainsSegment(otherSeg) {
			continue
		}
		// Not contained by the current segment: step to the neighboring
		// segment sharing the endpoint and retry the same i (i-- undoes the
		// loop increment).
		// NOTE(review): when otherSeg neither is contained nor shares an
		// endpoint with lineSeg, the loop silently advances — this appears
		// to rely on consecutive segments being adjacent; verify.
		if otherSeg.A == lineSeg.A {
			// reverse it
			if segIdx == 0 {
				return false
			}
			segIdx--
			i--
		} else if otherSeg.A == lineSeg.B {
			// forward it
			if segIdx == lineNumSegments-1 {
				return false
			}
			segIdx++
			i--
		}
	}
	return true
}

// IntersectsLine reports whether any segment of line intersects any segment
// of other.
func (line *Line) IntersectsLine(other *Line) bool {
	if line == nil || other == nil || line.Empty() || other.Empty() {
		return false
	}
	if !line.Rect().IntersectsRect(other.Rect()) {
		return false
	}
	// Iterate over the line with fewer points and index-search the other.
	if line.NumPoints() > other.NumPoints() {
		line, other = other, line
	}
	lineNumSegments := line.NumSegments()
	for i := 0; i < lineNumSegments; i++ {
		segA := line.SegmentAt(i)
		var intersects bool
		other.Search(segA.Rect(), func(segB Segment, _ int) bool {
			if segA.IntersectsSegment(segB) {
				intersects = true
				return false // stop the search on first hit
			}
			return true
		})
		if intersects {
			return true
		}
	}
	return false
}

// ContainsPoly reports whether the line contains the polygon; only possible
// when the polygon degenerates to a vertical or horizontal segment.
func (line *Line) ContainsPoly(poly *Poly) bool {
	if line == nil || poly == nil || line.Empty() || poly.Empty() {
		return false
	}
	rect := poly.Rect()
	if rect.Min.X != rect.Max.X && rect.Min.Y != rect.Max.Y {
		return false
	}
	// polygon can fit in a straight (vertial or horizontal) line
	points := [2]Point{rect.Min, rect.Max}
	var other Line
	other.baseSeries.points = points[:]
	other.baseSeries.rect = rect
	return line.ContainsLine(&other)
}

// IntersectsPoly reports whether the line intersects the polygon.
func (line *Line) IntersectsPoly(poly *Poly) bool {
	return poly.IntersectsLine(line)
}
geometry/line.go
0.79534
0.500549
line.go
starcoder
package vm

import (
	"math"
	"strconv"
)

var (
	// integerClass is the singleton Integer class, populated by initInteger.
	integerClass *RInteger
)

// RInteger is integer class.
type RInteger struct {
	*BaseClass
}

// IntegerObject represents integer instances.
type IntegerObject struct {
	Class *RInteger
	Value int
}

func (i *IntegerObject) objectType() objectType {
	return integerObj
}

// Inspect returns the decimal string form of the integer.
func (i *IntegerObject) Inspect() string {
	return strconv.Itoa(i.Value)
}

func (i *IntegerObject) returnClass() Class {
	return i.Class
}

// equal reports whether both integer objects hold the same value.
func (i *IntegerObject) equal(e *IntegerObject) bool {
	return i.Value == e.Value
}

// initilaizeInteger wraps a Go int into a VM integer object.
// NOTE(review): the name is misspelled ("initialize"), but renaming would
// touch call sites elsewhere in this package.
func initilaizeInteger(value int) *IntegerObject {
	return &IntegerObject{Value: value, Class: integerClass}
}

// builtinIntegerMethods defines the built-in methods of the Integer class.
// Every binary method follows the same pattern: unwrap the receiver,
// type-check the first argument, and wrap the result back into a VM object.
var builtinIntegerMethods = []*BuiltInMethod{
	{
		// Integer#+ : addition.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				return &IntegerObject{Value: leftValue + rightValue, Class: integerClass}
			}
		},
		Name: "+",
	},
	{
		// Integer#- : subtraction.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				return &IntegerObject{Value: leftValue - rightValue, Class: integerClass}
			}
		},
		Name: "-",
	},
	{
		// Integer#* : multiplication.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				return &IntegerObject{Value: leftValue * rightValue, Class: integerClass}
			}
		},
		Name: "*",
	},
	{
		// Integer#** : exponentiation via math.Pow, truncated back to int.
		// NOTE(review): negative exponents therefore truncate to 0.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				result := math.Pow(float64(leftValue), float64(rightValue))
				return &IntegerObject{Value: int(result), Class: integerClass}
			}
		},
		Name: "**",
	},
	{
		// Integer#/ : integer division.
		// NOTE(review): dividing by zero panics with a Go runtime error;
		// consider returning a VM error object instead.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				return &IntegerObject{Value: leftValue / rightValue, Class: integerClass}
			}
		},
		Name: "/",
	},
	{
		// Integer#> : greater-than comparison.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue > rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: ">",
	},
	{
		// Integer#>= : greater-or-equal comparison.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue >= rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: ">=",
	},
	{
		// Integer#< : less-than comparison.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue < rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "<",
	},
	{
		// Integer#<= : less-or-equal comparison.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue <= rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "<=",
	},
	{
		// Integer#<=> : three-way comparison (-1, 0, 1).
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue < rightValue {
					return initilaizeInteger(-1)
				}
				if leftValue > rightValue {
					return initilaizeInteger(1)
				}
				return initilaizeInteger(0)
			}
		},
		Name: "<=>",
	},
	{
		// Integer#== : equality.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue == rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "==",
	},
	{
		// Integer#!= : inequality.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				leftValue := receiver.(*IntegerObject).Value
				right, ok := args[0].(*IntegerObject)
				if !ok {
					return wrongTypeError(integerClass)
				}
				rightValue := right.Value
				if leftValue != rightValue {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "!=",
	},
	{
		// Integer#++ : in-place increment; mutates and returns the receiver.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				intObj := receiver.(*IntegerObject) // renamed from "int": don't shadow the builtin type
				intObj.Value++
				return intObj
			}
		},
		Name: "++",
	},
	{
		// Integer#-- : in-place decrement; mutates and returns the receiver.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				intObj := receiver.(*IntegerObject)
				intObj.Value--
				return intObj
			}
		},
		Name: "--",
	},
	{
		// Integer#to_s : decimal string conversion.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				intObj := receiver.(*IntegerObject)
				return initializeString(strconv.Itoa(intObj.Value))
			}
		},
		Name: "to_s",
	},
	{
		// Integer#to_i : identity.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				return receiver
			}
		},
		Name: "to_i",
	},
	{
		// Integer#even : true when the value is even.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				i := receiver.(*IntegerObject)
				even := i.Value%2 == 0
				if even {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "even",
	},
	{
		// Integer#odd : true when the value is odd.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				i := receiver.(*IntegerObject)
				odd := i.Value%2 != 0
				if odd {
					return TRUE
				}
				return FALSE
			}
		},
		Name: "odd",
	},
	{
		// Integer#next : value + 1 as a new object.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				i := receiver.(*IntegerObject)
				return initilaizeInteger(i.Value + 1)
			}
		},
		Name: "next",
	},
	{
		// Integer#pred : value - 1 as a new object.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				i := receiver.(*IntegerObject)
				return initilaizeInteger(i.Value - 1)
			}
		},
		Name: "pred",
	},
	{
		// Integer#times : yields the block with 0..n-1; returns the receiver.
		Fn: func(receiver Object) builtinMethodBody {
			return func(vm *VM, args []Object, blockFrame *callFrame) Object {
				n := receiver.(*IntegerObject)

				if n.Value < 0 {
					// BUG FIX: corrected the misspelled error message
					// ("Expect paramentr to be greater 0").
					return newError("Expect parameter to be greater than 0. got=%d", n.Value)
				}

				if blockFrame == nil {
					return newError("Can't yield without a block")
				}

				for i := 0; i < n.Value; i++ {
					builtInMethodYield(vm, blockFrame, initilaizeInteger(i))
				}
				return n
			}
		},
		Name: "times",
	},
}

// initInteger builds the Integer class, registers all built-in methods, and
// stores it in the package-level integerClass singleton.
func initInteger() {
	methods := newEnvironment()

	for _, m := range builtinIntegerMethods {
		methods.set(m.Name, m)
	}

	bc := &BaseClass{Name: "Integer", Methods: methods, ClassMethods: newEnvironment(), Class: classClass, pseudoSuperClass: objectClass, superClass: objectClass}
	ic := &RInteger{BaseClass: bc}
	integerClass = ic
}
vm/integer.go
0.641422
0.409103
integer.go
starcoder
package xmath import ( "fmt" "math" "math/rand" "strconv" "strings" "time" ) type N interface { Op(op Op) N Dop(dop Dop) N } // Check checks if the given number is a valid one. func Check(v float64) { if math.IsNaN(v) || math.IsInf(v, 0) { panic(fmt.Sprintf("%v is not a valid number", v)) } } // pp is the print precision for floats const pp = 8 // Op is a general mathematical operation from one number to another type Op func(x float64) float64 // Round defines a round operation for the amount of digits after the comma, provided. func Round(digits int) Op { factor := math.Pow(10, float64(digits)) return func(x float64) float64 { return math.Round(factor*x) / factor } } // Clip clips the given number to the corresponding min or max value. func Clip(min, max float64) Op { return func(x float64) float64 { if x < min { return min } if x > max { return max } return x } } // Scale scales the given number according to the scaling factor provided. func Scale(s float64) Op { return func(x float64) float64 { return x * s } } // Add adds the given number to the argument. func Add(c float64) Op { return func(x float64) float64 { return x + c } } // Unit is a predefined operation that always returns 1. 
var Unit Op = func(x float64) float64 { return 1 } var Sqrt Op = func(x float64) float64 { return math.Sqrt(x) } var Square Op = func(x float64) float64 { return math.Pow(x, 2) } // Dop is a general mathematical operation from 2 numbers to another type Dop func(x, y float64) float64 var Mult Dop = func(x, y float64) float64 { return x * y } var Div Dop = func(x, y float64) float64 { return x / (y + 1e-8) } type Vop func(x Vector) Vector var Unary Vop = func(x Vector) Vector { return x } // MustHaveSize will check and make sure that the given vector is of the given size func MustHaveDim(m Matrix, n int) { if len(m) != n { panic(fmt.Sprintf("matrix must have primary dimension '%v' vs '%v'", m, n)) } } // MustHaveSize will check and make sure that the given vector is of the given size func MustHaveSize(v Vector, n int) { if len(v) != n { panic(fmt.Sprintf("vector must have size '%v' vs '%v'", v, n)) } } // MustHaveSameSize verifies if the given vectors are of the same size func MustHaveSameSize(v, w Vector) { if len(v) != len(w) { panic(fmt.Sprintf("vectors must have the same size '%v' vs '%v'", len(v), len(w))) } } // CartesianProduct finds all possible combinations of the given data matrix. // follows the same logic as https://stackoverflow.com/questions/53244303/all-combinations-in-array-of-arrays func CartesianProduct(data [][]float64, current int, length int) [][]float64 { result := make([][]float64, 0) if current == length { return result } subCombinations := CartesianProduct(data, current+1, length) size := len(subCombinations) for i := 0; i < len(data[current]); i++ { if size > 0 { for j := 0; j < size; j++ { combinations := make([]float64, 0) combinations = append(combinations, data[current][i]) combinations = append(combinations, subCombinations[j]...) 
result = append(result, combinations) } } else { combinations := make([]float64, 0) combinations = append(combinations, data[current][i]) result = append(result, combinations) } } return result } // TODO : clarify which methods mutate the vector and which not // Vector is an alias for a one dimensional array. type Vector []float64 // Vec creates a new vector. func Vec(dim int) Vector { v := make([]float64, dim) return v } // Check checks if the elements of the vetor are well defined func (v Vector) Check() { for _, vv := range v { Check(vv) } } // With applies the given elements in the corresponding positions of the vector func (v Vector) With(w ...float64) Vector { MustHaveSameSize(v, w) for i, vv := range w { v[i] = vv } return v } // Generate generates values for the vector func (v Vector) Generate(gen VectorGenerator) Vector { return gen(len(v), 0) } // Dot returns the dot product of the 2 vectors func (v Vector) Dot(w Vector) float64 { MustHaveSameSize(v, w) var p float64 for i := 0; i < len(v); i++ { p += v[i] * w[i] } return p } // Prod returns the product of the given vectors // it returns a matrix func (v Vector) Prod(w Vector) Matrix { z := Mat(len(v)).Of(len(w)) for i := 0; i < len(v); i++ { for j := 0; j < len(w); j++ { z[i][j] = v[i] * w[j] } } return z } // X returns the hadamard product of the given vectors. // e.g. pointwise multiplication func (v Vector) X(w Vector) Vector { MustHaveSameSize(v, w) z := Vec(len(v)) for i := 0; i < len(v); i++ { z[i] = v[i] * w[i] } return z } // Stack concatenates 2 vectors , producing another with the sum of their lengths. func (v Vector) Stack(w Vector) Vector { x := Vec(len(v) + len(w)) return x.With(append(v, w...)...) 
} // Add adds 2 vectors func (v Vector) Add(w Vector) Vector { MustHaveSameSize(v, w) z := Vec(len(v)) for i := 0; i < len(v); i++ { z[i] = v[i] + w[i] } return z } // Diff returns the difference of the corresponding elements between the given vectors func (v Vector) Diff(w Vector) Vector { return v.Dop(func(x, y float64) float64 { return x - y }, w) } // Pow returns a vector with all the elements to the given power func (v Vector) Pow(p float64) Vector { return v.Op(func(x float64) float64 { return math.Pow(x, p) }) } // Mult multiplies a vector with a constant number func (v Vector) Mult(s float64) Vector { return v.Op(func(x float64) float64 { return x * s }) } // Round rounds all elements of the given vector func (v Vector) Round() Vector { return v.Op(math.Round) } // Sum returns the sum of all elements of the vector func (v Vector) Sum() float64 { var sum float64 for i := 0; i < len(v); i++ { sum += v[i] } return sum } // Norm returns the norm of the vector func (v Vector) Norm() float64 { var sum float64 for i := 0; i < len(v); i++ { sum += math.Pow(v[i], 2) } return math.Sqrt(sum) } // Copy copies the vector into a new one with the same values // this is for cases where we want to apply mutations, but would like to leave the initial vector intact func (v Vector) Copy() Vector { w := Vec(len(v)) for i := 0; i < len(v); i++ { w[i] = v[i] } return w } // Op applies to each of the elements a specific function func (v Vector) Op(transform Op) Vector { w := Vec(len(v)) for i := range v { w[i] = transform(v[i]) } return w } // Dop applies to each of the elements a specific function based on the elements index func (v Vector) Dop(transform Dop, w Vector) Vector { z := Vec(len(v)) for i := range v { z[i] = transform(v[i], w[i]) } return z } // String prints the vector in an easily readable form func (v Vector) String() string { builder := strings.Builder{} builder.WriteString(fmt.Sprintf("(%d)", len(v))) builder.WriteString("[ ") for i := 0; i < len(v); i++ { ss := 
"" if v[i] > 0 { ss = " " } builder.WriteString(fmt.Sprintf("%s%s", ss, strconv.FormatFloat(v[i], 'f', pp, 64))) if i < len(v)-1 { // dont add the comma to the last element builder.WriteString(" , ") } } builder.WriteString(" ]") return builder.String() } // VectorGenerator is a type alias defining the creation instructions for vectors // s is the size of the vector type VectorGenerator func(s, index int) Vector // VoidVector creates a vector with zeros var VoidVector VectorGenerator = func(s, index int) Vector { return Vec(s) } // Row defines a vector at the corresponding row index of a matrix var Row = func(m ...Vector) VectorGenerator { return func(s, index int) Vector { MustHaveSize(m[index], s) return m[index] } } // Rand generates a vector of the given size with random values between min and max // op defines a scaling operation for the min and max, based on the size of the vector var Rand = func(min, max float64, op Op) VectorGenerator { rand.Seed(time.Now().UnixNano()) return func(p, index int) Vector { mmin := min / op(float64(p)) mmax := max / op(float64(p)) w := Vec(p) for i := 0; i < p; i++ { w[i] = rand.Float64()*(mmax-mmin) + mmin } return w } } // Const generates a vector of the given size with constant values var Const = func(v float64) VectorGenerator { return func(p, index int) Vector { w := Vec(p) for i := 0; i < p; i++ { w[i] = v } return w } } // ScaledVectorGenerator produces a vector generator scaled by the given factor type ScaledVectorGenerator func(d float64) VectorGenerator // RangeSqrt produces a vector generator scaled by the given factor // and within the range provided var RangeSqrt = func(min, max float64) ScaledVectorGenerator { return func(d float64) VectorGenerator { return Rand(min, max, func(x float64) float64 { return math.Sqrt(d * x) }) } } // Range produces a vector generator scaled by the given factor // and within the range provided var Range = func(min, max float64) ScaledVectorGenerator { return func(d float64) 
VectorGenerator { return Rand(min, max, func(x float64) float64 { return x }) } } type Matrix []Vector // Diag creates a new diagonal Matrix with the given elements in the diagonal func Diag(v Vector) Matrix { m := Mat(len(v)) for i := range v { m[i] = Vec(len(v)) m[i][i] = v[i] } return m } // Mat creates a newMatrix of the given dimension func Mat(m int) Matrix { mat := make([]Vector, m) return mat } // T calculates the transpose of a matrix func (m Matrix) T() Matrix { n := Mat(len(m[0])).Of(len(m)) for i := range m { for j := range m[i] { n[j][i] = m[i][j] } } return n } // Sum returns a vector that carries the sum of all elements for each row of the Matrix func (m Matrix) Sum() Vector { v := Vec(len(m)) for i := range m { v[i] = m[i].Sum() } return v } // Add returns the addition operation on 2 matrices func (m Matrix) Add(v Matrix) Matrix { w := Mat(len(m)) for i := range m { n := Vec(len(m[i])) for j := 0; j < len(m[i]); j++ { n[j] = m[i][j] + v[i][j] } w[i] = n } return w } // Dot returns the product of the given matrix with the matrix func (m Matrix) Dot(v Matrix) Matrix { w := Mat(len(m)) for i := range m { for j := 0; j < len(v); j++ { MustHaveSameSize(m[i], v[j]) w[i][j] = m[i].Dot(v[j]) } } return w } // Prod returns the cross product of the given vector with the matrix func (m Matrix) Prod(v Vector) Vector { w := Vec(len(m)) for i := range m { MustHaveSameSize(m[i], v) w[i] = m[i].Dot(v) } return w } // Mult multiplies each element of the matrix with the given factor func (m Matrix) Mult(s float64) Matrix { n := Mat(len(m)) for i := range m { n[i] = m[i].Mult(s) } return n } // Of initialises the rows of the matrix with vectors of the given length func (m Matrix) Of(n int) Matrix { for i := 0; i < len(m); i++ { m[i] = Vec(n) } return m } // With creates a matrix with the given vector replicated at each row func (m Matrix) From(v Vector) Matrix { for i := range m { m[i] = v } return m } // With applies the elements of the given vectors to the 
corresponding positions in the matrix func (m Matrix) With(v ...Vector) Matrix { for i := range m { m[i] = v[i] } return m } // Generate generates the rows of the matrix using the generator func func (m Matrix) Generate(p int, gen VectorGenerator) Matrix { for i := range m { m[i] = gen(p, i) } return m } // Copy copies the matrix into a new one with the same values // this is for cases where we want to apply mutations, but would like to leave the initial vector intact func (m Matrix) Copy() Matrix { n := Mat(len(m)) for i := 0; i < len(m); i++ { n[i] = m[i].Copy() } return n } // Op applies to each of the elements a specific function func (m Matrix) Op(transform Op) Matrix { n := Mat(len(m)) for i := range m { n[i] = m[i].Op(transform) } return n } // Op applies to each of the elements a specific function func (m Matrix) Dop(transform Dop, n Matrix) Matrix { w := Mat(len(m)) for i := range m { w[i] = m[i].Dop(transform, n[i]) } return w } // String prints the matrix in an easily readable form func (m Matrix) String() string { builder := strings.Builder{} builder.WriteString(fmt.Sprintf("(%d)", len(m))) builder.WriteString("\n") for i := 0; i < len(m); i++ { builder.WriteString("\t") builder.WriteString(fmt.Sprintf("[%d]", i)) builder.WriteString(fmt.Sprintf("%v", m[i])) builder.WriteString("\n") } return builder.String() } type Cube []Matrix func Cub(d int) Cube { cube := make([]Matrix, d) return cube } func (c Cube) String() string { builder := strings.Builder{} builder.WriteString("\n") for i := 0; i < len(c); i++ { builder.WriteString(fmt.Sprintf("[%d]", i)) builder.WriteString("\n") builder.WriteString(fmt.Sprintf("%v", c[i].String())) } return builder.String() }
oremi/vendor/github.com/drakos74/go-ex-machina/xmath/algebra.go
0.8059
0.648383
algebra.go
starcoder
package types import ( sdk "github.com/cosmos/cosmos-sdk/types" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" ) // Validator represents a single validator. // This is defined as an interface so that we can use the SDK types // as well as database types properly. type Validator interface { GetConsAddr() string GetConsPubKey() string GetOperator() string GetSelfDelegateAddress() string GetMaxChangeRate() *sdk.Dec GetMaxRate() *sdk.Dec } // validator allows to easily implement the Validator interface type validator struct { ConsensusAddr string ConsPubKey string OperatorAddr string SelfDelegateAddress string MaxChangeRate *sdk.Dec MaxRate *sdk.Dec } // NewValidator allows to build a new Validator implementation having the given data func NewValidator( consAddr string, opAddr string, consPubKey string, selfDelegateAddress string, maxChangeRate *sdk.Dec, maxRate *sdk.Dec, ) Validator { return validator{ ConsensusAddr: consAddr, ConsPubKey: consPubKey, OperatorAddr: opAddr, SelfDelegateAddress: selfDelegateAddress, MaxChangeRate: maxChangeRate, MaxRate: maxRate, } } // GetConsAddr implements the Validator interface func (v validator) GetConsAddr() string { return v.ConsensusAddr } // GetConsPubKey implements the Validator interface func (v validator) GetConsPubKey() string { return v.ConsPubKey } func (v validator) GetOperator() string { return v.OperatorAddr } func (v validator) GetSelfDelegateAddress() string { return v.SelfDelegateAddress } //Equals return the equality of two validator func (v validator) Equals(w validator) bool { return v.ConsensusAddr == w.ConsensusAddr && v.ConsPubKey == w.ConsPubKey && v.OperatorAddr == w.OperatorAddr } func (v validator) GetMaxChangeRate() *sdk.Dec { return v.MaxChangeRate } func (v validator) GetMaxRate() *sdk.Dec { return v.MaxRate } // _________________________________________________________ // ValidatorDescription contains the description of a validator // and timestamp do the description get changed type 
ValidatorDescription struct { OperatorAddress string Description stakingtypes.Description Height int64 } // NewValidatorDescription return a new ValidatorDescription object func NewValidatorDescription(opAddr string, description stakingtypes.Description, height int64, ) ValidatorDescription { return ValidatorDescription{ OperatorAddress: opAddr, Description: description, Height: height, } } // Equal tells whether v and w contain the same data func (v ValidatorDescription) Equals(w ValidatorDescription) bool { return v.OperatorAddress == w.OperatorAddress && v.Description == w.Description && v.Height == w.Height } // _________________________________________________________ // ValidatorDelegations contains both a validator delegations as // well as its unbonding delegations type ValidatorDelegations struct { ConsAddress string Delegations stakingtypes.Delegations UnbondingDelegations stakingtypes.UnbondingDelegations Height int64 } //----------------------------------------------------- //ValidatorCommission allow to build a validator commission instance type ValidatorCommission struct { ValAddress string Commission *sdk.Dec MinSelfDelegation *sdk.Int Height int64 } // NewValidatorCommission return a new validator commission instance func NewValidatorCommission( valAddress string, rate *sdk.Dec, minSelfDelegation *sdk.Int, height int64, ) ValidatorCommission { return ValidatorCommission{ ValAddress: valAddress, Commission: rate, MinSelfDelegation: minSelfDelegation, Height: height, } } //Equals return the equality of two validatorCommission func (v ValidatorCommission) Equals(w ValidatorCommission) bool { return v.ValAddress == w.ValAddress && v.Commission == w.Commission && v.MinSelfDelegation == w.MinSelfDelegation && v.Height == w.Height } //-------------------------------------------- // ValidatorVotingPower represents the voting power of a validator at a specific block height type ValidatorVotingPower struct { ConsensusAddress string VotingPower int64 Height 
int64 } // NewValidatorVotingPower creates a new ValidatorVotingPower func NewValidatorVotingPower(address string, votingPower int64, height int64) ValidatorVotingPower { return ValidatorVotingPower{ ConsensusAddress: address, VotingPower: votingPower, Height: height, } } // Equals tells whether v and w are equals func (v ValidatorVotingPower) Equals(w ValidatorVotingPower) bool { return v.ConsensusAddress == w.ConsensusAddress && v.VotingPower == w.VotingPower && v.Height == w.Height } //-------------------------------------------------------- // ValidatorStatus represent status and jailed state for validator in specific height an timestamp type ValidatorStatus struct { ConsensusAddress string ConsensusPubKey string Status int Jailed bool Height int64 } // NewValidatorVotingPower creates a new ValidatorVotingPower func NewValidatorStatus(address, pubKey string, status int, jailed bool, height int64) ValidatorStatus { return ValidatorStatus{ ConsensusAddress: address, ConsensusPubKey: pubKey, Status: status, Jailed: jailed, Height: height, } } // Equals tells whether v and w are equals func (v ValidatorStatus) Equals(w ValidatorStatus) bool { return v.ConsensusAddress == w.ConsensusAddress && v.ConsensusPubKey == w.ConsensusPubKey && v.Jailed == w.Jailed && v.Status == w.Status && v.Height == w.Height } //--------------------------------------------------------------- // DoubleSignEvidence represent a double sign evidence on each tendermint block type DoubleSignEvidence struct { VoteA DoubleSignVote VoteB DoubleSignVote Height int64 } // NewDoubleSignEvidence return a new DoubleSignEvidence object func NewDoubleSignEvidence(height int64, voteA DoubleSignVote, voteB DoubleSignVote) DoubleSignEvidence { return DoubleSignEvidence{ VoteA: voteA, VoteB: voteB, Height: height, } } // Equals tells whether v and w contain the same data func (w DoubleSignEvidence) Equals(v DoubleSignEvidence) bool { return w.VoteA.Equals(v.VoteA) && w.VoteB.Equals(v.VoteB) && w.Height == 
v.Height } // DoubleSignVote represents a double vote which is included inside a DoubleSignEvidence type DoubleSignVote struct { BlockID string ValidatorAddress string Signature string Type int Height int64 Round int32 ValidatorIndex int32 } // NewDoubleSignVote allows to create a new DoubleSignVote instance func NewDoubleSignVote( roundType int, height int64, round int32, blockID string, validatorAddress string, validatorIndex int32, signature string, ) DoubleSignVote { return DoubleSignVote{ Type: roundType, Height: height, Round: round, BlockID: blockID, ValidatorAddress: validatorAddress, ValidatorIndex: validatorIndex, Signature: signature, } } func (w DoubleSignVote) Equals(v DoubleSignVote) bool { return w.Type == v.Type && w.Height == v.Height && w.Round == v.Round && w.BlockID == v.BlockID && w.ValidatorAddress == v.ValidatorAddress && w.ValidatorIndex == v.ValidatorIndex && w.Signature == v.Signature }
x/staking/types/validator.go
0.76366
0.430686
validator.go
starcoder
package spher

import (
	"fmt"
	"math"
	"runtime"
	"sort"
	"sync"
)

// Precisions for float64.
const (
	FLOAT64_COARSER_PRECISION = 9
	FLOAT64_NORMAL_PRECISION  = 12
)

// FLOAT64_NAN_BITS is the bit pattern of a float64 NaN.
// This can be get as follows: math.Float64Bits(math.NaN()).
const FLOAT64_NAN_BITS = 0x7ff8000000000001

// Round64 rounds float64 x to the specified number of decimal places.
// NaN is returned unchanged.
func Round64(x float64, n int) float64 {
	if math.IsNaN(x) {
		return x
	}
	shift := math.Pow(10, float64(n))
	return math.Floor(x*shift+0.5) / shift
}

// Vector64 is a dense float64 vector.
// Unless specified, methods return self.
type Vector64 []float64

// Cmp64 compares Vector64 x and y lexicographically.
// If either of x or y contains float64 NaN, returns FLOAT64_NAN_BITS.
// Returns +1 if x is greater than y, -1 if x is smaller than y, otherwise 0.
// A strict prefix compares smaller than the longer vector.
func Cmp64(x, y Vector64) int {
	lmin := len(x)
	if lmin > len(y) {
		lmin = len(y)
	}
	i := 0
	s := 0
	for ; i < lmin; i++ {
		if math.IsNaN(x[i]) || math.IsNaN(y[i]) {
			return FLOAT64_NAN_BITS
		}
		if s == 0 {
			if x[i] < y[i] {
				s = -1
			} else if x[i] > y[i] {
				s = +1
			}
		}
	}
	// Scan the remaining tail of the longer vector for NaN.
	// BUG FIX: the original indexed x[i]/y[i] inside these loops, so it
	// re-checked only the first tail element and missed NaN further along.
	for ix := i; ix < len(x); ix++ {
		if math.IsNaN(x[ix]) {
			return FLOAT64_NAN_BITS
		}
	}
	for iy := i; iy < len(y); iy++ {
		if math.IsNaN(y[iy]) {
			return FLOAT64_NAN_BITS
		}
	}
	if s != 0 {
		return s
	}
	if len(x) < len(y) {
		return -1
	} else if len(x) > len(y) {
		return +1
	}
	return 0
}

// Dot64 returns the dot product of Vector64 x and y,
// or NaN when the lengths differ.
func Dot64(x, y Vector64) float64 {
	if len(x) != len(y) {
		return math.NaN()
	}
	s := float64(0)
	for i := 0; i < len(x); i++ {
		s += x[i] * y[i]
	}
	return s
}

// L2norm64 returns the L2-norm of Vector64 x (NaN for an empty vector).
func L2norm64(x Vector64) float64 {
	if len(x) == 0 {
		return math.NaN()
	}
	return math.Pow(Dot64(x, x), 0.5)
}

// Swap64 swaps the contents of Vector64 x and y element-wise.
// If the length of x is not equal to the one of y, nothing is changed.
func Swap64(x, y Vector64) {
	if len(x) == len(y) {
		for i := 0; i < len(x); i++ {
			x[i], y[i] = y[i], x[i]
		}
	}
}

// Apply SparseMatrix64 A to Vector64 x.
// Apply stores A*x into y and returns y.
func (y Vector64) Apply(A SparseMatrix64, x Vector64) Vector64 {
	A.Apply64(y, x)
	return y
}

// Returns a clone of Vector64 x.
// A clone is not affected by changes on x, and x is not affected by its clones.
func (x Vector64) Clone() Vector64 {
	cloneX := make(Vector64, len(x))
	copy(cloneX, x)
	return cloneX
}

// Fills self with float64 y.
func (x Vector64) Fill(y float64) Vector64 {
	for i := 0; i < len(x); i++ {
		x[i] = y
	}
	return x
}

// Multiply-add Vector64 x by float64 alpha and Vector64 y, that is, x + alpha*y.
// NOTE(review): panics if y is shorter than x — confirm callers guarantee equal length.
func (x Vector64) Madd(alpha float64, y Vector64) Vector64 {
	for i := 0; i < len(x); i++ {
		x[i] += alpha * y[i]
	}
	return x
}

// Multiply-copy Vector64 x by float64 alpha and Vector64 y, that is, alpha*y.
func (x Vector64) Mcopy(alpha float64, y Vector64) Vector64 {
	for i := 0; i < len(x); i++ {
		x[i] = alpha * y[i]
	}
	return x
}

// Multiply Vector64 x by float64 y.
func (x Vector64) Mul(y float64) Vector64 {
	for i := 0; i < len(x); i++ {
		x[i] *= y
	}
	return x
}

// Normalize Vector64 x in place (divides by its L2-norm).
func (x Vector64) Normalize() Vector64 {
	x.Mul(1.0 / L2norm64(x))
	return x
}

// Round the elements of Vector64 x to the specified number of decimal places.
func (x Vector64) Round(n int) Vector64 {
	for i := 0; i < len(x); i++ {
		x[i] = Round64(x[i], n)
	}
	return x
}

// SparseMatrix64 is an interface for a float64 sparse matrix.
type SparseMatrix64 interface {
	// Applies self to Vector64 x, and stores the result to Vector64 y.
	// If any error happens, fill Vector64 y with float64 NaN.
	Apply64(y, x Vector64)
	// Returns the number of columns.
	Ncols() int
	// Returns the number of rows.
	Nrows() int
	// Returns a transposed self.
	// The transpose states must not be affected by each other.
	T() SparseMatrix64
}

// AugmentedSparseMatrix64 is a type for augmented SparseMatrix64.
type AugmentedSparseMatrix64 struct {
	a SparseMatrix64
}

// AugmentSparseMatrix64 returns the augmented matrix of SparseMatrix64 A.
// The augmented matrix of A is defined as (t(O, A), t(t(A), O)).
func AugmentSparseMatrix64(A SparseMatrix64) *AugmentedSparseMatrix64 {
	return &AugmentedSparseMatrix64{a: A}
}

// Applies self to Vector64 x, and stores the result to Vector64 y.
// If any error happens, fill Vector64 y with float64 NaN.
func (A *AugmentedSparseMatrix64) Apply64(y, x Vector64) {
	if !((len(y) == A.Nrows()) && (len(x) == A.Ncols())) {
		y.Fill(math.NaN())
		return
	}
	y.Fill(0.0)
	// (t(O, A), t(t(A), O))t(t(x1), t(x2)) = t(t(Ax2), t(t(A)x1)).
	y[0:A.a.Nrows()].Apply(A.a, x[A.a.Nrows():(A.a.Nrows()+A.a.Ncols())])
	y[A.a.Nrows():(A.a.Nrows()+A.a.Ncols())].Apply(A.a.T(), x[0:A.a.Nrows()])
}

// Returns the number of columns.
func (A *AugmentedSparseMatrix64) Ncols() int {
	return A.a.Nrows() + A.a.Ncols()
}

// Returns the number of rows.
func (A *AugmentedSparseMatrix64) Nrows() int {
	return A.a.Nrows() + A.a.Ncols()
}

// Returns a transposed self.
// The transpose states are not affected by each other.
func (A *AugmentedSparseMatrix64) T() SparseMatrix64 {
	return &AugmentedSparseMatrix64{a: A.a.T()}
}

// SymmetrizedMatrix64 is a type for t(A)A.
type SymmetrizedMatrix64 struct {
	a SparseMatrix64
}

// Symmetrizes SparseMatrix64 A, that is, returns t(A)A.
func Symmetrize64(A SparseMatrix64) SparseMatrix64 {
	return &SymmetrizedMatrix64{A}
}

// Applies self to Vector64 x, and stores the result to Vector64 y.
// If any error happens, fill Vector64 y with float64 NaN.
// The intermediate z holds A*x before t(A) is applied.
func (A *SymmetrizedMatrix64) Apply64(y, x Vector64) {
	z := make(Vector64, A.a.Nrows())
	z.Apply(A.a, x)
	y.Apply(A.a.T(), z)
}

// Returns the number of columns.
func (A *SymmetrizedMatrix64) Ncols() int {
	return A.a.Ncols()
}

// Returns the number of rows.
func (A *SymmetrizedMatrix64) Nrows() int {
	return A.a.Ncols()
}

// Returns a transposed self, that is, returns self (t(A)A is symmetric).
func (A *SymmetrizedMatrix64) T() SparseMatrix64 {
	return A
}

// CSRMatrix64 is a float64 Compressed Sparse Row (CSR) Matrix.
// a holds the non-zero values, ia the per-row offsets into a/ja,
// and ja the column index of each stored value.
type CSRMatrix64 struct {
	nrows, ncols int
	a            []float64
	ia, ja       []int
	transposed   bool
}

// Returns a new *CSRMatrix64 created from row map m,
// or nil when any index lies outside [0, nrows) x [0, ncols).
func NewCSRMatrix64FromRowMap(nrows, ncols int, m map[int]map[int]float64) *CSRMatrix64 {
	if !((nrows >= 0) && (ncols >= 0)) {
		return nil
	}
	n := 0
	for i, mrow := range m {
		if !((0 <= i) && (i < nrows)) {
			return nil
		}
		n += len(mrow)
		for j, _ := range mrow {
			if !((0 <= j) && (j < ncols)) {
				return nil
			}
		}
	}
	a := make([]float64, n)
	ia := make([]int, nrows+1)
	ia[len(ia)-1] = n
	ja := make([]int, n)
	nuseds := 0
	for i := 0; i < nrows; i++ {
		mrow := m[i]
		aStart, aEnd := nuseds, nuseds+len(mrow)
		ia[i] = aStart
		for j, _ := range mrow {
			ja[nuseds] = j
			nuseds++
		}
		// Column indices within a row are kept sorted.
		sort.Ints(ja[aStart:aEnd])
		for ai := aStart; ai < aEnd; ai++ {
			a[ai] = mrow[ja[ai]]
		}
	}
	return &CSRMatrix64{
		nrows:      nrows,
		ncols:      ncols,
		a:          a,
		ia:         ia,
		ja:         ja,
		transposed: false,
	}
}

// For interface GoStringer in package fmt.
// This accepts struct itself and struct pointer, and returns the same string representation.
// Hence, the string representation is slightly different from one used in package fmt.
func (A CSRMatrix64) GoString() string {
	return fmt.Sprintf("%T(nrows:%d, ncols:%d)", A, A.Nrows(), A.Ncols())
}

// For interface Stringer in package fmt.
// This is equivalent to method GoString.
func (A CSRMatrix64) String() string {
	return A.GoString()
}

// Applies self to Vector64 x, and stores the result to Vector64 y.
// If any error happens, fill Vector64 y with float64 NaN.
// The transposed flag selects which index (row or column) accumulates.
func (A *CSRMatrix64) Apply64(y, x Vector64) {
	if !((len(y) == A.Nrows()) && (len(x) == A.Ncols())) {
		y.Fill(math.NaN())
		return
	}
	y.Fill(0.0)
	if A.transposed {
		for i := 0; i < A.nrows; i++ {
			for ai := A.ia[i]; ai < A.ia[i+1]; ai++ {
				y[A.ja[ai]] += A.a[ai] * x[i]
			}
		}
	} else {
		for i := 0; i < A.nrows; i++ {
			for ai := A.ia[i]; ai < A.ia[i+1]; ai++ {
				y[i] += A.a[ai] * x[A.ja[ai]]
			}
		}
	}
}

// Returns the number of columns.
func (A *CSRMatrix64) Ncols() int { if A.transposed { return A.nrows } else { return A.ncols } } // Returns the number of rows. func (A *CSRMatrix64) Nrows() int { if A.transposed { return A.ncols } else { return A.nrows } } // Returns a transposed self. // The transpose states are not affected by each other. func (A *CSRMatrix64) T() SparseMatrix64 { return &CSRMatrix64{ nrows: A.nrows, ncols: A.ncols, a: A.a, ia: A.ia, ja: A.ja, transposed: !A.transposed, } } // Matrix64 is a dense column-major float64 matrix. Unless specified, methods return self. type Matrix64 struct { elems Vector64 nrows, ncols int } // Returns a new Matrix64. func NewMatrix64(nrows, ncols int) *Matrix64 { return &Matrix64{ elems: make(Vector64, nrows*ncols), nrows: nrows, ncols: ncols, } } // Returns a new order-n identity Matrix64. func NewMatrix64I(n int) *Matrix64 { I := NewMatrix64(n, n) for i := 0; i < n; i++ { I.Elems()[i+i*I.Nrows()] = 1.0 } return I } // Returns a new order-n symmetric tridiagonal Matrix64. // If len(sdiag) is not len(sdiag) - 1, returns 0x0 Matrix64. func NewSymmetricTridiagonalMatrix64(diag, sdiag Vector64) *Matrix64 { if len(sdiag) != len(diag)-1 { return NewMatrix64(0, 0) } T := NewMatrix64(len(diag), len(diag)) for i := 0; i < len(sdiag); i++ { T.Elems()[i+i*T.Nrows()] = diag[i] T.Elems()[(i+1)+i*T.Nrows()] = sdiag[i] T.Elems()[i+(i+1)*T.Nrows()] = sdiag[i] } T.Elems()[(len(diag)-1)+(len(diag)-1)*T.Nrows()] = diag[len(diag)-1] return T } // Returns a dense Matrix64 of SparseMatrix64. func NewMatrix64FromSparseMatrix64(A SparseMatrix64) *Matrix64 { denseA := NewMatrix64(A.Nrows(), A.Ncols()) x := make(Vector64, A.Ncols()) for j := 0; j < A.Ncols(); j++ { denseA.Elems()[j*A.Nrows():(j+1)*A.Nrows()].Apply(A, x) } return denseA } // For interface GoStringer in package fmt. // This accepts struct itself and struct pointer, and returns the same string representation. // Hence, the string representation is slightly different from one used in package fmt. 
func (A Matrix64) GoString() string {
	return fmt.Sprintf("%T(nrows:%d, ncols:%d)", A, A.Nrows(), A.Ncols())
}

// For interface Stringer in package fmt.
// This is equivalent to method GoString.
func (A Matrix64) String() string {
	return A.GoString()
}

// Applies self to Vector64 x, and stores the result to Vector64 y.
// If any error happens, fill Vector64 y with float64 NaN.
func (A *Matrix64) Apply64(y, x Vector64) {
	if !((len(y) == A.Nrows()) && (len(x) == A.Ncols())) {
		y.Fill(math.NaN())
		return
	}
	m, n := A.Nrows(), A.Ncols()
	for i := 0; i < m; i++ {
		s := 0.0
		for j := 0; j < n; j++ {
			// Column-major layout: element (i, j) lives at i + j*nrows.
			s += A.elems[i+j*m] * x[j]
		}
		y[i] = s
	}
}

// Returns a clone of Matrix64 A.
// A clone is not affected by changes on A, and A is not affected by its clones.
func (A *Matrix64) Clone() *Matrix64 {
	elems := make(Vector64, len(A.elems))
	copy(elems, A.elems)
	return &Matrix64{
		elems: elems,
		nrows: A.nrows,
		ncols: A.ncols,
	}
}

// Returns the result of AB.
// If any error happens, returns 0x0-Matrix64.
func (A *Matrix64) Compose(B *Matrix64) *Matrix64 {
	if A.Ncols() != B.Nrows() {
		return NewMatrix64(0, 0)
	}
	return NewMatrix64(A.Nrows(), B.Ncols()).MatMul(A, B)
}

// Returns the underlying Vector64 (shared storage, not a copy).
func (A *Matrix64) Elems() Vector64 {
	return A.elems
}

// Calculate BC, and stores the result to self.
// If any error happens, returns 0x0-Matrix64.
// Large products (>= 10^9 scalar multiplications) are split across
// row bands and computed on NumCPU-1 goroutines.
func (A *Matrix64) MatMul(B, C *Matrix64) *Matrix64 {
	if !((A.Nrows() >= B.Nrows()) && (B.Ncols() == C.Nrows()) && (A.Ncols() >= C.Ncols())) {
		return NewMatrix64(0, 0)
	}
	if ncpus := runtime.NumCPU(); (ncpus >= 3) && (B.Nrows()*B.Ncols()*C.Ncols() >= 1000*1000*1000) {
		ncpus -= 1
		if ncpus >= B.Nrows() {
			ncpus = B.Nrows()
		}
		var wg sync.WaitGroup
		for cpu := 0; cpu < ncpus; cpu++ {
			wg.Add(1)
			go func(cpu int) {
				defer wg.Done()
				// Each goroutine owns a disjoint band of rows, so no locking is needed.
				A.MatMulInPartialRows(B, C, cpu*B.Nrows()/ncpus, (cpu+1)*B.Nrows()/ncpus)
			}(cpu)
		}
		wg.Wait()
		return A
	} else {
		return A.MatMulInPartialRows(B, C, 0, A.Nrows())
	}
}

// Calculate the rows from istart-th to iend-th of BC, and stores the result to self.
// If any error happens, returns 0x0-Matrix64.
func (A *Matrix64) MatMulInPartialRows(B, C *Matrix64, istart, iend int) *Matrix64 {
	if !((0 <= istart) && (istart <= iend) && (iend <= B.Nrows()) && (A.Nrows() >= B.Nrows()) && (B.Ncols() == C.Nrows()) && (A.Ncols() >= C.Ncols())) {
		return NewMatrix64(0, 0)
	}
	for i := istart; i < iend; i++ {
		for j := 0; j < C.Ncols(); j++ {
			s := 0.0
			for k := 0; k < B.Ncols(); k++ {
				s += B.Elems()[i+k*B.Nrows()] * C.Elems()[k+j*C.Nrows()]
			}
			A.Elems()[i+j*A.Nrows()] = s
		}
	}
	return A
}

// Returns the number of columns.
func (A *Matrix64) Ncols() int {
	return A.ncols
}

// Returns the number of rows.
func (A *Matrix64) Nrows() int {
	return A.nrows
}

// Returns the sliced columns [j, j+n) as a view sharing storage with A.
// If the range is out of bound, returns 0x0 Matrix64.
func (A *Matrix64) SlicedColumns(j, n int) *Matrix64 {
	if !((0 <= j) && (j <= j+n) && (j+n <= A.Ncols())) {
		return NewMatrix64(0, 0)
	}
	return &Matrix64{
		elems: A.Elems()[j*A.Nrows() : (j+n)*A.Nrows()],
		nrows: A.Nrows(),
		ncols: n,
	}
}

// Returns a copy of the transposed self.
func (A *Matrix64) T() SparseMatrix64 { tA := &Matrix64{ elems: make(Vector64, len(A.elems)), nrows: A.ncols, ncols: A.nrows, } m, n := A.Nrows(), A.Ncols() for i := 0; i < m; i++ { for j := 0; j < n; j++ { tA.elems[j+i*n] = A.elems[i+j*m] } } return tA } // Matrix64WithColumnValues is a type for sorting the columns of A with ColValues. type Matrix64WithColumnValues struct { A *Matrix64 ColValues Vector64 } // For sort.Interface. func (mcv *Matrix64WithColumnValues) Len() int { return mcv.A.Ncols() } // For sort.Interface. func (mcv *Matrix64WithColumnValues) Less(i, j int) bool { return mcv.ColValues[i] < mcv.ColValues[j] } // For sort.Interface. func (mcv *Matrix64WithColumnValues) Swap(i, j int) { mcv.ColValues[i], mcv.ColValues[j] = mcv.ColValues[j], mcv.ColValues[i] Swap64(mcv.A.SlicedColumns(i, 1).Elems(), mcv.A.SlicedColumns(j, 1).Elems()) }
float64.go
0.806548
0.490785
float64.go
starcoder
package klash

import (
	"errors"
	"fmt"
	"reflect"
)

// Params stores the mapping of ParamName -> Parameter for a given structure.
// Since multiple names can be affected to a single parameter (its name plus
// an optional alias), multiple keys of Mapping can be associated with a
// single *Parameter. Listing preserves declaration order, one entry per field.
type Params struct {
	Mapping map[string]*Parameter
	Listing []*Parameter
}

// MakeParams allocates an empty Params whose Listing has capacity for
// fieldCount parameters.
func MakeParams(fieldCount int) *Params {
	return &Params{
		make(map[string]*Parameter),
		make([]*Parameter, 0, fieldCount),
	}
}

// NewParams builds a Params from a pointer to a struct whose fields describe
// the command-line parameters. It returns an error if parameters is not a
// pointer to a struct, or if parsing any field fails.
func NewParams(parameters interface{}) (*Params, error) {
	pvalue := reflect.ValueOf(parameters)

	if pvalue.Kind() != reflect.Ptr || pvalue.Elem().Kind() != reflect.Struct {
		return nil, errors.New("klash: Pointer to struct expected")
	}

	fieldCount := pvalue.Type().Elem().NumField()
	params := MakeParams(fieldCount)
	if err := params.Parse(&pvalue); err != nil {
		return nil, err
	}
	return params, nil
}

// Parse discovers the given parameters structure and associates the structure's
// field names (and aliases, when declared) with their values into the Params
// structure. Duplicate names/aliases produce an error via Set.
func (p *Params) Parse(pvalue *reflect.Value) error {
	vtype := pvalue.Type().Elem()
	for idx := 0; idx < vtype.NumField(); idx++ {
		field := vtype.Field(idx)
		value := pvalue.Elem().Field(idx)

		// Slice fields are initialized to an empty (non-nil) slice so that
		// repeated flags can append to them.
		if value.Kind() == reflect.Slice {
			value.Set(reflect.MakeSlice(value.Type(), 0, 0))
		}

		parameter := NewParameter(field.Name, value)
		// Struct tags may override the name/alias and other properties.
		if err := parameter.DiscoverProperties(field.Tag); err != nil {
			return err
		}

		if err := p.Set(parameter.Name, parameter); err != nil {
			return err
		}
		if parameter.Alias != "" {
			if err := p.Set(parameter.Alias, parameter); err != nil {
				return err
			}
		}
		p.Listing = append(p.Listing, parameter)
	}
	return nil
}

// Get looks up a parameter by key; the key is normalized with DecomposeName
// before the lookup, mirroring Set.
func (p *Params) Get(key string) (*Parameter, bool) {
	val, ok := p.Mapping[DecomposeName(key, true)]
	return val, ok
}

// Set registers a parameter under the normalized key, failing if the key is
// already taken by another parameter's name or alias.
func (p *Params) Set(key string, value *Parameter) error {
	key = DecomposeName(key, true)
	_, ok := p.Mapping[key]
	if ok {
		return fmt.Errorf("klash: %s is already an argument or an alias", key)
	}
	p.Mapping[key] = value
	return nil
}
vendor/github.com/mota/klash/params.go
0.682997
0.41182
params.go
starcoder
package commands

import (
	"fmt"
	"sort"
	"strings"
	tbw "text/tabwriter"
	"time"

	"github.com/agnivade/levenshtein"
)

// LevenshteinDistance contains the computed levenshtein distance
// between Str1 and Str2.
type LevenshteinDistance struct {
	Str1     string
	Str2     string
	Distance int
}

// Compute computes (or recomputes) the levenshtein distance between
// Str1 and Str2 and stores it in Distance.
func (ld *LevenshteinDistance) Compute() {
	ld.Distance = levenshtein.ComputeDistance(ld.Str1, ld.Str2)
}

// LevenshteinDistances is a list of computed levenshtein distances.
type LevenshteinDistances []LevenshteinDistance

// ComputeAll computes all levenshtein distances in the list, in place.
func (lds LevenshteinDistances) ComputeAll() {
	for i := range lds {
		lds[i].Compute()
	}
}

// SortByDistance sorts the list of levenshtein distances by ascending distance.
func (lds LevenshteinDistances) SortByDistance() {
	sort.Slice(lds, func(i, j int) bool {
		return lds[i].Distance < lds[j].Distance
	})
}

// PrettyCountdount sees a Duration as a countdown for an event to happen
// and transforms it into an easily readable string representation.
// The result contains information about whether the event is in the past or
// still to come. If the event is still to come, it will also contain
// information about the time left, expressed in minutes, hours, or days
// depending on the value of the duration.
// NOTE(review): the name has a typo ("Countdount"); it is exported, so
// renaming it would break callers.
func PrettyCountdount(d time.Duration) string {
	switch {
	case d < 0:
		// Negative duration: the event has already happened.
		return "already over"
	case d.Hours() < 1:
		return fmt.Sprintf("%.1f minutes to go", d.Minutes())
	case d.Hours() < 24:
		return fmt.Sprintf("%.1f hours to go", d.Hours())
	default:
		return fmt.Sprintf("%.1f days to go", d.Hours()/24)
	}
}

// ParseRFC3339InLocation parses a time string in RFC3339 as a time, and
// returns that time converted to the IANA location provided.
// On error the zero (or partially parsed) time is returned along with a
// wrapped error describing which step failed.
func ParseRFC3339InLocation(timeValue string, ianaLocation string) (time.Time, error) {
	rfc3339Time, err := time.Parse(time.RFC3339, timeValue)
	if err != nil {
		return rfc3339Time, fmt.Errorf("parsing time '%s' in RFC3339 format: %v", timeValue, err)
	}
	location, err := time.LoadLocation(ianaLocation)
	if err != nil {
		return rfc3339Time, fmt.Errorf("loading '%s' location: %v", ianaLocation, err)
	}
	return rfc3339Time.In(location), nil
}

// HeaderMessage represents a message to discord with a header and a description.
type HeaderMessage struct {
	Header      string
	Description string
}

// String renders the message: the header (uppercased, bold via Discord
// markdown) on one line, then the description. Empty parts are omitted.
func (hm *HeaderMessage) String() string {
	var message strings.Builder

	// Write header
	if hm.Header != "" {
		message.WriteString(fmt.Sprintf("**%s**\n", strings.ToUpper(hm.Header)))
	}

	// Write description
	if hm.Description != "" {
		message.WriteString(hm.Description + "\n")
	}

	return message.String()
}

// TabularMessage represents a message with a header, description and some tabular data.
type TabularMessage struct {
	HeaderMessage
	TableHeader []string
	TableRows   [][]string
}

// SetTableHeader sets the table header columns.
func (tm *TabularMessage) SetTableHeader(headers ...string) {
	tm.TableHeader = headers
}

// AddRow adds a row of data.
func (tm *TabularMessage) AddRow(rowData ...string) {
	tm.TableRows = append(tm.TableRows, rowData)
}

// String returns the TabularMessage formatted for discord: the header
// message followed by the table aligned with text/tabwriter and wrapped
// in a Discord code block.
func (tm *TabularMessage) String() string {
	var message strings.Builder
	message.WriteString(tm.HeaderMessage.String())

	// Make table
	var tablebBuilder strings.Builder
	// minwidth 0, tabwidth 0, padding 3, pad with spaces.
	tableWriter := tbw.NewWriter(&tablebBuilder, 0, 0, 3, ' ', 0)
	fmt.Fprintln(tableWriter, strings.Join(tm.TableHeader, "\t"))
	for _, rowData := range tm.TableRows {
		fmt.Fprintln(tableWriter, strings.Join(rowData, "\t"))
	}
	tableWriter.Flush()

	// Write table
	message.WriteString("```" + tablebBuilder.String() + "```")
	return message.String()
}

// RaceHourComment returns a string with a comment about how late or not is
// the hour of the race. Hours strictly between 4 and 9 (local to raceTime)
// count as "early".
func RaceHourComment(raceTime time.Time) string {
	hour := raceTime.Hour()
	if hour > 4 && hour < 9 {
		return "Unfortunately it seems you'll have to wake up early if you want to watch the race :("
	}
	return "It seems a decent hour for the race. You won't have to wake up early!"
}
commands/utils.go
0.710929
0.669599
utils.go
starcoder
package bebop

import (
	"github.com/hybridgroup/gobot"
)

// Compile-time check that BebopDriver satisfies gobot.Driver.
var _ gobot.Driver = (*BebopDriver)(nil)

// BebopDriver is gobot.Driver representation for the Bebop.
// All flight commands delegate to the underlying adaptor's drone.
type BebopDriver struct {
	name       string
	connection gobot.Connection
	gobot.Eventer
}

// NewBebopDriver creates a BebopDriver with the specified name.
// It registers the "flying" event, published by TakeOff.
func NewBebopDriver(connection *BebopAdaptor, name string) *BebopDriver {
	d := &BebopDriver{
		name:       name,
		connection: connection,
		Eventer:    gobot.NewEventer(),
	}
	d.AddEvent("flying")
	return d
}

// Name returns the BebopDrivers Name.
func (a *BebopDriver) Name() string { return a.name }

// Connection returns the BebopDrivers Connection.
func (a *BebopDriver) Connection() gobot.Connection { return a.connection }

// adaptor returns the underlying Bebop adaptor (asserts the connection type).
func (a *BebopDriver) adaptor() *BebopAdaptor {
	return a.Connection().(*BebopAdaptor)
}

// Start starts the BebopDriver. It is a no-op: the adaptor owns the connection.
func (a *BebopDriver) Start() (errs []error) {
	return
}

// Halt halts the BebopDriver. It is a no-op: the adaptor owns the connection.
func (a *BebopDriver) Halt() (errs []error) {
	return
}

// TakeOff makes the drone start flying, publishing the result of the
// underlying TakeOff call on the "flying" event.
func (a *BebopDriver) TakeOff() {
	gobot.Publish(a.Event("flying"), a.adaptor().drone.TakeOff())
}

// Land causes the drone to land.
func (a *BebopDriver) Land() {
	a.adaptor().drone.Land()
}

// Up makes the drone gain altitude.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Up(speed int) {
	a.adaptor().drone.Up(speed)
}

// Down makes the drone reduce altitude.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Down(speed int) {
	a.adaptor().drone.Down(speed)
}

// Left causes the drone to bank to the left, controls the roll, which is
// a horizontal movement using the camera as a reference point.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Left(speed int) {
	a.adaptor().drone.Left(speed)
}

// Right causes the drone to bank to the right, controls the roll, which is
// a horizontal movement using the camera as a reference point.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Right(speed int) {
	a.adaptor().drone.Right(speed)
}

// Forward causes the drone go forward, controls the pitch.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Forward(speed int) {
	a.adaptor().drone.Forward(speed)
}

// Backward causes the drone to go backward, controls the pitch.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Backward(speed int) {
	a.adaptor().drone.Backward(speed)
}

// Clockwise causes the drone to spin in clockwise direction.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) Clockwise(speed int) {
	a.adaptor().drone.Clockwise(speed)
}

// CounterClockwise causes the drone to spin in counter clockwise direction.
// speed can be a value from `0` to `100`.
func (a *BebopDriver) CounterClockwise(speed int) {
	a.adaptor().drone.CounterClockwise(speed)
}

// Stop makes the drone to hover in place.
func (a *BebopDriver) Stop() {
	a.adaptor().drone.Stop()
}

// Video returns a channel which raw video frames will be broadcast on.
func (a *BebopDriver) Video() chan []byte {
	return a.adaptor().drone.Video()
}

// StartRecording starts recording video to the drone's internal storage.
func (a *BebopDriver) StartRecording() error {
	return a.adaptor().drone.StartRecording()
}

// StopRecording stops a previously started recording.
func (a *BebopDriver) StopRecording() error {
	return a.adaptor().drone.StopRecording()
}

// HullProtection tells the drone if the hull/prop protectors are attached. This is needed to adjust flight characteristics of the Bebop.
func (a *BebopDriver) HullProtection(protect bool) error {
	return a.adaptor().drone.HullProtection(protect)
}

// Outdoor tells the drone if flying Outdoor or not. This is needed to adjust flight characteristics of the Bebop.
func (a *BebopDriver) Outdoor(outdoor bool) error {
	return a.adaptor().drone.Outdoor(outdoor)
}
vendor/github.com/hybridgroup/gobot/platforms/bebop/bebop_driver.go
0.819713
0.520131
bebop_driver.go
starcoder
package collections

import "fmt"

// Zipper represents a pair of ordered collections that can be zipped together.
// Elements of each collection are assumed to be sorted in ascending order.
type Zipper interface {
	// Comparable must compare the left and right collection elements at i and
	// j respectively, returning the comparison with respect to the left value.
	// If the left element is less than the right, Compare should return Less.
	// If the left and right elements are equal, return Equal. If the left
	// element is greater, return Greater. Any other value will cause a panic
	// during Zip.
	Comparable

	// LenLeft returns the length of the left collection.
	LenLeft() int

	// LenRight returns the length of the right collection.
	LenRight() int

	// AddLeft adds only the element from the left collection at i to the
	// zipped collection.
	AddLeft(i int)

	// AddRight adds only the element from the right collection at j to the
	// zipped collection.
	AddRight(j int)

	// AddBoth adds both the left and right elements at i and j to the zipped
	// collection.
	AddBoth(i, j int)
}

// ZipWithGaps iterates through each element in the collection, comparing each
// leading element in each collection exactly once. Any two elements that are
// equal will be zipped together by AddBoth, otherwise the lesser element will
// be added on its own by AddLeft or AddRight. Assumes the left and right
// collections are sorted in ascending order when ordered by z.Compare.
func ZipWithGaps(z Zipper) { i, j := 0, 0 maxLeft, maxRight := z.LenLeft(), z.LenRight() if maxLeft < 0 || maxRight < 0 { panic(fmt.Sprintf("ZipWithGaps: negative lengths %d %d", maxLeft, maxRight)) } for i < maxLeft || j < maxRight { switch { case i >= maxLeft: z.AddRight(j) j++ case j >= maxRight: z.AddLeft(i) i++ default: switch c := z.Compare(i, j); { case c == Less: z.AddLeft(i) i++ case c == Greater: z.AddRight(j) j++ case c == Equal: z.AddBoth(i, j) i++ j++ default: msg := fmt.Sprintf("Zip: compare returned %d: expected %s, "+ "%s, or %s", c, Less, Equal, Greater) panic(msg) } } } } type alwaysEqualZipper struct { Zipper } func (z *alwaysEqualZipper) Compare(i, j int) Ord { return Equal } // Zip zips both collections in the zipper together assuming elements are // always equal. This is equivalent to ZipWithGaps where Zipper.Compare always // returns Equal. func Zip(z Zipper) { ZipWithGaps(&alwaysEqualZipper{z}) }
zip.go
0.774071
0.721817
zip.go
starcoder
package types func IsZeroString(a string) bool { return a == "" } func IsZeroStringOptional(a *string) bool { if a == nil { return true } return false } func IsZeroStringMultiples(a, b []string) bool { if len(a) == 0 { return true } return false } func IsZeroMapStringString(a map[string]string) bool { if len(a) == 0 { return true } return false } func IsZeroMapStringInteger(a map[string]int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapStringReal(a map[string]float64) bool { if len(a) == 0 { return true } return false } func IsZeroMapStringBoolean(a map[string]bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapStringUuid(a map[string]string) bool { if len(a) == 0 { return true } return false } func IsZeroInteger(a int64) bool { return a == 0 } func IsZeroIntegerOptional(a *int64) bool { if a == nil { return true } return false } func IsZeroIntegerMultiples(a, b []int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapIntegerString(a map[int64]string) bool { if len(a) == 0 { return true } return false } func IsZeroMapIntegerInteger(a map[int64]int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapIntegerReal(a map[int64]float64) bool { if len(a) == 0 { return true } return false } func IsZeroMapIntegerBoolean(a map[int64]bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapIntegerUuid(a map[int64]string) bool { if len(a) == 0 { return true } return false } func IsZeroReal(a float64) bool { return a == 0 } func IsZeroRealOptional(a *float64) bool { if a == nil { return true } return false } func IsZeroRealMultiples(a, b []float64) bool { if len(a) == 0 { return true } return false } func IsZeroMapRealString(a map[float64]string) bool { if len(a) == 0 { return true } return false } func IsZeroMapRealInteger(a map[float64]int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapRealReal(a map[float64]float64) bool { if len(a) == 0 { return true } 
return false } func IsZeroMapRealBoolean(a map[float64]bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapRealUuid(a map[float64]string) bool { if len(a) == 0 { return true } return false } func IsZeroBoolean(a bool) bool { return a == false } func IsZeroBooleanOptional(a *bool) bool { if a == nil { return true } return false } func IsZeroBooleanMultiples(a, b []bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapBooleanString(a map[bool]string) bool { if len(a) == 0 { return true } return false } func IsZeroMapBooleanInteger(a map[bool]int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapBooleanReal(a map[bool]float64) bool { if len(a) == 0 { return true } return false } func IsZeroMapBooleanBoolean(a map[bool]bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapBooleanUuid(a map[bool]string) bool { if len(a) == 0 { return true } return false } func IsZeroUuid(a string) bool { return a == "" } func IsZeroUuidOptional(a *string) bool { if a == nil { return true } return false } func IsZeroUuidMultiples(a, b []string) bool { if len(a) == 0 { return true } return false } func IsZeroMapUuidString(a map[string]string) bool { if len(a) == 0 { return true } return false } func IsZeroMapUuidInteger(a map[string]int64) bool { if len(a) == 0 { return true } return false } func IsZeroMapUuidReal(a map[string]float64) bool { if len(a) == 0 { return true } return false } func IsZeroMapUuidBoolean(a map[string]bool) bool { if len(a) == 0 { return true } return false } func IsZeroMapUuidUuid(a map[string]string) bool { if len(a) == 0 { return true } return false }
vendor/yunion.io/x/ovsdb/types/atomic_gen_iszero_zz_generated.go
0.665737
0.610889
atomic_gen_iszero_zz_generated.go
starcoder
package main

import (
	"../ahrs"
	"errors"
	"github.com/skelterjohn/go.matrix"
	"math"
	"math/rand"
	"sort"
)

const (
	Pi    = math.Pi
	Deg   = Pi / 180 // degrees -> radians conversion factor
	Small = 1e-6     // finite-difference step used to estimate time derivatives
)

var TimeError = errors.New("requested time is outside of scenario")

// SituationSim defines a scenario by piecewise-linear interpolation.
// NOTE(review): the angle slices (phi, theta, psi, phi0, theta0, psi0) appear
// to hold values in degrees, despite the "rad" field comments — Interpolate
// multiplies them by Deg before use. Confirm with callers.
type SituationSim struct {
	t                  []float64              // times for situation, s (must be ascending)
	u1, u2, u3         []float64              // airspeed, kts, aircraft frame [F/B, R/L, and U/D]
	phi, theta, psi    []float64              // attitude [roll R/L, pitch U/D, heading N->E->S->W]
	phi0, theta0, psi0 []float64              // base attitude [adjust for position of stratux on glareshield]
	v1, v2, v3         []float64              // windspeed, kts, earth frame [N/S, E/W, and U/D]
	m1, m2, m3         []float64              // magnetometer reading
	logMap             map[string]interface{} // Map only for analysis/debugging
}

// BeginTime returns the time stamp when the simulation begins.
func (s *SituationSim) BeginTime() float64 {
	return s.t[0]
}

// Interpolate fills st with an ahrs.State linearly interpolated from the
// scenario at time t, applying the given accelerometer/gyro/magnetometer
// biases. Returns TimeError if t lies outside the scenario's time range.
func (s *SituationSim) Interpolate(t float64, st *ahrs.State, aBias, bBias, mBias []float64) error {
	if t < s.t[0] || t > s.t[len(s.t)-1] {
		// NOTE(review): reassigning the local pointer has no effect on the
		// caller's State; st is left unmodified on error.
		st = new(ahrs.State)
		return TimeError
	}
	// Locate the segment [t[ix], t[ix+1]] containing t; f is the weight of
	// the left endpoint.
	ix := 0
	if t > s.t[0] {
		ix = sort.SearchFloat64s(s.t, t) - 1
	}
	ddt := (s.t[ix+1] - s.t[ix])
	f := (s.t[ix+1] - t) / ddt
	// These are the fields we need to calculate:
	// U, Z, E, H, N,
	// V, C, F, D, L
	st.U1 = f*s.u1[ix] + (1-f)*s.u1[ix+1]
	st.U2 = f*s.u2[ix] + (1-f)*s.u2[ix+1]
	st.U3 = f*s.u3[ix] + (1-f)*s.u3[ix+1]
	// Z: airspeed rate of change, expressed in G's.
	st.Z1 = (s.u1[ix+1] - s.u1[ix]) / ddt / ahrs.G
	st.Z2 = (s.u2[ix+1] - s.u2[ix]) / ddt / ahrs.G
	st.Z3 = (s.u3[ix+1] - s.u3[ix]) / ddt / ahrs.G
	st.E0, st.E1, st.E2, st.E3 = ahrs.ToQuaternion(
		(f*s.phi[ix]+(1-f)*s.phi[ix+1])*Deg,
		(f*s.theta[ix]+(1-f)*s.theta[ix+1])*Deg,
		(f*s.psi[ix]+(1-f)*s.psi[ix+1])*Deg)
	// For calculating the Hx, we need to calculate the Ex a small time from now to find their derivatives
	tz := Small
	fz := (s.t[ix+1] - (t + tz)) / ddt
	ez0, ez1, ez2, ez3 := ahrs.ToQuaternion(
		(fz*s.phi[ix]+(1-fz)*s.phi[ix+1])*Deg,
		(fz*s.theta[ix]+(1-fz)*s.theta[ix+1])*Deg,
		(fz*s.psi[ix]+(1-fz)*s.psi[ix+1])*Deg)
	// dEx are Ex derivatives (forward difference over tz)
	dE0 := +(ez0 - st.E0) / tz
	dE1 := -(ez1 - st.E1) / tz
	dE2 := -(ez2 - st.E2) / tz
	dE3 := -(ez3 - st.E3) / tz
	// H: rotation rates derived from the quaternion and its derivative.
	st.H1 = -2 * (st.E0*dE1 + st.E1*dE0 + st.E2*dE3 - st.E3*dE2) / Deg
	st.H2 = -2 * (st.E0*dE2 - st.E1*dE3 + st.E2*dE0 + st.E3*dE1) / Deg
	st.H3 = -2 * (st.E0*dE3 + st.E1*dE2 - st.E2*dE1 + st.E3*dE0) / Deg
	st.N1 = f*s.m1[ix] + (1-f)*s.m1[ix+1]
	st.N2 = f*s.m2[ix] + (1-f)*s.m2[ix+1]
	st.N3 = f*s.m3[ix] + (1-f)*s.m3[ix+1]
	st.V1 = f*s.v1[ix] + (1-f)*s.v1[ix+1]
	st.V2 = f*s.v2[ix] + (1-f)*s.v2[ix+1]
	st.V3 = f*s.v3[ix] + (1-f)*s.v3[ix+1]
	// C/D/L: sensor biases passed straight through from the arguments.
	st.C1 = aBias[0]
	st.C2 = aBias[1]
	st.C3 = aBias[2]
	st.F0, st.F1, st.F2, st.F3 = ahrs.ToQuaternion(
		(f*s.phi0[ix]+(1-f)*s.phi0[ix+1])*Deg,
		(f*s.theta0[ix]+(1-f)*s.theta0[ix+1])*Deg,
		(f*s.psi0[ix]+(1-f)*s.psi0[ix+1])*Deg)
	st.D1 = bBias[0]
	st.D2 = bBias[1]
	st.D3 = bBias[2]
	st.L1 = mBias[0]
	st.L2 = mBias[1]
	st.L3 = mBias[2]
	st.T = t
	st.M = matrix.Zeros(32, 32)
	st.N = matrix.Zeros(32, 32)
	return nil
}

// Measurement determines ahrs.Measurement variables from a Situation
// definition at a given time.
// gps noise (gaussian stdev) and bias are in kt
// airspeed noise and bias are in kt
// accelerometer noise and bias are in G
// gyro noise and bias are in °/s
// magnetometer noise and bias are in μT
func (s *SituationSim) Measurement(t float64, m *ahrs.Measurement,
	uValid, wValid, sValid, mValid bool,
	uNoise, wNoise, aNoise, bNoise, mNoise float64,
	uBias, aBias, bBias, mBias []float64,
) error {
	if t < s.t[0] || t > s.t[len(s.t)-1] {
		// NOTE(review): same no-effect pointer reassignment as Interpolate.
		m = new(ahrs.Measurement)
		return TimeError
	}
	// Sample the true state now and a moment later to form finite-difference
	// derivatives of airspeed and attitude quaternion.
	var x, z ahrs.State
	tz := Small
	s.Interpolate(t, &x, aBias, bBias, mBias)
	s.Interpolate(t+tz, &z, aBias, bBias, mBias)
	dU1 := (z.U1 - x.U1) / tz
	dU2 := (z.U2 - x.U2) / tz
	dU3 := (z.U3 - x.U3) / tz
	dE0 := (z.E0 - x.E0) / tz
	dE1 := -(z.E1 - x.E1) / tz
	dE2 := -(z.E2 - x.E2) / tz
	dE3 := -(z.E3 - x.E3) / tz
	// eij rotates between earth frame i component and aircraft frame j component
	e11 := (+x.E0*x.E0 + x.E1*x.E1 - x.E2*x.E2 - x.E3*x.E3)
	e12 := 2 * (-x.E0*x.E3 + x.E1*x.E2)
	e13 := 2 * (+x.E0*x.E2 + x.E1*x.E3)
	e21 := 2 * (+x.E0*x.E3 + x.E2*x.E1)
	e22 := (+x.E0*x.E0 - x.E1*x.E1 + x.E2*x.E2 - x.E3*x.E3)
	e23 := 2 * (-x.E0*x.E1 + x.E2*x.E3)
	e31 := 2 * (-x.E0*x.E2 + x.E3*x.E1)
	e32 := 2 * (+x.E0*x.E1 + x.E3*x.E2)
	e33 := (+x.E0*x.E0 - x.E1*x.E1 - x.E2*x.E2 + x.E3*x.E3)
	// fij rotates between sensor frame i component and aircraft frame j component
	f11 := (+x.F0*x.F0 + x.F1*x.F1 - x.F2*x.F2 - x.F3*x.F3)
	f12 := 2 * (-x.F0*x.F3 + x.F1*x.F2)
	f13 := 2 * (+x.F0*x.F2 + x.F1*x.F3)
	f21 := 2 * (+x.F0*x.F3 + x.F2*x.F1)
	f22 := (+x.F0*x.F0 - x.F1*x.F1 + x.F2*x.F2 - x.F3*x.F3)
	f23 := 2 * (-x.F0*x.F1 + x.F2*x.F3)
	f31 := 2 * (-x.F0*x.F2 + x.F3*x.F1)
	f32 := 2 * (+x.F0*x.F1 + x.F3*x.F2)
	f33 := (+x.F0*x.F0 - x.F1*x.F1 - x.F2*x.F2 + x.F3*x.F3)
	if uValid {
		// ASI doesn't read U2 or U3
		m.UValid = true
		m.U1 = x.U1 + uBias[0] + uNoise*rand.NormFloat64()
	}
	if wValid {
		// GPS groundspeed: airspeed rotated into earth frame plus wind, plus noise.
		m.WValid = true
		m.W1 = e11*x.U1 + e12*x.U2 + e13*x.U3 + x.V1 + wNoise*rand.NormFloat64()
		m.W2 = e21*x.U1 + e22*x.U2 + e23*x.U3 + x.V2 + wNoise*rand.NormFloat64()
		m.W3 = e31*x.U1 + e32*x.U2 + e33*x.U3 + x.V3 + wNoise*rand.NormFloat64()
	}
	if sValid {
		m.SValid = true
		// These are in aircraft frame
		h1 := -2 * (dE1*x.E0 + dE0*x.E1 - dE3*x.E2 + dE2*x.E3)
		h2 := -2 * (dE2*x.E0 + dE3*x.E1 + dE0*x.E2 - dE1*x.E3)
		h3 := -2 * (dE3*x.E0 - dE2*x.E1 + dE1*x.E2 + dE0*x.E3)
		y1 := (-dU1-h2*x.U3+h3*x.U2)/ahrs.G - e31
		y2 := (-dU2-h3*x.U1+h1*x.U3)/ahrs.G - e32
		y3 := (-dU3-h1*x.U2+h2*x.U1)/ahrs.G - e33
		// Rotate into sensor frame
		m.A1 = f11*y1 + f12*y2 + f13*y3 + aBias[0] + aNoise*rand.NormFloat64()
		m.A2 = f21*y1 + f22*y2 + f23*y3 + aBias[1] + aNoise*rand.NormFloat64()
		m.A3 = f31*y1 + f32*y2 + f33*y3 + aBias[2] + aNoise*rand.NormFloat64()
		m.B1 = (f11*h1+f12*h2+f13*h3)/Deg + (bBias[0] + bNoise*rand.NormFloat64())
		m.B2 = (f21*h1+f22*h2+f23*h3)/Deg + (bBias[1] + bNoise*rand.NormFloat64())
		m.B3 = (f31*h1+f32*h2+f33*h3)/Deg + (bBias[2] + bNoise*rand.NormFloat64())
	}
	if mValid {
		// Magnetometer: earth-frame field rotated into aircraft then sensor frame.
		m.MValid = true
		m1 := x.N1*e11 + x.N2*e21 + x.N3*e31
		m2 := x.N1*e12 + x.N2*e22 + x.N3*e32
		m3 := x.N1*e13 + x.N2*e23 + x.N3*e33
		m.M1 = f11*m1 + f12*m2 + f13*m3 + mBias[0] + mNoise*rand.NormFloat64()
		m.M2 = f21*m1 + f22*m2 + f23*m3 + mBias[1] + mNoise*rand.NormFloat64()
		m.M3 = f31*m1 + f32*m2 + f33*m3 + mBias[2] + mNoise*rand.NormFloat64()
	}
	m.T = t
	return nil
}

// Data to define a piecewise-linear turn, with entry and exit
var airspeed = 120.0                                         // Nice airspeed for maneuvers, kts
var bank = math.Atan((2*Pi*airspeed)/(ahrs.G*120)) / Deg     // Bank angle for std rate turn at given airspeed
var mush = airspeed * math.Sin(Pi/90) / math.Cos(bank*Deg)   // Mush in a turn to maintain altitude

// start, initiate roll-in, end roll-in, initiate roll-out, end roll-out, end
var sitTurnDef = &SituationSim{
	t:  []float64{0, 10, 15, 255, 260, 270},
	u1: []float64{airspeed, airspeed, airspeed, airspeed, airspeed, airspeed},
	u2: []float64{0, 0, 0, 0, 0, 0},
	u3: []float64{0, 0, mush, mush, 0, 0},
	phi: []float64{0, 0, bank, bank, 0, 0},
	//theta: []float64{0, 0, 2, 2, 0, 0},
	theta:  []float64{0, 0, 0, 0, 0, 0},
	psi:    []float64{0, 0, 0, 720, 720, 720},
	phi0:   []float64{0, 0, 0, 0, 0, 0},
	theta0: []float64{0, 0, 0, 0, 0, 0},
	psi0:   []float64{90, 90, 90, 90, 90, 90},
	v1:     []float64{3, 3, 3, 3, 3, 3},
	v2:     []float64{4, 4, 4, 4, 4, 4},
	v3:     []float64{0, 0, 0, 0, 0, 0},
	m1:     []float64{0, 0, 0, 0, 0, 0},
	m2:     []float64{1, 1, 1, 1, 1, 1},
	m3:     []float64{-1, -1, -1, -1, -1, -1},
}

// Bank angles for standard-rate turns at 95 and 120 kts.
var bank1 = math.Atan((2*Pi*95)/(ahrs.G*120)) / Deg
var bank2 = math.Atan((2*Pi*120)/(ahrs.G*120)) / Deg

// sitTakeoffDef defines a takeoff, climb-out and two left turns.
var sitTakeoffDef = &SituationSim{
	t:      []float64{0, 10, 30, 35, 55, 115, 120, 150, 155, 175, 180, 210, 215, 230},
	u1:     []float64{9, 9, 68, 83, 95, 95, 95, 95, 95, 120, 120, 120, 120, 140},
	u2:     []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	u3:     []float64{0, 0, 0, -3, -3, -3, -2, -2, -2, 0, 0, 0, 0, 0},
	phi:    []float64{0, 0, 0, 0, 0, 0, -bank1, -bank1, 0, 0, -bank2, -bank2, 0, 0},
	theta:  []float64{0, 0, 0, 10, 10, 10, 5, 5, 5, 2, 2, 2, 0, 0},
	psi:    []float64{0, 0, 0, 0, 0, 0, 0, -90, -90, -90, -90, -180, -180, -180},
	phi0:   []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	theta0: []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	psi0:   []float64{90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90},
	v1:     []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	v2:     []float64{-8, -8, -8, -8, -8, -8, -10, -10, -10, -12, -12, -12, -12, -12},
	v3:     []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	m1:     []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	m2:     []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
	m3:     []float64{-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1},
}

// GetLogMap returns the analysis/debugging map.
func (s *SituationSim) GetLogMap() (p map[string]interface{}) {
	return s.logMap
}
sim/situationSim.go
0.583915
0.403479
situationSim.go
starcoder
package temporal

import "time"

// Season holds the name and starting and ending times for the season in a given year.
type Season struct {
	Name  string
	Start time.Time
	End   time.Time
}

// Seasons holds a year and a list of Seasons for that year.
type Seasons struct {
	Year int
	List []Season
}

// YearSeasons can be used as a list of seasons for a given year.
// Optimal usage of seasons is to simply make(YearSeasons) and call GetSeason() method on timestamps.
type YearSeasons map[int]*Seasons

// GetSeason will get the season for the given timestamp.
// If the list of seasons for the timestamp's given year has not yet been
// generated then it will be generated (and cached) and utilized to determine
// the season of the given timestamp.
func (y YearSeasons) GetSeason(t time.Time) Season {
	year := t.Year()
	seasons, ok := y[year]
	if !ok {
		y[year] = ListSeasons(t)
		seasons = y[year]
	}
	return seasons.GetSeason(t)
}

// GetSeason ...
// "Winter", "Spring", "Summer", or "Fall"
// All you have to do is call (this function).
// https://www.youtube.com/watch?v=eAR_Ff5A8Rk
// Returns the season whose [Start, End] interval contains t; the zero Season
// if t falls outside every interval.
func (s Seasons) GetSeason(t time.Time) Season {
	var season Season
	for _, ssn := range s.List {
		// Boundaries are inclusive. The previous strictly-exclusive check
		// (t.After(Start) && End.After(t)) returned the zero Season for a
		// timestamp exactly at a season's start — e.g. midnight on the first
		// day of the year, which is exactly winter's DayStart.
		if !t.Before(ssn.Start) && !t.After(ssn.End) {
			season = ssn
		}
	}
	return season
}

// ListSeasons will generate a list of seasons for the given year of the timestamp
// It is recommended that you store the list and reuse it for processing multiple timestamps.
func ListSeasons(t time.Time) *Seasons {
	year := t.Year()
	// Use the timestamp's location so the day boundaries are local.
	location := t.Location()
	seasons := &Seasons{
		Year: year,
	}
	// Seasons use fixed calendar dates (approximate equinoxes/solstices);
	// winter is split into a January segment and a December segment so every
	// interval stays within the single given year.
	winter1 := Season{
		Name:  "winter",
		Start: DayStart(time.Date(year, time.January, 1, 0, 0, 0, 0, location)),
		End:   DayFinish(time.Date(year, time.March, 20, 0, 0, 0, 0, location)),
	}
	seasons.List = append(seasons.List, winter1)
	spring := Season{
		Name:  "spring",
		Start: DayStart(time.Date(year, time.March, 21, 0, 0, 0, 0, location)),
		End:   DayFinish(time.Date(year, time.June, 20, 0, 0, 0, 0, location)),
	}
	seasons.List = append(seasons.List, spring)
	summer := Season{
		Name:  "summer",
		Start: DayStart(time.Date(year, time.June, 21, 0, 0, 0, 0, location)),
		End:   DayFinish(time.Date(year, time.September, 22, 0, 0, 0, 0, location)),
	}
	seasons.List = append(seasons.List, summer)
	fall := Season{
		Name:  "fall",
		Start: DayStart(time.Date(year, time.September, 23, 0, 0, 0, 0, location)),
		End:   DayFinish(time.Date(year, time.December, 20, 0, 0, 0, 0, location)),
	}
	seasons.List = append(seasons.List, fall)
	winter2 := Season{
		Name:  "winter",
		Start: DayStart(time.Date(year, time.December, 21, 0, 0, 0, 0, location)),
		End:   DayFinish(time.Date(year, time.December, 31, 0, 0, 0, 0, location)),
	}
	seasons.List = append(seasons.List, winter2)
	return seasons
}
season.go
0.531939
0.412294
season.go
starcoder
package ratelimit

import (
	"fmt"
	"sync"
	"time"
)

// Limit defines the maximum number of requests per second.
type Limit float64

// Every converts a time interval between requests to a Limit.
// It panics if interval is not positive.
func Every(interval time.Duration) Limit {
	if interval <= 0 {
		panic("ratelimit: invalid time interval for Every")
	}
	return 1 / Limit(interval.Seconds())
}

// Limiter implements a token bucket limiter at rate `r` tokens per second with burst size of `b` tokens.
type Limiter struct {
	limit Limit
	burst int

	// mu guards tokens, last and lastRequest.
	mu     sync.Mutex
	tokens float64
	// last is the last time the limiter's tokens got updated
	last time.Time
	// lastRequest is the latest time of a request
	lastRequest time.Time
}

// Limit returns the Limiter's rate.
func (l *Limiter) Limit() Limit {
	return l.limit
}

// Burst returns the Limiter's burst size.
func (l *Limiter) Burst() int {
	return l.burst
}

// New returns a new Limiter at rate `r` tokens per second with burst of `b` tokens.
func New(r Limit, b int) *Limiter {
	return &Limiter{
		limit: r,
		burst: b,
	}
}

// Allow is the shortcut for AllowN(time.Now(), 1).
func (l *Limiter) Allow() bool {
	return l.AllowN(time.Now(), 1)
}

// AllowN checks whether `n` requests may happen at time `now`.
// If allowed, the tokens are consumed.
func (l *Limiter) AllowN(now time.Time, n int) bool {
	return l.reserveN(now, n, 0).ok
}

// Wait is the shortcut for WaitN(time.Now(), 1).
func (l *Limiter) Wait() (time.Duration, error) {
	return l.WaitN(time.Now(), 1)
}

// WaitN calculates the time duration to wait before `n` requests may happen
// at time `now`. It does not consume tokens.
func (l *Limiter) WaitN(now time.Time, n int) (time.Duration, error) {
	return l.waitN(now, n)
}

func (l *Limiter) waitN(now time.Time, n int) (time.Duration, error) {
	if n > l.burst {
		return 0, fmt.Errorf("ratelimit: WaitN %d exceeds limiter's burst %d", n, l.burst)
	}
	// adjust reads the limiter's shared state (last, tokens), so the lock
	// must be held; previously this was called unlocked, racing with the
	// mutations performed in reserveN.
	l.mu.Lock()
	_, _, tokens := l.adjust(now)
	l.mu.Unlock()
	// calculate the remaining number of tokens resulting from the request.
	tokens -= float64(n)
	var waitDuration time.Duration
	if tokens < 0 {
		waitDuration = l.limit.durationFromTokens(-tokens)
	}
	// The longest acceptable wait is the time needed to mint n tokens.
	maxWaitDuration := l.limit.durationFromTokens(float64(n))
	if waitDuration <= maxWaitDuration {
		return waitDuration, nil
	}
	return 0, fmt.Errorf("ratelimit: WaitN %d exceeds maximum wait duration", n)
}

// reservation records the outcome of a token reservation attempt.
type reservation struct {
	ok        bool
	tokens    int
	timeToAct time.Time
}

// reserveN attempts to reserve n tokens at time now, allowing a wait of at
// most maxWaitDuration. On success it consumes the tokens and advances the
// limiter's state; on failure the state (other than clock monotonicity
// bookkeeping) is left unchanged.
func (l *Limiter) reserveN(now time.Time, n int, maxWaitDuration time.Duration) reservation {
	l.mu.Lock()

	adjNow, last, tokens := l.adjust(now)

	// calculate the remaining number of tokens resulting from the request.
	tokens -= float64(n)

	var waitDuration time.Duration
	if tokens < 0 {
		waitDuration = l.limit.durationFromTokens(-tokens)
	}

	ok := n <= l.burst && waitDuration <= maxWaitDuration
	r := reservation{
		ok: ok,
	}
	if ok {
		r.tokens = n
		r.timeToAct = adjNow.Add(waitDuration)

		l.last = adjNow
		l.tokens = tokens
		l.lastRequest = r.timeToAct
	} else {
		l.last = last
	}

	l.mu.Unlock()
	return r
}

// adjust calculates the updated state for Limiter resulting from the passage
// of time. The caller must hold l.mu.
func (l *Limiter) adjust(now time.Time) (newNow, newLast time.Time, newTokens float64) {
	last := l.last
	// Guard against a clock that moved backwards.
	if now.Before(last) {
		last = now
	}

	// Cap the elapsed time at what would fill the bucket to burst.
	maxElapsed := l.limit.durationFromTokens(float64(l.burst) - l.tokens)
	elapsed := now.Sub(last)
	if elapsed > maxElapsed {
		elapsed = maxElapsed
	}

	delta := l.limit.tokensFromDuration(elapsed)
	tokens := l.tokens + delta
	if burst := float64(l.burst); tokens > burst {
		tokens = burst
	}

	return now, last, tokens
}

// durationFromTokens converts a token count to the time needed to mint it.
func (lmt Limit) durationFromTokens(tokens float64) time.Duration {
	seconds := tokens / float64(lmt)
	return time.Nanosecond * time.Duration(1e9*seconds)
}

// tokensFromDuration converts an elapsed duration to the tokens it mints.
func (lmt Limit) tokensFromDuration(d time.Duration) float64 {
	return d.Seconds() * float64(lmt)
}
pkg/ratelimit/ratelimit.go
0.8586
0.461684
ratelimit.go
starcoder
package fd import "sync" // Laplacian computes the Laplacian of the multivariate function f at the location // x. That is, Laplacian returns // ∆ f(x) = ∇ · ∇ f(x) = \sum_i ∂^2 f(x)/∂x_i^2 // The finite difference formula and other options are specified by settings. // The order of the difference formula must be 2 or Laplacian will panic. func Laplacian(f func(x []float64) float64, x []float64, settings *Settings) float64 { n := len(x) if n == 0 { panic("laplacian: x has zero length") } // Default settings. formula := Central2nd step := formula.Step var originValue float64 var originKnown, concurrent bool // Use user settings if provided. if settings != nil { if !settings.Formula.isZero() { formula = settings.Formula step = formula.Step checkFormula(formula) if formula.Derivative != 2 { panic(badDerivOrder) } } if settings.Step != 0 { if settings.Step < 0 { panic(negativeStep) } step = settings.Step } originKnown = settings.OriginKnown originValue = settings.OriginValue concurrent = settings.Concurrent } evals := n * len(formula.Stencil) if usesOrigin(formula.Stencil) { evals -= n } nWorkers := computeWorkers(concurrent, evals) if nWorkers == 1 { return laplacianSerial(f, x, formula.Stencil, step, originKnown, originValue) } return laplacianConcurrent(nWorkers, evals, f, x, formula.Stencil, step, originKnown, originValue) } func laplacianSerial(f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { n := len(x) xCopy := make([]float64, n) fo := func() float64 { // Copy x in case it is modified during the call. copy(xCopy, x) return f(x) } is2 := 1 / (step * step) origin := getOrigin(originKnown, originValue, fo, stencil) var laplacian float64 for i := 0; i < n; i++ { for _, pt := range stencil { var v float64 if pt.Loc == 0 { v = origin } else { // Copying the data anew has two benefits. 
First, it // avoids floating point issues where adding and then // subtracting the step don't return to the exact same // location. Secondly, it protects against the function // modifying the input data. copy(xCopy, x) xCopy[i] += pt.Loc * step v = f(xCopy) } laplacian += v * pt.Coeff * is2 } } return laplacian } func laplacianConcurrent(nWorkers, evals int, f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { type run struct { i int idx int result float64 } n := len(x) send := make(chan run, evals) ans := make(chan run, evals) var originWG sync.WaitGroup hasOrigin := usesOrigin(stencil) if hasOrigin { originWG.Add(1) // Launch worker to compute the origin. go func() { defer originWG.Done() xCopy := make([]float64, len(x)) copy(xCopy, x) originValue = f(xCopy) }() } var workerWG sync.WaitGroup // Launch workers. for i := 0; i < nWorkers; i++ { workerWG.Add(1) go func(send <-chan run, ans chan<- run) { defer workerWG.Done() xCopy := make([]float64, len(x)) for r := range send { if stencil[r.idx].Loc == 0 { originWG.Wait() r.result = originValue } else { // See laplacianSerial for comment on the copy. copy(xCopy, x) xCopy[r.i] += stencil[r.idx].Loc * step r.result = f(xCopy) } ans <- r } }(send, ans) } // Launch the distributor, which sends all of runs. go func(send chan<- run) { for i := 0; i < n; i++ { for idx := range stencil { send <- run{ i: i, idx: idx, } } } close(send) // Wait for all the workers to quit, then close the ans channel. workerWG.Wait() close(ans) }(send) // Read in the results. is2 := 1 / (step * step) var laplacian float64 for r := range ans { laplacian += r.result * stencil[r.idx].Coeff * is2 } return laplacian }
vendor/gonum.org/v1/gonum/diff/fd/laplacian.go
0.721253
0.451992
laplacian.go
starcoder
package plaid import ( "encoding/json" "time" ) // WalletTransaction The transaction details type WalletTransaction struct { // A unique ID identifying the transaction TransactionId string `json:"transaction_id"` // A reference for the transaction Reference string `json:"reference"` // The type of of the transaction. Currently, only `\"PAYOUT\"` is supported. Type string `json:"type"` Amount WalletTransactionAmount `json:"amount"` Counterparty WalletTransactionCounterparty `json:"counterparty"` Status WalletTransactionStatus `json:"status"` // Timestamp when the transaction was created, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format. CreatedAt time.Time `json:"created_at"` AdditionalProperties map[string]interface{} } type _WalletTransaction WalletTransaction // NewWalletTransaction instantiates a new WalletTransaction object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewWalletTransaction(transactionId string, reference string, type_ string, amount WalletTransactionAmount, counterparty WalletTransactionCounterparty, status WalletTransactionStatus, createdAt time.Time) *WalletTransaction { this := WalletTransaction{} this.TransactionId = transactionId this.Reference = reference this.Type = type_ this.Amount = amount this.Counterparty = counterparty this.Status = status this.CreatedAt = createdAt return &this } // NewWalletTransactionWithDefaults instantiates a new WalletTransaction object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewWalletTransactionWithDefaults() *WalletTransaction { this := WalletTransaction{} return &this } // GetTransactionId returns the TransactionId field value func (o *WalletTransaction) GetTransactionId() string { if o == nil { var 
ret string return ret } return o.TransactionId } // GetTransactionIdOk returns a tuple with the TransactionId field value // and a boolean to check if the value has been set. func (o *WalletTransaction) GetTransactionIdOk() (*string, bool) { if o == nil { return nil, false } return &o.TransactionId, true } // SetTransactionId sets field value func (o *WalletTransaction) SetTransactionId(v string) { o.TransactionId = v } // GetReference returns the Reference field value func (o *WalletTransaction) GetReference() string { if o == nil { var ret string return ret } return o.Reference } // GetReferenceOk returns a tuple with the Reference field value // and a boolean to check if the value has been set. func (o *WalletTransaction) GetReferenceOk() (*string, bool) { if o == nil { return nil, false } return &o.Reference, true } // SetReference sets field value func (o *WalletTransaction) SetReference(v string) { o.Reference = v } // GetType returns the Type field value func (o *WalletTransaction) GetType() string { if o == nil { var ret string return ret } return o.Type } // GetTypeOk returns a tuple with the Type field value // and a boolean to check if the value has been set. func (o *WalletTransaction) GetTypeOk() (*string, bool) { if o == nil { return nil, false } return &o.Type, true } // SetType sets field value func (o *WalletTransaction) SetType(v string) { o.Type = v } // GetAmount returns the Amount field value func (o *WalletTransaction) GetAmount() WalletTransactionAmount { if o == nil { var ret WalletTransactionAmount return ret } return o.Amount } // GetAmountOk returns a tuple with the Amount field value // and a boolean to check if the value has been set. 
func (o *WalletTransaction) GetAmountOk() (*WalletTransactionAmount, bool) { if o == nil { return nil, false } return &o.Amount, true } // SetAmount sets field value func (o *WalletTransaction) SetAmount(v WalletTransactionAmount) { o.Amount = v } // GetCounterparty returns the Counterparty field value func (o *WalletTransaction) GetCounterparty() WalletTransactionCounterparty { if o == nil { var ret WalletTransactionCounterparty return ret } return o.Counterparty } // GetCounterpartyOk returns a tuple with the Counterparty field value // and a boolean to check if the value has been set. func (o *WalletTransaction) GetCounterpartyOk() (*WalletTransactionCounterparty, bool) { if o == nil { return nil, false } return &o.Counterparty, true } // SetCounterparty sets field value func (o *WalletTransaction) SetCounterparty(v WalletTransactionCounterparty) { o.Counterparty = v } // GetStatus returns the Status field value func (o *WalletTransaction) GetStatus() WalletTransactionStatus { if o == nil { var ret WalletTransactionStatus return ret } return o.Status } // GetStatusOk returns a tuple with the Status field value // and a boolean to check if the value has been set. func (o *WalletTransaction) GetStatusOk() (*WalletTransactionStatus, bool) { if o == nil { return nil, false } return &o.Status, true } // SetStatus sets field value func (o *WalletTransaction) SetStatus(v WalletTransactionStatus) { o.Status = v } // GetCreatedAt returns the CreatedAt field value func (o *WalletTransaction) GetCreatedAt() time.Time { if o == nil { var ret time.Time return ret } return o.CreatedAt } // GetCreatedAtOk returns a tuple with the CreatedAt field value // and a boolean to check if the value has been set. 
func (o *WalletTransaction) GetCreatedAtOk() (*time.Time, bool) { if o == nil { return nil, false } return &o.CreatedAt, true } // SetCreatedAt sets field value func (o *WalletTransaction) SetCreatedAt(v time.Time) { o.CreatedAt = v } func (o WalletTransaction) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if true { toSerialize["transaction_id"] = o.TransactionId } if true { toSerialize["reference"] = o.Reference } if true { toSerialize["type"] = o.Type } if true { toSerialize["amount"] = o.Amount } if true { toSerialize["counterparty"] = o.Counterparty } if true { toSerialize["status"] = o.Status } if true { toSerialize["created_at"] = o.CreatedAt } for key, value := range o.AdditionalProperties { toSerialize[key] = value } return json.Marshal(toSerialize) } func (o *WalletTransaction) UnmarshalJSON(bytes []byte) (err error) { varWalletTransaction := _WalletTransaction{} if err = json.Unmarshal(bytes, &varWalletTransaction); err == nil { *o = WalletTransaction(varWalletTransaction) } additionalProperties := make(map[string]interface{}) if err = json.Unmarshal(bytes, &additionalProperties); err == nil { delete(additionalProperties, "transaction_id") delete(additionalProperties, "reference") delete(additionalProperties, "type") delete(additionalProperties, "amount") delete(additionalProperties, "counterparty") delete(additionalProperties, "status") delete(additionalProperties, "created_at") o.AdditionalProperties = additionalProperties } return err } type NullableWalletTransaction struct { value *WalletTransaction isSet bool } func (v NullableWalletTransaction) Get() *WalletTransaction { return v.value } func (v *NullableWalletTransaction) Set(val *WalletTransaction) { v.value = val v.isSet = true } func (v NullableWalletTransaction) IsSet() bool { return v.isSet } func (v *NullableWalletTransaction) Unset() { v.value = nil v.isSet = false } func NewNullableWalletTransaction(val *WalletTransaction) *NullableWalletTransaction { return 
&NullableWalletTransaction{value: val, isSet: true} } func (v NullableWalletTransaction) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableWalletTransaction) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
plaid/model_wallet_transaction.go
0.737631
0.437343
model_wallet_transaction.go
starcoder
package xxh3

import (
	"math/bits"
	"unsafe"
)

// Uint128 is a 128 bit value.
// The actual value can be thought of as u.Hi<<64 | u.Lo.
type Uint128 struct {
	Hi, Lo uint64
}

// Bytes returns the uint128 as an array of bytes in canonical form (big-endian encoded).
func (u Uint128) Bytes() [16]byte {
	return [16]byte{
		byte(u.Hi >> 0x38), byte(u.Hi >> 0x30), byte(u.Hi >> 0x28), byte(u.Hi >> 0x20),
		byte(u.Hi >> 0x18), byte(u.Hi >> 0x10), byte(u.Hi >> 0x08), byte(u.Hi),
		byte(u.Lo >> 0x38), byte(u.Lo >> 0x30), byte(u.Lo >> 0x28), byte(u.Lo >> 0x20),
		byte(u.Lo >> 0x18), byte(u.Lo >> 0x10), byte(u.Lo >> 0x08), byte(u.Lo),
	}
}

// Short aliases used throughout the package to keep the hashing code compact.
type (
	ptr = unsafe.Pointer
	ui  = uintptr

	u8   = uint8
	u32  = uint32
	u64  = uint64
	u128 = Uint128
)

// str pairs a raw pointer with a length. NOTE(review): presumably this
// mirrors a string/slice header used by unsafe conversions elsewhere in
// the package — confirm against the callers.
type str struct {
	p ptr
	l uint
}

// readU8 reads the byte at offset o from p.
func readU8(p ptr, o ui) uint8 {
	return *(*uint8)(ptr(ui(p) + o))
}

// readU16 reads a little-endian uint16 at offset o from p, byte by byte.
func readU16(p ptr, o ui) uint16 {
	b := (*[2]byte)(ptr(ui(p) + o))
	return uint16(b[0]) | uint16(b[1])<<8
}

// readU32 reads a little-endian uint32 at offset o from p, byte by byte.
func readU32(p ptr, o ui) uint32 {
	b := (*[4]byte)(ptr(ui(p) + o))
	return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24
}

// readU64 reads a little-endian uint64 at offset o from p, byte by byte.
func readU64(p ptr, o ui) uint64 {
	b := (*[8]byte)(ptr(ui(p) + o))
	return uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 |
		uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56
}

// writeU64 stores v at offset o from p in little-endian byte order.
func writeU64(p ptr, o ui, v u64) {
	b := (*[8]byte)(ptr(ui(p) + o))
	b[0] = byte(v)
	b[1] = byte(v >> 8)
	b[2] = byte(v >> 16)
	b[3] = byte(v >> 24)
	b[4] = byte(v >> 32)
	b[5] = byte(v >> 40)
	b[6] = byte(v >> 48)
	b[7] = byte(v >> 56)
}

// secretSize is the size in bytes of a derived secret.
const secretSize = 192

// initSecret derives a per-seed secret: each 16-byte block of the base key
// (`key`, declared elsewhere in the package) has the seed added to its low
// 8 bytes and subtracted from its high 8 bytes.
func initSecret(secret ptr, seed u64) {
	for i := ui(0); i < secretSize/16; i++ {
		lo := readU64(key, 16*i) + seed
		hi := readU64(key, 16*i+8) - seed
		writeU64(secret, 16*i, lo)
		writeU64(secret, 16*i+8, hi)
	}
}

// xxh64AvalancheSmall is the avalanche mix with the first xor-shift and key
// mixing hoisted to the caller (see the inline comments for what the caller
// must do). prime64_2/prime64_3 are declared elsewhere in the package.
func xxh64AvalancheSmall(x u64) u64 {
	// x ^= x >> 33                    // x must be < 32 bits
	// x ^= u64(key32_000 ^ key32_004) // caller must do this
	x *= prime64_2
	x ^= x >> 29
	x *= prime64_3
	x ^= x >> 32
	return x
}

// xxhAvalancheSmall applies the full xor-shift/multiply avalanche to x.
// NOTE(review): body is identical to xxh64AvalancheFull — presumably kept
// separate for call-site clarity; confirm before consolidating.
func xxhAvalancheSmall(x u64) u64 {
	x ^= x >> 33
	x *= prime64_2
	x ^= x >> 29
	x *= prime64_3
	x ^= x >> 32
	return x
}

// xxh64AvalancheFull applies the full xor-shift/multiply avalanche to x.
func xxh64AvalancheFull(x u64) u64 {
	x ^= x >> 33
	x *= prime64_2
	x ^= x >> 29
	x *= prime64_3
	x ^= x >> 32
	return x
}

// xxh3Avalanche applies the shorter xxh3-style avalanche (two xor-shifts
// around a single odd-constant multiply).
func xxh3Avalanche(x u64) u64 {
	x ^= x >> 37
	x *= 0x165667919e3779f9
	x ^= x >> 32
	return x
}

// rrmxmx mixes h64 using two rotations, two multiplies by an odd constant,
// and xor-shifts, folding the input length into the middle step.
func rrmxmx(h64 u64, len u64) u64 {
	h64 ^= bits.RotateLeft64(h64, 49) ^ bits.RotateLeft64(h64, 24)
	h64 *= 0x9fb21c651e98df25
	h64 ^= (h64 >> 35) + len
	h64 *= 0x9fb21c651e98df25
	h64 ^= (h64 >> 28)
	return h64
}

// mulFold64 returns the full 128-bit product of x and y folded to 64 bits
// by xoring the high and low halves.
func mulFold64(x, y u64) u64 {
	hi, lo := bits.Mul64(x, y)
	return hi ^ lo
}
vendor/github.com/zeebo/xxh3/utils.go
0.608245
0.407569
utils.go
starcoder