code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package main
import (
"fmt"
"github.com/vodinhphuc/golearn/base"
"github.com/vodinhphuc/golearn/trees"
)
func main() {
/* Performance of CART Algorithm:
Training Time for Titanic Dataset ≈ 611 µs
Prediction Time for Titanic Datset ≈ 101 µs
Complexity Analysis:
1x Dataset -- x ms
2x Dataset -- 1.7x ms
128x Dataset -- 74x ms
Complexity is sub linear
Sklearn:
Training Time for Titanic Dataset ≈ 8.8 µs
Prediction Time for Titanic Datset ≈ 7.87 µs
This implementation and sci-kit learn produce the exact same tree for the exact same dataset.
Predictions on the same test set also yield the exact same accuracy.
This implementation is optimized to prevent redundant iterations over the dataset, but it is not completely optimized. Also, sklearn makes use of numpy to access column easily, whereas here a complete iteration is required.
In terms of Hyperparameters, this implmentation gives you the ability to choose the impurity function and the maxDepth.
Many of the other hyperparameters used in sklearn are not here, but pruning and impurity is included.
*/
// Load Titanic Data For classification
classificationData, err := base.ParseCSVToInstances("../../datasets/titanic.csv", false)
if err != nil {
panic(err)
}
trainData, testData := base.InstancesTrainTestSplit(classificationData, 0.5)
// Create New Classification Tree
// Hyperparameters - loss function, max Depth (-1 will split until pure), list of unique labels
decTree := trees.NewDecisionTreeClassifier("entropy", -1, []int64{0, 1})
// Train Tree
err = decTree.Fit(trainData)
if err != nil {
panic(err)
}
// Print out tree for visualization - shows splits and feature and predictions
fmt.Println(decTree.String())
// Access Predictions
classificationPreds := decTree.Predict(testData)
fmt.Println("Titanic Predictions")
fmt.Println(classificationPreds)
// Evaluate Accuracy on Test Data
fmt.Println(decTree.Evaluate(testData))
// Load House Price Data For Regression
regressionData, err := base.ParseCSVToInstances("../datasets/boston_house_prices.csv", false)
if err != nil {
panic(err)
}
trainRegData, testRegData := base.InstancesTrainTestSplit(regressionData, 0.5)
// Hyperparameters - Loss function, max Depth (-1 will split until pure)
regTree := trees.NewDecisionTreeRegressor("mse", -1)
// Train Tree
err = regTree.Fit(trainRegData)
if err != nil {
panic(err)
}
// Print out tree for visualization
fmt.Println(regTree.String())
// Access Predictions
regressionPreds := regTree.Predict(testRegData)
fmt.Println("Boston House Price Predictions")
fmt.Println(regressionPreds)
} | examples/trees/cart/cart.go | 0.662469 | 0.507934 | cart.go | starcoder |
package base
import (
"math"
"github.com/eriq-augustine/goml/util"
)
// DiscretizeNumericFeatures discretizes each feature (column) on its own
// scale. This lets features of different ranges (say a percentage in 0-1 and
// an absolute value spanning, e.g., -1 to 300) sit next to each other without
// falling into the same bucket.
func DiscretizeNumericFeatures(data []Tuple, numBuckets int) []IntTuple {
	result := make([]IntTuple, len(data))
	for i, t := range data {
		// Start from a zero-valued int tuple with the same class label.
		result[i] = NewIntTuple(util.InterfaceSlice(make([]int, t.DataSize())), t.GetClass())
	}
	if len(data) == 0 || numBuckets <= 0 {
		return result
	}
	for feature := 0; feature < data[0].DataSize(); feature++ {
		DiscretizeNumericFeature(data, result, numBuckets, feature)
	}
	return result
}
// DiscretizeNumericFeature discretizes one feature column of |data| into
// |discreteData|. Only |discreteData| is modified (the original comment said
// |data|, which was wrong); |data| is just read for min/max.
func DiscretizeNumericFeature(data []Tuple, discreteData []IntTuple, numBuckets int, featureIndex int) {
	// Bounds fix: valid indexes are 0..DataSize()-1, so featureIndex ==
	// DataSize() is also out of range. The original check used
	// "DataSize() < featureIndex", which let that value through and would
	// index out of range. Negative indexes are rejected too.
	if len(data) == 0 || featureIndex < 0 || featureIndex >= data[0].DataSize() || !data[0].GetData(featureIndex).IsNumeric() {
		return
	}
	// Single pass to find the feature's min and max.
	min := (data[0].(NumericTuple)).GetNumericData(featureIndex)
	max := min
	for _, tuple := range data {
		val := (tuple.(NumericTuple)).GetNumericData(featureIndex)
		if val < min {
			min = val
		} else if val > max {
			max = val
		}
	}
	DiscretizeNumericFeatureWithBounds(data, discreteData, numBuckets, featureIndex, min, max)
}
// DiscretizeNumericFeatureWithBounds buckets one feature column of |data|
// into |discreteData| using the supplied [min, max] range.
// |discreteData| will get modified.
func DiscretizeNumericFeatureWithBounds(data []Tuple, discreteData []IntTuple, numBuckets int, featureIndex int, min float64, max float64) {
	if numBuckets < 1 || min > max {
		return
	}
	bucketSize := (max - min) / float64(numBuckets)
	for i := range data {
		val := (data[i].(NumericTuple)).GetNumericData(featureIndex)
		discreteData[i].SetData(featureIndex, Discretize(val, min, max, bucketSize, numBuckets))
	}
}
// Discretize maps val onto a bucket index in [0, numBuckets-1] given the
// range [min, max] and the per-bucket width. If max == min every value lands
// in the first bucket (also avoiding a zero-width bucket division).
func Discretize(val float64, min float64, max float64, bucketSize float64, numBuckets int) int {
	if min == max {
		return 0
	}
	// Clamp below min to offset 0, then cap at the last bucket.
	offset := math.Max(val-min, 0)
	bucket := offset / bucketSize
	return int(math.Min(bucket, float64(numBuckets-1)))
}
package dagger
import (
"github.com/autom8ter/dagger/util"
)
// Edge is a relationship between two nodes.
type Edge struct {
	// An edge embeds Node because it has an identifier and attributes of its own.
	Node `json:"node"`
	// From is the root node of the edge.
	From Path `json:"from"`
	// To is the target node of the edge.
	To Path `json:"to"`
}
// Path satisfies the primitive.Path interface: a (type, id) pair that
// uniquely addresses a node.
type Path struct {
	XID string `json:"xid"`
	XType string `json:"xtype"`
}
// Node is a single entity in the graph.
type Node struct {
	Path `json:"path"`
	Attributes Attributes `json:"attributes"`
}
// Attributes is an arbitrary key/value bag attached to a node or edge.
// Methods below are nil-tolerant: reads on a nil map return zero values.
type Attributes map[string]interface{}
// Exists reports whether key is present with a non-nil value.
func (m Attributes) Exists(key string) bool {
	// Reading from a nil map is safe in Go, so no nil guard is needed
	// (the original re-assigned the value receiver, which had no effect
	// for the caller anyway).
	val, ok := m[key]
	return ok && val != nil
}
// Set stores an entry. Calling Set on a nil Attributes is a silent no-op:
// the original code wrote into a freshly allocated local map that was then
// discarded, so no caller ever observed the write — made that explicit here.
func (m Attributes) Set(k string, v interface{}) {
	if m == nil {
		return
	}
	m[k] = v
}
// SetAll copies every entry of data into the Attributes. On a nil receiver
// this is a silent no-op (same observable behavior as the original, which
// wrote into a discarded local map).
func (m Attributes) SetAll(data map[string]interface{}) {
	if m == nil {
		return
	}
	for k, v := range data {
		m[k] = v
	}
}
// Get returns the value stored under key, or nil when absent.
func (m Attributes) Get(key string) interface{} {
	// nil-map reads are safe and return the zero value, so the original
	// nil guard was dead code.
	return m[key]
}
// GetString returns the entry under key parsed as a string, or "" when the
// key is absent or nil. Uses a single map lookup instead of Exists + index.
func (m Attributes) GetString(key string) string {
	if v, ok := m[key]; ok && v != nil {
		return util.ParseString(v)
	}
	return ""
}
// GetBool returns the entry under key parsed as a bool, or false when the
// key is absent or nil. Uses a single map lookup instead of Exists + index.
func (m Attributes) GetBool(key string) bool {
	if v, ok := m[key]; ok && v != nil {
		return util.ParseBool(v)
	}
	return false
}
// GetInt returns the entry under key parsed as an int, or 0 when the key is
// absent or nil. Uses a single map lookup instead of Exists + index.
func (m Attributes) GetInt(key string) int {
	if v, ok := m[key]; ok && v != nil {
		return util.ParseInt(v)
	}
	return 0
}
// Del removes the entry under key. delete on a nil map is already a safe
// no-op, so the original nil guard was dead code.
func (m Attributes) Del(key string) {
	delete(m, key)
}
// Range iterates over the Attributes with the function. If the function
// returns false, the iteration exits. Ranging over a nil map yields nothing,
// so no nil guard is needed. Iteration order is unspecified (Go map order).
func (m Attributes) Range(iterator func(key string, v interface{}) bool) {
	for k, v := range m {
		if !iterator(k, v) {
			return
		}
	}
}
// Filter returns a new Attributes containing the entries for which the
// filter function returns true. The original had two redundant nil checks;
// Range handles a nil receiver naturally.
func (m Attributes) Filter(filter func(key string, v interface{}) bool) Attributes {
	out := Attributes{}
	m.Range(func(key string, v interface{}) bool {
		if filter(key, v) {
			out.Set(key, v)
		}
		return true
	})
	return out
}
// Copy creates a shallow replica of the Attributes (values are not cloned).
// The original had two redundant nil checks; Range handles a nil receiver.
func (m Attributes) Copy() Attributes {
	copied := Attributes{}
	m.Range(func(k string, v interface{}) bool {
		copied.Set(k, v)
		return true
	})
	return copied
}
// Export contains an array of nodes and their corresponding edges, suitable
// for serializing the whole graph.
type Export struct {
	Nodes []Node `json:"nodes"`
	Edges []Edge `json:"edges"`
}
package events
import (
"fmt"
"sort"
"strings"
"time"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
)
// Scheduler holds timeSlice objects and provides methods to update them.
type Scheduler struct {
	logger log.Logger
	timeSlice *TimeSlice
}
// TimeSlice is an association between a specific time, and the names of the events that should fire at that time.
type TimeSlice map[time.Time][]string
// NewScheduler returns a new, empty Scheduler using the given logger.
func NewScheduler(logger log.Logger) *Scheduler {
	return &Scheduler{
		timeSlice: &TimeSlice{},
		logger: logger,
	}
}
// All returns the underlying TimeSlice map (note: maps have reference
// semantics, so callers share the scheduler's state).
func (s *Scheduler) All() TimeSlice {
	return *s.timeSlice
}
// Report logs every scheduled time together with its comma-joined event names.
func (s *Scheduler) Report() {
	fields := []interface{}{"msg", "events"}
	for when, names := range *s.timeSlice {
		fields = append(fields, when.Format(time.RFC3339), strings.Join(names, ","))
	}
	_ = level.Info(s.logger).Log(fields...)
}
// Next determines the next occurring event in the series.
func (s *Scheduler) Next() *time.Time {
times := s.ordered()
if len(times) > 0 {
return ×[0]
}
return nil
}
// ordered returns every scheduled time sorted ascending, or nil when the
// schedule is empty. The slice is preallocated to the map's size.
func (s *Scheduler) ordered() []time.Time {
	if len(*s.timeSlice) == 0 {
		return nil
	}
	keys := make([]time.Time, 0, len(*s.timeSlice))
	for k := range *s.timeSlice {
		keys = append(keys, k)
	}
	sort.Slice(keys, func(i, j int) bool {
		return keys[i].Before(keys[j])
	})
	return keys
}
// TimesForName returns every scheduled time whose name list contains n.
// (If a name somehow appears twice in one slot, the time is returned twice,
// matching the original behavior.)
func (s *Scheduler) TimesForName(n string) []time.Time {
	var matches []time.Time
	for when, names := range *s.timeSlice {
		for _, candidate := range names {
			if candidate == n {
				matches = append(matches, when)
			}
		}
	}
	return matches
}
// NamesForTime returns all event names that are scheduled for a given
// timeSlice entry; nil when no events are registered at t.
func (s *Scheduler) NamesForTime(t time.Time) []string {
	return (*s.timeSlice)[t]
}
// WaitForNext is a blocking function that waits for the next available time
// to arrive before returning its event names to the caller. Returns an empty
// slice immediately when nothing is scheduled. Events already more than 30
// seconds in the past are returned at once with a warning instead of waiting
// (note: the original comment said "under 30 seconds old", but the code
// takes this branch when the event is OLDER than 30 seconds; events past by
// less than that fall through to a timer that fires immediately).
func (s *Scheduler) WaitForNext() []string {
	const pastEventGrace = 30 * time.Second // was time.Duration(30)*time.Second
	next := s.Next()
	if next == nil {
		return []string{}
	}
	if time.Since(*next) > pastEventGrace {
		_ = level.Warn(s.logger).Log("msg", "sending past event",
			"next", next,
			"since", time.Since(*next),
		)
		return s.NamesForTime(*next)
	}
	_ = level.Info(s.logger).Log("msg", "scheduler waiting",
		"next", time.Until(*next),
		"names", strings.Join(s.NamesForTime(*next), ","),
	)
	timer := time.NewTimer(time.Until(*next))
	defer timer.Stop() // tidy up the timer resource once it has fired
	<-timer.C
	return s.NamesForTime(*next)
}
// Step deletes the next timeSlice. This is determined to be the timeSlice
// that has just run. The expectation is that Step() is called once the
// events have completed firing to advance to the next position in time.
func (s *Scheduler) Step() {
	// Reuse the computed value instead of calling Next() a second time
	// (the original re-sorted the whole schedule inside delete()).
	if next := s.Next(); next != nil {
		delete(*s.timeSlice, *next)
	}
}
// Set appends the name given to the time slot given.
func (s *Scheduler) Set(t time.Time, name string) error {
if name == "" {
return fmt.Errorf("unable to schedule empty name at time %s", t)
}
if time.Until(t) < 0 {
if time.Since(t) > 5*time.Second {
return fmt.Errorf("not scheduling past event %s for %s, %s", name, t, time.Until(t))
}
}
if _, ok := (*s.timeSlice)[t]; !ok {
(*s.timeSlice)[t] = make([]string, 0)
}
_ = level.Debug(s.logger).Log("msg", "scheduling event",
"name", name,
"time", t,
)
timeHasName := func(names []string) bool {
for _, n := range names {
if n == name {
return true
}
}
return false
}((*s.timeSlice)[t])
if !timeHasName {
(*s.timeSlice)[t] = append((*s.timeSlice)[t], name)
}
return nil
} | pkg/events/schedule.go | 0.712232 | 0.402304 | schedule.go | starcoder |
package trie
import "container/list"
// Tree implements a ternary trie-tree (ternary search trie).
type Tree struct {
	// Root is the root of the tree. Only its Child field is valid.
	Root Node
	// nc is the total number of nodes in the tree.
	nc int
}
// New creates an empty Tree.
func New() *Tree {
	return &Tree{}
}
// Get walks the tree along key and returns the node reached, or nil when any
// rune along the way is missing.
func (tr *Tree) Get(key string) *Node {
	cur := &tr.Root
	for _, r := range key {
		if cur = cur.Get(r); cur == nil {
			return nil
		}
	}
	return cur
}
// Put stores value under key, creating intermediate nodes as needed and
// keeping the node count up to date.
func (tr *Tree) Put(key string, value interface{}) *Node {
	cur := &tr.Root
	for _, r := range key {
		next, created := cur.Dig(r)
		if created {
			tr.nc++
		}
		cur = next
	}
	cur.Value = value
	return cur
}
// Each visits all nodes breadth-first, stopping early when proc returns false.
func (tr *Tree) Each(proc NodeProc) {
	queue := list.New()
	queue.PushBack(&tr.Root)
	for queue.Len() > 0 {
		front := queue.Front()
		queue.Remove(front)
		node := front.Value.(*Node)
		if !proc(node) {
			return
		}
		// Enqueue every sibling in this node's child BST.
		node.Child.Each(func(child *Node) bool {
			queue.PushBack(child)
			return true
		})
	}
}
// Node implements a node of the ternary trie-tree. Low/High link siblings in
// a per-level binary search tree keyed by Label; Child descends a level.
type Node struct {
	Label rune
	Value interface{}
	Low *Node
	High *Node
	Child *Node
	cc int // count of children.
}
// Get searches this node's child BST for a node whose Label matches r,
// returning nil when absent.
func (n *Node) Get(r rune) *Node {
	for cur := n.Child; cur != nil; {
		switch {
		case r == cur.Label:
			return cur
		case r < cur.Label:
			cur = cur.Low
		default:
			cur = cur.High
		}
	}
	return nil
}
// Dig finds a child node whose Label matches r, or creates a new one when
// there is no match. isNew reports whether a node was created; the parent's
// cc (child count) is kept in sync.
func (n *Node) Dig(r rune) (node *Node, isNew bool) {
	// No children yet: create the first child and reset the count.
	if n.Child == nil {
		n.Child = &Node{Label: r}
		n.cc = 1
		return n.Child, true
	}
	// m keeps the original receiver so cc can be bumped on insertion while
	// n walks down the child BST.
	m := n
	n = n.Child
	for {
		switch {
		case r == n.Label:
			return n, false
		case r < n.Label:
			if n.Low == nil {
				n.Low = &Node{Label: r}
				m.cc++
				return n.Low, true
			}
			n = n.Low
		default:
			if n.High == nil {
				n.High = &Node{Label: r}
				m.cc++
				return n.High, true
			}
			n = n.High
		}
	}
}
// Balance rebuilds this node's child BST into a height-balanced form by
// collecting the siblings in order and re-linking them recursively.
func (n *Node) Balance() {
	if n.Child == nil {
		return
	}
	collected := make([]*Node, 0, n.cc)
	n.Child.Each(func(m *Node) bool {
		collected = append(collected, m)
		return true
	})
	n.Child = balanceNodes(collected, 0, len(collected))
}
// Each applies proc to every node of this sibling BST in order. It returns
// false as soon as proc does, aborting the traversal.
func (n *Node) Each(proc NodeProc) bool {
	if n == nil {
		return true
	}
	if !n.Low.Each(proc) {
		return false
	}
	if !proc(n) {
		return false
	}
	return n.High.Each(proc)
}
// balanceNodes rebuilds nodes[s:e) (already in sorted order) into a balanced
// BST and returns its root. Every reattached node has its Low/High pointers
// reset so no stale links from the old tree survive.
func balanceNodes(nodes []*Node, s, e int) *Node {
	c := e - s
	switch {
	case c <= 0:
		return nil
	case c == 1:
		n := nodes[s]
		n.Low = nil
		n.High = nil
		return n
	case c == 2:
		n := nodes[s]
		n.Low = nil
		n.High = nodes[s+1]
		// BUG FIX: the leaf attached as High previously kept its old
		// Low/High pointers, which could resurrect stale subtrees (or
		// create cycles) after balancing.
		n.High.Low = nil
		n.High.High = nil
		return n
	default:
		m := (s + e) / 2
		n := nodes[m]
		n.Low = balanceNodes(nodes, s, m)
		n.High = balanceNodes(nodes, m+1, e)
		return n
	}
}
// NodeProc is a callback applied to each node; return false to stop iteration.
type NodeProc func(*Node) bool
package iso20022
// CorporateActionRate4 specifies the rates of a corporate action event
// (ISO 20022 message component; XML tags follow the ISO abbreviations).
// All fields are optional pointers/slices; use the Add*/Set* builders below.
type CorporateActionRate4 struct {
	// Rate used to calculate the amount of the charges/fees that cannot be categorised.
	ChargesFees *RateAndAmountFormat5Choice `xml:"ChrgsFees,omitempty"`
	// Dividend is final.
	FinalDividendRate *ActiveCurrencyAnd13DecimalAmount `xml:"FnlDvddRate,omitempty"`
	// Percentage of fiscal tax to apply.
	FiscalStamp *PercentageRate `xml:"FsclStmp,omitempty"`
	// Rate resulting from a fully franked dividend paid by a company; rate includes tax credit for companies that have made sufficient tax payments during fiscal period.
	FullyFrankedRate *RateAndAmountFormat5Choice `xml:"FullyFrnkdRate,omitempty"`
	// Cash dividend amount per equity before deductions or allowances have been made.
	GrossDividendRate []*GrossDividendRateFormat2Choice `xml:"GrssDvddRate,omitempty"`
	// Rate of the cash premium made available if the securities holder consents or participates to an event, for example, consent fees.
	CashIncentiveRate *PercentageRate `xml:"CshIncntivRate,omitempty"`
	// Public index rate applied to the amount paid to adjust it to inflation.
	IndexFactor *RateAndAmountFormat5Choice `xml:"IndxFctr,omitempty"`
	// Actual interest rate used for the payment of the interest for the specified interest period.
	InterestRateUsedForPayment []*InterestRateUsedForPaymentFormat2Choice `xml:"IntrstRateUsdForPmt,omitempty"`
	// Cash dividend amount per equity after deductions or allowances have been made.
	NetDividendRate []*NetDividendRateFormat2Choice `xml:"NetDvddRate,omitempty"`
	// Rate per share to which a non-resident is entitled.
	NonResidentRate *RateAndAmountFormat5Choice `xml:"NonResdtRate,omitempty"`
	// A maximum percentage of shares available through the over subscription privilege, usually a percentage of the basic subscription shares, for example, an account owner subscribing to 100 shares may over subscribe to a maximum of 50 additional shares when the over subscription maximum is 50 percent.
	MaximumAllowedOversubscriptionRate *PercentageRate `xml:"MaxAllwdOvrsbcptRate,omitempty"`
	// Dividend is provisional.
	ProvisionalDividendRate *ActiveCurrencyAnd13DecimalAmount `xml:"PrvsnlDvddRate,omitempty"`
	// Proportionate allocation used for the offer.
	ProrationRate *PercentageRate `xml:"PrratnRate,omitempty"`
	// Cash rate made available in an offer in order to encourage participation in the offer.
	SolicitationFeeRate *PercentageRate `xml:"SlctnFeeRate,omitempty"`
	// Amount of money per equity allocated as the result of a tax credit.
	TaxCreditRate []*TaxCreditRateFormat2Choice `xml:"TaxCdtRate,omitempty"`
	// Percentage of the gross dividend rate on which tax must be paid.
	TaxRelatedRate []*RateTypeAndAmountAndStatus6 `xml:"TaxRltdRate,omitempty"`
	// Percentage of a cash distribution that will be withheld by a tax authority.
	WithholdingTaxRate *PercentageRate `xml:"WhldgTaxRate,omitempty"`
	// Taxation applied on an amount clearly identified as an income.
	TaxOnIncome *PercentageRate `xml:"TaxOnIncm,omitempty"`
	// Taxation applied on an amount clearly identified as capital profits, capital gains.
	TaxOnProfits *PercentageRate `xml:"TaxOnPrfts,omitempty"`
	// Percentage of cash that was paid in excess of actual tax obligation and was reclaimed.
	TaxReclaimRate *PercentageRate `xml:"TaxRclmRate,omitempty"`
	// Rate at which the income will be withheld by the jurisdiction in which the income was originally paid, for which relief at source and/or reclaim may be possible.
	WithholdingOfForeignTax *RateAndAmountFormat5Choice `xml:"WhldgOfFrgnTax,omitempty"`
	// Rate at which the income will be withheld by the jurisdiction in which the account owner is located, for which relief at source and/or reclaim may be possible.
	WithholdingOfLocalTax *RateAndAmountFormat5Choice `xml:"WhldgOfLclTax,omitempty"`
	// Rate used for additional tax that cannot be categorised.
	AdditionalTax *RateAndAmountFormat5Choice `xml:"AddtlTax,omitempty"`
	// Rate applicable to the event announced, for example, redemption rate for a redemption event.
	ApplicableRate *PercentageRate `xml:"AplblRate,omitempty"`
}
// AddChargesFees allocates ChargesFees and returns it for population.
func (c *CorporateActionRate4) AddChargesFees() *RateAndAmountFormat5Choice {
	c.ChargesFees = new(RateAndAmountFormat5Choice)
	return c.ChargesFees
}
// SetFinalDividendRate sets FinalDividendRate from a value/currency pair.
func (c *CorporateActionRate4) SetFinalDividendRate(value, currency string) {
	c.FinalDividendRate = NewActiveCurrencyAnd13DecimalAmount(value, currency)
}
// SetFiscalStamp sets FiscalStamp from its string representation.
func (c *CorporateActionRate4) SetFiscalStamp(value string) {
	c.FiscalStamp = (*PercentageRate)(&value)
}
// AddFullyFrankedRate allocates FullyFrankedRate and returns it.
func (c *CorporateActionRate4) AddFullyFrankedRate() *RateAndAmountFormat5Choice {
	c.FullyFrankedRate = new(RateAndAmountFormat5Choice)
	return c.FullyFrankedRate
}
// AddGrossDividendRate appends a new element to GrossDividendRate and returns it.
func (c *CorporateActionRate4) AddGrossDividendRate() *GrossDividendRateFormat2Choice {
	newValue := new(GrossDividendRateFormat2Choice)
	c.GrossDividendRate = append(c.GrossDividendRate, newValue)
	return newValue
}
// SetCashIncentiveRate sets CashIncentiveRate from its string representation.
func (c *CorporateActionRate4) SetCashIncentiveRate(value string) {
	c.CashIncentiveRate = (*PercentageRate)(&value)
}
// AddIndexFactor allocates IndexFactor and returns it.
func (c *CorporateActionRate4) AddIndexFactor() *RateAndAmountFormat5Choice {
	c.IndexFactor = new(RateAndAmountFormat5Choice)
	return c.IndexFactor
}
// AddInterestRateUsedForPayment appends a new element to InterestRateUsedForPayment and returns it.
func (c *CorporateActionRate4) AddInterestRateUsedForPayment() *InterestRateUsedForPaymentFormat2Choice {
	newValue := new(InterestRateUsedForPaymentFormat2Choice)
	c.InterestRateUsedForPayment = append(c.InterestRateUsedForPayment, newValue)
	return newValue
}
// AddNetDividendRate appends a new element to NetDividendRate and returns it.
func (c *CorporateActionRate4) AddNetDividendRate() *NetDividendRateFormat2Choice {
	newValue := new(NetDividendRateFormat2Choice)
	c.NetDividendRate = append(c.NetDividendRate, newValue)
	return newValue
}
// AddNonResidentRate allocates NonResidentRate and returns it.
func (c *CorporateActionRate4) AddNonResidentRate() *RateAndAmountFormat5Choice {
	c.NonResidentRate = new(RateAndAmountFormat5Choice)
	return c.NonResidentRate
}
// SetMaximumAllowedOversubscriptionRate sets MaximumAllowedOversubscriptionRate from its string representation.
func (c *CorporateActionRate4) SetMaximumAllowedOversubscriptionRate(value string) {
	c.MaximumAllowedOversubscriptionRate = (*PercentageRate)(&value)
}
// SetProvisionalDividendRate sets ProvisionalDividendRate from a value/currency pair.
func (c *CorporateActionRate4) SetProvisionalDividendRate(value, currency string) {
	c.ProvisionalDividendRate = NewActiveCurrencyAnd13DecimalAmount(value, currency)
}
// SetProrationRate sets ProrationRate from its string representation.
func (c *CorporateActionRate4) SetProrationRate(value string) {
	c.ProrationRate = (*PercentageRate)(&value)
}
// SetSolicitationFeeRate sets SolicitationFeeRate from its string representation.
func (c *CorporateActionRate4) SetSolicitationFeeRate(value string) {
	c.SolicitationFeeRate = (*PercentageRate)(&value)
}
// AddTaxCreditRate appends a new element to TaxCreditRate and returns it.
func (c *CorporateActionRate4) AddTaxCreditRate() *TaxCreditRateFormat2Choice {
	newValue := new(TaxCreditRateFormat2Choice)
	c.TaxCreditRate = append(c.TaxCreditRate, newValue)
	return newValue
}
// AddTaxRelatedRate appends a new element to TaxRelatedRate and returns it.
func (c *CorporateActionRate4) AddTaxRelatedRate() *RateTypeAndAmountAndStatus6 {
	newValue := new(RateTypeAndAmountAndStatus6)
	c.TaxRelatedRate = append(c.TaxRelatedRate, newValue)
	return newValue
}
// SetWithholdingTaxRate sets WithholdingTaxRate from its string representation.
func (c *CorporateActionRate4) SetWithholdingTaxRate(value string) {
	c.WithholdingTaxRate = (*PercentageRate)(&value)
}
// SetTaxOnIncome sets TaxOnIncome from its string representation.
func (c *CorporateActionRate4) SetTaxOnIncome(value string) {
	c.TaxOnIncome = (*PercentageRate)(&value)
}
// SetTaxOnProfits sets TaxOnProfits from its string representation.
func (c *CorporateActionRate4) SetTaxOnProfits(value string) {
	c.TaxOnProfits = (*PercentageRate)(&value)
}
// SetTaxReclaimRate sets TaxReclaimRate from its string representation.
func (c *CorporateActionRate4) SetTaxReclaimRate(value string) {
	c.TaxReclaimRate = (*PercentageRate)(&value)
}
// AddWithholdingOfForeignTax allocates WithholdingOfForeignTax and returns it.
func (c *CorporateActionRate4) AddWithholdingOfForeignTax() *RateAndAmountFormat5Choice {
	c.WithholdingOfForeignTax = new(RateAndAmountFormat5Choice)
	return c.WithholdingOfForeignTax
}
// AddWithholdingOfLocalTax allocates WithholdingOfLocalTax and returns it.
func (c *CorporateActionRate4) AddWithholdingOfLocalTax() *RateAndAmountFormat5Choice {
	c.WithholdingOfLocalTax = new(RateAndAmountFormat5Choice)
	return c.WithholdingOfLocalTax
}
// AddAdditionalTax allocates AdditionalTax and returns it.
func (c *CorporateActionRate4) AddAdditionalTax() *RateAndAmountFormat5Choice {
	c.AdditionalTax = new(RateAndAmountFormat5Choice)
	return c.AdditionalTax
}
// SetApplicableRate sets ApplicableRate from its string representation.
func (c *CorporateActionRate4) SetApplicableRate(value string) {
	c.ApplicableRate = (*PercentageRate)(&value)
}
package zapcore
import (
"bytes"
"fmt"
"math"
"reflect"
"time"
)
// A FieldType indicates which member of the Field union struct should be used
// and how it should be serialized.
type FieldType uint8
// Field type constants, one per supported payload kind.
const (
	// UnknownType is the default field type. Attempting to add it to an encoder will panic.
	UnknownType FieldType = iota
	// ArrayMarshalerType indicates that the field carries an ArrayMarshaler.
	ArrayMarshalerType
	// ObjectMarshalerType indicates that the field carries an ObjectMarshaler.
	ObjectMarshalerType
	// BinaryType indicates that the field carries an opaque binary blob.
	BinaryType
	// BoolType indicates that the field carries a bool.
	BoolType
	// ByteStringType indicates that the field carries UTF-8 encoded bytes.
	ByteStringType
	// Complex128Type indicates that the field carries a complex128.
	Complex128Type
	// Complex64Type indicates that the field carries a complex64.
	Complex64Type
	// DurationType indicates that the field carries a time.Duration.
	DurationType
	// Float64Type indicates that the field carries a float64.
	Float64Type
	// Float32Type indicates that the field carries a float32.
	Float32Type
	// Int64Type indicates that the field carries an int64.
	Int64Type
	// Int32Type indicates that the field carries an int32.
	Int32Type
	// Int16Type indicates that the field carries an int16.
	Int16Type
	// Int8Type indicates that the field carries an int8.
	Int8Type
	// StringType indicates that the field carries a string.
	StringType
	// TimeType indicates that the field carries a time.Time.
	TimeType
	// Uint64Type indicates that the field carries a uint64.
	Uint64Type
	// Uint32Type indicates that the field carries a uint32.
	Uint32Type
	// Uint16Type indicates that the field carries a uint16.
	Uint16Type
	// Uint8Type indicates that the field carries a uint8.
	Uint8Type
	// UintptrType indicates that the field carries a uintptr.
	UintptrType
	// ReflectType indicates that the field carries an interface{}, which should
	// be serialized using reflection.
	ReflectType
	// NamespaceType signals the beginning of an isolated namespace. All
	// subsequent fields should be added to the new namespace.
	NamespaceType
	// StringerType indicates that the field carries a fmt.Stringer.
	StringerType
	// ErrorType indicates that the field carries an error.
	ErrorType
	// SkipType indicates that the field is a no-op.
	SkipType
)
// Field definition: a tagged union keyed by Type.
// A Field is a marshaling operation used to add a key-value pair to a logger's
// context. Most fields are lazily marshaled, so it's inexpensive to add fields
// to disabled debug-level log statements.
type Field struct {
	Key string
	Type FieldType // discriminator selecting which member below is meaningful
	Integer int64
	String string
	Interface interface{}
}
// AddTo dispatches on f.Type and adds the field's payload to the encoder.
// AddTo exports a field through the ObjectEncoder interface. It's primarily
// useful to library authors, and shouldn't be necessary in most applications.
// If the encoder reports an error, it is itself logged under "<Key>Error".
func (f Field) AddTo(enc ObjectEncoder) {
	var err error
	switch f.Type {
	case ArrayMarshalerType:
		err = enc.AddArray(f.Key, f.Interface.(ArrayMarshaler))
	case ObjectMarshalerType:
		err = enc.AddObject(f.Key, f.Interface.(ObjectMarshaler))
	case BinaryType:
		enc.AddBinary(f.Key, f.Interface.([]byte))
	case BoolType:
		enc.AddBool(f.Key, f.Integer == 1)
	case ByteStringType:
		enc.AddByteString(f.Key, f.Interface.([]byte))
	case Complex128Type:
		enc.AddComplex128(f.Key, f.Interface.(complex128))
	case Complex64Type:
		enc.AddComplex64(f.Key, f.Interface.(complex64))
	case DurationType:
		enc.AddDuration(f.Key, time.Duration(f.Integer))
	case Float64Type:
		// Floats are smuggled through Integer as raw IEEE-754 bits.
		enc.AddFloat64(f.Key, math.Float64frombits(uint64(f.Integer)))
	case Float32Type:
		enc.AddFloat32(f.Key, math.Float32frombits(uint32(f.Integer)))
	case Int64Type:
		enc.AddInt64(f.Key, f.Integer)
	case Int32Type:
		enc.AddInt32(f.Key, int32(f.Integer))
	case Int16Type:
		enc.AddInt16(f.Key, int16(f.Integer))
	case Int8Type:
		enc.AddInt8(f.Key, int8(f.Integer))
	case StringType:
		enc.AddString(f.Key, f.String)
	case TimeType:
		if f.Interface != nil {
			enc.AddTime(f.Key, time.Unix(0, f.Integer).In(f.Interface.(*time.Location)))
		} else {
			// Fall back to UTC if location is nil.
			enc.AddTime(f.Key, time.Unix(0, f.Integer))
		}
	case Uint64Type:
		enc.AddUint64(f.Key, uint64(f.Integer))
	case Uint32Type:
		enc.AddUint32(f.Key, uint32(f.Integer))
	case Uint16Type:
		enc.AddUint16(f.Key, uint16(f.Integer))
	case Uint8Type:
		enc.AddUint8(f.Key, uint8(f.Integer))
	case UintptrType:
		enc.AddUintptr(f.Key, uintptr(f.Integer))
	case ReflectType:
		err = enc.AddReflected(f.Key, f.Interface)
	case NamespaceType:
		enc.OpenNamespace(f.Key)
	case StringerType:
		err = encodeStringer(f.Key, f.Interface, enc)
	case ErrorType:
		encodeError(f.Key, f.Interface.(error), enc)
	case SkipType:
		break
	default:
		panic(fmt.Sprintf("unknown field type: %v", f))
	}
	if err != nil {
		enc.AddString(fmt.Sprintf("%sError", f.Key), err.Error())
	}
}
// Equals reports whether two fields are equal. Non-primitive payloads such
// as errors, marshalers, or reflect values are compared with
// reflect.DeepEqual.
func (f Field) Equals(other Field) bool {
	if f.Type != other.Type || f.Key != other.Key {
		return false
	}
	switch f.Type {
	case BinaryType, ByteStringType:
		return bytes.Equal(f.Interface.([]byte), other.Interface.([]byte))
	case ArrayMarshalerType, ObjectMarshalerType, ErrorType, ReflectType:
		return reflect.DeepEqual(f.Interface, other.Interface)
	default:
		return f == other
	}
}
// addFields applies every field in the slice to the encoder.
func addFields(enc ObjectEncoder, fields []Field) {
	for _, f := range fields {
		f.AddTo(enc)
	}
}
// encodeStringer adds the fmt.Stringer's String() result under key, turning
// any panic raised by a misbehaving String implementation into an error via
// the deferred recover (err is a named return so the defer can set it).
func encodeStringer(key string, stringer interface{}, enc ObjectEncoder) (err error) {
	defer func() {
		if v := recover(); v != nil {
			err = fmt.Errorf("PANIC=%v", v)
		}
	}()
	enc.AddString(key, stringer.(fmt.Stringer).String())
	return
}
package generators
import (
"fmt"
"image"
"image/color"
"math"
"os"
"../tools"
)
// tunnelCreateColor interpolates a color for cycle i by blending between the
// two package-level palette entries (colors) that bracket i's position along
// the full cycle count. The result is then dimmed by dividing each channel
// by cycles/16. Integer division throughout, so bucket boundaries are coarse.
// NOTE(review): assumes cycles >= 16, otherwise uint16(cycles)/16 == 0 and
// the channel divisions panic — Tunnel defaults cycles to 200; confirm no
// caller passes a smaller value.
func tunnelCreateColor(i int) color.RGBA64 {
	nbColors := cycles
	nbAvailableColors := len(colors)
	nbColorsByIndex := nbColors / nbAvailableColors
	currentIndex := i * nbAvailableColors / nbColors
	nextIndex := currentIndex + 1
	// Wrap around to the first palette entry after the last one.
	if currentIndex == nbAvailableColors-1 {
		nextIndex = 0
	}
	// Position (0-100) of i within its palette segment.
	value := (i - (nbColorsByIndex * currentIndex)) * 100 / nbColorsByIndex
	computedColor := tools.ColorMean(value, &colors[currentIndex], &colors[nextIndex])
	computedColor.R /= uint16(cycles) / 16
	computedColor.G /= uint16(cycles) / 16
	computedColor.B /= uint16(cycles) / 16
	return computedColor
}
// AddToSandTunnelLine walks a Bresenham line between p0's offset copy and p0,
// drawing at each step a Wu line between the two offset endpoints, which are
// dragged along by the same integer moves. The offset copies are pulled
// toward the center in polar space by sandCoef.
// The original function computed dx/dy/sx/sy from p0/p1 and then immediately
// recomputed all four from p0/p0or before any use — that dead first
// computation has been removed; behavior is unchanged.
func AddToSandTunnelLine(img *image.RGBA64, p0, p1 Point, c color.RGBA64, sandCoef int) {
	// Copies of the endpoints with their polar coordinates refreshed.
	p0or := Point{
		cartesian: CartesianPoint{
			x: p0.cartesian.x,
			y: p0.cartesian.y,
		},
		polar: p0.cartesian.toPolar(p0.center),
		center: p0.center,
	}
	p1or := Point{
		cartesian: CartesianPoint{
			x: p1.cartesian.x,
			y: p1.cartesian.y,
		},
		polar: p1.cartesian.toPolar(p1.center),
		center: p1.center,
	}
	// Pull both copies toward the center by sandCoef.
	p0or.polarMove(0.2, -float64(sandCoef))
	p1or.polarMove(0.2, -float64(sandCoef))
	// Bresenham setup between the offset start point and the original start.
	dx := p0.cartesian.x - p0or.cartesian.x
	if dx < 0 {
		dx = -dx
	}
	dy := p0.cartesian.y - p0or.cartesian.y
	if dy < 0 {
		dy = -dy
	}
	sx := -1
	if p0or.cartesian.x < p0.cartesian.x {
		sx = 1
	}
	sy := -1
	if p0or.cartesian.y < p0.cartesian.y {
		sy = 1
	}
	err := dx - dy
	for {
		AddToWuLine(img, float64(p0or.cartesian.x), float64(p0or.cartesian.y), float64(p1or.cartesian.x), float64(p1or.cartesian.y), 1.0, c)
		if p0.cartesian.x == p0or.cartesian.x && p0.cartesian.y == p0or.cartesian.y {
			break
		}
		nextXMove := 0
		nextYMove := 0
		e2 := 2 * err
		if e2 > -dy {
			err -= dy
			nextXMove = sx
		}
		if e2 < dx {
			err += dx
			nextYMove = sy
		}
		// Both offset points advance by the same Bresenham step.
		p0or.cartesianMove(nextXMove, nextYMove)
		p1or.cartesianMove(nextXMove, nextYMove)
	}
}
// DrawSandTunnelCurve draws the closed polygon through points, connecting
// each point to the next (and the last back to the first) with sand-tunnel
// lines.
func DrawSandTunnelCurve(img *image.RGBA64, points []Point, c color.RGBA64, sandCoef int) {
	n := len(points)
	for i, p := range points {
		next := points[(i+1)%n]
		AddToSandTunnelLine(img, p, next, c, sandCoef)
	}
}
// Tunnel draws a turning tunnel shape onto the package-level image: a random
// closed polygon around the image center is expanded outward once per cycle,
// each ring drawn with an interpolated, dimmed palette color. Requires the
// package-level param1 (sand coefficient) and colors to be set; cycles
// defaults to 200 when unset.
func Tunnel() {
	if param1 == -1 {
		fmt.Println("p1 must be set for THS")
		os.Exit(-1)
	}
	if colors == nil {
		fmt.Println("colors must be set for THS")
		os.Exit(-1)
	}
	if cycles < 0 {
		cycles = 200
	}
	// Seeded generators: movement jitter, point count, and per-point angle noise.
	generator := tools.NewNumberGenerator(seed, 0, 6)
	generatorPoints := tools.NewNumberGenerator(seed, 12, 32)
	generatorPointRandomness := tools.NewNumberGenerator(seed, 0, 100)
	radius := 40.0
	nbPointsToGenerate := generatorPoints.NextPositive()
	bounds := img.Bounds()
	var shape []Point
	// Place the initial polygon on a circle around the image center, with a
	// small random angular offset per vertex.
	for i := 0; i < nbPointsToGenerate; i++ {
		randomness := float64(generatorPointRandomness.NextPositive()-50) / float64(nbPointsToGenerate) / 100
		point := math.Pi*2*float64(i)/float64(nbPointsToGenerate) + randomness
		x := int(math.Cos(point)*radius) + bounds.Max.X/2
		y := int(math.Sin(point)*radius) + bounds.Max.Y/2
		shape = append(shape, Point{
			polar: PolarPoint{
				angle: point,
				radius: distance(bounds.Max.X/2, bounds.Max.Y/2, x, y),
			},
			cartesian: CartesianPoint{
				x: x,
				y: y,
			},
			center: CartesianPoint{
				x: bounds.Max.X / 2,
				y: bounds.Max.Y / 2,
			},
		})
	}
	// Expand the shape outward one step per cycle and draw each ring.
	for i := 0; i < cycles; i++ {
		for index := range shape {
			moveAwayFromCenterWithCoef(bounds.Max.X/2, bounds.Max.Y/2, &shape[index], 1, &generatorPointRandomness, &generator)
		}
		// NOTE: this local shadows the imported image/color package inside
		// the loop body; kept as-is (doc-only change).
		color := tunnelCreateColor(i)
		DrawSandTunnelCurve(img, shape, color, param1)
		if updateImage != nil {
			updateImage(img)
		}
	}
}
package ask
import (
"math"
"reflect"
"regexp"
"strconv"
"strings"
)
// tokenMatcher splits one dot-separated path segment into an optional map
// key (group 1) and an optional "[index]" slice index (group 2),
// e.g. "items[2]" yields "items" and "2".
var tokenMatcher = regexp.MustCompile("([^[]+)?(?:\\[(\\d+)])?")
// Answer holds result of call to For, use one of its methods to extract a value.
type Answer struct {
	value interface{} // selected raw value; nil when the path did not resolve
}
// For is used to select a path from source to return as answer.
// Path segments are separated by dots; each segment may address a map key,
// a slice index in the form "[n]", or both ("key[n]").
func For(source interface{}, path string) *Answer {
	current := source
	for _, segment := range strings.Split(path, ".") {
		match := tokenMatcher.FindStringSubmatch(strings.TrimSpace(segment))
		if len(match) != 3 {
			continue
		}
		if key := match[1]; key != "" {
			asMap, ok := current.(map[string]interface{})
			if !ok {
				return &Answer{}
			}
			current = asMap[key]
		}
		if match[2] != "" {
			index, _ := strconv.Atoi(match[2])
			asSlice, ok := current.([]interface{})
			if !ok || index < 0 || len(asSlice) <= index {
				return &Answer{}
			}
			current = asSlice[index]
		}
	}
	return &Answer{value: current}
}
// Path does the same thing as For but uses existing answer as source,
// allowing chained lookups such as For(src, "a").Path("b[0]").
func (a *Answer) Path(path string) *Answer {
	return For(a.value, path)
}
// Exists returns a boolean indicating if the answer exists (not nil).
// Note: a nil value stored in the source map is indistinguishable from a
// missing key, since both leave the underlying value nil.
func (a *Answer) Exists() bool {
	return a.value != nil
}
// Value returns the raw value as type interface{}, can be nil if no value is available.
func (a *Answer) Value() interface{} {
	return a.value
}
// Slice attempts asserting answer as a []interface{}.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Slice(d []interface{}) ([]interface{}, bool) {
	if res, ok := a.value.([]interface{}); ok {
		return res, true
	}
	return d, false
}
// Map attempts asserting answer as a map[string]interface{}.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Map(d map[string]interface{}) (map[string]interface{}, bool) {
	if res, ok := a.value.(map[string]interface{}); ok {
		return res, true
	}
	return d, false
}
// String attempts asserting answer as a string.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) String(d string) (string, bool) {
	if res, ok := a.value.(string); ok {
		return res, true
	}
	return d, false
}
// Bool attempts asserting answer as a bool.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Bool(d bool) (bool, bool) {
	if res, ok := a.value.(bool); ok {
		return res, true
	}
	return d, false
}
// Int attempts asserting answer as a int64. Casting from other number types will be done if necessary.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Int(d int64) (int64, bool) {
	switch vt := a.value.(type) {
	case int, int8, int16, int32, int64:
		return reflect.ValueOf(vt).Int(), true
	case uint, uint8, uint16, uint32, uint64:
		val := reflect.ValueOf(vt).Uint()
		if val <= math.MaxInt64 {
			return int64(val), true
		}
	case float32, float64:
		val := reflect.ValueOf(vt).Float()
		// Accept any float that fits in int64, including negatives; the
		// previous `val >= 0` guard (copied from Uint) wrongly rejected
		// every negative float.
		if val >= math.MinInt64 && val <= math.MaxInt64 {
			return int64(val), true
		}
	}
	return d, false
}
// Uint attempts asserting answer as a uint64. Casting from other number types will be done if necessary.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Uint(d uint64) (uint64, bool) {
	switch vt := a.value.(type) {
	case uint, uint8, uint16, uint32, uint64:
		return reflect.ValueOf(vt).Uint(), true
	case int, int8, int16, int32, int64:
		if val := reflect.ValueOf(vt).Int(); val >= 0 {
			return uint64(val), true
		}
	case float32, float64:
		if val := reflect.ValueOf(vt).Float(); val >= 0 && val <= math.MaxUint64 {
			return uint64(val), true
		}
	}
	return d, false
}
// Float attempts asserting answer as a float64. Casting from other number types will be done if necessary.
// The first return value is the result, and the second indicates if the operation was successful.
// If not successful the first return value will be set to the d parameter.
func (a *Answer) Float(d float64) (float64, bool) {
	switch vt := a.value.(type) {
	case int, int8, int16, int32, int64:
		// Integers always convert, though magnitudes above 2^53 may lose
		// precision in the float64 representation.
		return float64(reflect.ValueOf(vt).Int()), true
	case uint, uint8, uint16, uint32, uint64:
		return float64(reflect.ValueOf(vt).Uint()), true
	case float32:
		return float64(vt), true
	case float64:
		return vt, true
	}
	return d, false
} | ask.go | 0.716417 | 0.425307 | ask.go | starcoder
package newznab
import (
"net/url"
"time"
)
// TV is a Content implementation that describes an episode of a TV
// series
type TV struct {
	// air date for the episode according to the entry
	AirDate time.Time
	// ID for the entry for the episode in TheTVDB
	TVDBID int64
	// ID for the entry for the episode in TVRage
	TVRageID int64
	// absolute number of the season
	Season uint
	// number of the episode; whether it is absolute or season-relative
	// depends on the indexer entry
	Episode uint
	// canonical title of the episode
	CanonicalTitle string
	// rating of the episode as recorded in the newznab entry
	Rating float64
}
// IsContent is a dummy function that implements the Content interface
func (TV) IsContent() {}
// Title returns the canonical title of the TV episode
func (t TV) Title() string { return t.CanonicalTitle }
// Aired returns the air date of the episode
func (t TV) Aired() time.Time { return t.AirDate }
// SetAired sets the air date of the episode to the value provided
// (pointer receiver so the mutation is visible to the caller)
func (t *TV) SetAired(date time.Time) { t.AirDate = date }
// Movie is a Content implementation that describes a movie
type Movie struct {
	// the air date of the movie according to the newznab entry
	AirDate time.Time
	// ID of the entry for the movie in IMDB
	IMDBID int64
	// title of the movie as recorded by the IMDB entry
	IMDBTitle string
	// year of movie release as recorded by IMDB
	IMDBYear time.Time
	// score of the movie as recorded by IMDB
	IMDBScore float64
	// URL for a cover image for the movie
	Cover *url.URL
}
// IsContent is a dummy function that implements the Content interface
func (Movie) IsContent() {}
// Title returns the IMDB title of the movie
func (m Movie) Title() string { return m.IMDBTitle }
// Aired returns the air date of the movie according to the newznab entry
func (m Movie) Aired() time.Time { return m.AirDate }
// SetAired sets the air date of the movie to the value provided
// (pointer receiver so the mutation is visible to the caller)
func (m *Movie) SetAired(date time.Time) { m.AirDate = date }
// Content describes the actual content that an entry corresponds to;
// that is, it describes the movie or episode. Both TV and Movie in this
// package satisfy it.
type Content interface {
	// IsContent is a dummy function that does nothing
	IsContent()
	// Aired returns the air date of the content
	Aired() time.Time
	// SetAired sets the air date of the content
	SetAired(date time.Time)
	// Title returns the canonical title of the content
	Title() string
} | newznab/content.go | 0.643665 | 0.408218 | content.go | starcoder
package geoip2
import (
"errors"
"strconv"
)
// Metadata holds the decoded metadata section of a MaxMind DB (.mmdb) file.
type Metadata struct {
	NodeCount uint32 // node_count This is an unsigned 32-bit integer indicating the number of nodes in the search tree.
	RecordSize uint16 // record_size This is an unsigned 16-bit integer. It indicates the number of bits in a record in the search tree. Note that each node consists of two records.
	IPVersion uint16 // ip_version This is an unsigned 16-bit integer which is always 4 or 6. It indicates whether the database contains IPv4 or IPv6 address data.
	DatabaseType string // database_type This is a string that indicates the structure of each data record associated with an IP address. The actual definition of these structures is left up to the database creator. Names starting with “GeoIP” are reserved for use by MaxMind (and “GeoIP” is a trademark anyway).
	Languages []string // languages An array of strings, each of which is a locale code. A given record may contain data items that have been localized to some or all of these locales. Records should not contain localized data for locales not included in this array. This is an optional key, as this may not be relevant for all types of data.
	BinaryFormatMajorVersion uint16 // binary_format_major_version This is an unsigned 16-bit integer indicating the major version number for the database’s binary format.
	BinaryFormatMinorVersion uint16 // binary_format_minor_version This is an unsigned 16-bit integer indicating the minor version number for the database’s binary format.
	BuildEpoch uint64 // build_epoch This is an unsigned 64-bit integer that contains the database build timestamp as a Unix epoch value.
	Description map[string]string // description This key will always point to a map. The keys of that map will be language codes, and the values will be a description in that language as a UTF-8 string. The codes may include additional information such as script or country identifiers, like “zh-TW” or “mn-Cyrl-MN”. The additional identifiers will be separated by a dash character (“-“).
}
// metadataStartMarker delimits the start of the metadata section near the
// end of an .mmdb file.
var metadataStartMarker = []byte("\xAB\xCD\xEFMaxMind.com")
// readMetadata decodes the metadata map of an .mmdb buffer into a Metadata
// struct. buffer must begin at the first byte after metadataStartMarker.
// Unknown keys and wrongly-typed values are rejected so format drift is
// surfaced instead of silently ignored.
func readMetadata(buffer []byte) (*Metadata, error) {
	// The metadata section is a single top-level map.
	dataType, metadataSize, offset, err := readControl(buffer, 0)
	if err != nil {
		return nil, err
	}
	if dataType != dataTypeMap {
		return nil, errors.New("invalid metadata type: " + strconv.Itoa(int(dataType)))
	}
	var key []byte
	metadata := &Metadata{}
	// Each iteration consumes one key/value pair; newOffset always ends up
	// just past the value so the next key read starts in the right place.
	for i := uint(0); i < metadataSize; i++ {
		key, offset, err = readMapKey(buffer, offset)
		if err != nil {
			return nil, err
		}
		size := uint(0)
		dataType, size, offset, err = readControl(buffer, offset)
		if err != nil {
			return nil, err
		}
		newOffset := uint(0)
		switch b2s(key) {
		case "binary_format_major_version":
			if dataType != dataTypeUint16 {
				return nil, errors.New("invalid binary_format_major_version type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.BinaryFormatMajorVersion = uint16(bytesToUInt64(buffer[offset:newOffset]))
		case "binary_format_minor_version":
			if dataType != dataTypeUint16 {
				return nil, errors.New("invalid binary_format_minor_version type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.BinaryFormatMinorVersion = uint16(bytesToUInt64(buffer[offset:newOffset]))
		case "build_epoch":
			if dataType != dataTypeUint64 {
				return nil, errors.New("invalid build_epoch type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.BuildEpoch = bytesToUInt64(buffer[offset:newOffset])
		case "database_type":
			if dataType != dataTypeString {
				return nil, errors.New("invalid database_type type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.DatabaseType = b2s(buffer[offset:newOffset])
		case "description":
			if dataType != dataTypeMap {
				return nil, errors.New("invalid description type: " + strconv.Itoa(int(dataType)))
			}
			metadata.Description, newOffset, err = readStringMapMap(buffer, size, offset)
			if err != nil {
				return nil, err
			}
		case "ip_version":
			if dataType != dataTypeUint16 {
				return nil, errors.New("invalid ip_version type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.IPVersion = uint16(bytesToUInt64(buffer[offset:newOffset]))
		case "languages":
			if dataType != dataTypeSlice {
				return nil, errors.New("invalid languages type: " + strconv.Itoa(int(dataType)))
			}
			metadata.Languages, newOffset, err = readStringSlice(buffer, size, offset)
			if err != nil {
				return nil, err
			}
		case "node_count":
			if dataType != dataTypeUint32 {
				return nil, errors.New("invalid node_count type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.NodeCount = uint32(bytesToUInt64(buffer[offset:newOffset]))
		case "record_size":
			if dataType != dataTypeUint16 {
				return nil, errors.New("invalid record_size type: " + strconv.Itoa(int(dataType)))
			}
			newOffset = offset + size
			metadata.RecordSize = uint16(bytesToUInt64(buffer[offset:newOffset]))
		default:
			return nil, errors.New("unknown key: " + string(key) + ", type: " + strconv.Itoa(int(dataType)))
		}
		offset = newOffset
	}
	return metadata, nil
} | vendor/github.com/IncSW/geoip2/metadata.go | 0.628749 | 0.551815 | metadata.go | starcoder
package backend
import (
"math"
"sort"
"github.com/iliyanmotovski/raytracer/backend/vector"
)
// Particle represents a point from where rays of "light" emit
type Particle struct {
	Pos *vector.Vector // position of the emitter
	Rays Rays // rays cast from Pos, extended as polygons are processed
}
// Creates new Particle with given position and sets directory of 8 base rays
// to the 4 corners of the screen, 2 for each corner, with a very small
// offset to the left and right of the center of the corner.
// sceneEdgesBounds is expected in the order top, right, bottom, left
// (indices 0-3), as unpacked below.
func NewParticle(x, y float64, sceneEdgesBounds Boundaries) *Particle {
	baseRays := make(Rays, 8)
	top := sceneEdgesBounds[0]
	right := sceneEdgesBounds[1]
	bottom := sceneEdgesBounds[2]
	left := sceneEdgesBounds[3]
	for i := range baseRays {
		baseRays[i] = NewRay(&vector.Vector{X: x, Y: y})
	}
	// The 0.0001 offsets keep corner rays from landing exactly on a corner,
	// which would make the closest-intersection choice ambiguous.
	baseRays[0].SetDir(left.B.X, left.B.Y+0.0001)
	baseRays[1].SetDir(top.A.X+0.0001, top.A.Y)
	baseRays[2].SetDir(top.B.X-0.0001, top.B.Y)
	baseRays[3].SetDir(right.A.X, right.A.Y+0.0001)
	baseRays[4].SetDir(right.B.X, right.B.Y-0.0001)
	baseRays[5].SetDir(bottom.A.X-0.0001, bottom.A.Y)
	baseRays[6].SetDir(bottom.B.X+0.0001, bottom.B.Y)
	baseRays[7].SetDir(left.A.X, left.A.Y-0.0001)
	return &Particle{Pos: &vector.Vector{X: x, Y: y}, Rays: baseRays}
}
// Process casts the rays
// and returns the particle's visibility polygon as a clockwise triangle
// fan: each ray contributes only its closest boundary intersection.
func (p *Particle) Process(boundaries Boundaries, polygons Polygons) Triangles {
	// Adds 2 rays for each polygon vertice and sets their direction with a very
	// small offset to the left and right of the vertice
	p.SetRaysDirToPolyVertices(polygons)
	// sorts the rays clockwise by angle
	p.SortRaysClockwise()
	edges := vector.Loop{}
	for _, ray := range p.Rays {
		var closest *vector.Vector
		lastDistance := math.Inf(1)
		for _, boundary := range boundaries {
			// casts the ray against each boundary
			intersection, ok := ray.Cast(boundary)
			if ok {
				// records the closest point of intersection
				// to the starting point of the ray
				distance := ray.A.Distance(*intersection)
				if distance < lastDistance {
					lastDistance = distance
					closest = intersection
				}
			}
		}
		// Rays that hit nothing contribute no edge point.
		if closest != nil {
			edges = append(edges, closest)
		}
	}
	return NewClockwiseTriangleFan(p.Pos, edges)
}
// SetRaysDirToPolyVertices adds 2 rays for each polygon vertice
// and sets their direction with a very small offset to the left
// and right of the vertice, so casting just grazes each corner and can
// also reach whatever lies behind it.
func (p *Particle) SetRaysDirToPolyVertices(polygons Polygons) {
	for _, polygon := range polygons {
		for _, vertex := range polygon.Loop {
			rayLeft := NewRay(p.Pos)
			rayLeft.SetDir(vertex.X-0.0001, vertex.Y-0.0001)
			rayRight := NewRay(p.Pos)
			rayRight.SetDir(vertex.X+0.0001, vertex.Y+0.0001)
			p.Rays = append(p.Rays, rayLeft, rayRight)
		}
	}
}
// SortRaysClockwise sorts the rays clockwise by angle
// so intersection points come out in fan order for triangulation.
// NOTE(review): Degrees() is recomputed on every comparison; caching it
// per ray may help if this shows up in profiles — confirm before changing.
func (p *Particle) SortRaysClockwise() {
	sort.Slice(p.Rays, func(i, j int) bool {
		return p.Rays[i].B.Degrees() < p.Rays[j].B.Degrees()
	})
} | backend/particle.go | 0.857306 | 0.713132 | particle.go | starcoder
package pg_query
import "encoding/json"
/*
* SubPlan - executable expression node for a subplan (sub-SELECT)
*
* The planner replaces SubLink nodes in expression trees with SubPlan
* nodes after it has finished planning the subquery. SubPlan references
* a sub-plantree stored in the subplans list of the toplevel PlannedStmt.
* (We avoid a direct link to make it easier to copy expression trees
* without causing multiple processing of the subplan.)
*
* In an ordinary subplan, testexpr points to an executable expression
* (OpExpr, an AND/OR tree of OpExprs, or RowCompareExpr) for the combining
* operator(s); the left-hand arguments are the original lefthand expressions,
* and the right-hand arguments are PARAM_EXEC Param nodes representing the
* outputs of the sub-select. (NOTE: runtime coercion functions may be
* inserted as well.) This is just the same expression tree as testexpr in
* the original SubLink node, but the PARAM_SUBLINK nodes are replaced by
* suitably numbered PARAM_EXEC nodes.
*
* If the sub-select becomes an initplan rather than a subplan, the executable
* expression is part of the outer plan's expression tree (and the SubPlan
* node itself is not, but rather is found in the outer plan's initPlan
* list). In this case testexpr is NULL to avoid duplication.
*
* The planner also derives lists of the values that need to be passed into
* and out of the subplan. Input values are represented as a list "args" of
* expressions to be evaluated in the outer-query context (currently these
* args are always just Vars, but in principle they could be any expression).
* The values are assigned to the global PARAM_EXEC params indexed by parParam
* (the parParam and args lists must have the same ordering). setParam is a
* list of the PARAM_EXEC params that are computed by the sub-select, if it
* is an initplan; they are listed in order by sub-select output column
* position. (parParam and setParam are integer Lists, not Bitmapsets,
* because their ordering is significant.)
*
* Also, the planner computes startup and per-call costs for use of the
* SubPlan. Note that these include the cost of the subquery proper,
* evaluation of the testexpr if any, and any hashtable management overhead.
*/
// SubPlan is the Go mirror of PostgreSQL's SubPlan expression node; see
// the explanatory comment block above for the semantics of each field
// group. JSON tags follow the Postgres field names.
type SubPlan struct {
	Xpr Node `json:"xpr"`
	/* Fields copied from original SubLink: */
	SubLinkType SubLinkType `json:"subLinkType"` /* see above */
	/* The combining operators, transformed to an executable expression: */
	Testexpr Node `json:"testexpr"` /* OpExpr or RowCompareExpr expression tree */
	ParamIds List `json:"paramIds"` /* IDs of Params embedded in the above */
	/* Identification of the Plan tree to use: */
	PlanId int `json:"plan_id"` /* Index (from 1) in PlannedStmt.subplans */
	/* Identification of the SubPlan for EXPLAIN and debugging purposes: */
	PlanName *string `json:"plan_name"` /* A name assigned during planning */
	/* Extra data useful for determining subplan's output type: */
	FirstColType Oid `json:"firstColType"` /* Type of first column of subplan result */
	FirstColTypmod int32 `json:"firstColTypmod"` /* Typmod of first column of subplan result */
	FirstColCollation Oid `json:"firstColCollation"` /* Collation of first column of
	* subplan result */
	/* Information about execution strategy: */
	UseHashTable bool `json:"useHashTable"` /* TRUE to store subselect output in a hash
	* table (implies we are doing "IN") */
	UnknownEqFalse bool `json:"unknownEqFalse"` /* TRUE if it's okay to return FALSE when the
	* spec result is UNKNOWN; this allows much
	* simpler handling of null values */
	/* Information for passing params into and out of the subselect: */
	/* setParam and parParam are lists of integers (param IDs) */
	SetParam List `json:"setParam"` /* initplan subqueries have to set these
	* Params for parent plan */
	ParParam List `json:"parParam"` /* indices of input Params from parent plan */
	Args List `json:"args"` /* exprs to pass as parParam values */
	/* Estimated execution costs: */
	StartupCost Cost `json:"startup_cost"` /* one-time setup cost */
	PerCallCost Cost `json:"per_call_cost"` /* cost for each subplan evaluation */
}
// MarshalJSON encodes the node as a one-key JSON object keyed by its node
// type name, using a method-free alias type to avoid infinite recursion.
func (node SubPlan) MarshalJSON() ([]byte, error) {
	type subPlanAlias SubPlan
	wrapper := map[string]interface{}{
		"SubPlan": (*subPlanAlias)(&node),
	}
	return json.Marshal(wrapper)
}
// UnmarshalJSON decodes a SubPlan from its JSON form, dispatching each
// known field either to a plain json.Unmarshal or to the node-tree
// unmarshallers for Node-valued and List-valued fields. Fields absent
// from the input keep their zero values; unknown fields are ignored.
func (node *SubPlan) UnmarshalJSON(input []byte) (err error) {
	var fields map[string]json.RawMessage
	err = json.Unmarshal(input, &fields)
	if err != nil {
		return
	}
	if fields["xpr"] != nil {
		node.Xpr, err = UnmarshalNodeJSON(fields["xpr"])
		if err != nil {
			return
		}
	}
	if fields["subLinkType"] != nil {
		err = json.Unmarshal(fields["subLinkType"], &node.SubLinkType)
		if err != nil {
			return
		}
	}
	if fields["testexpr"] != nil {
		node.Testexpr, err = UnmarshalNodeJSON(fields["testexpr"])
		if err != nil {
			return
		}
	}
	if fields["paramIds"] != nil {
		node.ParamIds.Items, err = UnmarshalNodeArrayJSON(fields["paramIds"])
		if err != nil {
			return
		}
	}
	if fields["plan_id"] != nil {
		err = json.Unmarshal(fields["plan_id"], &node.PlanId)
		if err != nil {
			return
		}
	}
	if fields["plan_name"] != nil {
		err = json.Unmarshal(fields["plan_name"], &node.PlanName)
		if err != nil {
			return
		}
	}
	if fields["firstColType"] != nil {
		err = json.Unmarshal(fields["firstColType"], &node.FirstColType)
		if err != nil {
			return
		}
	}
	if fields["firstColTypmod"] != nil {
		err = json.Unmarshal(fields["firstColTypmod"], &node.FirstColTypmod)
		if err != nil {
			return
		}
	}
	if fields["firstColCollation"] != nil {
		err = json.Unmarshal(fields["firstColCollation"], &node.FirstColCollation)
		if err != nil {
			return
		}
	}
	if fields["useHashTable"] != nil {
		err = json.Unmarshal(fields["useHashTable"], &node.UseHashTable)
		if err != nil {
			return
		}
	}
	if fields["unknownEqFalse"] != nil {
		err = json.Unmarshal(fields["unknownEqFalse"], &node.UnknownEqFalse)
		if err != nil {
			return
		}
	}
	if fields["setParam"] != nil {
		node.SetParam.Items, err = UnmarshalNodeArrayJSON(fields["setParam"])
		if err != nil {
			return
		}
	}
	if fields["parParam"] != nil {
		node.ParParam.Items, err = UnmarshalNodeArrayJSON(fields["parParam"])
		if err != nil {
			return
		}
	}
	if fields["args"] != nil {
		node.Args.Items, err = UnmarshalNodeArrayJSON(fields["args"])
		if err != nil {
			return
		}
	}
	if fields["startup_cost"] != nil {
		err = json.Unmarshal(fields["startup_cost"], &node.StartupCost)
		if err != nil {
			return
		}
	}
	if fields["per_call_cost"] != nil {
		err = json.Unmarshal(fields["per_call_cost"], &node.PerCallCost)
		if err != nil {
			return
		}
	}
	return
} | vendor/github.com/lfittl/pg_query_go/nodes/sub_plan.go | 0.643441 | 0.513546 | sub_plan.go | starcoder
package lfsr
import (
"time"
)
// Lfsr8 represents an 8 bit linear feedback shift register
type Lfsr8 struct {
	state uint8
	seed uint8
}
// NewLfsr8 returns an 8-bit LFSR initialized with seed. A zero seed (which
// would lock the register in the all-zero state) is replaced with a
// nonzero value derived from the wall clock.
func NewLfsr8(seed uint8) *Lfsr8 {
	for seed == 0 {
		seed = uint8(time.Now().Nanosecond() & 0xff)
	}
	return &Lfsr8{state: seed, seed: seed}
}
// Next shifts the register one step and returns the new state, together
// with a flag reporting that the sequence has cycled back to its seed.
func (l *Lfsr8) Next() (value uint8, restarted bool) {
	cur := l.state
	feedback := (cur ^ (cur >> 2) ^ (cur >> 3) ^ (cur >> 4)) & 1
	l.state = (cur >> 1) | (feedback << 7)
	return l.state, l.state == l.seed
}
// Lfsr16 represents an 16 bit linear feedback shift register
type Lfsr16 struct {
	state uint16
	seed uint16
}
// NewLfsr16 returns a 16-bit LFSR initialized with seed. A zero seed
// (which would lock the register) is replaced with a nonzero value
// derived from the wall clock.
func NewLfsr16(seed uint16) *Lfsr16 {
	for seed == 0 {
		seed = uint16(time.Now().Nanosecond() & 0xffff)
	}
	return &Lfsr16{state: seed, seed: seed}
}
// Next shifts the register one step and returns the new state, together
// with a flag reporting that the sequence has cycled back to its seed.
func (l *Lfsr16) Next() (value uint16, restarted bool) {
	cur := l.state
	feedback := (cur ^ (cur >> 2) ^ (cur >> 3) ^ (cur >> 5)) & 1
	l.state = (cur >> 1) | (feedback << 15)
	return l.state, l.state == l.seed
}
// Lfsr32 represents an 32 bit linear feedback shift register
type Lfsr32 struct {
	state uint32
	seed uint32
}
// NewLfsr32 returns a 32-bit LFSR initialized with seed. A zero seed
// (which would lock the register) is replaced with a nonzero value
// derived from the wall clock.
func NewLfsr32(seed uint32) *Lfsr32 {
	for seed == 0 {
		seed = uint32(time.Now().Nanosecond() & 0xffffffff)
	}
	return &Lfsr32{state: seed, seed: seed}
}
// Next shifts the register one step and returns the new state, together
// with a flag reporting that the sequence has cycled back to its seed.
func (l *Lfsr32) Next() (value uint32, restarted bool) {
	cur := l.state
	feedback := (cur ^ (cur >> 2) ^ (cur >> 6) ^ (cur >> 7)) & 1
	l.state = (cur >> 1) | (feedback << 31)
	return l.state, l.state == l.seed
}
// Lfsr64 represents a 64 bit linear feedback shift register
type Lfsr64 struct {
	state uint64
	seed uint64
}
// NewLfsr64 returns a linear feedback shift register initialized with the specified seed. If the seed is zero the seed is initialized using the current time.
// NOTE(review): the 0xffffffff mask keeps only 32 bits of the nanosecond
// clock — harmless since Nanosecond() < 1e9, but it looks copied from the
// 32-bit constructor; confirm before widening.
func NewLfsr64(seed uint64) *Lfsr64 {
	for seed == 0 {
		seed = uint64(time.Now().Nanosecond() & 0xffffffff)
	}
	return &Lfsr64{seed, seed}
}
// Next returns the next pseudo random number from the linear feedback shift register and the restarted flag
// which indicates that the sequence has completed and is restarting.
func (l *Lfsr64) Next() (value uint64, restarted bool) {
	s := l.state
	// Only bit 0 of b survives the << 63, so this is the XOR of taps 0,1,3,4.
	b := (s >> 0) ^ (s >> 1) ^ (s >> 3) ^ (s >> 4)
	l.state = (s >> 1) | (b << 63)
	return l.state, l.state == l.seed
} | lfsr.go | 0.888002 | 0.53048 | lfsr.go | starcoder
package iex
import (
"fmt"
"time"
)
// HistoricalTimeFrame enum for selecting time frame of historical data
type HistoricalTimeFrame string
const (
	// FiveDayHistorical Five days historically adjusted market-wide data
	FiveDayHistorical HistoricalTimeFrame = "5d"
	// FiveDay10MinuteHistorical Five days historically adjusted market-wide data in 10 minute intervals
	FiveDay10MinuteHistorical HistoricalTimeFrame = "5dm"
	// OneMonthHistorical One month (default) historically adjusted market-wide data
	OneMonthHistorical HistoricalTimeFrame = "1m"
	// OneMonth30MinuteHistorical One month historically adjusted market-wide data in 30 minute intervals
	OneMonth30MinuteHistorical HistoricalTimeFrame = "1mm"
	// ThreeMonthHistorical Three months historically adjusted market-wide data
	ThreeMonthHistorical HistoricalTimeFrame = "3m"
	// SixMonthHistorical Six months historically adjusted market-wide data
	SixMonthHistorical HistoricalTimeFrame = "6m"
	// OneYearHistorical One year historically adjusted market-wide data
	OneYearHistorical HistoricalTimeFrame = "1y"
	// TwoYearHistorical Two year historically adjusted market-wide data
	TwoYearHistorical HistoricalTimeFrame = "2y"
	// FiveYearHistorical Five year historically adjusted market-wide data
	FiveYearHistorical HistoricalTimeFrame = "5y"
	// YearToDateHistorical Year to date historically adjusted market-wide data
	YearToDateHistorical HistoricalTimeFrame = "ytd"
	// MaxHistorical All available historically adjusted market-wide data up to 15 years
	MaxHistorical HistoricalTimeFrame = "max"
)
// Valid reports whether htf is one of the defined time-frame constants.
func (htf HistoricalTimeFrame) Valid() bool {
	known := []HistoricalTimeFrame{
		FiveDayHistorical,
		FiveDay10MinuteHistorical,
		OneMonthHistorical,
		OneMonth30MinuteHistorical,
		ThreeMonthHistorical,
		SixMonthHistorical,
		OneYearHistorical,
		TwoYearHistorical,
		FiveYearHistorical,
		YearToDateHistorical,
		MaxHistorical,
	}
	for _, frame := range known {
		if htf == frame {
			return true
		}
	}
	return false
}
// IntradayHistoricalDataPoint Represents a single intraday data point for a stock.
// Field names mirror the keys of the IEX intraday-prices JSON payload.
type IntradayHistoricalDataPoint struct {
	Date Date `json:"date"`
	Minute string `json:"minute"`
	Label string `json:"label"`
	High float64 `json:"high"`
	Low float64 `json:"low"`
	Average float64 `json:"average"`
	Volume int `json:"volume"`
	Notional float64 `json:"notional"`
	NumberOfTrades int `json:"numberOfTrades"`
	MarketHigh float64 `json:"marketHigh"`
	MarketLow float64 `json:"marketLow"`
	MarketAverage float64 `json:"marketAverage"`
	MarketVolume int `json:"marketVolume"`
	MarketNotional float64 `json:"marketNotional"`
	MarketNumberOfTrades int `json:"marketNumberOfTrades"`
	Open float64 `json:"open"`
	Close float64 `json:"close"`
	MarketOpen float64 `json:"marketOpen"`
	MarketClose float64 `json:"marketClose"`
	ChangeOverTime float64 `json:"changeOverTime"`
	MarketChangeOverTime float64 `json:"marketChangeOverTime"`
}
// HistoricalOptions optional query params to pass to historical endpoint
// If values are false or 0 they aren't passed.
// Fields tagged `url` are encoded as query-string parameters; omitempty
// drops zero values from the request.
type HistoricalOptions struct {
	ChartCloseOnly bool `url:"chartCloseOnly,omitempty"`
	ChartSimplify bool `url:"chartSimplify,omitempty"`
	ChartInterval int `url:"chartInterval,omitempty"`
	ChangeFromClose bool `url:"changeFromClose,omitempty"`
	ChartLast int `url:"chartLast,omitempty"`
	DisplayPercent bool `url:"displayPercent,omitempty"`
	Range string `url:"range,omitempty"`
	ExactDate string `url:"exactDate,omitempty"`
	Sort string `url:"sort,omitempty"`
	IncludeToday bool `url:"includeToday,omitempty"`
}
// IntradayHistoricalOptions optional query params to pass to intraday historical endpoint
// If values are false or 0 they aren't passed.
// Fields tagged `url` are encoded as query-string parameters.
type IntradayHistoricalOptions struct {
	ChartIEXOnly bool `url:"chartIEXOnly,omitempty"`
	ChartReset bool `url:"chartReset,omitempty"`
	ChartSimplify bool `url:"chartSimplify,omitempty"`
	ChartInterval int `url:"chartInterval,omitempty"`
	ChangeFromClose bool `url:"changeFromClose,omitempty"`
	ChartLast int `url:"chartLast,omitempty"`
}
// HistoricalDataPoint Represents a single historical data point for a stock.
// The U-prefixed fields appear to be the unadjusted counterparts of the
// plain OHLCV fields — confirm against the IEX chart endpoint docs.
type HistoricalDataPoint struct {
	Close float64 `json:"close"`
	High float64 `json:"high"`
	Low float64 `json:"low"`
	Open float64 `json:"open"`
	Symbol string `json:"symbol"`
	Volume float64 `json:"volume"`
	ID string `json:"id"`
	Key string `json:"key"`
	Subkey string `json:"subkey"`
	Date Date `json:"date"`
	Minute string `json:"minute"`
	UOpen float64 `json:"uOpen"`
	UClose float64 `json:"uClose"`
	UHigh float64 `json:"uHigh"`
	ULow float64 `json:"uLow"`
	UVolume float64 `json:"uVolume"`
	Change float64 `json:"change"`
	ChangePercent float64 `json:"changePercent"`
	Label string `json:"label"`
	ChangeOverTime float64 `json:"changeOverTime"`
}
// Time merges the data point's Date and Minute fields into the exact
// instant of the bucket. Useful for the "5dm" and "1mm" time frames;
// when Minute is empty the date alone is returned.
func (p HistoricalDataPoint) Time() time.Time {
	if p.Minute == "" {
		return time.Time(p.Date)
	}
	const layout = "2006-01-02 15:04"
	combined := fmt.Sprintf("%s %s", p.Date.String(), p.Minute)
	parsed, _ := time.Parse(layout, combined)
	return parsed
}
// IntradayOptions optional query params to pass to intraday endpoint
// If values are false or 0 they aren't passed.
// ExactDate must use the YYYYMMDD layout produced by SetExactDate.
type IntradayOptions struct {
	ChartIEXOnly bool `url:"chartIEXOnly,omitempty"`
	ChartReset bool `url:"chartReset,omitempty"`
	ChartSimplify bool `url:"chartSimplify,omitempty"`
	ChartInterval int `url:"chartInterval,omitempty"`
	ChangeFromClose bool `url:"changeFromClose,omitempty"`
	ChartLast int `url:"chartLast,omitempty"`
	ExactDate string `url:"exactDate,omitempty"` // Formatted as YYYYMMDD
	ChartIEXWhenNull bool `url:"chartIEXWhenNull,omitempty"`
}
// SetExactDate formats a given date as IEX expects
// ("20060102", i.e. YYYYMMDD) and stores it in opt.ExactDate.
func (opt *IntradayOptions) SetExactDate(day time.Time) {
	opt.ExactDate = day.Format("20060102")
} | historical.go | 0.729038 | 0.535524 | historical.go | starcoder
package algorithms
import (
"bytes"
"log"
"math"
"strconv"
)
// Direction encodes the traceback move taken in a dynamic-programming
// alignment matrix.
type Direction int
const (
	Diag Direction = 0
	Up Direction = 1
	Left Direction = 2
	Stop Direction = 3 // Only used for S-W algorithm
)
// minimum4 returns the smallest of four ints.
// Plain integer comparisons replace the previous math.Min float64
// round-trip, which silently loses precision for values above 2^53.
func minimum4(element1 int, element2 int, element3 int, element4 int) int {
	m := element1
	if element2 < m {
		m = element2
	}
	if element3 < m {
		m = element3
	}
	if element4 < m {
		m = element4
	}
	return m
}
// minimum3 returns the smallest of three ints.
// Plain integer comparisons replace the previous math.Min float64
// round-trip, which silently loses precision for values above 2^53.
func minimum3(element1 int, element2 int, element3 int) int {
	m := element1
	if element2 < m {
		m = element2
	}
	if element3 < m {
		m = element3
	}
	return m
}
// minimum2 returns the smaller of two ints.
// Direct comparison replaces the previous math.Min float64 round-trip,
// which returned wrong results for values above 2^53 (e.g. both
// 2^53+1 and 2^53+2 round to the same float64).
func minimum2(element1 int, element2 int) int {
	if element1 < element2 {
		return element1
	}
	return element2
}
// maximumwithdirection3 returns the largest of the three scores and the
// traceback direction it came from. Ties resolve in the order Diag, Up,
// Left, matching the original equality-check order. Integer comparisons
// replace the math.Max float64 round-trip, which loses precision for
// values above 2^53.
func maximumwithdirection3(diagonal int, up int, left int) (value int, direction Direction) {
	value, direction = diagonal, Diag
	if up > value {
		value, direction = up, Up
	}
	if left > value {
		value, direction = left, Left
	}
	return
}
// maximumwithdirection4 returns the largest of the four scores and the
// traceback direction it came from; control wins only when it strictly
// exceeds all others (tie order Diag, Up, Left, Stop, matching the
// original equality-check order). Integer comparisons replace the
// math.Max float64 round-trip, which loses precision above 2^53.
func maximumwithdirection4(diagonal int, up int, left int, control int) (value int, direction Direction) {
	value, direction = diagonal, Diag
	if up > value {
		value, direction = up, Up
	}
	if left > value {
		value, direction = left, Left
	}
	if control > value {
		value, direction = control, Stop
	}
	return
}
// maximum2 returns the larger of two ints.
// Direct comparison replaces the previous math.Max float64 round-trip,
// which returned wrong results for values above 2^53.
func maximum2(element1 int, element2 int) int {
	if element1 > element2 {
		return element1
	}
	return element2
}
// logArrayLine logs an int matrix, one row per log line with values separated
// by single spaces. Rows are assumed to be at least as long as the first row.
func logArrayLine(array [][]int) {
	// Bug fix: the previous version indexed array[0] unconditionally and
	// panicked on an empty matrix.
	if len(array) == 0 {
		return
	}
	lenCol := len(array[0])
	lenRow := len(array)
	for i := 0; i < lenRow; i++ {
		buffer := bytes.NewBufferString("")
		for j := 0; j < lenCol; j++ {
			buffer.WriteString(strconv.Itoa(array[i][j]))
			buffer.WriteString(" ")
		}
		log.Println(buffer.String())
	}
}
// revertString returns stringTest with its runes in reverse order.
// It works on runes, not bytes, so multi-byte UTF-8 characters stay intact.
func revertString(stringTest string) string {
	runes := []rune(stringTest)
	for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
		runes[i], runes[j] = runes[j], runes[i]
	}
	return string(runes)
}
// compareSameSizeString returns a Hamming-style distance between two strings:
// the count of positions (over the shorter rune length) whose runes differ,
// plus the difference in rune counts. The result is logged before returning.
func compareSameSizeString(stringA string, stringB string) int {
	runesA, runesB := []rune(stringA), []rune(stringB)
	distance := 0
	for i := 0; i < len(runesA) && i < len(runesB); i++ {
		if runesA[i] != runesB[i] {
			distance++
		}
	}
	// Every unmatched trailing rune of the longer string counts as one edit.
	distance += int(math.Abs(float64(len(runesA) - len(runesB))))
	log.Println("Found distance: ", distance)
	return distance
}
// appendPosition appends the triple (x, y, value) as a new row to arrayPos and
// returns the extended slice, following the usual append contract.
func appendPosition(arrayPos [][]int, x int, y int, value int) [][]int {
	// The original allocated a throwaway [1][3]int just to build one row;
	// a slice literal expresses the same thing directly.
	return append(arrayPos, []int{x, y, value})
}
package imgui
// #cgo CXXFLAGS: -std=c++11
// #include "wrapper/implotWrapper.h"
import "C"
import "unsafe"
// The following functions MUST be called BEFORE BeginPlot!
// Set the axes range limits of the next plot. Call right before BeginPlot(). If ImGuiCond_Always is used, the axes limits will be locked.
func ImPlotSetNextPlotLimits(xmin, xmax, ymin, ymax float64, cond Condition) {
C.iggImPlotSetNextPlotLimits(C.double(xmin), C.double(xmax), C.double(ymin), C.double(ymax), C.int(cond))
}
// ImPlotSetNextPlotTicksX sets custom tick positions (values) and tick labels
// for the next plot's x-axis; showDefault keeps the default ticks as well.
// The call is a no-op when either slice is empty.
// NOTE(review): the C call receives len(values) as the count; confirm callers
// always pass len(labels) >= len(values), otherwise labelsArg is over-read.
func ImPlotSetNextPlotTicksX(values []float64, labels []string, showDefault bool) {
	if len(values) == 0 || len(labels) == 0 {
		return
	}
	labelsArg := make([]*C.char, len(labels))
	for i, l := range labels {
		la, lf := wrapString(l)
		// defer in a loop: all C strings are freed at function return,
		// which is after the C call below — safe for this short function.
		defer lf()
		labelsArg[i] = la
	}
	C.iggImPlotSetNextPlotTicksX(
		(*C.double)(unsafe.Pointer(&values[0])),
		C.int(len(values)),
		&labelsArg[0],
		castBool(showDefault),
	)
}
func ImPlotSetNextPlotTicksY(values []float64, labels []string, showDefault bool, yAxis int) {
if len(values) == 0 || len(labels) == 0 {
return
}
labelsArg := make([]*C.char, len(labels))
for i, l := range labels {
la, lf := wrapString(l)
defer lf()
labelsArg[i] = la
}
C.iggImPlotSetNextPlotTicksY(
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
&labelsArg[0],
castBool(showDefault),
C.int(yAxis),
)
}
// ImPlotFitNextPlotAxis requests an auto-fit of the selected axes (x, y, y2,
// y3) on the next plot. (The Go name says "Axis" while the wrapped C function
// is iggImPlotFitNextPlotAxes — presumably a naming slip kept for API
// compatibility.)
func ImPlotFitNextPlotAxis(x, y, y2, y3 bool) {
	C.iggImPlotFitNextPlotAxes(
		castBool(x),
		castBool(y),
		castBool(y2),
		castBool(y3),
	)
}
type ImPlotContext struct {
handle C.IggImPlotContext
}
// Creates a new ImPlot context. Call this after ImGui::CreateContext.
func ImPlotCreateContext() *ImPlotContext {
return &ImPlotContext{handle: C.iggImPlotCreateContext()}
}
// Destroys an ImPlot context. Call this before ImGui::DestroyContext. NULL = destroy current context
func ImPlotDestroyContext() {
C.iggImPlotDestroyContext()
}
type ImPlotFlags int
const (
ImPlotFlags_None ImPlotFlags = 0 // default
ImPlotFlags_NoTitle ImPlotFlags = 1 << 0 // the plot title will not be displayed (titles are also hidden if preceeded by double hashes, e.g. "##MyPlot")
ImPlotFlags_NoLegend ImPlotFlags = 1 << 1 // the legend will not be displayed
ImPlotFlags_NoMenus ImPlotFlags = 1 << 2 // the user will not be able to open context menus with right-click
ImPlotFlags_NoBoxSelect ImPlotFlags = 1 << 3 // the user will not be able to box-select with right-click drag
ImPlotFlags_NoMousePos ImPlotFlags = 1 << 4 // the mouse position, in plot coordinates, will not be displayed inside of the plot
ImPlotFlags_NoHighlight ImPlotFlags = 1 << 5 // plot items will not be highlighted when their legend entry is hovered
ImPlotFlags_NoChild ImPlotFlags = 1 << 6 // a child window region will not be used to capture mouse scroll (can boost performance for single ImGui window applications)
ImPlotFlags_Equal ImPlotFlags = 1 << 7 // primary x and y axes will be constrained to have the same units/pixel (does not apply to auxiliary y-axes)
ImPlotFlags_YAxis2 ImPlotFlags = 1 << 8 // enable a 2nd y-axis on the right side
ImPlotFlags_YAxis3 ImPlotFlags = 1 << 9 // enable a 3rd y-axis on the right side
ImPlotFlags_Query ImPlotFlags = 1 << 10 // the user will be able to draw query rects with middle-mouse or CTRL + right-click drag
ImPlotFlags_Crosshairs ImPlotFlags = 1 << 11 // the default mouse cursor will be replaced with a crosshair when hovered
ImPlotFlags_AntiAliased ImPlotFlags = 1 << 12 // plot lines will be software anti-aliased (not recommended for high density plots, prefer MSAA)
ImPlotFlags_CanvasOnly ImPlotFlags = ImPlotFlags_NoTitle | ImPlotFlags_NoLegend | ImPlotFlags_NoMenus | ImPlotFlags_NoBoxSelect | ImPlotFlags_NoMousePos
)
type ImPlotAxisFlags int
const (
ImPlotAxisFlags_None ImPlotAxisFlags = 0 // default
ImPlotAxisFlags_NoLabel ImPlotAxisFlags = 1 << 0 // the axis label will not be displayed (axis labels also hidden if the supplied string name is NULL)
ImPlotAxisFlags_NoGridLines ImPlotAxisFlags = 1 << 1 // the axis grid lines will not be displayed
ImPlotAxisFlags_NoTickMarks ImPlotAxisFlags = 1 << 2 // the axis tick marks will not be displayed
ImPlotAxisFlags_NoTickLabels ImPlotAxisFlags = 1 << 3 // the axis tick labels will not be displayed
ImPlotAxisFlags_LogScale ImPlotAxisFlags = 1 << 4 // a logartithmic (base 10) axis scale will be used (mutually exclusive with ImPlotAxisFlags_Time)
ImPlotAxisFlags_Time ImPlotAxisFlags = 1 << 5 // axis will display date/time formatted labels (mutually exclusive with ImPlotAxisFlags_LogScale)
ImPlotAxisFlags_Invert ImPlotAxisFlags = 1 << 6 // the axis will be inverted
ImPlotAxisFlags_LockMin ImPlotAxisFlags = 1 << 7 // the axis minimum value will be locked when panning/zooming
ImPlotAxisFlags_LockMax ImPlotAxisFlags = 1 << 8 // the axis maximum value will be locked when panning/zooming
ImPlotAxisFlags_Lock ImPlotAxisFlags = ImPlotAxisFlags_LockMin | ImPlotAxisFlags_LockMax
ImPlotAxisFlags_NoDecorations ImPlotAxisFlags = ImPlotAxisFlags_NoLabel | ImPlotAxisFlags_NoGridLines | ImPlotAxisFlags_NoTickMarks | ImPlotAxisFlags_NoTickLabels
)
//-----------------------------------------------------------------------------
// Begin/End Plot
//-----------------------------------------------------------------------------
// Starts a 2D plotting context. If this function returns true, EndPlot() must
// be called, e.g. "if (BeginPlot(...)) { ... EndPlot(); }". #title_id must
// be unique. If you need to avoid ID collisions or don't want to display a
// title in the plot, use double hashes (e.g. "MyPlot##Hidden" or "##NoTitle").
// If #x_label and/or #y_label are provided, axes labels will be displayed.
func ImPlotBegin(title string, xLabel, yLabel string, size Vec2, flags ImPlotFlags, xFlags, yFlags, y2Flags, y3Flags ImPlotAxisFlags, y2Label, y3Label string) bool {
titleArg, titleFin := wrapString(title)
defer titleFin()
xLabelArg, xLabelFin := wrapString(xLabel)
defer xLabelFin()
yLabelArg, yLabelFin := wrapString(yLabel)
defer yLabelFin()
sizeArg, _ := size.wrapped()
y2LabelArg, y2LabelFin := wrapString(y2Label)
defer y2LabelFin()
y3LabelArg, y3LabelFin := wrapString(y3Label)
defer y3LabelFin()
return C.iggImPlotBeginPlot(
titleArg,
xLabelArg,
yLabelArg,
sizeArg,
C.int(flags),
C.int(xFlags),
C.int(yFlags),
C.int(y2Flags),
C.int(y3Flags),
y2LabelArg,
y3LabelArg) != 0
}
// Only call EndPlot() if BeginPlot() returns true! Typically called at the end
// of an if statement conditioned on BeginPlot().
func ImPlotEnd() {
C.iggImPlotEndPlot()
}
// Plots a vertical bar graph. #width and #shift are in X units.
func ImPlotBars(label string, values []float64, width, shift float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelFin := wrapString(label)
defer labelFin()
C.iggImPlotBars(labelArg, (*C.double)(unsafe.Pointer(&values[0])), C.int(len(values)), C.double(width), C.double(shift), C.int(offset))
}
// Plots a vertical bar graph. #width and #shift are in X units.
func ImPlotBarsXY(label string, xs, ys []float64, width float64, offset int) {
if len(xs) == 0 || len(ys) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotBarsXY(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(xs)),
C.double(width),
C.int(offset))
}
// Plots a horizontal bar graph. #height and #shift are in Y units.
func ImPlotBarsH(label string, values []float64, height, shift float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelFin := wrapString(label)
defer labelFin()
C.iggImPlotBarsH(labelArg, (*C.double)(unsafe.Pointer(&values[0])), C.int(len(values)), C.double(height), C.double(shift), C.int(offset))
}
// Plots a horizontal bar graph. #height and #shift are in Y units.
func ImPlotBarsHXY(label string, xs, ys []float64, height float64, offset int) {
if len(xs) == 0 || len(ys) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotBarsHXY(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(xs)),
C.double(height),
C.int(offset),
)
}
// Plots a standard 2D line plot.
func ImPlotLine(label string, values []float64, xscale, x0 float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelFin := wrapString(label)
defer labelFin()
C.iggImPlotLine(
labelArg,
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
C.double(xscale),
C.double(x0),
C.int(offset),
)
}
// Plots a standard 2D line plot.
func ImPlotLineXY(label string, xs, ys []float64, offset int) {
if len(xs) == 0 || len(ys) == 0 || (len(xs) != len(ys)) {
return
}
labelArg, labelFin := wrapString(label)
defer labelFin()
C.iggImPlotLineXY(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(xs)),
C.int(offset),
)
}
// Plots a standard 2D scatter plot. Default marker is ImPlotMarker_Circle.
func ImPlotScatter(label string, values []float64, xscale, x0 float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotScatter(
labelArg,
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
C.double(xscale),
C.double(x0),
C.int(offset),
)
}
// ImPlotScatterXY plots a standard 2D scatter plot from explicit x/y pairs.
// Default marker is ImPlotMarker_Circle.
// NOTE(review): unlike ImPlotLineXY, this does not guard len(xs) == len(ys);
// the C side is told len(xs) entries exist in both arrays — confirm callers
// always pass slices of equal length.
func ImPlotScatterXY(label string, xs, ys []float64, offset int) {
	if len(xs) == 0 || len(ys) == 0 {
		return
	}
	labelArg, labelDeleter := wrapString(label)
	defer labelDeleter()
	C.iggImPlotScatterXY(
		labelArg,
		(*C.double)(unsafe.Pointer(&xs[0])),
		(*C.double)(unsafe.Pointer(&ys[0])),
		C.int(len(xs)),
		C.int(offset),
	)
}
// Plots a a stairstep graph. The y value is continued constantly from every x position, i.e. the interval [x[i], x[i+1]) has the value y[i].
func ImPlotStairs(label string, values []float64, xscale, x0 float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotStairs(
labelArg,
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
C.double(xscale),
C.double(x0),
C.int(offset),
)
}
// Plots a a stairstep graph. The y value is continued constantly from every x position, i.e. the interval [x[i], x[i+1]) has the value y[i].
func ImPlotStairsXY(label string, xs, ys []float64, offset int) {
if len(xs) == 0 || len(ys) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotStairsXY(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(xs)),
C.int(offset),
)
}
// ImPlotErrorBars plots vertical error bars. label should match the label of
// the associated line or bar plot. xs, ys and err must each hold at least
// len(xs) elements; the call is a no-op when any slice is empty.
func ImPlotErrorBars(label string, xs, ys, err []float64, offset int) {
	if len(xs) == 0 || len(ys) == 0 || len(err) == 0 {
		return
	}
	labelArg, labelDeleter := wrapString(label)
	defer labelDeleter()
	// Bug fix: the previous code cast &xs (the address of the slice header)
	// instead of &xs[0] (the first element) — same for ys and err — handing
	// the C side a pointer to Go runtime internals rather than the data.
	C.iggImPlotErrorBars(
		labelArg,
		(*C.double)(unsafe.Pointer(&xs[0])),
		(*C.double)(unsafe.Pointer(&ys[0])),
		(*C.double)(unsafe.Pointer(&err[0])),
		C.int(len(xs)),
		C.int(offset),
	)
}
// ImPlotErrorBarsH plots horizontal error bars. label should match the label
// of the associated line or bar plot. xs, ys and err must each hold at least
// len(xs) elements; the call is a no-op when any slice is empty.
func ImPlotErrorBarsH(label string, xs, ys, err []float64, offset int) {
	if len(xs) == 0 || len(ys) == 0 || len(err) == 0 {
		return
	}
	labelArg, labelDeleter := wrapString(label)
	defer labelDeleter()
	// Bug fix: the previous code cast &xs (the address of the slice header)
	// instead of &xs[0] (the first element) — same for ys and err.
	C.iggImPlotErrorBarsH(
		labelArg,
		(*C.double)(unsafe.Pointer(&xs[0])),
		(*C.double)(unsafe.Pointer(&ys[0])),
		(*C.double)(unsafe.Pointer(&err[0])),
		C.int(len(xs)),
		C.int(offset),
	)
}
/// Plots vertical stems.
func ImPlotStems(label string, values []float64, yRef, xscale, x0 float64, offset int) {
if len(values) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotStems(
labelArg,
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
C.double(yRef),
C.double(xscale),
C.double(x0),
C.int(offset),
)
}
/// Plots vertical stems.
func ImPlotStemsXY(label string, xs, ys []float64, yRef float64, offset int) {
if len(xs) == 0 || len(ys) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotStemsXY(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(xs)),
C.double(yRef),
C.int(offset),
)
}
/// Plots infinite vertical or horizontal lines (e.g. for references or asymptotes).
func ImPlotVLines(label string, xs []float64, offset int) {
if len(xs) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotVLines(
labelArg,
(*C.double)(unsafe.Pointer(&xs[0])),
C.int(len(xs)),
C.int(offset),
)
}
/// Plots infinite vertical or horizontal lines (e.g. for references or asymptotes).
func ImPlotHLines(label string, ys []float64, offset int) {
if len(ys) == 0 {
return
}
labelArg, labelDeleter := wrapString(label)
defer labelDeleter()
C.iggImPlotHLines(
labelArg,
(*C.double)(unsafe.Pointer(&ys[0])),
C.int(len(ys)),
C.int(offset),
)
}
// Plots a pie chart. If the sum of values > 1 or normalize is true, each value will be normalized. Center and radius are in plot units. #label_fmt can be set to NULL for no labels.
func ImPlotPieChart(labelIds []string, values []float64, x, y, radius float64, normalize bool, labelFmt string, angle0 float64) {
if len(labelIds) == 0 || len(values) == 0 {
return
}
labelIdsArg := make([]*C.char, len(labelIds))
for i, l := range labelIds {
la, lf := wrapString(l)
defer lf()
labelIdsArg[i] = la
}
labelFmtArg, labelFmtDeleter := wrapString(labelFmt)
defer labelFmtDeleter()
C.iggImPlotPieChart(
&labelIdsArg[0],
(*C.double)(unsafe.Pointer(&values[0])),
C.int(len(values)),
C.double(x),
C.double(y),
C.double(radius),
castBool(normalize),
labelFmtArg,
C.double(angle0),
)
}
// ImPlotGetPlotPos returns the current plot's position as reported by ImPlot.
// NOTE(review): per ImPlot's API this is presumably only valid between
// ImPlotBegin and ImPlotEnd — confirm with callers.
func ImPlotGetPlotPos() Vec2 {
	var pos Vec2
	posArg, _ := pos.wrapped()
	C.iggImPlotGetPlotPos(posArg)
	return pos
}
// ImPlotGetPlotSize returns the current plot's size as reported by ImPlot.
// NOTE(review): per ImPlot's API this is presumably only valid between
// ImPlotBegin and ImPlotEnd — confirm with callers.
func ImPlotGetPlotSize() Vec2 {
	var size Vec2
	sizeArg, _ := size.wrapped()
	C.iggImPlotGetPlotSize(sizeArg)
	return size
}
// ImPlotIsPlotHovered reports whether the plot area is hovered by the mouse.
func ImPlotIsPlotHovered() bool {
	return C.iggImPlotIsPlotHovered() != 0
}
// ImPlotIsPlotXAxisHovered reports whether the plot's x-axis is hovered.
func ImPlotIsPlotXAxisHovered() bool {
	return C.iggImPlotIsPlotXAxisHovered() != 0
}
func ImPlotIsPlotYAxisHovered(yAxis int) bool {
return C.iggImPlotIsPlotYAxisHovered(C.int(yAxis)) != -0
} | implot.go | 0.699152 | 0.449393 | implot.go | starcoder |
package propertydb
import (
"testing"
"github.com/stretchr/testify/assert"
)
// defaultCity returns the fixture city shared by the Validate* conformance tests.
func defaultCity() City {
	return City("Town")
}
// defaultAddress returns the fixture street address shared by the tests.
func defaultAddress() StreetAddress {
	return StreetAddress("Address 1")
}
// defaultInfo returns a fully-populated fixture Info value.
func defaultInfo() Info {
	return Info{
		PriceAsking: 12,
		PriceFinal: 14,
		Type: House,
		OperatingCosts: 1231,
		PropertyInsuranceMonthly: 121,
		CurrentMortgageDeed: 1238,
		Notes: "Hello mr sunshine",
	}
}
// defaultListing combines the fixture city, address and info into a Listing.
func defaultListing() Listing {
	return Listing{
		City: defaultCity(),
		StreetAddress: defaultAddress(),
		Info: defaultInfo(),
	}
}
// ValidateAddAndShow checks that a listing stored via Add is returned
// unchanged by Show. Reusable conformance test for any PropertyDB
// implementation; pdb is expected to start empty.
func ValidateAddAndShow(t *testing.T, pdb PropertyDB) {
	expectedListing := defaultListing()
	err := pdb.Add(expectedListing.City, expectedListing.StreetAddress, expectedListing.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	listing, err := pdb.Show(expectedListing.City, expectedListing.StreetAddress)
	assert.Nil(t, err, ".Show(...) returns error when trying to show an added property")
	assert.Equal(t, expectedListing, listing)
}
// ValidateAddSameTwiceError checks that adding the same (city, address) twice
// fails: the first Add must succeed and the second must return an error.
func ValidateAddSameTwiceError(t *testing.T, pdb PropertyDB) {
	expectedListing := defaultListing()
	err := pdb.Add(expectedListing.City, expectedListing.StreetAddress, expectedListing.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	err = pdb.Add(expectedListing.City, expectedListing.StreetAddress, expectedListing.Info)
	assert.NotNil(t, err, ".Add(...) does not return error when trying to add an existing property")
}
// ValidateCityAndStreetAddressIdentifies checks that the (City, StreetAddress)
// pair acts as the unique key: two listings stored under different pairs are
// retrieved independently and unchanged, in either order.
func ValidateCityAndStreetAddressIdentifies(t *testing.T, pdb PropertyDB) {
	listingFirst := defaultListing()
	// A second listing differing in every field, so any key mix-up shows up
	// as an Equal failure below.
	listingSecond := Listing{
		City: City("SecretTown"),
		StreetAddress: StreetAddress("Secret Address"),
		Info: Info{
			PriceAsking: 1212121,
			PriceFinal: 14141414,
			Type: House,
			OperatingCosts: 1211,
			PropertyInsuranceMonthly: 121,
			CurrentMortgageDeed: 1238,
			Notes: "Hello mr sunshine",
		},
	}
	err := pdb.Add(listingFirst.City, listingFirst.StreetAddress, listingFirst.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	err = pdb.Add(listingSecond.City, listingSecond.StreetAddress, listingSecond.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	retSecond, err := pdb.Show(listingSecond.City, listingSecond.StreetAddress)
	assert.Nil(t, err, ".Show(...) returns error when trying to show the second added property")
	assert.Equal(t, listingSecond, retSecond)
	retFirst, err := pdb.Show(listingFirst.City, listingFirst.StreetAddress)
	assert.Nil(t, err, ".Show(...) returns error when trying to show the first added property")
	assert.Equal(t, listingFirst, retFirst)
}
// ValidateUpdate checks that Update replaces a listing's Info and that Show
// subsequently returns the updated value.
func ValidateUpdate(t *testing.T, pdb PropertyDB) {
	//Add property, update property, validate update took place with Show
	listing := defaultListing()
	err := pdb.Add(listing.City, listing.StreetAddress, listing.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	updatedListing := listing
	updatedListing.Info.Notes = "I update and update"
	// NOTE(review): any return value of Update is discarded here — confirm
	// Update's signature and assert on its error like the other calls.
	pdb.Update(updatedListing.City, updatedListing.StreetAddress, updatedListing.Info)
	l, err := pdb.Show(listing.City, listing.StreetAddress)
	assert.Nil(t, err, ".Show(...) returns error when trying to read the updated listing")
	assert.Equal(t, updatedListing, l)
}
// ValidateList checks that List returns exactly the set of listings that were
// added: correct count and every returned listing matching one of the three
// fixtures.
func ValidateList(t *testing.T, pdb PropertyDB) {
	//Add three properties, validate same with List()
	l1 := defaultListing()
	l2 := Listing{
		City: City("SecretTown"),
		StreetAddress: StreetAddress("Secret Address"),
		Info: Info{
			PriceAsking: 1212121,
			PriceFinal: 14141414,
			Type: House,
			OperatingCosts: 1211,
			PropertyInsuranceMonthly: 121,
			CurrentMortgageDeed: 1238,
			Notes: "Hello mr sunshine",
		},
	}
	l3 := Listing{
		City: City("ChristmasTown"),
		StreetAddress: StreetAddress("Christmas Address"),
		Info: Info{
			PriceAsking: 1212111,
			PriceFinal: 141914,
			Type: Condo,
			OperatingCosts: 1211,
			PropertyInsuranceMonthly: 131,
			CurrentMortgageDeed: 1228,
			Notes: "Snow and Snow",
		},
	}
	err := pdb.Add(l1.City, l1.StreetAddress, l1.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	err = pdb.Add(l2.City, l2.StreetAddress, l2.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	err = pdb.Add(l3.City, l3.StreetAddress, l3.Info)
	assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
	listings, err := pdb.List()
	assert.Nil(t, err, ".List(...) returns error when trying to list properties")
	// Bug fix: without a length check the membership loop below passes
	// vacuously when List() returns an empty or truncated result.
	assert.Len(t, listings, 3, ".List() must return exactly the three added listings")
	for _, l := range listings {
		if l1 != l && l2 != l && l3 != l {
			t.Log("l1", l1)
			t.Log("l2", l2)
			t.Log("l3", l3)
			t.Log("l", l)
			t.Log(".List() return faulty listings")
			t.Fail()
		}
	}
}
func ValidateDelete(t *testing.T, pdb PropertyDB) {
//Add three properties, delete one, validate with List()
l1 := defaultListing()
l2 := Listing{
City: City("SecretTown"),
StreetAddress: StreetAddress("Secret Address"),
Info: Info{
PriceAsking: 1212121,
PriceFinal: 14141414,
Type: House,
OperatingCosts: 1211,
PropertyInsuranceMonthly: 121,
CurrentMortgageDeed: 1238,
Notes: "Hello mr sunshine",
},
}
l3 := Listing{
City: City("ChristmasTown"),
StreetAddress: StreetAddress("Christmas Address"),
Info: Info{
PriceAsking: 1212111,
PriceFinal: 141914,
Type: Condo,
OperatingCosts: 1211,
PropertyInsuranceMonthly: 131,
CurrentMortgageDeed: 1228,
Notes: "Snow and Snow",
},
}
err := pdb.Add(l1.City, l1.StreetAddress, l1.Info)
assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
err = pdb.Add(l2.City, l2.StreetAddress, l2.Info)
assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
err = pdb.Add(l3.City, l3.StreetAddress, l3.Info)
assert.Nil(t, err, ".Add(...) returns error when trying to add a property")
err = pdb.Delete(l2.City, l2.StreetAddress)
assert.Nil(t, err, ".Delete(...) returns error when trying to delete a property")
listings, err := pdb.List()
assert.Nil(t, err, ".List(...) returns error when trying to list properties")
for _, l := range listings {
if l1 != l && l3 != l {
t.Log("l1", l1)
t.Log("l2", l2)
t.Log("l3", l3)
t.Log("l", l)
t.Log(".Delete() seem to interact weirdly with .List()")
}
}
} | pkg/propertydb/validate.go | 0.629888 | 0.487795 | validate.go | starcoder |
package list
import "github.com/rickb777/golist/internal/collection"
// List is the Go text/template source used to generate a strongly-typed
// {{.TName}}List (a []{{.PName}} with Scala-LinearSeq-style methods). It is the
// shared collection preamble concatenated with the feature fragments declared
// elsewhere in this package. The raw string below is template text, not code.
const List = collection.Collection + `
//-------------------------------------------------------------------------------------------------
// {{.TName}}List is a slice of type {{.PName}}. Use it where you would use []{{.PName}}.
// List values follow a similar pattern to Scala Lists and LinearSeqs in particular.
// Importantly, *none of its methods ever mutate a list*; they merely return new lists where required.
// When a list needs mutating, use normal Go slice operations, e.g. *append()*.
// For comparison with Scala, see e.g. http://www.scala-lang.org/api/2.11.7/#scala.collection.LinearSeq
type {{.TName}}List []{{.PName}}
//-------------------------------------------------------------------------------------------------
// New{{.TName}}List constructs a new list containing the supplied values, if any.
func New{{.TName}}List(values ...{{.PName}}) {{.TName}}List {
	list := make({{.TName}}List, len(values))
	for i, v := range values {
		list[i] = v
	}
	return list
}
{{if .Type.Underlying.IsBasic}}
// New{{.TName}}ListFrom{{.Type.Underlying.LongName}}s constructs a new {{.TName}}List from a []{{.Type.Underlying}}.
func New{{.TName}}ListFrom{{.Type.Underlying.LongName}}s(values []{{.Type.Underlying}}) {{.TName}}List {
	list := make({{.TName}}List, len(values))
	for i, v := range values {
		list[i] = {{.TName}}(v)
	}
	return list
}
{{end}}
// Build{{.TName}}ListFromChan constructs a new {{.TName}}List from a channel that supplies a sequence
// of values until it is closed. The function doesn't return until then.
func Build{{.TName}}ListFromChan(source <-chan {{.PName}}) {{.TName}}List {
	result := make({{.TName}}List, 0)
	for v := range source {
		result = append(result, v)
	}
	return result
}
//-------------------------------------------------------------------------------------------------
` + headTail + conversions + sortable +
	iterationFunctions + takeDropFunctions + predicatedFunctions +
	equalsFunctions + comparableFunctions + numericFunctions + orderedFunctions +
	mkstring + optionForList
// TODO diff
// TODO PadTo,
// TODO StartsWith, EndsWith, IteratorChan
// TODO Fold | internal/list/list.go | 0.649023 | 0.415195 | list.go | starcoder |
package builtin
import (
"errors"
"fmt"
"github.com/fission/fission-workflows/pkg/types"
"github.com/fission/fission-workflows/pkg/types/typedvalues"
"github.com/fission/fission-workflows/pkg/types/typedvalues/controlflow"
)
// Foreach is the function name; the ForeachInput* constants are the input
// keys accepted by the foreach construct (see the specification comment on
// FunctionForeach below).
const (
	Foreach = "foreach"
	ForeachInputForeach = "foreach" // the list to iterate over
	ForeachInputDo = "do" // the task to run per element
	ForeachInputCollect = "collect" // whether to gather outputs (default true)
	ForeachInputSequential = "sequential" // run tasks one after another (default false)
)
/*
FunctionForeach is a control flow construct to execute a certain task for each item in the provided input.
The tasks are executed in parallel.
Note, currently the task in the 'do' does not have access to state in the current workflow.
**Specification**
**input** | required | types | description
-------------------------|----------|---------------|--------------------------------------------------------
foreach | yes | list | The list of elements that foreach should be looped over.
do | yes | task/workflow | The action to perform for every element.
sequential | no | bool | Whether to execute the tasks sequentially (default: false).
collect | no | bool | Collect the outputs of the tasks into an array (default: true).
The element is made available to the action using the field `_item`.
**output** None
**Example**
```
foo:
run: foreach
inputs:
for:
- a
- b
- c
do:
run: noop
inputs: "{ task().Inputs._item }"
```
A complete example of this function can be found in the [foreachwhale](../examples/whales/foreachwhale.wf.yaml) example.
*/
// FunctionForeach implements the "foreach" built-in described in the
// specification comment above.
type FunctionForeach struct{}
// Invoke expands the foreach construct into an internal workflow: one cloned
// "do" task per element of the "foreach" list (named do_0, do_1, ...), plus a
// "collector" compose task that gathers the outputs. The WorkflowSpec is
// returned wrapped as a TypedValue for the invocation machinery to run.
func (fn *FunctionForeach) Invoke(spec *types.TaskInvocationSpec) (*typedvalues.TypedValue, error) {
	// Verify and parse foreach
	headerTv, err := ensureInput(spec.GetInputs(), ForeachInputForeach)
	if err != nil {
		return nil, err
	}
	i, err := typedvalues.Unwrap(headerTv)
	if err != nil {
		return nil, err
	}
	// The foreach input must unwrap to a Go slice.
	foreach, ok := i.([]interface{})
	if !ok {
		return nil, fmt.Errorf("condition '%v' needs to be a 'array', but was '%v'", i, headerTv.ValueType())
	}
	// Wrap task
	taskTv, err := ensureInput(spec.GetInputs(), ForeachInputDo, controlflow.TypeTask)
	if err != nil {
		return nil, err
	}
	flow, err := controlflow.UnwrapControlFlow(taskTv)
	if err != nil {
		return nil, err
	}
	// Only plain tasks are supported as the "do" action for now.
	if flow.GetWorkflow() != nil {
		return nil, errors.New("foreach does not support workflow inputs (yet)")
	}
	// Wrap collect — defaults to true when the input is absent.
	collect := true
	collectTv, ok := spec.Inputs[ForeachInputCollect]
	if ok {
		b, err := typedvalues.UnwrapBool(collectTv)
		if err != nil {
			return nil, fmt.Errorf("collect could not be parsed into a boolean: %v", err)
		}
		collect = b
	}
	// Wrap sequential — defaults to false (parallel execution).
	var seq bool
	seqTv, ok := spec.Inputs[ForeachInputSequential]
	if ok {
		b, err := typedvalues.UnwrapBool(seqTv)
		if err != nil {
			return nil, fmt.Errorf("sequential could not be parsed into a boolean: %v", err)
		}
		seq = b
	}
	// Create the workflows
	wf := &types.WorkflowSpec{
		OutputTask: "collector",
		Tasks: types.Tasks{},
	}
	// Create the tasks for each element
	var tasks []string // Needed to preserve order of the input array
	for k, item := range foreach {
		f := flow.Clone()
		itemTv := typedvalues.MustWrap(item)
		itemTv.SetMetadata(typedvalues.MetadataPriority, "1000") // Ensure that item is resolved before other parameters
		f.Input("_item", *itemTv)
		// TODO support workflows
		t := f.GetTask()
		name := fmt.Sprintf("do_%d", k)
		wf.AddTask(name, t)
		tasks = append(tasks, name)
		// In sequential mode each task requires its predecessor.
		if seq && k != 0 {
			t.Require(tasks[k-1])
		}
	}
	// Add collector task — a "compose" task that depends on every do_* task.
	ct := &types.TaskSpec{
		FunctionRef: "compose",
		Inputs: types.Inputs{},
		Requires: types.Require(tasks...),
	}
	// NOTE(review): when collect is false, output stays nil, so compose
	// receives a null input — presumably intentional; confirm.
	var output []interface{}
	for _, k := range tasks {
		if collect {
			output = append(output, fmt.Sprintf("{output('%s')}", k))
		}
	}
	ct.Input(ComposeInput, typedvalues.MustWrap(output))
	wf.AddTask("collector", ct)
	return typedvalues.Wrap(wf)
}
package query
import (
"github.com/awslabs/smithy-go/httpbinding"
"math/big"
"net/url"
)
// Value represents a Query Value type.
type Value struct {
	// The query values to add the value to.
	values url.Values
	// The value's key, which will form the prefix for complex types.
	key string
	// Whether the value should be flattened or not if it's a flattenable type.
	flat bool
	// Delegate that performs the actual scalar encoding into values.
	queryValue httpbinding.QueryValue
}
// newValue creates a Value that encodes under the given key; flat controls
// how nested Array/Map children are keyed, not the scalar encoding itself.
// NOTE(review): the third argument to NewQueryValue is hard-coded false —
// confirm against httpbinding.NewQueryValue's parameter meaning (append?).
func newValue(values url.Values, key string, flat bool) Value {
	return Value{
		values: values,
		key: key,
		flat: flat,
		queryValue: httpbinding.NewQueryValue(values, key, false),
	}
}
// newBaseValue creates a keyless root Value; its embedded queryValue is
// deliberately inert (nil values, empty key) and child encoders derive real
// keys from it.
func newBaseValue(values url.Values) Value {
	return Value{
		values: values,
		queryValue: httpbinding.NewQueryValue(nil, "", false),
	}
}
// Array returns a new Array encoder.
func (qv Value) Array(locationName string) *Array {
return newArray(qv.values, qv.key, qv.flat, locationName)
}
// Object returns a new Object encoder.
func (qv Value) Object() *Object {
return newObject(qv.values, qv.key)
}
// Map returns a new Map encoder.
func (qv Value) Map(keyLocationName string, valueLocationName string) *Map {
return newMap(qv.values, qv.key, qv.flat, keyLocationName, valueLocationName)
}
// Base64EncodeBytes encodes v as a base64 query string value.
// This is intended to enable compatibility with the JSON encoder.
func (qv Value) Base64EncodeBytes(v []byte) {
qv.queryValue.Blob(v)
}
// Boolean encodes v as a query string value
func (qv Value) Boolean(v bool) {
qv.queryValue.Boolean(v)
}
// String encodes v as a query string value
func (qv Value) String(v string) {
qv.queryValue.String(v)
}
// Byte encodes v as a query string value
func (qv Value) Byte(v int8) {
qv.queryValue.Byte(v)
}
// Short encodes v as a query string value
func (qv Value) Short(v int16) {
qv.queryValue.Short(v)
}
// Integer encodes v as a query string value
func (qv Value) Integer(v int32) {
qv.queryValue.Integer(v)
}
// Long encodes v as a query string value
func (qv Value) Long(v int64) {
qv.queryValue.Long(v)
}
// Float encodes v as a query string value
func (qv Value) Float(v float32) {
qv.queryValue.Float(v)
}
// Double encodes v as a query string value
func (qv Value) Double(v float64) {
qv.queryValue.Double(v)
}
// BigInteger encodes v as a query string value
func (qv Value) BigInteger(v *big.Int) {
qv.queryValue.BigInteger(v)
}
// BigDecimal encodes v as a query string value
func (qv Value) BigDecimal(v *big.Float) {
qv.queryValue.BigDecimal(v)
} | vendor/github.com/aws/aws-sdk-go-v2/aws/protocol/query/value.go | 0.895263 | 0.40751 | value.go | starcoder |
package arithmetic
import (
"reflect"
)
// An Operander is a value that can be represented as an arithmetic operand.
// Types implementing it override the reflection-based extraction in Val.
type Operander interface {
	// Val returns the value's arithmetic representation.
	Val() float64
}
// Add returns the sum of the arithmetic values (see Val) of all operanders.
// With no operanders it returns 0.
func Add(operanders ...interface{}) float64 {
	var total float64
	for _, operander := range operanders {
		total += Val(operander)
	}
	return total
}
// Div divides the first operander's arithmetic value by each of the remaining
// ones, left to right. With no operanders it returns 0.
func Div(operanders ...interface{}) float64 {
	if len(operanders) == 0 {
		return 0
	}
	quotient := Val(operanders[0])
	for i := 1; i < len(operanders); i++ {
		quotient /= Val(operanders[i])
	}
	return quotient
}
// Eq reports whether every operander has the same arithmetic value.
// Fewer than two operanders are trivially equal.
func Eq(operanders ...interface{}) bool {
	if len(operanders) < 2 {
		return true
	}
	first := Val(operanders[0])
	for i := 1; i < len(operanders); i++ {
		if Val(operanders[i]) != first {
			return false
		}
	}
	return true
}
// Mul returns the product of the arithmetic values of all operanders.
// With no operanders it returns 0 (not 1), mirroring the other operations.
func Mul(operanders ...interface{}) float64 {
	if len(operanders) == 0 {
		return 0
	}
	product := 1.0
	for _, operander := range operanders {
		product *= Val(operander)
	}
	return product
}
// Ne reports whether all operanders are pairwise different (no two share an
// arithmetic value). Fewer than two operanders yield false.
func Ne(operanders ...interface{}) bool {
	if len(operanders) < 2 {
		return false
	}
	seen := make(map[float64]struct{}, len(operanders))
	for _, operander := range operanders {
		v := Val(operander)
		if _, dup := seen[v]; dup {
			return false
		}
		seen[v] = struct{}{}
	}
	return true
}
// Sub subtracts each remaining operander's arithmetic value from the first
// one, left to right. With no operanders it returns 0.
func Sub(operanders ...interface{}) float64 {
	if len(operanders) == 0 {
		return 0
	}
	difference := Val(operanders[0])
	for i := 1; i < len(operanders); i++ {
		difference -= Val(operanders[i])
	}
	return difference
}
// Val extracts the arithmetic representation from any type. It is ruled by the
// value extraction rules.
func Val(operander interface{}) float64 {
if x, ok := operander.(Operander); ok {
return x.Val()
}
x := reflect.ValueOf(operander)
// nolint:exhaustive
switch x.Kind() {
case reflect.Bool:
if x.Bool() {
return 1
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return float64(x.Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
reflect.Uint64:
return float64(x.Uint())
case reflect.Float32, reflect.Float64:
return x.Float()
case reflect.Complex64, reflect.Complex128:
y := x.Complex()
return real(y)
case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
return float64(x.Len())
case reflect.Struct:
return float64(x.NumField())
}
return 0
} | reflect/arithmetic/arithmetic.go | 0.847936 | 0.549822 | arithmetic.go | starcoder |
package tmplfunc
import (
"fmt"
"reflect"
)
func indirect(v reflect.Value) reflect.Value {
for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() {
}
return v
}
func numOrStr(k reflect.Kind) bool {
return isNum(k) || isStr(k)
}
func isNum(k reflect.Kind) bool {
return isInt(k) || isUint(k) || isFloat(k)
}
func isInt(k reflect.Kind) bool {
switch k {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return true
default:
return false
}
}
func isUint(k reflect.Kind) bool {
switch k {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return true
default:
return false
}
}
func isFloat(k reflect.Kind) bool {
switch k {
case reflect.Float32, reflect.Float64:
return true
default:
return false
}
}
func isStr(k reflect.Kind) bool {
switch k {
case reflect.String:
return true
default:
return false
}
}
func operandB(x, y interface{}, allowStr bool) (xv, yv reflect.Value) {
xv = indirect(reflect.ValueOf(x))
yv = indirect(reflect.ValueOf(y))
xk := xv.Kind()
yk := yv.Kind()
if allowStr {
if !numOrStr(xk) {
panic(fmt.Sprintf("[%v] is not number or string", x))
}
if !numOrStr(yk) {
panic(fmt.Sprintf("[%v] is not number or string", y))
}
} else {
if !isNum(xk) {
panic(fmt.Sprintf("[%v] is not number", x))
}
if !isNum(yk) {
panic(fmt.Sprintf("[%v] is not number", y))
}
}
if xk == yk {
return
}
if isStr(xk) && isStr(yk) {
return
}
if isStr(xk) {
yv = reflect.ValueOf(fmt.Sprint(yv.Interface()))
return
}
if isStr(yk) {
xv = reflect.ValueOf(fmt.Sprint(xv.Interface()))
return
}
if isFloat(xk) && isFloat(yk) {
return
}
if isFloat(xk) {
if isInt(yk) {
yv = reflect.ValueOf(float64(yv.Int()))
} else {
yv = reflect.ValueOf(float64(yv.Uint()))
}
return
}
if isFloat(yk) {
if isInt(xk) {
xv = reflect.ValueOf(float64(xv.Int()))
} else {
xv = reflect.ValueOf(float64(xv.Uint()))
}
return
}
if isUint(xk) && isUint(yk) {
return
}
if isInt(xk) && isInt(yk) {
return
}
if isUint(xk) {
xv = reflect.ValueOf(int64(xv.Uint()))
return
}
if isUint(yk) {
yv = reflect.ValueOf(int64(yv.Uint()))
return
}
panic("unreachable code")
} | code/pkg/tmplfunc/operand.go | 0.503174 | 0.407039 | operand.go | starcoder |
package fp
func (l BoolArray) DropRight(n int) BoolArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]bool, to)
copy(acc, l[0: to])
return acc
}
func (l StringArray) DropRight(n int) StringArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]string, to)
copy(acc, l[0: to])
return acc
}
func (l IntArray) DropRight(n int) IntArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]int, to)
copy(acc, l[0: to])
return acc
}
func (l Int64Array) DropRight(n int) Int64Array {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]int64, to)
copy(acc, l[0: to])
return acc
}
func (l ByteArray) DropRight(n int) ByteArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]byte, to)
copy(acc, l[0: to])
return acc
}
func (l RuneArray) DropRight(n int) RuneArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]rune, to)
copy(acc, l[0: to])
return acc
}
func (l Float32Array) DropRight(n int) Float32Array {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]float32, to)
copy(acc, l[0: to])
return acc
}
func (l Float64Array) DropRight(n int) Float64Array {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]float64, to)
copy(acc, l[0: to])
return acc
}
func (l AnyArray) DropRight(n int) AnyArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]Any, to)
copy(acc, l[0: to])
return acc
}
func (l Tuple2Array) DropRight(n int) Tuple2Array {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([]Tuple2, to)
copy(acc, l[0: to])
return acc
}
func (l BoolArrayArray) DropRight(n int) BoolArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]bool, to)
copy(acc, l[0: to])
return acc
}
func (l StringArrayArray) DropRight(n int) StringArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]string, to)
copy(acc, l[0: to])
return acc
}
func (l IntArrayArray) DropRight(n int) IntArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]int, to)
copy(acc, l[0: to])
return acc
}
func (l Int64ArrayArray) DropRight(n int) Int64ArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]int64, to)
copy(acc, l[0: to])
return acc
}
func (l ByteArrayArray) DropRight(n int) ByteArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]byte, to)
copy(acc, l[0: to])
return acc
}
func (l RuneArrayArray) DropRight(n int) RuneArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]rune, to)
copy(acc, l[0: to])
return acc
}
func (l Float32ArrayArray) DropRight(n int) Float32ArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]float32, to)
copy(acc, l[0: to])
return acc
}
func (l Float64ArrayArray) DropRight(n int) Float64ArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]float64, to)
copy(acc, l[0: to])
return acc
}
func (l AnyArrayArray) DropRight(n int) AnyArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]Any, to)
copy(acc, l[0: to])
return acc
}
func (l Tuple2ArrayArray) DropRight(n int) Tuple2ArrayArray {
size := len(l)
Require(n >= 0, "index should be >= 0")
if n >= size { n = size }
to := size - n
acc := make([][]Tuple2, to)
copy(acc, l[0: to])
return acc
} | fp/bootstrap_array_dropright.go | 0.647798 | 0.598606 | bootstrap_array_dropright.go | starcoder |
package lucicfg
import (
"fmt"
"time"
"go.starlark.net/starlark"
"go.starlark.net/syntax"
)
// zero is the integer 0, used by Unary to negate a duration.
var zero = starlark.MakeInt64(0)

// duration wraps an integer, making it a distinct integer-like type.
// The wrapped value is a number of milliseconds.
type duration struct {
	starlark.Int // milliseconds
}

// Type returns 'duration', to make the type different from ints.
func (x duration) Type() string {
	return "duration"
}

// String formats the duration using Go's time.Duration rules.
func (x duration) String() string {
	ms, ok := x.Int64()
	if !ok {
		return "<invalid-duration>" // probably very-very large
	}
	return (time.Duration(ms) * time.Millisecond).String()
}

// CompareSameType makes durations comparable by comparing them as integers.
func (x duration) CompareSameType(op syntax.Token, y starlark.Value, depth int) (bool, error) {
	return x.Int.CompareSameType(op, y.(duration).Int, depth)
}

// Binary implements binary operations between durations and ints:
// duration*int yields a duration, duration/int (duration on the left)
// yields a duration, duration±duration yields a duration, and
// duration/duration yields a plain integer ratio. Unsupported
// combinations return (nil, nil) so the interpreter can report its
// standard error.
func (x duration) Binary(op syntax.Token, y starlark.Value, side starlark.Side) (starlark.Value, error) {
	switch y := y.(type) {
	case starlark.Int:
		switch {
		case op == syntax.STAR:
			return duration{x.Int.Mul(y)}, nil
		case (op == syntax.SLASH || op == syntax.SLASHSLASH) && side == starlark.Left:
			return duration{x.Int.Div(y)}, nil
		}
	case duration:
		switch {
		case op == syntax.PLUS:
			return duration{x.Int.Add(y.Int)}, nil
		case op == syntax.MINUS && side == starlark.Left:
			return duration{x.Int.Sub(y.Int)}, nil
		case op == syntax.MINUS && side == starlark.Right:
			return duration{y.Int.Sub(x.Int)}, nil
		case (op == syntax.SLASH || op == syntax.SLASHSLASH) && side == starlark.Left:
			return x.Int.Div(y.Int), nil
		case (op == syntax.SLASH || op == syntax.SLASHSLASH) && side == starlark.Right:
			return y.Int.Div(x.Int), nil
		}
	}
	// All other combinations aren't supported.
	return nil, nil
}
// Unary implements the unary + and - prefix operators for durations;
// every other operator returns (nil, nil) so the interpreter reports
// its standard unsupported-operation error.
func (x duration) Unary(op syntax.Token) (starlark.Value, error) {
	if op == syntax.PLUS {
		return x, nil
	}
	if op == syntax.MINUS {
		return duration{zero.Sub(x.Int)}, nil
	}
	return nil, nil
}
func init() {
// make_duration(milliseconds) returns a 'duration' value.
declNative("make_duration", func(call nativeCall) (starlark.Value, error) {
var ms starlark.Int
if err := call.unpack(0, &ms); err != nil {
return nil, err
}
return duration{ms}, nil
})
// epoch(layout, value, location) returns int epoch seconds for value parsed as a time per layout in location.
declNative("epoch", func(call nativeCall) (starlark.Value, error) {
var layout starlark.String
var value starlark.String
var location starlark.String
if err := call.unpack(3, &layout, &value, &location); err != nil {
return nil, err
}
loc, err := time.LoadLocation(location.GoString())
if err != nil {
return nil, fmt.Errorf("time.epoch: %s", err)
}
t, err := time.ParseInLocation(layout.GoString(), value.GoString(), loc)
if err != nil {
return nil, fmt.Errorf("time.epoch: %s", err)
}
return starlark.MakeInt(int(t.UnixNano() / 1000000000)), nil
})
} | lucicfg/duration.go | 0.804021 | 0.428831 | duration.go | starcoder |
package assets
import (
"errors"
"fmt"
"strings"
"sync"
)
var (
	// ErrNotFound is the sentinel error wrapped into every failed node
	// or relationship lookup; test for it with errors.Is.
	ErrNotFound = errors.New("could not find an element matching the request")
)
// FilterNodes is a predicate used to provide custom filters when
// listing nodes.
type FilterNodes func(node Node) bool

// FilterNodesByLabel keeps nodes whose label matches any of the given labels.
func FilterNodesByLabel(labels ...string) FilterNodes {
	return func(n Node) bool {
		for _, candidate := range labels {
			if n.GetLabel() == candidate {
				return true
			}
		}
		return false
	}
}

// FilterNodesByName keeps nodes whose name matches any of the given names.
func FilterNodesByName(names ...string) FilterNodes {
	return func(n Node) bool {
		for _, candidate := range names {
			if n.GetName() == candidate {
				return true
			}
		}
		return false
	}
}

// FilterRelationship is a predicate used to provide custom filters when
// listing relationships.
type FilterRelationship func(rel Relationship) bool

// FilterRelByLabel keeps relationships carrying the given label.
func FilterRelByLabel(label string) FilterRelationship {
	return func(r Relationship) bool { return r.Label == label }
}

// FilterRelByTo keeps relationships pointing to the node with the given ID.
func FilterRelByTo(toID string) FilterRelationship {
	return func(r Relationship) bool { return r.To == toID }
}

// FilterRelByFrom keeps relationships originating from the node with
// the given ID.
func FilterRelByFrom(fromID string) FilterRelationship {
	return func(r Relationship) bool { return r.From == fromID }
}
// NewGraph creates a new, empty graph instance.
func NewGraph() *Graph {
	return &Graph{
		nodes: map[string]Node{},
		relationships: map[string]Relationship{},
	}
}

// Graph represents a collection of different nodes of the same type.
// The embedded RWMutex guards both maps, so a Graph must not be copied
// after first use.
type Graph struct {
	sync.RWMutex
	nodes map[string]Node // nodes keyed by generated node ID
	relationships map[string]Relationship // relationships keyed by generated relationship ID
}
// InsertNode adds a new node to the graph and returns it. The node's
// id is produced by newNode (defined elsewhere) and used as the map key.
func (g *Graph) InsertNode(name, label string, body []byte) Node {
	g.Lock()
	defer g.Unlock()
	node := newNode(name, label, body)
	g.nodes[node.id] = node
	return node
}

// GetNodeByID returns the node that has the given ID, or a wrapped
// ErrNotFound when no such node exists.
func (g *Graph) GetNodeByID(id string) (Node, error) {
	g.RLock()
	defer g.RUnlock()
	item, ok := g.nodes[id]
	if !ok {
		return Node{}, fmt.Errorf("%w; node with id '%s'", ErrNotFound, id)
	}
	return item, nil
}
// ListNodes returns every node that satisfies all of the provided
// filters; with no filters it returns every node in the graph.
// Iteration order over the underlying map is unspecified.
func (g *Graph) ListNodes(where ...FilterNodes) []Node {
	g.RLock()
	defer g.RUnlock()
	result := make([]Node, 0, len(g.nodes))
outer:
	for _, node := range g.nodes {
		for _, filter := range where {
			if !filter(node) {
				continue outer
			}
		}
		result = append(result, node)
	}
	return result
}
// UpdateNode replaces the body of the node with the given ID and
// returns the updated node, or a wrapped ErrNotFound.
func (g *Graph) UpdateNode(nodeID string, body []byte) (Node, error) {
	g.Lock()
	defer g.Unlock()
	found, ok := g.nodes[nodeID]
	if !ok {
		return Node{}, fmt.Errorf("%w; node with id '%s'", ErrNotFound, nodeID)
	}
	found.Body = body
	g.nodes[found.id] = found
	return found, nil
}

// DeleteNode removes the node with the given ID from the graph, or
// returns a wrapped ErrNotFound when it does not exist.
func (g *Graph) DeleteNode(nodeID string) error {
	g.Lock()
	defer g.Unlock()
	found, ok := g.nodes[nodeID]
	if !ok {
		return fmt.Errorf("%w; node with id '%s'", ErrNotFound, nodeID)
	}
	delete(g.nodes, found.id)
	return nil
}
// AddRelationship is used to establish a directional relationship
// between the two items in the graph. Both endpoints are validated
// (under the read lock, via GetNodeByID) before the write lock is
// taken, so the locks are never nested.
func (g *Graph) AddRelationship(from, to Node, label string) (Relationship, error) {
	fromNode, err := g.GetNodeByID(from.GetID())
	if err != nil {
		return Relationship{}, fmt.Errorf("getNodeByID %s; %w", from.GetID(), err)
	}
	toNode, err := g.GetNodeByID(to.GetID())
	if err != nil {
		return Relationship{}, fmt.Errorf("getNodeByID %s; %w", to.GetID(), err)
	}
	g.Lock()
	defer g.Unlock()
	rel := newRelationship(fromNode, toNode, label)
	g.relationships[rel.ID] = rel
	return rel, nil
}
// GetRelationshipByID returns the relationship with the given ID, or a
// wrapped ErrNotFound when it does not exist.
func (g *Graph) GetRelationshipByID(id string) (Relationship, error) {
	g.RLock()
	defer g.RUnlock()
	item, ok := g.relationships[id]
	if !ok {
		return Relationship{}, fmt.Errorf("%w; relationship with id '%s'", ErrNotFound, id)
	}
	return item, nil
}

// ListRelationships returns a list of all relationships with match the given filters. If no filters are provided returns all relationships
// (iteration order over the underlying map is unspecified).
func (g *Graph) ListRelationships(filters ...FilterRelationship) []Relationship {
	g.RLock()
	defer g.RUnlock()
	matchingRelationships := make([]Relationship, 0, len(g.relationships))
	for _, item := range g.relationships {
		// An item is kept only when every filter accepts it.
		matches := true
		for _, clause := range filters {
			if ok := clause(item); !ok {
				matches = false
				break
			}
		}
		if matches {
			matchingRelationships = append(matchingRelationships, item)
		}
	}
	return matchingRelationships
}
// ListConnections returns all connection chains between a source node
// and a destination node by following relationships.
func ( g *Graph) ListConnections(from, to Node) []*ChainLink {
	return g.listConnections(from, to, map[string]struct{}{})
}

// listConnections performs a depth-first search from 'from' to 'to',
// carrying the set of node IDs already on the current path to break
// cycles. Each discovered path becomes a linked ChainLink chain.
func (g *Graph) listConnections(from, to Node, visited map[string]struct{}) []*ChainLink {
	chains := []*ChainLink{}
	visited[from.id] = struct{}{}
	for _, rel := range g.ListRelationships(FilterRelByFrom(from.GetID())) {
		// Skip relationships that lead back into the current path.
		if _, ok := visited[rel.To]; ok {
			continue
		}
		toCheck := copyMap(visited)
		toCheck[rel.To] = struct{}{}
		if rel.To == to.id {
			chains = append(chains, &ChainLink{node: from, rel: rel, next: &ChainLink{node: to}})
			continue
		}
		// Look the next node up via GetNodeByID so the map read happens
		// under the read lock; the previous bare g.nodes[rel.To] access
		// raced with concurrent writers (InsertNode/UpdateNode/DeleteNode).
		next, err := g.GetNodeByID(rel.To)
		if err != nil {
			continue
		}
		for _, cons := range g.listConnections(next, to, toCheck) {
			chains = append(chains, &ChainLink{node: from, rel: rel, next: cons})
		}
	}
	return chains
}
// copyMap returns a shallow copy of the given string set.
func copyMap(m map[string]struct{}) map[string]struct{} {
	out := make(map[string]struct{}, len(m))
	for k := range m {
		out[k] = struct{}{}
	}
	return out
}
// ChainLink is used as a linked list item to show connections between different nodes
type ChainLink struct {
node Node
rel Relationship
next *ChainLink
}
func (c *ChainLink) String() string {
var sb strings.Builder
if c.node.GetID() != "" {
sb.WriteString(c.node.String())
}
if c.rel.ID != "" {
sb.WriteString(fmt.Sprintf("->%s", c.rel.String()))
}
if c.next != nil {
sb.WriteString(fmt.Sprintf("->%s", c.next.String()))
}
return sb.String()
} | graph.go | 0.713032 | 0.411347 | graph.go | starcoder |
package eoy
import (
"fmt"
"time"
)
//Year is used to provide a primary key for storing stats by year.
type Year struct {
	ID int // the year value, used as the "years" table primary key (see Fill)
	CreatedDate *time.Time
}

//YearResult holds a year and a stats record.
type YearResult struct {
	ID int
	Stat
}

//YOYear is used to provide a primary key for storing stats by year.
//It embeds Year and backs the year-over-year report.
type YOYear struct {
	Year
}

//YOYearResult holds a year and a stats record.
type YOYearResult struct {
	YearResult
}
//KeyValue implements KeyValuer by returning the value of a key for the
//YearResult object. Index 0 (the year ID) is the only valid key
//column; any other index panics after dumping the record.
func (r YearResult) KeyValue(i int) (key interface{}) {
	switch i {
	case 0:
		key = r.ID
	default:
		fmt.Printf("Error in YearResult\n%+v\n", r)
		err := fmt.Errorf("Not a valid YearResult index, %v", i)
		panic(err)
	}
	return key
}

//FillKeys implements KeyFiller by filling Excel cells with keys from the
//year table. One cell is written per configured key column, starting
//at (row, col); the row number is returned unchanged.
func (r YearResult) FillKeys(rt *Runtime, sheet Sheet, row, col int) int {
	for j := 0; j < len(sheet.KeyNames); j++ {
		v := r.KeyValue(j)
		s := sheet.KeyStyles[j]
		rt.Cell(sheet.Name, row, col+j, v, s)
	}
	return row
}
//Fill implements Filler by filling in a spreadsheet using data from the years table.
//Only the single year rt.Year is selected (joined with its stats row);
//one spreadsheet row is written per result and the next free row index
//is returned.
func (y Year) Fill(rt *Runtime, sheet Sheet, row, col int) int {
	var a []YearResult
	rt.DB.Table("years").Select("years.id, stats.*").Where("years.id = ?", rt.Year).Joins("left join stats on stats.id = years.id").Scan(&a)
	for _, r := range a {
		rt.Spreadsheet.InsertRow(sheet.Name, row+1)
		r.FillKeys(rt, sheet, row, 0)
		r.Stat.Fill(rt, sheet.Name, row, len(sheet.KeyNames))
		row++
	}
	return row
}

//NewThisYearSheet builds the data used to decorate the "this year" page.
//The sheet has a single key column ("Year") and uses Year/YearResult as
//its Filler/KeyFiller pair.
func (rt *Runtime) NewThisYearSheet() Sheet {
	filler := Year{}
	result := YearResult{}
	name := fmt.Sprintf("%v Summary", rt.Year)
	sheet := Sheet{
		Titles: []string{
			fmt.Sprintf("Results for %v", rt.Year),
		},
		Name: name,
		KeyNames: []string{"Year"},
		KeyStyles: []int{rt.KeyStyle},
		Filler: filler,
		KeyFiller: result,
	}
	return sheet
}
//KeyValue implements KeyValuer by returning the value of a key for the
//YOYearResult object. Index 0 (the year ID) is the only valid key
//column; any other index panics after dumping the record.
func (r YOYearResult) KeyValue(i int) (key interface{}) {
	switch i {
	case 0:
		key = r.ID
	default:
		fmt.Printf("Error in YOYearResult\n%+v\n", r)
		err := fmt.Errorf("Not a valid YOYearResult index, %v", i)
		panic(err)
	}
	return key
}

//FillKeys implements KeyFiller by filling Excel cells with keys from the
//year table, one cell per configured key column starting at (row, col).
func (r YOYearResult) FillKeys(rt *Runtime, sheet Sheet, row, col int) int {
	for j := 0; j < len(sheet.KeyNames); j++ {
		v := r.KeyValue(j)
		s := sheet.KeyStyles[j]
		rt.Cell(sheet.Name, row, col+j, v, s)
	}
	return row
}

//Fill implements Filler by filling in a spreadsheet using data from the years table.
//Unlike Year.Fill it selects every year, newest first, writing one
//spreadsheet row per year and returning the next free row index.
func (y YOYear) Fill(rt *Runtime, sheet Sheet, row, col int) int {
	var a []YOYearResult
	rt.DB.Order("years.id desc").Table("years").Select("years.id, stats.*").Joins("left join stats on stats.id = years.id").Scan(&a)
	for _, r := range a {
		rt.Spreadsheet.InsertRow(sheet.Name, row+1)
		r.FillKeys(rt, sheet, row, 0)
		r.Stat.Fill(rt, sheet.Name, row, len(sheet.KeyNames))
		row++
	}
	return row
}
//NewYOYearSheet builds the data used to decorate the "this year" page.
func (rt *Runtime) NewYOYearSheet() Sheet {
filler := YOYear{}
result := YOYearResult{}
sheet := Sheet{
Titles: []string{
"Year over Year results",
},
Name: "Year over year",
KeyNames: []string{"Year"},
KeyStyles: []int{rt.KeyStyle},
Filler: filler,
KeyFiller: result,
}
return sheet
} | pkg/year.go | 0.598899 | 0.443661 | year.go | starcoder |
package p564
import (
"strconv"
)
/**
Given an integer n, find the closest integer (not including itself), which is a palindrome.
The 'closest' is defined as absolute difference minimized between two integers.
Example 1:
Input: "123"
Output: "121"
Note:
The input n is a positive integer represented by string, whose length will not exceed 18.
If there is a tie, return the smaller one as answer.
*/
// reverse returns a new slice holding the bytes of n in reverse order;
// the input slice is not modified.
func reverse(n []byte) []byte {
	res := make([]byte, len(n))
	for i, b := range n {
		res[len(n)-1-i] = b
	}
	return res
}
// smallerPalindromic builds the nearest palindrome strictly smaller
// than the number whose left half is front and optional middle digit is
// mid (mid is "" for even-length inputs): the half+middle is
// decremented and mirrored. Returns the palindrome as a uint64 and as
// its string form.
func smallerPalindromic(front, mid string) (uint64, string) {
	v, _ := strconv.ParseUint(front+mid, 10, 0)
	small := strconv.FormatUint(v-1, 10)
	var smalls string
	var smallv uint64
	smallbytes := []byte(small)
	if len(small) == len(front+mid) {
		// Same digit count after the decrement: mirror the decremented
		// half, dropping the middle digit from the mirror for odd lengths.
		if mid == "" {
			smalls = small + string(reverse(smallbytes))
		} else {
			smalls = small + string(reverse(smallbytes[:len(smallbytes)-1]))
		}
		// Special case: a two-digit input whose half decrements to "0"
		// mirrors to "00"; the correct smaller palindrome is "9".
		if smalls == "00" {
			smalls = "9"
		}
	} else {
		// The decrement lost a digit (e.g. "10" -> "9"), so the result
		// palindrome is one digit shorter; for even-length inputs a "9"
		// becomes the new middle digit (e.g. 1000 -> 999).
		if mid != "" {
			smalls = small + string(reverse(smallbytes))
		} else {
			smalls = small + "9" + string(reverse(smallbytes))
		}
	}
	smallv, _ = strconv.ParseUint(smalls, 10, 0)
	return smallv, smalls
}
// largerPalindromic builds the nearest palindrome strictly larger than
// the number whose left half is front and optional middle digit is mid
// (mid is "" for even-length inputs): the half+middle is incremented
// and mirrored. Returns the palindrome as a uint64 and as its string
// form.
func largerPalindromic(front, mid string) (uint64, string) {
	v, _ := strconv.ParseUint(front+mid, 10, 0)
	large := strconv.FormatUint(v+1, 10)
	var larges string
	var largev uint64
	largebytes := []byte(large)
	if len(large) == len(front+mid) {
		// Same digit count after the increment: mirror the incremented
		// half, dropping the middle digit from the mirror for odd lengths.
		if mid == "" {
			larges = large + string(reverse(largebytes))
		} else {
			larges = large + string(reverse(largebytes[:len(largebytes)-1]))
		}
	} else {
		// The increment gained a digit (e.g. "99" -> "100"); trim the
		// mirrored part so the result palindrome keeps a consistent
		// length (e.g. 999 -> 1001).
		if mid != "" {
			larges = large + string(reverse(largebytes[:len(largebytes)-2]))
		} else {
			larges = large + string(reverse(largebytes[:len(largebytes)-1]))
		}
	}
	largev, _ = strconv.ParseUint(larges, 10, 0)
	return largev, larges
}
func nearestPalindromic(n string) string {
bn := []byte(n)
length := len(n)
if length == 1 {
return string(n[0] - 1)
}
origin, _ := strconv.ParseUint(n, 10, 0)
odd := (length%2 == 1)
mid := length / 2
mids := ""
if odd {
mids = string(bn[mid])
}
front := string(bn[:mid])
pals := front + mids + string(reverse(bn[:mid]))
pal, _ := strconv.ParseUint(pals, 10, 0)
smallerPal, largerPal := uint64(0), uint64(0)
var smallers, largers string
if pal < origin {
smallerPal = pal
smallers = pals
largerPal, largers = largerPalindromic(front, mids)
} else if pal > origin {
largerPal = pal
largers = pals
smallerPal, smallers = smallerPalindromic(front, mids)
} else {
largerPal, largers = largerPalindromic(front, mids)
smallerPal, smallers = smallerPalindromic(front, mids)
}
if origin-smallerPal <= largerPal-origin {
return smallers
} else {
return largers
}
} | algorithms/p564/564.go | 0.807499 | 0.438545 | 564.go | starcoder |
package utils
import (
"fmt"
"math"
"sync"
"gonum.org/v1/gonum/lapack/lapack64"
"gonum.org/v1/gonum/blas/blas64"
"gonum.org/v1/gonum/mat"
)
// Matrix wraps a gonum *mat.Dense with a read-only flag, a debug name
// and DataP, a direct alias of the dense matrix's row-major backing
// slice (kept in sync by NewMatrix).
type Matrix struct {
	M *mat.Dense
	readOnly bool
	name string
	DataP []float64
}

var (
	// I and Zero are 1x1 identity and zero convenience matrices,
	// populated by init.
	I, Zero Matrix
)

func init() {
	I = NewMatrix(1, 1, []float64{1.})
	Zero = NewMatrix(1, 1, []float64{0.})
}
// NewMatrix allocates an nr x nc matrix. When a backing slice is
// supplied in dataO it must hold at least nr*nc values and is used
// directly (no copy); otherwise fresh zeroed storage is allocated.
// DataP aliases the dense matrix's row-major data.
func NewMatrix(nr, nc int, dataO ...[]float64) (R Matrix) {
	var backing []float64
	if len(dataO) == 0 {
		backing = make([]float64, nr*nc)
	} else {
		if len(dataO[0]) < nr*nc {
			err := fmt.Errorf("mismatch in allocation: NewMatrix nr,nc = %v,%v, len(data[0]) = %v\n", nr, nc, len(dataO[0]))
			panic(err)
		}
		backing = dataO[0][0 : nr*nc]
	}
	dense := mat.NewDense(nr, nc, backing)
	return Matrix{
		M:        dense,
		readOnly: false,
		name:     "unnamed - hint: pass a variable name to SetReadOnly()",
		DataP:    dense.RawMatrix().Data,
	}
}
// NewDiagMatrix builds an nr x nr matrix whose diagonal is taken from
// data, or broadcast from scalarO[0] when data is empty and a scalar is
// supplied. A non-empty data slice whose length differs from nr panics.
func NewDiagMatrix(nr int, data []float64, scalarO ...float64) (R Matrix) {
	useScalar := len(data) == 0 && len(scalarO) != 0
	if !useScalar && len(data) != nr {
		err := fmt.Errorf("wrong length vector, is %d, should be %d", len(data), nr)
		panic(err)
	}
	R = NewMatrix(nr, nr)
	if useScalar {
		for i := 0; i < nr; i++ {
			R.Set(i, i, scalarO[0])
		}
		return
	}
	for i, v := range data {
		R.Set(i, i, v)
	}
	return
}
// Dims, At and T minimally satisfy the mat.Matrix interface.
func (m Matrix) Dims() (r, c int)  { return m.M.Dims() }
func (m Matrix) At(i, j int) float64 { return m.M.At(i, j) }

// T delegates to the wrapped *mat.Dense. The previous implementation
// returned m.T(), which called itself and recursed until stack overflow.
func (m Matrix) T() mat.Matrix { return m.M.T() }

func (m Matrix) RawMatrix() blas64.General { return m.M.RawMatrix() }

// Data returns the raw row-major backing slice of the matrix.
func (m Matrix) Data() []float64 {
	return m.RawMatrix().Data
}
// Utility
// IsEmpty reports whether the matrix has no allocated storage.
func (m *Matrix) IsEmpty() bool {
	return m.M == nil
}

// Chainable methods (extended)

// SetReadOnly marks the matrix read-only, optionally recording a name
// used in mutation-error messages, and returns the matrix for chaining.
func (m *Matrix) SetReadOnly(name ...string) Matrix {
	if len(name) > 0 {
		m.name = name[0]
	}
	m.readOnly = true
	return *m
}

// SetWritable clears the read-only flag and returns the matrix for chaining.
func (m *Matrix) SetWritable() Matrix {
	m.readOnly = false
	return *m
}
// Slice extracts the sub-matrix of rows [I,K) and columns [J,L) into a
// newly allocated matrix; the receiver is not modified.
func (m Matrix) Slice(I, K, J, L int) (R Matrix) {
	nrR := K - I
	ncR := L - J
	_, nc := m.Dims()
	src := m.RawMatrix().Data
	R = NewMatrix(nrR, ncR)
	dst := R.DataP
	for i := I; i < K; i++ {
		for j := J; j < L; j++ {
			dst[(i-I)*ncR+(j-J)] = src[i*nc+j]
		}
	}
	return
}

// Copy returns a deep copy of m, written into RO[0] when a destination
// is supplied; the receiver is not modified.
func (m Matrix) Copy(RO ...Matrix) (R Matrix) {
	nr, nc := m.Dims()
	R = getResultMatrix(nr, nc, RO)
	copy(R.DataP, m.DataP)
	return
}
// Print formats the matrix (optionally titled with msgI[0]), writes the
// result to stdout and returns the formatted string.
func (m Matrix) Print(msgI ...string) (o string) {
	var (
		name = ""
	)
	if len(msgI) != 0 {
		name = msgI[0]
	}
	formatString := "%s = \n%8.5f\n"
	o = fmt.Sprintf(formatString, name, mat.Formatted(m.M, mat.Squeeze()))
	// fmt.Print, not fmt.Printf: o is data, not a format string, so a
	// literal '%' in it must not be interpreted as a verb (this was
	// also a `go vet` non-constant-format-string violation).
	fmt.Print(o)
	return
}
// Transpose returns a new matrix holding the transpose of m; the
// receiver is not modified.
func (m Matrix) Transpose() (R Matrix) {
	nr, nc := m.Dims()
	src := m.RawMatrix().Data
	R = NewMatrix(nc, nr)
	dst := R.M.RawMatrix().Data
	for i := 0; i < nr; i++ {
		for j := 0; j < nc; j++ {
			dst[j*nr+i] = src[i*nc+j]
		}
	}
	return
}

// getResultMatrix is a helper that parses the optional destination
// argument: it returns RO[0] after validating its dimensions, or a
// fresh nr x nc matrix when no destination was supplied.
func getResultMatrix(nr, nc int, RO []Matrix) (R Matrix) {
	if len(RO) == 0 {
		return NewMatrix(nr, nc)
	}
	R = RO[0]
	if r, c := R.Dims(); r != nr || c != nc {
		panic("incorrect dimensions for provided result matrix")
	}
	return
}
// Mul returns the matrix product m x A, written into RO[0] when a
// destination is supplied. When either operand is scalar (per IsScalar,
// defined elsewhere) the other operand is copied and scaled instead.
// Does not change the receiver.
func (m Matrix) Mul(A Matrix, RO ...Matrix) (R Matrix) {
	var (
		nrM, ncM = m.Dims()
		nrA, ncA = A.M.Dims()
		nrR, ncR int
	)
	// A scalar operand adopts the other operand's shape; a true matrix
	// product is (rows of m) x (cols of A).
	switch {
	case m.IsScalar():
		nrR, ncR = nrA, ncA
	case A.IsScalar():
		nrR, ncR = nrM, ncM
	default:
		nrR, ncR = nrM, ncA
	}
	R = getResultMatrix(nrR, ncR, RO)
	switch {
	case m.IsScalar():
		A.Copy(R)
		R.Scale(m.DataP[0])
	case A.IsScalar():
		m.Copy(R)
		R.Scale(A.DataP[0])
	default:
		R.M.Mul(m.M, A.M)
	}
	return R
}
// MulParallel computes m x A using up to nP goroutines. A's columns are
// split into contiguous chunks (via Split1D/Split1DMaxChunk, defined
// elsewhere); each worker copies its chunk of A into per-worker scratch
// storage, multiplies it by m, and copies the partial product into the
// shared result. Workers write disjoint column ranges of R, so no
// locking is needed beyond the WaitGroup. Does not change the receiver.
func (m Matrix) MulParallel(A Matrix, nP int) (R Matrix) {
	var (
		nrM, _ = m.Dims()
		nrA, ncA = A.M.Dims()
		wg = sync.WaitGroup{}
		aD = A.DataP
	)
	// MulExt multiplies into caller-provided storage when given.
	MulExt := func(m, A Matrix, dataO ...[]float64) (R Matrix) {
		var (
			nrM, _ = m.Dims()
			_, ncA = A.M.Dims()
		)
		if len(dataO) != 0 {
			R = NewMatrix(nrM, ncA, dataO[0])
		} else {
			R = NewMatrix(nrM, ncA)
		}
		R.M.Mul(m.M, A.M)
		return R
	}
	// Never use more workers than there are columns to split.
	if nP > ncA {
		nP = ncA
	}
	R = NewMatrix(nrM, ncA)
	rD := R.DataP
	// Carve per-worker scratch areas out of two shared backing slices
	// so each goroutine allocates nothing of its own.
	ncAChunk := Split1DMaxChunk(ncA, nP)
	subAChunkSize := nrA * ncAChunk
	subRChunkSize := nrM * ncAChunk
	subAstorage := make([]float64, nP*subAChunkSize)
	subRstorage := make([]float64, nP*subRChunkSize)
	for n := 0; n < nP; n++ {
		ind, end := Split1D(ncA, nP, n)
		ncSubA := end - ind
		wg.Add(1)
		go func(ind, end, ncSubA, n int) {
			// Gather this worker's columns of A into a compact sub-matrix.
			subA := NewMatrix(nrA, ncSubA, subAstorage[n*subAChunkSize:])
			sAD := subA.DataP
			for j := 0; j < nrA; j++ {
				var ii int
				for i := ind; i < end; i++ {
					sAD[ii+ncSubA*j] = aD[i+ncA*j]
					ii++
				}
			}
			subR := MulExt(m, subA, subRstorage[n*subRChunkSize:])
			sRD := subR.DataP
			// Scatter the partial product back into this worker's
			// column range of the shared result.
			for j := 0; j < nrM; j++ {
				var ii int
				for i := ind; i < end; i++ {
					rD[i+ncA*j] = sRD[ii+ncSubA*j]
					ii++
				}
			}
			wg.Done()
		}(ind, end, ncSubA, n)
	}
	wg.Wait()
	return R
}
// getDimensions validates that m and A are shape-compatible for
// element-wise operations, treating a scalar (1x1) operand as matching
// the other operand's shape, and returns the common dimensions; a
// genuine mismatch panics.
func getDimensions(m, A Matrix) (nr, nc int) {
	var (
		nrA, ncA = A.Dims()
	)
	nr, nc = m.Dims()
	switch {
	case m.IsScalar():
		nr, nc = A.Dims() // upscale m (scalar) to A
	case A.IsScalar():
		nrA, ncA = m.Dims() // upscale A (scalar) to m
	}
	if nrA != nr || ncA != nc {
		panic("dimensions of matrices do not match")
	}
	return
}

// upscale replaces a scalar (1x1) receiver in place with an nr x nc
// matrix that carries the scalar value at each diagonal position
// (i, i) for i < nr; non-scalar receivers are left untouched.
func (m *Matrix) upscale(nr, nc int) {
	if m.IsScalar() {
		val := m.DataP[0]
		RR := NewMatrix(nr, nc)
		for i := 0; i < nr; i++ {
			RR.Set(i, i, val)
		}
		m.M, m.DataP = RR.M, RR.DataP
	}
}
// add computes m + A (or m - A when subtract is set). When a
// destination is supplied in RO the receiver is left untouched and the
// result is written there; otherwise m itself is modified in place
// (upscaled first when it is a scalar). Scalar operands contribute only
// along the diagonal, mirroring upscale's semantics.
func add(m, A Matrix, subtract bool, RO []Matrix) (R Matrix) { // Changes receiver, optionally does not
	var (
		nr, nc = getDimensions(m, A)
		mult = 1.
	)
	// Subtraction is implemented as addition with a -1 multiplier.
	if subtract {
		mult = -1.
	}
	if len(RO) != 0 {
		// Seed the destination with m's contribution.
		R = getResultMatrix(nr, nc, RO)
		if m.IsScalar() {
			val := m.DataP[0]
			for i := 0; i < nr; i++ {
				R.Set(i, i, val)
			}
		} else {
			for i, val := range m.DataP {
				R.DataP[i] = val
			}
		}
	} else {
		m.checkWritable()
		m.upscale(nr, nc) // Must upscale m to match A
		R = m
	}
	// Accumulate A's contribution into R.
	switch {
	case A.IsScalar():
		AVal := mult * A.DataP[0]
		for i := 0; i < nr; i++ {
			R.Set(i, i, R.At(i, i)+AVal)
		}
	default:
		for i := range A.DataP {
			R.DataP[i] += mult * A.DataP[i]
		}
	}
	return
}
// Add returns m + A, written into RO[0] when a destination is supplied;
// otherwise m is modified in place.
func (m Matrix) Add(A Matrix, RO ...Matrix) Matrix {
	return add(m, A, false, RO)
}

// Subtract returns m - A, written into RO[0] when a destination is
// supplied; otherwise m is modified in place.
func (m Matrix) Subtract(A Matrix, RO ...Matrix) Matrix {
	return add(m, A, true, RO)
}

// AddScalar adds a to every element of m in place and returns m.
func (m Matrix) AddScalar(a float64) Matrix {
	data := m.RawMatrix().Data
	m.checkWritable()
	for i := range data {
		data[i] += a
	}
	return m
}
// Subset gathers the elements of m at the row-major flat indices in I
// into a new matrix. Optional newDims (rows, cols) give the result
// shape, which otherwise matches m. The receiver is not modified.
func (m Matrix) Subset(I Index, newDims ...int) (R Matrix) {
	nr, nc := m.Dims()
	if len(newDims) != 0 {
		nr, nc = newDims[0], newDims[1]
	}
	src := m.RawMatrix().Data
	gathered := make([]float64, nr*nc)
	for i, ind := range I {
		gathered[i] = src[ind]
	}
	return NewMatrix(nr, nc, gathered)
}

// SliceRows copies the rows of m selected by I (in order) into a new
// matrix; an out-of-range row index panics.
func (m Matrix) SliceRows(I Index) (R Matrix) {
	nr, nc := m.Dims()
	R = NewMatrix(len(I), nc)
	for rowOut, rowIn := range I {
		if rowIn < 0 || rowIn > nr-1 {
			fmt.Printf("index out of bounds: index = %d, max_bounds = %d\n", rowIn, nr-1)
			panic("unable to subset rows from matrix")
		}
		R.M.SetRow(rowOut, m.M.RawRowView(rowIn))
	}
	return
}
// SliceCols copies the columns of m selected by I (in order) into a new
// matrix; an out-of-range column index panics.
func (m Matrix) SliceCols(I Index) (R Matrix) {
	nr, nc := m.Dims()
	src := m.RawMatrix().Data
	R = NewMatrix(nr, len(I))
	scratch := make([]float64, nr)
	for colOut, colIn := range I {
		if colIn < 0 || colIn > nc-1 {
			fmt.Printf("index out of bounds: index = %d, max_bounds = %d\n", colIn, nc-1)
			panic("unable to subset columns from matrix")
		}
		// Gather the strided column into contiguous scratch storage.
		for i := 0; i < nr; i++ {
			scratch[i] = src[i*nc+colIn]
		}
		R.M.SetCol(colOut, scratch)
	}
	return
}
// Set assigns val at (i, j) after translating both indices through lim
// (defined elsewhere) and checking the matrix is writable; returns m
// for chaining.
func (m Matrix) Set(i, j int, val float64) Matrix {
	nr, nc := m.Dims()
	i, j = lim(i, nr), lim(j, nc)
	m.checkWritable()
	m.M.Set(i, j, val)
	return m
}

// SetRange assigns val to every element of the limRange-translated
// rectangle [i1,i2) x [j1,j2); returns m for chaining.
func (m Matrix) SetRange(i1, i2, j1, j2 int, val float64) Matrix {
	nr, nc := m.Dims()
	data := m.RawMatrix().Data
	m.checkWritable()
	i1, i2, j1, j2 = limRange(i1, i2, j1, j2, nr, nc)
	for i := i1; i < i2; i++ {
		rowStart := i * nc
		for j := j1; j < j2; j++ {
			data[rowStart+j] = val
		}
	}
	return m
}

// SetCol replaces column j (lim-translated) with data; returns m for
// chaining.
func (m Matrix) SetCol(j int, data []float64) Matrix {
	_, nc := m.Dims()
	j = lim(j, nc)
	m.checkWritable()
	m.M.SetCol(j, data)
	return m
}

// SetColFrom writes data into column col (lim-translated) starting at
// row rowFrom, panicking when the data would run past the last row;
// returns m for chaining.
func (m Matrix) SetColFrom(col, rowFrom int, data []float64) Matrix {
	nr, nc := m.Dims()
	col = lim(col, nc)
	m.checkWritable()
	if rowFrom+len(data) > nr {
		panic(
			fmt.Errorf("row length exceeded, max is %d, have %d", nr, rowFrom+len(data)))
	}
	for i, val := range data {
		m.M.Set(rowFrom+i, col, val)
	}
	return m
}
func (m Matrix) AssignColumns(I Index, A Matrix) Matrix { // Changes receiver
var (
_, nc = m.Dims()
)
// Assigns columns in M to columns indexed sequentially from Ainv
m.checkWritable()
for i, j := range I {
if j > nc-1 {
err := fmt.Errorf("bad index value: %v exceeds bounds of %v", j, nc)
panic(err)
}
m.SetCol(j, A.Col(i).RawVector().Data)
}
return m
}
// Assign writes values from AI into m at the linear positions in I,
// panicking (rather than returning an error) on any mismatch.
// See IndexedAssign for the supported value types.
func (m Matrix) Assign(I Index, AI interface{}) Matrix {
	if err := m.IndexedAssign(I, AI); err != nil {
		panic(err)
	}
	return m
}
// Range gathers the elements selected by RangeO (a 1D or 2D range
// specification, see expandRangeO) into a fresh []float64, in index order.
func (m Matrix) Range(RangeO ...interface{}) (r []float64) {
	var (
		nr, nc = m.Dims()
		I      Index
		data   = m.Data()
	)
	I = expandRangeO(nr, nc, RangeO)
	r = make([]float64, len(I))
	for i, ind := range I {
		r[i] = data[ind]
	}
	return
}
// Equate assigns ValuesI (a scalar broadcast over the range, or a
// slice-like of matching length — see expandValues) to the elements of m
// selected by RangeO (1D or 2D range specifiers, see expandRangeO).
func (m Matrix) Equate(ValuesI interface{}, RangeO ...interface{}) {
	var (
		nr, nc = m.Dims()
		I      Index
		Values []float64
		nVal   int
	)
	I = expandRangeO(nr, nc, RangeO)
	nVal = len(I)
	Values = expandValues(nVal, ValuesI)
	m.Assign(I, Values)
}
// expandValues converts ValuesI into a []float64 of exactly nVal entries.
// Scalars (int, float64) are broadcast to nVal copies; slice-like inputs
// ([]float64, []int, Index, Matrix, Vector) must already contain exactly
// nVal values. It panics on a length mismatch or an unsupported type.
//
// Fixes: the []int and Index cases previously indexed the input by the
// target length, panicking with an index error on short inputs and
// silently truncating long ones; unsupported types fell through and
// returned nil without any diagnostic.
func expandValues(nVal int, ValuesI interface{}) (vals []float64) {
	switch values := ValuesI.(type) {
	case []float64:
		vals = values
	case []int:
		vals = make([]float64, len(values))
		for i, v := range values {
			vals[i] = float64(v)
		}
	case Index:
		vals = make([]float64, len(values))
		for i, v := range values {
			vals[i] = float64(v)
		}
	case int:
		// Broadcast a scalar over the whole range.
		vals = make([]float64, nVal)
		for i := range vals {
			vals[i] = float64(values)
		}
	case float64:
		vals = make([]float64, nVal)
		for i := range vals {
			vals[i] = values
		}
	case Matrix:
		vals = values.Data()
	case Vector:
		vals = values.Data()
	default:
		panic(fmt.Errorf("unsupported value type %T", ValuesI))
	}
	if len(vals) != nVal {
		panic(fmt.Errorf("number of values not equal to index"))
	}
	return
}
// expandRangeO converts the variadic range arguments into a flat Index.
// One argument: a 1D range expanded against nr. Two arguments: row and
// column ranges combined through NewIndex2D into row-major linear indices.
// NOTE(review): the single-argument case expands against nr (rows) only —
// confirm callers expect that rather than nr*nc linear indices.
func expandRangeO(nr, nc int, RangeO []interface{}) (I Index) {
	var (
		err error
		I2D Index2D
	)
	switch len(RangeO) {
	case 1:
		I = expandRangeI(nr, RangeO[0])
	case 2:
		I1 := expandRangeI(nr, RangeO[0])
		I2 := expandRangeI(nc, RangeO[1])
		if I2D, err = NewIndex2D(nr, nc, I1, I2, true); err != nil {
			panic(err)
		}
		I = I2D.ToIndex()
	default:
		panic(fmt.Errorf("only 1D and 2D ranges supported"))
	}
	return
}
// expandRangeI converts a 1D range specifier (int, []int, Index, []float64,
// string range expression, or a Vector/Matrix of indices) into an Index,
// validating every entry against the dimension size max (valid indices are
// 0..max-1).
//
// Fixes: the bounds check used "val > max", which admitted val == max —
// already out of range for a dimension of size max; unsupported specifier
// types silently produced a nil index.
func expandRangeI(max int, RangeI interface{}) (I Index) {
	switch val := RangeI.(type) {
	case []int:
		I = val
	case Index:
		I = val
	case []float64:
		I = make(Index, len(val))
		for i, v := range val {
			I[i] = int(v)
		}
	case int:
		I = Index{val}
	case string:
		// String range expressions (e.g. ":") are expanded by R1.
		r1 := NewR1(max)
		I = r1.Range(val)
	case Vector:
		I = expandRangeI(max, val.Data())
	case Matrix:
		I = expandRangeI(max, val.Data())
	default:
		panic(fmt.Errorf("unsupported range type %T", RangeI))
	}
	for _, v := range I {
		if v >= max {
			panic(fmt.Errorf("max value %d exceeded by index value %d", max, v))
		}
	}
	return
}
//func (m Matrix) AssignVector(I Index, Ainv Vector) Matrix { // Changes receiver

// AssignVector writes values taken sequentially from AI (Vector, Matrix or
// Index) into m at the linear positions listed in I. Returns m.
// NOTE(review): lengths are not validated — if AI holds fewer values than I
// this panics with an index error, and unsupported AI types are silently
// ignored; confirm whether stricter checking is wanted here.
func (m Matrix) AssignVector(I Index, AI interface{}) Matrix { // Changes receiver
	// Assigns values indexed into M using values sequentially from Vector Ainv
	var (
		dataM = m.RawMatrix().Data
	)
	m.checkWritable()
	switch A := AI.(type) {
	case Vector:
		dataA := A.RawVector().Data
		for i, ind := range I {
			dataM[ind] = dataA[i]
		}
	case Matrix:
		dataA := A.RawMatrix().Data
		for i, ind := range I {
			dataM[ind] = dataA[i]
		}
	case Index:
		for i, ind := range I {
			dataM[ind] = float64(A[i])
		}
	}
	return m
}
// Scale multiplies every element of m by a, in place, and returns m for
// chaining. Panics if the matrix is read only.
func (m Matrix) Scale(a float64) Matrix { // Changes receiver
	m.checkWritable()
	data := m.RawMatrix().Data
	for i, v := range data {
		data[i] = v * a
	}
	return m
}
// Apply replaces every element of m with f(element), in place, and returns
// m for chaining. Panics if the matrix is read only.
func (m Matrix) Apply(f func(float64) float64) Matrix { // Changes receiver
	m.checkWritable()
	d := m.RawMatrix().Data
	for i := range d {
		d[i] = f(d[i])
	}
	return m
}
// Apply2 sets m[i] = f(m[i], A[i]) over the flat backing data, in place,
// and returns m.
// NOTE(review): dimensions are not checked; A must hold at least as many
// elements as m or this panics with an index error.
func (m Matrix) Apply2(A Matrix, f func(float64, float64) float64) Matrix { // Changes receiver
	var (
		dataM = m.RawMatrix().Data
		dataA = A.RawMatrix().Data
	)
	m.checkWritable()
	for i, val := range dataM {
		dataM[i] = f(val, dataA[i])
	}
	return m
}
// Apply3 sets m[i] = f(m[i], A[i], B[i]) over the flat backing data, in
// place, and returns m.
// NOTE(review): dimensions are not checked; A and B must hold at least as
// many elements as m.
func (m Matrix) Apply3(A, B Matrix, f func(float64, float64, float64) float64) Matrix { // Changes receiver
	var (
		dataM  = m.RawMatrix().Data
		dA, dB = A.RawMatrix().Data, B.RawMatrix().Data
	)
	m.checkWritable()
	for i, val := range dataM {
		dataM[i] = f(val, dA[i], dB[i])
	}
	return m
}
// Apply3Parallel is Apply3 with the work split into nP contiguous chunks
// (computed by Split1D), one goroutine per chunk. The chunks are disjoint,
// so no locking is needed; the call blocks until all workers finish.
// Returns m.
func (m Matrix) Apply3Parallel(A, B Matrix, f func(float64, float64, float64) float64, nP int) Matrix { // Changes receiver
	var (
		dataM  = m.RawMatrix().Data
		dA, dB = A.RawMatrix().Data, B.RawMatrix().Data
		wg     = sync.WaitGroup{}
		l      = len(dataM)
	)
	m.checkWritable()
	for n := 0; n < nP; n++ {
		ind, end := Split1D(l, nP, n)
		wg.Add(1)
		// Bounds are passed by value so each goroutine sees its own chunk.
		go func(ind, end int) {
			for i := ind; i < end; i++ {
				val := dataM[i]
				dataM[i] = f(val, dA[i], dB[i])
			}
			wg.Done()
		}(ind, end)
	}
	wg.Wait()
	return m
}
// Apply4 sets m[i] = f(m[i], A[i], B[i], C[i]) over the flat backing data,
// in place, and returns m. Sizes of A, B, C are not checked.
func (m Matrix) Apply4(A, B, C Matrix, f func(float64, float64, float64, float64) float64) Matrix { // Changes receiver
	var (
		dataM      = m.RawMatrix().Data
		dA, dB, dC = A.RawMatrix().Data, B.RawMatrix().Data, C.RawMatrix().Data
	)
	m.checkWritable()
	for i, val := range dataM {
		dataM[i] = f(val, dA[i], dB[i], dC[i])
	}
	return m
}
// Apply4Parallel is Apply4 split across nP goroutines, each handling a
// disjoint contiguous chunk from Split1D; blocks until all workers finish.
// Returns m.
func (m Matrix) Apply4Parallel(A, B, C Matrix, f func(float64, float64, float64, float64) float64, nP int) Matrix { // Changes receiver
	var (
		dataM      = m.RawMatrix().Data
		dA, dB, dC = A.RawMatrix().Data, B.RawMatrix().Data, C.RawMatrix().Data
		wg         = sync.WaitGroup{}
		l          = len(dataM)
	)
	m.checkWritable()
	for n := 0; n < nP; n++ {
		ind, end := Split1D(l, nP, n)
		wg.Add(1)
		go func(ind, end int) {
			for i := ind; i < end; i++ {
				val := dataM[i]
				dataM[i] = f(val, dA[i], dB[i], dC[i])
			}
			wg.Done()
		}(ind, end)
	}
	wg.Wait()
	return m
}
// Apply5Parallel applies f(m[i], A[i], B[i], C[i], D[i]) in place across nP
// goroutines over disjoint chunks from Split1D; blocks until all workers
// finish. Returns m.
func (m Matrix) Apply5Parallel(A, B, C, D Matrix, f func(float64, float64, float64, float64, float64) float64, nP int) Matrix { // Changes receiver
	var (
		dataM          = m.RawMatrix().Data
		dA, dB, dC, dD = A.RawMatrix().Data, B.RawMatrix().Data, C.RawMatrix().Data, D.RawMatrix().Data
		wg             = sync.WaitGroup{}
		l              = len(dataM)
	)
	m.checkWritable()
	for n := 0; n < nP; n++ {
		ind, end := Split1D(l, nP, n)
		wg.Add(1)
		go func(ind, end int) {
			for i := ind; i < end; i++ {
				val := dataM[i]
				dataM[i] = f(val, dA[i], dB[i], dC[i], dD[i])
			}
			wg.Done()
		}(ind, end)
	}
	wg.Wait()
	return m
}
// Apply8 sets m[i] = f(m[i], A[i], ..., G[i]) over the flat backing data,
// in place, and returns m. Sizes of the argument matrices are not checked.
func (m Matrix) Apply8(A, B, C, D, E, F, G Matrix, f func(float64, float64, float64, float64, float64, float64, float64, float64) float64) Matrix { // Changes receiver
	var (
		dataM                      = m.RawMatrix().Data
		dA, dB, dC, dD, dE, dF, dG = A.RawMatrix().Data, B.RawMatrix().Data, C.RawMatrix().Data, D.RawMatrix().Data, E.RawMatrix().Data, F.RawMatrix().Data, G.RawMatrix().Data
	)
	m.checkWritable()
	for i, val := range dataM {
		dataM[i] = f(val, dA[i], dB[i], dC[i], dD[i], dE[i], dF[i], dG[i])
	}
	return m
}
// POW raises every element of m to the integer power p, in place, using the
// package-level POW helper. Returns m for chaining.
func (m Matrix) POW(p int) Matrix { // Changes receiver
	m.checkWritable()
	d := m.RawMatrix().Data
	for i := range d {
		d[i] = POW(d[i], p)
	}
	return m
}
// ElMul multiplies elementwise by A. With no optional RO argument the
// receiver is updated in place and returned; otherwise the target comes
// from getResultMatrix(RO).
// NOTE(review): in the RO case the target's existing contents are
// multiplied by A (dataR[i] *= val) rather than being set to m*A — confirm
// getResultMatrix pre-loads the result with m's values.
func (m Matrix) ElMul(A Matrix, RO ...Matrix) (R Matrix) { // Optionally changes receiver
	var (
		dataA    = A.DataP
		nr, nc   = m.Dims()
		nrA, ncA = A.Dims()
	)
	if nr != nrA || nc != ncA {
		err := fmt.Errorf("dimensions mismatch, have [%d,%d] should equal [%d,%d]",
			nr, nc, nrA, ncA)
		panic(err)
	}
	if len(RO) == 0 {
		m.checkWritable()
		R = m
	} else {
		R = getResultMatrix(nr, nc, RO)
	}
	dataR := R.DataP
	for i, val := range dataA {
		dataR[i] *= val
	}
	return
}
// ElDiv divides m elementwise by A in place (m[i] /= A[i]) and returns m.
//
// Fixes: a dimension check mirroring sibling ElMul; previously a size
// mismatch caused either a raw index panic or a silently partial division.
func (m Matrix) ElDiv(A Matrix) Matrix { // Changes receiver
	var (
		nr, nc   = m.Dims()
		nrA, ncA = A.Dims()
	)
	if nr != nrA || nc != ncA {
		err := fmt.Errorf("dimensions mismatch, have [%d,%d] should equal [%d,%d]",
			nr, nc, nrA, ncA)
		panic(err)
	}
	m.checkWritable()
	dataM := m.RawMatrix().Data
	dataA := A.RawMatrix().Data
	for i, val := range dataA {
		dataM[i] /= val
	}
	return m
}
// AssignScalar writes val at every linear position listed in I, in place,
// and returns m for chaining.
func (m Matrix) AssignScalar(I Index, val float64) Matrix { // Changes receiver
	m.checkWritable()
	d := m.RawMatrix().Data
	for _, idx := range I {
		d[idx] = val
	}
	return m
}
// LUSolve solves the linear system m * X = b via gonum's LU factorization
// and returns X. Panics if the solve fails (e.g. m is singular).
func (m Matrix) LUSolve(b Matrix) (X Matrix) {
	var (
		err error
	)
	lu := &mat.LU{}
	lu.Factorize(m)
	X = NewMatrix(b.Dims())
	if err = lu.SolveTo(X.M, false, b.M); err != nil {
		panic(err)
	}
	return
}
// Non chainable methods
// IsScalar reports whether m is a 1x1 matrix.
func (m Matrix) IsScalar() bool {
	nr, nc := m.Dims()
	return nr == 1 && nc == 1
}
// IndexedAssign writes values from ValI into m at the linear positions in
// I, delegating to the package-level IndexedAssign. Returns an error on a
// length mismatch instead of panicking.
func (m Matrix) IndexedAssign(I Index, ValI interface{}) (err error) { // Changes receiver
	return IndexedAssign(m, I, ValI)
}
// Inverse2 inverts m using caller-supplied scratch space so hot loops can
// avoid allocation: RO[0] receives the result, RO[1] is the LAPACK work
// area, and iPiv is the pivot buffer (length nr). Returns errSingular if
// the LU factorization (Getrf) or the inversion (Getri) fails.
func (m Matrix) Inverse2(iPiv []int, RO ...Matrix) (R Matrix, err error) {
	var (
		nr, nc      = m.Dims()
		errSingular = fmt.Errorf("unable to invert, matrix is singular")
		WORK        Matrix
	)
	R = RO[0]    // required: result matrix
	WORK = RO[1] // required: LAPACK work area
	nrR, ncR := R.Dims()
	if nrR != nr || ncR != nc {
		err := fmt.Errorf("incorrect dimensions for provided result matrix, should be [%d,%d] is [%d,%d]",
			nr, nc, nrR, ncR)
		panic(err)
	}
	// Factorize/invert in place on a copy of m's data held in R.
	copy(R.DataP, m.DataP)
	if ok := lapack64.Getrf(R.RawMatrix(), iPiv); !ok {
		err = errSingular
		return
	}
	//work := make([]float64, nr*nc)
	work := WORK.DataP
	if ok := lapack64.Getri(R.RawMatrix(), iPiv, work, nr*nc); !ok {
		err = errSingular
	}
	return
}
// Inverse returns the inverse of m. Optional RO supplies a result matrix
// (RO[0]) and a LAPACK work area (RO[1]) to avoid allocation; otherwise
// both are allocated here. A 1x1 matrix short-circuits to 1/x. Returns
// errSingular when the LU factorization or back-substitution fails.
func (m Matrix) Inverse(RO ...Matrix) (R Matrix, err error) {
	var (
		nr, nc      = m.Dims()
		errSingular = fmt.Errorf("unable to invert, matrix is singular")
		WORK        Matrix
		iPiv        []int // of size nr
	)
	if len(RO) != 0 {
		// Optional matrix for result is present
		R = RO[0]
		WORK = RO[1]
		nrR, ncR := R.Dims()
		if nrR != nr || ncR != nc {
			err := fmt.Errorf("incorrect dimensions for provided result matrix, should be [%d,%d] is [%d,%d]",
				nr, nc, nrR, ncR)
			panic(err)
		}
		copy(R.DataP, m.DataP)
	} else {
		R = m.Copy()
		WORK = NewMatrix(nr, nc)
	}
	if m.IsScalar() {
		// 1x1 shortcut: the inverse is the reciprocal.
		if m.DataP[0] == 0. {
			err = errSingular
		} else {
			R.DataP[0] = 1. / m.DataP[0]
		}
		return
	}
	iPiv = make([]int, nr)
	if ok := lapack64.Getrf(R.RawMatrix(), iPiv); !ok {
		err = errSingular
		return
	}
	//work := make([]float64, nr*nc)
	work := WORK.DataP
	if ok := lapack64.Getri(R.RawMatrix(), iPiv, work, nr*nc); !ok {
		err = errSingular
	}
	return
}
// Col returns column j of m as a newly allocated Vector (a copy, not a
// view). Negative j counts back from the last column (see lim).
func (m Matrix) Col(j int) Vector {
	var (
		data   = m.RawMatrix().Data
		nr, nc = m.Dims()
		vData  = make([]float64, nr)
	)
	j = lim(j, nc)
	for i := range vData {
		vData[i] = data[i*nc+j] // row-major stride nc
	}
	return NewVector(nr, vData)
}
// Row returns row i of m as a newly allocated Vector (a copy, not a view).
// Note: unlike Col, negative indices are rejected here.
//
// Fixes: removed a dead call to lim(i, nr) — after the explicit bounds
// check above it, i is already a valid non-negative index, so the call
// could never change anything.
func (m Matrix) Row(i int) Vector {
	var (
		data   = m.RawMatrix().Data
		nr, nc = m.Dims()
		vData  = make([]float64, nc)
	)
	if i < 0 || i >= nr {
		panic(fmt.Errorf("row index is %d, either <0 or >%d", i, nr))
	}
	for j := range vData {
		vData[j] = data[j+i*nc]
	}
	return NewVector(nc, vData)
}
// Min returns the smallest element of m.
// NOTE(review): panics on an empty matrix (data[0] is read unconditionally).
func (m Matrix) Min() (min float64) {
	var (
		data = m.DataP
	)
	min = data[0]
	for _, val := range data {
		if val < min {
			min = val
		}
	}
	return
}
// Max returns the largest element of m.
// NOTE(review): panics on an empty matrix (data[0] is read unconditionally).
func (m Matrix) Max() (max float64) {
	var (
		data = m.DataP
	)
	max = data[0]
	for _, val := range data {
		if val > max {
			max = val
		}
	}
	return
}
// Avg returns the arithmetic mean of all elements of m.
// NOTE(review): an empty matrix yields 0/0 = NaN.
func (m Matrix) Avg() (avg float64) {
	var (
		data = m.DataP
		nr, nc = m.Dims()
		ntot = nr * nc
	)
	for _, val := range data {
		avg += val
	}
	avg /= float64(ntot)
	return
}
// Find returns the (row-major) linear indices of all elements satisfying
// the comparison op against val, scanning column by column so the result
// is ordered column-major. If abs is true the comparison is applied to the
// absolute value of each element.
//
// Fixes: the five near-identical scan loops (one per operator) are
// collapsed into a single loop driven by a per-operator predicate.
func (m Matrix) Find(op EvalOp, val float64, abs bool) (I Index) {
	var (
		nr, nc = m.Dims()
		data   = m.RawMatrix().Data
	)
	var match func(float64) bool
	switch op {
	case Equal:
		match = func(t float64) bool { return t == val }
	case Less:
		match = func(t float64) bool { return t < val }
	case LessOrEqual:
		match = func(t float64) bool { return t <= val }
	case Greater:
		match = func(t float64) bool { return t > val }
	case GreaterOrEqual:
		match = func(t float64) bool { return t >= val }
	default:
		// An unknown operator matched no case before and yielded an empty
		// index; preserve that behavior.
		return
	}
	for j := 0; j < nc; j++ {
		for i := 0; i < nr; i++ {
			ind := i*nc + j
			target := data[ind]
			if abs {
				target = math.Abs(target)
			}
			if match(target) {
				I = append(I, ind)
			}
		}
	}
	return
}
// SubsetVector gathers the elements of m at the linear positions in I into
// a new Vector of length len(I).
func (m Matrix) SubsetVector(I Index) (V Vector) {
	var (
		Mr = m.RawMatrix()
		//nr, nc = m.Dims()
		data = make([]float64, len(I))
	)
	for i, ind := range I {
		data[i] = Mr.Data[ind]
	}
	V = NewVector(len(I), data)
	return
}
// SumRows reduces each row of m to its sum, returning a Vector of length nr.
func (m Matrix) SumRows() (V Vector) {
	/*
		Calculates the sum of each row to form the output vector, one result per row
	*/
	var (
		nr, nc = m.Dims()
		dataM  = m.RawMatrix().Data
		dataV  = make([]float64, nr)
	)
	for i := 0; i < nr; i++ {
		for j := 0; j < nc; j++ {
			dataV[i] += dataM[i*nc+j]
		}
	}
	V = NewVector(nr, dataV)
	return
}
// SumCols reduces each column of m to its sum, returning a Vector of
// length nc.
func (m Matrix) SumCols() (V Vector) {
	/*
		Calculates the sum of each column to form the output vector, one result per column
	*/
	var (
		nr, nc = m.Dims()
		dataM  = m.RawMatrix().Data
		dataV  = make([]float64, nc)
	)
	for i := 0; i < nr; i++ {
		for j := 0; j < nc; j++ {
			dataV[j] += dataM[i*nc+j]
		}
	}
	V = NewVector(nc, dataV)
	return
}
// checkWritable panics if the matrix was marked read only; every mutating
// method calls it before touching the backing data.
func (m Matrix) checkWritable() {
	if m.readOnly {
		err := fmt.Errorf("attempt to write to a read only matrix named: \"%v\"", m.name)
		panic(err)
	}
}
// RowMajorToColMajor maps a column-major linear index (ind = i + nr*j) of
// an nr x nc matrix onto the row-major linear index of the same (i, j)
// element (cind = j + nc*i).
func RowMajorToColMajor(nr, nc, ind int) (cind int) {
	j := ind / nr
	i := ind % nr
	return j + nc*i
}
// lim remaps a possibly negative index into [0, imax): a negative i counts
// back from the end, so -1 refers to imax-1. Non-negative indices pass
// through unchanged (and are not range-checked here).
func lim(i, imax int) int {
	if i >= 0 {
		return i
	}
	return imax + i
}
// limLoop converts an inclusive [ib, ie] index pair (with negatives
// counting back from imax, -1 meaning the last element) into a half-open
// [ibeg, iend) pair suitable for a for loop.
func limLoop(ib, ie, imax int) (ibeg, iend int) {
	ibeg = ib
	if ib < 0 {
		ibeg = imax + ib
	}
	iend = ie + 1 // inclusive end -> exclusive loop bound
	if ie < 0 {
		iend = imax + ie + 1
	}
	return
}
// limRange applies limLoop to the row pair (i1, i2) against nr and the
// column pair (j1, j2) against nc, yielding half-open loop bounds.
func limRange(i1, i2, j1, j2, nr, nc int) (ii1, ii2, jj1, jj2 int) {
	ii1, ii2 = limLoop(i1, i2, nr)
	jj1, jj2 = limLoop(j1, j2, nc)
	return
}
// IndexedAssign writes the values from ValI into mI at the linear
// positions listed in I. Supported value types: []float64, Matrix, Index.
// Supported destinations: Matrix (row-major backing data), and DOK/CSR
// (stored column-major, so each row-major index is converted first).
//
// Fixes: an unsupported value type now returns a descriptive error instead
// of a confusing length-mismatch message, and the error string no longer
// ends with a newline (staticcheck ST1005).
func IndexedAssign(mI interface{}, I Index, ValI interface{}) (err error) { // Changes receiver
	var temp []float64
	switch Val := ValI.(type) {
	case []float64:
		temp = Val
	case Matrix:
		temp = Val.DataP
	case Index:
		temp = make([]float64, len(I))
		for i, val := range Val {
			temp[i] = float64(val)
		}
	default:
		return fmt.Errorf("unsupported value type %T", ValI)
	}
	if len(I) != len(temp) {
		err = fmt.Errorf("length of index and values are not equal: len(I) = %v, len(Val) = %v", len(I), len(temp))
		return
	}
	switch m := mI.(type) {
	case Matrix:
		var data = m.RawMatrix().Data
		for i, val := range temp {
			data[I[i]] = val
		}
	case DOK:
		nr, _ := m.Dims()
		for ii, val := range temp {
			// DOK is stored column major, while the composed Index for the range is row-major, so we convert it
			i, j := indexToIJColMajor(I[ii], nr)
			m.M.Set(i, j, val)
		}
	case CSR:
		nr, _ := m.Dims()
		for ii, val := range temp {
			// CSR is stored column major, while the composed Index for the range is row-major, so we convert it
			i, j := indexToIJColMajor(I[ii], nr)
			m.M.Set(i, j, val)
		}
	}
	return
}
func (m *Matrix) InverseWithCheck() (R Matrix) {
var (
err error
nr, _ = m.Dims()
msg = "possible weakly stable wrong solution ie: even spaced point distro, "
)
if R, err = m.Inverse(); err != nil {
panic(err)
}
// Perform check of the inverse to catch (previously seen) errors in the inversion
InvCheck := m.Mul(R).SumCols()
var sum float64
for _, val := range InvCheck.DataP {
sum += val
}
if math.Abs(sum-float64(nr)) > 0.000001 {
err = fmt.Errorf("Inversion of Vandermonde matrix failed with sum [%5.3f], expected [%5.3f]",
sum, float64(nr))
panic(msg + err.Error())
}
return
} | utils/matrix_extended.go | 0.621311 | 0.40645 | matrix_extended.go | starcoder |
package clpwrapper
import (
"github.com/james-bowman/sparse"
"github.com/lanl/clp"
"gonum.org/v1/gonum/floats/scalar"
"gonum.org/v1/gonum/mat"
)
// GoNumMatrixToCLPPackedMatrix converts a likely-sparse mat.Matrix into a
// CoinPackedMatrix, dropping only exact zeros (tolerance 0).
func GoNumMatrixToCLPPackedMatrix(matrix mat.Matrix) *clp.PackedMatrix {
	return GoNumMatrixToCLPPackedMatrixAtTolerance(matrix, 0.0)
}
// GoNumMatrixToCLPPackedMatrixAtTolerance converts a likely-sparse
// mat.Matrix into a CoinPackedMatrix, dropping entries whose absolute
// value is within tolerance of zero. Columns are appended in order.
func GoNumMatrixToCLPPackedMatrixAtTolerance(matrix mat.Matrix, tolerance float64) *clp.PackedMatrix {
	nRows, nCols := matrix.Dims()
	packedMat := clp.NewPackedMatrix()
	// Reserve for the dense worst case; the stored data shrinks as zeros
	// are dropped.
	packedMat.Reserve(nCols, nRows*nCols, false)
	for c := 0; c < nCols; c++ {
		col := make([]clp.Nonzero, 0)
		for r := 0; r < nRows; r++ {
			thisVal := matrix.At(r, c)
			if !scalar.EqualWithinAbs(thisVal, 0.0, tolerance) {
				col = append(col, clp.Nonzero{Index: r, Value: thisVal})
			}
		}
		packedMat.AppendColumn(col)
	}
	packedMat.SetDimensions(nRows, nCols)
	return packedMat
}
// CSCToCLPPackedMatrix converts a sparse.CSC into a CoinPackedMatrix by
// appending each column's nonzeros in order.
func CSCToCLPPackedMatrix(matrix *sparse.CSC) *clp.PackedMatrix {
	nRows, nCols := matrix.Dims()
	totalNNZ := matrix.NNZ()
	packedMat := clp.NewPackedMatrix()
	// Reserve exactly the number of stored nonzeros.
	packedMat.Reserve(nCols, totalNNZ, false)
	for c := 0; c < nCols; c++ {
		col := make([]clp.Nonzero, 0)
		matrix.DoColNonZero(c, func(i, j int, v float64) {
			ele := clp.Nonzero{Index: i, Value: v}
			col = append(col, ele)
		})
		packedMat.AppendColumn(col)
	}
	packedMat.SetDimensions(nRows, nCols)
	return packedMat
}
// COOToCLPPackedMatrix converts a sparse.COO into a CoinPackedMatrix by
// first compressing it to CSC form.
func COOToCLPPackedMatrix(matrix *sparse.COO) *clp.PackedMatrix {
	csc := matrix.ToCSC()
	return CSCToCLPPackedMatrix(csc)
}
// LoadSparseProblem loads an LP into simp from sparse constraint matrices:
// objective coefficients C, variable bounds varBounds, equality system
// AEQ x = eqBoundsVec, and inequality system AUB x within ubBounds. The
// equality rows are stacked above the inequality rows in a single matrix,
// and the row bounds are concatenated in the same order.
func LoadSparseProblem(simp *clp.Simplex, C []float64,
	varBounds, ubBounds []clp.Bounds, eqBoundsVec []float64,
	AEQ, AUB *sparse.COO) {
	nRowsEQ, nColsEQ := AEQ.Dims()
	nRowsUB, _ := AUB.Dims()
	AEQcsc := AEQ.ToCSC()
	AUBcsc := AUB.ToCSC()

	// merge together the A_eq and A_ub matrices
	mergedCOO := sparse.NewCOO(nRowsEQ+nRowsUB, nColsEQ, nil, nil, nil)
	for c := 0; c < len(C); c++ {
		AEQcsc.DoColNonZero(c, func(i, j int, v float64) {
			mergedCOO.Set(i, j, v)
		})
		// Inequality rows are shifted below the equality rows.
		AUBcsc.DoColNonZero(c, func(i, j int, v float64) {
			mergedCOO.Set(i+nRowsEQ, j, v)
		})
	}
	mergedCSC := mergedCOO.ToCSC()

	// merge the bounds
	eqBounds := BuildEqualityBounds(eqBoundsVec)
	fullBounds := append(eqBounds, ubBounds...)

	// Load the problem into the model.
	cpm := CSCToCLPPackedMatrix(mergedCSC)
	simp.LoadProblem(cpm, varBounds, C, fullBounds, nil)
}
// eof
package areas
/*
BuildCache - queries all Residential Property listings, grabs their (locations) community, municipality and area
columns and puts all unique values in a slice called Slice.
OneLetterMap - every unique letter (made lower case) from all locations are added to this map as keys,
the value of each key is a slice filled with pointers to all values from Slice that contain the unique
letter in question.
i.e. index "a" contains a slice with oshawa, ancaster, bowmanville and every other location containing an "a"
TwoLetterMap - every unique pair of letters (made lower case) within a location string is add to this map
as a key. Each key maps to a slice of pointers to locations with Slice that container the letter pairing.
i.e. index "ca" contains a slice with newcastle, ancaster, etc.
-----------------------------------
There are approximately 1400 unique locations in the database, when a user searches a specific string,
if the string is one letter, it searches the OneLetterMap, this reduces the amount of comparisons by a
decent amount if the letter is less popular, like g or p.
If two letters or more are input then the system uses the TwoLetterMap for lookup, this significantly reduces
the amount of comparisons, in cases such as aj for ajax, comparisons go from 1400 to < 10.
*/
import (
"context"
"strings"
"github.com/MarcusOuelletus/rets-server/database"
"github.com/MarcusOuelletus/rets-server/templates"
"github.com/golang/glog"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
)
// cacheData is the in-memory search index over all known location names.
type cacheData struct {
	Slice        []string             // every unique location string
	OneLetterMap map[string][]*string // lowercase letter -> locations containing it
	TwoLetterMap map[string][]*string // lowercase letter pair -> locations containing it
}

// cacheObject is the package-wide singleton populated by BuildCache and
// read by the location search.
var cacheObject = &cacheData{
	Slice:        nil,
	OneLetterMap: make(map[string][]*string),
	TwoLetterMap: make(map[string][]*string),
}

// cache bundles the database handle used while (re)building the index.
type cache struct {
	db *mongo.Database
}

// row is the projection of a listing document down to its location fields.
type row struct {
	Area         string `bson:"Area"`
	Municipality string `bson:"Municipality"`
	Community    string `bson:"Community"`
}
// BuildCache populates the package-level cacheObject: Slice receives every
// unique location string from the database, and the one-/two-letter maps
// receive pointers into Slice keyed by the lowercased letters and letter
// pairs each location contains. This lets the search box answer
// per-keystroke lookups without querying the database each time.
func BuildCache() error {
	var err error
	var c = new(cache)

	c.db, err = database.OpenDatabase()
	if err != nil {
		return err
	}
	// Release the client once the cache has been (re)built.
	defer c.db.Client().Disconnect(context.TODO())

	locationMap, err := c.createLocationMap()
	if err != nil {
		return err
	}

	c.populateCacheSlice(locationMap)

	return nil
}
// populateCacheSlice rebuilds cacheObject from the location set m: it
// fills Slice with the unique locations and indexes each one into the
// one- and two-letter maps.
//
// Fixes: the previous version reused a stale Slice (allocating only when
// nil) and never reset the letter maps, so a second BuildCache call could
// index out of range or accumulate duplicate pointers. It also deleted
// entries from the caller's map mid-iteration to no benefit.
func (c *cache) populateCacheSlice(m map[string]bool) {
	cacheObject.Slice = make([]string, len(m))
	cacheObject.OneLetterMap = make(map[string][]*string)
	cacheObject.TwoLetterMap = make(map[string][]*string)

	index := 0
	for location := range m {
		cacheObject.Slice[index] = location
		c.addLettersToCacheSingle(index)
		c.addLettersToCacheDouble(index)
		index++
	}
}
// addLettersToCacheSingle indexes cacheObject.Slice[indexOfString] into
// OneLetterMap: each distinct lowercased letter of the location maps to a
// pointer at the location string.
//
// Fixes: deduplication now happens after lowercasing; previously "A" and
// "a" in the same location produced two duplicate entries under key "a".
func (c *cache) addLettersToCacheSingle(indexOfString int) {
	seen := make(map[string]bool)
	for _, theRune := range cacheObject.Slice[indexOfString] {
		letter := strings.ToLower(string(theRune))
		if seen[letter] {
			continue
		}
		seen[letter] = true
		cacheObject.OneLetterMap[letter] = append(cacheObject.OneLetterMap[letter], &cacheObject.Slice[indexOfString])
	}
}
// addLettersToCacheDouble indexes cacheObject.Slice[indexOfString] into
// TwoLetterMap: each distinct lowercased pair of adjacent characters maps
// to a pointer at the location string.
//
// Fixes: pairs are now formed from runes, matching the rune-based
// single-letter index; the previous byte-based pairing split multi-byte
// UTF-8 characters into invalid pairs.
func (c *cache) addLettersToCacheDouble(indexOfString int) {
	location := []rune(cacheObject.Slice[indexOfString])
	seen := make(map[string]bool)
	for i := 0; i+1 < len(location); i++ {
		pair := strings.ToLower(string(location[i]) + string(location[i+1]))
		if !seen[pair] {
			seen[pair] = true
			cacheObject.TwoLetterMap[pair] = append(cacheObject.TwoLetterMap[pair], &cacheObject.Slice[indexOfString])
		}
	}
}
// createLocationMap gathers every distinct, non-empty Area, Municipality
// and Community value across the res, con and com collections into a set.
func (c *cache) createLocationMap() (map[string]bool, error) {
	var m = make(map[string]bool)

	for _, collection := range [3]string{"res", "con", "com"} {
		connection := c.db.Collection(collection)

		cursor, err := c.getQueryCursor(connection)
		if err != nil {
			return nil, err
		}

		if err := c.addRowsToMap(cursor, m); err != nil {
			return nil, err
		}
	}

	return m, nil
}
// getQueryCursor runs an unfiltered find on the collection, projecting
// only the three location fields the cache needs.
func (c *cache) getQueryCursor(connection *mongo.Collection) (*mongo.Cursor, error) {
	var queryOptions = &options.FindOptions{
		Projection: bson.M{
			templates.Fields.Area:         1,
			templates.Fields.Community:    1,
			templates.Fields.Municipality: 1,
		},
	}

	cursor, err := connection.Find(context.Background(), primitive.M{}, queryOptions)
	if err != nil {
		glog.Errorln("error performing Select, Find() failed")
		return nil, err
	}

	return cursor, nil
}
func (c *cache) addRowsToMap(cursor *mongo.Cursor, m map[string]bool) error {
for cursor.Next(context.TODO()) {
var r = new(row)
if err := cursor.Decode(r); err != nil {
return err
}
for _, location := range [3]string{r.Area, r.Community, r.Municipality} {
if location != "" {
m[location] = true
}
}
}
return nil
} | app/areas/cache.go | 0.63409 | 0.502197 | cache.go | starcoder |
package bulletproofs
import (
"errors"
"math/big"
"github.com/ing-bank/zkrp/crypto/p256"
"github.com/ing-bank/zkrp/util/bn"
)
/*
VectorCopy returns a vector composed by copies of a.
*/
func VectorCopy(a *big.Int, n int64) ([]*big.Int, error) {
var (
i int64
result []*big.Int
)
result = make([]*big.Int, n)
i = 0
for i < n {
result[i] = a
i = i + 1
}
return result, nil
}
/*
VectorConvertToBig converts the first n entries of a into a slice of
newly allocated big.Int values.
*/
func VectorConvertToBig(a []int64, n int64) ([]*big.Int, error) {
	result := make([]*big.Int, n)
	for i := int64(0); i < n; i++ {
		result[i] = big.NewInt(a[i])
	}
	return result, nil
}
/*
VectorAdd computes the componentwise sum of a and b, reducing each entry
modulo the group ORDER. Returns an error when the slices differ in length.
*/
func VectorAdd(a, b []*big.Int) ([]*big.Int, error) {
	if len(a) != len(b) {
		return nil, errors.New("Size of first argument is different from size of second argument.")
	}
	result := make([]*big.Int, len(a))
	for i := range a {
		result[i] = bn.Mod(bn.Add(a[i], b[i]), ORDER)
	}
	return result, nil
}
/*
VectorSub computes the componentwise difference a - b, reducing each entry
modulo the group ORDER. Returns an error when the slices differ in length.
*/
func VectorSub(a, b []*big.Int) ([]*big.Int, error) {
	var (
		result  []*big.Int
		i, n, m int64
	)
	n = int64(len(a))
	m = int64(len(b))
	if n != m {
		return nil, errors.New("Size of first argument is different from size of second argument.")
	}
	i = 0
	result = make([]*big.Int, n)
	for i < n {
		result[i] = bn.Sub(a[i], b[i])
		result[i] = bn.Mod(result[i], ORDER)
		i = i + 1
	}
	return result, nil
}
/*
VectorScalarMul multiplies every entry of a by the scalar b, reducing each
product modulo the group ORDER.
*/
func VectorScalarMul(a []*big.Int, b *big.Int) ([]*big.Int, error) {
	result := make([]*big.Int, len(a))
	for i := range a {
		result[i] = bn.Mod(bn.Multiply(a[i], b), ORDER)
	}
	return result, nil
}
/*
VectorMul computes the componentwise (Hadamard) product of a and b,
reducing each entry modulo the group ORDER. Returns an error when the
slices differ in length.
*/
func VectorMul(a, b []*big.Int) ([]*big.Int, error) {
	var (
		result  []*big.Int
		i, n, m int64
	)
	n = int64(len(a))
	m = int64(len(b))
	if n != m {
		return nil, errors.New("Size of first argument is different from size of second argument.")
	}
	i = 0
	result = make([]*big.Int, n)
	for i < n {
		result[i] = bn.Multiply(a[i], b[i])
		result[i] = bn.Mod(result[i], ORDER)
		i = i + 1
	}
	return result, nil
}
/*
VectorECMul computes vector EC addition componentwisely.
*/
func VectorECAdd(a, b []*p256.P256) ([]*p256.P256, error) {
var (
result []*p256.P256
i, n, m int64
)
n = int64(len(a))
m = int64(len(b))
if n != m {
return nil, errors.New("Size of first argument is different from size of second argument.")
}
result = make([]*p256.P256, n)
i = 0
for i < n {
result[i] = new(p256.P256).Multiply(a[i], b[i])
i = i + 1
}
return result, nil
} | crypto/vendor/ing-bank/zkrp/bulletproofs/vector.go | 0.687945 | 0.536252 | vector.go | starcoder |
package ptime
import(
"fmt"
"time"
"math"
"sort"
)
// Location describes an observer's position and UTC offset, used for the
// solar-time corrections.
type Location struct {
	Lat  float64 // latitude in degrees
	Long float64 // longitude in degrees
	Tz   int     // timezone offset from UTC in hours
}

// Method pairs a per-prayer calculation rule name ("angle", "-angle",
// "dhuhr", "asr", "fajr", "maghrib") with its numeric parameter
// (degrees or minutes, depending on the rule; see calculateTimes).
type Method struct {
	name   string
	number float64
}

// PrayerTime is one named prayer time, stored as fractional hours,
// together with the rule used to compute it.
type PrayerTime struct {
	Label  string
	Time   float64
	method Method
}
// PrayerTimes implements sort.Interface, ordering entries by time of day.
type PrayerTimes []PrayerTime

func (p PrayerTimes) Len() int           { return len(p) }
func (p PrayerTimes) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p PrayerTimes) Less(i, j int) bool { return p[i].Time < p[j].Time }
// adjJulian shifts the Julian date by the observer's longitude
// (15 degrees per hour, 24 hours per day) so subsequent solar
// calculations are referenced to local solar time.
func adjJulian(jul float64, loc Location) float64 {
	jul = jul - loc.Long/(24.0*15.0)
	return jul
}
func julian(dt time.Time) float64{
year := float64(dt.Year())
month := float64(dt.Month())
day := float64(dt.Day())
hour := float64(dt.Hour())
minute := float64(dt.Minute())
second := float64(dt.Second())
if month <= 2 {
year -= 1
month += 12
}
a := math.Floor(year/100.0)
b := 2 - a + math.Floor(a / 4.0)
jd := math.Floor(365.25 * (year + 4716.0)) + math.Floor(30.6001 * (month + 1)) + day + b - 1524.5
mins := hour * 60.0 + minute + second / 60.0
return jd + mins/1440.0
}
func fixDate(date time.Time) time.Time{
return time.Date(date.Year(), date.Month(), date.Day(), 0.0, 0.0, 0.0, 0.0, date.Location())
}
// fix wraps num into the half-open interval [0, den).
func fix(num, den float64) float64 {
	r := num - den*math.Floor(num/den)
	if r < 0 {
		// Guard against floating-point residue below zero.
		return r + den
	}
	return r
}

// fixAngle normalizes an angle in degrees to [0, 360).
func fixAngle(deg float64) float64 {
	return fix(deg, 360.0)
}

// FixHour normalizes an hour-of-day value to [0, 24).
func FixHour(hr float64) float64 {
	return fix(hr, 24.0)
}
func rad(deg float64) float64{
return deg*math.Pi/180.0
}
func deg(rad float64) float64{
return rad*180.0/math.Pi
}
// sunPosition returns the equation of time (hours) and the solar
// declination (degrees) at Julian date jd, using a low-precision
// closed-form solar model: g is the mean anomaly, q the mean longitude,
// L the ecliptic longitude and e the obliquity of the ecliptic.
func sunPosition(jd float64) (float64, float64) {
	D := jd - 2451545.0 // days since the J2000.0 epoch
	g := fixAngle(357.529 + 0.98560028 * D)
	q := fixAngle(280.459 + 0.98564736 * D)
	L := fixAngle(q + 1.915 * math.Sin(rad(g)) + 0.020* math.Sin(2*rad(g)))
	//R := 1.00014 - 0.01671* math.Cos(rad(g)) - 0.00014* math.Cos(2*rad(g));
	e := 23.439 - 0.00000036* D
	// Right ascension converted from degrees to hours (divide by 15).
	RA := deg(math.Atan2(math.Cos(rad(e))* math.Sin(rad(L)), math.Cos(rad(L))))/ 15.0
	eqt := q/15 - FixHour(RA)
	decl := deg(math.Asin(math.Sin(rad(e))* math.Sin(rad(L))))
	return eqt, decl
}
// dhuhrTime returns the local clock time (fractional hours) of solar noon:
// 12:00 corrected for the timezone, the longitude offset from the zone
// meridian (15 degrees per hour), and the equation of time.
func dhuhrTime(loc Location, julT float64) float64 {
	eqt, _ := sunPosition(julT)
	return 12.0 + float64(loc.Tz) - loc.Long/15.0 - eqt
}
func acot(n float64) float64{
return math.Atan(1/n)
}
// asrTime returns the asr time: the afternoon moment when the sun's
// elevation satisfies cot(elevation) = factor + tan(|lat - declination|),
// where factor is the shadow-length ratio (callers pass 1 here;
// presumably 2 would give the Hanafi rule — confirm with callers).
func asrTime(loc Location, julT, dhuhr, factor float64) float64 {
	_, dec := sunPosition(julT)
	// Negative sign: the target elevation is above the horizon, while
	// timeAngle expects a depression angle.
	angle := -math.Atan(1/(factor + math.Tan(rad(math.Abs(loc.Lat - dec)))))
	aTime := timeAngle(loc.Lat, julT, dhuhr, angle, 1)
	//fmt.Println(angle, aTime)
	return aTime
}
// timeAngle returns the clock time at which the sun reaches the given
// depression angle (radians), expressed as an hour offset from dhuhr;
// dir is -1 for morning events (before noon) and +1 for evening events.
func timeAngle(lat, julT, dhuhr, angle, dir float64) float64 {
	_, decl := sunPosition(julT)
	lat = rad(lat)
	decl = rad(decl)
	// Hour angle from the standard sunrise equation, converted to hours.
	tmAngle := 1/15.0*deg(math.Acos((-math.Sin(angle) - math.Sin(lat)*math.Sin(decl))/(math.Cos(lat) * math.Cos(decl))))
	return dhuhr + dir*tmAngle
}
// FormatTime splits a fractional-hours value into whole hours and minutes
// (both truncated toward zero for non-negative inputs).
func FormatTime(angularT float64) (int, int) {
	h := math.Floor(angularT)
	m := math.Floor((angularT - h) * 60)
	return int(h), int(m)
}
// initTimes returns the prayer-time table seeded with rough starting hours
// and the per-prayer calculation rules for the named method. Supported
// methods: ISNA, MWL, EGAS, Makkah, Karachi, Tehran, Jafari.
//
// Fixes: an unknown method name now returns an error; previously the
// declared error return was never used and an unknown method panicked
// with an index-out-of-range when indexing the nil map entry.
func initTimes(method string) (*PrayerTimes, error) {
	// Rules are listed in the same order as ptimes below:
	// dhuhr, fajr, imsak, sunrise, asr, sunset, maghrib, isha.
	calcMethods := map[string][]Method{
		"ISNA": {
			{"dhuhr", 0.0},
			{"-angle", 15.0},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 0.833},
			{"angle", 15.0},
		},
		"MWL": {
			{"dhuhr", 0.0},
			{"-angle", 18.0},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 0.833},
			{"angle", 17.0},
		},
		"EGAS": {
			{"dhuhr", 0.0},
			{"-angle", 19.5},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 0.833},
			{"angle", 17.5},
		},
		"Makkah": {
			{"dhuhr", 0.0},
			{"-angle", 18.5},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 0.833},
			{"maghrib", 90.0},
		},
		"Karachi": {
			{"dhuhr", 0.0},
			{"-angle", 18.0},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 0.833},
			{"angle", 18.0},
		},
		"Tehran": {
			{"dhuhr", 0.0},
			{"-angle", 17.7},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 4.5},
			{"angle", 14.0},
		},
		"Jafari": {
			{"dhuhr", 0.0},
			{"-angle", 16.0},
			{"fajr", -10.0},
			{"-angle", 0.833},
			{"asr", 1.0},
			{"angle", 0.833},
			{"angle", 4.0},
			{"angle", 14.0},
		},
	}
	methods, ok := calcMethods[method]
	if !ok {
		return nil, fmt.Errorf("unknown calculation method %q", method)
	}
	// Seed times are rough local hours; calculateTimes refines them.
	ptimes := PrayerTimes{
		{"dhuhr", 12.0, Method{"", 0.0}},
		{"fajr", 5.0, Method{"", 0.0}},
		{"imsak", 5.0, Method{"", 0.0}},
		{"sunrise", 6.0, Method{"", 0.0}},
		{"asr", 13.0, Method{"", 0.0}},
		{"sunset", 18.0, Method{"", 0.0}},
		{"maghrib", 18.0, Method{"", 0.0}},
		{"isha", 18.0, Method{"", 0.0}},
	}
	for i := 0; i < 8; i++ {
		ptimes[i].method = methods[i]
	}
	return &ptimes, nil
}
// DispTimes sorts the prayer times chronologically and prints each one as
// "label\tHH:MM".
// NOTE(review): the loop hard-codes 8 entries; confirm callers never pass
// a shorter slice.
func DispTimes(ptimes PrayerTimes) {
	sort.Sort(ptimes)
	for i := 0; i < 8; i++ {
		time := ptimes[i].Time
		label := ptimes[i].Label
		//fmt.Println(ptimes[i].label, time)
		// Add half a minute so FormatTime's floor effectively rounds to
		// the nearest minute.
		time = FixHour(time + 0.5/60.0);
		hr, min := FormatTime(time)
		fmt.Printf("%s\t%.2d:%.2d\n", label, hr, min)
	}
}
// calculateTimes fills in each prayer time in ptimes (in place, via the
// shared slice backing) by applying its Method rule. The sun position is
// re-evaluated at each event's current rough time (jul + time/24), and the
// pre map carries the computed dhuhr/fajr/maghrib forward for rules that
// are defined relative to them.
func calculateTimes(ptimes PrayerTimes, jul float64, loc Location) {
	pre := map[string]float64{"dhuhr": 0.0, "fajr": 0.0, "maghrib": 0.0}
	for i := 0; i < 8; i++ {
		adjT := jul + ptimes[i].Time/24.0
		switch ptimes[i].method.name {
		case "dhuhr":
			// Solar noon; the angle-based events are offsets from it.
			pre["dhuhr"] = dhuhrTime(loc, adjT)
			ptimes[i].Time = pre["dhuhr"]
		case "angle":
			// Sun at the given depression angle after noon (dir = +1).
			angle := rad(ptimes[i].method.number)
			ptimes[i].Time = timeAngle(loc.Lat, adjT, pre["dhuhr"], angle, 1)
			pre[ptimes[i].Label] = ptimes[i].Time
		case "asr":
			factor := ptimes[i].method.number
			ptimes[i].Time = asrTime(loc, adjT, pre["dhuhr"], factor)
		case "fajr":
			// Fixed minute offset relative to fajr (e.g. imsak = -10 min).
			ptimes[i].Time = pre["fajr"] + ptimes[i].method.number/60.0
		case "maghrib":
			// Fixed minute offset after maghrib (e.g. Makkah isha = +90).
			ptimes[i].Time = pre["maghrib"] + ptimes[i].method.number/60.0
		case "-angle":
			// Sun at the given depression angle before noon (dir = -1).
			angle := rad(ptimes[i].method.number)
			ptimes[i].Time = timeAngle(loc.Lat, adjT, pre["dhuhr"], angle, -1)
			pre[ptimes[i].Label] = ptimes[i].Time
		}
	}
}
func GenTimes(date time.Time, loc Location, method string) PrayerTimes{
date = fixDate(date)
timeRef, _ := initTimes(method)
ptimes := *timeRef
jul := adjJulian(julian(date), loc)
calculateTimes(ptimes, jul, loc)
return ptimes
} | ptime.go | 0.553143 | 0.512266 | ptime.go | starcoder |
package main
import (
"fmt"
"io/ioutil"
"time"
"github.com/loov/hrtime"
"github.com/loov/plot"
)
// main renders each example plot to an SVG file in the working directory.
func main() {
	DensityPlot()
	PercentilesPlot()
	TimingPlot()
	StackedPlot()
}
// N is the number of timed iterations (laps) each benchmark records.
const N = 5000
// TimingPlot demonstrates how to plot timing values based on the order:
// it benchmarks N short sleeps and writes the lap times, in iteration
// order, to timing.svg.
//
// Fixes: the ioutil.WriteFile error was silently discarded.
func TimingPlot() {
	fmt.Println("Timing Plot (timing.svg)")

	bench := hrtime.NewBenchmark(N)
	for bench.Next() {
		time.Sleep(5000 * time.Nanosecond)
	}
	seconds := plot.DurationToSeconds(bench.Laps())

	p := plot.New()
	p.Margin = plot.R(5, 0, 0, 5)
	p.AddGroup(
		plot.NewGrid(),
		plot.NewGizmo(),
		plot.NewLine("", plot.Points(nil, seconds)),
		plot.NewTickLabels(),
	)

	svg := plot.NewSVG(800, 300)
	p.Draw(svg)
	if err := ioutil.WriteFile("timing.svg", svg.Bytes(), 0755); err != nil {
		panic(err)
	}
}
// DensityPlot demonstrates how to create a density plot from benchmark lap
// times, written to density.svg.
//
// Fixes: the ioutil.WriteFile error was silently discarded.
func DensityPlot() {
	fmt.Println("Density Plot (density.svg)")

	bench := hrtime.NewBenchmark(N)
	for bench.Next() {
		time.Sleep(5000 * time.Nanosecond)
	}
	seconds := plot.DurationToSeconds(bench.Laps())

	p := plot.New()
	p.Margin = plot.R(5, 0, 0, 5)
	p.AddGroup(
		plot.NewGrid(),
		plot.NewGizmo(),
		plot.NewDensity("", seconds),
		plot.NewTickLabels(),
	)

	svg := plot.NewSVG(800, 300)
	p.Draw(svg)
	if err := ioutil.WriteFile("density.svg", svg.Bytes(), 0755); err != nil {
		panic(err)
	}
}
// PercentilesPlot demonstrates how to create a percentiles plot from
// benchmark lap times, written to percentiles.svg. The X axis uses a
// percentile scale rather than a linear one.
//
// Fixes: the ioutil.WriteFile error was silently discarded.
func PercentilesPlot() {
	fmt.Println("Percentiles Plot (percentiles.svg)")

	bench := hrtime.NewBenchmark(N)
	for bench.Next() {
		time.Sleep(5000 * time.Nanosecond)
	}
	seconds := plot.DurationToSeconds(bench.Laps())

	p := plot.New()
	p.Margin = plot.R(5, 0, 0, 5)
	p.X = plot.NewPercentilesAxis()
	p.AddGroup(
		plot.NewGrid(),
		plot.NewGizmo(),
		plot.NewPercentiles("", seconds),
		plot.NewTickLabels(),
	)

	svg := plot.NewSVG(800, 300)
	p.Draw(svg)
	if err := ioutil.WriteFile("percentiles.svg", svg.Bytes(), 0755); err != nil {
		panic(err)
	}
}
// StackedPlot demonstrates how to combine plots
func StackedPlot() {
fmt.Println("Stacked Plot (stacked.svg)")
bench := hrtime.NewBenchmark(N)
for bench.Next() {
time.Sleep(5000 * time.Nanosecond)
}
p := plot.New()
stack := plot.NewVStack()
stack.Margin = plot.R(5, 5, 5, 5)
p.Add(stack)
seconds := plot.DurationToSeconds(bench.Laps())
stack.Add(plot.NewAxisGroup(
plot.NewGrid(),
plot.NewGizmo(),
plot.NewLine("", plot.Points(nil, seconds)),
plot.NewTickLabels(),
))
stack.Add(plot.NewAxisGroup(
plot.NewGrid(),
plot.NewGizmo(),
plot.NewDensity("", seconds),
plot.NewTickLabels(),
))
percentiles := plot.NewAxisGroup(
plot.NewGrid(),
plot.NewGizmo(),
plot.NewPercentiles("", seconds),
plot.NewTickLabels(),
)
percentiles.X = plot.NewPercentilesAxis()
stack.Add(percentiles)
svg := plot.NewSVG(800, 600)
p.Draw(svg)
ioutil.WriteFile("stacked.svg", svg.Bytes(), 0755)
} | _example/plotting/main.go | 0.756717 | 0.492798 | main.go | starcoder |
package linalg
import (
"fmt"
"math"
)
const (
	// NEARLY_EQUAL_TOLERANCE is the absolute per-element tolerance used by
	// the Equal methods to compare floating-point values.
	NEARLY_EQUAL_TOLERANCE = 0.00001
)

// VectorStructure is a dense vector of float64 values.
type VectorStructure []float64
// NewVector returns a vector holding a copy of the given values, so later
// mutation of the input slice does not affect the vector.
func NewVector(values []float64) VectorStructure {
	vector := make(VectorStructure, len(values))
	copy(vector, values)
	return vector
}
// Size returns the number of elements in the vector.
func (v VectorStructure) Size() int {
	return len(v)
}
// Norm returns the p-norm of the vector for order > 0. For order == 0 it
// returns the maximum element (the signed maximum, not the absolute one);
// for negative orders it returns 0.
func (v VectorStructure) Norm(order float64) float64 {
	if order > 0 {
		var total float64
		for _, value := range v {
			total += math.Pow(math.Abs(value), order)
		}
		return math.Pow(total, 1/order)
	}
	if order == 0 {
		largest := math.Inf(-1)
		for _, value := range v {
			if value > largest {
				largest = value
			}
		}
		return largest
	}
	return 0
}
// Dot returns the dot product of the two vectors. It panics when the
// vectors differ in length.
func (v VectorStructure) Dot(v2 VectorStructure) float64 {
	if v.Size() != v2.Size() {
		panic("Vectors should've the same size.")
	}
	total := 0.0
	for i := range v {
		total += v[i] * v2[i]
	}
	return total
}
// ScalarMul returns a new vector with every element multiplied by lambda.
func (v VectorStructure) ScalarMul(lambda float64) VectorStructure {
	scaled := make([]float64, len(v))
	for i, value := range v {
		scaled[i] = lambda * value
	}
	return NewVector(scaled)
}
// Equal reports whether the two vectors have the same length and every
// pair of elements differs by at most NEARLY_EQUAL_TOLERANCE.
func (v VectorStructure) Equal(v2 VectorStructure) bool {
	if v.Size() != v2.Size() {
		return false
	}
	for i := range v {
		if math.Abs(v[i]-v2[i]) > NEARLY_EQUAL_TOLERANCE {
			return false
		}
	}
	return true
}
// Sum returns the element-wise sum of the two vectors. It panics when the
// vectors differ in length.
func (v VectorStructure) Sum(v2 VectorStructure) VectorStructure {
	if v.Size() != v2.Size() {
		panic("Vectors should've the same size.")
	}
	sums := make([]float64, len(v))
	for i, value := range v {
		sums[i] = value + v2[i]
	}
	return NewVector(sums)
}
// Minus returns the element-wise difference v - v2, implemented as the sum
// with the negated second operand. Panics (via Sum) on mismatched sizes.
func (v VectorStructure) Minus(v2 VectorStructure) VectorStructure {
	return v.Sum(v2.ScalarMul(-1))
}
// Insert returns a new vector with value inserted at index i; the receiver
// is left unmodified.
// Fix: the previous nested-append implementation wrote through v's backing
// array (append(v[:i], ...) reuses v's storage when capacity allows),
// silently corrupting the receiver. A fresh slice is built instead.
func (v VectorStructure) Insert(i int, value float64) VectorStructure {
	newValues := make([]float64, 0, len(v)+1)
	newValues = append(newValues, v[:i]...)
	newValues = append(newValues, value)
	newValues = append(newValues, v[i:]...)
	return NewVector(newValues)
}
// Remove returns a new vector with the element at index i removed; the
// receiver is left unmodified.
// Fix: the previous append(v[:i], v[i+1:]...) shifted elements inside v's
// own backing array before copying, mutating the receiver as a side effect.
func (v VectorStructure) Remove(i int) VectorStructure {
	newValues := make([]float64, 0, len(v)-1)
	newValues = append(newValues, v[:i]...)
	newValues = append(newValues, v[i+1:]...)
	return NewVector(newValues)
}
type MatrixStructure [][]float64
// NewMatrix builds a matrix from the given rows. The row slices are shared,
// not copied. It panics when the rows have inconsistent lengths.
func NewMatrix(values [][]float64) MatrixStructure {
	width := len(values[0])
	matrix := make(MatrixStructure, 0, len(values))
	for _, row := range values {
		if len(row) != width {
			panic("Matrix shape is not valid.")
		}
		matrix = append(matrix, row)
	}
	return matrix
}
// Eye returns the n-by-n identity matrix.
func Eye(n int) MatrixStructure {
	matrix := make(MatrixStructure, n)
	for i := range matrix {
		row := make([]float64, n)
		row[i] = 1
		matrix[i] = row
	}
	return matrix
}
// Copy returns a deep copy of the matrix: both the row slice and every row
// are duplicated.
func (m MatrixStructure) Copy() MatrixStructure {
	duplicate := make(MatrixStructure, len(m))
	for i, row := range m {
		fresh := make([]float64, len(row))
		copy(fresh, row)
		duplicate[i] = fresh
	}
	return duplicate
}
// Shape returns {rows, columns}, taking the column count from the first row.
func (m MatrixStructure) Shape() []int {
	return []int{len(m), len(m[0])}
}
// Transpose returns a new matrix that is the transpose of m.
func (m MatrixStructure) Transpose() MatrixStructure {
	rows, cols := len(m), len(m[0])
	transposed := make([][]float64, cols)
	for i := 0; i < cols; i++ {
		row := make([]float64, rows)
		for j := 0; j < rows; j++ {
			row[j] = m[j][i]
		}
		transposed[i] = row
	}
	// Currently doesn't make sense to call NewMatrix, but probably in the
	// short term the MatrixStructure will get more complex.
	return NewMatrix(transposed)
}
// Mul returns the matrix product m * m2 using the textbook O(n^3)
// algorithm. It panics when the inner dimensions do not match.
func (m MatrixStructure) Mul(m2 MatrixStructure) MatrixStructure {
	leftShape, rightShape := m.Shape(), m2.Shape()
	if leftShape[1] != rightShape[0] {
		panic("Matrices shapes are not valid.")
	}
	product := make([][]float64, 0, leftShape[0])
	for i := 0; i < leftShape[0]; i++ {
		row := make([]float64, rightShape[1])
		for j := 0; j < rightShape[1]; j++ {
			total := 0.0
			for k := 0; k < leftShape[1]; k++ {
				total += m[i][k] * m2[k][j]
			}
			row[j] = total
		}
		product = append(product, row)
	}
	return NewMatrix(product)
}
// Map returns a new matrix whose elements are f applied to each element of m.
func (m MatrixStructure) Map(f func(float64) float64) MatrixStructure {
	mapped := make([][]float64, len(m))
	for i, row := range m {
		fresh := make([]float64, len(row))
		for j, value := range row {
			fresh[j] = f(value)
		}
		mapped[i] = fresh
	}
	return NewMatrix(mapped)
}
// LUDecomposition factorises the matrix into a unit lower-triangular L and
// an upper-triangular U via Gaussian elimination (no pivoting), so that
// m = L * U.
// NOTE(review): the outer loop normally completes in one pass; a zero pivot
// (U[col][col] == 0 with nonzero entries below it) would produce Inf/NaN
// factors and may keep IsUpperDiagonal false forever — no pivoting or
// singularity check is performed here.
func (m MatrixStructure) LUDecomposition() (MatrixStructure, MatrixStructure) {
	shape := m.Shape()
	L := Eye(shape[0])
	U := m.Copy()
	for !U.IsUpperDiagonal() {
		for col := 0; col < shape[1]-1; col++ {
			for row := col + 1; row < shape[0]; row++ {
				value := U[row][col]
				if value != 0 {
					// Eliminate U[row][col] and record the multiplier in L.
					factor := value / U[col][col]
					L[row][col] = factor
					U = U.RowOperation(row, col, -factor)
				}
			}
		}
	}
	return L, U
}
// IsLowerDiagonal reports whether every element strictly above the main
// diagonal is zero (i.e. the matrix is lower triangular).
func (m MatrixStructure) IsLowerDiagonal() bool {
	for i, row := range m {
		for j := i + 1; j < len(row); j++ {
			if row[j] != 0 {
				return false
			}
		}
	}
	return true
}
// IsUpperDiagonal reports whether the matrix is upper triangular, by
// checking that its transpose is lower triangular.
func (m MatrixStructure) IsUpperDiagonal() bool {
	return m.Transpose().IsLowerDiagonal()
}
// IsDiagonal reports whether the matrix is lower OR upper triangular — the
// shapes the triangular solver accepts. A strictly diagonal matrix
// satisfies both; despite the name, a merely triangular matrix also passes.
func (m MatrixStructure) IsDiagonal() bool {
	return m.IsLowerDiagonal() || m.IsUpperDiagonal()
}
// SolveDiagonalSystem solves m * x = v for a triangular m using forward
// substitution (lower triangular) or back substitution (upper triangular).
// It panics when the sizes are inconsistent or m is not triangular.
// NOTE(review): zero diagonal entries are not checked and yield ±Inf/NaN.
func (m MatrixStructure) SolveDiagonalSystem(v VectorStructure) VectorStructure {
	// Upper or lower diagonal?
	// TODO: assuming lower diagonal matrix, make this work in a flexible way.
	matrixShape := m.Shape()
	vectorSize := v.Size()
	if matrixShape[0] != vectorSize {
		panic("Vector size and matrix shape aren't consistent.")
	}
	// Create slice of the values of the solution vector, initializing it with
	// the first diagonal element of matrix.
	// TODO: DRY!!
	solutionValues := make([]float64, matrixShape[0])
	switch {
	case m.IsLowerDiagonal():
		// Forward substitution: rows are resolved top-down, subtracting the
		// already-known components before dividing by the pivot.
		solutionValues[0] = v[0] / m[0][0]
		for row := 1; row < matrixShape[0]; row++ {
			value := v[row]
			for col := 0; col < row; col++ {
				value = value - m[row][col]*solutionValues[col]
			}
			solutionValues[row] = value / m[row][row]
		}
	case m.IsUpperDiagonal():
		// Back substitution: rows are resolved bottom-up.
		solutionValues[matrixShape[0]-1] = v[matrixShape[0]-1] / m[matrixShape[0]-1][matrixShape[0]-1]
		for row := matrixShape[0] - 2; row >= 0; row-- {
			value := v[row]
			for col := row + 1; col < matrixShape[1]; col++ {
				value = value - m[row][col]*solutionValues[col]
			}
			solutionValues[row] = value / m[row][row]
		}
	default:
		panic("Not a diagonal system.")
	}
	return NewVector(solutionValues)
}
// SolveSystem solves m * x = v. Triangular systems are solved directly;
// otherwise m is LU-decomposed and the two triangular systems
// L*y = v and U*x = y are solved in turn.
func (m MatrixStructure) SolveSystem(v VectorStructure) VectorStructure {
	if m.IsDiagonal() {
		return m.SolveDiagonalSystem(v)
	}
	L, U := m.LUDecomposition()
	y := L.SolveDiagonalSystem(v)
	return U.SolveDiagonalSystem(y)
}
// Performs the following row operation:
// m[row] = m[row] + factor * m[otherRow]
// The operation is expressed as a multiplication by an elementary matrix E,
// so the receiver is not modified and a new matrix is returned.
// NOTE(review): the full matrix multiply makes a single row operation
// O(n^3); a direct in-row update would be O(n) — confirm before optimizing,
// as the multiply also refreshes every row's storage.
func (m MatrixStructure) RowOperation(row int, otherRow int, factor float64) MatrixStructure {
	E := Eye(m.Shape()[0])
	E[row][otherRow] = factor
	return E.Mul(m)
}
// Equal reports whether the matrices have the same shape and every pair of
// elements differs by at most NEARLY_EQUAL_TOLERANCE.
func (m MatrixStructure) Equal(m2 MatrixStructure) bool {
	s1, s2 := m.Shape(), m2.Shape()
	if s1[0] != s2[0] || s1[1] != s2[1] {
		return false
	}
	for i := range m {
		for j := range m[i] {
			if math.Abs(m[i][j]-m2[i][j]) > NEARLY_EQUAL_TOLERANCE {
				return false
			}
		}
	}
	return true
}
// Inverse computes the inverse column by column: for each standard basis
// vector e_i it solves m * x_i = e_i, collects the solutions as rows, and
// transposes the result.
// NOTE(review): for non-square input m is replaced by mᵀm and the result is
// (mᵀm)⁻¹ — the Moore–Penrose left inverse would additionally need a final
// multiplication by mᵀ; confirm the intended semantics for that case.
func (m MatrixStructure) Inverse() MatrixStructure {
	shape := m.Shape()
	// If it isn't a square matrix compute the generalized inverse.
	if shape[0] != shape[1] {
		m = m.Transpose().Mul(m)
	}
	// shape[1] is still the correct dimension here: mᵀm is shape[1]x shape[1].
	inverseValuesTransposed := make([][]float64, 0)
	for i := 0; i < shape[1]; i++ {
		e := make([]float64, shape[1])
		e[i] = 1
		e = NewVector(e)
		col := m.SolveSystem(e)
		inverseValuesTransposed = append(inverseValuesTransposed, col)
	}
	inverseTransposed := NewMatrix(inverseValuesTransposed)
	return inverseTransposed.Transpose()
}
func (m MatrixStructure) String() string {
str := "\n"
for i, row := range m {
if i > 0 {
str += "\n"
}
for j, value := range row {
str += fmt.Sprintf("%0.2f", value)
if j != len(row)-1 {
str += " "
}
}
}
return str + "\n"
} | main.go | 0.690976 | 0.646906 | main.go | starcoder |
package names
import (
"strings"
)
// Name represents a name formed of multiple words. It is intended to simplify the use of different
// strategies for representing names as strings, like using different separators or using camel
// case. The words that form the name are stored separately, so there is no need to parse the name
// each time that the words are needed.
type Name struct {
	words []*Word
}
// NewName creates a new name with the given words. The word slice is
// copied, so the caller's slice can be modified safely afterwards.
func NewName(words ...*Word) *Name {
	name := &Name{words: make([]*Word, len(words))}
	copy(name.words, words)
	return name
}
// Words returns a slice containing the words of the name. The returned slice is a copy of the
// internal representation, so it is safe to modify after calling this function.
func (n *Name) Words() []*Word {
	words := make([]*Word, len(n.words))
	copy(words, n.words)
	return words
}
// Append creates a new name that has the words of this name, plus the given
// additional words added at the end. Neither the receiver nor its words are
// modified.
// Fix: the previous append(n.words, words...) could write into n.words'
// backing array when it had spare capacity, corrupting the receiver (and
// any other Name sharing that storage). A fresh slice is built instead.
func (n *Name) Append(words ...*Word) *Name {
	combined := make([]*Word, 0, len(n.words)+len(words))
	combined = append(combined, n.words...)
	combined = append(combined, words...)
	return NewName(combined...)
}
// Insert creates a new name that has the given words, plus the words of
// this name added at the end.
// Fix: appending n.words to the variadic slice could mutate a caller's
// slice when invoked as Insert(slice...), since the variadic parameter then
// aliases the caller's storage. A fresh slice is built instead.
func (n *Name) Insert(words ...*Word) *Name {
	combined := make([]*Word, 0, len(words)+len(n.words))
	combined = append(combined, words...)
	combined = append(combined, n.words...)
	return NewName(combined...)
}
// Equals reports whether this name equals the given name: both nil, or the
// same word sequence element by element.
func (n *Name) Equals(name *Name) bool {
	if n == nil || name == nil {
		return n == name
	}
	if len(n.words) != len(name.words) {
		return false
	}
	for i, word := range n.words {
		if !word.Equals(name.words[i]) {
			return false
		}
	}
	return true
}
// LowerJoined converts all the words of the name to lower case and joins
// them using the given separator.
func (n *Name) LowerJoined(separator string) string {
	chunks := make([]string, 0, len(n.words))
	for _, word := range n.words {
		chunks = append(chunks, strings.ToLower(word.String()))
	}
	return strings.Join(chunks, separator)
}
// UpperJoined converts all the words of the name to upper case and joins
// them using the given separator.
func (n *Name) UpperJoined(separator string) string {
	chunks := make([]string, 0, len(n.words))
	for _, word := range n.words {
		chunks = append(chunks, strings.ToUpper(word.String()))
	}
	return strings.Join(chunks, separator)
}
// CapitalizedJoined capitalizes all the words of the name and joins them
// using the given separator.
func (n *Name) CapitalizedJoined(separator string) string {
	chunks := make([]string, 0, len(n.words))
	for _, word := range n.words {
		chunks = append(chunks, word.Capitalize())
	}
	return strings.Join(chunks, separator)
}
// String generates a string representing this name, consisting of the words
// of the name (case preserved) separated by underscores.
func (n *Name) String() string {
	var b strings.Builder
	for i, word := range n.words {
		if i > 0 {
			b.WriteString("_")
		}
		b.WriteString(word.String())
	}
	return b.String()
}
// Snake converts this name to snake case: lower-case words joined by
// underscores.
func (n *Name) Snake() string {
	return n.LowerJoined("_")
}
// Camel converts this name to camel case: capitalized words joined without
// a separator.
func (n *Name) Camel() string {
	return n.CapitalizedJoined("")
}
// Compare compares two names lexicographically word by word, returning -1,
// 0 or +1. When one name is a prefix of the other, the shorter name sorts
// first.
func Compare(a, b *Name) int {
	minLen := len(a.words)
	if len(b.words) < minLen {
		minLen = len(b.words)
	}
	for i := 0; i < minLen; i++ {
		if c := strings.Compare(a.words[i].String(), b.words[i].String()); c != 0 {
			return c
		}
	}
	switch {
	case len(a.words) < len(b.words):
		return -1
	case len(a.words) > len(b.words):
		return 1
	}
	return 0
}
// Cat returns a new name created by concatenating the words of the given
// names in order.
func Cat(names ...*Name) *Name {
	total := 0
	for _, name := range names {
		total += len(name.words)
	}
	result := &Name{words: make([]*Word, 0, total)}
	for _, name := range names {
		result.words = append(result.words, name.words...)
	}
	return result
}
// Names is a slice of names implementing sort.Interface, so slices of
// names can be passed directly to sort.Sort.
type Names []*Name
// Len implements the Len method of sort.Interface, so that slices of names can be easily sorted.
func (n Names) Len() int {
	return len(n)
}
// Swap implements the Swap method of sort.Interface, so that slices of names can be easily sorted.
func (n Names) Swap(i, j int) {
	n[i], n[j] = n[j], n[i]
}
// Less implements the Less method of sort.Interface, ordering names with
// the lexicographic Compare function.
func (n Names) Less(i, j int) bool {
	return Compare(n[i], n[j]) == -1
}
package debiaser
import (
"fmt"
"log"
"math"
"sort"
"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/mat"
"github.com/JaderDias/movingmedian"
)
// Debiaser implements in-place removal of bias from a matrix of (scaled) values.
type Debiaser interface {
	Debias(*mat.Dense)
}
// Sorter provides methods to sort the rows of a matrix and later restore
// the original row order.
type Sorter interface {
	Sort(*mat.Dense)
	Unsort(*mat.Dense)
}
// SortedDebiaser is useful when we need to: sort, then unbias based on that sort order, then unsort.
// An example of this is for GC bias.
type SortedDebiaser interface {
	Sorter
	Debiaser
}
// GeneralDebiaser is an implementation of a SortedDebiaser that can be used simply by setting attributes.
// Vals holds the per-row sort keys (e.g. GC content per region) and Window
// is the size of the moving window for correction.
// Usage is to call GeneralDebiaser.Sort(), then Debias(), then Unsort().
// Presumably those calls will be flanked by a scaler call such as scaler.ZScore.
type GeneralDebiaser struct {
	Vals   []float64
	Window int
	// inds records the permutation applied by Sort so Unsort can undo it.
	inds []int
	// tmp is a scratch matrix reused across calls to avoid reallocation.
	tmp *mat.Dense
}
// setTmp ensures g.tmp is an r-by-c scratch matrix, reusing the existing
// one when its dimensions already match.
func (g *GeneralDebiaser) setTmp(r, c int) {
	if g.tmp != nil {
		if gr, gc := g.tmp.Dims(); gr == r && gc == c {
			return
		}
	}
	g.tmp = mat.NewDense(r, c, nil)
}
// Sort sorts the rows in mat according to the order of g.Vals.
// floats.Argsort sorts g.Vals in place and records in g.inds, for each
// sorted position, the original index — so row ai of the sorted matrix is
// original row g.inds[ai].
func (g *GeneralDebiaser) Sort(mat *mat.Dense) {
	if g.inds == nil {
		g.inds = make([]int, len(g.Vals))
	}
	floats.Argsort(g.Vals, g.inds)
	r, c := mat.Dims()
	g.setTmp(r, c)
	changed := false
	for ai, bi := range g.inds {
		if ai != bi {
			changed = true
		}
		g.tmp.SetRow(ai, mat.RawRowView(bi))
	}
	if !changed {
		log.Println("WARNING: no change after sorting. This usually means .Vals is unset or same as previous run")
	}
	// copy g.tmp into mat
	mat.Copy(g.tmp)
}
// Unsort reverts the matrix rows (and g.Vals) to the original, position-
// sorted order recorded by the preceding Sort call. Panics if Sort was
// never called.
func (g *GeneralDebiaser) Unsort(mat *mat.Dense) {
	if g.inds == nil {
		panic("unsort: must call sort first")
	}
	r, c := mat.Dims()
	g.setTmp(r, c)
	// copy mat into g.tmp
	g.tmp.Copy(mat)
	tmp := make([]float64, len(g.Vals))
	// Invert the permutation: sorted row ai goes back to original row bi.
	for ai, bi := range g.inds {
		mat.SetRow(bi, g.tmp.RawRowView(ai))
		tmp[bi] = g.Vals[ai]
	}
	g.Vals = tmp
}
// Debias divides each sample column by its moving median (window g.Window),
// clamped below at 1 to avoid division by tiny or zero medians.
// It's assumed that g.Sort() has been called before this and that g.Unsort() will be called after.
// It's also assumed that the values in mat have been scaled, for example by a `scaler.ZScore`.
func (g *GeneralDebiaser) Debias(imat *mat.Dense) {
	r, c := imat.Dims()
	col := make([]float64, r)
	for sampleI := 0; sampleI < c; sampleI++ {
		mat.Col(col, sampleI, imat)
		mm := movingmedian.NewMovingMedian(g.Window)
		mid := (g.Window-1)/2 + 1
		// Warm up the median with the first half-window of values, then use
		// that median for the leading edge where no full window exists yet.
		for i := 0; i < mid; i++ {
			mm.Push(col[i])
		}
		for i := 0; i < mid; i++ {
			col[i] /= math.Max(mm.Median(), 1)
		}
		var i int
		// Steady state: the window is centered on i by pushing the value
		// half a window ahead before dividing.
		for i = mid; i < len(col)-mid; i++ {
			mm.Push(col[i+mid])
			col[i] /= math.Max(mm.Median(), 1)
		}
		// Trailing edge: reuse the last full-window median.
		for ; i < len(col); i++ {
			col[i] /= math.Max(mm.Median(), 1)
		}
		imat.SetCol(sampleI, col)
	}
}
// ChunkDebiaser is a GeneralDebiaser variant that normalizes by the median
// of fixed-width chunks of the sorted Vals rather than a moving window.
type ChunkDebiaser struct {
	GeneralDebiaser
	// ScoreWindow defines the range of Vals used per window.
	// E.g. if this is 0.1 then all values from 0.25-0.35 will be normalized to the median of
	// Depths occuring in that range.
	ScoreWindow float64
}
// Debias splits the (sorted) rows into chunks whose Vals span at most
// ScoreWindow, then divides each sample's values within a chunk by the
// chunk's median (computed over nonzero entries). Panics when ScoreWindow
// is unset. Assumes Sort has been called so cd.Vals is ascending.
func (cd *ChunkDebiaser) Debias(imat *mat.Dense) {
	if cd.ScoreWindow == 0 {
		panic("must set ChunkDebiaser.ScoreWindow")
	}
	r, c := imat.Dims()
	col := make([]float64, r)
	// slices collects chunk boundary indices; a new chunk starts whenever
	// Vals has drifted more than ScoreWindow from the chunk's first value.
	slices := make([]int, 1, 100)
	v0 := cd.Vals[0]
	for i := 0; i < len(cd.Vals); i++ {
		if cd.Vals[i]-v0 > cd.ScoreWindow {
			v0 = cd.Vals[i]
			slices = append(slices, i)
		}
	}
	slices = append(slices, len(cd.Vals))
	dpSubset := make([]float64, 0, len(cd.Vals))
	for sampleI := 0; sampleI < c; sampleI++ {
		mat.Col(col, sampleI, imat)
		for i := 1; i < len(slices); i++ {
			si, ei := slices[i-1], slices[i]
			dpSubset = dpSubset[:(ei - si)]
			copy(dpSubset, col[si:ei])
			sort.Float64s(dpSubset)
			// Skip leading zeros so the median is taken over nonzero depths.
			var k int
			for ; k < len(dpSubset) && dpSubset[k] == 0; k++ {
			}
			median := dpSubset[(ei-si-k)/2]
			if median > 0 {
				for j := si; j < ei; j++ {
					col[j] /= median
				}
			}
		}
		imat.SetCol(sampleI, col)
	}
}
// SVD removes systematic variation by zeroing the leading singular
// components. MinVariancePct is the threshold (in percent of total
// variance) above which a component is considered bias and dropped.
type SVD struct {
	MinVariancePct float64
}
// Debias factorizes imat with a thin SVD, zeroes the first n singular
// values — those each explaining more than MinVariancePct of the total
// (capped at 15 components) — and reconstructs the matrix in place.
func (isvd *SVD) Debias(imat *mat.Dense) {
	var svd mat.SVD
	if ok := svd.Factorize(imat, mat.SVDThin); !ok {
		panic("error with SVD")
	}
	// get svd and zero out first n components.
	s, u, v := extractSVD(&svd)
	sum := floats.Sum(s)
	str := "variance:"
	var n int
	for n = 0; n < 15 && 100*s[n]/sum > isvd.MinVariancePct; n++ {
		str += fmt.Sprintf(" %.2f", 100*s[n]/sum)
	}
	log.Println(str)
	sigma := mat.NewDense(len(s), len(s), nil)
	// leave the first n as 0 and set the rest.
	for i := n; i < len(s); i++ {
		sigma.Set(i, i, s[i])
	}
	imat.Product(u, sigma, v.T())
}
// extractSVD pulls the singular values and the U and V factor matrices out
// of a factorized mat.SVD.
// NOTE(review): the UTo(nil)/VTo(nil) call shape depends on the gonum
// version in use — newer releases take a destination receiver instead of
// returning a new matrix; confirm against the pinned gonum version.
func extractSVD(svd *mat.SVD) (s []float64, u, v *mat.Dense) {
	um := svd.UTo(nil)
	vm := svd.VTo(nil)
	s = svd.Values(nil)
	return s, um, vm
}
package compiler
import "golang.org/x/sys/unix"
type shift int
// isLongJump reports whether a jump of the given size exceeds the maximum
// offset representable directly in a conditional jump's Jt/Jf field.
func (c *compilerContext) isLongJump(jumpSize int) bool {
	return jumpSize > c.maxJumpSize
}
// hasLongJump reports whether the instruction at index has a registered
// long jump on either its true or false branch.
func hasLongJump(index int, jts, jfs map[int]int) bool {
	// Using the unshifted index to look up positions in jts and jfs is
	// only safe if we're iterating backwards. Otherwise we would have to
	// fix up the positions in the maps as well and that would be fugly.
	_, longOnTrue := jts[index]
	_, longOnFalse := jfs[index]
	return longOnTrue || longOnFalse
}
// fixupWithShifts adjusts a jump offset `add` originating at `pos` for the
// instructions inserted at the positions in `shifts`: every insertion that
// falls between the jump and its (growing) target lengthens the jump by one.
func fixupWithShifts(pos, add int, shifts []shift) int {
	adjusted := add
	target := pos + add + 1
	for _, s := range shifts {
		if idx := int(s); idx > pos && idx <= target {
			adjusted++
			target++
		}
	}
	return adjusted
}
// longJumpContext carries the state needed while rewriting out-of-range
// jumps: the highest instruction index that needs a long jump, the
// registered long-jump lengths per branch, and the insertion points
// accumulated so far.
type longJumpContext struct {
	*compilerContext
	maxIndexWithLongJump     int
	jtLongJumps, jfLongJumps map[int]int
	shifts                   []shift
}
// fixupLongJumps walks the program backwards from the highest instruction
// known to need a long jump, inserting unconditional jump instructions for
// branches whose targets don't fit the 8-bit Jt/Jf fields, and re-adjusting
// every other jump for the insertions recorded in c.shifts. Iterating
// backwards keeps the indices stored in the long-jump maps valid while
// instructions are inserted.
func (c *longJumpContext) fixupLongJumps() {
	currentIndex := c.maxIndexWithLongJump
	for currentIndex > -1 {
		current := c.result[currentIndex]
		if isConditionalJump(current) && hasLongJump(currentIndex, c.jtLongJumps, c.jfLongJumps) {
			// Handle the true branch first; whether it inserted an extra
			// instruction changes how the false branch must be patched.
			hadJt := c.handleJTLongJumpAt(currentIndex)
			c.handleJFLongJumpAt(currentIndex, hadJt)
		} else {
			if isUnconditionalJump(current) {
				// Unconditional jumps keep their target in K; only the
				// shift adjustment applies.
				c.result[currentIndex].K = uint32(fixupWithShifts(currentIndex, int(c.result[currentIndex].K), c.shifts))
			} else {
				// A conditional jump without a pre-registered long jump may
				// still overflow once the shift adjustment is applied.
				hadJt := c.shiftJt(currentIndex)
				c.shiftJf(hadJt, currentIndex)
			}
		}
		currentIndex--
	}
}
// fixupJumps resolves every recorded label reference to a concrete jump
// offset. Offsets that fit the 8-bit Jt/Jf fields are written directly;
// out-of-range ones are collected per branch and handed to
// fixupLongJumps for rewriting. Unconditional jumps (stored in K) have no
// size limit here and are always resolved in place.
func (c *compilerContext) fixupJumps() {
	maxIndexWithLongJump := -1
	jtLongJumps := make(map[int]int)
	jfLongJumps := make(map[int]int)
	for l, at := range c.labels.allLabels() {
		for _, pos := range c.jts.allJumpsTo(l) {
			jumpSize := (at - pos) - 1
			if c.isLongJump(jumpSize) {
				if maxIndexWithLongJump < pos {
					maxIndexWithLongJump = pos
				}
				jtLongJumps[pos] = jumpSize
			} else {
				c.result[pos].Jt = uint8(jumpSize)
			}
		}
		for _, pos := range c.jfs.allJumpsTo(l) {
			jumpSize := (at - pos) - 1
			if c.isLongJump(jumpSize) {
				if maxIndexWithLongJump < pos {
					maxIndexWithLongJump = pos
				}
				jfLongJumps[pos] = jumpSize
			} else {
				c.result[pos].Jf = uint8(jumpSize)
			}
		}
		for _, pos := range c.uconds.allJumpsTo(l) {
			c.result[pos].K = uint32((at - pos) - 1)
		}
	}
	(&longJumpContext{c, maxIndexWithLongJump, jtLongJumps, jfLongJumps, nil}).fixupLongJumps()
}
// handleJTLongJumpAt rewrites a registered long jump on the true branch of
// the conditional at currentIndex by inserting an unconditional jump right
// after it (Jt then implicitly targets the inserted instruction). The false
// branch must skip the inserted instruction, so Jf grows by one — possibly
// itself becoming a long jump. Returns true when an insertion happened.
func (c *longJumpContext) handleJTLongJumpAt(currentIndex int) bool {
	if jmpLen, ok := c.jtLongJumps[currentIndex]; ok {
		jmpLen = fixupWithShifts(currentIndex, jmpLen, c.shifts)
		newJf := int(c.result[currentIndex].Jf) + 1
		if c.isLongJump(newJf) {
			// Simple case, we can just add it to the long jumps for JF:
			c.jfLongJumps[currentIndex] = newJf
		} else {
			c.result[currentIndex].Jf = uint8(newJf)
		}
		c.insertJumps(currentIndex, jmpLen, 0)
		return true
	}
	return false
}
// handleJFLongJumpAt rewrites a registered long jump on the false branch of
// the conditional at currentIndex. hadJt indicates whether the true branch
// already inserted an unconditional jump, which affects both the insertion
// offset and the adjustment applied to the true branch (see incrementJt).
func (c *longJumpContext) handleJFLongJumpAt(currentIndex int, hadJt bool) {
	if jmpLen, ok := c.jfLongJumps[currentIndex]; ok {
		jmpLen = fixupWithShifts(currentIndex, jmpLen, c.shifts)
		incr, jmpLen := c.incrementJt(hadJt, jmpLen, currentIndex)
		c.insertJumps(currentIndex, jmpLen, incr)
	}
}
// incrementJt adjusts the true branch of the conditional at currentIndex to
// account for the unconditional jump about to be inserted for the false
// branch. When the true branch already has an inserted jump (hadJt), that
// jump's K target grows instead, and the pending false-branch length
// shrinks accordingly. Returns the number of instructions inserted here and
// the (possibly adjusted) false-branch jump length.
func (c *longJumpContext) incrementJt(hadJt bool, jmpLen, currentIndex int) (int, int) {
	incr := 0
	if hadJt {
		c.result[currentIndex+1].K++
		incr++
		jmpLen--
	} else {
		newJt := int(c.result[currentIndex].Jt) + 1
		if c.isLongJump(newJt) {
			// incr in this case doesn't seem to do much, all tests pass when it is changed to 0
			c.insertJumps(currentIndex, newJt, incr)
			incr++
		} else {
			c.result[currentIndex].Jt = uint8(newJt)
		}
	}
	return incr, jmpLen
}
// shiftJf re-adjusts the false branch of the conditional at currentIndex
// for earlier insertions; if the adjusted offset overflows Jf, an
// unconditional jump is inserted (coordinating with the true branch via
// incrementJt).
func (c *longJumpContext) shiftJf(hadJt bool, currentIndex int) {
	newJf := fixupWithShifts(currentIndex, int(c.result[currentIndex].Jf), c.shifts)
	if c.isLongJump(newJf) {
		incr, _ := c.incrementJt(hadJt, 0, currentIndex)
		c.insertJumps(currentIndex, newJf, incr)
	} else {
		c.result[currentIndex].Jf = uint8(newJf)
	}
}
// shiftJt re-adjusts the true branch of the conditional at currentIndex for
// earlier insertions; if the adjusted offset overflows Jt, an unconditional
// jump is inserted. Returns true when an insertion happened, so the caller
// can patch the false branch accordingly.
func (c *longJumpContext) shiftJt(currentIndex int) bool {
	newJt := fixupWithShifts(currentIndex, int(c.result[currentIndex].Jt), c.shifts)
	if c.isLongJump(newJt) {
		// Jf doesn't need to be modified here, because it will be fixed up with the shifts. Hopefully correctly...
		c.insertJumps(currentIndex, newJt, 0)
		return true
	}
	c.result[currentIndex].Jt = uint8(newJt)
	return false
}
// insertJumps places an unconditional jump of length pos after the
// conditional at currentIndex (offset by incr earlier insertions), points
// the conditional's false branch at it, and records the insertion so later
// fixups can account for the shifted instruction stream.
func (c *longJumpContext) insertJumps(currentIndex, pos, incr int) {
	c.insertUnconditionalJump(currentIndex+1+incr, pos)
	c.result[currentIndex].Jf = uint8(incr)
	c.shifts = append(c.shifts, shift(currentIndex+1+incr))
}
// insertSockFilter returns a new instruction slice with x inserted at index
// ix; the input slice is not modified.
func insertSockFilter(sfs []unix.SockFilter, ix int, x unix.SockFilter) []unix.SockFilter {
	result := make([]unix.SockFilter, 0, len(sfs)+1)
	result = append(result, sfs[:ix]...)
	result = append(result, x)
	result = append(result, sfs[ix:]...)
	return result
}
// insertUnconditionalJump inserts a jump instruction with target offset k
// at position from in the generated program.
func (c *compilerContext) insertUnconditionalJump(from, k int) {
	x := unix.SockFilter{Code: OP_JMP_K, K: uint32(k)}
	c.result = insertSockFilter(c.result, from, x)
}
// shiftJumpsBy moves every recorded jump source and label at or after
// position from by incr instructions, keeping the bookkeeping consistent
// after an insertion.
func (c *compilerContext) shiftJumpsBy(from, incr int) {
	c.jts.shift(from, incr)
	c.jfs.shift(from, incr)
	c.uconds.shift(from, incr)
	c.labels.shiftLabels(from, incr)
}
package cli
import (
"fmt"
"github.com/iskendria-pub/iskendria/util"
"reflect"
"strings"
)
/*
This package implements a generic interactive command-line interface.
To use it, fill a cli.Cli struct with help text and instances of
Handler. Then call the Run method. See trycli/interactive.go to see
how the cli package is used. When the cli starts, the end user gets
a welcome message and a prompt. She can enter commands and sees
a prompt after a command is completed. The user defines the
user interface by filling Cli with help texts and Handler instances.
Cli itself implements Handler because groups of commands can be nested.
Other Handler implementations are SingleLineHandler and
StructRunnerHandler.
SingleLineHandler wraps a Golang-function with
arbitrary arguments of type bool, int32, int64 or string.
There are two possibilities for a SingleLineHandler:
* It can take an extra argument of type Outputter that is called
to report outputs.
* It can take only the named arguments and return a value
that implements error.
StructRunnerHandler wraps a function taking a struct pointer,
the Action. This handler starts a dialog allowing the end user
to set all fields of the struct. Not every type of field is
supported. Fields can be bool, int32, int64, string or []string.
You can set a []string field by delimiting the elements by one
or morespace. Each item is non-empty (not "") but the empty
slice is supported. A command "continue" is added automatically
that allows the end user to call the Action. There is also
"cancel" to go back without executing the Action. The end user
can do "review" to get an overview of the entered properties.
StructRunnerHandler has an optional ReferenceValueGetter, a
function that returns a reference value. The reference value
should produce a value of the type needed by the Action.
When the dialog is started, the read value is initialized
with the reference value. There is a "clear" function that
fills the read value with the reference value, or the zero
values of the struct if there is no ReferenceValueGetter.
There is one more feature to describe. The user may have
background processes. These may have messages to report
to the user. This package provides a hook that is called
each time the end user presses Enter. This way, the user
interface provides frequent opportunities to show messages,
but messages do not interfere when the end user is
typing a command. Please set the EventPager property
of struct Cli.
The cli is implemented as follows. The top-level Handler's
"build" method is executed to get a runnableHandler instance.
This is done recursively resulting in a nested structure of
runnableHandler instances. There are "var _ Handler = ... "
and "var _ runnableHandler = ..." to enforce that every
type implements the intended interfaces.
The runnableHandler implementations groupContextRunnable and
dialogContextRunnable have an interactionStrategy that implements
reading and parsing user input and that provides helper
functions for formatting. When the user has entered a line,
the parsed line is fed to groupContextRunnable.executeLine()
or dialogContextRunnable.executeLine(). The
groupContextRunnable or dialogContextRunnable selects the
appropriate runnableHandler and executes its handleLine
method. groupContextRunnable and dialogContextRunnable are
themselves runnableHandler implementations because a
dialog is called from a command group and a command group
can be the child of another command group. To summarize:
A command wrapped by a runnableHandler is executed using its
handleLine() method. When the runnableHandler is a group,
it has an executeLine() to select and run a command within
the group.
The interaction strategy does not provide help information.
groupContextRunnable and dialogContextRunnable have help
methods. The build() methods of Cli and StructRunnerHandler
wrap these help methods in runnableHandler instances
and register these handlers. This registration ensures
that a help screen's list of options shows the help
function itself as one of the options. dialogRunnableHandler
applies the same idea to provide "continue", "cancel",
"review" and "clear" commands.
*/
// Handler is a user-interface building block; build converts the
// declarative description into the runnable form used by the command loop.
type Handler interface {
	build() runnableHandler
}
var _ Handler = new(SingleLineHandler)

// SingleLineHandler wraps a plain Go function as a one-line command.
// Handler either takes an Outputter plus the named arguments, or takes only
// the named arguments and returns an error (validated in build).
type SingleLineHandler struct {
	Name     string
	Handler  interface{}
	ArgNames []string
}
// build validates the wrapped function's signature against the declared
// argument names and returns the runnable form of this handler.
func (slh *SingleLineHandler) build() runnableHandler {
	checkFunctionWithArgNames(slh.Handler, slh.ArgNames)
	runnable := &singleLineRunnableHandler{
		name:     slh.Name,
		handler:  slh.Handler,
		argNames: slh.ArgNames,
	}
	return runnable
}
// checkFunctionWithArgNames panics unless f is a function with a signature
// supported by SingleLineHandler: either no return value and a leading
// Outputter parameter, or exactly one return value implementing error.
func checkFunctionWithArgNames(f interface{}, argNames []string) {
	handlerType := reflect.TypeOf(f)
	if handlerType.Kind() != reflect.Func {
		panic("Handler is not a function")
	}
	switch handlerType.NumOut() {
	case 0:
		checkInputTypesIncludingOutputter(handlerType, argNames)
	case 1:
		checkFunctionThatReturnsError(handlerType, argNames)
	default:
		panic("Handler should return zero or one value")
	}
}
// checkInputTypesIncludingOutputter panics unless the handler's first
// parameter is an Outputter — a func(string) with no results — followed by
// one parameter per declared argument name.
func checkInputTypesIncludingOutputter(reflectHandlerType reflect.Type, argNames []string) {
	// One extra parameter beyond the named arguments: the Outputter.
	expectedNumFunctionArgs := len(argNames) + 1
	if reflectHandlerType.NumIn() != expectedNumFunctionArgs {
		panic(fmt.Sprintf(
			"Number of handler arguments does not match number of argument names or outputter function is missing: %v",
			reflectHandlerType))
	}
	reflectFirstArgumentType := reflectHandlerType.In(0)
	if reflectFirstArgumentType.Kind() != reflect.Func {
		panic("The first argument of a handler should be of type func(string)")
	}
	if reflectFirstArgumentType.NumIn() != 1 {
		panic("The first argument of a handler should be a function with one argument")
	}
	if reflectFirstArgumentType.NumOut() != 0 {
		panic("The first argument of a handler should be a function without outputs")
	}
	if reflectFirstArgumentType.In(0).Kind() != reflect.String {
		panic("The first argument of a handler should be a function with a string argument")
	}
}
func checkFunctionThatReturnsError(reflectHandlerType reflect.Type, argNames []string) {
if reflectHandlerType.NumIn() != len(argNames) {
panic("Number of handler arguments does not match number of argument names")
}
reflectReturnType := reflectHandlerType.Out(0)
errorType := reflect.TypeOf((*error)(nil)).Elem()
if reflectReturnType == errorType {
return
}
if !reflectReturnType.Implements(errorType) {
panic(fmt.Sprintf(
"Return type does not implement error: %s", reflectReturnType))
}
}
var _ Handler = new(Cli)

// Cli describes an interactive command-line interface: its help texts, its
// command handlers, and an optional EventPager hook that is invoked each
// time the user presses Enter so background processes can report messages.
// FormatEscape is the escape sequence used for text formatting in help
// output. Cli itself implements Handler so command groups can be nested.
type Cli struct {
	FullDescription    string
	OneLineDescription string
	Name               string
	FormatEscape       string
	Handlers           []Handler
	EventPager         func(Outputter)
}
// Run starts the interactive command loop. Input comes from the console,
// or from the file named by InputScript when it is set (useful for
// scripted runs). The input source is closed when the loop returns.
func (c *Cli) Run() {
	if InputScript == "" {
		inp = new(inputSourceConsole)
	} else {
		inp = &inputSourceFile{
			fname: InputScript,
		}
	}
	inp.open()
	defer inp.close()
	c.buildMain().run()
}

// InputScript, when non-empty, names a file whose lines are fed to the cli
// instead of interactive console input.
var InputScript string
// build implements Handler so a Cli can be nested as a command group inside
// another Cli.
func (c *Cli) build() runnableHandler {
	return runnableHandler(c.buildMain())
}
// buildMain converts the declarative Cli description into a runnable
// command group: each child Handler is built, the interaction strategy is
// configured from the help texts, and the generated "help"/"exit" commands
// are registered so they appear in the group's own help listing.
func (c *Cli) buildMain() *groupContextRunnable {
	runnableHandlers := make([]runnableHandler, len(c.Handlers))
	for i := range c.Handlers {
		runnableHandlers[i] = c.Handlers[i].build()
	}
	result := &groupContextRunnable{
		handlersForGroup: &handlersForGroup{
			runnableHandlers,
		},
		interactionStrategy: &interactionStrategyImpl{
			fullDescription:    c.FullDescription,
			oneLineDescription: c.OneLineDescription,
			name:               c.Name,
			formatEscape:       c.FormatEscape,
			stopWords:          map[string]bool{EXIT: true},
			eventPager:         c.EventPager,
		},
	}
	c.addGeneratedCommandHandlers(result)
	result.init()
	return result
}
// addGeneratedCommandHandlers registers the automatic "help" and "exit"
// commands on the group. Registering them as ordinary handlers ensures they
// show up in the group's help listing like any user-defined command; "exit"
// is a no-op because the stop-word mechanism terminates the loop.
func (c *Cli) addGeneratedCommandHandlers(gcr *groupContextRunnable) {
	var helpHandler = &SingleLineHandler{
		Name:     HELP,
		Handler:  func(outputter Outputter) { gcr.help(outputter) },
		ArgNames: []string{},
	}
	var exitHandler = &SingleLineHandler{
		Name:     EXIT,
		Handler:  func(Outputter) {},
		ArgNames: []string{},
	}
	gcr.handlers = append(gcr.handlers, helpHandler.build())
	gcr.handlers = append(gcr.handlers, exitHandler.build())
}
// StructRunnerHandler wraps a function taking a struct pointer (Action) in
// a dialog that lets the end user fill the struct's fields before running
// it. The optional ReferenceValueGetter supplies initial/reset values for
// the dialog and must produce the same struct type the Action consumes.
type StructRunnerHandler struct {
	FullDescription              string
	OneLineDescription           string
	Name                         string
	Action                       interface{}
	ReferenceValueGetter         interface{}
	ReferenceValueGetterArgNames []string
}

var _ Handler = new(StructRunnerHandler)
// build validates the Action and ReferenceValueGetter signatures, assembles
// the dialog context, and registers both the generated dialog commands
// (help/review/clear/continue/cancel) and one setter command per struct
// field.
func (srh *StructRunnerHandler) build() runnableHandler {
	actionInputType := srh.getAndCheckActionType()
	srh.checkReferenceValueGetter(actionInputType)
	result := &dialogContextRunnable{
		handlersForDialog: new(handlersForDialog),
		interactionStrategy: &interactionStrategyImpl{
			fullDescription:    srh.FullDescription,
			oneLineDescription: srh.OneLineDescription,
			name:               srh.Name,
			stopWords:          map[string]bool{CANCEL: true, CONTINUE: true},
		},
		action:                       srh.Action,
		actionInpuType:               actionInputType,
		referenceValueGetter:         srh.ReferenceValueGetter,
		referenceValueGetterArgNames: srh.ReferenceValueGetterArgNames,
	}
	srh.addGeneratedCommandHandlers(result)
	srh.addPropertyHandlers(actionInputType, result)
	return result
}
func (srh *StructRunnerHandler) getAndCheckActionType() reflect.Type {
actionInputType := reflect.TypeOf(srh.Action).In(1).Elem()
if actionInputType.Kind() != reflect.Struct {
panic("The second argument of Action is expected to be a pointer to struct")
}
return actionInputType
}
func (srh *StructRunnerHandler) checkReferenceValueGetter(actionInputType reflect.Type) {
if srh.ReferenceValueGetter != nil {
referenceGetterType := reflect.TypeOf(srh.ReferenceValueGetter)
if referenceGetterType.NumIn() == 0 || referenceGetterType.NumOut() != 1 {
panic(fmt.Sprintf("Reference value getter should have at least one input and one output: %v",
referenceGetterType))
}
checkInputTypesIncludingOutputter(referenceGetterType, srh.ReferenceValueGetterArgNames)
referenceType := referenceGetterType.Out(0).Elem()
if actionInputType != referenceType {
panic("The ReferenceValueGetter must produce the type needed by Action")
}
}
}
// addGeneratedCommandHandlers installs the built-in dialog commands
// (help, review, clear, continue, cancel) and, when the action struct has
// at least one slice field, the list-editing commands
// (add, removeItem, removeIndex, insert).
func (srh *StructRunnerHandler) addGeneratedCommandHandlers(dcr *dialogContextRunnable) {
	helpHandler := &SingleLineHandler{
		Name:     HELP,
		Handler:  dcr.help,
		ArgNames: []string{},
	}
	reviewHandler := &SingleLineHandler{
		Name:     REVIEW,
		Handler:  dcr.review,
		ArgNames: []string{},
	}
	clearHandler := &SingleLineHandler{
		Name:     CLEAR,
		Handler:  dcr.clear,
		ArgNames: []string{},
	}
	continueHandler := &SingleLineHandler{
		Name:     CONTINUE,
		Handler:  dcr.doContinue,
		ArgNames: []string{},
	}
	cancelHandler := &SingleLineHandler{
		Name:     CANCEL,
		Handler:  dcr.cancel,
		ArgNames: []string{},
	}
	// Note: this replaces (not appends to) any existing handlers.
	dcr.handlers = []runnableHandler{
		helpHandler.build(),
		reviewHandler.build(),
		clearHandler.build(),
		continueHandler.build(),
		cancelHandler.build(),
	}
	if hasListFields(dcr) {
		addHandler := &SingleLineHandler{
			Name:     "add",
			Handler:  dcr.add,
			ArgNames: []string{"list name", "added item"},
		}
		removeItemHandler := &SingleLineHandler{
			Name:     "removeItem",
			Handler:  dcr.removeItem,
			ArgNames: []string{"List name", "removed item"},
		}
		removeIndexHandler := &SingleLineHandler{
			Name:     "removeIndex",
			Handler:  dcr.removeIndex,
			ArgNames: []string{"List name", "removed index"},
		}
		insertHandler := &SingleLineHandler{
			Name:     "insert",
			Handler:  dcr.insert,
			ArgNames: []string{"List name", "inserted item", "index"},
		}
		dcr.handlers = append(dcr.handlers,
			addHandler.build(),
			removeItemHandler.build(),
			removeIndexHandler.build(),
			insertHandler.build(),
		)
	}
}

// hasListFields reports whether the dialog's action struct has at least one
// slice-typed field (and therefore needs the list-editing commands).
func hasListFields(dcr *dialogContextRunnable) bool {
	actionInputType := dcr.actionInpuType
	for i := 0; i < actionInputType.NumField(); i++ {
		fieldType := actionInputType.Field(i).Type
		if fieldType.Kind() == reflect.Slice {
			return true
		}
	}
	return false
}
// addPropertyHandlers creates one property handler per field of the action
// struct (a list handler for slices, a scalar handler otherwise) and
// appends them to the dialog's handlers. Panics on unexported fields.
func (srh *StructRunnerHandler) addPropertyHandlers(actionInputType reflect.Type, dcr *dialogContextRunnable) {
	dialogPropertyHandlers := make([]runnableHandler, actionInputType.NumField())
	for i := 0; i < actionInputType.NumField(); i++ {
		f := actionInputType.Field(i)
		// strings.Title upper-cases the first rune, so inequality means the
		// field name does not start upper-cased, i.e. it is unexported.
		// NOTE(review): strings.Title is deprecated; a first-rune
		// unicode.IsUpper check would be the modern equivalent.
		if f.Name != strings.Title(f.Name) {
			panic("Field is not exported: " + f.Name)
		}
		if f.Type.Kind() == reflect.Slice {
			addPropertyHandlerForSliceField(f, i, dialogPropertyHandlers)
		} else {
			addPropertyHandlerForScalarField(f, i, dialogPropertyHandlers)
		}
	}
	dcr.handlers = append(dcr.handlers, dialogPropertyHandlers...)
}
// addPropertyHandlerForSliceField registers a dialogListPropertyHandler for
// the slice field f at slot i. Only string slices are supported; any other
// element type panics.
func addPropertyHandlerForSliceField(f reflect.StructField, i int, dialogPropertyHandlers []runnableHandler) {
	if f.Type.Elem().Kind() != reflect.String {
		panic(fmt.Sprintf("Field %s is a slice, its element type should be string",
			f.Name))
	}
	dlph := &dialogListPropertyHandler{
		// The handler is addressed by the de-capitalized field name.
		name:        util.UnTitle(f.Name),
		fieldNumber: i,
	}
	dialogPropertyHandlers[i] = dlph
}
func addPropertyHandlerForScalarField(f reflect.StructField, i int, dialogPropertyHandlers []runnableHandler) {
dph := &dialogPropertyHandler{
name: util.UnTitle(f.Name),
propertyType: f.Type,
fieldNumber: i,
}
dialogPropertyHandlers[i] = dph
} | cli/cli.go | 0.687945 | 0.449997 | cli.go | starcoder |
package linkedlist
import (
"errors"
)
// LinkedList is a singly linked list that tracks both ends so that appends
// run in O(1).
type LinkedList struct {
	firstNode *LinkedListNode // head of the list; nil when the list is empty
	lastNode  *LinkedListNode // tail of the list; nil when the list is empty
}

// LinkedListNode is a single element of a LinkedList.
type LinkedListNode struct {
	NextNode *LinkedListNode // nil for the final node
	Value    interface{}     // the payload stored in this node
}
// Append adds a node holding data to the end of the list in O(1) and
// returns the list to allow chaining.
func (ll *LinkedList) Append(data interface{}) *LinkedList {
	node := &LinkedListNode{Value: data}
	if ll.firstNode == nil {
		ll.firstNode, ll.lastNode = node, node
	} else {
		ll.lastNode.NextNode = node
		ll.lastNode = node
	}
	return ll
}
// Prepend inserts a node holding data at the front of the list and returns
// the list to allow chaining.
// NOTE(review): lastNode is not set when prepending to an empty list,
// matching the historical behavior — confirm whether that is intended.
func (ll *LinkedList) Prepend(data interface{}) *LinkedList {
	ll.firstNode = &LinkedListNode{Value: data, NextNode: ll.firstNode}
	return ll
}
// Get returns the node at position index (the number of links followed from
// the head). It returns an error for negative indices and for indices past
// the end of the list.
func (ll *LinkedList) Get(index int) (*LinkedListNode, error) {
	if index < 0 {
		return nil, errors.New("index must be greater than or equal to 0")
	}
	node := ll.firstNode
	for i := 0; i < index && node != nil; i++ {
		node = node.NextNode
	}
	if node == nil {
		return nil, errors.New("passed an index out of range")
	}
	return node, nil
}
// Contains returns the zero-based position of the first node whose Value
// equals data, or -1 when data is absent.
//
// Fix: the previous implementation dereferenced the head node before
// checking it for nil, so calling Contains on an empty list panicked; an
// empty list now simply returns -1.
func (ll *LinkedList) Contains(data interface{}) int {
	index := 0
	for node := ll.firstNode; node != nil; node = node.NextNode {
		if node.Value == data {
			return index
		}
		index++
	}
	return -1
}
// Remove deletes the node at the specified index, shifting the index of
// every following node down by one. It reports whether a node was removed
// (false for negative or out-of-range indices and for an empty list).
//
// Fix: the previous implementation never updated lastNode, so after
// removing the tail (or emptying the list) lastNode pointed at a detached
// node and subsequent Appends silently corrupted the list.
func (ll *LinkedList) Remove(index int) bool {
	if index < 0 || ll.firstNode == nil {
		return false
	}
	if index == 0 {
		ll.firstNode = ll.firstNode.NextNode
		if ll.firstNode == nil {
			ll.lastNode = nil // list is now empty
		}
		return true
	}
	// Walk to the node immediately before the one being removed.
	prev := ll.firstNode
	for i := 0; i < index-1; i++ {
		if prev.NextNode == nil {
			return false // index out of range
		}
		prev = prev.NextNode
	}
	target := prev.NextNode
	if target == nil {
		return false // index out of range
	}
	prev.NextNode = target.NextNode
	if prev.NextNode == nil {
		ll.lastNode = prev // removed the tail; repoint lastNode
	}
	return true
}
// Counts the amount of linked nodes and returns
// the count.
func (ll *LinkedList) Count() int {
count := 0
currentNode := ll.firstNode
for {
if currentNode != nil {
count++
currentNode = currentNode.NextNode
} else {
break
}
}
return count
} | data_structures/linked_list/linked_list.go | 0.714528 | 0.426979 | linked_list.go | starcoder |
package instrumentation
import "time"
// NopInstrumentation is a no-op implementation of the Instrumentation
// interface: every method does nothing. Use it when metrics collection is
// not desired.
type NopInstrumentation struct{}

// Compile-time check that NopInstrumentation satisfies Instrumentation.
var _ Instrumentation = NopInstrumentation{}

// InsertCall satisfies the Instrumentation interface.
func (i NopInstrumentation) InsertCall() {}

// InsertRecordCount satisfies the Instrumentation interface.
func (i NopInstrumentation) InsertRecordCount(int) {}

// InsertCallDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) InsertCallDuration(time.Duration) {}

// InsertRecordDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) InsertRecordDuration(time.Duration) {}

// InsertQuorumFailure satisfies the Instrumentation interface.
func (i NopInstrumentation) InsertQuorumFailure() {}

// SelectCall satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectCall() {}

// SelectKeys satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectKeys(int) {}

// SelectSendTo satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectSendTo(int) {}

// SelectFirstResponseDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectFirstResponseDuration(time.Duration) {}

// SelectPartialError satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectPartialError() {}

// SelectBlockingDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectBlockingDuration(time.Duration) {}

// SelectOverheadDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectOverheadDuration(time.Duration) {}

// SelectDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectDuration(time.Duration) {}

// SelectSendAllPermitGranted satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectSendAllPermitGranted() {}

// SelectSendAllPermitRejected satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectSendAllPermitRejected() {}

// SelectSendAllPromotion satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectSendAllPromotion() {}

// SelectRetrieved satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectRetrieved(int) {}

// SelectReturned satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectReturned(int) {}

// SelectRepairNeeded satisfies the Instrumentation interface.
func (i NopInstrumentation) SelectRepairNeeded(int) {}

// DeleteCall satisfies the Instrumentation interface.
func (i NopInstrumentation) DeleteCall() {}

// DeleteRecordCount satisfies the Instrumentation interface.
func (i NopInstrumentation) DeleteRecordCount(int) {}

// DeleteCallDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) DeleteCallDuration(time.Duration) {}

// DeleteRecordDuration satisfies the Instrumentation interface.
func (i NopInstrumentation) DeleteRecordDuration(time.Duration) {}

// DeleteQuorumFailure satisfies the Instrumentation interface.
func (i NopInstrumentation) DeleteQuorumFailure() {}

// RepairCall satisfies the Instrumentation interface.
func (i NopInstrumentation) RepairCall() {}

// RepairRequest satisfies the Instrumentation interface.
func (i NopInstrumentation) RepairRequest(int) {}

// RepairDiscarded satisfies the Instrumentation interface.
func (i NopInstrumentation) RepairDiscarded(int) {}

// RepairWriteSuccess satisfies the Instrumentation interface.
func (i NopInstrumentation) RepairWriteSuccess(int) {}

// RepairWriteFailure satisfies the Instrumentation interface.
func (i NopInstrumentation) RepairWriteFailure(int) {}

// WalkKeys satisfies the Instrumentation interface.
func (i NopInstrumentation) WalkKeys(int) {}
package physics
import (
"fmt"
"image/color"
"math"
"os"
// anonymous import for png decoder
_ "image/png"
"github.com/hajimehoshi/ebiten/v2"
"github.com/hajimehoshi/ebiten/v2/ebitenutil"
"github.com/jtbonhomme/asteboids/internal/vector"
)
// ID displays physic body unique ID.
func (pb *Body) ID() string {
return pb.id.String()
}
// String displays physic body information as a string.
func (pb *Body) String() string {
return fmt.Sprintf("%s: pos [%d, %d]\nsize [%d, %d] orient %0.2f rad (%0.0f °)\nvel {%0.2f %0.2f} acc {%0.2f %0.2f}",
pb.Type(),
int(pb.position.X),
int(pb.position.Y),
int(pb.PhysicWidth),
int(pb.PhysicHeight),
pb.Orientation,
pb.Orientation*180/math.Pi,
pb.Velocity().X,
pb.Velocity().Y,
pb.Acceleration().X,
pb.Acceleration().Y)
}
// Intersect reports whether this body's axis-aligned bounding box overlaps
// p's. Both boxes are anchored at the body position and extend by the
// body's dimensions.
// https://developer.mozilla.org/en-US/docs/Games/Techniques/2D_collision_detection
func (pb *Body) Intersect(p Physic) bool {
	aMinX, aMinY := pb.position.X, pb.position.Y
	aMaxX := aMinX + pb.Dimension().W
	aMaxY := aMinY + pb.Dimension().H
	bMinX, bMinY := p.Position().X, p.Position().Y
	bMaxX := bMinX + p.Dimension().W
	bMaxY := bMinY + p.Dimension().H
	// Overlap on both axes simultaneously.
	return aMinX < bMaxX && aMinY < bMaxY && aMaxX > bMinX && aMaxY > bMinY
}
// IntersectMultiple checks if multiple physical bodies are colliding with
// the first; it returns the ID of the first colliding body and true, or
// ("", false) when nothing collides. Map iteration order is random, so
// which collision is reported is nondeterministic when several overlap.
func (pb *Body) IntersectMultiple(physics map[string]Physic) (string, bool) {
	for _, p := range physics {
		if pb.Intersect(p) {
			pb.Log.Warnf("%s [%d , %d] (%dx%d) intersect with %s [%d , %d] (%dx%d)",
				pb.ID(),
				int(pb.Position().X), int(pb.Position().Y),
				int(pb.Dimension().W), int(pb.Dimension().H),
				p.ID(),
				int(p.Position().X), int(p.Position().Y),
				int(p.Dimension().W), int(p.Dimension().H))
			return p.ID(), true
		}
	}
	return "", false
}
// Dimension returns physical body dimension.
func (pb *Body) Dimension() Size {
	return Size{
		H: pb.PhysicHeight,
		W: pb.PhysicWidth,
	}
}

// Position returns physical body position.
func (pb *Body) Position() vector.Vector2D {
	return pb.position
}

// Velocity returns physical body velocity.
func (pb *Body) Velocity() vector.Vector2D {
	return pb.velocity
}

// LimitVelocity limits the physical body maximum velocity.
func (pb *Body) LimitVelocity(maxVelocity float64) {
	pb.maxVelocity = maxVelocity
}

// Move sets the physical body position.
func (pb *Body) Move(position vector.Vector2D) {
	pb.position = position
}

// Accelerate sets the physical body acceleration.
func (pb *Body) Accelerate(acceleration vector.Vector2D) {
	pb.acceleration = acceleration
}

// Acceleration returns physical body acceleration.
func (pb *Body) Acceleration() vector.Vector2D {
	return pb.acceleration
}
// DrawBodyBoundaryBox draws the body's axis-aligned bounding box on screen
// as four grey line segments.
//
// The corner coordinates and color are computed once instead of being
// repeated per edge, removing the four near-identical DrawLine call sites.
func (pb *Body) DrawBodyBoundaryBox(screen *ebiten.Image) {
	var (
		left   = pb.position.X - pb.PhysicWidth/2
		right  = pb.position.X + pb.PhysicWidth/2
		top    = pb.position.Y - pb.PhysicHeight/2
		bottom = pb.position.Y + pb.PhysicHeight/2
		grey   = color.Gray16{0x6666}
	)
	// Each segment is {x1, y1, x2, y2}.
	segments := [4][4]float64{
		{left, top, right, top},       // top edge
		{right, top, right, bottom},   // right edge
		{left, bottom, right, bottom}, // bottom edge
		{left, top, left, bottom},     // left edge
	}
	for _, s := range segments {
		ebitenutil.DrawLine(screen, s[0], s[1], s[2], s[3], grey)
	}
}
// isElementOf reports whether elt occurs in arr.
func isElementOf(elt string, arr []string) bool {
	for _, candidate := range arr {
		if candidate == elt {
			return true
		}
	}
	return false
}
// LinkAgents draws a line from this body to every agent whose type appears
// in agentTypes.
func (pb *Body) LinkAgents(screen *ebiten.Image, agents []Physic, agentTypes []string) {
	for _, a := range agents {
		if isElementOf(a.Type(), agentTypes) {
			// Draw line between agents
			ebitenutil.DrawLine(
				screen,
				pb.Position().X, pb.Position().Y,
				a.Position().X, a.Position().Y,
				color.Gray16{0x2264},
			)
		}
	}
}

// Type returns physical body agent type as a string.
func (pb *Body) Type() string {
	return pb.AgentType
}

// Explode proceeds the agent explosion and termination by unregistering
// the agent from the simulation.
func (pb *Body) Explode() {
	pb.Unregister(pb.ID(), pb.Type())
}

// Dump writes out the agent's internal state to f, returning any write
// error.
func (pb *Body) Dump(f *os.File) error {
	_, err := f.Write([]byte("\n *** " + pb.ID() + " ***\n" + pb.String() + "\n"))
	return err
}
// NewBody creates a body
func NewBody(x, y, w, h float64) *Body {
return &Body{
position: vector.Vector2D{
X: x,
Y: y,
},
PhysicWidth: w,
PhysicHeight: h,
}
} | internal/physics/utils.go | 0.7696 | 0.508849 | utils.go | starcoder |
package scimark2
import (
"math"
"math/rand"
"time"
"github.com/EntityFX/EntityFX-Bench/src/go/entityfx/utils"
)
// measureFFT reports the FFT kernel throughput in MFlops. It doubles the
// cycle count until a forward+inverse transform loop over a random vector
// of length 2N runs for at least mintime seconds, then validates that the
// transform still round-trips; 0 is returned on validation failure.
//
// Idiom fix: `for true` replaced with the canonical bare `for`
// (staticcheck S1006).
func measureFFT(N int, mintime float64) float64 {
	x := randomVector(2 * N)
	var cycles int64 = 1
	elapsed := 0.0
	for {
		start := float64(utils.MakeTimestamp()) / 1000.0
		for i := int64(0); i < cycles; i++ {
			fft_transform(x)
			fft_inverse(x)
		}
		elapsed = float64(utils.MakeTimestamp())/1000.0 - start
		if elapsed >= mintime {
			break
		}
		cycles *= 2
	}
	// Reject the measurement if the transform no longer round-trips.
	const EPS float64 = 1.0e-10
	if fft_test(x)/float64(N) > EPS {
		return .0
	}
	return fft_num_flops(N) * float64(cycles) / elapsed * 1.0e-6
}
// measureSOR reports the SOR kernel throughput in MFlops over a random
// N x N matrix, doubling the cycle count until one timed run lasts at
// least min_time seconds.
//
// Idiom fix: `for true` replaced with the canonical bare `for`.
func measureSOR(N int, min_time float64) float64 {
	G := randomMatrix(N, N)
	elapsed := 0.0
	cycles := 1
	for {
		start := float64(utils.MakeTimestamp()) / 1000.0
		sor_execute(1.25, G, cycles)
		elapsed = float64(utils.MakeTimestamp())/1000.0 - start
		if elapsed >= min_time {
			break
		}
		cycles *= 2
	}
	return sor_num_flops(N, N, cycles) / elapsed * 1.0e-6
}
// measureMonteCarlo reports the Monte Carlo integration kernel throughput
// in MFlops, doubling the cycle count until one timed run lasts at least
// min_time seconds.
//
// Idiom fix: `for (true)` replaced with the canonical bare `for`.
func measureMonteCarlo(min_time float64) float64 {
	elapsed := 0.0
	cycles := 1
	for {
		start := float64(utils.MakeTimestamp()) / 1000.0
		mc_integrate(cycles)
		elapsed = float64(utils.MakeTimestamp())/1000.0 - start
		if elapsed >= min_time {
			break
		}
		cycles *= 2
	}
	return mc_num_flops(cycles) / elapsed * 1.0e-6
}
// measureSparseMatmult reports the sparse matrix-vector multiply
// throughput in MFlops for an N x N compressed-row matrix with roughly nz
// non-zeros (nr = nz/N per row).
//
// Idiom fix: `for true` replaced with the canonical bare `for`.
func measureSparseMatmult(N int, nz int, min_time float64) float64 {
	x := randomVector(N)
	y := make([]float64, N)
	// nr non-zeros per row; anz entries actually populated.
	nr := nz / N
	anz := nr * N
	val := randomVector(anz)
	col := make([]int, anz)
	row := make([]int, N+1)
	row[0] = 0
	for r := 0; r < N; r++ {
		rowr := row[r]
		row[r+1] = rowr + nr
		// Spread the nr entries of row r across the columns.
		step := r / nr
		if step < 1 {
			step = 1 // take at least unit-size steps
		}
		for i := 0; i < nr; i++ {
			col[rowr+i] = i * step
		}
	}
	elapsed := 0.0
	cycles := 1
	for {
		start := float64(utils.MakeTimestamp()) / 1000.0
		sparse_cr_matmult(y, val, row, col, x, cycles)
		elapsed = float64(utils.MakeTimestamp())/1000.0 - start
		if elapsed >= min_time {
			break
		}
		cycles *= 2
	}
	return sparse_cr_num_flops(N, nz, cycles) / elapsed * 1.0e-6
}
// measureLU reports the LU factorization throughput in MFlops. After
// timing, it verifies the factorization by solving A x = b and checking
// the per-element residual; 0 is returned when verification fails.
//
// Idiom fix: `for true` replaced with the canonical bare `for`.
func measureLU(N int, min_time float64) float64 {
	A := randomMatrix(N, N)
	lu := make([][]float64, N)
	for i := range lu {
		lu[i] = make([]float64, N)
	}
	pivot := make([]int, N)
	elapsed := 0.0
	cycles := 1
	for {
		start := float64(utils.MakeTimestamp()) / 1000.0
		for i := 0; i < cycles; i++ {
			copyMatrix(lu, A)
			lu_factor(lu, pivot)
		}
		elapsed = float64(utils.MakeTimestamp())/1000.0 - start
		if elapsed >= min_time {
			break
		}
		cycles *= 2
	}
	// Verify: solve with the factorization and check |b - A x| / N.
	b := randomVector(N)
	x := newVectorCopy(b)
	lu_solve_matrix(lu, pivot, x)
	const EPS = 1.0e-12
	if normabs(b, matvec(A, x))/float64(N) > EPS {
		return .0
	}
	return lu_num_flops(N) * float64(cycles) / elapsed * 1.0e-6
}
// newVectorCopy returns a freshly allocated copy of x.
// Uses the builtin copy (memmove) instead of an element-by-element loop.
func newVectorCopy(x []float64) []float64 {
	y := make([]float64, len(x))
	copy(y, x)
	return y
}
// copyVector copies the elements of A into the pre-allocated vector B.
// Uses the builtin copy (memmove) instead of an element-by-element loop.
func copyVector(B []float64, A []float64) {
	copy(B, A)
}
// normabs returns the L1 norm of the difference between x and y, i.e. the
// sum of |x[i] - y[i]|.
func normabs(x []float64, y []float64) float64 {
	var sum float64
	for i := range x {
		sum += math.Abs(x[i] - y[i])
	}
	return sum
}
// copyMatrix copies the contents of A into the pre-allocated matrix B,
// row by row.
//
// The previous hand-unrolled 4-wide loop is replaced with the builtin
// copy, which lowers to memmove — simpler and at least as fast.
func copyMatrix(B [][]float64, A [][]float64) {
	for i := range A {
		copy(B[i], A[i])
	}
}
// randomMatrix returns an M x N matrix of uniform [0, 1) values.
//
// Fix: the row loop was previously bounded by N instead of M, which
// panicked with an index out of range when N > M and left rows unallocated
// (nil) when M > N; the two dimensions are now iterated independently.
func randomMatrix(M int, N int) [][]float64 {
	A := make([][]float64, M)
	for i := 0; i < M; i++ {
		A[i] = make([]float64, N)
		for j := 0; j < N; j++ {
			A[i][j] = rand.Float64()
		}
	}
	return A
}
// init seeds the shared PRNG once at package load time.
//
// Fix: randomVector previously reseeded with the current Unix *second* on
// every call, so two calls within the same second produced identical
// "random" vectors; seeding now happens exactly once, with nanosecond
// resolution.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// randomVector returns a vector of N uniform [0, 1) values.
func randomVector(N int) []float64 {
	A := make([]float64, N)
	for i := range A {
		A[i] = rand.Float64()
	}
	return A
}
// matvec returns the matrix-vector product A * x.
//
// Fix: the result was previously allocated with len(x) (the column count),
// but matvec_n writes one entry per row of A, which panicked for
// non-square matrices; the result is now sized by the row count.
func matvec(A [][]float64, x []float64) []float64 {
	y := make([]float64, len(A))
	matvec_n(A, x, y)
	return y
}

// matvec_n computes y = A * x in place, writing one dot product per row of
// A into the caller-supplied y (which must have at least len(A) entries).
func matvec_n(A [][]float64, x []float64, y []float64) {
	M := len(A)
	N := len(A[0])
	for i := 0; i < M; i++ {
		sum := .0
		ai := A[i]
		for j := 0; j < N; j++ {
			sum += ai[j] * x[j]
		}
		y[i] = sum
	}
}
package smudge
// decodeByte reads the single byte at startIndex and returns it together
// with the index of the next unread byte.
func decodeByte(bytes []byte, startIndex int) (byte, int) {
	b := bytes[startIndex]
	return b, startIndex + 1
}

// decodeUint8 reads one byte as a uint8; identical to decodeByte since
// byte and uint8 are the same type.
func decodeUint8(bytes []byte, startIndex int) (uint8, int) {
	return decodeByte(bytes, startIndex)
}
// decodeUint16 reads a little-endian uint16 starting at startIndex and
// returns it with the index of the next unread byte.
func decodeUint16(bytes []byte, startIndex int) (uint16, int) {
	lo := uint16(bytes[startIndex])
	hi := uint16(bytes[startIndex+1])
	return hi<<8 | lo, startIndex + 2
}

// decodeUint32 reads a little-endian uint32 starting at startIndex and
// returns it with the index of the next unread byte.
func decodeUint32(bytes []byte, startIndex int) (uint32, int) {
	var n uint32
	// Fold the bytes in from most- to least-significant.
	for k := 3; k >= 0; k-- {
		n = n<<8 | uint32(bytes[startIndex+k])
	}
	return n, startIndex + 4
}

// decodeUint64 reads a little-endian uint64 starting at startIndex and
// returns it with the index of the next unread byte.
func decodeUint64(bytes []byte, startIndex int) (uint64, int) {
	var n uint64
	for k := 7; k >= 0; k-- {
		n = n<<8 | uint64(bytes[startIndex+k])
	}
	return n, startIndex + 8
}
// encodeByte writes number at startIndex and returns the number of bytes
// written (always 1).
func encodeByte(number byte, bytes []byte, startIndex int) int {
	bytes[startIndex] = number
	return 1
}

// encodeUint8 writes number at startIndex; identical to encodeByte since
// byte and uint8 are the same type.
func encodeUint8(number uint8, bytes []byte, startIndex int) int {
	return encodeByte(number, bytes, startIndex)
}
// encodeUint16 writes number little-endian at startIndex and returns the
// number of bytes written (2).
func encodeUint16(number uint16, bytes []byte, startIndex int) int {
	for k := 0; k < 2; k++ {
		bytes[startIndex+k] = byte(number >> uint(8*k))
	}
	return 2
}

// encodeUint32 writes number little-endian at startIndex and returns the
// number of bytes written (4).
func encodeUint32(number uint32, bytes []byte, startIndex int) int {
	for k := 0; k < 4; k++ {
		bytes[startIndex+k] = byte(number >> uint(8*k))
	}
	return 4
}

// encodeUint64 writes number little-endian at startIndex and returns the
// number of bytes written (8).
func encodeUint64(number uint64, bytes []byte, startIndex int) int {
	for k := 0; k < 8; k++ {
		bytes[startIndex+k] = byte(number >> uint(8*k))
	}
	return 8
}
package kriging
import (
"sort"
vec2d "github.com/flywave/go3d/float64/vec2"
vec3d "github.com/flywave/go3d/float64/vec3"
"github.com/flywave/go-geo"
)
// Coordinates is a list of XYZ sample points.
type Coordinates []vec3d.T

// Len implements sort.Interface.
func (s Coordinates) Len() int {
	return len(s)
}

// Less orders points row-major: primarily by Y, then by X within a row.
func (s Coordinates) Less(i, j int) bool {
	if s[i][1] != s[j][1] {
		return s[i][1] < s[j][1]
	}
	return s[i][0] < s[j][0]
}

// Swap implements sort.Interface.
func (s Coordinates) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}
// Grid is a regular raster of sample coordinates with recorded value
// bounds and spatial reference.
type Grid struct {
	Width       int
	Height      int
	Coordinates Coordinates // Width*Height points, row-major
	Count       int         // Width * Height
	Minimum     float64
	Maximum     float64
	box         *vec3d.Box // optional precomputed bounds; nil means compute on demand
	srs         geo.Proj
}

// NewGrid returns a Grid of the given size with Minimum/Maximum primed to
// +/-15000 sentinels so real samples always replace them (presumably an
// elevation range — confirm against the kriging callers).
func NewGrid(width, height int) *Grid {
	return &Grid{Width: width, Height: height, Count: width * height, Minimum: 15000, Maximum: -15000}
}
// caclulatePixelSize returns the {x, y} span of one pixel for a grid of
// width x height cells covering bbox. (The name's typo is preserved for
// compatibility with its callers.)
func caclulatePixelSize(width, height int, bbox vec2d.Rect) []float64 {
	spanX := bbox.Max[0] - bbox.Min[0]
	spanY := bbox.Max[1] - bbox.Min[1]
	return []float64{spanX / float64(width), spanY / float64(height)}
}
// CaclulateGrid builds a Grid of width x height coordinates covering
// georef's bounding box, one point per pixel with Z initialized to 0.
// Rows are generated from y = Height-1 down to 0. (The name's typo is
// preserved for API compatibility.)
func CaclulateGrid(width, height int, georef *geo.GeoReference) *Grid {
	grid := NewGrid(width, height)
	grid.Count = grid.Width * grid.Height
	grid.srs = georef.GetSrs()
	coords := make(Coordinates, 0, grid.Count)
	pixelSize := caclulatePixelSize(grid.Width, grid.Height, georef.GetBBox())
	for y := grid.Height - 1; y >= 0; y-- {
		latitude := georef.GetOrigin()[1] + (float64(pixelSize[1]) * float64(y))
		for x := 0; x < grid.Width; x++ {
			longitude := georef.GetOrigin()[0] + (float64(pixelSize[0]) * float64(x))
			coords = append(coords, vec3d.T{longitude, latitude, 0})
		}
	}
	grid.Coordinates = coords
	return grid
}
// GetRect returns the grid bounds as a 2D rectangle (dropping Z).
func (h *Grid) GetRect() vec2d.Rect {
	bbox := h.GetBBox()
	return vec2d.Rect{Min: vec2d.T{bbox.Min[0], bbox.Min[1]}, Max: vec2d.T{bbox.Max[0], bbox.Max[1]}}
}

// GetBBox returns the preset bounding box when one was assigned; otherwise
// it computes the bounds of all coordinates on every call (the computed
// result is not cached into h.box).
func (h *Grid) GetBBox() vec3d.Box {
	if h.box == nil {
		r := vec3d.Box{Min: vec3d.MaxVal, Max: vec3d.MinVal}
		for i := range h.Coordinates {
			r.Extend(&h.Coordinates[i])
		}
		return r
	}
	return *h.box
}

// GetRange returns the spread between the recorded maximum and minimum.
func (h *Grid) GetRange() float64 {
	return h.Maximum - h.Minimum
}

// Sort orders the coordinates row-major (by Y, then X).
func (h *Grid) Sort() {
	sort.Sort(h.Coordinates)
}

// Value returns the Z value of the sample at (row, column).
func (h *Grid) Value(row, column int) float64 {
	return h.Coordinates[row*h.Width+column][2]
}
func (h *Grid) GetDate() ([]float64, [2]uint32, vec2d.Rect, geo.Proj) {
tiledata := make([]float64, h.Width*h.Height)
row, col := h.Height, h.Width
for x := 0; x < col; x++ {
for y := 0; y < row; y++ {
tiledata[y*col+x] = h.Value(y, x)
}
}
return tiledata, [2]uint32{uint32(h.Width), uint32(h.Height)}, h.GetRect(), h.srs
} | grid.go | 0.817829 | 0.471588 | grid.go | starcoder |
package exp
// using tree data structure to parse and evaluate mathematical expressions
// precedence by power, division, multiplication, addition, and subtraction
import (
"fmt"
"math"
"strconv"
"github.com/dockerian/go-coding/ds/str"
u "github.com/dockerian/go-coding/utils"
)
var (
	/* see https://golang.org/ref/spec#Operators
	   precedence    operator
	       1             ||
	       2             &&
	       3             ==  !=  <  <=  >  >=
	       4             +  -  |  ^
	       5             *  /  %  <<  >>  &  &^
	*/
	// opPriority maps each recognized operator token to its binding
	// priority; "(" gets 0 so it never wins a priority comparison.
	opPriority = map[string]int{
		"(":  0,
		"||": 11,
		"&&": 21,
		"+":  41,
		"-":  42,
		"|":  43,
		"^":  44,
		"*":  51,
		"/":  52,
		"%":  53,
		"<<": 54,
		">>": 55,
		"&":  56,
		"&^": 57,
	}
	// opByLength holds the operator tokens sorted by length (filled in by
	// init); checkOperatorAt scans it from the back to match multi-char
	// operators like "&^" before "&".
	opByLength []string
)
// init collects the operator tokens from opPriority into opByLength and
// sorts them by length; checkOperatorAt scans the slice from the back so
// that, assuming ascending length order, longer operators match first
// (confirm str.ByLength's ordering).
func init() {
	i := 0
	opByLength = make([]string, len(opPriority))
	for op := range opPriority {
		opByLength[i] = op
		i++
	}
	str.ByLength.Sort(opByLength)
}

// Exp parses and evaluates a mathematical expression via a binary parse
// tree built with an operator stack and an operand stack.
type Exp struct {
	context   string   // raw expression text
	operators *OpStack // operator stack used while parsing
	operands  *OpStack // operand/subtree stack used while parsing
	opTree    *OpItem  // parse tree root, built lazily by init()
	tokens    []string // tokenized expression
}
// New returns a new instance of Exp for expression s, parsing it eagerly.
func New(s string) *Exp {
	exp := &Exp{context: s}
	exp.init()
	return exp
}

// Eval returns the calculated result of the expression. Any evaluation
// error is discarded; 0 is returned in that case.
func (e *Exp) Eval() float64 {
	e.init()
	result, _ := e.eval(e.opTree)
	return result
}

// String implements fmt.Stringer, returning the fully parenthesized form
// of the parsed expression.
func (e *Exp) String() string {
	return e.toString()
}
// buildParseNode pops one operator and two operands (right first, then
// left) and combines them into a single subtree whose Expression is the
// parenthesized form "(left op right)".
func (e *Exp) buildParseNode() *OpItem {
	// Pop order matters: ob is the right operand, oa the left.
	op, ob, oa := e.operators.PopItem(), e.operands.PopItem(), e.operands.PopItem()
	// u.Debug("build node: op = %+v, b = %+v, a = %+v\n", op, ob, oa)
	so, sp, sb, sa := "", "", "", ""
	if oa != nil {
		sa = oa.Expression
		oa.Parent = op
	}
	if ob != nil {
		sb = ob.Expression
		ob.Parent = op
	}
	if op != nil {
		sp = op.Expression
		so = op.Op
	}
	exprText := fmt.Sprintf("(%v %v %v)", sa, sp, sb)
	op.Left, op.Right, op.Op, op.Expression = oa, ob, so, exprText
	return op
}

// buildParseTree runs a shunting-yard-style pass over the tokens, reducing
// the operator/operand stacks into a binary parse tree, and returns its
// root.
func (e *Exp) buildParseTree() *OpItem {
	groupCount := 0 // depth of currently open "(" groups
	for _, token := range e.tokens {
		peekToken := e.operators.Peek()
		peekPriority, _ := opPriority[peekToken]
		priority, isOperator := opPriority[token]
		switch {
		case token == "(":
			e.operators.Push(token)
			groupCount++
		case token == ")":
			if groupCount > 0 {
				// Reduce until the matching "(" is reached.
				peek := e.operators.Peek()
				for peek != "(" && peek != "" {
					item := e.buildParseNode()
					e.operands.PushItem(item)
					peek = e.operators.Peek()
				}
				e.operators.Pop() // discard the "("
				groupCount--
			}
		case isOperator:
			if e.operators.IsEmpty() || peekPriority < priority {
				e.operators.Push(token)
			} else {
				// Reduce operators of equal or higher priority first.
				for peekToken != "" && priority < peekPriority {
					item := e.buildParseNode()
					e.operands.PushItem(item)
					peekToken = e.operators.Peek()
					peekPriority, _ = opPriority[peekToken]
					if peekToken == "(" {
						peekToken = "" // never reduce past a group start
					}
				}
				e.operators.Push(token)
			}
		default:
			// Anything else is treated as an operand literal.
			e.operands.Push(token)
		}
	}
	// Reduce whatever remains on the operator stack.
	for !e.operators.IsEmpty() {
		item := e.buildParseNode()
		e.operands.PushItem(item)
	}
	return e.operands.PopItem()
}
// calc parses both operands to float64 and applies op via calcOp. The
// first operand may be empty (treated as 0.0, enabling unary forms like
// "-5"). Returns an error for unparsable operands or an unknown operator.
func (e *Exp) calc(op, a, b string) (float64, error) {
	var result float64
	if a == "" {
		a = "0.0" // allow 1st operand to be empty
	}
	fa, err1 := strconv.ParseFloat(a, 64)
	if err1 != nil {
		return result, fmt.Errorf("Cannot parse '%v' to float64: %v", a, err1)
	}
	fb, err2 := strconv.ParseFloat(b, 64)
	if err2 != nil {
		return result, fmt.Errorf("Cannot parse '%v' to float64: %v", b, err2)
	}
	if _, ok := opPriority[op]; op != "" && !ok {
		return result, fmt.Errorf("Unknown operator '%v'", op)
	}
	return e.calcOp(op, fa, fb), nil
}

// calcOp evaluates a single binary operation on floats. The empty operator
// returns fb unchanged (used for leaf values) and "^" is interpreted as
// exponentiation, not XOR. NOTE(review): operators present in opPriority
// but not handled here ("%", "<<", "&&", ...) silently evaluate to 0 —
// confirm whether that is intended.
func (e *Exp) calcOp(op string, fa, fb float64) float64 {
	result := 0.0
	// u.Debug("calcOp: '%+v' (%+v, %+v)\n", op, fa, fb)
	switch op {
	case "":
		result = fb
	case "+":
		result = fa + fb
	case "-":
		result = fa - fb
	case "*":
		result = fa * fb
	case "/":
		result = fa / fb
	case "^":
		result = math.Pow(fa, fb)
	default:
	}
	return result
}
// checkOperatorAt reports whether an operator token begins at byte offset
// i of the expression, returning the matched operator. opByLength is
// scanned from the back so that — presumably — multi-character operators
// like "&^" are tried before their single-character prefixes.
func (e *Exp) checkOperatorAt(i int) (bool, string) {
	limit := len(e.context)
	for n := len(opByLength) - 1; n >= 0; n-- {
		candidate := opByLength[n]
		end := i + len(candidate)
		if end <= limit && e.context[i:end] == candidate {
			return true, candidate
		}
	}
	return false, ""
}
// eval recursively evaluates the parse tree: leaves hold their literal
// text in Op and are parsed via calc; interior nodes evaluate both
// children and combine them with calcOp. A nil node evaluates to 0.
func (e *Exp) eval(node *OpItem) (float64, error) {
	if node != nil {
		if node.Left == nil && node.Right == nil {
			// Leaf: node.Op holds the operand text, not an operator.
			return e.calc("", "", node.Op)
		}
		op1, err := e.eval(node.Left)
		if err != nil {
			return 0.0, err
		}
		op2, err := e.eval(node.Right)
		if err != nil {
			return 0.0, err
		}
		return e.calcOp(node.Op, op1, op2), nil
	}
	return 0.0, nil
}
// getOperandAt returns the operand substring starting at byte offset i,
// scanning forward until an operator, a parenthesis, whitespace, or the
// end of the expression is reached.
func (e *Exp) getOperandAt(i int) string {
	siz := len(e.context)
	var j int
	for j = i + 1; j < siz; j++ {
		ch := e.context[j]
		isOperator, _ := e.checkOperatorAt(j)
		if isOperator || ch == '(' || ch == ')' || e.isSpace(ch) {
			break
		}
	}
	if 0 <= i && j <= siz {
		return e.context[i:j]
	}
	return ""
}
// init lazily tokenizes the expression and builds the parse tree on first
// use; subsequent calls are no-ops (guarded by the operators stack).
func (e *Exp) init() {
	if e.operators == nil {
		u.Debug("context: '%v' ---->\n", e.context)
		e.operators = &OpStack{}
		e.operands = &OpStack{}
		e.tokens = e.tokenize()
		e.opTree = e.buildParseTree()
	}
}
// isSpace reports whether ch is an ASCII whitespace character.
func (e *Exp) isSpace(ch byte) bool {
	switch ch {
	case ' ', '\t', '\r', '\n':
		return true
	}
	return false
}
// tokenize converts the expression context to tokens: parentheses,
// operators (longest match first), and operands; whitespace is skipped.
func (e *Exp) tokenize() []string {
	siz := len(e.context)
	tokens := make([]string, 0, 1)
	for i := 0; i < siz; i++ {
		ch := e.context[i]
		switch {
		case e.isSpace(ch):
			continue
		case ch == '(':
			tokens = append(tokens, "(")
		case ch == ')':
			tokens = append(tokens, ")")
		default:
			isOperator, op := e.checkOperatorAt(i)
			if !isOperator {
				op = e.getOperandAt(i)
			}
			tokens = append(tokens, op)
			// Skip the characters consumed by this token.
			i = i + len(op) - 1
		}
	}
	return tokens
}
func (e *Exp) toString() string {
e.init()
if e.opTree != nil {
return e.opTree.Expression
}
return ""
} | ds/exp/exp.go | 0.608012 | 0.47859 | exp.go | starcoder |
package main
import (
"bytes"
"fmt"
"go/ast"
)
/*
We want to find any types used in interface methods that are defined in the
interface package, and so aren't qualified by a package name.
We may want to add a package name to these types.
We're looking for Fields within Fieldlists (either for parameters or returns)
A local interface will show up as a Field with a Type which is an Ident. The
Name of the Ident will be the interface name. The Obj of the Ident will be of
Kind type
A local struct is similar to a local interface
An imported interface shows up with a Type which is a SelectorExpr. An imported
struct is similar
A base type shows with a Type that is an Ident with no Obj
*/
// qualifyLocalTypes walks n and prefixes locally-defined types used in
// function signatures with localPkgName; it reports whether any
// qualification was added.
func qualifyLocalTypes(n ast.Node, localPkgName string) bool {
	v := &QualifyLocalTypesVisitor{
		pkg: ast.NewIdent(localPkgName),
	}
	ast.Walk(v, n)
	return v.added
}

// QualifyLocalTypesVisitor locates func types whose parameter/result types
// may need qualification with the local package selector.
type QualifyLocalTypesVisitor struct {
	// This is the local package selector
	pkg   *ast.Ident
	added bool // set once at least one selector has been inserted
}
// Visit descends until it reaches a FuncType, then hands that subtree to a
// TypeObjVistor which rewrites the parameter and result types in place.
func (q *QualifyLocalTypesVisitor) Visit(n ast.Node) ast.Visitor {
	// We're looking for fields within field lists within Params or Results of a FuncType
	switch n := n.(type) {
	case *ast.FuncType:
		to := &TypeObjVistor{
			q:         q,
			ancestors: []ast.Node{n},
		}
		return to
	}
	return q
}

// TypeObjVistor (sic — name kept for compatibility) rewrites
// locally-declared type identifiers under a FuncType into
// package-qualified selector expressions, tracking the ancestor chain so
// each identifier's parent node can be patched.
type TypeObjVistor struct {
	q         *QualifyLocalTypesVisitor
	ancestors []ast.Node // path from the FuncType down to the current node
}
// Visit replaces each type identifier (an Ident whose Obj has Kind Typ)
// with a package-qualified selector on whichever parent node holds it.
// Non-nil nodes are pushed onto the ancestor stack; the nil visit on the
// way back up pops them, so ancestors[len-1] is always the current parent.
func (to *TypeObjVistor) Visit(n ast.Node) ast.Visitor {
	switch n := n.(type) {
	case *ast.Ident:
		if n.Obj != nil && n.Obj.Kind == ast.Typ {
			// Patch the reference in place on the immediate parent.
			p := to.ancestors[len(to.ancestors)-1]
			switch p := p.(type) {
			case *ast.Field:
				p.Type = to.buildSelector(n)
			case *ast.StarExpr:
				p.X = to.buildSelector(n)
			case *ast.ArrayType:
				p.Elt = to.buildSelector(n)
			case *ast.MapType:
				if p.Key == n {
					p.Key = to.buildSelector(n)
				} else if p.Value == n {
					p.Value = to.buildSelector(n)
				}
			case *ast.ChanType:
				p.Value = to.buildSelector(n)
			default:
				fmt.Printf("Unexpected type %T\n", p)
				printNode(p)
			}
			// Returning nil stops the walk below this identifier.
			return nil
		}
	}
	// We track ancestor nodes so we always know this node's immediate parent
	if n == nil {
		to.ancestors = to.ancestors[:len(to.ancestors)-1]
		// fmt.Printf("shrink to %d\n", len(to.ancestors))
	} else {
		to.ancestors = append(to.ancestors, n)
		// fmt.Printf("grow to %d %T\n", len(to.ancestors), n)
	}
	return to
}
// buildSelector wraps the identifier in a selector qualified by the local
// package name and records that a qualification was added.
func (to *TypeObjVistor) buildSelector(n *ast.Ident) *ast.SelectorExpr {
	to.q.added = true
	sel := &ast.SelectorExpr{X: to.q.pkg, Sel: ast.NewIdent(n.Name)}
	return sel
}
// printNode pretty-prints the AST rooted at n to stdout.
//
// Fix: the previous version passed the rendered tree to fmt.Printf as the
// format string, so any '%' in the dump was misinterpreted as a verb (and
// go vet flags the non-constant format string). fmt.Print emits the text
// verbatim.
func printNode(n ast.Node) {
	var w bytes.Buffer
	ast.Fprint(&w, nil, n, nil)
	fmt.Print(w.String())
}
package blueprint
import (
"encoding/json"
"io/ioutil"
"math"
"strings"
rand7i "github.com/7i/rand"
"github.com/karlek/wasabi/coloring"
"github.com/karlek/wasabi/fractal"
"github.com/karlek/wasabi/iro"
"github.com/karlek/wasabi/mandel"
"github.com/karlek/wasabi/plot"
"github.com/karlek/wasabi/render"
"github.com/sirupsen/logrus"
)
// Blueprint contains the settings and options needed to render a fractal.
// It is populated from a JSON file (see Parse); field names map to JSON
// keys via Go's default encoding/json field matching.
type Blueprint struct {
	Iterations float64 // Number of iterations.
	Bailout    float64 // Squared radius of the function domain. Most commonly set to 4, but it's important for planes other than Zrzi.
	Tries      float64 // The number of orbit attempts calculated by: tries * (width * height)

	Coloring string // Coloring method for the orbits.

	DrawPath    bool  // Draw the path between points in the orbit.
	PathPoints  int64 // The number of intermediate points to use for interpolation.
	BezierLevel int   // Bezier interpolation level: 1 is linear, 2 is quadratic etc.

	Width, Height int // Width and height of final image.

	Png, Jpg       bool   // Image output format.
	OutputFilename string // Output filename without (file extension).

	CacheHistograms   bool // Cache the histograms by saving them to a file.
	MultipleExposures bool // Render the image with multiple exposures.
	PlotImportance    bool // Create an image of the sampling points color graded by their importance.

	Imag float64 // Offset on the imaginary-value axis.
	Real float64 // Offset on the real-value axis.

	Zoom float64 // Zoom factor.

	Seed int64 // Random seed.

	Threshold float64 // Minimum orbit length to be registered.

	// Coefficients multiplied to the imaginary and real parts in the complex function.
	ImagCoefficient float64
	RealCoefficient float64

	Function string  // Normalization function for scaling the brightness of the pixels.
	Factor   float64 // Factor is used by the functions in various ways.
	Exposure float64 // Exposure is a scaling factor applied after the normalization function has been applied.

	RegisterMode    string // How the fractal will capture orbits. The different modes are: anti, primitive and escapes.
	ComplexFunction string // The complex function we shall explore.

	Plane string // Chose which capital plane we will plot: Crci, Crzi, Zici, Zrci, Zrcr, Zrzi.

	BaseColor iro.RGBA   // The background color.
	Gradient  []iro.RGBA // The color gradient used by the coloring methods.
	Range     []float64  // The interpolation points for the gradient.

	ZUpdate string // Chose how we shall update Z.
	CUpdate string // Chose how we shall update C.

	Theta float64 // Rotation angle. Experimental option since it demands matrix rotation which slows down the renders considerably on CPU based renders.
}
// Parse reads the blueprint JSON file at filename and unmarshals it into a
// freshly allocated Blueprint. On a read failure the returned pointer is
// nil; on an unmarshal failure the allocated (possibly partially filled)
// Blueprint is returned together with the error.
func Parse(filename string) (blue *Blueprint, err error) {
	var buf []byte
	if buf, err = ioutil.ReadFile(filename); err != nil {
		return
	}
	blue = new(Blueprint)
	err = json.Unmarshal(buf, blue)
	return
}
// Render creates a render object for the blueprint, translating the
// Function name into its scaling function and forwarding the image
// dimensions, brightness factor and exposure.
func (b *Blueprint) Render() *render.Render {
	return render.New(
		b.Width,
		b.Height,
		parseFunctionFlag(b.Function),
		b.Factor,
		b.Exposure,
	)
}
// Fractal creates a fractal object for the blueprint. It resolves every
// string option (register mode, complex function, plane, coloring, Z/C
// update strategies) into its concrete function or value and passes them,
// together with the numeric settings, to fractal.New. Note that invalid
// option strings abort the process inside the parse* helpers.
func (b *Blueprint) Fractal() *fractal.Fractal {
	// Coefficient multiplied inside the complex function we are investigating.
	coefficient := complex(b.RealCoefficient, b.ImagCoefficient)

	// Offset the fractal rendering.
	offset := complex(b.Real, b.Imag)

	// Our way of registering orbits. Either we register the orbits that either converges, diverges or both.
	registerMode := parseRegistrer(b.RegisterMode)

	// Get the complex function to find orbits with.
	f := parseComplexFunctionFlag(b.ComplexFunction)

	z := parseZandC(b.ZUpdate)
	c := parseZandC(b.CUpdate)

	colors := iro.ToColors(b.Gradient)
	method := coloring.NewColoring(b.BaseColor, parseModeFlag(b.Coloring), colors, b.Range)

	// Fill our histogram bins of the orbits.
	return fractal.New(
		b.Width,
		b.Height,
		int64(b.Iterations),
		method,
		coefficient,
		b.Bailout,
		parsePlane(b.Plane),
		f,
		b.Zoom,
		offset,
		b.PlotImportance,
		b.Seed,
		b.PathPoints,
		b.BezierLevel,
		b.Tries,
		registerMode,
		b.Theta,
		z, c,
		int64(b.Threshold))
}
// parseRegistrer parses the _registrer_ string (case-insensitive) to a
// fractal orbit registrer. Accepted values: "anti"/"converge"/"converges",
// "primitive", and "escapes"/"escape". Any other value aborts the process
// via logrus.Fatalln; the trailing return only satisfies the compiler.
func parseRegistrer(registrer string) mandel.Registrer {
	// Choose buddhabrot registrer.
	switch strings.ToLower(registrer) {
	case "anti", "converge", "converges":
		return mandel.Converged
	case "primitive":
		return mandel.Primitive
	case "escapes", "escape":
		return mandel.Escaped
	default:
		logrus.Fatalln("Unknown registrer:", registrer)
	}
	return mandel.Escaped
}
// parseFunctionFlag maps the name f (case-insensitive) to a color scaling
// function: "exp", "log", "sqrt" or "lin". Any other name aborts the
// process via logrus.Fatalln; the final return only satisfies the compiler.
func parseFunctionFlag(f string) func(float64, float64) float64 {
	scalers := map[string]func(float64, float64) float64{
		"exp":  plot.Exp,
		"log":  plot.Log,
		"sqrt": plot.Sqrt,
		"lin":  plot.Lin,
	}
	if scaler, ok := scalers[strings.ToLower(f)]; ok {
		return scaler
	}
	logrus.Fatalln("invalid color scaling function:", f)
	return plot.Exp
}
// parsePlane parses the _plane_ string (case-insensitive) to a plane
// selection function. Accepted values: zrzi, zrcr, zrci, crci, zicr, zici.
// Any other value aborts the process via logrus.Fatalln; the trailing
// return only satisfies the compiler.
func parsePlane(plane string) func(complex128, complex128) complex128 {
	switch strings.ToLower(plane) {
	case "zrzi":
		// Original.
		return fractal.Zrzi
	case "zrcr":
		// Pretty :D
		return fractal.Zrcr
	case "zrci":
		// Pretty :D
		return fractal.Zrci
	case "crci":
		// Mandelbrot perimiter.
		return fractal.Crci
	case "zicr":
		// Pretty :D
		return fractal.Zicr
	case "zici":
		// Pretty :D
		return fractal.Zici
	default:
		logrus.Fatalln("invalid plane:", plane)
	}
	return fractal.Zrzi
}
// parseComplexFunctionFlag parses the _function_ string (case-insensitive)
// to a complex iteration function: "mandelbrot", "burningship", "b1" or
// "b2". Any other value aborts the process via logrus.Fatalln; the
// trailing return only satisfies the compiler.
func parseComplexFunctionFlag(function string) func(complex128, complex128, complex128) complex128 {
	switch strings.ToLower(function) {
	case "mandelbrot":
		return mandel.Mandelbrot
	case "burningship":
		return mandel.BurningShip
	case "b1":
		return mandel.B1
	case "b2":
		return mandel.B2
	default:
		logrus.Fatalln("invalid complex function:", function)
	}
	return mandel.Mandelbrot
}
// parseModeFlag maps the name mode (case-insensitive) to a coloring mode:
// "iteration", "modulo", "vector", "orbit" or "path". Any other value
// aborts the process via logrus.Fatalln; the final return only satisfies
// the compiler.
func parseModeFlag(mode string) coloring.Mode {
	modes := map[string]coloring.Mode{
		"iteration": coloring.IterationCount,
		"modulo":    coloring.Modulo,
		"vector":    coloring.VectorField,
		"orbit":     coloring.OrbitLength,
		"path":      coloring.Path,
	}
	if m, ok := modes[strings.ToLower(mode)]; ok {
		return m
	}
	logrus.Fatalln("invalid coloring function:", mode)
	return coloring.IterationCount
}
// parseZandC chooses the sampling strategy for the original points Z and C
// (case-insensitive). "random" samples a random point, "origo" always
// returns 0+0i, and "a1".."a6" are experimental transformations of the
// sampled point c. Any other value aborts the process via logrus.Fatalln;
// the trailing return only satisfies the compiler.
func parseZandC(mode string) func(complex128, *rand7i.ComplexRNG) complex128 {
	switch strings.ToLower(mode) {
	case "random":
		return fractal.RandomPoint
	case "origo":
		return func(_ complex128, _ *rand7i.ComplexRNG) complex128 { return complex(0, 0) }
	case "a1":
		// Complex conjugate of c.
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 { return complex(real(c), -imag(c)) }
	case "a2":
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 {
			return complex(math.Sin(real(c)), math.Sin(imag(c)))
		}
	case "a3":
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 {
			return complex(math.Abs(real(c)), math.Abs(imag(c)))
		}
	case "a4":
		// NOTE: divides by imag(c); yields ±Inf/NaN components when imag(c) == 0.
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 {
			return complex(real(c)/imag(c), real(c))
		}
	case "a5":
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 {
			return complex(real(c)*imag(c), -imag(c))
		}
	case "a6":
		return func(c complex128, _ *rand7i.ComplexRNG) complex128 {
			return complex(-imag(c), -real(c))
		}
	default:
		logrus.Fatalln("invalid z or c strategy:", mode)
	}
	return fractal.RandomPoint
}
package main
import (
"encoding/json"
"fmt"
"math"
"net/http"
"strconv"
)
// CalculationResult represents the data structure for the API response
// containing the calculated credits. It is JSON-encoded with the default
// field name ("Credits") since no struct tag is declared.
type CalculationResult struct {
	Credits float64
}
// CalculationError represents the error case if something goes wrong while
// calculating the credit amount.
// NOTE(review): the field is unexported, so it would be dropped by
// encoding/json, and the type is not referenced in this file — presumably
// reserved for future use; confirm before removing.
type CalculationError struct {
	error string
}
// calculateDonationCredit returns the donation credit for a sales order of
// the given USD amount, rounded to two decimal places. The rate grows with
// the order size: 10% under $10, 15% under $100, 20% under $1000, 50%
// under $10000, and 200% from $10000 upward. A zero amount earns nothing.
func calculateDonationCredit(usd float64) float64 {
	var rate float64
	switch {
	case usd == 0:
		return 0
	case usd < 10:
		rate = 0.1
	case usd < 100:
		rate = 0.15
	case usd < 1000:
		rate = 0.2
	case usd < 10000:
		rate = 0.5
	case usd >= 10000:
		rate = 2
	default:
		// Unreachable for ordinary values; kept for NaN inputs.
		return 0
	}
	return math.Round(rate*usd*100) / 100
}
// round rounds value to two decimal places (halves away from zero,
// following math.Round).
func round(value float64) float64 {
	const scale = 100
	return math.Round(value*scale) / scale
}
// Calculate will trigger the credit calculation and builds the response payload for the incoming request.
func Calculate(w http.ResponseWriter, r *http.Request) {
fmt.Println("GET params were:", r.URL.Query())
salesAmountString := r.URL.Query().Get("salesAmount")
if salesAmount, err := strconv.ParseFloat(salesAmountString, 64); err == nil {
fmt.Println("Debug:")
fmt.Println("input variable is:", salesAmount)
donCredits := calculateDonationCredit(salesAmount)
calculationResult := CalculationResult{Credits: donCredits}
w.Header().Set("Content-Type", "application/json") // this
json.NewEncoder(w).Encode(calculationResult)
}
}
// Welcome handles the root route with a static greeting.
func Welcome(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, "Welcome to the SAP TechEd Developer Keynote 2020")
}
// calculationHandler abstracts the Calculate method from the public API;
// it simply delegates to Calculate.
func calculationHandler(w http.ResponseWriter, r *http.Request) {
	Calculate(w, r)
}
// setupRoutes registers the HTTP handlers on the default ServeMux:
// "/" serves the welcome message and "/conversion" the credit calculation.
func setupRoutes() {
	http.HandleFunc("/", Welcome)
	http.HandleFunc("/conversion", calculationHandler)
}
// The main method of the GO application setting up the routes and starting the http server.
func main() {
fmt.Println("Go Web App Started on Port 8080")
setupRoutes()
http.ListenAndServe(":8080", nil)
} | converter/main.go | 0.716119 | 0.418519 | main.go | starcoder |
package inference
// Disease is the representation of the diseases data in this Expert System.
// The JSON tags match the knowledge-base payload format.
type Disease struct {
	ID          string          `json:"id"`          // Disease ID
	Name        string          `json:"name"`        // Name of the disease
	Description string          `json:"description"` // Description of the disease
	Treatment   string          `json:"treatment"`   // Treatment of the disease
	Prevention  string          `json:"prevention"`  // Prevention of the disease
	Source      []SourceAndLink `json:"source"`      // Sources of information regarding the disease
	Symptoms    []Symptom       `json:"symptoms"`    // Valid symptoms of the disease
}
// Inferred is the object that will be returned after all of the
// calculations: the forward-chaining verdict, the certainty-factor
// probability, and the disease data the inference ran against.
type Inferred struct {
	Verdict     bool    `json:"verdict"`     // Verdict whether one is infected or not
	Probability float64 `json:"probability"` // Probability of infection
	Disease     Disease `json:"disease"`     // Disease data
}
// SymptomAndWeight is a struct representative of the members of the
// 'symptoms' array in the 'Input' struct: one user-reported symptom and the
// user's confidence in it.
type SymptomAndWeight struct {
	SymptomID string  `json:"symptomId"` // ID of the relevant symptom
	Weight    float64 `json:"weight"`    // User-confidence weights for the Certainty Factor Algorithm
}
// Input is used as a representative of a user's input to the inference.
// Empty DiseaseID/Locale are replaced with defaults by NewInput.
type Input struct {
	DiseaseID string             `json:"diseaseId"` // ID of the relevant disease
	Locale    string             `json:"locale"`    // Locale of the required information (can be 'en' or 'id)
	Symptoms  []SymptomAndWeight `json:"symptoms"`  // Symptoms and weights
}
// SourceAndLink represents the source name and its link for the
// information regarding a disease.
type SourceAndLink struct {
	Name string `json:"name"` // Name of the website
	Link string `json:"link"` // Link to the website
}
// Symptom is an object that represents the symptoms data in this library,
// carrying the expert-assigned weight used by the Certainty Factor
// algorithm.
type Symptom struct {
	ID     string  `json:"id"`     // ID of the symptom
	Name   string  `json:"name"`   // Name of the symptom
	Weight float64 `json:"weight"` // Expert-calculated weight from data and the relevant experts' opinion
}
// NewInput creates a new input instance that's already validated.
// If the disease ID is empty, "D01" (Tuberculosis) is assumed; if the
// locale is empty, English ("en") is assumed; a nil symptom slice becomes
// an empty one. Unlike the previous implementation, the caller's Input
// value is no longer mutated — the defaults are applied only to the copy.
func NewInput(input *Input) *Input {
	diseaseID := input.DiseaseID
	if diseaseID == "" {
		diseaseID = "D01"
	}

	locale := input.Locale
	if locale == "" {
		locale = "en"
	}

	symptoms := input.Symptoms
	if symptoms == nil {
		symptoms = []SymptomAndWeight{}
	}

	return &Input{
		DiseaseID: diseaseID,
		Locale:    locale,
		Symptoms:  symptoms,
	}
}
// GetDiseaseByID returns a pointer to a copy of the disease whose ID
// matches the given ID, or nil when no disease in the slice matches.
func GetDiseaseByID(ID string, diseases []Disease) *Disease {
	for i := range diseases {
		if diseases[i].ID == ID {
			found := diseases[i]
			return &found
		}
	}
	return nil
}
// ForwardChaining is used to perform inference by using the Forward
// Chaining Algorithm. A weight of zero means the user does NOT have that
// symptom; a positive weight on a symptom that belongs to the disease
// counts as one positive match.
//
// NOTE(review): the code returns true when MORE than 7 matches were found
// (i.e. at least 8), while the original comment stated "only and only if
// the user has experienced 7 symptoms" — confirm the intended threshold.
// Also note that duplicate symptom IDs in the input would each be counted.
func ForwardChaining(input *Input, disease *Disease) bool {
	numberOfPositives := 0

	for _, userSymptom := range input.Symptoms {
		for _, diseaseSymptom := range disease.Symptoms {
			if userSymptom.Weight > 0.0 && userSymptom.SymptomID == diseaseSymptom.ID {
				numberOfPositives += 1
			}
		}
	}

	return numberOfPositives > 7
}
// CertaintyFactor computes the combined certainty that the user has the
// disease using the classic CF combination rule CF(a,b) = a + b*(1-a).
// Each user symptom that matches an expert symptom contributes the product
// of the user's confidence weight and the expert's weight. Returns 0 when
// no symptom matches.
func CertaintyFactor(input *Input, symptoms []Symptom) float64 {
	// Match user and expert symptoms, collecting one CF term per match.
	var factors []float64
	for _, user := range input.Symptoms {
		for _, expert := range symptoms {
			if user.SymptomID == expert.ID {
				factors = append(factors, user.Weight*expert.Weight)
			}
		}
	}

	// No valid match means zero probability.
	if len(factors) == 0 {
		return 0.0
	}

	// Fold the terms with the CF combination rule.
	combined := factors[0]
	for _, cf := range factors[1:] {
		combined += cf * (1 - combined)
	}
	return combined
}
// Infer is used to calculate based on an input to decide whether the user
// is infected or not, using the Forward Chaining and Certainty Factor
// algorithms. Pipeline: normalize input -> load knowledge base -> look up
// disease -> Forward Chaining verdict -> Certainty Factor probability.
//
// NOTE(review): GetDiseaseByID can return nil for an unknown disease ID,
// which would make the dereferences below panic — confirm that inputs are
// validated upstream or guard here.
func Infer(input *Input) *Inferred {
	// Initial preparation: if no locale, set it to be English as default.
	processedInput := NewInput(input)

	// 0. Fetch all data from the knowledge base.
	diseases := getDiseases(processedInput.Locale)

	// 1. Get disease from the identifier in the input request body.
	disease := GetDiseaseByID(processedInput.DiseaseID, diseases)

	// 2. Infer if the user is diagnosed with TB or not with Forward Chaining.
	isSick := ForwardChaining(processedInput, disease)

	// 3. Calculate certainty factor based on the symptoms.
	certaintyProbability := CertaintyFactor(processedInput, disease.Symptoms)

	// 4. Create result structure.
	inferred := &Inferred{
		Verdict:     isSick,
		Probability: certaintyProbability,
		Disease:     *disease,
	}

	// 5. Return result.
	return inferred
}
package matrix
import "errors"
// ZeroMatrix returns a copy of m with every element set to 0; the
// dimensions are preserved and the receiver is left untouched.
func (m *Matrix) ZeroMatrix() *Matrix {
	out := Copy(m)
	out.matrix = make([]float64, m.row*m.column)
	return out
}
// addRowMatrix appends all rows of mat behind this matrix in place.
// Returns an error (without modifying the receiver) if the column counts
// differ.
func (m *Matrix) addRowMatrix(mat Matrix) error {
	if m.column != mat.column {
		return errors.New("Column length is not same")
	}
	m.matrix = append(m.matrix, mat.matrix...)
	m.row += mat.row
	return nil
}
// AddRow appends a row (or rows) at the tail and returns the result as a
// new matrix; the receiver is not modified. Accepted argument types:
//   - Matrix / *Matrix: all of its rows are appended (column counts must match)
//   - []float64: appended as one row (length must equal the column count)
//   - int / float64: one row filled with that constant value
// A size mismatch or any other argument type is recorded in the returned
// matrix's error state. (Rewritten from a type-assertion if/else chain to
// an idiomatic type switch.)
func (m *Matrix) AddRow(num interface{}) (matrix *Matrix) {
	matrix = Copy(m)
	switch v := num.(type) {
	case Matrix:
		matrix.err = matrix.addRowMatrix(v)
	case *Matrix:
		matrix.err = matrix.addRowMatrix(*v)
	case []float64:
		if matrix.column != len(v) {
			matrix.err = errors.New("Column length is not same")
			return
		}
		matrix.row++
		matrix.matrix = append(matrix.matrix, v...)
	case int:
		matrix.row++
		row := make([]float64, matrix.column)
		for i := range row {
			row[i] = float64(v)
		}
		matrix.matrix = append(matrix.matrix, row...)
	case float64:
		matrix.row++
		row := make([]float64, matrix.column)
		for i := range row {
			row[i] = v
		}
		matrix.matrix = append(matrix.matrix, row...)
	default:
		matrix.err = errors.New("The argument type is not allowed")
	}
	return
}
// addRowMatrixHEAD prepends all rows of mat in front of this matrix in
// place. Returns an error (without modifying the receiver) if the column
// counts differ. Note: the result slice reuses mat's backing array via
// append, so mat's storage may be written to.
func (m *Matrix) addRowMatrixHEAD(mat Matrix) error {
	if m.column != mat.column {
		return errors.New("Column length is not same")
	}
	m.matrix = append(mat.matrix, m.matrix...)
	m.row += mat.row
	return nil
}
// AddRowHEAD prepends a row (or rows) at the head and returns the result as
// a new matrix; the receiver is not modified. Accepted argument types:
//   - Matrix / *Matrix: all of its rows are prepended (column counts must match)
//   - []float64: prepended as one row (length must equal the column count;
//     the slice's backing array may be reused and written to)
//   - int / float64: one row filled with that constant value
// A size mismatch or any other argument type is recorded in the returned
// matrix's error state. (Rewritten from a type-assertion if/else chain to
// an idiomatic type switch.)
func (m *Matrix) AddRowHEAD(num interface{}) (matrix *Matrix) {
	matrix = Copy(m)
	switch v := num.(type) {
	case Matrix:
		matrix.err = matrix.addRowMatrixHEAD(v)
	case *Matrix:
		matrix.err = matrix.addRowMatrixHEAD(*v)
	case []float64:
		if matrix.column != len(v) {
			matrix.err = errors.New("Column length is not same")
			return
		}
		matrix.row++
		matrix.matrix = append(v, matrix.matrix...)
	case int:
		matrix.row++
		row := make([]float64, matrix.column)
		for i := range row {
			row[i] = float64(v)
		}
		matrix.matrix = append(row, matrix.matrix...)
	case float64:
		matrix.row++
		row := make([]float64, matrix.column)
		for i := range row {
			row[i] = v
		}
		matrix.matrix = append(row, matrix.matrix...)
	default:
		matrix.err = errors.New("The argument type is not allowed")
	}
	return
}
// addColumnMatrix appends all columns of mat to the right of this matrix in
// place, interleaving the rows of both matrices into a freshly allocated
// backing slice. Returns an error (without modifying the receiver) if the
// row counts differ.
func (m *Matrix) addColumnMatrix(mat Matrix) error {
	if m.row != mat.row {
		return errors.New("Row length is not same")
	}
	result := make([]float64, len(m.matrix)+len(mat.matrix))
	c := 0
	// Copy row i of m, then row i of mat, for each row.
	for i := 0; i < mat.row; i++ {
		for j := 0; j < m.column; j++ {
			result[c] = m.matrix[i*m.column+j]
			c++
		}
		for j := 0; j < mat.column; j++ {
			result[c] = mat.matrix[i*mat.column+j]
			c++
		}
	}
	m.column = m.column + mat.column
	m.matrix = result
	return nil
}
func (m *Matrix) addColumnArray(vector []float64) error {
if m.row != len(vector) {
return errors.New("Given vector and matrix row length is not same")
}
result := make([]float64, len(m.matrix)+len(vector))
c := 0
for i := 0; i < m.row; i++ {
for j := 0; j < m.column; j++ {
result[c] = m.matrix[i*m.column+j]
c++
}
result[c] = vector[i]
c++
}
m.column++
m.matrix = result
return nil
}
// addColumnNumber appends a new rightmost column in place where every
// element is num. Cannot fail, hence no error return.
func (m *Matrix) addColumnNumber(num float64) {
	result := make([]float64, len(m.matrix)+m.row)
	c := 0
	// Copy row i, then append num as its new last element.
	for i := 0; i < m.row; i++ {
		for j := 0; j < m.column; j++ {
			result[c] = m.matrix[i*m.column+j]
			c++
		}
		result[c] = num
		c++
	}
	m.column++
	m.matrix = result
}
// AddColumn appends a column (or columns) at the right and returns the
// result as a new matrix; the receiver is not modified. Accepted argument
// types:
//   - Matrix / *Matrix: all of its columns are appended (row counts must match)
//   - []float64: appended as one column (length must equal the row count)
//   - int / float64: one column filled with that constant value
// A size mismatch or any other argument type is recorded in the returned
// matrix's error state. (Rewritten from a type-assertion if/else chain to
// an idiomatic type switch.)
func (m *Matrix) AddColumn(num interface{}) (matrix *Matrix) {
	matrix = Copy(m)
	switch v := num.(type) {
	case Matrix:
		matrix.err = matrix.addColumnMatrix(v)
	case *Matrix:
		matrix.err = matrix.addColumnMatrix(*v)
	case []float64:
		matrix.err = matrix.addColumnArray(v)
	case int:
		matrix.addColumnNumber(float64(v))
	case float64:
		matrix.addColumnNumber(v)
	default:
		matrix.err = errors.New("The argument type is not allowed")
	}
	return
}
// SepRow returns the sub-matrix consisting of rows start..end (1-based,
// inclusive). Invalid or out-of-range bounds are recorded in the returned
// matrix's error state. Note: the result shares the receiver's backing
// slice, so writes to either are visible in both.
func (m *Matrix) SepRow(start, end int) (matrix *Matrix) {
	matrix = Copy(m)
	if end < start {
		matrix.err = errors.New("The argument values are invalid")
		return
	} else if end > m.row || start < 1 {
		matrix.err = errors.New("The value are out of matrix")
		return
	}
	// Convert 1-based row numbers to element offsets in the flat slice.
	s := (start - 1) * m.column
	e := (end - 1) * m.column
	matrix = New(end-start+1, m.column, m.matrix[s:e+m.column])
	return
}
// SepColumn returns the sub-matrix consisting of columns start..end
// (1-based, inclusive), copied into a freshly allocated backing slice.
// Invalid or out-of-range bounds are recorded in the returned matrix's
// error state.
func (m *Matrix) SepColumn(start, end int) (matrix *Matrix) {
	matrix = Copy(m)
	if end < start {
		matrix.err = errors.New("The argument values are invalid")
		return
	} else if end > m.column || start < 1 {
		matrix.err = errors.New("The value are out of matrix")
		return
	}
	// Gather the selected column range row by row.
	vector := make([]float64, (end-start+1)*m.row)
	count := 0
	for i := 0; i < m.row; i++ {
		for j := start - 1; j < end; j++ {
			vector[count] = m.matrix[i*m.column+j]
			count++
		}
	}
	matrix = New(m.row, end-start+1, vector)
	return
}
package anansi
import (
"image"
"github.com/jcorbin/anansi/ansi"
)
// Grid is a grid of screen cells, stored as parallel flat slices of
// attributes and runes indexed row-major with the given Stride. Rect is
// the (1-based) screen-space bounding rectangle; a Grid whose Rect covers
// only part of the backing data is a sub-grid (see IsSub/SubRect).
type Grid struct {
	Rect   ansi.Rectangle
	Stride int
	Attr   []ansi.SGRAttr
	Rune   []rune
	// TODO []string for multi-rune glyphs
}
// Resize the grid to have room for n cells.
// Returns true only if the resize was a change, false if it was a no-op.
//
// For a sub-grid only the bounding rectangle moves, clamped to the backing
// data (width to Stride, height to the rows the slices can hold). For a
// full grid the Attr/Rune slices are grown (reallocating only when
// capacity is exceeded) or shrunk in place.
func (g *Grid) Resize(size image.Point) bool {
	if size == g.Rect.Size() {
		return false
	}
	if g.IsSub() {
		// A sub-grid cannot outgrow its parent's storage.
		if size.X > g.Stride {
			size.X = g.Stride
		}
		if g.Stride*size.Y > len(g.Rune) {
			size.Y = len(g.Rune) / g.Stride
		}
		g.Rect.Max = g.Rect.Min.Add(size)
	} else {
		if g.Rect.Min.Point == image.ZP {
			g.Rect.Min = ansi.Pt(1, 1) // screen coordinates are 1-based
		}
		g.Rect.Max = g.Rect.Min.Add(size)
		g.Stride = size.X
		n := g.Stride * size.Y
		if n > cap(g.Rune) {
			as := make([]ansi.SGRAttr, n)
			rs := make([]rune, n)
			copy(as, g.Attr)
			copy(rs, g.Rune)
			g.Attr, g.Rune = as, rs
		} else {
			g.Attr = g.Attr[:n]
			g.Rune = g.Rune[:n]
		}
		// TODO re-stride data
	}
	return true
}
// Clear the (maybe sub) grid; zeros all runes and attributes. A full grid
// is cleared with a single pass over the backing slices; a sub-grid walks
// only the cells inside its bounding rectangle.
func (g Grid) Clear() {
	if !g.IsSub() {
		for i := range g.Rune {
			g.Rune[i] = 0
			g.Attr[i] = 0
		}
		return
	}

	pt := g.Rect.Min
	i, _ := g.CellOffset(pt)
	dx := g.Rect.Dx()
	for ; pt.Y < g.Rect.Max.Y; pt.Y++ {
		for pt.X = g.Rect.Min.X; pt.X < g.Rect.Max.X; pt.X++ {
			g.Rune[i] = 0
			g.Attr[i] = 0
			i++
		}
		// Step the flat index to the start of the next row of the sub-grid.
		i -= dx // CR
		i += g.Stride // LF
	}
}
// Bounds returns the screen bounding rectangle of the grid (1-based
// coordinates).
func (g Grid) Bounds() ansi.Rectangle {
	return g.Rect
}
// CellOffset returns the offset of the screen cell into the backing
// Attr/Rune slices, and true, if pt is within the Grid's Bounds();
// otherwise (0, false).
func (g Grid) CellOffset(pt ansi.Point) (int, bool) {
	if !pt.In(g.Bounds()) {
		return 0, false
	}
	p := pt.ToImage() // convert to normal 0-indexed point
	return p.Y*g.Stride + p.X, true
}
// IsSub returns true if the grid's bounding rectangle only covers a
// sub-section of its underlying data (i.e. Rect is smaller than the full
// Stride x rows backing storage).
func (g *Grid) IsSub() bool {
	return g.Rect.Size() != g.fullSize()
}
// fullSize returns the size of the backing storage: Stride columns by
// len(Rune)/Stride rows, or the zero point for an empty grid.
func (g *Grid) fullSize() image.Point {
	if g.Stride == 0 {
		return image.ZP
	}
	return image.Pt(g.Stride, len(g.Rune)/g.Stride)
}
// Full returns the full grid containing the receiver grid, reversing any
// sub-grid targeting done by SubRect(): the bounds are reset to cover the
// entire backing storage starting at (1,1).
func (g Grid) Full() Grid {
	g.Rect.Min = ansi.Pt(1, 1)
	g.Rect.Max = g.Rect.Min.Add(g.fullSize())
	return g
}
// SubAt is a convenience for calling SubRect with at as the new Min point,
// and the receiver's Rect.Max point.
func (g Grid) SubAt(at ansi.Point) Grid {
	return g.SubRect(ansi.Rectangle{Min: at, Max: g.Rect.Max})
}
// SubSize is a convenience for calling SubRect with a Max point determined
// by adding the given size to the receiver's Rect.Min point.
func (g Grid) SubSize(sz image.Point) Grid {
	return g.SubRect(ansi.Rectangle{Min: g.Rect.Min, Max: g.Rect.Min.Add(sz)})
}
// SubRect returns a sub-grid, sharing the receiver's Rune/Attr/Stride data,
// but with a new bounding Rect. Clamps r.Max to g.Rect.Max, and returns the
// zero Grid if r.Min is not in g.Rect. Because the data is shared, writes
// through the sub-grid are visible in the parent and vice versa.
func (g Grid) SubRect(r ansi.Rectangle) Grid {
	if !r.Min.In(g.Rect) {
		return Grid{}
	}
	if r.Max.X > g.Rect.Max.X {
		r.Max.X = g.Rect.Max.X
	}
	if r.Max.Y > g.Rect.Max.Y {
		r.Max.Y = g.Rect.Max.Y
	}
	return Grid{
		Attr:   g.Attr,
		Rune:   g.Rune,
		Stride: g.Stride,
		Rect:   r,
	}
}
// Eq returns true only if the other grid has the same size and contents as the
// receiver.
func (g Grid) Eq(other Grid, zero rune) bool {
n := len(g.Rune)
if n != len(other.Rune) {
return false
}
i := 0
for ; i < n; i++ {
if g.Attr[i] != other.Attr[i] {
return false
}
gr, or := g.Rune[i], other.Rune[i]
if gr == 0 {
gr = zero
}
if or == 0 {
or = zero
}
if gr != or {
return false
}
}
return true
} | grid.go | 0.543833 | 0.458046 | grid.go | starcoder |
package platformsnotificationevents
import (
"encoding/json"
)
// ViasAddress struct for ViasAddress. Country is the only required field;
// all other fields are optional pointers (nil means unset and the field is
// omitted from the serialized form).
// NOTE(review): the getter/setter/Nullable pattern suggests this file is
// machine-generated (OpenAPI-style); confirm before hand-editing.
type ViasAddress struct {
	// The name of the city. >Required if either `houseNumberOrName`, `street`, `postalCode`, or `stateOrProvince` are provided.
	City *string `json:"city,omitempty"`
	// The two-character country code of the address. The permitted country codes are defined in ISO-3166-1 alpha-2 (e.g. 'NL'). > If you don't know the country or are not collecting the country from the shopper, provide `country` as `ZZ`.
	Country string `json:"country"`
	// The number or name of the house.
	HouseNumberOrName *string `json:"houseNumberOrName,omitempty"`
	// The postal code. >A maximum of five (5) digits for an address in the USA, or a maximum of ten (10) characters for an address in all other countries. >Required if either `houseNumberOrName`, `street`, `city`, or `stateOrProvince` are provided.
	PostalCode *string `json:"postalCode,omitempty"`
	// The abbreviation of the state or province. >Two (2) characters for an address in the USA or Canada, or a maximum of three (3) characters for an address in all other countries. >Required for an address in the USA or Canada if either `houseNumberOrName`, `street`, `city`, or `postalCode` are provided.
	StateOrProvince *string `json:"stateOrProvince,omitempty"`
	// The name of the street. >The house number should not be included in this field; it should be separately provided via `houseNumberOrName`. >Required if either `houseNumberOrName`, `city`, `postalCode`, or `stateOrProvince` are provided.
	Street *string `json:"street,omitempty"`
}
// NewViasAddress instantiates a new ViasAddress object with the required
// country code set; optional fields can be populated afterwards via their
// setters. (Signature reformatted to be gofmt-clean — the stray trailing
// comma in the parameter list is removed; the call interface is unchanged.)
func NewViasAddress(country string) *ViasAddress {
	this := ViasAddress{}
	this.Country = country
	return &this
}
// NewViasAddressWithDefaults instantiates a new ViasAddress object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// (Country is left empty here).
func NewViasAddressWithDefaults() *ViasAddress {
	this := ViasAddress{}
	return &this
}
// GetCity returns the City field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *ViasAddress) GetCity() string {
	if o == nil || o.City == nil {
		var ret string
		return ret
	}
	return *o.City
}
// GetCityOk returns a tuple with the City field value if set, nil otherwise
// and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *ViasAddress) GetCityOk() (*string, bool) {
	if o == nil || o.City == nil {
		return nil, false
	}
	return o.City, true
}
// HasCity reports whether the optional City field has been set.
// Safe to call on a nil receiver.
func (o *ViasAddress) HasCity() bool {
	return o != nil && o.City != nil
}
// SetCity gets a reference to the given string and assigns it to the City
// field. The address stores a pointer to a copy of v.
func (o *ViasAddress) SetCity(v string) {
	o.City = &v
}
// GetCountry returns the Country field value.
// Safe to call on a nil receiver (returns the zero string).
func (o *ViasAddress) GetCountry() string {
	if o == nil {
		var ret string
		return ret
	}

	return o.Country
}
// GetCountryOk returns a tuple with the Country field value
// and a boolean to check if the value has been set. Country is required,
// so ok is true whenever the receiver is non-nil.
func (o *ViasAddress) GetCountryOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Country, true
}
// SetCountry sets the required Country field value.
func (o *ViasAddress) SetCountry(v string) {
	o.Country = v
}
// GetHouseNumberOrName returns the HouseNumberOrName field value if set,
// zero value otherwise. Safe to call on a nil receiver.
func (o *ViasAddress) GetHouseNumberOrName() string {
	if o == nil || o.HouseNumberOrName == nil {
		var ret string
		return ret
	}
	return *o.HouseNumberOrName
}
// GetHouseNumberOrNameOk returns a tuple with the HouseNumberOrName field
// value if set, nil otherwise and a boolean to check if the value has been
// set. Safe to call on a nil receiver.
func (o *ViasAddress) GetHouseNumberOrNameOk() (*string, bool) {
	if o == nil || o.HouseNumberOrName == nil {
		return nil, false
	}
	return o.HouseNumberOrName, true
}
// HasHouseNumberOrName reports whether the optional HouseNumberOrName
// field has been set. Safe to call on a nil receiver.
func (o *ViasAddress) HasHouseNumberOrName() bool {
	return o != nil && o.HouseNumberOrName != nil
}
// SetHouseNumberOrName gets a reference to the given string and assigns it
// to the HouseNumberOrName field. The address stores a pointer to a copy
// of v.
func (o *ViasAddress) SetHouseNumberOrName(v string) {
	o.HouseNumberOrName = &v
}
// GetPostalCode returns the PostalCode field value if set, zero value
// otherwise. Safe to call on a nil receiver.
func (o *ViasAddress) GetPostalCode() string {
	if o == nil || o.PostalCode == nil {
		var ret string
		return ret
	}
	return *o.PostalCode
}
// GetPostalCodeOk returns a tuple with the PostalCode field value if set,
// nil otherwise and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *ViasAddress) GetPostalCodeOk() (*string, bool) {
	if o == nil || o.PostalCode == nil {
		return nil, false
	}
	return o.PostalCode, true
}
// HasPostalCode reports whether the optional PostalCode field has been
// set. Safe to call on a nil receiver.
func (o *ViasAddress) HasPostalCode() bool {
	return o != nil && o.PostalCode != nil
}
// SetPostalCode gets a reference to the given string and assigns it to the
// PostalCode field. The address stores a pointer to a copy of v.
func (o *ViasAddress) SetPostalCode(v string) {
	o.PostalCode = &v
}
// GetStateOrProvince returns the StateOrProvince field value if set, zero
// value otherwise. Safe to call on a nil receiver.
func (o *ViasAddress) GetStateOrProvince() string {
	if o == nil || o.StateOrProvince == nil {
		var ret string
		return ret
	}
	return *o.StateOrProvince
}
// GetStateOrProvinceOk returns a tuple with the StateOrProvince field
// value if set, nil otherwise and a boolean to check if the value has been
// set. Safe to call on a nil receiver.
func (o *ViasAddress) GetStateOrProvinceOk() (*string, bool) {
	if o == nil || o.StateOrProvince == nil {
		return nil, false
	}
	return o.StateOrProvince, true
}
// HasStateOrProvince reports whether the optional StateOrProvince field
// has been set. Safe to call on a nil receiver.
func (o *ViasAddress) HasStateOrProvince() bool {
	return o != nil && o.StateOrProvince != nil
}
// SetStateOrProvince gets a reference to the given string and assigns it
// to the StateOrProvince field. The address stores a pointer to a copy of
// v.
func (o *ViasAddress) SetStateOrProvince(v string) {
	o.StateOrProvince = &v
}
// GetStreet returns the Street field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *ViasAddress) GetStreet() string {
	if o == nil || o.Street == nil {
		var ret string
		return ret
	}
	return *o.Street
}
// GetStreetOk returns a tuple with the Street field value if set, nil
// otherwise and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *ViasAddress) GetStreetOk() (*string, bool) {
	if o == nil || o.Street == nil {
		return nil, false
	}
	return o.Street, true
}
// HasStreet reports whether the optional Street field has been set.
// Safe to call on a nil receiver.
func (o *ViasAddress) HasStreet() bool {
	return o != nil && o.Street != nil
}
// SetStreet gets a reference to the given string and assigns it to the
// Street field. The address stores a pointer to a copy of v.
func (o *ViasAddress) SetStreet(v string) {
	o.Street = &v
}
// MarshalJSON serializes the address, emitting only the optional fields
// that are set. Country is required and always present — the generated
// code guarded it with a dead `if true` conditional, removed here.
func (o ViasAddress) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.City != nil {
		toSerialize["city"] = o.City
	}
	toSerialize["country"] = o.Country
	if o.HouseNumberOrName != nil {
		toSerialize["houseNumberOrName"] = o.HouseNumberOrName
	}
	if o.PostalCode != nil {
		toSerialize["postalCode"] = o.PostalCode
	}
	if o.StateOrProvince != nil {
		toSerialize["stateOrProvince"] = o.StateOrProvince
	}
	if o.Street != nil {
		toSerialize["street"] = o.Street
	}
	return json.Marshal(toSerialize)
}
// NullableViasAddress wraps a *ViasAddress together with an isSet flag so
// that an explicit JSON null can be distinguished from an absent value.
type NullableViasAddress struct {
	value *ViasAddress
	isSet bool
}
// Get returns the wrapped value (nil when unset or explicitly null).
func (v NullableViasAddress) Get() *ViasAddress {
	return v.value
}
// Set stores val and marks the wrapper as set (val may be nil to represent
// an explicit null).
func (v *NullableViasAddress) Set(val *ViasAddress) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether a value (possibly nil/null) has been assigned.
func (v NullableViasAddress) IsSet() bool {
	return v.isSet
}
// Unset clears the wrapped value and marks the wrapper as unset.
func (v *NullableViasAddress) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableViasAddress returns a wrapper holding val with the set flag
// already true.
func NewNullableViasAddress(val *ViasAddress) *NullableViasAddress {
	return &NullableViasAddress{value: val, isSet: true}
}
// MarshalJSON serializes the wrapped value; a nil value encodes as null.
func (v NullableViasAddress) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON decodes src into the wrapped value and marks the wrapper
// as set, even when src is the JSON literal null.
func (v *NullableViasAddress) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
} | src/platformsnotificationevents/model_vias_address.go | 0.8321 | 0.421969 | model_vias_address.go | starcoder |
package projecteuler
// DigitalNumber is an immutable pairing of an integer with its decimal
// digits, stored least-significant first (digits[0] is the ones place).
type DigitalNumber struct {
	x      int
	digits []byte
}

// NewDigitalNumber constructs a new DigitalNumber for x.
// Digits are extracted least-significant first. x == 0 yields the single
// digit 0 (previously zero produced an empty digit slice, so DigitCount
// reported 0 digits for the number 0). Negative x is not supported and
// yields an empty digit slice, as before.
func NewDigitalNumber(x int) (newDn DigitalNumber) {
	newDn = DigitalNumber{x: x}
	if x == 0 {
		newDn.digits = []byte{0}
		return
	}
	for x > 0 {
		currDigit := byte(x % 10)
		newDn.digits = append(newDn.digits, currDigit)
		x /= 10
	}
	return
}
// X returns number
func (dn DigitalNumber) X() int {
	return dn.x
}

// Digits returns digits (reference)
// NOTE: the returned slice shares backing storage with the receiver;
// callers must not mutate it if immutability is to be preserved.
func (dn DigitalNumber) Digits() []byte {
	return dn.digits
}

// DigitCount returns number of digits
func (dn DigitalNumber) DigitCount() int {
	return len(dn.digits)
}
// NumberFromDigits reconstructs an integer from digits stored
// least-significant first (the inverse of NewDigitalNumber).
// An empty or nil slice yields 0.
func NumberFromDigits(digits []byte) (value int) {
	// Horner evaluation from the most significant digit down.
	for i := len(digits) - 1; i >= 0; i-- {
		value = value*10 + int(digits[i])
	}
	return
}
// NonZeroDigits reports whether none of the digits is zero.
func (dn DigitalNumber) NonZeroDigits() bool {
	for i := range dn.digits {
		if dn.digits[i] == 0 {
			return false
		}
	}
	return true
}

// DifferentDigits reports whether all digits are pairwise distinct.
func (dn DigitalNumber) DifferentDigits() bool {
	seen := make(map[byte]bool, len(dn.digits))
	for _, digit := range dn.digits {
		if seen[digit] {
			return false
		}
		seen[digit] = true
	}
	return true
}
// DifferentDigitCompositions returns true iff DigitalNumber's digits are not in the set, and adds them to it if so.
// Caller is responsible for making the set
func (dn DigitalNumber) DifferentDigitCompositions(usedDigits map[byte]struct{}) (differentDigits bool) {
	// First pass: reject if any of this number's digits was seen before.
	for _, k := range dn.Digits() {
		if _, ok := usedDigits[k]; ok {
			return
		}
	}
	differentDigits = true
	// Second pass: record all digits in the caller-owned set.
	for _, k := range dn.Digits() {
		usedDigits[k] = struct{}{}
	}
	return
}

// ReplaceDigits replaces digits in a number with ones from the map (index->new digit)
// Map keys are positions counted from the most significant digit
// (0 = leftmost); the len-i-1 translation below converts them to the
// least-significant-first storage order of dn.digits.
func (dn DigitalNumber) ReplaceDigits(replacements map[byte]byte) (value int) {
	pow := 1
	value = 0
	for i := 0; i < len(dn.digits); i++ {
		currDigit := int(dn.digits[i])
		if newDigit, ok := replacements[byte(len(dn.digits)-i-1)]; ok {
			currDigit = int(newDigit)
		}
		value += pow * currDigit
		pow *= 10
	}
	return
}
// DigitOccurencies returns a map from digit to the number of times it
// appears in the receiver. The map is never nil.
func (dn DigitalNumber) DigitOccurencies() (occurencies map[byte]int) {
	occurencies = make(map[byte]int)
	// Missing keys read as zero, so a plain increment covers both the
	// first and subsequent occurrences of a digit.
	for _, d := range dn.Digits() {
		occurencies[d]++
	}
	return
}
// SameDigitSet returns true iff d2 has the same multiset of digits as dn
func (dn DigitalNumber) SameDigitSet(d2 DigitalNumber) bool {
	o1 := dn.DigitOccurencies()
	o2 := d2.DigitOccurencies()
	// Differing numbers of distinct digits can never match. (The nil
	// checks are defensive; DigitOccurencies always returns a non-nil map.)
	if o1 == nil || o2 == nil || len(o1) != len(o2) {
		return false
	}
	// Every digit must occur the same number of times in both numbers.
	for d, occ := range o1 {
		if occ2, ok := o2[d]; !ok {
			return false
		} else if occ != occ2 {
			return false
		}
	}
	return true
} | digitalNumber.go | 0.830113 | 0.586582 | digitalNumber.go | starcoder |
package timemath
import (
"time"
)
// Unit identifies a calendar/clock granularity, encoded as the rune used
// in range expressions (note 'M' is month while 'm' is minute).
type Unit rune

var (
	// Year the unit.
	Year Unit = 'y'
	// Month the unit.
	Month Unit = 'M'
	// Week the unit.
	Week Unit = 'w'
	// Day the unit.
	Day Unit = 'd'
	// Hour the unit.
	Hour Unit = 'h'
	// Minute the unit.
	Minute Unit = 'm'
	// Second the unit.
	Second Unit = 's'
)

// Valid reports whether u is one of the seven known units.
func (u Unit) Valid() bool {
	switch u {
	case Year, Month, Week, Day, Hour, Minute, Second:
		return true
	}
	return false
}
// EndOf returns the end of the given unit.
// Fields finer than the unit are saturated (nanoseconds to 999999999,
// seconds/minutes to 59, and so on). endOfWeek names the weekday that
// terminates a week and is only consulted for the Week unit.
func (u Unit) EndOf(date time.Time, endOfWeek time.Weekday) time.Time {
	switch u {
	case Second:
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), date.Minute(), date.Second(), 999999999, date.Location())
	case Minute:
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), date.Minute(), 59, 999999999, date.Location())
	case Hour:
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), 59, 59, 999999999, date.Location())
	case Day:
		return time.Date(date.Year(), date.Month(), date.Day(), 23, 59, 59, 999999999, date.Location())
	case Week:
		// Walk forward one day at a time until the terminating weekday,
		// then take the end of that day.
		temp := date
		for temp.Weekday() != endOfWeek {
			temp = Day.Add(temp, 1)
		}
		return Day.EndOf(temp, endOfWeek)
	case Month:
		// Day 0 of the following month normalizes to the last day of this
		// month, regardless of month length or leap years.
		return time.Date(date.Year(), date.Month()+1, 0, 23, 59, 59, 999999999, date.Location())
	case Year:
		return time.Date(date.Year(), time.December, 31, 23, 59, 59, 999999999, date.Location())
	default:
		panic("unknown unit type")
	}
}
// StartOf returns the start of the given unit.
// Fields finer than the unit are zeroed. startOfWeek names the weekday
// that begins a week and is only consulted for the Week unit.
func (u Unit) StartOf(date time.Time, startOfWeek time.Weekday) time.Time {
	switch u {
	case Second:
		// Zero the nanoseconds so Second behaves like every other unit
		// and mirrors EndOf (which saturates them to 999999999).
		// Previously the date was returned unchanged, leaving stray
		// sub-second precision in place.
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), date.Minute(), date.Second(), 0, date.Location())
	case Minute:
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), date.Minute(), 0, 0, date.Location())
	case Hour:
		return time.Date(date.Year(), date.Month(), date.Day(), date.Hour(), 0, 0, 0, date.Location())
	case Day:
		return time.Date(date.Year(), date.Month(), date.Day(), 0, 0, 0, 0, date.Location())
	case Week:
		// Walk back one day at a time until the first day of the week,
		// then truncate to the start of that day.
		temp := date
		for temp.Weekday() != startOfWeek {
			temp = Day.Subtract(temp, 1)
		}
		return Day.StartOf(temp, startOfWeek)
	case Month:
		return time.Date(date.Year(), date.Month(), 1, 0, 0, 0, 0, date.Location())
	case Year:
		return time.Date(date.Year(), time.January, 1, 0, 0, 0, 0, date.Location())
	default:
		panic("unknown unit type")
	}
}
// Subtract subtracts the unit * amount to the given date.
func (u Unit) Subtract(date time.Time, amount int) time.Time {
	return u.Add(date, -amount)
}

// Add adds the unit * amount to the given date.
// Clock units use Time.Add (fixed durations); calendar units use
// Time.AddDate, which normalizes overflowing fields (e.g. Jan 31 + 1
// month yields Mar 2/3 per the time package's normalization rules).
func (u Unit) Add(date time.Time, amount int) time.Time {
	switch u {
	case Second:
		return date.Add(time.Duration(amount) * time.Second)
	case Minute:
		return date.Add(time.Duration(amount) * time.Minute)
	case Hour:
		return date.Add(time.Duration(amount) * time.Hour)
	case Day:
		return date.AddDate(0, 0, amount)
	case Week:
		return date.AddDate(0, 0, amount*7)
	case Month:
		return date.AddDate(0, amount, 0)
	case Year:
		return date.AddDate(amount, 0, 0)
	default:
		panic("unknown unit type")
	}
} | math.go | 0.658747 | 0.549097 | math.go | starcoder |
package protoutil
import (
"fmt"
"strconv"
"time"
pb "code.sajari.com/protogen-go/sajari/engine/v2"
structpb "github.com/golang/protobuf/ptypes/struct"
)
// FromProto unwraps an engine Value into either its single string or its
// repeated []string payload; any other variant is an error.
func FromProto(v *pb.Value) (interface{}, error) {
	switch v := v.Value.(type) {
	case *pb.Value_Single:
		return v.Single, nil
	case *pb.Value_Repeated_:
		return v.Repeated.Values, nil
	default:
		return nil, fmt.Errorf("unexpected type: %T", v)
	}
}

// Single converts a scalar Go value into a single-valued pb.Value by
// formatting it with %v. Non-scalar input is rejected.
func Single(x interface{}) (*pb.Value, error) {
	switch x := x.(type) {
	case int, uint, int64, uint64, int32, uint32, int16, uint16,
		int8, uint8, float32, float64, string, bool:
		return &pb.Value{
			Value: &pb.Value_Single{
				Single: fmt.Sprintf("%v", x),
			},
		}, nil
	default:
		return nil, fmt.Errorf("expected single value, got %T", x)
	}
}

// Value converts a Go value into a pb.Value. Scalars become a single
// string value (time.Time is encoded as Unix seconds); slices become a
// repeated string value with per-element formatting. Anything else is
// an error.
func Value(x interface{}) (*pb.Value, error) {
	switch x := x.(type) {
	case string:
		return &pb.Value{
			Value: &pb.Value_Single{
				Single: x,
			},
		}, nil
	case int, uint, int64, uint64, int32, uint32, int16, uint16,
		int8, uint8, float32, float64, bool:
		return &pb.Value{
			Value: &pb.Value_Single{
				Single: fmt.Sprintf("%v", x),
			},
		}, nil
	case time.Time:
		// Times are stored as Unix seconds in decimal.
		return &pb.Value{
			Value: &pb.Value_Single{
				Single: strconv.FormatInt(x.Unix(), 10),
			},
		}, nil
	}
	// Slice types: convert each element to its string form.
	var vs []string
	switch x := x.(type) {
	case []string:
		vs = x
	case []int:
		vs = make([]string, 0, len(x))
		for _, v := range x {
			vs = append(vs, strconv.FormatInt(int64(v), 10))
		}
	case []int64:
		vs = make([]string, 0, len(x))
		for _, v := range x {
			vs = append(vs, strconv.FormatInt(v, 10))
		}
	case []float32:
		vs = make([]string, 0, len(x))
		for _, v := range x {
			vs = append(vs, strconv.FormatFloat(float64(v), 'g', -1, 32))
		}
	case []float64:
		vs = make([]string, 0, len(x))
		for _, v := range x {
			vs = append(vs, strconv.FormatFloat(v, 'g', -1, 64))
		}
	case []interface{}:
		vs = make([]string, 0, len(x))
		for _, v := range x {
			vs = append(vs, fmt.Sprintf("%v", v))
		}
	default:
		return nil, fmt.Errorf("unsupported value: %T", x)
	}
	return &pb.Value{
		Value: &pb.Value_Repeated_{
			Repeated: &pb.Value_Repeated{
				Values: vs,
			},
		},
	}, nil
}
// Values converts every entry of m via Value, returning the first
// conversion error encountered.
func Values(m map[string]interface{}) (map[string]*pb.Value, error) {
	out := make(map[string]*pb.Value, len(m))
	for key, raw := range m {
		converted, err := Value(raw)
		if err != nil {
			return nil, err
		}
		out[key] = converted
	}
	return out, nil
}
// Map flattens a protobuf Struct into a map[string]string, rejecting any
// field whose value is not a string.
func Map(st *structpb.Struct) (map[string]string, error) {
	out := make(map[string]string, len(st.GetFields()))
	for k, v := range st.GetFields() {
		_, ok := v.Kind.(*structpb.Value_StringValue)
		if !ok {
			return nil, fmt.Errorf("non-string value (%T)", v)
		}
		out[k] = v.GetStringValue()
	}
	return out, nil
}

// Struct is the inverse of Map: it wraps each entry of m as a protobuf
// string Value inside a Struct.
func Struct(m map[string]string) *structpb.Struct {
	fields := make(map[string]*structpb.Value, len(m))
	for k, v := range m {
		fields[k] = &structpb.Value{
			Kind: &structpb.Value_StringValue{
				StringValue: v,
			},
		}
	}
	return &structpb.Struct{
		Fields: fields,
	}
} | internal/protoutil/protoutil.go | 0.596668 | 0.417806 | protoutil.go | starcoder |
package desync
// Converters are modifiers for chunk data, such as compression or encryption.
// They are used to prepare chunk data for storage, or to read it from storage.
// The order of the conversion layers matters. When plain data is prepared for
// storage, the toStorage method is used in the order the layers are defined.
// To read from storage, the fromStorage method is called for each layer in
// reverse order.
type Converters []converter

// Apply every data converter in the forward direction.
// The output of each layer feeds the next; the first error aborts.
func (s Converters) toStorage(in []byte) ([]byte, error) {
	var (
		b   = in
		err error
	)
	for _, layer := range s {
		b, err = layer.toStorage(b)
		if err != nil {
			return nil, err
		}
	}
	return b, nil
}

// Apply the layers backwards.
// Mirrors toStorage: the last layer applied on write is undone first.
func (s Converters) fromStorage(in []byte) ([]byte, error) {
	var (
		b   = in
		err error
	)
	for i := len(s) - 1; i >= 0; i-- {
		b, err = s[i].fromStorage(b)
		if err != nil {
			return nil, err
		}
	}
	return b, nil
}
// Returns true if the conversion chain includes a compression layer.
// Typically used to determine the correct file-extension.
func (s Converters) hasCompression() bool {
	for _, layer := range s {
		switch layer.(type) {
		case Compressor:
			return true
		}
	}
	return false
}

// Returns true if both converter chains contain the same layers in the
// same order. Used for optimizations.
func (s Converters) equal(c Converters) bool {
	if len(s) != len(c) {
		return false
	}
	for i, layer := range s {
		if !layer.equal(c[i]) {
			return false
		}
	}
	return true
}
// converter is a storage data modifier layer.
type converter interface {
	// Convert data from it's original form to storage format.
	// The input could be plain data, or the output of a prior
	// converter.
	toStorage([]byte) ([]byte, error)

	// Convert data from it's storage format towards it's plain
	// form. The input could be encrypted or compressed, while
	// the output may be used for the next conversion layer.
	fromStorage([]byte) ([]byte, error)

	// equal reports whether the other converter is the same layer type
	// with the same configuration.
	equal(converter) bool
}

// Compression layer. Delegates to the package-level Compress/Decompress
// helpers; the type itself is stateless.
type Compressor struct{}

// Compile-time check that Compressor implements converter.
var _ converter = Compressor{}

func (d Compressor) toStorage(in []byte) ([]byte, error) {
	return Compress(in)
}

func (d Compressor) fromStorage(in []byte) ([]byte, error) {
	return Decompress(nil, in)
}

// equal: all Compressor values are interchangeable, so only the type matters.
func (d Compressor) equal(c converter) bool {
	_, ok := c.(Compressor)
	return ok
} | coverter.go | 0.75392 | 0.515071 | coverter.go | starcoder |
package lm
import (
"fmt"
"github.com/barnex/fmath"
)
// Mat4x4 is a 4x4 float32 matrix stored as a flat 16-element array.
// The String and Translate layouts below indicate column-major storage
// (m[12..14] hold the translation) — the OpenGL convention; confirm
// against the Vec3/consumer code before relying on this.
type Mat4x4 [16]float32

// Pointer returns the underlying array, e.g. for passing to GL calls.
func (m *Mat4x4) Pointer() *[16]float32 { return (*[16]float32)(m) }

// Slice returns the elements as a []float32 view over the same storage.
func (m *Mat4x4) Slice() []float32 { return m[:] }

// String formats the matrix in row-major reading order (transposing the
// column-major storage for display).
func (m *Mat4x4) String() string {
	return fmt.Sprintf("[%f,%f,%f,%f,\n %f,%f,%f,%f,\n %f,%f,%f,%f,\n %f,%f,%f,%f]",
		m[0], m[4], m[8], m[12], m[1], m[5], m[9], m[13], m[2], m[6], m[10], m[14], m[3], m[7], m[11], m[15])
}

// Mat4x4Identity returns the identity matrix.
func Mat4x4Identity() Mat4x4 {
	return Mat4x4{
		1, 0, 0, 0,
		0, 1, 0, 0,
		0, 0, 1, 0,
		0, 0, 0, 1}
}

// Mat4x4Translate returns a translation matrix for offset v.
func Mat4x4Translate(v Vec3) Mat4x4 {
	return Mat4x4{
		1, 0, 0, 0,
		0, 1, 0, 0,
		0, 0, 1, 0,
		v[0], v[1], v[2], 1}
}

// Mat4x4Scale returns a per-axis scale matrix for factors v.
func Mat4x4Scale(v Vec3) Mat4x4 {
	return Mat4x4{
		v[0], 0, 0, 0,
		0, v[1], 0, 0,
		0, 0, v[2], 0,
		0, 0, 0, 1}
}
// Mat4x4Rotate returns a rotation of `radians` about `axis` (normalized
// internally), following the classic axis-angle (Rodrigues) expansion.
func Mat4x4Rotate(axis Vec3, radians float32) Mat4x4 {
	x, y, z := axis.Norm().XYZ()
	c := fmath.Cos(radians)
	s := fmath.Sin(radians)
	return Mat4x4{
		x*x*(1-c) + c, y*x*(1-c) + z*s, x*z*(1-c) - y*s, 0,
		x*y*(1-c) - z*s, y*y*(1-c) + c, y*z*(1-c) + x*s, 0,
		x*z*(1-c) + y*s, y*z*(1-c) - x*s, z*z*(1-c) + c, 0,
		0, 0, 0, 1}
}

// Mat4x4LookAt builds a view matrix placing the camera at eye, looking
// toward center, with the given up hint (gluLookAt-style).
// NOTE(review): s = f x up is not re-normalized here; if up is not
// orthogonal to the view direction the basis may be slightly skewed —
// confirm against the reference gluLookAt derivation.
func Mat4x4LookAt(eye, center, up Vec3) Mat4x4 {
	f := center.Sub(eye).Norm()
	s := f.Cross(up.Norm())
	u := s.Cross(f)
	return Mat4x4{
		s[0], u[0], -f[0], 0,
		s[1], u[1], -f[1], 0,
		s[2], u[2], -f[2], 0,
		-s.Dot(eye), -u.Dot(eye), f.Dot(eye), 1}
}

// Mat4x4Frustum builds a perspective projection from explicit clip-plane
// extents (glFrustum-style).
func Mat4x4Frustum(left, right, bottom, top, zNear, zFar float32) Mat4x4 {
	width := right - left
	height := top - bottom
	depth := zFar - zNear
	return Mat4x4{
		(zNear * 2.0) / width, 0, 0, 0,
		0, (zNear * 2.0) / height, 0, 0,
		(left + right) / width, (bottom + top) / height, -(zNear + zFar) / depth, -1,
		0, 0, -(zNear * zFar * 2.0) / depth, 0}
}

// Mat4x4Perspective builds a symmetric perspective projection from a
// vertical field of view (radians), aspect ratio and near/far planes
// (gluPerspective-style).
func Mat4x4Perspective(fovY, aspect, zNear, zFar float32) Mat4x4 {
	f := float32(1.0 / fmath.Tan(fovY/2.0))
	d := zNear - zFar
	return Mat4x4{
		f / aspect, 0, 0, 0,
		0, f, 0, 0,
		0, 0, (zFar + zNear) / d, -1,
		0, 0, (2 * zFar * zNear) / d, 0}
}

// Mul returns the matrix product m1 * m2, fully unrolled.
func (m1 Mat4x4) Mul(m2 Mat4x4) Mat4x4 {
	return Mat4x4{
		m1[0]*m2[0] + m1[1]*m2[4] + m1[2]*m2[8] + m1[3]*m2[12],
		m1[0]*m2[1] + m1[1]*m2[5] + m1[2]*m2[9] + m1[3]*m2[13],
		m1[0]*m2[2] + m1[1]*m2[6] + m1[2]*m2[10] + m1[3]*m2[14],
		m1[0]*m2[3] + m1[1]*m2[7] + m1[2]*m2[11] + m1[3]*m2[15],
		m1[4]*m2[0] + m1[5]*m2[4] + m1[6]*m2[8] + m1[7]*m2[12],
		m1[4]*m2[1] + m1[5]*m2[5] + m1[6]*m2[9] + m1[7]*m2[13],
		m1[4]*m2[2] + m1[5]*m2[6] + m1[6]*m2[10] + m1[7]*m2[14],
		m1[4]*m2[3] + m1[5]*m2[7] + m1[6]*m2[11] + m1[7]*m2[15],
		m1[8]*m2[0] + m1[9]*m2[4] + m1[10]*m2[8] + m1[11]*m2[12],
		m1[8]*m2[1] + m1[9]*m2[5] + m1[10]*m2[9] + m1[11]*m2[13],
		m1[8]*m2[2] + m1[9]*m2[6] + m1[10]*m2[10] + m1[11]*m2[14],
		m1[8]*m2[3] + m1[9]*m2[7] + m1[10]*m2[11] + m1[11]*m2[15],
		m1[12]*m2[0] + m1[13]*m2[4] + m1[14]*m2[8] + m1[15]*m2[12],
		m1[12]*m2[1] + m1[13]*m2[5] + m1[14]*m2[9] + m1[15]*m2[13],
		m1[12]*m2[2] + m1[13]*m2[6] + m1[14]*m2[10] + m1[15]*m2[14],
		m1[12]*m2[3] + m1[13]*m2[7] + m1[14]*m2[11] + m1[15]*m2[15]}
}
// Invert returns the inverse of m, computed via 2x2 sub-determinants
// (the adjugate method). The matrix is assumed to be invertible: a
// singular matrix divides by a zero determinant, producing Inf/NaN
// entries rather than an error.
func (m Mat4x4) Invert() Mat4x4 {
	// s and c hold the 2x2 minors of the top and bottom half respectively.
	var s, c [6]float32
	s[0] = m[0]*m[5] - m[4]*m[1]
	s[1] = m[0]*m[6] - m[4]*m[2]
	s[2] = m[0]*m[7] - m[4]*m[3]
	s[3] = m[1]*m[6] - m[5]*m[2]
	s[4] = m[1]*m[7] - m[5]*m[3]
	s[5] = m[2]*m[7] - m[6]*m[3]

	c[0] = m[8]*m[13] - m[12]*m[9]
	c[1] = m[8]*m[14] - m[12]*m[10]
	c[2] = m[8]*m[15] - m[12]*m[11]
	c[3] = m[9]*m[14] - m[13]*m[10]
	c[4] = m[9]*m[15] - m[13]*m[11]
	c[5] = m[10]*m[15] - m[14]*m[11]

	// assumes it is invertible
	// NOTE: despite its name, `identity` holds 1/det(m) — the reciprocal
	// determinant by which every adjugate entry below is scaled.
	var identity float32 = 1.0 / (s[0]*c[5] - s[1]*c[4] + s[2]*c[3] + s[3]*c[2] - s[4]*c[1] + s[5]*c[0])

	return Mat4x4{
		(m[5]*c[5] - m[6]*c[4] + m[7]*c[3]) * identity,
		(-m[1]*c[5] + m[2]*c[4] - m[3]*c[3]) * identity,
		(m[13]*s[5] - m[14]*s[4] + m[15]*s[3]) * identity,
		(-m[9]*s[5] + m[10]*s[4] - m[11]*s[3]) * identity,
		(-m[4]*c[5] + m[6]*c[2] - m[7]*c[1]) * identity,
		(m[0]*c[5] - m[2]*c[2] + m[3]*c[1]) * identity,
		(-m[12]*s[5] + m[14]*s[2] - m[15]*s[1]) * identity,
		(m[8]*s[5] - m[10]*s[2] + m[11]*s[1]) * identity,
		(m[4]*c[4] - m[5]*c[2] + m[7]*c[0]) * identity,
		(-m[0]*c[4] + m[1]*c[2] - m[3]*c[0]) * identity,
		(m[12]*s[4] - m[13]*s[2] + m[15]*s[0]) * identity,
		(-m[8]*s[4] + m[9]*s[2] - m[11]*s[0]) * identity,
		(-m[4]*c[3] + m[5]*c[1] - m[6]*c[0]) * identity,
		(m[0]*c[3] - m[1]*c[1] + m[2]*c[0]) * identity,
		(-m[12]*s[3] + m[13]*s[1] - m[14]*s[0]) * identity,
		(m[8]*s[3] - m[9]*s[1] + m[10]*s[0]) * identity}
} | mat4x4.go | 0.609175 | 0.523359 | mat4x4.go | starcoder |
package cubebit
import (
"image/color"
"github.com/9600org/go-rpi-ws281x"
)
// Cubebit represents an instance of the Cube:Bit hardware.
// The LEDs are driven through a single 2D canvas of sizeX columns by
// sizeY*sizeZ rows — one horizontal "page" of sizeY rows per z layer
// (see Set/At for the coordinate mapping).
type Cubebit struct {
	canvas *ws281x.Canvas
	sizeX  int
	sizeY  int
	sizeZ  int
}
// New creates a new Cubebit instance.
// config passes the details of the hardware to the underlying libws281x library.
// sx, sy, sz specify the dimensions of the LED space (e.g. 5, 5, 5 for the
// 5x5x5 model).
// It returns a Cubebit instance, and a function which should be called to
// release hardware resources once the caller is finished with the Cubebit
// instance.
func New(config *ws281x.HardwareConfig, sx, sy, sz int) (*Cubebit, func() error, error) {
	// The cube is addressed as one 2D canvas: sx columns, one page of
	// sy rows per z layer stacked vertically.
	width := sx
	height := sy * sz
	c, err := ws281x.NewCanvas(width, height, config)
	if err != nil {
		return nil, nil, err
	}
	if err := c.Initialize(); err != nil {
		// Canvas was created; hand back Close so the caller can release it.
		return nil, c.Close, err
	}
	// BUG FIX: sizeZ was previously initialized from sx, so Bounds()
	// reported a wrong Z dimension for non-cubic configurations.
	return &Cubebit{canvas: c, sizeX: sx, sizeY: sy, sizeZ: sz}, c.Close, nil
}
// gamma is a gamma correction table.
var gamma = []uint8{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5,
5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10,
10, 10, 11, 11, 11, 12, 12, 13, 13, 13, 14, 14, 15, 15, 16, 16,
17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 24, 24, 25,
25, 26, 27, 27, 28, 29, 29, 30, 31, 32, 32, 33, 34, 35, 35, 36,
37, 38, 39, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 50,
51, 52, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 66, 67, 68,
69, 70, 72, 73, 74, 75, 77, 78, 79, 81, 82, 83, 85, 86, 87, 89,
90, 92, 93, 95, 96, 98, 99, 101, 102, 104, 105, 107, 109, 110, 112, 114,
115, 117, 119, 120, 122, 124, 126, 127, 129, 131, 133, 135, 137, 138, 140, 142,
144, 146, 148, 150, 152, 154, 156, 158, 160, 162, 164, 167, 169, 171, 173, 175,
177, 180, 182, 184, 186, 189, 191, 193, 196, 198, 200, 203, 205, 208, 210, 213,
215, 218, 220, 223, 225, 228, 231, 233, 236, 239, 241, 244, 247, 249, 252, 255}
func gammafy(col color.RGBA) color.RGBA {
return color.RGBA{gamma[col.R], gamma[col.G], gamma[col.B], col.A}
}
// Set turns the pixel at the specified coordinates to the specified colour.
// Note that no LEDs will change colour until the application also calls the
// Render function, below.
// The coordinate flips below account for serpentine wiring: odd z layers
// reverse both axes and odd y rows reverse x — presumably matching the
// physical strip layout; verify against the hardware.
// The colour is gamma-corrected before being stored on the canvas.
func (c *Cubebit) Set(x, y, z int, col color.RGBA) {
	if z%2 == 1 {
		y, x = c.sizeY-x-1, c.sizeX-y-1
	}
	if y%2 == 1 {
		x = c.sizeX - x - 1
	}
	// Each z layer occupies its own page of sizeY canvas rows.
	page := c.sizeY * z
	c.canvas.Set(x, page+y, gammafy(col))
}

// At returns the colour stored on the canvas for the given coordinates,
// using the same serpentine mapping as Set. Note this is the
// gamma-corrected value, not the original colour passed to Set.
func (c *Cubebit) At(x, y, z int) color.RGBA {
	if z%2 == 1 {
		y, x = c.sizeY-x-1, c.sizeX-y-1
	}
	if y%2 == 1 {
		x = c.sizeX - x - 1
	}
	page := c.sizeY * z
	return c.canvas.At(x, page+y).(color.RGBA)
}
// Render sends the colour data previously associated with LEDs using the Set
// function to the LEDs themselves.
func (c *Cubebit) Render() {
	c.canvas.Render()
}

// Bounds returns the upper limits of the LED space.
func (c *Cubebit) Bounds() (int, int, int) {
	return c.sizeX, c.sizeY, c.sizeZ
} | cubebit.go | 0.787686 | 0.445288 | cubebit.go | starcoder |
package render
import (
"context"
"strings"
"github.com/weaveworks/scope/report"
)
// KubernetesVolumesRenderer is a Renderer which combines all Kubernetes
// volumes components such as stateful Pods, Persistent Volume, Persistent Volume Claim, Storage Class.
var KubernetesVolumesRenderer = MakeReduce(
	VolumesRenderer,
	PodToVolumeRenderer,
	PVCToStorageClassRenderer,
	PVToSnapshotRenderer,
	VolumeSnapshotRenderer,
)

// VolumesRenderer is a Renderer which produces a renderable kubernetes PV & PVC
// graph by merging the pods graph and the Persistent Volume topology.
var VolumesRenderer = volumesRenderer{}

// volumesRenderer is a Renderer to render PV & PVC nodes.
type volumesRenderer struct{}

// Render renders PV & PVC nodes along with adjacency.
// A PVC node is linked to every PV whose name matches the PVC's
// volume-name latest value (O(claims x volumes) scan).
func (v volumesRenderer) Render(ctx context.Context, rpt report.Report) Nodes {
	nodes := make(report.Nodes)
	for id, n := range rpt.PersistentVolumeClaim.Nodes {
		volume, _ := n.Latest.Lookup(report.KubernetesVolumeName)
		for _, p := range rpt.PersistentVolume.Nodes {
			volumeName, _ := p.Latest.Lookup(report.KubernetesName)
			if volume == volumeName {
				n.Adjacency = n.Adjacency.Add(p.ID)
				n.Children = n.Children.Add(p)
			}
		}
		nodes[id] = n
	}
	return Nodes{Nodes: nodes}
}

// PodToVolumeRenderer is a Renderer which produces a renderable kubernetes Pod
// graph by merging the pods graph and the Persistent Volume Claim topology.
// Pods having persistent volumes are rendered.
var PodToVolumeRenderer = podToVolumesRenderer{}

// VolumesRenderer is a Renderer to render Pods & PVCs.
type podToVolumesRenderer struct{}

// Render renders the Pod nodes having volumes adjacency.
// Pods without a volume-claim entry are skipped entirely; each claim name
// is matched against PVCs by name AND namespace.
func (v podToVolumesRenderer) Render(ctx context.Context, rpt report.Report) Nodes {
	nodes := make(report.Nodes)
	for podID, podNode := range rpt.Pod.Nodes {
		claimNames, found := podNode.Latest.Lookup(report.KubernetesVolumeClaim)
		if !found {
			continue
		}
		podNamespace, _ := podNode.Latest.Lookup(report.KubernetesNamespace)
		// The claim list is stored as a single delimiter-joined string.
		claimNameList := strings.Split(claimNames, report.ScopeDelim)
		for _, ClaimName := range claimNameList {
			for _, pvcNode := range rpt.PersistentVolumeClaim.Nodes {
				pvcName, _ := pvcNode.Latest.Lookup(report.KubernetesName)
				pvcNamespace, _ := pvcNode.Latest.Lookup(report.KubernetesNamespace)
				if (pvcName == ClaimName) && (podNamespace == pvcNamespace) {
					podNode.Adjacency = podNode.Adjacency.Add(pvcNode.ID)
					podNode.Children = podNode.Children.Add(pvcNode)
					break
				}
			}
		}
		nodes[podID] = podNode
	}
	return Nodes{Nodes: nodes}
}
// PVCToStorageClassRenderer is a Renderer which produces a renderable kubernetes PVC
// & Storage class graph.
var PVCToStorageClassRenderer = pvcToStorageClassRenderer{}

// pvcToStorageClassRenderer is a Renderer to render PVC & StorageClass.
type pvcToStorageClassRenderer struct{}

// Render renders the PVC & Storage Class nodes with adjacency.
// Each storage-class node is linked to every PVC referencing it by name.
func (v pvcToStorageClassRenderer) Render(ctx context.Context, rpt report.Report) Nodes {
	nodes := make(report.Nodes)
	for scID, scNode := range rpt.StorageClass.Nodes {
		storageClass, _ := scNode.Latest.Lookup(report.KubernetesName)
		for _, pvcNode := range rpt.PersistentVolumeClaim.Nodes {
			storageClassName, _ := pvcNode.Latest.Lookup(report.KubernetesStorageClassName)
			if storageClassName == storageClass {
				scNode.Adjacency = scNode.Adjacency.Add(pvcNode.ID)
				scNode.Children = scNode.Children.Add(pvcNode)
			}
		}
		nodes[scID] = scNode
	}
	return Nodes{Nodes: nodes}
}

//PVToSnapshotRenderer is a Renderer which produces a renderable kubernetes PV
var PVToSnapshotRenderer = pvToSnapshotRenderer{}

//pvToSnapshotRenderer is a Renderer to render PV & Snapshot.
type pvToSnapshotRenderer struct{}

//Render renders the PV & Snapshot nodes with adjacency.
// Each PV is linked to every volume snapshot taken from it (matched by
// the snapshot's recorded volume name).
func (v pvToSnapshotRenderer) Render(ctx context.Context, rpt report.Report) Nodes {
	nodes := make(report.Nodes)
	for pvNodeID, p := range rpt.PersistentVolume.Nodes {
		volumeName, _ := p.Latest.Lookup(report.KubernetesName)
		for _, volumeSnapshotNode := range rpt.VolumeSnapshot.Nodes {
			snapshotPVName, _ := volumeSnapshotNode.Latest.Lookup(report.KubernetesVolumeName)
			if volumeName == snapshotPVName {
				p.Adjacency = p.Adjacency.Add(volumeSnapshotNode.ID)
				p.Children = p.Children.Add(volumeSnapshotNode)
			}
		}
		nodes[pvNodeID] = p
	}
	return Nodes{Nodes: nodes}
}

// VolumeSnapshotRenderer is a renderer which produces a renderable Kubernetes Volume Snapshot and Volume Snapshot Data
var VolumeSnapshotRenderer = volumeSnapshotRenderer{}

// volumeSnapshotRenderer is a render to volume snapshot & volume snapshot data
type volumeSnapshotRenderer struct{}

// Render renders the volumeSnapshots & volumeSnapshotData with adjacency
// It checks for the volumeSnapshotData name in volumeSnapshot, adjacency is created by matching the volumeSnapshotData name.
// Note: snapshot-data nodes are (re)added to the result on every pass of
// the inner loop, so all snapshot-data nodes end up in the output.
func (v volumeSnapshotRenderer) Render(ctx context.Context, rpt report.Report) Nodes {
	nodes := make(report.Nodes)
	for volumeSnapshotID, volumeSnapshotNode := range rpt.VolumeSnapshot.Nodes {
		snapshotData, _ := volumeSnapshotNode.Latest.Lookup(report.KubernetesSnapshotData)
		for volumeSnapshotDataID, volumeSnapshotDataNode := range rpt.VolumeSnapshotData.Nodes {
			snapshotDataName, _ := volumeSnapshotDataNode.Latest.Lookup(report.KubernetesName)
			if snapshotDataName == snapshotData {
				volumeSnapshotNode.Adjacency = volumeSnapshotNode.Adjacency.Add(volumeSnapshotDataNode.ID)
				volumeSnapshotNode.Children = volumeSnapshotNode.Children.Add(volumeSnapshotDataNode)
			}
			nodes[volumeSnapshotDataID] = volumeSnapshotDataNode
		}
		nodes[volumeSnapshotID] = volumeSnapshotNode
	}
	return Nodes{Nodes: nodes}
} | deepfence_agent/tools/apache/scope/render/persistentvolume.go | 0.708112 | 0.433442 | persistentvolume.go | starcoder |
package testes
import (
"fmt"
"reflect"
"testing"
"github.com/skeptycal/types"
)
// NewAnyValue re-exports types.NewAnyValue for local convenience.
var NewAnyValue = types.NewAnyValue

// Printf-style failure formats shared by the Assert* helpers; each takes
// (name, got, want) arguments.
const (
	assertEqual     = "AssertEqual(%v): got %v, want %v"
	assertNotEqual  = "AssertNotEqual(%v): got %v, want %v"
	assertDeepEqual = "AssertDeepEqual(%v): got %v, want %v"
	assertSameType  = "AssertSameType(%v): got %v, want %v"
	assertSameKind  = "AssertSameKind(%v): got %v, want %v"
	assertSameFunc  = "AssertSameFunc(%v): got %v, want %v"
)
// IsKindEqual reports whether got and want are both reflect.Kind values
// holding the same kind; anything that is not a reflect.Kind compares false.
func IsKindEqual(got, want Any) bool {
	gk, gotOK := got.(reflect.Kind)
	wk, wantOK := want.(reflect.Kind)
	return gotOK && wantOK && gk == wk
}

// AssertKindEqual reports a test error when got and want are not the
// same reflect.Kind; it returns whether the assertion held.
func AssertKindEqual(t *testing.T, name string, got, want Any) bool {
	if !IsKindEqual(got, want) {
		TErrorf(t, assertSameKind, name, got, want)
		return false
	}
	return true
}
// GetFuncResult validates that fn is a non-nil function value.
// NOTE(review): this looks unfinished — t, name and args are never used,
// and the success path always returns (nil, nil) instead of calling fn;
// confirm intended behavior before relying on it.
func GetFuncResult(t *testing.T, name string, fn Any, args ...reflect.Value) ([]reflect.Value, error) {
	if fn == nil {
		return nil, fmt.Errorf("fn must be provided: %v", fn)
	}
	f := NewAnyValue(fn)
	if !IsKindEqual(f.Kind(), reflect.Func) {
		return nil, fmt.Errorf("fn must be a function: %v(%v)", fn, f.Kind())
	}
	return nil, nil
}
// AssertSameFunc returns true if got and want are
// both functions that return the same value when
// called with args... as input. Pointers to functions are dereferenced
// before comparison.
func AssertSameFunc(t *testing.T, name string, got, want Any, args ...reflect.Value) bool {
	g := NewAnyValue(got)

	// If got is a pointer, run again with the dereferenced object.
	// BUG FIX: args was previously dropped on these recursive calls.
	if IsKindEqual(g.Kind(), reflect.Ptr) {
		return AssertSameFunc(t, name, reflect.Indirect(g.ValueOf()), want, args...)
	}

	// If want is a pointer, run again with the dereferenced object.
	w := NewAnyValue(want)
	if w.Kind() == reflect.Ptr {
		return AssertSameFunc(t, name, got, reflect.Indirect(w.ValueOf()), args...)
	}

	if g.Kind() != reflect.Func {
		TErrorf(t, assertSameFunc, name, g.Kind(), reflect.Func)
		return false
	}

	// BUG FIX: this second check previously re-tested got's kind; want
	// must also be a function before it can be called below.
	if w.Kind() != reflect.Func {
		TErrorf(t, assertSameFunc, name, w.Kind(), reflect.Func)
		return false
	}

	gf := g.ValueOf().Call(args)
	wf := w.ValueOf().Call(args)

	// BUG FIX: AssertEqual compares with ==, which panics when handed
	// the non-comparable []reflect.Value slices; use the DeepEqual
	// variant instead. NOTE(review): DeepEqual on reflect.Value elements
	// is approximate — consider comparing the unwrapped results.
	return AssertDeepEqual(t, name, gf, wf)
}
// AssertEqual reports a test error unless got == want; it returns
// whether the two values compared equal.
func AssertEqual(t *testing.T, name string, got, want Any) bool {
	if got != want {
		t.Errorf(assertEqual, name, got, want)
		return false
	}
	return true
}

// AssertNotEqual reports a test error when got == want; it returns
// whether the two values differed.
func AssertNotEqual(t *testing.T, name string, got, want Any) bool {
	if got == want {
		t.Errorf(assertNotEqual, name, got, want)
		return false
	}
	return true
}

// AssertDeepEqual reports a test error unless reflect.DeepEqual holds;
// it returns whether the values were deeply equal.
func AssertDeepEqual(t *testing.T, name string, got, want Any) bool {
	if !reflect.DeepEqual(got, want) {
		t.Errorf(assertDeepEqual, name, got, want)
		return false
	}
	return true
}
// AssertSameType reports a test error unless got and want have the same
// reflect.Type; it returns whether the assertion held.
func AssertSameType(t *testing.T, name string, got, want Any) bool {
	g := NewAnyValue(got).TypeOf()
	w := NewAnyValue(want).TypeOf()
	if g == w {
		return true
	}
	t.Errorf(assertSameType, name, g, w)
	return false
}

// AssertSameKind reports a test error unless got and want have the same
// reflect.Kind (a coarser check than AssertSameType).
func AssertSameKind(t *testing.T, name string, got, want Any) bool {
	g := NewAnyValue(got).Kind()
	w := NewAnyValue(want).Kind()
	if g == w {
		return true
	}
	t.Errorf(assertSameKind, name, g, w)
	return false
} | assert.go | 0.670069 | 0.466542 | assert.go | starcoder |
package hr2d
// Use dynamic programming to determine maximum value
// Rows are processed one at a time; for each valid column alignment the
// best achievable score is carried forward via the precomputed set of
// compatible alignment pairs.
func SolveDP(R, C int, table [][]int) (int) {
	alignments := generate_alignments(C)
	pairs := generate_alignment_pairs(alignments)
	N := len(alignments)
	// predecessors array each index p[i][j] is the index of the
	// alignment from the previous row.
	// current and previous best scores for the current row i
	// given the set of all possible valid alignments.
	dc := make([]int, N)
	dp := make([]int, N)
	// Seed with row 0: each alignment scores its dot product with the row.
	for j, ali := range alignments {
		dp[j] = ali.Dot(table[0])
	}
	row_scores := make([]int, N)
	// process each row
	for i := 1; i < R; i++ {
		// determine alignment scores for the current row
		for j, ali := range alignments {
			row_scores[j] = ali.Dot(table[i])
			dc[j] = 0 //reset this rows current best score
		}
		// determine best score for each alignment given previous answers
		for a := 0; a < N; a++ {
			for _, b := range pairs[a] {
				c := dp[a] + row_scores[b]
				if c > dc[b] {
					dc[b] = c
				}
			}
		}
		// copy current best scores to the previous scores table
		for j := 0; j < N; j++ {
			dp[j] = dc[j]
		}
	}
	// The answer is the best final-row score over all alignments.
	var maximum_value int
	for _, v := range dp {
		if v > maximum_value {
			maximum_value = v
		}
	}
	return maximum_value
}
// Use dynamic programming to determine maximum value and alignment
// Identical to SolveDP but additionally records, per row and alignment,
// which previous-row alignment produced the best score so the optimal
// alignment sequence can be reconstructed by back-tracking.
func SolveDP2(R, C int, table [][]int) (int, Alignments, error) {
	alignments := generate_alignments(C)
	pairs := generate_alignment_pairs(alignments)
	N := len(alignments)
	// predecessors array each index p[i][j] is the index of the
	// alignment from the previous row.
	p := make([][]int, R)
	//p[0] = make([]int, N) p[0] is not interesting
	// current and previous best scores for the current row i
	// given the set of all possible valid alignments.
	dc := make([]int, N)
	dp := make([]int, N)
	for j, ali := range alignments {
		dp[j] = ali.Dot(table[0])
	}
	row_scores := make([]int, N)
	// process each row
	for i := 1; i < R; i++ {
		p[i] = make([]int, N)
		// determine alignment scores for the current row
		for j, ali := range alignments {
			row_scores[j] = ali.Dot(table[i])
			dc[j] = 0 //reset this rows current best score
		}
		// determine best score for each alignment given previous answers
		for a := 0; a < N; a++ {
			for _, b := range pairs[a] {
				c := dp[a] + row_scores[b]
				if c > dc[b] {
					dc[b] = c
					p[i][b] = a
				}
			}
		}
		// copy current best scores to the previous scores table
		for j := 0; j < N; j++ {
			dp[j] = dc[j]
		}
	}
	final := make(Alignments, R)
	// determine which alignment generated the highest score in the final row
	var a, c int // best alignment index; best cost
	for j, v := range dp {
		if v > c {
			c = v
			a = j
		}
	}
	// walk the parent array in reverse
	// select the optimal alignment at each row
	for i := R - 1; i > 0; i-- {
		final[i] = alignments[a]
		a = p[i][a]
	}
	final[0] = alignments[a]
	return c, final, nil
}
// Use Maximum Flow to determine maximum value
// The grid is modelled as a bipartite min-cut problem (checkerboard
// partition): the answer is the total cell value minus the blocking
// flow computed between adjacent cells.
// NOTE: update_path mutates table in place, so this destroys the input.
func SolveFlow(R, C int, table [][]int) (int) {
	var total_value int;
	var max_flow int;

	// O(N^2)
	// determine total value of all houses
	for i:=0; i<R; i++ {
		for j:=0; j<C; j++ {
			total_value += table[i][j];
		}
	}

	// O(N * 4*(N/2)) => O(2*N^2) => O(N^2)
	// determine the maximum flow, by finding the min cut
	// of all paths from the source to the sink.
	// This loop iterates over all vertices's in U, then determines
	// the min cut for all adjacent vertices's in V.
	// j starts at i%2 and steps by 2, visiting one colour class of the
	// checkerboard; each cell is paired with its 4-neighbours.
	for i:=0; i<R; i++ {
		for j:=i%2; j<C; j+=2 {
			if i>0 {
				max_flow += update_path(table,i,j,i-1,j);
			}
			if j>0 {
				max_flow += update_path(table,i,j,i,j-1);
			}
			if j+1 < C {
				max_flow += update_path(table,i,j,i,j+1);
			}
			if i+1 < R {
				max_flow += update_path(table,i,j,i+1,j);
			}
		}
	}

	maximum_value := total_value - max_flow;
	return maximum_value
}
// compute blocking flow through s->u->v->t
// Pushes min(table[i][j], table[x][y]) units of flow between the two
// cells, decrementing both residual capacities in place, and returns
// the amount pushed.
func update_path(table [][]int, i, j, x, y int) int {
	u := table[i][j]
	v := table[x][y]

	// determine the blocking flow
	f := u
	if v < u {
		f = v
	}

	// ensure that this path has not been cut already
	if f > 0 {
		table[i][j] -= f
		table[x][y] -= f
	}
	return f;
} | hr2d/hr2d.go | 0.788094 | 0.454533 | hr2d.go | starcoder |
package vtree
import (
"bytes"
"sort"
)
// Node represents an immutable node in the radix tree which
// can be either an edge node or a leaf node.
// edges are kept sorted by the first byte of their prefix (see addSub),
// which Min/Max and the lookup paths rely on.
type Node struct {
	leaf   *leaf
	edges  []*Node
	prefix []byte
}

// leaf carries the full key and its associated item at a terminal
// position in the tree.
type leaf struct {
	key []byte
	val *Item
}
// Min returns the key and value of the minimum item in the
// subtree of the current node. Returns (nil, nil) when the subtree
// contains no leaf on its leftmost path.
func (n *Node) Min() ([]byte, *Item) {
	// A node that is itself a leaf is the minimum; otherwise keep
	// descending the first (lowest) edge.
	for !n.isLeaf() {
		if len(n.edges) == 0 {
			return nil, nil
		}
		n = n.edges[0]
	}
	return n.leaf.key, n.leaf.val
}

// Max returns the key and value of the maximum item in the
// subtree of the current node. Returns (nil, nil) when the rightmost
// descent does not end at a leaf.
func (n *Node) Max() ([]byte, *Item) {
	// Descend the last (highest) edge for as long as edges exist, then
	// report that node's leaf if it has one.
	for len(n.edges) > 0 {
		n = n.edges[len(n.edges)-1]
	}
	if n.isLeaf() {
		return n.leaf.key, n.leaf.val
	}
	return nil, nil
}
// Path is used to recurse over the tree only visiting nodes
// which are above this node in the tree.
// The walker f is invoked for every leaf encountered while consuming k
// prefix-by-prefix; a true return from f stops the traversal early.
func (n *Node) Path(k []byte, f Walker) {
	s := k
	for {
		// Visit the current node's leaf (ancestors of k included).
		if n.leaf != nil {
			if f(n.leaf.key, n.leaf.val) {
				return
			}
		}
		// Entire key consumed: done.
		if len(s) == 0 {
			return
		}
		// Follow the edge labelled by the next byte of the key.
		if _, n = n.getSub(s[0]); n == nil {
			return
		}
		// Only continue while the edge's prefix matches the remaining key.
		if bytes.HasPrefix(s, n.prefix) {
			s = s[len(n.prefix):]
		} else {
			break
		}
	}
}
// Subs is used to recurse over the tree only visiting nodes
// which are directly under this node in the tree.
func (n *Node) Subs(k []byte, f Walker) {
s := k
for {
// Check for key exhaution
if len(s) == 0 {
subs(n, f, false)
return
}
// Look for an edge
if _, n = n.getSub(s[0]); n == nil {
break
}
// Consume the search prefix
if bytes.HasPrefix(s, n.prefix) {
s = s[len(n.prefix):]
} else if bytes.HasPrefix(n.prefix, s) {
subs(n, f, true)
return
} else {
break
}
}
}
// Walk is used to recurse over the tree only visiting nodes
// which are under this node in the tree.
func (n *Node) Walk(k []byte, f Walker) {
s := k
for {
// Check for key exhaution
if len(s) == 0 {
walk(n, f, false)
return
}
// Look for an edge
if _, n = n.getSub(s[0]); n == nil {
break
}
// Consume the search prefix
if bytes.HasPrefix(s, n.prefix) {
s = s[len(n.prefix):]
} else if bytes.HasPrefix(n.prefix, s) {
walk(n, f, false)
return
} else {
break
}
}
}
// ------------------------------
// ------------------------------
// ------------------------------
// ------------------------------
// ------------------------------
// isLeaf reports whether this node stores a key/value pair.
func (n *Node) isLeaf() bool {
	return n.leaf != nil
}

// dup returns a shallow-immutable copy of n: the leaf struct, prefix bytes
// and edge slice are copied, but the child nodes themselves are shared.
func (n *Node) dup() *Node {
	d := &Node{}
	if n.leaf != nil {
		d.leaf = &leaf{}
		*d.leaf = *n.leaf
	}
	if n.prefix != nil {
		d.prefix = make([]byte, len(n.prefix))
		copy(d.prefix, n.prefix)
	}
	if len(n.edges) != 0 {
		d.edges = make([]*Node, len(n.edges))
		copy(d.edges, n.edges)
	}
	return d
}

// addSub inserts s into the edge list, keeping it sorted by the first
// prefix byte: append at the end, then rotate it into its sorted slot.
func (n *Node) addSub(s *Node) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].prefix[0] >= s.prefix[0]
	})
	n.edges = append(n.edges, s)
	if idx != num {
		copy(n.edges[idx+1:], n.edges[idx:num])
		n.edges[idx] = s
	}
}

// repSub replaces the existing edge with the same leading byte as s.
// It panics if no such edge exists (programmer error).
func (n *Node) repSub(s *Node) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].prefix[0] >= s.prefix[0]
	})
	if idx < num && n.edges[idx].prefix[0] == s.prefix[0] {
		n.edges[idx] = s
		return
	}
	panic("replacing missing edge")
}

// getSub binary-searches the sorted edge list for the edge whose prefix
// starts with label, returning its index and node, or (-1, nil).
func (n *Node) getSub(label byte) (int, *Node) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].prefix[0] >= label
	})
	if idx < num && n.edges[idx].prefix[0] == label {
		return idx, n.edges[idx]
	}
	return -1, nil
}

// delSub removes the edge starting with label, if present. The vacated
// tail slot is nil-ed so the removed node is not retained by the slice.
func (n *Node) delSub(label byte) {
	num := len(n.edges)
	idx := sort.Search(num, func(i int) bool {
		return n.edges[i].prefix[0] >= label
	})
	if idx < num && n.edges[idx].prefix[0] == label {
		copy(n.edges[idx:], n.edges[idx+1:])
		n.edges[len(n.edges)-1] = nil
		n.edges = n.edges[:len(n.edges)-1]
	}
}

// mergeChild collapses n's single child into n: prefixes are concatenated
// and the child's leaf and edges are copied up. Callers are expected to
// invoke this only when len(n.edges) == 1 (it reads edges[0] unchecked).
func (n *Node) mergeChild() {
	e := n.edges[0]
	child := e
	n.prefix = concat(n.prefix, child.prefix)
	if child.leaf != nil {
		n.leaf = new(leaf)
		*n.leaf = *child.leaf
	} else {
		n.leaf = nil
	}
	if len(child.edges) != 0 {
		n.edges = make([]*Node, len(child.edges))
		copy(n.edges, child.edges)
	} else {
		n.edges = nil
	}
}
// subs recursively visits leaves below n. The sub flag is false only for the
// root call, so the root's own leaf is skipped and only strict descendants
// are reported. Returns true as soon as f aborts the walk.
func subs(n *Node, f Walker, sub bool) bool {
	// Visit the leaf values if any
	if sub && n.leaf != nil {
		if f(n.leaf.key, n.leaf.val) {
			return true
		}
		// Note: a node with a leaf is not descended further here, so only
		// the shallowest matching leaves under the root call are visited.
		return false
	}
	// Recurse on the children
	for _, e := range n.edges {
		if subs(e, f, true) {
			return true
		}
	}
	return false
}

// walk recursively visits every leaf in the subtree rooted at n, including
// n's own leaf. Returns true as soon as f aborts the walk.
func walk(n *Node, f Walker, sub bool) bool {
	// Visit the leaf values if any
	if n.leaf != nil {
		if f(n.leaf.key, n.leaf.val) {
			return true
		}
	}
	// Recurse on the children
	for _, e := range n.edges {
		if walk(e, f, true) {
			return true
		}
	}
	return false
}

// get performs a standard radix lookup of key k under n and returns the
// stored value, or nil when the key is absent.
func (n *Node) get(k []byte) *Item {
	s := k
	for {
		// Check for key exhaustion
		if len(s) == 0 {
			if n.isLeaf() {
				return n.leaf.val
			}
			break
		}
		// Look for an edge
		_, n = n.getSub(s[0])
		if n == nil {
			break
		}
		// Consume the search prefix
		if bytes.HasPrefix(s, n.prefix) {
			s = s[len(n.prefix):]
		} else {
			break
		}
	}
	return nil
} | node.go | 0.745861 | 0.451992 | node.go | starcoder
package xrand
import (
"encoding/binary"
"fmt"
"math"
"math/bits"
"time"
)
// Xoshiro256ss implements the xoshiro256** pseudo-random generator.
// Reference implementation: https://prng.di.unimi.it/xoshiro256starstar.c
type Xoshiro256ss struct {
	s [4]uint64 // 256-bit internal state
}

// NewXoshiro256ss returns a generator whose state is derived from seed.
func NewXoshiro256ss(seed int64) *Xoshiro256ss {
	x := Xoshiro256ss{}
	x.Seed(seed)
	return &x
}

// State serializes the four state words big-endian into 32 bytes.
func (x Xoshiro256ss) State() []byte {
	s := make([]byte, 32)
	binary.BigEndian.PutUint64(s[:8], x.s[0])
	binary.BigEndian.PutUint64(s[8:16], x.s[1])
	binary.BigEndian.PutUint64(s[16:24], x.s[2])
	binary.BigEndian.PutUint64(s[24:32], x.s[3])
	return s
}

// SetState restores the generator state from bytes produced by State.
// NOTE(review): bytesToState64 is defined elsewhere in this package; the
// SplitMix64 seeded from the wall clock here presumably supplies fallback
// words when state is too short — confirm against that helper.
func (x *Xoshiro256ss) SetState(state []byte) {
	mix := NewSplitMix64(time.Now().UTC().UnixNano())
	x.s[0] = bytesToState64(state, 0, &mix)
	x.s[1] = bytesToState64(state, 1, &mix)
	x.s[2] = bytesToState64(state, 2, &mix)
	x.s[3] = bytesToState64(state, 3, &mix)
}

// Seed initializes the state by expanding seed through a SplitMix64
// generator, as recommended by the xoshiro authors.
func (x *Xoshiro256ss) Seed(seed int64) {
	s := NewSplitMix64(seed)
	x.s[0] = s.Uint64()
	x.s[1] = s.Uint64()
	x.s[2] = s.Uint64()
	x.s[3] = s.Uint64()
}

// Uint64 advances the generator one step and returns the next value
// (the xoshiro256** scrambler: rotl(s1*5, 7) * 9).
func (x *Xoshiro256ss) Uint64() uint64 {
	result := bits.RotateLeft64(x.s[1]*5, 7) * 9
	t := x.s[1] << 17
	x.s[2] ^= x.s[0]
	x.s[3] ^= x.s[1]
	x.s[1] ^= x.s[2]
	x.s[0] ^= x.s[3]
	x.s[2] ^= t
	x.s[3] = bits.RotateLeft64(x.s[3], 45)
	return result
}

// Int64 returns the next value reinterpreted as a signed 64-bit integer
// (unsafeUint64ToInt64 is defined elsewhere in this package).
func (x *Xoshiro256ss) Int64() int64 {
	return unsafeUint64ToInt64(x.Uint64())
}

// Int63 returns a non-negative int63 by masking off the sign bit.
func (x *Xoshiro256ss) Int63() int64 {
	return int64(x.Uint64() & (1<<63 - 1))
}

// Float64 builds a float in [1, 2) from 52 random mantissa bits and the
// exponent pattern 0x3ff, then shifts it into [0, 1).
func (x *Xoshiro256ss) Float64() float64 {
	return math.Float64frombits(0x3ff<<52|x.Uint64()>>12) - 1.0
}
// Jump is equivalent to 2^128 calls of Uint64(); it can be used to generate
// 2^128 non-overlapping subsequences for parallel computations.
// Mirrors the reference jump() in prng.di.unimi.it/xoshiro256starstar.c.
func (x *Xoshiro256ss) Jump() {
	jump := [...]uint64{0x180ec6d33cfd0aba, 0xd5a61266f0c9392c, 0xa9582618e03fc9aa, 0x39abdc4529b1661c}

	var s [4]uint64
	for i := range jump {
		for b := 0; b < 64; b++ {
			// BUG FIX: `jump[i] & 1 << b` parsed as `(jump[i] & 1) << b`
			// because & and << share precedence in Go, so only bit 0 of the
			// jump polynomial was ever tested. Test bit b instead.
			if jump[i]&(uint64(1)<<b) != 0 {
				s[0] ^= x.s[0]
				s[1] ^= x.s[1]
				s[2] ^= x.s[2]
				s[3] ^= x.s[3]
			}
			// BUG FIX: the reference implementation advances the generator
			// once per *bit* (inside this loop), not once per jump word.
			x.Uint64()
		}
	}
	x.s[0] = s[0]
	x.s[1] = s[1]
	x.s[2] = s[2]
	x.s[3] = s[3]
}
// LongJump is equivalent to 2^192 calls of Uint64(); it can be used to
// generate 2^64 starting points, from each of which Jump() yields 2^64
// non-overlapping subsequences.
// Mirrors the reference long_jump() in prng.di.unimi.it/xoshiro256starstar.c.
func (x *Xoshiro256ss) LongJump() {
	jump := [...]uint64{
		0x76e15d3efefdcbbf, 0xc5004e441c522fb3, 0x77710069854ee241, 0x39109bb02acbe635,
	}

	var s [4]uint64
	for i := range jump {
		for b := 0; b < 64; b++ {
			// BUG FIX: `jump[i] & 1 << b` parsed as `(jump[i] & 1) << b`
			// (& and << share precedence in Go) and so tested only bit 0.
			if jump[i]&(uint64(1)<<b) != 0 {
				s[0] ^= x.s[0]
				s[1] ^= x.s[1]
				s[2] ^= x.s[2]
				s[3] ^= x.s[3]
			}
			// BUG FIX: advance the generator once per bit, as in the
			// reference implementation, not once per jump word.
			x.Uint64()
		}
	}
	x.s[0] = s[0]
	x.s[1] = s[1]
	x.s[2] = s[2]
	x.s[3] = s[3]
}
// String renders the 32-byte state as 64 lowercase hex digits.
func (x Xoshiro256ss) String() string {
	return fmt.Sprintf("%064x", x.State())
}

// GoString implements fmt.GoStringer (%#v) for debugging output.
func (x Xoshiro256ss) GoString() string {
	return "xrand.Xoshiro256ss{state:\"" + x.String() + "\"}"
} | xoshiro256ss.go | 0.524151 | 0.454109 | xoshiro256ss.go | starcoder
package datablocks
import (
"columns"
"datatypes"
"base/errors"
)
// DataBlock is a columnar block of data: one ColumnValue per column, with an
// optional row-selection sequence (seqs) applied during iteration.
type DataBlock struct {
	info      *DataBlockInfo
	seqs      []*datatypes.Value      // optional row selection; when set the block is immutable
	values    []*ColumnValue          // column values in declaration order
	immutable bool                    // set once seqs is installed; blocks further writes
	valuesmap map[string]*ColumnValue // column name -> value, for lookup by name
}

// NewDataBlock builds an empty block with one ColumnValue per column,
// indexed both by position and by column name.
func NewDataBlock(cols []columns.Column) *DataBlock {
	var values []*ColumnValue
	valuesmap := make(map[string]*ColumnValue)
	for _, col := range cols {
		cv := NewColumnValue(col)
		valuesmap[col.Name] = cv
		values = append(values, cv)
	}
	return &DataBlock{
		info:      &DataBlockInfo{},
		values:    values,
		valuesmap: valuesmap,
	}
}

// Info returns the block's metadata.
func (block *DataBlock) Info() *DataBlockInfo {
	return block.info
}
// NumRows returns the number of rows in the block: the length of the
// row-selection sequence when one is set, otherwise the length of the
// first column.
// NOTE(review): indexes values[0] unchecked — presumably callers guarantee
// at least one column; confirm before relying on zero-column blocks.
func (block *DataBlock) NumRows() int {
	// Idiom fix: early return instead of else after return.
	if block.seqs != nil {
		return len(block.seqs)
	}
	return block.values[0].NumRows()
}
// NumColumns returns the number of columns in the block.
func (block *DataBlock) NumColumns() int {
	return len(block.values)
}

// Columns returns the column definitions in declaration order.
func (block *DataBlock) Columns() []columns.Column {
	var cols []columns.Column
	for _, cv := range block.values {
		cols = append(cols, cv.column)
	}
	return cols
}

// Iterator returns an iterator over the named column (honoring seqs),
// or an error when the column does not exist.
func (block *DataBlock) Iterator(name string) (*DataBlockIterator, error) {
	cv, ok := block.valuesmap[name]
	if !ok {
		return nil, errors.Errorf("Can't find column:%v", name)
	}
	return newDataBlockIterator(block.seqs, cv), nil
}

// Iterators returns one iterator per column, in declaration order.
func (block *DataBlock) Iterators() []*DataBlockIterator {
	var iterators []*DataBlockIterator
	for _, cv := range block.values {
		iter := newDataBlockIterator(block.seqs, cv)
		iterators = append(iterators, iter)
	}
	return iterators
}

// Write appends the batch's column values to this block. It fails if the
// block is immutable or if any batch column is unknown; the existence check
// runs first so a failed write leaves the block unmodified.
func (block *DataBlock) Write(batcher *BatchWriter) error {
	if block.immutable {
		return errors.New("Can't write, block is immutable")
	}
	cols := batcher.values
	for _, col := range cols {
		if _, ok := block.valuesmap[col.column.Name]; !ok {
			return errors.Errorf("Can't find column:%v", col)
		}
	}
	for _, col := range cols {
		cv := block.valuesmap[col.column.Name]
		cv.values = append(cv.values, col.values...)
	}
	return nil
}
func (block *DataBlock) setSeqs(seqs []*datatypes.Value) {
block.seqs = seqs
block.immutable = true
} | src/datablocks/datablock.go | 0.557845 | 0.486697 | datablock.go | starcoder |
package ecvrf
import (
"bytes"
"crypto/elliptic"
"crypto/hmac"
"errors"
"hash"
"math/big"
)
// point is an affine elliptic-curve point.
type point struct {
	X, Y *big.Int
}

// core bundles the VRF Config with its curve and a reusable hasher.
type core struct {
	*Config
	curve elliptic.Curve

	cachedHasher hash.Hash // lazily created, reused across calls
}

// Q returns prime order of large prime order subgroup.
func (c *core) Q() *big.Int {
	return c.curve.Params().N
}

// N return half of length, in octets, of a field element in F, rounded up to the nearest even integer
func (c *core) N() int {
	return ((c.curve.Params().P.BitLen()+1)/2 + 7) / 8
}

// getCachedHasher returns the shared hasher, creating it on first use.
// NOTE(review): the hasher is shared mutable state, so concurrent use of
// one core would race — confirm callers serialize access.
func (c *core) getCachedHasher() hash.Hash {
	if c.cachedHasher != nil {
		return c.cachedHasher
	}
	c.cachedHasher = c.NewHasher()
	return c.cachedHasher
}

// Marshal marshals a point into compressed form specified in section 4.3.6 of ANSI X9.62.
// It's the alias of `point_to_string` specified in [draft-irtf-cfrg-vrf-06 section 5.5](https://tools.ietf.org/id/draft-irtf-cfrg-vrf-06.html#rfc.section.5.5).
func (c *core) Marshal(pt *point) []byte {
	byteLen := (c.curve.Params().BitSize + 7) / 8
	out := make([]byte, byteLen+1)
	// compress format: 0x02 for even y, 0x03 for odd y
	out[0] = 2 + byte(pt.Y.Bit(0))

	// right-align X into the fixed-width buffer (left pad with zeros)
	bytes := pt.X.Bytes()
	if n := len(bytes); byteLen > n {
		copy(out[1+byteLen-n:], bytes)
	} else {
		copy(out[1:], bytes)
	}
	return out
}
// Unmarshal unmarshals a compressed point in the form specified in section 4.3.6 of ANSI X9.62.
// It's the alias of `string_to_point` specified in [draft-irtf-cfrg-vrf-06 section 5.5](https://tools.ietf.org/id/draft-irtf-cfrg-vrf-06.html#rfc.section.5.5).
// This is borrowed from the project https://github.com/google/keytransparency.
func (c *core) Unmarshal(in []byte) (*point, error) {
	byteLen := (c.curve.Params().BitSize + 7) / 8
	// the leading byte must be 0x02 or 0x03 (compressed encoding)
	if (in[0] &^ 1) != 2 {
		return nil, errors.New("unrecognized point encoding")
	}
	if len(in) != 1+byteLen {
		return nil, errors.New("invalid point data length")
	}

	// Based on Routine 2.2.4 in NIST Mathematical routines paper
	p := c.curve.Params().P
	x := new(big.Int).SetBytes(in[1 : 1+byteLen])
	y2 := c.Y2(c.curve, x)
	y := c.Sqrt(c.curve, y2)
	if y == nil {
		// BUG FIX: error message said "squire" instead of "square".
		return nil, errors.New("invalid point: y^2 is not a square")
	}
	// verify the square root is genuine (Sqrt may not validate)
	var y2c big.Int
	y2c.Mul(y, y).Mod(&y2c, p)
	if y2c.Cmp(y2) != 0 {
		return nil, errors.New("invalid point: sqrt(y2)^2 != y2")
	}
	// pick the root whose parity matches the compression byte
	if y.Bit(0) != uint(in[0]&1) {
		y.Sub(p, y)
	}

	// valid point: return it
	return &point{x, y}, nil
}
// ScalarMult returns k*pt on the configured curve.
func (c *core) ScalarMult(pt *point, k []byte) *point {
	x, y := c.curve.ScalarMult(pt.X, pt.Y, k)
	return &point{x, y}
}

// ScalarBaseMult returns k*G, where G is the curve's base point.
func (c *core) ScalarBaseMult(k []byte) *point {
	x, y := c.curve.ScalarBaseMult(k)
	return &point{x, y}
}

// Add returns pt1 + pt2.
func (c *core) Add(pt1, pt2 *point) *point {
	x, y := c.curve.Add(pt1.X, pt1.Y, pt2.X, pt2.Y)
	return &point{x, y}
}

// Sub returns pt1 - pt2.
func (c *core) Sub(pt1, pt2 *point) *point {
	// pt1 - pt2 = pt1 + invert(pt2),
	// where invert(pt2) = (x2, P - y2)
	x, y := c.curve.Add(
		pt1.X, pt1.Y,
		pt2.X, new(big.Int).Sub(c.curve.Params().P, pt2.Y))
	return &point{x, y}
}

// HashToCurveTryAndIncrement takes in the VRF input `alpha` and converts it to H, using the try_and_increment algorithm.
// See: [draft-irtf-cfrg-vrf-06 section 5.4.1.1](https://tools.ietf.org/id/draft-irtf-cfrg-vrf-06.html#rfc.section.5.4.1.1).
func (c *core) HashToCurveTryAndIncrement(pk *point, alpha []byte) (H *point, err error) {
	hasher := c.getCachedHasher()

	// hash buffer; byte 0 is the compression prefix expected by Unmarshal
	hash := make([]byte, 1+hasher.Size())
	hash[0] = 2 // compress format

	// step 1: ctr = 0
	ctr := 0
	// step 2: PK_string = point_to_string(Y)
	pkBytes := c.Marshal(pk)
	// step 3 ~ 6: try successive counter values until a valid point appears
	prefix := []byte{c.SuiteString, 0x01}
	suffix := []byte{0}
	for ; ctr < 256; ctr++ {
		// hash_string = Hash(suite_string || one_string || PK_string || alpha_string || ctr_string)
		suffix[0] = byte(ctr)
		hasher.Reset()
		hasher.Write(prefix)
		hasher.Write(pkBytes)
		hasher.Write(alpha)
		hasher.Write(suffix)
		// append right after the compress-format byte
		hasher.Sum(hash[1:1])

		// H = arbitrary_string_to_point(hash_string)
		if H, err = c.Unmarshal(hash); err == nil {
			if c.Cofactor > 1 {
				// If H is not "INVALID" and cofactor > 1, set H = cofactor * H
				H = c.ScalarMult(H, []byte{c.Cofactor})
			}
			return H, nil
		}
	}
	return nil, errors.New("no valid point found")
}

// HashPoints hashes the compressed encodings of the given points into a
// challenge scalar of c.N()*8 bits.
// See: [draft-irtf-cfrg-vrf-06 section 5.4.3](https://tools.ietf.org/id/draft-irtf-cfrg-vrf-06.html#rfc.section.5.4.3)
func (c *core) HashPoints(points ...*point) *big.Int {
	hasher := c.getCachedHasher()
	hasher.Reset()
	hasher.Write([]byte{c.SuiteString, 0x2})
	for _, pt := range points {
		hasher.Write(c.Marshal(pt))
	}
	return bits2int(hasher.Sum(nil), c.N()*8)
}

// GammaToHash derives the VRF output hash from the proof point gamma
// (multiplied by the cofactor first, per the draft).
func (c *core) GammaToHash(gamma *point) []byte {
	gammaCof := c.ScalarMult(gamma, []byte{c.Cofactor})
	hasher := c.getCachedHasher()
	hasher.Reset()
	hasher.Write([]byte{c.SuiteString, 0x03})
	hasher.Write(c.Marshal(gammaCof))
	return hasher.Sum(nil)
}

// EncodeProof serializes pi = point_to_string(gamma) || int2octets(C) || int2octets(S).
func (c *core) EncodeProof(gamma *point, C, S *big.Int) []byte {
	gammaBytes := c.Marshal(gamma)

	cbytes := int2octets(C, c.N())
	sbytes := int2octets(S, (c.Q().BitLen()+7)/8)
	return append(append(gammaBytes, cbytes...), sbytes...)
}

// DecodeProof splits and validates a serialized proof into (gamma, C, S).
// See: [draft-irtf-cfrg-vrf-06 section 5.4.4](https://tools.ietf.org/id/draft-irtf-cfrg-vrf-06.html#rfc.section.5.4.4)
func (c *core) DecodeProof(pi []byte) (gamma *point, C, S *big.Int, err error) {
	var (
		ptlen = (c.curve.Params().BitSize+7)/8 + 1
		clen  = c.N()
		slen  = (c.Q().BitLen() + 7) / 8
	)
	if len(pi) != ptlen+clen+slen {
		err = errors.New("invalid proof length")
		return
	}

	if gamma, err = c.Unmarshal(pi[:ptlen]); err != nil {
		return
	}
	C = new(big.Int).SetBytes(pi[ptlen : ptlen+clen])
	S = new(big.Int).SetBytes(pi[ptlen+clen:])
	return
}
// https://tools.ietf.org/html/rfc6979#section-2.3.2
func bits2int(in []byte, qlen int) *big.Int {
out := new(big.Int).SetBytes(in)
if inlen := len(in) * 8; inlen > qlen {
return out.Rsh(out, uint(inlen-qlen))
}
return out
}
// https://tools.ietf.org/html/rfc6979#section-2.3.3
func int2octets(v *big.Int, rolen int) []byte {
var (
out = v.Bytes()
outlen = len(out)
)
// left pad with zeros if it's too short
if rolen > outlen {
out2 := make([]byte, rolen)
copy(out2[rolen-outlen:], out)
return out2
}
// drop most significant bytes if it's too long
return out[outlen-rolen:]
}
// bits2octets converts a bit string to octets of length rolen, reducing
// modulo q as needed, per https://tools.ietf.org/html/rfc6979#section-2.3.4
func bits2octets(in []byte, q *big.Int, rolen int) []byte {
	z1 := bits2int(in, q.BitLen())
	z2 := new(big.Int).Sub(z1, q)
	// if z1 < q use z1 directly, otherwise use z1 - q
	if z2.Sign() < 0 {
		return int2octets(z1, rolen)
	}
	return int2octets(z2, rolen)
}
// rfc6979nonce generates a deterministic nonce in (0, q) from the private
// key sk and message m, according to [RFC6979](https://tools.ietf.org/html/rfc6979).
// newHasher supplies the hash function H used throughout the derivation.
func rfc6979nonce(
	sk *big.Int,
	m []byte,
	q *big.Int,
	newHasher func() hash.Hash,
) *big.Int {
	var (
		qlen   = q.BitLen()
		rolen  = (qlen + 7) / 8
		hasher = newHasher()
	)

	// Step A
	// Process m through the hash function H, yielding:
	// h1 = H(m)
	// (h1 is a sequence of hlen bits).
	hasher.Write(m)
	h1 := hasher.Sum(nil)
	hlen := len(h1)

	bx := int2octets(sk, rolen)
	bh := bits2octets(h1, q, rolen)

	// Step B
	// Set:
	// V = 0x01 0x01 0x01 ... 0x01
	v := bytes.Repeat([]byte{1}, hlen)

	// Step C
	// Set:
	// K = 0x00 0x00 0x00 ... 0x00
	k := make([]byte, hlen)

	// Step D ~ G: two HMAC rounds with internal octets 0x00 then 0x01.
	for i := 0; i < 2; i++ {
		// Set:
		// K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1))
		mac := hmac.New(newHasher, k)
		mac.Write(v)
		mac.Write([]byte{byte(i)}) // internal octet
		mac.Write(bx)
		mac.Write(bh)
		mac.Sum(k[:0])

		// Set:
		// V = HMAC_K(V)
		mac = hmac.New(newHasher, k)
		mac.Write(v)
		mac.Sum(v[:0])
	}

	// Step H: squeeze candidate nonces until one lands in (0, q).
	for {
		// Step H1
		var t []byte

		// Step H2: extend T with V = HMAC_K(V) until qlen bits are collected.
		mac := hmac.New(newHasher, k)
		for len(t)*8 < qlen {
			mac.Write(v)
			mac.Sum(v[:0])
			mac.Reset()
			t = append(t, v...)
		}

		// Step H3: accept the candidate if 0 < secret < q.
		secret := bits2int(t, qlen)
		if secret.Sign() > 0 && secret.Cmp(q) < 0 {
			return secret
		}
		// Otherwise re-key: K = HMAC_K(V || 0x00), V = HMAC_K(V), and retry.
		mac.Write(v)
		mac.Write([]byte{0x00})
		mac.Sum(k[:0])
		mac = hmac.New(newHasher, k)
		mac.Write(v)
		mac.Sum(v[:0])
	}
} | core.go | 0.815894 | 0.40592 | core.go | starcoder
package colorful
import (
"math"
)
// Converts the given color to LuvLCh space using D65 as reference white.
// h values are in [0..360], C and L values are in [0..1] although C can overshoot 1.0
func (col Color) LuvLCh() (l, c, h float64) {
	return col.LuvLChWhiteRef(D65)
}

// LuvToLuvLCh converts cartesian Luv coordinates to cylindrical LuvLCh:
// l is carried over, c = sqrt(u^2 + v^2), h = atan2(v, u) in degrees [0..360).
func LuvToLuvLCh(L, u, v float64) (l, c, h float64) {
	// Oops, floating point workaround necessary if u ~= v and both are very small (i.e. almost zero).
	if math.Abs(v-u) > 1e-4 && math.Abs(u) > 1e-4 {
		h = math.Mod(57.29577951308232087721*math.Atan2(v, u)+360.0, 360.0) // Rad2Deg
	} else {
		h = 0.0
	}
	l = L
	c = math.Sqrt(sq(u) + sq(v))
	return
}

// Converts the given color to LuvLCh space, taking into account
// a given reference white. (i.e. the monitor's white)
// h values are in [0..360], c and l values are in [0..1]
func (col Color) LuvLChWhiteRef(wref [3]float64) (l, c, h float64) {
	return LuvToLuvLCh(col.LuvWhiteRef(wref))
}
// Generates a color by using data given in LuvLCh space using D65 as reference white.
// h values are in [0..360], C and L values are in [0..1]
// WARNING: many combinations of `l`, `c`, and `h` values do not have corresponding
// valid RGB values, check the FAQ in the README if you're unsure.
func LuvLCh(l, c, h float64) Color {
	return LuvLChWhiteRef(l, c, h, D65)
}
func LuvLChToLuv(l, c, h float64) (L, u, v float64) {
H := 0.01745329251994329576 * h // Deg2Rad
u = c * math.Cos(H)
v = c * math.Sin(H)
L = l
return
}
// Generates a color by using data given in LuvLCh space, taking
// into account a given reference white. (i.e. the monitor's white)
// h values are in [0..360], C and L values are in [0..1]
func LuvLChWhiteRef(l, c, h float64, wref [3]float64) Color {
	L, u, v := LuvLChToLuv(l, c, h)
	return LuvWhiteRef(L, u, v, wref)
}

// BlendLUVLCh blends the color with col2 in the cylindrical CIELUV color space.
func (col Color) BlendLUVLCh(col2 Color, t float64) Color {
	return BlendLUVLCh(col, col2, t)
}

// BlendLUVLCh blends col1 and col2 in the cylindrical CIELUV color space.
// t in [0..1] is the blend fraction; hue is interpolated along the shorter arc.
func BlendLUVLCh(col1, col2 Color, t float64) Color {
	l1, c1, h1 := col1.LuvLCh()
	l2, c2, h2 := col2.LuvLCh()

	// We know that h are both in [0..360]
	return LuvLCh(l1+t*(l2-l1), c1+t*(c2-c1), lerpAngle(h1, h2, t))
} | colorful/cs_luvlch.go | 0.858867 | 0.558327 | cs_luvlch.go | starcoder
// uniquePathsMemo counts the unique monotone (right/down only) paths from
// the top-left to the bottom-right corner of an m x n grid, using top-down
// recursion with a 2-D memo table.
// time: O(m*n), space: O(m*n) memo plus O(m+n) recursion stack.
func uniquePathsMemo(m int, n int) int {
	memo := make([][]int, m)
	for row := range memo {
		memo[row] = make([]int, n)
	}
	var dfs func(int, int) int
	dfs = func(r, c int) int {
		if numPaths := memo[r][c]; numPaths != 0 {
			return numPaths
		}
		// BUG FIX: every cell in the first row or first column — including
		// the start cell (0,0) itself — is reachable by exactly one path.
		// The previous code returned 0 for (0,0), so a 1x1 grid reported
		// 0 paths instead of 1.
		if r == 0 || c == 0 {
			return 1
		}
		memo[r][c] = dfs(r-1, c) + dfs(r, c-1)
		return memo[r][c]
	}
	return dfs(m-1, n-1)
}
// bottom-up 2D tabular approach
// time: O(m*n)
// space: O(m*n)
func uniquePaths2D(m int, n int) int {
	// dp[r][c] = number of unique paths from (0,0) to (r,c).
	dp := make([][]int, m)
	for r := range dp {
		dp[r] = make([]int, n)
		dp[r][0] = 1 // single path straight down the left edge
	}
	for c := range dp[0] {
		dp[0][c] = 1 // single path straight across the top edge
	}
	// Every interior cell is entered from the left or from above.
	for r := 1; r < m; r++ {
		for c := 1; c < n; c++ {
			dp[r][c] = dp[r-1][c] + dp[r][c-1]
		}
	}
	return dp[m-1][n-1]
}
/*
reduced to O(n) space bottom up DP version
- we only need to reserve space for each col in our dp table
- taking care of initializing our dp arr with vals = 1,
this marks the iteration of traversing right bound on row0
- when iterating through each remaining (r,c) pair row-wise,
we increment the store at that dp idx by the unique paths from the left
of the dp idx; recurrence relation: dp[i] = dp[i] + dp[i-1] i elem of {1,n-1}
- after each row traversal, our dp array stores results for unique paths
up to that row
for m=3, n=7 ex
[1, 1, 1, 1, 1, 1, 1] after row=0 traversal
[1, 2, 3, 4, 5, 6, 7] after row=1 traversal
[1, 3, 6, 10, 15, 21, 28] after row=2 traversal; res =28
time: O(m*n)
space: O(n)
*/
// uniquePaths counts monotone paths through an m x n grid using a rolling
// 1-D dp row (see the space-reduction walkthrough in the comment above).
// time: O(m*n), space: O(n).
func uniquePaths(m int, n int) int {
	// Row 0: exactly one path into each column.
	dp := make([]int, n)
	for idx := range dp {
		dp[idx] = 1
	}
	// Recurrence per row: dp[col] = dp[col] (from above) + dp[col-1] (from left).
	for row := 1; row < m; row++ {
		for col := 1; col < n; col++ {
			dp[col] += dp[col-1]
		}
	}
	// BUG FIX: removed leftover debug output fmt.Println(dp).
	// NOTE(review): if nothing else in this file still uses fmt, drop the import too.
	return dp[n-1]
}
/*
prev version; 11/22/2021
time: O(m*n) number of nodes in graph
space: O(m*n) for aux memo + O(n) for recursion stack
*/
/*
import (
"strconv"
"fmt"
)
func uniquePaths(m int, n int) int {
memo := make(map[string]int)
return dfs(memo, 0, 0, m, n )
}
func isValid(i, j, m, n int) bool {
if i == m || j == n {
return false
}
return true
}
func dfs(memo map[string]int, i, j, m, n int) int {
if !isValid(i,j,m,n) {
return 0
}
key := strconv.Itoa(i) + "_" + strconv.Itoa(j)
if _, ok := memo[key]; ok {
return memo[key]
}
if i == m -1 && j == n - 1 {
return 1
}
path := 0
path += dfs(memo,i+1, j, m, n)
path += dfs(memo,i, j+1, m, n)
memo[key] = path
fmt.Println(memo)
return memo[key]
}
*/ | unique-paths/unique-paths.go | 0.619586 | 0.404802 | unique-paths.go | starcoder |
package date
import (
"database/sql/driver"
"fmt"
"time"
)
// Date specifies a date without time (only year, month and day).
// Internally it represents a time.Time instant with zero UTC time parts.
// The zero value of the struct is represented as "0001-01-01".
type Date struct {
	t time.Time // midnight UTC on the represented day
}

// NewDate creates a new date instance from the provided year, month and day.
func NewDate(year int, month time.Month, day int) Date {
	return Date{
		t: time.Date(year, month, day, 0, 0, 0, 0, time.UTC),
	}
}

// Parse parses a formatted string and returns the date value it represents.
// The string must be in in ISO 8601 extended format (e.g. "2006-01-02").
func Parse(s string) (Date, error) {
	t, err := time.Parse("2006-01-02", s)
	return Date{t: t}, err
}

// Format returns a textual representation of the date value formatted
// according to the specified layout (the same as in time.Time.Format).
func (d Date) Format(layout string) string {
	return d.t.Format(layout)
}

// String returns a string representing the date instant in ISO 8601
// extended format (e.g. "2006-01-02").
func (d Date) String() string {
	return d.t.Format("2006-01-02")
}

// IsZero reports whether d represents the zero date instant,
// year 1, January 1 ("0001-01-01").
func (d Date) IsZero() bool {
	return d.t.IsZero()
}

// Day returns the day of the month specified by d.
func (d Date) Day() int {
	return d.t.Day()
}

// Month returns the month of the year specified by t.
func (d Date) Month() time.Month {
	return d.t.Month()
}

// Year returns the year in which d occurs.
func (d Date) Year() int {
	return d.t.Year()
}

// Before reports whether the date instant d is before d2.
func (d Date) Before(d2 Date) bool {
	return d.t.Before(d2.t)
}

// After reports whether the date instant d is after d2.
func (d Date) After(d2 Date) bool {
	return d.t.After(d2.t)
}

// Equal reports whether the date instant d is equal to d2.
// It compares the calendar date components rather than the time instants.
func (d Date) Equal(d2 Date) bool {
	d1Year, d1Month, d1Day := d.t.Date()
	d2Year, d2Month, d2Day := d2.t.Date()
	return d1Year == d2Year && d1Month == d2Month && d1Day == d2Day
}

// Sub returns the duration d-d2 in days.
func (d Date) Sub(d2 Date) int {
	return int(d.t.Sub(d2.t).Hours() / 24)
}
// Scan parses a value (usually from db). It implements sql.Scanner,
// https://golang.org/pkg/database/sql/#Scanner.
// Supported inputs: nil, []byte / string in "2006-01-02" form (empty input
// resets to the zero date), and time.Time (time-of-day is discarded).
// Any other type yields an error and resets d to the zero date.
func (d *Date) Scan(v interface{}) (err error) {
	// Idiom fix: dropped redundant `err = nil` — the named result
	// is already zero-valued.
	switch v := v.(type) {
	case nil:
		d.t = time.Time{}
	case []byte:
		if len(v) > 0 {
			d.t, err = time.Parse("2006-01-02", string(v))
		} else {
			d.t = time.Time{}
		}
	case string:
		if v != "" {
			d.t, err = time.Parse("2006-01-02", v)
		} else {
			d.t = time.Time{}
		}
	case time.Time:
		year, month, day := v.Date()
		d.t = time.Date(year, month, day, 0, 0, 0, 0, time.UTC)
	default:
		err = fmt.Errorf("%T %+v is not a meaningful date", v, v)
		d.t = time.Time{}
	}
	return err
}
// Value converts the instant to a time.Time. It implements driver.Valuer,
// https://golang.org/pkg/database/sql/driver/#Valuer.
func (d Date) Value() (driver.Value, error) {
	return d.t, nil
}

// MarshalText implements the encoding.TextMarshaler interface.
// The date is given in ISO 8601 extended format (e.g. "2006-01-02").
func (d Date) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}

// UnmarshalText implements the encoding.TextUnmarshaler interface.
// The date is expected to be in ISO 8601 extended format (e.g. "2006-01-02").
// It delegates to Scan, which accepts []byte directly.
func (d *Date) UnmarshalText(data []byte) (err error) {
	return d.Scan(data)
} | date.go | 0.842604 | 0.637736 | date.go | starcoder
package stripe
import (
"net/url"
"reflect"
"strconv"
)
// addParamsToValues takes an interface (usually *SomeTypeParams) and a pointer
// to a url.Values. It iterates over each field in the interface (using
// the attributes method), and adds the value of each field to the url.Values.
// Zero-valued fields produce an empty string and are skipped, so they are
// never sent. The form key comes from the field's `stripe_field` tag.
func addParamsToValues(params interface{}, values *url.Values) {
	var val string
	for name, mtype := range attributes(params) {
		// Dispatch on the field's Go type; unsupported types keep the
		// previous loop iteration's val (and are skipped when empty).
		switch mtype.Name() {
		case "string":
			val = getString(params, name)
		case "int":
			val = getInt(params, name)
		case "float64":
			val = getFloat64(params, name)
		case "bool":
			val = getBool(params, name)
		}
		if val != "" {
			values.Add(getTag(params, "stripe_field", name), val)
		}
	}
}
// attributes takes a struct m and returns a map of strings (field names) to
// reflect.Types (field types).
func attributes(m interface{}) map[string]reflect.Type {
typ := reflect.TypeOf(m)
if typ.Kind() == reflect.Ptr {
typ = typ.Elem()
}
attrs := make(map[string]reflect.Type)
for i := 0; i < typ.NumField(); i++ {
p := typ.Field(i)
if !p.Anonymous {
attrs[p.Name] = p.Type
}
}
return attrs
}
// getTag returns the struct tag named tagName on field fieldName of the
// struct that m points to. m must be a pointer to a struct.
func getTag(m interface{}, tagName, fieldName string) string {
	field, _ := reflect.TypeOf(m).Elem().FieldByName(fieldName)
	return field.Tag.Get(tagName)
}
// getString gets the value of fieldName in the struct m, and returns it.
// (No zero-value filtering here: an empty string is returned as-is and
// filtered by the caller.)
func getString(m interface{}, fieldName string) string {
	return getField(m, fieldName).String()
}
// getBool gets the value of fieldName in the struct m (bool), converts it to
// a string, and returns the result. If the value is the zero value (false),
// it returns a blank string. When the field carries an `opposite:"true"`
// tag, the reported value is negated.
func getBool(m interface{}, fieldName string) string {
	val := getField(m, fieldName).Bool()
	// Idiom fix: guard clause + early returns instead of nested else blocks.
	if !val {
		return ""
	}
	if opposite, _ := strconv.ParseBool(getTag(m, "opposite", fieldName)); opposite {
		return strconv.FormatBool(!val)
	}
	return strconv.FormatBool(val)
}
// getInt gets the value of fieldName in the struct m (int), converts it to a
// string, and returns the result. If the value is the zero value (0), it
// returns a blank string.
func getInt(m interface{}, fieldName string) string {
	val := int(getField(m, fieldName).Int())
	// Idiom fix: early return instead of else after return.
	if val == 0 {
		return ""
	}
	return strconv.Itoa(val)
}
// getFloat64 gets the value of fieldName in the struct m (float64), converts
// it to a string with two decimal places, and returns the result. If the
// value is the zero value (0.0), it returns a blank string.
// NOTE(review): FormatFloat is called with bitSize 32, so the value is
// rounded as if it were a float32 — confirm this is intentional.
func getFloat64(m interface{}, fieldName string) string {
	val := getField(m, fieldName).Float()
	// Idiom fix: early return instead of else after return.
	if val == 0.0 {
		return ""
	}
	return strconv.FormatFloat(val, 'f', 2, 32)
}
// getField returns the reflect.Value of field fieldName in the struct m,
// dereferencing m first when it is a pointer.
func getField(m interface{}, fieldName string) reflect.Value {
	return reflect.Indirect(reflect.ValueOf(m)).FieldByName(fieldName)
}
package slippy
import (
"math"
"github.com/go-spatial/geom"
)
// NewTile returns a slippy tile at zoom z, column x, row y, with the given
// buffer (in the SRID's units) and spatial reference id.
func NewTile(z, x, y uint, buffer float64, srid uint64) *Tile {
	return &Tile{
		z:      z,
		x:      x,
		y:      y,
		Buffer: buffer,
		SRID:   srid,
	}
}

// Tile describes a slippy tile.
type Tile struct {
	// zoom
	z uint
	// column
	x uint
	// row
	y uint
	// buffer will add a buffer to the tile bounds. this buffer is expected to use the same units as the SRID
	// of the projected tile (i.e. WebMercator = pixels, 3395 = meters)
	Buffer float64
	// spatial reference id
	SRID uint64
}

// NewTileLatLon returns the tile containing the given lat/lon (degrees)
// at zoom z, with the given buffer and SRID.
func NewTileLatLon(z uint, lat, lon, buffer float64, srid uint64) *Tile {
	x := Lon2Tile(z, lon)
	y := Lat2Tile(z, lat)

	return &Tile{
		z:      z,
		x:      x,
		y:      y,
		Buffer: buffer,
		SRID:   srid,
	}
}

// ZXY returns the tile's zoom, column and row.
func (t *Tile) ZXY() (uint, uint, uint) { return t.z, t.x, t.y }
func Lat2Tile(zoom uint, lat float64) (y uint) {
lat_rad := lat * math.Pi / 180
return uint(math.Exp2(float64(zoom))*
(1.0-math.Log(
math.Tan(lat_rad)+
(1/math.Cos(lat_rad)))/math.Pi)) /
2.0
}
func Lon2Tile(zoom uint, lon float64) (x uint) {
return uint(math.Exp2(float64(zoom)) * (lon + 180.0) / 360.0)
}
// Tile2Lon will return the west most longitude
func Tile2Lon(x, z uint) float64 { return float64(x)/math.Exp2(float64(z))*360.0 - 180.0 }
// Tile2Lat will return the east most Latitude
func Tile2Lat(y, z uint) float64 {
var n float64 = math.Pi
if y != 0 {
n = math.Pi - 2.0*math.Pi*float64(y)/math.Exp2(float64(z))
}
return 180.0 / math.Pi * math.Atan(0.5*(math.Exp(n)-math.Exp(-n)))
}
// Bounds returns the bounds of the Tile as defined by the East most longitude, North most latitude, West most longitude, South most latitude.
// Returned order: [east, north, west, south], all in degrees.
func (t *Tile) Bounds() [4]float64 {
	east := Tile2Lon(t.x, t.z)
	west := Tile2Lon(t.x+1, t.z)
	north := Tile2Lat(t.y, t.z)
	south := Tile2Lat(t.y+1, t.z)

	return [4]float64{east, north, west, south}
}
/*
// Keep this comment as it is a guide for how we can take bounds and a srid and convert it to Extents and Buffereded Extents.
// This is how we convert from the Bounds, and TileSize to Extent for Webmercator.
bounds := t.Bounds()
east,north,west, south := bounds[0],bounds[1],bounds[2],bounds[3]
TileSize := 4096.0
// Convert bounds to coordinates in webmercator.
c, err := webmercator.PToXY(east, north, west, south)
log.Println("c", c, "err", err)
// Turn the Coordinates into an Extent (minx, miny, maxx, maxy)
// Here is where the origin flip happens if there is one.
extent := geom.NewBBox(
[2]float64{c[0], c[1]},
[2]float64{c[2], c[3]},
)
// A Span is just MaxX - MinX
xspan := extent.XSpan()
yspan := extent.YSpan()
log.Println("Extent", extent, "MinX", extent.MinX(), "MinY", extent.MinY(), "xspan", xspan, "yspan", yspan)
// To get the Buffered Extent, we just need the extent and the Buffer size.
// Convert to tile coordinates. Convert the meters (WebMercator) into pixels of the tile..
nx := float64(int64((c[0] - extent.MinX()) * TileSize / xspan))
ny := float64(int64((c[1] - extent.MinY()) * TileSize / yspan))
mx := float64(int64((c[2] - extent.MinX()) * TileSize / xspan))
my := float64(int64((c[3] - extent.MinY()) * TileSize / yspan))
// Expend by the that number of pixels. We could also do the Expand on the Extent instead, of the Bounding Box on the Pixel.
mextent := geom.NewBBox([2]float64{nx, ny}, [2]float64{mx, my}).ExpandBy(64)
log.Println("mxy[", nx, ny, mx, my, "]", "err", err, "mext", mextent)
// Convert Pixel back to meters.
bext := geom.NewBBox(
[2]float64{
(mextent.MinX() * xspan / TileSize) + extent.MinX(),
(mextent.MinY() * yspan / TileSize) + extent.MinY(),
},
[2]float64{
(mextent.MaxX() * xspan / TileSize) + extent.MinX(),
(mextent.MaxY() * yspan / TileSize) + extent.MinY(),
},
)
log.Println("bext", bext)
*/
// TODO(arolek): support alternative SRIDs. Currently this assumes 3857
// Extent will return the tile extent excluding the tile's buffer and the Extent's SRID
func (t *Tile) Extent() (extent *geom.Extent, srid uint64) {
	// half-circumference of the WebMercator world in meters
	max := 20037508.34

	// resolution: width/height of one tile at this zoom, in meters
	res := (max * 2) / math.Exp2(float64(t.z))

	// unbuffered extent
	return geom.NewExtent(
		[2]float64{
			-max + (float64(t.x) * res), // MinX
			max - (float64(t.y) * res), // Miny
		},
		[2]float64{
			-max + (float64(t.x) * res) + res, // MaxX
			max - (float64(t.y) * res) - res, // MaxY
		},
	), t.SRID
}

// BufferedExtent will return the tile extent including the tile's buffer and the Extent's SRID
func (t *Tile) BufferedExtent() (bufferedExtent *geom.Extent, srid uint64) {
	extent, _ := t.Extent()

	// TODO(arolek): the following value is hard coded for MVT, but this concept needs to be abstracted to support different projections
	mvtTileWidthHeight := 4096.0
	// the bounds / extent in tile pixel coordinates, expanded by Buffer pixels
	mvtTileExtent := [4]float64{
		0 - t.Buffer, 0 - t.Buffer,
		mvtTileWidthHeight + t.Buffer, mvtTileWidthHeight + t.Buffer,
	}

	// scale factors from tile pixels back to projected units
	xspan := extent.MaxX() - extent.MinX()
	yspan := extent.MaxY() - extent.MinY()

	bufferedExtent = geom.NewExtent(
		[2]float64{
			(mvtTileExtent[0] * xspan / mvtTileWidthHeight) + extent.MinX(),
			(mvtTileExtent[1] * yspan / mvtTileWidthHeight) + extent.MinY(),
		},
		[2]float64{
			(mvtTileExtent[2] * xspan / mvtTileWidthHeight) + extent.MinX(),
			(mvtTileExtent[3] * yspan / mvtTileWidthHeight) + extent.MinY(),
		},
	)
	return bufferedExtent, t.SRID
}
// TODO (ear7h): sibling support
// RangeFamilyAt calls f on every tile vertically related to t at the specified zoom
func (t *Tile) RangeFamilyAt(zoom uint, f func(*Tile) error) error {
// handle ancestors and self
if zoom <= t.z {
mag := t.z - zoom
arg := NewTile(zoom, t.x>>mag, t.y>>mag, t.Buffer, t.SRID)
return f(arg)
}
// handle descendants
mag := zoom - t.z
delta := uint(math.Exp2(float64(mag)))
leastX := t.x << mag
leastY := t.y << mag
for x := leastX; x < leastX+delta; x++ {
for y := leastY; y < leastY+delta; y++ {
err := f(NewTile(zoom, x, y, 0, geom.WebMercator))
if err != nil {
return err
}
}
}
return nil
} | vendor/github.com/go-spatial/geom/slippy/tile.go | 0.743913 | 0.577346 | tile.go | starcoder |
package htmldiff
import (
"github.com/mb0/diff"
"golang.org/x/net/html"
)
// treeRune holds an individual rune in the HTML along with the node it is in and, for convienience, its position (if in a container).
type treeRune struct {
leaf *html.Node
letter rune
pos posT
}
// diffData is a type that exists in order to provide a diff.Data interface. It holds the two sets of treeRunes to difference.
type diffData struct {
a, b *[]treeRune
}
// Equal exists to fulfill the diff.Data interface.
// NOTE: this is usually the most called function in the package!
func (dd diffData) Equal(i, j int) bool {
if (*dd.a)[i].letter != (*dd.b)[j].letter {
return false
}
if !posEqual((*dd.a)[i].pos, (*dd.b)[j].pos) {
return false
}
return nodeBranchesEqual((*dd.a)[i].leaf, (*dd.b)[j].leaf)
}
// nodeBranchesEqual checks that two leaves come from branches that can be compared.
func nodeBranchesEqual(leafA, leafB *html.Node) bool {
if !nodeEqualExText(leafA, leafB) {
return false
}
if leafA.Parent == nil && leafB.Parent == nil {
return true // at the top of the tree
}
if leafA.Parent != nil && leafB.Parent != nil {
return nodeEqualExText(leafA.Parent, leafB.Parent) // go up to the next level
}
return false // one of the leaves has a parent, the other does not
}
// attrEqual checks that the attributes of two nodes are the same.
func attrEqual(base, comp *html.Node) bool {
if len(comp.Attr) != len(base.Attr) {
return false
}
for a := range comp.Attr {
if comp.Attr[a].Key != base.Attr[a].Key ||
comp.Attr[a].Namespace != base.Attr[a].Namespace ||
comp.Attr[a].Val != base.Attr[a].Val {
return false
}
}
return true
}
// compares nodes excluding their text
func nodeEqualExText(base, comp *html.Node) bool {
if comp.DataAtom != base.DataAtom ||
comp.Namespace != base.Namespace ||
comp.Type != base.Type {
return false
}
return attrEqual(base, comp)
}
// renders a tree of nodes into a slice of treeRunes.
func renderTreeRunes(n *html.Node, tr *[]treeRune) {
p := getPos(n)
if n.FirstChild == nil { // it is a leaf node
switch n.Type {
case html.TextNode:
if len(n.Data) == 0 {
*tr = append(*tr, treeRune{leaf: n, letter: '\u200b' /* zero-width space */, pos: p}) // make sure we catch the node, even if no data
} else {
for _, r := range []rune(n.Data) {
*tr = append(*tr, treeRune{leaf: n, letter: r, pos: p})
}
}
default:
*tr = append(*tr, treeRune{leaf: n, letter: 0, pos: p})
}
} else {
for c := n.FirstChild; c != nil; c = c.NextSibling {
renderTreeRunes(c, tr)
}
}
}
// wrapper for diff.Granular() -- should only concatanate changes for similar text nodes
func granular(gran int, dd diffData, changes []diff.Change) []diff.Change {
ret := make([]diff.Change, 0, len(changes))
startSame := 0
changeCount := 0
lastAleaf, lastBleaf := (*dd.a)[0].leaf, (*dd.b)[0].leaf
for c, cc := range changes {
if cc.A < len(*dd.a) && cc.B < len(*dd.b) &&
lastAleaf.Type == html.TextNode && lastBleaf.Type == html.TextNode &&
(*dd.a)[cc.A].leaf == lastAleaf && (*dd.b)[cc.B].leaf == lastBleaf &&
nodeEqualExText(lastAleaf, lastBleaf) { // TODO is this last constraint required?
// do nothing yet, queue it up until there is a difference
changeCount++
} else { // no match
if changeCount > 0 { // flush
ret = append(ret, diff.Granular(gran, changes[startSame:startSame+changeCount])...)
}
ret = append(ret, cc)
startSame = c + 1 // the one after this
changeCount = 0
if cc.A < len(*dd.a) && cc.B < len(*dd.b) {
lastAleaf, lastBleaf = (*dd.a)[cc.A].leaf, (*dd.b)[cc.B].leaf
}
}
}
if changeCount > 0 { // flush
ret = append(ret, diff.Granular(gran, changes[startSame:])...)
}
return ret
} | treerunes.go | 0.529507 | 0.534673 | treerunes.go | starcoder |
package test_persistence
import (
"testing"
data1 "github.com/pip-services-samples/service-beacons-go/data/version1"
persist "github.com/pip-services-samples/service-beacons-go/persistence"
cdata "github.com/pip-services3-go/pip-services3-commons-go/data"
"github.com/stretchr/testify/assert"
)
type BeaconsPersistenceFixture struct {
BEACON1 *data1.BeaconV1
BEACON2 *data1.BeaconV1
BEACON3 *data1.BeaconV1
persistence persist.IBeaconsPersistence
}
func NewBeaconsPersistenceFixture(persistence persist.IBeaconsPersistence) *BeaconsPersistenceFixture {
c := BeaconsPersistenceFixture{}
c.BEACON1 = &data1.BeaconV1{
Id: "1",
Udi: "00001",
Type: data1.AltBeacon,
SiteId: "1",
Label: "TestBeacon1",
Center: data1.GeoPointV1{Type: "Point", Coordinates: [][]float32{{0.0, 0.0}}},
Radius: 50,
}
c.BEACON2 = &data1.BeaconV1{
Id: "2",
Udi: "00002",
Type: data1.IBeacon,
SiteId: "1",
Label: "TestBeacon2",
Center: data1.GeoPointV1{Type: "Point", Coordinates: [][]float32{{2.0, 2.0}}},
Radius: 70,
}
c.BEACON3 = &data1.BeaconV1{
Id: "3",
Udi: "00003",
Type: data1.AltBeacon,
SiteId: "2",
Label: "TestBeacon3",
Center: data1.GeoPointV1{Type: "Point", Coordinates: [][]float32{{10.0, 10.0}}},
Radius: 50,
}
c.persistence = persistence
return &c
}
func (c *BeaconsPersistenceFixture) testCreateBeacons(t *testing.T) {
// Create the first beacon
beacon, err := c.persistence.Create("", c.BEACON1)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, c.BEACON1.Udi, beacon.Udi)
assert.Equal(t, c.BEACON1.SiteId, beacon.SiteId)
assert.Equal(t, c.BEACON1.Type, beacon.Type)
assert.Equal(t, c.BEACON1.Label, beacon.Label)
assert.NotNil(t, beacon.Center)
// Create the second beacon
beacon, err = c.persistence.Create("", c.BEACON2)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, c.BEACON2.Udi, beacon.Udi)
assert.Equal(t, c.BEACON2.SiteId, beacon.SiteId)
assert.Equal(t, c.BEACON2.Type, beacon.Type)
assert.Equal(t, c.BEACON2.Label, beacon.Label)
assert.NotNil(t, beacon.Center)
// Create the third beacon
beacon, err = c.persistence.Create("", c.BEACON3)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, c.BEACON3.Udi, beacon.Udi)
assert.Equal(t, c.BEACON3.SiteId, beacon.SiteId)
assert.Equal(t, c.BEACON3.Type, beacon.Type)
assert.Equal(t, c.BEACON3.Label, beacon.Label)
assert.NotNil(t, beacon.Center)
}
func (c *BeaconsPersistenceFixture) TestCrudOperations(t *testing.T) {
var beacon1 data1.BeaconV1
// Create items
c.testCreateBeacons(t)
// Get all beacons
page, err := c.persistence.GetPageByFilter("", cdata.NewEmptyFilterParams(), cdata.NewEmptyPagingParams())
assert.Nil(t, err)
assert.NotNil(t, page)
assert.Len(t, page.Data, 3)
beacon1 = *page.Data[0]
// Update the beacon
beacon1.Label = "ABC"
beacon, err := c.persistence.Update("", &beacon1)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, beacon1.Id, beacon.Id)
assert.Equal(t, "ABC", beacon.Label)
// Get beacon by udi
beacon, err = c.persistence.GetOneByUdi("", beacon1.Udi)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, beacon1.Id, beacon.Id)
// Delete the beacon
beacon, err = c.persistence.DeleteById("", beacon1.Id)
assert.Nil(t, err)
assert.NotNil(t, beacon)
assert.Equal(t, beacon1.Id, beacon.Id)
// Try to get deleted beacon
beacon, err = c.persistence.GetOneById("", beacon1.Id)
assert.Nil(t, err)
assert.Nil(t, beacon)
}
func (c *BeaconsPersistenceFixture) TestGetWithFilters(t *testing.T) {
// Create items
c.testCreateBeacons(t)
// Filter by id
page, err := c.persistence.GetPageByFilter("",
cdata.NewFilterParamsFromTuples(
"id", "1",
),
cdata.NewEmptyPagingParams())
assert.Nil(t, err)
assert.Len(t, page.Data, 1)
// Filter by udi
page, err = c.persistence.GetPageByFilter(
"",
cdata.NewFilterParamsFromTuples(
"udi", "00002",
),
cdata.NewEmptyPagingParams())
assert.Nil(t, err)
assert.Len(t, page.Data, 1)
// Filter by udis
page, err = c.persistence.GetPageByFilter(
"",
cdata.NewFilterParamsFromTuples(
"udis", "00001,00003",
),
cdata.NewEmptyPagingParams())
assert.Nil(t, err)
assert.Len(t, page.Data, 2)
// Filter by site_id
page, err = c.persistence.GetPageByFilter(
"",
cdata.NewFilterParamsFromTuples(
"site_id", "1",
),
cdata.NewEmptyPagingParams())
assert.Nil(t, err)
assert.Len(t, page.Data, 2)
} | test/persistence/BeaconsPersistenceFixture.go | 0.658527 | 0.558628 | BeaconsPersistenceFixture.go | starcoder |
package golsp
import (
"strings"
"strconv"
"unicode"
"fmt"
)
type OperatorType int
const (
OperatorTypeSpread OperatorType = 0
OperatorTypeZip OperatorType = 1
OperatorTypeDot OperatorType = 2
)
var Operators = []string{"...", ":", "."}
var OperatorTypes = map[string]OperatorType{
"...": OperatorTypeSpread,
":": OperatorTypeZip,
".": OperatorTypeDot,
}
var LiteralDelimiters = map[string]string{"\"": "\"", "#": "\n"}
var LiteralDelimiterTypes = map[string]STNodeType{
"\"": STNodeTypeStringLiteral,
"#": STNodeTypeComment,
}
var TokenDelimiters = map[string]string{
"": "",
"[": "]",
"]": "",
"{": "}",
"}": "",
"(": ")",
")": "",
}
var TokenDelimiterTypes = map[string]STNodeType{
"": STNodeTypeScope,
"[": STNodeTypeExpression,
"{": STNodeTypeList,
"(": STNodeTypeMap,
}
// MakeST: construct a syntax tree from a list of tokens
// `tokens`: list of tokens to parse
func MakeST(tokens []string) STNode {
root := STNode{Type: STNodeTypeScope}
root.Children, _ = makeST(tokens[0], tokens[1:])
return root
}
// makeST: recursively construct a syntax tree from a list of tokens
// `delim`: the leading delimeter of the current expression
// `tokens`: remaining tokens to parse
// this function returns a list of nodes within the current expression
// and a list of remaining unparsed tokens
func makeST(delim string, tokens []string) ([]STNode, []string) {
nodes := make([]STNode, 0, len(tokens))
zip := false
dot := false
var prev *STNode
var current STNode
newline := false
prevlength := 0
i := 0
for ; i < len(tokens); i++ {
if tokens[i] == TokenDelimiters[delim] { return nodes, tokens[i + 1:] }
if tokens[i] == "\n" {
delimtype := TokenDelimiterTypes[delim]
if newline && (len(nodes) - prevlength) > 1 &&
delimtype != STNodeTypeMap && delimtype != STNodeTypeList {
node := STNode{
Type: STNodeTypeExpression,
Children: make([]STNode, len(nodes[prevlength:])),
}
copy(node.Children, nodes[prevlength:])
nodes = nodes[:prevlength]
nodes, prev, zip, dot = appendNode(nodes, node, prev, zip, dot)
}
newline = true
prevlength = len(nodes)
continue
}
current = STNode{
Head: tokens[i],
Type: STNodeTypeIdentifier,
Children: make([]STNode, 0),
}
// check if current token is a delimiter '[]' or '{}'
// parse recursively if so
delimtype, isDelimiter := TokenDelimiterTypes[current.Head]
if isDelimiter {
var newtokens []string
current.Type = delimtype
current.Children, newtokens = makeST(current.Head, tokens[i + 1:])
i = -1
tokens = newtokens
nodes, prev, zip, dot = appendNode(nodes, current, prev, zip, dot)
continue
}
// check if current token is an extended literal i.e a string or comment
literaltype, isLiteral := LiteralDelimiterTypes[string(current.Head[0])]
if isLiteral {
if literaltype == STNodeTypeComment { continue }
if literaltype == STNodeTypeStringLiteral {
current.Head = fmt.Sprintf("\"%s\"", normalizeString(current.Head[1:len(current.Head) - 1]))
}
current.Type = literaltype
nodes, prev, zip, dot = appendNode(nodes, current, prev, zip, dot)
continue
}
// check if current token is a number literal
num, err := strconv.ParseFloat(current.Head, 64)
if err == nil {
current.Type = STNodeTypeNumberLiteral
if float64(int(num)) == num {
current.Head = strconv.Itoa(int(num))
} else { current.Head = fmt.Sprintf("%g", num) }
nodes, prev, zip, dot = appendNode(nodes, current, prev, zip, dot)
continue
}
// check if current token is an operator
optype, isOperator := OperatorTypes[current.Head]
if isOperator && len(nodes) > 0 {
switch optype {
case OperatorTypeSpread: prev.Spread = true
case OperatorTypeZip: zip = true
case OperatorTypeDot: dot = true
}
continue
}
// current token must be an identifier
nodes, prev, zip, dot = appendNode(nodes, current, prev, zip, dot)
}
return nodes, tokens[i:]
}
// appendNode: append a parsed node to a list of parsed nodes
// `nodes`: the list of parsed nodes
// `node`: the parsed node to append to the list
// `prev`: a pointer to the node that was last parsed and appended
// `zip`: whether a zip operator precedes the current node
// `dot`: whether a dot operator precedes the current node
// this function returns the new list of nodes, a pointer to the
// appended node, and values for both of the zip and dot flags
// this function always returns false for zip and dot -- i could manually
// reset zip and dot every time i call appendNode (see makeST), but this looks nicer
func appendNode(nodes []STNode, node STNode,
prev *STNode, zip bool, dot bool) ([]STNode, *STNode, bool, bool) {
var addr *STNode
if zip || dot {
if prev != nil {
if zip {
prev.Zip = &node
addr = prev.Zip
} else {
prev.Dot = &node
addr = prev.Dot
}
}
} else {
nodes = append(nodes, node)
addr = &nodes[len(nodes) - 1]
}
return nodes, addr, false, false
}
// normalizeString: Replace esacped escape sequences with actual escape
// sequences in a string
// `str`: the string
// this function returns the normalized string
func normalizeString(str string) string {
str = strings.Replace(str, "\\n", "\n", -1)
str = strings.Replace(str, "\\\"", "\"", -1)
return str
}
// parseLiteral: parse an extended literal, i.e a string or comment
// `delimiter`: leading delimiter of literal, either '"' or '#'
// `input`: list of unparsed characters following delimiter
// this function returns the number of characters it has parsed
// and a literal token
func parseLiteral(delimiter string, input []rune) (int, string) {
escape := '\\'
str := ""
i := 0
for ; i < len(input); i++ {
if input[i] == escape {
str += string(input[i])
i++
str += string(input[i])
continue
}
if string(input[i]) == LiteralDelimiters[delimiter] {
str += LiteralDelimiters[delimiter]
i++
break
}
str += string(input[i])
}
return i, str
}
// matchOperator: check if a list of characters contains an operator
// and find the correct operator if so
// `runes`: list of characters
// `index`: index at which to begin searching runes for an operator
// this function returns the index of the found operator in `Operators`
// (defined above) or -1 if runes does not contain an operator immediately
// after index
func matchOperator(runes []rune, index int) int {
matchindex := -1
matchscore := 0
r := runes[index]
for i, op := range Operators {
score := 0
if r != rune(op[0]) { continue }
if index + len(op) > len(runes) { continue }
opstr := string(runes[index:index + len(op)])
if op == opstr { score = len(op) }
if score > matchscore {
matchscore = score
matchindex = i
}
}
return matchindex
}
// Tokenize: tokenize a string
// `input`: the string to tokenize
// this function returns a list of tokens
func Tokenize(input string) []string {
input = strings.TrimSpace(input)
runes := []rune(input)
token := ""
tokens := []string{token, "\n"}
for i := 0; i < len(runes); i++ {
r := runes[i]
if r == '\n' {
if len(token) > 0 {
tokens = append(tokens, token)
token = ""
}
tokens = append(tokens, "\n")
continue
}
end, literal := LiteralDelimiters[string(r)]
if literal {
if len(token) > 0 {
tokens = append(tokens, token)
token = ""
}
len, str := parseLiteral(string(r), runes[i + 1:])
i += len
tokens = append(tokens, string(r) + str)
if end == "\n" { tokens = append(tokens, end) }
continue
}
opindex := matchOperator(runes, i)
if opindex != -1 {
op := Operators[opindex]
i += len(op) - 1
// weird hack to get dot operator to play nicely with floating-point numbers
isNumber := false
if op == "." {
_, err := strconv.ParseFloat(token, 64)
isNumber = err == nil
}
if op != "." || (!isNumber) {
if len(token) > 0 {
tokens = append(tokens, token)
token = ""
}
tokens = append(tokens, op)
continue
}
}
_, delimiter := TokenDelimiters[string(r)]
if !delimiter && !unicode.IsSpace(r) {
token += string(r)
continue
}
if len(token) > 0 {
tokens = append(tokens, token)
}
token = ""
if delimiter {
tokens = append(tokens, string(r))
}
}
if len(token) > 0 { tokens = append(tokens, token) }
tokens = append(tokens, "\n", "")
return tokens
} | core/parser.go | 0.609524 | 0.461199 | parser.go | starcoder |
package wad
// Currency is a currency type.
type Currency struct {
// Code is the currency code (e.g. usd)
Code string `json:"code"`
// Countries is csv list of countries
Countries string `json:"countries"`
// Decimals is the number of decimals in currency
Decimals int `json:"decimals"`
// IsoNum is the iso code of the country. Negative if null
IsoNum int `json:"iso_num"`
// Name is the name of the Currency
Name string `json:"name"`
// Symbol is the currency symbol
Symbol string `json:"symbol"`
}
// CurrencyMap of code->currency.
type CurrencyMap = map[string]Currency
// Fiat is a list of fiat currencies.
var Fiat = CurrencyMap{
"AED": {
Code: "AED",
Countries: "United Arab Emirates",
Decimals: 2,
IsoNum: 784,
Name: "United Arab Emirates dirham",
Symbol: "د.إ",
},
"AFN": {
Code: "AFN",
Countries: "Afghanistan",
Decimals: 2,
IsoNum: 971,
Name: "Afghan afghani",
Symbol: "",
},
"ALL": {
Code: "ALL",
Countries: "Albania",
Decimals: 2,
IsoNum: 8,
Name: "Albanian lek",
Symbol: "L",
},
"AMD": {
Code: "AMD",
Countries: "Armenia",
Decimals: 2,
IsoNum: 51,
Name: "Armenian dram",
Symbol: "",
},
"ANG": {
Code: "ANG",
Countries: "Curaçao (CW), Sint Maarten (SX)",
Decimals: 2,
IsoNum: 532,
Name: "Netherlands Antillean guilder",
Symbol: "ƒ",
},
"AOA": {
Code: "AOA",
Countries: "Angola",
Decimals: 2,
IsoNum: 973,
Name: "Angolan kwanza",
Symbol: "Kz",
},
"ARS": {
Code: "ARS",
Countries: "Argentina",
Decimals: 2,
IsoNum: 32,
Name: "Argentine peso",
Symbol: "$",
},
"AUD": {
Code: "AUD",
Countries: "Australia, Christmas Island (CX), Cocos (Keeling) Islands (CC), Heard Island and McDonald Islands (HM), Kiribati (KI), Nauru (NR), Norfolk Island (NF), Tuvalu (TV)",
Decimals: 2,
IsoNum: 36,
Name: "Australian dollar",
Symbol: "$",
},
"AWG": {
Code: "AWG",
Countries: "Aruba",
Decimals: 2,
IsoNum: 533,
Name: "Aruban florin",
Symbol: "ƒ",
},
"AZN": {
Code: "AZN",
Countries: "Azerbaijan",
Decimals: 2,
IsoNum: 944,
Name: "Azerbaijani manat",
Symbol: "₼",
},
"BAM": {
Code: "BAM",
Countries: "Bosnia and Herzegovina",
Decimals: 2,
IsoNum: 977,
Name: "Bosnia and Herzegovina convertible mark",
Symbol: "KM",
},
"BBD": {
Code: "BBD",
Countries: "Barbados",
Decimals: 2,
IsoNum: 52,
Name: "Barbados dollar",
Symbol: "Bds$",
},
"BDT": {
Code: "BDT",
Countries: "Bangladesh",
Decimals: 2,
IsoNum: 50,
Name: "Bangladeshi taka",
Symbol: "৳",
},
"BGN": {
Code: "BGN",
Countries: "Bulgaria",
Decimals: 2,
IsoNum: 975,
Name: "Bulgarian lev",
Symbol: "лв",
},
"BHD": {
Code: "BHD",
Countries: "Bahrain",
Decimals: 3,
IsoNum: 48,
Name: "Bahraini dinar",
Symbol: ".د.ب",
},
"BIF": {
Code: "BIF",
Countries: "Burundi",
Decimals: 0,
IsoNum: 108,
Name: "Burundian franc",
Symbol: "FBu",
},
"BMD": {
Code: "BMD",
Countries: "Bermuda",
Decimals: 2,
IsoNum: 60,
Name: "Bermudian dollar",
Symbol: "BD$",
},
"BND": {
Code: "BND",
Countries: "Brunei",
Decimals: 2,
IsoNum: 96,
Name: "Brunei dollar",
Symbol: "B$",
},
"BOB": {
Code: "BOB",
Countries: "Bolivia",
Decimals: 2,
IsoNum: 68,
Name: "Boliviano",
Symbol: "Bs",
},
"BRL": {
Code: "BRL",
Countries: "Brazil",
Decimals: 2,
IsoNum: 986,
Name: "Brazilian real",
Symbol: "R$",
},
"BSD": {
Code: "BSD",
Countries: "Bahamas",
Decimals: 2,
IsoNum: 44,
Name: "Bahamian dollar",
Symbol: "B$",
},
"BTN": {
Code: "BTN",
Countries: "Bhutan",
Decimals: 2,
IsoNum: 64,
Name: "Bhutanese ngultrum",
Symbol: "Nu.",
},
"BWP": {
Code: "BWP",
Countries: "Botswana",
Decimals: 2,
IsoNum: 72,
Name: "Botswana pula",
Symbol: "P",
},
"BYN": {
Code: "BYN",
Countries: "Belarus",
Decimals: 2,
IsoNum: 933,
Name: "Belarusian ruble",
Symbol: "Br",
},
"BZD": {
Code: "BZD",
Countries: "Belize",
Decimals: 2,
IsoNum: 84,
Name: "Belize dollar",
Symbol: "BZ$",
},
"CAD": {
Code: "CAD",
Countries: "Canada",
Decimals: 2,
IsoNum: 124,
Name: "Canadian dollar",
Symbol: "$",
},
"CDF": {
Code: "CDF",
Countries: "Democratic Republic of the Congo",
Decimals: 2,
IsoNum: 976,
Name: "Congolese franc",
Symbol: "FC",
},
"CHF": {
Code: "CHF",
Countries: "Switzerland, Liechtenstein (LI)",
Decimals: 2,
IsoNum: 756,
Name: "Swiss franc",
Symbol: "SFr",
},
"CLF": {
Code: "CLF",
Countries: "Chile",
Decimals: 4,
IsoNum: 990,
Name: "Unidad de Fomento (funds Code)",
Symbol: "UF",
},
"CLP": {
Code: "CLP",
Countries: "Chile",
Decimals: 0,
IsoNum: 152,
Name: "Chilean peso",
Symbol: "$",
},
"CNY": {
Code: "CNY",
Countries: "China",
Decimals: 2,
IsoNum: 156,
Name: "Chinese yuan",
Symbol: "¥",
},
"COP": {
Code: "COP",
Countries: "Colombia",
Decimals: 2,
IsoNum: 170,
Name: "Colombian peso",
Symbol: "$",
},
"CRC": {
Code: "CRC",
Countries: "Costa Rica",
Decimals: 2,
IsoNum: 188,
Name: "Costa Rican colon",
Symbol: "₡",
},
"CUP": {
Code: "CUP",
Countries: "Cuba",
Decimals: 2,
IsoNum: 192,
Name: "Cuban peso",
Symbol: "$",
},
"CVE": {
Code: "CVE",
Countries: "Cabo Verde",
Decimals: 2,
IsoNum: 132,
Name: "Cape Verdean escudo",
Symbol: "$",
},
"CZK": {
Code: "CZK",
Countries: "Czechia",
Decimals: 2,
IsoNum: 203,
Name: "Czech koruna",
Symbol: "Kč",
},
"DJF": {
Code: "DJF",
Countries: "Djibouti",
Decimals: 0,
IsoNum: 262,
Name: "Djiboutian franc",
Symbol: "Fdj",
},
"DKK": {
Code: "DKK",
Countries: "Denmark, Faroe Islands (FO), Greenland (GL)",
Decimals: 2,
IsoNum: 208,
Name: "Danish krone",
Symbol: "kr.",
},
"DOP": {
Code: "DOP",
Countries: "Dominican Republic",
Decimals: 2,
IsoNum: 214,
Name: "Dominican peso",
Symbol: "$",
},
"DZD": {
Code: "DZD",
Countries: "Algeria",
Decimals: 2,
IsoNum: 12,
Name: "Algerian dinar",
Symbol: "دج",
},
"EGP": {
Code: "EGP",
Countries: "Egypt",
Decimals: 2,
IsoNum: 818,
Name: "Egyptian pound",
Symbol: "E£",
},
"ERN": {
Code: "ERN",
Countries: "Eritrea",
Decimals: 2,
IsoNum: 232,
Name: "Eritrean nakfa",
Symbol: "Nkf",
},
"ETB": {
Code: "ETB",
Countries: "Ethiopia",
Decimals: 2,
IsoNum: 230,
Name: "Ethiopian birr",
Symbol: "Br",
},
"EUR": {
Code: "EUR",
Countries: "Åland Islands (AX), European Union (EU), Andorra (AD), Austria (AT), Belgium (BE), Cyprus (CY), Estonia (EE), Finland (FI), France (FR), French Southern and Antarctic Lands (TF), Germany (DE), Greece (GR), Guadeloupe (GP), Ireland (IE), Italy (IT), Latvia (LV), Lithuania (LT), Luxembourg (LU), Malta (MT), French Guiana (GF), Martinique (MQ), Mayotte (YT), Monaco (MC), Montenegro (ME), Netherlands (NL), Portugal (PT), Réunion (RE), Saint Barthélemy (BL), Saint Martin (MF), Saint Pierre and Miquelon (PM), San Marino (SM), Slovakia (SK), Slovenia (SI), Spain (ES), Vatican City (VA)",
Decimals: 2,
IsoNum: 978,
Name: "Euro",
Symbol: "€",
},
"FJD": {
Code: "FJD",
Countries: "Fiji",
Decimals: 2,
IsoNum: 242,
Name: "Fiji dollar",
Symbol: "FJ$",
},
"FKP": {
Code: "FKP",
Countries: "Falkland Islands (pegged to GBP 1:1)",
Decimals: 2,
IsoNum: 238,
Name: "Falkland Islands pound",
Symbol: "£",
},
"GBP": {
Code: "GBP",
Countries: "United Kingdom, the Isle of Man (IM, see Manx pound), Jersey (JE, see Jersey pound), and Guernsey (GG, see Guernsey pound)",
Decimals: 2,
IsoNum: 826,
Name: "Pound sterling",
Symbol: "£",
},
"GEL": {
Code: "GEL",
Countries: "Georgia",
Decimals: 2,
IsoNum: 981,
Name: "Georgian lari",
Symbol: "₾",
},
"GHS": {
Code: "GHS",
Countries: "Ghana",
Decimals: 2,
IsoNum: 936,
Name: "Ghanaian cedi",
Symbol: "GH₵",
},
"GIP": {
Code: "GIP",
Countries: "Gibraltar (pegged to GBP 1:1)",
Decimals: 2,
IsoNum: 292,
Name: "G<NAME>",
Symbol: "£",
},
"GMD": {
Code: "GMD",
Countries: "Gambia",
Decimals: 2,
IsoNum: 270,
Name: "<NAME>",
Symbol: "D",
},
"GNF": {
Code: "GNF",
Countries: "Guinea",
Decimals: 0,
IsoNum: 324,
Name: "Guinean franc",
Symbol: "FG",
},
"GTQ": {
Code: "GTQ",
Countries: "Guatemala",
Decimals: 2,
IsoNum: 320,
Name: "Guatemalan quetzal",
Symbol: "Q",
},
"GYD": {
Code: "GYD",
Countries: "Guyana",
Decimals: 2,
IsoNum: 328,
Name: "Guyanese dollar",
Symbol: "G$",
},
"HKD": {
Code: "HKD",
Countries: "Hong Kong",
Decimals: 2,
IsoNum: 344,
Name: "Hong Kong dollar",
Symbol: "HK$",
},
"HNL": {
Code: "HNL",
Countries: "Honduras",
Decimals: 2,
IsoNum: 340,
Name: "Honduran lempira",
Symbol: "L",
},
"HRK": {
Code: "HRK",
Countries: "Croatia",
Decimals: 2,
IsoNum: 191,
Name: "Croatian kuna",
Symbol: "kn",
},
"HTG": {
Code: "HTG",
Countries: "Haiti",
Decimals: 2,
IsoNum: 332,
Name: "Haitian gourde",
Symbol: "G",
},
"HUF": {
Code: "HUF",
Countries: "Hungary",
Decimals: 2,
IsoNum: 348,
Name: "Hungarian forint",
Symbol: "Ft",
},
"IDR": {
Code: "IDR",
Countries: "Indonesia",
Decimals: 2,
IsoNum: 360,
Name: "Indonesian rupiah",
Symbol: "Rp",
},
"ILS": {
Code: "ILS",
Countries: "Israel",
Decimals: 2,
IsoNum: 376,
Name: "Israeli new shekel",
Symbol: "₪",
},
"INR": {
Code: "INR",
Countries: "India, Bhutan",
Decimals: 2,
IsoNum: 356,
Name: "Indian rupee",
Symbol: "₹",
},
"IQD": {
Code: "IQD",
Countries: "Iraq",
Decimals: 3,
IsoNum: 368,
Name: "Iraqi dinar",
Symbol: "د.ع",
},
"IRR": {
Code: "IRR",
Countries: "Iran",
Decimals: 2,
IsoNum: 364,
Name: "Iranian rial",
Symbol: "﷼",
},
"ISK": {
Code: "ISK",
Countries: "Iceland",
Decimals: 0,
IsoNum: 352,
Name: "Icelandic króna",
Symbol: "kr",
},
"JMD": {
Code: "JMD",
Countries: "Jamaica",
Decimals: 2,
IsoNum: 388,
Name: "Jamaican dollar",
Symbol: "J$",
},
"JOD": {
Code: "JOD",
Countries: "Jordan",
Decimals: 3,
IsoNum: 400,
Name: "Jordanian dinar",
Symbol: "$",
},
"JPY": {
Code: "JPY",
Countries: "Japan",
Decimals: 0,
IsoNum: 392,
Name: "Japanese yen",
Symbol: "د.أ",
},
"KES": {
Code: "KES",
Countries: "Kenya",
Decimals: 2,
IsoNum: 404,
Name: "<NAME>",
Symbol: "$",
},
"KGS": {
Code: "KGS",
Countries: "Kyrgyzstan",
Decimals: 2,
IsoNum: 417,
Name: "Kyrgyzstani som",
Symbol: "KSh",
},
"KHR": {
Code: "KHR",
Countries: "Cambodia",
Decimals: 2,
IsoNum: 116,
Name: "Cambodian riel",
Symbol: "С̲",
},
"KMF": {
Code: "KMF",
Countries: "Comoros",
Decimals: 0,
IsoNum: 174,
Name: "Comoro franc",
Symbol: "៛",
},
"KPW": {
Code: "KPW",
Countries: "North Korea",
Decimals: 2,
IsoNum: 408,
Name: "North Korean won",
Symbol: "₩",
},
"KRW": {
Code: "KRW",
Countries: "South Korea",
Decimals: 0,
IsoNum: 410,
Name: "South Korean won",
Symbol: "₩",
},
"KWD": {
Code: "KWD",
Countries: "Kuwait",
Decimals: 3,
IsoNum: 414,
Name: "Kuwaiti dinar",
Symbol: "د.ك",
},
"KYD": {
Code: "KYD",
Countries: "Cayman Islands",
Decimals: 2,
IsoNum: 136,
Name: "Cayman Islands dollar",
Symbol: "CI$",
},
"KZT": {
Code: "KZT",
Countries: "Kazakhstan",
Decimals: 2,
IsoNum: 398,
Name: "Kaz<NAME>",
Symbol: "₸",
},
"LAK": {
Code: "LAK",
Countries: "Laos",
Decimals: 2,
IsoNum: 418,
Name: "<NAME>",
Symbol: "₭",
},
"LBP": {
Code: "LBP",
Countries: "Lebanon",
Decimals: 2,
IsoNum: 422,
Name: "L<NAME>",
Symbol: "ل.ل.",
},
"LKR": {
Code: "LKR",
Countries: "Sri Lanka",
Decimals: 2,
IsoNum: 144,
Name: "<NAME>",
Symbol: "Rs",
},
"LRD": {
Code: "LRD",
Countries: "Liberia",
Decimals: 2,
IsoNum: 430,
Name: "Liberian dollar",
Symbol: "L$",
},
"LSL": {
Code: "LSL",
Countries: "Lesotho",
Decimals: 2,
IsoNum: 426,
Name: "<NAME>",
Symbol: "L",
},
"LYD": {
Code: "LYD",
Countries: "Libya",
Decimals: 3,
IsoNum: 434,
Name: "<NAME>",
Symbol: "LD",
},
"MAD": {
Code: "MAD",
Countries: "Morocco, Western Sahara",
Decimals: 2,
IsoNum: 504,
Name: "<NAME>",
Symbol: "DH",
},
"MDL": {
Code: "MDL",
Countries: "Moldova",
Decimals: 2,
IsoNum: 498,
Name: "<NAME>",
Symbol: "lei",
},
"MGA": {
Code: "MGA",
Countries: "Madagascar",
Decimals: 2,
IsoNum: 969,
Name: "<NAME>",
Symbol: "Ar",
},
"MKD": {
Code: "MKD",
Countries: "North Macedonia",
Decimals: 2,
IsoNum: 807,
Name: "<NAME>",
Symbol: "den",
},
"MMK": {
Code: "MMK",
Countries: "Myanmar",
Decimals: 2,
IsoNum: 104,
Name: "<NAME>",
Symbol: "K",
},
"MNT": {
Code: "MNT",
Countries: "Mongolia",
Decimals: 2,
IsoNum: 496,
Name: "<NAME>",
Symbol: "₮",
},
"MOP": {
Code: "MOP",
Countries: "Macau",
Decimals: 2,
IsoNum: 446,
Name: "<NAME>",
Symbol: "MOP$",
},
"MRU": {
Code: "MRU",
Countries: "Mauritania",
Decimals: 2,
IsoNum: 929,
Name: "<NAME>",
Symbol: "UM",
},
"MUR": {
Code: "MUR",
Countries: "Mauritius",
Decimals: 2,
IsoNum: 480,
Name: "<NAME>",
Symbol: "₨",
},
"MVR": {
Code: "MVR",
Countries: "Maldives",
Decimals: 2,
IsoNum: 462,
Name: "Maldivian rufiyaa",
Symbol: "Rf",
},
"MWK": {
Code: "MWK",
Countries: "Malawi",
Decimals: 2,
IsoNum: 454,
Name: "Malawian kwacha",
Symbol: "K",
},
"MXN": {
Code: "MXN",
Countries: "Mexico",
Decimals: 2,
IsoNum: 484,
Name: "Mexican peso",
Symbol: "$",
},
"MYR": {
Code: "MYR",
Countries: "Malaysia",
Decimals: 2,
IsoNum: 458,
Name: "Malaysian ringgit",
Symbol: "RM",
},
"MZN": {
Code: "MZN",
Countries: "Mozambique",
Decimals: 2,
IsoNum: 943,
Name: "Mozambican metical",
Symbol: "MT",
},
"NAD": {
Code: "NAD",
Countries: "Namibia",
Decimals: 2,
IsoNum: 516,
Name: "Namibian dollar",
Symbol: "N$",
},
"NGN": {
Code: "NGN",
Countries: "Nigeria",
Decimals: 2,
IsoNum: 566,
Name: "Nigerian naira",
Symbol: "₦",
},
"NIO": {
Code: "NIO",
Countries: "Nicaragua",
Decimals: 2,
IsoNum: 558,
Name: "<NAME>",
Symbol: "C$",
},
"NOK": {
Code: "NOK",
Countries: "Norway, Svalbard and Jan Mayen (SJ), Bouvet Island (BV)",
Decimals: 2,
IsoNum: 578,
Name: "Norwegian krone",
Symbol: "kr",
},
"NPR": {
Code: "NPR",
Countries: "Nepal",
Decimals: 2,
IsoNum: 524,
Name: "Nepalese rupee",
Symbol: "रू",
},
"NZD": {
Code: "NZD",
Countries: "New Zealand, Cook Islands (CK), Niue (NU), Pitcairn Islands (PN; see also Pitcairn Islands dollar), Tokelau (TK)",
Decimals: 2,
IsoNum: 554,
Name: "New Zealand dollar",
Symbol: "NZ$",
},
"OMR": {
Code: "OMR",
Countries: "Oman",
Decimals: 3,
IsoNum: 512,
Name: "<NAME>",
Symbol: "ر.ع.",
},
"PAB": {
Code: "PAB",
Countries: "Panama",
Decimals: 2,
IsoNum: 590,
Name: "Panamanian balboa",
Symbol: "B/.",
},
"PEN": {
Code: "PEN",
Countries: "Peru",
Decimals: 2,
IsoNum: 604,
Name: "Peruvian sol",
Symbol: "S/",
},
"PGK": {
Code: "PGK",
Countries: "Papua New Guinea",
Decimals: 2,
IsoNum: 598,
Name: "Papua New Guinean kina",
Symbol: "K",
},
"PHP": {
Code: "PHP",
Countries: "Philippines",
Decimals: 2,
IsoNum: 608,
Name: "Philippine peso[12]",
Symbol: "₱",
},
"PKR": {
Code: "PKR",
Countries: "Pakistan",
Decimals: 2,
IsoNum: 586,
Name: "Pakistani rupee",
Symbol: "₨",
},
"PLN": {
Code: "PLN",
Countries: "Poland",
Decimals: 2,
IsoNum: 985,
Name: "Polish złoty",
Symbol: "zł",
},
"PYG": {
Code: "PYG",
Countries: "Paraguay",
Decimals: 0,
IsoNum: 600,
Name: "Paraguayan guaraní",
Symbol: "₲",
},
"QAR": {
Code: "QAR",
Countries: "Qatar",
Decimals: 2,
IsoNum: 634,
Name: "Qatari riyal",
Symbol: "QR",
},
"RON": {
Code: "RON",
Countries: "Romania",
Decimals: 2,
IsoNum: 946,
Name: "Romanian leu",
Symbol: "L",
},
"RSD": {
Code: "RSD",
Countries: "Serbia",
Decimals: 2,
IsoNum: 941,
Name: "Serbian dinar",
Symbol: "din",
},
"RUB": {
Code: "RUB",
Countries: "Russia",
Decimals: 2,
IsoNum: 643,
Name: "Russian ruble",
Symbol: "₽",
},
"RWF": {
Code: "RWF",
Countries: "Rwanda",
Decimals: 0,
IsoNum: 646,
Name: "Rwandan franc",
Symbol: "FRw",
},
"SAR": {
Code: "SAR",
Countries: "Saudi Arabia",
Decimals: 2,
IsoNum: 682,
Name: "Saudi riyal",
Symbol: "SAR",
},
"SBD": {
Code: "SBD",
Countries: "Solomon Islands",
Decimals: 2,
IsoNum: 90,
Name: "Solomon Islands dollar",
Symbol: "SI$",
},
"SCR": {
Code: "SCR",
Countries: "Seychelles",
Decimals: 2,
IsoNum: 690,
Name: "Seychelles rupee",
Symbol: "SCR",
},
"SDG": {
Code: "SDG",
Countries: "Sudan",
Decimals: 2,
IsoNum: 938,
Name: "Sudanese pound",
Symbol: "SS£",
},
"SEK": {
Code: "SEK",
Countries: "Sweden",
Decimals: 2,
IsoNum: 752,
Name: "Swedish krona/kronor",
Symbol: "kr",
},
"SGD": {
Code: "SGD",
Countries: "Singapore",
Decimals: 2,
IsoNum: 702,
Name: "Singapore dollar",
Symbol: "S$",
},
"SHP": {
Code: "SHP",
Countries: "Saint Helena (SH-SH), Ascension Island (SH-AC), Tristan da Cunha",
Decimals: 2,
IsoNum: 654,
Name: "Saint Helena pound",
Symbol: "£",
},
"SLL": {
Code: "SLL",
Countries: "Sierra Leone",
Decimals: 2,
IsoNum: 694,
Name: "Sierra Leonean leone",
Symbol: "Le",
},
"SOS": {
Code: "SOS",
Countries: "Somalia",
Decimals: 2,
IsoNum: 706,
Name: "Somali shilling",
Symbol: "Sh.So.",
},
"SRD": {
Code: "SRD",
Countries: "Suriname",
Decimals: 2,
IsoNum: 968,
Name: "Surinamese dollar",
Symbol: "SR$",
},
"SSP": {
Code: "SSP",
Countries: "South Sudan",
Decimals: 2,
IsoNum: 728,
Name: "South Sudanese pound",
Symbol: "SS£",
},
"STN": {
Code: "STN",
Countries: "São Tomé and Príncipe",
Decimals: 2,
IsoNum: 930,
Name: "São Tomé and Príncipe dobra",
Symbol: "Db",
},
"SVC": {
Code: "SVC",
Countries: "El Salvador",
Decimals: 2,
IsoNum: 222,
Name: "Salvadoran colón",
Symbol: "₡",
},
"SYP": {
Code: "SYP",
Countries: "Syria",
Decimals: 2,
IsoNum: 760,
Name: "<NAME>",
Symbol: "LS",
},
"SZL": {
Code: "SZL",
Countries: "Eswatini",
Decimals: 2,
IsoNum: 748,
Name: "Swazi lilangeni",
Symbol: "L",
},
"THB": {
Code: "THB",
Countries: "Thailand",
Decimals: 2,
IsoNum: 764,
Name: "<NAME>",
Symbol: "฿",
},
"TJS": {
Code: "TJS",
Countries: "Tajikistan",
Decimals: 2,
IsoNum: 972,
Name: "<NAME>",
Symbol: "SM",
},
"TMT": {
Code: "TMT",
Countries: "Turkmenistan",
Decimals: 2,
IsoNum: 934,
Name: "Turkmenistan manat",
Symbol: "T",
},
"TND": {
Code: "TND",
Countries: "Tunisia",
Decimals: 3,
IsoNum: 788,
Name: "<NAME>",
Symbol: "DT",
},
"TOP": {
Code: "TOP",
Countries: "Tonga",
Decimals: 2,
IsoNum: 776,
Name: "<NAME>ʻanga",
Symbol: "T$",
},
"TRY": {
Code: "TRY",
Countries: "Turkey",
Decimals: 2,
IsoNum: 949,
Name: "Turkish lira",
Symbol: "₺",
},
"TTD": {
Code: "TTD",
Countries: "Trinidad and Tobago",
Decimals: 2,
IsoNum: 780,
Name: "Trinidad and Tobago dollar",
Symbol: "TT$",
},
"TWD": {
Code: "TWD",
Countries: "Taiwan",
Decimals: 2,
IsoNum: 901,
Name: "New Taiwan dollar",
Symbol: "NT$",
},
"TZS": {
Code: "TZS",
Countries: "Tanzania",
Decimals: 2,
IsoNum: 834,
Name: "Tanzanian shilling",
Symbol: "TSh",
},
"UAH": {
Code: "UAH",
Countries: "Ukraine",
Decimals: 2,
IsoNum: 980,
Name: "Ukrainian hryvnia",
Symbol: "₴",
},
"UGX": {
Code: "UGX",
Countries: "Uganda",
Decimals: 0,
IsoNum: 800,
Name: "Ugandan shilling",
Symbol: "USh",
},
"USD": {
Code: "USD",
Countries: "United States, American Samoa (AS), British Indian Ocean Territory (IO) (also uses GBP), British Virgin Islands (VG), Caribbean Netherlands (BQ – Bonaire, Sint Eustatius and Saba), Ecuador (EC), El Salvador (SV), Guam (GU), Haiti (HT), Marshall Islands (MH), Federated States of Micronesia (FM), Northern Mariana Islands (MP), Palau (PW), Panama (PA) (as well as Panamanian Balboa), Puerto Rico (PR), Timor-Leste (TL), Turks and Caicos Islands (TC), U.S. Virgin Islands (VI), United States Minor Outlying Islands (UM)",
Decimals: 2,
IsoNum: 840,
Name: "United States dollar",
Symbol: "$",
},
"UYU": {
Code: "UYU",
Countries: "Uruguay",
Decimals: 2,
IsoNum: 858,
Name: "Uruguayan peso",
Symbol: "$",
},
"UZS": {
Code: "UZS",
Countries: "Uzbekistan",
Decimals: 2,
IsoNum: 860,
Name: "Uzbekistan som",
Symbol: "soʻm",
},
"VES": {
Code: "VES",
Countries: "Venezuela",
Decimals: 2,
IsoNum: 928,
Name: "<NAME>",
Symbol: "Bs.S.",
},
"VND": {
Code: "VND",
Countries: "Vietnam",
Decimals: 0,
IsoNum: 704,
Name: "Vietname<NAME>",
Symbol: "₫",
},
"VUV": {
Code: "VUV",
Countries: "Vanuatu",
Decimals: 0,
IsoNum: 548,
Name: "<NAME>",
Symbol: "VT",
},
"WST": {
Code: "WST",
Countries: " Samoa",
Decimals: 2,
IsoNum: 882,
Name: "<NAME>",
Symbol: "WS$",
},
"XAF": {
Code: "XAF",
Countries: "BEAC Cameroon (CM), Central African Republic (CF), Republic of the Congo (CG), Chad (TD), Equatorial Guinea (GQ), Gabon (GA)",
Decimals: 0,
IsoNum: 950,
Name: "CFA franc",
Symbol: "FCFA",
},
"XAG": {
Code: "XAG",
Countries: "",
Decimals: 0,
IsoNum: 961,
Name: "Silver (one troy ounce)",
Symbol: "",
},
"XAU": {
Code: "XAU",
Countries: "",
Decimals: 0,
IsoNum: 959,
Name: "Gold (one troy ounce)",
Symbol: "",
},
"XBA": {
Code: "XBA",
Countries: "",
Decimals: 0,
IsoNum: 955,
Name: "European Composite Unit (EURCO) (bond market unit)",
Symbol: "",
},
"XBB": {
Code: "XBB",
Countries: "",
Decimals: 0,
IsoNum: 956,
Name: "European Monetary Unit (E.M.U.-6) (bond market unit)",
Symbol: "",
},
"XBC": {
Code: "XBC",
Countries: "",
Decimals: 0,
IsoNum: 957,
Name: "European Unit of Account 9 (E.U.A.-9) (bond market unit)",
Symbol: "",
},
"XBD": {
Code: "XBD",
Countries: "",
Decimals: 0,
IsoNum: 958,
Name: "European Unit of Account 17 (E.U.A.-17) (bond market unit)",
Symbol: "",
},
"XCD": {
Code: "XCD",
Countries: " Anguilla (AI), Antigua and Barbuda (AG), Dominica (DM), Grenada (GD), Montserrat (MS), Saint Kitts and Nevis (KN), Saint Lucia (LC), Saint Vincent and the Grenadines (VC)",
Decimals: 2,
IsoNum: 951,
Name: "East Caribbean dolla",
Symbol: "$",
},
"XOF": {
Code: "XOF",
Countries: "BCEAO Benin (BJ), Burkina Faso (BF), Côte d'Ivoire (CI), Guinea-Bissau (GW), Mali (ML), Niger (NE), Senegal (SN), Togo (TG)",
Decimals: 0,
IsoNum: 952,
Name: "CFA franc",
Symbol: "CFA",
},
"XPD": {
Code: "XPD",
Countries: "",
Decimals: 0,
IsoNum: 964,
Name: "Palladium (one troy ounce)",
Symbol: "",
},
"XPF": {
Code: "XPF",
Countries: "French territories of the Pacific Ocean: French Polynesia (PF), New Caledonia (NC), Wallis and Futuna (WF)",
Decimals: 0,
IsoNum: 953,
Name: "CFP franc (franc Pacifique)",
Symbol: "F",
},
"XPT": {
Code: "XPT",
Countries: "",
Decimals: 0,
IsoNum: 962,
Name: "Platinum (one troy ounce)",
Symbol: "",
},
"XSU": {
Code: "XSU",
Countries: "",
Decimals: 0,
IsoNum: 994,
Name: "SUCRE Unified System for Regional Compensation (SUCRE)",
Symbol: "",
},
"XTS": {
Code: "XTS",
Countries: "",
Decimals: 0,
IsoNum: 963,
Name: "Code reserved for testing",
Symbol: "",
},
"XUA": {
Code: "XUA",
Countries: "",
Decimals: 0,
IsoNum: 965,
Name: "ADB Unit of Account African Development Bank",
Symbol: "",
},
"XXX": {
Code: "XXX",
Countries: "",
Decimals: 0,
IsoNum: 999,
Name: "No currency",
Symbol: "",
},
"YER": {
Code: "YER",
Countries: "Yemen",
Decimals: 2,
IsoNum: 886,
Name: "Yemeni rial",
Symbol: "ر.ي",
},
"ZAR": {
Code: "ZAR",
Countries: "Lesotho, Namibia, South Africa",
Decimals: 2,
IsoNum: 710,
Name: "South African rand",
Symbol: "R",
},
"ZMW": {
Code: "ZMW",
Countries: "Zambia",
Decimals: 2,
IsoNum: 967,
Name: "Zambian kwacha",
Symbol: "K",
},
"ZWL": {
Code: "ZWL",
Countries: "Zimbabwe",
Decimals: 2,
IsoNum: 932,
Name: "Zimbabwean dollar",
Symbol: "$",
},
} | moneysocket/wad/fiat.go | 0.516352 | 0.483344 | fiat.go | starcoder |
package raft
import (
"errors"
"log"
"sync"
pb "github.com/ridwanmsharif/raft/pb"
)
// Storage is an interface that may be implemented by the users
// of the distributed application to retrieve log entries from storage.
//
// If any Storage method returns an error, the raft instance will
// become inoperable and refuse to participate in elections; the
// application is responsible for cleanup and recovery in this case.
type Storage interface {
	// Entries returns a slice of log entries in the half-open range [lo, hi).
	// maxSize limits the total size of the log entries returned, but
	// Entries returns at least one entry if any.
	Entries(lo, hi, maxSize int64) ([]pb.LogEntry, error)
	// Term returns the term of entry i, which must be in the range
	// [FirstIndex()-1, LastIndex()]. The term of the entry before
	// FirstIndex is retained for matching purposes even though the
	// rest of that entry may not be available.
	Term(i int64) (int64, error)
	// LastIndex returns the index of the last entry in the log.
	LastIndex() (int64, error)
	// FirstIndex returns the index of the first log entry that is
	// possibly available via Entries (older entries have been incorporated
	// into the latest Snapshot; if storage only contains the dummy entry the
	// first log entry is not available).
	FirstIndex() (int64, error)
	// Append stores the given entries; the caller must ensure
	// entries[0].Index is greater than the storage's compaction index.
	Append([]pb.LogEntry) error
}
// DefaultStorage implements the Storage interface backed by an
// in-memory slice of log entries.
type DefaultStorage struct {
	// Protects access to all fields. Most methods of DefaultStorage are
	// run on the raft goroutine, but Append() is run on an application
	// goroutine.
	sync.Mutex
	// entries[i] holds raft log position entries[0].Index + i;
	// entries[0] is a dummy sentinel that records the compaction point.
	entries []pb.LogEntry
}
// NewStorage creates an empty DefaultStorage.
func NewStorage() *DefaultStorage {
	return &DefaultStorage{
		// When starting from scratch populate the list with a dummy entry at
		// term zero; it acts as the sentinel for the compaction index.
		entries: make([]pb.LogEntry, 1),
	}
}
// limitSize truncates entries to at most maxSize elements, but always keeps
// at least one entry when any exist (mirroring the Storage.Entries contract:
// "returns at least one entry if any").
//
// The previous implementation returned up to maxSize+1 entries because the
// loop only broke once the counter exceeded maxSize (off-by-one).
func limitSize(entries []pb.LogEntry, maxSize int64) []pb.LogEntry {
	if len(entries) == 0 {
		return entries
	}
	limit := int(maxSize)
	if limit < 1 {
		// Always return at least one entry.
		limit = 1
	}
	if limit > len(entries) {
		limit = len(entries)
	}
	return entries[:limit]
}
// Entries implements the Storage interface. It returns the stored entries in
// the half-open range [lo, hi), further truncated by limitSize. It errors
// when lo has been compacted away or when only the dummy entry exists, and
// panics (programmer error) when hi points past the last stored index.
func (ds *DefaultStorage) Entries(lo, hi, maxSize int64) ([]pb.LogEntry, error) {
	ds.Lock()
	defer ds.Unlock()
	// Index of the dummy entry; real positions are stored relative to it.
	offset := ds.entries[0].Index
	if lo <= int64(offset) {
		return nil, errors.New("requested index is unavailable due to compaction")
	}
	if hi > ds.lastIndex()+1 {
		// raftLogger.Panicf
		log.Panicf("entries' hi(%d) is out of bound lastindex(%d)", hi, ds.lastIndex())
	}
	// Only contains the dummy entry: nothing retrievable.
	if len(ds.entries) == 1 {
		return nil, errors.New("requested entry at index is unavailable")
	}
	entries := ds.entries[lo-offset : hi-offset]
	return limitSize(entries, maxSize), nil
}
// Term implements the Storage interface. It returns the term of entry i, or
// an error when i has been compacted away or lies past the last stored entry.
func (ds *DefaultStorage) Term(i int64) (int64, error) {
	ds.Lock()
	defer ds.Unlock()
	offset := ds.entries[0].Index
	if i < offset {
		return 0, errors.New("requested index is unavailable due to compaction")
	}
	if int(i-offset) >= len(ds.entries) {
		return 0, errors.New("requested entry at index is unavailable")
	}
	return ds.entries[i-offset].Term, nil
}
// LastIndex implements the Storage interface; it is the thread-safe wrapper
// around lastIndex.
func (ds *DefaultStorage) LastIndex() (int64, error) {
	ds.Lock()
	defer ds.Unlock()
	return ds.lastIndex(), nil
}

// lastIndex returns the raft index of the final stored entry (including the
// dummy sentinel when the log is otherwise empty). Caller must hold the lock.
func (ds *DefaultStorage) lastIndex() int64 {
	return ds.entries[0].Index + int64(len(ds.entries)) - 1
}
// FirstIndex implements the Storage interface; it is the thread-safe wrapper
// around firstIndex.
func (ds *DefaultStorage) FirstIndex() (int64, error) {
	ds.Lock()
	defer ds.Unlock()
	return ds.firstIndex(), nil
}

// firstIndex returns the index of the first retrievable entry — the position
// right after the dummy sentinel. Caller must hold the lock.
func (ds *DefaultStorage) firstIndex() int64 {
	return ds.entries[0].Index + 1
}
// Append the new entries to storage.
// entries[0].Index > ds.entries[0].Index
func (ds *DefaultStorage) Append(entries []pb.LogEntry) error {
if len(entries) == 0 {
return nil
}
ds.Lock()
defer ds.Unlock()
first := ds.firstIndex()
last := entries[0].Index + int64(len(entries)) - 1
// shortcut if there is no new entry.
if last < first {
return nil
}
// truncate compacted entries
// Read entries in te new array whic weren't already in storage
if first > entries[0].Index {
entries = entries[first-entries[0].Index:]
}
offset := entries[0].Index - ds.entries[0].Index
switch {
case int64(len(ds.entries)) > offset:
ds.entries = append([]pb.LogEntry{}, ds.entries[:offset]...)
ds.entries = append(ds.entries, entries...)
case int64(len(ds.entries)) == offset:
ds.entries = append(ds.entries, entries...)
default:
ds.entries = ds.entries
log.Panicf("missing log entry [last: %d, append at: %d]", ds.lastIndex(), entries[0].Index)
}
return nil
} | raft/storage.go | 0.571169 | 0.427875 | storage.go | starcoder |
package tracer
import (
"image"
"math"
"math/rand"
"sync"
"github.com/go-gl/mathgl/mgl64"
)
// LinearInterpolation blends color1 and color2 by factor t:
// t=0 yields color1, t=1 yields color2.
func LinearInterpolation(t float64, color1, color2 mgl64.Vec3) mgl64.Vec3 {
	weightedStart := color1.Mul(1.0 - t)
	weightedEnd := color2.Mul(t)
	return weightedStart.Add(weightedEnd)
}
// BackgroundColor maps the ray's vertical direction onto a white-to-blue sky
// gradient: rays pointing down blend toward white, rays pointing up toward
// light blue.
func BackgroundColor(ray Ray) mgl64.Vec3 {
	unitDirection := ray.Direction.Normalize()
	// Remap y from [-1, 1] to [0, 1] for interpolation.
	t := 0.5 * (unitDirection.Y() + 1.0)
	return LinearInterpolation(t, mgl64.Vec3{1.0, 1.0, 1.0}, mgl64.Vec3{0.5, 0.7, 1.0})
}
// Trace follows ray through world, recursively scattering off surfaces until
// either nothing is hit (background gradient), the material absorbs the ray
// (black), or the bounce budget is exhausted (black). The named result only
// documents intent; every return is explicit.
func Trace(ray Ray, world Primitive, bounces int) (color mgl64.Vec3) {
	if bounces <= 0 {
		return mgl64.Vec3{}
	}
	// mgl64.Epsilon as the minimum hit distance avoids re-intersecting the
	// surface the ray just left ("shadow acne").
	if rec := world.Hit(ray, mgl64.Epsilon, math.MaxFloat64); rec != nil {
		if bounce := rec.Material.Scatter(ray, rec); bounce != nil {
			return MulByVec(Trace(bounce.Scattered, world, bounces-1), bounce.Attenuation)
		}
		return mgl64.Vec3{}
	}
	return BackgroundColor(ray)
}
// ToColumns splits a rectangle into n columns
func ToColumns(rect image.Rectangle, n int) []image.Rectangle {
x0, y0 := rect.Min.X, rect.Min.Y
x1, y1 := rect.Max.X, rect.Max.Y
rects := make([]image.Rectangle, n)
for i := 0; i < n; i++ {
rects[i] = image.Rect(x0+(x1/n)*i, y0, x0+(x1/n)*(i+1), y1)
}
return rects
}
// RenderOptions describes all the options available to change how a scene is
// rendered: camera placement, output image dimensions, samples per pixel and
// the maximum number of ray bounces.
type RenderOptions struct {
	CameraOptions
	ImageOptions
	Samples, Bounces int
}

// Scene describes everything needed to render an image: the render options,
// the camera built from them, and the world geometry to trace against.
type Scene struct {
	RenderOptions
	camera Camera
	World  Primitive
}
// NewScene assembles a Scene from the given render options and world
// geometry, constructing the camera from the embedded camera options.
func NewScene(options RenderOptions, world Primitive) Scene {
	var scene Scene
	scene.RenderOptions = options
	scene.camera = NewCamera(options.CameraOptions)
	scene.World = world
	return scene
}
// SamplePixel estimates the color at pixel (x, y) by averaging s.Samples
// jittered camera rays (Monte Carlo anti-aliasing).
func (s *Scene) SamplePixel(x, y float64) mgl64.Vec3 {
	// Average multiple samples with random offsets
	var color mgl64.Vec3
	for i := 0; i < s.Samples; i++ {
		// Translate pixel to camera plane: (u, v) in [0, 1]; the random
		// offset jitters each sample within the pixel footprint.
		u := (x + rand.Float64()) / float64(s.Width)
		v := (y + rand.Float64()) / float64(s.Height)
		// New ray from camera origin to point on camera plane
		camRay := s.camera.Ray(u, v)
		// Bounce around the world, accumulating the sampled color.
		color = color.Add(Trace(camRay, s.World, s.Bounces))
	}
	return Div(color, float64(s.Samples))
}
// Render traces every pixel inside img's bounds, applies gamma correction and
// writes the result into img.
func (s *Scene) Render(img *image.RGBA) {
	bounds := img.Bounds()
	for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
		for x := bounds.Min.X; x < bounds.Max.X; x++ {
			// Sample pixel
			color := s.SamplePixel(float64(x), float64(y))
			// Gamma correction: sqrt of each channel is gamma 2.0, which
			// brightens the linear radiance values for display.
			color = mgl64.Vec3{
				math.Sqrt(color[0]),
				math.Sqrt(color[1]),
				math.Sqrt(color[2]),
			}
			// Write pixel
			RGBASetVec3(img, color, x, y)
		}
	}
}
// RenderParallel renders the scene across n goroutines, one per vertical
// column of the image. Each goroutine writes to a disjoint sub-image sharing
// img's backing pixels, so no synchronization beyond the WaitGroup is needed.
func (s *Scene) RenderParallel(img *image.RGBA, n int) {
	var wg sync.WaitGroup
	wg.Add(n)
	for _, col := range ToColumns(img.Bounds(), n) {
		// SubImage shares pixel storage with img, so the column's writes
		// land directly in the final image.
		sub := img.SubImage(col).(*image.RGBA)
		// Render! (sub is declared inside the loop, so each closure captures
		// its own variable — no loop-variable aliasing here.)
		go func() {
			s.Render(sub)
			wg.Done()
		}()
	}
	wg.Wait()
}
package _98_Validate_Binary_Search_Tree
import "math"
/*https://leetcode.com/problems/validate-binary-search-tree/
Given a binary tree, determine if it is a valid binary search tree (BST).
Assume a BST is defined as follows:
The left subtree of a node contains only nodes with keys less than the node's key.
The right subtree of a node contains only nodes with keys greater than the node's key.
Both the left and right subtrees must also be binary search trees.
Example 1:
Input:
2
/ \
1 3
Output: true
Example 2:
5
/ \
1 4
/ \
3 6
Output: false
Explanation: The input is: [5,1,4,null,null,3,6]. The root node's value
is 5 but its right child's value is 4.
*/
/* Description
1. Make in-order traversal (left-root-right) and put all vals into list. All values in the list must be in incremental order
- O(n) time, O(n.val + max depth) memory
2. Top down solution
- DFS (stack, recursion) or BFS (queue) doesnt metter, but we must pass the max|min limits everywhere
- check rule of root.Val with root.Left | root.Right and max|min limits, if ok - move deeper, if rules ok everywhere - return true
- O(n) - time, O(n) memory
*/
// TreeNode is the standard binary tree node definition used by the problem.
type TreeNode struct {
	Val   int
	Left  *TreeNode
	Right *TreeNode
}

// LimNode pairs a tree node with the exclusive lower/upper value bounds it
// inherits from its ancestors during the top-down validation.
type LimNode struct {
	node *TreeNode
	min  int
	max  int
}
// Stack is a LIFO of LimNode used to drive the iterative DFS.
type Stack []LimNode

// push places n on top of the stack.
func (s *Stack) push(n LimNode) {
	*s = append(*s, n)
}

// pop removes and returns the top node; when the stack is empty it returns a
// sentinel LimNode whose node pointer is nil.
func (s *Stack) pop() LimNode {
	if len(*s) == 0 {
		return LimNode{nil, 0, 0}
	}
	n := (*s)[len(*s)-1]
	*s = (*s)[:len(*s)-1]
	return n
}

// length reports how many nodes are currently stacked.
func (s *Stack) length() int {
	return len(*s)
}
// 2. Top Down solution
func isValidBST(root *TreeNode) bool {
if root == nil {
return true
}
s := Stack{LimNode{root, math.MinInt64, math.MaxInt64}}
for s.length() > 0 {
n := s.pop()
if n.node.Left != nil {
// fmt.Printf("left %+v \n",n)
if n.node.Left.Val < n.node.Val && n.node.Left.Val > n.min {
s.push(LimNode{n.node.Left, n.min, n.node.Val})
} else {
return false
}
}
if n.node.Right != nil {
// fmt.Printf("right %+v \n",n)
if n.node.Right.Val > n.node.Val && n.node.Right.Val < n.max {
s.push(LimNode{n.node.Right, n.node.Val, n.max})
} else {
return false
}
}
}
return true
} | 98_Validate_Binary_Search_Tree/solution.go | 0.912565 | 0.522994 | solution.go | starcoder |
package stepflowae
import (
"context"
"encoding/json"
"errors"
"fmt"
stepflow "github.com/jcalvarado1965/go-stepflow"
"google.golang.org/appengine/datastore"
"google.golang.org/appengine/memcache"
)
// appengineStorage implements stepflow.Storage on top of Google App Engine:
// datastore for persisted entities and memcache for counters.
type appengineStorage struct {
	Logger stepflow.Logger
}

// Datastore kind names and property names used below.
const dataflowRunKind = "DataflowRun"
const flowKind = "Flow"
const flowSplitKind = "FlowSplit"
const dataflowRunJSON = "DataflowRunJSON"
const dataflowRunState = "State"
const dataflowRunDFID = "DataflowID"

// errNotFound signals a missing entity.
var errNotFound = errors.New("Not found")
// flowDataType tags the concrete Go type of a Flow's Data payload so it can
// be identified after a datastore round-trip (Data is stored as raw bytes).
type flowDataType string

const (
	flowDataNil        = flowDataType("Nil")
	flowDataByteArray  = flowDataType("ByteArray")
	flowDataString     = flowDataType("String")
	flowDataRawMessage = flowDataType("RawMessage")
)

// flowDatastore is the datastore representation of a stepflow.Flow: the
// typed Data field is flattened to a byte slice plus a flowDataType tag.
type flowDatastore struct {
	stepflow.FlowNoData
	Data     []byte
	DataType string
}
// NewAppengineStorage creates a storage service backed by the App Engine
// datastore, logging through the supplied logger.
func NewAppengineStorage(logger stepflow.Logger) stepflow.Storage {
	storage := &appengineStorage{}
	storage.Logger = logger
	return storage
}
// StoreDataflowRun persists a run keyed by its ID. A DataflowRun cannot be
// stored directly due to struct pointers, so it is serialized to JSON and
// stored as an unindexed blob; State and DataflowID are stored as separate
// properties (presumably so runs can be queried by them — confirm against
// callers).
func (as *appengineStorage) StoreDataflowRun(ctx context.Context, run *stepflow.DataflowRun) error {
	as.Logger.Debugf(ctx, "Storing dataflow run %v", run)
	key := datastore.NewKey(ctx, dataflowRunKind, string(run.ID), 0, nil)
	buff, err := json.Marshal(run)
	if err != nil {
		return err
	}
	entity := &datastore.PropertyList{
		datastore.Property{Name: dataflowRunJSON, Value: buff, NoIndex: true},
		datastore.Property{Name: dataflowRunState, Value: string(run.State)},
		datastore.Property{Name: dataflowRunDFID, Value: run.Dataflow.ID},
	}
	_, err = datastore.Put(ctx, key, entity)
	return err
}
// RetrieveDataflowRuns fetches the runs for the given IDs, deserializing
// each from its stored JSON blob. On any error the (possibly partial) result
// map built so far is returned; note that a bad entity aborts processing of
// the remaining entities as well.
func (as *appengineStorage) RetrieveDataflowRuns(ctx context.Context, keys []stepflow.DataflowRunID) map[stepflow.DataflowRunID]*stepflow.DataflowRun {
	var dsKeys []*datastore.Key
	for _, key := range keys {
		dsKeys = append(dsKeys, datastore.NewKey(ctx, dataflowRunKind, string(key), 0, nil))
	}
	as.Logger.Debugf(ctx, "Retrieving data flow runs with IDs %v", dsKeys)
	entities := make([]datastore.PropertyList, len(dsKeys))
	runs := make(map[stepflow.DataflowRunID]*stepflow.DataflowRun)
	if err := datastore.GetMulti(ctx, dsKeys, entities); err != nil {
		as.Logger.Errorf(ctx, "Error retrieving dataflow runs: %s", err.Error())
	} else {
		for _, entity := range entities {
			// Find the JSON blob property for this run.
			var buff []byte
			for _, prop := range entity {
				if prop.Name == dataflowRunJSON {
					buff, _ = prop.Value.([]byte)
				}
			}
			if buff == nil {
				as.Logger.Errorf(ctx, "Entity does not have dataflowRunJSON property or it is not []byte")
				return runs
			}
			// run is declared per iteration, so taking its address below is safe.
			var run stepflow.DataflowRun
			if err := json.Unmarshal(buff, &run); err != nil {
				as.Logger.Errorf(ctx, "Error unmarshalling entity %s", err.Error())
				return runs
			}
			runs[run.ID] = &run
		}
	}
	return runs
}
// DeleteDataflowRun removes the persisted run identified by key.
func (as *appengineStorage) DeleteDataflowRun(ctx context.Context, key stepflow.DataflowRunID) error {
	runKey := datastore.NewKey(ctx, dataflowRunKind, string(key), 0, nil)
	return datastore.Delete(ctx, runKey)
}
// StoreFlow persists a flow keyed by its ID. The dynamically-typed Data
// payload is flattened to bytes plus a flowDataType tag; unsupported payload
// types are rejected with an error.
func (as *appengineStorage) StoreFlow(ctx context.Context, flow *stepflow.Flow) error {
	as.Logger.Debugf(ctx, "Storing flow %v", flow)
	var buff []byte
	dataType := flowDataNil
	if flow.Data != nil {
		switch data := flow.Data.(type) {
		case string:
			dataType = flowDataString
			buff = []byte(data)
		case []byte:
			dataType = flowDataByteArray
			buff = data
		case json.RawMessage:
			dataType = flowDataRawMessage
			buff = []byte(data)
		default:
			err := fmt.Errorf("Unrecognized data type %v", data)
			as.Logger.Errorf(ctx, err.Error())
			return err
		}
	}
	flowDS := &flowDatastore{
		FlowNoData: flow.FlowNoData,
		Data:       buff,
		DataType:   string(dataType),
	}
	key := datastore.NewKey(ctx, flowKind, string(flow.ID), 0, nil)
	_, err := datastore.Put(ctx, key, flowDS)
	return err
}
// RetrieveFlows fetches the flows for the given IDs. On error an empty map
// is returned. NOTE(review): Data is always returned as the raw []byte; the
// stored DataType tag is not used here to restore the original string /
// RawMessage type — confirm whether callers expect that.
func (as *appengineStorage) RetrieveFlows(ctx context.Context, keys []stepflow.FlowID) map[stepflow.FlowID]*stepflow.Flow {
	var dsKeys []*datastore.Key
	for _, key := range keys {
		dsKeys = append(dsKeys, datastore.NewKey(ctx, flowKind, string(key), 0, nil))
	}
	as.Logger.Debugf(ctx, "Retrieving flows with IDs %v", dsKeys)
	flowList := make([]flowDatastore, len(dsKeys))
	flows := make(map[stepflow.FlowID]*stepflow.Flow)
	if err := datastore.GetMulti(ctx, dsKeys, flowList); err != nil {
		as.Logger.Errorf(ctx, "Error retrieving flows: %s", err.Error())
	} else {
		as.Logger.Debugf(ctx, "Retrieved flows %v", flowList)
		for _, flowDs := range flowList {
			// A fresh Flow is allocated per iteration, so no aliasing issue.
			flows[flowDs.ID] = &stepflow.Flow{
				FlowNoData: flowDs.FlowNoData,
				Data:       flowDs.Data,
			}
		}
	}
	return flows
}
// DeleteFlow removes the persisted flow identified by key.
func (as *appengineStorage) DeleteFlow(ctx context.Context, key stepflow.FlowID) error {
	flowKey := datastore.NewKey(ctx, flowKind, string(key), 0, nil)
	return datastore.Delete(ctx, flowKey)
}
// StoreFlowSplit persists a flow split keyed by its ID.
func (as *appengineStorage) StoreFlowSplit(ctx context.Context, flowSplit *stepflow.FlowSplit) error {
	as.Logger.Debugf(ctx, "Storing flow split %v", flowSplit)
	splitKey := datastore.NewKey(ctx, flowSplitKind, string(flowSplit.ID), 0, nil)
	_, putErr := datastore.Put(ctx, splitKey, flowSplit)
	return putErr
}
// RetrieveFlowSplits fetches the flow splits for the given IDs. On error an
// empty map is returned.
func (as *appengineStorage) RetrieveFlowSplits(ctx context.Context, keys []stepflow.FlowSplitID) map[stepflow.FlowSplitID]*stepflow.FlowSplit {
	var dsKeys []*datastore.Key
	for _, key := range keys {
		dsKeys = append(dsKeys, datastore.NewKey(ctx, flowSplitKind, string(key), 0, nil))
	}
	as.Logger.Debugf(ctx, "Retrieving flow splits with IDs %v", dsKeys)
	flowSplitList := make([]stepflow.FlowSplit, len(dsKeys))
	flowSplits := make(map[stepflow.FlowSplitID]*stepflow.FlowSplit)
	if err := datastore.GetMulti(ctx, dsKeys, flowSplitList); err != nil {
		as.Logger.Errorf(ctx, "Error retrieving flow splits: %s", err.Error())
		return flowSplits
	}
	as.Logger.Debugf(ctx, "Got flow splits %v", flowSplitList)
	for i := range flowSplitList {
		// Take the address of the slice element, not of a range loop
		// variable: before Go 1.22 `for _, v := range` reuses one variable,
		// so &v made every map entry point at the same (last) element.
		flowSplits[flowSplitList[i].ID] = &flowSplitList[i]
	}
	return flowSplits
}
// DeleteFlowSplit removes the persisted flow split identified by key.
func (as *appengineStorage) DeleteFlowSplit(ctx context.Context, key stepflow.FlowSplitID) error {
	splitKey := datastore.NewKey(ctx, flowSplitKind, string(key), 0, nil)
	return datastore.Delete(ctx, splitKey)
}
// Increment atomically adds increment to the memcache counter under key.
// The initial value handed to memcache is initialValue-increment so that the
// very first call yields initialValue after its own increment is applied.
func (as *appengineStorage) Increment(ctx context.Context, key string, initialValue int64, increment int64) int64 {
	as.Logger.Debugf(ctx, "-------Incrementing %s with init %d, incr %d", key, initialValue-increment, increment)
	// NOTE(review): the memcache error is deliberately discarded; on failure
	// newVal is 0 and that is what callers receive — confirm they tolerate it.
	newVal, _ := memcache.Increment(ctx, key, increment, uint64(initialValue-increment))
	as.Logger.Debugf(ctx, "Incr returning %d", newVal)
	return int64(newVal)
}

// IncrementWithError packs two counters into one 64-bit memcache value: the
// low 32 bits accumulate increment and the high 32 bits accumulate
// errIncrement. It returns both unpacked counts after applying this call's
// increments. (Assumes each individual count stays below 2^32 — TODO confirm.)
func (as *appengineStorage) IncrementWithError(ctx context.Context, key string, increment int64, errIncrement int64) (count int64, errCount int64) {
	const errUnit int64 = 1 << 32
	const lowMask int64 = errUnit - 1
	totalIncr := increment + errUnit*errIncrement
	incremented := as.Increment(ctx, key, totalIncr, totalIncr)
	return incremented & lowMask, incremented / errUnit
}
package main
/*****************************************************************************************************
*
* There are a total of numCourses courses you have to take, labeled from 0 to numCourses-1.
*
* Some courses may have prerequisites, for example to take course 0 you have to first take course 1,
* which is expressed as a pair: [0,1]
*
* Given the total number of courses and a list of prerequisite pairs, is it possible for you to
* finish all courses?
*
* Example 1:
*
* Input: numCourses = 2, prerequisites = [[1,0]]
* Output: true
* Explanation: There are a total of 2 courses to take.
* To take course 1 you should have finished course 0. So it is possible.
*
* Example 2:
*
* Input: numCourses = 2, prerequisites = [[1,0],[0,1]]
* Output: false
* Explanation: There are a total of 2 courses to take.
* To take course 1 you should have finished course 0, and to take course 0 you should
* also have finished course 1. So it is impossible.
*
* Constraints:
*
* The input prerequisites is a graph represented by a list of edges, not adjacency matrices.
* Read more about how a graph is represented.
* You may assume that there are no duplicate edges in the input prerequisites.
* 1 <= numCourses <= 10^5
******************************************************************************************************/
// canFinish reports whether all numCourses courses (labeled 0..numCourses-1)
// can be completed. prerequisites holds pairs [a, b] meaning course b must be
// taken before course a; completion is possible exactly when the prerequisite
// graph has no cycle. Three-color DFS, O(V+E) time.
func canFinish(numCourses int, prerequisites [][]int) bool {
	var (
		mp     = map[int][]int{} // adjacency list: course -> its prerequisites
		status = map[int]int{}   // 0 = unvisited, 1 = on current DFS path, 2 = done
		dfs    func(int) bool
	)
	// Build the adjacency list.
	for _, v := range prerequisites {
		mp[v[0]] = append(mp[v[0]], v[1])
	}
	// dfs returns false iff a cycle is reachable from now.
	dfs = func(now int) bool {
		if status[now] == 1 {
			// Back-edge to a node on the current path: cycle detected.
			return false
		}
		if status[now] == 2 {
			// Already verified acyclic. Skipping here keeps the search
			// linear; the original re-traversed finished subgraphs, which is
			// exponential in the worst case.
			return true
		}
		status[now] = 1
		for _, v := range mp[now] {
			if !dfs(v) {
				return false
			}
		}
		status[now] = 2
		return true
	}
	for i := 0; i < numCourses; i++ {
		if status[i] != 2 {
			if !dfs(i) {
				return false
			}
		}
	}
	return true
}
package render3d
import (
"github.com/galaco/lambda-core/mesh"
)
// Compositor is a struct that provides a mechanism to compose 1 or more
// meshes into a single renderable set of data, indexed by material.
// This is super handy for reducing draw calls down a bunch.
// A resultant Composition should result in a single set of vertex data plus
// one index-offset/length pair per material referenced by all composed meshes.
type Compositor struct {
	meshes []mesh.IMesh
	// isOutdated is set when meshes have been added since the last
	// ComposeScene call.
	isOutdated bool
}
// AddMesh adds a new mesh to be composed and marks the composition as stale.
func (compositor *Compositor) AddMesh(m mesh.IMesh) {
	compositor.meshes = append(compositor.meshes, m)
	compositor.isOutdated = true
}

// IsOutdated reports whether meshes were added since the last ComposeScene.
func (compositor *Compositor) IsOutdated() bool {
	return compositor.isOutdated
}
// ComposeScene builds a Composition for rendering: all added meshes are
// grouped by material and flattened into one vertex buffer, with one
// CompositionMesh (offset + vertex count) per material. Resets isOutdated.
// NOTE(review): map iteration order is random in Go, so the material order
// within the composed buffer differs between calls — confirm that is
// acceptable downstream.
func (compositor *Compositor) ComposeScene() *Composition {
	compositor.isOutdated = false
	texMappings := map[string][]mesh.IMesh{}
	// Step 1. Map meshes into contiguous groups by texture
	for idx, m := range compositor.meshes {
		if _, ok := texMappings[m.Material().FilePath()]; !ok {
			texMappings[m.Material().FilePath()] = make([]mesh.IMesh, 0)
		}
		texMappings[m.Material().FilePath()] = append(texMappings[m.Material().FilePath()], compositor.meshes[idx])
	}
	// Step 2. Construct a single vertex object Composition ordered by material
	sceneComposition := NewComposition()
	vertCount := 0
	for key, texMesh := range texMappings {
		// @TODO verify if this is the vertex offset of the actual array offset (vertexOffset * 3)
		matVertOffset := vertCount
		matVertCount := 0
		for _, sMesh := range texMesh {
			sceneComposition.AddVertex(sMesh.Vertices()...)
			sceneComposition.AddNormal(sMesh.Normals()...)
			sceneComposition.AddUV(sMesh.UVs()...)
			// Vertices() is divided by 3, i.e. treated as flat xyz floats —
			// presumably; confirm against mesh.IMesh.
			matVertCount += len(sMesh.Vertices()) / 3
		}
		sceneComposition.AddMesh(NewCompositionMesh(key, matVertOffset, matVertCount))
		vertCount += matVertCount
	}
	sceneComposition.GenerateTangents()
	// Step 3. Generate indices from composed materials
	sceneComposition.Compose()
	return sceneComposition
}
package govaluate
// boolFilter gathers information about evaluationStages whose result was a
// boolean value, split into the stages that produced true and those that
// produced false.
type boolFilter struct {
	boundTrue  *boundBoolFilter
	boundFalse *boundBoolFilter
}

// newBoolFilter creates a boolFilter whose internal collections are
// pre-sized with capacity cap.
func newBoolFilter(cap int) *boolFilter {
	return &boolFilter{
		boundTrue:  newBoundBoolFilter(cap, true),
		boundFalse: newBoundBoolFilter(cap, false),
	}
}
// getVarsCausing returns the set of variable names that contributed to the
// given boolean outcome.
func (bf *boolFilter) getVarsCausing(value bool) map[string]bool {
	if value {
		return bf.boundTrue.vars
	}
	return bf.boundFalse.vars
}
// pushStageResult routes a finished evaluationStage to the bound filter
// matching its boolean result; ternary stages route their branch subtree
// instead. Stages with non-boolean, non-ternary results are ignored.
func (bf *boolFilter) pushStageResult(stage *evaluationStage) {
	// route boolean expressions to their filters
	if stage.result == true {
		bf.boundTrue.pushStageResult(stage)
		return
	}
	if stage.result == false {
		bf.boundFalse.pushStageResult(stage)
		return
	}
	// ternary expression is split to 2 actual expressions: TERNARY_TRUE and TERNARY_FALSE
	// so if expression originally was: `a > b ? c : d` then there are 2 expressions in result:
	// TERNARY_TRUE(x1, c) and TERNARY_FALSE(x2, d), and c is `true` part while d is `false` part
	if stage.symbol == TERNARY_TRUE && stage.rightStage != nil {
		bf.boundTrue.pushStageResult(stage.rightStage)
		return
	}
	if stage.symbol == TERNARY_FALSE && stage.rightStage != nil {
		bf.boundFalse.pushStageResult(stage.rightStage)
		return
	}
}
// postprocessStages gathers the names of the variables which caused the true
// and false outcomes; it is called once after all evaluationStages have been
// processed.
func (bf *boolFilter) postprocessStages() {
	for _, bound := range []*boundBoolFilter{bf.boundTrue, bf.boundFalse} {
		bound.postprocessStages()
	}
}
// boundBoolFilter is the boolFilter half bound to one specific boolean
// outcome (true or false). It records the stages that produced that outcome
// and, after postprocessing, the variable names involved.
type boundBoolFilter struct {
	// value is the boolean outcome this filter is bound to.
	value bool
	// avoid is the opposite outcome; subtrees with this result are skipped.
	avoid bool
	// stages holds the stages pushed for this outcome.
	stages []*evaluationStage
	// vars is the set of variable names collected by postprocessStages.
	vars map[string]bool
	// visited guards against re-walking the same stage.
	visited map[*evaluationStage]bool
}

// newBoundBoolFilter creates a filter for the given outcome, pre-sizing its
// collections with capacity cap.
func newBoundBoolFilter(cap int, value bool) *boundBoolFilter {
	return &boundBoolFilter{
		value:   value,
		avoid:   !value,
		stages:  make([]*evaluationStage, 0, cap),
		vars:    make(map[string]bool, cap),
		visited: make(map[*evaluationStage]bool, cap),
	}
}
// collectVarsInfo recursively walks a stage tree, adding every variable name
// found to bbf.vars. Subtrees whose result equals the opposite outcome
// (bbf.avoid) are pruned, and each stage is visited at most once.
func (bbf *boundBoolFilter) collectVarsInfo(stage *evaluationStage) {
	// avoid visiting stages repeatedly
	if bbf.visited[stage] {
		return
	}
	bbf.visited[stage] = true
	// don't go down to subtrees those results differ from bound value
	if stage.result == bbf.avoid {
		return
	}
	if stage.variableName != nil {
		bbf.vars[*stage.variableName] = true
	}
	if stage.leftStage != nil {
		bbf.collectVarsInfo(stage.leftStage)
	}
	if stage.rightStage != nil {
		bbf.collectVarsInfo(stage.rightStage)
	}
}
func (bbf *boundBoolFilter) postprocessStages() {
for _, stage := range bbf.stages {
bbf.collectVarsInfo(stage)
}
}
func (bbf *boundBoolFilter) pushStageResult(stage *evaluationStage) {
bbf.stages = append(bbf.stages, stage)
} | boolFilter.go | 0.739046 | 0.402157 | boolFilter.go | starcoder |
package agent
import (
"container/heap"
"github.com/joyrex2001/nightshift/internal/scanner"
)
// objectspq is the priority queue that contains objects found by the scanners.
// Each scanner has a priority and Objects found with these scanners take the
// same priority. The highest priority takes precedence over earlier scanned
// objects with a lower priority (hence this implementation uses a priority
// queue). If an Object is added with the same priority it will be replaced.
type objectspq []*scanner.Object

// Len returns the length of the priority queue, as required by the heap
// interface.
func (pq objectspq) Len() int {
	return len(pq)
}

// Less compares the scanner.Objects, and determines the order of the priority
// queue, as required by the heap interface. Because it uses >, the heap root
// (index 0) is the Object with the HIGHEST Priority (a max-heap).
func (pq objectspq) Less(i, j int) bool {
	return pq[i].Priority > pq[j].Priority
}

// Swap will swap two scanner.Objects on the priority queue, as required by
// the heap interface.
func (pq objectspq) Swap(i, j int) {
	pq[i], pq[j] = pq[j], pq[i]
}

// Pop removes and returns the last element of the underlying slice, as
// required by the heap interface. Via heap.Pop this yields the Object with
// the highest Priority, since Less implements a max-heap.
func (pq *objectspq) Pop() interface{} {
	old := *pq
	n := len(old)
	item := old[n-1]
	*pq = old[0 : n-1]
	return item
}

// Push will add an Object to the priority queue, as required by the heap
// interface (callers should use heap.Push).
func (pq *objectspq) Push(x interface{}) {
	item := x.(*scanner.Object)
	*pq = append(*pq, item)
}

// Index will return the raw array index of the entry sharing obj's Priority,
// or -1 when none exists.
func (pq objectspq) Index(obj *scanner.Object) int {
	// this could be optimized with a hashmap indexing the actual positions,
	// however, since the queues are expected to have just a few entries, and
	// this code will only be called when an update is received, the
	// implementation is left as-is (choosing readiblity over performance).
	for i, o := range pq {
		if o.Priority == obj.Priority {
			return i
		}
	}
	return -1
}
// InitObjects will initialize the objects. If objects were stored, this method
// will remove these and the objects are re-initialized.
func (a *worker) InitObjects() {
	a.m.Lock()
	defer a.m.Unlock()
	// Replacing the map discards every per-UID priority queue at once.
	a.objects = map[string]*objectspq{}
}
// GetObjects walks every per-UID priority queue and returns a map, keyed
// by UID, holding a copy of the highest-priority object from each
// non-empty queue.
func (a *worker) GetObjects() map[string]*scanner.Object {
	a.m.Lock()
	defer a.m.Unlock()
	result := make(map[string]*scanner.Object)
	for _, pq := range a.objects {
		if pq.Len() == 0 {
			continue
		}
		// Index 0 is the heap root: the entry with the highest Priority.
		top := (*pq)[0].Copy()
		result[top.UID] = top
	}
	return result
}
// addObject adds (or replaces!) an object in the collection. Every object
// UID has its own priority queue; when an entry with the same priority is
// already queued, it is replaced in place instead of being added.
func (a *worker) addObject(obj *scanner.Object) {
	a.m.Lock()
	defer a.m.Unlock()
	pq, exists := a.objects[obj.UID]
	if !exists {
		// First entry for this UID: start a fresh heap and push the
		// object onto it.
		pq = &objectspq{}
		heap.Init(pq)
		a.objects[obj.UID] = pq
		heap.Push(pq, obj)
		return
	}
	if idx := pq.Index(obj); idx >= 0 {
		// An entry with this priority exists; overwrite it in place.
		// The heap invariant is preserved because ordering depends only
		// on Priority, which is unchanged.
		(*pq)[idx] = obj
		return
	}
	heap.Push(pq, obj)
}
// removeObject will remove an Object from the priority queue.
func (a *worker) removeObject(obj *scanner.Object) {
a.m.Lock()
defer a.m.Unlock()
opq, ok := a.objects[obj.UID]
if !ok {
return
}
if idx := opq.Index(obj); idx >= 0 {
heap.Remove(opq, idx)
}
} | internal/agent/objects.go | 0.705481 | 0.496826 | objects.go | starcoder |
package object
import "errors"
// Comparable is implemented by Objects that support the full set of
// ordering and equality comparisons against another Object.
type Comparable interface {
	// Compare methods
	// Eq returns true if the left Object is equal to the right Object.
	Eq(Object) (Boolean, error)
	// Neq returns true if the left Object is not equal to the right Object.
	Neq(Object) (Boolean, error)
	// Less returns true if the left Object is less than the right Object.
	Less(Object) (Boolean, error)
	// LessEq returns true if the left Object is less than or equal to the right Object.
	LessEq(Object) (Boolean, error)
	// Greater returns true if the left Object is greater than the right Object.
	Greater(Object) (Boolean, error)
	// GreaterEq returns true if the left Object is greater than or equal to the right Object.
	GreaterEq(Object) (Boolean, error)
	// ToBoolean returns true when the value of this Object is not false.
	ToBoolean() (value Boolean)
	// IsEmpty returns true when the value of this object is equal to the
	// value of this type of Object in it's default state.
	// IsEmpty() bool
}
// These functions should be used when the left Object could be nil.
// They check for nil on either side before delegating to the Object's own
// comparator method.

// Eq returns true if the left Object is equal to the right Object.
// Two nils compare equal; a nil against a non-nil does not.
// Comparing different types is an error (reported by the implementation).
func Eq(left Object, right Object) (Boolean, error) {
	ln, rn := IsNil(left), IsNil(right)
	switch {
	case ln && rn:
		return Boolean(true), nil
	case ln || rn:
		return Boolean(false), nil
	}
	return left.Eq(right)
}
// Neq returns true if the left Object is not equal to the right Object.
// Two nils compare equal (so Neq is false); a nil against a non-nil is
// unequal. Comparing different types is an error (reported by the
// implementation).
func Neq(left Object, right Object) (Boolean, error) {
	ln, rn := IsNil(left), IsNil(right)
	switch {
	case ln && rn:
		return Boolean(false), nil
	case ln || rn:
		return Boolean(true), nil
	}
	return left.Neq(right)
}
// Less returns true if the left Object is less than the right Object.
// If either is nil Less returns false and an error.
// Comparing different types is also an error.
func Less(left Object, right Object) (Boolean, error) {
// If one is nil and the other isn't they cannot be compared.
if IsNil(left) || IsNil(right) {
return Boolean(false), errors.New("less than not defined on nil")
}
return left.Less(right)
}
// LessEq returns true if the left Object is less than or equal to the right Object.
// If either is nil LessEq returns false and an error.
// Comparing different types is also an error.
func LessEq(left Object, right Object) (Boolean, error) {
// If one is nil and the other isn't they cannot be compared.
if IsNil(left) || IsNil(right) {
return Boolean(false), errors.New("less than or equal to not defined on nil")
}
return left.LessEq(right)
}
// Greater returns true if the left Object is greter than the right Object.
// If either is nil Greater returns false and an error.
// Comparing different types is also an error.
func Greater(left Object, right Object) (Boolean, error) {
// If one is nil and the other isn't they cannot be compared.
if IsNil(left) || IsNil(right) {
return Boolean(false), errors.New("greater than not defined on nil")
}
return left.Greater(right)
}
// GreaterEq returns true if the left Object is greter than or equal to the right Object.
// If either is nil GreaterEq returns false and an error.
// Comparing different types is also an error.
func GreaterEq(left Object, right Object) (Boolean, error) {
// If one is nil and the other isn't they cannot be compared.
if IsNil(left) || IsNil(right) {
return Boolean(false), errors.New("greater than or equal to not defined on nil")
}
return left.GreaterEq(right)
} | pkg/object/comparable.go | 0.862699 | 0.498535 | comparable.go | starcoder |
package gofa
// Angle
// Operations on Angles
/*
Anp Normalize angle into the range 0 <= a < 2pi.
Given:
a float64 angle (radians)
Returned (function value):
float64 angle in range 0-2pi
*/
func Anp(a float64) float64 {
	// Reduce modulo 2pi, then shift a negative remainder up into [0, 2pi).
	w := fmod(a, D2PI)
	if w < 0 {
		return w + D2PI
	}
	return w
}
/*
Anpm Normalize angle into the range -pi <= a < +pi.
Given:
a float64 angle (radians)
Returned (function value):
float64 angle in range +/-pi
*/
func Anpm(a float64) float64 {
	// Reduce modulo 2pi, then fold results of magnitude >= pi back into
	// [-pi, +pi) using the sign of the original argument.
	w := fmod(a, D2PI)
	if fabs(w) >= DPI {
		return w - dsign(D2PI, a)
	}
	return w
}
/*
A2af Decompose radians into degrees, arcminutes, arcseconds, fraction.
Given:
ndp int resolution (Note 1)
angle float64 angle in radians
Returned:
sign byte '+' or '-'
idmsf [4]int degrees, arcminutes, arcseconds, fraction
Notes:
1) The argument ndp is interpreted as follows:
ndp resolution
: ...0000 00 00
-7 1000 00 00
-6 100 00 00
-5 10 00 00
-4 1 00 00
-3 0 10 00
-2 0 01 00
-1 0 00 10
0 0 00 01
1 0 00 00.1
2 0 00 00.01
3 0 00 00.001
: 0 00 00.000...
2) The largest positive useful value for ndp is determined by the
size of angle, the format of float64s on the target platform, and
the risk of overflowing idmsf[3]. On a typical platform, for
angle up to 2pi, the available floating-point precision might
correspond to ndp=12. However, the practical limit is typically
ndp=9, set by the capacity of a 32-bit int, or ndp=4 if int is
only 16 bits.
3) The absolute value of angle may exceed 2pi. In cases where it
does not, it is up to the caller to test for and handle the
case where angle is very nearly 2pi and rounds up to 360 degrees,
by testing for idmsf[0]=360 and setting idmsf[0-3] to zero.
Called:
D2tf decompose days to hms
*/
func A2af(ndp int, angle float64, sign *byte, idmsf *[4]int) {
/* Hours to degrees * radians to turns */
F := 15.0 / D2PI
/* Scale then use days to h,m,s function. */
D2tf(ndp, angle*F, sign, idmsf)
}
/*
A2tf Decompose radians into hours, minutes, seconds, fraction.
Given:
ndp int resolution (Note 1)
angle float64 angle in radians
Returned:
sign byte '+' or '-'
ihmsf [4]int hours, minutes, seconds, fraction
Notes:
1) The argument ndp is interpreted as follows:
ndp resolution
: ...0000 00 00
-7 1000 00 00
-6 100 00 00
-5 10 00 00
-4 1 00 00
-3 0 10 00
-2 0 01 00
-1 0 00 10
0 0 00 01
1 0 00 00.1
2 0 00 00.01
3 0 00 00.001
: 0 00 00.000...
2) The largest positive useful value for ndp is determined by the
size of angle, the format of float64s on the target platform, and
the risk of overflowing ihmsf[3]. On a typical platform, for
angle up to 2pi, the available floating-point precision might
correspond to ndp=12. However, the practical limit is typically
ndp=9, set by the capacity of a 32-bit int, or ndp=4 if int is
only 16 bits.
3) The absolute value of angle may exceed 2pi. In cases where it
does not, it is up to the caller to test for and handle the
case where angle is very nearly 2pi and rounds up to 24 hours,
by testing for ihmsf[0]=24 and setting ihmsf[0-3] to zero.
Called:
D2tf decompose days to hms
*/
func A2tf(ndp int, angle float64, sign *byte, ihmsf *[4]int) {
	/* Convert radians to turns (days) and decompose with D2tf. */
	D2tf(ndp, angle/D2PI, sign, ihmsf)
}
/*
D2tf Decompose days to hours, minutes, seconds, fraction.
Given:
ndp int resolution (Note 1)
days float64 interval in days
Returned:
sign byte '+' or '-'
ihmsf [4]int hours, minutes, seconds, fraction
Notes:
1) The argument ndp is interpreted as follows:
ndp resolution
: ...0000 00 00
-7 1000 00 00
-6 100 00 00
-5 10 00 00
-4 1 00 00
-3 0 10 00
-2 0 01 00
-1 0 00 10
0 0 00 01
1 0 00 00.1
2 0 00 00.01
3 0 00 00.001
: 0 00 00.000...
2) The largest positive useful value for ndp is determined by the
size of days, the format of float64 on the target platform, and
the risk of overflowing ihmsf[3]. On a typical platform, for
days up to 1.0, the available floating-point precision might
correspond to ndp=12. However, the practical limit is typically
ndp=9, set by the capacity of a 32-bit int, or ndp=4 if int is
only 16 bits.
3) The absolute value of days may exceed 1.0. In cases where it
does not, it is up to the caller to test for and handle the
case where days is very nearly 1.0 and rounds up to 24 hours,
by testing for ihmsf[0]=24 and setting ihmsf[0-3] to zero.
*/
// D2tf decomposes an interval in days into hours, minutes, seconds and a
// fraction field, at the resolution selected by ndp (see the notes above).
// The sign of days is returned separately as '+' or '-'.
func D2tf(ndp int, days float64, sign *byte, ihmsf *[4]int) {
	var nrs, n int
	var rs, rm, rh, a, w, ah, am, as, af float64
	/* Handle sign. */
	if days >= 0.0 {
		*sign = '+'
	} else {
		*sign = '-'
	}
	/* Interval in seconds. */
	a = DAYSEC * fabs(days)
	/* Pre-round if resolution coarser than 1s (then pretend ndp=1). */
	if ndp < 0 {
		nrs = 1
		for n = 1; n <= -ndp; n++ {
			/* Each coarser step multiplies the rounding unit by 10,
			   except at the seconds->minutes and minutes->hours
			   boundaries (n==2 and n==4), which multiply by 6.
			   BUGFIX: the decimal step previously multiplied by 4;
			   the SOFA reference (iauD2tf) uses 10, so e.g. ndp=-1
			   now rounds to 10 s instead of 4 s. */
			if n == 2 || n == 4 {
				nrs *= 6
			} else {
				nrs *= 10
			}
		}
		rs = float64(nrs)
		w = a / rs
		a = rs * dnint(w)
	}
	/* Express the unit of each field in resolution units. */
	nrs = 1
	for n = 1; n <= ndp; n++ {
		nrs *= 10
	}
	rs = float64(nrs)
	rm = rs * 60.0
	rh = rm * 60.0
	/* Round the interval and express in resolution units. */
	a = dnint(rs * a)
	/* Break into fields. */
	ah = a / rh
	ah = dint(ah)
	a -= ah * rh
	am = a / rm
	am = dint(am)
	a -= am * rm
	as = a / rs
	as = dint(as)
	af = a - as*rs
	/* Return results. */
	ihmsf[0] = int(ah)
	ihmsf[1] = int(am)
	ihmsf[2] = int(as)
	ihmsf[3] = int(af)
}
/*
Af2a Convert degrees, arcminutes, arcseconds to radians.
Given:
s byte sign: '-' = negative, otherwise positive
ideg int degrees
iamin int arcminutes
asec float64 arcseconds
Returned:
rad float64 angle in radians
Returned (function value):
int status: 0 = OK
1 = ideg outside range 0-359
2 = iamin outside range 0-59
3 = asec outside range 0-59.999...
Notes:
1) The result is computed even if any of the range checks fail.
2) Negative ideg, iamin and/or asec produce a warning status, but
the absolute value is used in the conversion.
3) If there are multiple errors, the status value reflects only the
first, the smallest taking precedence.
*/
func Af2a(s byte, ideg, iamin int, asec float64, rad *float64) int {
	/* Sign factor: negative only for an explicit '-'. */
	factor := 1.0
	if s == '-' {
		factor = -1.0
	}
	/* Compute the interval (always from absolute field values). */
	*rad = factor * (60.0*(60.0*(fabs(float64(ideg)))+(fabs(float64(iamin)))) + fabs(asec)) * DAS2R
	/* Validate arguments; the smallest failing status wins. */
	switch {
	case ideg < 0 || ideg > 359:
		return 1
	case iamin < 0 || iamin > 59:
		return 2
	case asec < 0.0 || asec >= 60.0:
		return 3
	}
	return 0
}
/*
Tf2a Convert hours, minutes, seconds to radians.
Given:
s byte sign: '-' = negative, otherwise positive
ihour int hours
imin int minutes
sec float64 seconds
Returned:
rad float64 angle in radians
Returned (function value):
int status: 0 = OK
1 = ihour outside range 0-23
2 = imin outside range 0-59
3 = sec outside range 0-59.999...
Notes:
1) The result is computed even if any of the range checks fail.
2) Negative ihour, imin and/or sec produce a warning status, but
the absolute value is used in the conversion.
3) If there are multiple errors, the status value reflects only the
first, the smallest taking precedence.
*/
func Tf2a(s byte, ihour, imin int, sec float64, rad *float64) int {
	/* Sign factor: negative only for an explicit '-'. */
	factor := 1.0
	if s == '-' {
		factor = -1.0
	}
	/* Compute the interval (always from absolute field values). */
	*rad = factor * (60.0*(60.0*(fabs(float64(ihour)))+(fabs(float64(imin)))) + fabs(sec)) * DS2R
	/* Validate arguments; the smallest failing status wins. */
	switch {
	case ihour < 0 || ihour > 23:
		return 1
	case imin < 0 || imin > 59:
		return 2
	case sec < 0.0 || sec >= 60.0:
		return 3
	}
	return 0
}
/*
Tf2d Convert hours, minutes, seconds to days.
Given:
s byte sign: '-' = negative, otherwise positive
ihour int hours
imin int minutes
sec float64 seconds
Returned:
days float64 interval in days
Returned (function value):
int status: 0 = OK
1 = ihour outside range 0-23
2 = imin outside range 0-59
3 = sec outside range 0-59.999...
Notes:
1) The result is computed even if any of the range checks fail.
2) Negative ihour, imin and/or sec produce a warning status, but
the absolute value is used in the conversion.
3) If there are multiple errors, the status value reflects only the
first, the smallest taking precedence.
*/
func Tf2d(s byte, ihour, imin int, sec float64, days *float64) int {
	/* Sign factor: negative only for an explicit '-'. */
	factor := 1.0
	if s == '-' {
		factor = -1.0
	}
	/* Compute the interval (always from absolute field values). */
	*days = factor * (60.0*(60.0*(fabs(float64(ihour)))+(fabs(float64(imin)))) + fabs(sec)) / DAYSEC
	/* Validate arguments; the smallest failing status wins. */
	switch {
	case ihour < 0 || ihour > 23:
		return 1
	case imin < 0 || imin > 59:
		return 2
	case sec < 0.0 || sec >= 60.0:
		return 3
	}
	return 0
}
// Separation and position-angle
/*
Sepp Angular separation between two p-vectors.
Given:
a [3]float64 first p-vector (not necessarily unit length)
b [3]float64 second p-vector (not necessarily unit length)
Returned (function value):
float64 angular separation (radians, always positive)
Notes:
1) If either vector is null, a zero result is returned.
2) The angular separation is most simply formulated in terms of
scalar product. However, this gives poor accuracy for angles
near zero and pi. The present algorithm uses both cross product
and dot product, to deliver full accuracy whatever the size of
the angle.
Called:
Pxp vector product of two p-vectors
Pm modulus of p-vector
Pdp scalar product of two p-vectors
*/
func Sepp(a, b [3]float64) float64 {
	/* Use both cross and dot products so the result stays accurate for
	   angles near 0 and near pi (see notes above). */
	var axb [3]float64
	Pxp(a, b, &axb)
	ss := Pm(axb)  /* sine of the angle, times the two moduli */
	cs := Pdp(a, b) /* cosine of the angle, times the two moduli */
	if ss == 0.0 && cs == 0.0 {
		/* Either vector is null: return zero by convention. */
		return 0.0
	}
	return atan2(ss, cs)
}
/*
Seps Angular separation between two sets of spherical coordinates.
Given:
al float64 first longitude (radians)
ap float64 first latitude (radians)
bl float64 second longitude (radians)
bp float64 second latitude (radians)
Returned (function value):
float64 angular separation (radians)
Called:
S2c spherical coordinates to unit vector
Sepp angular separation between two p-vectors
*/
func Seps(al, ap, bl, bp float64) float64 {
	/* Convert both spherical positions to Cartesian unit vectors, then
	   measure the angle between them. */
	var ac, bc [3]float64
	S2c(al, ap, &ac)
	S2c(bl, bp, &bc)
	return Sepp(ac, bc)
}
/*
Pap Position-angle from two p-vectors.
Given:
a [3]float64 direction of reference point
b [3]float64 direction of point whose PA is required
Returned (function value):
float64 position angle of b with respect to a (radians)
Notes:
1) The result is the position angle, in radians, of direction b with
respect to direction a. It is in the range -pi to +pi. The
sense is such that if b is a small distance "north" of a the
position angle is approximately zero, and if b is a small
distance "east" of a the position angle is approximately +pi/2.
2) The vectors a and b need not be of unit length.
3) Zero is returned if the two directions are the same or if either
vector is null.
4) If vector a is at a pole, the result is ill-defined.
Called:
Pn decompose p-vector into modulus and direction
Pm modulus of p-vector
Pxp vector product of two p-vectors
Pmp p-vector minus p-vector
Pdp scalar product of two p-vectors
*/
func Pap(a, b [3]float64) float64 {
	var am, bm, st, ct, xa, ya, za, pa float64
	var au, eta, xi, a2b [3]float64
	/* Modulus and direction of the a vector. */
	Pn(a, &am, &au)
	/* Modulus of the b vector. */
	bm = Pm(b)
	/* Deal with the case of a null vector. */
	if (am == 0.0) || (bm == 0.0) {
		/* Forces a position angle of zero below. */
		st = 0.0
		ct = 1.0
	} else {
		/* The "north" axis tangential from a (arbitrary length). */
		xa = a[0]
		ya = a[1]
		za = a[2]
		eta[0] = -xa * za
		eta[1] = -ya * za
		eta[2] = xa*xa + ya*ya
		/* The "east" axis tangential from a (same length). */
		Pxp(eta, au, &xi)
		/* The vector from a to b. */
		Pmp(b, a, &a2b)
		/* Resolve into components along the north and east axes. */
		st = Pdp(a2b, xi)
		ct = Pdp(a2b, eta)
		/* Deal with degenerate cases: coincident directions (or a at a
		   pole, where the tangent axes vanish) yield zero. */
		if (st == 0.0) && (ct == 0.0) {
			ct = 1.0
		}
	}
	/* Position angle. */
	pa = atan2(st, ct)
	return pa
}
/*
Pas Position-angle from spherical coordinates.
Given:
al float64 longitude of point A (e.g. RA) in radians
ap float64 latitude of point A (e.g. Dec) in radians
bl float64 longitude of point B
bp float64 latitude of point B
Returned (function value):
float64 position angle of B with respect to A
Notes:
1) The result is the bearing (position angle), in radians, of point
B with respect to point A. It is in the range -pi to +pi. The
sense is such that if B is a small distance "east" of point A,
the bearing is approximately +pi/2.
2) Zero is returned if the two points are coincident.
*/
func Pas(al, ap, bl, bp float64) float64 {
var dl, x, y, pa float64
dl = bl - al
y = sin(dl) * cos(bp)
x = sin(bp)*cos(ap) - cos(bp)*sin(ap)*cos(dl)
// pa = ((x != 0.0) || (y != 0.0)) ? atan2(y, x) : 0.0;
if (x != 0.0) || (y != 0.0) {
pa = atan2(y, x)
} else {
pa = 0.0
}
return pa
} | angle.go | 0.756537 | 0.647659 | angle.go | starcoder |
package chart
import (
"fmt"
util "github.com/wcharczuk/go-chart/util"
)
const (
	// DefaultSimpleMovingAveragePeriod is the default number of values to average.
	DefaultSimpleMovingAveragePeriod = 16
)

// SMASeries is a computed series.
type SMASeries struct {
	// Name is the series name reported by GetName.
	Name string
	// Style is the drawing style returned by GetStyle.
	Style Style
	// YAxis selects which y-axis the series draws on.
	YAxis YAxisType
	// Period is the moving-average window size; 0 selects
	// DefaultSimpleMovingAveragePeriod.
	Period int
	// InnerSeries supplies the raw values being averaged.
	InnerSeries ValuesProvider
}
// GetName returns the name of the time series.
func (sma SMASeries) GetName() string {
	return sma.Name
}

// GetStyle returns the line style.
func (sma SMASeries) GetStyle() Style {
	return sma.Style
}

// GetYAxis returns which YAxis the series draws on.
func (sma SMASeries) GetYAxis() YAxisType {
	return sma.YAxis
}

// Len returns the number of elements in the series (delegates to the
// inner series).
func (sma SMASeries) Len() int {
	return sma.InnerSeries.Len()
}
// GetPeriod returns the window size: the configured Period when non-zero,
// otherwise the first supplied default, otherwise
// DefaultSimpleMovingAveragePeriod.
func (sma SMASeries) GetPeriod(defaults ...int) int {
	if sma.Period != 0 {
		return sma.Period
	}
	if len(defaults) > 0 {
		return defaults[0]
	}
	return DefaultSimpleMovingAveragePeriod
}
// GetValues returns the inner series' x value at index, paired with the
// moving average of the y values ending at index. Both results are zero
// when the inner series is missing or empty.
func (sma SMASeries) GetValues(index int) (x, y float64) {
	if sma.InnerSeries == nil || sma.InnerSeries.Len() == 0 {
		return
	}
	x, _ = sma.InnerSeries.GetValues(index)
	return x, sma.getAverage(index)
}

// GetLastValues returns the x value of the final inner-series sample and
// the moving average ending at that sample. Both results are zero when
// the inner series is missing or empty.
func (sma SMASeries) GetLastValues() (x, y float64) {
	if sma.InnerSeries == nil || sma.InnerSeries.Len() == 0 {
		return
	}
	last := sma.InnerSeries.Len() - 1
	x, _ = sma.InnerSeries.GetValues(last)
	return x, sma.getAverage(last)
}
// getAverage returns the mean of the inner series' y values over the
// window ending at index.
// NOTE(review): the window spans period+1 samples (index down to
// index-period inclusive, floored at 0), not period — confirm this
// off-by-one is intended before changing it.
func (sma SMASeries) getAverage(index int) float64 {
	period := sma.GetPeriod()
	floor := util.Math.MaxInt(0, index-period)
	var accum float64
	var count float64
	for x := index; x >= floor; x-- {
		_, vy := sma.InnerSeries.GetValues(x)
		accum += vy
		count += 1.0
	}
	return accum / count
}
// Render renders the series.
func (sma SMASeries) Render(r Renderer, canvasBox Box, xrange, yrange Range, defaults Style) {
style := sma.Style.InheritFrom(defaults)
Draw.LineSeries(r, canvasBox, xrange, yrange, style, sma)
}
// Validate validates the series.
func (sma SMASeries) Validate() error {
if sma.InnerSeries == nil {
return fmt.Errorf("sma series requires InnerSeries to be set")
}
return nil
} | vendor/github.com/wcharczuk/go-chart/sma_series.go | 0.868493 | 0.504272 | sma_series.go | starcoder |
// Package mhf provides an interface to memory hard functions, a.k.a password key derivation functions.
package mhf
import "errors"
// errParams signals a Parameterize call with the wrong number of values.
// (Presumably used by the concrete implementations' Parameterize methods;
// not referenced in this section — verify against the implementation files.)
var errParams = errors.New("invalid amount of parameters")

// Identifier is used to specify the memory hard function to be used.
type Identifier byte

const (
	// Argon2id password kdf function.
	Argon2id Identifier = 1 + iota
	// Scrypt password kdf function.
	Scrypt
	// PBKDF2Sha512 PBKDF2 password kdf function using SHA-512.
	PBKDF2Sha512
	// Bcrypt password kdf function.
	Bcrypt
	// maxID is a sentinel marking the end of the valid identifier range.
	maxID
)
// Available reports whether the given kdf function is linked into the binary.
func (i Identifier) Available() bool {
	return i > 0 && i < maxID
}

// Get returns an MHF wrapping the identified function with its default
// parameters, or nil when i is not a valid identifier.
func (i Identifier) Get() *MHF {
	if !i.Available() {
		return nil
	}
	return &MHF{constructors[i-1]()}
}

// Harden uses default parameters for the key derivation function over the
// input password and salt.
// NOTE(review): an invalid identifier makes Get return nil, so this would
// dereference a nil *MHF and panic — callers must pass a valid identifier.
func (i Identifier) Harden(password, salt []byte, length int) []byte {
	return i.Get().Harden(password, salt, length)
}
// String returns the string name of the hashing function, or the empty
// string when i is not a valid identifier. (Previously an invalid
// identifier dereferenced the nil *MHF returned by Get and panicked.)
func (i Identifier) String() string {
	m := i.Get()
	if m == nil {
		return ""
	}
	return m.String()
}
// constructor builds a fresh memoryHardFunction with its default parameters.
type constructor func() memoryHardFunction

// constructors holds one constructor per identifier, indexed by
// Identifier-1 (identifiers start at 1; maxID is excluded).
var constructors [maxID - 1]constructor

// register installs c as the constructor for identifier i.
func (i Identifier) register(c constructor) {
	constructors[i-1] = c
}

// init wires every identifier to its implementation's constructor.
func init() {
	Argon2id.register(argon2idNew)
	Scrypt.register(scryptmhfNew)
	PBKDF2Sha512.register(pbkdf2New)
	Bcrypt.register(bcryptNew)
}

// memoryHardFunction is the common behavior of every password kdf
// implementation in this package.
type memoryHardFunction interface {
	// Harden uses default parameters for the key derivation function over the input password and salt.
	Harden(password, salt []byte, length int) []byte
	// Parameterize replaces the functions parameters with the new ones. Must match the amount of parameters.
	Parameterize(parameters ...int)
	// String returns the string name of the function and its parameters.
	String() string
	params() []int
}
// MHF allows customisation of the underlying memory-hard function.
type MHF struct {
memoryHardFunction
}
// Set sets m's memory-hard function to the specified one and returns m. Returns nil if the identifier is invalid.
func (m *MHF) Set(i Identifier) *MHF {
if i == 0 || i >= maxID {
return nil
}
m.memoryHardFunction = constructors[i-1]()
return m
} | mhf/mhf.go | 0.788949 | 0.420243 | mhf.go | starcoder |
package goble
// A dictionary of known characteristic names and type (keyed by characteristic uuid)
var knownCharacteristics = map[string]struct{ Name, Type string }{
"2a00": {Name: "Device Name", Type: "org.bluetooth.characteristic.gap.device_name"},
"2a01": {Name: "Appearance", Type: "org.bluetooth.characteristic.gap.appearance"},
"2a02": {Name: "Peripheral Privacy Flag", Type: "org.bluetooth.characteristic.gap.peripheral_privacy_flag"},
"2a03": {Name: "Reconnection Address", Type: "org.bluetooth.characteristic.gap.reconnection_address"},
"2a04": {Name: "Peripheral Preferred Connection Parameters", Type: "org.bluetooth.characteristic.gap.peripheral_preferred_connection_parameters"},
"2a05": {Name: "Service Changed", Type: "org.bluetooth.characteristic.gatt.service_changed"},
"2a06": {Name: "Alert Level", Type: "org.bluetooth.characteristic.alert_level"},
"2a07": {Name: "Tx Power Level", Type: "org.bluetooth.characteristic.tx_power_level"},
"2a08": {Name: "Date Time", Type: "org.bluetooth.characteristic.date_time"},
"2a09": {Name: "Day of Week", Type: "org.bluetooth.characteristic.day_of_week"},
"2a0a": {Name: "Day Date Time", Type: "org.bluetooth.characteristic.day_date_time"},
"2a0c": {Name: "Exact Time 256", Type: "org.bluetooth.characteristic.exact_time_256"},
"2a0d": {Name: "DST Offset", Type: "org.bluetooth.characteristic.dst_offset"},
"2a0e": {Name: "Time Zone", Type: "org.bluetooth.characteristic.time_zone"},
"2a0f": {Name: "Local Time Information", Type: "org.bluetooth.characteristic.local_time_information"},
"2a11": {Name: "Time with DST", Type: "org.bluetooth.characteristic.time_with_dst"},
"2a12": {Name: "Time Accuracy", Type: "org.bluetooth.characteristic.time_accuracy"},
"2a13": {Name: "Time Source", Type: "org.bluetooth.characteristic.time_source"},
"2a14": {Name: "Reference Time Information", Type: "org.bluetooth.characteristic.reference_time_information"},
"2a16": {Name: "Time Update Control Point", Type: "org.bluetooth.characteristic.time_update_control_point"},
"2a17": {Name: "Time Update State", Type: "org.bluetooth.characteristic.time_update_state"},
"2a18": {Name: "Glucose Measurement", Type: "org.bluetooth.characteristic.glucose_measurement"},
"2a19": {Name: "Battery Level", Type: "org.bluetooth.characteristic.battery_level"},
"2a1c": {Name: "Temperature Measurement", Type: "org.bluetooth.characteristic.temperature_measurement"},
"2a1d": {Name: "Temperature Type", Type: "org.bluetooth.characteristic.temperature_type"},
"2a1e": {Name: "Intermediate Temperature", Type: "org.bluetooth.characteristic.intermediate_temperature"},
"2a21": {Name: "Measurement Interval", Type: "org.bluetooth.characteristic.measurement_interval"},
"2a22": {Name: "Boot Keyboard Input Report", Type: "org.bluetooth.characteristic.boot_keyboard_input_report"},
"2a23": {Name: "System ID", Type: "org.bluetooth.characteristic.system_id"},
"2a24": {Name: "Model Number String", Type: "org.bluetooth.characteristic.model_number_string"},
"2a25": {Name: "Serial Number String", Type: "org.bluetooth.characteristic.serial_number_string"},
"2a26": {Name: "Firmware Revision String", Type: "org.bluetooth.characteristic.firmware_revision_string"},
"2a27": {Name: "Hardware Revision String", Type: "org.bluetooth.characteristic.hardware_revision_string"},
"2a28": {Name: "Software Revision String", Type: "org.bluetooth.characteristic.software_revision_string"},
"2a29": {Name: "Manufacturer Name String", Type: "org.bluetooth.characteristic.manufacturer_name_string"},
"2a2a": {Name: "IEEE 11073-20601 Regulatory Certification Data List", Type: "org.bluetooth.characteristic.ieee_11073-20601_regulatory_certification_data_list"},
"2a2b": {Name: "Current Time", Type: "org.bluetooth.characteristic.current_time"},
"2a31": {Name: "Scan Refresh", Type: "org.bluetooth.characteristic.scan_refresh"},
"2a32": {Name: "Boot Keyboard Output Report", Type: "org.bluetooth.characteristic.boot_keyboard_output_report"},
"2a33": {Name: "Boot Mouse Input Report", Type: "org.bluetooth.characteristic.boot_mouse_input_report"},
"2a34": {Name: "Glucose Measurement Context", Type: "org.bluetooth.characteristic.glucose_measurement_context"},
"2a35": {Name: "Blood Pressure Measurement", Type: "org.bluetooth.characteristic.blood_pressure_measurement"},
"2a36": {Name: "Intermediate Cuff Pressure", Type: "org.bluetooth.characteristic.intermediate_blood_pressure"},
"2a37": {Name: "Heart Rate Measurement", Type: "org.bluetooth.characteristic.heart_rate_measurement"},
"2a38": {Name: "Body Sensor Location", Type: "org.bluetooth.characteristic.body_sensor_location"},
"2a39": {Name: "Heart Rate Control Point", Type: "org.bluetooth.characteristic.heart_rate_control_point"},
"2a3f": {Name: "Alert Status", Type: "org.bluetooth.characteristic.alert_status"},
"2a40": {Name: "Ringer Control Point", Type: "org.bluetooth.characteristic.ringer_control_point"},
"2a41": {Name: "Ringer Setting", Type: "org.bluetooth.characteristic.ringer_setting"},
"2a42": {Name: "Alert Category ID Bit Mask", Type: "org.bluetooth.characteristic.alert_category_id_bit_mask"},
"2a43": {Name: "Alert Category ID", Type: "org.bluetooth.characteristic.alert_category_id"},
"2a44": {Name: "Alert Notification Control Point", Type: "org.bluetooth.characteristic.alert_notification_control_point"},
"2a45": {Name: "Unread Alert Status", Type: "org.bluetooth.characteristic.unread_alert_status"},
"2a46": {Name: "New Alert", Type: "org.bluetooth.characteristic.new_alert"},
"2a47": {Name: "Supported New Alert Category", Type: "org.bluetooth.characteristic.supported_new_alert_category"},
"2a48": {Name: "Supported Unread Alert Category", Type: "org.bluetooth.characteristic.supported_unread_alert_category"},
"2a49": {Name: "Blood Pressure Feature", Type: "org.bluetooth.characteristic.blood_pressure_feature"},
"2a4a": {Name: "HID Information", Type: "org.bluetooth.characteristic.hid_information"},
"2a4b": {Name: "Report Map", Type: "org.bluetooth.characteristic.report_map"},
"2a4c": {Name: "HID Control Point", Type: "org.bluetooth.characteristic.hid_control_point"},
"2a4d": {Name: "Report", Type: "org.bluetooth.characteristic.report"},
"2a4e": {Name: "Protocol Mode", Type: "org.bluetooth.characteristic.protocol_mode"},
"2a4f": {Name: "Scan Interval Window", Type: "org.bluetooth.characteristic.scan_interval_window"},
"2a50": {Name: "PnP ID", Type: "org.bluetooth.characteristic.pnp_id"},
"2a51": {Name: "Glucose Feature", Type: "org.bluetooth.characteristic.glucose_feature"},
"2a52": {Name: "Record Access Control Point", Type: "org.bluetooth.characteristic.record_access_control_point"},
"2a53": {Name: "RSC Measurement", Type: "org.bluetooth.characteristic.rsc_measurement"},
"2a54": {Name: "RSC Feature", Type: "org.bluetooth.characteristic.rsc_feature"},
"2a55": {Name: "SC Control Point", Type: "org.bluetooth.characteristic.sc_control_point"},
"2a5b": {Name: "CSC Measurement", Type: "org.bluetooth.characteristic.csc_measurement"},
"2a5c": {Name: "CSC Feature", Type: "org.bluetooth.characteristic.csc_feature"},
"2a5d": {Name: "Sensor Location", Type: "org.bluetooth.characteristic.sensor_location"},
} | vendor/github.com/raff/goble/characteristics.go | 0.53437 | 0.401013 | characteristics.go | starcoder |
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// terribly slow on wasm
// +build !wasm
package main
import (
"fmt"
"math/big"
"unsafe"
)
// one is a shared big.Int constant used by the truncation mask math.
var one = big.NewInt(1)

// _type describes an integer type under test: its Go name, bit width,
// and whether it is signed.
type _type struct {
	name string
	bits uint
	signed bool
}
// testvalues returns a list of all test values for this type.
// Every type gets 0, 1 and 2. Signed types add -1, -2, the maximum
// (2^(bits-1)-1), max-1, the minimum (-2^(bits-1)) and min+1; unsigned
// types add the maximum (2^bits-1) and max-1.
func (t *_type) testvalues() []*big.Int {
	var a []*big.Int
	a = append(a, big.NewInt(0))
	a = append(a, big.NewInt(1))
	a = append(a, big.NewInt(2))
	if t.signed {
		a = append(a, big.NewInt(-1))
		a = append(a, big.NewInt(-2))
		// 2^(bits-1) - 1: largest value of the signed type.
		r := big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits-1).Sub(r, big.NewInt(1)))
		r = big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits-1).Sub(r, big.NewInt(2)))
		// -2^(bits-1): smallest value of the signed type.
		r = big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits-1).Neg(r))
		r = big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits-1).Neg(r).Add(r, big.NewInt(1)))
	} else {
		// 2^bits - 1: largest value of the unsigned type.
		r := big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits).Sub(r, big.NewInt(1)))
		r = big.NewInt(1)
		a = append(a, r.Lsh(r, t.bits).Sub(r, big.NewInt(2)))
	}
	return a
}
// trunc reduces x modulo 2^bits and, for signed types, maps the result
// into the two's-complement range of the type.
func (t *_type) trunc(x *big.Int) *big.Int {
	// Keep only the low t.bits bits of x.
	mask := new(big.Int).Lsh(one, t.bits)
	mask.Sub(mask, one)
	r := new(big.Int).And(x, mask)
	// Sign-extend when the type is signed and the high bit is set.
	if t.signed && r.Bit(int(t.bits)-1) == 1 {
		ext := new(big.Int).Neg(one)
		ext.Lsh(ext, t.bits)
		r.Or(r, ext)
	}
	return r
}
// types lists every integer type exercised by the generator, with its bit
// width and signedness. Widths for int, uint, and uintptr are computed for
// the platform the generator runs on.
// Element type names are omitted per gofmt -s (simplified composite literals).
var types = []_type{
	{"byte", 8, false},
	{"int8", 8, true},
	{"uint8", 8, false},
	{"rune", 32, true},
	{"int16", 16, true},
	{"uint16", 16, false},
	{"int32", 32, true},
	{"uint32", 32, false},
	{"int64", 64, true},
	{"uint64", 64, false},
	{"int", 8 * uint(unsafe.Sizeof(int(0))), true},
	{"uint", 8 * uint(unsafe.Sizeof(uint(0))), false},
	{"uintptr", 8 * uint(unsafe.Sizeof((*byte)(nil))), false},
}
type binop struct {
name string
eval func(x, y *big.Int) *big.Int
}
var binops = []binop{
binop{"+", func(x, y *big.Int) *big.Int { return new(big.Int).Add(x, y) }},
binop{"-", func(x, y *big.Int) *big.Int { return new(big.Int).Sub(x, y) }},
binop{"*", func(x, y *big.Int) *big.Int { return new(big.Int).Mul(x, y) }},
binop{"/", func(x, y *big.Int) *big.Int { return new(big.Int).Quo(x, y) }},
binop{"%", func(x, y *big.Int) *big.Int { return new(big.Int).Rem(x, y) }},
binop{"&", func(x, y *big.Int) *big.Int { return new(big.Int).And(x, y) }},
binop{"|", func(x, y *big.Int) *big.Int { return new(big.Int).Or(x, y) }},
binop{"^", func(x, y *big.Int) *big.Int { return new(big.Int).Xor(x, y) }},
binop{"&^", func(x, y *big.Int) *big.Int { return new(big.Int).AndNot(x, y) }},
}
type unop struct {
name string
eval func(x *big.Int) *big.Int
}
var unops = []unop{
unop{"+", func(x *big.Int) *big.Int { return new(big.Int).Set(x) }},
unop{"-", func(x *big.Int) *big.Int { return new(big.Int).Neg(x) }},
unop{"^", func(x *big.Int) *big.Int { return new(big.Int).Not(x) }},
}
type shiftop struct {
name string
eval func(x *big.Int, i uint) *big.Int
}
var shiftops = []shiftop{
shiftop{"<<", func(x *big.Int, i uint) *big.Int { return new(big.Int).Lsh(x, i) }},
shiftop{">>", func(x *big.Int, i uint) *big.Int { return new(big.Int).Rsh(x, i) }},
}
// valname returns the name of n as can be used as part of a variable name.
func valname(n *big.Int) string {
s := fmt.Sprintf("%d", n)
if s[0] == '-' {
s = "neg" + s[1:]
}
return s
}
// main writes to stdout a self-checking Go program: one global variable per
// (type, test value) pair, followed by a main that compares every operator
// applied to those variables against the big.Int reference result computed
// here, printing "bad: <eqn>" on mismatch.
func main() {
	fmt.Println("package main")
	// We make variables to hold all the different values we'd like to use.
	// We use global variables to prevent any constant folding.
	for _, t := range types {
		for _, n := range t.testvalues() {
			fmt.Printf("var %s_%s %s = %d\n", t.name, valname(n), t.name, n)
		}
	}
	fmt.Println("func main() {")
	for _, t := range types {
		// test binary ops
		for _, op := range binops {
			for _, x := range t.testvalues() {
				for _, y := range t.testvalues() {
					// Skip division/remainder by zero: undefined in Go,
					// and big.Int.Quo/Rem would panic.
					if (op.name == "/" || op.name == "%") && y.Sign() == 0 {
						continue
					}
					r := t.trunc(op.eval(x, y))
					eqn := fmt.Sprintf("%s_%s %s %s_%s != %d", t.name, valname(x), op.name, t.name, valname(y), r)
					fmt.Printf("\tif %s { println(\"bad: %s\") }\n", eqn, eqn)
				}
			}
		}
		// test unary ops
		for _, op := range unops {
			for _, x := range t.testvalues() {
				r := t.trunc(op.eval(x))
				eqn := fmt.Sprintf("%s %s_%s != %d", op.name, t.name, valname(x), r)
				fmt.Printf("\tif %s { println(\"bad: %s\") }\n", eqn, eqn)
			}
		}
		// test shifts, including counts at and past the type's width
		for _, op := range shiftops {
			for _, x := range t.testvalues() {
				for _, i := range []uint{0, 1, t.bits - 2, t.bits - 1, t.bits, t.bits + 1} {
					r := t.trunc(op.eval(x, i))
					eqn := fmt.Sprintf("%s_%s %s %d != %d", t.name, valname(x), op.name, i, r)
					fmt.Printf("\tif %s { println(\"bad: %s\") }\n", eqn, eqn)
				}
			}
		}
	}
	fmt.Println("}")
}
package schema
import (
"fmt"
"github.com/vmware-tanzu/carvel-ytt/pkg/filepos"
"github.com/vmware-tanzu/carvel-ytt/pkg/yamlmeta"
)
// Type encapsulates a schema that describes a yamlmeta.Node.
type Type interface {
	AssignTypeTo(node yamlmeta.Node) TypeCheck
	CheckType(node yamlmeta.Node) TypeCheck
	GetValueType() Type
	GetDefaultValue() interface{}
	SetDefaultValue(interface{})
	GetDefinitionPosition() *filepos.Position
	GetDescription() string
	SetDescription(string)
	GetTitle() string
	SetTitle(string)
	GetExamples() []Example
	SetExamples([]Example)
	IsDeprecated() (bool, string)
	SetDeprecated(bool, string)
	String() string
}

// Compile-time assertions that every concrete schema type satisfies Type.
var _ Type = (*DocumentType)(nil)
var _ Type = (*MapType)(nil)
var _ Type = (*MapItemType)(nil)
var _ Type = (*ArrayType)(nil)
var _ Type = (*ArrayItemType)(nil)
var _ Type = (*ScalarType)(nil)
var _ Type = (*AnyType)(nil)
var _ Type = (*NullType)(nil)

// DocumentType describes the schema of a whole YAML document.
type DocumentType struct {
	Source       *yamlmeta.Document
	ValueType    Type // typically one of: MapType, ArrayType, ScalarType
	Position     *filepos.Position
	defaultValue interface{}
}

// MapType describes a map and the set of keys it may contain.
type MapType struct {
	Items         []*MapItemType
	Position      *filepos.Position
	documentation documentation
}

// MapItemType describes one key/value pair within a MapType.
type MapItemType struct {
	Key          interface{} // usually a string
	ValueType    Type
	Position     *filepos.Position
	defaultValue interface{}
}

// ArrayType describes an array whose elements all share a single item type.
type ArrayType struct {
	ItemsType     Type
	Position      *filepos.Position
	defaultValue  interface{}
	documentation documentation
}

// ArrayItemType describes the type of each element of an ArrayType.
type ArrayItemType struct {
	ValueType    Type
	Position     *filepos.Position
	defaultValue interface{}
}

// ScalarType describes a scalar value; ValueType holds one of the scalar
// constants below, identifying the scalar's Go type.
type ScalarType struct {
	ValueType     interface{}
	Position      *filepos.Position
	defaultValue  interface{}
	documentation documentation
}

// AnyType permits a value of any type and shape at this position.
type AnyType struct {
	defaultValue  interface{}
	Position      *filepos.Position
	documentation documentation
}

// NullType permits null in addition to the values allowed by ValueType.
type NullType struct {
	ValueType     Type
	Position      *filepos.Position
	documentation documentation
}

// The total set of supported scalars. Each constant is a zero value whose
// Go type (float64, string, int64, bool) identifies the scalar kind.
const (
	FloatType  = float64(0)
	StringType = ""
	IntType    = int64(0)
	BoolType   = false
)
// GetValueType provides the type of the document's value.
func (t *DocumentType) GetValueType() Type {
	return t.ValueType
}

// GetValueType panics: a map has no single value type (each key carries its
// own MapItemType).
func (m MapType) GetValueType() Type {
	panic("Not implemented because it is unreachable")
}

// GetValueType provides the type of this map item's value.
func (t MapItemType) GetValueType() Type {
	return t.ValueType
}

// GetValueType provides the type shared by all items of the array.
func (a ArrayType) GetValueType() Type {
	return a.ItemsType
}

// GetValueType provides the type of an array element's value.
func (a ArrayItemType) GetValueType() Type {
	return a.ValueType
}

// GetValueType panics: it is never expected to be called on a ScalarType.
func (s ScalarType) GetValueType() Type {
	panic("Not implemented because it is unreachable")
}

// GetValueType returns the AnyType itself: any value is permitted.
// NOTE(review): the pointer refers to a copy of the receiver, so each call
// yields a distinct *AnyType.
func (a AnyType) GetValueType() Type {
	return &a
}

// GetValueType provides the type of the non-null value.
func (n NullType) GetValueType() Type {
	return n.ValueType
}

// GetDefaultValue provides the document's default, wrapped in a
// yamlmeta.Document carrying this type's position.
func (t DocumentType) GetDefaultValue() interface{} {
	return &yamlmeta.Document{Value: t.defaultValue, Position: t.Position}
}

// GetDefaultValue provides the default value: a yamlmeta.Map assembled from
// the default of every declared item.
func (m MapType) GetDefaultValue() interface{} {
	defaultValues := &yamlmeta.Map{Position: m.Position}
	for _, item := range m.Items {
		newItem := item.GetDefaultValue()
		defaultValues.Items = append(defaultValues.Items, newItem.(*yamlmeta.MapItem))
	}
	return defaultValues
}

// GetDefaultValue provides the default value, wrapped in a yamlmeta.MapItem
// keyed by this item's key.
func (t MapItemType) GetDefaultValue() interface{} {
	return &yamlmeta.MapItem{Key: t.Key, Value: t.defaultValue, Position: t.Position}
}

// GetDefaultValue provides the default value of the array.
func (a ArrayType) GetDefaultValue() interface{} {
	return a.defaultValue
}

// GetDefaultValue panics: it is never expected to be called on an
// ArrayItemType.
func (a ArrayItemType) GetDefaultValue() interface{} {
	panic(fmt.Sprintf("Unexpected call to GetDefaultValue() on %+v", a))
}

// GetDefaultValue provides the default value.
func (s ScalarType) GetDefaultValue() interface{} {
	return s.defaultValue // scalar values are copied (even through an interface{} reference)
}

// GetDefaultValue provides the default value; node defaults are deep-copied
// so callers cannot mutate the schema's own copy.
func (a AnyType) GetDefaultValue() interface{} {
	if node, ok := a.defaultValue.(yamlmeta.Node); ok {
		return node.DeepCopyAsInterface()
	}
	return a.defaultValue
}

// GetDefaultValue provides the default value, which is always nil for null.
func (n NullType) GetDefaultValue() interface{} {
	return nil
}
// SetDefaultValue sets the default value of the entire document to `val`
func (t *DocumentType) SetDefaultValue(val interface{}) {
	t.defaultValue = val
}

// SetDefaultValue is ignored as default values should be set on each MapItemType, individually.
func (m *MapType) SetDefaultValue(val interface{}) {
	// TODO: determine if we should set the contents of a MapType by setting the given Map...?
}

// SetDefaultValue sets the default value to `val`
func (t *MapItemType) SetDefaultValue(val interface{}) {
	t.defaultValue = val
}

// SetDefaultValue sets the default value to `val`
func (a *ArrayType) SetDefaultValue(val interface{}) {
	a.defaultValue = val
}

// SetDefaultValue sets the default value to `val`
func (a *ArrayItemType) SetDefaultValue(val interface{}) {
	a.defaultValue = val
}

// SetDefaultValue sets the default value to `val`
func (s *ScalarType) SetDefaultValue(val interface{}) {
	s.defaultValue = val
}

// SetDefaultValue sets the default value to `val`
func (a *AnyType) SetDefaultValue(val interface{}) {
	a.defaultValue = val
}

// SetDefaultValue sets the default value of the wrapped (non-null) type to `val`
func (n *NullType) SetDefaultValue(val interface{}) {
	n.GetValueType().SetDefaultValue(val)
}
// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (t *DocumentType) GetDefinitionPosition() *filepos.Position {
	return t.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (m MapType) GetDefinitionPosition() *filepos.Position {
	return m.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (t MapItemType) GetDefinitionPosition() *filepos.Position {
	return t.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (a ArrayType) GetDefinitionPosition() *filepos.Position {
	return a.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (a ArrayItemType) GetDefinitionPosition() *filepos.Position {
	return a.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (s ScalarType) GetDefinitionPosition() *filepos.Position {
	return s.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (a AnyType) GetDefinitionPosition() *filepos.Position {
	return a.Position
}

// GetDefinitionPosition reports the location in source schema that contains this type definition.
func (n NullType) GetDefinitionPosition() *filepos.Position {
	return n.Position
}

// GetDescription returns the empty string: DocumentType carries no
// description of its own.
func (t *DocumentType) GetDescription() string {
	return ""
}

// GetDescription provides descriptive information
func (m *MapType) GetDescription() string {
	return m.documentation.description
}

// GetDescription returns the empty string: MapItemType carries no
// description of its own.
func (t *MapItemType) GetDescription() string {
	return ""
}

// GetDescription provides descriptive information
func (a *ArrayType) GetDescription() string {
	return a.documentation.description
}

// GetDescription returns the empty string: ArrayItemType carries no
// description of its own.
func (a *ArrayItemType) GetDescription() string {
	return ""
}

// GetDescription provides descriptive information
func (s *ScalarType) GetDescription() string {
	return s.documentation.description
}

// GetDescription provides descriptive information
func (a *AnyType) GetDescription() string {
	return a.documentation.description
}

// GetDescription provides descriptive information
func (n *NullType) GetDescription() string {
	return n.documentation.description
}

// SetDescription is a no-op: DocumentType does not store a description.
func (t *DocumentType) SetDescription(desc string) {}

// SetDescription sets the description of the type
func (m *MapType) SetDescription(desc string) {
	m.documentation.description = desc
}

// SetDescription is a no-op: MapItemType does not store a description.
func (t *MapItemType) SetDescription(desc string) {}

// SetDescription sets the description of the type
func (a *ArrayType) SetDescription(desc string) {
	a.documentation.description = desc
}

// SetDescription is a no-op: ArrayItemType does not store a description.
func (a *ArrayItemType) SetDescription(desc string) {}

// SetDescription sets the description of the type
func (s *ScalarType) SetDescription(desc string) {
	s.documentation.description = desc
}

// SetDescription sets the description of the type
func (a *AnyType) SetDescription(desc string) {
	a.documentation.description = desc
}

// SetDescription sets the description of the type
func (n *NullType) SetDescription(desc string) {
	n.documentation.description = desc
}
// GetTitle returns the empty string: DocumentType carries no title.
func (t *DocumentType) GetTitle() string {
	return ""
}

// GetTitle provides title information
func (m *MapType) GetTitle() string {
	return m.documentation.title
}

// GetTitle returns the empty string: MapItemType carries no title.
func (t *MapItemType) GetTitle() string {
	return ""
}

// GetTitle provides title information
func (a *ArrayType) GetTitle() string {
	return a.documentation.title
}

// GetTitle returns the empty string: ArrayItemType carries no title.
func (a *ArrayItemType) GetTitle() string {
	return ""
}

// GetTitle provides title information
func (s *ScalarType) GetTitle() string {
	return s.documentation.title
}

// GetTitle provides title information
func (a *AnyType) GetTitle() string {
	return a.documentation.title
}

// GetTitle provides title information
func (n *NullType) GetTitle() string {
	return n.documentation.title
}

// SetTitle is a no-op: DocumentType does not store a title.
func (t *DocumentType) SetTitle(title string) {}

// SetTitle sets the title of the type
func (m *MapType) SetTitle(title string) {
	m.documentation.title = title
}

// SetTitle is a no-op: MapItemType does not store a title.
func (t *MapItemType) SetTitle(title string) {}

// SetTitle sets the title of the type
func (a *ArrayType) SetTitle(title string) {
	a.documentation.title = title
}

// SetTitle is a no-op: ArrayItemType does not store a title.
func (a *ArrayItemType) SetTitle(title string) {}

// SetTitle sets the title of the type
func (s *ScalarType) SetTitle(title string) {
	s.documentation.title = title
}

// SetTitle sets the title of the type
func (a *AnyType) SetTitle(title string) {
	a.documentation.title = title
}

// SetTitle sets the title of the type
func (n *NullType) SetTitle(title string) {
	n.documentation.title = title
}

// GetExamples returns nil: DocumentType carries no examples.
func (t *DocumentType) GetExamples() []Example {
	return nil
}

// GetExamples provides descriptive example information
func (m *MapType) GetExamples() []Example {
	return m.documentation.examples
}

// GetExamples returns nil: MapItemType carries no examples.
func (t *MapItemType) GetExamples() []Example {
	return nil
}

// GetExamples provides descriptive example information
func (a *ArrayType) GetExamples() []Example {
	return a.documentation.examples
}

// GetExamples returns nil: ArrayItemType carries no examples.
func (a *ArrayItemType) GetExamples() []Example {
	return nil
}

// GetExamples provides descriptive example information
func (s *ScalarType) GetExamples() []Example {
	return s.documentation.examples
}

// GetExamples provides descriptive example information
func (a *AnyType) GetExamples() []Example {
	return a.documentation.examples
}

// GetExamples provides descriptive example information
func (n *NullType) GetExamples() []Example {
	return n.documentation.examples
}

// SetExamples is a no-op: DocumentType does not store examples.
func (t *DocumentType) SetExamples(data []Example) {}

// SetExamples sets the examples of the type
func (m *MapType) SetExamples(exs []Example) {
	m.documentation.examples = exs
}

// SetExamples is a no-op: MapItemType does not store examples.
func (t *MapItemType) SetExamples(exs []Example) {}

// SetExamples sets the examples of the type
func (a *ArrayType) SetExamples(exs []Example) {
	a.documentation.examples = exs
}

// SetExamples is a no-op: ArrayItemType does not store examples.
func (a *ArrayItemType) SetExamples(exs []Example) {}

// SetExamples sets the examples of the type
func (s *ScalarType) SetExamples(exs []Example) {
	s.documentation.examples = exs
}

// SetExamples sets the examples of the type
func (a *AnyType) SetExamples(exs []Example) {
	a.documentation.examples = exs
}

// SetExamples sets the examples of the type
func (n *NullType) SetExamples(exs []Example) {
	n.documentation.examples = exs
}
// IsDeprecated always reports false: DocumentType cannot be deprecated.
func (t *DocumentType) IsDeprecated() (bool, string) {
	return false, ""
}

// IsDeprecated reports whether the type is deprecated, and the notice to show.
func (m *MapType) IsDeprecated() (bool, string) {
	return m.documentation.deprecated, m.documentation.deprecationNotice
}

// IsDeprecated always reports false: MapItemType cannot be deprecated.
func (t *MapItemType) IsDeprecated() (bool, string) {
	return false, ""
}

// IsDeprecated reports whether the type is deprecated, and the notice to show.
func (a *ArrayType) IsDeprecated() (bool, string) {
	return a.documentation.deprecated, a.documentation.deprecationNotice
}

// IsDeprecated always reports false: ArrayItemType cannot be deprecated.
func (a *ArrayItemType) IsDeprecated() (bool, string) {
	return false, ""
}

// IsDeprecated reports whether the type is deprecated, and the notice to show.
func (s *ScalarType) IsDeprecated() (bool, string) {
	return s.documentation.deprecated, s.documentation.deprecationNotice
}

// IsDeprecated reports whether the type is deprecated, and the notice to show.
func (a *AnyType) IsDeprecated() (bool, string) {
	return a.documentation.deprecated, a.documentation.deprecationNotice
}

// IsDeprecated reports whether the type is deprecated, and the notice to show.
func (n *NullType) IsDeprecated() (bool, string) {
	return n.documentation.deprecated, n.documentation.deprecationNotice
}

// SetDeprecated is a no-op: DocumentType does not track deprecation.
func (t *DocumentType) SetDeprecated(deprecated bool, notice string) {}

// SetDeprecated sets the deprecated field value
func (m *MapType) SetDeprecated(deprecated bool, notice string) {
	m.documentation.deprecationNotice = notice
	m.documentation.deprecated = deprecated
}

// SetDeprecated is a no-op: MapItemType does not track deprecation.
func (t *MapItemType) SetDeprecated(deprecated bool, notice string) {}

// SetDeprecated sets the deprecated field value
func (a *ArrayType) SetDeprecated(deprecated bool, notice string) {
	a.documentation.deprecationNotice = notice
	a.documentation.deprecated = deprecated
}

// SetDeprecated is a no-op: ArrayItemType does not track deprecation.
func (a *ArrayItemType) SetDeprecated(deprecated bool, notice string) {}

// SetDeprecated sets the deprecated field value
func (s *ScalarType) SetDeprecated(deprecated bool, notice string) {
	s.documentation.deprecationNotice = notice
	s.documentation.deprecated = deprecated
}

// SetDeprecated sets the deprecated field value
func (a *AnyType) SetDeprecated(deprecated bool, notice string) {
	a.documentation.deprecationNotice = notice
	a.documentation.deprecated = deprecated
}

// SetDeprecated sets the deprecated field value
func (n *NullType) SetDeprecated(deprecated bool, notice string) {
	n.documentation.deprecationNotice = notice
	n.documentation.deprecated = deprecated
}

// String produces a user-friendly name of the expected type.
func (t *DocumentType) String() string {
	return yamlmeta.TypeName(&yamlmeta.Document{})
}

// String produces a user-friendly name of the expected type.
func (m MapType) String() string {
	return yamlmeta.TypeName(&yamlmeta.Map{})
}

// String produces a user-friendly name of the expected type,
// rendered as "key: <value type>".
func (t MapItemType) String() string {
	return fmt.Sprintf("%s: %s", t.Key, t.ValueType.String())
}

// String produces a user-friendly name of the expected type.
func (a ArrayType) String() string {
	return yamlmeta.TypeName(&yamlmeta.Array{})
}

// String produces a user-friendly name of the expected type,
// rendered as "- <value type>".
func (a ArrayItemType) String() string {
	return fmt.Sprintf("- %s", a.ValueType.String())
}

// String produces a user-friendly name of the expected type.
func (s ScalarType) String() string {
	return yamlmeta.TypeName(s.ValueType)
}

// String produces a user-friendly name of the expected type.
func (a AnyType) String() string {
	return "any"
}

// String produces a user-friendly name of the expected type.
func (n NullType) String() string {
	return "null"
}
package ridl
// NodeType represents the type of a parser tree node.
type NodeType uint

// The set of node types a parse tree may contain; RootNodeType is the zero
// value.
const (
	RootNodeType NodeType = iota
	TokenNodeType
	DefinitionNodeType
	ImportNodeType
	EnumNodeType
	MessageNodeType
	ArgumentNodeType
	MethodNodeType
	ServiceNodeType
)

// Node represents a parser tree node: a span of source (Start/End byte
// offsets) tagged with its NodeType.
type Node interface {
	Start() int
	End() int
	Type() NodeType
}
// node is the common embedded base for parser tree nodes: a source span
// plus an ordered list of child nodes.
type node struct {
	children []Node
	start    int
	end      int
}

// Start reports the byte offset where this node begins.
func (nd *node) Start() int {
	return nd.start
}

// End reports the byte offset where this node ends.
func (nd *node) End() int {
	return nd.end
}

// Push appends child to this node's list of children.
func (nd *node) Push(child Node) {
	nd.children = append(nd.children, child)
}

// Children returns all direct children of this node.
func (nd *node) Children() []Node {
	return nd.children
}

// Filter returns the direct children whose node type equals nt.
func (nd *node) Filter(nt NodeType) []Node {
	matched := make([]Node, 0, len(nd.children))
	for _, child := range nd.children {
		if child.Type() == nt {
			matched = append(matched, child)
		}
	}
	return matched
}
// RootNode is the top of the parse tree; its children are the file's
// top-level declarations.
type RootNode struct {
	node
}

// Definitions returns the tree's top-level definition nodes.
func (rn RootNode) Definitions() []*DefinitionNode {
	matched := rn.Filter(DefinitionNodeType)
	result := make([]*DefinitionNode, 0, len(matched))
	for _, nd := range matched {
		result = append(result, nd.(*DefinitionNode))
	}
	return result
}

// Imports returns the tree's import nodes.
func (rn RootNode) Imports() []*ImportNode {
	matched := rn.Filter(ImportNodeType)
	result := make([]*ImportNode, 0, len(matched))
	for _, nd := range matched {
		result = append(result, nd.(*ImportNode))
	}
	return result
}

// Messages returns the tree's message nodes.
func (rn RootNode) Messages() []*MessageNode {
	matched := rn.Filter(MessageNodeType)
	result := make([]*MessageNode, 0, len(matched))
	for _, nd := range matched {
		result = append(result, nd.(*MessageNode))
	}
	return result
}

// Enums returns the tree's enum nodes.
func (rn RootNode) Enums() []*EnumNode {
	matched := rn.Filter(EnumNodeType)
	result := make([]*EnumNode, 0, len(matched))
	for _, nd := range matched {
		result = append(result, nd.(*EnumNode))
	}
	return result
}

// Services returns the tree's service nodes.
func (rn RootNode) Services() []*ServiceNode {
	matched := rn.Filter(ServiceNodeType)
	result := make([]*ServiceNode, 0, len(matched))
	for _, nd := range matched {
		result = append(result, nd.(*ServiceNode))
	}
	return result
}

// Type reports this node as the tree root.
func (rn RootNode) Type() NodeType {
	return RootNodeType
}
// DefinitionNode represents a single left/right definition (typically a
// name paired with a type), optionally marked optional and annotated with
// metadata definitions.
type DefinitionNode struct {
	node
	leftNode  *TokenNode
	rightNode *TokenNode
	optional  bool
	meta      []*DefinitionNode
}

// Meta returns the metadata definitions attached to this definition.
func (dn DefinitionNode) Meta() []*DefinitionNode {
	return dn.meta
}

// Type reports this node as a definition node.
func (dn DefinitionNode) Type() NodeType {
	return DefinitionNodeType
}

// Left returns the left-hand token; never nil — the shared invalid token
// stands in when the node was not populated.
func (dn DefinitionNode) Left() *TokenNode {
	if dn.leftNode == nil {
		return invalidToken
	}
	return dn.leftNode
}

// Right returns the right-hand token; never nil — the shared invalid token
// stands in when the node was not populated.
func (dn DefinitionNode) Right() *TokenNode {
	if dn.rightNode == nil {
		return invalidToken
	}
	return dn.rightNode
}

// Optional reports whether this definition was marked optional.
func (dn DefinitionNode) Optional() bool {
	return dn.optional
}
// TokenNode wraps a single lexer token as a parser tree node.
type TokenNode struct {
	node
	tok *token
}

// invalidToken is a shared sentinel returned in place of nil token nodes.
var invalidToken = &TokenNode{
	tok: &token{tt: tokenInvalid},
}

// newTokenNode builds a TokenNode whose span covers the token's position
// and literal length.
func newTokenNode(tok *token) *TokenNode {
	return &TokenNode{
		node: node{
			start: tok.pos,
			end:   tok.pos + len(tok.val),
		},
		tok: tok,
	}
}

// String returns the token's literal value, or "" when no token is set.
func (tn TokenNode) String() string {
	if tn.tok == nil {
		return ""
	}
	return tn.tok.val
}

// Type reports this node as a token node.
func (tn TokenNode) Type() NodeType {
	return TokenNodeType
}
// ImportNode represents an import statement: a file path plus the list of
// imported member tokens.
type ImportNode struct {
	node
	path    *TokenNode
	members []*TokenNode
}

// Members returns the imported member tokens.
func (in ImportNode) Members() []*TokenNode {
	return in.members
}

// Path returns the imported file path token; never nil — the shared invalid
// token stands in when the path was not populated.
func (in ImportNode) Path() *TokenNode {
	if in.path == nil {
		return invalidToken
	}
	return in.path
}

// Type reports this node as an import node.
func (in ImportNode) Type() NodeType {
	return ImportNodeType
}
// EnumNode represents an enum declaration: its name, underlying type, and
// value definitions.
type EnumNode struct {
	node
	name     *TokenNode
	enumType *TokenNode
	values   []*DefinitionNode
}

// Type reports this node as an enum node.
func (en EnumNode) Type() NodeType {
	return EnumNodeType
}

// Name returns the enum's name token.
func (en EnumNode) Name() *TokenNode {
	return en.name
}

// TypeName returns the enum's underlying type token.
func (en EnumNode) TypeName() *TokenNode {
	return en.enumType
}

// Values returns the enum's value definitions.
func (en EnumNode) Values() []*DefinitionNode {
	return en.values
}
// MessageNode represents a message declaration: its name and field
// definitions.
type MessageNode struct {
	node
	name   *TokenNode
	fields []*DefinitionNode
}

// Name returns the message's name token.
// (Receiver changed to a pointer for consistency with Type and Fields;
// mixed value/pointer receivers on one type are a lint violation.)
func (mn *MessageNode) Name() *TokenNode {
	return mn.name
}

// Type reports this node as a message node.
func (mn *MessageNode) Type() NodeType {
	return MessageNodeType
}

// Fields returns the message's field definitions.
func (mn *MessageNode) Fields() []*DefinitionNode {
	return mn.fields
}
// ArgumentNode represents one method input or output argument: its name,
// type, and flags.
type ArgumentNode struct {
	node
	name         *TokenNode
	argumentType *TokenNode
	optional     bool
	stream       bool //TODO: should be deprecated
}

// Name returns the argument's name token; never nil — the shared invalid
// token stands in when the name was not populated.
func (an *ArgumentNode) Name() *TokenNode {
	if an.name == nil {
		return invalidToken
	}
	return an.name
}

// TypeName returns the argument's type token; never nil — the shared
// invalid token stands in when the type was not populated.
func (an *ArgumentNode) TypeName() *TokenNode {
	if an.argumentType == nil {
		return invalidToken
	}
	return an.argumentType
}

// Optional reports whether the argument was marked optional.
func (an *ArgumentNode) Optional() bool {
	return an.optional
}

// Type reports this node as an argument node.
func (an *ArgumentNode) Type() NodeType {
	return ArgumentNodeType
}
// MethodNode represents a service method: its name, proxy flag, and input
// and output argument lists.
// NOTE(review): unlike the other node structs, MethodNode does not embed
// node and therefore does not satisfy the Node interface — confirm this is
// intentional.
type MethodNode struct {
	name    *TokenNode
	proxy   bool
	inputs  argumentList
	outputs argumentList
}

// Name returns the method's name token.
func (mn *MethodNode) Name() *TokenNode {
	return mn.name
}

// Proxy reports whether the method was declared as a proxy.
func (mn *MethodNode) Proxy() bool {
	return mn.proxy
}

// StreamInput reports whether the method's inputs were declared as a stream.
func (mn *MethodNode) StreamInput() bool {
	return mn.inputs.stream
}

// StreamOutput reports whether the method's outputs were declared as a stream.
func (mn *MethodNode) StreamOutput() bool {
	return mn.outputs.stream
}

// Inputs returns the method's input arguments.
func (mn *MethodNode) Inputs() []*ArgumentNode {
	return mn.inputs.arguments
}

// Outputs returns the method's output arguments.
func (mn *MethodNode) Outputs() []*ArgumentNode {
	return mn.outputs.arguments
}
// ServiceNode represents a service declaration and its methods.
type ServiceNode struct {
	node
	name    *TokenNode
	methods []*MethodNode
}

// Type reports this node as a service node.
func (sn ServiceNode) Type() NodeType {
	return ServiceNodeType
}

// Name returns the service's name token.
func (sn ServiceNode) Name() *TokenNode {
	return sn.name
}

// Methods returns the service's method declarations.
func (sn ServiceNode) Methods() []*MethodNode {
	return sn.methods
}

// argumentList groups a method's arguments with the stream flag for that
// side of the signature.
type argumentList struct {
	stream    bool
	arguments []*ArgumentNode
}
package main
import "fmt"
/*
Given an array of integers nums sorted in ascending order, find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
给定一个按照升序排列的整数数组 nums,和一个目标值 target。找出给定目标值在数组中的开始位置和结束位置。
你的算法时间复杂度必须是 O(log n) 级别。
如果数组中不存在目标值,返回 [-1, -1]。
示例 1:
输入: nums = [5,7,7,8,8,10], target = 8
输出: [3,4]
示例 2:
输入: nums = [5,7,7,8,8,10], target = 6
输出: [-1,-1]
*/
// Approach: this is clearly another binary-search problem — run binary
// search twice with a small twist each time. The first pass finds the
// leftmost occurrence, the second the rightmost. On a match we do not
// return immediately: we check whether an equal neighbor exists on the
// relevant side and keep narrowing the range until only the edge
// occurrence remains.

// findFirst returns the index of the leftmost occurrence of target within
// nums[low..high] of the ascending slice nums, or -1 when target is absent.
func findFirst(nums []int, low, high, target int) int {
	// Interval exhausted: target is not present.
	if low > high {
		return -1
	}
	mid := (low + high) / 2
	switch {
	case nums[mid] == target:
		// Leftmost possible slot — nothing further left to inspect.
		if mid == 0 {
			return 0
		}
		// A duplicate sits immediately to the left; keep searching left.
		if nums[mid-1] == target {
			return findFirst(nums, low, mid-1, target)
		}
		// mid-1 differs, so mid is the first occurrence.
		return mid
	case target < nums[mid] && target >= nums[low]:
		return findFirst(nums, low, mid-1, target)
	case target > nums[mid] && target <= nums[high]:
		return findFirst(nums, mid+1, high, target)
	default:
		return -1
	}
}
// findLast returns the index of the rightmost occurrence of target within
// nums[low..high] of the ascending slice nums, or -1 when target is absent.
func findLast(nums []int, low, high, target int) int {
	if low > high {
		return -1
	}
	mid := (low + high) / 2
	if nums[mid] == target {
		// Already at the right edge of the slice: nothing further right.
		// (Fixed: previously returned `high`, which equals mid here only by
		// arithmetic coincidence; `mid` is the index that was matched.)
		if mid == len(nums)-1 {
			return mid
		}
		// A duplicate sits immediately to the right; keep searching right.
		if nums[mid+1] == target {
			return findLast(nums, mid+1, high, target)
		}
		return mid
	}
	if target < nums[mid] && target >= nums[low] {
		return findLast(nums, low, mid-1, target)
	} else if target > nums[mid] && target <= nums[high] {
		return findLast(nums, mid+1, high, target)
	} else {
		return -1
	}
}
// searchRange returns the first and last index of target in the sorted
// slice nums as a two-element slice, or [-1, -1] when target is absent.
func searchRange(nums []int, target int) []int {
	last := len(nums) - 1
	return []int{
		findFirst(nums, 0, last, target),
		findLast(nums, 0, last, target),
	}
}
// main demonstrates searchRange on a small sorted slice and prints the
// resulting [first, last] index pair.
func main() {
	// input := []int{5, 7, 7, 8, 8, 10}
	input := []int{2, 2}
	// fmt.Println(searchRange(input, 8))
	fmt.Println(searchRange(input, 2)) // expect [0 1]
}
// +build !wasm
package audio
import (
"github.com/adamlenda/engine/audio/al"
"github.com/adamlenda/engine/core"
"github.com/adamlenda/engine/gls"
"github.com/adamlenda/engine/math32"
)
// Listener is an audio listener positioned in space.
// It embeds core.Node so it can be placed in the scene graph; its world
// transform is pushed to the OpenAL listener each frame (see Render).
type Listener struct {
	core.Node
}

// NewListener creates a Listener object with its embedded node initialized.
func NewListener() *Listener {
	l := new(Listener)
	l.Node.Init(l)
	return l
}
// SetVelocity sets the velocity of the listener with x, y, z components.
func (l *Listener) SetVelocity(vx, vy, vz float32) {
	al.Listener3f(al.Velocity, vx, vy, vz)
}

// SetVelocityVec sets the velocity of the listener with a vector.
func (l *Listener) SetVelocityVec(v *math32.Vector3) {
	al.Listener3f(al.Velocity, v.X, v.Y, v.Z)
}

// Velocity returns the velocity of the listener as x, y, z components.
func (l *Listener) Velocity() (float32, float32, float32) {
	return al.GetListener3f(al.Velocity)
}
// VelocityVec returns the velocity of the listener as a vector.
func (l *Listener) VelocityVec() math32.Vector3 {
	vx, vy, vz := al.GetListener3f(al.Velocity)
	// Keyed fields: literals of structs from other packages should not rely
	// on field order.
	return math32.Vector3{X: vx, Y: vy, Z: vz}
}
// SetGain sets the gain of the listener.
func (l *Listener) SetGain(gain float32) {
	al.Listenerf(al.Gain, gain)
}

// Gain returns the gain of the listener.
func (l *Listener) Gain() float32 {
	return al.GetListenerf(al.Gain)
}
// Render is called by the renderer at each frame.
// Updates the position and orientation of the listener.
func (l *Listener) Render(gl *gls.GLS) {
// Sets the listener source world position
var wpos math32.Vector3
l.WorldPosition(&wpos)
al.Listener3f(al.Position, wpos.X, wpos.Y, wpos.Z)
// Get listener current world direction
var vdir math32.Vector3
l.WorldDirection(&vdir)
// Assumes initial UP vector and recalculates current up vector
vup := math32.Vector3{0, 1, 0}
var vright math32.Vector3
vright.CrossVectors(&vdir, &vup)
vup.CrossVectors(&vright, &vdir)
// Sets the listener orientation
orientation := []float32{vdir.X, vdir.Y, vdir.Z, vup.X, vup.Y, vup.Z}
al.Listenerfv(al.Orientation, orientation)
} | audio/listener-desktop.go | 0.814533 | 0.410431 | listener-desktop.go | starcoder |
package main
import (
"errors"
"fmt"
"math"
"strconv"
blt "bearlibterminal"
)
const (
	// Values that are important for creating and backtracking graph.
	nodeInitialWeight = -1 // Marks a node as not yet traversed.
)

// Node is struct that mimics some properties of the Tile struct
// (implemented in map.go). X, Y are the node's coordinates, and Weight is
// the value used while building the graph and later backtracking the
// shortest path from a source (creature) to a goal (coords). Weight is
// initially nodeInitialWeight (-1), which marks the node as not yet
// traversed during graph creation.
type Node struct {
	X, Y   int
	Weight int
}
// TilesToNodes builds a MapSizeX-by-MapSizeY grid of graph nodes mirroring
// the level board, returned as a 2d array of *Node to mimic Board's layout.
// Every node starts with Weight set to nodeInitialWeight (-1), marking it
// as not yet traversed during graph creation.
// (It may be worth introducing a dedicated type, e.g. type Nodes [][]*Node.)
func TilesToNodes() [][]*Node {
	grid := make([][]*Node, MapSizeX)
	for x := range grid {
		grid[x] = make([]*Node, MapSizeY)
		for y := range grid[x] {
			grid[x][y] = &Node{x, y, nodeInitialWeight}
		}
	}
	return grid
}
// FindAdjacent expands one ring of the weighted graph used for pathfinding.
// It takes the Board, the Creatures list, the Board-like node grid, the
// current frontier nodes, the starting node (the path's source), and the
// weight w to stamp on every newly reached node. It returns the slice of
// newly reached (adjacent) nodes and a startFound flag.
//
// For every frontier node it inspects the surrounding 3x3 neighborhood.
// If the start node is reached, startFound is set and both loops are exited
// via goto. Otherwise a neighbor is skipped when it is out of bounds,
// already traversed, the frontier node itself, blocked, or occupied by a
// living creature; surviving neighbors get Weight = w and join the result.
//
// NOTE(review): the start check runs before the bounds check, so start is
// assumed to lie within the map.
func FindAdjacent(b Board, c Creatures, nodes [][]*Node, frontiers []*Node, start *Node, w int) ([]*Node, bool) {
	var adjacent = []*Node{}
	startFound := false
	for i := 0; i < len(frontiers); i++ {
		for x := frontiers[i].X - 1; x <= frontiers[i].X+1; x++ {
			for y := frontiers[i].Y - 1; y <= frontiers[i].Y+1; y++ {
				if x == start.X && y == start.Y {
					// Source reached: stamp it and stop expanding.
					startFound = true
					nodes[x][y].Weight = w
					goto End
				}
				if x < 0 || x >= MapSizeX || y < 0 || y >= MapSizeY {
					continue //node is out of map bounds
				}
				if nodes[x][y].Weight != nodeInitialWeight {
					continue //node is marked as traversed already
				}
				if x == frontiers[i].X && y == frontiers[i].Y {
					continue //it's the current frontier node
				}
				if b[x][y].Blocked == true {
					continue //tile is blocked
				}
				if GetAliveCreatureFromTile(x, y, c) != nil {
					continue //tile is occupied by other monster
				}
				nodes[x][y].Weight = w
				adjacent = append(adjacent, nodes[x][y])
			}
		}
	}
End:
	return adjacent, startFound
}
// MoveTowardsPath moves the receiver one step along the shortest path to
// (tx, ty) using a breadth-first weighted graph built backwards from the
// goal. If BacktrackPath cannot find a step, the error is only logged and
// the subsequent Move(0, 0) is a no-op, so the creature stands still.
func (c *Creature) MoveTowardsPath(b Board, cs Creatures, tx, ty int) {
	/* MoveTowardsPath is one of main pathfinding methods. It takes
	Board and ints tx, ty (ie target coords) as arguments.
	MoveTowardsPath uses weighted graph to find shortest path
	from goal (tx, ty - it's more universal than passing Node or
	Creature) to source (creature, ie receiver).
	At first, it creates simple graph with all nodes' Weight set to
	-1 as not-yet-traversed. Later, it starts potentially infinite loop
	that breaks if starting position is found by FindAdjacent function,
	or when FindAdjacent won't find any proper tiles that are
	adjacent to previously found ones (ie frontiers).
	After every iteration, local variable "w" used to attribute
	node Weight increases by one, to mark that it's another step away
	from goal position; it makes backtracking easy - Creature position
	is end of path / graph, so Creature has only find node with
	Weight set to lesser value that node occupied by Creature.
	Effect may be a bit strange as it takes first node that met
	conditions, but works rather well with basic MoveTowards method. */
	nodes := TilesToNodes()
	start := nodes[c.X][c.Y]
	startFound := false
	goal := nodes[tx][ty]
	goal.Weight = 0
	var frontiers = []*Node{goal}
	w := 0
	for {
		w++
		// Stop when the flood fill can no longer expand (goal unreachable)
		// or when the creature's own tile has been reached.
		if len(frontiers) == 0 || startFound == true {
			break
		}
		frontiers, startFound = FindAdjacent(b, cs, nodes, frontiers, start, w)
	}
	// Uncomment line below, if you want to see nodes' weights.
	//RenderWeights(nodes)
	dx, dy, err := BacktrackPath(nodes, start)
	if err != nil {
		fmt.Println(err)
	}
	c.Move(dx, dy, b)
}
// BacktrackPath derives a single movement step (dx, dy) from the weighted
// graph built by MoveTowardsPath. Because node weights grow with distance
// from the goal, any neighbor with a smaller Weight than the start node is
// one step closer to the goal; the first such neighbor found wins (goto).
// On failure it returns (0, 0) plus an error.
func BacktrackPath(nodes [][]*Node, start *Node) (int, int, error) {
	/* Function BacktrackPath takes 2d array of *Node, and
	starting *Node as arguments; it returns two ints, that serves
	as coords.
	BacktrackPath is used in pathfinding. It uses weighted graph
	that has some sort of path already created (more in comments for
	MoveTowardsPath and FindAdjacent). Instead of creating
	proper path, or using search algorithm, structure of graph
	allows to use just node with smaller Weight than start node.
	It returns error if can't find proper tile.
	Note: returning three values at once is ugly. */
	direction := *start
	for x := start.X - 1; x <= start.X+1; x++ {
		for y := start.Y - 1; y <= start.Y+1; y++ {
			if x < 0 || x >= MapSizeX || y < 0 || y >= MapSizeY {
				continue // Node is out of map bounds.
			}
			if x == start.X && y == start.Y {
				continue // This node is the current node.
			}
			if nodes[x][y].Weight == nodeInitialWeight {
				continue // Node is not part of path.
			}
			if nodes[x][y].Weight < direction.Weight {
				direction = *nodes[x][y] // Node is closer to goal than current node.
				goto EndLoop
			}
		}
	}
EndLoop:
	var err error
	// Struct comparison: direction unchanged means no better neighbor exists.
	if direction == *start {
		// This error doesn't need helper function from err_.go.
		err = errors.New("Warning: function BacktrackPath could not find direction that met all requirements." +
			"\n Returned coords are coords of starting position.")
	}
	dx := direction.X - start.X
	dy := direction.Y - start.Y
	return dx, dy, err
}
// RenderWeights is a debugging helper: it clears the terminal, prints one
// glyph per node ("-" for untraversed, "+" for weights above 9, otherwise
// the digit itself), then blocks on user input so the game loop pauses.
// It is meant to be called near the end of MoveTowardsPath.
func RenderWeights(nodes [][]*Node) {
	blt.Clear()
	for x := 0; x < MapSizeX; x++ {
		for y := 0; y < MapSizeY; y++ {
			var glyph string
			switch w := nodes[x][y].Weight; {
			case w == nodeInitialWeight:
				glyph = "-"
			case w > 9:
				glyph = "+"
			default:
				glyph = strconv.Itoa(w)
			}
			blt.Print(x, y, glyph)
		}
	}
	blt.Refresh()
	blt.Read()
}
// MoveTowards is the main pathfinding entry point. Pather-style AIs use
// the full weighted-graph search (MoveTowardsPath); all others take one
// greedy diagonal step toward the target, with Dumb AIs additionally
// trying the horizontal-only or vertical-only step as a fallback.
func (c *Creature) MoveTowards(b Board, cs Creatures, tx, ty int, ai int) {
	/* MoveTowards is *the* main method for pathfinding.
	Has *Creature as receiver, and takes Board (ie map of level),
	ints tx and ty (ie coords of Node - in that case, it's more
	universal than passing whole Node or Creature), and ai - it's
	style of ai (these style markers are enums declared in ai.go)
	as arguments.
	Standard behaviour is always the same - check next tile on the single
	path between source and target; if it's available to pass, make a move;
	if not, behavior is different for every style.
	Creatures with DumbAI style checks for adjacent tiles - if are available,
	takes a step, otherwise stands still.
	Creatures with other styles (currently only PatherAI is implemented)
	calls MoveTowardsPath function, that creates weighted graph and finds
	shortest path from source to goal. */
	if ai == MeleePatherAI || ai == RangedPatherAI {
		c.MoveTowardsPath(b, cs, tx, ty)
	} else {
		// Reduce the target delta to a unit step in each axis (-1, 0, or 1).
		dx := tx - c.X
		dy := ty - c.Y
		ddx, ddy := 0, 0
		if dx > 0 {
			ddx = 1
		} else if dx < 0 {
			ddx = -1
		}
		if dy > 0 {
			ddy = 1
		} else if dy < 0 {
			ddy = -1
		}
		// NOTE(review): newX/newY are indexed without a bounds check;
		// this assumes the map border is never stepped past (e.g. maps
		// are wall-bordered) - confirm against map generation.
		newX, newY := c.X+ddx, c.Y+ddy
		if b[newX][newY].Blocked == false && GetAliveCreatureFromTile(newX, newY, cs) == nil {
			c.Move(ddx, ddy, b)
		} else {
			// Diagonal step blocked: Dumb AIs try sliding along one axis.
			if ai == MeleeDumbAI || ai == RangedDumbAI {
				if ddx != 0 {
					if b[newX][c.Y].Blocked == false && GetAliveCreatureFromTile(newX, c.Y, cs) == nil {
						c.Move(ddx, 0, b)
					}
				} else if ddy != 0 {
					if b[c.X][newY].Blocked == false && GetAliveCreatureFromTile(c.X, newY, cs) == nil {
						c.Move(0, ddy, b)
					}
				}
			}
		}
	}
}
func (c *Creature) DistanceTo(tx, ty int) int {
/* DistanceTo is Creature method. It takes target x and target y as args.
Computes, then returns, distance from receiver to target. */
dx := float64(tx - c.X)
dy := float64(ty - c.Y)
return RoundFloatToInt(math.Sqrt(math.Pow(dx, 2) + math.Pow(dy, 2)))
} | pathfinding.go | 0.52902 | 0.530845 | pathfinding.go | starcoder |
package gcp
import (
"context"
"cloud.google.com/go/bigquery"
"github.com/gruntwork-io/terratest/modules/logger"
"github.com/gruntwork-io/terratest/modules/testing"
)
// CreateDataset creates a BigQuery Dataset with the given DatasetMetadata,
// failing the test on any error.
func CreateDataset(t testing.TestingT, projectID, datasetID string, dm bigquery.DatasetMetadata) {
	if err := CreateDatasetE(t, projectID, datasetID, dm); err != nil {
		t.Fatal(err)
	}
}
// CreateDatasetE creates a BigQuery Dataset with the given DatasetMetadata.
func CreateDatasetE(t testing.TestingT, projectID, datasetID string, dm bigquery.DatasetMetadata) error {
	logger.Logf(t, "Creating dataset %s", datasetID)
	ctx := context.Background()

	// Creates a client.
	client, err := bigquery.NewClient(ctx, projectID)
	if err != nil {
		return err
	}
	// Release the client's underlying connections; the original leaked a
	// client on every call.
	defer client.Close()

	// Creates the new dataset via its handle.
	return client.Dataset(datasetID).Create(ctx, &dm)
}
// DeleteDataset destroys the dataset with the given name, failing the
// test on any error.
func DeleteDataset(t testing.TestingT, projectID, datasetID string) {
	if err := DeleteDatasetE(t, projectID, datasetID); err != nil {
		t.Fatal(err)
	}
}
// DeleteDatasetE destroys the dataset with the given name.
func DeleteDatasetE(t testing.TestingT, projectID, datasetID string) error {
	logger.Logf(t, "Deleting dataset %s", datasetID)
	ctx := context.Background()

	// Creates a client.
	client, err := bigquery.NewClient(ctx, projectID)
	if err != nil {
		return err
	}
	// Release the client's underlying connections; the original leaked a
	// client on every call.
	defer client.Close()

	// Deletes the dataset.
	return client.Dataset(datasetID).Delete(ctx)
}
// AssertDatasetExists checks if the given dataset exists and fails the
// test if it does not.
func AssertDatasetExists(t testing.TestingT, projectID, datasetID string) {
	if err := AssertDatasetExistsE(t, projectID, datasetID); err != nil {
		t.Fatal(err)
	}
}
// AssertDatasetExistsE checks if the given dataset exists and returns an error if it does not.
func AssertDatasetExistsE(t testing.TestingT, projectID, datasetID string) error {
logger.Logf(t, "Finding dataset %s", datasetID)
ctx := context.Background()
// Creates a client.
client, err := bigquery.NewClient(ctx, projectID)
if err != nil {
return err
}
// Seeks metadata for Dataset
if _, err := client.Dataset(datasetID).Metadata(ctx); err != nil {
return err
}
return nil
} | modules/gcp/bigquery.go | 0.657538 | 0.440048 | bigquery.go | starcoder |
package cases
import (
"sort"
"testing"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/stretchr/testify/require"
)
// SortedLabelsTest exports a single, constant metric with labels in the wrong order
// and checks that we receive the metrics with sorted labels.
func SortedLabelsTest() Test {
	return Test{
		Name: "SortedLabels",
		// Serve the metric with its labels deliberately out of order (b before a).
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{b="2",a="1"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// Every received sample must carry its label names in sorted order.
			forAllSamples(bs, func(s sample) {
				names := []string{}
				for i := range s.l {
					names = append(names, s.l[i].Name)
				}
				require.True(t, sort.IsSorted(sort.StringSlice(names)), "'%s' is not sorted", s.l.String())
			})
			// The sample itself must still arrive with both labels intact.
			tests := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test", "a", "1", "b", "2"), 1.0)
			require.True(t, tests > 0, `found zero samples for test{a="1",b="2"}`)
		},
	}
}
// RepeatedLabelsTest exports a single, constant metric with repeated labels
// and checks that we don't receive any metrics - the scrape should fail.
func RepeatedLabelsTest() Test {
	return Test{
		Name: "RepeatedLabels",
		// Serve a metric whose label "a" appears twice, which is invalid.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{a="1",a="1"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// No received sample may carry any label name more than once.
			forAllSamples(bs, func(s sample) {
				counts := map[string]int{}
				for i := range s.l {
					counts[s.l[i].Name]++
				}
				for name, count := range counts {
					require.Equal(t, 1, count, "label '%s' is repeated %d times", name, count)
				}
			})
			// The invalid exposition must have been rejected wholesale.
			tests := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test", "a", "1"), 1.0)
			require.True(t, tests == 0, `found samples for test{a="1"}, none expected`)
		},
	}
}
// EmptyLabelsTest exports a single, constant metric with an empty label
// and checks that we receive the metrics without said label.
func EmptyLabelsTest() Test {
	return Test{
		Name: "EmptyLabels",
		// Serve a metric whose only label has an empty value; the receiver
		// is expected to drop the label entirely.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{a=""} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// No received sample may carry an empty-valued label.
			forAllSamples(bs, func(s sample) {
				for i := range s.l {
					require.NotEmpty(t, s.l[i].Value, "'%s' contains empty labels", s.l.String())
				}
			})
			// The sample must still arrive, identified by its name alone.
			tests := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test"), 1.0)
			require.True(t, tests > 0, `found zero samples for {"__name__"="test"}`)
		},
	}
}
// NameLabelTest exports a single, constant metric with no name label
// and checks that we don't receive metrics without a name label - the scrape should fail.
func NameLabelTest() Test {
	return Test{
		Name: "NameLabel",
		// Serve a series with labels but no metric name, which is invalid.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
{label="value"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// Every received sample must include a __name__ label.
			forAllSamples(bs, func(s sample) {
				for i := range s.l {
					if s.l[i].Name == "__name__" {
						return
					}
				}
				require.True(t, false, "metric '%s' is missing name label", s.l.String())
			})
			// The nameless series must not have been accepted at all.
			samples := countMetricWithValue(t, bs, labels.FromStrings("label", "value"), 1.0)
			require.True(t, samples == 0, `found non-zero samples for {label="value"} = 1.0`)
		},
	}
}
// HonorLabels exports a single, constant metric with a job label
// and checks that we receive metrics a exported_job label.
func HonorLabelsTest() Test {
return Test{
Name: "HonorLabels",
Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{job="original", instance="foo"} 1.0
`)),
Expected: func(t *testing.T, bs []Batch) {
samples := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test", "exported_job", "original", "exported_instance", "foo"), 1.0)
require.Greater(t, samples, 0, `found zero samples for test{exported_job="original"} = 1.0`)
},
}
} | remote_write_sender/cases/labels.go | 0.750918 | 0.564038 | labels.go | starcoder |
package graphutils
import "strconv"
// Graph is a simple adjacency-list graph made of Vertex values.
type Graph struct {
	VertexArray []*Vertex
}

// Vertex is a graph node identified by Id. Visited and the adjacency
// list AdjEdge support traversal algorithms.
type Vertex struct {
	Id      string
	Visited bool
	AdjEdge []*Edge
}

// Edge is a weighted, directed connection from Source to Destination.
type Edge struct {
	Source      *Vertex
	Destination *Vertex
	Weight      int
}

// NewGraph returns an empty graph with a non-nil vertex slice.
func NewGraph() *Graph {
	return &Graph{
		VertexArray: make([]*Vertex, 0),
	}
}

// NewVertex returns an unvisited vertex with the given id and an empty
// adjacency list.
func NewVertex(inputID string) *Vertex {
	return &Vertex{
		Id:      inputID,
		Visited: false,
		AdjEdge: make([]*Edge, 0),
	}
}

// NewEdge returns an edge from source to destination with the given weight.
func NewEdge(source, destination *Vertex, weight int) *Edge {
	return &Edge{
		Source:      source,
		Destination: destination,
		Weight:      weight,
	}
}

// StrToInt converts a decimal string to an int, panicking on malformed
// input (callers treat a bad string as a programming error).
func StrToInt(inputStr string) int {
	result, err := strconv.Atoi(inputStr)
	if err != nil {
		panic("failed to convert string")
	}
	return result
}

// AddVertices appends the given vertices to the graph.
func (G *Graph) AddVertices(more ...*Vertex) {
	// A single variadic append replaces the original element-wise loop.
	G.VertexArray = append(G.VertexArray, more...)
}

// AddEdges appends the given edges to the vertex's adjacency list.
func (A *Vertex) AddEdges(more ...*Edge) {
	A.AdjEdge = append(A.AdjEdge, more...)
}

// GetOrConst returns the vertex with the given id, creating and
// registering a new one if it does not exist yet.
func (G *Graph) GetOrConst(id string) *Vertex {
	vertex := G.GetVertexByID(id)
	if vertex == nil {
		vertex = NewVertex(id)
		G.AddVertices(vertex)
	}
	return vertex
}

// GetVertexByID returns the vertex with the given id, or nil if absent.
// Lookup is linear in the number of vertices.
func (G *Graph) GetVertexByID(id string) *Vertex {
	for _, vertex := range G.VertexArray {
		if vertex.Id == id {
			return vertex
		}
	}
	return nil
}

// GetAdEdg streams the vertex's adjacent edges over a channel, which is
// closed once all edges have been sent. Callers must drain the channel,
// otherwise the sending goroutine blocks forever.
func (A *Vertex) GetAdEdg() chan *Edge {
	edgechan := make(chan *Edge)
	go func() {
		defer close(edgechan)
		for _, edge := range A.AdjEdge {
			edgechan <- edge
		}
	}()
	return edgechan
}
const MAXWEIGHT = 1000000
type MinDistanceFromSource map[*Vertex]int
func (G *Graph) Dijks(StartSource, TargetSource *Vertex) MinDistanceFromSource {
D := make(MinDistanceFromSource)
for _, vertex := range G.VertexArray {
D[vertex] = MAXWEIGHT
}
D[StartSource] = 0
for edge := range StartSource.GetAdEdg() {
D[edge.Destination] = edge.Weight
}
CalculateDistance(StartSource, TargetSource, D)
return D
}
func CalculateDistance(StartSource, TargetSource *Vertex, D MinDistanceFromSource) {
for edge := range StartSource.GetAdEdg() {
if D[edge.Destination] > D[edge.Source]+edge.Weight {
D[edge.Destination] = D[edge.Source] + edge.Weight
} else if D[edge.Destination] < D[edge.Source]+edge.Weight {
continue
}
CalculateDistance(edge.Destination, TargetSource, D)
}
} | graphutils/graph.go | 0.644113 | 0.422386 | graph.go | starcoder |
package pretty
import (
"encoding/hex"
"fmt"
"io"
"strconv"
. "github.com/polydawn/refmt/tok"
)
// NewEncoder returns an Encoder that writes pretty-printed token streams
// to wr.
func NewEncoder(wr io.Writer) *Encoder {
	return &Encoder{
		wr:    wr,
		stack: make([]phase, 0, 10),
	}
}

// Reset clears the phase stack (retaining its capacity) so the Encoder
// can be reused for a new token stream.
func (d *Encoder) Reset() {
	d.stack = d.stack[0:0]
	d.current = phase_anyExpectValue
}

/*
	A pretty.Encoder is a TokenSink that emits pretty-printed stuff.

	The default behavior is color coded with ANSI escape sequences, so it's
	snazzy looking on your terminal.
*/
type Encoder struct {
	wr io.Writer

	// Stack, tracking how many array and map opens are outstanding.
	// (Values are only 'phase_mapExpectKeyOrEnd' and 'phase_arrExpectValueOrEnd'.)
	stack   []phase
	current phase // shortcut to value at end of stack

	// Spare memory, for use in operations on leaf nodes (e.g. temp space for an int serialization).
	scratch [64]byte
}

// phase enumerates the states of the Step state machine: what kind of
// token is legal next.
type phase int

const (
	phase_anyExpectValue phase = iota // any value may start here (top level / after map key / in array)
	phase_mapExpectKeyOrEnd           // inside a map, before a key or the map close
	phase_mapExpectValue              // inside a map, after a key
	phase_arrExpectValueOrEnd         // inside an array, before a value or the array close
)
// Step consumes one token and advances the encoder's state machine,
// writing the token's pretty-printed form to d.wr. It returns done=true
// once a complete top-level value has been emitted, and a non-nil error
// when the token is not legal in the current phase. Composite opens push
// a phase onto the stack; closes pop it.
func (d *Encoder) Step(tok *Token) (done bool, err error) {
	switch d.current {
	case phase_anyExpectValue:
		// Any value is legal: open a composite, or emit a scalar and finish.
		switch tok.Type {
		case TMapOpen:
			d.pushPhase(phase_mapExpectKeyOrEnd)
			d.emitMapOpen(tok)
			return false, nil
		case TArrOpen:
			d.pushPhase(phase_arrExpectValueOrEnd)
			d.emitArrOpen(tok)
			return false, nil
		case TMapClose:
			return true, fmt.Errorf("unexpected mapClose; expected start of value")
		case TArrClose:
			return true, fmt.Errorf("unexpected arrClose; expected start of value")
		default:
			// A bare scalar is a complete top-level value.
			d.emitValue(tok)
			d.wr.Write(wordBreak)
			return true, nil
		}
	case phase_mapExpectKeyOrEnd:
		// Only a scalar key (string/int/uint) or the map close is legal.
		switch tok.Type {
		case TMapOpen:
			return true, fmt.Errorf("unexpected mapOpen; expected start of key or end of map")
		case TArrOpen:
			return true, fmt.Errorf("unexpected arrOpen; expected start of key or end of map")
		case TMapClose:
			d.emitMapClose(tok)
			return d.popPhase()
		case TArrClose:
			return true, fmt.Errorf("unexpected arrClose; expected start of key or end of map")
		default:
			switch tok.Type {
			case TString, TInt, TUint:
				// Indent to the current nesting depth, emit "key:".
				d.wr.Write(indentWord(len(d.stack)))
				d.emitValue(tok)
				d.wr.Write(wordColon)
				d.current = phase_mapExpectValue
				return false, nil
			default:
				return true, fmt.Errorf("unexpected token of type %T; expected map key", *tok)
			}
		}
	case phase_mapExpectValue:
		// After a key: any value is legal, but not a close.
		switch tok.Type {
		case TMapOpen:
			d.pushPhase(phase_mapExpectKeyOrEnd)
			d.emitMapOpen(tok)
			return false, nil
		case TArrOpen:
			d.pushPhase(phase_arrExpectValueOrEnd)
			d.emitArrOpen(tok)
			return false, nil
		case TMapClose:
			return true, fmt.Errorf("unexpected mapClose; expected start of value")
		case TArrClose:
			return true, fmt.Errorf("unexpected arrClose; expected start of value")
		default:
			// Scalar value consumed; expect the next key (or end) again.
			d.current = phase_mapExpectKeyOrEnd
			d.emitValue(tok)
			d.wr.Write(wordBreak)
			return false, nil
		}
	case phase_arrExpectValueOrEnd:
		// Inside an array: any value or the array close is legal.
		switch tok.Type {
		case TMapOpen:
			d.pushPhase(phase_mapExpectKeyOrEnd)
			d.emitMapOpen(tok)
			return false, nil
		case TArrOpen:
			d.pushPhase(phase_arrExpectValueOrEnd)
			d.emitArrOpen(tok)
			return false, nil
		case TMapClose:
			return true, fmt.Errorf("unexpected mapClose; expected start of value or end of array")
		case TArrClose:
			d.emitArrClose(tok)
			return d.popPhase()
		default:
			d.wr.Write(indentWord(len(d.stack)))
			d.emitValue(tok)
			d.wr.Write(wordBreak)
			return false, nil
		}
	default:
		panic("Unreachable")
	}
}
// pushPhase records p as the new current phase and pushes it on the stack.
func (d *Encoder) pushPhase(p phase) {
	d.stack = append(d.stack, p)
	d.current = p
}

// popPhase removes the top phase from the stack and reports true when the
// top-level value is now complete (only the root entry would remain).
func (d *Encoder) popPhase() (bool, error) {
	n := len(d.stack) - 1
	switch {
	case n == 0:
		// Last open composite closed: the value is done.
		return true, nil
	case n < 0:
		// The state machine is supposed to have errored before this.
		panic("prettyEncoder stack overpopped")
	}
	d.stack = d.stack[:n]
	d.current = d.stack[n-1]
	return false, nil
}
// emitTagPrefix writes tok's tag prefix, if tok carries a tag.
func (d *Encoder) emitTagPrefix(tok *Token) {
	if !tok.Tagged {
		return
	}
	d.wr.Write(wordTag)
	d.wr.Write([]byte(strconv.Itoa(tok.Tag)))
	d.wr.Write(wordTagClose)
}

// emitLength writes tok.Length, or the unknown-length marker when it is
// negative.
func (d *Encoder) emitLength(tok *Token) {
	if tok.Length < 0 {
		d.wr.Write(wordUnknownLen)
		return
	}
	d.wr.Write([]byte(strconv.Itoa(tok.Length)))
}

// emitMapOpen writes the opening line of a map value, including any tag
// prefix and the (possibly unknown) length.
func (d *Encoder) emitMapOpen(tok *Token) {
	d.emitTagPrefix(tok)
	d.wr.Write(wordMapOpenPt1)
	d.emitLength(tok)
	d.wr.Write(wordMapOpenPt2)
	d.wr.Write(wordBreak)
}

// emitMapClose writes the closing line of a map value, indented to the
// enclosing depth.
func (d *Encoder) emitMapClose(tok *Token) {
	d.wr.Write(indentWord(len(d.stack) - 1))
	d.wr.Write(wordMapClose)
	d.wr.Write(wordBreak)
}

// emitArrOpen writes the opening line of an array value, including any
// tag prefix and the (possibly unknown) length.
func (d *Encoder) emitArrOpen(tok *Token) {
	d.emitTagPrefix(tok)
	d.wr.Write(wordArrOpenPt1)
	d.emitLength(tok)
	d.wr.Write(wordArrOpenPt2)
	d.wr.Write(wordBreak)
}

// emitArrClose writes the closing line of an array value, indented to the
// enclosing depth.
func (d *Encoder) emitArrClose(tok *Token) {
	d.wr.Write(indentWord(len(d.stack) - 1))
	d.wr.Write(wordArrClose)
	d.wr.Write(wordBreak)
}
// emitValue writes a scalar token (null, string, bytes, bool, int, uint,
// or float), preceded by its tag prefix when tagged. Composite opens and
// closes are handled by the emitMap*/emitArr* methods, not here; an
// unhandled token type is a programming error and panics.
func (d *Encoder) emitValue(tok *Token) {
	if tok.Tagged {
		d.wr.Write(wordTag)
		d.wr.Write([]byte(strconv.Itoa(tok.Tag)))
		d.wr.Write(wordTagClose)
	}
	switch tok.Type {
	case TNull:
		d.wr.Write(wordNull)
	case TString:
		d.emitString(tok.Str)
	case TBytes:
		// Bytes are rendered as lowercase hex.
		dst := make([]byte, hex.EncodedLen(len(tok.Bytes)))
		hex.Encode(dst, tok.Bytes)
		d.wr.Write(dst)
	case TBool:
		switch tok.Bool {
		case true:
			d.wr.Write(wordTrue)
		case false:
			d.wr.Write(wordFalse)
		}
	case TInt:
		// Serialize into the scratch buffer to avoid an allocation.
		b := strconv.AppendInt(d.scratch[:0], tok.Int, 10)
		d.wr.Write(b)
	case TUint:
		b := strconv.AppendUint(d.scratch[:0], tok.Uint, 10)
		d.wr.Write(b)
	case TFloat64:
		// Fixed six-digit precision; this is a display format, not a
		// round-trippable encoding.
		b := strconv.AppendFloat(d.scratch[:0], tok.Float64, 'f', 6, 64)
		d.wr.Write(b)
	default:
		panic(fmt.Errorf("TODO finish more pretty.Encoder primitives support: unhandled token %s", tok))
	}
}
func (d *Encoder) writeByte(b byte) {
d.scratch[0] = b
d.wr.Write(d.scratch[0:1])
} | pretty/prettyEncoder.go | 0.54698 | 0.512876 | prettyEncoder.go | starcoder |
package model
import (
"time"
)
// Leg describes the transportation between two locations on a voyage.
type Leg struct {
	VoyageNumber   VoyageNumber `json:"voyage_number"` // Voyage carrying the cargo on this leg.
	LoadLocation   UNLocode     `json:"from"`          // Where the cargo is loaded.
	UnloadLocation UNLocode     `json:"to"`            // Where the cargo is unloaded.
	LoadTime       time.Time    `json:"load_time"`     // Scheduled load time.
	UnloadTime     time.Time    `json:"unload_time"`   // Scheduled unload time.
}

// NewLeg creates a new itinerary leg from the given voyage, endpoints,
// and scheduled load/unload times.
func NewLeg(voyageNumber VoyageNumber, loadLocation, unloadLocation UNLocode, loadTime, unloadTime time.Time) Leg {
	return Leg{
		VoyageNumber:   voyageNumber,
		LoadLocation:   loadLocation,
		UnloadLocation: unloadLocation,
		LoadTime:       loadTime,
		UnloadTime:     unloadTime,
	}
}
// Itinerary specifies steps required to transport a cargo from its origin
// to destination.
type Itinerary struct {
	Legs []Leg `json:"legs"`
}

// InitialDepartureLocation returns the start of the itinerary, or the
// empty UNLocode when the itinerary has no legs.
func (i Itinerary) InitialDepartureLocation() UNLocode {
	if i.IsEmpty() {
		return UNLocode("")
	}
	return i.Legs[0].LoadLocation
}

// FinalArrivalLocation returns the end of the itinerary, or the empty
// UNLocode when the itinerary has no legs.
func (i Itinerary) FinalArrivalLocation() UNLocode {
	if i.IsEmpty() {
		return UNLocode("")
	}
	return i.Legs[len(i.Legs)-1].UnloadLocation
}

// FinalArrivalTime returns the expected arrival time at the final
// destination, or the zero time when the itinerary has no legs.
func (i Itinerary) FinalArrivalTime() time.Time {
	// Guard against an empty itinerary; the original indexed
	// i.Legs[len(i.Legs)-1] unconditionally and panicked on empty legs,
	// unlike the sibling accessors above.
	if i.IsEmpty() {
		return time.Time{}
	}
	return i.Legs[len(i.Legs)-1].UnloadTime
}

// IsEmpty reports whether the itinerary contains no legs.
func (i Itinerary) IsEmpty() bool {
	// len of a nil slice is 0, so no separate nil check is needed.
	return len(i.Legs) == 0
}
// IsExpected checks if the given handling event is expected when executing
// this itinerary.
func (i Itinerary) IsExpected(event HandlingEvent) bool {
if i.IsEmpty() {
return true
}
switch event.Activity.Type {
case Receive:
return i.InitialDepartureLocation() == event.Activity.Location
case Load:
for _, l := range i.Legs {
if l.LoadLocation == event.Activity.Location && l.VoyageNumber == event.Activity.VoyageNumber {
return true
}
}
return false
case Unload:
for _, l := range i.Legs {
if l.UnloadLocation == event.Activity.Location && l.VoyageNumber == event.Activity.VoyageNumber {
return true
}
}
return false
case Claim:
return i.FinalArrivalLocation() == event.Activity.Location
}
return true
} | section19/cargo/model/itinerary.go | 0.709019 | 0.425605 | itinerary.go | starcoder |
package pulse
import (
"strconv"
"strings"
"sort"
"fmt"
"math"
"github.com/bradfitz/slice"
"github.com/pkg/errors"
)
// Signal implements a received 433 MHz signal of compressed raw time series
// that consists of pulse lengths and a sequence of pulses.
type Signal struct {
	Lengths []int  // Distinct pulse lengths, sorted ascending after Prepare.
	Seq     string // Pulse sequence; each character indexes into Lengths.
}

// Pair simply implements a tuple of two values
// (first, second).
type Pair struct {
	first  int
	second int
}
// Decode tries to decode a received Signal against every currently
// supported protocol, returning the first protocol's decoded result.
// It returns (nil, nil) when no protocol matches.
func Decode(s *Signal) (interface{}, error) {
	for _, proto := range Protocols() {
		if !matches(s, proto) {
			continue
		}
		binary, err := convert(s.Seq, proto.Mapping)
		if err != nil {
			return nil, err
		}
		return proto.Decode(binary)
	}
	return nil, nil
}
// matches reports whether a received Signal fits protocol p: the sequence
// length must be one the protocol accepts, the number of distinct pulse
// lengths must agree, and every pulse length must be within 40% of the
// protocol's corresponding value.
func matches(s *Signal, p *Protocol) bool {
	if !contains(p.SeqLength, len(s.Seq)) {
		return false
	}
	if len(s.Lengths) != len(p.Lengths) {
		return false
	}
	for i := range s.Lengths {
		// Allow up to 40% deviation from the received pulse length.
		maxDelta := 0.4 * float64(s.Lengths[i])
		if math.Abs(float64(s.Lengths[i]-p.Lengths[i])) > maxDelta {
			return false
		}
	}
	return true
}
// Prepare takes a compressed signal as input,
// 1) splits it into 8 pulse-length fields and the pulse sequence,
// 2) removes pulse lengths that are 0,
// 3) sorts the pulse lengths in ascending order, and
// 4) rearranges the pulse sequence, whose characters each represent a
// pulse length by its index in the array of pulse lengths.
func Prepare(input string) (*Signal, error) {
	parts := strings.Split(input, " ")
	// parts[8] (the pulse sequence) is accessed below, so at least 9
	// fields are required. The original checked `< 8`, which let inputs
	// with exactly 8 fields through and panicked with an index out of
	// range on parts[8].
	if len(parts) < 9 {
		return nil, fmt.Errorf("Incorrect number of pulse lengths: %s", input)
	}
	lengths := parts[:8]
	seq := parts[8]
	// Zero-valued pulse lengths are padding and carry no information.
	lengths = filter(lengths, func(s string) bool {
		return s != "0"
	})
	lengthsInts, err := toIntArray(lengths)
	if err != nil {
		return nil, fmt.Errorf("Cannot convert pulse lengths to integers: %s", lengths)
	}
	return sortSignal(
		&Signal{
			lengthsInts,
			seq,
		})
}
// sortSignal sorts the given pulse lengths in ascending order
// and changes the pulse sequence, where each character is a pulse length
// represented by its index in the array of pulse lengths,
// according to the new order of indices.
func sortSignal(s *Signal) (*Signal, error) {
	// Compute old-index -> new-index mapping before mutating s.Lengths.
	sortedIndices := sortIndices(s.Lengths)
	sort.Ints(s.Lengths)
	// Rewrite each sequence character (an old index) to its new index.
	seq, err := convert(s.Seq, sortedIndices)
	if err != nil {
		return nil, errors.Wrapf(err, "Failed to change the representation of '%s'", s.Seq)
	}
	return &Signal{
		s.Lengths,
		seq,
	}, nil
}
// sortIndices computes where each element of a would land if a were
// sorted ascending, returned as a map from old index to new index (both
// as decimal strings, ready for use with convert). E.g. for
// [200, 600, 500] it returns {"0":"0", "1":"2", "2":"1"}.
func sortIndices(a []int) map[string]string {
	// Pair each value with its original index so sorting preserves provenance.
	pairs := make([]Pair, len(a))
	for i, e := range a {
		pairs[i] = Pair{e, i}
	}
	// bradfitz/slice sorts via reflection using the provided less func.
	// NOTE(review): this sort is not guaranteed stable; if two pulse
	// lengths are ever equal, the resulting index mapping is ambiguous -
	// confirm lengths are always distinct.
	slice.Sort(pairs[:], func(l, r int) bool {
		return pairs[l].first < pairs[r].first
	})
	indices := make(map[string]string, len(a))
	for j, p := range pairs {
		indices[strconv.Itoa(p.second)] = strconv.Itoa(j)
	}
	return indices
}
// convert maps a pulse sequence to another representation using the given
// mapping. Matching is greedy and deterministic: at each position the
// longest matching key wins (ties broken lexicographically). The original
// iterated the map directly, and since Go map iteration order is random,
// overlapping keys could produce a different result on every run; an
// empty key would also loop forever instead of erroring.
func convert(seq string, mapping map[string]string) (string, error) {
	// Collect keys longest-first for deterministic longest-match; skip
	// empty keys, which cannot consume input.
	keys := make([]string, 0, len(mapping))
	for k := range mapping {
		if len(k) > 0 {
			keys = append(keys, k)
		}
	}
	sort.Slice(keys, func(a, b int) bool {
		if len(keys[a]) != len(keys[b]) {
			return len(keys[a]) > len(keys[b])
		}
		return keys[a] < keys[b]
	})
	var result strings.Builder
	i := 0
	for i < len(seq) {
		matched := false
		for _, k := range keys {
			if strings.HasPrefix(seq[i:], k) {
				result.WriteString(mapping[k])
				i += len(k)
				matched = true
				break
			}
		}
		if !matched {
			return "", fmt.Errorf("Unable to apply mapping to pulse sequence %s", seq)
		}
	}
	return result.String(), nil
}
// filter returns the elements of a for which f reports true. The result
// is always non-nil, even when nothing passes.
func filter(a []string, f func(string) bool) []string {
	kept := make([]string, 0)
	for _, s := range a {
		if !f(s) {
			continue
		}
		kept = append(kept, s)
	}
	return kept
}
// toIntArray converts each decimal string in a to an int, returning the
// first conversion error encountered (with a nil slice), if any.
func toIntArray(a []string) ([]int, error) {
	// Preallocate: the result has exactly len(a) elements on success.
	ints := make([]int, 0, len(a))
	for _, s := range a {
		n, err := strconv.Atoi(s)
		if err != nil {
			return nil, err
		}
		ints = append(ints, n)
	}
	return ints, nil
}
// contains reports whether e occurs in s.
func contains(s []int, e int) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.