code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package model
import (
"github.com/jesand/stats/channel/bsc"
"github.com/jesand/stats/dist"
"github.com/jesand/stats/factor"
"github.com/jesand/stats/variable"
"math"
)
// NewMultipleBSCModel constructs an empty MultipleBSCModel with soft
// input assignments enabled by default. Channels and observations are
// added afterwards via AddChannel and AddObservation.
func NewMultipleBSCModel() *MultipleBSCModel {
	model := &MultipleBSCModel{
		SoftInputs:  true,
		Inputs:      make(map[string]*variable.DiscreteRV),
		Channels:    make(map[string]*bsc.BSC),
		FactorGraph: factor.NewFactorGraph(),
	}
	return model
}
// A noisy channel model which explains a data set as passing through one of
// a set of BSC channels with unknown noise rates. For instance, this can be
// used to infer the answers of independent binary-valued crowdsourcing
// questions.
type MultipleBSCModel struct {
	// The latent variables sent over the noisy channels, keyed by input name
	Inputs map[string]*variable.DiscreteRV
	// The posterior probability that each latent input variable is true;
	// populated by EM, nil until EM has run
	InputScores map[string]float64
	// Whether to use soft (posterior-weighted) or hard (0/1) assignments
	// to inputs during inference
	SoftInputs bool
	// The noisy channels, keyed by channel name
	Channels map[string]*bsc.BSC
	// A factor graph relating inputs to observed outputs
	FactorGraph *factor.FactorGraph
}
// AddChannel adds a new BSC to the model with the given name and initial
// noise rate. An existing channel with the same name is replaced.
func (model *MultipleBSCModel) AddChannel(name string, noise float64) {
	model.Channels[name] = bsc.NewBSC(noise)
}
// HasChannel reports whether a channel with the given name exists.
func (model MultipleBSCModel) HasChannel(name string) bool {
	_, ok := model.Channels[name]
	return ok
}

// HasInput reports whether an input with the given name exists.
func (model MultipleBSCModel) HasInput(name string) bool {
	_, ok := model.Inputs[name]
	return ok
}
// AddObservation adds a new observation to the model for the given channel and
// input. If the input is new, it will be created automatically.
//
// NOTE(review): the channel is NOT created automatically — looking up a
// missing channel yields a nil *bsc.BSC here, which would fail on ch.Factor;
// callers appear to be expected to call AddChannel first. Confirm.
func (model *MultipleBSCModel) AddObservation(input, channel string, value bool) {
	inputVar, ok := model.Inputs[input]
	if !ok {
		// New latent input starts at outcome 0 (false) until inference updates it.
		inputVar = variable.NewDiscreteRV(0, dist.BooleanSpace)
		model.Inputs[input] = inputVar
	}
	ch := model.Channels[channel]
	// Add a worker noise factor tying the latent input to the observed value.
	outcome := dist.BooleanSpace.BoolOutcome(value)
	model.FactorGraph.AddFactor(ch.Factor(inputVar, variable.NewDiscreteRV(
		outcome, dist.BooleanSpace)))
}
// Score scores the model under the current parameter values by delegating
// to the factor graph's overall score.
func (model MultipleBSCModel) Score() float64 {
	return model.FactorGraph.Score()
}
// Train noise rates and input values using expectation maximization.
func (model *MultipleBSCModel) EM(maxRounds int, tolerance float64,
callback func(model *MultipleBSCModel, round int, stage string)) {
var (
round int
initialScore = model.Score()
thisRound = initialScore
lastRound = thisRound - 1.0
softScores = make(map[*variable.DiscreteRV]float64)
)
if callback != nil {
callback(model, round, "Initial")
}
for round = 1; (maxRounds == 0 || round <= maxRounds) &&
thisRound-lastRound > tolerance; round++ {
// Update input
for _, input := range model.Inputs {
input.Set(0)
ifFalse := math.Exp(model.FactorGraph.ScoreVar(input))
input.Set(1)
ifTrue := math.Exp(model.FactorGraph.ScoreVar(input))
if ifFalse > ifTrue {
input.Set(0)
}
if model.SoftInputs {
if ifTrue == 0 {
softScores[input] = 1e-6
} else if ifFalse == 0 {
softScores[input] = 1 - 1e-6
} else {
softScores[input] = ifTrue / (ifTrue + ifFalse)
}
} else {
softScores[input] = input.Val()
}
}
if callback != nil {
callback(model, round, "input")
}
// Update noise rates
thisRound2, lastRound2 := thisRound, lastRound
for r2 := 1; (maxRounds == 0 || r2 <= maxRounds) &&
thisRound2-lastRound2 > tolerance; r2++ {
for _, ch := range model.Channels {
var sum, count float64
for _, factor := range model.FactorGraph.AdjToVariable(ch.NoiseRate) {
if ch, ok := factor.(*bsc.BSCFactor); ok {
count++
qi := softScores[ch.Input]
if ch.Output.Val() == 1 {
sum += qi
} else {
sum += 1 - qi
}
}
}
if sum == 0 {
ch.NoiseRate.Set(1e-3)
} else if sum == count {
ch.NoiseRate.Set(1 - 1e-3)
} else {
ch.NoiseRate.Set(sum / count)
}
}
if callback != nil {
callback(model, round, "noise")
}
lastRound2, thisRound2 = thisRound2, model.Score()
}
lastRound, thisRound = thisRound, model.Score()
}
model.InputScores = make(map[string]float64)
for name, input := range model.Inputs {
input.Set(0)
ifFalse := math.Exp(model.FactorGraph.ScoreVar(input))
input.Set(1)
ifTrue := math.Exp(model.FactorGraph.ScoreVar(input))
if ifFalse > ifTrue {
input.Set(0)
}
model.InputScores[name] = ifTrue / (ifTrue + ifFalse)
}
if callback != nil {
callback(model, 0, "Final")
}
} | model/multiple_bsc.go | 0.705582 | 0.551755 | multiple_bsc.go | starcoder |
package dsl
import (
"io"
"time"
)
//Entries is a list of individual Entry values
type Entries []Entry
//First returns the *first* Entry that satisfies the passed-in Matcher.
//The second return value tells the caller whether an entry was found.
func (e Entries) First(matcher Matcher) (Entry, bool) {
	for i := range e {
		if matcher.Match(e[i]) {
			return e[i], true
		}
	}
	return Entry{}, false
}
//Filter returns the list of Entries that match the passed-in Matcher.
//The result is never nil, even when nothing matches.
func (e Entries) Filter(matcher Matcher) Entries {
	matched := make(Entries, 0)
	for _, candidate := range e {
		if matcher.Match(candidate) {
			matched = append(matched, candidate)
		}
	}
	return matched
}
//ConstructTimeline takes a TimelineDescription and a zeroth Entry and returns a Timeline.
//The zeroth entry is used to compute the starting time of the Timeline.
//
//For each description point after the first, the gap to the previous point's
//matched entry is shrunk by a factor of (1 - point.Squash); the accumulated
//(negative) offset is applied to the entry's timestamp so later entries shift
//consistently with earlier squashed gaps.
//
//NOTE(review): the ok-result of First is ignored, so points with no matching
//entry contribute zero-value Entry structs — confirm that is intended.
func (e Entries) ConstructTimeline(description TimelineDescription, zeroEntry Entry) Timeline {
	timeline := Timeline{
		Description: description,
		ZeroEntry:   zeroEntry,
	}
	timeOffset := time.Duration(0)
	for i, point := range description {
		entry, _ := e.First(point.Matcher)
		if i > 0 {
			previousEntry, _ := e.First(description[i-1].Matcher)
			duration := entry.Timestamp.Sub(previousEntry.Timestamp)
			// Shrink this gap by its squash factor and carry the shift forward.
			timeOffset -= time.Duration(float64(duration) * (1 - point.Squash))
			entry.Timestamp = entry.Timestamp.Add(timeOffset)
		}
		timeline.Entries = append(timeline.Entries, entry)
	}
	return timeline
}
//GroupBy buckets all Entries by the key produced by the passed-in Getter and
//returns a GroupedEntries object. Entries for which the Getter reports no key
//are skipped. The values returned by the Getter correspond to the keys in the
//returned GroupedEntries object.
func (e Entries) GroupBy(getter Getter) *GroupedEntries {
	grouped := NewGroupedEntries()
	for _, entry := range e {
		if key, ok := getter.Get(entry); ok {
			grouped.Append(key, entry)
		}
	}
	return grouped
}
//WriteLagerFormatTo emits lager-formatted entries to the passed-in writer,
//stopping at (and returning) the first write error.
func (e Entries) WriteLagerFormatTo(w io.Writer) error {
	for _, entry := range e {
		if err := entry.WriteLagerFormatTo(w); err != nil {
			return err
		}
	}
	return nil
}
// Len, Swap and Less implement sort.Interface, ordering entries by timestamp.
func (e Entries) Len() int           { return len(e) }
func (e Entries) Swap(i, j int)      { e[i], e[j] = e[j], e[i] }
func (e Entries) Less(i, j int) bool { return e[i].Timestamp.Before(e[j].Timestamp) }
package egeom
import (
"golang.org/x/image/math/fixed"
"image"
)
// Rectangle is an immutable axis-aligned rectangle with integer geometry.
// It exposes its nine anchor points and conversions to the image and
// fixed-point rectangle representations.
type Rectangle interface {
	X() int
	Y() int
	Width() int
	Height() int
	// Equals reports whether q has the same top-left corner and dimensions.
	Equals(q Rectangle) bool
	TopLeft() Point
	TopCenter() Point
	TopRight() Point
	CenterLeft() Point
	Center() Point
	CenterRight() Point
	BottomLeft() Point
	BottomCenter() Point
	BottomRight() Point
	Fixed26() fixed.Rectangle26_6
	Fixed52() fixed.Rectangle52_12
	ImageRect() image.Rectangle
}
// NewRectangle returns a Rectangle whose top-left corner is location with
// the given width and height.
func NewRectangle(location Point, width, height int) Rectangle {
	return rectImpl{
		location: location,
		w:        width,
		h:        height,
	}
}
// NewRectangleImage converts an image.Rectangle, using r.Min as the
// top-left corner and Dx/Dy as the dimensions.
func NewRectangleImage(r image.Rectangle) Rectangle {
	return rectImpl{
		location: NewPointImage(r.Min),
		w:        r.Dx(),
		h:        r.Dy(),
	}
}

// NewRectangleFixed26 converts a 26.6 fixed-point rectangle, rounding the
// fractional dimensions to the nearest integer.
func NewRectangleFixed26(q fixed.Rectangle26_6) Rectangle {
	dim := q.Max.Sub(q.Min)
	return rectImpl{
		location: NewPointFixed26(q.Min),
		w:        dim.X.Round(),
		h:        dim.Y.Round(),
	}
}

// NewRectangleFixed52 converts a 52.12 fixed-point rectangle, rounding the
// fractional dimensions to the nearest integer.
func NewRectangleFixed52(q fixed.Rectangle52_12) Rectangle {
	dim := q.Max.Sub(q.Min)
	return rectImpl{
		location: NewPointFixed52(q.Min),
		w:        dim.X.Round(),
		h:        dim.Y.Round(),
	}
}
// rectImpl is the value-type implementation of Rectangle: a top-left
// corner plus width and height.
type rectImpl struct {
	location Point
	w, h     int
}
// Equals reports whether q has the same top-left corner, width and height.
// A nil Rectangle is never equal.
func (z rectImpl) Equals(q Rectangle) bool {
	if q == nil {
		return false
	}
	if !z.location.Equals(q.TopLeft()) {
		return false
	}
	return z.w == q.Width() && z.h == q.Height()
}
// The nine anchor-point accessors below derive each point by offsetting the
// stored top-left corner by half or full width/height. Odd dimensions round
// the center down via integer division.
func (z rectImpl) TopLeft() Point {
	return z.location
}
func (z rectImpl) TopCenter() Point {
	return z.location.Add(z.w/2, 0)
}
func (z rectImpl) TopRight() Point {
	return z.location.Add(z.w, 0)
}
func (z rectImpl) CenterLeft() Point {
	return z.location.Add(0, z.h/2)
}
func (z rectImpl) Center() Point {
	return z.location.Add(z.w/2, z.h/2)
}
func (z rectImpl) CenterRight() Point {
	return z.location.Add(z.w, z.h/2)
}
func (z rectImpl) BottomLeft() Point {
	return z.location.Add(0, z.h)
}
func (z rectImpl) BottomCenter() Point {
	return z.location.Add(z.w/2, z.h)
}
func (z rectImpl) BottomRight() Point {
	return z.location.Add(z.w, z.h)
}

// X and Y report the top-left corner; Width and Height the dimensions.
func (z rectImpl) X() int {
	return z.location.X()
}
func (z rectImpl) Y() int {
	return z.location.Y()
}
func (z rectImpl) Width() int {
	return z.w
}
func (z rectImpl) Height() int {
	return z.h
}
// Fixed26 converts to a 26.6 fixed-point rectangle spanning from the
// top-left to the bottom-right corner.
func (z rectImpl) Fixed26() fixed.Rectangle26_6 {
	return fixed.Rectangle26_6{
		Min: z.TopLeft().Fixed26(),
		Max: z.BottomRight().Fixed26(),
	}
}

// Fixed52 converts to a 52.12 fixed-point rectangle spanning from the
// top-left to the bottom-right corner.
func (z rectImpl) Fixed52() fixed.Rectangle52_12 {
	return fixed.Rectangle52_12{
		Min: z.TopLeft().Fixed52(),
		Max: z.BottomRight().Fixed52(),
	}
}

// ImageRect converts to an image.Rectangle with the same corners.
func (z rectImpl) ImageRect() image.Rectangle {
	br := z.BottomRight()
	return image.Rect(z.X(), z.Y(), br.X(), br.Y())
}
package list
import (
"golang.org/x/exp/constraints"
)
// ValueEqual returns a function that tests equality of a with the value passed
// to the returned function.
// The primary purpose of this function is for use in ForAll or other predicates.
func ValueEqual[T comparable](a T) func(T) bool {
	return func(b T) bool {
		return a == b
	}
}

// ValueEqual2 tests equality of a with b.
// The primary purpose of this function is for use in ForAll2 or other
// predicates that accept two parameters.
func ValueEqual2[T comparable](a, b T) bool {
	return a == b
}
// Not returns a function that tests for the negative of the predicate.
// The primary purpose of this function is for use in ForAll or other predicates.
//
// The type parameter only needs to flow through the predicate, so the
// constraint is relaxed from comparable to any; existing instantiations
// remain valid.
func Not[T any](predicate func(T) bool) func(T) bool {
	return func(a T) bool {
		return !predicate(a)
	}
}
// Not2 returns a function that tests for the negative of the two-parameter
// predicate. The primary purpose of this function is for use in ForAll2 or
// other predicates that accept two parameters.
//
// The type parameters only need to flow through the predicate, so the
// constraints are relaxed from comparable to any; existing instantiations
// remain valid.
func Not2[T, T2 any](predicate func(T, T2) bool) func(T, T2) bool {
	return func(a T, b T2) bool {
		return !predicate(a, b)
	}
}
// GreaterThan returns a function that tests whether the value passed to the
// returned function is greater than a.
// The primary purpose of this function is for use in ForAll or other predicates.
func GreaterThan[T constraints.Ordered](a T) func(T) bool {
	return func(b T) bool {
		return b > a
	}
}

// GreaterThan2 tests whether b is greater than a (note the argument order).
// The primary purpose of this function is for use in ForAll2 or other
// predicates that accept two parameters.
func GreaterThan2[T constraints.Ordered](a, b T) bool {
	return b > a
}

// LessThan returns a function that tests whether the value passed to the
// returned function is less than a.
// The primary purpose of this function is for use in ForAll or other predicates.
func LessThan[T constraints.Ordered](a T) func(T) bool {
	return func(b T) bool {
		return b < a
	}
}

// LessThan2 tests whether b is less than a (note the argument order).
// The primary purpose of this function is for use in ForAll2 or other
// predicates that accept two parameters.
func LessThan2[T constraints.Ordered](a, b T) bool {
	return b < a
}
// ForAll tests whether every value matches predicate. It is vacuously true
// for an empty or nil slice.
func ForAll[T any](predicate func(T) bool, values []T) bool {
	for _, v := range values {
		if !predicate(v) {
			return false
		}
	}
	return true
}
// ForAll2 tests whether all pairs of values from values1 and values2 match
// predicate. Only the common prefix (up to the shorter length) is compared;
// it is vacuously true when either slice is empty.
//
// Rewritten as a plain bounded loop: the previous version built an index
// slice via RangeTo (an allocation per call) and depended on the Min helper
// just to iterate min(len1, len2) times.
func ForAll2[T any, T2 any](predicate func(T, T2) bool, values1 []T, values2 []T2) bool {
	n := len(values1)
	if len(values2) < n {
		n = len(values2)
	}
	for i := 0; i < n; i++ {
		if !predicate(values1[i], values2[i]) {
			return false
		}
	}
	return true
}
package metrics
import (
"context"
"go.opencensus.io/stats"
"go.opencensus.io/stats/view"
)
// Recorder is our backend-independent metrics recorder.
// This should be created with NewRecorder().
type Recorder struct {
	// Monotonic count of Pomodoros started (aggregated with view.Count).
	startPomCount *stats.Int64Measure
	// Gauge of currently running Pomodoros (aggregated with view.LastValue).
	runningPomCount *stats.Int64Measure
	// Gauge of currently connected servers (aggregated with view.LastValue).
	serverCount *stats.Int64Measure
}
// NewRecorder creates a Recorder with its measures initialized and their
// OpenCensus views registered. The error, if any, comes from view
// registration; the returned Recorder is non-nil either way.
func NewRecorder() (*Recorder, error) {
	recorder := &Recorder{
		startPomCount:   stats.Int64("pomodoros_started", "Count of Pomodoros started", stats.UnitDimensionless),
		runningPomCount: stats.Int64("pomodoros_running", "Current number of Pomodoros running", stats.UnitDimensionless),
		serverCount:     stats.Int64("connected_servers", "Current number of connected servers", stats.UnitDimensionless),
	}
	// Counter view: total starts accumulate over time.
	startView := &view.View{
		Name:        "pomodoros_started_count",
		Measure:     recorder.startPomCount,
		Description: "The number of Pomodoros started",
		Aggregation: view.Count(),
	}
	// Gauge views: only the most recent recorded value is exported.
	runningView := &view.View{
		Name:        "pomodoros_running_value",
		Measure:     recorder.runningPomCount,
		Description: "The number of Pomodoros running",
		Aggregation: view.LastValue(),
	}
	serverView := &view.View{
		Name:        "connected_servers_value",
		Measure:     recorder.serverCount,
		Description: "The number of connected servers",
		Aggregation: view.LastValue(),
	}
	return recorder, view.Register(startView, runningView, serverView)
}
// RecordStartPom records the start of a single pomodoro (increments the
// started counter by one).
func (r *Recorder) RecordStartPom() {
	stats.Record(context.Background(), r.startPomCount.M(1))
}

// RecordRunningPoms records the number of currently running pomodoros.
func (r *Recorder) RecordRunningPoms(count int64) {
	stats.Record(context.Background(), r.runningPomCount.M(count))
}

// RecordConnectedServers records the number of currently connected servers
// (guilds).
func (r *Recorder) RecordConnectedServers(count int64) {
	stats.Record(context.Background(), r.serverCount.M(count))
}
package charlatan
import "bytes"
// Query is a parsed SELECT-style query: fields to extract, a source name,
// an optional filter expression, and optional paging bounds.
type Query struct {
	// the fields to select if condition match the object
	fields []*Field
	// the resource from which we want to evaluate and select fields
	from string
	// the expression to evaluate on each record. The resulting constant will
	// always be converted as a bool
	expression operand
	// the record index to start from
	startingAt int64
	// the record index to stop at; nil means no limit
	limit *int64
}
// A Record is any source of field values that a query can be evaluated
// against: Find resolves a Field to its constant value.
type Record interface {
	Find(*Field) (*Const, error)
}
// QueryFromString parses the given string into a query.
func QueryFromString(s string) (*Query, error) {
	return parserFromString(s).Parse()
}

// NewQuery creates a new, empty query with the given FROM part.
func NewQuery(from string) *Query {
	return &Query{from: from}
}
// From returns the FROM part of this query.
func (q *Query) From() string {
	return q.from
}

// StartingAt returns the 'STARTING AT' part of the query, or 0 if it's not
// present.
func (q *Query) StartingAt() int64 {
	return q.startingAt
}

// HasLimit tests whether the query has a LIMIT clause.
func (q *Query) HasLimit() bool {
	return q.limit != nil
}

// Limit returns the 'LIMIT' part of the query, or 0 if it's not present
// (check HasLimit to distinguish "LIMIT 0" from "no limit").
func (q *Query) Limit() int64 {
	if q.limit == nil {
		return 0
	}
	return *q.limit
}
// AddField appends one field to the select list; nil fields are ignored.
func (q *Query) AddField(field *Field) {
	if field == nil {
		return
	}
	q.fields = append(q.fields, field)
}

// AddFields appends multiple fields, skipping any nils.
func (q *Query) AddFields(fields []*Field) {
	for _, f := range fields {
		q.AddField(f)
	}
}

// Fields returns the fields selected by this query.
func (q *Query) Fields() []*Field {
	return q.fields
}
// setWhere sets the WHERE condition; a nil operand is ignored, leaving any
// previously set expression in place.
func (q *Query) setWhere(op operand) {
	if op == nil {
		return
	}
	q.expression = op
}

// setLimit sets the LIMIT clause (stores a copy of the value so HasLimit
// becomes true).
func (q *Query) setLimit(limit int64) {
	q.limit = &limit
}
// FieldsValues extracts the value of each selected field from the given
// record, in select-list order. The first evaluation error aborts and is
// returned. Note that you should evaluate the query first.
func (q *Query) FieldsValues(record Record) ([]*Const, error) {
	values := make([]*Const, len(q.fields))
	for i := range q.fields {
		v, err := q.fields[i].Evaluate(record)
		if err != nil {
			return nil, err
		}
		values[i] = v
	}
	return values, nil
}
// Evaluate evaluates the query's WHERE expression against the given record,
// coercing the result to a bool. A query with no expression always matches.
func (q *Query) Evaluate(record Record) (bool, error) {
	// no expression, always valid
	if q.expression == nil {
		return true, nil
	}
	constant, err := q.expression.Evaluate(record)
	if err != nil {
		return false, err
	}
	return constant.AsBool(), nil
}
// String returns a string representation of this query
func (q *Query) String() string {
var buffer bytes.Buffer
buffer.WriteString("SELECT ")
for i, field := range q.fields {
if i > 0 {
buffer.WriteString(", ")
}
buffer.WriteString(field.Name())
}
buffer.WriteString(" FROM ")
buffer.WriteString(q.from)
if q.expression != nil {
buffer.WriteString(" WHERE ")
buffer.WriteString(q.expression.String())
}
return buffer.String()
} | plugins/data/parser/ql/charlatan/query.go | 0.794385 | 0.450299 | query.go | starcoder |
package level
import "fmt"
// TileType describes the general type of a map tile (stored as a single byte).
type TileType byte
// Info returns the information associated with the tile type.
// Unknown values fall back to the solid tile's info with the name and
// inverted-slope type patched to refer to the unknown value itself.
func (t TileType) Info() TileTypeInfo {
	if int(t) < len(tileTypeInfoList) {
		return tileTypeInfoList[t]
	}
	info := tileTypeInfoList[TileTypeSolid]
	info.Name = t.String()
	info.SlopeInvertedType = t
	return info
}
// String returns the textual representation of the type; unknown values are
// rendered as "UnknownXX" with the value in hex.
func (t TileType) String() string {
	if int(t) < len(tileTypeInfoList) {
		return tileTypeInfoList[t].Name
	}
	return fmt.Sprintf("Unknown%02X", int(t))
}
// Tiles come in different forms:
// Solid tiles can not be entered, Open tiles are regular tiles with a flat floor and a flat ceiling.
// DiagonalOpen tiles are those with flat floors and ceilings, and two walls cut off by one diagonal wall.
// Slope tiles have a sloped floor (or ceiling). Valley tiles have one floor vertex lower while Ridge tiles have one
// floor vertex higher than the other three.
// The numeric values match the on-disk encoding and index tileTypeInfoList.
const (
	TileTypeSolid                      TileType = 0x00
	TileTypeOpen                       TileType = 0x01
	TileTypeDiagonalOpenSouthEast      TileType = 0x02
	TileTypeDiagonalOpenSouthWest      TileType = 0x03
	TileTypeDiagonalOpenNorthWest      TileType = 0x04
	TileTypeDiagonalOpenNorthEast      TileType = 0x05
	TileTypeSlopeSouthToNorth          TileType = 0x06
	TileTypeSlopeWestToEast            TileType = 0x07
	TileTypeSlopeNorthToSouth          TileType = 0x08
	TileTypeSlopeEastToWest            TileType = 0x09
	TileTypeValleySouthEastToNorthWest TileType = 0x0A
	TileTypeValleySouthWestToNorthEast TileType = 0x0B
	TileTypeValleyNorthWestToSouthEast TileType = 0x0C
	TileTypeValleyNorthEastToSouthWest TileType = 0x0D
	TileTypeRidgeNorthWestToSouthEast  TileType = 0x0E
	TileTypeRidgeNorthEastToSouthWest  TileType = 0x0F
	TileTypeRidgeSouthEastToNorthWest  TileType = 0x10
	TileTypeRidgeSouthWestToNorthEast  TileType = 0x11
)
// TileTypes returns a list of all known tile types, in ascending value order.
func TileTypes() []TileType {
	return []TileType{
		TileTypeSolid, TileTypeOpen,
		TileTypeDiagonalOpenSouthEast, TileTypeDiagonalOpenSouthWest, TileTypeDiagonalOpenNorthWest, TileTypeDiagonalOpenNorthEast,
		TileTypeSlopeSouthToNorth, TileTypeSlopeWestToEast, TileTypeSlopeNorthToSouth, TileTypeSlopeEastToWest,
		TileTypeValleySouthEastToNorthWest, TileTypeValleySouthWestToNorthEast, TileTypeValleyNorthWestToSouthEast, TileTypeValleyNorthEastToSouthWest,
		TileTypeRidgeNorthWestToSouthEast, TileTypeRidgeNorthEastToSouthWest, TileTypeRidgeSouthEastToNorthWest, TileTypeRidgeSouthWestToNorthEast,
	}
}
package mat
//mat "gonum.org/v1/gonum/mat"
/*
func useless() *mat.Dense {
return mat.NewDense()
}
*/
//M64 represents a float64 matrix with r rows and c columns, stored in a
//flat row-major slice of length r*c.
type M64 struct {
	r    int
	c    int
	data []float64
}
//Dims returns the number of rows and columns; a nil receiver reports 0, 0.
func (m *M64) Dims() (int, int) {
	if m == nil {
		return 0, 0
	}
	return m.r, m.c
}
//Size returns the size of the data array (rows*columns); a nil receiver
//reports 0.
func (m *M64) Size() int {
	if m == nil {
		return 0
	}
	return m.r * m.c
}
//NewM64 returns a new M64 instance with r rows and c columns. Non-positive
//dimensions are clamped to 1. The data slice is adopted as backing storage
//only when its length is exactly r*c; otherwise a fresh zeroed slice is
//allocated.
func NewM64(r, c int, data []float64) *M64 {
	if r <= 0 {
		r = 1
	}
	if c <= 0 {
		c = 1
	}
	m := &M64{r: r, c: c}
	if len(data) == r*c {
		m.data = data
	} else {
		// r*c is already an int; the previous int(r*c) conversion was a no-op.
		m.data = make([]float64, r*c)
	}
	return m
}
//Valid returns false if m is nil. Otherwise it normalizes the receiver in
//place — clamping non-positive dimensions to 1 and reallocating the data
//slice when its length does not match r*c — and returns true.
func (m *M64) Valid() bool {
	if m == nil {
		return false
	}
	if m.r <= 0 {
		m.r = 1
	}
	if m.c <= 0 {
		m.c = 1
	}
	s := m.r * m.c
	if len(m.data) != s {
		// Discards existing contents; replaced with a zeroed slice.
		m.data = make([]float64, s)
	}
	return true
}
// index maps a (row, column) pair to its offset in the flat row-major data
// slice. Negative indices are clamped to offset 0 rather than panicking.
//
// The former "if i == 0 { return j }" special case was redundant:
// m.c*0 + j == j, so the general formula covers it.
func (m *M64) index(i, j int) int {
	if i < 0 || j < 0 {
		return 0
	}
	return m.c*i + j
}
//At returns the value at position row=i, col=j. Panics if m is nil or the
//computed offset is out of range.
func (m *M64) At(i, j int) float64 {
	return m.data[m.index(i, j)]
}

//Set sets val at position row=i, col=j. Panics if m is nil or the computed
//offset is out of range.
func (m *M64) Set(i, j int, val float64) {
	m.data[m.index(i, j)] = val
}
//Add adds n to m (element by element), storing the result in m.
func (m *M64) Add(n *M64) error {
	return add(m, n, m)
}

//Sub subtracts n from m (element by element), storing the result in m.
func (m *M64) Sub(n *M64) error {
	return sub(m, n, m)
}

//Mul computes the matrix product m x n, storing the result in m.
func (m *M64) Mul(n *M64) error {
	return mul(m, n, m)
}

//MulElem multiplies m by n element by element, storing the result in m.
func (m *M64) MulElem(n *M64) error {
	return mulElem(m, n, m)
}

//MapElem applies fn to each element of the matrix in place.
func (m *M64) MapElem(fn func(x float64) float64) error {
	return mapElemVal(m, m, fn)
}
package samples
// init registers sample #46 in sampleDataProposalCreateOperation: a raw JSON
// fixture for a proposal_create operation wrapping an account_update
// (op code 6). The literal must stay byte-identical; it is test data.
func init() {
	sampleDataProposalCreateOperation[46] = `{
"expiration_time": "2016-08-30T16:33:53",
"extensions": [],
"fee": {
"amount": 2419303,
"asset_id": "1.3.0"
},
"fee_paying_account": "1.2.126659",
"proposed_ops": [
{
"op": [
6,
{
"account": "1.2.121",
"active": {
"account_auths": [],
"address_auths": [],
"key_auths": [
[
"<KEY>",
1
]
],
"weight_threshold": 1
},
"extensions": {},
"fee": {
"amount": 43168,
"asset_id": "1.3.0"
},
"new_options": {
"extensions": [],
"memo_key": "<KEY>",
"num_committee": 8,
"num_witness": 27,
"votes": [
"1:22",
"1:23",
"1:24",
"1:25",
"1:26",
"1:27",
"1:30",
"1:31",
"1:33",
"1:34",
"1:35",
"1:36",
"1:37",
"1:40",
"1:44",
"1:45",
"1:47",
"1:48",
"1:51",
"1:53",
"1:54",
"1:56",
"1:60",
"0:76",
"0:84",
"0:85",
"0:87",
"0:88",
"0:111",
"0:112",
"1:143",
"0:147",
"1:164",
"1:165",
"1:166",
"2:179"
],
"voting_account": "1.2.19600"
},
"owner": {
"account_auths": [],
"address_auths": [],
"key_auths": [
[
"<KEY>",
1
]
],
"weight_threshold": 1
}
}
]
}
]
}`
}
//end of file
package sweetiebot
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/bwmarrin/discordgo"
)
// CollectionsModule bundles the commands that manipulate Sweetie Bot's
// collections. The function maps let specific collections run extra logic
// when an item is added or removed.
type CollectionsModule struct {
	// Per-collection hooks invoked after an item is added.
	AddFuncMap map[string]func(string) string
	// Per-collection hooks invoked after an item is removed.
	RemoveFuncMap map[string]func(string) string
}

// Name returns the module's display name.
func (w *CollectionsModule) Name() string {
	return "Collection"
}

// Register is a no-op; this module installs no event handlers.
func (w *CollectionsModule) Register(info *GuildInfo) {}

// Commands lists the commands this module provides.
func (w *CollectionsModule) Commands() []Command {
	return []Command{
		&AddCommand{w.AddFuncMap},
		&RemoveCommand{w.RemoveFuncMap},
		&CollectionsCommand{},
		&PickCommand{},
		&NewCommand{},
		&DeleteCommand{},
		&SearchCollectionCommand{},
		&ImportCommand{},
	}
}

// Description returns the module's help text.
func (w *CollectionsModule) Description() string {
	return "Contains commands for manipulating Sweetie Bot's collections."
}
// AddCommand adds an arbitrary string to one or more collections.
type AddCommand struct {
	// Per-collection hooks whose output is appended to the reply.
	funcmap map[string]func(string) string
}

// Name returns the command name.
func (c *AddCommand) Name() string {
	return "Add"
}

// Process handles "!add collection(s) text": args[0] may name several
// collections joined with "+"; everything after it (taken raw from the
// message) is inserted into each collection, then each collection's add
// hook (if any) is invoked and its result appended to the reply.
func (c *AddCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```No collection given```", false, nil
	}
	if len(args) < 2 {
		return "```Can't add empty string!```", false, nil
	}
	// Validate every named collection before mutating any of them.
	collections := strings.Split(args[0], "+")
	for _, v := range collections {
		_, ok := info.config.Basic.Collections[v]
		if !ok {
			return fmt.Sprintf("```The %s collection does not exist!```", v), false, nil
		}
	}
	add := ""
	length := make([]string, len(collections), len(collections))
	// Raw message text from the second token onward, preserving spacing.
	arg := msg.Content[indices[1]:]
	for k, v := range collections {
		info.config.Basic.Collections[v][arg] = true
		fn, ok := c.funcmap[v]
		length[k] = fmt.Sprintf("Length of %s: %v", PartialSanitize(v), strconv.Itoa(len(info.config.Basic.Collections[v])))
		if ok {
			add += " " + fn(arg)
		}
	}
	info.SaveConfig()
	return fmt.Sprintf("```Added %s to %s%s. \n%s```", PartialSanitize(arg), PartialSanitize(strings.Join(collections, ", ")), add, strings.Join(length, "\n")), false, nil
}

// Usage describes the command's parameters for the help system.
func (c *AddCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Adds [arbitrary string] to [collection], then calls a handler function for that specific collection.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection(s)", Desc: "The name of a collection. Specify multiple collections with \"collection1+collection2\"", Optional: false},
			CommandUsageParam{Name: "arbitrary string", Desc: "Arbitrary string to add to collection. Quotes aren't necessary, but cannot be empty.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *AddCommand) UsageShort() string { return "Adds a line to a collection." }
// RemoveCommand removes an arbitrary string from a single collection.
type RemoveCommand struct {
	// Per-collection hooks; see note in Process about how their result is used.
	funcmap map[string]func(string) string
}

// Name returns the command name.
func (c *RemoveCommand) Name() string {
	return "Remove"
}

// Process handles "!remove collection text". Unlike AddCommand, only one
// collection may be named.
//
// NOTE(review): when a remove hook exists, its return value REPLACES the
// whole reply, whereas AddCommand appends hook output — confirm this
// asymmetry is intended.
func (c *RemoveCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```No collection given```", false, nil
	}
	if len(args) < 2 {
		return "```Can't remove an empty string!```", false, nil
	}
	collection := args[0]
	cmap, ok := info.config.Basic.Collections[collection]
	if !ok {
		return "```That collection does not exist!```", false, nil
	}
	// Raw message text from the second token onward must match an entry exactly.
	arg := msg.Content[indices[1]:]
	_, ok = cmap[arg]
	if !ok {
		return "```Could not find " + arg + "!```", false, nil
	}
	delete(info.config.Basic.Collections[collection], arg)
	fn, ok := c.funcmap[collection]
	retval := "```Removed " + PartialSanitize(arg) + " from " + PartialSanitize(collection) + ". Length of " + PartialSanitize(collection) + ": " + strconv.Itoa(len(info.config.Basic.Collections[collection])) + "```"
	if ok {
		retval = fn(arg)
	}
	info.SaveConfig()
	return retval, false, nil
}

// Usage describes the command's parameters for the help system.
func (c *RemoveCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Removes [arbitrary string] from [collection], then calls a handler function for that specific collection.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection(s)", Desc: "The name of a collection. Specifying multiple collections is not supported.", Optional: false},
			CommandUsageParam{Name: "arbitrary string", Desc: "Arbitrary string to remove from collection. Quotes aren't necessary, but cannot be empty.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *RemoveCommand) UsageShort() string { return "Removes a line from a collection." }
// MemberFields implements sort.Interface over embed fields, ordering them
// alphabetically by field name.
type MemberFields []*discordgo.MessageEmbedField

func (f MemberFields) Len() int {
	return len(f)
}
func (f MemberFields) Less(i, j int) bool {
	return strings.Compare(f[i].Name, f[j].Name) < 0
}
func (f MemberFields) Swap(i, j int) {
	f[i], f[j] = f[j], f[i]
}
// CollectionsCommand lists all collections, or dumps one collection's contents.
type CollectionsCommand struct {
}

// Name returns the command name.
func (c *CollectionsCommand) Name() string {
	return "Collections"
}

// ShowAllCollections builds an embed listing every collection and its item
// count, sorted by name, with the given message as the embed description.
func ShowAllCollections(message string, info *GuildInfo) *discordgo.MessageEmbed {
	fields := make(MemberFields, 0, len(info.modules))
	for k, v := range info.config.Basic.Collections {
		fields = append(fields, &discordgo.MessageEmbedField{Name: k, Value: fmt.Sprintf("%v items", len(v)), Inline: true})
	}
	// Map iteration order is random; sort for a stable display.
	sort.Sort(fields)
	return &discordgo.MessageEmbed{
		Type: "rich",
		Author: &discordgo.MessageEmbedAuthor{
			URL:     "https://github.com/blackhole12/sweetiebot",
			Name:    "Sweetie Bot Collections",
			IconURL: fmt.Sprintf("https://cdn.discordapp.com/avatars/%v/%s.jpg", sb.SelfID, sb.SelfAvatar),
		},
		Description: message,
		Color:       0x3e92e5,
		Fields:      fields,
	}
}
// Process handles "!collections [name]": with no argument it shows the
// embed of all collections; with a name it dumps that collection's contents
// inside a Discord code block.
//
// The unused LINES and MAXLENGTH constants (dead code) have been removed.
func (c *CollectionsCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "", false, ShowAllCollections("No collection specified.", info)
	}
	arg := args[0]
	cmap, ok := info.config.Basic.Collections[arg]
	if !ok {
		return "```That collection doesn't exist! Use this command with no arguments to see a list of all collections.```", false, nil
	}
	s := strings.Join(MapToSlice(cmap), "\n")
	// Escape code-block terminators so entries cannot break out of the
	// surrounding ``` block, and insert a zero-width space to defuse
	// hidden-link emote markup.
	s = strings.Replace(s, "```", "\\`\\`\\`", -1)
	s = strings.Replace(s, "[](/", "[\u200B](/", -1)
	return fmt.Sprintf("```\n%s contains:\n%s```", arg, s), false, nil
}

// Usage describes the command's parameters for the help system.
func (c *CollectionsCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Lists all the collections that sweetiebot is using, or the contents of a specific collection.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection", Desc: "The name of a collection. Specifying multiple collections is not supported.", Optional: true},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *CollectionsCommand) UsageShort() string { return "Lists all collections." }
// PickCommand displays a random item from a collection.
type PickCommand struct {
}

// Name returns the command name.
func (c *PickCommand) Name() string {
	return "Pick"
}

// Process handles "!pick collection". The "spoiler" and "emote" collections
// are explicitly excluded from picking.
func (c *PickCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "", false, ShowAllCollections("No collection specified.", info)
	}
	arg := strings.ToLower(args[0])
	if arg == "spoiler" || arg == "emote" {
		return "```You cannot pick an item from that collection.```", false, nil
	}
	cmap, ok := info.config.Basic.Collections[arg]
	if !ok {
		return "```That collection doesn't exist! Use this command with no arguments to see a list of all collections.```", false, nil
	}
	if len(cmap) > 0 {
		// Sanitize mentions so a stored item cannot ping users/roles.
		return ReplaceAllMentions(MapGetRandomItem(cmap)), false, nil
	}
	return "```That collection is empty.```", false, nil
}

// Usage describes the command's parameters for the help system.
func (c *PickCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Picks a random item from the given collection and displays it.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection", Desc: "The name of a collection. Specifying multiple collections is not supported.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *PickCommand) UsageShort() string { return "Picks a random item." }
// NewCommand creates a new, empty collection.
type NewCommand struct {
}

// Name returns the command name.
func (c *NewCommand) Name() string {
	return "New"
}

// Process handles "!new name". Names are lowercased; "+" is rejected because
// AddCommand uses it as the multi-collection separator.
func (c *NewCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a new collection name.```", false, nil
	}
	collection := strings.ToLower(args[0])
	if strings.ContainsAny(collection, "+") {
		return "```Don't make collection names with + in them, dumbass!```", false, nil
	}
	_, ok := info.config.Basic.Collections[collection]
	if ok {
		return "```That collection already exists!```", false, nil
	}
	info.config.Basic.Collections[collection] = make(map[string]bool)
	info.SaveConfig()
	return "```Created the " + collection + " collection.```", false, nil
}

// Usage describes the command's parameters for the help system.
func (c *NewCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Creates a new collection with the given name, provided the collection does not already exist.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection", Desc: "The name of the new collection. No spaces are allowed, should only use letters and numbers.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *NewCommand) UsageShort() string { return "Creates a new collection." }
// DeleteCommand deletes a non-protected collection.
type DeleteCommand struct {
}

// Name returns the command name.
func (c *DeleteCommand) Name() string {
	return "Delete"
}

// Process handles "!delete name". The built-in collections listed in the
// protected set below cannot be deleted.
func (c *DeleteCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a collection name.```", false, nil
	}
	collection := strings.ToLower(args[0])
	_, ok := info.config.Basic.Collections[collection]
	if !ok {
		return "```That collection doesn't exist!```", false, nil
	}
	// Protected built-in collections the bot itself depends on.
	_, ok = map[string]bool{"emote": true, "bored": true, "status": true, "spoiler": true, "bucket": true}[collection]
	if ok {
		return "```You can't delete that collection!```", false, nil
	}
	delete(info.config.Basic.Collections, collection)
	info.SaveConfig()
	return "```Deleted the " + collection + " collection.```", false, nil
}

// Usage describes the command's parameters for the help system.
func (c *DeleteCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Deletes a collection with the given name, provided the collection is not protected.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "collection", Desc: "The name of the collection. Certain collections cannot be deleted.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *DeleteCommand) UsageShort() string { return "Deletes a collection." }
// SearchCollectionCommand searches a collection for entries containing a substring.
type SearchCollectionCommand struct {
}

// Name returns the command's invocation name.
func (c *SearchCollectionCommand) Name() string {
	return "SearchCollection"
}

// Process lists every entry of the named collection that contains the search
// string (everything in the message after the collection name). The
// "spoiler" collection is explicitly not searchable.
func (c *SearchCollectionCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a new collection name.```", false, nil
	}
	if len(args) < 2 {
		return "```You have to provide something to search for (use !collections to dump the contents of a collection).```", false, nil
	}
	collection := strings.ToLower(args[0])
	if collection == "spoiler" {
		return "```You can't search in that collection.```", false, nil
	}
	cmap, ok := info.config.Basic.Collections[collection]
	if !ok {
		return "```That collection doesn't exist! Use !collections without any arguments to list them.```", false, nil
	}
	results := []string{}
	// indices[1] is the byte offset of the second argument in the raw message,
	// so the search string keeps any internal spaces.
	arg := msg.Content[indices[1]:]
	for k := range cmap {
		if strings.Contains(k, arg) {
			results = append(results, k)
		}
	}
	if len(results) > 0 {
		return "```The following collection entries match your query:\n" + PartialSanitize(strings.Join(results, "\n")) + "```", len(results) > 6, nil
	}
	return "```No results found in the " + collection + " collection.```", false, nil
}

// Usage returns the detailed help entry for the SearchCollection command.
func (c *SearchCollectionCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Returns all members of the given collection that contain the given string.",
		Params: []CommandUsageParam{
			{Name: "collection", Desc: "The name of the collection. Specifying multiple collections is not supported.", Optional: false},
			{Name: "arbitrary string", Desc: "Arbitrary string to add to collection. Quotes aren't necessary, but cannot be empty.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary for the SearchCollection command.
func (c *SearchCollectionCommand) UsageShort() string { return "Searches a collection." }
type ImportCommand struct {
}
func (c *ImportCommand) Name() string {
return "Import"
}
func (c *ImportCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
if len(args) < 1 {
return "```No source server provided.```", false, nil
}
other := []*GuildInfo{}
str := args[0]
exact := false
if str[len(str)-1] == '@' {
str = str[:len(str)-1]
exact = true
}
for _, v := range sb.guilds {
if exact {
if strings.Compare(strings.ToLower(v.Guild.Name), strings.ToLower(str)) == 0 {
other = append(other, v)
}
} else {
if strings.Contains(strings.ToLower(v.Guild.Name), strings.ToLower(str)) {
other = append(other, v)
}
}
}
if len(other) > 1 {
names := make([]string, len(other), len(other))
for i := 0; i < len(other); i++ {
names[i] = other[i].Guild.Name
}
return fmt.Sprintf("```Could be any of the following servers: \n%s```", PartialSanitize(strings.Join(names, "\n"))), len(names) > 8, nil
}
if len(other) < 1 {
return fmt.Sprintf("```Could not find any server matching %s!```", args[0]), false, nil
}
if !other[0].config.Basic.Importable {
return "```That server has not made their collections importable by other servers. If this is a public server, you can ask a moderator on that server to run \"!setconfig importable true\" if they wish to make their collections public.```", false, nil
}
if len(args) < 2 {
return "```No source collection provided.```", false, nil
}
source := args[1]
target := source
if len(args) > 2 {
target = args[2]
}
sourceCollection, ok := other[0].config.Basic.Collections[source]
if !ok {
return fmt.Sprintf("```The source collection (%s) does not exist on the source server (%s)!```", source, other[0].Guild.Name), false, nil
}
targetCollection, tok := info.config.Basic.Collections[target]
if !tok {
return fmt.Sprintf("```The target collection (%s) does not exist on this server! Please manually create this collection using !new if you actually intended this.```", target), false, nil
}
for k, v := range sourceCollection {
targetCollection[k] = v
}
info.SaveConfig()
return fmt.Sprintf("```Successfully merged \"%s\" from %s into \"%s\" on this server. New size: %v```", source, other[0].Guild.Name, target, len(targetCollection)), false, nil
}
func (c *ImportCommand) Usage(info *GuildInfo) *CommandUsage {
return &CommandUsage{
Desc: "Adds all elements from the source collection on the source server to the target collection on this server. If no target is specified, attempts to copy all items into a collection of the same name as the source. Example: ```!import Manechat cool notcool```",
Params: []CommandUsageParam{
CommandUsageParam{Name: "source server", Desc: "The exact name of the source server to copy from.", Optional: false},
CommandUsageParam{Name: "source collection", Desc: "Name of the collection to copy from on the source server.", Optional: false},
CommandUsageParam{Name: "target collection", Desc: "The target collection to copy to on this server. If omitted, defaults to the source collection name.", Optional: true},
},
}
}
func (c *ImportCommand) UsageShort() string { return "Imports a collection from another server." } | sweetiebot/collections_command.go | 0.646014 | 0.5526 | collections_command.go | starcoder |
package gofun
import "reflect"
// Foldable is the interface for folding.
type Foldable interface {
	// FoldLeft folds Foldable from left side. Left folding is
	// calculated f(...f(f(z, xs[0]), xs[1])..., xs[n-1]).
	FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{}
	// FoldRight folds Foldable from right side. Right folding is
	// calculated f(xs[0], f(xs[1], ...f(xs[n-1], z)...)).
	FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{}
}
// FoldableOrElse returns x as a Foldable when it implements the interface,
// and otherwise falls back to the default y.
func FoldableOrElse(x interface{}, y Foldable) Foldable {
	if z, ok := x.(Foldable); ok {
		return z
	}
	return y
}
// All returns true if f returns true for all elements, otherwise false.
// The whole structure is traversed even after the first false, because
// FoldLeft cannot short-circuit.
func All(f func(interface{}) bool, xs Foldable) bool {
	return BoolOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		// x is the conjunction so far, y is the current element.
		return BoolOrElse(x, false) && f(y)
	}, true), false)
}

// AllM is similar to All but returns Monad and f returns Monad instead of bool values. Unit must be
// the unit function for specified monad.
func AllM(f func(interface{}) Monad, xs Foldable, unit func(interface{}) Monad) Monad {
	return FoldLeftM(func(x interface{}, y interface{}) Monad {
		// f is only consulted while the accumulated result is still true;
		// once it is false, the accumulator is simply re-wrapped with unit.
		if BoolOrElse(x, false) {
			return f(y)
		} else {
			return unit(x)
		}
	}, true, xs, unit)
}

// Any returns true if f returns true for any element, otherwise false.
// The whole structure is traversed even after the first true.
func Any(f func(interface{}) bool, xs Foldable) bool {
	return BoolOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		// x is the disjunction so far, y is the current element.
		return BoolOrElse(x, false) || f(y)
	}, false), false)
}

// AnyM is similar to Any but returns Monad and f returns Monad instead of bool values. Unit must be
// the unit function for specified monad.
func AnyM(f func(interface{}) Monad, xs Foldable, unit func(interface{}) Monad) Monad {
	return FoldLeftM(func(x interface{}, y interface{}) Monad {
		// Once the accumulated result is true it is just re-wrapped with
		// unit; otherwise f decides whether the current element matches.
		if BoolOrElse(x, false) {
			return unit(x)
		} else {
			return f(y)
		}
	}, false, xs, unit)
}

// DeepElement is similar to Element but uses reflect.DeepEqual instead of equal operator,
// so it also works for element types that are not comparable with ==.
func DeepElement(x interface{}, xs Foldable) bool {
	return BoolOrElse(xs.FoldLeft(func(y, z interface{}) interface{} {
		return BoolOrElse(y, false) || reflect.DeepEqual(z, x)
	}, false), false)
}

// Element returns true if Foldable contains the element, otherwise false.
// Comparison uses ==; per the Go spec this panics at run time when both
// operands hold the same non-comparable dynamic type (use DeepElement then).
func Element(x interface{}, xs Foldable) bool {
	return BoolOrElse(xs.FoldLeft(func(y, z interface{}) interface{} {
		return BoolOrElse(y, false) || z == x
	}, false), false)
}
// Filter filters the elements and returns a list of the filtered elements.
//
// The fold accumulator is a Pair(head, lastCons): head is the first cell of
// the result list and lastCons is its final cons cell, so each kept element
// is appended in O(1) by mutating lastCons' tail.
func Filter(f func(interface{}) bool, xs Foldable) *List {
	return ListOrElse(PairOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		if f(y) {
			p := PairOrElse(x, NewPair(Nil(), nil))
			ys := ListOrElse(p.First, Nil())
			prev := ListOrElse(p.Second, nil)
			l := Cons(y, Nil())
			if prev != nil {
				prev.SetTail(l)
			} else {
				// First kept element: it becomes the head of the result list.
				ys = l
			}
			return NewPair(ys, l)
		} else {
			return x
		}
	}, NewPair(Nil(), nil)), NewPair(Nil(), nil)).First, Nil())
}

// FilterM is similar to Filter but returns Monad and f returns Monad instead of list and bool value.
// Unit must be the unit function for specified monad.
//
// The monadic fold threads the same Pair(head, lastCons) accumulator as
// Filter; the final Map projects the finished list head out of the pair.
func FilterM(f func(interface{}) Monad, xs Foldable, unit func(interface{}) Monad) Monad{
	return MonadOrElse(FoldLeftM(func(x, y interface{}) Monad {
		return MonadOrElse(f(y).Map(func(y2 interface{}) interface{} {
			// y2 is f's (boolean) verdict for element y, delivered inside the monad.
			if BoolOrElse(y2, false) {
				p := PairOrElse(x, NewPair(Nil(), nil))
				ys := ListOrElse(p.First, Nil())
				prev := ListOrElse(p.Second, nil)
				l := Cons(y, Nil())
				if prev != nil {
					prev.SetTail(l)
				} else {
					ys = l
				}
				return NewPair(ys, l)
			} else {
				return x
			}
		}), unit(Nil()))
	}, NewPair(Nil(), nil), xs, unit).Map(func(x interface{}) interface{} {
		p := PairOrElse(x, NewPair(Nil(), nil))
		return p.First
	}), unit(Nil()))
}

// FilterSlice filters the elements and returns a slice of the filtered elements.
func FilterSlice(f func(interface{}) bool, xs Foldable) InterfaceSlice {
	return InterfaceSliceOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		if f(y) {
			return append(InterfaceSliceOrElse(x, InterfaceSlice([]interface{} {})), y)
		} else {
			return x
		}
	}, InterfaceSlice([]interface{} {})), InterfaceSlice([]interface{} {}))
}

// FilterSliceM is similar to FilterSlice but returns Monad and f returns Monad instead of slice and
// bool value. Unit must be the unit function for specified monad.
func FilterSliceM(f func(interface{}) Monad, xs Foldable, unit func(interface{}) Monad) Monad {
	return FoldLeftM(func(x, y interface{}) Monad {
		return MonadOrElse(f(y).Map(func(y2 interface{}) interface{} {
			// y2 is f's (boolean) verdict for element y, delivered inside the monad.
			if BoolOrElse(y2, false) {
				return append(InterfaceSliceOrElse(x, InterfaceSlice([]interface{} {})), y)
			} else {
				return x
			}
		}), unit(InterfaceSlice([]interface{} {})))
	}, InterfaceSlice([]interface{} {}), xs, unit)
}

// Find finds the element and returns the optional found element.
// The first match wins; later elements are still visited (FoldLeft cannot
// short-circuit) but cannot replace an already-Some accumulator.
func Find(f func(interface{}) bool, xs Foldable) *Option {
	return OptionOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		o := OptionOrElse(x, None())
		if o.IsSome() {
			return o
		} else {
			if f(y) {
				return Some(y)
			} else {
				return None()
			}
		}
	}, None()), None())
}

// FindM is similar to Find but returns Monad and f returns Monad instead of optional element and
// bool value. Unit must be the unit function for specified monad.
func FindM(f func(interface{}) Monad, xs Foldable, unit func(interface{}) Monad) Monad {
	return FoldLeftM(func(x, y interface{}) Monad {
		o := OptionOrElse(x, None())
		// Once a match has been found it is re-wrapped with unit; f is no
		// longer consulted for later elements.
		if o.IsSome() {
			return unit(o)
		} else {
			return MonadOrElse(f(y).Map(func(y2 interface{}) interface{} {
				if BoolOrElse(y2, false) {
					return Some(y)
				} else {
					return None()
				}
			}), unit(None()))
		}
	}, None(), xs, unit)
}
// FoldLeftM is similar to FoldLeft but returns Monad and f returns Monad instead of a value. Unit
// must be the unit function for specified monad.
//
// Implementation: a plain FoldRight builds a continuation of type
// func(interface{}) Monad; running that continuation on z performs the
// monadic left fold. The seed of the right fold is unit itself, which has
// the same function type, so the type assertion below also succeeds for an
// empty structure (the whole fold then reduces to unit(z)).
func FoldLeftM(f func(interface{}, interface{}) Monad, z interface{}, xs Foldable, unit func(interface{}) Monad) Monad {
	g, isOk := xs.FoldRight(func(y, x interface{}) interface{} {
		return func(x2 interface{}) Monad {
			// x is the continuation built so far; x2 is the incoming accumulator.
			h, isOk2 := x.(func(interface{}) Monad)
			if isOk2 {
				return f(x2, y).Bind(h)
			} else {
				return unit(x2)
			}
		}
	}, unit).(func(interface{}) Monad)
	if isOk {
		return g(z)
	} else {
		return unit(z)
	}
}

// FoldRightM is similar to FoldRight but returns Monad and f returns Monad instead of a value. Unit
// must be the unit function for specified monad.
//
// Mirror image of FoldLeftM: a plain FoldLeft builds the continuation that
// performs the monadic right fold when run on z.
func FoldRightM(f func(interface{}, interface{}) Monad, z interface{}, xs Foldable, unit func(interface{}) Monad) Monad {
	g, isOk := xs.FoldLeft(func(x, y interface{}) interface{} {
		return func(x2 interface{}) Monad {
			// x is the continuation built so far; x2 is the incoming accumulator.
			h, isOk2 := x.(func(interface{}) Monad)
			if isOk2 {
				return f(y, x2).Bind(h)
			} else {
				return unit(x2)
			}
		}
	}, unit).(func(interface{}) Monad)
	if isOk {
		return g(z)
	} else {
		return unit(z)
	}
}
// Length returns the number of elements contained in the Foldable.
func Length(xs Foldable) int {
	total := xs.FoldLeft(func(n, _ interface{}) interface{} {
		return IntOrElse(n, 0) + 1
	}, 0)
	return IntOrElse(total, 0)
}
// NotDeepElement is the DeepElement negation.
func NotDeepElement(x interface{}, xs Foldable) bool {
	return !DeepElement(x, xs)
}

// NotElement is the Element negation.
func NotElement(x interface{}, xs Foldable) bool {
	return !Element(x, xs)
}

// Null returns true if Foldable is empty, otherwise false.
// Any element flips the accumulator to false; the seed true survives only
// when the fold visits no elements at all.
func Null(xs Foldable) bool {
	return BoolOrElse(xs.FoldLeft(func (x, y interface{}) interface{} {
		return false
	}, true), false)
}
// ToList converts Foldable to a list.
//
// Like Filter, the fold accumulator is a Pair(head, lastCons) so each
// element is appended in O(1) by mutating the last cons cell's tail.
func ToList(xs Foldable) *List {
	return ListOrElse(PairOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		p := PairOrElse(x, NewPair(Nil(), nil))
		ys := ListOrElse(p.First, Nil())
		prev := ListOrElse(p.Second, nil)
		l := Cons(y, Nil())
		if prev != nil {
			prev.SetTail(l)
		} else {
			// First element: it becomes the head of the result list.
			ys = l
		}
		return NewPair(ys, l)
	}, NewPair(Nil(), nil)), NewPair(Nil(), nil)).First, Nil())
}

// ToSlice converts Foldable to a slice by appending each element in
// left-to-right fold order.
func ToSlice(xs Foldable) InterfaceSlice {
	return InterfaceSliceOrElse(xs.FoldLeft(func(x, y interface{}) interface{} {
		return append(InterfaceSliceOrElse(x, InterfaceSlice([]interface{} {})), y)
	}, InterfaceSlice([]interface{} {})), InterfaceSlice([]interface{} {}))
}
// FoldLeft treats the Option as a container of at most one element: for
// Some, f is applied once to (z, value); for None, z is returned unchanged.
func (xs *Option) FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	if xs.IsSome() {
		return f(z, xs.Get())
	} else {
		return z
	}
}

// FoldRight mirrors FoldLeft for Option, applying f to (value, z) for Some.
func (xs *Option) FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	if xs.IsSome() {
		return f(xs.Get(), z)
	} else {
		return z
	}
}

// FoldLeft treats the Either as a container of at most one element: only a
// Right value is folded; a Left is treated as empty and z is returned.
func (xs *Either) FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	if xs.IsRight() {
		return f(z, xs.GetRight())
	} else {
		return z
	}
}

// FoldRight mirrors FoldLeft for Either, applying f to (right value, z).
func (xs *Either) FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	if xs.IsRight() {
		return f(xs.GetRight(), z)
	} else {
		return z
	}
}

// FoldLeft folds the list front to back by walking its cons cells.
func (xs *List) FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	y := z
	for l := xs; l.IsCons(); l = l.Tail() {
		y = f(y, l.Head())
	}
	return y
}

// FoldRight folds the list back to front. The elements are first copied
// into a slice so the reverse traversal needs no recursion, then the slice
// implementation does the actual fold.
func (xs *List) FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	ys := make([]interface{}, 0, Length(xs))
	for l := xs; l.IsCons(); l = l.Tail() {
		ys = append(ys, l.Head())
	}
	return InterfaceSlice(ys).FoldRight(f, z)
}
// FoldLeft folds the slice from the left side:
// f(...f(f(z, xs[0]), xs[1])..., xs[n-1]).
func (xs InterfaceSlice) FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	acc := z
	for i := 0; i < len(xs); i++ {
		acc = f(acc, xs[i])
	}
	return acc
}

// FoldRight folds the slice from the right side:
// f(xs[0], f(xs[1], ...f(xs[n-1], z)...)).
func (xs InterfaceSlice) FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
	acc := z
	for i := len(xs); i > 0; i-- {
		acc = f(xs[i-1], acc)
	}
	return acc
}
func (xs InterfacePairMap) FoldLeft(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
y := z
for k, v := range xs {
y = f(y, NewPair(k, v))
}
return y
}
func (xs InterfacePairMap) FoldRight(f func(interface{}, interface{}) interface{}, z interface{}) interface{} {
ys := make([]interface{}, 0, len(xs))
for k, v := range xs {
ys = append(ys, NewPair(k, v))
}
return InterfaceSlice(ys).FoldRight(f, z)
} | foldable.go | 0.792705 | 0.474996 | foldable.go | starcoder |
package shape
import (
"fmt"
"math"
"strings"
"github.com/fogleman/gg"
"github.com/golang/freetype/raster"
)
// Cubic represents a single stroked cubic bezier curve.
type Cubic struct {
	X1, Y1 float64 // start anchor point
	X2, Y2 float64 // first control point
	X3, Y3 float64 // second control point
	X4, Y4 float64 // end anchor point
	Width float64 // stroke width
	MinLineWidth float64 // lower bound applied when mutating Width
	MaxLineWidth float64 // upper bound applied when mutating Width
	MinArcLength float64 // minimum approximate curve length accepted by Valid
}

// NewCubic returns a Cubic with its tuning limits set to their defaults;
// call Init to place it randomly on a plane.
func NewCubic() *Cubic {
	return &Cubic{
		MaxLineWidth: 1.0 / 2,
		MinLineWidth: 0.2,
		MinArcLength: 5,
	}
}
// Init places the curve randomly on the plane: the first anchor point is
// drawn via randomW/randomH and each subsequent point is offset from the
// previous one by up to +/-20 in each axis. A final mutateImpl pass
// (allowing 2 rollbacks) perturbs the curve until it satisfies Valid.
func (q *Cubic) Init(plane *Plane) {
	rnd := plane.Rnd
	q.X1 = randomW(plane)
	q.Y1 = randomH(plane)
	q.X2 = q.X1 + rnd.Float64()*40 - 20
	q.Y2 = q.Y1 + rnd.Float64()*40 - 20
	q.X3 = q.X2 + rnd.Float64()*40 - 20
	q.Y3 = q.Y2 + rnd.Float64()*40 - 20
	q.X4 = q.X3 + rnd.Float64()*40 - 20
	q.Y4 = q.Y3 + rnd.Float64()*40 - 20
	q.Width = 1.0 / 2
	q.mutateImpl(plane, 1.0, 2, ActionAny)
}
// Draw strokes the curve onto the drawing context, with the configured
// line width multiplied by scale.
func (q *Cubic) Draw(dc *gg.Context, scale float64) {
	dc.MoveTo(q.X1, q.Y1)
	dc.CubicTo(q.X2, q.Y2, q.X3, q.Y3, q.X4, q.Y4)
	dc.SetLineWidth(q.Width * scale)
	dc.Stroke()
}
// SVG returns an SVG path element stroking the curve. A cubic bezier uses
// the "C" path command (two control points plus an endpoint); the previous
// "Q" (quadratic, one control point) produced a malformed path when given
// three coordinate pairs.
func (q *Cubic) SVG(attrs string) string {
	// TODO: this is a little silly — the fill attributes are reused as
	// stroke attributes because the curve is stroked, not filled.
	attrs = strings.Replace(attrs, "fill", "stroke", -1)
	return fmt.Sprintf(
		"<path %s fill=\"none\" d=\"M %f %f C %f %f, %f %f, %f %f\" stroke-width=\"%f\" />",
		attrs, q.X1, q.Y1, q.X2, q.Y2, q.X3, q.Y3, q.X4, q.Y4, q.Width)
}
// Copy returns an independent duplicate of the curve.
func (q *Cubic) Copy() Shape {
	dup := *q
	return &dup
}
// Mutate randomly perturbs the curve, scaling the perturbation by the
// annealing temperature temp and allowing up to 10 rollbacks to keep the
// result Valid.
func (q *Cubic) Mutate(plane *Plane, temp float64) {
	q.mutateImpl(plane, temp, 10, ActionAny)
}
// mutateImpl repeatedly applies one randomly chosen perturbation (jitter a
// single point, change the stroke width, translate, or rotate) until the
// curve passes Valid. Perturbation magnitude scales with the annealing
// temperature temp, and only perturbations permitted by actions are applied.
// When a perturbation yields an invalid curve, the saved state is restored
// and another attempt is made, up to rollback times; once the rollbacks are
// exhausted, mutation continues from the (possibly invalid) current state.
func (q *Cubic) mutateImpl(plane *Plane, temp float64, rollback int, actions ActionType) {
	if actions == ActionNone {
		return
	}
	const R = math.Pi / 4.0 // maximum rotation scale (radians) at temp == 1
	const m = 16            // how far points may stray outside the plane
	w := plane.W
	h := plane.H
	rnd := plane.Rnd
	scale := temp * 16
	// Snapshot for rollback. Taken once, so every rollback restores this
	// original state rather than the most recent attempt.
	save := *q
	for {
		switch rnd.Intn(7) {
		case 0: // Mutate (jitter the start anchor point)
			if (actions & ActionMutate) == 0 {
				continue
			}
			a := rnd.NormFloat64() * scale
			b := rnd.NormFloat64() * scale
			q.X1 = clamp(q.X1+a, -m, float64(w-1+m))
			q.Y1 = clamp(q.Y1+b, -m, float64(h-1+m))
		case 1: // Mutate (jitter the first control point)
			if (actions & ActionMutate) == 0 {
				continue
			}
			a := rnd.NormFloat64() * scale
			b := rnd.NormFloat64() * scale
			q.X2 = clamp(q.X2+a, -m, float64(w-1+m))
			q.Y2 = clamp(q.Y2+b, -m, float64(h-1+m))
		case 2: // Mutate (jitter the second control point)
			if (actions & ActionMutate) == 0 {
				continue
			}
			a := rnd.NormFloat64() * scale
			b := rnd.NormFloat64() * scale
			q.X3 = clamp(q.X3+a, -m, float64(w-1+m))
			q.Y3 = clamp(q.Y3+b, -m, float64(h-1+m))
		case 3: // Mutate (jitter the end anchor point)
			if (actions & ActionMutate) == 0 {
				continue
			}
			a := rnd.NormFloat64() * scale
			b := rnd.NormFloat64() * scale
			q.X4 = clamp(q.X4+a, -m, float64(w-1+m))
			q.Y4 = clamp(q.Y4+b, -m, float64(h-1+m))
		case 4: // Width (always permitted, regardless of actions)
			q.Width = clamp(q.Width+rnd.NormFloat64()*temp, q.MinLineWidth, q.MaxLineWidth)
		case 5: // Translate (shift all four points by the same offset)
			if (actions & ActionTranslate) == 0 {
				continue
			}
			a := rnd.NormFloat64() * scale
			b := rnd.NormFloat64() * scale
			q.X1 = clamp(q.X1+a, -m, float64(w-1+m))
			q.Y1 = clamp(q.Y1+b, -m, float64(h-1+m))
			q.X2 = clamp(q.X2+a, -m, float64(w-1+m))
			q.Y2 = clamp(q.Y2+b, -m, float64(h-1+m))
			q.X3 = clamp(q.X3+a, -m, float64(w-1+m))
			q.Y3 = clamp(q.Y3+b, -m, float64(h-1+m))
			q.X4 = clamp(q.X4+a, -m, float64(w-1+m))
			q.Y4 = clamp(q.Y4+b, -m, float64(h-1+m))
		case 6: // Rotate (about the centroid of the four points)
			if (actions & ActionRotate) == 0 {
				continue
			}
			cx := (q.X1 + q.X2 + q.X3 + q.X4) / 4
			cy := (q.Y1 + q.Y2 + q.Y3 + q.Y4) / 4
			theta := rnd.NormFloat64() * temp * R
			cos := math.Cos(theta)
			sin := math.Sin(theta)
			var a, b float64
			a, b = rotateAbout(q.X1, q.Y1, cx, cy, cos, sin)
			q.X1 = clamp(a, -m, float64(w-1+m))
			q.Y1 = clamp(b, -m, float64(h-1+m))
			a, b = rotateAbout(q.X2, q.Y2, cx, cy, cos, sin)
			q.X2 = clamp(a, -m, float64(w-1+m))
			q.Y2 = clamp(b, -m, float64(h-1+m))
			a, b = rotateAbout(q.X3, q.Y3, cx, cy, cos, sin)
			q.X3 = clamp(a, -m, float64(w-1+m))
			q.Y3 = clamp(b, -m, float64(h-1+m))
			a, b = rotateAbout(q.X4, q.Y4, cx, cy, cos, sin)
			q.X4 = clamp(a, -m, float64(w-1+m))
			q.Y4 = clamp(b, -m, float64(h-1+m))
			// TODO: Scale
		}
		if q.Valid() {
			break
		}
		// Invalid result: restore the snapshot while rollbacks remain;
		// afterwards, keep mutating from the current (invalid) state.
		if rollback > 0 {
			*q = save
			rollback -= 1
		}
	}
}
// Valid reports whether the curve is acceptable: each pair of consecutive
// points must be more than one pixel apart and the approximate arc length
// must exceed MinArcLength. Note the coordinate deltas are truncated to int
// before squaring, so sub-pixel separations count as zero distance.
func (q *Cubic) Valid() bool {
	dx12 := int(q.X1 - q.X2)
	dy12 := int(q.Y1 - q.Y2)
	d12 := dx12*dx12 + dy12*dy12
	dx23 := int(q.X2 - q.X3)
	dy23 := int(q.Y2 - q.Y3)
	d23 := dx23*dx23 + dy23*dy23
	dx34 := int(q.X3 - q.X4)
	dy34 := int(q.Y3 - q.Y4)
	d34 := dx34*dx34 + dy34*dy34
	return d12 > 1 && d23 > 1 && d34 > 1 && q.arcLength() > q.MinArcLength
}
// arcLength returns an approximation of the curve's length, computed by
// sampling 48 points along the spline and summing the segment lengths.
//
// Bug fix: the original named the fourth Bernstein coefficient d, which
// shadowed the accumulator d inside the loop; every segment length was added
// to the shadowed variable and discarded, so arcLength always returned 0.
// The coefficients are now named c0..c3 so the accumulator is the one
// actually updated.
func (q *Cubic) arcLength() float64 {
	// The actual answer requires numerical integration along the spline;
	// instead we approximate by sampling N points and summing the segments.
	total := 0.0
	x := q.X1
	y := q.Y1
	const k = 1.0 / 48.0
	for t := k; t < 1.0; t += k {
		// Cubic Bernstein basis coefficients at parameter t.
		mt := 1.0 - t
		t2 := t * t
		mt2 := mt * mt
		c0 := mt2 * mt
		c1 := mt2 * t * 3
		c2 := mt * t2 * 3
		c3 := t * t2
		nx := c0*q.X1 + c1*q.X2 + c2*q.X3 + c3*q.X4
		ny := c0*q.Y1 + c1*q.Y2 + c2*q.Y3 + c3*q.Y4
		dx := nx - x
		dy := ny - y
		total += math.Sqrt(dx*dx + dy*dy)
		x = nx
		y = ny
	}
	return total
}
func (q *Cubic) Rasterize(rc *RasterContext) []Scanline {
var path raster.Path
p1 := fixp(q.X1, q.Y1)
p2 := fixp(q.X2, q.Y2)
p3 := fixp(q.X3, q.Y3)
p4 := fixp(q.X4, q.Y4)
path.Start(p1)
path.Add3(p2, p3, p4)
width := fix(q.Width)
return strokePath(rc, path, width, raster.RoundCapper, raster.RoundJoiner)
} | primitive/shape/cubic.go | 0.635222 | 0.482856 | cubic.go | starcoder |
package locstor
import (
"bytes"
"encoding/gob"
"encoding/json"
)
var (
	// BinaryEncoding is a ready-to-use implementation of EncoderDecoder which
	// encodes data structures in a binary format using the gob package.
	BinaryEncoding = &binaryEncoderDecoder{}
	// JSONEncoding is a ready-to-use implementation of EncoderDecoder which
	// encodes data structures as json.
	JSONEncoding = &jsonEncoderDecoder{}
)

// Encoder is an interface implemented by objects which can encode an arbitrary
// go object into a slice of bytes.
type Encoder interface {
	Encode(interface{}) ([]byte, error)
}

// Decoder is an interface implemented by objects which can decode a slice
// of bytes into an arbitrary go object. The decoded result is stored via
// the second argument, which is therefore expected to be a pointer.
type Decoder interface {
	Decode([]byte, interface{}) error
}

// EncoderDecoder is an interface implemented by objects which can both encode
// an arbitrary go object into a slice of bytes and decode that slice of bytes
// into an arbitrary go object. EncoderDecoders should have the property that
// Encode(Decode(x)) == x for all objects x which are encodable.
type EncoderDecoder interface {
	Encoder
	Decoder
}
// jsonEncoderDecoder implements EncoderDecoder using the standard library's
// JSON marshalling.
type jsonEncoderDecoder struct{}

// Encode implements the Encode method of Encoder by marshalling v to JSON.
func (jsonEncoderDecoder) Encode(v interface{}) ([]byte, error) {
	data, err := json.Marshal(v)
	return data, err
}

// Decode implements the Decode method of Decoder by unmarshalling JSON
// into the value pointed to by v.
func (jsonEncoderDecoder) Decode(data []byte, v interface{}) error {
	err := json.Unmarshal(data, v)
	return err
}
// binaryEncoderDecoder is an implementation of EncoderDecoder which uses binary
// encoding via the gob package in the standard library.
type binaryEncoderDecoder struct{}

// Encode implements the Encode method of Encoder.
func (binaryEncoderDecoder) Encode(v interface{}) ([]byte, error) {
	// A zero bytes.Buffer is ready to use; no backing slice allocation needed.
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(v); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

// Decode implements the Decode method of Decoder, storing the decoded value
// via the pointer v.
func (binaryEncoderDecoder) Decode(data []byte, v interface{}) error {
	return gob.NewDecoder(bytes.NewReader(data)).Decode(v)
}
package destiny2
import "time"
// Color represents a color with RGBA values represented between 0 and 255.
type Color struct {
	Red, Green, Blue, Alpha byte
}
// AnimationReference references an animation by name, identifier, and content path.
type AnimationReference struct {
	AnimName, AnimIdentifier, Path string
}
// HyperlinkReference is a titled link to an external URL.
type HyperlinkReference struct {
	Title, Url string
}
// DyeReference pairs a dye channel hash with the hash of the dye applied to it.
type DyeReference struct {
	ChannelHash, DyeHash uint32
}
// ArtDyeReference references an art dye channel by hash.
type ArtDyeReference struct {
	ArtDyeChannelHash uint32
}
// GearArtArrangementReference pairs a class hash with an art arrangement hash.
type GearArtArrangementReference struct {
	ClassHash, ArtArrangementHash uint32
}
// ItemVendorSourceReference represents that a vendor could sell this item.
type ItemVendorSourceReference struct {
	// VendorHash is the hash of a related VendorEntity.
	VendorHash uint32
	// VendorItemIndexes are the indexes at which this item is sold at this vendor.
	VendorItemIndexes []int32
}
// InterpolationPoint is an integer value/weight pair used for interpolation.
type InterpolationPoint struct {
	Value, Weight int32
}
// InterpolationPointFloat is the floating-point variant of InterpolationPoint.
type InterpolationPointFloat struct {
	Value, Weight float32
}
// VendorRequirementDisplayEntry is display metadata (icon, name, source, and
// type strings) for a vendor requirement.
type VendorRequirementDisplayEntry struct {
	Icon, Name, Source, Type string
}
// DateRange is a span of time from Start to End.
type DateRange struct {
	Start, End time.Time
}
// VendorInteractionSackEntry is the sack of items to show in a vendor interaction.
type VendorInteractionSackEntry struct {
	// SackType is the sack identifier in InventoryItemEntity.VendorSackType.
	// If these match, this sack is shown in the vendor interaction.
	SackType uint32
}
// ItemCreationEntryLevel is a wrapper for the item level at which an item should spawn.
type ItemCreationEntryLevel struct {
	// Level is the minimum character level the item spawns at.
	Level int32
}
// VendorService is the localized name of a vendor-provided service.
type VendorService struct {
	// Name is the localized name of a service provided.
	Name string
}
// ActivityGraphListEntry is the default map to show for a given activity or destination.
type ActivityGraphListEntry struct {
	// ActivityGraphHash is the hash of a related ActivityGraphEntity.
	ActivityGraphHash uint32
}
// ActivityGraphNodeFeaturingState is a single visual state that a node can be in.
type ActivityGraphNodeFeaturingState struct {
	// HighlightType is a classification of different ways this node feature can be highlighted.
	HighlightType ActivityGraphNodeHighlightType
}
// ActivityModifierReference is a reference to an activity modifier from another entity.
type ActivityModifierReference struct {
	// ActivityModifierHash is the hash of a related ActivityModifierEntity.
	ActivityModifierHash uint32
}
// ActivityUnlockString represents a status string that could be displayed about an activity.
type ActivityUnlockString struct {
	// DisplayString is the localized string to display if certain conditions are met.
	DisplayString string
}
// ActivityLoadoutRequirementSet is a set of loadout requirements for an activity.
type ActivityLoadoutRequirementSet struct {
	// Requirements are the set of requirements that will be applied on the activity if this set is active.
	Requirements []ActivityLoadout
}
// ActivityInsertionPoint is a point of entry into an activity.
type ActivityInsertionPoint struct {
	// PhaseHash is the unique hash representing the phase.
	PhaseHash uint32
}
// ActivityGraphNodeStateEntry represents a single state that a graph node might end up in.
type ActivityGraphNodeStateEntry struct {
	// State is the graph node state represented by this entry.
	State GraphNodeState
}
// ActivityGraphArtElement represents one-off visual effects overlayed on the map.
type ActivityGraphArtElement struct {
	// Position is the position on the map of this art element.
	Position Position
}
// UnlockExpression is the foundation of the game's gating mechanics and other related restrictions.
type UnlockExpression struct {
	// Scope is the gating scope of this expression.
	Scope GatingScope
}
// LinkedGraphEntry references an activity graph linked from another entity.
type LinkedGraphEntry struct {
	// ActivityGraphHash is the hash of a related ActivityGraphEntity.
	ActivityGraphHash uint32
}
// DestinationBubbleSetting is human readable data about a bubble.
//
// Deprecated: this representation is deprecated upstream; see the Destiny
// API documentation for the replacement.
type DestinationBubbleSetting struct {
	DisplayProperties DisplayProperties
}
// VendorGroupReference refers to a grouping of vendors.
type VendorGroupReference struct {
	// VendorGroupHash is the hash of a related VendorGroupEntity.
	VendorGroupHash uint32
}
// ArtifactTierItem is a plug item unlocked by activating this item in the artifact.
type ArtifactTierItem struct {
	// ItemHash is the hash for a related InventoryItemEntity.
	ItemHash uint32
}
// ItemVersion refers to the power cap for this item version.
type ItemVersion struct {
	// PowerCapHash is the hash for a related PowerCapEntity.
	PowerCapHash uint32
}
// TalentNodeStep groups the numeric ratings of a talent node step across
// five benefit categories (semantics per the Bungie.net Destiny 2 API
// schema; confirm against that documentation).
type TalentNodeStep struct {
	WeaponPerformance, ImpactEffects, GuardianAttributes, LightAbilities, DamageTypes int32
}
// ItemMetricBlock represents metrics available for display and selection on an item.
type ItemMetricBlock struct {
	// AvailableMetricCategoryNodeHashes are the hashes of all related PresentationNodeEntity structs.
	AvailableMetricCategoryNodeHashes []uint32
}
// PresentationNodeChildEntry is an entry for a child presentation node.
type PresentationNodeChildEntry struct {
	// PresentationNodeHash is the hash of a related PresentationNodeEntity.
	PresentationNodeHash uint32
}
// PresentationNodeCollectibleChildEntry is an entry for a collectible child
// of a presentation node.
type PresentationNodeCollectibleChildEntry struct {
	// CollectibleHash is the hash of a related CollectibleEntity.
	CollectibleHash uint32
}
// CollectibleAcquisitionBlock holds acquisition-related references for a
// collectible: its material requirements and acquisition-timestamp tracking.
type CollectibleAcquisitionBlock struct {
	// AcquireMaterialRequirementHash is the hash of a related MaterialRequirementSetEntity.
	AcquireMaterialRequirementHash uint32
	// AcquireTimestampUnlockValueHash is the hash of a related UnlockValueEntity.
	AcquireTimestampUnlockValueHash uint32
}
// PresentationNodeRequirementsBlock defines the requirements for showing a presentation node.
type PresentationNodeRequirementsBlock struct {
	// EntitlementUnavailableMessage is the localized string to show if this node is inaccessible due to entitlements.
	EntitlementUnavailableMessage string
}
// PresentationNodeRecordChildEntry is an entry for a record-based presentation node.
type PresentationNodeRecordChildEntry struct {
	// RecordHash is the hash of a related RecordEntity.
	RecordHash uint32
}
// RecordIntervalRewards are items rewarded for completing a record interval.
type RecordIntervalRewards struct {
	// IntervalRewardItems are a list of items and their quantities rewarded for completing this record interval.
	IntervalRewardItems []ItemQuantity
}
// PresentationNodeMetricChildEntry is an entry for a metric-related presentation node.
type PresentationNodeMetricChildEntry struct {
	// MetricHash is the hash of a related MetricEntity.
	MetricHash uint32
}
// PlugRule describes a rule around whether the plug is enabled or insertable.
type PlugRule struct {
// FailureMessage is the localized string to show if this rule fails.
FailureMessage string
}
// ParentItemOverride describes how a plug item overrides certain properties of the item it is socketed into.
type ParentItemOverride struct {
// AdditionalEquipRequirementsDisplayStrings are localized strings
// that describe additional requirements for equipping the parent item.
AdditionalEquipRequirementsDisplayStrings []string
// PIPIcon is the icon to show when viewing an item that has this plug socketed.
PipIcon string
}
// EnergyCapacityEntry is an entry for a plug item which grants energy capacity to the item is it socketed into.
type EnergyCapacityEntry struct {
// CapacityValue is the amount of energy capacity this plug provides.
CapacityValue int32
// EnergyTypeHash is the hash of a related EnergyTypeEntity.
EnergyTypeHash uint32
// EnergyType is a classification of the type of energy capacity granted.
EnergyType EnergyType
}
// EnergyCostEntry is an entry for a plug item that costs energy to insert into an item.
type EnergyCostEntry struct {
// EnergyCost is the cost of inserting this plug.
EnergyCost int32
// EnergyTypeHash is the hash of a related EnergyTypeEntity.
EnergyTypeHash uint32
// EnergyType is a classification of the type of energy that the plug costs.
EnergyType EnergyType
}
// NodeActivationRequirement describes the requirements for activation a talent node.
type NodeActivationRequirement struct {
// GridLevel is progression level on the talent grid required to activate this node.
GridLevel int32
// MaterialRequirementHashes are the hashes of all related MaterialRequirementSetEntity structs.
MaterialRequirementHashes []uint32
}
// NodeSocketReplaceResponse describes how a socket on an item is replaced with a new plug on a node step's activation.
type NodeSocketReplaceResponse struct {
// SocketTypeHash is the hash of a related SocketTypeEntity.
SocketTypeHash uint32
// PlugItemHash is the hash of a related InventoryItemEntity.
PlugItemHash uint32
}
// TalentExclusiveGroup describes a node that exists as part of an exclusive group.
// An exclusive group is a group of nodes that can and cannot be activated together.
type TalentExclusiveGroup struct {
	// GroupHash is the unique identifier for this exclusive group within the talent grid.
	GroupHash uint32
	// LoreHash is the hash of a related LoreEntity.
	LoreHash uint32
	// NodeHashes are the hashes of talent nodes that are part of this group.
	NodeHashes []uint32
	// OpposingGroupHashes are the unique identifiers for all exclusive groups that will be deactivated if any node in this group is activated.
	OpposingGroupHashes []uint32
	// OpposingNodeHashes are the hashes of talent nodes that are deactivated if any node in this group is activated.
	OpposingNodeHashes []uint32
}
// TalentNodeCategory describes a group of talent nodes by functionality.
type TalentNodeCategory struct {
	// Identifier is an identifier for this category.
	Identifier string
	// IsLoreDriven determines if this category has a related LoreEntity.
	IsLoreDriven bool
	// DisplayProperties holds the display information for this category.
	DisplayProperties DisplayProperties
	// NodeHashes are the hashes of all talent nodes in this talent grid that are in this category.
	NodeHashes []uint32
}
// PresentationNodesComponent describes the node components that compose each presentation node.
type PresentationNodesComponent struct {
	// Nodes maps the hash of a related PresentationNodeEntity to its presentation component.
	Nodes map[uint32]PresentationNodeComponent
}
// PresentationNodeComponent describes the components of a presentation node.
type PresentationNodeComponent struct {
// State is the state of this component.
State PresentationNodeState
// Objective is the progress for an objective in the presentation node.
Objective ObjectiveProgress
// ProgressValue is how much of the presentation node is completed so far.
ProgressValue int32
// CompletionValue is the value at which the presentation node is considered complete.
CompletionValue int32
// RecordCategoryScore is the current score of the record category the presentation node represents.
RecordCategoryScore int32
} | misc.go | 0.581897 | 0.489931 | misc.go | starcoder |
package parse
import (
"go/ast"
"strings"
"github.com/ardnew/gosh/cmd/goshfun/util"
)
// Return represents an individual return variable in the list of return
// variables of an individual function definition.
type Return struct {
	// Name is the identifier of the return variable ("" when unnamed).
	Name string
	// Ref is the ordered chain of indirections (pointer/array levels)
	// applied to Type, as collected by Parse.
	Ref []Reference
	// Type is the base type name of the return variable.
	Type string
}
// NewReturn creates a new Return by inspecting the parsed AST field.
func NewReturn(field *ast.Field) *Return {
	// Start from an empty (but non-nil Ref) value and let Parse walk the
	// field's type expression.
	ret := &Return{Ref: []Reference{}}
	return ret.Parse(field.Type)
}
// Parse constructs an Return by traversing the AST construction.
// Each pointer/array level encountered is appended to ret.Ref, and the
// terminal identifier becomes ret.Type. It returns nil for any other
// (unsupported) expression kind.
func (ret *Return) Parse(expr ast.Expr) *Return {
	switch t := expr.(type) {
	case *ast.Ident:
		ret.Type = t.Name
		return ret // base case; we stop recursion once we reach the type name.
	case *ast.ArrayType:
		// Record one array level, then descend into the element type.
		ret.Ref = append(ret.Ref, RefArray)
		return ret.Parse(t.Elt)
	case *ast.StarExpr:
		// Record one pointer level, then descend into the pointee type.
		ret.Ref = append(ret.Ref, RefPointer)
		return ret.Parse(t.X)
	}
	// shouldn't reach here unless the Expr doesn't have an identifying type,
	// (which I believe is always a syntax error in Go), or we encountered an
	// unrecognized expression and is not currently supported. in either case,
	// this is interpreted as an error, and we cannot use this function.
	return nil
}
// String implements fmt.Stringer using the shell-interface signature.
func (ret *Return) String() string {
	return ret.ProtoSh()
}
// IsListRef returns whether or not the reference at index ri is one of the list
// types (array or ellipses). It is false for a nil receiver or an
// out-of-range index.
func (ret *Return) IsListRef(ri int) bool {
	if ret == nil || ri >= len(ret.Ref) {
		return false
	}
	r := ret.Ref[ri]
	return r == RefArray || r == RefEllipses
}
// ProtoGo returns the signature used for this Return value for the Go
// interface: the optional name, all indirection symbols, then the type.
func (ret *Return) ProtoGo() string {
	var b strings.Builder
	if ret.Name != "" {
		b.WriteString(ret.Name)
		b.WriteByte(' ')
	}
	for _, r := range ret.Ref {
		b.WriteString(r.Symbol())
	}
	b.WriteString(ret.Type)
	return b.String()
}
// ProtoSh returns the signature used for this Return value for the shell
// interface: an ellipses marker per list-level indirection, followed by
// the variable's name (or its type when unnamed).
func (ret *Return) ProtoSh() string {
	var sb strings.Builder
	for _, ref := range ret.Ref {
		// Both array and ellipses indirections render as an ellipses
		// prefix in the shell signature. (The original used a switch with
		// a redundant `break`; Go switch cases never fall through.)
		if ref == RefArray || ref == RefEllipses {
			sb.WriteString(RefEllipses.Symbol())
		}
	}
	if ret.Name != "" {
		sb.WriteString(ret.Name)
	} else {
		sb.WriteString(ret.Type)
	}
	return sb.String()
}
// Prototype returns the signature used for this Return value for either the
// shell interface (sh == true) or the Go interface.
func (ret *Return) Prototype(sh bool) string {
	if !sh {
		return ret.ProtoGo()
	}
	return ret.ProtoSh()
}
// Declaration returns a representation of the type of this return variable that
// can be attached to a local variable identifier. Ellipses indirections are
// rendered as arrays; only the first list level is emitted.
func (ret *Return) Declaration() string {
	var a string
	var hasList bool
	for _, ref := range ret.Ref {
		switch ref {
		case RefArray, RefEllipses:
			if hasList {
				// currently do not support list indirection
				// (this break exits the switch, so nothing is appended
				// for list levels beyond the first)
				break
			}
			a = a + RefArray.Symbol()
			hasList = true
		case RefPointer:
			a = a + ref.Symbol()
		}
	}
	return a + ret.Type
}
// FormatOptions stores the various formatting options used to format a Return
// value as a string.
type FormatOptions struct {
	// IntegerBase is the radix passed to strconv.FormatInt/FormatUint.
	IntegerBase int
	// FloatFormat is the strconv.FormatFloat format byte (e.g. 'g').
	FloatFormat byte
	// FloatPrecision is the strconv.FormatFloat precision.
	FloatPrecision int
	// ComplexFormat is the strconv.FormatComplex format byte.
	ComplexFormat byte
	// ComplexPrecision is the strconv.FormatComplex precision.
	ComplexPrecision int
}
// DefaultFormat contains the default formatting options for a Return value
// formatted as a string.
var DefaultFormat = FormatOptions{
	IntegerBase: 10,
	FloatFormat: 'g', // strconv's shortest/exponent-as-needed format
	FloatPrecision: -1, // special value, "min digits required for exact rep"
	ComplexFormat: 'g',
	ComplexPrecision: -1, // special value, same as FloatPrecision
}
// Formatter returns a slice of Go source code lines defining an anonymous
// function that will convert a variable whose type is identified by the
// receiver ret's Type into a string.
func (ret *Return) Formatter(rName, qName string, retPos, numRets int, format FormatOptions) []string {
ln, fn := util.Newliner{}, util.Newliner{}
fName := "format" + strings.Title(rName)
iWidth := util.NumDigits(numRets)
ln.Addf("// -------------------------------------")
ln.Addf("// %*d | %s -> %s", iWidth, retPos, rName, ret.Declaration())
ln.Addf("// -------------------------------------")
ln.Addf("%s := func(input %s) string {", fName, ret.Type)
switch ret.Type {
case "rune":
fn.Add("return string(input)")
case "string":
fn.Add("return input")
case "error":
fn.Add("if nil != input {")
fn.Add(" return input.Error()")
fn.Add("}")
fn.Add("return \"\"")
case "bool":
fn.Add("return strconv.FormatBool(input)")
case "byte":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "int":
fn.Addf("return strconv.FormatInt(int64(input), %d)", format.IntegerBase)
case "int8":
fn.Addf("return strconv.FormatInt(int64(input), %d)", format.IntegerBase)
case "int16":
fn.Addf("return strconv.FormatInt(int64(input), %d)", format.IntegerBase)
case "int32":
fn.Addf("return strconv.FormatInt(int64(input), %d)", format.IntegerBase)
case "int64":
fn.Addf("return strconv.FormatInt(int64(input), %d)", format.IntegerBase)
case "uint":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "uint8":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "uint16":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "uint32":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "uint64":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "uintptr":
fn.Addf("return strconv.FormatUint(uint64(input), %d)", format.IntegerBase)
case "float32":
fn.Addf("return strconv.FormatFloat(float64(input), %d, %d, 32)", format.FloatFormat, format.FloatPrecision)
case "float64":
fn.Addf("return strconv.FormatFloat(float64(input), %d, %d, 64)", format.FloatFormat, format.FloatPrecision)
case "complex64":
fn.Addf("return strconv.FormatComplex(complex128(input), %d, %d, 64)", format.ComplexFormat, format.ComplexPrecision)
case "complex128":
fn.Addf("return strconv.FormatComplex(complex128(input), %d, %d, 128)", format.ComplexFormat, format.ComplexPrecision)
default:
fn.Add("return nil")
}
for _, s := range fn {
ln.Addf("\t%s", s)
}
ln.Add("}")
if ret.IsListRef(0) {
ln.Addf("for _, r := range %s {", rName)
ln.Addf("\t%s = append(%s, %s(r))", qName, qName, fName)
ln.Add("}")
} else {
ln.Addf("%s = append(%s, %s(%s))", qName, qName, fName, rName)
}
return ln
} | cmd/goshfun/parse/return.go | 0.737442 | 0.423577 | return.go | starcoder |
package main
import (
"errors"
"regexp"
"strings"
)
var (
	// QUOTES holds the quote characters whose contents honor ESCAPE.
	QUOTES = []byte{'"', '\''}
	// ALL_QUOTES additionally includes the backtick (raw) quote.
	ALL_QUOTES = append(QUOTES, '`')
	// ESCAPE is the escape character recognized inside quoted strings.
	ESCAPE byte = '\\'
)
// getFirstLine returns the first line of str, including its trailing
// newline. If str contains no newline, str itself is returned.
// (The original compiled a regexp on every call just to find a fixed
// single byte; strings.IndexByte does the same without allocation.)
func getFirstLine(str string) (nextLine string) {
	if i := strings.IndexByte(str, '\n'); i >= 0 {
		return str[:i+1]
	}
	return str
}
// getLastLine returns the text after the final newline in str (the last
// line, possibly empty). If str contains no newline, str itself is
// returned.
// (The original compiled `[^\n]*$` per call; that pattern matches the
// empty string, so its nil-check was dead code. strings.LastIndexByte
// returns -1 when absent, which the +1 turns into the full string.)
func getLastLine(str string) (lastLine string) {
	return str[strings.LastIndexByte(str, '\n')+1:]
}
/**
 * Returns str indentation pattern: the leading run of spaces and tabs.
 * @return pattern string string formed only by \s's & \t's
 *
 * NOTE(review): the original pattern `^[(\s|\t)]?` matched at most ONE
 * character and its class wrongly included the literal characters
 * '(', '|' and ')'. Per the doc comment above, the intent is the full
 * leading whitespace run, which `^[ \t]*` captures.
 */
func getIdentPatt(str string) (pattern string) {
	return regexp.MustCompile(`^[ \t]*`).FindString(str)
}
/**
 * Given a code and a opening char (e.g.: `{`, `<`, `"`, `'` et al),
 * the algorithm will return the position of the block content
 * delimited by this last mentioned param and its counterpart (e.g.: `}`, `>`, `"`, `'` et al)
 *
 * @param code *string
 * @param opening_char byte e.g.: `{`, `<`, `"`, `'` et al
 * @param ignore []byte This is a set of potentially harmful character.
 * i.e.: other opening chars.
 * Example: If we are interested in finding the counterpart of
 * `{` in the following code:
 *
 * ```
 * func foo() int {
 * str := "}}}}}}}}}"
 * return 0
 * }
 * ```
 *
 * Then, we should "ignore" the `}`'s in `str := "}}}}}}}}}"`
 *
 * @return loc [2]int The position of opening and closing chars
 * @return err error
 *
 * NOTE(review): if opening_char does not occur in *code, strings.Index
 * returns -1 and the first loop iteration indexes (*code)[-1], which
 * panics — confirm that all callers guarantee the character's presence.
 */
func getChunk(code *string, opening_char byte, ignore []byte) (loc [2]int, err error) {
	var (
		opening_loc int = strings.Index(*code, string(opening_char))
		closing_char byte = getClosingChar(opening_char)
		parity int = 0
	)
	// Scan from the opening character; updateParity tracks nesting and the
	// counter returns to zero exactly at the matching counterpart.
	for i := opening_loc; i < len(*code); i++ {
		var char byte = (*code)[i]
		updateParity(&parity, code, i, opening_char, closing_char)
		if parity == 0 {
			closing_loc := i
			return [2]int{opening_loc, closing_loc}, nil
		} else if InSlice(char, ignore) {
			// char opens a nested block (e.g. a string literal) whose
			// contents must not affect parity: recurse to find its end
			// and skip past it.
			rest := (*code)[i:]
			ignore_loc, _ := getChunk(&rest, char, nil)
			i += ignore_loc[1]
		}
	}
	return loc, errors.New("chunk's closing tag 404 not found")
}
// getClosingChar returns the character that closes a block opened by c:
// '}' for '{', and the quote itself for any quote character ('"', '\'',
// '`'). Unrecognized characters yield the zero byte.
//
// Bug fixed: the original listed '"' and '\'' as separate EMPTY switch
// cases; Go cases never fall through, so both incorrectly returned 0 and
// only '`' reached the assignment.
func getClosingChar(c byte) byte {
	var res byte
	switch c {
	case '{':
		res = '}'
	case '"', '\'', '`':
		res = c
	}
	return res
}
func updateParity(parity *int, code *string, i int, opening_char, closing_char byte) {
var (
char = (*code)[i]
is_a_boundary_char bool = (char == opening_char) || (char == closing_char)
is_an_opening_char bool = (InSlice(opening_char, QUOTES) && ((*parity)%2 == 0)) || (char == closing_char && !InSlice(opening_char, QUOTES))
is_false_positive bool = InSlice(opening_char, QUOTES) && ((*parity) > 0) && (char == opening_char) && ((*code)[i-1] == ESCAPE)
)
if is_a_boundary_char && !is_false_positive {
if is_an_opening_char {
(*parity)++
} else {
(*parity)--
}
}
} | syntax_utils.go | 0.725454 | 0.541227 | syntax_utils.go | starcoder |
package ntw
type (
	// iToken is the behavior shared by every token kind.
	// interface is used because of the nature of "go" language
	// because inheritance is managed through composition
	iToken interface {
		kind() int
		value() string
		isEmpty() bool
		parse()
		setAnd()
		setOrdinal()
	}
	// token carries the state common to all concrete token types.
	token struct {
		// input is the digit group this token was built from.
		input string
		// pos is the thousands-group position (indexes tokens.thousand).
		pos int
		// ordinal selects ordinal wording when true.
		ordinal bool
		// val is the parsed word output.
		val string
	}
	// tenToken parses the last two digits of a three-digit group.
	tenToken struct {
		token
	}
	// hundredToken parses the hundreds digit of a three-digit group.
	hundredToken struct {
		token
	}
	// thousandToken parses the scale word for a group's position.
	thousandToken struct {
		token
	}
)
// newTensToken builds a tenToken for the given digit group and parses it
// immediately.
func newTensToken(input string) *tenToken {
	tok := &tenToken{token{input: input}}
	tok.parse()
	return tok
}
// kind reports this token's place classification (tens).
func (t *tenToken) kind() int {
	return tens
}
// parse will parse last 2 digits of each 3 digit segment of a number
// last two digits are parsed in a group because this is most natural
// way to English language
func (t *tenToken) parse() {
	nr := t.input
	// Reduce a full three-digit group to its last two digits.
	if len(nr) == 3 {
		nr = nr[1:]
	}
	// "00" contributes no words.
	if nr == "00" {
		return
	}
	// Strip a leading zero so e.g. "07" is handled as the digit "7".
	if nr[0] == '0' {
		nr = nr[1:]
	}
	if len(nr) == 1 {
		// Single digit: look up the units word (ordinal variant if set).
		m := carord(tokens.tenRight, tokens.tenRightOrdinal, t.ordinal)
		t.val = m[nr]
		return
	}
	// Two-digit values with dedicated words (presumably the teens) have
	// their own lookup table.
	m := carord(tokens.tenDouble, tokens.tenDoubleOrdinal, t.ordinal)
	if v, ok := m[nr]; ok {
		t.val = v
		return
	}
	// Exact multiples of ten use the tens table alone.
	if nr[1] == '0' {
		m = carord(tokens.tenLeft, tokens.tenLeftOrdinal, t.ordinal)
		t.val = m[string(nr[0])]
		return
	}
	// Composite value: hyphenate the tens word with the units word.
	m = carord(tokens.tenRight, tokens.tenRightOrdinal, t.ordinal)
	t.val = tokens.tenLeft[string(nr[0])] + "-" + m[string(nr[1])]
}
// newHundredsToken builds a hundredToken for the given digit group and
// parses it immediately.
func newHundredsToken(input string) *hundredToken {
	base := token{input: input}
	tok := &hundredToken{base}
	tok.parse()
	return tok
}
// kind reports this token's place classification (hundreds).
func (t *hundredToken) kind() int {
	return hundreds
}
// parse will parse hundred expression
// one hundred, two hundred...
// Groups shorter than three digits, or with a zero hundreds digit,
// produce no words.
func (t *hundredToken) parse() {
	if len(t.input) < 3 || t.input[0] == '0' {
		return
	}
	digit := string(t.input[0])
	suffix := tokens.other["100"]
	if t.ordinal {
		suffix = tokens.other["100th"]
	}
	t.val = tokens.tenRight[digit] + " " + suffix
}
// newThousandsToken builds a thousandToken for the given digit group at
// thousands-group position pos, and parses it immediately.
func newThousandsToken(input string, pos int) *thousandToken {
	base := token{input: input, pos: pos}
	tok := &thousandToken{base}
	tok.parse()
	return tok
}
// kind reports this token's place classification (thousands).
func (t *thousandToken) kind() int {
	return thousands
}
// parse will parse thousands expression:
// billion, million, thousand...
// An all-zero group produces no scale word.
func (t *thousandToken) parse() {
	switch {
	case t.input == "000":
		t.val = ""
	case t.ordinal:
		t.val = tokens.thousandOrdinal[t.pos]
	default:
		t.val = tokens.thousand[t.pos]
	}
}
// value returns the words produced by parsing this token.
func (t *token) value() string {
	return t.val
}
// isEmpty reports whether parsing produced no words.
func (t *token) isEmpty() bool {
	return t.val == ""
}
// setAnd prefixes the token's words with the connective word from
// tokens.other["and"].
func (t *token) setAnd() {
	t.val = tokens.other["and"] + " " + t.val
}
func (t *token) setOrdinal() {
t.ordinal = true
} | ntw/token.go | 0.63443 | 0.407805 | token.go | starcoder |
package bdf2array
import (
"bytes"
"fmt"
"image"
bdf "github.com/zachomedia/go-bdf"
)
// maxNumBytesPerColumn caps a glyph column at 4 bytes (32 pixel rows).
const maxNumBytesPerColumn = 4

// glyphHeaderSize is the number of header bytes preceding each glyph's
// bitmap data in the encoded output.
const glyphHeaderSize = 5
// Glyph contains data for a single glyph
type Glyph struct {
	// Font is the BDF font this glyph belongs to.
	Font *bdf.Font
	// Codepoint is the character code this glyph encodes.
	Codepoint int
	// Character is the parsed BDF character record.
	Character *bdf.Character
	// Alpha is the rendered glyph bitmap as an alpha mask.
	Alpha *image.Alpha
	// EncodedSize — not set by the code visible here; TODO confirm usage.
	EncodedSize int
	// JumpOffset is the total encoded size (header + bitmap bytes).
	JumpOffset int
	// Advance is the horizontal pen advance in pixels.
	Advance int
	// BytesPerColumn is the number of bytes used to pack one bitmap column.
	BytesPerColumn int
	// BoundingBox is the tight box around the glyph's active pixels.
	BoundingBox image.Rectangle
	AlphaOffset image.Point // offset between the font bounding box coordinates and the coordinate system of the Alpha image
	// TopLeftOffset is AlphaOffset plus BoundingBox.Min: the bitmap's
	// top-left corner in font bounding-box coordinates.
	TopLeftOffset image.Point
	// EncodedData holds the packed column-major bitmap bytes.
	EncodedData []byte
}
// scanImageRegion computes the tight bounding box of all non-zero alpha
// pixels and the offset between the Alpha image's coordinate system and
// the font bounding-box coordinates.
func (g *Glyph) scanImageRegion() {
	g.BoundingBox = image.Rectangle{}
	bounds := g.Alpha.Bounds()
	for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
		for x := bounds.Min.X; x < bounds.Max.X; x++ {
			if g.Alpha.AlphaAt(x, y).A != 0 {
				// active pixel, make sure it falls inside the bounding box:
				g.BoundingBox = g.BoundingBox.Union(image.Rectangle{
					Min: image.Pt(x, y),
					Max: image.Pt(x+1, y+1),
				})
			}
		}
	}
	// determine the offset between the coordinate system of the original Alpha image and the font bounding box:
	baseline := image.Pt(0, g.Font.Ascent)
	// LowerPoint is presumably baseline-relative; image y grows downward,
	// hence the sign flip on the y component — TODO confirm against go-bdf.
	bottomLeft := baseline.Add(image.Pt(g.Character.LowerPoint[0], -g.Character.LowerPoint[1]))
	g.AlphaOffset = image.Pt(
		bottomLeft.X-g.Alpha.Bounds().Min.X,
		bottomLeft.Y-g.Alpha.Bounds().Max.Y,
	)
	if g.AlphaOffset.X < 0 || g.AlphaOffset.Y < 0 {
		fmt.Printf("Warning: Negative AlphaOffset found: %s, this should not happen\n", g.AlphaOffset)
	}
}
// EncodeGlyph creates the encoded data for the specified glyph: the bitmap
// is packed column-major, 8 rows per byte with the topmost row in the
// least significant bit. Advance is widened if needed so glyphs cannot
// overlap horizontally.
func (g *Glyph) encodeGlyph() (err error) {
	if g.BoundingBox.Empty() {
		// empty bitmap (e.g. a space)
		g.EncodedData = make([]byte, 0) // empty data
		g.JumpOffset = glyphHeaderSize
		return // done
	}
	// calculate the offset of the top-left corner of the resulting bitmap data:
	g.TopLeftOffset = g.AlphaOffset.Add(g.BoundingBox.Min)
	// check the Advance value:
	if g.Advance < (g.TopLeftOffset.X + g.BoundingBox.Dx()) {
		fmt.Printf("minimum 'advance' value of character %d increased by %d pixels, to prevent horizontal overlap of glyphs\n", g.Codepoint, g.TopLeftOffset.X+g.BoundingBox.Dx()-g.Advance)
		g.Advance = g.TopLeftOffset.X + g.BoundingBox.Dx()
	}
	dy := g.BoundingBox.Dy()
	// Round the pixel height up to whole bytes per column.
	g.BytesPerColumn = ((dy - 1) / 8) + 1
	if g.BytesPerColumn > maxNumBytesPerColumn {
		return fmt.Errorf("character %d too tall (%d exceeds the maximum of 32)", g.Codepoint, dy)
	}
	bitmap := &bytes.Buffer{}
	for x := g.BoundingBox.Min.X; x < g.BoundingBox.Max.X; x++ {
		y := g.BoundingBox.Min.Y
		for row := 0; row < g.BytesPerColumn; row++ {
			var val uint8
			for bit := 0; bit < 8; bit++ {
				if g.Alpha.AlphaAt(x, y).A != 0 { // reading outside the image's bounding box always returns 0, so no special measures are needed here
					val |= 1 << bit
				}
				y++
			}
			err = bitmap.WriteByte(val)
			if err != nil {
				return
			}
		}
	}
	g.EncodedData = bitmap.Bytes()
	g.JumpOffset = glyphHeaderSize + len(g.EncodedData)
	return nil
}
func (g *Glyph) getEncoding() (result []byte, err error) {
result = make([]byte, g.JumpOffset)
if g.Codepoint > 255 || g.JumpOffset > 255 || g.TopLeftOffset.Y > 31 || g.TopLeftOffset.Y > 255 || g.Advance > 255 {
err = fmt.Errorf("range exceeded on codepoint %d", g.Codepoint)
return
}
// fill in the header:
result[0] = byte(g.Codepoint)
result[1] = byte(g.JumpOffset)
result[2] = byte(g.TopLeftOffset.Y)<<2 | (byte(g.BytesPerColumn-1) & 0x03) // nolint:gomnd
result[3] = byte(g.TopLeftOffset.X)
result[4] = byte(g.Advance)
// and copy the data:
copy(result[5:], g.EncodedData)
return
} | glyph.go | 0.690768 | 0.444444 | glyph.go | starcoder |
package draw
import (
"context"
"image"
"image/color"
"image/draw"
"math"
"gonum.org/v1/gonum/mat"
)
// Rotate returns img rotated by deg degrees about its center, drawn onto
// a new canvas filled with the context's BackgroundColor (grey default).
// Multiples of 360 return img unchanged.
// NOTE(review): mapping SOURCE pixels through the rotation can leave
// un-set destination pixels ("holes"); an inverse mapping over the
// destination would avoid this — confirm whether acceptable here.
func Rotate(ctx context.Context, img draw.Image, deg int) draw.Image {
	if deg%360 == 0 {
		return img
	}
	// 2x2 rotation matrix for -deg (image y grows downward).
	rotM := mat.NewDense(2, 2, []float64{
		math.Cos(degToRad(-deg)),
		-math.Sin(degToRad(-deg)),
		math.Sin(degToRad(-deg)),
		math.Cos(degToRad(-deg)),
	})
	// size needs to be adjusted for 45 < deg < 135 and 225 < deg < 315
	var wide = 1.0
	if (deg >= 55 && deg < 125) || (deg >= 235 && deg < 305) {
		wide = 1.3
	}
	// Canvas is centered on the origin so rotated coordinates land inside.
	canvas := image.NewRGBA(
		image.Rectangle{
			image.Point{int(math.Round(-float64(img.Bounds().Dx()) * wide / 2)), -img.Bounds().Dy() / 2},
			image.Point{int(math.Round(float64(img.Bounds().Dx()) * wide / 2)), img.Bounds().Dy() / 2},
		},
	)
	// canvas := image.NewRGBA(img.Bounds())
	// draw.Draw(canvas, canvas.Bounds(), m, b.Min.Add(p), draw.Src)
	var bgColor = color.RGBA{192, 192, 192, 255}
	if v, ok := ctx.Value(BackgroundColor).(color.RGBA); ok {
		bgColor = v
	}
	draw.Draw(canvas, canvas.Bounds(), &image.Uniform{bgColor}, image.ZP, draw.Src)
	for x := 0; x < img.Bounds().Dx(); x++ {
		for y := 0; y < img.Bounds().Dy(); y++ {
			var pt mat.Dense
			// when rotating, we offset both x and y by -img.height/2 and -img.width/2,
			// and avoid unwanted translation
			pt.Mul(
				rotM,
				mat.NewDense(
					2,
					1,
					[]float64{float64(x - img.Bounds().Dx()/2), float64(y - img.Bounds().Dy()/2)}),
			)
			canvas.Set(int(pt.At(0, 0)), int(pt.At(1, 0)), img.At(x, y))
		}
	}
	return canvas
}
// Scale returns img scaled by factor c onto a new canvas filled with the
// context's BackgroundColor. Factors within 0.001 of 1.0 return img
// unchanged; negative factors are clamped to 0.
// NOTE(review): for c > 1 each source pixel is replicated only along the
// diagonal offsets (i, i), which may leave unfilled gaps off-diagonal —
// confirm whether this fill strategy is intended.
func Scale(ctx context.Context, img draw.Image, c float64) draw.Image {
	if math.Abs(c-1.0) < 0.001 {
		return img
	}
	if c < 0.0 {
		c = 0.0
	}
	// When enlarging, each source pixel is stamped multiple times to cover
	// the larger destination area.
	var fillFactor int = 1
	if c > 1.0 {
		fillFactor = int(math.Round(c / 0.5))
	}
	scaled := image.NewRGBA(
		image.Rect(
			0,
			0,
			int(math.Round(float64(img.Bounds().Max.X)*c)),
			int(math.Round(float64(img.Bounds().Max.Y)*c)),
		),
	)
	var bgColor = color.RGBA{192, 192, 192, 255}
	if v, ok := ctx.Value(BackgroundColor).(color.RGBA); ok {
		bgColor = v
	}
	draw.Draw(scaled, scaled.Bounds(), &image.Uniform{bgColor}, image.ZP, draw.Src)
	// Homogeneous 3x3 scaling matrix.
	sc := mat.NewDiagDense(3, []float64{c, c, 1.0})
	for x := 0; x < img.Bounds().Dx(); x++ {
		for y := 0; y < img.Bounds().Dy(); y++ {
			var pt mat.Dense
			pt.Mul(sc, mat.NewDense(3, 1, []float64{float64(x), float64(y), 1.0}))
			for i := -fillFactor; i < fillFactor; i++ {
				scaled.Set(
					int(math.Round(pt.At(0, 0)))+i,
					int(math.Round(pt.At(1, 0)))+i,
					img.At(x, y),
				)
			}
		}
	}
	return scaled
}
// degToRad converts an angle in whole degrees to radians, first reducing
// deg modulo 360 (Go's % preserves the sign of deg, so the result lies in
// (-2π, 2π)). Only change besides documentation: removed trailing
// dataset-metadata junk fused onto the closing brace.
func degToRad(deg int) float64 {
	return float64(deg%360) * math.Pi / 180.0
}
package main
import (
"fmt"
"math"
"os"
"sort"
"strconv"
"strings"
)
// Spread is an inclusive [low, high] integer interval.
type Spread struct {
	low int
	high int
}
// Contains reports whether v lies within the inclusive interval s.
func (s Spread) Contains(v int) bool {
	return s.low <= v && v <= s.high
}
// Surrounds reports whether ns fully covers s (ns.low <= s.low and
// ns.high >= s.high).
// NOTE(review): the name suggests s surrounds ns, but the comparison is
// the opposite direction — confirm intended semantics at call sites.
func (s Spread) Surrounds(ns Spread) bool {
	return ns.low <= s.low && ns.high >= s.high
}
// Action is the instruction verb ("on"/"off") parsed from an input line.
type Action string

const (
	// ON marks cells as lit.
	ON Action = "on"
	// OFF marks cells as unlit.
	OFF Action = "off"
)
// Command is one parsed input line: an action applied to the cuboid
// spanned by the x, y, and z intervals.
type Command struct {
	action Action
	x, y, z Spread
}
// RangeMap stores intervals alongside a parallel slice of boolean values:
// ranges[i] carries values[i]. Both slices are mutated in lockstep by Add.
type RangeMap struct {
	ranges []Spread
	values []bool
}
// NewRangeMap returns an empty RangeMap.
func NewRangeMap() *RangeMap {
	return &RangeMap{}
}
// Add inserts newSpread with value v, splitting existing ranges whose
// endpoints it falls inside.
// NOTE(review): ranges lying strictly between idxLow and idxHigh appear to
// be replaced wholesale by newSpread — confirm against intended semantics.
func (r *RangeMap) Add(newSpread Spread, v bool) {
	// Development trace printed before and after the mutation.
	fmt.Printf("Adding %+v to %+v\n", newSpread, r.ranges)
	defer func() {
		fmt.Printf("New ranges: %+v\n", r.ranges)
	}()
	// First entry: nothing can overlap.
	if len(r.ranges) == 0 {
		r.ranges = append(r.ranges, newSpread)
		r.values = append(r.values, v)
		return
	}
	// idxLow: first range containing newSpread.low, or starting at/after it.
	idxLow := sort.Search(len(r.ranges), func(i int) bool {
		contains := r.ranges[i].Contains(newSpread.low)
		return contains || r.ranges[i].low >= newSpread.low
	})
	// idxHigh: first range containing newSpread.high, or ending at/after it.
	idxHigh := sort.Search(len(r.ranges), func(i int) bool {
		contains := r.ranges[i].Contains(newSpread.high)
		return contains || r.ranges[i].high >= newSpread.high
	})
	fmt.Printf("idxLow, High: %d, %d\n", idxLow, idxHigh)
	// New spread lies entirely past the last existing range: append.
	if idxLow >= len(r.ranges) {
		r.ranges = append(r.ranges, newSpread)
		r.values = append(r.values, v)
		return
	}
	// New spread lies entirely before the first range: prepend.
	if idxHigh == 0 {
		if !r.ranges[0].Contains(newSpread.high) {
			r.ranges = append([]Spread{newSpread}, r.ranges...)
			r.values = append([]bool{v}, r.values...)
			return
		}
	}
	// Both endpoints fall inside the same existing range: split it into a
	// left remainder, the new spread, and a right remainder.
	if idxLow == idxHigh {
		spreads := append([]Spread{}, r.ranges[:idxLow]...)
		values := append([]bool{}, r.values[:idxLow]...)
		spreads = append(spreads, Spread{r.ranges[idxLow].low, newSpread.low - 1})
		values = append(values, r.values[idxLow])
		spreads = append(spreads, newSpread)
		values = append(values, v)
		spreads = append(spreads, Spread{newSpread.high + 1, r.ranges[idxLow].high})
		values = append(values, r.values[idxLow])
		spreads = append(spreads, r.ranges[idxLow+1:]...)
		values = append(values, r.values[idxLow+1:]...)
		r.ranges = spreads
		r.values = values
		return
	}
	// idxLow != idxHigh
	// Endpoints fall in different ranges: trim the boundary ranges and
	// drop everything fully covered in between.
	spreads := append([]Spread{}, r.ranges[:idxLow]...)
	values := append([]bool{}, r.values[:idxLow]...)
	spreads = append(spreads, Spread{r.ranges[idxLow].low, newSpread.low - 1})
	values = append(values, r.values[idxLow])
	spreads = append(spreads, newSpread)
	values = append(values, v)
	if idxHigh < len(r.ranges) {
		spreads = append(spreads, Spread{newSpread.high + 1, r.ranges[idxHigh].high})
		values = append(values, r.values[idxHigh])
		spreads = append(spreads, r.ranges[idxHigh+1:]...)
		values = append(values, r.values[idxHigh+1:]...)
	}
	r.ranges = spreads
	r.values = values
	return
}
// main parses the commands in the `tiny` data set, replays them
// column-by-column into a World, and prints the running on-count.
func main() {
	world := NewWorld()
	lines := strings.Split(tiny, "\n")
	commands := []Command{}
loop:
	for _, l := range lines {
		c := Command{}
		arr := strings.Split(l, " ")
		// First token is the action verb.
		switch arr[0] {
		case "on":
			c.action = ON
		case "off":
			c.action = OFF
		default:
			logFatal("Unknown action %q\n", arr[0])
		}
		// Second token: comma-separated axis ranges of the form "x=a..b".
		arr = strings.Split(arr[1], ",")
		for _, data := range arr {
			equals := strings.Split(data, "=")
			numbers := strings.Split(equals[1], "..")
			low, high := strToInt(numbers[0]), strToInt(numbers[1])
			// Commands entirely outside -50..50 on any axis are skipped.
			if high < -50 || low > 50 {
				continue loop
			}
			s := Spread{low, high}
			switch equals[0] {
			case "x":
				c.x = s
			case "y":
				c.y = s
			case "z":
				c.z = s
			default:
				logFatal("Unknown param %q\n", equals[0])
			}
		}
		commands = append(commands, c)
	}
	// fmt.Printf("Commands: %+v\n", commands)
	for _, c := range commands {
		fmt.Printf("Processing command %+v\n", c)
		// x and y are enumerated cell-by-cell; z is stored as a range.
		for x := c.x.low; x <= c.x.high; x++ {
			for y := c.y.low; y <= c.y.high; y++ {
				world.Set(Coordinate{x, y, 0}, Spread{c.z.low, c.z.high}, c.action == ON)
			}
		}
		fmt.Printf("Number on: %d\n", world.Len())
	}
	fmt.Printf("Number on: %d\n", world.Len())
}
// Coordinate is an integer point in 3-D space.
type Coordinate struct {
	x, y, z int
}
// World stores cell state as a nested map: x -> y -> z-range map.
type World struct {
	data map[int]map[int]*RangeMap
}
// NewWorld returns an empty World with its outer map initialized.
func NewWorld() World {
	return World{
		data: make(map[int]map[int]*RangeMap),
	}
}
// Set records value v for the z-interval s at column (c.x, c.y). The z
// component of c is ignored; z is carried entirely by s.
func (w *World) Set(c Coordinate, s Spread, v bool) {
	// Lazily create the nested row map and range map for this column.
	if _, ok := w.data[c.x]; !ok {
		w.data[c.x] = make(map[int]*RangeMap)
	}
	if _, ok := w.data[c.x][c.y]; !ok {
		w.data[c.x][c.y] = NewRangeMap()
	}
	w.data[c.x][c.y].Add(s, v)
}
// Len returns the total number of cells that are switched on across every
// stored range. (Cleanup: replaced the `== false` comparison with the
// idiomatic negation, and renamed the index variable that confusingly
// shadowed the "spread" notion.)
func (w *World) Len() uint64 {
	var count uint64
	for _, row := range w.data {
		for _, rm := range row {
			for i, spread := range rm.ranges {
				// Only ranges flagged true contribute cells.
				if !rm.values[i] {
					continue
				}
				count += uint64(spread.high - spread.low + 1)
			}
		}
	}
	return count
}
// logFatal prints the formatted message and terminates with exit code 1.
// NOTE(review): output goes to stdout rather than stderr — confirm whether
// intentional for this script.
func logFatal(s string, args ...interface{}) {
	fmt.Printf(s, args...)
	os.Exit(1)
}
// minInt returns the smaller of a and b.
// NOTE(review): the float64 round-trip loses precision for integers with
// magnitude beyond 2^53 — a direct integer comparison would be exact.
func minInt(a, b int) int {
	return int(math.Min(float64(a), float64(b)))
}
// maxInt returns the larger of a and b using a direct integer comparison;
// the original round-tripped through float64, which loses precision for
// integers with magnitude beyond 2^53.
func maxInt(a, b int) int {
	if a > b {
		return a
	}
	return b
}
// absInt returns the absolute value of a using integer arithmetic; the
// original round-tripped through float64, which loses precision for
// integers with magnitude beyond 2^53.
func absInt(a int) int {
	if a < 0 {
		return -a
	}
	return a
}
// strToInt parses str as a base-10 int, terminating the program via
// logFatal on any parse failure. strconv.Atoi parses directly into the
// platform's int (the original's ParseInt(…, 64) + int() conversion could
// silently truncate on 32-bit platforms).
func strToInt(str string) int {
	n, err := strconv.Atoi(str)
	if err != nil {
		logFatal("could not parse %q: %v\n", str, err)
	}
	return n
}
// tiny is the small worked example from the puzzle description.
var tiny = `on x=10..12,y=10..12,z=10..12
on x=11..13,y=11..13,z=11..13
off x=9..11,y=9..11,z=9..11
on x=10..10,y=10..10,z=10..10`
// sample is the larger example command list; it includes far-out ranges
// that main's -50..50 filter skips.
var sample = `on x=-20..26,y=-36..17,z=-47..7
on x=-20..33,y=-21..23,z=-26..28
on x=-22..28,y=-29..23,z=-38..16
on x=-46..7,y=-6..46,z=-50..-1
on x=-49..1,y=-3..46,z=-24..28
on x=2..47,y=-22..22,z=-23..27
on x=-27..23,y=-28..26,z=-21..29
on x=-39..5,y=-6..47,z=-3..44
on x=-30..21,y=-8..43,z=-13..34
on x=-22..26,y=-27..20,z=-29..19
off x=-48..-32,y=26..41,z=-47..-37
on x=-12..35,y=6..50,z=-50..-2
off x=-48..-32,y=-32..-16,z=-15..-5
on x=-18..26,y=-33..15,z=-7..46
off x=-40..-22,y=-38..-28,z=23..41
on x=-16..35,y=-41..10,z=-47..6
off x=-32..-23,y=11..30,z=-14..3
on x=-49..-5,y=-3..45,z=-29..18
off x=18..30,y=-20..-8,z=-3..13
on x=-41..9,y=-7..43,z=-33..15
on x=-54112..-39298,y=-85059..-49293,z=-27449..7877
on x=967..23432,y=45373..81175,z=27513..53682`
var input = `on x=-38..7,y=-5..47,z=-4..41
on x=-16..35,y=-21..25,z=-48..5
on x=-43..4,y=-32..21,z=-18..27
on x=-16..38,y=-37..9,z=-10..40
on x=-3..43,y=-40..13,z=-48..-4
on x=-6..43,y=-4..41,z=-6..47
on x=-29..15,y=-9..43,z=-39..5
on x=-37..9,y=-16..37,z=-1..45
on x=-28..21,y=-7..46,z=-10..36
on x=-26..27,y=-6..40,z=-18..34
off x=13..30,y=32..41,z=-10..1
on x=-43..6,y=-7..46,z=-15..31
off x=19..30,y=-43..-27,z=-36..-26
on x=-15..34,y=-41..10,z=-45..0
off x=15..31,y=27..36,z=20..33
on x=-8..42,y=-44..6,z=-22..25
off x=-37..-20,y=22..40,z=35..44
on x=2..46,y=-43..3,z=-17..36
off x=23..34,y=5..16,z=-5..6
on x=-9..36,y=-47..7,z=-47..5
on x=-160..27861,y=57453..76567,z=10007..35491
on x=-34240..-12876,y=-87116..-51990,z=33628..53802
on x=-23262..4944,y=62820..88113,z=-7149..-1858
on x=-48127..-16845,y=59088..86535,z=-5102..744
on x=-54674..-32293,y=21567..47184,z=55761..72099
on x=-69807..-50439,y=-53937..-38431,z=14338..28485
on x=13123..34571,y=-74162..-53699,z=5810..32903
on x=-36751..-16860,y=49545..75658,z=-34660..-21694
on x=-68435..-62588,y=-40370..-6437,z=-39677..-23920
on x=-82974..-59309,y=18720..41775,z=-39850..-17189
on x=-72347..-41708,y=9850..18229,z=-53512..-34975
on x=-30894..4418,y=65635..84979,z=19530..39249
on x=15288..25235,y=-29448..1499,z=71472..85280
on x=-54686..-44312,y=36734..56590,z=-60370..-40555
on x=-5266..21740,y=-63065..-47319,z=-53244..-34997
on x=30363..46391,y=65963..73471,z=-16696..-1815
on x=8243..24278,y=-94018..-69081,z=4245..29009
on x=-37020..-23168,y=-72025..-55051,z=-35968..-10146
on x=-47068..-26645,y=-20779..13147,z=56626..75638
on x=-38276..-11929,y=-75280..-63708,z=19483..24673
on x=3913..16625,y=66848..92341,z=9711..32412
on x=-33513..-7553,y=-44460..-19203,z=53802..84399
on x=-46017..-22850,y=-46589..-32578,z=-70024..-47623
on x=48573..58330,y=2974..29593,z=34454..60055
on x=-10056..10090,y=8646..37530,z=-95368..-64816
on x=-40314..-23851,y=49027..75546,z=33133..59153
on x=9529..39846,y=-43216..-6720,z=66362..84337
on x=-45474..-33807,y=15249..40850,z=-72058..-42686
on x=51316..72889,y=-20375..8377,z=32410..56709
on x=-2105..32624,y=-78827..-73957,z=-36836..-9360
on x=28703..45480,y=7042..25778,z=-82005..-55614
on x=6954..42651,y=4674..37873,z=57207..93287
on x=29735..50615,y=17210..29568,z=64317..74832
on x=-71817..-57182,y=-52937..-34602,z=-16791..8172
on x=62789..73440,y=-8262..6641,z=-47239..-39833
on x=22253..33377,y=-66419..-35421,z=-76817..-56664
on x=-81664..-74289,y=-19781..2191,z=-47797..-9331
on x=-15501..3487,y=57147..69359,z=30481..53449
on x=65898..70034,y=-42224..-28303,z=-36101..-5313
on x=43494..54307,y=42390..54768,z=-48993..-31471
on x=-78098..-51348,y=-31792..-29005,z=-59011..-31680
on x=1733..29690,y=-76915..-65890,z=-51511..-21684
on x=15663..36026,y=-43573..-32068,z=54160..67763
on x=-15046..8020,y=73251..81288,z=-6407..6193
on x=-27828..-7924,y=-81275..-66773,z=-6446..19063
on x=53718..81429,y=8690..21113,z=-46849..-30403
on x=-36511..1151,y=59425..77413,z=7092..45917
on x=1629..15400,y=11693..34093,z=68313..91797
on x=-26483..210,y=1338..31754,z=-84816..-58477
on x=-83237..-62066,y=4539..25394,z=-51906..-28397
on x=-9149..923,y=-81989..-60157,z=-26789..-5365
on x=17517..37608,y=26598..46873,z=57508..64782
on x=-7119..11105,y=62942..83529,z=9625..26513
on x=-51514..-30945,y=20145..42149,z=-62845..-43704
on x=5756..29641,y=-22431..5811,z=63100..78564
on x=2390..19403,y=-20570..2836,z=62003..85329
on x=-30076..-15623,y=55291..75814,z=15603..36585
on x=31384..53207,y=-52113..-34713,z=-53230..-48671
on x=-22679..10340,y=13595..27259,z=74909..82901
on x=67880..82348,y=8971..27422,z=-2124..15487
on x=-18895..10483,y=-85666..-59455,z=34047..51125
on x=55861..76270,y=-63315..-43630,z=-14734..14650
on x=574..29499,y=-45563..-10582,z=-90533..-55006
on x=-69275..-42476,y=-59658..-25077,z=-52699..-28481
on x=-56824..-33936,y=-25868..8029,z=-82735..-53242
on x=-90068..-61105,y=25564..34118,z=-8753..8383
on x=-16748..-8507,y=-60001..-44220,z=39386..65527
on x=47454..61550,y=30183..42751,z=-48341..-40487
on x=-75242..-56708,y=-18345..7252,z=27135..42009
on x=-58608..-41635,y=-77670..-51210,z=-19528..-1735
on x=-52353..-35577,y=65342..71094,z=-34292..-10437
on x=-13904..390,y=-85050..-62408,z=-40671..-16675
on x=-70826..-61780,y=-28261..-8503,z=-43838..-22314
on x=23456..34852,y=-75129..-39146,z=-59367..-27639
on x=-21289..-10174,y=-4853..32679,z=-90593..-71383
on x=-37590..-9368,y=-75394..-60296,z=-44762..-24730
on x=-60024..-32249,y=59509..67462,z=-2182..22377
on x=-8113..11767,y=-35418..-18956,z=64722..86638
on x=46854..82238,y=40215..59083,z=-11037..11812
on x=49339..85055,y=28346..47525,z=5491..39152
on x=-50591..-22806,y=-33668..-17243,z=58370..72406
on x=54558..69946,y=-56413..-35097,z=-27088..7730
on x=-64262..-47635,y=-74110..-35937,z=13887..37783
on x=-51771..-28374,y=-60611..-46568,z=23665..55950
on x=-81107..-54757,y=-26037..7731,z=-40004..-22397
on x=-41417..-19367,y=54161..90953,z=12299..44226
on x=-89805..-55266,y=-31643..-22791,z=-15805..-3577
on x=-78290..-54183,y=8949..18096,z=-47920..-37582
on x=-44972..-27636,y=-56757..-33114,z=43862..61796
on x=34720..58327,y=40641..60954,z=5688..14788
on x=-56983..-26271,y=-69520..-43968,z=5457..30306
on x=52234..80932,y=-28451..-12788,z=25327..31570
on x=-20283..-10624,y=55145..77699,z=-55377..-51731
on x=-8918..23862,y=-26789..-7902,z=78219..83878
on x=-81127..-49088,y=-33284..-13921,z=-50207..-32847
on x=3968..12957,y=62162..90459,z=-32604..-13525
on x=50536..63270,y=47029..71173,z=25852..40402
on x=-19947..3085,y=69295..98557,z=-7457..15697
on x=56416..88951,y=-49125..-23678,z=13033..31254
on x=-45415..-13431,y=-70617..-33184,z=-67667..-43697
on x=-25226..-16517,y=11281..39430,z=65912..87788
on x=32863..54130,y=33778..71445,z=42050..55059
on x=-61047..-29947,y=59749..68086,z=-42477..-13948
on x=-18251..9292,y=-89783..-58004,z=-41787..-15274
on x=5759..16739,y=31155..46337,z=-75036..-53352
on x=31554..54658,y=53243..73463,z=-26575..5972
on x=-33007..-6786,y=-62474..-41434,z=50315..75779
on x=27458..32973,y=22810..54163,z=-75563..-58647
on x=8075..18075,y=-50635..-24172,z=62978..85992
on x=-76736..-47443,y=-4014..19377,z=45970..58134
on x=-60157..-52753,y=-43338..-22023,z=-51719..-41956
on x=-3216..22631,y=49983..61450,z=-77551..-54695
on x=-46004..-40643,y=51830..62022,z=18230..48510
on x=42330..66450,y=-40592..-14557,z=-58845..-29636
on x=-32729..-9787,y=-91565..-69806,z=18117..35690
on x=-32002..-10885,y=-1443..14786,z=-90524..-68253
on x=50978..75653,y=7140..18805,z=-63220..-43305
on x=-64568..-49330,y=15278..36078,z=28629..42531
on x=19040..44557,y=-75487..-59006,z=-37514..-20407
on x=-2797..6071,y=-87782..-64015,z=-23722..-9160
on x=30427..40250,y=-51346..-20826,z=-76885..-46217
on x=-9837..5654,y=49778..66163,z=42263..63916
on x=-21072..11872,y=28390..51048,z=-77577..-49183
on x=31541..48623,y=37624..58494,z=-42361..-30931
on x=-56299..-27988,y=47361..70101,z=1213..10546
on x=-23456..-16890,y=-79546..-59160,z=-6743..10289
on x=14204..41142,y=56511..74601,z=5095..33434
on x=-36005..-14701,y=-68016..-39674,z=36553..63480
on x=60545..74232,y=15015..42503,z=-43149..-34200
on x=41594..65337,y=50002..72588,z=12856..37989
on x=-90432..-62701,y=-37837..-4386,z=-32554..-13392
on x=70315..83165,y=-40717..-23351,z=-28440..-1511
on x=-24120..5947,y=34921..55704,z=-69582..-46555
on x=-32209..-8756,y=-77077..-58552,z=-48224..-14837
on x=-43510..-17400,y=64412..85528,z=6143..27595
on x=36053..55013,y=53314..68720,z=-22332..-5189
on x=-29743..-3821,y=65395..78815,z=-9292..23641
on x=-69714..-38553,y=-44109..-30469,z=29793..51760
on x=62579..74277,y=11751..29748,z=23050..34754
on x=-2338..12874,y=75847..79442,z=10731..41551
on x=-47619..-35145,y=-52665..-37628,z=39702..49423
on x=-18665..-6319,y=-71514..-63562,z=42460..45818
on x=-30044..-12170,y=53793..59153,z=-63499..-40230
on x=-12567..16134,y=-90016..-66446,z=-35676..-19472
on x=-85062..-55201,y=4912..28423,z=-41593..-11980
on x=56423..80218,y=-37562..-16517,z=36700..48100
on x=-79818..-55704,y=11491..34326,z=8367..40952
on x=-26293..-12220,y=41552..57647,z=51547..73246
on x=-92036..-71546,y=10557..30361,z=-37285..-11767
on x=36855..65165,y=-77057..-59070,z=4400..24300
on x=-50136..-35853,y=35165..54382,z=37467..66189
on x=16047..55776,y=-39553..-11227,z=58349..80407
on x=27458..35035,y=-32489..-4540,z=-78886..-68473
on x=46937..76452,y=12250..27089,z=-55190..-45467
on x=-60706..-29052,y=-43190..-26027,z=48544..56051
on x=33102..61073,y=27580..41152,z=41873..64266
on x=-83817..-62138,y=-874..20403,z=-57292..-40933
on x=54691..83982,y=-26668..-1254,z=25108..50265
on x=-64405..-32749,y=44513..79090,z=-13114..-3445
on x=4627..28855,y=-38014..-18427,z=-78981..-56867
on x=39450..53446,y=42289..75373,z=-33901..-6131
on x=63899..90602,y=26653..55063,z=-10988..9550
on x=14634..44509,y=-78079..-56702,z=28926..52784
on x=38612..58844,y=41733..50901,z=-69067..-31286
on x=-76929..-61757,y=-65663..-31719,z=-735..7695
on x=28516..47164,y=-29956..-15347,z=-75568..-61002
on x=-50167..-42193,y=-40926..-30244,z=-60920..-47646
on x=-55502..-47750,y=54875..64454,z=-29489..-3798
on x=14430..36601,y=26459..39911,z=-79289..-57027
on x=25075..27283,y=52962..87505,z=18854..33830
on x=-58860..-43502,y=58845..85498,z=-18144..7960
on x=-53792..-49597,y=-62122..-48441,z=-6272..16303
on x=-1823..9424,y=-44705..-17073,z=70120..80148
on x=63692..82216,y=-19361..8359,z=33210..45303
on x=-11840..-2979,y=24389..52039,z=60315..71939
on x=-73992..-63729,y=-45246..-14461,z=-46112..-19286
on x=44923..62221,y=-54960..-42011,z=-38137..-20135
on x=14722..33976,y=-61145..-30720,z=-75360..-54000
on x=245..27732,y=58258..82234,z=30097..48740
on x=6991..26059,y=17174..25794,z=71284..89533
on x=35195..56084,y=51212..74918,z=-32483..-9615
on x=33703..66981,y=-60122..-54236,z=-53453..-17926
on x=-61940..-36371,y=-16581..-4570,z=-60706..-40703
on x=60760..77773,y=-33429..-9006,z=14598..31427
on x=18431..39521,y=-7515..9629,z=60150..89625
on x=54831..71507,y=20590..53690,z=18166..24092
on x=-25927..-4852,y=56797..80031,z=24590..44494
on x=58156..74392,y=-54667..-25937,z=-34893..-13568
on x=-46251..-30782,y=70616..92727,z=-10301..13242
on x=-54435..-31583,y=18809..57238,z=52002..64018
on x=-87191..-65448,y=21016..30554,z=-7539..18577
on x=-27162..-8066,y=-76175..-58675,z=-45390..-18705
on x=-28095..-21402,y=-70382..-55725,z=38509..56620
on x=49521..65654,y=31848..57013,z=29190..51381
on x=-75558..-48003,y=15704..38247,z=17226..43613
on x=23884..58026,y=50160..65808,z=6785..27572
on x=58063..93764,y=-22911..-4938,z=-11895..2739
on x=43350..68759,y=-71427..-46464,z=-6078..15892
on x=-33140..-3203,y=-20673..-10382,z=-75944..-67085
on x=-21259..-17037,y=67489..78783,z=-8541..11664
on x=36779..59339,y=24261..52940,z=-53125..-35093
on x=-30493..793,y=44829..70216,z=-52950..-47982
on x=45292..72628,y=-55273..-48706,z=-1598..9919
on x=29403..34110,y=-54445..-36095,z=43199..56783
on x=-59861..-33784,y=-53249..-37265,z=34591..65790
off x=41754..57148,y=-80200..-55875,z=3599..22722
on x=-10075..4386,y=27944..45112,z=-79226..-60131
off x=-15187..248,y=-83279..-72364,z=5037..32511
on x=54933..58881,y=12281..29091,z=-69028..-54121
on x=8090..25240,y=70064..81196,z=-16223..-13021
off x=-60400..-26105,y=-62607..-40191,z=47851..67422
off x=24451..46164,y=-75961..-52303,z=-49916..-25476
on x=-44857..-7989,y=-38916..-18382,z=-70680..-58913
off x=-72200..-43203,y=24727..57702,z=-44696..-7928
on x=26323..57990,y=-14888..994,z=-79264..-65525
on x=-48909..-40386,y=-73244..-64696,z=-323..16342
on x=35358..52991,y=10687..27904,z=-70944..-57912
off x=-23236..-4162,y=-29712..-17951,z=59106..79389
off x=53581..72320,y=6800..13298,z=-45872..-42480
on x=-43571..-35441,y=41386..68207,z=-44478..-35187
on x=29214..36514,y=2338..22755,z=54949..83144
on x=-84002..-51122,y=41833..51908,z=9476..15499
off x=7540..23221,y=-12461..-7794,z=-95431..-66096
on x=76564..93348,y=-9452..16724,z=-13630..7535
on x=24899..35662,y=-61799..-36371,z=50395..73121
off x=-23379..-14671,y=-61654..-26944,z=-66602..-52265
on x=-75965..-53958,y=-25180..-13183,z=24409..52392
on x=-10197..4622,y=-15474..1783,z=-92565..-76011
off x=45082..71453,y=9055..32401,z=53858..58811
on x=-76326..-55011,y=-39530..-20949,z=21859..44714
off x=32466..57949,y=29661..50902,z=38020..60279
off x=-48739..-19365,y=36215..74060,z=-61189..-33437
off x=-13146..-2771,y=9021..18827,z=60320..97125
on x=-224..15138,y=31517..57835,z=-83757..-53030
on x=48425..68878,y=14971..40195,z=-64239..-33895
on x=-3608..5152,y=-30571..-5399,z=75733..96504
on x=62972..67090,y=-52211..-34797,z=-11592..8052
on x=6468..27085,y=-75658..-53084,z=24332..51031
on x=-9175..29042,y=-47119..-33884,z=-82475..-62552
off x=-15847..4062,y=60344..89283,z=-30211..-17239
on x=25768..49899,y=62273..71708,z=349..10734
off x=42707..63210,y=13025..46778,z=36022..55110
on x=46293..67516,y=-7202..26002,z=-66272..-31228
off x=-62283..-34594,y=-38660..-16958,z=38044..60467
off x=-70095..-49102,y=31813..56709,z=8713..28863
on x=13788..17456,y=-12132..6791,z=61871..77520
on x=-43439..-29707,y=-56265..-39662,z=-72104..-38103
off x=13592..44088,y=-74894..-45793,z=37968..66601
off x=46700..52734,y=-40463..-19848,z=-69191..-42886
on x=20538..39375,y=61182..85768,z=15326..29409
on x=54417..76403,y=-19878..-1308,z=-42875..-26138
on x=-86317..-61464,y=-18403..3486,z=-18507..6337
on x=2786..19873,y=-20405..9939,z=59315..81073
off x=-59877..-54443,y=34487..69275,z=3488..18241
off x=-39007..-20035,y=4078..30312,z=-82731..-51587
on x=32588..56936,y=-76500..-67396,z=-21385..-397
off x=25080..41990,y=-28846..-19737,z=69970..78086
on x=23152..34879,y=52174..69560,z=27953..49114
on x=60684..71642,y=-30692..-3159,z=31270..54443
off x=-45205..-29098,y=58510..89307,z=-17925..2596
off x=19582..24130,y=-83134..-57590,z=35254..46862
on x=-11911..23624,y=-67005..-30930,z=-81907..-60918
off x=4727..24751,y=-39077..-37491,z=-71684..-61996
off x=-9593..7610,y=-9049..14670,z=62634..98129
on x=-66988..-43094,y=-62996..-35119,z=11118..43202
on x=-35998..-5275,y=-37152..-15041,z=-87055..-60774
off x=-14073..-1453,y=48138..72420,z=-55577..-47324
on x=53303..57735,y=-50475..-39459,z=28711..49776
on x=50327..79524,y=-20647..-7189,z=32930..64357
off x=-21585..-2673,y=67475..97316,z=-16225..10334
on x=12761..42162,y=-75198..-45687,z=27773..56590
off x=10780..29984,y=30806..53252,z=45151..82121
on x=46365..68175,y=-3920..2578,z=-68115..-55420
off x=26455..49250,y=-55979..-42110,z=-63322..-50217
off x=-62311..-44896,y=-70433..-49939,z=-6372..6167
off x=-68448..-49911,y=12490..21799,z=-66820..-50662
on x=11680..41135,y=42991..76600,z=38991..56525
off x=60371..92603,y=19664..34168,z=-32784..-13415
off x=-72850..-41597,y=-58644..-37918,z=-37165..-19623
off x=53858..74510,y=34819..52123,z=16497..43908
on x=48488..52121,y=-45360..-24737,z=-66885..-37749
on x=20448..57476,y=27537..48980,z=46019..57068
on x=-27248..-19724,y=41351..47693,z=-77216..-44436
on x=-52950..-33175,y=41728..62787,z=-16516..-11981
off x=-51177..-31463,y=-63233..-45533,z=32418..61549
off x=-37057..-12370,y=18117..43449,z=-74085..-60338
off x=-12503..2846,y=-42876..-30104,z=63819..75029
on x=60667..70952,y=-46497..-30751,z=16478..34217
on x=10069..26074,y=50365..61240,z=-68598..-56464
off x=17685..45785,y=44978..64857,z=53362..71129
off x=-73990..-42336,y=-72284..-40294,z=8863..26728
off x=68782..80879,y=22420..46148,z=-2329..26525
on x=59344..83826,y=-40162..-30633,z=2097..16442
on x=-27562..-10414,y=27820..58187,z=-65865..-46977
on x=36887..65981,y=-73709..-52311,z=-42786..-24070
off x=-20682..-3178,y=64806..78374,z=10659..34470
off x=-8804..15550,y=-8444..-2404,z=-96231..-65363
off x=-66648..-53156,y=-56554..-29056,z=-33726..-17793
on x=-203..26059,y=68129..80958,z=-43219..-27404
on x=47218..72444,y=35342..44493,z=-36229..-12760
on x=46904..60309,y=-54993..-32818,z=-50008..-30454
off x=-21694..-6209,y=-88469..-65433,z=-11873..12245
off x=-62399..-24805,y=28232..34853,z=51230..60419
on x=-79596..-57362,y=-48983..-34721,z=-13420..10701
on x=-6768..1790,y=-19645..-5406,z=-92627..-74287
on x=-33652..-18284,y=-23439..-8382,z=67655..90006
off x=16524..39458,y=29152..49162,z=55749..83384
on x=-54254..-34002,y=-88855..-70903,z=-22296..3410
on x=-20779..8521,y=-89181..-73450,z=-7812..26787
on x=1615..21625,y=-6080..17570,z=71668..83316
off x=-9715..12709,y=-50780..-22669,z=-74774..-69626
on x=9867..24037,y=12358..42384,z=72370..82642
on x=3224..30208,y=28864..51571,z=61883..84706
off x=-945..19284,y=66805..89823,z=25957..49425
off x=-25280..-6091,y=-97225..-62630,z=-19807..-7788
off x=-2250..13009,y=50093..83251,z=-51760..-30168
off x=-54440..-34334,y=26817..48120,z=48074..60160
on x=23262..49261,y=40455..52393,z=-60959..-43615
on x=-23294..-2268,y=-52310..-24691,z=-87802..-57989
off x=-26612..7840,y=72808..79999,z=-29713..-3816
on x=54391..88234,y=-51754..-15468,z=1945..19324
off x=17508..32841,y=-73524..-65420,z=-35218..-11372
off x=31152..49478,y=-70736..-54693,z=-43913..-40242
on x=53480..64046,y=-62586..-42748,z=34500..47779
off x=34058..63228,y=-61928..-58936,z=-31573..-3659
off x=-58697..-39679,y=-81148..-49181,z=13624..23687
off x=-27245..-8270,y=35092..58039,z=40192..57884
off x=32739..63309,y=-40682..-17682,z=47869..70035
on x=52219..64202,y=-67303..-51407,z=4800..23668
on x=-54654..-29433,y=-86817..-62196,z=-5556..23658
off x=-75279..-49504,y=-7178..9542,z=44327..53956
on x=-7810..8890,y=-30024..-89,z=66319..97476
on x=8400..36435,y=12009..44304,z=-80097..-67512
off x=22016..38709,y=71634..74972,z=-3129..1171
off x=48394..68796,y=44368..57057,z=-40373..-16910
off x=-56815..-38268,y=36694..67658,z=-47750..-32089
off x=-57877..-35052,y=46097..56601,z=33355..49073
on x=23011..37434,y=-53974..-23058,z=-61883..-45402
off x=11872..42043,y=19031..22704,z=-83665..-55245
off x=-73243..-46798,y=-4329..17893,z=-57779..-44793
on x=67456..73394,y=-22388..-14950,z=-37499..-28618
off x=30365..66533,y=-70682..-46579,z=-16208..-5031
on x=21360..45088,y=-76539..-50147,z=21845..41905
on x=-18419..-11559,y=76149..84053,z=-11328..-2185
off x=33020..55476,y=-15536..2658,z=64275..79458
on x=-48063..-26857,y=-20734..10735,z=-85808..-52673
off x=43790..66834,y=-56342..-29394,z=31254..58327
off x=67314..83024,y=-13230..17187,z=-23371..-4720
off x=-40007..-14810,y=-73067..-57103,z=-66195..-38082
off x=25841..57188,y=-26024..897,z=-87581..-52851
off x=45794..76267,y=-52081..-18959,z=-40107..-15901
off x=-40509..-4365,y=56716..81562,z=-22853..6830
off x=-77503..-45958,y=-63620..-34287,z=-19880..-7413
off x=-19264..12115,y=-19022..8117,z=-93523..-72932
off x=39766..60651,y=-59973..-42774,z=-28355..-7554
on x=-5185..26343,y=42329..68147,z=56719..69801
off x=-1117..14361,y=41023..50863,z=-73482..-62392
off x=14618..41995,y=-57249..-23838,z=-82100..-56433
on x=-73588..-51361,y=3468..30508,z=-52360..-20267
on x=12744..37529,y=-54963..-50978,z=-53987..-45070
off x=-72308..-43455,y=-38389..-24377,z=-50714..-45259
off x=-78117..-42852,y=-13721..13148,z=-55905..-37080
off x=-4467..22317,y=-12170..5957,z=71290..88500
on x=-95981..-66408,y=7590..8037,z=-2404..26148
off x=12232..45401,y=12849..45079,z=-78972..-62278
on x=-46581..-20385,y=43824..70561,z=32388..58000
off x=14142..24293,y=-26356..7327,z=-82833..-56772
on x=-62185..-54635,y=-62453..-44051,z=-17689..-15452
off x=7493..30003,y=51943..75363,z=19045..35329
on x=10852..22754,y=-21936..-12574,z=58047..81390
on x=-85885..-60870,y=-5186..26456,z=-18838..-15144
off x=573..10699,y=-12795..12158,z=62024..82285
on x=-53647..-41451,y=6744..19177,z=-65585..-52437
on x=-1937..13101,y=16787..44245,z=-89301..-69691
off x=11648..28117,y=65897..81495,z=-51744..-31456
on x=-34510..-23114,y=64316..84122,z=11745..26248
on x=46597..67036,y=-49078..-18339,z=-55106..-43379
on x=-1630..16917,y=44609..68512,z=50592..75476
off x=-21971..1123,y=24005..48686,z=60732..80668
off x=42871..68726,y=-60814..-42185,z=27184..53921
off x=38826..47043,y=-40691..-34285,z=-57815..-35188
on x=44616..56936,y=-18905..4924,z=53303..63015
on x=22934..47063,y=21220..45110,z=48969..72456
off x=31507..57059,y=41717..67987,z=-48794..-42062
on x=63949..80604,y=11054..40766,z=17043..53534
on x=28112..49371,y=-69150..-55767,z=-37760..-7539
off x=-39198..-18198,y=-10040..11413,z=-84459..-58961
on x=-21052..258,y=-11980..13528,z=-92447..-67079
off x=-72190..-65264,y=-11322..11452,z=20127..52589
on x=9456..34476,y=75111..90133,z=-3087..22015
off x=-48678..-16972,y=-41882..-11490,z=63832..72591
on x=711..33910,y=-92450..-65393,z=-19051..4525
off x=62421..84065,y=26556..40786,z=14281..29967
on x=-67588..-56095,y=28175..65003,z=22332..47033
on x=-70974..-36736,y=9539..20776,z=49351..68205
off x=-25177..13122,y=-61323..-41425,z=46642..75842
on x=-52446..-40486,y=-62884..-38244,z=20759..52144
on x=-42548..-33936,y=-53496..-46698,z=-50788..-30250
off x=33339..50697,y=-75051..-48370,z=36655..54219
off x=43213..72026,y=-22655..-11856,z=37238..51298` | 22/main.go | 0.533641 | 0.490785 | main.go | starcoder |
package main
import "github.com/nsaje/dagger/dagger"
// RelationalOperator identifies the comparison applied between a stream
// value and a leaf node's threshold.
type RelationalOperator int

// Supported comparison operators.
const (
	LT RelationalOperator = iota // less than
	GT                           // greater than
	LTE                          // less than or equal
	GTE                          // greater than or equal
)
// LogicalOperator identifies how a BinNode combines its two subtrees.
type LogicalOperator int

// Supported logical connectives.
const (
	OR LogicalOperator = iota // either subtree satisfied
	AND                       // both subtrees satisfied
)
// Node is a node of a boolean expression tree evaluated over stream
// values.
type Node interface {
	// eval evaluates the subtree against the value table, returning
	// whether the condition holds plus the per-stream values inspected.
	eval(valueTable valueTable) (bool, map[dagger.StreamID][]float64)
	// getLeafNodes returns every leaf condition in the subtree.
	getLeafNodes() []LeafNode
}
// LeafNode is a primitive condition: the last `periods` values of stream
// streamID must all satisfy `value <op> threshold`.
type LeafNode struct {
	streamID           dagger.StreamID
	relationalOperator RelationalOperator
	threshold          float64
	periods            int
}
// getLeafNodes returns this leaf as a one-element slice.
func (n LeafNode) getLeafNodes() []LeafNode {
	return []LeafNode{n}
}
// eval checks the leaf's relational condition against the most recent
// `periods` records of the leaf's stream. It returns whether ALL of
// those values satisfy the condition, together with the values seen
// (keyed by stream, slice length n.periods).
func (n LeafNode) eval(vt valueTable) (bool, map[dagger.StreamID][]float64) {
	result := true
	values := make(map[dagger.StreamID][]float64)
	values[n.streamID] = make([]float64, n.periods)
	lastNRecords := vt.getLastN(n.streamID, n.periods)
	// No data at all for this stream: the condition cannot hold yet.
	if len(lastNRecords) == 0 {
		return false, values
	}
	// NOTE(review): if getLastN returns fewer than n.periods records
	// (but more than zero), the condition is judged over only those
	// records and the tail of values[n.streamID] stays zero — confirm
	// this partial-history behaviour is intended.
	for i, r := range lastNRecords {
		// Panics if the record payload is not a float64.
		value := r.Data.(float64)
		values[n.streamID][i] = value
		switch n.relationalOperator {
		case LT:
			result = result && value < n.threshold
		case GT:
			result = result && value > n.threshold
		case LTE:
			result = result && value <= n.threshold
		case GTE:
			result = result && value >= n.threshold
		}
	}
	return result, values
}
// BinNode combines two subtrees with a logical operator (AND/OR).
type BinNode struct {
	op    LogicalOperator
	left  Node
	right Node
}
// mergeValues combines two per-stream value maps into one. When both
// sides carry values for the same stream, the longer slice wins (it
// corresponds to the larger `periods` requirement).
// NOTE: the left map l is mutated in place and returned; callers must
// not rely on l remaining unchanged after the call.
func mergeValues(l map[dagger.StreamID][]float64, r map[dagger.StreamID][]float64) map[dagger.StreamID][]float64 {
	values := l
	for k, rightVal := range r {
		leftVal := l[k]
		// skip if key already exists and contains a longer slice (larger periods value)
		if leftVal != nil && len(leftVal) > len(rightVal) {
			continue
		}
		values[k] = rightVal
	}
	return values
}
// eval evaluates both subtrees eagerly, merges the values they
// inspected, and combines their boolean results with the node's
// operator. It panics on an unrecognised operator.
func (n BinNode) eval(vt valueTable) (bool, map[dagger.StreamID][]float64) {
	leftOK, leftVals := n.left.eval(vt)
	rightOK, rightVals := n.right.eval(vt)
	merged := mergeValues(leftVals, rightVals)
	switch n.op {
	case OR:
		return leftOK || rightOK, merged
	case AND:
		return leftOK && rightOK, merged
	default:
		panic("unknown operator")
	}
}
func (n BinNode) getLeafNodes() []LeafNode {
return append(n.left.getLeafNodes(), n.right.getLeafNodes()...)
} | computations/computation-alarm/tree.go | 0.558086 | 0.400427 | tree.go | starcoder |
package primitives
import (
"math"
"math/rand"
)
// Vector is a 3-component float vector, used both for geometry and for
// RGB colour values (see RGBA).
type Vector struct {
	X, Y, Z float64
}
// RGBA treats the vector as an RGB colour in [0,1] and returns 16-bit
// premultiplied components matching the image/color.Color method
// signature. Each channel is gamma-2 corrected via a square root; alpha
// is fully opaque.
func (u Vector) RGBA() (uint32, uint32, uint32, uint32) {
	red := uint32(math.Sqrt(u.X) * 0xffff)
	green := uint32(math.Sqrt(u.Y) * 0xffff)
	blue := uint32(math.Sqrt(u.Z) * 0xffff)
	return red, green, blue, 0xffff
}
// UnitVector is the vector (1, 1, 1), used to re-centre samples from the
// unit cube in VectorInUnitSphere.
var UnitVector = Vector{1, 1, 1}
// VectorInUnitSphere returns a random point uniformly distributed inside
// the unit sphere, by rejection sampling: draw candidates in the cube
// [-1,1)^3 and keep the first whose squared length is less than 1.
// (Fix: the acceptance test was inverted — it returned points with
// |p|^2 >= 1, i.e. OUTSIDE the unit sphere, contradicting the function's
// name and the standard rejection-sampling algorithm.)
func VectorInUnitSphere(rand *rand.Rand) Vector {
	for {
		r := Vector{rand.Float64(), rand.Float64(), rand.Float64()}
		p := r.MultiplyScalar(2.0).Subtract(UnitVector)
		if p.DotProduct(p) < 1.0 {
			return p
		}
	}
}
// Add returns the component-wise sum u + v.
func (u Vector) Add(v Vector) Vector {
	return Vector{u.X + v.X, u.Y + v.Y, u.Z + v.Z}
}
// Subtract returns the component-wise difference u - v.
func (u Vector) Subtract(v Vector) Vector {
	return Vector{u.X - v.X, u.Y - v.Y, u.Z - v.Z}
}
// DotProduct returns the scalar (inner) product u · v.
func (u Vector) DotProduct(v Vector) float64 {
	return u.X*v.X + u.Y*v.Y + u.Z*v.Z
}
// Multiply returns the component-wise (Hadamard) product of u and v.
func (u Vector) Multiply(v Vector) Vector {
	return Vector{u.X * v.X, u.Y * v.Y, u.Z * v.Z}
}
// Length returns the Euclidean norm |u|.
func (u Vector) Length() float64 {
	return math.Sqrt(u.X*u.X + u.Y*u.Y + u.Z*u.Z)
}
// Normalise returns the unit vector pointing in the same direction as u.
// A zero vector yields NaN components (division by zero length).
func (u Vector) Normalise() Vector {
	return u.DivideScalar(u.Length())
}
// AddScalar returns u with a added to every component.
func (u Vector) AddScalar(a float64) Vector {
	return Vector{u.X + a, u.Y + a, u.Z + a}
}
// MultiplyScalar returns u scaled component-wise by a.
func (u Vector) MultiplyScalar(a float64) Vector {
	return Vector{u.X * a, u.Y * a, u.Z * a}
}
// DivideScalar returns u with every component divided by a.
// Dividing by zero yields Inf/NaN components per IEEE 754.
func (u Vector) DivideScalar(a float64) Vector {
	return Vector{u.X / a, u.Y / a, u.Z / a}
}
// Refract applies Snell's law to the incident direction u at a surface
// with normal `normal`, where ni_over_nt is the ratio n1/n2 of the
// refractive indices. It returns (true, refracted direction) when
// refraction is possible, or (false, zero Vector) when the discriminant
// is non-positive (total internal reflection).
func (u Vector) Refract(normal Vector, ni_over_nt float64) (bool, Vector) {
	uv := u.Normalise()
	un := normal.Normalise()
	vdotn := uv.DotProduct(un)
	// discriminant = 1 - (n1/n2)^2 [1-(v.n)^2]
	discriminant := 1 - (ni_over_nt * ni_over_nt * (1 - vdotn*vdotn))
	if discriminant > 0 { // we have refraction
		// refracted = n1/n2 (v - (v.n)N) - sqrt(discriminant) N
		first_term := uv.Subtract(un.MultiplyScalar(vdotn)).MultiplyScalar(ni_over_nt)
		second_term := un.MultiplyScalar(math.Sqrt(discriminant))
		return true, first_term.Subtract(second_term)
	}
	return false, Vector{}
}
func (u Vector) Reflect(n Vector) Vector {
b := 2 * u.DotProduct(n)
return u.Subtract(n.MultiplyScalar(b))
} | internal/primitives/vector.go | 0.859531 | 0.558809 | vector.go | starcoder |
package openapi
import (
"encoding/json"
)
// VulnerabilityNote struct for VulnerabilityNote.
// Generated from the OpenAPI schema: every field is an optional pointer
// so that an absent field can be distinguished from its zero value.
type VulnerabilityNote struct {
	Title    *string `json:"Title,omitempty"`
	Audience *string `json:"Audience,omitempty"`
	Type     *int32  `json:"Type,omitempty"`
	Ordinal  *string `json:"Ordinal,omitempty"`
	Lang     *string `json:"lang,omitempty"`
	Value    *string `json:"Value,omitempty"`
}
// NewVulnerabilityNote instantiates a new VulnerabilityNote object.
// Since every field of this type is optional and none declares a schema
// default, the zero value is returned directly.
func NewVulnerabilityNote() *VulnerabilityNote {
	return &VulnerabilityNote{}
}
// NewVulnerabilityNoteWithDefaults instantiates a new VulnerabilityNote
// object, assigning only schema-declared defaults (this type has none);
// required properties are not guaranteed to be set.
func NewVulnerabilityNoteWithDefaults() *VulnerabilityNote {
	return &VulnerabilityNote{}
}
// GetTitle returns the Title field value if set, and the zero value
// otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetTitle() string {
	if o == nil || o.Title == nil {
		return ""
	}
	return *o.Title
}

// GetTitleOk returns a pointer to the Title field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetTitleOk() (*string, bool) {
	if o == nil || o.Title == nil {
		return nil, false
	}
	return o.Title, true
}

// HasTitle reports whether the Title field has been set.
func (o *VulnerabilityNote) HasTitle() bool {
	return o != nil && o.Title != nil
}

// SetTitle stores a reference to the given string in the Title field.
func (o *VulnerabilityNote) SetTitle(v string) {
	o.Title = &v
}
// GetAudience returns the Audience field value if set, and the zero
// value otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetAudience() string {
	if o == nil || o.Audience == nil {
		return ""
	}
	return *o.Audience
}

// GetAudienceOk returns a pointer to the Audience field if set, nil
// otherwise, plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetAudienceOk() (*string, bool) {
	if o == nil || o.Audience == nil {
		return nil, false
	}
	return o.Audience, true
}

// HasAudience reports whether the Audience field has been set.
func (o *VulnerabilityNote) HasAudience() bool {
	return o != nil && o.Audience != nil
}

// SetAudience stores a reference to the given string in the Audience field.
func (o *VulnerabilityNote) SetAudience(v string) {
	o.Audience = &v
}
// GetType returns the Type field value if set, and the zero value
// otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetType() int32 {
	if o == nil || o.Type == nil {
		return 0
	}
	return *o.Type
}

// GetTypeOk returns a pointer to the Type field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetTypeOk() (*int32, bool) {
	if o == nil || o.Type == nil {
		return nil, false
	}
	return o.Type, true
}

// HasType reports whether the Type field has been set.
func (o *VulnerabilityNote) HasType() bool {
	return o != nil && o.Type != nil
}

// SetType stores a reference to the given int32 in the Type field.
func (o *VulnerabilityNote) SetType(v int32) {
	o.Type = &v
}
// GetOrdinal returns the Ordinal field value if set, and the zero value
// otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetOrdinal() string {
	if o == nil || o.Ordinal == nil {
		return ""
	}
	return *o.Ordinal
}

// GetOrdinalOk returns a pointer to the Ordinal field if set, nil
// otherwise, plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetOrdinalOk() (*string, bool) {
	if o == nil || o.Ordinal == nil {
		return nil, false
	}
	return o.Ordinal, true
}

// HasOrdinal reports whether the Ordinal field has been set.
func (o *VulnerabilityNote) HasOrdinal() bool {
	return o != nil && o.Ordinal != nil
}

// SetOrdinal stores a reference to the given string in the Ordinal field.
func (o *VulnerabilityNote) SetOrdinal(v string) {
	o.Ordinal = &v
}
// GetLang returns the Lang field value if set, and the zero value
// otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetLang() string {
	if o == nil || o.Lang == nil {
		return ""
	}
	return *o.Lang
}

// GetLangOk returns a pointer to the Lang field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetLangOk() (*string, bool) {
	if o == nil || o.Lang == nil {
		return nil, false
	}
	return o.Lang, true
}

// HasLang reports whether the Lang field has been set.
func (o *VulnerabilityNote) HasLang() bool {
	return o != nil && o.Lang != nil
}

// SetLang stores a reference to the given string in the Lang field.
func (o *VulnerabilityNote) SetLang(v string) {
	o.Lang = &v
}
// GetValue returns the Value field value if set, and the zero value
// otherwise (nil receivers are tolerated).
func (o *VulnerabilityNote) GetValue() string {
	if o == nil || o.Value == nil {
		return ""
	}
	return *o.Value
}

// GetValueOk returns a pointer to the Value field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *VulnerabilityNote) GetValueOk() (*string, bool) {
	if o == nil || o.Value == nil {
		return nil, false
	}
	return o.Value, true
}

// HasValue reports whether the Value field has been set.
func (o *VulnerabilityNote) HasValue() bool {
	return o != nil && o.Value != nil
}

// SetValue stores a reference to the given string in the Value field.
func (o *VulnerabilityNote) SetValue(v string) {
	o.Value = &v
}
// MarshalJSON serialises only the fields that have been set; unset
// optional fields are omitted from the output entirely. Note the Lang
// field marshals under the lowercase key "lang".
func (o VulnerabilityNote) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if o.Title != nil {
		out["Title"] = o.Title
	}
	if o.Audience != nil {
		out["Audience"] = o.Audience
	}
	if o.Type != nil {
		out["Type"] = o.Type
	}
	if o.Ordinal != nil {
		out["Ordinal"] = o.Ordinal
	}
	if o.Lang != nil {
		out["lang"] = o.Lang
	}
	if o.Value != nil {
		out["Value"] = o.Value
	}
	return json.Marshal(out)
}
type NullableVulnerabilityNote struct {
value *VulnerabilityNote
isSet bool
}
func (v NullableVulnerabilityNote) Get() *VulnerabilityNote {
return v.value
}
func (v *NullableVulnerabilityNote) Set(val *VulnerabilityNote) {
v.value = val
v.isSet = true
}
func (v NullableVulnerabilityNote) IsSet() bool {
return v.isSet
}
func (v *NullableVulnerabilityNote) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableVulnerabilityNote(val *VulnerabilityNote) *NullableVulnerabilityNote {
return &NullableVulnerabilityNote{value: val, isSet: true}
}
func (v NullableVulnerabilityNote) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableVulnerabilityNote) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | openapi/model_vulnerability_note.go | 0.761627 | 0.41484 | model_vulnerability_note.go | starcoder |
package passwordless
import (
"crypto/rand"
"errors"
"strings"
"context"
)
var (
	// crockfordBytes is Douglas Crockford's base-32 alphabet: the digits
	// plus the lowercase letters with i, l, o and u omitted (they are too
	// easily confused with other characters when transcribed).
	crockfordBytes = []byte("0123456789abcdefghjkmnpqrstvwxyz")
)

// TokenGenerator defines an interface for generating and sanitising
// cryptographically-secure tokens.
type TokenGenerator interface {
	// Generate should return a token and nil error on success, or an empty
	// string and error on failure.
	Generate(ctx context.Context) (string, error)
	// Sanitize should take a user provided input and sanitize it such that
	// it can be passed to a function that expects the same input as
	// `Generate()`. Useful for cases where the token may be subject to
	// minor transcription errors by a user. (e.g. 0 == O)
	Sanitize(ctx context.Context, s string) (string, error)
}
// ByteGenerator generates random sequences of bytes from the specified set
// of the specified length.
type ByteGenerator struct {
	Bytes  []byte // pool of candidate bytes tokens are drawn from
	Length int    // number of bytes in each generated token
}

// NewByteGenerator creates and returns a ByteGenerator.
func NewByteGenerator(b []byte, l int) *ByteGenerator {
	return &ByteGenerator{
		Bytes:  b,
		Length: l,
	}
}

// Generate returns a string generated from random bytes of the configured
// set, of the given length. An error may be returned if there is insufficient
// entropy to generate a result.
func (g ByteGenerator) Generate(ctx context.Context) (string, error) {
	b, err := randBytes(g.Bytes, g.Length)
	if err != nil {
		return "", err
	}
	return string(b), nil
}

// Sanitize returns the input unchanged: an arbitrary byte pool offers no
// safe character substitutions.
func (g ByteGenerator) Sanitize(ctx context.Context, s string) (string, error) {
	return s, nil
}
// CrockfordGenerator generates random tokens using Douglas Crockford's base
// 32 alphabet which limits characters of similar appearances. The
// Sanitize method of this generator will deal with transcribing incorrect
// characters back to the correct value.
type CrockfordGenerator struct {
	Length int // number of characters in each generated token
}

// NewCrockfordGenerator returns a new Crockford token generator that creates
// tokens of the specified length.
func NewCrockfordGenerator(l int) *CrockfordGenerator {
	return &CrockfordGenerator{l}
}

// Generate returns a random token of the configured length drawn from the
// Crockford base-32 alphabet, or an error if entropy is unavailable.
func (g CrockfordGenerator) Generate(ctx context.Context) (string, error) {
	b, err := randBytes(crockfordBytes, g.Length)
	if err != nil {
		return "", err
	}
	return string(b), nil
}

// Sanitize attempts to translate strings back to the correct Crockford
// alphabet, in case of user transcribe errors.
func (g CrockfordGenerator) Sanitize(ctx context.Context, s string) (string, error) {
	bs := []byte(strings.ToLower(s))
	for i, b := range bs {
		switch b {
		case 'i', 'l', '|':
			// i, l and | are all read back as the digit 1.
			bs[i] = '1'
		case 'o':
			// o is read back as the digit 0.
			bs[i] = '0'
		}
	}
	return string(bs), nil
}
// PINGenerator generates numerical PINs of the specified length.
type PINGenerator struct {
	Length int // number of digits in each generated PIN
}

// Generate returns a numerical PIN of the chosen length. If there is not
// enough random entropy, the returned string will be empty and an error
// value present.
func (g PINGenerator) Generate(ctx context.Context) (string, error) {
	b, err := randBytes([]byte("0123456789"), g.Length)
	if err != nil {
		return "", err
	}
	return string(b), nil
}

// Sanitize maps characters that visually resemble digits back onto those
// digits so minor transcription mistakes still validate: i/l/| -> 1,
// o/O -> 0, B -> 8, b -> 6, s/S -> 5.
func (g PINGenerator) Sanitize(ctx context.Context, s string) (string, error) {
	bs := []byte(strings.ToLower(s))
	for i, b := range bs {
		if b == 'i' || b == 'l' || b == '|' {
			bs[i] = '1'
		} else if b == 'o' {
			bs[i] = '0'
		} else if s[i] == 'B' {
			// Checked against the original (pre-lowercase) byte: an
			// uppercase B resembles 8, a lowercase b resembles 6.
			// NOTE(review): if ToLower ever changes the byte length of a
			// non-ASCII input, s[i] would misalign with bs[i] — confirm
			// inputs are ASCII.
			bs[i] = '8'
		} else if s[i] == 'b' {
			bs[i] = '6'
		} else if b == 's' {
			bs[i] = '5'
		}
	}
	return string(bs), nil
}
// randBytes returns n bytes drawn uniformly at random from the pool `p`,
// using crypto/rand as the entropy source. An error is returned for an
// empty pool (previously a division-by-zero panic), a pool larger than
// 256 items, or an entropy-read failure.
func randBytes(p []byte, n int) ([]byte, error) {
	c := len(p)
	if c == 0 {
		return nil, errors.New("randBytes requires a non-empty pool")
	}
	if c > 256 {
		return nil, errors.New("randBytes requires a pool of <= 256 items")
	}
	// Rejection sampling: a raw random byte is only used when it falls
	// below the largest multiple of len(p) that fits in a byte. Reducing a
	// byte with `% c` directly would bias the selection toward the start
	// of the pool whenever 256 is not a multiple of c — a real concern for
	// security tokens.
	limit := 256 - 256%c
	out := make([]byte, 0, n)
	buf := make([]byte, n)
	for len(out) < n {
		chunk := buf[:n-len(out)]
		if _, err := rand.Read(chunk); err != nil {
			return nil, err
		}
		for _, v := range chunk {
			if int(v) < limit {
				out = append(out, p[int(v)%c])
			}
		}
	}
	return out, nil
}
package main
import "fmt"
import "time"
import "math"
import "image"
import "image/color"
import "image/png"
import "bufio"
import "os"
// Vector is a point or direction in 3-D space.
type Vector struct {
	x float64
	y float64
	z float64
}

// Color is an RGB color with float channels, nominally in [0, 1] but left
// unclamped until conversion for display (see legalize).
type Color struct {
	r float64
	g float64
	b float64
}

// Ray is a half-line starting at start and heading along dir.
type Ray struct {
	start Vector
	dir Vector
}

// Intersection records a ray/object hit: the object hit, the ray that hit
// it, and the distance from the ray origin to the hit point.
type Intersection struct {
	thing Thing
	ray *Ray
	dist float64
}

// Colors bundles the fixed palette shared by the renderer (see init).
type Colors struct {
	white Color
	grey Color
	black Color
	background Color
	defaultColor Color
}

// isNull reports whether this Intersection is the zero value, i.e. no hit
// was recorded — a real hit always carries the originating ray.
func (isect *Intersection) isNull() bool {
	return isect.ray == nil
}
// mul returns v scaled by k.
func (v Vector) mul(k float64) Vector {
	return Vector{v.x * k, v.y * k, v.z * k}
}

// add returns the component-wise sum of v and v2.
func (v Vector) add(v2 Vector) Vector {
	return Vector{v.x + v2.x, v.y + v2.y, v.z + v2.z}
}

// sub returns the component-wise difference v - v2.
func (v Vector) sub(v2 Vector) Vector {
	return Vector{v.x - v2.x, v.y - v2.y, v.z - v2.z}
}

// dot returns the scalar (inner) product of v and v2.
func (v Vector) dot(v2 Vector) float64 {
	sum := v.x * v2.x
	sum += v.y * v2.y
	sum += v.z * v2.z
	return sum
}

// mag returns the Euclidean length of v.
func (v Vector) mag() float64 {
	return math.Sqrt(v.dot(v))
}

// norm returns a unit-length copy of v. The zero vector has no direction:
// its components become +Inf, exactly as before.
func (v Vector) norm() Vector {
	magnitude := v.mag()
	if magnitude == 0 {
		return v.mul(math.Inf(1))
	}
	return v.mul(1.0 / magnitude)
}

// cross returns the vector (outer) product of v and v2.
func (v Vector) cross(v2 Vector) Vector {
	return Vector{
		v.y*v2.z - v.z*v2.y,
		v.z*v2.x - v.x*v2.z,
		v.x*v2.y - v.y*v2.x,
	}
}
// scale multiplies every channel of c by k.
func (c Color) scale(k float64) Color {
	return Color{c.r * k, c.g * k, c.b * k}
}

// add returns the channel-wise sum of two colors.
func (c Color) add(color Color) Color {
	return Color{c.r + color.r, c.g + color.g, c.b + color.b}
}

// toDrawingColor clamps each channel to [0, 1] and converts it to an
// opaque 8-bit RGBA color suitable for image.Set.
func (c Color) toDrawingColor() color.Color {
	return color.RGBA{
		R: legalize(c.r),
		G: legalize(c.g),
		B: legalize(c.b),
		A: 255,
	}
}

// times returns the channel-wise (Hadamard) product of two colors.
func (c Color) times(color Color) Color {
	return Color{c.r * color.r, c.g * color.g, c.b * color.b}
}
// legalize converts a float channel (nominally 0..1) to a byte, clamping
// out-of-range values to 0 or 255. In-range values are truncated, not
// rounded, matching the original behavior.
func legalize(c float64) uint8 {
	switch {
	case c < 0.0:
		return 0
	case c > 1.0:
		return 255
	default:
		return byte(c * 255)
	}
}
// gColors is the renderer's shared palette; populated once by init below.
var gColors Colors

// init fills the palette. Background and the default (unlit) color are
// both black.
func init() {
	gColors.white = Color{1.0, 1.0, 1.0}
	gColors.grey = Color{0.5, 0.5, 0.5}
	gColors.black = Color{0.0, 0.0, 0.0}
	gColors.background = gColors.black
	gColors.defaultColor = gColors.black
}
// main renders the default scene to go-raytracer.png and reports how long
// the ray tracing itself took (file writing is excluded from the timing).
func main() {
	start := time.Now()
	width := 500
	height := 500
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	var scene = CreateDefaultScene()
	var rayTracer = RayTracerEngine{maxDepth: 5, scene: scene}
	rayTracer.render(img, width, height)
	elapsed := time.Since(start)
	// The original ignored every I/O error below, which could silently
	// leave a missing or truncated PNG on disk.
	file, err := os.Create("go-raytracer.png")
	if err != nil {
		fmt.Printf("creating output file: %v\n", err)
		return
	}
	defer file.Close()
	writer := bufio.NewWriter(file)
	if err := png.Encode(writer, img); err != nil {
		fmt.Printf("encoding PNG: %v\n", err)
		return
	}
	if err := writer.Flush(); err != nil {
		fmt.Printf("flushing output: %v\n", err)
		return
	}
	fmt.Printf("Completed in %s\n", elapsed)
}
// Camera holds the eye position and an orthogonal view basis. The right
// and up vectors are pre-scaled by 1.5, which sets the field of view.
type Camera struct {
	forward Vector
	right Vector
	up Vector
	pos Vector
}

// CreateCamera builds a camera at pos looking toward lookAt, deriving the
// right/up basis from a fixed world "down" direction.
func CreateCamera(pos Vector, lookAt Vector) Camera {
	down := Vector{0.0, -1.0, 0.0}
	forward := lookAt.sub(pos).norm()
	right := forward.cross(down).norm().mul(1.5)
	up := forward.cross(right).norm().mul(1.5)
	return Camera{
		forward: forward,
		right:   right,
		up:      up,
		pos:     pos,
	}
}
// Surface describes an object's material at a given surface position.
type Surface interface {
	// diffuse returns the matte base color at pos.
	diffuse(pos *Vector) Color
	// specular returns the highlight color at pos.
	specular(pos *Vector) Color
	// reflect returns the reflectivity factor (0..1) at pos.
	reflect(pos *Vector) float64
	// roughness is the specular exponent used in the highlight power term.
	roughness() float64
}

// Thing is any renderable scene object.
type Thing interface {
	// intersect returns the hit of ray with this object, or the zero
	// Intersection when there is no hit.
	intersect(ray *Ray) Intersection
	// normal returns the surface normal at pos.
	normal(pos *Vector) Vector
	// surface returns the object's material.
	surface() Surface
}

// Light is a point light source with a position and a color.
type Light struct {
	pos Vector
	color Color
}

// CreateLight constructs a Light at pos emitting color.
func CreateLight(pos Vector, color Color) Light {
	var result Light
	result.pos = pos
	result.color = color
	return result
}

// Scene exposes the objects, lights and camera to render.
type Scene interface {
	Things() []Thing
	Lights() []Light
	Camera() Camera
}
// Sphere implements the Thing interface.
// radius2 caches the squared radius so intersect never needs the radius
// itself.
type Sphere struct {
	radius2 float64
	center Vector
	mSurface Surface
}

// CreateSphere constructs a Sphere from its center, radius and material.
func CreateSphere(center Vector, radius float64, surface Surface) *Sphere {
	return &Sphere{
		radius * radius,
		center,
		surface,
	}
}

// normal returns the outward unit normal at pos (pos is assumed to lie on
// the sphere's surface).
func (sphere *Sphere) normal(pos *Vector) Vector {
	var diff = pos.sub(sphere.center)
	return diff.norm()
}

// intersect returns the nearest hit of ray with the sphere, or the zero
// Intersection on a miss. With eo the vector from the ray origin to the
// center and v its projection onto the ray direction, the discriminant is
// disc = r² - (|eo|² - v²); a hit exists when v >= 0 and disc >= 0.
func (sphere *Sphere) intersect(ray *Ray) Intersection {
	var result Intersection
	var eo = sphere.center.sub(ray.start)
	var v = eo.dot(ray.dir)
	var dist float64
	if v >= 0 {
		var disc = sphere.radius2 - (eo.dot(eo) - v*v)
		if disc >= 0 {
			dist = v - math.Sqrt(disc)
		}
	}
	// dist == 0 doubles as the "no hit" sentinel, so a grazing hit at
	// exactly distance 0 is treated as a miss.
	// NOTE(review): a ray whose origin is inside the sphere yields a
	// negative dist that callers do not guard against — confirm intended.
	if dist == 0 {
		return result
	}
	result = Intersection{sphere, ray, dist}
	return result
}

// surface returns the sphere's material.
func (sphere *Sphere) surface() Surface {
	return sphere.mSurface
}
// Plane is an infinite plane satisfying norm·p + offset == 0, implementing
// the Thing interface.
type Plane struct {
	norm Vector
	offset float64
	mSurface Surface
}

// normal returns the plane's constant normal, regardless of position.
func (plane *Plane) normal(pos *Vector) Vector {
	return plane.norm
}

// intersect returns the hit of ray with the plane, or the zero
// Intersection when the ray heads along the normal (denom > 0), making the
// plane effectively one-sided.
func (plane *Plane) intersect(ray *Ray) Intersection {
	var result Intersection
	var denom = plane.norm.dot(ray.dir)
	if denom > 0 {
		return result
	}
	// NOTE(review): a ray exactly parallel to the plane (denom == 0)
	// divides by -0 and produces an infinite dist instead of a miss —
	// confirm this is acceptable (it sorts behind every finite hit).
	var dist = (plane.norm.dot(ray.start) + plane.offset) / (-denom)
	result = Intersection{plane, ray, dist}
	return result
}

// CreatePlane constructs a Plane from its normal, offset and material.
func CreatePlane(norm Vector, offset float64, surface Surface) *Plane {
	return &Plane{
		norm,
		offset,
		surface,
	}
}

// surface returns the plane's material.
func (plane *Plane) surface() Surface {
	return plane.mSurface
}
/* ShinySurface */
// ShinySurface is a uniform mirror-like material: white diffuse, grey
// specular, 70% reflectivity, and a tight highlight (exponent 250).
type ShinySurface struct {
}

// diffuse returns the matte base color (constant white).
func (surface *ShinySurface) diffuse(pos *Vector) Color {
	return gColors.white
}

// specular returns the highlight color (constant grey).
func (surface *ShinySurface) specular(pos *Vector) Color {
	return gColors.grey
}

// reflect returns the reflectivity factor (constant 0.7).
func (surface *ShinySurface) reflect(pos *Vector) float64 {
	return 0.7
}

// roughness returns the specular exponent (constant 250).
func (surface *ShinySurface) roughness() float64 {
	return 250.0
}

/* CheckerboardSurface */
// CheckerboardSurface alternates tile color and reflectivity based on the
// parity of floor(z) + floor(x) at the shaded position.
type CheckerboardSurface struct {
}

// diffuse returns white on odd-parity tiles and black on even ones.
func (surface *CheckerboardSurface) diffuse(pos *Vector) Color {
	var val = (math.Floor(pos.z) + math.Floor(pos.x))
	if math.Mod(val, 2.0) != 0 {
		return gColors.white
	}
	return gColors.black
}

// specular returns the highlight color (constant white).
func (surface *CheckerboardSurface) specular(pos *Vector) Color {
	return gColors.white
}

// reflect makes white tiles barely reflective (0.1) and black tiles
// strongly reflective (0.7), using the same parity test as diffuse.
func (surface *CheckerboardSurface) reflect(pos *Vector) float64 {
	var val = (math.Floor(pos.z) + math.Floor(pos.x))
	if math.Mod(val, 2.0) != 0 {
		return 0.1
	}
	return 0.7
}

// roughness returns the specular exponent (constant 150).
func (surface *CheckerboardSurface) roughness() float64 {
	return 150.0
}
// DefaultScene is the fixed demo scene: a checkerboard floor, two shiny
// spheres, four colored point lights, and a camera aimed at the large
// sphere.
type DefaultScene struct {
	things []Thing
	lights []Light
	camera Camera
}

// CreateDefaultScene builds and returns the demo scene.
func CreateDefaultScene() *DefaultScene {
	var result = &DefaultScene{}
	var shiny = &ShinySurface{}
	var checkerboard = &CheckerboardSurface{}
	var plane1 = CreatePlane(Vector{0.0, 1.0, 0.0}, 0.0, checkerboard)
	var sphere1 = CreateSphere(Vector{0.0, 1.0, -0.25}, 1.0, shiny)
	var sphere2 = CreateSphere(Vector{-1.0, 0.5, 1.5}, 0.5, shiny)
	result.things = []Thing{plane1, sphere1, sphere2}
	result.lights = []Light{
		CreateLight(Vector{-2.0, 2.5, 0.0}, Color{0.49, 0.07, 0.07}),
		CreateLight(Vector{1.5, 2.5, 1.5}, Color{0.07, 0.07, 0.49}),
		CreateLight(Vector{1.5, 2.5, -1.5}, Color{0.07, 0.49, 0.071}),
		CreateLight(Vector{0.0, 3.5, 0.0}, Color{0.21, 0.21, 0.35}),
	}
	result.camera = CreateCamera(Vector{3.0, 2.0, 4.0}, Vector{-1.0, 0.5, 0.0})
	return result
}

// Things returns the scene's renderable objects.
func (scene *DefaultScene) Things() []Thing {
	return scene.things
}

// Lights returns the scene's light sources.
func (scene *DefaultScene) Lights() []Light {
	return scene.lights
}

// Camera returns the scene's camera.
func (scene *DefaultScene) Camera() Camera {
	return scene.camera
}
// RayTracerEngine traces rays through a scene, following reflections up
// to maxDepth bounces.
type RayTracerEngine struct {
	maxDepth int
	scene *DefaultScene
}

// intersections returns the nearest hit of ray against any scene object,
// or the zero Intersection when nothing is hit.
func (rayTracer *RayTracerEngine) intersections(ray *Ray) Intersection {
	nearest := math.Inf(1)
	var hit Intersection
	for _, thing := range rayTracer.scene.Things() {
		if isect := thing.intersect(ray); !isect.isNull() && isect.dist < nearest {
			hit = isect
			nearest = isect.dist
		}
	}
	return hit
}

// testRay returns the distance to the nearest hit along ray, or NaN when
// the ray hits nothing.
func (rayTracer *RayTracerEngine) testRay(ray *Ray) float64 {
	if isect := rayTracer.intersections(ray); !isect.isNull() {
		return isect.dist
	}
	return math.NaN()
}

// traceRay returns the shaded color seen along ray, or the background
// color when the ray escapes the scene.
func (rayTracer *RayTracerEngine) traceRay(ray *Ray, depth int) Color {
	isect := rayTracer.intersections(ray)
	if isect.isNull() {
		return gColors.background
	}
	return rayTracer.shade(isect, depth)
}
// shade returns the color at a hit point: the "natural" (diffuse +
// specular) lighting plus a recursive reflection contribution, which is
// replaced by flat grey once the recursion reaches maxDepth.
func (rayTracer *RayTracerEngine) shade(isect Intersection, depth int) Color {
	var d = isect.ray.dir
	// Hit position = origin + dir * dist.
	var pos = d.mul(isect.dist)
	pos = pos.add(isect.ray.start)
	var normal = isect.thing.normal(&pos)
	// Mirror reflection direction: r = d - 2(n·d)n.
	var normalDotD = normal.dot(d)
	var vec = normal.mul(normalDotD)
	vec = vec.mul(2.0)
	var reflectDir = d.sub(vec)
	var naturalColor = rayTracer.getNaturalColor(isect.thing, &pos, &normal, &reflectDir)
	naturalColor = naturalColor.add(gColors.background)
	// Wrapped in a closure so the recursive trace is skipped entirely at
	// maximum depth.
	getReflectionColor := func() Color {
		var ray = Ray{pos, reflectDir}
		var reflect = isect.thing.surface().reflect(&pos)
		var color = rayTracer.traceRay(&ray, depth+1)
		color = color.scale(reflect)
		return color
	}
	var reflectedColor Color
	if depth >= rayTracer.maxDepth {
		reflectedColor = gColors.grey
	} else {
		reflectedColor = getReflectionColor()
	}
	var resultColor = naturalColor.add(reflectedColor)
	return resultColor
}

// getNaturalColor accumulates the diffuse and specular contribution of
// every light that is not blocked by another object (Phong-style shading).
// rd is the reflection direction from shade, used for the highlight term.
func (rayTracer *RayTracerEngine) getNaturalColor(thing Thing, pos *Vector, norm *Vector, rd *Vector) Color {
	var resultColor = gColors.black
	var surface = thing.surface()
	var rayDirNormal = rd.norm()
	var colDiffuse = surface.diffuse(pos)
	var colSpecular = surface.specular(pos)
	var lcolor Color
	var scolor Color
	var ray Ray
	ray.start = *pos
	addLight := func(light *Light) {
		var ldis = light.pos.sub(*pos)
		var livec = ldis.norm()
		// Shadow test: cast a ray toward the light. NaN means nothing is
		// in the way; otherwise any hit nearer than the light blocks it.
		var neatIsect = rayTracer.testRay(&Ray{*pos, livec})
		var isInShadow bool
		if math.IsNaN(neatIsect) {
			isInShadow = false
		} else {
			isInShadow = neatIsect <= ldis.mag()
		}
		if isInShadow {
			return
		}
		var illum = livec.dot(*norm)           // Lambertian (diffuse) factor
		var specular = livec.dot(rayDirNormal) // alignment with the mirror direction
		lcolor = gColors.defaultColor
		scolor = gColors.defaultColor
		if illum > 0 {
			lcolor = light.color.scale(illum)
		}
		if specular > 0 {
			scolor = light.color.scale(math.Pow(specular, surface.roughness()))
		}
		var diffuseColor = lcolor.times(colDiffuse)
		var specularColor = scolor.times(colSpecular)
		resultColor = resultColor.add(diffuseColor)
		resultColor = resultColor.add(specularColor)
	}
	for _, light := range rayTracer.scene.Lights() {
		// Taking &light is safe: addLight runs synchronously, so the loop
		// variable's address is never used after its iteration ends.
		addLight(&light)
	}
	return resultColor
}
// getPoint maps pixel (x, y) to a normalized ray direction through the
// camera's basis vectors. scale is min(width, height), so non-square
// viewports do not distort the image.
func (rayTracer *RayTracerEngine) getPoint(x int, y int, camera Camera, screenWidth int, screenHeight int, scale int) Vector {
	// Recenter so (0, 0) is the middle of the image; y is negated so +y
	// points up in world space while pixel rows grow downward.
	var recenterX = (float64(x) - (float64(screenWidth) / 2.0)) / 2.0 / float64(scale)
	var recenterY = -(float64(y) - (float64(screenHeight) / 2.0)) / 2.0 / float64(scale)
	var vx = camera.right.mul(recenterX)
	var vy = camera.up.mul(recenterY)
	var v = vx.add(vy)
	var z = camera.forward.add(v)
	z = z.norm()
	return z
}

// render traces one primary ray per pixel from the scene camera and writes
// the resulting colors into img.
func (rayTracer *RayTracerEngine) render(img *image.RGBA, w int, h int) {
	var camera = rayTracer.scene.Camera()
	var ray Ray
	ray.start = camera.pos
	// Use the smaller screen dimension as the projection scale.
	var scale = h
	if scale > w {
		scale = w
	}
	for y := 0; y < h; y++ {
		for x := 0; x < w; x++ {
			var dir = rayTracer.getPoint(x, y, camera, w, h, scale)
			ray.dir = dir
			color := rayTracer.traceRay(&ray, 0)
			img.Set(x, y, color.toDrawingColor())
		}
	}
}
package convtree
import (
"errors"
"fmt"
"github.com/google/uuid"
"math"
)
// ConvTree is a quadtree whose split point is chosen by convolving a
// density grid of the node's points, rather than always splitting at the
// center. Coordinates use an inverted Y axis: TopLeft.Y > BottomRight.Y.
type ConvTree struct {
	ID     string  // unique node identifier (UUID)
	IsLeaf bool    // true until the node has been split
	MaxPoints float64 // total point weight that triggers a split
	MaxDepth  int     // maximum allowed depth of the tree
	Depth     int     // this node's depth (root = 0)
	GridSize  int     // side length of the density grid used when splitting
	ConvNum   int     // number of convolution passes applied to the grid
	Kernel    [][]float64 // square convolution kernel
	Points    []Point     // points held by this node (leaf nodes only)
	MinXLength float64    // minimum allowed child width
	MinYLength float64    // minimum allowed child height
	TopLeft     Point // upper-left corner of this node's rectangle
	BottomRight Point // lower-right corner of this node's rectangle
	ChildTopLeft    *ConvTree
	ChildTopRight   *ConvTree
	ChildBottomLeft *ConvTree
	ChildBottomRight *ConvTree
}
// NewConvTree builds the root of a convolution-guided quadtree covering
// the rectangle (topLeft, bottomRight). An invalid or non-square kernel is
// replaced by a default 3x3 smoothing kernel. If initPoints already
// warrant a split, the root is split immediately.
func NewConvTree(topLeft Point, bottomRight Point, minXLength float64, minYLength float64, maxPoints float64, maxDepth int,
	convNumber int, gridSize int, kernel [][]float64, initPoints []Point) (ConvTree, error) {
	if topLeft.X >= bottomRight.X {
		err := errors.New("X of top left point is larger or equal to X of bottom right point")
		return ConvTree{}, err
	}
	// Inverted Y axis: the top-left corner must have the larger Y.
	if topLeft.Y <= bottomRight.Y {
		err := errors.New("Y of bottom right point is larger or equal to Y of top left point")
		return ConvTree{}, err
	}
	id := uuid.New().String()
	if !checkKernel(kernel) {
		kernel = [][]float64{
			[]float64{0.5, 0.5, 0.5},
			[]float64{0.5, 1.0, 0.5},
			[]float64{0.5, 0.5, 0.5},
		}
	}
	tree := ConvTree{
		IsLeaf:      true,
		ID:          id,
		MaxPoints:   maxPoints,
		GridSize:    gridSize,
		ConvNum:     convNumber,
		Kernel:      kernel,
		MaxDepth:    maxDepth,
		TopLeft:     topLeft,
		BottomRight: bottomRight,
		Points:      []Point{},
		MinXLength:  minXLength,
		MinYLength:  minYLength,
	}
	if initPoints != nil {
		tree.Points = initPoints
	}
	if tree.checkSplit() {
		tree.split()
	}
	return tree, nil
}
// checkKernel reports whether kernel is a usable convolution kernel: a
// non-empty square matrix where every row has the same length as the
// number of rows.
func checkKernel(kernel [][]float64) bool {
	if len(kernel) == 0 || kernel[0] == nil {
		return false
	}
	size := len(kernel)
	if len(kernel[0]) != size {
		return false
	}
	for _, row := range kernel {
		if len(row) != size {
			return false
		}
	}
	return true
}
// split subdivides this node into four children. The split point is found
// by rasterizing point weights onto a GridSize x GridSize density grid,
// smoothing it with ConvNum convolution passes, and locating the edge of
// the densest region; the result is clamped so every child respects
// MinXLength/MinYLength. Children that still warrant a split recurse.
func (tree *ConvTree) split() {
	// Rasterize point weights into a density grid over this node's area.
	xSize, ySize := tree.GridSize, tree.GridSize
	grid := make([][]float64, xSize)
	xStep := (tree.BottomRight.X - tree.TopLeft.X) / float64(xSize)
	yStep := (tree.TopLeft.Y - tree.BottomRight.Y) / float64(ySize)
	for i := 0; i < xSize; i++ {
		grid[i] = make([]float64, ySize)
		for j := 0; j < ySize; j++ {
			xLeft := tree.TopLeft.X + float64(i)*xStep
			xRight := tree.TopLeft.X + float64(i+1)*xStep
			yBottom := tree.BottomRight.Y + float64(j)*yStep
			yTop := tree.BottomRight.Y + float64(j+1)*yStep
			grid[i][j] = float64(tree.getNodeWeight(xLeft, xRight, yTop, yBottom))
		}
	}
	// Smooth the grid: ConvNum rounds of convolution, renormalizing to a
	// maximum of 1 after each pass.
	convolved := normalizeGrid(grid)
	for i := 0; i < tree.ConvNum; i++ {
		tmpGrid, err := convolve(convolved, tree.Kernel, 1, 1)
		if err != nil {
			fmt.Println(err)
			break
		}
		convolved = normalizeGrid(tmpGrid)
	}
	convolved = normalizeGrid(convolved)
	// Pick the split cell; fall back to the grid center when it lands on
	// (or outside) the border.
	xMax, yMax := getSplitPoint(convolved)
	if xMax < 1 || xMax >= (len(convolved)-1) {
		xMax = len(convolved) / 2
	}
	if yMax < 1 || yMax >= (len(convolved[0])-1) {
		yMax = len(convolved[0]) / 2
	}
	// Convert grid coordinates back to world coordinates and clamp so each
	// child is at least MinXLength x MinYLength.
	xOffset := float64(xMax) * xStep
	yOffset := float64(yMax) * yStep
	xRight := tree.TopLeft.X + xOffset
	if xRight-tree.TopLeft.X < tree.MinXLength {
		xRight = tree.TopLeft.X + tree.MinXLength
	}
	if tree.BottomRight.X-xRight < tree.MinXLength {
		xRight = tree.BottomRight.X - tree.MinXLength
	}
	yBottom := tree.BottomRight.Y + yOffset
	if yBottom-tree.BottomRight.Y < tree.MinYLength {
		yBottom = tree.BottomRight.Y + tree.MinYLength
	}
	if tree.TopLeft.Y-yBottom < tree.MinYLength {
		yBottom = tree.TopLeft.Y - tree.MinYLength
	}
	// Create the four children around the split point (xRight, yBottom),
	// hand each the points inside its rectangle, and recurse when needed.
	id := uuid.New().String()
	tree.ChildTopLeft = &ConvTree{
		ID:      id,
		TopLeft: tree.TopLeft,
		BottomRight: Point{
			X: xRight,
			Y: yBottom,
		},
		MaxPoints:  tree.MaxPoints,
		MaxDepth:   tree.MaxDepth,
		Kernel:     tree.Kernel,
		Depth:      tree.Depth + 1,
		GridSize:   tree.GridSize,
		ConvNum:    tree.ConvNum,
		MinXLength: tree.MinXLength,
		MinYLength: tree.MinYLength,
		IsLeaf:     true,
	}
	tree.ChildTopLeft.Points = tree.filterSplitPoints(tree.ChildTopLeft.TopLeft, tree.ChildTopLeft.BottomRight)
	if tree.ChildTopLeft.checkSplit() {
		tree.ChildTopLeft.split()
	}
	id = uuid.New().String()
	tree.ChildTopRight = &ConvTree{
		ID: id,
		TopLeft: Point{
			X: xRight,
			Y: tree.TopLeft.Y,
		},
		BottomRight: Point{
			X: tree.BottomRight.X,
			Y: yBottom,
		},
		MaxPoints:  tree.MaxPoints,
		MaxDepth:   tree.MaxDepth,
		Kernel:     tree.Kernel,
		Depth:      tree.Depth + 1,
		GridSize:   tree.GridSize,
		ConvNum:    tree.ConvNum,
		MinXLength: tree.MinXLength,
		MinYLength: tree.MinYLength,
		IsLeaf:     true,
	}
	tree.ChildTopRight.Points = tree.filterSplitPoints(tree.ChildTopRight.TopLeft, tree.ChildTopRight.BottomRight)
	if tree.ChildTopRight.checkSplit() {
		tree.ChildTopRight.split()
	}
	id = uuid.New().String()
	tree.ChildBottomLeft = &ConvTree{
		ID: id,
		TopLeft: Point{
			X: tree.TopLeft.X,
			Y: yBottom,
		},
		BottomRight: Point{
			X: xRight,
			Y: tree.BottomRight.Y,
		},
		MaxPoints:  tree.MaxPoints,
		MaxDepth:   tree.MaxDepth,
		Kernel:     tree.Kernel,
		Depth:      tree.Depth + 1,
		GridSize:   tree.GridSize,
		ConvNum:    tree.ConvNum,
		MinXLength: tree.MinXLength,
		MinYLength: tree.MinYLength,
		IsLeaf:     true,
	}
	tree.ChildBottomLeft.Points = tree.filterSplitPoints(tree.ChildBottomLeft.TopLeft, tree.ChildBottomLeft.BottomRight)
	if tree.ChildBottomLeft.checkSplit() {
		tree.ChildBottomLeft.split()
	}
	id = uuid.New().String()
	tree.ChildBottomRight = &ConvTree{
		ID: id,
		TopLeft: Point{
			X: xRight,
			Y: yBottom,
		},
		BottomRight: tree.BottomRight,
		MaxPoints:   tree.MaxPoints,
		MaxDepth:    tree.MaxDepth,
		Kernel:      tree.Kernel,
		Depth:       tree.Depth + 1,
		GridSize:    tree.GridSize,
		ConvNum:     tree.ConvNum,
		MinXLength:  tree.MinXLength,
		MinYLength:  tree.MinYLength,
		IsLeaf:      true,
	}
	tree.ChildBottomRight.Points = tree.filterSplitPoints(tree.ChildBottomRight.TopLeft, tree.ChildBottomRight.BottomRight)
	if tree.ChildBottomRight.checkSplit() {
		tree.ChildBottomRight.split()
	}
	// This node is now an interior node; its points live in the children.
	tree.IsLeaf = false
	tree.Points = nil
}
// getSplitPoint chooses the grid cell at which to split. Starting from the
// global maximum of the (convolved) density grid, it expands a square ring
// outward, tracking cells that stay above a decaying threshold (80% of the
// running ring mean), and returns the far edge of that high-density region
// nudged one cell past it on each axis.
func getSplitPoint(grid [][]float64) (int, int) {
	threshold := 0.8
	// Locate the global maximum cell.
	maxX, maxY := 0, 0
	maxValue := 0.0
	for i := 0; i < len(grid); i++ {
		for j := 0; j < len(grid[0]); j++ {
			if grid[i][j] > maxValue {
				maxValue = grid[i][j]
				maxX, maxY = i, j
			}
		}
	}
	splitValue := maxValue * threshold
	counter := 1
	itemFound := false
	splitX, splitY := 0, 0
	// Expand rings of radius `counter` around the maximum until no cell on
	// the ring exceeds the (decaying) threshold.
	for {
		x, y := 0, 0
		vals := []float64{}
		itemFound = false
		// Left edge of the ring.
		i := maxX - counter
		if i >= 0 {
			for j := maxY - counter; j <= maxY+counter; j++ {
				if j >= 0 && j < len(grid[0]) {
					if grid[i][j] > splitValue {
						itemFound = true
						x = i
						vals = append(vals, grid[i][j])
					}
				}
			}
		}
		// Right edge: keep whichever x is farther from the grid center.
		i = maxX + counter
		if i < len(grid) {
			for j := maxY - counter; j <= maxY+counter; j++ {
				if j >= 0 && j < len(grid[0]) {
					if grid[i][j] > splitValue {
						itemFound = true
						if math.Abs(float64(x-len(grid)/2)) > math.Abs(float64(i-len(grid)/2)) {
							x = i
						}
						vals = append(vals, grid[i][j])
					}
				}
			}
		}
		// Top edge (corner cells are excluded from the mean: the vertical
		// edges already counted them).
		i = maxY - counter
		if i >= 0 {
			for j := maxX - counter; j <= maxX+counter; j++ {
				if j >= 0 && j < len(grid) {
					if grid[j][i] > splitValue {
						itemFound = true
						y = i
						if j != maxX-counter && j != maxX+counter {
							vals = append(vals, grid[j][i])
						}
					}
				}
			}
		}
		// Bottom edge, same center-distance rule for y.
		i = maxY + counter
		if i < len(grid[0]) {
			for j := maxX - counter; j <= maxX+counter; j++ {
				if j >= 0 && j < len(grid) {
					if grid[j][i] > splitValue {
						itemFound = true
						if math.Abs(float64(y-len(grid[0])/2)) > math.Abs(float64(i-len(grid[0])/2)) {
							y = i
						}
						if j != maxX-counter && j != maxX+counter {
							vals = append(vals, grid[j][i])
						}
					}
				}
			}
		}
		if !itemFound {
			break
		}
		if x != 0 {
			splitX = x
		}
		if y != 0 {
			splitY = y
		}
		// Decay the threshold toward the current ring's mean density.
		splitValue = mean(vals) * threshold
		counter++
	}
	// Step one cell beyond the dense region, away from the maximum.
	if splitX > maxX {
		splitX++
	} else {
		splitX--
	}
	if splitY > maxY {
		splitY++
	} else {
		splitY--
	}
	return splitX, splitY
}
// mean returns the arithmetic mean of in. An empty slice yields NaN
// (0 / 0), unchanged from the original behavior.
func mean(in []float64) float64 {
	var total float64
	for _, x := range in {
		total += x
	}
	return total / float64(len(in))
}
// Insert adds point to the subtree rooted at this node. Leaf nodes keep
// the point directly and, when allowSplit is true, may subdivide; interior
// nodes route the point to the first child whose (inclusive) rectangle
// contains it. A point outside every child is silently dropped, exactly
// as before.
func (tree *ConvTree) Insert(point Point, allowSplit bool) {
	if tree.IsLeaf {
		tree.Points = append(tree.Points, point)
		if allowSplit && tree.checkSplit() {
			tree.split()
		}
		return
	}
	contains := func(node *ConvTree) bool {
		return point.X >= node.TopLeft.X && point.X <= node.BottomRight.X &&
			point.Y <= node.TopLeft.Y && point.Y >= node.BottomRight.Y
	}
	// Check order matters for points on shared child borders: top-left
	// wins over top-right, and so on, matching the original.
	children := []*ConvTree{
		tree.ChildTopLeft,
		tree.ChildTopRight,
		tree.ChildBottomLeft,
		tree.ChildBottomRight,
	}
	for _, child := range children {
		if contains(child) {
			child.Insert(point, allowSplit)
			return
		}
	}
}
// Check splits this node now if its current contents warrant it.
func (tree *ConvTree) Check() {
	if !tree.checkSplit() {
		return
	}
	tree.split()
}

// Clear drops this node's points and recursively clears every child that
// exists, leaving the tree structure itself intact.
func (tree *ConvTree) Clear() {
	tree.Points = nil
	children := []*ConvTree{
		tree.ChildBottomLeft,
		tree.ChildBottomRight,
		tree.ChildTopLeft,
		tree.ChildTopRight,
	}
	for _, child := range children {
		if child != nil {
			child.Clear()
		}
	}
}
// checkSplit reports whether this node should be subdivided: it must be
// large enough to yield children of at least the minimum side lengths,
// hold more than MaxPoints of total point weight, and sit above MaxDepth.
func (tree ConvTree) checkSplit() bool {
	wideEnough := tree.BottomRight.X-tree.TopLeft.X > 2*tree.MinXLength
	tallEnough := tree.TopLeft.Y-tree.BottomRight.Y > 2*tree.MinYLength
	if !wideEnough || !tallEnough {
		return false
	}
	weight := 0.0
	for _, point := range tree.Points {
		weight += point.Weight
	}
	return weight > tree.MaxPoints && tree.Depth < tree.MaxDepth
}

// getNodeWeight sums the weight of this node's points inside the given
// cell bounds (inclusive on every edge).
func (tree ConvTree) getNodeWeight(xLeft, xRight, yTop, yBottom float64) float64 {
	var total float64
	for _, p := range tree.Points {
		if p.X < xLeft || p.X > xRight || p.Y < yBottom || p.Y > yTop {
			continue
		}
		total += p.Weight
	}
	return total
}

// filterSplitPoints returns the subset of this node's points that fall
// inside the rectangle spanned by topLeft and bottomRight (inclusive).
func (tree ConvTree) filterSplitPoints(topLeft, bottomRight Point) []Point {
	result := []Point{}
	for _, p := range tree.Points {
		if p.X >= topLeft.X && p.X <= bottomRight.X &&
			p.Y >= bottomRight.Y && p.Y <= topLeft.Y {
			result = append(result, p)
		}
	}
	return result
}
// convolve applies the square kernel to grid with the given stride and
// zero padding, returning a new result grid. The previous implementation
// only filled the padded interior correctly for padding == 1 (padding >= 2
// indexed grid out of range) and assumed a square grid when sizing rows;
// this version handles any padding >= 1 and rectangular grids.
func convolve(grid [][]float64, kernel [][]float64, stride, padding int) ([][]float64, error) {
	if stride < 1 {
		return nil, errors.New("convolutional stride must be larger than 0")
	}
	if padding < 1 {
		return nil, errors.New("convolutional padding must be larger than 0")
	}
	kernelSize := len(kernel)
	if len(grid) < kernelSize {
		return nil, errors.New("grid width is less than convolutional kernel size")
	}
	if len(grid[0]) < kernelSize {
		return nil, errors.New("grid height is less than convolutional kernel size")
	}
	height := len(grid)
	width := len(grid[0])
	// Build a zero-padded copy of the input with `padding` extra cells on
	// every side; rows are allocated zeroed, so only the interior needs
	// explicit filling.
	procGrid := make([][]float64, height+2*padding)
	for i := range procGrid {
		procGrid[i] = make([]float64, width+2*padding)
	}
	for i := 0; i < height; i++ {
		for j := 0; j < width; j++ {
			procGrid[i+padding][j+padding] = grid[i][j]
		}
	}
	// Standard output-size formula: (n - k + 2p) / s + 1 per axis.
	resultWidth := (height-kernelSize+2*padding)/stride + 1
	resultHeight := (width-kernelSize+2*padding)/stride + 1
	result := make([][]float64, resultWidth)
	for i := 0; i < resultWidth; i++ {
		result[i] = make([]float64, resultHeight)
		for j := 0; j < resultHeight; j++ {
			total := 0.0
			for x := 0; x < kernelSize; x++ {
				for y := 0; y < kernelSize; y++ {
					posX := stride*i + x
					posY := stride*j + y
					if posX < len(procGrid) && posY < len(procGrid[0]) {
						total += procGrid[posX][posY] * kernel[x][y]
					}
				}
			}
			result[i][j] = total
		}
	}
	return result, nil
}
// normalizeGrid rescales grid in place so its maximum value becomes 1,
// and returns the same slice for call-chaining convenience.
func normalizeGrid(grid [][]float64) [][]float64 {
	max := -math.MaxFloat64
	for i := range grid {
		for j := range grid[0] {
			if grid[i][j] > max {
				max = grid[i][j]
			}
		}
	}
	for i := range grid {
		for j := range grid[0] {
			grid[i][j] /= max
		}
	}
	return grid
}
package test_multiassign
// The following functions are declared without bodies: they are supplied
// by the compiler-test harness runtime.
func assert(want int, act int, code string)
func println(format ...string)
func strcmp(s1 string, s2 string) int

// multiRet returns six int values, exercising multi-value returns of a
// basic scalar type.
func multiRet() (int, int, int, int, int, int) {
	return 1, 2, 3, 4, 5, 6
}

// multiRetStr returns six string values.
func multiRetStr() (string, string, string, string, string, string) {
	return "abc", "def", "ghi", "jkl", "mno", "pqr"
}

// multiRetFloat returns two float64 values.
func multiRetFloat() (float64, float64) {
	return 0.1, 0.2
}
// gT01 is a small struct (int + string) used to exercise struct-valued
// multi-returns.
type gT01 struct {
	a int
	b string
}

// multiRetStruct returns a struct by value sandwiched between scalars.
func multiRetStruct(a int, b string) (int, gT01, string) {
	var g = gT01{
		a: a,
		b: b,
	}
	return g.a, g, g.b
}

// retStruct returns a single struct by value.
func retStruct() gT01 {
	var g = gT01{
		a: 1,
		b: "aaa",
	}
	return g
}

// multiRet2Struct returns two structs by value in one return list.
func multiRet2Struct(a int, b string) (int, gT01, gT01, string) {
	var g1 = gT01{
		a: a,
		b: b,
	}
	var g2 = gT01{
		a: a + 1,
		b: "bbb",
	}
	return g1.a, g1, g2, g1.b
}
// gT02 is a large struct (20-int array) used to exercise big-struct
// multi-returns that cannot travel in registers.
type gT02 struct {
	a [20]int
}

// multiRet2BigStruct mixes a scalar, a small struct, a large struct and a
// string in one return list.
func multiRet2BigStruct(a int, b string) (int, gT01, gT02, string) {
	var g1 = gT01{
		a: a + 2,
		b: b,
	}
	var g2 = gT02{
		a: [20]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20},
	}
	return g1.a, g1, g2, g1.b
}

// multiRetSlice returns two slices of different element types.
func multiRetSlice() ([]int, []string) {
	var a = []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}
	var b = []string{"aaa", "bbb", "ccc", "ddd", "eee", "fff"}
	return a, b
}

// multiRetArged returns two large structs whose contents depend on the
// arguments, exercising argument use together with big multi-returns.
func multiRetArged(x int, y string) (gT02, gT02) {
	var g1 = gT02{
		a: [20]int{
			x + 1,
			x + 2,
			x + 3,
			x + 4,
			x + 5,
			x + 6,
			x + 7,
			x + 8,
			x + 9,
			x + 10,
			x + 11,
			x + 12,
			x + 13,
			x + 14,
			x + 15,
			x + 16,
			x + 17,
			x + 18,
			x + 19,
			x + 20,
		},
	}
	var g2 = gT02{
		a: [20]int{
			x + 1,
			x + 2,
			x + 3,
			x + 4,
			x + 5,
			x + 6,
			x + 7,
			x + 8,
			x + 9,
			x + 10,
			x + 11,
			x + 12,
			x + 13,
			x + 14,
			x + 15,
			x + 16,
			x + 17,
			x + 18,
			x + 19,
			x + 20,
		},
	}
	return g1, g2
}
// main exercises multi-value assignment: tuple-returning calls into
// scalars, strings, floats, structs (small and large) and slices, plus
// parallel swap assignment with the blank identifier.
func main() {
	// Six ints from one call.
	var a01, b01, c01, d01, e01, f01 int
	a01, b01, c01, d01, e01, f01 = multiRet()
	assert(1, a01, "a01")
	assert(2, b01, "b01")
	assert(3, c01, "c01")
	assert(4, d01, "d01")
	assert(5, e01, "e01")
	assert(6, f01, "f01")
	// Six strings from one call.
	var a02, b02, c02, d02, e02, f02 string
	a02, b02, c02, d02, e02, f02 = multiRetStr()
	assert(0, strcmp(a02, "abc"), "strcmp(a02, \"abc\")")
	assert(0, strcmp(b02, "def"), "strcmp(b02, \"def\")")
	assert(0, strcmp(c02, "ghi"), "strcmp(c02, \"ghi\")")
	assert(0, strcmp(d02, "jkl"), "strcmp(d02, \"jkl\")")
	assert(0, strcmp(e02, "mno"), "strcmp(e02, \"mno\")")
	assert(0, strcmp(f02, "pqr"), "strcmp(f02, \"pqr\")")
	// Two floats from one call.
	var a03, b03 float64
	a03, b03 = multiRetFloat()
	println("%lf", a03)
	println("%lf", b03)
	assert(1, a03 == 0.1, "a03==0.1")
	assert(1, b03 == 0.2, "b03==0.2")
	// A small struct sandwiched between scalars.
	var a04 int
	var b04 string
	var c04 gT01
	a04, c04, b04 = multiRetStruct(1, "aaa")
	assert(1, a04, "a04")
	assert(0, strcmp(b04, "aaa"), "strcmp(b04, \"aaa\")")
	assert(1, c04.a, "c04.a")
	assert(0, strcmp(c04.b, "aaa"), "strcmp(c04.b, \"aaa\")")
	// Single struct return.
	var c041 gT01
	c041 = retStruct()
	assert(1, c041.a, "c041.a")
	assert(0, strcmp(c041.b, "aaa"), "strcmp(c041.b, \"aaa\")")
	// Two small structs in one return list.
	var a042 int
	var b042 string
	var c042 gT01
	var d042 gT01
	a042, c042, d042, b042 = multiRet2Struct(1, "aaa")
	assert(1, a042, "a042")
	assert(0, strcmp(b042, "aaa"), "strcmp(b042, \"aaa\")")
	assert(1, c042.a, "c042.a")
	assert(0, strcmp(c042.b, "aaa"), "strcmp(c042.b, \"aaa\")")
	assert(2, d042.a, "d042.a")
	assert(0, strcmp(d042.b, "bbb"), "strcmp(d042.b, \"bbb\")")
	// A small struct and a big (20-int array) struct together.
	var a043 int
	var b043 string
	var c043 gT01
	var d043 gT02
	a043, c043, d043, b043 = multiRet2BigStruct(1, "ccc")
	assert(3, a043, "a043")
	assert(0, strcmp(b043, "ccc"), "strcmp(b043, \"ccc\")")
	assert(3, c043.a, "c043.a")
	assert(0, strcmp(c043.b, "ccc"), "strcmp(c043.b, \"ccc\")")
	assert(1, d043.a[0], "d043.a[0]")
	assert(2, d043.a[1], "d043.a[1]")
	assert(3, d043.a[2], "d043.a[2]")
	assert(4, d043.a[3], "d043.a[3]")
	assert(5, d043.a[4], "d043.a[4]")
	assert(6, d043.a[5], "d043.a[5]")
	assert(7, d043.a[6], "d043.a[6]")
	assert(8, d043.a[7], "d043.a[7]")
	assert(9, d043.a[8], "d043.a[8]")
	assert(10, d043.a[9], "d043.a[9]")
	assert(11, d043.a[10], "d043.a[10]")
	assert(12, d043.a[11], "d043.a[11]")
	assert(13, d043.a[12], "d043.a[12]")
	assert(14, d043.a[13], "d043.a[13]")
	assert(15, d043.a[14], "d043.a[14]")
	assert(16, d043.a[15], "d043.a[15]")
	assert(17, d043.a[16], "d043.a[16]")
	assert(18, d043.a[17], "d043.a[17]")
	assert(19, d043.a[18], "d043.a[18]")
	assert(20, d043.a[19], "d043.a[19]")
	// Parallel swap of ints with a blank identifier in the middle.
	var a05, b05, c05, d05, e05, f05, g05 = 1, 2, 3, 4, 5, 6, 7
	assert(1, a05, "a05")
	assert(2, b05, "b05")
	assert(3, c05, "c05")
	assert(4, d05, "d05")
	assert(5, e05, "e05")
	assert(6, f05, "f05")
	assert(7, g05, "g05")
	a05, b05, c05, _, e05, f05, g05 = g05, f05, e05, d05, c05, b05, a05
	assert(7, a05, "a05")
	assert(6, b05, "b05")
	assert(5, c05, "c05")
	assert(4, d05, "d05")
	assert(3, e05, "e05")
	assert(2, f05, "f05")
	assert(1, g05, "g05")
	a05, b05, c05, d05, e05, f05, g05 = 1, 2, 3, 4, 5, 6, 7
	assert(1, a05, "a05")
	assert(2, b05, "b05")
	assert(3, c05, "c05")
	assert(4, d05, "d05")
	assert(5, e05, "e05")
	assert(6, f05, "f05")
	assert(7, g05, "g05")
	// Same swap pattern with strings, spread over multiple source lines.
	var a06,
		b06,
		c06,
		d06,
		e06,
		f06,
		g06 = "aaa",
		"bbb",
		"ccc",
		"ddd",
		"eee",
		"fff",
		"ggg"
	assert(0, strcmp(a06, "aaa"), "strcmp(a06, \"aaa\")")
	assert(0, strcmp(b06, "bbb"), "strcmp(b06, \"bbb\")")
	assert(0, strcmp(c06, "ccc"), "strcmp(c06, \"ccc\")")
	assert(0, strcmp(d06, "ddd"), "strcmp(d06, \"ddd\")")
	assert(0, strcmp(e06, "eee"), "strcmp(e06, \"eee\")")
	assert(0, strcmp(f06, "fff"), "strcmp(f06, \"fff\")")
	assert(0, strcmp(g06, "ggg"), "strcmp(g06, \"ggg\")")
	a06, b06, c06, _, e06, f06, g06 = g06, f06, e06, d06, c06, b06, a06
	assert(0, strcmp(a06, "ggg"), "strcmp(a06, \"ggg\")")
	assert(0, strcmp(b06, "fff"), "strcmp(b06, \"fff\")")
	assert(0, strcmp(c06, "eee"), "strcmp(c06, \"eee\")")
	assert(0, strcmp(d06, "ddd"), "strcmp(d06, \"ddd\")")
	assert(0, strcmp(e06, "ccc"), "strcmp(e06, \"ccc\")")
	assert(0, strcmp(f06, "bbb"), "strcmp(f06, \"bbb\")")
	assert(0, strcmp(g06, "aaa"), "strcmp(g06, \"aaa\")")
	a06,
		b06,
		c06,
		d06,
		e06,
		f06,
		g06 = "aaa",
		"bbb",
		"ccc",
		"ddd",
		"eee",
		"fff",
		"ggg"
	assert(0, strcmp(a06, "aaa"), "strcmp(a06, \"aaa\")")
	assert(0, strcmp(b06, "bbb"), "strcmp(b06, \"bbb\")")
	assert(0, strcmp(c06, "ccc"), "strcmp(c06, \"ccc\")")
	assert(0, strcmp(d06, "ddd"), "strcmp(d06, \"ddd\")")
	assert(0, strcmp(e06, "eee"), "strcmp(e06, \"eee\")")
	assert(0, strcmp(f06, "fff"), "strcmp(f06, \"fff\")")
	assert(0, strcmp(g06, "ggg"), "strcmp(g06, \"ggg\")")
	// Parallel swap of floats.
	var a07, b07, c07, d07 = 0.1, 0.2, 0.3, 0.4
	assert(1, a07 == 0.1, "a07==0.1")
	assert(1, b07 == 0.2, "b07==0.2")
	assert(1, c07 == 0.3, "c07==0.3")
	assert(1, d07 == 0.4, "d07==0.4")
	a07, b07, c07, d07 = d07, c07, b07, a07
	println("a07: %lf", a07)
	assert(1, a07 == 0.4, "a07==0.4")
	assert(1, b07 == 0.3, "b07==0.3")
	assert(1, c07 == 0.2, "c07==0.2")
	assert(1, d07 == 0.1, "d07==0.1")
	a07, b07, c07, d07 = 0.1, 0.2, 0.3, 0.4
	assert(1, a07 == 0.1, "a07==0.1")
	assert(1, b07 == 0.2, "b07==0.2")
	assert(1, c07 == 0.3, "c07==0.3")
	assert(1, d07 == 0.4, "d07==0.4")
	// Two slices from one call.
	var a08 []int
	var b08 []string
	a08, b08 = multiRetSlice()
	assert(1, a08[0], "a08[0]")
	assert(11, a08[10], "a08[10]")
	assert(20, a08[19], "a08[19]")
	assert(0, strcmp(b08[0], "aaa"), "strcmp(b08[0], \"aaa\")")
	assert(0, strcmp(b08[3], "ddd"), "strcmp(b08[3], \"ddd\")")
	assert(0, strcmp(b08[5], "fff"), "strcmp(b08[5], \"fff\")")
	// This test did not pass at first. It passed after changing a09 from
	// gT01 to gT02, but I don't understand why.
	var a09 gT02
	var b09 gT02
	a09, b09 = multiRetArged(100, "abc")
	assert(101, a09.a[0], "a09.a[0]")
	// assert(0, strcmp(a09.b, "abc"), "strcmp(b09.b, \"abc\")")
	assert(101, b09.a[0], "b09.a[0]")
	println("OK")
}
package gridon
import "math"
// tickTables maps each tick-size group to its ordered price bands. A band
// applies to prices p with Lower <= p < Upper, and Tick is the price
// increment inside that band. The tables are consumed by Ticks to count
// increments between two prices, so they must agree with the band
// boundaries encoded in getTopix100Tick / getOtherTick.
var tickTables = map[TickGroup][]struct {
	Lower float64
	Upper float64
	Tick  float64
}{
	TickGroupTopix100: {
		{Lower: 0, Upper: 1_000, Tick: 0.1},
		{Lower: 1_000, Upper: 3_000, Tick: 0.5},
		{Lower: 3_000, Upper: 10_000, Tick: 1},
		{Lower: 10_000, Upper: 30_000, Tick: 5},
		{Lower: 30_000, Upper: 100_000, Tick: 10},
		{Lower: 100_000, Upper: 300_000, Tick: 50},
		{Lower: 300_000, Upper: 1_000_000, Tick: 100},
		{Lower: 1_000_000, Upper: 3_000_000, Tick: 500},
		{Lower: 3_000_000, Upper: 10_000_000, Tick: 1_000},
		{Lower: 10_000_000, Upper: 30_000_000, Tick: 5_000},
		{Lower: 30_000_000, Upper: math.Inf(1), Tick: 10_000},
	},
	TickGroupOther: {
		// BUG FIX: these bands previously reused the TOPIX100 boundary
		// pattern (10_000 / 100_000 / 1_000_000 / 10_000_000), which
		// contradicted getOtherTick in this file (and the JPX tick-size
		// rules for non-TOPIX100 issues: tick 10 applies up to 30,000,
		// tick 100 up to 300,000, and so on). The boundaries now match
		// getOtherTick, so Ticks and GetTick agree.
		{Lower: 0, Upper: 3_000, Tick: 1},
		{Lower: 3_000, Upper: 5_000, Tick: 5},
		{Lower: 5_000, Upper: 30_000, Tick: 10},
		{Lower: 30_000, Upper: 50_000, Tick: 50},
		{Lower: 50_000, Upper: 300_000, Tick: 100},
		{Lower: 300_000, Upper: 500_000, Tick: 500},
		{Lower: 500_000, Upper: 3_000_000, Tick: 1_000},
		{Lower: 3_000_000, Upper: 5_000_000, Tick: 5_000},
		{Lower: 5_000_000, Upper: 30_000_000, Tick: 10_000},
		{Lower: 30_000_000, Upper: 50_000_000, Tick: 50_000},
		{Lower: 50_000_000, Upper: math.Inf(1), Tick: 100_000},
	},
}
// newTick returns a new tick calculator. (Translated: 新しいtickの取得.)
func newTick() ITick {
	return &tick{}
}

// ITick is the interface for tick (price increment) calculations.
type ITick interface {
	// GetTick returns the tick size for the group at the given price.
	GetTick(tickGroup TickGroup, price float64) float64
	// TickAddedPrice returns the price moved by a signed number of ticks.
	TickAddedPrice(tickGroup TickGroup, price float64, tick int) float64
	// Ticks returns how many ticks lie between a and b.
	Ticks(tickGroup TickGroup, a float64, b float64) int
}

// tick is the stateless default implementation of ITick.
type tick struct{}
// GetTick returns the width of one tick (price increment) for the given
// tick-size group at the given price. Any group other than
// TickGroupTopix100 falls back to the general table.
func (t *tick) GetTick(tickGroup TickGroup, price float64) float64 {
	if tickGroup == TickGroupTopix100 {
		return t.getTopix100Tick(price)
	}
	return t.getOtherTick(price)
}
// getTopix100Tick returns the tick size for symbols covered by the TOPIX100
// table. Each threshold is an inclusive upper bound: a price sitting exactly
// on a boundary uses the smaller tick, exactly as the previous switch did.
// A price above every bound (or NaN, which fails every comparison) yields
// the largest tick.
func (t *tick) getTopix100Tick(price float64) float64 {
	type band struct {
		upper float64
		tick  float64
	}
	bands := []band{
		{1_000, 0.1},
		{3_000, 0.5},
		{10_000, 1},
		{30_000, 5},
		{100_000, 10},
		{300_000, 50},
		{1_000_000, 100},
		{3_000_000, 500},
		{10_000_000, 1_000},
		{30_000_000, 5_000},
	}
	for _, b := range bands {
		if price <= b.upper {
			return b.tick
		}
	}
	return 10_000
}
// getOtherTick returns the tick size for symbols NOT covered by the
// TOPIX100 table. Thresholds are inclusive upper bounds: a price exactly on
// a boundary uses the smaller tick.
func (t *tick) getOtherTick(price float64) float64 {
	switch {
	case price <= 3_000:
		return 1
	case price <= 5_000:
		return 5
	case price <= 30_000:
		return 10
	case price <= 50_000:
		return 50
	case price <= 300_000:
		return 100
	case price <= 500_000:
		return 500
	case price <= 3_000_000:
		return 1_000
	case price <= 5_000_000:
		return 5_000
	case price <= 30_000_000:
		return 10_000
	case price <= 50_000_000:
		return 50_000
	default:
		return 100_000
	}
}

// TickAddedPrice returns the price moved by the given signed number of
// ticks, re-reading the tick size at every step so band crossings are
// honored.
func (t *tick) TickAddedPrice(tickGroup TickGroup, price float64, tick int) float64 {
	for tick != 0 {
		if tick < 0 {
			// Nudge slightly below the current price so that a price
			// sitting exactly on a band boundary steps down using the
			// lower band's tick.
			price -= t.GetTick(tickGroup, price-0.01)
			tick++
		} else {
			// Symmetrically, nudge above so a boundary price steps up
			// using the upper band's tick.
			price += t.GetTick(tickGroup, price+0.01)
			tick--
		}
	}
	return math.Round(price*10) / 10 // round to one decimal place
}
// Ticks returns how many ticks lie between a and b (order-insensitive).
// (Translated: minからmaxになるのに何tickあるか.)
func (t *tick) Ticks(tickGroup TickGroup, a float64, b float64) int {
	max, min := a, b
	if max < min {
		max, min = min, max
	}
	tickTable, ok := tickTables[tickGroup]
	if !ok {
		// Unknown groups fall back to the general (non-TOPIX100) table.
		tickTable = tickTables[TickGroupOther]
	}
	// Find the price band containing min. If max lies in the same band,
	// count the ticks from min to max and stop; otherwise count the ticks
	// up to the band's upper bound, move min there, and continue with the
	// next band.
	var tick int
	for _, t := range tickTable {
		if min < t.Lower || t.Upper <= min {
			continue
		}
		// max falls inside this band: add the remaining ticks and finish.
		if t.Lower <= max && max < t.Upper {
			tick += int(math.Ceil((max - min) / t.Tick))
			break
		}
		// Otherwise consume this band up to its upper bound and advance.
		tick += int(math.Ceil((t.Upper - min) / t.Tick))
		min = t.Upper
	}
	return tick
} | tick.go | 0.565299 | 0.726147 | tick.go | starcoder |
package imaging
import (
"image"
)
// Rotate90 rotates the image 90 degrees counterclockwise and returns the transformed image.
func Rotate90(img image.Image) *image.NRGBA {
	src := toNRGBA(img)
	// NOTE(review): width/height come from Bounds().Max, which assumes the
	// converted image has a zero origin — presumably toNRGBA guarantees
	// that; confirm.
	srcW := src.Bounds().Max.X
	srcH := src.Bounds().Max.Y
	dstW := srcH
	dstH := srcW
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(dstH, func(partStart, partEnd int) {
		for dstY := partStart; dstY < partEnd; dstY++ {
			for dstX := 0; dstX < dstW; dstX++ {
				// Destination (x, y) reads source (dstH-y-1, x).
				srcX := dstH - dstY - 1
				srcY := dstX
				// 4 bytes per NRGBA pixel.
				srcOff := srcY*src.Stride + srcX*4
				dstOff := dstY*dst.Stride + dstX*4
				copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
			}
		}
	})
	return dst
}

// Rotate180 rotates the image 180 degrees counterclockwise and returns the transformed image.
func Rotate180(img image.Image) *image.NRGBA {
	src := toNRGBA(img)
	srcW := src.Bounds().Max.X
	srcH := src.Bounds().Max.Y
	dstW := srcW
	dstH := srcH
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(dstH, func(partStart, partEnd int) {
		for dstY := partStart; dstY < partEnd; dstY++ {
			for dstX := 0; dstX < dstW; dstX++ {
				// Destination (x, y) reads source (W-x-1, H-y-1):
				// both axes mirrored.
				srcX := dstW - dstX - 1
				srcY := dstH - dstY - 1
				srcOff := srcY*src.Stride + srcX*4
				dstOff := dstY*dst.Stride + dstX*4
				copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
			}
		}
	})
	return dst
}

// Rotate270 rotates the image 270 degrees counterclockwise and returns the transformed image.
func Rotate270(img image.Image) *image.NRGBA {
	src := toNRGBA(img)
	srcW := src.Bounds().Max.X
	srcH := src.Bounds().Max.Y
	dstW := srcH
	dstH := srcW
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(dstH, func(partStart, partEnd int) {
		for dstY := partStart; dstY < partEnd; dstY++ {
			for dstX := 0; dstX < dstW; dstX++ {
				// Destination (x, y) reads source (y, dstW-x-1): the
				// inverse mapping of Rotate90.
				srcX := dstY
				srcY := dstW - dstX - 1
				srcOff := srcY*src.Stride + srcX*4
				dstOff := dstY*dst.Stride + dstX*4
				copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
			}
		}
	})
	return dst
}
// FlipH flips the image horizontally (from left to right) and returns the
// transformed image. Row y of the result is row y of the source with its
// columns reversed.
func FlipH(img image.Image) *image.NRGBA {
	src := toNRGBA(img)
	w := src.Bounds().Max.X
	h := src.Bounds().Max.Y
	dst := image.NewNRGBA(image.Rect(0, 0, w, h))
	parallel(h, func(partStart, partEnd int) {
		for y := partStart; y < partEnd; y++ {
			srcRow := y * src.Stride
			dstRow := y * dst.Stride
			for x := 0; x < w; x++ {
				// Mirror the column; 4 bytes per NRGBA pixel.
				s := srcRow + (w-x-1)*4
				d := dstRow + x*4
				copy(dst.Pix[d:d+4], src.Pix[s:s+4])
			}
		}
	})
	return dst
}
// FlipV flips the image vertically (from top to bottom) and returns the transformed image.
func FlipV(img image.Image) *image.NRGBA {
	src := toNRGBA(img)
	srcW := src.Bounds().Max.X
	srcH := src.Bounds().Max.Y
	dstW := srcW
	dstH := srcH
	dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
	parallel(dstH, func(partStart, partEnd int) {
		for dstY := partStart; dstY < partEnd; dstY++ {
			for dstX := 0; dstX < dstW; dstX++ {
				// Row y reads from the mirrored row H-y-1; columns are
				// unchanged.
				srcX := dstX
				srcY := dstH - dstY - 1
				srcOff := srcY*src.Stride + srcX*4
				dstOff := dstY*dst.Stride + dstX*4
				copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
			}
		}
	})
	return dst
} | vendor/github.com/wujiang/imaging/transform.go | 0.869146 | 0.563858 | transform.go | starcoder |
package typeinfo
import (
"fmt"
"github.com/src-d/go-mysql-server/sql"
"vitess.io/vitess/go/sqltypes"
"github.com/liquidata-inc/dolt/go/store/types"
)
// Identifier names a TypeInfo implementation. Identifiers are used when
// serializing and deserializing type information.
type Identifier string

const (
	UnknownTypeIdentifier    Identifier = "unknown"
	BitTypeIdentifier        Identifier = "bit"
	BoolTypeIdentifier       Identifier = "bool"
	DatetimeTypeIdentifier   Identifier = "datetime"
	DecimalTypeIdentifier    Identifier = "decimal"
	EnumTypeIdentifier       Identifier = "enum"
	FloatTypeIdentifier      Identifier = "float"
	InlineBlobTypeIdentifier Identifier = "inlineblob"
	IntTypeIdentifier        Identifier = "int"
	SetTypeIdentifier        Identifier = "set"
	TimeTypeIdentifier       Identifier = "time"
	TupleTypeIdentifier      Identifier = "tuple"
	UintTypeIdentifier       Identifier = "uint"
	UuidTypeIdentifier       Identifier = "uuid"
	VarBinaryTypeIdentifier  Identifier = "varbinary"
	VarStringTypeIdentifier  Identifier = "varstring"
	YearTypeIdentifier       Identifier = "year"
)

// Identifiers is the set of all known identifiers; ParseIdentifier uses it
// to validate incoming names.
var Identifiers = map[Identifier]struct{}{
	UnknownTypeIdentifier:    {},
	BitTypeIdentifier:        {},
	BoolTypeIdentifier:       {},
	DatetimeTypeIdentifier:   {},
	DecimalTypeIdentifier:    {},
	EnumTypeIdentifier:       {},
	FloatTypeIdentifier:      {},
	InlineBlobTypeIdentifier: {},
	IntTypeIdentifier:        {},
	SetTypeIdentifier:        {},
	TimeTypeIdentifier:       {},
	TupleTypeIdentifier:      {},
	UintTypeIdentifier:       {},
	UuidTypeIdentifier:       {},
	VarBinaryTypeIdentifier:  {},
	VarStringTypeIdentifier:  {},
	YearTypeIdentifier:       {},
}

// TypeInfo is an interface used for encoding type information.
type TypeInfo interface {
	// ConvertNomsValueToValue converts a Noms value to a go value. The expected NomsKind of the given
	// parameter is equivalent to the NomsKind returned by this type info.
	ConvertNomsValueToValue(v types.Value) (interface{}, error)
	// ConvertValueToNomsValue converts a go value or Noms value to a Noms value. The type of the Noms
	// value will be equivalent to the NomsKind returned from NomsKind.
	ConvertValueToNomsValue(v interface{}) (types.Value, error)
	// Equals returns whether the given TypeInfo is equivalent to this TypeInfo.
	Equals(other TypeInfo) bool
	// FormatValue returns the stringified version of the value.
	FormatValue(v types.Value) (*string, error)
	// GetTypeIdentifier returns an identifier for this type used for serialization.
	GetTypeIdentifier() Identifier
	// GetTypeParams returns a map[string]string containing the type parameters. This is used for
	// serialization and deserialization of type information.
	GetTypeParams() map[string]string
	// IsValid takes in a types.Value and returns whether it is valid for this type.
	IsValid(v types.Value) bool
	// NomsKind returns the NomsKind that best matches this TypeInfo.
	NomsKind() types.NomsKind
	// ParseValue parses a string and returns a go value that represents it according to this type.
	ParseValue(str *string) (types.Value, error)
	// ToSqlType returns the TypeInfo as a sql.Type. If an exact match is able to be made then that is
	// the one returned, otherwise the sql.Type is the closest match possible.
	ToSqlType() sql.Type
	// Stringer results are used to inform users of the constraint's properties.
	fmt.Stringer
}
// FromSqlType takes in a sql.Type and returns the most relevant TypeInfo.
//
// Base types whose storage format has not been decided yet (Blob,
// VarBinary, Binary) are rejected with a "not yet implemented" error. An
// error is also returned when the sql.Type does not implement the interface
// expected for its base type, or when the base type is unrecognized.
func FromSqlType(sqlType sql.Type) (TypeInfo, error) {
	switch sqlType.Type() {
	case sqltypes.Null:
		return UnknownType, nil
	case sqltypes.Int8:
		return Int8Type, nil
	case sqltypes.Int16:
		return Int16Type, nil
	case sqltypes.Int24:
		return Int24Type, nil
	case sqltypes.Int32:
		return Int32Type, nil
	case sqltypes.Int64:
		return Int64Type, nil
	case sqltypes.Uint8:
		return Uint8Type, nil
	case sqltypes.Uint16:
		return Uint16Type, nil
	case sqltypes.Uint24:
		return Uint24Type, nil
	case sqltypes.Uint32:
		return Uint32Type, nil
	case sqltypes.Uint64:
		return Uint64Type, nil
	case sqltypes.Float32:
		return Float32Type, nil
	case sqltypes.Float64:
		return Float64Type, nil
	case sqltypes.Timestamp:
		return TimestampType, nil
	case sqltypes.Date:
		return DateType, nil
	case sqltypes.Time:
		return TimeType, nil
	case sqltypes.Datetime:
		return DatetimeType, nil
	case sqltypes.Year:
		return YearType, nil
	case sqltypes.Decimal:
		decimalSQLType, ok := sqlType.(sql.DecimalType)
		if !ok {
			return nil, fmt.Errorf(`expected "DecimalTypeIdentifier" from SQL basetype "Decimal"`)
		}
		return &decimalType{decimalSQLType}, nil
	case sqltypes.Text:
		stringType, ok := sqlType.(sql.StringType)
		if !ok {
			return nil, fmt.Errorf(`expected "StringType" from SQL basetype "Text"`)
		}
		return &varStringType{stringType}, nil
	case sqltypes.Blob:
		// TODO: determine the storage format; until then this base type is
		// intentionally unsupported. (This branch previously hid the
		// varBinaryType construction behind an always-true
		// `fmt.Sprintf("a") != ""` guard to keep dead code compiling; the
		// dead code has been removed, behavior is unchanged.)
		return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
	case sqltypes.VarChar:
		stringType, ok := sqlType.(sql.StringType)
		if !ok {
			return nil, fmt.Errorf(`expected "StringType" from SQL basetype "VarChar"`)
		}
		return &varStringType{stringType}, nil
	case sqltypes.VarBinary:
		// TODO: determine the storage format; intentionally unsupported
		// until then (dead fallback code removed, behavior unchanged).
		return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
	case sqltypes.Char:
		stringType, ok := sqlType.(sql.StringType)
		if !ok {
			return nil, fmt.Errorf(`expected "StringType" from SQL basetype "Char"`)
		}
		return &varStringType{stringType}, nil
	case sqltypes.Binary:
		// TODO: determine the storage format; intentionally unsupported
		// until then (dead fallback code removed, behavior unchanged).
		return nil, fmt.Errorf(`"%v" has not yet been implemented`, sqlType.String())
	case sqltypes.Bit:
		bitSQLType, ok := sqlType.(sql.BitType)
		if !ok {
			return nil, fmt.Errorf(`expected "BitTypeIdentifier" from SQL basetype "Bit"`)
		}
		return &bitType{bitSQLType}, nil
	case sqltypes.Enum:
		enumSQLType, ok := sqlType.(sql.EnumType)
		if !ok {
			return nil, fmt.Errorf(`expected "EnumTypeIdentifier" from SQL basetype "Enum"`)
		}
		return &enumType{enumSQLType}, nil
	case sqltypes.Set:
		setSQLType, ok := sqlType.(sql.SetType)
		if !ok {
			return nil, fmt.Errorf(`expected "SetTypeIdentifier" from SQL basetype "Set"`)
		}
		return &setType{setSQLType}, nil
	default:
		return nil, fmt.Errorf(`no type info can be created from SQL base type "%v"`, sqlType.String())
	}
}
// FromTypeParams constructs a TypeInfo from the given identifier and parameters.
// Parameterless types return their package-level singletons; parameterized
// types are reconstructed from params. Unknown identifiers are an error.
func FromTypeParams(id Identifier, params map[string]string) (TypeInfo, error) {
	switch id {
	case BitTypeIdentifier:
		return CreateBitTypeFromParams(params)
	case BoolTypeIdentifier:
		return BoolType, nil
	case DatetimeTypeIdentifier:
		return CreateDatetimeTypeFromParams(params)
	case DecimalTypeIdentifier:
		return CreateDecimalTypeFromParams(params)
	case EnumTypeIdentifier:
		return CreateEnumTypeFromParams(params)
	case FloatTypeIdentifier:
		return CreateFloatTypeFromParams(params)
	case InlineBlobTypeIdentifier:
		return InlineBlobType, nil
	case IntTypeIdentifier:
		return CreateIntTypeFromParams(params)
	case SetTypeIdentifier:
		return CreateSetTypeFromParams(params)
	case TimeTypeIdentifier:
		return TimeType, nil
	case TupleTypeIdentifier:
		return TupleType, nil
	case UintTypeIdentifier:
		return CreateUintTypeFromParams(params)
	case UuidTypeIdentifier:
		return UuidType, nil
	case VarBinaryTypeIdentifier:
		return CreateVarBinaryTypeFromParams(params)
	case VarStringTypeIdentifier:
		return CreateVarStringTypeFromParams(params)
	case YearTypeIdentifier:
		return YearType, nil
	default:
		return nil, fmt.Errorf(`"%v" cannot be made from an identifier and params`, id)
	}
}

// FromKind returns the default TypeInfo for a given types.Value.
// Note: panics on a NomsKind with no default mapping.
func FromKind(kind types.NomsKind) TypeInfo {
	switch kind {
	case types.BoolKind:
		return BoolType
	case types.FloatKind:
		return Float64Type
	case types.InlineBlobKind:
		return InlineBlobType
	case types.IntKind:
		return Int64Type
	case types.NullKind:
		return UnknownType
	case types.StringKind:
		return StringDefaultType
	case types.TimestampKind:
		return DatetimeType
	case types.TupleKind:
		return TupleType
	case types.UintKind:
		return Uint64Type
	case types.UUIDKind:
		return UuidType
	default:
		panic(fmt.Errorf(`no default type info for NomsKind "%v"`, kind.String()))
	}
}
// Convert takes in a types.Value, as well as the source and destination
// TypeInfos, and converts the value into the destination representation.
// The conversion round-trips through the string form: srcTi formats the
// value and destTi parses it back.
func Convert(v types.Value, srcTi TypeInfo, destTi TypeInfo) (types.Value, error) {
	formatted, err := srcTi.FormatValue(v)
	if err != nil {
		return nil, err
	}
	return destTi.ParseValue(formatted)
}
// IsStringType returns whether the given TypeInfo represents a CHAR, VARCHAR, or TEXT-derivative.
func IsStringType(ti TypeInfo) bool {
	_, ok := ti.(*varStringType)
	return ok
}

// ParseIdentifier takes in an Identifier in string form and returns the matching Identifier.
// Returns UnknownTypeIdentifier when the string match is not found.
func ParseIdentifier(name string) Identifier {
	id := Identifier(name)
	_, ok := Identifiers[id]
	if ok {
		return id
	}
	return UnknownTypeIdentifier
}

// String returns a string representation of the identifier. This may later be used in parsing to
// retrieve the original identifier.
func (i Identifier) String() string {
	return string(i)
} | go/libraries/doltcore/schema/typeinfo/typeinfo.go | 0.540681 | 0.420659 | typeinfo.go | starcoder |
package util
import "strconv"
import "time"
// MomentLength is the expected length of a "moment" string.
// NOTE(review): this constant is not referenced anywhere in this file —
// confirm that external callers rely on it before removing.
const MomentLength = 32

// TimeString returns the current UTC time formatted via time.Time.String
// (e.g. "2006-01-02 15:04:05.999999999 +0000 UTC").
func TimeString() string {
	return time.Now().UTC().String()
}
// ParseTimeString decodes the leading "YYYY-MM-DD hh:mm:ss" portion of a
// string in the layout produced by TimeString. Fields that fail to parse
// silently become zero, mirroring the original lenient behavior.
// NOTE(review): components are interpreted in time.Local even though
// TimeString emits UTC — confirm this asymmetry is intended.
func ParseTimeString(timestr string) time.Time {
	field := func(from, to int) int {
		n, _ := strconv.Atoi(timestr[from:to])
		return n
	}
	return time.Date(
		field(0, 4),             // year
		time.Month(field(5, 7)), // month
		field(8, 10),            // day
		field(11, 13),           // hour
		field(14, 16),           // minute
		field(17, 19),           // second
		0, time.Local)
}
// months maps the English three-letter month abbreviation to its one-based
// month number.
var months = map[string]int{
	"Jan": 1,
	"Feb": 2,
	"Mar": 3,
	"Apr": 4,
	"May": 5,
	"Jun": 6,
	"Jul": 7,
	"Aug": 8,
	"Sep": 9,
	"Oct": 10,
	"Nov": 11,
	"Dec": 12,
}

// ParseMomentString decodes a "moment" string of the form "Jan02 2006 15:04"
// (three-letter month, two-digit day, four-digit year, hour, minute) into a
// time.Time in the local zone. Fields that fail to parse silently become
// zero, matching the other Parse* helpers in this file.
func ParseMomentString(momentstr string) time.Time {
	yr, _ := strconv.Atoi(momentstr[6:10])
	// BUG FIX: month abbreviations are three characters ("Jan"), but the
	// lookup previously sliced only two (momentstr[:2]), so it could never
	// match a key in months and the month was always 0 — an invalid
	// time.Month. Slice all three characters; a missing key still yields 0.
	mo := months[momentstr[:3]]
	d, _ := strconv.Atoi(momentstr[3:5])
	hr, _ := strconv.Atoi(momentstr[11:13])
	min, _ := strconv.Atoi(momentstr[14:16])
	return time.Date(yr, time.Month(mo), d, hr, min, 0, 0, time.Local)
}
// ParseDateString decodes a "YYYY-MM-DD" prefix into a midnight time.Time
// in the local zone. Malformed numeric fields silently become zero.
func ParseDateString(datestr string) time.Time {
	num := func(s string) int {
		n, _ := strconv.Atoi(s)
		return n
	}
	return time.Date(num(datestr[:4]), time.Month(num(datestr[5:7])), num(datestr[8:10]), 0, 0, 0, 0, time.Local)
}
// ParseMinuteString decodes a "YYYY-MM-DD hh:mm" prefix into a time.Time in
// the local zone with seconds zeroed. Malformed numeric fields silently
// become zero.
func ParseMinuteString(minutestr string) time.Time {
	num := func(from, to int) int {
		n, _ := strconv.Atoi(minutestr[from:to])
		return n
	}
	return time.Date(
		num(0, 4),             // year
		time.Month(num(5, 7)), // month
		num(8, 10),            // day
		num(11, 13),           // hour
		num(14, 16),           // minute
		0, 0, time.Local)
}
// DurationTimeStrings returns the duration from timestr1 to timestr2, both
// in the layout produced by TimeString. The result is negative when
// timestr2 is earlier than timestr1.
func DurationTimeStrings(timestr1, timestr2 string) time.Duration {
	tm1 := ParseTimeString(timestr1)
	tm2 := ParseTimeString(timestr2)
	return tm2.Sub(tm1)
}

// DurationHours returns the duration from timestr1 to timestr2 in hours.
func DurationHours(timestr1, timestr2 string) float64 {
	return DurationTimeStrings(timestr1, timestr2).Hours()
}

// DurationDays returns the duration from timestr1 to timestr2 in days
// (hours divided by 24).
func DurationDays(timestr1, timestr2 string) float64 {
	return DurationHours(timestr1, timestr2) / float64(24)
}
// ToTheDay truncates a TimeString-formatted string to "YYYY-MM-DD".
func ToTheDay(timestr string) string {
	return timestr[:10]
}

// ToTheHour truncates a TimeString-formatted string to "YYYY-MM-DD hh".
func ToTheHour(timestr string) string {
	return timestr[:13]
}

// ToTheMinute truncates a TimeString-formatted string to "YYYY-MM-DD hh:mm".
func ToTheMinute(timestr string) string {
	return timestr[:16]
}

// ToTheSecond truncates a TimeString-formatted string to
// "YYYY-MM-DD hh:mm:ss".
func ToTheSecond(timestr string) string {
	return timestr[:19]
} | util/time.go | 0.657758 | 0.455017 | time.go | starcoder |
package main
import (
"fmt"
"strconv"
"strings"
)
// size creates a function that returns the SSZ size of the struct. There are two components:
// 1. Fixed: Size that we can determine at compilation time (i.e. uint, fixed bytes, fixed vector...)
// 2. Dynamic: Size that depends on the input (i.e. lists, dynamic containers...)
// Note that if any of the internal fields of the struct is nil, we will not fail, only not add up
// that field to the size. It is up to other methods like marshal to fail on that scenario.
//
// The `::` token is a receiver placeholder — presumably substituted by
// appendObjSignature (not visible here); confirm.
func (e *env) size(name string, v *Value) string {
	tmpl := `// SizeSSZ returns the ssz encoded size in bytes for the {{.name}} object
func (:: *{{.name}}) SizeSSZ() (size int) {
	size = {{.fixed}}{{if .dynamic}}
	{{.dynamic}}
	{{end}}
	return
}`
	str := execTmpl(tmpl, map[string]interface{}{
		"name": name,
		// v.n is the precomputed fixed-size portion.
		"fixed": v.n,
		// The dynamic portion walks the struct's variable-size fields.
		"dynamic": v.sizeContainer("size", true),
	})
	return appendObjSignature(str, v)
}
// sizeContainer emits size-accumulation code for a container value.
// With start=true it walks the container's fields and emits one fragment
// per dynamic field; with start=false it emits the size expression for a
// single container field, preceded by a nil-check-and-instantiate guard
// when the field may be nil.
func (v *Value) sizeContainer(name string, start bool) string {
	if !start {
		tmpl := `{{if .check}} if ::.{{.name}} == nil {
		::.{{.name}} = new({{.obj}})
	}
	{{end}} {{ .dst }} += ::.{{.name}}.SizeSSZ()`
		// List elements and non-pointer fields cannot be nil, so they
		// skip the guard.
		check := true
		if v.isListElem() {
			check = false
		}
		if v.noPtr {
			check = false
		}
		return execTmpl(tmpl, map[string]interface{}{
			"name":  v.name,
			"dst":   name,
			"obj":   v.objRef(),
			"check": check,
		})
	}
	// start=true: only dynamic (non-fixed) fields contribute code; fixed
	// fields are already folded into the compile-time constant.
	out := []string{}
	for indx, v := range v.o {
		if !v.isFixed() {
			out = append(out, fmt.Sprintf("// Field (%d) '%s'\n%s", indx, v.name, v.size(name)))
		}
	}
	return strings.Join(out, "\n\n")
}
// 'name' is the name of target variable we assign the size too. We also use this function
// during marshalling to figure out the size of the offset
func (v *Value) size(name string) string {
	// Fixed-size values contribute a compile-time constant; fixed
	// containers still recurse so nested fields are accounted for.
	if v.isFixed() {
		if v.t == TypeContainer {
			return v.sizeContainer(name, false)
		}
		if v.n == 1 {
			return name + "++"
		}
		return name + " += " + strconv.Itoa(int(v.n))
	}
	switch v.t {
	case TypeContainer, TypeReference:
		return v.sizeContainer(name, false)
	case TypeBitList:
		fallthrough
	case TypeBytes:
		// Dynamic byte slices contribute their runtime length.
		return fmt.Sprintf(name+" += len(::.%s)", v.name)
	case TypeList:
		fallthrough
	case TypeVector:
		if v.e.isFixed() {
			// Fixed-size elements: length * element size.
			return fmt.Sprintf("%s += len(::.%s) * %d", name, v.name, v.e.n)
		}
		// Dynamic elements: each one costs a 4-byte offset plus its own
		// recursively computed size. Note the element's name is rewritten
		// in place to index the loop variable.
		v.e.name = v.name + "[ii]"
		tmpl := `for ii := 0; ii < len(::.{{.name}}); ii++ {
		{{.size}} += 4
		{{.dynamic}}
	}`
		return execTmpl(tmpl, map[string]interface{}{
			"name":    v.name,
			"size":    name,
			"dynamic": v.e.size(name),
		})
	default:
		panic(fmt.Errorf("size not implemented for type %s", v.t.String()))
	}
} | sszgen/size.go | 0.703448 | 0.487307 | size.go | starcoder |
package gonatsd
import (
"strings"
)
// Trie - prefix tree. Keys are strings split on a configurable separator;
// each node stores the values inserted under the key ending at that node.
type Trie struct {
	root *trieNode
	sep  string
	// nodes counts trieNodes (excluding the root); values counts stored
	// values. Both are maintained by Insert/Delete.
	nodes  int
	values int
}

// trieNode is a single token in the tree. Children is lazily allocated;
// values holds everything inserted under the key terminating here.
type trieNode struct {
	Name     string
	Children map[string]*trieNode
	values   []interface{}
}

// NewTrie returns an empty Trie whose keys are split on sep.
func NewTrie(sep string) *Trie {
	trie := &Trie{}
	trie.root = &trieNode{}
	trie.sep = sep
	return trie
}
// Insert adds value under key. The key is split on the trie separator and
// one node per token is created on demand. Duplicate values for the same
// key are all kept; Delete removes one occurrence at a time.
func (t *Trie) Insert(key string, value interface{}) {
	cur := t.root
	for _, token := range strings.Split(key, t.sep) {
		if cur.Children == nil {
			cur.Children = make(map[string]*trieNode)
		}
		next, ok := cur.Children[token]
		if !ok {
			next = &trieNode{Name: token}
			cur.Children[token] = next
			t.nodes++
		}
		cur = next
	}
	if cur.values == nil {
		cur.values = make([]interface{}, 0, 1)
	}
	cur.values = append(cur.values, value)
	t.values++
}
// Delete removes one occurrence of value stored under key. It reports
// whether a matching value was found. When the terminal node becomes empty
// (no values, no children) the now-useless path is pruned.
func (t *Trie) Delete(key string, value interface{}) bool {
	parts := strings.Split(key, t.sep)
	// Need the nodes for pruning later
	nodes := make([]*trieNode, len(parts))
	node := t.root
	for i, part := range parts {
		if node.Children == nil {
			return false
		}
		child := node.Children[part]
		if child == nil {
			return false
		}
		node = child
		nodes[i] = node
	}
	if node.values == nil {
		return false
	}
	for i, v := range node.values {
		if v == value {
			// Swap-with-last removal: O(1), does not preserve the order
			// of the remaining values.
			lastIndex := len(node.values) - 1
			node.values[i] = node.values[lastIndex]
			node.values = node.values[:lastIndex]
			t.values--
			if len(node.values) == 0 && len(node.Children) == 0 {
				t.pruneNodes(nodes)
			}
			return true
		}
	}
	return false
}

// Nodes returns the number of nodes currently in the trie (root excluded).
func (t *Trie) Nodes() int {
	return t.nodes
}

// Values returns the total number of values currently stored.
func (t *Trie) Values() int {
	return t.values
}
// emptyNodeSlice is a shared zero-length result to avoid allocating on
// non-matching lookups.
var emptyNodeSlice = make([]*trieNode, 0, 0)

// Matcher examines one node for one key token and returns (nodes to
// continue matching into at the next level, nodes whose stored values match
// immediately regardless of remaining tokens).
type Matcher func(*trieNode, string) ([]*trieNode, []*trieNode)

// BasicMatcher matches a token only against the literally-named child.
var BasicMatcher = func(node *trieNode, token string) ([]*trieNode, []*trieNode) {
	match := node.Children[token]
	if match != nil {
		return []*trieNode{match}, nil
	}
	return emptyNodeSlice, nil
}

// WildcardMatcher matches the literal child, the single-token wildcard "*",
// and the full wildcard ">". A ">" child terminates matching for that
// branch: its values are returned as immediate matches.
var WildcardMatcher = func(node *trieNode, token string) ([]*trieNode, []*trieNode) {
	matches := make([]*trieNode, 0, 3)
	match := node.Children[token]
	if match != nil {
		matches = append(matches, match)
	}
	match = node.Children["*"]
	if match != nil {
		matches = append(matches, match)
	}
	match = node.Children[">"]
	if match != nil {
		return matches, []*trieNode{match}
	}
	return matches, nil
}
// Match returns all values whose stored keys are matched against key by the
// given matcher, walking the trie breadth-first one token level at a time.
// Values surfaced early by the matcher (e.g. ">" branches) are collected as
// the walk proceeds; values at nodes surviving the final level are appended
// at the end.
func (t *Trie) Match(key string, matcher Matcher) []interface{} {
	values := make([]interface{}, 0, 1)
	parts := strings.Split(key, t.sep)
	currentLevel := []*trieNode{t.root}
	for _, part := range parts {
		nextLevel := make([]*trieNode, 0, 1)
		for _, node := range currentLevel {
			matches, valueNodes := matcher(node, part)
			nextLevel = append(nextLevel, matches...)
			if valueNodes != nil {
				for _, valueNode := range valueNodes {
					values = append(values, valueNode.values...)
				}
			}
		}
		// No surviving branches: stop early with what we have.
		if len(nextLevel) == 0 {
			return values
		}
		currentLevel = nextLevel
	}
	for _, node := range currentLevel {
		values = append(values, node.values...)
	}
	return values
}

// pruneNodes walks the just-emptied path bottom-up, detaching each node
// that has no values and no children. It stops at the first non-empty
// ancestor; if every node on the path is empty, the first path node is
// finally removed from the root after the loop.
func (t *Trie) pruneNodes(nodes []*trieNode) {
	length := len(nodes)
	var last *trieNode = nil
	for i := length - 1; i >= 0; i-- {
		node := nodes[i]
		if last != nil {
			delete(node.Children, last.Name)
			t.nodes--
		}
		if len(node.values) == 0 && len(node.Children) == 0 {
			node.values = nil
			node.Children = nil
		} else {
			// Non-empty ancestor: nothing above needs pruning.
			return
		}
		last = node
	}
	delete(t.root.Children, last.Name)
	t.nodes--
} | gonatsd/trie.go | 0.501221 | 0.550487 | trie.go | starcoder |
package common
import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
	"math/bits"

	"github.com/klauspost/compress/snappy"
	"github.com/n1chre/minio/pkg/s3select/internal/parquet-go/gen-go/parquet"
	"github.com/pierrec/lz4"
)
// ToSliceValue converts values to a typed slice according to the parquet
// physical type: []bool, []int32, []int64, []float32, []float64 or [][]byte.
// Any other physical type yields nil. Elements must already hold the
// matching Go type; a mismatch panics on the type assertion.
func ToSliceValue(values []interface{}, parquetType parquet.Type) interface{} {
	switch parquetType {
	case parquet.Type_BOOLEAN:
		bs := make([]bool, len(values))
		for i := range values {
			bs[i] = values[i].(bool)
		}
		return bs
	case parquet.Type_INT32:
		i32s := make([]int32, len(values))
		for i := range values {
			i32s[i] = values[i].(int32)
		}
		return i32s
	case parquet.Type_INT64:
		i64s := make([]int64, len(values))
		for i := range values {
			i64s[i] = values[i].(int64)
		}
		return i64s
	case parquet.Type_FLOAT:
		f32s := make([]float32, len(values))
		for i := range values {
			f32s[i] = values[i].(float32)
		}
		return f32s
	case parquet.Type_DOUBLE:
		f64s := make([]float64, len(values))
		for i := range values {
			f64s[i] = values[i].(float64)
		}
		return f64s
	case parquet.Type_BYTE_ARRAY:
		array := make([][]byte, len(values))
		for i := range values {
			array[i] = values[i].([]byte)
		}
		return array
	}
	return nil
}
// BitWidth returns the number of bits required to accommodate the given
// value, i.e. the position of its highest set bit. BitWidth(0) is 0.
func BitWidth(ui64 uint64) (width int32) {
	// bits.Len64 is the standard-library equivalent of the previous
	// shift-and-count loop and typically compiles to a single
	// leading-zero-count instruction.
	return int32(bits.Len64(ui64))
}
// Compress compresses given data with the requested codec. UNCOMPRESSED
// returns the input unchanged (no copy); SNAPPY encodes in one shot; GZIP
// and LZ4 stream through a writer which is flushed and closed before the
// buffer is returned. Unsupported codecs are an error.
func Compress(compressionType parquet.CompressionCodec, data []byte) ([]byte, error) {
	switch compressionType {
	case parquet.CompressionCodec_UNCOMPRESSED:
		return data, nil
	case parquet.CompressionCodec_SNAPPY:
		return snappy.Encode(nil, data), nil
	case parquet.CompressionCodec_GZIP:
		buf := new(bytes.Buffer)
		writer := gzip.NewWriter(buf)
		n, err := writer.Write(data)
		if err != nil {
			return nil, err
		}
		if n != len(data) {
			return nil, fmt.Errorf("short writes")
		}
		if err = writer.Flush(); err != nil {
			return nil, err
		}
		// Close finalizes the gzip trailer; it must happen before the
		// buffer is read.
		if err = writer.Close(); err != nil {
			return nil, err
		}
		return buf.Bytes(), nil
	case parquet.CompressionCodec_LZ4:
		buf := new(bytes.Buffer)
		writer := lz4.NewWriter(buf)
		n, err := writer.Write(data)
		if err != nil {
			return nil, err
		}
		if n != len(data) {
			return nil, fmt.Errorf("short writes")
		}
		if err = writer.Flush(); err != nil {
			return nil, err
		}
		if err = writer.Close(); err != nil {
			return nil, err
		}
		return buf.Bytes(), nil
	}
	return nil, fmt.Errorf("unsupported compression codec %v", compressionType)
}
// Uncompress uncompresses given data with the requested codec. UNCOMPRESSED
// returns the input unchanged (no copy). Unsupported codecs are an error.
func Uncompress(compressionType parquet.CompressionCodec, data []byte) ([]byte, error) {
	switch compressionType {
	case parquet.CompressionCodec_UNCOMPRESSED:
		return data, nil
	case parquet.CompressionCodec_SNAPPY:
		return snappy.Decode(nil, data)
	case parquet.CompressionCodec_GZIP:
		reader, err := gzip.NewReader(bytes.NewReader(data))
		if err != nil {
			return nil, err
		}
		defer reader.Close()
		return ioutil.ReadAll(reader)
	case parquet.CompressionCodec_LZ4:
		// The lz4 reader wraps an in-memory source; it needs no Close.
		return ioutil.ReadAll(lz4.NewReader(bytes.NewReader(data)))
	}
	return nil, fmt.Errorf("unsupported compression codec %v", compressionType)
} | pkg/s3select/internal/parquet-go/common/common.go | 0.604516 | 0.465448 | common.go | starcoder |
package datatype
import (
"database/sql/driver"
"encoding/json"
"errors"
"fmt"
"math"
"github.com/go-gl/mathgl/mgl32"
"github.com/mysll/toolkit"
)
// ObjectId identifies a game object.
type ObjectId uint64

// Vec3 is a 3-component float32 vector, layout-compatible with mgl32.Vec3.
type Vec3 mgl32.Vec3

// Common unit and constant vectors (left-handed convention: Forward is +Z).
var (
	Forward = Vec3{0, 0, 1}
	Back    = Vec3{0, 0, -1}
	Up      = Vec3{0, 1, 0}
	Down    = Vec3{0, -1, 0}
	Left    = Vec3{-1, 0, 0}
	Right   = Vec3{1, 0, 0}
	Zero    = Vec3{}
	One     = Vec3{1, 1, 1}
)

// V3 constructs a Vec3 from its components.
func V3(x float32, y float32, z float32) Vec3 {
	return Vec3{x, y, z}
}

// X returns the first component.
func (v Vec3) X() float32 {
	return v[0]
}

// Y returns the second component.
func (v Vec3) Y() float32 {
	return v[1]
}

// Z returns the third component.
func (v Vec3) Z() float32 {
	return v[2]
}

// Equal reports component-wise approximate equality (via toolkit.IsEqual32)
// rather than exact float comparison.
func (v Vec3) Equal(rhs Vec3) bool {
	for i := 0; i < 3; i++ {
		if !toolkit.IsEqual32(v[i], rhs[i]) {
			return false
		}
	}
	return true
}

// Cross returns the cross product v × v2.
func (v Vec3) Cross(v2 Vec3) Vec3 {
	return Vec3{v[1]*v2[2] - v[2]*v2[1], v[2]*v2[0] - v[0]*v2[2], v[0]*v2[1] - v[1]*v2[0]}
}

// Mul returns the vector scaled by c.
func (v Vec3) Mul(c float32) Vec3 {
	return Vec3{v[0] * c, v[1] * c, v[2] * c}
}

// Add returns the component-wise sum v + v2.
func (v Vec3) Add(v2 Vec3) Vec3 {
	return Vec3{v[0] + v2[0], v[1] + v2[1], v[2] + v2[2]}
}

// Sub returns the component-wise difference v - v2.
func (v Vec3) Sub(v2 Vec3) Vec3 {
	return Vec3{v[0] - v2[0], v[1] - v2[1], v[2] - v2[2]}
}

// Dot returns the dot product v · v2.
func (v Vec3) Dot(v2 Vec3) float32 {
	return v[0]*v2[0] + v[1]*v2[1] + v[2]*v2[2]
}

// Len returns the Euclidean length of the vector.
func (v Vec3) Len() float32 {
	return float32(math.Sqrt(float64(v[0]*v[0] + v[1]*v[1] + v[2]*v[2])))
}

// LenSqr returns the squared length (avoids the sqrt of Len).
func (v Vec3) LenSqr() float32 {
	return v[0]*v[0] + v[1]*v[1] + v[2]*v[2]
}

// Normalize returns the unit-length vector in the same direction. Note: a
// zero vector divides by zero, producing non-finite components.
func (v Vec3) Normalize() Vec3 {
	l := 1.0 / v.Len()
	return Vec3{v[0] * l, v[1] * l, v[2] * l}
}

// gorm integration: Scanner/Valuer below persist the vector as JSON.
// Scan implements sql.Scanner (used by GORM): it decodes a JSON byte slice
// from the database into the vector. Non-[]byte input is an error.
func (v *Vec3) Scan(value interface{}) error {
	bytes, ok := value.([]byte)
	if !ok {
		// BUG FIX: the message previously read "unmarsh" (typo).
		return errors.New(fmt.Sprint("Failed to unmarshal Vec3", value))
	}
	result := Vec3{}
	err := json.Unmarshal(bytes, &result)
	if err != nil {
		return err
	}
	*v = result
	return nil
}
// Value implements driver.Valuer by marshaling the vector to JSON.
func (v Vec3) Value() (driver.Value, error) {
	return json.Marshal(v)
}
// Vec2 is a 2-component float32 vector, layout-compatible with mgl32.Vec2.
type Vec2 mgl32.Vec2

// V2 constructs a Vec2 from its components.
func V2(x float32, y float32) Vec2 {
	return Vec2{x, y}
}

// Equal reports component-wise approximate equality (via toolkit.IsEqual32)
// rather than exact float comparison.
func (v Vec2) Equal(rhs Vec2) bool {
	for i := 0; i < 2; i++ {
		if !toolkit.IsEqual32(v[i], rhs[i]) {
			return false
		}
	}
	return true
}

// X returns the first component.
func (v Vec2) X() float32 {
	return v[0]
}

// Y returns the second component.
func (v Vec2) Y() float32 {
	return v[1]
}

// Add returns the component-wise sum v + v2.
func (v Vec2) Add(v2 Vec2) Vec2 {
	return Vec2{v[0] + v2[0], v[1] + v2[1]}
}

// Sub returns the component-wise difference v - v2.
func (v Vec2) Sub(v2 Vec2) Vec2 {
	return Vec2{v[0] - v2[0], v[1] - v2[1]}
}

// Mul returns the vector scaled by c.
func (v Vec2) Mul(c float32) Vec2 {
	return Vec2{v[0] * c, v[1] * c}
}

// Dot returns the dot product v · v2.
func (v Vec2) Dot(v2 Vec2) float32 {
	return v[0]*v2[0] + v[1]*v2[1]
}

// Len returns the Euclidean length (math.Hypot avoids overflow/underflow).
func (v Vec2) Len() float32 {
	return float32(math.Hypot(float64(v[0]), float64(v[1])))
}

// LenSqr returns the squared length (avoids the sqrt of Len).
func (v Vec2) LenSqr() float32 {
	return v[0]*v[0] + v[1]*v[1]
}

// Normalize returns the unit-length vector in the same direction. Note: a
// zero vector divides by zero, producing non-finite components.
func (v Vec2) Normalize() Vec2 {
	l := 1.0 / v.Len()
	return Vec2{v[0] * l, v[1] * l}
}

// gorm integration: Scanner/Valuer below persist the vector as JSON.
// Scan implements sql.Scanner (used by GORM): it decodes a JSON byte slice
// from the database into the vector. Non-[]byte input is an error.
func (v *Vec2) Scan(value interface{}) error {
	bytes, ok := value.([]byte)
	if !ok {
		// BUG FIX: the message previously said "Vec3" (copy-paste from
		// Vec3.Scan) and misspelled "unmarshal".
		return errors.New(fmt.Sprint("Failed to unmarshal Vec2", value))
	}
	result := Vec2{}
	err := json.Unmarshal(bytes, &result)
	if err != nil {
		return err
	}
	*v = result
	return nil
}
// Value implements driver.Valuer by marshaling the vector to JSON.
func (v Vec2) Value() (driver.Value, error) {
	return json.Marshal(v)
} | common/datatype/type.go | 0.626581 | 0.436622 | type.go | starcoder |
package aiplatform
import (
context "context"
cmpopts "github.com/google/go-cmp/cmp/cmpopts"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
proto "google.golang.org/protobuf/proto"
protocmp "google.golang.org/protobuf/testing/protocmp"
fieldmaskpb "google.golang.org/protobuf/types/known/fieldmaskpb"
assert "gotest.tools/v3/assert"
strings "strings"
testing "testing"
)
// SpecialistPoolServiceTestSuite is a generated conformance test suite for
// implementations of SpecialistPoolServiceServer.
type SpecialistPoolServiceTestSuite struct {
	// T is the top-level testing context the suite runs under.
	T *testing.T
	// Server to test.
	Server SpecialistPoolServiceServer
}
// TestSpecialistPool runs the SpecialistPool resource tests as a subtest,
// binding ctx and the suite's server into the provided config.
func (fx SpecialistPoolServiceTestSuite) TestSpecialistPool(ctx context.Context, options SpecialistPoolTestSuiteConfig) {
	fx.T.Run("SpecialistPool", func(t *testing.T) {
		options.ctx = ctx
		options.service = fx.Server
		options.test(t)
	})
}
// SpecialistPoolTestSuiteConfig configures the SpecialistPool resource tests.
type SpecialistPoolTestSuiteConfig struct {
	// ctx and service are injected by TestSpecialistPool.
	ctx     context.Context
	service SpecialistPoolServiceServer
	// currParent indexes into Parents; advanced by nextParent when a
	// pristine parent is requested.
	currParent int
	// The parents to use when creating resources.
	// At least one parent needs to be set. Depending on methods available on the resource,
	// more may be required. If insufficient number of parents are
	// provided the test will fail.
	Parents []string
	// Create should return a resource which is valid to create, i.e.
	// all required fields set.
	Create func(parent string) *SpecialistPool
	// Update should return a resource which is valid to update, i.e.
	// all required fields set.
	Update func(parent string) *SpecialistPool
	// Patterns of tests to skip.
	// For example if a service has a Get method:
	// Skip: ["Get"] will skip all tests for Get.
	// Skip: ["Get/persisted"] will only skip the subtest called "persisted" of Get.
	Skip []string
}
// test runs every method-level subtest of the suite.
func (fx *SpecialistPoolTestSuiteConfig) test(t *testing.T) {
	t.Run("Create", fx.testCreate)
	t.Run("Get", fx.testGet)
	t.Run("Update", fx.testUpdate)
	t.Run("List", fx.testList)
}
// testCreate verifies CreateSpecialistPool rejects missing/invalid parents
// and requests lacking any required field with InvalidArgument.
func (fx *SpecialistPoolTestSuiteConfig) testCreate(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no parent is provided.
	t.Run("missing parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreateSpecialistPool(fx.ctx, &CreateSpecialistPoolRequest{
			Parent:         "",
			SpecialistPool: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.CreateSpecialistPool(fx.ctx, &CreateSpecialistPoolRequest{
			Parent:         "invalid resource name",
			SpecialistPool: fx.Create(fx.nextParent(t, false)),
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// The method should fail with InvalidArgument if the resource has any
	// required fields and they are not provided.
	t.Run("required fields", func(t *testing.T) {
		fx.maybeSkip(t)
		t.Run(".name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			// Clear the required field via reflection, then expect rejection.
			fd := container.ProtoReflect().Descriptor().Fields().ByName("name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateSpecialistPool(fx.ctx, &CreateSpecialistPoolRequest{
				Parent:         parent,
				SpecialistPool: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".display_name", func(t *testing.T) {
			fx.maybeSkip(t)
			parent := fx.nextParent(t, false)
			msg := fx.Create(parent)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("display_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.CreateSpecialistPool(fx.ctx, &CreateSpecialistPoolRequest{
				Parent:         parent,
				SpecialistPool: msg,
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
	})
}
// testGet verifies GetSpecialistPool handles missing/invalid/wildcard names,
// returns existing resources verbatim, and NotFound for unknown names.
func (fx *SpecialistPoolTestSuiteConfig) testGet(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no name is provided.
	t.Run("missing name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetSpecialistPool(fx.ctx, &GetSpecialistPoolRequest{
			Name: "",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name is not valid.
	t.Run("invalid name", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetSpecialistPool(fx.ctx, &GetSpecialistPoolRequest{
			Name: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Resource should be returned without errors if it exists.
	t.Run("exists", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		msg, err := fx.service.GetSpecialistPool(fx.ctx, &GetSpecialistPoolRequest{
			Name: created.Name,
		})
		assert.NilError(t, err)
		assert.DeepEqual(t, msg, created, protocmp.Transform())
	})
	// Method should fail with NotFound if the resource does not exist.
	t.Run("not found", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		created := fx.create(t, parent)
		_, err := fx.service.GetSpecialistPool(fx.ctx, &GetSpecialistPoolRequest{
			Name: created.Name + "notfound",
		})
		assert.Equal(t, codes.NotFound, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if the provided name only contains wildcards ('-')
	t.Run("only wildcards", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.GetSpecialistPool(fx.ctx, &GetSpecialistPoolRequest{
			Name: "projects/-/locations/-/specialistPools/-",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
}
// testUpdate verifies UpdateSpecialistPool rejects missing/invalid names and
// update masks, returns NotFound for unknown resources, and rejects updates
// that clear required fields under a '*' mask.
func (fx *SpecialistPoolTestSuiteConfig) testUpdate(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if no name is provided.
	t.Run("missing name", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg := fx.Update(parent)
		msg.Name = ""
		_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
			SpecialistPool: msg,
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if provided name is not valid.
	t.Run("invalid name", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		msg := fx.Update(parent)
		msg.Name = "invalid resource name"
		_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
			SpecialistPool: msg,
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// A persisted resource shared by the remaining subtests.
	parent := fx.nextParent(t, false)
	created := fx.create(t, parent)
	// Method should fail with NotFound if the resource does not exist.
	t.Run("not found", func(t *testing.T) {
		fx.maybeSkip(t)
		msg := fx.Update(parent)
		msg.Name = created.Name + "notfound"
		_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
			SpecialistPool: msg,
		})
		assert.Equal(t, codes.NotFound, status.Code(err), err)
	})
	// The method should fail with InvalidArgument if the update_mask is invalid.
	t.Run("invalid update mask", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
			SpecialistPool: created,
			UpdateMask: &fieldmaskpb.FieldMask{
				Paths: []string{
					"invalid_field_xyz",
				},
			},
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument if any required field is missing
	// when called with '*' update_mask.
	t.Run("required fields", func(t *testing.T) {
		fx.maybeSkip(t)
		t.Run(".name", func(t *testing.T) {
			fx.maybeSkip(t)
			msg := proto.Clone(created).(*SpecialistPool)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
				SpecialistPool: msg,
				UpdateMask: &fieldmaskpb.FieldMask{
					Paths: []string{
						"*",
					},
				},
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
		t.Run(".display_name", func(t *testing.T) {
			fx.maybeSkip(t)
			msg := proto.Clone(created).(*SpecialistPool)
			container := msg
			if container == nil {
				t.Skip("not reachable")
			}
			fd := container.ProtoReflect().Descriptor().Fields().ByName("display_name")
			container.ProtoReflect().Clear(fd)
			_, err := fx.service.UpdateSpecialistPool(fx.ctx, &UpdateSpecialistPoolRequest{
				SpecialistPool: msg,
				UpdateMask: &fieldmaskpb.FieldMask{
					Paths: []string{
						"*",
					},
				},
			})
			assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
		})
	})
}
// testList verifies ListSpecialistPools argument validation, parent
// isolation, pagination (last page, more pages, page-size 1 traversal),
// and that deleted resources are not listed.
func (fx *SpecialistPoolTestSuiteConfig) testList(t *testing.T) {
	fx.maybeSkip(t)
	// Method should fail with InvalidArgument if provided parent is invalid.
	t.Run("invalid parent", func(t *testing.T) {
		fx.maybeSkip(t)
		_, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent: "invalid resource name",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page token is not valid.
	t.Run("invalid page token", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:    parent,
			PageToken: "invalid page token",
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Method should fail with InvalidArgument is provided page size is negative.
	t.Run("negative page size", func(t *testing.T) {
		fx.maybeSkip(t)
		parent := fx.nextParent(t, false)
		_, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:   parent,
			PageSize: -10,
		})
		assert.Equal(t, codes.InvalidArgument, status.Code(err), err)
	})
	// Seed a pristine parent with a known number of resources.
	const resourcesCount = 15
	parent := fx.nextParent(t, true)
	parentMsgs := make([]*SpecialistPool, resourcesCount)
	for i := 0; i < resourcesCount; i++ {
		parentMsgs[i] = fx.create(t, parent)
	}
	// If parent is provided the method must only return resources
	// under that parent.
	t.Run("isolation", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:   parent,
			PageSize: 999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs,
			response.SpecialistPools,
			cmpopts.SortSlices(func(a, b *SpecialistPool) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// If there are no more resources, next_page_token should not be set.
	t.Run("last page", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:   parent,
			PageSize: resourcesCount,
		})
		assert.NilError(t, err)
		assert.Equal(t, "", response.NextPageToken)
	})
	// If there are more resources, next_page_token should be set.
	t.Run("more pages", func(t *testing.T) {
		fx.maybeSkip(t)
		response, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:   parent,
			PageSize: resourcesCount - 1,
		})
		assert.NilError(t, err)
		assert.Check(t, response.NextPageToken != "")
	})
	// Listing resource one by one should eventually return all resources.
	t.Run("one by one", func(t *testing.T) {
		fx.maybeSkip(t)
		msgs := make([]*SpecialistPool, 0, resourcesCount)
		var nextPageToken string
		for {
			response, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
				Parent:    parent,
				PageSize:  1,
				PageToken: nextPageToken,
			})
			assert.NilError(t, err)
			assert.Equal(t, 1, len(response.SpecialistPools))
			msgs = append(msgs, response.SpecialistPools...)
			nextPageToken = response.NextPageToken
			if nextPageToken == "" {
				break
			}
		}
		assert.DeepEqual(
			t,
			parentMsgs,
			msgs,
			cmpopts.SortSlices(func(a, b *SpecialistPool) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
	// Method should not return deleted resources.
	t.Run("deleted", func(t *testing.T) {
		fx.maybeSkip(t)
		const deleteCount = 5
		for i := 0; i < deleteCount; i++ {
			_, err := fx.service.DeleteSpecialistPool(fx.ctx, &DeleteSpecialistPoolRequest{
				Name: parentMsgs[i].Name,
			})
			assert.NilError(t, err)
		}
		response, err := fx.service.ListSpecialistPools(fx.ctx, &ListSpecialistPoolsRequest{
			Parent:   parent,
			PageSize: 9999,
		})
		assert.NilError(t, err)
		assert.DeepEqual(
			t,
			parentMsgs[deleteCount:],
			response.SpecialistPools,
			cmpopts.SortSlices(func(a, b *SpecialistPool) bool {
				return a.Name < b.Name
			}),
			protocmp.Transform(),
		)
	})
}
// nextParent returns the parent resource name tests should use. When
// pristine is true it first advances to the next configured parent (one no
// earlier subtest has populated). Fails the test when Parents is exhausted.
func (fx *SpecialistPoolTestSuiteConfig) nextParent(t *testing.T, pristine bool) string {
	if pristine {
		fx.currParent++
	}
	if fx.currParent >= len(fx.Parents) {
		t.Fatal("need at least", fx.currParent+1, "parents")
	}
	return fx.Parents[fx.currParent]
}
// peekNextParent returns the parent that the next pristine nextParent call
// would yield, without advancing the cursor.
func (fx *SpecialistPoolTestSuiteConfig) peekNextParent(t *testing.T) string {
	next := fx.currParent + 1
	if next >= len(fx.Parents) {
		t.Fatal("need at least", next+1, "parents")
	}
	return fx.Parents[next]
}
// maybeSkip skips the current (sub)test when its full name matches any of
// the configured Skip patterns (substring match).
func (fx *SpecialistPoolTestSuiteConfig) maybeSkip(t *testing.T) {
	for _, skip := range fx.Skip {
		if strings.Contains(t.Name(), skip) {
			t.Skip("skipped because of .Skip")
		}
	}
}
// create would persist a new resource under parent for other subtests.
// Long-running create is not supported for this resource, so any subtest
// that needs a persisted resource is skipped.
func (fx *SpecialistPoolTestSuiteConfig) create(t *testing.T, parent string) *SpecialistPool {
	t.Helper()
	t.Skip("Long running create method not supported")
	return nil
}
package bebras_guard
import (
"sync"
"time"
)
// Implements a very simple and approximate leaky bucket algorithm.
// To keep it simple, each request will wait for a duration depending on the
// number of other requests waiting ; and the bucket will leak at regular
// intervals, independently of when the requests arrived.
// That means that the delay between two requests can be anywhere between
// epsilon (if the bucket leaked right between the two requests) and
// 2 * delay - epsilon (if the bucket leaked right before the first request,
// and then right after the second).
// The average request rate will still be the expected one, making it a good
// approximation.
type LeakyBucket struct {
	// maxBurst is the number of requests per key served without waiting.
	maxBurst int
	// maxWaiting caps how many requests per key may queue before rejection.
	maxWaiting int
	// delay is both the leak interval and the per-position wait unit.
	delay time.Duration
	// m guards active, burst and waiting.
	m sync.Mutex
	// active is false when every bucket is empty (lets Leak return fast).
	active bool
	// burst counts currently-used burst slots per key.
	burst map[string]int
	// waiting counts queued requests per key.
	waiting map[string]int
}
// LeakyBucketConfig carries the tunable parameters for a LeakyBucket
// (see the corresponding LeakyBucket fields).
type LeakyBucketConfig struct {
	MaxBurst   int
	MaxWaiting int
	Delay      time.Duration
}
// GetSlot reserves a slot in the bucket for key. Requests within the burst
// budget return immediately; otherwise the caller sleeps proportionally to
// its position in the queue. Returns false when the queue is full.
func (this *LeakyBucket) GetSlot(key string) bool {
	this.m.Lock()
	this.active = true
	if this.burst[key] < this.maxBurst {
		this.burst[key]++
		this.m.Unlock()
		return true
	}
	if this.waiting[key] < this.maxWaiting {
		this.waiting[key]++
		// Capture the queue position while still holding the lock: the
		// original code read this.waiting[key] after Unlock, racing with
		// Leak() mutating the map concurrently.
		position := this.waiting[key]
		this.m.Unlock()
		time.Sleep(time.Duration(position) * this.delay)
		return true
	}
	this.m.Unlock()
	return false
}
// Leak releases one slot per key, letting one more request through on the
// next tick, and prunes empty buckets.
func (this *LeakyBucket) Leak() {
	// Hold the lock for the whole sweep: the original ranged over
	// this.burst unlocked while GetSlot could insert keys concurrently,
	// which is a map iteration/write race. Deleting during range is safe.
	this.m.Lock()
	defer this.m.Unlock()
	// Skip everything if no bucket needs to leak.
	if !this.active {
		return
	}
	stillActive := false
	for k := range this.burst {
		if this.waiting[k] > 0 {
			// Release one queued request first.
			stillActive = true
			this.waiting[k]--
		} else if this.burst[k] > 0 {
			delete(this.waiting, k)
			stillActive = true
			this.burst[k]--
		} else {
			// Bucket fully drained; drop it.
			delete(this.burst, k)
		}
	}
	this.active = stillActive
}
// Configure replaces the bucket's limits and leak interval.
// NOTE(review): not synchronized — presumably meant to be called before
// Run/GetSlot are in use; confirm with callers.
func (this *LeakyBucket) Configure(config LeakyBucketConfig) {
	this.maxBurst = config.MaxBurst
	this.maxWaiting = config.MaxWaiting
	this.delay = config.Delay
}
// Run starts a background goroutine that leaks the buckets once per delay
// interval. There is currently no way to stop the goroutine or its ticker;
// they live for the remainder of the process.
func (this *LeakyBucket) Run() {
	leakTicker := time.NewTicker(this.delay)
	go func() {
		// Ranging over the ticker channel replaces the original
		// single-case select loop, which was equivalent but noisier.
		for range leakTicker.C {
			this.Leak()
		}
	}()
}
func NewLeakyBucket() (*LeakyBucket) {
// TODO :: proper configuration
var lc *LeakyBucket = &LeakyBucket{
maxBurst: 5,
maxWaiting: 30,
delay: 100 * time.Millisecond,
burst: make(map[string]int),
waiting: make(map[string]int),
}
return lc
} | leaky_bucket.go | 0.615781 | 0.435721 | leaky_bucket.go | starcoder |
package util
import (
"fmt"
"reflect"
"github.com/pkg/errors"
)
// Cached reflect.Types for the conversion targets.
var floatType = reflect.TypeOf(float64(0))
var intType = reflect.TypeOf(int64(0))
var stringType = reflect.TypeOf("")
var boolType = reflect.TypeOf(false)

// AsFloat64 attempts to convert unk to a float64
func AsFloat64(unk interface{}) (float64, error) {
	val := reflect.Indirect(reflect.ValueOf(unk))
	if !val.Type().ConvertibleTo(floatType) {
		return 0, fmt.Errorf("cannot convert %v (%v) to float64", val.Type(), val)
	}
	return val.Convert(floatType).Float(), nil
}

// AsInt64 attempts to convert unk to an int64
func AsInt64(unk interface{}) (int64, error) {
	val := reflect.Indirect(reflect.ValueOf(unk))
	if !val.Type().ConvertibleTo(intType) {
		return 0, fmt.Errorf("cannot convert %v (%v) to int64", val.Type(), val)
	}
	return val.Convert(intType).Int(), nil
}

// AsString attempts to convert unk to a string
func AsString(unk interface{}) (string, error) {
	val := reflect.Indirect(reflect.ValueOf(unk))
	if !val.Type().ConvertibleTo(stringType) {
		return "", fmt.Errorf("cannot convert %v (%v) to string", val.Type(), val)
	}
	return val.Convert(stringType).String(), nil
}
// AsBool attempts to convert unk to a bool. Values that are not directly
// convertible are rendered as a string and parsed with the package's
// ParseBool helper.
func AsBool(unk interface{}) (bool, error) {
	val := reflect.Indirect(reflect.ValueOf(unk))
	if val.Type().ConvertibleTo(boolType) {
		return val.Convert(boolType).Bool(), nil
	}
	// Fall back to string parsing.
	str, strErr := AsString(unk)
	if strErr != nil {
		return false, fmt.Errorf("cannot convert %v (%v) to bool", val.Type(), val)
	}
	result, parseErr := ParseBool(str)
	if parseErr != nil {
		return false, errors.Wrapf(parseErr, "cannot parse string %v as bool", str)
	}
	return result, nil
}
//AsSliceOfStrings attempts to convert unk to a slice of strings.
// Each element is rendered with fmt's default %v formatting.
func AsSliceOfStrings(unk interface{}) ([]string, error) {
	v := reflect.ValueOf(unk)
	v = reflect.Indirect(v)
	// Bug fix: the original called v.Len() unconditionally, panicking for
	// non-slice inputs instead of returning the advertised error.
	if !v.IsValid() || (v.Kind() != reflect.Slice && v.Kind() != reflect.Array) {
		return nil, fmt.Errorf("cannot convert %v to []string", unk)
	}
	result := make([]string, 0, v.Len())
	for i := 0; i < v.Len(); i++ {
		result = append(result, fmt.Sprintf("%v", v.Index(i)))
	}
	return result, nil
}
//AsMapOfStringsIntefaces attempts to convert unk to a map[string]interface{}.
// (Name typo is kept for backward compatibility with existing callers.)
func AsMapOfStringsIntefaces(unk interface{}) (map[string]interface{}, error) {
	val := reflect.Indirect(reflect.ValueOf(unk))
	if val.Kind() != reflect.Map {
		return make(map[string]interface{}), fmt.Errorf("cannot convert %v (%v) to map[string]interface{}", val.Type(), val)
	}
	out := make(map[string]interface{})
	for _, key := range val.MapKeys() {
		out[key.String()] = val.MapIndex(key).Interface()
	}
	return out, nil
}
// DereferenceInt returns *i, or 0 when i is nil.
func DereferenceInt(i *int) int {
	if i == nil {
		return 0
	}
	return *i
}

// DereferenceInt64 returns *i, or 0 when i is nil.
func DereferenceInt64(i *int64) int64 {
	if i == nil {
		return 0
	}
	return *i
}

// DereferenceString returns *s, or "" when s is nil.
func DereferenceString(s *string) string {
	if s == nil {
		return ""
	}
	return *s
}

// DereferenceFloat64 returns *f, or 0 when f is nil.
func DereferenceFloat64(f *float64) float64 {
	if f == nil {
		return 0
	}
	return *f
}
// IsZeroOfUnderlyingType reports whether x equals the zero value of its
// dynamic (underlying) type.
func IsZeroOfUnderlyingType(x interface{}) bool {
	zero := reflect.Zero(reflect.TypeOf(x)).Interface()
	return reflect.DeepEqual(x, zero)
}
// DereferenceBool returns *b, or false when b is nil.
func DereferenceBool(b *bool) bool {
	if b == nil {
		return false
	}
	return *b
}
package week6
import (
"crypto/rsa"
"errors"
"math/big"
)
func isSquareNumber(n *big.Int) (bool, *big.Int) {
sqrtFloor := new(big.Int).Sqrt(n)
squareSqrtFloor := new(big.Int).Mul(sqrtFloor, sqrtFloor)
return squareSqrtFloor.Cmp(n) == 0, sqrtFloor
}
// FactorCloselyFactorSemiPrime finds p, q such that
// N = p*q when |p - q| < 2N^{1/4} and p <= q.
// It is the magnitude-0 special case of FactorNearlyFactorSemiPrime.
func FactorCloselyFactorSemiPrime(N *big.Int) (*big.Int, *big.Int, error) {
	return FactorNearlyFactorSemiPrime(N, 0)
}
// FactorNearlyFactorSemiPrime finds p, q such that
// N = p*q when |p - q| < 2^{magnitude + 1} N^{1/4} and p <= q.
// Notice that when magnitude = 1, it reduces to `FactorCloselyFactorSemiPrime`.
// It is the λ = 1 special case of FactorProportionalFactorSemiPrime.
func FactorNearlyFactorSemiPrime(N *big.Int, magnitude uint) (*big.Int, *big.Int, error) {
	return FactorProportionalFactorSemiPrime(N, magnitude, big.NewRat(1, 1))
}
// FactorProportionalFactorSemiPrime finds p, q such that
// N = p*q when |ap - bq| < 2^{magnitude + 1} N^{1/4}, λ = a/b is a rational number.
// Notice that when a = b = 1, it reduces to `FactorNearlyFactorSemiPrime`.
func FactorProportionalFactorSemiPrime(N *big.Int, magnitude uint, proportion *big.Rat) (*big.Int, *big.Int, error) {
	// Work on doubled COPIES of λ's numerator and denominator. Rat.Num and
	// Rat.Denom return references into the Rat's internals, so the original
	// code's in-place Add corrupted the caller's proportion argument.
	num := new(big.Int).Lsh(proportion.Num(), 1)
	denom := new(big.Int).Lsh(proportion.Denom(), 1)
	// numDenomN = a * b * N (with the doubled a, b)
	numDenomN := new(big.Int).Set(N)
	numDenomN.Mul(numDenomN, num)
	numDenomN.Mul(numDenomN, denom)
	numDenomNSqrt := new(big.Int).Sqrt(numDenomN)
	// Fermat-style search: look for A with A^2 - a*b*N a perfect square.
	AvgGuess := new(big.Int).Set(numDenomNSqrt)
	AvgGuessSqaure := new(big.Int).Mul(AvgGuess, AvgGuess)
	Difference := new(big.Int).Sub(AvgGuessSqaure, numDenomN)
	one := big.NewInt(1)
	for i := 0; i < (1 << (magnitude + magnitude)); i++ {
		// (A+1)^2 - A^2 = 2A + 1: update Difference incrementally,
		// then increment A itself.
		Difference.Add(Difference, AvgGuess)
		Difference.Add(Difference, AvgGuess)
		Difference.Add(Difference, one)
		AvgGuess.Add(AvgGuess, one)
		// When the difference is a square x^2, then a*b*N = (A-x)(A+x).
		if isSquare, SqrtDifference := isSquareNumber(Difference); isSquare {
			pMultiple := new(big.Int).Sub(AvgGuess, SqrtDifference)
			qMultiple := new(big.Int).Add(AvgGuess, SqrtDifference)
			quo := new(big.Int)
			// Divide out whichever of a, b evenly divides the smaller factor.
			if quo.Mod(pMultiple, num); quo.BitLen() == 0 {
				pMultiple.Div(pMultiple, num)
				qMultiple.Div(qMultiple, denom)
				return pMultiple, qMultiple, nil
			} else if quo.Mod(pMultiple, denom); quo.BitLen() == 0 {
				pMultiple.Div(pMultiple, denom)
				qMultiple.Div(qMultiple, num)
				return pMultiple, qMultiple, nil
			}
		}
	}
	return nil, nil, errors.New("The factor is not closely enough for efficient factoring")
}
// DecryptRSAPKCSv15WithCloselyFactor will try to decrypt cipherText given RSA public key assuming the SemiPrimi can be
// factored into two close prime
func DecryptRSAPKCSv15WithCloselyFactor(pubKey *rsa.PublicKey, cipherText []byte) ([]byte, error) {
p, q, err := FactorCloselyFactorSemiPrime(pubKey.N)
one := big.NewInt(1)
if err != nil {
panic(err)
}
Primes := make([]*big.Int, 2)
Primes[0] = p
Primes[1] = q
// Compute Euler Function φ(N) = (p - 1)(q - 1)
pSub1 := new(big.Int).Sub(p, one)
qSub1 := new(big.Int).Sub(q, one)
phi := new(big.Int).Mul(pSub1, qSub1)
// Compute Private Component from E, φ(N)
D := new(big.Int)
new(big.Int).GCD(D, new(big.Int), big.NewInt(int64(pubKey.E)), phi)
privateKey := new(rsa.PrivateKey)
privateKey.PublicKey = *pubKey
privateKey.D = D
privateKey.Primes = Primes
return rsa.DecryptPKCS1v15(nil, privateKey, cipherText)
} | week6/week6.go | 0.768299 | 0.519217 | week6.go | starcoder |
package mint
import (
"fmt"
sdk "github.com/ftlnetwork/ftlnetwork-sdk/types"
)
// Minter represents the minting state
// Minter represents the minting state.
type Minter struct {
	Inflation        sdk.Dec `json:"inflation"`         // current annual inflation rate
	AnnualProvisions sdk.Dec `json:"annual_provisions"` // current annual expected provisions
}
// NewMinter creates a new Minter with the given inflation rate and
// annual provisions.
func NewMinter(inflation, annualProvisions sdk.Dec) Minter {
	return Minter{
		Inflation:        inflation,
		AnnualProvisions: annualProvisions,
	}
}
// InitialMinter returns the minter object for a new chain: the given
// inflation rate with zero annual provisions (recomputed on first block).
func InitialMinter(inflation sdk.Dec) Minter {
	return NewMinter(
		inflation,
		sdk.NewDec(0),
	)
}
// DefaultInitialMinter returns the default initial minter object for a new
// chain, which uses an inflation rate of 13% (0.13).
func DefaultInitialMinter() Minter {
	return InitialMinter(
		sdk.NewDecWithPrec(13, 2),
	)
}
// validateMinter performs basic sanity checks on genesis minter state:
// the inflation rate must be non-negative.
func validateMinter(minter Minter) error {
	if minter.Inflation.LT(sdk.ZeroDec()) {
		return fmt.Errorf("mint parameter Inflation should be positive, is %s",
			minter.Inflation.String())
	}
	return nil
}
// NextInflationRate returns the inflation rate for the next provisions
// cycle given the current bonded ratio, clamped to [InflationMin,
// InflationMax].
func (m Minter) NextInflationRate(params Params, bondedRatio sdk.Dec) (
	inflation sdk.Dec) {
	// The target annual inflation rate is recalculated for each previsions cycle. The
	// inflation is also subject to a rate change (positive or negative) depending on
	// the distance from the desired ratio (67%). The maximum rate change possible is
	// defined to be 13% per year, however the annual inflation is capped as between
	// 7% and 20%.
	// (1 - bondedRatio/GoalBonded) * InflationRateChange
	inflationRateChangePerYear := sdk.OneDec().
		Sub(bondedRatio.Quo(params.GoalBonded)).
		Mul(params.InflationRateChange)
	// Spread the annual change over the blocks of one year.
	inflationRateChange := inflationRateChangePerYear.Quo(sdk.NewDec(int64(params.BlocksPerYear)))
	// increase the new annual inflation for this next cycle
	inflation = m.Inflation.Add(inflationRateChange)
	if inflation.GT(params.InflationMax) {
		inflation = params.InflationMax
	}
	if inflation.LT(params.InflationMin) {
		inflation = params.InflationMin
	}
	return inflation
}
// NextAnnualProvisions calculates the annual provisions as
// inflation * totalSupply. (params is currently unused.)
func (m Minter) NextAnnualProvisions(params Params, totalSupply sdk.Dec) (
	provisions sdk.Dec) {
	return m.Inflation.Mul(totalSupply)
}
// BlockProvision returns the coin minted in one block: the annual
// provisions divided by blocks per year, truncated to an integer amount.
func (m Minter) BlockProvision(params Params) sdk.Coin {
	provisionAmt := m.AnnualProvisions.QuoInt(sdk.NewInt(int64(params.BlocksPerYear)))
	return sdk.NewCoin(params.MintDenom, provisionAmt.TruncateInt())
}
package trier
import (
"math/rand"
"time"
)
// Iterator produces a sequence of retry delays.
type Iterator interface {
	// Next returns the next delay and a flag that is true when the
	// iterator is exhausted and no further retry should be attempted.
	Next() (time.Duration, bool)
}
// Iterable is a factory producing fresh, independent delay Iterators.
type Iterable interface {
	Iterator() Iterator
}
// constant is both an Iterable and an Iterator whose delay never changes.
type constant time.Duration

// Next always returns the same delay and never signals exhaustion.
func (i constant) Next() (time.Duration, bool) {
	return time.Duration(i), false
}

// Iterator returns the value itself: a constant delay needs no per-use state.
func (i constant) Iterator() Iterator {
	return i
}

// NewConstant creates iterable for creating iterators
// returning constant delay on each iteration.
func NewConstant(d time.Duration) Iterable {
	return constant(d)
}
// linear grows its delay by a fixed rate on each call. As an Iterable, d
// holds the rate; as an Iterator, d accumulates the current delay.
type linear struct {
	d, rate time.Duration
}

// Next adds the rate first, so the first returned delay equals the rate.
func (i *linear) Next() (time.Duration, bool) {
	i.d += i.rate
	return i.d, false
}

// Iterator starts a fresh accumulator whose rate is the Iterable's d.
func (i linear) Iterator() Iterator {
	return &linear{rate: i.d}
}

// NewLinear creates iterable for creating iterators
// returning delay which grows linearly on each iteration.
func NewLinear(d time.Duration) Iterable {
	return linear{d: d}
}
// linearRate grows its delay linearly with an explicit rate, starting
// from an initial delay d.
type linearRate struct {
	d, rate time.Duration
}

// Next returns the current delay, then advances it by rate (so the first
// returned value is the initial d itself).
func (i *linearRate) Next() (time.Duration, bool) {
	v := i.d
	i.d += i.rate
	return v, false
}

// Iterator copies the state so each iterator advances independently.
func (i linearRate) Iterator() Iterator {
	return &linearRate{i.d, i.rate}
}

// NewLinearRate creates iterable for creating iterators
// returning delay which grows linearly with specified rate on each iteration.
func NewLinearRate(d, rate time.Duration) Iterable {
	return linearRate{d, rate}
}
// exponential doubles its delay on each call, starting at the initial value.
type exponential time.Duration

// Next returns the current delay, then doubles it for the next call.
func (i *exponential) Next() (time.Duration, bool) {
	v := *i
	*i = v + v
	return time.Duration(v), false
}

// Iterator copies the value so each iterator doubles independently.
func (i exponential) Iterator() Iterator {
	return &i
}

// NewExponential creates iterable for creating iterators
// returning delay which grows exponentially on each iteration.
func NewExponential(d time.Duration) Iterable {
	return exponential(d)
}
// exponentialRate grows its delay by a factor of (1 + rate) on each call.
// Delays are kept as float64 so fractional rates compound precisely.
type exponentialRate struct {
	d, rate float64
}

// Next returns the current delay, then multiplies it by (1 + rate).
func (i *exponentialRate) Next() (time.Duration, bool) {
	v := i.d
	i.d += i.d * i.rate
	return time.Duration(v), false
}

// Iterator copies the state so each iterator compounds independently.
func (i exponentialRate) Iterator() Iterator {
	return &exponentialRate{i.d, i.rate}
}

// NewExponentialRate creates iterable for creating iterators
// returning delay which grows exponentially with specified rate on each iteration.
func NewExponentialRate(d time.Duration, rate float64) Iterable {
	return exponentialRate{float64(d), rate}
}
// Decorator extends behavior of an Iterable by wrapping it
// (e.g. WithMaxRetries, WithJitter).
type Decorator func(Iterable) Iterable
// maxRetriesB wraps an Iterable and limits its iterators to n retries.
type maxRetriesB struct {
	n int
	b Iterable
}

// Iterator wraps the inner iterator with a countdown of n retries.
func (b maxRetriesB) Iterator() Iterator {
	return &maxRetriesI{b.n, b.b.Iterator()}
}

// maxRetriesI counts down the remaining retries for one use.
type maxRetriesI struct {
	n int
	i Iterator
}

// Next delegates to the inner iterator until the budget is spent, then
// signals exhaustion.
func (i *maxRetriesI) Next() (time.Duration, bool) {
	if i.n > 0 {
		i.n--
		return i.i.Next()
	}
	return 0, true
}

// WithMaxRetries sets maximum number of retries.
func WithMaxRetries(n int) Decorator {
	return func(b Iterable) Iterable {
		return maxRetriesB{n, b}
	}
}
// init seeds the global math/rand source used by the jitter decorator.
func init() {
	rand.Seed(time.Now().UnixNano())
}
// jitterB wraps an Iterable and perturbs each delay by a random offset in
// [-j, +j]; n is the half-open range size 2j+1 passed to rand.Int63n.
type jitterB struct {
	n, j int64
	b    Iterable
}

// Iterator wraps the inner iterator with the same jitter bounds.
func (b jitterB) Iterator() Iterator {
	return jitterI{b.n, b.j, b.b.Iterator()}
}

// jitterI applies the random offset to each delay from the inner iterator.
type jitterI struct {
	n, j int64
	i    Iterator
}

// Next perturbs the inner delay by a uniform offset in [-j, +j],
// clamping the result at zero. Exhaustion is passed through unchanged.
func (i jitterI) Next() (time.Duration, bool) {
	v, done := i.i.Next()
	if done {
		return 0, done
	}
	v = v + time.Duration(rand.Int63n(i.n)-i.j)
	if v < 0 {
		v = 0
	}
	return v, done
}

// WithJitter sets maximum duration randomly added to or extracted from delay
// between retries to improve performance under high contention.
// NOTE(review): a negative d would make rand.Int63n panic — presumably
// callers only pass non-negative jitter; confirm.
func WithJitter(d time.Duration) Decorator {
	return func(b Iterable) Iterable {
		j := int64(d)
		return jitterB{j*2 + 1, j, b}
	}
}
package goment
import (
"time"
)
// Diff returns the difference between two Goments as an integer.
// args[0] is anything New accepts as the other instant; optional args[1]
// selects the unit ("y", "M", "w", "d", "h", "m"; anything else means
// seconds). Returns 0 when no arguments are given or when args[0] cannot
// be parsed by New (the parse error is swallowed).
func (g *Goment) Diff(args ...interface{}) int {
	numArgs := len(args)
	if numArgs > 0 {
		units := ""
		input, err := New(args[0])
		if err != nil {
			return 0
		}
		if numArgs > 1 {
			// Non-string unit arguments are ignored (units stays "").
			if parsedUnits, ok := args[1].(string); ok {
				units = parsedUnits
			}
		}
		diff := Diff{
			Start: g,
			End:   input,
		}
		switch units {
		case "y", "year", "years":
			return diff.InYears()
		case "M", "month", "months":
			return diff.InMonths()
		case "w", "week", "weeks":
			return diff.InWeeks()
		case "d", "day", "days":
			return diff.InDays()
		case "h", "hour", "hours":
			return diff.InHours()
		case "m", "minute", "minutes":
			return diff.InMinutes()
		default:
			return diff.InSeconds()
		}
	}
	return 0
}
// DaysInMonth returns the number of days in the set month.
func (g *Goment) DaysInMonth() int {
	return daysInMonth(g.Month(), g.Year())
}

// ToTime returns the time.Time object that is wrapped by Goment.
func (g *Goment) ToTime() time.Time {
	return g.time
}

// ToUnix returns the Unix timestamp (the number of seconds since the Unix Epoch).
func (g *Goment) ToUnix() int64 {
	return g.ToTime().Unix()
}

// ToArray returns an array that mirrors the parameters from time.Date():
// year, month, day, hour, minute, second, nanosecond.
func (g *Goment) ToArray() []int {
	return []int{g.Year(), g.Month(), g.Date(), g.Hour(), g.Minute(), g.Second(), g.Nanosecond()}
}

// ToDateTime returns a DateTime struct populated from the wrapped time.
func (g *Goment) ToDateTime() DateTime {
	return DateTime{
		Year:       g.Year(),
		Month:      g.Month(),
		Day:        g.Date(),
		Hour:       g.Hour(),
		Minute:     g.Minute(),
		Second:     g.Second(),
		Nanosecond: g.Nanosecond(),
		Location:   g.ToTime().Location(),
	}
}

// ToString returns a string representation of the Goment time
// (time.Time's default String format).
func (g *Goment) ToString() string {
	return g.ToTime().String()
}

// ToISOString returns a ISO8601 standard representation of the Goment time
// with millisecond precision.
func (g *Goment) ToISOString() string {
	return g.ToTime().Format("2006-01-02T15:04:05.999Z07:00")
}
// daysInMonth returns the number of days in the given 1-based month of
// year, exploiting time.Date's normalization: day 0 of month+1 is the
// last day of month.
func daysInMonth(month, year int) int {
	lastDay := time.Date(year, time.Month(month+1), 0, 0, 0, 0, 0, time.UTC)
	return lastDay.Day()
}
package main
import "fmt"
// PieceCount is the count of mens and kings for each player.
// NOTE(review): arrays are presumably indexed by player-1 — confirm with
// the code that populates this struct (not visible here).
type PieceCount struct {
	men   [2]int
	kings [2]int
}
// Opposition is the other player: 2 for player 1, otherwise 1.
func Opposition(player int) int {
	if player != 1 {
		return 1
	}
	return 2
}
// direction says whether "forward" is up (+1, player 1) or down (-1,
// otherwise) the board, letting the same move logic serve both players.
func direction(player int) int {
	if player != 1 {
		return -1
	}
	return 1
}
// Board represents the state of the game as an 8x8 array of integers
// where player 1 is the value 1, player 2 the value 2 and an empty
// space is a 0.
type Board struct {
	state [8][8]int
}

// NewBoard sets up a fresh board in the initial configuration: player 1's
// men on the dark squares of rows 0-2, player 2's on rows 5-7.
func NewBoard() *Board {
	var b Board
	for _, row := range []int{0, 1, 2} {
		for col := 0; col < 8; col++ {
			if (row+col)%2 == 1 {
				b.state[row][col] = 1
			}
		}
	}
	for _, row := range []int{5, 6, 7} {
		for col := 0; col < 8; col++ {
			if (row+col)%2 == 1 {
				b.state[row][col] = 2
			}
		}
	}
	return &b
}
// Get returns the piece stored at position `p` (0 means empty).
func (b Board) Get(p Pos) int {
	return b.state[p.Y][p.X]
}

// Set sets the state of the board at position `p`.
func (b *Board) Set(p Pos, state int) {
	b.state[p.Y][p.X] = state
}
// Apply moves a piece according to `move` and returns a new
// state. The move must be legal.
func (b Board) Apply(move *Move) *Board {
	startPos := move.Squares[0]
	startPiece := b.Get(startPos)
	newBoard := &Board{b.state}
	// Blank out the starting square of the move
	newBoard.Set(startPos, 0)
	// Remove any captured pieces we jumped from the board
	for _, square := range move.JumpedSquares() {
		newBoard.Set(square, 0)
	}
	// Land on the final square
	final := move.Squares[move.Length()-1]
	// A man reaching the opponent's baseline is crowned; kings are stored
	// as the negated player value. Bug fix: the original condition
	// `startPiece > 0 && final.Y == 0 || final.Y == 7` parsed as
	// `(... && ...) || final.Y == 7`, so ANY piece landing on row 7 —
	// including an already-crowned king — was negated, demoting kings back
	// to men. Player 1 advances toward row 7, player 2 toward row 0.
	if (startPiece == 1 && final.Y == 7) || (startPiece == 2 && final.Y == 0) {
		newBoard.Set(final, -startPiece) // Coronation
	} else {
		newBoard.Set(final, startPiece)
	}
	return newBoard
}
// Validate traverses a chain of squares to determine if the move is legal.
// It checks ownership of the start square, that multi-square moves form a
// jump sequence, that each destination is empty and diagonal, that non-kings
// only move forward, and that each jumped square holds an opponent piece.
func (b Board) Validate(move *Move) error {
	// Start square has to be held by `player`
	startPos := move.Squares[0]
	startPiece := b.Get(startPos)
	if abs(startPiece) != move.Player {
		return fmt.Errorf("Start position %s isn't valid", startPos.AsString())
	}
	// If we have more that one transition (more than two squares), then all
	// transitions must be jumps. Note, this doesn't verify the actual state
	// of the board, just the coordinates.
	if move.Length() > 2 && !move.ValidJumpSequence() {
		return fmt.Errorf("Move is not a valid jump sequence")
	}
	for index := 1; index < move.Length(); index++ {
		endPos := move.Squares[index]
		// endPos has to be available
		if b.Get(endPos) != 0 {
			return fmt.Errorf("Position %s isn't available", endPos.AsString())
		}
		// Check that we moved diagonally, either 1 or 2 squares
		dX, dY, err := ValidDiagonal(startPos, endPos)
		if err != nil {
			return err
		}
		// Check we're moving forwards, if not king. King-case is covered
		// by the above diagonal check.
		if dY > 0 && startPiece == 2 || dY < 0 && startPiece == 1 {
			return fmt.Errorf("Non-king move must move forward")
		}
		// If I jumped a square, it has to be occupied by the opponent.
		// For a single step dX/dY are +/-1, so integer division yields 0
		// and the check below is skipped; for a jump they are +/-2 and
		// `middle` is the square being jumped over.
		betweenX := dX / 2
		betweenY := dY / 2
		middle := Pos{
			startPos.X + betweenX,
			startPos.Y + betweenY,
		}
		if betweenX != 0 && betweenY != 0 {
			if abs(b.Get(middle)) != Opposition(move.Player) {
				return fmt.Errorf("Jumped square not held by opponent")
			}
		}
		// Each leg of the move starts where the previous one ended.
		startPos = endPos
	}
	return nil // we're good
}
// nonCaptureMoves returns a list of non capture moves from `square` --
// 0, 1, 2 (4, 5 for kings) possible squares. Men may only step toward the
// opponent's baseline; kings (negative values) may step both ways.
func (b Board) nonCaptureMoves(player int, square Pos) []*Move {
	cols := []int{-1, 1}
	rows := []int{direction(player)}
	if b.Get(square) < 0 {
		// Kings can go both backwards and forwards
		rows = append(rows, -direction(player))
	}
	moves := []*Move{}
	for _, dY := range rows {
		for _, dX := range cols {
			// NewPos presumably errors for off-board coordinates, which
			// silently prunes destinations outside the board — confirm.
			if candidate, err := NewPos(square.X+dX, square.Y+dY); err == nil {
				if b.Get(*candidate) == 0 {
					moves = append(moves, NewMove(player, square, *candidate))
				}
			}
		}
	}
	return moves
}
// singleJumps returns a list of single jumpable positions from `square` --
// 0, 1, 2 (4, 5 for kings) possible squares. A jump is valid when the
// adjacent diagonal square holds an opponent piece and the square beyond it
// is empty.
func (b Board) singleJumps(player int, square Pos) []Pos {
	cols := []int{-1, 1}
	rows := []int{direction(player)}
	if b.Get(square) < 0 {
		// Kings can go both backwards and forwards
		rows = append(rows, -direction(player))
	}
	jumps := []Pos{}
	for _, dY := range rows {
		for _, dX := range cols {
			// `first` is the square being jumped over, `second` the landing
			// square; NewPos presumably errors off-board — confirm.
			if first, err := NewPos(square.X+dX, square.Y+dY); err == nil {
				if abs(b.Get(*first)) == Opposition(player) {
					if second, err := NewPos(first.X+dX, first.Y+dY); err == nil {
						if b.Get(*second) == 0 {
							jumps = append(jumps, *second)
						}
					}
				}
			}
		}
	}
	return jumps
}
// JumpMoves finds all available capture moves starting at `square`, including
// multi-hops and king moves. The recursion works on a scratch copy of the
// board so captured pieces can be removed as the tree is walked.
func (b Board) JumpMoves(player int, square Pos) []*Move {
	movesList := []*Move{}
	b.jumpMoves(&Board{b.state}, square, NewMove(player, square), &movesList)
	return movesList
}
// validDirection discards potential jump squares based on the prevailing
// direction already set in the move. This is to ensure that kings, whilst
// allowed to go both forward and back, still stick to a single direction
// within each single move.
func validDirection(jumps []Pos, move *Move) []Pos {
	if move.Length() == 1 {
		// We have no prevailing direction, so any jumps may be considered
		return jumps
	}
	// The prevailing Y direction is fixed by the move's first hop.
	dY := move.Squares[1].Y - move.Squares[0].Y
	// Candidates continue from the move's current (last) square. The
	// previous code compared against Squares[1] instead, which wrongly
	// rejected same-direction continuations once a move was longer than
	// one hop (the Y offset grew to 2*dY).
	last := move.Squares[move.Length()-1]
	sameDirection := []Pos{}
	for _, jmp := range jumps {
		if jmp.Y-last.Y == dY {
			sameDirection = append(sameDirection, jmp)
		}
	}
	return sameDirection
}
// jumpMoves walks the jump tree depth-first from `square`, recording a move
// in `moves` whenever a branch cannot be extended further.
func (b Board) jumpMoves(state *Board, square Pos, move *Move, moves *[]*Move) {
	player := move.Player
	// NOTE(review): jumps are detected on the receiver `b`, not on `state`
	// with earlier captures removed — confirm a piece captured earlier in
	// the sequence cannot be jumped again.
	jumps := validDirection(b.singleJumps(player, square), move)
	if len(jumps) == 0 && move.Length() > 1 {
		*moves = append(*moves, move)
		return
	}
	for _, jmp := range jumps {
		newState := state.Apply(NewMove(player, square, jmp))
		move.addSquare(jmp) // Record this in current move..
		b.jumpMoves(newState, jmp, move, moves) // Walk the tree depth-first
		// Now make a fresh move for when we follow the next branch
		// NOTE(review): the fresh move restarts at `square`, dropping any
		// squares accumulated before this level — confirm this is intended
		// when a multi-jump branches mid-sequence.
		move = NewMove(player, square)
	}
}
// AllMoves returns a list of all valid moves for `player`. Captures are
// mandatory: when any capture move exists, only capture moves are returned.
func (b *Board) AllMoves(player int) []*Move {
	captures := []*Move{}
	quiet := []*Move{}
	for row := 0; row < 8; row++ {
		for col := 0; col < 8; col++ {
			at := Pos{col, row}
			if abs(b.Get(at)) != player {
				continue
			}
			captures = append(captures, b.JumpMoves(player, at)...)
			quiet = append(quiet, b.nonCaptureMoves(player, at)...)
		}
	}
	if len(captures) > 0 {
		return captures
	}
	return quiet
}
// CountPieces ...
func (b Board) CountPieces() *PieceCount {
pc := &PieceCount{}
for y := 0; y < 8; y++ {
for x := 0; x < 8; x++ {
switch b.Get(Pos{x, y}) {
case 2:
pc.men[1]++
case -2:
pc.kings[1]++
case 1:
pc.men[0]++
case -1:
pc.kings[0]++
default:
}
}
}
return pc
} | board.go | 0.750095 | 0.613121 | board.go | starcoder |
package mdc
import (
"strconv"
"github.com/hexops/vecty"
)
// Series interface is meant to be used with the DataTable
// component for displaying structured data.
type Series interface {
	// Head is the title of the data.
	Head() string
	// Kind reports how cells of this series are rendered (see DataKind).
	Kind() DataKind
	// AtRow renders the value stored at row i.
	AtRow(i int) vecty.MarkupOrChild
}
// DataKind identifies a data-table cell kind; it selects the MDC cell CSS
// class via CellClassName.
type DataKind int

const (
	// Keep in sync with DataTable's heads() method.
	defaultDataKind DataKind = iota
	DataString
	DataNumeric
	DataCheckbox
)
// CellClassName returns the MDC data-table cell class for the kind; string
// cells (and the default kind) use no class. Panics on an unknown DataKind.
func (dk DataKind) CellClassName() (class string) {
	switch dk {
	case DataString, defaultDataKind:
		// no class.
	case DataNumeric:
		class = "mdc-data-table__cell--numeric"
	case DataCheckbox:
		class = "mdc-data-table__cell-checkbox"
	default:
		panic("unknown DataKind")
	}
	return class
}
// Compile-time checks that each concrete series type implements Series.
var (
	_ Series = (*StringSeries)(nil)
	_ Series = (*IntSeries)(nil)
	_ Series = (*FloatSeries)(nil)
)
// StringSeries is a Series of plain string values.
type StringSeries struct {
	Label string
	Data []string
}

func (ss *StringSeries) Head() string { return ss.Label }
func (ss *StringSeries) Kind() DataKind { return DataString }

// AtRow renders Data[i] as a text node.
func (ss *StringSeries) AtRow(i int) vecty.MarkupOrChild {
	return vecty.Text(ss.Data[i])
}
// IntSeries is a Series of int values rendered as numeric cells.
type IntSeries struct {
	Label string
	Data []int
}

func (ss *IntSeries) Head() string { return ss.Label }
func (ss *IntSeries) Kind() DataKind { return DataNumeric }

// AtRow renders Data[i] as decimal text.
func (ss *IntSeries) AtRow(i int) vecty.MarkupOrChild {
	return vecty.Text(strconv.Itoa(ss.Data[i]))
}
type FloatSeries struct {
Label string
Data []float64
Prec int
// Floating point format verbs (see https://pkg.go.dev/fmt)
// 'e': scientific notation, e.g. -1.234456e+78
// 'E': scientific notation, e.g. -1.234456E+78
// 'f': decimal point but no exponent, e.g. 123.456
// 'g': %e for large exponents, %f otherwise. Precision is discussed below.
// 'G': %E for large exponents, %f otherwise
// 'x': hexadecimal notation (with decimal power of two exponent), e.g. -0x1.23abcp+20
// 'X': upper-case hexadecimal notation, e.g. -0X1.23ABCP+20
// 'b': decimalless scientific notation with exponent a power of two, in the manner of strconv.FormatFloat with the 'b' format e.g. -123456p-78
Fmt byte
}
func (ss *FloatSeries) Head() string { return ss.Label }
func (ss *FloatSeries) Kind() DataKind { return DataNumeric }
func (ss *FloatSeries) AtRow(i int) vecty.MarkupOrChild {
const (
defaultFmt = 'g'
defaultPrec = 6
)
prec := ss.Prec
if prec == 0 {
ss.Prec = defaultPrec
}
if ss.Fmt == 0 {
ss.Fmt = defaultFmt
}
return vecty.Text(strconv.FormatFloat(ss.Data[i], defaultFmt, prec, 64))
} | series.go | 0.650023 | 0.460774 | series.go | starcoder |
package telego
import (
upp "github.com/SakoDroid/telego/Parser"
objs "github.com/SakoDroid/telego/objects"
)
//This is the interface used for creating normal keyboards and inline keyboards.
// MarkUps is the interface used for creating normal keyboards and inline
// keyboards; implementations convert themselves to a telegram ReplyMarkup.
type MarkUps interface {
	toMarkUp() objs.ReplyMarkup
}
//A normal keyboard.
type keyboard struct {
keys [][]*objs.KeyboardButton
resizeKeyBoard, oneTimeKeyboard, selective bool
inputFieldPlaceHolder string
}
// fixRows grows the key matrix until it holds at least `row` rows so a
// button can be appended to the 1-based row `row`.
func (kb *keyboard) fixRows(row int) {
	for len(kb.keys) < row {
		kb.keys = append(kb.keys, make([]*objs.KeyboardButton, 0))
	}
}
/*Adds a new button holding the given text to the specified row. According to telegram bot api if this button is pressed the text inside the button will be sent to the bot as a message.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added*/
func (kb *keyboard) AddButton(text string, row int) {
	kb.addButton(text, row, false, false, nil)
}

/*Adds a new button holding the given text to the specified row. This method also adds a handler for that button so every time this button is pressed the handler will be called. You can read the documentation of "AddHandler" for better understanding on handlers.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added*/
func (kb *keyboard) AddButtonHandler(text string, row int, handler func(*objs.Update), chatTypes ...string) {
	kb.addButton(text, row, false, false, nil)
	upp.AddHandler(text, handler, chatTypes...)
}

/*Adds a new contact button. According to telegram bot api when this button is pressed,the user's phone number will be sent as a contact. Available in private chats only.

Note: ContactButtons and LocationButtons will only work in Telegram versions released after 9 April, 2016. Older clients will display unsupported message.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added*/
func (kb *keyboard) AddContactButton(text string, row int) {
	kb.addButton(text, row, true, false, nil)
}

/*Adds a new location button. According to telegram bot api when this button is pressed,the user's location will be sent. Available in private chats only.

Note: ContactButtons and LocationButtons will only work in Telegram versions released after 9 April, 2016. Older clients will display unsupported message.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added*/
func (kb *keyboard) AddLocationButton(text string, row int) {
	kb.addButton(text, row, false, true, nil)
}

/*Adds a new poll button. According to telegram bot api, the user will be asked to create a poll and send it to the bot when this button is pressed. Available in private chats only.

Note: PollButton will only work in Telegram versions released after 23 January, 2020. Older clients will display unsupported message.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.

Note : poll type can be "regular" or "quiz". Any other value will cause the button not to be added.*/
func (kb *keyboard) AddPollButton(text string, row int, pollType string) {
	if pollType == "regular" || pollType == "quiz" {
		kb.addButton(text, row, false, false, &objs.KeyboardButtonPollType{Type: pollType})
	}
}
// addButton appends a button to the 1-based row `row`, growing the row
// slice as needed; contact/location/poll select the special button kinds.
// Rows below 1 are ignored.
func (kb *keyboard) addButton(text string, row int, contact, location bool, poll *objs.KeyboardButtonPollType) {
	if row >= 1 {
		kb.fixRows(row)
		kb.keys[row-1] = append(kb.keys[row-1], &objs.KeyboardButton{
			Text: text,
			RequestContact: contact,
			RequestLocation: location,
			RequestPoll: poll,
		})
	}
}
// toMarkUp satisfies the MarkUps interface by packaging the keys and
// display options into an *objs.ReplyKeyboardMarkup.
func (kb *keyboard) toMarkUp() objs.ReplyMarkup {
	return &objs.ReplyKeyboardMarkup{
		Keyboard: kb.keys,
		ResizeKeyboard: kb.resizeKeyBoard,
		OneTimeKeyboard: kb.oneTimeKeyboard,
		InputFieldPlaceholder: kb.inputFieldPlaceHolder,
		Selective: kb.selective,
	}
}
// inlineKeyboard is an inline keyboard attached to a message: a grid of
// inline buttons.
type inlineKeyboard struct {
	keys [][]*objs.InlineKeyboardButton
}
/*Adds a button that will open an url when pressed.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added*/
func (in *inlineKeyboard) AddURLButton(text, url string, row int) {
	in.addButton(text, url, "", "", "", nil, nil, false, row)
}

/*Adds a button that will be used for automatic authorization. According to telegram bot api, login url is an HTTP URL used to automatically authorize the user. Can be used as a replacement for the Telegram Login Widget.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.

Arguments :

1. url : An HTTP URL to be opened with user authorization data added to the query string when the button is pressed. If the user refuses to provide authorization data, the original URL without information about the user will be opened. The data added is the same as described in Receiving authorization data. NOTE: You must always check the hash of the received data to verify the authentication and the integrity of the data as described in Checking authorization.

2. forwardText : New text of the button in forwarded messages.

3. botUsername : Username of a bot, which will be used for user authorization. See Setting up a bot for more details. If not specified, the current bot's username will be assumed. The url's domain must be the same as the domain linked with the bot. See Linking your domain to the bot for more details.

4. requestWriteAccess : Pass True to request the permission for your bot to send messages to the user.
*/
func (in *inlineKeyboard) AddLoginURLButton(text, url, forwardText, botUsername string, requestWriteAccess bool, row int) {
	in.addButton(text, "", "", "", "", &objs.LoginUrl{
		URL: url,
		ForwardText: forwardText,
		BotUsername: botUsername,
		RequestWriteAccess: requestWriteAccess,
	}, nil, false, row)
}

/*Adds a button that when it's pressed, a callback query with the given data is sent to the bot.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.
*/
func (in *inlineKeyboard) AddCallbackButton(text, callbackData string, row int) {
	in.addButton(text, "", callbackData, "", "", nil, nil, false, row)
}

/*Adds a button that when it's pressed, a callback query with the given data is sent to the bot. A handler is also added which will be called every time a callback query is received for this button.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.
*/
func (in *inlineKeyboard) AddCallbackButtonHandler(text, callbackData string, row int, handler func(*objs.Update)) {
	in.addButton(text, "", callbackData, "", "", nil, nil, false, row)
	upp.AddCallbackHandler(callbackData, handler)
}
/*Adds a switch inline query button. According to telegram bot api, pressing the button will prompt the user to select one of their chats, open that chat and insert the bot's username and the specified inline query in the input field. Can be empty, in which case just the bot's username will be inserted. Note: This offers an easy way for users to start using your bot in inline mode when they are currently in a private chat with it. Especially useful when combined with switch_pm… actions – in this case the user will be automatically returned to the chat they switched from, skipping the chat selection screen.

Note : If "currentChat" option is true, the inline query will be inserted in the current chat's input field.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.
*/
func (in *inlineKeyboard) AddSwitchInlineQueryButton(text, inlineQuery string, row int, currentChat bool) {
	if currentChat {
		in.addButton(text, "", "", "", inlineQuery, nil, nil, false, row)
	} else {
		in.addButton(text, "", "", inlineQuery, "", nil, nil, false, row)
	}
}
/*Adds a game button. Every time a user presses this button a game will be launched. Use botfather to set up a game.

NOTE: This type of button must always be the first button in the first row.*/
func (in *inlineKeyboard) AddGameButton(text string, row int) {
	in.addButton(text, "", "", "", "", nil, &objs.CallbackGame{}, false, row)
}

/*Adds a pay button.

NOTE: This type of button must always be the first button in the first row.

Note : row number starts from 1. (it's not zero based). If any number lower than 1 is passed, no button will be added.
*/
func (in *inlineKeyboard) AddPayButton(text string, row int) {
	in.addButton(text, "", "", "", "", nil, nil, true, row)
}
// addButton appends an inline button to the 1-based row `row`, growing the
// row slice as needed. Exactly one of the optional fields is expected to be
// set by the exported Add*Button wrappers. Rows below 1 are ignored.
func (in *inlineKeyboard) addButton(text, url, callbackData, switchInlineQuery, switchInlineQueryCurrentChat string, loginUrl *objs.LoginUrl, callbackGame *objs.CallbackGame, pay bool, row int) {
	if row >= 1 {
		in.fixRows(row)
		in.keys[row-1] = append(in.keys[row-1], &objs.InlineKeyboardButton{
			Text: text,
			URL: url,
			LoginURL: loginUrl,
			CallbackData: callbackData,
			SwitchInlineQuery: switchInlineQuery,
			SwitchInlineQueryCurrentChat: switchInlineQueryCurrentChat,
			CallbackGame: callbackGame,
			Pay: pay,
		})
	}
}
// fixRows grows the key matrix until it holds at least `row` rows so a
// button can be appended to the 1-based row `row`.
func (in *inlineKeyboard) fixRows(row int) {
	for len(in.keys) < row {
		in.keys = append(in.keys, make([]*objs.InlineKeyboardButton, 0))
	}
}
func (in *inlineKeyboard) toInlineKeyboardMarkup() objs.InlineKeyboardMarkup {
return objs.InlineKeyboardMarkup{
InlineKeyboard: in.keys,
}
}
func (in *inlineKeyboard) toMarkUp() objs.ReplyMarkup {
return &objs.InlineKeyboardMarkup{
InlineKeyboard: in.keys,
}
} | keyboard.go | 0.52756 | 0.430088 | keyboard.go | starcoder |
// Package descriptions provides the descriptions as used by the graphql endpoint for Weaviate
package descriptions
// AGGREGATE
// Descriptions for the local Aggregate query fields and result objects.
const (
	AggregateProperty = "Aggregate this property"
	AggregateThings   = "Aggregate Things on a local Weaviate"
	// NOTE(review): this text says "Things" — presumably it should read
	// "Actions"; confirm before changing the user-visible string.
	AggregateActions = "Aggregate Things on a local Weaviate"

	GroupBy = "Specify which properties to group by"

	AggregateObj              = "An object allowing Aggregation of Things and Actions"
	AggregatePropertyObject   = "An object containing Aggregation information about this property"
	AggregateThingsActionsObj = "An object allowing Aggregation of %ss on a local Weaviate"

	AggregateMean   = "Aggregate on the mean of numeric property values"
	AggregateSum    = "Aggregate on the sum of numeric property values"
	AggregateMedian = "Aggregate on the median of numeric property values"
	AggregateMode   = "Aggregate on the mode of numeric property values"
	AggregateMin    = "Aggregate on the minimum of numeric property values"
	AggregateMax    = "Aggregate on the maximum of numeric property values"
	AggregateCount  = "Aggregate on the total amount of found property values"

	AggregateGroupedBy               = "Indicates the group of returned data"
	AggregateNumericObj              = "An object containing the %s of numeric properties"
	AggregateCountObj                = "An object containing countable properties"
	AggregateGroupedByObj            = "An object containing the path and value of the grouped property"
	AggregateGroupedByGroupedByPath  = "The path of the grouped property"
	AggregateGroupedByGroupedByValue = "The value of the grouped property"
)

// NETWORK
// Descriptions for the network (federated) Aggregate query.
const (
	NetworkAggregateWeaviateObj = "An object containing Get Things and Actions fields for network Weaviate instance: "
	NetworkAggregate            = "Perform Aggregation of Things and Actions"
	NetworkAggregateThings      = "Aggregate Things on a network Weaviate"
	// NOTE(review): as above, presumably "Actions" rather than "Things".
	NetworkAggregateActions = "Aggregate Things on a network Weaviate"

	NetworkAggregateObj              = "An object allowing Aggregation of Things and Actions"
	NetworkAggregatePropertyObject   = "An object containing Aggregation information about this property"
	NetworkAggregateThingsActionsObj = "An object allowing Aggregation of %ss on a network Weaviate"

	NetworkAggregateMean   = "Aggregate on the mean of numeric property values"
	NetworkAggregateSum    = "Aggregate on the sum of numeric property values"
	NetworkAggregateMedian = "Aggregate on the median of numeric property values"
	NetworkAggregateMode   = "Aggregate on the mode of numeric property values"
	NetworkAggregateMin    = "Aggregate on the minimum of numeric property values"
	NetworkAggregateMax    = "Aggregate on the maximum of numeric property values"
	NetworkAggregateCount  = "Aggregate on the total amount of found property values"

	NetworkAggregateGroupedBy               = "Indicates the group of returned data"
	NetworkAggregateNumericObj              = "An object containing the %s of numeric properties"
	NetworkAggregateCountObj                = "An object containing countable properties"
	NetworkAggregateGroupedByObj            = "An object containing the path and value of the grouped property"
	NetworkAggregateGroupedByGroupedByPath  = "The path of the grouped property"
	NetworkAggregateGroupedByGroupedByValue = "The value of the grouped property"
)
package compare
import (
"encoding/json"
"reflect"
"regexp"
"sort"
"github.com/yudai/gojsondiff"
"github.com/yudai/gojsondiff/formatter"
)
// JSONDiff represents the differences between two JSON values.
type JSONDiff struct {
left map[string]interface{}
ds []gojsondiff.Delta
}
// Deltas returns Deltas that describe individual differences between two JSON values.
func (d *JSONDiff) Deltas() []gojsondiff.Delta {
	return d.ds
}

// Modified returns true if JSONDiff has at least one Delta, i.e. the two
// compared values differ.
func (d *JSONDiff) Modified() bool {
	return len(d.ds) > 0
}
// Format returns a string representation of the differences between two
// JSON values, optionally ANSI-colored. Because Compare wraps both values
// in a synthetic {"$": ...} root object, the formatter output is
// post-processed here to strip that wrapper and its extra indentation.
func (d *JSONDiff) Format(coloring bool) (string, error) {
	config := formatter.AsciiFormatterConfig{
		ShowArrayIndex: true,
		Coloring: coloring,
	}
	af := formatter.NewAsciiFormatter(d.left, config)
	diff, err := af.Format(d)
	if err != nil {
		return "", err
	}
	// remove wrapping object for values of basic types
	basicRE := regexp.MustCompile(`^\s*{\n-\s*"\$":\s*([\s\S]*)\+\s*"\$":\s*([\s\S]*)\n\s*}`)
	unwrapped := basicRE.ReplaceAllString(diff, "- $1+ $2")
	// remove wrapping object for arrays and objects
	nonBasicRE := regexp.MustCompile(`^\s*{\n\s*"\$":([\s\S]*)\n\s}`)
	unwrapped = nonBasicRE.ReplaceAllString(unwrapped, "$1")
	// decrease indentation for remaining lines
	indentRE := regexp.MustCompile(`(\n.)\s{2}`)
	return indentRE.ReplaceAllString(unwrapped, "$1"), nil
}
// JSONDiffer compares JSON strings.
type JSONDiffer struct {
	// BasicEqualer specifies how values of basic types (bool, number,
	// string) are compared; see valueDelta.
	BasicEqualer
}
// Equal determines if two JSON strings represent the same value.
// Returns an error iff the strings don't adhere to the JSON syntax.
func (jd JSONDiffer) Equal(left, right []byte) (bool, error) {
	diff, err := jd.Compare(left, right)
	if err != nil {
		return false, err
	}
	return !diff.Modified(), nil
}
// Compare returns the differences between two JSON strings.
// Returns an error iff the strings don't adhere to the JSON syntax.
func (jd JSONDiffer) Compare(left, right []byte) (*JSONDiff, error) {
	var l, r interface{}
	if err := json.Unmarshal(left, &l); err != nil {
		return nil, err
	}
	if err := json.Unmarshal(right, &r); err != nil {
		return nil, err
	}
	// add explicit root in case the values are arrays or plain values (not objects);
	// Format later strips this synthetic "$" wrapper again.
	leftMap := map[string]interface{}{"$": l}
	rightMap := map[string]interface{}{"$": r}
	d := jd.compareMaps(leftMap, rightMap)
	d.left = leftMap
	return d, nil
}
// compareMaps wraps mapDeltas' result in a JSONDiff.
// cf. https://github.com/yudai/gojsondiff/blob/master/gojsondiff.go#L66-L74
func (jd JSONDiffer) compareMaps(left, right map[string]interface{}) *JSONDiff {
	ds := jd.mapDeltas(left, right)
	return &JSONDiff{ds: ds}
}
// compare produces the Delta (if any) for two values of arbitrary JSON
// type at position pos, recursing into arrays and objects and delegating
// basic types to valueDelta. Values of differing dynamic types are always
// a Modified delta.
// cf. https://github.com/yudai/gojsondiff/blob/master/gojsondiff.go#L235-L279
func (jd JSONDiffer) compare(pos gojsondiff.Position, left, right interface{}) (bool, gojsondiff.Delta) {
	if reflect.TypeOf(left) != reflect.TypeOf(right) {
		return false, gojsondiff.NewModified(pos, left, right)
	}
	switch l := left.(type) {
	case []interface{}:
		if ds := jd.sliceDeltas(l, right.([]interface{})); len(ds) > 0 {
			return false, gojsondiff.NewArray(pos, ds)
		}
	case map[string]interface{}:
		if ds := jd.mapDeltas(l, right.(map[string]interface{})); len(ds) > 0 {
			return false, gojsondiff.NewObject(pos, ds)
		}
	default:
		return jd.valueDelta(pos, left, right)
	}
	return true, nil
}
// sliceDeltas compares two JSON arrays index by index: surplus left
// elements become Deleted deltas and surplus right elements become Added.
// cf. https://github.com/yudai/gojsondiff/blob/master/gojsondiff.go#L125-L233
// Note that this implementation is much more primitive. There's no attempt to
// find a longest common sequence and base differences on that. We just compare
// values index by index.
func (jd JSONDiffer) sliceDeltas(left, right []interface{}) []gojsondiff.Delta {
	var ds []gojsondiff.Delta
	for i, leftVal := range left {
		if i < len(right) {
			if same, d := jd.compare(gojsondiff.Index(i), leftVal, right[i]); !same {
				ds = append(ds, d)
			}
		} else {
			ds = append(ds, gojsondiff.NewDeleted(gojsondiff.Index(i), leftVal))
		}
	}
	for i := len(left); i < len(right); i++ {
		ds = append(ds, gojsondiff.NewAdded(gojsondiff.Index(i), right[i]))
	}
	return ds
}
// mapDeltas compares two JSON objects by key: shared keys recurse via
// compare, keys only in left become Deleted deltas and keys only in right
// become Added. Keys are visited in sorted order for deterministic output.
// cf. https://github.com/yudai/gojsondiff/blob/master/gojsondiff.go#L86-L112
func (jd JSONDiffer) mapDeltas(left, right map[string]interface{}) []gojsondiff.Delta {
	var ds []gojsondiff.Delta
	keys := sortedKeys(left) // stabilize delta order
	for _, key := range keys {
		if rightVal, ok := right[key]; ok {
			if same, d := jd.compare(gojsondiff.Name(key), left[key], rightVal); !same {
				ds = append(ds, d)
			}
		} else {
			ds = append(ds, gojsondiff.NewDeleted(gojsondiff.Name(key), left[key]))
		}
	}
	keys = sortedKeys(right) // stabilize delta order
	for _, key := range keys {
		if _, ok := left[key]; !ok {
			ds = append(ds, gojsondiff.NewAdded(gojsondiff.Name(key), right[key]))
		}
	}
	return ds
}
// valueDelta returns the Delta (if any) for two basic values (null, boolean,
// number, string). Rather than just using reflect.DeepEqual(), as gojsondiff
// does, comparison is delegated to the embedded BasicEqualer.
// The receiver is now a value receiver for consistency with every other
// JSONDiffer method (the method reads but never mutates the receiver).
func (jd JSONDiffer) valueDelta(pos gojsondiff.Position, left, right interface{}) (bool, gojsondiff.Delta) {
	var same bool
	switch l := left.(type) {
	case nil:
		same = left == right
	case bool:
		same = jd.Bool(l, right.(bool))
	case float64:
		same = jd.Float64(l, right.(float64))
	case string:
		same = jd.String(l, right.(string))
	default:
		// should never happen (https://golang.org/pkg/encoding/json/#Unmarshal)
		same = reflect.DeepEqual(left, right)
	}
	if !same {
		return false, gojsondiff.NewModified(pos, left, right)
	}
	return true, nil
}
// sortedKeys returns m's keys in ascending order; used to make delta order
// deterministic. Removed stray dataset metadata from the closing line.
// cf. https://github.com/yudai/gojsondiff/blob/master/gojsondiff.go#L409-L416
func sortedKeys(m map[string]interface{}) []string {
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	return keys
}
package sensors
import (
"github.com/b3nn0/goflying/mpu9250"
"github.com/kidoman/embd"
)
const (
	mpu9250GyroRange = 250 // mpu9250GyroRange is the default range to use for the Gyro.
	mpu9250AccelRange = 4 // mpu9250AccelRange is the default range to use for the Accel.
	mpu9250UpdateFreq = 50 // mpu9250UpdateFreq is the rate at which to update the sensor values.
)

// MPU9250 represents an InvenSense MPU9250 attached to the I2C bus and satisfies
// the IMUReader interface. It wraps the lower-level goflying driver.
type MPU9250 struct {
	mpu *mpu9250.MPU9250
}
// NewMPU9250 returns an instance of the MPU9250 IMUReader, connected to an
// MPU9250 attached on the I2C bus with either valid address. Returns an
// error if the underlying driver cannot be initialized.
func NewMPU9250(i2cbus *embd.I2CBus) (*MPU9250, error) {
	var (
		m MPU9250
		mpu *mpu9250.MPU9250
		err error
	)
	mpu, err = mpu9250.NewMPU9250(i2cbus, mpu9250GyroRange, mpu9250AccelRange, mpu9250UpdateFreq, true, false)
	if err != nil {
		return nil, err
	}
	// Set Gyro (Accel) LPFs to 20 (21) Hz to filter out prop/glareshield vibrations above 1200 (1260) RPM
	// NOTE(review): any return values of SetGyroLPF/SetAccelLPF are ignored
	// here — confirm whether they can fail.
	mpu.SetGyroLPF(21)
	mpu.SetAccelLPF(21)
	m.mpu = mpu
	return &m, nil
}
// Read returns the average (since last reading) time, Gyro X-Y-Z, Accel X-Y-Z, Mag X-Y-Z,
// error reading Gyro/Accel, and error reading Mag. It retries up to 5
// receives on the averaged-data channel until a sample with N > 0 arrives.
func (m *MPU9250) Read() (T int64, G1, G2, G3, A1, A2, A3, M1, M2, M3 float64, GAError, MAGError error) {
	var (
		data *mpu9250.MPUData
		i int8
	)
	data = new(mpu9250.MPUData)
	for data.N == 0 && i < 5 {
		data = <-m.mpu.CAvg
		T = data.T.UnixNano()
		G1 = data.G1
		G2 = data.G2
		G3 = data.G3
		A1 = data.A1
		A2 = data.A2
		A3 = data.A3
		M1 = data.M1
		M2 = data.M2
		M3 = data.M3
		GAError = data.GAError
		MAGError = data.MagError
		i++
	}
	return
}
// ReadOne returns the most recent time, Gyro X-Y-Z, Accel X-Y-Z, Mag X-Y-Z,
// error reading Gyro/Accel, and error reading Mag. It blocks until the next
// sample arrives on the driver's channel. (The previous version allocated
// an MPUData value that was immediately overwritten by the receive.)
func (m *MPU9250) ReadOne() (T int64, G1, G2, G3, A1, A2, A3, M1, M2, M3 float64, GAError, MAGError error) {
	data := <-m.mpu.C
	T = data.T.UnixNano()
	G1 = data.G1
	G2 = data.G2
	G3 = data.G3
	A1 = data.A1
	A2 = data.A2
	A3 = data.A3
	M1 = data.M1
	M2 = data.M2
	M3 = data.M3
	GAError = data.GAError
	MAGError = data.MagError
	return
}
// Close stops reading the MPU.
func (m *MPU9250) Close() {
m.mpu.CloseMPU()
} | sensors/mpu9250.go | 0.673514 | 0.402451 | mpu9250.go | starcoder |
package day20
import (
"fmt"
"github.com/knalli/aoc"
"strconv"
)
// DARK and LIGHT are the pixel glyphs used in the puzzle input and when
// rendering grids.
const (
	DARK int32 = '.'
	LIGHT int32 = '#'
)
// enhanceImage applies one round of the image-enhancement algorithm: every
// output pixel is looked up in enhancementAlgorithm by the 9-bit number
// formed from its 3x3 input neighborhood. The output grid grows by one cell
// on every side. `outside` is the value assumed for pixels beyond the input
// grid; it must be passed in because the infinite background flips between
// dark and light on alternating rounds when enhancementAlgorithm[0] is '#'.
func enhanceImage(input *Grid, enhancementAlgorithm string, outside int32) *Grid {
	output := NewGrid(input.Width()+2, input.Height()+2)
	for y := 0; y < output.Height(); y++ {
		for x := 0; x < output.Width(); x++ {
			// The output grid is offset by one relative to the input.
			inBasePoint := Point{X: x - 1, Y: y - 1}
			encoded := ""
			for _, inPoint := range inBasePoint.Block9() {
				if input.Valid(inPoint) {
					if input.GetValue(inPoint) == 1 {
						encoded += "1"
					} else {
						encoded += "0"
					}
				} else {
					// infinite (dark/light)
					// well, that trick required a look into reddit.
					// the variable "outside" is required because the "infinite default" flipped each round
					if outside == LIGHT {
						encoded += "1"
					} else {
						encoded += "0"
					}
				}
			}
			// binary
			decoded := binary2Int(encoded)
			if enhancementAlgorithm[decoded] == uint8(LIGHT) {
				output.SetValue(Point{x, y}, 1)
			}
		}
	}
	return output
}
// binary2Int parses s as a base-2 integer, returning 0 when s is not a
// valid binary string.
func binary2Int(s string) int64 {
	v, err := strconv.ParseInt(s, 2, 64)
	if err != nil {
		return 0
	}
	return v
}
// parseInput splits the puzzle input into the enhancement-algorithm string
// (line 0) and the initial image grid (lines 2 onward, with '#' cells set
// to 1); line 1 is the blank separator.
func parseInput(lines []string) (string, *Grid) {
	enhancementAlgorithm := lines[0]
	grid := NewGrid(
		len(lines[2]),
		len(lines[2:]),
	)
	for y, line := range lines[2:] {
		for x, c := range line {
			if c == LIGHT {
				grid.SetValue(Point{x, y}, 1)
			}
		}
	}
	return enhancementAlgorithm, grid
}
// gridRenderer maps a grid cell value to its display glyph: LIGHT ('#')
// for 1, DARK ('.') otherwise. The coordinates are unused but required by
// the renderer callback signature.
func gridRenderer(x int, y int, v int) string {
	switch v {
	case 1:
		return string(LIGHT)
	default:
		return string(DARK)
	}
}
// solve1 runs 2 enhancement rounds and prints the number of lit pixels.
// The `outside` background starts DARK and flips each round (it is LIGHT
// on odd rounds) — see enhanceImage.
func solve1(lines []string) error {
	enhancementAlgorithm, grid := parseInput(lines)
	output := grid
	for i := 0; i < 2; i++ {
		outside := LIGHT
		if i%2 == 0 {
			outside = DARK
		}
		output = enhanceImage(output, enhancementAlgorithm, outside)
	}
	//fmt.Println("Output:")
	//fmt.Println(output.ToString(gridRenderer))
	result := output.Count(func(x int, y int, v int) bool {
		return v == 1
	})
	aoc.PrintSolution(fmt.Sprintf("%d pixels are lit", result))
	return nil
}
func solve2(lines []string) error {
enhancementAlgorithm, grid := parseInput(lines)
output := grid
for i := 0; i < 50; i++ {
outside := LIGHT
if i%2 == 0 {
outside = DARK
}
output = enhanceImage(output, enhancementAlgorithm, outside)
}
//fmt.Println("Output:")
//fmt.Println(output.ToString(gridRenderer))
result := output.Count(func(x int, y int, v int) bool {
return v == 1
})
aoc.PrintSolution(fmt.Sprintf("%d pixels are lit", result))
return nil
} | day20/puzzle.go | 0.593491 | 0.415966 | puzzle.go | starcoder |
package base
import (
"bytes"
"fmt"
"io"
"math"
"math/rand"
"runtime"
"sync"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
"gonum.org/v1/gonum/mat"
)
// MatConst is a matrix where all cells have the same value.
type MatConst struct {
	Rows, Columns int
	Value float64
}

// Dims for MatConst
func (m MatConst) Dims() (int, int) { return m.Rows, m.Columns }

// At for MatConst returns the constant Value regardless of position.
func (m MatConst) At(i, j int) float64 { return m.Value }

// T for MatConst returns a lazily transposed view.
func (m MatConst) T() mat.Matrix { return MatTranspose{Matrix: m} }
// MatTranspose is a matrix override to transpose a mat.Matrix from its initializer
type MatTranspose struct{ mat.Matrix }
// Dims for MatTranspose
func (m MatTranspose) Dims() (int, int) { r, c := m.Matrix.Dims(); return c, r }
// At for MatTranspose
func (m MatTranspose) At(i, j int) float64 {
return m.Matrix.At(j, i)
}
// Set for MatTranspose
func (m MatTranspose) Set(i, j int, v float64) {
if Mutable, ok := m.Matrix.(mat.Mutable); ok {
Mutable.Set(j, i, v)
} else {
panic("underling Matrix is not Mutable")
}
}
// T for MatTranspose
func (m MatTranspose) T() mat.Matrix { return m.Matrix }
// MatOnesPrepended is a matrix override representing its initializer with an
// initial column of ones added (e.g. for a bias/intercept column).
type MatOnesPrepended struct{ mat.Matrix }

// Dims for MatOnesPrepended: same rows, one extra column.
func (m MatOnesPrepended) Dims() (int, int) { r, c := m.Matrix.Dims(); return r, 1 + c }

// At for MatOnesPrepended: column 0 is the synthetic ones column; all other
// columns are shifted by one relative to the underlying matrix.
func (m MatOnesPrepended) At(i, j int) float64 {
	if j == 0 {
		return 1.
	}
	return m.Matrix.At(i, j-1)
}

// Set for MatOnesPrepended writes through to the underlying matrix. The
// synthetic ones column (j == 0) has no backing storage, so writing to it is
// rejected explicitly instead of passing column -1 to the wrapped matrix.
func (m MatOnesPrepended) Set(i, j int, v float64) {
	if j == 0 {
		panic("cannot Set the prepended ones column")
	}
	if Mutable, ok := m.Matrix.(mat.Mutable); ok {
		Mutable.Set(i, j-1, v)
	} else {
		panic("underlying Matrix is not Mutable")
	}
}

// T for MatOnesPrepended returns a transposed view.
func (m MatOnesPrepended) T() mat.Matrix { return MatTranspose{m} }
// MatFirstColumnRemoved is a matrix whose initial column has been removed
// with respect to its initializer.
type MatFirstColumnRemoved struct{ mat.Matrix }

// Dims for MatFirstColumnRemoved: same rows, one fewer column.
func (m MatFirstColumnRemoved) Dims() (int, int) { r, c := m.Matrix.Dims(); return r, c - 1 }

// At for MatFirstColumnRemoved: column j maps to column j+1 of the wrapped matrix.
func (m MatFirstColumnRemoved) At(i, j int) float64 {
	return m.Matrix.At(i, j+1)
}

// Set for MatFirstColumnRemoved writes through to column j+1 of the wrapped
// matrix. Panics if the wrapped matrix does not implement mat.Mutable.
func (m MatFirstColumnRemoved) Set(i, j int, v float64) {
	if Mutable, ok := m.Matrix.(mat.Mutable); ok {
		Mutable.Set(i, j+1, v)
	} else {
		panic("underling Matrix is not Mutable")
	}
}

// T for MatFirstColumnRemoved returns a transposed view.
func (m MatFirstColumnRemoved) T() mat.Matrix { return MatTranspose{m} }

// MatFirstRowZeroed is a matrix whose initial row reads as zeros with
// respect to its initializer.
type MatFirstRowZeroed struct{ mat.Matrix }

// Dims for MatFirstRowZeroed: unchanged from the wrapped matrix.
func (m MatFirstRowZeroed) Dims() (int, int) { return m.Matrix.Dims() }

// At for MatFirstRowZeroed: row 0 always reads as 0; other rows pass through.
func (m MatFirstRowZeroed) At(i, j int) float64 {
	if i == 0 {
		return 0.
	}
	return m.Matrix.At(i, j)
}

// Set for MatFirstRowZeroed writes through to the wrapped matrix unchanged.
// NOTE(review): writes to row 0 reach the underlying storage even though At
// reports that row as zero — confirm this asymmetry is intentional.
func (m MatFirstRowZeroed) Set(i, j int, v float64) {
	if Mutable, ok := m.Matrix.(mat.Mutable); ok {
		Mutable.Set(i, j, v)
	} else {
		panic("underling Matrix is not Mutable")
	}
}

// T for MatFirstRowZeroed returns a transposed view.
func (m MatFirstRowZeroed) T() mat.Matrix { return MatTranspose{m} }
// MatRowSlice is a view of rows [Start, End) of the underlying matrix.
type MatRowSlice struct {
	mat.Matrix
	Start, End int
}

// Dims for MatRowSlice: End-Start rows, all columns of the wrapped matrix.
func (m MatRowSlice) Dims() (int, int) { _, c := m.Matrix.Dims(); return m.End - m.Start, c }

// At for MatRowSlice. The row index is relative to Start.
func (m MatRowSlice) At(i, j int) float64 {
	// i == End-Start is one past the last row, so it must be rejected too
	// (the previous check used > and let it through).
	if i < 0 || i >= m.End-m.Start {
		panic("indexing error")
	}
	return m.Matrix.At(i+m.Start, j)
}

// Set for MatRowSlice translates the slice-relative row index to the wrapped
// matrix the same way At does: i+Start. (The previous i-Start wrote outside
// the slice.) Panics if the wrapped matrix does not implement mat.Mutable.
func (m MatRowSlice) Set(i, j int, v float64) {
	if Mutable, ok := m.Matrix.(mat.Mutable); ok {
		Mutable.Set(i+m.Start, j, v)
	} else {
		panic("underlying Matrix is not Mutable")
	}
}

// T for MatRowSlice returns a transposed view.
func (m MatRowSlice) T() mat.Matrix { return MatTranspose{m} }
// MatApply0 is a mat.Matrix override where At returns a func-generated value.
// The function takes no arguments, so the matrix contents may differ between
// reads (e.g. a random generator).
type MatApply0 struct {
	Rows, Columns int
	Func          func() float64
}

// Dims for MatApply0 returns the declared dimensions.
func (m MatApply0) Dims() (int, int) { return m.Rows, m.Columns }

// At for MatApply0 invokes Func; the indices are ignored.
func (m MatApply0) At(i, j int) float64 { return m.Func() }

// T for MatApply0 returns a transposed view.
func (m MatApply0) T() mat.Matrix { return MatTranspose{m} }

// MatApply1 is a mat.Matrix override where At returns a func-transformed
// value whose input is the corresponding element of its initializer.
type MatApply1 struct {
	mat.Matrix
	Func func(float64) float64
}

// Dims for MatApply1: unchanged from the wrapped matrix.
func (m MatApply1) Dims() (int, int) { return m.Matrix.Dims() }

// At for MatApply1 applies Func to the wrapped element.
func (m MatApply1) At(i, j int) float64 { return m.Func(m.Matrix.At(i, j)) }

// T for MatApply1 returns a MatTranspose.
func (m MatApply1) T() mat.Matrix { return MatTranspose{m} }

// MatApply2 is a mat.Matrix override returning Func applied elementwise to
// its two Matrix initializers. Dims are taken from A; A and B are assumed to
// have the same dimensions (not checked here).
type MatApply2 struct {
	A, B mat.Matrix
	Func func(a, b float64) float64
}

// Dims for MatApply2 reports A's dimensions.
func (m MatApply2) Dims() (int, int) { return m.A.Dims() }

// At for MatApply2 applies Func to the corresponding elements of A and B.
func (m MatApply2) At(i, j int) float64 { return m.Func(m.A.At(i, j), m.B.At(i, j)) }

// T for MatApply2 returns a transposed view.
func (m MatApply2) T() mat.Matrix { return MatTranspose{m} }

// MatSub is a mat.Matrix override returning the elementwise difference A-B
// of its two initializers (assumed same dims, not checked).
type MatSub struct{ A, B mat.Matrix }

// Dims for MatSub reports A's dimensions.
func (m MatSub) Dims() (int, int) { return m.A.Dims() }

// At for MatSub returns A(i,j) - B(i,j).
func (m MatSub) At(i, j int) float64 { return m.A.At(i, j) - m.B.At(i, j) }

// T for MatSub returns a transposed view.
func (m MatSub) T() mat.Matrix { return MatTranspose{m} }

// MatMulElem is a mat.Matrix override returning the elementwise (Hadamard)
// product of its two initializers (assumed same dims, not checked).
type MatMulElem struct{ A, B mat.Matrix }

// Dims for MatMulElem reports A's dimensions.
func (m MatMulElem) Dims() (int, int) { return m.A.Dims() }

// At for MatMulElem returns A(i,j) * B(i,j).
func (m MatMulElem) At(i, j int) float64 { return m.A.At(i, j) * m.B.At(i, j) }

// T for MatMulElem returns a transposed view.
func (m MatMulElem) T() mat.Matrix { return MatTranspose{m} }

// MatScaled is a mat.Matrix override returning each element of its
// initializer multiplied by Scale.
type MatScaled struct {
	mat.Matrix
	Scale float64
}

// Dims for MatScaled: unchanged from the wrapped matrix.
func (m MatScaled) Dims() (int, int) { return m.Matrix.Dims() }

// At for MatScaled returns the wrapped element times Scale.
func (m MatScaled) At(i, j int) float64 { return m.Matrix.At(i, j) * m.Scale }

// T for MatScaled returns a transposed view.
func (m MatScaled) T() mat.Matrix { return MatTranspose{m} }
// MatOneMinus is a mat.Matrix override returning 1-value for each element of
// its initializer.
type MatOneMinus struct {
	mat.Matrix
}

// Dims for MatOneMinus delegates to the wrapped matrix. (The previous
// implementation called m.Dims() on itself, recursing forever.)
func (m MatOneMinus) Dims() (int, int) { return m.Matrix.Dims() }

// At for MatOneMinus returns 1 minus the wrapped element. It must read via
// m.Matrix.At — calling m.At would recurse infinitely.
func (m MatOneMinus) At(i, j int) float64 { return 1. - m.Matrix.At(i, j) }

// T for MatOneMinus returns a transposed view.
func (m MatOneMinus) T() mat.Matrix { return MatTranspose{m} }
// MatDimsString returns a string listing the dimensions of each supplied
// matrix as " rows,cols" fragments (note the leading space before each).
func MatDimsString(mats ...mat.Matrix) string {
	var description string
	for _, matrix := range mats {
		rows, cols := matrix.Dims()
		description += fmt.Sprintf(" %d,%d", rows, cols)
	}
	return description
}
// MatDimsCheck checks compatibility of operator op and its Matrix parameters' Dims.
// R is the result of op, X and Y are the operands of op.
// "." is dot product; "+","-","*","/" are elementwise ops. Panics with a
// message containing all three shapes on mismatch; unknown ops are ignored.
func MatDimsCheck(op string, R, X, Y mat.Matrix) {
	rx, cx := X.Dims()
	ry, cy := Y.Dims()
	rr, cr := R.Dims()
	switch op {
	case "+", "-", "*", "/":
		// Elementwise: all three shapes must be identical.
		if rx != ry || cx != cy || rr != rx || cr != cx {
			panic(fmt.Errorf("%s %s", op, MatDimsString(R, X, Y)))
		}
	case ".":
		// Dot product: inner dims must agree and R must be rx x cy.
		if cx != ry || rr != rx || cr != cy {
			panic(fmt.Errorf("%s %s", op, MatDimsString(R, X, Y)))
		}
	}
}

// MatStr returns a tab-separated, newline-terminated string rendering of the
// given matrices side by side. Rows are driven by the first matrix's row
// count; each matrix is assumed to have at least that many rows.
func MatStr(Xs ...mat.Matrix) string {
	if len(Xs) == 0 {
		return ""
	}
	nSamples, nFeatures := Xs[0].Dims()
	b := bytes.NewBuffer(nil)
	for i := 0; i < nSamples; i++ {
		for imat, X := range Xs {
			// Each matrix may have its own column count.
			_, nFeatures = X.Dims()
			for j := 0; j < nFeatures; j++ {
				io.WriteString(b, fmt.Sprintf("%g", X.At(i, j)))
				if j < nFeatures-1 || imat < len(Xs)-1 {
					io.WriteString(b, "\t")
				} else {
					io.WriteString(b, "\n")
				}
			}
		}
	}
	return b.String()
}

// MatColStr returns the string rendering of column j of X.
func MatColStr(X mat.Matrix, j int) string {
	nSamples, _ := X.Dims()
	var t = make([]float64, nSamples)
	mat.Col(t, j, X)
	return fmt.Sprint(t)
}

// MatRowStr returns the string rendering of row i of X.
func MatRowStr(X mat.Matrix, i int) string {
	_, nFeatures := X.Dims()
	var t = make([]float64, nFeatures)
	mat.Row(t, i, X)
	return fmt.Sprint(t)
}

// MatShuffle shuffles the rows of X and Y in place with the same
// permutation (Fisher–Yates), keeping samples and their outputs aligned.
// Uses the package-global math/rand source.
func MatShuffle(X, Y *mat.Dense) {
	nSamples, nFeatures := X.Dims()
	_, nOutputs := Y.Dims()
	// Scratch rows reused for every swap to avoid per-iteration allocation.
	Xrowi := make([]float64, nFeatures, nFeatures)
	Yrowi := make([]float64, nOutputs, nOutputs)
	for i := nSamples - 1; i > 0; i-- {
		j := rand.Intn(i + 1)
		copy(Xrowi, X.RawRowView(i))
		X.SetRow(i, X.RawRowView(j))
		X.SetRow(j, Xrowi)
		copy(Yrowi, Y.RawRowView(i))
		Y.SetRow(i, Y.RawRowView(j))
		Y.SetRow(j, Yrowi)
	}
}

// MatSigmoid puts the element-wise sigmoid 1/(1+exp(-x)) of X into dst and
// returns dst. A nil dst is allocated with X's dimensions.
func MatSigmoid(dst *mat.Dense, X mat.Matrix) *mat.Dense {
	if dst == nil {
		r, c := X.Dims()
		dst = mat.NewDense(r, c, nil)
	}
	dst.Apply(func(i int, j int, v float64) float64 {
		return 1. / (1. + math.Exp(-v))
	}, X)
	return dst
}
// MatParallelGemm parallelizes blas64.Gemm across the logical rows of op(a):
// each of up to NumCPU goroutines computes one contiguous row slice of the
// result c. Small problems (n < 64) are run serially to avoid goroutine
// overhead.
func MatParallelGemm(tA, tB blas.Transpose, alpha float64, a, b blas64.General, beta float64, c blas64.General) {
	var start, end int
	nJobs := runtime.NumCPU()
	// n is the number of logical rows of op(a): a.Rows if blas.NoTrans,
	// else a.Cols.
	var n int
	if tA == blas.NoTrans {
		n = a.Rows
	} else {
		n = a.Cols
	}
	// Ceiling division so nJobs slices always cover all n rows.
	sliceRows := (n + nJobs - 1) / nJobs
	wg := new(sync.WaitGroup)
	fn := func(job, start, end int, wg *sync.WaitGroup) {
		if end > n {
			end = n
		}
		if end > start {
			// Slice rows of a for NoTrans, columns for the transposed case,
			// so the slice always corresponds to rows [start, end) of op(a).
			var aSlice blas64.General
			if tA == blas.NoTrans {
				aSlice = MatGeneralSlice(a, start, end, 0, a.Cols)
			} else {
				aSlice = MatGeneralSlice(a, 0, a.Rows, start, end)
			}
			blas64.Gemm(tA, tB, alpha,
				aSlice,
				b,
				beta,
				MatGeneralSlice(c, start, end, 0, c.Cols))
		}
		wg.Done()
	}
	if n < 64 {
		// Serial fallback covers all n logical rows. (Previously this
		// passed a.Rows, which under-computed the result whenever a was
		// transposed with a.Rows < a.Cols.)
		wg.Add(1)
		fn(0, 0, n, wg)
	} else {
		for job := 0; job < nJobs; job++ {
			end = start + sliceRows
			if end > n {
				end = n
			}
			if end > start {
				wg.Add(1)
				go fn(job, start, end, wg)
			}
			start = end
		}
		wg.Wait()
	}
}
// MatDenseFirstColumnRemoved returns a *mat.Dense sharing src's underlying
// data but with the first column removed (no copy).
func MatDenseFirstColumnRemoved(src *mat.Dense) *mat.Dense {
	nSamples, nOutputs := src.Dims()
	return MatDenseSlice(src, 0, nSamples, 1, nOutputs)
}

// MatDenseSlice returns a *mat.Dense sharing src's underlying data but
// restricted to rows [i, k) and columns [j, l). Writes through the slice are
// visible in src.
func MatDenseSlice(src mat.RawMatrixer, i, k, j, l int) *mat.Dense {
	M := src.RawMatrix()
	m := &mat.Dense{}
	m.SetRawMatrix(MatGeneralSlice(M, i, k, j, l))
	return m
}

// MatGeneralSlice returns a blas64.General sharing M's backing data but
// restricted to rows [i, k) and columns [j, l). Panics when the row range is
// empty or inverted.
func MatGeneralSlice(M blas64.General, i, k, j, l int) blas64.General {
	if k <= i {
		panic(fmt.Errorf("k<=i %d %d", k, i))
	}
	return blas64.General{
		Rows:   k - i,
		Cols:   l - j,
		Stride: M.Stride,
		// Data window: from the first element of the slice to the last
		// element of its final row.
		Data: M.Data[i*M.Stride+j : (k-1)*M.Stride+l],
	}
}

// MatDenseRowSlice returns a *mat.Dense sharing src's underlying data but
// restricted to rows [i, k).
func MatDenseRowSlice(src mat.RawMatrixer, i, k int) *mat.Dense {
	M := src.RawMatrix()
	m := &mat.Dense{}
	m.SetRawMatrix(MatGeneralRowSlice(M, i, k))
	return m
}
// MatGeneralRowSlice returns a blas64.General with the same underlaying data as M but rows and columns removed
func MatGeneralRowSlice(M blas64.General, i, k int) blas64.General {
// defer func() {
// if r := recover(); r != nil {
// fmt.Printf("i*M.Stride+j : %d*%d+%d : %d (k-1)*M.Stride+l: %d*%d+%d : %d len:%d %s\n",
// i, M.Stride, j, i*M.Stride+j,
// k-1, M.Stride, l, (k-1)*M.Stride+l, len(M.Data), r)
// }
// }()
if k <= i {
panic(fmt.Errorf("k<=i %d %d", k, i))
}
return blas64.General{
Rows: k - i,
Cols: M.Cols,
Stride: M.Stride,
Data: M.Data[i*M.Stride : k*M.Stride],
}
} | base/matrix.go | 0.7413 | 0.423458 | matrix.go | starcoder |
package direct
import (
"fmt"
"google.golang.org/protobuf/types/known/structpb"
)
// MapToProtoStruct converts a generic JSON-style map into a protobuf Struct,
// converting each value via ValueToStructValue. Returns the first conversion
// error encountered (e.g. an unsupported value type).
func MapToProtoStruct(m map[string]interface{}) (*structpb.Struct, error) {
	fields := map[string]*structpb.Value{}
	for k, v := range m {
		val, err := ValueToStructValue(v)
		if err != nil {
			return nil, err
		}
		fields[k] = val
	}
	return &structpb.Struct{Fields: fields}, nil
}
func ValueToStructValue(v interface{}) (*structpb.Value, error) {
switch x := v.(type) {
case nil:
return &structpb.Value{Kind: &structpb.Value_NullValue{}}, nil
case bool:
return &structpb.Value{Kind: &structpb.Value_BoolValue{BoolValue: x}}, nil
case float64:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: x}}, nil
case float32:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case int:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case int8:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case int16:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case int32:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case int64:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case uint:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case uint8:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case uint16:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case uint32:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case uint64:
return &structpb.Value{Kind: &structpb.Value_NumberValue{NumberValue: float64(x)}}, nil
case string:
return &structpb.Value{Kind: &structpb.Value_StringValue{StringValue: x}}, nil
case map[string]interface{}:
m, err := MapToProtoStruct(x)
if err != nil {
return nil, err
}
return &structpb.Value{Kind: &structpb.Value_StructValue{StructValue: m}}, nil
case []interface{}:
var vals []*structpb.Value
for _, e := range x {
val, err := ValueToStructValue(e)
if err != nil {
return nil, err
}
vals = append(vals, val)
}
return &structpb.Value{Kind: &structpb.Value_ListValue{ListValue: &structpb.ListValue{Values: vals}}}, nil
default:
return nil, fmt.Errorf("bad type %T for JSON value", v)
}
} | internal/proxy/direct/utils.go | 0.589362 | 0.404272 | utils.go | starcoder |
package matrix
import (
"math/rand"
)
/*
A matrix backed by a flat array of all elements.
*/
type DenseMatrix struct {
	matrix
	// flattened matrix data. elements[i*step+j] is row i, col j
	elements []float64
	// actual offset between rows; may exceed cols when this matrix is a
	// view into a wider parent (see GetMatrix)
	step int
}

/*
Returns an array of slices referencing the matrix data. Changes to
the slices affect changes to the matrix (no copy is made).
*/
func (A *DenseMatrix) Arrays() [][]float64 {
	a := make([][]float64, A.rows)
	for i := 0; i < A.rows; i++ {
		a[i] = A.elements[i*A.step : i*A.step+A.cols]
	}
	return a
}
/*
Returns the contents of this matrix stored into a flat array (row-major).
When rows are contiguous the backing array is returned directly (aliased,
not copied); otherwise a fresh copy is built.
*/
func (A *DenseMatrix) Array() []float64 {
	// Rows are contiguous exactly when step == cols. (The previous check
	// compared step against rows, which for a sliced non-square matrix
	// could return stride padding as if it were data.)
	if A.step == A.cols {
		return A.elements[0 : A.rows*A.cols]
	}
	a := make([]float64, A.rows*A.cols)
	for i := 0; i < A.rows; i++ {
		for j := 0; j < A.cols; j++ {
			a[i*A.cols+j] = A.elements[i*A.step+j]
		}
	}
	return a
}
// rowSlice returns the given row as a slice aliasing the backing array.
func (A *DenseMatrix) rowSlice(row int) []float64 {
	return A.elements[row*A.step : row*A.step+A.cols]
}

/*
Get the element in the ith row and jth column.
*/
func (A *DenseMatrix) Get(i int, j int) (v float64) {
	// reslicing like this does efficient range checks, perhaps:
	// the row reslice bounds-checks i, the index bounds-checks j < cols.
	v = A.elements[i*A.step : i*A.step+A.cols][j]
	//v = A.elements[i*A.step+j]
	return
}

// GetTuples is part of the matrix interface but has no dense implementation;
// it always panics.
func (A *DenseMatrix) GetTuples(row int) []IndexedValue {
	panic("not implemented")
}
/*
Set the element in the ith row and jth column to v.
*/
func (A *DenseMatrix) Set(i int, j int, v float64) {
	// reslicing like this does efficient range checks, perhaps:
	// the row reslice bounds-checks i, the index bounds-checks j < cols.
	A.elements[i*A.step : i*A.step+A.cols][j] = v
	//A.elements[i*A.step+j] = v
}

/*
Get a submatrix starting at i,j with rows rows and cols columns. Changes to
the returned matrix show up in the original (the backing array is shared;
only step bookkeeping differs).
*/
func (A *DenseMatrix) GetMatrix(i, j, rows, cols int) *DenseMatrix {
	B := new(DenseMatrix)
	B.elements = A.elements[i*A.step+j : i*A.step+j+(rows-1)*A.step+cols]
	B.rows = rows
	B.cols = cols
	B.step = A.step
	return B
}

/*
Copy B into A, with B's 0, 0 aligning with A's i, j
*/
func (A *DenseMatrix) SetMatrix(i, j int, B *DenseMatrix) {
	for r := 0; r < B.rows; r++ {
		for c := 0; c < B.cols; c++ {
			A.Set(i+r, j+c, B.Get(r, c))
		}
	}
}

// GetColVector returns column j as an A.rows x 1 view sharing A's data.
func (A *DenseMatrix) GetColVector(j int) *DenseMatrix {
	return A.GetMatrix(0, j, A.rows, 1)
}

// GetRowVector returns row i as a 1 x A.cols view sharing A's data.
func (A *DenseMatrix) GetRowVector(i int) *DenseMatrix {
	return A.GetMatrix(i, 0, 1, A.cols)
}

/*
Get a copy of this matrix with 0s above the diagonal (lower-triangular part).
*/
func (A *DenseMatrix) L() *DenseMatrix {
	B := A.Copy()
	for i := 0; i < A.rows; i++ {
		for j := i + 1; j < A.cols; j++ {
			B.Set(i, j, 0)
		}
	}
	return B
}

/*
Get a copy of this matrix with 0s below the diagonal (upper-triangular part).
*/
func (A *DenseMatrix) U() *DenseMatrix {
	B := A.Copy()
	for i := 0; i < A.rows; i++ {
		for j := 0; j < i && j < A.cols; j++ {
			B.Set(i, j, 0)
		}
	}
	return B
}

// Copy returns a deep copy of A with freshly allocated, contiguous
// (step == cols) backing storage.
func (A *DenseMatrix) Copy() *DenseMatrix {
	B := new(DenseMatrix)
	B.rows = A.rows
	B.cols = A.cols
	B.step = A.cols
	B.elements = make([]float64, B.rows*B.cols)
	for row := 0; row < B.rows; row++ {
		copy(B.rowSlice(row), A.rowSlice(row))
	}
	return B
}
/*
Get a new matrix [A B] (A and B side by side). Errors when the row counts
differ.
*/
func (A *DenseMatrix) Augment(B *DenseMatrix) (C *DenseMatrix, err error) {
	if A.rows != B.rows {
		err = ErrorDimensionMismatch
		return
	}
	C = Zeros(A.rows, A.cols+B.cols)
	err = A.AugmentFill(B, C)
	return
}

// AugmentFill writes [A B] into the caller-supplied C, which must already be
// A.rows x (A.cols+B.cols).
func (A *DenseMatrix) AugmentFill(B, C *DenseMatrix) (err error) {
	if A.rows != B.rows || C.rows != A.rows || C.cols != A.cols+B.cols {
		err = ErrorDimensionMismatch
		return
	}
	C.SetMatrix(0, 0, A)
	C.SetMatrix(0, A.cols, B)
	return
}

/*
Get a new matrix [A; B], with A above B. Errors when the column counts
differ.
*/
func (A *DenseMatrix) Stack(B *DenseMatrix) (C *DenseMatrix, err error) {
	if A.cols != B.cols {
		err = ErrorDimensionMismatch
		return
	}
	C = Zeros(A.rows+B.rows, A.cols)
	err = A.StackFill(B, C)
	return
}

// StackFill writes [A; B] into the caller-supplied C, which must already be
// (A.rows+B.rows) x A.cols.
func (A *DenseMatrix) StackFill(B, C *DenseMatrix) (err error) {
	if A.cols != B.cols || C.cols != A.cols || C.rows != A.rows+B.rows {
		err = ErrorDimensionMismatch
		return
	}
	C.SetMatrix(0, 0, A)
	C.SetMatrix(A.rows, 0, B)
	return
}

/*
Create a sparse matrix copy, storing only the non-zero elements.
*/
func (A *DenseMatrix) SparseMatrix() *SparseMatrix {
	B := ZerosSparse(A.rows, A.cols)
	for i := 0; i < A.rows; i++ {
		for j := 0; j < A.cols; j++ {
			v := A.Get(i, j)
			if v != 0 {
				B.Set(i, j, v)
			}
		}
	}
	return B
}

// DenseMatrix returns a deep copy of A (satisfies the generic conversion
// interface shared with other matrix kinds).
func (A *DenseMatrix) DenseMatrix() *DenseMatrix {
	return A.Copy()
}
// Zeros creates a rows x cols matrix with all elements 0 and contiguous
// storage (step == cols).
func Zeros(rows, cols int) *DenseMatrix {
	A := new(DenseMatrix)
	A.elements = make([]float64, rows*cols)
	A.rows = rows
	A.cols = cols
	A.step = cols
	return A
}

// Ones creates a rows x cols matrix with all elements 1.
func Ones(rows, cols int) *DenseMatrix {
	A := new(DenseMatrix)
	A.elements = make([]float64, rows*cols)
	A.rows = rows
	A.cols = cols
	A.step = cols
	for i := 0; i < len(A.elements); i++ {
		A.elements[i] = 1
	}
	return A
}

// Numbers creates a rows x cols matrix with every element set to num.
func Numbers(rows, cols int, num float64) *DenseMatrix {
	A := Zeros(rows, cols)
	for i := 0; i < A.Rows(); i++ {
		for j := 0; j < A.Cols(); j++ {
			A.Set(i, j, num)
		}
	}
	return A
}

/*
Create an identity matrix with span rows and span columns.
*/
func Eye(span int) *DenseMatrix {
	A := Zeros(span, span)
	for i := 0; i < span; i++ {
		A.Set(i, i, 1)
	}
	return A
}

// Normals creates a rows x cols matrix of standard-normal random values
// drawn from the package-global math/rand source.
func Normals(rows, cols int) *DenseMatrix {
	A := Zeros(rows, cols)
	for i := 0; i < A.Rows(); i++ {
		for j := 0; j < A.Cols(); j++ {
			A.Set(i, j, rand.NormFloat64())
		}
	}
	return A
}

// Diagonal creates a square matrix with d on the main diagonal and zeros
// elsewhere.
func Diagonal(d []float64) *DenseMatrix {
	n := len(d)
	A := Zeros(n, n)
	for i := 0; i < n; i++ {
		A.Set(i, i, d[i])
	}
	return A
}

// MakeDenseCopy builds a DenseMatrix copy of any read-only matrix, element
// by element.
func MakeDenseCopy(A MatrixRO) *DenseMatrix {
	B := Zeros(A.Rows(), A.Cols())
	for i := 0; i < B.rows; i++ {
		for j := 0; j < B.cols; j++ {
			B.Set(i, j, A.Get(i, j))
		}
	}
	return B
}

// MakeDenseMatrix wraps an existing row-major flat slice as a rows x cols
// matrix. The slice is NOT copied: later mutation of elements is visible
// through the matrix.
func MakeDenseMatrix(elements []float64, rows, cols int) *DenseMatrix {
	A := new(DenseMatrix)
	A.rows = rows
	A.cols = cols
	A.step = cols
	A.elements = elements
	return A
}
// MakeDenseMatrixStacked builds a DenseMatrix from a slice of row slices.
// The data is copied, so later mutation of data does not affect the matrix.
// All rows are assumed to be at least as long as data[0].
func MakeDenseMatrixStacked(data [][]float64) *DenseMatrix {
	rows := len(data)
	cols := len(data[0])
	elements := make([]float64, rows*cols)
	for i, row := range data {
		// copy lowers to a memmove and replaces the element-by-element loop;
		// the destination window caps each row at cols elements.
		copy(elements[i*cols:(i+1)*cols], row)
	}
	return MakeDenseMatrix(elements, rows, cols)
}
func (A *DenseMatrix) String() string { return String(A) } | dense.go | 0.678966 | 0.669677 | dense.go | starcoder |
package processor
import (
"fmt"
"sync"
"time"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/internal/tracing"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/util/aws/lambda/client"
)
//------------------------------------------------------------------------------
// init registers both the current aws_lambda processor and its deprecated
// lambda alias with the global processor constructor table.
func init() {
	// Current name (since 3.36.0): aws_lambda.
	Constructors[TypeAWSLambda] = TypeSpec{
		constructor: NewAWSLambda,
		Version:     "3.36.0",
		Categories: []Category{
			CategoryIntegration,
		},
		Summary: `
Invokes an AWS lambda for each message. The contents of the message is the
payload of the request, and the result of the invocation will become the new
contents of the message.`,
		Description: `
It is possible to perform requests per message of a batch in parallel by setting
the ` + "`parallel`" + ` flag to ` + "`true`" + `. The ` + "`rate_limit`" + `
field can be used to specify a rate limit [resource](/docs/components/rate_limits/about)
to cap the rate of requests across parallel components service wide.
In order to map or encode the payload to a specific request body, and map the
response back into the original payload instead of replacing it entirely, you
can use the ` + "[`branch` processor](/docs/components/processors/branch)" + `.
### Error Handling
When Benthos is unable to connect to the AWS endpoint or is otherwise unable to invoke the target lambda function it will retry the request according to the configured number of retries. Once these attempts have been exhausted the failed message will continue through the pipeline with it's contents unchanged, but flagged as having failed, allowing you to use [standard processor error handling patterns](/docs/configuration/error_handling).
However, if the invocation of the function is successful but the function itself throws an error, then the message will have it's contents updated with a JSON payload describing the reason for the failure, and a metadata field ` + "`lambda_function_error`" + ` will be added to the message allowing you to detect and handle function errors with a ` + "[`branch`](/docs/components/processors/branch)" + `:
` + "```yaml" + `
pipeline:
processors:
- branch:
processors:
- aws_lambda:
function: foo
result_map: |
root = if meta().exists("lambda_function_error") {
throw("Invocation failed due to %v: %v".format(this.errorType, this.errorMessage))
} else {
this
}
output:
switch:
retry_until_success: false
cases:
- check: errored()
output:
reject: ${! error() }
- output:
resource: somewhere_else
` + "```" + `
### Credentials
By default Benthos will use a shared credentials file when connecting to AWS
services. It's also possible to set them explicitly at the component level,
allowing you to transfer data across accounts. You can find out more
[in this document](/docs/guides/cloud/aws).`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("parallel", "Whether messages of a batch should be dispatched in parallel."),
		}.Merge(client.FieldSpecs()),
		Examples: []docs.AnnotatedExample{
			{
				Title: "Branched Invoke",
				Summary: `
This example uses a ` + "[`branch` processor](/docs/components/processors/branch/)" + ` to map a new payload for triggering a lambda function with an ID and username from the original message, and the result of the lambda is discarded, meaning the original message is unchanged.`,
				Config: `
pipeline:
processors:
- branch:
request_map: '{"id":this.doc.id,"username":this.user.name}'
processors:
- aws_lambda:
function: trigger_user_update
`,
			},
		},
	}
	// Deprecated alias: lambda. Kept for backward compatibility; its docs
	// point users at aws_lambda.
	Constructors[TypeLambda] = TypeSpec{
		constructor: NewLambda,
		Status:      docs.StatusDeprecated,
		Categories: []Category{
			CategoryIntegration,
		},
		Summary: `
Invokes an AWS lambda for each message. The contents of the message is the
payload of the request, and the result of the invocation will become the new
contents of the message.`,
		Description: `
## Alternatives
This processor has been renamed to ` + "[`aws_lambda`](/docs/components/processors/aws_lambda)" + `.
It is possible to perform requests per message of a batch in parallel by setting
the ` + "`parallel`" + ` flag to ` + "`true`" + `. The ` + "`rate_limit`" + `
field can be used to specify a rate limit [resource](/docs/components/rate_limits/about)
to cap the rate of requests across parallel components service wide.
In order to map or encode the payload to a specific request body, and map the
response back into the original payload instead of replacing it entirely, you
can use the ` + "[`branch` processor](/docs/components/processors/branch)" + `.
### Error Handling
When all retry attempts for a message are exhausted the processor cancels the
attempt. These failed messages will continue through the pipeline unchanged, but
can be dropped or placed in a dead letter queue according to your config, you
can read about these patterns [here](/docs/configuration/error_handling).
### Credentials
By default Benthos will use a shared credentials file when connecting to AWS
services. It's also possible to set them explicitly at the component level,
allowing you to transfer data across accounts. You can find out more
[in this document](/docs/guides/cloud/aws).`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("parallel", "Whether messages of a batch should be dispatched in parallel."),
		}.Merge(client.FieldSpecs()),
		Examples: []docs.AnnotatedExample{
			{
				Title: "Branched Invoke",
				Summary: `
This example uses a ` + "[`branch` processor](/docs/components/processors/branch/)" + ` to map a new payload for triggering a lambda function with an ID and username from the original message, and the result of the lambda is discarded, meaning the original message is unchanged.`,
				Config: `
pipeline:
processors:
- branch:
request_map: '{"id":this.doc.id,"username":this.user.name}'
processors:
- lambda:
function: trigger_user_update
`,
			},
		},
	}
}
//------------------------------------------------------------------------------
// LambdaConfig contains configuration fields for the Lambda processor.
type LambdaConfig struct {
	client.Config `json:",inline" yaml:",inline"`
	// Parallel dispatches batch parts concurrently when true.
	Parallel bool `json:"parallel" yaml:"parallel"`
}

// NewLambdaConfig returns a LambdaConfig with default values.
func NewLambdaConfig() LambdaConfig {
	return LambdaConfig{
		Config:   client.NewConfig(),
		Parallel: false,
	}
}

//------------------------------------------------------------------------------

// Lambda is a processor that invokes an AWS Lambda using the message as the
// request body, and returns the response.
type Lambda struct {
	// client wraps the AWS SDK invocation (retries, rate limiting).
	client   *client.Type
	parallel bool

	conf LambdaConfig
	log  log.Modular

	stats metrics.Type
	// Metric counters: invocations, lambda-specific errors, total errors,
	// parts sent, batches sent.
	mCount     metrics.StatCounter
	mErrLambda metrics.StatCounter
	mErr       metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}

// NewAWSLambda returns a Lambda processor built from the aws_lambda config
// section.
func NewAWSLambda(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	return newLambda(conf.AWSLambda, mgr, log, stats)
}

// NewLambda returns a Lambda processor built from the deprecated lambda
// config section.
func NewLambda(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	return newLambda(conf.Lambda, mgr, log, stats)
}

// newLambda is the shared constructor behind both registered names: it wires
// up metrics and creates the underlying lambda client.
func newLambda(
	conf LambdaConfig, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	l := &Lambda{
		conf:     conf,
		log:      log,
		stats:    stats,
		parallel: conf.Parallel,

		mCount:     stats.GetCounter("count"),
		mErrLambda: stats.GetCounter("error.lambda"),
		mErr:       stats.GetCounter("error"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}
	var err error
	if l.client, err = client.New(
		conf.Config,
		client.OptSetLogger(l.log),
		// TODO: V4 Remove this
		client.OptSetStats(metrics.Namespaced(l.stats, "client")),
		client.OptSetManager(mgr),
	); err != nil {
		return nil, err
	}
	return l, nil
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
// Each part becomes the lambda payload and is replaced by the invocation
// result; failed parts are flagged and pass through unchanged.
func (l *Lambda) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	l.mCount.Incr(1)
	var resultMsg types.Message
	if !l.parallel || msg.Len() == 1 {
		// Serial path: invoke once per part in order, mutating the copied
		// message in place; IteratePartsWithSpanV2 records tracing spans and
		// flags parts whose callback returns an error.
		resultMsg = msg.Copy()
		IteratePartsWithSpanV2("aws_lambda", nil, resultMsg, func(i int, _ *tracing.Span, p types.Part) error {
			if err := l.client.InvokeV2(p); err != nil {
				l.mErr.Incr(1)
				l.mErrLambda.Incr(1)
				l.log.Errorf("Lambda function '%v' failed: %v\n", l.conf.Config.Function, err)
				return err
			}
			return nil
		})
	} else {
		// Parallel path: one goroutine per part, each invoking against a
		// single-part view (message.Lock) and writing its result into a
		// pre-copied parts slice at its own index (no shared-slot races).
		parts := make([]types.Part, msg.Len())
		msg.Iter(func(i int, p types.Part) error {
			parts[i] = p.Copy()
			return nil
		})
		wg := sync.WaitGroup{}
		wg.Add(msg.Len())
		for i := 0; i < msg.Len(); i++ {
			go func(index int) {
				result, err := l.client.Invoke(message.Lock(msg, index))
				if err == nil && result.Len() != 1 {
					err = fmt.Errorf("unexpected response size: %v", result.Len())
				}
				if err != nil {
					l.mErr.Incr(1)
					l.mErrLambda.Incr(1)
					l.log.Errorf("Lambda parallel request to '%v' failed: %v\n", l.conf.Config.Function, err)
					// Keep the original part contents but mark it as failed.
					FlagErr(parts[index], err)
				} else {
					parts[index] = result.Get(0)
				}
				wg.Done()
			}(i)
		}
		wg.Wait()
		resultMsg = message.New(nil)
		resultMsg.SetAll(parts)
	}
	msgs := [1]types.Message{resultMsg}
	l.mBatchSent.Incr(1)
	l.mSent.Incr(int64(resultMsg.Len()))
	return msgs[:], nil
}

// CloseAsync shuts down the processor and stops processing requests. This
// processor starts no background goroutines, so it is a no-op.
func (l *Lambda) CloseAsync() {
}

// WaitForClose blocks until the processor has closed down. Returns
// immediately because CloseAsync is a no-op.
func (l *Lambda) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------
package common
// Len returns the length of the specified domain:
// no args — number of top-level domains in the map;
// one arg — length of that domain's list;
// two args — length of the nested domain within the first.
// More than two domains yields 0.
func (m *AddressMap) Len(domains ...*Domain) int {
	switch len(domains) {
	case 0:
		// Len of the map
		return len(m.GetMap())
	case 1:
		// Len of particular domain
		return m.GetList(domains[0]).Len()
	case 2:
		// Len of particular domain within particular domain
		return m.GetList(domains[0]).Len(domains[1])
	}
	return 0
}

// Has checks whether the specified domain (or nested domain pair) exists.
// Zero or more than two domains always reports false.
func (m *AddressMap) Has(domains ...*Domain) bool {
	switch len(domains) {
	case 1:
		return m.GetList(domains[0]) != nil
	case 2:
		return m.GetList(domains[0]).Has(domains[1])
	}
	return false
}

// Append wraps AddressList.Append
// Call example:
// Append([domain,] address0, [address1,...])
// When no domain is supplied, DomainThis is used (see normalizeParams).
// Returns the receiver for chaining.
func (m *AddressMap) Append(entities ...interface{}) *AddressMap {
	domains, addresses := m.normalizeParams(true, entities...)
	if len(domains) > 0 {
		// Domain extracted, append provided addresses to specified domain
		m.EnsureList(domains[0]).Append(addresses...)
	}
	return m
}

// Set wraps Replace (alias kept for API symmetry).
func (m *AddressMap) Set(entities ...interface{}) *AddressMap {
	return m.Replace(entities...)
}
// normalizeParams builds the list of domains and the list of addresses out
// of the provided entities, preserving their relative order. Entities of any
// other type are ignored. When insertDefaultDomain is true and no domain was
// supplied, DomainThis is injected as the single default domain.
func (m *AddressMap) normalizeParams(insertDefaultDomain bool, entities ...interface{}) ([]*Domain, []*Address) {
	var domains []*Domain
	var addresses []*Address
	for _, e := range entities {
		if d, ok := e.(*Domain); ok {
			domains = append(domains, d)
			continue
		}
		if a, ok := e.(*Address); ok {
			addresses = append(addresses, a)
		}
	}
	if insertDefaultDomain && len(domains) == 0 {
		domains = append(domains, DomainThis)
	}
	return domains, addresses
}
// Replace wraps ReplaceList and ReplaceAddresses
// Call example:
// Replace([domain0, {domain1, domain2,... nested domains to be replaced with provided addresses}] address0[, address1,...])
// Note: normalizeParams(true, ...) injects DomainThis when no domain is
// supplied, so the zero-domain case only triggers if that default changes.
func (m *AddressMap) Replace(entities ...interface{}) *AddressMap {
	domains, addresses := m.normalizeParams(true, entities...)
	switch len(domains) {
	case 0:
		// No domains specified, don't know what to do
		return m
	case 1:
		// Replace whole AddressList with specified addresses
		return m.ReplaceList(domains[0], addresses...)
	}
	// Replace some nested domains with specified addresses
	return m.ReplaceAddresses(domains[0], domains[1:], addresses...)
}

// ReplaceList replaces whole AddressList with specified addresses
// A fresh list is created for the domain, discarding any previous entries.
func (m *AddressMap) ReplaceList(domain *Domain, addresses ...*Address) *AddressMap {
	m.NewList(domain).Append(addresses...)
	return m
}
// ReplaceAddresses replaces specified deleteDomains within domain with provided addresses
// The current list is filtered to drop deleteDomains, the provided addresses
// are appended to that filtered copy, and the result replaces the domain's
// list wholesale via ReplaceList.
func (m *AddressMap) ReplaceAddresses(domain *Domain, deleteDomains []*Domain, addresses ...*Address) *AddressMap {
	return m.ReplaceList(
		domain,
		m.EnsureList(domain). // get current AddressList
					Exclude(deleteDomains...). // create new AddressList w/o deleteDomains. New AddressList can be nil
					Ensure(). // but we definitely need to have this list, thus ensure it exists
					Append(addresses...). // append to new AddressList w/o deleteDomains provided addresses
					All()..., // get AddressList as slice
	)
}
// First wraps AddressList.First
// With no domains it returns the first address found in any list; with one
// or more domains it delegates to the named list, forwarding all remaining
// (nested) domains.
func (m *AddressMap) First(domains ...*Domain) *Address {
	switch len(domains) {
	case 0:
		// First of any available addresses
		if lists := m.GetLists(); lists != nil {
			for _, list := range lists {
				if first := list.First(); first != nil {
					return first
				}
			}
		}
		return nil
	case 1:
		// First of a particular domain
		return m.GetList(domains[0]).First()
	default:
		// First within nested domains. Previously only exactly two domains
		// were handled and deeper nesting silently returned nil even though
		// the variadic forwarding below already supports it.
		return m.GetList(domains[0]).First(domains[1:]...)
	}
}
// All wraps AddressList.All
func (m *AddressMap) All(domains ...*Domain) []*Address {
switch len(domains) {
case 1:
return m.GetList(domains[0]).All()
case 2:
return m.GetList(domains[0]).All(domains[1:]...)
}
return nil
} | pkg/api/common/address_map.jacket.go | 0.714329 | 0.407333 | address_map.jacket.go | starcoder |
package registry
import (
"fmt"
"github.com/sirupsen/logrus"
"k8s.io/apimachinery/pkg/util/sets"
)
// Type identifies the type of registry element a Node refers to
type Type int

const (
	Workflow Type = iota
	Chain
	Reference
)

// nodeTypes maps a Type to its human-readable name. The keyed array literal
// keeps each string aligned with the constant's value regardless of order.
var nodeTypes = [3]string{Workflow: "workflow", Reference: "reference", Chain: "chain"}

// Node is an interface that allows a user to identify ancestors and descendants of a step registry element
type Node interface {
	// Name returns the name of the registry element a Node refers to
	Name() string
	// Type returns the type of the registry element a Node refers to
	Type() Type
	// Ancestors returns a set of strings containing the names of all of the node's ancestors
	Ancestors() []Node
	// Descendants returns a set of strings containing the names of all of the node's descendants
	Descendants() []Node
	// Parents returns a set of strings containing the names of all the node's parents
	Parents() []Node
	// Children returns a set of strings containing the names of all the node's children
	Children() []Node
}

// NodeByName provides a mapping from node name to the Node interface
type NodeByName struct {
	References map[string]Node
	Chains map[string]Node
	Workflows map[string]Node
}
// nodeWithName carries the element name shared by every node kind.
type nodeWithName struct {
	name string
}

// nodeWithParents tracks the direct workflow/chain parents of a node.
type nodeWithParents struct {
	workflowParents workflowNodeSet
	chainParents chainNodeSet
}

// nodeWithChildren tracks the direct chain/reference children of a node.
type nodeWithChildren struct {
	chainChildren chainNodeSet
	referenceChildren referenceNodeSet
}

// workflowNode is a root element: it has children but never parents.
type workflowNode struct {
	nodeWithName
	nodeWithChildren
}

// chainNode sits in the middle of the graph: it has both parents and children.
type chainNode struct {
	nodeWithName
	nodeWithParents
	nodeWithChildren
}

// referenceNode is a leaf element: it has parents but never children.
type referenceNode struct {
	nodeWithName
	nodeWithParents
}

// Verify that all node types implement Node
var _ Node = &workflowNode{}
var _ Node = &chainNode{}
var _ Node = &referenceNode{}

// internal node type sets
type workflowNodeSet map[*workflowNode]sets.Empty
type chainNodeSet map[*chainNode]sets.Empty
type referenceNodeSet map[*referenceNode]sets.Empty

// Name -> internal node type maps
type workflowNodeByName map[string]*workflowNode
type chainNodeByName map[string]*chainNode
type referenceNodeByName map[string]*referenceNode
// insert adds node to the set; duplicates are no-ops (set semantics).
func (set workflowNodeSet) insert(node *workflowNode) {
	set[node] = sets.Empty{}
}

func (set chainNodeSet) insert(node *chainNode) {
	set[node] = sets.Empty{}
}

func (set referenceNodeSet) insert(node *referenceNode) {
	set[node] = sets.Empty{}
}

// Name returns the registry element name this node refers to.
func (n *nodeWithName) Name() string {
	return n.name
}

func (*workflowNode) Type() Type {
	return Workflow
}

func (*chainNode) Type() Type {
	return Chain
}

func (*referenceNode) Type() Type {
	return Reference
}

// Parents returns the node's direct workflow and chain parents.
// Iteration over the map sets makes the ordering nondeterministic.
func (n *nodeWithParents) Parents() []Node {
	var parents []Node
	for parent := range n.workflowParents {
		parents = append(parents, parent)
	}
	for parent := range n.chainParents {
		parents = append(parents, parent)
	}
	return parents
}

// Workflows are root elements and have no parents.
func (*workflowNode) Parents() []Node { return []Node{} }

// Children returns the node's direct reference and chain children.
func (n *nodeWithChildren) Children() []Node {
	var children []Node
	for child := range n.referenceChildren {
		children = append(children, child)
	}
	for child := range n.chainChildren {
		children = append(children, child)
	}
	return children
}

// References are leaf elements and have no children.
func (*referenceNode) Children() []Node { return []Node{} }
// Ancestors returns the node's parents plus, transitively, the ancestors of
// every chain parent (workflow parents are roots and contribute only
// themselves). The result may contain duplicates when chains share ancestry.
func (n *nodeWithParents) Ancestors() []Node {
	ancestors := n.Parents()
	for parent := range n.chainParents {
		ancestors = append(ancestors, parent.Ancestors()...)
	}
	return ancestors
}

// Workflows are root elements and have no ancestors.
func (*workflowNode) Ancestors() []Node { return []Node{} }

// Descendants returns the node's children plus, transitively, the
// descendants of every chain child. The result may contain duplicates.
func (n *nodeWithChildren) Descendants() []Node {
	descendants := n.Children()
	for child := range n.chainChildren {
		descendants = append(descendants, child.Descendants()...)
	}
	return descendants
}

// References are leaf elements and have no descendants.
func (*referenceNode) Descendants() []Node { return []Node{} }
// addChainChild links child under the workflow and records the back-pointer
// from the child to this parent, keeping both directions consistent.
func (n *workflowNode) addChainChild(child *chainNode) {
	n.chainChildren.insert(child)
	child.workflowParents.insert(n)
}

func (n *workflowNode) addReferenceChild(child *referenceNode) {
	n.referenceChildren.insert(child)
	child.workflowParents.insert(n)
}

func (n *chainNode) addChainChild(child *chainNode) {
	n.chainChildren.insert(child)
	child.chainParents.insert(n)
}

func (n *chainNode) addReferenceChild(child *referenceNode) {
	n.referenceChildren.insert(child)
	child.chainParents.insert(n)
}

// FieldsForNode returns logrus fields identifying a node by name and type.
func FieldsForNode(n Node) logrus.Fields {
	return logrus.Fields{
		"node-name": n.Name(),
		"node-type": nodeTypes[n.Type()],
	}
}

// newNodeWithName builds the shared name component.
func newNodeWithName(name string) nodeWithName {
	return nodeWithName{name: name}
}

// newNodeWithParents builds an empty, ready-to-use parent-tracking component.
func newNodeWithParents() nodeWithParents {
	return nodeWithParents{
		chainParents: make(chainNodeSet),
		workflowParents: make(workflowNodeSet),
	}
}

// newNodeWithChildren builds an empty, ready-to-use child-tracking component.
func newNodeWithChildren() nodeWithChildren {
	return nodeWithChildren{
		chainChildren: make(chainNodeSet),
		referenceChildren: make(referenceNodeSet),
	}
}
// hasCycles walks node's chain children depth-first and returns an error if
// any chain transitively includes itself. ancestors holds the chain names on
// the current path; traversedPath records the route for the error message.
// Each recursion receives copies of both so sibling branches cannot pollute
// one another's state.
func hasCycles(node *chainNode, ancestors sets.String, traversedPath []string) error {
	if ancestors == nil {
		ancestors = sets.NewString()
	}
	if ancestors.Has(node.name) {
		return fmt.Errorf("Cycle detected: %s is an ancestor of itself; traversedPath: %v", node.name, append(traversedPath, node.name))
	}
	ancestors.Insert(node.name)
	for child := range node.chainChildren {
		// The former `child.Type() != Chain` guard was dead code: every key
		// of a chainNodeSet is a *chainNode, whose Type() is always Chain.
		// get new copy of ancestors and traversedPath so the root node's set isn't changed
		ancestorsCopy := sets.NewString(ancestors.UnsortedList()...)
		traversedPathCopy := append(traversedPath[:0:0], traversedPath...)
		traversedPathCopy = append(traversedPathCopy, node.name)
		if err := hasCycles(child, ancestorsCopy, traversedPathCopy); err != nil {
			return err
		}
	}
	return nil
}
// NewGraph returns a NodeByType map representing the provided step references, chains, and workflows as a directed graph.
func NewGraph(stepsByName ReferenceByName, chainsByName ChainByName, workflowsByName WorkflowByName) (NodeByName, error) {
nodesByName := NodeByName{
References: make(map[string]Node),
Chains: make(map[string]Node),
Workflows: make(map[string]Node),
}
// References can only be children; load them so they can be added as children by workflows and chains
referenceNodes := make(referenceNodeByName)
for name := range stepsByName {
node := &referenceNode{
nodeWithName: newNodeWithName(name),
nodeWithParents: newNodeWithParents(),
}
referenceNodes[name] = node
nodesByName.References[name] = node
}
// since we may load the parent chain before a child chain, we need to make the parent->child links after loading all chains
parentChildChain := make(map[*chainNode][]string)
chainNodes := make(chainNodeByName)
for name, chain := range chainsByName {
node := &chainNode{
nodeWithName: newNodeWithName(name),
nodeWithChildren: newNodeWithChildren(),
nodeWithParents: newNodeWithParents(),
}
chainNodes[name] = node
nodesByName.Chains[name] = node
for _, step := range chain.Steps {
if step.Reference != nil {
if _, exists := referenceNodes[*step.Reference]; !exists {
return nodesByName, fmt.Errorf("Chain %s contains non-existent reference %s", name, *step.Reference)
}
node.addReferenceChild(referenceNodes[*step.Reference])
}
if step.Chain != nil {
parentChildChain[node] = append(parentChildChain[node], *step.Chain)
}
}
}
for parent, children := range parentChildChain {
for _, child := range children {
if _, exists := chainNodes[child]; !exists {
return nodesByName, fmt.Errorf("Chain %s contains non-existent chain %s", parent.Name(), child)
}
parent.addChainChild(chainNodes[child])
}
}
// verify that no cycles exist
for _, chain := range chainNodes {
if err := hasCycles(chain, sets.NewString(), []string{}); err != nil {
return nodesByName, err
}
}
workflowNodes := make(workflowNodeByName)
for name, workflow := range workflowsByName {
node := &workflowNode{
nodeWithName: newNodeWithName(name),
nodeWithChildren: newNodeWithChildren(),
}
workflowNodes[name] = node
nodesByName.Workflows[name] = node
steps := append(workflow.Pre, append(workflow.Test, workflow.Post...)...)
for _, step := range steps {
if step.Reference != nil {
if _, exists := referenceNodes[*step.Reference]; !exists {
return nodesByName, fmt.Errorf("Workflow %s contains non-existent reference %s", name, *step.Reference)
}
node.addReferenceChild(referenceNodes[*step.Reference])
}
if step.Chain != nil {
if _, exists := chainNodes[*step.Chain]; !exists {
return nodesByName, fmt.Errorf("Workflow %s contains non-existent chain %s", name, *step.Chain)
}
node.addChainChild(chainNodes[*step.Chain])
}
}
}
return nodesByName, nil
} | pkg/registry/graph.go | 0.755005 | 0.470189 | graph.go | starcoder |
package stoichiometry
import (
"math"
"strconv"
"strings"
)
// MaxFuelForOre returns the maximum amount of fuel that can be produced with
// the given ore. It binary-searches for the largest fuel amount whose ore
// cost does not exceed the available ore.
func MaxFuelForOre(reactions string, ore int64) int64 {
	reactionLookup := parseReactions(reactions)
	oreForOneFuel := calcMinOreForFuel(reactionLookup, 1)
	// Leftover intermediates make batch production at least as efficient as
	// producing fuel one unit at a time, so ore/oreForOneFuel is always
	// affordable and serves as the initial lower bound.
	lowerFuelGuess := ore / oreForOneFuel
	upperFuelGuess := 2 * lowerFuelGuess
	if upperFuelGuess == 0 {
		upperFuelGuess = 1
	}
	// Grow the upper bound until it is genuinely unaffordable: leftovers can
	// make 2*lowerFuelGuess still affordable, which previously capped the
	// result too early.
	for calcMinOreForFuel(reactionLookup, upperFuelGuess) <= ore {
		lowerFuelGuess = upperFuelGuess
		upperFuelGuess *= 2
	}
	// Invariant: lowerFuelGuess is affordable, upperFuelGuess is not.
	for upperFuelGuess-lowerFuelGuess > 1 {
		centerFuelGuess := lowerFuelGuess + (upperFuelGuess-lowerFuelGuess)/2
		centerOre := calcMinOreForFuel(reactionLookup, centerFuelGuess)
		// <= rather than <: a guess costing exactly the available ore is
		// still achievable and must stay in the affordable half, otherwise
		// the search returned one unit of fuel too few.
		if centerOre <= ore {
			lowerFuelGuess = centerFuelGuess
		} else {
			upperFuelGuess = centerFuelGuess
		}
	}
	return lowerFuelGuess
}

// MinOreForFuel calculates the ORE needed to produce a target amount of FUEL.
func MinOreForFuel(reactions string, targetFuel int64) int64 {
	reactionLookup := parseReactions(reactions)
	return calcMinOreForFuel(reactionLookup, targetFuel)
}

// calcMinOreForFuel computes the minimum ORE required to produce targetFuel
// units of FUEL, accounting for reusable leftovers of intermediate elements.
func calcMinOreForFuel(reactionLookup map[string]reaction, targetFuel int64) int64 {
	neededElements := make(map[string]int64)
	leftovers := make(map[string]int64)
	calcNeededElements(element{name: "FUEL", amount: targetFuel}, reactionLookup, neededElements, leftovers)
	return calcOreFromElements(reactionLookup, neededElements)
}

// calcNeededElements recursively tallies, into neededElements, how many units
// of each intermediate element must be produced (rounded up to whole reaction
// batches), tracking surplus units in leftovers so they can be consumed by
// later demands instead of triggering extra batches.
func calcNeededElements(currentElement element, reactions map[string]reaction, neededElements map[string]int64, leftovers map[string]int64) {
	reaction := reactions[currentElement.name]
	for _, reactingElement := range reaction.from {
		if reactingElement.name == "ORE" {
			// Base case: this element is produced directly from ORE.
			// NOTE: this assumes ORE-producing reactions consume only ORE
			// (the whole input list is skipped), which holds for Advent of
			// Code day 14 inputs.
			return
		}
		parentElementFactor := int64(math.Ceil(float64(currentElement.amount) / float64(reaction.to.amount)))
		reactionForReactingElement := reactions[reactingElement.name]
		// Demand for this ingredient, reduced by any surplus from earlier batches.
		neededElementsForReactingElement := reactingElement.amount*parentElementFactor - leftovers[reactingElement.name]
		reactionFactor := int64(math.Ceil(float64(neededElementsForReactingElement) / float64(reactionForReactingElement.to.amount)))
		actualElementsForReaction := reactionFactor * reactionForReactingElement.to.amount
		leftovers[reactingElement.name] = actualElementsForReaction - neededElementsForReactingElement
		neededElements[reactingElement.name] += actualElementsForReaction
		calcNeededElements(element{name: reactingElement.name, amount: neededElementsForReactingElement}, reactions, neededElements, leftovers)
	}
}

// calcOreFromElements sums the ORE cost of every element that is produced
// directly from ORE, rounding each up to whole reaction batches.
func calcOreFromElements(reactions map[string]reaction, neededElements map[string]int64) int64 {
	amountOfOre := int64(0)
	for element, neededAmount := range neededElements {
		reactionForElement := reactions[element]
		if reactionForElement.from[0].name != "ORE" {
			continue
		}
		factor := int64(math.Ceil(float64(neededAmount) / float64(reactionForElement.to.amount)))
		amountOfOre += factor * reactionForElement.from[0].amount
	}
	return amountOfOre
}

// parseReactions parses the newline-separated reaction list ("a X, b Y => c Z")
// into a map keyed by the produced element's name.
func parseReactions(reactionsString string) map[string]reaction {
	elements := make(map[string]reaction)
	lines := strings.Split(reactionsString, "\n")
	for _, line := range lines {
		equationSides := strings.Split(line, " => ")
		rightSideElement := parseElement(equationSides[1])
		leftSideElementParts := strings.Split(equationSides[0], ", ")
		leftSideElements := []element{}
		for _, leftPart := range leftSideElementParts {
			leftSideElements = append(leftSideElements, parseElement(leftPart))
		}
		elements[rightSideElement.name] = reaction{from: leftSideElements, to: rightSideElement}
	}
	return elements
}

// parseElement parses a single "amount NAME" token. The parse error is
// deliberately ignored: malformed puzzle input yields an amount of 0.
func parseElement(elementString string) element {
	elementParts := strings.Split(elementString, " ")
	amount, _ := strconv.ParseInt(elementParts[0], 10, 64)
	return element{
		name:   elementParts[1],
		amount: amount,
	}
}

// reaction produces `to` from the ingredient list `from`.
type reaction struct {
	from []element
	to   element
}

// element is a named chemical quantity.
type element struct {
	name   string
	amount int64
}
package day10
import (
"fmt"
"math"
"sort"
"strings"
"unicode"
)
// asteroid is a grid position within the space map: x is the column,
// y is the row (y grows downward, matching the map's line order).
type asteroid struct {
	x int
	y int
}
// blastAsteroids vaporizes up to `rounds` asteroids visible from the
// station, in the order a clockwise-rotating laser (starting straight up)
// would hit them, and returns the last asteroid vaporized. Each vaporized
// asteroid's cell is overwritten with its 1-based shot number.
// NOTE(review): only asteroids directly visible from the station are
// considered, so a `rounds` beyond one full sweep does not trigger a second
// rotation here — confirm callers never rely on multi-sweep behavior.
func blastAsteroids(spaceMap *[][]string, station asteroid, rounds int) (last asteroid) {
	removeShootNumbers(spaceMap)
	reachableAsteroids := getDetectedAsteroidsInternal(spaceMap, station.x, station.y)
	// Order by the rotating laser's angle, 0 = straight up, clockwise.
	sort.Slice(reachableAsteroids, func(i, j int) bool {
		return getRotatingLaserAngleFromCoordAngle(getAngle(station, reachableAsteroids[i])) < getRotatingLaserAngleFromCoordAngle(getAngle(station, reachableAsteroids[j]))
	})
	for i := 0; i < rounds && i < len(reachableAsteroids); i++ {
		last = reachableAsteroids[i]
		(*spaceMap)[last.y][last.x] = fmt.Sprint(i + 1)
	}
	return
}
// removeShootNumbers resets every cell holding a shot number (a digit string
// written by blastAsteroids) back to empty space ".".
func removeShootNumbers(spaceMap *[][]string) {
	for _, row := range *spaceMap {
		for i := range row {
			if unicode.IsNumber(rune(row[i][0])) {
				row[i] = "."
			}
		}
	}
}

// getRotatingLaserAngleFromCoordAngle translates a coordinate-system angle
// (as produced by getAngle) into the rotating laser's angle, where 0 points
// straight up and values grow clockwise in the range [0, 360).
func getRotatingLaserAngleFromCoordAngle(angle float64) float64 {
	laserAngle := angle + 90
	if angle > 270 {
		laserAngle = angle - 270
	}
	if laserAngle == 360 {
		return 0
	}
	return laserAngle
}
// getBestStationAsteroid evaluates every asteroid as a candidate monitoring
// station and returns the one that detects the most other asteroids,
// together with those detected asteroids and the (annotated) map. Each
// candidate's cell is overwritten with its detection count.
func getBestStationAsteroid(rawMap string) (bestStation asteroid, detectedAsteroids []asteroid, spaceMap [][]string) {
	spaceMap = getMap(rawMap)
	possibleStations := getAllAsteroids(&spaceMap)
	for _, possibleStation := range possibleStations {
		// NOTE(review): this copies only the outer slice header, not the
		// rows, so getDetectedAsteroidsInternal still annotates the shared
		// spaceMap — confirm whether a deep copy was intended here.
		spaceMapCopy := spaceMap
		detectedAsteroidsTmp := getDetectedAsteroidsInternal(&spaceMapCopy, possibleStation.x, possibleStation.y)
		spaceMap[possibleStation.y][possibleStation.x] = fmt.Sprint(len(detectedAsteroidsTmp))
		if len(detectedAsteroidsTmp) > len(detectedAsteroids) {
			detectedAsteroids = detectedAsteroidsTmp
			bestStation = possibleStation
		}
	}
	return
}

// getDetectedAsteroids returns all asteroids visible from the asteroid whose
// map cell equals monitoringStationLabel, plus the parsed map.
func getDetectedAsteroids(rawMap, monitoringStationLabel string) (detectedAsteroids []asteroid, spaceMap [][]string) {
	spaceMap = getMap(rawMap)
	x, y := findPosition(&spaceMap, monitoringStationLabel)
	detectedAsteroids = getDetectedAsteroidsInternal(&spaceMap, x, y)
	return
}
// getDetectedAsteroidsInternal returns the asteroids directly visible from
// (x, y). Asteroids are visited in order of increasing distance, so any
// later asteroid sharing a polar angle with an already-detected one is
// hidden behind it. As a side effect, a hidden asteroid's cell is lowercased
// when the blocking asteroid's cell holds a letter label.
func getDetectedAsteroidsInternal(spaceMap *[][]string, x, y int) []asteroid {
	station := asteroid{x: x, y: y}
	asteroids := getAllAsteroids(spaceMap)
	sort.Slice(asteroids, func(i, j int) bool {
		return getDistance(station, asteroids[i]) < getDistance(station, asteroids[j])
	})
	detectedAsteroids := make([]asteroid, 0)
	// asteroids[0] is skipped: it is assumed to be the station itself
	// (distance 0) — this holds when (x, y) names an asteroid cell.
	for _, asteroid := range asteroids[1:] {
		if hidingAsteroid := getHiding(&station, &detectedAsteroids, &asteroid); hidingAsteroid != nil {
			symbol := (*spaceMap)[hidingAsteroid.y][hidingAsteroid.x]
			if unicode.IsLetter(rune(symbol[0])) {
				(*spaceMap)[asteroid.y][asteroid.x] = strings.ToLower(symbol)
			}
		} else {
			detectedAsteroids = append(detectedAsteroids, asteroid)
		}
	}
	return detectedAsteroids
}

// getHiding returns the already-detected asteroid that blocks the line of
// sight from station to asteroid, or nil when the view is clear. Returning
// &detectedAsteroid inside the loop is safe even pre-Go 1.22 because the
// function returns immediately, before the loop variable is reused.
func getHiding(station *asteroid, detectedAsteroids *[]asteroid, asteroid *asteroid) *asteroid {
	for _, detectedAsteroid := range *detectedAsteroids {
		if isHiding(station, &detectedAsteroid, asteroid) {
			return &detectedAsteroid
		}
	}
	return nil
}

// isHiding reports whether detectedAsteroid and asteroid lie at exactly the
// same polar angle from station, i.e. the nearer one hides the farther one.
// NOTE(review): this relies on exact float64 equality of Atan2 results for
// collinear integer offsets — verify it is adequate for the intended inputs.
func isHiding(station *asteroid, detectedAsteroid *asteroid, asteroid *asteroid) bool {
	// Using polar coordinate system
	angle1 := getAngle(*station, *detectedAsteroid)
	angle2 := getAngle(*station, *asteroid)
	return angle1 == angle2
}
// getDistance returns the Euclidean distance between two asteroids.
func getDistance(asteroid1, asteroid2 asteroid) float64 {
	dx := float64(asteroid2.x) - float64(asteroid1.x)
	dy := float64(asteroid2.y) - float64(asteroid1.y)
	return math.Sqrt(math.Pow(dx, 2) + math.Pow(dy, 2))
}

// getAngle returns the polar angle in degrees [0, 360) of asteroid2 as seen
// from asteroid1, measured with math.Atan2 in the map's coordinate system
// (y grows downward).
func getAngle(asteroid1, asteroid2 asteroid) float64 {
	dy := float64(asteroid2.y) - float64(asteroid1.y)
	dx := float64(asteroid2.x) - float64(asteroid1.x)
	// Keep the original 180/Pi*radian evaluation order so results stay
	// bit-identical; isHiding compares these angles with ==.
	degree := 180 / math.Pi * math.Atan2(dy, dx)
	if degree < 0 {
		degree += 360
	}
	return degree
}
// getAllAsteroids collects the coordinates of every cell that is not empty
// space ("."), scanning rows top to bottom and columns left to right.
func getAllAsteroids(spaceMap *[][]string) []asteroid {
	asteroids := make([]asteroid, 0)
	for y := range *spaceMap {
		for x, cell := range (*spaceMap)[y] {
			if cell != "." {
				asteroids = append(asteroids, asteroid{x: x, y: y})
			}
		}
	}
	return asteroids
}
// findPosition returns the (x, y) coordinates of the first cell equal to
// element, scanning rows top to bottom; (-1, -1) when absent.
func findPosition(spaceMap *[][]string, element string) (x, y int) {
	for row := range *spaceMap {
		for col := range (*spaceMap)[row] {
			if (*spaceMap)[row][col] == element {
				return col, row
			}
		}
	}
	return -1, -1
}

// getMap parses the newline-separated raw map into a grid of single-rune
// string cells, one row per line.
func getMap(rawMap string) [][]string {
	rows := strings.Split(rawMap, "\n")
	grid := make([][]string, len(rows))
	for y, row := range rows {
		// Splitting on the empty separator yields one string per UTF-8 rune,
		// matching the original rune-by-rune conversion.
		grid[y] = strings.Split(row, "")
	}
	return grid
}
// printMap renders the grid back into its newline-separated string form,
// with no trailing newline.
func printMap(spaceMap [][]string) string {
	rows := make([]string, len(spaceMap))
	for y, row := range spaceMap {
		rows[y] = strings.Join(row, "")
	}
	return strings.Join(rows, "\n")
}
package gio
import (
"image"
"image/color"
"gioui.org/f32"
"gioui.org/layout"
"gioui.org/op"
"gioui.org/op/clip"
"gioui.org/op/paint"
"github.com/tdewolff/canvas"
)
// Gio renders canvas drawing operations into a Gio op list. The canvas is
// width x height (millimeters, per Size); xScale/yScale convert canvas units
// into output pixels and dimensions is the resulting widget size.
type Gio struct {
	ops *op.Ops
	width, height float64
	xScale, yScale float64
	dimensions layout.Dimensions
}

// New returns a Gio renderer of fixed size.
// The canvas maps 1:1 onto output pixels (both scale factors are 1).
func New(gtx layout.Context, width, height float64) *Gio {
	dimensions := layout.Dimensions{Size: image.Point{int(width + 0.5), int(height + 0.5)}}
	return &Gio{
		ops: gtx.Ops,
		width: width,
		height: height,
		xScale: 1.0,
		yScale: 1.0,
		dimensions: dimensions,
	}
}

// NewContain returns a Gio renderer that fills the constraints either horizontally or vertically, whichever is met first.
// Both axes share the smaller of the two scale factors, preserving the
// aspect ratio.
func NewContain(gtx layout.Context, width, height float64) *Gio {
	xScale := float64(gtx.Constraints.Max.X-gtx.Constraints.Min.X) / width
	yScale := float64(gtx.Constraints.Max.Y-gtx.Constraints.Min.Y) / height
	if yScale < xScale {
		xScale = yScale
	} else {
		yScale = xScale
	}
	dimensions := layout.Dimensions{Size: image.Point{int(width*xScale + 0.5), int(height*yScale + 0.5)}}
	return &Gio{
		ops: gtx.Ops,
		width: width,
		height: height,
		xScale: xScale,
		yScale: yScale,
		dimensions: dimensions,
	}
}

// NewStretch returns a Gio renderer that stretches the view to fit the constraints.
// The axes scale independently, so the aspect ratio may change.
func NewStretch(gtx layout.Context, width, height float64) *Gio {
	xScale := float64(gtx.Constraints.Max.X-gtx.Constraints.Min.X) / width
	yScale := float64(gtx.Constraints.Max.Y-gtx.Constraints.Min.Y) / height
	dimensions := layout.Dimensions{Size: image.Point{int(width*xScale + 0.5), int(height*yScale + 0.5)}}
	return &Gio{
		ops: gtx.Ops,
		width: width,
		height: height,
		xScale: xScale,
		yScale: yScale,
		dimensions: dimensions,
	}
}
// Dimensions returns the dimensions for Gio.
func (r *Gio) Dimensions() layout.Dimensions {
	return r.dimensions
}

// Size returns the size of the canvas in millimeters.
func (r *Gio) Size() (float64, float64) {
	return r.width, r.height
}

// point converts a canvas point (y grows upward) into Gio device
// coordinates (y grows downward), applying the scale factors.
func (r *Gio) point(p canvas.Point) f32.Point {
	return f32.Point{float32(r.xScale * p.X), float32(r.yScale * (r.height - p.Y))}
}

// renderPath fills one path with a solid color via a Gio clip outline.
// Arc segments are first approximated (ReplaceArcs); the ArcToCmd case below
// is a straight-line fallback, presumably unreachable after that call —
// hence the remaining TODO for native arc support.
func (r *Gio) renderPath(path *canvas.Path, col color.RGBA) {
	path = path.ReplaceArcs()
	p := clip.Path{}
	p.Begin(r.ops)
	for _, seg := range path.Segments() {
		switch seg.Cmd {
		case canvas.MoveToCmd:
			p.MoveTo(r.point(seg.End))
		case canvas.LineToCmd:
			p.LineTo(r.point(seg.End))
		case canvas.QuadToCmd:
			p.QuadTo(r.point(seg.CP1()), r.point(seg.End))
		case canvas.CubeToCmd:
			p.CubeTo(r.point(seg.CP1()), r.point(seg.CP2()), r.point(seg.End))
		case canvas.ArcToCmd:
			// TODO: ArcTo
			p.LineTo(r.point(seg.End))
		case canvas.CloseCmd:
			p.Close()
		}
	}
	shape := clip.Outline{p.End()}
	paint.FillShape(r.ops, toNRGBA(col), shape.Op())
}
// RenderPath renders a path to the canvas using a style and a transformation matrix.
// Fill is drawn first, then the (optionally dashed) stroke on top.
func (r *Gio) RenderPath(path *canvas.Path, style canvas.Style, m canvas.Matrix) {
	if style.HasFill() {
		r.renderPath(path.Transform(m), style.FillColor)
	}
	if style.HasStroke() {
		if style.IsDashed() {
			path = path.Dash(style.DashOffset, style.Dashes...)
		}
		// Flatten the stroke into a fillable outline, then fill it.
		path = path.Stroke(style.StrokeWidth, style.StrokeCapper, style.StrokeJoiner)
		r.renderPath(path.Transform(m), style.StrokeColor)
	}
}

// RenderText renders a text object to the canvas using a transformation matrix.
// Glyphs are converted to paths and drawn through RenderPath.
func (r *Gio) RenderText(text *canvas.Text, m canvas.Matrix) {
	text.RenderAsPath(r, m, canvas.DefaultResolution)
}

// RenderImage renders an image to the canvas using a transformation matrix.
// The affine transform below combines the canvas matrix with the scale
// factors and flips the y axis to Gio's top-left-origin coordinate system.
func (r *Gio) RenderImage(img image.Image, m canvas.Matrix) {
	paint.NewImageOp(img).Add(r.ops)
	m = canvas.Identity.Scale(r.xScale, r.yScale).Mul(m)
	m = m.Translate(0.0, float64(img.Bounds().Max.Y))
	trans := op.Affine(f32.NewAffine2D(
		float32(m[0][0]), -float32(m[0][1]), float32(m[0][2]),
		-float32(m[1][0]), float32(m[1][1]), float32(r.yScale*r.height-m[1][2]),
	)).Push(r.ops)
	paint.PaintOp{}.Add(r.ops)
	trans.Pop()
}
func toNRGBA(col color.Color) color.NRGBA {
r, g, b, a := col.RGBA()
if a == 0 {
return color.NRGBA{}
}
r = (r * 0xffff) / a
g = (g * 0xffff) / a
b = (b * 0xffff) / a
return color.NRGBA{R: uint8(r >> 8), G: uint8(g >> 8), B: uint8(b >> 8), A: uint8(a >> 8)}
} | renderers/gio/gio.go | 0.727879 | 0.457743 | gio.go | starcoder |
package medtronic
import (
"fmt"
"log"
"strconv"
"time"
)
const (
	// JSONTimeLayout specifies the format for JSON time values.
	JSONTimeLayout = time.RFC3339
	// UserTimeLayout specifies a consistent, human-readable format for local time.
	UserTimeLayout = "2006-01-02 15:04:05"
)

type (
	// Duration allows custom JSON marshaling for time.Duration values.
	Duration time.Duration
	// TimeOfDay represents a value between 0 and 24 hours.
	TimeOfDay time.Duration
)

// hoursToDuration converts n whole hours to a Duration.
func hoursToDuration(n uint8) Duration {
	return Duration(time.Hour * time.Duration(n))
}

// halfHoursToDuration converts n half-hour increments to a Duration.
func halfHoursToDuration(n uint8) Duration {
	return Duration(30 * time.Minute * time.Duration(n))
}

// minutesToDuration converts n minutes to a Duration.
func minutesToDuration(n uint8) Duration {
	return Duration(time.Minute * time.Duration(n))
}

// TimeOfDay converts a duration to a time of day.
// It panics when d lies outside [0h, 24h).
func (d Duration) TimeOfDay() TimeOfDay {
	v := time.Duration(d)
	if v < 0 || v >= 24*time.Hour {
		log.Panicf("duration %v is not a valid time of day", v)
	}
	return TimeOfDay(v)
}

// HalfHours converts a time of day into a count of half-hour increments,
// truncating any remainder. It panics when the count exceeds a uint8.
func (t TimeOfDay) HalfHours() uint8 {
	halves := time.Duration(t) / (30 * time.Minute)
	if halves > 255 {
		log.Panicf("time of day %v is too large", t)
	}
	return uint8(halves)
}

// String renders the time of day as a zero-padded HH:MM string.
func (t TimeOfDay) String() string {
	d := time.Duration(t)
	return fmt.Sprintf("%02d:%02d", d/time.Hour, (d%time.Hour)/time.Minute)
}

// ParseTimeOfDay parses a string of the form HH:MM into a time of day.
// Anything that is not exactly five characters with valid 24-hour fields
// yields an error.
func ParseTimeOfDay(s string) (TimeOfDay, error) {
	if len(s) == 5 && s[2] == ':' {
		hour, hErr := strconv.Atoi(s[0:2])
		min, mErr := strconv.Atoi(s[3:5])
		if hErr == nil && mErr == nil && 0 <= hour && hour < 24 && 0 <= min && min < 60 {
			d := time.Duration(hour)*time.Hour + time.Duration(min)*time.Minute
			return Duration(d).TimeOfDay(), nil
		}
	}
	return 0, fmt.Errorf("ParseTimeOfDay: %q must be of the form HH:MM", s)
}

// halfHoursToTimeOfDay converts n half-hour increments to a time of day.
func halfHoursToTimeOfDay(n uint8) TimeOfDay {
	return halfHoursToDuration(n).TimeOfDay()
}

// SinceMidnight converts a time to the time of day elapsed since midnight.
func SinceMidnight(t time.Time) TimeOfDay {
	hour, min, sec := t.Clock()
	d := time.Duration(hour)*time.Hour +
		time.Duration(min)*time.Minute +
		time.Duration(sec)*time.Second +
		time.Duration(t.Nanosecond())
	return Duration(d).TimeOfDay()
}
// Decode a 5-byte timestamp from a pump history record.
// Packed layout, as decoded below:
//   data[0] bits 0-5: seconds;  bits 6-7: high two bits of the month
//   data[1] bits 0-5: minutes;  bits 6-7: low two bits of the month
//   data[2] bits 0-4: hour
//   data[3] bits 0-4: day of month
//   data[4] bits 0-6: year offset from 2000
// The result is interpreted in the local time zone.
func decodeTime(data []byte) time.Time {
	sec := int(data[0] & 0x3F)
	min := int(data[1] & 0x3F)
	hour := int(data[2] & 0x1F)
	day := int(data[3] & 0x1F)
	// The 4-bit month value is encoded in the high 2 bits of the first 2 bytes.
	month := time.Month(int(data[0]>>6)<<2 | int(data[1]>>6))
	year := 2000 + int(data[4]&0x7F)
	return time.Date(year, month, day, hour, min, sec, 0, time.Local)
}

// Decode a 2-byte date from a pump history record.
// Packed layout, as decoded below:
//   data[0] bits 0-4: day;  bits 5-7: high three bits of the month
//   data[1] bit 7: low bit of the month;  bits 0-6: year offset from 2000
// The result is midnight local time on that date.
func decodeDate(data []byte) time.Time {
	day := int(data[0] & 0x1F)
	month := time.Month(int(data[0]>>5)<<1 + int(data[1]>>7))
	year := 2000 + int(data[1]&0x7F)
	return time.Date(year, month, day, 0, 0, 0, 0, time.Local)
}
package assert
import (
"testing"
"github.com/ppapapetrou76/go-testing/internal/pkg/values"
"github.com/ppapapetrou76/go-testing/types"
)
// SliceOpt is a configuration option to initialize an AssertableAny Slice.
type SliceOpt func(*AssertableSlice)

// AssertableSlice is the implementation of AssertableSlice for string slices.
// NOTE(review): the "string slices" wording looks historical — the element
// type is whatever values.NewSliceValue wraps; confirm and update.
type AssertableSlice struct {
	t *testing.T
	actual types.Containable
	customMessage string
}

// WithCustomMessage provides a custom message to be added before the assertion error message.
// NOTE(review): customMessage is stored but never referenced by the
// assertion methods in this file; confirm the should* helpers consume it.
func WithCustomMessage(customMessage string) SliceOpt {
	return func(c *AssertableSlice) {
		c.customMessage = customMessage
	}
}

// ThatSlice returns a proper assertable structure based on the slice type.
// The options are applied in order after the value is wrapped.
func ThatSlice(t *testing.T, actual interface{}, opts ...SliceOpt) AssertableSlice {
	t.Helper()
	assertable := &AssertableSlice{
		t: t,
		actual: values.NewSliceValue(actual),
	}
	for _, opt := range opts {
		opt(assertable)
	}
	return *assertable
}
// IsEqualTo asserts if the expected slice is equal to the assertable slice value
// It errors the tests if the compared values (actual VS expected) are not equal.
func (a AssertableSlice) IsEqualTo(expected interface{}) AssertableSlice {
	a.t.Helper()
	if !a.actual.IsEqualTo(expected) {
		a.t.Error(shouldBeEqual(a.actual, expected))
	}
	return a
}

// IsNotEqualTo asserts if the expected slice is not equal to the assertable slice value
// It errors the tests if the compared values (actual VS expected) are equal.
func (a AssertableSlice) IsNotEqualTo(expected interface{}) AssertableSlice {
	a.t.Helper()
	if a.actual.IsEqualTo(expected) {
		a.t.Error(shouldNotBeEqual(a.actual, expected))
	}
	return a
}

// HasSize asserts if the assertable slice has the expected length
// It errors the test if it doesn't have the expected size.
func (a AssertableSlice) HasSize(size int) AssertableSlice {
	a.t.Helper()
	if !a.actual.HasSize(size) {
		a.t.Error(shouldHaveSize(a.actual, size))
	}
	return a
}

// IsEmpty asserts that the assertable slice is empty.
func (a AssertableSlice) IsEmpty() AssertableSlice {
	a.t.Helper()
	if a.actual.IsNotEmpty() {
		a.t.Error(shouldBeEmpty(a.actual))
	}
	return a
}

// IsNotEmpty asserts that the assertable slice is not empty.
func (a AssertableSlice) IsNotEmpty() AssertableSlice {
	a.t.Helper()
	if a.actual.IsEmpty() {
		a.t.Error(shouldNotBeEmpty(a.actual))
	}
	return a
}

// Contains asserts if the assertable slice contains the given element(s)
// It errors the test if it does not contain it/them.
func (a AssertableSlice) Contains(elements interface{}) AssertableSlice {
	a.t.Helper()
	if a.actual.DoesNotContain(elements) {
		a.t.Error(shouldContain(a.actual, elements))
	}
	return a
}

// ContainsOnly asserts if the assertable slice contains only the given element(s)
// It errors the test if it does not contain it/them.
func (a AssertableSlice) ContainsOnly(elements interface{}) AssertableSlice {
	a.t.Helper()
	if !a.actual.ContainsOnly(elements) {
		a.t.Error(shouldContainOnly(a.actual, elements))
	}
	return a
}

// DoesNotContain asserts if the assertable slice does not contain the given element
// It errors the test if it contains it/them.
func (a AssertableSlice) DoesNotContain(elements interface{}) AssertableSlice {
	a.t.Helper()
	if a.actual.Contains(elements) {
		a.t.Error(shouldNotContain(a.actual, elements))
	}
	return a
}
// HasUniqueElements asserts if the assertable string slice does not contain the given element
// It errors the test if it contains it/them.
func (a AssertableSlice) HasUniqueElements() AssertableSlice {
if !(a.actual.HasUniqueElements()) {
a.t.Error(shouldContainUniqueElements(a.actual))
}
return a
}
// IsSortedAscending asserts if the assertable slice is sorted on ascending order.
// It supports the following slices : []string, []int, []int32, []int64, []float64 and any slice that implements
// sort.Interface
// If a non-supported type is given then the assertion will fail.
func (a AssertableSlice) IsSortedAscending() AssertableSlice {
if !(a.actual.IsSorted(false)) {
a.t.Error(shouldBeSorted(a.actual, "ascending"))
}
return a
}
// IsSortedDescending asserts if the assertable slice is sorted on descending order.
// It supports the following slices : []string, []int, []int32, []int64, []float64 and any slice that implements
// sort.Interface
// If a non-supported type is given then the assertion will fail.
func (a AssertableSlice) IsSortedDescending() AssertableSlice {
if !(a.actual.IsSorted(true)) {
a.t.Error(shouldBeSorted(a.actual, "descending"))
}
return a
} | assert/slice.go | 0.849285 | 0.718051 | slice.go | starcoder |
package xstats
import (
"io"
"time"
)
// Sender defines an interface to a stats system like statsd or datadog,
// used to send a service's metrics.
type Sender interface {
	// Gauge measures the value of a particular thing at a particular time,
	// like the amount of fuel in a car’s gas tank or the number of users
	// connected to a system.
	Gauge(stat string, value float64, tags ...string)

	// Count tracks how many times something happened per second, like
	// the number of database requests or page views.
	Count(stat string, count float64, tags ...string)

	// Histogram tracks the statistical distribution of a set of values,
	// like the duration of a number of database queries or the size of
	// files uploaded by users. Each histogram will track the average,
	// the minimum, the maximum, the median, the 95th percentile and the count.
	Histogram(stat string, value float64, tags ...string)

	// Timing measures the elapsed time.
	Timing(stat string, value time.Duration, tags ...string)
}
// CloseSender calls Close() on s when it also implements io.Closer;
// otherwise it is a no-op and returns nil.
func CloseSender(s Sender) error {
	c, ok := s.(io.Closer)
	if !ok {
		return nil
	}
	return c.Close()
}
// MultiSender lets you assign more than one sender to xstats in order to
// multicast observeration to different systems.
type MultiSender []Sender
// Gauge implements the xstats.Sender interface
func (s MultiSender) Gauge(stat string, value float64, tags ...string) {
for _, ss := range s {
ss.Gauge(stat, value, tags...)
}
}
// Count implements the xstats.Sender interface
func (s MultiSender) Count(stat string, count float64, tags ...string) {
for _, ss := range s {
ss.Count(stat, count, tags...)
}
}
// Histogram implements the xstats.Sender interface
func (s MultiSender) Histogram(stat string, value float64, tags ...string) {
for _, ss := range s {
ss.Histogram(stat, value, tags...)
}
}
// Timing implements the xstats.Sender interface
func (s MultiSender) Timing(stat string, duration time.Duration, tags ...string) {
for _, ss := range s {
ss.Timing(stat, duration, tags...)
}
}
// Close implements the io.Closer interface
func (s MultiSender) Close() error {
var firstErr error
// attempt to close all senders, return first error encountered
for _, ss := range s {
err := CloseSender(ss)
if err != nil && firstErr == nil {
firstErr = err
}
}
return firstErr
} | vendor/github.com/rs/xstats/sender.go | 0.706292 | 0.531878 | sender.go | starcoder |
package linkedlist
// Node represents a doubly linked node. Direct references to the next and
// previous nodes are kept so that neighbours are reachable without iteration.
type Node struct {
	next, prev *Node
	Value      interface{}
}

// Next returns the node that follows n, or nil when the successor is the
// sentinel node (which carries a nil Value).
func (n *Node) Next() *Node {
	if n.next.Value == nil {
		return nil
	}
	return n.next
}

// Prev returns the node that precedes n, or nil when the predecessor is the
// sentinel node (which carries a nil Value).
func (n *Node) Prev() *Node {
	if n.prev.Value == nil {
		return nil
	}
	return n.prev
}

// List represents a Circular Doubly Linked List.
type List struct {
	// sentinel closes the circle: it sits between the tail and the head,
	// giving O(1) access to both ends of the list.
	sentinel Node
	// size caches the element count so Size() runs in O(1).
	size int
}

// Init initializes (or clears) the list and returns it.
func (l *List) Init() *List {
	l.sentinel.next = &l.sentinel
	l.sentinel.prev = &l.sentinel
	l.size = 0
	return l
}

// New returns a new initialized List instance ready to be added to.
func New() *List {
	l := &List{}
	return l.Init()
}
// insertAfter links newNode in directly after node, increases the list size
// and returns the inserted node.
func (l *List) insertAfter(newNode, node *Node) *Node {
	succ := node.next
	newNode.prev = node
	newNode.next = succ
	node.next = newNode
	succ.prev = newNode
	l.size++
	return newNode
}

// insertValue wraps v in a fresh Node and inserts it after afterNode.
func (l *List) insertValue(v interface{}, afterNode *Node) *Node {
	return l.insertAfter(&Node{Value: v}, afterNode)
}

// remove unlinks node from the list, decreases the list size and returns
// the removed node.
func (l *List) remove(node *Node) *Node {
	node.prev.next = node.next
	node.next.prev = node.prev
	l.size--
	return node
}

// Size returns the length of the list.
func (l *List) Size() int {
	return l.size
}
// First returns the first node in the list, or nil when the list is empty.
func (l *List) First() *Node {
	if head := l.sentinel.next; head != &l.sentinel {
		return head
	}
	return nil
}

// Last returns the last node in the list, or nil when the list is empty.
func (l *List) Last() *Node {
	if tail := l.sentinel.prev; tail != &l.sentinel {
		return tail
	}
	return nil
}

// Append adds a new value to the end of the list.
func (l *List) Append(v interface{}) *Node {
	return l.insertValue(v, l.sentinel.prev)
}

// Prepend adds a new value to the beginning of the list.
func (l *List) Prepend(v interface{}) *Node {
	return l.insertValue(v, &l.sentinel)
}

// Remove unlinks node from the list (when the list is non-empty) and
// returns the node's value.
func (l *List) Remove(node *Node) interface{} {
	if l.size > 0 {
		l.remove(node)
	}
	return node.Value
}
// Pop removes the last node of the list and returns it, or nil when empty.
func (l *List) Pop() *Node {
	if l.size == 0 {
		return nil
	}
	return l.remove(l.sentinel.prev)
}

// PopLeft removes the first node of the list and returns it, or nil when empty.
func (l *List) PopLeft() *Node {
	if l.size == 0 {
		return nil
	}
	return l.remove(l.sentinel.next)
}

// AppendList copies the values of other, in order, onto the end of l.
func (l *List) AppendList(other *List) {
	for n, left := other.First(), other.Size(); left > 0; n, left = n.Next(), left-1 {
		l.insertValue(n.Value, l.sentinel.prev)
	}
}

// PrependList copies the values of other onto the front of l while keeping
// other's original order.
func (l *List) PrependList(other *List) {
	for n, left := other.Last(), other.Size(); left > 0; n, left = n.Prev(), left-1 {
		l.insertValue(n.Value, &l.sentinel)
	}
}
// ToSlice converts a List to a Golang slice
func (l *List) ToSlice() []interface{} {
s := make([]interface{}, l.Size())
for i, n := 0, l.First(); n != nil; i, n = i+1, n.Next() {
s[i] = n.Value
}
return s
} | data-structures/linked-list/linkedlist.go | 0.849691 | 0.532 | linkedlist.go | starcoder |
package conf
import (
"fmt"
"math"
"reflect"
"strconv"
"strings"
"time"
)
const (
	// maxUint is the largest value representable by the platform's uint.
	maxUint = uint64(^uint(0))
	// maxInt and minInt are the platform int bounds, derived from maxUint.
	maxInt = int64(maxUint >> 1)
	minInt = -maxInt - 1
)
// decode converts input into output's type and stores the result in output.
// It unwraps a non-nil interface input to its concrete value first, then
// dispatches on output's kind to the matching decode* helper. Destination
// kinds with no helper are an error.
func decode(output, input reflect.Value) error {
	if input.Kind() == reflect.Interface && !input.IsNil() {
		input = input.Elem()
	}
	switch output.Kind() {
	case reflect.Bool:
		return decodeBool(output, input)
	case reflect.Int:
		return decodeInt(output, input, minInt, maxInt)
	case reflect.Int8:
		return decodeInt(output, input, math.MinInt8, math.MaxInt8)
	case reflect.Int16:
		return decodeInt(output, input, math.MinInt16, math.MaxInt16)
	case reflect.Int32:
		return decodeInt(output, input, math.MinInt32, math.MaxInt32)
	case reflect.Int64:
		// time.Duration is an int64 underneath but gets dedicated handling
		// so that strings like "5s" can be parsed.
		outputType := output.Type()
		if outputType.PkgPath() == "time" && outputType.Name() == "Duration" {
			return decodeDuration(output, input)
		}
		return decodeInt(output, input, math.MinInt64, math.MaxInt64)
	case reflect.Uint:
		return decodeUint(output, input, maxUint)
	case reflect.Uint8:
		return decodeUint(output, input, math.MaxUint8)
	case reflect.Uint16:
		return decodeUint(output, input, math.MaxUint16)
	case reflect.Uint32:
		return decodeUint(output, input, math.MaxUint32)
	case reflect.Uint64:
		return decodeUint(output, input, math.MaxUint64)
	case reflect.Float32:
		return decodeFloat(output, input, math.MaxFloat32)
	case reflect.Float64:
		return decodeFloat(output, input, math.MaxFloat64)
	case reflect.String:
		return decodeString(output, input)
	case reflect.Array:
		return decodeArray(output, input)
	case reflect.Slice:
		return decodeSlice(output, input)
	case reflect.Map:
		return decodeMap(output, input)
	case reflect.Interface:
		return decodeInterface(output, input)
	case reflect.Struct:
		return decodeStruct(output, input)
	case reflect.Ptr:
		return decodePtr(output, input)
	default:
		return fmt.Errorf("type '%s' is not supported", output.Kind())
	}
}
// decodeBool converts input into a boolean and stores it in output.
// Numeric inputs (including numeric strings, via convertNumericString) map
// to false when zero and true otherwise; any other kind is an error.
func decodeBool(output, input reflect.Value) error {
	input = convertNumericString(input)
	var b bool
	switch input.Kind() {
	case reflect.Bool:
		b = input.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		b = input.Int() != 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		b = input.Uint() != 0
	case reflect.Float32, reflect.Float64:
		b = input.Float() != 0
	default:
		return fmt.Errorf("'%s' could not be converted to '%s'", input.Type(), output.Type())
	}
	output.SetBool(b)
	return nil
}
// decodeInt converts input into a signed integer and stores it in output,
// rejecting values outside [min, max] (the destination type's bounds).
// Numeric strings are converted first via convertNumericString; booleans map
// to 0/1 and floats are truncated toward zero.
func decodeInt(output, input reflect.Value, min, max int64) error {
	input = convertNumericString(input)
	switch input.Kind() {
	case reflect.Bool:
		if input.Bool() {
			output.SetInt(1)
		} else {
			output.SetInt(0)
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		i := input.Int()
		if i < min || i > max {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetInt(i)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		i := input.Uint()
		// uint64(max) is safe here: callers in this file always pass a
		// non-negative upper bound (MaxInt8 ... MaxInt64).
		if i > uint64(max) {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetInt(int64(i))
	case reflect.Float32, reflect.Float64:
		f := input.Float()
		if f < float64(min) || f > float64(max) {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetInt(int64(f))
	default:
		return fmt.Errorf("'%s' could not be converted to '%s'", input.Type(), output.Type())
	}
	return nil
}
// decodeUint converts input into an unsigned integer and stores it in output,
// rejecting negative values and values above max (the destination type's
// upper bound). Numeric strings are converted first via convertNumericString;
// booleans map to 0/1 and floats are truncated toward zero.
func decodeUint(output, input reflect.Value, max uint64) error {
	input = convertNumericString(input)
	switch input.Kind() {
	case reflect.Bool:
		if input.Bool() {
			output.SetUint(1)
		} else {
			output.SetUint(0)
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		i := input.Int()
		if i < 0 || uint64(i) > max {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetUint(uint64(i))
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		i := input.Uint()
		if i > max {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetUint(i)
	case reflect.Float32, reflect.Float64:
		f := input.Float()
		if f < 0 || f > float64(max) {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetUint(uint64(f))
	default:
		return fmt.Errorf("'%s' could not be converted to '%s'", input.Type(), output.Type())
	}
	return nil
}
// decodeDuration converts input into a time.Duration and stores it in output.
// Numeric inputs are stored as-is (time.Duration counts nanoseconds); string
// inputs are parsed with time.ParseDuration (e.g. "1h30m").
func decodeDuration(output, input reflect.Value) error {
	input = convertNumericString(input)
	switch input.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		output.SetInt(input.Int())
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		i := input.Uint()
		if i > math.MaxInt64 {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetInt(int64(i))
	case reflect.Float32, reflect.Float64:
		f := input.Float()
		if f < math.MinInt64 || f > math.MaxInt64 {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetInt(int64(f))
	case reflect.String:
		d, err := time.ParseDuration(input.String())
		if err != nil {
			return fmt.Errorf("'%s' is not a valid duration", input.String())
		}
		output.SetInt(int64(d))
	default:
		return fmt.Errorf("'%s' could not be converted to 'time.Duration'", input.Type())
	}
	return nil
}
// decodeFloat converts input into a float and stores it in output, rejecting
// magnitudes beyond max (the destination type's largest finite value).
// Numeric strings are converted first via convertNumericString; booleans map
// to 0/1.
func decodeFloat(output, input reflect.Value, max float64) error {
	input = convertNumericString(input)
	switch input.Kind() {
	case reflect.Bool:
		if input.Bool() {
			output.SetFloat(1)
		} else {
			output.SetFloat(0)
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		i := input.Int()
		if float64(i) < -max || float64(i) > max {
			// Report the destination type, consistent with decodeInt/decodeUint:
			// the "expected" type is what the value must fit into.
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetFloat(float64(i))
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		i := input.Uint()
		if float64(i) > max {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetFloat(float64(i))
	case reflect.Float32, reflect.Float64:
		f := input.Float()
		if f < -max || f > max {
			return fmt.Errorf("value out of range ('%s' expected)", output.Type())
		}
		output.SetFloat(f)
	default:
		return fmt.Errorf("'%s' could not be converted to '%s'", input.Type(), output.Type())
	}
	return nil
}
func decodeString(output, input reflect.Value) error {
switch input.Kind() {
case reflect.Bool:
output.SetString(strconv.FormatBool(input.Bool()))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
output.SetString(strconv.FormatInt(input.Int(), 10))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
output.SetString(strconv.FormatUint(input.Uint(), 10))
case reflect.Float32, reflect.Float64:
output.SetString(strconv.FormatFloat(input.Float(), 'f', -1, 64))
case reflect.String:
output.SetString(input.String())
default:
output.SetString(fmt.Sprintf("%v", input.Interface()))
}
return nil
}
// decodeArray fills the fixed-size array output from input. Array/slice
// inputs must have exactly the destination's length; any other input is
// treated as a single element and requires a length-1 destination.
func decodeArray(output, input reflect.Value) error {
	switch input.Kind() {
	case reflect.Array, reflect.Slice:
		n := input.Len()
		if n != output.Len() {
			return fmt.Errorf("'[%d]%s' could not be converted to '[%d]%s'", input.Len(), input.Type().Elem(), output.Len(), output.Type().Elem())
		}
		for i := 0; i < n; i++ {
			if err := decode(output.Index(i), input.Index(i)); err != nil {
				return err
			}
		}
	default:
		// scalar input: only a one-element array can receive it
		if output.Len() != 1 {
			return fmt.Errorf("'[1]%s' could not be converted to '[%d]%s'", input.Type(), output.Len(), output.Type().Elem())
		}
		return decode(output.Index(0), input)
	}
	return nil
}
// decodeSlice builds a fresh slice from input and stores it in output.
// Array/slice inputs are decoded element by element; any other input is
// wrapped into a one-element slice.
func decodeSlice(output, input reflect.Value) error {
	switch input.Kind() {
	case reflect.Array, reflect.Slice:
		n := input.Len()
		sliceVal := reflect.MakeSlice(reflect.SliceOf(output.Type().Elem()), n, n)
		for i := 0; i < n; i++ {
			if err := decode(sliceVal.Index(i), input.Index(i)); err != nil {
				return err
			}
		}
		output.Set(sliceVal)
	default:
		// scalar input becomes a single-element slice
		sliceVal := reflect.MakeSlice(reflect.SliceOf(output.Type().Elem()), 1, 1)
		if err := decode(sliceVal.Index(0), input); err != nil {
			return err
		}
		output.Set(sliceVal)
	}
	return nil
}
// decodeMap builds a fresh map of output's type from a map input, decoding
// every key and value individually. Non-map inputs are rejected.
func decodeMap(output, input reflect.Value) error {
	if input.Kind() != reflect.Map {
		return fmt.Errorf("'%s' could not be converted to 'map'", input.Type())
	}
	outputType := output.Type()
	mapType := reflect.MapOf(outputType.Key(), outputType.Elem())
	mapVal := reflect.MakeMap(mapType)
	for _, key := range input.MapKeys() {
		// decode the key into the destination key type
		k := reflect.Indirect(reflect.New(mapType.Key()))
		if err := decode(k, key); err != nil {
			return err
		}
		// decode the value into the destination element type
		v := reflect.Indirect(reflect.New(mapType.Elem()))
		if err := decode(v, input.MapIndex(key)); err != nil {
			return err
		}
		mapVal.SetMapIndex(k, v)
	}
	output.Set(mapVal)
	return nil
}
func decodeInterface(output, input reflect.Value) error {
if !output.CanSet() {
return fmt.Errorf("'%s' cannot be set", output.Type())
}
output.Set(input)
return nil
}
// decodeStruct fills the struct output from a map input whose keys must be
// strings. Each field is matched by its `config` tag key when present,
// otherwise by its name (with a case-insensitive fallback). A field tagged
// ",required" whose key is absent is an error; fields tagged "-" are skipped.
func decodeStruct(output, input reflect.Value) error {
	if input.Kind() != reflect.Map {
		return fmt.Errorf("'%s' could not be converted to 'map'", input.Type())
	}
	stringType := reflect.TypeOf("")
	keyType := input.Type().Key()
	if !stringType.AssignableTo(keyType) {
		// fixed: the opening quote was missing from the message
		return fmt.Errorf("'map[%s]' could not be converted to 'map[string]'", keyType)
	}
	for _, field := range fieldsOf(output) {
		if field.ignore {
			continue
		}
		key := reflect.ValueOf(field.mapkey())
		val := input.MapIndex(key)
		if !val.IsValid() && len(field.key) == 0 {
			// map key not found and no key specified (search case-insensitive)
			for _, k := range input.MapKeys() {
				if s, ok := k.Interface().(string); ok {
					if strings.EqualFold(s, field.name) {
						key = k
						val = input.MapIndex(k)
						break
					}
				}
			}
		}
		if !val.IsValid() {
			// map key not found
			if field.required {
				return fmt.Errorf("required field '%s' not found", field.name)
			}
			continue
		}
		if err := decode(field.value, val); err != nil {
			return fmt.Errorf("[struct field '%s'] %s", field.name, err)
		}
	}
	return nil
}
// decodePtr decodes input into the value output points to, allocating a new
// element first when output is nil. A pointer input is dereferenced before
// decoding.
func decodePtr(output, input reflect.Value) error {
	if input.Kind() == reflect.Ptr {
		input = input.Elem()
	}
	if !output.IsNil() {
		return decode(output.Elem(), input)
	}
	// The output value is nil. Create a new value
	// and assign it to output.
	val := reflect.New(output.Type().Elem())
	if err := decode(val, input); err != nil {
		return err
	}
	output.Set(val)
	return nil
}
func convertNumericString(v reflect.Value) reflect.Value {
if v.Kind() == reflect.String {
s := v.String()
if b, err := strconv.ParseBool(s); err == nil {
return reflect.ValueOf(b)
}
if i, err := strconv.ParseInt(s, 10, 64); err == nil {
return reflect.ValueOf(i)
}
if f, err := strconv.ParseFloat(s, 64); err == nil {
return reflect.ValueOf(f)
}
}
return v
}
// field describes one struct field together with the decoding options
// parsed from its `config` struct tag.
type field struct {
	name string // Go field name
	value reflect.Value // settable reflect value of the field
	key string // explicit map key from the tag, empty when unset
	required bool // tag option ",required": error when the key is missing
	ignore bool // tag value "-": skip this field entirely
}

// fieldsOf parses the `config` tags of v's struct fields and returns one
// descriptor per field. Tag format is "key" or "key,required" or "-".
// It panics on an unknown tag option, since that is a programming error
// in the struct definition itself.
func fieldsOf(v reflect.Value) []*field {
	t := v.Type()
	n := v.NumField()
	fields := make([]*field, 0, n)
	for i := 0; i < n; i++ {
		structField := t.Field(i)
		f := &field{
			name: structField.Name,
			value: v.Field(i),
		}
		tag := structField.Tag.Get("config")
		switch tag {
		case "":
			// no 'config' tag
		case "-":
			f.ignore = true
		default:
			tagParts := strings.Split(tag, ",")
			f.key = tagParts[0]
			if len(tagParts) > 1 {
				switch tagParts[1] {
				case "required":
					f.required = true
				default:
					panic(fmt.Sprintf("'%s.%s' contains an invalid 'config' tag (%s)", t, f.name, tag))
				}
			}
		}
		fields = append(fields, f)
	}
	return fields
}

// mapkey returns the map key used to look this field up: the tag key when
// present, the field name otherwise.
func (f *field) mapkey() string {
	if len(f.key) == 0 {
		return f.name
	}
	return f.key
}
package fmom
import (
"fmt"
"math"
"gonum.org/v1/gonum/spatial/r3"
)
// Equal reports whether p1 and p2 have identical four-momentum components,
// up to a fixed tolerance of 1e-14.
func Equal(p1, p2 P4) bool {
	return p4equal(p1, p2, 1e-14)
}

// p4equal reports whether every component (E, Px, Py, Pz) of p1 and p2
// matches within epsilon.
func p4equal(p1, p2 P4, epsilon float64) bool {
	return cmpeq(p1.E(), p2.E(), epsilon) &&
		cmpeq(p1.Px(), p2.Px(), epsilon) &&
		cmpeq(p1.Py(), p2.Py(), epsilon) &&
		cmpeq(p1.Pz(), p2.Pz(), epsilon)
}

// cmpeq reports whether x and y are exactly equal or differ by less than
// epsilon.
func cmpeq(x, y, epsilon float64) bool {
	return x == y || math.Abs(x-y) < epsilon
}
// Add returns the sum p1+p2. The sum is computed component-wise in Cartesian
// (PxPyPzE) form and then converted back into p1's concrete representation,
// so the result has the same dynamic type as p1. It panics when p1 is not
// one of the known P4 implementations.
func Add(p1, p2 P4) P4 {
	// FIXME(sbinet):
	// dispatch most efficient/less-lossy addition
	// based on type(dst) (and, optionally, type(src))
	var sum P4
	switch p1 := p1.(type) {
	case *PxPyPzE:
		p := NewPxPyPzE(p1.Px()+p2.Px(), p1.Py()+p2.Py(), p1.Pz()+p2.Pz(), p1.E()+p2.E())
		sum = &p
	case *EEtaPhiM:
		p := NewPxPyPzE(p1.Px()+p2.Px(), p1.Py()+p2.Py(), p1.Pz()+p2.Pz(), p1.E()+p2.E())
		var pp EEtaPhiM
		pp.Set(&p)
		sum = &pp
	case *EtEtaPhiM:
		p := NewPxPyPzE(p1.Px()+p2.Px(), p1.Py()+p2.Py(), p1.Pz()+p2.Pz(), p1.E()+p2.E())
		var pp EtEtaPhiM
		pp.Set(&p)
		sum = &pp
	case *PtEtaPhiM:
		p := NewPxPyPzE(p1.Px()+p2.Px(), p1.Py()+p2.Py(), p1.Pz()+p2.Pz(), p1.E()+p2.E())
		var pp PtEtaPhiM
		pp.Set(&p)
		sum = &pp
	case *IPtCotThPhiM:
		p := NewPxPyPzE(p1.Px()+p2.Px(), p1.Py()+p2.Py(), p1.Pz()+p2.Pz(), p1.E()+p2.E())
		var pp IPtCotThPhiM
		pp.Set(&p)
		sum = &pp
	default:
		panic(fmt.Errorf("fmom: invalid P4 concrete value: %#v", p1))
	}
	return sum
}
// IAdd adds src into dst in place and returns dst. It panics when dst is not
// one of the known P4 implementations.
//
// FIXME(sbinet):
// dispatch most efficient/less-lossy addition
// based on type(dst) (and, optionally, type(src))
func IAdd(dst, src P4) P4 {
	// Work through a Cartesian (PxPyPzE) view of dst: a *PxPyPzE is mutated
	// directly, every other representation is accumulated into a temporary
	// and written back through Set. (The previous per-case bodies were
	// byte-identical; collapsing them also drops a redundant `= nil`.)
	var p4 *PxPyPzE
	switch p1 := dst.(type) {
	case *PxPyPzE:
		p4 = p1
	case *EEtaPhiM, *EtEtaPhiM, *PtEtaPhiM, *IPtCotThPhiM:
		p := NewPxPyPzE(dst.Px(), dst.Py(), dst.Pz(), dst.E())
		p4 = &p
	default:
		panic(fmt.Errorf("fmom: invalid P4 concrete value: %#v", p1))
	}
	p4.P4.X += src.Px()
	p4.P4.Y += src.Py()
	p4.P4.Z += src.Pz()
	p4.P4.T += src.E()
	dst.Set(p4)
	return dst
}
// Scale returns a*p as a new four-momentum with the same concrete type as p.
//
// FIXME(sbinet):
// dispatch most efficient/less-lossy operation
// based on type(dst) (and, optionally, type(src))
func Scale(a float64, p P4) P4 {
	scaled := NewPxPyPzE(a*p.Px(), a*p.Py(), a*p.Pz(), a*p.E())
	out := p.Clone()
	out.Set(&scaled)
	return out
}

// InvMass computes the invariant mass of two incoming 4-vectors p1 and p2.
func InvMass(p1, p2 P4) float64 {
	return Add(p1, p2).M()
}
// BoostOf returns the 3d boost vector (p⃗/E) of the provided four-vector p.
// It panics if p has zero energy and a non-zero |p|^2.
// It panics if p isn't a timelike four-vector (M2() <= 0 — note this also
// rejects exactly light-like vectors).
func BoostOf(p P4) r3.Vec {
	e := p.E()
	if e == 0 {
		// a genuinely null four-vector boosts to the zero vector
		if p.P2() == 0 {
			return r3.Vec{}
		}
		panic("fmom: zero-energy four-vector")
	}
	if p.M2() <= 0 {
		panic("fmom: non-timelike four-vector")
	}
	inv := 1 / e
	return r3.Vec{X: inv * p.Px(), Y: inv * p.Py(), Z: inv * p.Pz()}
}
// Boost returns a copy of the provided four-vector
// boosted by the provided three-vector (a standard Lorentz boost with
// velocity vec, in units of c). The input is not validated: |vec| >= 1
// makes 1-v2 non-positive and math.Sqrt then yields NaN components.
func Boost(p P4, vec r3.Vec) P4 {
	o := p.Clone()
	// zero boost velocity: the clone is returned unchanged
	if vec == (r3.Vec{}) {
		return o
	}
	var (
		px = p.Px()
		py = p.Py()
		pz = p.Pz()
		ee = p.E()
		p3 = r3.Vec{X: px, Y: py, Z: pz}
		v2 = r3.Dot(vec, vec)          // |v|^2
		bp = r3.Dot(vec, p3)           // v·p
		gamma = 1 / math.Sqrt(1-v2)    // Lorentz factor
		beta = (gamma - 1) / v2
		alpha = beta*bp + gamma*ee
	)
	pp := NewPxPyPzE(
		px+alpha*vec.X,
		py+alpha*vec.Y,
		pz+alpha*vec.Z,
		gamma*(ee+bp),
	)
	o.Set(&pp)
	return o
}
// VecOf returns the 3-vector of fhe four-momentum
func VecOf(p P4) r3.Vec {
return r3.Vec{
X: p.Px(),
Y: p.Py(),
Z: p.Pz(),
}
} | fmom/ops.go | 0.693888 | 0.485417 | ops.go | starcoder |
package cpualt
// BusReader reads one byte from the given effective address.
type BusReader = func(addr uint32) uint8

// BusWriter writes one byte to the given effective address.
type BusWriter = func(addr uint32, val uint8)

// Bus models the system bus; read/write handlers are looked up per
// 16-byte segment (addr >> 4).
type Bus struct {
	M uint8 // last data access
	// 2^20 entries: the 24-bit address space divided into 16-byte
	// segments (addresses are shifted right by 4 bits to index here).
	Read [1048576]BusReader
	Write [1048576]BusWriter
}
// Init installs default handlers on every segment: reads return the last
// bus value (open-bus behaviour via b.M) and writes are no-ops.
func (b *Bus) Init() {
	openBusRead := func(addr uint32) uint8 { return b.M }
	nopWrite := func(addr uint32, val uint8) {}
	for i := range b.Read {
		b.Read[i] = openBusRead
		b.Write[i] = nopWrite
	}
}
// AttachReader installs r as the read handler for every 16-byte segment
// overlapping the inclusive address range [start, end].
func (b *Bus) AttachReader(start, end uint32, r BusReader) {
	for seg := start >> 4; seg <= end>>4; seg++ {
		b.Read[seg] = r
	}
}

// AttachWriter installs w as the write handler for every 16-byte segment
// overlapping the inclusive address range [start, end].
func (b *Bus) AttachWriter(start, end uint32, w BusWriter) {
	for seg := start >> 4; seg <= end>>4; seg++ {
		b.Write[seg] = w
	}
}
// EaRead reads one byte at the effective address addr and latches it in b.M.
func (b *Bus) EaRead(addr uint32) uint8 {
	v := b.Read[addr>>4](addr)
	b.M = v
	return v
}

// EaWrite writes val at the effective address addr and latches it in b.M.
func (b *Bus) EaWrite(addr uint32, val uint8) {
	b.Write[addr>>4](addr, val)
	b.M = val
}

// nWrite writes val at bank:addr and latches it in b.M.
func (b *Bus) nWrite(bank byte, addr uint16, val byte) {
	b.EaWrite(uint32(bank)<<16|uint32(addr), val)
}
// nWrite16_cross writes a 16-bit value little-endian starting at bank:addr;
// the high byte may cross into the next bank. The incremented address wraps
// at 24 bits, mirroring nRead16_cross (previously the increment was
// unmasked, so writing at 0xFFFFFF indexed past the handler arrays).
// b.M latches the last byte written (hh).
func (b *Bus) nWrite16_cross(bank byte, addr uint16, value uint16) {
	ea := uint32(bank)<<16 | uint32(addr)
	ll := byte(value)
	hh := byte(value >> 8)
	b.Write[ea>>4](ea, ll)
	ea = (ea + 1) & 0x00ffffff // wrap on 24bits, like the read path
	b.Write[ea>>4](ea, hh)
	b.M = hh
}

// eaWrite16_cross writes a 16-bit value little-endian starting at the
// effective address ea, wrapping the incremented address at 24 bits
// (consistent with eaRead16_cross). b.M latches the last byte written.
func (b *Bus) eaWrite16_cross(ea uint32, value uint16) {
	ll := byte(value)
	hh := byte(value >> 8)
	b.Write[ea>>4](ea, ll)
	ea = (ea + 1) & 0x00ffffff // wrap on 24bits
	b.Write[ea>>4](ea, hh)
	b.M = hh
}
// nWrite16_wrap writes a 16-bit value little-endian at bank:addr; the
// incremented address wraps within the 64KiB bank (uint16 overflow of
// addr+1), so the high byte never crosses the bank boundary. b.M latches
// the last byte written (hh).
func (b *Bus) nWrite16_wrap(bank byte, addr uint16, value uint16) {
	bank32 := uint32(bank) << 16
	ll := byte(value)
	hh := byte(value >> 8)
	ea := bank32 | uint32(addr)
	b.Write[ea>>4](ea, ll)
	ea = bank32 | uint32(addr+1)
	b.Write[ea>>4](ea, hh)
	b.M = hh
}
// nRead reads one byte at bank:addr and latches it in b.M.
func (b *Bus) nRead(bank byte, addr uint16) uint8 {
	ea := uint32(bank)<<16 | uint32(addr)
	b.M = b.Read[ea>>4](ea)
	return b.M
}

// nRead16_wrap reads a little-endian 16-bit value at bank:addr; addr+1 wraps
// within the 64KiB bank (uint16 overflow). b.M latches the last byte read.
func (b *Bus) nRead16_wrap(bank byte, addr uint16) uint16 {
	bank32 := uint32(bank) << 16
	ea := bank32 | uint32(addr)
	ll := b.Read[ea>>4](ea)
	ea = bank32 | uint32(addr+1)
	hh := b.Read[ea>>4](ea)
	b.M = hh
	return uint16(hh)<<8 | uint16(ll)
}

// nRead16_cross reads a little-endian 16-bit value at bank:addr; the high
// byte may cross into the next bank, with the address wrapping at 24 bits.
// b.M latches the last byte read.
func (b *Bus) nRead16_cross(bank byte, addr uint16) uint16 {
	ea := uint32(bank)<<16 | uint32(addr)
	ll := b.Read[ea>>4](ea)
	ea = (ea + 1) & 0x00ffffff // wrap on 24bits
	hh := b.Read[ea>>4](ea)
	b.M = hh
	return uint16(hh)<<8 | uint16(ll)
}

// eaRead16_cross reads a little-endian 16-bit value at the effective address
// ea, wrapping the incremented address at 24 bits. b.M latches the last byte.
func (b *Bus) eaRead16_cross(ea uint32) uint16 {
	ll := b.Read[ea>>4](ea)
	ea = (ea + 1) & 0x00ffffff // wrap on 24bits
	hh := b.Read[ea>>4](ea)
	b.M = hh
	return uint16(hh)<<8 | uint16(ll)
}

// nRead24_wrap reads a little-endian 24-bit value at bank:addr; each address
// increment wraps within the 64KiB bank. b.M latches the last byte read.
func (b *Bus) nRead24_wrap(bank byte, addr uint16) uint32 {
	bank32 := uint32(bank) << 16
	ea := bank32 | uint32(addr+0)
	ll := b.Read[ea>>4](ea)
	ea = bank32 | uint32(addr+1)
	mm := b.Read[ea>>4](ea)
	ea = bank32 | uint32(addr+2)
	hh := b.Read[ea>>4](ea)
	b.M = hh
	return uint32(hh)<<16 | uint32(mm)<<8 | uint32(ll)
}
package main
import (
"fmt"
"os"
"path/filepath"
intoto "github.com/in-toto/in-toto-golang/in_toto"
"github.com/spf13/cobra"
)
var (
	// recordStepName is the supply-chain step name ("-n/--name" flag).
	recordStepName string
	// recordMaterialsPaths holds the "-m/--materials" paths hashed at start.
	recordMaterialsPaths []string
	// recordProductsPaths holds the "-p/--products" paths hashed at stop.
	recordProductsPaths []string
)
// recordCmd is the parent "record" command; the actual work happens in the
// "start" and "stop" subcommands below. recordPreRun validates and loads the
// signing key/cert before either subcommand executes.
var recordCmd = &cobra.Command{
	Use: "record",
	Short: `Creates a signed link metadata file in two steps, in order to provide
evidence for supply chain steps that cannot be carried out by a single command`,
	Long: `Creates a signed link metadata file in two steps, in order to provide
evidence for supply chain steps that cannot be carried out by a single command
(for which ‘in-toto-run’ should be used). It returns a non-zero value on
failure and zero otherwise.`,
	PersistentPreRunE: recordPreRun,
}

// recordStartCmd records the materials and writes the preliminary
// ".<name>.<keyid prefix>.link-unfinished" file.
var recordStartCmd = &cobra.Command{
	Use: "start",
	Short: `Creates a preliminary link file recording the paths and hashes of the
passed materials and signs it with the passed functionary’s key.`,
	Long: `Creates a preliminary link file recording the paths and hashes of the
passed materials and signs it with the passed functionary’s key.
The resulting link file is stored as ‘.<name>.<keyid prefix>.link-unfinished’.`,
	RunE: recordStart,
}

// recordStopCmd completes the preliminary link file with the products and
// writes the final "<name>.<keyid prefix>.link" file.
var recordStopCmd = &cobra.Command{
	Use: "stop",
	Short: `Records and adds the paths and hashes of the passed products to the link metadata file and updates the signature.`,
	Long: `Expects preliminary link file ‘.<name>.<keyid prefix>.link-unfinished’ in the CWD,
signed by the passed functionary’s key. If found, it records
and adds the paths and hashes of the passed products to the
link metadata file, updates the signature and renames the
file to ‘<name>.<keyid prefix>.link’.`,
	RunE: recordStop,
}
// init registers the record command tree with the root command and wires up
// the shared persistent flags plus the per-subcommand flags.
func init() {
	rootCmd.AddCommand(recordCmd)

	// Flags shared by both "record start" and "record stop".
	recordCmd.PersistentFlags().StringVarP(
		&recordStepName,
		"name",
		"n",
		"",
		`Name for the resulting link metadata file.
It is also used to associate the link with a step defined
in an in-toto layout.`,
	)
	recordCmd.PersistentFlags().StringVarP(
		&keyPath,
		"key",
		"k",
		"",
		`Path to a private key file to sign the resulting link metadata.
The keyid prefix is used as an infix for the link metadata filename,
i.e. ‘<name>.<keyid prefix>.link’. See ‘–key-type’ for available
formats. Passing one of ‘–key’ or ‘–gpg’ is required.`,
	)
	recordCmd.PersistentFlags().StringVarP(
		&certPath,
		"cert",
		"c",
		"",
		`Path to a PEM formatted certificate that corresponds
with the provided key.`,
	)
	recordCmd.PersistentFlags().StringVarP(
		&outDir,
		"metadata-directory",
		"d",
		"./",
		`Directory to store link metadata`,
	)
	recordCmd.PersistentFlags().StringArrayVarP(
		&lStripPaths,
		"lstrip-paths",
		"l",
		[]string{},
		`Path prefixes used to left-strip artifact paths before storing
them to the resulting link metadata. If multiple prefixes
are specified, only a single prefix can match the path of
any artifact and that is then left-stripped. All prefixes
are checked to ensure none of them are a left substring
of another.`,
	)
	recordCmd.PersistentFlags().StringArrayVarP(
		&exclude,
		"exclude",
		"e",
		[]string{},
		`Path patterns to match paths that should not be recorded as
‘materials’ or ‘products’. Passed patterns override patterns defined
in environment variables or config files. See Config docs for details.`,
	)

	// NOTE(review): the error return is ignored here — cobra only reports
	// an error when the named flag does not exist, which the line above
	// guarantees it does. Confirm this matches the project's lint policy.
	recordCmd.MarkPersistentFlagRequired("name")

	// Record Start Command
	recordCmd.AddCommand(recordStartCmd)
	recordStartCmd.Flags().StringArrayVarP(
		&recordMaterialsPaths,
		"materials",
		"m",
		[]string{},
		`Paths to files or directories, whose paths and hashes
are stored in the resulting link metadata before the
command is executed. Symlinks are followed.`,
	)

	// Record Stop Command
	recordCmd.AddCommand(recordStopCmd)
	recordStopCmd.Flags().StringArrayVarP(
		&recordProductsPaths,
		"products",
		"p",
		[]string{},
		`Paths to files or directories, whose paths and hashes
are stored in the resulting link metadata after the
command is executed. Symlinks are followed.`,
	)
}
// recordPreRun runs before either subcommand. It resets and loads the
// package-level signing key (and optional certificate) from the --key and
// --cert flags; at least one of the two must be provided. When both are
// given, the loaded certificate is copied onto the signing key.
func recordPreRun(cmd *cobra.Command, args []string) error {
	key = intoto.Key{}
	cert = intoto.Key{}
	if keyPath == "" && certPath == "" {
		return fmt.Errorf("key or cert must be provided")
	}
	if len(keyPath) > 0 {
		if _, err := os.Stat(keyPath); err == nil {
			if err := key.LoadKeyDefaults(keyPath); err != nil {
				return fmt.Errorf("invalid key at %s: %w", keyPath, err)
			}
		} else {
			return fmt.Errorf("key not found at %s: %w", keyPath, err)
		}
	}
	if len(certPath) > 0 {
		if _, err := os.Stat(certPath); err == nil {
			if err := cert.LoadKeyDefaults(certPath); err != nil {
				return fmt.Errorf("invalid cert at %s: %w", certPath, err)
			}
			// attach the loaded certificate to the signing key
			key.KeyVal.Certificate = cert.KeyVal.Certificate
		} else {
			return fmt.Errorf("cert not found at %s: %w", certPath, err)
		}
	}
	return nil
}
// recordStart implements "in-toto record start": it hashes the given
// materials, signs a preliminary link with the loaded key and writes it to
// "<metadata-directory>/.<name>.<keyid prefix>.link-unfinished".
func recordStart(cmd *cobra.Command, args []string) error {
	block, err := intoto.InTotoRecordStart(recordStepName, recordMaterialsPaths, key, []string{"sha256"}, exclude, lStripPaths)
	if err != nil {
		return fmt.Errorf("failed to create start link file: %w", err)
	}
	name := fmt.Sprintf(intoto.PreliminaryLinkNameFormat, recordStepName, key.KeyID)
	if err := block.Dump(filepath.Join(outDir, name)); err != nil {
		return fmt.Errorf("failed to write start link file to %s: %w", name, err)
	}
	return nil
}
func recordStop(cmd *cobra.Command, args []string) error {
var prelimLinkMb intoto.Metablock
prelimLinkName := fmt.Sprintf(intoto.PreliminaryLinkNameFormat, recordStepName, key.KeyID)
prelimLinkPath := filepath.Join(outDir, prelimLinkName)
if err := prelimLinkMb.Load(prelimLinkPath); err != nil {
return fmt.Errorf("failed to load start link file at %s: %w", prelimLinkName, err)
}
linkMb, err := intoto.InTotoRecordStop(prelimLinkMb, recordProductsPaths, key, []string{"sha256"}, exclude, lStripPaths)
if err != nil {
return fmt.Errorf("failed to create stop link file: %w", err)
}
linkName := fmt.Sprintf(intoto.LinkNameFormat, recordStepName, key.KeyID)
linkPath := filepath.Join(outDir, linkName)
err = linkMb.Dump(linkPath)
if err != nil {
return fmt.Errorf("failed to write stop link file to %s: %w", prelimLinkName, err)
}
return nil
} | cmd/in-toto/record.go | 0.567337 | 0.446917 | record.go | starcoder |
package types
import (
"context"
"strconv"
"github.com/MontFerret/ferret/pkg/runtime/core"
"github.com/MontFerret/ferret/pkg/runtime/values"
)
// ToFloat takes an input value of any type and convert it into a float value.
// @param value (Value) - Input value of arbitrary type.
// @returns (Float) -
// None and false are converted to the value 0
// true is converted to 1
// Numbers keep their original value
// Strings are converted to their numeric equivalent if the string contains a valid representation of a number.
// String values that do not contain any valid representation of a number will be converted to the number 0.
// An empty array is converted to 0, an array with one member is converted into the result of TO_NUMBER() for its sole member.
// An array with two or more members is converted to the number 0.
// An object / HTML node is converted to the number 0.
func ToFloat(ctx context.Context, args ...core.Value) (core.Value, error) {
	err := core.ValidateArgs(args, 1, 1)
	if err != nil {
		return values.None, err
	}
	arg := args[0]
	switch arg.Type() {
	case core.BooleanType:
		// false -> 0, true -> 1
		val := arg.(values.Boolean)
		if val {
			return values.NewFloat(1), nil
		}
		return values.ZeroFloat, nil
	case core.IntType:
		val := arg.(values.Int)
		return values.Float(val), nil
	case core.FloatType:
		// already a float: returned unchanged
		return arg, nil
	case core.StringType:
		str := arg.String()
		if str == "" {
			return values.ZeroFloat, nil
		}
		num, err := strconv.ParseFloat(str, 64)
		if err != nil {
			// per the contract above, unparsable strings become 0 (no error)
			return values.ZeroFloat, nil
		}
		return values.NewFloat(num), nil
	case core.DateTimeType:
		// non-zero datetimes are converted to their Unix time in seconds
		val := arg.(values.DateTime)
		if val.IsZero() {
			return values.ZeroFloat, nil
		}
		return values.NewFloat(float64(val.Unix())), nil
	case core.NoneType:
		return values.ZeroFloat, nil
	case core.ArrayType:
		// only a single-element array converts (recursively); others -> 0
		val := arg.(*values.Array)
		if val.Length() == 0 {
			return values.ZeroFloat, nil
		}
		if val.Length() == 1 {
			return ToFloat(ctx, val.Get(0))
		}
		return values.ZeroFloat, nil
	default:
		// objects, HTML nodes and any other type convert to 0
		return values.ZeroFloat, nil
	}
}
package tui
import (
"bytes"
"fmt"
"image"
"strconv"
)
// testCell is a single character cell recorded by TestSurface, pairing the
// rune that was drawn with the style it was drawn in.
type testCell struct {
	Rune  rune
	Style Style
}
// A TestSurface implements the Surface interface with local buffers,
// and provides accessors to check the output of a draw operation on the Surface.
type TestSurface struct {
	cells   map[image.Point]testCell // painted cells keyed by position; absent keys are unpainted
	cursor  image.Point              // last position given to SetCursor
	size    image.Point              // fixed surface dimensions (width, height)
	emptyCh rune                     // rune rendered for unpainted cells in the string dumps
}
// NewTestSurface returns a new TestSurface.
// NewTestSurface returns a new TestSurface of the given width and height,
// rendering unpainted cells as '.'.
func NewTestSurface(w, h int) *TestSurface {
	surface := &TestSurface{
		cells:   map[image.Point]testCell{},
		emptyCh: '.',
	}
	surface.size = image.Point{X: w, Y: h}
	return surface
}
// SetCell sets the contents of the addressed cell.
// SetCell sets the contents of the addressed cell.
func (s *TestSurface) SetCell(x, y int, ch rune, style Style) {
	pos := image.Point{X: x, Y: y}
	s.cells[pos] = testCell{Rune: ch, Style: style}
}
// SetCursor moves the Surface's cursor to the specified position.
func (s *TestSurface) SetCursor(x, y int) {
	s.cursor = image.Point{x, y}
}

// HideCursor removes the cursor from the display.
// NOTE(review): there is no separate "hidden" flag — this resets the cursor
// to the zero point, which is indistinguishable from a cursor at (0,0).
func (s *TestSurface) HideCursor() {
	s.cursor = image.Point{}
}
// Begin resets the state of the TestSurface, clearing all cells.
// It must be called before drawing the Surface.
func (s *TestSurface) Begin() {
	// Discard all previously painted cells; size and cursor are kept.
	s.cells = make(map[image.Point]testCell)
}

// End indicates the surface has been painted on, and can be rendered.
// It's a no-op for TestSurface.
func (s *TestSurface) End() {
	// NOP
}

// Size returns the dimensions of the surface.
func (s *TestSurface) Size() image.Point {
	return s.size
}
// String returns the characters written to the TestSurface.
// String returns the characters written to the TestSurface.
// The output begins with a newline, then one text line per surface row;
// unpainted cells render as the empty character. A rune wider than one
// column advances the scan past the columns it covers, so cells "behind"
// a wide rune are not rendered.
func (s *TestSurface) String() string {
	var buf bytes.Buffer
	buf.WriteRune('\n')
	for j := 0; j < s.size.Y; j++ {
		for i := 0; i < s.size.X; i++ {
			if cell, ok := s.cells[image.Point{i, j}]; ok {
				buf.WriteRune(cell.Rune)
				// Skip the extra columns occupied by a wide rune.
				if w := runeWidth(cell.Rune); w > 1 {
					i += w - 1
				}
			} else {
				buf.WriteRune(s.emptyCh)
			}
		}
		buf.WriteRune('\n')
	}
	return buf.String()
}
// FgColors renders the TestSurface's foreground colors, using the digits 0-7 for painted cells, and the empty character for unpainted cells.
// FgColors renders the surface's foreground colors: a digit ('0' + color)
// for each painted cell, the empty character for unpainted cells.
func (s *TestSurface) FgColors() string {
	var out bytes.Buffer
	out.WriteRune('\n')
	for row := 0; row < s.size.Y; row++ {
		for col := 0; col < s.size.X; col++ {
			cell, painted := s.cells[image.Point{col, row}]
			if !painted {
				out.WriteRune(s.emptyCh)
				continue
			}
			out.WriteRune('0' + rune(cell.Style.Fg))
		}
		out.WriteRune('\n')
	}
	return out.String()
}
// BgColors renders the TestSurface's background colors, using the digits 0-7 for painted cells, and the empty character for unpainted cells.
// BgColors renders the surface's background colors: a digit ('0' + color)
// for each painted cell, the empty character for unpainted cells.
func (s *TestSurface) BgColors() string {
	var out bytes.Buffer
	out.WriteRune('\n')
	for row := 0; row < s.size.Y; row++ {
		for col := 0; col < s.size.X; col++ {
			cell, painted := s.cells[image.Point{col, row}]
			if !painted {
				out.WriteRune(s.emptyCh)
				continue
			}
			out.WriteRune('0' + rune(cell.Style.Bg))
		}
		out.WriteRune('\n')
	}
	return out.String()
}
// Decorations renders the TestSurface's decorations (Reverse, Bold, Underline) using a bitmask:
// Reverse: 1
// Bold: 2
// Underline: 4
func (s *TestSurface) Decorations() string {
	var out bytes.Buffer
	out.WriteRune('\n')
	for row := 0; row < s.size.Y; row++ {
		for col := 0; col < s.size.X; col++ {
			cell, painted := s.cells[image.Point{col, row}]
			if !painted {
				out.WriteRune(s.emptyCh)
				continue
			}
			var mask int64
			if cell.Style.Reverse == DecorationOn {
				mask |= 1
			}
			if cell.Style.Bold == DecorationOn {
				mask |= 2
			}
			if cell.Style.Underline == DecorationOn {
				mask |= 4
			}
			// mask is at most 7, so this is always a single hex digit.
			out.WriteString(strconv.FormatInt(mask, 16))
		}
		out.WriteRune('\n')
	}
	return out.String()
}
func surfaceEquals(surface *TestSurface, want string) string {
if surface.String() != want {
return fmt.Sprintf("got = \n%s\n\nwant = \n%s", surface.String(), want)
}
return ""
} | testing.go | 0.731538 | 0.50061 | testing.go | starcoder |
package pb
// {encode,decode}{I,S,U}{32,64}
// DecodeS64 reads a single 64bit zigzag varint, advancing *next past it.
func DecodeS64(buf []byte, next *int) int64 {
	return DecodeZigZag(DecodeVarInt(buf, next))
}

// DecodeI64 reads a single 64bit signed varint (plain two's-complement
// reinterpretation, no zigzag), advancing *next past it.
func DecodeI64(buf []byte, next *int) int64 {
	return int64(DecodeVarInt(buf, next))
}

// DecodeU64 reads a single 64bit unsigned varint, advancing *next past it.
func DecodeU64(buf []byte, next *int) uint64 {
	return DecodeVarInt(buf, next)
}
// DecodeS64PackedDelta reads a length-prefixed packed field of zigzag
// varints and cumulatively sums them, returning the delta-decoded values.
func DecodeS64PackedDelta(buf []byte, next *int) []int64 {
	innerBuf := DecodeBytes(buf, next)
	// One byte per value is the worst case, so len(innerBuf) is an upper
	// bound on the element count: allocate once, trim below.
	output := make([]int64, len(innerBuf)) // more memory but no reallocation
	idx := 0
	last := int64(0)
	for innerNext := 0; innerNext < len(innerBuf); {
		last += DecodeZigZag(DecodeVarInt(innerBuf, &innerNext))
		output[idx] = last
		idx++
	}
	return output[:idx]
}
// DecodeS64PackedDeltaZero is a variation on DecodeS64PackedDelta which decodes
// until it hits a 0, rather than using a length.
// NOTE(review): a genuine delta of zero also zigzag-encodes to 0, so this
// format can only represent sequences whose consecutive values differ —
// confirm against the writer (EncodeS64PackedDeltaZero).
// Panics if the buffer ends before a 0 terminator is seen.
func DecodeS64PackedDeltaZero(buf []byte) []int64 {
	var output []int64
	last := int64(0)
	for next := 0; next < len(buf); {
		nextDelta := DecodeVarInt(buf, &next)
		if nextDelta == 0 {
			// Terminator reached.
			return output
		}
		last += DecodeZigZag(nextDelta)
		output = append(output, last)
	}
	panic("ran out of data")
}
// EncodeS64PackedDeltaZero is a variation on EncodeS64PackedDelta which
// encodes without a length prefix and appends a 0 terminator instead.
func EncodeS64PackedDeltaZero(buf []byte, values []int64) []byte {
	prev := int64(0)
	for _, v := range values {
		buf = EncodeVarInt(buf, EncodeZigZag(v-prev))
		prev = v
	}
	return append(buf, 0)
}
// EncodeS64PackedDelta appends values to buf as a length-prefixed packed
// field of zigzag-encoded deltas between consecutive values.
func EncodeS64PackedDelta(buf []byte, values []int64) []byte {
	var inner []byte
	prev := int64(0)
	for _, v := range values {
		inner = EncodeVarInt(inner, EncodeZigZag(v-prev))
		prev = v
	}
	return EncodeBuffer(buf, inner)
}
// EncodeS64PackedDeltaFunc returns a closure that appends values as
// zigzag-encoded deltas (no length prefix, no terminator).
func EncodeS64PackedDeltaFunc(values []int64) func(buf []byte) []byte {
	return func(buf []byte) []byte {
		prev := int64(0)
		for _, v := range values {
			buf = EncodeVarInt(buf, EncodeZigZag(v-prev))
			prev = v
		}
		return buf
	}
}
// DecodeS64Packed reads a length-prefixed packed field of zigzag varints
// (no delta decoding), returning the decoded values.
func DecodeS64Packed(buf []byte, next *int) []int64 {
	innerBuf := DecodeBytes(buf, next)
	// One byte per value is the worst case, so len(innerBuf) bounds the
	// element count: allocate once, trim below.
	output := make([]int64, len(innerBuf)) // more memory but no reallocation
	idx := 0
	for innerNext := 0; innerNext < len(innerBuf); {
		output[idx] = DecodeZigZag(DecodeVarInt(innerBuf, &innerNext))
		idx++
	}
	return output[:idx]
}
// DecodeU64Packed reads a length-prefixed packed field of unsigned varints.
func DecodeU64Packed(buf []byte, next *int) []uint64 {
	inner := DecodeBytes(buf, next)
	var out []uint64
	for pos := 0; pos < len(inner); {
		out = append(out, DecodeVarInt(inner, &pos))
	}
	return out
}
// EncodeU64Packed returns a closure that appends each value as an unsigned
// varint (no length prefix).
func EncodeU64Packed(values []uint64) func(buf []byte) []byte {
	return func(buf []byte) []byte {
		for _, v := range values {
			buf = EncodeVarInt(buf, v)
		}
		return buf
	}
}
// DecodeS64Opt reads a zigzag varint and returns a pointer to it, for
// optional fields.
func DecodeS64Opt(buf []byte, next *int) *int64 {
	o := DecodeS64(buf, next)
	return &o
}

// DecodeI64Opt reads a signed varint and returns a pointer to it.
func DecodeI64Opt(buf []byte, next *int) *int64 {
	o := DecodeI64(buf, next)
	return &o
}

// DecodeU64Opt reads an unsigned varint and returns a pointer to it.
func DecodeU64Opt(buf []byte, next *int) *uint64 {
	o := DecodeU64(buf, next)
	return &o
}
package query
import (
"context"
"fmt"
"sort"
"strconv"
"strings"
"github.com/Peripli/service-manager/pkg/util"
"github.com/Peripli/service-manager/pkg/web"
)
// Operator is a query operator
type Operator string

// The operator tokens below appear in raw queries surrounded by
// OperandSeparator, e.g. `key = value` or `key in [a||b]`.
const (
	// EqualsOperator takes two operands and tests if they are equal
	EqualsOperator Operator = "="
	// NotEqualsOperator takes two operands and tests if they are not equal
	NotEqualsOperator Operator = "!="
	// GreaterThanOperator takes two operands and tests if the left is greater than the right
	GreaterThanOperator Operator = "gt"
	// LessThanOperator takes two operands and tests if the left is lesser than the right
	LessThanOperator Operator = "lt"
	// InOperator takes two operands and tests if the left is contained in the right
	InOperator Operator = "in"
	// NotInOperator takes two operands and tests if the left is not contained in the right
	NotInOperator Operator = "notin"
	// EqualsOrNilOperator takes two operands and tests if the left is equal to the right, or if the left is nil
	EqualsOrNilOperator Operator = "eqornil"
)
// IsMultiVariate returns true if the operator requires right operand with multiple values
func (op Operator) IsMultiVariate() bool {
	return op == InOperator || op == NotInOperator
}

// IsNullable returns true if the operator can check if the left operand is nil
func (op Operator) IsNullable() bool {
	return op == EqualsOrNilOperator
}

// IsNumeric returns true if the operator works only with numeric operands
func (op Operator) IsNumeric() bool {
	return op == LessThanOperator || op == GreaterThanOperator
}

// operators lists every recognized operator; process scans raw query
// strings for these tokens.
var operators = []Operator{EqualsOperator, NotEqualsOperator, InOperator,
	NotInOperator, GreaterThanOperator, LessThanOperator, EqualsOrNilOperator}
const (
	// OpenBracket is the token that denotes the beginning of a multivariate operand
	OpenBracket rune = '['
	// CloseBracket is the token that denotes the end of a multivariate operand
	CloseBracket rune = ']'
	// Separator is the separator between field and label queries
	Separator rune = '|'
	// OperandSeparator is the separator between the operator and the operands
	OperandSeparator rune = ' '
)

// CriterionType is a type of criteria to be applied when querying
type CriterionType string

const (
	// FieldQuery denotes that the query should be executed on the entity's fields
	FieldQuery CriterionType = "fieldQuery"
	// LabelQuery denotes that the query should be executed on the entity's labels
	LabelQuery CriterionType = "labelQuery"
)

// supportedQueryTypes are the criterion types parsed from request query parameters.
var supportedQueryTypes = []CriterionType{FieldQuery, LabelQuery}
// Criterion is a single part of a query criteria
type Criterion struct {
	// LeftOp is the left operand in the query (a field name or label key)
	LeftOp string
	// Operator is the query operator
	Operator Operator
	// RightOp is the right operand in the query which can be multivariate
	RightOp []string
	// Type is the type of the query (field or label)
	Type CriterionType
}
// ByField constructs a new criterion for field querying
func ByField(operator Operator, leftOp string, rightOp ...string) Criterion {
	return newCriterion(leftOp, operator, rightOp, FieldQuery)
}

// ByLabel constructs a new criterion for label querying
func ByLabel(operator Operator, leftOp string, rightOp ...string) Criterion {
	return newCriterion(leftOp, operator, rightOp, LabelQuery)
}

// newCriterion assembles a Criterion from its parts; no validation happens here.
func newCriterion(leftOp string, operator Operator, rightOp []string, criteriaType CriterionType) Criterion {
	return Criterion{LeftOp: leftOp, Operator: operator, RightOp: rightOp, Type: criteriaType}
}
// Validate checks the structural validity of the criterion: single-value
// operators must not receive multiple right operands, nullable operators are
// restricted to field queries, numeric operators require a numeric right
// operand, the left operand must not contain the Separator character, and
// right operands must not contain newlines.
func (c Criterion) Validate() error {
	if len(c.RightOp) > 1 && !c.Operator.IsMultiVariate() {
		return fmt.Errorf("multiple values %s received for single value operation %s", c.RightOp, c.Operator)
	}
	if c.Operator.IsNullable() && c.Type != FieldQuery {
		return &util.UnsupportedQueryError{Message: "nullable operations are supported only for field queries"}
	}
	if c.Operator.IsNumeric() {
		// Guard the length before indexing: criteria built directly via
		// ByField/ByLabel may carry no right operand at all, which
		// previously caused an index-out-of-range panic here.
		if len(c.RightOp) == 0 {
			return &util.UnsupportedQueryError{Message: fmt.Sprintf("%s is numeric operator, but no right operand was provided", c.Operator)}
		}
		if !isNumeric(c.RightOp[0]) {
			return &util.UnsupportedQueryError{Message: fmt.Sprintf("%s is numeric operator, but the right operand %s is not numeric", c.Operator, c.RightOp[0])}
		}
	}
	if strings.ContainsRune(c.LeftOp, Separator) {
		// Suggest the most likely intended key: the last separator-split part.
		parts := strings.FieldsFunc(c.LeftOp, func(r rune) bool {
			return r == Separator
		})
		possibleKey := parts[len(parts)-1]
		return &util.UnsupportedQueryError{Message: fmt.Sprintf("separator %c is not allowed in %s with left operand \"%s\". Maybe you meant \"%s\"? Make sure if the separator is present in any right operand, that it is escaped with a backslash (\\)", Separator, c.Type, c.LeftOp, possibleKey)}
	}
	for _, op := range c.RightOp {
		if strings.ContainsRune(op, '\n') {
			return fmt.Errorf("%s with key \"%s\" has value \"%s\" contaning forbidden new line character", c.Type, c.LeftOp, op)
		}
	}
	return nil
}
// mergeCriteria appends c2 to c1, validating each criterion in c2 and
// rejecting duplicate left operands. Duplicates are counted across the union
// of c1 and c2, but only criteria present in c2 are reported — pre-existing
// duplicates within c1 alone are not re-checked here.
func mergeCriteria(c1 []Criterion, c2 []Criterion) ([]Criterion, error) {
	result := c1
	fieldQueryLeftOperands := make(map[string]int)
	labelQueryLeftOperands := make(map[string]int)
	// Count occurrences of each left operand per criterion type.
	for _, criterion := range append(c1, c2...) {
		if criterion.Type == FieldQuery {
			fieldQueryLeftOperands[criterion.LeftOp]++
		}
		if criterion.Type == LabelQuery {
			labelQueryLeftOperands[criterion.LeftOp]++
		}
	}
	for _, newCriterion := range c2 {
		leftOp := newCriterion.LeftOp
		// disallow duplicate label queries
		if count, ok := labelQueryLeftOperands[leftOp]; ok && count > 1 && newCriterion.Type == LabelQuery {
			return nil, &util.UnsupportedQueryError{Message: fmt.Sprintf("duplicate label query key: %s", newCriterion.LeftOp)}
		}
		// disallow duplicate field query keys
		if count, ok := fieldQueryLeftOperands[leftOp]; ok && count > 1 && newCriterion.Type == FieldQuery {
			return nil, &util.UnsupportedQueryError{Message: fmt.Sprintf("duplicate field query key: %s", newCriterion.LeftOp)}
		}
		if err := newCriterion.Validate(); err != nil {
			return nil, err
		}
	}
	result = append(result, c2...)
	return result, nil
}
// criteriaCtxKey is the private context key under which criteria are stored.
type criteriaCtxKey struct{}
// AddCriteria adds the given criteria to the context and returns an error if
// any of the criteria is not valid.
func AddCriteria(ctx context.Context, newCriteria ...Criterion) (context.Context, error) {
	merged, err := mergeCriteria(CriteriaForContext(ctx), newCriteria)
	if err != nil {
		return nil, err
	}
	return context.WithValue(ctx, criteriaCtxKey{}, merged), nil
}
// CriteriaForContext returns the criteria stored in the given context, or an
// empty slice when none have been added.
func CriteriaForContext(ctx context.Context) []Criterion {
	value := ctx.Value(criteriaCtxKey{})
	if value == nil {
		return []Criterion{}
	}
	return value.([]Criterion)
}
// ContextWithCriteria returns a new context with the given criteria,
// replacing (not merging with) any criteria already present.
func ContextWithCriteria(ctx context.Context, criteria []Criterion) context.Context {
	return context.WithValue(ctx, criteriaCtxKey{}, criteria)
}
// BuildCriteriaFromRequest builds criteria for the given request's query
// params (one param per supported criterion type) and returns an error if
// the query is not valid. The result is sorted by left operand.
func BuildCriteriaFromRequest(request *web.Request) ([]Criterion, error) {
	var criteria []Criterion
	for _, queryType := range supportedQueryTypes {
		queryValues := request.URL.Query().Get(string(queryType))
		querySegments, err := process(queryValues, queryType)
		if err != nil {
			return nil, err
		}
		// mergeCriteria also validates and rejects duplicate keys.
		if criteria, err = mergeCriteria(criteria, querySegments); err != nil {
			return nil, err
		}
	}
	sort.Sort(ByLeftOp(criteria))
	return criteria, nil
}
// ByLeftOp implements sort.Interface, ordering criteria lexicographically by
// their left operand.
type ByLeftOp []Criterion

func (c ByLeftOp) Len() int {
	return len(c)
}

func (c ByLeftOp) Less(i, j int) bool {
	return c[i].LeftOp < c[j].LeftOp
}

func (c ByLeftOp) Swap(i, j int) {
	c[i], c[j] = c[j], c[i]
}
// process parses a raw query string (e.g. `a = 1|b in [x||y]`) into criteria
// of the given type. It scans for an operator token surrounded by
// OperandSeparator; everything before it is the left operand, and findRightOp
// consumes the right operand up to the next un-escaped Separator.
func process(input string, criteriaType CriterionType) ([]Criterion, error) {
	var c []Criterion
	if input == "" {
		return c, nil
	}
	var leftOp string
	var operator Operator
	// j marks the start of the current (yet-unparsed) left operand.
	j := 0
	for i := 0; i < len(input); i++ {
		if leftOp != "" && operator != "" {
			// An operator was found at position i; skip past it and its
			// trailing OperandSeparator, then parse the right operand.
			remaining := input[i+len(operator)+1:]
			rightOp, offset, err := findRightOp(remaining, leftOp, operator, criteriaType)
			if err != nil {
				return nil, err
			}
			criterion := newCriterion(leftOp, operator, rightOp, criteriaType)
			if err := criterion.Validate(); err != nil {
				return nil, err
			}
			c = append(c, criterion)
			// Advance past the operator, the right operand, and the
			// Separator that terminated it.
			i += offset + len(operator) + len(string(Separator))
			j = i + 1
			leftOp = ""
			operator = ""
		} else {
			remaining := input[i:]
			// Look for " <op> " starting at position i; the left operand is
			// then everything between j and i.
			for _, op := range operators {
				if strings.HasPrefix(remaining, fmt.Sprintf("%c%s%c", OperandSeparator, op, OperandSeparator)) {
					leftOp = input[j:i]
					operator = op
					break
				}
			}
		}
	}
	if len(c) == 0 {
		return nil, fmt.Errorf("%s is not a valid %s", input, criteriaType)
	}
	return c, nil
}
// findRightOp consumes the right operand from remaining, which starts just
// after the operator. It returns the operand value(s), the number of bytes
// consumed (offset), and an error for malformed multivariate operands.
//
// Separator handling: a doubled Separator ("||") splits multivariate values,
// a backslash-escaped Separator is kept literally (the escape is removed),
// and a lone un-escaped Separator ends the operand. For multivariate
// operators the collected values must be wrapped in OpenBracket/CloseBracket.
// NOTE(review): remaining[offset-1] is read when a Separator is seen — this
// presumes a right operand never begins with Separator at offset 0; confirm
// with process' call sites.
func findRightOp(remaining string, leftOp string, operator Operator, criteriaType CriterionType) (rightOp []string, offset int, err error) {
	rightOpBuffer := strings.Builder{}
	for _, ch := range remaining {
		if ch == Separator {
			if offset+1 < len(remaining) && rune(remaining[offset+1]) == Separator && remaining[offset-1] != '\\' {
				// First of a doubled separator: close the current value.
				arg := rightOpBuffer.String()
				rightOp = append(rightOp, arg)
				rightOpBuffer.Reset()
			} else if rune(remaining[offset-1]) == Separator {
				// Second of a doubled separator: just skip it.
				offset++
				continue
			} else {
				if remaining[offset-1] != '\\' { // delimiter is not escaped - treat as separator
					arg := rightOpBuffer.String()
					rightOp = append(rightOp, arg)
					rightOpBuffer.Reset()
					break
				} else { // remove escaping symbol
					tmp := rightOpBuffer.String()[:offset-1]
					rightOpBuffer.Reset()
					rightOpBuffer.WriteString(tmp)
					rightOpBuffer.WriteRune(ch)
				}
			}
		} else {
			rightOpBuffer.WriteRune(ch)
		}
		offset++
	}
	// Input ended without a terminating separator: flush the final value.
	if rightOpBuffer.Len() > 0 {
		rightOp = append(rightOp, rightOpBuffer.String())
	}
	// Multivariate operands must be bracketed; strip the brackets here.
	if len(rightOp) > 0 && operator.IsMultiVariate() {
		firstElement := rightOp[0]
		if strings.IndexRune(firstElement, OpenBracket) == 0 {
			rightOp[0] = firstElement[1:]
		} else {
			return nil, -1, fmt.Errorf("operator %s for %s %s requires right operand to be surrounded in %c%c", operator, criteriaType, leftOp, OpenBracket, CloseBracket)
		}
		lastElement := rightOp[len(rightOp)-1]
		if rune(lastElement[len(lastElement)-1]) == CloseBracket {
			rightOp[len(rightOp)-1] = lastElement[:len(lastElement)-1]
		} else {
			return nil, -1, fmt.Errorf("operator %s for %s %s requires right operand to be surrounded in %c%c", operator, criteriaType, leftOp, OpenBracket, CloseBracket)
		}
	}
	// An empty operand still yields one (empty) value.
	if len(rightOp) == 0 {
		rightOp = append(rightOp, "")
	}
	return
}
// isNumeric reports whether str parses as a number (integer or float).
// strconv.ParseFloat accepts every string strconv.Atoi does, so the former
// integer-then-float double parse was redundant; a single float parse has
// identical accept/reject behavior.
func isNumeric(str string) bool {
	_, err := strconv.ParseFloat(str, 64)
	return err == nil
}
package climate
import (
"github.com/willbeason/worldproc/pkg/geodesic"
"math"
)
// Flux is the solar flux at the equator at noon.
// NOTE(review): presumably W/m^2 — confirm against the energy math in Simulate.
const Flux = 400

// SB is the Stefan-Boltzmann constant.
const SB = 5.670374419184429453970996731889231E-8

// WD is Wien's displacement constant.
//const WD = 2.8977719E-3

// ZeroCelsius is 0 degrees Celsius expressed in kelvin.
const ZeroCelsius = 273.15

// DefaultAir is the planet-wide mean air proportion (1.0 == mean).
const DefaultAir = 1.0

// Specific heats (energy per m^2 per K, per the Climate field docs) for
// various surface types and for air.
const DesertSpecificHeat = 100000
const CoastSpecificHeat = 400000
const OceanSpecificHeat = 1400000
const AirSpecificHeat = 100000
// Climate holds the thermal state of a single surface tile: energy stored in
// the land and in the air above it, plus the air flow through the tile.
type Climate struct {
	// LandSpecificHeat is the energy, in J/m^2 required to heat only the land
	// by 1 K.
	LandSpecificHeat float64

	// LandEnergy is the energy held by land.
	LandEnergy float64

	// Air is the proportion of air.
	// 1.0 is the mean across the planet.
	Air float64

	// AirEnergy is the energy held by the air.
	AirEnergy float64

	// AirVelocity is the magnitude and direction of air flowing through this
	// tile.
	AirVelocity geodesic.Vector
}
// LandTemperature returns the land temperature in kelvin.
func (t *Climate) LandTemperature() float64 {
	return t.LandEnergy / t.LandSpecificHeat
}

// AirTemperature returns the air temperature in kelvin.
func (t *Climate) AirTemperature() float64 {
	return t.AirEnergy / (t.Air * AirSpecificHeat)
}

// SetTemperature sets both land and air energy so that each reads back the
// given temperature in kelvin.
func (t *Climate) SetTemperature(kelvin float64) {
	t.LandEnergy = t.LandSpecificHeat * kelvin
	t.AirEnergy = t.Air * AirSpecificHeat * kelvin
}

// Pressure returns the air pressure relative to the planetary mean at
// 0 degrees Celsius (Air == 1.0 at AirTemperature == ZeroCelsius gives 1.0).
func (t *Climate) Pressure() float64 {
	return t.Air * t.AirTemperature() / ZeroCelsius
}
// Simulate advances this tile's thermal state by the given number of seconds
// under the given incoming solar flux, at the given latitude (radians,
// judging by the math.Cos usage) and altitude.
func (t *Climate) Simulate(flux float64, latitude float64, altitude float64, seconds float64) {
	incoming := flux * seconds
	// Land absorbs sunlight and cools down, but not air.
	// opacity must be _at least_ 0.5 at the poles
	dLatitude := -math.Cos(latitude) * 0.23
	opacity := 0.5 + (altitude / 3.0) + dLatitude
	opacity = math.Max(0.0, math.Min(1.0, opacity))
	// Stefan-Boltzmann radiative cooling, scaled by opacity.
	outgoing := seconds * math.Pow(t.LandTemperature(), 4) * opacity * SB
	t.LandEnergy += incoming - outgoing
	// Move towards equilibrium between land/air: redistribute the total
	// energy in proportion to the two heat capacities.
	invSpecificHeat := 1.0 / (t.Air*AirSpecificHeat + t.LandSpecificHeat)
	totalEnergy := t.AirEnergy + t.LandEnergy
	// deltaAirEnergy is the delta to AirEnergy that brings the system to equilibrium.
	deltaAirEnergy := totalEnergy*(t.Air*AirSpecificHeat)*invSpecificHeat - t.AirEnergy
	t.AirEnergy += deltaAirEnergy
	t.LandEnergy -= deltaAirEnergy
}
// yearMax simulates a 360-day year hour-by-hour for a tile with the given
// specific heat, starting at startTemp, and returns the maximum land
// temperature observed plus the land temperature at year's end.
// The flux model here (Flux * sin(declination)) is tuned for polar latitudes,
// where the sun angle tracks the seasonal declination.
func yearMax(landSpecificHeat, startTemp float64, latitude float64, maxAngle float64) (float64, float64) {
	max := startTemp
	c := &Climate{
		LandSpecificHeat: landSpecificHeat,
		Air:              DefaultAir,
	}
	c.SetTemperature(startTemp)
	for day := 0; day < 360; day++ {
		for hour := 0; hour < 24; hour++ {
			// Seasonal sun angle for this hour of the year.
			declination := maxAngle * math.Sin((float64(day)+float64(hour)/24)*math.Pi/180)
			flux := Flux * math.Sin(declination)
			flux = math.Max(0.0, flux) // no negative flux below the horizon
			c.Simulate(flux, latitude, 0.0, 3600)
			temp := c.LandTemperature()
			max = math.Max(temp, max)
		}
	}
	return max, c.LandTemperature()
}
// PoleEquilibrium bisects on the starting temperature to find a
// self-consistent polar year: one whose end temperature equals its start.
// It returns the yearly maximum land temperature of that equilibrium year.
// NOTE(review): the loop terminates only when the two bracketing maxima
// converge within 0.001, and the iteration counter i is incremented but never
// used to cap the loop — non-monotone behavior of yearMax could make this
// spin; consider adding a hard iteration limit.
func PoleEquilibrium(specificHeat, maxAngle float64) float64 {
	i := 0
	low := 0.0
	lowMax, _ := yearMax(specificHeat, low, math.Pi/2, maxAngle)
	high := 2 * ZeroCelsius
	highMax, _ := yearMax(specificHeat, high, math.Pi/2, maxAngle)
	for math.Abs(highMax-lowMax) > 0.001 {
		mid := (low + high) / 2.0
		max, end := yearMax(specificHeat, mid, math.Pi/2, maxAngle)
		// If the year cools overall, the equilibrium lies below mid.
		if end < mid {
			high = mid
			highMax = max
		} else {
			low = mid
			lowMax = max
		}
		i++
	}
	return highMax
}
// LowHigh simulates one day (144 ten-minute steps) at the given latitude,
// starting from the given noon air temperature, and returns the lowest and
// highest air temperatures seen. It recurses, restarting the day from the
// previous day's ending temperature, until consecutive days agree at noon
// within 0.001 — the diurnal cycle at equilibrium.
// NOTE(review): recursion depth is unbounded if the cycle fails to converge.
func LowHigh(specificHeat, latitude, startNoon float64) (float64, float64) {
	temp := startNoon
	cosLatitude := math.Cos(latitude)
	c := &Climate{
		LandSpecificHeat: specificHeat,
		Air:              DefaultAir,
	}
	c.SetTemperature(startNoon)
	lowest, highest := temp, temp
	for i := 0; i < 144; i++ {
		// Sun angle sweeps a full circle over the 144 steps, starting at noon.
		sunAngle := float64(i) * math.Pi / 72
		flux := Flux * math.Cos(sunAngle) * cosLatitude
		flux = math.Max(0.0, flux) // nighttime: no incoming flux
		c.Simulate(flux, latitude, 0.0, 600)
		temp = c.AirTemperature()
		if temp < lowest {
			lowest = temp
		}
		if temp > highest {
			highest = temp
		}
	}
	diff := startNoon - temp
	if math.Abs(diff) < 0.001 {
		return lowest, highest
	}
	return LowHigh(specificHeat, latitude, temp)
}
package histogram
import (
"math"
"sync"
"time"
tdigest "github.com/caio/go-tdigest"
)
// Histogram a quantile approximation data structure that buckets samples
// into time slices ("bins") of a configurable granularity.
type Histogram interface {
	// Update registers a new sample.
	Update(v float64)
	// Distributions returns samples for completed time slices and clears them.
	Distributions() []Distribution
	// Snapshot returns samples for completed time slices without clearing.
	Snapshot() []Distribution
	Count() uint64
	Quantile(q float64) float64
	Max() float64
	Min() float64
	Sum() float64
	Mean() float64
	Granularity() Granularity
}
// Option allows histogram customization at construction time.
type Option func(*histogramImpl)

// GranularityOption sets the time-slice granularity of the histogram.
func GranularityOption(g Granularity) Option {
	return func(args *histogramImpl) {
		args.granularity = g
	}
}

// Compression sets the t-digest compression of the histogram.
func Compression(c uint32) Option {
	return func(args *histogramImpl) {
		args.compression = c
	}
}

// MaxBins sets how many completed time-slice bins are retained.
func MaxBins(c int) Option {
	return func(args *histogramImpl) {
		args.maxBins = c
	}
}
// defaultHistogramImpl returns a histogram with the default configuration:
// 10 retained bins, minute granularity, t-digest compression 5.
func defaultHistogramImpl() *histogramImpl {
	return &histogramImpl{maxBins: 10, granularity: MINUTE, compression: 5}
}
// New creates a new Wavefront histogram, applying any provided options on
// top of the defaults (10 bins, minute granularity, compression 5).
func New(setters ...Option) Histogram {
	impl := defaultHistogramImpl()
	for _, apply := range setters {
		apply(impl)
	}
	return impl
}
// histogramImpl implements Histogram by keeping one "current" t-digest bin
// for the in-progress time slice and a bounded list of completed prior bins.
type histogramImpl struct {
	mutex              sync.Mutex  // guards all fields below
	priorTimedBinsList []*timedBin // completed bins, oldest first, at most maxBins
	currentTimedBin    *timedBin   // bin for the in-progress time slice

	granularity Granularity // time-slice width
	compression uint32      // t-digest compression
	maxBins     int         // cap on retained completed bins
}

// timedBin pairs a t-digest with the granularity-aligned timestamp of the
// time slice it covers.
type timedBin struct {
	tdigest   *tdigest.TDigest
	timestamp time.Time
}

// Distribution holds the samples and its timestamp.
type Distribution struct {
	Centroids []Centroid
	Timestamp time.Time
}
// Update registers a new sample in the histogram.
// The bin rotation happens before taking the lock because it locks the same
// mutex internally.
func (h *histogramImpl) Update(v float64) {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	h.currentTimedBin.tdigest.Add(v)
}
// Count returns the total number of samples on this histogram.
// NOTE(review): unlike Min/Sum/Mean, this only counts the current bin, not
// the retained prior bins — confirm whether that is intended.
func (h *histogramImpl) Count() uint64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	return h.currentTimedBin.tdigest.Count()
}
// Quantile returns the desired percentile estimation.
// NOTE(review): computed over the current bin only, not the prior bins.
func (h *histogramImpl) Quantile(q float64) float64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	return h.currentTimedBin.tdigest.Quantile(q)
}
// Max returns the maximum value of samples on this histogram.
func (h *histogramImpl) Max() float64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	// Seed with -Inf rather than math.SmallestNonzeroFloat64: the latter is
	// the smallest *positive* float, so a histogram containing only negative
	// samples previously reported a tiny positive maximum.
	max := math.Inf(-1)
	// Scan prior bins as well as the current one, mirroring Min/Sum/Mean,
	// which all aggregate over every retained bin.
	for _, bin := range append(h.priorTimedBinsList, h.currentTimedBin) {
		bin.tdigest.ForEachCentroid(func(mean float64, count uint32) bool {
			max = math.Max(max, mean)
			return true
		})
	}
	return max
}
// Min returns the minimum value of samples on this histogram, scanning the
// prior bins as well as the current one.
// NOTE(review): with no samples at all this returns math.MaxFloat64.
func (h *histogramImpl) Min() float64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	min := math.MaxFloat64
	for _, bin := range append(h.priorTimedBinsList, h.currentTimedBin) {
		bin.tdigest.ForEachCentroid(func(mean float64, count uint32) bool {
			min = math.Min(min, mean)
			return true
		})
	}
	return min
}
// Sum returns the sum of all values on this histogram (centroid mean times
// centroid count, across the prior bins and the current one).
func (h *histogramImpl) Sum() float64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	sum := float64(0)
	for _, bin := range append(h.priorTimedBinsList, h.currentTimedBin) {
		bin.tdigest.ForEachCentroid(func(mean float64, count uint32) bool {
			sum += mean * float64(count)
			return true
		})
	}
	return sum
}
// Mean returns the mean of samples on this histogram, across the prior bins
// and the current one.
// NOTE(review): with no samples at all this divides 0 by 0 and returns NaN.
func (h *histogramImpl) Mean() float64 {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	t := float64(0) // weighted sum of centroid means
	c := uint32(0)  // total sample count
	for _, bin := range append(h.priorTimedBinsList, h.currentTimedBin) {
		bin.tdigest.ForEachCentroid(func(mean float64, count uint32) bool {
			t += mean * float64(count)
			c += count
			return true
		})
	}
	return t / float64(c)
}
// Granularity returns the time-slice granularity configured for this histogram.
func (h *histogramImpl) Granularity() Granularity {
	return h.granularity
}

// Snapshot returns a copy of all samples on completed time slices without
// clearing them.
func (h *histogramImpl) Snapshot() []Distribution {
	return h.distributions(false)
}

// Distributions returns all samples on completed time slices, and clears
// them from the histogram.
func (h *histogramImpl) Distributions() []Distribution {
	return h.distributions(true)
}
// distributions converts each completed (prior) bin into a Distribution,
// optionally clearing the prior-bin list afterwards. The current bin is
// never included — it is still accumulating.
func (h *histogramImpl) distributions(clean bool) []Distribution {
	h.rotateCurrentTDigestIfNeedIt()
	h.mutex.Lock()
	defer h.mutex.Unlock()
	distributions := make([]Distribution, len(h.priorTimedBinsList))
	for idx, bin := range h.priorTimedBinsList {
		var centroids []Centroid
		bin.tdigest.ForEachCentroid(func(mean float64, count uint32) bool {
			centroids = append(centroids, Centroid{Value: mean, Count: int(count)})
			return true
		})
		distributions[idx] = Distribution{Timestamp: bin.timestamp, Centroids: centroids}
	}
	if clean {
		// Keep the backing array but drop all completed bins.
		h.priorTimedBinsList = h.priorTimedBinsList[:0]
	}
	return distributions
}
// rotateCurrentTDigestIfNeedIt lazily creates the current bin, and when the
// clock has moved into a new time slice, retires the current bin into the
// prior-bin list (evicting the oldest bin beyond maxBins) and starts a fresh
// one. Called at the top of every public accessor.
func (h *histogramImpl) rotateCurrentTDigestIfNeedIt() {
	h.mutex.Lock()
	defer h.mutex.Unlock()
	if h.currentTimedBin == nil {
		h.currentTimedBin = h.newTimedBin()
	} else if h.currentTimedBin.timestamp != h.now() {
		h.priorTimedBinsList = append(h.priorTimedBinsList, h.currentTimedBin)
		// Enforce the retention cap by dropping the oldest bin.
		if len(h.priorTimedBinsList) > h.maxBins {
			h.priorTimedBinsList = h.priorTimedBinsList[1:]
		}
		h.currentTimedBin = h.newTimedBin()
	}
}
// now returns the current time truncated to the configured granularity,
// i.e. the timestamp of the bin the present moment falls into.
func (h *histogramImpl) now() time.Time {
	return time.Now().Truncate(h.granularity.Duration())
}

// newTimedBin creates an empty t-digest bin stamped with the current
// granularity-aligned time.
// NOTE(review): the error from tdigest.New is ignored — presumably it can
// only fail for an invalid compression; confirm with the go-tdigest docs.
func (h *histogramImpl) newTimedBin() *timedBin {
	td, _ := tdigest.New(tdigest.Compression(h.compression))
	return &timedBin{timestamp: h.now(), tdigest: td}
}
package matrix
import (
"fmt"
)
// Dense is a row-major dense matrix view over a flat float64 buffer.
// Slicing operations (Rows/Cols) share the buffer; stride lets a view
// address a sub-rectangle of a larger matrix.
type Dense struct {
	v      []float64 // [row, row, ..., row]
	numrow int
	numcol int
	stride int // The distance between vertically adjacent elements.
}
// AsDense makes a new dense matrix that refers to v (no copy). Panics when
// v is too short to hold numrow*numcol elements.
func AsDense(numrow, numcol int, v []float64) Dense {
	need := numrow * numcol
	if need > len(v) {
		panic("matrix: AsDense: numrow*numcol > len(v)")
	}
	return Dense{
		v:      v[:need],
		numrow: numrow,
		numcol: numcol,
		stride: numcol,
	}
}
// MakeDense allocates a new dense matrix and initializes its first elements
// to the values specified by iv.
func MakeDense(numrow, numcol int, iv ...float64) Dense {
	backing := make([]float64, numrow*numcol)
	copy(backing, iv)
	return AsDense(numrow, numcol, backing)
}
// IsValid reports whether d represents valid matrix value (non-empty buffer
// and positive dimensions/stride; the zero Dense is invalid).
func (d Dense) IsValid() bool {
	return len(d.v) > 0 && d.numrow > 0 && d.numcol > 0 && d.stride > 0
}
// SetAll sets all elements of d to a, row by row (only the numcol elements
// of each stride-spaced row are touched).
func (d Dense) SetAll(a float64) {
	for i := 0; i < d.numrow; i++ {
		row := d.v[i*d.stride : i*d.stride+d.numcol]
		for k := range row {
			row[k] = a
		}
	}
}
// SetIdentity sets elements of d to create the identity matrix (panics if d
// is not square).
func (d Dense) SetIdentity() {
	if d.numrow != d.numcol {
		panic("matrix: SetI on non square matrix")
	}
	d.SetAll(0)
	// Stepping by stride+1 walks the main diagonal.
	for i := 0; i < len(d.v); i += d.stride + 1 {
		d.v[i] = 1
	}
}
// Size returns dimensions of the matrix (rows, cols).
func (d Dense) Size() (int, int) {
	return d.numrow, d.numcol
}

// NumRow returns number of rows.
func (d Dense) NumRow() int {
	return d.numrow
}

// NumCol returns number of columns.
func (d Dense) NumCol() int {
	return d.numcol
}

// Stride returns distance between vertically adjacent elements.
func (d Dense) Stride() int {
	return d.stride
}

// Elems returns internal buffer of elements. Be careful when use returned slice.
// For example you can not assume that its length corresponds to dimensions of d.
func (d Dense) Elems() []float64 {
	return d.v
}
// Get returns the element from row i, column k (no bounds check beyond the
// slice's own; an out-of-range k on a strided view reads a neighboring row).
func (d Dense) Get(i, k int) float64 {
	return d.v[i*d.stride+k]
}

// Set sets the element in row i and column k.
func (d Dense) Set(i, k int, a float64) {
	d.v[i*d.stride+k] = a
}
// Rows returns a slice of a matrix that contains rows from start to stop-1.
// The result shares d's backing storage. Panics when start > stop,
// start < 0, or stop > d.numrow.
func (d Dense) Rows(start, stop int) Dense {
	if start > stop || start < 0 || stop > d.numrow {
		panic("matrix: bad indexes for horizontal slice")
	}
	return Dense{
		v:      d.v[start*d.stride : stop*d.stride],
		numrow: stop - start,
		numcol: d.numcol,
		stride: d.stride,
	}
}
// Cols returns a slice of a matrix that contains columns from start to stop-1.
// The result shares d's backing storage and keeps d's stride. Panics when
// start > stop, start < 0, or stop > d.numcol.
func (d Dense) Cols(start, stop int) Dense {
	if start > stop || start < 0 || stop > d.numcol {
		panic("matrix: bad indexes for vertical slice")
	}
	return Dense{
		// Buffer runs from the first selected element of row 0 through the
		// last selected element of the final row.
		v:      d.v[start : (d.numrow-1)*d.stride+stop],
		numrow: d.numrow,
		numcol: stop - start,
		stride: d.stride,
	}
}
// AsRow returns a horizontal vector (1 x len) that refers to d.
// Panics if numcol != stride, i.e. if d is a strided view whose rows are not
// contiguous in the buffer.
func (d Dense) AsRow() Dense {
	if d.numcol != d.stride {
		panic("matrix: AsRow: numcol != stride")
	}
	return Dense{v: d.v, numrow: 1, numcol: len(d.v), stride: len(d.v)}
}

// AsCol returns a vertical vector (len x 1) that refers to d.
// Panics if numcol != stride (same contiguity requirement as AsRow).
func (d Dense) AsCol() Dense {
	if d.numcol != d.stride {
		panic("matrix: AsCol: numcol != stride")
	}
	return Dense{v: d.v, numrow: len(d.v), numcol: 1, stride: 1}
}
// Equal returns true if the matrices have the same dimensions and identical
// elements (strides may differ).
func (d Dense) Equal(a Dense) bool {
	if d.numrow != a.numrow || d.numcol != a.numcol {
		return false
	}
	for i := 0; i < d.numrow; i++ {
		dRow := d.v[i*d.stride : i*d.stride+d.numcol]
		aRow := a.v[i*a.stride : i*a.stride+d.numcol]
		for k, dv := range dRow {
			if dv != aRow[k] {
				return false
			}
		}
	}
	return true
}
// Format implements fmt.Formatter. The verbs 'f', 'e', 'G', 'E', and 'F'
// select the per-element float format; every other verb (including 'v')
// falls through to %g. Elements within a row are comma-separated, rows are
// comma-and-newline separated, and the whole matrix is wrapped in braces.
func (d Dense) Format(f fmt.State, c rune) {
	format := "%g"
	switch c {
	case 'f':
		format = "%f"
	case 'e':
		format = "%e"
	case 'G':
		format = "%G"
	case 'E':
		format = "%E"
	case 'F':
		format = "%F"
	}
	lastrow, lastcol := d.Size()
	lastrow--
	lastcol--
	f.Write([]byte{'{'})
	for i := 0; i <= lastrow; i++ {
		if i > 0 {
			f.Write([]byte{'\n'})
		}
		for k := 0; k <= lastcol; k++ {
			fmt.Fprintf(f, format, d.Get(i, k))
			if k < lastcol {
				f.Write([]byte{','})
			}
		}
		if i < lastrow {
			f.Write([]byte{','})
		}
	}
	f.Write([]byte{'}'})
}
// Utils

// checkDim panics unless a has the same dimensions as d; used by operations
// that require equal shapes.
func (d Dense) checkDim(a Dense) {
	if d.numrow != a.numrow || d.numcol != a.numcol {
		panic("matrix: dimensions not equal")
	}
}
package rangeset
import (
"fmt"
"github.com/biogo/store/step"
)
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// _bool is a boolean that satisfies step.Equaler so it can be stored in a
// step.Vector.
type _bool bool

const (
	_true  = _bool(true)
	_false = _bool(false)
)

// Equal reports whether e holds the same _bool value as b.
func (b _bool) Equal(e step.Equaler) bool {
	return b == e.(_bool)
}
// RangeSet acts like a bitvector where the interval values for true can be extracted.
type RangeSet struct {
	v *step.Vector
}

// Range is 0-based half-open.
type Range struct {
	Start, End int
}

// String formats the range as "Range(start-end)".
func (i Range) String() string {
	return fmt.Sprintf("Range(%d-%d)", i.Start, i.End)
}
// Ranges returns the intervals that are set to true in the RangeSet,
// scanning the underlying step vector one step at a time.
func (b *RangeSet) Ranges() []Range {
	var posns []Range
	var start, end int
	var val step.Equaler
	var err error
	for end < b.v.Len() {
		start, end, val, err = b.v.StepAt(end)
		if err != nil {
			// Previously an error fell into an empty else-branch, leaving
			// `end` unchanged and spinning this loop forever. Stop scanning
			// instead.
			break
		}
		if val.(_bool) {
			posns = append(posns, Range{start, end})
		}
	}
	return posns
}
// SetRange sets the values in [start, end) to true.
func (b *RangeSet) SetRange(start, end int) {
	b.v.SetRange(start, end, _true)
}

// ClearRange sets the values in [start, end) to false.
func (b *RangeSet) ClearRange(start, end int) {
	b.v.SetRange(start, end, _false)
}
// New returns a new RangeSet covering [start, end), with every position
// initially false. The underlying step.Vector is marked Relaxed so that
// later SetRange/ClearRange calls may extend it past the initial bounds.
func New(start, end int) (*RangeSet, error) {
	s, err := step.New(start, end, _false)
	if err == nil {
		s.Relaxed = true
	}
	return &RangeSet{v: s}, err
}
// operation selects the set-combining behavior used by combine.
type operation int

const (
	intersectionOp operation = iota
	unionOp
	differenceOp
)

// Intersection returns a new RangeSet with intersecting regions from a and b.
func Intersection(a, b *RangeSet) *RangeSet {
	return combine(a, b, intersectionOp)
}

// Union returns a new RangeSet with the union of regions from a and b.
func Union(a, b *RangeSet) *RangeSet {
	return combine(a, b, unionOp)
}

// Difference returns a new RangeSet with regions in a that are absent from b.
func Difference(a, b *RangeSet) *RangeSet {
	return combine(a, b, differenceOp)
}
// combine builds a new RangeSet from a and b according to op. The result is
// sized to the larger of the two inputs: it is first seeded with a's true
// runs, then b's runs are walked and merged in per op.
func combine(a, b *RangeSet, op operation) *RangeSet {
	if a.v.Zero != b.v.Zero {
		panic("intersection must have same Zero state for a and b")
	}
	c, err := New(0, max(a.v.Len(), b.v.Len()))
	if err != nil {
		panic(err)
	}
	var start, end int
	var val step.Equaler
	// Seed the result with a's true runs. The StepAt error is discarded;
	// end < Len guards the loop, so in-range queries are expected to succeed.
	for end < a.v.Len() {
		start, end, val, _ = a.v.StepAt(end)
		if !val.(_bool) {
			continue
		}
		c.SetRange(start, end)
	}
	start, end = 0, 0
	for end < b.v.Len() {
		// NOTE(review): err is assigned here but never checked — a failing
		// StepAt would stall the loop; confirm whether that can occur.
		start, end, val, err = b.v.StepAt(end)
		if op == intersectionOp && !val.(_bool) {
			// Positions false in b can never be in the intersection.
			c.ClearRange(start, end)
			continue
		}
		if op == differenceOp && val.(_bool) {
			// Positions true in b are removed from a's regions.
			c.ClearRange(start, end)
			continue
		}
		if op == unionOp {
			if val.(_bool) {
				c.SetRange(start, end)
			}
			continue
		}
		// Remaining cases: intersection over a true run of b, or difference
		// over a false run of b. Either way a position survives only if it
		// was already set from a.
		c.v.ApplyRange(start, end, func(e step.Equaler) step.Equaler {
			if op == intersectionOp && (e.(_bool) && val.(_bool)) {
				return _bool(true)
			} else if op == differenceOp && (e.(_bool) && !val.(_bool)) {
				return _bool(true)
			}
			return _bool(false)
		})
	}
	return c
} | rangeset/rangeset.go | 0.760828 | 0.402774 | rangeset.go | starcoder
package matchers
import (
"github.com/onsi/gomega/types"
"github.com/vps2/agouti/matchers/internal"
)
// HaveText passes when the expected text is equal to the actual element text.
// This matcher will fail if the provided selection refers to more than one element.
func HaveText(text string) types.GomegaMatcher {
	return &internal.ValueMatcher{Method: "Text", Property: "text", Expected: text}
}

// MatchText passes when the expected regular expression matches the actual element text.
// This matcher will fail if the provided selection refers to more than one element.
func MatchText(regexp string) types.GomegaMatcher {
	return &internal.MatchTextMatcher{Regexp: regexp}
}

// HaveCount passes when the expected element count is equal to the actual
// number of elements in the selection.
func HaveCount(count int) types.GomegaMatcher {
	return &internal.ValueMatcher{Method: "Count", Property: "element count", Expected: count}
}

// HaveAttribute passes when the expected attribute and value are present on the element.
// This matcher will fail if the provided selection refers to more than one element.
func HaveAttribute(attribute string, value string) types.GomegaMatcher {
	return &internal.HaveAttributeMatcher{ExpectedAttribute: attribute, ExpectedValue: value}
}

// HaveCSS passes when the expected CSS property and value are present on the element.
// This matcher only matches exact, calculated CSS values, though there is support for parsing colors.
// Example: "blue" and "#00f" will both match rgba(0, 0, 255, 1)
// This matcher will fail if the provided selection refers to more than one element.
func HaveCSS(property string, value string) types.GomegaMatcher {
	return &internal.HaveCSSMatcher{ExpectedProperty: property, ExpectedValue: value}
}

// BeSelected passes when the provided selection refers to form elements that are selected.
// Examples: a checked <input type="checkbox" />, or the selected <option> in a <select>
// This matcher will fail if any of the selection's form elements are not selected.
func BeSelected() types.GomegaMatcher {
	return &internal.BooleanMatcher{Method: "Selected", Property: "selected"}
}

// BeVisible passes when the selection refers to elements that are displayed on the page.
// This matcher will fail if any of the selection's elements are not visible.
func BeVisible() types.GomegaMatcher {
	return &internal.BooleanMatcher{Method: "Visible", Property: "visible"}
}

// BeEnabled passes when the selection refers to form elements that are enabled.
// This matcher will fail if any of the selection's form elements are not enabled.
func BeEnabled() types.GomegaMatcher {
	return &internal.BooleanMatcher{Method: "Enabled", Property: "enabled"}
}

// BeActive passes when the selection refers to the active (focused) page element.
func BeActive() types.GomegaMatcher {
	return &internal.BooleanMatcher{Method: "Active", Property: "active"}
}

// BeFound passes when the provided selection refers to one or more elements on the page.
func BeFound() types.GomegaMatcher {
	return &internal.BeFoundMatcher{}
}

// EqualElement passes when the expected selection refers to the same element as the provided
// actual selection. This matcher will fail if either selection refers to more than one element.
func EqualElement(comparable interface{}) types.GomegaMatcher {
	return &internal.EqualElementMatcher{ExpectedSelection: comparable}
}
package reflect
import (
r "reflect"
xr "github.com/cosmos72/gomacro/xreflect"
)
type none struct{}
var (
Nil = r.Value{}
None = r.ValueOf(none{}) // used to indicate "no value"
TypeOfInt = r.TypeOf(int(0))
TypeOfInt8 = r.TypeOf(int8(0))
TypeOfInt16 = r.TypeOf(int16(0))
TypeOfInt32 = r.TypeOf(int32(0))
TypeOfInt64 = r.TypeOf(int64(0))
TypeOfUint = r.TypeOf(uint(0))
TypeOfUint8 = r.TypeOf(uint8(0))
TypeOfUint16 = r.TypeOf(uint16(0))
TypeOfUint32 = r.TypeOf(uint32(0))
TypeOfUint64 = r.TypeOf(uint64(0))
TypeOfUintptr = r.TypeOf(uintptr(0))
TypeOfFloat32 = r.TypeOf(float32(0))
TypeOfFloat64 = r.TypeOf(float64(0))
TypeOfComplex64 = r.TypeOf(complex64(0))
TypeOfComplex128 = r.TypeOf(complex128(0))
TypeOfBool = r.TypeOf(false)
TypeOfString = r.TypeOf("")
)
// Category collapses a reflect.Kind into its broad numeric category: every
// signed integer kind maps to Int, every unsigned kind (including Uintptr)
// to Uint, floats to Float64 and complexes to Complex128. All other kinds
// map to themselves.
func Category(k r.Kind) r.Kind {
	switch k {
	case r.Int8, r.Int16, r.Int32, r.Int64:
		return r.Int
	case r.Uint8, r.Uint16, r.Uint32, r.Uint64, r.Uintptr:
		return r.Uint
	case r.Float32:
		return r.Float64
	case r.Complex64:
		return r.Complex128
	default:
		// Int, Uint, Float64 and Complex128 are already canonical; every
		// non-numeric kind is its own category.
		return k
	}
}

// IsCategory reports whether Category(k) equals any of the given categories.
func IsCategory(k r.Kind, categories ...r.Kind) bool {
	cat := Category(k)
	for _, want := range categories {
		if cat == want {
			return true
		}
	}
	return false
}
// IsOptimizedKind returns true if fast interpreter expects optimized
// expressions for the given Kind: bool, string and every fixed-size
// numeric kind.
func IsOptimizedKind(k r.Kind) bool {
	switch k {
	case r.Bool, r.String,
		r.Int, r.Int8, r.Int16, r.Int32, r.Int64,
		r.Uint, r.Uint8, r.Uint16, r.Uint32, r.Uint64, r.Uintptr,
		r.Float32, r.Float64, r.Complex64, r.Complex128:
		return true
	default:
		return false
	}
}
var kindToType = [...]r.Type{
r.Bool: TypeOfBool,
r.Int: TypeOfInt,
r.Int8: TypeOfInt8,
r.Int16: TypeOfInt16,
r.Int32: TypeOfInt32,
r.Int64: TypeOfInt64,
r.Uint: TypeOfUint,
r.Uint8: TypeOfUint8,
r.Uint16: TypeOfUint16,
r.Uint32: TypeOfUint32,
r.Uint64: TypeOfUint64,
r.Uintptr: TypeOfUintptr,
r.Float32: TypeOfFloat32,
r.Float64: TypeOfFloat64,
r.Complex64: TypeOfComplex64,
r.Complex128: TypeOfComplex128,
r.String: TypeOfString,
}
// KindToType returns the canonical reflect.Type for a predeclared basic
// kind, or nil when k has no entry in the kindToType table (composite
// kinds, and kinds past the table's length).
func KindToType(k r.Kind) r.Type {
	if int(k) >= len(kindToType) {
		return nil
	}
	return kindToType[k]
}
// ConvertValue converts a value to type to and returns the converted value.
// It extends reflect.Value.Convert by allowing conversions from/to complex
// numbers: a real number converted to a complex type gets a zero imaginary
// part, and a complex number converted to a real type keeps only its real
// part. It does not check for overflows or truncation.
func ConvertValue(v r.Value, to r.Type) r.Value {
	t := Type(v)
	if t == to {
		return v
	}
	if !t.ConvertibleTo(to) {
		// reflect.Value does not allow conversions from/to complex types,
		// so bridge the gap manually before the final Convert call.
		k := v.Kind()
		kto := to.Kind()
		if IsCategory(kto, r.Complex128) {
			if IsCategory(k, r.Int, r.Uint, r.Float64) {
				temp := v.Convert(TypeOfFloat64).Float()
				v = r.ValueOf(complex(temp, 0.0))
			}
		} else if IsCategory(k, r.Complex128) {
			// BUG FIX: the original tested k here (the source kind, already
			// known to be complex) instead of kto, so the complex->real
			// branch was dead code and such conversions always panicked.
			if IsCategory(kto, r.Int, r.Uint, r.Float64) {
				temp := real(v.Complex())
				v = r.ValueOf(temp)
			}
		}
	}
	return v.Convert(to)
}
// PackValues normalizes a (first value, slice) pair into a slice: when the
// slice is empty but val0 carries a value (i.e. is not the None sentinel),
// the result is a one-element slice holding val0.
func PackValues(val0 r.Value, values []r.Value) []r.Value {
	if len(values) > 0 || val0 == None {
		return values
	}
	return []r.Value{val0}
}
// PackTypes normalizes a (first type, slice) pair into a slice: when the
// slice is empty but typ0 is non-nil, the result is a one-element slice
// holding typ0.
func PackTypes(typ0 xr.Type, types []xr.Type) []xr.Type {
	if len(types) > 0 || typ0 == nil {
		return types
	}
	return []xr.Type{typ0}
}
// UnpackValues returns the first value of vals (or the None sentinel when
// vals is empty) together with vals itself.
func UnpackValues(vals []r.Value) (r.Value, []r.Value) {
	if len(vals) == 0 {
		return None, vals
	}
	return vals[0], vals
}
// Interface is a zero-value-safe version of reflect.Value.Interface: it
// returns nil for invalid values, for values that cannot be interfaced
// (e.g. obtained from unexported fields), and for the None sentinel.
func Interface(v r.Value) interface{} {
	if v.IsValid() && v.CanInterface() && v != None {
		return v.Interface()
	}
	return nil
}
// Type is a zero-value-safe version of reflect.Value.Type: it returns nil
// for invalid values and for the None sentinel.
func Type(value r.Value) r.Type {
	if value.IsValid() && value != None {
		return value.Type()
	}
	return nil
}
// IsNillableKind reports whether values of kind k can be nil: channels,
// funcs, interfaces, maps, pointers and slices — plus Invalid, since the
// untyped nil itself has no kind.
func IsNillableKind(k r.Kind) bool {
	switch k {
	case r.Invalid, // nil is nillable...
		r.Chan, r.Func, r.Interface, r.Map, r.Ptr, r.Slice:
		return true
	default:
		return false
	}
} | vendor/github.com/cosmos72/gomacro/base/reflect/reflect.go | 0.559531 | 0.470858 | reflect.go | starcoder
package mandira
import (
"fmt"
"strconv"
)
/* Parser for the extended features in Mandira.
word = ([a-zA-Z1-9]+)
binop = <|<=|>|>=|!=|==
comb = or|and
unary = not
filter = |
variable = word
string = " .* "
atom = variable | string | word
funcexpr = word [( atom[, atom...] )]
varexpr = variable [|funcexpr...]
Conditional logic is mostly as expected, with operators of the same precedence
being computed from left to right.
They are, from low to high: binops, combs, unary, parens
In the future, "and" may be higher priority than "or".
*/
// A lookup expression is a naked word which will be looked up in the context at render time
type lookupExpr struct {
name string
}
// A varExpr is a lookupExpr followed by zero or more funcExprs
type varExpr struct {
exprs []interface{}
}
// A func expression has a function name to be looked up in the filter list
// at render time and a list of arguments, which are varExprs or literals
type funcExpr struct {
name string
arguments []interface{}
}
// A cond is a unary condition with a single value and optional negation
type cond struct {
not bool
expr interface{}
}
// A conditional is a n-ary conditional with n opers and n+1 expressions, which
// can be conds or conditionals
type conditional struct {
not bool
opers []string
exprs []interface{}
}
// tokenList is a list of tokens with a cursor (p) and a run marker (run).
// The struct is reused in two phases: during tokenizing, p indexes into the
// []byte being scanned and run marks the start of the current token; during
// parsing, p indexes into tokens and run is unused.
type tokenList struct {
	tokens []string
	p      int
	run    int
}

// Remaining returns the number of tokens not yet consumed.
func (t *tokenList) Remaining() int {
	return len(t.tokens) - t.p
}

// Next consumes and returns the next token. Returns "" if there are none left.
func (t *tokenList) Next() string {
	if t.p == len(t.tokens) {
		return ""
	}
	t.p++
	return t.tokens[t.p-1]
}

// Peek returns the current token without consuming it. Returns "" if there
// are none left.
func (t *tokenList) Peek() string {
	if t.p == len(t.tokens) {
		return ""
	}
	return t.tokens[t.p]
}

// Prev steps back to the previous token and returns it.
// NOTE(review): with p == 0 this returns tokens[0], and it panics when the
// token list is empty — confirm callers never do that.
func (t *tokenList) Prev() string {
	if t.p > 0 {
		t.p--
	}
	return t.tokens[t.p]
}
type parserError struct {
tokens *tokenList
message string
}
func (p *parserError) Error() string {
return fmt.Sprintf(`%s: "%s" in %v`, p.message, p.tokens.Peek(), p.tokens)
}
// parseAtom interprets a single token as a literal when possible: a quoted
// token becomes a string (surrounding quotes stripped), and a numeric token
// becomes an int64 or float64. Anything else is a variable lookup.
func parseAtom(token string) interface{} {
	if token[0] == '"' {
		return token[1 : len(token)-1]
	}
	if i, err := strconv.ParseInt(token, 10, 64); err == nil {
		return i
	}
	if f, err := strconv.ParseFloat(token, 64); err == nil {
		return f
	}
	return &lookupExpr{token}
}
// parse a value, which is a literal or a variable expression
func parseValue(tokens *tokenList) (interface{}, error) {
tok := tokens.Next()
if len(tok) == 0 {
return nil, &parserError{tokens, "Expected a value, found nothing"}
}
try := parseAtom(tok)
/* if this wasn't a lookupExpr, then it's a literal */
if _, ok := try.(*lookupExpr); !ok {
return try, nil
}
tokens.Prev()
varexp, err := parseVarExpression(tokens)
return varexp, err
}
// parseCond parses a single value and wraps it in a unary cond so the
// caller can negate it.
func parseCond(tokens *tokenList) (*cond, error) {
	expr, err := parseValue(tokens)
	return &cond{expr: expr}, err
}
// Parse a conditional expression, recurse each time a paren is encountered
func parseCondition(tokens *tokenList) (*conditional, error) {
c := &conditional{}
negated := false
expectCond := true
for tok := tokens.Next(); len(tok) > 0; tok = tokens.Next() {
switch tok {
case "(":
if !expectCond {
return c, &parserError{tokens, "Expected an operator, not a " + tok}
}
expr, err := parseCondition(tokens)
if err != nil {
return c, err
}
expr.not = negated
c.exprs = append(c.exprs, expr)
negated = false
expectCond = false
case "not":
if !expectCond {
return c, &parserError{tokens, "Expected an operator, not a " + tok}
}
negated = !negated
case ")":
if expectCond {
return c, &parserError{tokens, "Expected a condition, not a " + tok}
}
return c, nil
case "or", "and", ">", "<", "<=", ">=", "==", "!=":
if expectCond {
return c, &parserError{tokens, "Expected a condition, not an operator " + tok}
}
c.opers = append(c.opers, tok)
expectCond = true
default:
if !expectCond {
return c, &parserError{tokens, "Expected an operator, not " + tok}
}
tokens.Prev()
expr, err := parseCond(tokens)
if err != nil {
return c, err
}
expr.not = negated
c.exprs = append(c.exprs, expr)
// reset everything
expectCond = false
negated = false
}
}
return c, nil
}
// parseFuncExpression parses a filter invocation (everything after a "|"):
// a function name, optionally followed by a parenthesized, comma-separated
// list of atom arguments.
func parseFuncExpression(tokens *tokenList) (*funcExpr, error) {
	fe := &funcExpr{}
	fe.name = tokens.Next()
	if len(fe.name) == 0 {
		return fe, &parserError{tokens, "Expected filter name, got nil"}
	}
	if tokens.Peek() != "(" {
		// Bare filter with no argument list.
		return fe, nil
	}
	tokens.Next() // consume "("
	// BUG FIX: the original fed ")" to parseAtom on an empty argument list
	// ("f()"), producing a bogus lookup argument and then a parse error.
	if tokens.Peek() == ")" {
		tokens.Next()
		return fe, nil
	}
	for tok := tokens.Next(); len(tok) > 0; tok = tokens.Next() {
		fe.arguments = append(fe.arguments, parseAtom(tok))
		tok = tokens.Next()
		if tok == ")" {
			break
		}
		if tok != "," {
			return fe, &parserError{tokens, "Expected comma (,)"}
		}
	}
	return fe, nil
}
// parse a variable expression, which is a lookup + 0 or more func exprs
func parseVarExpression(tokens *tokenList) (*varExpr, error) {
expr := &varExpr{}
tok := tokens.Next()
if len(tok) == 0 {
return expr, &parserError{tokens, "Empty expression"}
}
// the first token is definitely a variable
expr.exprs = append(expr.exprs, &lookupExpr{tok})
tok = tokens.Next()
if tok != "|" && tok != "" {
tokens.Prev()
return expr, nil
}
for len(tok) > 0 {
if tok == "|" {
e, err := parseFuncExpression(tokens)
if err != nil {
return expr, err
}
expr.exprs = append(expr.exprs, e)
tok = tokens.Next()
} else if tok == "" {
return expr, nil
} else {
tokens.Prev()
return expr, nil
}
}
return expr, nil
}
// parseVarElement tokenizes and parses a variable element such as
// "name|filter(arg)", returning its AST node.
func parseVarElement(s string) (*varElement, error) {
	elem := &varElement{}
	tokens, err := tokenize(s)
	if err != nil {
		return elem, err
	}
	expr, err := parseVarExpression(&tokenList{tokens, 0, 0})
	if err == nil {
		elem.expr = expr
	}
	return elem, err
}
// Parse a "conditional element", which returns a conditional section element (AST)
func parseCondElement(s string) (*sectionElement, error) {
var elem = §ionElement{}
tokens, err := tokenize(s)
if err != nil {
return elem, err
}
expr, err := parseCondition(&tokenList{tokens, 0, 0})
if err != nil {
return elem, err
}
elem.expr = expr
return elem, nil
}
// tokenize splits an expression into tokens, returning the list or an
// error. Whitespace separates tokens; '<' and '>' may pair with a following
// '=' to form a two-byte operator; '!' and '=' must pair with '=';
// double-quoted strings (with backslash escapes) become a single token that
// keeps its quotes; '|', '(', ')' and ',' are always single-byte tokens.
func tokenize(c string) ([]string, error) {
	b := []byte(c)
	tn := tokenList{[]string{}, 0, 0}
	for ; tn.p < len(b); tn.p++ {
		switch b[tn.p] {
		case ' ', '\t':
			// Whitespace: flush any pending token.
			if tn.run < tn.p {
				tn.tokens = append(tn.tokens, string(b[tn.run:tn.p]))
			}
			tn.run = tn.p + 1
		/* tokens which can be singular or double */
		case '<', '>':
			if tn.run < tn.p {
				tn.tokens = append(tn.tokens, string(b[tn.run:tn.p]))
			}
			if tn.p+1 < len(b) && b[tn.p+1] == '=' {
				tn.tokens = append(tn.tokens, string(b[tn.p:tn.p+2]))
				tn.p++
			} else {
				tn.tokens = append(tn.tokens, string(b[tn.p]))
			}
			tn.run = tn.p + 1
		/* tokens which must be double */
		case '!', '=':
			if tn.run < tn.p {
				tn.tokens = append(tn.tokens, string(b[tn.run:tn.p]))
			}
			if tn.p+1 < len(b) && b[tn.p+1] == '=' {
				tn.tokens = append(tn.tokens, string(b[tn.p:tn.p+2]))
				tn.p++
			} else {
				// A lone '!' or '=' is not a valid operator.
				return tn.tokens, parseError{tn.p, "invalid token: " + string(b[tn.p])}
			}
			tn.run = tn.p + 1
		case '"':
			// Quoted string: scan to the closing unescaped quote; the
			// surrounding quotes are preserved in the token.
			start := tn.p
			tn.p++
			for ; tn.p < len(b); tn.p++ {
				if b[tn.p] == '"' && b[tn.p-1] != '\\' {
					tn.tokens = append(tn.tokens, string(b[start:tn.p+1]))
					break
				}
			}
			tn.run = tn.p + 1
		/* tokens which are only ever single */
		case '|', '(', ')', ',':
			// NOTE(review): this condition compares the same byte to both
			// '\\' and '"', so it can never be true — presumably an escape
			// check against b[tn.p-1] was intended; confirm before changing.
			if tn.p > 0 && b[tn.p] == '\\' && b[tn.p] == '"' {
				tn.run = tn.p + 1
				continue
			}
			if tn.run < tn.p {
				tn.tokens = append(tn.tokens, string(b[tn.run:tn.p]))
			}
			tn.tokens = append(tn.tokens, string(b[tn.p]))
			tn.run = tn.p + 1
		default:
		}
	}
	// Flush any trailing token.
	if tn.run < len(b) {
		tn.tokens = append(tn.tokens, string(b[tn.run:]))
	}
	return tn.tokens, nil
} | parser.go | 0.593138 | 0.561696 | parser.go | starcoder
package gt
import (
"database/sql/driver"
"fmt"
"time"
)
/*
Shortcut for making a date from a time:
inst := time.Now()
date := gt.NullDateFrom(inst.Date())
Reversible:
date == gt.NullDateFrom(date.Date())
Note that `gt.NullDateFrom(0, 0, 0)` returns a zero value which is considered
empty/null, but NOT equivalent to `time.Time{}`. The equivalent of zero time is
`gt.NullDateFrom(1, 1, 1)`.
*/
func NullDateFrom(year int, month time.Month, day int) NullDate {
	return NullDate{Year: year, Month: month, Day: day}
}
// Shortcut for `gt.NullTimeNow().NullDate()`.
func NullDateNow() NullDate {
return NullTimeNow().NullDate()
}
/*
Shortcut: parses successfully or panics. Should be used only in root scope. When
error handling is relevant, use `.Parse`.
*/
func ParseNullDate(src string) (val NullDate) {
try(val.Parse(src))
return
}
/*
Civil date without time. Corresponds to SQL type `date` and HTML input with
`type="date"`. Zero value is considered empty in text, and null in JSON and
SQL. Features:
* Reversible encoding/decoding in text. Zero value is "".
* Reversible encoding/decoding in JSON. Zero value is `null`.
* Reversible encoding/decoding in SQL. Zero value is `null`.
* Text encoding uses the ISO 8601 extended calendar date format: "0001-02-03".
* Text decoding supports date-only strings and full RFC3339 timestamps.
* Convertible to and from `gt.NullTime`.
Caution: `gt.NullDate{}` or `gt.NullDate{0, 0, 0}` is considered empty/null, but
when converted to `time.Time` or `gt.NullTime`, it's NOT equivalent to the zero
time. The equivalent of zero time is `gt.NullDate{1, 1, 1}`.
*/
type NullDate struct {
Year int `json:"year" db:"year"`
Month time.Month `json:"month" db:"month"`
Day int `json:"day" db:"day"`
}
var (
_ = Encodable(NullDate{})
_ = Decodable((*NullDate)(nil))
)
// Implement `gt.Zeroable`. Equivalent to `reflect.ValueOf(self).IsZero()`.
func (self NullDate) IsZero() bool { return self == NullDate{} }
// Implement `gt.Nullable`. True if zero.
func (self NullDate) IsNull() bool { return self.IsZero() }
/*
Implement `gt.Getter`. If zero, returns `nil`, otherwise uses `.TimeUTC` to
return a timestamp suitable for SQL encoding.
*/
func (self NullDate) Get() interface{} {
if self.IsNull() {
return nil
}
return self.TimeUTC()
}
// Implement `gt.Setter`, using `.Scan`. Panics on error.
func (self *NullDate) Set(src interface{}) { try(self.Scan(src)) }
// Implement `gt.Zeroer`, zeroing the receiver.
func (self *NullDate) Zero() {
if self != nil {
*self = NullDate{}
}
}
/*
Implement `fmt.Stringer`. If zero, returns an empty string. Otherwise returns a
text representation in the standard machine-readable ISO 8601 format.
*/
func (self NullDate) String() string {
if self.IsNull() {
return ``
}
return bytesString(self.Append(nil))
}
/*
Implement `gt.Parser`. If the input is empty, zeroes the receiver. Otherwise
requires an ISO 8601 date representation, one of:
* Extended calendar date: "2006-01-02"
* RFC3339 (default Go timestamp format): "2006-01-02T15:04:05Z07:00"
*/
func (self *NullDate) Parse(src string) error {
if len(src) == 0 {
self.Zero()
return nil
}
var val time.Time
var err error
// Too restrictive. TODO fuzzier detection.
if len(src) == len(dateFormat) {
val, err = time.Parse(dateFormat, src)
} else {
val, err = time.Parse(timeFormat, src)
}
if err != nil {
return err
}
self.SetTime(val)
return nil
}
// Implement `gt.Appender`, using the same representation as `.String`.
func (self NullDate) Append(buf []byte) []byte {
if self.IsNull() {
return buf
}
// `time.Time.AppendFormat` doesn't seem to do this.
buf = Raw(buf).Grow(dateStrLen)
return self.TimeUTC().AppendFormat(buf, dateFormat)
}
/*
Implement `encoding.TextMarhaler`. If zero, returns nil. Otherwise returns the
same representation as `.String`.
*/
func (self NullDate) MarshalText() ([]byte, error) {
if self.IsNull() {
return nil, nil
}
return self.Append(nil), nil
}
// Implement `encoding.TextUnmarshaler`, using the same algorithm as `.Parse`.
func (self *NullDate) UnmarshalText(src []byte) error {
return self.Parse(bytesString(src))
}
/*
Implement `json.Marshaler`. If zero, returns bytes representing `null`.
Otherwise returns bytes representing a JSON string with the same text as in
`.String`.
*/
func (self NullDate) MarshalJSON() ([]byte, error) {
if self.IsNull() {
return bytesNull, nil
}
var arr [dateStrLen + 2]byte
buf := arr[:0]
buf = append(buf, '"')
buf = self.Append(buf)
buf = append(buf, '"')
return buf, nil
}
/*
Implement `json.Unmarshaler`. If the input is empty or represents JSON `null`,
zeroes the receiver. Otherwise parses a JSON string, using the same algorithm
as `.Parse`.
*/
func (self *NullDate) UnmarshalJSON(src []byte) error {
if isJsonEmpty(src) {
self.Zero()
return nil
}
if isJsonStr(src) {
return self.UnmarshalText(cutJsonStr(src))
}
return errJsonString(src, self)
}
// Implement `driver.Valuer`, using `.Get`.
func (self NullDate) Value() (driver.Value, error) {
return self.Get(), nil
}
/*
Implement `sql.Scanner`, converting an arbitrary input to `gt.NullDate` and
modifying the receiver. Acceptable inputs:
* `nil` -> use `.Zero`
* `string` -> use `.Parse`
* `[]byte` -> use `.UnmarshalText`
* `time.Time` -> use `.SetTime`
* `*time.Time` -> use `.Zero` or `.SetTime`
* `gt.NullTime` -> use `.SetTime`
* `gt.NullDate` -> assign
* `gt.Getter` -> scan underlying value
*/
func (self *NullDate) Scan(src interface{}) error {
switch src := src.(type) {
case nil:
self.Zero()
return nil
case string:
return self.Parse(src)
case []byte:
return self.UnmarshalText(src)
case time.Time:
self.SetTime(src)
return nil
case *time.Time:
if src == nil {
self.Zero()
} else {
self.SetTime(*src)
}
return nil
case NullTime:
self.SetTime(src.Time())
return nil
case NullDate:
*self = src
return nil
default:
val, ok := get(src)
if ok {
return self.Scan(val)
}
return errScanType(self, src)
}
}
// Implement `fmt.GoStringer`, returning valid Go code that constructs this value.
func (self NullDate) GoString() string {
year, month, day := self.Date()
return fmt.Sprintf(`gt.NullDateFrom(%v, %v, %v)`, year, int(month), day)
}
/*
If the input is zero, zeroes the receiver. Otherwise uses `time.Time.Date` and
assigns the resulting year, month, day to the receiver, ignoring smaller
constituents such as hour.
*/
func (self *NullDate) SetTime(src time.Time) {
// Note: `time.Time.Date()` "normalizes" zeros into 1 even when `.IsZero()`.
if src.IsZero() {
self.Zero()
} else {
*self = NullDateFrom(src.Date())
}
}
// Same as `time.Time.Date`. Returns a tuple of the underlying year, month, day.
func (self NullDate) Date() (year int, month time.Month, day int) {
return self.Year, self.Month, self.Day
}
// Converts to `gt.NullTime` with `T00:00:00` in the provided timezone.
func (self NullDate) NullTimeIn(loc *time.Location) NullTime {
return NullTime(time.Date(self.Year, self.Month, self.Day, 0, 0, 0, 0, loc))
}
// Converts to `gt.NullTime` with `T00:00:00` in UTC.
func (self NullDate) NullTimeUTC() NullTime {
return self.NullTimeIn(time.UTC)
}
// Converts to `time.Time` with `T00:00:00` in the provided timezone.
func (self NullDate) TimeIn(loc *time.Location) time.Time {
return self.NullTimeIn(loc).Time()
}
// Converts to `time.Time` with `T00:00:00` in UTC.
func (self NullDate) TimeUTC() time.Time {
return self.NullTimeUTC().Time()
}
/*
Similar to `time.Time.AddDate`. Returns a modified version of the current value,
with the year, month, day deltas added to the corresponding fields. The deltas
may be negative. Note that `time.Time` and all time-related types in this
package have a convenient `.Date` method that returns this tuple. The
calculations are performed for the UTC timezone.
As a special case, because the zero value is considered null, calling this on a
zero date ALWAYS returns the same zero date. This matches general SQL semantics
of operations involving nulls. Note that the equivalent of zero TIME is not
`gt.NullDateFrom(0, 0, 0)`, but rather `gt.NullDateFrom(1, 1, 1)`.
*/
func (self NullDate) AddDate(years int, months int, days int) NullDate {
	// Null in, null out: date arithmetic on the empty/null date is a no-op.
	if self.IsZero() {
		return self
	}
	// Delegate to time.Time.AddDate in UTC, then project back to a date.
	return NullDateFrom(self.NullTimeUTC().AddDate(years, months, days).Date())
} | gt_null_date.go | 0.79158 | 0.553747 | gt_null_date.go | starcoder
package monitoringcommon
const MonitoringGrafanaDBMultitenancyDetailedJSON = `{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 12,
"links": [],
"panels": [
{
"collapsed": false,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
"x": 0,
"y": 0
},
"id": 3,
"panels": [],
"title": "Tenants",
"type": "row"
},
{
"cacheTimeout": null,
"datasource": "Prometheus",
"description": "Number of active and reconciled APIManagementTenant CRs.",
"fieldConfig": {
"defaults": {
"custom": {},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 6,
"w": 3,
"x": 0,
"y": 1
},
"id": 4,
"interval": null,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"mean"
],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "7.3.10",
"targets": [
{
"expr": "num_reconciled_tenants",
"instant": true,
"interval": "",
"legendFormat": "",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Active Tenant CRs",
"type": "stat"
},
{
"cacheTimeout": null,
"datasource": "Prometheus",
"description": "Number of APIManagementTenant CRs both reconciled and not reconciled.",
"fieldConfig": {
"defaults": {
"custom": {},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "blue",
"value": null
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 6,
"w": 3,
"x": 3,
"y": 1
},
"id": 5,
"interval": null,
"links": [],
"maxDataPoints": 100,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"mean"
],
"fields": "",
"values": false
},
"textMode": "auto"
},
"pluginVersion": "7.3.10",
"targets": [
{
"expr": "total_num_tenants",
"instant": true,
"interval": "",
"legendFormat": "",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Total Tenant CRs",
"type": "stat"
},
{
"datasource": null,
"description": "Percentage of APIManagementTenant CRs that have been reconciled.",
"fieldConfig": {
"defaults": {
"custom": {},
"mappings": [],
"thresholds": {
"mode": "percentage",
"steps": [
{
"color": "red",
"value": null
},
{
"color": "yellow",
"value": 90
},
{
"color": "green",
"value": 95
}
]
},
"unit": "percentunit"
},
"overrides": []
},
"gridPos": {
"h": 6,
"w": 5,
"x": 6,
"y": 1
},
"id": 7,
"options": {
"reduceOptions": {
"calcs": [
"mean"
],
"fields": "",
"values": false
},
"showThresholdLabels": false,
"showThresholdMarkers": true
},
"pluginVersion": "7.3.10",
"targets": [
{
"expr": "num_reconciled_tenants / total_num_tenants",
"instant": true,
"interval": "",
"intervalFactor": 2,
"legendFormat": "",
"refId": "A"
}
],
"timeFrom": null,
"timeShift": null,
"title": "% Tenant CRs Reconciled",
"type": "gauge"
}
],
"refresh": "10s",
"schemaVersion": 26,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "",
"title": "Multitenancy Detailed",
"uid": "a5c77de61baa79315708d479f34ded2f8eb23381",
"version": 1
}` | pkg/products/monitoringcommon/dashboards/multitenancyDetailed.go | 0.587588 | 0.4831 | multitenancyDetailed.go | starcoder |
package aip
import (
"encoding/hex"
"errors"
"strconv"
"strings"
"github.com/bitcoinschema/go-bob"
)
// NewFromTape will create a new AIP object from a bob.Tape.
// Using FromTape alone skips validation (data is set via SetData* to enable it).
func NewFromTape(tape bob.Tape) *Aip {
	aip := new(Aip)
	aip.FromTape(tape)
	return aip
}
// FromTape takes a BOB Tape and fills in the Aip data structure.
// Using FromTape alone skips validation (data is set via SetData* to enable it).
func (a *Aip) FromTape(tape bob.Tape) {

	// Not a valid tape?
	if len(tape.Cell) < 4 {
		return
	}

	// Loop to find start of AIP
	var startIndex int
	for i, cell := range tape.Cell {
		if cell.S == Prefix {
			startIndex = i
			break
		}
	}

	// FIX: guard against a Prefix too close to the end of the tape; the
	// original indexed out of range in that case.
	if startIndex+3 >= len(tape.Cell) {
		return
	}

	// Set the AIP fields
	a.Algorithm = Algorithm(tape.Cell[startIndex+1].S)
	a.AlgorithmSigningComponent = tape.Cell[startIndex+2].S
	a.Signature = tape.Cell[startIndex+3].B

	// First cell index after the AIP protocol fields
	finalIndexCount := startIndex + 4

	// Store the indices
	if len(tape.Cell) > finalIndexCount {
		// TODO: Consider OP_RETURN is included in sig when processing a tx using indices
		// BUG FIX: make() was called with a length instead of a capacity, so
		// the appends below left a run of zero-valued indices at the front.
		a.Indices = make([]int, 0, len(tape.Cell)-finalIndexCount)
		for x := finalIndexCount - 1; x < len(tape.Cell); x++ {
			if index, err := strconv.Atoi(tape.Cell[x].S); err == nil {
				a.Indices = append(a.Indices, index)
			}
		}
	}
}
// NewFromTapes will create a new AIP object from a []bob.Tape
// Using the FromTapes() alone will prevent validation (data is needed via SetData to enable)
func NewFromTapes(tapes []bob.Tape) (a *Aip) {
// Loop tapes -> cells (only supporting 1 sig right now)
for _, t := range tapes {
for _, cell := range t.Cell {
if cell.S == Prefix {
a = new(Aip)
a.FromTape(t)
a.SetDataFromTapes(tapes)
return
}
}
}
return
}
// SetDataFromTapes sets the data the AIP signature is signing. With no
// Indices, everything up to the AIP prefix is signed; with Indices, only
// the listed cell positions contribute, others become the pipe separator.
func (a *Aip) SetDataFromTapes(tapes []bob.Tape) {
	// Set OP_RETURN to be consistent with BitcoinFiles SDK
	var data = []string{opReturn}
	if len(a.Indices) == 0 {
		// Walk over all output values and concatenate them until we hit the AIP prefix, then add in the separator
		// NOTE(review): if no AIP prefix cell exists, a.Data is left unset
		// on this path — confirm callers guarantee the prefix is present.
		for _, tape := range tapes {
			for _, cell := range tape.Cell {
				if cell.S != Prefix {
					// Skip the OPS
					if cell.Ops != "" {
						continue
					}
					data = append(data, strings.TrimSpace(cell.S))
				} else {
					data = append(data, pipe)
					a.Data = data
					return
				}
			}
		}
	} else {
		// Only the cells whose running position appears in Indices are
		// signed; every other position contributes the pipe separator.
		var indexCt = 0
		for _, tape := range tapes {
			for _, cell := range tape.Cell {
				if cell.S != Prefix && contains(a.Indices, indexCt) {
					data = append(data, cell.S)
				} else {
					data = append(data, pipe)
				}
				indexCt++
			}
		}
		a.Data = data
	}
}
// SignBobOpReturnData appends a signature to a BOB Tx by adding a
// protocol separator followed by AIP information
func SignBobOpReturnData(privateKey string, algorithm Algorithm, output bob.Output) (*bob.Output, *Aip, error) {
	// Collect the signable payload from every cell of every tape.
	var payload []string
	for _, tape := range output.Tape {
		for _, cell := range tape.Cell {
			if len(cell.S) > 0 {
				payload = append(payload, cell.S)
			} else {
				// TODO: Review this case. Should we assume the b64 is signed?
				// Should protocol doc for AIP mention this?
				payload = append(payload, cell.B)
			}
		}
	}
	// Produce the AIP signature over the concatenated payload.
	a, err := Sign(privateKey, algorithm, strings.Join(payload, ""))
	if err != nil {
		return nil, nil, err
	}
	// Append the AIP tape: prefix, algorithm, signing component, signature.
	aipCells := []bob.Cell{
		{H: hex.EncodeToString([]byte(Prefix)), S: Prefix},
		{H: hex.EncodeToString([]byte(algorithm)), S: string(algorithm)},
		{H: hex.EncodeToString([]byte(a.AlgorithmSigningComponent)), S: a.AlgorithmSigningComponent},
		{H: hex.EncodeToString([]byte(a.Signature)), S: a.Signature},
	}
	output.Tape = append(output.Tape, bob.Tape{Cell: aipCells})
	return &output, a, nil
}
// ValidateTapes validates the AIP signature for a given []bob.Tape
func ValidateTapes(tapes []bob.Tape) (bool, error) {
	// Only the first AIP-prefixed tape is validated (one signature supported).
	for _, tape := range tapes {
		for _, cell := range tape.Cell {
			if cell.S != Prefix {
				continue
			}
			a := NewFromTape(tape)
			a.SetDataFromTapes(tapes)
			return a.Validate()
		}
	}
	return false, errors.New("no AIP tape found")
}
// contains reports whether the value e is present in the slice s.
func contains(s []int, e int) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}
package lexer
import (
"errors"
"fmt"
"io"
)
// TokenType indicates the type of token (a broad category of its literal bytes).
type TokenType int
const (
	// EmptyToken is a token with no content (just the delim byte).
	EmptyToken TokenType = iota
	// BitsToken is a token composed only of the characters 0 and 1.
	BitsToken
	// DigitsToken is a token composed of decimal digits (0-9).
	DigitsToken
	// HexToken is a token composed of hex digits (0-9a-fA-F).
	HexToken
	// FloatToken is a token composed of digits and, at most, one dot.
	FloatToken
	// DataToken is a token with arbitrary content.
	DataToken
)
// Token is a single token recognized by the lexer
type Token struct {
	// Type indicates the type of the token (a broad category of the literal bytes)
	Type TokenType
	// Literal holds the token bytes, plus the separator at the end
	Literal []byte
	// state is the current classifier-DFA state, advanced by byte() for each
	// consumed input byte; the zero value is used as the start state
	// (presumably emptyState — see byte()).
	state dfaState
}
// OnlyDigits reports whether the token contains only decimal digits
// (i.e. it was classified as a BitsToken or a DigitsToken).
func (t *Token) OnlyDigits() bool {
	switch t.Type {
	case BitsToken, DigitsToken:
		return true
	}
	return false
}
// IsHex reports whether the token contains only hex digits
// (i.e. it was classified as a BitsToken, DigitsToken or HexToken).
func (t *Token) IsHex() bool {
	switch t.Type {
	case BitsToken, DigitsToken, HexToken:
		return true
	}
	return false
}
// IsFloat reports whether the token holds a valid float
// (i.e. it was classified as a BitsToken, DigitsToken or FloatToken).
func (t *Token) IsFloat() bool {
	switch t.Type {
	case BitsToken, DigitsToken, FloatToken:
		return true
	}
	return false
}
// EndsWith reports whether the last byte of Literal equals the given delim
// byte; an empty Literal never matches.
func (t *Token) EndsWith(delim byte) bool {
	n := len(t.Literal)
	return n > 0 && t.Literal[n-1] == delim
}
// WithoutSuffix returns Literal with its final byte removed, or nil when
// Literal is empty. The returned slice aliases Literal's backing array.
func (t *Token) WithoutSuffix() []byte {
	if n := len(t.Literal); n > 0 {
		return t.Literal[:n-1]
	}
	return nil
}
// byte feeds one input byte c into the token's classifier DFA and updates
// the Type field to match the resulting state.
func (t *Token) byte(c byte) {
	t.state = t.state.next(c)
	switch t.state {
	case emptyState:
		// next() never transitions back to the empty state once a byte has
		// been consumed, so this is unreachable by construction.
		panic("impossibru!")
	case bitsState:
		t.Type = BitsToken
	case digitsState:
		t.Type = DigitsToken
	case hexState:
		t.Type = HexToken
	case floatState:
		t.Type = FloatToken
	case signState, intState, dotState, dataState:
		// All of these states classify as arbitrary data.
		t.Type = DataToken
	default:
		// Bug fix: the switch is on t.state, so report the unknown state —
		// the previous message printed t.Type, which is never "unknown" here.
		panic(fmt.Errorf("unknown lexer state: %v", t.state))
	}
}
// ErrTokenTooLong is returned by Next when the maximum length is reached without finding the delimiter byte.
var ErrTokenTooLong = errors.New("token too long")
// Lexer is a very simple lexer, able to scan a reader using a delimiter byte and a maximum token length.
// The zero value (with Reader set) is ready to use.
type Lexer struct {
	// Reader is the input source the lexer scans.
	Reader io.Reader
	// buf is a reusable single-byte scratch buffer, lazily allocated by Next.
	buf []byte
}
// Next scans the next token from the underlying reader, using a maximum length and a delimiter byte. If the maximum
// length is reached without seeing the delimiter, ErrTokenTooLong is returned along with the bytes read so far.
// The delimiter byte is included in the Token literal and in the byte count.
func (l *Lexer) Next(max int, delim byte) (Token, error) {
	if max < 1 {
		// errors.New: the message has no formatting directives, so
		// fmt.Errorf was unnecessary (staticcheck S1039).
		return Token{}, errors.New("invalid max value, should be greater than 0")
	}
	// Lazily allocate the one-byte scratch buffer so the zero-value Lexer works.
	if l.buf == nil {
		l.buf = make([]byte, 1)
	}
	t := Token{
		Type: EmptyToken,
	}
	for i := 0; i < max; i++ {
		// Read exactly one byte; any error (including io.EOF) aborts the
		// scan and returns the partially built token.
		_, err := io.ReadFull(l.Reader, l.buf)
		if err != nil {
			return t, err
		}
		c := l.buf[0]
		t.Literal = append(t.Literal, c)
		if c == delim {
			// The delimiter terminates the token; it is kept in Literal but
			// is not fed to the classifier DFA.
			return t, nil
		}
		t.byte(c)
	}
	return t, ErrTokenTooLong
}
// NextFixed scans the next token from the underlying reader using a fixed length. If EOF is found before reading the
// token completely, an io.EOF is returned, along the resulting token (with a shorted literal obviously).
func (l *Lexer) NextFixed(length int) (Token, error) {
if length < 1 {
return Token{}, fmt.Errorf("invalid length value, should be greater than 0")
}
t := Token{
Type: EmptyToken,
Literal: make([]byte, length),
}
_, err := io.ReadFull(l.Reader, t.Literal)
switch err {
case io.ErrUnexpectedEOF:
for _, c := range t.Literal[:length-1] {
t.byte(c)
}
t.Literal = t.Literal[:len(t.Literal)-1]
return t, io.EOF
case io.EOF:
t.Literal = t.Literal[:len(t.Literal)-1]
return t, err
case nil:
default:
return t, err
}
for _, c := range t.Literal[:length-1] {
t.byte(c)
}
return t, nil
} | lexer/lexer.go | 0.736969 | 0.610599 | lexer.go | starcoder |
package config
/**
* Configuration for Load Balancing Virtual Server resource.
*/
type Lbvserver struct {
/**
* Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver').
*/
Name string `json:"name,omitempty"`
/**
* Protocol used by the service (also called the service type).
*/
Servicetype string `json:"servicetype,omitempty"`
/**
* IPv4 or IPv6 address to assign to the virtual server.
*/
Ipv46 string `json:"ipv46,omitempty"`
/**
* IP address pattern, in dotted decimal notation, for identifying packets to be accepted by the virtual server. The IP Mask parameter specifies which part of the destination IP address is matched against the pattern. Mutually exclusive with the IP Address parameter.
For example, if the IP pattern assigned to the virtual server is 198.51.100.0 and the IP mask is 255.255.240.0 (a forward mask), the first 20 bits in the destination IP addresses are matched with the first 20 bits in the pattern. The virtual server accepts requests with IP addresses that range from 172.16.17.32 to 192.168.3.11. You can also use a pattern such as 0.0.2.2 and a mask such as 0.0.255.255 (a reverse mask).
If a destination IP address matches more than one IP pattern, the pattern with the longest match is selected, and the associated virtual server processes the request. For example, if virtual servers vs1 and vs2 have the same IP pattern, 0.0.100.128, but different IP masks of 0.0.255.255 and 0.0.224.255, a destination IP address of 198.51.100.128 has the longest match with the IP pattern of vs1. If a destination IP address matches two or more virtual servers to the same extent, the request is processed by the virtual server whose port number matches the port number in the request.
*/
Ippattern string `json:"ippattern,omitempty"`
/**
* IP mask, in dotted decimal notation, for the IP Pattern parameter. Can have leading or trailing non-zero octets (for example, 255.255.240.0 or 0.0.255.255). Accordingly, the mask specifies whether the first n bits or the last n bits of the destination IP address in a client request are to be matched with the corresponding bits in the IP pattern. The former is called a forward mask. The latter is called a reverse mask.
*/
Ipmask string `json:"ipmask,omitempty"`
/**
* Port number for the virtual server.
*/
Port int `json:"port,omitempty"`
/**
* The list of IPv4/IPv6 addresses bound to ipset would form a part of listening service on the current lb vserver
*/
Ipset string `json:"ipset,omitempty"`
/**
* Number of IP addresses that the appliance must generate and assign to the virtual server. The virtual server then functions as a network virtual server, accepting traffic on any of the generated IP addresses. The IP addresses are generated automatically, as follows:
* For a range of n, the last octet of the address specified by the IP Address parameter increments n-1 times.
* If the last octet exceeds 255, it rolls over to 0 and the third octet increments by 1.
Note: The Range parameter assigns multiple IP addresses to one virtual server. To generate an array of virtual servers, each of which owns only one IP address, use brackets in the IP Address and Name parameters to specify the range. For example:
add lb vserver my_vserver[1-3] HTTP 192.0.2.[1-3] 80
*/
Range int `json:"range,omitempty"`
/**
* Type of persistence for the virtual server. Available settings function as follows:
* SOURCEIP - Connections from the same client IP address belong to the same persistence session.
* COOKIEINSERT - Connections that have the same HTTP Cookie, inserted by a Set-Cookie directive from a server, belong to the same persistence session.
* SSLSESSION - Connections that have the same SSL Session ID belong to the same persistence session.
* CUSTOMSERVERID - Connections with the same server ID form part of the same session. For this persistence type, set the Server ID (CustomServerID) parameter for each service and configure the Rule parameter to identify the server ID in a request.
* RULE - All connections that match a user defined rule belong to the same persistence session.
* URLPASSIVE - Requests that have the same server ID in the URL query belong to the same persistence session. The server ID is the hexadecimal representation of the IP address and port of the service to which the request must be forwarded. This persistence type requires a rule to identify the server ID in the request.
* DESTIP - Connections to the same destination IP address belong to the same persistence session.
* SRCIPDESTIP - Connections that have the same source IP address and destination IP address belong to the same persistence session.
* CALLID - Connections that have the same CALL-ID SIP header belong to the same persistence session.
* RTSPSID - Connections that have the same RTSP Session ID belong to the same persistence session.
* FIXSESSION - Connections that have the same SenderCompID and TargetCompID values belong to the same persistence session.
* USERSESSION - Persistence session is created based on the persistence parameter value provided from an extension.
*/
Persistencetype string `json:"persistencetype,omitempty"`
/**
* Time period for which a persistence session is in effect.
*/
Timeout int `json:"timeout,omitempty"`
/**
* Backup persistence type for the virtual server. Becomes operational if the primary persistence mechanism fails.
*/
Persistencebackup string `json:"persistencebackup,omitempty"`
/**
* Time period for which backup persistence is in effect.
*/
Backuppersistencetimeout int `json:"backuppersistencetimeout,omitempty"`
/**
* Load balancing method. The available settings function as follows:
* ROUNDROBIN - Distribute requests in rotation, regardless of the load. Weights can be assigned to services to enforce weighted round robin distribution.
* LEASTCONNECTION (default) - Select the service with the fewest connections.
* LEASTRESPONSETIME - Select the service with the lowest average response time.
* LEASTBANDWIDTH - Select the service currently handling the least traffic.
* LEASTPACKETS - Select the service currently serving the lowest number of packets per second.
* CUSTOMLOAD - Base service selection on the SNMP metrics obtained by custom load monitors.
* LRTM - Select the service with the lowest response time. Response times are learned through monitoring probes. This method also takes the number of active connections into account.
Also available are a number of hashing methods, in which the appliance extracts a predetermined portion of the request, creates a hash of the portion, and then checks whether any previous requests had the same hash value. If it finds a match, it forwards the request to the service that served those previous requests. Following are the hashing methods:
* URLHASH - Create a hash of the request URL (or part of the URL).
* DOMAINHASH - Create a hash of the domain name in the request (or part of the domain name). The domain name is taken from either the URL or the Host header. If the domain name appears in both locations, the URL is preferred. If the request does not contain a domain name, the load balancing method defaults to LEASTCONNECTION.
* DESTINATIONIPHASH - Create a hash of the destination IP address in the IP header.
* SOURCEIPHASH - Create a hash of the source IP address in the IP header.
* TOKEN - Extract a token from the request, create a hash of the token, and then select the service to which any previous requests with the same token hash value were sent.
* SRCIPDESTIPHASH - Create a hash of the string obtained by concatenating the source IP address and destination IP address in the IP header.
* SRCIPSRCPORTHASH - Create a hash of the source IP address and source port in the IP header.
* CALLIDHASH - Create a hash of the SIP Call-ID header.
* USER_TOKEN - Same as TOKEN LB method but token needs to be provided from an extension.
*/
Lbmethod string `json:"lbmethod,omitempty"`
/**
* Number of bytes to consider for the hash value used in the URLHASH and DOMAINHASH load balancing methods.
*/
Hashlength int `json:"hashlength,omitempty"`
/**
* IPv4 subnet mask to apply to the destination IP address or source IP address when the load balancing method is DESTINATIONIPHASH or SOURCEIPHASH.
*/
Netmask string `json:"netmask,omitempty"`
/**
* Number of bits to consider in an IPv6 destination or source IP address, for creating the hash that is required by the DESTINATIONIPHASH and SOURCEIPHASH load balancing methods.
*/
V6netmasklen int `json:"v6netmasklen,omitempty"`
/**
* Backup load balancing method. Becomes operational if the primary load balancing me
thod fails or cannot be used.
Valid only if the primary method is based on static proximity.
*/
Backuplbmethod string `json:"backuplbmethod,omitempty"`
/**
* Use this parameter to specify the cookie name for COOKIE peristence type. It specifies the name of cookie with a maximum of 32 characters. If not specified, cookie name is internally generated.
*/
Cookiename string `json:"cookiename,omitempty"`
/**
* Expression, or name of a named expression, against which traffic is evaluated.
The following requirements apply only to the Citrix ADC CLI:
* If the expression includes one or more spaces, enclose the entire expression in double quotation marks.
* If the expression itself includes double quotation marks, escape the quotations by using the \ character.
* Alternatively, you can use single quotation marks to enclose the rule, in which case you do not have to escape the double quotation marks.
*/
Rule string `json:"rule,omitempty"`
/**
* Expression identifying traffic accepted by the virtual server. Can be either an expression (for example, CLIENT.IP.DST.IN_SUBNET(192.0.2.0/24) or the name of a named expression. In the above example, the virtual server accepts all requests whose destination IP address is in the 192.0.2.0/24 subnet.
*/
Listenpolicy string `json:"listenpolicy,omitempty"`
/**
* Integer specifying the priority of the listen policy. A higher number specifies a lower priority. If a request matches the listen policies of more than one virtual server the virtual server whose listen policy has the highest priority (the lowest priority number) accepts the request.
*/
Listenpriority int `json:"listenpriority,omitempty"`
/**
* Expression specifying which part of a server's response to use for creating rule based persistence sessions (persistence type RULE). Can be either an expression or the name of a named expression.
Example:
HTTP.RES.HEADER("setcookie").VALUE(0).TYPECAST_NVLIST_T('=',';').VALUE("server1").
*/
Resrule string `json:"resrule,omitempty"`
/**
* Persistence mask for IP based persistence types, for IPv4 virtual servers.
*/
Persistmask string `json:"persistmask,omitempty"`
/**
* Persistence mask for IP based persistence types, for IPv6 virtual servers.
*/
V6persistmasklen int `json:"v6persistmasklen,omitempty"`
/**
* Use priority queuing on the virtual server. based persistence types, for IPv6 virtual servers.
*/
Pq string `json:"pq,omitempty"`
/**
* Use SureConnect on the virtual server.
*/
Sc string `json:"sc,omitempty"`
/**
* Use network address translation (NAT) for RTSP data connections.
*/
Rtspnat string `json:"rtspnat,omitempty"`
/**
* Redirection mode for load balancing. Available settings function as follows:
* IP - Before forwarding a request to a server, change the destination IP address to the server's IP address.
* MAC - Before forwarding a request to a server, change the destination MAC address to the server's MAC address. The destination IP address is not changed. MAC-based redirection mode is used mostly in firewall load balancing deployments.
* IPTUNNEL - Perform IP-in-IP encapsulation for client IP packets. In the outer IP headers, set the destination IP address to the IP address of the server and the source IP address to the subnet IP (SNIP). The client IP packets are not modified. Applicable to both IPv4 and IPv6 packets.
* TOS - Encode the virtual server's TOS ID in the TOS field of the IP header.
You can use either the IPTUNNEL or the TOS option to implement Direct Server Return (DSR).
*/
M string `json:"m,omitempty"`
/**
* TOS ID of the virtual server. Applicable only when the load balancing redirection mode is set to TOS.
*/
Tosid int `json:"tosid,omitempty"`
/**
* Length of the token to be extracted from the data segment of an incoming packet, for use in the token method of load balancing. The length of the token, specified in bytes, must not be greater than 24 KB. Applicable to virtual servers of type TCP.
*/
Datalength int `json:"datalength,omitempty"`
/**
* Offset to be considered when extracting a token from the TCP payload. Applicable to virtual servers, of type TCP, using the token method of load balancing. Must be within the first 24 KB of the TCP payload.
*/
Dataoffset int `json:"dataoffset,omitempty"`
/**
* Perform load balancing on a per-packet basis, without establishing sessions. Recommended for load balancing of intrusion detection system (IDS) servers and scenarios involving direct server return (DSR), where session information is unnecessary.
*/
Sessionless string `json:"sessionless,omitempty"`
/**
* When value is ENABLED, Trofs persistence is honored. When value is DISABLED, Trofs persistence is not honored.
*/
Trofspersistence string `json:"trofspersistence,omitempty"`
/**
* State of the load balancing virtual server.
*/
State string `json:"state,omitempty"`
/**
* Mode in which the connection failover feature must operate for the virtual server. After a failover, established TCP connections and UDP packet flows are kept active and resumed on the secondary appliance. Clients remain connected to the same servers. Available settings function as follows:
* STATEFUL - The primary appliance shares state information with the secondary appliance, in real time, resulting in some runtime processing overhead.
* STATELESS - State information is not shared, and the new primary appliance tries to re-create the packet flow on the basis of the information contained in the packets it receives.
* DISABLED - Connection failover does not occur.
*/
Connfailover string `json:"connfailover,omitempty"`
/**
* URL to which to redirect traffic if the virtual server becomes unavailable.
WARNING! Make sure that the domain in the URL does not match the domain specified for a content switching policy. If it does, requests are continuously redirected to the unavailable virtual server.
*/
Redirurl string `json:"redirurl,omitempty"`
/**
* Route cacheable requests to a cache redirection virtual server. The load balancing virtual server can forward requests only to a transparent cache redirection virtual server that has an IP address and port combination of *:80, so such a cache redirection virtual server must be configured on the appliance.
*/
Cacheable string `json:"cacheable,omitempty"`
/**
* Idle time, in seconds, after which a client connection is terminated.
*/
Clttimeout int `json:"clttimeout,omitempty"`
/**
* Type of threshold that, when exceeded, triggers spillover. Available settings function as follows:
* CONNECTION - Spillover occurs when the number of client connections exceeds the threshold.
* DYNAMICCONNECTION - Spillover occurs when the number of client connections at the virtual server exceeds the sum of the maximum client (Max Clients) settings for bound services. Do not specify a spillover threshold for this setting, because the threshold is implied by the Max Clients settings of bound services.
* BANDWIDTH - Spillover occurs when the bandwidth consumed by the virtual server's incoming and outgoing traffic exceeds the threshold.
* HEALTH - Spillover occurs when the percentage of weights of the services that are UP drops below the threshold. For example, if services svc1, svc2, and svc3 are bound to a virtual server, with weights 1, 2, and 3, and the spillover threshold is 50%, spillover occurs if svc1 and svc3 or svc2 and svc3 transition to DOWN.
* NONE - Spillover does not occur.
*/
Somethod string `json:"somethod,omitempty"`
/**
* If spillover occurs, maintain source IP address based persistence for both primary and backup virtual servers.
*/
Sopersistence string `json:"sopersistence,omitempty"`
/**
* Timeout for spillover persistence, in minutes.
*/
Sopersistencetimeout int `json:"sopersistencetimeout,omitempty"`
/**
* Threshold in percent of active services below which vserver state is made down. If this threshold is 0, vserver state will be up even if one bound service is up.
*/
Healththreshold int `json:"healththreshold,omitempty"`
/**
* Threshold at which spillover occurs. Specify an integer for the CONNECTION spillover method, a bandwidth value in kilobits per second for the BANDWIDTH method (do not enter the units), or a percentage for the HEALTH method (do not enter the percentage symbol).
*/
Sothreshold int `json:"sothreshold,omitempty"`
/**
* Action to be performed if spillover is to take effect, but no backup chain to spillover is usable or exists
*/
Sobackupaction string `json:"sobackupaction,omitempty"`
/**
* Rewrite the port and change the protocol to ensure successful HTTP redirects from services.
*/
Redirectportrewrite string `json:"redirectportrewrite,omitempty"`
/**
* Flush all active transactions associated with a virtual server whose state transitions from UP to DOWN. Do not enable this option for applications that must complete their transactions.
*/
Downstateflush string `json:"downstateflush,omitempty"`
/**
* Name of the backup virtual server to which to forward requests if the primary virtual server goes DOWN or reaches its spillover threshold.
*/
Backupvserver string `json:"backupvserver,omitempty"`
/**
* If the primary virtual server goes down, do not allow it to return to primary status until manually enabled.
*/
Disableprimaryondown string `json:"disableprimaryondown,omitempty"`
/**
* Insert an HTTP header, whose value is the IP address and port number of the virtual server, before forwarding a request to the server. The format of the header is <vipHeader>: <virtual server IP address>_<port number >, where vipHeader is the name that you specify for the header. If the virtual server has an IPv6 address, the address in the header is enclosed in brackets ([ and ]) to separate it from the port number. If you have mapped an IPv4 address to a virtual server's IPv6 address, the value of this parameter determines which IP address is inserted in the header, as follows:
* VIPADDR - Insert the IP address of the virtual server in the HTTP header regardless of whether the virtual server has an IPv4 address or an IPv6 address. A mapped IPv4 address, if configured, is ignored.
* V6TOV4MAPPING - Insert the IPv4 address that is mapped to the virtual server's IPv6 address. If a mapped IPv4 address is not configured, insert the IPv6 address.
* OFF - Disable header insertion.
*/
Insertvserveripport string `json:"insertvserveripport,omitempty"`
/**
* Name for the inserted header. The default name is vip-header.
*/
Vipheader string `json:"vipheader,omitempty"`
/**
* Fully qualified domain name (FQDN) of the authentication virtual server to which the user must be redirected for authentication. Make sure that the Authentication parameter is set to ENABLED.
*/
Authenticationhost string `json:"authenticationhost,omitempty"`
/**
* Enable or disable user authentication.
*/
Authentication string `json:"authentication,omitempty"`
/**
* Enable or disable user authentication with HTTP 401 responses.
*/
Authn401 string `json:"authn401,omitempty"`
/**
* Name of an authentication virtual server with which to authenticate users.
*/
Authnvsname string `json:"authnvsname,omitempty"`
/**
* Process traffic with the push virtual server that is bound to this load balancing virtual server.
*/
Push string `json:"push,omitempty"`
/**
* Name of the load balancing virtual server, of type PUSH or SSL_PUSH, to which the server pushes updates received on the load balancing virtual server that you are configuring.
*/
Pushvserver string `json:"pushvserver,omitempty"`
/**
* Expression for extracting a label from the server's response. Can be either an expression or the name of a named expression.
*/
Pushlabel string `json:"pushlabel,omitempty"`
/**
* Allow multiple Web 2.0 connections from the same client to connect to the virtual server and expect updates.
*/
Pushmulticlients string `json:"pushmulticlients,omitempty"`
/**
* Name of the TCP profile whose settings are to be applied to the virtual server.
*/
Tcpprofilename string `json:"tcpprofilename,omitempty"`
/**
* Name of the HTTP profile whose settings are to be applied to the virtual server.
*/
Httpprofilename string `json:"httpprofilename,omitempty"`
/**
* Name of the DB profile whose settings are to be applied to the virtual server.
*/
Dbprofilename string `json:"dbprofilename,omitempty"`
/**
* Any comments that you might want to associate with the virtual server.
*/
Comment string `json:"comment,omitempty"`
/**
* Use Layer 2 parameters (channel number, MAC address, and VLAN ID) in addition to the 4-tuple (<source IP>:<source port>::<destination IP>:<destination port>) that is used to identify a connection. Allows multiple TCP and non-TCP connections with the same 4-tuple to co-exist on the Citrix ADC.
*/
L2conn string `json:"l2conn,omitempty"`
/**
* Oracle server version
*/
Oracleserverversion string `json:"oracleserverversion,omitempty"`
/**
* For a load balancing virtual server of type MSSQL, the Microsoft SQL Server version. Set this parameter if you expect some clients to run a version different from the version of the database. This setting provides compatibility between the client-side and server-side connections by ensuring that all communication conforms to the server's version.
*/
Mssqlserverversion string `json:"mssqlserverversion,omitempty"`
/**
* MySQL protocol version that the virtual server advertises to clients.
*/
Mysqlprotocolversion int `json:"mysqlprotocolversion,omitempty"`
/**
* MySQL server version string that the virtual server advertises to clients.
*/
Mysqlserverversion string `json:"mysqlserverversion,omitempty"`
/**
* Character set that the virtual server advertises to clients.
*/
Mysqlcharacterset int `json:"mysqlcharacterset,omitempty"`
/**
* Server capabilities that the virtual server advertises to clients.
*/
Mysqlservercapabilities int `json:"mysqlservercapabilities,omitempty"`
/**
* Apply AppFlow logging to the virtual server.
*/
Appflowlog string `json:"appflowlog,omitempty"`
/**
* Name of the network profile to associate with the virtual server. If you set this parameter, the virtual server uses only the IP addresses in the network profile as source IP addresses when initiating connections with servers.
*/
Netprofile string `json:"netprofile,omitempty"`
/**
* How the Citrix ADC responds to ping requests received for an IP address that is common to one or more virtual servers. Available settings function as follows:
* If set to PASSIVE on all the virtual servers that share the IP address, the appliance always responds to the ping requests.
* If set to ACTIVE on all the virtual servers that share the IP address, the appliance responds to the ping requests if at least one of the virtual servers is UP. Otherwise, the appliance does not respond.
* If set to ACTIVE on some virtual servers and PASSIVE on the others, the appliance responds if at least one virtual server with the ACTIVE setting is UP. Otherwise, the appliance does not respond.
Note: This parameter is available at the virtual server level. A similar parameter, ICMP Response, is available at the IP address level, for IPv4 addresses of type VIP. To set that parameter, use the add ip command in the CLI or the Create IP dialog box in the GUI.
*/
Icmpvsrresponse string `json:"icmpvsrresponse,omitempty"`
/**
* Route Health Injection (RHI) functionality of the NetSaler appliance for advertising the route of the VIP address associated with the virtual server. When Vserver RHI Level (RHI) parameter is set to VSVR_CNTRLD, the following are different RHI behaviors for the VIP address on the basis of RHIstate (RHI STATE) settings on the virtual servers associated with the VIP address:
* If you set RHI STATE to PASSIVE on all virtual servers, the Citrix ADC always advertises the route for the VIP address.
* If you set RHI STATE to ACTIVE on all virtual servers, the Citrix ADC advertises the route for the VIP address if at least one of the associated virtual servers is in UP state.
* If you set RHI STATE to ACTIVE on some and PASSIVE on others, the Citrix ADC advertises the route for the VIP address if at least one of the associated virtual servers, whose RHI STATE set to ACTIVE, is in UP state.
*/
Rhistate string `json:"rhistate,omitempty"`
/**
* Number of requests, or percentage of the load on existing services, by which to increase the load on a new service at each interval in slow-start mode. A non-zero value indicates that slow-start is applicable. A zero value indicates that the global RR startup parameter is applied. Changing the value to zero will cause services currently in slow start to take the full traffic as determined by the LB method. Subsequently, any new services added will use the global RR factor.
*/
Newservicerequest int `json:"newservicerequest,omitempty"`
/**
* Units in which to increment load at each interval in slow-start mode.
*/
Newservicerequestunit string `json:"newservicerequestunit,omitempty"`
/**
* Interval, in seconds, between successive increments in the load on a new service or a service whose state has just changed from DOWN to UP. A value of 0 (zero) specifies manual slow start.
*/
Newservicerequestincrementinterval int `json:"newservicerequestincrementinterval,omitempty"`
/**
* Minimum number of members expected to be present when vserver is used in Autoscale.
*/
Minautoscalemembers int `json:"minautoscalemembers,omitempty"`
/**
* Maximum number of members expected to be present when vserver is used in Autoscale.
*/
Maxautoscalemembers int `json:"maxautoscalemembers,omitempty"`
/**
* Persist AVP number for Diameter Persistency.
In case this AVP is not defined in Base RFC 3588 and it is nested inside a Grouped AVP,
define a sequence of AVP numbers (max 3) in order of parent to child. So say persist AVP number X
is nested inside AVP Y which is nested in Z, then define the list as Z Y X
*/
Persistavpno []int `json:"persistavpno,omitempty"`
/**
* This argument decides the behavior in case the service which is selected from an existing persistence session has reached threshold.
*/
Skippersistency string `json:"skippersistency,omitempty"`
/**
* Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.
*/
Td int `json:"td,omitempty"`
/**
* Name of the authentication profile to be used when authentication is turned on.
*/
Authnprofile string `json:"authnprofile,omitempty"`
/**
* This option is used to retain vlan information of incoming packet when macmode is enabled
*/
Macmoderetainvlan string `json:"macmoderetainvlan,omitempty"`
/**
* Enable database specific load balancing for MySQL and MSSQL service types.
*/
Dbslb string `json:"dbslb,omitempty"`
/**
* This argument is for enabling/disabling the dns64 on lbvserver
*/
Dns64 string `json:"dns64,omitempty"`
/**
* If this option is enabled while resolving DNS64 query AAAA queries are not sent to back end dns server
*/
Bypassaaaa string `json:"bypassaaaa,omitempty"`
/**
* When set to YES, this option causes the DNS replies from this vserver to have the RA bit turned on. Typically one would set this option to YES, when the vserver is load balancing a set of DNS servers that support recursive queries.
*/
Recursionavailable string `json:"recursionavailable,omitempty"`
/**
* By turning on this option packets destined to a vserver in a cluster will not undergo any steering. Turn this option on for single packet request response mode or when the upstream device is performing a proper RSS for connection based distribution.
*/
Processlocal string `json:"processlocal,omitempty"`
/**
* Name of the DNS profile to be associated with the VServer. DNS profile properties will be applied to the transactions processed by a VServer. This parameter is valid only for DNS and DNS-TCP VServers.
*/
Dnsprofilename string `json:"dnsprofilename,omitempty"`
/**
* Name of the LB profile which is associated to the vserver
*/
Lbprofilename string `json:"lbprofilename,omitempty"`
/**
* Port number for the virtual server, from which we absorb the traffic for http redirect
*/
Redirectfromport int `json:"redirectfromport,omitempty"`
/**
* URL to which all HTTP traffic received on the port specified in the -redirectFromPort parameter is redirected.
*/
Httpsredirecturl string `json:"httpsredirecturl,omitempty"`
/**
* This option enables you to retain existing connections on a node joining a Cluster system or when a node is being configured for passive timeout. By default, this option is disabled.
*/
Retainconnectionsoncluster string `json:"retainconnectionsoncluster,omitempty"`
/**
* Name of the adfsProxy profile to be used to support ADFSPIP protocol for ADFS servers.
*/
Adfsproxyprofile string `json:"adfsproxyprofile,omitempty"`
/**
* Port number for external TCP probe. NetScaler provides support for external TCP health check of the vserver status over the selected port. This option is only supported for vservers assigned with an IPAddress or ipset.
*/
Tcpprobeport int `json:"tcpprobeport,omitempty"`
/**
* Name of QUIC profile which will be attached to the VServer.
*/
Quicprofilename string `json:"quicprofilename,omitempty"`
/**
* Name of the QUIC Bridge profile whose settings are to be applied to the virtual server.
*/
Quicbridgeprofilename string `json:"quicbridgeprofilename,omitempty"`
/**
* Citrix ADC provides support for external health check of the vserver status. Select HTTP or TCP probes for healthcheck
*/
Probeprotocol string `json:"probeprotocol,omitempty"`
/**
* HTTP code to return in SUCCESS case.
*/
Probesuccessresponsecode string `json:"probesuccessresponsecode,omitempty"`
/**
* Citrix ADC provides support for external health check of the vserver status. Select port for HTTP/TCP monitoring
*/
Probeport int `json:"probeport,omitempty"`
/**
* Weight to assign to the specified service.
*/
Weight int `json:"weight,omitempty"`
/**
* Service to bind to the virtual server.
*/
Servicename string `json:"servicename,omitempty"`
/**
* The redirect URL to be unset.
*/
Redirurlflags bool `json:"redirurlflags,omitempty"`
/**
* New name for the virtual server.
*/
Newname string `json:"newname,omitempty"`
//------- Read only Parameter ---------;
Value string `json:"value,omitempty"`
Ipmapping string `json:"ipmapping,omitempty"`
Ngname string `json:"ngname,omitempty"`
Type string `json:"type,omitempty"`
Curstate string `json:"curstate,omitempty"`
Effectivestate string `json:"effectivestate,omitempty"`
Status string `json:"status,omitempty"`
Lbrrreason string `json:"lbrrreason,omitempty"`
Redirect string `json:"redirect,omitempty"`
Precedence string `json:"precedence,omitempty"`
Homepage string `json:"homepage,omitempty"`
Dnsvservername string `json:"dnsvservername,omitempty"`
Domain string `json:"domain,omitempty"`
Cachevserver string `json:"cachevserver,omitempty"`
Health string `json:"health,omitempty"`
Ruletype string `json:"ruletype,omitempty"`
Groupname string `json:"groupname,omitempty"`
Cookiedomain string `json:"cookiedomain,omitempty"`
Map string `json:"map,omitempty"`
Gt2gb string `json:"gt2gb,omitempty"`
Consolidatedlconn string `json:"consolidatedlconn,omitempty"`
Consolidatedlconngbl string `json:"consolidatedlconngbl,omitempty"`
Thresholdvalue string `json:"thresholdvalue,omitempty"`
Bindpoint string `json:"bindpoint,omitempty"`
Version string `json:"version,omitempty"`
Totalservices string `json:"totalservices,omitempty"`
Activeservices string `json:"activeservices,omitempty"`
Statechangetimesec string `json:"statechangetimesec,omitempty"`
Statechangetimeseconds string `json:"statechangetimeseconds,omitempty"`
Statechangetimemsec string `json:"statechangetimemsec,omitempty"`
Tickssincelaststatechange string `json:"tickssincelaststatechange,omitempty"`
Isgslb string `json:"isgslb,omitempty"`
Vsvrdynconnsothreshold string `json:"vsvrdynconnsothreshold,omitempty"`
Backupvserverstatus string `json:"backupvserverstatus,omitempty"`
Nodefaultbindings string `json:"nodefaultbindings,omitempty"`
} | resource/config/lbvserver.go | 0.816589 | 0.499878 | lbvserver.go | starcoder |
package iso20022
// CreditTransferTransactionInformation12 is a set of elements used to provide
// information specific to the individual transaction(s) included in the message.
//
// NOTE: field order is significant — encoding/xml serializes struct fields in
// declaration order, so do not reorder.
type CreditTransferTransactionInformation12 struct {

	// Ultimate party that owes an amount of money to the (ultimate) creditor.
	UltimateDebtor *PartyIdentification32 `xml:"UltmtDbtr,omitempty"`

	// Party that initiates the payment.
	// Usage: This can be either the debtor or a party that initiates the credit transfer on behalf of the debtor.
	InitiatingParty *PartyIdentification32 `xml:"InitgPty,omitempty"`

	// Party that owes an amount of money to the (ultimate) creditor.
	Debtor *PartyIdentification32 `xml:"Dbtr"`

	// Unambiguous identification of the account of the debtor to which a debit entry will be made as a result of the transaction.
	DebtorAccount *CashAccount16 `xml:"DbtrAcct,omitempty"`

	// Financial institution servicing an account for the debtor.
	DebtorAgent *BranchAndFinancialInstitutionIdentification4 `xml:"DbtrAgt"`

	// Unambiguous identification of the account of the debtor agent at its servicing agent in the payment chain.
	DebtorAgentAccount *CashAccount16 `xml:"DbtrAgtAcct,omitempty"`

	// Agent immediately prior to the instructing agent.
	PreviousInstructingAgent *BranchAndFinancialInstitutionIdentification4 `xml:"PrvsInstgAgt,omitempty"`

	// Unambiguous identification of the account of the previous instructing agent at its servicing agent in the payment chain.
	PreviousInstructingAgentAccount *CashAccount16 `xml:"PrvsInstgAgtAcct,omitempty"`

	// Agent between the debtor's agent and the creditor's agent.
	//
	// Usage: If more than one intermediary agent is present, then IntermediaryAgent1 identifies the agent between the DebtorAgent and the IntermediaryAgent2.
	IntermediaryAgent1 *BranchAndFinancialInstitutionIdentification4 `xml:"IntrmyAgt1,omitempty"`

	// Unambiguous identification of the account of the intermediary agent 1 at its servicing agent in the payment chain.
	IntermediaryAgent1Account *CashAccount16 `xml:"IntrmyAgt1Acct,omitempty"`

	// Agent between the debtor's agent and the creditor's agent.
	//
	// Usage: If more than two intermediary agents are present, then IntermediaryAgent2 identifies the agent between the IntermediaryAgent1 and the IntermediaryAgent3.
	IntermediaryAgent2 *BranchAndFinancialInstitutionIdentification4 `xml:"IntrmyAgt2,omitempty"`

	// Unambiguous identification of the account of the intermediary agent 2 at its servicing agent in the payment chain.
	IntermediaryAgent2Account *CashAccount16 `xml:"IntrmyAgt2Acct,omitempty"`

	// Agent between the debtor's agent and the creditor's agent.
	//
	// Usage: If IntermediaryAgent3 is present, then it identifies the agent between the IntermediaryAgent 2 and the CreditorAgent.
	IntermediaryAgent3 *BranchAndFinancialInstitutionIdentification4 `xml:"IntrmyAgt3,omitempty"`

	// Unambiguous identification of the account of the intermediary agent 3 at its servicing agent in the payment chain.
	IntermediaryAgent3Account *CashAccount16 `xml:"IntrmyAgt3Acct,omitempty"`

	// Financial institution servicing an account for the creditor.
	CreditorAgent *BranchAndFinancialInstitutionIdentification4 `xml:"CdtrAgt"`

	// Unambiguous identification of the account of the creditor agent at its servicing agent to which a credit entry will be made as a result of the payment transaction.
	CreditorAgentAccount *CashAccount16 `xml:"CdtrAgtAcct,omitempty"`

	// Party to which an amount of money is due.
	Creditor *PartyIdentification32 `xml:"Cdtr"`

	// Unambiguous identification of the account of the creditor to which a credit entry will be posted as a result of the payment transaction.
	CreditorAccount *CashAccount16 `xml:"CdtrAcct,omitempty"`

	// Ultimate party to which an amount of money is due.
	UltimateCreditor *PartyIdentification32 `xml:"UltmtCdtr,omitempty"`

	// Information supplied to enable the matching of an entry with the items that the transfer is intended to settle, such as commercial invoices in an accounts' receivable system.
	RemittanceInformation *RemittanceInformation5 `xml:"RmtInf,omitempty"`

	// Amount of money to be moved between the debtor and creditor, before deduction of charges, expressed in the currency as ordered by the initiating party.
	InstructedAmount *ActiveOrHistoricCurrencyAndAmount `xml:"InstdAmt,omitempty"`
}
// AddUltimateDebtor allocates the UltimateDebtor field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddUltimateDebtor() *PartyIdentification32 {
	ud := &PartyIdentification32{}
	c.UltimateDebtor = ud
	return ud
}
// AddInitiatingParty allocates the InitiatingParty field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddInitiatingParty() *PartyIdentification32 {
	ip := &PartyIdentification32{}
	c.InitiatingParty = ip
	return ip
}
// AddDebtor allocates the Debtor field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddDebtor() *PartyIdentification32 {
	dbtr := &PartyIdentification32{}
	c.Debtor = dbtr
	return dbtr
}
// AddDebtorAccount allocates the DebtorAccount field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddDebtorAccount() *CashAccount16 {
	acct := &CashAccount16{}
	c.DebtorAccount = acct
	return acct
}
// AddDebtorAgent allocates the DebtorAgent field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddDebtorAgent() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.DebtorAgent = agt
	return agt
}
// AddDebtorAgentAccount allocates the DebtorAgentAccount field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddDebtorAgentAccount() *CashAccount16 {
	acct := &CashAccount16{}
	c.DebtorAgentAccount = acct
	return acct
}
// AddPreviousInstructingAgent allocates the PreviousInstructingAgent field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddPreviousInstructingAgent() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.PreviousInstructingAgent = agt
	return agt
}
// AddPreviousInstructingAgentAccount allocates the PreviousInstructingAgentAccount field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddPreviousInstructingAgentAccount() *CashAccount16 {
	acct := &CashAccount16{}
	c.PreviousInstructingAgentAccount = acct
	return acct
}
// AddIntermediaryAgent1 allocates the IntermediaryAgent1 field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent1() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.IntermediaryAgent1 = agt
	return agt
}
// AddIntermediaryAgent1Account allocates the IntermediaryAgent1Account field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent1Account() *CashAccount16 {
	acct := &CashAccount16{}
	c.IntermediaryAgent1Account = acct
	return acct
}
// AddIntermediaryAgent2 allocates the IntermediaryAgent2 field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent2() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.IntermediaryAgent2 = agt
	return agt
}
// AddIntermediaryAgent2Account allocates the IntermediaryAgent2Account field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent2Account() *CashAccount16 {
	acct := &CashAccount16{}
	c.IntermediaryAgent2Account = acct
	return acct
}
// AddIntermediaryAgent3 allocates the IntermediaryAgent3 field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent3() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.IntermediaryAgent3 = agt
	return agt
}
// AddIntermediaryAgent3Account allocates the IntermediaryAgent3Account field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddIntermediaryAgent3Account() *CashAccount16 {
	acct := &CashAccount16{}
	c.IntermediaryAgent3Account = acct
	return acct
}
// AddCreditorAgent allocates the CreditorAgent field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddCreditorAgent() *BranchAndFinancialInstitutionIdentification4 {
	agt := &BranchAndFinancialInstitutionIdentification4{}
	c.CreditorAgent = agt
	return agt
}
// AddCreditorAgentAccount allocates the CreditorAgentAccount field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddCreditorAgentAccount() *CashAccount16 {
	acct := &CashAccount16{}
	c.CreditorAgentAccount = acct
	return acct
}
// AddCreditor allocates the Creditor field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddCreditor() *PartyIdentification32 {
	cdtr := &PartyIdentification32{}
	c.Creditor = cdtr
	return cdtr
}
// AddCreditorAccount allocates the CreditorAccount field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddCreditorAccount() *CashAccount16 {
	acct := &CashAccount16{}
	c.CreditorAccount = acct
	return acct
}
// AddUltimateCreditor allocates the UltimateCreditor field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddUltimateCreditor() *PartyIdentification32 {
	uc := &PartyIdentification32{}
	c.UltimateCreditor = uc
	return uc
}
// AddRemittanceInformation allocates the RemittanceInformation field and returns it for population.
func (c *CreditTransferTransactionInformation12) AddRemittanceInformation() *RemittanceInformation5 {
	rmt := &RemittanceInformation5{}
	c.RemittanceInformation = rmt
	return rmt
}
func (c *CreditTransferTransactionInformation12) SetInstructedAmount(value, currency string) {
c.InstructedAmount = NewActiveOrHistoricCurrencyAndAmount(value, currency)
} | CreditTransferTransactionInformation12.go | 0.735262 | 0.451387 | CreditTransferTransactionInformation12.go | starcoder |
package asserts
import (
"errors"
"sync"
)
// memoryBackstore is a purely in-memory implementation of Backstore.
// top is the root of a tree keyed by [assertion-type-name, primary-key...];
// mu guards all access to the tree.
type memoryBackstore struct {
	top memBSBranch
	mu sync.RWMutex
}
// memBSNode is one level of the in-memory assertion tree: branch nodes route
// on a key component, leaf nodes store assertions keyed by format number.
type memBSNode interface {
	// put stores assert under the remaining key components.
	put(assertType *AssertionType, key []string, assert Assertion) error
	// get retrieves the assertion for the remaining key components,
	// considering only formats up to maxFormat.
	get(key []string, maxFormat int) (Assertion, error)
	// search walks the subtree matching hint ("" components are wildcards)
	// and invokes found for every candidate.
	search(hint []string, found func(Assertion), maxFormat int)
}
// memBSBranch is an interior tree node mapping one key component to the next level.
type memBSBranch map[string]memBSNode
// memBSLeaf is a leaf tree node mapping the final key component to the stored
// assertions, one per format number.
type memBSLeaf map[string]map[int]Assertion
// put descends into the child for the first key component, creating the
// child node on demand (a branch while more than two components remain,
// otherwise a leaf), and stores the assertion there.
func (br memBSBranch) put(assertType *AssertionType, key []string, assert Assertion) error {
	child := br[key[0]]
	if child == nil {
		if len(key) > 2 {
			child = make(memBSBranch)
		} else {
			child = make(memBSLeaf)
		}
		br[key[0]] = child
	}
	return child.put(assertType, key[1:], assert)
}
// cur returns the stored assertion for key0 with the highest revision among
// formats not exceeding maxFormat, or nil if none qualifies.
func (leaf memBSLeaf) cur(key0 string, maxFormat int) (a Assertion) {
	for fmtnum, cand := range leaf[key0] {
		if fmtnum > maxFormat {
			continue
		}
		if a == nil || cand.Revision() > a.Revision() {
			a = cand
		}
	}
	return a
}
// put stores assert under its format number, refusing to replace an equal or
// newer revision already stored for a supported format.
func (leaf memBSLeaf) put(assertType *AssertionType, key []string, assert Assertion) error {
	k := key[0]
	if cur := leaf.cur(k, assertType.MaxSupportedFormat()); cur != nil {
		curRev, rev := cur.Revision(), assert.Revision()
		if curRev >= rev {
			return &RevisionError{Current: curRev, Used: rev}
		}
	}
	formats := leaf[k]
	if formats == nil {
		formats = make(map[int]Assertion)
		leaf[k] = formats
	}
	formats[assert.Format()] = assert
	return nil
}
// errNotFound is used internally by backends; it is converted to the richer
// NotFoundError only at their public interface boundary.
var errNotFound = errors.New("assertion not found")
// get forwards the lookup to the child for the first key component, or
// reports errNotFound when no such child exists.
func (br memBSBranch) get(key []string, maxFormat int) (Assertion, error) {
	child := br[key[0]]
	if child == nil {
		return nil, errNotFound
	}
	return child.get(key[1:], maxFormat)
}
// get returns the best stored assertion for the final key component, or
// errNotFound when nothing qualifies under maxFormat.
func (leaf memBSLeaf) get(key []string, maxFormat int) (Assertion, error) {
	if a := leaf.cur(key[0], maxFormat); a != nil {
		return a, nil
	}
	return nil, errNotFound
}
// search walks the children matching hint[0] (an empty component is a
// wildcard matching every child) and forwards the remaining hint components
// down the tree.
func (br memBSBranch) search(hint []string, found func(Assertion), maxFormat int) {
	hint0 := hint[0]
	if hint0 == "" {
		for _, down := range br {
			down.search(hint[1:], found, maxFormat)
		}
		return
	}
	// Redundant trailing return removed (staticcheck S1023).
	if down := br[hint0]; down != nil {
		down.search(hint[1:], found, maxFormat)
	}
}
// search reports the best assertion for the final hint component, or for
// every key when the component is the "" wildcard.
func (leaf memBSLeaf) search(hint []string, found func(Assertion), maxFormat int) {
	hint0 := hint[0]
	if hint0 != "" {
		if a := leaf.cur(hint0, maxFormat); a != nil {
			found(a)
		}
		return
	}
	for key := range leaf {
		if a := leaf.cur(key, maxFormat); a != nil {
			found(a)
		}
	}
}
// NewMemoryBackstore creates a memory backed assertions backstore.
func NewMemoryBackstore() Backstore {
	bs := &memoryBackstore{top: make(memBSBranch)}
	return bs
}
// Put stores the assertion under the internal key
// [type-name, primary-key components...]; it fails with a RevisionError when
// an equal or newer revision is already stored for a supported format.
func (mbs *memoryBackstore) Put(assertType *AssertionType, assert Assertion) error {
	mbs.mu.Lock()
	defer mbs.mu.Unlock()

	internalKey := make([]string, 1, 1+len(assertType.PrimaryKey))
	internalKey[0] = assertType.Name
	internalKey = append(internalKey, assert.Ref().PrimaryKey...)

	// Return directly instead of via a needless temporary (staticcheck S1021).
	return mbs.top.put(assertType, internalKey, assert)
}
// Get retrieves the assertion for the given primary key, considering only
// formats up to maxFormat; errNotFound from the tree is translated into the
// public NotFoundError.
func (mbs *memoryBackstore) Get(assertType *AssertionType, key []string, maxFormat int) (Assertion, error) {
	mbs.mu.RLock()
	defer mbs.mu.RUnlock()

	// The internal key always has the full primary-key depth; components
	// beyond len(key) remain "".
	internalKey := make([]string, 1+len(assertType.PrimaryKey))
	internalKey[0] = assertType.Name
	copy(internalKey[1:], key)

	a, err := mbs.top.get(internalKey, maxFormat)
	if err == errNotFound {
		err = &NotFoundError{Type: assertType}
		return nil, err
	}
	return a, err
}
func (mbs *memoryBackstore) Search(assertType *AssertionType, headers map[string]string, foundCb func(Assertion), maxFormat int) error {
mbs.mu.RLock()
defer mbs.mu.RUnlock()
hint := make([]string, 1+len(assertType.PrimaryKey))
hint[0] = assertType.Name
for i, name := range assertType.PrimaryKey {
hint[1+i] = headers[name]
}
candCb := func(a Assertion) {
if searchMatch(a, headers) {
foundCb(a)
}
}
mbs.top.search(hint, candCb, maxFormat)
return nil
} | vendor/github.com/snapcore/snapd/asserts/membackstore.go | 0.547948 | 0.419707 | membackstore.go | starcoder |
package geobin
import (
"encoding/binary"
"github.com/tidwall/tile38/geojson"
"github.com/tidwall/tile38/geojson/geohash"
)
// BBox is an axis-aligned bounding box delimited by its minimum and maximum
// corner positions.
type BBox struct {
	Min, Max Position
}
// WithinBBox detects if the object is fully contained inside a bbox.
func (g Object) WithinBBox(bbox BBox) bool {
	min := geojson.Position{bbox.Min.X, bbox.Min.Y, bbox.Min.Z}
	max := geojson.Position{bbox.Max.X, bbox.Max.Y, bbox.Max.Z}
	return g.bridge().WithinBBox(geojson.BBox{Min: min, Max: max})
}
// IntersectsBBox detects if the object intersects a bbox.
func (g Object) IntersectsBBox(bbox BBox) bool {
	min := geojson.Position{bbox.Min.X, bbox.Min.Y, bbox.Min.Z}
	max := geojson.Position{bbox.Max.X, bbox.Max.Y, bbox.Max.Z}
	return g.bridge().IntersectsBBox(geojson.BBox{Min: min, Max: max})
}
// Within detects if the object is fully contained inside another object.
func (g Object) Within(o Object) bool {
	inner, outer := g.bridge(), o.bridge()
	return inner.Within(outer)
}
// Intersects detects if the object intersects another object.
func (g Object) Intersects(o Object) bool {
	a, b := g.bridge(), o.bridge()
	return a.Intersects(b)
}
// Nearby detects if the object is nearby a position.
func (g Object) Nearby(center Position, meters float64) bool {
	p := geojson.Position{center.X, center.Y, center.Z}
	return g.bridge().Nearby(p, meters)
}
// CalculatedBBox is exterior bbox containing the object.
func (g Object) CalculatedBBox() BBox {
	gb := g.bridge().CalculatedBBox()
	min := Position{gb.Min.X, gb.Min.Y, gb.Min.Z}
	max := Position{gb.Max.X, gb.Max.Y, gb.Max.Z}
	return BBox{Min: min, Max: max}
}
// CalculatedPoint is a point representation of the object.
func (g Object) CalculatedPoint() Position {
	cp := g.bridge().CalculatedPoint()
	return Position{X: cp.X, Y: cp.Y, Z: cp.Z}
}
// Geohash converts the object to a geohash value.
func (g Object) Geohash(precision int) (string, error) {
	obj := g.bridge()
	return obj.Geohash(precision)
}
// IsBBoxDefined returns true if the object has a defined bbox.
func (g Object) IsBBoxDefined() bool {
	return g.bridge().IsBBoxDefined()
}
// Sparse subdivides the object's bbox (via geojson.BBox.Sparse) and returns
// each resulting sub-box as a 3D rect object.
func (g Object) Sparse(amount byte) []Object {
	gb := g.BBox()
	b := geojson.BBox{
		Min: geojson.Position{gb.Min.X, gb.Min.Y, gb.Min.Z},
		Max: geojson.Position{gb.Max.X, gb.Max.Y, gb.Max.Z},
	}
	bb := b.Sparse(amount)
	// Preallocate: the result count is known up front.
	res := make([]Object, 0, len(bb))
	for _, b := range bb {
		res = append(res, Make3DRect(b.Min.X, b.Min.Y, b.Min.Z, b.Max.X, b.Max.Y, b.Max.Z))
	}
	return res
}
// BBoxFromCenter builds a 2D rect object from the bbox that
// geojson.BBoxesFromCenter computes around the given center point.
func BBoxFromCenter(lat float64, lon float64, meters float64) Object {
	bbox := geojson.BBoxesFromCenter(lat, lon, meters)
	return Make2DRect(bbox.Min.X, bbox.Min.Y, bbox.Max.X, bbox.Max.Y)
}
// DistanceTo calculates the distance to a position
func (p Position) DistanceTo(position Position) float64 {
	from := geojson.Position{X: p.X, Y: p.Y, Z: p.Z}
	to := geojson.Position{X: position.X, Y: position.Y, Z: position.Z}
	return from.DistanceTo(to)
}
// Destination calculates a new position based on the distance and bearing.
func (p Position) Destination(meters, bearingDegrees float64) Position {
	from := geojson.Position{X: p.X, Y: p.Y, Z: p.Z}
	dest := from.Destination(meters, bearingDegrees)
	return Position{X: dest.X, Y: dest.Y, Z: dest.Z}
}
// geomReadPosition decodes two (or, for dims == 3, three) little-endian
// float64 coordinates from the front of data and returns the position along
// with the remaining bytes.
func geomReadPosition(data []byte, dims int) (geojson.Position, []byte) {
	var x, y, z float64
	x, data = readFloat64(data)
	y, data = readFloat64(data)
	if dims == 3 {
		z, data = readFloat64(data)
	}
	return geojson.Position{X: x, Y: y, Z: z}, data
}
// geomReadBBox decodes the leading bbox section of the given byte size:
// 48/32 bytes hold distinct 3D/2D min and max corners, 24/16 bytes hold a
// single 3D/2D point used for both corners. Any other size yields a zero
// bbox. Reuses geomReadPosition instead of duplicating the float decoding,
// and uses keyed composite literals (go vet composites).
func geomReadBBox(data []byte, bboxSize int) geojson.BBox {
	switch bboxSize {
	case 48:
		min, rest := geomReadPosition(data, 3)
		max, _ := geomReadPosition(rest, 3)
		return geojson.BBox{Min: min, Max: max}
	case 32:
		min, rest := geomReadPosition(data, 2)
		max, _ := geomReadPosition(rest, 2)
		return geojson.BBox{Min: min, Max: max}
	case 24:
		p, _ := geomReadPosition(data, 3)
		return geojson.BBox{Min: p, Max: p}
	case 16:
		p, _ := geomReadPosition(data, 2)
		return geojson.BBox{Min: p, Max: p}
	}
	return geojson.BBox{}
}
// bridge decodes the binary-encoded Object into the equivalent
// geojson.Object so the geojson package can perform the actual spatial
// computations.
//
// The last byte of o.data is a flags byte:
//   bit 0: 0 = string object, 1 = geometry
//   bit 1: 0 = 2 dimensions, 1 = 3 dimensions
//   bit 2: 0 = point-sized bbox section, 1 = rect-sized bbox section
//   bit 3: 0 = simple (bbox-only) object, 1 = complex geometry payload follows
//   bit 4: 1 = an extra-data section is present (complex objects only)
func (o Object) bridge() geojson.Object {
	if len(o.data) == 0 {
		// empty geometry
		return geojson.String("")
	}
	tail := o.data[len(o.data)-1]
	if tail&1 == 0 {
		// object is a string
		return geojson.String(o.String())
	}
	// Derive the dimensionality and the byte size of the leading bbox
	// section from the flag bits.
	var dims int
	var bboxSize int
	if tail>>1&1 == 1 {
		dims = 3
		if tail>>2&1 == 1 {
			// 3D rect
			bboxSize = 48
		} else {
			// 3D point
			bboxSize = 24
		}
	} else {
		dims = 2
		if tail>>2&1 == 1 {
			// 2D rect
			bboxSize = 32
		} else {
			// 2D point
			bboxSize = 16
		}
	}
	if (tail>>3)&1 == 0 {
		// simple
		switch bboxSize {
		default:
			return geojson.String("") // invalid
		case 48, 32:
			// simple rect, bbox around a center point
			bbox := geomReadBBox(o.data, bboxSize)
			return geojson.Point{
				Coordinates: geojson.Position{
					X: (bbox.Max.X + bbox.Min.X) / 2,
					Y: (bbox.Max.Y + bbox.Min.Y) / 2,
					Z: (bbox.Max.Z + bbox.Min.Z) / 2,
				},
				BBox: &bbox,
			}
		case 24:
			// simple 3d point
			p, _ := geomReadPosition(o.data, dims)
			return geojson.Point{Coordinates: p}
		case 16:
			// simple 2d point
			p, _ := geomReadPosition(o.data, dims)
			return geojson.SimplePoint{p.X, p.Y}
		}
	}
	// Complex object: an optional exdata section sits between the bbox
	// section and the geometry payload; its size is stored in the 4 bytes
	// immediately preceding the tail byte.
	var exsz int
	if tail>>4&1 == 1 {
		// has exdata, skip over
		exsz = int(binary.LittleEndian.Uint32(o.data[len(o.data)-5:]))
	}
	geomData := o.data[bboxSize+exsz:]
	geomHead := geomData[0]
	geomType := GeometryType(geomHead >> 4)
	geomData = geomData[1:]
	if geomHead&1 == 1 {
		// hasMembers: skip over the length-prefixed members blob
		sz := int(binary.LittleEndian.Uint32(geomData))
		geomData = geomData[4+sz:]
	}
	var bbox *geojson.BBox
	if geomHead>>1&1 == 1 {
		// export bbox
		v := geomReadBBox(o.data, bboxSize)
		bbox = &v
	}
	// complex, let's pull the geom data
	switch geomType {
	default:
		return geojson.String("")
	case Point:
		p, _ := geomReadPosition(geomData, dims)
		return geojson.Point{Coordinates: p, BBox: bbox}
	case MultiPoint, LineString:
		// a uint32 count followed by that many positions
		n := int(binary.LittleEndian.Uint32(geomData))
		geomData = geomData[4:]
		coords := make([]geojson.Position, n)
		for i := 0; i < n; i++ {
			coords[i], geomData = geomReadPosition(geomData, dims)
		}
		if geomType == MultiPoint {
			return geojson.MultiPoint{coords, bbox}
		}
		return geojson.LineString{coords, bbox}
	case MultiLineString, Polygon:
		// a count of position lists, each itself count-prefixed
		n := int(binary.LittleEndian.Uint32(geomData))
		geomData = geomData[4:]
		coords := make([][]geojson.Position, n)
		for i := 0; i < n; i++ {
			nn := int(binary.LittleEndian.Uint32(geomData))
			geomData = geomData[4:]
			coords[i] = make([]geojson.Position, nn)
			for j := 0; j < nn; j++ {
				coords[i][j], geomData = geomReadPosition(geomData, dims)
			}
		}
		if geomType == MultiLineString {
			return geojson.MultiLineString{coords, bbox}
		}
		return geojson.Polygon{coords, bbox}
	case MultiPolygon:
		// three levels of count-prefixed position lists
		n := int(binary.LittleEndian.Uint32(geomData))
		geomData = geomData[4:]
		coords := make([][][]geojson.Position, n)
		for i := 0; i < n; i++ {
			nn := int(binary.LittleEndian.Uint32(geomData))
			geomData = geomData[4:]
			coords[i] = make([][]geojson.Position, nn)
			for j := 0; j < nn; j++ {
				nnn := int(binary.LittleEndian.Uint32(geomData))
				geomData = geomData[4:]
				coords[i][j] = make([]geojson.Position, nnn)
				for k := 0; k < nnn; k++ {
					coords[i][j][k], geomData = geomReadPosition(geomData, dims)
				}
			}
		}
		return geojson.MultiPolygon{coords, bbox}
	case GeometryCollection, FeatureCollection:
		// a count of length-prefixed nested binary objects, each decoded
		// recursively
		n := int(binary.LittleEndian.Uint32(geomData))
		geomData = geomData[4:]
		objs := make([]geojson.Object, n)
		for i := 0; i < n; i++ {
			sz := int(binary.LittleEndian.Uint32(geomData))
			o := Object{geomData[4 : 4+sz : 4+sz]}
			geomData = geomData[4+sz:]
			objs[i] = o.bridge()
		}
		if geomType == GeometryCollection {
			return geojson.GeometryCollection{objs, bbox}
		}
		return geojson.FeatureCollection{objs, bbox}
	case Feature:
		// a single length-prefixed nested binary object
		sz := int(binary.LittleEndian.Uint32(geomData))
		o := Object{geomData[4 : 4+sz : 4+sz]}
		geom := o.bridge()
		return geojson.Feature{
			Geometry: geom,
			BBox: bbox,
			//idprops: string(o.Members()),
		}
	}
}
// GeohashEncode encodes a latitude/longitude pair into a geohash string of
// the requested precision.
func GeohashEncode(lat, lon float64, precision int) (string, error) {
	return geohash.Encode(lat, lon, precision)
}
// GeohashDecode decodes a geohash string back into a latitude/longitude pair.
func GeohashDecode(hash string) (lat, lon float64, err error) {
	return geohash.Decode(hash)
}
package version
const dataTable = `[
{
"vid": 1,
"name": "LIFX",
"defaults": {
"hev": false,
"color": false,
"chain": false,
"matrix": false,
"relays": false,
"buttons": false,
"infrared": false,
"multizone": false,
"temperature_range": null,
"extended_multizone": false
},
"products": [
{
"pid": 1,
"name": "LIFX Original 1000",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 3,
"name": "LIFX Color 650",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 10,
"name": "LIFX White 800 (Low Voltage)",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
6500
]
},
"upgrades": []
},
{
"pid": 11,
"name": "LIFX White 800 (High Voltage)",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
6500
]
},
"upgrades": []
},
{
"pid": 15,
"name": "LIFX Color 1000",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 18,
"name": "LIFX White 900 BR30 (Low Voltage)",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 19,
"name": "LIFX White 900 BR30 (High Voltage)",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 20,
"name": "LIFX Color 1000 BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 22,
"name": "LIFX Color 1000",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 27,
"name": "LIFX A19",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 28,
"name": "LIFX BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 29,
"name": "LIFX A19 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 30,
"name": "LIFX BR30 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 31,
"name": "LIFX Z",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": true,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 32,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": true,
"temperature_range": [
2500,
9000
],
"min_ext_mz_firmware": 1532997580,
"min_ext_mz_firmware_components": [
2,
77
]
},
"upgrades": [
{
"major": 2,
"minor": 77,
"features": {
"extended_multizone": true
}
},
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 36,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 37,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 38,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": true,
"temperature_range": [
2500,
9000
],
"min_ext_mz_firmware": 1532997580,
"min_ext_mz_firmware_components": [
2,
77
]
},
"upgrades": [
{
"major": 2,
"minor": 77,
"features": {
"extended_multizone": true
}
},
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 39,
"name": "LIFX Downlight White to Warm",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 40,
"name": "LIFX Downlight",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 43,
"name": "LIFX A19",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 44,
"name": "LIFX BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 45,
"name": "LIFX A19 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 46,
"name": "LIFX BR30 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": [
{
"major": 2,
"minor": 80,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 49,
"name": "LIFX Mini Color",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 50,
"name": "LIFX Mini White to Warm",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
6500
]
},
"upgrades": [
{
"major": 3,
"minor": 70,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 51,
"name": "LIFX Mini White",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
2700
]
},
"upgrades": []
},
{
"pid": 52,
"name": "LIFX GU10",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 53,
"name": "LIFX GU10",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 55,
"name": "LIFX Tile",
"features": {
"color": true,
"chain": true,
"matrix": true,
"infrared": false,
"multizone": false,
"temperature_range": [
2500,
9000
]
},
"upgrades": []
},
{
"pid": 57,
"name": "LIFX Candle",
"features": {
"color": true,
"chain": false,
"matrix": true,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 59,
"name": "LIFX Mini Color",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 60,
"name": "LIFX Mini White to Warm",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
6500
]
},
"upgrades": [
{
"major": 3,
"minor": 70,
"features": {
"temperature_range": [
1500,
9000
]
}
}
]
},
{
"pid": 61,
"name": "LIFX Mini White",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
2700
]
},
"upgrades": []
},
{
"pid": 62,
"name": "LIFX A19",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 63,
"name": "LIFX BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 64,
"name": "LIFX A19 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 65,
"name": "LIFX BR30 Night Vision",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 66,
"name": "LIFX Mini White",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
2700
]
},
"upgrades": []
},
{
"pid": 68,
"name": "LIFX Candle",
"features": {
"color": true,
"chain": false,
"matrix": true,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 70,
"name": "LIFX Switch",
"features": {
"color": false,
"relays": true,
"chain": false,
"matrix": false,
"buttons": true,
"infrared": false,
"multizone": false
},
"upgrades": []
},
{
"pid": 71,
"name": "LIFX Switch",
"features": {
"color": false,
"relays": true,
"chain": false,
"matrix": false,
"buttons": true,
"infrared": false,
"multizone": false
},
"upgrades": []
},
{
"pid": 81,
"name": "LIFX Candle White to Warm",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2200,
6500
]
},
"upgrades": []
},
{
"pid": 82,
"name": "LIFX Filament Clear",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2100,
2100
]
},
"upgrades": []
},
{
"pid": 85,
"name": "LIFX Filament Amber",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2000,
2000
]
},
"upgrades": []
},
{
"pid": 87,
"name": "LIFX Mini White",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
2700
]
},
"upgrades": []
},
{
"pid": 88,
"name": "LIFX Mini White",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2700,
2700
]
},
"upgrades": []
},
{
"pid": 89,
"name": "LIFX Switch",
"features": {
"color": false,
"relays": true,
"chain": false,
"matrix": false,
"buttons": true,
"infrared": false,
"multizone": false
},
"upgrades": []
},
{
"pid": 90,
"name": "LIFX Clean",
"features": {
"hev": true,
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 91,
"name": "LIFX Color",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 92,
"name": "LIFX Color",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 93,
"name": "LIFX A19 US",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 94,
"name": "LIFX BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 96,
"name": "LIFX Candle White to Warm",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2200,
6500
]
},
"upgrades": []
},
{
"pid": 97,
"name": "LIFX A19",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 98,
"name": "LIFX BR30",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 99,
"name": "LIFX Clean",
"features": {
"hev": true,
"color": true,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 100,
"name": "<NAME>",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2100,
2100
]
},
"upgrades": []
},
{
"pid": 101,
"name": "<NAME>",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
2000,
2000
]
},
"upgrades": []
},
{
"pid": 109,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 110,
"name": "LIFX <NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 111,
"name": "<NAME>",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 112,
"name": "LIFX BR30 Night Vision Intl",
"features": {
"color": true,
"chain": false,
"matrix": false,
"infrared": true,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 113,
"name": "LIFX Mini WW US",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
},
{
"pid": 114,
"name": "LIFX Mini WW Intl",
"features": {
"color": false,
"chain": false,
"matrix": false,
"infrared": false,
"multizone": false,
"temperature_range": [
1500,
9000
]
},
"upgrades": []
}
]
}
]
` | lifx/version/data.go | 0.714728 | 0.45308 | data.go | starcoder |
package main
import (
"os"
"math"
"errors"
"fmt"
"database/sql"
_ "github.com/mattn/go-sqlite3"
)
// Location stores a geolocation as a latitude/longitude pair in
// decimal degrees (as read from the postcode database).
type Location struct {
	lat float64
	long float64
}
// Helper function to take care of errors
func handleErrors(err error) {
if err != nil {
panic(err)
}
}
// Converts degrees into radians
func degreesToRadians(degrees float64) float64 {
for degrees >= 360.0 {
degrees -= 360
}
return degrees * (math.Pi / 180)
}
// haversineDistance returns the great-circle distance in kilometres
// between two locations, using the haversine formula:
//
//	a = sin²(Δφ/2) + cos φ1 · cos φ0 · sin²(Δλ/2)
//	d = 2 · r · asin(√a)
//
// Bug fix: the previous version squared the half-angle deltas directly
// ((Δ/2)²) instead of squaring their sines, which is only a small-angle
// approximation and grows inaccurate for widely separated points.
func haversineDistance(loc0 Location, loc1 Location) float64 {
	lat0 := degreesToRadians(loc0.lat)
	lat1 := degreesToRadians(loc1.lat)
	delta_lat := degreesToRadians(loc1.lat - loc0.lat)
	delta_long := degreesToRadians(loc1.long - loc0.long)
	r := 6365.079 // Earth radius (km) under London
	a := math.Pow(math.Sin(delta_lat/2.0), 2.0) +
		math.Cos(lat1)*math.Cos(lat0)*math.Pow(math.Sin(delta_long/2.0), 2.0)
	return 2.0 * r * math.Asin(math.Sqrt(a))
}
// getLocationFromPostcode looks up the lat/long for a postcode in the
// local SQLite database (./postcode.sl3). Any failure — including an
// unknown postcode — panics via handleErrors.
//
// Fixes over the previous version: the postcode is passed as a bound
// query parameter instead of being interpolated into the SQL string
// (closing an injection hole), and the query error is no longer
// silently discarded.
func getLocationFromPostcode(postcode string) Location {
	db, err := sql.Open("sqlite3", "./postcode.sl3")
	handleErrors(err)
	defer db.Close()

	var location Location
	row := db.QueryRow("select lat, long from postcode where code = ? limit 1", postcode)
	// Scan returns sql.ErrNoRows when the postcode is unknown.
	handleErrors(row.Scan(&location.lat, &location.long))
	return location
}
// Returns the distance between two postcodes
func distanceBetweenPostcodes(postcode0, postcode1 string) float64 {
loc0 := getLocationFromPostcode(postcode0)
loc1 := getLocationFromPostcode(postcode1)
return haversineDistance(loc0, loc1)
}
func main() {
if len(os.Args) != 3 {
handleErrors(errors.New("Program takes two arguments, the two postcodes."))
}
postcode0 := os.Args[1]
postcode1 := os.Args[2]
distance := distanceBetweenPostcodes(postcode0, postcode1)
distance_in_miles := distance / 1.609344
fmt.Printf("The distance between postcodes %s and %s is %f km (%f mi).", postcode0, postcode1, distance, distance_in_miles)
} | thorney_distance.go | 0.873134 | 0.421552 | thorney_distance.go | starcoder |
package three
//go:generate go run geometry_method_generator/main.go -geometryType ConeGeometry -geometrySlug cone_geometry
import (
"math"
"github.com/gopherjs/gopherjs/js"
)
// ConeGeometry a class for generating Cone geometries.
// It wraps the underlying three.js ConeGeometry object; the js struct
// tags mirror the three.js property names on that object.
type ConeGeometry struct {
	*js.Object
	Radius float64 `js:"radius"`
	Height float64 `js:"height"`
	RadialSegments int `js:"radialSegments"`
	HeightSegments int `js:"heightSegments"`
	OpenEnded bool `js:"openEnded"`
	ThetaStart float64 `js:"thetaStart"`
	ThetaLength float64 `js:"thetaLength"`
}
// ConeGeometryParameters holds the constructor arguments for
// NewConeGeometry; zero values are replaced with defaults there.
type ConeGeometryParameters struct {
	// Radius of the cone base.
	Radius float64
	// Height of the Cone. Default is 1.
	Height float64
	// Number of segmented faces around the circumference of the Cone. Default is 8.
	RadialSegments int
	// Number of rows of faces along the height of the Cone. Default is 1.
	HeightSegments int
	// A Boolean indicating whether the ends of the Cone are open or capped. Default is false, meaning capped.
	OpenEnded bool
	// Start angle for first segment, default = 0 (three o'clock position).
	ThetaStart float64
	// The central angle, often called theta, of the circular sector. The default is 2*Pi, which makes for a complete Cone.
	ThetaLength float64
}
// NewConeGeometry creates a new BoxGeometry. Set ThetaLength to NaN to create empty geometry.
func NewConeGeometry(params *ConeGeometryParameters) ConeGeometry {
if params == nil {
params = &ConeGeometryParameters{}
}
// Make sure both are defined to prevent unclear code
if params.Height == 0 || params.Radius == 0 {
params.Height = 1
params.Radius = 1
}
// Probably don't want no Cone.
if params.ThetaLength == 0 {
params.ThetaLength = 2 * math.Pi
}
if math.IsNaN(params.ThetaLength) {
params.ThetaLength = 0
}
if params.RadialSegments == 0 {
params.RadialSegments = 8
}
if params.HeightSegments == 0 {
params.RadialSegments = 1
}
return ConeGeometry{
Object: three.Get("ConeGeometry").New(
params.Radius,
params.Height,
params.RadialSegments,
params.HeightSegments,
params.OpenEnded,
params.ThetaStart,
params.ThetaLength,
),
}
} | geometries_cone_geometry.go | 0.798108 | 0.497986 | geometries_cone_geometry.go | starcoder |
package geometry
import (
"fluorescence/shading"
"math"
"math/rand"
)
// Vector is a 3D vector with JSON-serializable cartesian components.
type Vector struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
	Z float64 `json:"z"`
}
// VectorZero references the zero vector
var VectorZero = Vector{}

// VectorMax references the maximum representable float64 vector
var VectorMax = Vector{math.MaxFloat64, math.MaxFloat64, math.MaxFloat64}

// VectorUp references the up vector (positive Y) with the standard cartesian axes as an orthogonal system
var VectorUp = Vector{0.0, 1.0, 0.0}

// VectorRight references the right vector (positive X) with the standard cartesian axes as an orthogonal system
var VectorRight = Vector{1.0, 0.0, 0.0}

// VectorForward references the forward vector (negative Z) with the standard cartesian axes as an orthogonal system
// it points towards negative Z to preserve the system's right-handedness
var VectorForward = Vector{0.0, 0.0, -1.0}
// RandomOnUnitDisk returns a Vector from the origin to a random point
// inside the unit disk in the z = 0 plane, found by rejection sampling:
// draw (x, y) uniformly in [-1, 1)² and retry until the point lies
// strictly inside the disk.
func RandomOnUnitDisk(rng *rand.Rand) Vector {
	for {
		x := 2.0*rng.Float64() - 1.0
		y := 2.0*rng.Float64() - 1.0
		p := Vector{X: x, Y: y, Z: 0.0}
		if p.Magnitude() < 1.0 {
			return p
		}
	}
}
// RandomInUnitSphere returns a Vector from the origin to a random point
// strictly inside the unit sphere, found by rejection sampling over the
// cube [-1, 1)³.
func RandomInUnitSphere(rng *rand.Rand) Vector {
	for {
		x := 2.0*rng.Float64() - 1.0
		y := 2.0*rng.Float64() - 1.0
		z := 2.0*rng.Float64() - 1.0
		p := Vector{X: x, Y: y, Z: z}
		if p.Magnitude() < 1.0 {
			return p
		}
	}
}
// Magnitude returns the Euclidean (L2) length of the Vector,
// i.e. sqrt(v·v).
func (v Vector) Magnitude() float64 {
	return math.Sqrt(v.Dot(v))
}
// Unit returns a new Vector with direction preserved and length equal to one.
// Note: a zero-magnitude input divides by zero, producing Inf/NaN components.
func (v Vector) Unit() Vector {
	return v.DivScalar(v.Magnitude())
}
// Dot computes the dot (scalar) product of two Vectors.
func (v Vector) Dot(w Vector) float64 {
	return v.X*w.X + v.Y*w.Y + v.Z*w.Z
}
// Cross computes the cross (vector) product v × w, which is orthogonal
// to both operands.
func (v Vector) Cross(w Vector) Vector {
	return Vector{v.Y*w.Z - v.Z*w.Y, v.Z*w.X - v.X*w.Z, v.X*w.Y - v.Y*w.X}
}
// Add adds a Vector to another Vector component-wise
func (v Vector) Add(w Vector) Vector {
	return Vector{v.X + w.X, v.Y + w.Y, v.Z + w.Z}
}

// Sub subtracts a Vector from another Vector component-wise
func (v Vector) Sub(w Vector) Vector {
	return Vector{v.X - w.X, v.Y - w.Y, v.Z - w.Z}
}

// MultScalar multiplies a Vector by a scalar
func (v Vector) MultScalar(s float64) Vector {
	return Vector{v.X * s, v.Y * s, v.Z * s}
}

// MultVector multiplies a Vector by a Vector component-wise
// (Hadamard product)
func (v Vector) MultVector(w Vector) Vector {
	return Vector{v.X * w.X, v.Y * w.Y, v.Z * w.Z}
}

// Pow raises a Vector to an exponential power, component-wise
func (v Vector) Pow(e float64) Vector {
	return Vector{math.Pow(v.X, e), math.Pow(v.Y, e), math.Pow(v.Z, e)}
}

// DivScalar divides a Vector by a scalar
// (implemented as one reciprocal and three multiplies)
func (v Vector) DivScalar(s float64) Vector {
	inv := 1.0 / s
	return Vector{v.X * inv, v.Y * inv, v.Z * inv}
}

// DivVector divides a Vector by a Vector component-wise
func (v Vector) DivVector(w Vector) Vector {
	return Vector{v.X / w.X, v.Y / w.Y, v.Z / w.Z}
}

// Negate returns a Vector pointing in the opposite direction
func (v Vector) Negate() Vector {
	return Vector{-v.X, -v.Y, -v.Z}
}
// ReflectAround returns the reflection of a vector given a normal,
// using v - 2(v·w)w. The formula assumes w has unit length — presumably
// callers pass a normalized normal; confirm at call sites.
func (v Vector) ReflectAround(w Vector) Vector {
	return v.Sub(w.MultScalar(v.Dot(w) * 2.0))
}
// RefractAround returns the refraction of v through a surface with
// normal w, where rri is the ratio of refractive indices across the
// boundary (Snell's law). The boolean result is false on total internal
// reflection, in which case the zero vector is returned.
func (v Vector) RefractAround(w Vector, rri float64) (Vector, bool) {
	unit := v.Unit()
	cosTheta := unit.Dot(w)
	// Negative discriminant means no refracted ray exists.
	discriminant := 1.0 - (rri*rri)*(1.0-(cosTheta*cosTheta))
	if discriminant <= 0 {
		return VectorZero, false
	}
	refracted := unit.Sub(w.MultScalar(cosTheta)).MultScalar(rri).
		Sub(w.MultScalar(math.Sqrt(discriminant)))
	return refracted, true
}
// ToColor converts a Vector to a Color, mapping X/Y/Z to Red/Green/Blue.
func (v Vector) ToColor() shading.Color {
	return shading.Color{
		Red:   v.X,
		Green: v.Y,
		Blue:  v.Z,
	}
}

// VectorFromColor creates a Vector from a Color, mapping
// Red/Green/Blue to X/Y/Z.
func VectorFromColor(c shading.Color) Vector {
	return Vector{c.Red, c.Green, c.Blue}
}
// Copy returns a new Vector identical to v
func (v Vector) Copy() Vector {
return Vector{v.X, v.Y, v.Z}
} | geometry/vector.go | 0.925394 | 0.679797 | vector.go | starcoder |
package ltsv
import (
"bytes"
"golang.org/x/xerrors"
)
type (
	// Field is a struct to hold a label-value pair.
	Field struct {
		Label string
		Value string
	}

	// Parser is for parsing LTSV-encoded format.
	// Note: the zero Parser has NUL ('\x00') delimiters; use
	// DefaultParser or set both delimiters explicitly.
	Parser struct {
		// FieldDelimiter is the delimiter of fields. It defaults to '\t'.
		FieldDelimiter byte
		// ValueDelimiter is the delimiter of label-value pairs. It defaults to ':'.
		ValueDelimiter byte
		// StrictMode is a flag to check if labels and values are valid.
		// If StrictMode is false,
		// the parser just split fields with `FieldDelimiter`
		// and split label and value with `ValueDelimiter` without checking if they are valid.
		// The valid label is `/[0-9A-Za-z_.-]+/`.
		// The valid value is `/[^\b\t\r\n]*/`.
		StrictMode bool
	}
)
// DefaultParser is the default parser: tab-separated fields, ':' between
// label and value, and strict validation enabled.
var DefaultParser = Parser{
	FieldDelimiter: '\t',
	ValueDelimiter: ':',
	StrictMode:     true,
}
var (
	// ErrMissingLabel is an error to describe label is missing (ex. 'my_value')
	ErrMissingLabel = xerrors.New("missing label")
	// ErrEmptyLabel is an error to describe label is empty (ex. ':my_value')
	ErrEmptyLabel = xerrors.New("empty label")
	// ErrInvalidLabel is an error to describe label contains invalid char (ex. 'my\tlabel:my_value')
	ErrInvalidLabel = xerrors.New("invalid label")
	// ErrInvalidValue is an error to describe value contains invalid char (ex. 'my_label:my_value\n')
	ErrInvalidValue = xerrors.New("invalid value")
	// Break is a sentinel a ParseLine callback returns to stop iteration
	// early; it is not reported as an error by ParseLine.
	// NOTE(review): Go convention would name this ErrBreak, but renaming
	// would break the exported API.
	Break = xerrors.New("break")
)
// ParseField parses one LTSV-encoded field, splitting it at the first
// ValueDelimiter into label and value. The returned slices alias the
// input (no copies are made). In StrictMode the label and value are
// additionally validated, and an invalid field yields a wrapped error.
func (p Parser) ParseField(field []byte) (label []byte, value []byte, err error) {
	idx := bytes.IndexByte(field, p.ValueDelimiter)
	switch idx {
	case -1:
		return nil, nil, xerrors.Errorf("bad field syntax %q: %w", string(field), ErrMissingLabel)
	case 0:
		return nil, nil, xerrors.Errorf("bad field syntax %q: %w", string(field), ErrEmptyLabel)
	}
	label, value = field[:idx], field[idx+1:]
	if p.StrictMode {
		if err = validateLabel(label); err != nil {
			return nil, nil, xerrors.Errorf("bad field label syntax %q: %w", string(field), err)
		}
		if err = validateValue(value); err != nil {
			return nil, nil, xerrors.Errorf("bad field value syntax %q: %w", string(field), err)
		}
	}
	return label, value, nil
}
// ParseLine parses one line of LTSV-encoded data and calls callback for
// each non-empty field's label/value pair. The label/value slices alias
// line, so they are only safe to use during the callback. Returning
// Break from the callback stops iteration without an error; any other
// callback error aborts parsing and is returned wrapped.
func (p Parser) ParseLine(line []byte, callback func(label []byte, value []byte) error) error {
	oriLine := line // keep the whole line for error messages
	for len(line) > 0 {
		idx := bytes.IndexByte(line, p.FieldDelimiter)
		var field []byte
		if idx == -1 {
			// No delimiter left: the remainder is the last field.
			field = line
			line = nil
		} else {
			field = line[0:idx]
			line = line[idx+1:]
		}
		// Skip empty fields (consecutive or trailing delimiters).
		if len(field) == 0 {
			continue
		}
		label, value, err := p.ParseField(field)
		if err != nil {
			return xerrors.Errorf("bad line syntax %q: %w", string(oriLine), err)
		}
		if err = callback(label, value); err != nil {
			if err == Break {
				break
			}
			return xerrors.Errorf("ParseLine callback error: %w", err)
		}
	}
	return nil
}
// ParseLineAsMap parses one line of LTSV-encoded data into a map of
// label to value. To reduce allocation, an existing map may be passed
// via record and is reused (entries are added or overwritten, never
// removed); pass nil to allocate a fresh map.
func (p Parser) ParseLineAsMap(line []byte, record map[string]string) (map[string]string, error) {
	if record == nil {
		record = make(map[string]string)
	}
	store := func(label []byte, value []byte) error {
		record[string(label)] = string(value)
		return nil
	}
	if err := p.ParseLine(line, store); err != nil {
		return nil, xerrors.Errorf(": %w", err)
	}
	return record, nil
}
// ParseLineAsSlice parses one line of LTSV-encoded data into a []Field
// in encounter order. To reduce allocation, an existing slice may be
// passed via record; it is truncated and its backing array reused.
func (p Parser) ParseLineAsSlice(line []byte, record []Field) ([]Field, error) {
	record = record[:0]
	collect := func(label []byte, value []byte) error {
		record = append(record, Field{Label: string(label), Value: string(value)})
		return nil
	}
	if err := p.ParseLine(line, collect); err != nil {
		return nil, xerrors.Errorf(": %w", err)
	}
	return record, nil
}
// validateLabel returns ErrInvalidLabel (wrapped, naming the offending
// byte) if label contains any character outside /[0-9A-Za-z_.-]/.
func validateLabel(label []byte) error {
	for _, c := range label {
		if isValidKey(c) {
			continue
		}
		return xerrors.Errorf("invalid char %q used in label %q: %w", c, string(label), ErrInvalidLabel)
	}
	return nil
}
// validateValue returns ErrInvalidValue (wrapped, naming the offending
// byte) if value contains any byte rejected by isValidValue.
func validateValue(value []byte) error {
	for _, c := range value {
		if isValidValue(c) {
			continue
		}
		return xerrors.Errorf("invalid char %q used in value %q: %w", c, string(value), ErrInvalidValue)
	}
	return nil
}
// isValidKey reports whether ch may appear in an LTSV label,
// i.e. matches /[0-9A-Za-z_.-]/. Range comparisons replace the previous
// 65-case character switch: same character set, far less error-prone.
func isValidKey(ch byte) bool {
	switch {
	case '0' <= ch && ch <= '9',
		'a' <= ch && ch <= 'z',
		'A' <= ch && ch <= 'Z',
		ch == '_', ch == '.', ch == '-':
		return true
	}
	return false
}
// isValidValue reports whether ch may appear in an LTSV value; it
// rejects backspace, tab, CR and LF and accepts everything else.
// NOTE(review): the ABNF comment below (from the LTSV spec) excludes
// %x00 and *includes* %x08, but the code rejects '\b' (0x08) and
// accepts 0x00, matching the package's documented StrictMode regex
// /[^\b\t\r\n]*/ rather than the ABNF — confirm which contract is
// intended before changing either.
func isValidValue(ch byte) bool {
	// %x01-08 / %x0B / %x0C / %x0E-FF
	switch ch {
	case '\b', '\t', '\r', '\n':
		return false
	}
	return true
}
package runtime
import (
"math"
"github.com/apmckinlay/gsuneido/util/dnum"
"github.com/apmckinlay/gsuneido/util/regex"
)
var (
	// Shared singleton Values for common constants, avoiding repeated
	// allocation and enabling identity comparison (e.g. x == True).
	Zero   Value = SuInt(0)
	One    Value = SuInt(1)
	MaxInt Value = SuDnum{Dnum: dnum.FromInt(math.MaxInt32)}
	Inf    Value = SuDnum{Dnum: dnum.PosInf}
	NegInf Value = SuDnum{Dnum: dnum.NegInf}
	True   Value = SuBool(true)
	False  Value = SuBool(false)
	// EmptyStr defined in sustr.go
)
// OpIs reports whether x equals y (identity short-circuit, then Equal),
// returning True or False.
func OpIs(x Value, y Value) Value {
	return SuBool(x == y || x.Equal(y))
}

// OpIsnt is the negation of OpIs.
func OpIsnt(x Value, y Value) Value {
	return SuBool(!x.Equal(y))
}

// OpLt implements x < y via Value.Compare.
func OpLt(x Value, y Value) Value {
	return SuBool(x.Compare(y) < 0)
}

// OpLte implements x <= y via Value.Compare.
func OpLte(x Value, y Value) Value {
	return SuBool(x.Compare(y) <= 0)
}

// OpGt implements x > y via Value.Compare.
func OpGt(x Value, y Value) Value {
	return SuBool(x.Compare(y) > 0)
}

// OpGte implements x >= y via Value.Compare.
func OpGte(x Value, y Value) Value {
	return SuBool(x.Compare(y) >= 0)
}
// OpAdd adds two values: an int fast path when both operands are small
// ints, otherwise decimal (dnum) arithmetic.
func OpAdd(x Value, y Value) Value {
	if xi, xok := SuIntToInt(x); xok {
		if yi, yok := SuIntToInt(y); yok {
			return IntVal(xi + yi)
		}
	}
	return SuDnum{Dnum: dnum.Add(ToDnum(x), ToDnum(y))}
}

// OpSub subtracts y from x, with the same int fast path as OpAdd.
func OpSub(x Value, y Value) Value {
	if xi, xok := SuIntToInt(x); xok {
		if yi, yok := SuIntToInt(y); yok {
			return IntVal(xi - yi)
		}
	}
	return SuDnum{Dnum: dnum.Sub(ToDnum(x), ToDnum(y))}
}

// OpMul multiplies two values, with the same int fast path as OpAdd.
func OpMul(x Value, y Value) Value {
	if xi, xok := SuIntToInt(x); xok {
		if yi, yok := SuIntToInt(y); yok {
			return IntVal(xi * yi)
		}
	}
	return SuDnum{Dnum: dnum.Mul(ToDnum(x), ToDnum(y))}
}

// OpDiv divides x by y. The int fast path only applies when the divisor
// is a non-zero int and the division is exact; otherwise the result is
// computed in decimal so fractional quotients are preserved.
func OpDiv(x Value, y Value) Value {
	if yi, yok := SuIntToInt(y); yok && yi != 0 {
		if xi, xok := SuIntToInt(x); xok {
			if xi%yi == 0 {
				return IntVal(xi / yi)
			}
		}
	}
	return SuDnum{Dnum: dnum.Div(ToDnum(x), ToDnum(y))}
}
// OpMod computes the integer remainder of x divided by y.
func OpMod(x Value, y Value) Value {
	return IntVal(ToInt(x) % ToInt(y))
}

// OpLeftShift shifts x left as a signed 32-bit value.
func OpLeftShift(x Value, y Value) Value {
	result := int32(ToInt(x)) << ToInt(y)
	return IntVal(int(result))
}

// OpRightShift shifts x right logically (unsigned 32-bit), so no sign
// extension occurs.
func OpRightShift(x Value, y Value) Value {
	result := uint32(ToInt(x)) >> ToInt(y)
	return IntVal(int(result))
}

// OpBitOr computes the bitwise OR of two integer values.
func OpBitOr(x Value, y Value) Value {
	return IntVal(ToInt(x) | ToInt(y))
}

// OpBitAnd computes the bitwise AND of two integer values.
func OpBitAnd(x Value, y Value) Value {
	return IntVal(ToInt(x) & ToInt(y))
}

// OpBitXor computes the bitwise XOR of two integer values.
func OpBitXor(x Value, y Value) Value {
	return IntVal(ToInt(x) ^ ToInt(y))
}

// OpBitNot computes the bitwise complement of an integer value.
func OpBitNot(x Value) Value {
	return IntVal(^ToInt(x))
}
// OpNot returns the boolean negation of x, panicking when x is not
// exactly the True or False singleton.
func OpNot(x Value) Value {
	switch x {
	case True:
		return False
	case False:
		return True
	default:
		panic("not requires boolean")
	}
}
// OpBool converts a Value to a Go bool, panicking unless the value is
// exactly the True or False singleton (conditionals require booleans).
func OpBool(x Value) bool {
	switch x {
	case True:
		return true
	case False:
		return false
	default:
		panic("conditionals require true or false")
	}
}
// OpUnaryPlus returns x unchanged if it is already a small int,
// otherwise coerces it to a decimal (SuDnum) value.
func OpUnaryPlus(x Value) Value {
	if _, ok := x.(*smi); ok {
		return x
	}
	return SuDnum{Dnum: ToDnum(x)}
}

// OpUnaryMinus negates x, with an int fast path and a decimal fallback.
func OpUnaryMinus(x Value) Value {
	if xi, ok := SuIntToInt(x); ok {
		return IntVal(-xi)
	}
	return SuDnum{Dnum: ToDnum(x).Neg()}
}
// OpCat concatenates two values as strings. The fast path handles two
// plain SuStr operands directly; anything else goes through cat3.
func OpCat(t *Thread, x, y Value) Value {
	if ssx, ok := x.(SuStr); ok {
		if ssy, ok := y.(SuStr); ok {
			return cat2(string(ssx), string(ssy))
		}
	}
	return cat3(t, x, y)
}
// cat2 concatenates two strings, returning a plain SuStr for results
// below LARGE bytes and an SuConcat (deferred concatenation) otherwise.
func cat2(xs, ys string) Value {
	const LARGE = 256
	if len(xs)+len(ys) < LARGE {
		return SuStr(xs + ys)
	}
	// One side empty (only reachable when the other side alone is
	// large): no SuConcat needed.
	if len(xs) == 0 {
		return SuStr(ys)
	}
	if len(ys) == 0 {
		return SuStr(xs)
	}
	return NewSuConcat().Add(xs).Add(ys)
}
// cat3 is the slow path for OpCat: it converts the operands to strings,
// appending onto an existing SuConcat on the left when possible, and
// preserves the callstack when either operand is an SuExcept (the left
// operand's callstack wins if both are exceptions).
func cat3(t *Thread, x, y Value) Value {
	var result Value
	if xc, ok := x.(SuConcat); ok {
		result = xc.Add(catToStr(t, y))
	} else {
		result = cat2(catToStr(t, x), catToStr(t, y))
	}
	if xe, ok := x.(*SuExcept); ok {
		return &SuExcept{SuStr: SuStr(AsStr(result)), Callstack: xe.Callstack}
	}
	if ye, ok := y.(*SuExcept); ok {
		return &SuExcept{SuStr: SuStr(AsStr(result)), Callstack: ye.Callstack}
	}
	return result
}
// catToStr renders a value as a string for concatenation, preferring
// the ToStringable interface (which gets the Thread) when implemented.
func catToStr(t *Thread, v Value) string {
	ts, ok := v.(ToStringable)
	if !ok {
		return AsStr(v)
	}
	return ts.ToString(t)
}
// OpMatch reports whether the string form of x matches the compiled
// regular expression y.
func OpMatch(x Value, y regex.Pattern) SuBool {
	return SuBool(y.Matches(ToStr(x)))
}
// ToIndex is used by ranges and string[i]; it converts key to an int,
// panicking when the value is not an integer.
func ToIndex(key Value) int {
	n, ok := key.IfInt()
	if !ok {
		panic("indexes must be integers")
	}
	return n
}
// prepFrom normalizes a range start index: a negative index counts back
// from size (clamped at 0), and the result never exceeds size.
func prepFrom(from int, size int) int {
	start := from
	if start < 0 {
		start += size
		if start < 0 {
			start = 0
		}
	}
	if start > size {
		start = size
	}
	return start
}
// prepTo normalizes a range end index: a negative index counts back
// from size, and the result is clamped into [from, size].
func prepTo(from int, to int, size int) int {
	end := to
	if end < 0 {
		end += size
	}
	if end < from {
		end = from
	}
	if end > size {
		end = size
	}
	return end
}
// prepLen clamps a requested length into [0, size].
// The parameter is renamed from `len`, which shadowed the builtin of
// the same name inside the function body.
func prepLen(length int, size int) int {
	if length < 0 {
		length = 0
	}
	if length > size {
		length = size
	}
	return length
}
package token
type (
	// Token is implemented by every lexical token; VisitToken dispatches
	// to the callback for the concrete type in the given TokenVisitor.
	Token interface {
		VisitToken(TokenVisitor)
	}

	// TokenVisitor bundles one callback per concrete token type; Token
	// implementations invoke exactly the matching callback.
	TokenVisitor struct {
		Ident       func(Ident)
		Function    func(Function)
		AtKeyword   func(AtKeyword)
		Hash        func(Hash)
		String      func(String)
		Url         func(Url)
		Delim       func(Delim)
		Number      func(Number)
		Percentage  func(Percentage)
		Dimension   func(Dimension)
		Whitespace  func(Whitespace)
		Colon       func(Colon)
		Semicolon   func(Semicolon)
		Comma       func(Comma)
		OpenSquare  func(OpenSquare)
		CloseSquare func(CloseSquare)
		OpenParen   func(OpenParen)
		CloseParen  func(CloseParen)
		OpenCurly   func(OpenCurly)
		CloseCurly  func(CloseCurly)
	}

	// In every token below, Offset is the token's starting byte offset
	// in the tokenized input (see the Offset function).

	// Ident is an identifier token with its text in Value.
	Ident struct {
		Offset int
		Value  string
	}

	// Function is a function token with the function name in Value.
	Function struct {
		Offset int
		Value  string
	}

	// AtKeyword is an at-keyword token with the keyword text in Value.
	AtKeyword struct {
		Offset int
		Value  string
	}

	// Hash is a hash token; Id marks whether it is an ID-style hash —
	// presumably set by the tokenizer per CSS hash-token rules; confirm.
	Hash struct {
		Offset int
		Value  string
		Id     bool
	}

	// String is a string token; Mark records the quote character used.
	String struct {
		Offset int
		Mark   rune
		Value  string
	}

	// Url is a URL token with the URL text in Value.
	Url struct {
		Offset int
		Value  string
	}

	// Delim is a single delimiter character token.
	Delim struct {
		Offset int
		Value  rune
	}

	// Number is a numeric token; Integer reports integer syntax.
	Number struct {
		Offset  int
		Value   float64
		Integer bool
	}

	// Percentage is a percentage token with its numeric value.
	Percentage struct {
		Offset int
		Value  float64
	}

	// Dimension is a number with a unit string (e.g. a length).
	Dimension struct {
		Offset  int
		Value   float64
		Integer bool
		Unit    string
	}

	// The remaining tokens are whitespace/punctuation markers carrying
	// only their starting offset.

	Whitespace struct {
		Offset int
	}

	Colon struct {
		Offset int
	}

	Semicolon struct {
		Offset int
	}

	CloseCurly struct {
		Offset int
	}

	OpenCurly struct {
		Offset int
	}

	CloseParen struct {
		Offset int
	}

	OpenParen struct {
		Offset int
	}

	CloseSquare struct {
		Offset int
	}

	OpenSquare struct {
		Offset int
	}

	Comma struct {
		Offset int
	}
)
// VisitToken implementations: each token type performs double dispatch
// by invoking the TokenVisitor callback that matches its concrete type.

func (t Ident) VisitToken(v TokenVisitor)       { v.Ident(t) }
func (t Function) VisitToken(v TokenVisitor)    { v.Function(t) }
func (t AtKeyword) VisitToken(v TokenVisitor)   { v.AtKeyword(t) }
func (t Hash) VisitToken(v TokenVisitor)        { v.Hash(t) }
func (t String) VisitToken(v TokenVisitor)      { v.String(t) }
func (t Url) VisitToken(v TokenVisitor)         { v.Url(t) }
func (t Delim) VisitToken(v TokenVisitor)       { v.Delim(t) }
func (t Number) VisitToken(v TokenVisitor)      { v.Number(t) }
func (t Percentage) VisitToken(v TokenVisitor)  { v.Percentage(t) }
func (t Dimension) VisitToken(v TokenVisitor)   { v.Dimension(t) }
func (t Whitespace) VisitToken(v TokenVisitor)  { v.Whitespace(t) }
func (t Colon) VisitToken(v TokenVisitor)       { v.Colon(t) }
func (t Semicolon) VisitToken(v TokenVisitor)   { v.Semicolon(t) }
func (t Comma) VisitToken(v TokenVisitor)       { v.Comma(t) }
func (t OpenSquare) VisitToken(v TokenVisitor)  { v.OpenSquare(t) }
func (t CloseSquare) VisitToken(v TokenVisitor) { v.CloseSquare(t) }
func (t OpenParen) VisitToken(v TokenVisitor)   { v.OpenParen(t) }
func (t CloseParen) VisitToken(v TokenVisitor)  { v.CloseParen(t) }
func (t OpenCurly) VisitToken(v TokenVisitor)   { v.OpenCurly(t) }
func (t CloseCurly) VisitToken(v TokenVisitor)  { v.CloseCurly(t) }
// Offset returns the byte offset at which the given token starts, or
// -1 when the token is not one of the known concrete types.
func Offset(token Token) int {
	switch t := token.(type) {
	case Ident:
		return t.Offset
	case Function:
		return t.Offset
	case AtKeyword:
		return t.Offset
	case Hash:
		return t.Offset
	case String:
		return t.Offset
	case Url:
		return t.Offset
	case Delim:
		return t.Offset
	case Number:
		return t.Offset
	case Percentage:
		return t.Offset
	case Dimension:
		return t.Offset
	case Whitespace:
		return t.Offset
	case Colon:
		return t.Offset
	case Semicolon:
		return t.Offset
	case Comma:
		return t.Offset
	case OpenSquare:
		return t.Offset
	case CloseSquare:
		return t.Offset
	case OpenParen:
		return t.Offset
	case CloseParen:
		return t.Offset
	case OpenCurly:
		return t.Offset
	case CloseCurly:
		return t.Offset
	}
	return -1
}
package zplgfa
import (
"encoding/hex"
"fmt"
"image"
"image/color"
"math"
"strings"
)
// GraphicType is a type to select the graphic format used by
// ConvertToGraphicField / ConvertToZPL.
type GraphicType int

const (
	// ASCII graphic type using only hex characters (0-9A-F)
	ASCII GraphicType = iota
	// Binary saving the same data as binary
	Binary
	// CompressedASCII compresses the hex data via RLE
	CompressedASCII
)
// ConvertToZPL is just a wrapper for ConvertToGraphicField which also includes the ZPL
// starting code ^XA and ending code ^XZ, as well as a Field Separator and Field Origin.
// NOTE(review): the format string emits "^XA," and "^FS," with literal
// commas between commands — confirm the target printers accept this
// spelling before changing it.
func ConvertToZPL(img image.Image, graphicType GraphicType) string {
	return fmt.Sprintf("^XA,^FS\n^FO0,0\n%s^FS,^XZ\n", ConvertToGraphicField(img, graphicType))
}
// FlattenImage composites source over an opaque white background,
// pixel by pixel, producing an NRGBA image ready for conversion.
func FlattenImage(source image.Image) *image.NRGBA {
	bounds := source.Bounds()
	width, height := bounds.Size().X, bounds.Size().Y
	background := color.White
	target := image.NewNRGBA(bounds)
	for y := 0; y < height; y++ {
		for x := 0; x < width; x++ {
			target.Set(x, y, flatten(source.At(x, y), background))
		}
	}
	return target
}
func flatten(input color.Color, background color.Color) color.Color {
source := color.NRGBA64Model.Convert(input).(color.NRGBA64)
r, g, b, a := source.RGBA()
bgR, bgG, bgB, _ := background.RGBA()
alpha := float32(a) / 0xffff
conv := func(c uint32, bg uint32) uint8 {
val := 0xffff - uint32((float32(bg) * alpha))
val = val | uint32(float32(c)*alpha)
return uint8(val >> 8)
}
c := color.NRGBA{
conv(r, bgR),
conv(g, bgG),
conv(b, bgB),
uint8(0xff),
}
return c
}
// getRepeatCode emits the ZPL run-length prefix for repeatCount copies of
// char: G-Y encode 1-19 repeats, g-z encode 20-400 in steps of 20, and counts
// above 419 recurse so the remainder (including its char) is emitted first.
func getRepeatCode(repeatCount int, char string) string {
	lowDigits := " GHIJKLMNOPQRSTUVWXY"
	highDigits := " ghijklmnopqrstuvwxyz"
	out := ""
	if repeatCount > 419 {
		out += getRepeatCode(repeatCount-419, char)
		repeatCount = 419
	}
	if h := repeatCount / 20; h > 0 {
		out += string(highDigits[h])
	}
	if l := repeatCount % 20; l > 0 {
		out += string(lowDigits[l])
	}
	return out + char
}
// CompressASCII compresses the ASCII data of a ZPL Graphic Field using RLE.
// Runs longer than 4 characters are replaced by a repeat code (getRepeatCode);
// shorter runs are emitted literally. A line consisting entirely of "0" or
// "F" collapses to the ZPL shorthand "," (blank line) or "!" (solid line).
func CompressASCII(in string) string {
	var curChar string
	var lastChar string
	var lastCharSince int // index where the current run started
	var output string
	var repCode string
	// Iterate one past the end so the final run is flushed.
	for i := 0; i < len(in)+1; i++ {
		if i == len(in) {
			curChar = ""
			// lastCharSince == 0 here means the whole line was one run.
			if lastCharSince == 0 {
				switch lastChar {
				case "0":
					output = ","
					return output
				case "F":
					output = "!"
					return output
				}
			}
		} else {
			curChar = string(in[i])
		}
		if lastChar != curChar {
			// Run ended: short runs are cheaper written out literally.
			if i-lastCharSince > 4 {
				repCode = getRepeatCode(i-lastCharSince, lastChar)
				output += repCode
			} else {
				for j := 0; j < i-lastCharSince; j++ {
					output += lastChar
				}
			}
			lastChar = curChar
			lastCharSince = i
		}
	}
	// Defensive fallback; with the flush pass above this is normally unreachable
	// for non-empty input.
	if output == "" {
		output += getRepeatCode(len(in), lastChar)
	}
	return output
}
// ConvertToGraphicField converts an image.Image picture to a ZPL compatible Graphic Field.
// The ZPL ^GF (Graphic Field) supports various data formats, this package supports the
// normal ASCII encoded, as well as a RLE compressed ASCII format. It also supports the
// Binary Graphic Field format. The encoding can be chosen by the second argument.
func ConvertToGraphicField(source image.Image, graphicType GraphicType) string {
var gfType string
var lastLine string
size := source.Bounds().Size()
width := size.X / 8
height := size.Y
if size.Y%8 != 0 {
width = width + 1
}
var GraphicFieldData string
for y := 0; y < size.Y; y++ {
line := make([]uint8, width)
lineIndex := 0
index := uint8(0)
currentByte := line[lineIndex]
for x := 0; x < size.X; x++ {
index = index + 1
p := source.At(x, y)
lum := color.Gray16Model.Convert(p).(color.Gray16)
if lum.Y < math.MaxUint16/2 {
currentByte = currentByte | (1 << (8 - index))
}
if index >= 8 {
line[lineIndex] = currentByte
lineIndex++
if lineIndex < len(line) {
currentByte = line[lineIndex]
}
index = 0
}
}
hexstr := strings.ToUpper(hex.EncodeToString(line))
switch graphicType {
case ASCII:
GraphicFieldData += fmt.Sprintln(hexstr)
case CompressedASCII:
curLine := CompressASCII(hexstr)
if lastLine == curLine {
GraphicFieldData += ":"
} else {
GraphicFieldData += curLine
}
lastLine = curLine
case Binary:
GraphicFieldData += fmt.Sprintf("%s", line)
}
}
if graphicType == ASCII || graphicType == CompressedASCII {
gfType = "A"
} else if graphicType == Binary {
gfType = "B"
}
return fmt.Sprintf("^GF%s,%d,%d,%d,\n%s", gfType, len(GraphicFieldData), width*height, width, GraphicFieldData)
} | zplgfa.go | 0.689619 | 0.440168 | zplgfa.go | starcoder |
package query
// The nodeLinkI interface provides an interface to allow nodes to be linked
// in a parent-child chain.
type nodeLinkI interface {
	setChild(NodeI)
	setParent(NodeI)
	getParent() NodeI
	getChild() NodeI
	copy() NodeI // all linkable nodes must be copyable
}

// The nodeLink is designed to be a mixin for the basic node structure. It
// encapsulates the joining of nodes into a doubly linked parent/child pair.
// In particular, the SetParentNode function gets exported for codegen purposes.
type nodeLink struct {
	// Parent of the join, so it is doubly linked
	parentNode NodeI
	// child of the join; at most one child per node (see setChild)
	childNode NodeI
}
// setChild attaches cn as this node's single child. A node may have at most
// one child, so linking onto an occupied node panics; callers that need a
// second child must copy the chain first (see SetParentNode/copyUp).
func (n *nodeLink) setChild(cn NodeI) {
	if n.childNode == nil {
		n.childNode = cn
	} else {
		panic("node already has a child node")
	}
}

// setParent unconditionally records pn as this node's parent.
func (n *nodeLink) setParent(pn NodeI) {
	n.parentNode = pn
}
// SetParentNode is used internally by the framework.
// It is used by the codegenerator to create linked nodes.
// It is used by the serializer to restore linked nodes.
// If parent already has a child, its whole parent chain is copied first so
// each chain keeps exactly one child per node.
func SetParentNode(child NodeI, parent NodeI) {
	if parent != nil {
		if parent.(nodeLinkI).getChild() != nil {
			// Create a copy of the parent chain, since the parent already has a child
			parent = copyUp(parent)
		}
		child.(nodeLinkI).setParent(parent)
		parent.(nodeLinkI).setChild(child)
	}
}
// copyUp creates a copy of the given node and copies all of its parent nodes
// too, putting the copies in its parent chain. The copy is made via the
// TableNodeI Copy_ method, so n must implement TableNodeI.
func copyUp(n NodeI) NodeI {
	nl := n.(TableNodeI)
	cp := nl.Copy_()
	if p := nl.getParent(); p != nil {
		parent := copyUp(p)
		cp.(nodeLinkI).setParent(parent)
		parent.(nodeLinkI).setChild(cp)
	}
	return cp
}
// ParentNode is used internally by the framework to return a node's parent,
// or nil when the node has none.
func ParentNode(n NodeI) NodeI {
	return n.(nodeLinkI).getParent()
}
// getParent returns the node's parent, or nil when it has none.
// The field is already declared as NodeI, so the previous explicit nil check
// and the self type assertion n.parentNode.(NodeI) (which would have panicked
// on a nil interface had the check been removed) were both redundant.
func (n *nodeLink) getParent() NodeI {
	return n.parentNode
}
// getChild returns the node's child, or nil when it has none.
func (n *nodeLink) getChild() NodeI {
	return n.childNode
}
/**
Public Accessors

The following functions are designed primarily to be used by the db package to help it unpack queries. They are not
given an accessor at the beginning so that they do not show up as functions in editors that provide code hinting when
trying to put together a node chain during the code creation process. Essentially they are trying to create exported
functions for the db package without broadcasting them to the world.
*/
// ChildNode is used internally by the framework to get the child node of a
// node, or nil when the node has none.
func ChildNode(n NodeI) NodeI {
	return n.(nodeLinkI).getChild()
}
// RootNode is used internally by the framework to get the root node, which is
// the top parent in the node tree. It returns nil for nodes that are not
// linkable at all.
func RootNode(n NodeI) NodeI {
	if self, ok := n.(nodeLinkI); !ok {
		return nil
	} else if self.getParent() == nil {
		return self.(NodeI)
	} else {
		// Walk parent links until a node with no parent is found.
		var n1 = self
		for pn := n1.getParent(); pn != nil; pn = n1.getParent() {
			n1 = pn.(nodeLinkI)
		}
		return n1.(NodeI)
	}
}
func CopyNode(n NodeI) ReferenceNodeI {
if self, ok := n.(ReferenceNodeI); !ok {
panic("cannot copy this kind of node")
} else {
return self.copy().(ReferenceNodeI)
}
} | pkg/orm/query/nodeLink.go | 0.679391 | 0.409339 | nodeLink.go | starcoder |
package utils
import (
"strconv"
"strings"
"time"
)
// TIME_LAYOUT_OFTEN is the most commonly used timestamp layout, expressed in
// Go's reference-time notation.
const TIME_LAYOUT_OFTEN = "2006-01-02 15:04:05"

// DateFormat pattern rules.
// Pairs of (PHP date() token, Go reference-time equivalent) consumed by
// strings.NewReplacer in Date and DateParse.
var datePatterns = []string{
	// year
	"Y", "2006", // A full numeric representation of a year, 4 digits Examples: 1999 or 2003
	"y", "06", //A two digit representation of a year Examples: 99 or 03
	// month
	"m", "01", // Numeric representation of a month, with leading zeros 01 through 12
	"n", "1", // Numeric representation of a month, without leading zeros 1 through 12
	"M", "Jan", // A short textual representation of a month, three letters Jan through Dec
	"F", "January", // A full textual representation of a month, such as January or March January through December
	// day
	"d", "02", // Day of the month, 2 digits with leading zeros 01 to 31
	"j", "2", // Day of the month without leading zeros 1 to 31
	// week
	"D", "Mon", // A textual representation of a day, three letters Mon through Sun
	"l", "Monday", // A full textual representation of the day of the week Sunday through Saturday
	// time
	"g", "3", // 12-hour format of an hour without leading zeros 1 through 12
	"G", "15", // 24-hour format of an hour without leading zeros 0 through 23
	"h", "03", // 12-hour format of an hour with leading zeros 01 through 12
	"H", "15", // 24-hour format of an hour with leading zeros 00 through 23
	"a", "pm", // Lowercase Ante meridiem and Post meridiem am or pm
	"A", "PM", // Uppercase Ante meridiem and Post meridiem AM or PM
	"i", "04", // Minutes with leading zeros 00 to 59
	"s", "05", // Seconds, with leading zeros 00 through 59
	// time zone
	"T", "MST",
	"P", "-07:00",
	"O", "-0700",
	// RFC 2822
	"r", time.RFC1123Z,
}
// DateParse parses dateString according to a PHP-style date format,
// interpreting the result in the local time zone.
func DateParse(dateString, format string) (time.Time, error) {
	goLayout := strings.NewReplacer(datePatterns...).Replace(format)
	return time.ParseInLocation(goLayout, dateString, time.Local)
}
// Date formats t using a PHP-style date format, translated to Go's
// reference-time layout via datePatterns.
func Date(t time.Time, format string) string {
	goLayout := strings.NewReplacer(datePatterns...).Replace(format)
	return t.Format(goLayout)
}
func DateFormat(t time.Time, layout string) (datestring string) {
datestring = t.Format(layout)
return
}
// TimeParseOften parses a "2006-01-02 15:04:05" style timestamp in the local
// time zone (e.g. "2014-01-11 16:18:00").
// Uses time.Local directly instead of time.LoadLocation("Local"), whose error
// return was previously discarded; the two are documented equivalents.
func TimeParseOften(value string) (time.Time, error) {
	return time.ParseInLocation(TIME_LAYOUT_OFTEN, value, time.Local)
}
// TimeLocal returns the current local time, truncated to whole seconds by a
// format/parse round trip (sub-second precision and the parse error are
// discarded).
// NOTE(review): the layout contains no letters, so DateParse's PHP-token
// replacer leaves it untouched — fragile but correct; confirm before changing
// the layout. Note the intentional-looking trailing space in the layout.
func TimeLocal() time.Time {
	format := "2006-01-02 15:04:05 -07:00 "
	dateString := DateFormat(time.Now(), format)
	formatedDate, _ := DateParse(dateString, format)
	return formatedDate
}
// DateLocal returns today's date at midnight in the local time zone: the time
// of day is dropped by formatting with a date-only layout and re-parsing.
// The parse error is discarded.
func DateLocal() time.Time {
	format := "2006-01-02"
	dateString := DateFormat(time.Now(), format)
	formatedDate, _ := DateParse(dateString, format)
	return formatedDate
}
// TimeLocalString returns the current local time formatted as
// "2006-01-02 15:04:05" (second precision).
func TimeLocalString() string {
	return DateFormat(TimeLocal(), TIME_LAYOUT_OFTEN)
}
// TimeLocalMDHString returns the current local month, day and hour packed as
// "MMDDHH", e.g. "120615" for December 6th at 15:00.
func TimeLocalMDHString() string {
	return DateFormat(TimeLocal(), "010215")
}
// TimeLocalYYYYMMDDHHMMSSString returns the current local time as a compact
// "YYYYMMDDHHMMSS" string.
func TimeLocalYYYYMMDDHHMMSSString() string {
	return DateFormat(TimeLocal(), "20060102150405")
}
// Timestamp2String converts a Unix timestamp (seconds) to a local-time string,
// e.g. 1441070992 => "2015-09-01 09:29:52".
func Timestamp2String(timestamp int64) string {
	return time.Unix(timestamp, 0).Format(TIME_LAYOUT_OFTEN)
}
// DateTime2String formats the given time as "2006-01-02 15:04:05".
func DateTime2String(dt time.Time) string {
	return DateFormat(dt, TIME_LAYOUT_OFTEN)
}
// TheTimeString returns now+counts formatted as "2006-01-02 15:04:05";
// pass a negative duration to obtain a time in the past.
func TheTimeString(counts time.Duration) string {
	return DateFormat(time.Now().Add(counts), TIME_LAYOUT_OFTEN)
}
// TheTime returns now+counts; pass a negative duration to obtain a time in
// the past.
func TheTime(counts time.Duration) time.Time {
	return time.Now().Add(counts)
}
// TheYearMonthString returns the current year and month both as a "YYYYMM"
// string (yms) and as its integer form (ymi). The Atoi error is discarded;
// the input is always six digits.
func TheYearMonthString() (yms string, ymi int) {
	yms = DateFormat(TimeLocal(), "200601")
	ymi, _ = strconv.Atoi(yms)
	return yms, ymi
}
// GetDateAsDirName returns today's date as a "YYYY/MM/DD" path fragment,
// suitable for date-based directory layouts.
func GetDateAsDirName() string {
	return DateFormat(time.Now(), "2006/01/02")
}
//以时间作为文件夹目录
func GetDateYYYYMMDD() string {
format := "20060102"
dateString := DateFormat(time.Now(), format)
return dateString
} | time.go | 0.6488 | 0.440289 | time.go | starcoder |
package main
import (
"fmt"
"sort"
"strconv"
)
// Graph defines the structure for our graph: a list of directed, weighted
// edges plus the set of nodes seen so far.
type Graph struct {
	Edges []*Edge
	Nodes map[*Node]bool
}

// Edge defines a struct used to build Edges: a directed Parent -> Child link
// with a non-negative Cost.
type Edge struct {
	Parent *Node
	Child  *Node
	Cost   int
}

// Node defines a struct used to build a Node; identity is by pointer, Name is
// only a label.
type Node struct {
	Name string
}

// Infinity is the largest int value, used as the "unreached" distance.
const Infinity = int(^uint(0) >> 1)
// AddEdge adds a directed Edge (parent -> child) with the given cost to the
// Graph, registering both endpoints as Nodes.
func (g *Graph) AddEdge(parent, child *Node, cost int) {
	edge := &Edge{
		Parent: parent,
		Child:  child,
		Cost:   cost,
	}
	g.Edges = append(g.Edges, edge)
	g.AddNode(parent)
	g.AddNode(child)
}
// AddNode adds a Node to the Graph's node set, lazily allocating the map.
// Map assignment is idempotent, so the previous existence check before the
// write was redundant and has been removed.
func (g *Graph) AddNode(node *Node) {
	if g.Nodes == nil {
		g.Nodes = make(map[*Node]bool)
	}
	g.Nodes[node] = true
}
// String returns a string representation of the Graph: every edge with its
// cost, followed by a comma-separated node list. Node order follows Go's
// randomized map iteration, so it is not deterministic.
func (g *Graph) String() string {
	var s string
	s += "Edges:\n"
	for _, edge := range g.Edges {
		s += edge.Parent.Name + " -> " + edge.Child.Name + " = " + strconv.Itoa(edge.Cost)
		s += "\n"
	}
	s += "\n"
	s += "Nodes: "
	i := 0
	for node := range g.Nodes {
		// Omit the trailing separator after the last node.
		if i == len(g.Nodes)-1 {
			s += node.Name
		} else {
			s += node.Name + ", "
		}
		i++
	}
	s += "\n"
	return s
}
// Dijkstra implements Dijkstra's algorithm: it returns a table mapping every
// node in the graph to its minimal cost from startNode (Infinity for
// unreachable nodes). Assumes non-negative edge costs.
func (g *Graph) Dijkstra(startNode *Node) (costTable map[*Node]int) {
	costTable = g.NewCostTable(startNode)
	// An empty list of "visited" Nodes. Every time the algorithm settles a
	// Node, we add it here.
	var visited []*Node
	for len(visited) != len(g.Nodes) {
		// Greedily settle the cheapest unvisited node...
		node := getClosestNonVisitedNode(costTable, visited)
		visited = append(visited, node)
		nodeEdges := g.GetNodeEdges(node)
		// ...and relax each of its outgoing edges.
		for _, edge := range nodeEdges {
			distanceToNeighbor := costTable[node] + edge.Cost
			if distanceToNeighbor < costTable[edge.Child] {
				costTable[edge.Child] = distanceToNeighbor
			}
		}
	}
	return costTable
}
// NewCostTable builds the initial Dijkstra cost table: zero for startNode and
// Infinity for every other node in the graph.
func (g *Graph) NewCostTable(startNode *Node) map[*Node]int {
	costs := map[*Node]int{startNode: 0}
	for node := range g.Nodes {
		if node != startNode {
			costs[node] = Infinity
		}
	}
	return costs
}
// GetNodeEdges returns every edge whose parent is the given node.
func (g *Graph) GetNodeEdges(node *Node) []*Edge {
	var outgoing []*Edge
	for _, candidate := range g.Edges {
		if candidate.Parent == node {
			outgoing = append(outgoing, candidate)
		}
	}
	return outgoing
}
// getClosestNonVisitedNode returns the unvisited node with the lowest cost in
// costTable. Ties are broken arbitrarily (map iteration order is random).
// Panics if every node has already been visited; the caller guarantees at
// least one remains.
func getClosestNonVisitedNode(costTable map[*Node]int, visited []*Node) *Node {
	type nodeCost struct {
		node *Node
		cost int
	}
	seen := make(map[*Node]bool, len(visited))
	for _, v := range visited {
		seen[v] = true
	}
	var candidates []nodeCost
	for node, cost := range costTable {
		if !seen[node] {
			candidates = append(candidates, nodeCost{node, cost})
		}
	}
	sort.Slice(candidates, func(i, j int) bool {
		return candidates[i].cost < candidates[j].cost
	})
	return candidates[0].node
}
func main() {
start := &Node{Name: "start"}
finish := &Node{Name: "finish"}
a := &Node{Name: "a"}
b := &Node{Name: "b"}
c := &Node{Name: "c"}
d := &Node{Name: "d"}
g := Graph{}
g.AddEdge(start, a, 5)
g.AddEdge(start, b, 2)
g.AddEdge(b, a, 8)
g.AddEdge(b, c, 7)
g.AddEdge(a, c, 4)
g.AddEdge(a, d, 2)
g.AddEdge(c, d, 3)
g.AddEdge(c, finish, 6)
g.AddEdge(d, finish, 1)
costTable := g.Dijkstra(start)
for node, cost := range costTable {
fmt.Printf("Distance from %s to %s = %d\n", a.Name, node.Name, cost)
}
} | algorithms/gr-dij/golang/dijkstras_algorithm.go | 0.703855 | 0.52275 | dijkstras_algorithm.go | starcoder |
package store
import (
"database/sql"
"github.com/hashicorp/go-multierror"
)
// ScanStrings scans a slice of strings from the return value of `*store.query`.
// The rows are always closed; close/iteration errors are merged into err via
// the deferred CloseRows.
func ScanStrings(rows *sql.Rows, queryErr error) (_ []string, err error) {
	if queryErr != nil {
		return nil, queryErr
	}
	defer func() { err = CloseRows(rows, err) }()

	var values []string
	for rows.Next() {
		var value string
		if err := rows.Scan(&value); err != nil {
			return nil, err
		}

		values = append(values, value)
	}

	return values, nil
}
// ScanFirstString scans a slice of strings from the return value of
// `*store.query` and returns the first, with ok=false when no row exists.
func ScanFirstString(rows *sql.Rows, err error) (string, bool, error) {
	values, scanErr := ScanStrings(rows, err)
	if scanErr != nil {
		return "", false, scanErr
	}
	if len(values) == 0 {
		return "", false, nil
	}
	return values[0], true, nil
}
// ScanInts scans a slice of ints from the return value of `*store.query`.
// The rows are always closed; close/iteration errors are merged into err via
// the deferred CloseRows.
func ScanInts(rows *sql.Rows, queryErr error) (_ []int, err error) {
	if queryErr != nil {
		return nil, queryErr
	}
	defer func() { err = CloseRows(rows, err) }()

	var values []int
	for rows.Next() {
		var value int
		if err := rows.Scan(&value); err != nil {
			return nil, err
		}

		values = append(values, value)
	}

	return values, nil
}
// ScanFirstInt scans a slice of ints from the return value of `*store.query`
// and returns the first, with ok=false when no row exists.
func ScanFirstInt(rows *sql.Rows, err error) (int, bool, error) {
	values, scanErr := ScanInts(rows, err)
	if scanErr != nil {
		return 0, false, scanErr
	}
	if len(values) == 0 {
		return 0, false, nil
	}
	return values[0], true, nil
}
// ScanBytes scans a slice of bytes from the return value of `*store.query`.
// The rows are always closed; close/iteration errors are merged into err via
// the deferred CloseRows.
func ScanBytes(rows *sql.Rows, queryErr error) (_ [][]byte, err error) {
	if queryErr != nil {
		return nil, queryErr
	}
	defer func() { err = CloseRows(rows, err) }()

	var values [][]byte
	for rows.Next() {
		var value []byte
		if err := rows.Scan(&value); err != nil {
			return nil, err
		}

		values = append(values, value)
	}

	return values, nil
}
// ScanFirstBytes scans a slice of bytes from the return value of
// `*store.query` and returns the first, with ok=false when no row exists.
func ScanFirstBytes(rows *sql.Rows, err error) ([]byte, bool, error) {
	values, scanErr := ScanBytes(rows, err)
	if scanErr != nil {
		return nil, false, scanErr
	}
	if len(values) == 0 {
		return nil, false, nil
	}
	return values[0], true, nil
}
// CloseRows closes the rows object and checks its error value, appending both
// the Close error and the deferred iteration error (rows.Err) onto err.
func CloseRows(rows *sql.Rows, err error) error {
	if closeErr := rows.Close(); closeErr != nil {
		err = multierror.Append(err, closeErr)
	}
	if rowsErr := rows.Err(); rowsErr != nil {
		err = multierror.Append(err, rowsErr)
	}
	return err
} | enterprise/internal/codeintel/bundles/persistence/sqlite/store/scan.go | 0.805211 | 0.422147 | scan.go | starcoder |
package data
import (
"strings"
)
// ProjectionParams is an ordered list of projection field names (dotted paths
// such as "a.b.c").
type ProjectionParams struct {
	values []string
}
// NewEmptyProjectionParams returns ProjectionParams with no entries but some
// preallocated capacity.
func NewEmptyProjectionParams() *ProjectionParams {
	return &ProjectionParams{values: make([]string, 0, 10)}
}
// NewProjectionParamsFromStrings builds ProjectionParams from a copy of the
// given slice, so later mutation of the argument does not leak in.
func NewProjectionParamsFromStrings(values []string) *ProjectionParams {
	owned := make([]string, len(values))
	copy(owned, values)
	return &ProjectionParams{values: owned}
}
// NewProjectionParamsFromAnyArray builds ProjectionParams from the string
// forms of the array elements, skipping empty strings. A nil array yields an
// empty ProjectionParams.
func NewProjectionParamsFromAnyArray(values *AnyValueArray) *ProjectionParams {
	if values == nil {
		return NewEmptyProjectionParams()
	}

	c := &ProjectionParams{
		values: make([]string, 0, values.Len()),
	}

	for index := 0; index < values.Len(); index++ {
		value := values.GetAsString(index)
		if value != "" {
			c.values = append(c.values, value)
		}
	}

	return c
}
// Value returns the underlying slice (not a copy; callers must not mutate it).
func (c *ProjectionParams) Value() []string {
	return c.values
}

// Len returns the number of stored projection names.
func (c *ProjectionParams) Len() int {
	return len(c.values)
}

// Get returns the element at index; panics when index is out of range.
func (c *ProjectionParams) Get(index int) string {
	return c.values[index]
}
// Put stores value at index, growing the slice (zero-filled) when index is
// beyond the current length. Negative indexes panic.
// BUG FIX: the previous grow condition compared `cap(c.values)+1 < index`,
// which almost never triggered, so any Put at index >= len(c.values) panicked
// with an out-of-range error instead of growing the slice.
func (c *ProjectionParams) Put(index int, value string) {
	if index >= len(c.values) {
		grown := make([]string, index+1, (index+1)*2)
		copy(grown, c.values)
		c.values = grown
	}
	c.values[index] = value
}
// Remove deletes the element at index, shifting later elements left; panics
// when index is out of range.
func (c *ProjectionParams) Remove(index int) {
	c.values = append(c.values[:index], c.values[index+1:]...)
}

// Push appends value to the end of the list.
func (c *ProjectionParams) Push(value string) {
	c.values = append(c.values, value)
}

// Append appends all elements to the end of the list; a nil slice is a no-op.
func (c *ProjectionParams) Append(elements []string) {
	if elements != nil {
		c.values = append(c.values, elements...)
	}
}

// Clear resets the list to empty with fresh preallocated capacity.
func (c *ProjectionParams) Clear() {
	c.values = make([]string, 0, 10)
}
// String returns the projection names joined with commas.
func (c *ProjectionParams) String() string {
	return strings.Join(c.values, ",")
}
// NewProjectionParamsFromValue converts an arbitrary value into
// ProjectionParams by first coercing it to an AnyValueArray.
func NewProjectionParamsFromValue(value interface{}) *ProjectionParams {
	values := NewAnyValueArrayFromValue(value)
	return NewProjectionParamsFromAnyArray(values)
}
// ParseProjectionParams parses each argument — which may contain commas and
// parenthesized sub-lists like "a(b,c)" — into one ProjectionParams value.
func ParseProjectionParams(values ...string) *ProjectionParams {
	params := NewEmptyProjectionParams()
	for _, raw := range values {
		parseProjectionParamValue("", params, raw)
	}
	return params
}
// parseProjectionParamValue recursively parses one projection expression into
// c, prefixing every emitted name with prefix (dot-separated). The grammar is
// comma-separated names where a name may carry a parenthesized sub-list:
// "a(b,c),d" with empty prefix yields "a.b", "a.c", "d".
func parseProjectionParamValue(prefix string, c *ProjectionParams, value string) {
	if value != "" {
		value = strings.Trim(value, " \t\n\r")
	}

	openBracket := 0       // current parenthesis nesting depth
	openBracketIndex := -1 // index of the outermost '('
	closeBracketIndex := -1
	commaIndex := -1 // index of the first top-level ','

	breakCycleRequired := false
	for index := 0; index < len(value); index++ {
		switch value[index] {
		case '(':
			if openBracket == 0 {
				openBracketIndex = index
			}
			openBracket++
			break
		case ')':
			openBracket--
			if openBracket == 0 {
				closeBracketIndex = index

				if openBracketIndex >= 0 && closeBracketIndex > 0 {
					previousPrefix := prefix

					// The text before '(' extends the prefix for the
					// sub-list between the parentheses.
					if prefix != "" {
						prefix = prefix + "." + value[:openBracketIndex]
					} else {
						prefix = value[:openBracketIndex]
					}

					subValue := value[openBracketIndex+1 : closeBracketIndex]
					parseProjectionParamValue(prefix, c, subValue)

					// Whatever follows ')' is parsed under the old prefix.
					subValue = value[closeBracketIndex+1:]
					parseProjectionParamValue(previousPrefix, c, subValue)
					breakCycleRequired = true
				}
			}
			break
		case ',':
			if openBracket == 0 {
				commaIndex = index

				// Emit the name before the comma (if any)...
				subValue := value[0:commaIndex]
				if subValue != "" {
					if prefix != "" {
						c.Push(prefix + "." + subValue)
					} else {
						c.Push(subValue)
					}
				}

				// ...and recurse on the remainder.
				subValue = value[commaIndex+1:]

				if subValue != "" {
					parseProjectionParamValue(prefix, c, subValue)
					breakCycleRequired = true
				}
			}
			break
		}

		if breakCycleRequired {
			break
		}
	}

	// Plain name with no brackets or commas: emit it directly.
	if value != "" && openBracketIndex == -1 && commaIndex == -1 {
		if prefix != "" {
			c.Push(prefix + "." + value)
		} else {
			c.Push(value)
		}
	}
} | data/ProjectionParams.go | 0.513668 | 0.561876 | ProjectionParams.go | starcoder |
package arith
import "github.com/egonelbre/exp/bit"
// Model is an adaptive symbol model for the arithmetic coder: it reports how
// many bits one symbol occupies and can encode/decode a symbol, updating its
// probabilities as it goes.
type Model interface {
	NBits() uint
	Encode(enc *Encoder, value uint)
	Decode(dec *Decoder) (value uint)
}
// Shift is a single-bit adaptive model: P is the probability of a 1 bit and
// I is the adaptation shift (larger I adapts more slowly).
type Shift struct {
	P P
	I byte
}

// NBits reports that a Shift codes exactly one bit per symbol.
func (m *Shift) NBits() uint { return 1 }

// adapt moves P toward MaxP after a 1 and toward zero after a 0, by a
// fraction of 2^-I of the remaining distance.
func (m *Shift) adapt(bit uint) {
	switch bit {
	case 1:
		m.P += (MaxP - m.P) >> m.I
	case 0:
		m.P -= m.P >> m.I
	}
}

// Encode writes one bit with the current probability, then adapts.
func (m *Shift) Encode(enc *Encoder, bit uint) {
	enc.Encode(bit, m.P)
	m.adapt(bit)
}

// Decode reads one bit with the current probability, then adapts.
func (m *Shift) Decode(dec *Decoder) (bit uint) {
	bit = dec.Decode(m.P)
	m.adapt(bit)
	return bit
}
// Shift2 is a single-bit model mixing two adaptive estimators with different
// adaptation rates; the coded probability is the sum P0+P1, each component
// bounded by MaxP/2.
type Shift2 struct {
	P0 P
	I0 byte
	P1 P
	I1 byte
}

// NBits reports that a Shift2 codes exactly one bit per symbol.
func (m *Shift2) NBits() uint { return 1 }

// adapt moves each component toward MaxP/2 after a 1 and toward zero after a
// 0, at its own rate 2^-I.
func (m *Shift2) adapt(bit uint) {
	switch bit {
	case 1:
		m.P0 += (MaxP/2 - m.P0) >> m.I0
		m.P1 += (MaxP/2 - m.P1) >> m.I1
	case 0:
		m.P0 -= m.P0 >> m.I0
		m.P1 -= m.P1 >> m.I1
	}
}

// Encode writes one bit with the combined probability, then adapts.
func (m *Shift2) Encode(enc *Encoder, bit uint) {
	enc.Encode(bit, m.P0+m.P1)
	m.adapt(bit)
}

// Decode reads one bit with the combined probability, then adapts.
func (m *Shift2) Decode(dec *Decoder) (bit uint) {
	bit = dec.Decode(m.P0 + m.P1)
	m.adapt(bit)
	return bit
}
// Shift4 is a single-bit model mixing four adaptive estimators with
// individual adaptation rates; the coded probability is their sum, each
// component bounded by MaxP/4.
type Shift4 struct {
	P [4]P
	I [4]byte
}

// NBits reports that a Shift4 codes exactly one bit per symbol.
func (m *Shift4) NBits() uint { return 1 }

// adapt moves each component toward MaxP/4 after a 1 and toward zero after a
// 0, at its own rate 2^-I[k].
func (m *Shift4) adapt(bit uint) {
	switch bit {
	case 1:
		m.P[0] += (MaxP/4 - m.P[0]) >> m.I[0]
		m.P[1] += (MaxP/4 - m.P[1]) >> m.I[1]
		m.P[2] += (MaxP/4 - m.P[2]) >> m.I[2]
		m.P[3] += (MaxP/4 - m.P[3]) >> m.I[3]
	case 0:
		m.P[0] -= m.P[0] >> m.I[0]
		m.P[1] -= m.P[1] >> m.I[1]
		m.P[2] -= m.P[2] >> m.I[2]
		m.P[3] -= m.P[3] >> m.I[3]
	}
}

// Encode writes one bit with the combined probability, then adapts.
func (m *Shift4) Encode(enc *Encoder, bit uint) {
	enc.Encode(bit, m.P[0]+m.P[1]+m.P[2]+m.P[3])
	m.adapt(bit)
}

// Decode reads one bit with the combined probability, then adapts.
func (m *Shift4) Decode(dec *Decoder) (bit uint) {
	bit = dec.Decode(m.P[0] + m.P[1] + m.P[2] + m.P[3])
	m.adapt(bit)
	return bit
}
// Tree is a complete binary tree of bit models used to code multi-bit
// symbols; node i's model lives at tree[i-1] with the root at index 1.
type Tree []Model

// NBits is the symbol width in bits, derived from the symbol count.
func (tree Tree) NBits() uint { return bit.ScanRight(uint64(tree.syms())) }

// syms is the number of distinct symbols (one more than internal nodes).
func (tree Tree) syms() uint { return uint(len(tree) + 1) }

// msb is the mask of the symbol's most significant bit.
func (tree Tree) msb() uint { return tree.syms() / 2 }
// NewTree builds a model tree able to code nbits-wide symbols; model()
// supplies one fresh bit model per internal tree node.
func NewTree(nbits uint, model func() Model) Tree {
	syms := 1 << nbits
	tree := make(Tree, syms-1)
	for i := range tree {
		tree[i] = model()
	}
	return tree
}
// NewEmptyTree allocates a tree of nil models for nbits-wide symbols; every
// slot must be populated before coding.
func NewEmptyTree(nbits uint) Tree {
	return make(Tree, 1<<nbits-1)
}
// Encode codes value most-significant-bit first, walking the model tree from
// the root; each level's bit selects the child context.
// BUG FIX: valid symbols are 0..syms()-1, so the range check must reject
// value == syms() too; the previous `>` comparison let the first
// out-of-range symbol through and encoded garbage.
func (tree Tree) Encode(enc *Encoder, value uint) {
	if value >= tree.syms() {
		panic("arith: tree symbol out of range")
	}
	syms, msb := tree.syms(), tree.msb()
	ctx := uint(1)
	for ctx < syms {
		bit := uint(0)
		if value&msb != 0 {
			bit = 1
		}
		value += value // shift the next bit into the msb position
		tree[ctx-1].Encode(enc, bit)
		ctx += ctx + bit
	}
}
func (tree Tree) Decode(dec *Decoder) (value uint) {
ctx := uint(1)
syms := tree.syms()
for ctx < syms {
ctx += ctx + tree[ctx-1].Decode(dec)
}
return ctx - syms
} | coder/arith/models.go | 0.535584 | 0.413714 | models.go | starcoder |
package level
// TileTextureInfo describes the textures used for a map tile.
// In the real world the 16 bits split into wall (bits 0-5), ceiling
// (bits 6-10) and floor (bits 11-15) atlas indices; in cyberspace the low and
// high bytes are floor and ceiling palette indices instead.
type TileTextureInfo uint16

// WallTextureIndex returns the texture index into the texture atlas for the walls.
// Valid range [0..63] (bits 0-5).
// This property is only valid in real world.
func (info TileTextureInfo) WallTextureIndex() AtlasIndex {
	return AtlasIndex(info & 0x003F)
}

// WithWallTextureIndex returns an info with given index set.
// Values outside valid range are ignored.
// NOTE(review): the limit 64 is a literal here while the ceiling/floor
// setters use FloorCeilingTextureLimit — confirm whether a wall constant
// exists elsewhere.
func (info TileTextureInfo) WithWallTextureIndex(value AtlasIndex) TileTextureInfo {
	if value >= 64 {
		return info
	}
	return TileTextureInfo(uint16(info&^0x003F) | uint16(value&0x003F))
}

// CeilingTextureIndex returns the texture index into the texture atlas for the ceiling.
// Valid range [0..31] (bits 6-10).
// This property is only valid in real world.
func (info TileTextureInfo) CeilingTextureIndex() AtlasIndex {
	return AtlasIndex((info & 0x07C0) >> 6)
}

// WithCeilingTextureIndex returns an info with given index set.
// Values outside valid range are ignored.
func (info TileTextureInfo) WithCeilingTextureIndex(value AtlasIndex) TileTextureInfo {
	if value >= FloorCeilingTextureLimit {
		return info
	}
	return TileTextureInfo(uint16(info&^0x07C0) | (uint16(value&0x001F) << 6))
}

// FloorTextureIndex returns the texture index into the texture atlas for the floor.
// Valid range [0..31] (bits 11-15).
// This property is only valid in real world.
func (info TileTextureInfo) FloorTextureIndex() AtlasIndex {
	return AtlasIndex((info & 0xF800) >> 11)
}

// WithFloorTextureIndex returns an info with given index set.
// Values outside valid range are ignored.
func (info TileTextureInfo) WithFloorTextureIndex(value AtlasIndex) TileTextureInfo {
	if value >= FloorCeilingTextureLimit {
		return info
	}
	return TileTextureInfo(uint16(info&^0xF800) | (uint16(value&0x001F) << 11))
}

// FloorPaletteIndex returns the palette index for the floor in cyberspace
// (low byte).
func (info TileTextureInfo) FloorPaletteIndex() byte {
	return byte(info)
}

// WithFloorPaletteIndex returns an info with given index set in the low byte.
func (info TileTextureInfo) WithFloorPaletteIndex(value byte) TileTextureInfo {
	return TileTextureInfo(uint16(info&0xFF00) | uint16(value))
}

// CeilingPaletteIndex returns the palette index for the ceiling in cyberspace
// (high byte).
func (info TileTextureInfo) CeilingPaletteIndex() byte {
	return byte(info >> 8)
}
// WithCeilingPaletteIndex returns an info with given index set.
func (info TileTextureInfo) WithCeilingPaletteIndex(value byte) TileTextureInfo {
return TileTextureInfo(uint16(info&0x00FF) | (uint16(value) << 8))
} | ss1/content/archive/level/TileTextureInfo.go | 0.894115 | 0.542924 | TileTextureInfo.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.