code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package expressions
import (
	"bytes"
	"regexp"
	"strconv"
	"strings"
)
// BuildEqExpressionFn returns a predicate that reports whether a raw value
// equals expr. When expr parses as a float64 the comparison is numeric (so
// "1.50" matches "1.5"); values that fail to parse in that mode never match.
// Otherwise the comparison is an exact string equality.
func BuildEqExpressionFn(expr string) func([]byte) bool {
	want, parseErr := strconv.ParseFloat(expr, 64)
	numeric := parseErr == nil

	return func(value []byte) bool {
		if !numeric {
			return string(value) == expr
		}
		got, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return got == want
	}
}
// BuildNeExpressionFn returns a predicate that reports whether a raw value
// differs from expr. When expr parses as a float64 the comparison is numeric;
// values that fail to parse in that mode are reported as not-unequal (false).
// Otherwise the comparison is an exact string inequality.
func BuildNeExpressionFn(expr string) func([]byte) bool {
	want, parseErr := strconv.ParseFloat(expr, 64)
	numeric := parseErr == nil

	return func(value []byte) bool {
		if !numeric {
			return string(value) != expr
		}
		got, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return got != want
	}
}
// BuildLtExpressionFn returns a predicate that reports whether a raw value,
// parsed as a float64, is strictly less than expr. Unparsable values never
// match.
func BuildLtExpressionFn(expr float64) func([]byte) bool {
	return func(value []byte) bool {
		v, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return v < expr
	}
}
// BuildLteExpressionFn returns a predicate that reports whether a raw value,
// parsed as a float64, is less than or equal to expr. Unparsable values never
// match.
func BuildLteExpressionFn(expr float64) func([]byte) bool {
	return func(value []byte) bool {
		v, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return v <= expr
	}
}
// BuildGtExpressionFn returns a predicate that reports whether a raw value,
// parsed as a float64, is strictly greater than expr. Unparsable values never
// match.
func BuildGtExpressionFn(expr float64) func([]byte) bool {
	return func(value []byte) bool {
		v, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return v > expr
	}
}
// BuildGteExpressionFn returns a predicate that reports whether a raw value,
// parsed as a float64, is greater than or equal to expr. Unparsable values
// never match.
func BuildGteExpressionFn(expr float64) func([]byte) bool {
	return func(value []byte) bool {
		v, err := strconv.ParseFloat(string(value), 64)
		if err != nil {
			return false
		}
		return v >= expr
	}
}
// BuildMatchExpressionFn returns a predicate that reports whether a raw value
// matches the regular expression expr. The pattern is compiled exactly once,
// up front; note that an invalid expr panics (regexp.MustCompile).
func BuildMatchExpressionFn(expr string) func([]byte) bool {
	pattern := regexp.MustCompile(expr)

	return func(value []byte) bool {
		return pattern.Match(value)
	}
}
// BuildNotMatchExpressionFn returns a predicate that reports whether a raw
// value does NOT match the regular expression expr. The pattern is compiled
// exactly once, up front; note that an invalid expr panics (regexp.MustCompile).
func BuildNotMatchExpressionFn(expr string) func([]byte) bool {
	pattern := regexp.MustCompile(expr)

	return func(value []byte) bool {
		return !pattern.Match(value)
	}
}
// BuildContainsExpressionFn returns a predicate that reports whether a raw
// value contains expr as a substring.
func BuildContainsExpressionFn(expr string) func([]byte) bool {
	// Convert the needle once up front so the per-call check can use
	// bytes.Contains directly, avoiding the []byte -> string allocation
	// that strings.Contains(string(value), expr) incurs on every call.
	needle := []byte(expr)
	return func(value []byte) bool {
		return bytes.Contains(value, needle)
	}
}
// BuildNotContainsExpressionFn returns a predicate that reports whether a raw
// value does NOT contain expr as a substring.
func BuildNotContainsExpressionFn(expr string) func([]byte) bool {
	// Precompute the needle so the per-call check avoids the []byte -> string
	// allocation that strings.Contains would require (see BuildContainsExpressionFn).
	needle := []byte(expr)
	return func(value []byte) bool {
		return !bytes.Contains(value, needle)
	}
}
package main
import (
"time"
"math/rand"
"math"
)
// Zipfian draws Zipf-distributed item indices in [0, items): low indices are
// drawn far more often than high ones, with skew controlled by theta.
type Zipfian struct {
	items int64 // size of the item space
	// theta is the skew constant; alpha, zetaN and eta are values derived
	// from theta and items, precomputed (see NewZipfianWithRand) so that
	// NextItem is cheap.
	alpha, zetaN, eta, theta float64
	r *rand.Rand // source of uniform randomness
}
// NewZipfian builds a Zipfian generator over [0, items) with skew constant
// `constant`, seeded from the current wall-clock time.
func NewZipfian(items int64, constant float64) Zipfian {
	source := rand.NewSource(time.Now().UnixNano())
	return NewZipfianWithRand(items, constant, rand.New(source))
}
// NewZipfianWithRand builds a Zipfian generator over [0, items) with skew
// constant `constant`, drawing randomness from r. The zeta values and eta
// are precomputed here so that NextItem avoids the harmonic sums.
func NewZipfianWithRand(items int64, constant float64, r *rand.Rand) Zipfian {
	zN := zeta(items, constant)
	z2 := zeta(2, constant)

	return Zipfian{
		items: items,
		theta: constant,
		alpha: 1.0 / (1.0 - constant),
		zetaN: zN,
		eta:   (1 - math.Pow(2.0/float64(items), 1-constant)) / (1 - z2/zN),
		r:     r,
	}
}
// NextItem draws the next Zipf-distributed index in [0, z.items).
// Indices 0 and 1 are handled by direct threshold checks on the scaled
// uniform draw; everything else comes from the closed-form inverse.
func (z *Zipfian) NextItem() int64 {
	u := z.r.Float64()
	scaled := u * z.zetaN

	switch {
	case scaled < 1.0:
		return 0
	case scaled < 1.0+math.Pow(0.5, z.theta):
		return 1
	}

	return int64(float64(z.items) * math.Pow(z.eta*u-z.eta+1, z.alpha))
}
// reset re-derives the precomputed constants for a new item count, keeping
// the skew constant theta unchanged.
func (z *Zipfian) reset(items int64) {
	z.items = items
	z.zetaN = zeta(items, z.theta)
	z2 := zeta(2, z.theta)
	z.eta = (1 - math.Pow(2.0/float64(items), 1-z.theta)) / (1 - z2/z.zetaN)
}
// zeta computes the generalized harmonic number H(n, theta):
// the sum over i in [1, n] of 1 / i^theta.
func zeta(n int64, theta float64) float64 {
	// Iterate directly over 1..n (the original iterated 0..n-1 and added 1
	// inside the loop) and drop the non-gofmt trailing semicolons.
	sum := 0.0
	for i := int64(1); i <= n; i++ {
		sum += 1 / math.Pow(float64(i), theta)
	}
	return sum
}
// ZipfianSamples draws source ids with a Zipfian distribution, retiring each
// source once it has been sampled a fixed number of times.
type ZipfianSamples struct {
	sources []int64 // ids still eligible for sampling
	counts  []int64 // samples drawn so far, indexed by source id
	first   int64   // NOTE(review): not referenced by the methods visible in this file
	samples int64   // per-source quota before the source is retired
	zipf    Zipfian // index distribution over the remaining sources
}
// NewZipfianSamples builds a sampler over nsources sources (ids 0..nsources-1);
// each source is retired after it has been drawn `samples` times from the
// given Zipfian distribution.
func NewZipfianSamples(nsources int64, samples int64, zipf Zipfian) ZipfianSamples {
	ids := make([]int64, nsources)
	for i := range ids {
		ids[i] = int64(i)
	}

	return ZipfianSamples{
		sources: ids,
		counts:  make([]int64, len(ids)),
		first:   0,
		samples: samples,
		zipf:    zipf,
	}
}
func (z * ZipfianSamples) NextItem() *int64 {
if len(z.sources) == 0 {
return nil
} else {
idx := z.zipf.NextItem()
source := z.sources[idx]
z.counts[source] += 1
if z.counts[source] == z.samples {
l := len(z.sources)
z.sources[idx] = z.sources[l-1] // move the last item to the removed position
z.sources = z.sources[:l-1] // then reslice
//z.sources = append(z.sources[:idx], z.sources[idx+1:]...)
if len(z.sources) > 0 {
z.zipf.reset(int64(len(z.sources)))
}
}
return &source
}
return nil
} | cmd/experiments/zipfian.go | 0.575946 | 0.448004 | zipfian.go | starcoder |
package aoc2021
/*
Advent of Code 2021, day 2 ("Dive!") puzzle input. Each line is a submarine
command — "forward", "down" or "up" — followed by an integer magnitude; the
solver consumes these constants to compute position and depth.
*/
// DAY_2021_02_TEST_DATA is the worked example input from the day 2 puzzle text.
const DAY_2021_02_TEST_DATA = `forward 5
down 5
forward 8
up 3
down 8
forward 2`
const DAY_2021_02_DATA = `forward 3
down 7
forward 7
down 4
down 9
down 7
forward 5
forward 9
forward 3
forward 8
down 4
down 6
down 3
forward 7
forward 1
forward 4
down 1
forward 7
forward 9
down 3
down 1
down 5
forward 8
down 2
down 9
forward 3
down 9
down 7
down 6
down 1
forward 4
forward 9
forward 8
down 3
down 9
down 5
forward 5
down 7
down 7
up 1
down 2
up 1
down 7
up 1
up 1
down 8
down 8
forward 2
down 5
forward 9
forward 8
forward 4
up 2
down 9
down 7
forward 4
up 7
up 4
down 4
down 3
forward 8
down 8
up 2
up 1
forward 3
up 6
up 8
down 5
down 4
down 4
forward 1
down 8
forward 3
forward 5
forward 4
forward 2
forward 7
up 5
up 2
forward 2
forward 5
down 4
up 6
forward 3
forward 1
forward 1
forward 6
forward 7
forward 1
forward 8
forward 4
forward 4
forward 8
down 6
down 8
forward 4
forward 1
down 8
forward 3
forward 3
forward 9
forward 9
forward 3
up 1
down 2
down 5
forward 4
forward 5
forward 7
forward 4
forward 4
up 5
forward 1
down 9
down 9
down 1
up 7
down 8
up 6
down 4
forward 7
down 8
down 1
forward 4
forward 5
forward 9
down 2
forward 7
forward 7
up 2
up 1
forward 9
forward 1
forward 7
up 3
forward 8
forward 1
down 6
down 6
down 4
forward 2
forward 1
down 3
down 4
down 2
forward 9
up 8
down 4
down 3
down 1
down 1
forward 6
forward 6
down 7
forward 1
forward 5
forward 9
forward 5
forward 1
up 8
forward 7
up 3
forward 6
down 5
up 8
down 4
down 8
forward 2
up 7
forward 9
down 9
forward 1
down 5
forward 8
down 7
forward 8
forward 1
forward 5
down 4
down 1
forward 4
up 6
down 3
down 1
forward 1
forward 1
up 8
down 9
forward 8
forward 5
down 5
forward 1
up 9
down 6
down 4
up 2
forward 5
down 7
up 1
forward 3
up 5
forward 9
up 6
down 4
forward 6
down 8
down 2
forward 3
forward 4
down 5
down 7
down 4
up 3
up 8
down 8
up 8
down 8
forward 8
down 3
up 3
forward 8
down 6
forward 2
down 8
down 5
up 2
forward 1
forward 4
down 1
forward 5
forward 5
forward 2
forward 2
forward 4
down 7
forward 6
up 6
down 8
forward 4
forward 6
forward 2
down 8
down 2
up 1
down 8
forward 9
up 5
forward 8
up 9
down 1
down 2
forward 6
down 9
forward 3
up 8
up 4
down 8
forward 2
down 1
forward 6
forward 4
down 4
forward 4
up 8
down 6
forward 3
up 2
up 6
down 1
down 3
down 1
up 6
down 9
up 6
forward 9
down 4
forward 3
forward 1
up 7
down 1
forward 5
up 1
up 8
forward 5
down 5
forward 2
up 8
up 7
forward 4
up 7
up 4
forward 5
forward 3
down 9
forward 1
down 8
forward 3
up 3
down 7
forward 4
down 7
down 5
down 8
down 8
forward 6
forward 5
up 4
down 6
forward 4
up 2
up 4
down 4
down 9
forward 7
down 8
forward 6
forward 5
up 8
down 6
forward 1
up 2
forward 5
forward 7
down 4
down 6
forward 9
forward 2
up 6
up 6
forward 1
up 6
forward 8
down 7
forward 1
down 1
up 9
up 1
forward 1
forward 7
forward 5
down 4
forward 6
forward 4
down 8
up 6
up 8
forward 6
forward 3
up 6
forward 6
down 8
down 5
down 5
down 2
down 6
forward 1
forward 4
forward 5
down 5
forward 6
forward 2
forward 2
up 7
up 6
up 7
forward 7
forward 6
down 7
down 7
up 4
forward 5
forward 2
down 6
up 4
forward 8
down 1
down 5
up 6
down 4
down 3
down 8
forward 8
down 9
forward 8
forward 6
down 4
down 3
forward 6
up 4
up 9
forward 3
down 3
down 9
forward 1
down 7
forward 2
up 7
down 6
forward 5
down 8
down 1
forward 8
down 4
up 3
down 5
forward 6
down 7
forward 3
forward 6
forward 8
forward 6
down 4
down 6
forward 9
up 8
forward 2
forward 8
forward 1
forward 1
forward 3
forward 8
forward 6
forward 8
down 5
down 2
down 6
up 4
forward 5
forward 9
forward 1
down 3
down 6
down 7
forward 5
forward 8
up 1
forward 4
up 3
forward 6
down 3
down 7
down 1
down 1
forward 8
forward 3
forward 2
forward 1
forward 3
forward 7
up 6
down 8
forward 3
forward 8
forward 1
forward 4
up 3
down 7
up 9
up 6
forward 1
forward 6
forward 5
down 5
down 2
forward 8
up 8
down 4
forward 6
down 2
forward 1
down 8
forward 2
forward 9
forward 1
down 9
down 1
down 9
down 1
up 9
forward 3
forward 7
forward 3
down 5
up 3
forward 4
up 1
forward 2
down 8
forward 8
down 1
up 9
down 7
forward 9
up 6
down 3
forward 9
down 2
down 3
up 5
up 5
forward 8
down 2
forward 2
up 3
down 8
down 1
down 9
forward 5
down 5
down 5
down 4
down 8
forward 7
up 3
up 4
up 4
up 7
down 2
down 6
up 3
down 9
up 2
forward 6
forward 1
down 7
down 5
forward 6
down 6
up 4
down 4
down 8
up 5
forward 9
down 8
forward 1
forward 2
forward 8
forward 2
forward 3
up 9
up 8
up 9
up 6
down 5
forward 7
up 8
forward 1
down 3
down 8
forward 3
up 6
down 2
forward 2
up 4
up 4
forward 6
forward 1
forward 4
down 3
down 1
up 7
down 9
up 1
down 9
down 4
up 2
forward 8
down 3
forward 7
up 6
forward 7
up 2
forward 5
down 3
up 2
down 3
down 8
forward 5
down 8
forward 7
up 6
down 4
forward 4
down 1
up 3
forward 6
down 1
down 2
down 2
forward 7
down 6
down 2
forward 9
down 2
forward 2
down 2
forward 6
down 9
up 4
up 7
up 6
down 3
forward 3
down 7
down 6
forward 8
down 4
up 3
down 1
forward 8
down 4
forward 5
forward 1
down 7
forward 5
up 3
down 7
forward 3
down 4
up 1
down 5
forward 8
down 2
forward 9
forward 3
up 2
down 9
forward 5
up 7
down 7
down 1
down 7
down 8
forward 1
down 9
down 7
forward 4
down 5
forward 9
down 6
down 1
forward 2
up 4
up 3
down 1
forward 4
up 3
forward 4
up 2
forward 6
down 6
up 8
down 3
forward 5
down 2
forward 9
down 3
down 7
forward 4
down 5
up 9
up 9
down 4
up 5
forward 1
down 8
up 8
up 4
up 2
up 8
forward 5
down 5
up 1
down 6
down 3
up 4
forward 3
forward 1
forward 1
up 4
forward 1
down 6
forward 7
forward 4
forward 4
forward 9
forward 6
down 6
forward 4
up 7
down 5
down 2
forward 3
down 7
forward 8
down 7
forward 4
up 9
down 1
forward 3
forward 1
forward 4
down 2
up 2
down 1
down 4
down 7
forward 5
forward 3
up 5
forward 1
down 4
down 8
up 4
up 1
down 1
down 4
down 2
down 4
up 8
down 3
forward 8
up 5
down 4
forward 9
forward 7
down 4
forward 7
forward 9
forward 6
forward 9
up 9
down 3
up 4
down 8
forward 9
up 2
up 3
forward 8
forward 3
forward 1
forward 7
forward 8
forward 9
down 7
forward 3
forward 3
forward 4
up 8
forward 1
forward 5
up 9
down 2
down 7
forward 5
up 4
forward 9
down 9
up 2
forward 7
down 9
up 8
up 1
up 1
up 5
forward 4
down 2
forward 7
down 1
down 8
down 8
forward 3
forward 8
up 9
forward 7
forward 6
forward 3
forward 7
up 3
up 6
forward 5
forward 5
down 2
down 7
down 7
up 7
forward 6
forward 1
forward 7
up 6
down 9
forward 7
forward 1
up 7
forward 4
forward 9
up 2
down 5
down 6
down 4
forward 1
forward 7
forward 4
forward 5
down 2
forward 5
down 9
forward 7
forward 4
up 5
down 6
forward 2
forward 4
forward 8
up 1
down 2
up 9
forward 6
down 1
forward 6
forward 4
down 6
forward 6
up 4
down 5
forward 8
down 3
up 4
forward 3
down 6
up 7
down 8
down 5
down 7
forward 4
down 1
forward 5
up 9
up 3
down 4
forward 9
forward 6
forward 1
up 5
down 5
forward 1
forward 6
down 4
up 7
forward 5
down 5
forward 1
forward 5
down 4
forward 6
down 6
down 5
down 5
forward 4
down 5
forward 2
down 9
down 6
down 5
forward 5
down 9
down 7
up 8
down 3
forward 6
down 4
forward 8
forward 9
down 1
up 3
forward 4
up 3
forward 7
down 5
up 8
forward 1
up 1
down 2
forward 1
up 9
down 4
forward 1
up 3
down 9
up 8
down 2
up 4
forward 2
forward 8
up 8
forward 9
forward 2
down 4
forward 8
down 4
forward 9
down 1
up 8
forward 6
down 4
down 8
forward 1
forward 1
forward 5
forward 9
up 4
down 7
forward 4
down 5
forward 1
down 4
up 8
up 4
forward 2
forward 9
down 8
down 3
down 2
up 3
down 1
down 8
forward 5
down 6
down 5
forward 8
down 8
down 1
forward 1
forward 3
forward 7
forward 1
up 2
up 3
forward 9
down 2
forward 5
down 8
forward 7
forward 3
up 5
forward 1
forward 1
up 8
down 1
down 2
down 7
down 1
down 9
forward 7
down 8
down 4
up 8
forward 1
down 6
forward 9
forward 7
up 2
forward 3
forward 2
up 3
up 6
up 1
down 1
up 9
forward 8
forward 6
down 6
up 8
down 8
forward 5
forward 7
down 7
forward 3
forward 9
down 8
down 8
forward 2
up 7
down 4
down 7
up 6
down 3
forward 1
forward 8
down 5
down 6
up 5
forward 8
forward 6
up 8
forward 8
up 3
up 6
up 3
forward 1` | app/aoc2021/aoc2021_02_data.go | 0.756717 | 0.546315 | aoc2021_02_data.go | starcoder |
package constants
const (
	// UnitAcres is a unit of land measurement in the British Imperial and United States Customary systems, equal to 43,560 square feet, or 4,840 square yards. One acre is equivalent to 0.4047 hectare (4,047 square metres).
	UnitAcres = "acres"
	// UnitHectares is a unit of area in the metric system equal to 100 ares, or 10,000 square metres, and the equivalent of 2.471 acres in the British Imperial System and the United States Customary measure. The term is derived from the Latin area and from hect, an irregular contraction of the Greek word for hundred.
	UnitHectares = "hectares"
	// UnitMillimeters (American spelling) is a unit of length equal to 0.001 metre in the metric system and the equivalent of 0.03937 inch.
	UnitMillimeters = "millimeters"
	// UnitMillimetres (international spelling) is a unit of length equal to 0.001 metre in the metric system and the equivalent of 0.03937 inch.
	UnitMillimetres = "millimetres"
	// UnitMiles is an English unit of linear measure equal to 5,280 feet, or 1,760 yards, and standardised as exactly 1,609.344 meters by international agreement in 1959.
	UnitMiles = "miles"
	// UnitNauticalMiles is the unit whose per-hour speed is known as the knot. Nautical miles and knots are almost universally used for aeronautical and maritime navigation, because of their relationship with degrees and minutes of latitude.
	UnitNauticalMiles = "nautical_miles"
	// UnitKilometers (American spelling) is a unit of length in the metric system, equal to one thousand meters.
	// NOTE(review): the value "kilometeres" appears misspelled (cf. UnitDefault
	// = "kilometres"); confirm against all consumers before changing it, since
	// the string itself is the contract.
	UnitKilometers = "kilometeres"
	// UnitRadians is the standard unit of angular measure, used in many areas of mathematics.
	UnitRadians = "radians"
	// UnitDegrees is a measurement of a plane angle, defined so that a full rotation is 360 degrees.
	UnitDegrees = "degrees"
	// UnitInches is a unit of length in the (British) imperial and United States customary systems of measurement now formally equal to 1/36th yard but usually understood as 1/12th of a foot.
	UnitInches = "inches"
	// UnitYards is an English unit of length, in both the British imperial and US customary systems of measurement, that comprises 3 feet or 36 inches.
	UnitYards = "yards"
	// UnitMeters (American spelling) is the base unit of length in the International System of Units (SI).
	UnitMeters = "meters"
	// UnitCentimeters (American spelling) is a unit of length in the metric system, equal to one hundredth of a meter.
	UnitCentimeters = "centimeters"
	// UnitCentimetres (international spelling) is a unit of length in the metric system, equal to one hundredth of a meter.
	UnitCentimetres = "centimetres"
	// UnitMetres (international spelling) is the base unit of length in the International System of Units (SI).
	UnitMetres = "metres"
	// UnitKimometres is a unit of length in the metric system, equal to one thousand metres.
	// NOTE(review): the identifier itself looks misspelled ("Kimometres" for
	// "Kilometres"); renaming would break the exported API, so it is left as-is.
	UnitKimometres = "kilometres"
	// UnitFeet is a unit of length in the imperial and US customary systems of measurement.
	UnitFeet = "feet"
	// UnitDefault is the unit used by most Turf methods when no other unit is specified: kilometres.
	UnitDefault = "kilometres"
	// EarthRadius is the approximate radius of the earth in meters.
	// The radius at the equator is ~6378137 and at the poles is ~6356752. https://en.wikipedia.org/wiki/World_Geodetic_System#WGS84
	// 6371008.8 is one published "average radius" see https://en.wikipedia.org/wiki/Earth_radius#Mean_radius, or ftp://athena.fsv.cvut.cz/ZFG/grs80-Moritz.pdf p.4
	// https://github.com/Turfjs/turf/issues/635
	EarthRadius = 6371008.8
)
package board
// Resolves effects of the given battle on the board.
// Forwards the given battle to the appropriate battle resolver based on its type.
// Returns any retreating move orders that could not be resolved.
// Resolves effects of the given battle on the board by dispatching to the
// specialized resolver for the battle's shape: border conflicts first, then
// singleplayer battles (one result), then multiplayer battles.
// Returns any retreating move orders that could not be resolved.
func (board Board) resolveBattle(battle Battle) (retreats []Order) {
	switch {
	case battle.isBorderConflict():
		return board.resolveBorderBattle(battle)
	case len(battle.Results) == 1:
		return board.resolveSingleplayerBattle(battle)
	default:
		return board.resolveMultiplayerBattle(battle)
	}
}
// Resolves effects on the board from the given border battle.
// Assumes that the battle consists of exactly 2 results, for each of the areas in the battle,
// that each result is tied to a move order, and that the battle had at least one winner.
// Returns any retreating move orders that could not be resolved.
// Resolves effects on the board from the given border battle.
// Assumes that the battle consists of exactly 2 results, one for each of the
// areas in the battle, that each result is tied to a move order, and that the
// battle had at least one winner.
// Returns any retreating move orders that could not be resolved.
func (board Board) resolveBorderBattle(battle Battle) (retreats []Order) {
	winners, _ := battle.parseResults()
	move1 := battle.Results[0].Move
	move2 := battle.Results[1].Move

	// If there is more than one winner, the battle was a tie, and both moves retreat.
	if len(winners) > 1 {
		board.removeMove(move1)
		board.removeMove(move2)
		// A failed retreat is reported back to the caller for later handling.
		if !board.attemptRetreat(move1) {
			retreats = append(retreats, move1)
		}
		if !board.attemptRetreat(move2) {
			retreats = append(retreats, move2)
		}
		return retreats
	}

	winner := winners[0]

	for _, move := range []Order{move1, move2} {
		if move.Player == winner {
			// If destination area is uncontrolled, the player must win a singleplayer battle there before taking control.
			if board.Areas[move.To].IsControlled() {
				board.succeedMove(move)
			}
		} else {
			// The losing move is cancelled and its unit is removed from the origin.
			board.removeMove(move)
			board.removeOriginUnit(move)
		}
	}

	return nil
}
// Resolves effects on the board from the given singleplayer battle (player vs. neutral area).
// Assumes that the battle has a single result, with a move order tied to it.
// Returns the move order in a list if it fails retreat, or nil otherwise.
// Resolves effects on the board from the given singleplayer battle
// (player vs. neutral area).
// Assumes that the battle has a single result, with a move order tied to it.
// Returns the move order in a list if it fails to retreat, or nil otherwise.
func (board Board) resolveSingleplayerBattle(battle Battle) (retreats []Order) {
	winners, _ := battle.parseResults()
	move := battle.Results[0].Move

	// Exactly one winner means the attacker took the area; anything else
	// (a loss or a tie) forces the move to retreat.
	if len(winners) == 1 {
		board.succeedMove(move)
		return nil
	}

	board.removeMove(move)
	if !board.attemptRetreat(move) {
		return []Order{move}
	}
	return nil
}
// Resolves effects on hte board from the given multiplayer battle.
// Assumes that the battle has at least 1 winner
// Returns any retreating move orders that could not be resolved.
// Resolves effects on the board from the given multiplayer battle.
// Assumes that the battle has at least 1 winner.
// Returns any retreating move orders that could not be resolved.
func (board Board) resolveMultiplayerBattle(battle Battle) (retreats []Order) {
	winners, losers := battle.parseResults()

	// More than one winner (or zero) means the battle was tied.
	tie := len(winners) != 1

	for _, result := range battle.Results {
		// If the result has a DefenderArea, it is the result of the area's defender.
		// If the defender won, nothing changes for them.
		// If an attacker won, changes to the defender will be handled by calling succeedMove.
		if result.DefenderArea != "" {
			continue
		}

		move := result.Move
		lost := containsPlayer(losers, move.Player)

		if lost {
			// Losing attackers are cancelled and their units removed.
			board.removeMove(move)
			board.removeOriginUnit(move)
			continue
		}

		if tie {
			// Tied attackers must retreat; unresolved retreats are reported.
			board.removeMove(move)
			if !board.attemptRetreat(move) {
				retreats = append(retreats, move)
			}
			continue
		}

		// Sole winner: take the area, unless it is uncontrolled (in which
		// case a further singleplayer battle decides control).
		if board.Areas[move.To].IsControlled() {
			board.succeedMove(move)
		}
	}

	return retreats
}
package builds
import (
"path/filepath"
"time"
g "github.com/onsi/ginkgo"
o "github.com/onsi/gomega"
buildv1 "github.com/openshift/api/build/v1"
exutil "github.com/openshift/origin/test/extended/util"
)
// verifyStages asserts that every recorded build stage appears in
// expectedStages — a map from stage name to a [min, max] pair of Go duration
// strings (an empty min means "no lower bound") — and that each stage's
// duration falls within its bounds. Matched stages are deleted from the map,
// which must be empty afterwards, i.e. every expected stage must have occurred.
func verifyStages(stages []buildv1.StageInfo, expectedStages map[string][]string) {
	for _, stage := range stages {
		expectedDurations, ok := expectedStages[string(stage.Name)]
		if !ok {
			// NOTE(review): this expectation always fails when reached (ok is
			// false here); gomega's failure handler is expected to abort the
			// test at this point, so the nil expectedDurations below is
			// presumably never dereferenced — confirm against the registered
			// fail handler.
			o.ExpectWithOffset(1, ok).To(o.BeTrue(), "Unexpected stage %v was encountered", stage.Name)
		}
		if expectedDurations[0] != "" {
			// Lower bound: the stage must have taken at least this long.
			expectedMinDuration, _ := time.ParseDuration(expectedDurations[0])
			o.ExpectWithOffset(1, (stage.DurationMilliseconds < expectedMinDuration.Nanoseconds()/int64(time.Millisecond))).To(o.BeFalse(), "Stage %v ran for %v, expected greater than %v", stage.Name, stage.DurationMilliseconds, expectedMinDuration)
		}
		// Upper bound: the stage must not have exceeded the max duration.
		expectedMaxDuration, _ := time.ParseDuration(expectedDurations[1])
		o.ExpectWithOffset(1, stage.DurationMilliseconds > expectedMaxDuration.Nanoseconds()/int64(time.Millisecond)).To(o.BeFalse(), "Stage %v ran for %v, expected less than %v", stage.Name, stage.DurationMilliseconds, expectedMaxDuration)
		delete(expectedStages, string(stage.Name))
	}
	o.ExpectWithOffset(1, expectedStages).To(o.BeEmpty())
}
// Registers an extended test suite: each spec creates an image stream and a
// build config from fixtures, runs a build, and asserts on the recorded stage
// durations (via verifyStages) and the pushed image reference.
var _ = g.Describe("[sig-builds][Feature:Builds][timing] capture build stages and durations", func() {
	var (
		buildTimingBaseDir    = exutil.FixturePath("testdata", "builds", "build-timing")
		isFixture             = filepath.Join(buildTimingBaseDir, "test-is.json")
		dockerBuildFixture    = filepath.Join(buildTimingBaseDir, "test-docker-build.json")
		dockerBuildDockerfile = filepath.Join(buildTimingBaseDir, "Dockerfile")
		sourceBuildFixture    = filepath.Join(buildTimingBaseDir, "test-s2i-build.json")
		sourceBuildBinDir     = filepath.Join(buildTimingBaseDir, "s2i-binary-dir")
		oc                    = exutil.NewCLI("build-timing")
	)

	g.Context("", func() {
		g.BeforeEach(func() {
			exutil.PreTestDump()
		})

		g.AfterEach(func() {
			// Dump cluster state only when the spec failed, to aid debugging.
			if g.CurrentGinkgoTestDescription().Failed {
				exutil.DumpPodStates(oc)
				exutil.DumpConfigMapStates(oc)
				exutil.DumpPodLogsStartingWith("", oc)
			}
		})

		g.It("should record build stages and durations for s2i", func() {
			// Expected bounds per stage as [min, max]; empty min = no lower bound.
			expectedBuildStages := make(map[string][]string)
			expectedBuildStages["PullImages"] = []string{"", "1000s"}
			expectedBuildStages["Build"] = []string{"10ms", "1000s"}
			expectedBuildStages["PushImage"] = []string{"100ms", "1000s"}

			g.By("creating test image stream")
			err := oc.Run("create").Args("-f", isFixture).Execute()
			o.Expect(err).NotTo(o.HaveOccurred())

			g.By("creating test build config")
			err = oc.Run("create").Args("-f", sourceBuildFixture).Execute()
			o.Expect(err).NotTo(o.HaveOccurred())

			g.By("starting the test source build")
			br, _ := exutil.StartBuildAndWait(oc, "test", "--from-dir", sourceBuildBinDir)
			br.AssertSuccess()

			// Bug 1716697 - ensure push spec doesn't include tag, only SHA
			o.Expect(br.Logs()).To(o.MatchRegexp(`pushed image-registry\.openshift-image-registry\.svc:5000/.*/test@sha256:`))

			verifyStages(br.Build.Status.Stages, expectedBuildStages)
		})

		g.It("should record build stages and durations for docker", func() {
			// Expected bounds per stage as [min, max]; empty min = no lower bound.
			expectedBuildStages := make(map[string][]string)
			expectedBuildStages["PullImages"] = []string{"", "1000s"}
			expectedBuildStages["Build"] = []string{"10ms", "1000s"}
			expectedBuildStages["PushImage"] = []string{"100ms", "1000s"}

			g.By("creating test image stream")
			err := oc.Run("create").Args("-f", isFixture).Execute()
			o.Expect(err).NotTo(o.HaveOccurred())

			g.By("creating test build config")
			err = oc.Run("create").Args("-f", dockerBuildFixture).Execute()
			o.Expect(err).NotTo(o.HaveOccurred())

			g.By("starting the test docker build")
			br, _ := exutil.StartBuildAndWait(oc, "test", "--from-file", dockerBuildDockerfile)
			br.AssertSuccess()

			// Bug 1716697 - ensure push spec doesn't include tag, only SHA
			o.Expect(br.Logs()).To(o.MatchRegexp(`pushed image-registry\.openshift-image-registry\.svc:5000/.*/test@sha256:`))

			verifyStages(br.Build.Status.Stages, expectedBuildStages)
		})
	})
})
package preprocessing
import (
"github.com/wieku/gosu-pp/beatmap/difficulty"
"github.com/wieku/gosu-pp/beatmap/objects"
"github.com/wieku/gosu-pp/math/vector"
"math"
)
const (
	// maximumSliderRadius and assumedSliderRadius are movement thresholds
	// expressed in normalized playfield units (multiples of NormalizedRadius).
	// NOTE(review): maximumSliderRadius is not referenced in this file's
	// visible code; presumably used by other difficulty-calculation files.
	maximumSliderRadius float32 = NormalizedRadius * 2.4
	// assumedSliderRadius is the movement threshold assumed when following a
	// slider's body (see calculateEndPosition).
	assumedSliderRadius float32 = NormalizedRadius * 1.8
)
// LazySlider is a utility struct that has LazyEndPosition and LazyTravelDistance needed for difficulty calculations
type LazySlider struct {
*objects.Slider
diff *difficulty.Difficulty
LazyEndPosition vector.Vector2f
LazyTravelDistance float32
LazyTravelTime float64
}
// NewLazySlider wraps slider with lazy-path information (end position, travel
// distance and travel time) computed under the given difficulty settings.
func NewLazySlider(slider *objects.Slider, d *difficulty.Difficulty) *LazySlider {
	ls := &LazySlider{
		Slider: slider,
		diff:   d,
	}

	ls.calculateEndPosition()

	return ls
}
// calculateEndPosition derives the slider's lazy travel time, lazy travel
// distance and lazy end position by simulating a cursor that only moves when
// forced to (i.e. when a score point drifts further than the allowed radius).
func (slider *LazySlider) calculateEndPosition() {
	// Travel time runs from the slider's start to its final score point.
	slider.LazyTravelTime = slider.ScorePoints[len(slider.ScorePoints)-1].Time - slider.GetStartTime()

	slider.LazyEndPosition = slider.GetStackedPositionAtMod(slider.LazyTravelTime+slider.GetStartTime(), slider.diff.Mods) // temporary lazy end position until a real result can be derived.
	currCursorPosition := slider.GetStackedStartPositionMod(slider.diff.Mods)
	scalingFactor := NormalizedRadius / slider.diff.CircleRadius // lazySliderDistance is coded to be sensitive to scaling, this makes the maths easier with the thresholds being used.

	for i := 0; i < len(slider.ScorePoints); i++ {
		var currMovementObj = slider.ScorePoints[i]

		var stackedPosition vector.Vector2f
		if i == len(slider.ScorePoints)-1 { // bug that made into deployment but well
			stackedPosition = slider.GetStackedPositionAtMod(slider.EndTime, slider.diff.Mods)
		} else {
			stackedPosition = slider.GetStackedPositionAtMod(currMovementObj.Time, slider.diff.Mods)
		}

		currMovement := stackedPosition.Sub(currCursorPosition)
		currMovementLength := scalingFactor * float64(currMovement.Len())

		// Amount of movement required so that the cursor position needs to be updated.
		requiredMovement := float64(assumedSliderRadius)

		if i == len(slider.ScorePoints)-1 {
			// The end of a slider has special aim rules due to the relaxed time constraint on position.
			// There is both a lazy end position as well as the actual end slider position. We assume the player takes the simpler movement.
			// For sliders that are circular, the lazy end position may actually be farther away than the sliders true end.
			// This code is designed to prevent buffing situations where lazy end is actually a less efficient movement.
			lazyMovement := slider.LazyEndPosition.Sub(currCursorPosition)

			if lazyMovement.Len() < currMovement.Len() {
				currMovement = lazyMovement
			}

			currMovementLength = scalingFactor * float64(currMovement.Len())
		} else if currMovementObj.IsReverse {
			// For a slider repeat, assume a tighter movement threshold to better assess repeat sliders.
			requiredMovement = NormalizedRadius
		}

		if currMovementLength > requiredMovement {
			// this finds the positional delta from the required radius and the current position, and updates the currCursorPosition accordingly, as well as rewarding distance.
			currCursorPosition = currCursorPosition.Add(currMovement.Scl(float32((currMovementLength - requiredMovement) / currMovementLength)))
			currMovementLength *= (currMovementLength - requiredMovement) / currMovementLength
			slider.LazyTravelDistance += float32(currMovementLength)
		}

		if i == len(slider.ScorePoints)-1 {
			slider.LazyEndPosition = currCursorPosition
		}
	}

	slider.LazyTravelDistance *= float32(math.Pow(1+float64(slider.RepeatCount-1)/2.5, 1.0/2.5)) // Bonus for repeat sliders until a better per nested object strain system can be achieved.
}
package main
/* Day 9 part A:
For a given input stream consisting of groups of characters bounded by {}
and optionally within {} separated by a comma, determine the total number of
groups. These may be infinitely nested.
There are special groups ("garbage") bounded by < and > within which are
excluded from the overall count. The ! character will "cancel" the following
character, including another !. Thus, !! is a noop. Garbage only occurs within
groups (bounded by {}).
Parsing State Machine:
! skip next
< begin garbage; ignore everything until >
{ begin group
, ok to start a new group when seeing another {.
} end of previous group
Scoring:
Each group is assigned a score which is one more than the score of the group
that immediately contains it. (The outermost group gets a score of 1.)
Part B:
Count the number of characters in garbage. <> don't count towards the count and
neither does ! nor the character(s) being cancelled by !.
*/
import (
"flag"
"fmt"
"strings"
)
// Command-line flags: the puzzle input stream, a switch for the part B answer
// (count of garbage characters instead of the group score), and verbose
// per-token state tracing.
var input = flag.String("input", "{}", "Input string for the program")
var partB = flag.Bool("partB", false, "Perform part B solution")
var debug = flag.Bool("debug", false, "Debug output")
func main() {
flag.Parse()
// Loop over each character
characters := strings.Split(*input, "")
depth := 0 // Group depth; used with scoring
score := 0 // Total score
garbageCharacters := 0 // Cleaned up garbage characters
ignoreNext := false
ignoreGroups := false
for i, token := range characters {
if *debug {
fmt.Printf("[%d/%d] score: %d, depth: %d, ignoreNext: %t, ignoreGroups: %t, token %s\n", i, len(*input)-1, score, depth, ignoreNext, ignoreGroups, token)
}
if ignoreNext {
// Clear and skip this character
ignoreNext = false
continue
}
switch token {
case "{":
if !ignoreGroups {
depth += 1
} else {
garbageCharacters += 1
}
case "<":
if ignoreGroups {
// Already ignoring, so we need to bump garbageCharacters
garbageCharacters += 1
}
ignoreGroups = true // start of garbage
case ">":
ignoreGroups = false // end of garbage
case "}":
if !ignoreGroups {
score += depth
depth -= 1
} else {
garbageCharacters += 1
}
case "!":
ignoreNext = true
default:
if ignoreGroups {
garbageCharacters += 1
}
}
} // EOF
if *partB {
fmt.Printf("Garbage characters: %d\n", garbageCharacters)
} else {
fmt.Printf("high score: %d\n", score)
}
} | 2017/day09.go | 0.598547 | 0.404184 | day09.go | starcoder |
package pure
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component/processor"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/message"
oprocessor "github.com/benthosdev/benthos/v4/internal/old/processor"
// nolint:staticcheck // Ignore SA1019 deprecation warning until we can switch to "google.golang.org/protobuf/types/dynamicpb"
"github.com/golang/protobuf/jsonpb"
// nolint:staticcheck // Ignore SA1019 deprecation warning until we can switch to "google.golang.org/protobuf/types/dynamicpb"
"github.com/golang/protobuf/proto"
"github.com/jhump/protoreflect/desc"
"github.com/jhump/protoreflect/desc/protoparse"
"github.com/jhump/protoreflect/dynamic"
)
func init() {
	// Register the "protobuf" processor constructor and its documentation with
	// the global processor bundle so it can be instantiated from pipeline configs.
	err := bundle.AllProcessors.Add(func(conf oprocessor.Config, mgr bundle.NewManagement) (processor.V1, error) {
		p, err := newProtobuf(conf.Protobuf, mgr)
		if err != nil {
			return nil, err
		}
		// Adapt the V2 processor implementation to the V1 interface the bundle expects.
		return processor.NewV2ToV1Processor("protobuf", p, mgr.Metrics()), nil
	}, docs.ComponentSpec{
		Name: "protobuf",
		Categories: []string{
			"Parsing",
		},
		Summary: `
Performs conversions to or from a protobuf message. This processor uses
reflection, meaning conversions can be made directly from the target .proto
files.`,
		Status: docs.StatusBeta,
		Description: `
The main functionality of this processor is to map to and from JSON documents,
you can read more about JSON mapping of protobuf messages here:
[https://developers.google.com/protocol-buffers/docs/proto3#json](https://developers.google.com/protocol-buffers/docs/proto3#json)

Using reflection for processing protobuf messages in this way is less performant
than generating and using native code. Therefore when performance is critical it
is recommended that you use Benthos plugins instead for processing protobuf
messages natively, you can find an example of Benthos plugins at
[https://github.com/benthosdev/benthos-plugin-example](https://github.com/benthosdev/benthos-plugin-example)

## Operators

### ` + "`to_json`" + `

Converts protobuf messages into a generic JSON structure. This makes it easier
to manipulate the contents of the document within Benthos.

### ` + "`from_json`" + `

Attempts to create a target protobuf message from a generic JSON structure.`,
		Config: docs.FieldComponent().WithChildren(
			docs.FieldString("operator", "The [operator](#operators) to execute").HasOptions("to_json", "from_json"),
			docs.FieldString("message", "The fully qualified name of the protobuf message to convert to/from."),
			docs.FieldString("import_paths", "A list of directories containing .proto files, including all definitions required for parsing the target message. If left empty the current directory is used. Each directory listed will be walked with all found .proto files imported.").Array(),
		).ChildDefaultAndTypesFromStruct(oprocessor.NewProtobufConfig()),
		Examples: []docs.AnnotatedExample{
			{
				Title: "JSON to Protobuf",
				Summary: `
If we have the following protobuf definition within a directory called ` + "`testing/schema`" + `:

` + "```protobuf" + `
syntax = "proto3";
package testing;

import "google/protobuf/timestamp.proto";

message Person {
  string first_name = 1;
  string last_name = 2;
  string full_name = 3;
  int32 age = 4;
  int32 id = 5; // Unique ID number for this person.
  string email = 6;

  google.protobuf.Timestamp last_updated = 7;
}
` + "```" + `

And a stream of JSON documents of the form:

` + "```json" + `
{
	"firstName": "caleb",
	"lastName": "quaye",
	"email": "<EMAIL>"
}
` + "```" + `

We can convert the documents into protobuf messages with the following config:`,
				Config: `
pipeline:
  processors:
    - protobuf:
        operator: from_json
        message: testing.Person
        import_paths: [ testing/schema ]
`,
			},
			{
				Title: "Protobuf to JSON",
				Summary: `
If we have the following protobuf definition within a directory called ` + "`testing/schema`" + `:

` + "```protobuf" + `
syntax = "proto3";
package testing;

import "google/protobuf/timestamp.proto";

message Person {
  string first_name = 1;
  string last_name = 2;
  string full_name = 3;
  int32 age = 4;
  int32 id = 5; // Unique ID number for this person.
  string email = 6;

  google.protobuf.Timestamp last_updated = 7;
}
` + "```" + `

And a stream of protobuf messages of the type ` + "`Person`" + `, we could convert them into JSON documents of the format:

` + "```json" + `
{
	"firstName": "caleb",
	"lastName": "quaye",
	"email": "<EMAIL>"
}
` + "```" + `

With the following config:`,
				Config: `
pipeline:
  processors:
    - protobuf:
        operator: to_json
        message: testing.Person
        import_paths: [ testing/schema ]
`,
			},
		},
	})
	if err != nil {
		// Registration only fails on programmer error (e.g. a duplicate plugin
		// name), so aborting at package init is intentional.
		panic(err)
	}
}
// protobufOperator transforms the contents of a single message part in place,
// returning an error when the conversion fails.
type protobufOperator func(part *message.Part) error
// newProtobufToJSONOperator returns an operator that decodes protobuf-encoded
// parts of the named message type and replaces their contents with the
// equivalent JSON document. Descriptors are loaded eagerly so that an invalid
// message name or import path fails at construction time.
func newProtobufToJSONOperator(msg string, importPaths []string) (protobufOperator, error) {
	if msg == "" {
		return nil, errors.New("message field must not be empty")
	}
	descriptors, err := loadDescriptors(importPaths)
	if err != nil {
		return nil, err
	}
	md := getMessageFromDescriptors(msg, descriptors)
	if md == nil {
		return nil, fmt.Errorf("unable to find message '%v' definition within '%v'", msg, importPaths)
	}
	marshaller := &jsonpb.Marshaler{
		AnyResolver: dynamic.AnyResolver(dynamic.NewMessageFactoryWithDefaults(), descriptors...),
	}
	return func(part *message.Part) error {
		dynMsg := dynamic.NewMessage(md)
		if err := proto.Unmarshal(part.Get(), dynMsg); err != nil {
			return fmt.Errorf("failed to unmarshal message: %w", err)
		}
		data, err := dynMsg.MarshalJSONPB(marshaller)
		if err != nil {
			return fmt.Errorf("failed to marshal protobuf message: %w", err)
		}
		part.Set(data)
		return nil
	}, nil
}
// newProtobufFromJSONOperator returns an operator that parses JSON documents
// into protobuf messages of the named type, replacing part contents with the
// binary protobuf encoding. Descriptors are loaded eagerly so that an invalid
// message name or import path fails at construction time.
func newProtobufFromJSONOperator(msg string, importPaths []string) (protobufOperator, error) {
	if msg == "" {
		return nil, errors.New("message field must not be empty")
	}
	descriptors, err := loadDescriptors(importPaths)
	if err != nil {
		return nil, err
	}
	m := getMessageFromDescriptors(msg, descriptors)
	if m == nil {
		return nil, fmt.Errorf("unable to find message '%v' definition within '%v'", msg, importPaths)
	}
	unmarshaler := &jsonpb.Unmarshaler{
		AnyResolver: dynamic.AnyResolver(dynamic.NewMessageFactoryWithDefaults(), descriptors...),
	}
	return func(part *message.Part) error {
		msg := dynamic.NewMessage(m)
		if err := msg.UnmarshalJSONPB(unmarshaler, part.Get()); err != nil {
			return fmt.Errorf("failed to unmarshal JSON message: %w", err)
		}
		data, err := msg.Marshal()
		if err != nil {
			// Wrap with %w (previously %v) so callers can unwrap the cause,
			// matching the error handling of the to_json operator.
			return fmt.Errorf("failed to marshal protobuf message: %w", err)
		}
		part.Set(data)
		return nil
	}, nil
}
// strToProtobufOperator resolves a configured operator name into its
// implementation, or reports an error for unknown names.
func strToProtobufOperator(opStr, message string, importPaths []string) (protobufOperator, error) {
	switch opStr {
	case "to_json":
		return newProtobufToJSONOperator(message, importPaths)
	case "from_json":
		return newProtobufFromJSONOperator(message, importPaths)
	default:
		return nil, fmt.Errorf("operator not recognised: %v", opStr)
	}
}
// loadDescriptors walks each import path, collects every .proto file found
// (recorded relative to its import root) and parses them all into file
// descriptors. An empty importPaths list defaults to the current directory,
// in which case the parser's default (cwd-relative) resolution is used.
func loadDescriptors(importPaths []string) ([]*desc.FileDescriptor, error) {
	var parser protoparse.Parser
	if len(importPaths) == 0 {
		importPaths = []string{"."}
	} else {
		parser.ImportPaths = importPaths
	}
	var files []string
	for _, root := range importPaths {
		walkFn := func(path string, info os.FileInfo, ferr error) error {
			if ferr != nil || info.IsDir() {
				return ferr
			}
			if filepath.Ext(info.Name()) != ".proto" {
				return nil
			}
			rel, rerr := filepath.Rel(root, path)
			if rerr != nil {
				return fmt.Errorf("failed to get relative path: %v", rerr)
			}
			files = append(files, rel)
			return nil
		}
		if err := filepath.Walk(root, walkFn); err != nil {
			return nil, err
		}
	}
	fds, err := parser.ParseFiles(files...)
	if err != nil {
		return nil, fmt.Errorf("failed to parse .proto file: %v", err)
	}
	if len(fds) == 0 {
		return nil, fmt.Errorf("no .proto files were found in the paths '%v'", importPaths)
	}
	return fds, nil
}
// getMessageFromDescriptors searches the parsed file descriptors for a fully
// qualified message name, returning nil when no descriptor defines it.
func getMessageFromDescriptors(message string, fds []*desc.FileDescriptor) *desc.MessageDescriptor {
	for _, fd := range fds {
		if md := fd.FindMessage(message); md != nil {
			return md
		}
	}
	return nil
}
//------------------------------------------------------------------------------
// protobufProc is a processor that applies a protobuf conversion operator
// (to_json or from_json) to each message part it processes.
type protobufProc struct {
	operator protobufOperator // conversion applied to each part
	log      log.Modular      // logger scoped to this processor instance
}
// newProtobuf constructs a protobufProc from its parsed config, resolving the
// configured operator up front so that invalid configs fail at start-up.
func newProtobuf(conf oprocessor.ProtobufConfig, mgr interop.Manager) (*protobufProc, error) {
	op, err := strToProtobufOperator(conf.Operator, conf.Message, conf.ImportPaths)
	if err != nil {
		return nil, err
	}
	return &protobufProc{
		operator: op,
		log:      mgr.Logger(),
	}, nil
}
// Process applies the configured conversion operator to a copy of the message
// part, leaving the original untouched. On failure the error is logged at
// debug level and returned so upstream error handling can act on it.
func (p *protobufProc) Process(ctx context.Context, msg *message.Part) ([]*message.Part, error) {
	newPart := msg.Copy()
	if err := p.operator(newPart); err != nil {
		p.log.Debugf("Operator failed: %v", err)
		return nil, err
	}
	return []*message.Part{newPart}, nil
}
// Close is a no-op as this processor holds no background resources.
func (p *protobufProc) Close(context.Context) error {
	return nil
}
package matrix_ops
import (
"errors"
"fmt"
)
// Matrix is a dense, row-major matrix of float64 values represented as a
// slice of row slices.
type Matrix [][]float64
// CreateEmptyMatrix allocates a rows-by-cols matrix with all elements zero.
func CreateEmptyMatrix(rows, cols int) Matrix { // TODO: check for valid rows, cols
	mat := make(Matrix, rows)
	for i := range mat {
		mat[i] = make([]float64, cols)
	}
	return mat
}
// CopyMatrix returns a deep copy of mat; mutating the copy never affects the
// original.
func CopyMatrix(mat Matrix) Matrix {
	dst := CreateEmptyMatrix(Size(mat))
	for i, row := range mat {
		copy(dst[i], row)
	}
	return dst
}
// CreateIdentityMatrix returns the size-by-size identity matrix.
func CreateIdentityMatrix(size int) Matrix {
	mat := CreateEmptyMatrix(size, size)
	for i := 0; i < size; i++ {
		mat[i][i] = 1
	}
	return mat
}
// CheckMatrix reports whether mat is rectangular: either completely empty, or
// with at least one column and every row the same length as the first.
func CheckMatrix(mat Matrix) bool {
	rows, cols := Size(mat)
	switch {
	case rows == 0 && cols == 0:
		return true
	case rows == 0 || cols == 0:
		return false
	}
	for _, row := range mat[1:] {
		if len(row) != cols {
			return false
		}
	}
	return true
}
// Size returns the dimensions (rows, cols) of mat. The column count is taken
// from the first row; an empty matrix reports 0, 0.
func Size(mat Matrix) (int, int) {
	if len(mat) == 0 {
		return 0, 0
	}
	return len(mat), len(mat[0])
}
// PrintMatrix writes mat to standard output, one row per line with a trailing
// space after each element.
func PrintMatrix(mat Matrix) {
	for _, row := range mat {
		for _, v := range row {
			fmt.Printf("%v ", v)
		}
		fmt.Println()
	}
}
// TransposeMatrix returns a newly allocated transpose of mat, or an error
// when mat is not rectangular. The input is never modified.
func TransposeMatrix(mat Matrix) (Matrix, error) {
	if !CheckMatrix(mat) {
		return nil, errors.New("Bad matrix")
	}
	rows, cols := Size(mat)
	t := CreateEmptyMatrix(cols, rows)
	for r := 0; r < rows; r++ {
		for c := 0; c < cols; c++ {
			t[c][r] = mat[r][c]
		}
	}
	return t, nil
}
// MultiplyMatrixScalar returns a new matrix equal to mat with every element
// multiplied by scalar; mat itself is left unchanged.
func MultiplyMatrixScalar(mat Matrix, scalar float64) (Matrix, error) {
	if !CheckMatrix(mat) {
		return nil, errors.New("Bad matrix")
	}
	out := CopyMatrix(mat)
	for r := range out {
		for c := range out[r] {
			out[r][c] *= scalar
		}
	}
	return out, nil
}
// MultiplyMatrices returns the matrix product mat1 x mat2. The number of
// columns of mat1 must equal the number of rows of mat2.
func MultiplyMatrices(mat1, mat2 Matrix) (Matrix, error) {
	if !CheckMatrix(mat1) {
		return nil, errors.New("Bad first matrix")
	}
	if !CheckMatrix(mat2) {
		return nil, errors.New("Bad second matrix")
	}
	rows1, cols1 := Size(mat1)
	rows2, cols2 := Size(mat2)
	if cols1 != rows2 {
		return nil, errors.New("Number of columns of the first matrix should " +
			"be equal to the number of rows of the second " +
			"matrix")
	}
	prod := CreateEmptyMatrix(rows1, cols2)
	for r := 0; r < rows1; r++ {
		for c := 0; c < cols2; c++ {
			var sum float64
			for k := 0; k < cols1; k++ {
				sum += mat1[r][k] * mat2[k][c]
			}
			prod[r][c] = sum
		}
	}
	return prod, nil
}
// SumMatrices returns the element-wise sum of two equally sized matrices as a
// new matrix; neither input is modified.
func SumMatrices(mat1, mat2 Matrix) (Matrix, error) {
	if !CheckMatrix(mat1) {
		return nil, errors.New("Bad first matrix")
	}
	if !CheckMatrix(mat2) {
		return nil, errors.New("Bad second matrix")
	}
	rows1, cols1 := Size(mat1)
	rows2, cols2 := Size(mat2)
	if rows1 != rows2 || cols1 != cols2 {
		return nil, errors.New("Matrix sizes should be equal")
	}
	sum := CopyMatrix(mat1)
	for r := range sum {
		for c := range sum[r] {
			sum[r][c] += mat2[r][c]
		}
	}
	return sum, nil
}
func ScalarMultiplyVectors(vec1, vec2 Matrix) (float64, error) {
// First we check for errors
if !CheckMatrix(vec1) {
return 0, errors.New("Bad first vector")
} else if !CheckMatrix(vec2) {
return 0, errors.New("Bad second vector")
}
rows1, cols1 := Size(vec1)
rows2, cols2 := Size(vec2)
// Then we check if these VALID matrices are vectors
if !(rows1 == 1 || cols1 == 1) {
return 0, errors.New("First matrix is not a vector")
} else if !(rows2 == 1 || cols2 == 1) {
return 0, errors.New("Second matrix is not a vector")
}
var new_vec1, new_vec2 Matrix = vec1, vec2
var err error
if rows1 != 1 {
new_vec1, err = TransposeMatrix(CopyMatrix(vec1))
if err != nil {
return 0, errors.New("Couldn't transpose the first matrix")
}
}
if rows2 != 1 {
new_vec2, err = TransposeMatrix(CopyMatrix(vec2))
if err != nil {
return 0, errors.New("Couldn't transpose the second matrix")
}
}
_, cols1 = Size(new_vec1)
_, cols2 = Size(new_vec2)
if cols1 != cols2 {
return 0, errors.New("Vectors should be equal in length")
}
var sum float64
for col := 0; col < cols1; col++ {
sum += new_vec1[0][col] * new_vec2[0][col]
}
return sum, nil
} | github.com/qwertygidq/matrix_ops/matrix_ops.go | 0.553747 | 0.617282 | matrix_ops.go | starcoder |
package processor
import (
"fmt"
"time"
"github.com/Jeffail/benthos/v3/internal/bloblang"
"github.com/Jeffail/benthos/v3/internal/bloblang/field"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// init registers the insert_part processor constructor together with its
// documentation in the global Constructors map.
func init() {
	Constructors[TypeInsertPart] = TypeSpec{
		constructor: NewInsertPart,
		Categories: []Category{
			CategoryComposition,
		},
		Summary: `
Insert a new message into a batch at an index. If the specified index is greater
than the length of the existing batch it will be appended to the end.`,
		Description: `
The index can be negative, and if so the message will be inserted from the end
counting backwards starting from -1. E.g. if index = -1 then the new message
will become the last of the batch, if index = -2 then the new message will be
inserted before the last message, and so on. If the negative index is greater
than the length of the existing batch it will be inserted at the beginning.
The new message will have metadata copied from the first pre-existing message of
the batch.
This processor will interpolate functions within the 'content' field, you can
find a list of functions [here](/docs/configuration/interpolation#bloblang-queries).`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("index", "The index within the batch to insert the message at."),
			docs.FieldCommon("content", "The content of the message being inserted.").IsInterpolated(),
		},
	}
}
//------------------------------------------------------------------------------
// InsertPartConfig contains configuration fields for the InsertPart processor.
type InsertPartConfig struct {
	// Index is the batch position to insert at; negative values count back
	// from the end of the batch.
	Index int `json:"index" yaml:"index"`
	// Content is the (interpolated) content of the message to insert.
	Content string `json:"content" yaml:"content"`
}
// NewInsertPartConfig returns the default configuration: insert at the end of
// the batch (index -1) with empty content.
func NewInsertPartConfig() InsertPartConfig {
	return InsertPartConfig{
		Index:   -1,
		Content: "",
	}
}
//------------------------------------------------------------------------------
// InsertPart is a processor that inserts a new message part at a specific
// index.
type InsertPart struct {
	part *field.Expression // interpolated content expression for the new part
	conf Config            // full processor config (Index read at process time)

	log   log.Modular
	stats metrics.Type

	mCount     metrics.StatCounter // messages processed
	mSent      metrics.StatCounter // parts sent downstream
	mBatchSent metrics.StatCounter // batches sent downstream
}
// NewInsertPart returns a InsertPart processor. The content field is compiled
// into an interpolation expression eagerly so bad configs fail at start-up.
func NewInsertPart(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	contentExpr, err := bloblang.NewField(conf.InsertPart.Content)
	if err != nil {
		return nil, fmt.Errorf("failed to parse content expression: %v", err)
	}
	p := &InsertPart{
		part:  contentExpr,
		conf:  conf,
		log:   log,
		stats: stats,
	}
	p.mCount = stats.GetCounter("count")
	p.mSent = stats.GetCounter("sent")
	p.mBatchSent = stats.GetCounter("batch.sent")
	return p, nil
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
//
// The new part's content is resolved via interpolation against the batch, its
// metadata is copied from the first pre-existing part, and it is inserted at
// the configured index (negative indexes count back from the end; out-of-range
// indexes are clamped to the batch bounds).
func (p *InsertPart) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	p.mCount.Incr(1)

	newPartBytes := p.part.Bytes(0, msg)
	index := p.conf.InsertPart.Index
	msgLen := msg.Len()
	// Normalise a negative index: -1 means append at the end, -2 before the
	// last part, etc. Anything past the start clamps to 0.
	if index < 0 {
		index = msgLen + index + 1
		if index < 0 {
			index = 0
		}
	} else if index > msgLen {
		// A positive index beyond the batch clamps to append-at-end.
		index = msgLen
	}

	newMsg := message.New(nil)
	// Copying the first part carries its metadata over to the inserted part.
	newPart := msg.Get(0).Copy()
	newPart.Set(newPartBytes)
	msg.Iter(func(i int, p types.Part) error {
		if i == index {
			newMsg.Append(newPart)
		}
		newMsg.Append(p.Copy())
		return nil
	})
	// Iteration never reaches index == msg.Len(), so handle the append-at-end
	// case explicitly here.
	if index == msg.Len() {
		newMsg.Append(newPart)
	}

	p.mBatchSent.Incr(1)
	p.mSent.Incr(int64(newMsg.Len()))
	msgs := [1]types.Message{newMsg}
	return msgs[:], nil
}
// CloseAsync shuts down the processor and stops processing requests. This
// processor has no background resources, so it is a no-op.
func (p *InsertPart) CloseAsync() {
}
// WaitForClose blocks until the processor has closed down. It returns
// immediately as there is nothing to wait for.
func (p *InsertPart) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------ | lib/processor/insert_part.go | 0.695648 | 0.519217 | insert_part.go | starcoder |
package plantri
import (
"errors"
"fmt"
)
// ErrVertexNotFound is returned by graph operations that reference a vertex
// id not present in the graph.
var ErrVertexNotFound = errors.New("plantri: vertex with that id doesn't exist")
// Graph is an abstraction of a simple graph (no self-loops or parallel
// edges). Each vertex is guaranteed to have a unique identifier.
type Graph interface {
	// Size returns the number of edges in the graph.
	Size() int

	// Order returns the number of vertices in the graph.
	Order() int

	// Vertices returns the list of vertices in the graph. Two calls to
	// this function will return the list of vertices in the same order so
	// long as the graph hasn't changed in the interim.
	// TODO(guy): Unit test this requirement
	Vertices() []Vertex

	// Edges returns the list of edges in the graph. Two calls to this function
	// will return the list of edges in the same order so long as the graph
	// hasn't changed in the interim.
	// TODO(guy): Unit test this requirement
	Edges() []Edge

	// AddEdge adds an edge to the graph between the vertices with the given
	// identifiers. Note that this may change the order of the lists returned
	// by Vertices() and Edges().
	AddEdge(int, int) error
}
// Vertex represents a vertex in a simple graph.
type Vertex interface {
	// Id returns the identifier associated with this vertex. This id is
	// guaranteed to be unique within any graph containing this vertex.
	Id() int
}
// Edge represents a directed edge between two vertices, from Source to Dest.
type Edge struct {
	Source Vertex
	Dest   Vertex
}
// adjMatrix is a graph with edges represented by an adjacency matrix. Vertices
// of an adjMatrix are identified by their index in the matrix, beginning at 0.
// TODO(guy): Unit test this requirement
type adjMatrix struct {
	n      int      // number of vertices (the matrix is n-by-n)
	matrix [][]bool // matrix[i][j] reports an edge between vertices i and j
}
// NewAdjMatrix returns the trivial graph on n vertices, with edges represented
// by an n-by-n adjacency matrix.
//
// The matrix starts fully false — including the diagonal, since a simple
// graph has no self-loops. (The previous implementation set the diagonal to
// true; Edges() skipped the diagonal so Size() was unaffected, but String()
// rendered spurious 1s for a supposedly edgeless graph.)
func NewAdjMatrix(n int) *adjMatrix {
	matrix := make([][]bool, n)
	for i := range matrix {
		matrix[i] = make([]bool, n) // all false: no edges
	}
	return &adjMatrix{
		n:      n,
		matrix: matrix,
	}
}
// inBounds reports whether i is a valid vertex index for this graph.
func (am *adjMatrix) inBounds(i int) bool {
	return 0 <= i && i < am.n
}
// getVertex returns the vertex with index i, or ErrVertexNotFound when i is
// out of range.
func (am *adjMatrix) getVertex(i int) (*adjMatrixVertex, error) {
	if am.inBounds(i) {
		return &adjMatrixVertex{index: i}, nil
	}
	return nil, ErrVertexNotFound
}
// AddEdge records an undirected edge between vertices i and j, failing with
// ErrVertexNotFound when either index is out of range.
func (am *adjMatrix) AddEdge(i, j int) error {
	if !am.inBounds(i) || !am.inBounds(j) {
		return ErrVertexNotFound
	}
	am.matrix[i][j], am.matrix[j][i] = true, true
	return nil
}
// Size returns the number of (undirected) edges in the graph.
func (am *adjMatrix) Size() int {
	return len(am.Edges())
}
// Order returns the number of vertices in the graph.
func (am *adjMatrix) Order() int {
	return am.n
}
// Vertices returns every vertex of the graph in ascending index order.
func (am *adjMatrix) Vertices() []Vertex {
	var res []Vertex
	for i := 0; i < am.n; i++ {
		v, err := am.getVertex(i)
		if err != nil {
			// Indices 0..n-1 are always in bounds, so this cannot fire.
			panicUnexpected("adjMatrix.Vertices", err)
		}
		res = append(res, v)
	}
	return res
}
// Edges returns every edge of the graph, each reported once with the lower
// vertex index as Source, in row-major order of the adjacency matrix.
func (am *adjMatrix) Edges() []Edge {
	var res []Edge
	for i := 0; i < am.n; i++ {
		for j := i + 1; j < am.n; j++ {
			if !am.matrix[i][j] {
				continue
			}
			src, err := am.getVertex(i)
			if err != nil {
				panicUnexpected("adjMatrix.Edges", err)
			}
			dst, err := am.getVertex(j)
			if err != nil {
				panicUnexpected("adjMatrix.Edges", err)
			}
			res = append(res, Edge{Source: src, Dest: dst})
		}
	}
	return res
}
// String renders the graph's order, size and full adjacency matrix for
// debugging purposes.
func (am *adjMatrix) String() string {
	res := fmt.Sprintf("Adjacency Matrix Order(%d) Size(%d)\n",
		am.Order(), am.Size())
	for _, row := range am.matrix {
		for _, set := range row {
			if set {
				res += "1 "
			} else {
				res += "0 "
			}
		}
		res += "\n"
	}
	return res
}
// adjMatrixVertex is a vertex of an adjMatrix graph, identified by its row
// index in the matrix.
type adjMatrixVertex struct {
	index int
}
// Id returns the vertex's index in the adjacency matrix, which serves as its
// unique identifier.
func (amv *adjMatrixVertex) Id() int {
	return amv.index
}
// panicUnexpected panics with a standard error message on errors that should
// never happen logically. This should only be called if there is a critical
// failure in plantri's code; fn names the function where the failure occurred.
func panicUnexpected(fn string, err error) {
	panic(fmt.Errorf("plantri: unexpected error in %s: %v", fn, err))
}
// Compile-time interface implementation checks: fail the build if adjMatrix
// stops satisfying Graph or adjMatrixVertex stops satisfying Vertex.
var _ Graph = new(adjMatrix)
var _ Vertex = new(adjMatrixVertex)
package graph
// Indexable is expected to be implemented by all vertices and edges of a graph
// so they can be indexed and uniquely referenced.
type Indexable interface {
	// Key produces a unique key representing the caller. This key must be
	// comparable in order to be used as the key of a map.
	Key() interface{}
}
// indexKey returns the value produced by Key() when v implements Indexable,
// and otherwise falls back to using v itself as the map key.
func indexKey(v interface{}) interface{} {
	if i, ok := v.(Indexable); ok {
		return i.Key()
	}
	return v
}
// IndexedVertices represents vertices of a graph indexed by key.
// The key is arbitrary but must be unique to each vertex.
type IndexedVertices map[interface{}]Vertex

// IndexedEdges represents edges of a graph indexed by key.
// The key is arbitrary but must be unique to each edge.
type IndexedEdges map[interface{}]*Edge

// HeadVerticesByTailVertex represents down edges of a graph by mapping lists
// of head vertices ("target nodes") to their associated tail vertex ("source node")
// represented by an index key unique to that vertex.
type HeadVerticesByTailVertex map[interface{}]IndexedVertices

// TailVerticesByHeadVertex represents up edges of a graph by mapping lists of
// tail vertices ("source nodes") to their associated head vertex ("target node")
// represented by an index key unique to that vertex.
type TailVerticesByHeadVertex map[interface{}]IndexedVertices
// Add indexes a Vertex under its index key, replacing any existing entry with
// the same key.
func (i IndexedVertices) Add(v Vertex) {
	i[indexKey(v)] = v
}
// Add indexes an Edge under its index key, replacing any existing entry with
// the same key.
func (i IndexedEdges) Add(e *Edge) {
	i[indexKey(e)] = e
}
// Connect indexes a head vertex for the given tail vertex, lazily creating
// the tail's vertex set on first use.
func (i HeadVerticesByTailVertex) Connect(tail, head Vertex) {
	// Compute the tail's index key once instead of re-deriving it on every
	// map access (the previous code called indexKey(tail) three times).
	k := indexKey(tail)
	if i[k] == nil {
		i[k] = make(IndexedVertices)
	}
	i[k].Add(head)
}
// Connect indexes a tail vertex for the given head vertex.
func (i TailVerticesByHeadVertex) Connect(head, tail Vertex) {
if i[indexKey(head)] == nil {
i[indexKey(head)] = make(IndexedVertices)
}
i[indexKey(head)].Add(tail)
} | graph/index.go | 0.81946 | 0.514278 | index.go | starcoder |
package overlay
import (
"encoding/json"
"github.com/mafredri/cdp/protocol/dom"
"github.com/mafredri/cdp/protocol/page"
"github.com/mafredri/cdp/protocol/runtime"
)
// GetHighlightObjectForTestArgs represents the arguments for GetHighlightObjectForTest in the Overlay domain.
//
// NOTE(review): this file appears to mirror Chrome DevTools Protocol command
// definitions and is likely generated — prefer regenerating over hand-editing
// (TODO confirm). Optional fields use pointers so unset values are omitted
// from the JSON payload; the Set* methods return the receiver for chaining.
type GetHighlightObjectForTestArgs struct {
	NodeID                dom.NodeID  `json:"nodeId"`                          // Id of the node to get highlight object for.
	IncludeDistance       *bool       `json:"includeDistance,omitempty"`       // Whether to include distance info.
	IncludeStyle          *bool       `json:"includeStyle,omitempty"`          // Whether to include style info.
	ColorFormat           ColorFormat `json:"colorFormat,omitempty"`           // The color format to get config with (default: hex).
	ShowAccessibilityInfo *bool       `json:"showAccessibilityInfo,omitempty"` // Whether to show accessibility info (default: true).
}

// NewGetHighlightObjectForTestArgs initializes GetHighlightObjectForTestArgs with the required arguments.
func NewGetHighlightObjectForTestArgs(nodeID dom.NodeID) *GetHighlightObjectForTestArgs {
	args := new(GetHighlightObjectForTestArgs)
	args.NodeID = nodeID
	return args
}

// SetIncludeDistance sets the IncludeDistance optional argument.
// Whether to include distance info.
func (a *GetHighlightObjectForTestArgs) SetIncludeDistance(includeDistance bool) *GetHighlightObjectForTestArgs {
	a.IncludeDistance = &includeDistance
	return a
}

// SetIncludeStyle sets the IncludeStyle optional argument. Whether to
// include style info.
func (a *GetHighlightObjectForTestArgs) SetIncludeStyle(includeStyle bool) *GetHighlightObjectForTestArgs {
	a.IncludeStyle = &includeStyle
	return a
}

// SetColorFormat sets the ColorFormat optional argument. The color
// format to get config with (default: hex).
func (a *GetHighlightObjectForTestArgs) SetColorFormat(colorFormat ColorFormat) *GetHighlightObjectForTestArgs {
	a.ColorFormat = colorFormat
	return a
}

// SetShowAccessibilityInfo sets the ShowAccessibilityInfo optional argument.
// Whether to show accessibility info (default: true).
func (a *GetHighlightObjectForTestArgs) SetShowAccessibilityInfo(showAccessibilityInfo bool) *GetHighlightObjectForTestArgs {
	a.ShowAccessibilityInfo = &showAccessibilityInfo
	return a
}

// GetHighlightObjectForTestReply represents the return values for GetHighlightObjectForTest in the Overlay domain.
type GetHighlightObjectForTestReply struct {
	Highlight json.RawMessage `json:"highlight"` // Highlight data for the node.
}
// GetGridHighlightObjectsForTestArgs represents the arguments for GetGridHighlightObjectsForTest in the Overlay domain.
type GetGridHighlightObjectsForTestArgs struct {
	NodeIDs []dom.NodeID `json:"nodeIds"` // Ids of the node to get highlight object for.
}

// NewGetGridHighlightObjectsForTestArgs initializes GetGridHighlightObjectsForTestArgs with the required arguments.
func NewGetGridHighlightObjectsForTestArgs(nodeIDs []dom.NodeID) *GetGridHighlightObjectsForTestArgs {
	args := new(GetGridHighlightObjectsForTestArgs)
	args.NodeIDs = nodeIDs
	return args
}

// GetGridHighlightObjectsForTestReply represents the return values for GetGridHighlightObjectsForTest in the Overlay domain.
// Highlights is left as raw JSON for the caller to decode as needed.
type GetGridHighlightObjectsForTestReply struct {
	Highlights json.RawMessage `json:"highlights"` // Grid Highlight data for the node ids provided.
}
// GetSourceOrderHighlightObjectForTestArgs represents the arguments for GetSourceOrderHighlightObjectForTest in the Overlay domain.
type GetSourceOrderHighlightObjectForTestArgs struct {
	NodeID dom.NodeID `json:"nodeId"` // Id of the node to highlight.
}

// NewGetSourceOrderHighlightObjectForTestArgs initializes GetSourceOrderHighlightObjectForTestArgs with the required arguments.
func NewGetSourceOrderHighlightObjectForTestArgs(nodeID dom.NodeID) *GetSourceOrderHighlightObjectForTestArgs {
	args := new(GetSourceOrderHighlightObjectForTestArgs)
	args.NodeID = nodeID
	return args
}

// GetSourceOrderHighlightObjectForTestReply represents the return values for GetSourceOrderHighlightObjectForTest in the Overlay domain.
// Highlight is left as raw JSON for the caller to decode as needed.
type GetSourceOrderHighlightObjectForTestReply struct {
	Highlight json.RawMessage `json:"highlight"` // Source order highlight data for the node id provided.
}
// HighlightFrameArgs represents the arguments for HighlightFrame in the Overlay domain.
// Optional color fields are pointers so unset values are omitted from the
// JSON payload; the Set* methods return the receiver for chaining.
type HighlightFrameArgs struct {
	FrameID             page.FrameID `json:"frameId"`                       // Identifier of the frame to highlight.
	ContentColor        *dom.RGBA    `json:"contentColor,omitempty"`        // The content box highlight fill color (default: transparent).
	ContentOutlineColor *dom.RGBA    `json:"contentOutlineColor,omitempty"` // The content box highlight outline color (default: transparent).
}

// NewHighlightFrameArgs initializes HighlightFrameArgs with the required arguments.
func NewHighlightFrameArgs(frameID page.FrameID) *HighlightFrameArgs {
	args := new(HighlightFrameArgs)
	args.FrameID = frameID
	return args
}

// SetContentColor sets the ContentColor optional argument. The
// content box highlight fill color (default: transparent).
func (a *HighlightFrameArgs) SetContentColor(contentColor dom.RGBA) *HighlightFrameArgs {
	a.ContentColor = &contentColor
	return a
}

// SetContentOutlineColor sets the ContentOutlineColor optional argument.
// The content box highlight outline color (default: transparent).
func (a *HighlightFrameArgs) SetContentOutlineColor(contentOutlineColor dom.RGBA) *HighlightFrameArgs {
	a.ContentOutlineColor = &contentOutlineColor
	return a
}
// HighlightNodeArgs represents the arguments for HighlightNode in the Overlay domain.
// The target node may be identified by exactly one of NodeID, BackendNodeID
// or ObjectID; optional fields are pointers so unset values are omitted from
// the JSON payload, and the Set* methods return the receiver for chaining.
type HighlightNodeArgs struct {
	HighlightConfig HighlightConfig         `json:"highlightConfig"`         // A descriptor for the highlight appearance.
	NodeID          *dom.NodeID             `json:"nodeId,omitempty"`        // Identifier of the node to highlight.
	BackendNodeID   *dom.BackendNodeID      `json:"backendNodeId,omitempty"` // Identifier of the backend node to highlight.
	ObjectID        *runtime.RemoteObjectID `json:"objectId,omitempty"`      // JavaScript object id of the node to be highlighted.
	Selector        *string                 `json:"selector,omitempty"`      // Selectors to highlight relevant nodes.
}

// NewHighlightNodeArgs initializes HighlightNodeArgs with the required arguments.
func NewHighlightNodeArgs(highlightConfig HighlightConfig) *HighlightNodeArgs {
	args := new(HighlightNodeArgs)
	args.HighlightConfig = highlightConfig
	return args
}

// SetNodeID sets the NodeID optional argument. Identifier of the node
// to highlight.
func (a *HighlightNodeArgs) SetNodeID(nodeID dom.NodeID) *HighlightNodeArgs {
	a.NodeID = &nodeID
	return a
}

// SetBackendNodeID sets the BackendNodeID optional argument.
// Identifier of the backend node to highlight.
func (a *HighlightNodeArgs) SetBackendNodeID(backendNodeID dom.BackendNodeID) *HighlightNodeArgs {
	a.BackendNodeID = &backendNodeID
	return a
}

// SetObjectID sets the ObjectID optional argument. JavaScript object
// id of the node to be highlighted.
func (a *HighlightNodeArgs) SetObjectID(objectID runtime.RemoteObjectID) *HighlightNodeArgs {
	a.ObjectID = &objectID
	return a
}

// SetSelector sets the Selector optional argument. Selectors to
// highlight relevant nodes.
func (a *HighlightNodeArgs) SetSelector(selector string) *HighlightNodeArgs {
	a.Selector = &selector
	return a
}
// HighlightQuadArgs represents the arguments for HighlightQuad in the Overlay domain.
// Optional color fields are pointers so unset values are omitted from the
// JSON payload; the Set* methods return the receiver for chaining.
type HighlightQuadArgs struct {
	Quad         dom.Quad  `json:"quad"`                   // Quad to highlight
	Color        *dom.RGBA `json:"color,omitempty"`        // The highlight fill color (default: transparent).
	OutlineColor *dom.RGBA `json:"outlineColor,omitempty"` // The highlight outline color (default: transparent).
}

// NewHighlightQuadArgs initializes HighlightQuadArgs with the required arguments.
func NewHighlightQuadArgs(quad dom.Quad) *HighlightQuadArgs {
	args := new(HighlightQuadArgs)
	args.Quad = quad
	return args
}

// SetColor sets the Color optional argument. The highlight fill color
// (default: transparent).
func (a *HighlightQuadArgs) SetColor(color dom.RGBA) *HighlightQuadArgs {
	a.Color = &color
	return a
}

// SetOutlineColor sets the OutlineColor optional argument. The
// highlight outline color (default: transparent).
func (a *HighlightQuadArgs) SetOutlineColor(outlineColor dom.RGBA) *HighlightQuadArgs {
	a.OutlineColor = &outlineColor
	return a
}
// HighlightRectArgs represents the arguments for HighlightRect in the Overlay domain.
// Coordinates are in viewport pixels; optional color fields are pointers so
// unset values are omitted, and the Set* methods return the receiver for
// chaining.
type HighlightRectArgs struct {
	X            int       `json:"x"`                      // X coordinate
	Y            int       `json:"y"`                      // Y coordinate
	Width        int       `json:"width"`                  // Rectangle width
	Height       int       `json:"height"`                 // Rectangle height
	Color        *dom.RGBA `json:"color,omitempty"`        // The highlight fill color (default: transparent).
	OutlineColor *dom.RGBA `json:"outlineColor,omitempty"` // The highlight outline color (default: transparent).
}

// NewHighlightRectArgs initializes HighlightRectArgs with the required arguments.
func NewHighlightRectArgs(x int, y int, width int, height int) *HighlightRectArgs {
	args := new(HighlightRectArgs)
	args.X = x
	args.Y = y
	args.Width = width
	args.Height = height
	return args
}

// SetColor sets the Color optional argument. The highlight fill color
// (default: transparent).
func (a *HighlightRectArgs) SetColor(color dom.RGBA) *HighlightRectArgs {
	a.Color = &color
	return a
}

// SetOutlineColor sets the OutlineColor optional argument. The
// highlight outline color (default: transparent).
func (a *HighlightRectArgs) SetOutlineColor(outlineColor dom.RGBA) *HighlightRectArgs {
	a.OutlineColor = &outlineColor
	return a
}
// HighlightSourceOrderArgs represents the arguments for HighlightSourceOrder in the Overlay domain.
// The target node may be identified by exactly one of NodeID, BackendNodeID
// or ObjectID; optional fields are pointers so unset values are omitted, and
// the Set* methods return the receiver for chaining.
type HighlightSourceOrderArgs struct {
	SourceOrderConfig SourceOrderConfig       `json:"sourceOrderConfig"`       // A descriptor for the appearance of the overlay drawing.
	NodeID            *dom.NodeID             `json:"nodeId,omitempty"`        // Identifier of the node to highlight.
	BackendNodeID     *dom.BackendNodeID      `json:"backendNodeId,omitempty"` // Identifier of the backend node to highlight.
	ObjectID          *runtime.RemoteObjectID `json:"objectId,omitempty"`      // JavaScript object id of the node to be highlighted.
}

// NewHighlightSourceOrderArgs initializes HighlightSourceOrderArgs with the required arguments.
func NewHighlightSourceOrderArgs(sourceOrderConfig SourceOrderConfig) *HighlightSourceOrderArgs {
	args := new(HighlightSourceOrderArgs)
	args.SourceOrderConfig = sourceOrderConfig
	return args
}

// SetNodeID sets the NodeID optional argument. Identifier of the node
// to highlight.
func (a *HighlightSourceOrderArgs) SetNodeID(nodeID dom.NodeID) *HighlightSourceOrderArgs {
	a.NodeID = &nodeID
	return a
}

// SetBackendNodeID sets the BackendNodeID optional argument.
// Identifier of the backend node to highlight.
func (a *HighlightSourceOrderArgs) SetBackendNodeID(backendNodeID dom.BackendNodeID) *HighlightSourceOrderArgs {
	a.BackendNodeID = &backendNodeID
	return a
}

// SetObjectID sets the ObjectID optional argument. JavaScript object
// id of the node to be highlighted.
func (a *HighlightSourceOrderArgs) SetObjectID(objectID runtime.RemoteObjectID) *HighlightSourceOrderArgs {
	a.ObjectID = &objectID
	return a
}
// SetInspectModeArgs represents the arguments for SetInspectMode in the Overlay domain.
// HighlightConfig is a pointer so an unset value is omitted from the JSON
// payload; the Set* method returns the receiver for chaining.
type SetInspectModeArgs struct {
	Mode            InspectMode      `json:"mode"`                      // Set an inspection mode.
	HighlightConfig *HighlightConfig `json:"highlightConfig,omitempty"` // A descriptor for the highlight appearance of hovered-over nodes. May be omitted if `enabled == false`.
}

// NewSetInspectModeArgs initializes SetInspectModeArgs with the required arguments.
func NewSetInspectModeArgs(mode InspectMode) *SetInspectModeArgs {
	args := new(SetInspectModeArgs)
	args.Mode = mode
	return args
}

// SetHighlightConfig sets the HighlightConfig optional argument. A
// descriptor for the highlight appearance of hovered-over nodes. May
// be omitted if `enabled == false`.
func (a *SetInspectModeArgs) SetHighlightConfig(highlightConfig HighlightConfig) *SetInspectModeArgs {
	a.HighlightConfig = &highlightConfig
	return a
}
// SetShowAdHighlightsArgs represents the arguments for SetShowAdHighlights in the Overlay domain.
type SetShowAdHighlightsArgs struct {
	Show bool `json:"show"` // True for showing ad highlights
}

// NewSetShowAdHighlightsArgs initializes SetShowAdHighlightsArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowAdHighlightsArgs(show bool) *SetShowAdHighlightsArgs {
	args := new(SetShowAdHighlightsArgs)
	args.Show = show
	return args
}
// SetPausedInDebuggerMessageArgs represents the arguments for SetPausedInDebuggerMessage in the Overlay domain.
type SetPausedInDebuggerMessageArgs struct {
	Message *string `json:"message,omitempty"` // The message to display, also triggers resume and step over controls.
}

// NewSetPausedInDebuggerMessageArgs initializes SetPausedInDebuggerMessageArgs with the required arguments.
// All fields are optional; an empty args value hides the overlay message.
func NewSetPausedInDebuggerMessageArgs() *SetPausedInDebuggerMessageArgs {
	args := new(SetPausedInDebuggerMessageArgs)
	return args
}

// SetMessage sets the Message optional argument. The message to
// display, also triggers resume and step over controls.
func (a *SetPausedInDebuggerMessageArgs) SetMessage(message string) *SetPausedInDebuggerMessageArgs {
	a.Message = &message
	return a
}
// SetShowDebugBordersArgs represents the arguments for SetShowDebugBorders in the Overlay domain.
type SetShowDebugBordersArgs struct {
	Show bool `json:"show"` // True for showing debug borders
}

// NewSetShowDebugBordersArgs initializes SetShowDebugBordersArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowDebugBordersArgs(show bool) *SetShowDebugBordersArgs {
	args := new(SetShowDebugBordersArgs)
	args.Show = show
	return args
}
// SetShowFPSCounterArgs represents the arguments for SetShowFPSCounter in the Overlay domain.
type SetShowFPSCounterArgs struct {
	Show bool `json:"show"` // True for showing the FPS counter
}

// NewSetShowFPSCounterArgs initializes SetShowFPSCounterArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowFPSCounterArgs(show bool) *SetShowFPSCounterArgs {
	args := new(SetShowFPSCounterArgs)
	args.Show = show
	return args
}
// SetShowGridOverlaysArgs represents the arguments for SetShowGridOverlays in the Overlay domain.
type SetShowGridOverlaysArgs struct {
	GridNodeHighlightConfigs []GridNodeHighlightConfig `json:"gridNodeHighlightConfigs"` // An array of node identifiers and descriptors for the highlight appearance.
}

// NewSetShowGridOverlaysArgs initializes SetShowGridOverlaysArgs with the required arguments.
// The slice is stored as-is (not copied); callers should not mutate it afterwards.
func NewSetShowGridOverlaysArgs(gridNodeHighlightConfigs []GridNodeHighlightConfig) *SetShowGridOverlaysArgs {
	args := new(SetShowGridOverlaysArgs)
	args.GridNodeHighlightConfigs = gridNodeHighlightConfigs
	return args
}
// SetShowFlexOverlaysArgs represents the arguments for SetShowFlexOverlays in the Overlay domain.
type SetShowFlexOverlaysArgs struct {
	FlexNodeHighlightConfigs []FlexNodeHighlightConfig `json:"flexNodeHighlightConfigs"` // An array of node identifiers and descriptors for the highlight appearance.
}

// NewSetShowFlexOverlaysArgs initializes SetShowFlexOverlaysArgs with the required arguments.
// The slice is stored as-is (not copied); callers should not mutate it afterwards.
func NewSetShowFlexOverlaysArgs(flexNodeHighlightConfigs []FlexNodeHighlightConfig) *SetShowFlexOverlaysArgs {
	args := new(SetShowFlexOverlaysArgs)
	args.FlexNodeHighlightConfigs = flexNodeHighlightConfigs
	return args
}
// SetShowScrollSnapOverlaysArgs represents the arguments for SetShowScrollSnapOverlays in the Overlay domain.
type SetShowScrollSnapOverlaysArgs struct {
	ScrollSnapHighlightConfigs []ScrollSnapHighlightConfig `json:"scrollSnapHighlightConfigs"` // An array of node identifiers and descriptors for the highlight appearance.
}

// NewSetShowScrollSnapOverlaysArgs initializes SetShowScrollSnapOverlaysArgs with the required arguments.
// The slice is stored as-is (not copied); callers should not mutate it afterwards.
func NewSetShowScrollSnapOverlaysArgs(scrollSnapHighlightConfigs []ScrollSnapHighlightConfig) *SetShowScrollSnapOverlaysArgs {
	args := new(SetShowScrollSnapOverlaysArgs)
	args.ScrollSnapHighlightConfigs = scrollSnapHighlightConfigs
	return args
}
// SetShowContainerQueryOverlaysArgs represents the arguments for SetShowContainerQueryOverlays in the Overlay domain.
type SetShowContainerQueryOverlaysArgs struct {
	ContainerQueryHighlightConfigs []ContainerQueryHighlightConfig `json:"containerQueryHighlightConfigs"` // An array of node identifiers and descriptors for the highlight appearance.
}

// NewSetShowContainerQueryOverlaysArgs initializes SetShowContainerQueryOverlaysArgs with the required arguments.
// The slice is stored as-is (not copied); callers should not mutate it afterwards.
func NewSetShowContainerQueryOverlaysArgs(containerQueryHighlightConfigs []ContainerQueryHighlightConfig) *SetShowContainerQueryOverlaysArgs {
	args := new(SetShowContainerQueryOverlaysArgs)
	args.ContainerQueryHighlightConfigs = containerQueryHighlightConfigs
	return args
}
// SetShowPaintRectsArgs represents the arguments for SetShowPaintRects in the Overlay domain.
// Note the protocol names this flag "result" rather than "show".
type SetShowPaintRectsArgs struct {
	Result bool `json:"result"` // True for showing paint rectangles
}

// NewSetShowPaintRectsArgs initializes SetShowPaintRectsArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowPaintRectsArgs(result bool) *SetShowPaintRectsArgs {
	args := new(SetShowPaintRectsArgs)
	args.Result = result
	return args
}
// SetShowLayoutShiftRegionsArgs represents the arguments for SetShowLayoutShiftRegions in the Overlay domain.
// Note the protocol names this flag "result" rather than "show".
type SetShowLayoutShiftRegionsArgs struct {
	Result bool `json:"result"` // True for showing layout shift regions
}

// NewSetShowLayoutShiftRegionsArgs initializes SetShowLayoutShiftRegionsArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowLayoutShiftRegionsArgs(result bool) *SetShowLayoutShiftRegionsArgs {
	args := new(SetShowLayoutShiftRegionsArgs)
	args.Result = result
	return args
}
// SetShowScrollBottleneckRectsArgs represents the arguments for SetShowScrollBottleneckRects in the Overlay domain.
type SetShowScrollBottleneckRectsArgs struct {
	Show bool `json:"show"` // True for showing scroll bottleneck rects
}

// NewSetShowScrollBottleneckRectsArgs initializes SetShowScrollBottleneckRectsArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowScrollBottleneckRectsArgs(show bool) *SetShowScrollBottleneckRectsArgs {
	args := new(SetShowScrollBottleneckRectsArgs)
	args.Show = show
	return args
}
// SetShowHitTestBordersArgs represents the arguments for SetShowHitTestBorders in the Overlay domain.
type SetShowHitTestBordersArgs struct {
	Show bool `json:"show"` // True for showing hit-test borders
}

// NewSetShowHitTestBordersArgs initializes SetShowHitTestBordersArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowHitTestBordersArgs(show bool) *SetShowHitTestBordersArgs {
	args := new(SetShowHitTestBordersArgs)
	args.Show = show
	return args
}
// SetShowWebVitalsArgs represents the arguments for SetShowWebVitals in the Overlay domain.
type SetShowWebVitalsArgs struct {
	Show bool `json:"show"` // No description.
}

// NewSetShowWebVitalsArgs initializes SetShowWebVitalsArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowWebVitalsArgs(show bool) *SetShowWebVitalsArgs {
	args := new(SetShowWebVitalsArgs)
	args.Show = show
	return args
}
// SetShowViewportSizeOnResizeArgs represents the arguments for SetShowViewportSizeOnResize in the Overlay domain.
type SetShowViewportSizeOnResizeArgs struct {
	Show bool `json:"show"` // Whether to paint size or not.
}

// NewSetShowViewportSizeOnResizeArgs initializes SetShowViewportSizeOnResizeArgs with the required arguments.
// The returned args value has no optional fields and is ready to send.
func NewSetShowViewportSizeOnResizeArgs(show bool) *SetShowViewportSizeOnResizeArgs {
	args := new(SetShowViewportSizeOnResizeArgs)
	args.Show = show
	return args
}
// SetShowHingeArgs represents the arguments for SetShowHinge in the Overlay domain.
type SetShowHingeArgs struct {
	HingeConfig *HingeConfig `json:"hingeConfig,omitempty"` // hinge data, null means hideHinge
}

// NewSetShowHingeArgs initializes SetShowHingeArgs with the required arguments.
// All fields are optional; leaving HingeConfig unset hides the hinge.
func NewSetShowHingeArgs() *SetShowHingeArgs {
	args := new(SetShowHingeArgs)
	return args
}

// SetHingeConfig sets the HingeConfig optional argument. hinge data,
// null means hideHinge
func (a *SetShowHingeArgs) SetHingeConfig(hingeConfig HingeConfig) *SetShowHingeArgs {
	a.HingeConfig = &hingeConfig
	return a
}
package geometry
import (
"github.com/Glenn-Gray-Labs/g3n/gls"
"github.com/Glenn-Gray-Labs/g3n/math32"
)
// NewCube creates a cube geometry with the specified edge size.
// Equivalent to a single-segment box with equal dimensions.
func NewCube(size float32) *Geometry {
	return NewSegmentedBox(size, size, size, 1, 1, 1)
}
// NewSegmentedCube creates a segmented cube geometry with the specified size
// and the same number of segments along every dimension.
func NewSegmentedCube(size float32, segments int) *Geometry {
	return NewSegmentedBox(size, size, size, segments, segments, segments)
}
// NewBox creates an unsegmented box geometry with the specified width (x),
// height (y), and length (z).
func NewBox(width, height, length float32) *Geometry {
	return NewSegmentedBox(width, height, length, 1, 1, 1)
}
// NewSegmentedBox creates a segmented box geometry with the specified width, height, length, and number of segments in each dimension.
func NewSegmentedBox(width, height, length float32, widthSegments, heightSegments, lengthSegments int) *Geometry {
box := NewGeometry()
// Validate arguments
if widthSegments <= 0 || heightSegments <= 0 || lengthSegments <= 0 {
panic("Invalid argument(s). All segment quantities should be greater than zero.")
}
// Create buffers
positions := math32.NewArrayF32(0, 16)
normals := math32.NewArrayF32(0, 16)
uvs := math32.NewArrayF32(0, 16)
indices := math32.NewArrayU32(0, 16)
// Internal function to build each of the six box planes
buildPlane := func(u, v string, udir, vdir int, width, height, length float32, materialIndex uint) {
offset := positions.Len() / 3
gridX := widthSegments
gridY := heightSegments
var w string
if (u == "x" && v == "y") || (u == "y" && v == "x") {
w = "z"
} else if (u == "x" && v == "z") || (u == "z" && v == "x") {
w = "y"
gridY = lengthSegments
} else if (u == "z" && v == "y") || (u == "y" && v == "z") {
w = "x"
gridX = lengthSegments
}
var normal math32.Vector3
if length > 0 {
normal.SetByName(w, 1)
} else {
normal.SetByName(w, -1)
}
wHalf := width / 2
hHalf := height / 2
gridX1 := gridX + 1
gridY1 := gridY + 1
segmentWidth := width / float32(gridX)
segmentHeight := height / float32(gridY)
// Generate the plane vertices, normals, and uv coordinates
for iy := 0; iy < gridY1; iy++ {
for ix := 0; ix < gridX1; ix++ {
var vector math32.Vector3
vector.SetByName(u, (float32(ix)*segmentWidth-wHalf)*float32(udir))
vector.SetByName(v, (float32(iy)*segmentHeight-hHalf)*float32(vdir))
vector.SetByName(w, length)
positions.AppendVector3(&vector)
normals.AppendVector3(&normal)
uvs.Append(float32(ix)/float32(gridX), float32(1)-(float32(iy)/float32(gridY)))
}
}
// Generate the indices for the vertices, normals and uv coordinates
gstart := indices.Size()
for iy := 0; iy < gridY; iy++ {
for ix := 0; ix < gridX; ix++ {
a := ix + gridX1*iy
b := ix + gridX1*(iy+1)
c := (ix + 1) + gridX1*(iy+1)
d := (ix + 1) + gridX1*iy
indices.Append(uint32(a+offset), uint32(b+offset), uint32(d+offset), uint32(b+offset), uint32(c+offset), uint32(d+offset))
}
}
gcount := indices.Size() - gstart
box.AddGroup(gstart, gcount, int(materialIndex))
}
wHalf := width / 2
hHalf := height / 2
lHalf := length / 2
buildPlane("z", "y", -1, -1, length, height, wHalf, 0) // px
buildPlane("z", "y", 1, -1, length, height, -wHalf, 1) // nx
buildPlane("x", "z", 1, 1, width, length, hHalf, 2) // py
buildPlane("x", "z", 1, -1, width, length, -hHalf, 3) // ny
buildPlane("x", "y", 1, -1, width, height, lHalf, 4) // pz
buildPlane("x", "y", -1, -1, width, height, -lHalf, 5) // nz
box.SetIndices(indices)
box.AddVBO(gls.NewVBO(positions).AddAttrib(gls.VertexPosition))
box.AddVBO(gls.NewVBO(normals).AddAttrib(gls.VertexNormal))
box.AddVBO(gls.NewVBO(uvs).AddAttrib(gls.VertexTexcoord))
// Update bounding box
box.boundingBox.Min = math32.Vector3{-wHalf, -hHalf, -lHalf}
box.boundingBox.Max = math32.Vector3{wHalf, hHalf, lHalf}
box.boundingBoxValid = true
// Update bounding sphere
box.boundingSphere.Radius = math32.Sqrt(math32.Pow(width/2, 2) + math32.Pow(height/2, 2) + math32.Pow(length/2, 2))
box.boundingSphereValid = true
// Update area
box.area = 2*width + 2*height + 2*length
box.areaValid = true
// Update volume
box.volume = width * height * length
box.volumeValid = true
return box
} | geometry/box.go | 0.846609 | 0.583975 | box.go | starcoder |
package juroku
import (
"errors"
"image"
"image/color"
"math"
"sort"
"github.com/disintegration/gift"
)
// GetPalette returns the palette of the image.
func GetPalette(img image.Image) color.Palette {
colors := make(map[color.RGBA]bool)
for y := img.Bounds().Min.Y; y < img.Bounds().Max.Y; y++ {
for x := img.Bounds().Min.X; x < img.Bounds().Max.X; x++ {
r, g, b, a := img.At(x, y).RGBA()
colors[color.RGBA{
R: uint8(r >> 8),
G: uint8(g >> 8),
B: uint8(b >> 8),
A: uint8(a >> 8),
}] = true
}
}
var palette color.Palette
for col := range colors {
palette = append(palette, col)
}
return palette
}
func getScore(edges image.Image, x, y int) float64 {
r, _, _, _ := edges.At(x, y).RGBA()
return math.Log(float64(r)+7.0)*0.65 + 0.45
}
// ChunkImage chunks an image following the ComputerCraft requirements of
// maximum of 2 colors per 2x3 chunk of pixels and returns it. It is assumed
// that the palette has already been reduced to 16 colors.
//
// For each 2x3 block, colors are ranked by the summed edge weight of their
// pixels (see getScore); the two highest-scoring colors are kept and every
// other pixel in the block is remapped to the nearest of those two.
func ChunkImage(img image.Image) (image.Image, error) {
	if img.Bounds().Dx()%2 != 0 {
		return nil, errors.New("juroku: image width must be a multiple of 2")
	}
	if img.Bounds().Dy()%3 != 0 {
		return nil, errors.New("juroku: image height must be a multiple of 3")
	}
	// Edge-detected grayscale copy of the input, used to weight colors that
	// lie on edges more heavily when choosing the two survivors per chunk.
	edges := image.NewRGBA(img.Bounds())
	g := gift.New(gift.Sobel(), gift.Grayscale())
	g.Draw(edges, img)
	output := image.NewRGBA(img.Bounds())
	// pixel records one source pixel's color and location.
	type pixel struct {
		color color.RGBA
		image image.Image
		x     int
		y     int
	}
	// Walk the image in 2 (wide) x 3 (tall) chunks.
	for y := img.Bounds().Min.Y; y < img.Bounds().Max.Y; y += 3 {
		for x := img.Bounds().Min.X; x < img.Bounds().Max.X; x += 2 {
			var pixels []pixel
			// Accumulated edge score per distinct color in this chunk.
			pixelScore := make(map[color.RGBA]float64)
			for dy := 0; dy < 3; dy++ {
				for dx := 0; dx < 2; dx++ {
					r, g, b, a := img.At(x+dx, y+dy).RGBA()
					col := color.RGBA{
						R: uint8(r >> 8),
						G: uint8(g >> 8),
						B: uint8(b >> 8),
						A: uint8(a >> 8),
					}
					pixels = append(pixels, pixel{
						color: col,
						image: img,
						x:     x + dx,
						y:     y + dy,
					})
					pixelScore[col] += getScore(edges, x+dx, y+dy)
				}
			}
			type colorCount struct {
				color  color.RGBA
				weight float64
			}
			var aggrPixels []colorCount
			for k, v := range pixelScore {
				aggrPixels = append(aggrPixels, colorCount{
					color:  k,
					weight: v,
				})
			}
			if len(aggrPixels) < 3 {
				// At most 2 distinct colors already; copy the chunk unchanged.
				for _, pix := range pixels {
					output.Set(pix.x, pix.y, pix.color)
				}
				continue
			}
			// Keep the two highest-weighted colors.
			sort.Slice(aggrPixels, func(i int, j int) bool {
				return aggrPixels[i].weight > aggrPixels[j].weight
			})
			// Remap every other pixel to the nearest surviving color.
			for _, pix := range pixels {
				if pix.color != aggrPixels[0].color &&
					pix.color != aggrPixels[1].color {
					output.Set(pix.x, pix.y,
						color.Palette{
							aggrPixels[0].color,
							aggrPixels[1].color,
						}.Convert(pix.color))
				} else {
					output.Set(pix.x, pix.y, pix.color)
				}
			}
		}
	}
	return output, nil
}
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"math"
)
//go:noinline
func fcmplt(a, b float64, x uint64) uint64 {
	// Returns 0 when a < b, otherwise x. Any comparison with NaN is
	// false, so NaN operands leave x untouched.
	if a < b {
		return 0
	}
	return x
}
//go:noinline
func fcmple(a, b float64, x uint64) uint64 {
	// Returns 0 when a <= b, otherwise x. Any comparison with NaN is
	// false, so NaN operands leave x untouched.
	if a <= b {
		return 0
	}
	return x
}
//go:noinline
func fcmpgt(a, b float64, x uint64) uint64 {
	// Returns 0 when a > b, otherwise x. Any comparison with NaN is
	// false, so NaN operands leave x untouched.
	if a > b {
		return 0
	}
	return x
}
//go:noinline
func fcmpge(a, b float64, x uint64) uint64 {
	// Returns 0 when a >= b, otherwise x. Any comparison with NaN is
	// false, so NaN operands leave x untouched.
	if a >= b {
		return 0
	}
	return x
}
//go:noinline
func fcmpeq(a, b float64, x uint64) uint64 {
	// Returns 0 when a == b, otherwise x. NaN never compares equal to
	// anything (including itself), so NaN operands leave x untouched.
	if a == b {
		return 0
	}
	return x
}
//go:noinline
func fcmpne(a, b float64, x uint64) uint64 {
	// Returns 0 when a != b, otherwise x. NaN is unequal to everything
	// (including itself), so NaN operands zero x here.
	if a != b {
		return 0
	}
	return x
}
// main exercises every comparison helper against ordered and NaN operands.
// Expectation: any comparison involving NaN is false (so x passes through),
// except !=, which is true for NaN operands (so x is zeroed).
// NOTE(review): appears to be a compiler regression fixture — the table
// pins codegen for float comparisons; keep rows exactly as written.
func main() {
	type fn func(a, b float64, x uint64) uint64
	type testCase struct {
		f       fn
		a, b    float64
		x, want uint64
	}
	NaN := math.NaN()
	for _, t := range []testCase{
		{fcmplt, 1.0, 1.0, 123, 123},
		{fcmple, 1.0, 1.0, 123, 0},
		{fcmpgt, 1.0, 1.0, 123, 123},
		{fcmpge, 1.0, 1.0, 123, 0},
		{fcmpeq, 1.0, 1.0, 123, 0},
		{fcmpne, 1.0, 1.0, 123, 123},

		{fcmplt, 1.0, 2.0, 123, 0},
		{fcmple, 1.0, 2.0, 123, 0},
		{fcmpgt, 1.0, 2.0, 123, 123},
		{fcmpge, 1.0, 2.0, 123, 123},
		{fcmpeq, 1.0, 2.0, 123, 123},
		{fcmpne, 1.0, 2.0, 123, 0},

		{fcmplt, 2.0, 1.0, 123, 123},
		{fcmple, 2.0, 1.0, 123, 123},
		{fcmpgt, 2.0, 1.0, 123, 0},
		{fcmpge, 2.0, 1.0, 123, 0},
		{fcmpeq, 2.0, 1.0, 123, 123},
		{fcmpne, 2.0, 1.0, 123, 0},

		{fcmplt, 1.0, NaN, 123, 123},
		{fcmple, 1.0, NaN, 123, 123},
		{fcmpgt, 1.0, NaN, 123, 123},
		{fcmpge, 1.0, NaN, 123, 123},
		{fcmpeq, 1.0, NaN, 123, 123},
		{fcmpne, 1.0, NaN, 123, 0},

		{fcmplt, NaN, 1.0, 123, 123},
		{fcmple, NaN, 1.0, 123, 123},
		{fcmpgt, NaN, 1.0, 123, 123},
		{fcmpge, NaN, 1.0, 123, 123},
		{fcmpeq, NaN, 1.0, 123, 123},
		{fcmpne, NaN, 1.0, 123, 0},

		{fcmplt, NaN, NaN, 123, 123},
		{fcmple, NaN, NaN, 123, 123},
		{fcmpgt, NaN, NaN, 123, 123},
		{fcmpge, NaN, NaN, 123, 123},
		{fcmpeq, NaN, NaN, 123, 123},
		{fcmpne, NaN, NaN, 123, 0},
	} {
		got := t.f(t.a, t.b, t.x)
		if got != t.want {
			panic(fmt.Sprintf("want %v, got %v", t.want, got))
		}
	}
}
package bigquery
import (
"errors"
"fmt"
"strconv"
"time"
bq "google.golang.org/api/bigquery/v2"
)
// Value stores the contents of a single cell from a BigQuery result.
// Its dynamic type depends on the field's schema type (see convertBasicType).
type Value interface{}

// ValueLoader stores a slice of Values representing a result row from a Read operation.
// See RowIterator.Next for more information.
type ValueLoader interface {
	Load(v []Value, s Schema) error
}
// ValueList converts a []Value to implement ValueLoader.
type ValueList []Value

// Load appends every value of the row in v to the list. The schema is ignored.
func (vs *ValueList) Load(v []Value, _ Schema) error {
	for _, val := range v {
		*vs = append(*vs, val)
	}
	return nil
}
// valueMap converts a map[string]Value to implement ValueLoader.
type valueMap map[string]Value

// Load stores the row values in the map keyed by field name, lazily
// allocating the map on first use.
func (vm *valueMap) Load(v []Value, s Schema) error {
	if *vm == nil {
		*vm = make(valueMap)
	}
	for i, field := range s {
		(*vm)[field.Name] = v[i]
	}
	return nil
}
// A ValueSaver returns a row of data to be inserted into a table.
type ValueSaver interface {
	// Save returns a row to be inserted into a BigQuery table, represented
	// as a map from field name to Value.
	// If insertID is non-empty, BigQuery will use it to de-duplicate
	// insertions of this row on a best-effort basis.
	Save() (row map[string]Value, insertID string, err error)
}

// ValuesSaver implements ValueSaver for a slice of Values.
type ValuesSaver struct {
	Schema Schema

	// If non-empty, BigQuery will use InsertID to de-duplicate insertions
	// of this row on a best-effort basis.
	InsertID string

	Row []Value
}

// Save implements ValueSaver. It maps Row onto Schema field names; the
// error is non-nil if Row and Schema lengths disagree or a nested record
// is malformed (see valuesToMap).
func (vls *ValuesSaver) Save() (map[string]Value, string, error) {
	m, err := valuesToMap(vls.Row, vls.Schema)
	return m, vls.InsertID, err
}
// valuesToMap converts a row of values into a map keyed by schema field name,
// recursing into RECORD fields. It fails if vs and schema lengths differ or a
// nested record value is not a []Value.
func valuesToMap(vs []Value, schema Schema) (map[string]Value, error) {
	if len(vs) != len(schema) {
		return nil, errors.New("Schema does not match length of row to be inserted")
	}
	row := make(map[string]Value)
	for i, fs := range schema {
		if fs.Type != RecordFieldType {
			row[fs.Name] = vs[i]
			continue
		}
		// Nested record: recurse with the field's sub-schema.
		nested, ok := vs[i].([]Value)
		if !ok {
			return nil, errors.New("Nested record is not a []Value")
		}
		sub, err := valuesToMap(nested, fs.Schema)
		if err != nil {
			return nil, err
		}
		row[fs.Name] = sub
	}
	return row, nil
}
// convertRows converts a series of TableRows into a series of Value slices.
// schema is used to interpret the data from rows; its length must match the
// length of each row.
func convertRows(rows []*bq.TableRow, schema Schema) ([][]Value, error) {
	var rs [][]Value
	for _, r := range rows {
		row, err := convertRow(r, schema)
		if err != nil {
			return nil, err
		}
		rs = append(rs, row)
	}
	return rs, nil
}

// convertRow converts one TableRow into a []Value, using schema to decide
// each cell's concrete Go type. It fails if the schema and row lengths differ.
func convertRow(r *bq.TableRow, schema Schema) ([]Value, error) {
	if len(schema) != len(r.F) {
		return nil, errors.New("schema length does not match row length")
	}
	var values []Value
	for i, cell := range r.F {
		fs := schema[i]
		v, err := convertValue(cell.V, fs.Type, fs.Schema)
		if err != nil {
			return nil, err
		}
		values = append(values, v)
	}
	return values, nil
}
// convertValue dispatches on the raw JSON shape of a cell value:
// nil stays nil, a JSON array is a repeated field, a JSON object is a
// nested record, and a string is a scalar parsed per typ.
func convertValue(val interface{}, typ FieldType, schema Schema) (Value, error) {
	switch val := val.(type) {
	case nil:
		return nil, nil
	case []interface{}:
		return convertRepeatedRecord(val, typ, schema)
	case map[string]interface{}:
		return convertNestedRecord(val, schema)
	case string:
		return convertBasicType(val, typ)
	default:
		return nil, fmt.Errorf("got value %v; expected a value of type %s", val, typ)
	}
}

// convertRepeatedRecord converts a repeated field: each element is wrapped by
// the API in a map with the single key "v".
func convertRepeatedRecord(vals []interface{}, typ FieldType, schema Schema) (Value, error) {
	var values []Value
	for _, cell := range vals {
		// each cell contains a single entry, keyed by "v"
		val := cell.(map[string]interface{})["v"]
		v, err := convertValue(val, typ, schema)
		if err != nil {
			return nil, err
		}
		values = append(values, v)
	}
	return values, nil
}

// convertNestedRecord is similar to convertRow, as a record has the same
// structure as a row. Nested records are wrapped in a map with a single key, "f".
func convertNestedRecord(val map[string]interface{}, schema Schema) (Value, error) {
	record := val["f"].([]interface{})
	if len(record) != len(schema) {
		return nil, errors.New("schema length does not match record length")
	}
	var values []Value
	for i, cell := range record {
		// each cell contains a single entry, keyed by "v"
		val := cell.(map[string]interface{})["v"]
		fs := schema[i]
		v, err := convertValue(val, fs.Type, fs.Schema)
		if err != nil {
			return nil, err
		}
		values = append(values, v)
	}
	return values, nil
}
// convertBasicType returns val as an interface with a concrete type specified
// by typ. Scalar cell values arrive from the BigQuery API as strings:
// STRING -> string, INTEGER -> int, FLOAT -> float64, BOOLEAN -> bool,
// TIMESTAMP (fractional seconds since epoch) -> time.Time.
func convertBasicType(val string, typ FieldType) (Value, error) {
	switch typ {
	case StringFieldType:
		return val, nil
	case IntegerFieldType:
		// NOTE(review): Atoi yields a platform-sized int (32-bit on some
		// targets); confirm whether int64 via ParseInt is required here.
		return strconv.Atoi(val)
	case FloatFieldType:
		return strconv.ParseFloat(val, 64)
	case BooleanFieldType:
		return strconv.ParseBool(val)
	case TimestampFieldType:
		// NOTE(review): routing through float64 loses sub-microsecond
		// precision for large epoch values — confirm acceptable.
		f, err := strconv.ParseFloat(val, 64)
		return Value(time.Unix(0, int64(f*1e9))), err
	default:
		return nil, errors.New("unrecognized type")
	}
}
package geo
import (
"fmt"
"strconv"
"strings"
"github.com/cockroachdb/cockroach/pkg/geo/geopb"
"github.com/cockroachdb/cockroach/pkg/geo/geos"
"github.com/cockroachdb/cockroach/pkg/util"
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/ewkb"
"github.com/twpayne/go-geom/encoding/ewkbhex"
"github.com/twpayne/go-geom/encoding/geojson"
"github.com/twpayne/go-geom/encoding/wkb"
)
// parseEWKBRaw creates a geopb.SpatialObject from an EWKB
// without doing any SRID based checks.
// You most likely want parseEWKB instead.
func parseEWKBRaw(in geopb.EWKB) (geopb.SpatialObject, error) {
	t, err := ewkb.Unmarshal(in)
	if err != nil {
		return geopb.SpatialObject{}, err
	}
	return spatialObjectFromGeom(t)
}

// parseAmbiguousText parses a text as a number of different options
// that is available in the geospatial world using the first character as
// a heuristic: '0' -> hex EWKB, 0x00/0x01 -> binary EWKB (big/little endian
// marker), '{' -> GeoJSON, anything else -> EWKT.
// This matches the PostGIS direct cast from a string to GEOGRAPHY/GEOMETRY.
func parseAmbiguousText(str string, defaultSRID geopb.SRID) (geopb.SpatialObject, error) {
	if len(str) == 0 {
		return geopb.SpatialObject{}, fmt.Errorf("geo: parsing empty string to geo type")
	}
	switch str[0] {
	case '0':
		return parseEWKBHex(str, defaultSRID)
	case 0x00, 0x01:
		return parseEWKB([]byte(str), defaultSRID, DefaultSRIDIsHint)
	case '{':
		return parseGeoJSON([]byte(str), defaultSRID)
	}
	return parseEWKT(geopb.EWKT(str), defaultSRID, DefaultSRIDIsHint)
}
// parseEWKBHex takes a given str assumed to be in EWKB hex and transforms it
// into a SpatialObject. An embedded SRID, if present, wins over defaultSRID.
func parseEWKBHex(str string, defaultSRID geopb.SRID) (geopb.SpatialObject, error) {
	t, err := ewkbhex.Decode(str)
	if err != nil {
		return geopb.SpatialObject{}, err
	}
	// TODO(otan): check SRID is valid against spatial_ref_sys.
	if defaultSRID != 0 && t.SRID() == 0 {
		adjustGeomSRID(t, defaultSRID)
	}
	return spatialObjectFromGeom(t)
}

// parseEWKB takes given bytes assumed to be EWKB and transforms it into a SpatialObject.
// The defaultSRID will overwrite any SRID set in the EWKB if overwrite is true.
func parseEWKB(
	b []byte, defaultSRID geopb.SRID, overwrite defaultSRIDOverwriteSetting,
) (geopb.SpatialObject, error) {
	t, err := ewkb.Unmarshal(b)
	if err != nil {
		return geopb.SpatialObject{}, err
	}
	// TODO(otan): check SRID is valid against spatial_ref_sys.
	if overwrite == DefaultSRIDShouldOverwrite || (defaultSRID != 0 && t.SRID() == 0) {
		adjustGeomSRID(t, defaultSRID)
	}
	return spatialObjectFromGeom(t)
}

// parseWKB takes given bytes assumed to be WKB and transforms it into a SpatialObject.
// Plain WKB carries no SRID, so defaultSRID is applied unconditionally.
func parseWKB(b []byte, defaultSRID geopb.SRID) (geopb.SpatialObject, error) {
	t, err := wkb.Unmarshal(b)
	if err != nil {
		return geopb.SpatialObject{}, err
	}
	adjustGeomSRID(t, defaultSRID)
	return spatialObjectFromGeom(t)
}

// parseGeoJSON takes given bytes assumed to be GeoJSON and transforms it into a SpatialObject.
func parseGeoJSON(b []byte, defaultSRID geopb.SRID) (geopb.SpatialObject, error) {
	var f geojson.Feature
	if err := f.UnmarshalJSON(b); err != nil {
		return geopb.SpatialObject{}, err
	}
	t := f.Geometry
	// TODO(otan): check SRID from properties.
	if defaultSRID != 0 && t.SRID() == 0 {
		adjustGeomSRID(t, defaultSRID)
	}
	return spatialObjectFromGeom(t)
}
// adjustGeomSRID adjusts the SRID of a given geom.T.
// Ideally SetSRID is an interface of geom.T, but that is not the case,
// hence the exhaustive type switch; an unknown concrete type is a
// programming error and panics.
func adjustGeomSRID(t geom.T, srid geopb.SRID) {
	switch t := t.(type) {
	case *geom.Point:
		t.SetSRID(int(srid))
	case *geom.LineString:
		t.SetSRID(int(srid))
	case *geom.Polygon:
		t.SetSRID(int(srid))
	case *geom.GeometryCollection:
		t.SetSRID(int(srid))
	case *geom.MultiPoint:
		t.SetSRID(int(srid))
	case *geom.MultiLineString:
		t.SetSRID(int(srid))
	case *geom.MultiPolygon:
		t.SetSRID(int(srid))
	default:
		panic(fmt.Errorf("geo: unknown geom type: %v", t))
	}
}

// sridPrefix is the marker that introduces an SRID declaration in EWKT,
// e.g. "SRID=4326;POINT(1 2)".
const sridPrefix = "SRID="
const sridPrefixLen = len(sridPrefix)

// defaultSRIDOverwriteSetting controls whether a caller-supplied default
// SRID replaces one embedded in the parsed input.
type defaultSRIDOverwriteSetting bool

const (
	// DefaultSRIDShouldOverwrite implies the parsing function should overwrite
	// the SRID with the defaultSRID.
	DefaultSRIDShouldOverwrite defaultSRIDOverwriteSetting = true
	// DefaultSRIDIsHint implies that the default SRID is only a hint
	// and if the SRID is provided by the given EWKT/EWKB, it should be
	// used instead.
	DefaultSRIDIsHint defaultSRIDOverwriteSetting = false
)
// parseEWKT decodes a WKT string and transforms it into a SpatialObject.
// The defaultSRID will overwrite any SRID set in the EWKT if overwrite is true.
// An optional "SRID=<n>;" prefix (case-insensitive) is stripped before the
// remaining WKT is handed to GEOS for conversion to EWKB.
func parseEWKT(
	str geopb.EWKT, defaultSRID geopb.SRID, overwrite defaultSRIDOverwriteSetting,
) (geopb.SpatialObject, error) {
	srid := defaultSRID
	if hasPrefixIgnoreCase(string(str), sridPrefix) {
		end := strings.Index(string(str[sridPrefixLen:]), ";")
		if end != -1 {
			if overwrite != DefaultSRIDShouldOverwrite {
				sridInt64, err := strconv.ParseInt(string(str[sridPrefixLen:sridPrefixLen+end]), 10, 32)
				if err != nil {
					return geopb.SpatialObject{}, err
				}
				// Only use the parsed SRID if the parsed SRID is not zero and it was not
				// to be overwritten by the DefaultSRID parameter.
				if sridInt64 != 0 {
					srid = geopb.SRID(sridInt64)
				}
			}
			// Strip the "SRID=<n>;" prefix, leaving plain WKT.
			str = str[sridPrefixLen+end+1:]
		} else {
			return geopb.SpatialObject{}, fmt.Errorf(
				"geo: failed to find ; character with SRID declaration during EWKT decode: %q",
				str,
			)
		}
	}
	ewkb, err := geos.WKTToEWKB(geopb.WKT(str), srid)
	if err != nil {
		return geopb.SpatialObject{}, err
	}
	return parseEWKBRaw(ewkb)
}
// hasPrefixIgnoreCase returns whether a given str begins with a prefix, ignoring case.
// It assumes that the string and prefix contains only ASCII bytes.
func hasPrefixIgnoreCase(str string, prefix string) bool {
if len(str) < len(prefix) {
return false
}
for i := 0; i < len(prefix); i++ {
if util.ToLowerSingleByte(str[i]) != util.ToLowerSingleByte(prefix[i]) {
return false
}
}
return true
} | pkg/geo/parse.go | 0.573917 | 0.416559 | parse.go | starcoder |
package htm
import (
"fmt"
"github.com/cznic/mathutil"
"github.com/nupic-community/htm/utils"
"github.com/zacg/floats"
"github.com/zacg/go.matrix"
//"math"
//"math/rand"
//"sort"
)
// TpStats accumulates prediction-quality statistics for a temporal pooler.
// The *Total fields are running sums across inferences since the last reset;
// the Cur* fields hold the values from the most recent inference.
type TpStats struct {
	NInfersSinceReset       int
	NPredictions            int
	PredictionScoreTotal    float64
	PredictionScoreTotal2   float64
	FalseNegativeScoreTotal float64
	FalsePositiveScoreTotal float64
	PctExtraTotal           float64
	PctMissingTotal         float64
	TotalMissing            float64
	TotalExtra              float64
	CurPredictionScore      float64
	CurPredictionScore2     float64
	CurFalseNegativeScore   float64
	CurFalsePositiveScore   float64
	CurMissing              float64
	CurExtra                float64
	// ConfHistogram is a matrix of confidence counts.
	// NOTE(review): its axes/semantics are not visible in this file; confirm
	// against the code that populates it.
	ConfHistogram matrix.DenseMatrix
}
// ToString returns a human-readable, multi-line dump of all statistics
// fields, one "label value" line per field.
// NOTE(review): the "nInferSinceReset" label omits the 's' present in the
// field name NInfersSinceReset; preserved for output compatibility.
func (s *TpStats) ToString() string {
	entries := []struct {
		label string
		value interface{}
	}{
		{"nInferSinceReset", s.NInfersSinceReset},
		{"nPredictions", s.NPredictions},
		{"PredictionScoreTotal", s.PredictionScoreTotal},
		{"PredictionScoreTotal2", s.PredictionScoreTotal2},
		{"FalseNegativeScoreTotal", s.FalseNegativeScoreTotal},
		{"FalsePositiveScoreTotal", s.FalsePositiveScoreTotal},
		{"PctExtraTotal", s.PctExtraTotal},
		{"PctMissingTotal", s.PctMissingTotal},
		{"TotalMissing", s.TotalMissing},
		{"TotalExtra", s.TotalExtra},
		{"CurPredictionScore", s.CurPredictionScore},
		{"CurPredictionScore2", s.CurPredictionScore2},
		{"CurFalseNegativeScore", s.CurFalseNegativeScore},
		{"CurFalsePositiveScore", s.CurFalsePositiveScore},
		{"CurMissing", s.CurMissing},
		{"CurExtra", s.CurExtra},
		{"ConfHistogram", s.ConfHistogram.String()},
	}
	out := "Stats: \n"
	for _, e := range entries {
		out += fmt.Sprintf("%s %v \n", e.label, e.value)
	}
	return out
}
// confidence bundles the three goodness-of-match scores computed per input
// pattern by the prediction-checking code.
type confidence struct {
	PredictionScore         float64
	PositivePredictionScore float64
	NegativePredictionScore float64
}
/*
This function produces goodness-of-match scores for a set of input patterns,
by checking for their presence in the current and predicted output of the
TP. Returns a global count of the number of extra and missing bits, the
confidence scores for each input pattern, and (if requested) the
bits in each input pattern that were not present in the TP's prediction.
param patternNZs a list of input patterns that we want to check for. Each
element is a list of the non-zeros in that pattern.
param output The output of the TP. If not specified, then use the
TP's current output. This can be specified if you are
trying to check the prediction metric for an output from
the past.
param colConfidence The column confidences. If not specified, then use the
TP's current colConfidence. This can be specified if you
are trying to check the prediction metrics for an output
from the past.
param details if True, also include details of missing bits per pattern.
returns list containing:
[
totalExtras,
totalMissing,
[conf_1, conf_2, ...],
[missing1, missing2, ...]
]
retval totalExtras a global count of the number of 'extras', i.e. bits that
are on in the current output but not in the or of all the
passed in patterns
retval totalMissing a global count of all the missing bits, i.e. the bits
that are on in the or of the patterns, but not in the
current output
retval conf_i the confidence score for the i'th pattern inpatternsToCheck
This consists of 3 items as a tuple:
(predictionScore, posPredictionScore, negPredictionScore)
retval missing_i the bits in the i'th pattern that were missing
in the output. This list is only returned if details is
True.
*/
// checkPrediction2 produces goodness-of-match scores for a set of input
// patterns against the TP's current (or supplied) output. See the block
// comment above for the full parameter/return contract.
func (tp *TemporalPooler) checkPrediction2(patternNZs [][]int, output *SparseBinaryMatrix,
	colConfidence []float64, details bool) (int, int, []confidence, []int) {
	// Get the non-zeros in each pattern
	numPatterns := len(patternNZs)
	// Compute the union of all the expected patterns
	var orAll []int
	for _, row := range patternNZs {
		for _, col := range row {
			if !utils.ContainsInt(col, orAll) {
				orAll = append(orAll, col)
			}
		}
	}
	var outputIdxs []int
	// Get the list of active columns in the output; fall back to the TP's
	// current output when the caller did not supply one.
	if output == nil {
		if tp.CurrentOutput == nil {
			panic("Expected tp output")
		}
		outputIdxs = tp.CurrentOutput.NonZeroRows()
	} else {
		outputIdxs = output.NonZeroRows()
	}
	// Compute the total extra and missing in the output.
	// Extras: active output columns not expected by any pattern.
	totalExtras := 0
	totalMissing := 0
	for _, val := range outputIdxs {
		if !utils.ContainsInt(val, orAll) {
			totalExtras++
		}
	}
	// Missing: expected columns that are absent from the output.
	for _, val := range orAll {
		if !utils.ContainsInt(val, outputIdxs) {
			totalMissing++
		}
	}
	// Get the percent confidence level per column by summing the confidence
	// levels of the cells in the column. During training, each segment's
	// confidence number is computed as a running average of how often it
	// correctly predicted bottom-up activity on that column. A cell's
	// confidence number is taken from the first active segment found in the
	// cell. Note that confidence will only be non-zero for predicted columns.
	if colConfidence == nil {
		if tp.params.Verbosity >= 5 {
			fmt.Println("Col confidence nil, copying from tp state...")
		}
		// Copy so later mutation of the TP state cannot affect this run.
		colConfidence = make([]float64, len(tp.DynamicState.ColConfidence))
		copy(colConfidence, tp.DynamicState.ColConfidence)
	}
	// Assign confidences to each pattern
	var confidences []confidence
	for i := 0; i < numPatterns; i++ {
		// Sum of the column confidences for this pattern
		//positivePredictionSum = colConfidence[patternNZs[i]].sum()
		positivePredictionSum := floats.Sum(floats.SubSet(colConfidence, patternNZs[i]))
		// How many columns in this pattern
		positiveColumnCount := len(patternNZs[i])
		// Sum of all the column confidences
		totalPredictionSum := floats.Sum(colConfidence)
		// Total number of columns
		totalColumnCount := len(colConfidence)
		negativePredictionSum := totalPredictionSum - positivePredictionSum
		negativeColumnCount := totalColumnCount - positiveColumnCount
		positivePredictionScore := 0.0
		// Compute the average confidence score per column for this pattern.
		// NOTE(review): despite the comment, the raw sum is used unaveraged
		// (no division by positiveColumnCount) — confirm against the
		// reference implementation before changing.
		if positiveColumnCount != 0 {
			positivePredictionScore = positivePredictionSum
		}
		// Compute the average confidence score per column for the other patterns
		negativePredictionScore := 0.0
		if negativeColumnCount != 0 {
			negativePredictionScore = negativePredictionSum
		}
		// Scale the positive and negative prediction scores so that they sum to
		// 1.0
		currentSum := negativePredictionScore + positivePredictionScore
		if currentSum > 0 {
			positivePredictionScore *= 1.0 / currentSum
			negativePredictionScore *= 1.0 / currentSum
		}
		predictionScore := positivePredictionScore - negativePredictionScore
		newConf := confidence{predictionScore, positivePredictionScore, negativePredictionScore}
		confidences = append(confidences, newConf)
	}
	// Include detail? (bits in each pattern that were missing from the output)
	if details {
		var missingPatternBits []int
		for _, pattern := range patternNZs {
			for _, val := range pattern {
				if !utils.ContainsInt(val, outputIdxs) &&
					!utils.ContainsInt(val, missingPatternBits) {
					missingPatternBits = append(missingPatternBits, val)
				}
			}
		}
		return totalExtras, totalMissing, confidences, missingPatternBits
	} else {
		return totalExtras, totalMissing, confidences, nil
	}
}
/*
Called at the end of learning and inference, this routine will update
a number of stats in our _internalStats dictionary, including our computed
prediction score.
param stats internal stats dictionary
param bottomUpNZ list of the active bottom-up inputs
param predictedState The columns we predicted on the last time step (should
match the current bottomUpNZ in the best case)
param colConfidence Column confidences we determined on the last time step
*/
func (tp *TemporalPooler) updateStatsInferEnd(stats *TpStats, bottomUpNZ []int,
	predictedState *SparseBinaryMatrix, colConfidence []float64) {
	// Return if not collecting stats
	if !tp.params.CollectStats {
		return
	}
	stats.NInfersSinceReset++
	// Compute the prediction score, how well the prediction from the last
	// time step predicted the current bottom-up input
	numExtra2, numMissing2, confidences2, _ := tp.checkPrediction2([][]int{bottomUpNZ}, predictedState, colConfidence, false)
	predictionScore := confidences2[0].PredictionScore
	positivePredictionScore := confidences2[0].PositivePredictionScore
	negativePredictionScore := confidences2[0].NegativePredictionScore
	// Store the stats that don't depend on burn-in.
	// NOTE(review): CurFalseNegativeScore is assigned the *negative* score
	// and CurFalsePositiveScore the *positive* one, while the *Total
	// accumulators below use 1-positive and negative respectively — verify
	// this asymmetry against the reference implementation.
	stats.CurPredictionScore2 = predictionScore
	stats.CurFalseNegativeScore = negativePredictionScore
	stats.CurFalsePositiveScore = positivePredictionScore
	stats.CurMissing = float64(numMissing2)
	stats.CurExtra = float64(numExtra2)
	// If we are passed the burn-in period, update the accumulated stats
	// Here's what various burn-in values mean:
	// 0: try to predict the first element of each sequence and all subsequent
	// 1: try to predict the second element of each sequence and all subsequent
	// etc.
	if stats.NInfersSinceReset <= tp.params.BurnIn {
		return
	}
	// Burn-in related stats
	stats.NPredictions++
	// Clamp to 1 so the percentage calculations below never divide by zero
	// when the bottom-up input is empty.
	numExpected := mathutil.Max(1, len(bottomUpNZ))
	stats.TotalMissing += float64(numMissing2)
	stats.TotalExtra += float64(numExtra2)
	stats.PctExtraTotal += 100.0 * float64(numExtra2) / float64(numExpected)
	stats.PctMissingTotal += 100.0 * float64(numMissing2) / float64(numExpected)
	stats.PredictionScoreTotal2 += predictionScore
	stats.FalseNegativeScoreTotal += 1.0 - positivePredictionScore
	stats.FalsePositiveScoreTotal += negativePredictionScore
	if tp.collectSequenceStats {
		// Collect cell confidences for every cell that correctly predicted current
		// bottom up input. Normalize confidence across each column
		cc := tp.DynamicState.CellConfidence.Copy()
		// Zero the confidence of every cell that is not currently active.
		for r := 0; r < cc.Rows(); r++ {
			for c := 0; c < cc.Cols(); c++ {
				if !tp.DynamicState.InfActiveState.Get(r, c) {
					cc.Set(r, c, 0)
				}
			}
		}
		// sconf[r] counts the positive entries per row; it is the
		// normalization denominator used below.
		sconf := make([]int, cc.Rows())
		for r := 0; r < cc.Rows(); r++ {
			count := 0
			for c := 0; c < cc.Cols(); c++ {
				if cc.Get(r, c) > 0 {
					count++
				}
			}
			sconf[r] = count
		}
		// NOTE(review): when sconf[r] == 0 this divides by zero and writes
		// NaN into cc — confirm rows always contain at least one positive
		// confidence, or guard the division.
		for r := 0; r < cc.Rows(); r++ {
			for c := 0; c < cc.Cols(); c++ {
				temp := cc.Get(r, c)
				cc.Set(r, c, temp/float64(sconf[r]))
			}
		}
		// Update cell confidence histogram: add column-normalized confidence
		// scores to the histogram
		stats.ConfHistogram.Add(cc)
	}
}
//go:build 386 || arm || mips || mipsle
// +build 386 arm mips mipsle
package runtime
// Additional index/slice error paths for 32-bit platforms.
// Used when the high word of a 64-bit index is not zero.
// failures in the comparisons for s[x], 0 <= x < y (y == len(s))
// On these 32-bit platforms a 64-bit index arrives split into 32-bit
// halves (hi, lo) and is reassembled as int64(hi)<<32 + int64(lo).
func goPanicExtendIndex(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "index out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsIndex})
}

// goPanicExtendIndexU is the variant for an unsigned index.
func goPanicExtendIndexU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "index out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsIndex})
}
// failures in the comparisons for s[:x], 0 <= x <= y (y == len(s) or cap(s))
// Alen variants report against len(s), Acap variants against cap(s);
// U-suffixed variants take an unsigned index.
func goPanicExtendSliceAlen(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSliceAlen})
}
func goPanicExtendSliceAlenU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSliceAlen})
}
func goPanicExtendSliceAcap(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSliceAcap})
}
func goPanicExtendSliceAcapU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSliceAcap})
}
// failures in the comparisons for s[x:y], 0 <= x <= y
// (low bound of a two-index slice expression)
func goPanicExtendSliceB(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSliceB})
}
func goPanicExtendSliceBU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSliceB})
}
// failures in the comparisons for s[::x], 0 <= x <= y (y == len(s) or cap(s))
// Three-index slice expressions; Alen reports against len(s), Acap against
// cap(s), U-suffixed variants take an unsigned index.
func goPanicExtendSlice3Alen(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSlice3Alen})
}
func goPanicExtendSlice3AlenU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSlice3Alen})
}
func goPanicExtendSlice3Acap(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSlice3Acap})
}
func goPanicExtendSlice3AcapU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSlice3Acap})
}
// failures in the comparisons for s[:x:y], 0 <= x <= y
// (high bound versus capacity bound of a three-index slice expression)
func goPanicExtendSlice3B(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSlice3B})
}
func goPanicExtendSlice3BU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSlice3B})
}
// failures in the comparisons for s[x:y:], 0 <= x <= y
// (low bound versus high bound of a three-index slice expression)
func goPanicExtendSlice3C(hi int, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: true, y: y, code: boundsSlice3C})
}
func goPanicExtendSlice3CU(hi uint, lo uint, y int) {
	panicCheck1(getcallerpc(), "slice bounds out of range")
	panic(boundsError{x: int64(hi)<<32 + int64(lo), signed: false, y: y, code: boundsSlice3C})
}
// Implemented in assembly, as they take arguments in registers.
// Declared here to mark them as ABIInternal.
// (The assembly bodies are not shown in this file; presumably each stub
// forwards to the matching goPanicExtend* helper above — confirm in the
// per-arch assembly sources.)
func panicExtendIndex(hi int, lo uint, y int)
func panicExtendIndexU(hi uint, lo uint, y int)
func panicExtendSliceAlen(hi int, lo uint, y int)
func panicExtendSliceAlenU(hi uint, lo uint, y int)
func panicExtendSliceAcap(hi int, lo uint, y int)
func panicExtendSliceAcapU(hi uint, lo uint, y int)
func panicExtendSliceB(hi int, lo uint, y int)
func panicExtendSliceBU(hi uint, lo uint, y int)
func panicExtendSlice3Alen(hi int, lo uint, y int)
func panicExtendSlice3AlenU(hi uint, lo uint, y int)
func panicExtendSlice3Acap(hi int, lo uint, y int)
func panicExtendSlice3AcapU(hi uint, lo uint, y int)
func panicExtendSlice3B(hi int, lo uint, y int)
func panicExtendSlice3BU(hi uint, lo uint, y int)
func panicExtendSlice3C(hi int, lo uint, y int)
func panicExtendSlice3CU(hi uint, lo uint, y int)
package numerics
import (
"fmt"
"math"
)
// logSqrt2Pi is ln(sqrt(2*pi)), used by the log-normalization helpers below.
const logSqrt2Pi = 0.91893853320467274178032973640562
// GaussAt evaluates the standard normal probability density function at x,
// i.e. exp(-x²/2) / sqrt(2π).
func GaussAt(x float64) float64 {
	numerator := math.Exp(-x * x / 2)
	return numerator / (math.Sqrt2 * math.SqrtPi)
}
// GaussCumulativeTo returns the standard normal cumulative distribution
// function evaluated at x.
func GaussCumulativeTo(x float64) float64 {
	half := math.Erf(x/math.Sqrt2) / 2
	return half + 0.5
}
// GaussInvCumulativeTo returns the inverse CDF (quantile function) of a
// normal distribution with the given mean and stddev, evaluated at
// probability x. From Numerical Recipes, page 320.
func GaussInvCumulativeTo(x, mean, stddev float64) float64 {
	scale := math.Sqrt(2) * stddev
	return mean - scale*InvErfc(2*x)
}
// InvErfc is the inverse of the complementary error function: it returns x
// such that erfc(x) = p for argument p. Implementation follows Numerical
// Recipes p. 265: a rational initial approximation refined by two Halley
// iterations.
func InvErfc(p float64) float64 {
	// Clamp arguments outside the meaningful domain, as in Numerical Recipes.
	if p >= 2.0 {
		return -100
	}
	if p <= 0.0 {
		return 100
	}
	// Fold the argument into (0, 1]; the result is negated for p >= 1.
	pp := p
	if p >= 1.0 {
		pp = 2 - p
	}
	t := math.Sqrt(-2 * math.Log(pp/2.0)) // Initial guess
	x := -0.70711 * ((2.30753+t*0.27061)/(1.0+t*(0.99229+t*0.04481)) - t)
	for iter := 0; iter < 2; iter++ {
		err := math.Erfc(x) - pp
		x += err / (1.12837916709551257*math.Exp(-(x*x)) - x*err) // Halley refinement
	}
	if p < 1.0 {
		return x
	}
	return -x
}
// GaussDist is a Gaussian (normal) distribution kept in both moment form
// (Mean/Stddev/Variance) and canonical form (Precision/PrecisionMean),
// which makes products and ratios of distributions cheap to compute.
type GaussDist struct {
	Mean float64
	Stddev float64
	Precision float64 // 1 / Variance
	PrecisionMean float64 // Mean * Precision
	Variance float64 // Stddev squared
}
// NewGaussDist constructs a GaussDist from a mean and standard deviation,
// precomputing both the moment and canonical (precision) forms.
func NewGaussDist(mean, stddev float64) *GaussDist {
	g := &GaussDist{Mean: mean, Stddev: stddev}
	g.Variance = stddev * stddev
	g.Precision = 1 / g.Variance
	g.PrecisionMean = g.Precision * mean
	return g
}
// String renders the distribution as its mean (μ) and standard deviation (σ).
func (z *GaussDist) String() string {
	formatted := fmt.Sprintf("{μ:%.6g σ:%.6g}", z.Mean, z.Stddev)
	return formatted
}
// Sub sets z to the difference x-y and returns z.
// For independent Gaussians the means subtract while the variances add.
func (z *GaussDist) Sub(x, y *GaussDist) *GaussDist {
	mean := x.Mean - y.Mean
	variance := x.Variance + y.Variance
	z.Mean = mean
	z.Variance = variance
	z.Stddev = math.Sqrt(variance)
	z.Precision = 1 / variance
	z.PrecisionMean = mean * z.Precision
	return z
}
// Mul sets z to the product x*y and returns z.
// In canonical form, multiplying Gaussians adds their precisions and
// precision-weighted means.
func (z *GaussDist) Mul(x, y *GaussDist) *GaussDist {
	precision := x.Precision + y.Precision
	precisionMean := x.PrecisionMean + y.PrecisionMean
	z.Precision, z.PrecisionMean = precision, precisionMean
	z.fromPrecisionMean()
	return z
}
// Div sets z to the ratio x/y and returns z.
// In canonical form, dividing Gaussians subtracts precisions and
// precision-weighted means.
func (z *GaussDist) Div(x, y *GaussDist) *GaussDist {
	precision := x.Precision - y.Precision
	precisionMean := x.PrecisionMean - y.PrecisionMean
	z.Precision, z.PrecisionMean = precision, precisionMean
	z.fromPrecisionMean()
	return z
}
// CumulativeTo returns the cumulative distribution function of z
// evaluated at x.
func (z *GaussDist) CumulativeTo(x float64) float64 {
	standardized := (x - z.Mean) / z.Stddev
	return GaussCumulativeTo(standardized)
}
// fromPrecisionMean refreshes the moment form (Mean/Variance/Stddev)
// from the canonical form (Precision/PrecisionMean).
func (z *GaussDist) fromPrecisionMean() {
	z.Mean = z.PrecisionMean / z.Precision
	z.Variance = 1 / z.Precision
	z.Stddev = math.Sqrt(z.Variance)
}
// LogProdNorm returns the log of the normalization constant of the product
// of x and y, or 0 when either distribution is uninformative (precision 0).
func LogProdNorm(x, y *GaussDist) float64 {
	if x.Precision == 0 || y.Precision == 0 {
		return 0
	}
	varianceSum := x.Variance + y.Variance
	diff := x.Mean - y.Mean
	return -logSqrt2Pi - (math.Log(varianceSum)+diff*diff/varianceSum)/2.0
}
// LogRatioNorm returns the log of the normalization constant of the ratio
// x/y, or 0 when either distribution is uninformative (precision 0).
func LogRatioNorm(x, y *GaussDist) float64 {
	if x.Precision == 0 || y.Precision == 0 {
		return 0
	}
	varianceDiff := y.Variance - x.Variance
	diff := x.Mean - y.Mean
	return math.Log(y.Variance) + logSqrt2Pi - math.Log(varianceDiff)/2 + diff*diff/(2*varianceDiff)
}
// Computes the absolute difference between two Gaussians
func AbsDiff(x, y *GaussDist) float64 {
return math.Max(math.Abs(x.PrecisionMean-y.PrecisionMean), math.Sqrt(math.Abs(x.Precision-y.Precision)))
} | vendor/github.com/ChrisHines/GoSkills/skills/numerics/GaussDist.go | 0.908929 | 0.61115 | GaussDist.go | starcoder |
// This package contains string conversion function and is written in [Go][1].
// It is much alike the standard library's strconv package, but it is
// specifically tailored for the performance needs within the minify package.
// For example, the floating-point to string conversion function is
// approximately twice as fast as the standard library, but it is not as
// precise.
package byteconv
import (
"math"
"strconv"
)
// float64pow10 caches powers of ten up to 1e22, the largest power of ten
// that is exactly representable as a float64; ParseFloat uses it for
// single-rounding scaling.
var float64pow10 = []float64{
	1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9,
	1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
	1e20, 1e21, 1e22,
}
// ParseFloat parses a byte-slice and returns the float it represents.
// If an invalid character is encountered, it will stop there.
func ParseFloat(b []byte) (v float64, ok bool, err error) {
	if len(b) == 0 {
		return
	}
	// Optional escape hatch: defer to the (more precise) standard library.
	if UseStdLib {
		v, err = strconv.ParseFloat(string(b), 64)
		ok = err == nil
		return
	}
	i := 0
	neg := false
	// Optional sign.
	if i < len(b) && (b[i] == '+' || b[i] == '-') {
		neg = b[i] == '-'
		i++
	}
	// dot records the position of the decimal point; trunk records where
	// mantissa accumulation stopped because it would overflow uint64.
	dot := -1
	trunk := -1
	n := uint64(0)
	for ; i < len(b); i++ {
		c := b[i]
		if c >= '0' && c <= '9' {
			if trunk == -1 {
				if n > math.MaxUint64/10 {
					// Further digits would overflow; remember where we
					// truncated so the exponent can compensate below.
					trunk = i
				} else {
					n *= 10
					n += uint64(c - '0')
				}
			}
		} else if dot == -1 && c == '.' {
			dot = i
		} else {
			if c == 'e' || c == 'E' {
				break
			}
			return 0, false, syntaxError("ParseFloat", string(b))
		}
	}
	f := float64(n)
	if neg {
		f = -f
	}
	// mantExp is the implicit exponent correction from the decimal point
	// position and/or from truncated mantissa digits.
	mantExp := int64(0)
	if dot != -1 {
		if trunk == -1 {
			trunk = i
		}
		mantExp = int64(trunk - dot - 1)
	} else if trunk != -1 {
		mantExp = int64(trunk - i)
	}
	// Parse an explicit exponent (e/E suffix), if present. ParseInt and
	// LenInt are package siblings.
	expExp := int64(0)
	if i < len(b) && (b[i] == 'e' || b[i] == 'E') {
		i++
		if e, ok, err := ParseInt(b[i:]); ok && err == nil {
			expExp = e
			i += LenInt(e)
		} else {
			return 0, false, syntaxError("ParseFloat", string(b))
		}
	}
	exp := expExp - mantExp
	// copied from strconv/atof.go
	if exp == 0 {
		return f, true, nil
	} else if exp > 0 && exp <= 15+22 { // int * 10^k
		// If exponent is big but number of digits is not,
		// can move a few zeros into the integer part.
		if exp > 22 {
			f *= float64pow10[exp-22]
			exp = 22
		}
		if f <= 1e15 && f >= -1e15 {
			return f * float64pow10[exp], true, nil
		}
	} else if exp < 0 && exp >= -22 { // int / 10^k
		return f / float64pow10[-exp], true, nil
	}
	// Fallback: two-step scaling via math.Pow10 (less precise; this is the
	// documented speed/precision trade-off of this package).
	f *= math.Pow10(int(-mantExp))
	return f * math.Pow10(int(expExp)), true, nil
}
package joejson
import (
"encoding/json"
"fmt"
)
// GeometryTypeGeometryCollection is the value of a GeometryCollection's
// GeoJSON 'type' member.
const GeometryTypeGeometryCollection = "GeometryCollection"
// GeometryCollection is a slice of Geometries, modelling a GeoJSON
// "GeometryCollection" object.
type GeometryCollection []GeometryCollectionMember
// AppendPoint appends a Point to the collection.
// Like the built-in append, all Append* helpers return the extended slice,
// which the caller must reassign.
func (g GeometryCollection) AppendPoint(m Point) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}

// AppendMultiPoint appends a MultiPoint to the collection.
func (g GeometryCollection) AppendMultiPoint(m MultiPoint) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}

// AppendLineString appends a LineString to the collection.
func (g GeometryCollection) AppendLineString(m LineString) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}
// AppendMultiLineString appends a MultiLineString to the collection.
func (g GeometryCollection) AppendMultiLineString(m MultiLineString) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}

// AppendMuliLineString appends a MultiLineString to the collection.
//
// Deprecated: the name is misspelled (missing 't'); use
// AppendMultiLineString. Kept for backward compatibility with existing
// callers.
func (g GeometryCollection) AppendMuliLineString(m MultiLineString) GeometryCollection {
	return g.AppendMultiLineString(m)
}
// AppendPolygon appends a Polygon to the collection.
// The returned slice must be reassigned by the caller (append semantics).
func (g GeometryCollection) AppendPolygon(m Polygon) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}

// AppendMultiPolygon appends a MultiPolygon to the collection.
func (g GeometryCollection) AppendMultiPolygon(m MultiPolygon) GeometryCollection {
	return append(g, GeometryCollectionMember{m})
}
// MarshalJSON implements json.Marshaler, emitting a GeoJSON
// GeometryCollection object with "geometries" and "type" members.
func (g GeometryCollection) MarshalJSON() ([]byte, error) {
	envelope := struct {
		Geometries []GeometryCollectionMember `json:"geometries"`
		Type       string                     `json:"type"`
	}{
		Geometries: g,
		Type:       GeometryTypeGeometryCollection,
	}
	return json.Marshal(&envelope)
}
// UnmarshalJSON is a custom JSON unmarshaller. It validates the "type"
// member and appends each decoded geometry to the collection.
func (g *GeometryCollection) UnmarshalJSON(b []byte) error {
	// Decode the envelope first; each geometry stays raw so it can be
	// dispatched on its own "type" member afterwards.
	var tmp struct {
		Geometries []json.RawMessage `json:"geometries"`
		Type string `json:"type"`
	}
	if err := json.Unmarshal(b, &tmp); err != nil {
		return err
	}
	if tmp.Type != GeometryTypeGeometryCollection {
		return fmt.Errorf("invalid type %q, expected %q", tmp.Type, GeometryTypeGeometryCollection)
	}
	for _, geom := range tmp.Geometries {
		// unmarshalGeometry is a package sibling that decodes one raw
		// geometry into its concrete type.
		geom, err := unmarshalGeometry(geom)
		if err != nil {
			return err
		}
		*g = append(*g, GeometryCollectionMember{geom})
	}
	return nil
}
// GeometryCollectionMember is a Geometry belonging to a GeometryCollection.
// It wraps exactly one of the package's concrete geometry types behind an
// any; use the As* accessors or Type to recover the concrete value.
type GeometryCollectionMember struct {
	geometry any
}
// AsPoint casts the Geometry to a Point.
// The boolean reports whether the underlying geometry has that concrete
// type; the first value is the zero value when it does not.
func (g GeometryCollectionMember) AsPoint() (Point, bool) {
	p, ok := g.geometry.(Point)
	return p, ok
}

// AsMultiPoint casts the Geometry to a MultiPoint.
func (g GeometryCollectionMember) AsMultiPoint() (MultiPoint, bool) {
	p, ok := g.geometry.(MultiPoint)
	return p, ok
}

// AsLineString casts the Geometry to a LineString.
func (g GeometryCollectionMember) AsLineString() (LineString, bool) {
	p, ok := g.geometry.(LineString)
	return p, ok
}

// AsMultiLineString casts the Geometry to a MultiLineString.
func (g GeometryCollectionMember) AsMultiLineString() (MultiLineString, bool) {
	p, ok := g.geometry.(MultiLineString)
	return p, ok
}
// AsPolygon casts the Geometry to a Polygon.
// Fix: switched from a pointer to a value receiver for consistency with
// the other As* accessors; value receivers are also in the method set of
// *GeometryCollectionMember, so existing callers are unaffected.
func (g GeometryCollectionMember) AsPolygon() (Polygon, bool) {
	p, ok := g.geometry.(Polygon)
	return p, ok
}

// AsMultiPolygon casts the Geometry to a MultiPolygon.
func (g GeometryCollectionMember) AsMultiPolygon() (MultiPolygon, bool) {
	p, ok := g.geometry.(MultiPolygon)
	return p, ok
}
// MarshalJSON is a custom JSON marshaller.
// It delegates to the wrapped concrete geometry's own marshaller.
func (g GeometryCollectionMember) MarshalJSON() ([]byte, error) {
	return json.Marshal(g.geometry)
}
// Type is the type of the Geometry.
// It returns the matching GeometryType* constant, or "" when the member
// wraps no known geometry (e.g. the zero value).
func (g GeometryCollectionMember) Type() string {
	switch g.geometry.(type) {
	case Point:
		return GeometryTypePoint
	case MultiPoint:
		return GeometryTypeMultiPoint
	case LineString:
		return GeometryTypeLineString
	case MultiLineString:
		return GeometryTypeMultiLineString
	case Polygon:
		return GeometryTypePolygon
	case MultiPolygon:
		return GeometryTypeMultiPolygon
	default:
		return ""
	}
}
package main
import (
"fmt"
"golang.org/x/tour/tree"
)
/*
------- Exercise: Equivalent Binary Trees
There can be many different binary trees with the same sequence of values stored in it.
For example, here are two binary trees storing the sequence 1, 1, 2, 3, 5, 8, 13.
A function to check whether two binary trees store the same sequence is quite complex in most languages.
We'll use Go's concurrency and channels to write a simple solution.
This example uses the tree package, which defines the type:
type Tree struct {
Left *Tree
Value int
Right *Tree
}
1. Implement the Walk function.
2. Test the Walk function.
The function tree.New(k) constructs a randomly-structured (but always sorted) binary tree holding the values k, 2k, 3k, ..., 10k.
Create a new channel ch and kick off the walker:
> go Walk(tree.New(1), ch)
Then read and print 10 values from the channel. It should be the numbers 1, 2, 3, ..., 10.
3. Implement the Same function using Walk to determine whether t1 and t2 store the same values.
4. Test the Same function.
Same(tree.New(1), tree.New(1)) should return true, and Same(tree.New(1), tree.New(2)) should return false.
The documentation for Tree can be found here[1].
[1] https://pkg.go.dev/golang.org/x/tour/tree?utm_source=godoc#Tree
*/
// Walk walks the tree sending all of its values to the channel in sorted
// (in-order) order, closing the channel once the traversal is complete.
func Walk(myTree *tree.Tree, channel chan int) {
	// Closing the channel lets receivers ranging over it terminate.
	defer close(channel)

	var inorder func(node *tree.Tree)
	inorder = func(node *tree.Tree) {
		if node == nil {
			return
		}
		inorder(node.Left)
		channel <- node.Value
		inorder(node.Right)
	}
	inorder(myTree)
}
// Same determines whether the trees t1 and t2 contain the same values.
//
// Fix: the original returned true whenever t1's value stream was a prefix
// of t2's — it never checked that t2 was also exhausted, so a t2 holding
// extra values compared as equal. We now verify both streams end together.
func Same(t1, t2 *tree.Tree) bool {
	t1Channel, t2Channel := make(chan int), make(chan int)
	go Walk(t1, t1Channel)
	go Walk(t2, t2Channel)
	for v1 := range t1Channel {
		v2, ok := <-t2Channel
		if !ok || v1 != v2 {
			// Mismatch, or t2 ran out first.
			// NOTE: on early return the walker goroutines may stay blocked
			// on their sends; acceptable for this exercise's short-lived trees.
			return false
		}
	}
	// t1 is exhausted; the trees match only if t2 is exhausted too.
	_, more := <-t2Channel
	return !more
}
// https://tour.golang.org/concurrency/8
// main exercises Walk (printing the ten in-order values of tree.New(1))
// and sanity-checks Same against equal and unequal trees.
func main() {
	sampleTree := tree.New(1)
	fmt.Println(sampleTree) //((((1 (2)) 3 (4)) 5 ((6) 7 ((8) 9))) 10)
	honestChannel := make(chan int)
	go Walk(tree.New(1), honestChannel)
	// tree.New(k) holds exactly the values k, 2k, ..., 10k, so ten reads
	// drain the channel.
	for i := 0; i < 10; i++ {
		whatWhatSentThroughWalkFunction := <-honestChannel
		fmt.Println(whatWhatSentThroughWalkFunction)
	}
	shouldBeTrue := Same(tree.New(1), tree.New(1))
	shouldBeFalse := Same(tree.New(1), tree.New(2))
	if shouldBeTrue == false || shouldBeFalse == true {
		panic("Wrong results!")
	}
}
package skyhook
import (
"encoding/json"
"io"
"io/ioutil"
"github.com/paulmach/go.geojson"
gomapinfer "github.com/mitroadmaps/gomapinfer/common"
)
// GeoJsonData wraps a parsed GeoJSON feature collection.
type GeoJsonData struct {
	Collection *geojson.FeatureCollection
}
// GetGeometryBbox computes the axis-aligned bounding rectangle of a GeoJSON
// geometry by extending an initially-empty rectangle with every coordinate.
// coordinate[0] is used as x and coordinate[1] as y — presumably GeoJSON's
// [longitude, latitude] order; confirm against gomapinfer's conventions.
func GetGeometryBbox(g *geojson.Geometry) gomapinfer.Rectangle {
	var bbox gomapinfer.Rectangle = gomapinfer.EmptyRectangle
	handlePointBBox := func(coordinate []float64) {
		p := gomapinfer.Point{coordinate[0], coordinate[1]}
		bbox = bbox.Extend(p)
	}
	handleLineStringBBox := func(coordinates [][]float64) {
		for _, coordinate := range coordinates {
			p := gomapinfer.Point{coordinate[0], coordinate[1]}
			bbox = bbox.Extend(p)
		}
	}
	handlePolygonBBox := func(coordinates [][][]float64) {
		// We do not support holes yet, so just use coordinates[0].
		// coordinates[0] is the exterior ring while coordinates[1:] specify
		// holes in the polygon that should be excluded.
		for _, coordinate := range coordinates[0] {
			p := gomapinfer.Point{coordinate[0], coordinate[1]}
			bbox = bbox.Extend(p)
		}
	}
	// Dispatch on the concrete geometry type; Multi* variants apply the
	// scalar handler to each member.
	if g.Type == geojson.GeometryPoint {
		handlePointBBox(g.Point)
	} else if g.Type == geojson.GeometryMultiPoint {
		for _, coordinate := range g.MultiPoint {
			handlePointBBox(coordinate)
		}
	} else if g.Type == geojson.GeometryLineString {
		handleLineStringBBox(g.LineString)
	} else if g.Type == geojson.GeometryMultiLineString {
		for _, coordinates := range g.MultiLineString {
			handleLineStringBBox(coordinates)
		}
	} else if g.Type == geojson.GeometryPolygon {
		handlePolygonBBox(g.Polygon)
	} else if g.Type == geojson.GeometryMultiPolygon {
		for _, coordinates := range g.MultiPolygon {
			handlePolygonBBox(coordinates)
		}
	}
	return bbox
}
// GeoJsonDataSpec provides the (de)serialization hooks for GeoJSON items:
// values are *geojson.FeatureCollection, encoded as JSON.
type GeoJsonDataSpec struct{}

// DecodeMetadata returns the metadata for a GeoJSON item; GeoJSON carries
// no per-item metadata, so this is always NoMetadata.
func (s GeoJsonDataSpec) DecodeMetadata(rawMetadata string) DataMetadata {
	return NoMetadata{}
}

// ReadStream decodes one feature collection from a stream via the
// package's ReadJsonData helper (stream framing defined elsewhere).
func (s GeoJsonDataSpec) ReadStream(r io.Reader) (interface{}, error) {
	var data *geojson.FeatureCollection
	if err := ReadJsonData(r, &data); err != nil {
		return nil, err
	}
	return data, nil
}

// WriteStream encodes data onto a stream via WriteJsonData.
func (s GeoJsonDataSpec) WriteStream(data interface{}, w io.Writer) error {
	if err := WriteJsonData(data, w); err != nil {
		return err
	}
	return nil
}

// Read decodes a stored feature collection. The format argument is
// ignored here (only JSON is produced, per GetDefaultExtAndFormat).
func (s GeoJsonDataSpec) Read(format string, metadata DataMetadata, r io.Reader) (interface{}, error) {
	bytes, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	var data *geojson.FeatureCollection
	if err := json.Unmarshal(bytes, &data); err != nil {
		return nil, err
	}
	return data, nil
}

// Write serializes the feature collection as JSON; format and metadata
// are ignored.
func (s GeoJsonDataSpec) Write(data interface{}, format string, metadata DataMetadata, w io.Writer) error {
	_, err := w.Write(JsonMarshal(data))
	return err
}

// GetDefaultExtAndFormat reports the default file extension and encoding
// format for GeoJSON items ("json"/"json").
func (s GeoJsonDataSpec) GetDefaultExtAndFormat(data interface{}, metadata DataMetadata) (ext string, format string) {
	return "json", "json"
}
func init() {
DataSpecs[GeoJsonType] = GeoJsonDataSpec{}
} | skyhook/data_geojson.go | 0.703244 | 0.453685 | data_geojson.go | starcoder |
package schema
import (
"fmt"
"regexp"
"strings"
"github.com/fatih/camelcase"
pb "github.com/semi-technologies/contextionary/contextionary"
contextionary "github.com/semi-technologies/contextionary/contextionary/core"
"github.com/semi-technologies/contextionary/errors"
)
// SearchResult is a single search result; the search type lives on the
// wrapping SearchResults.
type SearchResult struct {
	Name string
	Certainty float32 // normalized match confidence (see distanceToCertainty)
}

// SearchResults is a grouping of SearchResults for one SchemaSearch.
type SearchResults struct {
	Type SearchType
	Results []SearchResult
}

// Len returns the number of results in the set.
func (r SearchResults) Len() int {
	return len(r.Results)
}
// SchemaSearch can be used to search for related classes and properties, see
// documentation of SearchParams for more details on how to use it and
// documentation on *pb.SchemaSearchResults for more details on how to use the
// return value.
func (con *Contextionary) SchemaSearch(params *pb.SchemaSearchParams) (*pb.SchemaSearchResults, error) {
	p := SearchParams{params}
	if err := p.Validate(); err != nil {
		return nil, errors.NewInvalidUserInputf("invalid search params: %s", err)
	}
	// Build one query vector from the (possibly camelCased) name plus any
	// weighted keywords.
	centroid, err := con.centroidFromNameAndKeywords(p)
	if err != nil {
		return nil, errors.NewInvalidUserInputf("could not build centroid from name and keywords: %s", err)
	}
	rawResults, err := con.knnSearch(*centroid)
	if err != nil {
		return nil, errors.NewInternalf("could not perform knn search: %s", err)
	}
	if p.SearchType == pb.SearchType_CLASS {
		return con.handleClassSearch(p, rawResults)
	}
	// since we have passed validation we know that anything that's not a class
	// search must be a property search
	return con.handlePropertySearch(p, rawResults)
}
// centroidFromNameAndKeywords builds the query vector: the vector of the
// search name, optionally combined with the keyword vectors into a
// weighted centroid. The name itself always carries weight 1.
func (con *Contextionary) centroidFromNameAndKeywords(p SearchParams) (*contextionary.Vector, error) {
	nameVector, err := con.camelCaseWordToVector(p.Name)
	if err != nil {
		return nil, fmt.Errorf("invalid name in search: %s", err)
	}
	// Fast path: with no keywords the name vector is the centroid.
	if len(p.Keywords) == 0 {
		return nameVector, nil
	}
	vectors := make([]contextionary.Vector, len(p.Keywords)+1, len(p.Keywords)+1)
	weights := make([]float32, len(p.Keywords)+1, len(p.Keywords)+1)
	// set last vector to className which always has weight=1
	vectors[len(vectors)-1] = *nameVector
	weights[len(vectors)-1] = 1
	for i, keyword := range p.Keywords {
		kwVector, err := con.wordToVector(keyword.Keyword)
		if err != nil {
			return nil, fmt.Errorf("invalid keyword in search: %s", err)
		}
		vectors[i] = *kwVector
		weights[i] = keyword.Weight
	}
	return contextionary.ComputeWeightedCentroid(vectors, weights)
}
// camelCaseWordToVector vectorizes a possibly-camelCased compound word.
// Single words map directly; compounds become the equally-weighted
// centroid of their parts (e.g. "cityName" -> centroid of "city", "Name").
func (con *Contextionary) camelCaseWordToVector(w string) (*contextionary.Vector, error) {
	parts := camelcase.Split(w)
	if len(parts) == 1 {
		// no camelcasing, no need to build a centroid
		return con.wordToVector(w)
	}
	vectors := make([]contextionary.Vector, len(parts), len(parts))
	weights := make([]float32, len(parts), len(parts))
	for i, part := range parts {
		v, err := con.wordToVector(part)
		if err != nil {
			return nil, fmt.Errorf("invalid camelCased compound word: %s", err)
		}
		vectors[i] = *v
		weights[i] = 1 // on camel-casing all parts are weighted equally
	}
	return contextionary.ComputeWeightedCentroid(vectors, weights)
}
// wordToVector resolves one word (lowercased first) to its contextionary
// vector, returning a user-facing error when the word is unknown.
func (con *Contextionary) wordToVector(w string) (*contextionary.Vector, error) {
	w = strings.ToLower(w)
	itemIndex := con.WordToItemIndex(w)
	if ok := itemIndex.IsPresent(); !ok {
		return nil, fmt.Errorf(
			"the word '%s' is not present in the contextionary and therefore not a valid search term", w)
	}
	vector, err := con.GetVectorForItemIndex(itemIndex)
	if err != nil {
		return nil, fmt.Errorf("could not get vector for word '%s' with itemIndex '%d': %s",
			w, itemIndex, err)
	}
	return vector, nil
}
// handleClassSearch converts raw kNN results into a class-search response.
func (con *Contextionary) handleClassSearch(p SearchParams, search rawResults) (*pb.SchemaSearchResults, error) {
	return &pb.SchemaSearchResults{
		Type: p.SearchType,
		Results: search.extractClassNames(p),
	}, nil
}

// handlePropertySearch converts raw kNN results into a property-search
// response.
func (con *Contextionary) handlePropertySearch(p SearchParams, search rawResults) (*pb.SchemaSearchResults, error) {
	return &pb.SchemaSearchResults{
		Type: p.SearchType,
		Results: search.extractPropertyNames(p),
	}, nil
}
// knnSearch returns the contextionary items nearest to the given vector
// together with their raw distances.
// NOTE(review): the GetNnsByVector arguments (10000, 3) are kept as-is —
// the meaning of the third argument is not visible in this file; confirm
// before tuning.
func (con *Contextionary) knnSearch(vector contextionary.Vector) (rawResults, error) {
	list, distances, err := con.GetNnsByVector(vector, 10000, 3)
	if err != nil {
		return nil, fmt.Errorf("could not get nearest neighbors for vector '%v': %s", vector, err)
	}
	results := make(rawResults, len(list), len(list))
	for i := range list {
		word, err := con.ItemIndexToWord(list[i])
		if err != nil {
			return results, fmt.Errorf("got a result from kNN search, but don't have a word for this index: %s", err)
		}
		results[i] = rawResult{
			name: word,
			distance: distances[i],
		}
	}
	return results, nil
}
// rawResult is a helper struct to contain the results of the kNN-search. It
// does not yet contain the desired output. This means the names can be both
// classes/properties and arbitrary words. Furthermore the certainty has not
// yet been normalized, so it is merely the raw kNN distance.
type rawResult struct {
	name string // raw contextionary identifier (plain word or "$..." marker)
	distance float32 // raw kNN distance, not yet converted to a certainty
}

// rawResults is the list of kNN hits as returned by knnSearch.
type rawResults []rawResult
// classNameRegex matches contextionary identifiers of the form
// "$OBJECT[ClassName]" and captures the class name.
// Fix: the original compiled this pattern (via a pointless Sprintf of a
// constant) on every call; it is now compiled once at package init.
var classNameRegex = regexp.MustCompile(`^\$OBJECT\[([A-Za-z]+)\]$`)

// extractClassNames filters the raw kNN results down to class names,
// converting each raw distance to a certainty and dropping results below
// the requested certainty threshold.
func (r rawResults) extractClassNames(p SearchParams) []*pb.SchemaSearchResult {
	var results []*pb.SchemaSearchResult
	for _, rawRes := range r {
		match := classNameRegex.FindStringSubmatch(rawRes.name)
		if match == nil {
			continue
		}
		certainty := distanceToCertainty(rawRes.distance)
		if certainty < p.Certainty {
			continue
		}
		results = append(results, &pb.SchemaSearchResult{
			Name:      match[1],
			Certainty: certainty,
		})
	}
	return results
}
// extractPropertyNames filters the raw kNN results down to those whose name
// follows the "$KIND[Class][Property]" scheme and whose certainty meets the
// threshold from the search params. Duplicate property names are deduplicated
// into a single result carrying the mean certainty of all occurrences.
func (r rawResults) extractPropertyNames(p SearchParams) []*pb.SchemaSearchResult {
	var results []*pb.SchemaSearchResult
	regex := regexp.MustCompile("^\\$[A-Za-z]+\\[[A-Za-z]+\\]\\[([A-Za-z]+)\\]$")
	propsMap := map[string][]*pb.SchemaSearchResult{}
	for _, rawRes := range r {
		// Run the regex once and reuse its submatch instead of matching twice.
		matches := regex.FindStringSubmatch(rawRes.name)
		if matches == nil {
			continue
		}
		name := matches[1]

		certainty := distanceToCertainty(rawRes.distance)
		if certainty < p.Certainty {
			continue
		}

		// append on a missing key yields a fresh slice, so no explicit
		// presence check is needed.
		propsMap[name] = append(propsMap[name], &pb.SchemaSearchResult{
			Name:      name,
			Certainty: certainty,
		})
	}

	// now calculate mean of duplicate results
	for _, resultsPerName := range propsMap {
		results = append(results, &pb.SchemaSearchResult{
			Name:      resultsPerName[0].Name,
			Certainty: meanCertainty(resultsPerName),
		})
	}

	return results
}
// meanCertainty returns the arithmetic mean of the certainties of the given
// results. An empty input yields 0 rather than NaN (guarding the division).
func meanCertainty(rs []*pb.SchemaSearchResult) float32 {
	if len(rs) == 0 {
		return 0
	}

	var compound float32
	for _, r := range rs {
		compound += r.Certainty
	}

	return compound / float32(len(rs))
}
// distanceToCertainty maps a raw kNN distance onto the certainty scale used
// in search results: a distance of 0 maps to certainty 1 and a distance of 12
// (the assumed maximum) maps to certainty 0.
func distanceToCertainty(d float32) float32 {
	const maxDistance = 12
	return 1 - d/maxDistance
}
package cbor
import (
"errors"
"io"
"reflect"
)
// Decoder reads and decodes CBOR values from an input stream.
type Decoder struct {
	r         io.Reader
	buf       []byte // internal read buffer; buf[off:] is the unread portion
	d         decodeState
	off       int // start of unread data in buf
	bytesRead int // running total exposed via NumBytesRead
}

// NewDecoder returns a new decoder that reads from r.
func NewDecoder(r io.Reader) *Decoder {
	return &Decoder{r: r}
}
// Decode reads the next CBOR-encoded value from its input and stores it in
// the value pointed to by v. If the buffered data ends mid-value, more input
// is read and the decode is retried.
func (dec *Decoder) Decode(v interface{}) (err error) {
	if len(dec.buf) == dec.off {
		// Buffer fully consumed: pull in fresh data before decoding. If
		// nothing could be read, surface the reader's error (e.g. io.EOF).
		if n, err := dec.read(); n == 0 {
			return err
		}
	}
	dec.d.reset(dec.buf[dec.off:])
	err = dec.d.value(v)
	// Advance past whatever the decode consumed, successful or not.
	dec.off += dec.d.off
	dec.bytesRead += dec.d.off
	if err != nil {
		if err == io.ErrUnexpectedEOF {
			// Need to read more data.
			if n, err := dec.read(); n == 0 {
				return err
			}
			return dec.Decode(v)
		}
		return err
	}
	return nil
}
// NumBytesRead returns the number of bytes read.
func (dec *Decoder) NumBytesRead() int {
	return dec.bytesRead
}

// read refills dec.buf from the underlying reader: it compacts the buffer by
// discarding already-consumed bytes, grows it if less than minRead bytes of
// spare capacity remain, then performs a single Read call. It returns the
// number of bytes read and any error from the reader.
func (dec *Decoder) read() (int, error) {
	// Copy unread data over read data and reset off to 0.
	if dec.off > 0 {
		n := copy(dec.buf, dec.buf[dec.off:])
		dec.buf = dec.buf[:n]
		dec.off = 0
	}
	// Grow buf if needed.
	const minRead = 512
	if cap(dec.buf)-len(dec.buf) < minRead {
		newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
		copy(newBuf, dec.buf)
		dec.buf = newBuf
	}
	// Read from reader and reslice buf.
	n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
	dec.buf = dec.buf[0 : len(dec.buf)+n]
	return n, err
}
// Encoder writes CBOR values to an output stream.
type Encoder struct {
	w          io.Writer
	opts       EncOptions
	e          encodeState
	indefTypes []cborType // stack of currently open indefinite-length containers
}

// NewEncoder returns a new encoder that writes to w using the given
// encoding options.
func NewEncoder(w io.Writer, encOpts EncOptions) *Encoder {
	return &Encoder{w: w, opts: encOpts, e: encodeState{}}
}
// Encode writes the CBOR encoding of v to the stream. While an
// indefinite-length text or byte string is open (see the StartIndefinite*
// methods), v must be a chunk of the matching kind: a string, or a byte
// slice/array respectively.
func (enc *Encoder) Encode(v interface{}) error {
	if len(enc.indefTypes) > 0 && v != nil {
		// Validate the chunk type against the innermost open container.
		indefType := enc.indefTypes[len(enc.indefTypes)-1]
		if indefType == cborTypeTextString {
			k := reflect.TypeOf(v).Kind()
			if k != reflect.String {
				return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length text string")
			}
		} else if indefType == cborTypeByteString {
			t := reflect.TypeOf(v)
			k := t.Kind()
			if (k != reflect.Array && k != reflect.Slice) || t.Elem().Kind() != reflect.Uint8 {
				return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length byte string")
			}
		}
	}
	err := enc.e.marshal(v, enc.opts)
	if err == nil {
		_, err = enc.e.WriteTo(enc.w)
	}
	return err
}
// StartIndefiniteByteString starts byte string encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode definite length byte strings
// ("chunks") as one contiguous string until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteByteString() error {
	return enc.startIndefinite(cborTypeByteString)
}

// StartIndefiniteTextString starts text string encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode definite length text strings
// ("chunks") as one contiguous string until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteTextString() error {
	return enc.startIndefinite(cborTypeTextString)
}

// StartIndefiniteArray starts array encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode elements of the array
// until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteArray() error {
	return enc.startIndefinite(cborTypeArray)
}

// StartIndefiniteMap starts map encoding of indefinite length.
// Subsequent calls of (*Encoder).Encode() encode elements of the map
// until EndIndefinite is called.
func (enc *Encoder) StartIndefiniteMap() error {
	return enc.startIndefinite(cborTypeMap)
}
// EndIndefinite closes the most recently opened indefinite-length value by
// writing the CBOR "break" code (0xff) and popping the container stack.
func (enc *Encoder) EndIndefinite() error {
	if len(enc.indefTypes) == 0 {
		return errors.New("cbor: cannot encode \"break\" code outside indefinite length values")
	}

	if _, err := enc.w.Write([]byte{0xff}); err != nil {
		return err
	}

	enc.indefTypes = enc.indefTypes[:len(enc.indefTypes)-1]
	return nil
}
// cborIndefHeader maps each container type to the CBOR head byte that opens
// an indefinite-length encoding of that type.
var cborIndefHeader = map[cborType][]byte{
	cborTypeByteString: {0x5f},
	cborTypeTextString: {0x7f},
	cborTypeArray:      {0x9f},
	cborTypeMap:        {0xbf},
}

// startIndefinite writes the indefinite-length head byte for typ and, on
// success, pushes typ onto the stack of open indefinite-length containers.
func (enc *Encoder) startIndefinite(typ cborType) error {
	_, err := enc.w.Write(cborIndefHeader[typ])
	if err == nil {
		enc.indefTypes = append(enc.indefTypes, typ)
	}
	return err
}
// RawMessage is a raw encoded CBOR value. It implements Marshaler and
// Unmarshaler interfaces and can be used to delay CBOR decoding or
// precompute a CBOR encoding.
type RawMessage []byte
// MarshalCBOR returns m as the CBOR encoding of m.
func (m RawMessage) MarshalCBOR() ([]byte, error) {
if len(m) == 0 {
return cborNil, nil
}
return m, nil
}
// UnmarshalCBOR sets *m to a copy of data.
func (m *RawMessage) UnmarshalCBOR(data []byte) error {
if m == nil {
return errors.New("cbor.RawMessage: UnmarshalCBOR on nil pointer")
}
*m = append((*m)[0:0], data...)
return nil
} | vendor/github.com/fxamacker/cbor/stream.go | 0.666931 | 0.436382 | stream.go | starcoder |
package gflowparser
import (
"github.com/flowdev/gflowparser/data"
"github.com/flowdev/gflowparser/data2svg"
"github.com/flowdev/gflowparser/parser"
"github.com/flowdev/gflowparser/svg"
"github.com/flowdev/gparselib"
)
// ConvertFlowDSLToSVG transforms a flow given as DSL string into a SVG image
// plus component (subflow) types, data types, parser feedback string and
// potential error(s). flowName is used as the source name in parse errors.
func ConvertFlowDSLToSVG(flowContent, flowName string,
) (
	svgData []byte,
	compTypes []data.Type,
	dataTypes []data.Type,
	feedback string,
	err error,
) {
	// Parse the DSL source into a flow data structure.
	pd := gparselib.NewParseData(flowName, flowContent)
	pFlow, err := parser.NewFlowParser()
	if err != nil {
		return nil, nil, nil, "", err
	}
	pd, _ = pFlow.ParseFlow(pd, nil)
	// Collect parser feedback; a hard error aborts the conversion.
	fb, err := parser.CheckFeedback(pd.Result)
	if err != nil {
		return nil, nil, nil, "", err
	}
	flow := pd.Result.Value.(data.Flow)
	// Convert the parsed flow into the intermediate SVG flow representation.
	sf, err := data2svg.Convert(flow, pd.Source)
	if err != nil {
		return nil, nil, nil, "", err
	}
	// Collect the distinct component and data types referenced by the flow.
	compTypes, dataTypes = extractTypes(flow)
	//fmt.Fprintf(os.Stderr, "DEBUG: svgFlow=`%s`\n", spew.Sdump(sf))
	// Render the intermediate representation into the final SVG bytes.
	buf, err := svg.FromFlowData(sf)
	if err != nil {
		return nil, nil, nil, "", err
	}
	return buf, compTypes, dataTypes, fb, nil
}
// extractTypes walks all parts of the flow and collects the distinct
// component (subflow) types and data types referenced by it. Components with
// a vague type are only registered once per declared name.
func extractTypes(flow data.Flow) (compTypes []data.Type, dataTypes []data.Type) {
	dataMap := make(map[string]data.Type)
	compMap := make(map[string]data.Type)
	compNames := make(map[string]bool)
	for _, partLine := range flow.Parts {
		for _, part := range partLine {
			switch p := part.(type) {
			case data.Arrow:
				// data types travel on arrows
				dataMap = addTypes(dataMap, p.Data)
			case data.Component:
				// check component, plugins, ...
				if !p.Decl.VagueType || !compNames[p.Decl.Name] {
					compMap = addType(compMap, p.Decl.Type)
					compNames[p.Decl.Name] = true
				}
				for _, plugin := range p.Plugins {
					compMap = addPluginTypes(compMap, compNames, plugin.Types)
				}
			}
		}
	}
	return valuesOf(compMap), valuesOf(dataMap)
}
// valuesOf returns all values of the given type map as a slice (in map
// iteration order, i.e. non-deterministic).
func valuesOf(typeMap map[string]data.Type) []data.Type {
	result := make([]data.Type, 0, len(typeMap))
	for _, typ := range typeMap {
		result = append(result, typ)
	}
	return result
}
// addPluginTypes adds all given plugin types to compMap, skipping local
// (package-less) types that shadow an already known component name.
func addPluginTypes(compMap map[string]data.Type, compNames map[string]bool, types []data.Type,
) map[string]data.Type {
	for _, t := range types {
		if t.Package == "" && compNames[t.LocalType] {
			continue
		}
		compMap = addType(compMap, t)
	}
	return compMap
}

// addTypes adds every type of the given slice to typeMap.
func addTypes(typeMap map[string]data.Type, types []data.Type) map[string]data.Type {
	for _, typ := range types {
		typeMap = addType(typeMap, typ)
	}
	return typeMap
}
// addType registers typ in typeMap keyed by its package-qualified name.
// Container types (lists and maps) are unwrapped recursively so that only
// element types end up in the map.
func addType(typeMap map[string]data.Type, typ data.Type) map[string]data.Type {
	switch {
	case typ.ListType != nil:
		return addType(typeMap, *typ.ListType)
	case typ.MapKeyType != nil:
		typeMap = addType(typeMap, *typ.MapKeyType)
		return addType(typeMap, *typ.MapValueType)
	default:
		typeMap[typToString(typ)] = typ
		return typeMap
	}
}
func typToString(t data.Type) string {
return t.Package + "." + t.LocalType
} | converter.go | 0.617859 | 0.474205 | converter.go | starcoder |
package decision_tree
import (
"sort"
"math"
)
// zips two values and allows sorting based on value
// zipColumn pairs one feature value with its response (label) so the two can
// be sorted together.
type zipColumn struct {
	Value    float64
	Response float64
}

// zipColumnSortable implements sort.Interface over a slice of zipColumn,
// ordering by feature value ascending.
type zipColumnSortable []zipColumn

func (a zipColumnSortable) Len() int           { return len(a) }
func (a zipColumnSortable) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a zipColumnSortable) Less(i, j int) bool { return a[i].Value < a[j].Value }

// splitGINI finds the best split point for one feature column of a binary
// classification dataset according to the GINI impurity criterion.
//
// With P(t) the probability of a sample landing in subtree t and P(1|t) the
// probability of a positive sample within t:
//
//	G(t)  = 1 - P(1|t)^2 - P(0|t)^2
//	GINI  = P(t_l)*G(t_l) + P(t_r)*G(t_r)
//
// where t_l and t_r are the left and right subtrees of a candidate split.
//
// X is the training dataset, y the training responses (0/1), and index the
// column to split by. It returns the minimum GINI impurity found and the
// feature value at which to split.
func splitGINI(X [][]float64, y []float64, index int) (float64, float64) {
	n := len(X)

	// Pair each sample's feature value with its response and count the
	// total number of positives along the way.
	pairs := make([]zipColumn, n)
	var totalPositive float64
	for row := range X {
		pairs[row] = zipColumn{X[row][index], y[row]}
		totalPositive += y[row]
	}

	// Order candidates by feature value so split points can be swept
	// left-to-right with running counts.
	sort.Sort(zipColumnSortable(pairs))

	var positivesLeft float64
	bestGini := 1.1 // GINI impurity never exceeds 0.5, so any real split beats this
	bestSplit := pairs[0].Value

	// Sweep every candidate split point, tracking the minimum GINI.
	for i := 1; i < n; i++ {
		positivesLeft += pairs[i-1].Response

		// A split between two equal feature values is meaningless; skip it.
		if pairs[i].Value == pairs[i-1].Value {
			continue
		}

		// P(1|t_l) and P(0|t_l) from the running positive count.
		pPosLeft := positivesLeft / float64(i)
		pNegLeft := 1 - pPosLeft

		// P(1|t_r) and P(0|t_r) follow from the left-side counts.
		positivesRight := totalPositive - positivesLeft
		pPosRight := positivesRight / float64(n-i)
		pNegRight := 1 - pPosRight

		// G(t_l) and G(t_r).
		giniLeft := 1 - math.Pow(pPosLeft, 2) - math.Pow(pNegLeft, 2)
		giniRight := 1 - math.Pow(pPosRight, 2) - math.Pow(pNegRight, 2)

		// P(t_l) and P(t_r).
		probLeft := float64(i) / float64(n)
		probRight := 1 - probLeft

		// Weighted GINI of this split; keep the best one seen so far.
		gini := probLeft*giniLeft + probRight*giniRight
		if gini < bestGini {
			bestGini = gini
			bestSplit = pairs[i].Value
		}
	}

	return bestGini, bestSplit
}
package grafo
import (
"bytes"
"fmt"
"github.com/jrecuero/go-cli/tools"
)
// Grafo represents ...
type Grafo struct {
id Ider
Label string
root IVertex
anchor IVertex
path *Path
vertices map[Ider]IVertex
}
// GetRoot is ...
func (grafo *Grafo) GetRoot() IVertex {
return grafo.root
}
// GetAnchor is ...
func (grafo *Grafo) GetAnchor() IVertex {
return grafo.anchor
}
// GetVertices is ...
func (grafo *Grafo) GetVertices() map[Ider]IVertex {
return grafo.vertices
}
// AddEdge adds the given edge to the given parent. If parent is nil, use
// the grafo root vertex. Parent attribute in the Child vertex is set properly.
// The parent must already be hooked into the grafo; on success the edge's
// child becomes hooked and is registered in the vertex index.
func (grafo *Grafo) AddEdge(parent IVertex, edge IEdge) error {
	if parent == nil {
		parent = grafo.GetRoot()
		// NOTE(review): the edge's parent is only set here, in the nil
		// branch; an explicitly passed parent is assumed to already be the
		// edge's parent — confirm callers guarantee this.
		edge.SetParent(parent)
	}
	if !parent.GetHooked() {
		return tools.ERROR(nil, false, "Parent not found in grafo: %#v\n", parent)
	}
	if err := parent.AddEdge(edge); err != nil {
		return err
	}
	child := edge.GetChild()
	if err := child.AddParent(parent); err != nil {
		return err
	}
	child.SetHooked(true)
	grafo.vertices[child.GetID()] = child
	return nil
}
// AddVertex hooks the given child into the grafo via a static edge from the
// given parent (the root when parent is nil).
func (grafo *Grafo) AddVertex(parent IVertex, child IVertex) error {
	if parent == nil {
		parent = grafo.GetRoot()
	}
	return grafo.AddEdge(parent, StaticEdge(parent, child))
}
// ExistPathTo reports whether there is an edge from the given anchor to the
// given destination vertex, returning that edge when found. A nil anchor
// defaults to the grafo anchor.
func (grafo *Grafo) ExistPathTo(anchor IVertex, dest IVertex) (IEdge, bool) {
	if anchor == nil {
		anchor = grafo.anchor
	}
	for _, edge := range anchor.GetEdges() {
		if edge.GetChild() != dest {
			continue
		}
		return edge, true
	}
	return nil, false
}

// IsPathTo reports whether there is an edge from the given anchor to the
// given destination vertex AND that edge's check passes for the given
// params. A nil anchor defaults to the grafo anchor.
func (grafo *Grafo) IsPathTo(anchor IVertex, dest IVertex, params ...interface{}) (IEdge, bool) {
	if anchor == nil {
		anchor = grafo.anchor
	}
	edge, found := grafo.ExistPathTo(anchor, dest)
	if !found {
		return nil, false
	}
	if _, ok := edge.Check(params...); !ok {
		return nil, false
	}
	return edge, true
}
// PathsFrom returns the child vertices of all edges leaving the given anchor
// whose check passes for the given params. A nil anchor defaults to the
// grafo anchor.
func (grafo *Grafo) PathsFrom(anchor IVertex, params ...interface{}) []IVertex {
	if anchor == nil {
		anchor = grafo.anchor
	}

	var children []IVertex
	for _, edge := range anchor.GetEdges() {
		if _, ok := edge.Check(params...); ok {
			children = append(children, edge.GetChild())
		}
	}
	return children
}
// setAnchor moves the anchor to the given vertex and returns the new anchor.
func (grafo *Grafo) setAnchor(anchor IVertex) IVertex {
	grafo.anchor = anchor
	return grafo.GetAnchor()
}

// AddVtoV adds a edge to the grafo traverse. The edge's parent (defaulting
// to the root when nil) must equal the current anchor; on success the anchor
// moves to the edge's child and the edge is appended to the traverse path.
func (grafo *Grafo) AddVtoV(edge IEdge) error {
	if edge.GetParent() == nil {
		edge.SetParent(grafo.GetRoot())
	}
	if grafo.GetAnchor() != edge.GetParent() {
		return tools.ERROR(nil, false, "parent is not the anchor: %#v\n", edge.GetParent())
	}
	grafo.setAnchor(edge.GetChild())
	grafo.path.Edges = append(grafo.path.Edges, edge)
	return nil
}
// SetAnchorTo moves the anchor to the destination vertex and adds the edge to
// the grafo traverse. It returns the new anchor, or nil when there is no
// edge from the current anchor to dest or recording the traverse step fails.
func (grafo *Grafo) SetAnchorTo(dest IVertex) IVertex {
	for _, edge := range grafo.anchor.GetEdges() {
		if edge.GetChild() == dest {
			if err := grafo.AddVtoV(edge); err != nil {
				return nil
			}
			return grafo.GetAnchor()
		}
	}
	return nil
}
// ToMermaid renders the grafo as a mermaid "graph LR" diagram, one fragment
// per edge. Vertex iteration follows map order and is thus non-deterministic.
func (grafo *Grafo) ToMermaid() string {
	var out bytes.Buffer
	out.WriteString("graph LR\n")
	for _, vertex := range grafo.GetVertices() {
		for _, edge := range vertex.GetEdges() {
			out.WriteString(edge.ToMermaid())
		}
	}
	return out.String()
}
// NewGrafo is ...
func NewGrafo(label string) *Grafo {
root := NewVertex("root/0")
root.SetHooked(true)
grafo := &Grafo{
id: nextIder(),
Label: label,
root: root,
path: NewPath(fmt.Sprintf("%s/path", label)),
vertices: make(map[Ider]IVertex),
}
grafo.anchor = grafo.GetRoot()
return grafo
} | grafo/grafo.go | 0.719975 | 0.414484 | grafo.go | starcoder |
package inverted
// DeltaMerger can be used to condense the number of single writes into one big
// one. Additionally it removes overlaps between additions and deletions. It is
// meant to be used in batch situations, where 5 ref objects in a row might each
// increase the doc count by one. Instead of writing 5 additions and 4
// deletions, this can be condensed to write just one addition.
type DeltaMerger struct {
	additions propsByName
	deletions propsByName
}

// NewDeltaMerger returns a ready-to-use DeltaMerger with empty addition and
// deletion sets.
func NewDeltaMerger() *DeltaMerger {
	return &DeltaMerger{
		additions: propsByName{},
		deletions: propsByName{},
	}
}

// AddAdditions registers all given properties (and their items) as pending
// additions for the given document id.
func (dm *DeltaMerger) AddAdditions(props []Property, docID uint64) {
	for _, prop := range props {
		storedProp := dm.additions.getOrCreate(prop.Name)
		storedProp.hasFrequency = prop.HasFrequency
		for _, item := range prop.Items {
			storedItem := storedProp.getOrCreateItem(item.Data)
			storedItem.addDocIDAndFrequency(docID, item.TermFrequency)
		}
	}
}
// AddDeletions registers all given properties as pending deletions for the
// given document id. If a matching addition for the same doc id is already
// pending, the two cancel out: the addition is dropped and no explicit
// deletion is recorded.
func (dm *DeltaMerger) AddDeletions(props []Property, docID uint64) {
	for _, prop := range props {
		additionProp := dm.additions.getOrCreate(prop.Name)
		for _, item := range prop.Items {
			additionItem := additionProp.getOrCreateItem(item.Data)
			ok := additionItem.deleteIfPresent(docID)
			if ok {
				// we are done with this prop, no need to register an explicit deletion
				continue
			}
			// this was not added by us, we need to remove it
			deletionItem := dm.deletions.getOrCreate(prop.Name).getOrCreateItem(item.Data)
			deletionItem.addDocIDAndFrequency(docID, 0) // frequency does not matter on deletion
		}
	}
}
// Merge resolves all registered additions and deletions into a single result
// free of overlaps between the two.
func (dm *DeltaMerger) Merge() DeltaMergeResult {
	return DeltaMergeResult{
		Additions: dm.additions.merge(),
		Deletions: dm.deletions.merge(),
	}
}

// DeltaMergeResult is the condensed outcome of a DeltaMerger run.
type DeltaMergeResult struct {
	Additions []MergeProperty
	Deletions []MergeProperty
}

// MergeProperty groups all merged items of one inverted-index property.
type MergeProperty struct {
	Name         string
	HasFrequency bool
	MergeItems   []MergeItem
}

// MergeItem is one inverted-index value together with all doc ids (and
// frequencies) affected by the merge.
type MergeItem struct {
	Data   []byte
	DocIDs []MergeDocIDWithFrequency
}

// IDs is meant for cases such as deletion, where the frequency is irrelevant,
// but the expected format is a []docID.
func (mi MergeItem) IDs() []uint64 {
	out := make([]uint64, len(mi.DocIDs))
	for i, tuple := range mi.DocIDs {
		out[i] = tuple.DocID
	}
	return out
}

// Countable converts the merge item to a regular (non-merge) Countable. Note
// that this loses the IDs and Frequency information, so IDs have to be passed
// separately using .IDs().
func (mi MergeItem) Countable() Countable {
	return Countable{
		Data: mi.Data,
	}
}

// MergeDocIDWithFrequency pairs a doc id with its term frequency.
type MergeDocIDWithFrequency struct {
	DocID     uint64
	Frequency float32
}
// propsByName indexes in-progress property aggregations by property name.
type propsByName map[string]*propWithDocIDs

// getOrCreate returns the aggregation for the given property name, creating
// and registering an empty one if none exists yet.
func (pbn propsByName) getOrCreate(name string) *propWithDocIDs {
	prop, ok := pbn[name]
	if ok {
		return prop
	}
	prop = &propWithDocIDs{name: name, items: map[string]*countableWithDocIDs{}}
	pbn[name] = prop
	return prop
}

// merge flattens all aggregated properties into MergeProperty values,
// skipping properties whose items all canceled out. It returns nil when
// nothing remains.
func (pbn propsByName) merge() []MergeProperty {
	out := make([]MergeProperty, len(pbn))
	i := 0
	for _, prop := range pbn {
		mergedProp := prop.merge()
		if mergedProp == nil {
			// every item of this prop canceled out; nothing to write
			continue
		}
		out[i] = *mergedProp
		i++
	}
	if i == 0 {
		return nil
	}
	// trim the slots left empty by skipped properties
	return out[:i]
}
// propWithDocIDs aggregates, per property, all inverted-index items pending
// for a write, keyed by the item's value.
type propWithDocIDs struct {
	name         string
	items        map[string]*countableWithDocIDs
	hasFrequency bool
}

// getOrCreateItem returns the aggregation for the given item value, creating
// and registering an empty one if none exists yet.
func (pwd *propWithDocIDs) getOrCreateItem(data []byte) *countableWithDocIDs {
	name := string(data)
	item, ok := pwd.items[name]
	if ok {
		return item
	}
	item = &countableWithDocIDs{
		value:  data,
		docIDs: map[uint64]float32{},
	}
	pwd.items[name] = item
	return item
}

// merge flattens all aggregated items into a MergeProperty, skipping items
// whose doc ids all canceled out. It returns nil when nothing remains.
func (pwd *propWithDocIDs) merge() *MergeProperty {
	items := make([]MergeItem, len(pwd.items))
	i := 0
	for _, item := range pwd.items {
		mergedItem := item.merge()
		if mergedItem == nil {
			continue
		}
		items[i] = *mergedItem
		i++
	}
	if i == 0 {
		return nil
	}
	return &MergeProperty{
		Name:         pwd.name,
		HasFrequency: pwd.hasFrequency,
		// trim the slots left empty by skipped items
		MergeItems: items[:i],
	}
}
// countableWithDocIDs aggregates, per inverted-index value, the doc ids (and
// their term frequencies) pending for a write.
type countableWithDocIDs struct {
	value  []byte
	docIDs map[uint64]float32 // map[docid]frequency
}

// addDocIDAndFrequency registers (or overwrites) the frequency for docID.
func (cwd *countableWithDocIDs) addDocIDAndFrequency(docID uint64, freq float32) {
	cwd.docIDs[docID] = freq
}

// deleteIfPresent removes docID if it is registered and reports whether a
// removal took place.
func (cwd *countableWithDocIDs) deleteIfPresent(docID uint64) bool {
	_, ok := cwd.docIDs[docID]
	if !ok {
		return false
	}
	delete(cwd.docIDs, docID)
	return true
}
func (cwd *countableWithDocIDs) merge() *MergeItem {
if len(cwd.docIDs) == 0 {
return nil
}
ids := make([]MergeDocIDWithFrequency, len(cwd.docIDs))
i := 0
for docID, freq := range cwd.docIDs {
ids[i] = MergeDocIDWithFrequency{DocID: docID, Frequency: freq}
i++
}
return &MergeItem{
Data: cwd.value,
DocIDs: ids,
}
} | adapters/repos/db/inverted/delta_merger.go | 0.689515 | 0.4881 | delta_merger.go | starcoder |
package ent
import (
"AppFactory/internal/data/ent/student"
"fmt"
"strings"
"entgo.io/ent/dialect/sql"
)
// Student is the model entity for the Student schema.
// NOTE(review): this file follows the shape of ent's generated entity code;
// if it is generated, manual edits will be overwritten on regeneration —
// confirm before changing anything beyond the schema.
type Student struct {
	config `json:"-"`
	// ID of the ent.
	ID int64 `json:"id,omitempty"`
	// ExamNum holds the value of the "exam_num" field.
	ExamNum string `json:"exam_num,omitempty"`
	// StudentName holds the value of the "student_name" field.
	StudentName string `json:"student_name,omitempty"`
	// ClassName holds the value of the "class_name" field.
	ClassName string `json:"class_name,omitempty"`
	// ChineseScore holds the value of the "chinese_score" field.
	ChineseScore string `json:"chinese_score,omitempty"`
	// MathScore holds the value of the "math_score" field.
	MathScore string `json:"math_score,omitempty"`
	// EnglishScore holds the value of the "english_score" field.
	EnglishScore string `json:"english_score,omitempty"`
	// TotalScore holds the value of the "total_score" field.
	TotalScore string `json:"total_score,omitempty"`
	// ClassRate holds the value of the "class_rate" field.
	ClassRate string `json:"class_rate,omitempty"`
	// SchoolRate holds the value of the "school_rate" field.
	SchoolRate string `json:"school_rate,omitempty"`
	// StepRank holds the value of the "step_rank" field.
	StepRank string `json:"step_rank,omitempty"`
	// UploadDate holds the value of the "upload_date" field.
	UploadDate string `json:"upload_date,omitempty"`
	// IsDeleted holds the value of the "is_deleted" field.
	IsDeleted string `json:"is_deleted,omitempty"`
	// DeleteTime holds the value of the "delete_time" field.
	DeleteTime string `json:"delete_time,omitempty"`
}
// scanValues returns the types for scanning values from sql.Rows.
// The returned slice is positionally aligned with columns: an *sql.NullInt64
// for the id column and an *sql.NullString for every other known field.
func (*Student) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case student.FieldID:
			values[i] = &sql.NullInt64{}
		case student.FieldExamNum, student.FieldStudentName, student.FieldClassName, student.FieldChineseScore, student.FieldMathScore, student.FieldEnglishScore, student.FieldTotalScore, student.FieldClassRate, student.FieldSchoolRate, student.FieldStepRank, student.FieldUploadDate, student.FieldIsDeleted, student.FieldDeleteTime:
			values[i] = &sql.NullString{}
		default:
			return nil, fmt.Errorf("unexpected column %q for type Student", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Student fields. values must be positionally aligned with columns, as
// produced by scanValues. Unknown columns are silently ignored.
func (s *Student) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case student.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			s.ID = int64(value.Int64)
		case student.FieldExamNum:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field exam_num", values[i])
			} else if value.Valid {
				s.ExamNum = value.String
			}
		case student.FieldStudentName:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field student_name", values[i])
			} else if value.Valid {
				s.StudentName = value.String
			}
		case student.FieldClassName:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field class_name", values[i])
			} else if value.Valid {
				s.ClassName = value.String
			}
		case student.FieldChineseScore:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field chinese_score", values[i])
			} else if value.Valid {
				s.ChineseScore = value.String
			}
		case student.FieldMathScore:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field math_score", values[i])
			} else if value.Valid {
				s.MathScore = value.String
			}
		case student.FieldEnglishScore:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field english_score", values[i])
			} else if value.Valid {
				s.EnglishScore = value.String
			}
		case student.FieldTotalScore:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field total_score", values[i])
			} else if value.Valid {
				s.TotalScore = value.String
			}
		case student.FieldClassRate:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field class_rate", values[i])
			} else if value.Valid {
				s.ClassRate = value.String
			}
		case student.FieldSchoolRate:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field school_rate", values[i])
			} else if value.Valid {
				s.SchoolRate = value.String
			}
		case student.FieldStepRank:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field step_rank", values[i])
			} else if value.Valid {
				s.StepRank = value.String
			}
		case student.FieldUploadDate:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field upload_date", values[i])
			} else if value.Valid {
				s.UploadDate = value.String
			}
		case student.FieldIsDeleted:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field is_deleted", values[i])
			} else if value.Valid {
				s.IsDeleted = value.String
			}
		case student.FieldDeleteTime:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field delete_time", values[i])
			} else if value.Valid {
				s.DeleteTime = value.String
			}
		}
	}
	return nil
}
// Update returns a builder for updating this Student.
// Note that you need to call Student.Unwrap() before calling this method if this Student
// was returned from a transaction, and the transaction was committed or rolled back.
func (s *Student) Update() *StudentUpdateOne {
	return (&StudentClient{config: s.config}).UpdateOne(s)
}

// Unwrap unwraps the Student entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the Student is not a transactional entity.
func (s *Student) Unwrap() *Student {
	tx, ok := s.config.driver.(*txDriver)
	if !ok {
		panic("ent: Student is not a transactional entity")
	}
	s.config.driver = tx.drv
	return s
}
// String implements the fmt.Stringer. It renders all fields in declaration
// order as "Student(id=…, exam_num=…, …)".
func (s *Student) String() string {
	var builder strings.Builder
	builder.WriteString("Student(")
	builder.WriteString(fmt.Sprintf("id=%v", s.ID))
	builder.WriteString(", exam_num=")
	builder.WriteString(s.ExamNum)
	builder.WriteString(", student_name=")
	builder.WriteString(s.StudentName)
	builder.WriteString(", class_name=")
	builder.WriteString(s.ClassName)
	builder.WriteString(", chinese_score=")
	builder.WriteString(s.ChineseScore)
	builder.WriteString(", math_score=")
	builder.WriteString(s.MathScore)
	builder.WriteString(", english_score=")
	builder.WriteString(s.EnglishScore)
	builder.WriteString(", total_score=")
	builder.WriteString(s.TotalScore)
	builder.WriteString(", class_rate=")
	builder.WriteString(s.ClassRate)
	builder.WriteString(", school_rate=")
	builder.WriteString(s.SchoolRate)
	builder.WriteString(", step_rank=")
	builder.WriteString(s.StepRank)
	builder.WriteString(", upload_date=")
	builder.WriteString(s.UploadDate)
	builder.WriteString(", is_deleted=")
	builder.WriteString(s.IsDeleted)
	builder.WriteString(", delete_time=")
	builder.WriteString(s.DeleteTime)
	builder.WriteByte(')')
	return builder.String()
}
// Students is a parsable slice of Student.
type Students []*Student
func (s Students) config(cfg config) {
for _i := range s {
s[_i].config = cfg
}
} | internal/data/ent/student.go | 0.617051 | 0.444324 | student.go | starcoder |
// action package contains interfaces and behaviour for general-purpose "management actions" that change a model's
// decision variables based on their activation status.
package action
import (
"github.com/LindsayBradford/crem/internal/pkg/model/planningunit"
)
// ManagementActionType identifies a set of management actions that make the same kind of change to relevant
// model decision variables.
type ManagementActionType string

// String returns the plain string form of the management action type.
func (t ManagementActionType) String() string {
	return string(t)
}

// ModelVariableName identifies a particular model variable value that a management action modifies when its
// activation status changes.
type ModelVariableName string

// ManagementAction defines a general interface for the implementation of management actions.
type ManagementAction interface {
	// PlanningUnit returns the identifier of the planning unit in which management action is spatially located.
	PlanningUnit() planningunit.Id

	// Type identifies the ManagementActionType of a particular management action.
	Type() ManagementActionType

	// IsActive reports whether a management action is active (true) or not (false).
	IsActive() bool

	// ModelVariableValue reports the value of the model variable with the given name stored with the
	// management action.
	ModelVariableValue(variableName ModelVariableName) float64

	// Subscribe allows a number of implementations of Observer to subscribe for updates (changes in activation state).
	Subscribe(observers ...Observer)

	// InitialisingActivation activates an inactive management action, triggering observer
	// ObserveActionInitialising callbacks.
	InitialisingActivation()

	// InitialisingDeactivation deactivates an active management action, triggering observer
	// ObserveActionInitialising callbacks.
	InitialisingDeactivation()

	// ToggleActivation activates an inactive ManagementAction and vice-versa,
	// triggering observer ObserveAction callbacks.
	ToggleActivation()

	// ToggleActivationUnobserved activates an inactive ManagementAction and vice-versa, without triggering any
	// observer callbacks. Expected to be called when undoing a change that observers shouldn't react to.
	ToggleActivationUnobserved()

	// SetActivation sets the activation state of the ManagementAction to the value supplied.
	SetActivation(value bool)

	// SetActivationUnobserved sets the activation state of the ManagementAction to the value supplied, without
	// triggering any observer callbacks. Expected to be called when undoing a change that observers shouldn't react to.
	SetActivationUnobserved(value bool)
}
package writer
import (
"fmt"
"github.com/jedib0t/go-pretty/v6/table"
"github.com/jedib0t/go-pretty/v6/text"
"github.com/jedib0t/go-sudoku/sudoku"
)
// RenderSamurai renders a Samurai Sudoku which is a combination of 5 Sudokus.
// grids[0] is the centre grid; grids[1] through grids[4] are the corner grids
// (top-left, top-right, bottom-left, bottom-right in the rows below). Where a
// corner grid overlaps the centre grid, the shared 3x3 sub-grid is rendered
// from the centre grid. Colours alternate dark/bright in a checkerboard.
// An error is returned when the number of supplied grids is not exactly 5.
func RenderSamurai(grids []*sudoku.Grid) (string, error) {
	if len(grids) != 5 {
		// Return the conventional empty string (not a 1-space filler) with the error;
		// the rendered output is meaningless when the error is non-nil.
		return "", fmt.Errorf("found %d interconnected grids instead of 5", len(grids))
	}
	tw := table.NewWriter()
	// Band 1: top sub-grid rows of the two upper corner grids, gap in between.
	tw.AppendRow(table.Row{
		renderSubGrid(grids[1], sudoku.Location{X: 0, Y: 0}, colorsDark),
		renderSubGrid(grids[1], sudoku.Location{X: 0, Y: 3}, colorsBright),
		renderSubGrid(grids[1], sudoku.Location{X: 0, Y: 6}, colorsDark),
		" ",
		renderSubGrid(grids[2], sudoku.Location{X: 0, Y: 0}, colorsDark),
		renderSubGrid(grids[2], sudoku.Location{X: 0, Y: 3}, colorsBright),
		renderSubGrid(grids[2], sudoku.Location{X: 0, Y: 6}, colorsDark),
	})
	tw.AppendRow(table.Row{
		renderSubGrid(grids[1], sudoku.Location{X: 3, Y: 0}, colorsBright),
		renderSubGrid(grids[1], sudoku.Location{X: 3, Y: 3}, colorsDark),
		renderSubGrid(grids[1], sudoku.Location{X: 3, Y: 6}, colorsBright),
		" ",
		renderSubGrid(grids[2], sudoku.Location{X: 3, Y: 0}, colorsBright),
		renderSubGrid(grids[2], sudoku.Location{X: 3, Y: 3}, colorsDark),
		renderSubGrid(grids[2], sudoku.Location{X: 3, Y: 6}, colorsBright),
	})
	// Band 3: bottom rows of the upper corner grids flanking the top row of the
	// centre grid; the overlap cells come from the centre grid (grids[0]).
	tw.AppendRow(table.Row{
		renderSubGrid(grids[1], sudoku.Location{X: 6, Y: 0}, colorsDark),
		renderSubGrid(grids[1], sudoku.Location{X: 6, Y: 3}, colorsBright),
		renderSubGrid(grids[0], sudoku.Location{X: 0, Y: 0}, colorsDark),
		renderSubGrid(grids[0], sudoku.Location{X: 0, Y: 3}, colorsBright),
		renderSubGrid(grids[0], sudoku.Location{X: 0, Y: 6}, colorsDark),
		renderSubGrid(grids[2], sudoku.Location{X: 6, Y: 3}, colorsBright),
		renderSubGrid(grids[2], sudoku.Location{X: 6, Y: 6}, colorsDark),
	})
	// Band 4: middle row of the centre grid only; side cells are merged gaps.
	tw.AppendRow(table.Row{
		" ",
		" ",
		renderSubGrid(grids[0], sudoku.Location{X: 3, Y: 0}, colorsBright),
		renderSubGrid(grids[0], sudoku.Location{X: 3, Y: 3}, colorsDark),
		renderSubGrid(grids[0], sudoku.Location{X: 3, Y: 6}, colorsBright),
		" ",
		" ",
	}, table.RowConfig{AutoMerge: true})
	// Band 5: top rows of the lower corner grids flanking the bottom row of the
	// centre grid; overlap cells again rendered from grids[0].
	tw.AppendRow(table.Row{
		renderSubGrid(grids[3], sudoku.Location{X: 0, Y: 0}, colorsDark),
		renderSubGrid(grids[3], sudoku.Location{X: 0, Y: 3}, colorsBright),
		renderSubGrid(grids[0], sudoku.Location{X: 6, Y: 0}, colorsDark),
		renderSubGrid(grids[0], sudoku.Location{X: 6, Y: 3}, colorsBright),
		renderSubGrid(grids[0], sudoku.Location{X: 6, Y: 6}, colorsDark),
		renderSubGrid(grids[4], sudoku.Location{X: 0, Y: 3}, colorsBright),
		renderSubGrid(grids[4], sudoku.Location{X: 0, Y: 6}, colorsDark),
	})
	tw.AppendRow(table.Row{
		renderSubGrid(grids[3], sudoku.Location{X: 3, Y: 0}, colorsBright),
		renderSubGrid(grids[3], sudoku.Location{X: 3, Y: 3}, colorsDark),
		renderSubGrid(grids[3], sudoku.Location{X: 3, Y: 6}, colorsBright),
		" ",
		renderSubGrid(grids[4], sudoku.Location{X: 3, Y: 0}, colorsBright),
		renderSubGrid(grids[4], sudoku.Location{X: 3, Y: 3}, colorsDark),
		renderSubGrid(grids[4], sudoku.Location{X: 3, Y: 6}, colorsBright),
	})
	tw.AppendRow(table.Row{
		renderSubGrid(grids[3], sudoku.Location{X: 6, Y: 0}, colorsDark),
		renderSubGrid(grids[3], sudoku.Location{X: 6, Y: 3}, colorsBright),
		renderSubGrid(grids[3], sudoku.Location{X: 6, Y: 6}, colorsDark),
		" ",
		renderSubGrid(grids[4], sudoku.Location{X: 6, Y: 0}, colorsDark),
		renderSubGrid(grids[4], sudoku.Location{X: 6, Y: 3}, colorsBright),
		renderSubGrid(grids[4], sudoku.Location{X: 6, Y: 6}, colorsDark),
	})
	// The central gap column must merge vertically across bands.
	tw.SetColumnConfigs([]table.ColumnConfig{
		{Number: 4, AutoMerge: true},
	})
	tw.SetStyle(table.StyleLight)
	tw.Style().Box.PaddingLeft = ""
	tw.Style().Box.PaddingRight = ""
	tw.Style().Options.DrawBorder = false
	// Separator lines are only drawn for the colour-less theme.
	tw.Style().Options.SeparateColumns = themeSelected == themeNone
	tw.Style().Options.SeparateRows = themeSelected == themeNone
	return tw.Render(), nil
}
func renderSubGrid(grid *sudoku.Grid, sgLoc sudoku.Location, colors text.Colors) string {
tw := table.NewWriter()
sg := grid.SubGrid(sgLoc.X, sgLoc.Y)
var row table.Row
for idx, loc := range sg.Locations {
val := grid.Get(loc.X, loc.Y)
if val == 0 {
row = append(row, colors.Sprint(" "))
} else {
row = append(row, colors.Sprintf(" %d ", val))
}
if (idx+1)%3 == 0 {
tw.AppendRow(row)
row = table.Row{}
}
}
tw.SetStyle(table.StyleLight)
tw.Style().Box.PaddingLeft = ""
tw.Style().Box.PaddingRight = ""
tw.Style().Options.DrawBorder = false
tw.Style().Options.SeparateColumns = false
return tw.Render()
} | writer/samurai_sudoku.go | 0.575111 | 0.5083 | samurai_sudoku.go | starcoder |
package monotone_convex
/*
implementation based on Hagan, P.S., and G. West. "Methods for constructing a yield curve." Wilmott Magazine, May (2008): 70-81.
*/
import (
m "../../measures"
"log"
"math"
"sort"
)
// mcInput bundles the raw inputs to the monotone convex interpolation:
// the amelioration parameter lambda and the paired term/rate nodes.
type mcInput struct {
	lambda float64 // amelioration parameter; 0 selects the unameliorated method
	terms  []m.Time
	rates  []m.Rate
}
// N returns the number of term/rate nodes supplied as input.
func (inp *mcInput) N() int {
	return len(inp.terms)
}
// TermAt returns the term at 1-based node index i; indices <= 0 map to the
// artificial time-zero node.
func (inp *mcInput) TermAt(i int) float64 {
	if i <= 0 {
		return 0
	}
	return float64(inp.terms[i-1])
}
// RateAt returns the rate at 1-based node index i; indices <= 0 map to the
// first input rate (assumes at least one rate was supplied — TODO confirm
// callers never pass empty inputs).
func (inp *mcInput) RateAt(i int) float64 {
	if i <= 0 {
		return float64(inp.rates[0])
	}
	return float64(inp.rates[i-1])
}
// initialFI holds the pre-computed state shared by all interpolation queries:
// the original inputs plus discrete forwards, node interpolant values and the
// estimated instantaneous forwards. All slices are indexed by 1-based node
// index, with index 0 reserved for the time-zero node.
type initialFI struct {
	mcInput
	fD                 []float64 // discrete forward rate over each interval (t_{j-1}, t_j]
	interpolantAtNodeD []float64 // spot rate at each node
	f                  []float64 // estimated instantaneous forward at each node
}
// SpotRateInterpolator returns a factory which, given matching term and rate
// nodes, builds a monotone convex spot-rate function. lambda is the
// amelioration parameter (0 selects the unameliorated method).
func SpotRateInterpolator(lambda float64) func(terms []m.Time, rates []m.Rate) m.SpotRate {
	return func(terms []m.Time, rates []m.Rate) m.SpotRate {
		// NOTE(review): log.Fatalf terminates the whole process on mismatched
		// input lengths; callers cannot recover from this.
		if len(terms) != len(rates) {
			log.Fatalf("must have corresponding length of terms and rates! %d != %d\n", len(terms), len(rates))
		}
		// Pre-compute node data once; the returned closure reuses it per query.
		e := estimateInitialFI(mcInput{lambda, terms, rates})
		return func(Term m.Time) m.Rate { return m.Rate(spotRate(float64(Term), e)) }
	}
}
// ForwardRateInterpolator returns a factory which, given matching term and
// rate nodes, builds a monotone convex instantaneous-forward-rate function.
// lambda is the amelioration parameter (0 selects the unameliorated method).
func ForwardRateInterpolator(lambda float64) func(terms []m.Time, rates []m.Rate) func(Term m.Time) m.Rate {
	return func(terms []m.Time, rates []m.Rate) func(Term m.Time) m.Rate {
		// NOTE(review): log.Fatalf terminates the whole process on mismatched
		// input lengths; callers cannot recover from this.
		if len(terms) != len(rates) {
			log.Fatalf("must have corresponding length of terms and rates! %d != %d\n", len(terms), len(rates))
		}
		// Pre-compute node data once; the returned closure reuses it per query.
		e := estimateInitialFI(mcInput{lambda, terms, rates})
		return func(Term m.Time) m.Rate { return m.Rate(forwardRate(float64(Term), e)) }
	}
}
// spotRate evaluates the interpolated spot (zero) rate at Term using the
// pre-computed node data in e.
func spotRate(Term float64, e initialFI) float64 {
	// 'numbering refers to Wilmott paper
	if Term <= 0 {
		// At or before time zero the spot rate collapses to the initial instantaneous forward.
		return e.f[0]
	}
	if Term > e.TermAt(e.N()) {
		// Beyond the last node: extrapolate with the terminal forward rate held flat.
		return spotRate(e.TermAt(e.N()), e)*e.TermAt(e.N())/Term + forwardRate(e.TermAt(e.N()), e)*(1-e.TermAt(e.N())/Term)
	}
	i, x, g0, g1 := initialInterpolators(e, Term)
	G := adjustedGIntegrated(x, g0, g1)
	//'(12)
	return 1 / Term * (e.TermAt(i)*e.interpolantAtNodeD[i] + (Term-e.TermAt(i))*e.fD[i+1] + (e.TermAt(i+1)-e.TermAt(i))*G)
}
// initialInterpolators locates the node interval containing Term and returns
// its index i, the normalised position x of Term within the interval, and the
// basis-function end values g0 and g1 (instantaneous forwards at the interval
// ends, net of the interval's discrete forward fD[i+1]).
func initialInterpolators(e initialFI, Term float64) (i int, x float64, g0 float64, g1 float64) {
	i = e.lastTermIndexBefore(Term)
	// 'the x in (25)
	x = (Term - e.TermAt(i)) / (e.TermAt(i+1) - e.TermAt(i))
	g0 = e.f[i] - e.fD[i+1]
	g1 = e.f[i+1] - e.fD[i+1]
	return
}
// adjustedGIntegrated returns the integral over [0, x] of the adjusted basis
// function g (see adjustedG), split into the four monotonicity-preserving
// zones of the Hagan & West paper. g0 and g1 are the values of g at the
// interval end points; x is the normalised position within the interval.
func adjustedGIntegrated(x float64, g0 float64, g1 float64) float64 {
	if x == 0 || x == 1 {
		// Integral contribution vanishes at the interval boundaries.
		return 0
	} else if (g0 < 0 && -0.5*g0 <= g1 && g1 <= -2*g0) || (g0 > 0 && -0.5*g0 >= g1 && g1 >= -2*g0) {
		// 'zone (i)
		return g0*(x-2*math.Pow(x, 2)+math.Pow(x, 3)) + g1*(-math.Pow(x, 2)+math.Pow(x, 3))
	} else if (g0 < 0 && g1 > -2*g0) || (g0 > 0 && g1 < -2*g0) {
		//'zone (ii)
		// '(29)
		eta := (g1 + 2*g0) / (g1 - g0)
		// '(28)
		if x <= eta {
			return g0 * x
		} else {
			return g0*x + (g1-g0)*math.Pow(x-eta, 3)/math.Pow(1-eta, 2)/3
		}
	} else if (g0 > 0 && 0 > g1 && g1 > -0.5*g0) || (g0 < 0 && 0 < g1 && g1 < -0.5*g0) {
		// 'zone (iii)
		// '(31)
		eta := 3 * g1 / (g1 - g0)
		//'(30)
		if x < eta {
			return g1*x - 1.0/3.0*(g0-g1)*(math.Pow(eta-x, 3)/math.Pow(eta, 2)-eta)
		} else {
			return (2.0/3.0*g1+1.0/3.0*g0)*eta + g1*(x-eta)
		}
	} else if g0 == 0 || g1 == 0 {
		return 0
	} else {
		// 'zone (iv)
		// '(33)
		eta := g1 / (g1 + g0)
		// '(34)
		A := -g0 * g1 / (g0 + g1)
		// '(32)
		if x <= eta {
			return A*x - 1.0/3.0*(g0-A)*(math.Pow(eta-x, 3)/math.Pow(eta, 2)-eta)
		} else {
			return (2.0/3.0*A+1.0/3.0*g0)*eta + A*(x-eta) + (g1-A)/3*math.Pow(x-eta, 3)/math.Pow(1-eta, 2)
		}
	}
}
// forwardRate evaluates the interpolated instantaneous forward rate at Term
// using the pre-computed node data in e.
func forwardRate(Term float64, e initialFI) float64 {
	// 'numbering refers to Wilmott paper
	if Term <= 0 {
		// At or before time zero the forward equals the initial instantaneous forward.
		return e.f[0]
	}
	if Term > e.TermAt(e.N()) {
		// Beyond the last node the forward is held flat at its terminal value.
		return forwardRate(e.TermAt(e.N()), e)
	}
	i, x, g0, g1 := initialInterpolators(e, Term)
	G := adjustedG(x, g0, g1)
	// '(26)
	return G + e.fD[i+1]
}
// adjustedG evaluates the adjusted basis function g at normalised position x,
// split into the four monotonicity-preserving zones of the Hagan & West
// paper. g0 and g1 are the values of g at the interval end points.
func adjustedG(x float64, g0 float64, g1 float64) float64 {
	if x == 0 {
		return g0
	} else if x == 1 {
		return g1
	} else if (g0 < 0 && -0.5*g0 <= g1 && g1 <= -2*g0) || (g0 > 0 && -0.5*g0 >= g1 && g1 >= -2*g0) {
		// 'zone (i)
		return g0*(1-4*x+3*math.Pow(x, 2)) + g1*(-2*x+3*math.Pow(x, 2))
	} else if (g0 < 0 && g1 > -2*g0) || (g0 > 0 && g1 < -2*g0) {
		// 'zone (ii)
		// '(29)
		eta := (g1 + 2*g0) / (g1 - g0)
		// '(28)
		if x <= eta {
			return g0
		} else {
			return g0 + (g1-g0)*math.Pow((x-eta)/(1-eta), 2)
		}
	} else if (g0 > 0 && 0 > g1 && g1 > -0.5*g0) || (g0 < 0 && 0 < g1 && g1 < -0.5*g0) {
		// 'zone (iii)
		// '(31)
		eta := 3 * g1 / (g1 - g0)
		// '(30)
		if x < eta {
			return g1 + (g0-g1)*math.Pow((eta-x)/eta, 2)
		} else {
			return g1
		}
	} else if g0 == 0 || g1 == 0 {
		return 0
	} else {
		// 'zone (iv)
		// '(33)
		eta := g1 / (g1 + g0)
		// '(34)
		A := -g0 * g1 / (g0 + g1)
		// '(32)
		if x <= eta {
			return A + (g0-A)*math.Pow((eta-x)/eta, 2)
		} else {
			return A + (g1-A)*math.Pow((eta-x)/(1-eta), 2)
		}
	}
}
// bound clamps Variable so the result never exceeds Maximum nor falls below
// Minimum: first the upper cap is applied, then the lower floor.
func bound(Minimum float64, Variable float64, Maximum float64) float64 {
	capped := math.Min(Variable, Maximum)
	return math.Max(Minimum, capped)
}
// lastTermIndexBefore returns the number of stored terms strictly smaller
// than Term (via binary search). In the 1-based node numbering used by
// TermAt, the returned i identifies the interval with
// TermAt(i) < Term <= TermAt(i+1) for in-range terms.
func (e *initialFI) lastTermIndexBefore(Term float64) int {
	return sort.Search(len(e.terms), func(i int) bool { return float64(e.terms[i]) >= Term })
}
// estimateInitialFI performs the set-up phase: step 1 computes the discrete
// forward rate over each node interval and records the input rates as the
// interpolant values at the nodes; the instantaneous forwards f are then
// estimated with the unameliorated (lambda == 0) or ameliorated variant.
func estimateInitialFI(inp mcInput) initialFI {
	fD := make([]float64, inp.N()+1)
	interpolantAtNodeD := make([]float64, inp.N()+1)
	// 'step 1
	for j := 1; j < inp.N()+1; j++ {
		// Discrete forward over (t_{j-1}, t_j]: difference of term-weighted spot rates.
		fD[j] = (inp.TermAt(j)*inp.RateAt(j) - inp.TermAt(j-1)*inp.RateAt(j-1)) / (inp.TermAt(j) - inp.TermAt(j-1))
		interpolantAtNodeD[j] = inp.RateAt(j)
	}
	var f []float64
	if inp.lambda == 0 {
		f = estimateInitialFIunameliorated(inp, fD)
	} else {
		f = estimateInitialFIameliorated(inp, fD)
	}
	return initialFI{mcInput: inp, fD: fD, interpolantAtNodeD: interpolantAtNodeD, f: f}
}
// estimateInitialFIunameliorated estimates the instantaneous forwards f at
// the nodes from the discrete forwards fD using the basic (unameliorated)
// method: interior nodes interpolate the adjacent discrete forwards, end
// nodes are extrapolated, and everything is clamped to preserve positivity.
func estimateInitialFIunameliorated(inp mcInput, fD []float64) []float64 {
	f := make([]float64, inp.N()+1)
	// 'f_i estimation under the unameliorated method
	// 'numbering refers to Wilmott paper
	// 'step 2
	// '(22)
	for j := 1; j < inp.N(); j++ {
		// Length-weighted average of the discrete forwards either side of node j.
		f[j] = (inp.TermAt(j)-inp.TermAt(j-1))/(inp.TermAt(j+1)-inp.TermAt(j-1))*fD[j+1] +
			(inp.TermAt(j+1)-inp.TermAt(j))/(inp.TermAt(j+1)-inp.TermAt(j-1))*fD[j]
	}
	// '(23)
	f[0] = fD[1] - 0.5*(f[1]-fD[1])
	// '(24)
	f[inp.N()] = fD[inp.N()] - 0.5*(f[inp.N()-1]-fD[inp.N()])
	// 'step 3
	// Clamp each forward into [0, 2 * neighbouring discrete forward(s)].
	f[0] = bound(0, f[0], 2*fD[1])
	for j := 1; j < inp.N(); j++ {
		f[j] = bound(0, f[j], 2*math.Min(fD[j], fD[j+1]))
	}
	f[inp.N()] = bound(0, f[inp.N()], 2*fD[inp.N()])
	return f
}
// estimateInitialFIameliorated estimates the instantaneous forwards f at the
// nodes using the ameliorated method, which smooths the estimates subject to
// min/max envelopes built from one-sided slope terms Theta. Equation
// numbering in the comments refers to the AMF paper. Two artificial "false"
// nodes are appended before the first and after the last real node (eqs. 72
// and 73) so the interior formulas apply uniformly at the boundaries.
func estimateInitialFIameliorated(inp mcInput, fD []float64) []float64 {
	N := inp.N()
	fdiscrete := make([]float64, N+2)
	copy(fdiscrete, fD)
	//'f_i estimation under the ameliorated method
	//'numbering refers to AMF paper
	Theta := make([][]float64, 3)
	for i := range Theta {
		Theta[i] = make([]float64, N+3)
	}
	// fminmax[0][.] holds minima, fminmax[2][.] holds maxima and
	// fminmax[1][0] holds the working estimate of f at each node.
	fminmax := make([][][]float64, 3)
	for i := range fminmax {
		fminmax[i] = make([][]float64, 3)
		for j := range fminmax[i] {
			fminmax[i][j] = make([]float64, inp.N()+1)
		}
	}
	// Terms shifted by one with artificial boundary nodes at either end.
	dfalseTerms := make([]float64, N+3)
	for i := 0; i <= N; i++ {
		dfalseTerms[i+1] = inp.TermAt(i)
	}
	//'(72) and (73)
	dfalseTerms[0] = -dfalseTerms[2]
	dfalseTerms[N+2] = 2*dfalseTerms[N+1] - dfalseTerms[N]
	fdiscrete[0] = fdiscrete[1] - (dfalseTerms[2]-dfalseTerms[1])/(dfalseTerms[2+1]-dfalseTerms[1])*(fdiscrete[2]-fdiscrete[1])
	fdiscrete[N+1] = fdiscrete[N] + (dfalseTerms[N+1]-dfalseTerms[N])/(dfalseTerms[N+1]-dfalseTerms[N-1])*(fdiscrete[N]-fdiscrete[N-1])
	//'(74)
	for j := 0; j <= N; j++ {
		fminmax[1][0][j] =
			(dfalseTerms[j+1]-dfalseTerms[j])/(dfalseTerms[j+2]-dfalseTerms[j])*fdiscrete[j+1] +
				(dfalseTerms[j+2]-dfalseTerms[j+1])/(dfalseTerms[j+2]-dfalseTerms[j])*fdiscrete[j]
	}
	//'[68)
	for j := 1; j <= N+1; j++ {
		Theta[0][j+1] = (dfalseTerms[j+1] - dfalseTerms[j]) / (dfalseTerms[j+1] - dfalseTerms[j-1]) * (fdiscrete[j] - fdiscrete[j-1])
	}
	//'(71)
	for j := 0; j <= N; j++ {
		Theta[2][j] = (dfalseTerms[j+1] - dfalseTerms[j]) / (dfalseTerms[j-1+2+1] - dfalseTerms[j]) * (fdiscrete[j-1+2] - fdiscrete[j])
	}
	//'(67)
	// Backward-looking envelope, split by the local monotonicity pattern.
	for j := 1; j <= N; j++ {
		if fdiscrete[j-1] < fdiscrete[j] && fdiscrete[j] <= fdiscrete[j+1] {
			fminmax[0][1][j] = math.Min(fdiscrete[j]+0.5*Theta[0][j+1], fdiscrete[j+1])
			fminmax[2][1][j] = math.Min(fdiscrete[j]+2*Theta[0][j+1], fdiscrete[j+1])
		} else if fdiscrete[j-1] < fdiscrete[j] && fdiscrete[j] > fdiscrete[j+1] {
			fminmax[0][1][j] = math.Max(fdiscrete[j]-0.5*inp.lambda*Theta[0][j+1], fdiscrete[j+1])
			fminmax[2][1][j] = fdiscrete[j]
		} else if fdiscrete[j-1] >= fdiscrete[j] && fdiscrete[j] <= fdiscrete[j+1] {
			fminmax[0][1][j] = fdiscrete[j]
			fminmax[2][1][j] = math.Min(fdiscrete[j]-0.5*inp.lambda*Theta[0][j+1], fdiscrete[j+1])
		} else if fdiscrete[j-1] >= fdiscrete[j] && fdiscrete[j] > fdiscrete[j+1] {
			fminmax[0][1][j] = math.Max(fdiscrete[j]+2*Theta[0][j+1], fdiscrete[j+1])
			fminmax[2][1][j] = math.Max(fdiscrete[j]+0.5*Theta[0][j+1], fdiscrete[j+1])
		}
	}
	//'(70)
	// Forward-looking envelope, split by the local monotonicity pattern.
	for j := 0; j <= N-1; j++ {
		if fdiscrete[j] < fdiscrete[j+1] && fdiscrete[j+1] <= fdiscrete[j+2] {
			fminmax[0][2][j] = math.Max(fdiscrete[j+1]-2*Theta[2][j+1], fdiscrete[j])
			fminmax[2][2][j] = math.Max(fdiscrete[j+1]-0.5*Theta[2][j+1], fdiscrete[j])
		} else if fdiscrete[j] < fdiscrete[j+1] && fdiscrete[j+1] > fdiscrete[j+2] {
			fminmax[0][2][j] = math.Max(fdiscrete[j+1]+0.5*inp.lambda*Theta[2][j+1], fdiscrete[j])
			fminmax[2][2][j] = fdiscrete[j+1]
		} else if fdiscrete[j] >= fdiscrete[j+1] && fdiscrete[j+1] < fdiscrete[j+2] {
			fminmax[0][2][j] = fdiscrete[j+1]
			fminmax[2][2][j] = math.Min(fdiscrete[j+1]+0.5*inp.lambda*Theta[2][j+1], fdiscrete[j])
		} else if fdiscrete[j] >= fdiscrete[j+1] && fdiscrete[j+1] >= fdiscrete[j+2] {
			fminmax[0][2][j] = math.Min(fdiscrete[j+1]-0.5*Theta[2][j+1], fdiscrete[j])
			fminmax[2][2][j] = math.Min(fdiscrete[j+1]-2*Theta[2][j+1], fdiscrete[j])
		}
	}
	// Reconcile the two envelopes; when they are inconsistent the bound
	// direction is reversed (eq. 78).
	for j := 1; j <= N-1; j++ {
		if math.Max(fminmax[0][1][j], fminmax[0][2][j]) <= math.Min(fminmax[2][1][j], fminmax[2][2][j]) {
			//'(75, 76)
			fminmax[1][0][j] = bound(math.Max(fminmax[0][1][j], fminmax[0][2][j]), fminmax[1][0][j], math.Min(fminmax[2][1][j], fminmax[2][2][j]))
		} else {
			//'(78)
			fminmax[1][0][j] = bound(math.Min(fminmax[2][1][j], fminmax[2][2][j]), fminmax[1][0][j], math.Max(fminmax[0][1][j], fminmax[0][2][j]))
		}
	}
	//'(79)
	if math.Abs(fminmax[1][0][0]-fdiscrete[0]) > 0.5*math.Abs(fminmax[1][0][1]-fdiscrete[0]) {
		fminmax[1][0][0] = fdiscrete[1] - 0.5*(fminmax[1][0][1]-fdiscrete[0])
	}
	//'(80)
	if math.Abs(fminmax[1][0][N]-fdiscrete[N]) > 0.5*math.Abs(fminmax[1][0][N-1]-fdiscrete[N]) {
		fminmax[1][0][N] = fdiscrete[N] - 0.5*(fminmax[1][0][N-1]-fdiscrete[N])
	}
	//'(60)
	fminmax[1][0][0] = bound(0, fminmax[1][0][0], 2*fdiscrete[1])
	//'(61)
	for j := 1; j <= N-1; j++ {
		fminmax[1][0][j] = bound(0, fminmax[1][0][j], 2*math.Min(fdiscrete[j], fdiscrete[j+1]))
	}
	//'(62)
	fminmax[1][0][N] = bound(0, fminmax[1][0][N], 2*fdiscrete[N])
	//'finish, so populate the f array
	f := make([]float64, inp.N()+1)
	for j := 0; j <= N; j++ {
		f[j] = fminmax[1][0][j]
	}
	return f
}
package consumererror
import (
"errors"
"go.opentelemetry.io/collector/consumer/pdata"
)
// Traces is an error that may carry associated Trace data for a subset of received data
// that failed to be processed or sent.
type Traces struct {
	error
	failed pdata.Traces
}
// NewTraces creates a Traces error wrapping err and carrying the trace data
// that failed to be processed or sent; retrieve it with AsTraces/GetTraces.
func NewTraces(err error, failed pdata.Traces) error {
	return Traces{
		error:  err,
		failed: failed,
	}
}
// AsTraces finds the first error in err's chain that can be assigned to target. If such an error is found
// it is assigned to target and true is returned, otherwise false is returned.
// A nil err never matches.
func AsTraces(err error, target *Traces) bool {
	return err != nil && errors.As(err, target)
}
// GetTraces returns the failed traces carried by the associated error.
func (err Traces) GetTraces() pdata.Traces {
	return err.failed
}
// Logs is an error that may carry associated Log data for a subset of received data
// that failed to be processed or sent.
type Logs struct {
	error
	failed pdata.Logs
}
// NewLogs creates a Logs error wrapping err and carrying the log data that
// failed to be processed or sent; retrieve it with AsLogs/GetLogs.
func NewLogs(err error, failed pdata.Logs) error {
	return Logs{
		error:  err,
		failed: failed,
	}
}
// AsLogs finds the first error in err's chain that can be assigned to target. If such an error is found
// it is assigned to target and true is returned, otherwise false is returned.
// A nil err never matches.
func AsLogs(err error, target *Logs) bool {
	return err != nil && errors.As(err, target)
}
// GetLogs returns the failed logs carried by the associated error.
func (err Logs) GetLogs() pdata.Logs {
	return err.failed
}
// Metrics is an error that may carry associated Metrics data for a subset of received data
// that failed to be processed or sent.
type Metrics struct {
	error
	failed pdata.Metrics
}
// NewMetrics creates a Metrics error wrapping err and carrying the metric
// data that failed to be processed or sent; retrieve it with AsMetrics/GetMetrics.
func NewMetrics(err error, failed pdata.Metrics) error {
	return Metrics{
		error:  err,
		failed: failed,
	}
}
// AsMetrics finds the first error in err's chain that can be assigned to target. If such an error is found
// it is assigned to target and true is returned, otherwise false is returned.
// A nil err never matches.
func AsMetrics(err error, target *Metrics) bool {
	return err != nil && errors.As(err, target)
}
// GetMetrics returns the failed metrics carried by the associated error.
func (err Metrics) GetMetrics() pdata.Metrics {
	return err.failed
}
package neural
// See https://github.com/ArztSamuel/Applying_EANNs for the inspiration for this.
import (
"math"
"math/rand"
)
// Neuron defines a deeply-connected neuron holding one weight per neuron on
// the next layer.
type Neuron struct {
	Weights []float32
}
// NeuralLayer is one fully-connected layer: one Neuron per layer input, each
// carrying NextLayerSize outgoing weights, plus a shared Bias added to every
// output sum before activation (see Evaluate).
type NeuralLayer struct {
	NextLayerSize int
	Neurons       []Neuron
	Bias          float32
}
// activationFunction squashes value through the logistic sigmoid,
// short-circuiting inputs beyond |10| directly to the asymptote values.
func activationFunction(value float32) float32 {
	switch {
	case value > 10:
		return 1.0
	case value < -10:
		return 0.0
	default:
		return 1.0 / (1.0 + float32(math.Exp(-float64(value))))
	}
}
// Evaluate feeds the given inputs through this layer and returns the
// activations destined for the next layer.
// len(inputs) must equal len(n.Neurons); the result has n.NextLayerSize entries.
func (n *NeuralLayer) Evaluate(inputs []float32) []float32 {
	// Start every output at the shared bias, then accumulate weighted inputs.
	sums := make([]float32, n.NextLayerSize)
	for k := range sums {
		sums[k] = n.Bias
	}
	for neuronIdx, activation := range inputs {
		for k, w := range n.Neurons[neuronIdx].Weights {
			sums[k] += w * activation
		}
	}
	// Squash each accumulated sum through the activation function.
	for k, total := range sums {
		sums[k] = activationFunction(total)
	}
	return sums
}
// newNeuralLayer allocates a fully-connected layer of layerSize neurons, each
// holding nextLayerSize outgoing weights, randomizes the weights and fixes
// the bias at 1.0.
func newNeuralLayer(layerSize, nextLayerSize int) *NeuralLayer {
	neurons := make([]Neuron, layerSize)
	for i := range neurons {
		neurons[i].Weights = make([]float32, nextLayerSize)
	}
	layer := NeuralLayer{NextLayerSize: nextLayerSize, Neurons: neurons, Bias: 1.0}
	layer.Randomize()
	return &layer
}
// Randomize assigns every weight a uniform random value in [-1, 1),
// excluding the bias.
func (n *NeuralLayer) Randomize() {
	for i := range n.Neurons {
		for j := range n.Neurons[i].Weights {
			n.Neurons[i].Weights[j] = rand.Float32()*2.0 - 1.0
		}
	}
}
// ProbablyRandomize perturbs each weight, independently with probability
// randomizeProbability, by a uniform offset in [-randomAmount, randomAmount).
func (n *NeuralLayer) ProbablyRandomize(randomizeProbability, randomAmount float32) {
	for i := range n.Neurons {
		for j := range n.Neurons[i].Weights {
			if rand.Float32() < randomizeProbability {
				// rand.Float32()*2*randomAmount - randomAmount spans the symmetric
				// range [-randomAmount, randomAmount), mirroring Randomize's
				// rand.Float32()*2 - 1 pattern. The previous expression
				// (rand.Float32()*randomAmount - randomAmount) only spanned
				// [-randomAmount, 0) and could never increase a weight.
				n.Neurons[i].Weights[j] += rand.Float32()*2*randomAmount - randomAmount
			}
		}
	}
}
func (n *NeuralLayer) CrossMerge(first, second *NeuralLayer, crossoverProbability float32) {
for i, _ := range n.Neurons {
for j, _ := range n.Neurons[i].Weights {
if rand.Float32() < crossoverProbability {
n.Neurons[i].Weights[j] = second.Neurons[i].Weights[j]
} else {
n.Neurons[i].Weights[j] = first.Neurons[i].Weights[j]
}
}
}
} | voxelli/neural/neuralLayer.go | 0.841207 | 0.593698 | neuralLayer.go | starcoder |
package xgeneric
import "math/rand"
// Uniq returns a new slice containing the elements of collection with
// duplicates removed, preserving the order of first occurrences.
func Uniq[T comparable](collection []T) []T {
	seen := make(map[T]struct{}, len(collection))
	out := make([]T, 0, len(collection))
	for _, v := range collection {
		if _, dup := seen[v]; !dup {
			seen[v] = struct{}{}
			out = append(out, v)
		}
	}
	return out
}
// Chunk returns an array of elements split into groups the length of size.
// The final chunk holds the remainder when len(collection) is not a multiple
// of size; every chunk is an independent copy of its elements. Panics when
// size is not positive.
func Chunk[T any](collection []T, size int) [][]T {
	if size <= 0 {
		panic("size must bigger than 0")
	}
	// Exact chunk count (ceiling division); the previous len/2+1 estimate
	// under-allocated for small sizes and over-allocated for large ones.
	chunkCount := (len(collection) + size - 1) / size
	result := make([][]T, 0, chunkCount)
	for start := 0; start < len(collection); start += size {
		end := start + size
		if end > len(collection) {
			end = len(collection)
		}
		chunk := make([]T, 0, size)
		chunk = append(chunk, collection[start:end]...)
		result = append(result, chunk)
	}
	return result
}
// Map manipulates a slice and transforms it to a slice of another type,
// applying iteratee to each element together with its index.
func Map[T any, R any](collection []T, iteratee func(T, int) R) []R {
	mapped := make([]R, 0, len(collection))
	for idx, elem := range collection {
		mapped = append(mapped, iteratee(elem, idx))
	}
	return mapped
}
// Subset return part of a slice: up to limit elements starting at offset.
// A negative offset counts back from the end (clamped to the start); an
// offset past the end yields an empty, non-nil slice. The returned slice
// shares the backing array of collection.
func Subset[T any](collection []T, offset int, limit uint) []T {
	total := len(collection)
	if offset < 0 {
		if offset = total + offset; offset < 0 {
			offset = 0
		}
	}
	if offset > total {
		return []T{}
	}
	if available := uint(total - offset); limit > available {
		limit = available
	}
	return collection[offset : offset+int(limit)]
}
// Shuffle returns an array of shuffled values. Uses the Fisher-Yates shuffle algorithm.
// Note: the shuffle is performed in place — the input slice is mutated and the
// same slice header is returned. Randomness comes from the global rand source.
func Shuffle[T any](collection []T) []T {
	rand.Shuffle(len(collection), func(i, j int) {
		collection[i], collection[j] = collection[j], collection[i]
	})
	return collection
}
// Filter iterates over elements of collection, returning a new (always
// non-nil) slice of all elements for which predicate returns true; the
// predicate also receives each element's index.
func Filter[V any](collection []V, predicate func(V, int) bool) []V {
	kept := []V{}
	for idx, candidate := range collection {
		if !predicate(candidate, idx) {
			continue
		}
		kept = append(kept, candidate)
	}
	return kept
}
package mesh
import (
"github.com/weqqr/panorama/lm"
)
// Vertex is a single mesh vertex: a position together with its texture
// coordinate and surface normal.
type Vertex struct {
	Position lm.Vector3
	Texcoord lm.Vector2
	Normal   lm.Vector3
}
// Mesh is a flat list of vertices; consecutive triples form triangles.
type Mesh struct {
	Vertices []Vertex
}
// NewMesh returns an empty mesh with a non-nil, zero-length vertex list.
func NewMesh() Mesh {
	return Mesh{Vertices: []Vertex{}}
}
// Model groups a set of meshes into one renderable object.
type Model struct {
	Meshes []Mesh
}
// NewModel returns an empty model with a non-nil, zero-length mesh list.
func NewModel() Model {
	return Model{Meshes: []Mesh{}}
}
// Cuboid builds the six axis-aligned faces of the box spanning the corners
// (x1, y1, z1) and (x2, y2, z2), two triangles per face. The faces are
// returned in the order +Y, -Y, +X, -X, +Z, -Z (matching the normals below),
// so callers can texture/colour each face independently.
func Cuboid(x1, y1, z1, x2, y2, z2 float32) []Mesh {
	// +Y (top) face.
	yp := NewMesh()
	yp.Vertices = []Vertex{
		{Position: lm.Vec3(x1, y2, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
		{Position: lm.Vec3(x1, y2, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
		{Position: lm.Vec3(x1, y2, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 1.0, 0.0)},
	}
	// -Y (bottom) face.
	ym := NewMesh()
	ym.Vertices = []Vertex{
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
		{Position: lm.Vec3(x1, y1, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
		{Position: lm.Vec3(x2, y1, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
		{Position: lm.Vec3(x2, y1, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
		{Position: lm.Vec3(x2, y1, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, -1.0, 0.0)},
	}
	// +X (right) face.
	xp := NewMesh()
	xp.Vertices = []Vertex{
		{Position: lm.Vec3(x2, y1, z1), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x2, y1, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x2, y1, z1), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(1.0, 0.0, 0.0)},
	}
	// -X (left) face.
	xm := NewMesh()
	xm.Vertices = []Vertex{
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x1, y1, z2), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x1, y2, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x1, y2, z1), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
		{Position: lm.Vec3(x1, y2, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(-1.0, 0.0, 0.0)},
	}
	// +Z (front) face.
	zp := NewMesh()
	zp.Vertices = []Vertex{
		{Position: lm.Vec3(x1, y1, z2), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
		{Position: lm.Vec3(x1, y2, z2), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
		{Position: lm.Vec3(x1, y1, z2), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
		{Position: lm.Vec3(x2, y1, z2), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
		{Position: lm.Vec3(x2, y2, z2), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 0.0, 1.0)},
	}
	// -Z (back) face.
	zm := NewMesh()
	zm.Vertices = []Vertex{
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
		{Position: lm.Vec3(x1, y2, z1), Texcoord: lm.Vec2(0.0, 1.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
		{Position: lm.Vec3(x2, y2, z1), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
		{Position: lm.Vec3(x1, y1, z1), Texcoord: lm.Vec2(0.0, 0.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
		{Position: lm.Vec3(x2, y1, z1), Texcoord: lm.Vec2(1.0, 0.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
		{Position: lm.Vec3(x2, y2, z1), Texcoord: lm.Vec2(1.0, 1.0), Normal: lm.Vec3(0.0, 0.0, -1.0)},
	}
	return []Mesh{yp, ym, xp, xm, zp, zm}
}
func Cube() *Model {
model := NewModel()
model.Meshes = append(model.Meshes, Cuboid(-0.5, -0.5, -0.5, 0.5, 0.5, 0.5)...)
return &model
} | mesh/mesh.go | 0.551091 | 0.701362 | mesh.go | starcoder |
package integrationtests
import (
"context"
"testing"
ethAbi "github.com/ethereum/go-ethereum/accounts/abi"
ethcommon "github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/consensys/orchestrate/pkg/encoding/json"
"github.com/consensys/orchestrate/pkg/errors"
"github.com/consensys/orchestrate/pkg/sdk/client"
"github.com/consensys/orchestrate/pkg/types/api"
"github.com/consensys/orchestrate/pkg/types/entities"
"github.com/consensys/orchestrate/pkg/types/testutils"
"github.com/consensys/orchestrate/pkg/utils"
)
// contractsTestSuite exercises the contract-registry endpoints of the API
// through the Orchestrate client against a running integration environment.
type contractsTestSuite struct {
	suite.Suite
	client client.OrchestrateClient
	env    *IntegrationEnvironment
}
// TestContractRegistry_Register covers contract registration: an explicit
// tag, the defaulted ("latest") tag, and the validation/encoding failure modes.
func (s *contractsTestSuite) TestContractRegistry_Register() {
	ctx := context.Background()
	s.T().Run("should register a contract with tag", func(t *testing.T) {
		txRequest := testutils.FakeRegisterContractRequest()
		resp, err := s.client.RegisterContract(ctx, txRequest)
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		// The response must echo the request fields and expose the artifacts
		// parsed out of the ABI (constructor, events, methods).
		assert.Equal(t, txRequest.Name, resp.Name)
		assert.Equal(t, txRequest.Tag, resp.Tag)
		assert.Equal(t, txRequest.DeployedBytecode, resp.DeployedBytecode)
		assert.Equal(t, txRequest.Bytecode, resp.Bytecode)
		assert.NotEmpty(t, resp.Constructor.Signature)
		assert.NotEmpty(t, resp.Events)
		assert.NotEmpty(t, resp.Methods)
		abi, err := json.Marshal(txRequest.ABI)
		assert.NoError(t, err)
		assert.Equal(t, string(abi), resp.ABI)
	})
	s.T().Run("should register a contract with tag latest", func(t *testing.T) {
		txRequest := testutils.FakeRegisterContractRequest()
		// An empty tag must be defaulted by the server.
		txRequest.Tag = ""
		resp, err := s.client.RegisterContract(ctx, txRequest)
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		assert.Equal(t, txRequest.Name, resp.Name)
		assert.Equal(t, entities.DefaultTagValue, resp.Tag)
		assert.Equal(t, txRequest.DeployedBytecode, resp.DeployedBytecode)
		assert.Equal(t, txRequest.Bytecode, resp.Bytecode)
	})
	s.T().Run("should fail with invalidFormatError if payload is invalid", func(t *testing.T) {
		txRequest := testutils.FakeRegisterContractRequest()
		// Missing name must be rejected before any parsing happens.
		txRequest.Name = ""
		_, err := s.client.RegisterContract(ctx, txRequest)
		assert.Error(t, err)
		assert.True(t, errors.IsInvalidFormatError(err), err.Error())
	})
	s.T().Run("should fail with encodingError if ABI payload is invalid", func(t *testing.T) {
		txRequest := testutils.FakeRegisterContractRequest()
		// Malformed JSON in the ABI must surface as an encoding error.
		txRequest.ABI = "{asd}asdasd"
		_, err := s.client.RegisterContract(ctx, txRequest)
		assert.Error(t, err)
		assert.True(t, errors.IsEncodingError(err), err.Error())
	})
}
// TestContractRegistry_Get registers one uniquely-named fixture contract and
// then verifies the read endpoints: catalog listing, tag listing, contract
// retrieval and method-signature lookup.
func (s *contractsTestSuite) TestContractRegistry_Get() {
	contractName := "contract_" + utils.RandString(5)
	ctx := context.Background()
	// Register the fixture contract shared by all sub-tests below.
	txRequest := testutils.FakeRegisterContractRequest()
	txRequest.Name = contractName
	_, err := s.client.RegisterContract(ctx, txRequest)
	if err != nil {
		assert.Fail(s.T(), err.Error())
		return
	}
	s.T().Run("should get all contracts", func(t *testing.T) {
		resp, err := s.client.GetContractsCatalog(ctx)
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		assert.Contains(t, resp, contractName)
	})
	s.T().Run("should get all tags of a contract", func(t *testing.T) {
		resp, err := s.client.GetContractTags(ctx, contractName)
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		// Only one version was registered, so exactly one tag is expected.
		assert.Len(t, resp, 1)
		assert.Contains(t, resp, txRequest.Tag)
	})
	s.T().Run("should get a contract", func(t *testing.T) {
		resp, err := s.client.GetContract(ctx, txRequest.Name, txRequest.Tag)
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		assert.Equal(t, txRequest.Name, resp.Name)
		abi, err := json.Marshal(txRequest.ABI)
		assert.NoError(t, err)
		assert.Equal(t, string(abi), resp.ABI)
	})
	s.T().Run("should get a contract method signatures", func(t *testing.T) {
		// Empty method name lists every signature of the contract.
		resp, err := s.client.GetContractMethodSignatures(ctx, txRequest.Name, txRequest.Tag, "")
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		assert.Contains(t, resp, "transferFrom(address,address,uint256)")
		assert.Contains(t, resp, "totalSupply()")
		assert.Contains(t, resp, "approve(address,uint256)")
		// A concrete method name filters the result down to matching signatures.
		resp2, err := s.client.GetContractMethodSignatures(ctx, txRequest.Name, txRequest.Tag, "balanceOf")
		if err != nil {
			assert.Fail(t, err.Error())
			return
		}
		assert.Len(t, resp2, 1)
		assert.Contains(t, resp2, "balanceOf(address)")
	})
}
// TestContractRegistry_CodeHash covers the code-hash endpoints: attaching a
// code hash to a (chain, address) pair and resolving contract events by
// signature hash — both the default-event fallback (unknown code hash) and
// the exact-match path — plus input validation failures.
func (s *contractsTestSuite) TestContractRegistry_CodeHash() {
	ctx := context.Background()
	contractName := "contract_" + utils.RandString(5)
	// Register a fixture contract whose ABI supplies the Transfer event.
	txRequest := testutils.FakeRegisterContractRequest()
	txRequest.Name = contractName
	_, err := s.client.RegisterContract(ctx, txRequest)
	if err != nil {
		assert.Fail(s.T(), err.Error())
		return
	}
	// address gets a random code hash (no registered contract matches it);
	// address2 gets a code hash expected to match the registered contract.
	address := ethcommon.HexToAddress(utils.RandHexString(10))
	address2 := ethcommon.HexToAddress(utils.RandHexString(10))
	codeHash := ethcommon.HexToHash(utils.RandHexString(20))
	codeHash2 := "0xd63259750ca3b56efab25f0646a4d1fb659b6b643474506e1be24d81f9e55fd8"
	chainID := "2017"
	s.T().Run("should set contract code hashes successfully", func(t *testing.T) {
		err := s.client.SetContractAddressCodeHash(ctx, address.String(), chainID, &api.SetContractCodeHashRequest{
			CodeHash: codeHash.String(),
		})
		assert.NoError(t, err)
		err = s.client.SetContractAddressCodeHash(ctx, address2.String(), chainID, &api.SetContractCodeHashRequest{
			CodeHash: codeHash2,
		})
		assert.NoError(t, err)
	})
	s.T().Run("should get default contract event by sigHash successfully", func(t *testing.T) {
		// The random code hash matches no contract, so the lookup falls back
		// to the default events registered for this signature hash.
		resp, err := s.client.GetContractEvents(ctx, address.String(), chainID, &api.GetContractEventsRequest{
			SigHash:           "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
			IndexedInputCount: 2,
		})
		assert.NoError(t, err)
		if len(resp.DefaultEvents) == 0 {
			assert.Fail(t, "expected some default events")
		}
		event := &ethAbi.Event{}
		err = json.Unmarshal([]byte(resp.DefaultEvents[0]), event)
		assert.NoError(t, err)
		assert.Equal(t, "Transfer", event.Name)
	})
	s.T().Run("should get contract event by sigHash successfully", func(t *testing.T) {
		// The known code hash resolves to the registered contract, so the
		// exact event (not the default list) is returned.
		resp, err := s.client.GetContractEvents(ctx, address2.String(), chainID, &api.GetContractEventsRequest{
			SigHash:           "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
			IndexedInputCount: 2,
		})
		assert.NoError(t, err)
		if resp.Event == "" {
			assert.Fail(t, "expected contract events")
		}
		event := &ethAbi.Event{}
		err = json.Unmarshal([]byte(resp.Event), event)
		assert.NoError(t, err)
		assert.Equal(t, "Transfer", event.Name)
	})
	s.T().Run("should fail to set contract code hashes if invalid address", func(t *testing.T) {
		err := s.client.SetContractAddressCodeHash(ctx, "InvalidAddress", chainID, &api.SetContractCodeHashRequest{
			CodeHash: codeHash.String(),
		})
		assert.Error(t, err)
		assert.True(t, errors.IsInvalidFormatError(err), "IsInvalidFormatError")
	})
	s.T().Run("should fail to set contract code hashes if invalid codeHash", func(t *testing.T) {
		err := s.client.SetContractAddressCodeHash(ctx, address.String(), chainID, &api.SetContractCodeHashRequest{
			CodeHash: "{invalidCodeHash}",
		})
		assert.Error(t, err)
		assert.True(t, errors.IsInvalidFormatError(err), "IsInvalidFormatError")
	})
}
package message
import (
"github.com/mschoenlaub/grip/level"
)
// condComposer wraps another Composer and suppresses logging (via
// Loggable) when its condition is false.
type condComposer struct {
	cond bool
	msg  Composer
}
// When returns a conditional message that is only logged if the
// condition is true. Converts the second argument to a composer, if
// needed, using the same rules that the logging methods use.
func When(cond bool, m interface{}) Composer {
	return &condComposer{cond: cond, msg: ConvertToComposer(level.Priority(0), m)}
}
// Whenf returns a conditional message that is only logged if the
// condition is true, and creates a sprintf-style message, which will
// itself only log if the base expression is not the empty string.
func Whenf(cond bool, m string, args ...interface{}) Composer {
	return &condComposer{cond: cond, msg: NewFormatted(m, args...)}
}
// Whenln returns a conditional message that is only logged if the
// condition is true, and creates a sprintln-style message, which will
// itself only log if the base expression is not the empty string.
func Whenln(cond bool, args ...interface{}) Composer {
	return &condComposer{cond: cond, msg: NewLine(args...)}
}
// WhenMsg returns a conditional message that is only logged if the
// condition is true, and creates a string message that will only log
// when the message content is not the empty string. Use this for a
// more strongly-typed conditional logging message.
func WhenMsg(cond bool, m string) Composer {
	return &condComposer{cond: cond, msg: NewString(m)}
}
// The following methods delegate directly to the wrapped Composer.
func (c *condComposer) String() string                         { return c.msg.String() }
func (c *condComposer) Raw() interface{}                       { return c.msg.Raw() }
func (c *condComposer) Priority() level.Priority               { return c.msg.Priority() }
func (c *condComposer) SetPriority(p level.Priority) error     { return c.msg.SetPriority(p) }
func (c *condComposer) Annotate(k string, v interface{}) error { return c.msg.Annotate(k, v) }
func (c *condComposer) Loggable() bool {
if c.cond {
return c.msg.Loggable()
}
return false
} | message/conditional.go | 0.657758 | 0.452113 | conditional.go | starcoder |
package sort
// BinaryInsertionSort is an implementation of the binary insertion sort
// algorithm borrowed from timsort, with some minor modifications.
// It requires O(n log n) compares, but O(n^2) data movement (worst case).
// The slice is sorted in place, in ascending order.
func BinaryInsertionSort(arr []string) {
	// Depth 0 means the full strings are used for comparison.
	binaryInsertionSortDepth(arr, 0)
}
// binaryInsertionSortDepth is identical to BinaryInsertionSort but takes
// a depth value which indicates the portion of the strings that is to be
// used in sorting (that is, ignoring the characters from 0 to depth).
func binaryInsertionSortDepth(arr []string, depth int) {
	n := len(arr)
	if arr == nil || n < 2 || depth < 0 {
		return
	}
	for start := 0; start < n; start++ {
		pivot := arr[start]
		// Binary-search the insertion point of pivot in the already
		// sorted prefix arr[0:start].
		// Invariants: pivot >= everything in [0, lo)
		//             pivot <  everything in [hi, start)
		lo, hi := 0, start
		for lo < hi {
			mid := (lo + hi) >> 1
			if compareTail(pivot, arr[mid], depth) < 0 {
				hi = mid
			} else {
				lo = mid + 1
			}
		}
		// The invariants still hold, so pivot belongs at index lo. When
		// elements equal to pivot exist, lo points just past them, which
		// keeps the sort stable. Shift that many elements one slot right
		// to make room.
		shift := start - lo
		// The switch is just an optimization over copy for tiny shifts.
		switch shift {
		case 2:
			arr[lo+2] = arr[lo+1]
			fallthrough
		case 1:
			arr[lo+1] = arr[lo]
		default:
			// Go's copy handles the overlapping ranges like memmove.
			copy(arr[lo+1:], arr[lo:lo+shift])
		}
		arr[lo] = pivot
	}
}
// compareTail compares two strings lexicographically, starting with the
// characters at offset 'depth' (assumes the leading characters are the
// same in both strings). Returns a negative integer, zero, or a positive
// integer as the first argument is less than, equal to, or greater than
// the second.
//
// The previous implementation used the byte 0 as an "end of string"
// sentinel, which made a string compare equal to the same string
// extended with trailing NUL bytes (e.g. "a" vs "a\x00"), producing an
// incorrect sort for strings containing NULs. Comparing by the actual
// lengths fixes that. It also guards against a negative depth, which
// previously caused an out-of-range index.
func compareTail(a, b string, depth int) int {
	if depth < 0 {
		depth = 0
	}
	for idx := depth; idx < len(a) && idx < len(b); idx++ {
		if a[idx] != b[idx] {
			// Convert unsigned bytes to int so we can return negatives.
			return int(a[idx]) - int(b[idx])
		}
	}
	// The shared portion is equal: the shorter string sorts first.
	return len(a) - len(b)
}
package main
// mainUsage describes usage of the overall tool and lists the available
// subcommands.
const mainUsage string = `
A Swiss Army knife for vending your own Go packages.
vend [subcommand] [arguments]
Valid subcommands :
vend init
vend cp
vend mv
vend path
vend list
vend info
For help with subcommands run :
vend [subcommand] -h
`
// listUsage describes usage of the list subcommand.
// (Fixed user-facing typo: "ommitted" -> "omitted".)
const listUsage string = `
Lists all the dependencies of the package specified by the [path], if omitted
defaults to the current working directory. The [path] can be specified
relative to the current working directory or as an import path resolved through
the GOPATH.
vend list [arguments] [path]
`
// infoUsage describes usage of the info subcommand.
// (Fixed user-facing typo: "ommitted" -> "omitted".)
const infoUsage string = `
Print out information about the package specified by the [path], if omitted
defaults to the current working directory. The [path] can be specified relative
to the current working directory or as an import path resolved through the
GOPATH.
vend info [arguments] [path]
`
// initUsage describes usage of the init subcommand, which vendors all
// external dependencies into a directory.
const initUsage string = `
For the package in the current working directory copies all external packages
into the specified [directory], while updating all the import paths. The
specified [directory] is created if necessary. External packages are packages
not located in the standard library, a parent directory, or a subdirectory.
The packages are copied into a subdirectory specified by the package name. If
multiple dependencies have the same package name the command will fail and
provide all the duplicates, the user should use the vend cp command to place
those packages in unique directories before running vend init again to process
the other packages.
vend init [directory]
`
// cpUsage describes usage of the cp subcommand.
const cpUsage string = `
Copies the package in the [from] import path or directory to the [to]
directory, updating the necessary import paths for the package in the current
working directory.
vend cp [from] [to]
`
// mvUsage describes usage of the mv subcommand.
const mvUsage string = `
Moves the package in the [from] path or directory to the [to] directory,
updating the necessary import paths for the package in the current working
directory. The mv subcommand cannot be used with standard packages, use
cp instead.
vend mv [from] [to]
`
// pathUsage describes usage of the path subcommand.
const pathUsage string = `
Updates all the usages of the import path [from] to the import path [to] for
the package in the current working directory. When updating it includes import
paths located in subdirectories of the [from] import path, updating them to
their corresponding location in the [to] import path.
vend path [from] [to]
`
package matcher
import (
"reflect"
"strings"
)
// StructMatcher defines a generic structure matcher. Implements the
// Matcher interface.
type StructMatcher struct {
	// pattern maps a field identifier (Go field name, json tag name or
	// protobuf tag name) to the value the field is expected to hold.
	pattern map[string]interface{}
}
// NewStructMatcher creates a generic structure matcher with a given
// pattern of field names to expected values.
func NewStructMatcher(pattern map[string]interface{}) StructMatcher {
	return StructMatcher{pattern: pattern}
}
// Matches returns whether arg is a match.
//
// arg matches when, for every key/value pair in the pattern, arg has a
// struct field whose identifier equals the key and whose type and value
// equal the pattern value. A field's identifier is either:
//  1. its Go field name,
//  2. its json tag name, or
//  3. its protobuf tag name.
// Example of a field matched through its annotations:
//
//	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
//
// A nil or non-struct arg never matches (previously this panicked inside
// reflect).
func (sm StructMatcher) Matches(arg interface{}) bool {
	// The reflected value is loop-invariant, so compute it once instead
	// of once per pattern key as before.
	value := reflect.ValueOf(arg)
	if value.Kind() == reflect.Ptr {
		value = value.Elem()
	}
	if value.Kind() != reflect.Struct {
		return false
	}
	for k, v := range sm.pattern {
		found := false
		for i := 0; i < value.NumField() && !found; i++ { // iterates through every struct type field
			name, json, proto := GetFieldName(value.Type().Field(i))
			if k == name || k == json || k == proto {
				found = true
				// NOTE(review): the == comparison panics for
				// non-comparable field types (slices, maps); consider
				// reflect.DeepEqual if such fields must be supported.
				if value.Field(i).Type() != reflect.ValueOf(v).Type() || value.Field(i).Interface() != v {
					return false
				}
			}
		}
		if !found {
			return false
		}
	}
	return true
}
// String describes what the matcher matches. Not used directly but
// necessary to satisfy the Matcher interface.
func (sm StructMatcher) String() string {
	return "A matcher for arbitrary struct types"
}
// GetFieldName returns the name of the file, the json name and the protobuf name indicated in the annotations
func GetFieldName(t reflect.StructField) (string, string, string) {
var name, jsonName, protoName string
name = t.Name
if jsonTag := t.Tag.Get("json"); jsonTag != "" && jsonTag != "-" {
// check for possible comma as in "json:"username,omitempty""
var ind int
if ind = strings.Index(jsonTag, ","); ind < 0 {
ind = len(jsonTag)
}
jsonName = jsonTag[:ind]
}
if protoTag := t.Tag.Get("protobuf"); protoTag != "" && protoTag != "-" {
// get the name from protobuf:"bytes,1,opt,name=username,proto3"
var ind int
if ind = strings.Index(protoTag, "name="); ind < 0 {
ind = len(protoTag)
}
fieldName := protoTag[ind:]
if ind = strings.Index(fieldName, ","); ind < 0 {
ind = len(fieldName)
}
fieldName = fieldName[5:ind]
protoName = fieldName
}
return name, jsonName, protoName
} | pkg/matcher/struct.go | 0.768993 | 0.420719 | struct.go | starcoder |
package heatmap
import (
"github.com/K-Phoen/grabana/heatmap/axis"
"github.com/K-Phoen/grabana/target/graphite"
"github.com/K-Phoen/grabana/target/influxdb"
"github.com/K-Phoen/grabana/target/prometheus"
"github.com/K-Phoen/grabana/target/stackdriver"
"github.com/grafana-tools/sdk"
)
// DataFormatMode represents the data format modes.
type DataFormatMode string

const (
	// TimeSeriesBuckets lets Grafana do the bucketing by going through
	// all time series values.
	TimeSeriesBuckets DataFormatMode = "tsbuckets"
	// TimeSeries means each time series already represents a Y-Axis bucket.
	TimeSeries DataFormatMode = "timeseries"
)
// LegendOption allows to configure a legend.
type LegendOption uint16

const (
	// Hide keeps the legend from being displayed.
	Hide LegendOption = iota
)
// Option represents an option that can be used to configure a heatmap panel.
type Option func(stat *Heatmap)

// Heatmap represents a heatmap panel.
type Heatmap struct {
	Builder *sdk.Panel
}
// New creates a new heatmap panel with sensible defaults, then applies
// the given options (user options run after the defaults and can
// override them).
func New(title string, options ...Option) *Heatmap {
	panel := &Heatmap{Builder: sdk.NewHeatmap(title)}
	panel.Builder.IsNew = false
	// The sdk models these settings as anonymous struct types, so they
	// are populated inline with Grafana-style defaults.
	panel.Builder.HeatmapPanel.Cards = struct {
		CardPadding *float64 `json:"cardPadding"`
		CardRound   *float64 `json:"cardRound"`
	}{}
	panel.Builder.HeatmapPanel.Color = struct {
		CardColor   string   `json:"cardColor"`
		ColorScale  string   `json:"colorScale"`
		ColorScheme string   `json:"colorScheme"`
		Exponent    float64  `json:"exponent"`
		Min         *float64 `json:"min,omitempty"`
		Max         *float64 `json:"max,omitempty"`
		Mode        string   `json:"mode"`
	}{
		CardColor:   "#b4ff00",
		ColorScale:  "sqrt",
		ColorScheme: "interpolateSpectral",
		Exponent:    0.5,
		Mode:        "spectrum",
	}
	panel.Builder.HeatmapPanel.Legend = struct {
		Show bool `json:"show"`
	}{
		Show: true,
	}
	panel.Builder.HeatmapPanel.Tooltip = struct {
		Show          bool `json:"show"`
		ShowHistogram bool `json:"showHistogram"`
	}{
		Show:          true,
		ShowHistogram: true,
	}
	panel.Builder.HeatmapPanel.XAxis = struct {
		Show bool `json:"show"`
	}{
		Show: true,
	}
	panel.Builder.HeatmapPanel.YBucketBound = "auto"
	for _, opt := range append(defaults(), options...) {
		opt(panel)
	}
	return panel
}
// defaults returns the options applied to every heatmap before the
// user-supplied ones.
func defaults() []Option {
	return []Option{
		Span(6),
		DataFormat(TimeSeriesBuckets),
		HideZeroBuckets(),
		HighlightCards(),
		defaultYAxis(),
	}
}

// defaultYAxis configures the Y axis with the axis package defaults.
func defaultYAxis() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.YAxis = *axis.New().Builder
	}
}
// DataSource sets the data source to be used by the panel.
func DataSource(source string) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Datasource = &source
	}
}

// DataFormat sets how the data should be interpreted.
func DataFormat(format DataFormatMode) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.DataFormat = string(format)
	}
}

// WithPrometheusTarget adds a prometheus query to the graph.
func WithPrometheusTarget(query string, options ...prometheus.Option) Option {
	target := prometheus.New(query, options...)

	return func(heatmap *Heatmap) {
		heatmap.Builder.AddTarget(&sdk.Target{
			RefID:          target.Ref,
			Expr:           target.Expr,
			IntervalFactor: target.IntervalFactor,
			Interval:       target.Interval,
			Step:           target.Step,
			LegendFormat:   target.LegendFormat,
			Instant:        target.Instant,
			Format:         target.Format,
		})
	}
}

// WithGraphiteTarget adds a Graphite target to the table.
func WithGraphiteTarget(query string, options ...graphite.Option) Option {
	target := graphite.New(query, options...)

	return func(heatmap *Heatmap) {
		heatmap.Builder.AddTarget(target.Builder)
	}
}

// WithInfluxDBTarget adds an InfluxDB target to the graph.
func WithInfluxDBTarget(query string, options ...influxdb.Option) Option {
	target := influxdb.New(query, options...)

	return func(heatmap *Heatmap) {
		heatmap.Builder.AddTarget(target.Builder)
	}
}

// WithStackdriverTarget adds a stackdriver query to the graph.
func WithStackdriverTarget(target *stackdriver.Stackdriver) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.AddTarget(target.Builder)
	}
}
// Span sets the width of the panel, in grid units. Should be a positive
// number between 1 and 12. Example: 6.
func Span(span float32) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Span = span
	}
}

// Height sets the height of the panel, in pixels. Example: "400px".
func Height(height string) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Height = &height
	}
}

// Description annotates the current visualization with a human-readable description.
func Description(content string) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Description = &content
	}
}

// Transparent makes the background transparent.
func Transparent() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Transparent = true
	}
}
// Legend defines what should be shown in the legend.
func Legend(opts ...LegendOption) Option {
	return func(heatmap *Heatmap) {
		for _, opt := range opts {
			if opt == Hide {
				heatmap.Builder.HeatmapPanel.Legend.Show = false
			}
		}
	}
}

// ShowZeroBuckets forces the display of "zero" buckets.
func ShowZeroBuckets() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.HideZeroBuckets = false
	}
}

// HideZeroBuckets hides "zero" buckets.
func HideZeroBuckets() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.HideZeroBuckets = true
	}
}

// HighlightCards highlights bucket cards.
func HighlightCards() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.HighlightCards = true
	}
}

// NoHighlightCards disables the highlighting of bucket cards.
func NoHighlightCards() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.HighlightCards = false
	}
}

// ReverseYBuckets reverses the order of buckets on the Y-axis.
func ReverseYBuckets() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.ReverseYBuckets = true
	}
}
// HideTooltip prevents the tooltip from being displayed.
func HideTooltip() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.Tooltip.Show = false
	}
}

// HideTooltipHistogram prevents the histograms from being displayed in tooltips.
// Histogram represents the distribution of the bucket values for the specific timestamp.
func HideTooltipHistogram() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.Tooltip.ShowHistogram = false
	}
}

// TooltipDecimals sets the number of decimals to be displayed in tooltips.
func TooltipDecimals(decimals int) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.TooltipDecimals = decimals
	}
}

// HideXAxis prevents the X-axis from being displayed.
func HideXAxis() Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.XAxis.Show = false
	}
}

// YAxis configures the Y axis.
func YAxis(opts ...axis.Option) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.HeatmapPanel.YAxis = *axis.New(opts...).Builder
	}
}

// Repeat configures repeating a panel for a variable.
func Repeat(repeat string) Option {
	return func(heatmap *Heatmap) {
		heatmap.Builder.Repeat = &repeat
	}
}
package main
import (
"container/heap"
"math"
)
// checkCircleEvent checks whether a leaf node of the beach line has a
// circle event and, if so, pushes it onto the event queue and returns
// it. It returns nil when the leaf has no neighbours on both sides, when
// the neighbouring arcs belong to the same site, when the three sites
// have the wrong orientation, or when the bottom of the circumscribed
// circle is not below the sweepline.
func checkCircleEvent(leafNode *node, sweepline float64, eventQueue *PriorityQueue) *Item {
	// A circle event needs three consecutive arcs: the leaf plus both
	// neighbours.
	if leafNode.previous == nil || leafNode.next == nil {
		return nil
	}
	leftSite := leafNode.previous.arcSite
	rightSite := leafNode.next.arcSite
	middleSite := leafNode.arcSite
	if leftSite == rightSite {
		return nil
	}
	// Orientation test taken from
	// https://github.com/gorhill/Javascript-Voronoi/blob/master/rhill-voronoi-core.js
	// d is twice the cross product of the vectors (left - middle) and
	// (right - middle), i.e. twice the signed area of the triangle formed
	// by the three sites; its sign encodes the winding order.
	// NOTE(review): the original author flagged this as not fully
	// understood ("circles have an orientation and clockwise ones are not
	// valid circle events") — confirm the sign convention. Also note
	// collinear sites (d == 0) fall through, and the divisor used below
	// to compute b may then be zero.
	bx := middleSite.x
	by := middleSite.y
	ax := leftSite.x - bx
	ay := leftSite.y - by
	cx := rightSite.x - bx
	cy := rightSite.y - by
	d := 2 * ((ax * cy) - (ay * cx))
	if d < 0 {
		return nil
	}
	// Find the circumcircle center (a, b): each site lies on the
	// circumference, so it is equidistant from all three. Equating the
	// squared distances pairwise,
	//   (a - x1)^2 + (b - y1)^2 = (a - x2)^2 + (b - y2)^2 = (a - x3)^2 + (b - y3)^2
	// and expanding
	//   (a - x)^2 + (b - y)^2 --> a^2 + b^2 - 2 a x + x^2 - 2 b y + y^2
	// gives two linear equations in a and b, built from the pairwise
	// differences below (first minus second, second minus third).
	// x^2 difference terms
	leftXSquaredDiff := (leftSite.x * leftSite.x) - (middleSite.x * middleSite.x)
	rightXSquaredDiff := (middleSite.x * middleSite.x) - (rightSite.x * rightSite.x)
	// y^2 difference terms
	leftYSquaredDiff := (leftSite.y * leftSite.y) - (middleSite.y * middleSite.y)
	rightYSquareDiff := (middleSite.y * middleSite.y) - (rightSite.y * rightSite.y)
	// -2x difference terms (these are the coefficients of a)
	leftXLinearDiff := (2.0 * leftSite.x) - (2.0 * middleSite.x)
	rightXLinearDiff := (2.0 * middleSite.x) - (2.0 * rightSite.x)
	// -2y difference terms (these are the coefficients of b)
	leftYLinearDiff := (2.0 * leftSite.y) - (2.0 * middleSite.y)
	rightYLinearDiff := (2.0 * middleSite.y) - (2.0 * rightSite.y)
	// x^2 + y^2 constant terms (let these be k1, k2)
	constantsLeft := leftXSquaredDiff + leftYSquaredDiff
	constantsRight := rightXSquaredDiff + rightYSquareDiff
	// Two linear equations of the form k - 2ax - 2by = 0; substituting
	// one into the other yields b:
	//         2x1.k2 - 2x2.k1
	//     b = -------------------     (note: the '.' is multiplication)
	//         2y1.2x2 - 2y2.2x1
	b := ((-1.0 * leftXLinearDiff * constantsRight) - (-1.0 * rightXLinearDiff * constantsLeft)) /
		((leftYLinearDiff * rightXLinearDiff) - (rightYLinearDiff * leftXLinearDiff))
	// Substitute b back into a = (k1 - 2y1.b) / 2x1.
	a := (constantsLeft - (leftYLinearDiff * b)) / leftXLinearDiff
	// (a, b) is the circle center; the radius is the distance from the
	// center to any of the three sites (the left one is used here).
	radius := math.Sqrt(math.Pow((leftSite.x-a), 2) + math.Pow((leftSite.y-b), 2))
	// The event fires when the sweepline reaches the bottom of the
	// circle, so it is only valid if that point lies below the sweepline.
	// The epsilon keeps a just-handled circle event from being
	// re-detected while processing it.
	// TODO(review): the hard-coded epsilon is fragile; find a cleaner way
	// to exclude the event currently being handled.
	bottomOfCircleY := b - radius
	if bottomOfCircleY+0.0000001 > sweepline {
		return nil
	}
	// The circle event is valid - create it and add it to the event queue.
	circleCenter := site{x: a, y: b} // TODO - reusing site here is misleading; the center is not actually a site.
	circleEvent := &Item{
		value:    Event{eventType: "circle", location: circleCenter, leafNode: leafNode},
		priority: bottomOfCircleY,
	}
	heap.Push(eventQueue, circleEvent)
	return circleEvent
}
package config
import (
"strings"
)
// Partial defines a type used to store configuration information.
type Partial map[string]interface{}

// ToPartial will try to convert a map value into a packed Partial
// instance (recursively).
// The rules of conversion are:
//   - Partial : recursively convert the stored elements
//   - map     : convert the map into a Partial and convert the stored elements
//   - array   : recursively convert the stored elements
//   - float   : convert to int if the value hasn't a decimal component
func ToPartial(data interface{}) interface{} {
	// Bind the dynamic type once in the switch instead of re-asserting
	// data.(T) on every access, as the previous version did.
	switch typed := data.(type) {
	case Partial:
		for key, value := range typed {
			typed[key] = ToPartial(value)
		}
	case map[string]interface{}:
		cfg := Partial{}
		for key, value := range typed {
			cfg[key] = ToPartial(value)
		}
		return cfg
	case map[interface{}]interface{}:
		cfg := Partial{}
		for key, value := range typed {
			// Non-string keys panic here, as before.
			cfg[key.(string)] = ToPartial(value)
		}
		return cfg
	case []interface{}:
		for key, value := range typed {
			typed[key] = ToPartial(value)
		}
	case float32:
		if float32(int(typed)) == typed {
			return int(typed)
		}
	case float64:
		if float64(int(typed)) == typed {
			return int(typed)
		}
	}
	return data
}
// Has will check if a requested dot-separated path exists in the config
// partial. Empty path segments (from leading, trailing or doubled dots)
// are skipped.
func (p Partial) Has(path string) bool {
	node := p
	parts := strings.Split(path, ".")
	for i, part := range parts {
		if part == "" {
			continue
		}
		if next, ok := node[part].(Partial); ok {
			node = next
			continue
		}
		if node[part] == nil {
			// Missing key, or explicit nil value.
			return false
		}
		// A scalar value only satisfies the path when it is the leaf.
		return i == len(parts)-1
	}
	return true
}
// Get will retrieve the value stored in the requested dot-separated
// path. If the path does not exist, then the value nil will be returned.
// Or, if a default value was given as the optional extra argument, then
// it will be returned instead of the standard nil value. A key that is
// explicitly stored as nil always yields nil, even with a default.
func (p Partial) Get(path string, def ...interface{}) interface{} {
	node := p
	parts := strings.Split(path, ".")
	for i, part := range parts {
		if part == "" {
			continue
		}
		value, found := node[part]
		if !found {
			if len(def) > 0 {
				return def[0]
			}
			return nil
		}
		switch typed := value.(type) {
		case Partial:
			node = typed
		case nil:
			return nil
		default:
			// A scalar in the middle of the path means the path is invalid.
			if i != len(parts)-1 {
				if len(def) > 0 {
					return def[0]
				}
				return nil
			}
			return value
		}
	}
	return node
}
// Int will return the casting to int of the stored value in the
// requested path. If the value retrieved was not found or returned nil,
// then the default optional argument will be returned if given. A value
// of any other type triggers a conversion panic, as before.
func (p Partial) Int(path string, def ...int) int {
	value := p.Get(path)
	if value == nil && len(def) > 0 {
		return def[0]
	}
	result, ok := value.(int)
	if !ok {
		panic(errConversion(value, "int"))
	}
	return result
}
// String will return the casting to string of the stored value in the
// requested path. If the value retrieved was not found or returned nil,
// then the default optional argument will be returned if given. A value
// of any other type triggers a conversion panic, as before.
func (p Partial) String(path string, def ...string) string {
	value := p.Get(path)
	if value == nil && len(def) > 0 {
		return def[0]
	}
	result, ok := value.(string)
	if !ok {
		panic(errConversion(value, "string"))
	}
	return result
}
// List will return the casting to a list of the stored value in the
// requested path. If the value retrieved was not found or returned nil,
// then the default optional argument will be returned if given. A value
// of any other type triggers a conversion panic, as before.
func (p Partial) List(path string, def ...[]interface{}) []interface{} {
	value := p.Get(path)
	if value == nil && len(def) > 0 {
		return def[0]
	}
	result, ok := value.([]interface{})
	if !ok {
		panic(errConversion(value, "[]interface{}"))
	}
	return result
}
// Partial will return the casting to a config partial of the stored
// value in the requested path. If the value retrieved was not found or
// returned nil, then the default optional argument will be returned if
// given. A value of any other type triggers a conversion panic, as before.
func (p Partial) Partial(path string, def ...Partial) Partial {
	value := p.Get(path)
	if value == nil && len(def) > 0 {
		return def[0]
	}
	result, ok := value.(Partial)
	if !ok {
		panic(errConversion(value, "Partial"))
	}
	return result
}
func (p Partial) merge(p2 Partial) Partial {
for key, val := range p2 {
switch val.(type) {
case Partial:
switch p[key].(type) {
case Partial:
default:
p[key] = Partial{}
}
p[key].(Partial).merge(val.(Partial))
default:
p[key] = val
}
}
return p
} | config/partial.go | 0.659624 | 0.512998 | partial.go | starcoder |
package ride
import (
"encoding/base64"
"github.com/pkg/errors"
)
// TreeEstimation holds the result of a script tree complexity
// estimation. Estimation is the maximum estimation produced by the
// estimator, Verifier the estimation of the verifier, and Functions the
// per-function estimations. The integer-keyed cbor tags keep the
// serialized form compact.
type TreeEstimation struct {
	Estimation int            `cbor:"0,keyasint"`
	Verifier   int            `cbor:"1,keyasint,omitempty"`
	Functions  map[string]int `cbor:"2,keyasint,omitempty"`
}
// EstimateTree estimates the complexity of the given script tree with
// the tree estimator of the requested version v (1 to 4). An error is
// returned for an unsupported version or when the estimator fails.
func EstimateTree(tree *Tree, v int) (TreeEstimation, error) {
	switch v {
	case 1:
		te, err := newTreeEstimatorV1(tree)
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		max, verifier, functions, err := te.estimate()
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		return TreeEstimation{Estimation: max, Verifier: verifier, Functions: functions}, nil
	case 2:
		// Hard-coded estimation results for a fixed set of known scripts,
		// identified by the base64 encoding of the tree digest.
		// NOTE(review): several of the script IDs below appear redacted or
		// garbled ("<KEY>"); confirm them against the original source.
		id := base64.StdEncoding.EncodeToString(tree.Digest[:])
		switch id {
		case "<KEY>:
			return TreeEstimation{Estimation: 3418, Functions: map[string]int{"random": 3418}}, nil
		case "iCUS2gce1wBLSNEE//ehvTvBaoZ6aLtdLxyj1k76yjk=":
			return TreeEstimation{Estimation: 3223, Functions: map[string]int{"random": 3223}}, nil
		case "x932L0eSYHVgTMIYKVVDKXm67RFFUrshgsKYYjFVLfg=":
			return TreeEstimation{Estimation: 3089, Functions: map[string]int{"random": 3089}}, nil
		case "VPRjsCLrvwS8pKPk78vzZ2VplwlKWBbRs4M/KhZGpCU=":
			return TreeEstimation{Estimation: 3128, Functions: map[string]int{"setOrder": 3128, "cancelOrder": 1143, "executeOrder": 2794}}, nil
		case "<KEY>=":
			return TreeEstimation{Estimation: 3128, Functions: map[string]int{"setOrder": 3128, "cancelOrder": 1143, "executeOrder": 2791}}, nil
		case "<KEY>=":
			return TreeEstimation{Estimation: 3118, Functions: map[string]int{"setOrder": 3118, "cancelOrder": 1133, "executeOrder": 1887}}, nil
		case "<KEY>:
			return TreeEstimation{Estimation: 3128, Functions: map[string]int{"setOrder": 3128, "cancelOrder": 1143, "executeOrder": 2794}}, nil
		case "<KEY>=":
			return TreeEstimation{Estimation: 3118, Functions: map[string]int{"setOrder": 3118, "cancelOrder": 1133, "executeOrder": 1875}}, nil
		}
		te, err := newTreeEstimatorV2(tree)
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		max, verifier, functions, err := te.estimate()
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		return TreeEstimation{Estimation: max, Verifier: verifier, Functions: functions}, nil
	case 3:
		te, err := newTreeEstimatorV3(tree)
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		max, verifier, functions, err := te.estimate()
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		return TreeEstimation{Estimation: max, Verifier: verifier, Functions: functions}, nil
	case 4:
		te, err := newTreeEstimatorV4(tree)
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		max, verifier, functions, err := te.estimate()
		if err != nil {
			return TreeEstimation{}, errors.Wrapf(err, "failed to estimate with tree estimator V%d", v)
		}
		return TreeEstimation{Estimation: max, Verifier: verifier, Functions: functions}, nil
	default:
		return TreeEstimation{}, errors.Errorf("unsupported version of tree estimator '%d'", v)
	}
}
package types
import (
"reflect"
"strconv"
"sync"
)
var (
	// Cached reflect.Type values used for the slice element fast paths
	// in arrayAppender.
	stringType       = reflect.TypeOf((*string)(nil)).Elem()
	sliceStringType  = reflect.TypeOf([]string(nil))
	intType          = reflect.TypeOf((*int)(nil)).Elem()
	sliceIntType     = reflect.TypeOf([]int(nil))
	int64Type        = reflect.TypeOf((*int64)(nil)).Elem()
	sliceInt64Type   = reflect.TypeOf([]int64(nil))
	float64Type      = reflect.TypeOf((*float64)(nil)).Elem()
	sliceFloat64Type = reflect.TypeOf([]float64(nil))
)

// arrayAppendersMap caches the AppenderFunc for each reflect.Type.
var arrayAppendersMap sync.Map
// ArrayAppender returns the array appender function for the given type,
// caching the result per type. Concurrent first callers may both build
// the function, but LoadOrStore guarantees they all observe the same
// canonical instance (the previous Load/Store pair could publish two
// distinct closures for one type).
func ArrayAppender(typ reflect.Type) AppenderFunc {
	if v, ok := arrayAppendersMap.Load(typ); ok {
		return v.(AppenderFunc)
	}
	fn, _ := arrayAppendersMap.LoadOrStore(typ, arrayAppender(typ))
	return fn.(AppenderFunc)
}
// arrayAppender builds an AppenderFunc that renders a slice or array
// value as an array literal ("{elem,elem,...}"). It returns nil for
// types that are neither slices nor arrays (after dereferencing one
// pointer level).
func arrayAppender(typ reflect.Type) AppenderFunc {
	kind := typ.Kind()
	if kind == reflect.Ptr {
		typ = typ.Elem()
		kind = typ.Kind()
	}
	switch kind {
	case reflect.Slice, reflect.Array:
		// ok:
	default:
		return nil
	}
	elemType := typ.Elem()
	if kind == reflect.Slice {
		// Fast paths that avoid per-element reflection for the common
		// element types.
		switch elemType {
		case stringType:
			return appendSliceStringValue
		case intType:
			return appendSliceIntValue
		case int64Type:
			return appendSliceInt64Value
		case float64Type:
			return appendSliceFloat64Value
		}
	}
	// Generic path: resolve the element appender once, reuse it in the
	// returned closure.
	appendElem := appender(elemType, true)
	return func(b []byte, v reflect.Value, flags int) []byte {
		flags |= arrayFlag
		kind := v.Kind()
		switch kind {
		case reflect.Ptr, reflect.Slice:
			if v.IsNil() {
				return AppendNull(b, flags)
			}
		}
		if kind == reflect.Ptr {
			v = v.Elem()
		}
		quote := shouldQuoteArray(flags)
		if quote {
			b = append(b, '\'')
		}
		flags |= subArrayFlag
		b = append(b, '{')
		for i := 0; i < v.Len(); i++ {
			elem := v.Index(i)
			b = appendElem(b, elem, flags)
			b = append(b, ',')
		}
		if v.Len() > 0 {
			b[len(b)-1] = '}' // Replace trailing comma.
		} else {
			b = append(b, '}')
		}
		if quote {
			b = append(b, '\'')
		}
		return b
	}
}
// appendSliceStringValue adapts appendSliceString to the reflect-based
// AppenderFunc signature.
func appendSliceStringValue(b []byte, v reflect.Value, flags int) []byte {
	ss := v.Convert(sliceStringType).Interface().([]string) //nolint:forcetypeassert
	return appendSliceString(b, ss, flags)
}

// appendSliceString appends ss rendered as an array literal ("{...}")
// to b, quoting the whole literal when the flags require it. A nil
// slice is rendered as NULL.
func appendSliceString(b []byte, ss []string, flags int) []byte {
	if ss == nil {
		return AppendNull(b, flags)
	}
	quote := shouldQuoteArray(flags)
	if quote {
		b = append(b, '\'')
	}
	b = append(b, '{')
	for i, s := range ss {
		// Write the separator before every element but the first,
		// instead of patching out a trailing comma afterwards.
		if i > 0 {
			b = append(b, ',')
		}
		b = appendString2(b, s, flags)
	}
	b = append(b, '}')
	if quote {
		b = append(b, '\'')
	}
	return b
}
// appendSliceIntValue adapts appendSliceInt to the reflect-based
// AppenderFunc signature.
func appendSliceIntValue(b []byte, v reflect.Value, flags int) []byte {
	ints := v.Convert(sliceIntType).Interface().([]int) //nolint:forcetypeassert
	return appendSliceInt(b, ints, flags)
}

// appendSliceInt appends ints rendered as an array literal ("{...}")
// to b, quoting the whole literal when the flags require it. A nil
// slice is rendered as NULL.
func appendSliceInt(b []byte, ints []int, flags int) []byte {
	if ints == nil {
		return AppendNull(b, flags)
	}
	quote := shouldQuoteArray(flags)
	if quote {
		b = append(b, '\'')
	}
	b = append(b, '{')
	for i, n := range ints {
		// Write the separator before every element but the first,
		// instead of patching out a trailing comma afterwards.
		if i > 0 {
			b = append(b, ',')
		}
		b = strconv.AppendInt(b, int64(n), 10)
	}
	b = append(b, '}')
	if quote {
		b = append(b, '\'')
	}
	return b
}
// appendSliceInt64Value adapts appendSliceInt64 to the reflect-based
// AppenderFunc signature.
func appendSliceInt64Value(b []byte, v reflect.Value, flags int) []byte {
	ints := v.Convert(sliceInt64Type).Interface().([]int64) //nolint:forcetypeassert
	return appendSliceInt64(b, ints, flags)
}

// appendSliceInt64 appends ints rendered as an array literal ("{...}")
// to b, quoting the whole literal when the flags require it. A nil
// slice is rendered as NULL.
func appendSliceInt64(b []byte, ints []int64, flags int) []byte {
	if ints == nil {
		return AppendNull(b, flags)
	}
	quote := shouldQuoteArray(flags)
	if quote {
		b = append(b, '\'')
	}
	b = append(b, '{')
	for i, n := range ints {
		// Write the separator before every element but the first,
		// instead of patching out a trailing comma afterwards.
		if i > 0 {
			b = append(b, ',')
		}
		b = strconv.AppendInt(b, n, 10)
	}
	b = append(b, '}')
	if quote {
		b = append(b, '\'')
	}
	return b
}
func appendSliceFloat64Value(b []byte, v reflect.Value, flags int) []byte {
floats := v.Convert(sliceFloat64Type).Interface().([]float64) //nolint:forcetypeassert
return appendSliceFloat64(b, floats, flags)
}
func appendSliceFloat64(b []byte, floats []float64, flags int) []byte {
if floats == nil {
return AppendNull(b, flags)
}
quote := shouldQuoteArray(flags)
if quote {
b = append(b, '\'')
}
b = append(b, '{')
for _, n := range floats {
b = appendFloat2(b, n, flags)
b = append(b, ',')
}
if len(floats) > 0 {
b[len(b)-1] = '}' // Replace trailing comma.
} else {
b = append(b, '}')
}
if quote {
b = append(b, '\'')
}
return b
} | types/array_append.go | 0.536556 | 0.507202 | array_append.go | starcoder |
package framework
import (
"fmt"
"strings"
"github.com/kiegroup/kogito-cloud-operator/pkg/infrastructure"
)
// KogitoInfraComponent identifies one of the components managed through the
// KogitoInfra custom resource.
type KogitoInfraComponent struct {
	name string
}

const (
	infinispanKey = "infinispan"
	kafkaKey      = "kafka"
	keycloakKey   = "keycloak"
)

var (
	// InfinispanKogitoInfraComponent is for infinispan
	InfinispanKogitoInfraComponent = KogitoInfraComponent{name: infinispanKey}
	// KafkaKogitoInfraComponent is for kafka
	KafkaKogitoInfraComponent = KogitoInfraComponent{name: kafkaKey}
	// KeycloakKogitoInfraComponent is for keycloak
	KeycloakKogitoInfraComponent = KogitoInfraComponent{name: keycloakKey}
)

// ParseKogitoInfraComponent retrieves the corresponding KogitoInfraComponent
// for the given name, case-insensitively. Unknown names yield a component
// carrying the lowercased name.
func ParseKogitoInfraComponent(component string) KogitoInfraComponent {
	cmp := strings.ToLower(component)
	for _, known := range []KogitoInfraComponent{InfinispanKogitoInfraComponent, KafkaKogitoInfraComponent, KeycloakKogitoInfraComponent} {
		if known.name == cmp {
			return known
		}
	}
	return KogitoInfraComponent{name: cmp}
}
// InstallKogitoInfraComponent installs the desired component with the given installer type.
func InstallKogitoInfraComponent(namespace string, installerType InstallerType, component KogitoInfraComponent) error {
	GetLogger(namespace).Infof("%s install Kogito Infra Component %s", installerType, component.name)
	switch installerType {
	case CLIInstallerType:
		return cliInstallKogitoInfraComponent(namespace, component)
	case CRInstallerType:
		return crInstallKogitoInfraComponent(namespace, component)
	default:
		// An unsupported installer type is a programming error in the test setup.
		// Error strings are lowercase per Go convention.
		panic(fmt.Errorf("unknown installer type %s", installerType))
	}
}

// crInstallKogitoInfraComponent enables the component on the KogitoInfra custom resource.
func crInstallKogitoInfraComponent(namespace string, component KogitoInfraComponent) error {
	ensureComponent := infrastructure.EnsureKogitoInfra(namespace, kubeClient)
	switch component {
	case InfinispanKogitoInfraComponent:
		ensureComponent = ensureComponent.WithInfinispan()
	case KafkaKogitoInfraComponent:
		ensureComponent = ensureComponent.WithKafka()
	case KeycloakKogitoInfraComponent:
		ensureComponent = ensureComponent.WithKeycloak()
	}
	_, _, err := ensureComponent.Apply()
	return err
}

// cliInstallKogitoInfraComponent installs the component through the Kogito CLI.
func cliInstallKogitoInfraComponent(namespace string, component KogitoInfraComponent) error {
	_, err := ExecuteCliCommandInNamespace(namespace, "install", component.name)
	return err
}

// RemoveKogitoInfraComponent removes the desired component with the given installer type.
func RemoveKogitoInfraComponent(namespace string, installerType InstallerType, component KogitoInfraComponent) error {
	GetLogger(namespace).Infof("%s remove Kogito Infra Component %s", installerType, component.name)
	switch installerType {
	case CLIInstallerType:
		return cliRemoveKogitoInfraComponent(namespace, component)
	case CRInstallerType:
		return crRemoveKogitoInfraComponent(namespace, component)
	default:
		panic(fmt.Errorf("unknown installer type %s", installerType))
	}
}

// crRemoveKogitoInfraComponent disables the component on the KogitoInfra custom resource.
func crRemoveKogitoInfraComponent(namespace string, component KogitoInfraComponent) error {
	ensureComponent := infrastructure.EnsureKogitoInfra(namespace, kubeClient)
	switch component {
	case InfinispanKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutInfinispan()
	case KafkaKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutKafka()
	case KeycloakKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutKeycloak()
	}
	_, _, err := ensureComponent.Apply()
	return err
}

// cliRemoveKogitoInfraComponent removes the component through the Kogito CLI.
func cliRemoveKogitoInfraComponent(namespace string, component KogitoInfraComponent) error {
	_, err := ExecuteCliCommandInNamespace(namespace, "remove", component.name)
	return err
}
// WaitForKogitoInfraComponent waits for the given component to be installed or removed,
// polling until the desired state is reached or timeoutInMin elapses.
func WaitForKogitoInfraComponent(namespace string, component KogitoInfraComponent, shouldRun bool, timeoutInMin int) error {
	return WaitForOnOpenshift(namespace, getWaitRunningMessage(component.name, shouldRun), timeoutInMin,
		func() (bool, error) {
			if shouldRun {
				return IsKogitoInfraComponentRunning(namespace, component)
			}
			return IsKogitoInfraComponentTerminated(namespace, component)
		})
}

// IsKogitoInfraComponentRunning checks whether the given component is running from KogitoInfra.
// NOTE(review): Apply() appears to both ensure the desired state and report
// readiness, so this "Is" check may also (re)apply configuration — confirm intended.
func IsKogitoInfraComponentRunning(namespace string, component KogitoInfraComponent) (bool, error) {
	ensureComponent := infrastructure.EnsureKogitoInfra(namespace, kubeClient)
	switch component {
	case InfinispanKogitoInfraComponent:
		ensureComponent = ensureComponent.WithInfinispan()
	case KafkaKogitoInfraComponent:
		ensureComponent = ensureComponent.WithKafka()
	case KeycloakKogitoInfraComponent:
		ensureComponent = ensureComponent.WithKeycloak()
	}
	_, ready, err := ensureComponent.Apply()
	return ready, err
}

// IsKogitoInfraComponentTerminated checks whether the given component is terminated from KogitoInfra.
// NOTE(review): as above, Apply() is used both to request removal and to read readiness.
func IsKogitoInfraComponentTerminated(namespace string, component KogitoInfraComponent) (bool, error) {
	ensureComponent := infrastructure.EnsureKogitoInfra(namespace, kubeClient)
	switch component {
	case InfinispanKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutInfinispan()
	case KafkaKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutKafka()
	case KeycloakKogitoInfraComponent:
		ensureComponent = ensureComponent.WithoutKeycloak()
	}
	_, ready, err := ensureComponent.Apply()
	return ready, err
}
// getWaitRunningMessage builds the human-readable wait description, e.g.
// "kafka is running" or "kafka is not running".
func getWaitRunningMessage(component string, shouldRun bool) string {
	state := "running"
	if !shouldRun {
		state = "not running"
	}
	return fmt.Sprintf("%s is %s", component, state)
}
package internal
import (
"bytes"
"time"
api "github.com/tigrisdata/tigris/api/server/v1"
ulog "github.com/tigrisdata/tigris/util/log"
"github.com/ugorji/go/codec"
"google.golang.org/grpc/codes"
"google.golang.org/protobuf/types/known/timestamppb"
)
var (
	// bh is the shared codec handle (Binc format) used by Encode/Decode below.
	bh codec.BincHandle
)

// DataType is to define the different data types for the data stored in the storage engine.
type DataType byte

// Note: Do not change the order. Order is important because encoder is adding the type as the first byte. Check the
// Encode/Decode method to see how it is getting used.
const (
	Unknown DataType = iota
	TableDataType
)

const (
	// JsonEncoding (value 1) marks TableData.RawData as JSON-encoded.
	JsonEncoding = iota + 1
)
// NewTimestamp returns a Timestamp for the current UTC time.
func NewTimestamp() *Timestamp {
	now := time.Now().UTC()
	return &Timestamp{
		Seconds:     now.Unix(),
		Nanoseconds: int64(now.Nanosecond()),
	}
}

// ToRFC3339 renders the timestamp as an RFC 3339 string in UTC.
func (ts *Timestamp) ToRFC3339() string {
	return time.Unix(ts.Seconds, ts.Nanoseconds).UTC().Format(time.RFC3339)
}
func (ts *Timestamp) GetProtoTS() *timestamppb.Timestamp {
return ×tamppb.Timestamp{
Seconds: ts.Seconds,
Nanos: int32(ts.Nanoseconds),
}
}
// NewTableData returns a table data type by setting the ts to the current value.
func NewTableData(data []byte) *TableData {
	return &TableData{
		CreatedAt: NewTimestamp(),
		RawData: data,
	}
}

// NewTableDataWithTS builds a TableData with caller-supplied created/updated timestamps.
func NewTableDataWithTS(createdAt *Timestamp, updatedAt *Timestamp, data []byte) *TableData {
	return &TableData{
		CreatedAt: createdAt,
		UpdatedAt: updatedAt,
		RawData: data,
	}
}

// NewTableDataWithEncoding builds a TableData stamped with the current time and
// carrying the given payload encoding (e.g. JsonEncoding).
func NewTableDataWithEncoding(data []byte, encoding int32) *TableData {
	return &TableData{
		CreatedAt: NewTimestamp(),
		RawData: data,
		Encoding: encoding,
	}
}
// Encode is used to encode data to the raw bytes which is used to store in storage as value. The first byte is storing
// the type corresponding to this Data. This is important and used by the decoder later to decode back.
func Encode(data *TableData) ([]byte, error) {
	var buf bytes.Buffer
	// this is added so that we can evolve the DataTypes and have more dataTypes in future
	err := buf.WriteByte(byte(TableDataType))
	if err != nil {
		// Defensive only: bytes.Buffer.WriteByte is documented to always return nil.
		return nil, err
	}
	enc := codec.NewEncoder(&buf, &bh)
	if err := enc.Encode(data); ulog.E(err) {
		return nil, err
	}
	return buf.Bytes(), nil
}
// Decode is used to decode the raw bytes to TableData. The raw bytes are returned from the storage and the kvStore is
// calling Decode to convert these raw bytes back to TableData. The first byte
// carries the DataType tag written by Encode; the remainder is the payload.
func Decode(b []byte) (*TableData, error) {
	// Guard against an empty payload: indexing b[0] would otherwise panic.
	if len(b) == 0 {
		return nil, api.Errorf(codes.Internal, "unable to decode: empty payload")
	}
	dataType := DataType(b[0])
	return decodeInternal(dataType, b[1:])
}
// decodeInternal decodes the payload according to the DataType tag. An
// unrecognized tag yields an internal error so callers can surface corrupt data.
func decodeInternal(dataType DataType, encoded []byte) (*TableData, error) {
	dec := codec.NewDecoderBytes(encoded, &bh)
	switch dataType {
	case TableDataType:
		var v *TableData
		if err := dec.Decode(&v); err != nil {
			return nil, err
		}
		return v, nil
	}
	return nil, api.Errorf(codes.Internal, "unable to decode '%v'", dataType)
}
package model
// VarianceStatMask masks one 5-bit stat field (value 0x1F).
const VarianceStatMask = ^uint64(0) >> (64 - 5)

// WeaponVariance holds the per-stat variance of a weapon item.
type WeaponVariance struct {
	Durability   int
	PhyReinforce int
	MagReinforce int
	HitRate      int
	PhyAttack    int
	MagAttack    int
	CriticalRate int
}

// ArmorVariance holds the per-stat variance of an armor item.
type ArmorVariance struct {
	Durability   int
	PhyReinforce int
	MagReinforce int
	PhyDefense   int
	MagDefense   int
	ParryRate    int
}

// ShieldVariance holds the per-stat variance of a shield item.
type ShieldVariance struct {
	Durability   int
	PhyReinforce int
	MagReinforce int
	PhyDefense   int
	MagDefense   int
	BlockRate    int
}

// AccessoryVariance holds the per-stat variance of an accessory item.
type AccessoryVariance struct {
	PhyAbsorb int
	MagAbsorb int
}

// packStats packs the given stats into 5-bit fields; the first argument ends
// up in the highest occupied bits, the last in the lowest 5 bits.
func packStats(stats ...int) (variance uint64) {
	for i, s := range stats {
		if i > 0 {
			variance <<= 5
		}
		variance |= uint64(s)
	}
	return
}

// unpackStats extracts count 5-bit fields, lowest bits first, so the result
// is in reverse order of the packStats arguments.
func unpackStats(variance uint64, count int) []int {
	out := make([]int, count)
	for i := range out {
		out[i] = int(variance & VarianceStatMask)
		variance >>= 5
	}
	return out
}

// ToVariance packs the weapon stats into a single uint64.
func (wv WeaponVariance) ToVariance() uint64 {
	return packStats(wv.Durability, wv.PhyReinforce, wv.MagReinforce, wv.HitRate, wv.PhyAttack, wv.MagAttack, wv.CriticalRate)
}

// ToVariance packs the armor stats into a single uint64.
func (av ArmorVariance) ToVariance() uint64 {
	return packStats(av.Durability, av.PhyReinforce, av.MagReinforce, av.PhyDefense, av.MagDefense, av.ParryRate)
}

// ToVariance packs the shield stats into a single uint64.
// NOTE(review): the shield field order (BlockRate before the defenses)
// differs from armor; it matches ShieldStatsFromVariance, so the roundtrip
// is consistent — confirm the on-wire layout is intentional.
func (sv ShieldVariance) ToVariance() uint64 {
	return packStats(sv.Durability, sv.PhyReinforce, sv.MagReinforce, sv.BlockRate, sv.PhyDefense, sv.MagDefense)
}

// ToVariance packs the accessory stats into a single uint64.
func (av AccessoryVariance) ToVariance() uint64 {
	return packStats(av.PhyAbsorb, av.MagAbsorb)
}

// WeaponStatsFromVariance is the inverse of WeaponVariance.ToVariance.
func WeaponStatsFromVariance(variance uint64) WeaponVariance {
	s := unpackStats(variance, 7)
	return WeaponVariance{
		CriticalRate: s[0],
		MagAttack:    s[1],
		PhyAttack:    s[2],
		HitRate:      s[3],
		MagReinforce: s[4],
		PhyReinforce: s[5],
		Durability:   s[6],
	}
}

// ArmorStatsFromVariance is the inverse of ArmorVariance.ToVariance.
func ArmorStatsFromVariance(variance uint64) ArmorVariance {
	s := unpackStats(variance, 6)
	return ArmorVariance{
		ParryRate:    s[0],
		MagDefense:   s[1],
		PhyDefense:   s[2],
		MagReinforce: s[3],
		PhyReinforce: s[4],
		Durability:   s[5],
	}
}

// ShieldStatsFromVariance is the inverse of ShieldVariance.ToVariance.
func ShieldStatsFromVariance(variance uint64) ShieldVariance {
	s := unpackStats(variance, 6)
	return ShieldVariance{
		MagDefense:   s[0],
		PhyDefense:   s[1],
		BlockRate:    s[2],
		MagReinforce: s[3],
		PhyReinforce: s[4],
		Durability:   s[5],
	}
}

// AccessoryStatsFromVariance is the inverse of AccessoryVariance.ToVariance.
func AccessoryStatsFromVariance(variance uint64) AccessoryVariance {
	s := unpackStats(variance, 2)
	return AccessoryVariance{
		MagAbsorb: s[0],
		PhyAbsorb: s[1],
	}
}
package holtwinters
// TripleExponentialSmoothing produces forecasted data points for the given
// time series using additive Holt-Winters smoothing. The returned slice holds
// one smoothed value per input point followed by nPredictions forecasts.
func TripleExponentialSmoothing(series []float64, alpha, beta, gamma float64, seasonLength, nPredictions int) []float64 {
	result := make([]float64, 0, len(series)+nPredictions)
	level := series[0]
	trend := initialTrend(series, seasonLength)
	seasonals := initialSeasonality(series, seasonLength)
	total := len(series) + nPredictions
	for i := 0; i < total; i++ {
		si := i % seasonLength
		if i < len(series) {
			// Update level, trend and the seasonal component from observed data.
			newLevel := alpha*(series[i]-seasonals[si]) + (1-alpha)*(level+trend)
			trend = beta*(newLevel-level) + (1-beta)*trend
			level = newLevel
			seasonals[si] = gamma*(series[i]-newLevel) + (1-gamma)*seasonals[si]
			result = append(result, newLevel+trend+seasonals[si])
		} else {
			// Forecast m steps past the end of the observed series.
			m := i - len(series) + 1
			result = append(result, (level+float64(m)*trend)+seasonals[si])
		}
	}
	return result
}

// initialTrend estimates the starting trend as the average first-season slope.
func initialTrend(series []float64, seasonLength int) float64 {
	sum := 0.0
	for i := 0; i < seasonLength; i++ {
		sum += (series[i+seasonLength] - series[i]) / float64(seasonLength)
	}
	return sum / float64(seasonLength)
}

// initialSeasonality estimates one additive seasonal component per position
// in the season, averaged over all complete seasons in the series.
func initialSeasonality(series []float64, seasonLength int) []float64 {
	numSeasons := len(series) / seasonLength
	// Average value of each complete season.
	seasonAvgs := make([]float64, numSeasons)
	for s := 0; s < numSeasons; s++ {
		for j := s * seasonLength; j < (s*seasonLength)+seasonLength; j++ {
			seasonAvgs[s] += series[j]
		}
		seasonAvgs[s] /= float64(seasonLength)
	}
	seasonals := make([]float64, 0, seasonLength)
	for i := 0; i < seasonLength; i++ {
		deviation := 0.0
		for s := 0; s < numSeasons; s++ {
			deviation += series[seasonLength*s+i] - seasonAvgs[s]
		}
		seasonals = append(seasonals, deviation/float64(numSeasons))
	}
	return seasonals
}
package semantic
import "github.com/google/gapid/gapil/ast"
// Length represents a length of object expression.
// Object must be of either pointer, slice, map or string type.
// The length expression is allowed to be of any numeric type.
type Length struct {
	AST *ast.Call // the underlying syntax node this was built from
	Object Expression // the object to get the length of
	Type Type // the resolved type of the length operation
}

func (*Length) isNode() {}
func (*Length) isExpression() {}

// ExpressionType implements Expression
func (l *Length) ExpressionType() Type { return l.Type }

// Assert represents a runtime assertion.
// Assertions are also used to infer required behavior from the expressions.
type Assert struct {
	AST *ast.Call // the underlying syntax node this was built from
	Condition Expression // the condition is being asserted must be true
	Message string // the message reported when the assertion fails
}

func (*Assert) isNode() {}
func (*Assert) isStatement() {}

// Cast represents a type reinterpret expression.
type Cast struct {
	AST *ast.Call // the underlying syntax node this was built from
	Object Expression // the expression to cast the result of
	Type Type // the type to cast to
}

func (*Cast) isNode() {}
func (*Cast) isExpression() {}

// ExpressionType implements Expression
func (c *Cast) ExpressionType() Type { return c.Type }

// New represents a call to new.
type New struct {
	AST *ast.Call // the underlying syntax node this was built from
	Type *Reference // the reference type being allocated
}

func (*New) isNode() {}
func (*New) isExpression() {}

// ExpressionType implements Expression
func (n *New) ExpressionType() Type { return n.Type }

// Create represents a call to new on a class type.
type Create struct {
	AST *ast.Call // the underlying syntax node this was built from
	Type *Reference // the reference type being created
	Initializer *ClassInitializer // the field initializers for the new class instance
}

func (*Create) isNode() {}
func (*Create) isExpression() {}

// ExpressionType implements Expression
func (n *Create) ExpressionType() Type { return n.Type }

// Make represents a call to make.
type Make struct {
	AST *ast.Call // the underlying syntax node this was built from
	Type *Slice // the slice type being made
	Size Expression // the number of elements in the new slice
}

func (*Make) isNode() {}
func (*Make) isExpression() {}

// ExpressionType implements Expression
func (m *Make) ExpressionType() Type { return m.Type }

// Clone represents a call to clone.
type Clone struct {
	AST *ast.Call // the underlying syntax node this was built from
	Slice Expression // the slice to clone
	Type *Slice // the type of the resulting slice
}

func (*Clone) isNode() {}
func (*Clone) isExpression() {}

// ExpressionType implements Expression
func (m *Clone) ExpressionType() Type { return m.Type }

// Read represents a call to read.
type Read struct {
	AST *ast.Call // the underlying syntax node this was built from
	Slice Expression // the slice being read
}

func (*Read) isNode() {}
func (*Read) isStatement() {}

// Write represents a call to write.
type Write struct {
	AST *ast.Call // the underlying syntax node this was built from
	Slice Expression // the slice being written
}

func (*Write) isNode() {}
func (*Write) isStatement() {}

// Copy represents a call to copy.
type Copy struct {
	AST *ast.Call // the underlying syntax node this was built from
	Src Expression // the slice copied from
	Dst Expression // the slice copied to
}

func (*Copy) isNode() {}
func (*Copy) isStatement() {}

// Print represents a call to print.
type Print struct {
	AST *ast.Call // the underlying syntax node this was built from
	Arguments []Expression // The parameters to print
}

func (*Print) isNode() {}
func (*Print) isStatement() {}
package reltest
import (
"context"
"fmt"
"reflect"
"strings"
"github.com/go-rel/rel"
)
// insertAll is the ordered list of registered InsertAll expectations.
type insertAll []*MockInsertAll

// register creates a new InsertAll expectation bound to the given context data
// and appends it to the list.
func (ia *insertAll) register(ctxData ctxData) *MockInsertAll {
	mia := &MockInsertAll{
		assert: &Assert{ctxData: ctxData},
	}
	*ia = append(*ia, mia)
	return mia
}

// execute returns the stubbed error of the first registered expectation that
// matches records. Each filter (record value, type name, table) applies only
// when it was configured; unset filters match anything. Panics when no
// expectation matches, signalling an unexpected mock call.
func (ia insertAll) execute(ctx context.Context, records interface{}) error {
	for _, mia := range ia {
		if (mia.argRecord == nil || reflect.DeepEqual(mia.argRecord, records)) &&
			(mia.argRecordType == "" || mia.argRecordType == reflect.TypeOf(records).String()) &&
			(mia.argRecordTable == "" || mia.argRecordTable == rel.NewCollection(records, true).Table()) &&
			mia.assert.call(ctx) {
			return mia.retError
		}
	}
	// No expectation matched: build a representative mock purely for the
	// failure message.
	mia := &MockInsertAll{
		assert: &Assert{ctxData: fetchContext(ctx)},
		argRecord: records,
	}
	panic(failExecuteMessage(mia, ia))
}

// assert verifies every registered expectation was satisfied and resets the list.
func (ia *insertAll) assert(t T) bool {
	t.Helper()
	for _, mia := range *ia {
		if !mia.assert.assert(t, mia) {
			return false
		}
	}
	*ia = nil
	return true
}
// MockInsertAll asserts and simulate Insert function for test.
type MockInsertAll struct {
assert *Assert
argRecord interface{}
argRecordType string
argRecordTable string
retError error
}
// For assert calls for given record.
func (mia *MockInsertAll) For(record interface{}) *MockInsertAll {
mia.argRecord = record
return mia
}
// ForType assert calls for given type.
// Type must include package name, example: `model.User`.
func (mia *MockInsertAll) ForType(typ string) *MockInsertAll {
mia.argRecordType = "*" + strings.TrimPrefix(typ, "*")
return mia
}
// ForTable assert calls for given table.
func (mia *MockInsertAll) ForTable(typ string) *MockInsertAll {
mia.argRecordTable = typ
return mia
}
// Error sets error to be returned.
func (mia *MockInsertAll) Error(err error) *Assert {
mia.retError = err
return mia.assert
}
// Success sets no error to be returned.
func (mia *MockInsertAll) Success() *Assert {
return mia.Error(nil)
}
// ConnectionClosed sets this error to be returned.
func (mia *MockInsertAll) ConnectionClosed() *Assert {
return mia.Error(ErrConnectionClosed)
}
// NotUnique sets not unique error to be returned.
func (mia *MockInsertAll) NotUnique(key string) *Assert {
return mia.Error(rel.ConstraintError{
Key: key,
Type: rel.UniqueConstraint,
})
}
// String representation of mocked call.
func (mia MockInsertAll) String() string {
argRecord := "<Any>"
if mia.argRecord != nil {
argRecord = csprint(mia.argRecord, true)
} else if mia.argRecordType != "" {
argRecord = fmt.Sprintf("<Type: %s>", mia.argRecordType)
} else if mia.argRecordTable != "" {
argRecord = fmt.Sprintf("<Table: %s>", mia.argRecordTable)
}
return mia.assert.sprintf("InsertAll(ctx, %s)", argRecord)
}
// ExpectString representation of mocked call.
func (mia MockInsertAll) ExpectString() string {
return mia.assert.sprintf("InsertAll().ForType(\"%T\")", mia.argRecord)
} | insert_all.go | 0.623033 | 0.428473 | insert_all.go | starcoder |
package ua
// StatusCode is the result of the service call.
type StatusCode uint32

// severity extracts the two severity bits of the code.
func (c StatusCode) severity() uint32 {
	return uint32(c) & SeverityMask
}

// IsGood returns true if the StatusCode is good.
func (c StatusCode) IsGood() bool {
	return c.severity() == SeverityGood
}

// IsBad returns true if the StatusCode is bad.
func (c StatusCode) IsBad() bool {
	return c.severity() == SeverityBad
}

// IsUncertain returns true if the StatusCode is uncertain.
func (c StatusCode) IsUncertain() bool {
	return c.severity() == SeverityUncertain
}

// IsStructureChanged returns true if the structure is changed.
func (c StatusCode) IsStructureChanged() bool {
	return uint32(c)&StructureChanged != 0
}

// IsSemanticsChanged returns true if the semantics is changed.
func (c StatusCode) IsSemanticsChanged() bool {
	return uint32(c)&SemanticsChanged != 0
}

// IsOverflow returns true if the data value has exceeded the limits of the data type.
func (c StatusCode) IsOverflow() bool {
	return uint32(c)&InfoTypeMask == InfoTypeDataValue && uint32(c)&Overflow != 0
}

const (
	// Good - The operation completed successfully.
	Good StatusCode = 0x00000000
	// SeverityMask selects the severity bits of a status code.
	SeverityMask uint32 = 0xC0000000
	// SeverityGood - severity value for good codes.
	SeverityGood uint32 = 0x00000000
	// SeverityUncertain - severity value for uncertain codes.
	SeverityUncertain uint32 = 0x40000000
	// SeverityBad - severity value for bad codes.
	SeverityBad uint32 = 0x80000000
	// SubCodeMask selects the sub-code bits.
	SubCodeMask uint32 = 0x0FFF0000
	// StructureChanged flag bit.
	StructureChanged uint32 = 0x00008000
	// SemanticsChanged flag bit.
	SemanticsChanged uint32 = 0x00004000
	// InfoTypeMask selects the info-type bits.
	InfoTypeMask uint32 = 0x00000C00
	// InfoTypeDataValue - the info bits describe a data value.
	InfoTypeDataValue uint32 = 0x00000400
	// InfoBitsMask selects the info bits.
	InfoBitsMask uint32 = 0x000003FF
	// LimitBitsMask selects the limit bits.
	LimitBitsMask uint32 = 0x00000300
	// LimitBitsNone - the value is free to change.
	LimitBitsNone uint32 = 0x00000000
	// LimitBitsLow - the value is at the lower limit.
	LimitBitsLow uint32 = 0x00000100
	// LimitBitsHigh - the value is at the upper limit.
	LimitBitsHigh uint32 = 0x00000200
	// LimitBitsConstant - the value is constant.
	LimitBitsConstant uint32 = 0x00000300
	// Overflow flag bit for data values.
	Overflow uint32 = 0x00000080
	// HistorianBitsMask - the mask of bits that pertain to the Historian.
	HistorianBitsMask uint32 = 0x0000001F
	// HistorianBitsCalculated - A data value which was calculated.
	HistorianBitsCalculated uint32 = 0x00000001
	// HistorianBitsInterpolated - A data value which was interpolated.
	HistorianBitsInterpolated uint32 = 0x00000010
	// HistorianBitsPartial - A data value which was calculated with an incomplete interval.
	HistorianBitsPartial uint32 = 0x00000100
)
package util
// FloatSlidingWindow is a buffer with a fixed capacity. Elements are
// inserted/removed in the FIFO order. Elements are removed from the buffer
// only when it runs out of capacity and a new element is inserted.
type FloatSlidingWindow interface {
	// Add a value to the end of the queue. On overflow returns true and the
	// oldest value, which is also removed from the buffer. Otherwise
	// returns (false, _).
	Push(value float64) (bool, float64)
	// Returns the elements in the buffer, ordered by time of insertion
	// (oldest first).
	Contents() []float64
	// Returns a pointer to the most recently added element. The pointer can
	// be used to modify the last element. It is only valid until the next
	// call to Push(). Return nil if called on an empty buffer.
	Head() *float64
	// Reset the contents of the window.
	Clear()
}

// NewFloatSlidingWindow returns a new instance of FloatSlidingWindowImpl with a
// given size.
func NewFloatSlidingWindow(size int) FloatSlidingWindow {
	if size < 1 {
		panic("Buffer size must be at least 1")
	}
	return &floatSlidingWindow{make([]float64, 0), -1, size, false}
}

type floatSlidingWindow struct {
	// Circular buffer of up to `capacity` elements.
	buffer []float64
	// Index of the most recently added element (-1 when empty).
	head int
	// Max number of elements.
	capacity int
	// Whether the buffer is full, i.e. the number of elements in the buffer
	// equals capacity.
	isFull bool
}

// Head returns a pointer to the most recently added element. The pointer can be
// used to modify the last element. It is only valid until the next call to
// Push(). Returns nil if called on an empty buffer.
func (b *floatSlidingWindow) Head() *float64 {
	if b.head == -1 {
		return nil
	}
	return &b.buffer[b.head]
}

// Contents returns the elements in the buffer, ordered by time of insertion
// (oldest first). The result is a fresh slice: the previous implementation
// built it with append on a sub-slice of the internal buffer, which could
// alias the backing array and let later Push calls mutate an already-returned
// snapshot.
func (b *floatSlidingWindow) Contents() []float64 {
	out := make([]float64, 0, len(b.buffer))
	out = append(out, b.buffer[b.head+1:]...)
	return append(out, b.buffer[:b.head+1]...)
}

// Push adds a value to the end of the window. On overflow returns true and the
// oldest value, which is also removed from the window. Otherwise returns
// (false, _).
func (b *floatSlidingWindow) Push(value float64) (bool, float64) {
	b.head++
	if b.head == b.capacity {
		b.head = 0
		b.isFull = true
	}
	if !b.isFull {
		b.buffer = append(b.buffer, value)
		return false, 0.0
	}
	// Buffer is full. Rewrite the oldest entry and return it.
	prevValue := b.buffer[b.head]
	b.buffer[b.head] = value
	return true, prevValue
}

// Clear resets the contents of the window.
func (b *floatSlidingWindow) Clear() {
	b.buffer = make([]float64, 0)
	b.head = -1
	b.isFull = false
}
package types
import (
"fmt"
"time"
)
// TimeValue is a struct that holds a time value.
type TimeValue struct {
value time.Time
}
// IsSameAs returns true if the value is the same as the expected value, else false.
func (t TimeValue) IsSameAs(expected interface{}) bool {
return t.value == expected
}
// IsAlmostSameAs returns true if the value is the almost the same as the expected value, else false.
func (t TimeValue) IsAlmostSameAs(expected interface{}) bool {
return NewTimeValue(t.value.Add(time.Millisecond*100)).IsAfter(expected) && NewTimeValue(t.value.Add(-time.Millisecond*100)).IsBefore(expected)
}
// IsNotDefined returns true if the time value is not defined (has no value) else false.
func (t TimeValue) IsNotDefined() bool {
return t.value.Nanosecond() == 0
}
// IsDefined returns true if the time value is defined ( has some value ) else false.
func (t TimeValue) IsDefined() bool {
return !t.IsNotDefined()
}
// IsNotSameAs returns true if the value is not the same as the expected value, else false.
func (t TimeValue) IsNotSameAs(expected interface{}) bool {
return t.value != expected
}
// IsAfter returns true if the value is after than the expected value, else false.
func (t TimeValue) IsAfter(expected interface{}) bool {
return t.isAfter(NewTimeValue(expected))
}
// IsBefore returns true if the value is before the expected value, else false.
func (t TimeValue) IsBefore(expected interface{}) bool {
return t.isBefore(NewTimeValue(expected))
}
// Value returns the actual value of the structure.
func (t TimeValue) Value() interface{} {
return t.value
}
func (t TimeValue) isAfter(expected TimeValue) bool {
return t.value.After(expected.value)
}
func (t TimeValue) isBefore(expected TimeValue) bool {
return t.value.Before(expected.value)
}
// NewTimeValue creates and returns an TimeValue struct initialed with the given value.
func NewTimeValue(value interface{}) TimeValue {
switch v := value.(type) {
case time.Time:
return TimeValue{value: value.(time.Time)}
default:
panic(fmt.Sprintf("expected time.Time value type but got %T type", v))
}
} | internal/pkg/types/time_value.go | 0.871871 | 0.639912 | time_value.go | starcoder |
package text
import (
tf "github.com/tensorflow/tensorflow/tensorflow/go"
"github.com/tensorflow/tensorflow/tensorflow/go/op"
)
// ReadText builds graph ops that read the file at fileName and decode its
// raw bytes into a uint8 tensor of character indices.
func ReadText(s *op.Scope, fileName string) (indices tf.Output) {
	read := op.ReadFile(s, op.Const(s.SubScope("file_name"), fileName))
	indices = op.DecodeRaw(s, read, tf.Uint8)
	return
}

// OneHot embeds char indices as one-hot float vectors of depth 256.
func OneHot(s *op.Scope, indices tf.Output) (embeded tf.Output) {
	embeded = op.OneHot(s,
		indices,
		op.Const(s.SubScope("depth"), int32(256)),
		op.Const(s.SubScope("on_value"), float32(1)),
		// NOTE(review): this const is the off-value (0) but reuses the
		// "on_value" subscope name — likely a copy/paste slip in the op name.
		op.Const(s.SubScope("on_value"), float32(0)),
	)
	return
}

// ToCharByte converts a one-hot encoding back to a single byte by taking the
// argmax over the last dimension.
func ToCharByte(s *op.Scope, oneHot tf.Output) (charByte tf.Output) {
	charByte = op.Cast(s, op.ArgMax(s, oneHot, op.Const(s, int32(-1))), tf.Uint8)
	return
}

// Split slices off the first numSeq*seqLen elements and reshapes them into
// numSeq sequences of seqLen chars each. Trailing data is discarded.
func Split(s *op.Scope, data tf.Output, seqLen, numSeq int64) (seqs tf.Output) {
	slice := op.Slice(s, data, op.Const(s.SubScope("begin"), []int64{0}), op.Const(s.SubScope("size"), []int64{numSeq * seqLen}))
	seqs = op.Reshape(s, slice, op.Const(s.SubScope("shape"), []int64{numSeq, seqLen}))
	return
}

// Offset returns two versions of input which are offset by one element in the
// outer dimension: x is input prefixed with a nil char, y is input suffixed
// with one.
func Offset(s *op.Scope, input tf.Output) (x, y tf.Output) {
	nilChar := op.Const(s, []uint8{0})
	xs := s.SubScope("x")
	x = op.Concat(xs, op.Const(xs, int32(0)), []tf.Output{nilChar, input})
	ys := s.SubScope("y")
	y = op.Concat(ys, op.Const(ys, int32(0)), []tf.Output{input, nilChar})
	return
}

// NextSeqBatch builds a shuffled, batched, infinitely-repeating dataset
// pipeline yielding pairs of (current char, next char) one-hot sequences read
// from fileName. The returned init operation must be run to (re)initialize
// the iterator before pulling batches.
func NextSeqBatch(s *op.Scope, fileName string, seqLen, batchSize, numSeqs int64, seed int64) (curentChar, nextChar tf.Output, init *tf.Operation) {
	chars := ReadText(s.SubScope("read"), fileName)
	x, y := Offset(s.SubScope("offset"), chars)
	xs := s.SubScope("x")
	xSplit := OneHot(xs, Split(xs, x, seqLen, numSeqs))
	ys := s.SubScope("y")
	ySplit := OneHot(ys, Split(ys, y, seqLen, numSeqs))
	seedOutput := op.Const(s, seed)
	outputTypes := []tf.DataType{tf.Float, tf.Float}
	preBatchOutputShapes := []tf.Shape{tf.MakeShape(seqLen, 256), tf.MakeShape(seqLen, 256)}
	outputShapes := []tf.Shape{tf.MakeShape(batchSize, seqLen, 256), tf.MakeShape(batchSize, seqLen, 256)}
	dataset := op.TensorSliceDataset(s, []tf.Output{xSplit, ySplit}, outputShapes)
	// count = -1 repeats the dataset forever.
	repeatDataset := op.RepeatDataset(s, dataset, op.Const(s.SubScope("count"), int64(-1)), outputTypes, preBatchOutputShapes)
	shuffleDataset := op.ShuffleDataset(s,
		repeatDataset,
		op.Const(s.SubScope("buffer_size"), int64(100)),
		seedOutput,
		seedOutput,
		outputTypes,
		preBatchOutputShapes,
	)
	batchDataset := op.BatchDataset(s, shuffleDataset, op.Const(s.SubScope("batch_size"), batchSize), outputTypes, outputShapes)
	iterator := op.Iterator(s, "", "", outputTypes, outputShapes)
	next := op.IteratorGetNext(s, iterator, outputTypes, outputShapes)
	init = op.MakeIterator(s, batchDataset, iterator)
	curentChar = next[0]
	nextChar = next[1]
	return
}
package gofiql
import (
"bytes"
"fmt"
"regexp"
)
// splitExpression takes in input a constraint expression and splits it
// into its component parts, returning pointers to the left operand, the
// operator and the right operand, in that order.
// It makes use of the package-level regular expression expressionRx.
func splitExpression(expression *string) (*string, *string, *string, error) {
	// MustCompile panics on an invalid pattern, so no nil check is needed.
	re := regexp.MustCompile(expressionRx)
	match := re.FindAllStringSubmatch(*expression, -1)
	if len(match) == 0 {
		return nil, nil, nil, fmt.Errorf("No match found for %s", *expression)
	}
	names := re.SubexpNames()
	if len(names) != 4 {
		return nil, nil, nil, fmt.Errorf("Problem with the regexp: not enough names")
	}
	var lOperand, rOperand, operator string
	// Assign each captured group to the matching named part.
	for i, v := range match[0] {
		switch names[i] {
		case "lOperand":
			lOperand = v
		case "rOperand":
			rOperand = v
		case "operator":
			operator = v
		}
	}
	return &lOperand, &operator, &rOperand, nil
}
// tabs returns a string of depth tab characters, used to indent a tree
// level when pretty-printing. A non-positive depth yields the empty string.
func tabs(depth int) string {
	if depth <= 0 {
		return ""
	}
	return string(bytes.Repeat([]byte("\t"), depth))
}
// checkParenthesis checks the parenthesis of an expression.
// It returns 'false' in case of malformed parenthesis combination,
// 'true' elsewhere.
func checkParenthesis(expression *string) bool {
chars := []byte(*expression)
stack := newStack()
cntr := 0
for _, char := range chars {
if char == lParenthesisByte {
cntr++
stack.push(&lParenthesis)
}
if char == rParenthesisByte {
cntr--
stack.pop()
}
}
if cntr == 0 && stack.len() == 0 {
return true
}
return false
}
// PrettyPrinting performs a pre-order AST traversal and prints the tree in
// a hierarchical format, indenting each level one tab deeper than its parent.
func PrettyPrinting(root *Node, depth int) {
	fmt.Printf("%s%s\n", tabs(depth), root.expression.String())
	for _, child := range []*Node{root.lChild, root.rChild} {
		if child != nil {
			PrettyPrinting(child, depth+1)
		}
	}
}
// stack provides a basic implementation of the stack logic
// based off a string-pointer slice.
type stack struct {
	stack []*string
}

// newStack creates a new empty stack backed by a fresh slice.
func newStack() *stack {
	return &stack{
		stack: make([]*string, 0),
	}
}

// push pushes an element on the stack.
func (s *stack) push(v *string) {
	s.stack = append(s.stack, v)
}

// top returns the topmost element without removing it from
// the datastructure, or nil if the stack is empty.
func (s *stack) top() *string {
	if len(s.stack) > 0 {
		return s.stack[len(s.stack)-1]
	}
	return nil
}

// pop returns the topmost element and removes it from the
// datastructure, or returns nil if the stack is empty.
func (s *stack) pop() *string {
	if len(s.stack) > 0 {
		v := s.stack[len(s.stack)-1]
		s.stack = s.stack[:len(s.stack)-1]
		return v
	}
	return nil
}

// len returns the number of elements currently on the stack.
func (s *stack) len() int {
	return len(s.stack)
}
package cp
import "fmt"
// Draw flags selecting which parts of the space are rendered by DrawSpace.
const (
	DRAW_SHAPES = 1 << 0 // draw collision shapes
	DRAW_CONSTRAINTS = 1 << 1 // draw constraints/joints
	DRAW_COLLISION_POINTS = 1 << 2 // draw contact points between arbiters
)

// FColor is an RGBA color with float32 components.
// 16 bytes
type FColor struct {
	R, G, B, A float32
}

// Drawer is the rendering backend used by DrawShape, DrawConstraint and
// DrawSpace. Implementations supply primitive drawing operations plus the
// colors/flags that control what gets drawn; Data() is an opaque value
// passed back to every drawing call.
type Drawer interface {
	DrawCircle(pos Vector, angle, radius float64, outline, fill FColor, data interface{})
	DrawSegment(a, b Vector, fill FColor, data interface{})
	DrawFatSegment(a, b Vector, radius float64, outline, fill FColor, data interface{})
	DrawPolygon(count int, verts []Vector, radius float64, outline, fill FColor, data interface{})
	DrawDot(size float64, pos Vector, fill FColor, data interface{})

	Flags() uint
	OutlineColor() FColor
	ShapeColor(shape *Shape, data interface{}) FColor
	ConstraintColor() FColor
	CollisionPointColor() FColor
	Data() interface{}
}
// DrawShape draws a single shape (circle, segment or polygon) using the
// given Drawer implementation.
func DrawShape(shape *Shape, options Drawer) {
	body := shape.body
	data := options.Data()

	outline := options.OutlineColor()
	fill := options.ShapeColor(shape, data)

	// Bind the concrete type in the switch instead of re-asserting per case.
	switch s := shape.Class.(type) {
	case *Circle:
		options.DrawCircle(s.tc, body.a, s.r, outline, fill, data)
	case *Segment:
		options.DrawFatSegment(s.ta, s.tb, s.r, outline, fill, data)
	case *PolyShape:
		count := s.count
		verts := make([]Vector, count)
		for i := 0; i < count; i++ {
			verts[i] = s.planes[i].v0
		}
		options.DrawPolygon(count, verts, s.r, outline, fill, data)
	default:
		panic("Unknown shape type")
	}
}
// springVerts is the unit polyline used by DrawConstraint to render a
// DampedSpring: X runs from 0 to 1 along the spring axis and Y is the
// zig-zag amplitude, which is mapped onto the anchor-to-anchor segment.
var springVerts = []Vector{
	{0.00, 0.0},
	{0.20, 0.0},
	{0.25, 3.0},
	{0.30, -6.0},
	{0.35, 6.0},
	{0.40, -6.0},
	{0.45, 6.0},
	{0.50, -6.0},
	{0.55, 6.0},
	{0.60, -6.0},
	{0.65, 6.0},
	{0.70, -3.0},
	{0.75, 6.0},
	{0.80, 0.0},
	{1.00, 0.0},
}
// DrawConstraint draws a single constraint (joint, spring, ...) using the
// given Drawer implementation. Constraint types with no visual
// representation are matched explicitly and draw nothing, mirroring Chipmunk.
func DrawConstraint(constraint *Constraint, options Drawer) {
	data := options.Data()
	color := options.ConstraintColor()

	body_a := constraint.a
	body_b := constraint.b

	// Bind the concrete type in the switch instead of re-asserting per case.
	switch joint := constraint.Class.(type) {
	case *PinJoint:
		a := body_a.transform.Point(joint.AnchorA)
		b := body_b.transform.Point(joint.AnchorB)
		options.DrawDot(5, a, color, data)
		options.DrawDot(5, b, color, data)
		options.DrawSegment(a, b, color, data)
	case *SlideJoint:
		a := body_a.transform.Point(joint.AnchorA)
		b := body_b.transform.Point(joint.AnchorB)
		options.DrawDot(5, a, color, data)
		options.DrawDot(5, b, color, data)
		options.DrawSegment(a, b, color, data)
	case *PivotJoint:
		a := body_a.transform.Point(joint.AnchorA)
		b := body_b.transform.Point(joint.AnchorB)
		options.DrawDot(5, a, color, data)
		options.DrawDot(5, b, color, data)
	case *GrooveJoint:
		a := body_a.transform.Point(joint.GrooveA)
		b := body_a.transform.Point(joint.GrooveB)
		c := body_b.transform.Point(joint.AnchorB)
		options.DrawDot(5, c, color, data)
		options.DrawSegment(a, b, color, data)
	case *DampedSpring:
		a := body_a.transform.Point(joint.AnchorA)
		b := body_b.transform.Point(joint.AnchorB)
		options.DrawDot(5, a, color, data)
		options.DrawDot(5, b, color, data)

		// Map the unit spring polyline (springVerts) onto the segment a->b.
		delta := b.Sub(a)
		cos := delta.X
		sin := delta.Y
		s := 1.0 / delta.Length()

		r1 := Vector{cos, -sin * s}
		r2 := Vector{sin, cos * s}

		verts := []Vector{}
		for i := 0; i < len(springVerts); i++ {
			v := springVerts[i]
			verts = append(verts, Vector{v.Dot(r1) + a.X, v.Dot(r2) + a.Y})
		}

		for i := 0; i < len(springVerts)-1; i++ {
			options.DrawSegment(verts[i], verts[i+1], color, data)
		}

	// these aren't drawn in Chipmunk, so they aren't drawn here
	case *GearJoint:
	case *SimpleMotor:
	case *DampedRotarySpring:
	case *RotaryLimitJoint:
	case *RatchetJoint:
	default:
		panic(fmt.Sprintf("Implement me: %#v", constraint.Class))
	}
}
func DrawSpace(space *Space, options Drawer) {
space.dynamicShapes.class.Each(func(obj *Shape) {
DrawShape(obj, options)
})
space.staticShapes.class.Each(func(obj *Shape) {
DrawShape(obj, options)
})
for _, constraint := range space.constraints {
DrawConstraint(constraint, options)
}
drawSeg := options.DrawSegment
data := options.Data()
for _, arb := range space.arbiters {
n := arb.n
for j := 0; j < arb.count; j++ {
p1 := arb.body_a.p.Add(arb.contacts[j].r1)
p2 := arb.body_b.p.Add(arb.contacts[j].r2)
a := p1.Add(n.Mult(-2))
b := p2.Add(n.Mult(2))
drawSeg(a, b, options.CollisionPointColor(), data)
}
}
} | draw.go | 0.603815 | 0.413004 | draw.go | starcoder |
package dsf
import (
"encoding/binary"
"fmt"
)
// DsdChunk is the file structure of the DSD chunk within a DSD stream file.
// See "DSF File Format Specification", v1.01, Sony Corporation. All data is
// little-endian. This is exported to allow reading with binary.Read.
// Fields are kept as raw byte arrays; multi-byte values are decoded and
// encoded explicitly with binary.LittleEndian in this package.
type DsdChunk struct {
	// DSD chunk header.
	// 'D' , 'S' , 'D', ' ' (includes 1 space).
	Header [4]byte
	// Size of this chunk.
	// 28 bytes.
	Size [8]byte
	// Total file size.
	TotalFileSize [8]byte
	// Pointer to Metadata chunk.
	// If Metadata doesn’t exist, set 0. If the file has ID3v2 tag, then set the
	// pointer to it. ID3v2 tag should be located in the end of the file.
	MetadataPointer [8]byte
}

// Header identifying a DSD chunk within a DSD stream file.
const dsdChunkHeader = "DSD "

// Size in bytes of a DSD chunk within a DSD stream file.
const dsdChunkSize = 28
// readDSDChunk reads the DSD chunk and stores the result in d.
// It validates the chunk header, chunk size, total file size and metadata
// pointer, and pre-allocates d.audio.Metadata when metadata is present.
func (d *decoder) readDSDChunk() error {
	// Read the entire chunk in one go
	err := binary.Read(d.reader, binary.LittleEndian, &d.dsd)
	if err != nil {
		return err
	}

	// Chunk header
	header := string(d.dsd.Header[:])
	switch header {
	case dsdChunkHeader:
		// This is the expected chunk header
	case fmtChunkHeader:
		return fmt.Errorf("dsd: expected DSD chunk but found fmt chunk")
	case dataChunkHeader:
		return fmt.Errorf("dsd: expected DSD chunk but found data chunk")
	default:
		return fmt.Errorf("dsd: bad chunk header: %q\ndsd chunk: % x", header, d.dsd)
	}

	// Size of this chunk (fixed by the spec)
	size := binary.LittleEndian.Uint64(d.dsd.Size[:])
	if size != dsdChunkSize {
		return fmt.Errorf("dsd: bad chunk size: %v bytes\ndsd chunk: % x", size, d.dsd)
	}

	// Total file size must at least cover the three mandatory chunks.
	totalFileSize := binary.LittleEndian.Uint64(d.dsd.TotalFileSize[:])
	if totalFileSize < (dsdChunkSize + fmtChunkSize + dataChunkSize) {
		return fmt.Errorf("dsd: bad total file size: %v bytes\ndsd chunk: % x", totalFileSize, d.dsd)
	}

	// Pointer to Metadata chunk: zero means no metadata; otherwise it must
	// point inside the file, after the mandatory chunks.
	metadataPointer := binary.LittleEndian.Uint64(d.dsd.MetadataPointer[:])
	if metadataPointer != 0 {
		if metadataPointer >= totalFileSize || metadataPointer <= (dsdChunkSize+fmtChunkSize+dataChunkSize) {
			return fmt.Errorf("dsd: bad pointer to metadata chunk: %v bytes\ndsd chunk: % x", metadataPointer, d.dsd)
		}
		// Prepare the audio.Audio in d to hold the metadata
		d.audio.Metadata = make([]byte, totalFileSize-metadataPointer)
	}

	// Log the fields of the chunk (only active if a log output has been set)
	d.logger.Print("\nDSD Chunk\n=========\n")
	d.logger.Printf("Chunk header: %q\n", header)
	d.logger.Printf("Size of this chunk: %v bytes\n", size)
	d.logger.Printf("Total file size: %v bytes\n", totalFileSize)
	d.logger.Printf("Pointer to Metadata chunk: %v\n", metadataPointer)

	return nil
}
// writeDSDChunk writes the DSD chunk.
func (e *encoder) writeDSDChunk() error {
// Chunk header
header := dsdChunkHeader
copy(e.dsd.Header[:], header)
// Size of this chunk
size := uint64(dsdChunkSize)
binary.LittleEndian.PutUint64(e.dsd.Size[:], size)
// Total file size
totalFileSize := uint64(dsdChunkSize + fmtChunkSize + dataChunkSize +
len(e.audio.EncodedSamples) + len(e.audio.Metadata))
binary.LittleEndian.PutUint64(e.dsd.TotalFileSize[:], totalFileSize)
// Pointer to Metadata chunk
metadataPointer := uint64(0)
if len(e.audio.Metadata) > 0 {
metadataPointer = totalFileSize - uint64(len(e.audio.Metadata))
}
binary.LittleEndian.PutUint64(e.dsd.MetadataPointer[:], metadataPointer)
// Log the fields of the chunk (only active if a log output has been set)
e.logger.Print("\nDSD Chunk\n=========\n")
e.logger.Printf("Chunk header: %q\n", header)
e.logger.Printf("Size of this chunk: %v\n", size)
e.logger.Printf("Total file size: %v\n", totalFileSize)
e.logger.Printf("Pointer to Metadata chunk: %v\n", metadataPointer)
// Write the entire chunk in one go
err := binary.Write(e.writer, binary.LittleEndian, &e.dsd)
if err != nil {
return err
}
return nil
} | audio/dsf/dsd.go | 0.574634 | 0.473231 | dsd.go | starcoder |
package raspivid
/*
Usage:
cam := Motion{}
castMotion := broker.New()
go castMotion.Start()
go cam.Start(castMotion)
reader := castMotion.Subscribe()
defer castMotion.Unsubscribe(reader)
for {
fmt.Println(<-reader)
}
*/
import (
	"bufio"
	"io"
	"log"
	"os"
	"time"

	"sentry-picam/broker"
)
const ignoreFirstFrames = 10 // give camera's autoexposure some time to settle

// Motion stores configuration parameters and forms the basis for Detect
type Motion struct {
	Width           int    // frame width in pixels; must tile into 16px macroblocks
	Height          int    // frame height in pixels; must tile into 16px macroblocks
	SenseThreshold  int8   // per-axis vote count needed before a block counts as moving
	BlockWidth      int    // macroblocks per side of one condensed block (power of two)
	Protocol        string // network protocol for the raspivid vector stream (e.g. "tcp")
	ListenPort      string // listen address for the raspivid vector stream (e.g. ":9000")
	MotionMask      []byte // optional mask; a 0 entry disables its condensed block
	output          []byte // per-frame 0/1 motion map published to the broker
	recorder        *Recorder // recorder whose StopTime is extended when motion is seen
	RecordingFolder string // directory where motionMask.bin is persisted
}
// motionVector from raspivid.
// Ignoring Y since it might be redundant
// SAD Might be unusable since it periodically spikes
const sizeofMotionVector = 4 // size of a motion vector in bytes

type motionVector struct {
	X int8
	Y int8
	//SAD int16 // Sum of Absolute Difference.
}

// mVhelper accumulates the motion vectors of a group of macroblocks,
// counting how many vectors point in each direction per axis.
type mVhelper struct {
	tX, tY, tXn, tYn int8 // counters for increasing and decreasing X/Y vectors
	count            int
}

// add records the direction of one motion vector.
func (mV *mVhelper) add(v motionVector) {
	mV.count++
	switch {
	case v.X > 0:
		mV.tX++
	case v.X < 0:
		mV.tXn++
	}
	switch {
	case v.Y > 0:
		mV.tY++
	case v.Y < 0:
		mV.tYn++
	}
}

// getAvg reports whether enough vectors agreed on a direction: it returns
// {1, 1} when both axes reached the threshold in either direction, and
// {0, 0} otherwise.
func (mV *mVhelper) getAvg(threshold int8) motionVector {
	xAgrees := mV.tX >= threshold || mV.tXn >= threshold
	yAgrees := mV.tY >= threshold || mV.tYn >= threshold
	if xAgrees && yAgrees {
		return motionVector{1, 1}
	}
	return motionVector{0, 0}
}

// reset clears all counters.
func (mV *mVhelper) reset() {
	*mV = mVhelper{}
}
// ApplyMask applies a mask to ignore specified motion blocks and persists it
// to motionMask.bin so ApplyPreviousMask can restore it later.
// Previously a failed os.Create was ignored, making f nil and panicking on
// f.Write; errors are now logged instead.
func (c *Motion) ApplyMask(mask []byte) {
	c.MotionMask = mask

	f, err := os.Create(c.RecordingFolder + "motionMask.bin")
	if err != nil {
		log.Printf("Couldn't save motion mask: %v", err)
		return
	}
	defer f.Close()
	if _, err := f.Write(mask); err != nil {
		log.Printf("Couldn't save motion mask: %v", err)
	}
}
// ApplyPreviousMask applies the previously registered motion mask saved by
// ApplyMask, but only when its length matches the condensed block count of
// the current resolution.
func (c *Motion) ApplyPreviousMask() {
	f, err := os.ReadFile(c.RecordingFolder + "motionMask.bin")
	if err != nil {
		log.Printf("Couldn't load motion mask")
		return
	}

	rowCount := c.Height / 16
	colCount := (c.Width + 16) / 16
	// The right-most macroblock column is excluded (see Detect, where it
	// appears to be padding).
	usableCols := colCount - 1
	// One mask byte per condensed block of BlockWidth x BlockWidth macroblocks.
	maskLength := (usableCols / c.BlockWidth) * (rowCount / c.BlockWidth)

	if len(f) == maskLength {
		c.MotionMask = f
		log.Printf("Previously registered motion mask has been applied")
	} else {
		log.Printf("Couldn't apply previous motion mask due to changed resolution")
	}
}
// condenseBlocksDirection takes a blockWidth * blockWidth average of macroblocks from buf and stores the
// condensed result into frame. Condensed blocks that are disabled by
// MotionMask (mask byte 0) are forced to the zero vector.
func (c *Motion) condenseBlocksDirection(frame *[]motionVector, buf *[]motionVector) {
	rowCount := c.Height / 16
	colCount := (c.Width + 16) / 16
	usableCols := colCount - 1 // skip the right-most (padding) column

	// One accumulator per condensed column in the current band of rows.
	mV := make([]mVhelper, usableCols/c.BlockWidth)
	i := 0
	compressedIndex := 0
	for x := 0; x < rowCount; x++ {
		blk := 1
		blkIndex := 0
		for y := 0; y < colCount; y++ {
			if y < usableCols {
				mV[blkIndex].add((*buf)[i])
			}
			if blk == c.BlockWidth {
				// Advance to the next condensed column.
				blk = 0
				blkIndex++
			}
			blk++
			i++
		}
		// NOTE(review): emission triggers when x%BlockWidth == 0, i.e. after
		// the first row of each band rather than the last — confirm intended.
		if x%c.BlockWidth == 0 {
			for idx, v := range mV {
				if len(c.MotionMask) > 0 && c.MotionMask[compressedIndex] == 0 {
					(*frame)[compressedIndex] = motionVector{0, 0}
				} else {
					(*frame)[compressedIndex] = v.getAvg(c.SenseThreshold)
				}
				mV[idx].reset()
				compressedIndex++
			}
		}
	}
}
// getMaxBlockWidth determines the largest power-of-two block width that
// evenly divides the frame's macroblock grid, unless BlockWidth was already
// set by the caller. It exits fatally for dimensions that cannot be tiled.
func (c *Motion) getMaxBlockWidth() {
	sizeMacroX := c.Width / 16
	sizeMacroY := c.Height / 16
	if sizeMacroX == 0 || sizeMacroY == 0 {
		// Guard: with a dimension under 16 the factor search below would
		// loop forever, since 0 is divisible by 2 indefinitely.
		log.Fatal("Width and Height must each be at least 16")
	}

	// split frame into larger zones, find largest factor
	simpFactor := 0
	for ((sizeMacroX/(1<<simpFactor))%2 == 0) && ((sizeMacroY/(1<<simpFactor))%2 == 0) {
		simpFactor++
	}
	blockWidth := 1 << simpFactor // largest block width
	if c.BlockWidth != 0 {
		blockWidth = c.BlockWidth
	}
	c.BlockWidth = blockWidth

	if c.Width%(16*c.BlockWidth) != 0 || c.Height%(16*c.BlockWidth) != 0 {
		log.Fatal("Invalid block width")
	}
}
// Init initializes configuration variables for Motion: it fills in defaults
// for unset fields, derives BlockWidth, clamps SenseThreshold, and
// optionally re-applies a previously saved motion mask.
func (c *Motion) Init(usePreviousMask bool) {
	if c.Width == 0 || c.Height == 0 {
		// Default capture resolution.
		c.Width = 1280
		c.Height = 960
	}
	if c.SenseThreshold == 0 {
		c.SenseThreshold = 9
	}
	if c.Protocol == "" || c.ListenPort == "" {
		c.Protocol = "tcp"
		c.ListenPort = ":9000"
	}
	c.getMaxBlockWidth()
	// Threshold cannot exceed the macroblock count of a condensed block.
	// NOTE(review): the clamp assigns BlockWidth, not BlockWidth*BlockWidth —
	// confirm that is intended.
	if c.SenseThreshold > int8(c.BlockWidth*c.BlockWidth) {
		c.SenseThreshold = int8(c.BlockWidth)
		log.Printf("mthreshold lowered to %d\n", c.SenseThreshold)
	}
	log.Printf("Motion threshold: %d / %d\n", c.SenseThreshold, c.BlockWidth*c.BlockWidth)

	if usePreviousMask {
		c.ApplyPreviousMask()
	}
}
// publishParsedBlocks writes a 0/1 byte per condensed block into c.output,
// publishes the map to the broker, and returns how many blocks had motion.
func (c *Motion) publishParsedBlocks(caster *broker.Broker, frame *[]motionVector) int {
	triggered := 0
	for i := range *frame {
		if (*frame)[i].X != 0 {
			c.output[i] = 1
			triggered++
		} else {
			c.output[i] = 0
		}
	}
	caster.Publish(c.output)
	return triggered
}
// Detect parses motion vectors provided by raspivid and publishes a
// condensed motion map for every frame. Lower senseThreshold value
// increases the sensitivity to motion. Detect returns when the inbound
// stream errors out.
func (c *Motion) Detect(caster *broker.Broker) {
	conn := listen(c.Protocol, c.ListenPort)
	//f, _ := os.Create("motion.vec")

	numMacroblocks := ((c.Width + 16) / 16) * (c.Height / 16) // the right-most column is padding?
	numUsableMacroblocks := (c.Width / 16) * (c.Height / 16)
	currMacroBlocks := make([]motionVector, 0, numMacroblocks)
	currCondensedBlocks := make([]motionVector, numUsableMacroblocks/(c.BlockWidth*c.BlockWidth))
	c.output = make([]byte, numUsableMacroblocks/(c.BlockWidth*c.BlockWidth))
	ignoredFrames := 0

	// buf's length is a multiple of sizeofMotionVector, so a full buffer
	// always holds whole vectors.
	buf := make([]byte, 1024)
	s := bufio.NewReader(conn)
	blocksRead := 0
	for {
		// io.ReadFull guarantees the buffer is completely filled; the
		// previous bare Read could return short and leave stale bytes in
		// buf that were then parsed as bogus vectors.
		_, err := io.ReadFull(s, buf)
		if err != nil {
			log.Println("Motion detection stopped: " + err.Error())
			return
		}
		bufIdx := 0
		for bufIdx < len(buf) {
			// Manually convert since binary.Read runs really slow on a Pi Zero (~20% CPU)
			temp := motionVector{}
			temp.X = int8(buf[0+bufIdx])
			temp.Y = int8(buf[1+bufIdx])
			//temp.SAD = int16(buf[2+bufIdx]) << 4 // SAD might be spiking around keyframes and triggers false positives
			//temp.SAD |= int16(buf[3+bufIdx])
			currMacroBlocks = append(currMacroBlocks, temp)
			bufIdx += sizeofMotionVector
			blocksRead++

			if blocksRead == numMacroblocks {
				// A complete frame of vectors has arrived.
				blocksRead = 0
				if ignoredFrames < ignoreFirstFrames {
					ignoredFrames++
					// Discard the ignored frame's vectors; previously they
					// accumulated, so the first processed frame condensed
					// stale data from the very first frame.
					currMacroBlocks = currMacroBlocks[:0]
					continue
				}
				c.condenseBlocksDirection(&currCondensedBlocks, &currMacroBlocks)
				if c.publishParsedBlocks(caster, &currCondensedBlocks) > 0 {
					// Motion seen: keep the recorder running a bit longer.
					c.recorder.StopTime = time.Now().Add(time.Second * 5)
				}
				//binary.Write(f, binary.LittleEndian, &currMacroBlocks) // write to file
				currMacroBlocks = currMacroBlocks[:0]
			}
		}
	}
}
// Start motion detection and continues listening after interruptions to the data stream.
// Detect returns whenever the inbound stream errors; the loop immediately
// re-listens, so Start never returns.
func (c *Motion) Start(caster *broker.Broker, recorder *Recorder) {
	c.recorder = recorder
	for {
		c.Detect(caster)
	}
}
package main
import (
"container/heap"
)
// 480 sliding window median using hash heap
type intHeap struct {
arr []*Element
hash map[int]int
isSmall bool
}
type Element struct {
value int
index int
}
func (ih *intHeap) Len() int {
return len((*ih).arr)
}
func (ih *intHeap) Less(i, j int) bool {
if ih.isSmall {
return (*ih).arr[i].value < (*ih).arr[j].value
} else {
return (*ih).arr[i].value > (*ih).arr[j].value
}
}
func (ih *intHeap) Push(ele interface{}) {
(*ih).arr = append((*ih).arr, ele.(*Element))
(*ih).hash[ele.(*Element).index] = (*ih).Len() - 1
}
func (ih *intHeap) Swap(i, j int) {
(*ih).hash[(*ih).arr[i].index], (*ih).hash[(*ih).arr[j].index] = (*ih).hash[(*ih).arr[j].index], (*ih).hash[(*ih).arr[i].index]
(*ih).arr[i], (*ih).arr[j] = (*ih).arr[j], (*ih).arr[i]
}
func (ih *intHeap) Pop() interface{} {
ele := (*ih).arr[(*ih).Len() - 1]
delete(ih.hash, (*ih).arr[(*ih).Len() - 1].index)
(*ih).arr = (*ih).arr[0: (*ih).Len() - 1]
return ele
}
func (ih *intHeap) Front() interface{} {
ele := heap.Pop(ih)
heap.Push(ih, ele)
return ele
}
func (ih *intHeap) Remove(index int) {
if value, exist := ih.hash[index]; exist {
heap.Remove(ih, value)
}
}
// medianSlidingWindow returns the median of every length-k window of nums
// (LeetCode 480). bigHeap is a max-heap holding the lower half of the
// window and smallHeap is a min-heap holding the upper half, so medians are
// read from the heap roots. Elements that slide out of the window are
// removed by their input index via the heaps' hash maps. O(n log k).
func medianSlidingWindow(nums []int, k int) []float64 {
	results := make([]float64, 0)
	if len(nums) == 0 || k <= 0 || len(nums) < k {
		return []float64{}
	} else if k == 1 {
		// Window of one: every element is its own median.
		for _, num := range nums {
			results = append(results, float64(num))
		}
		return results
	}
	smallHeap := &intHeap{
		arr: make([]*Element, 0),
		hash: map[int]int{},
		isSmall: true,
	}
	bigHeap := &intHeap{
		arr: make([]*Element, 0),
		hash: map[int]int{},
		isSmall: false,
	}
	for i, num := range nums {
		// Route the new value so the halves stay ordered: values at or
		// below the upper half's minimum go to the lower half (bigHeap).
		if smallHeap.Len() == 0 && bigHeap.Len() == 0 {
			heap.Push(bigHeap, &Element{
				num,
				i,
			})
		} else if smallHeap.Len() == 0 {
			if bigHeap.Front().(*Element).value > num {
				heap.Push(bigHeap, &Element{
					num,
					i,
				})
			} else {
				heap.Push(smallHeap, &Element{
					num,
					i,
				})
			}
		} else if bigHeap.Len() == 0 {
			if smallHeap.Front().(*Element).value > num {
				heap.Push(bigHeap, &Element{
					num,
					i,
				})
			} else {
				heap.Push(smallHeap, &Element{
					num,
					i,
				})
			}
		} else {
			if smallHeap.Front().(*Element).value >= num {
				heap.Push(bigHeap, &Element{
					num,
					i,
				})
			} else {
				heap.Push(smallHeap, &Element{
					num,
					i,
				})
			}
		}
		if i >= k {
			// Evict the element that just slid out of the window.
			smallHeap.Remove(i-k)
			bigHeap.Remove(i-k)
		}
		balance(smallHeap, bigHeap)
		if i >= k - 1 {
			if smallHeap.Len() == bigHeap.Len() {
				// Even window: median is the mean of the two middle values.
				results = append(results, float64(smallHeap.Front().(*Element).value + bigHeap.Front().(*Element).value) / 2.0 )
			} else {
				if smallHeap.Len() > bigHeap.Len() {
					results = append(results, float64(smallHeap.Front().(*Element).value))
				} else {
					results = append(results, float64(bigHeap.Front().(*Element).value))
				}
			}
		}
	}
	return results
}
// balance between small and big heap
func balance(small *intHeap, big *intHeap) {
if small.Len() > big.Len() {
balance(big, small)
} else if small.Len() == big.Len() || small.Len() == big.Len() - 1 {
return
} else {
for big.Len() - small.Len() > 1 {
heap.Push(small, heap.Pop(big))
}
}
} | sliding-window/480-sliding-window-median.go | 0.560974 | 0.441733 | 480-sliding-window-median.go | starcoder |
package mmaths
import (
"fmt"
"math"
)
// Matrix alias for a 2d slice of float64 values, indexed [row][col].
type Matrix [][]float64

// Print writes the matrix to stdout, one row per line.
// Previously the values went to stdout via fmt while each newline went to
// stderr via the builtin println; everything is now written to stdout.
func (mt Matrix) Print() {
	for i := range mt {
		for j := range mt[i] {
			fmt.Printf("%5f ", mt[i][j])
		}
		fmt.Println()
	}
}

// T returns the transpose of the matrix.
func (mt Matrix) T() Matrix {
	r, c := len(mt), len(mt[0])
	var b Matrix = make([][]float64, c)
	for j := 0; j < c; j++ {
		b[j] = make([]float64, r)
		for i := 0; i < r; i++ {
			b[j][i] = mt[i][j]
		}
	}
	return b
}

// Multiply returns the matrix product mt * mp.
// It panics when the inner dimensions do not agree.
func (mt Matrix) Multiply(mp Matrix) Matrix {
	nL := len(mt)
	mL := len(mt[0])
	nR := len(mp)
	mR := len(mp[0])
	if nR != mL {
		panic("incompatible matrices for multiplication")
	}
	var b Matrix = make([][]float64, nL)
	for n := 0; n < nL; n++ {
		b[n] = make([]float64, mR)
		for m := 0; m < mR; m++ {
			for k := 0; k < nR; k++ {
				b[n][m] += mt[n][k] * mp[k][m]
			}
		}
	}
	return b
}
// GaussJordanElimination is an algorithm for solving systems of linear equations.
// It is usually understood as a sequence of operations performed on the associated
// matrix of coefficients. This method can also be used to find the rank of a matrix,
// to calculate the determinant of a matrix, and to calculate the inverse of an
// invertible square matrix.
// from: http://www.sanfoundry.com/cpp-program-implement-gauss-jordan-elimination/
//
// The receiver is not modified; the returned matrix is the right half of the
// reduced augmented matrix [mt | I] (the inverse when mt is invertible).
func (mt Matrix) GaussJordanElimination() Matrix {
	n := len(mt)
	m := len(mt[0])
	// 2n rows are allocated because the reduction sweep below lets the row
	// index j run up to 2m; the extra rows remain all-zero.
	a := make([][]float64, n*2)
	for i := 0; i < n*2; i++ {
		a[i] = make([]float64, m*2)
	}

	// copy matrix values for solving, with the identity on the right half
	for i := 0; i < n; i++ {
		for j := 0; j < m; j++ {
			a[i][j] = mt[i][j]
		}
		for j := m; j < 2*m; j++ {
			if j == i+m {
				a[i][j] = 1.
			}
		}
	}

	// partial pivoting
	// NOTE(review): rows are compared on column 1 only, which is unusual for
	// partial pivoting (the pivot column normally varies with the step) —
	// confirm against a reference implementation.
	for i := n - 1; i > 0; i-- {
		if a[i-1][1] < a[i][1] {
			for j := 0; j < m*2; j++ {
				d := a[i][j]
				a[i][j] = a[i-1][j]
				a[i-1][j] = d
			}
		}
	}

	// reducing to diagonal matrix
	for i := 0; i < n; i++ {
		for j := 0; j < m*2; j++ {
			if j != i {
				d := a[j][i] / a[i][i]
				for k := 0; k < m*2; k++ {
					a[j][k] -= a[i][k] * d
				}
			}
		}
	}

	// reducing to unit matrix
	for i := 0; i < n; i++ {
		d := a[i][i]
		for j := 0; j < m*2; j++ {
			a[i][j] = a[i][j] / d
		}
	}

	// Return only the right half (columns m..2m-1) of the first n rows.
	var b Matrix = make([][]float64, n)
	for i := 0; i < n; i++ {
		b[i] = a[i][m:]
	}
	return b
}
// t is a shared debug flag for calcDiff/calcGradient: it arms one-shot
// diagnostic prints that fire when a NaN or Inf value is produced.
var t bool

// GradientDescent is a first-order iterative optimization algorithm for finding the minimum of a function.
// used to solve for x in Ax=B
// from: https://stackoverflow.com/questions/16422287/linear-regression-library-for-go-language
//
// A is the design matrix, B the target vector, n the iteration count and
// alpha the learning rate.
func GradientDescent(A Matrix, B []float64, n int, alpha float64) []float64 {
	at := A.T() // transpose so the first index selects a feature column
	x := make([]float64, len(at))
	t = true // re-arm the NaN/Inf diagnostics for this run
	for i := 0; i < n; i++ {
		diffs := calcDiff(x, B, at)
		grad := calcGradient(diffs, at)
		for j := 0; j < len(grad); j++ {
			x[j] += alpha * grad[j]
		}
	}
	return x
}
// calcDiff returns the residual vector b - prediction for the current
// estimate x, where prediction[i] = sum over j of x[j]*a[j][i].
// The prints are one-shot diagnostics (guarded by the package flag t) that
// fire when a residual becomes NaN or Inf.
func calcDiff(x []float64, b []float64, a [][]float64) []float64 {
	diffs := make([]float64, len(b))
	for i := 0; i < len(b); i++ {
		prediction := 0.0
		for j := 0; j < len(x); j++ {
			prediction += x[j] * a[j][i]
		}
		diffs[i] = b[i] - prediction
		if t && math.IsNaN(diffs[i]) {
			fmt.Println("diffs Nan")
			fmt.Println(b[i], prediction)
			fmt.Println(x)
			t = false
		}
		if t && math.IsInf(diffs[i], 0) {
			fmt.Println("diffs Inf")
			fmt.Println(b[i], prediction)
			fmt.Println(x)
			t = false
		}
	}
	return diffs
}
// calcGradient returns the gradient used by GradientDescent:
// gradient[j] = (1/len(diffs)) * sum over i of diffs[i]*a[j][i].
// Diagnostic prints (guarded by the package flag t) fire once when a NaN
// appears during accumulation or normalization.
func calcGradient(diffs []float64, a [][]float64) []float64 {
	gradient := make([]float64, len(a))
	for i := 0; i < len(diffs); i++ {
		for j := 0; j < len(a); j++ {
			gradient[j] += diffs[i] * a[j][i]
			if t && math.IsNaN(gradient[j]) {
				fmt.Println("grad1")
				fmt.Println(diffs[i], a[j][i], gradient[j])
				t = false
			}
		}
	}
	// Normalize by the number of samples.
	for i := 0; i < len(a); i++ {
		g1 := gradient[i]
		gradient[i] = gradient[i] / float64(len(diffs))
		if t && math.IsNaN(gradient[i]) {
			fmt.Println("grad2")
			fmt.Println(g1, gradient[i])
			t = false
		}
	}
	return gradient
}
package evaluate_cluster
import (
dist "github.com/verlandz/clustering-phone/utility/distance"
"math"
)
/*
DaviesBouldinIndex measures clustering quality by level of cohesion and separation
(lower values indicate better-separated, tighter clusters). It returns the
requested cluster count, the number of non-empty clusters, and the index.
ref :
- https://en.wikipedia.org/wiki/Davies%E2%80%93Bouldin_index
- https://www.hackerearth.com/problem/approximate/davies-bouldin-index/
- https://caridokumen.com/download/metode-manhattan-euclidean-dan-chebyshev-pada-algoritma-k-means-untuk-pengelompokan-status-desa-_5a46c56bb7d7bc7b7a1dbdf1_pdf
- https://www.semanticscholar.org/paper/A-Bounded-Index-for-Cluster-Validity-Saitta-Raphael/9701405b0d601e169636a2541940a070087acd5b
*/
func (r *req) DaviesBouldinIndex() (int, int, float64) {
	empty := map[int]bool{}
	valid := r.N_Cluster

	// checking for empty cluster
	for i := 0; i < r.N_Cluster; i++ {
		if len(r.group[i]) == 0 {
			empty[i] = true
			valid--
		}
	}

	avg := 0.0
	for i := 0; i < r.N_Cluster; i++ {
		if empty[i] {
			continue
		}

		// For each cluster take the worst (maximum) similarity R_ij against
		// every other non-empty cluster.
		temp := 0.0
		for j := 0; j < r.N_Cluster; j++ {
			if i == j || empty[j] {
				continue
			}
			temp = math.Max(temp, r.rij(i, j))
		}
		avg += temp
	}
	avg /= float64(valid)

	return r.N_Cluster, valid, avg
}
// rij computes the Davies-Bouldin similarity between clusters i and j:
// the sum of the two clusters' cohesion (mean centroid-to-member distance)
// divided by the separation between their centroids.
func (r *req) rij(i, j int) float64 {
	// cohesion: mean distance from a cluster's centroid to its members
	cohesion := func(c int) float64 {
		total := 0.0
		for _, member := range r.group[c] {
			total += dist.Get(r.centroid[c], member.Features, r.distance)
		}
		return total / float64(len(r.group[c]))
	}

	// separation between the two centroids
	mij := dist.Get(r.centroid[i], r.centroid[j], r.distance)

	return (cohesion(i) + cohesion(j)) / mij
}
/*
ElbowMethodSSE measures clustering quality by SSE; plotting it across cluster
counts yields an elbow-shaped graphic. The optimal cluster count is the one
with the sharp corner, called the elbow point. It returns the requested
cluster count, the number of non-empty clusters, and the SSE.
ref :
- https://bl.ocks.org/rpgove/0060ff3b656618e9136b
*/
func (r *req) ElbowMethodSSE() (int, int, float64) {
	sse := 0.0
	valid := 0
	for i := range r.group {
		if len(r.group[i]) == 0 {
			continue // skip empty clusters
		}
		valid++

		// mean cluster
		mean := r.clusterMean(i)
		for j := range r.group[i] {
			// mean point: average of this datapoint's feature values
			datapoint := r.group[i][j]
			mn := 0.0
			for k := range datapoint.Features {
				mn += datapoint.Features[k]
			}
			mn /= float64(len(datapoint.Features))

			// accumulate the squared deviation from the cluster mean
			sse += (mn - mean) * (mn - mean)
		}
	}
	return r.N_Cluster, valid, sse
}
// clusterMean returns the mean of the per-feature means of all members of
// cluster idx: features are averaged across members first, then that
// feature-mean vector is averaged into a single scalar.
func (r *req) clusterMean(idx int) float64 {
	members := r.group[idx]

	// Per-feature sums across all members.
	var sums []float64
	for i, member := range members {
		if i == 0 {
			sums = append([]float64{}, member.Features...)
			continue
		}
		for j, f := range member.Features {
			sums[j] += f
		}
	}

	n := float64(len(members))
	total := 0.0
	for _, s := range sums {
		total += s / n
	}
	return total / float64(len(sums))
}
/*
* THIS IS CUSTOM METHOD
get mean from all mean centroid to its member
*/
func (r *req) Simple() (int, int, float64) {
avg, valid := 0.0, 0
// group
for i := 0; i < r.N_Cluster; i++ {
if len(r.group[i]) == 0 {
continue
}
temp := 0.0
for j := range r.group[i] {
temp += dist.Get(r.centroid[i], r.group[i][j].Features, r.distance)
}
temp /= float64(len(r.group[i]))
avg += temp
valid++
}
avg /= float64(valid)
return r.N_Cluster, valid, avg
} | utility/evaluate_cluster/evaluate_cluster.go | 0.635562 | 0.441613 | evaluate_cluster.go | starcoder |
package nifi
import (
"encoding/json"
)
// Position The position of a component on the graph
type Position struct {
	// The x coordinate.
	X *float64 `json:"x,omitempty"`
	// The y coordinate.
	Y *float64 `json:"y,omitempty"`
}

// NewPosition instantiates a new Position object.
func NewPosition() *Position {
	return &Position{}
}

// NewPositionWithDefaults instantiates a new Position object, assigning
// only defaulted properties.
func NewPositionWithDefaults() *Position {
	return &Position{}
}

// GetX returns the X field value if set, zero value otherwise.
func (o *Position) GetX() float64 {
	if o == nil || o.X == nil {
		return 0
	}
	return *o.X
}

// GetXOk returns a tuple with the X field value if set, nil otherwise,
// and a boolean to check if the value has been set.
func (o *Position) GetXOk() (*float64, bool) {
	if o == nil || o.X == nil {
		return nil, false
	}
	return o.X, true
}

// HasX returns a boolean if a field has been set.
func (o *Position) HasX() bool {
	return o != nil && o.X != nil
}

// SetX gets a reference to the given float64 and assigns it to the X field.
func (o *Position) SetX(v float64) {
	o.X = &v
}

// GetY returns the Y field value if set, zero value otherwise.
func (o *Position) GetY() float64 {
	if o == nil || o.Y == nil {
		return 0
	}
	return *o.Y
}

// GetYOk returns a tuple with the Y field value if set, nil otherwise,
// and a boolean to check if the value has been set.
func (o *Position) GetYOk() (*float64, bool) {
	if o == nil || o.Y == nil {
		return nil, false
	}
	return o.Y, true
}

// HasY returns a boolean if a field has been set.
func (o *Position) HasY() bool {
	return o != nil && o.Y != nil
}

// SetY gets a reference to the given float64 and assigns it to the Y field.
func (o *Position) SetY(v float64) {
	o.Y = &v
}

// MarshalJSON serializes only the coordinates that have been set.
func (o Position) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	if o.X != nil {
		out["x"] = o.X
	}
	if o.Y != nil {
		out["y"] = o.Y
	}
	return json.Marshal(out)
}

// NullablePosition wraps a Position pointer together with an explicit
// "has been set" flag, distinguishing unset from explicit null.
type NullablePosition struct {
	value *Position
	isSet bool
}

// Get returns the wrapped value.
func (v NullablePosition) Get() *Position {
	return v.value
}

// Set stores the value and marks it as set.
func (v *NullablePosition) Set(val *Position) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set has been called.
func (v NullablePosition) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullablePosition) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullablePosition wraps val in an already-set NullablePosition.
func NewNullablePosition(val *Position) *NullablePosition {
	return &NullablePosition{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (null when unset).
func (v NullablePosition) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON deserializes into the wrapped value and marks it set.
func (v *NullablePosition) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package openmessaging
// Headers describes the standard OpenMessaging message headers. Every setter
// returns the receiver (so calls can be chained) together with an error for
// invalid arguments; each getter returns the corresponding header value.
type Headers interface {
	// SetDestination sets the queue the message is being sent to. On a
	// received message it names the queue the message resides in.
	SetDestination(destination string) (Headers, error)
	// SetMessageId sets a value that uniquely identifies each message sent
	// by a Producer.
	SetMessageId(messageId string) (Headers, error)
	// SetBornTimestamp sets the time, in milliseconds, a message was handed
	// off to a Producer to be sent. The same value is visible on the
	// received message.
	SetBornTimestamp(bornTimestamp int64) (Headers, error)
	// SetBornHost sets the host info of the client that produced the
	// message; the same value is visible on the received message.
	SetBornHost(bornHost string) (Headers, error)
	// SetStoreTimestamp sets the server-side store timestamp in
	// milliseconds. It is ignored when a message is sent; the server
	// assigns the value before the send method returns.
	SetStoreTimestamp(storeTimestamp int64) (Headers, error)
	// SetStoreHost sets the server-side store host info. It is ignored when
	// a message is sent; the server assigns the value before the send
	// method returns.
	SetStoreHost(storeHost string) (Headers, error)
	// SetDelayTime sets a delivery delay in milliseconds, measured from the
	// born timestamp. When this field isn't set explicitly, the message is
	// delivered immediately.
	SetDelayTime(delayTime int64) (Headers, error)
	// SetExpireTime sets a relative time-to-live: the interval within which
	// the message may be delivered. Zero indicates the message does not
	// expire. When an undelivered message's expiration time is reached the
	// message is destroyed; OMS does not define an expiration notification.
	SetExpireTime(expireTime int64) (Headers, error)
	// SetPriority sets the priority level, from 1 (lowest) to 10 (highest);
	// the default is 5 and values outside this range are ignored. Strict
	// priority-order delivery is best effort, not guaranteed.
	SetPriority(priority int16) (Headers, error)
	// SetDurability sets the reliability level the vendor must guarantee:
	// PERSISTENT (stable storage, the message must not be lost) or
	// NON_PERSISTENT (memory storage is acceptable for better performance
	// and lower cost).
	SetDurability(durability int16) (Headers, error)
	// SetMessageKey sets a custom key identifying a class of messages; the
	// server may use it for hashing/dispatching, or to implement ordered
	// messages.
	SetMessageKey(messageKey string) (Headers, error)
	// SetTraceId sets a globally unique trace ID that associates the key
	// events in the whole lifecycle of a message (sent by whom, stored
	// where, received by whom) and can link into a wider distributed trace.
	SetTraceId(traceId string) (Headers, error)
	// SetDeliveryCount sets the number of times the message has been
	// delivered.
	SetDeliveryCount(deliveryCount int) (Headers, error)
	// SetTransactionId sets the ID used in transactional messaging; the
	// same ID appears in the prepare message, the commit message, and the
	// message the consumer receives.
	SetTransactionId(transactionId string) (Headers, error)
	// SetCorrelationId sets an ID used to link one message with another —
	// typically a response message with its request message.
	SetCorrelationId(correlationId string) (Headers, error)
	// SetCompression sets the message-body compression algorithm. Vendors
	// are free to choose the algorithm but must deliver the decompressed
	// body to the user.
	SetCompression(compression int16) (Headers, error)
	// GetDestination returns the destination set by SetDestination.
	GetDestination() (string, error)
	// GetMessageId returns the message ID set by SetMessageId.
	GetMessageId() (string, error)
	// GetBornTimestamp returns the timestamp set by SetBornTimestamp.
	GetBornTimestamp() (int64, error)
	// GetBornHost returns the host set by SetBornHost.
	GetBornHost() (string, error)
	// GetStoreTimestamp returns the timestamp set by SetStoreTimestamp.
	GetStoreTimestamp() (int64, error)
	// GetStoreHost returns the host set by SetStoreHost.
	GetStoreHost() (string, error)
	// GetDelayTime returns the delay set by SetDelayTime.
	GetDelayTime() (int64, error)
	// GetExpireTime returns the time-to-live set by SetExpireTime.
	GetExpireTime() (int64, error)
	// GetPriority returns the priority set by SetPriority.
	GetPriority() (int16, error)
	// GetDurability returns the reliability level set by SetDurability.
	GetDurability() (int16, error)
	// GetMessageKey returns the key set by SetMessageKey.
	GetMessageKey() (string, error)
	// GetTraceId returns the trace ID set by SetTraceId.
	GetTraceId() (string, error)
	// GetDeliveryCount returns the count set by SetDeliveryCount.
	GetDeliveryCount() (int, error)
	// GetTransactionId returns the ID set by SetTransactionId.
	GetTransactionId() (string, error)
	// GetCorrelationId returns the ID set by SetCorrelationId.
	GetCorrelationId() (string, error)
	// GetCompression returns the algorithm set by SetCompression.
	GetCompression() (int16, error)
}
package core
import (
"bytes"
"log"
"strconv"
)
// SizeEstimator estimates the Jaccard coefficients between the different
// datasets. The Jaccard coefficient between two datasets is defined as
// the cardinality of the intersection divided by the cardinality of the
// union of the two datasets.
// NOTE(review): Similarity actually returns the ratio of the two dataset
// sizes (smaller/larger), which only approximates Jaccard — confirm intent.
type SizeEstimator struct {
	AbstractDatasetSimilarityEstimator // shared estimator state (datasets, matrix, concurrency, ...)
}
// Compute constructs the Similarity Matrix by delegating to the shared
// estimator driver, which calls Similarity for each dataset pair.
func (e *SizeEstimator) Compute() error {
	return datasetSimilarityEstimatorCompute(e)
}
// Similarity returns the similarity between two datasets as the ratio of the
// smaller dataset's size to the larger one's (1.0 means equal sizes).
func (e *SizeEstimator) Similarity(a, b *Dataset) float64 {
	sizeA, sizeB := float64(len(a.Data())), float64(len(b.Data()))
	if sizeA == 0 && sizeB == 0 {
		// Two empty datasets have identical size; the unguarded division
		// previously produced NaN (0/0) here.
		return 1.0
	}
	if sizeA >= sizeB {
		return sizeB / sizeA
	}
	return sizeA / sizeB
}
// Configure sets the necessary parameters before the similarity execution.
// Recognized keys: "concurrency" — max number of worker threads (int).
func (e *SizeEstimator) Configure(conf map[string]string) {
	// Start from the documented default so a malformed value cannot leave
	// concurrency at the zero value. Previously an unparsable string left
	// it at 0 instead of falling back to 1.
	e.concurrency = 1
	if val, ok := conf["concurrency"]; ok {
		conv, err := strconv.ParseInt(val, 10, 32)
		if err != nil {
			log.Println(err)
		} else {
			e.concurrency = int(conv)
		}
	}
}
// Options returns a list of applicable parameters (key -> human-readable
// description) accepted by Configure.
func (e *SizeEstimator) Options() map[string]string {
	return map[string]string{
		"concurrency": "max num of threads used (int)",
	}
}
// Serialize returns a byte array containing the estimator: a 4-byte
// similarity-type tag followed by the serialized embedded estimator.
// Deserialize consumes the same layout.
func (e *SizeEstimator) Serialize() []byte {
	buffer := new(bytes.Buffer)
	buffer.Write(getBytesInt(int(SimilarityTypeJaccard)))
	buffer.Write(
		datasetSimilarityEstimatorSerialize(e.AbstractDatasetSimilarityEstimator))
	return buffer.Bytes()
}
// Deserialize instantiates the estimator based on a byte array
func (e *SizeEstimator) Deserialize(b []byte) {
buffer := bytes.NewBuffer(b)
tempInt := make([]byte, 4)
buffer.Read(tempInt) // consume estimator type
buffer.Read(tempInt)
absEstBytes := make([]byte, getIntBytes(tempInt))
buffer.Read(absEstBytes)
e.AbstractDatasetSimilarityEstimator =
*datasetSimilarityEstimatorDeserialize(absEstBytes)
} | core/similaritysize.go | 0.848471 | 0.635915 | similaritysize.go | starcoder |
package banana
import "time"
// GameID uniquely identifies a game.
type GameID string
// DB abstracts the storage backend for games, players, boards, and tiles.
type DB interface {
	// NewGame creates a new game with the given name and creator.
	NewGame(name string, creator PlayerID, config *Config) (GameID, error)
	// Games gets all of the games.
	Games() ([]*Game, error)
	// Game loads a game with the given ID.
	Game(id GameID) (*Game, error)
	// Bunch loads the bunch for the game with the given ID.
	Bunch(id GameID) (*Bunch, error)
	// RegisterPlayer registers a player in our system.
	RegisterPlayer(name string) (PlayerID, error)
	// AddPlayerToGame adds a player to a not-yet-started game.
	AddPlayerToGame(gID GameID, pID PlayerID) error
	// Players gets all the players for a game.
	Players(id GameID) ([]*Player, error)
	// Player loads a player with the given ID.
	Player(id PlayerID) (*Player, error)
	// Board loads the board for the given game and player IDs.
	Board(gID GameID, pID PlayerID) (*Board, error)
	// Tiles loads tiles for the given game and player IDs.
	Tiles(gID GameID, pID PlayerID) (*Tiles, error)
	// UpdateBoard updates a player's board.
	UpdateBoard(gID GameID, pID PlayerID, board *Board) error
	// UpdateTiles updates a player's tiles.
	UpdateTiles(gID GameID, pID PlayerID, tiles *Tiles) error
	// UpdateBunch updates the bunch for the game.
	UpdateBunch(id GameID, bunch *Bunch) error
	// StartGame starts a game, and sets everyone's initial tile sets.
	StartGame(id GameID, players map[PlayerID]*Tiles, bunch *Bunch) error
	// EndGame ends a given game, stopping players from adding more to
	// their boards.
	EndGame(id GameID) error
}
// GameStatus describes the lifecycle state of a game.
type GameStatus int
const (
	// UnknownStatus is the zero value, used for unrecognized states.
	UnknownStatus GameStatus = iota
	// WaitingForPlayers: the game exists but has not been started.
	WaitingForPlayers
	// InProgress: the game has been started and is being played.
	InProgress
	// Finished: the game has ended.
	Finished
)
// String returns a human-readable name for the status; unrecognized values
// render as "Unknown".
func (g GameStatus) String() string {
	switch g {
	case WaitingForPlayers:
		return "Waiting For Players"
	case InProgress:
		return "In Progress"
	case Finished:
		return "Finished"
	default:
		return "Unknown"
	}
}
// Game holds a single game and its metadata.
type Game struct {
	ID GameID
	Creator PlayerID // the player who created the game
	Name string
	Status GameStatus
	CreatedAt time.Time
	Config *Config
}
// Config holds per-game rule settings.
type Config struct {
	// The minimum number of letters that a word needs to have to be considered
	// valid.
	MinLettersInWord int
}
// Clone returns a copy of the Config. A nil receiver yields nil, so cloning
// a game that has no config cannot panic.
func (c *Config) Clone() *Config {
	if c == nil {
		return nil
	}
	return &Config{
		MinLettersInWord: c.MinLettersInWord,
	}
}
// Clone returns a deep copy of the Game (the Config is cloned as well).
// A nil receiver yields nil.
func (g *Game) Clone() *Game {
	if g == nil {
		return nil
	}
	return &Game{
		ID:        g.ID,
		Creator:   g.Creator,
		Name:      g.Name,
		Status:    g.Status,
		CreatedAt: g.CreatedAt,
		Config:    g.Config.Clone(),
	}
}
// PlayerID uniquely identifies a registered player.
type PlayerID string
// Player holds a registered player and their metadata.
type Player struct {
	ID PlayerID
	Name string
	AddedAt time.Time // when the player was registered
}
// Clone returns a copy of the Player. A nil receiver yields nil.
func (p *Player) Clone() *Player {
	if p == nil {
		return nil
	}
	return &Player{
		ID:      p.ID,
		Name:    p.Name,
		AddedAt: p.AddedAt,
	}
}
// Orientation describes how a word is placed on the board.
type Orientation int
const (
	// NoOrientation is a catch-all for unknown orientations.
	NoOrientation Orientation = iota
	// Horizontal means the word is placed on the board from left to right.
	Horizontal
	// Vertical means the word is placed on the board from top to bottom.
	Vertical
)
// BoardValidation collects everything found wrong with a submitted board.
type BoardValidation struct {
	InvalidWords []CharLocs // words not accepted by the dictionary check
	ShortWords []CharLocs // words shorter than Config.MinLettersInWord
	InvalidBoard bool
	DetachedBoard bool
	UnusedLetters []string
	ExtraLetters []string
}
// CharLocs is a list of letters and the word they
// make up, this is currently only used for
// returning bad words.
type CharLocs struct {
	// The word that is made of the charlocs.
	Word string
	Locs []CharLoc
}
// BoardStatus describes if a board is valid, or how it is invalid.
type BoardStatus struct {
	Code BoardStatusCode
	Errors []string // human-readable explanations accompanying Code
}
// BoardStatusCode describes the current validity of a board.
type BoardStatusCode int
const (
	// Success: the board passed validation.
	Success BoardStatusCode = iota
	// InvalidWord: the board contains an unaccepted word.
	InvalidWord
	// DetachedBoard: the board's tiles are not all connected.
	DetachedBoard
	// NotAllLetters: not all dealt letters were used.
	NotAllLetters
	// ExtraLetters: the board uses letters that were not dealt.
	ExtraLetters
	// InvalidBoard: catch-all for an otherwise malformed board.
	InvalidBoard
)
// byX is a way of sorting a slice of charlocs that sorts them first according to
// their X position, then by their Y position. Implements sort.Interface.
type byX []CharLoc
func (x byX) Len() int { return len(x) }
func (x byX) Less(i, j int) bool {
	if x[i].Loc.X != x[j].Loc.X {
		return x[i].Loc.X < x[j].Loc.X
	}
	// X positions tie: fall back to Y.
	return x[i].Loc.Y < x[j].Loc.Y
}
func (x byX) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
// byY is a way of sorting a slice of charlocs that sorts them first according to
// their Y position, then by their X position.
type byY []CharLoc
func (y byY) Len() int { return len(y) }
func (y byY) Less(i, j int) bool {
if y[i].Loc.Y != y[j].Loc.Y {
return y[i].Loc.Y < y[j].Loc.Y
}
return y[i].Loc.X < y[j].Loc.X
}
func (y byY) Swap(i, j int) { y[i], y[j] = y[j], y[i] } | banana/banana.go | 0.666497 | 0.548069 | banana.go | starcoder |
package ledger
import (
"fmt"
"github.com/72nd/acc/pkg/schema"
"github.com/72nd/acc/pkg/util"
)
// EntriesForTransaction returns the journal entries for a given schema.Transaction.
func EntriesForTransaction(s schema.Schema, trn schema.Transaction) []Entry {
if trn.AssociatedDocument.Empty() {
return entriesForTransactionWithDocument(s, trn)
}
return entrieForDefaultTransaction(s, trn, nil)
}
// entriesForTransactionWithDocument returns the entries for transactions with
// an associated document. The reference is resolved first as an expense, then
// as an invoice; if neither matches, we fall back to the default entry,
// passing along a lookup error that ends up in the entry's comment.
func entriesForTransactionWithDocument(s schema.Schema, trn schema.Transaction) []Entry {
	exp, err := s.Expenses.ExpenseByRef(trn.AssociatedDocument)
	if err == nil {
		return SettlementEntriesForExpense(s, trn, *exp)
	}
	inv, err := s.Invoices.InvoiceByRef(trn.AssociatedDocument)
	if err == nil {
		return SettlementEntriesForInvoice(s, trn, *inv)
	}
	return entrieForDefaultTransaction(s, trn, fmt.Errorf("no expense/invoice for id \"%s\" found", trn.AssociatedDocument))
}
// entrieForDefaultTransaction is the fallback function. It is possible to give
// an additional error as parameter; this error will be appended to the
// transaction comment.
// NOTE(review): the name has a typo ("entrie"); renaming would touch callers,
// so it is left as-is here.
func entrieForDefaultTransaction(s schema.Schema, trn schema.Transaction, err error) []Entry {
	cmt := NewManualComment("default", trn.String())
	cmt.add(err)
	// Pick the account pair based on the transaction direction; the side we
	// cannot attribute goes to the default account.
	var acc1, acc2 string
	if trn.TransactionType == util.CreditTransaction {
		// Incoming transaction
		acc1 = s.JournalConfig.BankAccount
		acc2 = defaultAccount
	} else {
		// Outgoing transaction
		acc1 = defaultAccount
		acc2 = s.JournalConfig.BankAccount
	}
	return []Entry{
		{
			Date:        trn.DateTime(),
			Status:      UnmarkedStatus,
			Code:        trn.Identifier,
			Description: fmt.Sprintf("some help: %s", trn.String()),
			Comment:     cmt,
			Account1:    acc1,
			Account2:    acc2,
			Amount:      trn.Amount,
		}}
}
/*
func (t Transaction) defaultEntries(a Acc) []Entry {
var account1, account2 string
// Incoming transaction
if t.TransactionType == util.CreditTransaction {
account1 = a.JournalConfig.BankAccount
account2 = defaultAccount
} else {
account1 = defaultAccount
account2 = a.JournalConfig.BankAccount
}
return []Entry{
{
Date: t.DateTime(),
Status: UnmarkedStatus,
Code: t.Identifier,
Description: t.JournalDescription(a),
Comment: NewManualComment("default", t.String()),
Account1: account1,
Account2: account2,
Amount: t.Amount,
}}
}
*/
package geo
import "fmt"
// Cylinder is one concentric altitude band of a Class B sector.
type Cylinder struct {
	EndDistanceNM int // Nautical Miles. Start distance is end of inner cylinder (or origin)
	Floor int // In hundreds of feet
	Ceil int // In hundreds of feet
}
// ClassBSector is a pie wedge of the airspace, with consistent floor/ceil
// cylinders.
type ClassBSector struct {
	StartBearing int // Magnetic bearing (-13.68 to get to magnetic @ SFO)
	EndBearing int
	Steps []Cylinder // Ordered by asc DistanceNM
}
// ClassBMap describes the full Class B airspace around a center point.
type ClassBMap struct {
	Sectors []ClassBSector // Must be ordered by asc StartBearing, and support wraparound
	Center Latlong
	Name string
}
// Walk treads a circle around the map until we find the sector that matches
// our bearing, and then walks out from the middle until we find the zone of
// the sector we lie within. Floor/ceil are in hundreds of feet.
func (m ClassBMap) Walk(distNM, bearing float64) (floor, ceil int, inRange bool) {
	// TODO: funky wraparound thing (StartBearing is currently ignored; a
	// sector is matched purely on bearing < EndBearing).
	for _, sector := range m.Sectors {
		if bearing >= float64(sector.EndBearing) {
			// Not our sector; keep walking around the circle. The
			// original returned here unconditionally, so only the first
			// sector was ever examined.
			continue
		}
		// Walk the cylinders outwards and take the first one containing
		// our distance.
		for _, cyl := range sector.Steps {
			if distNM < float64(cyl.EndDistanceNM) {
				return cyl.Floor, cyl.Ceil, true // found our Class B limits
			}
		}
		// We are past the outer limits of this sector's cylinders; not in range.
		return 0, 0, false
	}
	panic(fmt.Sprintf("Bad ClassBMap, we fell off the end, given bearing=%f", bearing))
}
// ClassBRange works out if a position is within range of the given map; and
// if so, what the altitude limits (in feet) are at that position.
func (m ClassBMap) ClassBRange(pos Latlong) (floor, ceil float64, inRange bool) {
	f, c, ok := m.Walk(pos.DistNM(m.Center), pos.BearingTowards(m.Center))
	if !ok {
		return 0, 0, false
	}
	// Walk reports limits in hundreds of feet; convert to feet.
	return float64(f) * 100.0, float64(c) * 100.0, true
}
// TPClassBAnalysis is the output after ClassB analysis of a single
// position+altitude along a track.
type TPClassBAnalysis struct {
	// The verdict
	WithinRange bool // If we're not within range, the rest has no meaning.
	VerticalDisposition int // <0 below; =0 within; >0 above
	BelowBy float64 // If below, by how many feet
	Reasoning string // Explanation of stuff
	// Handy data to have around later
	I int // Index into the track for the point we've analyzed
	InchesHg float64 // The pressure correction applied
	IndicatedAltitude float64 // The pressure corrected altitude
	Floor,Ceil float64 // The Class B space the point was in (0 if not in space)
	DistNM float64 // Seeing as we've calculated it :)
	AllowThisPoint bool // If true, this point is not a violation, regardless of data
}
// IsViolation reports whether this point constitutes a Class B violation:
// within range, below the floor, and not explicitly allowed.
func (a TPClassBAnalysis) IsViolation() bool {
	return !a.AllowThisPoint && a.WithinRange && a.VerticalDisposition < 0
}
// ClassBPointAnalysis analyzes a single aircraft position/altitude against
// the map, filling o with the verdict and a human-readable (HTML-flavored)
// explanation. alt is the altitude in feet; tol is the number of feet of
// wriggle room allowed below the Class B floor before it counts as "below".
func (m ClassBMap)ClassBPointAnalysis(pos Latlong, speed float64, alt,tol float64, o *TPClassBAnalysis) {
	distNM := pos.DistNM(m.Center)
	bearing := pos.BearingTowards(m.Center)
	o.DistNM = distNM
	o.Reasoning = fmt.Sprintf("** ClassB analysis: aircraft at %s, %.0f kt, %.0f feet\n",
		pos,speed,alt)
	o.Reasoning += fmt.Sprintf("* Distance to %s in NM: %.1f; bearing towards %s: %.1f\n",
		m.Name, distNM, m.Name, bearing)
	o.Floor,o.Ceil,o.WithinRange = m.ClassBRange(pos)
	if !o.WithinRange {
		o.Reasoning += "* not in range; too far away from "+m.Name+"\n"
		return
	}
	// Render limits as hundreds of feet (ceiling/floor), flight-level style.
	limitStr := fmt.Sprintf("%d/%d", int(o.Ceil/100.0), int(o.Floor/100.0))
	o.Reasoning += fmt.Sprintf("* In <b>%s</b> space, at <b>%.0f</b> feet\n", limitStr, alt)
	if (alt > o.Ceil) {
		o.VerticalDisposition = 1
		o.Reasoning += "* above class B ceiling\n"
	} else if (alt > o.Floor-tol-1) { // Allow <tol> feet of wriggle room
		o.VerticalDisposition = 0
		o.Reasoning += "* within (tolerance of) class B height range\n"
	} else {
		o.VerticalDisposition = -1
		o.Reasoning += "* below class B floor\n"
		o.BelowBy = o.Floor - alt
	}
	return
}
// {{{ -------------------------={ E N D }=----------------------------------
// Local variables:
// folded-file: t
// end:
// }}}
package valuepb
import (
"fmt"
"strconv"
"github.com/golang/protobuf/proto"
"google.golang.org/protobuf/types/known/structpb"
)
// BoolValue wraps b in a structpb bool Value.
func BoolValue(b bool) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_BoolValue{
			BoolValue: b,
		},
	}
}
// Bool converts v to a bool. Bool kinds are returned directly; any other kind
// is truthy when its numeric interpretation (see Float64) is non-zero. The
// previous int64 truncation made fractional numbers such as 0.5 falsey.
func Bool(v *structpb.Value) bool {
	if v == nil {
		return false
	}
	if b, ok := v.Kind.(*structpb.Value_BoolValue); ok {
		return b.BoolValue
	}
	return Float64(v) != 0
}
// BoolPtr converts v to a *bool.
func BoolPtr(v *structpb.Value) *bool {
	return proto.Bool(Bool(v))
}
// Int64Value wraps v in a structpb number Value.
// NOTE(review): structpb stores numbers as float64, so magnitudes above 2^53
// lose precision — inherent to the representation; confirm acceptable.
func Int64Value(v int64) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: float64(v),
		},
	}
}
// Int64 converts v to an int64 by truncating its numeric interpretation
// (see Float64 for the per-kind rules).
func Int64(v *structpb.Value) int64 {
	return int64(Float64(v))
}
// Int64Ptr converts v to a *int64.
func Int64Ptr(v *structpb.Value) *int64 {
	return proto.Int64(Int64(v))
}
// Uint64Value wraps v in a structpb number Value (same precision caveat as
// Int64Value).
func Uint64Value(v uint64) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: float64(v),
		},
	}
}
// Uint64 converts v to a uint64 by truncating its numeric interpretation.
// NOTE(review): negative inputs wrap around rather than clamping to 0.
func Uint64(v *structpb.Value) uint64 {
	return uint64(Float64(v))
}
// Uint64Ptr converts v to a *uint64.
func Uint64Ptr(v *structpb.Value) *uint64 {
	return proto.Uint64(Uint64(v))
}
// Int32Value wraps v in a structpb number Value.
func Int32Value(v int32) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: float64(v),
		},
	}
}
// Int32 converts v to an int32 by truncating its numeric interpretation.
func Int32(v *structpb.Value) int32 {
	return int32(Float64(v))
}
// Int32Ptr converts v to a *int32.
func Int32Ptr(v *structpb.Value) *int32 {
	return proto.Int32(Int32(v))
}
// Uint32Value wraps v in a structpb number Value.
func Uint32Value(v uint32) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: float64(v),
		},
	}
}
// Uint32 converts v to a uint32 by truncating its numeric interpretation.
func Uint32(v *structpb.Value) uint32 {
	return uint32(Float64(v))
}
// Uint32Ptr converts v to a *uint32.
func Uint32Ptr(v *structpb.Value) *uint32 {
	return proto.Uint32(Uint32(v))
}
// Float64Value wraps v in a structpb number Value.
func Float64Value(v float64) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: v,
		},
	}
}
// Float64 converts v to a float64: numbers are returned as-is, strings are
// parsed as decimal floats (unparsable strings yield 0), and bools map to
// 1/0. A nil value or any other kind yields 0. The other numeric converters
// in this package are all built on top of this function.
func Float64(v *structpb.Value) float64 {
	if v == nil {
		return 0
	}
	switch n := v.Kind.(type) {
	case *structpb.Value_NumberValue:
		return n.NumberValue
	case *structpb.Value_StringValue:
		if fv, err := strconv.ParseFloat(n.StringValue, 64); err == nil {
			return fv
		}
	case *structpb.Value_BoolValue:
		if n.BoolValue {
			return 1
		}
	}
	return 0
}
// Float64Ptr converts v to a *float64.
func Float64Ptr(v *structpb.Value) *float64 {
	return proto.Float64(Float64(v))
}
func StringValue(s string) *structpb.Value {
return &structpb.Value{
Kind: &structpb.Value_StringValue{
StringValue: s,
},
}
}
func String(v *structpb.Value) string {
if v == nil {
return ""
}
switch n := v.Kind.(type) {
case *structpb.Value_NumberValue:
return fmt.Sprintf("%v", n.NumberValue)
case *structpb.Value_StringValue:
return n.StringValue
case *structpb.Value_BoolValue:
return strconv.FormatBool(n.BoolValue)
}
return ""
}
func StringPtr(v *structpb.Value) *string {
return proto.String(String(v))
} | util/valuepb/value.go | 0.692226 | 0.405772 | value.go | starcoder |
package crawl
import (
"math/rand"
"strings"
"sync"
"time"
)
// Peer identifies a node in the NodePool. Every Peer should have an RPC
// address defined, but a network is not strictly required.
type Peer struct {
	RPCAddr string
	Network string
}

// String renders the peer as "rpcaddr;network", or just the RPC address when
// either field is empty.
func (p Peer) String() string {
	if p.RPCAddr == "" || p.Network == "" {
		return p.RPCAddr
	}
	return p.RPCAddr + ";" + p.Network
}
// NodePool implements an abstraction over a pool of nodes for which to crawl.
// It also contains a collection of nodes for which to reseed the pool when it's
// empty. Once the reseed list has reached capacity, a random node is removed
// when another is added.
type NodePool struct {
	rw sync.RWMutex // guards nodes; reseedNodes and rng are only touched under the write lock
	nodes map[Peer]struct{}
	reseedNodes []Peer
	rng *rand.Rand // used to evict a random reseed entry once at capacity
}
// NewNodePool returns an empty NodePool whose reseed list holds at most
// reseedCap entries.
func NewNodePool(reseedCap uint) *NodePool {
	pool := &NodePool{
		nodes:       make(map[Peer]struct{}),
		reseedNodes: make([]Peer, 0, reseedCap),
		rng:         rand.New(rand.NewSource(time.Now().Unix())),
	}
	return pool
}
// Size returns the number of nodes currently in the pool.
func (np *NodePool) Size() int {
	np.rw.RLock()
	defer np.rw.RUnlock()
	return len(np.nodes)
}
// Seed seeds the node pool with a given set of seeds. Each seed is either
// "rpcaddr" or "rpcaddr;network"; entries with more than one ';' are
// silently ignored.
func (np *NodePool) Seed(seeds []string) {
	for _, s := range seeds {
		parts := strings.Split(s, ";")
		if len(parts) == 1 {
			np.AddNode(Peer{RPCAddr: parts[0]})
		} else if len(parts) == 2 {
			np.AddNode(Peer{RPCAddr: parts[0], Network: parts[1]})
		}
	}
}
// RandomNode returns an arbitrary node from the pool — the first key yielded
// by Go's randomized map iteration — and false when the pool is empty.
// NOTE(review): map iteration order is unspecified, not uniformly random.
func (np *NodePool) RandomNode() (Peer, bool) {
	np.rw.RLock()
	defer np.rw.RUnlock()
	for nodeRPCAddr := range np.nodes {
		return nodeRPCAddr, true
	}
	return Peer{}, false
}
// AddNode adds a node to the node pool and records it in the internal reseed
// list. If the reseed list is full, a random existing entry is replaced;
// with zero reseed capacity nothing is recorded (previously rng.Intn(0)
// panicked in that case).
func (np *NodePool) AddNode(p Peer) {
	np.rw.Lock()
	defer np.rw.Unlock()
	np.nodes[p] = struct{}{}
	switch {
	case cap(np.reseedNodes) == 0:
		// No reseed capacity configured; nothing to record.
	case len(np.reseedNodes) < cap(np.reseedNodes):
		np.reseedNodes = append(np.reseedNodes, p)
	default:
		// At capacity: replace a random entry with the new node.
		np.reseedNodes[np.rng.Intn(len(np.reseedNodes))] = p
	}
}
// HasNode returns true if a node exists in the node pool and false otherwise.
func (np *NodePool) HasNode(p Peer) bool {
	np.rw.RLock()
	defer np.rw.RUnlock()
	_, ok := np.nodes[p]
	return ok
}
// DeleteNode removes a node from the node pool if it exists (a no-op for
// absent nodes; the reseed list is deliberately left untouched).
func (np *NodePool) DeleteNode(p Peer) {
	np.rw.Lock()
	defer np.rw.Unlock()
	delete(np.nodes, p)
}
// Reseed seeds the node pool with all the nodes found in the internal reseed
// list.
func (np *NodePool) Reseed() {
np.rw.Lock()
defer np.rw.Unlock()
for _, p := range np.reseedNodes {
np.nodes[p] = struct{}{}
}
} | server/crawl/pool.go | 0.658198 | 0.483283 | pool.go | starcoder |
package algs
import (
"fmt"
"math"
)
/*
IntToRoman solves the following problem:
Roman numerals use seven symbols: I (1), V (5), X (10), L (50), C (100),
D (500) and M (1000). Numerals are normally written largest to smallest from
left to right, with six subtractive forms: IV (4), IX (9), XL (40), XC (90),
CD (400) and CM (900).
Given an integer in the range 1 to 3999, convert it to a Roman numeral;
out-of-range input yields "".
Examples: 3 -> "III", 4 -> "IV", 9 -> "IX", 58 -> "LVIII" (L + V + III),
1994 -> "MCMXCIV" (M + CM + XC + IV).
*/
func IntToRoman(num int) (r string) {
	if num < 1 || num > 3999 {
		return ""
	}
	// Basic symbols keyed by their value.
	numerals := map[int]string{
		1:    "I",
		5:    "V",
		10:   "X",
		50:   "L",
		100:  "C",
		500:  "D",
		1000: "M",
	}
	// Consume the number digit by digit, least significant first, prepending
	// each digit's representation onto the result.
	for place := 0; num >= 1; place++ {
		digit := num % 10
		unit := int(math.Pow10(place)) // value of a single step at this digit position
		switch {
		case digit == 0:
			// Nothing to emit for this place.
		case digit < 4:
			for k := 0; k < digit; k++ {
				r = numerals[unit] + r
			}
		case digit == 4:
			r = numerals[unit] + numerals[5*unit] + r
		case digit < 9:
			// 5..8: the five-symbol followed by up to three unit symbols.
			chunk := numerals[5*unit]
			for k := 0; k < digit-5; k++ {
				chunk += numerals[unit]
			}
			r = chunk + r
		case digit == 9:
			r = numerals[unit] + numerals[10*unit] + r
		default:
			panic(fmt.Sprintf("impossible: %d", digit))
		}
		num /= 10
	}
	return r
}
package main
import "fmt"
import "os"
// point is a small example struct used to demonstrate the struct-printing
// verbs below.
type point struct {
	x, y int
}
func main() {
// Go ofrece varios "verbos" de impresión, diseñados
// para dar formato a valores de Go simples. Por
// ejemplo, esto imprime una instancia de nuestra
// estructura `point`.
p := point{1, 2}
fmt.Printf("%v\n", p)
// Si el valor es una estructura, la varianete `%+v`
// incluirá el nombre de los campos de la estructura.
fmt.Printf("%+v\n", p)
// La variante `%#v` imprime una representación de la
// sintáxis en Go del valor, por ejemplo, el fragmento
// de código que produciría ese valor.
fmt.Printf("%#v\n", p)
// Para imprimir el tipo de un valor, se usa `%T`.
fmt.Printf("%T\n", p)
// El formateo de boleanos es directo.
fmt.Printf("%t\n", true)
// Existen muchas opciones para formatear enteros.
// Se usa `%d` para un formato base-10 estándar.
fmt.Printf("%d\n", 123)
// Esto imprime la representación binaria.
fmt.Printf("%b\n", 14)
// Esto imprime la letra que corresponda a ese entero
fmt.Printf("%c\n", 33)
// `%x` provee codificación hexadecimal.
fmt.Printf("%x\n", 456)
// Existen también varias opciones de formato para
// números de punto flotante. Para formato decimal
// se usa `%f`.
fmt.Printf("%f\n", 78.9)
// `%e` y `%E` dan formato a los números de punto
// flotante usando (versiones ligeramente distintas
// de) la notación científica.
fmt.Printf("%e\n", 123400000.0)
fmt.Printf("%E\n", 123400000.0)
// Para cadenas simples se usa `%s`.
fmt.Printf("%s\n", "\"cadena\"")
// Para incluir doble comilla como en el código Go
// se usa `%q`.
fmt.Printf("%q\n", "\"cadena\"")
// Como con los enteros anteriormente, `%x` despliega
// la cadena en base-16 usando dos letras en la
// salida por cada byte que haya en la entrada.
fmt.Printf("%x\n", "hexadecimaleame esto")
// Para imprimir la representación de un apuntador
// se usa `%p`.
fmt.Printf("%p\n", &p)
// Al dar formato a los números generalmente se desea
// controlar el ancho y la precisión del resultado.
// Para especificar el ancho de un entero, se usa un
// número después del `%` en el verbo. Por omisión el
// resultado estará justificado a la derecha usando
// espacios.
fmt.Printf("|%6d|%6d|\n", 12, 345)
// También puedes especificar el ancho de los números
// de punto flotante y generalmente también se quiere
// restringir la precisión del punto decimal al mismo
// tiempo. Esto se logra usando la sintáxis:
// ancho.precisión
fmt.Printf("|%6.2f|%6.2f|\n", 1.2, 3.45)
// Para justificar a la izquierda se usa la bandera
// `-`.
fmt.Printf("|%-6.2f|%-6.2f|\n", 1.2, 3.45)
// También se puede querer controlar el ancho al dar
// formato a cadenas, especialmente si se requiere
// que queden alineadas para salida tipo tabla. Para
// justificación básica a la deerecha.
fmt.Printf("|%6s|%6s|\n", "foo", "b")
// Para justificar a la izquierda se usa la bandera
// `-` al igual que en los números.
fmt.Printf("|%-6s|%-6s|\n", "foo", "b")
// Hasta ahora hemos usado `Printf`, que imprime la
// cadena formateada a `os.Stdout`. `Sprintf` le da
// formato y regresa la cadena sin imprimirla en
// ningún lado.
s := fmt.Sprintf("una %s", "cadena")
fmt.Println(s)
// Se puede formateo-imprimir a otros `io.Writers`
// además de `os.Stdout` usando `Fprintf`.
fmt.Fprintf(os.Stderr, "un %s\n", "error")
} | examples/formateo-de-cadenas/formateo-de-cadenas.go | 0.530966 | 0.471527 | formateo-de-cadenas.go | starcoder |
package trie
import (
"fmt"
)
const (
	// endOfWord is a sentinel rune appended to every inserted word so that
	// a word and its prefix (e.g. "abc" and "abcd") can coexist. It is the
	// maximum int32, so it can never collide with a real code point.
	endOfWord = rune(0x7fffffff)
)
// Trie implements a dead simple trie data structure. Each node stores a run
// of runes (data) and the child branches that continue it (next).
type Trie struct {
	data []rune
	next []*Trie
}
// New returns a new, empty instance of the Trie data structure.
func New() *Trie {
	return &Trie{data: []rune{}, next: []*Trie{}}
}
// newNode builds a child node holding the given rune fragment and no
// children yet.
func newNode(w []rune) *Trie {
    n := &Trie{data: w}
    n.next = []*Trie{}
    return n
}
// add inserts w (already terminated with the endOfWord sentinel) into
// this subtree, splitting nodes as needed so common prefixes stay
// shared. It always returns nil; the error result exists so Merge can
// forward it.
func (t *Trie) add(w []rune) error {
    // Fresh/empty node: claim the whole word and we are done.
    if len(t.data) == 0 && len(t.next) == 0 {
        t.data = w
        return nil
    }
    // Walk w against this node's data, looking for the first divergence.
    for i, r := range w {
        // w extends past this node's data, i.e. data is a proper prefix
        // of w: push the remainder down - merge into an existing child
        // if one starts with the same rune, otherwise attach a new one.
        if i >= len(t.data) {
            for _, dt := range t.next {
                if dt.Merge(w[i:]) == nil {
                    // Done - merge worked!
                    return nil
                }
            }
            // Merge didn't work - add the remainder as a new child.
            t.next = append(t.next, newNode(w[i:]))
            return nil
        }
        // Runes still agree - keep walking.
        if r == t.data[i] {
            continue
        }
        // Divergence at position i.
        if r == endOfWord && t.data[i] == endOfWord {
            // Both end here - the word already exists; nothing to do.
            return nil
        } else if r == endOfWord {
            // w (minus its sentinel) is a strict prefix of data. Break
            // out to the split below, where i == len(w)-1.
            break
        } else {
            // Genuine rune mismatch: split this node into the common
            // prefix plus two children - the old remainder (keeping the
            // old children) and the new word's remainder.
            wNode := newNode(w[i:])
            oNode := newNode(t.data[i:])
            oNode.next = t.next
            t.data = t.data[:i]
            t.next = []*Trie{oNode, wNode}
        }
        return nil
    }
    // Reached via the break above: w's content is a complete prefix of
    // t.data (e.g. w="abc"+EOW, data="abcdef..."). Keep the shared
    // prefix here and push down two children: an end-of-word marker for
    // w and the leftover tail of data.
    oldNode := newNode(t.data[len(w)-1:])
    oldNode.next = t.next
    endNode := newNode([]rune{endOfWord})
    t.data = w[:len(w)-1]
    t.next = []*Trie{endNode, oldNode}
    return nil
}
// Add inserts the word w into the trie.
//
// The input is copied before the end-of-word sentinel is appended: the
// previous append(w, endOfWord) could write the sentinel into spare
// capacity owned by the caller and retain the caller's backing array
// inside the trie, so a later caller-side mutation corrupted the trie.
func (t *Trie) Add(w []rune) error {
    buf := make([]rune, len(w)+1)
    copy(buf, w)
    buf[len(w)] = endOfWord
    return t.add(buf)
}
// Merge merges the source trie into the current one, returns error if it is not
// a legal merge (the first runes differ). Empty inputs are rejected as
// illegal merges too; previously they panicked on the index below.
func (t *Trie) Merge(w []rune) error {
    if len(t.data) == 0 || len(w) == 0 || t.data[0] != w[0] {
        return fmt.Errorf("merge failed")
    }
    return t.add(w)
}
// allWords returns every word stored under this node, each prefixed
// with the runes accumulated on the path from the root.
//
// Fix: the original `append(prefix, t.data...)` could write into spare
// capacity of the shared prefix slice, so sibling subtrees (and the
// word slices already collected) observed each other's data - the
// classic slice-aliasing bug. Building a fresh slice per node makes
// the recursion safe.
func (t *Trie) allWords(prefix []rune) ([][]rune, error) {
    me := make([]rune, 0, len(prefix)+len(t.data))
    me = append(me, prefix...)
    me = append(me, t.data...)
    ret := [][]rune{}
    // A trailing sentinel marks a complete word; strip it before
    // collecting.
    if len(me) > 0 && me[len(me)-1] == endOfWord {
        ret = append(ret, me[:len(me)-1])
    }
    for _, st := range t.next {
        words, err := st.allWords(me)
        if err != nil {
            return nil, err
        }
        ret = append(ret, words...)
    }
    return ret, nil
}
// AllWords returns every complete word currently stored in the trie,
// as rune slices without the internal end-of-word sentinel.
func (t *Trie) AllWords() ([][]rune, error) {
    return t.allWords([]rune{})
}
// func (t *Trie) dump(l string, p []rune) {
// log.Printf("%s%s%s\n", l, string(p), string(t.data))
// for _, st := range t.next {
// st.dump(l+".", append(p, t.data...))
// }
// } | trie.go | 0.635222 | 0.669394 | trie.go | starcoder |
package lkf
const (
BlockSize = 512 // The block size in bytes
blockSizeInWords = BlockSize / 4 // The block size in words. Each word is uint32
delta = 0x9e3779b9 // Magic constant
)
// The 128-bit key for encrypting/decrypting lkf files. It is divided into 4 parts of 32 bit each.
var key = [4]uint32{
0x8ac14c27,
0x42845ac1,
<KEY>,
<KEY>,
}
// calcKey derives one round value from the two neighbouring words, the
// round constant x and the word index k, mixing in the package key
// (the structure resembles the XXTEA mixing function - unverified).
func calcKey(lWord, rWord, x, k uint32) uint32 {
    mix := (lWord>>5 ^ rWord<<2) + (rWord>>3 ^ lWord<<4)
    keyPart := key[(x>>2^k)&3] ^ lWord
    return mix ^ (keyPart + (x ^ rWord))
}
// min returns the smaller of x and y.
func min(x, y int) int {
    if y < x {
        return y
    }
    return x
}
// A Cryptor represents internal buffer, used for encrypting/decrypting passed data.
// The zero value is ready to use; the buffer holds one block decoded
// as little-endian 32-bit words.
type Cryptor struct {
    block [blockSizeInWords]uint32
}
// toBlock loads one BlockSize chunk of data into the word buffer,
// interpreting every group of four bytes as a little-endian uint32.
func (c *Cryptor) toBlock(data []byte) {
    for i := 0; i < blockSizeInWords; i++ {
        off := 4 * i
        c.block[i] = uint32(data[off]) |
            uint32(data[off+1])<<8 |
            uint32(data[off+2])<<16 |
            uint32(data[off+3])<<24
    }
}
// fromBlock stores the word buffer back into data, little-endian.
func (c *Cryptor) fromBlock(data []byte) {
    for i := 0; i < blockSizeInWords; i++ {
        w := c.block[i]
        out := data[4*i : 4*i+4]
        out[0] = byte(w)
        out[1] = byte(w >> 8)
        out[2] = byte(w >> 16)
        out[3] = byte(w >> 24)
    }
}
// Encrypt encrypts a number of blocks from src into dst.
// Returns the number of encrypted bytes.
// If the length of src or dst < BlockSize, it doesn't encrypt anything and returns 0.
// Only whole blocks are processed; trailing partial data is ignored.
func (c *Cryptor) Encrypt(dst, src []byte) int {
    numBlocks := min(len(dst), len(src)) / BlockSize
    for i := 0; i < numBlocks; i++ {
        c.toBlock(src[i*BlockSize:])
        // Used only 3 rounds
        for r := uint32(1); r <= 3; r++ {
            var x uint32 = r * delta
            lWord := c.block[blockSizeInWords-1]
            // Sweep left to right: each word absorbs a value derived
            // from its neighbours; lWord carries the freshly updated
            // previous word.
            for k := 0; k < blockSizeInWords-1; k++ {
                c.block[k] += calcKey(lWord, c.block[k+1], x, uint32(k))
                lWord = c.block[k]
            }
            // The last word wraps around to the first.
            c.block[blockSizeInWords-1] += calcKey(lWord, c.block[0], x, uint32(blockSizeInWords-1))
        }
        c.fromBlock(dst[i*BlockSize:])
    }
    return numBlocks * BlockSize
}
// Decrypt decrypts a number of blocks from src into dst.
// Returns the number of decrypted bytes.
// If the length of src or dst < BlockSize, it doesn't decrypt anything and returns 0.
// Exact inverse of Encrypt: rounds run 3..1 and words are swept right
// to left, subtracting what Encrypt added.
func (c *Cryptor) Decrypt(dst, src []byte) int {
    numBlocks := min(len(dst), len(src)) / BlockSize
    for i := 0; i < numBlocks; i++ {
        c.toBlock(src[i*BlockSize:])
        // Used only 3 rounds
        for r := uint32(3); r != 0; r-- {
            var x uint32 = r * delta
            rWord := c.block[0]
            // Undo the forward sweep in reverse order; rWord carries
            // the freshly restored following word.
            for k := blockSizeInWords - 1; k > 0; k-- {
                c.block[k] -= calcKey(c.block[k-1], rWord, x, uint32(k))
                rWord = c.block[k]
            }
            c.block[0] -= calcKey(c.block[blockSizeInWords-1], rWord, x, uint32(0))
        }
        c.fromBlock(dst[i*BlockSize:])
    }
    return numBlocks * BlockSize
}
package utils
import (
"fmt"
"math/big"
"strconv"
"strings"
)
//PrintAnswer - Prints a formatted answer for problemNum and answer
func PrintAnswer(problemNum int, answer int) {
    fmt.Printf("The answer to Project Euler Problem #%d is :%d\n", problemNum, answer)
}

// PrintBigIntAnswer prints a formatted answer when the result is a *big.Int.
func PrintBigIntAnswer(problemNum int, answer *big.Int) {
    fmt.Printf("The answer to Project Euler Problem #%d is :%d\n", problemNum, answer)
}

// PrintStringAnswer prints a formatted answer when the result is a string.
func PrintStringAnswer(problemNum int, answer string) {
    fmt.Printf("The answer to Project Euler Problem #%d is :%s\n", problemNum, answer)
}

// PrintMatrix15 prints a 15x15 integer matrix row by row,
// space-separating the values within each row.
func PrintMatrix15(matrix [15][15]int) {
    for i := 0; i < 15; i++ {
        for j := 0; j < 15; j++ {
            fmt.Printf("%d ", matrix[i][j])
        }
        fmt.Print("\n")
    }
}

// PrintIntArray prints the elements of arr on one line, each followed
// by a space (no trailing newline).
func PrintIntArray(arr []int) {
    for i := 0; i < len(arr); i++ {
        fmt.Printf("%d ", arr[i])
    }
}
//IsPalidrome - Checks if num is a palidrome
// (its decimal representation reads the same forwards and backwards;
// any negative number is therefore never a palindrome).
func IsPalidrome(num int) bool {
    s := strconv.Itoa(num)
    runes := []rune(s)
    for l, r := 0, len(runes)-1; l < r; l, r = l+1, r-1 {
        runes[l], runes[r] = runes[r], runes[l]
    }
    return s == string(runes)
}
// IsPalidromeString reports whether value reads the same forwards and
// backwards (rune-wise).
func IsPalidromeString(value string) bool {
    r := []rune(value)
    for l, j := 0, len(r)-1; l < j; l, j = l+1, j-1 {
        r[l], r[j] = r[j], r[l]
    }
    return value == string(r)
}
// Reverse returns s with its runes in reverse order.
func Reverse(s string) string {
    src := []rune(s)
    out := make([]rune, len(src))
    for i, r := range src {
        out[len(src)-1-i] = r
    }
    return string(out)
}
// SumDigits adds up the decimal digits found in the string. Characters
// that are not digits contribute zero (their parse error is
// deliberately ignored, matching the original behaviour).
func SumDigits(digits string) int {
    total := 0
    for i := range digits {
        d, _ := strconv.Atoi(digits[i : i+1])
        total += d
    }
    return total
}
// SumIntDigits returns the sum of the decimal digits of num, as
// produced by GetDigits.
func SumIntDigits(num int) int {
    digits := GetDigits(num)
    sum := SumIntArray(digits)
    return sum
}

// ProductIntDigits returns the product of the decimal digits of num,
// as produced by GetDigits.
func ProductIntDigits(num int) int {
    digits := GetDigits(num)
    product := ProductIntArray(digits)
    return product
}
// GetDigits returns the decimal digits that compose the given integer,
// most significant first. The sign is ignored: previously a negative
// input made the leading '-' fail strconv.Atoi silently, prepending a
// spurious 0 digit; now GetDigits(-45) == GetDigits(45) == [4 5].
// Non-negative inputs behave exactly as before.
// (Note: negating math.MinInt overflows; such input produced garbage
// before this change as well.)
func GetDigits(num int) []int {
    if num < 0 {
        num = -num
    }
    s := strconv.Itoa(num)
    digits := make([]int, len(s))
    for i := 0; i < len(s); i++ {
        digits[i] = int(s[i] - '0')
    }
    return digits
}
// SumIntArray returns the sum of all elements in array (0 for empty).
func SumIntArray(array []int) int {
    total := 0
    for _, v := range array {
        total += v
    }
    return total
}
// ProductIntArray returns the product of all elements in array
// (1 for an empty slice).
func ProductIntArray(array []int) int {
    product := 1
    for _, v := range array {
        product *= v
    }
    return product
}
//FindIndex returns the index of the integer in the integer array if it is found, otherwise it will return -1
func FindIndex(intArray []int, intToSearchFor int) int {
    for i := 0; i < len(intArray); i++ {
        if intArray[i] == intToSearchFor {
            return i
        }
    }
    return -1
}
// Contains reports whether e occurs in s.
func Contains(s []int, e int) bool {
    for i := range s {
        if s[i] == e {
            return true
        }
    }
    return false
}
func ContainsBig(s []big.Int, e big.Int) bool {
for _, a := range s {
if a.Cmp(&e) == 0 {
return true
}
}
return false
}
// IsPanDigital reports whether num is a 9-character string containing
// every digit 1-9. Length 9 plus the presence of all nine digits
// implies each occurs exactly once.
func IsPanDigital(num string) bool {
    if len(num) != 9 {
        return false
    }
    for _, d := range "123456789" {
        if !strings.ContainsRune(num, d) {
            return false
        }
    }
    return true
}
package xtime
import (
"database/sql/driver"
"encoding/json"
"fmt"
"strconv"
"time"
"ksitigarbha/timezone"
)
// Timestamp wraps time.Time and (un)marshals as Unix milliseconds.
type Timestamp struct{ time.Time }
// timestamp returns the instant as Unix milliseconds.
func (m Timestamp) timestamp() int64 {
    return m.UnixMilli()
}

// UnixMilli returns the number of milliseconds elapsed since the Unix
// epoch. (It shadows time.Time's own UnixMilli, added in Go 1.17,
// with an equivalent computation.)
func (m Timestamp) UnixMilli() int64 {
    return m.Time.UnixNano() / int64(time.Millisecond)
}

// format renders the instant using the package-level TimestampPattern
// layout.
func (m Timestamp) format() string {
    return m.Time.Format(TimestampPattern)
}
// AddDates returns a Timestamp shifted by the given years, months and
// days (normalization follows time.Time.AddDate).
func (m Timestamp) AddDates(years, months, days int) Timestamp {
    return Timestamp{Time: m.Time.AddDate(years, months, days)}
}

// AddHours returns a Timestamp shifted by the given number of hours.
func (m Timestamp) AddHours(hours int) Timestamp {
    return Timestamp{Time: m.Time.Add(time.Duration(hours) * time.Hour)}
}

// AddMinutes returns a Timestamp shifted by the given number of minutes.
func (m Timestamp) AddMinutes(minutes int) Timestamp {
    return Timestamp{Time: m.Time.Add(time.Duration(minutes) * time.Minute)}
}

// AddSeconds returns a Timestamp shifted by the given number of seconds.
func (m Timestamp) AddSeconds(secs int) Timestamp {
    return Timestamp{Time: m.Time.Add(time.Duration(secs) * time.Second)}
}

// AddDays returns a Timestamp shifted by the given number of calendar days.
func (m Timestamp) AddDays(days int) Timestamp {
    return m.AddDates(0, 0, days)
}
// After reports whether m is after other.
func (m Timestamp) After(other Timestamp) bool {
    return m.Time.After(other.Time)
}

// Before reports whether m is before other.
func (m Timestamp) Before(other Timestamp) bool {
    return m.Time.Before(other.Time)
}

// String returns the Date as a string, formatted with TimestampPattern.
func (m Timestamp) String() string {
    return m.format()
}

// Equals returns true if the dates are equal (same instant, regardless
// of location).
func (m Timestamp) Equals(other Timestamp) bool {
    return m.Time.Equal(other.Time)
}

// Days returns the number of whole days between m and other. Note the
// float hour count is truncated toward zero *before* dividing by
// HoursPerDay, so partial days are discarded.
func (m Timestamp) Days(other Timestamp) int {
    hours := m.Time.Sub(other.Time).Hours()
    return int(hours) / HoursPerDay
}
// UnmarshalJSON decodes a JSON number of Unix milliseconds into the
// Timestamp, interpreted in the China time zone.
//
// Fix: the previous time.Unix(ts/1000, 0) silently dropped the
// milliseconds that MarshalJSON emits, so a marshal/unmarshal round
// trip lost sub-second precision. The remainder is now carried over
// as nanoseconds.
func (m *Timestamp) UnmarshalJSON(text []byte) error {
    var ts int64
    err := json.Unmarshal(text, &ts)
    if err != nil {
        return err
    }
    t := time.Unix(ts/1000, (ts%1000)*int64(time.Millisecond)).In(timezone.China)
    *m = Timestamp{t}
    return nil
}
// MarshalJSON returns the JSON output of a Timestamp: the JSON literal
// null for the zero value, otherwise the Unix-millisecond integer.
func (m Timestamp) MarshalJSON() ([]byte, error) {
    if m.IsZero() {
        return []byte("null"), nil
    }
    return json.Marshal(m.timestamp())
}
// UnmarshalText converts a byte array into a Timestamp. The text must
// be a base-10 Unix-millisecond value; the result is interpreted in
// the China time zone.
//
// Fix: the previous time.Unix(ts/1000, 0) dropped the milliseconds
// that MarshalText emits; the remainder is now carried over as
// nanoseconds so a round trip is lossless.
func (m *Timestamp) UnmarshalText(text []byte) error {
    ts, err := strconv.ParseInt(string(text), 10, 64)
    if err != nil {
        return err
    }
    t := time.Unix(ts/1000, (ts%1000)*int64(time.Millisecond)).In(timezone.China)
    *m = Timestamp{t}
    return nil
}
// DateInLoc converts the instant to a calendar Date in the given
// location (the zone determines which calendar day the instant falls on).
func (m *Timestamp) DateInLoc(loc *time.Location) Date {
    var t = m.Time.In(loc)
    year, month, date := t.Date()
    return NewDate(year, month, date, loc)
}
// MarshalText returns the text of a Timestamp as a base-10
// Unix-millisecond value.
// An empty string will be returned for a zero value Timestamp.
func (m Timestamp) MarshalText() ([]byte, error) {
    if m.IsZero() {
        return []byte(""), nil
    }
    // strconv renders the integer directly, avoiding fmt.Sprintf's
    // boxing/reflection and an extra string->[]byte copy.
    return strconv.AppendInt(nil, m.timestamp(), 10), nil
}
// Scan converts an SQL value into a Timestamp. A nil value leaves m
// untouched.
//
// Fix: the previous unchecked type assertion value.(time.Time)
// panicked when the driver delivered any other type (e.g. []byte or
// string for a text column); that case now returns an error, as the
// sql.Scanner contract expects.
func (m *Timestamp) Scan(value interface{}) error {
    if value == nil {
        return nil
    }
    t, ok := value.(time.Time)
    if !ok {
        return fmt.Errorf("xtime: cannot scan %T into Timestamp", value)
    }
    m.Time = t
    return nil
}
// Value returns the Timestamp formatted for insert into database
// (as the underlying time.Time; the driver handles conversion).
func (m Timestamp) Value() (driver.Value, error) {
    return m.Time, nil
}

// CurrentTimestamp returns the current wall-clock time as a Timestamp.
func CurrentTimestamp() Timestamp {
    return NewTimestampFromTime(time.Now())
}

// NewTimestampFromTime converts a time.Time into a Timestamp at
// whole-second precision in t's location.
// NOTE(review): the nanosecond component is dropped here - confirm
// that discarding sub-second precision is intended.
func NewTimestampFromTime(t time.Time) Timestamp {
    year, month, day := t.Date()
    return NewMilliSecondTimeInLocation(year, month, day, t.Hour(), t.Minute(), t.Second(), t.Location())
}
// NewMilliSecondTimeInLocation builds a Timestamp at whole-second
// precision in the given location.
func NewMilliSecondTimeInLocation(year int, month time.Month, day, hour, min, sec int, loc *time.Location) Timestamp {
    // Only the nanosecond component is zeroed; seconds and the given
    // location are kept. (The old comment claiming seconds were removed
    // and the value was "marked as UTC" did not match the code.)
    return Timestamp{Time: time.Date(year, month, day, hour, min, sec, 0, loc)}
}

// NewMilliSecondTime builds a Timestamp at whole-second precision in UTC.
func NewMilliSecondTime(year int, month time.Month, day, hour, min, sec int) Timestamp {
    return NewMilliSecondTimeInLocation(year, month, day, hour, min, sec, time.UTC)
}
func ParseTimestamp(value int64) (Timestamp, error) {
t := time.Unix(value/1000, 0)
return Timestamp{t}, nil
} | xtime/timestamp.go | 0.732496 | 0.449091 | timestamp.go | starcoder |
package pipeline
import (
"errors"
"fmt"
"reflect"
)
/*
As Go does not support overloading and/or defining new infix operators, we have to implement the pipe operator as a function.
We start by defining Pipe function and Pipeline type for its result.
This presents a rather simple method to implement a pipe operator in Go. It works, but it also eliminates
type-safety. It casts all arguments into interface{} before the execution of the pipeline, and it uses reflection
to apply functions. So, there is no way to check functions and their arguments at compile-time.
The benchmark in this repo shows that (on my machine): while the direct function call results 2/3 ns/op,
Pipe runs in ~600/700 ns/op which is 300–350× worst.
This makes it impractical for repetitive small tasks, as it has a relatively big overhead because of the
reflection. Although it is specially useful for medium-sized tasks.
*/
/*
A Pipeline instance is just another function that does the actual work. It accepts zero or more inputs and gives an error.
The number of its input arguments must match the input arguments of the first function in fs.
But its output may or may not match the last function, and that is because Go does not have variadic return values.
*/
type Pipeline func(...interface{}) error

/*
errType is the reflect.Type of the error interface, used to recognize
error results of pipeline stages via reflection.
*/
var errType = reflect.TypeOf((*error)(nil)).Elem()
/*
Pipe uses variadic arguments to take zero or more functions as inputs and produces a Pipeline.
Each function's non-error outputs are fed, via reflection, as the inputs
of the next function; a non-nil error result aborts the pipeline.
*/
func Pipe(fns ...interface{}) Pipeline {
    if len(fns) == 0 {
        return emptyFn
    }
    return func(args ...interface{}) (err error) {
        /*
            Convert a panic anywhere in the pipeline (e.g. an arity or
            type mismatch detected by reflect's Call) into an ordinary
            error return.
        */
        defer func() {
            if r := recover(); r != nil {
                err = fmt.Errorf("pipeline does panic: %v", r)
            }
        }()
        /*
            We start by processing the input arguments.
            Go reflection enables us to call functions dynamically, but for that, we need to pass the input arguments
            as an slice of reflect.Value values. So first, we need to do the conversion.
        */
        var inputs []reflect.Value
        for _, arg := range args {
            inputs = append(inputs, reflect.ValueOf(arg))
        }
        /*
            Secondly, we have to solve the nested function calls, f(g(..)). We can always unwind nested
            functions using a for-loop.
            We don't have to forget to handle possible errors, without passing them down to the pipeline.
        */
        for fnIndex, fn := range fns {
            // call the function
            outputs := reflect.ValueOf(fn).Call(inputs)
            // Reuse the slice: this stage's non-error outputs become
            // the next stage's inputs.
            inputs = inputs[:0]
            fnType := reflect.TypeOf(fn)
            for outputIndex, output := range outputs {
                if fnType.Out(outputIndex).Implements(errType) {
                    /*
                        Pay attention on how to check return values for errors, neglecting the nil ones.
                    */
                    if !output.IsNil() {
                        err = fmt.Errorf("%s func failed: %w", ord(fnIndex), output.Interface().(error))
                        return
                    }
                } else {
                    inputs = append(inputs, output)
                }
            }
        }
        return
    }
}
/*
emptyFn is the no-op Pipeline returned by Pipe when it is given no
functions: it ignores its arguments and returns a nil error.
*/
func emptyFn(...interface{}) error {
    return nil
}
/*
ord returns the 1-based English ordinal for a zero-based index:
ord(0) == "1st", ord(1) == "2nd", ord(12) == "13th", ord(110) == "111th".

Fix: the teens check previously tested order > 10 && order < 20, so
111, 112, 113 (and 211, 312, ...) rendered as "111st"/"112nd"/"113rd".
The English 11-13 exception depends on order%100, not the raw value.
*/
func ord(index int) string {
    order := index + 1
    switch {
    case order%100 >= 11 && order%100 <= 13:
        return fmt.Sprintf("%dth", order)
    case order%10 == 1:
        return fmt.Sprintf("%dst", order)
    case order%10 == 2:
        return fmt.Sprintf("%dnd", order)
    case order%10 == 3:
        return fmt.Sprintf("%drd", order)
    default:
        return fmt.Sprintf("%dth", order)
    }
}
// powerPlusOne demonstrates Pipe: it squares x (rejecting negative
// input with an error), adds one, and captures the result via a sink
// closure.
func powerPlusOne(x int) (int, error) {
    var result int
    /*
        Here you see how easy it is to compose functions with Pipe.
        There is no need to handle errors for each function and the list of composing functions can go on indefinitely.
    */
    err := Pipe(
        // power
        func(x int) (int, error) {
            if x < 0 {
                return 0, errors.New("x should not be negative")
            }
            return x * x, nil
        },
        // PlusOne
        func(x int) int { return x + 1 },
        /*
            We call the last function of the pipeline a sink, i.e. its job is to gather the results and clean the pipeline.
            The sink should not return any values other than an optional error.
        */
        func(x int) { result = x },
    )(x) // the execution of pipeline
    if err != nil {
        return -1, err
    }
    return result, nil
}
// PossibleSolution runs the demo pipeline with input 5 and prints the
// result ("POWER+1: 26"); it panics if the pipeline reports an error.
func PossibleSolution() {
    res, err := powerPlusOne(5)
    if err != nil {
        panic(err)
    }
    fmt.Println("POWER+1:", res)
    fmt.Println("")
}
//-----------------------------------------------------------------------------
package sdf
import (
"fmt"
"github.com/fogleman/pt/pt"
)
//-----------------------------------------------------------------------------
// RenderPNG path-traces the signed distance field s with three sphere
// lights and writes progressive renders to out000.png, out001.png, ...
// (10 iterations). When render_floor is true, a glossy plane is placed
// slightly below the object's bounding box to catch shadows.
func RenderPNG(s SDF3, render_floor bool) {
    scene := pt.Scene{}
    light := pt.LightMaterial(pt.White, 180)
    d := 4.0
    // Three small sphere lights, each at distance d from the origin.
    scene.Add(pt.NewSphere(pt.V(-1, -1, 0.5).Normalize().MulScalar(d), 0.25, light))
    scene.Add(pt.NewSphere(pt.V(0, -1, 0.25).Normalize().MulScalar(d), 0.25, light))
    scene.Add(pt.NewSphere(pt.V(-1, 1, 0).Normalize().MulScalar(d), 0.25, light))
    // Green glossy material for the object itself.
    material := pt.GlossyMaterial(pt.HexColor(0x468966), 1.2, pt.Radians(20))
    s0 := NewPtSDF(s)
    //s0 = pt.NewTransformSDF(s0, pt.Translate(pt.V(0, 0, 0.2)))
    //s0 = pt.NewTransformSDF(s0, pt.Rotate(pt.V(0, 0, 1), pt.Radians(30)))
    scene.Add(pt.NewSDFShape(s0, material))
    if render_floor {
        bb := s0.BoundingBox()
        z_min := bb.Min.Z
        z_height := bb.Max.Z - bb.Min.Z
        z_gap := z_height * 0.1 // 10% of height
        floor := pt.GlossyMaterial(pt.HexColor(0xFFF0A5), 1.2, pt.Radians(20))
        // Place the floor just below the object so it never intersects.
        floor_plane := pt.V(0, 0, z_min-z_gap)
        floor_normal := pt.V(0, 0, 1)
        scene.Add(pt.NewPlane(floor_plane, floor_normal, floor))
    }
    // Camera looks at the origin from (-3, 0, 1) with z up, 35mm-style FOV.
    camera := pt.LookAt(pt.V(-3, 0, 1), pt.V(0, 0, 0), pt.V(0, 0, 1), 35)
    sampler := pt.NewSampler(4, 4)
    sampler.LightMode = pt.LightModeAll
    sampler.SpecularMode = pt.SpecularModeAll
    renderer := pt.NewRenderer(&scene, &camera, sampler, 800, 600)
    renderer.IterativeRender("out%03d.png", 10)
}
//-----------------------------------------------------------------------------
// Render an SDF3 as an STL triangle mesh file.
// The bounding box is padded out to a whole number of cubic cells of
// side mesh_inc (longest axis divided by mesh_cells) before meshing.
// NOTE(review): errors from SaveSTL are printed to stdout rather than
// returned - consider returning them to the caller.
func RenderSTL(
    s SDF3, //sdf3 to render
    mesh_cells int, //number of cells on the longest axis. e.g 200
    path string, //path to filename
) {
    // work out the region we will sample
    bb0 := s.BoundingBox()
    bb0_size := bb0.Size()
    mesh_inc := bb0_size.MaxComponent() / float64(mesh_cells)
    bb1_size := bb0_size.DivScalar(mesh_inc)
    bb1_size = bb1_size.Ceil().AddScalar(1)
    cells := bb1_size.ToV3i()
    bb1_size = bb1_size.MulScalar(mesh_inc)
    // Padded box, same center, whole number of cells per axis.
    bb := NewBox3(bb0.Center(), bb1_size)
    fmt.Printf("rendering %s (%dx%dx%d)\n", path, cells[0], cells[1], cells[2])
    m := NewSDFMesh(s, bb, mesh_inc)
    err := SaveSTL(path, m)
    if err != nil {
        fmt.Printf("%s", err)
    }
}
//----------------------------------------------------------------------------- | sdf/render.go | 0.567457 | 0.435301 | render.go | starcoder |
package noodle
//GLEnum represents all available WebGL constants, prefixed with Gl and turned into UpperCamelCase. For example, DEPTH_BUFFER_BIT is now GlDepthBufferBit
// It aliases int (type alias, not a defined type) so the constants
// interoperate directly with plain integer GL call parameters.
type GLEnum = int
const (
//GlDepthBufferBit passed to <code>clear</code> to clear the current depth buffer.
GlDepthBufferBit GLEnum = 0x00000100
//GlStencilBufferBit passed to <code>clear</code> to clear the current stencil buffer.
GlStencilBufferBit = 0x00000400
//GlColorBufferBit passed to <code>clear</code> to clear the current color buffer.
GlColorBufferBit = 0x00004000
//GlPoints passed to <code>drawelements</code> or <code>drawarrays</code> to draw single points.
GlPoints = 0x0000
//GlLines passed to <code>drawelements</code> or <code>drawarrays</code> to draw lines. each vertex connects to the one after it.
GlLines = 0x0001
//GlLineLoop passed to <code>drawelements</code> or <code>drawarrays</code> to draw lines. each set of two vertices is treated as a separate line segment.
GlLineLoop = 0x0002
//GlLineStrip passed to <code>drawelements</code> or <code>drawarrays</code> to draw a connected group of line segments from the first vertex to the last.
GlLineStrip = 0x0003
//GlTriangles passed to <code>drawelements</code> or <code>drawarrays</code> to draw triangles. each set of three vertices creates a separate triangle.
GlTriangles = 0x0004
//GlTriangleStrip passed to <code>drawelements</code> or <code>drawarrays</code> to draw a connected group of triangles.
GlTriangleStrip = 0x0005
//GlTriangleFan passed to <code>drawelements</code> or <code>drawarrays</code> to draw a connected group of triangles. each vertex connects to the previous and the first vertex in the fan.
GlTriangleFan = 0x0006
//GlZero passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to turn off a component.
GlZero = 0
//GlOne passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to turn on a component.
GlOne = 1
//GlSrcColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by the source elements color.
GlSrcColor = 0x0300
//GlOneMinusSrcColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by one minus the source elements color.
GlOneMinusSrcColor = 0x0301
//GlSrcAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by the source's alpha.
GlSrcAlpha = 0x0302
//GlOneMinusSrcAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by one minus the source's alpha.
GlOneMinusSrcAlpha = 0x0303
//GlDstAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by the destination's alpha.
GlDstAlpha = 0x0304
//GlOneMinusDstAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by one minus the destination's alpha.
GlOneMinusDstAlpha = 0x0305
//GlDstColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by the destination's color.
GlDstColor = 0x0306
//GlOneMinusDstColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by one minus the destination's color.
GlOneMinusDstColor = 0x0307
//GlSrcAlphaSaturate passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to multiply a component by the minimum of source's alpha or one minus the destination's alpha.
GlSrcAlphaSaturate = 0x0308
//GlConstantColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to specify a constant color blend function.
GlConstantColor = 0x8001
//GlOneMinusConstantColor passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to specify one minus a constant color blend function.
GlOneMinusConstantColor = 0x8002
//GlConstantAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to specify a constant alpha blend function.
GlConstantAlpha = 0x8003
//GlOneMinusConstantAlpha passed to <code>blendfunc</code> or <code>blendfuncseparate</code> to specify one minus a constant alpha blend function.
GlOneMinusConstantAlpha = 0x8004
//GlFuncAdd passed to <code>blendequation</code> or <code>blendequationseparate</code> to set an addition blend function.
GlFuncAdd = 0x8006
//GlFuncSubtract passed to <code>blendequation</code> or <code>blendequationseparate</code> to specify a subtraction blend function (source - destination).
GlFuncSubtract = 0x800a
//GlFuncReverseSubtract passed to <code>blendequation</code> or <code>blendequationseparate</code> to specify a reverse subtraction blend function (destination - source).
GlFuncReverseSubtract = 0x800b
//GlBlendEquation passed to <code>getparameter</code> to get the current rgb blend function.
GlBlendEquation = 0x8009
//GlBlendEquationRgb passed to <code>getparameter</code> to get the current rgb blend function. same as blendEquation
GlBlendEquationRgb = 0x8009
//GlBlendEquationAlpha passed to <code>getparameter</code> to get the current alpha blend function. same as blendEquation
GlBlendEquationAlpha = 0x883d
//GlBlendDstRgb passed to <code>getparameter</code> to get the current destination rgb blend function.
GlBlendDstRgb = 0x80c8
//GlBlendSrcRgb passed to <code>getparameter</code> to get the current destination rgb blend function.
GlBlendSrcRgb = 0x80c9
//GlBlendDstAlpha passed to <code>getparameter</code> to get the current destination alpha blend function.
GlBlendDstAlpha = 0x80ca
//GlBlendSrcAlpha passed to <code>getparameter</code> to get the current source alpha blend function.
GlBlendSrcAlpha = 0x80cb
//GlBlendColor passed to <code>getparameter</code> to return a the current blend color.
GlBlendColor = 0x8005
//GlArrayBufferBinding passed to <code>getparameter</code> to get the array buffer binding.
GlArrayBufferBinding = 0x8894
//GlElementArrayBufferBinding passed to <code>getparameter</code> to get the current element array buffer.
GlElementArrayBufferBinding = 0x8895
//GlLineWidth passed to <code>getparameter</code> to get the current <code>linewidth</code> (set by the <code>linewidth</code> method).
GlLineWidth = 0x0b21
//GlAliasedPointSizeRange passed to <code>getparameter</code> to get the current size of a point drawn with <code>gl.points</code>
GlAliasedPointSizeRange = 0x846d
//GlAliasedLineWidthRange passed to <code>getparameter</code> to get the range of available widths for a line. returns a length-2 array with the lo value at 0, and hight at 1.
GlAliasedLineWidthRange = 0x846e
//GlCullFaceMode passed to <code>getparameter</code> to get the current value of <code>cullface</code>. should return <code>front</code>, <code>back</code>, or <code>frontAndBack</code>
GlCullFaceMode = 0x0b45
//GlFrontFace passed to <code>getparameter</code> to determine the current value of <code>frontface</code>. should return <code>cw</code> or <code>ccw</code>.
GlFrontFace = 0x0b46
//GlDepthRange passed to <code>getparameter</code> to return a length-2 array of floats giving the current depth range.
GlDepthRange = 0x0b70
//GlDepthWritemask passed to <code>getparameter</code> to determine if the depth write mask is enabled.
GlDepthWritemask = 0x0b72
//GlDepthClearValue passed to <code>getparameter</code> to determine the current depth clear value.
GlDepthClearValue = 0x0b73
//GlDepthFunc passed to <code>getparameter</code> to get the current depth function. returns <code>never</code>, <code>always</code>, <code>less</code>, <code>equal</code>, <code>lequal</code>, <code>greater</code>, <code>gequal</code>, or <code>notequal</code>.
GlDepthFunc = 0x0b74
//GlStencilClearValue passed to <code>getparameter</code> to get the value the stencil will be cleared to.
GlStencilClearValue = 0x0b91
//GlStencilFunc passed to <code>getparameter</code> to get the current stencil function. returns <code>never</code>, <code>always</code>, <code>less</code>, <code>equal</code>, <code>lequal</code>, <code>greater</code>, <code>gequal</code>, or <code>notequal</code>.
GlStencilFunc = 0x0b92
//GlStencilFail passed to <code>getparameter</code> to get the current stencil fail function. should return <code>keep</code>, <code>replace</code>, <code>incr</code>, <code>decr</code>, <code>invert</code>, <code>incrWrap</code>, or <code>decrWrap</code>.
GlStencilFail = 0x0b94
//GlStencilPassDepthFail passed to <code>getparameter</code> to get the current stencil fail function should the depth buffer test fail. should return <code>keep</code>, <code>replace</code>, <code>incr</code>, <code>decr</code>, <code>invert</code>, <code>incrWrap</code>, or <code>decrWrap</code>.
GlStencilPassDepthFail = 0x0b95
//GlStencilPassDepthPass passed to <code>getparameter</code> to get the current stencil fail function should the depth buffer test pass. should return keep, replace, incr, decr, invert, incrWrap, or decrWrap.
GlStencilPassDepthPass = 0x0b96
//GlStencilRef passed to <code>getparameter</code> to get the reference value used for stencil tests.
GlStencilRef = 0x0b97
//GlStencilValueMask
GlStencilValueMask = 0x0b93
//GlStencilWritemask
GlStencilWritemask = 0x0b98
//GlStencilBackFunc
GlStencilBackFunc = 0x8800
//GlStencilBackFail
GlStencilBackFail = 0x8801
//GlStencilBackPassDepthFail
GlStencilBackPassDepthFail = 0x8802
//GlStencilBackPassDepthPass
GlStencilBackPassDepthPass = 0x8803
//GlStencilBackRef
GlStencilBackRef = 0x8ca3
//GlStencilBackValueMask
GlStencilBackValueMask = 0x8ca4
//GlStencilBackWritemask
GlStencilBackWritemask = 0x8ca5
//GlViewport returns an <a href="/en-us/docs/web/javascript/reference/globalObjects/int32array" title="the int32array typed array represents an array of twos-complement 32-bit signed integers in the platform byte order. if control over byte order is needed, use dataview instead. the contents are initialized to 0. once established, you can reference elements in the array using the object's methods, or using standard array index syntax (that is, using bracket notation)."><code>int32array</code></a> with four elements for the current viewport dimensions.
GlViewport = 0x0ba2
//GlScissorBox returns an <a href="/en-us/docs/web/javascript/reference/globalObjects/int32array" title="the int32array typed array represents an array of twos-complement 32-bit signed integers in the platform byte order. if control over byte order is needed, use dataview instead. the contents are initialized to 0. once established, you can reference elements in the array using the object's methods, or using standard array index syntax (that is, using bracket notation)."><code>int32array</code></a> with four elements for the current scissor box dimensions.
GlScissorBox = 0x0c10
//GlColorClearValue
GlColorClearValue = 0x0c22
//GlColorWritemask
GlColorWritemask = 0x0c23
//GlUnpackAlignment
GlUnpackAlignment = 0x0cf5
//GlPackAlignment
GlPackAlignment = 0x0d05
//GlMaxTextureSize
GlMaxTextureSize = 0x0d33
//GlMaxViewportDims
GlMaxViewportDims = 0x0d3a
//GlSubpixelBits
GlSubpixelBits = 0x0d50
//GlRedBits
GlRedBits = 0x0d52
//GlGreenBits
GlGreenBits = 0x0d53
//GlBlueBits
GlBlueBits = 0x0d54
//GlAlphaBits
GlAlphaBits = 0x0d55
//GlDepthBits
GlDepthBits = 0x0d56
//GlStencilBits
GlStencilBits = 0x0d57
//GlPolygonOffsetUnits
GlPolygonOffsetUnits = 0x2a00
//GlPolygonOffsetFactor
GlPolygonOffsetFactor = 0x8038
//GlTextureBinding2d
GlTextureBinding2d = 0x8069
//GlSampleBuffers
GlSampleBuffers = 0x80a8
//GlSamples
GlSamples = 0x80a9
//GlSampleCoverageValue
GlSampleCoverageValue = 0x80aa
//GlSampleCoverageInvert
GlSampleCoverageInvert = 0x80ab
//GlCompressedTextureFormats
GlCompressedTextureFormats = 0x86a3
//GlVendor
GlVendor = 0x1f00
//GlRenderer
GlRenderer = 0x1f01
//GlVersion
GlVersion = 0x1f02
//GlImplementationColorReadType
GlImplementationColorReadType = 0x8b9a
//GlImplementationColorReadFormat
GlImplementationColorReadFormat = 0x8b9b
//GlBrowserDefaultWebgl
GlBrowserDefaultWebgl = 0x9244
//GlStaticDraw passed to <code>bufferdata</code> as a hint about whether the contents of the buffer are likely to be used often and not change often.
GlStaticDraw = 0x88e4
//GlStreamDraw passed to <code>bufferdata</code> as a hint about whether the contents of the buffer are likely to not be used often.
GlStreamDraw = 0x88e0
//GlDynamicDraw passed to <code>bufferdata</code> as a hint about whether the contents of the buffer are likely to be used often and change often.
GlDynamicDraw = 0x88e8
//GlArrayBuffer passed to <code>bindbuffer</code> or <code>bufferdata</code> to specify the type of buffer being used.
GlArrayBuffer = 0x8892
//GlElementArrayBuffer passed to <code>bindbuffer</code> or <code>bufferdata</code> to specify the type of buffer being used.
GlElementArrayBuffer = 0x8893
//GlBufferSize passed to <code>getbufferparameter</code> to get a buffer's size.
GlBufferSize = 0x8764
//GlBufferUsage passed to <code>getbufferparameter</code> to get the hint for the buffer passed in when it was created.
GlBufferUsage = 0x8765
//GlCurrentVertexAttrib passed to <code>getvertexattrib</code> to read back the current vertex attribute.
GlCurrentVertexAttrib = 0x8626
//GlVertexAttribArrayEnabled
GlVertexAttribArrayEnabled = 0x8622
//GlVertexAttribArraySize
GlVertexAttribArraySize = 0x8623
//GlVertexAttribArrayStride
GlVertexAttribArrayStride = 0x8624
//GlVertexAttribArrayType
GlVertexAttribArrayType = 0x8625
//GlVertexAttribArrayNormalized
GlVertexAttribArrayNormalized = 0x886a
//GlVertexAttribArrayPointer
GlVertexAttribArrayPointer = 0x8645
//GlVertexAttribArrayBufferBinding
GlVertexAttribArrayBufferBinding = 0x889f
//GlCullFace passed to <code>enable</code>/<code>disable</code> to turn on/off culling. can also be used with <code>getparameter</code> to find the current culling method.
GlCullFace = 0x0b44
//GlFront passed to <code>cullface</code> to specify that only front faces should be culled.
GlFront = 0x0404
//GlBack passed to <code>cullface</code> to specify that only back faces should be culled.
GlBack = 0x0405
//GlFrontAndBack passed to <code>cullface</code> to specify that front and back faces should be culled.
GlFrontAndBack = 0x0408
//GlBlend passed to <code>enable</code>/<code>disable</code> to turn on/off blending. can also be used with <code>getparameter</code> to find the current blending method.
GlBlend = 0x0be2
//GlDepthTest passed to <code>enable</code>/<code>disable</code> to turn on/off the depth test. can also be used with <code>getparameter</code> to query the depth test.
GlDepthTest = 0x0b71
//GlDither passed to <code>enable</code>/<code>disable</code> to turn on/off dithering. can also be used with <code>getparameter</code> to find the current dithering method.
GlDither = 0x0bd0
//GlPolygonOffsetFill passed to <code>enable</code>/<code>disable</code> to turn on/off the polygon offset. useful for rendering hidden-line images, decals, and or solids with highlighted edges. can also be used with <code>getparameter</code> to query the scissor test.
GlPolygonOffsetFill = 0x8037
//GlSampleAlphaToCoverage passed to <code>enable</code>/<code>disable</code> to turn on/off the alpha to coverage. used in multi-sampling alpha channels.
GlSampleAlphaToCoverage = 0x809e
//GlSampleCoverage passed to <code>enable</code>/<code>disable</code> to turn on/off the sample coverage. used in multi-sampling.
GlSampleCoverage = 0x80a0
//GlScissorTest passed to <code>enable</code>/<code>disable</code> to turn on/off the scissor test. can also be used with <code>getparameter</code> to query the scissor test.
GlScissorTest = 0x0c11
//GlStencilTest passed to <code>enable</code>/<code>disable</code> to turn on/off the stencil test. can also be used with <code>getparameter</code> to query the stencil test.
GlStencilTest = 0x0b90
//GlNoError returned from <code>geterror</code>.
GlNoError = 0
//GlInvalidEnum returned from <code>geterror</code>.
GlInvalidEnum = 0x0500
//GlInvalidValue returned from <code>geterror</code>.
GlInvalidValue = 0x0501
//GlInvalidOperation returned from <code>geterror</code>.
GlInvalidOperation = 0x0502
//GlOutOfMemory returned from <code>geterror</code>.
GlOutOfMemory = 0x0505
//GlContextLostWebgl returned from <code>geterror</code>.
GlContextLostWebgl = 0x9242
//GlCw passed to <code>frontface</code> to specify the front face of a polygon is drawn in the clockwise direction
GlCw = 0x0900
//GlCcw passed to <code>frontface</code> to specify the front face of a polygon is drawn in the counter clockwise direction
GlCcw = 0x0901
//GlDontCare there is no preference for this behavior.
GlDontCare = 0x1100
//GlFastest the most efficient behavior should be used.
GlFastest = 0x1101
//GlNicest the most correct or the highest quality option should be used.
GlNicest = 0x1102
//GlGenerateMipmapHint hint for the quality of filtering when generating mipmap images with <a href="/en-us/docs/web/api/webglrenderingcontext/generatemipmap" title="the webglrenderingcontext.generatemipmap() method of the webgl api generates a set of mipmaps for a webgltexture object."><code>webglrenderingcontext.generatemipmap()</code></a>.
GlGenerateMipmapHint = 0x8192
//GlByte
GlByte = 0x1400
//GlShort
GlShort = 0x1402
//GlUnsignedShort
GlUnsignedShort = 0x1403
//GlInt
GlInt = 0x1404
//GlUnsignedInt
GlUnsignedInt = 0x1405
//GlFloat
GlFloat = 0x1406
//GlDepthComponent
GlDepthComponent = 0x1902
//GlAlpha
GlAlpha = 0x1906
//GlRGB
GlRGB = 0x1907
//GlRGBA
GlRGBA = 0x1908
//GlLuminance
GlLuminance = 0x1909
//GlLuminanceAlpha
GlLuminanceAlpha = 0x190a
//GlUnsignedByte
GlUnsignedByte = 0x1401
//GlUnsignedShort4444
GlUnsignedShort4444 = 0x8033
//GlUnsignedShort5551
GlUnsignedShort5551 = 0x8034
//GlUnsignedShort565
GlUnsignedShort565 = 0x8363
//GlFragmentShader passed to <code>createshader</code> to define a fragment shader.
GlFragmentShader = 0x8b30
//GlVertexShader passed to <code>createshader</code> to define a vertex shader
GlVertexShader = 0x8b31
//GlCompileStatus passed to <code>getshaderparameter</code> to get the status of the compilation. returns false if the shader was not compiled. you can then query <code>getshaderinfolog</code> to find the exact error
GlCompileStatus = 0x8b81
//GlDeleteStatus passed to <code>getshaderparameter</code> to determine if a shader was deleted via <code>deleteshader</code>. returns true if it was, false otherwise.
GlDeleteStatus = 0x8b80
//GlLinkStatus passed to <code>getprogramparameter</code> after calling <code>linkprogram</code> to determine if a program was linked correctly. returns false if there were errors. use <code>getprograminfolog</code> to find the exact error.
GlLinkStatus = 0x8b82
//GlValidateStatus passed to <code>getprogramparameter</code> after calling <code>validateprogram</code> to determine if it is valid. returns false if errors were found.
GlValidateStatus = 0x8b83
//GlAttachedShaders passed to <code>getprogramparameter</code> after calling <code>attachshader</code> to determine if the shader was attached correctly. returns false if errors occurred.
GlAttachedShaders = 0x8b85
//GlActiveAttributes passed to <code>getprogramparameter</code> to get the number of attributes active in a program.
GlActiveAttributes = 0x8b89
//GlActiveUniforms passed to <code>getprogramparameter</code> to get the number of uniforms active in a program.
GlActiveUniforms = 0x8b86
//GlMaxVertexAttribs the maximum number of entries possible in the vertex attribute list.
GlMaxVertexAttribs = 0x8869
//GlMaxVertexUniformVectors
GlMaxVertexUniformVectors = 0x8dfb
//GlMaxVaryingVectors
GlMaxVaryingVectors = 0x8dfc
//GlMaxCombinedTextureImageUnits
GlMaxCombinedTextureImageUnits = 0x8b4d
//GlMaxVertexTextureImageUnits
GlMaxVertexTextureImageUnits = 0x8b4c
//GlMaxTextureImageUnits implementation dependent number of maximum texture units. at least 8.
GlMaxTextureImageUnits = 0x8872
//GlMaxFragmentUniformVectors
GlMaxFragmentUniformVectors = 0x8dfd
//GlShaderType
GlShaderType = 0x8b4f
//GlShadingLanguageVersion
GlShadingLanguageVersion = 0x8b8c
//GlCurrentProgram
GlCurrentProgram = 0x8b8d
//GlNever passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will never pass. i.e. nothing will be drawn.
GlNever = 0x0200
//GlLess passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is less than the stored value.
GlLess = 0x0201
//GlEqual passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is equals to the stored value.
GlEqual = 0x0202
//GlLEqual passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is less than or equal to the stored value.
GlLEqual = 0x0203
//GlGreater passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is greater than the stored value.
GlGreater = 0x0204
//GlNotEqual passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is not equal to the stored value.
GlNotEqual = 0x0205
//GlGEqual passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will pass if the new depth value is greater than or equal to the stored value.
GlGEqual = 0x0206
//GlAlways passed to <code>depthfunction</code> or <code>stencilfunction</code> to specify depth or stencil tests will always pass. i.e. pixels will be drawn in the order they are drawn.
GlAlways = 0x0207
//GlKeep
GlKeep = 0x1e00
//GlReplace
GlReplace = 0x1e01
//GlIncr
GlIncr = 0x1e02
//GlDecr
GlDecr = 0x1e03
//GlInvert
GlInvert = 0x150a
//GlIncrWrap
GlIncrWrap = 0x8507
//GlDecrWrap
GlDecrWrap = 0x8508
//GlNearest
GlNearest = 0x2600
//GlLinear
GlLinear = 0x2601
//GlNearestMipmapNearest
GlNearestMipmapNearest = 0x2700
//GlLinearMipmapNearest
GlLinearMipmapNearest = 0x2701
//GlNearestMipmapLinear
GlNearestMipmapLinear = 0x2702
//GlLinearMipmapLinear
GlLinearMipmapLinear = 0x2703
//GlTextureMagFilter
GlTextureMagFilter = 0x2800
//GlTextureMinFilter
GlTextureMinFilter = 0x2801
//GlTextureWrapS
GlTextureWrapS = 0x2802
//GlTextureWrapT
GlTextureWrapT = 0x2803
//GlTexture2D
GlTexture2D = 0x0de1
//GlTexture
GlTexture = 0x1702
//GlTextureCubeMap
GlTextureCubeMap = 0x8513
//GlTextureBindingCubeMap
GlTextureBindingCubeMap = 0x8514
//GlTextureCubeMapPositiveX
GlTextureCubeMapPositiveX = 0x8515
//GlTextureCubeMapNegativeX
GlTextureCubeMapNegativeX = 0x8516
//GlTextureCubeMapPositiveY
GlTextureCubeMapPositiveY = 0x8517
//GlTextureCubeMapNegativeY
GlTextureCubeMapNegativeY = 0x8518
//GlTextureCubeMapPositiveZ
GlTextureCubeMapPositiveZ = 0x8519
//GlTextureCubeMapNegativeZ
GlTextureCubeMapNegativeZ = 0x851a
//GlMaxCubeMapTextureSize
GlMaxCubeMapTextureSize = 0x851c
//GlActiveTexture the current active texture unit.
GlActiveTexture = 0x84e0
//GlRepeat
GlRepeat = 0x2901
//GlClampToEdge
GlClampToEdge = 0x812f
//GlMirroredRepeat
GlMirroredRepeat = 0x8370
//GlFloatVec2
GlFloatVec2 = 0x8b50
//GlFloatVec3
GlFloatVec3 = 0x8b51
//GlFloatVec4
GlFloatVec4 = 0x8b52
//GlIntVec2
GlIntVec2 = 0x8b53
//GlIntVec3
GlIntVec3 = 0x8b54
//GlIntVec4
GlIntVec4 = 0x8b55
//GlBool
GlBool = 0x8b56
//GlBoolVec2
GlBoolVec2 = 0x8b57
//GlBoolVec3
GlBoolVec3 = 0x8b58
//GlBoolVec4
GlBoolVec4 = 0x8b59
//GlFloatMat2
GlFloatMat2 = 0x8b5a
//GlFloatMat3
GlFloatMat3 = 0x8b5b
//GlFloatMat4
GlFloatMat4 = 0x8b5c
//GlSampler2d
GlSampler2d = 0x8b5e
//GlSamplerCube
GlSamplerCube = 0x8b60
//GlLowFloat
GlLowFloat = 0x8df0
//GlMediumFloat
GlMediumFloat = 0x8df1
//GlHighFloat
GlHighFloat = 0x8df2
//GlLowInt
GlLowInt = 0x8df3
//GlMediumInt
GlMediumInt = 0x8df4
//GlHighInt
GlHighInt = 0x8df5
//GlFramebuffer
GlFramebuffer = 0x8d40
//GlRenderbuffer
GlRenderbuffer = 0x8d41
//GlRGBA4
GlRGBA4 = 0x8056
//GlRGB5A1
GlRGB5A1 = 0x8057
//GlRGB565
GlRGB565 = 0x8d62
//GlDepthComponent16
GlDepthComponent16 = 0x81a5
//GlStencilIndex8
GlStencilIndex8 = 0x8d48
//GlDepthStencil
GlDepthStencil = 0x84f9
//GlRenderbufferWidth
GlRenderbufferWidth = 0x8d42
//GlRenderbufferHeight
GlRenderbufferHeight = 0x8d43
//GlRenderbufferInternalFormat
GlRenderbufferInternalFormat = 0x8d44
//GlRenderbufferRedSize
GlRenderbufferRedSize = 0x8d50
//GlRenderbufferGreenSize
GlRenderbufferGreenSize = 0x8d51
//GlRenderbufferBlueSize
GlRenderbufferBlueSize = 0x8d52
//GlRenderbufferAlphaSize
GlRenderbufferAlphaSize = 0x8d53
//GlRenderbufferDepthSize
GlRenderbufferDepthSize = 0x8d54
//GlRenderbufferStencilSize
GlRenderbufferStencilSize = 0x8d55
//GlFramebufferAttachmentObjectType
GlFramebufferAttachmentObjectType = 0x8cd0
//GlFramebufferAttachmentObjectName
GlFramebufferAttachmentObjectName = 0x8cd1
//GlFramebufferAttachmentTextureLevel
GlFramebufferAttachmentTextureLevel = 0x8cd2
//GlFramebufferAttachmentTextureCubeMapFace
GlFramebufferAttachmentTextureCubeMapFace = 0x8cd3
//GlColorAttachment0
GlColorAttachment0 = 0x8ce0
//GlDepthAttachment
GlDepthAttachment = 0x8d00
//GlStencilAttachment
GlStencilAttachment = 0x8d20
//GlNone
GlNone = 0
//GlFramebufferComplete
GlFramebufferComplete = 0x8cd5
//GlFramebufferIncompleteAttachment
GlFramebufferIncompleteAttachment = 0x8cd6
//GlFramebufferIncompleteMissingAttachment
GlFramebufferIncompleteMissingAttachment = 0x8cd7
//GlFramebufferIncompleteDimensions
GlFramebufferIncompleteDimensions = 0x8cd9
//GlFramebufferUnsupported
GlFramebufferUnsupported = 0x8cdd
//GlFramebufferBinding
GlFramebufferBinding = 0x8ca6
//GlRenderbufferBinding
GlRenderbufferBinding = 0x8ca7
//GlMaxRenderbufferSize
GlMaxRenderbufferSize = 0x84e8
//GlInvalidFramebufferOperation
GlInvalidFramebufferOperation = 0x0506
//GlUnpackFlipYWebgl
GlUnpackFlipYWebgl = 0x9240
//GlUnpackPremultiplyAlphaWebgl
GlUnpackPremultiplyAlphaWebgl = 0x9241
//GlUnpackColorspaceConversionWebgl
GlUnpackColorspaceConversionWebgl = 0x9243
//GlReadBuffer
GlReadBuffer = 0x0c02
//GlUnpackRowLength
GlUnpackRowLength = 0x0cf2
//GlUnpackSkipRows
GlUnpackSkipRows = 0x0cf3
//GlUnpackSkipPixels
GlUnpackSkipPixels = 0x0cf4
//GlPackRowLength
GlPackRowLength = 0x0d02
//GlPackSkipRows
GlPackSkipRows = 0x0d03
//GlPackSkipPixels
GlPackSkipPixels = 0x0d04
//GlTextureBinding3d
GlTextureBinding3d = 0x806a
//GlUnpackSkipImages
GlUnpackSkipImages = 0x806d
//GlUnpackImageHeight
GlUnpackImageHeight = 0x806e
//GlMax3dTextureSize
GlMax3dTextureSize = 0x8073
//GlMaxElementsVertices
GlMaxElementsVertices = 0x80e8
//GlMaxElementsIndices
GlMaxElementsIndices = 0x80e9
//GlMaxTextureLodBias
GlMaxTextureLodBias = 0x84fd
//GlMaxFragmentUniformComponents
GlMaxFragmentUniformComponents = 0x8b49
//GlMaxVertexUniformComponents
GlMaxVertexUniformComponents = 0x8b4a
//GlMaxArrayTextureLayers
GlMaxArrayTextureLayers = 0x88ff
//GlMinProgramTexelOffset
GlMinProgramTexelOffset = 0x8904
//GlMaxProgramTexelOffset
GlMaxProgramTexelOffset = 0x8905
//GlMaxVaryingComponents
GlMaxVaryingComponents = 0x8b4b
//GlFragmentShaderDerivativeHint
GlFragmentShaderDerivativeHint = 0x8b8b
//GlRasterizerDiscard
GlRasterizerDiscard = 0x8c89
//GlVertexArrayBinding
GlVertexArrayBinding = 0x85b5
//GlMaxVertexOutputComponents
GlMaxVertexOutputComponents = 0x9122
//GlMaxFragmentInputComponents
GlMaxFragmentInputComponents = 0x9125
//GlMaxServerWaitTimeout
GlMaxServerWaitTimeout = 0x9111
//GlMaxElementIndex
GlMaxElementIndex = 0x8d6b
//GlRed
GlRed = 0x1903
//GlRGB8
GlRGB8 = 0x8051
//GlRGBA8
GlRGBA8 = 0x8058
//GlRGB10A2
GlRGB10A2 = 0x8059
//GlTexture3d
GlTexture3d = 0x806f
//GlTextureWrapR
GlTextureWrapR = 0x8072
//GlTextureMinLod
GlTextureMinLod = 0x813a
//GlTextureMaxLod
GlTextureMaxLod = 0x813b
//GlTextureBaseLevel
GlTextureBaseLevel = 0x813c
//GlTextureMaxLevel
GlTextureMaxLevel = 0x813d
//GlTextureCompareMode
GlTextureCompareMode = 0x884c
//GlTextureCompareFunc
GlTextureCompareFunc = 0x884d
//GlSrgb
GlSrgb = 0x8c40
//GlSrgb8
GlSrgb8 = 0x8c41
//GlSrgb8Alpha8
GlSrgb8Alpha8 = 0x8c43
//GlCompareRefToTexture
GlCompareRefToTexture = 0x884e
//GlRGBA32f
GlRGBA32f = 0x8814
//GlRGB32f
GlRGB32f = 0x8815
//GlRGBA16f
GlRGBA16f = 0x881a
//GlRGB16f
GlRGB16f = 0x881b
//GlTexture2DArray
GlTexture2DArray = 0x8c1a
//GlTextureBinding2dArray
GlTextureBinding2dArray = 0x8c1d
//GlR11fG11fB10f
GlR11fG11fB10f = 0x8c3a
//GlRGB9E5
GlRGB9E5 = 0x8c3d
//GlRGBA32ui
GlRGBA32ui = 0x8d70
//GlRGB32ui
GlRGB32ui = 0x8d71
//GlRGBA16ui
GlRGBA16ui = 0x8d76
//GlRGB16ui
GlRGB16ui = 0x8d77
//GlRGBA8ui
GlRGBA8ui = 0x8d7c
//GlRGB8ui
GlRGB8ui = 0x8d7d
//GlRGBA32i
GlRGBA32i = 0x8d82
//GlRGB32i
GlRGB32i = 0x8d83
//GlRGBA16i
GlRGBA16i = 0x8d88
//GlRGB16i
GlRGB16i = 0x8d89
//GlRGBA8i
GlRGBA8i = 0x8d8e
//GlRGB8i
GlRGB8i = 0x8d8f
//GlRedInteger
GlRedInteger = 0x8d94
//GlRGBInteger
GlRGBInteger = 0x8d98
//GlRGBAInteger
GlRGBAInteger = 0x8d99
//GlR8
GlR8 = 0x8229
//GlRg8
GlRg8 = 0x822b
//GlRGB10A2ui
GlRGB10A2ui = 0x906f
//GlTextureImmutableFormat
GlTextureImmutableFormat = 0x912f
//GlTextureImmutableLevels
GlTextureImmutableLevels = 0x82df
//GlUnsignedInt2101010Rev
GlUnsignedInt2101010Rev = 0x8368
//GlUnsignedInt10f11f11fRev
GlUnsignedInt10f11f11fRev = 0x8c3b
//GlUnsignedInt5999Rev
GlUnsignedInt5999Rev = 0x8c3e
//GlFloat32UnsignedInt248Rev
GlFloat32UnsignedInt248Rev = 0x8dad
//GlHalfFloat
GlHalfFloat = 0x140b
//GlRg
GlRg = 0x8227
//GlRgInteger
GlRgInteger = 0x8228
//GlInt2101010Rev
GlInt2101010Rev = 0x8d9f
//GlCurrentQuery
GlCurrentQuery = 0x8865
//GlQueryResult
GlQueryResult = 0x8866
//GlQueryResultAvailable
GlQueryResultAvailable = 0x8867
//GlAnySamplesPassed
GlAnySamplesPassed = 0x8c2f
//GlAnySamplesPassedConservative
GlAnySamplesPassedConservative = 0x8d6a
//GlMaxDrawBuffers
GlMaxDrawBuffers = 0x8824
//GlDrawBuffer0
GlDrawBuffer0 = 0x8825
//GlDrawBuffer1
GlDrawBuffer1 = 0x8826
//GlDrawBuffer2
GlDrawBuffer2 = 0x8827
//GlDrawBuffer3
GlDrawBuffer3 = 0x8828
//GlDrawBuffer4
GlDrawBuffer4 = 0x8829
//GlDrawBuffer5
GlDrawBuffer5 = 0x882a
//GlDrawBuffer6
GlDrawBuffer6 = 0x882b
//GlDrawBuffer7
GlDrawBuffer7 = 0x882c
//GlDrawBuffer8
GlDrawBuffer8 = 0x882d
//GlDrawBuffer9
GlDrawBuffer9 = 0x882e
//GlDrawBuffer10
GlDrawBuffer10 = 0x882f
//GlDrawBuffer11
GlDrawBuffer11 = 0x8830
//GlDrawBuffer12
GlDrawBuffer12 = 0x8831
//GlDrawBuffer13
GlDrawBuffer13 = 0x8832
//GlDrawBuffer14
GlDrawBuffer14 = 0x8833
//GlDrawBuffer15
GlDrawBuffer15 = 0x8834
//GlMaxColorAttachments
GlMaxColorAttachments = 0x8cdf
//GlColorAttachment1
GlColorAttachment1 = 0x8ce1
//GlColorAttachment2
GlColorAttachment2 = 0x8ce2
//GlColorAttachment3
GlColorAttachment3 = 0x8ce3
//GlColorAttachment4
GlColorAttachment4 = 0x8ce4
//GlColorAttachment5
GlColorAttachment5 = 0x8ce5
//GlColorAttachment6
GlColorAttachment6 = 0x8ce6
//GlColorAttachment7
GlColorAttachment7 = 0x8ce7
//GlColorAttachment8
GlColorAttachment8 = 0x8ce8
//GlColorAttachment9
GlColorAttachment9 = 0x8ce9
//GlColorAttachment10
GlColorAttachment10 = 0x8cea
//GlColorAttachment11
GlColorAttachment11 = 0x8ceb
//GlColorAttachment12
GlColorAttachment12 = 0x8cec
//GlColorAttachment13
GlColorAttachment13 = 0x8ced
//GlColorAttachment14
GlColorAttachment14 = 0x8cee
//GlColorAttachment15
GlColorAttachment15 = 0x8cef
//GlSampler3d
GlSampler3d = 0x8b5f
//GlSampler2dShadow
GlSampler2dShadow = 0x8b62
//GlSampler2dArray
GlSampler2dArray = 0x8dc1
//GlSampler2dArrayShadow
GlSampler2dArrayShadow = 0x8dc4
//GlSamplerCubeShadow
GlSamplerCubeShadow = 0x8dc5
//GlIntSampler2d
GlIntSampler2d = 0x8dca
//GlIntSampler3d
GlIntSampler3d = 0x8dcb
//GlIntSamplerCube
GlIntSamplerCube = 0x8dcc
//GlIntSampler2dArray
GlIntSampler2dArray = 0x8dcf
//GlUnsignedIntSampler2d
GlUnsignedIntSampler2d = 0x8dd2
//GlUnsignedIntSampler3d
GlUnsignedIntSampler3d = 0x8dd3
//GlUnsignedIntSamplerCube
GlUnsignedIntSamplerCube = 0x8dd4
//GlUnsignedIntSampler2dArray
GlUnsignedIntSampler2dArray = 0x8dd7
//GlMaxSamples
GlMaxSamples = 0x8d57
//GlSamplerBinding
GlSamplerBinding = 0x8919
//GlPixelPackBuffer
GlPixelPackBuffer = 0x88eb
//GlPixelUnpackBuffer
GlPixelUnpackBuffer = 0x88ec
//GlPixelPackBufferBinding
GlPixelPackBufferBinding = 0x88ed
//GlPixelUnpackBufferBinding
GlPixelUnpackBufferBinding = 0x88ef
//GlCopyReadBuffer
GlCopyReadBuffer = 0x8f36
//GlCopyWriteBuffer
GlCopyWriteBuffer = 0x8f37
//GlCopyReadBufferBinding
GlCopyReadBufferBinding = 0x8f36
//GlCopyWriteBufferBinding
GlCopyWriteBufferBinding = 0x8f37
//GlFloatMat2x3
GlFloatMat2x3 = 0x8b65
//GlFloatMat2x4
GlFloatMat2x4 = 0x8b66
//GlFloatMat3x2
GlFloatMat3x2 = 0x8b67
//GlFloatMat3x4
GlFloatMat3x4 = 0x8b68
//GlFloatMat4x2
GlFloatMat4x2 = 0x8b69
//GlFloatMat4x3
GlFloatMat4x3 = 0x8b6a
//GlUnsignedIntVec2
GlUnsignedIntVec2 = 0x8dc6
//GlUnsignedIntVec3
GlUnsignedIntVec3 = 0x8dc7
//GlUnsignedIntVec4
GlUnsignedIntVec4 = 0x8dc8
//GlUnsignedNormalized
GlUnsignedNormalized = 0x8c17
//GlSignedNormalized
GlSignedNormalized = 0x8f9c
//GlVertexAttribArrayInteger
GlVertexAttribArrayInteger = 0x88fd
//GlVertexAttribArrayDivisor
GlVertexAttribArrayDivisor = 0x88fe
//GlTransformFeedbackBufferMode
GlTransformFeedbackBufferMode = 0x8c7f
//GlMaxTransformFeedbackSeparateComponents
GlMaxTransformFeedbackSeparateComponents = 0x8c80
//GlTransformFeedbackVaryings
GlTransformFeedbackVaryings = 0x8c83
//GlTransformFeedbackBufferStart
GlTransformFeedbackBufferStart = 0x8c84
//GlTransformFeedbackBufferSize
GlTransformFeedbackBufferSize = 0x8c85
//GlTransformFeedbackPrimitivesWritten
GlTransformFeedbackPrimitivesWritten = 0x8c88
//GlMaxTransformFeedbackInterleavedComponents
GlMaxTransformFeedbackInterleavedComponents = 0x8c8a
//GlMaxTransformFeedbackSeparateAttribs
GlMaxTransformFeedbackSeparateAttribs = 0x8c8b
//GlInterleavedAttribs
GlInterleavedAttribs = 0x8c8c
//GlSeparateAttribs
GlSeparateAttribs = 0x8c8d
//GlTransformFeedbackBuffer
GlTransformFeedbackBuffer = 0x8c8e
//GlTransformFeedbackBufferBinding
GlTransformFeedbackBufferBinding = 0x8c8f
//GlTransformFeedback
GlTransformFeedback = 0x8e22
//GlTransformFeedbackPaused
GlTransformFeedbackPaused = 0x8e23
//GlTransformFeedbackActive
GlTransformFeedbackActive = 0x8e24
//GlTransformFeedbackBinding
GlTransformFeedbackBinding = 0x8e25
//GlFramebufferAttachmentColorEncoding
GlFramebufferAttachmentColorEncoding = 0x8210
//GlFramebufferAttachmentComponentType
GlFramebufferAttachmentComponentType = 0x8211
//GlFramebufferAttachmentRedSize
GlFramebufferAttachmentRedSize = 0x8212
//GlFramebufferAttachmentGreenSize
GlFramebufferAttachmentGreenSize = 0x8213
//GlFramebufferAttachmentBlueSize
GlFramebufferAttachmentBlueSize = 0x8214
//GlFramebufferAttachmentAlphaSize
GlFramebufferAttachmentAlphaSize = 0x8215
//GlFramebufferAttachmentDepthSize
GlFramebufferAttachmentDepthSize = 0x8216
//GlFramebufferAttachmentStencilSize
GlFramebufferAttachmentStencilSize = 0x8217
//GlFramebufferDefault
GlFramebufferDefault = 0x8218
//GlDepthStencilAttachment
GlDepthStencilAttachment = 0x821a
//GlDepth24Stencil8
GlDepth24Stencil8 = 0x88f0
//GlDrawFramebufferBinding
GlDrawFramebufferBinding = 0x8ca6
//GlReadFramebuffer
GlReadFramebuffer = 0x8ca8
//GlDrawFramebuffer
GlDrawFramebuffer = 0x8ca9
//GlReadFramebufferBinding
GlReadFramebufferBinding = 0x8caa
//GlRenderbufferSamples
GlRenderbufferSamples = 0x8cab
//GlFramebufferAttachmentTextureLayer
GlFramebufferAttachmentTextureLayer = 0x8cd4
//GlFramebufferIncompleteMultisample
GlFramebufferIncompleteMultisample = 0x8d56
//GlUniformBuffer
GlUniformBuffer = 0x8a11
//GlUniformBufferBinding
GlUniformBufferBinding = 0x8a28
//GlUniformBufferStart
GlUniformBufferStart = 0x8a29
//GlUniformBufferSize
GlUniformBufferSize = 0x8a2a
//GlMaxVertexUniformBlocks
GlMaxVertexUniformBlocks = 0x8a2b
//GlMaxFragmentUniformBlocks
GlMaxFragmentUniformBlocks = 0x8a2d
//GlMaxCombinedUniformBlocks
GlMaxCombinedUniformBlocks = 0x8a2e
//GlMaxUniformBufferBindings
GlMaxUniformBufferBindings = 0x8a2f
//GlMaxUniformBlockSize
GlMaxUniformBlockSize = 0x8a30
//GlMaxCombinedVertexUniformComponents
GlMaxCombinedVertexUniformComponents = 0x8a31
//GlMaxCombinedFragmentUniformComponents
GlMaxCombinedFragmentUniformComponents = 0x8a33
//GlUniformBufferOffsetAlignment
GlUniformBufferOffsetAlignment = 0x8a34
//GlActiveUniformBlocks
GlActiveUniformBlocks = 0x8a36
//GlUniformType
GlUniformType = 0x8a37
//GlUniformSize
GlUniformSize = 0x8a38
//GlUniformBlockIndex
GlUniformBlockIndex = 0x8a3a
//GlUniformOffset
GlUniformOffset = 0x8a3b
//GlUniformArrayStride
GlUniformArrayStride = 0x8a3c
//GlUniformMatrixStride
GlUniformMatrixStride = 0x8a3d
//GlUniformIsRowMajor
GlUniformIsRowMajor = 0x8a3e
//GlUniformBlockBinding
GlUniformBlockBinding = 0x8a3f
//GlUniformBlockDataSize
GlUniformBlockDataSize = 0x8a40
//GlUniformBlockActiveUniforms
GlUniformBlockActiveUniforms = 0x8a42
//GlUniformBlockActiveUniformIndices
GlUniformBlockActiveUniformIndices = 0x8a43
//GlUniformBlockReferencedByVertexShader
GlUniformBlockReferencedByVertexShader = 0x8a44
//GlUniformBlockReferencedByFragmentShader
GlUniformBlockReferencedByFragmentShader = 0x8a46
//GlObjectType
GlObjectType = 0x9112
//GlSyncCondition
GlSyncCondition = 0x9113
//GlSyncStatus
GlSyncStatus = 0x9114
//GlSyncFlags
GlSyncFlags = 0x9115
//GlSyncFence
GlSyncFence = 0x9116
//GlSyncGpuCommandsComplete
GlSyncGpuCommandsComplete = 0x9117
//GlUnsignaled
GlUnsignaled = 0x9118
//GlSignaled
GlSignaled = 0x9119
//GlAlreadySignaled
GlAlreadySignaled = 0x911a
//GlTimeoutExpired
GlTimeoutExpired = 0x911b
//GlConditionSatisfied
GlConditionSatisfied = 0x911c
//GlWaitFailed
GlWaitFailed = 0x911d
//GlSyncFlushCommandsBit
GlSyncFlushCommandsBit = 0x00000001
//GlColor
GlColor = 0x1800
//GlStencil
GlStencil = 0x1802
//GlMin
GlMin = 0x8007
//GlDepthComponent24
GlDepthComponent24 = 0x81a6
//GlStreamRead
GlStreamRead = 0x88e1
//GlStreamCopy
GlStreamCopy = 0x88e2
//GlStaticRead
GlStaticRead = 0x88e5
//GlStaticCopy
GlStaticCopy = 0x88e6
//GlDynamicRead
GlDynamicRead = 0x88e9
//GlDynamicCopy
GlDynamicCopy = 0x88ea
//GlDepthComponent32f
GlDepthComponent32f = 0x8cac
//GlDepth32fStencil8
GlDepth32fStencil8 = 0x8cad
//GlInvalidIndex
GlInvalidIndex = 0xffffffff
//GlTimeoutIgnored
GlTimeoutIgnored = -1
//GlMaxClientWaitTimeoutWebgl
GlMaxClientWaitTimeoutWebgl = 0x9247
//GlVertexAttribArrayDivisorAngle describes the frequency divisor used for instanced rendering.
GlVertexAttribArrayDivisorAngle = 0x88fe
//GlUnmaskedVendorWebgl passed to <code>getparameter</code> to get the vendor string of the graphics driver.
GlUnmaskedVendorWebgl = 0x9245
//GlUnmaskedRendererWebgl passed to <code>getparameter</code> to get the renderer string of the graphics driver.
GlUnmaskedRendererWebgl = 0x9246
//GlMaxTextureMaxAnisotropyExt returns the maximum available anisotropy.
GlMaxTextureMaxAnisotropyExt = 0x84ff
//GlTextureMaxAnisotropyExt passed to <code>texparameter</code> to set the desired maximum anisotropy for a texture.
GlTextureMaxAnisotropyExt = 0x84fe
//GlCompressedRgbS3tcDxt1Ext a dxt1-compressed image in an rgb image format.
GlCompressedRgbS3tcDxt1Ext = 0x83f0
//GlCompressedRgbaS3tcDxt1Ext a dxt1-compressed image in an rgb image format with a simple on/off alpha value.
GlCompressedRgbaS3tcDxt1Ext = 0x83f1
//GlCompressedRgbaS3tcDxt3Ext a dxt3-compressed image in an rgba image format. compared to a 32-bit rgba texture, it offers 4:1 compression.
GlCompressedRgbaS3tcDxt3Ext = 0x83f2
//GlCompressedRgbaS3tcDxt5Ext a dxt5-compressed image in an rgba image format. it also provides a 4:1 compression, but differs to the dxt3 compression in how the alpha compression is done.
GlCompressedRgbaS3tcDxt5Ext = 0x83f3
//GlCompressedR11Eac one-channel (red) unsigned format compression.
GlCompressedR11Eac = 0x9270
//GlCompressedSignedR11Eac one-channel (red) signed format compression.
GlCompressedSignedR11Eac = 0x9271
//GlCompressedRg11Eac two-channel (red and green) unsigned format compression.
GlCompressedRg11Eac = 0x9272
//GlCompressedSignedRg11Eac two-channel (red and green) signed format compression.
GlCompressedSignedRg11Eac = 0x9273
//GlCompressedRgb8Etc2 compresses rbg8 data with no alpha channel.
GlCompressedRgb8Etc2 = 0x9274
//GlCompressedRgba8Etc2Eac compresses rgba8 data. the rgb part is encoded the same as <code>rgbEtc2</code>, but the alpha part is encoded separately.
GlCompressedRgba8Etc2Eac = 0x9275
//GlCompressedSrgb8Etc2 compresses srbg8 data with no alpha channel.
GlCompressedSrgb8Etc2 = 0x9276
//GlCompressedSrgb8Alpha8Etc2Eac compresses srgba8 data. the srgb part is encoded the same as <code>srgbEtc2</code>, but the alpha part is encoded separately.
GlCompressedSrgb8Alpha8Etc2Eac = 0x9277
//GlCompressedRgb8PunchthroughAlpha1Etc2 similar to <code>rgb8Etc</code>, but with ability to punch through the alpha channel, which means to make it completely opaque or transparent.
GlCompressedRgb8PunchthroughAlpha1Etc2 = 0x9278
//GlCompressedSrgb8PunchthroughAlpha1Etc2 similar to <code>srgb8Etc</code>, but with ability to punch through the alpha channel, which means to make it completely opaque or transparent.
GlCompressedSrgb8PunchthroughAlpha1Etc2 = 0x9279
//GlCompressedRgbPvrtc4bppv1Img rgb compression in 4-bit mode. one block for each 4×4 pixels.
GlCompressedRgbPvrtc4bppv1Img = 0x8c00
//GlCompressedRgbaPvrtc4bppv1Img rgba compression in 4-bit mode. one block for each 4×4 pixels.
GlCompressedRgbaPvrtc4bppv1Img = 0x8c02
//GlCompressedRgbPvrtc2bppv1Img rgb compression in 2-bit mode. one block for each 8×4 pixels.
GlCompressedRgbPvrtc2bppv1Img = 0x8c01
//GlCompressedRgbaPvrtc2bppv1Img rgba compression in 2-bit mode. one block for each 8×4 pixels.
GlCompressedRgbaPvrtc2bppv1Img = 0x8c03
//GlCompressedRgbEtc1Webgl compresses 24-bit rgb data with no alpha channel.
GlCompressedRgbEtc1Webgl = 0x8d64
//GlCompressedRgbAtcWebgl compresses rgb textures with no alpha channel.
GlCompressedRgbAtcWebgl = 0x8c92
//GlCompressedRgbaAtcExplicitAlphaWebgl compresses rgba textures using explicit alpha encoding (useful when alpha transitions are sharp).
//Fix: this is GL_ATC_RGBA_EXPLICIT_ALPHA_AMD = 0x8c93; the previous value 0x8c92 duplicated GlCompressedRgbAtcWebgl above.
GlCompressedRgbaAtcExplicitAlphaWebgl = 0x8c93
//GlCompressedRgbaAtcInterpolatedAlphaWebgl compresses rgba textures using interpolated alpha encoding (useful when alpha transitions are gradient).
GlCompressedRgbaAtcInterpolatedAlphaWebgl = 0x87ee
//GlUnsignedInt248Webgl unsigned integer type for 24-bit depth texture data.
GlUnsignedInt248Webgl = 0x84fa
//GlHalfFloatOes half floating-point type (16-bit).
GlHalfFloatOes = 0x8d61
//GlRGBA32fExt rgba 32-bit floating-point color-renderable format.
GlRGBA32fExt = 0x8814
//GlRGB32fExt rgb 32-bit floating-point color-renderable format.
GlRGB32fExt = 0x8815
//GlFramebufferAttachmentComponentTypeExt
GlFramebufferAttachmentComponentTypeExt = 0x8211
//GlUnsignedNormalizedExt
GlUnsignedNormalizedExt = 0x8c17
//GlMinExt produces the minimum color components of the source and destination colors.
GlMinExt = 0x8007
//GlMaxExt produces the maximum color components of the source and destination colors.
GlMaxExt = 0x8008
//GlSrgbExt unsized srgb format that leaves the precision up to the driver.
GlSrgbExt = 0x8c40
//GlSrgbAlphaExt unsized srgb format with unsized alpha component.
GlSrgbAlphaExt = 0x8c42
//GlSrgb8Alpha8Ext sized (8-bit) srgb and alpha formats.
GlSrgb8Alpha8Ext = 0x8c43
//GlFramebufferAttachmentColorEncodingExt returns the framebuffer color encoding.
GlFramebufferAttachmentColorEncodingExt = 0x8210
//GlFragmentShaderDerivativeHintOes indicates the accuracy of the derivative calculation for the glsl built-in functions: <code>dfdx</code>, <code>dfdy</code>, and <code>fwidth</code>.
GlFragmentShaderDerivativeHintOes = 0x8b8b
//GlColorAttachment0Webgl framebuffer color attachment point
GlColorAttachment0Webgl = 0x8ce0
//GlColorAttachment1Webgl framebuffer color attachment point
GlColorAttachment1Webgl = 0x8ce1
//GlColorAttachment2Webgl framebuffer color attachment point
GlColorAttachment2Webgl = 0x8ce2
//GlColorAttachment3Webgl framebuffer color attachment point
GlColorAttachment3Webgl = 0x8ce3
//GlColorAttachment4Webgl framebuffer color attachment point
GlColorAttachment4Webgl = 0x8ce4
//GlColorAttachment5Webgl framebuffer color attachment point
GlColorAttachment5Webgl = 0x8ce5
//GlColorAttachment6Webgl framebuffer color attachment point
GlColorAttachment6Webgl = 0x8ce6
//GlColorAttachment7Webgl framebuffer color attachment point
GlColorAttachment7Webgl = 0x8ce7
//GlColorAttachment8Webgl framebuffer color attachment point
GlColorAttachment8Webgl = 0x8ce8
//GlColorAttachment9Webgl framebuffer color attachment point
GlColorAttachment9Webgl = 0x8ce9
//GlColorAttachment10Webgl framebuffer color attachment point
GlColorAttachment10Webgl = 0x8cea
//GlColorAttachment11Webgl framebuffer color attachment point
GlColorAttachment11Webgl = 0x8ceb
//GlColorAttachment12Webgl framebuffer color attachment point
GlColorAttachment12Webgl = 0x8cec
//GlColorAttachment13Webgl framebuffer color attachment point
GlColorAttachment13Webgl = 0x8ced
//GlColorAttachment14Webgl framebuffer color attachment point
GlColorAttachment14Webgl = 0x8cee
//GlColorAttachment15Webgl framebuffer color attachment point
GlColorAttachment15Webgl = 0x8cef
//GlDrawBuffer0Webgl draw buffer
GlDrawBuffer0Webgl = 0x8825
//GlDrawBuffer1Webgl draw buffer
GlDrawBuffer1Webgl = 0x8826
//GlDrawBuffer2Webgl draw buffer
GlDrawBuffer2Webgl = 0x8827
//GlDrawBuffer3Webgl draw buffer
GlDrawBuffer3Webgl = 0x8828
//GlDrawBuffer4Webgl draw buffer
GlDrawBuffer4Webgl = 0x8829
//GlDrawBuffer5Webgl draw buffer
GlDrawBuffer5Webgl = 0x882a
//GlDrawBuffer6Webgl draw buffer
GlDrawBuffer6Webgl = 0x882b
//GlDrawBuffer7Webgl draw buffer
GlDrawBuffer7Webgl = 0x882c
//GlDrawBuffer8Webgl draw buffer
GlDrawBuffer8Webgl = 0x882d
//GlDrawBuffer9Webgl draw buffer
GlDrawBuffer9Webgl = 0x882e
//GlDrawBuffer10Webgl draw buffer
GlDrawBuffer10Webgl = 0x882f
//GlDrawBuffer11Webgl draw buffer
GlDrawBuffer11Webgl = 0x8830
//GlDrawBuffer12Webgl draw buffer
GlDrawBuffer12Webgl = 0x8831
//GlDrawBuffer13Webgl draw buffer
GlDrawBuffer13Webgl = 0x8832
//GlDrawBuffer14Webgl draw buffer
GlDrawBuffer14Webgl = 0x8833
//GlDrawBuffer15Webgl draw buffer
GlDrawBuffer15Webgl = 0x8834
//GlMaxColorAttachmentsWebgl maximum number of framebuffer color attachment points
GlMaxColorAttachmentsWebgl = 0x8cdf
//GlMaxDrawBuffersWebgl maximum number of draw buffers
GlMaxDrawBuffersWebgl = 0x8824
//GlVertexArrayBindingOes the bound vertex array object (vao).
GlVertexArrayBindingOes = 0x85b5
//GlQueryCounterBitsExt the number of bits used to hold the query result for the given target.
GlQueryCounterBitsExt = 0x8864
//GlCurrentQueryExt the currently active query.
GlCurrentQueryExt = 0x8865
//GlQueryResultExt the query result.
GlQueryResultExt = 0x8866
//GlQueryResultAvailableExt a boolean indicating whether or not a query result is available.
GlQueryResultAvailableExt = 0x8867
//GlTimeElapsedExt elapsed time (in nanoseconds).
GlTimeElapsedExt = 0x88bf
//GlTimestampExt the current time.
GlTimestampExt = 0x8e28
//GlGpuDisjointExt a boolean indicating whether or not the gpu performed any disjoint operation.
GlGpuDisjointExt = 0x8fbb
)
// Texture unit enumeration. The values are consecutive, produced with iota
// starting from 0x84c0 (by the names, these mirror the WebGL/OpenGL
// TEXTURE0..TEXTURE31 constants — confirm against the GL spec).
const (
	// GlTexture0 A texture unit. The first texture unit
	GlTexture0 = iota + 0x84c0
	// GlTexture1 A texture unit
	GlTexture1
	// GlTexture2 A texture unit
	GlTexture2
	// GlTexture3 A texture unit
	GlTexture3
	// GlTexture4 A texture unit
	GlTexture4
	// GlTexture5 A texture unit
	GlTexture5
	// GlTexture6 A texture unit
	GlTexture6
	// GlTexture7 A texture unit
	GlTexture7
	// GlTexture8 A texture unit
	GlTexture8
	// GlTexture9 A texture unit
	GlTexture9
	// GlTexture10 A texture unit
	GlTexture10
	// GlTexture11 A texture unit
	GlTexture11
	// GlTexture12 A texture unit
	GlTexture12
	// GlTexture13 A texture unit
	GlTexture13
	// GlTexture14 A texture unit
	GlTexture14
	// GlTexture15 A texture unit
	GlTexture15
	// GlTexture16 A texture unit
	GlTexture16
	// GlTexture17 A texture unit
	GlTexture17
	// GlTexture18 A texture unit
	GlTexture18
	// GlTexture19 A texture unit
	GlTexture19
	// GlTexture20 A texture unit
	GlTexture20
	// GlTexture21 A texture unit
	GlTexture21
	// GlTexture22 A texture unit
	GlTexture22
	// GlTexture23 A texture unit
	GlTexture23
	// GlTexture24 A texture unit
	GlTexture24
	// GlTexture25 A texture unit
	GlTexture25
	// GlTexture26 A texture unit
	GlTexture26
	// GlTexture27 A texture unit
	GlTexture27
	// GlTexture28 A texture unit
	GlTexture28
	// GlTexture29 A texture unit
	GlTexture29
	// GlTexture30 A texture unit
	GlTexture30
	// GlTexture31 A texture unit. The last texture unit.
	// Assigned explicitly; 0x84df equals the value iota would produce here.
	GlTexture31 = 0x84df
)
package simple
import (
"fmt"
"strings"
)
/* TwoSum: https://leetcode.com/problems/two-sum/
Given an array of integers nums and an integer target, return indices of the two numbers such that they add up to target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
You can return the answer in any order.

solution:
scan once while remembering the index of every value seen so far in a
map (O(1) lookups); for each element check whether its complement
(target - value) was already seen, and if so return both indices.
*/
func TwoSum(nums []int, target int) []int {
	seen := make(map[int]int, len(nums))
	for idx, value := range nums {
		if partner, ok := seen[target-value]; ok {
			return []int{partner, idx}
		}
		seen[value] = idx
	}
	// No pair found: return the zero-valued pair (the problem statement
	// guarantees this never happens).
	return make([]int, 2)
}
/*WordPattern
give pattern: abba, words: "dog cat cat dog" is one match

solution:
split s on single spaces and require one word per pattern rune, then
enforce a bijection between runes and words with two maps (rune->word
and word->rune); a disagreement in either direction is a mismatch.
*/
func WordPattern(pattern string, s string) bool {
	words := strings.Split(s, " ")
	if len(words) != len(pattern) {
		return false
	}
	charToWord := map[rune]string{}
	wordToChar := map[string]rune{}
	for i, ch := range pattern {
		word := words[i]
		mappedWord, haveChar := charToWord[ch]
		mappedChar, haveWord := wordToChar[word]
		// One side known, the other not: the bijection is broken.
		if haveChar != haveWord {
			return false
		}
		if haveChar {
			// Both known: they must map to each other.
			if mappedWord != word || mappedChar != ch {
				return false
			}
			continue
		}
		// Neither known yet: record the pairing in both directions.
		charToWord[ch] = word
		wordToChar[word] = ch
	}
	return true
}
// LetterCombinations returns every letter string the given digit string
// (phone-keypad digits 2-9) can represent, in keypad order. An empty
// input yields an empty (non-nil) slice; digits with no keypad letters
// produce no combinations.
func LetterCombinations(digits string) []string {
	combos := make([]string, 0)
	if len(digits) == 0 {
		return combos
	}
	// Keypad letters per digit; combined prefixes of the input are
	// memoized into the same map as they are produced.
	mappings := map[string][]string{
		"2": {"a", "b", "c"},
		"3": {"d", "e", "f"},
		"4": {"g", "h", "i"},
		"5": {"j", "k", "l"},
		"6": {"m", "n", "o"},
		"7": {"p", "q", "r", "s"},
		"8": {"t", "u", "v"},
		"9": {"w", "x", "y", "z"},
	}
	// Grow one digit at a time: the combinations for digits[:end] are the
	// cross product of digits[:end-1]'s combinations with the next digit's.
	for end := 2; end <= len(digits); end++ {
		if _, memoized := mappings[digits[:end]]; !memoized {
			combine(digits[:end-1], digits[end-1:end], mappings)
		}
	}
	return mappings[digits]
}

// combine stores under key s+b the cross product of the strings memoized
// for s with the strings memoized for b (left factor varies slowest).
func combine(s string, b string, mappings map[string][]string) {
	merged := []string{}
	for _, left := range mappings[s] {
		for _, right := range mappings[b] {
			merged = append(merged, left+right)
		}
	}
	mappings[s+b] = merged
}
/*
divide computes dividend / divisor truncated toward zero without using
multiplication, division or modulo, via repeated doubling (binary long
division).

Fixes over the previous version:
  - an exact multiple returned one short (e.g. divide(4, 4) == 0)
    because the outer loop used '>' instead of '>='.
  - both-negative inputs were never normalized to positive magnitudes,
    so e.g. divide(-10, -3) looped on negative values and returned 0.

divisor must be non-zero (a zero divisor never terminates, as before).
*/
func divide(dividend int, divisor int) int {
	if dividend == 0 {
		return 0
	}
	// The quotient is negative exactly when the operand signs differ.
	sign := 1
	if (dividend < 0) != (divisor < 0) {
		sign = -1
	}
	if dividend < 0 {
		dividend = -dividend
	}
	if divisor < 0 {
		divisor = -divisor
	}
	quotient := 0
	for dividend >= divisor {
		// Double the divisor until just before it would exceed the
		// remaining dividend, tracking how many divisors that is.
		// The 'shifted+shifted > 0' guard stops before integer overflow.
		shifted, multiple := divisor, 1
		for shifted+shifted > 0 && shifted+shifted <= dividend {
			shifted += shifted
			multiple += multiple
		}
		dividend -= shifted
		quotient += multiple
	}
	return sign * quotient
}
// IsValidSudoku reports whether a partially filled 9x9 board contains no
// duplicate value in any row, any column, or any 3x3 sub-box. Cells
// holding '.' are treated as empty and ignored.
func IsValidSudoku(board [][]byte) bool {
	seen := make(map[string]bool)
	for row := 0; row < 9; row++ {
		for col := 0; col < 9; col++ {
			cell := board[row][col]
			if cell == '.' {
				continue
			}
			// One key per constraint group; seeing a key twice means the
			// value already appeared in that row/column/box.
			keys := [3]string{
				fmt.Sprintf("%v in row %v", cell, row),
				fmt.Sprintf("%v in col %v", cell, col),
				fmt.Sprintf("%v in three %v, %v", cell, row/3, col/3),
			}
			for _, key := range keys {
				if seen[key] {
					return false
				}
			}
			for _, key := range keys {
				seen[key] = true
			}
		}
	}
	return true
}
// GroupAnagrams partitions strs into groups of mutual anagrams. Each word
// is fingerprinted by its letter-count signature (input is assumed to be
// lowercase 'a'-'z'), and words sharing a signature are returned together.
// Group order is unspecified (map iteration order).
func GroupAnagrams(strs []string) [][]string {
	groups := make(map[string][]string)
	for _, word := range strs {
		var counts [26]int
		for _, letter := range word {
			counts[letter-'a']++
		}
		// Build a compact "index count -" signature, skipping absent letters.
		var sig strings.Builder
		for letter, n := range counts {
			if n != 0 {
				fmt.Fprintf(&sig, "%v%v-", letter, n)
			}
		}
		key := sig.String()
		groups[key] = append(groups[key], word)
	}
	solution := make([][]string, 0, len(groups))
	for _, group := range groups {
		solution = append(solution, group)
	}
	return solution
}
// Intersect returns the multiset intersection of nums1 and nums2: each
// value appears as many times as it occurs in both inputs. The counts are
// taken from the shorter slice (to keep the map small) and the output
// order follows the other slice.
func Intersect(nums1 []int, nums2 []int) []int {
	// Ensure nums1 is the shorter of the two.
	if len(nums1) > len(nums2) {
		nums1, nums2 = nums2, nums1
	}
	remaining := make(map[int]int, len(nums1))
	for _, v := range nums1 {
		remaining[v]++
	}
	common := make([]int, 0)
	for _, v := range nums2 {
		if remaining[v] > 0 {
			common = append(common, v)
			remaining[v]--
		}
	}
	return common
}
package ast
// Literal represents a Literal node.
// NOTE(review): throughout this file each struct's fields appear to be
// mutually exclusive alternatives (at most one populated per node), as is
// usual for AST union nodes — confirm against the parser that builds them.
type Literal struct {
	NullLiteral    string
	BooleanLiteral string
	NumericLiteral *NumericLiteral
	StringLiteral  string
}

// NumericLiteral represents a NumericLiteral node.
type NumericLiteral struct {
	DecimalLiteral       string
	BinaryIntegerLiteral string
	OctalIntegerLiteral  string
	HexIntegerLiteral    string
}

// ArrayLiteral represents an ArrayLiteral node.
type ArrayLiteral struct {
	Elision     *Elision
	ElementList *ElementList
	Comma       bool
}

// ElementList represents an ElementList node.
type ElementList struct {
	Elision              *Elision
	AssignmentExpression *AssignmentExpression
	SpreadElement        *SpreadElement
	ElementList          *ElementList
	Comma                bool
}

// SpreadElement represents a SpreadElement node.
type SpreadElement struct {
	AssignmentExpression *AssignmentExpression
}

// ObjectLiteral represents an ObjectLiteral node.
type ObjectLiteral struct {
	PropertyDefinitionList *PropertyDefinitionList
	Comma                  bool
}

// PropertyDefinitionList represents a PropertyDefinitionList node.
type PropertyDefinitionList struct {
	PropertyDefinitions []*PropertyDefinition
}

// PropertyDefinition represents a PropertyDefinition node.
type PropertyDefinition struct {
	Colon                bool
	Ellipsis             bool
	IdentifierReference  *IdentifierReference
	CoverInitializedName *CoverInitializedName
	PropertyName         *PropertyName
	AssignmentExpression *AssignmentExpression
	MethodDefinition     *MethodDefinition
}

// PropertyName represents a PropertyName node.
type PropertyName struct {
	LiteralPropertyName  *LiteralPropertyName
	ComputedPropertyName *ComputedPropertyName
}

// LiteralPropertyName represents a LiteralPropertyName node.
type LiteralPropertyName struct {
	IdentifierName string
	StringLiteral  string
	NumericLiteral *NumericLiteral
}

// ComputedPropertyName represents a ComputedPropertyName node.
type ComputedPropertyName struct {
	AssignmentExpression *AssignmentExpression
}

// CoverInitializedName represents a CoverInitializedName node.
type CoverInitializedName struct {
	IdentifierReference *IdentifierReference
	Initializer         *Initializer
}

// TemplateLiteral represents a TemplateLiteral node.
type TemplateLiteral struct {
	NoSubstitutionTemplate string
	SubstitutionTemplate   *SubstitutionTemplate
}

// SubstitutionTemplate represents a SubstitutionTemplate node.
type SubstitutionTemplate struct {
	TemplateHead  string
	Expression    *Expression
	TemplateSpans *TemplateSpans
}

// TemplateSpans represents a TemplateSpans node.
type TemplateSpans struct {
	TemplateTail       string
	TemplateMiddleList *TemplateMiddleList
}

// TemplateMiddleList represents a TemplateMiddleList node.
type TemplateMiddleList struct {
	TemplateMiddle     string
	Expression         *Expression
	TemplateMiddleList *TemplateMiddleList
}

// RegularExpressionLiteral represents a RegularExpressionLiteral node.
type RegularExpressionLiteral struct {
	RegularExpressionBody  string
	RegularExpressionFlags string
}
package models
import (
"github.com/aspose-tasks-cloud/aspose-tasks-cloud-go/api/custom"
)
// ResourceAssignment represents a resource assignment in a project.
type ResourceAssignment struct {
	// Returns or sets a task unique id to which a resource is assigned.
	TaskUid int32 `json:"taskUid"`
	// Returns or sets a resource unique id assigned to a task.
	ResourceUid int32 `json:"resourceUid"`
	// Returns or sets the global unique identifier of an assignment.
	Guid string `json:"guid,omitempty"`
	// Returns or sets the unique identifier of an assignment.
	Uid int32 `json:"uid"`
	// Returns or sets the amount of a work completed on an assignment.
	PercentWorkComplete int32 `json:"percentWorkComplete"`
	// Returns or sets the actual cost incurred on an assignment.
	ActualCost float32 `json:"actualCost"`
	// Returns or sets the actual finish date of an assignment.
	ActualFinish custom.TimeWithoutTZ `json:"actualFinish"`
	// Returns or sets the actual overtime cost incurred on an assignment.
	ActualOvertimeCost float32 `json:"actualOvertimeCost"`
	// Returns or sets the actual amount of an overtime work incurred on an assignment.
	ActualOvertimeWork *string `json:"actualOvertimeWork,omitempty"`
	// Returns or sets the actual start date of an assignment.
	ActualStart custom.TimeWithoutTZ `json:"actualStart"`
	// Returns or sets the actual amount of a work incurred on an assignment.
	ActualWork *string `json:"actualWork,omitempty"`
	// Returns or sets the actual cost of a work performed on an assignment to-date.
	Acwp float64 `json:"acwp"`
	// Determines whether a resource has accepted all of its assignments.
	Confirmed bool `json:"confirmed"`
	// Returns or sets the projected or scheduled cost of an assignment.
	Cost float32 `json:"cost"`
	// Returns or sets the cost rate table used for this assignment.
	CostRateTableType *RateType `json:"costRateTableType"`
	// Returns or sets the difference between the cost and baseline cost of a resource.
	CostVariance float64 `json:"costVariance"`
	// Returns or sets the earned value cost variance.
	Cv float64 `json:"cv"`
	// Returns or sets the delay of an assignment.
	Delay int32 `json:"delay"`
	// Returns or sets the scheduled finish date of an assignment.
	Finish custom.TimeWithoutTZ `json:"finish"`
	// Returns or sets the variance of an assignment finish date from a baseline finish date.
	FinishVariance int32 `json:"finishVariance"`
	// Returns or sets the title of the hyperlink associated with an assignment.
	Hyperlink string `json:"hyperlink,omitempty"`
	// Returns or sets the hyperlink associated with an assignment.
	HyperlinkAddress string `json:"hyperlinkAddress,omitempty"`
	// Returns or sets the document bookmark of the hyperlink associated with an assignment.
	HyperlinkSubAddress string `json:"hyperlinkSubAddress,omitempty"`
	// Returns or sets the variance of an assignment work from the baseline work as minutes.
	WorkVariance float64 `json:"workVariance"`
	// Determines whether the Units have Fixed Rate.
	HasFixedRateUnits bool `json:"hasFixedRateUnits"`
	// Determines whether the consumption of an assigned material resource occurs in a single, fixed amount.
	FixedMaterial bool `json:"fixedMaterial"`
	// Returns or sets the delay caused by leveling.
	LevelingDelay int32 `json:"levelingDelay"`
	// Returns or sets the duration format of a delay.
	LevelingDelayFormat *TimeUnitType `json:"levelingDelayFormat"`
	// Determines whether the Project is linked to another OLE object.
	LinkedFields bool `json:"linkedFields"`
	// Determines whether the assignment is a milestone.
	Milestone bool `json:"milestone"`
	// Returns or sets the text notes associated with an assignment.
	Notes string `json:"notes,omitempty"`
	// Determines whether the assignment is overallocated.
	Overallocated bool `json:"overallocated"`
	// Returns or sets the sum of the actual and remaining overtime cost of an assignment.
	OvertimeCost float32 `json:"overtimeCost"`
	// Returns or sets the scheduled overtime work of an assignment.
	OvertimeWork *string `json:"overtimeWork,omitempty"`
	// Returns or sets the largest number of a resource's units assigned to a task.
	PeakUnits float64 `json:"peakUnits"`
	// Returns or sets the amount of a non-overtime work scheduled for an assignment.
	RegularWork *string `json:"regularWork,omitempty"`
	// Returns or sets the remaining projected cost of completing an assignment.
	RemainingCost float32 `json:"remainingCost"`
	// Returns or sets the remaining projected overtime cost of completing an assignment.
	RemainingOvertimeCost float32 `json:"remainingOvertimeCost"`
	// Returns or sets the remaining overtime work scheduled to complete an assignment.
	RemainingOvertimeWork *string `json:"remainingOvertimeWork,omitempty"`
	// Returns or sets the remaining work scheduled to complete an assignment.
	RemainingWork *string `json:"remainingWork,omitempty"`
	// Determines whether the response has been received for a TeamAssign message.
	ResponsePending bool `json:"responsePending"`
	// Returns or sets the scheduled start date of an assignment.
	Start custom.TimeWithoutTZ `json:"start"`
	// Returns or sets the date when assignment is stopped.
	Stop custom.TimeWithoutTZ `json:"stop"`
	// Returns or sets the date when assignment is resumed.
	Resume custom.TimeWithoutTZ `json:"resume"`
	// Returns or sets the variance of an assignment start date from a baseline start date.
	StartVariance int32 `json:"startVariance"`
	// Determines whether the task is a summary task.
	Summary bool `json:"summary"`
	// Returns or sets the earned value schedule variance, through the project status date.
	Sv float64 `json:"sv"`
	// Returns or sets the number of units for an assignment.
	Units float64 `json:"units"`
	// Determines whether the resource assigned to a task needs to be updated as to the status of the task.
	UpdateNeeded bool `json:"updateNeeded"`
	// Returns or sets the difference between baseline cost and total cost. Read/write Double.
	Vac float64 `json:"vac"`
	// Returns or sets the amount of scheduled work for an assignment. Read/write TimeSpan.
	Work *string `json:"work,omitempty"`
	// Returns or sets the work contour of an assignment.
	WorkContour *WorkContourType `json:"workContour"`
	// Returns or sets the budgeted cost of a work on assignment.
	Bcws float64 `json:"bcws"`
	// Returns or sets the budgeted cost of a work performed on assignment to-date.
	Bcwp float64 `json:"bcwp"`
	// Returns or sets the booking type of an assignment.
	BookingType *BookingType `json:"bookingType"`
	// Returns or sets the duration through which actual work is protected.
	ActualWorkProtected *string `json:"actualWorkProtected,omitempty"`
	// Returns or sets the duration through which actual overtime work is protected.
	ActualOvertimeWorkProtected *string `json:"actualOvertimeWorkProtected,omitempty"`
	// Returns or sets the date that the assignment was created.
	CreationDate custom.TimeWithoutTZ `json:"creationDate"`
	// Returns or sets the name of an assignment owner.
	AssnOwner string `json:"assnOwner,omitempty"`
	// Returns or sets the Guid of an assignment owner.
	AssnOwnerGuid string `json:"assnOwnerGuid,omitempty"`
	// Returns or sets the budgeted cost of resources on an assignment.
	BudgetCost float32 `json:"budgetCost"`
	// Returns or sets the budgeted work amount for a work or material resources on an assignment.
	BudgetWork *string `json:"budgetWork,omitempty"`
	// Returns the time unit for the usage rate of the material resource assignment.
	RateScale *RateScaleType `json:"rateScale"`
	// List of ResourceAssignment's Baseline values.
	Baselines []AssignmentBaseline `json:"baselines,omitempty"`
	// ResourceAssignment extended attributes.
	ExtendedAttributes []ExtendedAttribute `json:"extendedAttributes,omitempty"`
	// Represents a collection of TimephasedData objects.
	TimephasedData []TimephasedData `json:"timephasedData,omitempty"`
}
package processing
import (
"math"
"github.com/go-gl/mathgl/mgl32"
"github.com/kyroy/kdtree"
"github.com/kyroy/kdtree/points"
"github.com/roboticeyes/gometry/geom"
)
// RemoveDuplicates removes vertex duplicates of a triangle mesh. If two vertices are closer than
// the given distanceThreshold, then the vertices are merged.
//
// The mesh is updated in place: vertices closer than the threshold to an
// already-kept vertex are dropped, and every triangle index is remapped to
// the index of its nearest kept vertex. Always returns a nil error.
// NOTE(review): mesh.Vertex(0) is read unconditionally, so an empty mesh
// would panic — confirm callers guarantee at least one vertex.
func RemoveDuplicates(mesh geom.TriangleMesh, distanceThreshold float64) error {
	vertices := []mgl32.Vec3{}
	triangles := []geom.Triangle{}
	// Create the kdtree seeded with the first vertex; each tree point's
	// payload (second argument) is the vertex's index in the deduplicated
	// vertices slice, which step 2 uses for remapping.
	tree := kdtree.New([]kdtree.Point{
		points.NewPoint([]float64{
			float64(mesh.Vertex(0).X()),
			float64(mesh.Vertex(0).Y()),
			float64(mesh.Vertex(0).Z()),
		}, 0 /* index in vertices slice */),
	})
	vertices = append(vertices, mesh.Vertex(0))
	// 1. Traverse all remaining vertices and keep only those whose distance
	// to the nearest already-kept vertex (looked up in the KD-tree) exceeds
	// the threshold; anything closer is treated as a duplicate and skipped.
	for i := 1; i < len(mesh.Vertices()); i++ {
		vtx := mesh.Vertex(i)
		nearest := tree.KNN(&points.Point{
			Coordinates: []float64{
				float64(vtx.X()),
				float64(vtx.Y()),
				float64(vtx.Z()),
			},
		}, 1)
		if len(nearest) > 0 {
			// fmt.Println("Found nearest (search - treenode) ", vtx, nearest[0])
			dist := vtx.Sub(mgl32.Vec3{
				float32(nearest[0].Dimension(0)),
				float32(nearest[0].Dimension(1)),
				float32(nearest[0].Dimension(2)),
			}).Len()
			if math.Abs(float64(dist)) > distanceThreshold {
				// Far enough from every kept vertex: keep it and index it.
				vertices = append(vertices, vtx)
				tree.Insert(
					points.NewPoint([]float64{
						float64(vtx.X()),
						float64(vtx.Y()),
						float64(vtx.Z()),
					}, len(vertices)-1))
			} else {
				// ignore, since those points are already in the list
			}
		} else {
			// should not happen, just failsafe
			vertices = append(vertices, vtx)
			tree.Insert(
				points.NewPoint([]float64{
					float64(vtx.X()),
					float64(vtx.Y()),
					float64(vtx.Z()),
				}, len(vertices)-1))
		}
	}
	// 2. Walk through triangle list and look up every corner vertex in the
	// kd tree to remap its index onto the surviving vertex set.
	for i := 0; i < len(mesh.Triangles()); i++ {
		triangle := mesh.Triangles()[i]
		for j := 0; j < 3; j++ {
			vtx := mesh.Vertex(triangle[j])
			nearest := tree.KNN(&points.Point{
				Coordinates: []float64{
					float64(vtx.X()),
					float64(vtx.Y()),
					float64(vtx.Z()),
				},
			}, 1)
			if len(nearest) > 0 {
				pt := nearest[0].(*points.Point)
				// The payload stored at insert time is the new vertex index.
				triangle[j] = pt.Data.(int)
			}
		}
		triangles = append(triangles, triangle)
	}
	mesh.SetVertices(vertices)
	mesh.SetTriangles(triangles)
	return nil
}
package blockcf
import (
"encoding/binary"
"github.com/endurio/ndrd/chaincfg/chainhash"
"github.com/endurio/ndrd/gcs"
"github.com/endurio/ndrd/txscript"
"github.com/endurio/ndrd/wire"
)
// P is the collision probability exponent used for block committed filters:
// the false-positive rate is 2^-P (2^-20).
const P = 20

// Entries describes all of the filter entries used to create a GCS filter and
// provides methods for appending data structures found in blocks.
type Entries [][]byte

// AddOutPoint adds a serialized outpoint (the 32-byte hash followed by the
// little-endian 4-byte output index) to an entries slice.
func (e *Entries) AddOutPoint(outpoint *wire.OutPoint) {
	entry := make([]byte, chainhash.HashSize+4)
	copy(entry, outpoint.Hash[:])
	binary.LittleEndian.PutUint32(entry[chainhash.HashSize:], outpoint.Index)
	*e = append(*e, entry)
}

// AddHash adds a hash to an entries slice.
func (e *Entries) AddHash(hash *chainhash.Hash) {
	*e = append(*e, hash[:])
}

// AddRegularPkScript adds the regular tx output script to an entries slice.
func (e *Entries) AddRegularPkScript(script []byte) {
	*e = append(*e, script)
}

// AddStakePkScript adds the output script without the stake opcode tag to an
// entries slice.
// NOTE(review): the first byte is sliced off unconditionally, so an empty
// script would panic — confirm callers never pass one.
func (e *Entries) AddStakePkScript(script []byte) {
	*e = append(*e, script[1:])
}

// AddSigScript adds any data pushes of a signature script to an entries slice.
// Scripts that fail to parse or contain no pushes are silently skipped.
func (e *Entries) AddSigScript(script []byte) {
	// Ignore errors and add pushed data, if any
	pushes, err := txscript.PushedData(script)
	if err == nil && len(pushes) != 0 {
		*e = append(*e, pushes...)
	}
}
// Key creates a block committed filter key by truncating a block hash to the
// key size.
func Key(hash *chainhash.Hash) [gcs.KeySize]byte {
	var key [gcs.KeySize]byte
	copy(key[:], hash[:])
	return key
}

// Regular builds a regular GCS filter from a block. A regular GCS filter will
// contain all the previous regular outpoints spent within a block, as well as
// the data pushes within all the outputs created within a block which can be
// spent by regular transactions.
func Regular(block *wire.MsgBlock) (*gcs.Filter, error) {
	var data Entries
	// For regular transactions, all previous outpoints except the coinbase's
	// are committed, and all output scripts are committed.
	for i, tx := range block.Transactions {
		if i != 0 { // index 0 is the coinbase; its inputs spend nothing real
			for _, txIn := range tx.TxIn {
				data.AddOutPoint(&txIn.PreviousOutPoint)
			}
		}
		for _, txOut := range tx.TxOut {
			data.AddRegularPkScript(txOut.PkScript)
		}
	}
	// Create the key by truncating the block hash.
	blockHash := block.BlockHash()
	key := Key(&blockHash)
	return gcs.NewFilter(P, key, data)
}

// Extended builds an extended GCS filter from a block. An extended filter
// supplements a regular basic filter by including all transaction hashes of
// regular and stake transactions, and adding the witness data (a.k.a. the
// signature script) found within every non-coinbase regular transaction.
// NOTE(review): assumes the block has at least one transaction (the
// coinbase); an empty transaction list would panic.
func Extended(block *wire.MsgBlock) (*gcs.Filter, error) {
	var data Entries
	// For each regular transaction, commit the transaction hash. For all
	// regular transactions except the coinbase, commit pushes to the signature
	// script (witness).
	coinbaseHash := block.Transactions[0].TxHash()
	data.AddHash(&coinbaseHash)
	for _, tx := range block.Transactions[1:] {
		txHash := tx.TxHash()
		data.AddHash(&txHash)
		for _, txIn := range tx.TxIn {
			if txIn.SignatureScript != nil {
				data.AddSigScript(txIn.SignatureScript)
			}
		}
	}
	// Create the key by truncating the block hash.
	blockHash := block.BlockHash()
	key := Key(&blockHash)
	return gcs.NewFilter(P, key, data)
}
package types
import "time"
// SeriesChannel channel to report series on
type SeriesChannel chan Series

// Series represents a series of candles of a symbol
type Series struct {
	// Symbol of the series
	Symbol Symbol
	// Timeframe of the series
	Timeframe Timeframe
	// Candles of the series (last candle in the array is the current candle)
	Candles []OHLC
}

// NewSeries creates a Series instance.
// NOTE(review): the candles slice is stored without copying, so the caller
// shares (and can mutate) the series' backing array.
func NewSeries(symbol Symbol, timeframe Timeframe, candles []OHLC) Series {
	return Series{
		Symbol:    symbol,
		Timeframe: timeframe,
		Candles:   candles,
	}
}
// SubSeries returns a view of the series restricted to length candles
// starting at index start; the candle slice is shared with the receiver,
// not copied. Out-of-range arguments are clamped instead of panicking:
// start is clipped to [0, len(Candles)], and a negative or oversized
// length is reduced to whatever remains after start (the previous
// version panicked on a start beyond the series or a negative length).
func (s Series) SubSeries(start int, length int) Series {
	if start < 0 {
		start = 0
	}
	if start > len(s.Candles) {
		start = len(s.Candles)
	}
	if length < 0 || start+length > len(s.Candles) {
		length = len(s.Candles) - start
	}
	return Series{
		Symbol:    s.Symbol,
		Timeframe: s.Timeframe,
		Candles:   s.Candles[start : start+length],
	}
}
// Length returns the number of candles in the series.
func (s Series) Length() int {
	return len(s.Candles)
}

// CurrentCandleTime returns the open time of the active (last) candle.
// NOTE(review): panics on an empty series.
func (s Series) CurrentCandleTime() time.Time {
	return s.Candles[len(s.Candles)-1].OpenTime
}

// PreviousCandleTime returns the open time of the last finished candle.
// NOTE(review): panics when the series has fewer than two candles.
func (s Series) PreviousCandleTime() time.Time {
	return s.Candles[len(s.Candles)-2].OpenTime
}
// CurrentOpen returns the open price of the active (last) candle.
func (s Series) CurrentOpen() float64 {
	return s.Candles[len(s.Candles)-1].Open
}

// PreviousOpen returns the open price of the last finished candle.
func (s Series) PreviousOpen() float64 {
	return s.Candles[len(s.Candles)-2].Open
}

// Open returns the open price of every candle, oldest first.
func (s Series) Open() []float64 {
	opens := make([]float64, len(s.Candles))
	for i := range s.Candles {
		opens[i] = s.Candles[i].Open
	}
	return opens
}

// OpenRange returns the open prices of size candles starting at start;
// size is clipped so the range never runs past the end of the series.
func (s Series) OpenRange(start int, size int) []float64 {
	if start+size > len(s.Candles) {
		size = len(s.Candles) - start
	}
	opens := make([]float64, size)
	for offset := 0; offset < size; offset++ {
		opens[offset] = s.Candles[start+offset].Open
	}
	return opens
}

// OpenLastN returns the open prices of the most recent size candles.
func (s Series) OpenLastN(size int) []float64 {
	return s.OpenRange(len(s.Candles)-size, size)
}
// CurrentHigh returns the high price of the active (last) candle.
func (s Series) CurrentHigh() float64 {
	return s.Candles[len(s.Candles)-1].High
}

// PreviousHigh returns the high price of the last finished candle.
func (s Series) PreviousHigh() float64 {
	return s.Candles[len(s.Candles)-2].High
}

// High returns the high price of every candle, oldest first.
func (s Series) High() []float64 {
	highs := make([]float64, len(s.Candles))
	for i := range s.Candles {
		highs[i] = s.Candles[i].High
	}
	return highs
}

// HighRange returns the high prices of size candles starting at start;
// size is clipped so the range never runs past the end of the series.
func (s Series) HighRange(start int, size int) []float64 {
	if start+size > len(s.Candles) {
		size = len(s.Candles) - start
	}
	highs := make([]float64, size)
	for offset := 0; offset < size; offset++ {
		highs[offset] = s.Candles[start+offset].High
	}
	return highs
}

// HighLastN returns the high prices of the most recent size candles.
func (s Series) HighLastN(size int) []float64 {
	return s.HighRange(len(s.Candles)-size, size)
}
// CurrentLow returns the low price of the active (last) candle.
func (s Series) CurrentLow() float64 {
	return s.Candles[len(s.Candles)-1].Low
}

// PreviousLow returns the low price of the last finished candle.
func (s Series) PreviousLow() float64 {
	return s.Candles[len(s.Candles)-2].Low
}

// Low returns the low price of every candle, oldest first.
func (s Series) Low() []float64 {
	lows := make([]float64, len(s.Candles))
	for i := range s.Candles {
		lows[i] = s.Candles[i].Low
	}
	return lows
}

// LowRange returns the low prices of size candles starting at start;
// size is clipped so the range never runs past the end of the series.
func (s Series) LowRange(start int, size int) []float64 {
	if start+size > len(s.Candles) {
		size = len(s.Candles) - start
	}
	lows := make([]float64, size)
	for offset := 0; offset < size; offset++ {
		lows[offset] = s.Candles[start+offset].Low
	}
	return lows
}

// LowLastN returns the low prices of the most recent size candles.
func (s Series) LowLastN(size int) []float64 {
	return s.LowRange(len(s.Candles)-size, size)
}
// CurrentClose price (the active candle)
func (s Series) CurrentClose() float64 {
return s.Candles[len(s.Candles)-1].Close
}
// PreviousClose price (the last finished candle)
func (s Series) PreviousClose() float64 {
return s.Candles[len(s.Candles)-2].Close
}
// Close prices
func (s Series) Close() (res []float64) {
var numCandles, index int
var candle OHLC
numCandles = len(s.Candles)
res = make([]float64, numCandles, numCandles)
for index, candle = range s.Candles {
res[index] = candle.Close
}
return
}
// CloseRange prices
func (s Series) CloseRange(start int, size int) (res []float64) {
var index int
if (start + size) > len(s.Candles) {
size = len(s.Candles) - start
}
res = make([]float64, size, size)
for index = start; index < (start + size); index++ {
res[index-start] = s.Candles[index].Close
}
return
}
// CloseLastN prices
func (s Series) CloseLastN(size int) (res []float64) {
var start int
start = len(s.Candles) - size
res = s.CloseRange(start, size)
return
} | types/series.go | 0.751101 | 0.519399 | series.go | starcoder |
package layer
// GetAutoOrient returns the AutoOrient field if it's non-nil, zero value otherwise.
func (i *ImageT) GetAutoOrient() int {
	if i != nil && i.AutoOrient != nil {
		return *i.AutoOrient
	}
	return 0
}

// GetBlendMode returns the BlendMode field if it's non-nil, zero value otherwise.
func (i *ImageT) GetBlendMode() int {
	if i != nil && i.BlendMode != nil {
		return *i.BlendMode
	}
	return 0
}

// GetClass returns the Class field if it's non-nil, zero value otherwise.
func (i *ImageT) GetClass() string {
	if i != nil && i.Class != nil {
		return *i.Class
	}
	return ""
}

// GetDdd returns the Ddd field if it's non-nil, zero value otherwise.
func (i *ImageT) GetDdd() int {
	if i != nil && i.Ddd != nil {
		return *i.Ddd
	}
	return 0
}

// GetHasMask returns the HasMask field if it's non-nil, zero value otherwise.
func (i *ImageT) GetHasMask() bool {
	if i != nil && i.HasMask != nil {
		return *i.HasMask
	}
	return false
}

// GetInded returns the Inded field if it's non-nil, zero value otherwise.
func (i *ImageT) GetInded() int {
	if i != nil && i.Inded != nil {
		return *i.Inded
	}
	return 0
}

// GetInPoint returns the InPoint field if it's non-nil, zero value otherwise.
func (i *ImageT) GetInPoint() float64 {
	if i != nil && i.InPoint != nil {
		return *i.InPoint
	}
	return 0.0
}

// GetLayerName returns the LayerName field if it's non-nil, zero value otherwise.
func (i *ImageT) GetLayerName() string {
	if i != nil && i.LayerName != nil {
		return *i.LayerName
	}
	return ""
}

// GetName returns the Name field if it's non-nil, zero value otherwise.
func (i *ImageT) GetName() string {
	if i != nil && i.Name != nil {
		return *i.Name
	}
	return ""
}

// GetOutPoint returns the OutPoint field if it's non-nil, zero value otherwise.
func (i *ImageT) GetOutPoint() float64 {
	if i != nil && i.OutPoint != nil {
		return *i.OutPoint
	}
	return 0.0
}

// GetParent returns the Parent field if it's non-nil, zero value otherwise.
func (i *ImageT) GetParent() int {
	if i != nil && i.Parent != nil {
		return *i.Parent
	}
	return 0
}

// GetRefID returns the RefID field if it's non-nil, zero value otherwise.
func (i *ImageT) GetRefID() string {
	if i != nil && i.RefID != nil {
		return *i.RefID
	}
	return ""
}

// GetStartTime returns the StartTime field if it's non-nil, zero value otherwise.
func (i *ImageT) GetStartTime() float64 {
	if i != nil && i.StartTime != nil {
		return *i.StartTime
	}
	return 0.0
}

// GetTimeStretching returns the TimeStretching field if it's non-nil, zero value otherwise.
func (i *ImageT) GetTimeStretching() float64 {
	if i != nil && i.TimeStretching != nil {
		return *i.TimeStretching
	}
	return 0.0
}
// GetAutoOrient returns the AutoOrient field if it's non-nil, zero value otherwise.
func (n *NullT) GetAutoOrient() int {
	if n != nil && n.AutoOrient != nil {
		return *n.AutoOrient
	}
	return 0
}

// GetClass returns the Class field if it's non-nil, zero value otherwise.
func (n *NullT) GetClass() string {
	if n != nil && n.Class != nil {
		return *n.Class
	}
	return ""
}

// GetDdd returns the Ddd field if it's non-nil, zero value otherwise.
func (n *NullT) GetDdd() int {
	if n != nil && n.Ddd != nil {
		return *n.Ddd
	}
	return 0
}

// GetInd returns the Ind field if it's non-nil, zero value otherwise.
func (n *NullT) GetInd() int {
	if n != nil && n.Ind != nil {
		return *n.Ind
	}
	return 0
}

// GetInPoint returns the InPoint field if it's non-nil, zero value otherwise.
func (n *NullT) GetInPoint() float64 {
	if n != nil && n.InPoint != nil {
		return *n.InPoint
	}
	return 0.0
}

// GetLayerName returns the LayerName field if it's non-nil, zero value otherwise.
func (n *NullT) GetLayerName() string {
	if n != nil && n.LayerName != nil {
		return *n.LayerName
	}
	return ""
}

// GetName returns the Name field if it's non-nil, zero value otherwise.
// NOTE(review): Name is numeric here (float64), unlike ImageT's string
// Name — looks odd for a name field; confirm against the NullT struct.
func (n *NullT) GetName() float64 {
	if n != nil && n.Name != nil {
		return *n.Name
	}
	return 0.0
}

// GetOutPoint returns the OutPoint field if it's non-nil, zero value otherwise.
func (n *NullT) GetOutPoint() float64 {
	if n != nil && n.OutPoint != nil {
		return *n.OutPoint
	}
	return 0.0
}

// GetParent returns the Parent field if it's non-nil, zero value otherwise.
func (n *NullT) GetParent() int {
	if n != nil && n.Parent != nil {
		return *n.Parent
	}
	return 0
}

// GetStartTime returns the StartTime field if it's non-nil, zero value otherwise.
func (n *NullT) GetStartTime() float64 {
	if n != nil && n.StartTime != nil {
		return *n.StartTime
	}
	return 0.0
}

// GetTimeStretching returns the TimeStretching field if it's non-nil, zero value otherwise.
func (n *NullT) GetTimeStretching() float64 {
	if n != nil && n.TimeStretching != nil {
		return *n.TimeStretching
	}
	return 0.0
}
// GetAutoOrient returns the AutoOrient field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetAutoOrient() int {
	if p != nil && p.AutoOrient != nil {
		return *p.AutoOrient
	}
	return 0
}

// GetBlendMode returns the BlendMode field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetBlendMode() int {
	if p != nil && p.BlendMode != nil {
		return *p.BlendMode
	}
	return 0
}

// GetClass returns the Class field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetClass() string {
	if p != nil && p.Class != nil {
		return *p.Class
	}
	return ""
}

// GetDdd returns the Ddd field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetDdd() int {
	if p != nil && p.Ddd != nil {
		return *p.Ddd
	}
	return 0
}

// GetHasMask returns the HasMask field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetHasMask() bool {
	if p != nil && p.HasMask != nil {
		return *p.HasMask
	}
	return false
}

// GetIndex returns the Index field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetIndex() int {
	if p != nil && p.Index != nil {
		return *p.Index
	}
	return 0
}

// GetInPoint returns the InPoint field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetInPoint() float64 {
	if p != nil && p.InPoint != nil {
		return *p.InPoint
	}
	return 0.0
}

// GetLayerName returns the LayerName field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetLayerName() string {
	if p != nil && p.LayerName != nil {
		return *p.LayerName
	}
	return ""
}

// GetName returns the Name field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetName() string {
	if p != nil && p.Name != nil {
		return *p.Name
	}
	return ""
}

// GetOutPoint returns the OutPoint field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetOutPoint() float64 {
	if p != nil && p.OutPoint != nil {
		return *p.OutPoint
	}
	return 0.0
}

// GetParent returns the Parent field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetParent() int {
	if p != nil && p.Parent != nil {
		return *p.Parent
	}
	return 0
}

// GetRefID returns the RefID field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetRefID() string {
	if p != nil && p.RefID != nil {
		return *p.RefID
	}
	return ""
}

// GetStartTime returns the StartTime field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetStartTime() float64 {
	if p != nil && p.StartTime != nil {
		return *p.StartTime
	}
	return 0.0
}

// GetTimeMapping returns the TimeMapping field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetTimeMapping() float64 {
	if p != nil && p.TimeMapping != nil {
		return *p.TimeMapping
	}
	return 0.0
}

// GetTimeStretching returns the TimeStretching field if it's non-nil, zero value otherwise.
func (p *PreCompT) GetTimeStretching() float64 {
	if p != nil && p.TimeStretching != nil {
		return *p.TimeStretching
	}
	return 0.0
}
// GetAutoOrient returns the AutoOrient field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetAutoOrient() int {
if s == nil || s.AutoOrient == nil {
return 0
}
return *s.AutoOrient
}
// GetBlendMode returns the BlendMode field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetBlendMode() int {
if s == nil || s.BlendMode == nil {
return 0
}
return *s.BlendMode
}
// GetClass returns the Class field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetClass() string {
if s == nil || s.Class == nil {
return ""
}
return *s.Class
}
// GetDdd returns the Ddd field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetDdd() int {
if s == nil || s.Ddd == nil {
return 0
}
return *s.Ddd
}
// GetHasMask returns the HasMask field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetHasMask() bool {
if s == nil || s.HasMask == nil {
return false
}
return *s.HasMask
}
// GetIndex returns the Index field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetIndex() int {
if s == nil || s.Index == nil {
return 0
}
return *s.Index
}
// GetInPoint returns the InPoint field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetInPoint() float64 {
if s == nil || s.InPoint == nil {
return 0.0
}
return *s.InPoint
}
// GetLayerName returns the LayerName field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetLayerName() string {
if s == nil || s.LayerName == nil {
return ""
}
return *s.LayerName
}
// GetName returns the Name field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetName() string {
if s == nil || s.Name == nil {
return ""
}
return *s.Name
}
// GetOutPoint returns the OutPoint field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetOutPoint() float64 {
if s == nil || s.OutPoint == nil {
return 0.0
}
return *s.OutPoint
}
// GetParent returns the Parent field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetParent() int {
if s == nil || s.Parent == nil {
return 0
}
return *s.Parent
}
// GetStartTime returns the StartTime field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetStartTime() float64 {
if s == nil || s.StartTime == nil {
return 0.0
}
return *s.StartTime
}
// GetTimeStretching returns the TimeStretching field if it's non-nil, zero value otherwise.
func (s *ShapeT) GetTimeStretching() float64 {
if s == nil || s.TimeStretching == nil {
return 0.0
}
return *s.TimeStretching
} | lottie/layer/layer-accessors.go | 0.841125 | 0.535584 | layer-accessors.go | starcoder |
package synthetics
import (
"encoding/json"
)
// V202101beta1DNS struct for V202101beta1DNS
type V202101beta1DNS struct {
	// Name is optional; nil means "unset" and the field is omitted from JSON.
	Name *string `json:"name,omitempty"`
}
// NewV202101beta1DNS instantiates a new V202101beta1DNS object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewV202101beta1DNS() *V202101beta1DNS {
	return &V202101beta1DNS{}
}

// NewV202101beta1DNSWithDefaults instantiates a new V202101beta1DNS object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewV202101beta1DNSWithDefaults() *V202101beta1DNS {
	return &V202101beta1DNS{}
}
// GetName returns the Name field value if set, zero value otherwise.
func (o *V202101beta1DNS) GetName() string {
	if o != nil && o.Name != nil {
		return *o.Name
	}
	return ""
}
// GetNameOk returns a tuple with the Name field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *V202101beta1DNS) GetNameOk() (*string, bool) {
	if o != nil && o.Name != nil {
		return o.Name, true
	}
	return nil, false
}
// HasName returns a boolean if a field has been set.
func (o *V202101beta1DNS) HasName() bool {
	// Idiom: return the condition directly instead of an if/else pair.
	return o != nil && o.Name != nil
}
// SetName gets a reference to the given string and assigns it to the Name field.
func (o *V202101beta1DNS) SetName(v string) {
	// Takes the address of the parameter copy, so the caller's variable is not aliased.
	o.Name = &v
}
// MarshalJSON encodes only the fields that have been set.
func (o V202101beta1DNS) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if o.Name != nil {
		out["name"] = o.Name
	}
	return json.Marshal(out)
}
// NullableV202101beta1DNS wraps a V202101beta1DNS together with an explicit
// "has been set" flag, so an explicit JSON null can be distinguished from an
// absent value.
type NullableV202101beta1DNS struct {
	value *V202101beta1DNS
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableV202101beta1DNS) Get() *V202101beta1DNS {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableV202101beta1DNS) Set(val *V202101beta1DNS) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been stored.
func (v NullableV202101beta1DNS) IsSet() bool {
	return v.isSet
}

// Unset clears both the wrapped value and the set flag.
func (v *NullableV202101beta1DNS) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableV202101beta1DNS returns a wrapper already marked as set.
func NewNullableV202101beta1DNS(val *V202101beta1DNS) *NullableV202101beta1DNS {
	return &NullableV202101beta1DNS{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (JSON null when nil).
func (v NullableV202101beta1DNS) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set.
func (v *NullableV202101beta1DNS) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package gspeed
import (
"fmt"
"github.com/cryptowilliam/goutil/basic/gerrors"
"github.com/cryptowilliam/goutil/container/gstring"
"strconv"
"strings"
"time"
"unicode"
)
// Speed is a transfer rate expressed as a bit count per second.
// float64 is used instead of uint64 because its representable range is far
// larger — uint64 cannot even reach the YB scale.
type Speed float64

// Map between speed unit of bit and bits size
const (
	_  = iota // ignore first value by assigning to blank identifier
	Kb Speed = 1 << (10 * iota) // 1 Kb = 1024 bits
	Mb // 1 Mb = 1048576 bits
	Gb
	Tb
	Pb
	Eb
	Zb
	Yb
)

// Map between speed unit of byte and bits size
const (
	_  = iota // ignore first value by assigning to blank identifier
	KB Speed = 8 * (1 << (10 * iota)) // 1 KB = (8 * 1024) bits
	MB // 1 MB = (8 * 1048576) bits
	GB
	TB
	PB
	EB
	ZB
	YB
)
// FromBytesInterval converts a byte count observed over interval into a Speed.
// NOTE(review): interval == 0 divides by zero and yields +Inf — confirm
// callers guard against a zero interval.
func FromBytesInterval(bytes float64, interval time.Duration) (Speed, error) {
	// Fix: drop the redundant float64() conversion of an already-float64 argument.
	bytesps := bytes / (float64(interval) / float64(time.Second))
	return FromBytes(bytesps)
}

// FromBitsInterval converts a bit count observed over interval into a Speed.
func FromBitsInterval(bits float64, interval time.Duration) (Speed, error) {
	bitsps := bits / (float64(interval) / float64(time.Second))
	return FromBits(bitsps)
}

// FromBytes converts a bytes-per-second rate into a Speed.
// Negative input is rejected with an error.
func FromBytes(size float64) (Speed, error) {
	if size < 0 {
		return Speed(0), gerrors.New("Negative speed error")
	}
	return Speed(size * 8), nil
}

// FromBytesUint64 converts a bytes-per-second rate into a Speed.
// The unsigned input cannot be negative, so no error is possible.
func FromBytesUint64(size uint64) Speed {
	return Speed(float64(size) * 8)
}

// FromBits converts a bits-per-second rate into a Speed.
// Negative input is rejected with an error.
func FromBits(size float64) (Speed, error) {
	if size < 0 {
		return Speed(0), gerrors.New("Negative speed error")
	}
	return Speed(size), nil
}
// GetByteSize returns the rate in bytes per second.
func (s Speed) GetByteSize() float64 {
	return float64(s) / 8.0
}

// GetBitSize returns the rate in bits per second.
func (s Speed) GetBitSize() float64 {
	return float64(s)
}

// GreaterThan reports whether s is strictly faster than s2.
func (s Speed) GreaterThan(s2 Speed) bool {
	return s > s2
}

// GreaterThanOrEqual reports whether s is at least as fast as s2.
func (s Speed) GreaterThanOrEqual(s2 Speed) bool {
	return s >= s2
}

// LessThan reports whether s is strictly slower than s2.
func (s Speed) LessThan(s2 Speed) bool {
	return s < s2
}

// LessThanOrEqual reports whether s is at most as fast as s2.
func (s Speed) LessThanOrEqual(s2 Speed) bool {
	return s <= s2
}

// Equals reports whether the two rates are exactly equal.
func (s Speed) Equals(s2 Speed) bool {
	return s == s2
}
// String implements fmt.Stringer using bit-based units.
func (s Speed) String() string {
	return s.StringWithBitUnit()
}
// MarshalJSON encodes the speed as a quoted human-readable string, e.g. "2.00Mb".
func (s Speed) MarshalJSON() ([]byte, error) {
	return []byte(`"` + s.String() + `"`), nil
}
// UnmarshalJSON decodes a quoted speed string (e.g. "2Mb") via ParseString.
func (s *Speed) UnmarshalJSON(b []byte) error {
	str := string(b)
	if len(str) <= 1 {
		// BUG FIX: report the offending input (str), not the receiver s —
		// formatting the *Speed printed the (garbage) numeric value instead
		// of the invalid JSON text.
		return gerrors.Errorf("invalid json speed '%s'", str)
	}
	if str[0] != '"' || str[len(str)-1] != '"' {
		return gerrors.Errorf("invalid json speed '%s'", str)
	}
	// Strip the surrounding quotes before parsing.
	str = gstring.RemoveHead(str, 1)
	str = gstring.RemoveTail(str, 1)
	speed, err := ParseString(str)
	if err != nil {
		return err
	}
	*s = speed
	return nil
}
// StringAuto renders the speed with the largest unit that fits, preferring
// byte units over bit units of the same magnitude.
// The case order matters: each byte unit (e.g. KB = 8*1024 bits) is checked
// before its bit counterpart (Kb = 1024 bits) because it is 8x larger.
func (s *Speed) StringAuto() string {
	b := *s
	switch {
	case b >= YB:
		return fmt.Sprintf("%.2fYB", b/YB)
	case b >= Yb:
		return fmt.Sprintf("%.2fYb", b/Yb)
	case b >= ZB:
		return fmt.Sprintf("%.2fZB", b/ZB)
	case b >= Zb:
		return fmt.Sprintf("%.2fZb", b/Zb)
	case b >= EB:
		return fmt.Sprintf("%.2fEB", b/EB)
	case b >= Eb:
		return fmt.Sprintf("%.2fEb", b/Eb)
	case b >= PB:
		return fmt.Sprintf("%.2fPB", b/PB)
	case b >= Pb:
		return fmt.Sprintf("%.2fPb", b/Pb)
	case b >= TB:
		return fmt.Sprintf("%.2fTB", b/TB)
	case b >= Tb:
		return fmt.Sprintf("%.2fTb", b/Tb)
	case b >= GB:
		return fmt.Sprintf("%.2fGB", b/GB)
	case b >= Gb:
		return fmt.Sprintf("%.2fGb", b/Gb)
	case b >= MB:
		return fmt.Sprintf("%.2fMB", b/MB)
	case b >= Mb:
		return fmt.Sprintf("%.2fMb", b/Mb)
	case b >= KB:
		return fmt.Sprintf("%.2fKB", b/KB)
	case b >= Kb:
		return fmt.Sprintf("%.2fKb", b/Kb)
	}
	// Less than 1 Kb: fall back to raw bits.
	return fmt.Sprintf("%.2fB", b)
}
// StringWithByteUnit renders the speed using byte-based units (KB..YB),
// choosing the largest unit that fits.
func (s *Speed) StringWithByteUnit() string {
	b := *s
	units := []struct {
		scale  Speed
		suffix string
	}{
		{YB, "YB"}, {ZB, "ZB"}, {EB, "EB"}, {PB, "PB"},
		{TB, "TB"}, {GB, "GB"}, {MB, "MB"}, {KB, "KB"},
	}
	for _, u := range units {
		if b >= u.scale {
			return fmt.Sprintf("%.2f%s", b/u.scale, u.suffix)
		}
	}
	return fmt.Sprintf("%.2fB", b)
}
// StringWithBitUnit renders the speed using bit-based units (Kb..Yb),
// choosing the largest unit that fits.
func (s *Speed) StringWithBitUnit() string {
	b := *s
	units := []struct {
		scale  Speed
		suffix string
	}{
		{Yb, "Yb"}, {Zb, "Zb"}, {Eb, "Eb"}, {Pb, "Pb"},
		{Tb, "Tb"}, {Gb, "Gb"}, {Mb, "Mb"}, {Kb, "Kb"},
	}
	for _, u := range units {
		if b >= u.scale {
			return fmt.Sprintf("%.2f%s", b/u.scale, u.suffix)
		}
	}
	return fmt.Sprintf("%.2fb", b)
}
// speed string sample: "2M" "2Mb" "2Mbit" "2Mbits" "2Mbyte" "2Mbytes" "2 Mb" "*/s" "*ps"
func ParseString(speed string) (Speed, error) {
s := strings.TrimSpace(speed)
s = strings.Replace(s, " ", "", -1) // Remove space in middle
if len(s) == 0 {
return Speed(0), gerrors.New("speed string \"" + speed + "\" is empty")
}
// Parse number from head
nonDigitPos := -1
for i, v := range s {
if !unicode.IsDigit(v) && v != '.' {
nonDigitPos = i
break
}
}
if nonDigitPos < 1 {
return Speed(0), gerrors.New("speed string \"" + speed + "\" lack of speed unit part")
}
numberStr := s[:nonDigitPos] // NOTICE: end pos char is not included in return string
number, err := strconv.ParseFloat(numberStr, 64)
if err != nil {
return Speed(0), err
}
// Parse unit from tail
unitStr := s[len(numberStr):]
// Remove per second string from tail
if gstring.EndWith(strings.ToLower(unitStr), "/s") || gstring.EndWith(strings.ToLower(unitStr), "ps") {
unitStr = gstring.RemoveTail(unitStr, 2)
}
if len(unitStr) == 0 {
return Speed(0), gerrors.New("speed string \"" + speed + "\" has no unit")
}
// Parse "k" "m" "g" "t"...
unitA := unitStr[0:1] // NOTICE: end pos char is not included in return string
unitA = strings.ToLower(unitA)
// Parse "b" "B" "bit" "byte" "bits" "bytes"
var unitB string
if len(unitStr) == 1 { // such like "2M", unit string is "M", its length is 1
unitB = "bit"
} else if len(unitStr) > 1 {
unitStr = unitStr[1:]
if unitStr == "B" || strings.ToLower(unitStr) == "byte" || strings.ToLower(unitStr) == "bytes" {
unitB = "byte"
} else if unitStr == "b" || strings.ToLower(unitStr) == "bit" || strings.ToLower(unitStr) == "bits" {
unitB = "bit"
} else {
return Speed(0), gerrors.New("speed string \"" + speed + "\" unit syntax error")
}
}
// Calculate speed
switch unitA {
case "k":
if unitB == "byte" {
return FromBits(number * KB.GetBitSize())
} else {
return FromBits(number * Kb.GetBitSize())
}
case "m":
if unitB == "byte" {
return FromBits(number * MB.GetBitSize())
} else {
return FromBits(number * Mb.GetBitSize())
}
case "g":
if unitB == "byte" {
return FromBits(number * GB.GetBitSize())
} else {
return FromBits(number * Gb.GetBitSize())
}
case "t":
if unitB == "byte" {
return FromBits(number * TB.GetBitSize())
} else {
return FromBits(number * Tb.GetBitSize())
}
case "p":
if unitB == "byte" {
return FromBits(number * PB.GetBitSize())
} else {
return FromBits(number * Pb.GetBitSize())
}
case "e":
if unitB == "byte" {
return FromBits(number * EB.GetBitSize())
} else {
return FromBits(number * Eb.GetBitSize())
}
case "z":
if unitB == "byte" {
return FromBits(number * ZB.GetBitSize())
} else {
return FromBits(number * Zb.GetBitSize())
}
case "y":
if unitB == "byte" {
return FromBits(number * YB.GetBitSize())
} else {
return FromBits(number * Yb.GetBitSize())
}
default:
return Speed(0), gerrors.New("speed string \"" + speed + "\" is invalid speed string")
}
} | container/gspeed/speed.go | 0.528533 | 0.473657 | speed.go | starcoder |
// Package diff implements the difference algorithm, which is based upon
// <NAME>, <NAME>, <NAME>, and <NAME>,
// "An O(NP) Sequence Comparison Algorithm" August 1989.
package diff
// Interface abstracts over the two sequences being compared.
type Interface interface {
	// Equal returns whether the elements at i and j are equal.
	Equal(i, j int) bool
}
// Bytes returns the differences between byte slices.
func Bytes(a, b []byte) []Change {
	return Diff(len(a), len(b), &bytes{a, b})
}

// bytes adapts a pair of byte slices to Interface.
type bytes struct {
	A, B []byte
}

func (p *bytes) Equal(i, j int) bool { return p.A[i] == p.B[j] }

// Ints returns the differences between int slices.
func Ints(a, b []int) []Change {
	return Diff(len(a), len(b), &ints{a, b})
}

// ints adapts a pair of int slices to Interface.
type ints struct {
	A, B []int
}

func (p *ints) Equal(i, j int) bool { return p.A[i] == p.B[j] }

// Runes returns the differences between rune slices.
func Runes(a, b []rune) []Change {
	return Diff(len(a), len(b), &runes{a, b})
}

// runes adapts a pair of rune slices to Interface.
type runes struct {
	A, B []rune
}

func (p *runes) Equal(i, j int) bool { return p.A[i] == p.B[j] }

// Strings returns the differences between string slices.
func Strings(a, b []string) []Change {
	return Diff(len(a), len(b), &strings{a, b})
}

// strings adapts a pair of string slices to Interface.
// (The type name shadows the standard strings package inside this file.)
type strings struct {
	A, B []string
}

func (p *strings) Equal(i, j int) bool { return p.A[i] == p.B[j] }
// Diff returns the differences between data.
// It makes O(NP) (the worst case) calls to data.Equal.
func Diff(m, n int, data Interface) []Change {
	c := &context{data: data, M: m, N: n}
	if m > n {
		// The algorithm requires M <= N; xchg records that the inputs were
		// swapped so results can be mapped back.
		c.M, c.N = n, m
		c.xchg = true
	}
	c.Δ = c.N - c.M
	return c.compare()
}
// Change describes one difference hunk: Del elements removed from a at
// position A, and Ins elements inserted into b at position B.
type Change struct {
	A, B int // position in a and b
	Del int // number of elements that deleted from a
	Ins int // number of elements that inserted into b
}
// context carries the state of one diff computation.
type context struct {
	data Interface
	M, N int // M <= N: lengths of the shorter and longer sequence
	Δ int // N - M: index offset of the target diagonal
	fp []point // furthest-reaching point per diagonal
	xchg bool // true when the inputs were swapped so that M <= N
}
// compare runs the O(NP) furthest-reaching-point search and converts the
// resulting chain of common runs (lcs) into a list of Changes.
func (c *context) compare() []Change {
	// One slot per diagonal k in [-(M+1), N+1], offset by (M+1) so negative
	// diagonals index into the slice; y = -1 marks "not reached yet".
	c.fp = make([]point, (c.M+1)+(c.N+1)+1)
	for i := range c.fp {
		c.fp[i].y = -1
	}
	Δ := c.Δ + (c.M + 1) // slice index of the target diagonal
	// Grow p (the deletion count) until the target diagonal reaches y == N.
	// Diagonals are processed outside-in toward Δ, per the Wu et al. paper.
	for p := 0; c.fp[Δ].y != c.N; p++ {
		for k := -p; k < c.Δ; k++ {
			c.snake(k)
		}
		for k := c.Δ + p; k > c.Δ; k-- {
			c.snake(k)
		}
		c.snake(c.Δ)
	}
	// The lcs chain was built back-to-front during the search; reverse it.
	lcs, n := c.reverse(c.fp[Δ].lcs)
	cl := make([]Change, 0, n+1)
	var x, y int
	for ; lcs != nil; lcs = lcs.next {
		if x < lcs.x || y < lcs.y {
			// Gap before this common run becomes a change; swap the a/b roles
			// back if the inputs were exchanged in Diff.
			if !c.xchg {
				cl = append(cl, Change{x, y, lcs.x - x, lcs.y - y})
			} else {
				cl = append(cl, Change{y, x, lcs.y - y, lcs.x - x})
			}
		}
		x = lcs.x + lcs.n
		y = lcs.y + lcs.n
	}
	// Trailing gap after the last common run.
	if x < c.M || y < c.N {
		if !c.xchg {
			cl = append(cl, Change{x, y, c.M - x, c.N - y})
		} else {
			cl = append(cl, Change{y, x, c.N - y, c.M - x})
		}
	}
	return cl
}
// snake advances the furthest-reaching point on diagonal k: it steps from the
// better of the two neighboring diagonals, then slides along the diagonal
// while elements are equal, recording any non-empty common run.
func (c *context) snake(k int) {
	var y int
	var prev *lcs
	kk := k + (c.M + 1) // slice index for diagonal k
	h := &c.fp[kk-1]    // neighbor reached via a horizontal step
	v := &c.fp[kk+1]    // neighbor reached via a vertical step
	if h.y+1 >= v.y {
		y = h.y + 1
		prev = h.lcs
	} else {
		y = v.y
		prev = v.lcs
	}
	x := y - k
	n := 0
	// Slide while the sequences match; honor xchg by swapping the arguments
	// passed to Equal (the caller's a/b were exchanged in Diff).
	for x < c.M && y < c.N {
		var eq bool
		if !c.xchg {
			eq = c.data.Equal(x, y)
		} else {
			eq = c.data.Equal(y, x)
		}
		if !eq {
			break
		}
		x++
		y++
		n++
	}
	p := &c.fp[kk]
	p.y = y
	if n == 0 {
		p.lcs = prev
	} else {
		// Prepend this common run; the chain is reversed later in compare.
		p.lcs = &lcs{
			x: x - n,
			y: y - n,
			n: n,
			next: prev,
		}
	}
}
// reverse reverses the singly linked lcs chain in place and returns the new
// head together with the number of nodes.
func (c *context) reverse(curr *lcs) (*lcs, int) {
	var head *lcs
	count := 0
	for curr != nil {
		rest := curr.next
		curr.next = head
		head = curr
		curr = rest
		count++
	}
	return head, count
}
// point records, for one diagonal, the furthest y reached and the lcs chain
// that led there.
type point struct {
	y int
	lcs *lcs
}

// lcs is one node of a linked chain of common runs: n equal elements
// starting at a[x] and b[y].
type lcs struct {
	x, y int
	n int
	next *lcs
}
package core
import (
"context"
"fmt"
"github.com/kmgreen2/agglo/internal/common"
"github.com/kmgreen2/agglo/pkg/util"
"reflect"
"regexp"
)
// Transformable wraps an arbitrary value flowing through a transformation
// pipeline.
type Transformable struct {
	value interface{}
}

// NewTransformable wraps value in a Transformable.
func NewTransformable(value interface{}) *Transformable {
	return &Transformable{value}
}

// Kind reports the reflect.Kind of the wrapped value.
// NOTE(review): panics when value is nil (reflect.TypeOf(nil) returns nil) —
// confirm callers never wrap a nil value.
func (t Transformable) Kind() reflect.Kind {
	return reflect.TypeOf(t.value).Kind()
}

// Value returns the wrapped value unchanged.
func (t Transformable) Value() interface{} {
	return t.value
}

// Copy deep-copies slice and map values; other kinds are copied shallowly.
// NOTE(review): assumes slices are []interface{} and maps are
// map[string]interface{} — other slice/map types would panic on the type
// assertion; confirm pipeline inputs are normalized to those types.
func (t Transformable) Copy() *Transformable {
	if t.Kind() == reflect.Slice {
		slice := t.Value().([]interface{})
		outSlice := util.CopyableSlice(slice).DeepCopy()
		return &Transformable{outSlice}
	} else if t.Kind() == reflect.Map {
		m := t.Value().(map[string]interface{})
		outMap := util.CopyableMap(m).DeepCopy()
		return &Transformable{outMap}
	} else {
		return &Transformable{t.Value()}
	}
}
// FieldTransformation transforms one Transformable into another.
type FieldTransformation interface {
	Transform(in *Transformable) (*Transformable, error)
}
// PopHeadTransformation extracts the first element of a slice value.
// An empty slice yields a Transformable wrapping nil.
type PopHeadTransformation struct{}

// Transform returns the head of the wrapped slice, or an error for
// non-slice values.
func (t PopHeadTransformation) Transform(in *Transformable) (*Transformable, error) {
	// Fix: the original wrapped this switch in a redundant Kind() == Slice
	// check and then duplicated the identical error return after it; the
	// type switch alone covers both paths with the same messages.
	switch v := in.Value().(type) {
	case []interface{}:
		if len(v) > 0 {
			return &Transformable{v[0]}, nil
		}
		return &Transformable{nil}, nil
	case []map[string]interface{}:
		if len(v) > 0 {
			return &Transformable{v[0]}, nil
		}
		return &Transformable{nil}, nil
	default:
		return nil, fmt.Errorf("Pop transformation only compatible with arrays, not %v", in.Kind())
	}
}
// PopTailTransformation extracts the last element of a slice value.
// An empty slice yields a Transformable wrapping nil.
type PopTailTransformation struct{}

// Transform returns the tail of the wrapped slice, or an error for
// non-slice values.
func (t PopTailTransformation) Transform(in *Transformable) (*Transformable, error) {
	// Fix: same de-duplication as PopHeadTransformation — the outer Kind()
	// check and the trailing duplicate error return were redundant with the
	// switch's default case.
	switch v := in.Value().(type) {
	case []interface{}:
		if len(v) > 0 {
			return &Transformable{v[len(v)-1]}, nil
		}
		return &Transformable{nil}, nil
	case []map[string]interface{}:
		if len(v) > 0 {
			return &Transformable{v[len(v)-1]}, nil
		}
		return &Transformable{nil}, nil
	default:
		return nil, fmt.Errorf("Pop transformation only compatible with arrays, not %v", in.Kind())
	}
}
// MapTransformation applies MapFunc element-wise to the wrapped value.
type MapTransformation struct {
	MapFunc func(interface{}) (interface{}, error)
}
// NewExecMapTransformation returns a MapTransformation whose map function
// shells out to the executable at path with cmdArgs, feeding each element as
// input and returning the process output.
// NOTE(review): a single execRunnable is captured and shared across all
// invocations — confirm util.ExecRunnable is safe for repeated (and possibly
// concurrent) SetInData/Run cycles.
func NewExecMapTransformation(path string, cmdArgs ...string) *MapTransformation {
	execRunnable := util.NewExecRunnable(util.WithPath(path), util.WithCmdArgs(cmdArgs...))
	return &MapTransformation{
		func(in interface{}) (interface{}, error) {
			err := execRunnable.SetInData(in)
			if err != nil {
				return nil, err
			}
			return execRunnable.Run(context.Background())
		},
	}
}
// Transform applies MapFunc over slice elements, over map values, or directly
// to a scalar value, stopping at the first error.
func (t MapTransformation) Transform(in *Transformable) (*Transformable, error) {
	switch in.Kind() {
	case reflect.Slice:
		items := in.Value().([]interface{})
		var result []interface{}
		for _, item := range items {
			mapped, err := t.MapFunc(item)
			if err != nil {
				return nil, err
			}
			result = append(result, mapped)
		}
		return &Transformable{result}, nil
	case reflect.Map:
		items := in.Value().(map[string]interface{})
		result := make(map[string]interface{})
		for key, item := range items {
			mapped, err := t.MapFunc(item)
			if err != nil {
				return nil, err
			}
			result[key] = mapped
		}
		return &Transformable{result}, nil
	default:
		mapped, err := t.MapFunc(in.Value())
		if err != nil {
			return nil, err
		}
		return &Transformable{mapped}, nil
	}
}
// LeftFoldTransformation folds a slice left-to-right with FoldFunc.
type LeftFoldTransformation struct {
	FoldFunc func(acc, v interface{}) (interface{}, error)
}
// NewExecLeftFoldTransformation returns a left fold whose accumulator step
// shells out to the executable at path. The current accumulator is injected
// into each map element under the internal accumulator key before exec'ing.
// NOTE(review): the single execRunnable is shared across invocations —
// confirm util.ExecRunnable supports repeated SetInData/Run cycles.
func NewExecLeftFoldTransformation(path string) *LeftFoldTransformation {
	execRunnable := util.NewExecRunnable(util.WithPath(path))
	return &LeftFoldTransformation{
		func(acc, in interface{}) (interface{}, error) {
			switch val := in.(type) {
			case map[string]interface{}:
				if err := common.SetUsingInternalKey(common.AccumulatorKey, acc, val, false); err != nil {
					return nil, util.NewInvalidError(err.Error())
				}
			default:
				msg := fmt.Sprintf("expected map[string]interface{} argument to fold. Got %v",
					reflect.TypeOf(val))
				return nil, util.NewInvalidError(msg)
			}
			err := execRunnable.SetInData(in)
			if err != nil {
				return nil, err
			}
			return execRunnable.Run(context.Background())
		},
	}
}
// Transform folds the wrapped slice left-to-right, starting from a nil
// accumulator; non-slice input is rejected.
func (t LeftFoldTransformation) Transform(in *Transformable) (*Transformable, error) {
	if in.Kind() != reflect.Slice {
		// Fix: the original returned fmt.Errorf("") — an empty, useless
		// error message.
		return nil, fmt.Errorf("left fold transformation only compatible with arrays, not %v", in.Kind())
	}
	slice := in.Value().([]interface{})
	var acc interface{}
	var err error
	for _, v := range slice {
		acc, err = t.FoldFunc(acc, v)
		if err != nil {
			return nil, err
		}
	}
	return &Transformable{acc}, nil
}
// RightFoldTransformation folds a slice right-to-left with FoldFunc.
type RightFoldTransformation struct {
	FoldFunc func(acc, v interface{}) (interface{}, error)
}
// NewExecRightFoldTransformation returns a right fold whose accumulator step
// shells out to the executable at path. The current accumulator is injected
// into each map element under the internal accumulator key before exec'ing.
// NOTE(review): the single execRunnable is shared across invocations —
// confirm util.ExecRunnable supports repeated SetInData/Run cycles.
func NewExecRightFoldTransformation(path string) *RightFoldTransformation {
	execRunnable := util.NewExecRunnable(util.WithPath(path))
	return &RightFoldTransformation{
		func(acc, in interface{}) (interface{}, error) {
			switch val := in.(type) {
			case map[string]interface{}:
				if err := common.SetUsingInternalKey(common.AccumulatorKey, acc, val, false); err != nil {
					return nil, util.NewInvalidError(err.Error())
				}
			default:
				msg := fmt.Sprintf("expected map[string]interface{} argument to fold. Got %v",
					reflect.TypeOf(val))
				return nil, util.NewInvalidError(msg)
			}
			err := execRunnable.SetInData(in)
			if err != nil {
				return nil, err
			}
			return execRunnable.Run(context.Background())
		},
	}
}
// Transform folds the wrapped slice right-to-left, starting from a nil
// accumulator; non-slice input is rejected.
func (t RightFoldTransformation) Transform(in *Transformable) (*Transformable, error) {
	if in.Kind() != reflect.Slice {
		// Fix: the original returned fmt.Errorf("") — an empty, useless
		// error message.
		return nil, fmt.Errorf("right fold transformation only compatible with arrays, not %v", in.Kind())
	}
	slice := in.Value().([]interface{})
	var acc interface{}
	var err error
	// Idiom fix: `for i := range` instead of `for i, _ := range`.
	for i := range slice {
		acc, err = t.FoldFunc(acc, slice[len(slice)-i-1])
		if err != nil {
			return nil, err
		}
	}
	return &Transformable{acc}, nil
}
// CopyTransformation returns a (deep, for slices/maps) copy of the input.
type CopyTransformation struct {}
// Transform delegates to Transformable.Copy.
func (t CopyTransformation) Transform(in *Transformable) (*Transformable, error) {
	return in.Copy(), nil
}
// SumTransformation sums the numeric elements of a slice into a float64.
type SumTransformation struct{}

// Transform returns the float64 sum of the wrapped slice's elements; it
// rejects non-slice input and non-numeric elements.
func (t SumTransformation) Transform(in *Transformable) (*Transformable, error) {
	if in.Kind() != reflect.Slice {
		// Fix: the original returned fmt.Errorf("") — empty error messages
		// made failures undiagnosable.
		return nil, fmt.Errorf("sum transformation only compatible with arrays, not %v", in.Kind())
	}
	list := in.Value().([]interface{})
	sum := float64(0)
	for _, elm := range list {
		x, err := util.GetNumeric(elm)
		if err != nil {
			return nil, fmt.Errorf("sum transformation requires numeric elements: %v", err)
		}
		sum += x
	}
	return &Transformable{sum}, nil
}
/*
 * Fold helpers
 */

// foldMinFunc is a fold step that tracks the numeric minimum seen so far.
// A nil accumulator (first element) is initialized to the element itself.
func foldMinFunc(acc, v interface{}) (interface{}, error) {
	if acc == nil {
		return v, nil
	}
	accVal, vVal, err := util.NumericResolver(acc, v)
	if err != nil {
		return 0, err
	}
	if vVal < accVal {
		return vVal, nil
	}
	return accVal, nil
}
// foldMaxFunc is a fold step that tracks the numeric maximum seen so far.
// A nil accumulator (first element) is initialized to the element itself.
func foldMaxFunc(acc, v interface{}) (interface{}, error) {
	if acc == nil {
		return v, nil
	}
	accVal, vVal, err := util.NumericResolver(acc, v)
	if err != nil {
		return 0, err
	}
	if vVal > accVal {
		return vVal, nil
	}
	return accVal, nil
}
// foldCountFunc builds a fold step that counts elements satisfying matcher.
func foldCountFunc(matcher func(interface{}) bool) func(acc, v interface{}) (interface{}, error) {
	return func(acc, v interface{}) (interface{}, error) {
		if acc == nil {
			// First element: seed the count with 0 or 1.
			if matcher(v) {
				return float64(1), nil
			}
			return 0, nil
		}
		accVal, err := util.GetNumeric(acc)
		if err != nil {
			return 0, err
		}
		if matcher(v) {
			accVal++
		}
		return accVal, nil
	}
}
// The fold functions are mostly for illustration.
// ToDo: Need to figure out the best way to serialize
// and generalize the matcher functions, so more useful
// folds can be done

// LeftFoldMin reduces a slice to its numeric minimum, left to right.
var LeftFoldMin = &LeftFoldTransformation{
	foldMinFunc,
}

// RightFoldMin reduces a slice to its numeric minimum, right to left.
var RightFoldMin = &RightFoldTransformation{
	foldMinFunc,
}

// LeftFoldMax reduces a slice to its numeric maximum, left to right.
var LeftFoldMax = &LeftFoldTransformation{
	foldMaxFunc,
}

// RightFoldMax reduces a slice to its numeric maximum, right to left.
var RightFoldMax = &RightFoldTransformation{
	foldMaxFunc,
}

// LeftFoldCountAll counts all elements of a slice, left to right.
var LeftFoldCountAll = &LeftFoldTransformation{
	foldCountFunc(func(x interface{}) bool {
		return true
	}),
}

// RightFoldCountAll counts all elements of a slice, right to left.
var RightFoldCountAll = &RightFoldTransformation{
	foldCountFunc(func(x interface{}) bool {
		return true
	}),
}
/*
 * Map helpers
 */

// mapApplyRegex returns a map function that replaces all matches of regex in
// a string value with replace; non-string values are rejected.
func mapApplyRegex(regex string, replace string) func(interface{}) (interface{}, error) {
	// Not ideal, but we want to compile the regex and include in the returned function
	// This means that an invalid regex will lead to every call in map to return an error
	re, err := regexp.Compile(regex)
	return func(v interface{}) (interface{}, error) {
		var source string
		if err != nil {
			// Surface the compile error on every invocation (see note above).
			return nil, err
		}
		switch val := v.(type) {
		case string:
			source = val
		default:
			msg := fmt.Sprintf("expected string for regex source, got %v", reflect.TypeOf(v))
			return nil, util.NewInvalidError(msg)
		}
		result := string(re.ReplaceAll([]byte(source), []byte(replace)))
		return result, nil
	}
}
// mapAddConstant returns a map function that adds the numeric constant x to
// each numeric value.
func mapAddConstant(x interface{}) func(interface{}) (interface{}, error) {
	return func(v interface{}) (interface{}, error) {
		xVal, vVal, err := util.NumericResolver(x, v)
		if err != nil {
			return 0, err
		}
		return xVal + vVal, nil
	}
}
// mapMultConstant returns a map function that multiplies each numeric value
// by the numeric constant x.
func mapMultConstant(x interface{}) func(interface{}) (interface{}, error) {
	return func(v interface{}) (interface{}, error) {
		xVal, vVal, err := util.NumericResolver(x, v)
		if err != nil {
			return 0, err
		}
		return xVal * vVal, nil
	}
}
// MapApplyRegex returns a FieldTransformation that regex-replaces string values.
func MapApplyRegex(regex string, replace string) FieldTransformation {
	return &MapTransformation{
		mapApplyRegex(regex, replace),
	}
}

// MapAddConstant returns a FieldTransformation that adds x to numeric values.
func MapAddConstant(x float64) FieldTransformation {
	return &MapTransformation{
		mapAddConstant(x),
	}
}

// MapMultConstant returns a FieldTransformation that multiplies numeric values by x.
func MapMultConstant(x float64) FieldTransformation {
	return &MapTransformation{
		mapMultConstant(x),
	}
}
// Transformation is an ordered chain of FieldTransformations guarded by a
// Condition.
type Transformation struct {
	transformers []FieldTransformation
	condition *Condition
}

// Transform pipes in through each transformer in order, stopping at the
// first error.
func (t *Transformation) Transform(in *Transformable) (*Transformable, error) {
	var err error
	curr := in
	for _, transformer := range t.transformers {
		curr, err = transformer.Transform(curr)
		if err != nil {
			return nil, err
		}
	}
	return curr, nil
}

// ShouldTransform evaluates the guard condition against the input map.
func (t *Transformation) ShouldTransform(in map[string]interface{}) (bool, error) {
	return t.condition.Evaluate(in)
}

// NewTransformation builds a Transformation; a nil condition defaults to
// TrueCondition (always transform).
func NewTransformation(transformers []FieldTransformation, condition *Condition) *Transformation {
	if condition == nil {
		condition = TrueCondition
	}
	return &Transformation{
		transformers,
		condition,
	}
}
// TransformationBuilder incrementally assembles a Transformation.
type TransformationBuilder struct {
	transformation *Transformation
}

// NewTransformationBuilder returns a builder whose condition defaults to
// TrueCondition.
func NewTransformationBuilder() *TransformationBuilder {
	return &TransformationBuilder{
		&Transformation{
			condition: TrueCondition,
		},
	}
}

// AddFieldTransformation appends a transformation step and returns the
// builder for chaining.
func (t *TransformationBuilder) AddFieldTransformation(transformation FieldTransformation) *TransformationBuilder {
	t.transformation.transformers = append(t.transformation.transformers, transformation)
	return t
}

// AddCondition replaces the guard condition and returns the builder for
// chaining.
func (t *TransformationBuilder) AddCondition(condition *Condition) *TransformationBuilder {
	t.transformation.condition = condition
	return t
}

// Get returns the assembled Transformation.
func (t *TransformationBuilder) Get() *Transformation {
	return t.transformation
}
package qrencode
import (
"bytes"
"image"
"image/color"
)
/*BitVector*/

// BitVector is a growable sequence of bits backed by boolBitVector.
type BitVector struct {
	boolBitVector
}

// AppendBits appends all bits of b to v.
func (v *BitVector) AppendBits(b BitVector) {
	v.boolBitVector.AppendBits(b.boolBitVector)
}
// String renders the vector as 'X' for set bits and '.' for unset bits.
func (v *BitVector) String() string {
	var b bytes.Buffer
	for i := 0; i < v.Length(); i++ {
		if v.Get(i) {
			b.WriteByte('X')
		} else {
			b.WriteByte('.')
		}
	}
	return b.String()
}
/*BitGrid*/

// BitGrid is a two-dimensional grid of tri-state cells (empty/true/false)
// backed by boolBitGrid.
type BitGrid struct {
	boolBitGrid
}

// NewBitGrid returns an all-empty grid of the given dimensions.
func NewBitGrid(width, height int) *BitGrid {
	return &BitGrid{newBoolBitGrid(width, height)}
}
// String renders one row per line: ' ' for unset cells, '1' for true, '0' for false.
func (g *BitGrid) String() string {
	var b bytes.Buffer
	for y := 0; y < g.Height(); y++ {
		for x := 0; x < g.Width(); x++ {
			switch {
			case g.Empty(x, y):
				b.WriteByte(' ')
			case g.Get(x, y):
				b.WriteByte('1')
			default:
				b.WriteByte('0')
			}
		}
		b.WriteByte('\n')
	}
	return b.String()
}
// Return an image of the grid, with black blocks for true items and
// white blocks for false items, with the given block size and a
// default margin.
func (g *BitGrid) Image(blockSize int) image.Image {
	// 4 modules is the quiet-zone margin used by default.
	return g.ImageWithMargin(blockSize, 4)
}
// Return an image of the grid, with black blocks for true items and
// white blocks for false items, with the given block size and margin.
func (g *BitGrid) ImageWithMargin(blockSize, margin int) image.Image {
	width := blockSize * (2*margin + g.Width())
	height := blockSize * (2*margin + g.Height())
	i := image.NewGray16(image.Rect(0, 0, width, height))
	// Simplification: paint the whole canvas white once, then overdraw the
	// black blocks. The original painted the four margins and every cell
	// separately; every pixel was covered either way, so the result is
	// identical with less duplicated loop code.
	for y := 0; y < height; y++ {
		for x := 0; x < width; x++ {
			i.Set(x, y, color.White)
		}
	}
	for y, w, h := 0, g.Width(), g.Height(); y < h; y++ {
		for x := 0; x < w; x++ {
			if !g.Get(x, y) {
				continue
			}
			// Top-left pixel of this cell's block, offset by the margin.
			x0 := blockSize * (x + margin)
			y0 := blockSize * (y + margin)
			for dy := 0; dy < blockSize; dy++ {
				for dx := 0; dx < blockSize; dx++ {
					i.Set(x0+dx, y0+dy, color.Black)
				}
			}
		}
	}
	return i
}
/*boolBitVector*/

// boolBitVector stores a bit sequence as a slice of bools, one bool per bit.
type boolBitVector struct {
	bits []bool
}

// Length reports the number of bits currently held.
func (v *boolBitVector) Length() int {
	return len(v.bits)
}

// Get returns the bit at index i (panics when i is out of range).
func (v *boolBitVector) Get(i int) bool {
	return v.bits[i]
}

// AppendBit appends one bit to the end of the vector.
func (v *boolBitVector) AppendBit(b bool) {
	v.bits = append(v.bits, b)
}

// Append appends the low count bits of b, most significant bit first.
func (v *boolBitVector) Append(b, count int) {
	for n := uint(count); n > 0; n-- {
		v.AppendBit((b>>(n-1))&1 == 1)
	}
}

// AppendBits appends every bit of b, preserving order.
func (v *boolBitVector) AppendBits(b boolBitVector) {
	v.bits = append(v.bits, b.bits...)
}
/*boolBitGrid*/

// boolBitGrid stores a width×height grid of tri-state cells. Each cell
// occupies two bools: a "written" flag followed by the stored value,
// so a never-written cell (Empty) is distinguishable from false.
type boolBitGrid struct {
	width, height int
	bits          []bool
}

// newBoolBitGrid allocates a grid with every cell in the empty state.
func newBoolBitGrid(width, height int) boolBitGrid {
	return boolBitGrid{
		width:  width,
		height: height,
		bits:   make([]bool, 2*width*height),
	}
}

// Width reports the number of columns.
func (g *boolBitGrid) Width() int {
	return g.width
}

// Height reports the number of rows.
func (g *boolBitGrid) Height() int {
	return g.height
}

// cell returns the index of the "written" flag for (x, y);
// the value bit lives at cell(x, y)+1.
func (g *boolBitGrid) cell(x, y int) int {
	return 2 * (x + y*g.width)
}

// Empty reports whether (x, y) has never been written.
func (g *boolBitGrid) Empty(x, y int) bool {
	return !g.bits[g.cell(x, y)]
}

// Get returns the value most recently stored at (x, y).
func (g *boolBitGrid) Get(x, y int) bool {
	return g.bits[g.cell(x, y)+1]
}

// Set stores v at (x, y) and marks the cell as written.
func (g *boolBitGrid) Set(x, y int, v bool) {
	idx := g.cell(x, y)
	g.bits[idx] = true
	g.bits[idx+1] = v
}
// Clear resets every cell back to the empty (never written) state.
func (g *boolBitGrid) Clear() {
	// `for i := range` replaces the redundant `for i, _ := range`
	// (flagged by gofmt/staticcheck S1005).
	for i := range g.bits {
		g.bits[i] = false
	}
} | tools/qrencode/bits.go | 0.68742 | 0.55935 | bits.go | starcoder
package sqlite
import (
"bytes"
"database/sql/driver"
"fmt"
"time"
)
const (
	julianDay    = 2440587.5    // 1970-01-01 00:00:00 UTC expressed as a julian day number
	dayInSeconds = 60 * 60 * 24 // seconds per day
)

// JulianDayToUTC transforms a julian day number into an UTC Time.
// Sub-second precision is truncated.
func JulianDayToUTC(jd float64) time.Time {
	seconds := (jd - julianDay) * dayInSeconds
	return time.Unix(int64(seconds), 0).UTC()
}

// JulianDayToLocalTime transforms a julian day number into a Time in
// the local time zone. Sub-second precision is truncated.
func JulianDayToLocalTime(jd float64) time.Time {
	seconds := (jd - julianDay) * dayInSeconds
	return time.Unix(int64(seconds), 0)
}

// JulianDay converts a Time into a julian day number.
// NOTE(review): half a second is added to non-negative timestamps,
// presumably so the truncation in JulianDayTo* rounds to the nearest
// second — confirm before changing.
func JulianDay(t time.Time) float64 {
	seconds := float64(t.Unix())
	if seconds >= 0 {
		seconds += 0.5
	}
	return seconds/dayInSeconds + julianDay
}
// UnixTime is an alias used to persist time as int64 (max precision is 1s and timezone is lost)
type UnixTime struct {
	time.Time
}

// Scan implements the database/sql/Scanner interface.
// nil yields the zero time; int64 is interpreted as unix seconds.
func (t *UnixTime) Scan(src interface{}) error {
	switch v := src.(type) {
	case nil:
		t.Time = time.Time{}
		return nil
	case int64:
		t.Time = time.Unix(v, 0) // local time
		return nil
	}
	return fmt.Errorf("unsupported UnixTime src: %T, %v", src, src)
}

// Value implements the database/sql/driver/Valuer interface.
// The zero time is stored as NULL, anything else as unix seconds.
func (t UnixTime) Value() (driver.Value, error) {
	if t.IsZero() {
		return nil, nil
	}
	return t.Unix(), nil
}
// JulianTime is an alias used to persist time as float64 (max precision is 1s and timezone is lost)
type JulianTime struct {
	time.Time
}

// Scan implements the database/sql/Scanner interface.
// nil yields the zero time; int64 and float64 are read as julian day numbers.
func (t *JulianTime) Scan(src interface{}) error {
	switch v := src.(type) {
	case nil:
		t.Time = time.Time{}
		return nil
	case int64:
		t.Time = JulianDayToLocalTime(float64(v)) // local time
		return nil
	case float64:
		t.Time = JulianDayToLocalTime(v) // local time
		return nil
	}
	return fmt.Errorf("unsupported JulianTime src: %T", src)
}

// Value implements the database/sql/driver/Valuer interface.
// The zero time is stored as NULL, anything else as a julian day number.
func (t JulianTime) Value() (driver.Value, error) {
	if t.IsZero() {
		return nil, nil
	}
	return JulianDay(t.Time), nil
}
// timeStampLayout is the canonical text representation used by TimeStamp.
const timeStampLayout = "2006-01-02T15:04:05.000Z07:00"

// TimeStamp is an alias used to persist time as '2006-01-02T15:04:05.000Z07:00' string
type TimeStamp struct {
	time.Time
}

// Scan implements the database/sql/Scanner interface.
// nil yields the zero time; strings are parsed with timeStampLayout.
func (t *TimeStamp) Scan(src interface{}) error {
	switch v := src.(type) {
	case nil:
		t.Time = time.Time{}
		return nil
	case string:
		parsed, err := time.Parse(timeStampLayout, v)
		if err != nil {
			return err
		}
		t.Time = parsed
		return nil
	}
	return fmt.Errorf("unsupported TimeStamp src: %T", src)
}

// Value implements the database/sql/driver/Valuer interface.
// The zero time is stored as NULL, anything else as a formatted string.
func (t TimeStamp) Value() (driver.Value, error) {
	if t.IsZero() {
		return nil, nil
	}
	return t.Format(timeStampLayout), nil
}

// MarshalText implements the encoding.TextMarshaler interface.
// TimeStamp is formatted as null when zero or RFC3339.
func (t TimeStamp) MarshalText() ([]byte, error) {
	if t.IsZero() {
		return []byte("null"), nil
	}
	return t.Time.MarshalText()
}
// UnmarshalText implements the encoding.TextUnmarshaler interface.
// Date is expected in RFC3339 format or null.
func (t *TimeStamp) UnmarshalText(data []byte) error {
	if bytes.Equal(data, []byte("null")) {
		t.Time = time.Time{}
		return nil
	}
	// Delegate to time.Time's RFC 3339 parser via the embedded field.
	ti := &t.Time
	return ti.UnmarshalText(data)
} | vendor/github.com/gwenn/gosqlite/date.go | 0.783285 | 0.51818 | date.go | starcoder
package np
import (
"errors"
"math"
"github.com/ryadzenine/dolphin/models"
)
// EstimatorState is a snapshot of an estimator's values tagged with the
// step at which it was taken; RevezEstimator.State returns it as a
// models.State.
type EstimatorState struct {
	State   []float64
	version int
}

// Values returns the estimated values held by the snapshot.
func (r EstimatorState) Values() []float64 {
	return r.State
}

// Version returns the step counter recorded at snapshot time.
func (r EstimatorState) Version() int {
	return r.version
}
// RevezEstimator is an online kernel-based regression estimator over a
// fixed set of grid points: each Compute call refines the per-point
// estimates from one labelled sample.
type RevezEstimator struct {
	Vectors   []models.Vector             // fixed evaluation grid; state[i] estimates the value at Vectors[i]
	state     []float64                   // current estimate for each grid point
	Step      int                         // number of samples processed so far
	Rate      func(int) float64           // learning-rate schedule, evaluated at Step
	Smoothing func(int) float64           // kernel bandwidth schedule, evaluated at Step
	Kernel    func(models.Vector) float64 // smoothing kernel
}
// Error reports the estimator's error on testData; it delegates to
// FastL2Error and therefore assumes testData is aligned with Vectors.
func (r *RevezEstimator) Error(testData []models.SLPoint) float64 {
	return r.FastL2Error(testData)
}
// FastL2Error computes the L2 error against testData by comparing
// testData[i].Y directly with state[i]. It is only valid when the test
// points are aligned index-for-index with the estimator's Vectors.
func (r *RevezEstimator) FastL2Error(testData []models.SLPoint) float64 {
	sum := 0.0
	for i, sample := range testData {
		d := r.state[i] - sample.Y
		sum += d * d
	}
	return math.Sqrt(sum) / float64(len(testData))
}
// L2Error computes the L2 error against testData, using Predict for
// each test point so the points need not be aligned with Vectors.
// NOTE(review): a Predict error (point outside the grid) is ignored and
// contributes a prediction of 0 — confirm this is intended.
func (r *RevezEstimator) L2Error(testData []models.SLPoint) float64 {
	sum := 0.0
	for _, sample := range testData {
		p, _ := r.Predict(sample.X)
		d := p - sample.Y
		sum += d * d
	}
	return math.Sqrt(sum) / float64(len(testData))
}
// Predict returns the current estimate at p. The point must coincide
// with one of the estimator's grid Vectors (L1 distance zero);
// otherwise an error is returned.
func (r *RevezEstimator) Predict(p models.Vector) (float64, error) {
	for i := range r.Vectors {
		if models.L1Norm(r.Vectors[i], p) == 0 {
			return r.state[i], nil
		}
	}
	return 0, errors.New("the point is outside the learning domain")
}
// Average performs one stochastic-approximation update of the state.
// It advances Step, then for every grid point j moves the estimate
// towards the sample l, weighted by the kernel evaluated at the
// bandwidth-scaled distance between the grid point and l.X.
// convexPart supplies the base value of each update; Compute passes the
// current state itself.
func (r *RevezEstimator) Average(convexPart []float64, l models.SLPoint) {
	r.Step++
	ht := r.Smoothing(r.Step) // bandwidth for this step
	for j, point := range r.Vectors {
		// Scaled displacement (point - l.X) / ht.
		tmp := make([]float64, len(l.X))
		for i, v := range l.X {
			tmp[i] = (point[i] - v) / ht
		}
		// Kernel weight, normalised by ht^dim.
		tmpKer := r.Kernel(tmp) / math.Pow(ht, float64(len(l.X)))
		r.state[j] = convexPart[j] - r.Rate(r.Step)*(tmpKer*r.state[j]-l.Y*tmpKer)
	}
}
// Compute feeds one labelled sample into the estimator, updating the
// state in place (the current state serves as the convex part).
func (r *RevezEstimator) Compute(p models.SLPoint) {
	r.Average(r.state, p)
}
// State returns a snapshot of the current estimates and step counter.
// NOTE(review): the returned EstimatorState shares the underlying state
// slice with the estimator, so mutations leak through — confirm whether
// a copy was intended. The value receiver is also inconsistent with the
// pointer receivers of the other methods.
func (r RevezEstimator) State() models.State {
	return EstimatorState{State: r.state, version: r.Step}
}
// NewRevezEstimator builds an estimator over the given grid points with
// default schedules: rate 1/t, bandwidth t^(-1/(dim+2)) and a Gaussian
// kernel. All points are assumed to share the dimension of points[0].
// An empty points slice now yields an error instead of a panic on the
// points[0] access.
func NewRevezEstimator(points []models.Vector) (*RevezEstimator, error) {
	if len(points) == 0 {
		return nil, errors.New("np: NewRevezEstimator needs at least one point")
	}
	dim := len(points[0])
	e := RevezEstimator{
		Vectors:   points,
		state:     make([]float64, len(points)),
		Step:      0,
		Rate:      func(i int) float64 { return 1.0 / float64(i) },
		Smoothing: func(t int) float64 { return math.Pow(float64(t), -1./float64(dim+2)) },
		Kernel:    models.GaussianKernel,
	}
	return &e, nil
} | models/np/estimators.go | 0.616012 | 0.556882 | estimators.go | starcoder
package immutablestack
// Iterator is a function applied to each substack of a given stack.
// It can return an error to break iteration.
type Iterator func(ImmutableStack) error

// Functor is a function capable of modifying stack elements.
type Functor func(interface{}) interface{}

// ImmutableStack is an abstract immutable LIFO data structure.
// Operations never modify a stack in place: Push returns a new stack
// that shares structure with its predecessor.
type ImmutableStack interface {
	// Top returns the top element on the stack (nil for the empty stack).
	Top() interface{}
	// Pop returns the next substack.
	Pop() ImmutableStack
	// Size returns the size of the stack.
	Size() uint64
	// Push pushes a new element onto the stack and returns a new stack.
	Push(interface{}) ImmutableStack
	// ForEach iterates over the stack and applies the iterator function to each
	// substack until it finds the empty one or the iterator returns an error.
	ForEach(Iterator)
	// FMap applies a function to each element on the stack and returns the new
	// stack with results of the function application.
	FMap(f Functor) ImmutableStack
}
// immutableStackImpl is the linked-node implementation of ImmutableStack:
// each node records its top element, the substack beneath it, and its size.
type immutableStackImpl struct {
	top  interface{}
	pop  ImmutableStack
	size uint64
}

// New returns a new instance of ImmutableStack.
func New() ImmutableStack {
	// The zero value (nil top, nil pop, size 0) is the empty stack.
	return &immutableStackImpl{}
}
// Top returns the top element, or nil for the empty stack.
func (i *immutableStackImpl) Top() interface{} {
	return i.top
}

// Pop returns the substack below the top element.
// NOTE(review): for the empty stack this returns a nil interface.
func (i *immutableStackImpl) Pop() ImmutableStack {
	return i.pop
}

// Size returns the number of elements on the stack.
func (i *immutableStackImpl) Size() uint64 {
	return i.size
}

// Push returns a new stack with element on top; the receiver is unchanged.
func (i *immutableStackImpl) Push(element interface{}) ImmutableStack {
	return &immutableStackImpl{
		top:  element,
		pop:  i,
		size: i.size + 1,
	}
}
// ForEach applies iterator to each non-empty substack, starting at the
// receiver, and stops at the empty stack or when iterator returns an
// error.
// Fix: the previous version called Pop() on the empty stack (a nil
// interface) and then dereferenced it via Top(), panicking whenever
// ForEach was invoked on an empty stack.
func (i *immutableStackImpl) ForEach(iterator Iterator) {
	var cursor ImmutableStack = i
	for cursor != nil && cursor.Top() != nil {
		if err := iterator(cursor); err != nil {
			return
		}
		cursor = cursor.Pop()
	}
}
// FMap returns a new stack whose elements are f applied to each element
// of the receiver, preserving order. Recursion stops at the first node
// whose top is nil, which is treated as the empty stack.
// NOTE(review): a nil element pushed by a caller would therefore
// truncate the mapped stack — confirm nil elements are disallowed.
func (i *immutableStackImpl) FMap(f Functor) ImmutableStack {
	if i.top == nil {
		return New()
	}
	return &immutableStackImpl{
		top:  f(i.top),
		pop:  i.Pop().FMap(f),
		size: i.size,
	}
} | immutablestack.go | 0.803174 | 0.490663 | immutablestack.go | starcoder
package game
// PieceType abstraction for a single piece: display names/symbols plus
// movement and capture rules expressed purely in terms of coordinates
// (board occupancy is not considered at this level).
type PieceType interface {
	Name() string
	BlackSymbol() rune
	WhiteSymbol() rune
	IsValidMove(currentLocation, newLocation Location, color Color) bool
	CanCapture(currentLocation, opponentLocation Location, color Color) bool
}
// Piece an actual piece: a PieceType together with its color and its
// current location on the board.
type Piece struct {
	PieceType
	Color
	Location
}
// abs returns the absolute value of x.
// (As with any fixed-width abs, the result overflows for the minimum int8.)
func abs(x int8) int8 {
	if x >= 0 {
		return x
	}
	return -x
}
// onSameRankOrFile reports whether the two locations share a rank or a
// file, i.e. lie on a common rook line.
func onSameRankOrFile(location1, location2 Location) bool {
	return location1.rank == location2.rank || location1.file == location2.file
}
// onSameDiagonal reports whether the two locations lie on a common
// diagonal, i.e. their rank and file offsets match in either direction.
func onSameDiagonal(location1, location2 Location) bool {
	dRank := location1.rank - location2.rank
	dFile := location1.file - location2.file
	return dRank == dFile || dRank == -dFile
}
// Pawn a pawn
type Pawn struct {
}

// Rook a rook
type Rook struct {
}

// Bishop a bishop
type Bishop struct {
}

// Knight a knight
type Knight struct {
}

// Queen a queen
type Queen struct {
}

// King a king
type King struct {
}
// IsValidMove for a pawn: straight ahead only, one square, or two from
// the pawn's starting rank. Board occupancy is not checked here.
func (p Pawn) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	// Pawns never change file when moving (captures live in CanCapture).
	if currentLocation.file != newLocation.file {
		return false
	}
	// Normalise ranks so the pawn always advances towards higher ranks.
	currentRank, newRank := currentLocation.rank, newLocation.rank
	if color == Black {
		currentRank = (BoardSize - 1) - currentRank
		newRank = (BoardSize - 1) - newRank
	}
	// No moving backwards.
	if newRank < currentRank {
		return false
	}
	advance := newRank - currentRank
	switch {
	case advance > 2:
		return false
	case advance == 2 && currentRank != 1:
		// The double step is only allowed from the starting rank.
		return false
	default:
		return true
	}
}
// IsValidMove for a rook: any square on the same rank or file
// (intervening pieces are not considered at this level).
func (r Rook) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	return onSameRankOrFile(currentLocation, newLocation)
}
// IsValidMove for a knight: an L-shaped jump of (2,1) or (1,2).
func (k Knight) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	dRank := abs(currentLocation.rank - newLocation.rank)
	dFile := abs(currentLocation.file - newLocation.file)
	return (dRank == 2 && dFile == 1) || (dRank == 1 && dFile == 2)
}
// IsValidMove for a bishop: any square on the same diagonal
// (intervening pieces are not considered at this level).
func (b Bishop) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	return onSameDiagonal(currentLocation, newLocation)
}
// IsValidMove for a queen: rook and bishop moves combined.
func (q Queen) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	return onSameRankOrFile(currentLocation, newLocation) || onSameDiagonal(currentLocation, newLocation)
}
// IsValidMove for a king: at most one square in any direction
// (castling is not handled here).
func (k King) IsValidMove(currentLocation, newLocation Location, color Color) bool {
	dRank := abs(currentLocation.rank - newLocation.rank)
	dFile := abs(currentLocation.file - newLocation.file)
	return dRank <= 1 && dFile <= 1
}
// CanCapture from a pawn's perspective: one square diagonally forward
// (en passant is not handled here).
func (p Pawn) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	// The target must sit exactly one file to either side.
	if abs(currentLocation.file-opponentLocation.file) != 1 {
		return false
	}
	// Normalise ranks so the pawn always captures towards higher ranks.
	currentRank, opponentRank := currentLocation.rank, opponentLocation.rank
	if color == Black {
		currentRank = (BoardSize - 1) - currentRank
		opponentRank = (BoardSize - 1) - opponentRank
	}
	// ...and the target must be exactly one rank ahead.
	return opponentRank == currentRank+1
}
// CanCapture from a rook's perspective: identical to its move rule.
func (r Rook) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	return r.IsValidMove(currentLocation, opponentLocation, color)
}

// CanCapture from a knight's perspective: identical to its move rule.
func (k Knight) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	return k.IsValidMove(currentLocation, opponentLocation, color)
}

// CanCapture from a bishop's perspective: identical to its move rule.
func (b Bishop) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	return b.IsValidMove(currentLocation, opponentLocation, color)
}

// CanCapture from a queen's perspective: identical to its move rule.
func (q Queen) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	return q.IsValidMove(currentLocation, opponentLocation, color)
}
// CanCapture from a king's perspective: identical to its move rule.
func (k King) CanCapture(currentLocation, opponentLocation Location, color Color) bool {
	return k.IsValidMove(currentLocation, opponentLocation, color)
} | pkg/game/piece.go | 0.869507 | 0.457561 | piece.go | starcoder
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.