code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package ros2

import (
	"time"
)

// maxUint32 is the largest second value representable in a temporal's
// uint32 Sec field; normalizeTemporal panics beyond this range.
const maxUint32 = int64(^uint32(0))

// ROS TIME IMPLEMENTATION

// Time contains a temporal value {sec,nsec} measured from the Unix epoch.
type Time struct {
	temporal
}

// NewTime creates a Time object from the given integers {sec,nsec},
// normalizing nsec into the [0,1e9) range with carry into sec.
func NewTime(sec uint32, nsec uint32) Time {
	sec, nsec = normalizeTemporal(int64(sec), int64(nsec))
	return Time{temporal{sec, nsec}}
}

// Now creates a Time object holding the current wall-clock time.
func Now() Time {
	var t Time
	t.FromNSec(uint64(time.Now().UnixNano()))
	return t
}

// Diff returns the difference t-from as a Duration.
// It panics (via normalizeTemporal) if the result would be negative.
func (t *Time) Diff(from Time) Duration {
	sec, nsec := normalizeTemporal(int64(t.Sec)-int64(from.Sec), int64(t.NSec)-int64(from.NSec))
	return Duration{temporal{sec, nsec}}
}

// Add returns the sum of this Time and the Duration given.
func (t *Time) Add(d Duration) Time {
	sec, nsec := normalizeTemporal(int64(t.Sec)+int64(d.Sec), int64(t.NSec)+int64(d.NSec))
	return Time{temporal{sec, nsec}}
}

// Sub returns this Time minus the Duration given.
// It panics if the result would be negative.
func (t *Time) Sub(d Duration) Time {
	sec, nsec := normalizeTemporal(int64(t.Sec)-int64(d.Sec), int64(t.NSec)-int64(d.NSec))
	return Time{temporal{sec, nsec}}
}

// Cmp returns -1, 0 or +1 when t is before, equal to, or after other.
func (t *Time) Cmp(other Time) int {
	return cmpUint64(t.ToNSec(), other.ToNSec())
}

// ROS DURATION IMPLEMENTATION

// Duration is a wrapper for a temporal span of {sec,nsec}.
type Duration struct {
	temporal
}

// NewDuration instantiates a new Duration with the given sec and nsec,
// normalizing nsec into the [0,1e9) range with carry into sec.
func NewDuration(sec uint32, nsec uint32) Duration {
	sec, nsec = normalizeTemporal(int64(sec), int64(nsec))
	return Duration{temporal{sec, nsec}}
}

// Add returns the sum of the two durations.
func (d *Duration) Add(other Duration) Duration {
	sec, nsec := normalizeTemporal(int64(d.Sec)+int64(other.Sec), int64(d.NSec)+int64(other.NSec))
	return Duration{temporal{sec, nsec}}
}

// Sub returns this duration minus other.
// It panics if the result would be negative.
func (d *Duration) Sub(other Duration) Duration {
	sec, nsec := normalizeTemporal(int64(d.Sec)-int64(other.Sec), int64(d.NSec)-int64(other.NSec))
	return Duration{temporal{sec, nsec}}
}

// Cmp returns -1, 0 or +1 when d is shorter, equal to, or longer than other.
func (d *Duration) Cmp(other Duration) int {
	return cmpUint64(d.ToNSec(), other.ToNSec())
}

// Sleep pauses the calling goroutine for duration d.
// It always returns nil (the error result is kept for interface compatibility).
func (d *Duration) Sleep() error {
	if !d.IsZero() {
		time.Sleep(time.Duration(d.ToNSec()) * time.Nanosecond)
	}
	return nil
}

// ROS TEMPORAL IMPLEMENTATION

// normalizeTemporal folds nsec into [0,1e9), carrying whole seconds into sec.
// It panics when the normalized sec falls outside [0, maxUint32].
//
// BUG FIX: the positive-overflow test was previously `nsec > SecondInNanosecond`,
// which left an input of exactly 1e9 ns unnormalized (NSec == 1e9 instead of
// carrying into Sec). The negative branch likewise produced NSec == 1e9 for
// exact negative multiples of 1e9. Both cases are now handled by a single
// floored-division normalization.
func normalizeTemporal(sec int64, nsec int64) (uint32, uint32) {
	const SecondInNanosecond = 1000000000
	if nsec >= SecondInNanosecond || nsec < 0 {
		sec += nsec / SecondInNanosecond
		nsec = nsec % SecondInNanosecond
		if nsec < 0 {
			// Go's % truncates toward zero; shift into [0,1e9) and borrow.
			sec--
			nsec += SecondInNanosecond
		}
	}
	if sec < 0 || sec > maxUint32 {
		panic("Time is out of range")
	}
	return uint32(sec), uint32(nsec)
}

// cmpUint64 three-way-compares two uint64 values, returning -1, 0 or 1.
func cmpUint64(lhs, rhs uint64) int {
	if lhs > rhs {
		return 1
	}
	if lhs < rhs {
		return -1
	}
	return 0
}

// temporal is the shared {sec,nsec} representation embedded by Time and
// Duration. NSec is kept in [0,1e9) by normalizeTemporal.
type temporal struct {
	Sec  uint32
	NSec uint32
}

// IsZero reports whether the value is exactly zero.
func (t *temporal) IsZero() bool {
	return t.Sec == 0 && t.NSec == 0
}

// ToSec converts the value to floating-point seconds (lossy for very large values).
func (t *temporal) ToSec() float64 {
	return float64(t.Sec) + float64(t.NSec)*1e-9
}

// ToNSec converts the value to total nanoseconds.
func (t *temporal) ToNSec() uint64 {
	return uint64(t.Sec)*1000000000 + uint64(t.NSec)
}

// FromSec sets the value from floating-point seconds.
// NOTE(review): a negative sec wraps through the uint64 conversion —
// callers are assumed to pass non-negative values.
func (t *temporal) FromSec(sec float64) {
	nsec := uint64(sec * 1e9)
	t.FromNSec(nsec)
}

// FromNSec sets the value from total nanoseconds.
func (t *temporal) FromNSec(nsec uint64) {
	t.Sec, t.NSec = normalizeTemporal(0, int64(nsec))
}

// Normalize re-normalizes the stored value in place.
func (t *temporal) Normalize() {
	t.Sec, t.NSec = normalizeTemporal(int64(t.Sec), int64(t.NSec))
}
ros2/time.go
0.823719
0.572842
time.go
starcoder
package griblib

import (
	"encoding/binary"
	"fmt"
	"io"
)

// fixNegLatLon converts a GRIB2 sign-magnitude latitude/longitude value
// (negative numbers are stored with the top bit set as a sign flag) into a
// regular two's-complement int32. Non-negative inputs are returned unchanged.
func fixNegLatLon(num int32) int32 {
	if num < 0 {
		// Clear the sign bit to recover the magnitude, then negate.
		return -int32(uint32(num) &^ uint32(0x80000000))
	}
	return num
}

// ScaledValue specifies the scale of a value.
type ScaledValue struct {
	Scale uint8  `json:"scale"`
	Value uint32 `json:"value"`
}

// BasicAngle specifies the angle of a grid.
type BasicAngle struct {
	BasicAngle    uint32 `json:"basicAngle"`
	BasicAngleSub uint32 `json:"basicAngleSub"`
}

// Grid is an interface for all grids.
type Grid interface {
	Export() map[string]string
}

// ReadGrid reads a grid definition of the given template number from the
// big-endian binary input. The returned Grid is always non-nil; the error
// is non-nil when the read failed or the template number is unsupported.
//
// The per-case lat/lon fixups are repeated because the grid structs are
// distinct types and binary.Read needs the concrete type.
func ReadGrid(f io.Reader, templateNumber uint16) (Grid, error) {
	var err error
	var g Grid
	switch templateNumber {
	case 0:
		var grid Grid0
		err = binary.Read(f, binary.BigEndian, &grid)
		grid.La1 = fixNegLatLon(grid.La1)
		grid.Lo1 = fixNegLatLon(grid.Lo1)
		grid.La2 = fixNegLatLon(grid.La2)
		grid.Lo2 = fixNegLatLon(grid.Lo2)
		g = &grid
	case 10:
		var grid Grid10
		err = binary.Read(f, binary.BigEndian, &grid)
		grid.La1 = fixNegLatLon(grid.La1)
		grid.Lo1 = fixNegLatLon(grid.Lo1)
		grid.La2 = fixNegLatLon(grid.La2)
		grid.Lo2 = fixNegLatLon(grid.Lo2)
		g = &grid
	case 20:
		var grid Grid20
		err = binary.Read(f, binary.BigEndian, &grid)
		grid.La1 = fixNegLatLon(grid.La1)
		grid.Lo1 = fixNegLatLon(grid.Lo1)
		g = &grid
	case 30:
		var grid Grid30
		err = binary.Read(f, binary.BigEndian, &grid)
		grid.La1 = fixNegLatLon(grid.La1)
		grid.Lo1 = fixNegLatLon(grid.Lo1)
		g = &grid
	case 40:
		var grid Grid40
		err = binary.Read(f, binary.BigEndian, &grid)
		grid.La1 = fixNegLatLon(grid.La1)
		grid.Lo1 = fixNegLatLon(grid.Lo1)
		grid.La2 = fixNegLatLon(grid.La2)
		grid.Lo2 = fixNegLatLon(grid.Lo2)
		g = &grid
	case 90:
		var grid Grid90
		return &grid, binary.Read(f, binary.BigEndian, &grid)
	default:
		// Keep returning a non-nil (zero) grid alongside the error, as
		// existing callers may rely on a usable pointer.
		var grid Grid90
		return &grid, fmt.Errorf("Unsupported grid definition %d", templateNumber)
	}
	return g, err
}

// GridHeader is a common header embedded in all grids.
type GridHeader struct {
	EarthShape      uint8       `json:"earthShape"`
	SphericalRadius ScaledValue `json:"sphericalRadius"`
	MajorAxis       ScaledValue `json:"majorAxis"`
	MinorAxis       ScaledValue `json:"minorAxis"`
}

// Export converts the header to a map[string]string.
func (h *GridHeader) Export() map[string]string {
	return map[string]string{
		"earth": EarthShapeDescription(int(h.EarthShape)),
	}
}

// Grid0 Definition Template 3.0: Latitude/longitude (or equidistant
// cylindrical, or Plate Carree).
type Grid0 struct {
	GridHeader
	Ni                          uint32     `json:"ni"` // lines along parallel (latitudes)
	Nj                          uint32     `json:"nj"` // lines along meridian (longitude)
	BasicAngle                  BasicAngle `json:"basicAngle"`
	La1                         int32      `json:"la1"` // latitude of first grid-point
	Lo1                         int32      `json:"lo1"` // longitude of first grid-point
	ResolutionAndComponentFlags uint8      `json:"resolutionAndComponentFlags"`
	La2                         int32      `json:"la2"` // latitude of last grid-point
	Lo2                         int32      `json:"lo2"` // longitude of last grid-point
	Di                          int32      `json:"di"` // direction i increment
	Dj                          int32      `json:"dj"` // direction j increment
	ScanningMode                uint8      `json:"scanningMode"`
}

// Export converts Grid0 to a map[string]string.
func (h *Grid0) Export() map[string]string {
	return map[string]string{
		"earth":         EarthShapeDescription(int(h.EarthShape)),
		"ni":            fmt.Sprint(h.Ni),
		"nj":            fmt.Sprint(h.Nj),
		"basicAngle":    fmt.Sprint(h.BasicAngle.BasicAngle),
		"basicAngleSub": fmt.Sprint(h.BasicAngle.BasicAngleSub),
		"la1":           fmt.Sprint(h.La1),
		"lo1":           fmt.Sprint(h.Lo1),
		"la2":           fmt.Sprint(h.La2),
		"lo2":           fmt.Sprint(h.Lo2),
		"di":            fmt.Sprint(h.Di),
		"dj":            fmt.Sprint(h.Dj),
		"scanningMode":  fmt.Sprint(h.ScanningMode),
	}
}

// Grid10 Definition Template 3.10: Mercator.
type Grid10 struct {
	GridHeader
	Ni                          uint32 `json:"ni"`
	Nj                          int32  `json:"nj"`
	La1                         int32  `json:"la1"`
	Lo1                         int32  `json:"lo1"`
	ResolutionAndComponentFlags uint8  `json:"resolutionAndComponentFlags"`
	Lad                         int32  `json:"lad"`
	La2                         int32  `json:"la2"`
	Lo2                         int32  `json:"lo2"`
	ScanningMode                uint8  `json:"scanningMode"`
	GridOrientation             uint32 `json:"gridOrientation"`
	Di                          int32  `json:"di"`
	Dj                          int32  `json:"dj"`
}

// Grid20 Definition Template 3.20: Polar stereographic projection.
type Grid20 struct {
	GridHeader
	Nx uint32 `json:"nx"`
	Ny uint32 `json:"ny"`
	// BUG FIX: the JSON tag was previously "na1", inconsistent with the
	// "la1" tag used by every other grid template; corrected.
	La1                         int32 `json:"la1"`
	Lo1                         int32 `json:"lo1"`
	ResolutionAndComponentFlags uint8 `json:"resolutionAndComponentFlags"`
	Lad                         int32 `json:"lad"`
	Lov                         int32 `json:"lov"`
	Dx                          int32 `json:"dx"`
	Dy                          int32 `json:"dy"`
	ProjectionCenter            uint8 `json:"projectionCenter"`
	ScanningMode                uint8 `json:"scanningMode"`
}

// Grid30 Definition Template 3.30: Lambert conformal.
type Grid30 struct {
	GridHeader
	Nx                          uint32 `json:"nx"`
	Ny                          uint32 `json:"ny"`
	La1                         int32  `json:"la1"`
	Lo1                         int32  `json:"lo1"`
	ResolutionAndComponentFlags uint8  `json:"resolutionAndComponentFlags"`
	Lad                         int32  `json:"lad"`
	Lov                         int32  `json:"lov"`
	Dx                          int32  `json:"dx"`
	Dy                          int32  `json:"dy"`
	ProjectionCenter            uint8  `json:"projectionCenter"`
	ScanningMode                uint8  `json:"scanningMode"`
	Latin1                      uint32 `json:"latin1"`
	Latin2                      uint32 `json:"latin2"`
	LaSouthPole                 uint32 `json:"laSouthPole"`
	LoSouthPole                 uint32 `json:"loSouthPole"`
}

// Grid40 Definition Template 3.40: Gaussian latitude/longitude.
type Grid40 struct {
	GridHeader
	Ni                          uint32 `json:"ni"`
	Nj                          uint32 `json:"nj"`
	BasicAngle                  uint32 `json:"basicAngle"`
	La1                         int32  `json:"la1"`
	Lo1                         int32  `json:"lo1"`
	ResolutionAndComponentFlags uint8  `json:"resolutionAndComponentFlags"`
	La2                         int32  `json:"la2"`
	Lo2                         int32  `json:"lo2"`
	Di                          int32  `json:"di"`
	N                           uint32 `json:"n"`
	ScanningMode                uint8  `json:"scanningMode"`
}

// Grid90 Definition Template 3.90: Space view perspective or orthographic.
// FIXME: implement properly (lat/lon sign fixups are not applied for this template).
type Grid90 struct {
	GridHeader
	Nx uint32 `json:"nx"`
	Ny uint32 `json:"ny"`
	//BasicAngle BasicAngle
	Lap                         int32  `json:"lap"`
	Lop                         int32  `json:"lop"`
	ResolutionAndComponentFlags uint8  `json:"resolutionAndComponentFlags"`
	Dx                          uint32 `json:"dx"`
	Dy                          uint32 `json:"dy"`
	Xp                          uint32 `json:"xp"`
	Yp                          uint32 `json:"yp"`
	ScanningMode                uint8  `json:"scanningMode"`
	Orientation                 uint32 `json:"orientation"`
	Nr                          uint32 `json:"nr"`
	Xo                          uint32 `json:"xo"`
	Yo                          uint32 `json:"yo"`
}
griblib/grids.go
0.650356
0.420362
grids.go
starcoder
package video

// FrameDecoder is for decoding compressed frames with the help of data streams.
// A new instance of a decoder is created with a FrameDecoderBuilder.
type FrameDecoder struct {
	horizontalTiles   int
	verticalTiles     int
	colorer           TileColorFunction
	paletteLookupList []byte
	controlWords      []ControlWord
}

// newFrameDecoder builds a decoder from the builder's settings.
// The palette lookup list is copied into a buffer padded with 16 extra
// bytes so that the widest slice taken in colorTile (param:param+16)
// cannot run past the end of the list.
func newFrameDecoder(builder *FrameDecoderBuilder) *FrameDecoder {
	listLen := len(builder.paletteLookupList)
	decoder := &FrameDecoder{
		horizontalTiles:   builder.width / TileSideLength,
		verticalTiles:     builder.height / TileSideLength,
		colorer:           builder.colorer,
		paletteLookupList: make([]byte, listLen+16),
		controlWords:      builder.controlWords}
	copy(decoder.paletteLookupList[:listLen], builder.paletteLookupList)
	return decoder
}

// Decode reads the provided streams to paint a frame.
// Tiles are visited row-major (vTile outer, hTile inner) and decoding stops
// early once the bitstream is exhausted.
func (decoder *FrameDecoder) Decode(bitstreamData []byte, maskstreamData []byte) {
	bitstream := NewBitstreamReader(bitstreamData)
	maskstream := NewMaskstreamReader(maskstreamData)
	lastControl := ControlWord(0)
	for vTile := 0; vTile < decoder.verticalTiles && !bitstream.Exhausted(); vTile++ {
		for hTile := 0; hTile < decoder.horizontalTiles && !bitstream.Exhausted(); hTile++ {
			control := decoder.readNextControlWord(bitstream)
			if control.Type() == CtrlUnknown {
				panic("Unknown control in use")
			} else if control.Type() == CtrlRepeatPrevious {
				// Re-apply the previous tile's control word.
				control = lastControl
			}
			if control.Type() == CtrlSkip {
				// A 5-bit skip count follows the control word; the
				// sentinel 0x1F skips the remainder of the row.
				skipCount := bitstream.Read(5)
				bitstream.Advance(5)
				if skipCount == 0x1F {
					hTile = decoder.horizontalTiles
				} else {
					// NOTE(review): combined with the loop's hTile++, this
					// appears to skip skipCount tiles beyond the current
					// one — confirm against the format specification.
					hTile += int(skipCount)
				}
			} else {
				decoder.colorTile(hTile, vTile, control, maskstream)
			}
			lastControl = control
		}
	}
}

// readNextControlWord resolves the next control word from the bitstream.
// A 12-bit index selects an entry from the control-word table; long-offset
// entries are followed through 4-bit hops until a concrete entry is found.
// The usage pattern (Read(n) followed by a separate Advance(n)) indicates
// that Read peeks without consuming and Advance consumes — confirm with
// BitstreamReader.
func (decoder *FrameDecoder) readNextControlWord(bitstream *BitstreamReader) ControlWord {
	controlIndex := bitstream.Read(12)
	control := decoder.controlWords[controlIndex]
	if control.IsLongOffset() {
		// Consume the 8 bits of the long-offset marker before chaining.
		bitstream.Advance(8)
		for control.IsLongOffset() {
			bitstream.Advance(4)
			offset := bitstream.Read(4)
			controlIndex = control.LongOffset() + offset
			control = decoder.controlWords[controlIndex]
		}
	}
	// Consume the bits belonging to the resolved control word.
	bitstream.Advance(control.Count())
	return control
}

// colorTile paints one tile according to the control word. The parameter
// field either carries two literal color bytes (2-color cases) or an index
// into paletteLookupList (4/8/16-color cases). The trailing arguments to
// colorer are the mask bits read from the maskstream and a per-pixel bit
// depth (1, 2, 3 or 4 bits).
func (decoder *FrameDecoder) colorTile(hTile, vTile int, control ControlWord, maskstream *MaskstreamReader) {
	param := control.Parameter()
	switch control.Type() {
	case CtrlColorTile2ColorsStatic:
		{
			// 0xAAAA presumably encodes a fixed alternating 2-color mask
			// pattern — confirm against TileColorFunction's contract.
			decoder.colorer(hTile, vTile, []byte{byte(param & 0xFF), byte(param >> 8 & 0xFF)}, 0xAAAA, 1)
		}
	case CtrlColorTile2ColorsMasked:
		{
			decoder.colorer(hTile, vTile, []byte{byte(param & 0xFF), byte(param >> 8 & 0xFF)}, maskstream.Read(2), 1)
		}
	case CtrlColorTile4ColorsMasked:
		{
			decoder.colorer(hTile, vTile, decoder.paletteLookupList[param:param+4], maskstream.Read(4), 2)
		}
	case CtrlColorTile8ColorsMasked:
		{
			decoder.colorer(hTile, vTile, decoder.paletteLookupList[param:param+8], maskstream.Read(6), 3)
		}
	case CtrlColorTile16ColorsMasked:
		{
			decoder.colorer(hTile, vTile, decoder.paletteLookupList[param:param+16], maskstream.Read(8), 4)
		}
	}
}
compress/video/FrameDecoder.go
0.834677
0.462655
FrameDecoder.go
starcoder
package models

import (
	// NOTE(review): the hashed import alias and overall code shape strongly
	// suggest this file is generated (Microsoft Kiota style) — confirm
	// before hand-editing.
	i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)

// ApprovalWorkflowProvider is a serializable model entity. All getters and
// setters below tolerate a nil receiver (returning nil / doing nothing).
type ApprovalWorkflowProvider struct {
	Entity
	// The businessFlows property
	businessFlows []BusinessFlowable
	// The businessFlowsWithRequestsAwaitingMyDecision property
	businessFlowsWithRequestsAwaitingMyDecision []BusinessFlowable
	// The displayName property
	displayName *string
	// The policyTemplates property
	policyTemplates []GovernancePolicyTemplateable
}

// NewApprovalWorkflowProvider instantiates a new approvalWorkflowProvider and sets the default values.
func NewApprovalWorkflowProvider()(*ApprovalWorkflowProvider) {
	m := &ApprovalWorkflowProvider{
		Entity: *NewEntity(),
	}
	return m
}

// CreateApprovalWorkflowProviderFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is unused here: this type has no subtypes to discriminate between.
func CreateApprovalWorkflowProviderFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
	return NewApprovalWorkflowProvider(), nil
}

// GetBusinessFlows gets the businessFlows property value. Returns nil on a nil receiver.
func (m *ApprovalWorkflowProvider) GetBusinessFlows()([]BusinessFlowable) {
	if m == nil {
		return nil
	} else {
		return m.businessFlows
	}
}

// GetBusinessFlowsWithRequestsAwaitingMyDecision gets the businessFlowsWithRequestsAwaitingMyDecision property value. Returns nil on a nil receiver.
func (m *ApprovalWorkflowProvider) GetBusinessFlowsWithRequestsAwaitingMyDecision()([]BusinessFlowable) {
	if m == nil {
		return nil
	} else {
		return m.businessFlowsWithRequestsAwaitingMyDecision
	}
}

// GetDisplayName gets the displayName property value. Returns nil on a nil receiver.
func (m *ApprovalWorkflowProvider) GetDisplayName()(*string) {
	if m == nil {
		return nil
	} else {
		return m.displayName
	}
}

// GetFieldDeserializers returns the deserialization information for the
// current model: the Entity base deserializers extended with one entry per
// property of this type. Each closure parses its node value and, when
// non-nil, stores it through the corresponding setter.
func (m *ApprovalWorkflowProvider) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
	res := m.Entity.GetFieldDeserializers()
	res["businessFlows"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetCollectionOfObjectValues(CreateBusinessFlowFromDiscriminatorValue)
		if err != nil {
			return err
		}
		if val != nil {
			// Downcast the generic Parsable collection to the typed slice.
			res := make([]BusinessFlowable, len(val))
			for i, v := range val {
				res[i] = v.(BusinessFlowable)
			}
			m.SetBusinessFlows(res)
		}
		return nil
	}
	res["businessFlowsWithRequestsAwaitingMyDecision"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetCollectionOfObjectValues(CreateBusinessFlowFromDiscriminatorValue)
		if err != nil {
			return err
		}
		if val != nil {
			res := make([]BusinessFlowable, len(val))
			for i, v := range val {
				res[i] = v.(BusinessFlowable)
			}
			m.SetBusinessFlowsWithRequestsAwaitingMyDecision(res)
		}
		return nil
	}
	res["displayName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetDisplayName(val)
		}
		return nil
	}
	res["policyTemplates"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetCollectionOfObjectValues(CreateGovernancePolicyTemplateFromDiscriminatorValue)
		if err != nil {
			return err
		}
		if val != nil {
			res := make([]GovernancePolicyTemplateable, len(val))
			for i, v := range val {
				res[i] = v.(GovernancePolicyTemplateable)
			}
			m.SetPolicyTemplates(res)
		}
		return nil
	}
	return res
}

// GetPolicyTemplates gets the policyTemplates property value. Returns nil on a nil receiver.
func (m *ApprovalWorkflowProvider) GetPolicyTemplates()([]GovernancePolicyTemplateable) {
	if m == nil {
		return nil
	} else {
		return m.policyTemplates
	}
}

// Serialize serializes information of the current object: the Entity base
// first, then each property. Collections are up-cast to Parsable before
// writing; nil collections are skipped entirely.
// NOTE(review): displayName is written unconditionally (even when nil),
// unlike the collection properties — this matches the generated pattern for
// scalar values but is worth confirming.
func (m *ApprovalWorkflowProvider) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
	err := m.Entity.Serialize(writer)
	if err != nil {
		return err
	}
	if m.GetBusinessFlows() != nil {
		cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBusinessFlows()))
		for i, v := range m.GetBusinessFlows() {
			cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
		}
		err = writer.WriteCollectionOfObjectValues("businessFlows", cast)
		if err != nil {
			return err
		}
	}
	if m.GetBusinessFlowsWithRequestsAwaitingMyDecision() != nil {
		cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetBusinessFlowsWithRequestsAwaitingMyDecision()))
		for i, v := range m.GetBusinessFlowsWithRequestsAwaitingMyDecision() {
			cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
		}
		err = writer.WriteCollectionOfObjectValues("businessFlowsWithRequestsAwaitingMyDecision", cast)
		if err != nil {
			return err
		}
	}
	{
		err = writer.WriteStringValue("displayName", m.GetDisplayName())
		if err != nil {
			return err
		}
	}
	if m.GetPolicyTemplates() != nil {
		cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPolicyTemplates()))
		for i, v := range m.GetPolicyTemplates() {
			cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
		}
		err = writer.WriteCollectionOfObjectValues("policyTemplates", cast)
		if err != nil {
			return err
		}
	}
	return nil
}

// SetBusinessFlows sets the businessFlows property value. No-op on a nil receiver.
func (m *ApprovalWorkflowProvider) SetBusinessFlows(value []BusinessFlowable)() {
	if m != nil {
		m.businessFlows = value
	}
}

// SetBusinessFlowsWithRequestsAwaitingMyDecision sets the businessFlowsWithRequestsAwaitingMyDecision property value. No-op on a nil receiver.
func (m *ApprovalWorkflowProvider) SetBusinessFlowsWithRequestsAwaitingMyDecision(value []BusinessFlowable)() {
	if m != nil {
		m.businessFlowsWithRequestsAwaitingMyDecision = value
	}
}

// SetDisplayName sets the displayName property value. No-op on a nil receiver.
func (m *ApprovalWorkflowProvider) SetDisplayName(value *string)() {
	if m != nil {
		m.displayName = value
	}
}

// SetPolicyTemplates sets the policyTemplates property value. No-op on a nil receiver.
func (m *ApprovalWorkflowProvider) SetPolicyTemplates(value []GovernancePolicyTemplateable)() {
	if m != nil {
		m.policyTemplates = value
	}
}
models/approval_workflow_provider.go
0.589716
0.431764
approval_workflow_provider.go
starcoder
package shardingconfig

import (
	"math/big"
	"time"

	"github.com/harmony-one/harmony/internal/genesis"
)

// Schedule returns the sharding configuration instance for the given
// epoch.
type Schedule interface {
	InstanceForEpoch(epoch *big.Int) Instance

	// BlocksPerEpoch returns the number of blocks per each Epoch
	BlocksPerEpoch() uint64

	// CalcEpochNumber returns the epoch number based on the block number
	CalcEpochNumber(blockNum uint64) *big.Int

	// IsLastBlock checks if the block is the last block in the epoch
	IsLastBlock(blockNum uint64) bool

	// VdfDifficulty returns the number of iterations for VDF calculation
	VdfDifficulty() int

	// ConsensusRatio is the ratio of new nodes vs consensus total nodes
	ConsensusRatio() float64

	// FirstCrossLinkBlock returns the first cross link block number that will be accepted into beacon chain
	FirstCrossLinkBlock() uint64

	// TODO: remove it after randomness feature turned on mainnet
	// RandomnessStartingEpoch returns the starting epoch of randomness generation
	RandomnessStartingEpoch() uint64

	// MaxTxAmountLimit is the max amount limit for a valid transaction
	MaxTxAmountLimit() *big.Int

	// MaxNumRecentTxsPerAccountLimit is the max number of transactions of a particular account per block level
	MaxNumRecentTxsPerAccountLimit() uint64

	// MaxTxPoolSizeLimit is the max total number of transactions allowed as pending transactions in the transaction pool
	MaxTxPoolSizeLimit() int

	// MaxNumTxsPerBlockLimit is the max total number of transactions allowed to be processed per block
	MaxNumTxsPerBlockLimit() int

	// RecentTxDuration is how long "recent" means for a transaction, as a time Duration
	RecentTxDuration() time.Duration

	// TxsThrottleConfig is the configuration for throttling pending transactions
	TxsThrottleConfig() *TxsThrottleConfig
}

// Instance is one sharding configuration instance.
type Instance interface {
	// NumShards returns the number of shards in the network.
	NumShards() uint32

	// NumNodesPerShard returns number of nodes in each shard.
	NumNodesPerShard() int

	// NumHarmonyOperatedNodesPerShard returns number of nodes in each shard
	// that are operated by Harmony.
	NumHarmonyOperatedNodesPerShard() int

	// HmyAccounts returns a list of Harmony accounts
	HmyAccounts() []genesis.DeployAccount

	// FnAccounts returns a list of Foundational node accounts
	FnAccounts() []genesis.DeployAccount

	// FindAccount returns the deploy account based on the blskey
	FindAccount(blsPubKey string) (bool, *genesis.DeployAccount)

	// ReshardingEpoch returns a list of Epoch while off-chain resharding happens
	ReshardingEpoch() []*big.Int
}

// TxThrottleFlag is the throttling flag for each transaction.
// Refer to the enum declaration below for more context.
type TxThrottleFlag int

// TxThrottleFlag is determined per transaction
// during the new block proposal and pending transactions throttling
const (
	TxSelect TxThrottleFlag = iota
	TxUnselect
	TxInvalid
)

// String returns the human-readable name of the flag; unrecognized values
// map to "TxThrottleUnknown".
func (result TxThrottleFlag) String() string {
	switch result {
	case TxSelect:
		return "TxSelect"
	case TxUnselect:
		return "TxUnselect"
	case TxInvalid:
		return "TxInvalid"
	}
	return "TxThrottleUnknown"
}

// TxsThrottleConfig contains configuration for throttling pending transactions per node block
type TxsThrottleConfig struct {
	// Max amount limit for a valid transaction
	MaxTxAmountLimit *big.Int

	// How long "recent" means for a transaction, as a time Duration
	RecentTxDuration time.Duration

	// Max number of transactions of a particular account within RecentTxDuration
	MaxNumRecentTxsPerAccountLimit uint64

	// Max total number of transactions allowed as pending transactions in the transaction pool
	MaxTxPoolSizeLimit int

	// Max total number of transactions allowed to be processed per block
	MaxNumTxsPerBlockLimit int
}
internal/configs/sharding/shardingconfig.go
0.562657
0.408513
shardingconfig.go
starcoder
package wfc import ( // "fmt" "image" "image/color" "math" ) /** * OverlappingModel Type */ type OverlappingModel struct { *BaseModel // Underlying model of generic Wave Function Collapse algorithm N int // Size of patterns (ie pixel distance of influencing pixels) Colors []color.Color // Array of unique colors in input Ground int // Id of the specific pattern to use as the bottom of the generation. A value of -1 means that this is unset Patterns []Pattern // Array of unique patterns in input Propagator [][][][]int // Table of which patterns (t2) mathch a given pattern (t1) at offset (dx, dy) [t1][dx][dy][t2] Fmxmn, Fmymn int // Width and height of output, minus n } /** * Pattern Type. Flattened array of color codes. */ type Pattern []int /** * NewOverlappingModel * @param {image.Image} img The source image * @param {int} N Size of the patterns * @param {int} width The width of the generated image * @param {int} height The height of the generated image * @param {bool} periodicInput Whether the source image is to be considered as periodic / as a repeatable texture * @param {bool} periodicOutput Whether the generation should be periodic / a repeatable texture * @param {int} symmetry Allowed symmetries from 1 (no symmetry) to 8 (all mirrored / rotated variations) * @param {int} [ground=0] Id of the specific pattern to use as the bottom of the generation ( see https://github.com/mxgmn/WaveFunctionCollapse/issues/3#issuecomment-250995366 ) * @return *OverlappingModel A pointer to a new copy of the model */ func NewOverlappingModel(img image.Image, n, width, height int, periodicInput, periodicOutput bool, symmetry int, ground bool) *OverlappingModel { // Initialize model model := &OverlappingModel{BaseModel: &BaseModel{}} model.N = n model.Fmx = width model.Fmy = height model.Periodic = periodicOutput model.Ground = -1 bounds := img.Bounds() dataWidth := bounds.Max.X dataHeight := bounds.Max.Y // Build up a palette of colors (by assigning numbers to unique color values) 
sample := make([][]int, dataWidth) for i := range sample { sample[i] = make([]int, dataHeight) } model.Colors = make([]color.Color, 0) colorMap := make(map[color.Color]int) for y := 0; y < dataHeight; y++ { for x := 0; x < dataWidth; x++ { color := img.At(x, y) if _, ok := colorMap[color]; !ok { colorMap[color] = len(model.Colors) model.Colors = append(model.Colors, color) } sample[x][y] = colorMap[color] } } // Extract various patterns from input (patterns are 1D arrays of sample codes) c := len(model.Colors) w := int(math.Pow(float64(c), float64(n*n))) // Given a transforming function, return a flattened array of the N*N pattern getPattern := func(transformer func(x, y int) int) Pattern { result := make(Pattern, n*n) for y := 0; y < n; y++ { for x := 0; x < n; x++ { result[x+y*n] = transformer(x, y) } } return result } // Return a flattened array of the N*N pattern at (x, y) using sample codes patternFromSample := func(x, y int) Pattern { return getPattern(func(dx, dy int) int { return sample[(x+dx)%dataWidth][(y+dy)%dataHeight] }) } rotate := func(p Pattern) Pattern { return getPattern(func(x, y int) int { return p[n-1-y+x*n] }) } reflect := func(p Pattern) Pattern { return getPattern(func(x, y int) int { return p[n-1-x+y*n] }) } // Compute a "hash" value for indexing patterns (unique for unique patterns) indexFromPattern := func(p Pattern) int { result := 0 power := 1 for i := 0; i < len(p); i++ { result += p[len(p)-1-i] * power power *= c } return result } // Reverse the hash of a pattern's index patternFromIndex := func(ind int) Pattern { residue := ind power := w result := make(Pattern, n*n) for i := 0; i < len(result); i++ { power /= c count := 0 for residue >= power { residue -= power count++ } result[i] = count } return result } // Build map of patterns (indexed by computed hash) to weights based on frequency in sample weights := make(map[int]int) weightsKeys := make([]int, 0) var horizontalBound, verticalBound int if periodicInput { horizontalBound = 
dataWidth verticalBound = dataHeight } else { horizontalBound = dataWidth - n + 1 verticalBound = dataHeight - n + 1 } for y := 0; y < verticalBound; y++ { for x := 0; x < horizontalBound; x++ { ps := make([]Pattern, 8, 8) ps[0] = patternFromSample(x, y) ps[1] = reflect(ps[0]) ps[2] = rotate(ps[0]) ps[3] = reflect(ps[2]) ps[4] = rotate(ps[2]) ps[5] = reflect(ps[4]) ps[6] = rotate(ps[4]) ps[7] = reflect(ps[6]) for k := 0; k < symmetry; k++ { ind := indexFromPattern(ps[k]) if _, ok := weights[ind]; ok { weights[ind]++ } else { weightsKeys = append(weightsKeys, ind) weights[ind] = 1 } if ground && y == verticalBound-1 && x == 0 && k == 0 { // Set groung pattern model.Ground = len(weightsKeys) - 1 } } } } model.T = len(weightsKeys) // Store the patterns and cooresponding weights (stationary) model.Patterns = make([]Pattern, model.T) model.Stationary = make([]float64, model.T) model.Propagator = make([][][][]int, model.T) for i, wk := range weightsKeys { model.Patterns[i] = patternFromIndex(wk) model.Stationary[i] = float64(weights[wk]) } // Initialize wave (to all true) and changes (to all false) fields model.Wave = make([][][]bool, model.Fmx) model.Changes = make([][]bool, model.Fmx) for x := 0; x < model.Fmx; x++ { model.Wave[x] = make([][]bool, model.Fmy) model.Changes[x] = make([]bool, model.Fmy) for y := 0; y < model.Fmy; y++ { model.Wave[x][y] = make([]bool, model.T) model.Changes[x][y] = false for t := 0; t < model.T; t++ { model.Wave[x][y][t] = true } } } // Check that the spaces n distance away have no conflicts agrees := func(p1, p2 Pattern, dx, dy int) bool { var xmin, xmax, ymin, ymax int if dx < 0 { xmin = 0 xmax = dx + n } else { xmin = dx xmax = n } if dy < 0 { ymin = 0 ymax = dy + n } else { ymin = dy ymax = n } for y := ymin; y < ymax; y++ { for x := xmin; x < xmax; x++ { if p1[x+n*y] != p2[x-dx+n*(y-dy)] { return false } } } return true } // Build table of which patterns can exist next to another for t := 0; t < model.T; t++ { model.Propagator[t] = 
make([][][]int, 2*n-1) for x := 0; x < 2*n-1; x++ { model.Propagator[t][x] = make([][]int, 2*n-1) for y := 0; y < 2*n-1; y++ { list := make([]int, 0) for t2 := 0; t2 < model.T; t2++ { if agrees(model.Patterns[t], model.Patterns[t2], x-n+1, y-n+1) { list = append(list, t2) } } model.Propagator[t][x][y] = make([]int, len(list)) for k := 0; k < len(list); k++ { model.Propagator[t][x][y][k] = list[k] } } } } model.Fmxmn = model.Fmx - model.N model.Fmymn = model.Fmy - model.N return model } /** * OnBoundary */ func (model *OverlappingModel) OnBoundary(x, y int) bool { return !model.Periodic && (x > model.Fmxmn || y > model.Fmymn) } /** * Propagate * return: bool, change occured in this iteration */ func (model *OverlappingModel) Propagate() bool { change := false startLoop := -model.N + 1 endLoop := model.N for x := 0; x < model.Fmx; x++ { for y := 0; y < model.Fmy; y++ { if model.Changes[x][y] { model.Changes[x][y] = false for dx := startLoop; dx < endLoop; dx++ { for dy := startLoop; dy < endLoop; dy++ { sx := x + dx sy := y + dy if sx < 0 { sx += model.Fmx } else if sx >= model.Fmx { sx -= model.Fmx } if sy < 0 { sy += model.Fmy } else if sy >= model.Fmy { sy -= model.Fmy } if !model.Periodic && (sx > model.Fmx || sy > model.Fmy) { continue } allowed := model.Wave[sx][sy] for t := 0; t < model.T; t++ { if !allowed[t] { continue } b := false prop := model.Propagator[t][model.N-1-dx][model.N-1-dy] for i := 0; i < len(prop) && !b; i++ { b = model.Wave[x][y][prop[i]] } if !b { model.Changes[sx][sy] = true change = true allowed[t] = false } } } } } } } return change } /** * Clear the internal state, then set ground pattern */ func (model *OverlappingModel) Clear() { model.ClearBase(model) if model.Ground != -1 && model.T > 1 { for x := 0; x < model.Fmx; x++ { for t := 0; t < model.T; t++ { if t != model.Ground { model.Wave[x][model.Fmy-1][t] = false } } model.Changes[x][model.Fmy-1] = true for y := 0; y < model.Fmy-1; y++ { model.Wave[x][y][model.Ground] = false 
model.Changes[x][y] = true } } for model.Propagate() { // Empty loop } } } /** * Create a image.Image holding the data for a complete image */ func (model *OverlappingModel) RenderCompleteImage() image.Image { output := make([][]color.Color, model.Fmx) for i := range output { output[i] = make([]color.Color, model.Fmy) } for y := 0; y < model.Fmy; y++ { for x := 0; x < model.Fmx; x++ { for t := 0; t < model.T; t++ { if model.Wave[x][y][t] { output[x][y] = model.Colors[model.Patterns[t][0]] } } } } return GeneratedImage{output} } /** * Create a image.Image holding the data for an incomplete image */ func (model *OverlappingModel) RenderIncompleteImage() image.Image { output := make([][]color.Color, model.Fmx) for i := range output { output[i] = make([]color.Color, model.Fmy) } var contributorNumber, sR, sG, sB, sA uint32 for y := 0; y < model.Fmy; y++ { for x := 0; x < model.Fmx; x++ { contributorNumber, sR, sG, sB, sA = 0, 0, 0, 0, 0 for dy := 0; dy < model.N; dy++ { for dx := 0; dx < model.N; dx++ { sx := x - dx if sx < 0 { sx += model.Fmx } sy := y - dy if sy < 0 { sy += model.Fmy } if !model.Periodic && (sx > model.Fmxmn || sy > model.Fmymn) { continue } for t := 0; t < model.T; t++ { if model.Wave[sx][sy][t] { contributorNumber++ r, g, b, a := model.Colors[model.Patterns[t][dx+dy*model.N]].RGBA() sR += r sG += g sB += b sA += a } } } } if contributorNumber == 0 { output[x][y] = color.RGBA{127, 127, 127, 255} } else { uR := uint8((sR / contributorNumber) >> 8) uG := uint8((sG / contributorNumber) >> 8) uB := uint8((sB / contributorNumber) >> 8) uA := uint8((sA / contributorNumber) >> 8) output[x][y] = color.RGBA{uR, uG, uB, uA} } } } return GeneratedImage{output} } /** * Retrieve the RGBA data * returns: Image */ func (model *OverlappingModel) Render() image.Image { if model.IsGenerationSuccessful() { return model.RenderCompleteImage() } else { return model.RenderIncompleteImage() } } /** * Retrieve the RGBA data * returns: Image, finished, successful */ func 
(model *OverlappingModel) Iterate(iterations int) (image.Image, bool, bool) { finished := model.BaseModel.Iterate(model, iterations) return model.Render(), finished, model.IsGenerationSuccessful() } /** * Retrieve the RGBA data * returns: Image, successful */ func (model *OverlappingModel) Generate() (image.Image, bool) { model.BaseModel.Generate(model) return model.Render(), model.IsGenerationSuccessful() }
overlap-model.go
0.762954
0.508788
overlap-model.go
starcoder
package jit import ( "fmt" "strings" "github.com/tetratelabs/wazero/internal/wasm/buildoptions" ) // nilRegister is used to indicate a register argument a variable is invalid and not an actual register. const nilRegister int16 = -1 func isNilRegister(r int16) bool { return r == nilRegister } func isIntRegister(r int16) bool { return unreservedGeneralPurposeIntRegisters[0] <= r && r <= unreservedGeneralPurposeIntRegisters[len(unreservedGeneralPurposeIntRegisters)-1] } func isFloatRegister(r int16) bool { return generalPurposeFloatRegisters[0] <= r && r <= generalPurposeFloatRegisters[len(generalPurposeFloatRegisters)-1] } func isZeroRegister(r int16) bool { return r == zeroRegister } // conditionalRegisterState indicates a state of the conditional flag register. // In arm64, conditional registers are defined as arm64.COND_*. // In amd64, we define each flag value in value_locations_amd64.go type conditionalRegisterState int16 const conditionalRegisterStateUnset conditionalRegisterState = 0 // valueLocation corresponds to each variable pushed onto the wazeroir (virtual) stack, // and it has the information about where it exists in the physical machine. // It might exist in registers, or maybe on in the non-virtual physical stack allocated in memory. type valueLocation struct { regType generalPurposeRegisterType // Set to nilRegister if the value is stored in the memory stack. register int16 // Set to conditionalRegisterStateUnset if the value is not on the conditional register. 
conditionalRegister conditionalRegisterState // This is the location of this value in the memory stack at runtime, stackPointer uint64 } func (v *valueLocation) registerType() (t generalPurposeRegisterType) { return v.regType } func (v *valueLocation) setRegisterType(t generalPurposeRegisterType) { v.regType = t } func (v *valueLocation) setRegister(reg int16) { v.register = reg v.conditionalRegister = conditionalRegisterStateUnset } func (v *valueLocation) onRegister() bool { return v.register != nilRegister && v.conditionalRegister == conditionalRegisterStateUnset } func (v *valueLocation) onStack() bool { return v.register == nilRegister && v.conditionalRegister == conditionalRegisterStateUnset } func (v *valueLocation) onConditionalRegister() bool { return v.conditionalRegister != conditionalRegisterStateUnset } func (v *valueLocation) String() string { var location string if v.onStack() { location = fmt.Sprintf("stack(%d)", v.stackPointer) } else if v.onConditionalRegister() { location = fmt.Sprintf("conditional(%d)", v.conditionalRegister) } else if v.onRegister() { location = fmt.Sprintf("register(%d)", v.register) } return fmt.Sprintf("{type=%s,location=%s}", v.regType, location) } func newValueLocationStack() *valueLocationStack { return &valueLocationStack{ usedRegisters: map[int16]struct{}{}, } } // valueLocationStack represents the wazeroir virtual stack // where each item holds the location information about where it exists // on the physical machine at runtime. // Notably this is only used in the compilation phase, not runtime, // and we change the state of this struct at every wazeroir operation we compile. // In this way, we can see where the operands of a operation (for example, // two variables for wazeroir add operation.) exist and check the neccesity for // moving the variable to registers to perform actual CPU instruction // to achieve wazeroir's add operation. type valueLocationStack struct { // stack holds all the variables. 
stack []*valueLocation // sp is the current stack pointer. sp uint64 // usedRegisters stores the used registers. usedRegisters map[int16]struct{} // stackPointerCeil tracks max(.sp) across the lifespan of this struct. stackPointerCeil uint64 } func (v *valueLocationStack) String() string { var stackStr []string for i := uint64(0); i < v.sp; i++ { stackStr = append(stackStr, v.stack[i].String()) } var usedRegisters []string for reg := range v.usedRegisters { usedRegisters = append(usedRegisters, fmt.Sprintf("%d", reg)) } return fmt.Sprintf("sp=%d, stack=[%s], used_registers=[%s]", v.sp, strings.Join(stackStr, ","), strings.Join(usedRegisters, ",")) } func (s *valueLocationStack) clone() *valueLocationStack { ret := &valueLocationStack{} ret.sp = s.sp ret.usedRegisters = make(map[int16]struct{}, len(ret.usedRegisters)) for r := range s.usedRegisters { ret.markRegisterUsed(r) } ret.stack = make([]*valueLocation, len(s.stack)) for i, v := range s.stack { ret.stack[i] = &valueLocation{ regType: v.regType, conditionalRegister: v.conditionalRegister, stackPointer: v.stackPointer, register: v.register, } } ret.stackPointerCeil = s.stackPointerCeil return ret } // pushValueLocationOnRegister creates a new valueLocation with a given register and pushes onto // the location stack. func (s *valueLocationStack) pushValueLocationOnRegister(reg int16) (loc *valueLocation) { loc = &valueLocation{register: reg, conditionalRegister: conditionalRegisterStateUnset} if buildoptions.IsDebugMode { if _, ok := s.usedRegisters[loc.register]; ok { panic("bug in compiler: try pushing a register which is already in use") } } if isIntRegister(reg) { loc.setRegisterType(generalPurposeRegisterTypeInt) } else if isFloatRegister(reg) { loc.setRegisterType(generalPurposeRegisterTypeFloat) } s.markRegisterUsed(reg) s.push(loc) return } // pushValueLocationOnRegister creates a new valueLocation and pushes onto the location stack. 
func (s *valueLocationStack) pushValueLocationOnStack() (loc *valueLocation) { loc = &valueLocation{register: nilRegister, conditionalRegister: conditionalRegisterStateUnset} s.push(loc) return } // pushValueLocationOnRegister creates a new valueLocation with a given conditional register state // and pushes onto the location stack. func (s *valueLocationStack) pushValueLocationOnConditionalRegister(state conditionalRegisterState) (loc *valueLocation) { loc = &valueLocation{register: nilRegister, conditionalRegister: state} s.push(loc) return } // push pushes to a given valueLocation onto the stack. func (s *valueLocationStack) push(loc *valueLocation) { loc.stackPointer = s.sp if s.sp >= uint64(len(s.stack)) { // This case we need to grow the stack capacity by appending the item, // rather than indexing. s.stack = append(s.stack, loc) } else { s.stack[s.sp] = loc } if s.sp > s.stackPointerCeil { s.stackPointerCeil = s.sp } s.sp++ } func (s *valueLocationStack) pop() (loc *valueLocation) { s.sp-- loc = s.stack[s.sp] return } func (s *valueLocationStack) peek() (loc *valueLocation) { loc = s.stack[s.sp-1] return } func (s *valueLocationStack) releaseRegister(loc *valueLocation) { s.markRegisterUnused(loc.register) loc.register = nilRegister loc.conditionalRegister = conditionalRegisterStateUnset } func (s *valueLocationStack) markRegisterUnused(regs ...int16) { for _, reg := range regs { delete(s.usedRegisters, reg) } } func (s *valueLocationStack) markRegisterUsed(regs ...int16) { for _, reg := range regs { if !isZeroRegister(reg) { s.usedRegisters[reg] = struct{}{} } } } type generalPurposeRegisterType byte const ( generalPurposeRegisterTypeInt generalPurposeRegisterType = iota generalPurposeRegisterTypeFloat ) func (tp generalPurposeRegisterType) String() (ret string) { switch tp { case generalPurposeRegisterTypeInt: ret = "int" case generalPurposeRegisterTypeFloat: ret = "float" } return } // takeFreeRegister searches for unused registers. 
Any found are marked used and returned. func (s *valueLocationStack) takeFreeRegister(tp generalPurposeRegisterType) (reg int16, found bool) { var targetRegs []int16 switch tp { case generalPurposeRegisterTypeFloat: targetRegs = generalPurposeFloatRegisters case generalPurposeRegisterTypeInt: targetRegs = unreservedGeneralPurposeIntRegisters } for _, candidate := range targetRegs { if _, ok := s.usedRegisters[candidate]; ok { continue } return candidate, true } return 0, false } func (s *valueLocationStack) takeFreeRegisters(tp generalPurposeRegisterType, num int) (regs []int16, found bool) { var targetRegs []int16 switch tp { case generalPurposeRegisterTypeFloat: targetRegs = generalPurposeFloatRegisters case generalPurposeRegisterTypeInt: targetRegs = unreservedGeneralPurposeIntRegisters } regs = make([]int16, 0, num) for _, candidate := range targetRegs { if _, ok := s.usedRegisters[candidate]; ok { continue } regs = append(regs, candidate) if len(regs) == num { found = true break } } return } // Search through the stack, and steal the register from the last used // variable on the stack. func (s *valueLocationStack) takeStealTargetFromUsedRegister(tp generalPurposeRegisterType) (*valueLocation, bool) { for i := uint64(0); i < s.sp; i++ { loc := s.stack[i] if loc.onRegister() { switch tp { case generalPurposeRegisterTypeFloat: if isFloatRegister(loc.register) { return loc, true } case generalPurposeRegisterTypeInt: if isIntRegister(loc.register) { return loc, true } } } } return nil, false }
internal/wasm/jit/jit_value_location.go
0.73029
0.47457
jit_value_location.go
starcoder
package levy import ( "math" "math/rand" "sort" "fmt" ) // Gamma Function via Lanczos approximation formula. Depracated in favor of math.Gamma (Stirling approximation) func gamma(x float64) float64 { return math.Exp(logGamma(x)) } func logGamma(x float64) float64 { tmp := (x - 0.5) * math.Log(x + 4.5) - (x + 4.5) ser := 1.0 + 76.18009172947146 / (x + 0) - 86.50532032941677 / (x + 1) + 24.01409824083091 / (x + 2) - 1.231739572450155 / (x + 3) + 0.001208650973866179 / (x + 4) - 0.000005395239384953 / (x + 5) return tmp + math.Log(ser * math.Sqrt(2 * math.Pi)) } func randNormal(m, s float64) float64 { return rand.NormFloat64() * s + m } func interpolate(l *Linear, val float64) (float64, error) { var est float64 err := l.validate(val) if err != nil { return est, err } est = l.interpolate(val) return est, nil } type Linear struct { BaseInterpolation } func NewLinear() *Linear { li := &Linear{} return li } func (li *Linear) interpolate(val float64) float64 { var est float64 l, r := li.searchNearestNeighbours(val, 0, len(li.XY)-1) lX := li.XY[l].X rX := li.XY[r].X lY := li.XY[l].Y rY := li.XY[r].Y est = lY + (rY-lY)/(rX-lX)*(val-lX) return est } func (li *Linear) validate(val float64) error { if val < li.XY[0].X { return fmt.Errorf("Out of bounds: %f less than %f", val, li.XY[0].X) } if val > li.XY[len(li.XY)-1].X { return fmt.Errorf("Out of bounds: %f greater than %f", val, li.XY[len(li.XY)-1].X) } return nil } func (li *Linear) searchNearestNeighbours(val float64, l, r int) (int, int) { middle := (l + r) / 2 if (val >= li.XY[middle-1].X) && (val <= li.XY[middle].X) { return middle - 1, middle } else if val < li.XY[middle-1].X { return li.searchNearestNeighbours(val, l, middle-2) } return li.searchNearestNeighbours(val, middle+1, r) } type BaseInterpolation struct { XY []XYPair X []float64 Y []float64 } func (b *BaseInterpolation) Fit(x, y []float64) error { if len(x) != len(y) { return fmt.Errorf("Pairs do not match X: %f Y: %f", x, y) } b.X = x b.Y = y b.XY = 
sliceToPairs(x, y) sortPairs(b.XY) return nil } type XYPair struct { X float64 Y float64 } func sortPairs(cp []XYPair) { sort.Slice(cp, func(i, j int) bool { return cp[i].X < cp[j].X }) } func sliceToPairs(x, y []float64) []XYPair { cp := make([]XYPair, len(x)) for i := 0; i < len(x); i++ { cp = append(cp, XYPair{X: x[i], Y: y[i]}) } return cp }
levy/utils.go
0.626238
0.465448
utils.go
starcoder
package hijri import ( "math" "time" ) // ToHijri converts Gregorian date to standard Hijri date. func ToHijri(date time.Time) (int, int, int) { // We only need the date, so we just set the time to noon date = time.Date(date.Year(), date.Month(), date.Day(), 12, 0, 0, 0, time.UTC) // Calculate Julian Day jd := dateToJD(date) // Get number of days since 1 Muharram 1H flNDays := math.Floor(jd - 1948438.5) nDays := int(flNDays) // Split days per 30 years, for easier calculation yearsBy30 := int(math.Floor(flNDays / 10631.0)) // Get the leftover days leftoverDays := nDays - yearsBy30*10631 if leftoverDays < 0 { leftoverDays *= -1 } // From leftover days, calculate year var isLeapYear bool year := yearsBy30 * 30 for i := 1; ; i++ { year++ isLeapYear = false daysInYear := 354 switch i { case 2, 5, 7, 10, 13, 16, 18, 21, 24, 26, 29: isLeapYear = true daysInYear = 355 } leftoverDays -= daysInYear if leftoverDays <= 0 { leftoverDays += daysInYear break } } // From leftover days, calculate month and day day := 0 month := 0 for i := 1; ; i++ { month++ daysInMonth := 29 + i%2 if isLeapYear && month == 12 { daysInMonth = 30 } leftoverDays -= daysInMonth if leftoverDays <= 0 { day = leftoverDays + daysInMonth break } } return year, month, day } // FromHijri converts standard Hijri date to Gregorian date. 
func FromHijri(year, month, day int) time.Time { // Calculate N days until the end of last year Y := year - 1 yearBy30 := Y / 30 leftoverYear := Y - yearBy30*30 isNegativeYear := year < 0 // If the year is negative, for easir calculation, make leftover positive for now if isNegativeYear { leftoverYear *= -1 } // Count leap days in the leftover years nLeapDays := 0 for i := 1; i <= leftoverYear; i++ { switch i { case 2, 5, 7, 10, 13, 16, 18, 21, 24, 26, 29: nLeapDays++ } } // If it was negative, put back the minus if isNegativeYear { leftoverYear *= -1 nLeapDays *= -1 } nDaysLastYear := yearBy30*10631 + leftoverYear*354 + nLeapDays // Calculate N days in this year, until the end of last month M := month - 1 nDaysLastMonth := 0 for i := 1; i <= M; i++ { if i%2 == 0 { nDaysLastMonth += 29 } else { nDaysLastMonth += 30 } } // Calculate Julian Days since 1 Muharram 1H nDays := nDaysLastYear + nDaysLastMonth + day jd := 1948438.5 + float64(nDays) return jdToDate(jd) }
hijri.go
0.670285
0.421016
hijri.go
starcoder
package main import ( "math" "math/rand" "sort" "github.com/qeedquan/go-media/math/f64" ) const ( SCALLOPED_REGION_MIN_AREA = 0.00000001 ) type ArcData struct { P f64.Vec2 R float64 Sign float64 D float64 Theta float64 IntegralAtStart float64 RSquared float64 DSquared float64 } type ScallopedSector struct { P f64.Vec2 A1, A2, Area float64 Arcs [2]ArcData } type ScallopedRegion struct { Regions []ScallopedSector MinArea float64 Area float64 } func NewScallopedSector(pt f64.Vec2, a1, a2 float64, p1 f64.Vec2, r1, sign1 float64, p2 f64.Vec2, r2, sign2 float64) ScallopedSector { v1 := p1.Sub(pt) v2 := p2.Sub(pt) arc1 := ArcData{ P: p1, R: r1, Sign: sign1, D: math.Sqrt(v1.X*v1.X + v1.Y*v1.Y), Theta: math.Atan2(v1.Y, v1.X), } arc1.RSquared = arc1.R * arc1.R arc1.DSquared = arc1.D * arc1.D arc1.IntegralAtStart = integralOfDistToCircle(a1-arc1.Theta, arc1.D, arc1.R, arc1.Sign) arc2 := ArcData{ P: p2, R: r2, Sign: sign2, D: math.Sqrt(v2.X*v2.X + v2.Y*v2.Y), Theta: math.Atan2(v2.Y, v2.X), } arc2.RSquared = arc2.R * arc2.R arc2.DSquared = arc2.D * arc2.D arc2.IntegralAtStart = integralOfDistToCircle(a1-arc2.Theta, arc2.D, arc2.R, arc2.Sign) s := ScallopedSector{ P: pt, A1: a1, A2: a2, Arcs: [2]ArcData{arc1, arc2}, } s.Area = s.calcAreaToAngle(a2) return s } func (s *ScallopedSector) calcAreaToAngle(angle float64) float64 { underInner := integralOfDistToCircle(angle-s.Arcs[0].Theta, s.Arcs[0].D, s.Arcs[0].R, s.Arcs[0].Sign) - s.Arcs[0].IntegralAtStart underOuter := integralOfDistToCircle(angle-s.Arcs[1].Theta, s.Arcs[1].D, s.Arcs[1].R, s.Arcs[1].Sign) - s.Arcs[1].IntegralAtStart return underOuter - underInner } func (s *ScallopedSector) calcAngleForArea(area float64) float64 { lo := s.A1 hi := s.A2 cur := lo + (hi-lo)*rand.Float64() for i := 0; i < 10; i++ { if s.calcAreaToAngle(cur) < s.Area { lo = cur cur = (cur + hi) * .5 } else { hi = cur cur = (lo + cur) * .5 } } return cur } func (s *ScallopedSector) distToCurve(angle float64, index int) float64 { alpha := angle - 
s.Arcs[index].Theta sin_alpha := math.Sin(alpha) t0 := s.Arcs[index].RSquared - s.Arcs[index].DSquared*sin_alpha*sin_alpha if t0 < 0 { return s.Arcs[index].D * math.Cos(alpha) } return s.Arcs[index].D*math.Cos(alpha) + s.Arcs[index].Sign*math.Sqrt(t0) } func (s *ScallopedSector) Sample() f64.Vec2 { angle := s.calcAngleForArea(s.Area * rand.Float64()) d1 := s.distToCurve(angle, 0) d2 := s.distToCurve(angle, 1) d := math.Sqrt(d1*d1 + (d2*d2-d1*d1)*rand.Float64()) return f64.Vec2{ s.P.X + math.Cos(angle)*d, s.P.Y + math.Sin(angle)*d, } } func (s *ScallopedSector) canonizeAngle(angle float64) float64 { delta := math.Mod(angle-s.A1, 2*math.Pi) if delta < 0 { delta += 2 * math.Pi } return s.A1 + delta } func (s *ScallopedSector) distToCircle(angle float64, C f64.Vec2, r float64) (d1, d2 float64) { v := f64.Vec2{C.X - s.P.X, C.Y - s.P.Y} dSquared := v.X*v.X + v.Y*v.Y theta := math.Atan2(v.Y, v.X) alpha := angle - theta sin_alpha := math.Sin(alpha) xSquared := r*r - dSquared*sin_alpha*sin_alpha if xSquared < 0 { d1 = -10000000 d2 = d1 } else { a := math.Sqrt(dSquared) * math.Cos(alpha) x := math.Sqrt(xSquared) d1 = a - x d2 = a + x } return } func (s *ScallopedSector) SubtractDisk(C f64.Vec2, r float64) (regions []ScallopedSector) { var angles []float64 v := f64.Vec2{C.X - s.P.X, C.Y - s.P.Y} d := math.Sqrt(v.X*v.X + v.Y*v.Y) if r < d { theta := math.Atan2(v.Y, v.X) x := math.Sqrt(d*d - r*r) alpha := math.Asin(r / d) angle := s.canonizeAngle(theta + alpha) if s.A1 < angle && angle < s.A2 { if s.distToCurve(angle, 0) < x && x < s.distToCurve(angle, 1) { angles = append(angles, angle) } } angle = s.canonizeAngle(theta - alpha) if s.A1 < angle && angle < s.A2 { if s.distToCurve(angle, 0) < x && x < s.distToCurve(angle, 1) { angles = append(angles, angle) } } } for arcIndex := 0; arcIndex < 2; arcIndex++ { C2 := s.Arcs[arcIndex].P R := s.Arcs[arcIndex].R v := f64.Vec2{C.X - C2.X, C.Y - C2.Y} d := math.Sqrt(v.X*v.X + v.Y*v.Y) if d > math.SmallestNonzeroFloat64 { invD := 1.0 / d 
x := (d*d - r*r + R*R) * invD * .5 k := R*R - x*x if k > 0 { y := math.Sqrt(k) vx := v.X * invD vy := v.Y * invD vx_x := vx * x vy_x := vy * x vx_y := vx * y vy_y := vy * y angle := s.canonizeAngle(math.Atan2(C2.Y+vy_x+vx_y-s.P.Y, C2.X+vx_x-vy_y-s.P.X)) if s.A1 < angle && angle < s.A2 { angles = append(angles, angle) } angle = s.canonizeAngle(math.Atan2(C2.Y+vy_x-vx_y-s.P.Y, C2.X+vx_x+vy_y-s.P.X)) if s.A1 < angle && angle < s.A2 { angles = append(angles, angle) } } } } sort.Float64s(angles) angles = append([]float64{s.A1}, angles...) angles = append(angles, s.A2) for i := 1; i < len(angles); i++ { a1 := angles[i-1] a2 := angles[i] midA := (a1 + a2) * .5 inner := s.distToCurve(midA, 0) outer := s.distToCurve(midA, 1) d1, d2 := s.distToCircle(midA, C, r) if d2 < inner || d1 > outer { regions = append(regions, NewScallopedSector(s.P, s.A1, s.A2, s.Arcs[0].P, s.Arcs[0].R, s.Arcs[0].Sign, s.Arcs[1].P, s.Arcs[1].R, s.Arcs[1].Sign)) } else { if inner < d1 { regions = append(regions, NewScallopedSector(s.P, s.A1, s.A2, s.Arcs[0].P, s.Arcs[0].R, s.Arcs[0].Sign, C, r, -1)) } if d2 < outer { regions = append(regions, NewScallopedSector(s.P, s.A1, s.A2, C, r, 1, s.Arcs[1].P, s.Arcs[1].R, s.Arcs[1].Sign)) } } } return } func NewScallopedRegion(P f64.Vec2, r1, r2, minArea float64) *ScallopedRegion { r := &ScallopedRegion{} r.Regions = append(r.Regions, NewScallopedSector(P, 0, 2*math.Pi, P, r1, 1, P, r2, 1)) r.Area = r.Regions[0].Area return r } func (s *ScallopedRegion) SubtractDisk(C f64.Vec2, r float64) { var newRegions []ScallopedSector s.Area = 0 for i := range s.Regions { var tmp []ScallopedSector ss := s.Regions[i] tmp = append(tmp, ss.SubtractDisk(C, r)...) 
for j := range tmp { nss := tmp[j] if nss.Area > s.MinArea { s.Area += nss.Area if len(newRegions) > 0 { last := newRegions[len(newRegions)-1] if last.A2 == nss.A1 && (last.Arcs[0].P == nss.Arcs[0].P && last.Arcs[0].R == nss.Arcs[0].R && last.Arcs[0].Sign == nss.Arcs[0].Sign) && (last.Arcs[1].P == nss.Arcs[1].P && last.Arcs[1].R == nss.Arcs[1].R && last.Arcs[1].Sign == nss.Arcs[1].Sign) { last.A2 = nss.A2 last.Area = last.calcAreaToAngle(last.A2) continue } } } } } s.Regions = newRegions } func (s *ScallopedRegion) Sample() f64.Vec2 { if len(s.Regions) == 0 { panic("sampled from empty region") } a := s.Area * rand.Float64() ss := s.Regions[0] for i := range s.Regions { ss = s.Regions[i] if a < ss.Area { break } a -= ss.Area } return ss.Sample() } func (s *ScallopedRegion) IsEmpty() bool { return len(s.Regions) == 0 } func integralOfDistToCircle(x, d, r, k float64) float64 { sin_x := math.Sin(x) d_sin_x := d * sin_x y := sin_x * d / r if y < -1 { y = -1 } else if y > 1 { y = 1 } theta := math.Asin(y) return (r*(r*(x+ k*theta)+ k*math.Cos(theta)*d_sin_x) + d*math.Cos(x)*d_sin_x) * .5 }
gfx/pdsample/scalloped_sector.go
0.710126
0.473292
scalloped_sector.go
starcoder
package noborders

import (
	"image"
	"image/color"
	"math"

	"github.com/gonum/stat"
)

// sliceOperation performs a transform operation on every row and column slice on the
// specified image and crop. The oper callback is invoked once per 1-pixel-wide
// column and once per 1-pixel-high row inside crop; results come back in x/y order.
func sliceOperation(img image.Image, crop image.Rectangle, oper func(img image.Image, r image.Rectangle) float64) (rowResults, colResults []float64) {
	// Column slices: a 1-pixel-wide rectangle for each x in the crop.
	for x := crop.Min.X; x < crop.Max.X; x++ {
		var col = image.Rect(x, crop.Min.Y, x+1, crop.Max.Y)
		colResults = append(colResults, oper(img, col))
	}
	// Row slices: a 1-pixel-high rectangle for each y in the crop.
	for y := crop.Min.Y; y < crop.Max.Y; y++ {
		var row = image.Rect(crop.Min.X, y, crop.Max.X, y+1)
		rowResults = append(rowResults, oper(img, row))
	}
	return
}

// variance computes the pixel intensity variance of a portion of an image.
// NOTE(review): for an empty rectangle vals stays nil; confirm stat.Variance's
// behavior on an empty sample is acceptable to callers.
func variance(img image.Image, r image.Rectangle) float64 {
	var vals []float64
	for x := r.Min.X; x < r.Max.X; x++ {
		for y := r.Min.Y; y < r.Max.Y; y++ {
			vals = append(vals, float64(greyvalue(img.At(x, y))))
		}
	}
	return stat.Variance(vals, nil)
}

// entropy calculates the entropy of a portion of an image.
// Adapted from https://github.com/iand/salience (modified to handle 1-pixel image slices)
// Who adapted it from http://www.astro.cornell.edu/research/projects/compression/entropy.html
func entropy(img image.Image, r image.Rectangle) float64 {
	// Histogram of differences between adjacent grey values. Diffs are shifted
	// by (arraySize-1)/2 = 255 so negative diffs index the lower half.
	arraySize := 256*2 - 1
	freq := make([]float64, arraySize)
	// Degenerate region (at most 1x1): no adjacent pixel pairs, entropy is 0.
	if r.Max.X-r.Min.X < 2 && r.Max.Y-r.Min.Y < 2 {
		return 0.0
	}
	if r.Max.Y-r.Min.Y < 2 {
		// Single-row slice: difference horizontally adjacent pixels.
		for x := r.Min.X; x < r.Max.X-1; x++ {
			for y := r.Min.Y; y < r.Max.Y; y++ {
				diff := greyvalue(img.At(x, y)) - greyvalue(img.At(x+1, y))
				// Only diffs that fit the histogram range (-255..255) are counted.
				if -(arraySize+1)/2 < diff && diff < (arraySize+1)/2 {
					freq[diff+(arraySize-1)/2]++
				}
			}
		}
	} else {
		// General case (also covers single-column slices): difference
		// vertically adjacent pixels.
		for y := r.Min.Y; y < r.Max.Y-1; y++ {
			for x := r.Min.X; x < r.Max.X; x++ {
				diff := greyvalue(img.At(x, y)) - greyvalue(img.At(x, y+1))
				if -(arraySize+1)/2 < diff && diff < (arraySize+1)/2 {
					freq[diff+(arraySize-1)/2]++
				}
			}
		}
	}
	// Normalize the histogram and accumulate Shannon entropy (base 2).
	// NOTE(review): if every diff fell outside the histogram range, n is 0 and
	// the division below yields NaN — confirm inputs make that impossible
	// (see the greyvalue scale note below).
	n := 0.0
	for _, v := range freq {
		n += v
	}
	e := 0.0
	for i := 0; i < len(freq); i++ {
		freq[i] = freq[i] / n
		if freq[i] != 0.0 {
			e -= freq[i] * math.Log2(freq[i])
		}
	}
	return e
}

// greyvalue computes the greyscale value of a Color based on the luminosity method.
// NOTE(review): Color.RGBA() returns 16-bit channels (0..65535), so this value
// is on a 16-bit scale, while entropy's histogram only accepts diffs in
// -255..255 — confirm whether an 8-bit scale (e.g. shifting each channel >>8)
// was intended here.
func greyvalue(c color.Color) int {
	r, g, b, _ := c.RGBA()
	return int((r*299 + g*587 + b*114) / 1000)
}
vendor/github.com/neocortical/noborders/math.go
0.805861
0.619989
math.go
starcoder
// NOTE(review): `go test` only compiles files whose names end in _test.go;
// this file should be renamed (e.g. vector2d_test.go) for these tests to run.
package Vector2D

import (
	"testing"
)

// TestNew checks that New stores both coordinates as given.
func TestNew(t *testing.T) {
	var x = float64(1.1)
	var y = float64(2.2)
	v := New(x, y)
	if v.X != x || v.Y != y {
		t.Errorf("New(%v, %v) = %+v; want {%v %v}", x, y, v, x, y)
	}
}

// TestFromScalar checks that FromScalar copies the scalar into both coordinates.
func TestFromScalar(t *testing.T) {
	var s = float64(1.1)
	v := FromScalar(s)
	if v.X != s || v.Y != s {
		t.Errorf("FromScalar(%v) = %+v; want {%v %v}", s, v, s, s)
	}
}

// TestZero checks that Zero yields the origin vector.
func TestZero(t *testing.T) {
	v := Zero()
	if v.X != 0 || v.Y != 0 {
		t.Errorf("Zero() = %+v; want {0 0}", v)
	}
}

// TestUnit checks that Unit yields the all-ones vector.
func TestUnit(t *testing.T) {
	v := Unit()
	if v.X != 1 || v.Y != 1 {
		t.Errorf("Unit() = %+v; want {1 1}", v)
	}
}

// TestCopy checks that Copy preserves both coordinates.
func TestCopy(t *testing.T) {
	var x = float64(1.1)
	var y = float64(2.2)
	v1 := Vector2D{x, y}
	v2 := v1.Copy()
	if v2.X != x || v2.Y != y {
		t.Errorf("Copy() = %+v; want {%v %v}", v2, x, y)
	}
}

// TestAdd checks component-wise addition.
func TestAdd(t *testing.T) {
	v1 := Vector2D{1, 2}
	v2 := Vector2D{4, 8}
	result := v1.Add(v2)
	if result.X != 5 || result.Y != 10 {
		t.Errorf("%+v.Add(%+v) = %+v; want {5 10}", v1, v2, result)
	}
}

// TestSubtract checks component-wise subtraction.
func TestSubtract(t *testing.T) {
	v1 := Vector2D{1, 2}
	v2 := Vector2D{4, 8}
	result := v1.Subtract(v2)
	if result.X != -3 || result.Y != -6 {
		t.Errorf("%+v.Subtract(%+v) = %+v; want {-3 -6}", v1, v2, result)
	}
}

// TestMultiply checks component-wise multiplication.
func TestMultiply(t *testing.T) {
	v1 := Vector2D{1, 2}
	v2 := Vector2D{4, 8}
	result := v1.Multiply(v2)
	if result.X != 4 || result.Y != 16 {
		t.Errorf("%+v.Multiply(%+v) = %+v; want {4 16}", v1, v2, result)
	}
}

// TestDivide checks component-wise division.
func TestDivide(t *testing.T) {
	v1 := Vector2D{1, 2}
	v2 := Vector2D{4, 10}
	result := v1.Divide(v2)
	if result.X != 0.25 || result.Y != 0.2 {
		t.Errorf("%+v.Divide(%+v) = %+v; want {0.25 0.2}", v1, v2, result)
	}
}

// TestMultiplyScalar checks scaling both coordinates by one factor.
func TestMultiplyScalar(t *testing.T) {
	v := Vector2D{1, 2}
	result := v.MultiplyScalar(1.1)
	if result.X != 1.1 || result.Y != 2.2 {
		t.Errorf("%+v.MultiplyScalar(1.1) = %+v; want {1.1 2.2}", v, result)
	}
}

// TestDivideScalar checks dividing both coordinates by one divisor.
func TestDivideScalar(t *testing.T) {
	v := Vector2D{1, 2}
	result := v.DivideScalar(2)
	if result.X != 0.5 || result.Y != 1 {
		t.Errorf("%+v.DivideScalar(2) = %+v; want {0.5 1}", v, result)
	}
}

// TestString checks the "X:Y" textual rendering.
func TestString(t *testing.T) {
	v := Vector2D{1, 2}
	result := v.String()
	if result != "1:2" {
		t.Errorf("%+v.String() = %q; want %q", v, result, "1:2")
	}
}
Core/Vector2D/TestVector2D.go
0.623492
0.582343
TestVector2D.go
starcoder
package main

import (
	"fmt"
	"os"
	"strconv"
	"strings"
)

// main parses "<value> <n1,n2,...>" from the command line and prints the
// position of value within the comma-separated numbers.
func main() {
	if len(os.Args) != 3 {
		fmt.Println("You must pass value and numbers")
		os.Exit(1)
	}
	stringNumbers := strings.Split(os.Args[2], ",")
	numbers := make([]int, len(stringNumbers))
	for i, n := range stringNumbers {
		nInt, _ := strconv.Atoi(n)
		numbers[i] = nInt
	}
	value, _ := strconv.Atoi(os.Args[1])
	fmt.Printf("Binary chop: value %d in %v numbers: %d\n", value, numbers, Chop(value, numbers))
}

// Chop finds the position of value in a sorted array of values, or -1 if absent.
func Chop(value int, numbers []int) int {
	leftSide := 0
	rightSide := len(numbers) - 1
	for leftSide <= rightSide {
		// Overflow-safe midpoint; integer division already floors for
		// non-negative operands, so math.Floor was redundant.
		middle := leftSide + (rightSide-leftSide)/2
		if numbers[middle] > value {
			rightSide = middle - 1
		} else if numbers[middle] < value {
			leftSide = middle + 1
		} else {
			return middle
		}
	}
	return -1
}

// RecursiveChop finds the position of value in a sorted array of values (recursively)
func RecursiveChop(value int, numbers []int) int {
	return chop(value, numbers, 0, len(numbers)-1)
}

// chop is the recursive worker behind RecursiveChop: binary search over
// numbers[leftSide..rightSide], returning the index of value or -1.
func chop(value int, numbers []int, leftSide int, rightSide int) int {
	if leftSide > rightSide {
		return -1
	}
	middle := leftSide + (rightSide-leftSide)/2
	if numbers[middle] > value {
		return chop(value, numbers, leftSide, middle-1)
	}
	if numbers[middle] < value {
		return chop(value, numbers, middle+1, rightSide)
	}
	return middle
}

// ChopInRotatedArray finds the position of a value in a rotated sorted array with
// a pivot. This pivot element is the only element for which next element to it is smaller than it.
// For example: 3 4 5 1 2 -> pivot is 5 (index 2)
//
// Fixes over the previous version: the left segment is numbers[:pivot+1]
// (the old [0:pivot-1] dropped two elements), and the pivot offset is only
// added to indices found in the right segment (it was previously applied to
// left-segment results as well, returning wrong positions).
func ChopInRotatedArray(value int, numbers []int) int {
	pivot := FindPivot(numbers)
	if pivot == -1 {
		return Chop(value, numbers)
	}
	if value >= numbers[0] {
		// value can only live in the left (larger-values) segment [0..pivot];
		// indices there need no adjustment.
		return Chop(value, numbers[:pivot+1])
	}
	// value can only live in the right segment [pivot+1..]; shift the local
	// index back into full-array coordinates.
	if index := Chop(value, numbers[pivot+1:]); index >= 0 {
		return index + pivot + 1
	}
	return -1
}

// FindPivot returns the index of the pivot (the largest element, whose
// successor is smaller), or -1 when the array is empty or no pivot can be
// determined in the examined half.
func FindPivot(numbers []int) int {
	return findPivot(numbers, 0, len(numbers)-1)
}

// findPivot is the recursive worker behind FindPivot.
//
// Fixes over the previous version: it compared element values against the
// *indices* leftSide/rightSide (`numbers[middle] > rightSide`), and could
// read compareTo uninitialized; it now compares neighboring elements, which
// is the standard rotated-array pivot search.
func findPivot(numbers []int, leftSide int, rightSide int) int {
	// We don't have a pivot
	if rightSide < leftSide {
		return -1
	}
	if rightSide == leftSide {
		return leftSide
	}
	middle := leftSide + (rightSide-leftSide)/2
	// The pivot is exactly where the next element drops in value.
	if middle < rightSide && numbers[middle] > numbers[middle+1] {
		return middle
	}
	if middle > leftSide && numbers[middle-1] > numbers[middle] {
		return middle - 1
	}
	// Recurse into the half that still contains the drop.
	if numbers[leftSide] >= numbers[middle] {
		return findPivot(numbers, leftSide, middle-1)
	}
	return findPivot(numbers, middle+1, rightSide)
}
chop/main.go
0.672439
0.448185
main.go
starcoder
package sampler

import (
	"math"
	"time"

	"github.com/DataDog/datadog-agent/pkg/trace/atomic"
	"github.com/DataDog/datadog-agent/pkg/trace/metrics"
	"github.com/DataDog/datadog-agent/pkg/trace/pb"
	"github.com/DataDog/datadog-agent/pkg/trace/watchdog"
)

const (
	// decayPeriod is the interval at which backend scores are decayed
	// (drives the decayTicker in Start).
	decayPeriod time.Duration = 5 * time.Second
	// With this factor, any past trace counts for less than 50% after 6*decayPeriod and >1% after 39*decayPeriod
	// We can keep it hardcoded, but having `decayPeriod` configurable should be enough?
	defaultDecayFactor float64 = 1.125 // 9/8
	// initialSignatureScoreOffset seeds SetSignatureCoefficients in newSampler.
	initialSignatureScoreOffset float64 = 1
	// minSignatureScoreOffset is the floor for the score offset — presumably
	// enforced by AdjustScoring (defined elsewhere); confirm.
	minSignatureScoreOffset float64 = 0.01
	// defaultSignatureScoreSlope is the logarithm slope used by the scoring function.
	defaultSignatureScoreSlope float64 = 3
	// defaultSamplingRateThresholdTo1 defines the maximum allowed sampling rate below 1.
	// If this is surpassed, the rate is set to 1.
	defaultSamplingRateThresholdTo1 float64 = 1
)

// EngineType represents the type of a sampler engine.
type EngineType int

const (
	// NormalScoreEngineType is the type of the ScoreEngine sampling non-error traces.
	NormalScoreEngineType EngineType = iota
	// ErrorsScoreEngineType is the type of the ScoreEngine sampling error traces.
	ErrorsScoreEngineType
	// PriorityEngineType is type of the priority sampler engine type.
	PriorityEngineType
)

// Sampler is the main component of the sampling logic
type Sampler struct {
	// Storage of the state of the sampler
	Backend *MemoryBackend

	// Extra sampling rate to combine to the existing sampling
	extraRate float64
	// Maximum limit to the total number of traces per second to sample
	targetTPS *atomic.Float64
	// rateThresholdTo1 is the value above which all computed sampling rates will be set to 1
	rateThresholdTo1 float64

	// Sample any signature with a score lower than scoreSamplingOffset
	// It is basically the number of similar traces per second after which we start sampling
	signatureScoreOffset *atomic.Float64
	// Logarithm slope for the scoring function
	signatureScoreSlope *atomic.Float64
	// signatureScoreFactor = math.Pow(signatureScoreSlope, math.Log10(scoreSamplingOffset))
	signatureScoreFactor *atomic.Float64

	tags []string

	// exit signals the Start goroutine to stop; stopped is closed by that
	// goroutine once it has returned (see Stop).
	exit    chan struct{}
	stopped chan struct{}
}

// newSampler returns an initialized Sampler
func newSampler(extraRate float64, targetTPS float64, tags []string) *Sampler {
	s := &Sampler{
		Backend:              NewMemoryBackend(decayPeriod, defaultDecayFactor),
		extraRate:            extraRate,
		targetTPS:            atomic.NewFloat(targetTPS),
		rateThresholdTo1:     defaultSamplingRateThresholdTo1,
		signatureScoreOffset: atomic.NewFloat(0),
		signatureScoreSlope:  atomic.NewFloat(0),
		signatureScoreFactor: atomic.NewFloat(0),
		tags:                 tags,
		exit:                 make(chan struct{}),
		stopped:              make(chan struct{}),
	}

	s.SetSignatureCoefficients(initialSignatureScoreOffset, defaultSignatureScoreSlope)

	return s
}

// SetSignatureCoefficients updates the internal scoring coefficients used by the signature scoring
func (s *Sampler) SetSignatureCoefficients(offset float64, slope float64) {
	s.signatureScoreOffset.Store(offset)
	s.signatureScoreSlope.Store(slope)
	// Keep the derived factor consistent with offset/slope (see the
	// signatureScoreFactor field comment).
	s.signatureScoreFactor.Store(math.Pow(slope, math.Log10(offset)))
}

// UpdateExtraRate updates the extra sample rate
// NOTE(review): extraRate is a plain float64 written here and read by
// GetSampleRate; unlike targetTPS it is not atomic — confirm this is only
// ever called before Start or from a single goroutine.
func (s *Sampler) UpdateExtraRate(extraRate float64) {
	s.extraRate = extraRate
}

// UpdateTargetTPS updates the max TPS limit
func (s *Sampler) UpdateTargetTPS(targetTPS float64) {
	s.targetTPS.Store(targetTPS)
}

// Start runs and the Sampler main loop
func (s *Sampler) Start() {
	go func() {
		defer watchdog.LogOnPanic()
		// Two periodic jobs: decay/adjust every decayPeriod, report stats
		// every 10s. Both tickers are stopped when the loop exits.
		decayTicker := time.NewTicker(s.Backend.DecayPeriod)
		statsTicker := time.NewTicker(10 * time.Second)
		defer decayTicker.Stop()
		defer statsTicker.Stop()
		for {
			select {
			case <-decayTicker.C:
				s.update()
			case <-statsTicker.C:
				s.report()
			case <-s.exit:
				close(s.stopped)
				return
			}
		}
	}()
}

// update decays scores and rate computation coefficients
func (s *Sampler) update() {
	s.Backend.DecayScore()
	s.AdjustScoring()
}

// report flushes kept/seen counters from the backend to the metrics client.
func (s *Sampler) report() {
	kept, seen := s.Backend.report()
	metrics.Count("datadog.trace_agent.sampler.kept", kept, s.tags, 1)
	metrics.Count("datadog.trace_agent.sampler.seen", seen, s.tags, 1)
}

// Stop stops the main Run loop
func (s *Sampler) Stop() {
	close(s.exit)
	// Block until the Start goroutine has acknowledged and returned.
	<-s.stopped
}

// GetSampleRate returns the sample rate to apply to a trace.
// Note: trace and root are currently unused; the rate depends only on the
// signature and the configured extra rate.
func (s *Sampler) GetSampleRate(trace pb.Trace, root *pb.Span, signature Signature) float64 {
	return s.loadRate(s.GetSignatureSampleRate(signature) * s.extraRate)
}

// GetTargetTPSSampleRate returns an extra sample rate to apply if we are above targetTPS.
func (s *Sampler) GetTargetTPSSampleRate() float64 {
	// When above targetTPS, apply an additional sample rate to statistically respect the limit
	targetTPSrate := 1.0
	configuredTargetTPS := s.targetTPS.Load()
	if configuredTargetTPS > 0 {
		currentTPS := s.Backend.GetUpperSampledScore()
		if currentTPS > configuredTargetTPS {
			targetTPSrate = configuredTargetTPS / currentTPS
		}
	}

	return targetTPSrate
}

// setRateThresholdTo1 overrides the threshold above which computed rates
// snap to 1 (test/tuning hook; default is defaultSamplingRateThresholdTo1).
func (s *Sampler) setRateThresholdTo1(r float64) {
	s.rateThresholdTo1 = r
}
pkg/trace/sampler/coresampler.go
0.741019
0.436502
coresampler.go
starcoder
package curve

import (
	"errors"
	"fmt"
	"math/big"

	GF "github.com/armfazh/tozan-ecc/field"
)

// wcCurve is a Weierstrass curve
type wcCurve struct {
	*params
	RationalMap
}

// WC is the handle type for a wcCurve.
type WC = *wcCurve

// String renders the curve equation followed by its parameters.
func (e *wcCurve) String() string { return "y^2=x^3+Ax^2+Bx\n" + e.params.String() }

// New validates the parameters and attaches the rational map to the curve's
// Weierstrass form; it panics if the parameters are invalid.
func (e *wcCurve) New() EllCurve {
	if e.IsValid() {
		e.RationalMap = e.ToWeierstrass()
		return e
	}
	panic(errors.New("can't instantiate a WeierstrassC curve"))
}

// NewPoint returns the affine point (x, y), panicking if it is not on the curve.
func (e *wcCurve) NewPoint(x, y GF.Elt) (P Point) {
	if P = (&ptWc{e, &afPoint{x: x, y: y}}); e.IsOnCurve(P) {
		return P
	}
	panic(fmt.Errorf("%v not on %v", P, e))
}

// IsValid reports whether B(A^2-4B) is nonzero, the non-degeneracy
// condition for this curve shape.
func (e *wcCurve) IsValid() bool {
	F := e.F
	t0 := F.Sqr(e.A)      // A^2
	t1 := F.Add(e.B, e.B) // 2B
	t1 = F.Add(t1, t1)    // 4B
	t0 = F.Sub(t0, t1)    // A^2-4B
	t0 = F.Mul(t0, e.B)   // B(A^2-4B)
	return !F.IsZero(t0)  // B(A^2-4B) != 0
}

// IsEqual reports whether ec is the same curve (same field, same A and B).
// Fix: the assertion previously targeted *weCurve, which panics whenever a
// *wcCurve is passed — which is exactly what ptWc.IsEqual does.
func (e *wcCurve) IsEqual(ec EllCurve) bool {
	e0 := ec.(*wcCurve)
	return e.F.IsEqual(e0.F) && e.F.AreEqual(e.A, e0.A) && e.F.AreEqual(e.B, e0.B)
}

// Identity returns the point at infinity.
func (e *wcCurve) Identity() Point { return &infPoint{} }

// The group operations below are delegated via the rational map: the
// point(s) are pushed to the codomain (Weierstrass) curve, the operation is
// performed there, and the result is pulled back.

func (e *wcCurve) IsOnCurve(p Point) bool { return e.Codomain().IsOnCurve(e.Push(p)) }
func (e *wcCurve) Add(p, q Point) Point   { return e.Pull(e.Codomain().Add(e.Push(p), e.Push(q))) }
func (e *wcCurve) Double(p Point) Point   { return e.Pull(e.Codomain().Double(e.Push(p))) }
func (e *wcCurve) Neg(p Point) Point      { return e.Pull(e.Codomain().Neg(e.Push(p))) }
func (e *wcCurve) ClearCofactor(p Point) Point {
	return e.Pull(e.Codomain().ClearCofactor(e.Push(p)))
}
func (e *wcCurve) ScalarMult(p Point, k *big.Int) Point { return e.params.scalarMult(e, p, k) }

// ptWc is an affine point on a wcCurve curve.
type ptWc struct { *wcCurve *afPoint } func (p *ptWc) String() string { return p.afPoint.String() } func (p *ptWc) Copy() Point { return &ptWc{p.wcCurve, p.copy()} } func (p *ptWc) IsEqual(q Point) bool { qq := q.(*ptWc) return p.wcCurve.IsEqual(qq.wcCurve) && p.isEqual(p.F, qq.afPoint) } func (p *ptWc) IsIdentity() bool { return false } func (p *ptWc) IsTwoTorsion() bool { return p.F.IsZero(p.y) }
curve/wc.go
0.827375
0.454109
wc.go
starcoder
// Package big provides Decimal, a simple immutable wrapper around *big.Float.
package big

import (
	"database/sql/driver"
	"encoding/json"
	"errors"
	"fmt"
	"math/big"
)

var (
	flZero = *big.NewFloat(0)

	// Common constants.
	ZERO = NewDecimal(0)
	ONE  = NewDecimal(1)
	TEN  = NewDecimal(10)
)

// Decimal is the main exported type. It is a simple, immutable wrapper around a *big.Float
type Decimal struct {
	fl *big.Float
}

// NewDecimal creates a new Decimal type from a float value.
func NewDecimal(fl float64) Decimal {
	return Decimal{big.NewFloat(fl)}
}

// NewFromString creates a new Decimal type from a string value.
// NOTE(review): the UnmarshalText error is discarded, so an unparsable string
// silently yields a zero-valued Decimal — confirm callers expect this; the
// signature is kept for compatibility.
func NewFromString(str string) Decimal {
	bfl := new(big.Float)
	bfl.UnmarshalText([]byte(str))
	return Decimal{bfl}
}

// Add adds a decimal instance to another Decimal instance.
func (d Decimal) Add(addend Decimal) Decimal {
	return Decimal{d.cpy().Add(d.fl, addend.fl)}
}

// Add adds a decimal instance to another Decimal instance.
func Add(x, y float64) Decimal {
	return NewDecimal(x).Add(NewDecimal(y))
}

// Sub subtracts another decimal instance from this Decimal instance.
func (d Decimal) Sub(subtrahend Decimal) Decimal {
	return Decimal{d.cpy().Sub(d.fl, subtrahend.fl)}
}

// Sub subtracts another decimal instance from this Decimal instance.
func Sub(x, y float64) Decimal {
	return NewDecimal(x).Sub(NewDecimal(y))
}

// Mul multiplies another decimal instance with this Decimal instance.
func (d Decimal) Mul(factor Decimal) Decimal {
	return Decimal{d.cpy().Mul(d.fl, factor.fl)}
}

// Mul multiplies another decimal instance with this Decimal instance.
func Mul(x, y float64) Decimal {
	return NewDecimal(x).Mul(NewDecimal(y))
}

// Div divides this Decimal by the denominator passed.
func (d Decimal) Div(denominator Decimal) Decimal {
	return Decimal{d.cpy().Quo(d.fl, denominator.fl)}
}

// Div divides this Decimal by the denominator passed.
func Div(x, y float64) Decimal {
	return NewDecimal(x).Div(NewDecimal(y))
}

// Frac returns another Decimal instance representing this Decimal multiplied by the
// provided float.
func (d Decimal) Frac(f float64) Decimal {
	return d.Mul(NewDecimal(f))
}

// Neg returns this Decimal multiplied by -1.
func (d Decimal) Neg() Decimal {
	return d.Mul(NewDecimal(-1))
}

// Abs returns the absolute value of this Decimal
func (d Decimal) Abs() Decimal {
	if d.LT(ZERO) {
		return d.Neg()
	}
	return d
}

// Pow returns the decimal raised to the given integer power.
// Fix: negative exponents now return 1/(d^-exp); previously any exp < 0
// silently returned a copy of d. Behavior for exp >= 0 is unchanged.
func (d Decimal) Pow(exp int) Decimal {
	if exp == 0 {
		return ONE
	}
	if exp < 0 {
		// d^-n == 1 / d^n
		return ONE.Div(d.Pow(-exp))
	}
	x := Decimal{d.cpy()}
	for i := 1; i < exp; i++ {
		x = x.Mul(d)
	}
	return x
}

// Sqrt returns the decimal's square root.
// (Sqrt does not modify its argument, so one copy suffices as the receiver.)
func (d Decimal) Sqrt() Decimal {
	return Decimal{d.cpy().Sqrt(d.fl)}
}

// EQ returns true if this Decimal exactly equals the provided decimal.
func (d Decimal) EQ(other Decimal) bool {
	return d.Cmp(other) == 0
}

// LT returns true if this decimal is less than the provided decimal.
func (d Decimal) LT(other Decimal) bool {
	return d.Cmp(other) < 0
}

// LTE returns true if this decimal is less or equal to the provided decimal.
func (d Decimal) LTE(other Decimal) bool {
	return d.Cmp(other) <= 0
}

// GT returns true if this decimal is greater than the provided decimal.
func (d Decimal) GT(other Decimal) bool {
	return d.Cmp(other) > 0
}

// GTE returns true if this decimal is greater than or equal to the provided decimal.
func (d Decimal) GTE(other Decimal) bool {
	return d.Cmp(other) >= 0
}

// Cmp will return 1 if this decimal is greater than the provided, 0 if they are the same, and -1 if it is less.
func (d Decimal) Cmp(other Decimal) int {
	return d.fl.Cmp(other.fl)
}

// Float will return this Decimal as a float value.
// Note that there may be some loss of precision in this operation.
func (d Decimal) Float() float64 {
	f, _ := d.fl.Float64()
	return f
}

// Zero will return true if this Decimal is equal to 0.
func (d Decimal) Zero() bool {
	return d.fl.Cmp(&flZero) == 0
}

func (d Decimal) String() string {
	return d.fl.String()
}

// FormattedString renders the decimal with the given number of places after
// the decimal point (via float64, so precision may be lost).
func (d Decimal) FormattedString(places int) string {
	format := "%." + fmt.Sprint(places) + "f"
	fl := d.Float()
	return fmt.Sprintf(format, fl)
}

// MarshalJSON implements the json.Marshaler interface
func (d Decimal) MarshalJSON() ([]byte, error) {
	return d.fl.MarshalText()
}

// UnmarshalJSON implements the json.Unmarshaler interface
func (d *Decimal) UnmarshalJSON(b []byte) error {
	if d.fl == nil {
		d.fl = big.NewFloat(0)
	}
	return d.fl.UnmarshalText(b)
}

// Value implements the sql.Valuer interface
func (d Decimal) Value() (driver.Value, error) {
	return d.String(), nil
}

// Scan implements the sql.Scanner interface.
// (Rewritten to the idiomatic `switch v := src.(type)` form, avoiding the
// redundant re-assertions; behavior and the error message are unchanged.)
func (d *Decimal) Scan(src interface{}) error {
	switch v := src.(type) {
	case string:
		return json.Unmarshal([]byte(v), d)
	case []byte:
		return json.Unmarshal(v, d)
	default:
		return errors.New(fmt.Sprint("Passed value ", src, " should be a string"))
	}
}

// cpy returns a fresh *big.Float holding this decimal's value, preserving
// immutability of the receiver across arithmetic operations.
func (d Decimal) cpy() *big.Float {
	cpy := new(big.Float)
	return cpy.Copy(d.fl)
}
big/decimal.go
0.834272
0.437884
decimal.go
starcoder
package gen import ( _ "embed" // Needed to support go:embed directive pschema "github.com/pulumi/pulumi/pkg/v3/codegen/schema" v1 "k8s.io/api/core/v1" ) var serviceSpec = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Properties: map[string]pschema.PropertySpec{ "type": { TypeSpec: pschema.TypeSpec{ OneOf: []pschema.TypeSpec{ {Type: "string"}, {Ref: "#/types/kubernetes:core/v1:ServiceSpecType"}, }, }, }, }, }, } var serviceSpecType = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Type: "string", }, Enum: []pschema.EnumValueSpec{ {Value: v1.ServiceTypeExternalName}, {Value: v1.ServiceTypeClusterIP}, {Value: v1.ServiceTypeNodePort}, {Value: v1.ServiceTypeLoadBalancer}, }, } //go:embed examples/overlays/chartV2.md var helmV2ChartMD string var helmV2ChartResource = pschema.ResourceSpec{ IsComponent: true, ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: helmV2ChartMD, Properties: map[string]pschema.PropertySpec{ "resources": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Resources created by the Chart.", }, "urn": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The stable logical URN used to distinctly address a resource, both before and after deployments.", }, }, Type: "object", }, InputProperties: map[string]pschema.PropertySpec{ "chart": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The name of the chart to deploy. If [repo] is provided, this chart name will be prefixed by the repo name. 
Example: repo: \"stable\", chart: \"nginx-ingress\" -> \"stable/nginx-ingress\" Example: chart: \"stable/nginx-ingress\" -> \"stable/nginx-ingress\"\n\nRequired if specifying `ChartOpts` for a remote chart.", }, "fetchOpts": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:helm.sh/v2:FetchOpts", }, Description: "Additional options to customize the fetching of the Helm chart.", }, "path": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The path to the chart directory which contains the `Chart.yaml` file.\n\nRequired if specifying `LocalChartOpts`.", }, "namespace": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The optional namespace to install chart resources into.", }, "repo": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The repository name of the chart to deploy. Example: \"stable\".\n\nUsed only when specifying options for a remote chart.", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Optional array of transformations to apply to resources that will be created by this chart prior to creation. Allows customization of the chart behaviour without directly modifying the chart itself.", }, "values": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Overrides for chart values.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The version of the chart to deploy. 
If not provided, the latest version will be deployed.", }, }, } //go:embed examples/overlays/chartV3.md var helmV3ChartMD string var helmV3ChartResource = pschema.ResourceSpec{ IsComponent: true, ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: helmV3ChartMD, Properties: map[string]pschema.PropertySpec{ "resources": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Resources created by the Chart.", }, "urn": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The stable logical URN used to distinctly address a resource, both before and after deployments.", }, }, Type: "object", }, InputProperties: map[string]pschema.PropertySpec{ "chart": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The name of the chart to deploy. If [repo] is provided, this chart name will be prefixed by the repo name. Example: repo: \"stable\", chart: \"nginx-ingress\" -> \"stable/nginx-ingress\" Example: chart: \"stable/nginx-ingress\" -> \"stable/nginx-ingress\"\n\nRequired if specifying `ChartOpts` for a remote chart.", }, "fetchOpts": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:helm.sh/v3:FetchOpts", }, Description: "Additional options to customize the fetching of the Helm chart.", }, "path": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The path to the chart directory which contains the `Chart.yaml` file.\n\nRequired if specifying `LocalChartOpts`.", }, "namespace": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The optional namespace to install chart resources into.", }, "repo": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The repository name of the chart to deploy. Example: \"stable\".\n\nUsed only when specifying options for a remote chart.", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. 
Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Optional array of transformations to apply to resources that will be created by this chart prior to creation. Allows customization of the chart behaviour without directly modifying the chart itself.", }, "values": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Overrides for chart values.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The version of the chart to deploy. If not provided, the latest version will be deployed.", }, }, } var helmV2FetchOpts = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: "Additional options to customize the fetching of the Helm chart.", Properties: map[string]pschema.PropertySpec{ "caFile": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Verify certificates of HTTPS-enabled servers using this CA bundle.", }, "certFile": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Identify HTTPS client using this SSL certificate file.", }, "destination": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Location to write the chart. If this and tardir are specified, tardir is appended to this (default \".\").", }, "devel": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Use development versions, too. Equivalent to version '>0.0.0-0'. If –version is set, this is ignored.", }, "home": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Location of your Helm config. 
Overrides $HELM_HOME (default \"/Users/abc/.helm\").", }, "keyFile": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Identify HTTPS client using this SSL key file.", }, "keyring": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Keyring containing public keys (default “/Users/abc/.gnupg/pubring.gpg”).", }, "password": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Chart repository password.", }, "prov": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Fetch the provenance file, but don’t perform verification.", }, "repo": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Chart repository url where to locate the requested chart.", }, "untar": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set to false, will leave the chart as a tarball after downloading.", }, "untardir": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "If untar is specified, this flag specifies the name of the directory into which the chart is expanded (default \".\").", }, "username": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Chart repository username.", }, "verify": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Verify the package against its signature.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Specific version of a chart. Without this, the latest version is fetched.", }, }, Type: "object", }, } var helmV3RepoOpts = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Description: "Specification defining the Helm chart repository to use.", Properties: map[string]pschema.PropertySpec{ "repo": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Repository where to locate the requested chart. 
If is a URL the chart is installed without installing the repository.", }, "keyFile": { // TODO: Content or file TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The repository's cert key file", }, "certFile": { // TODO: Content or file TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The repository's cert file", }, "caFile": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The Repository's CA File", }, "username": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Username for HTTP basic authentication", }, "password": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Secret: true, Description: "Password for HTTP basic authentication", }, }, Language: map[string]pschema.RawMessage{ "nodejs": rawMessage(map[string][]string{ "requiredOutputs": { "repo", "keyFile", "certFile", "caFile", "username", "password", }}), }, Type: "object", }, } var helmV3ReleaseStatus = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Required: []string{"status"}, Properties: map[string]pschema.PropertySpec{ "name": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Name is the name of the release.", }, "revision": { TypeSpec: pschema.TypeSpec{ Type: "integer", }, Description: "Version is an int32 which represents the version of the release.", }, "namespace": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Namespace is the kubernetes namespace of the release.", }, "chart": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The name of the chart.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "A SemVer 2 conformant version string of the chart.", }, "appVersion": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The version number of the application being deployed.", }, "status": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Status of the release.", }, }, Language: map[string]pschema.RawMessage{ "nodejs": 
rawMessage(map[string][]string{ "requiredOutputs": { "name", "revision", "namespace", "chart", "version", "appVersion", "values", "status", }}), }, Type: "object", }, } var kubeClientSettings = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Description: "Options for tuning the Kubernetes client used by a Provider.", Properties: map[string]pschema.PropertySpec{ "burst": { Description: "Maximum burst for throttle. Default value is 10.", TypeSpec: pschema.TypeSpec{Type: "integer"}, DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_CLIENT_BURST", }, }, }, "qps": { Description: "Maximum queries per second (QPS) to the API server from this client. Default value is 5.", TypeSpec: pschema.TypeSpec{Type: "number"}, DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_CLIENT_QPS", }, }, }, }, Type: "object", }, } var helmReleaseSettings = pschema.ComplexTypeSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Description: "Options to configure the Helm Release resource.", Properties: map[string]pschema.PropertySpec{ "driver": { DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_HELM_DRIVER", }, }, Description: "The backend storage driver for Helm. 
Values are: configmap, secret, memory, sql.", TypeSpec: pschema.TypeSpec{Type: "string"}, }, "pluginsPath": { DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_HELM_PLUGINS_PATH", }, }, Description: "The path to the helm plugins directory.", TypeSpec: pschema.TypeSpec{Type: "string"}, }, "registryConfigPath": { DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_HELM_REGISTRY_CONFIG_PATH", }, }, Description: "The path to the registry config file.", TypeSpec: pschema.TypeSpec{Type: "string"}, }, "repositoryConfigPath": { DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_HELM_REPOSITORY_CONFIG_PATH", }, }, Description: "The path to the file containing repository names and URLs.", TypeSpec: pschema.TypeSpec{Type: "string"}, }, "repositoryCache": { DefaultInfo: &pschema.DefaultSpec{ Environment: []string{ "PULUMI_K8S_HELM_REPOSITORY_CACHE", }, }, Description: "The path to the file containing cached repository indexes.", TypeSpec: pschema.TypeSpec{Type: "string"}, }, }, Type: "object", }, } //go:embed examples/overlays/helmRelease.md var helmV3ReleaseMD string var helmV3ReleaseResource = pschema.ResourceSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ Description: helmV3ReleaseMD, Properties: map[string]pschema.PropertySpec{ "name": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Release name.", }, "repositoryOpts": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:helm.sh/v3:RepositoryOpts", }, Description: "Specification defining the Helm chart repository to use.", }, "chart": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Chart name to be installed. A path may be used.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Specify the exact chart version to install. If this is not specified, the latest version is installed.", }, "devel": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Use chart development versions, too. 
Equivalent to version '>0.0.0-0'. If `version` is set, this is ignored.", }, "valueYamlFiles": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Asset", }, }, Description: "List of assets (raw yaml files). Content is read and merged with values (with values taking precedence).", }, "values": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Custom values set for the release.", }, "manifest": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "The rendered manifests as JSON. Not yet supported.", }, "resourceNames": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Type: "string", }, }, }, Description: "Names of resources created by the release grouped by \"kind/version\".", }, "namespace": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Namespace to install the release into.", }, "verify": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Verify the package before installing it.", }, "keyring": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Location of public keys used for verification. Used only if `verify` is true", }, "timeout": { TypeSpec: pschema.TypeSpec{ Type: "integer", }, Description: "Time in seconds to wait for any individual kubernetes operation.", }, "disableWebhooks": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Prevent hooks from running.", }, "disableCRDHooks": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Prevent CRD hooks from, running, but run other hooks. See helm install --no-crd-hook", }, "reuseValues": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "When upgrading, reuse the last release's values and merge in any overrides. 
If 'resetValues' is specified, this is ignored", }, "resetValues": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "When upgrading, reset the values to the ones built into the chart.", }, "forceUpdate": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Force resource update through delete/recreate if needed.", }, "recreatePods": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Perform pods restart during upgrade/rollback.", }, "cleanupOnFail": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Allow deletion of new resources created in this upgrade when upgrade fails.", }, "maxHistory": { TypeSpec: pschema.TypeSpec{ Type: "integer", }, Description: "Limit the maximum number of revisions saved per release. Use 0 for no limit.", }, "atomic": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, installation process purges chart on fail. `skipAwait` will be disabled automatically if atomic is used.", }, "skipCrds": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, no CRDs will be installed. By default, CRDs are installed if not already present.", }, "renderSubchartNotes": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, render subchart notes along with the parent.", }, "disableOpenapiValidation": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema", }, "skipAwait": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "By default, the provider waits until all resources are in a ready state before marking the release as successful. Setting this to true will skip such await logic.", }, "waitForJobs": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Will wait until all Jobs have been completed before marking the release as successful. 
This is ignored if `skipAwait` is enabled.", }, "dependencyUpdate": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Run helm dependency update before installing the chart.", }, "replace": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Re-use the given name, even if that name is already used. This is unsafe in production", }, "description": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Add a custom description", }, "createNamespace": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Create the namespace if it does not exist.", }, "postrender": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Postrender command to run.", }, "lint": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Run helm lint when planning.", }, "status": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:helm.sh/v3:ReleaseStatus", }, Description: "Status of the deployed release.", }, }, Type: "object", Required: []string{ "chart", "status", }, Language: map[string]pschema.RawMessage{ "nodejs": rawMessage(map[string][]string{ "requiredOutputs": { "name", "chart", "version", "devel", "values", "set", "manifest", "namespace", "verify", "keyring", "timeout", "disableWebhooks", "disableCRDHooks", "reuseValues", "resetValues", "forceUpdate", "recreatePods", "cleanupOnFail", "maxHistory", "atomic", "skipCrds", "renderSubchartNotes", "disableOpenapiValidation", "skipAwait", "waitForJobs", "dependencyUpdate", "replace", "description", "createNamespace", "postrender", "lint", "status", }, }), }, }, InputProperties: map[string]pschema.PropertySpec{ "name": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Release name.", }, "repositoryOpts": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:helm.sh/v3:RepositoryOpts", }, Description: "Specification defining the Helm chart repository to use.", }, "chart": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Chart name to be installed. 
A path may be used.", }, "version": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Specify the exact chart version to install. If this is not specified, the latest version is installed.", }, "devel": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Use chart development versions, too. Equivalent to version '>0.0.0-0'. If `version` is set, this is ignored.", }, "valueYamlFiles": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Asset", }, }, Description: "List of assets (raw yaml files). Content is read and merged with values.", }, "values": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "Custom values set for the release.", }, "manifest": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "The rendered manifests as JSON. Not yet supported.", }, "resourceNames": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Type: "string", }, }, }, Description: "Names of resources created by the release grouped by \"kind/version\".", }, "namespace": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Namespace to install the release into.", }, "verify": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Verify the package before installing it.", }, "keyring": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Location of public keys used for verification. 
Used only if `verify` is true", }, "timeout": { TypeSpec: pschema.TypeSpec{ Type: "integer", }, Description: "Time in seconds to wait for any individual kubernetes operation.", }, "disableWebhooks": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Prevent hooks from running.", }, "disableCRDHooks": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Prevent CRD hooks from, running, but run other hooks. See helm install --no-crd-hook", }, "reuseValues": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "When upgrading, reuse the last release's values and merge in any overrides. If 'resetValues' is specified, this is ignored", }, "resetValues": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "When upgrading, reset the values to the ones built into the chart.", }, "forceUpdate": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Force resource update through delete/recreate if needed.", }, "recreatePods": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Perform pods restart during upgrade/rollback.", }, "cleanupOnFail": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Allow deletion of new resources created in this upgrade when upgrade fails.", }, "maxHistory": { TypeSpec: pschema.TypeSpec{ Type: "integer", }, Description: "Limit the maximum number of revisions saved per release. Use 0 for no limit.", }, "atomic": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, installation process purges chart on fail. `skipAwait` will be disabled automatically if atomic is used.", }, "skipCrds": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, no CRDs will be installed. 
By default, CRDs are installed if not already present.", }, "renderSubchartNotes": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, render subchart notes along with the parent.", }, "disableOpenapiValidation": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "If set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema", }, "skipAwait": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "By default, the provider waits until all resources are in a ready state before marking the release as successful. Setting this to true will skip such await logic.", }, "waitForJobs": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Will wait until all Jobs have been completed before marking the release as successful. This is ignored if `skipAwait` is enabled.", }, "dependencyUpdate": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Run helm dependency update before installing the chart.", }, "replace": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Re-use the given name, even if that name is already used. 
This is unsafe in production", }, "description": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Add a custom description", }, "createNamespace": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Create the namespace if it does not exist.", }, "postrender": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Postrender command to run.", }, "lint": { TypeSpec: pschema.TypeSpec{ Type: "boolean", }, Description: "Run helm lint when planning.", }, "compat": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Const: "true", }, }, RequiredInputs: []string{ "chart", }, } //go:embed examples/overlays/kustomizeDirectory.md var kustomizeDirectoryMD string var kustomizeDirectoryResource = pschema.ResourceSpec{ IsComponent: true, ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: kustomizeDirectoryMD, Properties: map[string]pschema.PropertySpec{ "directory": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The directory containing the kustomization to apply. The value can be a local directory or a folder in a\ngit repository.\nExample: ./helloWorld\nExample: https://github.com/kubernetes-sigs/kustomize/tree/master/examples/helloWorld", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "A set of transformations to apply to Kubernetes resource definitions before registering with engine.", }, }, Type: "object", Required: []string{ "directory", }, }, InputProperties: map[string]pschema.PropertySpec{ "directory": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "The directory containing the kustomization to apply. 
The value can be a local directory or a folder in a\ngit repository.\nExample: ./helloWorld\nExample: https://github.com/kubernetes-sigs/kustomize/tree/master/examples/helloWorld", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "A set of transformations to apply to Kubernetes resource definitions before registering with engine.", }, }, RequiredInputs: []string{ "directory", }, } //go:embed examples/overlays/configFile.md var configFileMD string var yamlConfigFileResource = pschema.ResourceSpec{ IsComponent: true, ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: configFileMD, Properties: map[string]pschema.PropertySpec{ "resources": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Resources created by the ConfigFile.", }, }, Type: "object", }, InputProperties: map[string]pschema.PropertySpec{ "file": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Path or a URL that uniquely identifies a file.", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. 
Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "A set of transformations to apply to Kubernetes resource definitions before registering with engine.", }, }, RequiredInputs: []string{ "file", }, } //go:embed examples/overlays/configGroup.md var configGroupMD string var yamlConfigGroupResource = pschema.ResourceSpec{ IsComponent: true, ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: configGroupMD, Properties: map[string]pschema.PropertySpec{ "resources": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Resources created by the ConfigGroup.", }, }, Type: "object", }, InputProperties: map[string]pschema.PropertySpec{ "files": { TypeSpec: pschema.TypeSpec{ OneOf: []pschema.TypeSpec{ { Type: "string", }, { Type: "array", Items: &pschema.TypeSpec{ Type: "string", }, }, }, }, Description: "Set of paths or a URLs that uniquely identify files.", }, "objs": { TypeSpec: pschema.TypeSpec{ OneOf: []pschema.TypeSpec{ { Ref: "pulumi.json#/Any", }, { Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, }, }, Description: "Objects representing Kubernetes resources.", }, "resourcePrefix": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "An optional prefix for the auto-generated resource names. 
Example: A resource created with resourcePrefix=\"foo\" would produce a resource named \"foo-resourceName\".", }, "transformations": { TypeSpec: pschema.TypeSpec{ Type: "array", Items: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "A set of transformations to apply to Kubernetes resource definitions before registering with engine.", }, "yaml": { TypeSpec: pschema.TypeSpec{ OneOf: []pschema.TypeSpec{ { Type: "string", }, { Type: "array", Items: &pschema.TypeSpec{ Type: "string", }, }, }, }, Description: "YAML text containing Kubernetes resource definitions.", }, }, } var apiextentionsCustomResource = pschema.ResourceSpec{ ObjectTypeSpec: pschema.ObjectTypeSpec{ IsOverlay: true, Description: "CustomResource represents an instance of a CustomResourceDefinition (CRD). For example, the\n CoreOS Prometheus operator exposes a CRD `monitoring.coreos.com/ServiceMonitor`; to\n instantiate this as a Pulumi resource, one could call `new CustomResource`, passing the\n `ServiceMonitor` resource definition as an argument.", Properties: map[string]pschema.PropertySpec{ "apiVersion": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", }, "kind": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", }, "metadata": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:meta/v1:ObjectMeta", }, Description: "Standard object metadata; More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata.", }, }, Type: "object", Required: []string{ "apiVersion", "kind", }, }, InputProperties: map[string]pschema.PropertySpec{ "apiVersion": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources", }, "kind": { TypeSpec: pschema.TypeSpec{ Type: "string", }, Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", }, "metadata": { TypeSpec: pschema.TypeSpec{ Ref: "#/types/kubernetes:meta/v1:ObjectMeta", }, Description: "Standard object metadata; More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata.", }, "others": { TypeSpec: pschema.TypeSpec{ Type: "object", AdditionalProperties: &pschema.TypeSpec{ Ref: "pulumi.json#/Any", }, }, Description: "This field is not an actual property. 
It is used to represent custom property names and their values that can be passed in addition to the other input properties.", }, }, RequiredInputs: []string{ "apiVersion", "kind", }, } func init() { typeOverlays["kubernetes:core/v1:ServiceSpec"] = serviceSpec typeOverlays["kubernetes:core/v1:ServiceSpecType"] = serviceSpecType typeOverlays["kubernetes:helm.sh/v2:FetchOpts"] = helmV2FetchOpts typeOverlays["kubernetes:helm.sh/v3:FetchOpts"] = helmV2FetchOpts // v2 fetch opts are identical to v3 typeOverlays["kubernetes:helm.sh/v3:RepositoryOpts"] = helmV3RepoOpts typeOverlays["kubernetes:helm.sh/v3:ReleaseStatus"] = helmV3ReleaseStatus typeOverlays["kubernetes:index:KubeClientSettings"] = kubeClientSettings typeOverlays["kubernetes:index:HelmReleaseSettings"] = helmReleaseSettings resourceOverlays["kubernetes:apiextensions.k8s.io:CustomResource"] = apiextentionsCustomResource resourceOverlays["kubernetes:helm.sh/v2:Chart"] = helmV2ChartResource resourceOverlays["kubernetes:helm.sh/v3:Chart"] = helmV3ChartResource resourceOverlays["kubernetes:helm.sh/v3:Release"] = helmV3ReleaseResource resourceOverlays["kubernetes:kustomize:Directory"] = kustomizeDirectoryResource resourceOverlays["kubernetes:yaml:ConfigFile"] = yamlConfigFileResource resourceOverlays["kubernetes:yaml:ConfigGroup"] = yamlConfigGroupResource }
provider/pkg/gen/overlays.go
0.60054
0.404566
overlays.go
starcoder
package divide_and_conquer import ( "fmt" "reflect" "runtime" "time" "github.com/davecgh/go-spew/spew" ) // https://medium.com/capital-one-developers/buffered-channels-in-go-what-are-they-good-for-43703871828 // One common pattern for goroutines is fan-out. When you want to apply the same data to multiple algorithms, // you can launch a goroutine for each subtask, and then gather the data back in when they are done. // For example, you might want to process the same data via multiple scoring algorithms and return back // all of the scores or pull data from multiple microservices to compose a single page. A buffered channel is an // ideal way to gather the data back from your subtasks. func RunDivideAndConquer() { type in struct { a int b int } type out struct { source string result int } evaluators := []Evaluator{ EvaluatorFunc(func(inV interface{}) (interface{}, error) { time.Sleep(time.Second * 1) i := inV.(in) r := i.a + i.b return out{"Plus", r}, nil }), EvaluatorFunc(func(inV interface{}) (interface{}, error) { i := inV.(in) r := i.a * i.b return out{"Multi", r}, nil }), EvaluatorFunc(func(inV interface{}) (interface{}, error) { i := inV.(in) r := i.a - i.b return out{"min", r}, nil }), EvaluatorFunc(func(inV interface{}) (interface{}, error) { i := inV.(in) r := i.a / i.b return out{"divider", r}, nil }), } r, errors := DivideAndConquer(in{2, 3}, evaluators, 10*time.Millisecond) spew.Dump(r, errors) } type Evaluator interface { Evaluate(data interface{}) (interface{}, error) Name() string } type EvaluatorFunc func(interface{}) (interface{}, error) func (ef EvaluatorFunc) Evaluate(in interface{}) (interface{}, error) { return ef(in) } func (ef EvaluatorFunc) Name() string { return runtime.FuncForPC(reflect.ValueOf(ef).Pointer()).Name() } func DivideAndConquer(data interface{}, evaluators []Evaluator, timeout time.Duration) ([]interface{}, []error) { gather := make(chan interface{}, len(evaluators)) errors := make(chan error, len(evaluators)) for _, v := range 
evaluators { go func(e Evaluator) { // Why not just use an unbuffered channel? The answer is that we don’t want to leak any goroutines. // While the Go runtime is capable of handling thousands or hundreds of thousands of goroutines at a time, // each goroutine does use some resources, so you don’t want to leave them hanging around when // you don’t have to. If you do, a long-running Go program will start performing poorly ch := make(chan interface{}, 1) ech := make(chan error, 1) go func() { result, err := e.Evaluate(data) if err != nil { ech <- err } else { ch <- result } }() // Remember that an unbuffered channel pauses the writing goroutine until there’s a read by another // goroutine. If the timeout triggers before the Evaluator finishes executing, // the read will never happen because the only place those channels are read is in the outer // goroutine’s select statement, and the outer goroutine exited after the timeout triggered. // This means that using an unbuffered channel will cause the inner goroutine to wait forever // whenever there is a timeout, leaking the goroutine. Again, the buffered channel proves // useful because we know exactly how many writes we can expect. select { case r := <-ch: gather <- r case err := <-ech: errors <- err case <-time.After(timeout): errors <- fmt.Errorf("%s timeout after %v on %v", e.Name(), timeout, data) } }(v) } out := make([]interface{}, 0, len(evaluators)) errs := make([]error, 0, len(evaluators)) for range evaluators { select { case r := <-gather: out = append(out, r) case e := <-errors: errs = append(errs, e) } } return out, errs }
concurrency/subtasks/divide_and_conquer/divide_and_conquer.go
0.63861
0.411111
divide_and_conquer.go
starcoder
package onshape import ( "encoding/json" ) // BTPExpressionCall240 struct for BTPExpressionCall240 type BTPExpressionCall240 struct { BTPExpression9 BtType *string `json:"btType,omitempty"` FunctionExpression *BTPExpression9 `json:"functionExpression,omitempty"` FunctionName *BTPName261 `json:"functionName,omitempty"` FunctionNameString *string `json:"functionNameString,omitempty"` SpaceInEmptyList *BTPSpace10 `json:"spaceInEmptyList,omitempty"` } // NewBTPExpressionCall240 instantiates a new BTPExpressionCall240 object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewBTPExpressionCall240() *BTPExpressionCall240 { this := BTPExpressionCall240{} return &this } // NewBTPExpressionCall240WithDefaults instantiates a new BTPExpressionCall240 object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewBTPExpressionCall240WithDefaults() *BTPExpressionCall240 { this := BTPExpressionCall240{} return &this } // GetBtType returns the BtType field value if set, zero value otherwise. func (o *BTPExpressionCall240) GetBtType() string { if o == nil || o.BtType == nil { var ret string return ret } return *o.BtType } // GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTPExpressionCall240) GetBtTypeOk() (*string, bool) { if o == nil || o.BtType == nil { return nil, false } return o.BtType, true } // HasBtType returns a boolean if a field has been set. func (o *BTPExpressionCall240) HasBtType() bool { if o != nil && o.BtType != nil { return true } return false } // SetBtType gets a reference to the given string and assigns it to the BtType field. 
func (o *BTPExpressionCall240) SetBtType(v string) { o.BtType = &v } // GetFunctionExpression returns the FunctionExpression field value if set, zero value otherwise. func (o *BTPExpressionCall240) GetFunctionExpression() BTPExpression9 { if o == nil || o.FunctionExpression == nil { var ret BTPExpression9 return ret } return *o.FunctionExpression } // GetFunctionExpressionOk returns a tuple with the FunctionExpression field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTPExpressionCall240) GetFunctionExpressionOk() (*BTPExpression9, bool) { if o == nil || o.FunctionExpression == nil { return nil, false } return o.FunctionExpression, true } // HasFunctionExpression returns a boolean if a field has been set. func (o *BTPExpressionCall240) HasFunctionExpression() bool { if o != nil && o.FunctionExpression != nil { return true } return false } // SetFunctionExpression gets a reference to the given BTPExpression9 and assigns it to the FunctionExpression field. func (o *BTPExpressionCall240) SetFunctionExpression(v BTPExpression9) { o.FunctionExpression = &v } // GetFunctionName returns the FunctionName field value if set, zero value otherwise. func (o *BTPExpressionCall240) GetFunctionName() BTPName261 { if o == nil || o.FunctionName == nil { var ret BTPName261 return ret } return *o.FunctionName } // GetFunctionNameOk returns a tuple with the FunctionName field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTPExpressionCall240) GetFunctionNameOk() (*BTPName261, bool) { if o == nil || o.FunctionName == nil { return nil, false } return o.FunctionName, true } // HasFunctionName returns a boolean if a field has been set. func (o *BTPExpressionCall240) HasFunctionName() bool { if o != nil && o.FunctionName != nil { return true } return false } // SetFunctionName gets a reference to the given BTPName261 and assigns it to the FunctionName field. 
func (o *BTPExpressionCall240) SetFunctionName(v BTPName261) { o.FunctionName = &v } // GetFunctionNameString returns the FunctionNameString field value if set, zero value otherwise. func (o *BTPExpressionCall240) GetFunctionNameString() string { if o == nil || o.FunctionNameString == nil { var ret string return ret } return *o.FunctionNameString } // GetFunctionNameStringOk returns a tuple with the FunctionNameString field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTPExpressionCall240) GetFunctionNameStringOk() (*string, bool) { if o == nil || o.FunctionNameString == nil { return nil, false } return o.FunctionNameString, true } // HasFunctionNameString returns a boolean if a field has been set. func (o *BTPExpressionCall240) HasFunctionNameString() bool { if o != nil && o.FunctionNameString != nil { return true } return false } // SetFunctionNameString gets a reference to the given string and assigns it to the FunctionNameString field. func (o *BTPExpressionCall240) SetFunctionNameString(v string) { o.FunctionNameString = &v } // GetSpaceInEmptyList returns the SpaceInEmptyList field value if set, zero value otherwise. func (o *BTPExpressionCall240) GetSpaceInEmptyList() BTPSpace10 { if o == nil || o.SpaceInEmptyList == nil { var ret BTPSpace10 return ret } return *o.SpaceInEmptyList } // GetSpaceInEmptyListOk returns a tuple with the SpaceInEmptyList field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BTPExpressionCall240) GetSpaceInEmptyListOk() (*BTPSpace10, bool) { if o == nil || o.SpaceInEmptyList == nil { return nil, false } return o.SpaceInEmptyList, true } // HasSpaceInEmptyList returns a boolean if a field has been set. func (o *BTPExpressionCall240) HasSpaceInEmptyList() bool { if o != nil && o.SpaceInEmptyList != nil { return true } return false } // SetSpaceInEmptyList gets a reference to the given BTPSpace10 and assigns it to the SpaceInEmptyList field. 
func (o *BTPExpressionCall240) SetSpaceInEmptyList(v BTPSpace10) { o.SpaceInEmptyList = &v } func (o BTPExpressionCall240) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} serializedBTPExpression9, errBTPExpression9 := json.Marshal(o.BTPExpression9) if errBTPExpression9 != nil { return []byte{}, errBTPExpression9 } errBTPExpression9 = json.Unmarshal([]byte(serializedBTPExpression9), &toSerialize) if errBTPExpression9 != nil { return []byte{}, errBTPExpression9 } if o.BtType != nil { toSerialize["btType"] = o.BtType } if o.FunctionExpression != nil { toSerialize["functionExpression"] = o.FunctionExpression } if o.FunctionName != nil { toSerialize["functionName"] = o.FunctionName } if o.FunctionNameString != nil { toSerialize["functionNameString"] = o.FunctionNameString } if o.SpaceInEmptyList != nil { toSerialize["spaceInEmptyList"] = o.SpaceInEmptyList } return json.Marshal(toSerialize) } type NullableBTPExpressionCall240 struct { value *BTPExpressionCall240 isSet bool } func (v NullableBTPExpressionCall240) Get() *BTPExpressionCall240 { return v.value } func (v *NullableBTPExpressionCall240) Set(val *BTPExpressionCall240) { v.value = val v.isSet = true } func (v NullableBTPExpressionCall240) IsSet() bool { return v.isSet } func (v *NullableBTPExpressionCall240) Unset() { v.value = nil v.isSet = false } func NewNullableBTPExpressionCall240(val *BTPExpressionCall240) *NullableBTPExpressionCall240 { return &NullableBTPExpressionCall240{value: val, isSet: true} } func (v NullableBTPExpressionCall240) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableBTPExpressionCall240) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
onshape/model_btp_expression_call_240.go
0.702938
0.478468
model_btp_expression_call_240.go
starcoder
package rs485 import ( "encoding/binary" "math" ) // BigEndianUint32Swapped converts bytes to uint32 wrapped as uint64 with swapped word order. // To use the result as int32 value make sure to convert to uint32 first before converting to int32. func BigEndianUint32Swapped(b []byte) uint64 { _ = b[3] // bounds check hint to compiler; see golang.org/issue/14808 return uint64(b[3])<<16 | uint64(b[2])<<24 | uint64(b[1]) | uint64(b[0])<<8 } // RTUTransform functions convert RTU bytes to meaningful data types. type RTUTransform func([]byte) float64 // RTUIeee754ToFloat64 converts 32 bit IEEE 754 float readings func RTUIeee754ToFloat64(b []byte) float64 { bits := binary.BigEndian.Uint32(b) f := math.Float32frombits(bits) return float64(f) } // RTUIeee754ToFloat64Swapped converts 32 bit IEEE 754 float readings func RTUIeee754ToFloat64Swapped(b []byte) float64 { bits := uint32(BigEndianUint32Swapped(b)) f := math.Float32frombits(bits) return float64(f) } // RTUUint16ToFloat64 converts 16 bit unsigned integer readings func RTUUint16ToFloat64(b []byte) float64 { u := binary.BigEndian.Uint16(b) return float64(u) } // RTUUint32ToFloat64 converts 32 bit unsigned integer readings func RTUUint32ToFloat64(b []byte) float64 { u := binary.BigEndian.Uint32(b) return float64(u) } // RTUUint32ToFloat64Swapped converts 32 bit unsigned integer readings with swapped word order func RTUUint32ToFloat64Swapped(b []byte) float64 { u := uint32(BigEndianUint32Swapped(b)) return float64(u) } // RTUUint64ToFloat64 converts 64 bit unsigned integer readings func RTUUint64ToFloat64(b []byte) float64 { u := binary.BigEndian.Uint64(b) return float64(u) } // RTUInt16ToFloat64 converts 16 bit signed integer readings func RTUInt16ToFloat64(b []byte) float64 { u := int16(binary.BigEndian.Uint16(b)) return float64(u) } // RTUInt32ToFloat64 converts 32 bit signed integer readings func RTUInt32ToFloat64(b []byte) float64 { u := int32(binary.BigEndian.Uint32(b)) return float64(u) } // RTUInt32ToFloat64Swapped 
converts 32 bit unsigned integer readings with swapped word order func RTUInt32ToFloat64Swapped(b []byte) float64 { u := int32(BigEndianUint32Swapped(b)) return float64(u) } // RTUInt64ToFloat64 converts 64 bit signed integer readings func RTUInt64ToFloat64(b []byte) float64 { u := int64(binary.BigEndian.Uint64(b)) return float64(u) } // MakeScaledTransform creates an RTUTransform with applied scaler func MakeScaledTransform(transform RTUTransform, scaler float64) RTUTransform { return RTUTransform(func(b []byte) float64 { unscaled := transform(b) f := unscaled / scaler return f }) }
meters/rs485/transform.go
0.760917
0.46393
transform.go
starcoder
package ast // ChildVisitor is a callback function used by VisitChildren to visit nodes. type ChildVisitor func(interface{}) // ChildTransformer is a callback function used by TransformChildren to visit nodes. type ChildTransformer func(child, parent interface{}) interface{} // TransformChildren is a helper function which calls the provided callback for // each child node of the argument, replacing their values with the values // returned from v. func TransformChildren(n interface{}, v ChildTransformer) { switch n := n.(type) { case *BinaryExpr: n.Left = v(n.Left, n).(Expression) n.Right = v(n.Right, n).(Expression) case *IndexExpr: n.Base = v(n.Base, n).(Expression) n.Index = v(n.Index, n).(Expression) case *ConditionalExpr: n.Cond = v(n.Cond, n).(Expression) n.TrueExpr = v(n.TrueExpr, n).(Expression) n.FalseExpr = v(n.FalseExpr, n).(Expression) case *UnaryExpr: n.Expr = v(n.Expr, n).(Expression) case *DotExpr: n.Expr = v(n.Expr, n).(Expression) case *ConstantExpr: n.Value = v(n.Value, n).(Value) case *VarRefExpr: n.Sym = v(n.Sym, n).(ValueSymbol) case *TypeConversionExpr: if n.RetType != nil { n.RetType = v(n.RetType, n).(Type) } case *ParenExpr: n.Expr = v(n.Expr, n).(Expression) case *CallExpr: n.Callee = v(n.Callee, n).(Expression) for i, c := range n.Args { n.Args[i] = v(c, n).(Expression) } case *DeclarationStmt: n.Decl = v(n.Decl, n) case *ExpressionStmt: n.Expr = v(n.Expr, n).(Expression) case *CaseStmt: n.Expr = v(n.Expr, n).(Expression) case *SwitchStmt: n.Expr = v(n.Expr, n).(Expression) n.Stmts = v(n.Stmts, n).(*CompoundStmt) case *WhileStmt: n.Cond = v(n.Cond, n) n.Stmt = v(n.Stmt, n) case *DoStmt: n.Stmt = v(n.Stmt, n) n.Expr = v(n.Expr, n).(Expression) case *ForStmt: n.Init = v(n.Init, n) n.Cond = v(n.Cond, n) n.Loop = v(n.Loop, n).(Expression) n.Body = v(n.Body, n) case *ContinueStmt, *BreakStmt, *DiscardStmt, *EmptyStmt, *DefaultStmt: case *IfStmt: n.IfExpr = v(n.IfExpr, n).(Expression) n.ThenStmt = v(n.ThenStmt, n) if n.ElseStmt != nil { 
n.ElseStmt = v(n.ElseStmt, n) } case *CompoundStmt: for i, c := range n.Stmts { n.Stmts[i] = v(c, n) } case *ReturnStmt: if n.Expr != nil { n.Expr = v(n.Expr, n).(Expression) } case *BuiltinType: case *ArrayType: if n.Base != nil { n.Base = v(n.Base, n).(Type) } if n.Size != nil { n.Size = v(n.Size, n).(Expression) } case *StructType: if n.StructDef { n.Sym = v(n.Sym, n).(*StructSym) } case *LayoutQualifier: case *TypeQualifiers: if n.Layout != nil { n.Layout = v(n.Layout, n).(*LayoutQualifier) } case *PrecisionDecl: if n.Type != nil { n.Type = v(n.Type, n).(*BuiltinType) } case *FunctionDecl: if n.RetType != nil { n.RetType = v(n.RetType, n).(Type) } for i, c := range n.Params { n.Params[i] = v(c, n).(*FuncParameterSym) } if n.Stmts != nil { n.Stmts = v(n.Stmts, n).(*CompoundStmt) } case *MultiVarDecl: if n.Quals != nil { n.Quals = v(n.Quals, n).(*TypeQualifiers) } if n.Type != nil { n.Type = v(n.Type, n).(Type) } for i, c := range n.Vars { n.Vars[i] = v(c, n).(*VariableSym) } case *LayoutDecl: n.Layout = v(n.Layout, n).(*LayoutQualifier) case *InvariantDecl: for i, c := range n.Vars { n.Vars[i] = v(c, n).(*VarRefExpr) } case *UniformDecl: n.Block = v(n.Block, n).(*UniformBlock) if n.Size != nil { n.Size = v(n.Size, n).(Expression) } case *VariableSym: if n.SymType != nil { n.SymType = v(n.SymType, n).(Type) } if n.Init != nil { n.Init = v(n.Init, n).(Expression) } case *FuncParameterSym: if n.SymType != nil { n.SymType = v(n.SymType, n).(Type) } case *StructSym: for i, c := range n.Vars { n.Vars[i] = v(c, n).(*MultiVarDecl) } case *UniformBlock: if n.Layout != nil { n.Layout = v(n.Layout, n).(*LayoutQualifier) } for i, c := range n.Vars { n.Vars[i] = v(c, n).(*MultiVarDecl) } case *ExpressionCond: n.Expr = v(n.Expr, n).(Expression) case *VarDeclCond: n.Sym = v(n.Sym, n).(*VariableSym) case *Ast: decls := n.Decls for i, c := range decls { decls[i] = v(c, n) } } } // VisitChildren is a helper function which calls the provided callback for each child node of // the 
argument. func VisitChildren(n interface{}, v ChildVisitor) { TransformChildren(n, func(child, parent interface{}) interface{} { v(child) return child }) }
gapis/api/gles/glsl/ast/visitor.go
0.5083
0.477067
visitor.go
starcoder
package godiffpriv import ( "encoding/json" "math" "reflect" "strconv" "time" "github.com/montanaflynn/stats" "golang.org/x/exp/rand" "gonum.org/v1/gonum/stat/distuv" ) // Internal representation for numeric datasets type quantitative struct { data []float64 } // Internal representation for symbolic datasets type qualitative struct { data []string } // Interface to represents private data intenal, it was implemented for query and sesitivity methods type private interface { query() sensitivity() } //Privatevalue is a interface to represents a private values to implements the object factory type Privatevalue interface { ApplyPrivacy(float64) ([]byte, error) } // Object will be build to represents for symbolic datasets type privatequali struct { data []string } // Object will be build to represents for numeric datasets type privatequant struct { data []float64 } // LapMechanism define noise mechanism to used on differential privacy, to calculate random noise // Params: mi (float64): distribution mean; scale (float64): it is a standard deviation; // times (int: amount random values will be gererate // Returns: float65 array with values of the map func lapMechanism(mi float64, scale float64, times int) ([]float64, error) { var dLap distuv.Laplace dLap.Mu = 0 dLap.Scale = scale dLap.Src = rand.NewSource(uint64(time.Now().UTC().UnixNano())) var probs []float64 for i := 0; i < times; i++ { probs = append(probs, dLap.Rand()) } return probs, nil } func (q *quantitative) query() (float64, error) { return stats.Mean(q.data) } func (q *qualitative) query() (map[string]int, error) { hist := make(map[string]int) for _, item := range q.data { hist[item]++ } return hist, nil } // mapToSliceInt is a fucntion to convert map values to slice // Params: data (maps[string]int): dataset will be convert // Returns: float65 array with values of the map func mapToSliceInt(data map[string]int) []float64 { var values []float64 for _, value := range data { values = append(values, 
float64(value)) } return values } // sensitivity is a method to calculate sensitivity on dataset with numeric data // Params: None // Returns: float64 to represents sensitivity value and the error if there exists func (q *quantitative) sensitivity() (float64, error) { var val float64 var max float64 for i := 0; i < len(q.data); i++ { d1Slice := make([]float64, len(q.data)) d2Slice := make([]float64, len(q.data)) var d1 quantitative var d2 quantitative copy(d1Slice, q.data) copy(d2Slice, q.data) d1.data = append(d1Slice[:i], d1Slice[i+1:]...) if (i + 1) > len(d2Slice)-1 { d2.data = d2Slice[1:] } else { d2.data = append(d2Slice[:i+1], d2Slice[(i+1)+1:]...) } q1, err := d1.query() q2, err := d2.query() if err != nil { return 0.0, err } val = math.Abs(q1 - q2) max = math.Max(max, val) } return max, nil } // sensitivity is a method to calculate sensitivity on dataset with symbolic data // Params: None // Returns: float64 to represents sensitivity value and the error if there exists func (q *qualitative) sensitivity() (float64, error) { var val float64 var max float64 for i := 0; i < len(q.data); i++ { d1Slice := make([]string, len(q.data)) d2Slice := make([]string, len(q.data)) var d1 qualitative var d2 qualitative copy(d1Slice, q.data) copy(d2Slice, q.data) d1.data = append(d1Slice[:i], d1Slice[i+1:]...) if (i + 1) > len(d2Slice)-1 { d2.data = d2Slice[1:] } else { d2.data = append(d2Slice[:i+1], d2Slice[(i+1)+1:]...) 
} q1, _ := d1.query() q2, _ := d2.query() arrayQ1 := mapToSliceInt(q1) arrayQ2 := mapToSliceInt(q2) size := 0 if len(arrayQ1) > len(arrayQ2) { size = len(arrayQ2) } else if len(arrayQ2) > len(arrayQ1) { size = len(arrayQ1) } for j := 0; j < size; j++ { result := math.Abs(arrayQ1[j] - arrayQ2[j]) val = math.Max(val, result) } max = math.Max(max, val) } return max, nil } // ApplyPrivacy is a method to apply privacy on numeric data // Params: epsilon (float64): noise level // Returns: a byte array that would be converted to map[string]float64 func (priv *privatequant) ApplyPrivacy(epsilon float64) ([]byte, error) { q := new(quantitative) q.data = priv.data s, _ := q.sensitivity() b := s / epsilon noise, _ := lapMechanism(0, b, 1) data, _ := q.query() privData := data + noise[0] privBytes, err := json.Marshal(map[string]float64{"data": privData}) if err != nil { return nil, err } return privBytes, nil } // ApplyPrivacy is a method to apply privacy on symbolic data // Params: epsilon (float64): noise level // Returns: a byte array that would be converted to map[string]float64 func (priv *privatequali) ApplyPrivacy(epsilon float64) ([]byte, error) { q := new(qualitative) q.data = priv.data s, _ := q.sensitivity() b := s / epsilon noise, err := lapMechanism(0, b, 1) if err != nil { return nil, err } data, err := q.query() if err != nil { return nil, err } privData := make(map[string]float64) i := 1 for _, val := range data { key := strconv.Itoa(i) privData[key] = float64(val) + noise[0] i++ } privBytes, err := json.Marshal(privData) if err != nil { return nil, err } return privBytes, nil } // PrivateDataFactory is a factory method to generate private objects // Params: dataset: a string to describes object type // Returns: object to numeric or symbolic data func PrivateDataFactory(dataset interface{}) Privatevalue { val := reflect.ValueOf(dataset) t := val.Index(0) switch t.Kind() { case reflect.Float64: //dt := make([]float64, val.Len()) var dt []float64 for i := 0; i < 
val.Len(); i++ { dt = append(dt, float64(val.Index(i).Float())) } quant := new(privatequant) quant.data = dt return quant case reflect.String: var dt []string for i := 0; i < val.Len(); i++ { dt = append(dt, string(val.Index(i).String())) } quali := new(privatequali) quali.data = dt return quali default: return nil } }
diffpriv-core.go
0.714528
0.506652
diffpriv-core.go
starcoder
package fp func (l BoolList) IsEmpty() bool { return l == NilBoolList } func (l StringList) IsEmpty() bool { return l == NilStringList } func (l IntList) IsEmpty() bool { return l == NilIntList } func (l Int64List) IsEmpty() bool { return l == NilInt64List } func (l ByteList) IsEmpty() bool { return l == NilByteList } func (l RuneList) IsEmpty() bool { return l == NilRuneList } func (l Float32List) IsEmpty() bool { return l == NilFloat32List } func (l Float64List) IsEmpty() bool { return l == NilFloat64List } func (l AnyList) IsEmpty() bool { return l == NilAnyList } func (l Tuple2List) IsEmpty() bool { return l == NilTuple2List } func (l BoolArrayList) IsEmpty() bool { return l == NilBoolArrayList } func (l StringArrayList) IsEmpty() bool { return l == NilStringArrayList } func (l IntArrayList) IsEmpty() bool { return l == NilIntArrayList } func (l Int64ArrayList) IsEmpty() bool { return l == NilInt64ArrayList } func (l ByteArrayList) IsEmpty() bool { return l == NilByteArrayList } func (l RuneArrayList) IsEmpty() bool { return l == NilRuneArrayList } func (l Float32ArrayList) IsEmpty() bool { return l == NilFloat32ArrayList } func (l Float64ArrayList) IsEmpty() bool { return l == NilFloat64ArrayList } func (l AnyArrayList) IsEmpty() bool { return l == NilAnyArrayList } func (l Tuple2ArrayList) IsEmpty() bool { return l == NilTuple2ArrayList } func (l BoolOptionList) IsEmpty() bool { return l == NilBoolOptionList } func (l StringOptionList) IsEmpty() bool { return l == NilStringOptionList } func (l IntOptionList) IsEmpty() bool { return l == NilIntOptionList } func (l Int64OptionList) IsEmpty() bool { return l == NilInt64OptionList } func (l ByteOptionList) IsEmpty() bool { return l == NilByteOptionList } func (l RuneOptionList) IsEmpty() bool { return l == NilRuneOptionList } func (l Float32OptionList) IsEmpty() bool { return l == NilFloat32OptionList } func (l Float64OptionList) IsEmpty() bool { return l == NilFloat64OptionList } func (l AnyOptionList) IsEmpty() 
bool { return l == NilAnyOptionList } func (l Tuple2OptionList) IsEmpty() bool { return l == NilTuple2OptionList } func (l BoolListList) IsEmpty() bool { return l == NilBoolListList } func (l StringListList) IsEmpty() bool { return l == NilStringListList } func (l IntListList) IsEmpty() bool { return l == NilIntListList } func (l Int64ListList) IsEmpty() bool { return l == NilInt64ListList } func (l ByteListList) IsEmpty() bool { return l == NilByteListList } func (l RuneListList) IsEmpty() bool { return l == NilRuneListList } func (l Float32ListList) IsEmpty() bool { return l == NilFloat32ListList } func (l Float64ListList) IsEmpty() bool { return l == NilFloat64ListList } func (l AnyListList) IsEmpty() bool { return l == NilAnyListList } func (l Tuple2ListList) IsEmpty() bool { return l == NilTuple2ListList }
fp/bootstrap_list_isempty.go
0.601008
0.625552
bootstrap_list_isempty.go
starcoder
package waddrmgr import ( "time" chainhash "github.com/l0k18/pod/pkg/chain/hash" "github.com/l0k18/pod/pkg/db/walletdb" ) // BlockStamp defines a block (by height and a unique hash) and is used to mark a point in the blockchain that an // address manager element is synced to. type BlockStamp struct { Height int32 Hash chainhash.Hash Timestamp time.Time } // syncState houses the sync state of the manager. It consists of the recently seen blocks as height, as well as the // start and current sync block stamps. type syncState struct { // startBlock is the first block that can be safely used to start a rescan. It is either the block the manager was // created with, or the earliest block provided with imported addresses or scripts. startBlock BlockStamp // syncedTo is the current block the addresses in the manager are known to be synced against. syncedTo BlockStamp } // newSyncState returns a new sync state with the provided parameters. func newSyncState(startBlock, syncedTo *BlockStamp) *syncState { return &syncState{ startBlock: *startBlock, syncedTo: *syncedTo, } } // SetSyncedTo marks the address manager to be in sync with the recently-seen block described by the blockstamp. When // the provided blockstamp is nil, the oldest blockstamp of the block the manager was created at and of all imported // addresses will be used. This effectively allows the manager to be marked as unsynced back to the oldest known point // any of the addresses have appeared in the block chain. func (m *Manager) SetSyncedTo(ns walletdb.ReadWriteBucket, bs *BlockStamp) error { m.mtx.Lock() defer m.mtx.Unlock() // Use the stored start blockstamp and reset recent hashes and height when the provided blockstamp is nil. if bs == nil { bs = &m.syncState.startBlock } // Update the database. err := putSyncedTo(ns, bs) if err != nil { Error(err) return err } // Update memory now that the database is updated. 
m.syncState.syncedTo = *bs return nil } // SyncedTo returns details about the block height and hash that the address manager is synced through at the very // least. The intention is that callers can use this information for intelligently initiating rescans to sync back to // the best chain from the last known good block. func (m *Manager) SyncedTo() BlockStamp { m.mtx.Lock() defer m.mtx.Unlock() return m.syncState.syncedTo } // BlockHash returns the block hash at a particular block height. This information is useful for comparing against the // chain back-end to see if a reorg is taking place and how far back it goes. func (m *Manager) BlockHash(ns walletdb.ReadBucket, height int32) ( *chainhash.Hash, error) { m.mtx.Lock() defer m.mtx.Unlock() return fetchBlockHash(ns, height) } // Birthday returns the birthday, or earliest time a key could have been used, for the manager. func (m *Manager) Birthday() time.Time { m.mtx.Lock() defer m.mtx.Unlock() return m.birthday } // SetBirthday sets the birthday, or earliest time a key could have been used, for the manager. func (m *Manager) SetBirthday(ns walletdb.ReadWriteBucket, birthday time.Time) error { m.mtx.Lock() defer m.mtx.Unlock() m.birthday = birthday return putBirthday(ns, birthday) }
pkg/wallet/addrmgr/sync.go
0.692018
0.434701
sync.go
starcoder
package main

import (
	"bufio"
	"fmt"
	"os"
	"strconv"
	"strings"
)

// maxInt is the largest int value, used as the "no crossing found yet"
// sentinel. The previous hard-coded sentinel 99999 silently produced wrong
// answers for wires whose best crossing exceeded that value.
const maxInt = int(^uint(0) >> 1)

// line is an axis-aligned wire segment from (x1,y1) to (x2,y2).
// Must have either x1==x2 or y1==y2 to be a valid line.
type line struct {
	x1, y1, x2, y2 int
}

// parallel reports whether l and o have the same orientation
// (both vertical or both horizontal).
func (l line) parallel(o line) bool {
	return (l.x1 == l.x2) == (o.x1 == o.x2)
}

// length returns the segment's Manhattan length.
func (l line) length() int {
	if l.x1 == l.x2 {
		return abs(l.y1 - l.y2)
	}
	return abs(l.x1 - l.x2)
}

// main reads two comma-separated wire descriptions from stdin (one per line)
// and prints the closest-crossing distance and the fewest combined steps.
func main() {
	var wire1, wire2 []line
	var scanner = bufio.NewScanner(os.Stdin)
	scanner.Scan()
	wire1 = constructWire(scanner.Text())
	scanner.Scan()
	wire2 = constructWire(scanner.Text())
	fmt.Println("Distance to origin of closest crossing:", findMinCrossingDistance(wire1, wire2))
	fmt.Println("Number of steps until first crossing:", findMinCrossingSteps(wire1, wire2))
}

// constructWire parses a comma-separated move list such as "R8,U5,L5,D3"
// into a sequence of connected segments starting at the origin.
func constructWire(s string) []line {
	var segments []line
	var x, y, nx, ny = 0, 0, 0, 0
	for _, step := range strings.Split(s, ",") {
		// Puzzle input is trusted: a malformed step size parses as 0 and
		// yields a zero-length segment, so the Atoi error is deliberately
		// ignored.
		stepSize, _ := strconv.Atoi(step[1:])
		switch rune(step[0]) {
		case 'R':
			nx += stepSize
		case 'L':
			nx -= stepSize
		case 'U':
			ny += stepSize
		case 'D':
			ny -= stepSize
		}
		segments = append(segments, line{x, y, nx, ny})
		x, y = nx, ny
	}
	return segments
}

// findMinCrossingSteps returns the minimum combined number of steps both
// wires travel before reaching a common crossing (the origin is excluded).
func findMinCrossingSteps(wire1, wire2 []line) int {
	var minSteps = maxInt
	var steps, oldSteps, newSteps int
	for _, segment1 := range wire1 {
		oldSteps = steps
		for _, segment2 := range wire2 {
			doIntersect, x, y := intersectLines(segment1, segment2)
			// steps currently holds wire1's distance to segment1's start plus
			// wire2's distance to segment2's start; add each segment's partial
			// length from its start point to the intersection (x,y).
			if segment1.x1 == segment1.x2 {
				newSteps = steps + abs(y-segment1.y1) + abs(x-segment2.x1)
			} else {
				newSteps = steps + abs(y-segment2.y1) + abs(x-segment1.x1)
			}
			if doIntersect && (x != 0 || y != 0) && newSteps < minSteps {
				minSteps = newSteps
			}
			steps += segment2.length()
		}
		steps = oldSteps + segment1.length()
	}
	return minSteps
}

// findMinCrossingDistance returns the Manhattan distance from the origin to
// the closest crossing of the two wires (the origin itself is excluded).
func findMinCrossingDistance(wire1, wire2 []line) int {
	var minDistance = maxInt
	for _, segment1 := range wire1 {
		for _, segment2 := range wire2 {
			doIntersect, x, y := intersectLines(segment1, segment2)
			if doIntersect && (x != 0 || y != 0) && abs(x)+abs(y) < minDistance {
				minDistance = abs(x) + abs(y)
			}
		}
	}
	return minDistance
}

// intersectLines checks two segments for intersection.
// Returns a boolean indicating whether they intersect and the position of the
// intersection. Parallel segments never intersect here, so collinear overlaps
// are not reported — acceptable for this puzzle's inputs.
func intersectLines(line1, line2 line) (bool, int, int) {
	if line1.parallel(line2) {
		return false, 0, 0
	}
	if line1.x1 != line1.x2 { // let line1 always be the vertical one
		line1, line2 = line2, line1
	}
	if inRange(line2.y1, line1.y1, line1.y2) && inRange(line1.x1, line2.x1, line2.x2) {
		return true, line1.x1, line2.y1
	}
	return false, 0, 0
}

// abs returns the absolute value of n.
func abs(n int) int {
	if n < 0 {
		return -n
	}
	return n
}

// inRange checks whether n lies between bound1 and bound2, where the order of
// the bounds is unknown.
func inRange(n, bound1, bound2 int) bool {
	return (n >= bound1 && n <= bound2) || (n >= bound2 && n <= bound1)
}
2019/day03/crossedWires.go
0.590897
0.420778
crossedWires.go
starcoder
package transactions

import (
	"bytes"

	"github.com/dusk-network/dusk-blockchain/pkg/p2p/wire/encoding"
)

// Note represents a Phoenix note.
type Note struct {
	Randomness    []byte `json:"randomness"`
	PkR           []byte `json:"pk_r"`
	Commitment    []byte `json:"commitment"`
	Nonce         []byte `json:"nonce"`
	EncryptedData []byte `json:"encrypted_data"`
}

// NewNote allocates a Note with zeroed, correctly-sized fields: four 32-byte
// fields plus 96 bytes of encrypted data.
func NewNote() *Note {
	return &Note{
		Randomness:    make([]byte, 32),
		PkR:           make([]byte, 32),
		Commitment:    make([]byte, 32),
		Nonce:         make([]byte, 32),
		EncryptedData: make([]byte, 96),
	}
}

// Copy complies with the message.Safe interface. It returns a deep copy of
// the note, safe to publish to multiple subscribers.
func (n *Note) Copy() *Note {
	dup := func(src []byte) []byte {
		out := make([]byte, len(src))
		copy(out, src)
		return out
	}
	return &Note{
		Randomness:    dup(n.Randomness),
		PkR:           dup(n.PkR),
		Commitment:    dup(n.Commitment),
		Nonce:         dup(n.Nonce),
		EncryptedData: dup(n.EncryptedData),
	}
}

// MarshalNote writes the Note into a bytes.Buffer: four fixed 256-bit fields
// followed by the variable-length encrypted data.
func MarshalNote(r *bytes.Buffer, f *Note) error {
	for _, field := range [][]byte{f.Randomness, f.PkR, f.Commitment, f.Nonce} {
		if err := encoding.Write256(r, field); err != nil {
			return err
		}
	}
	return encoding.WriteVarBytes(r, f.EncryptedData)
}

// UnmarshalNote reads a Note from a bytes.Buffer, mirroring MarshalNote's
// field order.
func UnmarshalNote(r *bytes.Buffer, f *Note) error {
	for _, field := range [][]byte{f.Randomness, f.PkR, f.Commitment, f.Nonce} {
		if err := encoding.Read256(r, field); err != nil {
			return err
		}
	}
	return encoding.ReadVarBytes(r, &f.EncryptedData)
}

// Equal reports whether two Notes hold identical field contents.
func (n *Note) Equal(other *Note) bool {
	return bytes.Equal(n.Randomness, other.Randomness) &&
		bytes.Equal(n.PkR, other.PkR) &&
		bytes.Equal(n.Commitment, other.Commitment) &&
		bytes.Equal(n.Nonce, other.Nonce) &&
		bytes.Equal(n.EncryptedData, other.EncryptedData)
}
pkg/core/data/ipc/transactions/note.go
0.82559
0.422445
note.go
starcoder
package stl

// This file implements the 3-component vector type used by Triangle for its
// normal vector and vertices.

import (
	"math"
)

// Vec3 represents a 3D vector, used in Triangle for normal vector and vertices.
type Vec3 [3]float32

// vec3Zero is the zero vector.
var vec3Zero = Vec3{0, 0, 0}

// len returns the Euclidean length of the vector.
// The squares are computed in float32 and only the final sum is widened.
func (v Vec3) len() float64 {
	return math.Sqrt(float64(v[0]*v[0] + v[1]*v[1] + v[2]*v[2]))
}

// unitVec scales the vector to length 1. An all-zero vector is returned
// unchanged.
func (v Vec3) unitVec() Vec3 {
	length := v.len()
	if length == 0 {
		return v
	}
	return Vec3{
		float32(float64(v[0]) / length),
		float32(float64(v[1]) / length),
		float32(float64(v[2]) / length),
	}
}

// multScalar returns v scaled by the given factor.
func (v Vec3) multScalar(scalar float64) Vec3 {
	return Vec3{
		float32(float64(v[0]) * scalar),
		float32(float64(v[1]) * scalar),
		float32(float64(v[2]) * scalar),
	}
}

// almostEqual reports whether v and o agree component-wise within tolerance tol.
func (v Vec3) almostEqual(o Vec3, tol float32) bool {
	return almostEqual32(v[0], o[0], tol) &&
		almostEqual32(v[1], o[1], tol) &&
		almostEqual32(v[2], o[2], tol)
}

// add returns the component-wise sum v + o.
func (v Vec3) add(o Vec3) Vec3 {
	return Vec3{v[0] + o[0], v[1] + o[1], v[2] + o[2]}
}

// diff returns the component-wise difference v - o.
func (v Vec3) diff(o Vec3) Vec3 {
	return Vec3{v[0] - o[0], v[1] - o[1], v[2] - o[2]}
}

// cross returns the vector cross product v x o.
func (v Vec3) cross(o Vec3) Vec3 {
	return Vec3{
		v[1]*o[2] - v[2]*o[1],
		v[2]*o[0] - v[0]*o[2],
		v[0]*o[1] - v[1]*o[0],
	}
}

// dot returns the dot product of v and o, accumulated in float64.
func (v Vec3) dot(o Vec3) float64 {
	return float64(v[0])*float64(o[0]) +
		float64(v[1])*float64(o[1]) +
		float64(v[2])*float64(o[2])
}

// angle returns the unsigned angle between v and o in radians, in [0, Pi].
// If either vector is the origin, the result is 0.
func (v Vec3) angle(o Vec3) float64 {
	lenProd := v.len() * o.len()
	if lenProd == 0 {
		return 0
	}
	cosAngle := v.dot(o) / lenProd
	// Clamp to [-1, 1] to guard Acos against numerical error.
	switch {
	case cosAngle < -1:
		cosAngle = -1
	case cosAngle > 1:
		cosAngle = 1
	}
	return math.Acos(cosAngle)
}
vec3.go
0.909648
0.768993
vec3.go
starcoder
package listing // Service provides risk matrix and risk listing operations type Service interface { GetRiskMatrix(int) (RiskMatrix, error) GetRiskMatrixByPath(string) (RiskMatrix, error) GetAllRisks(int) []Risk GetRisk(string) (Risk, error) GetAllRiskMatrix() []RiskMatrix GetMediaPath() (string, error) } type Repository interface { // GetRiskMatrix returns the risk matrix with the given ID GetRiskMatrix(int) (RiskMatrix, error) // GetRiskMatrixByPath returns a risk matrix with the specified image path GetRiskMatrixByPath(string) (RiskMatrix, error) // GetAllRiskMatrix returns all the risk matrix stored GetAllRiskMatrix() []RiskMatrix // GetAllRisks returns a list of all risks for a given RiskMatrix ID GetAllRisks(int) []Risk // GetRisk returns a risk with the given ID GetRisk(string) (Risk, error) // GetMediaPath returns the path where the media is stored GetMediaPath() (string, error) } type service struct { r Repository } // NewService creates a listing service with the necessary dependencies func NewService(r Repository) Service { return &service{r} } // GetRiskMatrix returns a risk matrix with specified ID func (s *service) GetRiskMatrix(id int) (RiskMatrix, error) { rm, err := s.r.GetRiskMatrix(id) return rm, err } // GetRiskMatrixByPath returns a risk matrix with the specified image path func (s *service) GetRiskMatrixByPath(p string) (RiskMatrix, error) { rm, err := s.r.GetRiskMatrixByPath(p) return rm, err } // GetAllRiskMatrix returns all the risk matrix stored func (s *service) GetAllRiskMatrix() []RiskMatrix { return s.r.GetAllRiskMatrix() } // GetAllRisks returns all risks specified in a RiskMatrix func (s *service) GetAllRisks(riskMatrixID int) []Risk { return s.r.GetAllRisks(riskMatrixID) } // GetRisk returns a risk with the given ID func (s *service) GetRisk(riskID string) (Risk, error) { r, err := s.r.GetRisk(riskID) return r, err } // GetMediaPath returns the path where the media is stored func (s *service) GetMediaPath() (string, error) { path, 
err := s.r.GetMediaPath() return path, err }
pkg/listing/service.go
0.807005
0.588032
service.go
starcoder
// Package gbst provides an implementation of the GBST (gradient-based subword tokenization)
// module from the Charformer paper (https://arxiv.org/abs/2106.12672).
// It automatically learns latent sub-words representations from characters in a data-driven fashion.
package gbst

import (
	"encoding/gob"
	"math"

	mat "github.com/nlpodyssey/spago/pkg/mat32"
	"github.com/nlpodyssey/spago/pkg/ml/ag"
	"github.com/nlpodyssey/spago/pkg/ml/nn"
	"github.com/nlpodyssey/spago/pkg/ml/nn/convolution1d"
	"github.com/nlpodyssey/spago/pkg/ml/nn/linear"
)

var (
	// Compile-time check that *Model satisfies nn.Model.
	_ nn.Model = &Model{}
)

// Model contains the serializable parameters.
type Model struct {
	nn.BaseModel
	Config
	// Conv holds one 1D-convolution per input dimension (see convolution).
	Conv []*convolution1d.Model
	// Proj holds one projection convolution per sequence position (see projection).
	Proj []*convolution1d.Model
	// Scorer produces one score per candidate block size.
	Scorer *linear.Model
	// PadUntil is the lcm of the block sizes; sequences are padded to a
	// multiple of it so every block size divides the padded length.
	PadUntil int
}

// Config provides configuration settings for GBST (Gradient Based Subword Tokenization)
type Config struct {
	InputSize               int
	MaxBlockSize            int
	BlockSize               []int
	DownsampleFactor        int
	ScoreConsensusAttention bool
}

func init() {
	// Register the concrete type so models can be gob-encoded through
	// interface values.
	gob.Register(&Model{})
}

// New returns a new convolution Model, initialized according to the given configuration.
//
// NOTE(review): the indexing below assumes len(config.BlockSize) >= 2
// (BlockSize[0], BlockSize[1] are dereferenced directly); fewer entries
// panic — confirm whether this precondition should be validated like
// DownsampleFactor is.
func New(config Config) *Model {
	if config.DownsampleFactor > config.MaxBlockSize {
		panic("gbst: downsample factor must be lower than maxiumum block size")
	}
	return &Model{
		Config: config,
		// One kernel spanning MaxBlockSize positions per input dimension.
		Conv: makeConvModels(config.InputSize, convolution1d.Config{
			KernelSizeX:    1,
			KernelSizeY:    config.MaxBlockSize,
			YStride:        1,
			InputChannels:  1,
			OutputChannels: 1,
			Mask:           nil,
			DepthWise:      false,
			Activation:     ag.OpIdentity,
		}),
		// Per-position projection across the full input dimensionality.
		Proj: makeConvModels(config.InputSize, convolution1d.Config{
			KernelSizeX:    config.InputSize,
			KernelSizeY:    1,
			YStride:        1,
			InputChannels:  1,
			OutputChannels: 1,
			Mask:           nil,
			DepthWise:      false,
			Activation:     ag.OpIdentity,
		}),
		Scorer:   linear.New(config.InputSize, 1),
		PadUntil: lcm(config.BlockSize[0], config.BlockSize[1], config.BlockSize[2:len(config.BlockSize)]...),
	}
}

// makeConvModels builds n independent convolution models sharing one config.
func makeConvModels(n int, config convolution1d.Config) []*convolution1d.Model {
	ms := make([]*convolution1d.Model, n)
	for i := 0; i < n; i++ {
		ms[i] = convolution1d.New(config)
	}
	return ms
}

// Forward performs the forward step for each input node and returns the result.
// Pipeline: pad -> per-dimension convolution -> per-position projection ->
// block-wise mean pooling -> block scoring -> score-weighted sum -> downsample.
func (m *Model) Forward(xs ...ag.Node) []ag.Node {
	l := len(xs)
	// NOTE(review): this initial allocation is overwritten below and could
	// be dropped.
	ys := make([]ag.Node, l)
	xs = m.padToMultiple(xs...)
	stackedIn := m.Graph().Stack(xs...)
	transposedStackedIn := m.Graph().T(stackedIn)
	stackedConvolvedXs := m.convolution(transposedStackedIn)
	convolvedEmbeddings := m.projection(stackedConvolvedXs, transposedStackedIn.Value().Rows())
	meanSequences := m.blocksMean(convolvedEmbeddings, l)
	scores := m.scorer(meanSequences, l)
	ys = m.weightSequence(meanSequences, scores, l)
	return m.downsample(ys)
}

// padToMultiple pads the sequence until lcm of blocks length
// (appending non-gradient zero vectors).
func (m *Model) padToMultiple(xs ...ag.Node) []ag.Node {
	n := nextDivisibleLength(len(xs), m.PadUntil)
	for i := 0; i < n; i++ {
		if i >= len(xs) {
			xs = append(xs, m.Graph().NewVariable(mat.NewEmptyVecDense(m.InputSize), false))
		}
	}
	return xs
}

// blockMean applies the average pooling of different size blocks. For example, considering blocks size 2
// out[0] = out[1] = average(in[0], in[1]); out[2] = out[3] = average(in[2], in[3]) and so on.
// A trailing partial block is averaged over the remaining elements, and any
// output slots beyond len(sequence) repeat the last computed mean.
func (m *Model) blockMean(sequence []ag.Node, outputSize, blockSize int) []ag.Node {
	g := m.Graph()
	out := make([]ag.Node, outputSize)
	l := len(sequence)
	for i := 0; i < l; i++ {
		if i < outputSize {
			if i%blockSize == 0 {
				if i+blockSize <= l {
					out[i] = g.Mean(sequence[i : i+blockSize])
				} else {
					out[i] = g.Mean(sequence[i:l])
				}
			} else {
				// Positions inside a block reuse the block's mean.
				out[i] = g.Identity(out[i-1])
			}
		}
	}
	if l < outputSize {
		for k := l; k < outputSize; k++ {
			out[k] = g.Identity(out[l-1])
		}
	}
	return out
}

// seqMean applies the average pooling of the output sequence.
// This method, in contrast to the blockMean, returns a downsampled sequence.
// For example, considering blocks size 2
// out[0] = average(in[0], in[1]); out[1] = average(in[2], in[3]) and so on.
func (m *Model) seqMean(sequence []ag.Node, outputSize, blockSize int) []ag.Node {
	g := m.Graph()
	out := make([]ag.Node, outputSize)
	l := len(sequence)
	j := 0
	for i := 0; i < l; i++ {
		if i%blockSize == 0 {
			if i+blockSize <= l {
				out[j] = g.Mean(sequence[i : i+blockSize])
				j++
			} else {
				out[j] = g.Mean(sequence[i:l])
			}
		}
	}
	return out
}

// scorer is a parametrized linear function that produce a score for each candidate block.
// For each position it softmax-normalizes one score per block size; the
// scores optionally go through consensus attention.
func (m *Model) scorer(blocksSequence [][]ag.Node, length int) []ag.Node {
	g := m.Graph()
	scores := make([]ag.Node, length)
	for i := 0; i < length; i++ {
		ff := make([]ag.Node, len(m.BlockSize))
		for j, seq := range blocksSequence {
			ff[j] = m.Scorer.Forward(g.T(seq[i]))[0]
		}
		scores[i] = g.Softmax(g.Concat(ff...))
	}
	if m.Config.ScoreConsensusAttention {
		return m.scoreWithConsensusAttention(scores, length)
	}
	return scores
}

// scoreWithConsensusAttention lets every position attend over all positions'
// block scores (softmax over score dot-products) so block choices agree
// across the sequence.
func (m *Model) scoreWithConsensusAttention(scores []ag.Node, length int) []ag.Node {
	g := m.Graph()
	scoresAttention := make([]ag.Node, length)
	stackedScores := g.Stack(scores...)
	dotProd := g.Mul(stackedScores, g.T(stackedScores))
	for i := 0; i < length; i++ {
		row := g.RowView(dotProd, i)
		softmaxAttention := g.Softmax(row)
		scoresAttention[i] = g.T(g.Mul(g.T(softmaxAttention), stackedScores))
	}
	return scoresAttention
}

// weightSequence calculates the weighted sum between all blocks and their score.
func (m *Model) weightSequence(blocksSequence [][]ag.Node, scores []ag.Node, length int) []ag.Node {
	g := m.Graph()
	out := make([]ag.Node, length)
	for i := 0; i < length; i++ {
		sepScores := nn.SeparateVec(m.Graph(), scores[i])
		weightedScore := m.Graph().NewVariable(mat.NewEmptyVecDense(m.InputSize), true)
		for j, seq := range blocksSequence {
			weightedScore = g.Add(weightedScore, g.ProdScalar(seq[i], sepScores[j]))
		}
		out[i] = weightedScore
	}
	return out
}

// convolution applies 1d convolution through the sequence, for each character dimension.
func (m *Model) convolution(xs ag.Node) ag.Node {
	convolved := make([]ag.Node, xs.Value().Rows())
	for i := 0; i < xs.Value().Rows(); i++ {
		row := m.Graph().RowView(xs, i)
		convolved[i] = m.Conv[i].Forward(row)[0]
	}
	return m.Graph().Stack(convolved...)
}

// projection applies a projection after the convolution through the sequence, for each character dimension.
func (m *Model) projection(inStackedVectors ag.Node, length int) []ag.Node {
	projectedXs := make([]ag.Node, length)
	for i := 0; i < length; i++ {
		projectedXs[i] = m.Proj[i].Forward(inStackedVectors)[0]
	}
	stackedProjectedXs := m.Graph().Stack(projectedXs...)
	stackedProjectedXs = m.Graph().T(stackedProjectedXs)
	convolvedEmbeddings := make([]ag.Node, stackedProjectedXs.Value().Rows())
	for i := range convolvedEmbeddings {
		convolvedEmbeddings[i] = m.Graph().RowView(stackedProjectedXs, i)
	}
	return convolvedEmbeddings
}

// blocksMean calculates the average pooling for each block of length 1 .. m.Blocksize, for the sequence of length l.
// Index 0 (block size 1) copies the embeddings directly, padding past the end
// with the last embedding; larger block sizes delegate to blockMean.
func (m *Model) blocksMean(convolvedEmbeddings []ag.Node, length int) [][]ag.Node {
	meanSequences := make([][]ag.Node, len(m.BlockSize))
	meanSequences[0] = make([]ag.Node, length)
	maxLen := len(convolvedEmbeddings)
	for i := 0; i < length; i++ {
		if i < maxLen {
			meanSequences[0][i] = m.Graph().Identity(convolvedEmbeddings[i])
		} else {
			meanSequences[0][i] = m.Graph().Identity(convolvedEmbeddings[maxLen-1])
		}
	}
	for i := 1; i < len(meanSequences); i++ {
		meanSequences[i] = m.blockMean(convolvedEmbeddings, length, m.BlockSize[i])
	}
	return meanSequences
}

// downsample reduces the sequence by the downsample factor
// (a factor below 2 leaves the sequence untouched).
func (m *Model) downsample(xs []ag.Node) []ag.Node {
	if m.DownsampleFactor < 2 {
		return xs
	}
	return m.seqMean(xs, int(math.Ceil(float64(len(xs))/float64(m.DownsampleFactor))), m.DownsampleFactor)
}
pkg/nlp/gbst/model.go
0.908133
0.508605
model.go
starcoder
package byteslice // LUnset apply AND operation on a byte slice with an "unset" byte slice using big endian order. func LUnset(data, unsetData []byte) []byte { var dataLength = len(data) if dataLength < 1 { return data } unsetDataLength := len(unsetData) operationLength := dataLength operationCut := dataLength if unsetDataLength > dataLength { operationLength = unsetDataLength } result, _ := Unset(RPad(data, operationLength, 0xFF), RPad(unsetData, operationLength, 0xFF)) return result[:operationCut] } // LSet apply OR operation on a byte slice with an "set" byte slice using big endian order. func LSet(data, setData []byte) []byte { dataLength := len(data) setDataLength := len(setData) operationLength := dataLength if setDataLength > dataLength { operationLength = setDataLength } result, _ := Set(RPad(data, operationLength, 0x00), RPad(setData, operationLength, 0x00)) return result } // LToggle apply XOR operation on a byte slice with an "toggle" byte slice using big endian order. func LToggle(data, toggleData []byte) []byte { dataLength := len(data) toggleDataLength := len(toggleData) operationLength := dataLength if toggleDataLength > dataLength { operationLength = toggleDataLength } result, _ := Toggle(RPad(data, operationLength, 0x00), RPad(toggleData, operationLength, 0x00)) return result } // LSubset get the byte slice of a subset of the big endian ordered data byte defined // by the least significant bit and the most significant bit. 
func LSubset(data []byte, leastSignificantBit, mostSignificantBit uint64) []byte { var maxDataMostSignificantBit = uint64(maxBitsLength*len(data) - 1) if mostSignificantBit <= leastSignificantBit || leastSignificantBit > maxDataMostSignificantBit { return make([]byte, 0) } if mostSignificantBit > maxDataMostSignificantBit { mostSignificantBit = maxDataMostSignificantBit } var result = RShift(data, maxDataMostSignificantBit-mostSignificantBit) var correctiveShift = maxDataMostSignificantBit - mostSignificantBit + leastSignificantBit result = LShift(result, correctiveShift) var size = computeSize(leastSignificantBit, mostSignificantBit) return result[:size] }
byteslice_bigendian.go
0.796292
0.571288
byteslice_bigendian.go
starcoder
package utils

import (
	"math/big"

	"github.com/daoleno/uniswapv3-sdk/constants"
)

// orderedRatios returns the two sqrt prices in ascending order.
func orderedRatios(a, b *big.Int) (*big.Int, *big.Int) {
	if a.Cmp(b) > 0 {
		return b, a
	}
	return a, b
}

// maxLiquidityForAmount0Imprecise returns an imprecise maximum amount of
// liquidity received for a given amount of token0.
// It mirrors LiquidityAmounts#getLiquidityForAmount0 in the v3 periphery,
// which could be more precise by at least 32 bits by dividing by Q64 instead
// of Q96 in the intermediate step, and shifting the subtracted ratio left by
// 32 bits. This imprecise calculation will likely be replaced in a future v3
// router contract.
// sqrtRatioAX96 and sqrtRatioBX96 are the prices at the range boundaries (in
// either order); amount0 is the token0 amount.
func maxLiquidityForAmount0Imprecise(sqrtRatioAX96, sqrtRatioBX96, amount0 *big.Int) *big.Int {
	sqrtRatioAX96, sqrtRatioBX96 = orderedRatios(sqrtRatioAX96, sqrtRatioBX96)
	intermediate := new(big.Int).Div(new(big.Int).Mul(sqrtRatioAX96, sqrtRatioBX96), constants.Q96)
	return new(big.Int).Div(new(big.Int).Mul(amount0, intermediate), new(big.Int).Sub(sqrtRatioBX96, sqrtRatioAX96))
}

// maxLiquidityForAmount0Precise returns a precise maximum amount of liquidity
// received for a given amount of token0, computing the full numerator before
// a single division so no intermediate precision is lost.
// sqrtRatioAX96 and sqrtRatioBX96 are the prices at the range boundaries (in
// either order); amount0 is the token0 amount.
func maxLiquidityForAmount0Precise(sqrtRatioAX96, sqrtRatioBX96, amount0 *big.Int) *big.Int {
	sqrtRatioAX96, sqrtRatioBX96 = orderedRatios(sqrtRatioAX96, sqrtRatioBX96)
	numerator := new(big.Int).Mul(new(big.Int).Mul(amount0, sqrtRatioAX96), sqrtRatioBX96)
	denominator := new(big.Int).Mul(constants.Q96, new(big.Int).Sub(sqrtRatioBX96, sqrtRatioAX96))
	return new(big.Int).Div(numerator, denominator)
}

// maxLiquidityForAmount1 computes the maximum amount of liquidity received
// for a given amount of token1 between the two tick-boundary prices (in
// either order).
func maxLiquidityForAmount1(sqrtRatioAX96, sqrtRatioBX96, amount1 *big.Int) *big.Int {
	sqrtRatioAX96, sqrtRatioBX96 = orderedRatios(sqrtRatioAX96, sqrtRatioBX96)
	return new(big.Int).Div(new(big.Int).Mul(amount1, constants.Q96), new(big.Int).Sub(sqrtRatioBX96, sqrtRatioAX96))
}

// maxLiquidityForAmounts computes the maximum amount of liquidity received
// for the given amounts of token0 and token1, the current price, and the
// prices at the tick boundaries.
// When useFullPrecision is false, liquidity is maximized according to what
// the router can calculate, not what core can theoretically support.
func maxLiquidityForAmounts(sqrtRatioCurrentX96 *big.Int, sqrtRatioAX96, sqrtRatioBX96 *big.Int, amount0, amount1 *big.Int, useFullPrecision bool) *big.Int {
	sqrtRatioAX96, sqrtRatioBX96 = orderedRatios(sqrtRatioAX96, sqrtRatioBX96)
	liquidityForAmount0 := maxLiquidityForAmount0Imprecise
	if useFullPrecision {
		liquidityForAmount0 = maxLiquidityForAmount0Precise
	}
	switch {
	case sqrtRatioCurrentX96.Cmp(sqrtRatioAX96) <= 0:
		// Current price at or below the range: all value is in token0.
		return liquidityForAmount0(sqrtRatioAX96, sqrtRatioBX96, amount0)
	case sqrtRatioCurrentX96.Cmp(sqrtRatioBX96) < 0:
		// Price inside the range: limited by the smaller of the two sides.
		liquidity0 := liquidityForAmount0(sqrtRatioCurrentX96, sqrtRatioBX96, amount0)
		liquidity1 := maxLiquidityForAmount1(sqrtRatioAX96, sqrtRatioCurrentX96, amount1)
		if liquidity0.Cmp(liquidity1) < 0 {
			return liquidity0
		}
		return liquidity1
	default:
		// Price at or above the range: all value is in token1.
		return maxLiquidityForAmount1(sqrtRatioAX96, sqrtRatioBX96, amount1)
	}
}
utils/max_liquidity_for_amounts.go
0.81119
0.694296
max_liquidity_for_amounts.go
starcoder
package skin

import (
	"bytes"
	_ "embed"
	"image"
	"image/draw"
	"image/png"
	"log"
	"strings"

	drw "golang.org/x/image/draw"
	"golang.org/x/image/math/f64"
)

var (
	// skewA and skewB are shear factors (26/45 and its double) used by the
	// affine transforms below.
	skewA float64 = 26.0 / 45.0
	skewB float64 = skewA * 2.0

	// NOTE(review): presumably these three matrices shear skin faces into a
	// pseudo-3D view (front, top, right side) — confirm against callers of
	// compositeTransform.
	transformForward matrix3 = matrix3{
		XX: 1, YX: -skewA,
		XY: 0, YY: skewB,
		X0: 0, Y0: skewA,
	}
	transformUp matrix3 = matrix3{
		XX: 1, YX: -skewA,
		XY: 1, YY: skewA,
		X0: 0, Y0: 0,
	}
	transformRight matrix3 = matrix3{
		XX: 1, YX: skewA,
		XY: 0, YY: skewB,
		X0: 0, Y0: 0,
	}

	//go:embed steve.png
	rawSteveData []byte

	//go:embed alex.png
	rawAlexData []byte

	// Decoded 64x64 default skins, populated by init from the embedded PNGs.
	steveSkin *image.NRGBA = image.NewNRGBA(image.Rect(0, 0, 64, 64))
	alexSkin  *image.NRGBA = image.NewNRGBA(image.Rect(0, 0, 64, 64))
)

// init decodes the embedded default skins into NRGBA images. A corrupt
// embedded asset aborts the program via log.Fatal, since the package is
// unusable without them.
func init() {
	{
		rawSteveSkin, err := png.Decode(bytes.NewReader(rawSteveData))
		if err != nil {
			log.Fatal(err)
		}
		draw.Draw(steveSkin, rawSteveSkin.Bounds(), rawSteveSkin, image.Pt(0, 0), draw.Src)
	}
	{
		rawAlexSkin, err := png.Decode(bytes.NewReader(rawAlexData))
		if err != nil {
			log.Fatal(err)
		}
		draw.Draw(alexSkin, rawAlexSkin.Bounds(), rawAlexSkin, image.Pt(0, 0), draw.Src)
	}
}

// GetDefaultSkin returns the default skin: Alex for slim models, Steve
// otherwise. Callers share the returned image; they must not mutate it.
func GetDefaultSkin(slim bool) *image.NRGBA {
	if slim {
		return alexSkin
	}
	return steveSkin
}

// extract copies the width x height region of img starting at (x, y) into a
// new image.
func extract(img *image.NRGBA, x, y, width, height int) *image.NRGBA {
	output := image.NewNRGBA(image.Rect(0, 0, width, height))
	draw.Draw(output, output.Bounds(), img, image.Pt(x, y), draw.Src)
	return output
}

// scale enlarges img by an integer factor using nearest-neighbor replication
// (each source pixel becomes a scale x scale square).
func scale(img *image.NRGBA, scale int) *image.NRGBA {
	if scale == 1 {
		return img
	}
	bounds := img.Bounds().Max
	output := image.NewNRGBA(image.Rect(0, 0, bounds.X*scale, bounds.Y*scale))
	for x := 0; x < bounds.X; x++ {
		for y := 0; y < bounds.Y; y++ {
			color := img.At(x, y)
			for sx := 0; sx < scale; sx++ {
				for sy := 0; sy < scale; sy++ {
					output.Set(x*scale+sx, y*scale+sy, color)
				}
			}
		}
	}
	return output
}

// removeTransparency returns a copy of img with every pixel forced to full
// opacity (alpha = 255); RGB channels are preserved unchanged.
func removeTransparency(img *image.NRGBA) *image.NRGBA {
	output := image.NewNRGBA(img.Bounds())
	for i, l := 0, len(img.Pix); i < l; i += 4 {
		output.Pix[i] = img.Pix[i]
		output.Pix[i+1] = img.Pix[i+1]
		output.Pix[i+2] = img.Pix[i+2]
		output.Pix[i+3] = 255
	}
	return output
}

// IsOldSkin reports whether img uses the legacy skin layout, which is less
// than 64 pixels tall.
func IsOldSkin(img *image.NRGBA) bool {
	return img.Bounds().Max.Y < 64
}

// composite draws top over bottom with its origin offset by (x, y) and
// returns the result; neither input is modified.
func composite(bottom, top *image.NRGBA, x, y int) *image.NRGBA {
	output := image.NewNRGBA(bottom.Bounds())
	topBounds := top.Bounds().Max
	draw.Draw(output, bottom.Bounds(), bottom, image.Pt(0, 0), draw.Src)
	draw.Draw(output, image.Rect(0, 0, topBounds.X+x, topBounds.Y+y), top, image.Pt(-x, -y), draw.Over)
	return output
}

// flipHorizontal returns a copy of img mirrored around its vertical axis,
// copying pixel quads (RGBA) directly for speed.
func flipHorizontal(img *image.NRGBA) *image.NRGBA {
	data := img.Pix
	bounds := img.Bounds()
	output := image.NewNRGBA(bounds)
	for x := 0; x < bounds.Max.X; x++ {
		for y := 0; y < bounds.Max.Y; y++ {
			fx := bounds.Max.X - x - 1
			fi := fx*4 + y*4*bounds.Max.X
			ix := x*4 + y*4*bounds.Max.X
			for i := 0; i < 4; i++ {
				output.Pix[ix+i] = data[fi+i]
			}
		}
	}
	return output
}

// fixTransparency treats the top-left pixel's color as a background key: if
// that pixel is already transparent the image is returned as-is, otherwise
// every pixel exactly matching it is made fully transparent in a copy.
func fixTransparency(img *image.NRGBA) *image.NRGBA {
	a := img.Pix[0:4]
	if a[3] == 0 {
		return img
	}
	output := clone(img)
	for i, l := 0, len(output.Pix); i < l; i += 4 {
		if output.Pix[i+0] != a[0] ||
			output.Pix[i+1] != a[1] ||
			output.Pix[i+2] != a[2] ||
			output.Pix[i+3] != a[3] {
			continue
		}
		output.Pix[i+3] = 0
	}
	return output
}

// clone returns a pixel-for-pixel copy of img.
func clone(img *image.NRGBA) *image.NRGBA {
	bounds := img.Bounds()
	output := image.NewNRGBA(bounds)
	draw.Draw(output, bounds, img, image.Pt(0, 0), draw.Src)
	return output
}

// getSlimOffset returns the 1-pixel horizontal offset that slim (3px-arm)
// skins need relative to classic skins.
func getSlimOffset(slim bool) int {
	if slim {
		return 1
	}
	return 0
}

// compositeTransform draws top over bottom after applying the affine matrix
// mat translated by (x, y), using nearest-neighbor sampling; the result is a
// new image.
func compositeTransform(bottom, top *image.NRGBA, mat matrix3, x, y float64) *image.NRGBA {
	output := image.NewNRGBA(bottom.Bounds())
	draw.Draw(output, bottom.Bounds(), bottom, image.Pt(0, 0), draw.Src)
	transformer := drw.NearestNeighbor
	// NOTE(review): x and y are already float64, so these conversions are
	// no-ops kept for byte-compatibility.
	fx, fy := float64(x), float64(y)
	m := mat.Translate(fx, fy)
	transformer.Transform(output, f64.Aff3{m.XX, m.XY, m.X0, m.YX, m.YY, m.Y0}, top, top.Bounds(), draw.Over, nil)
	return output
}

// Credit: https://github.com/LapisBlue/Lapitar/blob/master/mc/uuid.go
// isEven reports whether the hex digit c represents an even value. For
// letters a-f the parity of the character code is inverted relative to the
// value it encodes, hence the (c & 1) == 1 branch. Panics on non-hex input.
func isEven(c uint8) bool {
	switch {
	case c >= '0' && c <= '9':
		return (c & 1) == 0
	case c >= 'a' && c <= 'f':
		return (c & 1) == 1
	default:
		panic("Invalid digit " + string(c))
	}
}

// IsSlimFromUUID derives the default model (slim/classic) from a player UUID
// using the parity of four hex digits, matching the vanilla client's choice
// for players without a custom skin.
func IsSlimFromUUID(uuid string) bool {
	uuid = strings.ReplaceAll(uuid, "-", "")
	return (isEven(uuid[7]) != isEven(uuid[16+7])) != (isEven(uuid[15]) != isEven(uuid[16+15]))
}
util.go
0.591015
0.430327
util.go
starcoder
package vec2

import (
	"math"

	"github.com/jppribeiro/go-vectorial/matrix2"
)

// Vec2 defines a 2-dimensional vector.
type Vec2 struct {
	I float64
	J float64
}

// New returns a new Vec2 pointer with the given components.
func New(i float64, j float64) *Vec2 {
	return &Vec2{i, j}
}

// Add transforms v1 in place by adding v2's dimensions.
func (v1 *Vec2) Add(v2 Vec2) {
	v1.I += v2.I
	v1.J += v2.J
}

// Add takes two Vec2 and returns a new Vec2 whose dimensions are the sum of
// the given Vec2 dimensions.
func Add(v1 Vec2, v2 Vec2) *Vec2 {
	return &Vec2{v1.I + v2.I, v1.J + v2.J}
}

// Dot returns the dot product of v1 and v2.
func (v1 *Vec2) Dot(v2 Vec2) float64 {
	return Dot(*v1, v2)
}

// Dot returns the dot product of the two given Vec2.
func Dot(v1 Vec2, v2 Vec2) float64 {
	return v1.I*v2.I + v1.J*v2.J
}

// Magnitude returns the magnitude (Euclidean length) of the vector.
func (v1 *Vec2) Magnitude() float64 {
	return Magnitude(*v1)
}

// Magnitude returns the magnitude (Euclidean length) of the given Vec2.
func Magnitude(v Vec2) float64 {
	return math.Sqrt(Dot(v, v))
}

// MirrorI transforms v1 in place by mirroring its I dimension.
func (v1 *Vec2) MirrorI() {
	v1.I = -v1.I
}

// MirrorI returns a new Vec2 equal to v with its I dimension mirrored.
func MirrorI(v Vec2) *Vec2 {
	return &Vec2{-v.I, v.J}
}

// MirrorJ transforms v1 in place by mirroring its J dimension.
func (v1 *Vec2) MirrorJ() {
	v1.J = -v1.J
}

// MirrorJ returns a new Vec2 equal to v with its J dimension mirrored.
func MirrorJ(v Vec2) *Vec2 {
	return &Vec2{v.I, -v.J}
}

// Mirror transforms v1 in place by mirroring both of its dimensions.
func (v1 *Vec2) Mirror() {
	v1.MirrorI()
	v1.MirrorJ()
}

// Mirror returns a new Vec2 equal to v with both dimensions mirrored.
func Mirror(v Vec2) *Vec2 {
	return &Vec2{-v.I, -v.J}
}

// Scale scales v1 in place by s.
func (v1 *Vec2) Scale(s float64) {
	v1.I *= s
	v1.J *= s
}

// Scale returns a new Vec2 equal to v scaled by s.
func Scale(v Vec2, s float64) *Vec2 {
	return &Vec2{v.I * s, v.J * s}
}

// Sum adds v2 to v1 in place.
// NOTE(review): identical to the Add method; kept for backward compatibility.
func (v1 *Vec2) Sum(v2 Vec2) {
	v1.I += v2.I
	v1.J += v2.J
}

// Sum returns the sum of two Vec2 as a new Vec2.
// NOTE(review): identical to the Add function; kept for backward compatibility.
func Sum(v1 Vec2, v2 Vec2) *Vec2 {
	return &Vec2{v1.I + v2.I, v1.J + v2.J}
}

// Unit transforms v1 in place into a unit vector.
func (v1 *Vec2) Unit() {
	m := Magnitude(*v1)
	v1.I /= m
	v1.J /= m
}

// Unit returns a new unit vector pointing in v's direction.
func Unit(v Vec2) *Vec2 {
	m := Magnitude(v)
	return &Vec2{v.I / m, v.J / m}
}

// Angle calculates the angle (in radians) between v1 and v2.
func (v1 Vec2) Angle(v2 Vec2) float64 {
	return Angle(v1, v2)
}

// Angle calculates the angle (in radians) between the two given Vec2.
func Angle(v1 Vec2, v2 Vec2) float64 {
	return math.Acos(Dot(v1, v2) / (Magnitude(v1) * Magnitude(v2)))
}

// Rotate rotates the vector in place by theta radians.
// Note: positive angle rotates counter-clockwise,
// negative angle rotates clockwise.
//
// Fixes two defects in the previous version: the value receiver meant the
// caller's vector was never modified, and J was computed from the
// already-overwritten I component.
func (v1 *Vec2) Rotate(theta float64) {
	rMatrix := matrix2.RotationMatrix(theta)
	i, j := v1.I, v1.J
	v1.I = rMatrix.M11*i + rMatrix.M12*j
	v1.J = rMatrix.M21*i + rMatrix.M22*j
}

// Rotate takes a Vec2 and an angle in radians and returns a new vector equal
// to the original rotated by theta radians.
// Note: positive angle rotates counter-clockwise,
// negative angle rotates clockwise.
func Rotate(v Vec2, theta float64) *Vec2 {
	rMatrix := matrix2.RotationMatrix(theta)
	return &Vec2{
		rMatrix.M11*v.I + rMatrix.M12*v.J,
		rMatrix.M21*v.I + rMatrix.M22*v.J,
	}
}
vec2/vec2.go
0.935273
0.629888
vec2.go
starcoder
package goldilocks

import (
	"errors"
	"fmt"

	fp "github.com/Windscribe/go-vhost/circl/math/fp448"
)

// Point is a point on the Goldilocks Curve, held in extended twisted Edwards
// coordinates (x, y, z, t) with t split as t = ta*tb.
type Point struct{ x, y, z, ta, tb fp.Elt }

// String renders each coordinate on its own line for debugging.
func (P Point) String() string {
	return fmt.Sprintf("x: %v\ny: %v\nz: %v\nta: %v\ntb: %v", P.x, P.y, P.z, P.ta, P.tb)
}

// FromAffine creates a point from affine coordinates, returning an error if
// (x, y) does not satisfy the curve equation.
func FromAffine(x, y *fp.Elt) (*Point, error) {
	P := &Point{
		x:  *x,
		y:  *y,
		z:  fp.One(),
		ta: *x,
		tb: *y,
	}
	if !(Curve{}).IsOnCurve(P) {
		return P, errors.New("point not on curve")
	}
	return P, nil
}

// isLessThan returns true if 0 <= x < y, and assumes that slices are of the
// same length and are interpreted in little-endian order.
func isLessThan(x, y []byte) bool {
	i := len(x) - 1
	for i > 0 && x[i] == y[i] {
		i--
	}
	return x[i] < y[i]
}

// FromBytes decodes a compressed point from the input buffer: fp.Size bytes
// of y followed by one byte whose top bit carries the sign of x. Returns an
// error when the buffer is short, y is not reduced mod p, x cannot be
// recovered, or the sign bit is inconsistent.
func FromBytes(in []byte) (*Point, error) {
	if len(in) < fp.Size+1 {
		return nil, errors.New("wrong input length")
	}
	var err = errors.New("invalid decoding")
	P := &Point{}
	signX := in[fp.Size] >> 7
	copy(P.y[:], in[:fp.Size])
	p := fp.P()
	// Reject non-canonical encodings where y >= p.
	if !isLessThan(P.y[:], p[:]) {
		return nil, err
	}
	// Recover x from the curve equation: x^2 = (y^2-1)/(dy^2-1).
	u, v := &fp.Elt{}, &fp.Elt{}
	one := fp.One()
	fp.Sqr(u, &P.y)        // u = y^2
	fp.Mul(v, u, &paramD)  // v = dy^2
	fp.Sub(u, u, &one)     // u = y^2-1
	fp.Sub(v, v, &one)     // v = dy^2-1
	isQR := fp.InvSqrt(&P.x, u, v) // x = sqrt(u/v)
	if !isQR {
		return nil, err
	}
	fp.Modp(&P.x) // x = x mod p
	// x == 0 has no sign; a set sign bit is invalid there.
	if fp.IsZero(&P.x) && signX == 1 {
		return nil, err
	}
	if signX != (P.x[0] & 1) {
		fp.Neg(&P.x, &P.x)
	}
	P.ta = P.x
	P.tb = P.y
	P.z = fp.One()
	return P, nil
}

// IsIdentity returns true if P is the identity Point (x = 0, y = z != 0).
func (P *Point) IsIdentity() bool {
	return fp.IsZero(&P.x) && !fp.IsZero(&P.y) && !fp.IsZero(&P.z) && P.y == P.z
}

// IsEqual returns true if P is equivalent to Q, comparing the projective
// coordinates cross-multiplied so differing z denominators cancel.
func (P *Point) IsEqual(Q *Point) bool {
	l, r := &fp.Elt{}, &fp.Elt{}
	fp.Mul(l, &P.x, &Q.z)
	fp.Mul(r, &Q.x, &P.z)
	fp.Sub(l, l, r)
	b := fp.IsZero(l)
	fp.Mul(l, &P.y, &Q.z)
	fp.Mul(r, &Q.y, &P.z)
	fp.Sub(l, l, r)
	b = b && fp.IsZero(l)
	fp.Mul(l, &P.ta, &P.tb)
	fp.Mul(l, l, &Q.z)
	fp.Mul(r, &Q.ta, &Q.tb)
	fp.Mul(r, r, &P.z)
	fp.Sub(l, l, r)
	b = b && fp.IsZero(l)
	return b
}

// Neg obtains the inverse of the Point, negating x (and ta, keeping t = x*y
// consistent).
func (P *Point) Neg() { fp.Neg(&P.x, &P.x); fp.Neg(&P.ta, &P.ta) }

// ToAffine normalizes P to z = 1 and returns the x, y affine coordinates.
func (P *Point) ToAffine() (x, y fp.Elt) {
	fp.Inv(&P.z, &P.z)       // 1/z
	fp.Mul(&P.x, &P.x, &P.z) // x/z
	fp.Mul(&P.y, &P.y, &P.z) // y/z
	fp.Modp(&P.x)
	fp.Modp(&P.y)
	fp.SetOne(&P.z)
	P.ta = P.x
	P.tb = P.y
	return P.x, P.y
}

// ToBytes stores the compressed encoding of P into out: y followed by a byte
// carrying the sign of x in its top bit. out must hold fp.Size+1 bytes.
func (P *Point) ToBytes(out []byte) error {
	if len(out) < fp.Size+1 {
		return errors.New("invalid decoding")
	}
	x, y := P.ToAffine()
	out[fp.Size] = (x[0] & 1) << 7
	return fp.ToBytes(out[:fp.Size], &y)
}

// MarshalBinary encodes the receiver into a binary form and returns the result.
func (P *Point) MarshalBinary() (data []byte, err error) {
	data = make([]byte, fp.Size+1)
	err = P.ToBytes(data[:fp.Size+1])
	return data, err
}

// UnmarshalBinary must be able to decode the form generated by MarshalBinary.
//
// Fixes a nil-pointer dereference in the previous version: FromBytes returns
// a nil Point on every error path, so *P = *Q panicked on invalid input
// instead of returning the error.
func (P *Point) UnmarshalBinary(data []byte) error {
	Q, err := FromBytes(data)
	if err != nil {
		return err
	}
	*P = *Q
	return nil
}

// Double sets P = 2P.
func (P *Point) Double() { P.Add(P) }

// Add sets P = P+Q.
func (P *Point) Add(Q *Point) {
	// This is formula (5) from "Twisted Edwards Curves Revisited" by
	// Hisil, Wong, Carter, Dawson (2008)
	// https://doi.org/10.1007/978-3-540-89255-7_20
	x1, y1, z1, ta1, tb1 := &P.x, &P.y, &P.z, &P.ta, &P.tb
	x2, y2, z2, ta2, tb2 := &Q.x, &Q.y, &Q.z, &Q.ta, &Q.tb
	// The outputs alias P's fields; E and H land in ta/tb so that
	// T = E*H is stored implicitly.
	x3, y3, z3, E, H := &P.x, &P.y, &P.z, &P.ta, &P.tb
	A, B, C, D := &fp.Elt{}, &fp.Elt{}, &fp.Elt{}, &fp.Elt{}
	t1, t2, F, G := C, D, &fp.Elt{}, &fp.Elt{}
	fp.Mul(t1, ta1, tb1)  // t1 = ta1*tb1
	fp.Mul(t2, ta2, tb2)  // t2 = ta2*tb2
	fp.Mul(A, x1, x2)     // A = x1*x2
	fp.Mul(B, y1, y2)     // B = y1*y2
	fp.Mul(C, t1, t2)     // t1*t2
	fp.Mul(C, C, &paramD) // C = d*t1*t2
	fp.Mul(D, z1, z2)     // D = z1*z2
	fp.Add(F, x1, y1)     // x1+y1
	fp.Add(E, x2, y2)     // x2+y2
	fp.Mul(E, E, F)       // (x1+y1)*(x2+y2)
	fp.Sub(E, E, A)       // (x1+y1)*(x2+y2)-A
	fp.Sub(E, E, B)       // E = (x1+y1)*(x2+y2)-A-B
	fp.Sub(F, D, C)       // F = D-C
	fp.Add(G, D, C)       // G = D+C
	fp.Sub(H, B, A)       // H = B-A
	fp.Mul(z3, F, G)      // Z = F * G
	fp.Mul(x3, E, F)      // X = E * F
	fp.Mul(y3, G, H)      // Y = G * H, T = E * H
}
circl/ecc/goldilocks/point.go
0.792865
0.400163
point.go
starcoder
package heap import "github.com/ericmittelhammer/ginomialheap/tree" type BinomialHeap struct { // head of the heap. will be the tree with the lowest degree in the heap Head *tree.BinomialTree // shortcut pointer to the tree with the smallest head element. // ensures O(1) lookup Min *tree.BinomialTree } // utility method used to build the new heap // there are two cases // 1. h1 and h2 have the same degree. merge them and add to the tail // if the tail is of the same degree, merge as well // 2. h1 and h2 have different degrees. choose the smaller and add // to tail, unless tail is of the same degree, in which case merge func attachTo(tail *tree.BinomialTree, h1 *tree.BinomialTree, h2 *tree.BinomialTree) { if h1 == nil { tail.Sibling = h2 } else if h2 == nil { tail.Sibling = h1 } else if h1.Degree == h2.Degree { h1tail := h1.Sibling h2tail := h2.Sibling h1.Sibling = nil h2.Sibling = nil merged, _ := tree.Merge(h1, h2) if tail == nil { tail = merged } else if tail.Degree == merged.Degree { tail, _ = tree.Merge(tail, merged) } else { tail.Sibling = merged } attachTo(tail, h1tail, h2tail) } else { var toAttach *tree.BinomialTree if h1.Degree < h2.Degree { toAttach = h1 h1 = h1.Sibling toAttach.Sibling = nil } else { toAttach = h2 h2 = h2.Sibling toAttach.Sibling = nil } if tail == nil { tail = toAttach } else if tail.Degree == toAttach.Degree { tail, _ = tree.Merge(tail, toAttach) } else { tail.Sibling = toAttach } attachTo(tail, h1, h2) } } // merge two heaps according to the merge algorithm // choose the lowest degree tree between the head of the two heaps // merging trees when there are more than one of the same rank. 
func merge(heap1 *BinomialHeap, heap2 *BinomialHeap) *BinomialHeap { var min *tree.BinomialTree if heap1.Min.Value < heap2.Min.Value { min = heap1.Min } else { min = heap2.Min } result := &BinomialHeap{Head: nil, Min: min} attachTo(result.Head, heap1.Head, heap2.Head) return result } // Create an empty heap func Create() *BinomialHeap { return &BinomialHeap{} }
heap/heap.go
0.665302
0.607285
heap.go
starcoder
package tracertest import ( "fmt" "net/http" "reflect" "testing" "github.com/DataDog/dd-trace-go/tracer" "github.com/DataDog/dd-trace-go/tracer/ext" "github.com/stretchr/testify/assert" ) // CopySpan returns a new span with the same fields of the copied one. // This function is necessary because the usual assignment copies the mutex address // and then the use of the copied span can conflict with the original one when concurent calls. func CopySpan(span *tracer.Span, trc *tracer.Tracer) *tracer.Span { newSpan := tracer.NewSpan(span.Name, span.Service, span.Resource, span.SpanID, span.TraceID, span.ParentID, trc) newSpan.Type = ext.SQLType newSpan.Meta = span.Meta return newSpan } // Test strict equality between the most important fields of the two spans func CompareSpan(t *testing.T, expectedSpan, actualSpan *tracer.Span, debug ...bool) { if len(debug) > 0 && debug[0] { fmt.Printf("-> ExpectedSpan: \n%s\n\n", expectedSpan) } assert := assert.New(t) assert.Equal(expectedSpan.Name, actualSpan.Name) assert.Equal(expectedSpan.Service, actualSpan.Service) assert.Equal(expectedSpan.Resource, actualSpan.Resource) assert.Equal(expectedSpan.Type, actualSpan.Type) assert.True(reflect.DeepEqual(expectedSpan.Meta, actualSpan.Meta), fmt.Sprintf("%v != %v", expectedSpan.Meta, actualSpan.Meta)) } // Return a Tracer with a DummyTransport func GetTestTracer() (*tracer.Tracer, *DummyTransport) { transport := &DummyTransport{} tracer := tracer.NewTracerTransport(transport) return tracer, transport } // dummyTransport is a transport that just buffers spans and encoding type DummyTransport struct { traces [][]*tracer.Span services map[string]tracer.Service } func (t *DummyTransport) SendTraces(traces [][]*tracer.Span) (*http.Response, error) { t.traces = append(t.traces, traces...) 
return nil, nil } func (t *DummyTransport) SendServices(services map[string]tracer.Service) (*http.Response, error) { t.services = services return nil, nil } func (t *DummyTransport) Traces() [][]*tracer.Span { traces := t.traces t.traces = nil return traces } func (t *DummyTransport) SetHeader(key, value string) {}
tracer/tracertest/tracertest.go
0.782413
0.506408
tracertest.go
starcoder
package xutil import ( "errors" "time" ) // UNow returns universal current timestamp (location set to UTC). func UNow() time.Time { return time.Now().UTC() } // UNowPtr returns universal current timestamp (location set to UTC) // as pointer value. func UNowPtr() *time.Time { ts := time.Now().UTC() return &ts } // UDate returns the universal timestamp according to // year-month-day hour:min:sec.nsec (location always set to UTC). // This is a wrapper around Go's time.Date. func UDate(year int, month time.Month, day, hour, min, sec, nsec int) time.Time { return time.Date(year, month, day, hour, min, sec, nsec, time.UTC) } // UDatePtr returns the universal timestamp according to // year-month-day hour:min:sec.nsec (location always set to UTC) as pointer value. // This is a wrapper around Go's time.Date. func UDatePtr(year int, month time.Month, day, hour, min, sec, nsec int) *time.Time { ts := time.Date(year, month, day, hour, min, sec, nsec, time.UTC) return &ts } // FractionalTime wraps around time.Time to customize the marshalling // to JSON to always include a fractional part, even when it is zero. // Only precision up and till microseconds (0,000001) is considered. type FractionalTime struct { time.Time } // MarshalJSON implements the json.Marshaler interface. // The time is a quoted string in RFC 3339 format, with sub-second precision added, even // when zero. // Code borrowed from Go's time.Time. func (t FractionalTime) MarshalJSON() ([]byte, error) { if y := t.Year(); y < 0 || y >= 10000 { // RFC 3339 is clear that years are 4 digits exactly. // See golang.org/issue/4556#c15 for more discussion. return nil, errors.New("Time.MarshalJSON: year outside of range [0,9999]") } b := make([]byte, 0, len(time.RFC3339Nano)+2) b = t.AppendFormat(b, "\"2006-01-02T15:04:05.999999Z07:00") if b[20] != '.' { // until 9999, always at this position var end []byte end = append(end, b[20:]...) 
// usually 1 byte (Z), but we actually dont know b = b[0:20] b = append(b, []byte{'.', '0'}...) b = append(b, end...) } b = append(b, '"') return b, nil }
xutil/temporal.go
0.786705
0.424412
temporal.go
starcoder
package af // Function is a struct used for giving an underlying function a name and definitions. type Function struct { Name string // the name of the function Description string // a description of the function Aliases []string // aliases for the function used when providing a public API Definitions []Definition // the definitions of the function Function func(args ...interface{}) (interface{}, error) // the underlying function to execute } // Map returns a map of metadata about the function. func (f Function) Map() map[string]interface{} { return map[string]interface{}{ "name": f.Name, "description": f.Description, "aliases": f.Aliases, } } // IsBoolean returns true if the function always returns a boolean value. func (f Function) IsBoolean() bool { if len(f.Definitions) == 0 { return false } for _, d := range f.Definitions { if !d.IsBoolean() { return false } } return true } // IsInteger returns true if the function always returns an integer value. func (f Function) IsInteger() bool { if len(f.Definitions) == 0 { return false } for _, d := range f.Definitions { if !d.IsInteger() { return false } } return true } // IsFloat returns true if the function always returns a floating point number value. func (f Function) IsFloat() bool { if len(f.Definitions) == 0 { return false } for _, d := range f.Definitions { if !d.IsFloat() { return false } } return true } // IsNumber returns true if the function always returns a numeric value. func (f Function) IsNumber() bool { if len(f.Definitions) == 0 { return false } for _, d := range f.Definitions { if !d.IsNumber() { return false } } return true } // IsString returns true if the function always returns a string value. func (f Function) IsString() bool { if len(f.Definitions) == 0 { return false } for _, d := range f.Definitions { if !d.IsString() { return false } } return true } // IsValid returns true if the arguments match a definition of the function. 
func (f Function) IsValid(args ...interface{}) bool { if len(f.Definitions) == 0 { return true } for _, d := range f.Definitions { if d.IsValid(args...) { return true } } return false } // Validate returns a ErrInvalidArguments error if the arguments do not match a definition of the function. func (f Function) Validate(args ...interface{}) error { valid := f.IsValid(args...) if !valid { return ErrInvalidArguments{Function: f.Name, Arguments: args} } return nil } // Run executes the function with the provided arguments and returns the result, and error if any. // It accepts variadic input. func (f Function) Run(args ...interface{}) (interface{}, error) { return f.Function(args...) } // ValidateRun validates the function arguments and then runs the function if valid. // It accepts variadic input. func (f Function) ValidateRun(args ...interface{}) (interface{}, error) { err := f.Validate(args...) if err != nil { return nil, err } return f.Run(args...) } // MustRun executes the function with the provided arguments and returns the result. If there is any error, then panics. func (f Function) MustRun(args ...interface{}) interface{} { output, err := f.Function(args...) if err != nil { panic(err) } return output }
pkg/af/Function.go
0.750736
0.54819
Function.go
starcoder
package numato import ( "bytes" "fmt" "strconv" "strings" ) // Simulator controls a dummy Numato device. // It only deals with the input/output of the numato.Numato object and does not // handle all valid inputs to a real Numato. type Simulator struct { relays, GPIOs, ADCs uint8 state map[portType][]bool buf bytes.Buffer pending []byte } // OpenSimulator returns a Simulator and a Numato object under its control. func OpenSimulator(relays, GPIOs, ADCs uint8) (*Simulator, *Numato) { sim := &Simulator{ relays, GPIOs, ADCs, map[portType][]bool{ Relay: make([]bool, relays), GPIO: make([]bool, GPIOs), }, bytes.Buffer{}, []byte{}, } dummy := &Numato{sim} return sim, dummy } // Read can be used to receive responses from the Simulator. func (sim *Simulator) Read(b []byte) (int, error) { return sim.buf.Read(b) } // Write acts as a dummy serial port and processes any completed command. // Incomplete commands will be buffered and handled once a '\r' is written. func (sim *Simulator) Write(b []byte) (int, error) { commands := bytes.Split(b, []byte("\r")) commands[0] = append(sim.pending, commands[0]...) for i := 0; i < len(commands)-1; i++ { sim.process(commands[i]) } sim.pending = commands[len(commands)-1] return sim.buf.Write(b) } func (sim *Simulator) process(cmd []byte) { // Simulate the echo behaviour sim.buf.Write(cmd) sim.buf.Write([]byte("\r")) components := strings.Split(string(cmd), " ") if len(components) != 3 { return } num, err := strconv.Atoi(components[2]) if err != nil { return } p := Port{ portType(components[0]), num, } s := state(components[1]) switch s { case On: fallthrough case Off: sim.Set(p, s) case read: on, err := sim.IsOn(p) if err != nil { break } status := "on" if !on { status = "off" } sim.buf.Write([]byte(fmt.Sprintf("\n\r%s\n\r", status))) default: // an error happened } sim.buf.Write([]byte("\n\r> ")) } // Close is a noop. func (sim *Simulator) Close() error { return nil } // Off turns the simulated port off. 
func (sim *Simulator) Off(p Port) { sim.Set(p, Off) } // On turns the simulated port on. func (sim *Simulator) On(p Port) { sim.Set(p, On) } // Set sets a port to the profided state. func (sim *Simulator) Set(p Port, s state) error { set, ok := sim.state[p.Class] if !ok { panic("invalid type") } if p.Number >= len(set) { panic("port out of range") } set[p.Number] = s == On return nil } // IsOn reads the status of the port as seen by the simulator. func (sim *Simulator) IsOn(p Port) (bool, error) { set, ok := sim.state[p.Class] if !ok { panic("invalid type") } if p.Number >= len(set) { panic("port out of range") } return set[p.Number], nil }
simulator.go
0.583441
0.451085
simulator.go
starcoder
package intcode import ( "fmt" ) type Operation interface { ex(*Intcode) int } func operation(memory []int, ip int) (Operation, error) { var op Operation value := memory[ip] opcode := value % 100 modes := ParseModes(value) switch opcode { case 99: op = Halt{} case 1: op = Add{memory[ip+1], memory[ip+2], memory[ip+3], modes} case 2: op = Mul{memory[ip+1], memory[ip+2], memory[ip+3], modes} case 3: op = Input{memory[ip+1], modes} case 4: op = Output{memory[ip+1], modes} case 5: op = JumpTrueOp{memory[ip+1], memory[ip+2], modes} case 6: op = JumpFalseOp{memory[ip+1], memory[ip+2], modes} case 7: op = LtCmpOp{memory[ip+1], memory[ip+2], memory[ip+3], modes} case 8: op = EqCmpOp{memory[ip+1], memory[ip+2], memory[ip+3], modes} case 9: op = RelBase{memory[ip+1], modes} default: return nil, ErrOpcode{opcode} } return op, nil } type Modes []int func ParseModes(instruction int) Modes { modes := make(Modes, 0) v := instruction / 100 for v != 0 { modes = append(modes, v%10) v = v / 10 } return modes } func (m Modes) Mode(i int) int { if i < len(m) { return m[i] } return 0 } type ErrOpcode struct { opcode int } func (e ErrOpcode) Error() string { return fmt.Sprintf("Unexpected Opcode %d", e.opcode) } type Halt struct{} func (h Halt) ex(ic *Intcode) int { ic.Halt() return 0 } type Binop struct { id0, id1 int result int modes Modes } type Add Binop func (a Add) ex(ic *Intcode) int { val0 := ic.Mpeek(a.id0, a.modes.Mode(0)) val1 := ic.Mpeek(a.id1, a.modes.Mode(1)) ic.Mpoke(a.result, a.modes.Mode(2), val0+val1) return 3 } type Mul Binop func (a Mul) ex(ic *Intcode) int { val0 := ic.Mpeek(a.id0, a.modes.Mode(0)) val1 := ic.Mpeek(a.id1, a.modes.Mode(1)) ic.Mpoke(a.result, a.modes.Mode(2), val0*val1) return 3 } type OnePOp struct { index int modes Modes } type Input OnePOp func (i Input) ex(ic *Intcode) int { ic.Mpoke(i.index, i.modes.Mode(0), ic.PopInput()) return 1 } type Output OnePOp func (o Output) ex(ic *Intcode) int { ic.PushOutput(ic.Mpeek(o.index, o.modes.Mode(0))) return 1 
} type RelBase OnePOp func (r RelBase) ex(ic *Intcode) int { ic.MoveBase(ic.Mpeek(r.index, r.modes.Mode(0))) return 1 } type JumpOp struct { value int dest int modes Modes } type JumpTrueOp JumpOp func (j JumpTrueOp) ex(ic *Intcode) int { val := ic.Mpeek(j.value, j.modes.Mode(0)) if val != 0 { ic.SetPc(ic.Mpeek(j.dest, j.modes.Mode(1))) return 0 } return 2 } type JumpFalseOp JumpOp func (j JumpFalseOp) ex(ic *Intcode) int { val := ic.Mpeek(j.value, j.modes.Mode(0)) if val == 0 { ic.SetPc(ic.Mpeek(j.dest, j.modes.Mode(1))) return 0 } return 2 } type CmpOp struct { p0 int p1 int dest int modes Modes } type LtCmpOp CmpOp func (c LtCmpOp) ex(ic *Intcode) int { val0 := ic.Mpeek(c.p0, c.modes.Mode(0)) val1 := ic.Mpeek(c.p1, c.modes.Mode(1)) if val0 < val1 { ic.Mpoke(c.dest, c.modes.Mode(2), 1) } else { ic.Mpoke(c.dest, c.modes.Mode(2), 0) } return 3 } type EqCmpOp CmpOp func (c EqCmpOp) ex(ic *Intcode) int { val0 := ic.Mpeek(c.p0, c.modes.Mode(0)) val1 := ic.Mpeek(c.p1, c.modes.Mode(1)) if val0 == val1 { ic.Mpoke(c.dest, c.modes.Mode(2), 1) } else { ic.Mpoke(c.dest, c.modes.Mode(2), 0) } return 3 } type ErrOutOfRange struct { ic *Intcode need int } func (e ErrOutOfRange) Error() string { return fmt.Sprintf("Out of range: pc:%d, limit:%d, needed:%d", e.ic.Pc(), e.ic.Len(), e.need) }
v19/internal/intcode/operation.go
0.639286
0.465205
operation.go
starcoder
package deregexp import ( "fmt" "regexp/syntax" "sort" "strings" ) // part describes a part of the regexp after converting it to this simple form. type part interface { describePart() string } // word is a literal. type word string // separator is one (or more) unknown characters, which we can't substring filter with, so they just separate words. type separator struct{} // orPart is the logical or of any of its subparts. type orPart []part // concatenation is a list of parts that follow directly after another. type concatenation []part func (w word) describePart() string { return string(w) } func (separator) describePart() string { return "." } func (g orPart) describePart() string { var ret []string for _, w := range g { ret = append(ret, w.describePart()) } return "(" + strings.Join(ret, "|") + ")" } func (c concatenation) describePart() string { var ret []string for _, w := range c { ret = append(ret, w.describePart()) } return "(" + strings.Join(ret, ", ") + ")" } // stripBare takes a regexp and simplifies to down to the 4 types of `part`. func stripBare(re *syntax.Regexp) (retPart part) { switch re.Op { case syntax.OpNoMatch: // matches no strings // TODO(quis): Introduce a part type for this? 
return word("__no_matches") case syntax.OpEmptyMatch: // matches empty string return word("") case syntax.OpLiteral: // matches Runes sequence return word(re.Rune) case syntax.OpCharClass: // matches Runes interpreted as range pair list rs := expandRanges(re.Rune) if len(rs) > 5 { return separator{} } var ret orPart for _, r := range rs { ret = append(ret, word(fmt.Sprintf("%c", r))) } return ret case syntax.OpAnyCharNotNL: // matches any character except newline return separator{} case syntax.OpAnyChar: // matches any character return separator{} case syntax.OpBeginLine: // matches empty string at beginning of line return separator{} case syntax.OpEndLine: // matches empty string at end of line return separator{} case syntax.OpBeginText: // matches empty string at beginning of text // TODO(quis): Introduce a part type for this so we can generate SQL expressions with LIKEs that can be anchored at the start/end of a field. return separator{} case syntax.OpEndText: // matches empty string at end of text return separator{} case syntax.OpWordBoundary: // matches word boundary `\b` return word("") case syntax.OpNoWordBoundary: // matches word non-boundary `\B` return word("") case syntax.OpCapture: // capturing subexpression with index Cap, optional name Name return stripBare(re.Sub[0]) case syntax.OpStar: // matches Sub[0] zero or more times return separator{} case syntax.OpPlus: // matches Sub[0] one or more times return concatenation{stripBare(re.Sub[0]), separator{}} case syntax.OpQuest: // matches Sub[0] zero or one times return orPart{stripBare(re.Sub[0]), word("")} case syntax.OpRepeat: // matches Sub[0] at least Min times, at most Max (Max == -1 is no limit) s := stripBare(re.Sub[0]) // If the difference is more than 5 we're generating too many different combinations. Just treat it as a separator rather than generating all possibilities. 
if re.Max == -1 || re.Max-re.Min > 5 { var ret concatenation for i := 0; re.Min > i; i++ { ret = append(ret, s) } if re.Min != re.Max { ret = append(ret, separator{}) } return ret } else { var ret orPart for j := re.Min; re.Max >= j; j++ { var c concatenation for i := 0; j > i; i++ { c = append(c, s) } ret = append(ret, c) } return ret } case syntax.OpConcat: // matches concatenation of Subs var ret concatenation for _, s := range re.Sub { ret = append(ret, stripBare(s)) } return ret case syntax.OpAlternate: // matches alternation of Subs var ret orPart for _, s := range re.Sub { ret = append(ret, stripBare(s)) } return ret default: panic(fmt.Errorf("unknown opcode %d", re.Op)) } } // expandRanges expands ranges from a character class to the full set. func expandRanges(rs []int32) []int32 { ret := map[int32]bool{} for i, s := range rs { if i%2 == 1 { continue } for c := s; rs[i+1] >= c; c++ { ret[c] = true } } out := make([]int32, 0, len(ret)) for c := range ret { out = append(out, c) } sort.Slice(out, func(i, j int) bool { return out[i] < out[j] }) return out }
parts.go
0.52902
0.432303
parts.go
starcoder
package finnhub import ( "encoding/json" ) // BasicFinancials struct for BasicFinancials type BasicFinancials struct { // Symbol of the company. Symbol *string `json:"symbol,omitempty"` // Metric type. MetricType *string `json:"metricType,omitempty"` Series *map[string]interface{} `json:"series,omitempty"` Metric *map[string]interface{} `json:"metric,omitempty"` } // NewBasicFinancials instantiates a new BasicFinancials object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewBasicFinancials() *BasicFinancials { this := BasicFinancials{} return &this } // NewBasicFinancialsWithDefaults instantiates a new BasicFinancials object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewBasicFinancialsWithDefaults() *BasicFinancials { this := BasicFinancials{} return &this } // GetSymbol returns the Symbol field value if set, zero value otherwise. func (o *BasicFinancials) GetSymbol() string { if o == nil || o.Symbol == nil { var ret string return ret } return *o.Symbol } // GetSymbolOk returns a tuple with the Symbol field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BasicFinancials) GetSymbolOk() (*string, bool) { if o == nil || o.Symbol == nil { return nil, false } return o.Symbol, true } // HasSymbol returns a boolean if a field has been set. func (o *BasicFinancials) HasSymbol() bool { if o != nil && o.Symbol != nil { return true } return false } // SetSymbol gets a reference to the given string and assigns it to the Symbol field. func (o *BasicFinancials) SetSymbol(v string) { o.Symbol = &v } // GetMetricType returns the MetricType field value if set, zero value otherwise. 
func (o *BasicFinancials) GetMetricType() string { if o == nil || o.MetricType == nil { var ret string return ret } return *o.MetricType } // GetMetricTypeOk returns a tuple with the MetricType field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BasicFinancials) GetMetricTypeOk() (*string, bool) { if o == nil || o.MetricType == nil { return nil, false } return o.MetricType, true } // HasMetricType returns a boolean if a field has been set. func (o *BasicFinancials) HasMetricType() bool { if o != nil && o.MetricType != nil { return true } return false } // SetMetricType gets a reference to the given string and assigns it to the MetricType field. func (o *BasicFinancials) SetMetricType(v string) { o.MetricType = &v } // GetSeries returns the Series field value if set, zero value otherwise. func (o *BasicFinancials) GetSeries() map[string]interface{} { if o == nil || o.Series == nil { var ret map[string]interface{} return ret } return *o.Series } // GetSeriesOk returns a tuple with the Series field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *BasicFinancials) GetSeriesOk() (*map[string]interface{}, bool) { if o == nil || o.Series == nil { return nil, false } return o.Series, true } // HasSeries returns a boolean if a field has been set. func (o *BasicFinancials) HasSeries() bool { if o != nil && o.Series != nil { return true } return false } // SetSeries gets a reference to the given map[string]interface{} and assigns it to the Series field. func (o *BasicFinancials) SetSeries(v map[string]interface{}) { o.Series = &v } // GetMetric returns the Metric field value if set, zero value otherwise. func (o *BasicFinancials) GetMetric() map[string]interface{} { if o == nil || o.Metric == nil { var ret map[string]interface{} return ret } return *o.Metric } // GetMetricOk returns a tuple with the Metric field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *BasicFinancials) GetMetricOk() (*map[string]interface{}, bool) { if o == nil || o.Metric == nil { return nil, false } return o.Metric, true } // HasMetric returns a boolean if a field has been set. func (o *BasicFinancials) HasMetric() bool { if o != nil && o.Metric != nil { return true } return false } // SetMetric gets a reference to the given map[string]interface{} and assigns it to the Metric field. func (o *BasicFinancials) SetMetric(v map[string]interface{}) { o.Metric = &v } func (o BasicFinancials) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.Symbol != nil { toSerialize["symbol"] = o.Symbol } if o.MetricType != nil { toSerialize["metricType"] = o.MetricType } if o.Series != nil { toSerialize["series"] = o.Series } if o.Metric != nil { toSerialize["metric"] = o.Metric } return json.Marshal(toSerialize) } type NullableBasicFinancials struct { value *BasicFinancials isSet bool } func (v NullableBasicFinancials) Get() *BasicFinancials { return v.value } func (v *NullableBasicFinancials) Set(val *BasicFinancials) { v.value = val v.isSet = true } func (v NullableBasicFinancials) IsSet() bool { return v.isSet } func (v *NullableBasicFinancials) Unset() { v.value = nil v.isSet = false } func NewNullableBasicFinancials(val *BasicFinancials) *NullableBasicFinancials { return &NullableBasicFinancials{value: val, isSet: true} } func (v NullableBasicFinancials) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableBasicFinancials) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
model_basic_financials.go
0.734596
0.489442
model_basic_financials.go
starcoder
package z import ( "encoding/binary" ) // Buffer is equivalent of bytes.Buffer without the ability to read. It uses z.Calloc to allocate // memory, which depending upon how the code is compiled could use jemalloc for allocations. type Buffer struct { buf []byte offset int } // NewBuffer would allocate a buffer of size sz upfront. func NewBuffer(sz int) *Buffer { return &Buffer{ buf: Calloc(sz), offset: 0, } } // Len would return the number of bytes written to the buffer so far. func (b *Buffer) Len() int { return b.offset } // Bytes would return all the written bytes as a slice. func (b *Buffer) Bytes() []byte { return b.buf[0:b.offset] } // smallBufferSize is an initial allocation minimal capacity. const smallBufferSize = 64 // Grow would grow the buffer to have at least n more bytes. In case the buffer is at capacity, it // would reallocate twice the size of current capacity + n, to ensure n bytes can be written to the // buffer without further allocation. func (b *Buffer) Grow(n int) { // In this case, len and cap are the same. if len(b.buf) == 0 && n <= smallBufferSize { b.buf = Calloc(smallBufferSize) return } else if b.buf == nil { b.buf = Calloc(n) return } if b.offset+n < len(b.buf) { return } sz := 2*len(b.buf) + n newBuf := Calloc(sz) copy(newBuf, b.buf[:b.offset]) Free(b.buf) b.buf = newBuf } // Allocate is a way to get a slice of size n back from the buffer. This slice can be directly // written to. Warning: Allocate is not thread-safe. The byte slice returned MUST be used before // further calls to Buffer. func (b *Buffer) Allocate(n int) []byte { b.Grow(n) off := b.offset b.offset += n return b.buf[off:b.offset] } func (b *Buffer) writeLen(sz int) { buf := b.Allocate(4) binary.BigEndian.PutUint32(buf, uint32(sz)) } // SliceAllocate would encode the size provided into the buffer, followed by a call to Allocate, // hence returning the slice of size sz. This can be used to allocate a lot of small buffers into // this big buffer. 
// Note that SliceAllocate should NOT be mixed with normal calls to Write. Otherwise, SliceOffsets // won't work. func (b *Buffer) SliceAllocate(sz int) []byte { b.Grow(4 + sz) b.writeLen(sz) return b.Allocate(sz) } // SliceOffsets would return the offsets of all slices written to the buffer. // TODO: Perhaps keep the offsets separate in another buffer, and allow access to slices via index. func (b *Buffer) SliceOffsets(offsets []int) []int { start := 0 for start < b.offset { offsets = append(offsets, start) sz := binary.BigEndian.Uint32(b.buf[start:]) start += 4 + int(sz) } return offsets } // Slice would return the slice written at offset. func (b *Buffer) Slice(offset int) []byte { sz := binary.BigEndian.Uint32(b.buf[offset:]) start := offset + 4 return b.buf[start : start+int(sz)] } // Write would write p bytes to the buffer. func (b *Buffer) Write(p []byte) (n int, err error) { b.Grow(len(p)) n = copy(b.buf[b.offset:], p) b.offset += n return n, nil } // Reset would reset the buffer to be reused. func (b *Buffer) Reset() { b.offset = 0 } // Release would free up the memory allocated by the buffer. Once the usage of buffer is done, it is // important to call Release, otherwise a memory leak can happen. func (b *Buffer) Release() { Free(b.buf) }
z/buffer.go
0.681091
0.431944
buffer.go
starcoder
package aggregation

import (
	"sync"
	"time"
)

// TimedFloat64Buckets keeps buckets that have been collected at a certain time.
type TimedFloat64Buckets struct {
	bucketsMutex sync.RWMutex

	// Metrics received in a certain timeframe are all summed up.
	// This assumes that we don't take multiple readings of
	// the same metric in the same bucket (per second currently).
	// The only case where this might happen currently is when activator scales
	// a revision from 0. The metrics for that bucket might be off
	// by exactly "1" as that poke always reports a concurrency of 1.
	// Since we're windowing metrics anyway, that slight skew is acceptable.
	buckets map[time.Time]float64

	granularity time.Duration
}

// NewTimedFloat64Buckets generates a new TimedFloat64Buckets with the given
// granularity.
func NewTimedFloat64Buckets(granularity time.Duration) *TimedFloat64Buckets {
	return &TimedFloat64Buckets{
		buckets:     make(map[time.Time]float64),
		granularity: granularity,
	}
}

// Record adds a value with an associated time to the correct bucket.
// The parameter was renamed from `time` to `now` so it no longer shadows the
// time package. The name parameter is not used by this implementation; it is
// kept for call-site compatibility — NOTE(review): confirm callers still need it.
func (t *TimedFloat64Buckets) Record(now time.Time, name string, value float64) {
	t.bucketsMutex.Lock()
	defer t.bucketsMutex.Unlock()

	// All values recorded within one granularity window share a bucket.
	bucketKey := now.Truncate(t.granularity)
	t.buckets[bucketKey] += value
}

// isEmpty returns whether or not there are no values currently stored.
// isEmpty requires t.bucketsMutex to be held.
func (t *TimedFloat64Buckets) isEmpty() bool {
	return len(t.buckets) == 0
}

// ForEachBucket calls the given Accumulator function for each bucket.
// Returns true if any data was recorded.
// Iteration order over the map is unspecified.
func (t *TimedFloat64Buckets) ForEachBucket(accs ...Accumulator) bool {
	t.bucketsMutex.RLock()
	defer t.bucketsMutex.RUnlock()

	if t.isEmpty() {
		return false
	}

	for bucketTime, bucket := range t.buckets {
		for _, acc := range accs {
			acc(bucketTime, bucket)
		}
	}
	return true
}

// RemoveOlderThan removes buckets older than the given time from the state.
// The parameter was renamed from `time` to `cutoff` so it no longer shadows
// the time package. Deleting during range is safe for Go maps.
func (t *TimedFloat64Buckets) RemoveOlderThan(cutoff time.Time) {
	t.bucketsMutex.Lock()
	defer t.bucketsMutex.Unlock()

	for bucketTime := range t.buckets {
		if bucketTime.Before(cutoff) {
			delete(t.buckets, bucketTime)
		}
	}
}
pkg/autoscaler/aggregation/bucketing.go
0.772101
0.456046
bucketing.go
starcoder
package seating

import (
	"bytes"
	"io/ioutil"
)

// DataFile defines where to read input data from
var DataFile = "data/game.txt"

// Answer provides the day's answers: the stable occupied-seat counts for the
// plain rules (part 1) and the picky line-of-sight rules (part 2).
func Answer() (int, int, error) {
	data, err := ioutil.ReadFile(DataFile)
	if err != nil {
		return 0, 0, err
	}
	data = bytes.TrimRight(data, "\n")

	// Run each variant until a round produces no changes (fixed point).
	g1 := NewGrid(data)
	for g1.Round() != 0 {
	}

	g2 := NewGrid(data)
	for g2.PickyRound() != 0 {
	}

	return g1.NumberOccupied, g2.NumberOccupied, nil
}

// Grid represents the seating grid
type Grid struct {
	g              []byte // Real grid
	s              []byte // Shadow grid for writing
	RowLength      int
	Height         int
	NumberOccupied int
}

// Cell contents. All are explicitly typed byte so comparisons against grid
// storage never rely on untyped-constant conversion.
const (
	occupied byte = '#'
	empty    byte = 'L'
	floor    byte = '.' // not referenced here; NOTE(review): possibly used elsewhere in the package
	space    byte = ' '
)

// NewGrid creates a new grid from the given input.
// Rows are joined with a single space and a trailing space is appended, so
// the linear index stride is RowLength+1.
func NewGrid(data []byte) *Grid {
	rows := bytes.Split(data, []byte("\n"))
	g := append(bytes.Join(rows, []byte(" ")), []byte(" ")...)
	grid := &Grid{
		g:         g, // Grid
		s:         make([]byte, len(g)),
		RowLength: len(rows[0]),
		Height:    len(rows),
	}
	copy(grid.s, grid.g)
	return grid
}

// GetString returns the current grid string
func (g *Grid) GetString() string {
	return string(g.g)
}

// GetXY fetches a value from the virtual grid
func (g *Grid) GetXY(x, y int) byte {
	if g.invalidLocation(x, y) {
		return space
	}
	return g.g[g.xyToLinear(x, y)]
}

// GetShadowXY fetches a value from the shadow grid
func (g *Grid) GetShadowXY(x, y int) byte {
	if g.invalidLocation(x, y) {
		return space
	}
	return g.s[g.xyToLinear(x, y)]
}

// SetXY sets an element at the given location on the shadow grid
func (g *Grid) SetXY(x, y int, element byte) {
	if g.invalidLocation(x, y) {
		return
	}
	g.s[g.xyToLinear(x, y)] = element
}

// xyToLinear maps (x, y) to an index in the flat storage; the +1 accounts for
// the space separator appended after each row.
func (g *Grid) xyToLinear(x, y int) int {
	return x + (y * (g.RowLength + 1))
}

// invalidLocation reports whether (x, y) is outside the grid.
func (g *Grid) invalidLocation(x, y int) bool {
	return x < 0 || y < 0 || x >= g.RowLength || y >= g.Height
}

// Round performs a round of seating-of-life: empty seats with no occupied
// neighbours fill, occupied seats with four or more occupied neighbours
// empty. Reads come from the real grid, writes go to the shadow grid, which
// is copied back at the end so all updates are simultaneous.
func (g *Grid) Round() (changes int) {
	for y := 0; y < g.Height; y++ {
		for x := 0; x < g.RowLength; x++ {
			element := g.GetXY(x, y)
			switch element {
			case empty:
				if g.Neighbours(x, y) == 0 {
					g.SetXY(x, y, occupied)
					changes++
					g.NumberOccupied++
				}
			case occupied:
				if g.Neighbours(x, y) >= 4 {
					g.SetXY(x, y, empty)
					changes++
					g.NumberOccupied--
				}
			}
		}
	}
	copy(g.g, g.s)
	return changes
}

// Neighbours returns the number of occupied neighbours to the given location
func (g *Grid) Neighbours(x, y int) (numOccupied int) {
	for sy := y - 1; sy <= y+1; sy++ {
		for sx := x - 1; sx <= x+1; sx++ {
			if x == sx && y == sy {
				continue
			}
			if g.GetXY(sx, sy) == occupied {
				numOccupied++
			}
		}
	}
	return numOccupied
}

type matrix struct{ x, y int }

// checks are the eight ray directions used by the picky rules.
var checks = []matrix{
	{-1, 0}, {-1, -1}, {0, -1}, {1, -1},
	{1, 0}, {1, 1}, {0, 1}, {-1, 1},
}

// PickyRound performs a round of seating-of-life using line-of-sight
// neighbours and a tolerance of five. (The unused loop counter present in the
// original implementation has been removed.)
func (g *Grid) PickyRound() (changes int) {
	for y := 0; y < g.Height; y++ {
		for x := 0; x < g.RowLength; x++ {
			element := g.GetXY(x, y)
			switch element {
			case empty:
				if g.PickyNeighbours(x, y) == 0 {
					g.SetXY(x, y, occupied)
					changes++
					g.NumberOccupied++
				}
			case occupied:
				if g.PickyNeighbours(x, y) >= 5 {
					g.SetXY(x, y, empty)
					changes++
					g.NumberOccupied--
				}
			}
		}
	}
	copy(g.g, g.s)
	return changes
}

// PickyNeighbours returns the number of "neighbours" in eight directions,
// short-circuiting at five since no caller distinguishes higher counts.
func (g *Grid) PickyNeighbours(x, y int) (numOccupied int) {
	for _, check := range checks {
		if numOccupied += g.checkLocation(x, y, check); numOccupied == 5 {
			return numOccupied
		}
	}
	return numOccupied
}

// checkLocation walks from (x, y) in the given direction until it leaves the
// grid or hits a seat; it returns 1 for an occupied seat, 0 otherwise.
// Floor and row-separator cells are skipped.
func (g *Grid) checkLocation(x, y int, modifier matrix) int {
	nx := x + modifier.x
	ny := y + modifier.y
	for !g.invalidLocation(nx, ny) {
		switch g.GetXY(nx, ny) {
		case empty:
			return 0
		case occupied:
			return 1
		}
		nx += modifier.x
		ny += modifier.y
	}
	return 0
}
2020/pkg/seating/seating.go
0.675444
0.410461
seating.go
starcoder
package base type Tuple interface { GetData(index int) Feature SetData(index int, newValue interface{}) GetClass() Feature SetClass(newClass interface{}) DataSize() int IsNumeric() bool } type NumericTuple interface { Tuple GetNumericData(index int) float64 ToFloatSlice() []float64 } type IntTuple interface { NumericTuple GetIntData(index int) int } // General type GeneralTuple struct { Data []Feature Class Feature } // Feature types will be inferred. func NewTuple(data []interface{}, class interface{}) Tuple { return &GeneralTuple{InferFeatures(data), InferFeature(class)}; } func (this GeneralTuple) GetData(index int) Feature { return this.Data[index]; } func (this GeneralTuple) SetData(index int, newValue interface{}) { this.Data[index] = InferFeature(newValue); } func (this GeneralTuple) GetClass() Feature { return this.Class; } func (this *GeneralTuple) SetClass(newClass interface{}) { this.Class = InferFeature(newClass); } func (this GeneralTuple) DataSize() int { return len(this.Data); } func (this GeneralTuple) IsNumeric() bool { return false; } // Numeric type FloatTuple struct { Data []NumericFeature Class Feature } func NewFloatTuple(data []float64, class interface{}) NumericTuple { var tupleData []NumericFeature = make([]NumericFeature, len(data)); for i, _ := range(data) { tupleData[i] = Float(data[i]); } return &FloatTuple{tupleData, InferFeature(class)}; } // Feature types will be inferred. 
func NewNumericTuple(data []interface{}, class interface{}) NumericTuple { return &FloatTuple{InferNumericFeatures(data), InferFeature(class)}; } func (this FloatTuple) GetData(index int) Feature { return this.Data[index]; } func (this FloatTuple) SetData(index int, newValue interface{}) { this.Data[index] = InferNumericFeature(newValue); } func (this FloatTuple) GetNumericData(index int) float64 { return this.Data[index].NumericValue(); } func (this FloatTuple) ToFloatSlice() []float64 { var rtn []float64 = make([]float64, this.DataSize()); for i, _ := range(rtn) { rtn[i] = this.GetNumericData(i); } return rtn; } func (this FloatTuple) GetClass() Feature { return this.Class; } func (this *FloatTuple) SetClass(newClass interface{}) { this.Class = InferFeature(newClass); } func (this FloatTuple) DataSize() int { return len(this.Data); } func (this FloatTuple) IsNumeric() bool { return true; } // Integer // Note: the name is pretty bad, but we want people passing the interface not struct. type IntegerTuple struct { Data []IntFeature Class Feature } // Feature types will be inferred. 
func NewIntTuple(data []interface{}, class interface{}) IntTuple { return &IntegerTuple{InferIntFeatures(data), InferFeature(class)}; } func (this IntegerTuple) GetData(index int) Feature { return this.Data[index]; } func (this IntegerTuple) SetData(index int, newValue interface{}) { this.Data[index] = InferIntFeature(newValue); } func (this IntegerTuple) GetNumericData(index int) float64 { return this.Data[index].NumericValue(); } func (this IntegerTuple) ToFloatSlice() []float64 { var rtn []float64 = make([]float64, this.DataSize()); for i, _ := range(rtn) { rtn[i] = this.GetNumericData(i); } return rtn; } func (this IntegerTuple) GetIntData(index int) int { return this.Data[index].IntValue(); } func (this IntegerTuple) GetClass() Feature { return this.Class; } func (this *IntegerTuple) SetClass(newClass interface{}) { this.Class = InferFeature(newClass); } func (this IntegerTuple) DataSize() int { return len(this.Data); } func (this IntegerTuple) IsNumeric() bool { return true; }
base/tuple.go
0.798815
0.425486
tuple.go
starcoder
package go2048

import (
	"bytes"
	"image"
)

// cellContenter abstracts a grid for printing: it reports the grid size and
// the value stored in a cell, if any.
type cellContenter interface {
	Size() image.Point
	CellValue(cell image.Point) (val int, ok bool)
}

// dummyCellContenter is a cellContenter of the given size with no cell values.
type dummyCellContenter image.Point

func (c dummyCellContenter) Size() image.Point { return image.Point(c) }

func (c dummyCellContenter) CellValue(cell image.Point) (val int, ok bool) { return 0, false }

// encodePrintableTest renders a grid with tab indentation and a width-5
// border table; used by tests.
func encodePrintableTest(g *grid) []byte {
	return encodePrintable(g, "\t", BorderTable(5))
}

// encodePrintable renders cc as text using 5x1-character cells, prefixing
// every line with prefix. ssr supplies the border rune sets
// (NOTE(review): appears to be 4 rows — top, value-row, separator, bottom —
// each with 4 runes: left, fill, inner, right; confirm against BorderTable).
func encodePrintable(cc cellContenter, prefix string, ssr [][]rune) []byte {
	var cellSize = image.Point{X: 5, Y: 1}
	pe := newPrintEncoder(cellSize, prefix, cc, ssr)
	return pe.Encode()
}

// printEncoder carries the state needed to render one grid.
type printEncoder struct {
	gridSize image.Point
	cellSize image.Point
	prefix   string
	cc       cellContenter
	ssr      [][]rune
}

// newPrintEncoder builds a printEncoder, clamping cell dimensions to at
// least 1 so the render loops always execute.
func newPrintEncoder(cellSize image.Point, prefix string, cc cellContenter, ssr [][]rune) *printEncoder {
	if cellSize.X < 1 {
		cellSize.X = 1
	}
	if cellSize.Y < 1 {
		cellSize.Y = 1
	}
	gridSize := cc.Size()
	return &printEncoder{
		gridSize: gridSize,
		cellSize: cellSize,
		prefix:   prefix,
		cc:       cc,
		ssr:      ssr,
	}
}

// Encode renders the whole grid: a top border (ssr[0]), then for each row
// the cell interior lines (ssr[1]) with the value line vertically centered,
// followed by either a row separator (ssr[2]) or the bottom border (ssr[3]).
func (pe *printEncoder) Encode() []byte {
	var buf = new(bytes.Buffer)
	yn := pe.gridSize.Y
	var (
		cellHeight = pe.cellSize.Y
		// Split the non-value lines so the value row sits in the middle
		// (extra line goes below when cellHeight is even).
		beforeHeight = (cellHeight - 1) / 2
		afterHeight  = (cellHeight - 1) - beforeHeight
		ssr          = pe.ssr
	)
	for y := 0; y < yn; y++ {
		if y == 0 {
			pe.writeLine(buf, ssr[0])
		}
		for i := 0; i < beforeHeight; i++ {
			pe.writeLine(buf, ssr[1])
		}
		pe.writeLineVal(buf, ssr[1], y)
		for i := 0; i < afterHeight; i++ {
			pe.writeLine(buf, ssr[1])
		}
		if y < yn-1 {
			pe.writeLine(buf, ssr[2])
		} else {
			pe.writeLine(buf, ssr[3])
		}
	}
	return buf.Bytes()
}

// writeLine emits one border/filler line: sr[0] on the left edge, sr[1]
// repeated cellWidth times per cell, sr[2] between cells, sr[3] on the right.
func (pe *printEncoder) writeLine(buf *bytes.Buffer, sr []rune) {
	var (
		xn        = pe.gridSize.X
		cellWidth = pe.cellSize.X
	)
	buf.WriteString(pe.prefix)
	for x := 0; x < xn; x++ {
		if x == 0 {
			buf.WriteRune(sr[0])
		}
		for i := 0; i < cellWidth; i++ {
			buf.WriteRune(sr[1])
		}
		if x < xn-1 {
			buf.WriteRune(sr[2])
		} else {
			buf.WriteRune(sr[3])
		}
	}
	buf.WriteByte('\n')
}

// writeLineVal emits the value line for grid row y: occupied cells get their
// number right-aligned (with one rune of padding on each side when the cell
// is wide enough for 4 digits plus padding), empty cells are filled like a
// plain line.
func (pe *printEncoder) writeLineVal(buf *bytes.Buffer, sr []rune, y int) {
	var (
		xn        = pe.gridSize.X
		cellWidth = pe.cellSize.X
	)
	buf.WriteString(pe.prefix)
	for x := 0; x < xn; x++ {
		if x == 0 {
			buf.WriteRune(sr[0])
		}
		if val, ok := pe.cc.CellValue(image.Point{x, y}); ok {
			if cellWidth >= 4+2 {
				buf.WriteRune(sr[1])
				buf.WriteString(itoaN(val, cellWidth-2, byte(sr[1])))
				buf.WriteRune(sr[1])
			} else {
				buf.WriteString(itoaN(val, cellWidth, byte(sr[1])))
			}
		} else {
			for i := 0; i < cellWidth; i++ {
				buf.WriteRune(sr[1])
			}
		}
		if x < xn-1 {
			buf.WriteRune(sr[2])
		} else {
			buf.WriteRune(sr[3])
		}
	}
	buf.WriteByte('\n')
}

// repeatRune returns r repeated n times as a string.
// NOTE(review): unused in this file; presumably called from a sibling file
// in the package.
func repeatRune(r rune, n int) string {
	rs := make([]rune, n)
	for i := 0; i < n; i++ {
		rs[i] = r
	}
	return string(rs)
}

// itoaN formats x right-aligned in a field of exactly n bytes, padding on
// the left with fillByte. Digits beyond the field width are silently
// dropped (most-significant first). Assumes x >= 0 — a negative x would
// produce only fill bytes; TODO confirm callers never pass negatives.
func itoaN(x, n int, fillByte byte) string {
	data := make([]byte, n)
	i := n
	if (x == 0) && (i > 0) {
		data[i-1] = '0'
		i--
	}
	for (x > 0) && (i > 0) {
		// quoRem is defined elsewhere in this package.
		quo, rem := quoRem(x, 10)
		x = quo
		data[i-1] = byte(rem + '0')
		i--
	}
	for i > 0 {
		data[i-1] = fillByte
		i--
	}
	return string(data)
}
printable.go
0.570092
0.42185
printable.go
starcoder
package particles // Config contains data for particles configuration type Config struct { Speed float64 Area float64 Size float64 Color string Bounds bool Bounce bool Move bool } // DefaultConfig is a default value for config var DefaultConfig = Config{2.0, 80.0, 1.8, "#ccccFF", true, true, true} // Particles is a list of particles type Particles []*Particle // Particle represents a single particle type Particle struct { Config ID int Position Vector Direction Vector } // NewParticle initializes a new particle func NewParticle(id int, config Config) *Particle { return &Particle{ ID: id, Config: config, Direction: Vector{ randf(-100, 100) / 100.0, randf(-100, 100) / 100.0, }, } } // RandomizePosition sets a random (within a given range) position for the particle func (p *Particle) RandomizePosition(width, height int) { p.Position = Vector{ randf(0, width), randf(0, height), } } // ReverseX makes particles go the other way func (p *Particle) ReverseX() { p.Direction[0] = -p.Direction[0] } // ReverseY makes particles go the other way func (p *Particle) ReverseY() { p.Direction[1] = -p.Direction[1] } // Reverse makes particles go the other way func (p *Particle) Reverse() { p.ReverseX() p.ReverseY() } // Distance returns the distance between two particles func (p *Particle) Distance(pa *Particle) float64 { return p.Position.Distance(pa.Position) } // Move moves a particle func (p *Particle) Move() { p.Position[0] += (p.Direction[0] * p.Speed) p.Position[1] += (p.Direction[1] * p.Speed) } // Bounce bounces a particle func (p *Particle) Bounce(maxx, maxy float64) { if p.Position[0] > maxx || p.Position[0] < 0.0 { p.ReverseX() } if p.Position[1] > maxy || p.Position[1] < 0.0 { p.ReverseY() } } // Bounds checks if a particle is within bounds func (p *Particle) Bounds(maxx, maxy float64) { if p.Position[0] > maxx { p.Position[0] = 0.0 } if p.Position[1] > maxy { p.Position[1] = 0.0 } if p.Position[0] < 0.0 { p.Position[0] = maxx } if p.Position[1] < 0.0 { p.Position[1] = 
maxy } }
particles/particle.go
0.751192
0.517083
particle.go
starcoder
package cmd import ( "github.com/gdurandvadas/tfc/module" "github.com/spf13/cobra" "github.com/spf13/viper" ) var moduleCmd = &cobra.Command{ Use: "module", Short: "Execute module actions", Long: `The module sub-command is in charge of the interaction with the modules in the Terraform Cloud Modules Registry. All child commands of module make use of the ` + "`" + `.module.{yml|yaml}` + "`" + ` file, that is expected on the root dir of in the module path. The file requires the yaml configs defined in **module.Module** struct ` + "```yaml" + ` name: test-module provider: "null" ignored: - docs - .module.yaml ` + "```" + ` `, Run: func(cmd *cobra.Command, args []string) {}, } var modulePublishCmd = &cobra.Command{ Use: "publish [semver] [module-path]", Example: "publish patch my/module", Short: "publish a new module version", Long: `The publish sub-command let you upload a new module version to the Terraform Cloud Modules Registry. The versions are calculated with semver from the existing, if the module doesn't exist, it will create it with v0.0.1. `, Args: cobra.MinimumNArgs(2), Aliases: []string{"mp"}, RunE: func(cmd *cobra.Command, args []string) error { module := &module.Module{ Path: args[1], OrganizationName: viper.GetString("organization"), Client: client, Ctx: ctx, } module.Publish(args[0]) return nil }, } var moduleDeleteCmd = &cobra.Command{ Use: "delete [module-path]", Example: "delete my/module", Short: "deletes a module provider or specific version", Long: `The command let you select between version or provider: - version: delete a specific module version from the list - provider: delete all version within the module/provider slug `, Args: cobra.MinimumNArgs(1), Aliases: []string{"md"}, RunE: func(cmd *cobra.Command, args []string) error { module := &module.Module{ Path: args[1], OrganizationName: viper.GetString("organization"), Client: client, Ctx: ctx, } module.Delete() return nil }, }
cmd/module.go
0.586404
0.543227
module.go
starcoder
package pcfmetrics

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// The interfaces below mirror the subset of go-metrics metric types this
// package converts; they are defined locally so any compatible implementation
// can be passed in.

type counter interface {
	Count() int64
}

type gauge interface {
	Value() int64
}

type gaugeFloat64 interface {
	Value() float64
}

type meter interface {
	Count() int64
	Rate1() float64
	Rate5() float64
	Rate15() float64
	RateMean() float64
}

type histogram interface {
	Count() int64
	Sum() int64
	Max() int64
	Min() int64
	Mean() float64
	StdDev() float64
	Variance() float64
	Percentiles([]float64) []float64
}

type timer interface {
	Count() int64
	Rate1() float64
	Rate5() float64
	Rate15() float64
	RateMean() float64
	Sum() int64
	Max() int64
	Min() int64
	Mean() float64
	StdDev() float64
	Variance() float64
	Percentiles([]float64) []float64
}

// convertGauge converts an int64-valued gauge to a single dataPoint.
func convertGauge(gauge gauge, name string) *dataPoint {
	return convertGenericGauge(float64(gauge.Value()), name)
}

// convertGaugeFloat64 converts a float64-valued gauge to a single dataPoint.
func convertGaugeFloat64(gauge gaugeFloat64, name string) *dataPoint {
	return convertGenericGauge(gauge.Value(), name)
}

// convertMeter expands a meter into its count plus four rate gauges.
func convertMeter(meter meter, name string) []*dataPoint {
	return []*dataPoint{
		convertCounter(meter, joinNameParts(name, "count")),
		convertGenericGauge(meter.Rate1(), joinNameParts(name, "rate.1-minute")),
		convertGenericGauge(meter.Rate5(), joinNameParts(name, "rate.5-minute")),
		convertGenericGauge(meter.Rate15(), joinNameParts(name, "rate.15-minute")),
		convertGenericGauge(meter.RateMean(), joinNameParts(name, "rate.mean")),
	}
}

// convertHistogram expands a histogram without applying any time unit.
func convertHistogram(histogram histogram, name string) []*dataPoint {
	return convertHistogramWithTimeUnit(histogram, name, time.Duration(0))
}

// convertHistogramWithTimeUnit expands a histogram into count, summary
// statistics, and percentile gauges, scaling values by timeUnit when it is
// non-zero.
func convertHistogramWithTimeUnit(histogram histogram, name string, timeUnit time.Duration) []*dataPoint {
	points := []*dataPoint{
		convertCounter(histogram, joinNameParts(name, "count")),
		convertGenericGaugeWithUnit(histogram.Mean(), joinNameParts(name, "mean"), timeUnit),
		convertGenericGaugeWithUnit(histogram.StdDev(), joinNameParts(name, "stddev"), timeUnit),
		convertGenericGaugeWithUnit(float64(histogram.Sum()), joinNameParts(name, "sum"), timeUnit),
		convertGenericGaugeWithUnit(histogram.Variance(), joinNameParts(name, "variance"), timeUnit),
		convertGenericGaugeWithUnit(float64(histogram.Max()), joinNameParts(name, "max"), timeUnit),
		convertGenericGaugeWithUnit(float64(histogram.Min()), joinNameParts(name, "min"), timeUnit),
	}

	points = append(points, generatePercentileDataPoints(histogram, name, timeUnit)...)

	return points
}

// generatePercentileDataPoints emits one gauge per tracked percentile.
func generatePercentileDataPoints(histogram histogram, name string, timeUnit time.Duration) []*dataPoint {
	var points []*dataPoint
	percentileIds := []float64{75, 95, 98, 99, 99.9}
	for i, value := range histogram.Percentiles(percentileIds) {
		// Local renamed from `dataPoint` to `point`: the original shadowed
		// the dataPoint type name. The redundant float64(value) conversion
		// (value is already float64) was also dropped.
		point := convertGenericGaugeWithUnit(
			value,
			getPercentileName(name, percentileIds[i]),
			timeUnit,
		)
		points = append(points, point)
	}
	return points
}

// getPercentileName builds e.g. "name.999thPercentile" for percentile 99.9
// (periods are stripped from the percentile id).
func getPercentileName(name string, percentileId float64) string {
	percentileIdString := strconv.FormatFloat(percentileId, 'f', -1, 64)
	percentileWithoutPeriods := strings.Replace(percentileIdString, ".", "", -1)
	return joinNameParts(name, fmt.Sprintf("%sthPercentile", percentileWithoutPeriods))
}

// convertTimer expands a timer into its count, meter rates, and duration
// histogram (scaled to milliseconds unless a timeUnit is supplied). The
// duplicate counters inside the meter/histogram expansions are sliced off.
func convertTimer(timer timer, name string, timeUnit time.Duration) []*dataPoint {
	points := []*dataPoint{
		convertCounter(timer, joinNameParts(name, "count")),
	}

	meterDataPoints := convertMeter(timer, name)
	meterDataPointsWithoutCounter := meterDataPoints[1:]
	points = append(points, meterDataPointsWithoutCounter...)

	if timeUnit == time.Duration(0) {
		timeUnit = time.Millisecond
	}

	histogramDataPoints := convertHistogramWithTimeUnit(timer, joinNameParts(name, "duration"), timeUnit)
	histogramDataPointsWithoutCounter := histogramDataPoints[1:]
	points = append(points, histogramDataPointsWithoutCounter...)

	return points
}

// convertCounter converts any counter-bearing metric to a counter dataPoint.
func convertCounter(counter counter, name string) *dataPoint {
	return &dataPoint{
		Name:  name,
		Value: float64(counter.Count()),
		Type:  "counter",
	}
}

// convertGenericGauge wraps a raw float value as a gauge dataPoint.
func convertGenericGauge(value float64, name string) *dataPoint {
	return &dataPoint{
		Name:  name,
		Value: value,
		Type:  "gauge",
	}
}

// convertGenericGaugeWithUnit scales value by timeUnit (assumed to be in
// nanoseconds when a unit is given — TODO confirm upstream) and tags the
// point with the unit name; a zero timeUnit means no scaling.
func convertGenericGaugeWithUnit(value float64, name string, timeUnit time.Duration) *dataPoint {
	if timeUnit == time.Duration(0) {
		return convertGenericGauge(value, name)
	}

	return &dataPoint{
		Name:  name,
		Value: value / float64(timeUnit),
		Type:  "gauge",
		Unit:  getTimeUnitName(timeUnit),
	}
}

// getTimeUnitName maps a duration unit to its metric unit label, defaulting
// to milliseconds for unrecognized units.
func getTimeUnitName(timeUnit time.Duration) string {
	switch timeUnit {
	case time.Second:
		return "seconds"
	case time.Millisecond:
		return "milliseconds"
	case time.Microsecond:
		return "microseconds"
	case time.Nanosecond:
		return "nanoseconds"
	default:
		return "milliseconds"
	}
}

// joinNameParts joins metric name segments with periods.
func joinNameParts(names ...string) string {
	return strings.Join(names, ".")
}
converters.go
0.733452
0.474205
converters.go
starcoder
package main

import (
	"github.com/ByteArena/box2d"
	"github.com/wdevore/Ranger-Go-IGE/api"
	"github.com/wdevore/Ranger-Go-IGE/engine/rendering/color"
	"github.com/wdevore/Ranger-Go-IGE/extras/shapes"
)

// slopePhysicsComponent pairs a visual sloped-line node with a static Box2D
// edge body.
type slopePhysicsComponent struct {
	physicsComponent

	slope api.INode
}

// newFencePhysicsComponent constructs an empty slopePhysicsComponent.
// NOTE(review): the name says "fence" but the type is a slope — looks like a
// copy-paste from a fence component; consider renaming (would require
// updating callers).
func newFencePhysicsComponent() *slopePhysicsComponent {
	o := new(slopePhysicsComponent)
	return o
}

// Build creates the visual node and then the matching physics body.
// NOTE(review): the error returned by buildNodes is silently discarded; if
// node creation fails, buildPhysics will type-assert a nil p.slope and panic.
// Consider propagating the error.
func (p *slopePhysicsComponent) Build(world api.IWorld, parent api.INode, phyWorld *box2d.B2World, position api.IPoint, rotation float64) {
	p.buildNodes(world, parent, position, rotation)

	p.buildPhysics(phyWorld, position, rotation)
}

// buildPhysics creates a static Box2D body with a single horizontal edge
// fixture matching the slope node. Must run after buildNodes, since the
// body angle is read back from p.slope.
// NOTE(review): the rotation parameter is unused here — the angle comes from
// the node set up in buildNodes; confirm this is intentional.
func (p *slopePhysicsComponent) buildPhysics(phyWorld *box2d.B2World, position api.IPoint, rotation float64) {
	p.position = position

	// -------------------------------------------
	// A body def used to create bodies
	bDef := box2d.MakeB2BodyDef()
	bDef.Type = box2d.B2BodyType.B2_staticBody

	// Set the position of the Body
	px := position.X()
	py := position.Y()

	bDef.Position.Set(
		float64(px),
		float64(py),
	)
	bDef.Angle = p.slope.Rotation()

	// An instance of a body to contain Fixtures.
	// This body represents the entire fence (i.e. all four sides)
	p.b2Body = phyWorld.CreateBody(&bDef)

	fd := box2d.MakeB2FixtureDef()

	tln := p.slope.(*shapes.MonoHLineNode)
	halfLength := float64(tln.HalfLength())

	// ------------------------------------------------------------
	// Top fixture
	// px := p.topLineNode.Position().X()
	// py := p.topLineNode.Position().Y()
	// The edge spans the full line length, centered on the body origin.
	b2Shape := box2d.MakeB2EdgeShape()
	b2Shape.Set(
		box2d.MakeB2Vec2(-halfLength, 0.0),
		box2d.MakeB2Vec2(halfLength, 0.0))

	fd.Shape = &b2Shape
	// fmt.Println(p.topLineNode.Rotation())
	p.b2Body.CreateFixtureFromDef(&fd) // attach Fixture to body
}

// buildNodes creates the yellow horizontal-line node that visually
// represents the slope, positioned and rotated as requested.
func (p *slopePhysicsComponent) buildNodes(world api.IWorld, parent api.INode, position api.IPoint, rotation float64) error {
	var err error

	p.slope, err = shapes.NewMonoHLineNode("Bottom", world, parent)
	if err != nil {
		return err
	}
	p.slope.SetScale(25.0)
	p.slope.SetPosition(position.X(), position.Y())
	p.slope.SetRotation(rotation)
	ghl := p.slope.(*shapes.MonoHLineNode)
	ghl.SetColor(color.NewPaletteInt64(color.Yellow))

	return nil
}
examples/complex/physics/basic/p6_slopes/slope_physics_component.go
0.712232
0.40489
slope_physics_component.go
starcoder
package docs import "github.com/swaggo/swag" const docTemplate = `{ "schemes": {{ marshal .Schemes }}, "swagger": "2.0", "info": { "description": "{{escape .Description}}", "title": "{{.Title}}", "contact": {}, "version": "{{.Version}}" }, "host": "{{.Host}}", "basePath": "{{.BasePath}}", "paths": { "/api/v1/addresses": { "get": { "description": "get list of addresses", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Addresses" ], "summary": "Get Addresses", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "boolean", "description": "contract addresses only", "name": "is_contract", "in": "query" }, { "type": "string", "description": "find by address", "name": "address", "in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.AddressList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/addresses/contracts": { "get": { "description": "get list of contracts", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Addresses" ], "summary": "Get contracts", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.ContractList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/addresses/details/{address}": { "get": { "description": "get details of an address", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Addresses" ], "summary": "Get Address Details", 
"parameters": [ { "type": "string", "description": "find by address", "name": "address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.Address" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/addresses/token-addresses/{address}": { "get": { "description": "get list of token contracts by address", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Addresses" ], "summary": "Get Token Addresses", "parameters": [ { "type": "string", "description": "address", "name": "address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "type": "string" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/blocks": { "get": { "description": "get historical blocks", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Blocks" ], "summary": "Get Blocks", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "integer", "description": "find by block number", "name": "number", "in": "query" }, { "type": "integer", "description": "range by start block number", "name": "start_number", "in": "query" }, { "type": "integer", "description": "range by end block number", "name": "end_number", "in": "query" }, { "type": "string", "description": "find by block hash", "name": "hash", "in": "query" }, { "type": "string", "description": "find by block creator", "name": "created_by", "in": "query" }, { "type": "string", "description": "desc or asc", "name": "sort", "in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.BlockList" 
} } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/blocks/timestamp/{timestamp}": { "get": { "description": "get details of a block based on timestamp in millisecond epoch time", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Blocks" ], "summary": "Get Block Details By Nearest Timestamp", "parameters": [ { "type": "integer", "description": "timestamp", "name": "timestamp", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.Block" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/blocks/{number}": { "get": { "description": "get details of a block", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Blocks" ], "summary": "Get Block Details", "parameters": [ { "type": "integer", "description": "block number", "name": "number", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.Block" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/logs": { "get": { "description": "get historical logs", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Logs" ], "summary": "Get Logs", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "block_number", "in": "query" }, { "type": "string", "description": "find by transaction hash", "name": "transaction_hash", "in": "query" }, { "type": "string", "description": "find by address", "name": "address", "in": "query" }, { "type": "string", "description": "find by method", "name": "method", 
"in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.Log" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions": { "get": { "description": "get historical transactions", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get Transactions", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by from address", "name": "from", "in": "query" }, { "type": "string", "description": "find by to address", "name": "to", "in": "query" }, { "type": "string", "description": "find by type", "name": "type", "in": "query" }, { "type": "integer", "description": "find by block number", "name": "block_number", "in": "query" }, { "type": "integer", "description": "find by block number range", "name": "start_block_number", "in": "query" }, { "type": "integer", "description": "find by block number range", "name": "end_block_number", "in": "query" }, { "type": "string", "description": "find by method", "name": "method", "in": "query" }, { "type": "string", "description": "desc or asc", "name": "sort", "in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TransactionList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/address/{address}": { "get": { "description": "get transactions by address", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get Transactions by address", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", 
"in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "address", "name": "address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.TransactionList" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/block-number/{block_number}": { "get": { "description": "get transactions by block_number", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get Transactions by block_number", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "block_number", "name": "block_number", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.TransactionList" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/details/{hash}": { "get": { "description": "get details of a transaction", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get Transaction", "parameters": [ { "type": "string", "description": "transaction hash", "name": "hash", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "$ref": "#/definitions/models.Transaction" } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/internal/address/{address}": { "get": { "description": "Get internal transactions by address", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get 
internal transactions by address", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by address", "name": "address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TransactionInternalList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/internal/block-number/{block_number}": { "get": { "description": "Get internal transactions by block number", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get internal transactions by block number", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "block_number", "name": "block_number", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TransactionInternalList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/internal/{hash}": { "get": { "description": "Get internal transactions by hash", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get internal transactions by hash", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by hash", "name": "hash", "in": "path", "required": true } ], 
"responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TransactionInternalList" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/token-holders/token-contract/{token_contract_address}": { "get": { "description": "get token holders", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get token holders by token contract", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by token contract address", "name": "token_contract_address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TokenAddress" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/token-transfers": { "get": { "description": "get historical token transfers", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get token transfers", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by from address", "name": "from", "in": "query" }, { "type": "string", "description": "find by to address", "name": "to", "in": "query" }, { "type": "integer", "description": "find by block number", "name": "block_number", "in": "query" }, { "type": "integer", "description": "find by block number range", "name": "start_block_number", "in": "query" }, { "type": "integer", "description": "find by block number range", 
"name": "end_block_number", "in": "query" }, { "type": "string", "description": "find by token contract", "name": "token_contract_address", "in": "query" }, { "type": "string", "description": "find by transaction hash", "name": "transaction_hash", "in": "query" } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TokenTransfer" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/token-transfers/address/{address}": { "get": { "description": "get historical token transfers by address", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get token transfer by address", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by address", "name": "address", "in": "path", "required": true } ], "responses": { "200": { "description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TokenTransfer" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/api/v1/transactions/token-transfers/token-contract/{token_contract_address}": { "get": { "description": "get historical token transfers by token contract", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Transactions" ], "summary": "Get token transfers by token contract", "parameters": [ { "type": "integer", "description": "amount of records", "name": "limit", "in": "query" }, { "type": "integer", "description": "skip to a record", "name": "skip", "in": "query" }, { "type": "string", "description": "find by token contract address", "name": "token_contract_address", "in": "path", "required": true } ], "responses": { "200": { 
"description": "OK", "schema": { "type": "array", "items": { "$ref": "#/definitions/models.TokenTransfer" } } }, "422": { "description": "Unprocessable Entity", "schema": { "type": "object", "additionalProperties": true } } } } }, "/metadata": { "get": { "description": "get the status of server.", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Version" ], "summary": "Show the status of server.", "responses": { "200": { "description": "OK", "schema": { "type": "object", "additionalProperties": true } } } } }, "/version": { "get": { "description": "get the status of server.", "consumes": [ "*/*" ], "produces": [ "application/json" ], "tags": [ "Version" ], "summary": "Show the status of server.", "responses": { "200": { "description": "OK", "schema": { "type": "object", "additionalProperties": true } } } } } }, "definitions": { "models.Address": { "type": "object", "properties": { "address": { "type": "string" }, "balance": { "type": "number" }, "created_timestamp": { "type": "integer" }, "is_contract": { "type": "boolean" }, "is_prep": { "description": "Goveranance", "type": "boolean" }, "is_token": { "type": "boolean" }, "log_count": { "type": "integer" }, "name": { "description": "Only relevant in contract addresses", "type": "string" }, "status": { "type": "string" }, "transaction_count": { "type": "integer" }, "type": { "type": "string" } } }, "models.AddressList": { "type": "object", "properties": { "address": { "type": "string" }, "balance": { "type": "number" }, "transaction_count": { "type": "integer" }, "type": { "type": "string" } } }, "models.Block": { "type": "object", "properties": { "block_time": { "type": "integer" }, "failed_transaction_count": { "type": "integer" }, "hash": { "type": "string" }, "internal_transaction_amount": { "type": "string" }, "internal_transaction_count": { "type": "integer" }, "item_id": { "type": "string" }, "item_timestamp": { "type": "string" }, "merkle_root_hash": { "type": "string" }, "next_leader": 
{ "type": "string" }, "number": { "type": "integer" }, "parent_hash": { "type": "string" }, "peer_id": { "type": "string" }, "signature": { "description": "Base", "type": "string" }, "timestamp": { "type": "integer" }, "transaction_amount": { "type": "string" }, "transaction_count": { "type": "integer" }, "transaction_fees": { "type": "string" }, "type": { "type": "string" }, "version": { "type": "string" } } }, "models.BlockList": { "type": "object", "properties": { "hash": { "type": "string" }, "number": { "type": "integer" }, "peer_id": { "type": "string" }, "timestamp": { "type": "integer" }, "transaction_amount": { "type": "string" }, "transaction_count": { "type": "integer" }, "transaction_fees": { "type": "string" } } }, "models.ContractList": { "type": "object", "properties": { "address": { "type": "string" }, "balance": { "type": "number" }, "created_timestamp": { "type": "integer" }, "log_count": { "type": "integer" }, "name": { "type": "string" }, "status": { "type": "string" }, "transaction_count": { "type": "integer" } } }, "models.Log": { "type": "object", "properties": { "address": { "type": "string" }, "block_number": { "type": "integer" }, "block_timestamp": { "type": "integer" }, "data": { "type": "string" }, "indexed": { "type": "string" }, "log_index": { "type": "integer" }, "method": { "type": "string" }, "transaction_hash": { "type": "string" } } }, "models.TokenAddress": { "type": "object", "properties": { "address": { "type": "string" }, "balance": { "type": "number" }, "token_contract_address": { "type": "string" } } }, "models.TokenTransfer": { "type": "object", "properties": { "block_number": { "type": "integer" }, "block_timestamp": { "type": "integer" }, "from_address": { "type": "string" }, "log_index": { "type": "integer" }, "to_address": { "type": "string" }, "token_contract_address": { "type": "string" }, "token_contract_name": { "type": "string" }, "token_contract_symbol": { "type": "string" }, "transaction_fee": { "type": "string" 
}, "transaction_hash": { "type": "string" }, "value": { "type": "string" }, "value_decimal": { "type": "number" } } }, "models.Transaction": { "type": "object", "properties": { "block_hash": { "type": "string" }, "block_number": { "type": "integer" }, "block_timestamp": { "type": "integer" }, "cumulative_step_used": { "type": "string" }, "data": { "type": "string" }, "data_type": { "type": "string" }, "from_address": { "type": "string" }, "hash": { "type": "string" }, "log_count": { "type": "integer" }, "log_index": { "type": "integer" }, "logs_bloom": { "type": "string" }, "method": { "type": "string" }, "nid": { "type": "string" }, "nonce": { "type": "string" }, "score_address": { "type": "string" }, "signature": { "type": "string" }, "status": { "type": "string" }, "step_limit": { "type": "string" }, "step_price": { "type": "string" }, "step_used": { "type": "string" }, "timestamp": { "type": "integer" }, "to_address": { "type": "string" }, "transaction_fee": { "type": "string" }, "transaction_index": { "type": "integer" }, "type": { "type": "string" }, "value": { "type": "string" }, "value_decimal": { "type": "number" }, "version": { "type": "string" } } }, "models.TransactionInternalList": { "type": "object", "properties": { "block_hash": { "type": "string" }, "block_number": { "type": "integer" }, "block_timestamp": { "type": "integer" }, "data": { "type": "string" }, "from_address": { "type": "string" }, "hash": { "type": "string" }, "status": { "type": "string" }, "to_address": { "type": "string" }, "transaction_index": { "type": "integer" }, "type": { "type": "string" }, "value": { "type": "string" } } }, "models.TransactionList": { "type": "object", "properties": { "block_number": { "type": "integer" }, "block_timestamp": { "type": "integer" }, "data": { "type": "string" }, "from_address": { "type": "string" }, "hash": { "type": "string" }, "method": { "type": "string" }, "status": { "type": "string" }, "to_address": { "type": "string" }, 
"transaction_fee": { "type": "string" }, "type": { "type": "string" }, "value": { "type": "string" }, "value_decimal": { "type": "number" } } } } }` // SwaggerInfo holds exported Swagger Info so clients can modify it var SwaggerInfo = &swag.Spec{ Version: "2.0", Host: "", BasePath: "", Schemes: []string{}, Title: "Icon Go API", Description: "The icon tracker API", InfoInstanceName: "swagger", SwaggerTemplate: docTemplate, } func init() { swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) }
src/api/docs/docs.go
0.630685
0.405566
docs.go
starcoder
package dataframe import ( "crypto/sha1" "encoding/json" "fmt" "log" "sort" "strings" "github.com/ptiger10/pd/internal/index" "github.com/ptiger10/pd/internal/values" "github.com/ptiger10/pd/series" "github.com/ptiger10/pd/options" ) // Rename the DataFrame. func (df *DataFrame) Rename(name string) { df.name = name } // renames a column in place func (df *DataFrame) renameCol(col int, multiName string) error { names := strings.Split(multiName, values.GetMultiColNameSeparator()) if len(names) > df.ColLevels() { return fmt.Errorf("df.renameCol(): len(multiName) cannot exceed number of column levels (%d > %d)", len(names), df.ColLevels()) } for j := 0; j < df.ColLevels(); j++ { if j < len(names) { df.cols.Levels[j].Labels[col] = names[j] df.cols.Levels[j].IsDefault = false df.cols.Levels[j].Refresh() } } return nil } // RenameCols renames the columns at the specified labels. func (df *DataFrame) RenameCols(columns map[string]string) { for label, name := range columns { colLocs := df.SelectCols([]string{label}, 0) for _, loc := range colLocs { err := df.renameCol(loc, name) if err != nil { if options.GetLogWarnings() { log.Printf("RenameCols(): %v", err) } } } } return } // replace one DataFrame with another in place. func (df *DataFrame) replace(df2 *DataFrame) { df.name = df2.name df.vals = df2.vals df.index = df2.index df.cols = df2.cols } // Convert converts every series in a DataFrame to datatype and modifies the DataFrame in place. func (ip InPlace) Convert(dataType string) error { for m := 0; m < ip.df.NumCols(); m++ { newValues, err := values.Convert(ip.df.vals[m].Values, options.DT(dataType)) if err != nil { return fmt.Errorf("df.Convert(): %v", err) } ip.df.vals[m].Values = newValues ip.df.vals[m].DataType = options.DT(dataType) } return nil } // Convert converts every series in a DataFrame to datatype and returns a new DataFrame. 
func (df *DataFrame) Convert(dataType string) (*DataFrame, error) { df = df.Copy() err := df.InPlace.Convert(dataType) return df, err } // [START InPlace] // // Sort sorts the series by its values and modifies the DataFrame in place. // func (ip InPlace) Sort(asc bool) { // if asc { // sort.Stable(ip) // } else { // sort.Stable(sort.Reverse(ip)) // } // } // Len returns the length of the underlying DataFrame (required by Sort interface) func (ip InPlace) Len() int { return ip.df.Len() } // Set selects the first column in column level 0 with the label and sets its values to s. If an error occurs, the error is logged and nothing happens. func (ip InPlace) Set(colLabel string, s *series.Series) { col := ip.df.SelectCol(colLabel) if col == -1 { return } if s.Len() != ip.Len() { if options.GetLogWarnings() { log.Printf("df.Set(): series must be same length as df (%d != %d)", s.Len(), ip.Len()) } return } container, _ := s.ToInternalComponents() ip.df.vals[col] = container } // SwapRows swaps the selected rows in place. func (ip InPlace) SwapRows(i, j int) { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values.Swap(i, j) } for l := 0; l < ip.df.IndexLevels(); l++ { ip.df.index.Levels[l].Labels.Swap(i, j) ip.df.index.Levels[l].NeedsRefresh = true } } // SwapColumns swaps the selected columns in place. func (ip InPlace) SwapColumns(i, j int) { ip.df.vals[i], ip.df.vals[j] = ip.df.vals[j], ip.df.vals[i] for l := 0; l < ip.df.ColLevels(); l++ { ip.df.cols.Levels[l].Labels[i], ip.df.cols.Levels[l].Labels[j] = ip.df.cols.Levels[l].Labels[j], ip.df.cols.Levels[l].Labels[i] ip.df.cols.Levels[l].Refresh() } } // // Less returns true if the value at i > j in col. // func (ip InPlace) Less(col int, i, j int) bool { // return ip.df.vals[col].Values.Less(i, j) // } // InsertRow inserts a new row into the DataFrame immediately before the specified integer position and modifies the DataFrame in place. // If the original DataFrame is empty, replaces it with a new DataFrame. 
func (ip InPlace) InsertRow(row int, val []interface{}, idxLabels ...interface{}) error { // Handling empty DataFrame if Equal(ip.df, newEmptyDataFrame()) { df, err := New(val, Config{MultiIndex: idxLabels}) if err != nil { return fmt.Errorf("DataFrame.InsertRow(): inserting into empty DataFrame requires creating a new DataFrame: %v", err) } ip.df.replace(df) return nil } // Handling errors if len(idxLabels) > ip.df.index.NumLevels() { return fmt.Errorf("DataFrame.InsertRow() len(idxLabels) must not exceed number of index levels: (%d != %d)", len(idxLabels), ip.df.index.NumLevels()) } if row > ip.Len() { return fmt.Errorf("DataFrame.InsertRow(): invalid row: %d (max %v)", row, ip.Len()) } if len(val) != ip.df.NumCols() { return fmt.Errorf("DataFrame.InsertRow(): len(val) must equal number of columns (%d != %d)", len(val), ip.df.NumCols()) } for _, v := range idxLabels { if _, err := values.InterfaceFactory(v); err != nil { return fmt.Errorf("DataFrame.InsertRow(): %v", err) } } for _, v := range val { if _, err := values.InterfaceFactory(v); err != nil { return fmt.Errorf("DataFrame.InsertRow(): %v", err) } } // Insertion once errors have been handled for j := 0; j < ip.df.IndexLevels(); j++ { if j < len(idxLabels) { ip.df.index.Levels[j].Labels.Insert(row, idxLabels[j]) ip.df.index.Levels[j].IsDefault = false } else { ip.df.index.Levels[j].Labels.Insert(row, "") } // Reorder a default index if ip.df.index.Levels[j].IsDefault { // ducks error because index level is known to be in series. ip.df.Index.Reindex(j) } else { ip.df.index.Levels[j].NeedsRefresh = true } } for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values.Insert(row, val[m]) } return nil } // InsertCol inserts a column with an indefinite number of column labels immediately before the specified column position and modifies the DataFrame in place. 
// TODO: derive colLabels from name func (ip InPlace) InsertCol(col int, s *series.Series, colLabels ...string) error { // Handling empty DataFrame if Equal(ip.df, newEmptyDataFrame()) { vals, idx := s.ToInternalComponents() cols := index.CreateMultiCol([][]string{colLabels}, nil) df := newFromComponents([]values.Container{vals}, idx, cols, "") ip.df.replace(df) return nil } // Handling errors if len(colLabels) > ip.df.cols.NumLevels() { return fmt.Errorf("DataFrame.InsertCol() len(colLabels) must not exceed number of column levels: (%d > %d)", len(colLabels), ip.df.cols.NumLevels()) } if col > ip.df.NumCols() { return fmt.Errorf("DataFrame.InsertCol(): invalid col: %d (max %v)", col, ip.df.NumCols()) } if s.Len() != ip.df.Len() { return fmt.Errorf("DataFrame.InsertCol(): series must be same length as df (%d != %d)", s.Len(), ip.df.Len()) } // Insertion once errors have been handled for j := 0; j < ip.df.cols.NumLevels(); j++ { if j < len(colLabels) { ip.df.cols.Levels[j].Labels = append(ip.df.cols.Levels[j].Labels[:col], append([]string{colLabels[j]}, ip.df.cols.Levels[j].Labels[col:]...)...) ip.df.cols.Levels[j].IsDefault = false // ducks error because col labels are string labelContainer := values.MustCreateValuesFromInterface(colLabels[j]) // switch column level datatype to string unless it is already int64 (ie a default index) and the addition is int64 if labelContainer.DataType != options.Int64 { ip.df.cols.Levels[j].DataType = options.String } } else { // add empty column for all levels where it was not supplied ip.df.cols.Levels[j].Labels = append(ip.df.cols.Levels[j].Labels[:col], append([]string{"NaN"}, ip.df.cols.Levels[j].Labels[col:]...)...) } // Reorder default columns if ip.df.cols.Levels[j].IsDefault { ip.df.cols.Levels[j].ResetDefault() } else { ip.df.cols.Levels[j].Refresh() } } container, _ := s.ToInternalComponents() ip.df.vals = append(ip.df.vals[:col], append([]values.Container{container}, ip.df.vals[col:]...)...) 
return nil } // AppendRow adds a row at a specified integer position and modifies the DataFrame in place. func (ip InPlace) AppendRow(val []interface{}, idxLabels ...interface{}) error { err := ip.df.InPlace.InsertRow(ip.Len(), val, idxLabels...) if err != nil { return fmt.Errorf("DataFrame.AppendRow(): %v", err) } return nil } // AppendCol adds a row at a specified integer position and modifies the DataFrame in place. // TODO take colLabels from series func (ip InPlace) AppendCol(s *series.Series, colLabels ...string) error { err := ip.df.InPlace.InsertCol(ip.Len(), s, colLabels...) if err != nil { return fmt.Errorf("DataFrame.AppendCol(): %v", err) } return nil } // SetRow sets the values in the specified row to val and modifies the DataFrame in place. First converts val to be the same type as the index level. func (ip InPlace) SetRow(row int, val interface{}) error { if err := ip.df.ensureRowPositions([]int{row}); err != nil { return fmt.Errorf("DataFrame.SetRow(): %v", err) } if _, err := values.InterfaceFactory(val); err != nil { return fmt.Errorf("DataFrame.SetRow(): %v", err) } for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values.Set(row, val) } return nil } // SetRows sets all the values in the specified rows to val and modifies the DataFrame in place. First converts val to be the same type as the index level. // If an error would be encountered in any row position, the entire operation is cancelled before it starts. func (ip InPlace) SetRows(rowPositions []int, val interface{}) error { if err := ip.df.ensureRowPositions(rowPositions); err != nil { return fmt.Errorf("DataFrame.SetRows(): %v", err) } if _, err := values.InterfaceFactory(val); err != nil { return fmt.Errorf("DataFrame.SetRows(): %v", err) } for m := 0; m < ip.df.NumCols(); m++ { for _, row := range rowPositions { ip.df.vals[m].Values.Set(row, val) } } return nil } // SetCol sets the values in the specified column to val and modifies the DataFrame in place. 
func (ip InPlace) SetCol(col int, s *series.Series) error { if err := ip.df.ensureColumnPositions([]int{col}); err != nil { return fmt.Errorf("DataFrame.SetCol(): %v", err) } if s.Len() != ip.df.Len() { return fmt.Errorf("DataFrame.SetCol(): series must be same length as df (%d != %d)", s.Len(), ip.df.Len()) } container, _ := s.ToInternalComponents() ip.df.vals[col] = container return nil } // SetCols sets the values in the specified columns to val and modifies the DataFrame in place. func (ip InPlace) SetCols(columnPositions []int, s *series.Series) error { if err := ip.df.ensureColumnPositions(columnPositions); err != nil { return fmt.Errorf("DataFrame.SetCol(): %v", err) } if s.Len() != ip.df.Len() { return fmt.Errorf("DataFrame.SetCol(): series must be same length as df (%d != %d)", s.Len(), ip.df.Len()) } container, _ := s.ToInternalComponents() for _, col := range columnPositions { ip.df.vals[col] = container } return nil } // DropRow drops the row at the specified integer position and modifies the DataFrame in place. func (ip InPlace) DropRow(row int) error { if err := ip.dropMany([]int{row}); err != nil { return fmt.Errorf("DataFrame.DropRow(): %v", err) } return nil } // DropRows drops the rows at the specified integer position and modifies the DataFrame in place. // If an error would be encountered in any row position, the entire operation is cancelled before it starts. func (ip InPlace) DropRows(rowPositions []int) error { if err := ip.dropMany(rowPositions); err != nil { return fmt.Errorf("DataFrame.DropRows(): %v", err) } return nil } // Hash computes a unique identifer for each Row. func (r Row) hash() string { jsonBytes, _ := json.Marshal(r) h := sha1.New() h.Write(jsonBytes) bs := h.Sum(nil) return fmt.Sprintf("%x", bs) } // DropDuplicates drops any rows containing duplicate index + DataFrame values and modifies the DataFrame in place. 
func (ip InPlace) DropDuplicates() { var toDrop []int g := ip.df.GroupByIndex() for _, group := range g.Groups() { // only inspect groups with at least one position if positions := g.groups[group].Positions; len(positions) > 0 { exists := make(map[interface{}]bool) for _, pos := range positions { if exists[ip.df.Row(pos).hash()] { toDrop = append(toDrop, pos) } else { exists[ip.df.Row(pos).hash()] = true } } } } // ducks error because position is assumed to be in DataFrame ip.DropRows(toDrop) } // DropNull drops all null values and modifies the DataFrame in place. If an invalid column is provided, returns original DataFrame. func (ip InPlace) DropNull(cols ...int) { if err := ip.df.ensureColumnPositions(cols); err != nil { if options.GetLogWarnings() { log.Printf("df.DropNull(): %v", err) } return } ip.dropMany(ip.df.null(cols...)) return } // dropMany drops multiple rows and modifies the DataFrame in place. func (ip InPlace) dropMany(positions []int) error { if err := ip.df.ensureRowPositions(positions); err != nil { return err } sort.IntSlice(positions).Sort() for i, position := range positions { ip.df.InPlace.dropOne(position - i) } if ip.Len() == 0 { ip.df.replace(newEmptyDataFrame()) } return nil } // dropOne drops a row at a specified integer position and modifies the DataFrame in place. // Should be called via dropMany to catch errors. func (ip InPlace) dropOne(pos int) { for i := 0; i < ip.df.index.NumLevels(); i++ { ip.df.index.Levels[i].Labels.Drop(pos) ip.df.index.Levels[i].NeedsRefresh = true } for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values.Drop(pos) } return } // DropCol drops a column at a specified integer position and modifies the DataFrame in place. 
func (ip InPlace) DropCol(col int) error { // Handling errors if err := ip.df.ensureColumnPositions([]int{col}); err != nil { return fmt.Errorf("DataFrame.DropCol(): %v", err) } for j := 0; j < ip.df.cols.NumLevels(); j++ { ip.df.cols.Levels[j].Labels = append(ip.df.cols.Levels[j].Labels[:col], ip.df.cols.Levels[j].Labels[col+1:]...) ip.df.cols.Levels[j].Refresh() } ip.df.vals = append(ip.df.vals[:col], ip.df.vals[col+1:]...) if ip.df.NumCols() == 0 { ip.df.replace(newEmptyDataFrame()) } return nil } // DropCols drops the columns at the specified integer positions and modifies the DataFrame in place. func (ip InPlace) DropCols(columnPositions []int) error { if err := ip.df.ensureColumnPositions(columnPositions); err != nil { return fmt.Errorf("DataFrame.DropCols(): %v", err) } sort.IntSlice(columnPositions).Sort() for i, position := range columnPositions { // ducks error because all columns are assumed to be safe after aggregate error check above ip.df.InPlace.DropCol(position - i) } return nil } // does not expect errors and does not drop columns func (ip InPlace) setIndex(col int) { container := ip.df.vals[col] newLevel := index.Level{Name: ip.df.cols.Name(col), Labels: container.Values, DataType: container.DataType, NeedsRefresh: true} // prepend ip.df.index.Levels = append([]index.Level{newLevel}, ip.df.index.Levels...) return } // drops columns after setting. For use in GroupBy func (ip InPlace) setIndexes(cols []int) { reversedCols := make([]int, len(cols)) for i := 1; i <= len(cols); i++ { reversedCols[len(cols)-i] = cols[i-1] } for _, col := range reversedCols { ip.setIndex(col) } ip.df.index.NeedsRefresh = true ip.DropCols(cols) } // SetIndex sets a column as an index level, drops the column, and modifies the DataFrame in place. If col is the only column, nothing happens. 
func (ip InPlace) SetIndex(col int) error { if err := ip.df.ensureColumnPositions([]int{col}); err != nil { return fmt.Errorf("DataFrame.SetIndex(): %v", err) } if ip.df.NumCols() == 1 { return nil } ip.setIndex(col) ip.df.index.NeedsRefresh = true ip.DropCol(col) return nil } // If level is the only level, a default int index is inserted. func (ip InPlace) resetIndex(level int) { container := values.Container{Values: ip.df.index.Levels[level].Labels, DataType: ip.df.index.Levels[level].DataType} ip.df.vals = append(ip.df.vals, container) names := strings.Split(ip.df.index.Levels[level].Name, values.GetMultiColNameSeparator()) for j := 0; j < ip.df.ColLevels(); j++ { ip.df.cols.Levels[j].Labels = append(ip.df.cols.Levels[j].Labels, names[j]) ip.df.cols.Levels[j].Refresh() } // ducks error because levels are certain to be in index ip.df.index.DropLevel(level) return } // ResetIndex sets an index level as a column, drops the index level, and modifies the DataFrame in place. // If level is the only level, a default int index is inserted. func (ip InPlace) ResetIndex(level int) error { if err := ip.df.ensureIndexLevelPositions([]int{level}); err != nil { return fmt.Errorf("DataFrame.ResetIndex(): %v", err) } ip.resetIndex(level) return nil } // ToFloat64 converts DataFrame values to float64 in place. func (ip InPlace) ToFloat64() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToFloat64() ip.df.vals[m].DataType = options.Float64 } } // ToInt64 converts DataFrame values to int64 in place. func (ip InPlace) ToInt64() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToInt64() ip.df.vals[m].DataType = options.Int64 } } // ToString converts DataFrame values to string in place. func (ip InPlace) ToString() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToString() ip.df.vals[m].DataType = options.String } } // ToBool converts DataFrame values to bool in place. 
func (ip InPlace) ToBool() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToBool() ip.df.vals[m].DataType = options.Bool } } // ToDateTime converts DataFrame values to datetime in place. func (ip InPlace) ToDateTime() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToDateTime() ip.df.vals[m].DataType = options.DateTime } } // ToInterface converts DataFrame values to interface in place. func (ip InPlace) ToInterface() { for m := 0; m < ip.df.NumCols(); m++ { ip.df.vals[m].Values = ip.df.vals[m].Values.ToInterface() ip.df.vals[m].DataType = options.Interface } } // [END InPlace] // [START Copy] // // Sort sorts the series by its values and returns a new DataFrame. // func (df *DataFrame) Sort(asc bool) *DataFrame { // df = df.Copy() // df.InPlace.Sort(asc) // return df // } // SwapRows swaps the selected rows and returns a new DataFrame. func (df *DataFrame) SwapRows(i, j int) (*DataFrame, error) { df = df.Copy() if i >= df.Len() { return newEmptyDataFrame(), fmt.Errorf("invalid position: %d (max %v)", i, df.Len()-1) } if j >= df.Len() { return newEmptyDataFrame(), fmt.Errorf("invalid position: %d (max %v)", j, df.Len()-1) } df.InPlace.SwapRows(i, j) return df, nil } // SwapColumns swaps the selected rows and returns a new DataFrame. func (df *DataFrame) SwapColumns(i, j int) (*DataFrame, error) { df = df.Copy() if i >= df.NumCols() { return newEmptyDataFrame(), fmt.Errorf("invalid position: %d (max %v)", i, df.Len()-1) } if j >= df.NumCols() { return newEmptyDataFrame(), fmt.Errorf("invalid position: %d (max %v)", j, df.Len()-1) } df.InPlace.SwapColumns(i, j) return df, nil } // Set selects the first column in column level 0 with the label and sets its values to s, then returns a new DataFrame. // If an error occurs, the error is logged and nothing happens. 
func (df *DataFrame) Set(colLabel string, s *series.Series) *DataFrame { df = df.Copy() df.InPlace.Set(colLabel, s) return df } // InsertRow inserts a new row into the DataFrame immediately before the specified integer position and returns a new DataFrame. func (df *DataFrame) InsertRow(row int, val []interface{}, idxLabels ...interface{}) (*DataFrame, error) { df = df.Copy() err := df.InPlace.InsertRow(row, val, idxLabels...) return df, err } // InsertCol inserts a new column into the DataFrame immediately before the specified integer position and returns a new DataFrame. func (df *DataFrame) InsertCol(row int, s *series.Series, colLabels ...string) (*DataFrame, error) { df = df.Copy() err := df.InPlace.InsertCol(row, s, colLabels...) return df, err } // AppendRow adds a row at the end and returns a new DataFrame. func (df *DataFrame) AppendRow(val []interface{}, idxLabels ...interface{}) (*DataFrame, error) { df, err := df.InsertRow(df.Len(), val, idxLabels...) if err != nil { return newEmptyDataFrame(), fmt.Errorf("DataFrame.AppendRow(): %v", err) } return df, nil } // AppendCol adds a column at the end and returns a new DataFrame. func (df *DataFrame) AppendCol(s *series.Series, colLabels ...string) (*DataFrame, error) { df, err := df.InsertCol(df.Len(), s, colLabels...) if err != nil { return newEmptyDataFrame(), fmt.Errorf("DataFrame.AppendCol(): %v", err) } return df, nil } // SetRow sets the value in the specified rows to val and returns a new DataFrame. func (df *DataFrame) SetRow(row int, val interface{}) (*DataFrame, error) { df = df.Copy() err := df.InPlace.SetRow(row, val) return df, err } // SetRows sets all the values in the specified rows to val and returns a new DataFrame. func (df *DataFrame) SetRows(rowPositions []int, val interface{}) (*DataFrame, error) { df = df.Copy() err := df.InPlace.SetRows(rowPositions, val) return df, err } // SetCol sets all the values in the specified columns to val and returns a new DataFrame. 
func (df *DataFrame) SetCol(col int, s *series.Series) (*DataFrame, error) { df = df.Copy() err := df.InPlace.SetCol(col, s) return df, err } // SetCols sets all the values in the specified columns to val and returns a new DataFrame. func (df *DataFrame) SetCols(columnPositions []int, s *series.Series) (*DataFrame, error) { df = df.Copy() err := df.InPlace.SetCols(columnPositions, s) return df, err } // DropRow drops the row at the specified integer position and returns a new DataFrame. func (df *DataFrame) DropRow(row int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.DropRow(row) return df, err } // DropRows drops the rows at the specified integer position and returns a new DataFrame. func (df *DataFrame) DropRows(positions []int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.DropRows(positions) return df, err } // DropCol drops the column at the specified integer position and returns a new DataFrame. func (df *DataFrame) DropCol(col int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.DropCol(col) return df, err } // DropCols drops the column at the specified integer position and returns a new DataFrame. func (df *DataFrame) DropCols(columnPositions []int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.DropCols(columnPositions) return df, err } // DropNull drops all null values and returns a new DataFrame. If an invalid column is provided, returns a copy of the original DataFrame. func (df *DataFrame) DropNull(cols ...int) *DataFrame { df = df.Copy() df.InPlace.DropNull(cols...) return df } // DropDuplicates drops any rows containing duplicate index + DataFrame values and returns a new DataFrame. func (df *DataFrame) DropDuplicates() *DataFrame { df = df.Copy() df.InPlace.DropDuplicates() return df } // SetIndex sets a column as an index level, drops the column, and returns a new DataFrame. If col is the only column, nothing happens. 
func (df *DataFrame) SetIndex(col int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.SetIndex(col) return df, err } // ResetIndex sets an index level as a column, drops the index level, and returns a new DataFrame. // If level is the only level, a default int index is inserted. func (df *DataFrame) ResetIndex(level int) (*DataFrame, error) { df = df.Copy() err := df.InPlace.ResetIndex(level) return df, err } // ToFloat64 converts DataFrame values to float64 and returns a new DataFrame. func (df *DataFrame) ToFloat64() *DataFrame { df = df.Copy() df.InPlace.ToFloat64() return df } // ToInt64 converts DataFrame values to int64 and returns a new DataFrame. func (df *DataFrame) ToInt64() *DataFrame { df = df.Copy() df.InPlace.ToInt64() return df } // ToString converts DataFrame values to string and returns a new DataFrame. func (df *DataFrame) ToString() *DataFrame { df = df.Copy() df.InPlace.ToString() return df } // ToBool converts DataFrame values to bool and returns a new DataFrame. func (df *DataFrame) ToBool() *DataFrame { df = df.Copy() df.InPlace.ToBool() return df } // ToDateTime converts DataFrame values to time.Time and returns a new DataFrame. func (df *DataFrame) ToDateTime() *DataFrame { df = df.Copy() df.InPlace.ToDateTime() return df } // ToInterface converts DataFrame values to interface and returns a new DataFrame. func (df *DataFrame) ToInterface() *DataFrame { df = df.Copy() df.InPlace.ToInterface() return df } // [END Copy]
dataframe/modify.go
0.510741
0.410225
modify.go
starcoder
package sqlset

import "context"

/*
Adapter is an interface providing the methods needed to implement a Set with a
database backend.

ColumnName takes a string feature name and returns a column name for the feature
in a string or an error.

CreateDiscreteValuesTable should create a table containing the different values
discrete features can take in the samples of the working sets.

CreateSampleTable should create a table for the samples, using foreign keys to
the discrete value table for discrete features and a suitable float64
representation for continuous ones. It should also generate an id column.

AddDiscreteValues should add to the discrete value table the given discrete
values, and return an error if any cannot be added.

ListDiscreteValues should return a map of integer to string that relates numeric
ids of the discrete values to their string values, or an error.

AddSamples should add a sample to the samples table for each rawSample received.
A rawSample here is a map of column name to an interface containing the numeric
id for a discrete feature value or a float64 for a continuous feature value.
Samples should be added considering all discrete and continuous feature columns
only. NULL values should be used for column values not available in the
rawSample. The number of samples added or an error must be returned.

ListSamples should provide a slice of rawSamples as described above satisfying
the given feature criteria and specifying the values for the given discrete and
continuous feature columns, or an error.

IterateOnSamples is similar to ListSamples, but takes an additional lambda to
iterate on the samples rather than returning them all. This method should call
the lambda for every sample satisfying the criteria. The lambda takes an index
for the sample (0,1,2,...) and a raw sample and returns a boolean and an error,
which must be true and nil in order for this method not to stop.
This method should return an error if the samples cannot be traversed or any
error the lambda returns.

CountSamples should return the number of samples in the samples table that
satisfy the given feature criteria or an error if they cannot be counted.

ListSampleDiscreteFeatureValues takes a discrete feature column name and a slice
of feature criteria and should return a slice with the numeric IDs for the
different values for the given feature column name on samples satisfying the
given criteria, or an error.

ListSampleContinuousFeatureValues takes a continuous feature column name and a
slice of feature criteria and should return a slice with the different values
for the given feature column name on samples satisfying the given criteria, or
an error.

CountSampleDiscreteFeatureValues takes a discrete feature column name and a
slice of feature criteria and should return a map relating the numeric IDs for
the discrete values for the given feature column on samples in the table
satisfying the given criteria to the number of times they appear among the
samples satisfying the given criteria, or an error.

CountSampleContinuousFeatureValues takes a continuous feature column name and a
slice of feature criteria and should return a map relating the continuous values
for the given column name on samples in the table satisfying the given criteria
to the number of times they appear among the samples satisfying the given
criteria, or an error.
*/
type Adapter interface {
	ColumnName(string) (string, error)
	CreateDiscreteValuesTable(ctx context.Context) error
	CreateSampleTable(ctx context.Context, discreteFeatureColumns, continuousFeatureColumns []string) error
	AddDiscreteValues(context.Context, []string) (int, error)
	ListDiscreteValues(ctx context.Context) (map[int]string, error)
	AddSamples(ctx context.Context, rawSamples []map[string]interface{}, discreteFeatureColumns, continuousFeatureColumns []string) (int, error)
	ListSamples(ctx context.Context, criteria []*FeatureCriterion, discreteFeatureColumns, continuousFeatureColumns []string) ([]map[string]interface{}, error)
	IterateOnSamples(ctx context.Context, criteria []*FeatureCriterion, discreteFeatureColumns, continuousFeatureColumns []string, lambda func(int, map[string]interface{}) (bool, error)) error
	CountSamples(context.Context, []*FeatureCriterion) (int, error)
	ListSampleDiscreteFeatureValues(context.Context, string, []*FeatureCriterion) ([]int, error)
	ListSampleContinuousFeatureValues(context.Context, string, []*FeatureCriterion) ([]float64, error)
	CountSampleDiscreteFeatureValues(context.Context, string, []*FeatureCriterion) (map[int]int, error)
	CountSampleContinuousFeatureValues(context.Context, string, []*FeatureCriterion) (map[float64]int, error)
}
set/sqlset/adapter.go
0.760828
0.873053
adapter.go
starcoder
package calculator

import (
	"fmt"
	"math"
	"strconv"
	"strings"
)

// reduce folds inputs left-to-right with combine, returning an error when fewer
// than two operands are supplied. Shared by the four arithmetic operations.
func reduce(inputs []float64, combine func(acc, n float64) (float64, error)) (float64, error) {
	// BUG FIX: the operations previously read inputs[0] before checking the
	// length, so a zero-argument call panicked instead of returning an error.
	if len(inputs) == 0 {
		return 0, fmt.Errorf("bad input: no operands")
	}
	if len(inputs) < 2 {
		return 0, fmt.Errorf("bad input: %f (only one operand)", inputs[0])
	}
	acc := inputs[0]
	var err error
	for _, n := range inputs[1:] {
		if acc, err = combine(acc, n); err != nil {
			return 0, err
		}
	}
	return acc, nil
}

// Add takes some numbers and returns the result of adding them together.
func Add(inputs ...float64) (float64, error) {
	return reduce(inputs, func(acc, n float64) (float64, error) { return acc + n, nil })
}

// Substract takes some numbers and returns the result of subtracting them
// together. (The misspelled name is kept for backward compatibility.)
func Substract(inputs ...float64) (float64, error) {
	return reduce(inputs, func(acc, n float64) (float64, error) { return acc - n, nil })
}

// Multiply takes some numbers and returns the result of multiplying them
// together.
func Multiply(inputs ...float64) (float64, error) {
	return reduce(inputs, func(acc, n float64) (float64, error) { return acc * n, nil })
}

// Divide takes some numbers and returns the result of dividing them together.
// Division by zero yields an error.
func Divide(inputs ...float64) (float64, error) {
	return reduce(inputs, func(acc, n float64) (float64, error) {
		if n == 0 {
			return 0, fmt.Errorf("bad input: %f, %f (division by zero is not allowed)", acc, n)
		}
		return acc / n, nil
	})
}

// Sqrt takes a number and returns its square root. Negative input yields an error.
func Sqrt(a float64) (float64, error) {
	if a < 0 {
		return 0, fmt.Errorf("bad input: %f (square root of a negative number is not allowed)", a)
	}
	return math.Sqrt(a), nil
}

// Calculate parses a simple binary expression of the form "a op b"
// (op one of + - * /, whitespace ignored) and evaluates it.
func Calculate(s string) (float64, error) {
	// Remove whitespaces (simply)
	s = strings.Replace(s, " ", "", -1)

	// Split between + - * /; symbol records the last operator encountered.
	var symbol rune
	f := func(c rune) bool {
		for _, r := range []rune{'+', '-', '*', '/'} {
			if c == r {
				symbol = r
				return true
			}
		}
		return false
	}
	fields := strings.FieldsFunc(s, f)
	// BUG FIX: previously only len(fields) == 1 was rejected, so inputs
	// consisting solely of operators (e.g. "+") produced zero fields and
	// panicked on fields[0] below.
	if len(fields) < 2 {
		return 0, fmt.Errorf("bad input: %s (unrecognized operation)", s)
	}

	a, err := strconv.ParseFloat(fields[0], 64)
	if err != nil {
		return 0, fmt.Errorf("bad input: %s", fields[0])
	}
	b, err := strconv.ParseFloat(fields[1], 64)
	if err != nil {
		return 0, fmt.Errorf("bad input: %s", fields[1])
	}

	var res float64
	switch symbol {
	case '+':
		res, err = Add(a, b)
	case '-':
		res, err = Substract(a, b)
	case '*':
		res, err = Multiply(a, b)
	case '/':
		res, err = Divide(a, b)
	}
	return res, err
}
calculator.go
0.800185
0.513181
calculator.go
starcoder
package std // A StrMap is a map[string]interface{} and holds arbitrary unboxed values. type StrMap struct { // Map contains all string keys and values Map map[string]interface{} } func (m *StrMap) init() { if m.Map == nil { m.Map = make(map[string]interface{}) } } // Put replaces any unboxed key in the map with the unboxed value func (m *StrMap) Put(key string, value *Box) { m.init() m.Map[key] = value.Unbox() } // Get returns the value in a box for the given key or nil func (m *StrMap) Get(key string) *Box { m.init() return Wrap(m.Map[key]) } // String is a shortcut for #Get(key).String() and avoids an extra Box allocation func (m *StrMap) String(key string) string { if m == nil { return "" } return duckTypeString(m.Map[key]) } // Int32 is a shortcut for #Get(key).Int32() and avoids an extra Box allocation func (m *StrMap) Int32(key string) int32 { if m == nil { return 0 } return int32(duckTypeInt64(m.Map[key])) } // Int64 is a shortcut for #Get(key).Int64() and avoids an extra Box allocation func (m *StrMap) Int64(key string) int64 { if m == nil { return 0 } return duckTypeInt64(m.Map[key]) } // StrMap is a shortcut for #Get(key).StrMap() and avoids an extra Box allocation func (m *StrMap) StrMap(key string) *StrMap { if m == nil { return nil } return duckTypeStrMap(m.Map[key]) } // Len returns the amount of entries in the map func (m *StrMap) Len() int { m.init() return len(m.Map) } // Keys returns the keys as a Slice. Caution: the order of keys is not stable, so keep the StrSlice while iterating. 
func (m *StrMap) Keys() *StrSlice { m.init() res := make([]string, len(m.Map)) idx := 0 for k := range m.Map { res[idx] = k idx++ } return &StrSlice{res} } // Values returns the values as a Slice func (m *StrMap) Values() *Slice { m.init() res := make([]interface{}, len(m.Map)) idx := 0 for _, v := range m.Map { res[idx] = v idx++ } return &Slice{res} } // Has checks if the unboxed key exists, not if the associated value is nil or not func (m *StrMap) Has(key string) bool { m.init() _, ok := m.Map[key] return ok } // Delete removes the unboxed key from the map func (m *StrMap) Delete(key string) { m.init() delete(m.Map, key) } // Box returns this Map in a box func (m *StrMap) Box() *Box { return &Box{m} }
strmap.go
0.797754
0.488954
strmap.go
starcoder
package aoc2015

/*
--- Day 6: Probably a Fire Hazard ---
Because your neighbors keep defeating you in the holiday house decorating contest year after year, you've decided to deploy one million lights in a 1000x1000 grid.
Furthermore, because you've been especially nice this year, Santa has mailed you instructions on how to display the ideal lighting configuration.
Lights in your grid are numbered from 0 to 999 in each direction; the lights at each corner are at 0,0, 0,999, 999,999, and 999,0. The instructions include whether to turn on, turn off, or toggle various inclusive ranges given as coordinate pairs. Each coordinate pair represents opposite corners of a rectangle, inclusive; a coordinate pair like 0,0 through 2,2 therefore refers to 9 lights in a 3x3 square. The lights all start turned off.
To defeat your neighbors this year, all you have to do is set up your lights by doing the instructions Santa sent you in order.
For example:
turn on 0,0 through 999,999 would turn on (or leave on) every light.
toggle 0,0 through 999,0 would toggle the first line of 1000 lights, turning off the ones that were on, and turning on the ones that were off.
turn off 499,499 through 500,500 would turn off (or leave off) the middle four lights.
After following the instructions, how many lights are lit?
*/

// Y2015D06 is the entrypoint for both parts of 2015 day 6.
func (app *Application) Y2015D06() {
	app.Y2015D06P1()
	app.Y2015D06P2()
}

// Y2015D06P1 solves part 1: count lights that are on.
func (app *Application) Y2015D06P1() {
	splits := strings.Split(DAY_2015_06_DATA, "\n")
	grid := NewLightGrid()
	for _, instruction := range splits {
		grid.Execute(instruction)
	}
	countOn, countOff := grid.CountOnOff()
	fmt.Printf("On %v Off %v\n", countOn, countOff)
}

/*
You just finish implementing your winning light pattern when you realize you mistranslated Santa's message from Ancient Nordic Elvish.
The light grid you bought actually has individual brightness controls; each light can have a brightness of zero or more. The lights all start at zero.
The phrase turn on actually means that you should increase the brightness of those lights by 1.
The phrase turn off actually means that you should decrease the brightness of those lights by 1, to a minimum of zero.
The phrase toggle actually means that you should increase the brightness of those lights by 2.
What is the total brightness of all lights combined after following Santa's instructions?
For example:
turn on 0,0 through 0,0 would increase the total brightness by 1.
toggle 0,0 through 999,999 would increase the total brightness by 2000000.
*/

// Y2015D06P2 solves part 2: sum the brightness of all lights.
func (app *Application) Y2015D06P2() {
	splits := strings.Split(DAY_2015_06_DATA, "\n")
	grid := NewLightGrid()
	for _, instruction := range splits {
		grid.Execute(instruction)
	}
	countOn, countOff := grid.CountOnOff()
	b := grid.TotalBrightness()
	fmt.Printf("On %v Off %v\n", countOn, countOff)
	fmt.Printf("Brightness %v\n", b)
}

// LightGrid tracks both the on/off state (part 1) and the brightness
// (part 2) of each light, keyed by "x,y".
type LightGrid struct {
	coordinates map[string]bool
	brightness  map[string]int
}

// NewLightGrid returns an empty grid (all lights off, brightness zero).
func NewLightGrid() *LightGrid {
	return &LightGrid{coordinates: make(map[string]bool), brightness: make(map[string]int)}
}

// ExecuteAll runs every newline-separated instruction in order.
func (grid *LightGrid) ExecuteAll(instructions string) {
	for _, instruction := range strings.Split(instructions, "\n") {
		grid.Execute(instruction)
	}
}

// forEach applies visit to every light in the inclusive rectangle (x1,y1)-(x2,y2).
func (grid *LightGrid) forEach(x1, y1, x2, y2 int, visit func(x, y int)) {
	for x := x1; x <= x2; x++ {
		for y := y1; y <= y2; y++ {
			visit(x, y)
		}
	}
}

// Execute parses one instruction line and applies it to the grid.
func (grid *LightGrid) Execute(instruction string) {
	x1, y1, x2, y2 := grid.ParseCoordinates(instruction)
	switch {
	case strings.Contains(instruction, "toggle"):
		fmt.Printf("instruction = '%v', toggle (%v,%v)->(%v,%v)\n", instruction, x1, y1, x2, y2)
		grid.forEach(x1, y1, x2, y2, grid.Toggle)
	case strings.Contains(instruction, "turn on"):
		fmt.Printf("instruction = '%v', turn on (%v,%v)->(%v,%v)\n", instruction, x1, y1, x2, y2)
		grid.forEach(x1, y1, x2, y2, grid.TurnOn)
	case strings.Contains(instruction, "turn off"):
		fmt.Printf("instruction = '%v', turn off (%v,%v)->(%v,%v)\n", instruction, x1, y1, x2, y2)
		grid.forEach(x1, y1, x2, y2, grid.TurnOff)
	}
}

// ParseCoordinates extracts x1,y1,x2,y2 from an instruction line.
// NOTE(review): Atoi errors are silently ignored — malformed input yields zeros.
func (grid *LightGrid) ParseCoordinates(instruction string) (int, int, int, int) {
	instruction = strings.ReplaceAll(instruction, "toggle ", "")
	instruction = strings.ReplaceAll(instruction, "turn on ", "")
	instruction = strings.ReplaceAll(instruction, "turn off ", "")
	coordinates := strings.ReplaceAll(instruction, " through ", ",")
	splits := strings.Split(coordinates, ",")
	x1, _ := strconv.Atoi(splits[0])
	y1, _ := strconv.Atoi(splits[1])
	x2, _ := strconv.Atoi(splits[2])
	y2, _ := strconv.Atoi(splits[3])
	return x1, y1, x2, y2
}

// Toggle flips the light at (x,y) and raises its brightness by 2.
// Map zero values make the missing-key case identical to the explicit
// branches the original code used (off -> on, brightness 0 -> 2).
func (grid *LightGrid) Toggle(x int, y int) {
	key := fmt.Sprintf("%v,%v", x, y)
	grid.coordinates[key] = !grid.coordinates[key]
	grid.brightness[key] += 2
}

// TurnOn switches the light at (x,y) on and raises its brightness by 1.
func (grid *LightGrid) TurnOn(x int, y int) {
	key := fmt.Sprintf("%v,%v", x, y)
	grid.coordinates[key] = true
	grid.brightness[key]++
}

// TotalBrightness sums the brightness of every light (part 2 answer).
func (grid *LightGrid) TotalBrightness() int {
	total := 0
	for _, value := range grid.brightness {
		total += value
	}
	return total
}

// TurnOff switches the light at (x,y) off and lowers its brightness by 1,
// clamped at zero per the part 2 rules.
func (grid *LightGrid) TurnOff(x int, y int) {
	key := fmt.Sprintf("%v,%v", x, y)
	grid.coordinates[key] = false
	if b := grid.brightness[key] - 1; b > 0 {
		grid.brightness[key] = b
	} else {
		grid.brightness[key] = 0
	}
}

// Get reports whether the light at (x,y) is on; unset lights are off.
func (grid *LightGrid) Get(x int, y int) bool {
	key := fmt.Sprintf("%v,%v", x, y)
	return grid.coordinates[key]
}

// CountOnOff scans the full 1000x1000 grid and returns (on, off) counts.
func (grid *LightGrid) CountOnOff() (int, int) {
	on := 0
	off := 0
	for x := 0; x < 1000; x++ {
		for y := 0; y < 1000; y++ {
			if grid.Get(x, y) {
				on++
			} else {
				off++
			}
		}
	}
	return on, off
}
app/aoc2015/aoc2015_06.go
0.795975
0.664894
aoc2015_06.go
starcoder
package imageoutput

import "math"

// CoordinateCollection holds an array of coordinates as they turn into symmetry patterns.
type CoordinateCollection struct {
	coordinates *[]*MappedCoordinate
}

// Coordinates returns the collection of coordinates.
func (c *CoordinateCollection) Coordinates() *[]*MappedCoordinate {
	return c.coordinates
}

// selectTransformed scans every comparable, filter-satisfying coordinate and
// returns the value (per the value selector) preferred by better, or NaN if no
// coordinate qualifies. Shared by the four Min/Max methods below, which were
// previously four copies of the same loop.
func (c *CoordinateCollection) selectTransformed(value func(*MappedCoordinate) float64, better func(candidate, best float64) bool) float64 {
	foundCandidate := false
	best := math.NaN()
	for _, coordinate := range *c.coordinates {
		if !coordinate.CanBeCompared() || !coordinate.SatisfiesFilter() {
			continue
		}
		v := value(coordinate)
		if !foundCandidate || better(v, best) {
			best = v
			foundCandidate = true
		}
	}
	return best
}

// MinimumTransformedX returns the lowest TransformedX coordinate in the collection.
func (c *CoordinateCollection) MinimumTransformedX() float64 {
	return c.selectTransformed((*MappedCoordinate).TransformedX, func(a, b float64) bool { return a < b })
}

// MaximumTransformedX returns the greatest TransformedX coordinate in the collection.
func (c *CoordinateCollection) MaximumTransformedX() float64 {
	return c.selectTransformed((*MappedCoordinate).TransformedX, func(a, b float64) bool { return a > b })
}

// MinimumTransformedY returns the lowest TransformedY coordinate in the collection.
func (c *CoordinateCollection) MinimumTransformedY() float64 {
	return c.selectTransformed((*MappedCoordinate).TransformedY, func(a, b float64) bool { return a < b })
}

// MaximumTransformedY returns the greatest TransformedY coordinate in the collection.
func (c *CoordinateCollection) MaximumTransformedY() float64 {
	return c.selectTransformed((*MappedCoordinate).TransformedY, func(a, b float64) bool { return a > b })
}
entities/imageoutput/coordinatecollection.go
0.917488
0.410284
coordinatecollection.go
starcoder
package cldr var localeslice = []LocaleInfo{ {Lang: "af", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "af_NA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "af_ZA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "agq", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "agq_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ak", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ak_GH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "am", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "am_ET", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ar", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_001", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_AE", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_BH", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_DJ", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_DZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_EG", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_EH", SepDecimal: '٫', SepGroup: '٬', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_ER", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_IL", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_IQ", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_JO", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_KM", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_KW", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_LB", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_LY", 
SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_MA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_MR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_OM", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_PS", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_QA", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_SA", SepDecimal: '٫', SepGroup: '٬', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_SD", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_SO", SepDecimal: '٫', SepGroup: '٬', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_SS", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_SY", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_TD", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ar_TN", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ar_YE", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "as", SepDecimal: '.', SepGroup: ',', DigitZero: '০', DigitNine: '৯'}, {Lang: "as_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '০', DigitNine: '৯'}, {Lang: "asa", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "asa_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ast", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ast_ES", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "az", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "az_Cyrl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "az_Cyrl_AZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "az_Latn", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "az_Latn_AZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, 
{Lang: "bas", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "bas_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "be", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "be_BY", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "bem", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bem_ZM", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bez", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bez_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bg", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "bg_BG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "bm", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bm_ML", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "bn", SepDecimal: '.', SepGroup: ',', DigitZero: '০', DigitNine: '৯'}, {Lang: "bn_BD", SepDecimal: '.', SepGroup: ',', DigitZero: '০', DigitNine: '৯'}, {Lang: "bn_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '০', DigitNine: '৯'}, {Lang: "bo", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "bo_CN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "bo_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "br", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "br_FR", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "brx", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "brx_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "bs", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "bs_Cyrl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "bs_Cyrl_BA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "bs_Latn", SepDecimal: ',', 
SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "bs_Latn_BA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca_AD", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca_ES", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca_ES_VALENCIA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca_FR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ca_IT", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ccp", SepDecimal: '.', SepGroup: 0, DigitZero: '𑄶', DigitNine: '𑄿'}, {Lang: "ccp_BD", SepDecimal: '.', SepGroup: 0, DigitZero: '𑄶', DigitNine: '𑄿'}, {Lang: "ccp_IN", SepDecimal: '.', SepGroup: 0, DigitZero: '𑄶', DigitNine: '𑄿'}, {Lang: "ce", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ce_RU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "cgg", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "cgg_UG", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "chr", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "chr_US", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ckb", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ckb_IQ", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ckb_IR", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "cs", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "cs_CZ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "cu", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "cu_RU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "cy", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "cy_GB", SepDecimal: '.', 
SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "da", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "da_DK", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "da_GL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "dav", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "dav_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "de", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "de_AT", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "de_BE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "de_CH", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "de_DE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "de_IT", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "de_LI", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "de_LU", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "dje", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dje_NE", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dsb", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "dsb_DE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "dua", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dua_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dyo", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dyo_SN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "dz", SepDecimal: '.', SepGroup: ',', DigitZero: '༠', DigitNine: '༩'}, {Lang: "dz_BT", SepDecimal: '.', SepGroup: ',', DigitZero: '༠', DigitNine: '༩'}, {Lang: "ebu", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ebu_KE", SepDecimal: 0, SepGroup: 0, 
DigitZero: 0, DigitNine: 0}, {Lang: "ee", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ee_GH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ee_TG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "el", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "el_CY", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "el_GR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "en", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_001", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_150", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "en_AG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_AI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_AS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_AT", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_AU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BB", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_BZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CA", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CK", SepDecimal: '.', 
SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CX", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_CY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_DE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_DG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_DK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_DM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_ER", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_FI", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "en_FJ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_FK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_FM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GB", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GD", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_GY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_HK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_IE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_IL", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_IM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: 
"en_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_IO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_JE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_JM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_KE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_KI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_KN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_KY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_LC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_LR", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_LS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MP", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MT", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_MY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NA", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NF", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NL", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NR", SepDecimal: '.', SepGroup: ',', DigitZero: 
'0', DigitNine: '9'}, {Lang: "en_NU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_NZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PR", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_PW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_RW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SB", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SD", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SE", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SL", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SX", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_SZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TT", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TV", SepDecimal: '.', 
SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_TZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_UG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_UM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_US", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_US_POSIX", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_VC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_VG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_VI", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_VU", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_WS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_ZA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "en_ZM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "en_ZW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "eo", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "eo_001", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "es", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_419", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "es_AR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_BO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_BR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_BZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_CL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_CO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_CR", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, 
{Lang: "es_CU", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_DO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_EA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_EC", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_ES", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_GQ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_GT", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_HN", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_IC", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_MX", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_NI", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_PA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_PE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_PH", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_PR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_PY", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_SV", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_US", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_UY", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "es_VE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "et", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "et_EE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "eu", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "eu_ES", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ewo", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', 
DigitNine: '9'}, {Lang: "ewo_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fa", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "fa_AF", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "fa_IR", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "ff", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ff_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ff_GN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ff_MR", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ff_SN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fi", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fi_FI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fil", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "fil_PH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "fo", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fo_DK", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fo_FO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fr", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_BE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_BF", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_BI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_BJ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_BL", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CD", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CF", SepDecimal: ',', SepGroup: ' ', 
DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CH", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_DJ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_DZ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_FR", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_GA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_GF", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_GN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_GP", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_GQ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_HT", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_KM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_LU", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MC", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MF", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_ML", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MQ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MR", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_MU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_NC", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_NE", 
SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_PF", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_PM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_RE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_RW", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_SC", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_SN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_SY", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_TD", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_TG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_TN", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_VU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_WF", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fr_YT", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "fur", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fur_IT", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fy", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "fy_NL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ga", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ga_IE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "gd", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "gd_GB", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "gl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "gl_ES", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "gsw", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, 
{Lang: "gsw_CH", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "gsw_FR", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "gsw_LI", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "gu", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "gu_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "guz", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "guz_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "gv", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "gv_IM", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ha", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ha_GH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ha_NE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ha_NG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "haw", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "haw_US", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "he", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "he_IL", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "hi", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "hi_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "hr", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "hr_BA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "hr_HR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "hsb", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "hsb_DE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "hu", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "hu_HU", SepDecimal: 
',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "hy", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "hy_AM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "id", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "id_ID", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ig", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ig_NG", SepDecimal: '٫', SepGroup: '٬', DigitZero: '٠', DigitNine: '٩'}, {Lang: "ii", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ii_CN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "is", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "is_IS", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "it", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "it_CH", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "it_IT", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "it_SM", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "it_VA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ja", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ja_JP", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "jgo", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "jgo_CM", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "jmc", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "jmc_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ka", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ka_GE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kab", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kab_DZ", SepDecimal: ',', SepGroup: ' 
', DigitZero: '0', DigitNine: '9'}, {Lang: "kam", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kam_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kde", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kde_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kea", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kea_CV", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "khq", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "khq_ML", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ki", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ki_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kk", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kk_KZ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kkj", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "kkj_CM", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "kl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "kl_GL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "kln", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kln_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "km", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "km_KH", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "kn", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "kn_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ko", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ko_KP", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ko_KR", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "kok", SepDecimal: '.', 
SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "kok_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ks", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "ks_IN", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "ksb", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ksb_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ksf", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ksf_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ksh", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ksh_DE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "kw", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "kw_GB", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ky", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ky_KG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "lag", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "lag_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "lb", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lb_LU", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lg", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "lg_UG", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "lkt", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "lkt_US", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ln", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ln_AO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ln_CD", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ln_CF", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ln_CG", 
SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lo", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lo_LA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lrc", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "lrc_IQ", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "lrc_IR", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "lt", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "lt_LT", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "lu", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "lu_CD", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "luo", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "luo_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "luy", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "luy_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "lv", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "lv_LV", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "mas", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "mas_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "mas_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "mer", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "mer_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "mfe", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "mfe_MU", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "mg", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mg_MG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mgh", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "mgh_MZ", 
SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "mgo", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mgo_CM", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mk", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "mk_MK", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ml", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ml_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mn", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mn_MN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mr", SepDecimal: '.', SepGroup: ',', DigitZero: '०', DigitNine: '९'}, {Lang: "mr_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '०', DigitNine: '९'}, {Lang: "ms", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ms_BN", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ms_MY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ms_SG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mt", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mt_MT", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "mua", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "mua_CM", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "my", SepDecimal: '.', SepGroup: ',', DigitZero: '၀', DigitNine: '၉'}, {Lang: "my_MM", SepDecimal: '.', SepGroup: ',', DigitZero: '၀', DigitNine: '၉'}, {Lang: "mzn", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "mzn_IR", SepDecimal: 0, SepGroup: 0, DigitZero: '۰', DigitNine: '۹'}, {Lang: "naq", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "naq_NA", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "nb", SepDecimal: ',', SepGroup: 
' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nb_NO", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nb_SJ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nd", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "nd_ZW", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "nds", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nds_DE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nds_NL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ne", SepDecimal: '.', SepGroup: ',', DigitZero: '०', DigitNine: '९'}, {Lang: "ne_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '०', DigitNine: '९'}, {Lang: "ne_NP", SepDecimal: '.', SepGroup: ',', DigitZero: '०', DigitNine: '९'}, {Lang: "nl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_AW", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_BE", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_BQ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_CW", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_NL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_SR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nl_SX", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nmg", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nmg_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nn", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nn_NO", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "nnh", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nnh_CM", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "nus", SepDecimal: '.', SepGroup: ',', 
DigitZero: '0', DigitNine: '9'}, {Lang: "nus_SS", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "nyn", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "nyn_UG", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "om", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "om_ET", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "om_KE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "or", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "or_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "os", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "os_GE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "os_RU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "pa", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "pa_Arab", SepDecimal: '.', SepGroup: ',', DigitZero: '۰', DigitNine: '۹'}, {Lang: "pa_Arab_PK", SepDecimal: '.', SepGroup: ',', DigitZero: '۰', DigitNine: '۹'}, {Lang: "pa_Guru", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "pa_Guru_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "pl", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "pl_PL", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "prg", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "prg_001", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ps", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "ps_AF", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "pt", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_AO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_BR", SepDecimal: ',', SepGroup: 
'.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_CH", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_CV", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_GQ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_GW", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_LU", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_MO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_MZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_PT", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_ST", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "pt_TL", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "qu", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "qu_BO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "qu_EC", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "qu_PE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "rm", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "rm_CH", SepDecimal: '.', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "rn", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "rn_BI", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ro", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ro_MD", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ro_RO", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "rof", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "rof_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "ru", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ru_BY", SepDecimal: ',', SepGroup: ' ', 
DigitZero: '0', DigitNine: '9'}, {Lang: "ru_KG", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ru_KZ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ru_MD", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ru_RU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ru_UA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "rw", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "rw_RW", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "rwk", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "rwk_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "sah", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sah_RU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "saq", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "saq_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "sbp", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sbp_TZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sd", SepDecimal: 0, SepGroup: 0, DigitZero: '٠', DigitNine: '٩'}, {Lang: "sd_PK", SepDecimal: 0, SepGroup: 0, DigitZero: '٠', DigitNine: '٩'}, {Lang: "se", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "se_FI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "se_NO", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "se_SE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "seh", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "seh_MZ", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "ses", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ses_ML", SepDecimal: 0, SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, 
{Lang: "sg", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sg_CF", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "shi", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "shi_Latn", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "shi_Latn_MA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "shi_Tfng", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "shi_Tfng_MA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "si", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "si_LK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sk", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sk_SK", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sl_SI", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "smn", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "smn_FI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sn", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sn_ZW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "so", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "so_DJ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "so_ET", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "so_KE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "so_SO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sq", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sq_AL", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sq_MK", SepDecimal: ',', SepGroup: ' ', DigitZero: 
'0', DigitNine: '9'}, {Lang: "sq_XK", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sr", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Cyrl", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Cyrl_BA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Cyrl_ME", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Cyrl_RS", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Cyrl_XK", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Latn", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Latn_BA", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Latn_ME", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Latn_RS", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sr_Latn_XK", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sv", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sv_AX", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sv_FI", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sv_SE", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "sw", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sw_CD", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "sw_KE", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sw_TZ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "sw_UG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ta", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ta_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ta_LK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, 
{Lang: "ta_MY", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ta_SG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "te", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "te_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "teo", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "teo_KE", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "teo_UG", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "tg", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "tg_TJ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "th", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "th_TH", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ti", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ti_ER", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ti_ET", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "tk", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "tk_TM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "to", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "to_TO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "tr", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "tr_CY", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "tr_TR", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "tt", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "tt_RU", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "twq", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "twq_NE", SepDecimal: '.', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "tzm", 
SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "tzm_MA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ug", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ug_CN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "uk", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "uk_UA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "ur", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "ur_IN", SepDecimal: '.', SepGroup: ',', DigitZero: '۰', DigitNine: '۹'}, {Lang: "ur_PK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "uz", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "uz_Arab", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "uz_Arab_AF", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "uz_Cyrl", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "uz_Cyrl_UZ", SepDecimal: '٫', SepGroup: '٬', DigitZero: '۰', DigitNine: '۹'}, {Lang: "uz_Latn", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "uz_Latn_UZ", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "vai", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "vai_Latn", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "vai_Latn_LR", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "vai_Vaii", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "vai_Vaii_LR", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "vi", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "vi_VN", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "vo", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "vo_001", SepDecimal: 0, SepGroup: 0, DigitZero: 0, 
DigitNine: 0}, {Lang: "vun", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "vun_TZ", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "wae", SepDecimal: ',', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "wae_CH", SepDecimal: ',', SepGroup: '’', DigitZero: '0', DigitNine: '9'}, {Lang: "wo", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "wo_SN", SepDecimal: ',', SepGroup: '.', DigitZero: '0', DigitNine: '9'}, {Lang: "xog", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "xog_UG", SepDecimal: 0, SepGroup: 0, DigitZero: 0, DigitNine: 0}, {Lang: "yav", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "yav_CM", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "yi", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yi_001", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yo", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yo_BJ", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yo_NG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yue", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yue_Hans", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yue_Hans_CN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yue_Hant", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "yue_Hant_HK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zgh", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "zgh_MA", SepDecimal: ',', SepGroup: ' ', DigitZero: '0', DigitNine: '9'}, {Lang: "zh", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hans", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hans_CN", SepDecimal: '.', SepGroup: ',', DigitZero: '0', 
DigitNine: '9'}, {Lang: "zh_Hans_HK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hans_MO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hans_SG", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hant", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hant_HK", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hant_MO", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zh_Hant_TW", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zu", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, {Lang: "zu_ZA", SepDecimal: '.', SepGroup: ',', DigitZero: '0', DigitNine: '9'}, }
internal/cldr/tables.go
0.557845
0.406214
tables.go
starcoder
package pgtype

import (
	"database/sql/driver"
	"encoding/binary"
	"math"
	"strconv"

	"github.com/jackc/pgio"
	errors "golang.org/x/xerrors"
)

// Float8 represents the PostgreSQL float8 (double precision) type.
// The zero value has Status Undefined.
type Float8 struct {
	Float  float64
	Status Status
}

// Set assigns src to dst, converting from any supported Go numeric type
// or numeric string. Integer values that cannot be represented exactly
// as a float64 are rejected with an error. A nil src sets Status to Null.
func (dst *Float8) Set(src interface{}) error {
	if src == nil {
		*dst = Float8{Status: Null}
		return nil
	}

	// Unwrap types that expose their inner value via Get() (e.g. other
	// pgtype values). Guard against Get() returning the receiver itself,
	// which would cause infinite recursion.
	if value, ok := src.(interface{ Get() interface{} }); ok {
		value2 := value.Get()
		if value2 != value {
			return dst.Set(value2)
		}
	}

	switch value := src.(type) {
	case float32:
		*dst = Float8{Float: float64(value), Status: Present}
	case float64:
		*dst = Float8{Float: value, Status: Present}
	case int8:
		*dst = Float8{Float: float64(value), Status: Present}
	case uint8:
		*dst = Float8{Float: float64(value), Status: Present}
	case int16:
		*dst = Float8{Float: float64(value), Status: Present}
	case uint16:
		*dst = Float8{Float: float64(value), Status: Present}
	case int32:
		*dst = Float8{Float: float64(value), Status: Present}
	case uint32:
		*dst = Float8{Float: float64(value), Status: Present}
	case int64:
		// float64 has a 53-bit mantissa; round-trip to detect loss.
		f64 := float64(value)
		if int64(f64) == value {
			*dst = Float8{Float: f64, Status: Present}
		} else {
			return errors.Errorf("%v cannot be exactly represented as float64", value)
		}
	case uint64:
		f64 := float64(value)
		if uint64(f64) == value {
			*dst = Float8{Float: f64, Status: Present}
		} else {
			return errors.Errorf("%v cannot be exactly represented as float64", value)
		}
	case int:
		f64 := float64(value)
		if int(f64) == value {
			*dst = Float8{Float: f64, Status: Present}
		} else {
			return errors.Errorf("%v cannot be exactly represented as float64", value)
		}
	case uint:
		f64 := float64(value)
		if uint(f64) == value {
			*dst = Float8{Float: f64, Status: Present}
		} else {
			return errors.Errorf("%v cannot be exactly represented as float64", value)
		}
	case string:
		num, err := strconv.ParseFloat(value, 64)
		if err != nil {
			return err
		}
		*dst = Float8{Float: num, Status: Present}
	default:
		// Fall back to the underlying type for named numeric types.
		if originalSrc, ok := underlyingNumberType(src); ok {
			return dst.Set(originalSrc)
		}
		return errors.Errorf("cannot convert %v to Float8", value)
	}

	return nil
}

// Get returns the underlying value: a float64 when Present, nil when
// Null, and the Status itself otherwise (Undefined).
func (dst Float8) Get() interface{} {
	switch dst.Status {
	case Present:
		return dst.Float
	case Null:
		return nil
	default:
		return dst.Status
	}
}

// AssignTo assigns src's value to the Go variable pointed to by dst.
func (src *Float8) AssignTo(dst interface{}) error {
	return float64AssignTo(src.Float, src.Status, dst)
}

// DecodeText decodes the PostgreSQL text format (a decimal string).
// A nil src decodes as SQL NULL.
func (dst *Float8) DecodeText(ci *ConnInfo, src []byte) error {
	if src == nil {
		*dst = Float8{Status: Null}
		return nil
	}

	n, err := strconv.ParseFloat(string(src), 64)
	if err != nil {
		return err
	}

	*dst = Float8{Float: n, Status: Present}
	return nil
}

// DecodeBinary decodes the PostgreSQL binary format: 8 bytes holding a
// big-endian IEEE 754 double. A nil src decodes as SQL NULL.
func (dst *Float8) DecodeBinary(ci *ConnInfo, src []byte) error {
	if src == nil {
		*dst = Float8{Status: Null}
		return nil
	}

	if len(src) != 8 {
		return errors.Errorf("invalid length for float8: %v", len(src))
	}

	*dst = Float8{Float: math.Float64frombits(binary.BigEndian.Uint64(src)), Status: Present}
	return nil
}

// EncodeText appends the value in PostgreSQL text format. It returns
// (nil, nil) for Null per the pgtype encoding convention.
func (src Float8) EncodeText(ci *ConnInfo, buf []byte) ([]byte, error) {
	switch src.Status {
	case Null:
		return nil, nil
	case Undefined:
		return nil, errUndefined
	}

	buf = append(buf, strconv.FormatFloat(src.Float, 'f', -1, 64)...)
	return buf, nil
}

// EncodeBinary appends the value in PostgreSQL binary format (8-byte
// big-endian IEEE 754 double). It returns (nil, nil) for Null.
func (src Float8) EncodeBinary(ci *ConnInfo, buf []byte) ([]byte, error) {
	switch src.Status {
	case Null:
		return nil, nil
	case Undefined:
		return nil, errUndefined
	}

	buf = pgio.AppendUint64(buf, math.Float64bits(src.Float))
	return buf, nil
}

// Scan implements the database/sql Scanner interface.
func (dst *Float8) Scan(src interface{}) error {
	if src == nil {
		*dst = Float8{Status: Null}
		return nil
	}

	switch src := src.(type) {
	case float64:
		*dst = Float8{Float: src, Status: Present}
		return nil
	case string:
		return dst.DecodeText(nil, []byte(src))
	case []byte:
		// Copy: database/sql may reuse the buffer after Scan returns.
		srcCopy := make([]byte, len(src))
		copy(srcCopy, src)
		return dst.DecodeText(nil, srcCopy)
	}

	return errors.Errorf("cannot scan %T", src)
}

// Value implements the database/sql/driver Valuer interface.
func (src Float8) Value() (driver.Value, error) { switch src.Status { case Present: return src.Float, nil case Null: return nil, nil default: return nil, errUndefined } }
float8.go
0.674587
0.448004
float8.go
starcoder
package html // Attributes defines a list of attribute pairs type Attributes []AttrPair // AttrPair defines a n attribute key and value pair type AttrPair struct { Key string Value interface{} } // Attr returns an attribute pair with the given key and value func Attr(key string, value interface{}) Attributes { return Attributes{AttrPair{Key: key, Value: value}} } // Attr adds an attribute to the list of attribute pairs func (a Attributes) Attr(key string, id interface{}) Attributes { return append(a, AttrPair{Key: key, Value: id}) } func ID(id interface{}) Attributes { return Attributes{AttrPair{Key: "id", Value: id}} } func (a Attributes) ID(id interface{}) Attributes { return append(a, AttrPair{Key: "id", Value: id}) } func Class(class interface{}) Attributes { return Attributes{AttrPair{Key: "class", Value: class}} } func (a Attributes) Class(class interface{}) Attributes { return append(a, AttrPair{Key: "class", Value: class}) } func Href(href interface{}) Attributes { return Attributes{AttrPair{Key: "href", Value: href}} } func (a Attributes) Href(href interface{}) Attributes { return append(a, AttrPair{Key: "href", Value: href}) } func Rel(rel interface{}) Attributes { return Attributes{AttrPair{Key: "rel", Value: rel}} } func (a Attributes) Rel(rel interface{}) Attributes { return append(a, AttrPair{Key: "rel", Value: rel}) } func Name(name interface{}) Attributes { return Attributes{AttrPair{Key: "name", Value: name}} } func (a Attributes) Name(name interface{}) Attributes { return append(a, AttrPair{Key: "name", Value: name}) } func Alt(name interface{}) Attributes { return Attributes{AttrPair{Key: "alt", Value: name}} } func (a Attributes) Alt(name interface{}) Attributes { return append(a, AttrPair{Key: "alt", Value: name}) } func (a Attributes) Content(name interface{}) Attributes { return append(a, AttrPair{Key: "content", Value: name}) } func Content(name interface{}) Attributes { return Attributes{AttrPair{Key: "content", Value: name}} } func 
Checked() Attributes { return Attributes{AttrPair{Key: "checked", Value: nil}} } func (a Attributes) Checked() Attributes { return append(a, AttrPair{Key: "checked", Value: nil}) } func Defer() Attributes { return Attributes{AttrPair{Key: "defer", Value: nil}} } func (a Attributes) Defer() Attributes { return append(a, AttrPair{Key: "defer", Value: nil}) } func Src(src interface{}) Attributes { return Attributes{AttrPair{Key: "src", Value: src}} } func (a Attributes) Src(src interface{}) Attributes { return append(a, AttrPair{Key: "src", Value: src}) } func Action(action interface{}) Attributes { return Attributes{AttrPair{Key: "action", Value: action}} } func (a Attributes) Action(action interface{}) Attributes { return append(a, AttrPair{Key: "action", Value: action}) } func Method(method interface{}) Attributes { return Attributes{AttrPair{Key: "method", Value: method}} } func (a Attributes) Method(method interface{}) Attributes { return append(a, AttrPair{Key: "method", Value: method}) } func Type(typ interface{}) Attributes { return Attributes{AttrPair{Key: "type", Value: typ}} } func (a Attributes) Type(typ interface{}) Attributes { return append(a, AttrPair{Key: "type", Value: typ}) } func For(fo interface{}) Attributes { return Attributes{AttrPair{Key: "for", Value: fo}} } func (a Attributes) For(fo interface{}) Attributes { return append(a, AttrPair{Key: "for", Value: fo}) } func Value(value interface{}) Attributes { return Attributes{AttrPair{Key: "value", Value: value}} } func (a Attributes) Value(value interface{}) Attributes { return append(a, AttrPair{Key: "value", Value: value}) } func Data(key string, value interface{}) Attributes { return Attributes{AttrPair{Key: "data-" + key, Value: value}} } func (a Attributes) Data(key string, value interface{}) Attributes { return append(a, AttrPair{Key: "data-" + key, Value: value}) } func Charset(charset interface{}) Attributes { return Attributes{AttrPair{Key: "charset", Value: charset}} } func (a 
Attributes) Charset(charset interface{}) Attributes { return append(a, AttrPair{Key: "charset", Value: charset}) } func Styles(style string) Attributes { return Attributes{AttrPair{Key: "style", Value: style}} } func (a Attributes) Styles(style string) Attributes { return append(a, AttrPair{Key: "style", Value: style}) }
attributes.go
0.87251
0.450843
attributes.go
starcoder
package render import ( "text/template" "github.com/VirtusLab/crypt/aws" "github.com/VirtusLab/crypt/azure" "github.com/VirtusLab/crypt/gcp" ) /* TemplateFunctions provides template functions for render or the standard (text/template) template engine - encryptAWS - encrypts the data from inside of the template using AWS KMS, for best results use with gzip and b64enc - decryptAWS - decrypts the data from inside of the template using AWS KMS, for best results use with ungzip and b64dec - encryptGCP - encrypts the data from inside of the template using GCP KMS, for best results use with gzip and b64enc - decryptGCP - decrypts the data from inside of the template using GCP KMS, for best results use with ungzip and b64dec - encryptAzure - encrypts the data from inside of the template using Azure Key Vault, for best results use with gzip and b64enc - decryptAzure - decrypts the data from inside of the template using Azure Key Vault, for best results use with ungzip and b64dec */ func TemplateFunctions() template.FuncMap { return template.FuncMap{ "encryptAWS": EncryptAWS, "decryptAWS": DecryptAWS, "encryptGCP": EncryptGCP, "decryptGCP": DecryptGCP, "encryptAzure": EncryptAzure, "decryptAzure": DecryptAzure, } } // EncryptAWS encrypts plaintext using AWS KMS func EncryptAWS(awsKms, awsRegion, awsProfile, plaintext string) ([]byte, error) { amazon := aws.New(awsKms, awsRegion, awsProfile) result, err := amazon.Encrypt([]byte(plaintext)) if err != nil { return nil, err } return result, nil } // DecryptAWS decrypts ciphertext using AWS KMS func DecryptAWS(awsRegion, awsProfile, ciphertext string) (string, error) { amazon := aws.New("" /* not needed for decryption */, awsRegion, awsProfile) result, err := amazon.Decrypt([]byte(ciphertext)) if err != nil { return "", err } return string(result), nil } // EncryptGCP encrypts plaintext using GCP KMS func EncryptGCP(gcpProject, gcpLocation, gcpKeyring, gcpKey, plaintext string) ([]byte, error) { googleKms := gcp.New(gcpProject, 
gcpLocation, gcpKeyring, gcpKey) result, err := googleKms.Encrypt([]byte(plaintext)) if err != nil { return nil, err } return result, nil } // DecryptGCP decrypts ciphertext using GCP KMS func DecryptGCP(gcpProject, gcpLocation, gcpKeyring, gcpKey, ciphertext string) (string, error) { googleKms := gcp.New(gcpProject, gcpLocation, gcpKeyring, gcpKey) result, err := googleKms.Decrypt([]byte(ciphertext)) if err != nil { return "", err } return string(result), nil } // EncryptAzure encrypts plaintext using Azure Key Vault func EncryptAzure(azureVaultURL, azureKey, azureKeyVersion, plaintext string) ([]byte, error) { azr, err := azure.New(azureVaultURL, azureKey, azureKeyVersion) if err != nil { return nil, err } result, err := azr.Encrypt([]byte(plaintext)) if err != nil { return nil, err } return result, nil } // DecryptAzure decrypts ciphertext using Azure Key Vault func DecryptAzure(azureVaultURL, azureKey, azureKeyVersion, ciphertext string) (string, error) { azr, err := azure.New(azureVaultURL, azureKey, azureKeyVersion) if err != nil { return "", err } result, err := azr.Decrypt([]byte(ciphertext)) if err != nil { return "", err } return string(result), nil }
crypto/render/functions.go
0.692122
0.441011
functions.go
starcoder
package openapi import ( "encoding/json" "fmt" "net/url" "strings" "time" "github.com/twilio/twilio-go/client" ) // Optional parameters for the method 'CreateCompositionHook' type CreateCompositionHookParams struct { // An array of track names from the same group room to merge into the compositions created by the composition hook. Can include zero or more track names. A composition triggered by the composition hook includes all audio sources specified in `audio_sources` except those specified in `audio_sources_excluded`. The track names in this parameter can include an asterisk as a wild card character, which matches zero or more characters in a track name. For example, `student*` includes tracks named `student` as well as `studentTeam`. AudioSources *[]string `json:"AudioSources,omitempty"` // An array of track names to exclude. A composition triggered by the composition hook includes all audio sources specified in `audio_sources` except for those specified in `audio_sources_excluded`. The track names in this parameter can include an asterisk as a wild card character, which matches zero or more characters in a track name. For example, `student*` excludes `student` as well as `studentTeam`. This parameter can also be empty. AudioSourcesExcluded *[]string `json:"AudioSourcesExcluded,omitempty"` // Whether the composition hook is active. When `true`, the composition hook will be triggered for every completed Group Room in the account. When `false`, the composition hook will never be triggered. Enabled *bool `json:"Enabled,omitempty"` // The container format of the media files used by the compositions created by the composition hook. Can be: `mp4` or `webm` and the default is `webm`. If `mp4` or `webm`, `audio_sources` must have one or more tracks and/or a `video_layout` element must contain a valid `video_sources` list, otherwise an error occurs. Format *string `json:"Format,omitempty"` // A descriptive string that you create to describe the resource. 
It can be up to 100 characters long and it must be unique within the account. FriendlyName *string `json:"FriendlyName,omitempty"` // A string that describes the columns (width) and rows (height) of the generated composed video in pixels. Defaults to `640x480`. The string's format is `{width}x{height}` where: * 16 <= `{width}` <= 1280 * 16 <= `{height}` <= 1280 * `{width}` * `{height}` <= 921,600 Typical values are: * HD = `1280x720` * PAL = `1024x576` * VGA = `640x480` * CIF = `320x240` Note that the `resolution` imposes an aspect ratio to the resulting composition. When the original video tracks are constrained by the aspect ratio, they are scaled to fit. See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. Resolution *string `json:"Resolution,omitempty"` // The URL we should call using the `status_callback_method` to send status information to your application on every composition event. If not provided, status callback events will not be dispatched. StatusCallback *string `json:"StatusCallback,omitempty"` // The HTTP method we should use to call `status_callback`. Can be: `POST` or `GET` and the default is `POST`. StatusCallbackMethod *string `json:"StatusCallbackMethod,omitempty"` // Whether to clip the intervals where there is no active media in the Compositions triggered by the composition hook. The default is `true`. Compositions with `trim` enabled are shorter when the Room is created and no Participant joins for a while as well as if all the Participants leave the room and join later, because those gaps will be removed. See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. Trim *bool `json:"Trim,omitempty"` // An object that describes the video layout of the composition hook in terms of regions. 
See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. VideoLayout *map[string]interface{} `json:"VideoLayout,omitempty"` } func (params *CreateCompositionHookParams) SetAudioSources(AudioSources []string) *CreateCompositionHookParams { params.AudioSources = &AudioSources return params } func (params *CreateCompositionHookParams) SetAudioSourcesExcluded(AudioSourcesExcluded []string) *CreateCompositionHookParams { params.AudioSourcesExcluded = &AudioSourcesExcluded return params } func (params *CreateCompositionHookParams) SetEnabled(Enabled bool) *CreateCompositionHookParams { params.Enabled = &Enabled return params } func (params *CreateCompositionHookParams) SetFormat(Format string) *CreateCompositionHookParams { params.Format = &Format return params } func (params *CreateCompositionHookParams) SetFriendlyName(FriendlyName string) *CreateCompositionHookParams { params.FriendlyName = &FriendlyName return params } func (params *CreateCompositionHookParams) SetResolution(Resolution string) *CreateCompositionHookParams { params.Resolution = &Resolution return params } func (params *CreateCompositionHookParams) SetStatusCallback(StatusCallback string) *CreateCompositionHookParams { params.StatusCallback = &StatusCallback return params } func (params *CreateCompositionHookParams) SetStatusCallbackMethod(StatusCallbackMethod string) *CreateCompositionHookParams { params.StatusCallbackMethod = &StatusCallbackMethod return params } func (params *CreateCompositionHookParams) SetTrim(Trim bool) *CreateCompositionHookParams { params.Trim = &Trim return params } func (params *CreateCompositionHookParams) SetVideoLayout(VideoLayout map[string]interface{}) *CreateCompositionHookParams { params.VideoLayout = &VideoLayout return params } func (c *ApiService) CreateCompositionHook(params *CreateCompositionHookParams) (*VideoV1CompositionHook, error) { path := "/v1/CompositionHooks" data := url.Values{} 
headers := make(map[string]interface{}) if params != nil && params.AudioSources != nil { for _, item := range *params.AudioSources { data.Add("AudioSources", item) } } if params != nil && params.AudioSourcesExcluded != nil { for _, item := range *params.AudioSourcesExcluded { data.Add("AudioSourcesExcluded", item) } } if params != nil && params.Enabled != nil { data.Set("Enabled", fmt.Sprint(*params.Enabled)) } if params != nil && params.Format != nil { data.Set("Format", *params.Format) } if params != nil && params.FriendlyName != nil { data.Set("FriendlyName", *params.FriendlyName) } if params != nil && params.Resolution != nil { data.Set("Resolution", *params.Resolution) } if params != nil && params.StatusCallback != nil { data.Set("StatusCallback", *params.StatusCallback) } if params != nil && params.StatusCallbackMethod != nil { data.Set("StatusCallbackMethod", *params.StatusCallbackMethod) } if params != nil && params.Trim != nil { data.Set("Trim", fmt.Sprint(*params.Trim)) } if params != nil && params.VideoLayout != nil { v, err := json.Marshal(params.VideoLayout) if err != nil { return nil, err } data.Set("VideoLayout", string(v)) } resp, err := c.requestHandler.Post(c.baseURL+path, data, headers) if err != nil { return nil, err } defer resp.Body.Close() ps := &VideoV1CompositionHook{} if err := json.NewDecoder(resp.Body).Decode(ps); err != nil { return nil, err } return ps, err } // Delete a Recording CompositionHook resource identified by a &#x60;CompositionHook SID&#x60;. func (c *ApiService) DeleteCompositionHook(Sid string) error { path := "/v1/CompositionHooks/{Sid}" path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1) data := url.Values{} headers := make(map[string]interface{}) resp, err := c.requestHandler.Delete(c.baseURL+path, data, headers) if err != nil { return err } defer resp.Body.Close() return nil } // Returns a single CompositionHook resource identified by a CompositionHook SID. 
func (c *ApiService) FetchCompositionHook(Sid string) (*VideoV1CompositionHook, error) { path := "/v1/CompositionHooks/{Sid}" path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1) data := url.Values{} headers := make(map[string]interface{}) resp, err := c.requestHandler.Get(c.baseURL+path, data, headers) if err != nil { return nil, err } defer resp.Body.Close() ps := &VideoV1CompositionHook{} if err := json.NewDecoder(resp.Body).Decode(ps); err != nil { return nil, err } return ps, err } // Optional parameters for the method 'ListCompositionHook' type ListCompositionHookParams struct { // Read only CompositionHook resources with an `enabled` value that matches this parameter. Enabled *bool `json:"Enabled,omitempty"` // Read only CompositionHook resources created on or after this [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) datetime with time zone. DateCreatedAfter *time.Time `json:"DateCreatedAfter,omitempty"` // Read only CompositionHook resources created before this [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) datetime with time zone. DateCreatedBefore *time.Time `json:"DateCreatedBefore,omitempty"` // Read only CompositionHook resources with friendly names that match this string. The match is not case sensitive and can include asterisk `*` characters as wildcard match. FriendlyName *string `json:"FriendlyName,omitempty"` // How many resources to return in each list page. The default is 50, and the maximum is 1000. PageSize *int `json:"PageSize,omitempty"` // Max number of records to return. 
Limit *int `json:"limit,omitempty"` } func (params *ListCompositionHookParams) SetEnabled(Enabled bool) *ListCompositionHookParams { params.Enabled = &Enabled return params } func (params *ListCompositionHookParams) SetDateCreatedAfter(DateCreatedAfter time.Time) *ListCompositionHookParams { params.DateCreatedAfter = &DateCreatedAfter return params } func (params *ListCompositionHookParams) SetDateCreatedBefore(DateCreatedBefore time.Time) *ListCompositionHookParams { params.DateCreatedBefore = &DateCreatedBefore return params } func (params *ListCompositionHookParams) SetFriendlyName(FriendlyName string) *ListCompositionHookParams { params.FriendlyName = &FriendlyName return params } func (params *ListCompositionHookParams) SetPageSize(PageSize int) *ListCompositionHookParams { params.PageSize = &PageSize return params } func (params *ListCompositionHookParams) SetLimit(Limit int) *ListCompositionHookParams { params.Limit = &Limit return params } // Retrieve a single page of CompositionHook records from the API. Request is executed immediately. 
func (c *ApiService) PageCompositionHook(params *ListCompositionHookParams, pageToken, pageNumber string) (*ListCompositionHookResponse, error) { path := "/v1/CompositionHooks" data := url.Values{} headers := make(map[string]interface{}) if params != nil && params.Enabled != nil { data.Set("Enabled", fmt.Sprint(*params.Enabled)) } if params != nil && params.DateCreatedAfter != nil { data.Set("DateCreatedAfter", fmt.Sprint((*params.DateCreatedAfter).Format(time.RFC3339))) } if params != nil && params.DateCreatedBefore != nil { data.Set("DateCreatedBefore", fmt.Sprint((*params.DateCreatedBefore).Format(time.RFC3339))) } if params != nil && params.FriendlyName != nil { data.Set("FriendlyName", *params.FriendlyName) } if params != nil && params.PageSize != nil { data.Set("PageSize", fmt.Sprint(*params.PageSize)) } if pageToken != "" { data.Set("PageToken", pageToken) } if pageNumber != "" { data.Set("Page", pageNumber) } resp, err := c.requestHandler.Get(c.baseURL+path, data, headers) if err != nil { return nil, err } defer resp.Body.Close() ps := &ListCompositionHookResponse{} if err := json.NewDecoder(resp.Body).Decode(ps); err != nil { return nil, err } return ps, err } // Lists CompositionHook records from the API as a list. Unlike stream, this operation is eager and loads 'limit' records into memory before returning. func (c *ApiService) ListCompositionHook(params *ListCompositionHookParams) ([]VideoV1CompositionHook, error) { if params == nil { params = &ListCompositionHookParams{} } params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit)) response, err := c.PageCompositionHook(params, "", "") if err != nil { return nil, err } curRecord := 0 var records []VideoV1CompositionHook for response != nil { records = append(records, response.CompositionHooks...) 
var record interface{} if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListCompositionHookResponse); record == nil || err != nil { return records, err } response = record.(*ListCompositionHookResponse) } return records, err } // Streams CompositionHook records from the API as a channel stream. This operation lazily loads records as efficiently as possible until the limit is reached. func (c *ApiService) StreamCompositionHook(params *ListCompositionHookParams) (chan VideoV1CompositionHook, error) { if params == nil { params = &ListCompositionHookParams{} } params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit)) response, err := c.PageCompositionHook(params, "", "") if err != nil { return nil, err } curRecord := 0 //set buffer size of the channel to 1 channel := make(chan VideoV1CompositionHook, 1) go func() { for response != nil { for item := range response.CompositionHooks { channel <- response.CompositionHooks[item] } var record interface{} if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListCompositionHookResponse); record == nil || err != nil { close(channel) return } response = record.(*ListCompositionHookResponse) } close(channel) }() return channel, err } func (c *ApiService) getNextListCompositionHookResponse(nextPageUrl string) (interface{}, error) { if nextPageUrl == "" { return nil, nil } resp, err := c.requestHandler.Get(nextPageUrl, nil, nil) if err != nil { return nil, err } defer resp.Body.Close() ps := &ListCompositionHookResponse{} if err := json.NewDecoder(resp.Body).Decode(ps); err != nil { return nil, err } return ps, nil } // Optional parameters for the method 'UpdateCompositionHook' type UpdateCompositionHookParams struct { // An array of track names from the same group room to merge into the compositions created by the composition hook. Can include zero or more track names. 
A composition triggered by the composition hook includes all audio sources specified in `audio_sources` except those specified in `audio_sources_excluded`. The track names in this parameter can include an asterisk as a wild card character, which matches zero or more characters in a track name. For example, `student*` includes tracks named `student` as well as `studentTeam`. AudioSources *[]string `json:"AudioSources,omitempty"` // An array of track names to exclude. A composition triggered by the composition hook includes all audio sources specified in `audio_sources` except for those specified in `audio_sources_excluded`. The track names in this parameter can include an asterisk as a wild card character, which matches zero or more characters in a track name. For example, `student*` excludes `student` as well as `studentTeam`. This parameter can also be empty. AudioSourcesExcluded *[]string `json:"AudioSourcesExcluded,omitempty"` // Whether the composition hook is active. When `true`, the composition hook will be triggered for every completed Group Room in the account. When `false`, the composition hook never triggers. Enabled *bool `json:"Enabled,omitempty"` // The container format of the media files used by the compositions created by the composition hook. Can be: `mp4` or `webm` and the default is `webm`. If `mp4` or `webm`, `audio_sources` must have one or more tracks and/or a `video_layout` element must contain a valid `video_sources` list, otherwise an error occurs. Format *string `json:"Format,omitempty"` // A descriptive string that you create to describe the resource. It can be up to 100 characters long and it must be unique within the account. FriendlyName *string `json:"FriendlyName,omitempty"` // A string that describes the columns (width) and rows (height) of the generated composed video in pixels. Defaults to `640x480`. 
The string's format is `{width}x{height}` where: * 16 <= `{width}` <= 1280 * 16 <= `{height}` <= 1280 * `{width}` * `{height}` <= 921,600 Typical values are: * HD = `1280x720` * PAL = `1024x576` * VGA = `640x480` * CIF = `320x240` Note that the `resolution` imposes an aspect ratio to the resulting composition. When the original video tracks are constrained by the aspect ratio, they are scaled to fit. See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. Resolution *string `json:"Resolution,omitempty"` // The URL we should call using the `status_callback_method` to send status information to your application on every composition event. If not provided, status callback events will not be dispatched. StatusCallback *string `json:"StatusCallback,omitempty"` // The HTTP method we should use to call `status_callback`. Can be: `POST` or `GET` and the default is `POST`. StatusCallbackMethod *string `json:"StatusCallbackMethod,omitempty"` // Whether to clip the intervals where there is no active media in the compositions triggered by the composition hook. The default is `true`. Compositions with `trim` enabled are shorter when the Room is created and no Participant joins for a while as well as if all the Participants leave the room and join later, because those gaps will be removed. See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. Trim *bool `json:"Trim,omitempty"` // A JSON object that describes the video layout of the composition hook in terms of regions. See [Specifying Video Layouts](https://www.twilio.com/docs/video/api/compositions-resource#specifying-video-layouts) for more info. 
VideoLayout *map[string]interface{} `json:"VideoLayout,omitempty"` } func (params *UpdateCompositionHookParams) SetAudioSources(AudioSources []string) *UpdateCompositionHookParams { params.AudioSources = &AudioSources return params } func (params *UpdateCompositionHookParams) SetAudioSourcesExcluded(AudioSourcesExcluded []string) *UpdateCompositionHookParams { params.AudioSourcesExcluded = &AudioSourcesExcluded return params } func (params *UpdateCompositionHookParams) SetEnabled(Enabled bool) *UpdateCompositionHookParams { params.Enabled = &Enabled return params } func (params *UpdateCompositionHookParams) SetFormat(Format string) *UpdateCompositionHookParams { params.Format = &Format return params } func (params *UpdateCompositionHookParams) SetFriendlyName(FriendlyName string) *UpdateCompositionHookParams { params.FriendlyName = &FriendlyName return params } func (params *UpdateCompositionHookParams) SetResolution(Resolution string) *UpdateCompositionHookParams { params.Resolution = &Resolution return params } func (params *UpdateCompositionHookParams) SetStatusCallback(StatusCallback string) *UpdateCompositionHookParams { params.StatusCallback = &StatusCallback return params } func (params *UpdateCompositionHookParams) SetStatusCallbackMethod(StatusCallbackMethod string) *UpdateCompositionHookParams { params.StatusCallbackMethod = &StatusCallbackMethod return params } func (params *UpdateCompositionHookParams) SetTrim(Trim bool) *UpdateCompositionHookParams { params.Trim = &Trim return params } func (params *UpdateCompositionHookParams) SetVideoLayout(VideoLayout map[string]interface{}) *UpdateCompositionHookParams { params.VideoLayout = &VideoLayout return params } func (c *ApiService) UpdateCompositionHook(Sid string, params *UpdateCompositionHookParams) (*VideoV1CompositionHook, error) { path := "/v1/CompositionHooks/{Sid}" path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1) data := url.Values{} headers := make(map[string]interface{}) if params != nil && 
params.AudioSources != nil { for _, item := range *params.AudioSources { data.Add("AudioSources", item) } } if params != nil && params.AudioSourcesExcluded != nil { for _, item := range *params.AudioSourcesExcluded { data.Add("AudioSourcesExcluded", item) } } if params != nil && params.Enabled != nil { data.Set("Enabled", fmt.Sprint(*params.Enabled)) } if params != nil && params.Format != nil { data.Set("Format", *params.Format) } if params != nil && params.FriendlyName != nil { data.Set("FriendlyName", *params.FriendlyName) } if params != nil && params.Resolution != nil { data.Set("Resolution", *params.Resolution) } if params != nil && params.StatusCallback != nil { data.Set("StatusCallback", *params.StatusCallback) } if params != nil && params.StatusCallbackMethod != nil { data.Set("StatusCallbackMethod", *params.StatusCallbackMethod) } if params != nil && params.Trim != nil { data.Set("Trim", fmt.Sprint(*params.Trim)) } if params != nil && params.VideoLayout != nil { v, err := json.Marshal(params.VideoLayout) if err != nil { return nil, err } data.Set("VideoLayout", string(v)) } resp, err := c.requestHandler.Post(c.baseURL+path, data, headers) if err != nil { return nil, err } defer resp.Body.Close() ps := &VideoV1CompositionHook{} if err := json.NewDecoder(resp.Body).Decode(ps); err != nil { return nil, err } return ps, err }
rest/video/v1/composition_hooks.go
0.82029
0.522324
composition_hooks.go
starcoder
package transform import ( "image/png" "io" "math" "github.com/disintegration/imaging" "github.com/tidepool-org/platform/errors" "github.com/tidepool-org/platform/image" "github.com/tidepool-org/platform/pointer" "github.com/tidepool-org/platform/structure" structureValidator "github.com/tidepool-org/platform/structure/validator" ) type Transformer interface { CalculateTransform(contentAttributes *image.ContentAttributes, rendition *image.Rendition) (*Transform, error) TransformContent(reader io.Reader, transform *Transform) (io.ReadCloser, error) } type Transform struct { Rendition image.Rendition `json:"rendition,omitempty"` ContentWidth int `json:"contentWidth,omitempty"` ContentHeight int `json:"contentHeight,omitempty"` Resize bool `json:"resize,omitempty"` Crop bool `json:"crop,omitempty"` } func NewTransform() *Transform { return &Transform{} } func NewTransformWithRendition(rendition *image.Rendition) (*Transform, error) { if rendition == nil { return nil, errors.New("rendition is missing") } if rendition.Width == nil { return nil, errors.New("rendition width is missing") } if rendition.Height == nil { return nil, errors.New("rendition height is missing") } return &Transform{ Rendition: *rendition, ContentWidth: *rendition.Width, ContentHeight: *rendition.Height, }, nil } func (t *Transform) Validate(validator structure.Validator) { t.Rendition.Validate(validator.WithReference("rendition")) validator.Int("width", &t.ContentWidth).InRange(image.WidthMinimum, image.WidthMaximum) validator.Int("height", &t.ContentHeight).InRange(image.HeightMinimum, image.HeightMaximum) if !t.Resize { validator.Bool("crop", &t.Crop).False() } } func (t *Transform) ConstrainContentWidth(aspectRatio float64) { t.ContentWidth = int(math.Round(float64(t.ContentHeight) * aspectRatio)) } func (t *Transform) ConstrainContentHeight(aspectRatio float64) { t.ContentHeight = int(math.Round(float64(t.ContentWidth) / aspectRatio)) } func (t *Transform) ConstrainWidth(aspectRatio float64) { 
t.ConstrainContentWidth(aspectRatio) t.Rendition.Width = pointer.FromInt(t.ContentWidth) } func (t *Transform) ConstrainHeight(aspectRatio float64) { t.ConstrainContentHeight(aspectRatio) t.Rendition.Height = pointer.FromInt(t.ContentHeight) } func (t *Transform) Reset(contentAttributes *image.ContentAttributes) error { if contentAttributes == nil { return errors.New("content attributes is missing") } if contentAttributes.Width == nil { return errors.New("content attributes width is missing") } if contentAttributes.Height == nil { return errors.New("content attributes height is missing") } t.Rendition.Width = contentAttributes.Width t.Rendition.Height = contentAttributes.Height t.Rendition.Mode = pointer.FromString(image.ModeScale) if t.Rendition.SupportsQuality() { t.Rendition.Quality = pointer.FromInt(image.QualityDefault) } t.ContentWidth = *contentAttributes.Width t.ContentHeight = *contentAttributes.Height t.Resize = false t.Crop = false return nil } type TransformerImpl struct{} func NewTransformer() *TransformerImpl { return &TransformerImpl{} } func (t *TransformerImpl) CalculateTransform(contentAttributes *image.ContentAttributes, rendition *image.Rendition) (*Transform, error) { if contentAttributes == nil { return nil, errors.New("content attributes is missing") } else if err := structureValidator.New().Validate(contentAttributes); err != nil { return nil, errors.Wrap(err, "content attributes is invalid") } if rendition == nil { return nil, errors.New("rendition is missing") } else if err := structureValidator.New().Validate(rendition); err != nil { return nil, errors.Wrap(err, "rendition is invalid") } contentAttributesAspectRatio := float64(*contentAttributes.Width) / float64(*contentAttributes.Height) transform, err := NewTransformWithRendition(rendition.WithDefaults(contentAttributesAspectRatio)) if err != nil { return nil, err } transformRenditionAspectRatio := float64(*transform.Rendition.Width) / float64(*transform.Rendition.Height) if 
transformRenditionAspectRatio < contentAttributesAspectRatio { switch *transform.Rendition.Mode { case image.ModeFill, image.ModeFillDown: transform.ConstrainContentWidth(contentAttributesAspectRatio) case image.ModeFit, image.ModeFitDown: transform.ConstrainHeight(contentAttributesAspectRatio) case image.ModePad, image.ModePadDown: transform.ConstrainContentHeight(contentAttributesAspectRatio) } } else if transformRenditionAspectRatio > contentAttributesAspectRatio { switch *transform.Rendition.Mode { case image.ModeFill, image.ModeFillDown: transform.ConstrainContentHeight(contentAttributesAspectRatio) case image.ModeFit, image.ModeFitDown: transform.ConstrainWidth(contentAttributesAspectRatio) case image.ModePad, image.ModePadDown: transform.ConstrainContentWidth(contentAttributesAspectRatio) } } switch *transform.Rendition.Mode { case image.ModeFillDown, image.ModeFitDown, image.ModePadDown, image.ModeScaleDown: if transform.ContentWidth <= *contentAttributes.Width && transform.ContentHeight <= *contentAttributes.Height { transform.Rendition.Mode = pointer.FromString(image.NormalizeMode(*transform.Rendition.Mode)) } else { transform.Reset(contentAttributes) } } if *transform.Rendition.Width == transform.ContentWidth && *transform.Rendition.Height == transform.ContentHeight { transform.Rendition.Mode = pointer.FromString(image.ModeScale) } if *transform.Rendition.Mode != image.ModePad && (!contentAttributes.SupportsTransparency() || transform.Rendition.SupportsTransparency()) { transform.Rendition.Background = nil } transform.Resize = transform.ContentWidth != *contentAttributes.Width || transform.ContentHeight != *contentAttributes.Height transform.Crop = *transform.Rendition.Mode == image.ModeFill return transform, nil } func (t *TransformerImpl) TransformContent(reader io.Reader, transform *Transform) (io.ReadCloser, error) { if reader == nil { return nil, errors.New("reader is missing") } if transform == nil { return nil, errors.New("transform is missing") } 
else if err := structureValidator.New().Validate(transform); err != nil { return nil, errors.Wrap(err, "transform is invalid") } var format imaging.Format var encodeOptions []imaging.EncodeOption switch *transform.Rendition.MediaType { case image.MediaTypeImageJPEG: format = imaging.JPEG if transform.Rendition.Quality != nil { encodeOptions = append(encodeOptions, imaging.JPEGQuality(*transform.Rendition.Quality)) } else { encodeOptions = append(encodeOptions, imaging.JPEGQuality(image.QualityDefault)) } case image.MediaTypeImagePNG: format = imaging.PNG encodeOptions = append(encodeOptions, imaging.PNGCompressionLevel(pngCompressionLevelDefault)) } content, err := imaging.Decode(reader) if err != nil { return nil, errors.Wrapf(err, "unable to decode content") } if transform.Resize { content = imaging.Resize(content, transform.ContentWidth, transform.ContentHeight, resampleFilterDefault) } if transform.Rendition.Background != nil { if transform.Rendition.SupportsTransparency() { content = imaging.PasteCenter(imaging.New(*transform.Rendition.Width, *transform.Rendition.Height, *transform.Rendition.Background), content) } else { content = imaging.OverlayCenter(imaging.New(*transform.Rendition.Width, *transform.Rendition.Height, *transform.Rendition.Background), content, 1.0) } } else if transform.Crop { content = imaging.CropCenter(content, *transform.Rendition.Width, *transform.Rendition.Height) } pipeReader, pipeWriter := io.Pipe() go func() { var encodeErr error defer func() { pipeWriter.CloseWithError(encodeErr) }() if encodeErr = imaging.Encode(pipeWriter, content, format, encodeOptions...); encodeErr != nil { encodeErr = errors.Wrapf(encodeErr, "unable to encode content") } }() return pipeReader, nil } const pngCompressionLevelDefault = png.DefaultCompression var resampleFilterDefault = imaging.Lanczos
image/transform/transform.go
0.751739
0.465934
transform.go
starcoder
package generator import ( "fmt" ) type loopKind = int const ( niceLoop loopKind = iota strongLoop weakLoop ) func (g *Grid) xCycles(verbose uint) (res bool) { // Find all strong links. A pair of points form a strong link if they contain the only two instances of a digit within a unit (box, column, or row). var strongLinks [10]map[unitLink]bool g.findStrongLinks(&box, &strongLinks) g.findStrongLinks(&col, &strongLinks) g.findStrongLinks(&row, &strongLinks) // Find all weak links. A pair of points form a weak link if they contain the two instances of a digit within a unit (box, column, or row). There can be other instances of the digit in the same unit. var weakLinks [10]map[unitLink]bool g.findXCycleWeakLinks(&box, &weakLinks) g.findXCycleWeakLinks(&col, &weakLinks) g.findXCycleWeakLinks(&row, &weakLinks) for d := 1; d <= 9; d++ { // Process nice chains. niceChain := findCycle(d, niceLoop, strongLinks[d], weakLinks[d]) var overlap [rows][cols]bool for _, c := range niceChain { if c.strong { continue } // Only consider weak links. nl := neighbors(c.left) nr := neighbors(c.right) for r := zero; r < rows; r++ { for c := zero; c < cols; c++ { overlap[r][c] = nl[r][c] && nr[r][c] } } overlap[c.left.r][c.left.c] = false // Members of the chain cannot be removed. overlap[c.right.r][c.right.c] = false // In each cell that is seen by both ends of a weak chain link, the digit can be removed. for r := zero; r < rows; r++ { for c := zero; c < cols; c++ { if overlap[r][c] { if g.pt(point{r, c}).andNot(1 << d) { g.cellChange(&res, verbose, "xCycles: nice chain removes %d from %s\n", d, point{r, c}) } } } } } } if res { return } for d := 1; d <= 9; d++ { // Process strong chains. strongChain := findCycle(d, strongLoop, strongLinks[d], weakLinks[d]) // Find the strong discontinuity (two strong links in a row) and fix the digit at the intersection to the current digit. 
length := len(strongChain) if length >= 3 { first := strongChain[0] last := strongChain[length-1] if first.strong && last.strong { // If the first and last links are strong, the discontinuity is the last point in the chain (last.right). if g.pt(last.right).setTo(1 << d) { g.cellChange(&res, verbose, "xCycles: strong chain sets %s to %d\n", last.right, d) } } else { // Search the chain for the discontinuity. for i := 0; i < length-1; i++ { if strongChain[i].strong && strongChain[i+1].strong { if g.pt(strongChain[i].right).setTo(1 << d) { g.cellChange(&res, verbose, "xCycles: strong chain sets %s to %d\n", strongChain[i].right, d) } break // Once we find the discontinuity, we can stop looking because there can be only one ("Highlander"). } } } } } if res { return } for d := 1; d <= 9; d++ { // Process weak chains. weakChain := findCycle(d, weakLoop, strongLinks[d], weakLinks[d]) // Find the weak discontinuity (two weak links in a row) and remove the current digit at the intersection as a candidate. length := len(weakChain) if length >= 3 { first := weakChain[0] last := weakChain[length-1] if !first.strong && !last.strong { // If the first and last links are weak, the discontinuity is the last point in the chain (last.right). if g.pt(last.right).andNot(1 << d) { g.cellChange(&res, verbose, "xCycles: weak chain removes %d from %s\n", d, last.right) } } else { // Search the chain for the discontinuity. for i := 0; i < length-1; i++ { if !weakChain[i].strong && !weakChain[i+1].strong { if g.pt(weakChain[i].right).andNot(1 << d) { g.cellChange(&res, verbose, "xCycles: weak chain removes %d from %s\n", d, weakChain[i].right) } break // Once we find the discontinuity, we can stop looking because there can be only one ("Highlander"). 
} } } } } return } func (g *Grid) checkUnit(d int, p1, p2 point) bool { b1 := boxOfPoint(p1) b2 := boxOfPoint(p2) if b1 == b2 { count := 0 for _, p := range box.unit[b1] { if *g.pt(p)&cell(1<<d) != 0 { count++ } if count > 2 { return true } } } if p1.c == p2.c { count := 0 for _, p := range col.unit[b1] { if *g.pt(p)&cell(1<<d) != 0 { count++ } if count > 2 { return true } } } if p1.r == p2.r { count := 0 for _, p := range row.unit[b1] { if *g.pt(p)&cell(1<<d) != 0 { count++ } if count > 2 { return true } } } return false } func (g *Grid) findXCycleWeakLinks(gr *group, weakLinks *[10]map[unitLink]bool) { for pi, ps := range gr.unit { points := g.digitPoints(ps) for d := 1; d <= 9; d++ { p := points[d] if len(p) < 3 { continue } w := &(*weakLinks)[d] if *w == nil { *w = make(map[unitLink]bool) } for _, p1 := range p { for _, p2 := range p { if p1 == p2 { continue } (*w)[sortLink(unitLink{link{pair{p1, p2}, d}, fmt.Sprintf("%s %d", gr.name, pi), false})] = true } } } } } func chainLoops(chain []unitLink) bool { if len(chain) == 0 { return false } return chain[0].left == chain[len(chain)-1].right } func chainValid(checkComplete bool, kind loopKind, chain []unitLink) bool { doubleStrong := false doubleWeak := false switch len(chain) { case 0: return false case 1: return true default: if checkComplete && len(chain) > 2 { // If the chain wraps around and the first and last links match in strength, we need to account for that. 
first := chain[0] last := chain[len(chain)-1] if first.left == last.right { if last.strong == first.strong { if first.strong { doubleStrong = true } else { doubleWeak = true } } } } for i := 0; i < len(chain)-1; i++ { if chain[i].strong && chain[i+1].strong { if doubleStrong { return false } doubleStrong = true } else if !chain[i].strong && !chain[i+1].strong { if doubleWeak { return false } doubleWeak = true } } } if doubleStrong && doubleWeak { return false } switch kind { case niceLoop: return !doubleStrong && !doubleWeak case strongLoop: if checkComplete { return doubleStrong && !doubleWeak } return true case weakLoop: if checkComplete { return !doubleStrong && doubleWeak } return true } return false } func findCycle(digit int, kind loopKind, strongLinks, weakLinks map[unitLink]bool) (res []unitLink) { // Try each strong link as the start of the chain and keep the longest chain we can form. for s := range strongLinks { chain := []unitLink{s} best := []unitLink{} findCycleRecursive(digit, kind, chain, &best, strongLinks, weakLinks) if len(best) > len(res) { res = res[:0] for _, c := range best { res = append(res, c) } } } return } func findCycleRecursive(digit int, kind loopKind, chain []unitLink, best *[]unitLink, strongLinks, weakLinks map[unitLink]bool) { // If the right side of the last item in the chain links back to the head, we are done. TODO: keep searching for a longer chain. 
if chainValid(true, kind, chain) && chainLoops(chain) { if len(chain) > len(*best) { *best = (*best)[:0] for _, c := range chain { *best = append(*best, c) } } } last := chain[len(chain)-1] var candidates []unitLink strongAllowed := false weakAllowed := false switch kind { case niceLoop: strongAllowed = !last.strong weakAllowed = last.strong case strongLoop: strongAllowed = true weakAllowed = last.strong case weakLoop: strongAllowed = !last.strong weakAllowed = true } if strongAllowed { strongOuter: for s := range strongLinks { reversed := unitLink{link{pair{s.right, s.left}, s.digit}, s.unit, s.strong} for _, c := range chain { if s.pair == c.pair || reversed.pair == c.pair { // Already in the chain, skip. continue strongOuter } } if last.unit == s.unit { continue } if last.right == s.left { candidates = append(candidates, s) } if last.right == reversed.left { candidates = append(candidates, reversed) } } } if weakAllowed { weakOuter: for w := range weakLinks { reversed := unitLink{link{pair{w.right, w.left}, w.digit}, w.unit, w.strong} for _, c := range chain { if w.pair == c.pair || reversed.pair == c.pair { // Already in the chain, skip. continue weakOuter } } if last.unit == w.unit { continue } if last.right == w.left { candidates = append(candidates, w) } if last.right == reversed.left { candidates = append(candidates, reversed) } } } for _, c := range candidates { chain = append(chain, c) if chainValid(false, kind, chain) { findCycleRecursive(digit, kind, chain, best, strongLinks, weakLinks) } chain = chain[:len(chain)-1] } return }
generator/xCycles.go
0.594787
0.557604
xCycles.go
starcoder
package graph import ( i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization" ) // WorkbookChart type WorkbookChart struct { Entity // Represents chart axes. Read-only. axes *WorkbookChartAxes; // Represents the datalabels on the chart. Read-only. dataLabels *WorkbookChartDataLabels; // Encapsulates the format properties for the chart area. Read-only. format *WorkbookChartAreaFormat; // Represents the height, in points, of the chart object. height *float64; // The distance, in points, from the left side of the chart to the worksheet origin. left *float64; // Represents the legend for the chart. Read-only. legend *WorkbookChartLegend; // Represents the name of a chart object. name *string; // Represents either a single series or collection of series in the chart. Read-only. series []WorkbookChartSeries; // Represents the title of the specified chart, including the text, visibility, position and formating of the title. Read-only. title *WorkbookChartTitle; // Represents the distance, in points, from the top edge of the object to the top of row 1 (on a worksheet) or the top of the chart area (on a chart). top *float64; // Represents the width, in points, of the chart object. width *float64; // The worksheet containing the current chart. Read-only. worksheet *WorkbookWorksheet; } // NewWorkbookChart instantiates a new workbookChart and sets the default values. func NewWorkbookChart()(*WorkbookChart) { m := &WorkbookChart{ Entity: *NewEntity(), } return m } // GetAxes gets the axes property value. Represents chart axes. Read-only. func (m *WorkbookChart) GetAxes()(*WorkbookChartAxes) { if m == nil { return nil } else { return m.axes } } // GetDataLabels gets the dataLabels property value. Represents the datalabels on the chart. Read-only. 
func (m *WorkbookChart) GetDataLabels()(*WorkbookChartDataLabels) { if m == nil { return nil } else { return m.dataLabels } } // GetFormat gets the format property value. Encapsulates the format properties for the chart area. Read-only. func (m *WorkbookChart) GetFormat()(*WorkbookChartAreaFormat) { if m == nil { return nil } else { return m.format } } // GetHeight gets the height property value. Represents the height, in points, of the chart object. func (m *WorkbookChart) GetHeight()(*float64) { if m == nil { return nil } else { return m.height } } // GetLeft gets the left property value. The distance, in points, from the left side of the chart to the worksheet origin. func (m *WorkbookChart) GetLeft()(*float64) { if m == nil { return nil } else { return m.left } } // GetLegend gets the legend property value. Represents the legend for the chart. Read-only. func (m *WorkbookChart) GetLegend()(*WorkbookChartLegend) { if m == nil { return nil } else { return m.legend } } // GetName gets the name property value. Represents the name of a chart object. func (m *WorkbookChart) GetName()(*string) { if m == nil { return nil } else { return m.name } } // GetSeries gets the series property value. Represents either a single series or collection of series in the chart. Read-only. func (m *WorkbookChart) GetSeries()([]WorkbookChartSeries) { if m == nil { return nil } else { return m.series } } // GetTitle gets the title property value. Represents the title of the specified chart, including the text, visibility, position and formating of the title. Read-only. func (m *WorkbookChart) GetTitle()(*WorkbookChartTitle) { if m == nil { return nil } else { return m.title } } // GetTop gets the top property value. Represents the distance, in points, from the top edge of the object to the top of row 1 (on a worksheet) or the top of the chart area (on a chart). 
func (m *WorkbookChart) GetTop()(*float64) { if m == nil { return nil } else { return m.top } } // GetWidth gets the width property value. Represents the width, in points, of the chart object. func (m *WorkbookChart) GetWidth()(*float64) { if m == nil { return nil } else { return m.width } } // GetWorksheet gets the worksheet property value. The worksheet containing the current chart. Read-only. func (m *WorkbookChart) GetWorksheet()(*WorkbookWorksheet) { if m == nil { return nil } else { return m.worksheet } } // GetFieldDeserializers the deserialization information for the current model func (m *WorkbookChart) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) { res := m.Entity.GetFieldDeserializers() res["axes"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartAxes() }) if err != nil { return err } if val != nil { m.SetAxes(val.(*WorkbookChartAxes)) } return nil } res["dataLabels"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartDataLabels() }) if err != nil { return err } if val != nil { m.SetDataLabels(val.(*WorkbookChartDataLabels)) } return nil } res["format"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartAreaFormat() }) if err != nil { return err } if val != nil { m.SetFormat(val.(*WorkbookChartAreaFormat)) } return nil } res["height"] = func (o interface{}, n 
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetFloat64Value() if err != nil { return err } if val != nil { m.SetHeight(val) } return nil } res["left"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetFloat64Value() if err != nil { return err } if val != nil { m.SetLeft(val) } return nil } res["legend"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartLegend() }) if err != nil { return err } if val != nil { m.SetLegend(val.(*WorkbookChartLegend)) } return nil } res["name"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetName(val) } return nil } res["series"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetCollectionOfObjectValues(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartSeries() }) if err != nil { return err } if val != nil { res := make([]WorkbookChartSeries, len(val)) for i, v := range val { res[i] = *(v.(*WorkbookChartSeries)) } m.SetSeries(res) } return nil } res["title"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookChartTitle() }) if err != nil { return err } if val != nil { m.SetTitle(val.(*WorkbookChartTitle)) } return nil } res["top"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := 
n.GetFloat64Value() if err != nil { return err } if val != nil { m.SetTop(val) } return nil } res["width"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetFloat64Value() if err != nil { return err } if val != nil { m.SetWidth(val) } return nil } res["worksheet"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error { val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookWorksheet() }) if err != nil { return err } if val != nil { m.SetWorksheet(val.(*WorkbookWorksheet)) } return nil } return res } func (m *WorkbookChart) IsNil()(bool) { return m == nil } // Serialize serializes information the current object func (m *WorkbookChart) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) { err := m.Entity.Serialize(writer) if err != nil { return err } { err = writer.WriteObjectValue("axes", m.GetAxes()) if err != nil { return err } } { err = writer.WriteObjectValue("dataLabels", m.GetDataLabels()) if err != nil { return err } } { err = writer.WriteObjectValue("format", m.GetFormat()) if err != nil { return err } } { err = writer.WriteFloat64Value("height", m.GetHeight()) if err != nil { return err } } { err = writer.WriteFloat64Value("left", m.GetLeft()) if err != nil { return err } } { err = writer.WriteObjectValue("legend", m.GetLegend()) if err != nil { return err } } { err = writer.WriteStringValue("name", m.GetName()) if err != nil { return err } } { cast := make([]i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, len(m.GetSeries())) for i, v := range m.GetSeries() { temp := v cast[i] = i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable(&temp) } err = writer.WriteCollectionOfObjectValues("series", cast) if err != nil { return err } } { err = 
writer.WriteObjectValue("title", m.GetTitle()) if err != nil { return err } } { err = writer.WriteFloat64Value("top", m.GetTop()) if err != nil { return err } } { err = writer.WriteFloat64Value("width", m.GetWidth()) if err != nil { return err } } { err = writer.WriteObjectValue("worksheet", m.GetWorksheet()) if err != nil { return err } } return nil } // SetAxes sets the axes property value. Represents chart axes. Read-only. func (m *WorkbookChart) SetAxes(value *WorkbookChartAxes)() { m.axes = value } // SetDataLabels sets the dataLabels property value. Represents the datalabels on the chart. Read-only. func (m *WorkbookChart) SetDataLabels(value *WorkbookChartDataLabels)() { m.dataLabels = value } // SetFormat sets the format property value. Encapsulates the format properties for the chart area. Read-only. func (m *WorkbookChart) SetFormat(value *WorkbookChartAreaFormat)() { m.format = value } // SetHeight sets the height property value. Represents the height, in points, of the chart object. func (m *WorkbookChart) SetHeight(value *float64)() { m.height = value } // SetLeft sets the left property value. The distance, in points, from the left side of the chart to the worksheet origin. func (m *WorkbookChart) SetLeft(value *float64)() { m.left = value } // SetLegend sets the legend property value. Represents the legend for the chart. Read-only. func (m *WorkbookChart) SetLegend(value *WorkbookChartLegend)() { m.legend = value } // SetName sets the name property value. Represents the name of a chart object. func (m *WorkbookChart) SetName(value *string)() { m.name = value } // SetSeries sets the series property value. Represents either a single series or collection of series in the chart. Read-only. func (m *WorkbookChart) SetSeries(value []WorkbookChartSeries)() { m.series = value } // SetTitle sets the title property value. Represents the title of the specified chart, including the text, visibility, position and formating of the title. Read-only. 
func (m *WorkbookChart) SetTitle(value *WorkbookChartTitle)() { m.title = value } // SetTop sets the top property value. Represents the distance, in points, from the top edge of the object to the top of row 1 (on a worksheet) or the top of the chart area (on a chart). func (m *WorkbookChart) SetTop(value *float64)() { m.top = value } // SetWidth sets the width property value. Represents the width, in points, of the chart object. func (m *WorkbookChart) SetWidth(value *float64)() { m.width = value } // SetWorksheet sets the worksheet property value. The worksheet containing the current chart. Read-only. func (m *WorkbookChart) SetWorksheet(value *WorkbookWorksheet)() { m.worksheet = value }
models/microsoft/graph/workbook_chart.go
0.716913
0.497315
workbook_chart.go
starcoder
package geometry

import (
	"math"

	"github.com/thommil/tge-g3n/gls"
	"github.com/thommil/tge-g3n/math32"
)

// Cylinder represents a cylinder geometry
type Cylinder struct {
	Geometry                // embedded base geometry (positions/normals/uvs buffers, groups)
	RadiusTop      float64  // radius of the top rim; 0 produces a cone tip
	RadiusBottom   float64  // radius of the bottom rim
	Height         float64  // total height along the Y axis, centered on the origin
	RadialSegments int      // number of segments around the circumference
	HeightSegments int      // number of segmented rows along the height
	ThetaStart     float64  // start angle, in radians, of the circular sweep
	ThetaLength    float64  // central angle, in radians, of the circular sweep
	Top            bool     // whether to generate the top cap
	Bottom         bool     // whether to generate the bottom cap
}

// NewCylinder creates and returns a pointer to a new Cylinder geometry object.
// The mesh is built as up to three groups (material indices 0..2): the lateral
// body, the optional top cap, and the optional bottom cap.
func NewCylinder(radiusTop, radiusBottom, height float64, radialSegments, heightSegments int, thetaStart, thetaLength float64, top, bottom bool) *Cylinder {

	c := new(Cylinder)
	c.Geometry.Init()
	c.RadiusTop = radiusTop
	c.RadiusBottom = radiusBottom
	c.Height = height
	c.RadialSegments = radialSegments
	c.HeightSegments = heightSegments
	c.ThetaStart = thetaStart
	c.ThetaLength = thetaLength
	c.Top = top
	c.Bottom = bottom

	heightHalf := height / 2
	// vertices[y][x] holds the position-buffer index of the vertex at height row y,
	// radial column x; uvsOrig mirrors it with the corresponding texture coordinates.
	vertices := [][]int{}
	uvsOrig := [][]math32.Vector2{}

	// Create buffer for vertex positions
	positions := math32.NewArrayF32(0, 0)

	for y := 0; y <= heightSegments; y++ {
		var verticesRow = []int{}
		var uvsRow = []math32.Vector2{}
		// v runs 0 -> 1 from the top row to the bottom row; the radius is
		// interpolated linearly between radiusTop and radiusBottom.
		v := float64(y) / float64(heightSegments)
		radius := v*(radiusBottom-radiusTop) + radiusTop
		for x := 0; x <= radialSegments; x++ {
			u := float64(x) / float64(radialSegments)
			var vertex math32.Vector3
			vertex.X = float32(radius * math.Sin(u*thetaLength+thetaStart))
			vertex.Y = float32(-v*height + heightHalf)
			vertex.Z = float32(radius * math.Cos(u*thetaLength+thetaStart))
			positions.AppendVector3(&vertex)
			verticesRow = append(verticesRow, positions.Size()/3-1)
			uvsRow = append(uvsRow, math32.Vector2{float32(u), 1.0 - float32(v)})
		}
		vertices = append(vertices, verticesRow)
		uvsOrig = append(uvsOrig, uvsRow)
	}

	// tanTheta is the slope of the lateral surface; it tilts the Y component of
	// the side normals so they are perpendicular to the slanted wall.
	tanTheta := (radiusBottom - radiusTop) / height
	var na, nb math32.Vector3

	// Create preallocated buffers for normals and uvs and buffer for indices
	npos := positions.Size()
	normals := math32.NewArrayF32(npos, npos)
	uvs := math32.NewArrayF32(2*npos/3, 2*npos/3)
	indices := math32.NewArrayU32(0, 0)

	for x := 0; x < radialSegments; x++ {
		// Derive the side normals for this column from the top row, or from the
		// second row when the top radius is zero (degenerate apex).
		if radiusTop != 0 {
			positions.GetVector3(3*vertices[0][x], &na)
			positions.GetVector3(3*vertices[0][x+1], &nb)
		} else {
			positions.GetVector3(3*vertices[1][x], &na)
			positions.GetVector3(3*vertices[1][x+1], &nb)
		}
		na.SetY(float32(math.Sqrt(float64(na.X*na.X+na.Z*na.Z)) * tanTheta)).Normalize()
		nb.SetY(float32(math.Sqrt(float64(nb.X*nb.X+nb.Z*nb.Z)) * tanTheta)).Normalize()
		for y := 0; y < heightSegments; y++ {
			// Quad corners: v1 top-left, v2 bottom-left, v3 bottom-right, v4 top-right.
			v1 := vertices[y][x]
			v2 := vertices[y+1][x]
			v3 := vertices[y+1][x+1]
			v4 := vertices[y][x+1]
			n1 := na
			n2 := na
			n3 := nb
			n4 := nb
			uv1 := uvsOrig[y][x]
			uv2 := uvsOrig[y+1][x]
			uv3 := uvsOrig[y+1][x+1]
			uv4 := uvsOrig[y][x+1]
			// Two triangles per quad.
			indices.Append(uint32(v1), uint32(v2), uint32(v4))
			normals.SetVector3(3*v1, &n1)
			normals.SetVector3(3*v2, &n2)
			normals.SetVector3(3*v4, &n4)
			indices.Append(uint32(v2), uint32(v3), uint32(v4))
			normals.SetVector3(3*v2, &n2)
			normals.SetVector3(3*v3, &n3)
			normals.SetVector3(3*v4, &n4)
			uvs.SetVector2(2*v1, &uv1)
			uvs.SetVector2(2*v2, &uv2)
			uvs.SetVector2(2*v3, &uv3)
			uvs.SetVector2(2*v4, &uv4)
		}
	}

	// First group is the body of the cylinder
	// without the caps
	c.AddGroup(0, indices.Size(), 0)
	nextGroup := indices.Size()

	// Top cap
	if top && radiusTop > 0 {

		// Indices of the vertices appended below, used to build the cap faces.
		indicesOrig := []uint32{}
		nextidx := positions.Size() / 3

		// Appends the top ring vertices (each paired with a dedicated center
		// vertex so every triangle gets its own center UV) and records their indices.
		var uv1, uv2, uv3 math32.Vector2
		for x := 0; x < radialSegments; x++ {
			uv1 = uvsOrig[0][x]
			uv2 = uvsOrig[0][x+1]
			uv3 = math32.Vector2{uv2.X, 0}

			// Appends CENTER with its own UV.
			positions.Append(0, float32(heightHalf), 0)
			normals.Append(0, 1, 0)
			uvs.AppendVector2(&uv3)
			indicesOrig = append(indicesOrig, uint32(nextidx))
			nextidx++

			// Appends vertex
			v := math32.Vector3{}
			vi := vertices[0][x]
			positions.GetVector3(3*vi, &v)
			positions.AppendVector3(&v)
			normals.Append(0, 1, 0)
			uvs.AppendVector2(&uv1)
			indicesOrig = append(indicesOrig, uint32(nextidx))
			nextidx++
		}

		// Appends copy of first vertex (center)
		var vertex, normal math32.Vector3
		var uv math32.Vector2
		positions.GetVector3(3*int(indicesOrig[0]), &vertex)
		normals.GetVector3(3*int(indicesOrig[0]), &normal)
		uvs.GetVector2(2*int(indicesOrig[0]), &uv)
		positions.AppendVector3(&vertex)
		normals.AppendVector3(&normal)
		uvs.AppendVector2(&uv)
		indicesOrig = append(indicesOrig, uint32(nextidx))
		nextidx++

		// Appends copy of second vertex (v1) USING LAST UV2
		positions.GetVector3(3*int(indicesOrig[1]), &vertex)
		normals.GetVector3(3*int(indicesOrig[1]), &normal)
		positions.AppendVector3(&vertex)
		normals.AppendVector3(&normal)
		uvs.AppendVector2(&uv2)
		indicesOrig = append(indicesOrig, uint32(nextidx))
		nextidx++

		// Append faces indicesOrig
		for x := 0; x < radialSegments; x++ {
			pos := 2 * x
			i1 := indicesOrig[pos]
			i2 := indicesOrig[pos+1]
			i3 := indicesOrig[pos+3]
			indices.Append(uint32(i1), uint32(i2), uint32(i3))
		}

		// Second group is optional top cap of the cylinder
		c.AddGroup(nextGroup, indices.Size()-nextGroup, 1)
		nextGroup = indices.Size()
	}

	// Bottom cap
	if bottom && radiusBottom > 0 {

		// Indices of the vertices appended below, used to build the cap faces.
		indicesOrig := []uint32{}
		nextidx := positions.Size() / 3

		// Appends the bottom ring vertices and records their indices (same scheme
		// as the top cap, but using the last height row and downward normals).
		var uv1, uv2, uv3 math32.Vector2
		for x := 0; x < radialSegments; x++ {
			uv1 = uvsOrig[heightSegments][x]
			uv2 = uvsOrig[heightSegments][x+1]
			uv3 = math32.Vector2{uv2.X, 1}

			// Appends CENTER with its own UV.
			positions.Append(0, float32(-heightHalf), 0)
			normals.Append(0, -1, 0)
			uvs.AppendVector2(&uv3)
			indicesOrig = append(indicesOrig, uint32(nextidx))
			nextidx++

			// Appends vertex
			v := math32.Vector3{}
			vi := vertices[heightSegments][x]
			positions.GetVector3(3*vi, &v)
			positions.AppendVector3(&v)
			normals.Append(0, -1, 0)
			uvs.AppendVector2(&uv1)
			indicesOrig = append(indicesOrig, uint32(nextidx))
			nextidx++
		}

		// Appends copy of first vertex (center)
		var vertex, normal math32.Vector3
		var uv math32.Vector2
		positions.GetVector3(3*int(indicesOrig[0]), &vertex)
		normals.GetVector3(3*int(indicesOrig[0]), &normal)
		uvs.GetVector2(2*int(indicesOrig[0]), &uv)
		positions.AppendVector3(&vertex)
		normals.AppendVector3(&normal)
		uvs.AppendVector2(&uv)
		indicesOrig = append(indicesOrig, uint32(nextidx))
		nextidx++

		// Appends copy of second vertex (v1) USING LAST UV2
		positions.GetVector3(3*int(indicesOrig[1]), &vertex)
		normals.GetVector3(3*int(indicesOrig[1]), &normal)
		positions.AppendVector3(&vertex)
		normals.AppendVector3(&normal)
		uvs.AppendVector2(&uv2)
		indicesOrig = append(indicesOrig, uint32(nextidx))
		nextidx++

		// Appends faces indicesOrig (winding reversed relative to the top cap
		// so the bottom faces point outward/down).
		for x := 0; x < radialSegments; x++ {
			pos := 2 * x
			i1 := indicesOrig[pos]
			i2 := indicesOrig[pos+3]
			i3 := indicesOrig[pos+1]
			indices.Append(uint32(i1), uint32(i2), uint32(i3))
		}

		// Third group is optional bottom cap of the cylinder
		c.AddGroup(nextGroup, indices.Size()-nextGroup, 2)
	}

	c.SetIndices(indices)
	c.AddVBO(gls.NewVBO(positions).AddAttrib(gls.VertexPosition))
	c.AddVBO(gls.NewVBO(normals).AddAttrib(gls.VertexNormal))
	c.AddVBO(gls.NewVBO(uvs).AddAttrib(gls.VertexTexcoord))

	return c
}
geometry/cylinder.go
0.76908
0.658637
cylinder.go
starcoder
package tools import ( "fmt" "gopkg.in/bblfsh/sdk.v2/uast/nodes" "gopkg.in/bblfsh/sdk.v2/uast/query" "gopkg.in/bblfsh/sdk.v2/uast/query/xpath" ) // NewContext creates a new query context. func NewContext(root nodes.Node) *Context { return &Context{ root: root, xpath: xpath.New(), } } type Context struct { root nodes.Node xpath query.Interface } // Filter filters the tree and returns the iterator of nodes that satisfy the given query. func (c *Context) Filter(query string) (query.Iterator, error) { if query == "" { query = "//*" } return c.xpath.Execute(c.root, query) } // FilterNode filters the tree and returns a single node that satisfy the given query. func (c *Context) FilterNode(query string) (nodes.Node, error) { it, err := c.Filter(query) if err != nil { return nil, err } if !it.Next() { return nil, nil } nd, _ := it.Node().(nodes.Node) return nd, nil } // FilterValue evaluates a query and returns a results as a value. func (c *Context) FilterValue(query string) (nodes.Value, error) { nd, err := c.FilterNode(query) if err != nil { return nil, err } v, ok := nd.(nodes.Value) if !ok { return nil, fmt.Errorf("expected value, got: %T", nd) } return v, nil } // FilterNode evaluates a query and returns a results as a boolean value. func (c *Context) FilterBool(query string) (bool, error) { val, err := c.FilterValue(query) if err != nil { return false, err } v, _ := val.(nodes.Bool) return bool(v), nil } // FilterNumber evaluates a query and returns a results as a float64 value. func (c *Context) FilterNumber(query string) (float64, error) { val, err := c.FilterNode(query) if err != nil { return 0, err } switch val := val.(type) { case nodes.Float: return float64(val), nil case nodes.Int: return float64(val), nil case nodes.Uint: return float64(val), nil } return 0, fmt.Errorf("expected number, got: %T", val) } // FilterInt evaluates a query and returns a results as an int value. 
func (c *Context) FilterInt(query string) (int, error) { val, err := c.FilterNode(query) if err != nil { return 0, err } switch val := val.(type) { case nodes.Float: return int(val), nil case nodes.Int: return int(val), nil case nodes.Uint: return int(val), nil } return 0, fmt.Errorf("expected int, got: %T", val) } // FilterString evaluates a query and returns a results as a string value. func (c *Context) FilterString(query string) (string, error) { val, err := c.FilterNode(query) if err != nil { return "", err } v, ok := val.(nodes.String) if !ok { return "", fmt.Errorf("expected string, got: %T", val) } return string(v), nil } // Filter filters the tree and returns the iterator of nodes that satisfy the given query. func Filter(node nodes.Node, query string) (query.Iterator, error) { return NewContext(node).Filter(query) } // FilterNode filters the tree and returns a single node that satisfy the given query. func FilterNode(node nodes.Node, query string) (nodes.Node, error) { return NewContext(node).FilterNode(query) } // FilterValue evaluates a query and returns a results as a value. func FilterValue(node nodes.Node, query string) (nodes.Value, error) { return NewContext(node).FilterValue(query) } // FilterNode evaluates a query and returns a results as a boolean value. func FilterBool(node nodes.Node, query string) (bool, error) { return NewContext(node).FilterBool(query) } // FilterNumber evaluates a query and returns a results as a float64 value. func FilterNumber(node nodes.Node, query string) (float64, error) { return NewContext(node).FilterNumber(query) } // FilterInt evaluates a query and returns a results as an int value. func FilterInt(node nodes.Node, query string) (int, error) { return NewContext(node).FilterInt(query) } // FilterString evaluates a query and returns a results as a string value. func FilterString(node nodes.Node, query string) (string, error) { return NewContext(node).FilterString(query) }
tools/context.go
0.788909
0.479077
context.go
starcoder
package metrics

import (
	"sync"
	"time"
)

// Timers capture the duration and rate of events.
type Timer interface {
	Metric

	Count() int64
	Max() int64
	Mean() float64
	Min() int64
	Percentile(float64) float64
	Percentiles([]float64) []float64
	Rate1() float64
	Rate5() float64
	Rate15() float64
	RateMean() float64
	StdDev() float64
	Sum() int64
	Time(func())
	Update(time.Duration)
	UpdateSince(time.Time)
	Variance() float64
}

// NewCustomTimer constructs a new StandardTimer from a Histogram and a Meter.
// When UseNilMetrics is set, a no-op NilTimer is returned instead.
func NewCustomTimer(meta *MetricMeta, h Histogram, m Meter) Timer {
	if UseNilMetrics {
		return NilTimer{}
	}
	return &StandardTimer{
		MetricMeta: meta,
		histogram:  h,
		meter:      m,
	}
}

// NewTimer constructs a new StandardTimer using an exponentially-decaying
// sample with the same reservoir size and alpha as UNIX load averages.
func NewTimer(meta *MetricMeta) Timer {
	if UseNilMetrics {
		return NilTimer{}
	}
	return &StandardTimer{
		MetricMeta: meta,
		histogram:  NewHistogram(meta, NewExpDecaySample(1028, 0.015)),
		meter:      NewMeter(meta),
	}
}

// RegTimer constructs a new Timer from the given name and tags and registers
// it with MetricStats.
func RegTimer(name string, tagStrings ...string) Timer {
	tr := NewTimer(NewMetricMeta(name, tagStrings))
	MetricStats.Register(tr)
	return tr
}

// NilTimer is a no-op Timer.
type NilTimer struct {
	*MetricMeta
	h Histogram
	m Meter
}

// Count is a no-op.
func (NilTimer) Count() int64 { return 0 }

// Max is a no-op.
func (NilTimer) Max() int64 { return 0 }

// Mean is a no-op.
func (NilTimer) Mean() float64 { return 0.0 }

// Min is a no-op.
func (NilTimer) Min() int64 { return 0 }

// Percentile is a no-op.
func (NilTimer) Percentile(p float64) float64 { return 0.0 }

// Percentiles is a no-op.
func (NilTimer) Percentiles(ps []float64) []float64 {
	return make([]float64, len(ps))
}

// Rate1 is a no-op.
func (NilTimer) Rate1() float64 { return 0.0 }

// Rate5 is a no-op.
func (NilTimer) Rate5() float64 { return 0.0 }

// Rate15 is a no-op.
func (NilTimer) Rate15() float64 { return 0.0 }

// RateMean is a no-op.
func (NilTimer) RateMean() float64 { return 0.0 }

// Snapshot is a no-op.
func (n NilTimer) Snapshot() Metric { return n }

// StdDev is a no-op.
func (NilTimer) StdDev() float64 { return 0.0 }

// Sum is a no-op.
func (NilTimer) Sum() int64 { return 0 }

// Time is a no-op.
func (NilTimer) Time(func()) {}

// Update is a no-op.
func (NilTimer) Update(time.Duration) {}

// UpdateSince is a no-op.
func (NilTimer) UpdateSince(time.Time) {}

// Variance is a no-op.
func (NilTimer) Variance() float64 { return 0.0 }

// StandardTimer is the standard implementation of a Timer and uses a Histogram
// and Meter. The mutex guards the write paths (Update/UpdateSince) and
// Snapshot; read accessors delegate directly to the underlying histogram/meter.
type StandardTimer struct {
	*MetricMeta
	histogram Histogram
	meter     Meter
	mutex     sync.Mutex
}

// Count returns the number of events recorded.
func (t *StandardTimer) Count() int64 {
	return t.histogram.Count()
}

// Max returns the maximum value in the sample.
func (t *StandardTimer) Max() int64 {
	return t.histogram.Max()
}

// Mean returns the mean of the values in the sample.
func (t *StandardTimer) Mean() float64 {
	return t.histogram.Mean()
}

// Min returns the minimum value in the sample.
func (t *StandardTimer) Min() int64 {
	return t.histogram.Min()
}

// Percentile returns an arbitrary percentile of the values in the sample.
func (t *StandardTimer) Percentile(p float64) float64 {
	return t.histogram.Percentile(p)
}

// Percentiles returns a slice of arbitrary percentiles of the values in the
// sample.
func (t *StandardTimer) Percentiles(ps []float64) []float64 {
	return t.histogram.Percentiles(ps)
}

// Rate1 returns the one-minute moving average rate of events per second.
func (t *StandardTimer) Rate1() float64 {
	return t.meter.Rate1()
}

// Rate5 returns the five-minute moving average rate of events per second.
func (t *StandardTimer) Rate5() float64 {
	return t.meter.Rate5()
}

// Rate15 returns the fifteen-minute moving average rate of events per second.
func (t *StandardTimer) Rate15() float64 {
	return t.meter.Rate15()
}

// RateMean returns the meter's mean rate of events per second.
func (t *StandardTimer) RateMean() float64 {
	return t.meter.RateMean()
}

// Snapshot returns a read-only copy of the timer. The histogram and meter are
// snapshotted together under the mutex so the pair is mutually consistent.
func (t *StandardTimer) Snapshot() Metric {
	t.mutex.Lock()
	defer t.mutex.Unlock()
	return &TimerSnapshot{
		MetricMeta: t.MetricMeta,
		histogram:  t.histogram.Snapshot().(*HistogramSnapshot),
		meter:      t.meter.Snapshot().(*MeterSnapshot),
	}
}

// StdDev returns the standard deviation of the values in the sample.
func (t *StandardTimer) StdDev() float64 {
	return t.histogram.StdDev()
}

// Sum returns the sum in the sample.
func (t *StandardTimer) Sum() int64 {
	return t.histogram.Sum()
}

// Time records the duration of the execution of the given function.
func (t *StandardTimer) Time(f func()) {
	ts := time.Now()
	f()
	t.Update(time.Since(ts))
}

// Update records the duration of an event.
func (t *StandardTimer) Update(d time.Duration) {
	t.mutex.Lock()
	defer t.mutex.Unlock()
	t.histogram.Update(int64(d))
	t.meter.Mark(1)
}

// UpdateSince records the duration of an event that started at a time and ends now.
func (t *StandardTimer) UpdateSince(ts time.Time) {
	t.mutex.Lock()
	defer t.mutex.Unlock()
	t.histogram.Update(int64(time.Since(ts)))
	t.meter.Mark(1)
}

// Variance returns the variance of the values in the sample.
func (t *StandardTimer) Variance() float64 {
	return t.histogram.Variance()
}

// TimerSnapshot is a read-only copy of another Timer.
type TimerSnapshot struct {
	*MetricMeta
	histogram *HistogramSnapshot
	meter     *MeterSnapshot
}

// Count returns the number of events recorded at the time the snapshot was
// taken.
func (t *TimerSnapshot) Count() int64 { return t.histogram.Count() }

// Max returns the maximum value at the time the snapshot was taken.
func (t *TimerSnapshot) Max() int64 { return t.histogram.Max() }

// Mean returns the mean value at the time the snapshot was taken.
func (t *TimerSnapshot) Mean() float64 { return t.histogram.Mean() }

// Min returns the minimum value at the time the snapshot was taken.
func (t *TimerSnapshot) Min() int64 { return t.histogram.Min() }

// Percentile returns an arbitrary percentile of sampled values at the time the
// snapshot was taken.
func (t *TimerSnapshot) Percentile(p float64) float64 {
	return t.histogram.Percentile(p)
}

// Percentiles returns a slice of arbitrary percentiles of sampled values at
// the time the snapshot was taken.
func (t *TimerSnapshot) Percentiles(ps []float64) []float64 {
	return t.histogram.Percentiles(ps)
}

// Rate1 returns the one-minute moving average rate of events per second at the
// time the snapshot was taken.
func (t *TimerSnapshot) Rate1() float64 { return t.meter.Rate1() }

// Rate5 returns the five-minute moving average rate of events per second at
// the time the snapshot was taken.
func (t *TimerSnapshot) Rate5() float64 { return t.meter.Rate5() }

// Rate15 returns the fifteen-minute moving average rate of events per second
// at the time the snapshot was taken.
func (t *TimerSnapshot) Rate15() float64 { return t.meter.Rate15() }

// RateMean returns the meter's mean rate of events per second at the time the
// snapshot was taken.
func (t *TimerSnapshot) RateMean() float64 { return t.meter.RateMean() }

// Snapshot returns the snapshot.
func (t *TimerSnapshot) Snapshot() Metric { return t }

// StdDev returns the standard deviation of the values at the time the snapshot
// was taken.
func (t *TimerSnapshot) StdDev() float64 { return t.histogram.StdDev() }

// Sum returns the sum at the time the snapshot was taken.
func (t *TimerSnapshot) Sum() int64 { return t.histogram.Sum() }

// Time panics.
func (*TimerSnapshot) Time(func()) {
	panic("Time called on a TimerSnapshot")
}

// Update panics.
func (*TimerSnapshot) Update(time.Duration) {
	panic("Update called on a TimerSnapshot")
}

// UpdateSince panics.
func (*TimerSnapshot) UpdateSince(time.Time) {
	panic("UpdateSince called on a TimerSnapshot")
}

// Variance returns the variance of the values at the time the snapshot was
// taken.
func (t *TimerSnapshot) Variance() float64 { return t.histogram.Variance() }
pkg/metrics/timer.go
0.878549
0.483466
timer.go
starcoder
package filters

import (
	"regexp"

	"github.com/containerd/containerd/log"
)

// Filter matches specific resources based the provided filter
type Filter interface {
	Match(adaptor Adaptor) bool
}

// FilterFunc is a function that handles matching with an adaptor
type FilterFunc func(Adaptor) bool

// Match matches the FilterFunc returning true if the object matches the filter
func (fn FilterFunc) Match(adaptor Adaptor) bool {
	return fn(adaptor)
}

// Always is a filter that always returns true for any type of object
var Always FilterFunc = func(adaptor Adaptor) bool {
	return true
}

// Any allows multiple filters to be matched against the object
type Any []Filter

// Match returns true if any of the provided filters are true
func (m Any) Match(adaptor Adaptor) bool {
	for _, m := range m {
		if m.Match(adaptor) {
			return true
		}
	}
	return false
}

// All allows multiple filters to be matched against the object
type All []Filter

// Match only returns true if all filters match the object
func (m All) Match(adaptor Adaptor) bool {
	for _, m := range m {
		if !m.Match(adaptor) {
			return false
		}
	}
	return true
}

// operator identifies how a selector compares a field against its value.
type operator int

const (
	operatorPresent = iota // field exists ("?")
	operatorEqual          // field equals value ("==")
	operatorNotEqual       // field differs from value ("!=")
	operatorMatches        // field matches regexp value ("~=")
)

// String returns the query-syntax token for the operator.
func (op operator) String() string {
	switch op {
	case operatorPresent:
		return "?"
	case operatorEqual:
		return "=="
	case operatorNotEqual:
		return "!="
	case operatorMatches:
		return "~="
	}
	return "unknown"
}

// selector is a single fieldpath/operator/value comparison.
type selector struct {
	fieldpath []string
	operator  operator
	value     string
	re        *regexp.Regexp
}

// Match evaluates the selector against the field resolved through the adaptor.
// NOTE(review): the receiver is a value, so the `m.re = r` assignment below only
// mutates the local copy — the compiled regexp is never retained across calls and
// operatorMatches recompiles the pattern on every Match. Also note that, unlike
// operatorEqual, operatorNotEqual does not require the field to be present: an
// absent field yields the zero value "" which compares against m.value.
func (m selector) Match(adaptor Adaptor) bool {
	value, present := adaptor.Field(m.fieldpath)

	switch m.operator {
	case operatorPresent:
		return present
	case operatorEqual:
		return present && value == m.value
	case operatorNotEqual:
		return value != m.value
	case operatorMatches:
		if m.re == nil {
			r, err := regexp.Compile(m.value)
			if err != nil {
				// An invalid pattern is logged and treated as a non-match.
				log.L.Errorf("error compiling regexp %q", m.value)
				return false
			}

			m.re = r
		}

		return m.re.MatchString(value)
	default:
		return false
	}
}
vendor/github.com/containerd/containerd/filters/filter.go
0.775095
0.441071
filter.go
starcoder
// Package merkle implements the hash computations for Merkle trees.
package merkle

import (
	"bytes"
	"runtime"

	"github.com/bnchain/bnchain/common"
	"github.com/bnchain/bnchain/types"
)

/* WARNING! If you're reading this because you're learning about crypto
and/or designing a new system that will use merkle trees, keep in mind
that the following merkle tree algorithm has a serious flaw related to
duplicate txids, resulting in a vulnerability (CVE-2012-2459).

The reason is that if the number of hashes in the list at a given time
is odd, the last one is duplicated before computing the next level (which
is unusual in Merkle trees). This results in certain sequences of
transactions leading to the same merkle root. For example, these two
trees:

             A               A
           /  \            /   \
          B     C         B       C
         / \    |        / \     / \
        D   E   F       D   E   F   F
       / \ / \ / \     / \ / \ / \ / \
       1 2 3 4 5 6     1 2 3 4 5 6 5 6

for transaction lists [1,2,3,4,5,6] and [1,2,3,4,5,6,5,6] (where 5 and
6 are repeated) result in the same root hash A (because the hash of both
of (F) and (F,F) is C).

The vulnerability results from being able to send a block with such a
transaction list, with the same merkle root, and the same block hash as
the original without duplication, resulting in failed validation. If the
receiving node proceeds to mark that block as permanently invalid
however, it will fail to accept further unmodified (and thus potentially
valid) versions of the same block. We defend against this by detecting
the case where we would hash two identical hashes at the end of the list
together, and treating that identically to the block having an invalid
merkle root. Assuming no double-SHA256 collisions, this will detect all
known ways of changing the transactions without affecting the merkle
root.
*/

/*GetMerkleRoot This implements a constant-space merkle root/path calculator, limited to 2^32 leaves. */
// getMerkleRoot reduces the leaf hashes level by level until a single root
// remains. NOTE: it writes intermediate hashes back into the slice it is
// given, so the caller's slice contents are overwritten.
func getMerkleRoot(hashes [][]byte) []byte {
	cache := make([]byte, 64)
	level := 0
	for len(hashes) > 1 {
		if len(hashes)&1 != 0 { // odd count: duplicate the last hash (see CVE warning above)
			hashes = append(hashes, hashes[len(hashes)-1])
		}
		index := 0
		for i := 0; i < len(hashes); i += 2 {
			hashes[index] = GetHashFromTwoHash(cache, hashes[i], hashes[i+1])
			index++
		}
		level++
		hashes = hashes[0:index]
	}
	if len(hashes) == 0 {
		return nil
	}
	return hashes[0]
}

// log2 returns floor(log2(data)) for data > 1, 0 for data <= 0 and 1 otherwise.
func log2(data int) int {
	level := 1
	if data <= 0 {
		return 0
	}
	for {
		data = data / 2
		if data <= 1 {
			return level
		}
		level++
	}
}

// pow2 returns 2^d (1 for d <= 0).
func pow2(d int) (p int) {
	if d <= 0 {
		return 1
	}
	p = 1
	for i := 0; i < d; i++ {
		p *= 2
	}
	return p
}

// calcLevel returns the number of reduction levels needed for n leaves,
// rounding odd counts up at each level (matching getMerkleRoot's duplication).
func calcLevel(n int) int {
	if n == 1 {
		return 1
	}
	level := 0
	for n > 1 {
		if n&1 != 0 {
			n++
		}
		n = n / 2
		level++
	}
	return level
}

// getMerkleRootPad computes the root of a partial group, then keeps hashing the
// result with itself until it has the same height as a full group of `step`
// leaves, so partial and full groups can be combined at the same level.
func getMerkleRootPad(hashes [][]byte, step int) []byte {
	level1 := calcLevel(len(hashes))
	level2 := log2(step)
	var root []byte
	cache := make([]byte, 64)
	if len(hashes) == 1 {
		root = GetHashFromTwoHash(cache, hashes[0], hashes[0])
	} else {
		root = getMerkleRoot(hashes)
	}
	for i := 0; i < level2-level1; i++ {
		root = GetHashFromTwoHash(cache, root, root)
	}
	return root
}

// childstate carries one goroutine's subtree root back to the collector,
// tagged with the group index so results can be reassembled in order.
type childstate struct {
	hash  []byte
	index int
}

// GetMerkleRoot computes the merkle root, splitting the leaves into groups of
// up to 256 and hashing the groups concurrently (one goroutine per group) when
// there are enough leaves and CPUs to make it worthwhile.
// NOTE: like getMerkleRoot, this overwrites the contents of the input slice.
func GetMerkleRoot(hashes [][]byte) []byte {
	ncpu := runtime.NumCPU()
	if len(hashes) <= 80 || ncpu <= 1 {
		return getMerkleRoot(hashes)
	}
	step := log2(len(hashes) / ncpu)
	if step < 1 {
		step = 1
	}
	step = pow2(step)
	if step > 256 {
		step = 256
	}
	ch := make(chan *childstate, 10)
	// pad to step
	rem := len(hashes) % step
	l := len(hashes) / step
	if rem != 0 {
		l++
	}
	for i := 0; i < l; i++ {
		end := (i + 1) * step
		if end > len(hashes) {
			end = len(hashes)
		}
		child := hashes[i*step : end]
		go func(index int, h [][]byte) {
			var subhash []byte
			if len(h) != step {
				// last (short) group: pad its subtree to full group height
				subhash = getMerkleRootPad(h, step)
			} else {
				subhash = getMerkleRoot(h)
			}
			ch <- &childstate{
				hash:  subhash,
				index: index,
			}
		}(i, child)
	}
	childlist := make([][]byte, l)
	for i := 0; i < l; i++ {
		sub := <-ch
		childlist[sub.index] = sub.hash
	}
	return getMerkleRoot(childlist)
}

/*Computation This implements a constant-space merkle root/path calculator, limited to 2^32 leaves. */
// flage=1: compute only the root hash; flage=2: compute only the branch;
// flage=3: compute both the root hash and the branch.
// branchpos is the (0-based) leaf index whose authentication path is collected.
// mutated reports the CVE-2012-2459 condition: two identical hashes were
// combined at the end of a level.
func Computation(leaves [][]byte, flage int, branchpos uint32) (roothash []byte, mutated bool, pbranch [][]byte) {
	if len(leaves) == 0 {
		return nil, false, nil
	}
	if flage < 1 || flage > 3 {
		return nil, false, nil
	}
	var count int
	var branch [][]byte
	var level uint32
	var h []byte
	// inner[i] holds the running subtree hash at height i (constant space).
	inner := make([][]byte, 32)
	var matchlevel uint32 = 0xff
	mutated = false
	var matchh bool
	cache := make([]byte, 64)
	for count, h = range leaves {
		if (uint32(count) == branchpos) && (flage&2) != 0 {
			matchh = true
		} else {
			matchh = false
		}
		count++
		// While the count has trailing 1-bits, combine completed subtrees upward.
		for level = 0; 0 == ((count) & (1 << level)); level++ {
			// collect the branch if requested
			if (flage & 2) != 0 {
				if matchh {
					branch = append(branch, inner[level])
				} else if matchlevel == level {
					branch = append(branch, h)
					matchh = true
				}
			}
			if bytes.Equal(inner[level], h) {
				mutated = true
			}
			// hash inner[level] together with h
			h = GetHashFromTwoHash(cache, inner[level], h)
		}
		inner[level] = h
		if matchh {
			matchlevel = level
		}
	}
	// Finalize: find the lowest populated level, then fold the remaining
	// incomplete subtrees upward, duplicating where a sibling is missing.
	for level = 0; 0 == (count & (1 << level)); level++ {
	}
	h = inner[level]
	matchh = matchlevel == level
	for count != (1 << level) {
		if (flage&2) != 0 && matchh {
			branch = append(branch, h)
		}
		h = GetHashFromTwoHash(cache, h, h)
		count += (1 << level)
		level++
		// And propagate the result upwards accordingly.
		for 0 == (count & (1 << level)) {
			if (flage & 2) != 0 {
				if matchh {
					branch = append(branch, inner[level])
				} else if matchlevel == level {
					branch = append(branch, h)
					matchh = true
				}
			}
			h = GetHashFromTwoHash(cache, inner[level], h)
			level++
		}
	}
	return h, mutated, branch
}

// GetHashFromTwoHash computes the parent hash of the left and right child
// hashes. parent is a caller-supplied 64-byte scratch buffer reused across
// calls to avoid allocations; left and right are assumed to be 32 bytes each.
func GetHashFromTwoHash(parent []byte, left []byte, right []byte) []byte {
	if left == nil || right == nil {
		return nil
	}
	copy(parent, left)
	copy(parent[32:], right)
	return common.Sha2Sum(parent)
}

// GetMerkleBranch returns the authentication branch for the leaf at the given
// position (0-based).
func GetMerkleBranch(leaves [][]byte, position uint32) [][]byte {
	_, _, branchs := Computation(leaves, 2, position)
	return branchs
}

// GetMerkleRootFromBranch recomputes the root hash from a branch and a leaf,
// used to verify a proof for the leaf at the given index.
func GetMerkleRootFromBranch(merkleBranch [][]byte, leaf []byte, Index uint32) []byte {
	hash := leaf
	hashcache := make([]byte, 64)
	for _, branch := range merkleBranch {
		if (Index & 1) != 0 {
			hash = GetHashFromTwoHash(hashcache, branch, hash)
		} else {
			hash = GetHashFromTwoHash(hashcache, hash, branch)
		}
		Index >>= 1
	}
	return hash
}

// GetMerkleRootAndBranch returns the merkle root together with the branch of
// the leaf at the given position (0-based).
func GetMerkleRootAndBranch(leaves [][]byte, position uint32) (roothash []byte, branchs [][]byte) {
	roothash, _, branchs = Computation(leaves, 3, position)
	return
}

var zeroHash [32]byte

// CalcMerkleRoot computes the merkle root of the transactions' hashes.
// An empty transaction list yields the all-zero hash.
func CalcMerkleRoot(txs []*types.Transaction) []byte {
	var hashes [][]byte
	for _, tx := range txs {
		hashes = append(hashes, tx.Hash())
	}
	if hashes == nil {
		return zeroHash[:]
	}
	merkleroot := GetMerkleRoot(hashes)
	if merkleroot == nil {
		panic("calc merkle root error")
	}
	return merkleroot
}

// CalcMerkleRootCache computes the merkle root of cached transactions' hashes.
// An empty list yields the all-zero hash.
func CalcMerkleRootCache(txs []*types.TransactionCache) []byte {
	var hashes [][]byte
	for _, tx := range txs {
		hashes = append(hashes, tx.Hash())
	}
	if hashes == nil {
		return zeroHash[:]
	}
	merkleroot := GetMerkleRoot(hashes)
	if merkleroot == nil {
		panic("calc merkle root error")
	}
	return merkleroot
}
vendor/github.com/33cn/chain33/common/merkle/merkle.go
0.5083
0.505127
merkle.go
starcoder
package chunk import ( "bytes" "encoding/binary" "fmt" "github.com/df-mc/dragonfly/server/block/cube" "github.com/sandertv/gophertunnel/minecraft/nbt" "github.com/sandertv/gophertunnel/minecraft/protocol" "sync" ) const ( // CurrentBlockVersion is the current version of blocks (states) of the game. This version is composed // of 4 bytes indicating a version, interpreted as a big endian int. The current version represents // 172.16.17.32 {1, 16, 0, 14}. CurrentBlockVersion int32 = 17825806 ) // RuntimeIDToState must hold a function to convert a runtime ID to a name and its state properties. var RuntimeIDToState func(runtimeID uint32) (name string, properties map[string]interface{}, found bool) // StateToRuntimeID must hold a function to convert a name and its state properties to a runtime ID. var StateToRuntimeID func(name string, properties map[string]interface{}) (runtimeID uint32, found bool) const ( // SubChunkVersion is the current version of the written sub chunks, specifying the format they are // written on disk and over network. SubChunkVersion = 8 ) // SerialisedData holds the serialised data of a chunk. It consists of the chunk's block data itself, a height // map, the biomes and entities and block entities. type SerialisedData struct { // sub holds the data of the serialised sub chunks in a chunk. Sub chunks that are empty or that otherwise // don't exist are represented as an empty slice (or technically, nil). SubChunks [16][]byte // Data2D is the 2D data of the chunk, which is composed of the biome IDs (256 bytes) and optionally the // height map of the chunk. Data2D []byte // BlockNBT is an encoded NBT array of all blocks that carry additional NBT, such as chests, with all // their contents. BlockNBT []byte } // pool is used to pool byte buffers used for encoding chunks. 
var pool = sync.Pool{ New: func() interface{} { return bytes.NewBuffer(make([]byte, 0, 1024)) }, } // NetworkDecode decodes the network serialised data passed into a Chunk if successful. If not, the chunk // returned is nil and the error non-nil. // The sub chunk count passed must be that found in the LevelChunk packet. //noinspection GoUnusedExportedFunction func NetworkDecode(airRuntimeId uint32, data []byte, subChunkCount int) (*Chunk, error) { c, buf := New(airRuntimeId), bytes.NewBuffer(data) for y := 0; y < subChunkCount; y++ { ver, err := buf.ReadByte() if err != nil { return nil, fmt.Errorf("error reading version: %w", err) } c.sub[y] = NewSubChunk(airRuntimeId) switch ver { default: return nil, fmt.Errorf("unknown sub chunk version %v: can't decode", ver) case 1: // Version 1 only has one layer for each sub chunk, but uses the format with palettes. storage, err := networkDecodeBlockStorage(buf) if err != nil { return nil, err } c.sub[y].storages = append(c.sub[y].storages, storage) case 8: // Version 8 allows up to 256 layers for one sub chunk. storageCount, err := buf.ReadByte() if err != nil { return nil, fmt.Errorf("error reading storage count: %w", err) } c.sub[y].storages = make([]*BlockStorage, storageCount) for i := byte(0); i < storageCount; i++ { c.sub[y].storages[i], err = networkDecodeBlockStorage(buf) if err != nil { return nil, err } } } } if _, err := buf.Read(c.biomes[:]); err != nil { return nil, fmt.Errorf("error reading biomes: %w", err) } _, _ = buf.ReadByte() dec := nbt.NewDecoder(buf) for buf.Len() != 0 { var m map[string]interface{} if err := dec.Decode(&m); err != nil { return nil, fmt.Errorf("error decoding block entity: %w", err) } c.SetBlockNBT(cube.Pos{int(m["x"].(int32)), int(m["y"].(int32)), int(m["z"].(int32))}, m) } return c, nil } // NetworkEncode encodes a chunk passed to its network representation and returns it as a SerialisedData, // which may be sent over network. 
func NetworkEncode(c *Chunk) (d SerialisedData) {
	buf := pool.Get().(*bytes.Buffer)
	for y, sub := range c.sub {
		if sub == nil {
			// No need to put empty sub chunks in the SerialisedData.
			continue
		}
		_ = buf.WriteByte(SubChunkVersion)
		_ = buf.WriteByte(byte(len(sub.storages)))
		for _, storage := range sub.storages {
			// The low bit set to 1 marks the storage as network encoded (varint palette).
			_ = buf.WriteByte(byte(storage.bitsPerBlock<<1) | 1)
			b := make([]byte, len(storage.blocks)*4)
			for i, v := range storage.blocks {
				// Explicitly don't use the binary package to greatly improve performance of writing the uint32s.
				b[i*4], b[i*4+1], b[i*4+2], b[i*4+3] = byte(v), byte(v>>8), byte(v>>16), byte(v>>24)
			}
			_, _ = buf.Write(b)
			_ = protocol.WriteVarint32(buf, int32(storage.palette.Len()))
			for _, runtimeID := range storage.palette.blockRuntimeIDs {
				_ = protocol.WriteVarint32(buf, int32(runtimeID))
			}
		}
		// Reading from buf drains it, so it is empty again for the next sub chunk.
		d.SubChunks[y] = make([]byte, buf.Len())
		_, _ = buf.Read(d.SubChunks[y])
	}
	// Biomes followed by a single zero byte (presumably the border-block count — TODO confirm).
	d.Data2D = append(c.biomes[:], 0)
	buf.Reset()
	enc := nbt.NewEncoder(buf)
	for _, data := range c.blockEntities {
		_ = enc.Encode(data)
	}
	d.BlockNBT = append([]byte(nil), buf.Bytes()...)
	// Reset before returning the buffer to the pool so the next user starts clean.
	buf.Reset()
	pool.Put(buf)
	return
}

// emptyHeightMap is saved for the height map while it is not implemented.
var emptyHeightMap = make([]byte, 512)

// DiskEncode encodes a chunk to its disk representation, so that it may be stored in a database, giving other
// servers the ability to read the chunk.
func DiskEncode(c *Chunk, blob bool) (d SerialisedData) {
	buf := pool.Get().(*bytes.Buffer)
	for y, sub := range c.sub {
		if sub == nil || len(sub.storages) == 0 {
			// The sub chunk at this Y value is empty, so don't write it.
			continue
		}
		_ = buf.WriteByte(SubChunkVersion)
		_ = buf.WriteByte(byte(len(sub.storages)))
		for _, storage := range sub.storages {
			diskEncodeBlockStorage(buf, storage, blob)
		}
		d.SubChunks[y] = append([]byte(nil), buf.Bytes()...)
		buf.Reset()
	}
	// We simply write a zero slice for the height map, as there is little profit of writing it here.
	buf.Write(emptyHeightMap)
	buf.Write(c.biomes[:])
	d.Data2D = append([]byte(nil), buf.Bytes()...)
	// Block entity NBT is only written in blob mode; otherwise d.BlockNBT stays nil.
	if blob {
		buf.Reset()
		enc := nbt.NewEncoder(buf)
		for _, data := range c.blockEntities {
			_ = enc.Encode(data)
		}
		d.BlockNBT = append([]byte(nil), buf.Bytes()...)
	}
	buf.Reset()
	pool.Put(buf)
	return d
}

// DiskDecode decodes the data from a SerialisedData object into a chunk and returns it. If the data was
// invalid, an error is returned.
func DiskDecode(data SerialisedData) (*Chunk, error) {
	air, ok := StateToRuntimeID("minecraft:air", nil)
	if !ok {
		// Programmer error: the StateToRuntimeID hook was not (correctly) installed.
		panic("cannot find air runtime ID")
	}
	c := New(air)
	// Data2D is the 512-byte height map followed by the biome IDs; only the biomes are read back.
	copy(c.biomes[:], data.Data2D[512:])
	for y, sub := range data.SubChunks {
		if len(sub) == 0 {
			// No data for this sub chunk.
			continue
		}
		buf := bytes.NewBuffer(sub)
		ver, err := buf.ReadByte()
		if err != nil {
			return nil, fmt.Errorf("error reading version: %w", err)
		}
		c.sub[y] = NewSubChunk(air)
		switch ver {
		default:
			return nil, fmt.Errorf("unknown sub chunk version %v: can't decode", ver)
		case 1:
			// Version 1 only has one layer for each sub chunk, but uses the format with palettes.
			storage, err := diskDecodeBlockStorage(buf)
			if err != nil {
				return nil, err
			}
			c.sub[y].storages = append(c.sub[y].storages, storage)
		case 8:
			// Version 8 allows up to 256 layers for one sub chunk.
			storageCount, err := buf.ReadByte()
			if err != nil {
				return nil, fmt.Errorf("error reading storage count: %w", err)
			}
			c.sub[y].storages = make([]*BlockStorage, storageCount)
			for i := byte(0); i < storageCount; i++ {
				c.sub[y].storages[i], err = diskDecodeBlockStorage(buf)
				if err != nil {
					return nil, err
				}
			}
		}
	}
	return c, nil
}

// blockEntry represents a block as found in a disk save of a world.
type blockEntry struct {
	Name    string                 `nbt:"name"`
	State   map[string]interface{} `nbt:"states"`
	Version int32                  `nbt:"version"`
}

// diskEncodeBlockStorage encodes a block storage to its disk representation into the buffer passed.
func diskEncodeBlockStorage(buf *bytes.Buffer, storage *BlockStorage, blob bool) { _ = buf.WriteByte(byte(storage.bitsPerBlock << 1)) for _, b := range storage.blocks { _ = binary.Write(buf, binary.LittleEndian, b) } if !blob { _ = binary.Write(buf, binary.LittleEndian, int32(storage.palette.Len())) } else { _ = protocol.WriteVarint32(buf, int32(storage.palette.Len())) } blocks := make([]blockEntry, storage.palette.Len()) for index, runtimeID := range storage.palette.blockRuntimeIDs { // Get the block state registered with the runtime IDs we have in the palette of the block storage // as we need the name and data value to store. name, props, ok := RuntimeIDToState(runtimeID) if !ok { // Should never happen, but we panic with a reasonable error anyway. panic(fmt.Sprintf("cannot find block by runtime ID %v", runtimeID)) } blocks[index] = blockEntry{ Name: name, State: props, Version: CurrentBlockVersion, } } var encoding nbt.Encoding = nbt.LittleEndian if blob { encoding = nbt.NetworkLittleEndian } // Marshal the slice of block states into NBT and add it to the byte slice. enc := nbt.NewEncoderWithEncoding(buf, encoding) for _, b := range blocks { _ = enc.Encode(b) } } // networkDecodeBlockStorage decodes a block storage from the buffer passed, assuming it holds data for a // network encoded block storage, and returns it if successful. func networkDecodeBlockStorage(buf *bytes.Buffer) (*BlockStorage, error) { blockSize, err := buf.ReadByte() if err != nil { return nil, fmt.Errorf("error reading block size: %w", err) } blockSize >>= 1 // blocksPerUint32 is the amount of blocks that may be stored in a single uint32. blocksPerUint32 := 32 / int(blockSize) // uint32Count is the amount of uint32s required to store all blocks: 4096 blocks need to be stored in // total. uint32Count := 4096 / blocksPerUint32 if paletteSize(blockSize).padded() { // We've got one of the padded sizes, so the block storage has another uint32 to be able to store // every block. 
uint32Count++ } uint32s := make([]uint32, uint32Count) data := buf.Next(uint32Count * 4) if len(data) != uint32Count*4 { return nil, fmt.Errorf("cannot read block storage: not enough block data present: expected %v bytes, got %v", uint32Count*4, len(data)) } for i := 0; i < uint32Count; i++ { // Explicitly don't use the binary package to greatly improve performance of reading the uint32s. uint32s[i] = uint32(data[i*4]) | uint32(data[i*4+1])<<8 | uint32(data[i*4+2])<<16 | uint32(data[i*4+3])<<24 } var paletteCount int32 if err := protocol.Varint32(buf, &paletteCount); err != nil { return nil, fmt.Errorf("error reading palette entry count: %w", err) } if paletteCount <= 0 { return nil, fmt.Errorf("invalid palette entry count %v", paletteCount) } blocks, temp := make([]uint32, paletteCount), int32(0) for i := int32(0); i < paletteCount; i++ { if err := protocol.Varint32(buf, &temp); err != nil { return nil, fmt.Errorf("error decoding palette entry: %w", err) } blocks[i] = uint32(temp) } return newBlockStorage(uint32s, &Palette{blockRuntimeIDs: blocks, size: paletteSize(blockSize)}), nil } // diskDecodeBlockStorage decodes a block storage from the buffer passed. If not successful, an error is // returned. func diskDecodeBlockStorage(buf *bytes.Buffer) (*BlockStorage, error) { blockSize, err := buf.ReadByte() if err != nil { return nil, fmt.Errorf("error reading block size: %w", err) } blockSize >>= 1 // blocksPerUint32 is the amount of blocks that may be stored in a single uint32. blocksPerUint32 := 32 / int(blockSize) // uint32Count is the amount of uint32s required to store all blocks: 4096 blocks need to be stored in // total. uint32Count := 4096 / blocksPerUint32 if paletteSize(blockSize).padded() { // We've got one of the padded sizes, so the block storage has another uint32 to be able to store // every block. 
uint32Count++ } uint32s := make([]uint32, uint32Count) data := buf.Next(uint32Count * 4) if len(data) != uint32Count*4 { return nil, fmt.Errorf("cannot read block storage: not enough block data present: expected %v bytes, got %v", uint32Count*4, len(data)) } for i := 0; i < uint32Count; i++ { // Explicitly don't use the binary package to greatly improve performance of reading the uint32s. uint32s[i] = uint32(data[i*4]) | uint32(data[i*4+1])<<8 | uint32(data[i*4+2])<<16 | uint32(data[i*4+3])<<24 } // The next 4 bytes are an LE int32, but we simply read it and decode the int32 ourselves, as it's much // faster here. data = buf.Next(4) if len(data) != 4 { return nil, fmt.Errorf("cannot read palette entry count: expected 4 bytes, got %v", len(data)) } paletteCount := uint32(data[0]) | uint32(data[1])<<8 | uint32(data[2])<<16 | uint32(data[3])<<24 blocks := make([]blockEntry, paletteCount) dec := nbt.NewDecoderWithEncoding(buf, nbt.LittleEndian) // There are paletteCount NBT tags that represent unique blocks. for i := uint32(0); i < paletteCount; i++ { if err := dec.Decode(&blocks[i]); err != nil { return nil, fmt.Errorf("error decoding block: %w", err) } } palette := newPalette(paletteSize(blockSize), make([]uint32, paletteCount)) for i, b := range blocks { var ok bool palette.blockRuntimeIDs[i], ok = StateToRuntimeID(b.Name, b.State) if !ok { return nil, fmt.Errorf("cannot get runtime ID of block state %v{%+v}", b.Name, b.State) } } return newBlockStorage(uint32s, palette), nil }
server/world/chunk/data.go
0.642096
0.438905
data.go
starcoder
package geo

import (
	"fmt"
	"math"

	"github.com/adzr/mathex"
)

const (
	// DecimalPlaces is the number of decimal places considered in the geo-location point latlng values, to indicate the
	// precision of the coordinates.
	DecimalPlaces = 8
	// RoundOn is the decimal value considered when rounding the geo-location point latlng values.
	RoundOn = 0.5
	// TotalLongitude is a constant representing the maximum longitude value, ignoring the negative values.
	TotalLongitude float64 = 360.0
	// HalfLongitude is a constant representing the maximum longitude value, considering the negative values.
	HalfLongitude float64 = 180.0
	// NorthPoleLat is a constant representing the maximum latitude value, always at the center of the north pole.
	NorthPoleLat float64 = 90.0
	// SouthPoleLat is a constant representing the minimum latitude value, always at the center of the south pole.
	SouthPoleLat float64 = -90.0
)

// Point is a geo-location point represented by latitude and longitude (latlng) values.
type Point interface {
	// Latitude is an angle which ranges from 0° at the Equator to +90° at the North Pole or -90° at the South Pole.
	Latitude() float64
	// Longitude is an angle which ranges from 0° at the Prime Meridian to +180° eastward and −180° westward.
	Longitude() float64
}

// point is the default Point implementation.
type point struct {
	latitude  float64
	longitude float64
}

// String returns the point formatted as "(lat, lng)".
func (p *point) String() string {
	return fmt.Sprintf("(%v, %v)", p.Latitude(), p.Longitude())
}

// Latitude returns the latitude of the point, or 0 for a nil receiver.
func (p *point) Latitude() float64 {
	if p != nil {
		return p.latitude
	}
	return 0.0
}

// Longitude returns the longitude of the point, or 0 for a nil receiver.
func (p *point) Longitude() float64 {
	if p != nil {
		return p.longitude
	}
	return 0.0
}

// NewPoint creates a new geo-location point instance, given the latlng values.
// The function auto-normalizes the values to avoid over-calculation.
// It keeps the latitude value always capped between -90 and +90.
// It sets the longitude to 0 in case the latitude value is exactly equal to -90 or +90,
// since the longitude has no meaning if the latitude is at one of the two poles.
// It also keeps the longitude value always capped between almost -180 and +180. func NewPoint(latitude float64, longitude float64) Point { // create the point. p := &point{} // normalize latitude. if latitude > 0 { p.latitude = math.Min(latitude, NorthPoleLat) } else { p.latitude = math.Max(latitude, SouthPoleLat) } p.latitude = mathex.Round(p.latitude, DecimalPlaces, RoundOn) // normalize longitude. if math.Abs(p.latitude) == NorthPoleLat { p.longitude = 0 } else { remainder := math.Mod(longitude, TotalLongitude) if remainder <= -HalfLongitude { remainder += TotalLongitude } else if remainder > HalfLongitude { remainder -= TotalLongitude } p.longitude = remainder } p.longitude = mathex.Round(p.longitude, DecimalPlaces, RoundOn) return p }
point.go
0.828766
0.696958
point.go
starcoder
package golf

import (
	"encoding/binary"
	"fmt"
	"os"
)

// Set of constants which specify the type of segment in a program/segment
// header.
const (
	SegTypeNull              = uint32(0)
	SegTypeLoad              = uint32(1)
	SegTypeDynamic           = uint32(2)
	SegTypeInterp            = uint32(3)
	SegTypeNote              = uint32(4)
	SegTypeReserved          = uint32(5)
	SegTypeProgHdr           = uint32(6)
	SegTypeTLS               = uint32(7)
	SegTypeNumDefinedTypes   = uint32(8)
	SegTypeStartOSSpecific   = uint32(0x60000000)
	SegTypeGnuEHFrame        = uint32(0x6474e550)
	SegTypeGnuStack          = uint32(0x6474e551)
	SegTypeGnuRelRO          = uint32(0x6474e552)
	// SegTypeSunOSStart and SegTypeSunWBSS intentionally share a value, mirroring
	// the ELF specification's overlapping SunOS-specific range.
	SegTypeSunOSStart        = uint32(0x6ffffffa)
	SegTypeSunWBSS           = uint32(0x6ffffffa)
	SegTypeSunWStack         = uint32(0x6ffffffb)
	SegTypeSunOSEnd          = uint32(0x6fffffff)
	SegTypeEndOSSpecific     = uint32(0x6fffffff)
	SegTypeStartProcSpecific = uint32(0x70000000)
	SegTypeEndProcSpecific   = uint32(0x7fffffff)
)

// Set of constants which represent the segment flags.
// A segment header can include a flag generated from more than one
// of these values using the '|' operator. For example, a flag could
// be generated as the result of SegFlagsExecutable | SegFlagsReadable.
const (
	SegFlagsExecutable       = uint32(1 << 0)
	SegFlagsWritable         = uint32(1 << 1)
	SegFlagsReadable         = uint32(1 << 2)
	SegFlagsOSSpecificMask   = uint32(0x0ff00000)
	SegFlagsProcSpecificMask = uint32(0xf0000000)
)

// A SegHdr represents the entry in the program header table of an ELF file.
type SegHdr interface {
	// Class returns the class of the segment, which is the class of the ELF file to
	// which it belongs.
	Class() ELFClass
	// Type returns the type of the segment.
	Type() uint32
	// Offset returns the offset of the segment in the ELF file.
	Offset() uint64
	// VirtualAddress returns the virtual address of the segment.
	VirtualAddress() uint64
	// PhysicalAddress returns the physical address of the segment.
	PhysicalAddress() uint64
	// FileSize returns the size of the segment in the ELF file.
	FileSize() uint64
	// MemSize returns the size of the segment in the memory.
	MemSize() uint64
	// Flags returns the flags for the segment.
	Flags() uint32
	// Alignment returns the alignment of the segment.
	Alignment() uint64
}

// segHdr32 is the 32-bit implementation of SegHdr. diskData matches the on-disk
// Elf32_Phdr layout exactly so it can be filled with a single binary.Read.
type segHdr32 struct {
	diskData struct {
		Type            uint32
		Offset          uint32
		VirtualAddress  uint32
		PhysicalAddress uint32
		FileSize        uint32
		MemSize         uint32
		Flags           uint32
		Alignment       uint32
	}
}

func (hdr *segHdr32) Class() ELFClass          { return Class32 }
func (hdr *segHdr32) Type() uint32             { return hdr.diskData.Type }
func (hdr *segHdr32) Offset() uint64           { return uint64(hdr.diskData.Offset) }
func (hdr *segHdr32) VirtualAddress() uint64   { return uint64(hdr.diskData.VirtualAddress) }
func (hdr *segHdr32) PhysicalAddress() uint64  { return uint64(hdr.diskData.PhysicalAddress) }
func (hdr *segHdr32) FileSize() uint64         { return uint64(hdr.diskData.FileSize) }
func (hdr *segHdr32) MemSize() uint64          { return uint64(hdr.diskData.MemSize) }
func (hdr *segHdr32) Flags() uint32            { return hdr.diskData.Flags }
func (hdr *segHdr32) Alignment() uint64        { return uint64(hdr.diskData.Alignment) }

// segHdr64 is the 64-bit implementation of SegHdr. Note that the field order of
// Elf64_Phdr differs from Elf32_Phdr: Flags comes directly after Type.
type segHdr64 struct {
	diskData struct {
		Type            uint32
		Flags           uint32
		Offset          uint64
		VirtualAddress  uint64
		PhysicalAddress uint64
		FileSize        uint64
		MemSize         uint64
		Alignment       uint64
	}
}

func (hdr *segHdr64) Class() ELFClass          { return Class64 }
func (hdr *segHdr64) Type() uint32             { return hdr.diskData.Type }
func (hdr *segHdr64) Offset() uint64           { return hdr.diskData.Offset }
func (hdr *segHdr64) VirtualAddress() uint64   { return hdr.diskData.VirtualAddress }
func (hdr *segHdr64) PhysicalAddress() uint64  { return hdr.diskData.PhysicalAddress }
func (hdr *segHdr64) FileSize() uint64         { return hdr.diskData.FileSize }
func (hdr *segHdr64) MemSize() uint64          { return hdr.diskData.MemSize }
func (hdr *segHdr64) Flags() uint32            { return hdr.diskData.Flags }
func (hdr *segHdr64) Alignment() uint64        { return hdr.diskData.Alignment }

// readSegHdrTbl reads the program (segment) header table of the file passed,
// starting at the offset declared by the ELF header, and returns one SegHdr per
// entry. The class of the ELF header decides whether 32-bit or 64-bit entries
// are read. Errors are wrapped with %w so callers may inspect them with
// errors.Is/errors.As.
func readSegHdrTbl(file *os.File, header ELFHeader) ([]SegHdr, error) {
	if _, err := file.Seek(int64(header.ProgHdrTblOffset()), 0); err != nil {
		return nil, fmt.Errorf("seeking to the program header table in %q: %w", file.Name(), err)
	}
	// The byte order and class are invariant across the table, so resolve them
	// once instead of on every iteration.
	byteOrder := endianMap[header.ELFIdent().Endianess]
	is32 := header.ELFIdent().Class == Class32
	var segHdrTbl []SegHdr
	for i := uint16(0); i < header.ProgHdrCount(); i++ {
		var hdr SegHdr
		var err error
		if is32 {
			hdr32 := new(segHdr32)
			err = binary.Read(file, byteOrder, &hdr32.diskData)
			hdr = hdr32
		} else {
			hdr64 := new(segHdr64)
			err = binary.Read(file, byteOrder, &hdr64.diskData)
			hdr = hdr64
		}
		if err != nil {
			return nil, fmt.Errorf("reading segment header %d from %q: %w", i, file.Name(), err)
		}
		segHdrTbl = append(segHdrTbl, hdr)
	}
	return segHdrTbl, nil
}
golf/segments.go
0.644561
0.460168
segments.go
starcoder
package webgl const ( TEXTURE0 = 0x84C0 // A texture unit. TEXTURE1 = 0x84C1 // A texture unit. TEXTURE2 = 0x84C2 // A texture unit. TEXTURE3 = 0x84C3 // A texture unit. TEXTURE4 = 0x84C4 // A texture unit. TEXTURE5 = 0x84C5 // A texture unit. TEXTURE6 = 0x84C6 // A texture unit. TEXTURE7 = 0x84C7 // A texture unit. TEXTURE8 = 0x84C8 // A texture unit. TEXTURE9 = 0x84C9 // A texture unit. TEXTURE10 = 0x84CA // A texture unit. TEXTURE11 = 0x84CB // A texture unit. TEXTURE12 = 0x84CC // A texture unit. TEXTURE13 = 0x84CD // A texture unit. TEXTURE14 = 0x84CE // A texture unit. TEXTURE15 = 0x84CF // A texture unit. TEXTURE16 = 0x84D0 // A texture unit. TEXTURE17 = 0x84D1 // A texture unit. TEXTURE18 = 0x84D2 // A texture unit. TEXTURE19 = 0x84D3 // A texture unit. TEXTURE20 = 0x84D4 // A texture unit. TEXTURE21 = 0x84D5 // A texture unit. TEXTURE22 = 0x84D6 // A texture unit. TEXTURE23 = 0x84D7 // A texture unit. TEXTURE24 = 0x84D8 // A texture unit. TEXTURE25 = 0x84D9 // A texture unit. TEXTURE26 = 0x84DA // A texture unit. TEXTURE27 = 0x84DB // A texture unit. TEXTURE28 = 0x84DC // A texture unit. TEXTURE29 = 0x84DD // A texture unit. TEXTURE30 = 0x84DE // A texture unit. TEXTURE31 = 0x84DF // A texture unit. DEPTH_BUFFER_BIT = 0x00000100 // Passed to clear to clear the current depth buffer. STENCIL_BUFFER_BIT = 0x00000400 // Passed to clear to clear the current stencil buffer. COLOR_BUFFER_BIT = 0x00004000 // Passed to clear to clear the current color buffer. POINTS = 0x0000 // Passed to drawElements or drawArrays to draw single points. LINES = 0x0001 // Passed to drawElements or drawArrays to draw lines. Each vertex connects to the one after it. LINE_LOOP = 0x0002 // Passed to drawElements or drawArrays to draw lines. Each set of two vertices is treated as a separate line segment. LINE_STRIP = 0x0003 // Passed to drawElements or drawArrays to draw a connected group of line segments from the first vertex to the last. 
TRIANGLES = 0x0004 // Passed to drawElements or drawArrays to draw triangles. Each set of three vertices creates a separate triangle. TRIANGLE_STRIP = 0x0005 // Passed to drawElements or drawArrays to draw a connected group of triangles. TRIANGLE_FAN = 0x0006 // Passed to drawElements or drawArrays to draw a connected group of triangles. Each vertex connects to the previous and the first vertex in the fan. ZERO = 0 // Passed to blendFunc or blendFuncSeparate to turn off a component. ONE = 1 // Passed to blendFunc or blendFuncSeparate to turn on a component. SRC_COLOR = 0x0300 // Passed to blendFunc or blendFuncSeparate to multiply a component by the source elements color. ONE_MINUS_SRC_COLOR = 0x0301 // Passed to blendFunc or blendFuncSeparate to multiply a component by one minus the source elements color. SRC_ALPHA = 0x0302 // Passed to blendFunc or blendFuncSeparate to multiply a component by the source's alpha. ONE_MINUS_SRC_ALPHA = 0x0303 // Passed to blendFunc or blendFuncSeparate to multiply a component by one minus the source's alpha. DST_ALPHA = 0x0304 // Passed to blendFunc or blendFuncSeparate to multiply a component by the destination's alpha. ONE_MINUS_DST_ALPHA = 0x0305 // Passed to blendFunc or blendFuncSeparate to multiply a component by one minus the destination's alpha. DST_COLOR = 0x0306 // Passed to blendFunc or blendFuncSeparate to multiply a component by the destination's color. ONE_MINUS_DST_COLOR = 0x0307 // Passed to blendFunc or blendFuncSeparate to multiply a component by one minus the destination's color. SRC_ALPHA_SATURATE = 0x0308 // Passed to blendFunc or blendFuncSeparate to multiply a component by the minimum of source's alpha or one minus the destination's alpha. CONSTANT_COLOR = 0x8001 // Passed to blendFunc or blendFuncSeparate to specify a constant color blend function. ONE_MINUS_CONSTANT_COLOR = 0x8002 // Passed to blendFunc or blendFuncSeparate to specify one minus a constant color blend function. 
CONSTANT_ALPHA = 0x8003 // Passed to blendFunc or blendFuncSeparate to specify a constant alpha blend function. ONE_MINUS_CONSTANT_ALPHA = 0x8004 // Passed to blendFunc or blendFuncSeparate to specify one minus a constant alpha blend function. FUNC_ADD = 0x8006 // Passed to blendEquation or blendEquationSeparate to set an addition blend function. FUNC_SUBTRACT = 0x800A // Passed to blendEquation or blendEquationSeparate to specify a subtraction blend function (source - destination). FUNC_REVERSE_SUBTRACT = 0x800B // Passed to blendEquation or blendEquationSeparate to specify a reverse subtraction blend function (destination - source). BLEND_EQUATION = 0x8009 // Passed to getParameter to get the current RGB blend function. BLEND_EQUATION_RGB = 0x8009 // Passed to getParameter to get the current RGB blend function. Same as BLEND_EQUATION BLEND_EQUATION_ALPHA = 0x883D // Passed to getParameter to get the current alpha blend function. Same as BLEND_EQUATION BLEND_DST_RGB = 0x80C8 // Passed to getParameter to get the current destination RGB blend function. BLEND_SRC_RGB = 0x80C9 // Passed to getParameter to get the current destination RGB blend function. BLEND_DST_ALPHA = 0x80CA // Passed to getParameter to get the current destination alpha blend function. BLEND_SRC_ALPHA = 0x80CB // Passed to getParameter to get the current source alpha blend function. BLEND_COLOR = 0x8005 // Passed to getParameter to return a the current blend color. ARRAY_BUFFER_BINDING = 0x8894 // Passed to getParameter to get the array buffer binding. ELEMENT_ARRAY_BUFFER_BINDING = 0x8895 // Passed to getParameter to get the current element array buffer. LINE_WIDTH = 0x0B21 // Passed to getParameter to get the current lineWidth (set by the lineWidth method). ALIASED_POINT_SIZE_RANGE = 0x846D // Passed to getParameter to get the current size of a point drawn with gl.POINTS ALIASED_LINE_WIDTH_RANGE = 0x846E // Passed to getParameter to get the range of available widths for a line. 
Returns a length-2 array with the lo value at 0, and hight at 1. CULL_FACE_MODE = 0x0B45 // Passed to getParameter to get the current value of cullFace. Should return FRONT, BACK, or FRONT_AND_BACK FRONT_FACE = 0x0B46 // Passed to getParameter to determine the current value of frontFace. Should return CW or CCW. DEPTH_RANGE = 0x0B70 // Passed to getParameter to return a length-2 array of floats giving the current depth range. DEPTH_WRITEMASK = 0x0B72 // Passed to getParameter to determine if the depth write mask is enabled. DEPTH_CLEAR_VALUE = 0x0B73 // Passed to getParameter to determine the current depth clear value. DEPTH_FUNC = 0x0B74 // Passed to getParameter to get the current depth function. Returns NEVER, ALWAYS, LESS, EQUAL, LEQUAL, GREATER, GEQUAL, or NOTEQUAL. STENCIL_CLEAR_VALUE = 0x0B91 // Passed to getParameter to get the value the stencil will be cleared to. STENCIL_FUNC = 0x0B92 // Passed to getParameter to get the current stencil function. Returns NEVER, ALWAYS, LESS, EQUAL, LEQUAL, GREATER, GEQUAL, or NOTEQUAL. STENCIL_FAIL = 0x0B94 // Passed to getParameter to get the current stencil fail function. Should return KEEP, REPLACE, INCR, DECR, INVERT, INCR_WRAP, or DECR_WRAP. STENCIL_PASS_DEPTH_FAIL = 0x0B95 // Passed to getParameter to get the current stencil fail function should the depth buffer test fail. Should return KEEP, REPLACE, INCR, DECR, INVERT, INCR_WRAP, or DECR_WRAP. STENCIL_PASS_DEPTH_PASS = 0x0B96 // Passed to getParameter to get the current stencil fail function should the depth buffer test pass. Should return KEEP, REPLACE, INCR, DECR, INVERT, INCR_WRAP, or DECR_WRAP. STENCIL_REF = 0x0B97 // Passed to getParameter to get the reference value used for stencil tests. 
STENCIL_VALUE_MASK = 0x0B93 STENCIL_WRITEMASK = 0x0B98 STENCIL_BACK_FUNC = 0x8800 STENCIL_BACK_FAIL = 0x8801 STENCIL_BACK_PASS_DEPTH_FAIL = 0x8802 STENCIL_BACK_PASS_DEPTH_PASS = 0x8803 STENCIL_BACK_REF = 0x8CA3 STENCIL_BACK_VALUE_MASK = 0x8CA4 STENCIL_BACK_WRITEMASK = 0x8CA5 VIEWPORT = 0x0BA2 // Returns an Int32Array with four elements for the current viewport dimensions. SCISSOR_BOX = 0x0C10 // Returns an Int32Array with four elements for the current scissor box dimensions. COLOR_CLEAR_VALUE = 0x0C22 COLOR_WRITEMASK = 0x0C23 UNPACK_ALIGNMENT = 0x0CF5 PACK_ALIGNMENT = 0x0D05 MAX_TEXTURE_SIZE = 0x0D33 MAX_VIEWPORT_DIMS = 0x0D3A SUBPIXEL_BITS = 0x0D50 RED_BITS = 0x0D52 GREEN_BITS = 0x0D53 BLUE_BITS = 0x0D54 ALPHA_BITS = 0x0D55 DEPTH_BITS = 0x0D56 STENCIL_BITS = 0x0D57 POLYGON_OFFSET_UNITS = 0x2A00 POLYGON_OFFSET_FACTOR = 0x8038 TEXTURE_BINDING_2D = 0x8069 SAMPLE_BUFFERS = 0x80A8 SAMPLES = 0x80A9 SAMPLE_COVERAGE_VALUE = 0x80AA SAMPLE_COVERAGE_INVERT = 0x80AB COMPRESSED_TEXTURE_FORMATS = 0x86A3 VENDOR = 0x1F00 RENDERER = 0x1F01 VERSION = 0x1F02 IMPLEMENTATION_COLOR_READ_TYPE = 0x8B9A IMPLEMENTATION_COLOR_READ_FORMAT = 0x8B9B BROWSER_DEFAULT_WEBGL = 0x9244 STATIC_DRAW = 0x88E4 // Passed to bufferData as a hint about whether the contents of the buffer are likely to be used often and not change often. STREAM_DRAW = 0x88E0 // Passed to bufferData as a hint about whether the contents of the buffer are likely to not be used often. DYNAMIC_DRAW = 0x88E8 // Passed to bufferData as a hint about whether the contents of the buffer are likely to be used often and change often. ARRAY_BUFFER = 0x8892 // Passed to bindBuffer or bufferData to specify the type of buffer being used. ELEMENT_ARRAY_BUFFER = 0x8893 // Passed to bindBuffer or bufferData to specify the type of buffer being used. BUFFER_SIZE = 0x8764 // Passed to getBufferParameter to get a buffer's size. BUFFER_USAGE = 0x8765 // Passed to getBufferParameter to get the hint for the buffer passed in when it was created. 
CURRENT_VERTEX_ATTRIB = 0x8626 // Passed to getVertexAttrib to read back the current vertex attribute. VERTEX_ATTRIB_ARRAY_ENABLED = 0x8622 VERTEX_ATTRIB_ARRAY_SIZE = 0x8623 VERTEX_ATTRIB_ARRAY_STRIDE = 0x8624 VERTEX_ATTRIB_ARRAY_TYPE = 0x8625 VERTEX_ATTRIB_ARRAY_NORMALIZED = 0x886A VERTEX_ATTRIB_ARRAY_POINTER = 0x8645 VERTEX_ATTRIB_ARRAY_BUFFER_BINDING = 0x889F CULL_FACE = 0x0B44 // Passed to enable/disable to turn on/off culling. Can also be used with getParameter to find the current culling method. FRONT = 0x0404 // Passed to cullFace to specify that only front faces should be culled. BACK = 0x0405 // Passed to cullFace to specify that only back faces should be culled. FRONT_AND_BACK = 0x0408 // Passed to cullFace to specify that front and back faces should be culled. BLEND = 0x0BE2 // Passed to enable/disable to turn on/off blending. Can also be used with getParameter to find the current blending method. DEPTH_TEST = 0x0B71 // Passed to enable/disable to turn on/off the depth test. Can also be used with getParameter to query the depth test. DITHER = 0x0BD0 // Passed to enable/disable to turn on/off dithering. Can also be used with getParameter to find the current dithering method. POLYGON_OFFSET_FILL = 0x8037 // Passed to enable/disable to turn on/off the polygon offset. Useful for rendering hidden-line images, decals, and or solids with highlighted edges. Can also be used with getParameter to query the scissor test. SAMPLE_ALPHA_TO_COVERAGE = 0x809E // Passed to enable/disable to turn on/off the alpha to coverage. Used in multi-sampling alpha channels. SAMPLE_COVERAGE = 0x80A0 // Passed to enable/disable to turn on/off the sample coverage. Used in multi-sampling. SCISSOR_TEST = 0x0C11 // Passed to enable/disable to turn on/off the scissor test. Can also be used with getParameter to query the scissor test. STENCIL_TEST = 0x0B90 // Passed to enable/disable to turn on/off the stencil test. Can also be used with getParameter to query the stencil test. 
NO_ERROR = 0 // Returned from getError. INVALID_ENUM = 0x0500 // Returned from getError. INVALID_VALUE = 0x0501 // Returned from getError. INVALID_OPERATION = 0x0502 // Returned from getError. OUT_OF_MEMORY = 0x0505 // Returned from getError. CONTEXT_LOST_WEBGL = 0x9242 // Returned from getError. CW = 0x0900 // Passed to frontFace to specify the front face of a polygon is drawn in the clockwise direction CCW = 0x0901 // Passed to frontFace to specify the front face of a polygon is drawn in the counter clockwise direction DONT_CARE = 0x1100 // There is no preference for this behavior. FASTEST = 0x1101 // The most efficient behavior should be used. NICEST = 0x1102 // The most correct or the highest quality option should be used. GENERATE_MIPMAP_HINT = 0x8192 // Hint for the quality of filtering when generating mipmap images with WebGLRenderingContext.generateMipmap(). BYTE = 0x1400 UNSIGNED_BYTE = 0x1401 SHORT = 0x1402 UNSIGNED_SHORT = 0x1403 INT = 0x1404 UNSIGNED_INT = 0x1405 FLOAT = 0x1406 DEPTH_COMPONENT = 0x1902 ALPHA = 0x1906 RGB = 0x1907 RGBA = 0x1908 LUMINANCE = 0x1909 LUMINANCE_ALPHA = 0x190A UNSIGNED_SHORT_4_4_4_4 = 0x8033 UNSIGNED_SHORT_5_5_5_1 = 0x8034 UNSIGNED_SHORT_5_6_5 = 0x8363 FRAGMENT_SHADER = 0x8B30 // Passed to createShader to define a fragment shader. VERTEX_SHADER = 0x8B31 // Passed to createShader to define a vertex shader COMPILE_STATUS = 0x8B81 // Passed to getShaderParameter to get the status of the compilation. Returns false if the shader was not compiled. You can then query getShaderInfoLog to find the exact error DELETE_STATUS = 0x8B80 // Passed to getShaderParameter to determine if a shader was deleted via deleteShader. Returns true if it was, false otherwise. LINK_STATUS = 0x8B82 // Passed to getProgramParameter after calling linkProgram to determine if a program was linked correctly. Returns false if there were errors. Use getProgramInfoLog to find the exact error. 
VALIDATE_STATUS = 0x8B83 // Passed to getProgramParameter after calling validateProgram to determine if it is valid. Returns false if errors were found. ATTACHED_SHADERS = 0x8B85 // Passed to getProgramParameter after calling attachShader to determine if the shader was attached correctly. Returns false if errors occurred. ACTIVE_ATTRIBUTES = 0x8B89 // Passed to getProgramParameter to get the number of attributes active in a program. ACTIVE_UNIFORMS = 0x8B86 // Passed to getProgramParameter to get the number of uniforms active in a program. MAX_VERTEX_ATTRIBS = 0x8869 // The maximum number of entries possible in the vertex attribute list. MAX_VERTEX_UNIFORM_VECTORS = 0x8DFB MAX_VARYING_VECTORS = 0x8DFC MAX_COMBINED_TEXTURE_IMAGE_UNITS = 0x8B4D MAX_VERTEX_TEXTURE_IMAGE_UNITS = 0x8B4C MAX_TEXTURE_IMAGE_UNITS = 0x8872 // Implementation dependent number of maximum texture units. At least 8. MAX_FRAGMENT_UNIFORM_VECTORS = 0x8DFD SHADER_TYPE = 0x8B4F SHADING_LANGUAGE_VERSION = 0x8B8C CURRENT_PROGRAM = 0x8B8D NEVER = 0x0200 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will never pass. i.e. Nothing will be drawn. LESS = 0x0201 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is less than the stored value. EQUAL = 0x0202 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is equals to the stored value. LEQUAL = 0x0203 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is less than or equal to the stored value. GREATER = 0x0204 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is greater than the stored value. NOTEQUAL = 0x0205 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is not equal to the stored value. 
GEQUAL = 0x0206 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will pass if the new depth value is greater than or equal to the stored value. ALWAYS = 0x0207 // Passed to depthFunction or stencilFunction to specify depth or stencil tests will always pass. i.e. Pixels will be drawn in the order they are drawn. KEEP = 0x1E00 REPLACE = 0x1E01 INCR = 0x1E02 DECR = 0x1E03 INVERT = 0x150A INCR_WRAP = 0x8507 DECR_WRAP = 0x8508 NEAREST = 0x2600 LINEAR = 0x2601 NEAREST_MIPMAP_NEAREST = 0x2700 LINEAR_MIPMAP_NEAREST = 0x2701 NEAREST_MIPMAP_LINEAR = 0x2702 LINEAR_MIPMAP_LINEAR = 0x2703 TEXTURE_MAG_FILTER = 0x2800 TEXTURE_MIN_FILTER = 0x2801 TEXTURE_WRAP_S = 0x2802 TEXTURE_WRAP_T = 0x2803 TEXTURE_2D = 0x0DE1 TEXTURE = 0x1702 TEXTURE_CUBE_MAP = 0x8513 TEXTURE_BINDING_CUBE_MAP = 0x8514 TEXTURE_CUBE_MAP_POSITIVE_X = 0x8515 TEXTURE_CUBE_MAP_NEGATIVE_X = 0x8516 TEXTURE_CUBE_MAP_POSITIVE_Y = 0x8517 TEXTURE_CUBE_MAP_NEGATIVE_Y = 0x8518 TEXTURE_CUBE_MAP_POSITIVE_Z = 0x8519 TEXTURE_CUBE_MAP_NEGATIVE_Z = 0x851A MAX_CUBE_MAP_TEXTURE_SIZE = 0x851C ACTIVE_TEXTURE = 0x84E0 // The current active texture unit. 
REPEAT = 0x2901 CLAMP_TO_EDGE = 0x812F MIRRORED_REPEAT = 0x8370 FLOAT_VEC2 = 0x8B50 FLOAT_VEC3 = 0x8B51 FLOAT_VEC4 = 0x8B52 INT_VEC2 = 0x8B53 INT_VEC3 = 0x8B54 INT_VEC4 = 0x8B55 BOOL = 0x8B56 BOOL_VEC2 = 0x8B57 BOOL_VEC3 = 0x8B58 BOOL_VEC4 = 0x8B59 FLOAT_MAT2 = 0x8B5A FLOAT_MAT3 = 0x8B5B FLOAT_MAT4 = 0x8B5C SAMPLER_2D = 0x8B5E SAMPLER_CUBE = 0x8B60 LOW_FLOAT = 0x8DF0 MEDIUM_FLOAT = 0x8DF1 HIGH_FLOAT = 0x8DF2 LOW_INT = 0x8DF3 MEDIUM_INT = 0x8DF4 HIGH_INT = 0x8DF5 FRAMEBUFFER = 0x8D40 RENDERBUFFER = 0x8D41 RGBA4 = 0x8056 RGB5_A1 = 0x8057 RGB565 = 0x8D62 DEPTH_COMPONENT16 = 0x81A5 STENCIL_INDEX8 = 0x8D48 DEPTH_STENCIL = 0x84F9 RENDERBUFFER_WIDTH = 0x8D42 RENDERBUFFER_HEIGHT = 0x8D43 RENDERBUFFER_INTERNAL_FORMAT = 0x8D44 RENDERBUFFER_RED_SIZE = 0x8D50 RENDERBUFFER_GREEN_SIZE = 0x8D51 RENDERBUFFER_BLUE_SIZE = 0x8D52 RENDERBUFFER_ALPHA_SIZE = 0x8D53 RENDERBUFFER_DEPTH_SIZE = 0x8D54 RENDERBUFFER_STENCIL_SIZE = 0x8D55 FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE = 0x8CD0 FRAMEBUFFER_ATTACHMENT_OBJECT_NAME = 0x8CD1 FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL = 0x8CD2 FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE = 0x8CD3 COLOR_ATTACHMENT0 = 0x8CE0 DEPTH_ATTACHMENT = 0x8D00 STENCIL_ATTACHMENT = 0x8D20 DEPTH_STENCIL_ATTACHMENT = 0x821A NONE = 0 FRAMEBUFFER_COMPLETE = 0x8CD5 FRAMEBUFFER_INCOMPLETE_ATTACHMENT = 0x8CD6 FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = 0x8CD7 FRAMEBUFFER_INCOMPLETE_DIMENSIONS = 0x8CD9 FRAMEBUFFER_UNSUPPORTED = 0x8CDD FRAMEBUFFER_BINDING = 0x8CA6 RENDERBUFFER_BINDING = 0x8CA7 MAX_RENDERBUFFER_SIZE = 0x84E8 INVALID_FRAMEBUFFER_OPERATION = 0x0506 UNPACK_FLIP_Y_WEBGL = 0x9240 UNPACK_PREMULTIPLY_ALPHA_WEBGL = 0x9241 UNPACK_COLORSPACE_CONVERSION_WEBGL = 0x9243 READ_BUFFER = 0x0C02 UNPACK_ROW_LENGTH = 0x0CF2 UNPACK_SKIP_ROWS = 0x0CF3 UNPACK_SKIP_PIXELS = 0x0CF4 PACK_ROW_LENGTH = 0x0D02 PACK_SKIP_ROWS = 0x0D03 PACK_SKIP_PIXELS = 0x0D04 TEXTURE_BINDING_3D = 0x806A UNPACK_SKIP_IMAGES = 0x806D UNPACK_IMAGE_HEIGHT = 0x806E MAX_3D_TEXTURE_SIZE = 0x8073 MAX_ELEMENTS_VERTICES = 0x80E8 
MAX_ELEMENTS_INDICES = 0x80E9 MAX_TEXTURE_LOD_BIAS = 0x84FD MAX_FRAGMENT_UNIFORM_COMPONENTS = 0x8B49 MAX_VERTEX_UNIFORM_COMPONENTS = 0x8B4A MAX_ARRAY_TEXTURE_LAYERS = 0x88FF MIN_PROGRAM_TEXEL_OFFSET = 0x8904 MAX_PROGRAM_TEXEL_OFFSET = 0x8905 MAX_VARYING_COMPONENTS = 0x8B4B FRAGMENT_SHADER_DERIVATIVE_HINT = 0x8B8B RASTERIZER_DISCARD = 0x8C89 VERTEX_ARRAY_BINDING = 0x85B5 MAX_VERTEX_OUTPUT_COMPONENTS = 0x9122 MAX_FRAGMENT_INPUT_COMPONENTS = 0x9125 MAX_SERVER_WAIT_TIMEOUT = 0x9111 MAX_ELEMENT_INDEX = 0x8D6B RED = 0x1903 RGB8 = 0x8051 RGBA8 = 0x8058 RGB10_A2 = 0x8059 TEXTURE_3D = 0x806F TEXTURE_WRAP_R = 0x8072 TEXTURE_MIN_LOD = 0x813A TEXTURE_MAX_LOD = 0x813B TEXTURE_BASE_LEVEL = 0x813C TEXTURE_MAX_LEVEL = 0x813D TEXTURE_COMPARE_MODE = 0x884C TEXTURE_COMPARE_FUNC = 0x884D SRGB = 0x8C40 SRGB8 = 0x8C41 SRGB8_ALPHA8 = 0x8C43 COMPARE_REF_TO_TEXTURE = 0x884E RGBA32F = 0x8814 RGB32F = 0x8815 RGBA16F = 0x881A RGB16F = 0x881B TEXTURE_2D_ARRAY = 0x8C1A TEXTURE_BINDING_2D_ARRAY = 0x8C1D R11F_G11F_B10F = 0x8C3A RGB9_E5 = 0x8C3D RGBA32UI = 0x8D70 RGB32UI = 0x8D71 RGBA16UI = 0x8D76 RGB16UI = 0x8D77 RGBA8UI = 0x8D7C RGB8UI = 0x8D7D RGBA32I = 0x8D82 RGB32I = 0x8D83 RGBA16I = 0x8D88 RGB16I = 0x8D89 RGBA8I = 0x8D8E RGB8I = 0x8D8F RED_INTEGER = 0x8D94 RGB_INTEGER = 0x8D98 RGBA_INTEGER = 0x8D99 R8 = 0x8229 RG8 = 0x822B R16F = 0x822D R32F = 0x822E RG16F = 0x822F RG32F = 0x8230 R8I = 0x8231 R8UI = 0x8232 R16I = 0x8233 R16UI = 0x8234 R32I = 0x8235 R32UI = 0x8236 RG8I = 0x8237 RG8UI = 0x8238 RG16I = 0x8239 RG16UI = 0x823A RG32I = 0x823B RG32UI = 0x823C R8_SNORM = 0x8F94 RG8_SNORM = 0x8F95 RGB8_SNORM = 0x8F96 RGBA8_SNORM = 0x8F97 RGB10_A2UI = 0x906F TEXTURE_IMMUTABLE_FORMAT = 0x912F TEXTURE_IMMUTABLE_LEVELS = 0x82DF UNSIGNED_INT_2_10_10_10_REV = 0x8368 UNSIGNED_INT_10F_11F_11F_REV = 0x8C3B UNSIGNED_INT_5_9_9_9_REV = 0x8C3E FLOAT_32_UNSIGNED_INT_24_8_REV = 0x8DAD UNSIGNED_INT_24_8 = 0x84FA HALF_FLOAT = 0x140B RG = 0x8227 RG_INTEGER = 0x8228 INT_2_10_10_10_REV = 0x8D9F CURRENT_QUERY = 0x8865 
QUERY_RESULT = 0x8866 QUERY_RESULT_AVAILABLE = 0x8867 ANY_SAMPLES_PASSED = 0x8C2F ANY_SAMPLES_PASSED_CONSERVATIVE = 0x8D6A MAX_DRAW_BUFFERS = 0x8824 DRAW_BUFFER0 = 0x8825 DRAW_BUFFER1 = 0x8826 DRAW_BUFFER2 = 0x8827 DRAW_BUFFER3 = 0x8828 DRAW_BUFFER4 = 0x8829 DRAW_BUFFER5 = 0x882A DRAW_BUFFER6 = 0x882B DRAW_BUFFER7 = 0x882C DRAW_BUFFER8 = 0x882D DRAW_BUFFER9 = 0x882E DRAW_BUFFER10 = 0x882F DRAW_BUFFER11 = 0x8830 DRAW_BUFFER12 = 0x8831 DRAW_BUFFER13 = 0x8832 DRAW_BUFFER14 = 0x8833 DRAW_BUFFER15 = 0x8834 MAX_COLOR_ATTACHMENTS = 0x8CDF COLOR_ATTACHMENT1 = 0x8CE1 COLOR_ATTACHMENT2 = 0x8CE2 COLOR_ATTACHMENT3 = 0x8CE3 COLOR_ATTACHMENT4 = 0x8CE4 COLOR_ATTACHMENT5 = 0x8CE5 COLOR_ATTACHMENT6 = 0x8CE6 COLOR_ATTACHMENT7 = 0x8CE7 COLOR_ATTACHMENT8 = 0x8CE8 COLOR_ATTACHMENT9 = 0x8CE9 COLOR_ATTACHMENT10 = 0x8CEA COLOR_ATTACHMENT11 = 0x8CEB COLOR_ATTACHMENT12 = 0x8CEC COLOR_ATTACHMENT13 = 0x8CED COLOR_ATTACHMENT14 = 0x8CEE COLOR_ATTACHMENT15 = 0x8CEF SAMPLER_3D = 0x8B5F SAMPLER_2D_SHADOW = 0x8B62 SAMPLER_2D_ARRAY = 0x8DC1 SAMPLER_2D_ARRAY_SHADOW = 0x8DC4 SAMPLER_CUBE_SHADOW = 0x8DC5 INT_SAMPLER_2D = 0x8DCA INT_SAMPLER_3D = 0x8DCB INT_SAMPLER_CUBE = 0x8DCC INT_SAMPLER_2D_ARRAY = 0x8DCF UNSIGNED_INT_SAMPLER_2D = 0x8DD2 UNSIGNED_INT_SAMPLER_3D = 0x8DD3 UNSIGNED_INT_SAMPLER_CUBE = 0x8DD4 UNSIGNED_INT_SAMPLER_2D_ARRAY = 0x8DD7 MAX_SAMPLES = 0x8D57 SAMPLER_BINDING = 0x8919 PIXEL_PACK_BUFFER = 0x88EB PIXEL_UNPACK_BUFFER = 0x88EC PIXEL_PACK_BUFFER_BINDING = 0x88ED PIXEL_UNPACK_BUFFER_BINDING = 0x88EF COPY_READ_BUFFER = 0x8F36 COPY_WRITE_BUFFER = 0x8F37 COPY_READ_BUFFER_BINDING = 0x8F36 COPY_WRITE_BUFFER_BINDING = 0x8F37 FLOAT_MAT2x3 = 0x8B65 FLOAT_MAT2x4 = 0x8B66 FLOAT_MAT3x2 = 0x8B67 FLOAT_MAT3x4 = 0x8B68 FLOAT_MAT4x2 = 0x8B69 FLOAT_MAT4x3 = 0x8B6A UNSIGNED_INT_VEC2 = 0x8DC6 UNSIGNED_INT_VEC3 = 0x8DC7 UNSIGNED_INT_VEC4 = 0x8DC8 UNSIGNED_NORMALIZED = 0x8C17 SIGNED_NORMALIZED = 0x8F9C VERTEX_ATTRIB_ARRAY_INTEGER = 0x88FD VERTEX_ATTRIB_ARRAY_DIVISOR = 0x88FE TRANSFORM_FEEDBACK_BUFFER_MODE = 
0x8C7F MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS = 0x8C80 TRANSFORM_FEEDBACK_VARYINGS = 0x8C83 TRANSFORM_FEEDBACK_BUFFER_START = 0x8C84 TRANSFORM_FEEDBACK_BUFFER_SIZE = 0x8C85 TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN = 0x8C88 MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS = 0x8C8A MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS = 0x8C8B INTERLEAVED_ATTRIBS = 0x8C8C SEPARATE_ATTRIBS = 0x8C8D TRANSFORM_FEEDBACK_BUFFER = 0x8C8E TRANSFORM_FEEDBACK_BUFFER_BINDING = 0x8C8F TRANSFORM_FEEDBACK = 0x8E22 TRANSFORM_FEEDBACK_PAUSED = 0x8E23 TRANSFORM_FEEDBACK_ACTIVE = 0x8E24 TRANSFORM_FEEDBACK_BINDING = 0x8E25 FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING = 0x8210 FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE = 0x8211 FRAMEBUFFER_ATTACHMENT_RED_SIZE = 0x8212 FRAMEBUFFER_ATTACHMENT_GREEN_SIZE = 0x8213 FRAMEBUFFER_ATTACHMENT_BLUE_SIZE = 0x8214 FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE = 0x8215 FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE = 0x8216 FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE = 0x8217 FRAMEBUFFER_DEFAULT = 0x8218 DEPTH24_STENCIL8 = 0x88F0 DRAW_FRAMEBUFFER_BINDING = 0x8CA6 READ_FRAMEBUFFER = 0x8CA8 DRAW_FRAMEBUFFER = 0x8CA9 READ_FRAMEBUFFER_BINDING = 0x8CAA RENDERBUFFER_SAMPLES = 0x8CAB FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER = 0x8CD4 FRAMEBUFFER_INCOMPLETE_MULTISAMPLE = 0x8D56 UNIFORM_BUFFER = 0x8A11 UNIFORM_BUFFER_BINDING = 0x8A28 UNIFORM_BUFFER_START = 0x8A29 UNIFORM_BUFFER_SIZE = 0x8A2A MAX_VERTEX_UNIFORM_BLOCKS = 0x8A2B MAX_FRAGMENT_UNIFORM_BLOCKS = 0x8A2D MAX_COMBINED_UNIFORM_BLOCKS = 0x8A2E MAX_UNIFORM_BUFFER_BINDINGS = 0x8A2F MAX_UNIFORM_BLOCK_SIZE = 0x8A30 MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS = 0x8A31 MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS = 0x8A33 UNIFORM_BUFFER_OFFSET_ALIGNMENT = 0x8A34 ACTIVE_UNIFORM_BLOCKS = 0x8A36 UNIFORM_TYPE = 0x8A37 UNIFORM_SIZE = 0x8A38 UNIFORM_BLOCK_INDEX = 0x8A3A UNIFORM_OFFSET = 0x8A3B UNIFORM_ARRAY_STRIDE = 0x8A3C UNIFORM_MATRIX_STRIDE = 0x8A3D UNIFORM_IS_ROW_MAJOR = 0x8A3E UNIFORM_BLOCK_BINDING = 0x8A3F UNIFORM_BLOCK_DATA_SIZE = 0x8A40 UNIFORM_BLOCK_ACTIVE_UNIFORMS = 0x8A42 
UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES = 0x8A43 UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER = 0x8A44 UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER = 0x8A46 OBJECT_TYPE = 0x9112 SYNC_CONDITION = 0x9113 SYNC_STATUS = 0x9114 SYNC_FLAGS = 0x9115 SYNC_FENCE = 0x9116 SYNC_GPU_COMMANDS_COMPLETE = 0x9117 UNSIGNALED = 0x9118 SIGNALED = 0x9119 ALREADY_SIGNALED = 0x911A TIMEOUT_EXPIRED = 0x911B CONDITION_SATISFIED = 0x911C WAIT_FAILED = 0x911D SYNC_FLUSH_COMMANDS_BIT = 0x00000001 COLOR = 0x1800 DEPTH = 0x1801 STENCIL = 0x1802 MIN = 0x8007 MAX = 0x8008 DEPTH_COMPONENT24 = 0x81A6 STREAM_READ = 0x88E1 STREAM_COPY = 0x88E2 STATIC_READ = 0x88E5 STATIC_COPY = 0x88E6 DYNAMIC_READ = 0x88E9 DYNAMIC_COPY = 0x88EA DEPTH_COMPONENT32F = 0x8CAC DEPTH32F_STENCIL8 = 0x8CAD INVALID_INDEX = 0xFFFFFFFF TIMEOUT_IGNORED = -1 MAX_CLIENT_WAIT_TIMEOUT_WEBGL = 0x9247 VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE = 0x88FE // Describes the frequency divisor used for instanced rendering. UNMASKED_VENDOR_WEBGL = 0x9245 // Passed to getParameter to get the vendor string of the graphics driver. UNMASKED_RENDERER_WEBGL = 0x9246 // Passed to getParameter to get the renderer string of the graphics driver. MAX_TEXTURE_MAX_ANISOTROPY_EXT = 0x84FF // Returns the maximum available anisotropy. TEXTURE_MAX_ANISOTROPY_EXT = 0x84FE // Passed to texParameter to set the desired maximum anisotropy for a texture. COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0 // A DXT1-compressed image in an RGB image format. COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1 // A DXT1-compressed image in an RGB image format with a simple on/off alpha value. COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2 // A DXT3-compressed image in an RGBA image format. Compared to a 32-bit RGBA texture, it offers 4:1 compression. COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3 // A DXT5-compressed image in an RGBA image format. It also provides a 4:1 compression, but differs to the DXT3 compression in how the alpha compression is done. 
COMPRESSED_R11_EAC = 0x9270 // One-channel (red) unsigned format compression. COMPRESSED_SIGNED_R11_EAC = 0x9271 // One-channel (red) signed format compression. COMPRESSED_RG11_EAC = 0x9272 // Two-channel (red and green) unsigned format compression. COMPRESSED_SIGNED_RG11_EAC = 0x9273 // Two-channel (red and green) signed format compression. COMPRESSED_RGB8_ETC2 = 0x9274 // Compresses RBG8 data with no alpha channel. COMPRESSED_RGBA8_ETC2_EAC = 0x9275 // Compresses RGBA8 data. The RGB part is encoded the same as RGB_ETC2, but the alpha part is encoded separately. COMPRESSED_SRGB8_ETC2 = 0x9276 // Compresses sRBG8 data with no alpha channel. COMPRESSED_SRGB8_ALPHA8_ETC2_EAC = 0x9277 // Compresses sRGBA8 data. The sRGB part is encoded the same as SRGB_ETC2, but the alpha part is encoded separately. COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 0x9278 // Similar to RGB8_ETC, but with ability to punch through the alpha channel, which means to make it completely opaque or transparent. COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 0x9279 // Similar to SRGB8_ETC, but with ability to punch through the alpha channel, which means to make it completely opaque or transparent. COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00 // RGB compression in 4-bit mode. One block for each 4×4 pixels. COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02 // RGBA compression in 4-bit mode. One block for each 4×4 pixels. COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01 // RGB compression in 2-bit mode. One block for each 8×4 pixels. COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03 // RGBA compression in 2-bit mode. One block for each 8×4 pixe COMPRESSED_RGB_ETC1_WEBGL = 0x8D64 // Compresses 24-bit RGB data with no alpha channel. UNSIGNED_INT_24_8_WEBGL = 0x84FA // Unsigned integer type for 24-bit depth texture data. HALF_FLOAT_OES = 0x8D61 // Half floating-point type (16-bit). RGBA32F_EXT = 0x8814 // RGBA 32-bit floating-point color-renderable format. RGB32F_EXT = 0x8815 // RGB 32-bit floating-point color-renderable format. 
FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT = 0x8211 UNSIGNED_NORMALIZED_EXT = 0x8C17 MIN_EXT = 0x8007 // Produces the minimum color components of the source and destination colors. MAX_EXT = 0x8008 // Produces the maximum color components of the source and destination colors. SRGB_EXT = 0x8C40 // Unsized sRGB format that leaves the precision up to the driver. SRGB_ALPHA_EXT = 0x8C42 // Unsized sRGB format with unsized alpha component. SRGB8_ALPHA8_EXT = 0x8C43 // Sized (8-bit) sRGB and alpha formats. FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT = 0x8210 // Returns the framebuffer color encoding. FRAGMENT_SHADER_DERIVATIVE_HINT_OES = 0x8B8B // Indicates the accuracy of the derivative calculation for the GLSL built-in functions: dFdx, dFdy, and fwidth. COLOR_ATTACHMENT0_WEBGL = 0x8CE0 // Framebuffer color attachment point COLOR_ATTACHMENT1_WEBGL = 0x8CE1 // Framebuffer color attachment point COLOR_ATTACHMENT2_WEBGL = 0x8CE2 // Framebuffer color attachment point COLOR_ATTACHMENT3_WEBGL = 0x8CE3 // Framebuffer color attachment point COLOR_ATTACHMENT4_WEBGL = 0x8CE4 // Framebuffer color attachment point COLOR_ATTACHMENT5_WEBGL = 0x8CE5 // Framebuffer color attachment point COLOR_ATTACHMENT6_WEBGL = 0x8CE6 // Framebuffer color attachment point COLOR_ATTACHMENT7_WEBGL = 0x8CE7 // Framebuffer color attachment point COLOR_ATTACHMENT8_WEBGL = 0x8CE8 // Framebuffer color attachment point COLOR_ATTACHMENT9_WEBGL = 0x8CE9 // Framebuffer color attachment point COLOR_ATTACHMENT10_WEBGL = 0x8CEA // Framebuffer color attachment point COLOR_ATTACHMENT11_WEBGL = 0x8CEB // Framebuffer color attachment point COLOR_ATTACHMENT12_WEBGL = 0x8CEC // Framebuffer color attachment point COLOR_ATTACHMENT13_WEBGL = 0x8CED // Framebuffer color attachment point COLOR_ATTACHMENT14_WEBGL = 0x8CEE // Framebuffer color attachment point COLOR_ATTACHMENT15_WEBGL = 0x8CEF // Framebuffer color attachment point DRAW_BUFFER0_WEBGL = 0x8825 // Draw buffer DRAW_BUFFER1_WEBGL = 0x8826 // Draw buffer 
DRAW_BUFFER2_WEBGL = 0x8827 // Draw buffer DRAW_BUFFER3_WEBGL = 0x8828 // Draw buffer DRAW_BUFFER4_WEBGL = 0x8829 // Draw buffer DRAW_BUFFER5_WEBGL = 0x882A // Draw buffer DRAW_BUFFER6_WEBGL = 0x882B // Draw buffer DRAW_BUFFER7_WEBGL = 0x882C // Draw buffer DRAW_BUFFER8_WEBGL = 0x882D // Draw buffer DRAW_BUFFER9_WEBGL = 0x882E // Draw buffer DRAW_BUFFER10_WEBGL = 0x882F // Draw buffer DRAW_BUFFER11_WEBGL = 0x8830 // Draw buffer DRAW_BUFFER12_WEBGL = 0x8831 // Draw buffer DRAW_BUFFER13_WEBGL = 0x8832 // Draw buffer DRAW_BUFFER14_WEBGL = 0x8833 // Draw buffer DRAW_BUFFER15_WEBGL = 0x8834 // Draw buffer MAX_COLOR_ATTACHMENTS_WEBGL = 0x8CDF // Maximum number of framebuffer color attachment points MAX_DRAW_BUFFERS_WEBGL = 0x8824 // Maximum number of draw buffers VERTEX_ARRAY_BINDING_OES = 0x85B5 // The bound vertex array object (VAO). QUERY_COUNTER_BITS_EXT = 0x8864 // The number of bits used to hold the query result for the given target. CURRENT_QUERY_EXT = 0x8865 // The currently active query. QUERY_RESULT_EXT = 0x8866 // The query result. QUERY_RESULT_AVAILABLE_EXT = 0x8867 // A Boolean indicating whether or not a query result is available. TIME_ELAPSED_EXT = 0x88BF // Elapsed time (in nanoseconds). TIMESTAMP_EXT = 0x8E28 // The current time. GPU_DISJOINT_EXT = 0x8FBB // A Boolean indicating whether or not the GPU performed any disjoint operation. )
webgl/constants.go
0.609873
0.625695
constants.go
starcoder
package seam import ( "image" "image/draw" "math" "sync" ) func energy(img *image.RGBA, x, y int) float32 { neighbours := [8]float32{ luminance(img, x-1, y-1), luminance(img, x, y-1), luminance(img, x+1, y-1), luminance(img, x-1, y), luminance(img, x+1, y), luminance(img, x-1, y+1), luminance(img, x, y+1), luminance(img, x+1, y+1), } eX := neighbours[0] + neighbours[3] + neighbours[5] - neighbours[2] - neighbours[4] - neighbours[7] eY := neighbours[0] + neighbours[1] + neighbours[2] - neighbours[5] - neighbours[6] - neighbours[7] return float32(math.Abs(float64(eX)) + math.Abs(float64(eY))) } func luminance(img *image.RGBA, x, y int) float32 { c := img.RGBAAt(x, y) return 0.2126*float32(c.R) + 0.7152*float32(c.G) + 0.0722*float32(c.B) } func RemoveVerticalSeams(img image.Image, seamsToRemove int) image.Image { imgBounds := img.Bounds() resultImg := image.NewRGBA(image.Rect(0, 0, imgBounds.Dx(), imgBounds.Dy())) resultBounds := resultImg.Bounds() draw.Draw(resultImg, resultBounds, img, image.Pt(0, 0), draw.Src) energyWidth := imgBounds.Dx() energies := make([]float32, imgBounds.Dx()*imgBounds.Dy(), imgBounds.Dx()*imgBounds.Dy()) accumulatedEnergies := make([]float32, len(energies), len(energies)) seamPositions := make([]int, imgBounds.Dy(), imgBounds.Dy()) // Calculate initial energy map for i := imgBounds.Min.Y; i < imgBounds.Max.Y; i++ { for j := imgBounds.Min.X; j < imgBounds.Max.X; j++ { offset := (i-imgBounds.Min.Y)*energyWidth + (j - imgBounds.Min.X) energies[offset] = energy(resultImg, j, i) } } // Calculate accumulated energies for j := 0; j < resultBounds.Dx(); j++ { accumulatedEnergies[j] = energies[j] } for i := 1; i < resultBounds.Dy(); i++ { for j := 0; j < resultBounds.Dx(); j++ { offset := i*energyWidth + j northOffset := offset - energyWidth minE := accumulatedEnergies[northOffset] if j > 0 && accumulatedEnergies[northOffset-1] < minE { minE = accumulatedEnergies[northOffset-1] } if j < resultBounds.Dx()-1 && accumulatedEnergies[northOffset+1] < minE 
{ minE = accumulatedEnergies[northOffset+1] } accumulatedEnergies[offset] = energies[offset] + minE } } for seamCount := 0; seamCount < seamsToRemove; seamCount++ { // Find beginning of optimal seam rowOffset := (resultBounds.Dy() - 1) * energyWidth seamMinE := accumulatedEnergies[rowOffset] seamX := 0 for j := 1; j < resultBounds.Dx(); j++ { energy := accumulatedEnergies[rowOffset+j] if energy < seamMinE { seamMinE = energy seamX = j } } seamPositions[resultBounds.Dy()-1] = seamX // Trace seam upwards for i := resultBounds.Dy() - 2; i >= 0; i-- { prevRow := i*energyWidth + seamX minE := accumulatedEnergies[prevRow] if seamX > 0 && accumulatedEnergies[prevRow-1] < minE { minE = accumulatedEnergies[prevRow-1] seamX-- } if seamX < resultBounds.Dx()-1 && accumulatedEnergies[prevRow+1] < minE { minE = accumulatedEnergies[prevRow+1] seamX++ } seamPositions[i] = seamX } resultBounds.Max.X-- var shiftGroup sync.WaitGroup shiftGroup.Add(imgBounds.Dy()) // Shift row segments over the seam for i := 0; i < imgBounds.Dy(); i++ { row := i seamPos := seamPositions[i] go func() { for j := seamPos; j < resultBounds.Max.X; j++ { resultImg.SetRGBA(j, row, resultImg.RGBAAt(imgBounds.Min.X+j+1, imgBounds.Min.Y+row)) } rowOffset := row * energyWidth offset := rowOffset + seamPos copy(energies[offset:rowOffset+energyWidth], energies[offset+1:rowOffset+energyWidth]) shiftGroup.Done() }() } shiftGroup.Wait() // Update energies along seam for i := 0; i < resultBounds.Dy(); i++ { j := seamPositions[i] if j < resultBounds.Dx() { energies[i*energyWidth+j] = energy(resultImg, j, i) } if j > 0 { energies[i*energyWidth+j-1] = energy(resultImg, j-1, i) } } // Update the accumulated energies propagating from the seam for j := seamPositions[0]; j < resultBounds.Dx(); j++ { accumulatedEnergies[j] = energies[j] } for i := 1; i < resultBounds.Dy(); i++ { lowBound := seamPositions[0] - i if lowBound < 0 { lowBound = 0 } highBound := seamPositions[0] + i if highBound > resultBounds.Dx()-1 { highBound = 
resultBounds.Dx() - 1 } for offset, j := i*energyWidth+lowBound, lowBound; j <= highBound; offset, j = offset+1, j+1 { northOffset := offset - energyWidth minE := accumulatedEnergies[northOffset] if j > 0 && accumulatedEnergies[northOffset-1] < minE { minE = accumulatedEnergies[northOffset-1] } if j < resultBounds.Dx()-1 && accumulatedEnergies[northOffset+1] < minE { minE = accumulatedEnergies[northOffset+1] } accumulatedEnergies[offset] = energies[offset] + minE } } } return resultImg.SubImage(resultBounds) }
seams.go
0.637257
0.54825
seams.go
starcoder
package main import ( "bufio" "fmt" "os" "strconv" ) func main() { scanner := bufio.NewScanner(os.Stdin) scanner.Split(bufio.ScanWords) _ = scanner.Scan() n, _ := strconv.Atoi(scanner.Text()) fruit := make([]weight, 0, n) for i := 0; i < n && scanner.Scan(); i++ { w, _ := strconv.Atoi(scanner.Text()) fruit = append(fruit, weight(w)) } _ = scanner.Scan() k, _ := strconv.Atoi(scanner.Text()) fmt.Println(greedy{weight(k), new(basket).with(n).insert(fruit...)}.eat()) } type weight int type basket struct { fruit []weight head int } func (basket *basket) with(capacity int) *basket { basket.fruit, basket.head = make([]weight, 0, capacity), 0 return basket } func (basket *basket) insert(fruit ...weight) *basket { // assert(len(fruit) <= cap(basket.fruit) - len(basket.fruit)) basket.fruit = append(basket.fruit, fruit...) return basket.sort() } func (basket *basket) empty() bool { return len(basket.fruit) == 0 || basket.fruit[0] == 0 } func (basket *basket) sort() *basket { // build min heap for i := len(basket.fruit) / 2; i >= 0; i-- { siftDown(basket.fruit, i) } // sort by min heap process := basket.fruit for last := len(basket.fruit) - 1; last > 0; last-- { process[0], process[last] = process[last], process[0] process = process[:last] siftDown(process, 0) } // remove stumps for i := len(basket.fruit) - 1; i >= 0; i-- { if basket.fruit[i] > 0 { break } basket.fruit = basket.fruit[:i] } return basket } func (basket *basket) take() weight { if basket.head < len(basket.fruit) { fruit := basket.fruit[basket.head] basket.head++ return fruit } return 0 } func (basket *basket) back(fruit ...weight) { // assert(basket.head - len(fruit) == 0) basket.head = 0 copy(basket.fruit, fruit) basket.sort() } type greedy struct { force weight basket interface { empty() bool take() weight back(fruit ...weight) } } func (greedy greedy) eat() int { var steps int for !greedy.basket.empty() { steps++ portion, w := make([]weight, 0, 4), weight(0) for w < greedy.force { // assert(f <= greedy.force) 
f := greedy.basket.take() if f == 0 { break } w += f portion = append(portion, f) } w = greedy.force for i := range portion { w -= portion[i] if w < 0 { break } if portion[i] == 1 { portion[i] = 0 continue } portion[i] /= 2 } greedy.basket.back(portion...) } return steps } func siftDown(data []weight, i int) { min := i l := left(i) if l < len(data) && data[l] < data[min] { min = l } r := right(i) if r < len(data) && data[r] < data[min] { min = r } if i != min { data[i], data[min] = data[min], data[i] siftDown(data, min) } } func left(i int) int { return 2*i + 1 } func right(i int) int { return 2 * (i + 1) }
stepik/course-156/lesson-12560/step-7/main.go
0.500977
0.419588
main.go
starcoder
package testza import ( "math/rand" "testing" "github.com/MarvinJWendt/testza/internal" ) // MockInputsStringsHelper contains strings test sets. type MockInputsStringsHelper struct{} // Empty returns a test set with a single empty string. func (s MockInputsStringsHelper) Empty() []string { return []string{""} } // Long returns a test set with long random strings. // Returns: // - Random string (length: 25) // - Random string (length: 50) // - Random string (length: 100) // - Random string (length: 1,000) // - Random string (length: 100,000) func (s MockInputsStringsHelper) Long() (testSet []string) { testSet = append(testSet, s.GenerateRandom(1, 25)...) testSet = append(testSet, s.GenerateRandom(1, 50)...) testSet = append(testSet, s.GenerateRandom(1, 100)...) testSet = append(testSet, s.GenerateRandom(1, 1_000)...) testSet = append(testSet, s.GenerateRandom(1, 100_000)...) return } // Numeric returns a test set with strings that are numeric. // The highest number in here is "9223372036854775807", which is equal to the maxmim int64. func (s MockInputsStringsHelper) Numeric() []string { positiveNumbers := []string{"0", "1", "2", "3", "100", "1.1", "1337", "13.37", "0.000000000001", "9223372036854775807"} negativeNumbers := s.Modify(positiveNumbers, func(index int, value string) string { return "-" + value }) return append(positiveNumbers, negativeNumbers...) } // Usernames returns a test set with usernames. func (s MockInputsStringsHelper) Usernames() []string { return []string{"MarvinJWendt", "Zipper1337", "n00b", "l33t", "j0rgan", "test", "test123", "TEST", "test_", "TEST_"} } // EmailAddresses returns a test set with valid email addresses. 
func (s MockInputsStringsHelper) EmailAddresses() []string { return []string{ "<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>", "test@127.0.0.1", "test@[127.0.0.1]", "<EMAIL>", "_____<EMAIL>", "<EMAIL>", `valid.”email\ <EMAIL>`, `<EMAIL>`, `123456789012345678<EMAIL>`, `"first\"last"@<EMAIL>`, `"first@<EMAIL>"@<EMAIL>`, `"first\\last"@<EMAIL>`, `x@x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x23456789.x2`, `1234567890123456789012345678901234567890123456789012345678@12345678901234567890123456789012345678901234567890123456789.12345678901234567890123456789012345678901234567890123456789.123456789012345678901234567890123456789012345678901234567890123.iana.org`, `first.last@[12.34.56.78]`, `first.last@[IPv6:::12.34.56.78]`, `first.last@[IPv6:::b3:b4]`, `first.last@[IPv6:::]`, `first.last@[IPv6:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:12.34.56.78]`, `"first\last"@<EMAIL>`, `<EMAIL>`, `<EMAIL> `, `customer/department=<EMAIL>`, `"<NAME>\" L."@<EMAIL>`, `+1~1+@<EMAIL>`, `{_<EMAIL>_}@<EMAIL>`, `"[[ test ]]"@<EMAIL>`, `"test&#13;&#10; blah"<EMAIL>`, `(foo)cal(bar)@(baz)iamcal.com(quux)`, `cal(woo(yay)hoopla)@<EMAIL>`, `cal(foo\@bar)@<EMAIL>`, `cal(foo\)bar)@<EMAIL>`, `first(Welcome to&#13;&#10; the ("wonderful" (!)) world&#13;&#10; of email)@<EMAIL>`, `pete(his account)@silly.test(his host)`, `c@(Chris's host.)public.example`, } } // HtmlTags returns a test set with html tags. func (s MockInputsStringsHelper) HtmlTags() []string { return []string{ "<script>alert('XSS')</script>", "<script>", `<a href="https://github.com/MarvinJWendt/testza">link</a>`, `</body>`, `</html>`, } } // Full contains all string test sets plus ten generated random strings. func (s MockInputsStringsHelper) Full() (ret []string) { ret = append(ret, s.Usernames()...) ret = append(ret, s.HtmlTags()...) 
ret = append(ret, s.EmailAddresses()...) ret = append(ret, s.Empty()...) ret = append(ret, s.Numeric()...) ret = append(ret, s.Long()...) for i := 0; i < 10; i++ { ret = append(ret, s.GenerateRandom(1, i)...) } return } // Limit limits a test set in size. func (s MockInputsStringsHelper) Limit(testSet []string, max int) []string { if len(testSet) <= max { return testSet } if max <= 0 { return []string{} } return testSet[:max] } // GenerateRandom returns random StringsHelper in a test set. func (s MockInputsStringsHelper) GenerateRandom(count, length int) (result []string) { var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") for i := 0; i < count; i++ { str := make([]rune, length) for i := range str { str[i] = letters[rand.Intn(len(letters))] } result = append(result, string(str)) } return } // RunTests runs a test for every value in a testset. // You can use the value as input parameter for your functions, to sanity test against many different cases. // This ensures that your functions have a correct error handling and enables you to test against hunderts of cases easily. func (s MockInputsStringsHelper) RunTests(t testRunner, testSet []string, testFunc func(t *testing.T, index int, str string)) { if test, ok := t.(helper); ok { test.Helper() } test := internal.GetTest(t) if test == nil { t.Error(internal.ErrCanNotRunIfNotBuiltinTesting) return } for i, str := range testSet { test.Run(str, func(t *testing.T) { t.Helper() testFunc(t, i, str) }) } } // Modify returns a modified version of a test set. func (s MockInputsStringsHelper) Modify(inputSlice []string, f func(index int, value string) string) (ret []string) { for i, str := range inputSlice { ret = append(ret, f(i, str)) } return }
mock-strings.go
0.721351
0.453927
mock-strings.go
starcoder
package queuedprocessor import ( "go.opencensus.io/stats" "go.opencensus.io/stats/view" "go.opencensus.io/tag" "go.opentelemetry.io/collector/obsreport" "go.opentelemetry.io/collector/processor" ) // Variables related to metrics specific to queued processor. var ( statInQueueLatencyMs = stats.Int64("queue_latency", "Latency (in milliseconds) that a batch stayed in queue", stats.UnitMilliseconds) statSendLatencyMs = stats.Int64("send_latency", "Latency (in milliseconds) to send a batch", stats.UnitMilliseconds) statSuccessSendOps = stats.Int64("success_send", "Number of successful send operations", stats.UnitDimensionless) statFailedSendOps = stats.Int64("fail_send", "Number of failed send operations", stats.UnitDimensionless) statQueueLength = stats.Int64("queue_length", "Current length of the queue (in batches)", stats.UnitDimensionless) latencyDistributionAggregation = view.Distribution(10, 25, 50, 75, 100, 250, 500, 750, 1000, 2000, 3000, 4000, 5000, 10000, 20000, 30000, 50000) queueLengthView = &view.View{ Name: statQueueLength.Name(), Measure: statQueueLength, Description: "Current number of batches in the queue", TagKeys: []tag.Key{processor.TagProcessorNameKey}, Aggregation: view.LastValue(), } sendLatencyView = &view.View{ Name: statSendLatencyMs.Name(), Measure: statSendLatencyMs, Description: "The latency of the successful send operations.", TagKeys: []tag.Key{processor.TagProcessorNameKey}, Aggregation: latencyDistributionAggregation, } inQueueLatencyView = &view.View{ Name: statInQueueLatencyMs.Name(), Measure: statInQueueLatencyMs, Description: "The \"in queue\" latency of the successful send operations.", TagKeys: []tag.Key{processor.TagProcessorNameKey}, Aggregation: latencyDistributionAggregation, } ) // MetricViews return the metrics views according to given telemetry level. 
func MetricViews() []*view.View { tagKeys := processor.MetricTagKeys() countSuccessSendView := &view.View{ Name: statSuccessSendOps.Name(), Measure: statSuccessSendOps, Description: "The number of successful send operations performed by queued_retry processor", TagKeys: tagKeys, Aggregation: view.Sum(), } countFailuresSendView := &view.View{ Name: statFailedSendOps.Name(), Measure: statFailedSendOps, Description: "The number of failed send operations performed by queued_retry processor", TagKeys: tagKeys, Aggregation: view.Sum(), } legacyViews := []*view.View{queueLengthView, countSuccessSendView, countFailuresSendView, sendLatencyView, inQueueLatencyView} return obsreport.ProcessorMetricViews(typeStr, legacyViews) }
processor/queuedprocessor/metrics.go
0.63307
0.425068
metrics.go
starcoder
package affine import ( "math" ) // calcuate the cos and sin degree value func cosSinDeg(deg float64) (float64, float64) { deg = math.Mod(deg, 360.0) switch deg { case 90.0: return 0.0, 1.0 case 180.0: return -1.0, 0.0 case 270.0: return 0.0, -1.0 } rad := deg * math.Pi / 180.0 return math.Cos(rad), math.Sin(rad) } // https://www.zhihu.com/question/20666664 // Affine transform for translate bettwen spatial reference system and pixel reference system type Affine struct { A float64 //width of a pixel B float64 //row rotation (typically zero) C float64 //x-coordinate of the upper-left corner of the upper-left pixel D float64 //column rotation (typically zero) E float64 //height of a pixel (typically negative) F float64 //y-coordinate of the of the upper-left corner of the upper-left pixel } // Create the identity transform // | x' | | 1 0 0 | | x | // | y' | = | 0 1 0 | | y | // | 1 | | 0 0 1 | | 1 | func Identity() Affine { newAff := Affine{ 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, } return newAff } // Create a translation transform from an offset vector // | x' | | 1 0 xoff | | x | // | y' | = | 0 1 yoff | | y | // | 1 | | 0 0 1 | | 1 | func Translation(xoff float64, yoff float64) Affine { newAff := Affine{ 1, 0, xoff, 0, 1, yoff, } return newAff } // Create a scaling transform from a scalar // | x' | | scale 0 1 | | x | // | y' | = | 0 scale 1 | | y | // | 1 | | 0 0 1 | | 1 | func Scale(scaling float64) Affine { newAff := Affine{ scaling, 0, 0, 0, scaling, 0, } return newAff } // Create a scaling transform from a scalar // | x' | | c s 1 | | x | // | y' | = | -s c 1 | | y | // | 1 | | 0 0 1 | | 1 | func Rotation(angle float64, pivot [2]float64) Affine { ca, sa := cosSinDeg(angle) px, py := pivot[0], pivot[1] newAff := Affine{ ca, -sa, px - px*ca + py*sa, sa, ca, py - px*sa - py*ca, } return newAff } // calcuate the transformed affine with another transform // | a" b" c" | | a b c | | a' b' c' | // | d" e" f" | = | d e f | | d' e' f' | // | 0 0 1 | | 0 0 1 | | 0 0 1 | func 
(aff *Affine) Mul(affOther Affine) { sa, sb, sc, sd, se, sf := aff.A, aff.B, aff.C, aff.D, aff.E, aff.F oa, ob, oc, od, oe, of := affOther.A, affOther.B, affOther.C, affOther.D, affOther.E, affOther.F aff.A = sa*oa + sb*od aff.B = sa*ob + sb*oe aff.C = sa*oc + sb*of + sc aff.D = sd*oa + se*od aff.E = sd*ob + se*oe aff.F = sd*oc + se*of + sf } // get the affine params from gdal func (aff *Affine) FromGdal(affGdal [6]float64) { aff.A = affGdal[1] aff.B = affGdal[2] aff.C = affGdal[0] aff.D = affGdal[4] aff.E = affGdal[5] aff.F = affGdal[3] } // get convert the affine transform to gdal order func (aff Affine) ToGdal() [6]float64 { var gt [6]float64 gt[1] = aff.A gt[2] = aff.B gt[0] = aff.C gt[4] = aff.D gt[5] = aff.E gt[3] = aff.F return gt } // get the x,y from the pixel row,col func (aff Affine) XY(col int, row int) (float64, float64) { var x, y float64 x = aff.A*float64(col) + aff.C y = aff.E*float64(row) + aff.F return x, y } // convert the spatial reference system x,y to row,col // note the x corspand to col, and y corespond to row func (aff Affine) ColRow(x float64, y float64) (int, int) { col := int(math.Floor((x - aff.C) / aff.A)) row := int(math.Floor((y - aff.F) / aff.E)) return col, row }
affine.go
0.903986
0.729002
affine.go
starcoder
package evaluator import ( "github.com/manishmeganathan/tunalang/object" "github.com/manishmeganathan/tunalang/syntaxtree" ) var ( NULL = &object.Null{} TRUE = &object.Boolean{Value: true} FALSE = &object.Boolean{Value: false} ) // A function that evaluates a Syntax Tree given a node // on it and returns an evaluated object func Evaluate(node syntaxtree.Node, env *object.Environment) object.Object { // Check the type of Syntax Tree Node switch node := node.(type) { // Program Node (Tree Root) case *syntaxtree.Program: // Evaluate the statements in the program return evalProgram(node, env) // Return Statement Node case *syntaxtree.ReturnStatement: // Evaluate the Expression in the return statement val := Evaluate(node.ReturnValue, env) // Check if evaluated value is an error if isError(val) { // Return the error return val } // Return the evaluated return object return &object.ReturnValue{Value: val} // Let Statement Node case *syntaxtree.LetStatement: // Evaluate the Expression in the let statement val := Evaluate(node.Value, env) // Check if evaluated value is an error if isError(val) { // Return the error return val } // Set the evaluated object and the literal // name to the environment store env.Set(node.Name.Value, val) // Expression Node case *syntaxtree.ExpressionStatement: // Recursive evaluation return Evaluate(node.Expression, env) // Prefix Expression Node case *syntaxtree.PrefixExpression: // Evaluate the expression into an object right := Evaluate(node.Right, env) // Check if evaluated value is an error if isError(right) { // Return the error return right } // Evaluate the object for the operator return evalPrefixExpression(node.Operator, right) // Infix Expression Node case *syntaxtree.InfixExpression: // Evaluate the left node left := Evaluate(node.Left, env) // Check if evaluated left value is an error if isError(left) { // Return the error return left } // Evaluate the right node right := Evaluate(node.Right, env) // Check if evaluated right value 
is an error if isError(right) { // Return the error return right } // Evaluate the expression with the objects and the operator return evalInfixExpression(node.Operator, left, right) // Block Statement Node case *syntaxtree.BlockStatement: // Evaluate the statements in the block return evalBlockStatement(node, env) // If Expression Node case *syntaxtree.IfExpression: // Evaluate the if expression return evalIfExpression(node, env) // Call Expression Node case *syntaxtree.CallExpression: // Evaluate the function function := Evaluate(node.Function, env) // Check if the evaluated value is an error if isError(function) { // Return the error return function } // Evaluate the function arguments args := evalExpressions(node.Arguments, env) // Check for errors if len(args) == 1 && isError(args[0]) { // Return the error return args[0] } // Evaluate the function call return applyFunction(function, args) // Identifier Expression Node case *syntaxtree.IndexExpression: // Evaluate the left expression left := Evaluate(node.Left, env) // Check if evaluated value is an error if isError(left) { // Return the error return left } // Evaluate the index expression index := Evaluate(node.Index, env) // Check if evaluated value is an error if isError(index) { // Return the error return index } // Evaluate the index expression return evalIndexExpression(left, index) // List Literal Node case *syntaxtree.ListLiteral: // Evaluate the list literal elements elements := evalExpressions(node.Elements, env) // Check for errors if len(elements) == 1 && isError(elements[0]) { // Return the error return elements[0] } // Return the List Object return &object.List{Elements: elements} // Map Literal Node case *syntaxtree.MapLiteral: // Evaluate the map literal return evalMapLiteral(node, env) // Function Literal Node case *syntaxtree.FunctionLiteral: // Return the Function Object return &object.Function{Parameters: node.Parameters, Env: env, Body: node.Body} // Identifier Literal Node case 
*syntaxtree.Identifier: // Evaluate the identifier return evalIdentifier(node, env) // Integer Literal Node case *syntaxtree.IntegerLiteral: // Return the Integer Object return &object.Integer{Value: node.Value} // Boolean Literal Node case *syntaxtree.BooleanLiteral: // Return the native Boolean Object for the value return getNativeBoolean(node.Value) // String Literal Node case *syntaxtree.StringLiteral: // Return the String Object return &object.String{Value: node.Value} } // Return nil if not evaluated return nil } // A function that returns the native Boolean // Object for a given boolean value func getNativeBoolean(input bool) *object.Boolean { // Check the input value if input { // Return the TRUE boolean native return TRUE } // Return the FALSE boolean native return FALSE } // A function that returns whether an Object is 'truthy'. // An object is 'truthy' if it is not false and not null. func isTruthy(obj object.Object) bool { // Check object value switch obj { // Null values are not truthy case NULL: return false // True values are truthy case TRUE: return true // False values are not truthy case FALSE: return false // All other types are truthy default: return true } } // A function that returns whether an Object is an Error func isError(obj object.Object) bool { // Check if object is non null if obj != nil { // Check the object type for Error return obj.Type() == object.ERROR_OBJ } // Return false (null object) return false } // A function that applies a given function object on a slice of object arguments func applyFunction(fn object.Object, args []object.Object) object.Object { switch fn := fn.(type) { case *object.Function: // Create the function's extended environment extendedEnv := extendFunctionEnv(fn, args) // Evaluate the function body evaluated := Evaluate(fn.Body, extendedEnv) // Return the unwrapped value return unwrapReturnValue(evaluated) case *object.Builtin: // Call the built-in function with the args return fn.Fn(args...) 
default: // Return an Error return object.NewError("not a function: %s", fn.Type()) } } // A function that creates an extended environment for a function func extendFunctionEnv(fn *object.Function, args []object.Object) *object.Environment { // Create a new enclosed enivronment env := object.NewEnclosedEnvironment(fn.Env) // Iterate over the function args for paramIdx, param := range fn.Parameters { // Add the function arg to the enclosed environment env.Set(param.Value, args[paramIdx]) } // Return the extended environment return env } // A function that unwraps an object into its value if it is a Return Object func unwrapReturnValue(obj object.Object) object.Object { // Check if the given object is a Return Object if returnValue, ok := obj.(*object.ReturnValue); ok { // Return the return value return returnValue.Value } // Return the object back return obj }
evaluator/core.go
0.690455
0.522324
core.go
starcoder
package accounting import ( "fmt" "time" "github.com/lightningnetwork/lnd/lnwire" "github.com/shopspring/decimal" "github.com/lightninglabs/faraday/fiat" ) // Report contains a set of entries. type Report []*HarmonyEntry // HarmonyEntry represents a single action on our balance. type HarmonyEntry struct { // Timestamp is the time at which the event occurred. // On chain events: timestamp will be obtained from the block timestamp. // Off chain events: timestamp will be obtained from lnd's records. Timestamp time.Time // Amount is the balance change incurred by this entry, expressed in // msat. Amount lnwire.MilliSatoshi // FiatValue is the fiat value of this entry's amount. This value is // expressed as a decimal so that we do not lose precision. FiatValue decimal.Decimal // TxID is the transaction ID of this entry. TxID string // Reference is a unique identifier for this entry, if available. Reference string // Note is an optional note field. Note string // Type describes the type of entry. Type EntryType // Category indicates whether the entry is part of a custom category. Category string // OnChain indicates whether the transaction occurred on or off chain. OnChain bool // Credit is true if the amount listed is a credit, and false if it is // a debit. Credit bool // BTCPrice is the timestamped bitcoin price we used to get our fiat // value. BTCPrice *fiat.USDPrice } // newHarmonyEntry produces a harmony entry. If provided with a negative amount, // it will produce a record for a debit with the absolute value set in the // amount field. Likewise, the fiat price will be obtained from the positive // value. If passed a positive value, an entry for a credit will be made, and no // changes to the amount will be made. Zero value entries will be recorded as // a credit. 
func newHarmonyEntry(ts time.Time, amountMsat int64, e EntryType, txid, reference, note, category string, onChain bool, convert usdPrice) (*HarmonyEntry, error) { var ( absAmt = amountMsat credit = true ) if absAmt < 0 { absAmt *= -1 credit = false } btcPrice, err := convert(ts) if err != nil { return nil, err } amtMsat := lnwire.MilliSatoshi(absAmt) return &HarmonyEntry{ Timestamp: ts, Amount: amtMsat, FiatValue: fiat.MsatToUSD(btcPrice.Price, amtMsat), TxID: txid, Reference: reference, Note: note, Type: e, Category: category, OnChain: onChain, Credit: credit, BTCPrice: btcPrice, }, nil } // EntryType indicates the lightning specific type of an entry. type EntryType int const ( _ EntryType = iota // EntryTypeLocalChannelOpen represents the funding transaction we // created to open a channel to a remote peer. EntryTypeLocalChannelOpen // EntryTypeRemoteChannelOpen represents the funding transaction that // our peer created to open a channel to us. EntryTypeRemoteChannelOpen // EntryTypeChannelOpenFee records the fees we paid on chain when // opening a channel to a remote peer. EntryTypeChannelOpenFee // EntryTypeChannelClose represents a channel closing transaction. If // we were paid out a balance by this transaction, the entry will // contain that amount. Note that the on chain resolutions required to // resolve a force close are not contained in this category. If we // force closed, our own balance will also require further on chain // resolution, so it will not be included. EntryTypeChannelClose // EntryTypeReceipt indicates that we have received a payment. Off // chain, this receipt is an invoice that we were paid via lightning. // On chain, this receipt is an on chain transaction paying into our // wallet. EntryTypeReceipt // EntryTypePayment indicates that we have made a payment. Off chain, // this payment is a lightning payment to an invoice. On chain, this // receipt is an on chain transaction paying from our wallet. 
EntryTypePayment // EntryTypeFee represent fees paid for on chain transactions or off // chain routing. Note that this entry type excludes fees for channel // opens and closes. EntryTypeFee // EntryTypeCircularReceipt represents an invoice that we paid to // ourselves. This occurs when circular payments are used to rebalance // channels. EntryTypeCircularReceipt // EntryTypeForward represents a forward through our node. EntryTypeForward // EntryTypeForwardFee represents the fees we earned forwarding a // payment. EntryTypeForwardFee // EntryTypeCircularPayment represents an operational payment which // we pay to ourselves to rebalance channels. EntryTypeCircularPayment // EntryTypeCircularPaymentFee represents a the fees paid on an // operational payment paid to ourselves to rebalance channels. EntryTypeCircularPaymentFee // EntryTypeSweep represents an on chain payment which swept funds // back to our own wallet. EntryTypeSweep // EntryTypeSweepFee represents the fees that were paid to sweep funds // back to our own wallet. EntryTypeSweepFee // EntryTypeChannelCloseFee represents fees our node paid to close a // channel. EntryTypeChannelCloseFee ) // String returns the string representation of an entry type. 
func (e EntryType) String() string { switch e { case EntryTypeLocalChannelOpen: return "local channel open" case EntryTypeRemoteChannelOpen: return "remote channel open" case EntryTypeChannelOpenFee: return "channel open fee" case EntryTypeChannelClose: return "channel close fee" case EntryTypeReceipt: return "receipt" case EntryTypePayment: return "payment" case EntryTypeFee: return "fee" case EntryTypeCircularReceipt: return "circular payment receipt" case EntryTypeForward: return "forward" case EntryTypeForwardFee: return "forward fee" case EntryTypeCircularPayment: return "circular payment" case EntryTypeCircularPaymentFee: return "circular payment fee" case EntryTypeSweep: return "sweep" case EntryTypeSweepFee: return "sweep fee" case EntryTypeChannelCloseFee: return "channel close fee" default: return fmt.Sprintf("unknown: %d", e) } }
accounting/report.go
0.73782
0.427636
report.go
starcoder
package triangle // Point defines a struct having as components the point X and Y coordinate position. type Point struct { x, y int } // Node struct having as components the node X and Y coordinate position. type Node struct { X, Y int } // Struct which defines a circle geometry element. type circle struct { x, y, radius int } // newNode creates a new node. func newNode(x, y int) Node { return Node{x, y} } // isEq check if two nodes are approximately equals. func (n Node) isEq(p Node) bool { dx := n.X - p.X dy := n.Y - p.Y if dx < 0 { dx = -dx } if dy < 0 { dy = -dy } if float64(dx) < 0.0001 && float64(dy) < 0.0001 { return true } return false } // Edge struct having as component the node list. type edge struct { nodes []Node } // newEdge creates a new edge. func newEdge(p0, p1 Node) []Node { nodes := []Node{p0, p1} return nodes } // isEq check if two edge are approximately equals. func (e edge) isEq(edge edge) bool { na := e.nodes nb := edge.nodes na0, na1 := na[0], na[1] nb0, nb1 := nb[0], nb[1] if (na0.isEq(nb0) && na1.isEq(nb1)) || (na0.isEq(nb1) && na1.isEq(nb0)) { return true } return false } // Triangle struct which defines the basic components of a triangle. // It's constructed from the nodes, it's edges and the circumcircle which describes the triangle circumference. type Triangle struct { Nodes []Node edges []edge circle circle } var t = Triangle{} // newTriangle creates a new triangle which circumcircle encloses the point to be added. func (t Triangle) newTriangle(p0, p1, p2 Node) Triangle { t.Nodes = []Node{p0, p1, p2} t.edges = []edge{{newEdge(p0, p1)}, {newEdge(p1, p2)}, {newEdge(p2, p0)}} // Create a circumscribed circle of this triangle. // The circumcircle of a triangle is the circle which has the three vertices of the triangle lying on its circumference. 
circle := t.circle ax, ay := p1.X-p0.X, p1.Y-p0.Y bx, by := p2.X-p0.X, p2.Y-p0.Y m := p1.X*p1.X - p0.X*p0.X + p1.Y*p1.Y - p0.Y*p0.Y u := p2.X*p2.X - p0.X*p0.X + p2.Y*p2.Y - p0.Y*p0.Y s := 1.0 / (2.0 * (float64(ax*by) - float64(ay*bx))) circle.x = int(float64((p2.Y-p0.Y)*m+(p0.Y-p1.Y)*u) * s) circle.y = int(float64((p0.X-p2.X)*m+(p1.X-p0.X)*u) * s) // Calculate the distance between the node points and the triangle circumcircle. dx := p0.X - circle.x dy := p0.Y - circle.y // Calculate the circle radius. circle.radius = dx*dx + dy*dy t.circle = circle return t } // Delaunay is the main entry struct which defines the delaunay system. type Delaunay struct { width int height int triangles []Triangle } // Init initialize the delaunay structure. func (d *Delaunay) Init(width, height int) *Delaunay { d.width = width d.height = height d.triangles = nil d.clear() return d } // clear method clears the delaunay triangles slice. func (d *Delaunay) clear() { p0 := newNode(0, 0) p1 := newNode(d.width, 0) p2 := newNode(d.width, d.height) p3 := newNode(0, d.height) // Create the supertriangle, an artificial triangle which encompasses all the points. // At the end of the triangulation process any triangles which share edges with the supertriangle are deleted from the triangle list. d.triangles = []Triangle{t.newTriangle(p0, p1, p2), t.newTriangle(p0, p2, p3)} } // Insert new triangles into the delaunay triangles slice. func (d *Delaunay) Insert(points []Point) *Delaunay { var ( i, j, k int x, y, dx, dy int distSq int polygon []edge edges []edge temps []Triangle ) for k = 0; k < len(points); k++ { x = points[k].x y = points[k].y triangles := d.triangles edges = nil temps = nil for i = 0; i < len(d.triangles); i++ { t := triangles[i] //Check whether the points are inside the triangle circumcircle. circle := t.circle dx = circle.x - x dy = circle.y - y distSq = dx*dx + dy*dy if distSq < circle.radius { // Save triangle edges in case they are included. 
edges = append(edges, t.edges[0], t.edges[1], t.edges[2]) } else { // If not included carry over. temps = append(temps, t) } } polygon = nil // Check duplication of edges, delete if duplicates. edgesLoop: for i = 0; i < len(edges); i++ { edge := edges[i] for j = 0; j < len(polygon); j++ { // Remove identical edges. if edge.isEq(polygon[j]) { // Remove polygon from the polygon slice. polygon = append(polygon[:j], polygon[j+1:]...) continue edgesLoop } } // Insert new edge into the polygon slice. polygon = append(polygon, edge) } for i = 0; i < len(polygon); i++ { edge := polygon[i] temps = append(temps, t.newTriangle(edge.nodes[0], edge.nodes[1], newNode(x, y))) } d.triangles = temps } return d } // GetTriangles return the generated triangles. func (d *Delaunay) GetTriangles() []Triangle { return d.triangles }
server/vendor/github.com/esimov/triangle/delaunay.go
0.862699
0.736092
delaunay.go
starcoder
package plan import ( "github.com/insionng/yougam/libraries/pingcap/tidb/ast" "github.com/insionng/yougam/libraries/pingcap/tidb/parser/opcode" ) const ( rateFull float64 = 1 rateEqual float64 = 0.01 rateNotEqual float64 = 0.99 rateBetween float64 = 0.1 rateGreaterOrLess float64 = 0.33 rateIsFalse float64 = 0.1 rateIsNull float64 = 0.1 rateLike float64 = 0.1 ) // guesstimateFilterRate guesstimates the filter rate for an expression. // For example: a table has 100 rows, after filter expression 'a between 0 and 9', // 10 rows returned, then the filter rate is '0.1'. // It only depends on the expression type, not the expression value. // The expr parameter should contain only one column name. func guesstimateFilterRate(expr ast.ExprNode) float64 { switch x := expr.(type) { case *ast.BetweenExpr: return rateBetween case *ast.BinaryOperationExpr: return guesstimateBinop(x) case *ast.ColumnNameExpr: return rateFull case *ast.IsNullExpr: return guesstimateIsNull(x) case *ast.IsTruthExpr: return guesstimateIsTrue(x) case *ast.ParenthesesExpr: return guesstimateFilterRate(x.Expr) case *ast.PatternInExpr: return guesstimatePatternIn(x) case *ast.PatternLikeExpr: return guesstimatePatternLike(x) } return rateFull } func guesstimateBinop(expr *ast.BinaryOperationExpr) float64 { switch expr.Op { case opcode.AndAnd: // P(A and B) = P(A) * P(B) return guesstimateFilterRate(expr.L) * guesstimateFilterRate(expr.R) case opcode.OrOr: // P(A or B) = P(A) + P(B) – P(A and B) rateL := guesstimateFilterRate(expr.L) rateR := guesstimateFilterRate(expr.R) return rateL + rateR - rateL*rateR case opcode.EQ: return rateEqual case opcode.GT, opcode.GE, opcode.LT, opcode.LE: return rateGreaterOrLess case opcode.NE: return rateNotEqual } return rateFull } func guesstimateIsNull(expr *ast.IsNullExpr) float64 { if expr.Not { return rateFull - rateIsNull } return rateIsNull } func guesstimateIsTrue(expr *ast.IsTruthExpr) float64 { if expr.True == 0 { if expr.Not { return rateFull - rateIsFalse } 
return rateIsFalse } if expr.Not { return rateIsFalse + rateIsNull } return rateFull - rateIsFalse - rateIsNull } func guesstimatePatternIn(expr *ast.PatternInExpr) float64 { if len(expr.List) > 0 { rate := rateEqual * float64(len(expr.List)) if expr.Not { return rateFull - rate } return rate } return rateFull } func guesstimatePatternLike(expr *ast.PatternLikeExpr) float64 { if expr.Not { return rateFull - rateLike } return rateLike }
libraries/pingcap/tidb/optimizer/plan/filterrate.go
0.644561
0.547887
filterrate.go
starcoder
package packed import () // util/packed/BulkOperationPacked.java // Non-specialized BulkOperation for Packed format type BulkOperationPacked struct { *BulkOperationImpl bitsPerValue int longBlockCount int longValueCount int byteBlockCount int byteValueCount int mask int64 intMask int } func newBulkOperationPacked(bitsPerValue uint32) *BulkOperationPacked { self := &BulkOperationPacked{} self.bitsPerValue = int(bitsPerValue) assert(bitsPerValue > 0 && bitsPerValue <= 64) blocks := uint32(bitsPerValue) for (blocks & 1) == 0 { blocks = (blocks >> 1) } self.longBlockCount = int(blocks) self.longValueCount = 64 * self.longBlockCount / int(bitsPerValue) byteBlockCount := 8 * self.longBlockCount byteValueCount := self.longValueCount for (byteBlockCount&1) == 0 && (byteValueCount&1) == 0 { byteBlockCount = (byteBlockCount >> 1) byteValueCount = (byteValueCount >> 1) } self.byteBlockCount = byteBlockCount self.byteValueCount = byteValueCount if bitsPerValue == 64 { self.mask = ^int64(0) } else { self.mask = (int64(1) << bitsPerValue) - 1 } self.intMask = int(self.mask) assert(self.longValueCount*int(bitsPerValue) == 64*self.longBlockCount) self.BulkOperationImpl = newBulkOperationImpl(self) return self } func (p *BulkOperationPacked) LongBlockCount() int { return p.longBlockCount } func (p *BulkOperationPacked) LongValueCount() int { return p.longValueCount } func (p *BulkOperationPacked) ByteBlockCount() int { return p.byteBlockCount } func (p *BulkOperationPacked) ByteValueCount() int { return p.byteValueCount } func (p *BulkOperationPacked) DecodeLongToLong(blocks, values []int64, iterations int) { blocksOff, valuesOff := 0, 0 bitsLeft := 64 for i := 0; i < p.longValueCount*iterations; i++ { bitsLeft -= p.bitsPerValue if bitsLeft < 0 { values[valuesOff] = ((blocks[blocksOff] & ((int64(1) << uint(p.bitsPerValue+bitsLeft)) - 1)) << uint(-bitsLeft)) | int64(uint64(blocks[blocksOff+1])>>uint(64+bitsLeft)) valuesOff++ blocksOff++ bitsLeft += 64 } else { values[valuesOff] = 
int64(uint64(blocks[blocksOff])>>uint(bitsLeft)) & p.mask valuesOff++ } } } func (p *BulkOperationPacked) decodeByteToLong(blocks []byte, values []int64, iterations int) { panic("niy") } func (p *BulkOperationPacked) encodeLongToLong(values, blocks []int64, iterations int) { var nextBlock int64 = 0 var bitsLeft int = 64 valuesOffset, blocksOffset := 0, 0 for i, limit := 0, p.longValueCount*iterations; i < limit; i++ { bitsLeft -= p.bitsPerValue switch { case bitsLeft > 0: nextBlock |= (values[valuesOffset] << uint(bitsLeft)) valuesOffset++ case bitsLeft == 0: nextBlock |= values[valuesOffset] valuesOffset++ blocks[blocksOffset] = nextBlock blocksOffset++ nextBlock = 0 bitsLeft = 64 default: // bitsLeft < 0 nextBlock |= int64(uint64(values[valuesOffset]) >> uint(-bitsLeft)) blocks[blocksOffset] = nextBlock blocksOffset++ nextBlock = (values[valuesOffset] & ((1 << uint(-bitsLeft)) - 1) << uint(64+bitsLeft)) valuesOffset++ bitsLeft += 64 } } } func (p *BulkOperationPacked) encodeLongToByte(values []int64, blocks []byte, iterations int) { var nextBlock int = 0 var bitsLeft int = 8 valuesOffset, blocksOffset := 0, 0 for i, limit := 0, p.byteValueCount*iterations; i < limit; i++ { v := values[valuesOffset] valuesOffset++ assert(UnsignedBitsRequired(v) <= p.bitsPerValue) if p.bitsPerValue < bitsLeft { // just buffer nextBlock |= int(v << uint(bitsLeft-p.bitsPerValue)) bitsLeft -= p.bitsPerValue } else { // flush as many blocks as possible bits := uint(p.bitsPerValue - bitsLeft) blocks[blocksOffset] = byte(nextBlock | int(uint64(v)>>bits)) blocksOffset++ for bits >= 8 { bits -= 8 blocks[blocksOffset] = byte(uint64(v) >> bits) blocksOffset++ } // then buffer bitsLeft = int(8 - bits) nextBlock = int((v & ((1 << bits) - 1)) << uint(bitsLeft)) } } assert(bitsLeft == 8) } func (p *BulkOperationPacked) EncodeIntToByte(values []int, blocks []byte, iterations int) { valuesOff, blocksOff := 0, 0 nextBlock, bitsLeft := 0, 8 for i := 0; i < p.byteValueCount*iterations; i++ { v := 
values[valuesOff] valuesOff++ assert(BitsRequired(int64(v)) <= p.bitsPerValue) if p.bitsPerValue < bitsLeft { // just buffer nextBlock |= (v << uint(bitsLeft-p.bitsPerValue)) bitsLeft -= p.bitsPerValue } else { // flush as many blocks as possible bits := p.bitsPerValue - bitsLeft blocks[blocksOff] = byte(nextBlock | int(uint(v)>>uint(bits))) blocksOff++ for bits >= 8 { bits -= 8 blocks[blocksOff] = byte(uint(v) >> uint(bits)) blocksOff++ } // then buffer bitsLeft = 8 - bits nextBlock = (v & ((1 << uint(bits)) - 1)) << uint(bitsLeft) } } assert(bitsLeft == 8) } // util/packed/BulkOperationPackedSingleBlock.java // Non-specialized BulkOperation for PACKED_SINGLE_BLOCK format type BulkOperationPackedSingleBlock struct { *BulkOperationImpl bitsPerValue int valueCount int mask int64 } const BLOCK_COUNT = 1 func newBulkOperationPackedSingleBlock(bitsPerValue uint32) BulkOperation { // log.Printf("Initializing BulkOperationPackedSingleBlock(%v)", bitsPerValue) self := &BulkOperationPackedSingleBlock{ bitsPerValue: int(bitsPerValue), valueCount: 64 / int(bitsPerValue), mask: (int64(1) << bitsPerValue) - 1, } self.BulkOperationImpl = newBulkOperationImpl(self) return self } func (p *BulkOperationPackedSingleBlock) LongBlockCount() int { return BLOCK_COUNT } func (p *BulkOperationPackedSingleBlock) ByteBlockCount() int { return BLOCK_COUNT * 8 } func (p *BulkOperationPackedSingleBlock) LongValueCount() int { return p.valueCount } func (p *BulkOperationPackedSingleBlock) ByteValueCount() int { return p.valueCount } func (p *BulkOperationPackedSingleBlock) decodeLongs(block int64, values []int64) int { off := 0 values[off] = block & p.mask off++ for j := 1; j < p.valueCount; j++ { block = int64(uint64(block) >> uint(p.bitsPerValue)) values[off] = block & p.mask off++ } return off } func (p *BulkOperationPackedSingleBlock) encodeLongs(values []int64) int64 { off := 0 block := values[off] off++ for j := 1; j < p.valueCount; j++ { block |= (values[off] << uint(j*p.bitsPerValue)) 
off++ } return block } func (p *BulkOperationPackedSingleBlock) encodeInts(values []int) int64 { off := 0 block := int64(values[off]) off++ for j := 1; j < p.valueCount; j++ { block |= int64(values[off]) << uint(j*p.bitsPerValue) off++ } return block } func (p *BulkOperationPackedSingleBlock) DecodeLongToLong(blocks, values []int64, iterations int) { blocksOffset, valuesOffset := 0, 0 for i := 0; i < iterations; i++ { block := blocks[blocksOffset] blocksOffset++ valuesOffset = p.decodeLongs(block, values[valuesOffset:]) } } func (p *BulkOperationPackedSingleBlock) decodeByteToLong(blocks []byte, values []int64, iterations int) { panic("niy") } func (p *BulkOperationPackedSingleBlock) encodeLongToLong(values, blocks []int64, iterations int) { valuesOffset, blocksOffset := 0, 0 for i, limit := 0, iterations; i < limit; i++ { blocks[blocksOffset] = p.encodeLongs(values[valuesOffset:]) blocksOffset++ valuesOffset += p.valueCount } } func (p *BulkOperationPackedSingleBlock) encodeLongToByte(values []int64, blocks []byte, iterations int) { valuesOffset, blocksOffset := 0, 0 for i := 0; i < iterations; i++ { block := p.encodeLongs(values[valuesOffset:]) valuesOffset += p.valueCount blocksOffset += p.writeLong(block, blocks[blocksOffset:]) } } func (p *BulkOperationPackedSingleBlock) EncodeIntToByte(values []int, blocks []byte, iterations int) { valuesOffset, blocksOffset := 0, 0 for i := 0; i < iterations; i++ { block := p.encodeInts(values[valuesOffset:]) valuesOffset += p.valueCount blocksOffset += p.writeLong(block, blocks[blocksOffset:]) } }
core/util/packed/bulkop.go
0.58522
0.447279
bulkop.go
starcoder
package hoverfly import ( "encoding/json" "github.com/SpectoLabs/hoverfly/core/handlers/v2" "github.com/SpectoLabs/hoverfly/functional-tests" "github.com/SpectoLabs/hoverfly/functional-tests/testdata" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("Running Hoverfly with older simulations", func() { var ( hoverfly *functional_tests.Hoverfly ) BeforeEach(func() { hoverfly = functional_tests.NewHoverfly() }) AfterEach(func() { hoverfly.Stop() }) Context("v1 simulations", func() { BeforeEach(func() { hoverfly.Start() }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V1JsonPayload) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.V5JsonPayload), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) }) Context("v3 simulations", func() { BeforeEach(func() { hoverfly.Start() }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V3Delays) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.Delays), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V3ClosestMissProof) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.ClosestMissProof), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V3ExactMatch) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.ExactMatch), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { 
hoverfly.ImportSimulation(testdata.V3GlobMatch) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.GlobMatch), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V3XmlMatch) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.XmlMatch), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V3XpathMatch) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.XpathMatch), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) }) Context("v4 simulations", func() { BeforeEach(func() { hoverfly.Start() }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V4QueryMatchers) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.QueryMatchers), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) It("should upgrade it to the latest simulation", func() { hoverfly.ImportSimulation(testdata.V4HeaderMatchers) upgradedSimulation := hoverfly.ExportSimulation() simulation := v2.SimulationViewV5{} json.Unmarshal([]byte(testdata.HeaderMatchers), &simulation) Expect(upgradedSimulation.DataViewV5).To(Equal(simulation.DataViewV5)) }) }) })
functional-tests/core/ft_simulation_upgrading.go
0.526099
0.618694
ft_simulation_upgrading.go
starcoder
package common import ( "github.com/ajstarks/svgo" "fmt" "math" "math/rand" "os" ) type WeightedBoundable struct { Boundable Boundable Weight float64 } func (wb WeightedBoundable) Bounds() Rectangle { return wb.Boundable.Bounds() } type WidthBoundable struct { Boundable Boundable Width float64 } func (b WidthBoundable) Bounds() Rectangle { return b.Boundable.Bounds() } type ColoredBoundable struct { Boundable Boundable Color string } func (b ColoredBoundable) Bounds() Rectangle { return b.Boundable.Bounds() } type SvgLabel struct { Point Point Text string } func (b SvgLabel) Bounds() Rectangle { return b.Point.Bounds() } type EmbeddedImage struct { Src Point Dst Point Image string } func (img EmbeddedImage) Bounds() Rectangle { return EmptyRectangle.Extend(img.Src).Extend(img.Dst) } type WrappingBoundable interface { Unwrap() Boundable } type SVGOptions struct { Scale float64 Zoom float64 ScaleX float64 ScaleY float64 Sparse float64 Bounds Rectangle Blur float64 StrokeWidth float64 Color string Unflip bool } func CreateSVG(fname string, elements [][]Boundable, options SVGOptions) error { file, err := os.Create(fname) if err != nil { return err } defer file.Close() // get bounds over all elements var r Rectangle if options.Bounds.Max.X > options.Bounds.Min.X { r = options.Bounds } else { r = EmptyRectangle for _, l := range elements { for _, element := range l { r = r.ExtendRect(element.Bounds()) } } } // autoscale if requested scaleFactor := options.Scale if scaleFactor == 0 { l := math.Max(r.Lengths().X, r.Lengths().Y) scaleFactor = 1000 / l } scale := Point{scaleFactor, scaleFactor} if options.ScaleX != 0 && options.ScaleY != 0 { scale.X = options.ScaleX scale.Y = options.ScaleY } if options.Zoom != 0 { scale = scale.Scale(options.Zoom) } strokeWidth := options.StrokeWidth if strokeWidth == 0 { strokeWidth = 2 } canvas := svg.New(file) length := r.Lengths().MulPairwise(scale) canvas.Start(int(length.X) + 1, int(length.Y) + 1) canvas.Rect(0, 0, int(length.X) + 1, 
int(length.Y) + 1, "fill:white") if options.Blur > 0 { canvas.Filter("blur") canvas.FeGaussianBlur(svg.Filterspec{In: "SourceGraphic"}, options.Blur, options.Blur) canvas.Fend() canvas.Group("filter=\"url(#blur)\"") canvas.Rect(0, 0, int(length.X) + 1, int(length.Y) + 1, "fill:white") } transform := func(point Point) (int, int) { p := point.Sub(r.Min).MulPairwise(scale) if options.Unflip { return int(p.X), int(p.Y) } else { return int(p.X), int(length.Y - p.Y) } } var drawElement func(element Boundable, color string, weight float64, width float64) error drawElement = func(element Boundable, color string, weight float64, width float64) error { switch element := element.(type) { case Point: if !r.Contains(element) { return nil } x, y := transform(element) style := fmt.Sprintf("fill:%s", color) if weight != 1 { style += fmt.Sprintf(";opacity:%f", weight) } canvas.Circle(x, y, int(width), style) case Segment: if !r.Intersects(element.Bounds()) { return nil } srcX, srcY := transform(element.Start) dstX, dstY := transform(element.End) style := fmt.Sprintf("stroke:%s;stroke-width:%f", color, width) if weight != 1 { style += fmt.Sprintf(";opacity:%f", weight) } canvas.Line(srcX, srcY, dstX, dstY, style) case *Graph: for _, edge := range element.Edges { if err := drawElement(edge.Segment(), color, weight, width); err != nil { return err } } case Traces: for _, trace := range element { for j := 1; j < len(trace.Observations); j++ { if err := drawElement(Segment{trace.Observations[j - 1].Point, trace.Observations[j].Point}, color, weight, width); err != nil { return err } } } case SvgLabel: style := fmt.Sprintf("fill:%s;font-size:%f", color, width) x, y := transform(element.Point) canvas.Text(x, y, element.Text, style) case WeightedBoundable: return drawElement(element.Boundable, color, element.Weight, width) case WidthBoundable: return drawElement(element.Boundable, color, weight, element.Width) case ColoredBoundable: return drawElement(element.Boundable, element.Color, 
weight, width) case EmbeddedImage: srcX, srcY := transform(element.Src) dstX, dstY := transform(element.Dst) canvas.Image(srcX, srcY, dstX - srcX, dstY - srcY, element.Image) case WrappingBoundable: return drawElement(element.Unwrap(), color, weight, width) default: return fmt.Errorf("failed to process an element: %v", element) } return nil } var colors []string if options.Color != "" { colors = []string{options.Color} } else { colors = []string{"red", "blue", "green", "purple", "olive", "gray"} } for i, l := range elements { color := colors[i % len(colors)] for _, element := range l { if options.Sparse > 0 && rand.Float64() >= options.Sparse { continue } if err := drawElement(element, color, 1, strokeWidth); err != nil { return err } } } if options.Blur > 0 { canvas.Gend() } canvas.End() return nil }
fbastani-solution/common/svg.go
0.66454
0.410343
svg.go
starcoder
package main import ( "sort" ) /***************************************************************************************************** * * Given a non-empty list of words, return the k most frequent elements. * Your answer should be sorted by frequency from highest to lowest. If two words have the same * frequency, then the word with the lower alphabetical order comes first. * * Example 1: * * Input: ["i", "love", "leetcode", "i", "love", "coding"], k = 2 * Output: ["i", "love"] * Explanation: "i" and "love" are the two most frequent words. * Note that "i" comes before "love" due to a lower alphabetical order. * * Example 2: * * Input: ["the", "day", "is", "sunny", "the", "the", "the", "sunny", "is", "is"], k = 4 * Output: ["the", "is", "sunny", "day"] * Explanation: "the", "is", "sunny" and "day" are the four most frequent words, * with the number of occurrence being 4, 3, 2 and 1 respectively. * * Note: * * You may assume k is always valid, 1 &le; k &le; number of unique elements. * Input words contain only lowercase letters. * * Follow up: * * Try to solve it in O(n log k) time and O(n) extra space. 
* ******************************************************************************************************/ // 思路1 堆 todo @zhangshilin // 思路2 hash +sort //time:66.67 mem:55.56 func topKFrequent(words []string, k int) []string { // 处理边界 if len(words) < 1 { return words } type wordInfo struct { word string times int } occo := map[string]int{} for _, v := range words { occo[v]++ } sortList := []wordInfo{} for k, v := range occo { sortList = append(sortList, wordInfo{k, v}) } // 排序 sort.Slice(sortList, func(i, j int) bool { if sortList[i].times == sortList[j].times { return sortList[i].word < sortList[j].word } return sortList[i].times>sortList[j].times }) minNum := func(x, y int) int { if x <= y { return x } return y }(len(sortList), k) ans := []string{} for i := 0; i < minNum; i++ { ans = append(ans, sortList[i].word) } return ans } func main() { topKFrequent([]string{ "i", "love", "leetcode", "i", "love", "coding", },2) }
leetcode/692.top_k_frequent_words/692.TopKFrequentWords_zmillionaire.go
0.500732
0.439146
692.TopKFrequentWords_zmillionaire.go
starcoder
package moving_average import ( "github.com/influxdata/flux" "github.com/influxdata/flux/array" "github.com/influxdata/flux/execute" "github.com/influxdata/flux/values" ) type ExponentialMovingAverage struct { inTimePeriod int i []int count []float64 value []float64 periodReached []bool lastVal []interface{} Multiplier float64 ema [][]interface{} } func New(inTimePeriod int, numCols int) *ExponentialMovingAverage { return &ExponentialMovingAverage{ inTimePeriod: inTimePeriod, i: make([]int, numCols), count: make([]float64, numCols), value: make([]float64, numCols), periodReached: make([]bool, numCols), lastVal: make([]interface{}, numCols), Multiplier: 2 / float64(inTimePeriod+1), ema: make([][]interface{}, numCols), } } func (r *ExponentialMovingAverage) PassThrough(vs *ArrayContainer, b execute.TableBuilder, bj int) error { j := 0 for ; r.i[bj] < r.inTimePeriod && j < vs.Len(); r.i[bj]++ { if vs.IsNull(j) { r.lastVal[bj] = nil } else { r.lastVal[bj] = vs.OrigValue(j) } j++ } if r.i[bj] == r.inTimePeriod && !r.periodReached[bj] { if vs.IsNull(j - 1) { if err := b.AppendNil(bj); err != nil { return err } } else { if err := b.AppendValue(bj, values.New(vs.OrigValue(j-1))); err != nil { return err } } r.periodReached[bj] = true } for ; r.i[bj] >= r.inTimePeriod && j < vs.Len(); r.i[bj]++ { if vs.IsNull(j) { if err := b.AppendNil(bj); err != nil { return err } } else { if err := b.AppendValue(bj, values.New(vs.OrigValue(j))); err != nil { return err } } j++ } return nil } func (r *ExponentialMovingAverage) DoNumeric(vs *ArrayContainer, b execute.TableBuilder, bj int, doExponentialMovingAverage bool, appendToTable bool) error { if !doExponentialMovingAverage { return r.PassThrough(vs, b, bj) } var appendVal func(v float64) error var appendNil func() error if appendToTable { appendVal = func(v float64) error { if err := b.AppendFloat(bj, v); err != nil { return err } return nil } appendNil = func() error { if err := b.AppendNil(bj); err != nil { return err } return nil 
} } else { appendVal = func(v float64) error { r.ema[bj] = append(r.ema[bj], v) return nil } appendNil = func() error { r.ema[bj] = append(r.ema[bj], nil) return nil } } j := 0 // Build the first value of the EMA for ; r.i[bj] < r.inTimePeriod && j < vs.Len(); r.i[bj]++ { if !vs.IsNull(j) { r.value[bj] += vs.Value(j).Float() r.count[bj]++ r.lastVal[bj] = vs.OrigValue(j) } else { r.lastVal[bj] = nil } j++ } // Append the first value of the EMA if r.i[bj] == r.inTimePeriod && !r.periodReached[bj] { if r.count[bj] != 0 { r.value[bj] = r.value[bj] / r.count[bj] if err := appendVal(r.value[bj]); err != nil { return err } } else { if err := appendNil(); err != nil { return err } } r.periodReached[bj] = true } l := vs.Len() for ; j < l; j++ { if vs.IsNull(j) { if r.count[bj] == 0 { if err := appendNil(); err != nil { return err } } else { if err := appendVal(r.value[bj]); err != nil { return err } } } else { cValue := vs.Value(j).Float() var ema float64 if r.count[bj] == 0 { ema = cValue r.count[bj]++ } else { ema = (cValue * r.Multiplier) + (r.value[bj] * (1.0 - r.Multiplier)) } if err := appendVal(ema); err != nil { return err } r.value[bj] = ema } r.i[bj]++ } return nil } func (r *ExponentialMovingAverage) PassThroughTime(vs *array.Int, b execute.TableBuilder, bj int) error { j := 0 for ; r.i[bj] < r.inTimePeriod && j < vs.Len(); r.i[bj]++ { if vs.IsNull(j) { r.lastVal[bj] = nil } else { r.lastVal[bj] = execute.Time(vs.Value(j)) } j++ } if r.i[bj] == r.inTimePeriod && !r.periodReached[bj] { if vs.IsNull(j - 1) { if err := b.AppendNil(bj); err != nil { return err } } else { if err := b.AppendTime(bj, execute.Time(vs.Value(j-1))); err != nil { return err } } r.periodReached[bj] = true } for ; r.i[bj] >= r.inTimePeriod && j < vs.Len(); r.i[bj]++ { if vs.IsNull(j) { if err := b.AppendNil(bj); err != nil { return err } } else { if err := b.AppendTime(bj, execute.Time(vs.Value(j))); err != nil { return err } } j++ } return nil } func (r *ExponentialMovingAverage) GetEMA(bj 
int) []interface{} { return r.ema[bj] } // The current EMA value func (r *ExponentialMovingAverage) Value(bj int) float64 { return r.value[bj] } // The last value encountered func (r *ExponentialMovingAverage) LastVal(bj int) interface{} { return r.lastVal[bj] } // Used when we need to do EMA step by step func (r *ExponentialMovingAverage) Add(v float64, bj int) float64 { var avg float64 if r.count[bj] == 0 { avg = v } else { lastAvg := r.value[bj] if !r.periodReached[bj] { avg = (lastAvg*float64(r.count[bj]) + v) / float64(r.count[bj]+1) } else { avg = (v * r.Multiplier) + (r.value[bj] * (1.0 - r.Multiplier)) } } r.value[bj] = avg if r.count[bj] < float64(r.inTimePeriod) { // don't just keep incrementing to prevent potential overflow r.count[bj]++ } if r.i[bj] == r.inTimePeriod-1 { r.periodReached[bj] = true } r.i[bj]++ return avg } // Used when we need to do EMA step by step // Skips over the null, increments cursor func (r *ExponentialMovingAverage) AddNull(bj int) { if r.i[bj] == r.inTimePeriod-1 { r.periodReached[bj] = true } r.i[bj]++ } // If we don't have enough values for a proper EMA, just append the last value (which is the average of the values so far) func (r *ExponentialMovingAverage) Finish(cols []flux.ColMeta, builder execute.TableBuilder, valueIdx int) error { for j := range cols { if !r.periodReached[j] { if j != valueIdx { if r.lastVal[j] == nil { if err := builder.AppendNil(j); err != nil { return err } } else { if err := builder.AppendValue(j, values.New(r.lastVal[j])); err != nil { return err } } } else { if r.count[j] != 0 { average := r.value[j] / r.count[j] if err := builder.AppendFloat(j, average); err != nil { return err } } else { if err := builder.AppendNil(j); err != nil { return err } } } } } return nil }
internal/moving_average/exponential_moving_average.go
0.606265
0.53692
exponential_moving_average.go
starcoder
package tpe import ( "sort" ) func ones1d(size int) []float64 { ones := make([]float64, size) for i := 0; i < size; i++ { ones[i] = 1 } return ones } func linspace(start, stop float64, num int, endPoint bool) []float64 { step := 0. if endPoint { if num == 1 { return []float64{start} } step = (stop - start) / float64(num-1) } else { if num == 0 { return []float64{} } step = (stop - start) / float64(num) } r := make([]float64, num, num) for i := 0; i < num; i++ { r[i] = start + float64(i)*step } return r } func choice(array []float64, idxs []int) []float64 { results := make([]float64, len(idxs)) for i, idx := range idxs { results[i] = array[idx] } return results } func location(array []float64, key float64) int { i := 0 size := len(array) for { mid := (i + size) / 2 if i == size { break } if array[mid] < key { i = mid + 1 } else { size = mid } } return i } func searchsorted(array, values []float64) []int { var indexes []int for _, val := range values { indexes = append(indexes, location(array, val)) } return indexes } func bincount(x []int, weights []float64, minlength int) []float64 { // Count the number of occurrences of each value in array of non-negative ints. 
// https://docs.scipy.org/doc/numpy/reference/generated/numpy.bincount.html counts := make([]float64, minlength) for i := range x { if x[i] > len(counts)-1 { for j := len(counts) - 1; j < x[i]; j++ { counts = append(counts, 0) } } if x[i] > len(weights)-1 { counts[x[i]]++ } else { counts[x[i]] += weights[x[i]] } } return counts } func clip(array []float64, min, max float64) { for i := range array { if array[i] < min { array[i] = min } else if array[i] > max { array[i] = max } } } func argSort2d(lossVals [][2]float64) []int { type sortable struct { index int lossVal [2]float64 } x := make([]sortable, len(lossVals)) for i := 0; i < len(lossVals); i++ { x[i] = sortable{ index: i, lossVal: lossVals[i], } } sort.SliceStable(x, func(i, j int) bool { if x[i].lossVal[0] == x[j].lossVal[0] { return x[i].lossVal[1] < x[j].lossVal[1] } return x[i].lossVal[0] < x[j].lossVal[0] }) results := make([]int, len(x)) for i := 0; i < len(x); i++ { results[i] = x[i].index } return results }
tpe/array.go
0.621081
0.406214
array.go
starcoder
package dsf import ( "encoding/binary" "fmt" "github.com/snmoore/go/audio" "reflect" ) // FmtChunk is the file structure of the fmt chunk within a DSD stream file. // See "DSF File Format Specification", v1.01, Sony Corporation. All data is // little-endian. This is exported to allow reading with binary.Read. type FmtChunk struct { // fmt chunk header. // 'f' , 'm' , 't' , ' ' (includes 1 space). Header [4]byte // Size of this chunk. // Usually 52 bytes. Size [8]byte // Format version. // 1. Version [4]byte // Format id. // 0: DSD raw. Identifier [4]byte // Channel type. // 1: mono // 2: stereo // 3: 3 channels // 4: quad // 5: 4 channels // 6: 5 channels // 7: 5.1 channels ChannelType [4]byte // Channel num. // 1: mono // 2: stereo // ... // 6: 6 channels ChannelNum [4]byte // Sampling frequency in Hertz. // 2822400, 5644800, 11289600 or 22579200. SamplingFrequency [4]byte // Bits per sample. // 1 or 8. BitsPerSample [4]byte // Sample count. // This is for 1 channel e.g. for n seconds of data: // SampleCount = SamplingFrequency * n. SampleCount [8]byte // Block size per channel. // 4096, unused samples should be filled with zero. BlockSize [4]byte // Reserved. // Filled with zero. Reserved [4]byte } // Header identifying a fmt chunk within a DSD stream file. const fmtChunkHeader = "fmt " // Size in bytes of a fmt chunk within a DSD stream file. const fmtChunkSize = 52 // Value of the Version field. const fmtVersion = 1 // Value of the Identifier field. const fmtIdentifier = 0 // Values of the ChannelType field and their meaning. var fmtChannelType = map[uint32]string{ 1: "mono", 2: "stereo", 3: "3 channels", 4: "quad", 5: "4 channels", 6: "5 channels", 7: "5.1 channels", } // Channel order corresponding to the ChannelType field. // The mapping for mono is undefined in the specification, but using center // seems reasonable and allows an easy way to check for mismatch between the // ChannelType and ChannelNum fields. 
var fmtChannelOrder = map[uint32][]audio.Channel{ 1: {audio.Center}, 2: {audio.FrontLeft, audio.FrontRight}, 3: {audio.FrontLeft, audio.FrontRight, audio.Center}, 4: {audio.FrontLeft, audio.FrontRight, audio.BackLeft, audio.BackRight}, 5: {audio.FrontLeft, audio.FrontRight, audio.Center, audio.LowFrequency}, 6: {audio.FrontLeft, audio.FrontRight, audio.Center, audio.BackLeft, audio.BackRight}, 7: {audio.FrontLeft, audio.FrontRight, audio.Center, audio.LowFrequency, audio.BackLeft, audio.BackRight}, } // Values of the ChannelNum field and their meaning. var fmtChannelNum = map[uint32]string{ 1: "mono", 2: "stereo", 3: "3 channels", 4: "4 channels", 5: "5 channels", 6: "6 channels", } // Values of the SamplingFrequency field and their meaning. // Only 2822400 and 5644800 are defined by the specification, but the other // rates are in active use. The strings are not defined within the specification // but are in active use. var fmtSamplingFrequency = map[uint32]string{ 2822400: "DSD64", 5644800: "DSD128", 11289600: "DSD256", 22579200: "DSD512", } // Values of the BitsPerSample field. var fmtBitsPerSample = map[uint32]struct{}{ 1: {}, 8: {}, } // Value of the BlockSize field. const fmtBlockSize = 4096 // Value of the Reserved field. const fmtReserved = 0 // readFmtChunk reads the fmt chunk and stores the result in d. 
func (d *decoder) readFmtChunk() error { // Read the entire chunk in one go err := binary.Read(d.reader, binary.LittleEndian, &d.fmt) if err != nil { return err } // Chunk header header := string(d.fmt.Header[:]) switch header { case fmtChunkHeader: // This is the expected chunk header case dsdChunkHeader: return fmt.Errorf("fmt: expected fmt chunk but found DSD chunk") case dataChunkHeader: return fmt.Errorf("fmt: expected fmt chunk but found data chunk") default: return fmt.Errorf("fmt: bad chunk header: %q\nfmt chunk: % x", header, d.fmt) } // Size of this chunk size := binary.LittleEndian.Uint64(d.fmt.Size[:]) if size != fmtChunkSize { return fmt.Errorf("fmt: bad chunk size: %v\nfmt chunk: % x", size, d.fmt) } // Format version formatVersion := binary.LittleEndian.Uint32(d.fmt.Version[:]) if formatVersion != fmtVersion { return fmt.Errorf("fmt: bad format version: %v\nfmt chunk: % x", formatVersion, d.fmt) } // Format id formatId := binary.LittleEndian.Uint32(d.fmt.Identifier[:]) if formatId != fmtIdentifier { return fmt.Errorf("fmt: bad format id: %v\nfmt chunk: % x", formatId, d.fmt) } // Channel Type channelType := binary.LittleEndian.Uint32(d.fmt.ChannelType[:]) channelTypeString, ok := fmtChannelType[channelType] if !ok { return fmt.Errorf("fmt: bad channel type: %v\nfmt chunk: % x", channelType, d.fmt) } // Channel order corresponding to the ChannelType field order, _ := fmtChannelOrder[channelType] // Channel num channelNum := binary.LittleEndian.Uint32(d.fmt.ChannelNum[:]) _, ok = fmtChannelNum[channelNum] if !ok { return fmt.Errorf("fmt: bad channel num: %v\nfmt chunk: % x", channelNum, d.fmt) } if channelNum != uint32(len(order)) { return fmt.Errorf("fmt: mismatch between channel type %v and channel num %v:\nfmt chunk: % x", channelType, channelNum, d.fmt) } // Sampling frequency samplingFrequency := binary.LittleEndian.Uint32(d.fmt.SamplingFrequency[:]) samplingFrequencyString, ok := fmtSamplingFrequency[samplingFrequency] if !ok { return 
fmt.Errorf("fmt: bad sampling frequency: %v\nfmt chunk: % x", samplingFrequency, d.fmt) } // Bits per sample bitsPerSample := binary.LittleEndian.Uint32(d.fmt.BitsPerSample[:]) _, ok = fmtBitsPerSample[bitsPerSample] if !ok { return fmt.Errorf("fmt: bad bits per sample: %v\nfmt chunk: % x", bitsPerSample, d.fmt) } // Sample count sampleCount := binary.LittleEndian.Uint64(d.fmt.SampleCount[:]) // Block size per channel blockSize := binary.LittleEndian.Uint32(d.fmt.BlockSize[:]) if blockSize != fmtBlockSize { return fmt.Errorf("fmt: bad block size: %v\nfmt chunk: % x", blockSize, d.fmt) } // Reserved reserved := binary.LittleEndian.Uint32(d.fmt.Reserved[:]) if reserved != fmtReserved { return fmt.Errorf("fmt: bad reserved bytes: %#x\nfmt chunk: % x", reserved, d.fmt) } // Log the fields of the chunk (only active if a log output has been set) d.logger.Print("\nFmt Chunk\n=========\n") d.logger.Printf("Chunk header: %q\n", header) d.logger.Printf("Size of this chunk: %v bytes\n", size) d.logger.Printf("Format version: %v\n", formatVersion) d.logger.Printf("Format id: %v\n", formatId) d.logger.Printf("Channel type: %v (%s)\n", channelType, channelTypeString) d.logger.Printf("Channel num: %v\n", channelNum) if len(order) > 1 { var s string for i, channel := range order { if i < len(order)-1 { s += channel.String() + ", " } else { s += channel.String() } } d.logger.Printf("Channel order: %v\n", s) } d.logger.Printf("Sampling frequency: %vHz (%s)\n", samplingFrequency, samplingFrequencyString) d.logger.Printf("Bits per sample: %v\n", bitsPerSample) d.logger.Printf("Sample count: %v\n", sampleCount) d.logger.Printf("Block size per channel: %v bytes\n", blockSize) // Store the information that is useful d.audio.Encoding = audio.DSD d.audio.NumChannels = uint(channelNum) d.audio.ChannelOrder = order d.audio.SamplingFrequency = uint(samplingFrequency) d.audio.BitsPerSample = uint(bitsPerSample) d.audio.BlockSize = uint(blockSize) // Prepare the audio.Audio in d to hold the 
encoded samples length := sampleCount if bitsPerSample == 1 { length = (length + 7) / 8 // fit up to 8 samples into 1 byte } if (length % uint64(blockSize)) > 0 { // pad to the block size length += uint64(blockSize) - (length % uint64(blockSize)) } length *= uint64(channelNum) // same amount for each channel d.audio.EncodedSamples = make([]byte, length) return nil } // writeFmtChunk writes the fmt chunk. func (e *encoder) writeFmtChunk() error { // Chunk header header := fmtChunkHeader copy(e.fmt.Header[:], header) // Size of this chunk size := uint64(fmtChunkSize) binary.LittleEndian.PutUint64(e.fmt.Size[:], size) // Format version formatVersion := uint32(fmtVersion) binary.LittleEndian.PutUint32(e.fmt.Version[:], formatVersion) // Format id formatId := uint32(fmtIdentifier) binary.LittleEndian.PutUint32(e.fmt.Identifier[:], formatId) // Channel type var channelType uint32 for key, order := range fmtChannelOrder { if reflect.DeepEqual(e.audio.ChannelOrder, order) { channelType = key } } if channelType == 0 { var s string for i, channel := range e.audio.ChannelOrder { if i < len(e.audio.ChannelOrder)-1 { s += channel.String() + ", " } else { s += channel.String() } } return fmt.Errorf("fmt: unsupported channel ordering: %v", s) } channelTypeString, _ := fmtChannelType[channelType] binary.LittleEndian.PutUint32(e.fmt.ChannelType[:], channelType) // Channel num channelNum := uint32(e.audio.NumChannels) if channelNum > 1 && (channelNum != uint32(len(e.audio.ChannelOrder))) { return fmt.Errorf("fmt: mismatch between num channels and channel order: %v, %v", channelNum, e.audio.ChannelOrder) } binary.LittleEndian.PutUint32(e.fmt.ChannelNum[:], channelNum) // SamplingFrequency samplingFrequency := uint32(e.audio.SamplingFrequency) samplingFrequencyString, ok := fmtSamplingFrequency[samplingFrequency] if !ok { return fmt.Errorf("fmt: unsupported sampling frequency: %v", samplingFrequency) } binary.LittleEndian.PutUint32(e.fmt.SamplingFrequency[:], samplingFrequency) // 
Bits per sample bitsPerSample := uint32(e.audio.BitsPerSample) _, ok = fmtBitsPerSample[bitsPerSample] if !ok { return fmt.Errorf("fmt: unsupported bits per sample: %v", bitsPerSample) } binary.LittleEndian.PutUint32(e.fmt.BitsPerSample[:], bitsPerSample) // SampleCount // Log the fields of the chunk (only active if a log output has been set) e.logger.Print("\nFmt Chunk\n=========\n") e.logger.Printf("Chunk header: %q\n", header) e.logger.Printf("Size of this chunk: %v\n", size) e.logger.Printf("Format version: %v\n", formatVersion) e.logger.Printf("Format id: %v\n", formatId) e.logger.Printf("Channel type: %v (%s)\n", channelType, channelTypeString) e.logger.Printf("Channel num: %v\n", channelNum) if len(e.audio.ChannelOrder) > 1 { var s string for i, channel := range e.audio.ChannelOrder { if i < len(e.audio.ChannelOrder)-1 { s += channel.String() + ", " } else { s += channel.String() } } e.logger.Printf("Channel order: %v\n", s) } e.logger.Printf("Sampling frequency: %vHz (%s)\n", samplingFrequency, samplingFrequencyString) e.logger.Printf("Bits per sample: %v\n", bitsPerSample) // Write the entire chunk in one go err := binary.Write(e.writer, binary.LittleEndian, &e.fmt) if err != nil { return err } return nil }
audio/dsf/fmt.go
0.668664
0.472197
fmt.go
starcoder
package std //* import ( "github.com/mb0/xelf/cor" "github.com/mb0/xelf/exp" "github.com/mb0/xelf/lit" "github.com/mb0/xelf/typ" ) var ErrExpectNumer = cor.StrError("expected numer argument") func opAdd(r, n float64) (float64, error) { return r + n, nil } func opMul(r, n float64) (float64, error) { return r * n, nil } // addSpec adds up all arguments and converts the sum to the first argument's type. var addSpec = core.add(SpecDXX("<form add @1|num plain:list|num @1>", func(x CallCtx) (exp.El, error) { return execNums(x, 0, opAdd) })) // mulSpec multiplies all arguments and converts the product to the first argument's type. var mulSpec = core.add(SpecDXX("<form mul @1|num plain:list|num @1>", func(x CallCtx) (exp.El, error) { return execNums(x, 1, opMul) })) // subSpec subtracts the sum of the rest from the first argument and // converts to the first argument's type. var subSpec = core.add(SpecDXX("<form sub @1|num plain:list|num @1>", func(x CallCtx) (exp.El, error) { err := x.Layout.Eval(x.Prog, x.Env, x.Hint) if err != nil { if err != exp.ErrUnres { return x.Call, err } } fst := x.Arg(0) rest := x.Args(1) n := getNumer(fst) ctx := numCtx{} if n == nil { ctx.idx = -1 } err = redNums(rest, &ctx, opAdd) if err != nil { return x.Call, err } if n == nil { if ctx.idx >= 0 { ctx.unres[ctx.idx] = &exp.Atom{Lit: lit.Num(ctx.res)} } x.Groups[1] = ctx.unres return x.Call, exp.ErrUnres } var l lit.Lit = lit.Num(n.Num() - ctx.res) if fst.Typ() != typ.Num { l, err = lit.Convert(l, fst.Typ(), 0) if err != nil { return nil, err } } if len(ctx.unres) != 0 { x.Groups[0] = []exp.El{&exp.Atom{Lit: l, Src: fst.Source()}} x.Groups[1] = ctx.unres return x.Call, exp.ErrUnres } return &exp.Atom{Lit: l}, nil })) // divSpec divides the product of the rest from the first argument. // If the first argument is an int div, integer division is used, otherwise it uses float division. // The result is converted to the first argument's type. 
var divSpec = core.add(SpecDXX("<form div @1|num plain:list|num @1>", func(x CallCtx) (exp.El, error) { err := x.Layout.Eval(x.Prog, x.Env, x.Hint) if err != nil { if err != exp.ErrUnres { return x.Call, err } } fst := x.Arg(0) n := getNumer(fst) ctx := numCtx{res: 1} if n == nil { ctx.idx = -1 } err = redNums(x.Args(1), &ctx, opMul) if err != nil { return x.Call, err } if n == nil { if ctx.idx >= 0 { ctx.unres[ctx.idx] = &exp.Atom{Lit: lit.Num(ctx.res)} } x.Call.Groups[1] = ctx.unres return x.Call, exp.ErrUnres } if ctx.res == 0 { return nil, cor.Error("zero devision") } isint := fst.Typ().Kind&typ.MaskElem == typ.KindInt if isint { ctx.res = float64(int64(n.Num()) / int64(ctx.res)) } else { ctx.res = n.Num() / ctx.res } var l lit.Lit = lit.Num(ctx.res) if fst.Typ() != typ.Num { l, err = lit.Convert(l, fst.Typ(), 0) if err != nil { return nil, err } } la := &exp.Atom{Lit: l, Src: fst.Source()} if len(ctx.unres) != 0 { x.Groups[0] = []exp.El{la} x.Groups[1] = ctx.unres return x.Call, exp.ErrUnres } return la, nil })) // remSpec calculates the remainder of the first two arguments and always returns an int. var remSpec = core.add(SpecDX("<form rem int int int>", func(x CallCtx) (exp.El, error) { err := x.Layout.Eval(x.Prog, x.Env, typ.Void) if err != nil { return x.Call, err } res, aok := getNum(x.Arg(0)) mod, bok := getNum(x.Arg(1)) if !aok || !bok { return x.Call, exp.ErrUnres } return &exp.Atom{Lit: lit.Int(res.Num()) % lit.Int(mod.Num())}, nil })) func getNum(el exp.El) (lit.Numeric, bool) { if a, ok := el.(*exp.Atom); ok { if n, ok := a.Lit.(lit.Numeric); ok { return n, ok } } return nil, false } // absSpec returns the argument with the absolute numeric value. var absSpec = core.add(SpecDX("<form abs @1|num @1>", func(x CallCtx) (fst exp.El, err error) { return sign(x, false) })) // negSpec returns the argument with the negated numeric value. 
var negSpec = core.add(SpecDX("<form neg @1|num @1>", func(x CallCtx) (fst exp.El, err error) { return sign(x, true) })) func sign(x CallCtx, neg bool) (_ exp.El, err error) { err = x.Layout.Eval(x.Prog, x.Env, x.Hint) if err != nil { return x.Call, err } fst := x.Arg(0).(*exp.Atom) switch v := fst.Lit.(type) { case lit.Int: if neg || v < 0 { fst.Lit = -v } case lit.Num: if neg || v < 0 { fst.Lit = -v } case lit.Real: if neg || v < 0 { fst.Lit = -v } case lit.Numeric: n := v.Num() if !neg && n >= 0 { break } nl := lit.Num(-n) if a, ok := v.(lit.Proxy); ok { err = a.Assign(nl) if err != nil { return nil, err } } else { fst.Lit, err = lit.Convert(nl, v.Typ(), 0) if err != nil { return nil, err } } default: return nil, cor.Errorf("%v got %T", ErrExpectNumer, fst) } return fst, nil } // minSpec returns the argument with the smallest numeric value or an error. var minSpec = core.add(SpecDX("<form min @1|num plain?:list|@1 @1>", func(x CallCtx) (exp.El, error) { var i int return execNums(x, 0, func(r, n float64) (float64, error) { if i++; i > 0 && r < n { return r, nil } return n, nil }) })) // maxSpec returns the argument with the greatest numeric value or an error. 
var maxSpec = core.add(SpecDX("<form max @1|num plain?:list|@1 @1>", func(x CallCtx) (exp.El, error) { var i int return execNums(x, 0, func(r, n float64) (float64, error) { if i++; i > 0 && r > n { return r, nil } return n, nil }) })) func getNumer(e exp.El) lit.Numeric { if a, ok := e.(*exp.Atom); ok { v, _ := deopt(a.Lit).(lit.Numeric) return v } return nil } type numOp = func(r, e float64) (float64, error) func deopt(l lit.Lit) lit.Lit { if o, ok := l.(lit.Opter); ok { if l = o.Some(); l == nil { t, _ := o.Typ().Deopt() l = lit.Zero(t) } } return l } func execNums(x CallCtx, res float64, f numOp) (exp.El, error) { err := x.Layout.Eval(x.Prog, x.Env, x.Hint) if err != nil { if err != exp.ErrUnres { return x.Call, err } } part := err != nil ctx := numCtx{res: res, idx: -1} fst := x.Arg(0) if part { ctx.unres = []exp.El{fst} } n := getNumer(fst) if n != nil { ctx.idx = 0 ctx.res = n.Num() } err = redNums(x.Args(1), &ctx, f) if err != nil { return x.Call, err } if len(ctx.unres) == 0 { var l lit.Lit = lit.Num(ctx.res) if fst.Typ() != typ.Num { l, err = lit.Convert(l, fst.Typ(), 0) if err != nil { return nil, err } } return &exp.Atom{Lit: l}, nil } if ctx.idx >= 0 && ctx.idx < len(ctx.unres) { ctx.unres[ctx.idx] = &exp.Atom{Lit: lit.Num(ctx.res)} } x.Groups[0] = ctx.unres[:1] x.Groups[1] = ctx.unres[1:] return x.Call, exp.ErrUnres } type numCtx struct { res float64 idx int unres []exp.El } func redNums(args []exp.El, c *numCtx, f numOp) (err error) { for _, arg := range args { v := getNumer(arg) if v == nil { c.unres = append(c.unres, arg) continue } if c.idx < 0 { c.idx = len(c.unres) c.unres = append(c.unres, arg) } c.res, err = f(c.res, v.Num()) if err != nil { return err } } return nil }
std/arit.go
0.556641
0.409811
arit.go
starcoder
package spogoto // CursorCommands are functions that operate on the Cursor manipulating // its position. type CursorCommands map[string]func(RunSet) // RunSet is a container for a Spogoto code's execution environment. // The RunSet contains the DataStacks that the code will operate on as // well as other information regarding a code's execution. type RunSet interface { RegisterStack(string, DataStack) Stack(string) DataStack Ok(string, int64) bool Bad(string, int64) bool DataStacks() map[string]DataStack DataStack(string) DataStack Cursor() *Cursor CursorCommand(string) CursorCommands() CursorCommands IncrementInstructionCount() InstructionCount() int64 InitializeStack(string, Elements) } // Cursor is a representation of a pointer pointing to the current // Instruction on which the interpreter will have to execute. type Cursor struct { Position int64 Instructions InstructionSet } type runset struct { dataStacks map[string]DataStack cursor Cursor cursorCommands map[string]func(RunSet) instructionCount int64 } // NewRunSet creates a RunSet. func NewRunSet(i Interpreter) *runset { constructors := i.StackConstructors() dataStacks := map[string]DataStack{} for _, constructor := range constructors { stackType, stack := constructor() dataStacks[stackType] = stack } r := &runset{dataStacks: dataStacks} addCursorCommands(r) return r } // Cursor returns the Cursor. func (r *runset) Cursor() *Cursor { return &r.cursor } // IncrementInstructionCount increments the InstructionCount of course. func (r *runset) IncrementInstructionCount() { r.instructionCount++ } // InstructionCount returns the total number of Instructions executed for // a single run. func (r *runset) InstructionCount() int64 { return r.instructionCount } // CursorCommand executes cursor-related functions. func (r *runset) CursorCommand(fn string) { theFunc, ok := r.cursorCommands[fn] if ok { theFunc(r) } } // CursorCommands returns all CursorCommands available. 
func (r *runset) CursorCommands() CursorCommands { return r.cursorCommands } // RegisterStack adds a stack to the available DataStacks identified by name. func (r *runset) RegisterStack(name string, stack DataStack) { r.dataStacks[name] = stack } // Stack returns the stack registered with name. func (r *runset) Stack(name string) DataStack { s, ok := r.dataStacks[name] if !ok { s = &NullDataStack{} } return s } // Ok returns true if stack is available and has the number of elements // required. func (r *runset) Ok(name string, count int64) bool { return !r.Bad(name, count) } // Bad returns false if stack is available and has the number of elements // required. func (r *runset) Bad(name string, count int64) bool { return r.Stack(name).Lack(count) } // DataStacks returns all the DataStacks registered for this RunSet. func (r *runset) DataStacks() map[string]DataStack { return r.dataStacks } func (r *runset) InitializeStack(stackType string, elements Elements) { for _, element := range elements { r.dataStacks[stackType].Push(element) } } func (r *runset) DataStack(stackType string) DataStack { return r.dataStacks[stackType] } func instructionCount(r RunSet) int64 { return int64(len(r.Cursor().Instructions)) } func addCursorCommands(rs *runset) { commands := make(CursorCommands) commands["skipif"] = func(r RunSet) { if r.Bad("boolean", 1) { return } if r.Stack("boolean").Pop().(bool) { r.Cursor().Position++ } } commands["end"] = func(r RunSet) { r.Cursor().Position = instructionCount(r) } commands["endif"] = func(r RunSet) { if r.Bad("boolean", 1) { return } if r.Stack("boolean").Pop().(bool) { commands["end"](r) } } commands["goto"] = func(r RunSet) { if r.Bad("integer", 1) { return } pos := r.Stack("integer").Pop().(int64) if pos < 0 || pos > instructionCount(r) { return } r.Cursor().Position = int64(pos - 1) } commands["gotoif"] = func(r RunSet) { if r.Ok("boolean", 1) && r.Stack("boolean").Pop().(bool) { commands["goto"](r) } } rs.cursorCommands = commands }
runset.go
0.62395
0.449211
runset.go
starcoder
package business import ( validation "github.com/go-ozzo/ozzo-validation" "github.com/go-ozzo/ozzo-validation/is" ) // Validate validates the CreateEdgeClusterRequest model and return error if the validation failes // Returns error if validation failes func (val CreateEdgeClusterRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // Validate EdgeCluster using its own validation rules validation.Field(&val.EdgeCluster), ) } // Validate validates the ReadEdgeClusterRequest model and return error if the validation failes // Returns error if validation failes func (val ReadEdgeClusterRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), ) } // Validate validates the UpdateEdgeClusterRequest model and return error if the validation failes // Returns error if validation failes func (val UpdateEdgeClusterRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), // Validate EdgeCluster using its own validation rules validation.Field(&val.EdgeCluster), ) } // Validate validates the DeleteEdgeClusterRequest model and return error if the validation failes // Returns error if validation failes func (val DeleteEdgeClusterRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), ) } // Validate validates the ListEdgeClustersRequest model and return error if the validation failes // Returns error if 
validation failes func (val ListEdgeClustersRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), ) } // Validate validates the ListEdgeClusterNodesRequest model and return error if the validation failes // Returns error if validation failes func (val ListEdgeClusterNodesRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), ) } // Validate validates the ListEdgeClusterPodsRequest model and return error if the validation failes // Returns error if validation failes func (val ListEdgeClusterPodsRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), ) } // Validate validates the ListEdgeClusterServicesRequest model and return error if the validation failes // Returns error if validation failes func (val ListEdgeClusterServicesRequest) Validate() error { return validation.ValidateStruct(&val, // Email must be provided validation.Field(&val.UserEmail, validation.Required, is.Email), // EdgeClusterID cannot be empty validation.Field(&val.EdgeClusterID, validation.Required), ) }
services/business/validation.go
0.729038
0.407274
validation.go
starcoder
package yaspeech // The Voice for the synthesized speech. // You can choose one of the following voices: // Female voice: alyss, jane, oksana and omazh. // Male voice: zahar and ermil. // Default value of the parameter: oksana. type Voice string // Voices of the synthesized speech. const ( VoiceAlyss Voice = "alyss" VoiceJane Voice = "jane" VoiceOksana Voice = "oksana" VoiceOmazh Voice = "omazh" VoiceZahar Voice = "zahar" VoiceErmil Voice = "ermil" ) // Emotion is emotional tone of the voice. // Acceptable values: // good — Cheerful and friendly. // evil — Irritated. // neutral (default) — Without emotion. type Emotion string //Emotional tone of the voice const ( EmotionGood Emotion = "good" EmotionEvil Emotion = "evil" EmotionNeutral Emotion = "neutral" ) // The Format of the synthesized audio. // Acceptable values: // lpcm — Audio file is synthesized in the LPCM format with no WAV header. Audio characteristics: // Sampling — 8, 16, or 48 kHz, depending on the sampleRateHertz parameter value. // Bit depth — 16-bit. // Byte order — Reversed (little-endian). // Audio data is stored as signed integers. // oggopus (default) — Data in the audio file is encoded using the OPUS audio codec and compressed using the OGG container format (OggOpus). type Format string // Formats of the synthesized audio. const ( FormatLpcm Format = "lpcm" FormatOggOpus Format = "oggopus" ) // Language of the synthesized speech // Acceptable values: // ru-RU (default) — Russian. // en-US — English. // tr-TR — Turkish. type Language string // Languages const ( LangRU Language = "ru-RU" LangEN Language = "en-US" LangTR Language = "tr-TR" ) // SampleRateHertz is the sampling frequency of the synthesized audio. // Used if format is set to lpcm. Acceptable values: // 48000 (default) — Sampling rate of 48 kHz. // 16000 — Sampling rate of 16 kHz. 
// 8000 — Sampling rate of 8 kHz.1 type SampleRateHertz int // The sampling frequency of the synthesized audio const ( SampleRate48000 SampleRateHertz = 48000 SampleRate16000 SampleRateHertz = 16000 SampleRate8000 SampleRateHertz = 8000 ) // Topic is the language model to be used for recognition. // The closer the model is matched, the better the recognition result. // You can only specify one model per request. // Default parameter value: general. type Topic string const ( // TopicGeneral - Short phrases containing 3-5 words on various topics, including search engine or website queries. TopicGeneral Topic = "general" // TopicMaps - Addresses and names of companies or geographical features. TopicMaps Topic = "maps" // TopicDates - Names of months, ordinal numbers, and cardinal numbers. TopicDates Topic = "dates" // TopicNames - First and last names and phone call requests. TopicNames Topic = "names" // TopicNumbers - Cardinal numbers from 1 to 999 and delimiters (dot, comma, and dash) TopicNumbers Topic = "numbers" ) // // ProfanityFilter controls the profanity filter in recognized speech. // // Acceptable values: // // false (default) — Profanity is not excluded from recognition results. // // true — Profanity is excluded from recognition results. // type ProfanityFilter bool
options.go
0.542621
0.400896
options.go
starcoder
package trietree

import (
	"github.com/howz97/algorithm/alphabet"
	"github.com/howz97/algorithm/queue"
	"github.com/howz97/algorithm/util"
)

// TSTC is a three-direction (ternary search) trie tree that is compressible:
// after Compress, chains of single-successor middle nodes are merged into
// nodes holding a run of runes.
type TSTC struct {
	TSTCNode
	compressed bool
}

// Compress merges shrinkable node chains and marks the tree as compressed;
// Upsert is no longer allowed afterwards.
func (t *TSTC) Compress() error {
	t.compress()
	t.compressed = true
	return nil
}

// IsCompressed reports whether Compress has been called on this tree.
func (t *TSTC) IsCompressed() bool {
	return t.compressed
}

// Upsert inserts or updates the value for key k. It panics if the tree has
// already been compressed. The alphabet argument is unused.
func (t *TSTC) Upsert(_ alphabet.Interface, k []rune, v T) {
	if t.compressed {
		panic("can not upsert after compress")
	}
	t.TSTCNode.Upsert(nil, k, v)
}

// TSTCNode is a node of the compressible ternary search trie. Before
// compression rs holds a single rune; after compression it may hold a run of
// runes merged from a chain of middle children.
type TSTCNode struct {
	rs               []rune
	v                T
	left, mid, right *TSTCNode
}

func newTSTCNode(r rune) *TSTCNode {
	return &TSTCNode{rs: []rune{r}}
}

// Value returns the value stored at this node (nil for pure branch nodes).
func (t *TSTCNode) Value() T {
	return t.v
}

// Upsert inserts or updates the value for key k, creating nodes as needed.
// It panics on an empty key. The alphabet argument is unused.
func (t *TSTCNode) Upsert(_ alphabet.Interface, k []rune, v T) {
	if len(k) == 0 {
		panic("empty key")
	}
	switch true {
	case k[0] < t.rs[0]:
		if t.left == nil {
			t.left = newTSTCNode(k[0])
		}
		t.left.Upsert(nil, k, v)
	case k[0] > t.rs[0]:
		if t.right == nil {
			t.right = newTSTCNode(k[0])
		}
		t.right.Upsert(nil, k, v)
	default:
		if len(k) == 1 {
			t.v = v
		} else {
			if t.mid == nil {
				t.mid = newTSTCNode(k[1])
			}
			t.mid.Upsert(nil, k[1:], v)
		}
	}
}

// compress recursively merges this node with its middle child while the node
// carries no value and no left/right branches. Safe on a nil receiver.
func (t *TSTCNode) compress() {
	if t == nil {
		return
	}
	if t.canShrink() {
		// Absorb the middle child: concatenate its runes and take over its
		// links and value, then retry from this (now longer) node.
		t.rs = append(t.rs, t.mid.rs...)
		t.left = t.mid.left
		t.right = t.mid.right
		t.v = t.mid.v
		t.mid = t.mid.mid
		t.compress()
	} else {
		t.left.compress()
		t.mid.compress()
		t.right.compress()
	}
	return
}

// canShrink reports whether this node can be merged with its middle child.
// NOTE(review): this does not check t.mid != nil; a value-less node with no
// children at all (which Delete can leave behind) would make compress
// dereference a nil mid — confirm the Delete/Compress interaction.
func (t *TSTCNode) canShrink() bool {
	return t.v == nil && t.left == nil && t.right == nil
}

// Delete removes the value for key k if present (nodes are kept in place).
// The alphabet argument is unused.
func (t *TSTCNode) Delete(_ alphabet.Interface, k []rune) {
	if len(k) < len(t.rs) {
		return
	}
	// The key must match all of this node's runes except the last, which is
	// resolved through the left/mid/right branches below.
	if !isRunesEqual(t.rs, k, 0, len(t.rs)-1) {
		return
	}
	i := len(t.rs) - 1
	switch true {
	case k[i] < t.rs[i]:
		if t.left != nil {
			t.left.Delete(nil, k[i:])
		}
	case k[i] > t.rs[i]:
		if t.right != nil {
			t.right.Delete(nil, k[i:])
		}
	default:
		if len(k) == len(t.rs) {
			t.v = nil
		} else if t.mid != nil {
			t.mid.Delete(nil, k[len(t.rs):])
		}
	}
}

// Find locates the node corresponding to k. Reaching a node does not imply k
// exists; existence is indicated by the returned value being non-nil.
// (Translated from the original Chinese comment.)
func (t *TSTCNode) Find(_ alphabet.Interface, k []rune) T {
	if len(k) < len(t.rs) || !isRunesEqual(t.rs, k, 0, len(t.rs)-1) {
		return nil
	}
	i := len(t.rs) - 1
	switch true {
	case k[i] < t.rs[i]:
		if t.left != nil {
			return t.left.Find(nil, k[i:])
		}
	case k[i] > t.rs[i]:
		if t.right != nil {
			return t.right.Find(nil, k[i:])
		}
	default:
		if len(k) == len(t.rs) {
			return t.v
		} else if t.mid != nil {
			return t.mid.Find(nil, k[len(t.rs):])
		}
	}
	return nil
}

// Locate finds the node reached by k. If k ends inside a compressed node's
// rune run, it returns that node together with the unmatched tail of the run.
func (t *TSTCNode) Locate(_ alphabet.Interface, k []rune) (TrieNode, []rune) {
	if len(k) < len(t.rs) {
		if isRunesEqual2(k, t.rs[:len(k)]) {
			return t, t.rs[len(k):]
		}
		return nil, nil
	}
	if !isRunesEqual(t.rs, k, 0, len(t.rs)-1) {
		return nil, nil
	}
	i := len(t.rs) - 1
	switch true {
	case k[i] < t.rs[i]:
		if t.left != nil {
			return t.left.Locate(nil, k[i:])
		}
	case k[i] > t.rs[i]:
		if t.right != nil {
			return t.right.Locate(nil, k[i:])
		}
	default:
		if len(k) == len(t.rs) {
			return t, nil
		} else if t.mid != nil {
			return t.mid.Locate(nil, k[len(t.rs):])
		}
	}
	return nil, nil
}

// LongestPrefixOf returns the end index (within s) of the longest stored key
// that is a prefix of s[d:]; l carries the best length found so far.
func (t *TSTCNode) LongestPrefixOf(_ alphabet.Interface, s []rune, d, l int) int {
	if len(s) < d+len(t.rs) {
		return l
	}
	if !isRunesEqual(s[d:], t.rs, 0, len(t.rs)-1) {
		return l
	}
	i := len(t.rs) - 1
	switch true {
	case s[d+i] < t.rs[i]:
		if t.left != nil {
			return t.left.LongestPrefixOf(nil, s, d+i, l)
		}
	case s[d+i] > t.rs[i]:
		if t.right != nil {
			return t.right.LongestPrefixOf(nil, s, d+i, l)
		}
	default:
		// This node completes a key: extend the best prefix length.
		if t.v != nil {
			l = d + len(t.rs)
		}
		if len(s) == d+len(t.rs) {
			return l
		} else if t.mid != nil {
			return t.mid.LongestPrefixOf(nil, s, d+len(t.rs), l)
		}
	}
	return l
}

// Collect appends to keys every key stored under this node, each prefixed
// with prefix. This node's own rune run is assumed to be part of prefix
// already.
func (t *TSTCNode) Collect(_ alphabet.Interface, prefix string, keys *queue.SliStr) {
	if t.v != nil {
		keys.PushBack(prefix)
	}
	if t.mid != nil {
		t.mid.collect(nil, prefix, keys)
	}
}

// collect appends to keys every key in this subtree. Unlike Collect, this
// node's rune run is appended to prefix; left/right siblings share all but
// the last rune of the run, hence the rs[:len(rs)-1] prefixes.
func (t *TSTCNode) collect(_ alphabet.Interface, prefix string, keys *queue.SliStr) {
	if t.v != nil {
		keys.PushBack(prefix + string(t.rs))
	}
	if t.left != nil {
		t.left.collect(nil, prefix+string(t.rs[:len(t.rs)-1]), keys)
	}
	if t.mid != nil {
		t.mid.collect(nil, prefix+string(t.rs), keys)
	}
	if t.right != nil {
		t.right.collect(nil, prefix+string(t.rs[:len(t.rs)-1]), keys)
	}
}

// KeysMatch appends to keys every stored key matching pattern, where '.'
// matches any single rune (per util.IsRuneMatch / util.IsRunesMatch).
func (t *TSTCNode) KeysMatch(_ alphabet.Interface, pattern []rune, prefix string, keys *queue.SliStr) {
	if len(pattern) < len(t.rs) || !util.IsRunesMatch(pattern[:len(t.rs)-1], t.rs[:len(t.rs)-1]) {
		return
	}
	i := len(t.rs) - 1
	if t.left != nil && (pattern[i] == '.' || pattern[i] < t.rs[i]) {
		t.left.KeysMatch(nil, pattern[len(t.rs)-1:], prefix+string(t.rs[:len(t.rs)-1]), keys)
	}
	if t.right != nil && (pattern[i] == '.' || pattern[i] > t.rs[i]) {
		t.right.KeysMatch(nil, pattern[len(t.rs)-1:], prefix+string(t.rs[:len(t.rs)-1]), keys)
	}
	if util.IsRuneMatch(pattern[i], t.rs[i]) {
		if len(pattern) == len(t.rs) {
			if t.v != nil {
				keys.PushBack(prefix + string(t.rs))
			}
		} else if t.mid != nil {
			t.mid.KeysMatch(nil, pattern[len(t.rs):], prefix+string(t.rs), keys)
		}
	}
}

// Keys appends every key in this subtree to keys.
func (t *TSTCNode) Keys(_ alphabet.Interface, keys *queue.SliStr) {
	t.collect(nil, "", keys)
}

// IsCompressed must not be called on a bare node; compression state is
// tracked by the enclosing TSTC.
func (t *TSTCNode) IsCompressed() bool {
	panic("should not be called")
}

// Compress must not be called on a bare node; use TSTC.Compress.
func (t *TSTCNode) Compress() error {
	panic("should not be called")
}

// SetVal stores v at this node.
func (t *TSTCNode) SetVal(v T) {
	t.v = v
}

// isRunesEqual2 reports whether a and b are identical rune slices.
func isRunesEqual2(a, b []rune) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

// isRunesEqual reports whether a[lo:hi] equals b[lo:hi] (hi exclusive).
func isRunesEqual(a, b []rune, lo, hi int) bool {
	for ; lo < hi; lo++ {
		if a[lo] != b[lo] {
			return false
		}
	}
	return true
}
trie_tree/tstc_node.go
0.508788
0.441252
tstc_node.go
starcoder
package note

import "strconv"

// Append returns a delta that appends add to the end of xs.
func (xs IDSlice) Append(add ...ID) IDSliceDelta {
	return xs.Insert(len(xs), add...)
}

// Retain returns a delta that retains the first r elements of xs.
func (xs IDSlice) Retain(r int) IDSliceDelta {
	return IDSliceDelta{}.Retain(r)
}

// Insert returns a delta that inserts add at index i of xs.
func (xs IDSlice) Insert(i int, add ...ID) IDSliceDelta {
	return xs.Retain(i).Insert(add...)
}

// Delete returns a delta that deletes num elements starting at index i.
func (xs IDSlice) Delete(i, num int) IDSliceDelta {
	return xs.Retain(i).Delete(num)
}

// DeleteElements returns a delta that deletes every occurrence in xs of each
// element of del, expressed as alternating retain/delete runs.
func (xs IDSlice) DeleteElements(del ...ID) IDSliceDelta {
	// Mark the indices to delete.
	is := make(map[int]bool)
	for _, r := range del {
		for i, x := range xs {
			if x == r {
				is[i] = true
			}
		}
	}
	// Walk xs, emitting a retain for each kept run and a delete for each
	// marked run.
	var delta IDSliceDelta
	from := 0
	deleting := false
	for i := range xs {
		if deleting {
			if !is[i] {
				delta = delta.Delete(i - from)
				deleting = false
				from = i
			}
		} else {
			if is[i] {
				delta = delta.Retain(i - from)
				deleting = true
				from = i
			}
		}
	}
	// Close a trailing delete run.
	if deleting {
		delta = delta.Delete(len(xs) - from)
	}
	return delta
}

// PrefixMatch returns the number of elements at the beginning of xs that match the
// elements at the beginning of ys.
func (xs IDSlice) PrefixMatch(ys []ID) int {
	i := 0
	for ; i < len(xs) && i < len(ys); i++ {
		if xs[i] != ys[i] {
			break
		}
	}
	return i
}

// IDSliceDelta is an ordered list of operations that transforms one IDSlice
// into another.
type IDSliceDelta []IDSliceOp

// Retain appends a retain op for r elements; r == 0 is a no-op.
func (x IDSliceDelta) Retain(r int) IDSliceDelta {
	if r == 0 {
		return x
	}
	return append(x, IDSliceOpRetain(r))
}

// Insert appends an insert op carrying add.
func (x IDSliceDelta) Insert(add ...ID) IDSliceDelta {
	return append(x, IDSliceOpInsert(add))
}

// Delete appends a delete op for d elements.
func (x IDSliceDelta) Delete(d int) IDSliceDelta {
	return append(x, IDSliceOpDelete(d))
}

// Rebase transforms x so that it can be applied after base has already been
// applied, pairing off ops from both deltas and compacting the result.
func (x IDSliceDelta) Rebase(base IDSliceDelta) (IDSliceDelta, error) {
	var res IDSliceDelta
	xi, bi := 0, 0
	var r, xop, bop IDSliceOp
	for {
		// Pull the next op from each side whenever the previous one has been
		// fully consumed (set to nil by the last Rebase call).
		if xop == nil {
			if xi >= len(x) {
				break
			}
			xop = x[xi]
			xi++
		}
		if bop == nil {
			if bi >= len(base) {
				break
			}
			bop = base[bi]
			bi++
		}
		r, xop, bop = xop.Rebase(bop)
		if r != nil {
			res = append(res, r)
		}
	}
	// Flush the partially-consumed op and any remaining ops of x.
	if xop != nil {
		res = append(res, xop)
	}
	res = append(res, x[xi:]...)
	// Compact adjacent ops of the same kind and drop zero-length ops.
	var cres IDSliceDelta
	for _, r := range res {
		if len(cres) == 0 {
			if r.Len() > 0 {
				cres = append(cres, r)
			}
		} else {
			c, ok := cres[len(cres)-1].Compact(r)
			if ok {
				cres[len(cres)-1] = c
			} else if !ok && r.Len() > 0 {
				cres = append(cres, r)
			}
		}
	}
	return cres, nil
}

// IDSliceOp is a single operation (insert, retain, or delete) within an
// IDSliceDelta.
type IDSliceOp interface {
	// Leaves returns how many elements of a slice of length n would remain
	// to be transformed by additional ops after applying this op. Returns
	// a negative number if and only if this op cannot be coherently
	// applied to a slice of length n.
	Leaves(n int) int

	// Len returns the number of elements inserted, retained, or deleted by
	// this op.
	Len() int

	// Skip returns an equivalent op that assumes its intent is already carried
	// out for the first n elements. May panic if n > Len().
	Skip(n int) IDSliceOp

	// Rebase transforms op into a rebased op r (or nil), a subsequent op for
	// rebasing xn (or nil), and a subsequent base bn (or nil).
	Rebase(base IDSliceOp) (r IDSliceOp, xn IDSliceOp, bn IDSliceOp)

	// Compact expands this op to include o if possible, returning true if
	// successful.
	Compact(o IDSliceOp) (IDSliceOp, bool)

	// Apply consumes the op against xs, returning the elements produced by
	// the op and the untouched remainder of xs.
	Apply(IDSlice) (include IDSlice, remainder IDSlice)

	String() string
}

// IDSliceOpInsert inserts its elements at the current position.
type IDSliceOpInsert []ID

// IDSliceOpRetain keeps the next n elements unchanged.
type IDSliceOpRetain int

// IDSliceOpDelete removes the next n elements.
type IDSliceOpDelete int

// Leaves: an insert consumes no input elements.
func (x IDSliceOpInsert) Leaves(in int) int { return in }

func (x IDSliceOpInsert) Len() int { return len(x) }

func (x IDSliceOpInsert) Skip(n int) IDSliceOp { return x[n:] }

// Rebase against base: a base insert shifts us right (retain it); retain and
// delete in base are untouched by our insert.
func (x IDSliceOpInsert) Rebase(base IDSliceOp) (IDSliceOp, IDSliceOp, IDSliceOp) {
	switch bo := base.(type) {
	case IDSliceOpInsert:
		return IDSliceOpRetain(bo.Len()), x, nil
	case IDSliceOpRetain:
		return x, nil, bo
	case IDSliceOpDelete:
		return x, nil, bo
	}
	panic("unknown base type")
}

// Compact merges two adjacent inserts into one.
func (x IDSliceOpInsert) Compact(op IDSliceOp) (IDSliceOp, bool) {
	if o, ok := op.(IDSliceOpInsert); ok {
		return append(x, o...), true
	}
	return x, false
}

func (x IDSliceOpInsert) Apply(xs IDSlice) (IDSlice, IDSlice) {
	return IDSlice(x), xs
}

func (x IDSliceOpInsert) String() string {
	return "insert " + IDSlice(x).String()
}

func (x IDSliceOpRetain) String() string {
	return "retain " + strconv.Itoa(int(x))
}

func (x IDSliceOpDelete) String() string {
	return "delete " + strconv.Itoa(int(x))
}

// Leaves: a retain consumes x input elements.
func (x IDSliceOpRetain) Leaves(in int) int { return in - int(x) }

func (x IDSliceOpRetain) Len() int { return int(x) }

func (x IDSliceOpRetain) Skip(n int) IDSliceOp { return x - IDSliceOpRetain(n) }

func (x IDSliceOpRetain) Rebase(base IDSliceOp) (IDSliceOp, IDSliceOp, IDSliceOp) {
	switch bo := base.(type) {
	case IDSliceOpInsert:
		// Retain what has been inserted
		return x + IDSliceOpRetain(len(bo)), nil, nil
	case IDSliceOpRetain:
		// Retain the matching prefix
		switch {
		case x < bo:
			return x, nil, bo - x
		case x == bo:
			return x, nil, nil
		case x > bo:
			return bo, x - bo, nil
		}
	case IDSliceOpDelete:
		// Can't retain what has been deleted
		switch {
		case x.Len() < bo.Len():
			// Retention is cancelled by deletion and there is still more to delete.
			return nil, nil, bo - IDSliceOpDelete(x)
		case x.Len() == bo.Len():
			// Retention is cancelled by deletion.
			return nil, nil, nil
		case x.Len() > bo.Len():
			// Retention is partially cancelled by deletion, there is more to retain.
			return nil, x - IDSliceOpRetain(bo), nil
		}
	}
	panic("unknown base type")
}

// Compact merges two adjacent retains into one.
func (x IDSliceOpRetain) Compact(op IDSliceOp) (IDSliceOp, bool) {
	if o, ok := op.(IDSliceOpRetain); ok {
		return x + o, true
	}
	return x, false
}

func (x IDSliceOpRetain) Apply(xs IDSlice) (IDSlice, IDSlice) {
	return xs[:x], xs[x:]
}

// Leaves: a delete consumes x input elements.
func (x IDSliceOpDelete) Leaves(in int) int { return in - int(x) }

func (x IDSliceOpDelete) Len() int { return int(x) }

func (x IDSliceOpDelete) Skip(n int) IDSliceOp { return x - IDSliceOpDelete(n) }

func (x IDSliceOpDelete) Rebase(base IDSliceOp) (IDSliceOp, IDSliceOp, IDSliceOp) {
	switch bo := base.(type) {
	case IDSliceOpInsert:
		// Skip over base's insertion, then delete.
		return IDSliceOpRetain(bo.Len()), x, nil
	case IDSliceOpRetain:
		// Delete the matching prefix
		switch {
		case x.Len() < bo.Len():
			return x, nil, bo - IDSliceOpRetain(x)
		case x.Len() == bo.Len():
			return x, nil, nil
		case x.Len() > bo.Len():
			return IDSliceOpDelete(bo), x.Skip(bo.Len()), nil
		}
	case IDSliceOpDelete:
		// Both sides deleted the same prefix: nothing left to delete here.
		switch {
		case x.Len() < bo.Len():
			return nil, nil, bo.Skip(x.Len())
		case x.Len() == bo.Len():
			return nil, nil, nil
		case x.Len() > bo.Len():
			return nil, x - bo, nil
		}
	}
	panic("unknown base type")
}

// Compact merges two adjacent deletes into one.
func (x IDSliceOpDelete) Compact(op IDSliceOp) (IDSliceOp, bool) {
	if o, ok := op.(IDSliceOpDelete); ok {
		return x + o, true
	}
	return x, false
}

func (x IDSliceOpDelete) Apply(xs IDSlice) (IDSlice, IDSlice) {
	return nil, xs[x:]
}

// CanApply reports whether ops consumes exactly the elements of xs without
// running past its end.
func (xs IDSlice) CanApply(ops []IDSliceOp) bool {
	ln := len(xs)
	for _, op := range ops {
		if ln = op.Leaves(ln); ln < 0 {
			return false
		}
	}
	return true
}

// Apply runs ops over xs and returns the transformed slice; any input not
// consumed by ops is appended unchanged.
func (xs IDSlice) Apply(ops []IDSliceOp) IDSlice {
	var head, mid, tail IDSlice
	tail = xs
	for _, op := range ops {
		mid, tail = op.Apply(tail)
		head = append(head, mid...)
	}
	return append(head, tail...)
}

// IDSliceDiff produces a set of operations that can be applied to xs to
// produce a slice that would match slice b.
func IDSliceDiff(a, b []ID) IDSliceDelta {
	var (
		ops                IDSliceDelta
		amid, bmid, midlen = idSliceLCS(IDSlice(a), IDSlice(b))
	)
	if midlen == 0 {
		// No common subsequence: delete all of a, insert all of b.
		if len(a) > 0 {
			ops = append(ops, IDSliceOpDelete(len(a)))
		}
		if len(b) > 0 {
			ops = append(ops, IDSliceOpInsert(b))
		}
	} else {
		// Recurse on both sides of the longest common run, retaining it.
		ops = append(ops, IDSliceDiff(a[:amid], b[:bmid])...)
		ops = append(ops, IDSliceOpRetain(midlen))
		ops = append(ops, IDSliceDiff(a[amid+midlen:], b[bmid+midlen:])...)
	}
	return ops
}

// idSliceLCS returns the start indices (in a and b) and length of the longest
// common contiguous run of a and b, via dynamic programming over run lengths.
func idSliceLCS(a, b IDSlice) (ai, bi, ln int) {
	ls := make([]int, len(a)*len(b))
	max := 0
	a0, b0 := 0, 0
	for ai, aa := range a {
		for bi, bb := range b {
			if aa == bb {
				li := ai*len(b) + bi
				if ai == 0 || bi == 0 {
					ls[li] = 1
				} else {
					ls[li] = ls[(ai-1)*len(b)+bi-1] + 1
				}
				if ls[li] > max {
					max = ls[li]
					a0, b0 = ai+1-max, bi+1-max
				}
			}
		}
	}
	return a0, b0, max
}
note/id_ot.go
0.64579
0.438424
id_ot.go
starcoder
package hector

import (
	"math"
)

/**
 * It's based the paper "Scalable Training of L1-Regularized Log-Linear Models"
 * by <NAME> and <NAME>
 * user: weixuan
 * To change this template use File | Settings | File Templates.
 */

// QuasiNewtonHelper implements the L-BFGS two-loop recursion and a
// backtracking line search over a caller-supplied Minimizer.
type QuasiNewtonHelper struct {
	// config
	numHist   int64     // maximum number of (s, y) correction pairs kept
	minimizer Minimizer // objective being minimized
	// historical data
	sList, yList []*Vector // position deltas (s) and gradient deltas (y)
	roList       []float64 // s·y for each stored pair
	curPos, curGrad *Vector // latest accepted position and gradient
}

// Minimizer evaluates the objective and produces candidate points along a
// search direction.
type Minimizer interface {
	NextPoint(curPos *Vector, dir *Vector, alpha float64) *Vector
	Evaluate(curPos *Vector) float64
}

// MAX_BACKTRACKING_ITER bounds the number of step-halving attempts in
// BackTrackingLineSearch.
const MAX_BACKTRACKING_ITER = 50

// NewQuasiNewtonHelper creates a helper keeping at most numHist correction
// pairs, seeded with the initial position and gradient.
func NewQuasiNewtonHelper(numHist int, minimizer Minimizer, curPos *Vector, curGrad *Vector) (*QuasiNewtonHelper) {
	h := new(QuasiNewtonHelper)
	h.numHist = int64(numHist)
	h.minimizer = minimizer
	h.curPos = curPos
	h.curGrad = curGrad
	h.sList = make([]*Vector, 0)
	h.yList = make([]*Vector, 0)
	h.roList = make([]float64, 0)
	return h
}

// Description: Update the dir from -grad to optimal direction
// Dir will be modified directly
// (L-BFGS two-loop recursion over the stored (s, y, ro) history; a no-op
// until at least one pair has been recorded.)
func (h *QuasiNewtonHelper) ApplyQuasiInverseHession(dir *Vector) {
	count := len(h.sList)
	if count == 0 {
		return
	}
	// First loop: walk history newest-to-oldest, accumulating alpha terms.
	alphas := make([]float64, count, count)
	for n := count - 1; n >= 0; n-- {
		alphas[n] = -dir.Dot(h.sList[n]) / h.roList[n]
		dir.ApplyElemWiseMultiplyAccumulation(h.yList[n], alphas[n])
	}
	// Scale by the initial inverse-Hessian estimate (s·y)/(y·y) of the most
	// recent pair.
	lastY := h.yList[count-1]
	yDotY := lastY.Dot(lastY)
	scalar := h.roList[count-1] / yDotY
	dir.ApplyScale(scalar)
	// Second loop: oldest-to-newest, applying the beta corrections.
	for n := 0; n < count; n++ {
		beta := dir.Dot(h.yList[n]) / h.roList[n]
		dir.ApplyElemWiseMultiplyAccumulation(h.sList[n], -alphas[n]-beta)
	}
	return
}

// BackTrackingLineSearch shrinks the step size alpha until the Armijo
// sufficient-decrease condition (c1 = 1e-4) holds, or the iteration cap is
// reached. It panics if dir is an ascent direction, and returns (cost, pos)
// unchanged when grad·dir == 0.
func (h *QuasiNewtonHelper) BackTrackingLineSearch(cost float64, pos *Vector, grad *Vector, dir *Vector, isInit bool) (nextCost float64, nextPos *Vector) {
	dotGradDir := grad.Dot(dir)
	if dotGradDir == 0 {
		return cost, pos
	}
	if dotGradDir > 0 {
		panic("BackTracking: to the opposite direction of grad")
	}
	alpha := 1.0
	backoff := 0.5
	if isInit {
		// On the first iteration start with a normalized step and back off
		// more aggressively.
		normDir := math.Sqrt(dir.Dot(dir))
		alpha = (1 / normDir)
		backoff = 0.1
	}
	var c1 float64 = 1e-4
	for cntItr := 0; cntItr <= MAX_BACKTRACKING_ITER; cntItr++ {
		nextPos = h.minimizer.NextPoint(pos, dir, alpha)
		nextCost = h.minimizer.Evaluate(nextPos)
		// Armijo condition: sufficient decrease relative to the directional
		// derivative (dotGradDir is negative here).
		if (nextCost <= cost+c1*dotGradDir*alpha) {
			break
		}
		alpha *= backoff
	}
	return nextCost, nextPos
}

// updateState records the (s, y, ro) pair for the accepted step, evicting the
// oldest pair when the history is full, and advances curPos/curGrad. It
// reports whether s·y == 0, which signals convergence (no further curvature
// information).
func (h *QuasiNewtonHelper) updateState(nextPos *Vector, nextGrad *Vector) (isOptimal bool) {
	if int64(len(h.sList)) >= h.numHist {
		h.sList = h.sList[1:]
		h.yList = h.yList[1:]
		h.roList = h.roList[1:]
	}
	newS := nextPos.ElemWiseMultiplyAdd(h.curPos, -1)
	newY := nextGrad.ElemWiseMultiplyAdd(h.curGrad, -1)
	ro := newS.Dot(newY)
	h.sList = append(h.sList, newS)
	h.yList = append(h.yList, newY)
	h.roList = append(h.roList, ro)
	h.curPos = nextPos
	h.curGrad = nextGrad
	return ro == 0
}
quasinewton_helper.go
0.692954
0.426202
quasinewton_helper.go
starcoder
package tableprinter

import (
	"reflect"
)

// stringable matches any value exposing a String() method (a subset of
// fmt.Stringer used for rendering decisions).
type stringable interface {
	String() string
}

// makeTable converts an arbitrary value into a table, dispatching on its
// reflected kind: maps and structs become single-row tables, slices become
// multi-row tables, pointers are dereferenced and recursed, and anything else
// becomes a one-cell table. Returns ErrNoData for nil input.
func (p *Printer) makeTable(value interface{}) (*table, error) {
	// Check that we've not been given a nil value:
	if value == nil {
		return nil, ErrNoData
	}

	// See if we have an easily stringable interface:
	if stringable, ok := value.(stringable); ok {
		return p.tableFromBasicValue(stringable.String())
	}

	// Take a different approach depending on the type of data that was provided:
	switch reflect.TypeOf(value).Kind() {

	// Maps get turned into a single-row table:
	case reflect.Map:
		return p.tableFromMapValue(value)

	// For pointers we just recurse on their interface:
	case reflect.Ptr:
		return p.makeTable(reflect.ValueOf(value).Elem().Interface())

	// Slices get turned into a multi-row table:
	case reflect.Slice:
		return p.tableFromSliceValue(value)

	// Structs get turned into a single-row table:
	case reflect.Struct:
		return p.tableFromStructValue(value)

	// The default is a one-row one-column table:
	default:
		return p.tableFromBasicValue(value)
	}
}

// formatValue determines how we display things: values with a String() method
// use it, everything else goes through the spew %v formatting.
func (p *Printer) formatValue(value interface{}) string {
	// If this value has a String() method then we should use that:
	if stringable, ok := value.(stringable); ok {
		return p.spewConfig.Sprintf("%s", stringable.String())
	}
	return p.spewConfig.Sprintf("%v", value)
}

// tableFromBasicValue turns an interface into a single column in a single row:
func (p *Printer) tableFromBasicValue(value interface{}) (*table, error) {
	var table = new(table)
	var row = make(tableRow)

	// Just add the one value:
	table.addHeader(defaultFieldName)
	row.setField(defaultFieldName, p.formatValue(value))
	table.addRow(row)
	return table, nil
}

// tableFromMapValue turns a map into a single-row table, one column per key.
// Only map[string]interface{} is supported; anything else returns
// ErrAssertion.
// NOTE(review): a nil map value would make reflect.TypeOf(fieldValue) return
// nil and the subsequent Kind() call panic — confirm callers never supply
// nil-valued entries.
func (p *Printer) tableFromMapValue(value interface{}) (*table, error) {
	var table = new(table)
	var row = make(tableRow)

	// Turn the value into a map[string]interface{}:
	assertedMap, ok := value.(map[string]interface{})
	if !ok {
		return nil, ErrAssertion
	}

	// Add the map fields to the table:
	for fieldName, fieldValue := range assertedMap {
		table.addHeader(fieldName)
		switch reflect.TypeOf(fieldValue).Kind() {
		case reflect.Ptr:
			// Dereference pointers before formatting (falls back to the raw
			// reflect.Value when it cannot be interfaced):
			reflectedFieldValue := reflect.ValueOf(fieldValue).Elem()
			if reflectedFieldValue.CanInterface() {
				row.setField(fieldName, p.formatValue(reflectedFieldValue.Interface()))
				continue
			}
			row.setField(fieldName, p.formatValue(reflectedFieldValue))
		default:
			row.setField(fieldName, p.formatValue(fieldValue))
		}
	}

	// Add the row to the table:
	table.addRow(row)
	return table, nil
}

// tableFromSliceValue turns a slice into a multi-row table: each element is
// converted with makeTable and its first row appended.
func (p *Printer) tableFromSliceValue(value interface{}) (*table, error) {
	var table = new(table)

	// Reflect the value to gain access to its elements:
	reflectedValue := reflect.ValueOf(value)

	// Turn each entry into a table (with a row that we can take):
	for i := 0; i < reflectedValue.Len(); i++ {
		tempTable, err := p.makeTable(reflectedValue.Index(i).Interface())
		if err != nil {
			return nil, err
		}

		// Add the new row and headers to our table:
		table.headers = tempTable.headers
		table.addRow(tempTable.rows[0])
	}
	return table, nil
}

// tableFromStructValue turns a struct into a single-row table, one column per
// field. Unexported fields are shown as unexportedFieldValue and nil pointer
// fields as nilFieldValue.
func (p *Printer) tableFromStructValue(value interface{}) (*table, error) {
	var table = new(table)
	var row = make(tableRow)

	// Reflect the value to gain access to its elements:
	reflectedType := reflect.TypeOf(value)
	reflectedValue := reflect.ValueOf(value)

	// Add the struct fields to the table:
	for i := 0; i < reflectedType.NumField(); i++ {
		fieldName := reflectedType.Field(i).Name
		fieldValue := reflectedValue.Field(i)
		table.addHeader(fieldName)

		// We can only work with exported fields:
		if !fieldValue.CanInterface() {
			row.setField(fieldName, unexportedFieldValue)
			continue
		}

		// Type switch:
		switch reflectedType.Field(i).Type.Kind() {

		// Pointers can be nil, so we need to check this (or just take the Elem() value):
		case reflect.Ptr:
			if fieldValue.IsNil() {
				row.setField(fieldName, nilFieldValue)
				continue
			}
			row.setField(fieldName, p.formatValue(fieldValue.Elem().Interface()))
		default:
			row.setField(fieldName, p.formatValue(fieldValue.Interface()))
		}
	}

	// Add the row to the table:
	table.addRow(row)
	return table, nil
}
table_types.go
0.740174
0.499817
table_types.go
starcoder
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"math"
	"os"
	"sort"
	"strings"
)

// Point is an integer grid coordinate (x to the right, y downwards).
type Point struct{ x, y int }

// dist returns the Euclidean distance between p and p0.
func (p Point) dist(p0 Point) float64 {
	return math.Sqrt(math.Pow(float64(p.x-p0.x), 2) + math.Pow(float64(p.y-p0.y), 2))
}

func main() {
	if len(os.Args) < 2 {
		log.Fatal(`
	[ERROR]: Provide the dataset!
	**Usage: ./main /path/to/file
	`)
	}
	data, err := ioutil.ReadFile(os.Args[1])
	if err != nil {
		log.Fatal(err)
	}
	grid := strings.Split(strings.Trim(string(data), " \n"), "\n")

	// Build both a set of asteroid positions and a 0/1 occupancy grid.
	asteroidsMap := make(map[Point]bool)
	asteroids := make([][]int, len(grid))
	for y, row := range grid {
		asteroids[y] = make([]int, len(grid[y]))
		for x, block := range row {
			if block == '#' {
				asteroidsMap[Point{x, y}] = true
				asteroids[y][x] = 1
			} else if block != '.' {
				// BUGFIX: log.Fatal does no formatting; Fatalf is required
				// for the %c verb to be interpreted.
				log.Fatalf("Unknown symbol[%c] encountered!", block)
			}
		}
	}

	bestCount, bestLocation := findBestLocation(asteroidsMap)
	fmt.Printf("Number of asteroids encountered from best location %v : %d\n", bestLocation, bestCount)

	asteroid200 := vaporize(bestLocation, asteroids, 200)
	fmt.Printf(`Location of 200th asteroid to vaporize: %v
	asteroid_200[X] * 100 + asteroid_200[Y] = %d
	`, asteroid200, asteroid200.x*100+asteroid200.y)
}

// findBestLocation returns, over all asteroids, the maximum number of other
// asteroids directly visible from one asteroid together with that asteroid's
// position. Visibility is counted by reducing each direction vector to lowest
// terms, so all collinear asteroids collapse onto a single direction entry.
func findBestLocation(asteriods map[Point]bool) (int, Point) {
	var bestCount int
	var bestLocation Point
	for p1 := range asteriods {
		directions := make(map[Point]bool)
		for p2 := range asteriods {
			if p1 != p2 {
				dx, dy := p2.x-p1.x, p2.y-p1.y
				// Hoisted: the GCD was previously computed twice per pair.
				g := HCF(dx, dy)
				directions[Point{dx / g, dy / g}] = true
			}
		}
		if len(directions) > bestCount {
			bestCount = len(directions)
			bestLocation = p1
		}
	}
	return bestCount, bestLocation
}

// vaporize simulates the rotating laser at station sweeping clockwise from
// straight up, destroying the closest asteroid on each line of sight per
// pass, and returns the position of the targetAsteroidNumber-th asteroid
// destroyed. Returns {-1, -1} when fewer asteroids exist than requested
// (the original version looped forever in that case).
func vaporize(station Point, asteroids [][]int, targetAsteroidNumber int) Point {
	// Group every asteroid by its reduced direction vector from the station.
	dirs := make(map[Point][]Point) // Slopes
	for y := 0; y < len(asteroids); y++ {
		for x := 0; x < len(asteroids[0]); x++ {
			currentPoint := Point{x: x, y: y}
			if asteroids[y][x] == 1 && currentPoint != station {
				dir := Point{
					x: x - station.x,
					y: y - station.y,
				}
				// Scale down to lowest terms.
				g := HCF(dir.x, dir.y)
				dir = Point{x: dir.x / g, y: dir.y / g}
				dirs[dir] = append(dirs[dir], currentPoint)
			}
		}
	}

	// Angle of a direction measured clockwise from straight up, in [0, 360).
	getAngleOf := func(slope Point) (angle float64) {
		angle = math.Atan2(float64(slope.y), float64(slope.x))/(2.0*math.Pi)*360.0 + 90.0
		if angle < 0 {
			angle += 360.0
		}
		return angle
	}

	angles := make([]float64, 0, len(dirs))
	asteroidAtAngle := make(map[float64][]Point)
	for slope := range dirs {
		angle := getAngleOf(slope)
		asteroidAtAngle[angle] = dirs[slope]
		// The closest asteroid on a line of sight is vaporized first.
		sort.Slice(asteroidAtAngle[angle], func(i, j int) bool {
			return asteroidAtAngle[angle][i].dist(station) < asteroidAtAngle[angle][j].dist(station)
		})
		angles = append(angles, angle)
	}
	// BUGFIX (perf): sort once after collecting all angles; previously the
	// whole slice was re-sorted on every loop iteration.
	sort.Float64s(angles)

	for i := 0; i < targetAsteroidNumber; {
		destroyedThisRotation := false
		for _, angle := range angles {
			if len(asteroidAtAngle[angle]) > 0 {
				destroyedThisRotation = true
				if i++; targetAsteroidNumber == i {
					return asteroidAtAngle[angle][0] // Asteroid location will be found here!
				}
				asteroidAtAngle[angle] = asteroidAtAngle[angle][1:] // filter
			}
		}
		// BUGFIX: if a full rotation destroys nothing, all asteroids are
		// gone and the target can never be reached — bail out instead of
		// spinning forever.
		if !destroyedThisRotation {
			break
		}
	}
	return Point{x: -1, y: -1} // Indicating asteroid not found!
}

// HCF returns the non-negative greatest common divisor of n1 and n2 using the
// Euclidean algorithm.
func HCF(n1, n2 int) int {
	if n1 < 0 {
		n1 = -n1
	}
	if n2 < 0 {
		n2 = -n2
	}
	for n2 != 0 {
		n1, n2 = n2, n1%n2
	}
	return n1
}
2019/Day-10/Monitoring_Station/main.go
0.623606
0.411702
main.go
starcoder