// NOTE(review): dataset metadata header (column names/statistics) removed —
// it is not part of any of the concatenated source files below.
package sheetfile
import (
"github.com/fourstring/sheetfs/master/config"
"github.com/fourstring/sheetfs/master/model"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
/*
Chunk
Represent a fixed-size block of data stored on some DataNode.
The size of a Chunk is given by config.BytesPerChunk.
A Version is maintained by MasterNode and DataNode separately. Latest
Version of a Chunk is stored in MasterNode, and the actual Version is placed
on DataNode. Version is necessary for serializing write operations to a Chunk.
When a client issues a write operation, MasterNode will increase the Version by
1 and return it to client. Client must send both data to write and the Version
to DataNode which actually stores the Chunk. This operation success iff version
in request is equal to Version in DataNode plus 1, by which we achieve serialization
of write operations.
Version can also be utilized to select correct replication of a Chunk when quorums
were introduced.
As to other metadata datastructures, Chunk should be maintained in memory, with the
aid of journaling to tolerate fault, and flushed to sqlite during checkpointing only.
*/
type Chunk struct {
	model.Model
	// DataNode identifies the DataNode that stores this Chunk's data
	// (see the file comment above for the replication/versioning scheme).
	DataNode string
	// Version is the latest version known to the MasterNode; it is bumped
	// on every write to serialize write operations (see comment above).
	Version uint64
	// Cells stored inside this Chunk; their total Size determines how much
	// free space remains (see isAvailable).
	Cells []*Cell
}
/*
isAvailable
Returns true if c is available to store a new Cell with given size.
*/
func (c *Chunk) isAvailable(size uint64) bool {
	// Sum the sizes of all Cells already stored in this Chunk.
	used := uint64(0)
	for _, cell := range c.Cells {
		used += cell.Size
	}
	// Guard against unsigned underflow: the original computed
	// config.BytesPerChunk - used unconditionally, which wraps around to a
	// huge value when used > BytesPerChunk and would report an over-full
	// chunk as available. At (or beyond) capacity only a zero-sized Cell fits.
	if used >= config.BytesPerChunk {
		return size == 0
	}
	return size <= config.BytesPerChunk-used
}
/*
Persistent
Flush Chunk data in memory into sqlite. But Chunk.Cells is not taken into consideration
because dynamic table names are applied. They should be persisted manually.
This method should be used only for checkpointing, and is supposed to be called
in a transaction for atomicity.
*/
func (c *Chunk) Persistent(tx *gorm.DB) {
	// Upsert: insert c, or update all columns on a primary-key conflict.
	// Associations (Cells) are omitted per the comment above.
	// NOTE(review): the *gorm.DB error result is discarded here; callers
	// should rely on the enclosing transaction's error/rollback handling.
	tx.Omit(clause.Associations).Clauses(clause.OnConflict{UpdateAll: true}).Create(c)
}
/*
Snapshot
Returns a *Chunk pointing to a deep copy of c: the Cells slice and every
Cell in it are copied, so mutating the snapshot never affects c.
See SheetFile for the necessity of Snapshot.
@return
	*Chunk points to the copy of c.
*/
func (c *Chunk) Snapshot() *Chunk {
	nc := *c
	// BUG FIX: the struct copy above shares c.Cells' backing array, so the
	// original code's writes through nc.Cells clobbered c's own cell
	// pointers. Allocate a fresh slice before filling it with snapshots.
	nc.Cells = make([]*Cell, len(c.Cells))
	for i, cell := range c.Cells {
		nc.Cells[i] = cell.Snapshot()
	}
	return &nc
}
/*
loadChunkForFile
Load a chunk for a sheet with given id from sqlite. And preload all Cells simultaneously.
This function do not check id passed in, so it's not exported. Caller should
check against id.
@para
tx: a gorm connection, it can be a transaction.
filename
id: Chunk.ID
@return
*Chunk
*/
func loadChunkForFile(tx *gorm.DB, filename string, id uint64) *Chunk {
var c Chunk
tx.Preload("Cells", func(db *gorm.DB) *gorm.DB {
return db.Table(GetCellTableName(filename))
}).First(&c, id)
return &c
} | master/sheetfile/chunk.go | 0.61832 | 0.540136 | chunk.go | starcoder |
package datadog
import (
"encoding/json"
)
// SLOBulkDeleteResponseData An array of service level objective objects.
type SLOBulkDeleteResponseData struct {
// An array of service level objective object IDs that indicates which objects that were completely deleted.
Deleted *[]string `json:"deleted,omitempty"`
// An array of service level objective object IDs that indicates which objects that were modified (objects for which at least one threshold was deleted, but that were not completely deleted).
Updated *[]string `json:"updated,omitempty"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
UnparsedObject map[string]interface{} `json:-`
}
// NewSLOBulkDeleteResponseData instantiates a new SLOBulkDeleteResponseData
// object, assigning default values to any properties that define them.
func NewSLOBulkDeleteResponseData() *SLOBulkDeleteResponseData {
	return &SLOBulkDeleteResponseData{}
}

// NewSLOBulkDeleteResponseDataWithDefaults instantiates a new
// SLOBulkDeleteResponseData object with defaults only; required properties
// are not guaranteed to be set.
func NewSLOBulkDeleteResponseDataWithDefaults() *SLOBulkDeleteResponseData {
	return &SLOBulkDeleteResponseData{}
}
// GetDeleted returns the Deleted field value if set, zero value otherwise.
func (o *SLOBulkDeleteResponseData) GetDeleted() []string {
	if o != nil && o.Deleted != nil {
		return *o.Deleted
	}
	return nil
}

// GetDeletedOk returns a tuple with the Deleted field value if set, nil
// otherwise, and a boolean to check if the value has been set.
func (o *SLOBulkDeleteResponseData) GetDeletedOk() (*[]string, bool) {
	if o != nil && o.Deleted != nil {
		return o.Deleted, true
	}
	return nil, false
}

// HasDeleted returns a boolean if a field has been set.
func (o *SLOBulkDeleteResponseData) HasDeleted() bool {
	return o != nil && o.Deleted != nil
}

// SetDeleted gets a reference to the given []string and assigns it to the Deleted field.
func (o *SLOBulkDeleteResponseData) SetDeleted(v []string) {
	o.Deleted = &v
}

// GetUpdated returns the Updated field value if set, zero value otherwise.
func (o *SLOBulkDeleteResponseData) GetUpdated() []string {
	if o != nil && o.Updated != nil {
		return *o.Updated
	}
	return nil
}

// GetUpdatedOk returns a tuple with the Updated field value if set, nil
// otherwise, and a boolean to check if the value has been set.
func (o *SLOBulkDeleteResponseData) GetUpdatedOk() (*[]string, bool) {
	if o != nil && o.Updated != nil {
		return o.Updated, true
	}
	return nil, false
}

// HasUpdated returns a boolean if a field has been set.
func (o *SLOBulkDeleteResponseData) HasUpdated() bool {
	return o != nil && o.Updated != nil
}

// SetUpdated gets a reference to the given []string and assigns it to the Updated field.
func (o *SLOBulkDeleteResponseData) SetUpdated(v []string) {
	o.Updated = &v
}
// MarshalJSON serializes the struct; if the object failed to deserialize
// originally, the raw unparsed value is re-emitted verbatim.
func (o SLOBulkDeleteResponseData) MarshalJSON() ([]byte, error) {
	if o.UnparsedObject != nil {
		return json.Marshal(o.UnparsedObject)
	}
	out := make(map[string]interface{}, 2)
	if o.Deleted != nil {
		out["deleted"] = o.Deleted
	}
	if o.Updated != nil {
		out["updated"] = o.Updated
	}
	return json.Marshal(out)
}
// UnmarshalJSON deserializes into the struct; on failure, the raw payload is
// preserved in UnparsedObject instead of returning an error.
func (o *SLOBulkDeleteResponseData) UnmarshalJSON(bytes []byte) (err error) {
	var payload struct {
		Deleted *[]string `json:"deleted,omitempty"`
		Updated *[]string `json:"updated,omitempty"`
	}
	if err = json.Unmarshal(bytes, &payload); err != nil {
		// Structured decode failed: keep the raw value for round-tripping.
		raw := map[string]interface{}{}
		if rawErr := json.Unmarshal(bytes, &raw); rawErr != nil {
			return rawErr
		}
		o.UnparsedObject = raw
		return nil
	}
	o.Deleted = payload.Deleted
	o.Updated = payload.Updated
	return nil
}
type NullableSLOBulkDeleteResponseData struct {
value *SLOBulkDeleteResponseData
isSet bool
}
func (v NullableSLOBulkDeleteResponseData) Get() *SLOBulkDeleteResponseData {
return v.value
}
func (v *NullableSLOBulkDeleteResponseData) Set(val *SLOBulkDeleteResponseData) {
v.value = val
v.isSet = true
}
func (v NullableSLOBulkDeleteResponseData) IsSet() bool {
return v.isSet
}
func (v *NullableSLOBulkDeleteResponseData) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableSLOBulkDeleteResponseData(val *SLOBulkDeleteResponseData) *NullableSLOBulkDeleteResponseData {
return &NullableSLOBulkDeleteResponseData{value: val, isSet: true}
}
func (v NullableSLOBulkDeleteResponseData) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableSLOBulkDeleteResponseData) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | api/v1/datadog/model_slo_bulk_delete_response_data.go | 0.736211 | 0.406332 | model_slo_bulk_delete_response_data.go | starcoder |
package types
import (
"errors"
"fmt"
"math/rand"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
)
// Has reports whether number lies inside the inclusive range
// [wr.Lower, wr.Upper].
func (wr DoubleWeightRange) Has(number sdk.Dec) bool {
	// BUG FIX: the original compared in the wrong direction
	// (wr.Lower.GTE(number) && wr.Upper.LTE(number)), which can only hold
	// when Lower >= number >= Upper — i.e. essentially never for a proper
	// range. The integer counterpart IntWeightRange.Has in this file shows
	// the intended containment test.
	return wr.Lower.LTE(number) && wr.Upper.GTE(number)
}
// Generate uses the weight table to generate a random number. Its uses a 2 int64 random generation mechanism.
// E.g. 2 weight ranges are provided with values [100.00, 500.00 weight: 8] and [600.00, 800.00 weight: 2] so now we
// generate a random number from 0 to 10 and if its from 0 to 8 then selected range = [100.00, 500.00] else [600.00, 800.00].
// next we get a random number from the selected range and return that
func (wt DoubleWeightTable) Generate() (sdk.Dec, error) {
	// Build prefix sums of the weights: weights[i] is the cumulative weight
	// up to and including range i.
	var lastWeight int64
	weights := make([]int64, len(wt))
	for i, weightRange := range wt {
		lastWeight += int64(weightRange.Weight)
		weights[i] = lastWeight
	}
	if lastWeight == 0 {
		return sdk.ZeroDec(), errors.New("total weight of DoubleWeightTable shouldn't be zero")
	}
	// One uniform draw in [0, totalWeight) selects the bucket it lands in.
	randWeight := rand.Int63n(lastWeight)
	var first int64
	chosenIndex := -1
	for i, weight := range weights {
		if randWeight >= first && randWeight < weight {
			chosenIndex = i
			break
		}
		first = weight
	}
	// Defensive: with lastWeight > 0 a bucket should always be found.
	if chosenIndex < 0 || chosenIndex >= len(wt) {
		return sdk.ZeroDec(), errors.New("something went wrong generating random double value")
	}
	selectedWeightRange := wt[chosenIndex]
	// Degenerate range: both bounds equal, nothing left to randomize.
	if selectedWeightRange.Upper.Equal(selectedWeightRange.Lower) {
		return selectedWeightRange.Upper, nil
	}
	// Draw a uniform fraction in [0, 1) and scale it into the range as
	// Lower + fraction * (Upper - Lower).
	// NOTE(review): "%f" formats with only 6 decimal places, so the result's
	// granularity is limited to 1e-6 of the range width — confirm acceptable.
	randNum := rand.Float64()
	randStr := fmt.Sprintf("%f", randNum)
	randDec, err := sdk.NewDecFromStr(randStr)
	if err != nil {
		return selectedWeightRange.Lower, sdkerrors.Wrapf(err, "error creating random sdk.Dec : float: %f, string %s", randNum, randStr)
	}
	return randDec.Mul(selectedWeightRange.Upper.Sub(selectedWeightRange.Lower)).Add(selectedWeightRange.Lower), nil
}
// Has checks if any of the weight ranges contains the number.
func (wt DoubleWeightTable) Has(number sdk.Dec) bool {
	for i := range wt {
		if wt[i].Has(number) {
			return true
		}
	}
	return false
}
// Has reports whether number falls in the half-open interval [Lower, Upper).
func (wr IntWeightRange) Has(number int64) bool {
	if number < wr.Lower {
		return false
	}
	return number < wr.Upper
}
// Generate uses the weight table to generate a random number. Its uses a 2 int64 random generation mechanism.
// E.g. 2 weight ranges are provided with values [100, 500 weight: 8] and [600, 800 weight: 2] so now we
// generate a random number from 0 to 10 and if its from 0 to 8 then selected range = [100, 500] else [600, 800].
// next we get a random number from the selected range and return that
func (wt IntWeightTable) Generate() (int64, error) {
	// Build prefix sums of the weights: weights[i] is the cumulative weight
	// up to and including range i.
	var lastWeight int64
	weights := make([]int64, len(wt))
	for i, weightRange := range wt {
		lastWeight += int64(weightRange.Weight)
		weights[i] = lastWeight
	}
	if lastWeight == 0 {
		return 0, errors.New("total weight of IntWeightTable shouldn't be zero")
	}
	// One uniform draw in [0, totalWeight) selects the bucket it lands in.
	randWeight := rand.Int63n(lastWeight)
	var first int64
	chosenIndex := -1
	for i, weight := range weights {
		if randWeight >= first && randWeight < weight {
			chosenIndex = i
			break
		}
		first = weight
	}
	// Defensive: with lastWeight > 0 a bucket should always be found.
	if chosenIndex < 0 || chosenIndex >= len(wt) {
		return 0, errors.New("something went wrong generating random integer value")
	}
	selectedWeightRange := wt[chosenIndex]
	// Degenerate range: single possible value.
	if selectedWeightRange.Upper == selectedWeightRange.Lower {
		return selectedWeightRange.Upper, nil
	}
	if selectedWeightRange.Upper > selectedWeightRange.Lower {
		return rand.Int63n(selectedWeightRange.Upper-selectedWeightRange.Lower) + selectedWeightRange.Lower, nil
	}
	// NOTE(review): an inverted range (Upper < Lower) silently falls back to
	// Lower instead of reporting an error — confirm this is intended.
	return selectedWeightRange.Lower, nil
}
// Has checks if any of the weight ranges has the number
func (wt IntWeightTable) Has(number int) bool {
for _, weightRange := range wt {
if weightRange.Has(int64(number)) {
return true
}
}
return false
} | database/types/recipe_weight_table.go | 0.743354 | 0.495484 | recipe_weight_table.go | starcoder |
package mat
import (
"github.com/angelsolaorbaiceta/inkmath/vec"
)
// A SparseMat is a matrix where the zeroes aren't stored.
type SparseMat struct {
	// Matrix dimensions; entries outside [0,rows) x [0,cols) are never stored.
	rows, cols int
	// data maps row index -> (column index -> value). Rows and cells with no
	// entry represent zero.
	data map[int]map[int]float64
}
// MakeSquareSparse creates a new square sparse matrix of the given size.
func MakeSquareSparse(size int) *SparseMat {
	return MakeSparse(size, size)
}

// MakeSparse creates a new sparse matrix with the given number of rows and columns.
func MakeSparse(rows, cols int) *SparseMat {
	return &SparseMat{
		rows: rows,
		cols: cols,
		data: make(map[int]map[int]float64),
	}
}

/*
MakeSparseWithData creates a new sparse matrix initialized with the given data.
Mainly intended for testing: most elements of a real sparse matrix should be
zero, so building one from a dense slice makes little sense in production.
*/
func MakeSparseWithData(rows, cols int, data []float64) *SparseMat {
	m := MakeSparse(rows, cols)
	FillMatrixWithData(m, data)
	return m
}

/*
MakeIdentity creates a new sparse matrix that is zero everywhere except the
main diagonal, which holds ones.
*/
func MakeIdentity(size int) *SparseMat {
	m := MakeSparse(size, size)
	for d := 0; d < size; d++ {
		m.SetValue(d, d, 1.0)
	}
	return m
}
// Rows returns the number of rows in the matrix.
func (m SparseMat) Rows() int {
	return m.rows
}

// Cols returns the number of columns in the matrix.
func (m SparseMat) Cols() int {
	return m.cols
}
// Value returns the value stored at the given row and column, or 0.0 when no
// explicit entry exists (zeroes are not stored).
func (m SparseMat) Value(row, col int) float64 {
	dataRow, hasRow := m.data[row]
	if !hasRow {
		return 0.0
	}
	// A missing column key yields the float64 zero value, which is exactly
	// the sparse-matrix semantics we want.
	return dataRow[col]
}
// NonZeroIndicesAtRow returns a slice with the column indices of all non-zero
// elements in the given row. Order is unspecified (map iteration).
func (m SparseMat) NonZeroIndicesAtRow(row int) []int {
	dataRow, hasRow := m.data[row]
	if !hasRow {
		return []int{}
	}
	indices := make([]int, 0, len(dataRow))
	for col := range dataRow {
		indices = append(indices, col)
	}
	return indices
}
// TimesVector multiplies this matrix by a vector. Panics on size mismatch.
func (m SparseMat) TimesVector(vector vec.ReadOnlyVector) vec.ReadOnlyVector {
	if vector.Length() != m.cols {
		panic("Can't multiply matrix and vector due to size mismatch")
	}
	product := vec.Make(m.rows)
	// Rows absent from the data map are all-zero and contribute nothing, so
	// only stored rows need a dot product.
	for row := range m.data {
		product.SetValue(row, m.rowTimesVector(row, vector))
	}
	return product
}
// TimesMatrix multiplies this matrix times other. Panics on size mismatch.
func (m SparseMat) TimesMatrix(other ReadOnlyMatrix) ReadOnlyMatrix {
	if other.Rows() != m.cols {
		panic("Can't multiply matrices due to size mismatch")
	}
	rowCount, colCount := m.rows, other.Cols()
	product := MakeSparse(rowCount, colCount)
	// Only stored (non-zero) rows of m can contribute to the product.
	for i, row := range m.data {
		for j := 0; j < colCount; j++ {
			var dot float64
			// Likewise, only stored entries of the row contribute to the sum.
			for k, val := range row {
				dot += val * other.Value(k, j)
			}
			product.SetValue(i, j, dot)
		}
	}
	return product
}
// RowTimesVector returns the dot product of the row at the given index and
// the given vector. Panics on size mismatch.
func (m SparseMat) RowTimesVector(row int, vector vec.ReadOnlyVector) float64 {
	if vector.Length() != m.cols {
		panic("Can't multiply matrix row with vector due to size mismatch")
	}
	return m.rowTimesVector(row, vector)
}
func (m SparseMat) rowTimesVector(row int, vector vec.ReadOnlyVector) float64 {
if rowData, hasRow := m.data[row]; hasRow {
result := 0.0
for i, val := range rowData {
result += vector.Value(i) * val
}
return result
}
return 0.0
} | mat/sparse_matrix.go | 0.872619 | 0.796094 | sparse_matrix.go | starcoder |
package document
import (
"bytes"
"strings"
"time"
)
type operator uint8
const (
operatorEq operator = iota + 1
operatorGt
operatorGte
operatorLt
operatorLte
)
func (op operator) String() string {
switch op {
case operatorEq:
return "="
case operatorGt:
return ">"
case operatorGte:
return ">="
case operatorLt:
return "<"
case operatorLte:
return "<="
}
return ""
}
// IsEqual returns true if v is equal to the given value.
func (v Value) IsEqual(other Value) (bool, error) {
return compare(operatorEq, v, other, false)
}
// IsNotEqual returns true if v is not equal to the given value.
func (v Value) IsNotEqual(other Value) (bool, error) {
ok, err := v.IsEqual(other)
if err != nil {
return ok, err
}
return !ok, nil
}
// IsGreaterThan returns true if v is greather than the given value.
func (v Value) IsGreaterThan(other Value) (bool, error) {
return compare(operatorGt, v, other, false)
}
// IsGreaterThanOrEqual returns true if v is greather than or equal to the given value.
func (v Value) IsGreaterThanOrEqual(other Value) (bool, error) {
return compare(operatorGte, v, other, false)
}
// IsLesserThan returns true if v is lesser than the given value.
func (v Value) IsLesserThan(other Value) (bool, error) {
return compare(operatorLt, v, other, false)
}
// IsLesserThanOrEqual returns true if v is lesser than or equal to the given value.
func (v Value) IsLesserThanOrEqual(other Value) (bool, error) {
return compare(operatorLte, v, other, false)
}
func compare(op operator, l, r Value, compareDifferentTypes bool) (bool, error) {
switch {
// deal with nil
case l.Type == NullValue || r.Type == NullValue:
return compareWithNull(op, l, r)
// compare booleans together
case l.Type == BoolValue && r.Type == BoolValue:
return compareBooleans(op, l.V.(bool), r.V.(bool)), nil
// compare texts together
case l.Type == TextValue && r.Type == TextValue:
return compareTexts(op, l.V.(string), r.V.(string)), nil
// compare blobs together
case r.Type == BlobValue && l.Type == BlobValue:
return compareBlobs(op, l.V.([]byte), r.V.([]byte)), nil
// compare integers together
case l.Type == IntegerValue && r.Type == IntegerValue:
return compareIntegers(op, l.V.(int64), r.V.(int64)), nil
// compare numbers together
case l.Type.IsNumber() && r.Type.IsNumber():
return compareNumbers(op, l, r)
// compare durations together
case l.Type == DurationValue && r.Type == DurationValue:
return compareIntegers(op, int64(l.V.(time.Duration)), int64(r.V.(time.Duration))), nil
// compare arrays together
case l.Type == ArrayValue && r.Type == ArrayValue:
return compareArrays(op, l.V.(Array), r.V.(Array))
// compare documents together
case l.Type == DocumentValue && r.Type == DocumentValue:
return compareDocuments(op, l.V.(Document), r.V.(Document))
}
if compareDifferentTypes {
switch op {
case operatorEq:
return false, nil
case operatorGt, operatorGte:
return l.Type > r.Type, nil
case operatorLt, operatorLte:
return l.Type < r.Type, nil
}
}
return false, nil
}
func compareWithNull(op operator, l, r Value) (bool, error) {
switch op {
case operatorEq, operatorGte, operatorLte:
return l.Type == r.Type, nil
case operatorGt, operatorLt:
return false, nil
}
return false, nil
}
func compareBooleans(op operator, a, b bool) bool {
switch op {
case operatorEq:
return a == b
case operatorGt:
return a == true && b == false
case operatorGte:
return a == b || a == true
case operatorLt:
return a == false && b == true
case operatorLte:
return a == b || a == false
}
return false
}
func compareTexts(op operator, l, r string) bool {
switch op {
case operatorEq:
return l == r
case operatorGt:
return strings.Compare(l, r) > 0
case operatorGte:
return strings.Compare(l, r) >= 0
case operatorLt:
return strings.Compare(l, r) < 0
case operatorLte:
return strings.Compare(l, r) <= 0
}
return false
}
func compareBlobs(op operator, l, r []byte) bool {
switch op {
case operatorEq:
return bytes.Equal(l, r)
case operatorGt:
return bytes.Compare(l, r) > 0
case operatorGte:
return bytes.Compare(l, r) >= 0
case operatorLt:
return bytes.Compare(l, r) < 0
case operatorLte:
return bytes.Compare(l, r) <= 0
}
return false
}
func compareIntegers(op operator, l, r int64) bool {
switch op {
case operatorEq:
return l == r
case operatorGt:
return l > r
case operatorGte:
return l >= r
case operatorLt:
return l < r
case operatorLte:
return l <= r
}
return false
}
func compareNumbers(op operator, l, r Value) (bool, error) {
var err error
l, err = l.CastAsDouble()
if err != nil {
return false, err
}
r, err = r.CastAsDouble()
if err != nil {
return false, err
}
af := l.V.(float64)
bf := r.V.(float64)
var ok bool
switch op {
case operatorEq:
ok = af == bf
case operatorGt:
ok = af > bf
case operatorGte:
ok = af >= bf
case operatorLt:
ok = af < bf
case operatorLte:
ok = af <= bf
}
return ok, nil
}
func compareArrays(op operator, l Array, r Array) (bool, error) {
var i, j int
for {
lv, lerr := l.GetByIndex(i)
rv, rerr := r.GetByIndex(j)
if lerr == nil {
i++
}
if rerr == nil {
j++
}
if lerr != nil || rerr != nil {
break
}
isEq, err := compare(operatorEq, lv, rv, true)
if err != nil {
return false, err
}
if !isEq && op != operatorEq {
return compare(op, lv, rv, true)
}
if !isEq {
return false, nil
}
}
switch {
case i > j:
switch op {
case operatorEq, operatorLt, operatorLte:
return false, nil
default:
return true, nil
}
case i < j:
switch op {
case operatorEq, operatorGt, operatorGte:
return false, nil
default:
return true, nil
}
default:
switch op {
case operatorEq, operatorGte, operatorLte:
return true, nil
default:
return false, nil
}
}
}
func compareDocuments(op operator, l, r Document) (bool, error) {
lf, err := Fields(l)
if err != nil {
return false, err
}
rf, err := Fields(r)
if err != nil {
return false, err
}
if len(lf) == 0 && len(rf) > 0 {
switch op {
case operatorEq:
return false, nil
case operatorGt:
return false, nil
case operatorGte:
return false, nil
case operatorLt:
return true, nil
case operatorLte:
return true, nil
}
}
if len(rf) == 0 && len(lf) > 0 {
switch op {
case operatorEq:
return false, nil
case operatorGt:
return true, nil
case operatorGte:
return true, nil
case operatorLt:
return false, nil
case operatorLte:
return false, nil
}
}
var i, j int
for i < len(lf) && j < len(rf) {
if cmp := strings.Compare(lf[i], rf[j]); cmp != 0 {
switch op {
case operatorEq:
return false, nil
case operatorGt:
return cmp > 0, nil
case operatorGte:
return cmp >= 0, nil
case operatorLt:
return cmp < 0, nil
case operatorLte:
return cmp <= 0, nil
}
}
lv, lerr := l.GetByField(lf[i])
rv, rerr := r.GetByField(rf[j])
if lerr == nil {
i++
}
if rerr == nil {
j++
}
if lerr != nil || rerr != nil {
break
}
isEq, err := compare(operatorEq, lv, rv, true)
if err != nil {
return false, err
}
if !isEq && op != operatorEq {
return compare(op, lv, rv, true)
}
if !isEq {
return false, nil
}
}
switch {
case i > j:
switch op {
case operatorEq, operatorLt, operatorLte:
return false, nil
default:
return true, nil
}
case i < j:
switch op {
case operatorEq, operatorGt, operatorGte:
return false, nil
default:
return true, nil
}
default:
switch op {
case operatorEq, operatorGte, operatorLte:
return true, nil
default:
return false, nil
}
}
} | document/compare.go | 0.743354 | 0.52007 | compare.go | starcoder |
package world
import (
"github.com/df-mc/dragonfly/server/block/cube"
"github.com/go-gl/mathgl/mgl64"
"math"
)
// blockPosFromNBT returns a position from the X, Y and Z components stored in
// the NBT data map passed. The map is assumed to have an 'x', 'y' and 'z' key;
// missing or non-int32 entries default to 0.
func blockPosFromNBT(data map[string]interface{}) cube.Pos {
	// A comma-ok type assertion on the map value already guards against both
	// a missing key (the value is nil) and a wrong dynamic type, so the
	// original two-step lookup — and its three //lint:ignore suppressions —
	// was redundant.
	x, _ := data["x"].(int32)
	y, _ := data["y"].(int32)
	z, _ := data["z"].(int32)
	return cube.Pos{int(x), int(y), int(z)}
}
// ChunkPos holds the position of a chunk. It is a utility type: chunks do not
// track their own position. Unlike block positions, incrementing X/Z by one
// advances 16 blocks on that axis.
type ChunkPos [2]int32

// X returns the X coordinate of the chunk position.
func (p ChunkPos) X() int32 { return p[0] }

// Z returns the Z coordinate of the chunk position.
func (p ChunkPos) Z() int32 { return p[1] }
// chunkPosFromVec3 returns the chunk position containing the Vec3: each
// coordinate is floored, then divided by 16 (arithmetic shift).
func chunkPosFromVec3(vec3 mgl64.Vec3) ChunkPos {
	cx := int32(math.Floor(vec3[0])) >> 4
	cz := int32(math.Floor(vec3[2])) >> 4
	return ChunkPos{cx, cz}
}
// chunkPosFromBlockPos returns the chunk position of the chunk that a block
// at this position would be in.
func chunkPosFromBlockPos(p cube.Pos) ChunkPos {
	cx, cz := p[0]>>4, p[2]>>4
	return ChunkPos{int32(cx), int32(cz)}
}
// Distance returns the distance between two vectors.
func Distance(a, b mgl64.Vec3) float64 {
return b.Sub(a).Len()
} | server/world/position.go | 0.820649 | 0.45532 | position.go | starcoder |
package plaid
import (
"encoding/json"
)
// AccountAssets struct for AccountAssets
type AccountAssets struct {
	// Plaid’s unique identifier for the account. Stable unless Plaid cannot reconcile the account with institution data (e.g. the account name changes), or the `access_token` is deleted and regenerated from the same credentials. A vanished (rather than changed) `account_id` usually means the account was closed; closed accounts are not returned by the Plaid API. Case sensitive.
	AccountId string `json:"account_id"`
	Balances AccountBalance `json:"balances"`
	// The last 2-4 alphanumeric characters of an account's official account number. May be non-unique between an Item's accounts and may differ from the mask the bank displays to the user.
	Mask NullableString `json:"mask"`
	// The name of the account, either assigned by the user or by the financial institution itself
	Name string `json:"name"`
	// The official name of the account as given by the financial institution
	OfficialName NullableString `json:"official_name"`
	Type AccountType `json:"type"`
	Subtype NullableAccountSubtype `json:"subtype"`
	// Verification status of an Auth Item via micro-deposits (Auth Items only). One of: `pending_automatic_verification`, `pending_manual_verification`, `automatically_verified`, `manually_verified`, `verification_expired` (automatic verification not completed within 7 calendar days), `verification_failed` (all 3 manual attempts exhausted). Users may retry by resubmitting through Link.
	VerificationStatus *string `json:"verification_status,omitempty"`
	// The duration of transaction history available for this Item, typically the time since the earliest transaction in the account. Only returned by Assets endpoints.
	DaysAvailable float32 `json:"days_available"`
	// Transaction history associated with the account. Only returned by Assets endpoints; other endpoints return transactions in the top-level `transactions` field instead.
	Transactions []AssetReportTransaction `json:"transactions"`
	// Data returned by the financial institution about the account owner or owners. Multiple owners on a single account appear in one `owner` object, not in multiple array entries.
	Owners []Owner `json:"owners"`
	// Calculated data about the historical balances on the account. Only returned by Assets endpoints.
	HistoricalBalances []HistoricalBalance `json:"historical_balances"`
}
// NewAccountAssets instantiates a new AccountAssets object with all required
// API properties set; the argument list changes if the required set changes.
func NewAccountAssets(accountId string, balances AccountBalance, mask NullableString, name string, officialName NullableString, type_ AccountType, subtype NullableAccountSubtype, daysAvailable float32, transactions []AssetReportTransaction, owners []Owner, historicalBalances []HistoricalBalance) *AccountAssets {
	return &AccountAssets{
		AccountId:          accountId,
		Balances:           balances,
		Mask:               mask,
		Name:               name,
		OfficialName:       officialName,
		Type:               type_,
		Subtype:            subtype,
		DaysAvailable:      daysAvailable,
		Transactions:       transactions,
		Owners:             owners,
		HistoricalBalances: historicalBalances,
	}
}

// NewAccountAssetsWithDefaults instantiates a new AccountAssets object with
// defaults only; properties required by the API are not guaranteed to be set.
func NewAccountAssetsWithDefaults() *AccountAssets {
	return &AccountAssets{}
}
// GetAccountId returns the AccountId field value
func (o *AccountAssets) GetAccountId() string {
	if o != nil {
		return o.AccountId
	}
	return ""
}

// GetAccountIdOk returns a tuple with the AccountId field value
// and a boolean to check if the value has been set.
func (o *AccountAssets) GetAccountIdOk() (*string, bool) {
	if o != nil {
		return &o.AccountId, true
	}
	return nil, false
}

// SetAccountId sets field value
func (o *AccountAssets) SetAccountId(v string) {
	o.AccountId = v
}

// GetBalances returns the Balances field value
func (o *AccountAssets) GetBalances() AccountBalance {
	if o != nil {
		return o.Balances
	}
	var zero AccountBalance
	return zero
}

// GetBalancesOk returns a tuple with the Balances field value
// and a boolean to check if the value has been set.
func (o *AccountAssets) GetBalancesOk() (*AccountBalance, bool) {
	if o != nil {
		return &o.Balances, true
	}
	return nil, false
}

// SetBalances sets field value
func (o *AccountAssets) SetBalances(v AccountBalance) {
	o.Balances = v
}

// GetMask returns the Mask field value
// If the value is explicit nil, the zero value for string will be returned
func (o *AccountAssets) GetMask() string {
	if o != nil {
		if p := o.Mask.Get(); p != nil {
			return *p
		}
	}
	return ""
}

// GetMaskOk returns a tuple with the Mask field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *AccountAssets) GetMaskOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Mask.Get(), o.Mask.IsSet()
}

// SetMask sets field value
func (o *AccountAssets) SetMask(v string) {
	o.Mask.Set(&v)
}
// GetName returns the Name field value
func (o *AccountAssets) GetName() string {
	if o != nil {
		return o.Name
	}
	return ""
}

// GetNameOk returns a tuple with the Name field value
// and a boolean to check if the value has been set.
func (o *AccountAssets) GetNameOk() (*string, bool) {
	if o != nil {
		return &o.Name, true
	}
	return nil, false
}

// SetName sets field value
func (o *AccountAssets) SetName(v string) {
	o.Name = v
}

// GetOfficialName returns the OfficialName field value
// If the value is explicit nil, the zero value for string will be returned
func (o *AccountAssets) GetOfficialName() string {
	if o != nil {
		if p := o.OfficialName.Get(); p != nil {
			return *p
		}
	}
	return ""
}

// GetOfficialNameOk returns a tuple with the OfficialName field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *AccountAssets) GetOfficialNameOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.OfficialName.Get(), o.OfficialName.IsSet()
}

// SetOfficialName sets field value
func (o *AccountAssets) SetOfficialName(v string) {
	o.OfficialName.Set(&v)
}
// GetType returns the Type field value.
// A nil receiver yields the zero value of AccountType.
func (o *AccountAssets) GetType() AccountType {
	if o != nil {
		return o.Type
	}
	var zero AccountType
	return zero
}

// GetTypeOk returns a pointer to the Type field value
// and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetTypeOk() (*AccountType, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Type, true
}

// SetType sets the Type field value.
func (o *AccountAssets) SetType(v AccountType) {
	o.Type = v
}
// GetSubtype returns the Subtype field value.
// If the value is explicit nil, the zero value for AccountSubtype is returned.
func (o *AccountAssets) GetSubtype() AccountSubtype {
	if o != nil {
		if p := o.Subtype.Get(); p != nil {
			return *p
		}
	}
	var zero AccountSubtype
	return zero
}

// GetSubtypeOk returns a tuple with the Subtype field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned.
func (o *AccountAssets) GetSubtypeOk() (*AccountSubtype, bool) {
	if o == nil {
		return nil, false
	}
	return o.Subtype.Get(), o.Subtype.IsSet()
}

// SetSubtype sets the Subtype field value.
func (o *AccountAssets) SetSubtype(v AccountSubtype) {
	o.Subtype.Set(&v)
}
// GetVerificationStatus returns the VerificationStatus field value if set,
// and the zero value otherwise.
func (o *AccountAssets) GetVerificationStatus() string {
	if o == nil || o.VerificationStatus == nil {
		return ""
	}
	return *o.VerificationStatus
}

// GetVerificationStatusOk returns the VerificationStatus field value if set,
// nil otherwise, and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetVerificationStatusOk() (*string, bool) {
	if o == nil || o.VerificationStatus == nil {
		return nil, false
	}
	return o.VerificationStatus, true
}

// HasVerificationStatus reports whether the VerificationStatus field has been set.
func (o *AccountAssets) HasVerificationStatus() bool {
	return o != nil && o.VerificationStatus != nil
}

// SetVerificationStatus stores a reference to the given string in the
// VerificationStatus field.
func (o *AccountAssets) SetVerificationStatus(v string) {
	o.VerificationStatus = &v
}
// GetDaysAvailable returns the DaysAvailable field value.
// A nil receiver yields zero.
func (o *AccountAssets) GetDaysAvailable() float32 {
	if o != nil {
		return o.DaysAvailable
	}
	return 0
}

// GetDaysAvailableOk returns a pointer to the DaysAvailable field value
// and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetDaysAvailableOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.DaysAvailable, true
}

// SetDaysAvailable sets the DaysAvailable field value.
func (o *AccountAssets) SetDaysAvailable(v float32) {
	o.DaysAvailable = v
}
// GetTransactions returns the Transactions field value.
// A nil receiver yields a nil slice.
func (o *AccountAssets) GetTransactions() []AssetReportTransaction {
	if o != nil {
		return o.Transactions
	}
	return nil
}

// GetTransactionsOk returns a pointer to the Transactions field value
// and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetTransactionsOk() (*[]AssetReportTransaction, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Transactions, true
}

// SetTransactions sets the Transactions field value.
func (o *AccountAssets) SetTransactions(v []AssetReportTransaction) {
	o.Transactions = v
}
// GetOwners returns the Owners field value.
// A nil receiver yields a nil slice.
func (o *AccountAssets) GetOwners() []Owner {
	if o != nil {
		return o.Owners
	}
	return nil
}

// GetOwnersOk returns a pointer to the Owners field value
// and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetOwnersOk() (*[]Owner, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Owners, true
}

// SetOwners sets the Owners field value.
func (o *AccountAssets) SetOwners(v []Owner) {
	o.Owners = v
}
// GetHistoricalBalances returns the HistoricalBalances field value.
// A nil receiver yields a nil slice.
func (o *AccountAssets) GetHistoricalBalances() []HistoricalBalance {
	if o != nil {
		return o.HistoricalBalances
	}
	return nil
}

// GetHistoricalBalancesOk returns a pointer to the HistoricalBalances field
// value and a boolean reporting whether the value has been set.
func (o *AccountAssets) GetHistoricalBalancesOk() (*[]HistoricalBalance, bool) {
	if o == nil {
		return nil, false
	}
	return &o.HistoricalBalances, true
}

// SetHistoricalBalances sets the HistoricalBalances field value.
func (o *AccountAssets) SetHistoricalBalances(v []HistoricalBalance) {
	o.HistoricalBalances = v
}
// MarshalJSON implements json.Marshaler for AccountAssets.
// Required fields are always serialized; Nullable fields serialize their
// inner value (JSON null when explicitly nil); the optional
// verification_status pointer field is emitted only when set.
// The original generated code guarded every required field with a dead
// `if true { ... }` conditional; those are folded into a single map literal.
func (o AccountAssets) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"account_id":          o.AccountId,
		"balances":            o.Balances,
		"mask":                o.Mask.Get(),
		"name":                o.Name,
		"official_name":       o.OfficialName.Get(),
		"type":                o.Type,
		"subtype":             o.Subtype.Get(),
		"days_available":      o.DaysAvailable,
		"transactions":        o.Transactions,
		"owners":              o.Owners,
		"historical_balances": o.HistoricalBalances,
	}
	if o.VerificationStatus != nil {
		toSerialize["verification_status"] = o.VerificationStatus
	}
	return json.Marshal(toSerialize)
}
type NullableAccountAssets struct {
value *AccountAssets
isSet bool
}
func (v NullableAccountAssets) Get() *AccountAssets {
return v.value
}
func (v *NullableAccountAssets) Set(val *AccountAssets) {
v.value = val
v.isSet = true
}
func (v NullableAccountAssets) IsSet() bool {
return v.isSet
}
func (v *NullableAccountAssets) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableAccountAssets(val *AccountAssets) *NullableAccountAssets {
return &NullableAccountAssets{value: val, isSet: true}
}
func (v NullableAccountAssets) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableAccountAssets) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | plaid/model_account_assets.go | 0.792865 | 0.51251 | model_account_assets.go | starcoder |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Test that we restrict inlining into very large functions.
// See issue #26546.
package foo
// small is cheap enough to inline even inside a "big" caller.
// The trailing // ERROR comments are errorcheck directives consumed by the
// Go compiler test harness and must stay on these exact lines.
func small(a []int) int { // ERROR "can inline small as:.*" "small a does not escape"
	// Cost 16 body (need cost < 20).
	// See cmd/compile/internal/gc/inl.go:inlineBigFunction*
	return a[0] + a[1] + a[2] + a[3]
}
// medium has an inlining cost between the normal budget and the reduced
// budget used inside big functions; the errorcheck harness verifies it is
// still inlinable in general but (per the caller below) not into f.
func medium(a []int) int { // ERROR "can inline medium as:.*" "medium a does not escape"
	// Cost 32 body (need cost > 20 and cost < 80).
	// See cmd/compile/internal/gc/inl.go:inlineBigFunction*
	return a[0] + a[1] + a[2] + a[3] + a[4] + a[5] + a[6] + a[7]
}
func f(a []int) int { // ERROR "cannot inline f:.*" "f a does not escape"
// Add lots of nodes to f's body. We need >5000.
// See cmd/compile/internal/gc/inl.go:inlineBigFunction*
a[0] = 0
a[1] = 0
a[2] = 0
a[3] = 0
a[4] = 0
a[5] = 0
a[6] = 0
a[7] = 0
a[8] = 0
a[9] = 0
a[10] = 0
a[11] = 0
a[12] = 0
a[13] = 0
a[14] = 0
a[15] = 0
a[16] = 0
a[17] = 0
a[18] = 0
a[19] = 0
a[20] = 0
a[21] = 0
a[22] = 0
a[23] = 0
a[24] = 0
a[25] = 0
a[26] = 0
a[27] = 0
a[28] = 0
a[29] = 0
a[30] = 0
a[31] = 0
a[32] = 0
a[33] = 0
a[34] = 0
a[35] = 0
a[36] = 0
a[37] = 0
a[38] = 0
a[39] = 0
a[40] = 0
a[41] = 0
a[42] = 0
a[43] = 0
a[44] = 0
a[45] = 0
a[46] = 0
a[47] = 0
a[48] = 0
a[49] = 0
a[50] = 0
a[51] = 0
a[52] = 0
a[53] = 0
a[54] = 0
a[55] = 0
a[56] = 0
a[57] = 0
a[58] = 0
a[59] = 0
a[60] = 0
a[61] = 0
a[62] = 0
a[63] = 0
a[64] = 0
a[65] = 0
a[66] = 0
a[67] = 0
a[68] = 0
a[69] = 0
a[70] = 0
a[71] = 0
a[72] = 0
a[73] = 0
a[74] = 0
a[75] = 0
a[76] = 0
a[77] = 0
a[78] = 0
a[79] = 0
a[80] = 0
a[81] = 0
a[82] = 0
a[83] = 0
a[84] = 0
a[85] = 0
a[86] = 0
a[87] = 0
a[88] = 0
a[89] = 0
a[90] = 0
a[91] = 0
a[92] = 0
a[93] = 0
a[94] = 0
a[95] = 0
a[96] = 0
a[97] = 0
a[98] = 0
a[99] = 0
a[100] = 0
a[101] = 0
a[102] = 0
a[103] = 0
a[104] = 0
a[105] = 0
a[106] = 0
a[107] = 0
a[108] = 0
a[109] = 0
a[110] = 0
a[111] = 0
a[112] = 0
a[113] = 0
a[114] = 0
a[115] = 0
a[116] = 0
a[117] = 0
a[118] = 0
a[119] = 0
a[120] = 0
a[121] = 0
a[122] = 0
a[123] = 0
a[124] = 0
a[125] = 0
a[126] = 0
a[127] = 0
a[128] = 0
a[129] = 0
a[130] = 0
a[131] = 0
a[132] = 0
a[133] = 0
a[134] = 0
a[135] = 0
a[136] = 0
a[137] = 0
a[138] = 0
a[139] = 0
a[140] = 0
a[141] = 0
a[142] = 0
a[143] = 0
a[144] = 0
a[145] = 0
a[146] = 0
a[147] = 0
a[148] = 0
a[149] = 0
a[150] = 0
a[151] = 0
a[152] = 0
a[153] = 0
a[154] = 0
a[155] = 0
a[156] = 0
a[157] = 0
a[158] = 0
a[159] = 0
a[160] = 0
a[161] = 0
a[162] = 0
a[163] = 0
a[164] = 0
a[165] = 0
a[166] = 0
a[167] = 0
a[168] = 0
a[169] = 0
a[170] = 0
a[171] = 0
a[172] = 0
a[173] = 0
a[174] = 0
a[175] = 0
a[176] = 0
a[177] = 0
a[178] = 0
a[179] = 0
a[180] = 0
a[181] = 0
a[182] = 0
a[183] = 0
a[184] = 0
a[185] = 0
a[186] = 0
a[187] = 0
a[188] = 0
a[189] = 0
a[190] = 0
a[191] = 0
a[192] = 0
a[193] = 0
a[194] = 0
a[195] = 0
a[196] = 0
a[197] = 0
a[198] = 0
a[199] = 0
a[200] = 0
a[201] = 0
a[202] = 0
a[203] = 0
a[204] = 0
a[205] = 0
a[206] = 0
a[207] = 0
a[208] = 0
a[209] = 0
a[210] = 0
a[211] = 0
a[212] = 0
a[213] = 0
a[214] = 0
a[215] = 0
a[216] = 0
a[217] = 0
a[218] = 0
a[219] = 0
a[220] = 0
a[221] = 0
a[222] = 0
a[223] = 0
a[224] = 0
a[225] = 0
a[226] = 0
a[227] = 0
a[228] = 0
a[229] = 0
a[230] = 0
a[231] = 0
a[232] = 0
a[233] = 0
a[234] = 0
a[235] = 0
a[236] = 0
a[237] = 0
a[238] = 0
a[239] = 0
a[240] = 0
a[241] = 0
a[242] = 0
a[243] = 0
a[244] = 0
a[245] = 0
a[246] = 0
a[247] = 0
a[248] = 0
a[249] = 0
a[250] = 0
a[251] = 0
a[252] = 0
a[253] = 0
a[254] = 0
a[255] = 0
a[256] = 0
a[257] = 0
a[258] = 0
a[259] = 0
a[260] = 0
a[261] = 0
a[262] = 0
a[263] = 0
a[264] = 0
a[265] = 0
a[266] = 0
a[267] = 0
a[268] = 0
a[269] = 0
a[270] = 0
a[271] = 0
a[272] = 0
a[273] = 0
a[274] = 0
a[275] = 0
a[276] = 0
a[277] = 0
a[278] = 0
a[279] = 0
a[280] = 0
a[281] = 0
a[282] = 0
a[283] = 0
a[284] = 0
a[285] = 0
a[286] = 0
a[287] = 0
a[288] = 0
a[289] = 0
a[290] = 0
a[291] = 0
a[292] = 0
a[293] = 0
a[294] = 0
a[295] = 0
a[296] = 0
a[297] = 0
a[298] = 0
a[299] = 0
a[300] = 0
a[301] = 0
a[302] = 0
a[303] = 0
a[304] = 0
a[305] = 0
a[306] = 0
a[307] = 0
a[308] = 0
a[309] = 0
a[310] = 0
a[311] = 0
a[312] = 0
a[313] = 0
a[314] = 0
a[315] = 0
a[316] = 0
a[317] = 0
a[318] = 0
a[319] = 0
a[320] = 0
a[321] = 0
a[322] = 0
a[323] = 0
a[324] = 0
a[325] = 0
a[326] = 0
a[327] = 0
a[328] = 0
a[329] = 0
a[330] = 0
a[331] = 0
a[332] = 0
a[333] = 0
a[334] = 0
a[335] = 0
a[336] = 0
a[337] = 0
a[338] = 0
a[339] = 0
a[340] = 0
a[341] = 0
a[342] = 0
a[343] = 0
a[344] = 0
a[345] = 0
a[346] = 0
a[347] = 0
a[348] = 0
a[349] = 0
a[350] = 0
a[351] = 0
a[352] = 0
a[353] = 0
a[354] = 0
a[355] = 0
a[356] = 0
a[357] = 0
a[358] = 0
a[359] = 0
a[360] = 0
a[361] = 0
a[362] = 0
a[363] = 0
a[364] = 0
a[365] = 0
a[366] = 0
a[367] = 0
a[368] = 0
a[369] = 0
a[370] = 0
a[371] = 0
a[372] = 0
a[373] = 0
a[374] = 0
a[375] = 0
a[376] = 0
a[377] = 0
a[378] = 0
a[379] = 0
a[380] = 0
a[381] = 0
a[382] = 0
a[383] = 0
a[384] = 0
a[385] = 0
a[386] = 0
a[387] = 0
a[388] = 0
a[389] = 0
a[390] = 0
a[391] = 0
a[392] = 0
a[393] = 0
a[394] = 0
a[395] = 0
a[396] = 0
a[397] = 0
a[398] = 0
a[399] = 0
a[400] = 0
a[401] = 0
a[402] = 0
a[403] = 0
a[404] = 0
a[405] = 0
a[406] = 0
a[407] = 0
a[408] = 0
a[409] = 0
a[410] = 0
a[411] = 0
a[412] = 0
a[413] = 0
a[414] = 0
a[415] = 0
a[416] = 0
a[417] = 0
a[418] = 0
a[419] = 0
a[420] = 0
a[421] = 0
a[422] = 0
a[423] = 0
a[424] = 0
a[425] = 0
a[426] = 0
a[427] = 0
a[428] = 0
a[429] = 0
a[430] = 0
a[431] = 0
a[432] = 0
a[433] = 0
a[434] = 0
a[435] = 0
a[436] = 0
a[437] = 0
a[438] = 0
a[439] = 0
a[440] = 0
a[441] = 0
a[442] = 0
a[443] = 0
a[444] = 0
a[445] = 0
a[446] = 0
a[447] = 0
a[448] = 0
a[449] = 0
a[450] = 0
a[451] = 0
a[452] = 0
a[453] = 0
a[454] = 0
a[455] = 0
a[456] = 0
a[457] = 0
a[458] = 0
a[459] = 0
a[460] = 0
a[461] = 0
a[462] = 0
a[463] = 0
a[464] = 0
a[465] = 0
a[466] = 0
a[467] = 0
a[468] = 0
a[469] = 0
a[470] = 0
a[471] = 0
a[472] = 0
a[473] = 0
a[474] = 0
a[475] = 0
a[476] = 0
a[477] = 0
a[478] = 0
a[479] = 0
a[480] = 0
a[481] = 0
a[482] = 0
a[483] = 0
a[484] = 0
a[485] = 0
a[486] = 0
a[487] = 0
a[488] = 0
a[489] = 0
a[490] = 0
a[491] = 0
a[492] = 0
a[493] = 0
a[494] = 0
a[495] = 0
a[496] = 0
a[497] = 0
a[498] = 0
a[499] = 0
a[500] = 0
a[501] = 0
a[502] = 0
a[503] = 0
a[504] = 0
a[505] = 0
a[506] = 0
a[507] = 0
a[508] = 0
a[509] = 0
a[510] = 0
a[511] = 0
a[512] = 0
a[513] = 0
a[514] = 0
a[515] = 0
a[516] = 0
a[517] = 0
a[518] = 0
a[519] = 0
a[520] = 0
a[521] = 0
a[522] = 0
a[523] = 0
a[524] = 0
a[525] = 0
a[526] = 0
a[527] = 0
a[528] = 0
a[529] = 0
a[530] = 0
a[531] = 0
a[532] = 0
a[533] = 0
a[534] = 0
a[535] = 0
a[536] = 0
a[537] = 0
a[538] = 0
a[539] = 0
a[540] = 0
a[541] = 0
a[542] = 0
a[543] = 0
a[544] = 0
a[545] = 0
a[546] = 0
a[547] = 0
a[548] = 0
a[549] = 0
a[550] = 0
a[551] = 0
a[552] = 0
a[553] = 0
a[554] = 0
a[555] = 0
a[556] = 0
a[557] = 0
a[558] = 0
a[559] = 0
a[560] = 0
a[561] = 0
a[562] = 0
a[563] = 0
a[564] = 0
a[565] = 0
a[566] = 0
a[567] = 0
a[568] = 0
a[569] = 0
a[570] = 0
a[571] = 0
a[572] = 0
a[573] = 0
a[574] = 0
a[575] = 0
a[576] = 0
a[577] = 0
a[578] = 0
a[579] = 0
a[580] = 0
a[581] = 0
a[582] = 0
a[583] = 0
a[584] = 0
a[585] = 0
a[586] = 0
a[587] = 0
a[588] = 0
a[589] = 0
a[590] = 0
a[591] = 0
a[592] = 0
a[593] = 0
a[594] = 0
a[595] = 0
a[596] = 0
a[597] = 0
a[598] = 0
a[599] = 0
a[600] = 0
a[601] = 0
a[602] = 0
a[603] = 0
a[604] = 0
a[605] = 0
a[606] = 0
a[607] = 0
a[608] = 0
a[609] = 0
a[610] = 0
a[611] = 0
a[612] = 0
a[613] = 0
a[614] = 0
a[615] = 0
a[616] = 0
a[617] = 0
a[618] = 0
a[619] = 0
a[620] = 0
a[621] = 0
a[622] = 0
a[623] = 0
a[624] = 0
a[625] = 0
a[626] = 0
a[627] = 0
a[628] = 0
a[629] = 0
a[630] = 0
a[631] = 0
a[632] = 0
a[633] = 0
a[634] = 0
a[635] = 0
a[636] = 0
a[637] = 0
a[638] = 0
a[639] = 0
a[640] = 0
a[641] = 0
a[642] = 0
a[643] = 0
a[644] = 0
a[645] = 0
a[646] = 0
a[647] = 0
a[648] = 0
a[649] = 0
a[650] = 0
a[651] = 0
a[652] = 0
a[653] = 0
a[654] = 0
a[655] = 0
a[656] = 0
a[657] = 0
a[658] = 0
a[659] = 0
a[660] = 0
a[661] = 0
a[662] = 0
a[663] = 0
a[664] = 0
a[665] = 0
a[666] = 0
a[667] = 0
a[668] = 0
a[669] = 0
a[670] = 0
a[671] = 0
a[672] = 0
a[673] = 0
a[674] = 0
a[675] = 0
a[676] = 0
a[677] = 0
a[678] = 0
a[679] = 0
a[680] = 0
a[681] = 0
a[682] = 0
a[683] = 0
a[684] = 0
a[685] = 0
a[686] = 0
a[687] = 0
a[688] = 0
a[689] = 0
a[690] = 0
a[691] = 0
a[692] = 0
a[693] = 0
a[694] = 0
a[695] = 0
a[696] = 0
a[697] = 0
a[698] = 0
a[699] = 0
a[700] = 0
a[701] = 0
a[702] = 0
a[703] = 0
a[704] = 0
a[705] = 0
a[706] = 0
a[707] = 0
a[708] = 0
a[709] = 0
a[710] = 0
a[711] = 0
a[712] = 0
a[713] = 0
a[714] = 0
a[715] = 0
a[716] = 0
a[717] = 0
a[718] = 0
a[719] = 0
a[720] = 0
a[721] = 0
a[722] = 0
a[723] = 0
a[724] = 0
a[725] = 0
a[726] = 0
a[727] = 0
a[728] = 0
a[729] = 0
a[730] = 0
a[731] = 0
a[732] = 0
a[733] = 0
a[734] = 0
a[735] = 0
a[736] = 0
a[737] = 0
a[738] = 0
a[739] = 0
a[740] = 0
a[741] = 0
a[742] = 0
a[743] = 0
a[744] = 0
a[745] = 0
a[746] = 0
a[747] = 0
a[748] = 0
a[749] = 0
a[750] = 0
a[751] = 0
a[752] = 0
a[753] = 0
a[754] = 0
a[755] = 0
a[756] = 0
a[757] = 0
a[758] = 0
a[759] = 0
a[760] = 0
a[761] = 0
a[762] = 0
a[763] = 0
a[764] = 0
a[765] = 0
a[766] = 0
a[767] = 0
a[768] = 0
a[769] = 0
a[770] = 0
a[771] = 0
a[772] = 0
a[773] = 0
a[774] = 0
a[775] = 0
a[776] = 0
a[777] = 0
a[778] = 0
a[779] = 0
a[780] = 0
a[781] = 0
a[782] = 0
a[783] = 0
a[784] = 0
a[785] = 0
a[786] = 0
a[787] = 0
a[788] = 0
a[789] = 0
a[790] = 0
a[791] = 0
a[792] = 0
a[793] = 0
a[794] = 0
a[795] = 0
a[796] = 0
a[797] = 0
a[798] = 0
a[799] = 0
a[800] = 0
a[801] = 0
a[802] = 0
a[803] = 0
a[804] = 0
a[805] = 0
a[806] = 0
a[807] = 0
a[808] = 0
a[809] = 0
a[810] = 0
a[811] = 0
a[812] = 0
a[813] = 0
a[814] = 0
a[815] = 0
a[816] = 0
a[817] = 0
a[818] = 0
a[819] = 0
a[820] = 0
a[821] = 0
a[822] = 0
a[823] = 0
a[824] = 0
a[825] = 0
a[826] = 0
a[827] = 0
a[828] = 0
a[829] = 0
a[830] = 0
a[831] = 0
a[832] = 0
a[833] = 0
a[834] = 0
a[835] = 0
a[836] = 0
a[837] = 0
a[838] = 0
a[839] = 0
a[840] = 0
a[841] = 0
a[842] = 0
a[843] = 0
a[844] = 0
a[845] = 0
a[846] = 0
a[847] = 0
a[848] = 0
a[849] = 0
a[850] = 0
a[851] = 0
a[852] = 0
a[853] = 0
a[854] = 0
a[855] = 0
a[856] = 0
a[857] = 0
a[858] = 0
a[859] = 0
a[860] = 0
a[861] = 0
a[862] = 0
a[863] = 0
a[864] = 0
a[865] = 0
a[866] = 0
a[867] = 0
a[868] = 0
a[869] = 0
a[870] = 0
a[871] = 0
a[872] = 0
a[873] = 0
a[874] = 0
a[875] = 0
a[876] = 0
a[877] = 0
a[878] = 0
a[879] = 0
a[880] = 0
a[881] = 0
a[882] = 0
a[883] = 0
a[884] = 0
a[885] = 0
a[886] = 0
a[887] = 0
a[888] = 0
a[889] = 0
a[890] = 0
a[891] = 0
a[892] = 0
a[893] = 0
a[894] = 0
a[895] = 0
a[896] = 0
a[897] = 0
a[898] = 0
a[899] = 0
a[900] = 0
a[901] = 0
a[902] = 0
a[903] = 0
a[904] = 0
a[905] = 0
a[906] = 0
a[907] = 0
a[908] = 0
a[909] = 0
a[910] = 0
a[911] = 0
a[912] = 0
a[913] = 0
a[914] = 0
a[915] = 0
a[916] = 0
a[917] = 0
a[918] = 0
a[919] = 0
a[920] = 0
a[921] = 0
a[922] = 0
a[923] = 0
a[924] = 0
a[925] = 0
a[926] = 0
a[927] = 0
a[928] = 0
a[929] = 0
a[930] = 0
a[931] = 0
a[932] = 0
a[933] = 0
a[934] = 0
a[935] = 0
a[936] = 0
a[937] = 0
a[938] = 0
a[939] = 0
a[940] = 0
a[941] = 0
a[942] = 0
a[943] = 0
a[944] = 0
a[945] = 0
a[946] = 0
a[947] = 0
a[948] = 0
a[949] = 0
a[950] = 0
a[951] = 0
a[952] = 0
a[953] = 0
a[954] = 0
a[955] = 0
a[956] = 0
a[957] = 0
a[958] = 0
a[959] = 0
a[960] = 0
a[961] = 0
a[962] = 0
a[963] = 0
a[964] = 0
a[965] = 0
a[966] = 0
a[967] = 0
a[968] = 0
a[969] = 0
a[970] = 0
a[971] = 0
a[972] = 0
a[973] = 0
a[974] = 0
a[975] = 0
a[976] = 0
a[977] = 0
a[978] = 0
a[979] = 0
a[980] = 0
a[981] = 0
a[982] = 0
a[983] = 0
a[984] = 0
a[985] = 0
a[986] = 0
a[987] = 0
a[988] = 0
a[989] = 0
a[990] = 0
a[991] = 0
a[992] = 0
a[993] = 0
a[994] = 0
a[995] = 0
a[996] = 0
a[997] = 0
a[998] = 0
a[999] = 0
x := small(a) // ERROR "inlining call to small .*"
y := medium(a) // The crux of this test: medium is not inlined.
return x + y
} | test/inline_big.go | 0.532547 | 0.44553 | inline_big.go | starcoder |
package datasets
import (
"math"
"math/rand"
"runtime"
"sort"
"github.com/pa-m/sklearn/base"
"github.com/pa-m/sklearn/preprocessing"
"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/mat"
"gonum.org/v1/gonum/stat/distmv"
)
// MakeRegression Generate a random regression problem
// n_samples : int, optional (default=100) The number of samples.
// n_features : int, optional (default=100) The number of features.
// n_informative : int, optional (default=10) The number of informative features, i.e., the number of features used to build the linear model used to generate the output.
// n_targets : int, optional (default=1) The number of regression targets, i.e., the dimension of the y output vector associated with a sample. By default, the output is a scalar.
// bias : float64 or []float64 or mat.Matrix, optional (default=0.0) The bias term in the underlying linear model.
// effective_rank : int , optional (default=None) currently unused
// tail_strength : float between 0.0 and 1.0, optional (default=0.5) currently unused
// shuffle : boolean, optional (default=True)
// coef : boolean. the coefficients of the underlying linear model are returned regardless its value.
// random_state : *math.Rand optional (default=nil)
// MakeRegression generates a random regression problem.
//
// Recognised kwargs:
//   n_samples     int (default 100)  number of samples.
//   n_features    int (default 100)  number of features.
//   n_informative int (default 10)   features actually used by the linear
//                                    model; clamped to n_features.
//   n_targets     int (default 1)    dimension of each y row.
//   bias          float64, []float64 or mat.Matrix (default none) bias term.
//   shuffle       bool (default true); when false, each feature column of X
//                 is sorted ascending after generation.
//   random_state  *rand.Rand (default: the global rand source).
//
// Returns the design matrix X (n_samples x n_features), the targets y
// (n_samples x n_targets) and the coefficients Coef
// (n_informative x n_targets) of the underlying linear model.
func MakeRegression(kwargs map[string]interface{}) (X, y, Coef *mat.Dense) {
	rnd := func() float64 { return rand.NormFloat64() }
	nSamples, nFeatures, nInformative, nTargets, shuffle := 100, 100, 10, 1, true
	if v, ok := kwargs["n_samples"]; ok {
		nSamples = v.(int)
	}
	if v, ok := kwargs["n_features"]; ok {
		nFeatures = v.(int)
	}
	if v, ok := kwargs["n_informative"]; ok {
		nInformative = v.(int)
	}
	if v, ok := kwargs["n_targets"]; ok {
		nTargets = v.(int)
	}
	if v, ok := kwargs["shuffle"]; ok {
		// Bug fix: the flag used to be a hard-coded local `true`, so the
		// documented "shuffle" kwarg was silently ignored and the sorting
		// branch below was unreachable.
		shuffle = v.(bool)
	}
	if v, ok := kwargs["random_state"]; ok {
		rnd = func() float64 { return v.(*rand.Rand).NormFloat64() }
	}
	if nInformative > nFeatures {
		nInformative = nFeatures
	}
	X = mat.NewDense(nSamples, nFeatures, nil)
	y = mat.NewDense(nSamples, nTargets, nil)
	Coef = mat.NewDense(nInformative, nTargets, nil)
	// Fill X, then Coef, with standard normal noise (iteration over the raw
	// backing slice, row by row via the stride).
	xmat := X.RawMatrix()
	for xi := 0; xi < xmat.Rows*xmat.Stride; xi += xmat.Stride {
		for xj := 0; xj < xmat.Cols; xj++ {
			xmat.Data[xi+xj] = rnd()
		}
	}
	cmat := Coef.RawMatrix()
	for ci := 0; ci < cmat.Rows*cmat.Stride; ci += cmat.Stride {
		for cj := 0; cj < cmat.Cols; cj++ {
			cmat.Data[ci+cj] = rnd()
		}
	}
	if !shuffle {
		// Bug fix: this sort used to run before X was filled, so it only
		// rearranged zeros. Sort each feature column after generation.
		col := make([]float64, nSamples)
		for feat := 0; feat < nFeatures; feat++ {
			mat.Col(col, feat, X)
			sort.Float64s(col)
			X.SetCol(feat, col)
		}
	}
	// y = X[:, :nInformative] * Coef
	base.MatDimsCheck(".", y, X.Slice(0, nSamples, 0, nInformative), Coef)
	y.Mul(X.Slice(0, nSamples, 0, nInformative), Coef)
	if v, ok := kwargs["bias"]; ok {
		ymat := y.RawMatrix()
		switch vv := v.(type) {
		case float64:
			// Scalar bias added to every target value.
			for yi := 0; yi < ymat.Rows*ymat.Stride; yi += ymat.Stride {
				for yj := 0; yj < ymat.Cols; yj++ {
					ymat.Data[yi+yj] += vv
				}
			}
		case mat.Matrix:
			// Per-target bias taken from the first row of the matrix.
			for yi := 0; yi < ymat.Rows*ymat.Stride; yi += ymat.Stride {
				for yj := 0; yj < ymat.Cols; yj++ {
					ymat.Data[yi+yj] += vv.At(0, yj)
				}
			}
		case []float64:
			// Per-target bias slice, indexed by target column.
			for yi := 0; yi < ymat.Rows*ymat.Stride; yi += ymat.Stride {
				for yj := 0; yj < ymat.Cols; yj++ {
					ymat.Data[yi+yj] += vv[yj]
				}
			}
		}
	}
	return
}
// TODO sklearn.datasets.make_classification(n_samples=100, n_features=20, n_informative=2, n_redundant=2, n_repeated=0, n_classes=2, n_clusters_per_class=2, weights=None, flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, scale=1.0, shuffle=True, random_state=None)[source]
// MakeBlobsConfig is the struct of MakeBlobs params
// MakeBlobsConfig holds the parameters for MakeBlobs.
type MakeBlobsConfig struct {
	// NSamples is the total number of generated points (default 100 when <= 0).
	NSamples int
	// NFeatures is the dimensionality of each point (default 2 when <= 0).
	NFeatures int
	// Centers is either an int (number of randomly placed centers) or a
	// mat.Matrix of explicit center coordinates; any other value (including
	// nil) yields 3 random centers.
	Centers interface{} // integer or mat.Matrix(NCenters,NFeatures)
	// ClusterStd is the standard deviation of each cluster (default 1 when <= 0).
	ClusterStd float64
	// CenterBox is the [min, max) interval random center coordinates are
	// drawn from (default [-10, 10]).
	CenterBox []float64
	// Shuffle, when true, shuffles the generated samples and labels.
	Shuffle bool
	// RandomState is an optional deterministic random source; nil uses the
	// global math/rand source.
	RandomState *rand.Rand
}
// MakeBlobs Generate isotropic Gaussian blobs for clustering
// config may be null or preintialised
// config.Centers may be and int or a mat.Matrix
// unlinke scikit-learn's make_blob, Shuffle is false by default
func MakeBlobs(config *MakeBlobsConfig) (X, Y *mat.Dense) {
if config == nil {
config = &MakeBlobsConfig{}
}
if config.NSamples <= 0 {
config.NSamples = 100
}
if config.NFeatures <= 0 {
config.NFeatures = 2
}
var Centers *mat.Dense
randomizeCenters := true
switch c := config.Centers.(type) {
case int:
Centers = mat.NewDense(c, config.NFeatures, nil)
case mat.Matrix:
Centers = mat.DenseCopyOf(c)
randomizeCenters = false
default:
Centers = mat.NewDense(3, config.NFeatures, nil)
}
NCenters, _ := Centers.Dims()
if config.ClusterStd <= 0. {
config.ClusterStd = 1.
}
if config.CenterBox == nil {
config.CenterBox = []float64{-10, 10}
}
randNormFloat64 := rand.NormFloat64
randIntn := rand.Intn
if config.RandomState != nil {
randNormFloat64 = config.RandomState.NormFloat64
randIntn = config.RandomState.Intn
}
if randomizeCenters {
boxCenter := (config.CenterBox[0] + config.CenterBox[1]) / 2
boxRadius := math.Abs(config.CenterBox[1]-config.CenterBox[0]) / 2
Craw := Centers.RawMatrix()
for i := range Craw.Data {
for {
Craw.Data[i] = boxCenter + randNormFloat64()*boxRadius
if Craw.Data[i] >= config.CenterBox[0] && Craw.Data[i] < config.CenterBox[1] {
break
}
}
}
}
X = mat.NewDense(config.NSamples, config.NFeatures, nil)
Y = mat.NewDense(config.NSamples, 1, nil)
base.Parallelize(runtime.NumCPU(), config.NSamples, func(th, start, end int) {
mu := make([]float64, config.NFeatures)
sigma := mat.NewSymDense(config.NFeatures, nil)
for i := 0; i < config.NFeatures; i++ {
sigma.SetSym(i, i, 1)
}
sigma.ScaleSym(config.ClusterStd*config.ClusterStd, sigma)
normal, _ := distmv.NewNormal(mu, sigma, nil)
_ = normal
for sample := start; sample < end; sample++ {
cluster := randIntn(NCenters)
Y.Set(sample, 0, float64(cluster))
normal.Rand(X.RawRowView(sample))
floats.Add(X.RawRowView(sample), Centers.RawRowView(cluster))
}
})
if config.Shuffle {
X, Y = preprocessing.NewShuffler().FitTransform(X, Y)
}
return
} | datasets/samples_generator.go | 0.590307 | 0.484868 | samples_generator.go | starcoder |
package dicom
import (
"bytes"
"compress/flate"
"fmt"
"io"
)
// DataElementIterator represents an iterator over a DataSet's DataElements
// DataElementIterator represents an iterator over a DataSet's DataElements.
type DataElementIterator interface {
	// Next returns the next DataElement in the DataSet. If there is no next DataElement, the
	// error io.EOF is returned. In addition, if any previously returned DataElements contained
	// iterable objects like SequenceIterator or BulkDataIterator, those iterators are emptied
	// so the underlying input is positioned at the next element.
	Next() (*DataElement, error)
	// Close discards all remaining DataElements in the iterator.
	Close() error
	// Length returns the number of bytes of the DataSet defined by elements in the iterator. Can
	// be equal to UndefinedLength (or equivalently 0xFFFFFFFF) to represent undefined length.
	Length() uint32
	// syntax reports the transfer syntax used to decode this iterator's elements.
	syntax() transferSyntax
}
// NewDataElementIterator creates a DataElementIterator from a DICOM file. The implementation
// returned will consume input from the io.Reader given as needed. It is the callers responsibility
// to ensure that Close is called when done consuming DataElements.
// NewDataElementIterator creates a DataElementIterator from a DICOM file. The
// implementation returned consumes input from the io.Reader as needed. It is
// the caller's responsibility to ensure Close is called when done consuming
// DataElements.
//
// The file preamble/signature is validated, the file meta group is buffered
// and re-served through the returned iterator, and the transfer syntax found
// in the meta group governs decoding of the main data set. For the deflated
// transfer syntax, the data set bytes are routed through a flate reader that
// is closed alongside the iterator.
// (Refactor: the iterator struct was previously built twice, once per
// branch, with identical fields; it is now constructed once.)
func NewDataElementIterator(r io.Reader) (DataElementIterator, error) {
	dr := newDcmReader(r)
	if err := readDicomSignature(dr); err != nil {
		return nil, err
	}
	metaHeaderBytes, err := bufferMetadataHeader(dr)
	if err != nil {
		return nil, fmt.Errorf("reading meta header: %v", err)
	}
	syntax, err := findSyntax(metaHeaderBytes)
	if err != nil {
		return nil, fmt.Errorf("finding transfer syntax: %v", err)
	}
	metaReader := newDcmReader(bytes.NewBuffer(metaHeaderBytes))
	metaHeader := newDataElementIterator(metaReader, explicitVRLittleEndian, UndefinedLength)
	iter := &dataElementIterator{
		dr:             dr,
		transferSyntax: syntax,
		currentElement: nil,
		empty:          false,
		metaHeader:     metaHeader,
		length:         UndefinedLength,
	}
	if syntax == deflatedExplicitVRLittleEndian {
		// The deflated data set is read through a decompressor over the raw
		// reader (as in the original code, not over the buffered dcmReader).
		decompressor := flate.NewReader(r)
		iter.dr = newDcmReader(decompressor)
		return &deflatedDataElementIterator{DataElementIterator: iter, closer: decompressor}, nil
	}
	return iter, nil
}
// newDataElementIterator creates a DataElementIterator from a byte stream that excludes header info
// (preamble and metadata elements)
// newDataElementIterator creates a DataElementIterator from a byte stream
// that excludes header info (preamble and file meta elements). The meta
// header iterator is an already-exhausted placeholder.
// (Fix: the struct literal now uses named fields, matching the sibling
// constructor and protecting against field reordering.)
func newDataElementIterator(r *dcmReader, syntax transferSyntax, length uint32) DataElementIterator {
	return &dataElementIterator{
		dr:             r,
		transferSyntax: syntax,
		currentElement: nil,
		empty:          false,
		metaHeader:     emptyElementIterator{syntax},
		length:         length,
	}
}
// dataElementIterator is the default DataElementIterator implementation: it
// first serves any buffered file meta elements, then lazily parses the main
// data set from the underlying reader.
type dataElementIterator struct {
	dr             *dcmReader          // source of the (possibly decompressed) data set bytes
	transferSyntax transferSyntax      // syntax used to decode main data set elements
	currentElement *DataElement        // last element returned; drained before reading the next
	empty          bool                // latched true once the reader reports io.EOF
	metaHeader     DataElementIterator // buffered file meta group, served before the data set
	length         uint32              // data set byte length, or UndefinedLength
}
// Next returns the next DataElement, serving any remaining file meta
// elements before advancing into the main data set.
func (it *dataElementIterator) Next() (*DataElement, error) {
	elem, err := it.metaHeader.Next()
	switch err {
	case nil:
		return elem, nil
	case io.EOF:
		// Meta group exhausted; continue with the main data set.
		return it.nextDataSetElement()
	default:
		return nil, err
	}
}
// syntax returns the transfer syntax used to decode the main data set.
func (it *dataElementIterator) syntax() transferSyntax {
	return it.transferSyntax
}
// nextDataSetElement drains any streaming value exposed by the previously
// returned element, then parses the next element of the main data set.
// Once the underlying reader reports io.EOF the iterator latches into the
// empty state and keeps returning io.EOF.
func (it *dataElementIterator) nextDataSetElement() (*DataElement, error) {
	if it.empty {
		return nil, io.EOF
	}
	// Position the reader at the start of the next element (see closeCurrent).
	if err := it.closeCurrent(); err != nil {
		return nil, fmt.Errorf("closing: %v", err)
	}
	element, err := readDataElement(it.dr, it.transferSyntax)
	if err == io.EOF {
		it.empty = true
		return nil, io.EOF
	}
	if err != nil {
		return nil, fmt.Errorf("parsing element: %v", err)
	}
	it.currentElement = element
	return it.currentElement, nil
}
// Close discards all remaining DataElements by iterating to exhaustion.
func (it *dataElementIterator) Close() error {
	for {
		_, err := it.Next()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return fmt.Errorf("unexpected error closing iterator: %v", err)
		}
	}
}
// Length returns the byte length of the data set covered by this iterator;
// UndefinedLength (0xFFFFFFFF) when the length is not known up front.
func (it *dataElementIterator) Length() uint32 {
	return it.length
}
// closeCurrent ensures the iterator is ready to read the next DataElement.
// If the previously returned element exposed a stream of bytes (e.g. a
// BulkDataIterator), that stream is drained so the input advances to the
// bytes of the next DataElement. This mirrors the pattern used by
// mime/multipart.Reader in the Go standard library:
// https://golang.org/src/mime/multipart/multipart.go?s=8400:8697#L303
func (it *dataElementIterator) closeCurrent() error {
	cur := it.currentElement
	if cur == nil {
		return nil
	}
	closer, ok := cur.ValueField.(io.Closer)
	if !ok {
		return nil
	}
	return closer.Close()
}
// readDicomSignature skips the 128-byte file preamble and verifies the
// "DICM" magic bytes that follow it.
func readDicomSignature(r *dcmReader) error {
	const preambleSize = 128
	if err := r.Skip(preambleSize); err != nil {
		return fmt.Errorf("skipping preamble: %v", err)
	}
	magic, err := r.String(4)
	if err != nil {
		return fmt.Errorf("reading DICOM signature: %v", err)
	}
	if magic != "DICM" {
		return fmt.Errorf("wrong DICOM signature: %v", magic)
	}
	return nil
}
// bufferMetadataHeader reads the FileMetaInformationGroupLength element
// (always the first element of the file meta group, encoded as explicit VR
// little endian with a 4-byte UL value) and then buffers the number of
// bytes it announces. It returns the raw bytes of the entire meta group,
// including the group-length element itself, so the caller can re-parse it.
func bufferMetadataHeader(dr *dcmReader) ([]byte, error) {
	firstElemBytes, err := dr.Bytes(4 /*tag*/ + 2 /*vr*/ + 2 /*len*/ + 4 /*UL=4bytes*/)
	if err != nil {
		return nil, fmt.Errorf("buffering bytes of FileMetaInformationGroupLength: %v", err)
	}
	// Re-parse the buffered bytes to extract the announced group length.
	firstElem, err := readDataElement(newDcmReader(bytes.NewBuffer(firstElemBytes)), explicitVRLittleEndian)
	if err != nil {
		return nil, fmt.Errorf("parsing FileMetaInformationGroupLength element: %v", err)
	}
	metaGroupLength, err := firstElem.IntValue()
	if err != nil {
		return nil, fmt.Errorf("FileMetaInformationGroupLength could not be converted to int: %v", err)
	}
	remainderBytes, err := dr.Bytes(metaGroupLength)
	if err != nil {
		return nil, fmt.Errorf("buffering file meta elements: %v", err)
	}
	return append(firstElemBytes, remainderBytes...), nil
}
// findSyntax scans the buffered file meta group for the TransferSyntaxUID
// element and returns the corresponding transferSyntax.
// (Cleanup: the original mixed `return nil, ...` with returns of an unused
// zero-valued `syntax` variable; error paths now consistently return nil.)
func findSyntax(metaHeaderBytes []byte) (transferSyntax, error) {
	metaDCMReader := newDcmReader(bytes.NewBuffer(metaHeaderBytes))
	metaIter := newDataElementIterator(metaDCMReader, explicitVRLittleEndian, UndefinedLength)
	for elem, err := metaIter.Next(); err != io.EOF; elem, err = metaIter.Next() {
		if err != nil {
			return nil, fmt.Errorf("reading meta element: %v", err)
		}
		if elem.Tag != TransferSyntaxUIDTag {
			continue
		}
		syntaxID, err := elem.StringValue()
		if err != nil {
			return nil, fmt.Errorf("syntax element could not be converted to string: %v", err)
		}
		return lookupTransferSyntax(syntaxID), nil
	}
	return nil, fmt.Errorf("transfer syntax not found")
}
// deflatedDataElementIterator wraps a DataElementIterator whose input flows
// through a flate decompressor that must also be closed.
type deflatedDataElementIterator struct {
	DataElementIterator
	closer io.Closer // the flate reader feeding the embedded iterator
}
// Close drains the wrapped iterator first, then closes the decompressor.
func (it *deflatedDataElementIterator) Close() error {
	err := it.DataElementIterator.Close()
	if err != nil {
		return err
	}
	return it.closer.Close()
}
type emptyElementIterator struct {
transferSyntax transferSyntax
}
func (it emptyElementIterator) Next() (*DataElement, error) {
return nil, io.EOF
}
func (it emptyElementIterator) syntax() transferSyntax {
return it.transferSyntax
}
func (it emptyElementIterator) Close() error {
return nil
}
func (it emptyElementIterator) Length() uint32 {
return 0
} | dicom/iterator.go | 0.76454 | 0.412116 | iterator.go | starcoder |
package main
import (
"log"
)
// parseFlashMap converts lines of digit characters into a HeightMap of
// octopus energy levels.
func parseFlashMap(data []string) HeightMap {
	grid := make([][]int, 0, len(data))
	for _, line := range data {
		row := make([]int, 0, len(line))
		for _, ch := range line {
			row = append(row, int(ch-'0'))
		}
		grid = append(grid, row)
	}
	return HeightMap{grid}
}
// contains reports whether point lies within the map bounds.
func (heightMap HeightMap) contains(point Point32) bool {
	inX := point.x >= 0 && point.x < heightMap.xsize()
	inY := point.y >= 0 && point.y < heightMap.ysize()
	return inX && inY
}
// getAllAdjacentPoints returns the in-bounds neighbours of point, including
// diagonals (up to 8 points). The point itself is excluded: previously the
// 0/0 offset was included, which made a flashing octopus bump its own
// energy. That was harmless to the simulation outcome (a flashed cell is
// already >= 10 and resets to 0 at the end of the step either way) but is
// semantically wrong for an "adjacent" query.
func (heightMap HeightMap) getAllAdjacentPoints(point Point32) []Point32 {
	adjacent := make([]Point32, 0, 8)
	for dy := -1; dy <= 1; dy++ {
		for dx := -1; dx <= 1; dx++ {
			if dx == 0 && dy == 0 {
				continue
			}
			p := Point32{point.x + dx, point.y + dy}
			if heightMap.contains(p) {
				adjacent = append(adjacent, p)
			}
		}
	}
	return adjacent
}
// flash propagates a single octopus flash: each point returned by
// getAllAdjacentPoints gains one energy, and any that thereby reaches
// exactly 10 flashes recursively. Returns the total number of flashes
// triggered, including this one. The == 10 trigger (rather than >= 10)
// ensures each octopus flashes at most once per step; energies above 10 are
// reset to 0 by the caller at the end of the step.
func flash(heightMap HeightMap, point Point32) int {
	flashes := 1
	for _, adj := range heightMap.getAllAdjacentPoints(point) {
		heightMap.points[adj.y][adj.x] += 1
		if heightMap.points[adj.y][adj.x] == 10 {
			flashes += flash(heightMap, adj)
		}
	}
	return flashes
}
// countFlashes simulates the octopus grid for the given number of steps and
// returns how many flashes occurred in total. Each step: every cell gains
// one energy, cells reaching 10 flash (cascading via flash), then every
// cell at 10 or above is reset to 0.
func countFlashes(heightMap HeightMap, days int) int {
	total := 0
	for step := 0; step < days; step++ {
		// Phase 1: increment everyone and cascade flashes.
		for row := 0; row < heightMap.ysize(); row++ {
			for col := 0; col < heightMap.xsize(); col++ {
				heightMap.points[row][col]++
				if heightMap.points[row][col] == 10 {
					total += flash(heightMap, Point32{col, row})
				}
			}
		}
		// Phase 2: reset every cell that flashed this step.
		for row := 0; row < heightMap.ysize(); row++ {
			for col := 0; col < heightMap.xsize(); col++ {
				if heightMap.points[row][col] >= 10 {
					heightMap.points[row][col] = 0
				}
			}
		}
	}
	return total
}
// allFlashAt runs the simulation until a step in which every octopus
// flashes simultaneously, returning that (1-based) step number. Each step
// mirrors countFlashes: increment all, cascade flashes, then reset cells at
// 10 or above to 0.
func allFlashAt(heightMap HeightMap) int {
	for i := 0; ; i++ {
		stepFlashes := 0
		for y := 0; y < heightMap.ysize(); y++ {
			for x := 0; x < heightMap.xsize(); x++ {
				heightMap.points[y][x] += 1
				if heightMap.points[y][x] == 10 {
					stepFlashes += flash(heightMap, Point32{x, y})
				}
			}
		}
		for y := 0; y < heightMap.ysize(); y++ {
			for x := 0; x < heightMap.xsize(); x++ {
				if heightMap.points[y][x] >= 10 {
					heightMap.points[y][x] = 0
				}
			}
		}
		// Every cell flashed in this step: done.
		if stepFlashes == heightMap.ysize()*heightMap.xsize() {
			return i + 1
		}
	}
}
func main11() {
data, err := ReadInputFrom("11.inp")
if err != nil {
log.Fatal(err)
return
}
flashMap := parseFlashMap(data)
log.Println(countFlashes(flashMap, 100))
flashMap2 := parseFlashMap(data)
log.Println(allFlashAt(flashMap2))
} | 2021/11.go | 0.577495 | 0.429489 | 11.go | starcoder |
package coinharness
import (
"fmt"
"github.com/jfixby/pin"
"github.com/jfixby/pin/commandline"
"strconv"
"strings"
)
// DeploySimpleChain defines harness setup sequence for this package:
// 1. obtains a new mining wallet address
// 2. restart harness node and wallet with the new mining address
// 3. builds a new chain with the target number of mature outputs
// receiving the mining reward to the test wallet
// 4. syncs wallet to the tip of the chain
func DeploySimpleChain(testSetup *ChainWithMatureOutputsSpawner, h *Harness) {
	pin.AssertNotEmpty("harness name", h.Name)
	fmt.Println("Deploying Harness[" + h.Name + "]")
	// createFlag requests a temporary wallet ("createtemp") for this run.
	createFlag := testSetup.CreateTempWallet
	// launch a fresh h (assumes h working dir is empty)
	{
		args := &launchArguments{
			DebugNodeOutput:    testSetup.DebugNodeOutput,
			DebugWalletOutput:  testSetup.DebugWalletOutput,
			NodeExtraArguments: testSetup.NodeStartExtraArguments,
		}
		if createFlag {
			args.WalletExtraArguments = make(map[string]interface{})
			args.WalletExtraArguments["createtemp"] = commandline.NoArgumentValue
		}
		launchHarnessSequence(h, args)
	}
	// Get a new address from the WalletTestServer
	// to be set with node --miningaddr
	var address Address
	var err error
	{
		// Retry until the wallet RPC is ready to hand out an address; early
		// calls may fail while the wallet is still starting up.
		for {
			address, err = h.Wallet.NewAddress(DefaultAccountName)
			if err != nil {
				pin.D("address", address)
				pin.D("error", err)
				pin.Sleep(1000)
			} else {
				break
			}
		}
		//pin.CheckTestSetupMalfunction(err)
		h.MiningAddress = address
		pin.AssertNotNil("MiningAddress", h.MiningAddress)
		pin.AssertNotEmpty("MiningAddress", h.MiningAddress.String())
		fmt.Println("Mining address: " + h.MiningAddress.String())
	}
	// restart the h with the new argument
	{
		shutdownHarnessSequence(h)
		args := &launchArguments{
			DebugNodeOutput:    testSetup.DebugNodeOutput,
			DebugWalletOutput:  testSetup.DebugWalletOutput,
			NodeExtraArguments: testSetup.NodeStartExtraArguments,
		}
		if createFlag {
			args.WalletExtraArguments = make(map[string]interface{})
			args.WalletExtraArguments["createtemp"] = commandline.NoArgumentValue
		}
		launchHarnessSequence(h, args)
	}
	{
		if testSetup.NumMatureOutputs > 0 {
			// Mine enough blocks past coinbase maturity so the requested
			// number of outputs become spendable.
			numToGenerate := int64(testSetup.ActiveNet.CoinbaseMaturity()) + testSetup.NumMatureOutputs
			err := GenerateTestChain(numToGenerate, h.NodeRPCClient())
			pin.CheckTestSetupMalfunction(err)
		}
		// wait for the WalletTestServer to sync up to the current height
		_, H, e := h.NodeRPCClient().GetBestBlock()
		pin.CheckTestSetupMalfunction(e)
		h.Wallet.Sync(H)
	}
	fmt.Println("Harness[" + h.Name + "] is ready")
}
// local struct to bundle launchHarnessSequence function arguments
type launchArguments struct {
	DebugNodeOutput      bool                   // stream node output for debugging
	DebugWalletOutput    bool                   // stream wallet output for debugging
	MiningAddress        Address                // NOTE(review): appears unused; the mining address is read from the Harness instead
	NodeExtraArguments   map[string]interface{} // extra CLI arguments for the node
	WalletExtraArguments map[string]interface{} // extra CLI arguments for the wallet
}
// launchHarnessSequence starts the harness node first, then starts the
// wallet pointed at the node's RPC endpoint.
func launchHarnessSequence(h *Harness, args *launchArguments) {
	node := h.Node
	wallet := h.Wallet
	sargs := &StartNodeArgs{
		DebugOutput:    args.DebugNodeOutput,
		MiningAddress:  h.MiningAddress,
		ExtraArguments: args.NodeExtraArguments,
	}
	node.Start(sargs)
	rpcConfig := node.RPCConnectionConfig()
	walletLaunchArguments := &TestWalletStartArgs{
		NodeRPCCertFile:          node.CertFile(),
		DebugOutput:              args.DebugWalletOutput,
		MaxSecondsToWaitOnLaunch: 90,
		NodeRPCConfig:            rpcConfig,
		ExtraArguments:           args.WalletExtraArguments,
	}
	// NOTE(review): presumably a node-readiness probe — the call blocks
	// until the node RPC answers and its result is discarded before the
	// wallet is started; confirm intent.
	_, _, e := h.NodeRPCClient().GetBestBlock()
	pin.CheckTestSetupMalfunction(e)
	wallet.Start(walletLaunchArguments)
}
// shutdownHarnessSequence reverses the launchHarnessSequence:
// the wallet is stopped before the node it depends on.
func shutdownHarnessSequence(harness *Harness) {
	harness.Wallet.Stop()
	harness.Node.Stop()
}
// ExtractSeedSaltFromHarnessName tries to split harness name string
// at `.`-character and parse the second part as a uint32 number.
// Otherwise returns default value.
func ExtractSeedSaltFromHarnessName(harnessName string) uint32 {
parts := strings.Split(harnessName, ".")
if len(parts) != 2 {
// no salt specified, return default value
return 0
}
seedString := parts[1]
tmp, err := strconv.Atoi(seedString)
seedNonce := uint32(tmp)
pin.CheckTestSetupMalfunction(err)
return seedNonce
} | simplechainbuilder.go | 0.529507 | 0.433742 | simplechainbuilder.go | starcoder |
package gamescene
import (
"image"
"image/color"
"github.com/hajimehoshi/ebiten"
"github.com/hajimehoshi/ebiten/ebitenutil"
"github.com/hajimehoshi/gopherwalk/internal/scene"
)
type Dir int
const (
DirLeft Dir = iota
DirRight
DirUp
DirDown
)
const (
tileWidth = 16
tileHeight = 16
)
func shift(area image.Rectangle, dir Dir) image.Rectangle {
switch dir {
case DirLeft:
area.Min.X--
area.Max.X--
case DirRight:
area.Min.X++
area.Max.X++
case DirUp:
area.Min.Y--
area.Max.Y--
case DirDown:
area.Min.Y++
area.Max.Y++
default:
panic("not reached")
}
return area
}
func edge(area image.Rectangle, from Dir) image.Rectangle {
switch from {
case DirLeft:
area.Min.X = area.Max.X - 1
case DirRight:
area.Max.X = area.Min.X + 1
case DirUp:
area.Min.Y = area.Max.Y - 1
case DirDown:
area.Max.Y = area.Min.Y + 1
default:
panic("not reached")
}
return area
}
// Object is anything placed in the level: it can answer collision queries
// for a rectangle moving in a given direction, and it can update and draw
// itself each frame.
type Object interface {
	// OverlapsWithDir reports whether rect, shifted one pixel toward dir,
	// would collide with this object.
	OverlapsWithDir(rect image.Rectangle, dir Dir) bool
	Update(context scene.Context)
	Draw(screen *ebiten.Image)
}
// ObjectWall is a static solid block occupying one tile (2x2 tiles when
// big) at grid cell (x, y).
type ObjectWall struct {
	big bool
	x   int
	y   int
}

// area returns the wall's bounding box in pixel coordinates.
func (o *ObjectWall) area() image.Rectangle {
	w := tileWidth
	h := tileHeight
	if o.big {
		w *= 2
		h *= 2
	}
	return image.Rect(o.x*tileWidth, o.y*tileHeight, o.x*tileWidth+w, o.y*tileHeight+h)
}

// OverlapsWithDir reports whether rect, moved one pixel toward dir, would
// collide with the wall's edge facing that movement.
func (o *ObjectWall) OverlapsWithDir(rect image.Rectangle, dir Dir) bool {
	return edge(o.area(), dir).Overlaps(shift(rect, dir))
}

// Update is a no-op: walls have no per-frame behaviour.
func (o *ObjectWall) Update(context scene.Context) {
}

// Draw renders the wall as a grey rectangle one pixel smaller than its tile
// area so adjacent tiles show a visible seam. The duplicated branches of
// the original are merged, and the height now derives from tileHeight
// (previously tileWidth; identical today since both are 16, but the height
// should not depend on the width constant).
func (o *ObjectWall) Draw(screen *ebiten.Image) {
	x := o.x * tileWidth
	y := o.y * tileHeight
	w := tileWidth - 1
	h := tileHeight - 1
	if o.big {
		w = tileWidth*2 - 1
		h = tileHeight*2 - 1
	}
	ebitenutil.DrawRect(screen, float64(x), float64(y), float64(w), float64(h), color.NRGBA{0x66, 0x66, 0x66, 0xff})
}
// ObjectFF is a toggleable force field occupying one tile (2x2 tiles when
// big) at grid cell (x, y). While on it blocks movement; tapping inside
// its area flips the state.
type ObjectFF struct {
	big bool
	x   int
	y   int
	on  bool
}

// area returns the force field's bounding box in pixel coordinates.
func (o *ObjectFF) area() image.Rectangle {
	w := tileWidth
	h := tileHeight
	if o.big {
		w *= 2
		h *= 2
	}
	return image.Rect(o.x*tileWidth, o.y*tileHeight, o.x*tileWidth+w, o.y*tileHeight+h)
}

// OverlapsWithDir reports whether rect, moved one pixel toward dir, would
// collide with the field. A switched-off field never blocks.
func (o *ObjectFF) OverlapsWithDir(rect image.Rectangle, dir Dir) bool {
	if !o.on {
		return false
	}
	return edge(o.area(), dir).Overlaps(shift(rect, dir))
}

// Update toggles the field when a tap lands inside its area.
func (o *ObjectFF) Update(context scene.Context) {
	if !context.Input().IsJustTapped() {
		return
	}
	x, y := context.Input().CursorPosition()
	if !image.Pt(x, y).In(o.area()) {
		return
	}
	o.on = !o.on
}

// Draw renders the field in red: opaque when on, translucent when off. The
// rectangle is one pixel smaller than the tile area to leave a seam. The
// duplicated branches are merged and the height now derives from
// tileHeight (previously tileWidth; identical today since both are 16).
func (o *ObjectFF) Draw(screen *ebiten.Image) {
	c := color.NRGBA{0xff, 0x00, 0x00, 0x40}
	if o.on {
		c = color.NRGBA{0xff, 0x00, 0x00, 0xff}
	}
	x := o.x * tileWidth
	y := o.y * tileHeight
	w := tileWidth - 1
	h := tileHeight - 1
	if o.big {
		w = tileWidth*2 - 1
		h = tileHeight*2 - 1
	}
	ebitenutil.DrawRect(screen, float64(x), float64(y), float64(w), float64(h), c)
}
// ObjectElevator is a one-tile elevator platform at grid cell (x, y).
type ObjectElevator struct {
	x int
	y int
}

// area returns the elevator's one-tile bounding box in pixel coordinates.
func (o *ObjectElevator) area() image.Rectangle {
	w := tileWidth
	h := tileHeight
	return image.Rect(o.x*tileWidth, o.y*tileHeight, o.x*tileWidth+w, o.y*tileHeight+h)
}

// Overlaps reports whether rect intersects the elevator itself, without the
// one-pixel directional shift used by OverlapsWithDir.
func (o *ObjectElevator) Overlaps(rect image.Rectangle) bool {
	return o.area().Overlaps(rect)
}

// OverlapsWithDir reports whether rect, moved one pixel toward dir, would
// touch the elevator's edge facing that movement.
func (o *ObjectElevator) OverlapsWithDir(rect image.Rectangle, dir Dir) bool {
	return edge(o.area(), dir).Overlaps(shift(rect, dir))
}

// Update is a no-op; elevator movement is presumably driven elsewhere —
// TODO confirm.
func (o *ObjectElevator) Update(context scene.Context) {
}

// Draw renders the elevator as a yellow rectangle covering the full tile
// (unlike walls, no one-pixel seam is left).
func (o *ObjectElevator) Draw(screen *ebiten.Image) {
	x := o.x * tileWidth
	y := o.y * tileHeight
	ebitenutil.DrawRect(screen, float64(x), float64(y), float64(tileWidth), float64(tileHeight), color.NRGBA{0xff, 0xff, 0x00, 0xff})
}
type ObjectGoal struct {
x int
y int
}
func (o *ObjectGoal) area() image.Rectangle {
w := tileWidth
h := tileHeight
return image.Rect(o.x*tileWidth, o.y*tileHeight, o.x*tileWidth+w, o.y*tileHeight+h)
}
func (o *ObjectGoal) OverlapsWithDir(rect image.Rectangle, dir Dir) bool {
return edge(o.area(), dir).Overlaps(shift(rect, dir))
}
func (o *ObjectGoal) Update(context scene.Context) {
}
func (o *ObjectGoal) Draw(screen *ebiten.Image) {
x := o.x * tileWidth
y := o.y * tileHeight
w := tileWidth - 1
h := tileWidth - 1
ebitenutil.DrawRect(screen, float64(x), float64(y), float64(w), float64(h), color.NRGBA{0xff, 0x66, 0x00, 0xff})
} | internal/gamescene/object.go | 0.621541 | 0.427337 | object.go | starcoder |
package grouping
import (
"fmt"
"log"
)
/*
 * grouping implements an algorithm that groups data points, each of which
 * is a rank with an associated value. In this software package, the value
 * is the amount of data that a rank is sending or receiving.
 * The grouping algorithm is quite simple:
 * - We compare the median and the mean of a group's values and, if they are
 *   too far apart (more than 10% of the highest value by default), the
 *   group is dismantled and its individual data points are put back into
 *   the group to the left or the right, whichever is closer.
 * - When checking whether a group needs to be dismantled, we also check
 *   whether the data points would be better distributed by splitting the
 *   group in two.
 * - The algorithm is recursive: when a group is dismantled and its data
 *   points are added to the groups to the left or the right, those groups
 *   can end up being dismantled too. The algorithm is expected to
 *   stabilize, since a group can consist of a single data point.
 */
// Group is an ordered bucket of data points. Elts holds ranks sorted by
// their associated value. Min and Max cache the smallest and largest value
// in the group, and CachedSum caches the sum of all values so the mean can
// be computed without rescanning.
type Group struct {
	Elts []int
	Min int
	Max int
	CachedSum int
}
// Engine maintains the list of Groups, kept ordered by value range, and
// implements the incremental grouping algorithm.
type Engine struct {
	Groups []*Group
}
const (
	// DEFAULT_MEAN_MEDIAN_DEVIATION is the maximum accepted relative gap
	// between a group's mean and median, as a fraction of the larger of
	// the two (see affinityIsOkay).
	DEFAULT_MEAN_MEDIAN_DEVIATION = 0.1 // max of 10% of deviation
)
// getRemainder returns n modulo d as a float64. Like Go's % operator, the
// result is negative when n is negative.
func getRemainder(n int, d int) float64 {
	// n - d*(n/d) is exactly the definition of Go's % operator, so use the
	// operator directly instead of the hand-rolled form.
	return float64(n % d)
}
// getValue returns the value associated with rank, i.e. values[rank].
func getValue(rank int, values []int) int {
	return values[rank]
}
// getDistanceFromGroup returns the distance between val and the nearest
// bound of gp's value range. It is an error to ask for the distance of a
// value strictly inside the group's (Min, Max) interval.
func getDistanceFromGroup(val int, gp *Group) (int, error) {
	if gp.Max > val && val > gp.Min {
		// something wrong, the value belong to the group
		return -1, fmt.Errorf("value belongs to group")
	}
	if gp.Max <= val {
		return val - gp.Max, nil
	}
	if gp.Min >= val {
		return gp.Min - val, nil
	}
	// Unreachable: the three cases above cover every ordering of val
	// against Min/Max, but Go requires a terminating return here.
	return -1, nil
}
/**
 * lookupGroup finds the group that is the most likely to accept the data
 * point. For that we scan the min/max of each group; if the value is within
 * the min/max, that group is selected. If the value is between the max of a
 * group and the min of the next group, we calculate the distance to each
 * and select the closest group. Returns (nil, nil) when no groups exist
 * yet, leaving the caller to create the first one.
 */
func (e *Engine) lookupGroup(val int) (*Group, error) {
	index := 0
	if len(e.Groups) == 0 {
		return nil, nil
	}
	log.Printf("Looking up group for value %d", val)
	for _, g := range e.Groups {
		log.Printf("Group #%d, min: %d, max: %d", index, g.Min, g.Max)
		// Within Min and Max of a group
		if g.Min <= val && g.Max >= val {
			return g, nil
		}
		// the value is beyond the last group
		if index == len(e.Groups)-1 && val > g.Max {
			return g, nil
		}
		// the value is before the first group
		if index == 0 && val < g.Min {
			return g, nil
		}
		// the value is in-between 2 groups
		if g.Max < val && index < len(e.Groups)-1 && e.Groups[index+1].Min > val {
			d1, err := getDistanceFromGroup(val, g)
			if err != nil {
				return nil, err
			}
			d2, err := getDistanceFromGroup(val, e.Groups[index+1])
			if err != nil {
				return nil, err
			}
			// Ties go to the left (lower) group.
			if d1 <= d2 {
				return g, nil
			}
			return e.Groups[index+1], nil
		}
		index++
	}
	return nil, fmt.Errorf("unable to correctly scan groups")
}
// addAndShift inserts rank into gp.Elts at position index, shifting the
// remaining elements one slot to the right.
func (gp *Group) addAndShift(rank int, index int) error {
	shifted := make([]int, 0, len(gp.Elts)+1)
	shifted = append(shifted, gp.Elts[:index]...)
	shifted = append(shifted, rank)
	shifted = append(shifted, gp.Elts[index:]...)
	gp.Elts = shifted
	return nil
}
// addElt inserts rank into the group, keeping gp.Elts sorted by the ranks'
// values, and refreshes the cached metadata (CachedSum, Min, Max).
func (gp *Group) addElt(rank int, values []int) error {
	val := values[rank]
	log.Printf("Adding element %d-%d to group with min=%d and max=%d", rank, val, gp.Min, gp.Max)
	// The array is ordered
	log.Printf("Inserting new element in group's elements")
	i := 0
	// It is not unusual to have the same values coming over and over
	// so we check with the max value of the group, it actually saves
	// time quite often
	if val >= gp.Max {
		i = len(gp.Elts)
	} else {
		// Linear scan for the first element whose value exceeds the new one.
		for i < len(gp.Elts) && values[gp.Elts[i]] <= values[rank] {
			i++
		}
	}
	if i == len(gp.Elts) {
		// We add the new value at the end of the array
		log.Printf("Inserting element at the end of the group")
		gp.Elts = append(gp.Elts, rank)
	} else {
		log.Printf("Shifting elements within the group at index %d...", i)
		err := gp.addAndShift(rank, i)
		if err != nil {
			return err
		}
	}
	log.Printf("Updating group's metadata (first rank is %d)...", rank)
	//gp.Size++
	gp.CachedSum += values[rank]
	gp.Min = values[gp.Elts[0]]
	gp.Max = values[gp.Elts[len(gp.Elts)-1]]
	log.Printf("Element successfully added (size: %d; min: %d, max: %d)", len(gp.Elts), gp.Min, gp.Max)
	return nil
}
// createGroup builds a new single-element Group seeded with rank and its
// value val, delegating to addElt so all cached metadata is initialised
// consistently.
func createGroup(rank int, val int, values []int) (*Group, error) {
	g := &Group{Min: val, Max: val}
	if err := g.addElt(rank, values); err != nil {
		return nil, err
	}
	return g, nil
}
// addGroup inserts gp into e.Groups so the list stays ordered by value
// range: the insertion point is before the first existing group whose Max
// exceeds gp's Min.
func (e *Engine) addGroup(gp *Group) error {
	index := 0
	if len(e.Groups) == 0 {
		e.Groups = append(e.Groups, gp)
		return nil
	}
	log.Printf("Adding group with min: %d and max: %d", gp.Min, gp.Max)
	for _, g := range e.Groups {
		log.Printf("Comparing with group with min: %d and max: %d", g.Min, g.Max)
		if gp.Min < g.Max {
			break
		}
		index++
	}
	log.Printf("Need to insert new group at index: %d", index)
	return e.insertGroup(gp, index)
}
// getMedian returns the median of the first size values referenced by data
// (data holds ranks sorted by value; values maps rank -> value). For an
// even count the median is the true average of the two middle values.
func getMedian(size int, data []int, values []int) float64 {
	if size == 1 {
		return float64(values[data[0]])
	}
	if size%2 == 1 {
		// Odd count: the middle element is the median.
		return float64(values[data[size/2]])
	}
	// Even count: average the two middle values. Divide in floating point
	// so the .5 of an odd sum is preserved — the previous integer division
	// silently rounded the median down, disagreeing with the float median
	// computed by getMedianWithAdditionalPoint.
	lo := values[data[size/2-1]]
	hi := values[data[size/2]]
	return float64(lo+hi) / 2
}
// getMedianWithAdditionalPoint computes what the group's median would be if
// the data point (id, val) were added, without mutating the group.
// NOTE(review): in the even-total branch, if val is strictly between
// values[gp.Elts[middle-1]] and nothing else matches (e.g. val equals the
// element left of the middle), value1/value2 can remain -1 and the result
// is wrong; confirm whether such inputs can occur in practice.
func (gp *Group) getMedianWithAdditionalPoint(id int, val int, values []int) float64 {
	middle := len(gp.Elts) / 2
	if len(gp.Elts) == 1 {
		log.Println("Only one elements, manually calculating median with new element")
		return (float64(values[gp.Elts[0]]+val) / 2)
	}
	if getRemainder(len(gp.Elts)+1, 2) == 0 {
		// Even total number of data points, odd number of elements already
		// in the group. (The original comment had these swapped.)
		// Regardless of where the extra data point would land in the sorted list
		// of the group's elements, the point in the middle of the group's elements
		// will always be used.
		// the two values used to calculate the median
		value1 := -1
		value2 := -1
		index := middle
		candidateRank := gp.Elts[index]
		if values[candidateRank] > val && val > values[candidateRank-1] {
			// The extra element goes in between the middle of the group's elements and the element to its left.
			// It shifts the element to the left to calculate the median
			value1 = val
			value2 = values[candidateRank]
		}
		if values[candidateRank] > val && val < values[candidateRank-1] {
			// The extra element falls toward the beginning of the group's elements; it shifts the two elements
			// required to calculate the median
			value1 = values[candidateRank-1]
			value2 = values[candidateRank]
		}
		if values[candidateRank] < val && val < values[candidateRank+1] {
			// The extra element falls in between the middle of the group's elements and the element to its right.
			value1 = values[candidateRank]
			value2 = val
		}
		if values[candidateRank] < val && val > values[candidateRank+1] {
			// The extra element falls toward the end of the group's elements.
			value1 = values[candidateRank]
			value2 = values[candidateRank+1]
		}
		if values[candidateRank] == val {
			// If the extra element has the same value as the middle of the group's elements, it will be added
			// right in the middle, shifting the second half of the group's elements starting by the elements to
			// the left
			value1 = val
			value2 = values[candidateRank]
		}
		if values[candidateRank+1] == val {
			// If the extra element has the same value as the middle + 1 of the group's elements, it will be
			// added to the right of the middle, shifting the second half of the group's elements starting by the
			// elements to the right
			value1 = val
			value2 = values[candidateRank+1]
		}
		/*
			if value1 == -1 || value2 == -1 {
				for i := 0; i < len(gp->Elts); i++ {
					fprintf(stderr, "-> elt %d: rank: %d, value: %d\n", i, gp->elts[i], values[gp->elts[i]]);
				}
			}
		*/
		sum := value1 + value2
		median := float64(sum) / 2
		return median
	} else {
		// Odd total number of data points, even number of elements already
		// in the group. (The original comment had these swapped.)
		index := middle - 1
		candidateRank := gp.Elts[index]
		if values[candidateRank] > val {
			// The new value falls to the left of the two elements from the original group that are candidate
			return float64(values[candidateRank])
		}
		if values[gp.Elts[index+1]] < val {
			// The new value falls to the right of the two elements from the original group that are candidate
			return float64(values[gp.Elts[index+1]])
		}
		if values[candidateRank] <= val && values[gp.Elts[index+1]] >= val {
			// The new element falls right in the middle of the new group
			return float64(val)
		}
	}
	// We should not actually get here
	return -1
}
// getMedian returns the median of the group's values; gp.Elts is already
// sorted by value, as the package-level getMedian requires.
func (gp *Group) getMedian(values []int) float64 {
	return getMedian(len(gp.Elts), gp.Elts, values)
}
// getMean returns the arithmetic mean of the group's values using the
// cached sum. The division is now done in floating point: the previous
// integer division truncated the mean, which disagreed with the
// floating-point mean computed in balanceGroupWithNewElement. The values
// parameter is unused but kept for interface stability.
func (gp *Group) getMean(values []int) float64 {
	log.Printf("Calculating mean based on cache sum: %d and %d elements", gp.CachedSum, len(gp.Elts))
	return float64(gp.CachedSum) / float64(len(gp.Elts))
}
// affinityIsOkay reports whether mean and median are close enough to each
// other: the smaller of the two must be at least
// (1 - DEFAULT_MEAN_MEDIAN_DEVIATION) times the larger. When they are,
// the caller may add the new data point to the group; afterwards the group
// is re-checked to see whether it needs to be split.
func affinityIsOkay(mean float64, median float64) bool {
	lo, hi := mean, median
	if lo > hi {
		lo, hi = hi, lo
	}
	log.Printf("Mean: %f; median: %f", mean, median)
	return hi*(1-DEFAULT_MEAN_MEDIAN_DEVIATION) <= lo
}
// groupIsBalanced reports whether the group's mean and median are within
// the accepted deviation of each other (see affinityIsOkay).
func (gp *Group) groupIsBalanced(values []int) bool {
	// We calculate the mean and median.
	median := gp.getMedian(values)
	mean := gp.getMean(values)
	return affinityIsOkay(mean, median)
}
// unlinkGroup removes gp from e.Groups, preserving the order of the
// remaining groups. Errors if gp is not in the list.
func (e *Engine) unlinkGroup(gp *Group) error {
	index := 0
	for _, g := range e.Groups {
		if g == gp {
			break
		}
		index++
	}
	if index >= len(e.Groups) {
		return fmt.Errorf("cannot find group")
	}
	// we must be careful to keep the order.
	e.Groups = append(e.Groups[:index], e.Groups[index+1:]...)
	return nil
}
// groupToString renders values as a space-prefixed list, e.g. " 1 2 3".
func groupToString(values []int) string {
	out := ""
	for _, v := range values {
		out += fmt.Sprintf(" %d", v)
	}
	return out
}
// insertGroup places gp at position index in e.Groups, shifting later
// groups one slot to the right.
func (e *Engine) insertGroup(gp *Group, index int) error {
	log.Printf("Inserting group at index: %d", index)
	var newGroupList []*Group
	if index == 0 {
		e.Groups = append([]*Group{gp}, e.Groups...)
	} else {
		newGroupList = append(newGroupList, e.Groups[:index]...)
		newGroupList = append(newGroupList, gp)
		e.Groups = append(newGroupList, e.Groups[index:]...)
	}
	return nil
}
// splitGroup splits gp at indexSplit: elements [indexSplit:] move into a
// brand new group that is inserted right after gp in e.Groups, and both
// groups' cached metadata is refreshed. Returns the new group.
func (e *Engine) splitGroup(gp *Group, indexSplit int, values []int) (*Group, error) {
	// Create the new group
	ng, err := createGroup(gp.Elts[indexSplit], values[gp.Elts[indexSplit]], values)
	if err != nil {
		return nil, err
	}
	// Find index of the group
	i := 0
	for i < len(e.Groups) {
		if e.Groups[i] == gp {
			break
		}
		i++
	}
	if i == len(e.Groups) {
		return nil, fmt.Errorf("unable to find group")
	}
	log.Printf("group index is: %d", i)
	// Transfer all the elements to transfer into the new group into a temporary list
	// We do not include the element at indexSplit because it is already in the new
	// group
	var temp []int
	for j := indexSplit + 1; j < len(gp.Elts); j++ {
		temp = append(temp, e.Groups[i].Elts[j])
	}
	// Remove all the elements that are moving to the new group
	e.Groups[i].Elts = e.Groups[i].Elts[:indexSplit]
	log.Printf("Split group now has %d elements", len(e.Groups[i].Elts))
	// Update the group's metadata after removal of elements
	e.Groups[i].CachedSum = 0
	for j := 0; j < len(e.Groups[i].Elts); j++ {
		e.Groups[i].CachedSum += values[e.Groups[i].Elts[j]]
	}
	gp.Min = values[gp.Elts[0]]
	gp.Max = values[gp.Elts[len(gp.Elts)-1]]
	// Transfer elements from initial group to new one
	for j := 0; j < len(temp); j++ {
		err := ng.addElt(temp[j], values)
		if err != nil {
			return nil, err
		}
	}
	// Finally we add the new group
	log.Printf("Group split, inserting new group at index %d", i+1)
	err = e.insertGroup(ng, i+1)
	if err != nil {
		return nil, err
	}
	return ng, nil
}
// balanceGroupWithNewElement adds data point (id, val) to gp if doing so
// keeps the group's mean and median within the accepted deviation.
// Otherwise the group is split (or a new group appended when val exceeds
// everything in gp) and the point goes to whichever resulting group is
// closest.
func (e *Engine) balanceGroupWithNewElement(gp *Group, id int, val int, values []int) error {
	// Mean of the group as it would be with the new point included.
	sum := float64(gp.CachedSum + val)
	mean := float64(sum / float64(len(gp.Elts)+1))
	log.Printf("Mean of %d with %d elements is %f", gp.CachedSum+val, len(gp.Elts)+1, mean)
	// Now we calculate the median
	median := gp.getMedianWithAdditionalPoint(id, val, values)
	if affinityIsOkay(mean, median) {
		err := gp.addElt(id, values)
		if err != nil {
			return err
		}
	} else {
		log.Println("Group needs to be split")
		// We figure out where we need to split the group
		i := 0
		for i < len(gp.Elts) && values[gp.Elts[i]] < values[id] {
			i++
		}
		if i < len(gp.Elts) {
			log.Printf("Group needs to split in two")
			newGroup, err := e.splitGroup(gp, i, values)
			if err != nil {
				return err
			}
			// We find the group that is the closest to the element to add
			d1, err := getDistanceFromGroup(values[id], gp)
			if err != nil {
				return err
			}
			d2, err := getDistanceFromGroup(values[id], newGroup)
			if err != nil {
				return err
			}
			if d2 < d1 {
				err := newGroup.addElt(id, values)
				if err != nil {
					return err
				}
			} else {
				err := gp.addElt(id, values)
				if err != nil {
					return err
				}
			}
		} else {
			// The new value is larger than everything in the group: start a
			// fresh group for it at the end instead of splitting.
			log.Printf("Group spliting only needs to add new group at the end (index: %d, len: %d)", i, len(gp.Elts))
			newGroup, err := createGroup(id, val, values)
			if err != nil {
				return err
			}
			err = e.addGroup(newGroup)
			if err != nil {
				return err
			}
		}
	}
	return nil
}
// AddDatapoint inserts the data point identified by id (its value is
// values[id]) into the engine: into the closest suitable existing group,
// or into a brand new group when none exists.
func (e *Engine) AddDatapoint(id int, values []int) error {
	val := getValue(id, values)
	// We scan the groups to see which group is the most likely to be suitable
	gp, err := e.lookupGroup(val)
	if err != nil {
		return err
	}
	if gp == nil {
		log.Println("No group found, creating a new one")
		ng, err := createGroup(id, val, values)
		if err != nil {
			// Bug fix: this previously returned nil, silently swallowing
			// the creation error.
			return err
		}
		return e.addGroup(ng)
	}
	log.Println("Adding data point to existing group")
	return e.balanceGroupWithNewElement(gp, id, val, values)
}
// Init returns a fresh Engine with no groups.
func Init() *Engine {
	return &Engine{}
}
/*
func (e *Engine) Fini() error {
// Anything to do?
return nil
}
*/
func (e *Engine) GetGroups() ([]*Group, error) {
return e.Groups, nil
} | tools/internal/pkg/grouping/grouping.go | 0.692122 | 0.610831 | grouping.go | starcoder |
package sqlx
import (
"fmt"
"reflect"
"strings"
"time"
)
// Updater builds an UPDATE statement from two structs: Updater supplies
// the SET columns (fields tagged `db`) and Where supplies the AND-joined
// WHERE conditions. Both are expected to be pointers to structs (enforced
// by WithUpdater).
type Updater struct {
	Where interface{}
	Updater interface{}
}
// TableName resolves the target table name: the Updater value's own
// TableName() if it implements Table and returns a non-empty name,
// otherwise the lowercased struct type name. Returns "" when Updater is
// nil. NOTE(review): assumes Updater is a pointer (WithUpdater enforces
// this); reflect.TypeOf(...).Elem() would panic for other kinds.
func (u *Updater) TableName() string {
	if u.Updater == nil {
		return ""
	}
	if table, ok := u.Updater.(Table); ok && len(table.TableName()) > 0 {
		return table.TableName()
	}
	if t := reflect.TypeOf(u.Updater).Elem(); t != nil {
		return strings.ToLower(t.Name())
	}
	return ""
}
// where renders u.Where into an AND-joined condition string plus its bound
// arguments, in field order. Only bool, integer and string fields carrying
// a `db` tag participate; everything else is skipped.
func (u *Updater) where() (query string, args []interface{}) {
	if u.Where == nil {
		return "", nil
	}
	t, v := reflect.TypeOf(u.Where).Elem(), reflect.ValueOf(u.Where).Elem()
	if t == nil || t.Kind() != reflect.Struct {
		return "", nil
	}
	var conds []string
	for i := 0; i < t.NumField(); i++ {
		switch v.Field(i).Kind() {
		case reflect.Bool,
			reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
			reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
			reflect.String:
			// Supported scalar kinds fall through to the tag check below.
		default:
			continue
		}
		colName := t.Field(i).Tag.Get("db")
		if len(colName) <= 0 {
			continue
		}
		conds = append(conds, fmt.Sprintf("`%v`=?", colName))
		args = append(args, v.Field(i).Interface())
	}
	return strings.Join(conds, " AND "), args
}
// Convert renders the UPDATE statement for u.Updater's `db`-tagged fields,
// appending the WHERE clause built by where(). It returns the SQL text and
// the bound arguments in placeholder order (SET values first, then WHERE).
// NOTE(review): if a field's Marshal fails, the error is swallowed and an
// empty statement plus the args collected so far is returned; consider
// surfacing the error to callers.
func (u *Updater) Convert() (sql string, args []interface{}) {
	t, v := reflect.TypeOf(u.Updater).Elem(), reflect.ValueOf(u.Updater).Elem()
	if t == nil {
		return
	}
	var values []string
	for i := 0; i < t.NumField(); i++ {
		// Only these kinds participate in the SET clause. Unlike where(),
		// structs and slices are accepted here (handled below) but bools
		// are not.
		switch v.Field(i).Kind() {
		case reflect.Int:
		case reflect.Int8:
		case reflect.Int16:
		case reflect.Int32:
		case reflect.Int64:
		case reflect.Uint:
		case reflect.Uint8:
		case reflect.Uint16:
		case reflect.Uint32:
		case reflect.Uint64:
		case reflect.String:
		case reflect.Struct:
		case reflect.Slice:
		default:
			continue
		}
		colName := t.Field(i).Tag.Get("db")
		if len(colName) <= 0 {
			continue
		}
		values = append(values, fmt.Sprintf("`%v`=?", colName))
		// Custom marshaling takes precedence over the raw field value.
		if encoder, ok := v.Field(i).Addr().Interface().(Marshaler); ok {
			str, err := encoder.Marshal()
			if err != nil {
				return
			}
			args = append(args, str)
			continue
		}
		// time.Time values are bound in a `YYYY-MM-DD hh:mm:ss` layout.
		if t, ok := v.Field(i).Interface().(time.Time); ok {
			args = append(args, t.Format("2006-01-02 15:04:05"))
			continue
		}
		args = append(args, v.Field(i).Interface())
	}
	query, whereArgs := u.where()
	args = append(args, whereArgs...)
	return fmt.Sprintf("UPDATE `%v` SET %v WHERE %v;", u.TableName(), strings.Join(values, ","), query), args
}
// Scan is a no-op: an UPDATE produces no result rows to scan. It is
// presumably present so *Updater satisfies an interface that requires it
// (likely Convertor) — confirm against the interface definition.
func (u *Updater) Scan(scan func(dest ...interface{}) error) error {
	return nil
}
func WithUpdater(where, updater interface{}) Convertor {
switch reflect.TypeOf(updater).Kind() {
case reflect.Ptr:
default:
return nil
}
switch reflect.TypeOf(where).Kind() {
case reflect.Ptr:
default:
return nil
}
return &Updater{
Where: where,
Updater: updater,
}
} | updater.go | 0.532182 | 0.403831 | updater.go | starcoder |
package api
import (
. "github.com/gocircuit/circuit/gocircuit.org/render"
)
func RenderMainPage() string {
figs := A{
"FigHierarchy": RenderFigurePngSvg(
"Virtual anchor hierarchy, depicting elements attached to some of the anchors.", "hierarchy", "600px"),
"FigResidence": RenderFigurePngSvg(
"Except for the root, every anchor physically resides on some circuit host. "+
"The root anchor is a logical object representing your client's connection to the cluster.",
"residence", "630px"),
}
return RenderHtml("Go client API", Render(mainBody, figs))
}
const mainBody = `
<h1>Go client API</h1>
<p>To use the Go client API to the circuit, start by importing the client package:
<pre>
import "github.com/gocircuit/circuit/client"
</pre>
<h2>System abstraction</h2>
<p>The circuit organizes all of the cluster resources in an abstract hierarchichal namespace—a rooted
tree with named edges.
Every node in the tree is called an <em>anchor</em> and
every anchor is associated with the root-to-anchor path that leads to it.
A path identifies its anchor uniquely.
In file system notation, paths are strings like <code>"/Xf1c8d96119cc6919/foo/bar"</code>.
<p>In addition to being a tree node in the namespace, each anchor can have none or one <em>element</em>
attached to it. An element is a logical object that manages an underlying computational resource.
There are different kinds of elements, according to their underlying resource: process, container, name server, channel, etc.
{{.FigHierarchy}}
<p>The Go client interface is organized around the anchor hierarchy abstraction.
<p>An interface called <code>Anchor</code> represents an anchor. It provides methods
for traversing and inspecting its descendant anchors, as well as methods for creating or retrieving
the element associated it.
<p>All circuit applications begin with a call to <code>Dial</code> (or <code>DialDiscover</code>)
which establishes connection to a circuit cluster and returns an <code>Anchor</code>
object representing the root of the hierarchy. (Programming details for connecting into a cluster
are given below.)
<h2>Anchor and element residence</h2>
<p>Every anchor—excluding the root anchor—as well as its attached element (if any) physically
reside on some specific host in the circuit cluster. (The <code>Anchor</code> and element objects in a
Go client application are merely references to the underlying anchor and element structures.)
<p>The following illustration demonstrates how the hierarchy structure implies the physical
location of anchors and elements.
{{.FigResidence}}
<p>The root anchor (which you obtain from <code>Dial</code> or <code>DialDiscover</code>)
is special. It symbolically represents your client's connection to the circuit cluster. As such,
the root anchor resides only in your client's runtime—i.e. it is not persistent.
No elements can be attached to the root anchor.
<p>The children of the root anchor are always, by definition, server anchors.
Server anchors correspond to currently live hosts (aka servers) in your circuit cluster.
Server anchors are created and removed by the circuit system, as hosts join or leave
the circuit cluster.
<p>Server anchors physically reside on their respective host and they have an
attached <code>Server</code> element that allows you to query various
runtime parameters of the host. <code>Server</code> elements are permanently
attached to their anchors.
<p>All anchors descendant to server anchors, and their attached elements, are created
by the user. All such user anchors as well as the elements that might be attached to them
reside—by definition—on the host of the server anchor that they descend from.
<h2 id="errors">Panics and errors</h2>
<p>All programmatic manipulation of a circuit client involves calling methods
of <code>Anchor</code> or element objects. As we discussed, all anchors
and elements have an implied physical place of residence (on one of the cluster hosts).
<p>In general, any method invokation might result in one of two types of errors:
<em>application errors</em> and <em>system errors</em>.
<p>Application errors are things like trying to create an element on anchor that
already has one, or trying to start a process using a missing binary, for instance.
Such errors will be returned in the form of Go <code>error</code> return values
of the respective method.
<p>Independently of application errors, every invokation of an anchor or
element method may fail if the underlying object is physically unreachable.
Anchors residing on a dead host are unreachable and so are their elements,
for example. Such errors are treated in a separate category of system errors
and they are reported as panics. In particular, if a host is unreachable,
all anchors descendant to and including its server anchor will cause panics when used.
<p>By design, any anchor or element method invokation will result in
a panic, if a system error occurs. We uniformly report system errors as
panics in order to separate them semantically from application errors.
But also because they have asynchronous nature and because they usually
result in a very different way of being handled by the application programmer.
<p>That said, such panic conditions are not critical. These panics merely
indicate that the host where an anchor or element physically resides is currently
unreachable. The underlying host can be unreachable either if dead or as the
result of a complete network partition (partial partitions do not affect the system).
<p>An anchor or element object that produces a panic remains in a valid
state after the panic and it can be re-used. If the underlying resource is still
unreachable, another panic will be produced. But if the system has
recovered from a network partition and the underlying resource is reachable
again, follow on method calls will succeed.
<h3>Connection panics</h3>
<p>Panics in any method invocation can also be caused if the client's connection
to a circuit server is lost. This type of panic is permanent, as the circuit client
does not attempt automatic reconnection to the circuit cluster.
<p>There is a way to distinguish between host-only panics and permanent client
connection panics. After catching a panic anywhere, the user application can
simply call the root anchor's <code>View</code> method (which lists the contents of
the anchor). If this call also results in a panic, this is an indication that the client
connection has been lost altogether.
` | gocircuit.org/api/api.go | 0.80525 | 0.755502 | api.go | starcoder |
package Set1
func hexToBase64(hexString string) string {
return byteArrayToBase64(hexToByteArray(hexString))
}
// hexToByteArray decodes a hexadecimal string into its raw bytes.
//
// Improvements over the original:
//   - upper-case hex digits ('A'-'F') are now accepted in addition to
//     lower-case ones (backward compatible: lower-case input decodes
//     exactly as before);
//   - an odd-length input no longer panics with an index-out-of-range
//     when reading the second digit of the final pair; the trailing
//     unpaired digit is ignored instead.
func hexToByteArray(hexString string) []byte {
	bytes := make([]byte, len(hexString)/2)
	hexCharToByte := map[byte]byte{
		'0': 0,
		'1': 1,
		'2': 2,
		'3': 3,
		'4': 4,
		'5': 5,
		'6': 6,
		'7': 7,
		'8': 8,
		'9': 9,
		'a': 10,
		'b': 11,
		'c': 12,
		'd': 13,
		'e': 14,
		'f': 15,
		'A': 10,
		'B': 11,
		'C': 12,
		'D': 13,
		'E': 14,
		'F': 15,
	}
	// Each output byte is two hex characters: high nibble first.
	// The i+1 bound guards against odd-length input.
	for i := 0; i+1 < len(hexString); i += 2 {
		bytes[i/2] = hexCharToByte[hexString[i]]*16 + hexCharToByte[hexString[i+1]]
	}
	return bytes
}
// byteArrayToBase64 encodes a byte slice using the standard base64
// alphabet (RFC 4648) with '=' padding.
//
// Bug fix: the original converted the leftover (non-multiple-of-3) tail
// with a "while value > 0" digit loop, which dropped leading zero
// symbols — e.g. []byte{0} encoded as "====" instead of "AA==" and
// []byte{0, 1} as "E===" instead of "AAE=". The tail is now emitted as
// a fixed number of 6-bit symbols.
func byteArrayToBase64(byteArray []byte) string {
	const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
	if len(byteArray) == 0 {
		return ""
	}
	// Every 3 input bytes become 4 output symbols; a partial final
	// group still occupies 4 symbols including padding.
	res := make([]byte, 0, ((len(byteArray)+2)/3)*4)
	i := 0
	for ; i+2 < len(byteArray); i += 3 {
		// Concatenate three bytes into a 24-bit group.
		v := uint32(byteArray[i])<<16 | uint32(byteArray[i+1])<<8 | uint32(byteArray[i+2])
		res = append(res,
			alphabet[v>>18&0x3F],
			alphabet[v>>12&0x3F],
			alphabet[v>>6&0x3F],
			alphabet[v&0x3F])
	}
	switch len(byteArray) - i {
	case 1:
		// 8 bits, left-shifted to 12: exactly 2 symbols + "==".
		v := uint32(byteArray[i]) << 4
		res = append(res, alphabet[v>>6&0x3F], alphabet[v&0x3F], '=', '=')
	case 2:
		// 16 bits, left-shifted to 18: exactly 3 symbols + "=".
		v := uint32(byteArray[i])<<10 | uint32(byteArray[i+1])<<2
		res = append(res, alphabet[v>>12&0x3F], alphabet[v>>6&0x3F], alphabet[v&0x3F], '=')
	}
	return string(res)
}
// byteArrayToHex renders a byte slice as its lower-case hexadecimal
// representation, two characters per input byte.
func byteArrayToHex(byteArray []byte) string {
	const hexDigits = "0123456789abcdef"
	// 1 byte = 2 hex symbols
	res := make([]byte, len(byteArray)*2)
	for i, b := range byteArray {
		res[2*i] = hexDigits[b>>4]
		res[2*i+1] = hexDigits[b&0x0f]
	}
	return string(res)
}
// byteToBase64Symbol maps a 6-bit value (0-63) to its character in the
// standard base64 alphabet (RFC 4648): A-Z, a-z, 0-9, '+', '/'.
//
// Bug fix: value 62 previously returned 42 ('*') instead of 43 ('+'),
// producing invalid base64 output (the original comment said "+" but
// the code returned the wrong code point).
func byteToBase64Symbol(b byte) byte {
	switch {
	case b <= 25:
		// Capital letters
		return 'A' + b
	case b <= 51:
		// Small letters
		return 'a' + (b - 26)
	case b <= 61:
		// Numbers
		return '0' + (b - 52)
	case b == 62:
		// "+" symbol
		return '+'
	default:
		// "/" symbol
		return '/'
	}
}
// prependbyte returns x with y inserted at the front, growing the
// slice by one and shifting the existing bytes right.
// (Pattern adapted from https://stackoverflow.com/questions/53737435/how-to-prepend-int-to-slice)
func prependbyte(x []byte, y byte) []byte {
	x = append(x, 0)
	copy(x[1:], x[:len(x)-1])
	x[0] = y
	return x
}
package model
import (
"bytes"
"strings"
"time"
"github.com/d3ce1t/areyouin-server/api"
"github.com/d3ce1t/areyouin-server/utils"
)
type Event struct {
id int64
authorID int64
authorName string
description string
pictureDigest []byte
createdDate time.Time // Seconds precision
modifiedDate time.Time // Seconds precision
inboxPosition time.Time // Seconds precision
startDate time.Time // Seconds precision
endDate time.Time // Seconds precision
cancelled bool
Participants *ParticipantList
// Owner of this event object in RAM
owner int64
// Used to compute the timestamp for this event version when stored in DB
timestamp int64
// If this event is a modification of another one, oldEvent must
// point to that object
oldEvent *Event
// Indicate if this object has a copy in database. For instance,
// an event loaded from db will have isPersisted set. However, a
// modified event will have it unset.
isPersisted bool
}
// newEventFromDTO builds an Event domain object from its transport DTO.
// All date fields are truncated to second precision, matching the
// precision documented on the Event struct; the participant list and
// its guest/attendee counters are rebuilt from the DTO's map.
func newEventFromDTO(dto *api.EventDTO) *Event {
	event := &Event{
		id:            dto.Id,
		authorID:      dto.AuthorId,
		authorName:    dto.AuthorName,
		description:   dto.Description,
		pictureDigest: dto.PictureDigest,
		createdDate:   utils.MillisToTimeUTC(dto.CreatedDate).Truncate(time.Second),
		// NOTE(review): Timestamp is multiplied by 1000 and passed to
		// time.Unix(0, ...) (nanoseconds), i.e. it is treated as
		// microseconds here, while the other fields use millisecond
		// helpers — confirm the intended unit of dto.Timestamp.
		modifiedDate:  time.Unix(0, dto.Timestamp*1000).UTC().Truncate(time.Second),
		inboxPosition: utils.MillisToTimeUTC(dto.InboxPosition).Truncate(time.Second),
		startDate:     utils.MillisToTimeUTC(dto.StartDate).Truncate(time.Second),
		endDate:       utils.MillisToTimeUTC(dto.EndDate).Truncate(time.Second),
		cancelled:     dto.Cancelled,
		Participants:  newParticipantList(),
		timestamp:     dto.Timestamp,
	}
	// Rebuild the participant map, counting confirmed attendees.
	for _, p := range dto.Participants {
		event.Participants.participants[p.UserID] = newParticipantFromDTO(p)
		if p.Response == api.AttendanceResponse_ASSIST {
			event.Participants.numAttendees++
		}
	}
	event.Participants.numGuests = len(event.Participants.participants)
	return event
}
func newEventListFromDTO(dtos []*api.EventDTO) []*Event {
results := make([]*Event, 0, len(dtos))
for _, eventDTO := range dtos {
results = append(results, newEventFromDTO(eventDTO))
}
return results
}
func (e *Event) Id() int64 {
return e.id
}
func (e *Event) AuthorID() int64 {
return e.authorID
}
func (e *Event) AuthorName() string {
return e.authorName
}
func (e *Event) CreatedDate() time.Time {
return e.createdDate
}
func (e *Event) ModifiedDate() time.Time {
return e.modifiedDate
}
func (e *Event) StartDate() time.Time {
return e.startDate
}
func (e *Event) EndDate() time.Time {
return e.endDate
}
// Title derives a short headline from the event description: the first
// line only, capped at 10 words, with "..." appended when words were
// dropped.
//
// Bug fix: an empty or whitespace-only description previously panicked
// with an index-out-of-range on fields[0]; it now returns "".
func (e *Event) Title() string {
	var str string
	pos := strings.Index(e.description, "\n")
	if pos != -1 {
		str = e.description[0:pos]
	} else {
		str = e.description
	}
	fields := strings.Fields(str)
	// Guard the empty case before indexing fields[0].
	if len(fields) == 0 {
		return ""
	}
	title := fields[0]
	i := 1
	for i < utils.MinInt(10, len(fields)) {
		title += " " + fields[i]
		i++
	}
	if i < len(fields) {
		title += "..."
	}
	return title
}
func (e *Event) Description() string {
return e.description
}
func (e *Event) InboxPosition() time.Time {
return e.inboxPosition
}
func (e *Event) PictureDigest() []byte {
return e.pictureDigest
}
func (e *Event) NumAttendees() int {
return e.Participants.numAttendees
}
func (e *Event) NumGuests() int {
return e.Participants.numGuests
}
func (e *Event) Status() api.EventState {
currentDate := time.Now()
if e.IsCancelled() {
return api.EventState_CANCELLED
} else if e.startDate.After(currentDate) {
return api.EventState_NOT_STARTED
} else if e.endDate.Before(currentDate) || e.endDate.Equal(currentDate) { // End date isn't included
return api.EventState_FINISHED
}
return api.EventState_ONGOING
}
func (e *Event) IsCancelled() bool {
return e.cancelled
}
func (e *Event) Timestamp() int64 {
return e.timestamp
}
// Equal reports whether e and other carry identical event state,
// comparing every serialized field including the participant list.
// In-memory bookkeeping fields (owner, oldEvent, isPersisted) are not
// part of the comparison.
func (e *Event) Equal(other *Event) bool {
	return e.id == other.id &&
		e.authorID == other.authorID && e.authorName == other.authorName &&
		e.description == other.description &&
		bytes.Equal(e.pictureDigest, other.pictureDigest) &&
		e.createdDate.Equal(other.createdDate) &&
		e.modifiedDate.Equal(other.modifiedDate) &&
		e.inboxPosition.Equal(other.inboxPosition) &&
		e.startDate.Equal(other.startDate) &&
		e.endDate.Equal(other.endDate) &&
		e.cancelled == other.cancelled &&
		e.timestamp == other.timestamp &&
		e.Participants.Equal(other.Participants)
}

// IsZero reports whether e holds only zero values.
// NOTE(review): unlike Equal, this does not examine timestamp, owner,
// oldEvent or isPersisted — confirm whether that is intentional.
func (e *Event) IsZero() bool {
	return e.id == 0 && e.authorID == 0 && e.authorName == "" &&
		e.description == "" && e.pictureDigest == nil &&
		e.createdDate.IsZero() && e.modifiedDate.IsZero() &&
		e.inboxPosition.IsZero() && e.startDate.IsZero() &&
		e.endDate.IsZero() && e.cancelled == false &&
		e.Participants == nil
}
func (e *Event) AsDTO() *api.EventDTO {
dto := &api.EventDTO{
Id: e.id,
AuthorId: e.authorID,
AuthorName: e.authorName,
Description: e.description,
PictureDigest: e.pictureDigest,
CreatedDate: utils.TimeToMillis(e.createdDate),
InboxPosition: utils.TimeToMillis(e.inboxPosition),
StartDate: utils.TimeToMillis(e.startDate),
EndDate: utils.TimeToMillis(e.endDate),
Cancelled: e.cancelled,
Participants: make(map[int64]*api.ParticipantDTO),
Timestamp: e.timestamp,
}
for _, v := range e.Participants.participants {
dto.Participants[v.id] = v.AsDTO()
}
return dto
}
func (e *Event) Clone() *Event {
eventCopy := new(Event)
*eventCopy = *e
eventCopy.pictureDigest = make([]byte, len(e.pictureDigest))
copy(eventCopy.pictureDigest, e.pictureDigest)
eventCopy.Participants = e.Participants.Clone()
return eventCopy
}
func (e *Event) CloneWithEmptyParticipants() *Event {
eventCopy := new(Event)
*eventCopy = *e
eventCopy.pictureDigest = make([]byte, len(e.pictureDigest))
copy(eventCopy.pictureDigest, e.pictureDigest)
eventCopy.Participants = newParticipantList()
eventCopy.Participants.numGuests = e.Participants.numGuests
eventCopy.Participants.numAttendees = e.Participants.numAttendees
return eventCopy
} | model/event.go | 0.621541 | 0.402862 | event.go | starcoder |
package dateparser
import (
"time"
"strings"
)
type HandleFn func(d *Date, ts []*Token) bool
type Pattern struct {
children []*Pattern
Matchers []*Matcher
HandleFn HandleFn
}
func NewPattern() *Pattern {
return &Pattern{}
}
func (p *Pattern) Add() *Pattern {
if p.children == nil {
p.children = []*Pattern{}
}
ptn := &Pattern{}
p.children = append(p.children, ptn)
return ptn
}
func (p *Pattern) Parse(b []byte, def *time.Time) *Date {
tokens := (&Timelex{b, 0}).All()
date := &Date{}
idx := 0
for idx < len(tokens) {
n := p.parse(date, tokens[idx:])
if n == 0 {
n = 1 // unparsed. skip the first token and continue.
}
idx += n
}
return date.AddDefault(def)
}
func (p *Pattern) parse(d *Date, ts []*Token) int {
if len(ts) < len(p.Matchers) {
return 0
}
for i, matcher := range p.Matchers {
if !matcher.Match(ts[i]) {
return 0 // unmatched.
}
}
if len(p.children) > 0 {
for _, ptn := range p.children {
if n := ptn.parse(d, ts); n > 0 {
return n
}
}
return 0
}
if p.HandleFn(d, ts) {
return len(p.Matchers)
} else {
return 0
}
}
func (p *Pattern) Match(s string) *Pattern {
allmatchers := []*Matcher{
{"2006", YYYY},
{"06", YY},
{"01", Month},
{"Jan", MonthName},
{"02", DD},
{"Mon", Weekday},
{"MST", Timezone},
{"0700", TimezoneOffset},
{"15", HH24},
{"03", HH12},
{"07", HH12}, // used for timezone offsets (07:00)
{"04", MINS},
{"05", SECS},
{"00", SECS}, // used for timezone offsets (07:00)
{"pm", AmPm},
{"-", Sign},
// uncaptured.
{"hours", HoursName},
{"mins", MinsName},
{"secs", SecsName},
{"/", DateSep},
{":", TimeSep},
}
matchers := []*Matcher{}
for len(s) > 0 {
if s[0] == ' ' {
s = s[1:] // skip spaces
continue
}
found := false
for _, m := range allmatchers {
if strings.HasPrefix(s, m.Fmt) {
found = true
matchers = append(matchers, m)
s = s[len(m.Fmt):]
break
}
}
if !found {
panic("Unrecognized format:" + s)
}
}
p.Matchers = matchers
return p
}
func (p *Pattern) Handle(fn HandleFn) *Pattern {
p.HandleFn = fn
return p
}
// --- MATCHERS
// 59-mins or 59-seconds
var MINS = Match([]string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
"12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23",
"24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35",
"36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47",
"48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59"})
var SECS = MINS
// 12-hours
var HH12 = Match([]string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
"12"})
// 24-hours
var HH24 = Match([]string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
"12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"})
// 31-days
var DD = Match([]string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
"12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23",
"24", "25", "26", "27", "28", "29", "30", "31"})
// weekday names
var Weekday = MatchMap(map[string]int{
"Sunday": 1,
"Sun": 1,
"Monday": 2,
"Mon": 2,
"Tuesday": 3,
"Tues": 3,
"Tue": 3,
"Wednesday": 4,
"Wed": 4,
"Thursday": 5,
"Thurs": 5,
"Thur": 5,
"Thu": 5,
"Friday": 6,
"Fri": 6,
"Saturday": 7,
"Sat": 7,
})
// 12-months
var MM = Match([]string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
"00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11",
"12"})
// month names
// Fix: the correctly spelled "November" key was missing (only the
// misspelled "Novemeber" existed), so real November inputs never
// matched. The misspelled key is kept for backward compatibility.
var MonthName = MatchMap(map[string]int{
	"January":   1,
	"Jan":       1,
	"February":  2,
	"Feb":       2,
	"March":     3,
	"Mar":       3,
	"April":     4,
	"Apr":       4,
	"May":       5,
	"June":      6,
	"Jun":       6,
	"July":      7,
	"Jul":       7,
	"August":    8,
	"Aug":       8,
	"September": 9,
	"Sept":      9,
	"Sep":       9,
	"October":   10,
	"Oct":       10,
	"November":  11,
	"Novemeber": 11,
	"Nov":       11,
	"December":  12,
	"Dec":       12,
})
// month: either name or MM
// Fix: added the correctly spelled "November" key; previously only the
// misspelled "Novemeber" existed, so November-by-name never matched.
// The misspelled key is kept for backward compatibility.
var Month = MatchMap(map[string]int{
	"January":   1,
	"Jan":       1,
	"01":        1,
	"1":         1,
	"February":  2,
	"Feb":       2,
	"02":        2,
	"2":         2,
	"March":     3,
	"Mar":       3,
	"03":        3,
	"3":         3,
	"April":     4,
	"Apr":       4,
	"04":        4,
	"4":         4,
	"May":       5,
	"05":        5,
	"5":         5,
	"June":      6,
	"Jun":       6,
	"06":        6,
	"6":         6,
	"July":      7,
	"Jul":       7,
	"07":        7,
	"7":         7,
	"August":    8,
	"Aug":       8,
	"08":        8,
	"8":         8,
	"September": 9,
	"Sept":      9,
	"Sep":       9,
	"09":        9,
	"9":         9,
	"October":   10,
	"Oct":       10,
	"10":        10,
	"November":  11,
	"Novemeber": 11,
	"Nov":       11,
	"11":        11,
	"December":  12,
	"Dec":       12,
	"12":        12,
})
// 4-digit year
var YYYY = func(t *Token) bool {
return t.IsNumber() && t.IsLen(4)
}
// 2-digit year
var YY = func(t *Token) bool {
return t.IsNumber() && t.IsLen(2)
}
// 4-digits timezone offset: HHMM
var TimezoneOffset = func(t *Token) bool {
return t.IsNumber() && t.IsLen(4) &&
HH12(&Token{t.V[:2], t.T, 0}) &&
MINS(&Token{t.V[2:4], t.T, 0})
}
// named timezone
var Timezone = Match([]string{"ACDT", "ACST", "ACT", "ACWDT", "ACWST", "ADDT",
"ADT", "AEDT", "AEST", "AFT", "AHDT", "AHST", "AKDT", "AKST", "AMST", "AMT",
"ANT", "APT", "ARST", "ART", "AST", "AWDT", "AWST", "AWT", "AZOMT", "AZOST",
"AZOT", "BDST", "BDT", "BEAT", "BEAUT", "BMT", "BNT", "BORT", "BORTST",
"BOST", "BOT", "BRST", "BRT", "BST", "BTT", "BURT", "CANT", "CAPT", "CAST",
"CAT", "CAWT", "CCT", "CDDT", "CDT", "CEMT", "CEST", "CET", "CGST", "CGT",
"CHADT", "CHAST", "CHDT", "CHOST", "CHOT", "CHUT", "CKHST", "CKT", "CLST",
"CLT", "CMT", "COST", "COT", "CPT", "CST", "CUT", "CVST", "CVT", "CWT",
"CXT", "ChST", "DACT", "DMT", "EASST", "EAST", "EAT", "ECT", "EDDT", "EDT",
"EEST", "EET", "EGST", "EGT", "EHDT", "EMT", "EPT", "EST", "EWT", "FFMT",
"FJST", "FJT", "FKST", "FKT", "FMT", "FNST", "FNT", "GALT", "GAMT", "GBGT",
"GFT", "GHST", "GILT", "GMT", "GST", "GYT", "HDT", "HKST", "HKT", "HMT",
"HOVST", "HOVT", "HST", "ICT", "IDDT", "IDT", "IHST", "IMT", "IOT", "IRDT",
"IRST", "ISST", "IST", "JAVT", "JCST", "JDT", "JMT", "JST", "JWST", "KART",
"KDT", "KMT", "KOST", "KST", "KWAT", "LHDT", "LHST", "LINT", "LKT", "LMT",
"LRT", "LST", "MADMT", "MADST", "MADT", "MALST", "MALT", "MART", "MDDT",
"MDST", "MDT", "MHT", "MIST", "MMT", "MOST", "MOT", "MPT", "MSD", "MSK",
"MST", "MUST", "MUT", "MVT", "MWT", "MYT", "NCST", "NCT", "NDDT", "NDT",
"NEGT", "NEST", "NET", "NFST", "NFT", "NMT", "NPT", "NRT", "NST", "NUT",
"NWT", "NZDT", "NZMT", "NZST", "PDDT", "PDT", "PEST", "PET", "PGT", "PHOT",
"PHST", "PHT", "PKST", "PKT", "PLMT", "PMDT", "PMMT", "PMST", "PMT", "PNT",
"PONT", "PPMT", "PPT", "PST", "PWT", "PYST", "PYT", "QMT", "RET", "RMT",
"SAST", "SBT", "SCT", "SDMT", "SDT", "SET", "SGT", "SJMT", "SMT", "SRT",
"SST", "SWAT", "TAHT", "TBMT", "TKT", "TLT", "TMT", "TOST", "TOT", "TVT",
"ULAST", "ULAT", "UYHST", "UYST", "UYT", "VET", "VUST", "VUT", "WAKT",
"WARST", "WART", "WAST", "WAT", "WEMT", "WEST", "WET", "WFT", "WGST", "WGT",
"WIB", "WIT", "WITA", "WMT", "WSDT", "WSST", "XJT", "YDDT", "YDT", "YPT",
"YST", "YWT"})
// formatting
var HoursName = Match([]string{"h", "hour", "hours"})
var MinsName = Match([]string{"m", "min", "mins", "minute", "minutes"})
var SecsName = Match([]string{"s", "sec", "secs", "second", "seconds"})
var DateSep = Match([]string{"-", "/", "."})
var TimeSep = Match([]string{":"})
var AmPm = Match([]string{"am", "pm"})
var Sign = Match([]string{"-", "+"}) | patterns.go | 0.576184 | 0.417628 | patterns.go | starcoder |
package findfirstandlastpositionofelementinsortedarray
// searchRange returns the first and last index of target in the sorted
// slice nums, or [-1, -1] when target is absent. It runs two binary
// searches: a lower bound and an upper bound.
// binary search
// time complexity: O(logn)
// space complexity: O(1)
func searchRange(nums []int, target int) []int {
	// Index of the leftmost element >= target (== len(nums) if none).
	lowerIndex := firstOccurance(nums, target)
	first := -1
	if lowerIndex != len(nums) && target == nums[lowerIndex] {
		first = lowerIndex
	}
	// Index of the leftmost element > target (== len(nums) if none);
	// the last occurrence, if any, sits immediately before it.
	upperIndex := lastOccurance(nums, target)
	last := -1
	if upperIndex == len(nums) && len(nums) > 0 && target == nums[len(nums)-1] {
		// Every element is <= target; check the final slot directly.
		last = len(nums) - 1
	} else if upperIndex != len(nums) && upperIndex > 0 && target == nums[upperIndex-1] {
		last = upperIndex - 1
	}
	return []int{first, last}
}
// firstOccurance returns the index of the leftmost element that is
// greater than or equal to target (the classic lower-bound binary
// search). When every element is smaller, len(nums) is returned.
func firstOccurance(nums []int, target int) int {
	lo, hi := 0, len(nums)
	for lo < hi {
		mid := lo + (hi-lo)/2
		if nums[mid] >= target {
			hi = mid
		} else {
			lo = mid + 1
		}
	}
	return lo
}
// lastOccurance returns the index of the leftmost element strictly
// greater than target (the classic upper-bound binary search). When no
// element is greater, len(nums) is returned; the last occurrence of
// target, if present, sits at the returned index minus one.
func lastOccurance(nums []int, target int) int {
	lo, hi := 0, len(nums)
	for lo < hi {
		mid := lo + (hi-lo)/2
		if nums[mid] > target {
			hi = mid
		} else {
			lo = mid + 1
		}
	}
	return lo
}
// searchRange1 finds the first and last index of target by walking two
// cursors inward from both ends until each lands on a target element
// (or they cross, meaning target is absent).
// double index scan
// Time complexity: O(n)
// Space complexity: O(1)
func searchRange1(nums []int, target int) []int {
	l, r := 0, len(nums)-1
	for l <= r {
		moved := false
		if nums[l] != target {
			l++
			moved = true
		}
		if nums[r] != target {
			r--
			moved = true
		}
		if !moved {
			// Both cursors sit on target: [l, r] is the answer.
			return []int{l, r}
		}
	}
	return []int{-1, -1}
}
// searchRange2 locates any one occurrence of target with a plain binary
// search, then widens the [l, r] window linearly over equal neighbours.
// binary search + linear scan
// Time complexity: O(logn) ~ O(n)
// Space complexity: O(1)
func searchRange2(nums []int, target int) []int {
	lo, hi := 0, len(nums)-1
	hit := -1
	for lo <= hi {
		mid := lo + (hi-lo)/2
		if nums[mid] == target {
			hit = mid
			break
		}
		if nums[mid] < target {
			lo = mid + 1
		} else {
			hi = mid - 1
		}
	}
	if hit == -1 {
		return []int{-1, -1}
	}
	// Expand outward while the neighbours still equal target.
	l, r := hit, hit
	for l > 0 && nums[l-1] == target {
		l--
	}
	for r < len(nums)-1 && nums[r+1] == target {
		r++
	}
	return []int{l, r}
}
package value
import (
"errors"
"fmt"
"reflect"
"time"
"google.golang.org/protobuf/types/known/timestamppb"
)
var errNotSlice = errors.New("not a slice type")
var errUnsupportedSliceType = errors.New("unsupported slice type")
// getTypeOfSlice returns the type of the elements of the slice
func getTypeOfSlice(i interface{}) (reflect.Type, error) {
v := reflect.ValueOf(i)
if v.Kind() != reflect.Slice {
return reflect.TypeOf(nil), errNotSlice
}
return v.Type().Elem(), nil
}
// sliceTypeToValueListTypeMap is the set of supported slice types,
// keyed by the slice's element type and mapping to the corresponding
// ValueListType tag.
// NOTE(review): both []time.Duration and []*time.Duration map to
// ValueListType_Duration, unlike every other pointer element type which
// gets its own Ptr* tag — confirm this is intentional.
var sliceTypeToValueListTypeMap map[reflect.Type]ValueListType = map[reflect.Type]ValueListType{
	reflect.TypeOf([]interface{}{}).Elem():            ValueListType_Interface,
	reflect.TypeOf([]bool{}).Elem():                   ValueListType_Bool,
	reflect.TypeOf([]*bool{}).Elem():                  ValueListType_PtrBool,
	reflect.TypeOf([][]byte{}).Elem():                 ValueListType_Bytes,
	reflect.TypeOf([]float32{}).Elem():                ValueListType_Float,
	reflect.TypeOf([]*float32{}).Elem():               ValueListType_PtrFloat,
	reflect.TypeOf([]float64{}).Elem():                ValueListType_Double,
	reflect.TypeOf([]*float64{}).Elem():               ValueListType_PtrDouble,
	reflect.TypeOf([]int32{}).Elem():                  ValueListType_Int32,
	reflect.TypeOf([]*int32{}).Elem():                 ValueListType_PtrInt32,
	reflect.TypeOf([]int64{}).Elem():                  ValueListType_Int64,
	reflect.TypeOf([]*int64{}).Elem():                 ValueListType_PtrInt64,
	reflect.TypeOf([]uint32{}).Elem():                 ValueListType_UInt32,
	reflect.TypeOf([]*uint32{}).Elem():                ValueListType_PtrUInt32,
	reflect.TypeOf([]uint64{}).Elem():                 ValueListType_UInt64,
	reflect.TypeOf([]*uint64{}).Elem():                ValueListType_PtrUInt64,
	reflect.TypeOf([]string{}).Elem():                 ValueListType_String,
	reflect.TypeOf([]*string{}).Elem():                ValueListType_PtrString,
	reflect.TypeOf([]time.Time{}).Elem():              ValueListType_Time,
	reflect.TypeOf([]*time.Time{}).Elem():             ValueListType_PtrTime,
	reflect.TypeOf([]time.Duration{}).Elem():          ValueListType_Duration,
	reflect.TypeOf([]*time.Duration{}).Elem():         ValueListType_Duration,
	reflect.TypeOf([][]interface{}{}).Elem():          ValueListType_ValueList,
	reflect.TypeOf([]map[string]interface{}{}).Elem(): ValueListType_ValueMap,
}
// fromSliceTypeToValueListType maps from the type of the elements
// of the slice to the ValueListType enumeration value. It returns
// errNotSlice for non-slice input and errUnsupportedSliceType when the
// element type has no mapping.
func fromSliceTypeToValueListType(i interface{}) (ValueListType, error) {
	elemType, err := getTypeOfSlice(i)
	if err != nil {
		return ValueListType_UnknownValueListType, err
	}
	if vlt, ok := sliceTypeToValueListTypeMap[elemType]; ok {
		return vlt, nil
	}
	return ValueListType_UnknownValueListType, errUnsupportedSliceType
}
// listBuilder creates a Value containing a ValueList from one of the
// supported slice types. The element type tag is resolved first, so an
// unsupported slice fails before any element conversion; each element
// is then converted recursively via NewValue, and any unsupported
// element aborts the whole list.
func listBuilder(i interface{}) (*Value, error) {
	t, err := fromSliceTypeToValueListType(i)
	if err != nil {
		return nil, err
	}
	x := reflect.ValueOf(i)
	l := make([]*Value, 0, x.Len())
	for i := 0; i < x.Len(); i++ {
		v, err := NewValue(x.Index(i).Interface())
		if err != nil {
			return nil, err
		}
		l = append(l, v)
	}
	return &Value{
		V: &Value_L{
			L: &Value_ValueList{
				V: l,
				T: t,
			},
		},
	}, nil
}
// NewValue creates an instance of Value holding the specified value
func NewValue(i interface{}) (*Value, error) {
if i == nil {
return &Value{V: &Value_IsNull{IsNull: true}}, nil
}
var v *Value
switch x := i.(type) {
case bool:
{
v = &Value{V: &Value_B{B: x}}
}
case *bool:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pb{Pb: *x}}
}
}
case []byte:
{
v = &Value{V: &Value_X{X: x}}
}
case int32:
{
v = &Value{V: &Value_I32{I32: x}}
}
case *int32:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pi32{Pi32: *x}}
}
}
case int64:
{
v = &Value{V: &Value_I64{I64: x}}
}
case *int64:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pi64{Pi64: *x}}
}
}
case uint32:
{
v = &Value{V: &Value_U32{U32: x}}
}
case *uint32:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pu32{Pu32: *x}}
}
}
case uint64:
{
v = &Value{V: &Value_U64{U64: x}}
}
case *uint64:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pu64{Pu64: *x}}
}
}
case float32:
{
v = &Value{V: &Value_F{F: x}}
}
case *float32:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pf{Pf: *x}}
}
}
case float64:
{
v = &Value{V: &Value_D{D: x}}
}
case *float64:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pd{Pd: *x}}
}
}
case string:
{
v = &Value{V: &Value_S{S: x}}
}
case *string:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Ps{Ps: *x}}
}
}
case time.Time:
{
v = &Value{V: &Value_T{
T: ×tamppb.Timestamp{
Seconds: x.Unix(),
Nanos: int32(x.Nanosecond()),
}},
}
}
case *time.Time:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pt{
Pt: ×tamppb.Timestamp{
Seconds: x.Unix(),
Nanos: int32(x.Nanosecond()),
}},
}
}
}
case time.Duration:
{
v = &Value{V: &Value_Dur{Dur: int64(x)}}
}
case *time.Duration:
{
if x == nil {
v = &Value{V: &Value_IsNull{IsNull: true}}
} else {
v = &Value{V: &Value_Pdur{Pdur: int64(*x)}}
}
}
case map[string]interface{}:
{
m := map[string]*Value{}
for k, v := range x {
newV, err := NewValue(v)
if err != nil {
return nil, err
}
m[k] = newV
}
v = &Value{
V: &Value_M{
M: &Value_ValueMap{M: m},
},
}
}
case []interface{},
[]bool, []*bool, [][]byte,
[]int64, []*int64,
[]uint64, []*uint64,
[]float64, []*float64,
[]int32, []*int32,
[]uint32, []*uint32,
[]float32, []*float32,
[]string, []*string,
[]time.Time, []*time.Time,
[]time.Duration, []*time.Duration,
[]map[string]interface{}:
{
var err error
v, err = listBuilder(x)
if err != nil {
return nil, err
}
}
default:
{
vv := reflect.ValueOf(i)
if vv.Type().Kind() != reflect.Slice {
return nil, fmt.Errorf("unsupported type: %v", reflect.TypeOf(x))
}
l := make([]*Value, vv.Len())
var err error
for i := 0; i < vv.Len(); i++ {
if !vv.Index(i).IsValid() {
continue
}
l[i], err = NewValue(vv.Index(i).Interface())
if err != nil {
return nil, err
}
}
v = &Value{
V: &Value_L{
L: &Value_ValueList{
V: l,
T: ValueListType_Interface,
},
},
}
}
}
return v, nil
} | types/value/valueBuilder.go | 0.610453 | 0.423696 | valueBuilder.go | starcoder |
package mercury
import (
"path/filepath"
"strings"
)
// NamespaceSpec implements a parsed namespace search
type NamespaceSpec interface {
Type() string
Value() string
String() string
Raw() string
Match(string) bool
}
// Namespace Spec types
const (
TypeNamespaceNode = "node"
TypeNamespaceStar = "star"
TypeNamespaceTrace = "trace"
)
// String output string value
func (n NamespaceSearch) String() string {
lis := make([]string, 0, len(n))
for _, v := range n {
lis = append(lis, v.String())
}
return strings.Join(lis, ",")
}
// String output string value
func (n NamespaceNode) String() string {
return string(n)
}
// String output string value
func (n NamespaceTrace) String() string {
return "trace:" + string(n)
}
// String output string value
func (n NamespaceStar) String() string {
return string(n)
}
// Quote return quoted value.
func (n NamespaceNode) Quote() string { return `'` + n.Value() + `'` }
// Quote return quoted value.
func (n NamespaceTrace) Quote() string { return `'` + n.Value() + `'` }
// Quote return quoted value.
func (n NamespaceStar) Quote() string { return `'` + n.Value() + `'` }
// NamespaceSearch list of namespace specs
type NamespaceSearch []NamespaceSpec
// NamespaceNode implements a node search value
type NamespaceNode string
// Type to identify the type
func (NamespaceNode) Type() string { return TypeNamespaceNode }
// Value to return the value
func (n NamespaceNode) Value() string { return string(n) }
// NamespaceTrace implements a trace search value
type NamespaceTrace string
// Type returns the type of the value
func (NamespaceTrace) Type() string { return TypeNamespaceTrace }
// Value to return the value
func (n NamespaceTrace) Value() string { return strings.Replace(string(n), "*", "%", -1) }
// NamespaceStar implements a trace search value
type NamespaceStar string
// Type returns the type of the value
func (NamespaceStar) Type() string { return TypeNamespaceStar }
// Value to return the value
func (n NamespaceStar) Value() string { return strings.Replace(string(n), "*", "%", -1) }
// ParseNamespace returns a list of parsed values.
// The input is split on ';' into segments; a segment prefixed with
// "trace:" yields a NamespaceTrace for each comma-separated entry,
// otherwise each entry becomes a NamespaceStar when it contains '*' or
// a plain NamespaceNode when it does not.
func ParseNamespace(ns string) (lis NamespaceSearch) {
	for _, part := range strings.Split(ns, ";") {
		if strings.HasPrefix(part, "trace:") {
			// Strip the 6-byte "trace:" prefix before splitting on ','.
			for _, s := range strings.Split(part[6:], ",") {
				lis = append(lis, NewNamespace(s, TypeNamespaceTrace))
			}
		} else {
			for _, s := range strings.Split(part, ",") {
				if strings.Contains(s, "*") {
					lis = append(lis, NewNamespace(s, TypeNamespaceStar))
				} else {
					lis = append(lis, NewNamespace(s, TypeNamespaceNode))
				}
			}
		}
	}
	return
}
// NewNamespace returns requested type that implements NamespaceSpec
func NewNamespace(ns, t string) NamespaceSpec {
switch t {
case TypeNamespaceTrace:
return NamespaceTrace(ns)
case TypeNamespaceStar:
return NamespaceStar(ns)
default:
return NamespaceNode(ns)
}
}
// Raw return raw value.
func (n NamespaceNode) Raw() string { return string(n) }
// Raw return raw value.
func (n NamespaceTrace) Raw() string { return string(n) }
// Raw return raw value.
func (n NamespaceStar) Raw() string { return string(n) }
// Match returns true if any match.
// Each spec's raw pattern is applied to s with filepath.Match.
// NOTE(review): a malformed pattern makes this return false
// immediately, skipping any later specs that might have matched —
// confirm whether aborting the scan is intended.
func (n NamespaceSearch) Match(s string) bool {
	for _, m := range n {
		ok, err := filepath.Match(m.Raw(), s)
		if err != nil {
			return false
		}
		if ok {
			return true
		}
	}
	return false
}
// match applies filepath.Match with the spec's raw pattern; a malformed
// pattern is treated as a non-match.
func match(n NamespaceSpec, s string) bool {
	ok, err := filepath.Match(n.Raw(), s)
	if err != nil {
		return false
	}
	return ok
}

// Match returns true if any match.
func (n NamespaceNode) Match(s string) bool { return match(n, s) }

// Match returns true if any match.
func (n NamespaceTrace) Match(s string) bool { return match(n, s) }

// Match returns true if any match.
func (n NamespaceStar) Match(s string) bool { return match(n, s) }
package sandbox
import (
"encoding/json"
)
// SandboxInsertBeamStatsRequest struct for SandboxInsertBeamStatsRequest
// (machine-generated OpenAPI model; regenerate rather than hand-editing)
type SandboxInsertBeamStatsRequest struct {
	BeamStatsMap *SandboxInsertBeamStatsRequestBeamStatsMap `json:"beamStatsMap,omitempty"`
	// UNIX time (in milliseconds)
	Unixtime *int64 `json:"unixtime,omitempty"`
}
// NewSandboxInsertBeamStatsRequest instantiates a new SandboxInsertBeamStatsRequest object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewSandboxInsertBeamStatsRequest() *SandboxInsertBeamStatsRequest {
this := SandboxInsertBeamStatsRequest{}
return &this
}
// NewSandboxInsertBeamStatsRequestWithDefaults instantiates a new SandboxInsertBeamStatsRequest object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewSandboxInsertBeamStatsRequestWithDefaults() *SandboxInsertBeamStatsRequest {
this := SandboxInsertBeamStatsRequest{}
return &this
}
// GetBeamStatsMap returns the BeamStatsMap field value if set, zero value otherwise.
func (o *SandboxInsertBeamStatsRequest) GetBeamStatsMap() SandboxInsertBeamStatsRequestBeamStatsMap {
if o == nil || o.BeamStatsMap == nil {
var ret SandboxInsertBeamStatsRequestBeamStatsMap
return ret
}
return *o.BeamStatsMap
}
// GetBeamStatsMapOk returns a tuple with the BeamStatsMap field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *SandboxInsertBeamStatsRequest) GetBeamStatsMapOk() (*SandboxInsertBeamStatsRequestBeamStatsMap, bool) {
if o == nil || o.BeamStatsMap == nil {
return nil, false
}
return o.BeamStatsMap, true
}
// HasBeamStatsMap returns a boolean if a field has been set.
func (o *SandboxInsertBeamStatsRequest) HasBeamStatsMap() bool {
if o != nil && o.BeamStatsMap != nil {
return true
}
return false
}
// SetBeamStatsMap gets a reference to the given SandboxInsertBeamStatsRequestBeamStatsMap and assigns it to the BeamStatsMap field.
func (o *SandboxInsertBeamStatsRequest) SetBeamStatsMap(v SandboxInsertBeamStatsRequestBeamStatsMap) {
o.BeamStatsMap = &v
}
// GetUnixtime returns the Unixtime field value if set, zero value otherwise.
func (o *SandboxInsertBeamStatsRequest) GetUnixtime() int64 {
if o == nil || o.Unixtime == nil {
var ret int64
return ret
}
return *o.Unixtime
}
// GetUnixtimeOk returns a tuple with the Unixtime field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *SandboxInsertBeamStatsRequest) GetUnixtimeOk() (*int64, bool) {
if o == nil || o.Unixtime == nil {
return nil, false
}
return o.Unixtime, true
}
// HasUnixtime returns a boolean if a field has been set.
func (o *SandboxInsertBeamStatsRequest) HasUnixtime() bool {
if o != nil && o.Unixtime != nil {
return true
}
return false
}
// SetUnixtime gets a reference to the given int64 and assigns it to the Unixtime field.
func (o *SandboxInsertBeamStatsRequest) SetUnixtime(v int64) {
o.Unixtime = &v
}
func (o SandboxInsertBeamStatsRequest) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.BeamStatsMap != nil {
toSerialize["beamStatsMap"] = o.BeamStatsMap
}
if o.Unixtime != nil {
toSerialize["unixtime"] = o.Unixtime
}
return json.Marshal(toSerialize)
}
// NullableSandboxInsertBeamStatsRequest distinguishes "explicitly set to
// null" from "not present" when (de)serializing JSON.
type NullableSandboxInsertBeamStatsRequest struct {
	value *SandboxInsertBeamStatsRequest
	isSet bool // true once Set or UnmarshalJSON has been called
}

// Get returns the wrapped value (may be nil).
func (v NullableSandboxInsertBeamStatsRequest) Get() *SandboxInsertBeamStatsRequest {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableSandboxInsertBeamStatsRequest) Set(val *SandboxInsertBeamStatsRequest) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableSandboxInsertBeamStatsRequest) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableSandboxInsertBeamStatsRequest) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableSandboxInsertBeamStatsRequest returns a wrapper already marked as set.
func NewNullableSandboxInsertBeamStatsRequest(val *SandboxInsertBeamStatsRequest) *NullableSandboxInsertBeamStatsRequest {
	return &NullableSandboxInsertBeamStatsRequest{value: val, isSet: true}
}

// MarshalJSON emits the wrapped value; nil encodes as JSON null.
func (v NullableSandboxInsertBeamStatsRequest) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set,
// even when src is the JSON literal null.
func (v *NullableSandboxInsertBeamStatsRequest) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package sweetiebot
import (
"fmt"
"strings"
"github.com/bwmarrin/discordgo"
)
// GroupsModule bundles all commands for creating, joining, listing, leaving,
// pinging and purging user-managed ping groups.
type GroupsModule struct {
}

// Name returns the module's display name.
func (w *GroupsModule) Name() string {
	return "Groups"
}

// Register is a no-op; this module installs no event hooks.
func (w *GroupsModule) Register(info *GuildInfo) {}

// Commands lists the commands this module contributes.
func (w *GroupsModule) Commands() []Command {
	return []Command{
		&AddGroupCommand{},
		&JoinGroupCommand{},
		&ListGroupCommand{},
		&LeaveGroupCommand{},
		&PingCommand{},
		&PurgeGroupCommand{},
	}
}

// Description returns a short human-readable module description.
func (w *GroupsModule) Description() string {
	return "Contains commands for manipulating groups and pinging them."
}
// AddGroupCommand creates a new pingable group.
type AddGroupCommand struct {
}

// Name returns the command name used to invoke it.
func (c *AddGroupCommand) Name() string {
	return "AddGroup"
}

// Process creates the group named by args[0] and adds the caller as its
// first member. Group names are normalized to lowercase.
func (c *AddGroupCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to name the group!```", false, nil
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	if ok {
		return "```That group already exists!```", false, nil
	}
	// Lazily allocate the outer map the first time any group is created;
	// writing to a nil map would panic.
	if len(info.config.Basic.Groups) <= 0 {
		info.config.Basic.Groups = make(map[string]map[string]bool)
	}
	group := make(map[string]bool)
	group[msg.Author.ID] = true
	info.config.Basic.Groups[arg] = group
	info.SaveConfig()
	return "```Successfully created the " + arg + " group! Join it using !joingroup " + arg + " and ping it using !ping " + arg + ".```", false, nil
}

// Usage describes the command for the help system.
func (c *AddGroupCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Creates a new group and automatically adds you to it. Groups are automatically destroyed when everyone in the group leaves.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "name", Desc: "Name of the new group. Should not contain spaces or anything other than letters and numbers.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *AddGroupCommand) UsageShort() string { return "Creates a new group." }
// JoinGroupCommand adds the caller to an existing group.
type JoinGroupCommand struct {
}

// Name returns the command name used to invoke it.
func (c *JoinGroupCommand) Name() string {
	return "JoinGroup"
}

// Process adds the caller to the group named by args[0] (case-insensitive).
func (c *JoinGroupCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false, nil
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	if !ok {
		return "```That group doesn't exist! Use !listgroup to list existing groups.```", false, nil
	}
	// The inner member map is guaranteed to exist because the group does.
	info.config.Basic.Groups[arg][msg.Author.ID] = true
	info.SaveConfig()
	return "```Successfully joined the " + arg + " group! Ping it using !ping " + arg + " or leave it using !leavegroup " + arg + ". WARNING: Pinging a group will ping EVERYONE IN THE GROUP.```", false, nil
}

// Usage describes the command for the help system.
func (c *JoinGroupCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Joins an existing group.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "group", Desc: "Name of the group to join (case-insensitive).", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *JoinGroupCommand) UsageShort() string { return "Joins an existing group." }
// ListGroupCommand lists all groups, or the members of one group.
type ListGroupCommand struct {
}

// Name returns the command name used to invoke it.
func (c *ListGroupCommand) Name() string {
	return "ListGroup"
}

// Process with no arguments lists all group names; with a group name it
// lists that group's members by username.
func (c *ListGroupCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		if len(info.config.Basic.Groups) <= 0 {
			return "```No groups to list!```", false, nil
		}
		keys := make([]string, 0, len(info.config.Basic.Groups))
		for k := range info.config.Basic.Groups {
			keys = append(keys, k)
		}
		return "```\n" + strings.Join(keys, ", ") + "```", false, nil
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	if !ok {
		return "```That group doesn't exist! Use !listgroup with no arguments to list existing groups.```", false, nil
	}
	// Collect usernames, skipping members the database can no longer
	// resolve. (Previously unresolved members left empty "" entries in the
	// joined output, producing stray commas.)
	pings := make([]string, 0, len(info.config.Basic.Groups[arg]))
	for k := range info.config.Basic.Groups[arg] {
		m, _, _, _ := sb.db.GetUser(SBatoi(k))
		if m != nil {
			pings = append(pings, m.Username)
		}
	}
	return "```\n" + strings.Join(pings, ", ") + "```", false, nil
}

// Usage describes the command for the help system.
func (c *ListGroupCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Lists all current groups, or lists all the members of a group.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "group", Desc: "Name of the group to display. If omitted, will display all groups instead.", Optional: true},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *ListGroupCommand) UsageShort() string { return "Lists all groups." }
// LeaveGroupCommand removes the caller from a group, deleting the group
// when it becomes empty.
type LeaveGroupCommand struct {
}

// Name returns the command name used to invoke it.
func (c *LeaveGroupCommand) Name() string {
	return "LeaveGroup"
}

// Process removes the caller from the group named by args[0]. If that was
// the last member, the group itself is deleted.
func (c *LeaveGroupCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false, nil
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	if !ok {
		return "```That group doesn't exist! Use !listgroup to list existing groups.```", false, nil
	}
	_, ok = info.config.Basic.Groups[arg][msg.Author.ID]
	if !ok {
		return "```You aren't in that group!```", false, nil
	}
	delete(info.config.Basic.Groups[arg], msg.Author.ID)
	// Empty groups are garbage-collected immediately.
	if len(info.config.Basic.Groups[arg]) <= 0 {
		delete(info.config.Basic.Groups, arg)
	}
	info.SaveConfig()
	return "```You have been removed from " + arg + "```", false, nil
}

// Usage describes the command for the help system.
func (c *LeaveGroupCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Removes you from the given group, if you are a member of it.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "group", Desc: "Name of the group to leave.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *LeaveGroupCommand) UsageShort() string { return "Removes you from a group." }
// getGroupPings builds a Discord mention string ("<@id> <@id> ...") for the
// union of all members of the given groups. Returns "" when no groups are
// supplied.
func getGroupPings(groups []string, info *GuildInfo) string {
	if len(groups) == 0 {
		return ""
	}
	// Union the member sets so users in several groups are pinged once.
	members := make(map[string]bool)
	for _, name := range groups {
		for id, v := range info.config.Basic.Groups[name] {
			members[id] = v
		}
	}
	ids := make([]string, 0, len(members))
	for id := range members {
		// We convert to integers and then back to strings to prevent bloons from fucking with the bot
		ids = append(ids, SBitoa(SBatoi(id)))
	}
	return "<@" + strings.Join(ids, "> <@") + ">"
}
// PingCommand pings every member of one or more groups, relaying a message.
type PingCommand struct {
}

// Name returns the command name used to invoke it.
func (c *PingCommand) Name() string {
	return "Ping"
}

// Process pings the group (or "+"-separated groups) named by args[0],
// echoing the rest of the message. The caller must be a member of every
// group pinged.
func (c *PingCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false, nil
	}
	// Allow the group name and the message to be separated by a newline
	// instead of a space: split the first token once on "\n".
	nargs := strings.SplitN(args[0], "\n", 2)
	args = append(nargs, args[1:]...)
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	m := ""
	// indices[1] is the byte offset of the second argument in the raw
	// message content; everything from there on is the echo text.
	if len(indices) > 1 {
		m = msg.Content[indices[1]:]
	}
	if !ok {
		// Not a single group name: try interpreting it as "group1+group2".
		groups := strings.Split(arg, "+")
		for _, v := range groups {
			_, ok = info.config.Basic.Groups[v]
			if !ok {
				return fmt.Sprintf("```The %s group doesn't exist! Use !listgroup to list existing groups.```", v), false, nil
			}
			_, ok = info.config.Basic.Groups[v][msg.Author.ID]
			if !ok {
				return fmt.Sprintf("```You aren't a member of %s. You can only ping groups you are a member of.```", v), false, nil
			}
		}
		sb.dg.ChannelMessageSend(msg.ChannelID, arg+": "+getGroupPings(groups, info)+" "+info.SanitizeOutput(m))
	} else {
		_, ok = info.config.Basic.Groups[arg][msg.Author.ID]
		if !ok {
			return "```You can only ping groups you are a member of.```", false, nil
		}
		sb.dg.ChannelMessageSend(msg.ChannelID, arg+": "+getGroupPings([]string{arg}, info)+" "+info.SanitizeOutput(m))
	}
	return "", false, nil
}

// Usage describes the command for the help system.
func (c *PingCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Pings everyone in a group with the given message, but only if you are a member of the group.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "group", Desc: "Name of the group to ping. You can ping multiple groups at the same time by using `group1+group2`", Optional: false},
			CommandUsageParam{Name: "arbitrary string", Desc: "String for Sweetiebot to echo to the group, no spaces required.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *PingCommand) UsageShort() string { return "Pings a group." }
// PurgeGroupCommand deletes an entire group regardless of membership.
type PurgeGroupCommand struct {
}

// Name returns the command name used to invoke it.
func (c *PurgeGroupCommand) Name() string {
	return "PurgeGroup"
}

// Process deletes the group named by args[0].
// NOTE(review): there is no permission or membership check here — any user
// who can invoke the command can delete any group. Confirm access control
// happens at the command-dispatch layer.
func (c *PurgeGroupCommand) Process(args []string, msg *discordgo.Message, indices []int, info *GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if len(args) < 1 {
		return "```You have to provide a group name!```", false, nil
	}
	arg := strings.TrimSpace(strings.ToLower(args[0]))
	_, ok := info.config.Basic.Groups[arg]
	if !ok {
		return "```That group doesn't exist! Use !listgroup to list existing groups.```", false, nil
	}
	delete(info.config.Basic.Groups, arg)
	info.SaveConfig()
	return "```Deleted " + arg + "```", false, nil
}

// Usage describes the command for the help system.
func (c *PurgeGroupCommand) Usage(info *GuildInfo) *CommandUsage {
	return &CommandUsage{
		Desc: "Deletes the group, if it exists.",
		Params: []CommandUsageParam{
			CommandUsageParam{Name: "group", Desc: "Name of the group to delete.", Optional: false},
		},
	}
}

// UsageShort returns the one-line help summary.
func (c *PurgeGroupCommand) UsageShort() string { return "Deletes a group." }
package list
// ListMapToParamFunctions is the text/template fragment that generates
// MapTo{T} and FlatMapTo{T} methods for a generated list type. It emits one
// of two variants: for basic target types the result is a plain []T slice;
// for non-basic types it is the generated {T}List collection type.
const ListMapToParamFunctions = `
//-------------------------------------------------------------------------------------------------
// List:MapTo[{{.TypeParameter}}]

{{if .TypeParameter.IsBasic}}
// MapTo{{.TypeParameter.LongName}} transforms {{.TName}}List to []{{.TypeParameter.Name}}.
func (list {{.TName}}List) MapTo{{.TypeParameter.LongName}}(fn func({{.PName}}) {{.TypeParameter}}) []{{.TypeParameter}} {
	result := make([]{{.TypeParameter}}, 0, len(list))
	for _, v := range list {
		u := fn(v)
		result = append(result, {{.Addr}}u)
	}
	return result
}

// FlatMapTo{{.TypeParameter.LongName}} transforms {{.TName}}List to {{.TypeParameter.Name}}List, by repeatedly
// calling the supplied function and concatenating the results as a single flat list.
func (list {{.TName}}List) FlatMapTo{{.TypeParameter.LongName}}(fn func({{.PName}}) []{{.TypeParameter}}) []{{.TypeParameter}} {
	result := make([]{{.TypeParameter}}, 0, len(list))
	for _, v := range list {
		u := fn(v)
		if len(u) > 0 {
			result = append(result, u...)
		}
	}
	return result
}

{{else}}
// MapTo{{.TypeParameter.LongName}} transforms {{.TName}}List to {{.TypeParameter.Name}}List.
func (list {{.TName}}List) MapTo{{.TypeParameter.LongName}}(fn func({{.PName}}) {{.TypeParameter}}) {{.TypeParameter.Name}}Collection {
	result := make({{.TypeParameter.Name}}List, 0, len(list))
	for _, v := range list {
		u := fn(v)
		result = append(result, {{.Addr}}u)
	}
	return result
}

// FlatMapTo{{.TypeParameter.LongName}} transforms {{.TName}}List to {{.TypeParameter.Name}}List, by repeatedly
// calling the supplied function and concatenating the results as a single flat list.
func (list {{.TName}}List) FlatMapTo{{.TypeParameter.LongName}}(fn func({{.PName}}) {{.TypeParameter.Name}}Collection) {{.TypeParameter.Name}}Collection {
	result := make({{.TypeParameter.Name}}List, 0, len(list))
	for _, v := range list {
		u := fn(v)
		if u.NonEmpty() {
			result = append(result, (u.ToList())...)
		}
	}
	return result
}

{{end}}
`
package pathThroughMap
import (
"fmt"
e "github.com/daniloanp/IA/environment"
"math"
)
// currentDistance is package-level scratch state: the straight-line distance
// from the square currently being expanded to the goal. It is written by
// AStar and read by heuristicCostEstimate.
// NOTE(review): shared mutable package state makes concurrent AStar calls
// unsafe — confirm single-threaded use.
var currentDistance float64
// distanceBetweenTwoPoints returns the Euclidean distance between two grid
// points.
func distanceBetweenTwoPoints(p1, p2 e.Point) float64 {
	dRow := float64(p1.Row - p2.Row)
	dCol := float64(p1.Column - p2.Column)
	return math.Sqrt(dRow*dRow + dCol*dCol)
}
// heuristicCostEstimate is the A* heuristic: the Euclidean distance from
// origin to goal, plus — when origin is no closer to the goal than the
// package-level currentDistance recorded by AStar — the cost of the
// cheapest neighbor that moves toward the goal.
// NOTE(review): adding a neighbor's cost can overestimate the remaining
// cost, making the heuristic potentially inadmissible (A* may then return a
// suboptimal path). Also, if no neighbor is strictly closer to the goal,
// the 1<<20 sentinel is added to the estimate unchanged — confirm both are
// intended.
func heuristicCostEstimate(origin *Square, goal *Square) float64 {
	var distance = distanceBetweenTwoPoints(origin.Position, goal.Position)
	if distance >= currentDistance {
		// Sentinel larger than any expected square cost.
		var min int = 1 << 20
		for _, neighbor := range origin.Neighbors() { // which is the neighbor nearest to goal with min cost
			if distanceBetweenTwoPoints(neighbor.Position, goal.Position) < distance && neighbor.Cost() < min {
				min = neighbor.Cost()
			}
		}
		distance += float64(min)
	}
	return distance
}
// getMin returns the square in openSet with the lowest fScore (ties broken
// by map iteration order). It returns nil when openSet is empty.
// Previously min was initialized to 1<<30-1, so any square whose fScore
// exceeded ~1.07e9 could never be selected and a non-empty set could yield
// nil; initializing to +Inf makes selection correct for all finite scores.
func getMin(openSet map[*Square]bool, fScore map[*Square]float64) *Square {
	var best *Square
	min := math.Inf(1)
	for sq := range openSet {
		if fScore[sq] <= min {
			min = fScore[sq]
			best = sq
		}
	}
	return best
}
// reconstructPath walks the cameFrom links back from current and returns
// the path in REVERSE order (goal first, origin last); callers must reverse
// it if they need origin-to-goal order.
// The previous version pre-allocated a capacity of 42*42, hard-coding an
// assumption about the map size; append grows the slice as needed.
func reconstructPath(cameFrom map[*Square]*Square, current *Square) []*Square {
	path := []*Square{current}
	for next, ok := cameFrom[current]; ok && next != nil; next, ok = cameFrom[next] {
		path = append(path, next)
		current = next
	}
	return path
}
// AStar runs the A* search from v to goal and returns the path (in reverse
// order, goal first — see reconstructPath) together with its total cost.
// It returns (nil, 0) when no path exists.
func (v *Square) AStar(goal *Square) ([]*Square, int) {
	var (
		closedSet = make(map[*Square]bool)          // squares already expanded
		openSet   = map[*Square]bool{v: true}       // frontier
		cameFrom  = make(map[*Square]*Square)       // best-known predecessor
		gScore    = map[*Square]float64{v: 0}       // cost from start
		fScore    = map[*Square]float64{v: gScore[v] + heuristicCostEstimate(v, goal)}
	)
	// NOTE(review): leftover debug statement; removing it would orphan the
	// file-level fmt import, so it is left in place.
	fmt.Print("")
	for len(openSet) > 0 {
		var current = getMin(openSet, fScore)
		if current == goal {
			return reconstructPath(cameFrom, current), int(gScore[current])
		}
		delete(openSet, current)
		closedSet[current] = true
		// Publish the distance of the node being expanded for the heuristic
		// (package-level state read by heuristicCostEstimate).
		currentDistance = distanceBetweenTwoPoints(current.Position, goal.Position)
		for _, neighbor := range current.Neighbors() {
			if neighbor == nil || closedSet[neighbor] {
				continue
			}
			GScoreTry := gScore[current] + float64(neighbor.Cost())
			neighborInOpenSet := openSet[neighbor]
			// Record the path if the neighbor is new or this route is cheaper.
			if !neighborInOpenSet || GScoreTry < gScore[neighbor] {
				cameFrom[neighbor] = current
				gScore[neighbor] = GScoreTry
				fScore[neighbor] = gScore[neighbor] + heuristicCostEstimate(neighbor, goal)
				//adding it to openSet
				openSet[neighbor] = true
			}
		}
	}
	return nil, 0
}
package gofft
import (
"math"
"math/bits"
)
// IsPow2 reports whether N is an exact power of two (1, 2, 4, 8, ...).
// Zero is not a power of two. Implemented via population count: a power of
// two has exactly one bit set.
func IsPow2(N int) bool {
	if N == 0 {
		return false
	}
	return bits.OnesCount64(uint64(N)) == 1
}
// NextPow2 returns the smallest power of 2 >= N.
// For N <= 1 — including negative N — the answer is 2^0 = 1. The previous
// implementation computed uint64(N-1) for negative N, producing a 64-bit
// shift that overflowed the result to 0.
func NextPow2(N int) int {
	if N <= 1 {
		return 1
	}
	return 1 << uint64(bits.Len64(uint64(N-1)))
}
// ZeroPad copies x into a freshly allocated slice of length N, padding the
// tail with zeros (and truncating when N < len(x)). x is never modified.
// This is a convenience helper: it allocates on every call, so avoid it in
// hot paths.
func ZeroPad(x []complex128, N int) []complex128 {
	padded := make([]complex128, N)
	for i := 0; i < len(x) && i < N; i++ {
		padded[i] = x[i]
	}
	return padded
}
// ZeroPadToNextPow2 copies x into a freshly allocated slice whose length is
// the smallest power of two >= len(x), zero-padding the tail. x is never
// modified. Convenience helper; allocates on every call.
func ZeroPadToNextPow2(x []complex128) []complex128 {
	padded := make([]complex128, NextPow2(len(x)))
	copy(padded, x)
	return padded
}
// Float64ToComplex128Array converts a float64 slice to the equivalent
// complex128 slice, using each value as the real part and 0 as the
// imaginary part.
func Float64ToComplex128Array(x []float64) []complex128 {
	out := make([]complex128, 0, len(x))
	for _, re := range x {
		out = append(out, complex(re, 0))
	}
	return out
}
// Complex128ToFloat64Array converts a complex128 slice to the equivalent
// float64 slice by keeping only the real part of each element.
func Complex128ToFloat64Array(x []complex128) []float64 {
	out := make([]float64, 0, len(x))
	for _, c := range x {
		out = append(out, real(c))
	}
	return out
}
// RoundFloat64Array rounds every entry of x in place to the nearest
// integer, with halves rounded away from zero (math.Round semantics).
func RoundFloat64Array(x []float64) {
	for i := 0; i < len(x); i++ {
		x[i] = math.Round(x[i])
	}
}
package compositor
import (
"github.com/mattkimber/cargopositor/internal/utils"
"github.com/mattkimber/gandalf/geometry"
"github.com/mattkimber/gandalf/magica"
"log"
"math"
"strings"
)
// getBounds returns the bounding box of all mask voxels (colour index 255)
// in v. When ignoreMask is true it skips the scan and returns the whole
// object as the bounds: Min = (0,0,0), Max = v.Size.
func getBounds(v *magica.VoxelObject, ignoreMask bool) geometry.Bounds {
	// Start min at the far corner and max at the origin so any mask voxel
	// tightens both.
	min := geometry.Point{X: v.Size.X, Y: v.Size.Y, Z: v.Size.Z}
	max := geometry.Point{}
	if ignoreMask {
		// Note the swap: "max" still holds the origin and "min" the size.
		return geometry.Bounds{Min: max, Max: min}
	}
	iterator := func(x, y, z int) {
		if v.Voxels[x][y][z] == 255 {
			if x < min.X {
				min.X = x
			}
			if y < min.Y {
				min.Y = y
			}
			if z < min.Z {
				min.Z = z
			}
			if x > max.X {
				max.X = x
			}
			if y > max.Y {
				max.Y = y
			}
			if z > max.Z {
				max.Z = z
			}
		}
	}
	v.Iterate(iterator)
	return geometry.Bounds{Min: min, Max: max}
}
// ProduceEmpty returns a copy of v with every mask voxel (colour index 255)
// cleared to empty — i.e. the base object with no cargo placeholder.
func ProduceEmpty(v magica.VoxelObject) (r magica.VoxelObject) {
	r = v.Copy()
	r.Iterate(func(x, y, z int) {
		if r.Voxels[x][y][z] == 255 {
			r.Voxels[x][y][z] = 0
		}
	})
	return r
}
// Identity returns an unmodified copy of the input object.
func Identity(v magica.VoxelObject) (r magica.VoxelObject) {
	return v.Copy()
}
// RotateAndTile rewrites r by sampling v rotated by angle (degrees, about
// the Z axis), offset by (xOffset, yOffset), scaled by scale, and tiled
// (wrapped with modular arithmetic) inside boundingVolume.
func RotateAndTile(v magica.VoxelObject, angle float64, xOffset, yOffset int, scale geometry.PointF, boundingVolume BoundingVolume) (r magica.VoxelObject) {
	radians := (angle * math.Pi) / 180
	// If no bounding volume was supplied default to (0,0,0)-(max, max, max)
	if (boundingVolume.Max == geometry.Point{}) {
		boundingVolume.Max = geometry.Point{X: v.Size.X, Y: v.Size.Y, Z: v.Size.Z}
	}
	// If no scale is supplied default to (1,1,1)
	if (scale == geometry.PointF{}) {
		scale = geometry.PointF{X: 1, Y: 1, Z: 1}
	}
	r = v.Copy()
	// Extent of the bounding volume on each axis, used as the tiling period.
	bvx := boundingVolume.Max.X - boundingVolume.Min.X
	bvy := boundingVolume.Max.Y - boundingVolume.Min.Y
	bvz := boundingVolume.Max.Z - boundingVolume.Min.Z
	// Clear the object
	iterator := func(x, y, z int) {
		r.Voxels[x][y][z] = 0
	}
	r.Iterate(iterator)
	// RotateAndTile the output
	iterator = func(x, y, z int) {
		// Inverse-map the destination voxel to a source voxel: rotate,
		// scale, offset, then wrap into the bounding volume.
		sx := ((bvx + xOffset + int((float64(x)*math.Cos(radians)-float64(y)*math.Sin(radians))*scale.X)) % bvx) + boundingVolume.Min.X
		sy := ((bvy + yOffset + int((float64(x)*math.Sin(radians)+float64(y)*math.Cos(radians))*scale.Y)) % bvy) + boundingVolume.Min.Y
		sz := ((z + bvz) % bvz) + boundingVolume.Min.Z
		if r.Voxels[x][y][z] == 0 && sx >= 0 && sy >= 0 && sx < v.Size.X && sy < v.Size.Y {
			r.Voxels[x][y][z] = v.Voxels[sx][sy][sz]
		}
	}
	r.Iterate(iterator)
	return r
}
// Stairstep shears the object along Z: for every m voxels travelled in x
// the copy shifts n voxels in z, backfilling intermediate z levels at step
// boundaries so the shifted surface stays connected.
func Stairstep(v magica.VoxelObject, m float64, n int) (r magica.VoxelObject) {
	r = v.Copy()
	// Clear the object
	iterator := func(x, y, z int) {
		r.Voxels[x][y][z] = 0
	}
	r.Iterate(iterator)
	// Stairstep the output
	iterator = func(x, y, z int) {
		// Target z for this column; when the step increased relative to the
		// previous column, start n lower so no gap opens at the boundary.
		step := z + int((float64(x)/m)*float64(n))
		begin := step
		if x > 0 {
			prevStep := z + int((float64(x-1)/m)*float64(n))
			if prevStep < step {
				begin -= n
			}
		}
		for s := begin; s < step+n; s++ {
			if s >= 0 && s < v.Size.Z {
				// Don't overwrite voxels already written for this column.
				if r.Voxels[x][y][s] == 0 {
					r.Voxels[x][y][s] = v.Voxels[x][y][z]
				}
			}
		}
	}
	v.Iterate(iterator)
	return
}
// AddScaled stretches src to fill dst's cargo area (the voxels with mask
// colour 255, or empty voxels when ignoreMask is set) and writes it in.
// scaleLogic blends between fit-to-area (0) and 1:1 (1) per axis; each
// destination voxel takes the modal (most frequent) non-zero source colour
// in its sample box. When overwrite is set existing voxels may be replaced;
// when maskOriginal is set untouched non-empty voxels are turned back into
// mask voxels.
func AddScaled(dst magica.VoxelObject, src magica.VoxelObject, inputRamps, outputRamps []string, scaleLogic geometry.PointF, overwrite bool, ignoreMask bool, maskOriginal bool) (r magica.VoxelObject) {
	r = dst.Copy()
	// If there is an input/output ramp, we always use the first one when scaling
	if len(inputRamps) > 0 && len(outputRamps) > 0{
		src = Recolour(src, inputRamps[0], outputRamps[0])
	}
	dstBounds := getBounds(&r, ignoreMask)
	srcBounds := geometry.Bounds{Min: geometry.Point{}, Max: geometry.Point{X: src.Size.X, Y: src.Size.Y, Z: src.Size.Z}}
	srcSize, dstSize := srcBounds.GetSize(), dstBounds.GetSize()
	// Per-axis scale factor, linearly interpolated between "fit src to dst"
	// and "no scaling" by scaleLogic.
	scale := geometry.PointF{
		X: ((float64(srcSize.X) / float64(dstSize.X+1)) * (1 - scaleLogic.X)) + scaleLogic.X,
		Y: (float64(srcSize.Y)/float64(dstSize.Y+1))*(1-scaleLogic.Y) + scaleLogic.Y,
		Z: (float64(srcSize.Z)/float64(dstSize.Z+1))*(1-scaleLogic.Z) + scaleLogic.Z,
	}
	iterator := func(x, y, z int) {
		if (ignoreMask && r.Voxels[x][y][z] == 0) || r.Voxels[x][y][z] == 255 || overwrite {
			// Sample box in src corresponding to this destination voxel.
			minX := byte(math.Floor(float64(x-dstBounds.Min.X) * scale.X))
			minY := byte(math.Floor(float64(y-dstBounds.Min.Y) * scale.Y))
			minZ := byte(math.Floor(float64(z-dstBounds.Min.Z) * scale.Z))
			maxX := byte(math.Ceil(float64((x+1)-dstBounds.Min.X) * scale.X))
			maxY := byte(math.Ceil(float64((y+1)-dstBounds.Min.Y) * scale.Y))
			maxZ := byte(math.Ceil(float64((z+1)-dstBounds.Min.Z) * scale.Z))
			// Tally non-zero colours in the box and pick the mode.
			values := map[byte]int{}
			max, modalIndex := 0, byte(0)
			for i := minX; i < maxX; i++ {
				for j := minY; j < maxY; j++ {
					for k := minZ; k < maxZ; k++ {
						if i < byte(srcBounds.Max.X) && j < byte(srcBounds.Max.Y) && k < byte(srcBounds.Max.Z) {
							c := src.Voxels[i][j][k]
							if c != 0 {
								values[c]++
							}
						}
					}
				}
			}
			for k, v := range values {
				if v > max {
					max = v
					modalIndex = k
				}
			}
			if !overwrite || modalIndex != 0 {
				r.Voxels[x][y][z] = modalIndex
			}
		} else if maskOriginal && r.Voxels[x][y][z] != 0 {
			r.Voxels[x][y][z] = 255
		}
	}
	r.Iterate(iterator)
	return r
}
// AddRepeated tiles originalSrc across the cargo area of v (mask voxels, or
// empty voxels when ignoreMask is set) up to n times (0 = unlimited),
// cycling through the recolour ramp pairs so successive items can differ in
// colour. overwrite writes over existing voxels; ignoreTruncation disables
// centring; maskOriginal converts untouched non-empty voxels back to mask.
//
// Bug fixed: when the ramp lists were non-empty but of unequal length, the
// fallback appended originalSrc to the pre-sized slice, leaving zero-value
// objects at the front, so srcObjects[i%1] selected an empty object instead
// of originalSrc. The fallback now replaces the slice outright.
func AddRepeated(v magica.VoxelObject, originalSrc magica.VoxelObject, n int, inputRamps, outputRamps []string, overwrite bool, ignoreMask bool, ignoreTruncation bool, maskOriginal bool) (r magica.VoxelObject) {
	r = v.Copy()
	dstBounds := getBounds(&r, ignoreMask)
	srcBounds := geometry.Bounds{Min: geometry.Point{}, Max: geometry.Point{X: originalSrc.Size.X, Y: originalSrc.Size.Y, Z: originalSrc.Size.Z}}
	srcSize, dstSize := srcBounds.GetSize(), dstBounds.GetSize()
	lastItem := -1
	ramps := len(inputRamps)
	// Create all the necessary recolour objects, one per ramp pair.
	srcObjects := make([]magica.VoxelObject, ramps)
	if ramps > 0 && len(inputRamps) == len(outputRamps) {
		for idx := range inputRamps {
			srcObjects[idx] = Recolour(originalSrc, inputRamps[idx], outputRamps[idx])
		}
	} else {
		// No usable ramps: fall back to the uncoloured source only.
		srcObjects = []magica.VoxelObject{originalSrc}
		ramps = 1
	}
	// How many copies fit along each axis, and the offsets that centre them.
	items := (dstSize.Y + 1) / srcSize.Y
	cols := (dstSize.X + 1) / srcSize.X
	rows := (dstSize.Z + 1) / srcSize.Z
	yOffset := ((dstSize.Y + 1) - (items * srcSize.Y)) / 2
	xOffset := ((dstSize.X) - (cols * srcSize.X)) / 2
	if ignoreTruncation {
		yOffset = 0
		xOffset = 0
	}
	var src magica.VoxelObject
	iterator := func(x, y, z int) {
		if (ignoreMask && r.Voxels[x][y][z] == 0) || r.Voxels[x][y][z] == 255 || overwrite {
			// Which repeated item this voxel belongs to.
			item := ((y - yOffset) - dstBounds.Min.Y) / srcSize.Y
			col := (dstBounds.Max.X - (x + (xOffset / 2))) / (srcSize.X + xOffset)
			row := (z - dstBounds.Min.Z) / srcSize.Z
			if item+(col*items)+(row*cols*rows) != lastItem {
				// Pick the recolour ramp for this item
				src = srcObjects[(item+(col*items)+(row*cols*rows))%ramps]
			}
			lastItem = item + (col * items) + (row * cols * rows)
			// Source voxel within the current item.
			sx := srcSize.X - 1 - (((dstBounds.Max.X) - (x + (xOffset / 2))) % (srcSize.X + xOffset))
			sy := (y - (yOffset + dstBounds.Min.Y)) % srcSize.Y
			sz := (z - dstBounds.Min.Z) % srcSize.Z
			if (n == 0 || overwrite || item+(col*items)+(row*cols*rows) < n) && ((n == 0 && ignoreTruncation) || (item < items && col < cols && row < rows)) && (y-dstBounds.Min.Y) >= yOffset {
				if sx < 0 || sx >= srcSize.X {
					r.Voxels[x][y][z] = 0
				} else {
					if !overwrite || src.Voxels[sx][sy][sz] != 0 {
						r.Voxels[x][y][z] = src.Voxels[sx][sy][sz]
					}
				}
			} else if !overwrite {
				r.Voxels[x][y][z] = 0
			}
		} else if r.Voxels[x][y][z] != 0 && maskOriginal {
			r.Voxels[x][y][z] = 255
		}
	}
	r.Iterate(iterator)
	return r
}
// Remove clips v against src: any voxel inside src's extent whose src
// colour differs from index is cleared in the copy. Voxels outside src's
// extent are left untouched.
func Remove(v magica.VoxelObject, src magica.VoxelObject, index uint8) (r magica.VoxelObject) {
	r = v.Copy()
	r.Iterate(func(x, y, z int) {
		inside := x < src.Size.X && y < src.Size.Y && z < src.Size.Z
		if inside && src.Voxels[x][y][z] != index {
			r.Voxels[x][y][z] = 0
		}
	})
	return r
}
// Ramp describes a linear mapping from one contiguous palette-index range
// (input) onto another (output), used by Recolour.
type Ramp struct {
	InputLength      float64 // span of the input range (end - start)
	OutputLength     float64 // span of the output range (end - start)
	StartIndex       int     // first input palette index
	EndIndex         int     // last input palette index (inclusive)
	OutputStartIndex int     // first output palette index
}
// Recolour remaps palette indices in v according to comma-separated lists
// of input and output ramps ("a-b" ranges). Each voxel colour inside an
// input range is linearly interpolated onto the corresponding output range;
// only the first matching ramp is applied. On a malformed specification the
// object is returned unrecoloured with a warning logged.
func Recolour(v magica.VoxelObject, inputRamp, outputRamp string) (r magica.VoxelObject) {
	r = v.Copy()
	if inputRamp == "" || outputRamp == "" {
		return r
	}
	// Deal with the old GoRender format
	if !strings.ContainsRune(inputRamp, '-') && !strings.ContainsRune(outputRamp, '-'){
		inputRamp = strings.Replace(inputRamp, ",", "-", -1)
		outputRamp = strings.Replace(outputRamp, ",", "-", -1)
	}
	inputRamps := strings.Split(inputRamp, ",")
	outputRamps := strings.Split(outputRamp, ",")
	if len(inputRamps) != len(outputRamps) {
		log.Print("WARNING: Invalid colour remap specification (ramp lengths don't match) - object not recoloured")
		return r
	}
	ramps := make([]Ramp, len(inputRamps))
	for idx, _ := range inputRamps {
		inputs, outputs := utils.SplitAndParseToInt(inputRamps[idx]), utils.SplitAndParseToInt(outputRamps[idx])
		if len(inputs) < 2 || len(outputs) < 2 {
			log.Printf("WARNING: Invalid colour remap specification %s/%s (invalid ramp length) - object not recoloured", inputRamps[idx], outputRamps[idx])
			return r
		}
		ramps[idx] = Ramp{
			InputLength: float64(inputs[1]-inputs[0]),
			OutputLength: float64(outputs[1]-outputs[0]),
			StartIndex: inputs[0],
			EndIndex: inputs[1],
			OutputStartIndex: outputs[0],
		}
	}
	iterator := func(x, y, z int) {
		c := r.Voxels[x][y][z]
		for _, rmp := range ramps {
			if c >= byte(rmp.StartIndex) && c <= byte(rmp.EndIndex) {
				// Linear interpolation of c from the input range onto the
				// output range, rounded to the nearest palette index.
				output :=rmp.OutputStartIndex + int(math.Round((float64(int(c)-rmp.StartIndex)/rmp.InputLength)*rmp.OutputLength))
				r.Voxels[x][y][z] = byte(output)
				// Only apply the first ramp we find (don't repeatedly map colours)
				break
			}
		}
	}
	r.Iterate(iterator)
	return r
}
// RotateY returns a new object holding v rotated by angle degrees about the
// Y axis. The result is resized (in X and Z) so the rotated bounding box
// fits; each destination voxel is filled by inverse-mapping to the source.
func RotateY(v magica.VoxelObject, angle float64) (r magica.VoxelObject) {
	sin, cos := math.Sin(degToRad(angle)), math.Cos(degToRad(angle))
	orgMidpointX := float64(v.Size.X) / 2
	orgMidpointZ := float64(v.Size.Z) / 2
	// Half-extents of the rotated bounding box.
	xVector := (orgMidpointX * math.Abs(cos)) + (orgMidpointZ * math.Abs(sin))
	zVector := (orgMidpointX * math.Abs(sin)) + (orgMidpointZ * math.Abs(cos))
	sizeX, sizeZ := int(math.Ceil(xVector*2)), int(math.Ceil(zVector*2))
	r = magica.VoxelObject{
		Voxels: nil,
		PaletteData: v.PaletteData,
		Size: geometry.Point{X: sizeX, Y: v.Size.Y, Z: sizeZ},
	}
	// Create the voxel array
	r.Voxels = make([][][]byte, r.Size.X)
	for x := 0; x < r.Size.X; x++ {
		r.Voxels[x] = make([][]byte, r.Size.Y)
		for y := 0; y < r.Size.Y; y++ {
			r.Voxels[x][y] = make([]byte, r.Size.Z)
		}
	}
	vMidpointX := float64(v.Size.X) / 2
	vMidpointZ := float64(v.Size.Z) / 2
	iterator := func(x, y, z int) {
		// Inverse rotation: map the destination voxel back into v's frame,
		// rotating about the centre of each object.
		fdx := float64(x) - (float64(r.Size.X) / 2)
		fdz := float64(z) - (float64(r.Size.Z) / 2)
		fdx, fdz = (fdx*cos)+(fdz*sin), (fdx*-sin)+(fdz*cos)
		dx := int(math.Ceil(fdx + vMidpointX))
		dz := int(math.Ceil(fdz + vMidpointZ))
		if dx >= 0 && dz >= 0 && dx < v.Size.X && dz < v.Size.Z {
			r.Voxels[x][y][z] = v.Voxels[dx][y][dz]
		}
	}
	r.Iterate(iterator)
	return r
}
// RotateZ returns a new object holding v rotated by angle degrees about the
// Z axis. Unlike RotateY, the pivot sits at the bottom (z = 0) rather than
// the vertical centre; the result is resized in Y and Z to fit.
func RotateZ(v magica.VoxelObject, angle float64) (r magica.VoxelObject) {
	sin, cos := math.Sin(degToRad(angle)), math.Cos(degToRad(angle))
	orgMidpointY := float64(v.Size.Y) / 2
	orgMidpointZ := float64(v.Size.Z) / 2
	// Half-extents of the rotated bounding box.
	zVector := (orgMidpointY * math.Abs(sin)) + (orgMidpointZ * math.Abs(cos))
	yVector := (orgMidpointY * math.Abs(cos)) + (orgMidpointZ * math.Abs(sin))
	sizeZ, sizeY := int(math.Ceil(zVector*2)), int(math.Ceil(yVector*2))
	r = magica.VoxelObject{
		Voxels: nil,
		PaletteData: v.PaletteData,
		Size: geometry.Point{X: v.Size.X, Y: sizeY, Z: sizeZ},
	}
	// Create the voxel array
	r.Voxels = make([][][]byte, r.Size.X)
	for x := 0; x < r.Size.X; x++ {
		r.Voxels[x] = make([][]byte, r.Size.Y)
		for y := 0; y < r.Size.Y; y++ {
			r.Voxels[x][y] = make([]byte, r.Size.Z)
		}
	}
	vMidpointY := float64(v.Size.Y) / 2
	iterator := func(x, y, z int) {
		// Inverse rotation; z is not re-centred, giving the bottom pivot.
		fdy := float64(y) - (float64(r.Size.Y) / 2)
		fdz := float64(z)
		fdy, fdz = (fdy*cos)+(fdz*sin), (fdy*-sin)+(fdz*cos)
		dy := int(math.Ceil(fdy + vMidpointY))
		dz := int(math.Ceil(fdz))
		if dy >= 0 && dz >= 0 && dy < v.Size.Y && dz < v.Size.Z {
			r.Voxels[x][y][z] = v.Voxels[x][dy][dz]
		}
	}
	r.Iterate(iterator)
	return r
}
func degToRad(angle float64) float64 {
return (angle / 180.0) * math.Pi
} | internal/compositor/compositor.go | 0.754373 | 0.540681 | compositor.go | starcoder |
package poc
import (
"encoding/binary"
"math/big"
"github.com/massnetorg/mass-core/poc/chiapos"
"github.com/massnetorg/mass-core/poc/pocutil"
)
// ChiaPlotSize returns the nominal plot size in bytes for parameter k:
// (2k+1) * 2^(k-1).
func ChiaPlotSize(k int) uint64 {
	entries := uint64(1) << (uint(k) - 1)
	return uint64(2*k+1) * entries
}
// ChiaProof wraps a chiapos proof-of-space so it satisfies the generic
// Proof interface.
type ChiaProof struct {
	pos *chiapos.ProofOfSpace
}

// NewChiaProof wraps a chiapos proof-of-space in a ChiaProof.
func NewChiaProof(pos *chiapos.ProofOfSpace) *ChiaProof {
	return &ChiaProof{pos: pos}
}

// GetChiaProof downcasts a generic Proof to *ChiaProof, returning an error
// when proof is nil or has a different concrete type.
func GetChiaProof(proof Proof) (*ChiaProof, error) {
	if proof == nil {
		return nil, ErrProofNilItf
	}
	chia, ok := proof.(*ChiaProof)
	if !ok {
		return nil, ErrProofType
	}
	return chia, nil
}

// MustGetChiaProof downcasts a generic Proof to *ChiaProof, panicking on a
// type mismatch or nil interface.
func MustGetChiaProof(proof Proof) *ChiaProof {
	return proof.(*ChiaProof)
}
// GetChiaPoolPublicKey extracts the pool public key from a chia proof,
// erroring when proof is not a ChiaProof or wraps no proof-of-space.
func GetChiaPoolPublicKey(proof Proof) (*chiapos.G1Element, error) {
	chiaProof, err := GetChiaProof(proof)
	if err != nil {
		return nil, err
	}
	if chiaProof.pos == nil {
		return nil, ErrProofNilChia
	}
	return chiaProof.pos.PoolPublicKey, nil
}

// GetChiaPlotID returns the 32-byte plot ID of the wrapped proof-of-space,
// erroring when proof is not a ChiaProof or wraps no proof-of-space.
func GetChiaPlotID(proof Proof) ([32]byte, error) {
	chiaProof, err := GetChiaProof(proof)
	if err != nil {
		return [32]byte{}, err
	}
	if chiaProof.pos == nil {
		return [32]byte{}, ErrProofNilChia
	}
	return chiaProof.pos.GetID()
}

// MustGetChiaPoolPublicKey is like GetChiaPoolPublicKey but panics instead
// of returning an error.
func MustGetChiaPoolPublicKey(proof Proof) *chiapos.G1Element {
	return MustGetChiaProof(proof).pos.PoolPublicKey
}
// Type identifies this proof as a chia proof.
func (proof *ChiaProof) Type() ProofType {
	return ProofTypeChia
}

// BitLength returns the chia plot k-size; when no proof-of-space is
// attached it falls back to the numeric value of ProofTypeChia.
func (proof *ChiaProof) BitLength() int {
	if proof.pos == nil {
		return int(ProofTypeChia)
	}
	return int(proof.pos.KSize)
}
// Encode encodes proof to N + 1 bytes:
// | Chia PoS | ProofTypeChia |
// | N bytes  | 1 byte        |
// It returns nil when no proof-of-space is attached.
func (proof *ChiaProof) Encode() []byte {
	if proof.pos == nil {
		return nil
	}
	bs := proof.pos.Encode()
	data := make([]byte, len(bs)+1)
	copy(data, bs)
	// Trailing type tag lets decoders dispatch on the proof kind.
	data[len(data)-1] = uint8(ProofTypeChia)
	return data
}

// Decode decodes proof from an N + 1 bytes slice:
// | Chia PoS | ProofTypeChia |
// | N bytes  | 1 byte        |
// The existing pos is only replaced once the payload decodes successfully.
func (proof *ChiaProof) Decode(data []byte) error {
	if len(data) < 1 {
		return ErrProofDecodeDataSize
	}
	if data[len(data)-1] != uint8(ProofTypeChia) {
		return ErrProofInvalidBitLength
	}
	chiaPos := &chiapos.ProofOfSpace{}
	if err := chiaPos.Decode(data[:len(data)-1]); err != nil {
		return err
	}
	proof.pos = chiaPos
	return nil
}
// Quality computes the proof quality for the given slot and height. Any
// failure (missing proof-of-space, quality retrieval error) yields 0 rather
// than an error, so callers treat such proofs as worthless.
func (proof *ChiaProof) Quality(slot, height uint64) *big.Int {
	if proof.pos == nil {
		return big.NewInt(0)
	}
	chiaQuality, err := proof.pos.GetQuality()
	if err != nil {
		return big.NewInt(0)
	}
	hashVal := HashValChia(chiaQuality, slot, height)
	q1 := Q1FactorChia(proof.pos.KSize)
	return GetQuality(q1, hashVal)
}

// Verify checks the wrapped proof-of-space against the challenge by asking
// chiapos for a verified quality string; an empty quality means the proof
// does not satisfy the challenge. The first and third parameters are unused
// for chia proofs (they exist to match the generic Proof interface).
// It returns nil when the proof is verified.
func (proof *ChiaProof) Verify(useless, challenge pocutil.Hash, filter bool) error {
	if proof.pos == nil {
		return ErrProofNilChia
	}
	quality, err := proof.pos.GetVerifiedQuality(challenge)
	if err != nil {
		return err
	}
	if len(quality) == 0 {
		return ErrProofChiaNoQuality
	}
	return nil
}
// VerifiedQuality verifies the proof against the challenge and, on success,
// returns its quality for the given slot and height.
func (proof *ChiaProof) VerifiedQuality(useless, challenge pocutil.Hash, filter bool, slot, height uint64) (*big.Int, error) {
	if err := proof.Verify(useless, challenge, filter); err != nil {
		return nil, err
	}
	return proof.Quality(slot, height), nil
}

// Pos exposes the wrapped chiapos proof-of-space (may be nil).
func (proof *ChiaProof) Pos() *chiapos.ProofOfSpace {
	return proof.pos
}
// HashValChia returns SHA256(slot || chia_quality || height), with slot and
// height encoded as 8-byte little-endian integers.
func HashValChia(chiaQuality []byte, slot, height uint64) pocutil.Hash {
	data := make([]byte, len(chiaQuality)+8*2)
	binary.LittleEndian.PutUint64(data, slot)
	copy(data[8:], chiaQuality)
	binary.LittleEndian.PutUint64(data[8+len(chiaQuality):], height)
	return pocutil.SHA256(data)
}

// Q1FactorChia returns the quality scaling factor for plot size k:
// 2^(k-1) * 4*(2k+1) * QualityConstantMASSIP0002 * QualityConstantMASSValidity.
func Q1FactorChia(k uint8) *big.Float {
	a := big.NewFloat(float64(int64(1) << (k - 1)))
	a.Mul(a, big.NewFloat(4*float64(2*k+1)))
	return a.Mul(a, big.NewFloat(QualityConstantMASSIP0002*QualityConstantMASSValidity))
}
package diff
import (
"fmt"
"strings"
"github.com/circl-dev/spec"
)
// CompareEnums returns added, deleted enum values
//
// Both enum lists are stringified with %v before diffing; additions and
// deletions are each reported as one comma-joined TypeDiff.
func CompareEnums(left, right []interface{}) []TypeDiff {
	toStrings := func(values []interface{}) []string {
		out := make([]string, 0, len(values))
		for _, v := range values {
			out = append(out, fmt.Sprintf("%v", v))
		}
		return out
	}

	added, deleted, _ := fromStringArray(toStrings(left)).DiffsTo(toStrings(right))

	diffs := []TypeDiff{}
	if len(added) > 0 {
		diffs = append(diffs, TypeDiff{Change: AddedEnumValue, Description: strings.Join(added, ",")})
	}
	if len(deleted) > 0 {
		diffs = append(diffs, TypeDiff{Change: DeletedEnumValue, Description: strings.Join(deleted, ",")})
	}
	return diffs
}
// CompareProperties recursive property comparison
//
// Walks the properties of schema1 (resolved via getRefFn1/getRefFn2):
// properties missing from schema2 are reported as DeletedProperty,
// properties present in both are checked for required-ness changes and
// recursed into via cmp, and properties only present in schema2 are
// reported as AddedProperty.
func CompareProperties(location DifferenceLocation, schema1 *spec.Schema, schema2 *spec.Schema, getRefFn1 SchemaFromRefFn, getRefFn2 SchemaFromRefFn, cmp CompareSchemaFn) []SpecDifference {
	propDiffs := []SpecDifference{}

	if schema1.Properties == nil && schema2.Properties == nil {
		return propDiffs
	}

	schema1Props := propertiesFor(schema1, getRefFn1)
	schema2Props := propertiesFor(schema2, getRefFn2)
	// find deleted and changed properties
	for eachProp1Name, eachProp1 := range schema1Props {
		eachProp1 := eachProp1 // per-iteration copy (pre-Go 1.22 loop-variable semantics)
		childLoc := addChildDiffNode(location, eachProp1Name, eachProp1.Schema)

		if eachProp2, ok := schema2Props[eachProp1Name]; ok {
			diffs := CheckToFromRequired(eachProp1.Required, eachProp2.Required)
			if len(diffs) > 0 {
				for _, diff := range diffs {
					propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: diff.Change})
				}
			}
			cmp(childLoc, eachProp1.Schema, eachProp2.Schema)
		} else {
			propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: DeletedProperty})
		}
	}
	// find added properties
	for eachProp2Name, eachProp2 := range schema2.Properties {
		eachProp2 := eachProp2 // per-iteration copy; &eachProp2 is taken below
		if _, ok := schema1.Properties[eachProp2Name]; !ok {
			childLoc := addChildDiffNode(location, eachProp2Name, &eachProp2)
			propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedProperty})
		}
	}
	return propDiffs
}
// CompareFloatValues compares a float data item
//
// When both pointers are set, a change reports ifGreaterCode/ifLessCode;
// when exactly one is set, the constraint is reported as added or deleted.
func CompareFloatValues(fieldName string, val1 *float64, val2 *float64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
	diffs := []TypeDiff{}
	switch {
	case val1 != nil && val2 != nil:
		switch {
		case *val2 > *val1:
			diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
		case *val2 < *val1:
			diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
		}
	case val1 != nil:
		diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val1)})
	case val2 != nil:
		diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val2)})
	}
	return diffs
}
// CompareIntValues compares two int data items
//
// When both pointers are set, a change reports ifGreaterCode/ifLessCode;
// when exactly one is set, the constraint is reported as added or deleted.
// NOTE(review): near-duplicate of CompareFloatValues — a generic helper
// could fold the two together if the module's Go version permits.
func CompareIntValues(fieldName string, val1 *int64, val2 *int64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
	diffs := []TypeDiff{}
	if val1 != nil && val2 != nil {
		if *val2 > *val1 {
			diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
		} else if *val2 < *val1 {
			diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
		}
	} else {
		// Pointer comparison: true when exactly one side is nil.
		if val1 != val2 {
			if val1 != nil {
				diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val1)})
			} else {
				diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val2)})
			}
		}
	}
	return diffs
}
// CheckToFromPrimitiveType check for diff to or from a primitive
//
// Appends a ChangedType diff when exactly one of the two types is a
// primitive (primitive became object or vice versa); otherwise diffs is
// returned unchanged.
func CheckToFromPrimitiveType(diffs []TypeDiff, type1, type2 interface{}) []TypeDiff {

	type1IsPrimitive := isPrimitive(type1)
	type2IsPrimitive := isPrimitive(type2)

	// Primitive to Obj or Obj to Primitive
	if type1IsPrimitive != type2IsPrimitive {
		typeStr1, isarray1 := getSchemaType(type1)
		typeStr2, isarray2 := getSchemaType(type2)
		return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: formatTypeString(typeStr1, isarray1), ToType: formatTypeString(typeStr2, isarray2)})
	}

	return diffs
}
// CheckRefChange has the property ref changed
//
// Reports RefTargetChanged when both sides are $refs to different
// definitions, and ChangedType when exactly one side is a $ref.
func CheckRefChange(diffs []TypeDiff, type1, type2 interface{}) (diffReturn []TypeDiff) {
	diffReturn = diffs
	isRef1, isRef2 := isRefType(type1), isRefType(type2)
	switch {
	case isRef1 && isRef2:
		// both refs but to different objects (TODO detect renamed object)
		if definitionFromRef(getRef(type1)) != definitionFromRef(getRef(type2)) {
			diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: RefTargetChanged, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
		}
	case isRef1 != isRef2:
		diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
	}
	return
}
// checkNumericTypeChanges checks for changes to or from a numeric type
//
// When both types are numeric, flips of the exclusive-bound flags are
// reported as widened/narrowed diffs; only when no flag changed are the
// Minimum/Maximum values themselves compared.
func checkNumericTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
	// Number
	_, type1IsNumeric := numberWideness[type1.Type[0]]
	_, type2IsNumeric := numberWideness[type2.Type[0]]

	if type1IsNumeric && type2IsNumeric {
		foundDiff := false
		if type1.ExclusiveMaximum && !type2.ExclusiveMaximum {
			diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Maximum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
			foundDiff = true
		}
		if !type1.ExclusiveMaximum && type2.ExclusiveMaximum {
			diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Maximum Added:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
			foundDiff = true
		}
		if type1.ExclusiveMinimum && !type2.ExclusiveMinimum {
			// Fix: this description previously printed the ExclusiveMaximum
			// fields (copy-paste error).
			diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Minimum Removed:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
			foundDiff = true
		}
		if !type1.ExclusiveMinimum && type2.ExclusiveMinimum {
			diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Minimum Added:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
			foundDiff = true
		}
		if !foundDiff {
			maxDiffs := CompareFloatValues("Maximum", type1.Maximum, type2.Maximum, WidenedType, NarrowedType)
			diffs = append(diffs, maxDiffs...)
			minDiffs := CompareFloatValues("Minimum", type1.Minimum, type2.Minimum, NarrowedType, WidenedType)
			diffs = append(diffs, minDiffs...)
		}
	}
	return diffs
}
// CheckStringTypeChanges checks for changes to or from a string type
//
// When both types are strings, compares MinLength/MaxLength constraints,
// pattern changes, and enum values.
func CheckStringTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
	// string changes
	if type1.Type[0] == StringType &&
		type2.Type[0] == StringType {
		minLengthDiffs := CompareIntValues("MinLength", type1.MinLength, type2.MinLength, NarrowedType, WidenedType)
		diffs = append(diffs, minLengthDiffs...)
		// Fix: this previously compared the MinLength fields a second time
		// (copy-paste error) — MaxLength changes were never detected.
		maxLengthDiffs := CompareIntValues("MaxLength", type1.MaxLength, type2.MaxLength, WidenedType, NarrowedType)
		diffs = append(diffs, maxLengthDiffs...)
		if type1.Pattern != type2.Pattern {
			diffs = addTypeDiff(diffs, TypeDiff{Change: ChangedType, Description: fmt.Sprintf("Pattern Changed:%s->%s", type1.Pattern, type2.Pattern)})
		}
		// The enclosing condition already guarantees type1 is a string; the
		// redundant re-check was removed.
		// NOTE(review): enums added in type2 when type1 had none are not
		// reported — confirm whether that is intended.
		if len(type1.Enum) > 0 {
			enumDiffs := CompareEnums(type1.Enum, type2.Enum)
			diffs = append(diffs, enumDiffs...)
		}
	}
	return diffs
}
// CheckToFromRequired checks for changes to or from a required property
//
// Returns a single diff (optional->required or required->optional) when
// the flags differ, and nothing otherwise.
func CheckToFromRequired(required1, required2 bool) (diffs []TypeDiff) {
	if required1 == required2 {
		return diffs
	}
	change := ChangedOptionalToRequired
	if required1 {
		change = ChangedRequiredToOptional
	}
	return addTypeDiff(diffs, TypeDiff{Change: change})
}
const objType = "object"

// getTypeHierarchyChange classifies a type1 -> type2 transition as
// narrowed, widened, compatible, or simply changed. An empty type name is
// treated as "object".
func getTypeHierarchyChange(type1, type2 string) TypeDiff {
	orObject := func(t string) string {
		if t == "" {
			return objType
		}
		return t
	}
	diffDescription := fmt.Sprintf("%s -> %s", orObject(type1), orObject(type2))

	str1, str2 := isStringType(type1), isStringType(type2)
	switch {
	case str1 && !str2:
		return TypeDiff{Change: NarrowedType, Description: diffDescription}
	case !str1 && str2:
		return TypeDiff{Change: WidenedType, Description: diffDescription}
	}

	wideness1, numeric1 := numberWideness[type1]
	wideness2, numeric2 := numberWideness[type2]
	if numeric1 && numeric2 {
		switch {
		case wideness1 == wideness2:
			return TypeDiff{Change: ChangedToCompatibleType, Description: diffDescription}
		case wideness1 > wideness2:
			return TypeDiff{Change: NarrowedType, Description: diffDescription}
		default:
			return TypeDiff{Change: WidenedType, Description: diffDescription}
		}
	}

	return TypeDiff{Change: ChangedType, Description: diffDescription}
}
func isRefType(item interface{}) bool {
switch s := item.(type) {
case spec.Refable:
return s.Ref.String() != ""
case *spec.Schema:
return s.Ref.String() != ""
case *spec.SchemaProps:
return s.Ref.String() != ""
case *spec.SimpleSchema:
return false
default:
return false
}
} | cmd/swagger/commands/diff/checks.go | 0.639849 | 0.439988 | checks.go | starcoder |
package rsmt2d
import (
"errors"
"math"
)
// dataSquare stores all data for an original data square (ODS) or extended
// data square (EDS). Data is duplicated in both row-major and column-major
// order in order to be able to provide zero-allocation column slices.
type dataSquare struct {
	squareRow [][][]byte // row-major
	squareCol [][][]byte // col-major
	width uint // side length of the square, in chunks
	chunkSize uint // size of every chunk, in bytes
	rowRoots [][]byte // lazily cached row Merkle roots; nil until computed
	colRoots [][]byte // lazily cached column Merkle roots; nil until computed
	createTreeFn TreeConstructorFn
}
// newDataSquare builds a dataSquare from data, which must contain a
// perfect-square number (> 0) of equally-sized chunks in row-major order.
func newDataSquare(data [][]byte, treeCreator TreeConstructorFn) (*dataSquare, error) {
	// Fix: guard empty input. Previously len(data) == 0 passed the square
	// check (0*0 == 0) and then data[0] panicked with index out of range.
	if len(data) == 0 {
		return nil, errors.New("data must contain at least one chunk")
	}
	width := int(math.Ceil(math.Sqrt(float64(len(data)))))
	if width*width != len(data) {
		return nil, errors.New("number of chunks must be a square number")
	}

	chunkSize := len(data[0])

	// Row-major view: rows alias the caller's slice directly.
	squareRow := make([][][]byte, width)
	for i := 0; i < width; i++ {
		squareRow[i] = data[i*width : i*width+width]

		for j := 0; j < width; j++ {
			if len(squareRow[i][j]) != chunkSize {
				return nil, errors.New("all chunks must be of equal size")
			}
		}
	}

	// Column-major mirror of the same chunks.
	squareCol := make([][][]byte, width)
	for j := 0; j < width; j++ {
		squareCol[j] = make([][]byte, width)
		for i := 0; i < width; i++ {
			squareCol[j][i] = data[i*width+j]
		}
	}

	return &dataSquare{
		squareRow: squareRow,
		squareCol: squareCol,
		width: uint(width),
		chunkSize: uint(chunkSize),
		createTreeFn: treeCreator,
	}, nil
}
// extendSquare grows the square by extendedWidth rows and columns, filling
// every new cell with fillerChunk. Existing data keeps its position in the
// top-left corner; cached roots are invalidated.
func (ds *dataSquare) extendSquare(extendedWidth uint, fillerChunk []byte) error {
	if uint(len(fillerChunk)) != ds.chunkSize {
		return errors.New("filler chunk size does not match data square chunk size")
	}

	newWidth := ds.width + extendedWidth
	newSquareRow := make([][][]byte, newWidth)

	fillerExtendedRow := make([][]byte, extendedWidth)
	for i := uint(0); i < extendedWidth; i++ {
		fillerExtendedRow[i] = fillerChunk
	}

	fillerRow := make([][]byte, newWidth)
	for i := uint(0); i < newWidth; i++ {
		fillerRow[i] = fillerChunk
	}

	// Extend existing rows with filler chunks. Fix: allocate a fresh backing
	// slice per row — the previous code reused one scratch slice and only
	// worked because append happened to reallocate on every iteration
	// (len == cap), which is fragile.
	for i := uint(0); i < ds.width; i++ {
		row := make([][]byte, ds.width, newWidth)
		copy(row, ds.squareRow[i])
		newSquareRow[i] = append(row, fillerExtendedRow...)
	}

	// The added rows are entirely filler.
	for i := ds.width; i < newWidth; i++ {
		newSquareRow[i] = make([][]byte, newWidth)
		copy(newSquareRow[i], fillerRow)
	}
	ds.squareRow = newSquareRow

	// Rebuild the column-major mirror from the new rows.
	newSquareCol := make([][][]byte, newWidth)
	for j := uint(0); j < newWidth; j++ {
		newSquareCol[j] = make([][]byte, newWidth)
		for i := uint(0); i < newWidth; i++ {
			newSquareCol[j][i] = newSquareRow[i][j]
		}
	}
	ds.squareCol = newSquareCol
	ds.width = newWidth
	ds.resetRoots()
	return nil
}
// rowSlice returns the chunks of row x from column y through y+length-1.
// The result aliases internal storage — do not modify it; use setCell or
// setRowSlice instead.
func (ds *dataSquare) rowSlice(x uint, y uint, length uint) [][]byte {
	return ds.squareRow[x][y : y+length]
}
// row returns a full row slice (all ds.width chunks of row x).
// Do not modify this slice directly, instead use setCell.
func (ds *dataSquare) row(x uint) [][]byte {
	return ds.rowSlice(x, 0, ds.width)
}
// setRowSlice overwrites len(newRow) chunks of row x starting at column y,
// keeping the column-major mirror in sync and invalidating cached roots.
// All chunks are validated before any mutation takes place, so a size
// error leaves the square untouched.
func (ds *dataSquare) setRowSlice(x uint, y uint, newRow [][]byte) error {
	for i := uint(0); i < uint(len(newRow)); i++ {
		if len(newRow[i]) != int(ds.chunkSize) {
			return errors.New("invalid chunk size")
		}
	}

	for i := uint(0); i < uint(len(newRow)); i++ {
		ds.squareRow[x][y+i] = newRow[i]
		ds.squareCol[y+i][x] = newRow[i]
	}
	ds.resetRoots()
	return nil
}
// colSlice returns the chunks of column y from row x through x+length-1.
// The result aliases internal storage — do not modify it.
func (ds *dataSquare) colSlice(x uint, y uint, length uint) [][]byte {
	return ds.squareCol[y][x : x+length]
}
// col returns a full column slice (all ds.width chunks of column y).
// Do not modify this slice directly, instead use setCell.
func (ds *dataSquare) col(y uint) [][]byte {
	return ds.colSlice(0, y, ds.width)
}
// setColSlice overwrites len(newCol) chunks of column y starting at row x,
// keeping the row-major mirror in sync and invalidating cached roots.
// All chunks are validated before any mutation takes place.
func (ds *dataSquare) setColSlice(x uint, y uint, newCol [][]byte) error {
	for i := uint(0); i < uint(len(newCol)); i++ {
		if len(newCol[i]) != int(ds.chunkSize) {
			return errors.New("invalid chunk size")
		}
	}

	for i := uint(0); i < uint(len(newCol)); i++ {
		ds.squareRow[x+i][y] = newCol[i]
		ds.squareCol[y][x+i] = newCol[i]
	}
	ds.resetRoots()
	return nil
}
// resetRoots drops the cached row/column Merkle roots; they will be
// recomputed lazily on the next getRowRoot(s)/getColRoot(s) call.
func (ds *dataSquare) resetRoots() {
	ds.rowRoots = nil
	ds.colRoots = nil
}
// computeRoots computes and caches the Merkle roots of every row and
// column.
func (ds *dataSquare) computeRoots() {
	rowRoots := make([][]byte, ds.width)
	colRoots := make([][]byte, ds.width)
	for i := uint(0); i < ds.width; i++ {
		rowRoots[i] = ds.getRowRoot(i)
		colRoots[i] = ds.getColRoot(i)
	}
	ds.rowRoots = rowRoots
	ds.colRoots = colRoots
}
// getRowRoots returns the Merkle roots of all the rows in the square,
// computing and caching them on first use.
func (ds *dataSquare) getRowRoots() [][]byte {
	if ds.rowRoots == nil {
		ds.computeRoots()
	}
	return ds.rowRoots
}
// getRowRoot returns the Merkle root of row x. It returns the cached root
// when one is present; otherwise it computes the root on the fly without
// populating the cache (use getRowRoots to fill the cache).
func (ds *dataSquare) getRowRoot(x uint) []byte {
	if ds.rowRoots != nil {
		return ds.rowRoots[x]
	}

	tree := ds.createTreeFn()
	for i, d := range ds.row(x) {
		tree.Push(d, SquareIndex{Cell: uint(i), Axis: x})
	}

	return tree.Root()
}
// getColRoots returns the Merkle roots of all the columns in the square,
// computing and caching them on first use.
func (ds *dataSquare) getColRoots() [][]byte {
	if ds.colRoots == nil {
		ds.computeRoots()
	}
	return ds.colRoots
}
// getColRoot returns the Merkle root of column y (the previous comment
// said "row"). It returns the cached root when one is present; otherwise
// it computes the root on the fly without populating the cache.
func (ds *dataSquare) getColRoot(y uint) []byte {
	if ds.colRoots != nil {
		return ds.colRoots[y]
	}

	tree := ds.createTreeFn()
	for i, d := range ds.col(y) {
		tree.Push(d, SquareIndex{Axis: y, Cell: uint(i)})
	}

	return tree.Root()
}
// getCell returns a copy of the chunk at (x, y); callers therefore cannot
// mutate the square's internal storage through the returned slice.
func (ds *dataSquare) getCell(x uint, y uint) []byte {
	cell := make([]byte, ds.chunkSize)
	copy(cell, ds.squareRow[x][y])
	return cell
}
// setCell stores newChunk (without copying) at (x, y) in both views and
// invalidates cached roots.
// NOTE(review): unlike setRowSlice/setColSlice, the chunk size is not
// validated here — confirm whether that is intended.
func (ds *dataSquare) setCell(x uint, y uint, newChunk []byte) {
	ds.squareRow[x][y] = newChunk
	ds.squareCol[y][x] = newChunk
	ds.resetRoots()
}
func (ds *dataSquare) flattened() [][]byte {
flattened := [][]byte(nil)
for _, data := range ds.squareRow {
flattened = append(flattened, data...)
}
return flattened
} | datasquare.go | 0.712032 | 0.51013 | datasquare.go | starcoder |
package caf
import (
"encoding/binary"
"fmt"
"io"
"time"
)
// AudioDescChunk represents the CAF Audio Description chunk.
// NOTE(review): currently an empty placeholder — the description fields
// are stored directly on Decoder instead.
type AudioDescChunk struct {
}
/*
Decoder
CAF files begin with a file header, which identifies the file type and the CAF version,
followed by a series of chunks. A chunk consists of a header, which defines the type of the chunk and
indicates the size of its data section, followed by the chunk data.
The nature and format of the data is specific to each type of chunk.
The only two chunk types required for every CAF file are the Audio Data chunk and the Audio Description chunk,
which specifies the audio data format.
The Audio Description chunk must be the first chunk following the file header.
The Audio Data chunk can appear anywhere else in the file, unless the size of its data section has not been determined.
In that case, the size field in the Audio Data chunk header is set to -1 and the Audio Data chunk must come last in the file
so that the end of the audio data chunk is the same as the end of the file.
This placement allows you to determine the data section size when that information is not available in the size field.
Audio is stored in the Audio Data chunk as a sequential series of packets. An audio packet in a CAF file contains one or more frames of audio data.
Every chunk consists of a chunk header followed by a data section. Chunk headers contain two fields:
* A four-character code indicating the chunk’s type
* A number indicating the chunk size in bytes
The format of the data in a chunk depends on the chunk type.
It consists of a series of sections, typically called fields.
The format of the audio data depends on the data type. All of the other fields in a CAF file are in big-endian (network) byte order.
*/
type Decoder struct {
	// r is the underlying reader the CAF stream is decoded from.
	r io.Reader
	// Ch chan *TBD
	// Format: the file type. This value must be set to 'caff'.
	// You should consider only files with the Type field set to 'caff' to be valid CAF files.
	Format [4]byte
	// Version: The file version. For CAF files conforming to this specification, the version must be set to 1.
	// If Apple releases a substantial revision of this specification, files compliant with that revision will have their Version
	// field set to a number greater than 1.
	Version uint16
	// Flags reserved by Apple for future use. For CAF v1 files, must be set to 0. You should ignore any value of this field you don’t understand,
	// and you should accept the file as a valid CAF file as long as the version and file type fields are valid.
	Flags uint16
	// The number of sample frames per second of the data. You can combine this value with the frames per packet to determine the amount of time represented by a packet. This value must be nonzero.
	SampleRate float64
	// A four-character code indicating the general kind of data in the stream.
	FormatID [4]byte
	// Flags specific to each format. May be set to 0 to indicate no format flags.
	// See the CAF specification for the flag values defined for linear PCM, MPEG-4 AAC, and AC-3.
	FormatFlags uint32
	// The number of bytes in a packet of data. For formats with a variable packet size,
	// this field is set to 0. In that case, the file must include a Packet Table chunk Packet Table Chunk.
	// Packets are always aligned to a byte boundary. For an example of an Audio Description chunk for a format with a variable packet size
	BytesPerPacket uint32
	// The number of sample frames in each packet of data. For compressed formats,
	// this field indicates the number of frames encoded in each packet. For formats with a variable number of frames per packet,
	// this field is set to 0 and the file must include a Packet Table chunk Packet Table Chunk.
	FramesPerPacket uint32
	// The number of channels in each frame of data. This value must be nonzero.
	ChannelsPerFrame uint32
	// The number of bits of sample data for each channel in a frame of data.
	// This field must be set to 0 if the data format (for instance any compressed format) does not contain separate samples for each channel
	BitsPerChannel uint32
	// Size of the audio data.
	// A size value of -1 indicates that the size of the data section for this chunk is unknown. In this case, the Audio Data chunk must appear last in the file
	// so that the end of the Audio Data chunk is the same as the end of the file.
	// This placement allows you to determine the data section size.
	AudioDataSize int64
}
// String implements the stringer interface.
// Fix: the first segment previously had no trailing separator, so the
// output ran the format ID straight into the channel count
// (e.g. "... - lpcm2 channels ...").
func (d *Decoder) String() string {
	out := fmt.Sprintf("Format: %s - %s - ", string(d.Format[:]), string(d.FormatID[:]))
	out += fmt.Sprintf("%d channels @ %d - ", d.ChannelsPerFrame, int(d.SampleRate))
	out += fmt.Sprintf("data size: %d", d.AudioDataSize)
	return out
}
// Parse reads the file content and store it.
// It validates the 'caff' magic and the version, requires the Audio
// Description chunk to immediately follow the header (as the CAF spec
// mandates), then consumes the remaining chunks until EOF.
func (d *Decoder) Parse() error {
	var err error

	// File header
	if err = d.Read(&d.Format); err != nil {
		return err
	}
	if d.Format != fileHeaderID {
		return fmt.Errorf("%s %s", string(d.Format[:]), ErrFmtNotSupported)
	}
	if err = d.Read(&d.Version); err != nil {
		return err
	}
	if d.Version > 1 {
		// Fix: Version is a uint16, so %d is the correct verb (was %s,
		// which garbled the message).
		return fmt.Errorf("CAF v%d - %s", d.Version, ErrFmtNotSupported)
	}
	// ignore the flags value
	if err = d.Read(&d.Flags); err != nil {
		return err
	}

	// The Audio Description chunk is required and must appear in a CAF file
	// immediately following the file header.
	cType, _, err := d.chunkHeader()
	if err != nil {
		return err
	}
	if cType != StreamDescriptionChunkID {
		return fmt.Errorf("%s - Expected description chunk", ErrUnexpectedData)
	}
	if err := d.parseDescChunk(); err != nil {
		return err
	}

	// parse the actual content
	for err == nil {
		err = d.parseChunk()
	}
	if err != io.EOF {
		return err
	}
	return nil
}
// parseDescChunk parses the Audio Description chunk that immediately
// follows the file header, filling the format fields of the decoder in
// the order mandated by the CAF layout.
func (d *Decoder) parseDescChunk() error {
	fields := []interface{}{
		&d.SampleRate,
		&d.FormatID,
		&d.FormatFlags,
		&d.BytesPerPacket,
		&d.FramesPerPacket,
		&d.ChannelsPerFrame,
		&d.BitsPerChannel,
	}
	for _, field := range fields {
		if err := d.Read(field); err != nil {
			return err
		}
	}
	return nil
}
// Duration returns the total duration of the decoded audio.
// NOTE(review): not implemented — it always returns 0. The commented-out
// lines sketch two candidate formulas that reference fields not present on
// this type; confirm the intended computation before enabling.
func (d *Decoder) Duration() time.Duration {
	//duration := time.Duration((float64(p.Size) / float64(p.AvgBytesPerSec)) * float64(time.Second))
	//duration := time.Duration(float64(p.NumSampleFrames) / float64(p.SampleRate) * float64(time.Second))
	return 0
}
// chunkHeader reads the next chunk header: a four-character chunk type
// followed by the big-endian int64 size of the chunk's data section.
func (d *Decoder) chunkHeader() ([4]byte, int64, error) {
	var cType [4]byte
	if err := d.Read(&cType); err != nil {
		return cType, 0, err
	}
	var cSize int64
	if err := d.Read(&cSize); err != nil {
		return cType, 0, err
	}
	return cType, cSize, nil
}
// parseChunk reads one chunk header and consumes the chunk's data section.
// The Audio Data chunk's size is recorded and its payload skipped; unknown
// chunk types are read and discarded.
func (d *Decoder) parseChunk() error {
	cType, cSize, err := d.chunkHeader()
	if err != nil {
		return err
	}

	switch cType {
	case AudioDataChunkID:
		d.AudioDataSize = cSize
		// TODO: the data section begins with an editCount uint32
		// (modification counter) followed by the actual audio data.
		// A cSize of -1 means "unknown size, runs to EOF"; the skip loop
		// below is then a no-op and the next header read hits EOF.
		buf := make([]byte, 4000) // single scratch buffer, reused each pass
		bytesToSkip := cSize
		for bytesToSkip > 0 {
			readSize := bytesToSkip
			if readSize > 4000 {
				readSize = 4000
			}
			if err := binary.Read(d.r, binary.LittleEndian, buf[:readSize]); err != nil {
				// Fix: propagate the read error — it was previously
				// swallowed by `return nil`, hiding truncated files.
				return err
			}
			bytesToSkip -= readSize
		}
	default:
		// NOTE(review): debug print to stdout; consider removing or routing
		// through a logger.
		fmt.Println(string(cType[:]))
		buf := make([]byte, cSize)
		return d.Read(buf)
	}
	return nil
}
// ReadByte reads a single byte from the underlying reader.
// Byte order is irrelevant for one byte; BigEndian is passed for
// consistency with the rest of the decoder.
func (d *Decoder) ReadByte() (byte, error) {
	var b byte
	err := binary.Read(d.r, binary.BigEndian, &b)
	return b, err
}
// read reads n bytes from the parser's reader and stores them into the provided dst,
// which must be a pointer to a fixed-size value.
func (d *Decoder) Read(dst interface{}) error {
return binary.Read(d.r, binary.BigEndian, dst)
} | caf/decoder.go | 0.683736 | 0.745838 | decoder.go | starcoder |
package main
import (
"fmt"
"strings"
"github.com/andreaskoch/togglapi/date"
"github.com/andreaskoch/togglcsv/toggl"
)
// The TimeRecordMapper interface provides functions for mapping CSV records to time records and vice versa.
// A "row" is a []string whose values follow the column order reported by
// GetColumnNames.
type TimeRecordMapper interface {
	// GetTimeRecords returns a list of time records for the given CSV table rows.
	GetTimeRecords(rows [][]string) ([]toggl.TimeRecord, error)

	// GetTimeRecord returns a TimeRecord model from an CSV row.
	GetTimeRecord(row []string) (toggl.TimeRecord, error)

	// GetColumnNames returns the names of the CSV columns.
	GetColumnNames() []string

	// GetRow returns an CSV row for the given TimeRecord model.
	GetRow(timeRecord toggl.TimeRecord) []string
}
// NewCSVTimeRecordMapper returns a mapper that converts CSV rows to
// TimeRecord models and vice versa, using the given dateFormatter for
// parsing and formatting timestamps. The columnNames order below defines
// the CSV schema expected by GetTimeRecord and produced by GetRow.
func NewCSVTimeRecordMapper(dateFormatter date.Formatter) TimeRecordMapper {
	return &CSVTimeRecordMapper{
		dateFormatter: dateFormatter,
		columnNames: []string{"Start", "Stop", "Workspace Name", "Project Name", "Client Name", "Tag(s)", "Description"},
		tagsSeparator: ",",
	}
}
// CSVTimeRecordMapper converts CSV time records into TimeRecord models.
type CSVTimeRecordMapper struct {
	// dateFormatter parses and formats the start/stop timestamp columns.
	dateFormatter date.Formatter

	// columnNames contains the list of all CSV column names for CSV-based time reports used for import or export.
	columnNames []string

	// tagsSeparator contains the separator sign/string that is used to split and concatenate tags
	tagsSeparator string
}
// GetTimeRecords returns a list of time records for the given CSV table rows.
// A leading header row (recognized by its first column name) is skipped.
func (mapper *CSVTimeRecordMapper) GetTimeRecords(rows [][]string) ([]toggl.TimeRecord, error) {
	// Drop the headline row when present.
	if len(rows) > 0 && rows[0][0] == mapper.GetColumnNames()[0] {
		rows = rows[1:]
	}

	// Convert each remaining row into a time record model.
	var records []toggl.TimeRecord
	for _, row := range rows {
		record, convertErr := mapper.GetTimeRecord(row)
		if convertErr != nil {
			return nil, fmt.Errorf("Failed to create time entry from (%v): %s", row, convertErr.Error())
		}
		records = append(records, record)
	}

	return records, nil
}
// GetTimeRecord returns a TimeRecord model from an CSV row.
// The row must contain exactly one value per GetColumnNames column, in
// order: start, stop, workspace, project, client, tags, description.
func (mapper *CSVTimeRecordMapper) GetTimeRecord(row []string) (toggl.TimeRecord, error) {

	// check the number of columns
	if len(row) != len(mapper.GetColumnNames()) {
		return toggl.TimeRecord{}, fmt.Errorf("Wrong number of values in the given row. The required: %d. Given: %d", len(mapper.GetColumnNames()), len(row))
	}

	// Start date
	startDateVal := row[0]
	startDate, startDateError := mapper.dateFormatter.GetDate(startDateVal)
	if startDateError != nil {
		return toggl.TimeRecord{}, fmt.Errorf("Cannot parse the start date: %s", startDateError)
	}

	// Stop Date
	stopDateVal := row[1]
	stopDate, stopDateError := mapper.dateFormatter.GetDate(stopDateVal)
	if stopDateError != nil {
		return toggl.TimeRecord{}, fmt.Errorf("Cannot parse the stop date: %s", stopDateError)
	}

	// Workspace Name
	workspaceVal := row[2]
	workspaceVal = strings.TrimSpace(workspaceVal)

	// Project Name
	projectVal := row[3]
	projectVal = strings.TrimSpace(projectVal)

	// Client Name
	clientVal := row[4]
	clientVal = strings.TrimSpace(clientVal)

	// Tags
	tagsVal := row[5]
	tags := strings.Split(tagsVal, mapper.tagsSeparator)
	for index, tag := range tags {
		tags[index] = strings.TrimSpace(tag)
	}

	// Description
	descriptionVal := row[6]
	description := strings.TrimSpace(descriptionVal)
	if len(description) >= 3000 {
		// Fix: startDate is a time.Time, for which %q is not a valid verb
		// (it would print a vet-flagged "%!q(...)" placeholder); use %v.
		return toggl.TimeRecord{}, fmt.Errorf("The description text of the time entry %v is too long", startDate)
	}

	entry := toggl.TimeRecord{
		Start: startDate,
		Stop: stopDate,
		WorkspaceName: workspaceVal,
		ProjectName: projectVal,
		ClientName: clientVal,
		Description: description,
		Tags: tags,
	}

	return entry, nil
}
// GetColumnNames returns the names of the CSV columns, in the order
// expected by GetTimeRecord and produced by GetRow.
func (mapper *CSVTimeRecordMapper) GetColumnNames() []string {
	return mapper.columnNames
}
// GetRow returns an CSV row for the given TimeRecord model.
func (mapper *CSVTimeRecordMapper) GetRow(timeRecord toggl.TimeRecord) []string {
return []string{
mapper.dateFormatter.GetDateString(timeRecord.Start),
mapper.dateFormatter.GetDateString(timeRecord.Stop),
timeRecord.WorkspaceName,
timeRecord.ProjectName,
timeRecord.ClientName,
strings.Join(timeRecord.Tags, mapper.tagsSeparator),
timeRecord.Description,
}
} | csvmapper.go | 0.692954 | 0.470311 | csvmapper.go | starcoder |
package time
import (
"errors"
"sync"
"time"
)
// Ceil returns the result of rounding t up to a multiple of d (since the zero time).
// If d <= 0, Ceil returns t unchanged.
func Ceil(t time.Time, d time.Duration) time.Time {
if d <= 0 {
return t
}
return t.Add(d).Truncate(d)
}
// Prev returns the nearest multiple of d before t (since the zero time).
// If d <= 0, Prev returns t unchanged.
func Prev(t time.Time, d time.Duration) time.Time {
if d <= 0 {
return t
}
t2 := t.Truncate(d)
if t2.Equal(t) {
t2 = t2.Add(-d)
}
return t2
}
// Next returns the nearest multiple of d after t (since the zero time).
// If d <= 0, Next returns t unchanged.
func Next(t time.Time, d time.Duration) time.Time {
if d <= 0 {
return t
}
return t.Truncate(d).Add(d)
}
// IsMultiple returns true if t is some multiple of d (since the zero time).
// If d <= 0, IsMultiple returns false.
func IsMultiple(t time.Time, d time.Duration) bool {
if d <= 0 {
return false
}
return t.Truncate(d).Equal(t)
}
// RoundedTicker is like a time.Ticker, but rounded up to
// the nearest multiple of the tick Duration from the zero time.
type RoundedTicker struct {
	C <-chan time.Time // receive-only view of c handed to clients
	c chan<- time.Time // send side, written by run
	d time.Duration // tick interval
	once sync.Once // guards close(stopping) so Stop is idempotent
	stopping chan struct{} // closed by Stop to terminate run
}
// NewRoundedTicker returns a new RoundedTicker delivering ticks on C.
// It panics if d is non-positive, and starts a background goroutine that
// runs until Stop is called.
func NewRoundedTicker(d time.Duration) *RoundedTicker {
	if d <= 0 {
		panic(errors.New("non-positive interval for NewRoundedTicker"))
	}
	// C and c are the receive- and send-side views of the same channel.
	c := make(chan time.Time)
	rt := &RoundedTicker{
		C: c,
		c: c,
		d: d,
		stopping: make(chan struct{}),
	}

	go rt.run()

	return rt
}
// run delivers a tick on rt.c at every multiple of rt.d (since the zero
// time) until rt.stopping is closed. Sends are non-blocking: when the
// receiver is not ready the tick is dropped, so slow readers skip ticks
// instead of delaying the schedule.
func (rt *RoundedTicker) run() {
	nextTick := Ceil(time.Now(), rt.d)
	doTick := time.NewTimer(time.Until(nextTick))
	defer doTick.Stop()
	for {
		select {
		case <-doTick.C:
			t := nextTick
			// Schedule the following tick before delivering this one.
			nextTick = nextTick.Add(rt.d)
			doTick.Reset(time.Until(nextTick))
			select {
			case rt.c <- t:
				// tick delivered
			default:
				// receiver not ready: drop this tick
			}
		case <-rt.stopping:
			return
		}
	}
}
// Stop turns off a ticker. After Stop, no more ticks will be sent.
// Stop does not close the channel, to prevent a concurrent goroutine reading from
// the channel from seeing an erroneous "tick". Stop is safe to call more
// than once (guarded by rt.once).
func (rt *RoundedTicker) Stop() {
	rt.once.Do(func() {
		// Check if nil just in case RoundedTicker was directly initialized.
		// https://github.com/golang/go/issues/21874
		if rt.stopping != nil {
			close(rt.stopping)
		}
	})
}
package hbook
import (
"errors"
"sort"
)
// Indices for the under- and over-flow 1-dim bins.
const (
UnderflowBin = -1
OverflowBin = -2
)
// Sentinel errors raised (via panic) by the binning constructors when
// given invalid axis limits, bin ranges, or edge slices.
var (
	errInvalidXAxis = errors.New("hbook: invalid X-axis limits")
	errEmptyXAxis = errors.New("hbook: X-axis with zero bins")
	errShortXAxis = errors.New("hbook: too few 1-dim X-bins")
	errOverlapXAxis = errors.New("hbook: invalid X-binning (overlap)")
	errNotSortedXAxis = errors.New("hbook: X-edges slice not sorted")
	errDupEdgesXAxis = errors.New("hbook: duplicates in X-edge values")
	errInvalidYAxis = errors.New("hbook: invalid Y-axis limits")
	errEmptyYAxis = errors.New("hbook: Y-axis with zero bins")
	errShortYAxis = errors.New("hbook: too few 1-dim Y-bins")
	errOverlapYAxis = errors.New("hbook: invalid Y-binning (overlap)")
	errNotSortedYAxis = errors.New("hbook: Y-edges slice not sorted")
	errDupEdgesYAxis = errors.New("hbook: duplicates in Y-edge values")
)
// binning1D is a 1-dim binning of the x-axis.
type binning1D struct {
	bins []Bin1D // in-range bins, sorted by x
	dist dist1D // distribution over the whole axis, including out-of-range fills
	outflows [2]dist1D // [0]: underflow, [1]: overflow (see fill)
	xrange Range // [Min, Max) span covered by the in-range bins
}
// newBinning1D returns a binning of n equal-width bins over [xmin, xmax).
// It panics with errInvalidXAxis when xmin >= xmax and with errEmptyXAxis
// when n <= 0.
func newBinning1D(n int, xmin, xmax float64) binning1D {
	if xmin >= xmax {
		panic(errInvalidXAxis)
	}
	if n <= 0 {
		panic(errEmptyXAxis)
	}
	bng := binning1D{
		bins: make([]Bin1D, n),
		xrange: Range{Min: xmin, Max: xmax},
	}
	width := bng.xrange.Width() / float64(n)
	for i := range bng.bins {
		bin := &bng.bins[i]
		// Edges are computed independently per bin; the last bin's Max may
		// differ from xmax by floating-point rounding — TODO confirm this
		// is acceptable to callers.
		bin.xrange.Min = xmin + float64(i)*width
		bin.xrange.Max = xmin + float64(i+1)*width
	}
	return bng
}
// newBinning1DFromBins returns a binning built from explicit bin ranges.
// The bins are sorted; adjacent bins may touch (Max == next Min) but must
// not overlap (panics with errOverlapXAxis). Gaps between bins are
// permitted. Panics with errShortXAxis when xbins is empty.
func newBinning1DFromBins(xbins []Range) binning1D {
	if len(xbins) < 1 {
		panic(errShortXAxis)
	}
	n := len(xbins)
	bng := binning1D{
		bins: make([]Bin1D, n),
	}
	for i, xbin := range xbins {
		bin := &bng.bins[i]
		bin.xrange = xbin
	}
	sort.Sort(Bin1Ds(bng.bins))
	// After sorting, verify no pair of consecutive bins overlaps.
	for i := 0; i < len(bng.bins)-1; i++ {
		b0 := bng.bins[i]
		b1 := bng.bins[i+1]
		if b0.xrange.Max > b1.xrange.Min {
			panic(errOverlapXAxis)
		}
	}
	// Overall range spans from the first bin's low edge to the last bin's
	// high edge.
	bng.xrange = Range{Min: bng.bins[0].XMin(), Max: bng.bins[n-1].XMax()}
	return bng
}
// newBinning1DFromEdges returns a binning whose bin i spans
// [edges[i], edges[i+1]). It panics with errShortXAxis for fewer than
// two edges, with errNotSortedXAxis for an unsorted slice and with
// errDupEdgesXAxis for repeated edge values.
func newBinning1DFromEdges(edges []float64) binning1D {
	if len(edges) <= 1 {
		panic(errShortXAxis)
	}
	if !sort.IsSorted(sort.Float64Slice(edges)) {
		panic(errNotSortedXAxis)
	}
	n := len(edges) - 1
	bng := binning1D{
		bins:   make([]Bin1D, n),
		xrange: Range{Min: edges[0], Max: edges[n]},
	}
	for i := range bng.bins {
		bin := &bng.bins[i]
		xmin := edges[i]
		xmax := edges[i+1]
		// Since edges are sorted, equal neighbours are the only
		// possible duplicates.
		if xmin == xmax {
			panic(errDupEdgesXAxis)
		}
		bin.xrange.Min = xmin
		bin.xrange.Max = xmax
	}
	return bng
}
// entries returns the number of entries filled into the binning,
// including under- and overflows (fill always updates dist).
func (bng *binning1D) entries() int64 {
	return bng.dist.Entries()
}

// effEntries returns the effective number of entries as reported by the
// underlying distribution.
func (bng *binning1D) effEntries() float64 {
	return bng.dist.EffEntries()
}

// xMin returns the low edge of the X-axis
func (bng *binning1D) xMin() float64 {
	return bng.xrange.Min
}

// xMax returns the high edge of the X-axis
func (bng *binning1D) xMax() float64 {
	return bng.xrange.Max
}
// fill records the weighted value (x, w): the global distribution always
// sees the fill; out-of-range values additionally go to the under-/overflow
// distributions; in-range values go to their bin.
func (bng *binning1D) fill(x, w float64) {
	idx := bng.coordToIndex(x)
	bng.dist.fill(x, w)
	if idx < 0 {
		// UnderflowBin (-1) maps to outflows[0], OverflowBin (-2) to outflows[1].
		bng.outflows[-idx-1].fill(x, w)
		return
	}
	if idx == len(bng.bins) {
		// gap bin: x falls between two non-contiguous bins, so it is
		// only counted in the global distribution.
		return
	}
	bng.bins[idx].fill(x, w)
}
// coordToIndex returns the bin index corresponding to the coordinate x.
// Values below the axis return UnderflowBin and values at or above the
// high edge return OverflowBin; otherwise the lookup is delegated to
// Bin1Ds.IndexOf (which presumably reports len(bins) for coordinates
// falling in a gap between non-contiguous bins — see fill).
func (bng *binning1D) coordToIndex(x float64) int {
	switch {
	case x < bng.xrange.Min:
		return UnderflowBin
	case x >= bng.xrange.Max:
		return OverflowBin
	}
	return Bin1Ds(bng.bins).IndexOf(x)
}
// scaleW scales the weights of the global distribution, of both outflow
// distributions and of every bin by the factor f.
func (bng *binning1D) scaleW(f float64) {
	bng.dist.scaleW(f)
	for i := range bng.outflows {
		bng.outflows[i].scaleW(f)
	}
	for i := range bng.bins {
		bng.bins[i].scaleW(f)
	}
}
// Bins returns the slice of bins for this binning.
// The returned slice is the internal storage, not a copy.
func (bng *binning1D) Bins() []Bin1D {
	return bng.bins
}
package linearModel
import (
"fmt"
"log"
"math"
"runtime"
// use dot import for lisibility
//"github.com/gcla/sklearn/base"
"github.com/gcla/sklearn/base"
"gonum.org/v1/gonum/mat"
gg "gorgonia.org/gorgonia"
"gorgonia.org/tensor"
)
// Float is gorgonia's Float64
var Float = gg.Float64

// LinearRegressionGorgonia is a multi-output linear regression trained
// with gorgonia's autodiff machinery (Adam solver, optional elastic-net
// regularization via Alpha/L1Ratio).
type LinearRegressionGorgonia struct {
	LinearModel
	nOutputs, Epochs                  int
	LearningRate, Tol, Alpha, L1Ratio float // float is presumably a package alias for float64
}
// NewLinearRegressionGorgonia create a *LinearRegressionGorgonia with good
// defaults: intercept fitting enabled, learning rate 0.1, tolerance 1e-7.
func NewLinearRegressionGorgonia() *LinearRegressionGorgonia {
	return &LinearRegressionGorgonia{LinearModel: LinearModel{FitIntercept: true}, LearningRate: .1, Tol: 1e-7}
}
// Fit learns Coef and Intercept for a *LinearRegressionGorgonia by
// building a gorgonia graph for the (optionally regularized) mean squared
// error and running an Adam solver until Epochs is reached or the RMS
// cost drops below Tol.
func (regr *LinearRegressionGorgonia) Fit(X0, y0 *mat.Dense) base.Transformer {
	Float := gg.Float64
	g := gg.NewGraph()
	r, c := X0.Dims()
	Xshape := []int{r, c}
	nSamples := r
	r, c = y0.Dims()
	Yshape := []int{r, c}
	regr.nOutputs = c
	// dMust unwraps (tensor, error) pairs, panicking on error.
	dMust := func(t *tensor.Dense, e error) *tensor.Dense {
		check(e)
		return t
	}
	xT := tensor.FromMat64(X0)
	if regr.FitIntercept {
		// Prepend a column of ones so the intercept is learned as w[0].
		ones := tensor.Ones(Float, Xshape[0], 1)
		xT = dMust(ones.Hstack(xT))
	}
	x := gg.NewMatrix(g, Float, gg.WithShape(xT.Shape()...), gg.WithName("x"), gg.WithValue(xT))
	Must := gg.Must
	var y, w, pred, cost *gg.Node
	if regr.nOutputs == 1 {
		y = gg.NewVector(g, Float, gg.WithShape(xT.Shape()[0]), gg.WithName("y"), gg.WithValue(tensor.FromMat64(y0)))
		w = gg.NewVector(g, Float, gg.WithShape(xT.Shape()[1]), gg.WithName("w"), gg.WithInit(gg.Uniform(0., 1.)))
		// NOTE(review): debug print left in the single-output path.
		fmt.Println("x", x.Shape(), "w", w.Shape(), "y", y.Shape())
		pred = Must(gg.Mul(x, w))
		cost = Must(gg.Mean(Must(gg.Square(Must(gg.Sub(pred, y)))), 0))
	} else {
		y = gg.NewMatrix(g, Float, gg.WithShape(Yshape...), gg.WithName("y"), gg.WithValue(ToDenseTensor(y0)))
		w = gg.NewMatrix(g, Float, gg.WithShape(xT.Shape()[1], Yshape[1]), gg.WithName("w"), gg.WithInit(gg.Uniform(0., 1.)))
		pred = Must(gg.Mul(x, w))
		cost = Must(gg.Mean(Must(gg.Square(Must(gg.Sub(pred, y)))), 0, 1))
	}
	// Elastic-net penalty: L1 weighted by Alpha*L1Ratio, L2 by the rest.
	if regr.Alpha >= 0. {
		L1 := Must(gg.Mul(gg.NewConstant(regr.Alpha*regr.L1Ratio/float64(2*nSamples)), Must(gg.Sum(Must(gg.Abs(w))))))
		cost = Must(gg.Add(cost, L1))
		L2 := Must(gg.Mul(gg.NewConstant(regr.Alpha*(1.-regr.L1Ratio)/float64(2*nSamples)), Must(gg.Sum(Must(gg.Square(w))))))
		cost = Must(gg.Add(cost, L2))
	}
	// NOTE(review): the error returned by gg.Grad is stored in err but
	// never inspected before the machine runs — confirm this is safe.
	_, err := gg.Grad(cost, w)
	// machine := NewLispMachine(g) // you can use a LispMachine, but it'll be VERY slow.
	machine := gg.NewTapeMachine(g, gg.BindDualValues(w))
	defer runtime.GC()
	model := gg.Nodes{w}
	solver := gg.NewAdamSolver(gg.WithLearnRate(regr.LearningRate), gg.WithClip(5)) // good idea to clip
	if gg.CUDA {
		runtime.LockOSThread()
		defer runtime.UnlockOSThread()
	}
	if regr.Epochs <= 0 {
		// Default epoch budget scales inversely with sample count.
		regr.Epochs = 1e6 / nSamples
	}
	for i := 0; i < regr.Epochs; i++ {
		// NOTE(review): a RunAll error silently stops training.
		if err = machine.RunAll(); err != nil {
			break
		}
		if err = solver.Step(model); err != nil {
			log.Fatal(err)
		}
		//fmt.Println(i, cost.Value())
		machine.Reset() // Reset is necessary in a loop like this
		//fmt.Println(i, cost.Value())
		// Stop once the RMS cost drops below the tolerance.
		if math.Sqrt(cost.Value().Data().(float)) < regr.Tol {
			break
		}
	}
	// Unpack the learned weights into Intercept (row 0 when fitted) and Coef.
	wmat := mat.NewDense(xT.Shape()[1], Yshape[1], w.Value().Data().([]float))
	regr.Coef = mat.NewDense(Xshape[1], Yshape[1], nil)
	regr.Intercept = mat.NewDense(1, Yshape[1], nil)
	ifeat0 := 0
	if regr.FitIntercept {
		ifeat0 = 1
		regr.Intercept.Clone(wmat.RowView(0).T())
	}
	regr.Coef.Apply(func(j, o int, c float64) float64 { return wmat.At(j+ifeat0, o) }, regr.Coef)
	return regr
}
// Predict return predicted Ys for a list or Xs.
// The result is written into Y via the model's decision function.
func (regr *LinearRegressionGorgonia) Predict(X, Y *mat.Dense) base.Regressor {
	regr.DecisionFunction(X, Y)
	return regr
}

// FitTransform is for Pipeline: it fits the model on (X, Y) and returns
// X unchanged together with the model's predictions for X.
func (regr *LinearRegressionGorgonia) FitTransform(X, Y *mat.Dense) (Xout, Yout *mat.Dense) {
	r, c := Y.Dims()
	Xout, Yout = X, mat.NewDense(r, c, nil)
	regr.Fit(X, Y)
	regr.Predict(X, Yout)
	return
}

// Transform is for Pipeline: it returns X unchanged together with the
// (already fitted) model's predictions for X.
func (regr *LinearRegressionGorgonia) Transform(X, Y *mat.Dense) (Xout, Yout *mat.Dense) {
	r, c := Y.Dims()
	Xout, Yout = X, mat.NewDense(r, c, nil)
	regr.Predict(X, Yout)
	return
}
// --------
func check(err error) {
if err != nil {
panic(err)
}
}
// ToDenseTensor converts to a *tensor.Dense
// accepts []float64 [][]float64 *mat.Dense mat.Matrix
// A []float becomes a len(v) x 1 column tensor; a [][]float is copied
// row-major into a len(v) x len(v[0]) tensor. Any other type panics.
func ToDenseTensor(X interface{}) *tensor.Dense {
	switch v := X.(type) {
	case []float:
		return tensor.NewDense(Float, []int{len(v), 1}, tensor.WithBacking(v))
	case [][]float:
		b := make([]float, len(v)*len(v[0]))
		k := 0
		for _, vi := range v {
			for _, vij := range vi {
				b[k] = vij
				k++
			}
		}
		return tensor.New(tensor.WithShape(len(v), len(v[0])), tensor.WithBacking(b))
	case *mat.Dense:
		return tensor.FromMat64(v)
	case mat.Matrix:
		// Generic matrices are densified first.
		return tensor.FromMat64(mat.DenseCopyOf(v))
	default:
		panic("[]float or [][]float expected")
	}
}
package matrix
import (
"fmt"
"math/rand"
"gonum.org/v1/gonum/mat"
"gonum.org/v1/gonum/stat"
)
// ColsMax returns a slice with the max value of each of the first cols
// columns of matrix m.
// It returns an error if the matrix is nil, has zero size, or cols
// exceeds the number of columns in m.
func ColsMax(cols int, m *mat.Dense) ([]float64, error) {
	return withValidDim("cols", cols, m, mat.Max)
}

// ColsMin returns a slice with the min value of each of the first cols
// columns of matrix m.
// It returns an error if the matrix is nil, has zero size, or cols
// exceeds the number of columns in m.
func ColsMin(cols int, m *mat.Dense) ([]float64, error) {
	return withValidDim("cols", cols, m, mat.Min)
}

// ColsMean returns a slice with the mean of each of the first cols
// columns of matrix m.
// It returns an error if the matrix is nil, has zero size, or cols
// exceeds the number of columns in m.
func ColsMean(cols int, m *mat.Dense) ([]float64, error) {
	return withValidDim("cols", cols, m, mean)
}

// ColsStdev returns a slice with the standard deviation of each of the
// first cols columns of matrix m.
// It returns an error if the matrix is nil, has zero size, or cols
// exceeds the number of columns in m.
func ColsStdev(cols int, m *mat.Dense) ([]float64, error) {
	return withValidDim("cols", cols, m, stdev)
}

// RowsMax returns a slice with the max value of each of the first rows
// rows of matrix m.
// It returns an error if the matrix is nil, has zero size, or rows
// exceeds the number of rows in m.
func RowsMax(rows int, m *mat.Dense) ([]float64, error) {
	return withValidDim("rows", rows, m, mat.Max)
}

// RowsMin returns a slice with the min value of each of the first rows
// rows of matrix m.
// It returns an error if the matrix is nil, has zero size, or rows
// exceeds the number of rows in m.
func RowsMin(rows int, m *mat.Dense) ([]float64, error) {
	return withValidDim("rows", rows, m, mat.Min)
}
// MakeRandom creates a new matrix with provided number of rows and columns
// which is initialized to random numbers uniformly distributed in interval [min, max].
// MakeRandom fails if non-positive matrix dimensions are requested.
// NOTE(review): rand.Seed(55) re-seeds the global generator with a fixed
// value on every call, so successive calls produce identical "random"
// matrices — confirm whether this determinism is intentional.
func MakeRandom(rows, cols int, min, max float64) (*mat.Dense, error) {
	return withValidDims(rows, cols, func() (*mat.Dense, error) {
		// set random seed
		rand.Seed(55)
		// allocate data slice
		randVals := make([]float64, rows*cols)
		for i := range randVals {
			// scale the unit-interval sample into [min, max]
			randVals[i] = rand.Float64()*(max-min) + min
		}
		return mat.NewDense(rows, cols, randVals), nil
	})
}
// MakeConstant returns a rows x cols matrix whose every element is val.
// MakeConstant fails if invalid matrix dimensions are requested.
func MakeConstant(rows, cols int, val float64) (*mat.Dense, error) {
	return withValidDims(rows, cols, func() (*mat.Dense, error) {
		// Fill a backing slice once and hand it to the matrix.
		data := make([]float64, rows*cols)
		for i := range data {
			data[i] = val
		}
		return mat.NewDense(rows, cols, data), nil
	})
}
// AddConst adds a constant value to every element of matrix
// It modifies the matrix m passed in as a parameter and also returns it.
// AddConst fails with error if a nil or zero-sized matrix is supplied.
func AddConst(val float64, m *mat.Dense) (*mat.Dense, error) {
	if m == nil {
		return nil, fmt.Errorf("invalid matrix supplied: %v", m)
	}
	rows, cols := m.Dims()
	return withValidDims(rows, cols, func() (*mat.Dense, error) {
		// shift every element in place by val
		for i := 0; i < rows; i++ {
			for j := 0; j < cols; j++ {
				m.Set(i, j, m.At(i, j)+val)
			}
		}
		return m, nil
	})
}
// viewFunc defines matrix dimension view function
type viewFunc func(int) mat.Vector

// dimFn applies function fn to the first count matrix rows or columns.
// dim must be either "rows" or "cols" (any other value leaves viewFn nil
// and would panic — callers go through withValidDim, which only passes
// these two values).
// dimFn collects the results into a slice and returns it.
func dimFn(dim string, count int, m *mat.Dense, fn func(mat.Matrix) float64) []float64 {
	res := make([]float64, count)
	var viewFn viewFunc
	switch dim {
	case "rows":
		viewFn = m.RowView
	case "cols":
		viewFn = m.ColView
	}
	for i := 0; i < count; i++ {
		res[i] = fn(viewFn(i))
	}
	return res
}
// withValidDim executes function fn on the first count columns or rows of
// m (dim selects which) and collects the results into a slice.
// It returns an error if m is nil, has zero size along the requested
// dimension, or count exceeds that dimension of m.
func withValidDim(dim string, count int, m *mat.Dense,
	fn func(mat.Matrix) float64) ([]float64, error) {
	// matrix can't be nil
	if m == nil {
		return nil, fmt.Errorf("invalid matrix supplied: %v", m)
	}
	rows, cols := m.Dims()
	switch dim {
	case "rows":
		if rows == 0 {
			return nil, fmt.Errorf("invalid number of rows supplied: %v", m)
		}
		if count > rows {
			return nil, fmt.Errorf("row count exceeds matrix rows: %d", count)
		}
	case "cols":
		if cols == 0 {
			return nil, fmt.Errorf("invalid number of columns supplied: %v", m)
		}
		if count > cols {
			return nil, fmt.Errorf("column count exceeds matrix columns: %d", count)
		}
	}
	return dimFn(dim, count, m, fn), nil
}
// withValidDims runs fn only when rows and cols are both positive;
// otherwise it reports which dimension is invalid.
func withValidDims(rows, cols int, fn func() (*mat.Dense, error)) (*mat.Dense, error) {
	switch {
	case rows <= 0:
		return nil, fmt.Errorf("invalid number of rows: %d", rows)
	case cols <= 0:
		return nil, fmt.Errorf("invalid number of columns: %d", cols)
	}
	return fn()
}
// mean returns the arithmetic mean over all elements of the matrix.
func mean(m mat.Matrix) float64 {
	r, c := m.Dims()
	return mat.Sum(m) / (float64(r) * float64(c))
}

// stdev returns the sample standard deviation of the matrix's first
// column (callers pass single-column views via dimFn).
func stdev(m mat.Matrix) float64 {
	r, _ := m.Dims()
	col := make([]float64, r)
	mat.Col(col, 0, m)
	return stat.StdDev(col, nil)
}
package gozmo
import (
"fmt"
"github.com/go-gl/mathgl/mgl32"
)
// A Renderer is an accelerated sprite drawer component. It supports color
// addition and multiplication.
type Renderer struct {
	mesh          *Mesh    // GPU quad; nil until createMesh runs (see Update)
	texture       *Texture // resolved each frame from textureName
	textureName   string   // key into the scene's texture map
	pixelsPerUnit uint32   // pixel-to-world-unit scale (default 100)
	index         uint32   // sprite-sheet cell index (row-major over Cols)
	forceHeight   float32  // when > 0, overrides pixelsPerUnit sizing
}
// The mesh is created and uploaded into the GPU only when needed.
// createMesh builds a unit quad (two triangles) with its UV coordinates,
// uploads both buffers, and lazily compiles the shared shader.
func (renderer *Renderer) createMesh() {
	// shader is a package-level handle shared by all renderers;
	// -1 means it has not been compiled yet.
	if shader == -1 {
		shader = int32(GLShader())
	}
	mesh := Mesh{}
	mesh.abid = GLNewArray()
	mesh.vbid = GLNewBuffer()
	mesh.uvbid = GLNewBuffer()
	// Two CCW triangles covering [-1,1] x [-1,1].
	mesh.vertices = []float32{-1, -1,
		-1, 1,
		1, -1,
		1, -1,
		1, 1,
		-1, 1}
	mesh.uvs = []float32{0, 1,
		0, 0,
		1, 1,
		1, 1,
		1, 0,
		0, 0}
	// Multiplicative color defaults to identity (opaque white).
	mesh.mulColor = mgl32.Vec4{1, 1, 1, 1}
	GLBufferData(0, mesh.vbid, mesh.vertices)
	GLBufferData(1, mesh.uvbid, mesh.uvs)
	renderer.mesh = &mesh
}
// NewRenderer returns a Renderer for the given texture. With a nil
// texture the GPU mesh is not created yet; it is built lazily in Update
// once a texture has been attached.
func NewRenderer(texture *Texture) *Renderer {
	// Default is 100 pixels per unit (like in Unity3D).
	renderer := Renderer{texture: texture, pixelsPerUnit: 100}
	if texture != nil {
		renderer.textureName = texture.Name
		renderer.createMesh()
	}
	return &renderer
}

// Start implements the component interface; the Renderer needs no
// per-object initialization.
func (renderer *Renderer) Start(gameObject *GameObject) {
}
// Update resolves the texture, lazily creates the mesh, computes the
// sprite's world-space size and sprite-sheet UVs, culls quads that fall
// outside the view, and issues the draw call.
func (renderer *Renderer) Update(gameObject *GameObject) {
	if renderer.textureName == "" {
		return
	}
	// Look the texture up each frame; a missing entry silently skips
	// drawing (the lookup's ok flag is deliberately discarded).
	renderer.texture, _ = gameObject.Scene.textures[renderer.textureName]
	if renderer.texture == nil {
		return
	}
	if renderer.mesh == nil {
		renderer.createMesh()
	}
	texture := renderer.texture
	// Recompute the mesh size based on the texture.
	// width/height below are half-extents of the quad.
	var width float32
	var height float32
	if renderer.forceHeight > 0 {
		// Forced height: derive width from the cell's aspect ratio.
		height = renderer.forceHeight / 2
		width = renderer.forceHeight * ((float32(texture.Width) / float32(texture.Cols)) / (float32(texture.Height) / float32(texture.Rows))) / 2
	} else {
		width = float32(texture.Width) / float32(texture.Cols) / float32(renderer.pixelsPerUnit) / 2
		height = float32(texture.Height) / float32(texture.Rows) / float32(renderer.pixelsPerUnit) / 2
	}
	// Out-of-view culling, avoids drawing quads that are out of the view quad
	// extract view sizes.
	viewWidth := Engine.Window.OrthographicSize * Engine.Window.AspectRatio * 2
	viewHeight := Engine.Window.OrthographicSize * 2
	// View[12]/View[13] are the translation components of the view matrix.
	viewX := -Engine.Window.View[12] - (viewWidth / 2)
	viewY := -Engine.Window.View[13] + (viewHeight / 2)
	// Check if the object bounds are out of the view.
	objX := gameObject.Position[0] - width
	objY := gameObject.Position[1] + height
	if (objX+(width*2)) < viewX ||
		objX > (viewX+viewWidth) ||
		(objY-(height*2)) > viewY ||
		objY < (viewY-viewHeight) {
		return
	}
	// Recompute uvs based on index (row-major cell in the sprite sheet).
	idxX := renderer.index % texture.Cols
	idxY := renderer.index / texture.Cols
	uvw := (1.0 / float32(texture.Cols))
	uvh := (1.0 / float32(texture.Rows))
	uvx := uvw * float32(idxX)
	uvy := uvh * float32(idxY)
	// Model = translate * scale * rotateZ, then compose with view/projection.
	model := mgl32.Translate3D(gameObject.Position[0], gameObject.Position[1], 0)
	model = model.Mul4(mgl32.Scale3D(gameObject.Scale[0], gameObject.Scale[1], 1))
	model = model.Mul4(mgl32.HomogRotate3DZ(gameObject.Rotation))
	view := Engine.Window.View.Mul4(model)
	ortho := Engine.Window.Projection.Mul4(view)
	IncPerFrameStats("GL.DrawCalls", 1)
	GLDraw(renderer.mesh, uint32(shader), width, height, int32(renderer.texture.tid), uvx, uvy, uvw, uvh, ortho)
}
// SetPixelsPerUnit sets the scale used to convert texture pixels into
// world units when no forced height is set.
func (renderer *Renderer) SetPixelsPerUnit(pixels uint32) {
	renderer.pixelsPerUnit = pixels
}

// setMeshColor stores one color channel on the mesh's additive or
// multiplicative color vector. attr is one of addR..addA / mulR..mulA:
// the first byte selects the vector, the last byte the channel.
// NOTE(review): renderer.mesh is nil until createMesh has run (i.e. until
// a texture is attached) — setting a color before that panics, exactly as
// the original per-case code did.
func (renderer *Renderer) setMeshColor(attr string, value interface{}) error {
	color, ok := value.(float32)
	if !ok {
		return fmt.Errorf("%v attribute of %T expects a float32", attr, renderer)
	}
	vec := &renderer.mesh.addColor
	if attr[0] == 'm' {
		vec = &renderer.mesh.mulColor
	}
	switch attr[3] {
	case 'R':
		vec[0] = color
	case 'G':
		vec[1] = color
	case 'B':
		vec[2] = color
	case 'A':
		vec[3] = color
	}
	return nil
}

// SetAttr sets a renderer attribute by name: "index", "texture",
// "forceHeight", or one of the eight color channels addR..addA /
// mulR..mulA. Unknown attributes are silently ignored (original behavior).
func (renderer *Renderer) SetAttr(attr string, value interface{}) error {
	switch attr {
	case "index":
		index, err := CastUInt32(value)
		if err != nil {
			// Fixed: the original format string had no verb for err,
			// producing a malformed "%!(EXTRA ...)" message (go vet flags it).
			return fmt.Errorf("%v attribute of %T: %v", attr, renderer, err)
		}
		renderer.index = index
		return nil
	case "texture":
		textureName, ok := value.(string)
		if !ok {
			return fmt.Errorf("%v attribute of %T expects a string", attr, renderer)
		}
		renderer.textureName = textureName
		return nil
	case "addR", "addG", "addB", "addA", "mulR", "mulG", "mulB", "mulA":
		// The eight formerly duplicated cases share one implementation.
		return renderer.setMeshColor(attr, value)
	case "forceHeight":
		height, err := CastFloat32(value)
		if err != nil {
			return fmt.Errorf("%v attribute of %T expects a float32", attr, renderer)
		}
		renderer.forceHeight = height
		return nil
	}
	return nil
}
// GetAttr returns the named renderer attribute: "index", "texture", or
// one of the eight color channels addR..addA / mulR..mulA. Any other
// name yields an error.
func (renderer *Renderer) GetAttr(attr string) (interface{}, error) {
	switch attr {
	case "index":
		return renderer.index, nil
	case "texture":
		return renderer.textureName, nil
	case "addR", "addG", "addB", "addA", "mulR", "mulG", "mulB", "mulA":
		// First byte picks the vector, last byte the channel.
		vec := renderer.mesh.addColor
		if attr[0] == 'm' {
			vec = renderer.mesh.mulColor
		}
		switch attr[3] {
		case 'R':
			return vec[0], nil
		case 'G':
			return vec[1], nil
		case 'B':
			return vec[2], nil
		}
		return vec[3], nil
	}
	return nil, fmt.Errorf("%v attribute of %T not found", attr, renderer)
}
// GetType returns the component type name used for registration.
func (renderer *Renderer) GetType() string {
	return "Renderer"
}

// initRenderer is the factory registered for "Renderer" components.
// args are currently unused; the renderer starts without a texture.
func initRenderer(args []interface{}) Component {
	return NewRenderer(nil)
}

func init() {
	RegisterComponent("Renderer", initRenderer)
}
package utils
import (
"bytes"
"fmt"
"math"
"gonum.org/v1/gonum/mat"
)
type BlockMatrix struct {
	M      [][]Matrix // First slice points to rows of matrices - Note, the Matrix type allows for scalar matrices
	Nr, Nc int        // number of rows, columns in the square block matrix consisting of a sub-matrix in each cell
	P      []int      // Permutation "matrix", created during an LUP decomposition, otherwise nil
	Pcount int        // count of number of pivots, used in determining sign of determinant
	tol    float64    // tolerance for reduction operations, like LUP decomposition
}
// NewBlockMatrix allocates an Nr x Nc block matrix whose cells are all
// empty and whose reduction tolerance is set to the default.
func NewBlockMatrix(Nr, Nc int) (R BlockMatrix) {
	rows := make([][]Matrix, Nr)
	for i := range rows {
		rows[i] = make([]Matrix, Nc)
	}
	return BlockMatrix{
		M:   rows,
		Nr:  Nr,
		Nc:  Nc,
		tol: 0.00000001, // Default value
	}
}
// NewBlockMatrixFromScalar builds an Nr x Nc block matrix from a scalar
// matrix A, wrapping each element A.At(i, j) into its own 1x1 Matrix cell.
//
// Bug fix: the original looped j over Nr and i over Nc while indexing
// R.M[i][j] and A.At(i, j) — an out-of-range panic for any non-square
// input (and a transposed access pattern). The loops now run i over rows
// and j over columns; behavior for square inputs is unchanged.
func NewBlockMatrixFromScalar(A Matrix) (R BlockMatrix) {
	var (
		Nr, Nc = A.Dims()
	)
	R = BlockMatrix{
		Nr:  Nr,
		Nc:  Nc,
		tol: 0.00000001, // Default value
	}
	R.M = make([][]Matrix, Nr)
	for n := range R.M {
		R.M[n] = make([]Matrix, Nc)
	}
	for i := 0; i < Nr; i++ {
		for j := 0; j < Nc; j++ {
			R.M[i][j] = NewMatrix(1, 1, []float64{A.At(i, j)})
		}
	}
	return R
}
// Print renders every cell of the block matrix, one row of cells per
// line; empty cells are shown as "[r:c] nil".
func (bm BlockMatrix) Print() (out string) {
	var (
		output string
		A      = bm.M
	)
	buf := bytes.Buffer{}
	for n, row := range A {
		for m, Mat := range row {
			label := fmt.Sprintf("[%d:%d]", n, m)
			if Mat.IsEmpty() {
				output = label + " nil "
			} else {
				output = Mat.Print(label)
			}
			buf.WriteString(fmt.Sprintf("%s", output))
		}
		buf.WriteString("\n")
	}
	return buf.String()
}

// IsSquare reports whether the block matrix has as many cell rows as
// cell columns.
func (bm BlockMatrix) IsSquare() bool {
	return bm.Nr == bm.Nc
}
// LUPDecompose factors the matrix in place into lower/upper block
// triangular factors with partial pivoting, such that P * [M] = L * U.
// After it returns, bm.M holds (L-I)+U and bm.P records the row
// permutation. It may only be called once per matrix; call LUPSolve /
// LUPInvert / LUPDeterminant afterwards.
//
// Algorithm adapted from:
// https://en.wikipedia.org/wiki/LU_decomposition#C_code_example
// with element magnitude replaced by |det| of each cell block.
//
// NOTE(review): the pivot search loops k over ALL rows (0..N-1), whereas
// the referenced scalar algorithm searches k = i..N-1 only — confirm that
// re-pivoting already-processed rows is intended here.
func (bm *BlockMatrix) LUPDecompose() (err error) {
	var (
		imax       int
		absA, maxA float64
		Scratch    Matrix
		N          = bm.Nr
		A          = bm.M
	)
	if !bm.IsSquare() {
		err = fmt.Errorf("Matrix must be square")
		return
	}
	// A non-nil P means the matrix was already overwritten by a
	// previous decomposition.
	if len(bm.P) != 0 {
		err = fmt.Errorf("LUPDecompose already called on this matrix, which has overwritten it")
		return
	}
	// Start with the identity permutation.
	bm.P = make([]int, N)
	for i := range bm.P {
		bm.P[i] = i
	}
	// counting pivots starting from N
	bm.Pcount = N // initialize Pcount with N
	for i := 0; i < N; i++ {
		// Find the row whose diagonal-candidate block has the largest
		// |determinant| to use as pivot.
		maxA = 0.
		imax = i
		for k := 0; k < N; k++ {
			absA = math.Abs(mat.Det(A[k][i]))
			if absA > maxA {
				maxA = absA
				imax = k
			}
		}
		if maxA < bm.tol {
			err = fmt.Errorf("matrix is degenerate with tolerance %8.5e", bm.tol)
			return
		}
		if imax != i {
			// pivot P
			bm.P[i], bm.P[imax] = bm.P[imax], bm.P[i] // swap
			// pivot rows of M
			A[i], A[imax] = A[imax], A[i]
			// counting pivots starting from N
			bm.Pcount++
		}
		// Eliminate below the pivot: store the multipliers in the lower
		// part and update the trailing submatrix.
		for j := i + 1; j < N; j++ {
			if Scratch, err = A[i][i].Inverse(); err != nil {
				return
			}
			A[j][i] = A[j][i].Mul(Scratch)
			for k := i + 1; k < N; k++ {
				A[j][k] = A[j][k].Subtract(A[j][i].Mul(A[i][k]))
			}
		}
	}
	return
}
// LUPSolve solves [M] * X = b for X, using the factors produced by a
// prior LUPDecompose. b is a vector of N cell matrices (each NBxNB); the
// result is returned as an N x 1 block matrix. It can be called
// repeatedly for different right-hand sides.
func (bm BlockMatrix) LUPSolve(b []Matrix) (Bx BlockMatrix, err error) {
	var (
		Scratch Matrix
		P       = bm.P
		N       = bm.Nr
		A       = bm.M
	)
	if len(P) == 0 {
		err = fmt.Errorf("uninitialized - call LUPDecompose first")
		return
	}
	// Allocate solution X
	Bx = NewBlockMatrix(N, 1)
	X := Bx.M
	// Forward substitution through L, applying the permutation to b.
	for i := 0; i < N; i++ {
		X[i][0] = b[P[i]].Copy()
		for k := 0; k < i; k++ {
			X[i][0] = X[i][0].Subtract(A[i][k].Mul(X[k][0]))
		}
	}
	// cDims is a leftover debug helper for printing operand dimensions;
	// it is intentionally unused (see the commented-out call below).
	cDims := func(i, k int, a, x Matrix) {
		var (
			NrA, NcA = a.Dims()
			NrX, NcX = x.Dims()
		)
		fmt.Printf("[i,k] = [%d,%d], [NrA,NcA] = [%d,%d], [NrX,NcX] = [%d,%d]\n",
			i, k, NrA, NcA, NrX, NcX)
	}
	_ = cDims
	// Backward substitution through U.
	// NOTE(review): the Transpose calls below compensate for operand
	// orientation during Mul; confirm they match Matrix.Mul's conventions.
	for i := N - 1; i >= 0; i-- {
		for k := i + 1; k < N; k++ {
			//cDims(i, k, A[i][k], X[k][0])
			//X[i][0] = X[i][0].Subtract(A[i][k].Mul(X[k][0]))
			X[i][0] = X[i][0].Subtract(A[i][k].Mul(X[k][0].Transpose()))
		}
		if Scratch, err = A[i][i].Inverse(); err != nil {
			panic(err)
		}
		X[i][0] = X[i][0].Transpose().Mul(Scratch)
	}
	// Undo the working transposition so results are row vectors again.
	for i := 0; i < N; i++ {
		X[i][0] = X[i][0].Transpose()
	}
	return
}
// LUPInvert computes the inverse of the decomposed matrix, column by
// column, by solving against the permuted identity. It requires a prior
// successful LUPDecompose.
func (bm BlockMatrix) LUPInvert() (R BlockMatrix, err error) {
	var (
		N       = bm.Nr
		P       = bm.P
		A       = bm.M
		Scratch Matrix
	)
	if len(bm.P) == 0 {
		err = fmt.Errorf("uninitialized - call LUPDecompose first")
		return
	}
	// 1x1 scalar cells used to seed each identity column.
	zero := NewMatrix(1, 1, []float64{0.})
	one := NewMatrix(1, 1, []float64{1.})
	R = NewBlockMatrix(N, N)
	IA := R.M
	for j := 0; j < N; j++ {
		// Forward substitution on the j-th permuted identity column.
		for i := 0; i < N; i++ {
			if P[i] == j {
				IA[i][j] = one.Copy()
			} else {
				IA[i][j] = zero.Copy()
			}
			for k := 0; k < i; k++ {
				IA[i][j] = IA[i][j].Subtract(A[i][k].Mul(IA[k][j]))
			}
		}
		// Backward substitution through U.
		for i := N - 1; i >= 0; i-- {
			for k := i + 1; k < N; k++ {
				IA[i][j] = IA[i][j].Subtract(A[i][k].Mul(IA[k][j]))
			}
			if Scratch, err = A[i][i].Inverse(); err != nil {
				panic(err)
			}
			IA[i][j] = IA[i][j].Mul(Scratch)
		}
	}
	return
}
// LUPDeterminant returns the determinant of the decomposed matrix as the
// product of the diagonal cells' determinants, with the sign fixed by
// the pivot-count parity. It requires a prior successful LUPDecompose.
func (bm BlockMatrix) LUPDeterminant() (det float64, err error) {
	var (
		N      = bm.Nr
		Pcount = bm.Pcount
		A      = bm.M
		P      = bm.P
	)
	if len(P) == 0 {
		err = fmt.Errorf("uninitialized - call LUPDecompose first")
		return
	}
	det = mat.Det(A[0][0])
	for i := 1; i < N; i++ {
		det *= mat.Det(A[i][i])
	}
	// An odd number of row swaps flips the determinant's sign.
	if (Pcount-N)%2 != 0 {
		det = -det
	}
	return
}

// GetTol returns the tolerance used by reduction operations such as
// LUPDecompose.
func (bm BlockMatrix) GetTol() (tol float64) {
	return bm.tol
}
// Mul multiplies bm by ba cell-wise (sum over k of Left[i][k]*Right[k][j]),
// treating empty cells and scalar zeros as zero contributions.
//
// NOTE(review): the result is allocated as NcRight x NrLeft and written
// at R.M[j][i] — transposed relative to the conventional NrLeft x NcRight
// product. Confirm this orientation against callers before changing it.
func (bm BlockMatrix) Mul(ba BlockMatrix) (R BlockMatrix) {
	var (
		Left, Right        = bm.M, ba.M
		NrLeft, NcLeft     = bm.Nr, bm.Nc
		NrRight, NcRight   = ba.Nr, ba.Nc
		NrTarget, NcTarget = NcRight, NrLeft
		Scratch            Matrix
	)
	if NrRight != NcLeft {
		panic(fmt.Errorf("number of rows in right Matrix should be %d, is %d", NcLeft, NrRight))
	}
	R = NewBlockMatrix(NrTarget, NcTarget)
	R.tol = bm.tol
	for j := 0; j < NcRight; j++ {
		for i := 0; i < NrLeft; i++ {
			// Iterate across columns of left and rows of right (NcLeft == NrRight) for sum at column j:0-NrLeft
			for ii := 0; ii < NcLeft; ii++ { // For each column in left, or row in right
				// Skip the multiply when either operand is empty or a
				// scalar zero; contribute a scalar zero instead.
				if (Left[i][ii].IsEmpty() || Right[ii][j].IsEmpty()) ||
					(Left[i][ii].IsScalar() && Left[i][ii].DataP[0] == 0.) ||
					(Right[ii][j].IsScalar() && Right[ii][j].DataP[0] == 0.) {
					Scratch = NewMatrix(1, 1, []float64{0.})
				} else {
					Scratch = Left[i][ii].Mul(Right[ii][j])
				}
				if ii == 0 {
					R.M[j][i] = Scratch
				} else {
					R.M[j][i] = R.M[j][i].Add(Scratch)
				}
			}
		}
	}
	return
}
// Add accumulates the cells of ba element-wise into bm. The cell
// dimensions of ba must match those of bm.
//
// Bug fix: the original ignored its argument entirely and computed
// A[i][j].Add(A[i][j]) — adding each cell to itself and discarding the
// returned sum (Add returns a value; see how Mul composes results with
// Add). The argument's cells are now added and the result stored back.
func (bm BlockMatrix) Add(ba BlockMatrix) {
	var (
		Nr, Nc = bm.Nr, bm.Nc
		A      = bm.M
	)
	for i := 0; i < Nr; i++ {
		for j := 0; j < Nc; j++ {
			A[i][j] = A[i][j].Add(ba.M[i][j])
		}
	}
	return
}
// Copy returns a deep copy of bm's cells: every non-empty cell matrix is
// copied into a freshly allocated block matrix. The LUP bookkeeping
// (P, Pcount) is not carried over, matching NewBlockMatrix's defaults.
func (bm BlockMatrix) Copy() (R BlockMatrix) {
	R = NewBlockMatrix(bm.Nr, bm.Nc)
	for i := 0; i < bm.Nr; i++ {
		for j := 0; j < bm.Nc; j++ {
			if bm.M[i][j].IsEmpty() {
				continue
			}
			R.M[i][j] = bm.M[i][j].Copy()
		}
	}
	return
}

// Transpose returns a new block matrix with cell [i][j] copied to [j][i].
// Only the cell layout is transposed; the sub-matrices themselves are not.
func (bm BlockMatrix) Transpose() (R BlockMatrix) {
	R = NewBlockMatrix(bm.Nc, bm.Nr)
	for i := 0; i < bm.Nr; i++ {
		for j := 0; j < bm.Nc; j++ {
			if bm.M[i][j].IsEmpty() {
				continue
			}
			R.M[j][i] = bm.M[i][j].Copy()
		}
	}
	return
}
// Scale multiplies every cell of bm by val, in place, and returns bm for
// chaining. Although the receiver is a value, the mutation is visible to
// the caller because the cell storage M is shared.
func (bm BlockMatrix) Scale(val float64) (R BlockMatrix) {
	var (
		Nr, Nc = bm.Nr, bm.Nc
		A      = bm.M
	)
	for j := 0; j < Nc; j++ {
		for i := 0; i < Nr; i++ {
			A[i][j].Scale(val)
		}
	}
	return bm
}
package grinder
import (
"go/ast"
"go/token"
"golang.org/x/tools/go/types"
"rsc.io/grind/block"
)
func Unlabel(x ast.Stmt) ast.Stmt {
for {
y, ok := x.(*ast.LabeledStmt)
if !ok {
return x
}
x = y.Stmt
}
}
// IsGotoTarget reports whether x carries at least one label that is the
// target of a goto recorded in blocks.
func IsGotoTarget(blocks *block.Graph, x ast.Stmt) bool {
	for labeled, ok := x.(*ast.LabeledStmt); ok; labeled, ok = x.(*ast.LabeledStmt) {
		if len(blocks.Goto[labeled.Label.Name]) > 0 {
			return true
		}
		x = labeled.Stmt
	}
	return false
}
// IsTerminatingStmt reports whether x is a terminating statement.
// Like http://golang.org/ref/spec#Terminating_statements
// but added break and continue for use in non-end-of-function
// contexts.
func IsTerminatingStmt(blocks *block.Graph, x ast.Stmt) bool {
	// Strip labels, remembering the innermost label so labeled breaks
	// targeting this statement can be looked up in blocks.Break.
	label := ""
	for {
		y, ok := x.(*ast.LabeledStmt)
		if !ok {
			break
		}
		label = y.Label.Name
		x = y.Stmt
	}
	switch x := x.(type) {
	case *ast.ReturnStmt:
		return true
	case *ast.BranchStmt:
		switch x.Tok {
		case token.BREAK, token.CONTINUE, token.GOTO:
			return true
		}
	case *ast.IfStmt:
		// Terminating only when both branches exist and terminate.
		return x.Else != nil && IsTerminatingStmt(blocks, x.Body) && IsTerminatingStmt(blocks, x.Else)
	case *ast.ForStmt:
		// An infinite for terminates unless something breaks out of it,
		// either by label or by an unlabeled break in its body.
		return x.Cond == nil && len(blocks.Break[label]) == 0 && !hasBreak(x.Body)
	case *ast.SwitchStmt:
		// Terminating when no break targets it, it has a default case,
		// and every case ends in a terminating statement or fallthrough.
		if len(blocks.Break[label]) > 0 || hasBreak(x.Body) {
			return false
		}
		hasDefault := false
		for _, cas := range x.Body.List {
			cas := cas.(*ast.CaseClause)
			if cas.List == nil {
				hasDefault = true
			}
			if len(cas.Body) == 0 {
				return false
			}
			last := cas.Body[len(cas.Body)-1]
			if !IsTerminatingStmt(blocks, last) && !isFallthrough(last) {
				return false
			}
		}
		if !hasDefault {
			return false
		}
		return true
	case *ast.TypeSwitchStmt:
		// Same rules as SwitchStmt.
		if len(blocks.Break[label]) > 0 || hasBreak(x.Body) {
			return false
		}
		hasDefault := false
		for _, cas := range x.Body.List {
			cas := cas.(*ast.CaseClause)
			if cas.List == nil {
				hasDefault = true
			}
			if len(cas.Body) == 0 {
				return false
			}
			last := cas.Body[len(cas.Body)-1]
			if !IsTerminatingStmt(blocks, last) && !isFallthrough(last) {
				return false
			}
		}
		if !hasDefault {
			return false
		}
		return true
	case *ast.SelectStmt:
		// Terminating when no break targets it and every comm clause
		// ends in a terminating statement (no default case required).
		if len(blocks.Break[label]) > 0 || hasBreak(x.Body) {
			return false
		}
		for _, cas := range x.Body.List {
			cas := cas.(*ast.CommClause)
			if len(cas.Body) == 0 {
				return false
			}
			last := cas.Body[len(cas.Body)-1]
			if !IsTerminatingStmt(blocks, last) && !isFallthrough(last) {
				return false
			}
		}
		return true
	}
	return false
}
func isFallthrough(x ast.Stmt) bool {
xx, ok := x.(*ast.BranchStmt)
return ok && xx.Tok == token.FALLTHROUGH
}
func hasBreak(x ast.Stmt) bool {
found := false
ast.Inspect(x, func(x ast.Node) bool {
switch x := x.(type) {
case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt:
return false
case *ast.BranchStmt:
if x.Tok == token.BREAK && x.Label == nil {
found = true
}
case ast.Expr:
return false
}
return !found
})
return found
}
// LookupAtPos resolves name as it would be seen at position pos inside
// fn's body: it walks fn.Body to find the innermost scope whose extent
// contains pos, then searches outward through parent scopes. Outside the
// package scope, an object only matches when it is declared before pos.
func (pkg *Package) LookupAtPos(fn *ast.FuncDecl, pos token.Pos, name string) types.Object {
	// Default to the function's own scope in case no narrower one matches.
	scope := pkg.Info.Scopes[fn.Type]
	ast.Inspect(fn.Body, func(x ast.Node) bool {
		if x == nil {
			return false
		}
		// Only descend into nodes whose extent contains pos.
		if pos < x.Pos() || x.End() <= pos {
			return false
		}
		// Remember the deepest scope seen on the way down.
		s := pkg.Info.Scopes[x]
		if s != nil {
			scope = s
		}
		return true
	})
	pkgScope := pkg.Types.Scope()
	for s := scope; s != nil; s = s.Parent() {
		obj := s.Lookup(name)
		if obj != nil && (s == pkgScope || obj.Pos() < pos) {
			return obj
		}
	}
	return nil
}
// BlockList returns the list of statements contained by the block x,
// when x is an *ast.BlockStmt, *ast.CommClause, or *ast.CaseClause.
// Otherwise BlockList returns nil.
func BlockList(x ast.Node) []ast.Stmt {
switch x := x.(type) {
case *ast.BlockStmt:
return x.List
case *ast.CommClause:
return x.Body
case *ast.CaseClause:
return x.Body
}
return nil
} | grinder/ast.go | 0.511229 | 0.406037 | ast.go | starcoder |
package int_tree
import (
"github.com/joeyciechanowicz/letter-combinations/pkg/reader"
"sort"
)
func ToAlphabetIndex(letter rune) int {
return int(letter) - int(ToRune("a"))
}
func ToRune(letter string) rune {
return []rune(letter)[0]
}
var RuneToLetters = map[rune]int {
ToRune("a"): ToAlphabetIndex(ToRune("a")),
ToRune("b"): ToAlphabetIndex(ToRune("b")),
ToRune("c"): ToAlphabetIndex(ToRune("c")),
ToRune("d"): ToAlphabetIndex(ToRune("d")),
ToRune("e"): ToAlphabetIndex(ToRune("e")),
ToRune("f"): ToAlphabetIndex(ToRune("f")),
ToRune("g"): ToAlphabetIndex(ToRune("g")),
ToRune("h"): ToAlphabetIndex(ToRune("h")),
ToRune("i"): ToAlphabetIndex(ToRune("i")),
ToRune("j"): ToAlphabetIndex(ToRune("j")),
ToRune("k"): ToAlphabetIndex(ToRune("k")),
ToRune("l"): ToAlphabetIndex(ToRune("l")),
ToRune("m"): ToAlphabetIndex(ToRune("m")),
ToRune("n"): ToAlphabetIndex(ToRune("n")),
ToRune("o"): ToAlphabetIndex(ToRune("o")),
ToRune("p"): ToAlphabetIndex(ToRune("p")),
ToRune("q"): ToAlphabetIndex(ToRune("q")),
ToRune("r"): ToAlphabetIndex(ToRune("r")),
ToRune("s"): ToAlphabetIndex(ToRune("s")),
ToRune("t"): ToAlphabetIndex(ToRune("t")),
ToRune("u"): ToAlphabetIndex(ToRune("u")),
ToRune("v"): ToAlphabetIndex(ToRune("v")),
ToRune("w"): ToAlphabetIndex(ToRune("w")),
ToRune("x"): ToAlphabetIndex(ToRune("x")),
ToRune("y"): ToAlphabetIndex(ToRune("y")),
ToRune("z"): ToAlphabetIndex(ToRune("z")),
}
var Alphabet = [26]rune {
ToRune("a"),
ToRune("b"),
ToRune("c"),
ToRune("d"),
ToRune("e"),
ToRune("f"),
ToRune("g"),
ToRune("h"),
ToRune("i"),
ToRune("j"),
ToRune("k"),
ToRune("l"),
ToRune("m"),
ToRune("n"),
ToRune("o"),
ToRune("p"),
ToRune("q"),
ToRune("r"),
ToRune("s"),
ToRune("t"),
ToRune("u"),
ToRune("v"),
ToRune("w"),
ToRune("x"),
ToRune("y"),
ToRune("z"),
}
// LetterCount pairs an alphabet index (0 for 'a' .. 25 for 'z') with the
// number of occurrences of that letter.
type LetterCount struct {
	Letter int
	Count byte
}

// WordDetails holds a word together with its letter histogram, ordered
// alphabetically by letter.
type WordDetails struct {
	Word string
	SortedLetterCounts []LetterCount
}

// WordDetailsSlice is a named slice type for collections of WordDetails.
type WordDetailsSlice []WordDetails

// Node is a trie node: Children are keyed by alphabet index, and Words
// collects the dictionary entries whose letter path terminates here.
type Node struct {
	Children map[int]*Node
	Words []*WordDetails
}

// runeSlice implements sort.Interface so a word's runes can be sorted.
type runeSlice []rune
func (p runeSlice) Len() int { return len(p) }
func (p runeSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p runeSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// NewWordDetails builds the WordDetails for word: the word itself plus a
// run-length summary of its letters sorted alphabetically. For "banana" the
// summary is [{a 3} {b 1} {n 2}], letters stored as alphabet indices.
func NewWordDetails(word string) WordDetails {
	var details = WordDetails{
		word,
		[]LetterCount{},
	}
	sortedLetters := []rune(word)
	sort.Sort(runeSlice(sortedLetters))
	// Fix: an empty word has no letters to summarize; previously the seed
	// below panicked on sortedLetters[0].
	if len(sortedLetters) == 0 {
		return details
	}
	letterCounts := make(map[int]byte)
	// Seed with the first letter so the "previous entry" comparison in the
	// loop always has a predecessor.
	details.SortedLetterCounts = append(details.SortedLetterCounts, LetterCount{ToAlphabetIndex(sortedLetters[0]), 0})
	for _, char := range sortedLetters {
		alphaIndex := ToAlphabetIndex(char)
		letterCounts[alphaIndex] += 1
		// Runes are sorted, so a new letter shows up exactly when it
		// differs from the last recorded entry.
		if details.SortedLetterCounts[len(details.SortedLetterCounts)-1].Letter != alphaIndex {
			details.SortedLetterCounts = append(details.SortedLetterCounts, LetterCount{alphaIndex, 0})
		}
	}
	// Second pass: fill in the tallies gathered above.
	for i, runeCount := range details.SortedLetterCounts {
		details.SortedLetterCounts[i].Count = letterCounts[runeCount.Letter]
	}
	return details
}
/**
Creates a trie of WordDetails where int is used instead of runes
The int is the index of a letter in Alphabet
*/
func CreateIntDictionaryTree(filename string) (Node, []WordDetails){
nodeCount := 0
var words []WordDetails
var trie = Node{
make(map[int]*Node),
make([]*WordDetails, 0),
}
reader.ReadFile(filename, func(word string) {
var details WordDetails
details = NewWordDetails(word)
var head *Node
head = &trie
for _, runeCount := range details.SortedLetterCounts {
if _, ok := head.Children[runeCount.Letter]; !ok {
nodeCount++
head.Children[runeCount.Letter] = &Node{
make(map[int]*Node),
[]*WordDetails{},
}
}
head = head.Children[runeCount.Letter]
}
words = append(words, details)
head.Words = append(head.Words, &details)
})
//fmt.Println("Trie nodes: ", nodeCount)
return trie, words
} | pkg/int-tree/int-tree.go | 0.518546 | 0.425963 | int-tree.go | starcoder |
package mathf
import (
"fmt"
"math"
)
// A Quaternion describes a rotation in 3D space.
// The Quaternion is mathematically defined as Q = x*i + y*j + z*k + w, where (i,j,k) are imaginary basis vectors.
// (x,y,z) can be seen as a vector related to the axis of rotation, while the real multiplier, w, is related to the amount of rotation.
type Quaternion struct {
	X float64 // i component (vector part)
	Y float64 // j component (vector part)
	Z float64 // k component (vector part)
	W float64 // real (scalar) part
}
// NewQuaternion creates a new quaternion with the given components.
func NewQuaternion(x float64, y float64, z float64, w float64) *Quaternion {
	return &Quaternion{X: x, Y: y, Z: z, W: w}
}

// NewZeroQuaternion creates the identity quaternion [0,0,0,1] (no rotation).
func NewZeroQuaternion() *Quaternion {
	return &Quaternion{W: 1.0}
}
// Set overwrites all four components of the quaternion in place and returns
// the receiver for chaining.
func (quaternion *Quaternion) Set(x float64, y float64, z float64, w float64) *Quaternion {
	quaternion.X = x
	quaternion.Y = y
	quaternion.Z = z
	quaternion.W = w
	return quaternion
}

// Add returns a new quaternion that is the component-wise sum of the two
// quaternions; neither operand is modified.
func (quaternion *Quaternion) Add(other *Quaternion) *Quaternion {
	return &Quaternion{
		X: quaternion.X + other.X,
		Y: quaternion.Y + other.Y,
		Z: quaternion.Z + other.Z,
		W: quaternion.W + other.W,
	}
}
// QuaternionFromAxisAngle builds the quaternion representing a rotation of
// angle radians around axis.
func QuaternionFromAxisAngle(axis *Vec3, angle float64) *Quaternion {
	sin, cos := math.Sincos(angle * 0.5)
	return &Quaternion{
		X: axis.X * sin,
		Y: axis.Y * sin,
		Z: axis.Z * sin,
		W: cos,
	}
}
// ToAxisAngle converts the quaternion to axis/angle representation.
// The computation runs on a normalized copy; the receiver is not modified.
func (quaternion *Quaternion) ToAxisAngle() (*Vec3, float64) {
	// Fix: Normalize returns a new quaternion and does not mutate its
	// receiver. The previous code discarded the result, so math.Acos could
	// receive a value outside [-1, 1] and return NaN for unnormalized input.
	q := quaternion.Normalize()
	angle := 2.0 * math.Acos(q.W)
	s := math.Sqrt(1.0 - q.W*q.W)
	if s < 0.0001 {
		// Near-zero angle: the axis is ill-defined, so return the raw
		// vector part instead of dividing by a near-zero sine.
		return &Vec3{
			X: q.X,
			Y: q.Y,
			Z: q.Z,
		}, angle
	}
	return &Vec3{
		X: q.X / s,
		Y: q.Y / s,
		Z: q.Z / s,
	}, angle
}
// QuaternionFromVectors creates a quaternion from the given two vectors. The
// resulting rotation is the rotation needed to rotate u onto v.
func QuaternionFromVectors(u *Vec3, v *Vec3) *Quaternion {
	// Antiparallel vectors have no unique rotation axis: pick a tangent of
	// u and rotate half a turn around it.
	if u.IsAntiparallelTo(v, Epsilon) {
		t1 := Vec3{}
		t2 := Vec3{}
		u.Tangents(&t1, &t2)
		return QuaternionFromAxisAngle(&t1, math.Pi)
	}
	// General case: axis from the cross product, scalar part from the dot
	// product plus the product of the vector norms.
	a := u.Cross(v)
	quaternion := &Quaternion{
		X: a.X,
		Y: a.Y,
		Z: a.Z,
		W: math.Sqrt(u.SqrtLength()*v.SqrtLength()) + u.Dot(v),
	}
	// Fix: Normalize does not mutate its receiver; the previous code
	// discarded its result and returned an unnormalized quaternion.
	return quaternion.Normalize()
}
// Length returns the Euclidean norm of the quaternion.
func (quaternion *Quaternion) Length() float64 {
	return math.Sqrt(
		quaternion.X*quaternion.X +
			quaternion.Y*quaternion.Y +
			quaternion.Z*quaternion.Z +
			quaternion.W*quaternion.W)
}

// Normalize returns a new unit-length quaternion pointing the same way.
// The receiver itself is NOT modified, despite the pointer receiver.
// A zero-length quaternion normalizes to the identity [0,0,0,1].
func (quaternion *Quaternion) Normalize() *Quaternion {
	l := quaternion.Length()
	if l <= 0.0 {
		return NewZeroQuaternion()
	}
	invLength := 1.0 / l
	norm := &Quaternion{
		X: quaternion.X * invLength,
		Y: quaternion.Y * invLength,
		Z: quaternion.Z * invLength,
		W: quaternion.W * invLength,
	}
	return norm
}

// Multiply returns the Hamilton product quaternion * other, composing the
// two rotations; neither operand is modified.
func (quaternion *Quaternion) Multiply(other *Quaternion) *Quaternion {
	return &Quaternion{
		X: quaternion.X*other.W + quaternion.W*other.X + quaternion.Y*other.Z - quaternion.Z*other.Y,
		Y: quaternion.Y*other.W + quaternion.W*other.Y + quaternion.Z*other.X - quaternion.X*other.Z,
		Z: quaternion.Z*other.W + quaternion.W*other.Z + quaternion.X*other.Y - quaternion.Y*other.X,
		W: quaternion.W*other.W - quaternion.X*other.X - quaternion.Y*other.Y - quaternion.Z*other.Z,
	}
}
// Inverse calculates the inverse rotation: the conjugate divided by the
// squared norm. For unit quaternions this equals the conjugate.
func (quaternion *Quaternion) Inverse() *Quaternion {
	out := quaternion.Conjugate()
	normSq := quaternion.X*quaternion.X + quaternion.Y*quaternion.Y + quaternion.Z*quaternion.Z + quaternion.W*quaternion.W
	scale := 1.0 / normSq
	out.X *= scale
	out.Y *= scale
	out.Z *= scale
	out.W *= scale
	return out
}

// Conjugate returns a new quaternion with the vector part negated.
func (quaternion *Quaternion) Conjugate() *Quaternion {
	return &Quaternion{
		X: -quaternion.X,
		Y: -quaternion.Y,
		Z: -quaternion.Z,
		W: quaternion.W,
	}
}
// MultiplyVec rotates vec by the quaternion (q * v * conj(q) for a unit
// quaternion) and returns the rotated vector.
func (quaternion *Quaternion) MultiplyVec(vec *Vec3) *Vec3 {
	qx, qy, qz, qw := quaternion.X, quaternion.Y, quaternion.Z, quaternion.W
	// Intermediate product t = q * (vec, 0).
	ix := qw*vec.X + qy*vec.Z - qz*vec.Y
	iy := qw*vec.Y + qz*vec.X - qx*vec.Z
	iz := qw*vec.Z + qx*vec.Y - qy*vec.X
	iw := -qx*vec.X - qy*vec.Y - qz*vec.Z
	// Result = t * conjugate(q); the double negations of the original are
	// folded into plain subtractions/additions.
	return NewVec3(
		(ix*qw)-(iw*qx)-(iy*qz)+(iz*qy),
		(iy*qw)-(iw*qy)-(iz*qx)+(ix*qz),
		(iz*qw)-(iw*qz)-(ix*qy)+(iy*qx))
}

// Clone returns an independent copy of this quaternion.
func (quaternion *Quaternion) Clone() *Quaternion {
	copied := *quaternion
	return &copied
}
// ToEuler converts the quaternion to Euler angle representation (radians),
// Order: YZX. The result vector holds roll in X, pitch in Y, yaw in Z.
func (quaternion *Quaternion) ToEuler() *Vec3 {
	sqx := quaternion.X * quaternion.X
	sqy := quaternion.Y * quaternion.Y
	sqz := quaternion.Z * quaternion.Z
	euler := NewZeroVec3()
	// roll (x-axis rotation)
	sinrCosp := -2.0 * (quaternion.Y*quaternion.Z - quaternion.X*quaternion.W)
	cosrCosp := 1.0 - 2.0*(sqx+sqy)
	euler.X = math.Atan2(sinrCosp, cosrCosp)
	// pitch (y-axis rotation)
	sinp := 2.0 * (quaternion.W*quaternion.Y + quaternion.Z*quaternion.X)
	if math.Abs(sinp) >= 1.0 {
		// Gimbal lock: clamp pitch to +/- 90 degrees with the sign of sinp.
		euler.Y = math.Copysign(math.Pi/2.0, sinp)
	} else {
		euler.Y = math.Asin(sinp)
	}
	// yaw (z-axis rotation)
	sinyCosp := -2.0 * (quaternion.X*quaternion.Y - quaternion.W*quaternion.Z)
	cosyCosp := 1.0 - 2.0*(sqy+sqz)
	euler.Z = math.Atan2(sinyCosp, cosyCosp)
	return euler
}
// QuaternionFromEuler creates the quaternion from the given Euler angles
// (radians), combining the half-angle rotations about X, Y and Z.
func QuaternionFromEuler(vec *Vec3) *Quaternion {
	s1, c1 := math.Sincos(vec.X / 2.0)
	s2, c2 := math.Sincos(vec.Y / 2.0)
	s3, c3 := math.Sincos(vec.Z / 2.0)
	return &Quaternion{
		X: s1*c2*c3 + c1*s2*s3,
		Y: c1*s2*c3 - s1*c2*s3,
		Z: c1*c2*s3 + s1*s2*c3,
		W: c1*c2*c3 - s1*s2*s3,
	}
}
// Integrate rotate an absolute orientation quaternion given an angular velocity and a time step.
// NOTE(review): this returns only the incremental rotation delta
// (0.5 * dt * omega * q); the cannon.js reference implementation adds this
// delta onto the current quaternion. Confirm that callers apply the sum
// (e.g. via Add) themselves.
func (quaternion *Quaternion) Integrate(angularVelocity *Vec3, dt float64, angularFactor *Vec3) *Quaternion {
	// Scale each angular velocity component by the per-axis factor.
	ax := angularVelocity.X * angularFactor.X
	ay := angularVelocity.Y * angularFactor.Y
	az := angularVelocity.Z * angularFactor.Z
	halfDT := dt * 0.5
	return &Quaternion{
		X: halfDT * (ax*quaternion.W + ay*quaternion.Z - az*quaternion.Y),
		Y: halfDT * (ay*quaternion.W + az*quaternion.X - ax*quaternion.Z),
		Z: halfDT * (az*quaternion.W + ax*quaternion.Y - ay*quaternion.X),
		W: halfDT * (-ax*quaternion.X - ay*quaternion.Y - az*quaternion.Z),
	}
}
// Slerp performs a spherical linear interpolation between the receiver and
// toQuaternion at parameter t in [0,1], returning a new quaternion.
//
// When the dot product of the two quaternions is negative they lie in
// opposite hemispheres; the target is negated so the interpolation follows
// the shorter arc (q and -q represent the same rotation).
func (quaternion *Quaternion) Slerp(toQuaternion *Quaternion, t float64) *Quaternion {
	cosom := quaternion.X*toQuaternion.X + quaternion.Y*toQuaternion.Y + quaternion.Z*toQuaternion.Z + quaternion.W*toQuaternion.W
	if cosom < 0.0 {
		cosom = -cosom
		// Fix: the components must actually be negated here; the previous
		// code built an identical copy, interpolating the long way round.
		toQuaternion = &Quaternion{
			X: -toQuaternion.X,
			Y: -toQuaternion.Y,
			Z: -toQuaternion.Z,
			W: -toQuaternion.W,
		}
	}
	scale0 := 1.0 - t
	scale1 := t
	if (1.0 - cosom) > 0.000001 {
		// Quaternions far apart: use the true spherical weights; otherwise
		// the linear weights above are a numerically safe approximation.
		omega := math.Acos(cosom)
		sinom := math.Sin(omega)
		scale0 = math.Sin((1.0-t)*omega) / sinom
		scale1 = math.Sin(t*omega) / sinom
	}
	return &Quaternion{
		X: scale0*quaternion.X + scale1*toQuaternion.X,
		// Fix: Y previously interpolated from quaternion.X (copy/paste typo).
		Y: scale0*quaternion.Y + scale1*toQuaternion.Y,
		Z: scale0*quaternion.Z + scale1*toQuaternion.Z,
		W: scale0*quaternion.W + scale1*toQuaternion.W,
	}
}
func (quaternion *Quaternion) String() string {
return fmt.Sprintf("Quaternion [ x: %f, y: %f, z: %f, w: %f ]", quaternion.X, quaternion.Y, quaternion.Z, quaternion.W)
} | server/mathf/quaternion.go | 0.909203 | 0.826537 | quaternion.go | starcoder |
package plaid
import (
"encoding/json"
)
// IncomeBreakdown An object representing a breakdown of the different income types on the paystub.
type IncomeBreakdown struct {
	// The type of income. Possible values include: `\"regular\"`: regular income `\"overtime\"`: overtime income `\"bonus\"`: bonus income
	Type NullableString `json:"type"`
	// The hourly rate at which the income is paid.
	Rate NullableFloat32 `json:"rate"`
	// The number of hours logged for this income for this pay period.
	Hours NullableFloat32 `json:"hours"`
	// The total pay for this pay period.
	Total NullableFloat32 `json:"total"`
	// AdditionalProperties captures any JSON keys not covered by the typed
	// fields above; they round-trip through Marshal/Unmarshal unchanged.
	AdditionalProperties map[string]interface{}
}

// _IncomeBreakdown is a method-free alias used during UnmarshalJSON to
// decode the typed fields without recursing into the custom unmarshaller.
type _IncomeBreakdown IncomeBreakdown
// NewIncomeBreakdown instantiates a new IncomeBreakdown object with all
// API-required properties set; the argument list changes whenever the set
// of required properties changes.
func NewIncomeBreakdown(type_ NullableString, rate NullableFloat32, hours NullableFloat32, total NullableFloat32) *IncomeBreakdown {
	return &IncomeBreakdown{
		Type:  type_,
		Rate:  rate,
		Hours: hours,
		Total: total,
	}
}

// NewIncomeBreakdownWithDefaults instantiates a new IncomeBreakdown object
// with only defaulted properties set; API-required properties are left unset.
func NewIncomeBreakdownWithDefaults() *IncomeBreakdown {
	return &IncomeBreakdown{}
}
// GetType returns the Type field value; a nil receiver or an explicit null
// yields the zero string.
func (o *IncomeBreakdown) GetType() string {
	if o == nil {
		return ""
	}
	if v := o.Type.Get(); v != nil {
		return *v
	}
	return ""
}

// GetTypeOk returns the Type value and whether it has been set.
// NOTE: an explicit nil value returns (nil, true).
func (o *IncomeBreakdown) GetTypeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.Type.Get(), o.Type.IsSet()
}

// SetType sets the Type field value.
func (o *IncomeBreakdown) SetType(v string) {
	o.Type.Set(&v)
}

// GetRate returns the Rate field value; a nil receiver or an explicit null
// yields 0.
func (o *IncomeBreakdown) GetRate() float32 {
	if o == nil {
		return 0
	}
	if v := o.Rate.Get(); v != nil {
		return *v
	}
	return 0
}

// GetRateOk returns the Rate value and whether it has been set.
// NOTE: an explicit nil value returns (nil, true).
func (o *IncomeBreakdown) GetRateOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return o.Rate.Get(), o.Rate.IsSet()
}

// SetRate sets the Rate field value.
func (o *IncomeBreakdown) SetRate(v float32) {
	o.Rate.Set(&v)
}

// GetHours returns the Hours field value; a nil receiver or an explicit
// null yields 0.
func (o *IncomeBreakdown) GetHours() float32 {
	if o == nil {
		return 0
	}
	if v := o.Hours.Get(); v != nil {
		return *v
	}
	return 0
}

// GetHoursOk returns the Hours value and whether it has been set.
// NOTE: an explicit nil value returns (nil, true).
func (o *IncomeBreakdown) GetHoursOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return o.Hours.Get(), o.Hours.IsSet()
}

// SetHours sets the Hours field value.
func (o *IncomeBreakdown) SetHours(v float32) {
	o.Hours.Set(&v)
}

// GetTotal returns the Total field value; a nil receiver or an explicit
// null yields 0.
func (o *IncomeBreakdown) GetTotal() float32 {
	if o == nil {
		return 0
	}
	if v := o.Total.Get(); v != nil {
		return *v
	}
	return 0
}

// GetTotalOk returns the Total value and whether it has been set.
// NOTE: an explicit nil value returns (nil, true).
func (o *IncomeBreakdown) GetTotalOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return o.Total.Get(), o.Total.IsSet()
}

// SetTotal sets the Total field value.
func (o *IncomeBreakdown) SetTotal(v float32) {
	o.Total.Set(&v)
}
// MarshalJSON serializes the typed fields (nullable values encode as JSON
// null when unset or explicitly null) and then layers any additional
// properties on top.
func (o IncomeBreakdown) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"type":  o.Type.Get(),
		"rate":  o.Rate.Get(),
		"hours": o.Hours.Get(),
		"total": o.Total.Get(),
	}
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes the typed fields via the method-free alias type,
// then decodes the document a second time into a generic map to capture
// unknown keys, removing the keys that belong to typed fields.
// NOTE(review): err is reassigned by the second Unmarshal, so a failure in
// the typed decode can be masked when the generic decode succeeds — confirm
// this matches the generator's intent before changing it.
func (o *IncomeBreakdown) UnmarshalJSON(bytes []byte) (err error) {
	varIncomeBreakdown := _IncomeBreakdown{}
	if err = json.Unmarshal(bytes, &varIncomeBreakdown); err == nil {
		*o = IncomeBreakdown(varIncomeBreakdown)
	}
	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
		delete(additionalProperties, "type")
		delete(additionalProperties, "rate")
		delete(additionalProperties, "hours")
		delete(additionalProperties, "total")
		o.AdditionalProperties = additionalProperties
	}
	return err
}
type NullableIncomeBreakdown struct {
value *IncomeBreakdown
isSet bool
}
func (v NullableIncomeBreakdown) Get() *IncomeBreakdown {
return v.value
}
func (v *NullableIncomeBreakdown) Set(val *IncomeBreakdown) {
v.value = val
v.isSet = true
}
func (v NullableIncomeBreakdown) IsSet() bool {
return v.isSet
}
func (v *NullableIncomeBreakdown) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableIncomeBreakdown(val *IncomeBreakdown) *NullableIncomeBreakdown {
return &NullableIncomeBreakdown{value: val, isSet: true}
}
func (v NullableIncomeBreakdown) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableIncomeBreakdown) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | plaid/model_income_breakdown.go | 0.79053 | 0.538012 | model_income_breakdown.go | starcoder |
package money
import (
"fmt"
"math"
"strings"
)
const (
	// decimalDefault is the decimal mark fmt produces for %.2f output; it is
	// used to split the formatted amount into integer and fraction parts.
	decimalDefault = "."
)

// Money represents an amount of a specific currency.
// The amount is stored already rounded to the currency's precision.
type Money struct {
	amount float64
	currency Currency
}
// New creates a Money value of currency c, rounding f to the currency's
// precision.
func New(f float64, c Currency) *Money {
	m := Money{currency: c}
	m.amount = m.round(f)
	return &m
}

// Amount returns the numerical value of the money.
func (m Money) Amount() float64 {
	return m.amount
}
// CurrencyCode returns the code of the money's currency.
func (m Money) CurrencyCode() string {
	return string(m.currency)
}

// Currency returns the string representation of the money: the currency
// symbol followed by the formatted value, e.g. "$ 1,234.50".
func (m Money) Currency() string {
	d := currencies[m.currency]
	value := m.Format(d.thousand, d.decimal)
	return fmt.Sprintf("%s %s", d.symbol, value)
}

// Symbol returns the symbol of the given currency c.
// NOTE(review): the receiver is ignored and the parameter's symbol is
// returned — confirm whether this should use m.currency or be a plain
// function instead of a method.
func (m Money) Symbol(c Currency) string {
	return currencies[c].symbol
}
// Format renders the amount with two decimals, inserting thousand as the
// grouping separator and decimal as the decimal mark. A zero byte for
// either falls back to ',' and '.' respectively.
func (m Money) Format(thousand, decimal byte) string {
	if thousand == 0 {
		thousand = ','
	}
	if decimal == 0 {
		decimal = '.'
	}
	str := fmt.Sprintf("%.2f", m.amount)
	arr := strings.Split(str, decimalDefault)
	n := arr[0]
	// Fix: strip the sign before grouping. Previously the '-' was counted
	// as a digit, so amounts like -123456 rendered as "-,123,456.00".
	sign := ""
	if strings.HasPrefix(n, "-") {
		sign = "-"
		n = n[1:]
	}
	var b strings.Builder
	for i := 0; i < len(n); i++ {
		// A separator goes before every group of three counted from the end.
		if i > 0 && (len(n)-i)%3 == 0 {
			b.WriteByte(thousand)
		}
		b.WriteByte(n[i])
	}
	return fmt.Sprintf("%s%s%s%s", sign, b.String(), string(decimal), arr[1])
}

// Formatted returns the value formatted with the separators configured for
// the money's currency.
func (m Money) Formatted() string {
	d := currencies[m.currency]
	return m.Format(d.thousand, d.decimal)
}
// Absolute returns the absolute value of the current amount.
func (m *Money) Absolute() float64 {
	return math.Abs(m.amount)
}

// Compare rounds v to the currency's precision and returns -1, 0 or 1 as
// the stored amount is less than, equal to, or greater than v.
func (m Money) Compare(v float64) int {
	v = m.round(v)
	if m.amount < v {
		return -1
	}
	if m.amount > v {
		return 1
	}
	return 0
}

// Equals reports whether the amount equals v after rounding.
func (m Money) Equals(v float64) bool {
	v = m.round(v)
	return m.Compare(v) == 0
}

// GreaterThan reports whether the amount is strictly greater than v.
func (m Money) GreaterThan(v float64) bool {
	v = m.round(v)
	return m.Compare(v) > 0
}

// GreaterThanOrEqual reports whether the amount is at least v.
func (m Money) GreaterThanOrEqual(v float64) bool {
	v = m.round(v)
	return m.Compare(v) >= 0
}

// LessThan reports whether the amount is strictly less than v.
func (m Money) LessThan(v float64) bool {
	v = m.round(v)
	return m.Compare(v) < 0
}

// LessThanOrEqual reports whether the amount is at most v.
func (m Money) LessThanOrEqual(v float64) bool {
	v = m.round(v)
	return m.Compare(v) <= 0
}
// Subtract rounds v to the currency's precision, subtracts it from the
// stored amount in place, and returns the new amount.
func (m *Money) Subtract(v float64) float64 {
	v = m.round(v)
	m.amount -= v
	return m.amount
}

// Sum rounds v to the currency's precision, adds it to the stored amount in
// place, and returns the new amount.
func (m *Money) Sum(v float64) float64 {
	v = m.round(v)
	m.amount += v
	return m.amount
}
func (m Money) round(v float64) float64 {
precision := currencies[m.currency].precision
base := math.Pow10(precision)
return math.Round(v*base) / base
} | money.go | 0.835986 | 0.405508 | money.go | starcoder |
// Package metrics fans model-quality measurements (model time, MAPE, RMSE
// and drift) out to per-pod, per-node and per-GPU metric recorders.
package metrics

// Exporter bundles the pod, node and GPU metric recorders behind a single
// API; every method delegates to one of them.
type Exporter struct {
	podMetric *podMetric
	nodeMetric *nodeMetric
	gpuMetric *gpuMetric
}

// NewExporter creates an Exporter with freshly initialized recorders.
func NewExporter() *Exporter {
	return &Exporter{
		podMetric: newPodMetric(),
		nodeMetric: newNodeMetric(),
		gpuMetric: newGPUMetric(),
	}
}

// ExportPodMetricModelTime records val as the pod's latest model time and
// also accumulates it into the pod's running total.
func (exporter *Exporter) ExportPodMetricModelTime(
	podNS, podName, dataGranularity string, val float64) {
	exporter.podMetric.setPodMetricModelTime(podNS,
		podName, dataGranularity, val)
	exporter.podMetric.addPodMetricModelTimeTotal(podNS,
		podName, dataGranularity, val)
}

// SetContainerMetricMAPE records the MAPE value for a container metric.
func (exporter *Exporter) SetContainerMetricMAPE(
	podNS, podName, name, metricType, dataGranularity string, val float64) {
	exporter.podMetric.setContainerMetricMAPE(podNS,
		podName, name, metricType, dataGranularity, val)
}

// SetContainerMetricRMSE records the RMSE value for a container metric.
func (exporter *Exporter) SetContainerMetricRMSE(
	podNS, podName, name, metricType, dataGranularity string, val float64) {
	exporter.podMetric.setContainerMetricRMSE(podNS,
		podName, name, metricType, dataGranularity, val)
}

// AddPodMetricDrift accumulates drift for a pod.
func (exporter *Exporter) AddPodMetricDrift(
	podNS, podName, dataGranularity string, val float64) {
	exporter.podMetric.addPodMetricDrift(podNS,
		podName, dataGranularity, val)
}

// ExportNodeMetricModelTime records val as the node's latest model time and
// also accumulates it into the node's running total.
func (exporter *Exporter) ExportNodeMetricModelTime(
	name, dataGranularity string, val float64) {
	exporter.nodeMetric.setNodeMetricModelTime(name,
		dataGranularity, val)
	exporter.nodeMetric.addNodeMetricModelTimeTotal(name,
		dataGranularity, val)
}

// SetNodeMetricMAPE records the MAPE value for a node metric.
func (exporter *Exporter) SetNodeMetricMAPE(
	name, metricType, dataGranularity string, val float64) {
	exporter.nodeMetric.setNodeMetricMAPE(name,
		metricType, dataGranularity, val)
}

// SetNodeMetricRMSE records the RMSE value for a node metric.
func (exporter *Exporter) SetNodeMetricRMSE(
	name, metricType, dataGranularity string, val float64) {
	exporter.nodeMetric.setNodeMetricRMSE(name,
		metricType, dataGranularity, val)
}

// AddNodeMetricDrift accumulates drift for a node.
func (exporter *Exporter) AddNodeMetricDrift(
	name, dataGranularity string, val float64) {
	exporter.nodeMetric.addNodeMetricDrift(name,
		dataGranularity, val)
}

// ExportGPUMetricModelTime records val as the GPU's latest model time and
// also accumulates it into the GPU's running total.
func (exporter *Exporter) ExportGPUMetricModelTime(host, minor_number,
	dataGranularity string, val float64) {
	exporter.gpuMetric.setGPUMetricModelTime(host, minor_number,
		dataGranularity, val)
	exporter.gpuMetric.addGPUMetricModelTimeTotal(host, minor_number,
		dataGranularity, val)
}

// SetGPUMetricMAPE records the MAPE value for a GPU metric.
func (exporter *Exporter) SetGPUMetricMAPE(host, minor_number,
	metricType, dataGranularity string, val float64) {
	exporter.gpuMetric.setGPUMetricMAPE(host, minor_number,
		metricType, dataGranularity, val)
}

// SetGPUMetricRMSE records the RMSE value for a GPU metric.
func (exporter *Exporter) SetGPUMetricRMSE(host, minor_number,
	metricType, dataGranularity string, val float64) {
	exporter.gpuMetric.setGPUMetricRMSE(host, minor_number,
		metricType, dataGranularity, val)
}
func (exporter *Exporter) AddGPUMetricDrift(host, minor_number,
dataGranularity string, val float64) {
exporter.gpuMetric.addGPUMetricDrift(host, minor_number,
dataGranularity, val)
} | ai-dispatcher/pkg/metrics/exporter.go | 0.748995 | 0.407982 | exporter.go | starcoder |
// Package regressiontest provides reusable conformance subtests for
// implementations of the regression.Store interface.
package regressiontest

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"go.skia.org/infra/perf/go/clustering2"
	"go.skia.org/infra/perf/go/dataframe"
	"go.skia.org/infra/perf/go/regression"
	"go.skia.org/infra/perf/go/types"
)

var (
	// timestamps is a list of timestamps used for each commit in the tests
	// below.
	timestamps = []int64{
		1580000000,
		1580000000 + 100,
		1580000000 + 200,
		1580000000 + 300}
)

// getTestVars returns vars needed by all the subtests below: a background
// context and a fixed commit number.
func getTestVars() (context.Context, types.CommitNumber) {
	ctx := context.Background()
	c := types.CommitNumber(1)
	return ctx, c
}
// SetLowAndTriage tests that the implementation of the regression.Store
// interface operates correctly on the happy path: creating, overwriting and
// triaging a low regression.
func SetLowAndTriage(t *testing.T, store regression.Store) {
	ctx, c := getTestVars()
	// Args to Set* that are then serialized to the datastore.
	df := &dataframe.FrameResponse{
		Msg: "Looks like a regression",
	}
	cl := &clustering2.ClusterSummary{
		Num: 50,
	}
	// TODO(jcgregorio) Break up into finer grained tests and add more tests.
	// Create a new regression.
	isNew, err := store.SetLow(ctx, c, "1", df, cl)
	assert.True(t, isNew)
	require.NoError(t, err)
	// Overwrite a regression, which is allowed, and that it changes the
	// returned 'isNew' value.
	isNew, err = store.SetLow(ctx, c, "1", df, cl)
	assert.False(t, isNew)
	require.NoError(t, err)
	// Confirm new regression is present.
	ranges, err := store.Range(ctx, 1, 3)
	require.NoError(t, err)
	require.Len(t, ranges, 1)
	// Triage existing regression.
	tr := regression.TriageStatus{
		Status:  regression.Positive,
		Message: "bad",
	}
	err = store.TriageLow(ctx, c, "1", tr)
	require.NoError(t, err)
	// Confirm regression is triaged.
	ranges, err = store.Range(ctx, 1, 3)
	require.NoError(t, err)
	assert.Len(t, ranges, 1)
	// Pull the single key out of the map (iteration order is irrelevant
	// because there is exactly one entry).
	key := types.BadCommitNumber
	for key = range ranges {
		break
	}
	assert.Equal(t, regression.Positive, ranges[key].ByAlertID["1"].LowStatus.Status)
	// Re-query to confirm the triage result persists across reads.
	ranges, err = store.Range(ctx, 1, 3)
	require.NoError(t, err)
	assert.Len(t, ranges, 1)
}

// Range_Exact tests that Range returns values when begin=end.
func Range_Exact(t *testing.T, store regression.Store) {
	ctx, c := getTestVars()
	// Args to Set* that are then serialized to the datastore.
	df := &dataframe.FrameResponse{
		Msg: "Looks like a regression",
	}
	cl := &clustering2.ClusterSummary{
		Num: 50,
	}
	// Create a new regression.
	isNew, err := store.SetLow(ctx, c, "1", df, cl)
	assert.True(t, isNew)
	require.NoError(t, err)
	// Confirm new regression is present even for a single-commit range.
	ranges, err := store.Range(ctx, 1, 1)
	require.NoError(t, err)
	require.Len(t, ranges, 1)
}

// TriageNonExistentRegression tests that the implementation of the
// regression.Store interface fails as expected when triaging an unknown
// regression.
func TriageNonExistentRegression(t *testing.T, store regression.Store) {
	ctx, c := getTestVars()
	tr := regression.TriageStatus{
		Status:  regression.Positive,
		Message: "bad",
	}
	// Try triaging a regression that doesn't exist.
	err := store.TriageHigh(ctx, c, "12", tr)
	assert.Error(t, err)
}

// Write tests that the implementation of the regression.Store interface can
// bulk write Regressions.
func Write(t *testing.T, store regression.Store) {
	ctx := context.Background()
	reg := &regression.AllRegressionsForCommit{
		ByAlertID: map[string]*regression.Regression{
			"1": regression.NewRegression(),
		},
	}
	err := store.Write(ctx, map[types.CommitNumber]*regression.AllRegressionsForCommit{2: reg})
	require.NoError(t, err)
	// Read back the written commit and confirm it round-trips intact.
	ranges, err := store.Range(ctx, 1, 3)
	assert.NoError(t, err)
	assert.Len(t, ranges, 1)
	assert.Equal(t, reg, ranges[2])
}
// SubTestFunction is a func we will call to test one aspect of an
// implementation of regression.Store.
type SubTestFunction func(t *testing.T, store regression.Store)
// SubTests are all the subtests we have for regression.Store.
var SubTests = map[string]SubTestFunction{
"SetLowAndTriage": SetLowAndTriage,
"Range_Exact": Range_Exact,
"TriageNonExistentRegression": TriageNonExistentRegression,
"TestWrite": Write,
} | perf/go/regression/regressiontest/regressiontest.go | 0.534127 | 0.62631 | regressiontest.go | starcoder |
package gomonochromebitmap
import (
"fmt"
"image"
"image/color"
"math"
)
// MonoBitmap is a W-by-H 1-bit-per-pixel image. Pixels are packed 32 to a
// uint32 in row-major order.
type MonoBitmap struct {
	Pix []uint32 //using byte vs uint16 vs uint32 vs uint64... 32bit shoud suit well for raspi1/2
	W int
	H int
}
// NewMonoBitmap allocates a w-by-h bitmap whose pixels all start with the
// value fill.
func NewMonoBitmap(w int, h int, fill bool) MonoBitmap {
	bm := MonoBitmap{W: w, H: h, Pix: make([]uint32, w*h/32+1)}
	if fill {
		for i := range bm.Pix {
			bm.Pix[i] = 0xFFFFFFFF
		}
	}
	return bm
}
// NewMonoBitmapFromImage initializes a bitmap from an image.
// Color conversion: if any of the Red, Green or Blue channels exceeds
// threshold (8-bit scale) the pixel is true; invert flips the result.
// NOTE(review): the area parameter is currently unused and the conversion
// always starts at (0,0) and covers img.Bounds().Max — confirm against
// callers before relying on it.
func NewMonoBitmapFromImage(img image.Image, area image.Rectangle, threshold byte, invert bool) MonoBitmap {
	b := img.Bounds()
	w := b.Max.X
	h := b.Max.Y
	result := NewMonoBitmap(w, h, false)
	// Fix: the x loop previously ran through x == w (one column past the
	// end); because pixels are packed linearly, writing column w wraps onto
	// the first pixel of the next row.
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			vr, vg, vb, _ := img.At(x, y).RGBA()
			// Brightness = max of the 16-bit channels, scaled down to 8 bit.
			v := byte((intMax(int(vr), intMax(int(vg), int(vb)))) >> 8)
			if v > threshold {
				result.SetPix(x, y, !invert)
			} else {
				result.SetPix(x, y, invert)
			}
		}
	}
	return result
}
// Bounds returns the bitmap's rectangle with origin at (0,0), following the
// image package convention.
func (p *MonoBitmap) Bounds() image.Rectangle {
	return image.Rect(0, 0, p.W, p.H)
}
// RLEdecode fills the bitmap from run-length encoded data. activeFirst is
// the pixel value of the first run; each byte in data is a run length and a
// zero-length run toggles the value (see RLEencode). An error is returned
// if data runs out before every pixel is filled.
func (p *MonoBitmap) RLEdecode(activeFirst bool, data []byte) error {
	//TODO line drawing... LESS naive solution
	activeNow := activeFirst
	for y := 0; y < p.H; y++ {
		for x := 0; x < p.W; x++ {
			// Consume exhausted runs; each removed zero toggles the value.
			for len(data) > 0 && data[0] == 0 {
				data = data[1:]
				activeNow = !activeNow
			}
			// Fix: check emptiness before touching data[0]; previously an
			// empty input slice caused an index panic instead of an error.
			if len(data) == 0 {
				return fmt.Errorf("Runned out of RLE data")
			}
			data[0]--
			p.SetPix(x, y, activeNow)
		}
	}
	return nil
}
// RLEencode run-length encodes the bitmap in row-major pixel order.
// activeFirst is the pixel value the first run counts; each output byte is
// a run length and consecutive runs alternate value. When a run exceeds 255
// pixels a 255 is emitted and the tracked value is toggled, which makes the
// next pixel of the same color register as a change and emit a zero-length
// run — the decoder treats a zero as a bare toggle, keeping the two sides
// in sync.
func (p *MonoBitmap) RLEencode(activeFirst bool) []byte {
	counter := byte(0)
	activeNow := activeFirst
	result := []byte{}
	for y := 0; y < p.H; y++ {
		for x := 0; x < p.W; x++ {
			if activeNow == p.GetPix(x, y) {
				if counter < 254 {
					counter++ //Nothing changed increase
				} else {
					//overflow
					result = append(result, 255) //add maximum..this is pixel by pixel
					activeNow = !activeNow
					counter = 0
				}
			} else {
				activeNow = !activeNow
				result = append(result, counter) //write previous value
				counter = 1
			}
		}
	}
	// Flush the final, still-open run.
	result = append(result, counter)
	return result
}
// GetImage renders the bitmap as an RGBA image, mapping true pixels to
// trueColor and false pixels to falseColor.
func (p *MonoBitmap) GetImage(trueColor color.Color, falseColor color.Color) image.Image {
	out := image.NewRGBA(image.Rect(0, 0, p.W, p.H))
	for y := 0; y < p.H; y++ {
		for x := 0; x < p.W; x++ {
			c := falseColor
			if p.GetPix(x, y) {
				c = trueColor
			}
			out.Set(x, y, c)
		}
	}
	return out
}
/*
GetDisplayImage renders the bitmap like a physical LCD: each pixel becomes
a pixelW x pixelH rectangle, with gapW/gapH of untouched (transparent)
background between segments. Lit pixels on the first upperRows rows use
trueColorUpper and the rest trueColorDowner, allowing two-color displays
(like a cyan and yellow strip); unlit pixels use falseColor.
*/
func (p *MonoBitmap) GetDisplayImage(trueColorUpper color.Color, trueColorDowner color.Color, upperRows int, falseColor color.Color, pixelW int, pixelH int, gapW int, gapH int) image.Image {
	totW := p.W*(pixelW+gapW) - gapW
	totH := p.H*(pixelH+gapH) - gapH
	result := image.NewRGBA(image.Rect(0, 0, totW, totH))
	for x := 0; x < p.W; x++ {
		xp := x * (pixelW + gapW)
		for y := 0; y < p.H; y++ {
			// Fix: the vertical offset previously advanced by pixelW+gapW,
			// which misplaced rows (and disagreed with the totH computation)
			// whenever the cell was not square.
			yp := y * (pixelH + gapH)
			colo := falseColor
			if p.GetPix(x, y) {
				colo = trueColorDowner
				if upperRows > y {
					colo = trueColorUpper
				}
			}
			for i := 0; i < pixelW; i++ {
				for j := 0; j < pixelH; j++ {
					result.Set(xp+i, yp+j, colo)
				}
			}
		}
	}
	return result
}
//Get view (size w,h) for display. Starting from corner p0. Result is centered. If p0 goes outside, function clamps view
//This is meant only for producing scrollable output picture for display. Better scaling functions elsewhere
//pxStep=0, autoscale, so bitmap will fit
//pxStep=1 is 1:1
//pxStep=2 is 2:1 (50% scale)
//pxStep=3 is 3:1 (25% scale)
//pxStep is limited to point where whole bitmap is visible
//Returns: image, actual cornerpoint and zoom used. Useful if UI includes
// NOTE(review): despite the comment above, only the bitmap is returned; the
// corner point and zoom used are not.
func (p *MonoBitmap) GetView(w int, h int, p0 image.Point, pxStep int, edges bool) MonoBitmap {
	result := NewMonoBitmap(w, h, false)
	// Smallest decimal step at which the whole bitmap fits in w x h.
	maxStep := math.Max(float64(p.W)/float64(w), float64(p.H)/float64(h)) //In decimal
	corner := image.Point{X: intMax(p0.X, 0), Y: intMax(p0.Y, 0)} //Limit point inside
	var step float64
	step = math.Min(float64(pxStep), math.Ceil(maxStep)) //Limits zooming out too much
	if pxStep == 0 { //Autoscale
		step = maxStep
		corner = image.Point{X: 0, Y: 0}
		if maxStep <= 0.5 { //Scale bigger
			//TODO: this is only reason why decimal step is now needed. Todo later integer step
		} else {
			step = math.Ceil(step)
		}
	}
	//Limit corner
	corner.X = intMin(corner.X, int(float64(p.W)-step*float64(w)))
	corner.Y = intMin(corner.Y, int(float64(p.H)-step*float64(h)))
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			// Sample the source pixel for this view coordinate.
			a := int(float64(x)*step) + corner.X
			b := int(float64(y)*step) + corner.Y
			if (a < 0) || (b < 0) || (p.W <= a) || (p.H <= b) {
				// Outside the bitmap: fill with the edges value.
				result.SetPix(x, y, edges)
			} else {
				result.SetPix(x, y, p.GetPix(a, b))
			}
		}
	}
	return result
}
// Fill sets every pixel in area (inclusive bounds) to fillValue. Used for
// clearing the image.
func (p *MonoBitmap) Fill(area image.Rectangle, fillValue bool) {
	//Naive solution. TODO later faster solution
	for x := area.Min.X; x <= area.Max.X; x++ {
		for y := area.Min.Y; y <= area.Max.Y; y++ {
			p.SetPix(x, y, fillValue)
		}
	}
}

// Invert flips the value of every pixel in area (inclusive bounds).
func (p *MonoBitmap) Invert(area image.Rectangle) {
	//Naive solution. TODO later faster solution
	for x := area.Min.X; x <= area.Max.X; x++ {
		for y := area.Min.Y; y <= area.Max.Y; y++ {
			p.SetPix(x, y, !p.GetPix(x, y))
		}
	}
}
// FlipV mirrors the bitmap in place around the vertical axis (columns are
// swapped left-to-right). An odd width leaves the middle column untouched.
func (p *MonoBitmap) FlipV() {
	var v bool
	var i int
	for x := 0; x < p.W/2; x++ {
		for y := 0; y < p.H; y++ {
			v = p.GetPix(x, y)
			i = p.W - x - 1
			p.SetPix(x, y, p.GetPix(i, y))
			p.SetPix(i, y, v)
		}
	}
}

// FlipH mirrors the bitmap in place around the horizontal axis (rows are
// swapped top-to-bottom). An odd height leaves the middle row untouched.
func (p *MonoBitmap) FlipH() {
	var v bool
	var i int
	for x := 0; x < p.W; x++ {
		for y := 0; y < p.H/2; y++ {
			v = p.GetPix(x, y)
			i = p.H - y - 1
			p.SetPix(x, y, p.GetPix(x, i))
			p.SetPix(x, i, v)
		}
	}
}
//Rotates in 90 decree steps
//+1=90 clockwise
//-1=90 anticlockwise
//+2=180 clockwise etc...
// The rotation happens in place: W, H and Pix are replaced. A quarter turn
// swaps W and H; the Pix buffer size (w*h/32+1) is unaffected because the
// pixel count is unchanged.
func (p *MonoBitmap) Rotate90(turn90 int) {
	// Reduce the turn count to one of -3..3; each pair of cases below
	// covers the clockwise and equivalent anticlockwise remainder.
	angle := turn90 % 4
	result := NewMonoBitmap(p.W, p.H, false)
	switch angle {
	case 0:
		return //NOP
	case 1, -3:
		result.W = p.H
		result.H = p.W
		for x := 0; x < p.W; x++ {
			for y := 0; y < p.H; y++ {
				result.SetPix(p.H-y-1, x, p.GetPix(x, y))
			}
		}
	case 2, -2:
		for x := 0; x < p.W; x++ {
			for y := 0; y < p.H; y++ {
				result.SetPix(p.W-x-1, p.H-y-1, p.GetPix(x, y))
			}
		}
	case 3, -1:
		result.W = p.H
		result.H = p.W
		for x := 0; x < p.W; x++ {
			for y := 0; y < p.H; y++ {
				result.SetPix(y, p.W-x-1, p.GetPix(x, y))
			}
		}
	}
	p.W = result.W
	p.H = result.H
	p.Pix = result.Pix
}
// Line draws a straight line from p0 to p1 (inclusive) with the given pixel
// value using Bresenham's algorithm.
// Copied from http://41j.com/blog/2012/09/bresenhams-line-drawing-algorithm-implemetations-in-go-and-c/
// NOTE: the loop is capped at 1000 steps, so lines longer than 1000 pixels
// along the major axis are silently truncated.
func (p *MonoBitmap) Line(p0 image.Point, p1 image.Point, value bool) {
	var cx int32 = int32(p0.X)
	var cy int32 = int32(p0.Y)
	var dx int32 = int32(p1.X) - cx
	var dy int32 = int32(p1.Y) - cy
	if dx < 0 {
		dx = 0 - dx
	}
	if dy < 0 {
		dy = 0 - dy
	}
	// sx/sy are the step directions along each axis.
	var sx int32
	var sy int32
	if cx < int32(p1.X) {
		sx = 1
	} else {
		sx = -1
	}
	if cy < int32(p1.Y) {
		sy = 1
	} else {
		sy = -1
	}
	var err int32 = dx - dy
	var n int32
	for n = 0; n < 1000; n++ {
		p.SetPix(int(cx), int(cy), value)
		if (cx == int32(p1.X)) && (cy == int32(p1.Y)) {
			return
		}
		// Advance along whichever axis the accumulated error selects.
		var e2 int32 = 2 * err
		if e2 > (0 - dy) {
			err = err - dy
			cx = cx + sx
		}
		if e2 < dx {
			err = err + dx
			cy = cy + sy
		}
	}
}
// Hline draws a horizontal run of pixels on row y from x0 to x1
// (inclusive). Used for filling.
func (p *MonoBitmap) Hline(x0 int, x1 int, y int, value bool) {
	for x := x0; x <= x1; x++ {
		p.SetPix(x, y, value)
	}
}
// Vline draws a vertical run of pixels on column x from y0 to y1
// (inclusive).
func (p *MonoBitmap) Vline(x int, y0 int, y1 int, value bool) {
	for y := y0; y <= y1; y++ {
		p.SetPix(x, y, value)
	}
}
// CircleFill draws a filled circle of radius r centered at p0, using the
// midpoint circle algorithm and horizontal spans for the fill.
// Modified from C++ source https://en.wikipedia.org/wiki/Midpoint_circle_algorithm
func (p *MonoBitmap) CircleFill(p0 image.Point, r int, value bool) {
	x := r
	y := 0
	err := 0
	x0 := p0.X
	y0 := p0.Y
	for x >= y {
		//fmt.Printf("X0:%v Y0:%v x=%v y=%v\n", x0, y0, x, y)
		// Four spans per step cover all eight circle octants.
		p.Hline(x0-x, x0+x, y0+y, value)
		p.Hline(x0-x, x0+x, y0-y, value)
		p.Hline(x0-y, x0+y, y0+x, value)
		p.Hline(x0-y, x0+y, y0-x, value)
		y += 1
		err += 1 + 2*y
		if 2*(err-x)+1 > 0 {
			x -= 1
			err += 1 - 2*x
		}
	}
}
// Circle draws the outline of a circle of radius r centered at p0, using
// the midpoint circle algorithm (one plotted point per octant per step).
// Modified from C++ source https://en.wikipedia.org/wiki/Midpoint_circle_algorithm
func (p *MonoBitmap) Circle(p0 image.Point, r int, value bool) {
	x := r
	y := 0
	err := 0
	x0 := p0.X
	y0 := p0.Y
	for x >= y {
		// Plot the eight symmetric octant points.
		p.SetPix(x0+x, y0+y, value)
		p.SetPix(x0+y, y0+x, value)
		p.SetPix(x0-y, y0+x, value)
		p.SetPix(x0-x, y0+y, value)
		p.SetPix(x0-x, y0-y, value)
		p.SetPix(x0-y, y0-x, value)
		p.SetPix(x0+y, y0-x, value)
		p.SetPix(x0+x, y0-y, value)
		y += 1
		err += 1 + 2*y
		if 2*(err-x)+1 > 0 {
			x -= 1
			err += 1 - 2*x
		}
	}
}
// GetPix returns the pixel value at (x, y).
// Returns false when (x, y) lies outside the bitmap.
func (p *MonoBitmap) GetPix(x int, y int) bool {
	// Reject out-of-range coordinates up front. The previous version only
	// compared the word index against len(p.Pix), so a negative coordinate
	// could panic with a negative index and x >= p.W silently read a pixel
	// from the following row instead of returning false.
	if x < 0 || y < 0 || x >= p.W || y >= p.H {
		return false
	}
	bitPos := x + p.W*y
	index := bitPos / 32
	mask := uint32(1) << uint32(bitPos%32)
	if index < len(p.Pix) {
		return (p.Pix[index] & mask) > 0
	}
	return false
}
// SetPix writes a single pixel; writes outside the bitmap are ignored.
// Pixels are packed 32 per uint32 word, least-significant bit first.
// NOTE(review): an earlier TODO claimed a bug for widths not divisible
// by 8; this 32-bit-packed version indexes by x + W*y, so confirm
// whether that note still applies.
func (p *MonoBitmap) SetPix(x int, y int, value bool) {
	if x < 0 || y < 0 || x >= p.W || y >= p.H {
		return
	}
	bitPos := x + p.W*y
	word := bitPos / 32
	mask := uint32(1) << uint32(bitPos%32)
	if value {
		p.Pix[word] |= mask
	} else {
		p.Pix[word] &^= mask
	}
}
//Draws source bitmap on bitmap, clipped to the target's bounds
//drawTrue, draw when point value is true (writes !invert)
//drawFalse, draw when point value is false (writes invert)
func (p *MonoBitmap) DrawBitmap(source MonoBitmap, sourceArea image.Rectangle, targetCorner image.Point, drawTrue bool, drawFalse bool, invert bool) {
	//TODO naive solution, make optimized later
	dx := sourceArea.Dx()
	dy := sourceArea.Dy()
	// Clip the destination span so we never write past the target bitmap.
	targetEnd := image.Point{X: intMin(p.W, targetCorner.X+dx), Y: intMin(p.H, targetCorner.Y+dy)}
	// debug: fmt.Printf("drawing bitmap %#v ---> %v\n", targetCorner, targetEnd)
	for x := targetCorner.X; x < targetEnd.X; x++ {
		for y := targetCorner.Y; y < targetEnd.Y; y++ {
			// Map the target pixel back to its source coordinate.
			v := source.GetPix(x-targetCorner.X+sourceArea.Min.X, y-targetCorner.Y+sourceArea.Min.Y)
			if (v) && (drawTrue) {
				p.SetPix(x, y, !invert)
			}
			if (!v) && (drawFalse) {
				p.SetPix(x, y, invert)
			}
		}
	}
}
//Prints message on screen, starting at area.Min and clipped to area.
//Creates new lines on \n (advancing by lineSpacing); gap is the extra
//horizontal spacing between glyphs; unknown runes fall back to the '?'
//glyph; when wrap is true a glyph that would overflow the right edge
//moves to the next line. drawTrue/drawFalse/invert pass through to DrawBitmap.
//Returns rectangle where text was printed
func (p *MonoBitmap) Print(text string, font map[rune]MonoBitmap, lineSpacing int, gap int, area image.Rectangle, drawTrue bool, drawFalse bool, invert bool, wrap bool) image.Rectangle {
	result := image.Rectangle{Min: area.Min, Max: area.Min}
	x := area.Min.X
	y := area.Min.Y
	//dim:=target.Bounds().Max
	for _, c := range text {
		if c == '\n' {
			// Newline: carriage return + line feed; stop below the area.
			x = area.Min.X
			y += lineSpacing
			if y > area.Max.Y {
				break
			}
		} else {
			f, ok := font[c]
			if !ok {
				f = font['?'] //Not found in font set
			}
			if wrap {
				// Wrap before drawing if the glyph would overflow.
				if x+f.W > area.Max.X {
					x = area.Min.X
					y += lineSpacing
					if y > area.Max.Y {
						break
					}
				}
			}
			// Without wrap, glyphs that would overflow are skipped.
			if (!wrap) || (x+f.W <= area.Max.X) {
				p.DrawBitmap(f, f.Bounds(), image.Point{X: x, Y: y}, drawTrue, drawFalse, invert)
				result.Max.X = intMax(result.Max.X, x+f.W)
				result.Max.Y = intMax(result.Max.Y, y+f.H)
				x += f.W + gap
			}
		}
	}
	return result
}
//Private Utils

// intMax returns the larger of a and b.
func intMax(a int, b int) int {
	if b > a {
		return b
	}
	return a
}
// intMin returns the smaller of a and b.
func intMin(a int, b int) int {
	if b < a {
		return b
	}
	return a
}
package main
/* Day 7 part A:
Determine a tree structure given an ascii representation:
pbga (66)
xhth (57)
ebii (61)
havc (66)
ktlj (57)
fwft (72) -> ktlj, cntj, xhth
qoyq (66)
padx (45) -> pbga, havc, qoyq
tknk (41) -> ugml, padx, fwft
jptl (61)
ugml (68) -> gyxo, ebii, jptl
gyxo (61)
cntj (57)
tknk is at the bottom with children ugml, padx, fwft. ugml has children gyxo, ebii, jptl. padx has children pbga, havc, qoyq. fwft has children ktlj, cntj, xhth. The outmost children have no children.
Weights of each node are in parenthesis following its declaration. At this time they appear to be unused.
With the given input programmatically determine what node is the base holding up everything else.
Part B:
For any node with children, each of that node's children forms a sub-tree. Each of those sub-trees are supposed to be the same weight, or the node itself isn't balanced. The weight of a tower is the sum of the weights of the nodes in that tower.
In the example above, this means that for ugml's disc to be balanced, gyxo, ebii, and jptl must all have the same weight, and they do: 61.
However, for tknk to be balanced, each of its child nodes and all of its grandchildren must each match. This means that the following sums must all be the same:
ugml + (gyxo + ebii + jptl) = 68 + (61 + 61 + 61) = 251
padx + (pbga + havc + qoyq) = 45 + (66 + 66 + 66) = 243
fwft + (ktlj + cntj + xhth) = 72 + (57 + 57 + 57) = 243
ugml is unbalancing this which means that ugml itself has the incorrect weight. To correct the weight, subtract the difference (8) from ugml's weight.
Exactly one node has the incorrect weight: Identify the unbalanced node and determine what its weight should be to restore balance to the tower?
*/
import (
"bufio"
"flag"
"fmt"
"os"
"strings"
)
// Command-line flags: path to the puzzle input and a switch selecting
// the part B solution (balance correction) instead of part A (find base).
var inputFile = flag.String("inputFile", "./inputs/day07-example.txt", "Input file")
var partB = flag.Bool("partB", false, "Perform part B solution?")
// Node is one program in the tower: its own weight, the total weight of
// the subtree rooted at it, and links to its parent and children.
type Node struct {
	Name       string  // program name as it appears in the input
	Children   []*Node // programs stacked directly on top of this one
	Weight     int     // this node's own weight
	TreeWeight int     // Weight plus all descendant weights; filled by ComputeTreeWeight
	Parent     *Node   // nil for the base of the tower
}
// NewNode builds a childless node; TreeWeight starts equal to Weight
// until ComputeTreeWeight accumulates the children.
func NewNode(name string, weight int) *Node {
	n := &Node{}
	n.Name = name
	n.Weight = weight
	n.TreeWeight = weight
	return n
}
// AddChild links c under n. Both pointers are returned because the
// caller updates its own references to parent and child.
func (n *Node) AddChild(c *Node) (*Node, *Node) {
	c.Parent = n
	n.Children = append(n.Children, c)
	return n, c
}
// ComputeTreeWeight recursively fills in TreeWeight for n and every
// subtree below it, returning n and its total subtree weight.
func (n *Node) ComputeTreeWeight() (*Node, int) {
	total := n.Weight // start with this node's own weight
	for _, child := range n.Children {
		_, subtree := child.ComputeTreeWeight()
		total += subtree
	}
	n.TreeWeight = total
	return n, n.TreeWeight
}
// GetSibling returns a sibling of n (the last child of n's parent that
// is not n itself).
// NOTE(review): with more than two siblings this picks an arbitrary one;
// the puzzle only needs a representative for the weight comparison.
// Panics if n.Parent is nil.
func (n *Node) GetSibling() *Node {
	var ret *Node
	for _, childNode := range n.Parent.Children {
		if childNode != n {
			ret = childNode
		}
	}
	return ret
}
// IsTreeBalanced reports whether every child subtree of n carries the
// same total weight. When unbalanced it also returns the child whose
// subtree weight is unique and that weight's offset relative to a
// sibling's (positive means too heavy).
func (n *Node) IsTreeBalanced() (bool, *Node, int) {
	// (A previous version also summed all child TreeWeights into a local
	// that was never read; that dead computation has been removed.)
	// Histogram: subtree weight -> children carrying that weight.
	weightHistogram := make(map[int][]*Node)
	for _, child := range n.Children {
		weightHistogram[child.TreeWeight] = append(weightHistogram[child.TreeWeight], child)
	}
	// The odd one out is the weight held by exactly one child.
	for _, nodes := range weightHistogram {
		if len(nodes) == 1 {
			offender := nodes[0]
			return false, offender, offender.TreeWeight - offender.GetSibling().TreeWeight
		}
	}
	return true, nil, 0
}
func main() {
flag.Parse()
input, err := os.Open(*inputFile)
if err != nil {
fmt.Printf("Couldn't read file: %s\n", err)
os.Exit(1)
}
defer input.Close()
// Create a map of name -> Node for quicker access, especially when building
tower := make(map[string]*Node)
// Map of parents => children. Keys are names of nodes whose children are the values
children := make(map[string][]string)
lineReader := bufio.NewScanner(input)
for lineReader.Scan() {
// loop over tokens separated by spaces
line := lineReader.Text()
var nodeName string
var nodeWeight int
for n, token := range strings.Split(line, " ") {
switch {
case n == 0:
// name
nodeName = token
case n == 1:
// weight
fmt.Sscanf(token, "(%d)", &nodeWeight)
case n == 2:
// ->
continue
case n > 2:
// list of children whose name may end in ,
childName := strings.TrimSuffix(token, ",")
children[nodeName] = append(children[nodeName], childName)
} // end switch, which means we have all the fields we need to make a new node
tower[nodeName] = NewNode(nodeName, nodeWeight)
} //we've read every line
} // EOF
// Go through and turn references to names into pointers
for parentName, childNames := range children {
// name => list of name's children
for _, child := range childNames {
tower[parentName], tower[child] = tower[parentName].AddChild(tower[child])
}
}
// at this point the tower has parentage, so, the only Node without a parent is the base.
var rootNode *Node
for _, node := range tower {
if node.Parent == nil {
rootNode = node
}
}
rootNode, _ = rootNode.ComputeTreeWeight()
if *partB {
balanced, offender, offset := rootNode.IsTreeBalanced()
var lastOffender *Node
var lastOffset int
workingNode := offender
for !balanced {
lastOffset = offset
lastOffender = offender
balanced, offender, offset = workingNode.IsTreeBalanced()
if !balanced {
workingNode = offender
}
}
if lastOffender != nil {
fmt.Printf("%s is not balanced! Adjust its weight by %d (should be %d)\n", lastOffender.Name, -1*lastOffset, lastOffender.Weight+(-1*lastOffset))
}
} else {
fmt.Printf("%s is the base\n", rootNode.Name)
}
} | 2017/day07.go | 0.76145 | 0.461199 | day07.go | starcoder |
package pair
import (
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"os"
"time"
)
// Pair is a construct of 2 images to be compared, along with Score and time taken for comparison.
// Score lies in [0, 1]: 0 means the images are (near-)identical, 1 means
// they are of different sizes; in between, larger means more different.
type Pair struct {
	Image1 image.Image // first image of the comparison
	Image2 image.Image // second image of the comparison
	Score  float64     // dissimilarity score, filled in by Compare
	Time   float64     // seconds Compare took, recorded via elapsed
}
// NewImagePair loads both image files from disk and returns a Pair ready
// for Compare. The first failing load aborts with its error.
func NewImagePair(image1Path, image2Path string) (*Pair, error) {
	img1, err := readImage(image1Path)
	if err != nil {
		return nil, err
	}
	img2, err := readImage(image2Path)
	if err != nil {
		return nil, err
	}
	return &Pair{Image1: img1, Image2: img2}, nil
}
// Compare uses simple pixel based comparison to determine similarity of images.
// If the images are of different sizes, they are considered different with a Score of 1.
// Otherwise the R, G and B channels (16-bit values from RGBA(); alpha is
// ignored) of each pixel are diffed and summed, and Score is that sum
// normalized by pixels * 3 channels * 0xffff, giving a value in [0, 1].
// Scores below 0.01 are clamped to 0 so near-identical images count as equal.
func (p *Pair) Compare() {
	defer p.elapsed()() // deferred call to record execution time of Compare into p.Time
	if p.Image1.Bounds() != p.Image2.Bounds() {
		p.Score = 1
		return
	}
	bounds := p.Image2.Bounds()
	var sum int64
	for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
		for x := bounds.Min.X; x < bounds.Max.X; x++ {
			r1, g1, b1, _ := p.Image1.At(x, y).RGBA()
			r2, g2, b2, _ := p.Image2.At(x, y).RGBA()
			sum += diff(r1, r2)
			sum += diff(g1, g2)
			sum += diff(b1, b2)
		}
	}
	nPixels := (bounds.Max.X - bounds.Min.X) * (bounds.Max.Y - bounds.Min.Y)
	p.Score = float64(sum) / (float64(nPixels) * 0xffff * 3)
	if p.Score < 0.01 {
		p.Score = 0
	}
}
// elapsed returns a closure that, when invoked, stores into p.Time the
// number of seconds since elapsed itself was called. Intended use is a
// deferred double call at the top of a timed method: defer p.elapsed()().
func (p *Pair) elapsed() func() {
	begin := time.Now()
	return func() {
		p.Time = time.Since(begin).Seconds()
	}
}
// diff returns |a - b| as an int64, avoiding unsigned underflow.
func diff(a, b uint32) int64 {
	if a < b {
		a, b = b, a
	}
	return int64(a - b)
}
// readImage opens the named file and decodes it with whichever
// registered image format matches (gif/jpeg/png via blank imports).
func readImage(file string) (image.Image, error) {
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	img, _, err := image.Decode(f)
	if err != nil {
		return nil, err
	}
	return img, nil
}
package iso20022
// Details of the closing of the securities financing transaction.
// Every field is an optional XML element (omitempty); a nil pointer
// means the element is absent from the message.
type SecuritiesFinancingTransactionDetails34 struct {

	// Unambiguous identification of the underlying securities financing trade as assigned by the instructing party. The identification is common to all collateral pieces (one or many).
	SecuritiesFinancingTradeIdentification *RestrictedFINXMax16Text `xml:"SctiesFincgTradId,omitempty"`

	// Unambiguous identification of the second leg of the transaction as known by the account owner (or the instructing party acting on its behalf).
	ClosingLegIdentification *RestrictedFINXMax16Text `xml:"ClsgLegId,omitempty"`

	// Closing date/time or maturity date/time of the transaction.
	TerminationDate *TerminationDate5Choice `xml:"TermntnDt,omitempty"`

	// Specifies whether the rate is fixed or variable.
	RateType *RateType67Choice `xml:"RateTp,omitempty"`

	// Legal framework of the transaction.
	LegalFramework *LegalFramework4Choice `xml:"LglFrmwk,omitempty"`

	// Specifies whether the maturity date of the securities financing transaction may be modified.
	MaturityDateModification *YesNoIndicator `xml:"MtrtyDtMod,omitempty"`

	// Specifies whether the interest is to be paid to the collateral taker. If set to no, the interest is paid to the collateral giver.
	InterestPayment *YesNoIndicator `xml:"IntrstPmt,omitempty"`

	// Index or support rate used together with the spread to calculate the
	// repurchase rate.
	VariableRateSupport *RateName2 `xml:"VarblRateSpprt,omitempty"`

	// Rate to be used to recalculate the repurchase amount.
	RepurchaseRate *Rate2 `xml:"RpRate,omitempty"`

	// Minimum number of days' notice a counterparty needs for terminating the transaction.
	TransactionCallDelay *Exact3NumericText `xml:"TxCallDely,omitempty"`

	// Interest amount that has accrued in between coupon payment periods.
	AccruedInterestAmount *AmountAndDirection59 `xml:"AcrdIntrstAmt,omitempty"`

	// Total amount of money to be settled to terminate the transaction.
	TerminationTransactionAmount *AmountAndDirection59 `xml:"TermntnTxAmt,omitempty"`

	// Provides additional information about the second leg in narrative form.
	SecondLegNarrative *RestrictedFINXMax140Text `xml:"ScndLegNrrtv,omitempty"`
}
// SetSecuritiesFinancingTradeIdentification stores value as the optional trade identification.
func (s *SecuritiesFinancingTransactionDetails34) SetSecuritiesFinancingTradeIdentification(value string) {
	s.SecuritiesFinancingTradeIdentification = (*RestrictedFINXMax16Text)(&value)
}

// SetClosingLegIdentification stores value as the optional closing leg identification.
func (s *SecuritiesFinancingTransactionDetails34) SetClosingLegIdentification(value string) {
	s.ClosingLegIdentification = (*RestrictedFINXMax16Text)(&value)
}

// AddTerminationDate allocates and returns the TerminationDate choice for further population.
func (s *SecuritiesFinancingTransactionDetails34) AddTerminationDate() *TerminationDate5Choice {
	s.TerminationDate = new(TerminationDate5Choice)
	return s.TerminationDate
}

// AddRateType allocates and returns the RateType choice.
func (s *SecuritiesFinancingTransactionDetails34) AddRateType() *RateType67Choice {
	s.RateType = new(RateType67Choice)
	return s.RateType
}

// AddLegalFramework allocates and returns the LegalFramework choice.
func (s *SecuritiesFinancingTransactionDetails34) AddLegalFramework() *LegalFramework4Choice {
	s.LegalFramework = new(LegalFramework4Choice)
	return s.LegalFramework
}

// SetMaturityDateModification stores value as the optional maturity-date-modification indicator.
func (s *SecuritiesFinancingTransactionDetails34) SetMaturityDateModification(value string) {
	s.MaturityDateModification = (*YesNoIndicator)(&value)
}

// SetInterestPayment stores value as the optional interest-payment indicator.
func (s *SecuritiesFinancingTransactionDetails34) SetInterestPayment(value string) {
	s.InterestPayment = (*YesNoIndicator)(&value)
}

// AddVariableRateSupport allocates and returns the variable rate support.
func (s *SecuritiesFinancingTransactionDetails34) AddVariableRateSupport() *RateName2 {
	s.VariableRateSupport = new(RateName2)
	return s.VariableRateSupport
}

// AddRepurchaseRate allocates and returns the repurchase rate.
func (s *SecuritiesFinancingTransactionDetails34) AddRepurchaseRate() *Rate2 {
	s.RepurchaseRate = new(Rate2)
	return s.RepurchaseRate
}

// SetTransactionCallDelay stores value as the optional transaction call delay.
func (s *SecuritiesFinancingTransactionDetails34) SetTransactionCallDelay(value string) {
	s.TransactionCallDelay = (*Exact3NumericText)(&value)
}

// AddAccruedInterestAmount allocates and returns the accrued interest amount.
func (s *SecuritiesFinancingTransactionDetails34) AddAccruedInterestAmount() *AmountAndDirection59 {
	s.AccruedInterestAmount = new(AmountAndDirection59)
	return s.AccruedInterestAmount
}

// AddTerminationTransactionAmount allocates and returns the termination transaction amount.
func (s *SecuritiesFinancingTransactionDetails34) AddTerminationTransactionAmount() *AmountAndDirection59 {
	s.TerminationTransactionAmount = new(AmountAndDirection59)
	return s.TerminationTransactionAmount
}

// SetSecondLegNarrative stores value as the optional second-leg narrative.
func (s *SecuritiesFinancingTransactionDetails34) SetSecondLegNarrative(value string) {
	s.SecondLegNarrative = (*RestrictedFINXMax140Text)(&value)
}
package ocr
import (
"image"
"image/color"
"math"
"sort"
"github.com/LKKlein/gocv"
"github.com/PaddlePaddle/PaddleOCR/thirdparty/paddleocr-go/paddle"
clipper "github.com/ctessum/go.clipper"
)
// xFloatSortBy sorts a list of float32 points by ascending x (element 0).
type xFloatSortBy [][]float32

func (a xFloatSortBy) Len() int           { return len(a) }
func (a xFloatSortBy) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a xFloatSortBy) Less(i, j int) bool { return a[i][0] < a[j][0] }
// xIntSortBy sorts a list of int points by ascending x (element 0).
type xIntSortBy [][]int

func (a xIntSortBy) Len() int           { return len(a) }
func (a xIntSortBy) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a xIntSortBy) Less(i, j int) bool { return a[i][0] < a[j][0] }
// DetPostProcess converts a detection network's raw output tensor into
// text-box quadrilaterals expressed in original-image coordinates.
type DetPostProcess interface {
	Run(output *paddle.ZeroCopyTensor, oriH, oriW int, ratioH, ratioW float64) [][][]int
}
// DBPostProcess implements post-processing for DB (Differentiable
// Binarization) text detection output.
type DBPostProcess struct {
	thresh        float64 // binarization threshold applied to the probability map
	boxThresh     float64 // minimum mean score for a candidate box to survive
	maxCandidates int     // cap on the number of contours considered
	unClipRatio   float64 // expansion ratio used by unClip
	minSize       int     // minimum box side length, in map pixels
}
// NewDBPostProcess builds a DBPostProcess from the given binarization,
// box-score and unclip parameters; candidate and size limits use fixed
// defaults (1000 candidates, 3 px minimum side).
func NewDBPostProcess(thresh, boxThresh, unClipRatio float64) *DBPostProcess {
	d := &DBPostProcess{
		maxCandidates: 1000,
		minSize:       3,
	}
	d.thresh = thresh
	d.boxThresh = boxThresh
	d.unClipRatio = unClipRatio
	return d
}
// getMinBoxes extracts the four corners of a rotated rect and returns
// them ordered clockwise from the top-left:
// [top-left, top-right, bottom-right, bottom-left].
func (d *DBPostProcess) getMinBoxes(rect gocv.RotatedRect) [][]float32 {
	points := gocv.NewMat()
	gocv.BoxPoints(rect, &points)
	defer points.Close()

	// Sort the corners by x: array[0..1] are the left pair,
	// array[2..3] the right pair.
	array := d.mat2slice(points)
	sort.Sort(xFloatSortBy(array))

	point1, point2, point3, point4 := array[0], array[1], array[2], array[3]
	// Right pair: the corner with the smaller y is the top-right.
	if array[3][1] <= array[2][1] {
		point2, point3 = array[3], array[2]
	} else {
		point2, point3 = array[2], array[3]
	}
	// Left pair: the corner with the smaller y is the top-left.
	if array[1][1] <= array[0][1] {
		point1, point4 = array[1], array[0]
	} else {
		point1, point4 = array[0], array[1]
	}

	array = [][]float32{point1, point2, point3, point4}
	return array
}
// mat2slice copies a float32 gocv.Mat into a row-major [][]float32.
func (d *DBPostProcess) mat2slice(mat gocv.Mat) [][]float32 {
	rows, cols := mat.Rows(), mat.Cols()
	out := make([][]float32, rows)
	for r := 0; r < rows; r++ {
		row := make([]float32, cols)
		for c := 0; c < cols; c++ {
			row[c] = mat.GetFloatAt(r, c)
		}
		out[r] = row
	}
	return out
}
// boxScoreFast returns the mean of pred inside the quadrilateral
// described by array, computed over the quad's bounding box with a
// rasterized polygon mask.
func (d *DBPostProcess) boxScoreFast(array [][]float32, pred gocv.Mat) float64 {
	height, width := pred.Rows(), pred.Cols()
	boxX := []float32{array[0][0], array[1][0], array[2][0], array[3][0]}
	boxY := []float32{array[0][1], array[1][1], array[2][1], array[3][1]}

	// Bounding box of the quad, clamped to the prediction map.
	xmin := clip(int(math.Floor(float64(minf(boxX)))), 0, width-1)
	xmax := clip(int(math.Ceil(float64(maxf(boxX)))), 0, width-1)
	ymin := clip(int(math.Floor(float64(minf(boxY)))), 0, height-1)
	ymax := clip(int(math.Ceil(float64(maxf(boxY)))), 0, height-1)

	// Rasterize the quad (shifted to bounding-box coordinates) into a mask.
	mask := gocv.NewMatWithSize(ymax-ymin+1, xmax-xmin+1, gocv.MatTypeCV8UC1)
	defer mask.Close()
	ppt := make([][]image.Point, 1)
	ppt[0] = make([]image.Point, 4)
	ppt[0][0] = image.Point{int(array[0][0]) - xmin, int(array[0][1]) - ymin}
	ppt[0][1] = image.Point{int(array[1][0]) - xmin, int(array[1][1]) - ymin}
	ppt[0][2] = image.Point{int(array[2][0]) - xmin, int(array[2][1]) - ymin}
	ppt[0][3] = image.Point{int(array[3][0]) - xmin, int(array[3][1]) - ymin}
	gocv.FillPoly(&mask, ppt, color.RGBA{0, 0, 1, 0})

	// Masked mean of the prediction over the cropped region.
	// NOTE(review): croppedImg (a Mat region) is never Closed — confirm
	// whether this leaks native memory in this gocv version.
	croppedImg := pred.Region(image.Rect(xmin, ymin, xmax+1, ymax+1))
	s := croppedImg.MeanWithMask(mask)
	return s.Val1
}
// unClip expands the detected quadrilateral outward by a distance of
// area * unClipRatio / perimeter (the DB paper's un-clipping step) using
// a polygon offset, and returns the minimum-area rotated rect of the
// expanded polygon.
func (d *DBPostProcess) unClip(box [][]float32) gocv.RotatedRect {
	var area, dist float64
	// Shoelace formula accumulates twice the signed area; dist
	// accumulates the perimeter (sum of edge lengths).
	for i := 0; i < 4; i++ {
		area += float64(box[i][0]*box[(i+1)%4][1] - box[i][1]*box[(i+1)%4][0])
		dist += math.Sqrt(float64(
			(box[i][0]-box[(i+1)%4][0])*(box[i][0]-box[(i+1)%4][0]) +
				(box[i][1]-box[(i+1)%4][1])*(box[i][1]-box[(i+1)%4][1]),
		))
	}
	area = math.Abs(area / 2.0)
	distance := area * d.unClipRatio / dist

	// Offset (dilate) the polygon outward by `distance`.
	offset := clipper.NewClipperOffset()
	path := make([]*clipper.IntPoint, 4)
	path[0] = &clipper.IntPoint{X: clipper.CInt(box[0][0]), Y: clipper.CInt(box[0][1])}
	path[1] = &clipper.IntPoint{X: clipper.CInt(box[1][0]), Y: clipper.CInt(box[1][1])}
	path[2] = &clipper.IntPoint{X: clipper.CInt(box[2][0]), Y: clipper.CInt(box[2][1])}
	path[3] = &clipper.IntPoint{X: clipper.CInt(box[3][0]), Y: clipper.CInt(box[3][1])}
	offset.AddPath(clipper.Path(path), clipper.JtRound, clipper.EtClosedPolygon)
	soln := offset.Execute(distance)

	// Collect every vertex of the offset solution.
	points := make([]image.Point, 0, 4)
	for i := 0; i < len(soln); i++ {
		for j := 0; j < len(soln[i]); j++ {
			points = append(points, image.Point{int(soln[i][j].X), int(soln[i][j].Y)})
		}
	}

	// Degenerate offset result: fall back to a unit rectangle so callers
	// can still size-filter the box away.
	var res gocv.RotatedRect
	if len(points) <= 0 {
		points = make([]image.Point, 4)
		points[0] = image.Pt(0, 0)
		points[1] = image.Pt(1, 0)
		points[2] = image.Pt(1, 1)
		points[3] = image.Pt(0, 1)
		res = gocv.RotatedRect{
			Contour:      points,
			BoundingRect: image.Rect(0, 0, 1, 1),
			Center:       gocv.Point2f{X: 0.5, Y: 0.5},
			Width:        1,
			Height:       1,
			Angle:        0,
		}
	} else {
		res = gocv.MinAreaRect(points)
	}
	return res
}
// boxesFromBitmap finds contours in the binarized mask, filters the
// candidates by size and mean score against the probability map, expands
// the survivors with unClip, and maps their corners from map coordinates
// back to original-image coordinates (dividing by ratioW/ratioH).
func (d *DBPostProcess) boxesFromBitmap(pred gocv.Mat, mask gocv.Mat, ratioH float64, ratioW float64) [][][]int {
	height, width := mask.Rows(), mask.Cols()
	// Scale the 0/1 mask to 0/255 for contour detection.
	mask.MultiplyUChar(255)
	contours := gocv.FindContours(mask, gocv.RetrievalList, gocv.ChainApproxSimple)
	numContours := len(contours)
	if numContours > d.maxCandidates {
		numContours = d.maxCandidates
	}

	boxes := make([][][]int, 0, numContours)
	for i := 0; i < numContours; i++ {
		contour := contours[i]
		boundingbox := gocv.MinAreaRect(contour)
		// Drop boxes smaller than the minimum side length.
		if boundingbox.Width < float32(d.minSize) || boundingbox.Height < float32(d.minSize) {
			continue
		}
		points := d.getMinBoxes(boundingbox)
		// Drop boxes whose mean probability is below the score threshold.
		score := d.boxScoreFast(points, pred)
		if score < d.boxThresh {
			continue
		}
		// Expand the box, then re-check the (slightly looser) size limit.
		box := d.unClip(points)
		if box.Width < float32(d.minSize+2) || box.Height < float32(d.minSize+2) {
			continue
		}
		cliparray := d.getMinBoxes(box)
		// Rescale each corner from map space to the original image,
		// clamping to the map bounds first.
		dstHeight, dstWidth := pred.Rows(), pred.Cols()
		intcliparray := make([][]int, 4)
		for i := 0; i < 4; i++ {
			p := []int{
				int(float64(clip(int(math.Round(
					float64(cliparray[i][0]/float32(width)*float32(dstWidth)))), 0, dstWidth)) / ratioW),
				int(float64(clip(int(math.Round(
					float64(cliparray[i][1]/float32(height)*float32(dstHeight)))), 0, dstHeight)) / ratioH),
			}
			intcliparray[i] = p
		}
		boxes = append(boxes, intcliparray)
	}
	return boxes
}
// orderPointsClockwise rearranges the four corners of box into
// [top-left, top-right, bottom-right, bottom-left] order.
func (d *DBPostProcess) orderPointsClockwise(box [][]int) [][]int {
	// Sort by x so the first two entries form the left pair and the last
	// two the right pair, then order each pair by y (smaller y on top).
	sort.Sort(xIntSortBy(box))
	left := [][]int{box[0], box[1]}
	right := [][]int{box[2], box[3]}
	if left[0][1] > left[1][1] {
		left[0], left[1] = left[1], left[0]
	}
	if right[0][1] > right[1][1] {
		right[0], right[1] = right[1], right[0]
	}
	return [][]int{left[0], right[0], right[1], left[1]}
}
// filterTagDetRes orders each box's corners clockwise, clamps them to
// the original image bounds, and discards boxes whose top edge or left
// edge is 4 px or shorter.
func (d *DBPostProcess) filterTagDetRes(boxes [][][]int, oriH, oriW int) [][][]int {
	points := make([][][]int, 0, len(boxes))
	for i := 0; i < len(boxes); i++ {
		boxes[i] = d.orderPointsClockwise(boxes[i])
		// Clamp every corner into [0, oriW-1] x [0, oriH-1].
		for j := 0; j < len(boxes[i]); j++ {
			boxes[i][j][0] = clip(boxes[i][j][0], 0, oriW-1)
			boxes[i][j][1] = clip(boxes[i][j][1], 0, oriH-1)
		}
	}
	for i := 0; i < len(boxes); i++ {
		// Edge lengths: top-left to top-right and top-left to bottom-left.
		rectW := int(math.Sqrt(math.Pow(float64(boxes[i][0][0]-boxes[i][1][0]), 2.0) +
			math.Pow(float64(boxes[i][0][1]-boxes[i][1][1]), 2.0)))
		rectH := int(math.Sqrt(math.Pow(float64(boxes[i][0][0]-boxes[i][3][0]), 2.0) +
			math.Pow(float64(boxes[i][0][1]-boxes[i][3][1]), 2.0)))
		if rectW <= 4 || rectH <= 4 {
			continue
		}
		points = append(points, boxes[i])
	}
	return points
}
func (d *DBPostProcess) Run(output *paddle.ZeroCopyTensor, oriH, oriW int, ratioH, ratioW float64) [][][]int {
v := output.Value().([][][][]float32)
shape := output.Shape()
height, width := int(shape[2]), int(shape[3])
pred := gocv.NewMatWithSize(height, width, gocv.MatTypeCV32F)
bitmap := gocv.NewMatWithSize(height, width, gocv.MatTypeCV8UC1)
thresh := float32(d.thresh)
for i := 0; i < height; i++ {
for j := 0; j < width; j++ {
pred.SetFloatAt(i, j, v[0][0][i][j])
if v[0][0][i][j] > thresh {
bitmap.SetUCharAt(i, j, 1)
} else {
bitmap.SetUCharAt(i, j, 0)
}
}
}
mask := gocv.NewMat()
kernel := gocv.GetStructuringElement(gocv.MorphRect, image.Point{2, 2})
gocv.Dilate(bitmap, &mask, kernel)
boxes := d.boxesFromBitmap(pred, mask, ratioH, ratioW)
dtboxes := d.filterTagDetRes(boxes, oriH, oriW)
return dtboxes
} | thirdparty/paddleocr-go/ocr/postprocess.go | 0.536799 | 0.515864 | postprocess.go | starcoder |
package logic
// Node represents a game tree node.
type Node struct {
	// Value contains the 3x3 board state of this node.
	Value [3][3]int
	// Weight represents the weight of the move that produced this node
	// (owin / nowin / xwin).
	Weight int64
	// Children is a slice containing children of this node.
	Children []*Node
}
// Constants defined for assigning weights to terminal positions:
// negative favors O, positive favors X, zero is neutral/undecided.
const (
	owin  int64 = -1000
	nowin int64 = 0
	xwin  int64 = 1000
)

// previous tracks the player who made the most recently generated move
// so generateChildren can alternate turns; starting at O means X moves
// first.
var previous int = O
// Tree generates a complete game tree rooted at the empty board.
func Tree() *Node {
	root := &Node{
		Value:    [3][3]int{},
		Children: []*Node{},
		Weight:   nowin,
	}
	generateChildren(root)
	return root
}
// generateChildren recursively appends a child to node for every legal
// move by the side to play, expanding each branch until it ends in a win.
// Turn order is derived from the package-level `previous` variable.
// NOTE(review): `player` is fixed once per call while `previous` mutates
// during recursion — this relies on the call pattern from Tree(); confirm
// before reusing this function standalone.
func generateChildren(node *Node) {
	var child, player int
	// Whoever did not move last moves now.
	if previous == X {
		player = O
	} else {
		player = X
	}
	for i := 0; i < 3; i++ {
		for k := 0; k < 3; k++ {
			// Try placing player's mark in each empty cell; matrix only
			// differs from node.Value when a move was actually made.
			var matrix [3][3]int = node.Value
			if matrix[i][k] == EMPTY {
				matrix[i][k] = player
				previous = player
			}
			if matrix != node.Value {
				node.Children = append(node.Children, &Node{
					Value:    matrix,
					Children: []*Node{},
				})
				if !Winner(matrix).Exists {
					// No winner yet: keep expanding this branch.
					generateChildren(node.Children[child])
					node.Children[child].Weight = nowin
				} else {
					// Terminal position: score it for minimax-style use.
					if Winner(matrix).Player == X {
						node.Children[child].Weight = xwin
					} else if Winner(matrix).Player == O {
						node.Children[child].Weight = owin
					}
				}
				child++
			}
		}
	}
}
// Winner is a helper function for checking if a child node has a winner.
func Winner(matrix [3][3]int) *Win {
for row := 0; row < 3; row++ {
if matrix[row][0] == X && matrix[row][1] == X && matrix[row][2] == X {
return &Win{Exists: true, Player: X}
} else if matrix[row][0] == O && matrix[row][1] == O && matrix[row][2] == O {
return &Win{Exists: true, Player: O}
}
}
for col := 0; col < 3; col++ {
if matrix[0][col] == X && matrix[1][col] == X && matrix[2][col] == X {
return &Win{Exists: true, Player: X}
} else if matrix[0][col] == O && matrix[1][col] == O && matrix[2][col] == O {
return &Win{Exists: true, Player: O}
}
}
var xtaken, otaken = 0, 0
for i := 0; i < 3; i++ {
if matrix[i][i] == X {
xtaken++
} else if matrix[i][i] == O {
otaken++
}
}
if xtaken == 3 {
return &Win{Exists: true, Player: X}
} else if otaken == 3 {
return &Win{Exists: true, Player: O}
}
if matrix[0][2] == X && matrix[1][1] == X && matrix[2][0] == X {
return &Win{Exists: true, Player: X}
} else if matrix[0][2] == O && matrix[1][1] == O && matrix[2][0] == O {
return &Win{Exists: true, Player: O}
}
return &Win{Exists: false}
} | src/logic/tree.go | 0.746416 | 0.501282 | tree.go | starcoder |
package event
// isPowerOfTwo reports whether v is an exact power of two.
// Zero is not considered a power of two.
func isPowerOfTwo(v uint64) bool {
	return v != 0 && v&(v-1) == 0
}
// BaseEventDataRing is a ring of EventData with a fixed, power-of-two
// capacity (enforced by BaseEventDataRingCreate). Positions grow
// monotonically; slot indices are taken modulo len(datas).
type BaseEventDataRing struct {
	datas    []BaseEventData // fixed backing storage; len(datas) is the capacity
	writePos uint64          // count of elements ever written; next write slot is writePos % len
	readPos  uint64          // count of elements ever read; next read slot is readPos % len
}
// BaseEventDataRingCreate builds a ring with the given capacity.
// Panics unless cap is a power of two.
func BaseEventDataRingCreate(cap int) BaseEventDataRing {
	if !isPowerOfTwo(uint64(cap)) {
		panic("cap is not power of two")
	}
	return BaseEventDataRing{datas: make([]BaseEventData, cap)}
}
// Offer puts data in the ring at current writePos if self.Free() > 0
// and increses the writePos.
func (self *BaseEventDataRing) Offer(data BaseEventData) (isIn bool) {
if self.Free() == 0 {
return false
}
self.datas[self.writePos%uint64(len(self.datas))] = data
self.writePos++
return true
}
// Pop returns the element at the current readPos if self.Empty() == false
// and increases the readPos.
func (self *BaseEventDataRing) Pop(ed *BaseEventData) (hasEvent bool) {
if self.Empty() {
return
}
rPos := self.readPos
self.readPos++
*ed = self.datas[rPos%uint64(len(self.datas))]
return true
}
// Get returns the element at the current readPos if self.Empty() == false
func (self *BaseEventDataRing) Get(ed *BaseEventData) (hasEvent bool) {
if self.Empty() {
return
}
*ed = self.datas[self.readPos%uint64(len(self.datas))]
return true
}
func (self *BaseEventDataRing) At() *BaseEventData {
if self.Empty() {
return nil
}
return &self.datas[self.readPos%uint64(len(self.datas))]
}
// IncReadPos pops cnt events. (adds cnt to self.readPos) panics if self.Cnt() < cnt
func (self *BaseEventDataRing) IncReadPos(cnt uint64) {
if self.Cnt() < cnt {
panic("self.Cnt() < cnt")
}
self.readPos += cnt
}
// Cnt returns the cnt of the elements in the ring.
func (self *BaseEventDataRing) Cnt() uint64 {
return self.writePos - self.readPos
}
// Empty returns whether the ring is empty.
func (self *BaseEventDataRing) Empty() bool {
return self.writePos == self.readPos
}
// Free returns the freespace in the ring.
func (self *BaseEventDataRing) Free() uint64 {
return uint64(len(self.datas)) - self.Cnt()
}
// Reset sets writePos and readPos to zero and
// sets all element of its datas to nil.
func (self *BaseEventDataRing) Reset() {
self.writePos = 0
self.readPos = 0
for i := range self.datas {
self.datas[i] = BaseEventData{}
}
} | event/baseeventdataring.go | 0.736401 | 0.45181 | baseeventdataring.go | starcoder |
package core
import (
"fmt"
"strconv"
"strings"
colorful "github.com/lucasb-eyer/go-colorful"
tiled "github.com/zaklaus/go-tiled"
rl "github.com/zaklaus/raylib-go/raylib"
"github.com/zaklaus/raylib-go/raymath"
"github.com/zaklaus/resolv/resolv"
"github.com/zaklaus/rurik/src/system"
)
const (
	// FrustumSafeMargin is the margin (in pixels) within which an object
	// just off-screen is still considered safe to render.
	FrustumSafeMargin = 32.0
)

// Bits represent bitflags
type Bits uint64

// BitsSet returns b with all bits of flag set
func BitsSet(b, flag Bits) Bits { return b | flag }

// BitsClear returns b with all bits of flag cleared
func BitsClear(b, flag Bits) Bits { return b &^ flag }

// BitsToggle returns b with all bits of flag toggled on/off
func BitsToggle(b, flag Bits) Bits { return b ^ flag }

// BitsHas reports whether any bit of flag is set in b
func BitsHas(b, flag Bits) bool { return b&flag != 0 }
// CompileEventArgs returns cooked event arguments: the raw argument list
// split on ';'. An input without separators yields a single-element
// slice containing the input itself.
func CompileEventArgs(args string) []string {
	// strings.Split already returns []string{args} when the separator is
	// absent, so the previous strings.Contains special-case was redundant.
	return strings.Split(args, ";")
}
// rayRectangleInt32ToResolv overwrites rec with a resolv rectangle built
// from the raylib RectangleInt32 i (position and size carried over;
// any other resolv fields are reset to their zero values).
func rayRectangleInt32ToResolv(rec *resolv.Rectangle, i rl.RectangleInt32) {
	*rec = resolv.Rectangle{
		BasicShape: resolv.BasicShape{
			X: i.X,
			Y: i.Y,
		},
		W: i.Width,
		H: i.Height,
	}
}
// DrawTextCentered draws text horizontally centered on posX at posY.
// Font sizes below 10 are clamped to 10.
func DrawTextCentered(text string, posX, posY, fontSize int32, color rl.Color) {
	const minFontSize = 10
	if fontSize < minFontSize {
		fontSize = minFontSize
	}
	halfWidth := rl.MeasureText(text, fontSize) / 2
	rl.DrawText(text, posX-halfWidth, posY, fontSize, color)
}
// Vector2Lerp linearly interpolates between v1 and v2 by amount
// (0 yields v1, 1 yields v2).
func Vector2Lerp(v1, v2 rl.Vector2, amount float32) (result rl.Vector2) {
	return rl.NewVector2(
		v1.X+amount*(v2.X-v1.X),
		v1.Y+amount*(v2.Y-v1.Y),
	)
}
// ScalarLerp linearly interpolates between v1 and v2 by amount
// (0 yields v1, 1 yields v2).
func ScalarLerp(v1, v2 float32, amount float32) (result float32) {
	return v1 + amount*(v2-v1)
}
// StringToVec2 parses a space-separated "x y" string into a vector.
// NOTE(review): parse errors are ignored (the component becomes 0) and
// input without a space panics on comps[1] — assumes trusted map data.
func StringToVec2(inp string) rl.Vector2 {
	comps := strings.Split(inp, " ")
	x, _ := strconv.ParseFloat(comps[0], 32)
	y, _ := strconv.ParseFloat(comps[1], 32)
	return rl.NewVector2(float32(x), float32(y))
}
// LerpColor linearly interpolates between two RGB vectors by t.
// Thin wrapper over raymath.Vector3Lerp; t is narrowed to float32.
func LerpColor(a, b rl.Vector3, t float64) rl.Vector3 {
	return raymath.Vector3Lerp(a, b, float32(t))
}
// GetColorFromHex converts a hex color string into a normalized (0..1)
// RGB vector. The first three characters are skipped before parsing, so
// the input is expected to carry a "#AA" alpha prefix (e.g. Tiled's
// "#AARRGGBB"); the remainder is parsed as "#RRGGBB".
// NOTE(review): non-empty inputs shorter than 3 characters panic on
// hex[3:] — assumes well-formed map data.
func GetColorFromHex(hex string) (rl.Vector3, error) {
	if hex == "" {
		return rl.Vector3{}, fmt.Errorf("hex not specified")
	}

	c, err := colorful.Hex("#" + hex[3:])
	if err != nil {
		return rl.Vector3{}, err
	}

	d := rl.NewVector3(
		float32(c.R),
		float32(c.G),
		float32(c.B),
	)

	return d, nil
}
// Vec3ToColor converts a normalized (0..1) RGB vector into an opaque
// rl.Color (alpha fixed at 255).
func Vec3ToColor(a rl.Vector3) rl.Color {
	red := uint8(a.X * 255)
	green := uint8(a.Y * 255)
	blue := uint8(a.Z * 255)
	return rl.NewColor(red, green, blue, 255)
}
// ColorToVec3 conv
func ColorToVec3(a rl.Color) rl.Vector3 {
return rl.NewVector3(
float32(a.R)/255.0,
float32(a.G)/255.0,
float32(a.B)/255.0,
)
}
// MixColor mixes two colors together
func MixColor(a, b rl.Color) rl.Color {
return Vec3ToColor(raymath.Vector3Lerp(
ColorToVec3(a),
ColorToVec3(b),
0.5,
))
}
// IsMouseInRectangle checks whether the mouse (in screen space) is strictly
// inside the rectangle given as origin (x, y) plus size (x2, y2).
func IsMouseInRectangle(x, y, x2, y2 int32) bool {
	// Convert width/height into absolute right/bottom edges.
	x2 = x + x2
	y2 = y + y2
	m := system.GetMousePosition()
	if m[0] > x && m[0] < x2 &&
		m[1] > y && m[1] < y2 {
		return true
	}
	return false
}

// IsMouseInRectangleRec checks whether the mouse (in screen space) is
// strictly inside the given rectangle.
func IsMouseInRectangleRec(rec rl.Rectangle) bool {
	x := int32(rec.X)
	y := int32(rec.Y)
	x2 := x + int32(rec.Width)
	y2 := y + int32(rec.Height)
	m := system.GetMousePosition()
	if m[0] > x && m[0] < x2 &&
		m[1] > y && m[1] < y2 {
		return true
	}
	return false
}

// IsMouseInRectangle2D checks whether the mouse, translated into 2D world
// coordinates, is strictly inside the given world-space rectangle.
func IsMouseInRectangle2D(rec rl.RectangleInt32) bool {
	x := float32(rec.X)
	y := float32(rec.Y)
	x2 := x + float32(rec.Width)
	y2 := y + float32(rec.Height)
	m := GetMousePosition2D()
	mX := float32(m[0])
	mY := float32(m[1])
	if mX > x && mX < x2 &&
		mY > y && mY < y2 {
		return true
	}
	return false
}
// GetSpriteAABB retrieves an object's axis-aligned bounding box. Without
// Aseprite data it falls back to the object's Size centered on Position;
// with it, the box is half the frame size, shifted by a quarter frame on X.
func GetSpriteAABB(o *Object) rl.RectangleInt32 {
	if o.Ase == nil {
		return rl.RectangleInt32{
			X:      int32(o.Position.X) - o.Size[0]/2,
			Y:      int32(o.Position.Y) - o.Size[1]/2,
			Width:  o.Size[0],
			Height: o.Size[1],
		}
	}
	return rl.RectangleInt32{
		X:      int32(o.Position.X) - int32(float32(o.Ase.FrameWidth/2)) + int32(float32(o.Ase.FrameWidth/4)),
		Y:      int32(o.Position.Y),
		Width:  o.Ase.FrameWidth / 2,
		Height: o.Ase.FrameHeight / 2,
	}
}

// GetSolidAABB retrieves an object's solid (collision) boundaries:
// Position as the top-left corner plus Size.
func GetSolidAABB(o *Object) rl.RectangleInt32 {
	return rl.RectangleInt32{
		X:      int32(o.Position.X),
		Y:      int32(o.Position.Y),
		Width:  o.Size[0],
		Height: o.Size[1],
	}
}

// Vector2ToIntArray converts a raylib Vector2 to an int32 array,
// truncating the fractional parts.
func Vector2ToIntArray(a rl.Vector2) [2]int32 {
	return [2]int32{
		int32(a.X), int32(a.Y),
	}
}

// IntArrayToVector2 converts an int32 array to a raylib Vector2.
func IntArrayToVector2(a [2]int32) rl.Vector2 {
	return rl.NewVector2(float32(a[0]), float32(a[1]))
}

// ScreenToWorldPos translates a screen position to a 2D world position,
// applying the main camera's position and zoom. Without a camera the
// transform assumes the origin with zoom 1.
func ScreenToWorldPos(a [2]int32) [2]int32 {
	camPos := rl.Vector2{}
	var camZoom float32 = 1
	if MainCamera != nil {
		camPos = MainCamera.Position
		camZoom = MainCamera.Zoom
	}
	return [2]int32{
		int32(camPos.X + float32(a[0])/camZoom - float32(system.ScreenWidth)/2/camZoom),
		int32(camPos.Y + float32(a[1])/camZoom - float32(system.ScreenHeight)/2/camZoom),
	}
}

// WorldToScreenPos translates a 2D world position to a screen position;
// it is the inverse of ScreenToWorldPos (up to integer truncation).
func WorldToScreenPos(a [2]int32) [2]int32 {
	camPos := rl.Vector2{}
	var camZoom float32 = 1
	if MainCamera != nil {
		camPos = MainCamera.Position
		camZoom = MainCamera.Zoom
	}
	return [2]int32{
		int32((float32(a[0]) - camPos.X + float32(system.ScreenWidth)/2/camZoom) * camZoom),
		int32((float32(a[1]) - camPos.Y + float32(system.ScreenHeight)/2/camZoom) * camZoom),
	}
}

// ScreenToWorldPosRec translates a rectangle's origin from screen to 2D
// world position; width and height are ignored.
func ScreenToWorldPosRec(a rl.RectangleInt32) [2]int32 {
	return ScreenToWorldPos([2]int32{a.X, a.Y})
}

// WorldToScreenPosRec translates a rectangle's origin from 2D world to
// screen position; width and height are ignored.
func WorldToScreenPosRec(a rl.RectangleInt32) [2]int32 {
	return WorldToScreenPos([2]int32{a.X, a.Y})
}

// GetMousePosition2D returns the mouse position within the map, i.e. the
// raw mouse position scaled by the pixel ratio and translated into world
// coordinates.
func GetMousePosition2D() [2]int32 {
	mo := rl.GetMousePosition()
	m := [2]int32{
		int32(mo.X / system.ScaleRatio),
		int32(mo.Y / system.ScaleRatio),
	}
	return ScreenToWorldPos(m)
}
// PlayAnim plays the named Aseprite animation on the object; unknown
// animation names are silently ignored.
func PlayAnim(p *Object, animName string) {
	if p.Ase.GetAnimation(animName) != nil {
		p.Ase.Play(animName)
	} else {
		//log.Println("Animation name:", animName, "not found!")
	}
}

// GetSpriteRectangle retrieves the source rectangle of the object's
// current animation frame within its sprite sheet.
func GetSpriteRectangle(o *Object) rl.Rectangle {
	sourceX, sourceY := o.Ase.GetFrameXY()
	return rl.NewRectangle(float32(sourceX), float32(sourceY), float32(o.Ase.FrameWidth), float32(o.Ase.FrameHeight))
}

// GetSpriteOrigin retrieves the destination rectangle for drawing the
// sprite centered on the object's position.
func GetSpriteOrigin(o *Object) rl.Rectangle {
	return rl.NewRectangle(float32(o.Position.X)-float32(o.Ase.FrameWidth/2), float32(o.Position.Y)-float32(o.Ase.FrameHeight/2), float32(o.Ase.FrameWidth), float32(o.Ase.FrameHeight))
}

// IsPointWithinRectangle checks whether a point lies strictly inside a
// rectangle (points on the border are considered outside).
func IsPointWithinRectangle(p rl.Vector2, r rl.Rectangle) bool {
	if p.X > r.X && p.X < (r.X+r.Width) &&
		p.Y > r.Y && p.Y < (r.Y+r.Height) {
		return true
	}
	return false
}

// GetColorFromProperty retrieves a color from a Tiled object property;
// a missing or empty property yields rl.Blank. Parse errors are ignored.
func GetColorFromProperty(o *tiled.Object, name string) rl.Color {
	colorHex := o.Properties.GetString(name)
	var color rl.Color
	if colorHex != "" {
		colorVec, _ := GetColorFromHex(colorHex)
		color = Vec3ToColor(colorVec)
	} else {
		color = rl.Blank
	}
	return color
}

// GetVec2FromProperty retrieves a Vector2 from a Tiled object property;
// a missing or empty property yields the zero vector.
func GetVec2FromProperty(o *tiled.Object, name string) rl.Vector2 {
	txtVec := o.Properties.GetString(name)
	var vec rl.Vector2
	if txtVec != "" {
		vec = StringToVec2(txtVec)
	}
	return vec
}

// GetFloatFromProperty retrieves a float from a Tiled object property;
// a missing, empty, or unparsable property yields 0.
func GetFloatFromProperty(o *tiled.Object, name string) float32 {
	fltString := o.Properties.GetString(name)
	var flt float32
	if fltString != "" {
		fltRaw, _ := strconv.ParseFloat(fltString, 32)
		flt = float32(fltRaw)
	} else {
		flt = 0
	}
	return flt
}
// IsPointWithinFrustum checks whether a world-space point is within the
// main camera's visible rectangle, expanded by FrustumSafeMargin on every
// side. Returns false when no camera is set.
func IsPointWithinFrustum(p rl.Vector2) bool {
	if MainCamera == nil {
		return false
	}
	// Top-left corner of the visible area in world space.
	camOffset := rl.Vector2{
		X: float32(int(float32(MainCamera.Position.X) - float32(system.ScreenWidth)/2/MainCamera.Zoom)),
		Y: float32(int(float32(MainCamera.Position.Y) - float32(system.ScreenHeight)/2/MainCamera.Zoom)),
	}
	cam := rl.Rectangle{
		X:      camOffset.X - FrustumSafeMargin,
		Y:      camOffset.Y - FrustumSafeMargin,
		Width:  float32(system.ScreenWidth)/MainCamera.Zoom + FrustumSafeMargin*2,
		Height: float32(system.ScreenHeight)/MainCamera.Zoom + FrustumSafeMargin*2,
	}
	return IsPointWithinRectangle(p, cam)
}
// atoiUnsafe parses s as a decimal integer, silently discarding any
// conversion error (strconv.Atoi's return value — 0 on a syntax error —
// is used as-is).
func atoiUnsafe(s string) int {
	n, _ := strconv.Atoi(s)
	return n
}
package networking
import "encoding/binary"
// Output represents a connection output (i.e. what's written to the
// connection). It wraps several helpers to write to this output.
type Output struct {
	// buf accumulates all written bytes.
	buf []byte
}

// NewOutput returns a well-formed Output with an empty buffer.
func NewOutput() Output {
	return Output{
		buf: make([]byte, 0),
	}
}

// Bytes returns the underlying buffer (not a copy).
func (out *Output) Bytes() []byte {
	return out.buf
}

// Write is just a wrapper around WriteBytes to make Output implement
// io.Writer; it never fails.
func (out *Output) Write(buf []byte) (int, error) {
	out.WriteBytes(buf)
	return len(buf), nil
}

// WriteByte is equivalent to WriteSingleByte but implements the
// io.ByteWriter interface; it never fails.
func (out *Output) WriteByte(b byte) error {
	out.WriteSingleByte(b)
	return nil
}

// WriteSingleByte appends a single byte to the output.
func (out *Output) WriteSingleByte(b byte) {
	out.buf = append(out.buf, b)
}

// WriteBytes appends a slice of bytes to the output.
func (out *Output) WriteBytes(b []byte) {
	out.buf = append(out.buf, b...)
}
// WriteBigEndianInt16 writes a big endian 2-byte int (short) to the output.
func (out *Output) WriteBigEndianInt16(i uint16) {
	int16Buf := make([]byte, 2)
	binary.BigEndian.PutUint16(int16Buf, i)
	out.WriteBytes(int16Buf)
}

// WriteLittleEndianInt16 writes a little endian 2-byte int (short) to the output.
func (out *Output) WriteLittleEndianInt16(i uint16) {
	int16Buf := make([]byte, 2)
	binary.LittleEndian.PutUint16(int16Buf, i)
	out.WriteBytes(int16Buf)
}

// WriteBigEndianInt32 writes a big endian 4-byte int to the output.
func (out *Output) WriteBigEndianInt32(i uint32) {
	int32Buf := make([]byte, 4)
	binary.BigEndian.PutUint32(int32Buf, i)
	out.WriteBytes(int32Buf)
}

// WriteLittleEndianInt32 writes a little endian 4-byte int to the output.
func (out *Output) WriteLittleEndianInt32(i uint32) {
	int32Buf := make([]byte, 4)
	binary.LittleEndian.PutUint32(int32Buf, i)
	out.WriteBytes(int32Buf)
}

// WriteBigEndianInt64 writes a big endian 8-byte int (long) to the output.
func (out *Output) WriteBigEndianInt64(i uint64) {
	int64Buf := make([]byte, 8)
	binary.BigEndian.PutUint64(int64Buf, i)
	out.WriteBytes(int64Buf)
}

// WriteLittleEndianInt64 writes a little endian 8-byte int (long) to the output.
func (out *Output) WriteLittleEndianInt64(i uint64) {
	int64Buf := make([]byte, 8)
	binary.LittleEndian.PutUint64(int64Buf, i)
	out.WriteBytes(int64Buf)
}
// WriteUVarInt writes an unsigned varint to the output.
func (out *Output) WriteUVarInt(i uint64) {
	uvarintBuf := make([]byte, binary.MaxVarintLen64)
	// i is already a uint64; no conversion needed.
	n := binary.PutUvarint(uvarintBuf, i)
	out.WriteBytes(uvarintBuf[:n])
}

// WriteVarInt writes a signed (zig-zag encoded) varint to the output.
func (out *Output) WriteVarInt(i int64) {
	varintBuf := make([]byte, binary.MaxVarintLen64)
	// i is already an int64; no conversion needed.
	n := binary.PutVarint(varintBuf, i)
	out.WriteBytes(varintBuf[:n])
}
// WriteNullTerminatedString writes s followed by a single zero byte to
// the output.
func (out *Output) WriteNullTerminatedString(s string) {
	out.WriteBytes([]byte(s))
	out.WriteSingleByte(0)
}

// WriteString writes a standard minecraft protocol string to the output.
// It is a UTF-8 string prefixed with its size in bytes as an unsigned varint.
func (out *Output) WriteString(s string) {
	out.WriteUVarInt(uint64(len(s)))
	out.WriteBytes([]byte(s))
}
// MergeOutputs merge buffers of two outputs, creating a new output and without modifying any of the merged output buffer.
func MergeOutputs(out1, out2 Output) Output {
out := NewOutput()
out.WriteBytes(out1.buf)
out.WriteBytes(out2.buf)
return out
} | pkg/networking/output.go | 0.770983 | 0.409221 | output.go | starcoder |
package bisect_squares_lcci
import "math"
/*
面试题 16.13. 平分正方形 https://leetcode-cn.com/problems/bisect-squares-lcci/
给定两个正方形及一个二维平面。请找出将这两个正方形分割成两半的一条直线。假设正方形顶边和底边与 x 轴平行。
每个正方形的数据square包含3个数值,正方形的左下顶点坐标[X,Y] = [square[0],square[1]],以及正方形的边长square[2]。
所求直线穿过两个正方形会形成4个交点,请返回4个交点形成线段的两端点坐标
(两个端点即为4个交点中距离最远的2个点,这2个点所连成的线段一定会穿过另外2个交点)。
2个端点坐标[X1,Y1]和[X2,Y2]的返回格式为{X1,Y1,X2,Y2},要求若X1 != X2,需保证X1 < X2,否则需保证Y1 <= Y2。
若同时有多条直线满足要求,则选择斜率最大的一条计算并返回(与Y轴平行的直线视为斜率无穷大)。
示例:
输入:
square1 = {-1, -1, 2}
square2 = {0, -1, 2}
输出: {-1,0,2,0}
解释: 直线 y = 0 能将两个正方形同时分为等面积的两部分,返回的两线段端点为[-1,0]和[2,0]
提示:
square.length == 3
square[2] > 0
*/
/*
Any line that bisects both squares must pass through both centers o1 and o2.
When the line o1-o2 is parallel to a coordinate axis the result is trivial.
Otherwise the slope decides whether the answer is the intersection of the
line with the top/bottom boundary or with the left/right boundary of the
combined bounding box. The intersections are computed from the two-point
form of the line equation.
*/
func cutSquares(square1 []int, square2 []int) []float64 {
	// Centers of the two squares.
	o1x := float64(square1[0]) + float64(square1[2])/2
	o1y := float64(square1[1]) + float64(square1[2])/2
	o2x := float64(square2[0]) + float64(square2[2])/2
	o2y := float64(square2[1]) + float64(square2[2])/2
	// Bounding box covering both squares.
	minX := min(square1[0], square2[0])
	maxX := max(square1[0]+square1[2], square2[0]+square2[2])
	minY := min(square1[1], square2[1])
	maxY := max(square1[1]+square1[2], square2[1]+square2[2])
	if equal(o1x, o2x) {
		// Vertical line (infinite slope).
		return []float64{o1x, minY, o1x, maxY}
	}
	if equal(o1y, o2y) {
		// Horizontal line.
		return []float64{minX, o1y, maxX, o2y}
	}
	k := (o1y - o2y) / (o1x - o2x)
	// Steep line: intersect the top and bottom boundaries.
	// Two-point form: (x-x1)(y2-y1)=(y-y1)(x2-x1), so at y=y0,
	// x=(y0-y1)(x2-x1)/(y2-y1) + x1.
	if k > 1 { // rising from lower-left to upper-right
		return []float64{(minY-o1y)*(o2x-o1x)/(o2y-o1y) + o1x, minY, (maxY-o1y)*(o2x-o1x)/(o2y-o1y) + o1x, maxY}
	}
	if k < -1 { // falling from upper-left to lower-right
		return []float64{(maxY-o1y)*(o2x-o1x)/(o2y-o1y) + o1x, maxY, (minY-o1y)*(o2x-o1x)/(o2y-o1y) + o1x, minY}
	}
	// Shallow line: intersect the left and right boundaries.
	// From the two-point form, at x=x0: y=(x0-x1)(y2-y1)/(x2-x1) + y1.
	return []float64{minX, (minX-o1x)*(o2y-o1y)/(o2x-o1x) + o1y, maxX, (maxX-o1x)*(o2y-o1y)/(o2x-o1x) + o1y}
}
/*
最后计算交点用了两点式,也可以用点斜式:
// 点斜式: y - y2 = k*(x-x2)
if k > 1 {
return []float64{(minY-o2y)/k+o2x, minY, (maxY-o2y)/k+o2x, maxY}
}
if k < -1 {
return []float64{(maxY-o2y)/k+o2x, maxY, (minY-o2y)/k+o2x, minY}
}
return []float64{minX, o2y + k*(minX-o2x), maxX, o2y + k*(maxX-o2x)}
*/
// min returns the smaller of two ints as a float64.
func min(a, b int) float64 {
	if b < a {
		return float64(b)
	}
	return float64(a)
}

// max returns the larger of two ints as a float64.
func max(a, b int) float64 {
	if b > a {
		return float64(b)
	}
	return float64(a)
}

// equal reports whether two floats differ by less than 1e-6.
func equal(a, b float64) bool {
	return math.Abs(b-a) < 1e-6
}
package unsafe
import (
"reflect"
"unsafe"
)
// UintptrToSlice reinterprets ptr as a byte slice with len and cap of sz.
// NOTE(review): building a slice from a bare uintptr hides the memory
// from the garbage collector — the caller must keep the backing memory
// alive for the slice's lifetime.
func UintptrToSlice(ptr uintptr, sz uint64) []byte {
	return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(ptr),
		Len:  int(sz),
		Cap:  int(sz),
	}))
}

// UnsafeToSlice reinterprets ptr as a byte slice with len and cap of sz.
func UnsafeToSlice(ptr unsafe.Pointer, sz uint64) []byte {
	return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(ptr),
		Len:  int(sz),
		Cap:  int(sz),
	}))
}

// SliceToUintptr returns the address of the first element of data.
// Panics when data is empty.
func SliceToUintptr(data []byte) uintptr {
	return uintptr(unsafe.Pointer(&data[0]))
}

// SliceToUnsafe returns a pointer to the first element of data.
// Panics when data is empty.
func SliceToUnsafe(data []byte) unsafe.Pointer {
	return unsafe.Pointer(&data[0])
}
// UnsafeToUint64Slice reinterprets ptr as a uint64 slice with len and cap
// of sz (sz counts elements, not bytes).
func UnsafeToUint64Slice(ptr unsafe.Pointer, sz uint64) []uint64 {
	return *(*[]uint64)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(ptr),
		Len:  int(sz),
		Cap:  int(sz),
	}))
}

// UnsafeToInt64Slice reinterprets ptr as an int64 slice with len and cap
// of sz (sz counts elements, not bytes).
func UnsafeToInt64Slice(ptr unsafe.Pointer, sz uint64) []int64 {
	return *(*[]int64)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(ptr),
		Len:  int(sz),
		Cap:  int(sz),
	}))
}
// ByteSliceFromUint64 returns a fresh 8-byte slice holding val in the
// machine's native byte order (no explicit endianness conversion).
func ByteSliceFromUint64(val uint64) []byte {
	buf := make([]byte, 8)
	p := (*uint64)(unsafe.Pointer(&buf[0]))
	*p = val
	return buf
}
// Int64SliceToUint64Slice reinterprets an int64 slice as a uint64 slice
// without copying; both slices share the same backing array.
func Int64SliceToUint64Slice(vals []int64) []uint64 {
	var out []uint64
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&out))
	hdr.Data = uintptr(unsafe.Pointer(&vals[0]))
	hdr.Len = len(vals)
	hdr.Cap = cap(vals)
	return out
}

// Uint64SliceToInt64Slice reinterprets a uint64 slice as an int64 slice
// without copying; both slices share the same backing array.
func Uint64SliceToInt64Slice(vals []uint64) []int64 {
	var out []int64
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&out))
	hdr.Data = uintptr(unsafe.Pointer(&vals[0]))
	hdr.Len = len(vals)
	hdr.Cap = cap(vals)
	return out
}
// Uint64SliceToByteSlice reinterprets a uint64 slice as raw bytes without
// copying; length and capacity are multiplied by 8. Bytes appear in the
// machine's native order. Panics on an empty input slice.
func Uint64SliceToByteSlice(vals []uint64) []byte {
	return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(unsafe.Pointer(&vals[0])),
		Len:  len(vals) * 8,
		Cap:  cap(vals) * 8,
	}))
}

// ByteSliceToUint64Slice reinterprets raw bytes as a uint64 slice without
// copying; length and capacity are divided by 8 (any remainder bytes are
// dropped). Panics on an empty input slice.
func ByteSliceToUint64Slice(vals []byte) []uint64 {
	return *(*[]uint64)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(unsafe.Pointer(&vals[0])),
		Len:  len(vals) / 8,
		Cap:  cap(vals) / 8,
	}))
}

// Int64SliceToByteSlice reinterprets an int64 slice as raw bytes without
// copying; length and capacity are multiplied by 8. Panics on an empty
// input slice.
func Int64SliceToByteSlice(vals []int64) []byte {
	return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(unsafe.Pointer(&vals[0])),
		Len:  len(vals) * 8,
		Cap:  cap(vals) * 8,
	}))
}

// ByteSliceToInt64Slice reinterprets raw bytes as an int64 slice without
// copying; length and capacity are divided by 8. Panics on an empty
// input slice.
func ByteSliceToInt64Slice(vals []byte) []int64 {
	return *(*[]int64)(unsafe.Pointer(&reflect.SliceHeader{
		Data: uintptr(unsafe.Pointer(&vals[0])),
		Len:  len(vals) / 8,
		Cap:  cap(vals) / 8,
	}))
}
// Uint32SliceToByteSlice reinterprets a uint32 slice as raw bytes without
// copying; length and capacity are multiplied by 4.
func Uint32SliceToByteSlice(vals []uint32) []byte {
	var out []byte
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&out))
	hdr.Data = uintptr(unsafe.Pointer(&vals[0]))
	hdr.Len = len(vals) * 4
	hdr.Cap = cap(vals) * 4
	return out
}

// ByteSliceToUint32Slice reinterprets raw bytes as a uint32 slice without
// copying; length and capacity are divided by 4.
func ByteSliceToUint32Slice(vals []byte) []uint32 {
	var out []uint32
	hdr := (*reflect.SliceHeader)(unsafe.Pointer(&out))
	hdr.Data = uintptr(unsafe.Pointer(&vals[0]))
	hdr.Len = len(vals) / 4
	hdr.Cap = cap(vals) / 4
	return out
}
package geom
// Grid provides an interface for reasoning over a 1D slice as if it were a
// 3D grid.
type Grid struct {
CellBounds
Length, Area, Volume int
uBounds [3]int
}
// GridLocation is a Grid which also specifies a physical location within
// a periodic super-grid.
type GridLocation struct {
Grid
Cells int
BoxWidth float64
}
// CellBounds represents a bounding box aligned to grid cells.
type CellBounds struct {
Origin, Width [3]int
}
// NewGrid returns a new Grid instance with the given origin and width.
func NewGrid(origin, width [3]int) *Grid {
	g := &Grid{}
	g.Init(origin, width)
	return g
}

// Init initializes a Grid instance, deriving Length, Area, Volume and the
// cached upper bounds from origin and width.
func (g *Grid) Init(origin, width [3]int) {
	g.Origin = origin
	g.Width = width
	g.Length = width[0]
	g.Area = width[0] * width[1]
	g.Volume = width[0] * width[1] * width[2]
	for i := 0; i < 3; i++ {
		g.uBounds[i] = g.Origin[i] + g.Width[i]
	}
}

// NewGridLocation returns a new GridLocation instance.
func NewGridLocation(
	origin, width [3]int, boxWidth float64, cells int,
) *GridLocation {
	g := &GridLocation{}
	g.Init(origin, width, boxWidth, cells)
	return g
}

// Init initializes a GridLocation, delegating grid setup to Grid.Init and
// recording the super-grid's physical width and cell count.
func (g *GridLocation) Init(
	origin, width [3]int, boxWidth float64, cells int,
) {
	g.Grid.Init(origin, width)
	g.BoxWidth = boxWidth
	g.Cells = cells
}
// Idx returns the flattened 1D index corresponding to the given 3D
// coordinates (x fastest, z slowest).
func (g *Grid) Idx(x, y, z int) int {
	// NOTE(review): the Origin subtractions may be redundant when callers
	// always pass grid-local coordinates — confirm before removing.
	return ((x - g.Origin[0]) + (y-g.Origin[1])*g.Length +
		(z-g.Origin[2])*g.Area)
}

// IdxCheck returns the flattened index and true when the coordinates are
// inside the grid, or -1 and false otherwise.
func (g *Grid) IdxCheck(x, y, z int) (idx int, ok bool) {
	if !g.BoundsCheck(x, y, z) {
		return -1, false
	}
	return g.Idx(x, y, z), true
}

// BoundsCheck returns true if the given coordinates are within the Grid
// (Origin inclusive, Origin+Width exclusive) and false otherwise.
func (g *Grid) BoundsCheck(x, y, z int) bool {
	return (g.Origin[0] <= x && g.Origin[1] <= y && g.Origin[2] <= z) &&
		(x < g.uBounds[0] && y < g.uBounds[1] &&
			z < g.uBounds[2])
}

// Coords is the inverse of Idx without the Origin offset: it recovers the
// x, y, z coordinates of a point from its flattened grid index.
func (g *Grid) Coords(idx int) (x, y, z int) {
	x = idx % g.Length
	y = (idx % g.Area) / g.Length
	z = idx / g.Area
	return x, y, z
}
/*
// pMod computes the positive modulo x % y.
func pMod(x, y int) int {
m := x % y
if m < 0 {
m += y
}
return m
}
*/
// Intersect returns true if the two cell-aligned bounding boxes overlap
// inside a periodic domain of the given width, and false otherwise.
func (cb1 *CellBounds) Intersect(cb2 *CellBounds, width int) bool {
	intr := true
	var (
		oSmall, oBig, wSmall, wBig int
	)
	for i := 0; intr && i < 3; i++ {
		// Compare the narrower box against the wider one on this axis.
		if cb1.Width[i] < cb2.Width[i] {
			oSmall, wSmall = cb1.Origin[i], cb1.Width[i]
			oBig, wBig = cb2.Origin[i], cb2.Width[i]
		} else {
			oSmall, wSmall = cb2.Origin[i], cb2.Width[i]
			oBig, wBig = cb1.Origin[i], cb1.Width[i]
		}
		eSmall := oSmall + wSmall
		// Wrap both ends of the small box into the big box's frame.
		beSmall := bound(eSmall, oBig, width)
		boSmall := bound(oSmall, oBig, width)
		intr = intr && (beSmall < wBig || boSmall < wBig)
	}
	return intr
}

// IntersectUnbounded returns true if the two bounding boxes overlap in a
// non-periodic (unbounded) domain, and false otherwise.
func (cb1 *CellBounds) IntersectUnbounded(cb2 *CellBounds) bool {
	intr := true
	var (
		oLow, oHigh, wLow int
	)
	for i := 0; intr && i < 3; i++ {
		if cb1.Origin[i] < cb2.Origin[i] {
			oLow, oHigh, wLow = cb1.Origin[i], cb2.Origin[i], cb1.Width[i]
		} else {
			oLow, oHigh, wLow = cb2.Origin[i], cb1.Origin[i], cb2.Width[i]
		}
		// Overlap on this axis requires the lower box to reach past the
		// higher box's origin.
		intr = intr && (oLow + wLow > oHigh)
	}
	return intr
}
// bound shifts x into origin's frame and wraps the offset into the
// periodic range implied by width.
func bound(x, origin, width int) int {
	d := x - origin
	switch {
	case d < 0:
		return d + width
	case d > width:
		return d - width
	default:
		return d
	}
}
// ScaleVecsSegment rescales physical positions into cell units and shifts
// them relative to this bounding box's origin, wrapping negative results
// into the periodic domain. The vectors are modified in place.
func (vcb *CellBounds) ScaleVecsSegment(
	vs []Vec, cells int, boxWidth float64,
) {
	fCells := float32(cells)
	fWidth := float32(boxWidth)
	scale := fCells / fWidth
	origin := Vec{
		float32(vcb.Origin[0]),
		float32(vcb.Origin[1]),
		float32(vcb.Origin[2]),
	}
	for i := range vs {
		for j := 0; j < 3; j++ {
			vs[i][j] *= scale
			vs[i][j] = vs[i][j] - origin[j]
			if vs[i][j] < 0 { vs[i][j] += fCells }
		}
	}
}

// ScaleVecsDomain rescales physical positions into cell units relative to
// this bounding box, then translates them into cb's frame, choosing the
// periodic image of the offset closest to zero. The vectors are modified
// in place.
func (vcb *CellBounds) ScaleVecsDomain(
	cb *CellBounds, vs []Vec, cells int, boxWidth float64,
) {
	fCells := float32(cells)
	fWidth := float32(boxWidth)
	scale := fCells / fWidth
	origin := Vec{
		float32(vcb.Origin[0]),
		float32(vcb.Origin[1]),
		float32(vcb.Origin[2]),
	}
	diff := Vec{
		float32(vcb.Origin[0] - cb.Origin[0]),
		float32(vcb.Origin[1] - cb.Origin[1]),
		float32(vcb.Origin[2] - cb.Origin[2]),
	}
	// Pick the minimum-image offset between the two origins.
	for i := 0; i < 3; i++ {
		if diff[i] < -fCells/2 {
			diff[i] += fCells
		} else if diff[i] > fCells/2 {
			diff[i] -= fCells
		}
	}
	for i := range vs {
		for j := 0; j < 3; j++ {
			vs[i][j] *= scale
			vs[i][j] = vs[i][j] - origin[j]
			if vs[i][j] < 0 { vs[i][j] += fCells }
			vs[i][j] += diff[j]
		}
	}
}
// maxV returns the maximum component along axis dim over all vectors.
// Panics on an empty slice.
func maxV(vs []Vec, dim int) float32 {
	max := vs[0][dim]
	for i := range vs {
		if max < vs[i][dim] { max = vs[i][dim] }
	}
	return max
}

// minV returns the minimum component along axis dim over all vectors.
// Panics on an empty slice.
func minV(vs []Vec, dim int) float32 {
	min := vs[0][dim]
	for i := range vs {
		if min > vs[i][dim] { min = vs[i][dim] }
	}
	return min
}

// countInBounds counts the vectors that fall strictly inside cb's extent
// (components in (0, Width) after truncation to int).
func countInBounds(cb *CellBounds, vs []Vec) int {
	num := 0
	for _, v := range vs {
		if int(v[0]) < cb.Width[0] && v[0] > 0 &&
			int(v[1]) < cb.Width[1] && v[1] > 0 &&
			int(v[2]) < cb.Width[2] && v[2] > 0 {
			num++
		}
	}
	return num
}
// fMinMax widens the interval [min, max] so that it contains x and
// returns the updated bounds.
func fMinMax(min, max, x float32) (float32, float32) {
	switch {
	case x < min:
		return x, max
	case x > max:
		return min, x
	}
	return min, max
}
package morph
import "fmt"
// Table represents a mapping between an entity and a database table.
type Table struct {
typeName string
name string
alias string
columnsByName map[string]Column
columnsByField map[string]Column
}
// SetType associates the entity type to the table.
func (t *Table) SetType(entity interface{}) {
t.SetTypeName(fmt.Sprintf("%T", entity))
}
// SetTypeName modifies the entity type name for the table.
func (t *Table) SetTypeName(typeName string) {
t.typeName = typeName
}
// TypeName retrieves the type name of the entity associated to the table.
func (t *Table) TypeName() string {
return t.typeName
}
// Name retrieves the the table name.
func (t *Table) Name() string {
return t.name
}
// SetName modifies the name of the table.
func (t *Table) SetName(name string) {
t.name = name
}
// Alias retrieves the alias for the table.
func (t *Table) Alias() string {
return t.alias
}
// SetAlias modifies the alias of the table.
func (t *Table) SetAlias(alias string) {
t.alias = alias
}
// ColumnNames retrieves all of the column names for the table.
// Map iteration order is random, so the result order is unspecified.
func (t *Table) ColumnNames() []string {
	var columnNames []string
	for name := range t.columnsByName {
		columnNames = append(columnNames, name)
	}
	return columnNames
}
// ColumnName retrieves the column name associated to the provided field
// name, or an error when no mapping exists.
func (t *Table) ColumnName(field string) (string, error) {
	// Reading from a nil map is safe in Go, so no lazy initialization is
	// needed on this read-only path (the original mutated the receiver).
	if column, ok := t.columnsByField[field]; ok {
		return column.Name(), nil
	}
	return "", fmt.Errorf("no mapping for field %q", field)
}

// FieldName retrieves the field name associated to the provided column
// name, or an error when no mapping exists.
func (t *Table) FieldName(name string) (string, error) {
	if column, ok := t.columnsByName[name]; ok {
		return column.Field(), nil
	}
	return "", fmt.Errorf("no mapping for column %q", name)
}
// Columns retrieves all of the columns for the table, in unspecified
// order (map iteration order is random).
func (t *Table) Columns() []Column {
	var columns []Column
	for _, c := range t.columnsByName {
		columns = append(columns, c)
	}
	return columns
}
// AddColumn adds a column to the table, indexing it by both its column
// name and its field name. It fails when either name is already mapped.
func (t *Table) AddColumn(column Column) error {
	if t.columnsByName == nil {
		t.columnsByName = make(map[string]Column)
	}
	if _, ok := t.columnsByName[column.Name()]; ok {
		return fmt.Errorf(
			"column with name %q already exists", column.Name())
	}
	if t.columnsByField == nil {
		t.columnsByField = make(map[string]Column)
	}
	if _, ok := t.columnsByField[column.Field()]; ok {
		return fmt.Errorf(
			"column with field %q already exists", column.Field())
	}
	// Register under both keys atomically so the two indexes stay in sync.
	t.columnsByName[column.Name()],
		t.columnsByField[column.Field()] = column, column
	return nil
}
// AddColumns adds all of the provided columns to the table.
func (t *Table) AddColumns(columns ...Column) error {
for _, column := range columns {
if err := t.AddColumn(column); err != nil {
return err
}
}
return nil
} | table.go | 0.700588 | 0.411406 | table.go | starcoder |
package terminal
// https://github.com/cli/cli/blob/trunk/utils/table_printer.go
import (
"fmt"
"io"
"strings"
"github.com/cli/cli/pkg/text"
)
// TablePrinter prints table formatted output.
type TablePrinter interface {
	// AddField adds a field with the given string to the row. If given, the
	// ColorFunc will be used to colorize the resulting field text.
	AddField(string, ColorFunc)
	// EndRow ends a row and starts with a new one on the next AddField call.
	EndRow()
	// Render the table to the underlying output. It also clears internal state
	// and makes the TablePrinter ready to use again.
	Render() error
}

// NewTablePrinter creates a new TablePrinter writing its output to the
// underlying IO. When stdout is a TTY it renders aligned, truncated
// columns; otherwise it emits tab-separated values that are easy to parse
// when piped.
func NewTablePrinter(io *IO) TablePrinter {
	if io.isStdoutTTY {
		return &ttyTablePrinter{
			out: io.out,
			maxWidth: io.TerminalWidth(),
		}
	}
	return &tsvTablePrinter{
		out: io.out,
	}
}

// tableField holds one cell's text together with the functions used to
// truncate and colorize it at render time.
type tableField struct {
	Text string
	TruncateFunc func(int, string) string
	ColorFunc func(string) string
}

// ttyTablePrinter buffers rows and renders them as aligned columns
// fitted to the terminal width.
type ttyTablePrinter struct {
	out io.Writer
	maxWidth int
	rows [][]tableField
}
// AddField adds a field with the given string to the current row. If
// given, the ColorFunc will be used to colorize the resulting field text
// at render time.
func (t *ttyTablePrinter) AddField(s string, colorFunc ColorFunc) {
	if t.rows == nil {
		t.rows = make([][]tableField, 1)
	}
	rowI := len(t.rows) - 1
	field := tableField{
		Text: s,
		TruncateFunc: text.Truncate,
		ColorFunc: colorFunc,
	}
	t.rows[rowI] = append(t.rows[rowI], field)
}

// EndRow ends a row and starts with a new one on the next AddField call.
func (t *ttyTablePrinter) EndRow() {
	t.rows = append(t.rows, []tableField{})
}
// Render writes the buffered rows to the underlying output as aligned
// columns. The first column keeps its natural width; the remaining width
// is split evenly among the other columns, which are truncated to fit.
// Internal state is cleared so the printer can be reused.
func (t *ttyTablePrinter) Render() error {
	if len(t.rows) == 0 {
		return nil
	}
	// Column count is taken from the first row; rows are assumed to have
	// a uniform number of fields.
	numCols := len(t.rows[0])
	colWidths := make([]int, numCols)
	// measure maximum content width per column
	for _, row := range t.rows {
		for col, field := range row {
			textLen := text.DisplayWidth(field.Text)
			if textLen > colWidths[col] {
				colWidths[col] = textLen
			}
		}
	}
	delim := "  "
	availWidth := t.maxWidth - colWidths[0] - ((numCols - 1) * len(delim))
	// add extra space from columns that are already narrower than threshold
	for col := 1; col < numCols; col++ {
		availColWidth := availWidth / (numCols - 1)
		if extra := availColWidth - colWidths[col]; extra > 0 {
			availWidth += extra
		}
	}
	// cap all but first column to fit available terminal width
	// TODO: support weighted instead of even redistribution
	for col := 1; col < numCols; col++ {
		availColWidth := availWidth / (numCols - 1)
		if colWidths[col] > availColWidth {
			colWidths[col] = availColWidth
		}
	}
	for _, row := range t.rows {
		for col, field := range row {
			if col > 0 {
				if _, err := fmt.Fprint(t.out, delim); err != nil {
					return err
				}
			}
			truncVal := field.TruncateFunc(colWidths[col], field.Text)
			if col < numCols-1 {
				// pad value with spaces on the right
				if padWidth := colWidths[col] - text.DisplayWidth(field.Text); padWidth > 0 {
					truncVal += strings.Repeat(" ", padWidth)
				}
			}
			if field.ColorFunc != nil {
				truncVal = field.ColorFunc(truncVal)
			}
			if _, err := fmt.Fprint(t.out, truncVal); err != nil {
				return err
			}
		}
		if len(row) > 0 {
			if _, err := fmt.Fprint(t.out, "\n"); err != nil {
				return err
			}
		}
	}
	// Reset so the printer can be reused.
	t.rows = nil
	return nil
}
type tsvTablePrinter struct {
out io.Writer
currentCol int
}
// AddField adds a field with the given string to the row. The ColorFunc is
// ignored.
func (t *tsvTablePrinter) AddField(s string, _ ColorFunc) {
if t.currentCol > 0 {
fmt.Fprint(t.out, "\t")
}
fmt.Fprint(t.out, s)
t.currentCol++
}
// EndRow ends a row and starts with a new one on the next AddField call.
func (t *tsvTablePrinter) EndRow() {
fmt.Fprint(t.out, "\n")
t.currentCol = 0
}
// Render the table to the underlying output.
func (t *tsvTablePrinter) Render() error {
t.currentCol = 0
return nil
} | pkg/terminal/table_printer.go | 0.759671 | 0.445891 | table_printer.go | starcoder |
package grid2d
import (
"github.com/maxfish/go-libs/pkg/geom"
)
// cellEdges is only used during the construction phase of the edges: it
// remembers which segment currently covers each side of a cell so that
// adjacent cells can extend it instead of starting a new one.
type cellEdges struct {
	top, bottom, left, right *geom.Segment
}

// ComputeEdgesCallback reports whether the cell at (x, y) is solid.
type ComputeEdgesCallback func(x int, y int) bool
// ComputeEdges creates a list of segments covering all edges of the 2d grid.
// The segments coordinates assume each cell has a dimension of 1x1 units.
// The callback has to return 'true' for each cell which is considered solid,
// and for which an edge has to be computed. Collinear edges of adjacent
// solid cells are merged into single segments.
func ComputeEdges(gridWidth, gridHeight int, isSolid ComputeEdgesCallback) []*geom.Segment {
	segments := make([]*geom.Segment, 0)
	// edgesGrid[x][y] tracks the segments currently covering each side of
	// cell (x, y) so neighbours can extend them.
	edgesGrid := make([][]cellEdges, gridWidth)
	for i := range edgesGrid {
		edgesGrid[i] = make([]cellEdges, gridHeight)
	}
	for y := 0; y < gridHeight; y++ {
		for x := 0; x < gridWidth; x++ {
			if !isSolid(x, y) {
				continue
			}
			// Top segment: only needed when the cell above is empty.
			if y-1 < 0 || !isSolid(x, y-1) {
				if x-1 < 0 || edgesGrid[x-1][y].top == nil {
					// Create a new segment
					edgesGrid[x][y].top = &geom.Segment{
						A: geom.Point{X: x, Y: y},
						B: geom.Point{X: x + 1, Y: y},
					}
					segments = append(segments, edgesGrid[x][y].top)
				} else {
					// Reuse, and extend, the segment of the cell at the left
					edgesGrid[x-1][y].top.B.X++
					edgesGrid[x][y].top = edgesGrid[x-1][y].top
				}
			}
			// Bottom segment: only needed when the cell below is empty.
			if y+1 >= gridHeight || !isSolid(x, y+1) {
				if x-1 < 0 || edgesGrid[x-1][y].bottom == nil {
					edgesGrid[x][y].bottom = &geom.Segment{
						A: geom.Point{X: x + 1, Y: y + 1},
						B: geom.Point{X: x, Y: y + 1},
					}
					segments = append(segments, edgesGrid[x][y].bottom)
				} else {
					// Reuse, and extend, the segment of the cell at the left
					edgesGrid[x-1][y].bottom.A.X++
					edgesGrid[x][y].bottom = edgesGrid[x-1][y].bottom
				}
			}
			// Left segment: only needed when the cell to the left is empty.
			if x-1 < 0 || !isSolid(x-1, y) {
				if y-1 < 0 || edgesGrid[x][y-1].left == nil {
					edgesGrid[x][y].left = &geom.Segment{
						A: geom.Point{X: x, Y: y + 1},
						B: geom.Point{X: x, Y: y},
					}
					segments = append(segments, edgesGrid[x][y].left)
				} else {
					// Reuse, and extend, the segment of the cell above
					edgesGrid[x][y-1].left.A.Y++
					edgesGrid[x][y].left = edgesGrid[x][y-1].left
				}
			}
			// Right segment: only needed when the cell to the right is empty.
			if x+1 >= gridWidth || !isSolid(x+1, y) {
				if y-1 < 0 || edgesGrid[x][y-1].right == nil {
					edgesGrid[x][y].right = &geom.Segment{
						A: geom.Point{X: x + 1, Y: y},
						B: geom.Point{X: x + 1, Y: y + 1},
					}
					segments = append(segments, edgesGrid[x][y].right)
				} else {
					// Reuse, and extend, the segment of the cell above
					edgesGrid[x][y-1].right.B.Y++
					edgesGrid[x][y].right = edgesGrid[x][y-1].right
				}
			}
		}
	}
	return segments
}
package cu
import (
"log"
"gitlab.com/akita/akita"
"gitlab.com/akita/mgpusim/insts"
"gitlab.com/akita/mgpusim/timing/wavefront"
"gitlab.com/akita/util"
"gitlab.com/akita/util/pipelining"
"gitlab.com/akita/util/tracing"
)
// vectorMemInst wraps a wavefront so it can travel through the
// instruction pipeline as a pipelining item.
type vectorMemInst struct {
	wavefront *wavefront.Wavefront
}

// TaskID returns the ID of the wavefront's dynamic instruction, uniquely
// identifying this pipeline item for tracing.
func (i vectorMemInst) TaskID() string {
	return i.wavefront.DynamicInst().ID
}

// A VectorMemoryUnit is the block in a compute unit that performs vector
// memory operations.
type VectorMemoryUnit struct {
	cu *ComputeUnit
	scratchpadPreparer ScratchpadPreparer
	coalescer coalescer

	// In-flight bookkeeping used by IsIdle.
	numInstInFlight uint64
	numTransactionInFlight uint64
	maxInstructionsInFlight uint64

	// Instructions flow through instructionPipeline into
	// postInstructionPipelineBuffer, where execute picks them up.
	instructionPipeline pipelining.Pipeline
	postInstructionPipelineBuffer util.Buffer

	// Coalesced transactions queue in transactionsWaiting before entering
	// transactionPipeline.
	transactionsWaiting []VectorMemAccessInfo
	transactionPipeline pipelining.Pipeline
	postTransactionPipelineBuffer util.Buffer

	isIdle bool
}
// NewVectorMemoryUnit creates a new Vector Memory Unit bound to the given
// compute unit, scratchpad preparer, and coalescer.
func NewVectorMemoryUnit(
	cu *ComputeUnit,
	scratchpadPreparer ScratchpadPreparer,
	coalescer coalescer,
) *VectorMemoryUnit {
	return &VectorMemoryUnit{
		cu:                 cu,
		scratchpadPreparer: scratchpadPreparer,
		coalescer:          coalescer,
	}
}
// CanAcceptWave reports whether the instruction pipeline has room to take
// another wavefront this cycle.
func (u *VectorMemoryUnit) CanAcceptWave() bool {
	return u.instructionPipeline.CanAccept()
}
// AcceptWave admits one wavefront into the instruction pipeline of the
// vector memory unit and counts it as in flight. Callers must check
// CanAcceptWave first.
func (u *VectorMemoryUnit) AcceptWave(
	wave *wavefront.Wavefront,
	now akita.VTimeInSec,
) {
	u.instructionPipeline.Accept(now, vectorMemInst{wavefront: wave})
	u.numInstInFlight++
}
// IsIdle reports whether the unit has no instruction and no memory
// transaction in flight, caching the result in u.isIdle.
func (u *VectorMemoryUnit) IsIdle() bool {
	u.isIdle = (u.numInstInFlight == 0) && (u.numTransactionInFlight == 0)
	return u.isIdle
}
// Run executes three pipeline stages that are controlled by the
// VectorMemoryUnit. Stages tick from back (send) to front (instruction
// pipeline) so an item advances at most one stage per cycle.
func (u *VectorMemoryUnit) Run(now akita.VTimeInSec) bool {
	madeProgress := false
	madeProgress = u.sendRequest(now) || madeProgress
	madeProgress = u.transactionPipeline.Tick(now) || madeProgress
	madeProgress = u.instToTransaction(now) || madeProgress
	madeProgress = u.instructionPipeline.Tick(now) || madeProgress
	return madeProgress
}
// instToTransaction drains already-generated transactions into the
// transaction pipeline first; only when none are waiting does it execute the
// next instruction, which may generate new transactions.
func (u *VectorMemoryUnit) instToTransaction(
	now akita.VTimeInSec,
) bool {
	if len(u.transactionsWaiting) > 0 {
		return u.insertTransactionToPipeline(now)
	}
	return u.execute(now)
}
// insertTransactionToPipeline moves the oldest waiting memory transaction
// into the transaction pipeline, if the pipeline can accept one this cycle.
// It returns true when progress was made.
func (u *VectorMemoryUnit) insertTransactionToPipeline(
	now akita.VTimeInSec,
) bool {
	if !u.transactionPipeline.CanAccept() {
		return false
	}
	u.transactionPipeline.Accept(now, u.transactionsWaiting[0])
	u.transactionsWaiting = u.transactionsWaiting[1:]
	// Count the transaction as in flight. This is the only place transactions
	// enter the pipeline; without this increment, the decrement in
	// sendRequest underflows the uint64 counter and IsIdle would never
	// report true again after the first transaction completes.
	u.numTransactionInFlight++
	return true
}
// execute takes the next instruction out of the post-instruction-pipeline
// buffer and runs it, generating memory transactions. It peeks first and
// pops only on success: executeFlatInsts returns false when the in-flight
// access limit is reached, and popping eagerly (as before) would silently
// drop the instruction, leaving its wavefront blocked forever.
func (u *VectorMemoryUnit) execute(now akita.VTimeInSec) (madeProgress bool) {
	item := u.postInstructionPipelineBuffer.Peek()
	if item == nil {
		return false
	}
	wave := item.(vectorMemInst).wavefront
	inst := wave.Inst()
	switch inst.FormatType {
	case insts.FLAT:
		ok := u.executeFlatInsts(now, wave)
		if !ok {
			// Retry next cycle; the instruction stays in the buffer.
			return false
		}
	default:
		log.Panicf("running inst %s in vector memory unit is not supported", inst.String(nil))
	}
	u.postInstructionPipelineBuffer.Pop()
	u.cu.UpdatePCAndSetReady(wave)
	u.numInstInFlight--
	return true
}
// executeFlatInsts dispatches a FLAT-format instruction to the load or the
// store path based on its opcode. It returns false when the instruction
// cannot make progress this cycle (e.g., in-flight access limit reached).
func (u *VectorMemoryUnit) executeFlatInsts(
	now akita.VTimeInSec,
	wavefront *wavefront.Wavefront,
) bool {
	inst := wavefront.DynamicInst()
	switch inst.Opcode {
	case 16, 17, 18, 19, 20, 21, 22, 23: // FLAT_LOAD_* variants (byte up to dwordx4)
		return u.executeFlatLoad(now, wavefront)
	case 24, 25, 26, 27, 28, 29, 30, 31: // FLAT_STORE_* variants
		return u.executeFlatStore(now, wavefront)
	default:
		log.Panicf("Opcode %d for format FLAT is not supported.", inst.Opcode)
	}
	// Unreachable: both the cases and the default terminate the switch.
	panic("never")
}
// executeFlatLoad coalesces a FLAT load into read transactions and queues
// them for sending. Returns false when admitting the transactions would
// exceed the compute unit's in-flight vector access limit (caller retries).
func (u *VectorMemoryUnit) executeFlatLoad(
	now akita.VTimeInSec,
	wave *wavefront.Wavefront,
) bool {
	u.scratchpadPreparer.Prepare(wave, wave)
	transactions := u.coalescer.generateMemTransactions(wave)

	// All lanes inactive: nothing to read, the instruction completes now.
	if len(transactions) == 0 {
		u.cu.logInstTask(
			now,
			wave,
			wave.DynamicInst(),
			true,
		)
		return true
	}

	if len(transactions)+len(u.cu.InFlightVectorMemAccess) >
		u.cu.InFlightVectorMemAccessLimit {
		return false
	}

	wave.OutstandingVectorMemAccess++
	// NOTE(review): also bumping OutstandingScalarMemAccess for a vector
	// load looks like a copy-paste from the scalar unit — confirm against
	// the wavefront completion path before changing.
	wave.OutstandingScalarMemAccess++
	for i, t := range transactions {
		u.cu.InFlightVectorMemAccess = append(u.cu.InFlightVectorMemAccess, t)
		// All but the last transaction may wait to be coalesced downstream.
		if i != len(transactions)-1 {
			t.Read.CanWaitForCoalesce = true
		}
		lowModule := u.cu.VectorMemModules.Find(t.Read.Address)
		t.Read.Dst = lowModule
		t.Read.Src = u.cu.ToVectorMem
		t.Read.PID = wave.PID()
		u.transactionsWaiting = append(u.transactionsWaiting, t)
	}
	return true
}
// executeFlatStore coalesces a FLAT store into write transactions and queues
// them for sending. Mirrors executeFlatLoad but populates t.Write instead of
// t.Read. Returns false when the in-flight access limit would be exceeded.
func (u *VectorMemoryUnit) executeFlatStore(
	now akita.VTimeInSec,
	wave *wavefront.Wavefront,
) bool {
	u.scratchpadPreparer.Prepare(wave, wave)
	transactions := u.coalescer.generateMemTransactions(wave)

	// All lanes inactive: nothing to write, the instruction completes now.
	if len(transactions) == 0 {
		u.cu.logInstTask(
			now,
			wave,
			wave.DynamicInst(),
			true,
		)
		return true
	}

	if len(transactions)+len(u.cu.InFlightVectorMemAccess) >
		u.cu.InFlightVectorMemAccessLimit {
		return false
	}

	wave.OutstandingVectorMemAccess++
	// NOTE(review): see executeFlatLoad — scalar counter bump for a vector
	// store needs confirming against the completion path.
	wave.OutstandingScalarMemAccess++
	for i, t := range transactions {
		u.cu.InFlightVectorMemAccess = append(u.cu.InFlightVectorMemAccess, t)
		// All but the last transaction may wait to be coalesced downstream.
		if i != len(transactions)-1 {
			t.Write.CanWaitForCoalesce = true
		}
		lowModule := u.cu.VectorMemModules.Find(t.Write.Address)
		t.Write.Dst = lowModule
		t.Write.Src = u.cu.ToVectorMem
		t.Write.PID = wave.PID()
		u.transactionsWaiting = append(u.transactionsWaiting, t)
	}
	return true
}
// sendRequest attempts to send the oldest completed transaction (read or
// write) to the vector memory port. The item is peeked first and only popped
// when the send succeeds, so a busy port simply retries next cycle.
func (u *VectorMemoryUnit) sendRequest(now akita.VTimeInSec) bool {
	item := u.postTransactionPipelineBuffer.Peek()
	if item == nil {
		return false
	}

	var req akita.Msg
	info := item.(VectorMemAccessInfo)
	// Exactly one of Read/Write is populated, depending on the instruction.
	if info.Read != nil {
		req = info.Read
	} else {
		req = info.Write
	}

	req.Meta().SendTime = now
	err := u.cu.ToVectorMem.Send(req)
	if err == nil {
		u.postTransactionPipelineBuffer.Pop()
		u.numTransactionInFlight--
		tracing.TraceReqInitiate(req, now, u.cu, info.Inst.ID)
		return true
	}
	return false
}
// Flush flushes
func (u *VectorMemoryUnit) Flush() {
u.instructionPipeline.Clear()
u.transactionPipeline.Clear()
u.postInstructionPipelineBuffer.Clear()
u.postTransactionPipelineBuffer.Clear()
u.transactionsWaiting = nil
u.numInstInFlight = 0
u.numTransactionInFlight = 0
} | timing/cu/vectormemoryunit.go | 0.659405 | 0.481454 | vectormemoryunit.go | starcoder |
package monitoring
import (
"fmt"
"github.com/chr-ras/advent-of-code-2019/util/calc"
g "github.com/chr-ras/advent-of-code-2019/util/geometry"
)
// BestAsteroidForMonitoringStation determines the best (== from where one can
// see the most asteroids) asteroid on the asteroid map to build a monitoring
// station on. It evaluates CheckLineOfSight for every '#' cell and keeps the
// cell with the highest visible-asteroid count.
func BestAsteroidForMonitoringStation(asteroidMap []string) (asteroid g.Point, visibleAsteroids int) {
	visibleAsteroids = 0
	for row := 0; row < len(asteroidMap); row++ {
		for column := 0; column < len(asteroidMap[row]); column++ {
			// Compare the raw byte instead of slicing a one-character
			// substring; the map is plain ASCII ('.' and '#').
			if asteroidMap[row][column] != '#' {
				continue
			}
			currentAsteroid := g.Point{X: column, Y: row}
			visibleAsteroidsForCurrentAsteroid, _ := CheckLineOfSight(asteroidMap, currentAsteroid)
			if visibleAsteroidsForCurrentAsteroid > visibleAsteroids {
				visibleAsteroids = visibleAsteroidsForCurrentAsteroid
				asteroid = currentAsteroid
			}
		}
	}
	return
}
// CheckLineOfSight determines the count of visible asteroids from the
// specified asteroid. The map is swept in four quadrants radiating away from
// asteroidToCheck so that each cell is classified before the cells it might
// shadow; updateVisibilityStatus then casts shadow rays behind asteroids.
// NOTE(review): prettyPrint writes the whole grid to stdout on every call —
// looks like leftover debug output; confirm before removing.
func CheckLineOfSight(asteroidMap []string, asteroidToCheck g.Point) (countOfVisibleAsteroids int, visibilityStatus [][]CellStatus) {
	countOfVisibleAsteroids = 0
	visibilityStatus, mapHeight, mapWidth := prepareOutputSlice(asteroidMap)
	// Quadrants left of (and including) the asteroid's column.
	for x := asteroidToCheck.X; x >= 0; x-- {
		for y := asteroidToCheck.Y; y >= 0; y-- {
			updateVisibilityStatus(asteroidMap, g.Point{X: x, Y: y}, asteroidToCheck, visibilityStatus, &countOfVisibleAsteroids, false)
		}
		for y := asteroidToCheck.Y + 1; y < mapHeight; y++ {
			updateVisibilityStatus(asteroidMap, g.Point{X: x, Y: y}, asteroidToCheck, visibilityStatus, &countOfVisibleAsteroids, false)
		}
	}
	// Quadrants right of the asteroid's column.
	for x := asteroidToCheck.X + 1; x < mapWidth; x++ {
		for y := asteroidToCheck.Y; y >= 0; y-- {
			updateVisibilityStatus(asteroidMap, g.Point{X: x, Y: y}, asteroidToCheck, visibilityStatus, &countOfVisibleAsteroids, false)
		}
		for y := asteroidToCheck.Y + 1; y < mapHeight; y++ {
			updateVisibilityStatus(asteroidMap, g.Point{X: x, Y: y}, asteroidToCheck, visibilityStatus, &countOfVisibleAsteroids, false)
		}
	}
	prettyPrint(visibilityStatus)
	return
}
// updateVisibilityStatus classifies one cell as visible or blocked from
// startAsteroid's perspective and, once an asteroid has been hit
// (isBlockingCheck), recursively marks every cell further along the same ray
// as blocked. The ray direction is the (reduced) vector from startAsteroid
// to spaceToCheck; reducing by the GCD ensures no grid cell on the line is
// skipped.
func updateVisibilityStatus(asteroidMap []string, spaceToCheck, startAsteroid g.Point, visibilityStatus [][]CellStatus, countOfVisibleAsteroids *int, isBlockingCheck bool) {
	if spaceToCheck.X == startAsteroid.X && spaceToCheck.Y == startAsteroid.Y {
		return
	}
	// A cell already proven blocked stays blocked; stop this ray.
	if visibilityStatus[spaceToCheck.Y][spaceToCheck.X] == BlockedAsteroid || visibilityStatus[spaceToCheck.Y][spaceToCheck.X] == BlockedSpace {
		return
	}
	mapPosition := asteroidMap[spaceToCheck.Y][spaceToCheck.X : spaceToCheck.X+1]
	positionIsAsteroid := false
	switch mapPosition {
	case ".":
		if isBlockingCheck {
			visibilityStatus[spaceToCheck.Y][spaceToCheck.X] = BlockedSpace
		} else {
			// Visible empty space casts no shadow, so the ray ends here.
			visibilityStatus[spaceToCheck.Y][spaceToCheck.X] = VisibleSpace
			return
		}
	case "#":
		positionIsAsteroid = true
		if isBlockingCheck {
			visibilityStatus[spaceToCheck.Y][spaceToCheck.X] = BlockedAsteroid
		} else {
			visibilityStatus[spaceToCheck.Y][spaceToCheck.X] = VisibleAsteroid
			*countOfVisibleAsteroids++
		}
	default:
		panic(fmt.Errorf("Unexpected mapPosition %v", mapPosition))
	}
	// Step one reduced grid unit further along the ray.
	vectorX := spaceToCheck.X - startAsteroid.X
	vectorY := spaceToCheck.Y - startAsteroid.Y
	xYGreatestCommonDivisior := calc.GreatestCommonDivisor(vectorX, vectorY)
	newSpaceToCheck := g.Point{X: spaceToCheck.X + vectorX/xYGreatestCommonDivisior, Y: spaceToCheck.Y + vectorY/xYGreatestCommonDivisior}
	if newSpaceToCheck.Y < 0 || newSpaceToCheck.Y >= len(asteroidMap) || newSpaceToCheck.X < 0 || newSpaceToCheck.X >= len(asteroidMap[0]) {
		// The new position is out of the map (expected to happen)
		return
	}
	updateVisibilityStatus(asteroidMap, newSpaceToCheck, spaceToCheck, visibilityStatus, countOfVisibleAsteroids, isBlockingCheck || positionIsAsteroid)
}
// prettyPrint writes the visibility grid to stdout, one character per cell:
// '?' unvisited, '.' visible space, 'X' visible asteroid, '░' blocked space,
// 'x' blocked asteroid, '_' for any unknown status.
func prettyPrint(visibilityStatus [][]CellStatus) {
	symbols := map[CellStatus]string{
		Unvisited:       "?",
		VisibleSpace:    ".",
		VisibleAsteroid: "X",
		BlockedSpace:    "░",
		BlockedAsteroid: "x",
	}
	for _, row := range visibilityStatus {
		for _, cell := range row {
			symbol, known := symbols[cell]
			if !known {
				symbol = "_"
			}
			fmt.Print(symbol)
		}
		fmt.Println()
	}
	fmt.Println()
}
// prepareOutputSlice allocates a zeroed visibility grid matching the
// dimensions of the asteroid map and returns the grid plus its height and
// width. All cells start as Unvisited (the CellStatus zero value).
func prepareOutputSlice(asteroidMap []string) (outputSlice [][]CellStatus, asteroidMapHeight, asteroidMapWidth int) {
	asteroidMapHeight, asteroidMapWidth = len(asteroidMap), len(asteroidMap[0])
	outputSlice = make([][]CellStatus, asteroidMapHeight)
	for row := range outputSlice {
		outputSlice[row] = make([]CellStatus, asteroidMapWidth)
	}
	return
}
// CellStatus classifies one map cell's visibility from a chosen asteroid.
type CellStatus int

const (
	// Unvisited means: The cell has not been visited
	Unvisited CellStatus = iota
	// VisibleSpace means: The cell contains no asteroid and is visible from the specified asteroid
	VisibleSpace
	// VisibleAsteroid means: The cell contains an asteroid and is visible from the specified asteroid
	VisibleAsteroid
	// BlockedSpace means: The cell contains no asteroid and is not visible from the specified asteroid
	BlockedSpace
	// BlockedAsteroid means: The cell contains an asteroid and is not visible from the specified asteroid
	BlockedAsteroid
)
package vida
import (
"fmt"
)
// PrintError shows a given error in the terminal, prefixed with the
// package-level runTimeError banner.
func PrintError(err error) {
	fmt.Printf("\n\n\n   %v:\n   %v\n\n", runTimeError, err.Error())
}
// TypeErrorInBinaryOperator builds the error for a binary operator applied
// to operand types that do not support it.
func TypeErrorInBinaryOperator(op string, lhs, rhs Value) error {
	return fmt.Errorf("type error with operator '%v' : (%v %v %v)", op, lhs.TypeName(), op, rhs.TypeName())
}

// NegativeShiftError builds the error for a bit-shift whose shift amount is
// negative.
func NegativeShiftError(op string, lhs, rhs Value) error {
	return fmt.Errorf("attempt to perform a shift bits operation with a negative shift amount '%v' : (%v %v %v)", op, lhs, op, rhs)
}

// DivisionByZeroError builds the error for a division by zero.
func DivisionByZeroError() error {
	return fmt.Errorf("attempt to perform a division by zero")
}
// ArityErrorInBinaryOperatorOverload builds the error for a binary operator
// overload whose arity is not 2. The message previously said "prefix
// operator" — a copy-paste from the unary variant below.
func ArityErrorInBinaryOperatorOverload() error {
	return fmt.Errorf("the arity of a binary operator overloading must be 2")
}
// ArityErrorInUnaryOperatorOverload builds the error for a prefix (unary)
// operator overload whose arity is not 1.
func ArityErrorInUnaryOperatorOverload() error {
	return fmt.Errorf("the arity of a prefix operator overloading must be 1")
}

// MethodNotDefined builds the error for calling a method that the value's
// struct does not define.
func MethodNotDefined(method string, value Value) error {
	return fmt.Errorf("the method '%v' not defined in the struct '%v'", method, value.TypeName())
}

// TypeErrorInPrefixOperator builds the error for a prefix operator applied
// to an unsupported operand type.
func TypeErrorInPrefixOperator(op string, value Value) error {
	return fmt.Errorf("type error in prefix operator '%v' : (%v%v)", op, op, value.TypeName())
}

// IndexOutOfRangeError builds the error for a subscript outside [0, length).
func IndexOutOfRangeError(length, index Int) error {
	return fmt.Errorf("subscript index is out of range with length %v and index [%v]", length, index)
}

// ValueNotHashableError builds the error for using a non-hashable value as a
// map or set key.
func ValueNotHashableError(key Value) error {
	return fmt.Errorf("a value of type '%v' is not hashable", key.TypeName())
}
// RecordPropertyError builds the error for using a non-string value as a key
// in a record. (Fixes the "the vaue of type of" typo in the message.)
func RecordPropertyError(key Value) error {
	return fmt.Errorf("the value of type '%v' cannot be used as key in a value of type Record", key.TypeName())
}
// ValueIsNotAnIndexError builds the error for using a non-index value as a
// subscript.
func ValueIsNotAnIndexError(value Value) error {
	return fmt.Errorf("a value of type '%v' cannot be used as an index", value.TypeName())
}

// ValueDoesNotSupportSubscription builds the error for subscripting ([]) a
// value whose type does not support it.
func ValueDoesNotSupportSubscription(value Value) error {
	return fmt.Errorf("a value of type '%v' does not support subscription", value.TypeName())
}

// InstancesDoNotSupportSubscriptionWriting builds the error for attempting a
// subscript write on an instance.
func InstancesDoNotSupportSubscriptionWriting() error {
	return fmt.Errorf("instances do not support subscription writing operations")
}
// NameNotDefinedInCompoundDataType builds the error for selecting a
// property/method that the compound data type does not define. (Fixes the
// "defiend" typo in the message.)
func NameNotDefinedInCompoundDataType(name string, dataStructure Value) error {
	return fmt.Errorf("the property/method '%v' is not defined in '%v'", name, dataStructure)
}
// SelectionOperationNotSupported builds the error for applying the selector
// operator (.) to a value whose type does not support it.
func SelectionOperationNotSupported(value Value) error {
	return fmt.Errorf("a value of type '%v' does not support selector operator '.'", value.TypeName())
}

// ValueIsImmutable builds the error for mutating an immutable value.
func ValueIsImmutable(value Value) error {
	return fmt.Errorf("cannot change the state of a value of type '%v'", value.TypeName())
}

// ValueDoesNotSupportExtension builds the error for extending a non-Struct
// value. NOTE(review): the value parameter is currently unused in the message.
func ValueDoesNotSupportExtension(value Value) error {
	return fmt.Errorf("only structures can be extended")
}

// CannotDeriveFromValue builds the error for deriving properties/methods
// from a non-Struct value.
func CannotDeriveFromValue(value Value) error {
	return fmt.Errorf("cannot derive properties or methods from a non-Struct value")
}

// ValueDoesNotSupportSlicing builds the error for slicing a value whose type
// does not support it.
func ValueDoesNotSupportSlicing(value Value) error {
	return fmt.Errorf("a value of type '%v' does not support slicing", value.TypeName())
}

// OperatorNotDefined builds the error for an operator (looked up via
// KindDescription) that the value's type does not define.
func OperatorNotDefined(op byte, value Value) error {
	return fmt.Errorf("the operator '%v' is not defined for the type '%v'", KindDescription[op], value.TypeName())
}

// NeverShouldHaveHappened builds the internal-invariant-violation error used
// for unreachable subscription/selection flag states.
func NeverShouldHaveHappened(what string) error {
	return fmt.Errorf("sorry 💔 this error never ever should have happened.\n   '%v'", what)
}

// BytesChangeMustBeWithNumericTypesOnly builds the error for writing a
// non-integer value into a byte array.
func BytesChangeMustBeWithNumericTypesOnly() error {
	return fmt.Errorf("bytes state can be changed with integer data types only")
}
// RuneOutOfRangeOrIllegal builds the error for a rune that is illegal or out
// of range. (Fixes the "illigal" typo in the message.)
func RuneOutOfRangeOrIllegal() error {
	return fmt.Errorf("rune is illegal or it is out of range")
}
// ValueIsNotValueSemantics builds the error for a type that has reference
// semantics where value semantics are required.
func ValueIsNotValueSemantics(value Value) error {
	return fmt.Errorf("a value of type '%v' has reference semantics", value.TypeName())
}

// VariableAlreadyDefined builds the error for re-declaring a variable.
func VariableAlreadyDefined(identifier string) error {
	return fmt.Errorf("re-declaring a variable '%v' that has already been declared", identifier)
}

// VariableNotDefined builds the error for assigning to an undeclared variable.
func VariableNotDefined(identifier string) error {
	return fmt.Errorf("the variable '%v' has not been declared yet", identifier)
}
// AssertionFailure builds the error for a failed assertion, carrying the
// user-supplied message verbatim. The message must not be used as a format
// string (fmt.Errorf(message)): a '%' in user text would be interpreted as a
// format verb and mangle the output.
func AssertionFailure(message string) error {
	return fmt.Errorf("%s", message)
}
// RangeExpressionError builds the error for a non-iterable value used in a
// range expression.
func RangeExpressionError(value Value) error {
	return fmt.Errorf("a value of type '%v' is not iterable", value.TypeName())
}

// RangeExpectedPositiveValue builds the error for a negative step in a range
// expression.
func RangeExpectedPositiveValue(value Value) error {
	return fmt.Errorf("the value given as step in range expression is negative '%v'", value)
}

// RangeExpectedIntegerValue builds the error for a non-Int operand in a
// range expression.
func RangeExpectedIntegerValue(value Value) error {
	return fmt.Errorf("one of the values found in range expression is not an Int '%v'", value.TypeName())
}

// OverloadedOperatorWithWrongArity builds the error for an operator overload
// whose arity does not match the required one.
func OverloadedOperatorWithWrongArity(method string, foundArity, must UInt32) error {
	return fmt.Errorf("the overloaded function '%v' must have an arity of '%v', but found an arity of '%v'", method, must, foundArity)
}

// MethodNotOverloaded builds the error for a Struct missing a required
// method implementation.
func MethodNotOverloaded(method, structName string) error {
	return fmt.Errorf("the Struct '%v' has not implemented the method '%v'", structName, method)
}

// ArityGeneratorError builds the error for a generator function declared
// with non-zero arity.
func ArityGeneratorError(method string) error {
	return fmt.Errorf("arity of a generator function '%v' must be 0", method)
}

// ExpectedIterableValueError builds the error for a non-iterable value in a
// for-loop statement.
func ExpectedIterableValueError(value Value) error {
	return fmt.Errorf("expected an iterable value in for-loop statement and got a value of type '%v'", value.TypeName())
}

// ExpectedCallableValueInDeferError builds the error for a non-callable
// value in a defer statement.
func ExpectedCallableValueInDeferError(value Value) error {
	return fmt.Errorf("expected a callable value in defer statement and got a value of type '%v'", value.TypeName())
}

// UnpackCountDoesNotMatchError builds the error for an identifier/value
// count mismatch when unpacking.
func UnpackCountDoesNotMatchError() error {
	return fmt.Errorf("count of identifiers and values to unpack does not match")
}

// ExpectedUnpackableValueError builds the error for unpacking a value whose
// type cannot be unpacked.
func ExpectedUnpackableValueError(value Value) error {
	return fmt.Errorf("expected unpackable value but got '%v'", value.TypeName())
}

// IsNotMethodProperty builds the error for an identifier that is neither a
// method nor a property of the type.
func IsNotMethodProperty(method, structName string) error {
	return fmt.Errorf("the identifier '%v' is not a method/property of the type '%v'", method, structName)
}

// ExpectedListToSpreadError builds the error for spreading a non-List value.
func ExpectedListToSpreadError(value Value) error {
	return fmt.Errorf("expected a List to spread its elements but got '%v'", value.TypeName())
}

// StackOverfloError builds the stack overflow error from the package-level
// stackOverflow message.
// NOTE(review): the exported name is misspelled ("Overflo"); renaming would
// break callers, so it is left as-is.
func StackOverfloError() error {
	return fmt.Errorf("%v", stackOverflow)
}

// VarArgArityError builds the error for a variadic call with fewer arguments
// than the minimum arity.
func VarArgArityError(arity, argCount UInt32) error {
	return fmt.Errorf("expected at least %v arguments in function call and got %v", arity, argCount)
}

// ArityError builds the error for a call whose argument count does not match
// the function's arity.
func ArityError(arity, argCount UInt32) error {
	return fmt.Errorf("expected %v arguments in function call and got %v", arity, argCount)
}
// ExpectedTypeAndGotOtherType builds the error for receiving a value of one
// type where another was expected.
func ExpectedTypeAndGotOtherType(expected, got string) error {
	const format = "expected a value of type '%v' and got '%v'"
	return fmt.Errorf(format, expected, got)
}
package types
import (
"fmt"
"math"
)
// Value is an arbitrary value that can be represented as Watson.
// Kind selects which one of the payload fields below is meaningful; the
// other fields hold their zero values.
type Value struct {
	Kind   Kind
	Int    int64             // payload when Kind == Int
	Uint   uint64            // payload when Kind == Uint
	Float  float64           // payload when Kind == Float
	String []byte            // payload when Kind == String
	Object map[string]*Value // payload when Kind == Object
	Array  []*Value          // payload when Kind == Array
	Bool   bool              // payload when Kind == Bool
}
// NewIntValue creates a new Value that contains an integer.
func NewIntValue(val int64) *Value {
	v := Value{Kind: Int, Int: val}
	return &v
}

// NewUintValue creates a new Value that contains an unsigned integer.
func NewUintValue(val uint64) *Value {
	v := Value{Kind: Uint, Uint: val}
	return &v
}

// NewFloatValue creates a new Value that contains a floating point number.
func NewFloatValue(val float64) *Value {
	v := Value{Kind: Float, Float: val}
	return &v
}

// NewStringValue creates a new Value that contains a string.
func NewStringValue(val []byte) *Value {
	v := Value{Kind: String, String: val}
	return &v
}

// NewObjectValue creates a new Value that contains an object.
func NewObjectValue(val map[string]*Value) *Value {
	v := Value{Kind: Object, Object: val}
	return &v
}

// NewArrayValue creates a new value that contains an array.
func NewArrayValue(val []*Value) *Value {
	v := Value{Kind: Array, Array: val}
	return &v
}

// NewBoolValue creates a new Value that contains a bool.
func NewBoolValue(val bool) *Value {
	v := Value{Kind: Bool, Bool: val}
	return &v
}

// NewNilValue creates a new Value that contains nil.
func NewNilValue() *Value {
	v := Value{Kind: Nil}
	return &v
}
// IsNaN returns true if v is a NaN; otherwise it returns false.
// Only Float-kinded values can be NaN.
func (v *Value) IsNaN() bool {
	if v.Kind != Float {
		return false
	}
	return math.IsNaN(v.Float)
}
// DeepCopy returns a copy of v that shares no mutable state with the
// original: strings, objects, and arrays are duplicated recursively.
// It panics on an unknown Kind, which would indicate a corrupted Value.
func (v *Value) DeepCopy() *Value {
	out := &Value{Kind: v.Kind}
	switch v.Kind {
	case Int:
		out.Int = v.Int
	case Uint:
		out.Uint = v.Uint
	case Float:
		out.Float = v.Float
	case String:
		out.String = make([]byte, len(v.String))
		copy(out.String, v.String)
	case Object:
		obj := make(map[string]*Value, len(v.Object))
		for key, member := range v.Object {
			obj[key] = member.DeepCopy()
		}
		out.Object = obj
	case Array:
		arr := make([]*Value, len(v.Array))
		for i, member := range v.Array {
			arr[i] = member.DeepCopy()
		}
		out.Array = arr
	case Bool:
		out.Bool = v.Bool
	case Nil:
		// nothing to copy
	default:
		panic(fmt.Errorf("unknown kind: %d", v.Kind))
	}
	return out
}
// GoString implements fmt.GoStringer so that %#v prints the kind together
// with the kind-appropriate payload.
func (v *Value) GoString() string {
	return fmt.Sprintf("{Kind: %#v, Value: %s}", v.Kind, v.goStringValue())
}

// goStringValue renders only the payload field selected by v.Kind. It panics
// on an unknown kind, which would indicate a corrupted Value.
func (v *Value) goStringValue() string {
	switch v.Kind {
	case Int:
		return fmt.Sprintf("%d", v.Int)
	case Uint:
		return fmt.Sprintf("%d", v.Uint)
	case Float:
		return fmt.Sprintf("%f", v.Float)
	case String:
		return fmt.Sprintf("%#v", v.String)
	case Object:
		return fmt.Sprintf("%#v", v.Object)
	case Array:
		return fmt.Sprintf("%#v", v.Array)
	case Bool:
		return fmt.Sprintf("%t", v.Bool)
	case Nil:
		return "nil"
	default:
		panic(fmt.Errorf("invalid kind: %d", v.Kind))
	}
}

// Compile-time check that *Value satisfies fmt.GoStringer.
var _ fmt.GoStringer = &Value{}
// Kind is a type of Value. The constants below are contiguous starting at
// zero; code elsewhere (e.g. table lookups) may rely on this ordering.
type Kind int

const (
	Int    Kind = iota // 64-bit signed integer
	Uint               // 64-bit unsigned integer
	Float              // IEEE-754 64-bit floating-point number
	String             // string (represented as a byte array)
	Object             // object (set of key-value pairs)
	Array              // array
	Bool               // bool
	Nil                // nil
)
// GoString implements fmt.GoStringer for Kind, returning the constant's
// name. It panics for values outside the declared Int..Nil range, which
// would indicate a corrupted Kind.
func (k Kind) GoString() string {
	if k < Int || k > Nil {
		panic(fmt.Errorf("invalid kind: %d", k))
	}
	// Indexed by the contiguous Kind constants declared above.
	return [...]string{"Int", "Uint", "Float", "String", "Object", "Array", "Bool", "Nil"}[k]
}
// Compile-time check that Kind satisfies fmt.GoStringer.
var _ fmt.GoStringer = Kind(0)

// By implementing Marshaler you can configure converting go objects into Values.
type Marshaler interface {
	MarshalWatson() (*Value, error)
}

// By implementing Unmarshaler you can configure converting Values into go objects.
type Unmarshaler interface {
	UnmarshalWatson(*Value) error
}
package keeper
import (
"sort"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/tendermint/liquidity/x/liquidity/types"
)
// Execute Swap of the pool batch, Collect swap messages in batch for transact the same price for each batch and run them on endblock.
func (k Keeper) SwapExecution(ctx sdk.Context, liquidityPoolBatch types.PoolBatch) (uint64, error) {
// get all swap message batch states that are not executed, not succeeded, and not to be deleted.
swapMsgStates := k.GetAllNotProcessedPoolBatchSwapMsgStates(ctx, liquidityPoolBatch)
if len(swapMsgStates) == 0 {
return 0, nil
}
pool, found := k.GetPool(ctx, liquidityPoolBatch.PoolId)
if !found {
return 0, types.ErrPoolNotExists
}
// set executed states of all messages to true
for _, sms := range swapMsgStates {
sms.Executed = true
}
k.SetPoolBatchSwapMsgStatesByPointer(ctx, pool.Id, swapMsgStates)
currentHeight := ctx.BlockHeight()
types.ValidateStateAndExpireOrders(swapMsgStates, currentHeight, false)
// get reserve coins from the liquidity pool and calculate the current pool price (p = x / y)
reserveCoins := k.GetReserveCoins(ctx, pool)
X := reserveCoins[0].Amount.ToDec()
Y := reserveCoins[1].Amount.ToDec()
currentPoolPrice := X.Quo(Y)
denomX := reserveCoins[0].Denom
denomY := reserveCoins[1].Denom
// make orderMap, orderbook by sort orderMap
orderMap, XtoY, YtoX := types.MakeOrderMap(swapMsgStates, denomX, denomY, false)
orderBook := orderMap.SortOrderBook()
// check orderbook validity and compute batchResult(direction, swapPrice, ..)
result, found := orderBook.Match(X, Y)
executedMsgCount := uint64(len(swapMsgStates))
if !found {
err := k.RefundSwaps(ctx, pool, swapMsgStates)
return executedMsgCount, err
}
// find order match, calculate pool delta with the total x, y amounts for the invariant check
var matchResultXtoY, matchResultYtoX []types.MatchResult
poolXDelta := sdk.ZeroDec()
poolYDelta := sdk.ZeroDec()
if result.MatchType != types.NoMatch {
var poolXDeltaXtoY, poolXDeltaYtoX, poolYDeltaYtoX, poolYDeltaXtoY sdk.Dec
matchResultXtoY, poolXDeltaXtoY, poolYDeltaXtoY = types.FindOrderMatch(types.DirectionXtoY, XtoY, result.EX, result.SwapPrice, currentHeight)
matchResultYtoX, poolXDeltaYtoX, poolYDeltaYtoX = types.FindOrderMatch(types.DirectionYtoX, YtoX, result.EY, result.SwapPrice, currentHeight)
poolXDelta = poolXDeltaXtoY.Add(poolXDeltaYtoX)
poolYDelta = poolYDeltaXtoY.Add(poolYDeltaYtoX)
}
XtoY, YtoX, X, Y, poolXDelta2, poolYDelta2, fractionalCntX, fractionalCntY, decimalErrorX, decimalErrorY :=
k.UpdateState(X, Y, XtoY, YtoX, matchResultXtoY, matchResultYtoX)
lastPrice := X.Quo(Y)
if invariantCheckFlag {
SwapMatchingInvariants(XtoY, YtoX, fractionalCntX, fractionalCntY, matchResultXtoY, matchResultYtoX)
SwapPriceInvariants(matchResultXtoY, matchResultYtoX, poolXDelta, poolYDelta, poolXDelta2, poolYDelta2, decimalErrorX, decimalErrorY, result)
}
types.ValidateStateAndExpireOrders(XtoY, currentHeight, false)
types.ValidateStateAndExpireOrders(YtoX, currentHeight, false)
orderMapExecuted, _, _ := types.MakeOrderMap(append(XtoY, YtoX...), denomX, denomY, true)
orderBookExecuted := orderMapExecuted.SortOrderBook()
if !orderBookExecuted.Validate(lastPrice) {
panic(types.ErrOrderBookInvalidity)
}
types.ValidateStateAndExpireOrders(XtoY, currentHeight, true)
types.ValidateStateAndExpireOrders(YtoX, currentHeight, true)
// make index map for match result
matchResultMap := make(map[uint64]types.MatchResult)
for _, msg := range append(matchResultXtoY, matchResultYtoX...) {
if _, ok := matchResultMap[msg.OrderMsgIndex]; ok {
panic("duplicated match order")
}
matchResultMap[msg.OrderMsgIndex] = msg
}
if invariantCheckFlag {
SwapPriceDirection(currentPoolPrice, result)
SwapMsgStatesInvariants(matchResultXtoY, matchResultYtoX, matchResultMap, swapMsgStates, XtoY, YtoX)
SwapOrdersExecutionStateInvariants(matchResultMap, swapMsgStates, result, denomX)
}
// execute transact, refund, expire, send coins with escrow, update state by TransactAndRefundSwapLiquidityPool
if err := k.TransactAndRefundSwapLiquidityPool(ctx, swapMsgStates, matchResultMap, pool, result); err != nil {
panic(err)
}
return executedMsgCount, nil
}
// Update Buy, Sell swap batch messages using the result of match.
func (k Keeper) UpdateState(X, Y sdk.Dec, XtoY, YtoX []*types.SwapMsgState, matchResultXtoY, matchResultYtoX []types.MatchResult) (
[]*types.SwapMsgState, []*types.SwapMsgState, sdk.Dec, sdk.Dec, sdk.Dec, sdk.Dec, int, int, sdk.Dec, sdk.Dec) {
sort.SliceStable(XtoY, func(i, j int) bool {
return XtoY[i].Msg.OrderPrice.GT(XtoY[j].Msg.OrderPrice)
})
sort.SliceStable(YtoX, func(i, j int) bool {
return YtoX[i].Msg.OrderPrice.LT(YtoX[j].Msg.OrderPrice)
})
poolXDelta := sdk.ZeroDec()
poolYDelta := sdk.ZeroDec()
fractionalCntX := 0
fractionalCntY := 0
// Variables to accumulate and offset the values of int 1 caused by decimal error
decimalErrorX := sdk.ZeroDec()
decimalErrorY := sdk.ZeroDec()
for _, match := range matchResultXtoY {
poolXDelta = poolXDelta.Add(match.TransactedCoinAmt)
poolYDelta = poolYDelta.Sub(match.ExchangedDemandCoinAmt)
if match.BatchMsg.Msg.OfferCoin.Amount.ToDec().Sub(match.TransactedCoinAmt).LTE(sdk.OneDec()) ||
match.BatchMsg.RemainingOfferCoin.Amount.ToDec().Sub(match.TransactedCoinAmt).LTE(sdk.OneDec()) {
// full match
match.BatchMsg.ExchangedOfferCoin = match.BatchMsg.ExchangedOfferCoin.Add(
sdk.NewCoin(match.BatchMsg.RemainingOfferCoin.Denom, match.TransactedCoinAmt.TruncateInt()))
match.BatchMsg.RemainingOfferCoin = types.CoinSafeSubAmount(match.BatchMsg.RemainingOfferCoin, match.TransactedCoinAmt.TruncateInt())
match.BatchMsg.ReservedOfferCoinFee = types.CoinSafeSubAmount(match.BatchMsg.ReservedOfferCoinFee, match.OfferCoinFeeAmt.TruncateInt())
if match.BatchMsg.RemainingOfferCoin.Amount.Equal(sdk.OneInt()) {
decimalErrorX = decimalErrorX.Add(sdk.OneDec())
match.BatchMsg.RemainingOfferCoin.Amount = sdk.ZeroInt()
}
if match.BatchMsg.RemainingOfferCoin.Amount.Add(match.BatchMsg.ExchangedOfferCoin.Amount).
GT(match.BatchMsg.Msg.OfferCoin.Amount) ||
!match.BatchMsg.RemainingOfferCoin.Equal(sdk.NewCoin(match.BatchMsg.Msg.OfferCoin.Denom, sdk.ZeroInt())) ||
match.BatchMsg.ReservedOfferCoinFee.IsGTE(sdk.NewCoin(match.BatchMsg.ReservedOfferCoinFee.Denom, sdk.NewInt(2))) {
panic("remaining not matched 1")
} else {
match.BatchMsg.Succeeded = true
match.BatchMsg.ToBeDeleted = true
}
} else {
// fractional match
match.BatchMsg.ExchangedOfferCoin = match.BatchMsg.ExchangedOfferCoin.Add(sdk.NewCoin(match.BatchMsg.Msg.OfferCoin.Denom, match.TransactedCoinAmt.TruncateInt()))
match.BatchMsg.RemainingOfferCoin = types.CoinSafeSubAmount(match.BatchMsg.RemainingOfferCoin, match.TransactedCoinAmt.TruncateInt())
match.BatchMsg.ReservedOfferCoinFee = types.CoinSafeSubAmount(match.BatchMsg.ReservedOfferCoinFee, match.OfferCoinFeeAmt.TruncateInt())
match.BatchMsg.Succeeded = true
match.BatchMsg.ToBeDeleted = false
fractionalCntX++
}
}
for _, match := range matchResultYtoX {
poolXDelta = poolXDelta.Sub(match.ExchangedDemandCoinAmt)
poolYDelta = poolYDelta.Add(match.TransactedCoinAmt)
if match.BatchMsg.Msg.OfferCoin.Amount.ToDec().Sub(match.TransactedCoinAmt).LTE(sdk.OneDec()) ||
match.BatchMsg.RemainingOfferCoin.Amount.ToDec().Sub(match.TransactedCoinAmt).LTE(sdk.OneDec()) {
// full match
match.BatchMsg.ExchangedOfferCoin = match.BatchMsg.ExchangedOfferCoin.Add(
sdk.NewCoin(match.BatchMsg.RemainingOfferCoin.Denom, match.TransactedCoinAmt.TruncateInt()))
match.BatchMsg.RemainingOfferCoin = types.CoinSafeSubAmount(match.BatchMsg.RemainingOfferCoin, match.TransactedCoinAmt.TruncateInt())
match.BatchMsg.ReservedOfferCoinFee = types.CoinSafeSubAmount(match.BatchMsg.ReservedOfferCoinFee, match.OfferCoinFeeAmt.TruncateInt())
if match.BatchMsg.RemainingOfferCoin.Amount.Equal(sdk.OneInt()) {
decimalErrorY = decimalErrorY.Add(sdk.OneDec())
match.BatchMsg.RemainingOfferCoin.Amount = sdk.ZeroInt()
}
if match.BatchMsg.RemainingOfferCoin.Amount.Add(match.BatchMsg.ExchangedOfferCoin.Amount).
GT(match.BatchMsg.Msg.OfferCoin.Amount) ||
!match.BatchMsg.RemainingOfferCoin.Equal(sdk.NewCoin(match.BatchMsg.Msg.OfferCoin.Denom, sdk.ZeroInt())) ||
match.BatchMsg.ReservedOfferCoinFee.IsGTE(sdk.NewCoin(match.BatchMsg.ReservedOfferCoinFee.Denom, sdk.NewInt(2))) {
panic("remaining not matched 2")
} else {
match.BatchMsg.Succeeded = true
match.BatchMsg.ToBeDeleted = true
}
} else {
// fractional match
match.BatchMsg.ExchangedOfferCoin = match.BatchMsg.ExchangedOfferCoin.Add(sdk.NewCoin(match.BatchMsg.Msg.OfferCoin.Denom, match.TransactedCoinAmt.TruncateInt()))
match.BatchMsg.RemainingOfferCoin = types.CoinSafeSubAmount(match.BatchMsg.RemainingOfferCoin, match.TransactedCoinAmt.TruncateInt())
match.BatchMsg.ReservedOfferCoinFee = types.CoinSafeSubAmount(match.BatchMsg.ReservedOfferCoinFee, match.OfferCoinFeeAmt.TruncateInt())
match.BatchMsg.Succeeded = true
match.BatchMsg.ToBeDeleted = false
fractionalCntY++
}
}
// Offset accumulated decimal error values
poolXDelta = poolXDelta.Add(decimalErrorX)
poolYDelta = poolYDelta.Add(decimalErrorY)
X = X.Add(poolXDelta)
Y = Y.Add(poolYDelta)
return XtoY, YtoX, X, Y, poolXDelta, poolYDelta, fractionalCntX, fractionalCntY, decimalErrorX, decimalErrorY
} | x/liquidity/keeper/swap.go | 0.638046 | 0.404096 | swap.go | starcoder |
package transform
import (
"context"
"log"
"github.com/turbot/steampipe-plugin-sdk/plugin/quals"
)
// TransformData is the input to a transform function.
type TransformData struct {
	// Param is an optional, caller-supplied parameter for the transform.
	Param interface{}
	// Value is the value to be transformed.
	Value interface{}
	// HydrateItem is the data object containing the source data for this column.
	HydrateItem interface{}
	// HydrateResults contains all hydrate results for the row.
	HydrateResults map[string]interface{}
	// ColumnName is the name of the column this transform is generating.
	ColumnName string
	// MatrixItem is the 'matrix item' associated with this row.
	MatrixItem map[string]interface{}
	// KeyColumnQuals will be populated with the quals as a map of column name to an array of quals for that column.
	KeyColumnQuals map[string]quals.QualSlice
}
// TransformFunc is a function to transform a data value from the api value to
// a column value. All inputs (value, hydrate item, param, etc.) are carried in
// the TransformData argument; the function returns the transformed value.
type TransformFunc func(context.Context, *TransformData) (interface{}, error)

// GetSourceFieldFunc maps a data item to the name of its source field.
// NOTE(review): the exact contract (what the interface{} argument is expected
// to be) is not visible in this file — confirm against callers.
type GetSourceFieldFunc func(interface{}) string
// ColumnTransforms struct defines the data transforms required to map from a JSON value to a column value.
type ColumnTransforms struct {
	// Transforms is the ordered list of transforms to apply to the data.
	Transforms []*TransformCall
	// ApplyDefaultTransform indicates whether this transform chain should start
	// with the column's default transform before running Transforms.
	ApplyDefaultTransform bool
}
// Execute runs the transform chain for a column. When ApplyDefaultTransform is
// set, the column's default transforms run first and their output is fed as the
// starting value into this column's own transforms.
func (t *ColumnTransforms) Execute(ctx context.Context, transformData *TransformData, defaultTransform *ColumnTransforms) (interface{}, error) {
	var value interface{}
	if t.ApplyDefaultTransform {
		log.Printf("[TRACE] ColumnTransforms.Execute - running default transforms first\n")
		defaultValue, err := callTransforms(ctx, value, transformData, defaultTransform.Transforms)
		if err != nil {
			return nil, err
		}
		value = defaultValue
	}
	return callTransforms(ctx, value, transformData, t.Transforms)
}
func callTransforms(ctx context.Context, value interface{}, transformData *TransformData, transforms []*TransformCall) (interface{}, error) {
for _, tr := range transforms {
var err error
value, err = tr.Execute(ctx, value, transformData)
if err != nil {
return nil, err
}
}
return value, nil
} | plugin/transform/column_transforms.go | 0.660501 | 0.403391 | column_transforms.go | starcoder |
// Package stroke converts complex strokes to gioui.org/op/clip operations.
package stroke
import (
"gioui.org/f32"
"gioui.org/op"
"gioui.org/op/clip"
)
// Path defines the shape of a Stroke as an ordered sequence of path commands.
type Path struct {
	// Segments holds the path commands in drawing order.
	Segments []Segment
}
// Segment is a single path command together with its coordinate arguments.
// Construct segments with MoveTo, LineTo, QuadTo or CubeTo.
type Segment struct {
	// op is the operator.
	op segmentOp
	// args is up to three (x, y) coordinates; how many are meaningful depends
	// on op: one for move/line, two for quad, three for cube.
	args [3]f32.Point
}
// Dashes defines the dash pattern of a Stroke.
type Dashes struct {
	// Phase is the starting offset into the dash pattern.
	// NOTE(review): exact semantics live in the stroking implementation — confirm.
	Phase float32
	// Dashes lists the dash pattern lengths, presumably alternating
	// drawn/skipped spans — confirm against the stroking implementation.
	Dashes []float32
}
// Stroke defines a stroke: a path plus the parameters controlling how its
// outline is rendered. Convert it to a clip operation with Op.
type Stroke struct {
	// Path is the shape to be stroked.
	Path Path
	Width float32 // Width of the stroked path.
	// Miter is the limit to apply to a miter joint.
	// The zero Miter disables the miter joint; setting Miter to +∞
	// unconditionally enables the miter joint.
	Miter float32
	Cap StrokeCap // Cap describes the head or tail of a stroked path.
	Join StrokeJoin // Join describes how stroked paths are collated.
	// Dashes is the dash pattern; the zero value strokes a solid line.
	Dashes Dashes
}
// segmentOp identifies which path command a Segment encodes.
type segmentOp uint8

const (
	// segOpMoveTo moves the pen; args[0] is the destination.
	segOpMoveTo segmentOp = iota
	// segOpLineTo draws a straight line; args[0] is the end point.
	segOpLineTo
	// segOpQuadTo draws a quadratic Bézier; args[0] is the control point,
	// args[1] the end point.
	segOpQuadTo
	// segOpCubeTo draws a cubic Bézier; args[0] and args[1] are the control
	// points, args[2] the end point.
	segOpCubeTo
)
// StrokeCap describes the head or tail of a stroked path.
type StrokeCap uint8

const (
	// RoundCap caps stroked paths with a round cap, joining the right-hand and
	// left-hand sides of a stroked path with a half disc of diameter the
	// stroked path's width. RoundCap is the zero value and thus the default.
	RoundCap StrokeCap = iota
	// FlatCap caps stroked paths with a flat cap, joining the right-hand
	// and left-hand sides of a stroked path with a straight line.
	FlatCap
	// SquareCap caps stroked paths with a square cap, joining the right-hand
	// and left-hand sides of a stroked path with a half square of length
	// the stroked path's width.
	SquareCap
)
// StrokeJoin describes how stroked paths are collated.
type StrokeJoin uint8

const (
	// RoundJoin joins path segments with a round segment. RoundJoin is the
	// zero value and thus the default.
	RoundJoin StrokeJoin = iota
	// BevelJoin joins path segments with sharp bevels.
	BevelJoin
)
// MoveTo returns a Segment that moves the pen to p without drawing.
func MoveTo(p f32.Point) Segment {
	return Segment{
		op:   segOpMoveTo,
		args: [3]f32.Point{p},
	}
}
// LineTo returns a Segment that draws a straight line to p.
func LineTo(p f32.Point) Segment {
	return Segment{
		op:   segOpLineTo,
		args: [3]f32.Point{p},
	}
}
// QuadTo returns a Segment that draws a quadratic Bézier curve through
// control point ctrl ending at end.
func QuadTo(ctrl, end f32.Point) Segment {
	return Segment{
		op:   segOpQuadTo,
		args: [3]f32.Point{ctrl, end},
	}
}
// CubeTo returns a Segment that draws a cubic Bézier curve through control
// points ctrl0 and ctrl1, ending at end.
func CubeTo(ctrl0, ctrl1, end f32.Point) Segment {
	return Segment{
		op:   segOpCubeTo,
		args: [3]f32.Point{ctrl0, ctrl1, end},
	}
}
// Op returns a clip operation that approximates stroke.
func (s Stroke) Op(ops *op.Ops) clip.Op {
if len(s.Path.Segments) == 0 {
return clip.Op{}
}
// Approximate and output path data.
var outline clip.Path
outline.Begin(ops)
quads := strokePathCommands(s)
pen := f32.Pt(0, 0)
for _, quad := range quads {
q := quad.Quad
if q.From != pen {
pen = q.From
outline.MoveTo(pen)
}
outline.QuadTo(q.Ctrl, q.To)
pen = q.To
}
return clip.Outline{Path: outline.End()}.Op()
} | stroke/stroke.go | 0.67854 | 0.603056 | stroke.go | starcoder |
package convert
// A statute mile is 5,280 feet, as defined in 14 CFR Part 298.2 and mentioned in the
// Pilot's Handbook of Aeronautical Knowledge (http://www.faa.gov/regulations_policies/handbooks_manuals/aviation/pilot_handbook/media/PHAK%20-%20Chapter%2015.pdf).
const StatuteMileInFeet = 5280

// A nautical mile is mentioned as 6076.1 in the PHAK (http://www.faa.gov/regulations_policies/handbooks_manuals/aviation/pilot_handbook/media/PHAK%20-%20Chapter%2015.pdf).
// The NGA calls it 6,076.11549 (http://msi.nga.mil/MSISiteContent/StaticFiles/NAV_PUBS/DBP/endtables.pdf).
// Wikipedia calls it 6,076.12 (http://en.wikipedia.org/wiki/Nautical_mile#Conversions_to_other_units).
const NauticalMileInFeet = 6076.1

// statuteMileConversionFactor is the number of nautical miles in one statute
// mile (~0.869): multiply statute miles by it to get nautical miles.
const statuteMileConversionFactor = StatuteMileInFeet / NauticalMileInFeet

// nauticalMileConversionFactor is the number of statute miles in one nautical
// mile (~1.151): multiply nautical miles by it to get statute miles.
const nauticalMileConversionFactor = NauticalMileInFeet / StatuteMileInFeet

// StatuteToNauticalMiles converts statute miles to nautical miles.
//
// Bug fix: this previously multiplied by nauticalMileConversionFactor (~1.151),
// which inflated the result. A statute mile (5,280 ft) is shorter than a
// nautical mile (6,076.1 ft), so the converted value must be smaller.
func StatuteToNauticalMiles(statute float64) float64 {
	return statute * statuteMileConversionFactor
}

// NauticalToStatuteMiles converts nautical miles to statute miles.
//
// Bug fix: this previously multiplied by statuteMileConversionFactor (~0.869),
// which shrank the result; one nautical mile is ~1.151 statute miles.
func NauticalToStatuteMiles(nautical float64) float64 {
	return nautical * nauticalMileConversionFactor
}
// Planning weights, in pounds per gallon, for the fluids handled below.
const (
	gasWeightInPounds      = 6    // AvGas (100LL)
	jetAWeightInPounds     = 6.8  // Jet-A
	waterWeightInPounds    = 8.35 // water
	oilWaterWeightInPounds = 7.5  // oil
)

// PoundsOfGasToGallons converts pounds of AvGas (100LL) to gallons.
func PoundsOfGasToGallons(pounds float64) float64 {
	return pounds / gasWeightInPounds
}

// GallonsOfGasToPounds converts gallons of AvGas (100LL) to pounds.
func GallonsOfGasToPounds(gallons float64) float64 {
	return gasWeightInPounds * gallons
}

// PoundsOfJetAToGallons converts pounds of Jet-A to gallons.
func PoundsOfJetAToGallons(pounds float64) float64 {
	return pounds / jetAWeightInPounds
}

// GallonsOfJetAToPounds converts gallons of Jet-A to pounds.
func GallonsOfJetAToPounds(gallons float64) float64 {
	return jetAWeightInPounds * gallons
}

// PoundsOfWaterToGallons converts pounds of water to gallons.
func PoundsOfWaterToGallons(pounds float64) float64 {
	return pounds / waterWeightInPounds
}

// GallonsOfWaterToPounds converts gallons of water to pounds.
func GallonsOfWaterToPounds(gallons float64) float64 {
	return waterWeightInPounds * gallons
}

// PoundsOfOilToGallons converts pounds of oil to gallons.
func PoundsOfOilToGallons(pounds float64) float64 {
	return pounds / oilWaterWeightInPounds
}

// GallonsOfOilToPounds converts gallons of oil to pounds.
func GallonsOfOilToPounds(gallons float64) float64 {
	return oilWaterWeightInPounds * gallons
}
package selector
import (
"github.com/llir/ll"
)
// Selector is a predicate over node types: it reports whether the given
// ll.NodeType matches.
type Selector func(nt ll.NodeType) bool
var (
Any = func(t ll.NodeType) bool { return true }
APINotesField = func(t ll.NodeType) bool { return t == ll.APINotesField }
AShrExpr = func(t ll.NodeType) bool { return t == ll.AShrExpr }
AShrInst = func(t ll.NodeType) bool { return t == ll.AShrInst }
AddExpr = func(t ll.NodeType) bool { return t == ll.AddExpr }
AddInst = func(t ll.NodeType) bool { return t == ll.AddInst }
AddrSpace = func(t ll.NodeType) bool { return t == ll.AddrSpace }
AddrSpaceCastExpr = func(t ll.NodeType) bool { return t == ll.AddrSpaceCastExpr }
AddrSpaceCastInst = func(t ll.NodeType) bool { return t == ll.AddrSpaceCastInst }
Align = func(t ll.NodeType) bool { return t == ll.Align }
AlignField = func(t ll.NodeType) bool { return t == ll.AlignField }
AlignPair = func(t ll.NodeType) bool { return t == ll.AlignPair }
AlignStack = func(t ll.NodeType) bool { return t == ll.AlignStack }
AlignStackPair = func(t ll.NodeType) bool { return t == ll.AlignStackPair }
AlignStackTok = func(t ll.NodeType) bool { return t == ll.AlignStackTok }
AllocSize = func(t ll.NodeType) bool { return t == ll.AllocSize }
AllocaInst = func(t ll.NodeType) bool { return t == ll.AllocaInst }
AllocatedField = func(t ll.NodeType) bool { return t == ll.AllocatedField }
AndExpr = func(t ll.NodeType) bool { return t == ll.AndExpr }
AndInst = func(t ll.NodeType) bool { return t == ll.AndInst }
Arg = func(t ll.NodeType) bool { return t == ll.Arg }
ArgField = func(t ll.NodeType) bool { return t == ll.ArgField }
Args = func(t ll.NodeType) bool { return t == ll.Args }
ArrayConst = func(t ll.NodeType) bool { return t == ll.ArrayConst }
ArrayType = func(t ll.NodeType) bool { return t == ll.ArrayType }
AssociatedField = func(t ll.NodeType) bool { return t == ll.AssociatedField }
Atomic = func(t ll.NodeType) bool { return t == ll.Atomic }
AtomicOp = func(t ll.NodeType) bool { return t == ll.AtomicOp }
AtomicOrdering = func(t ll.NodeType) bool { return t == ll.AtomicOrdering }
AtomicRMWInst = func(t ll.NodeType) bool { return t == ll.AtomicRMWInst }
AttrGroupDef = func(t ll.NodeType) bool { return t == ll.AttrGroupDef }
AttrGroupID = func(t ll.NodeType) bool { return t == ll.AttrGroupID }
AttrPair = func(t ll.NodeType) bool { return t == ll.AttrPair }
AttrString = func(t ll.NodeType) bool { return t == ll.AttrString }
AttributesField = func(t ll.NodeType) bool { return t == ll.AttributesField }
BaseTypeField = func(t ll.NodeType) bool { return t == ll.BaseTypeField }
BasicBlock = func(t ll.NodeType) bool { return t == ll.BasicBlock }
BitCastExpr = func(t ll.NodeType) bool { return t == ll.BitCastExpr }
BitCastInst = func(t ll.NodeType) bool { return t == ll.BitCastInst }
BlockAddressConst = func(t ll.NodeType) bool { return t == ll.BlockAddressConst }
BoolConst = func(t ll.NodeType) bool { return t == ll.BoolConst }
BoolLit = func(t ll.NodeType) bool { return t == ll.BoolLit }
BrTerm = func(t ll.NodeType) bool { return t == ll.BrTerm }
ByRefAttr = func(t ll.NodeType) bool { return t == ll.ByRefAttr }
Byval = func(t ll.NodeType) bool { return t == ll.Byval }
CCField = func(t ll.NodeType) bool { return t == ll.CCField }
CallBrTerm = func(t ll.NodeType) bool { return t == ll.CallBrTerm }
CallInst = func(t ll.NodeType) bool { return t == ll.CallInst }
CallingConvEnum = func(t ll.NodeType) bool { return t == ll.CallingConvEnum }
CallingConvInt = func(t ll.NodeType) bool { return t == ll.CallingConvInt }
Case = func(t ll.NodeType) bool { return t == ll.Case }
CatchPadInst = func(t ll.NodeType) bool { return t == ll.CatchPadInst }
CatchRetTerm = func(t ll.NodeType) bool { return t == ll.CatchRetTerm }
CatchSwitchTerm = func(t ll.NodeType) bool { return t == ll.CatchSwitchTerm }
CharArrayConst = func(t ll.NodeType) bool { return t == ll.CharArrayConst }
ChecksumField = func(t ll.NodeType) bool { return t == ll.ChecksumField }
ChecksumKind = func(t ll.NodeType) bool { return t == ll.ChecksumKind }
ChecksumkindField = func(t ll.NodeType) bool { return t == ll.ChecksumkindField }
Clause = func(t ll.NodeType) bool { return t == ll.Clause }
ClauseType = func(t ll.NodeType) bool { return t == ll.ClauseType }
Cleanup = func(t ll.NodeType) bool { return t == ll.Cleanup }
CleanupPadInst = func(t ll.NodeType) bool { return t == ll.CleanupPadInst }
CleanupRetTerm = func(t ll.NodeType) bool { return t == ll.CleanupRetTerm }
CmpXchgInst = func(t ll.NodeType) bool { return t == ll.CmpXchgInst }
ColumnField = func(t ll.NodeType) bool { return t == ll.ColumnField }
Comdat = func(t ll.NodeType) bool { return t == ll.Comdat }
ComdatDef = func(t ll.NodeType) bool { return t == ll.ComdatDef }
ComdatName = func(t ll.NodeType) bool { return t == ll.ComdatName }
CondBrTerm = func(t ll.NodeType) bool { return t == ll.CondBrTerm }
ConfigMacrosField = func(t ll.NodeType) bool { return t == ll.ConfigMacrosField }
ContainingTypeField = func(t ll.NodeType) bool { return t == ll.ContainingTypeField }
CountField = func(t ll.NodeType) bool { return t == ll.CountField }
DIBasicType = func(t ll.NodeType) bool { return t == ll.DIBasicType }
DICommonBlock = func(t ll.NodeType) bool { return t == ll.DICommonBlock }
DICompileUnit = func(t ll.NodeType) bool { return t == ll.DICompileUnit }
DICompositeType = func(t ll.NodeType) bool { return t == ll.DICompositeType }
DIDerivedType = func(t ll.NodeType) bool { return t == ll.DIDerivedType }
DIEnumerator = func(t ll.NodeType) bool { return t == ll.DIEnumerator }
DIExpression = func(t ll.NodeType) bool { return t == ll.DIExpression }
DIFile = func(t ll.NodeType) bool { return t == ll.DIFile }
DIFlagEnum = func(t ll.NodeType) bool { return t == ll.DIFlagEnum }
DIFlagInt = func(t ll.NodeType) bool { return t == ll.DIFlagInt }
DIFlags = func(t ll.NodeType) bool { return t == ll.DIFlags }
DIGlobalVariable = func(t ll.NodeType) bool { return t == ll.DIGlobalVariable }
DIGlobalVariableExpression = func(t ll.NodeType) bool { return t == ll.DIGlobalVariableExpression }
DIImportedEntity = func(t ll.NodeType) bool { return t == ll.DIImportedEntity }
DILabel = func(t ll.NodeType) bool { return t == ll.DILabel }
DILexicalBlock = func(t ll.NodeType) bool { return t == ll.DILexicalBlock }
DILexicalBlockFile = func(t ll.NodeType) bool { return t == ll.DILexicalBlockFile }
DILocalVariable = func(t ll.NodeType) bool { return t == ll.DILocalVariable }
DILocation = func(t ll.NodeType) bool { return t == ll.DILocation }
DIMacro = func(t ll.NodeType) bool { return t == ll.DIMacro }
DIMacroFile = func(t ll.NodeType) bool { return t == ll.DIMacroFile }
DIModule = func(t ll.NodeType) bool { return t == ll.DIModule }
DINamespace = func(t ll.NodeType) bool { return t == ll.DINamespace }
DIObjCProperty = func(t ll.NodeType) bool { return t == ll.DIObjCProperty }
DISPFlagEnum = func(t ll.NodeType) bool { return t == ll.DISPFlagEnum }
DISPFlagInt = func(t ll.NodeType) bool { return t == ll.DISPFlagInt }
DISPFlags = func(t ll.NodeType) bool { return t == ll.DISPFlags }
DISubprogram = func(t ll.NodeType) bool { return t == ll.DISubprogram }
DISubrange = func(t ll.NodeType) bool { return t == ll.DISubrange }
DISubroutineType = func(t ll.NodeType) bool { return t == ll.DISubroutineType }
DITemplateTypeParameter = func(t ll.NodeType) bool { return t == ll.DITemplateTypeParameter }
DITemplateValueParameter = func(t ll.NodeType) bool { return t == ll.DITemplateValueParameter }
DLLStorageClass = func(t ll.NodeType) bool { return t == ll.DLLStorageClass }
DataLocationField = func(t ll.NodeType) bool { return t == ll.DataLocationField }
DebugInfoForProfilingField = func(t ll.NodeType) bool { return t == ll.DebugInfoForProfilingField }
DeclarationField = func(t ll.NodeType) bool { return t == ll.DeclarationField }
DefaultedField = func(t ll.NodeType) bool { return t == ll.DefaultedField }
Dereferenceable = func(t ll.NodeType) bool { return t == ll.Dereferenceable }
DereferenceableOrNull = func(t ll.NodeType) bool { return t == ll.DereferenceableOrNull }
DirectoryField = func(t ll.NodeType) bool { return t == ll.DirectoryField }
DiscriminatorField = func(t ll.NodeType) bool { return t == ll.DiscriminatorField }
DiscriminatorIntField = func(t ll.NodeType) bool { return t == ll.DiscriminatorIntField }
Distinct = func(t ll.NodeType) bool { return t == ll.Distinct }
DwarfAddressSpaceField = func(t ll.NodeType) bool { return t == ll.DwarfAddressSpaceField }
DwarfAttEncodingEnum = func(t ll.NodeType) bool { return t == ll.DwarfAttEncodingEnum }
DwarfAttEncodingInt = func(t ll.NodeType) bool { return t == ll.DwarfAttEncodingInt }
DwarfCCEnum = func(t ll.NodeType) bool { return t == ll.DwarfCCEnum }
DwarfCCInt = func(t ll.NodeType) bool { return t == ll.DwarfCCInt }
DwarfLangEnum = func(t ll.NodeType) bool { return t == ll.DwarfLangEnum }
DwarfLangInt = func(t ll.NodeType) bool { return t == ll.DwarfLangInt }
DwarfMacinfoEnum = func(t ll.NodeType) bool { return t == ll.DwarfMacinfoEnum }
DwarfMacinfoInt = func(t ll.NodeType) bool { return t == ll.DwarfMacinfoInt }
DwarfOp = func(t ll.NodeType) bool { return t == ll.DwarfOp }
DwarfTagEnum = func(t ll.NodeType) bool { return t == ll.DwarfTagEnum }
DwarfTagInt = func(t ll.NodeType) bool { return t == ll.DwarfTagInt }
DwarfVirtualityEnum = func(t ll.NodeType) bool { return t == ll.DwarfVirtualityEnum }
DwarfVirtualityInt = func(t ll.NodeType) bool { return t == ll.DwarfVirtualityInt }
DwoIdField = func(t ll.NodeType) bool { return t == ll.DwoIdField }
ElementsField = func(t ll.NodeType) bool { return t == ll.ElementsField }
Ellipsis = func(t ll.NodeType) bool { return t == ll.Ellipsis }
EmissionKindEnum = func(t ll.NodeType) bool { return t == ll.EmissionKindEnum }
EmissionKindField = func(t ll.NodeType) bool { return t == ll.EmissionKindField }
EmissionKindInt = func(t ll.NodeType) bool { return t == ll.EmissionKindInt }
EncodingField = func(t ll.NodeType) bool { return t == ll.EncodingField }
EntityField = func(t ll.NodeType) bool { return t == ll.EntityField }
EnumsField = func(t ll.NodeType) bool { return t == ll.EnumsField }
Exact = func(t ll.NodeType) bool { return t == ll.Exact }
ExceptionArg = func(t ll.NodeType) bool { return t == ll.ExceptionArg }
ExportSymbolsField = func(t ll.NodeType) bool { return t == ll.ExportSymbolsField }
ExprField = func(t ll.NodeType) bool { return t == ll.ExprField }
ExternLinkage = func(t ll.NodeType) bool { return t == ll.ExternLinkage }
ExternallyInitialized = func(t ll.NodeType) bool { return t == ll.ExternallyInitialized }
ExtraDataField = func(t ll.NodeType) bool { return t == ll.ExtraDataField }
ExtractElementExpr = func(t ll.NodeType) bool { return t == ll.ExtractElementExpr }
ExtractElementInst = func(t ll.NodeType) bool { return t == ll.ExtractElementInst }
ExtractValueExpr = func(t ll.NodeType) bool { return t == ll.ExtractValueExpr }
ExtractValueInst = func(t ll.NodeType) bool { return t == ll.ExtractValueInst }
FAddExpr = func(t ll.NodeType) bool { return t == ll.FAddExpr }
FAddInst = func(t ll.NodeType) bool { return t == ll.FAddInst }
FCmpExpr = func(t ll.NodeType) bool { return t == ll.FCmpExpr }
FCmpInst = func(t ll.NodeType) bool { return t == ll.FCmpInst }
FDivExpr = func(t ll.NodeType) bool { return t == ll.FDivExpr }
FDivInst = func(t ll.NodeType) bool { return t == ll.FDivInst }
FMulExpr = func(t ll.NodeType) bool { return t == ll.FMulExpr }
FMulInst = func(t ll.NodeType) bool { return t == ll.FMulInst }
FNegExpr = func(t ll.NodeType) bool { return t == ll.FNegExpr }
FNegInst = func(t ll.NodeType) bool { return t == ll.FNegInst }
FPExtExpr = func(t ll.NodeType) bool { return t == ll.FPExtExpr }
FPExtInst = func(t ll.NodeType) bool { return t == ll.FPExtInst }
FPToSIExpr = func(t ll.NodeType) bool { return t == ll.FPToSIExpr }
FPToSIInst = func(t ll.NodeType) bool { return t == ll.FPToSIInst }
FPToUIExpr = func(t ll.NodeType) bool { return t == ll.FPToUIExpr }
FPToUIInst = func(t ll.NodeType) bool { return t == ll.FPToUIInst }
FPTruncExpr = func(t ll.NodeType) bool { return t == ll.FPTruncExpr }
FPTruncInst = func(t ll.NodeType) bool { return t == ll.FPTruncInst }
FPred = func(t ll.NodeType) bool { return t == ll.FPred }
FRemExpr = func(t ll.NodeType) bool { return t == ll.FRemExpr }
FRemInst = func(t ll.NodeType) bool { return t == ll.FRemInst }
FSubExpr = func(t ll.NodeType) bool { return t == ll.FSubExpr }
FSubInst = func(t ll.NodeType) bool { return t == ll.FSubInst }
FastMathFlag = func(t ll.NodeType) bool { return t == ll.FastMathFlag }
FenceInst = func(t ll.NodeType) bool { return t == ll.FenceInst }
FileField = func(t ll.NodeType) bool { return t == ll.FileField }
FilenameField = func(t ll.NodeType) bool { return t == ll.FilenameField }
FlagsField = func(t ll.NodeType) bool { return t == ll.FlagsField }
FlagsStringField = func(t ll.NodeType) bool { return t == ll.FlagsStringField }
FloatConst = func(t ll.NodeType) bool { return t == ll.FloatConst }
FloatKind = func(t ll.NodeType) bool { return t == ll.FloatKind }
FloatLit = func(t ll.NodeType) bool { return t == ll.FloatLit }
FloatType = func(t ll.NodeType) bool { return t == ll.FloatType }
FreezeInst = func(t ll.NodeType) bool { return t == ll.FreezeInst }
FuncAttr = func(t ll.NodeType) bool { return t == ll.FuncAttr }
FuncBody = func(t ll.NodeType) bool { return t == ll.FuncBody }
FuncDecl = func(t ll.NodeType) bool { return t == ll.FuncDecl }
FuncDef = func(t ll.NodeType) bool { return t == ll.FuncDef }
FuncHeader = func(t ll.NodeType) bool { return t == ll.FuncHeader }
FuncType = func(t ll.NodeType) bool { return t == ll.FuncType }
GCNode = func(t ll.NodeType) bool { return t == ll.GCNode }
GEPIndex = func(t ll.NodeType) bool { return t == ll.GEPIndex }
GenericDINode = func(t ll.NodeType) bool { return t == ll.GenericDINode }
GetElementPtrExpr = func(t ll.NodeType) bool { return t == ll.GetElementPtrExpr }
GetElementPtrInst = func(t ll.NodeType) bool { return t == ll.GetElementPtrInst }
GetterField = func(t ll.NodeType) bool { return t == ll.GetterField }
GlobalDecl = func(t ll.NodeType) bool { return t == ll.GlobalDecl }
GlobalIdent = func(t ll.NodeType) bool { return t == ll.GlobalIdent }
GlobalsField = func(t ll.NodeType) bool { return t == ll.GlobalsField }
Handlers = func(t ll.NodeType) bool { return t == ll.Handlers }
HeaderField = func(t ll.NodeType) bool { return t == ll.HeaderField }
ICmpExpr = func(t ll.NodeType) bool { return t == ll.ICmpExpr }
ICmpInst = func(t ll.NodeType) bool { return t == ll.ICmpInst }
IPred = func(t ll.NodeType) bool { return t == ll.IPred }
IdentifierField = func(t ll.NodeType) bool { return t == ll.IdentifierField }
Immutable = func(t ll.NodeType) bool { return t == ll.Immutable }
ImportsField = func(t ll.NodeType) bool { return t == ll.ImportsField }
InAlloca = func(t ll.NodeType) bool { return t == ll.InAlloca }
InBounds = func(t ll.NodeType) bool { return t == ll.InBounds }
InRange = func(t ll.NodeType) bool { return t == ll.InRange }
Inc = func(t ll.NodeType) bool { return t == ll.Inc }
IncludePathField = func(t ll.NodeType) bool { return t == ll.IncludePathField }
IndirectBrTerm = func(t ll.NodeType) bool { return t == ll.IndirectBrTerm }
IndirectSymbolDef = func(t ll.NodeType) bool { return t == ll.IndirectSymbolDef }
IndirectSymbolKind = func(t ll.NodeType) bool { return t == ll.IndirectSymbolKind }
InlineAsm = func(t ll.NodeType) bool { return t == ll.InlineAsm }
InlinedAtField = func(t ll.NodeType) bool { return t == ll.InlinedAtField }
InsertElementExpr = func(t ll.NodeType) bool { return t == ll.InsertElementExpr }
InsertElementInst = func(t ll.NodeType) bool { return t == ll.InsertElementInst }
InsertValueExpr = func(t ll.NodeType) bool { return t == ll.InsertValueExpr }
InsertValueInst = func(t ll.NodeType) bool { return t == ll.InsertValueInst }
IntConst = func(t ll.NodeType) bool { return t == ll.IntConst }
IntLit = func(t ll.NodeType) bool { return t == ll.IntLit }
IntToPtrExpr = func(t ll.NodeType) bool { return t == ll.IntToPtrExpr }
IntToPtrInst = func(t ll.NodeType) bool { return t == ll.IntToPtrInst }
IntType = func(t ll.NodeType) bool { return t == ll.IntType }
IntelDialect = func(t ll.NodeType) bool { return t == ll.IntelDialect }
InvokeTerm = func(t ll.NodeType) bool { return t == ll.InvokeTerm }
IsDeclField = func(t ll.NodeType) bool { return t == ll.IsDeclField }
IsDefinitionField = func(t ll.NodeType) bool { return t == ll.IsDefinitionField }
IsImplicitCodeField = func(t ll.NodeType) bool { return t == ll.IsImplicitCodeField }
IsLocalField = func(t ll.NodeType) bool { return t == ll.IsLocalField }
IsOptimizedField = func(t ll.NodeType) bool { return t == ll.IsOptimizedField }
IsUnsignedField = func(t ll.NodeType) bool { return t == ll.IsUnsignedField }
LShrExpr = func(t ll.NodeType) bool { return t == ll.LShrExpr }
LShrInst = func(t ll.NodeType) bool { return t == ll.LShrInst }
Label = func(t ll.NodeType) bool { return t == ll.Label }
LabelIdent = func(t ll.NodeType) bool { return t == ll.LabelIdent }
LabelType = func(t ll.NodeType) bool { return t == ll.LabelType }
LandingPadInst = func(t ll.NodeType) bool { return t == ll.LandingPadInst }
LanguageField = func(t ll.NodeType) bool { return t == ll.LanguageField }
LineField = func(t ll.NodeType) bool { return t == ll.LineField }
Linkage = func(t ll.NodeType) bool { return t == ll.Linkage }
LinkageNameField = func(t ll.NodeType) bool { return t == ll.LinkageNameField }
LoadInst = func(t ll.NodeType) bool { return t == ll.LoadInst }
LocalDefInst = func(t ll.NodeType) bool { return t == ll.LocalDefInst }
LocalDefTerm = func(t ll.NodeType) bool { return t == ll.LocalDefTerm }
LocalIdent = func(t ll.NodeType) bool { return t == ll.LocalIdent }
LowerBoundField = func(t ll.NodeType) bool { return t == ll.LowerBoundField }
MDString = func(t ll.NodeType) bool { return t == ll.MDString }
MDTuple = func(t ll.NodeType) bool { return t == ll.MDTuple }
MMXType = func(t ll.NodeType) bool { return t == ll.MMXType }
MacrosField = func(t ll.NodeType) bool { return t == ll.MacrosField }
MetadataAttachment = func(t ll.NodeType) bool { return t == ll.MetadataAttachment }
MetadataDef = func(t ll.NodeType) bool { return t == ll.MetadataDef }
MetadataID = func(t ll.NodeType) bool { return t == ll.MetadataID }
MetadataName = func(t ll.NodeType) bool { return t == ll.MetadataName }
MetadataType = func(t ll.NodeType) bool { return t == ll.MetadataType }
Module = func(t ll.NodeType) bool { return t == ll.Module }
ModuleAsm = func(t ll.NodeType) bool { return t == ll.ModuleAsm }
MulExpr = func(t ll.NodeType) bool { return t == ll.MulExpr }
MulInst = func(t ll.NodeType) bool { return t == ll.MulInst }
NameField = func(t ll.NodeType) bool { return t == ll.NameField }
NameTableKindEnum = func(t ll.NodeType) bool { return t == ll.NameTableKindEnum }
NameTableKindField = func(t ll.NodeType) bool { return t == ll.NameTableKindField }
NameTableKindInt = func(t ll.NodeType) bool { return t == ll.NameTableKindInt }
NamedMetadataDef = func(t ll.NodeType) bool { return t == ll.NamedMetadataDef }
NamedType = func(t ll.NodeType) bool { return t == ll.NamedType }
NodesField = func(t ll.NodeType) bool { return t == ll.NodesField }
NoneConst = func(t ll.NodeType) bool { return t == ll.NoneConst }
NullConst = func(t ll.NodeType) bool { return t == ll.NullConst }
NullLit = func(t ll.NodeType) bool { return t == ll.NullLit }
OffsetField = func(t ll.NodeType) bool { return t == ll.OffsetField }
OpaqueType = func(t ll.NodeType) bool { return t == ll.OpaqueType }
OperandBundle = func(t ll.NodeType) bool { return t == ll.OperandBundle }
OperandsField = func(t ll.NodeType) bool { return t == ll.OperandsField }
OrExpr = func(t ll.NodeType) bool { return t == ll.OrExpr }
OrInst = func(t ll.NodeType) bool { return t == ll.OrInst }
OverflowFlag = func(t ll.NodeType) bool { return t == ll.OverflowFlag }
PackedStructType = func(t ll.NodeType) bool { return t == ll.PackedStructType }
Param = func(t ll.NodeType) bool { return t == ll.Param }
ParamAttr = func(t ll.NodeType) bool { return t == ll.ParamAttr }
Params = func(t ll.NodeType) bool { return t == ll.Params }
Partition = func(t ll.NodeType) bool { return t == ll.Partition }
Personality = func(t ll.NodeType) bool { return t == ll.Personality }
PhiInst = func(t ll.NodeType) bool { return t == ll.PhiInst }
PointerType = func(t ll.NodeType) bool { return t == ll.PointerType }
PoisonConst = func(t ll.NodeType) bool { return t == ll.PoisonConst }
Preallocated = func(t ll.NodeType) bool { return t == ll.Preallocated }
Preemption = func(t ll.NodeType) bool { return t == ll.Preemption }
Prefix = func(t ll.NodeType) bool { return t == ll.Prefix }
ProducerField = func(t ll.NodeType) bool { return t == ll.ProducerField }
Prologue = func(t ll.NodeType) bool { return t == ll.Prologue }
PtrToIntExpr = func(t ll.NodeType) bool { return t == ll.PtrToIntExpr }
PtrToIntInst = func(t ll.NodeType) bool { return t == ll.PtrToIntInst }
RangesBaseAddressField = func(t ll.NodeType) bool { return t == ll.RangesBaseAddressField }
RankField = func(t ll.NodeType) bool { return t == ll.RankField }
ResumeTerm = func(t ll.NodeType) bool { return t == ll.ResumeTerm }
RetTerm = func(t ll.NodeType) bool { return t == ll.RetTerm }
RetainedNodesField = func(t ll.NodeType) bool { return t == ll.RetainedNodesField }
RetainedTypesField = func(t ll.NodeType) bool { return t == ll.RetainedTypesField }
ReturnAttr = func(t ll.NodeType) bool { return t == ll.ReturnAttr }
RuntimeLangField = func(t ll.NodeType) bool { return t == ll.RuntimeLangField }
RuntimeVersionField = func(t ll.NodeType) bool { return t == ll.RuntimeVersionField }
SDKField = func(t ll.NodeType) bool { return t == ll.SDKField }
SDivExpr = func(t ll.NodeType) bool { return t == ll.SDivExpr }
SDivInst = func(t ll.NodeType) bool { return t == ll.SDivInst }
SExtExpr = func(t ll.NodeType) bool { return t == ll.SExtExpr }
SExtInst = func(t ll.NodeType) bool { return t == ll.SExtInst }
SIToFPExpr = func(t ll.NodeType) bool { return t == ll.SIToFPExpr }
SIToFPInst = func(t ll.NodeType) bool { return t == ll.SIToFPInst }
SPFlagsField = func(t ll.NodeType) bool { return t == ll.SPFlagsField }
SRemExpr = func(t ll.NodeType) bool { return t == ll.SRemExpr }
SRemInst = func(t ll.NodeType) bool { return t == ll.SRemInst }
ScalableVectorType = func(t ll.NodeType) bool { return t == ll.ScalableVectorType }
ScopeField = func(t ll.NodeType) bool { return t == ll.ScopeField }
ScopeLineField = func(t ll.NodeType) bool { return t == ll.ScopeLineField }
Section = func(t ll.NodeType) bool { return t == ll.Section }
SelectExpr = func(t ll.NodeType) bool { return t == ll.SelectExpr }
SelectInst = func(t ll.NodeType) bool { return t == ll.SelectInst }
SelectionKind = func(t ll.NodeType) bool { return t == ll.SelectionKind }
SetterField = func(t ll.NodeType) bool { return t == ll.SetterField }
ShlExpr = func(t ll.NodeType) bool { return t == ll.ShlExpr }
ShlInst = func(t ll.NodeType) bool { return t == ll.ShlInst }
ShuffleVectorExpr = func(t ll.NodeType) bool { return t == ll.ShuffleVectorExpr }
ShuffleVectorInst = func(t ll.NodeType) bool { return t == ll.ShuffleVectorInst }
SideEffect = func(t ll.NodeType) bool { return t == ll.SideEffect }
SizeField = func(t ll.NodeType) bool { return t == ll.SizeField }
SourceField = func(t ll.NodeType) bool { return t == ll.SourceField }
SourceFilename = func(t ll.NodeType) bool { return t == ll.SourceFilename }
SplitDebugFilenameField = func(t ll.NodeType) bool { return t == ll.SplitDebugFilenameField }
SplitDebugInliningField = func(t ll.NodeType) bool { return t == ll.SplitDebugInliningField }
StoreInst = func(t ll.NodeType) bool { return t == ll.StoreInst }
StrideField = func(t ll.NodeType) bool { return t == ll.StrideField }
StringLit = func(t ll.NodeType) bool { return t == ll.StringLit }
StructConst = func(t ll.NodeType) bool { return t == ll.StructConst }
StructRetAttr = func(t ll.NodeType) bool { return t == ll.StructRetAttr }
StructType = func(t ll.NodeType) bool { return t == ll.StructType }
SubExpr = func(t ll.NodeType) bool { return t == ll.SubExpr }
SubInst = func(t ll.NodeType) bool { return t == ll.SubInst }
SwiftError = func(t ll.NodeType) bool { return t == ll.SwiftError }
SwitchTerm = func(t ll.NodeType) bool { return t == ll.SwitchTerm }
SyncScope = func(t ll.NodeType) bool { return t == ll.SyncScope }
SysrootField = func(t ll.NodeType) bool { return t == ll.SysrootField }
TLSModel = func(t ll.NodeType) bool { return t == ll.TLSModel }
TagField = func(t ll.NodeType) bool { return t == ll.TagField }
Tail = func(t ll.NodeType) bool { return t == ll.Tail }
TargetDataLayout = func(t ll.NodeType) bool { return t == ll.TargetDataLayout }
TargetTriple = func(t ll.NodeType) bool { return t == ll.TargetTriple }
TemplateParamsField = func(t ll.NodeType) bool { return t == ll.TemplateParamsField }
ThisAdjustmentField = func(t ll.NodeType) bool { return t == ll.ThisAdjustmentField }
ThreadLocal = func(t ll.NodeType) bool { return t == ll.ThreadLocal }
ThrownTypesField = func(t ll.NodeType) bool { return t == ll.ThrownTypesField }
TokenType = func(t ll.NodeType) bool { return t == ll.TokenType }
TruncExpr = func(t ll.NodeType) bool { return t == ll.TruncExpr }
TruncInst = func(t ll.NodeType) bool { return t == ll.TruncInst }
TypeConst = func(t ll.NodeType) bool { return t == ll.TypeConst }
TypeDef = func(t ll.NodeType) bool { return t == ll.TypeDef }
TypeField = func(t ll.NodeType) bool { return t == ll.TypeField }
TypeMacinfoField = func(t ll.NodeType) bool { return t == ll.TypeMacinfoField }
TypeValue = func(t ll.NodeType) bool { return t == ll.TypeValue }
TypesField = func(t ll.NodeType) bool { return t == ll.TypesField }
UDivExpr = func(t ll.NodeType) bool { return t == ll.UDivExpr }
UDivInst = func(t ll.NodeType) bool { return t == ll.UDivInst }
UIToFPExpr = func(t ll.NodeType) bool { return t == ll.UIToFPExpr }
UIToFPInst = func(t ll.NodeType) bool { return t == ll.UIToFPInst }
URemExpr = func(t ll.NodeType) bool { return t == ll.URemExpr }
URemInst = func(t ll.NodeType) bool { return t == ll.URemInst }
UintLit = func(t ll.NodeType) bool { return t == ll.UintLit }
UndefConst = func(t ll.NodeType) bool { return t == ll.UndefConst }
UnitField = func(t ll.NodeType) bool { return t == ll.UnitField }
UnnamedAddr = func(t ll.NodeType) bool { return t == ll.UnnamedAddr }
UnreachableTerm = func(t ll.NodeType) bool { return t == ll.UnreachableTerm }
UnwindToCaller = func(t ll.NodeType) bool { return t == ll.UnwindToCaller }
UpperBoundField = func(t ll.NodeType) bool { return t == ll.UpperBoundField }
UseListOrder = func(t ll.NodeType) bool { return t == ll.UseListOrder }
UseListOrderBB = func(t ll.NodeType) bool { return t == ll.UseListOrderBB }
VAArgInst = func(t ll.NodeType) bool { return t == ll.VAArgInst }
ValueField = func(t ll.NodeType) bool { return t == ll.ValueField }
ValueIntField = func(t ll.NodeType) bool { return t == ll.ValueIntField }
ValueStringField = func(t ll.NodeType) bool { return t == ll.ValueStringField }
VarField = func(t ll.NodeType) bool { return t == ll.VarField }
VectorConst = func(t ll.NodeType) bool { return t == ll.VectorConst }
VectorType = func(t ll.NodeType) bool { return t == ll.VectorType }
VirtualIndexField = func(t ll.NodeType) bool { return t == ll.VirtualIndexField }
VirtualityField = func(t ll.NodeType) bool { return t == ll.VirtualityField }
Visibility = func(t ll.NodeType) bool { return t == ll.Visibility }
VoidType = func(t ll.NodeType) bool { return t == ll.VoidType }
Volatile = func(t ll.NodeType) bool { return t == ll.Volatile }
VtableHolderField = func(t ll.NodeType) bool { return t == ll.VtableHolderField }
Weak = func(t ll.NodeType) bool { return t == ll.Weak }
XorExpr = func(t ll.NodeType) bool { return t == ll.XorExpr }
XorInst = func(t ll.NodeType) bool { return t == ll.XorInst }
ZExtExpr = func(t ll.NodeType) bool { return t == ll.ZExtExpr }
ZExtInst = func(t ll.NodeType) bool { return t == ll.ZExtInst }
ZeroInitializerConst = func(t ll.NodeType) bool { return t == ll.ZeroInitializerConst }
CallingConv = OneOf(ll.CallingConv...)
ConcreteType = OneOf(ll.ConcreteType...)
Constant = OneOf(ll.Constant...)
ConstantExpr = OneOf(ll.ConstantExpr...)
DIBasicTypeField = OneOf(ll.DIBasicTypeField...)
DICommonBlockField = OneOf(ll.DICommonBlockField...)
DICompileUnitField = OneOf(ll.DICompileUnitField...)
DICompositeTypeField = OneOf(ll.DICompositeTypeField...)
DIDerivedTypeField = OneOf(ll.DIDerivedTypeField...)
DIEnumeratorField = OneOf(ll.DIEnumeratorField...)
DIExpressionField = OneOf(ll.DIExpressionField...)
DIFileField = OneOf(ll.DIFileField...)
DIFlag = OneOf(ll.DIFlag...)
DIGlobalVariableExpressionField = OneOf(ll.DIGlobalVariableExpressionField...)
DIGlobalVariableField = OneOf(ll.DIGlobalVariableField...)
DIImportedEntityField = OneOf(ll.DIImportedEntityField...)
DILabelField = OneOf(ll.DILabelField...)
DILexicalBlockField = OneOf(ll.DILexicalBlockField...)
DILexicalBlockFileField = OneOf(ll.DILexicalBlockFileField...)
DILocalVariableField = OneOf(ll.DILocalVariableField...)
DILocationField = OneOf(ll.DILocationField...)
DIMacroField = OneOf(ll.DIMacroField...)
DIMacroFileField = OneOf(ll.DIMacroFileField...)
DIModuleField = OneOf(ll.DIModuleField...)
DINamespaceField = OneOf(ll.DINamespaceField...)
DIObjCPropertyField = OneOf(ll.DIObjCPropertyField...)
DISPFlag = OneOf(ll.DISPFlag...)
DISubprogramField = OneOf(ll.DISubprogramField...)
DISubrangeField = OneOf(ll.DISubrangeField...)
DISubroutineTypeField = OneOf(ll.DISubroutineTypeField...)
DITemplateTypeParameterField = OneOf(ll.DITemplateTypeParameterField...)
DITemplateValueParameterField = OneOf(ll.DITemplateValueParameterField...)
DwarfAttEncoding = OneOf(ll.DwarfAttEncoding...)
DwarfAttEncodingOrUint = OneOf(ll.DwarfAttEncodingOrUint...)
DwarfCC = OneOf(ll.DwarfCC...)
DwarfLang = OneOf(ll.DwarfLang...)
DwarfMacinfo = OneOf(ll.DwarfMacinfo...)
DwarfTag = OneOf(ll.DwarfTag...)
DwarfVirtuality = OneOf(ll.DwarfVirtuality...)
EmissionKind = OneOf(ll.EmissionKind...)
ExceptionPad = OneOf(ll.ExceptionPad...)
FirstClassType = OneOf(ll.FirstClassType...)
FuncAttribute = OneOf(ll.FuncAttribute...)
FuncHdrField = OneOf(ll.FuncHdrField...)
GenericDINodeField = OneOf(ll.GenericDINodeField...)
GlobalField = OneOf(ll.GlobalField...)
IndirectSymbol = OneOf(ll.IndirectSymbol...)
Instruction = OneOf(ll.Instruction...)
MDField = OneOf(ll.MDField...)
MDFieldOrInt = OneOf(ll.MDFieldOrInt...)
MDNode = OneOf(ll.MDNode...)
Metadata = OneOf(ll.Metadata...)
MetadataNode = OneOf(ll.MetadataNode...)
NameTableKind = OneOf(ll.NameTableKind...)
ParamAttribute = OneOf(ll.ParamAttribute...)
ReturnAttribute = OneOf(ll.ReturnAttribute...)
SpecializedMDNode = OneOf(ll.SpecializedMDNode...)
TargetDef = OneOf(ll.TargetDef...)
Terminator = OneOf(ll.Terminator...)
TopLevelEntity = OneOf(ll.TopLevelEntity...)
Type = OneOf(ll.Type...)
UnwindTarget = OneOf(ll.UnwindTarget...)
Value = OneOf(ll.Value...)
ValueInstruction = OneOf(ll.ValueInstruction...)
ValueTerminator = OneOf(ll.ValueTerminator...)
)
func OneOf(types ...ll.NodeType) Selector {
if len(types) == 0 {
return func(ll.NodeType) bool { return false }
}
const bits = 32
max := 1
for _, t := range types {
if int(t) > max {
max = int(t)
}
}
size := (max + bits) / bits
bitarr := make([]uint32, size)
for _, t := range types {
bitarr[uint(t)/bits] |= 1 << (uint(t) % bits)
}
return func(t ll.NodeType) bool {
i := uint(t) / bits
return int(i) < len(bitarr) && bitarr[i]&(1<<(uint(t)%bits)) != 0
}
} | selector/selector.go | 0.515376 | 0.5901 | selector.go | starcoder |
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// AccessPackageAssignment models the assignment of an access package to a
// target subject, including the governing policy, delivery schedule and
// assignment state.
type AccessPackageAssignment struct {
    Entity
    // Read-only. Nullable. Supports $filter (eq) on the id property and $expand query parameters.
    accessPackage AccessPackageable
    // Read-only. Nullable. Supports $filter (eq) on the id property
    accessPackageAssignmentPolicy AccessPackageAssignmentPolicyable
    // The accessPackageAssignmentRequests property
    accessPackageAssignmentRequests []AccessPackageAssignmentRequestable
    // The resource roles delivered to the target user for this assignment. Read-only. Nullable.
    accessPackageAssignmentResourceRoles []AccessPackageAssignmentResourceRoleable
    // The identifier of the access package. Read-only.
    accessPackageId *string
    // The identifier of the access package assignment policy. Read-only.
    assignmentPolicyId *string
    // The state of the access package assignment. Possible values are Delivering, Delivered, or Expired. Read-only. Supports $filter (eq).
    assignmentState *string
    // More information about the assignment lifecycle. Possible values include Delivering, Delivered, NearExpiry1DayNotificationTriggered, or ExpiredNotificationTriggered. Read-only.
    assignmentStatus *string
    // The identifier of the catalog containing the access package. Read-only.
    catalogId *string
    // The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z
    expiredDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Indicates whether the access package assignment is extended. Read-only.
    isExtended *bool
    // When the access assignment is to be in place. Read-only.
    schedule RequestScheduleable
    // The subject of the access package assignment. Read-only. Nullable. Supports $expand. Supports $filter (eq) on objectId.
    target AccessPackageSubjectable
    // The ID of the subject with the assignment. Read-only.
    targetId *string
}
// NewAccessPackageAssignment instantiates a new accessPackageAssignment and sets the default values.
func NewAccessPackageAssignment()(*AccessPackageAssignment) {
    return &AccessPackageAssignment{
        Entity: *NewEntity(),
    }
}
// CreateAccessPackageAssignmentFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value
func CreateAccessPackageAssignmentFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    // No subtypes to discriminate between; always produce the base type.
    result := NewAccessPackageAssignment()
    return result, nil
}
// GetAccessPackage gets the accessPackage property value. Read-only. Nullable. Supports $filter (eq) on the id property and $expand query parameters.
func (m *AccessPackageAssignment) GetAccessPackage()(AccessPackageable) {
    // Guard against a nil receiver; idiomatic early return replaces else-after-return.
    if m == nil {
        return nil
    }
    return m.accessPackage
}
// GetAccessPackageAssignmentPolicy gets the accessPackageAssignmentPolicy property value. Read-only. Nullable. Supports $filter (eq) on the id property
func (m *AccessPackageAssignment) GetAccessPackageAssignmentPolicy()(AccessPackageAssignmentPolicyable) {
    if m == nil {
        return nil
    }
    return m.accessPackageAssignmentPolicy
}
// GetAccessPackageAssignmentRequests gets the accessPackageAssignmentRequests property value. The accessPackageAssignmentRequests property
func (m *AccessPackageAssignment) GetAccessPackageAssignmentRequests()([]AccessPackageAssignmentRequestable) {
    if m == nil {
        return nil
    }
    return m.accessPackageAssignmentRequests
}
// GetAccessPackageAssignmentResourceRoles gets the accessPackageAssignmentResourceRoles property value. The resource roles delivered to the target user for this assignment. Read-only. Nullable.
func (m *AccessPackageAssignment) GetAccessPackageAssignmentResourceRoles()([]AccessPackageAssignmentResourceRoleable) {
    if m == nil {
        return nil
    }
    return m.accessPackageAssignmentResourceRoles
}
// GetAccessPackageId gets the accessPackageId property value. The identifier of the access package. Read-only.
func (m *AccessPackageAssignment) GetAccessPackageId()(*string) {
    if m == nil {
        return nil
    }
    return m.accessPackageId
}
// GetAssignmentPolicyId gets the assignmentPolicyId property value. The identifier of the access package assignment policy. Read-only.
func (m *AccessPackageAssignment) GetAssignmentPolicyId()(*string) {
    if m == nil {
        return nil
    }
    return m.assignmentPolicyId
}
// GetAssignmentState gets the assignmentState property value. The state of the access package assignment. Possible values are Delivering, Delivered, or Expired. Read-only. Supports $filter (eq).
func (m *AccessPackageAssignment) GetAssignmentState()(*string) {
    if m == nil {
        return nil
    }
    return m.assignmentState
}
// GetAssignmentStatus gets the assignmentStatus property value. More information about the assignment lifecycle. Possible values include Delivering, Delivered, NearExpiry1DayNotificationTriggered, or ExpiredNotificationTriggered. Read-only.
func (m *AccessPackageAssignment) GetAssignmentStatus()(*string) {
    if m == nil {
        return nil
    }
    return m.assignmentStatus
}
// GetCatalogId gets the catalogId property value. The identifier of the catalog containing the access package. Read-only.
func (m *AccessPackageAssignment) GetCatalogId()(*string) {
    if m == nil {
        return nil
    }
    return m.catalogId
}
// GetExpiredDateTime gets the expiredDateTime property value. The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z
func (m *AccessPackageAssignment) GetExpiredDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.expiredDateTime
}
// GetFieldDeserializers returns the deserialization information for the
// current model: a map from serialized field name to the function that
// parses that field from a ParseNode and stores it on m via its setter.
func (m *AccessPackageAssignment) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    // Start from the inherited Entity deserializers and add this type's fields.
    res := m.Entity.GetFieldDeserializers()
    res["accessPackage"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAccessPackageFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAccessPackage(val.(AccessPackageable))
        }
        return nil
    }
    res["accessPackageAssignmentPolicy"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAccessPackageAssignmentPolicyFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAccessPackageAssignmentPolicy(val.(AccessPackageAssignmentPolicyable))
        }
        return nil
    }
    res["accessPackageAssignmentRequests"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessPackageAssignmentRequestFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            // Down-cast each generic Parsable to the concrete element type.
            res := make([]AccessPackageAssignmentRequestable, len(val))
            for i, v := range val {
                res[i] = v.(AccessPackageAssignmentRequestable)
            }
            m.SetAccessPackageAssignmentRequests(res)
        }
        return nil
    }
    res["accessPackageAssignmentResourceRoles"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfObjectValues(CreateAccessPackageAssignmentResourceRoleFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]AccessPackageAssignmentResourceRoleable, len(val))
            for i, v := range val {
                res[i] = v.(AccessPackageAssignmentResourceRoleable)
            }
            m.SetAccessPackageAssignmentResourceRoles(res)
        }
        return nil
    }
    res["accessPackageId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAccessPackageId(val)
        }
        return nil
    }
    res["assignmentPolicyId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAssignmentPolicyId(val)
        }
        return nil
    }
    res["assignmentState"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAssignmentState(val)
        }
        return nil
    }
    res["assignmentStatus"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetAssignmentStatus(val)
        }
        return nil
    }
    res["catalogId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetCatalogId(val)
        }
        return nil
    }
    res["expiredDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetTimeValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetExpiredDateTime(val)
        }
        return nil
    }
    res["isExtended"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetBoolValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetIsExtended(val)
        }
        return nil
    }
    res["schedule"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateRequestScheduleFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetSchedule(val.(RequestScheduleable))
        }
        return nil
    }
    res["target"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetObjectValue(CreateAccessPackageSubjectFromDiscriminatorValue)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTarget(val.(AccessPackageSubjectable))
        }
        return nil
    }
    res["targetId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetTargetId(val)
        }
        return nil
    }
    return res
}
// GetIsExtended gets the isExtended property value. Indicates whether the access package assignment is extended. Read-only.
func (m *AccessPackageAssignment) GetIsExtended()(*bool) {
    // Guard against a nil receiver; idiomatic early return replaces else-after-return.
    if m == nil {
        return nil
    }
    return m.isExtended
}
// GetSchedule gets the schedule property value. When the access assignment is to be in place. Read-only.
func (m *AccessPackageAssignment) GetSchedule()(RequestScheduleable) {
    if m == nil {
        return nil
    }
    return m.schedule
}
// GetTarget gets the target property value. The subject of the access package assignment. Read-only. Nullable. Supports $expand. Supports $filter (eq) on objectId.
func (m *AccessPackageAssignment) GetTarget()(AccessPackageSubjectable) {
    if m == nil {
        return nil
    }
    return m.target
}
// GetTargetId gets the targetId property value. The ID of the subject with the assignment. Read-only.
func (m *AccessPackageAssignment) GetTargetId()(*string) {
    if m == nil {
        return nil
    }
    return m.targetId
}
// Serialize writes every field of the current object to the provided
// SerializationWriter, starting with the inherited Entity fields. Returns
// the first write error encountered, or nil on success.
func (m *AccessPackageAssignment) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    {
        err = writer.WriteObjectValue("accessPackage", m.GetAccessPackage())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("accessPackageAssignmentPolicy", m.GetAccessPackageAssignmentPolicy())
        if err != nil {
            return err
        }
    }
    // Collections are written only when non-nil, up-cast element-wise to Parsable.
    if m.GetAccessPackageAssignmentRequests() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAccessPackageAssignmentRequests()))
        for i, v := range m.GetAccessPackageAssignmentRequests() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("accessPackageAssignmentRequests", cast)
        if err != nil {
            return err
        }
    }
    if m.GetAccessPackageAssignmentResourceRoles() != nil {
        cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetAccessPackageAssignmentResourceRoles()))
        for i, v := range m.GetAccessPackageAssignmentResourceRoles() {
            cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)
        }
        err = writer.WriteCollectionOfObjectValues("accessPackageAssignmentResourceRoles", cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("accessPackageId", m.GetAccessPackageId())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("assignmentPolicyId", m.GetAssignmentPolicyId())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("assignmentState", m.GetAssignmentState())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("assignmentStatus", m.GetAssignmentStatus())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("catalogId", m.GetCatalogId())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteTimeValue("expiredDateTime", m.GetExpiredDateTime())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteBoolValue("isExtended", m.GetIsExtended())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("schedule", m.GetSchedule())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("target", m.GetTarget())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("targetId", m.GetTargetId())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAccessPackage sets the accessPackage property value. Read-only. Nullable. Supports $filter (eq) on the id property and $expand query parameters.
func (m *AccessPackageAssignment) SetAccessPackage(value AccessPackageable)() {
if m != nil {
m.accessPackage = value
}
}
// SetAccessPackageAssignmentPolicy sets the accessPackageAssignmentPolicy property value. Read-only. Nullable. Supports $filter (eq) on the id property
func (m *AccessPackageAssignment) SetAccessPackageAssignmentPolicy(value AccessPackageAssignmentPolicyable)() {
if m != nil {
m.accessPackageAssignmentPolicy = value
}
}
// SetAccessPackageAssignmentRequests sets the accessPackageAssignmentRequests property value. The accessPackageAssignmentRequests property
func (m *AccessPackageAssignment) SetAccessPackageAssignmentRequests(value []AccessPackageAssignmentRequestable)() {
if m != nil {
m.accessPackageAssignmentRequests = value
}
}
// SetAccessPackageAssignmentResourceRoles sets the accessPackageAssignmentResourceRoles property value. The resource roles delivered to the target user for this assignment. Read-only. Nullable.
func (m *AccessPackageAssignment) SetAccessPackageAssignmentResourceRoles(value []AccessPackageAssignmentResourceRoleable)() {
if m != nil {
m.accessPackageAssignmentResourceRoles = value
}
}
// SetAccessPackageId sets the accessPackageId property value. The identifier of the access package. Read-only.
func (m *AccessPackageAssignment) SetAccessPackageId(value *string)() {
if m != nil {
m.accessPackageId = value
}
}
// SetAssignmentPolicyId sets the assignmentPolicyId property value. The identifier of the access package assignment policy. Read-only.
func (m *AccessPackageAssignment) SetAssignmentPolicyId(value *string)() {
if m != nil {
m.assignmentPolicyId = value
}
}
// SetAssignmentState sets the assignmentState property value. The state of the access package assignment. Possible values are Delivering, Delivered, or Expired. Read-only. Supports $filter (eq).
func (m *AccessPackageAssignment) SetAssignmentState(value *string)() {
if m != nil {
m.assignmentState = value
}
}
// SetAssignmentStatus sets the assignmentStatus property value. More information about the assignment lifecycle. Possible values include Delivering, Delivered, NearExpiry1DayNotificationTriggered, or ExpiredNotificationTriggered. Read-only.
func (m *AccessPackageAssignment) SetAssignmentStatus(value *string)() {
if m != nil {
m.assignmentStatus = value
}
}
// SetCatalogId sets the catalogId property value. The identifier of the catalog containing the access package. Read-only.
func (m *AccessPackageAssignment) SetCatalogId(value *string)() {
if m != nil {
m.catalogId = value
}
}
// SetExpiredDateTime sets the expiredDateTime property value. The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z
func (m *AccessPackageAssignment) SetExpiredDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.expiredDateTime = value
}
}
// SetIsExtended sets the isExtended property value. Indicates whether the access package assignment is extended. Read-only.
func (m *AccessPackageAssignment) SetIsExtended(value *bool)() {
if m != nil {
m.isExtended = value
}
}
// SetSchedule sets the schedule property value. When the access assignment is to be in place. Read-only.
func (m *AccessPackageAssignment) SetSchedule(value RequestScheduleable)() {
if m != nil {
m.schedule = value
}
}
// SetTarget sets the target property value. The subject of the access package assignment. Read-only. Nullable. Supports $expand. Supports $filter (eq) on objectId.
func (m *AccessPackageAssignment) SetTarget(value AccessPackageSubjectable)() {
if m != nil {
m.target = value
}
}
// SetTargetId sets the targetId property value. The ID of the subject with the assignment. Read-only.
func (m *AccessPackageAssignment) SetTargetId(value *string)() {
if m != nil {
m.targetId = value
}
} | models/access_package_assignment.go | 0.685739 | 0.497498 | access_package_assignment.go | starcoder |
package graphql
import (
"fmt"
"strings"
"time"
"github.com/skydive-project/skydive/graffiti/graph"
"github.com/skydive-project/skydive/graffiti/graph/traversal"
)
const (
	// PrefixOriginName is prepended to the Origin field of every node and
	// edge created through this GraphQL resolver, marking their provenance.
	PrefixOriginName = "graphql."
)
// newNode adds a node to the graph, stamped with the given creation time
// (or the current UTC time when createdAt is nil) and an origin prefixed
// with PrefixOriginName.
func (r *Resolver) newNode(id graph.Identifier, m graph.Metadata, createdAt *time.Time) (*graph.Node, error) {
	var ts graph.Time
	if createdAt == nil {
		ts = graph.TimeUTC()
	} else {
		ts = graph.Time(*createdAt)
	}
	node := graph.CreateNode(id, m, ts, r.Graph.GetHost(), PrefixOriginName+r.Graph.GetOrigin())
	if err := r.Graph.AddNode(node); err != nil {
		return nil, err
	}
	return node, nil
}
// newEdge adds an edge between srcNode and dstNode, stamped with the given
// creation time (or the current UTC time when createdAt is nil) and an
// origin prefixed with PrefixOriginName.
func (r *Resolver) newEdge(id graph.Identifier, srcNode, dstNode *graph.Node, m graph.Metadata, createdAt *time.Time) (*graph.Edge, error) {
	var ts graph.Time
	if createdAt == nil {
		ts = graph.TimeUTC()
	} else {
		ts = graph.Time(*createdAt)
	}
	edge := graph.CreateEdge(id, srcNode, dstNode, m, ts, r.Graph.GetHost(), PrefixOriginName+r.Graph.GetOrigin())
	if err := r.Graph.AddEdge(edge); err != nil {
		return nil, err
	}
	return edge, nil
}
// getNodeFromGremlinQuery executes a gremlin query that is expected to
// yield exactly one node, and returns that node. It errors if the query
// fails to parse or execute, does not output nodes, or matches a number
// of nodes other than one.
func (r *Resolver) getNodeFromGremlinQuery(query string) (*graph.Node, error) {
	parser := traversal.NewGremlinTraversalParser()
	sequence, err := parser.Parse(strings.NewReader(query))
	if err != nil {
		return nil, fmt.Errorf("parsing gremlin query: %v", err)
	}
	result, err := sequence.Exec(r.Graph, false)
	if err != nil {
		return nil, fmt.Errorf("executing gremlin query: %v", err)
	}
	// The query must produce a node traversal; any other output is rejected.
	nodeTraversal, ok := result.(*traversal.GraphTraversalV)
	if !ok {
		return nil, fmt.Errorf("invalid graph output, expecting nodes")
	}
	nodes := nodeTraversal.GetNodes()
	if len(nodes) != 1 {
		return nil, fmt.Errorf("expecting only one node, gremlin query returned %d", len(nodes))
	}
	return nodes[0], nil
}
// createOrUpdateNode creates the node if it does not exist; otherwise it
// updates the node's metadata.
// createdAt defines the creation date of a newly created node.
// Returns the node created or updated.
// created is true when a new node has been created.
// updated is true when the node already existed and its metadata changed.
// An error is returned if the node could not be created, if the caller
// attempts to change CreatedAt on an existing node, or if updating
// metadata fails.
func (r *Resolver) createOrUpdateNode(
	id graph.Identifier,
	metadata graph.Metadata,
	createdAt *time.Time,
) (node *graph.Node, created bool, updated bool, err error) {
	node = r.Graph.GetNode(id)
	if node == nil {
		node, err = r.newNode(id, metadata, createdAt)
		if err != nil {
			err = fmt.Errorf("unable to create node (id: %v, metadata: %+v): %v", id, metadata, err)
			return node, created, updated, err
		}
		created = true
		return node, created, updated, nil
	}
	// Node already exists: refuse to alter its CreatedAt timestamp.
	if createdAt != nil && !createdAt.Equal(time.Time(node.CreatedAt)) {
		err = fmt.Errorf("forbidden to modify CreatedAt value of existing node")
		return node, created, updated, err
	}
	// Compare revisions around SetMetadata to detect whether anything changed.
	prevRevision := node.Revision
	if err = r.Graph.SetMetadata(node, metadata); err != nil {
		err = fmt.Errorf("unable to update metadata of node %+v: %v", node, err)
		return node, created, updated, err
	}
	updated = prevRevision != node.Revision
	return node, created, updated, nil
}
// createOrUpdateEdge create the edge if it does not exists. Otherwise, update edge's metadata.
// An edge is considered the same if it has the same source and destination nodes and RelationType.
// createdAt is used to define the creation date of the node.
// Return the edge created or updated.
// Return created true if a new edge has been created.
// Return updated true if the edge already exists and its metadata has been updated.
// Return error if edge could not be created or an error happens updating metadata.
func (r *Resolver) createOrUpdateEdge(
srcNode *graph.Node,
dstNode *graph.Node,
metadata graph.Metadata,
createdAt *time.Time,
) (edge *graph.Edge, created bool, updated bool, err error) {
relationType, err := metadata.GetFieldString(MetaKeyRelationType)
if err != nil {
return edge, created, updated, fmt.Errorf("missing RelationType: %v", err)
}
if srcNode == nil {
return edge, created, updated, fmt.Errorf("invalid source node")
}
if dstNode == nil {
return edge, created, updated, fmt.Errorf("invalid destination node")
}
// Generate the edge interface based on node's ID and Metadata.RelationType
id := graph.GenID(string(srcNode.ID), string(dstNode.ID), relationType)
edge = r.Graph.GetEdge(id)
if edge == nil {
edge, err = r.newEdge(id, srcNode, dstNode, metadata, createdAt)
if err != nil {
return edge, created, updated, fmt.Errorf("unable to create edge type %s, between '%s' and '%s': %v", relationType, srcNode.ID, dstNode.ID, err)
}
created = true
return edge, created, updated, nil
}
// Edge already exists. Update metadata
// Store revision to know if SetMetadata has modified edge's metadata
revisionPre := edge.Revision
err = r.Graph.SetMetadata(edge, metadata)
if err != nil {
return edge, created, updated, fmt.Errorf("unable to update metadata of edge %+v: %v", edge, err)
}
if revisionPre != edge.Revision {
updated = true
}
if createdAt != nil && !createdAt.Equal(time.Time(edge.CreatedAt)) {
return edge, created, updated, fmt.Errorf("forbidden to modify CreatedAt value of existing edge")
}
return edge, created, updated, nil
} | topology/probes/graphql/skydive_helpers.go | 0.735737 | 0.438064 | skydive_helpers.go | starcoder |
package main
import (
"fmt"
cz "github.com/CloudNativeDataPlane/cndp/lang/go/tools/pkgs/colorize"
"github.com/gdamore/tcell/v2"
"github.com/rivo/tview"
)
// chartData pairs a named series with its sampled data points.
type chartData struct {
	points []float64 // sampled values for the series
	name   string    // label identifying the series
}
// TitleColor wraps msg, colorized orange, in the bracketed form used for
// window titles.
func TitleColor(msg string) string {
	colored := cz.Orange(msg)
	return fmt.Sprintf("[%s]", colored)
}
// Center returns a new primitive which shows the provided primitive in its
// center, given the provided primitive's size. Empty boxes on all four
// sides absorb the remaining space.
func Center(width, height int, p tview.Primitive) tview.Primitive {
	// Vertical column: filler above, the primitive, filler below.
	column := tview.NewFlex().
		SetDirection(tview.FlexRow).
		AddItem(tview.NewBox(), 0, 1, false).
		AddItem(p, height, 1, true).
		AddItem(tview.NewBox(), 0, 1, false)
	// Horizontal row: filler left, the column, filler right.
	return tview.NewFlex().
		AddItem(tview.NewBox(), 0, 1, false).
		AddItem(column, width, 1, true).
		AddItem(tview.NewBox(), 0, 1, false)
}
// TitleBox builds the top title window, adds it to flex and returns it.
func TitleBox(flex *tview.Flex) *tview.Box {
	box := tview.NewBox()
	box.SetBorder(true)
	box.SetTitle(CloudMonInfo(true))
	box.SetTitleAlign(tview.AlignLeft)
	flex.AddItem(box, 2, 1, false)
	return box
}
// CreateTextView builds a bordered, titled TextView with dynamic colors
// and wrapping enabled, adds it to flex and returns it.
func CreateTextView(flex *tview.Flex, msg string, align, fixedSize, proportion int, focus bool) *tview.TextView {
	view := tview.NewTextView()
	view.SetDynamicColors(true)
	view.SetWrap(true)
	view.SetBorder(true)
	view.SetTitle(TitleColor(msg))
	view.SetTitleAlign(align)
	flex.AddItem(view, fixedSize, proportion, focus)
	return view
}
// CreateTableView builds a bordered, titled Table with a fixed header row,
// adds it to flex and returns it.
func CreateTableView(flex *tview.Flex, msg string, align, fixedSize, proportion int, focus bool) *tview.Table {
	table := tview.NewTable()
	table.SetFixed(1, 0)
	table.SetEvaluateAllRows(true)
	table.SetBorder(true)
	table.SetTitle(TitleColor(msg))
	table.SetTitleAlign(align)
	flex.AddItem(table, fixedSize, proportion, focus)
	return table
}
// CreateForm builds a bordered Form with default field background and slate
// grey field text, titles it with msg, adds it to flex and returns it.
func CreateForm(flex *tview.Flex, msg string, align, fixedSize, proportion int, focus bool) *tview.Form {
	frm := tview.NewForm()
	frm.SetFieldBackgroundColor(tcell.ColorDefault)
	frm.SetFieldTextColor(tcell.ColorSlateGrey)
	frm.SetBorder(true)
	frm.SetTitleAlign(align)
	frm.SetTitle(TitleColor(msg))
	flex.AddItem(frm, fixedSize, proportion, focus)
	return frm
}
// CreateList builds a bordered List without secondary text, titles it with
// msg, adds it to flex and returns it.
func CreateList(flex *tview.Flex, msg string, align, fixedSize, proportion int, focus bool) *tview.List {
	lst := tview.NewList()
	lst.ShowSecondaryText(false)
	lst.SetBorder(true)
	lst.SetTitleAlign(align)
	lst.SetTitle(TitleColor(msg))
	flex.AddItem(lst, fixedSize, proportion, focus)
	return lst
}
// SetCell content given the information
// row, col of the cell to create and fill
// msg is the string content to insert in the cell
// a is an interface{} object list
// object a is int then alignment tview.AlignLeft/Right/Center
// object a is bool then set the cell as selectable or not
func SetCell(table *tview.Table, row, col int, msg string, a ...interface{}) *tview.TableCell {
align := tview.AlignRight
selectable := false
for _, v := range a {
switch v.(type) {
case int:
align = v.(int)
case bool:
selectable = v.(bool)
}
}
tableCell := tview.NewTableCell(msg).
SetAlign(align).
SetSelectable(selectable)
table.SetCell(row, col, tableCell)
return tableCell
} | lang/go/tools/cmon/helpers_tview.go | 0.73848 | 0.42471 | helpers_tview.go | starcoder |
package parser
import (
"strings"
)
const ParserNameOs = "os"
const FixtureFileOs = "oss.yml"
type OsReg struct {
Regular `yaml:",inline" json:",inline"`
Name string `yaml:"name" json:"name"`
Version string `yaml:"version" json:"version"`
}
// Known operating systems mapped to their internal short codes
var OperatingSystems = map[string]string{
`AIX`: `AIX`,
`AND`: `Android`,
`AMG`: `AmigaOS`,
`ATV`: `Apple TV`,
`ARL`: `Arch Linux`,
`BTR`: `BackTrack`,
`SBA`: `Bada`,
`BEO`: `BeOS`,
`BLB`: `BlackBerry OS`,
`QNX`: `BlackBerry Tablet OS`,
`BMP`: `Brew`,
`CES`: `CentOS`,
`COS`: `Chrome OS`,
`CYN`: `CyanogenMod`,
`DEB`: `Debian`,
`DFB`: `DragonFly`,
`FED`: `Fedora`,
`FOS`: `Firefox OS`,
`FIR`: `Fire OS`,
`BSD`: `FreeBSD`,
`GNT`: `Gentoo`,
`GTV`: `Google TV`,
`HPX`: `HP-UX`,
`HAI`: `Haiku OS`,
`IRI`: `IRIX`,
`INF`: `Inferno`,
`KOS`: `KaiOS`,
`KNO`: `Knoppix`,
`KBT`: `Kubuntu`,
`LIN`: `GNU/Linux`,
`LBT`: `Lubuntu`,
`VLN`: `VectorLinux`,
`MAC`: `Mac`,
`MAE`: `Maemo`,
`MDR`: `Mandriva`,
`SMG`: `MeeGo`,
`MCD`: `MocorDroid`,
`MIN`: `Mint`,
`MLD`: `MildWild`,
`MOR`: `MorphOS`,
`NBS`: `NetBSD`,
`MTK`: `MTK / Nucleus`,
`WII`: `Nintendo`,
`NDS`: `Nintendo Mobile`,
`OS2`: `OS/2`,
`T64`: `OSF1`,
`OBS`: `OpenBSD`,
`ORD`: `Ordissimo`,
`PSP`: `PlayStation Portable`,
`PS3`: `PlayStation`,
`RHT`: `Red Hat`,
`ROS`: `RISC OS`,
`REM`: `Remix OS`,
`RZD`: `RazoDroiD`,
`SAB`: `Sabayon`,
`SSE`: `SUSE`,
`SAF`: `Sailfish OS`,
`SLW`: `Slackware`,
`SOS`: `Solaris`,
`SYL`: `Syllable`,
`SYM`: `Symbian`,
`SYS`: `Symbian OS`,
`S40`: `Symbian OS Series 40`,
`S60`: `Symbian OS Series 60`,
`SY3`: `Symbian^3`,
`TDX`: `ThreadX`,
`TIZ`: `Tizen`,
`TOS`: `TmaxOS`,
`UBT`: `Ubuntu`,
`WTV`: `WebTV`,
`WIN`: `Windows`,
`WCE`: `Windows CE`,
`WIO`: `Windows IoT`,
`WMO`: `Windows Mobile`,
`WPH`: `Windows Phone`,
`WRT`: `Windows RT`,
`XBX`: `Xbox`,
`XBT`: `Xubuntu`,
`YNS`: `YunOs`,
`IOS`: `iOS`,
`POS`: `palmOS`,
`WOS`: `webOS`,
}
// OsFamilies maps each operating system family name to the short codes
// (see OperatingSystems) of the operating systems belonging to that family.
// Element literals use the simplified `{...}` form (gofmt -s).
var OsFamilies = map[string][]string{
	`Android`:                {`AND`, `CYN`, `FIR`, `REM`, `RZD`, `MLD`, `MCD`, `YNS`},
	`AmigaOS`:                {`AMG`, `MOR`},
	`Apple TV`:               {`ATV`},
	`BlackBerry`:             {`BLB`, `QNX`},
	`Brew`:                   {`BMP`},
	`BeOS`:                   {`BEO`, `HAI`},
	`Chrome OS`:              {`COS`},
	`Firefox OS`:             {`FOS`, `KOS`},
	`Gaming Console`:         {`WII`, `PS3`},
	`Google TV`:              {`GTV`},
	`IBM`:                    {`OS2`},
	`iOS`:                    {`IOS`},
	`RISC OS`:                {`ROS`},
	`GNU/Linux`:              {`LIN`, `ARL`, `DEB`, `KNO`, `MIN`, `UBT`, `KBT`, `XBT`, `LBT`, `FED`, `RHT`, `VLN`, `MDR`, `GNT`, `SAB`, `SLW`, `SSE`, `CES`, `BTR`, `SAF`, `ORD`, `TOS`},
	`Mac`:                    {`MAC`},
	`Mobile Gaming Console`:  {`PSP`, `NDS`, `XBX`},
	`Real-time OS`:           {`MTK`, `TDX`},
	`Other Mobile`:           {`WOS`, `POS`, `SBA`, `TIZ`, `SMG`, `MAE`},
	`Symbian`:                {`SYM`, `SYS`, `SY3`, `S60`, `S40`},
	`Unix`:                   {`SOS`, `AIX`, `HPX`, `BSD`, `NBS`, `OBS`, `DFB`, `SYL`, `IRI`, `T64`, `INF`},
	`WebTV`:                  {`WTV`},
	`Windows`:                {`WIN`},
	`Windows Mobile`:         {`WPH`, `WMO`, `WCE`, `WRT`, `WIO`},
}
const (
PlatformTypeARM = "ARM"
PlatformTypeX64 = "x64"
PlatformTypeX86 = "x86"
PlatformTypeNONE = ""
)
type PlatformReg struct {
Name string
Regular
}
type OsMatchResult struct {
Name string `yaml:"name" json:"name"`
ShortName string `yaml:"short_name" json:"short_name"`
Version string `yaml:"version" json:"version"`
Platform string `yaml:"platform" json:"platform"`
}
type OsParser interface {
PreMatch(string) bool
Parse(string) *OsMatchResult
}
// Parses the useragent for operating system information
type Oss struct {
Regexes []*OsReg
platforms []*PlatformReg
overAllMatch Regular
}
// NewOss loads the OS regex fixtures from the given YAML file and returns a
// ready-to-use parser with pre-compiled platform regexes, or an error if the
// file cannot be read.
func NewOss(file string) (*Oss, error) {
	var v []*OsReg
	if err := ReadYamlFile(file, &v); err != nil {
		return nil, err
	}
	// Platform detection regexes; slice order defines match priority.
	// The element type is implied by the slice literal (gofmt -s).
	ps := []*PlatformReg{
		{Name: PlatformTypeARM, Regular: Regular{Regex: "arm"}},
		{Name: PlatformTypeX64, Regular: Regular{Regex: "WOW64|x64|win64|amd64|x86_64"}},
		{Name: PlatformTypeX86, Regular: Regular{Regex: "i[0-9]86|i86pc"}},
	}
	for _, pp := range ps {
		pp.Compile()
	}
	return &Oss{
		Regexes:   v,
		platforms: ps,
	}, nil
}
// ParsePlatform returns the name of the first platform regex matching ua,
// or PlatformTypeNONE when no platform matches.
func (o *Oss) ParsePlatform(ua string) string {
	for _, platform := range o.platforms {
		if !platform.IsMatchUserAgent(ua) {
			continue
		}
		return platform.Name
	}
	return PlatformTypeNONE
}
// PreMatch reports whether ua matches any configured OS regex, using a single
// combined alternation that is compiled lazily on first use. Returns false
// when no regexes are configured.
//
// NOTE(review): the lazy initialization of overAllMatch is not goroutine-safe;
// confirm that the first call is serialized by callers.
func (o *Oss) PreMatch(ua string) bool {
	if o.overAllMatch.Regexp == nil {
		count := len(o.Regexes)
		if count == 0 {
			return false
		}
		// Combine all regexes into one alternation. The reversed order
		// (last regex first) of the original implementation is preserved.
		parts := make([]string, 0, count)
		for i := count - 1; i >= 0; i-- {
			parts = append(parts, o.Regexes[i].Regex)
		}
		o.overAllMatch.Regex = strings.Join(parts, "|")
		o.overAllMatch.Compile()
	}
	return o.overAllMatch.IsMatchUserAgent(ua)
}
// Parse matches ua against the configured OS regexes (first match wins, so
// regex order defines priority) and returns the resolved OS name, short code,
// version and platform. Returns nil when no regex matches.
func (o *Oss) Parse(ua string) *OsMatchResult {
	var matches []string
	var osRegex *OsReg
	// Stop at the first regex that produces capture matches.
	for _, osRegex = range o.Regexes {
		matches = osRegex.MatchUserAgent(ua)
		if len(matches) > 0 {
			break
		}
	}
	if len(matches) == 0 || osRegex == nil {
		return nil
	}
	// Substitute capture groups into the configured name template.
	name := BuildByMatch(osRegex.Name, matches)
	short := UnknownShort
	// Canonicalize the name and resolve its short code against the known
	// OS table (case-insensitive comparison).
	for osShort, osName := range OperatingSystems {
		if StringEqualIgnoreCase(name, osName) {
			name = osName
			short = osShort
			break
		}
	}
	result := &OsMatchResult{
		Name: name,
		ShortName: short,
		Version: BuildVersion(osRegex.Version, matches),
		Platform: o.ParsePlatform(ua),
	}
	return result
}
// GetOsFamily resolves the OS family name for the given OS short code.
// It returns the empty string for an unknown code.
func GetOsFamily(osLabel string) string {
	for family, shortCodes := range OsFamilies {
		for _, shortCode := range shortCodes {
			if shortCode == osLabel {
				return family
			}
		}
	}
	return ""
}
func GetOsNameFromId(os, ver string) string {
if osFullName, ok := OperatingSystems[os]; ok {
return strings.TrimSpace(osFullName + " " + ver)
}
return ""
} | parser/os.go | 0.559531 | 0.607081 | os.go | starcoder |
package cbor
import (
"fmt"
"math"
log "github.com/sirupsen/logrus"
)
// PositiveInteger8 wraps a positive integer with 8 bits
type PositiveInteger8 struct {
basePositiveInteger
}
// PositiveInteger16 wraps a positive integer with 16 bits
type PositiveInteger16 struct {
basePositiveInteger
}
// PositiveInteger32 wraps a positive integer with 32 bits
type PositiveInteger32 struct {
basePositiveInteger
}
// PositiveInteger64 wraps a positive integer with 64 bits
type PositiveInteger64 struct {
basePositiveInteger
}
type basePositiveInteger struct {
baseDataItem
V uint64
}
// NewPositiveInteger8 returns a positive integer (8 bits) instance
func NewPositiveInteger8(value uint8) *PositiveInteger8 {
return &PositiveInteger8{
basePositiveInteger: newBasePositiveInteger(uint64(value), additionalType8Bits),
}
}
// ValueAsUint8 return value as uint8
func (p *PositiveInteger8) ValueAsUint8() uint8 {
return uint8(p.V)
}
// ValueAsUint16 return value as uint16
func (p *PositiveInteger8) ValueAsUint16() uint16 {
return uint16(p.V)
}
// ValueAsUint32 return value as uint16
func (p *PositiveInteger8) ValueAsUint32() uint32 {
return uint32(p.V)
}
// ValueAsUint64 return value as uint16
func (p *PositiveInteger8) ValueAsUint64() uint64 {
return p.V
}
// NewPositiveInteger16 returns a positive integer (16 bits) instance
func NewPositiveInteger16(value uint16) *PositiveInteger16 {
return &PositiveInteger16{
basePositiveInteger: newBasePositiveInteger(uint64(value), additionalType16Bits),
}
}
// ValueAsUint16 return value as uint16
func (p *PositiveInteger16) ValueAsUint16() uint16 {
return uint16(p.V)
}
// ValueAsUint32 return value as uint32
func (p *PositiveInteger16) ValueAsUint32() uint32 {
return uint32(p.V)
}
// ValueAsUint64 returns the wrapped value as uint64.
func (p *PositiveInteger16) ValueAsUint64() uint64 {
	// V is already a uint64; the conversion in the original was redundant.
	return p.V
}
// NewPositiveInteger32 returns a positive integer (32 bits) instance
func NewPositiveInteger32(value uint32) *PositiveInteger32 {
return &PositiveInteger32{
basePositiveInteger: newBasePositiveInteger(uint64(value), additionalType32Bits),
}
}
// ValueAsUint32 return value as uint32
func (p *PositiveInteger32) ValueAsUint32() uint32 {
return uint32(p.V)
}
// ValueAsUint64 returns the wrapped value as uint64.
func (p *PositiveInteger32) ValueAsUint64() uint64 {
	// V is already a uint64; the conversion in the original was redundant.
	return p.V
}
// NewPositiveInteger64 returns a positive integer (64 bits) instance.
func NewPositiveInteger64(value uint64) *PositiveInteger64 {
	return &PositiveInteger64{
		// value is already a uint64; the uint64(value) conversion in the
		// original was redundant.
		basePositiveInteger: newBasePositiveInteger(value, additionalType64Bits),
	}
}

// ValueAsUint64 returns the wrapped value as uint64.
func (p *PositiveInteger64) ValueAsUint64() uint64 {
	// V is already a uint64; the conversion in the original was redundant.
	return p.V
}
// NewPositiveInteger returns a positive integer wrapped in the most compact
// struct that can hold the given value.
func NewPositiveInteger(value uint64) DataItem {
	if value > math.MaxUint32 {
		return NewPositiveInteger64(value)
	}
	if value > math.MaxUint16 {
		return NewPositiveInteger32(uint32(value))
	}
	if value > math.MaxUint8 {
		return NewPositiveInteger16(uint16(value))
	}
	return NewPositiveInteger8(uint8(value))
}
// newBasePositiveInteger returns new base positive integer instance
func newBasePositiveInteger(value uint64, additionalType uint8) basePositiveInteger {
return basePositiveInteger{
baseDataItem: baseDataItem{
majorType: MajorTypePositiveInt,
additionalType: additionalType,
},
V: value,
}
}
// AdditionalType of this positive integer
func (b *basePositiveInteger) AdditionalType() uint8 {
switch {
case b.V > math.MaxUint32:
return additionalType64Bits
case b.V > math.MaxUint16:
return additionalType32Bits
case b.V > math.MaxUint8:
return additionalType16Bits
case b.V > uint64(additionalTypeDirectValue23):
return additionalType8Bits
default:
// value is between 0-23, use this as the additional type
return uint8(b.V)
}
}
// AdditionalTypeValue returns the uint value as uint64
func (b *basePositiveInteger) AdditionalTypeValue() uint64 {
return b.V
}
// Value of this positive integer
func (b *basePositiveInteger) Value() interface{} {
switch {
case b.V > math.MaxUint32:
return b.V
case b.V > math.MaxUint16:
return uint32(b.V)
case b.V > math.MaxUint8:
return uint16(b.V)
default:
return uint8(b.V)
}
}
// ValueAsUInt64 returns this positive integer's value as uint64.
// (The previous comment incorrectly referred to the method as ValueAsInt64
// returning int64; the value is unsigned.)
func (b *basePositiveInteger) ValueAsUInt64() uint64 {
	return b.V
}
// EncodeCBOR returns the CBOR binary representation of this positive integer
func (b *basePositiveInteger) EncodeCBOR() []byte {
return dataItemPrefix(MajorTypePositiveInt, b.V)
}
// String representation of this positive integer
func (b *basePositiveInteger) String() string {
typeString := ""
switch {
case b.additionalType == additionalType64Bits:
typeString = "PositiveInteger64"
break
case b.additionalType == additionalType32Bits:
typeString = "PositiveInteger32"
break
case b.additionalType == additionalType16Bits:
typeString = "PositiveInteger16"
break
case b.additionalType == additionalType8Bits,
b.additionalType <= additionalTypeDirectValue23:
typeString = "PositiveInteger8"
break
default:
log.WithField("additionalType", b.additionalType).Error("Unknown additional type")
}
return fmt.Sprintf("%s(%d)", typeString, b.V)
} | cbor/positive.go | 0.860222 | 0.410284 | positive.go | starcoder |
package network
import (
"math"
"fmt"
"errors"
"github.com/elmware/goNEAT/neat/utils"
)
var (
	// NetErrExceededMaxActivationAttempts is returned when the maximal number
	// of network activation attempts is exceeded.
	// Error strings are lowercase without trailing punctuation per Go
	// convention; the original first message ended with a period.
	NetErrExceededMaxActivationAttempts = errors.New("maximal network activation attempts exceeded")
	// NetErrUnsupportedSensorsArraySize is returned when an unsupported
	// sensors data array size is provided.
	NetErrUnsupportedSensorsArraySize = errors.New("the sensors array size is unsupported by network solver")
	// NetErrDepthCalculationFailedLoopDetected is returned when the depth
	// calculation failed due to a loop in the network.
	NetErrDepthCalculationFailedLoopDetected = errors.New("depth can not be determined for network with loop")
)
// Defines network solver interface which describes neural network structures with methods to run activation waves through
// them.
type NetworkSolver interface {
// Propagates activation wave through all network nodes provided number of steps in forward direction.
// Returns true if activation wave passed from all inputs to outputs.
ForwardSteps(steps int) (bool, error)
// Propagates activation wave through all network nodes provided number of steps by recursion from output nodes
// Returns true if activation wave passed from all inputs to outputs.
RecursiveSteps() (bool, error)
// Attempts to relax network given amount of steps until giving up. The network considered relaxed when absolute
// value of the change at any given point is less than maxAllowedSignalDelta during activation waves propagation.
// If maxAllowedSignalDelta value is less than or equal to 0, the method will return true without checking for relaxation.
Relax(maxSteps int, maxAllowedSignalDelta float64) (bool, error)
// Flushes network state by removing all current activations. Returns true if network flushed successfully or
// false in case of error.
Flush() (bool, error)
// Set sensors values to the input nodes of the network
LoadSensors(inputs []float64) error
// Read output values from the output nodes of the network
ReadOutputs() []float64
// Returns the total number of neural units in the network
NodeCount() int
// Returns the total number of links between nodes in the network
LinkCount() int
}
// NNodeType defines the type of NNode to create
type NodeType byte
// Predefined NNode types
const (
// The neuron type
NeuronNode NodeType = iota
// The sensor type
SensorNode
)
// NodeTypeName returns a human readable name for the given NNode type constant.
func NodeTypeName(ntype NodeType) string {
	if ntype == NeuronNode {
		return "NEURON"
	}
	if ntype == SensorNode {
		return "SENSOR"
	}
	return "!!! UNKNOWN NODE TYPE !!!"
}
// NeuronType defines the type of neuron to create
type NodeNeuronType byte
// These are NNode layer type
const (
// The node is in hidden layer
HiddenNeuron NodeNeuronType = iota
// The node is in input layer
InputNeuron
// The node is in output layer
OutputNeuron
// The node is bias
BiasNeuron
)
// Returns human readable neuron type name for given constant
func NeuronTypeName(nlayer NodeNeuronType) string {
switch nlayer {
case HiddenNeuron:
return "HIDN"
case InputNeuron:
return "INPT"
case OutputNeuron:
return "OUTP"
case BiasNeuron:
return "BIAS"
default:
return "!!! UNKNOWN NEURON TYPE !!!"
}
}
// NeuronTypeByName returns the neuron node type for its short name.
// For an unknown name it returns math.MaxInt8 and a non-nil error.
func NeuronTypeByName(name string) (NodeNeuronType, error) {
	switch name {
	case "HIDN":
		return HiddenNeuron, nil
	case "INPT":
		return InputNeuron, nil
	case "OUTP":
		return OutputNeuron, nil
	case "BIAS":
		return BiasNeuron, nil
	default:
		// fmt.Errorf replaces errors.New + string concatenation; the message
		// is lowercased per Go error-string convention.
		return math.MaxInt8, fmt.Errorf("unknown neuron type name: %s", name)
	}
}
// ActivateNode calculates the activation for the given neuron node, applying
// the activation function selected by node.ActivationType through the supplied
// activator factory, and stores the result on the node.
// If the activation type is unsupported, the factory's error is returned and
// the node's activation is left unchanged (setActivation is only called on
// success).
func ActivateNode(node *NNode, a *utils.NodeActivatorsFactory) (err error) {
	out, err := a.ActivateByType(node.ActivationSum, node.Params, node.ActivationType)
	if err == nil {
		node.setActivation(out)
	}
	return err
}
// Method to activate neuron module presented by provided node. As a result of execution the activation values of all
// input nodes will be processed by corresponding activation function and corresponding activation values of output nodes
// will be set. Will panic if unsupported activation type requested.
func ActivateModule(module *NNode, a *utils.NodeActivatorsFactory) error {
inputs := make([]float64, len(module.Incoming))
for i, v := range module.Incoming {
inputs[i] = v.InNode.GetActiveOut()
}
outputs, err := a.ActivateModuleByType(inputs, module.Params, module.ActivationType)
if err != nil {
return err
}
if len(outputs) != len(module.Outgoing) {
return errors.New(fmt.Sprintf(
"The number of output parameters [%d] returned by module activator doesn't match " +
"the number of output neurons of the module [%d]", len(outputs), len(module.Outgoing)))
}
// set outputs
for i, out := range outputs {
module.Outgoing[i].OutNode.setActivation(out)
module.Outgoing[i].OutNode.isActive = true // activate output node
}
return nil
} | neat/network/common.go | 0.724091 | 0.555073 | common.go | starcoder |
package ez
import (
"reflect"
"runtime"
"testing"
)
// A Unit is a specification that can be used for testing and/or benchmarking.
type Unit struct {
gfn interface{}
rs []runner
T *testing.T
B *testing.B
tr bool
br bool
}
// A half is an incomplete case.
type half struct {
in in
u *Unit
}
// A runner is one of many sequential components of a Unit.
type runner interface {
runTest(int, *testing.T)
runBenchmark(int, *testing.B)
}
// Seq is a synonym for New until Sequential is split out from Parallel test types.
func Seq() *Unit { return New() }
// Call is a synonym for Func until Sequential is split out from Parallel test types.
func (u *Unit) Call(fn interface{}) *Unit { return u.Func(fn) }
// Do is a synonym for Step until Sequential is split out from Parallel test types.
func (u *Unit) Do(fn func()) *Unit { return u.Step(fn) }
// New returns a blank Unit.
func New() *Unit {
u := &Unit{}
// FIXME: Sadly, finalizers are not guaranteed to run, so they're of little comfort.
runtime.SetFinalizer(u, func(u *Unit) {
if !u.tr && !u.br {
panic("neither test nor benchmark ran")
}
})
return u
}
// Test returns a Unit for testing fn using t.
func Test(fn interface{}, t *testing.T) *Unit {
return New().setT(t).Func(fn)
}
// Benchmark returns a Unit for benchmarking fn using b.
func Benchmark(fn interface{}, b *testing.B) *Unit {
return New().setB(b).Func(fn)
}
func (u *Unit) setT(t *testing.T) *Unit {
u.T = t
return u
}
func (u *Unit) setB(b *testing.B) *Unit {
u.B = b
return u
}
// Func sets fn as the Unit's current function; it can be called more than once.
func (u *Unit) Func(fn interface{}) *Unit {
u.gfn = fn
return u
}
// Thru applies fn to the Unit, which is useful to combine or repeat common Unit components.
func (u *Unit) Thru(fn func(*Unit)) *Unit {
fn(u)
return u
}
// Step adds fn to the Unit as a Step.
func (u *Unit) Step(fn func()) *Unit {
u.rs = append(u.rs, Step{fn})
return u
}
func unwrapPointer(x interface{}) interface{} { return reflect.ValueOf(x).Elem().Interface() }
// TODO: Split into Is(xs ...interface{}) + EqualTo(xs ...interface{})
func (u *Unit) Equal(x interface{}, y interface{}) *Unit {
if reflect.ValueOf(x).Kind() != reflect.Ptr {
panic("source must be a pointer to allow mutation post-definition")
}
return u.addCase(unwrapPointer, newIn([]interface{}{x}), newOut([]interface{}{y}))
}
// Case adds in & out (plus the current function) as a Case to the Unit.
func (u *Unit) Case(in in, out out) *Unit {
return u.addCase(u.gfn, in, out)
}
// Cases adds every in/out pair in the CaseMap (plus the current function) as a Case to the Unit.
func (u *Unit) Cases(cs CaseMap) *Unit {
for in, out := range cs {
u = u.addCase(u.gfn, *in, out)
}
return u
}
// In returns xs as inputs that can be used in a Case or CaseMap.
func In(xs ...interface{}) *in { in := newIn(xs); return &in }
// Out returns xs as outputs that can be used in a Case or CaseMap.
func Out(xs ...interface{}) out { return newOut(xs) }
// Panic returns a requirement to panic with any value, and can be used in a Case or CaseMap; it is equivalent to PanicWith(Any).
func Panic() out { return newPanic(Any) }
// PanicWith returns a requirement to panic with x, and can be used in a Case or CaseMap.
func PanicWith(x interface{}) out { return newPanic(x) }
// In begins a Case with xs as inputs.
func (u *Unit) In(xs ...interface{}) *half { return &half{newIn(xs), u} }
// Out completes a Case with xs as outputs, and adds it to the Unit.
func (h *half) Out(xs ...interface{}) *Unit { return h.u.addCase(h.u.gfn, h.in, newOut(xs)) }
// PanicWith completes a Case that must panic with x, and adds it to the Unit.
func (h *half) PanicWith(x interface{}) *Unit { return h.u.addCase(h.u.gfn, h.in, newPanic(x)) }
// Panic completes a Case that must panic with any value, and adds it to the Unit; it is equivalent to PanicWith(Any).
func (h *half) Panic() *Unit { return h.u.addCase(h.u.gfn, h.in, newPanic(Any)) }
func (u *Unit) addCase(fn interface{}, in in, out out) *Unit {
u.rs = append(u.rs, newCase(fn, in, out))
return u
}
// Run runs the Unit as a test and/or benchmark, depending on whether T and/or
// B are set. It panics if neither is set.
func (u *Unit) Run() {
	if u.T == nil && u.B == nil {
		panic("T and B are both nil")
	}
	if t := u.T; t != nil {
		u.RunTest(t)
	}
	if b := u.B; b != nil {
		u.RunBenchmark(b)
	}
}
// RunTest runs the Unit as a test using t.
func (u *Unit) RunTest(t *testing.T) {
if u.tr {
panic("test already ran")
}
u.tr = true
for i, r := range u.rs {
r.runTest(i, t)
}
}
// RunBenchmark runs the Unit as a benchmark using b.
func (u *Unit) RunBenchmark(b *testing.B) {
if u.br {
panic("benchmark already ran")
}
u.br = true
for i := 0; i < b.N; i++ {
for j, r := range u.rs {
r.runBenchmark(j, b)
}
}
} | unit.go | 0.616012 | 0.400515 | unit.go | starcoder |
package matchers
import (
"fmt"
"reflect"
"runtime"
"github.com/onsi/gomega/format"
"k8s.io/utils/semantic"
)
// EqualitiesEqualMatcher is a matcher that matches the Expected value using the given Equalities
// and semantic.Equalities.DeepEqual.
type EqualitiesEqualMatcher struct {
Equalities semantic.Equalities
Expected interface{}
}
func (m *EqualitiesEqualMatcher) FailureMessage(actual interface{}) (message string) {
actualString, actualOK := actual.(string)
expectedString, expectedOK := m.Expected.(string)
if actualOK && expectedOK {
return format.MessageWithDiff(actualString, "to equal", expectedString)
}
return format.Message(actual, "to equal with equality", m.Expected)
}
func (m *EqualitiesEqualMatcher) NegatedFailureMessage(actual interface{}) (message string) {
return format.Message(actual, "not to equal with equality", m.Expected)
}
func (m *EqualitiesEqualMatcher) Match(actual interface{}) (bool, error) {
if m.Equalities == nil {
return false, fmt.Errorf("must set Equalities")
}
if actual == nil && m.Expected == nil {
return false, fmt.Errorf("refusing to compare <nil> to <nil>, BeNil() should be used instead")
}
return m.Equalities.DeepEqual(actual, m.Expected), nil
}
// EqualitiesDerivativeMatcher is a matcher that matches the Expected value using the given Equalities
// and semantic.Equalities.DeepDerivative.
type EqualitiesDerivativeMatcher struct {
Equalities semantic.Equalities
Expected interface{}
}
func (m *EqualitiesDerivativeMatcher) FailureMessage(actual interface{}) (message string) {
actualString, actualOK := actual.(string)
expectedString, expectedOK := m.Expected.(string)
if actualOK && expectedOK {
return format.MessageWithDiff(actualString, "to derive", expectedString)
}
return format.Message(actual, "to derive with equality", m.Expected)
}
func (m *EqualitiesDerivativeMatcher) NegatedFailureMessage(actual interface{}) (message string) {
return format.Message(actual, "not to derive with equality", m.Expected)
}
func (m *EqualitiesDerivativeMatcher) Match(actual interface{}) (bool, error) {
if m.Equalities == nil {
return false, fmt.Errorf("must set Equalities")
}
if actual == nil && m.Expected == nil {
return false, fmt.Errorf("refusing to compare <nil> to <nil>, BeNil() should be used instead")
}
return m.Equalities.DeepDerivative(actual, m.Expected), nil
}
type ErrorFuncMatcher struct {
Name string
Func func(err error) bool
}
func (m *ErrorFuncMatcher) Match(actual interface{}) (success bool, err error) {
if m.Func == nil {
return false, fmt.Errorf("must set Func")
}
actualErr, ok := actual.(error)
if !ok {
return false, fmt.Errorf("expected an error-type but got %s", format.Object(actual, 0))
}
return m.Func(actualErr), nil
}
// nameOrFuncName returns the explicit Name when set, otherwise the runtime
// symbol name of Func.
func (m *ErrorFuncMatcher) nameOrFuncName() string {
	if m.Name == "" {
		return runtime.FuncForPC(reflect.ValueOf(m.Func).Pointer()).Name()
	}
	return m.Name
}
func (m *ErrorFuncMatcher) FailureMessage(actual interface{}) (message string) {
name := m.nameOrFuncName()
return fmt.Sprintf("expected an error matching %s to have occurred but got %s", name, format.Object(actual, 0))
}
func (m *ErrorFuncMatcher) NegatedFailureMessage(actual interface{}) (message string) {
name := m.nameOrFuncName()
return fmt.Sprintf("expected an error not matching %s to have occurred but got %s", name, format.Object(actual, 0))
} | testutils/matchers/matchers.go | 0.843992 | 0.619788 | matchers.go | starcoder |
package transpiler
// MapVisitor visit the Tree and return a map[string]interface{}, this map can be serialized
// to a YAML document.
type MapVisitor struct {
Content interface{}
}
// OnStr is called when we visit a StrVal.
func (m *MapVisitor) OnStr(v string) {
m.Content = v
}
// OnInt is called when we visit a IntVal.
func (m *MapVisitor) OnInt(v int) {
m.Content = v
}
// OnUInt is called when we visit a UintVal.
func (m *MapVisitor) OnUInt(v uint64) {
m.Content = v
}
// OnFloat is called when we visit a FloatVal.
func (m *MapVisitor) OnFloat(v float64) {
m.Content = v
}
// OnBool is called when we visit a Bool.
func (m *MapVisitor) OnBool(v bool) {
m.Content = v
}
// OnDict is called when we visit a Dict and return a VisitorDict.
func (m *MapVisitor) OnDict() VisitorDict {
newMap := make(map[string]interface{})
m.Content = newMap
return &MapVisitorDict{Content: newMap}
}
// OnList is called when we visit a List and we return a VisitorList.
func (m *MapVisitor) OnList() VisitorList {
m.Content = make([]interface{}, 0)
return &MapVisitorList{MapVisitor: m}
}
// MapVisitorDict is the visitor used while visiting a Dict node; it fills
// Content with one entry per visited key/value pair.
type MapVisitorDict struct {
	Content map[string]interface{}
	// lastVisitedKey remembers the key seen by OnKey so the following
	// OnValue call knows under which key to store its value.
	lastVisitedKey string
}

// OnKey is called when we visit a key of a Dict; the key is remembered until
// the matching OnValue call.
func (m *MapVisitorDict) OnKey(s string) {
	m.lastVisitedKey = s
}

// OnValue stores the visited value's content under the most recently visited
// key. The argument is expected to be a *MapVisitor (panics otherwise).
func (m *MapVisitorDict) OnValue(v Visitor) {
	visitor := v.(*MapVisitor)
	m.Content[m.lastVisitedKey] = visitor.Content
}

// Visitor returns a fresh MapVisitor used to visit the next value.
func (m *MapVisitorDict) Visitor() Visitor {
	return &MapVisitor{}
}

// OnComplete is called when you are done visiting the current Dict.
func (m *MapVisitorDict) OnComplete() {}
// MapVisitorList is a visitor to visit list.
type MapVisitorList struct {
MapVisitor *MapVisitor
}
// OnComplete is called when we finish to visit a List.
func (m *MapVisitorList) OnComplete() {}
// OnValue is called when we visit a value and return a visitor.
func (m *MapVisitorList) OnValue(v Visitor) {
visitor := v.(*MapVisitor)
m.MapVisitor.Content = append(m.MapVisitor.Content.([]interface{}), visitor.Content)
}
// Visitor return a visitor.
func (m *MapVisitorList) Visitor() Visitor {
return &MapVisitor{}
} | x-pack/elastic-agent/pkg/agent/transpiler/map_visitor.go | 0.699152 | 0.424591 | map_visitor.go | starcoder |
package gaia
import (
"fmt"
"github.com/globalsign/mgo/bson"
"github.com/mitchellh/copystructure"
"go.aporeto.io/elemental"
)
// PolicyGraphPolicyTypeValue represents the possible values for attribute "policyType".
type PolicyGraphPolicyTypeValue string
const (
// PolicyGraphPolicyTypeAuthorization represents the value Authorization.
PolicyGraphPolicyTypeAuthorization PolicyGraphPolicyTypeValue = "Authorization"
// PolicyGraphPolicyTypeCombined represents the value Combined.
PolicyGraphPolicyTypeCombined PolicyGraphPolicyTypeValue = "Combined"
// PolicyGraphPolicyTypeInfrastructure represents the value Infrastructure.
PolicyGraphPolicyTypeInfrastructure PolicyGraphPolicyTypeValue = "Infrastructure"
)
// PolicyGraphIdentity represents the Identity of the object.
var PolicyGraphIdentity = elemental.Identity{
Name: "policygraph",
Category: "policygraphs",
Package: "yeul",
Private: false,
}
// PolicyGraphsList represents a list of PolicyGraphs
type PolicyGraphsList []*PolicyGraph
// Identity returns the identity of the objects in the list.
func (o PolicyGraphsList) Identity() elemental.Identity {
return PolicyGraphIdentity
}
// Copy returns a pointer to a copy the PolicyGraphsList.
func (o PolicyGraphsList) Copy() elemental.Identifiables {
copy := append(PolicyGraphsList{}, o...)
return ©
}
// Append appends the objects to a new copy of the PolicyGraphsList.
// The argument objects must be *PolicyGraph values (panics otherwise).
func (o PolicyGraphsList) Append(objects ...elemental.Identifiable) elemental.Identifiables {
	out := make(PolicyGraphsList, 0, len(o)+len(objects))
	out = append(out, o...)
	for _, obj := range objects {
		out = append(out, obj.(*PolicyGraph))
	}
	return out
}
// List converts the object to an elemental.IdentifiablesList.
func (o PolicyGraphsList) List() elemental.IdentifiablesList {
	out := make(elemental.IdentifiablesList, len(o))
	for i, item := range o {
		out[i] = item
	}
	return out
}
// DefaultOrder returns the default ordering fields of the content.
func (o PolicyGraphsList) DefaultOrder() []string {
return []string{}
}
// ToSparse returns the PolicyGraphsList converted to SparsePolicyGraphsList.
// Objects in the list will only contain the given fields. No field means entire field set.
func (o PolicyGraphsList) ToSparse(fields ...string) elemental.Identifiables {
out := make(SparsePolicyGraphsList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i].ToSparse(fields...).(*SparsePolicyGraph)
}
return out
}
// Version returns the version of the content.
func (o PolicyGraphsList) Version() int {
return 1
}
// PolicyGraph represents the model of a policygraph
type PolicyGraph struct {
// The set of tags that a future-activated processing unit will have for which
// the user wants to evaluate policies and understand its connectivity options.
PUIdentity []string `json:"PUIdentity" msgpack:"PUIdentity" bson:"-" mapstructure:"PUIdentity,omitempty"`
// Contains the output of the policy evaluation. It is the same type of
// dependency map as created by other APIs.
DependencyMap *DependencyMap `json:"dependencyMap" msgpack:"dependencyMap" bson:"-" mapstructure:"dependencyMap,omitempty"`
// Identifies the type of policy that should be analyzed: `Authorization`(default),
// `Infrastructure`, or `Combined`.
PolicyType PolicyGraphPolicyTypeValue `json:"policyType" msgpack:"policyType" bson:"-" mapstructure:"policyType,omitempty"`
// Contains the tag expression that a processing unit must match in order
// to evaluate policy for it.
Selectors [][]string `json:"selectors" msgpack:"selectors" bson:"-" mapstructure:"selectors,omitempty"`
ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"`
}
// NewPolicyGraph returns a new *PolicyGraph
func NewPolicyGraph() *PolicyGraph {
return &PolicyGraph{
ModelVersion: 1,
DependencyMap: NewDependencyMap(),
PUIdentity: []string{},
PolicyType: PolicyGraphPolicyTypeAuthorization,
Selectors: [][]string{},
}
}
// Identity returns the Identity of the object.
func (o *PolicyGraph) Identity() elemental.Identity {
return PolicyGraphIdentity
}
// Identifier returns the value of the object's unique identifier.
func (o *PolicyGraph) Identifier() string {
return ""
}
// SetIdentifier sets the value of the object's unique identifier.
func (o *PolicyGraph) SetIdentifier(id string) {
}
// GetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *PolicyGraph) GetBSON() (interface{}, error) {
if o == nil {
return nil, nil
}
s := &mongoAttributesPolicyGraph{}
return s, nil
}
// SetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *PolicyGraph) SetBSON(raw bson.Raw) error {
if o == nil {
return nil
}
s := &mongoAttributesPolicyGraph{}
if err := raw.Unmarshal(s); err != nil {
return err
}
return nil
}
// Version returns the hardcoded version of the model.
func (o *PolicyGraph) Version() int {
return 1
}
// BleveType implements the bleve.Classifier Interface.
func (o *PolicyGraph) BleveType() string {
return "policygraph"
}
// DefaultOrder returns the list of default ordering fields.
func (o *PolicyGraph) DefaultOrder() []string {
return []string{}
}
// Doc returns the documentation for the object
func (o *PolicyGraph) Doc() string {
return `Returns a data structure representing the policy graph of all selected
processing units and their possible connectivity based on the current policies
associated with the namespace. Users can define a selector of processing units
in which they are interested or define the identity tags of a virtual processing
unit that is not yet activated.`
}
func (o *PolicyGraph) String() string {
return fmt.Sprintf("<%s:%s>", o.Identity().Name, o.Identifier())
}
// ToSparse returns the sparse version of the model.
// The returned object will only contain the given fields. No field means entire field set.
func (o *PolicyGraph) ToSparse(fields ...string) elemental.SparseIdentifiable {
if len(fields) == 0 {
// nolint: goimports
return &SparsePolicyGraph{
PUIdentity: &o.PUIdentity,
DependencyMap: o.DependencyMap,
PolicyType: &o.PolicyType,
Selectors: &o.Selectors,
}
}
sp := &SparsePolicyGraph{}
for _, f := range fields {
switch f {
case "PUIdentity":
sp.PUIdentity = &(o.PUIdentity)
case "dependencyMap":
sp.DependencyMap = o.DependencyMap
case "policyType":
sp.PolicyType = &(o.PolicyType)
case "selectors":
sp.Selectors = &(o.Selectors)
}
}
return sp
}
// Patch apply the non nil value of a *SparsePolicyGraph to the object.
func (o *PolicyGraph) Patch(sparse elemental.SparseIdentifiable) {
if !sparse.Identity().IsEqual(o.Identity()) {
panic("cannot patch from a parse with different identity")
}
so := sparse.(*SparsePolicyGraph)
if so.PUIdentity != nil {
o.PUIdentity = *so.PUIdentity
}
if so.DependencyMap != nil {
o.DependencyMap = so.DependencyMap
}
if so.PolicyType != nil {
o.PolicyType = *so.PolicyType
}
if so.Selectors != nil {
o.Selectors = *so.Selectors
}
}
// DeepCopy returns a deep copy if the PolicyGraph.
func (o *PolicyGraph) DeepCopy() *PolicyGraph {
if o == nil {
return nil
}
out := &PolicyGraph{}
o.DeepCopyInto(out)
return out
}
// DeepCopyInto copies the receiver into the given *PolicyGraph.
func (o *PolicyGraph) DeepCopyInto(out *PolicyGraph) {
target, err := copystructure.Copy(o)
if err != nil {
panic(fmt.Sprintf("Unable to deepcopy PolicyGraph: %s", err))
}
*out = *target.(*PolicyGraph)
}
// Validate valides the current information stored into the structure.
func (o *PolicyGraph) Validate() error {
errors := elemental.Errors{}
requiredErrors := elemental.Errors{}
if o.DependencyMap != nil {
elemental.ResetDefaultForZeroValues(o.DependencyMap)
if err := o.DependencyMap.Validate(); err != nil {
errors = errors.Append(err)
}
}
if err := elemental.ValidateStringInList("policyType", string(o.PolicyType), []string{"Authorization", "Infrastructure", "Combined"}, false); err != nil {
errors = errors.Append(err)
}
if err := ValidateTagsExpression("selectors", o.Selectors); err != nil {
errors = errors.Append(err)
}
if len(requiredErrors) > 0 {
return requiredErrors
}
if len(errors) > 0 {
return errors
}
return nil
}
// SpecificationForAttribute returns the AttributeSpecification for the given attribute name key.
func (*PolicyGraph) SpecificationForAttribute(name string) elemental.AttributeSpecification {
if v, ok := PolicyGraphAttributesMap[name]; ok {
return v
}
// We could not find it, so let's check on the lower case indexed spec map
return PolicyGraphLowerCaseAttributesMap[name]
}
// AttributeSpecifications returns the full attribute specifications map.
func (*PolicyGraph) AttributeSpecifications() map[string]elemental.AttributeSpecification {
return PolicyGraphAttributesMap
}
// ValueForAttribute returns the value for the given attribute.
// This is a very advanced function that you should not need but in some
// very specific use cases.
func (o *PolicyGraph) ValueForAttribute(name string) interface{} {
switch name {
case "PUIdentity":
return o.PUIdentity
case "dependencyMap":
return o.DependencyMap
case "policyType":
return o.PolicyType
case "selectors":
return o.Selectors
}
return nil
}
// PolicyGraphAttributesMap represents the map of attribute for PolicyGraph.
var PolicyGraphAttributesMap = map[string]elemental.AttributeSpecification{
"PUIdentity": {
AllowedChoices: []string{},
ConvertedName: "PUIdentity",
Description: `The set of tags that a future-activated processing unit will have for which
the user wants to evaluate policies and understand its connectivity options.`,
Exposed: true,
Name: "PUIdentity",
SubType: "string",
Type: "list",
},
"DependencyMap": {
AllowedChoices: []string{},
ConvertedName: "DependencyMap",
Description: `Contains the output of the policy evaluation. It is the same type of
dependency map as created by other APIs.`,
Exposed: true,
Name: "dependencyMap",
SubType: "dependencymap",
Type: "ref",
},
"PolicyType": {
AllowedChoices: []string{"Authorization", "Infrastructure", "Combined"},
ConvertedName: "PolicyType",
DefaultValue: PolicyGraphPolicyTypeAuthorization,
Description: `Identifies the type of policy that should be analyzed: ` + "`" + `Authorization` + "`" + `(default),
` + "`" + `Infrastructure` + "`" + `, or ` + "`" + `Combined` + "`" + `.`,
Exposed: true,
Name: "policyType",
Type: "enum",
},
"Selectors": {
AllowedChoices: []string{},
ConvertedName: "Selectors",
Description: `Contains the tag expression that a processing unit must match in order
to evaluate policy for it.`,
Exposed: true,
Name: "selectors",
SubType: "[][]string",
Type: "external",
},
}
// PolicyGraphLowerCaseAttributesMap represents the map of attribute for PolicyGraph.
var PolicyGraphLowerCaseAttributesMap = map[string]elemental.AttributeSpecification{
"puidentity": {
AllowedChoices: []string{},
ConvertedName: "PUIdentity",
Description: `The set of tags that a future-activated processing unit will have for which
the user wants to evaluate policies and understand its connectivity options.`,
Exposed: true,
Name: "PUIdentity",
SubType: "string",
Type: "list",
},
"dependencymap": {
AllowedChoices: []string{},
ConvertedName: "DependencyMap",
Description: `Contains the output of the policy evaluation. It is the same type of
dependency map as created by other APIs.`,
Exposed: true,
Name: "dependencyMap",
SubType: "dependencymap",
Type: "ref",
},
"policytype": {
AllowedChoices: []string{"Authorization", "Infrastructure", "Combined"},
ConvertedName: "PolicyType",
DefaultValue: PolicyGraphPolicyTypeAuthorization,
Description: `Identifies the type of policy that should be analyzed: ` + "`" + `Authorization` + "`" + `(default),
` + "`" + `Infrastructure` + "`" + `, or ` + "`" + `Combined` + "`" + `.`,
Exposed: true,
Name: "policyType",
Type: "enum",
},
"selectors": {
AllowedChoices: []string{},
ConvertedName: "Selectors",
Description: `Contains the tag expression that a processing unit must match in order
to evaluate policy for it.`,
Exposed: true,
Name: "selectors",
SubType: "[][]string",
Type: "external",
},
}
// SparsePolicyGraphsList represents a list of SparsePolicyGraphs
type SparsePolicyGraphsList []*SparsePolicyGraph
// Identity returns the identity of the objects in the list.
func (o SparsePolicyGraphsList) Identity() elemental.Identity {
return PolicyGraphIdentity
}
// Copy returns a pointer to a copy the SparsePolicyGraphsList.
func (o SparsePolicyGraphsList) Copy() elemental.Identifiables {
copy := append(SparsePolicyGraphsList{}, o...)
return ©
}
// Append appends the objects to the a new copy of the SparsePolicyGraphsList.
func (o SparsePolicyGraphsList) Append(objects ...elemental.Identifiable) elemental.Identifiables {
out := append(SparsePolicyGraphsList{}, o...)
for _, obj := range objects {
out = append(out, obj.(*SparsePolicyGraph))
}
return out
}
// List converts the object to an elemental.IdentifiablesList.
func (o SparsePolicyGraphsList) List() elemental.IdentifiablesList {
out := make(elemental.IdentifiablesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i]
}
return out
}
// DefaultOrder returns the default ordering fields of the content.
func (o SparsePolicyGraphsList) DefaultOrder() []string {
return []string{}
}
// ToPlain returns the SparsePolicyGraphsList converted to PolicyGraphsList.
func (o SparsePolicyGraphsList) ToPlain() elemental.IdentifiablesList {
out := make(elemental.IdentifiablesList, len(o))
for i := 0; i < len(o); i++ {
out[i] = o[i].ToPlain()
}
return out
}
// Version returns the version of the content.
func (o SparsePolicyGraphsList) Version() int {
return 1
}
// SparsePolicyGraph represents the sparse version of a policygraph.
type SparsePolicyGraph struct {
// The set of tags that a future-activated processing unit will have for which
// the user wants to evaluate policies and understand its connectivity options.
PUIdentity *[]string `json:"PUIdentity,omitempty" msgpack:"PUIdentity,omitempty" bson:"-" mapstructure:"PUIdentity,omitempty"`
// Contains the output of the policy evaluation. It is the same type of
// dependency map as created by other APIs.
DependencyMap *DependencyMap `json:"dependencyMap,omitempty" msgpack:"dependencyMap,omitempty" bson:"-" mapstructure:"dependencyMap,omitempty"`
// Identifies the type of policy that should be analyzed: `Authorization`(default),
// `Infrastructure`, or `Combined`.
PolicyType *PolicyGraphPolicyTypeValue `json:"policyType,omitempty" msgpack:"policyType,omitempty" bson:"-" mapstructure:"policyType,omitempty"`
// Contains the tag expression that a processing unit must match in order
// to evaluate policy for it.
Selectors *[][]string `json:"selectors,omitempty" msgpack:"selectors,omitempty" bson:"-" mapstructure:"selectors,omitempty"`
ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"`
}
// NewSparsePolicyGraph returns a new SparsePolicyGraph.
func NewSparsePolicyGraph() *SparsePolicyGraph {
return &SparsePolicyGraph{}
}
// Identity returns the Identity of the sparse object.
func (o *SparsePolicyGraph) Identity() elemental.Identity {
return PolicyGraphIdentity
}
// Identifier returns the value of the sparse object's unique identifier.
func (o *SparsePolicyGraph) Identifier() string {
return ""
}
// SetIdentifier sets the value of the sparse object's unique identifier.
func (o *SparsePolicyGraph) SetIdentifier(id string) {
}
// GetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *SparsePolicyGraph) GetBSON() (interface{}, error) {
if o == nil {
return nil, nil
}
s := &mongoAttributesSparsePolicyGraph{}
return s, nil
}
// SetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDBID as ObectID.
func (o *SparsePolicyGraph) SetBSON(raw bson.Raw) error {
if o == nil {
return nil
}
s := &mongoAttributesSparsePolicyGraph{}
if err := raw.Unmarshal(s); err != nil {
return err
}
return nil
}
// Version returns the hardcoded version of the model.
func (o *SparsePolicyGraph) Version() int {
return 1
}
// ToPlain returns the plain version of the sparse model.
func (o *SparsePolicyGraph) ToPlain() elemental.PlainIdentifiable {
out := NewPolicyGraph()
if o.PUIdentity != nil {
out.PUIdentity = *o.PUIdentity
}
if o.DependencyMap != nil {
out.DependencyMap = o.DependencyMap
}
if o.PolicyType != nil {
out.PolicyType = *o.PolicyType
}
if o.Selectors != nil {
out.Selectors = *o.Selectors
}
return out
}
// DeepCopy returns a deep copy if the SparsePolicyGraph.
func (o *SparsePolicyGraph) DeepCopy() *SparsePolicyGraph {
if o == nil {
return nil
}
out := &SparsePolicyGraph{}
o.DeepCopyInto(out)
return out
}
// DeepCopyInto copies the receiver into the given *SparsePolicyGraph.
func (o *SparsePolicyGraph) DeepCopyInto(out *SparsePolicyGraph) {
target, err := copystructure.Copy(o)
if err != nil {
panic(fmt.Sprintf("Unable to deepcopy SparsePolicyGraph: %s", err))
}
*out = *target.(*SparsePolicyGraph)
}
type mongoAttributesPolicyGraph struct {
}
type mongoAttributesSparsePolicyGraph struct {
} | policygraph.go | 0.796886 | 0.404331 | policygraph.go | starcoder |
package platformsnotificationevents
import (
"encoding/json"
)
// ViasPersonalData struct for ViasPersonalData
type ViasPersonalData struct {
// The date of birth of the person. The date should be in ISO-8601 format yyyy-mm-dd (e.g. 2000-01-31).
DateOfBirth *string `json:"dateOfBirth,omitempty"`
// Key value pairs of document type and identify numbers
DocumentData *[]PersonalDocumentData `json:"documentData,omitempty"`
// The nationality of the person represented by a two-character country code. >The permitted country codes are defined in ISO-3166-1 alpha-2 (e.g. 'NL').
Nationality *string `json:"nationality,omitempty"`
}
// NewViasPersonalData instantiates a new ViasPersonalData object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewViasPersonalData() *ViasPersonalData {
this := ViasPersonalData{}
return &this
}
// NewViasPersonalDataWithDefaults instantiates a new ViasPersonalData object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewViasPersonalDataWithDefaults() *ViasPersonalData {
this := ViasPersonalData{}
return &this
}
// GetDateOfBirth returns the DateOfBirth field value if set, zero value otherwise.
func (o *ViasPersonalData) GetDateOfBirth() string {
if o == nil || o.DateOfBirth == nil {
var ret string
return ret
}
return *o.DateOfBirth
}
// GetDateOfBirthOk returns a tuple with the DateOfBirth field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ViasPersonalData) GetDateOfBirthOk() (*string, bool) {
if o == nil || o.DateOfBirth == nil {
return nil, false
}
return o.DateOfBirth, true
}
// HasDateOfBirth returns a boolean if a field has been set.
func (o *ViasPersonalData) HasDateOfBirth() bool {
if o != nil && o.DateOfBirth != nil {
return true
}
return false
}
// SetDateOfBirth gets a reference to the given string and assigns it to the DateOfBirth field.
func (o *ViasPersonalData) SetDateOfBirth(v string) {
o.DateOfBirth = &v
}
// GetDocumentData returns the DocumentData field value if set, zero value otherwise.
func (o *ViasPersonalData) GetDocumentData() []PersonalDocumentData {
if o == nil || o.DocumentData == nil {
var ret []PersonalDocumentData
return ret
}
return *o.DocumentData
}
// GetDocumentDataOk returns a tuple with the DocumentData field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ViasPersonalData) GetDocumentDataOk() (*[]PersonalDocumentData, bool) {
if o == nil || o.DocumentData == nil {
return nil, false
}
return o.DocumentData, true
}
// HasDocumentData returns a boolean if a field has been set.
func (o *ViasPersonalData) HasDocumentData() bool {
if o != nil && o.DocumentData != nil {
return true
}
return false
}
// SetDocumentData gets a reference to the given []PersonalDocumentData and assigns it to the DocumentData field.
func (o *ViasPersonalData) SetDocumentData(v []PersonalDocumentData) {
o.DocumentData = &v
}
// GetNationality returns the Nationality field value if set, zero value otherwise.
func (o *ViasPersonalData) GetNationality() string {
if o == nil || o.Nationality == nil {
var ret string
return ret
}
return *o.Nationality
}
// GetNationalityOk returns a tuple with the Nationality field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ViasPersonalData) GetNationalityOk() (*string, bool) {
if o == nil || o.Nationality == nil {
return nil, false
}
return o.Nationality, true
}
// HasNationality returns a boolean if a field has been set.
func (o *ViasPersonalData) HasNationality() bool {
if o != nil && o.Nationality != nil {
return true
}
return false
}
// SetNationality gets a reference to the given string and assigns it to the Nationality field.
func (o *ViasPersonalData) SetNationality(v string) {
o.Nationality = &v
}
func (o ViasPersonalData) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.DateOfBirth != nil {
toSerialize["dateOfBirth"] = o.DateOfBirth
}
if o.DocumentData != nil {
toSerialize["documentData"] = o.DocumentData
}
if o.Nationality != nil {
toSerialize["nationality"] = o.Nationality
}
return json.Marshal(toSerialize)
}
type NullableViasPersonalData struct {
value *ViasPersonalData
isSet bool
}
func (v NullableViasPersonalData) Get() *ViasPersonalData {
return v.value
}
func (v *NullableViasPersonalData) Set(val *ViasPersonalData) {
v.value = val
v.isSet = true
}
func (v NullableViasPersonalData) IsSet() bool {
return v.isSet
}
func (v *NullableViasPersonalData) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableViasPersonalData(val *ViasPersonalData) *NullableViasPersonalData {
return &NullableViasPersonalData{value: val, isSet: true}
}
func (v NullableViasPersonalData) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableViasPersonalData) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | src/platformsnotificationevents/model_vias_personal_data.go | 0.763396 | 0.525795 | model_vias_personal_data.go | starcoder |
package mmr
type leafIndex uint64
func LeafIndex(value uint64) (res *leafIndex) {
v := leafIndex(value)
return &v
}
func (x *leafIndex) GetLeftBranch() IBlockIndex {
value := uint64(*x)
if value&1 == 0 && value > 1 {
return NodeIndex(uint64(*x) - 1)
}
return nil
}
func (x *leafIndex) GetSibling() IBlockIndex {
value := uint64(*x)
if value&1 == 1 {
return LeafIndex(value - 1)
} else {
return LeafIndex(value + 1)
}
}
func (x *leafIndex) RightUp() IBlockIndex {
value := x.Index()
if value&1 == 1 {
return NodeIndex(value)
}
return nil
}
func (x *leafIndex) GetTop() IBlockIndex {
value := uint64(*x)
if value&1 == 0 {
return LeafIndex(value)
}
return NodeIndex(value).GetTop()
}
func (x *leafIndex) Index() uint64 {
return uint64(*x)
}
// Calculates Peaks
// Algorythm:
// 1. Get to from current. Take it.
// 2. Go to the left branch.
// - if no any left brnaches - return
// - go to 1
func (x *leafIndex) GetPeaks() (res []IBlockIndex) {
var peak IBlockIndex = x
for {
peak = peak.GetTop()
res = append(res, peak)
if peak = peak.GetLeftBranch(); peak == nil {
return
}
}
}
// Leaf is always on the Zero height
func (x *leafIndex) GetHeight() uint64 {
return 0
}
func (x *leafIndex) IsRight() bool {
return x.Index()&1 == 1
}
func (x *leafIndex) SetValue(mmr *mmr, data *BlockData) {
mmr.db.SetBlock(x.Index(), data)
}
func (x *leafIndex) Value(mmr *mmr) (*BlockData, bool) {
return mmr.db.GetBlock(x.Index())
}
func (x *leafIndex) AppendValue(mmr *mmr, data *BlockData) {
mmr.db.SetBlock(x.Index(), data)
var node IBlockIndex = x
for node.IsRight() {
sibling := node.GetSibling()
if parent := node.RightUp(); parent != nil {
leftData, _ := sibling.Value(mmr)
aggregated := mmr.aggregate(leftData, data)
//fmt.Printf("Aggregate: %d [%d:%x]+ %d [%d:%x] = %d [%d:%x]\n", sibling.Index(), leftData.Weight,leftData.Hash, node.Index(), data.Weight,data.Hash, parent.Index(), aggregated.Weight, aggregated.Hash)
parent.SetValue(mmr, aggregated)
data = aggregated
node = parent
continue
}
return
}
} | utils/mmr/mmr.nodes.leaf.go | 0.651244 | 0.532668 | mmr.nodes.leaf.go | starcoder |
package suffix_array
import "sort"
// Suffix represents a single suffix item which holds a reference to the
// initial string index and the last range of sort indices.
type Suffix struct {
nr []int
idx int
}
// SuffixResult is the group of Suffix objects
type SuffixResult []*Suffix
// ToSuffixArray gets the sorted suffix array result.
func (s SuffixResult) ToSuffixArray() []int {
var suffixArray []int
for _, e := range s {
suffixArray = append(suffixArray, e.idx)
}
return suffixArray
}
// Search finds a given pattern within a string using a suffix array. It
// returns two integers, the start index of the suffix array for the match and
// the end index for the suffix array for the match.
func (s SuffixResult) Search(str, pat string) (int, int) {
suffixArray := s.ToSuffixArray()
n := len(suffixArray)
start := 0
end := n
for start < end {
mid := (start + end) / 2
if pat > str[suffixArray[mid]:n] {
start = mid + 1
} else {
end = mid
}
}
suffixStart := start
end = n
for start < end {
mid := (start + end) / 2
if pat < str[suffixArray[mid]:n] {
end = mid
} else {
start = mid + 1
}
}
return suffixStart, end
}
// BySuffix is a convenience type to handle sorting by the suffixes.
type BySuffix []*Suffix
// Len satisfies the sort interface.
func (s BySuffix) Len() int {
return len(s)
}
// Swap satisfies the sort interface.
func (s BySuffix) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
// Less satisfies the sort interface. In this case, we check to see if the two
// sort indices are equal. If they are then we check which of the second
// indices is greater. If they aren't equal then we find out which of the
// first indices is greater.
func (s BySuffix) Less(i, j int) bool {
if s[i].nr[0] == s[j].nr[0] {
return s[i].nr[1] < s[j].nr[1]
}
return s[i].nr[0] < s[j].nr[0]
}
// ConstructSuffixArray builds the initial suffix array returning the subsequent suffix array
// along with the sort index matrix which can be used to find the LCP.
func ConstructSuffixArray(s string) (SuffixResult, [][]int) {
var sortIndex [][]int
n := len(s)
suffixes := make(SuffixResult, n)
// Build the initial sort index row containing integers based on distance from `a`
sortIndex = append(sortIndex, make([]int, n))
for i, char := range s {
sortIndex[0][i] = int(char - 'a')
}
// Here we are sorting suffixes in intervals of 2 characters. In this way,
// we first look at the suffixes as the first 2 characters, first 4, first
// 8, etc. This allows us to progressively make sorting decisions.
for step, count := 1, 1; count < n; step, count = step+1, count*2 {
// Build the array of suffixes where the range is based on the
// previous batches sort index results. Since we want to store the
// current sort index and the previous sort index (nr) then we check
// that we can. If not, we denote it with then -1.
for i := 0; i < n; i++ {
nr := make([]int, 2)
nr[0] = sortIndex[step-1][i]
if i+count < n {
nr[1] = sortIndex[step-1][i+count]
} else {
nr[1] = -1
}
suffixes[i] = &Suffix{nr: nr, idx: i}
}
// Sort the suffixes to their order based on the sort indexes collected above.
sort.Sort(BySuffix(suffixes))
// Build the next row of sort indexes based on the previous row.
sortIndex = append(sortIndex, make([]int, n))
for i := 0; i < n; i++ {
// This step really checks to see if the current suffix and the
// one before it are the same substring as one another. If they
// are, then we want to prior suffix's index to move into the next
// round.
if i > 0 && suffixes[i].nr[0] == suffixes[i-1].nr[0] && suffixes[i].nr[1] == suffixes[i-1].nr[1] {
sortIndex[step][suffixes[i].idx] = sortIndex[step][suffixes[i-1].idx]
} else {
sortIndex[step][suffixes[i].idx] = i
}
}
}
return suffixes, sortIndex
} | data-structures/suffix-array/suffix_array.go | 0.867612 | 0.495239 | suffix_array.go | starcoder |
package poly
import (
"image"
"math/rand"
)
type Polygon struct {
// Order represents the number of vertices in the polygon
Order int
ColorRGBA Color
// Vertices represents the list of coordinates for the vertices of the polygon
Vertices []Point
// Rectangle represents the minimum rectangle that contains the polygon
Rectangle image.Rectangle
subImage *image.RGBA
IsModified bool
Points []Point
HasPoints bool
}
func (polygon *Polygon) clone() Polygon {
polygonCopied := &Polygon{}
polygonCopied.Order = polygon.Order
polygonCopied.ColorRGBA = polygon.ColorRGBA
polygonCopied.Vertices = make([]Point, polygon.Order)
for i := range polygon.Vertices {
polygonCopied.Vertices[i] = polygon.Vertices[i].clone()
}
polygonCopied.Rectangle = polygon.Rectangle
polygonCopied.subImage = image.NewRGBA(polygon.subImage.Bounds())
copy(polygonCopied.subImage.Pix, polygon.subImage.Pix)
polygonCopied.HasPoints = polygon.HasPoints
return *polygonCopied
}
func NewRandomPolygon(order, maxX, maxY int) Polygon {
points := newRandomVertices(order, maxX, maxY)
x0, x1, y0, y1 := MinMaxPoints(points)
polygon := Polygon{}
polygon.Order = order
polygon.Vertices = points
polygon.Rectangle = image.Rect(x0, y0, x1, y1)
polygon.ColorRGBA = newRandomColor()
polygon.subImage = image.NewRGBA(image.Rect(0, 0, maxX, maxY))
polygon.HasPoints = false
return polygon
}
func (polygon *Polygon) mutate(ratio float64, w, h int) {
randomFloat := rand.Float64()
if randomFloat > 0.5 {
polygon.HasPoints = false
polygon.mutateVertex(ratio, w, h)
return
}
polygon.mutateColor()
return
}
func (polygon *Polygon) mutateColor() {
amplitude := 50
channel := rand.Intn(4)
displacement := rand.Intn(2*amplitude) - amplitude
colorList := [4]uint8{
polygon.ColorRGBA.R,
polygon.ColorRGBA.G,
polygon.ColorRGBA.B,
polygon.ColorRGBA.A,
}
colorList[channel] = clamp(int(colorList[channel]), displacement, 0, 255)
polygon.ColorRGBA =
Color{
colorList[0],
colorList[1],
colorList[2],
colorList[3],
}
}
func (polygon *Polygon) mutateVertex(ratio float64, width, height int) {
randomVertex := rand.Intn(len(polygon.Vertices))
var p [2]int
if ratio < 0.07 {
p = [2]int{
polygon.Vertices[randomVertex].X,
polygon.Vertices[randomVertex].Y,
}
amplitude := 10
displacement := rand.Intn(2*amplitude+1) - amplitude
direction := rand.Intn(2)
maxValue := [2]int{width, height}
p[direction] = int(clamp(p[direction], displacement, 0, maxValue[direction]))
} else {
p = [2]int{rand.Intn(width), rand.Intn(height)}
}
polygon.Vertices[randomVertex] = Point{p[0], p[1]}
}
func newRandomColor() Color {
var color [4]uint8
for i := 0; i < 3; i++ {
color[i] = uint8(rand.Intn(256))
}
color[3] = uint8(75)
return Color{color[0], color[1], color[2], color[3]}
}
func newRandomVertices(order, maxX, maxY int) []Point {
points := make([]Point, order)
for i := 0; i < order; i++ {
points[i] = Point{rand.Intn(maxX), rand.Intn(maxY)}
}
return points
} | poly/polygon.go | 0.794823 | 0.593845 | polygon.go | starcoder |
package tsuro
import "math/rand"
// Deck definition - the deck of tiles that players draw from
type Deck struct {
Tiles []Tile `json:"tiles"`
}
func (deck *Deck) Shuffle() {
for i := 0; i < len(deck.Tiles); i++ {
r := rand.Intn(len(deck.Tiles))
if i != r {
deck.Tiles[r], deck.Tiles[i] = deck.Tiles[i], deck.Tiles[r]
}
}
}
func (deck *Deck) Add(tile Tile) {
deck.Tiles = append(deck.Tiles, tile)
deck.Shuffle()
}
func (deck *Deck) Draw() Tile {
size := len(deck.Tiles)
tile := deck.Tiles[size-1]
deck.Tiles = deck.Tiles[:size-1]
return tile
}
func NewDeck() Deck {
tiles := []Tile{
{Edges: [][]Notch{{A, B}, {C, D}, {E, F}, {G, H}}},
{Edges: [][]Notch{{A, H}, {B, G}, {C, D}, {E, F}}},
{Edges: [][]Notch{{A, H}, {B, C}, {D, G}, {E, F}}},
{Edges: [][]Notch{{A, H}, {B, C}, {D, E}, {F, G}}},
{Edges: [][]Notch{{A, G}, {B, H}, {C, D}, {E, F}}},
{Edges: [][]Notch{{A, B}, {C, H}, {D, G}, {E, F}}},
{Edges: [][]Notch{{A, B}, {C, G}, {D, H}, {E, F}}},
{Edges: [][]Notch{{A, G}, {B, C}, {D, H}, {E, F}}},
{Edges: [][]Notch{{A, B}, {C, G}, {D, E}, {F, H}}},
{Edges: [][]Notch{{A, G}, {B, C}, {D, E}, {F, H}}},
{Edges: [][]Notch{{A, C}, {B, G}, {D, E}, {F, H}}},
{Edges: [][]Notch{{A, C}, {B, G}, {D, H}, {E, F}}},
{Edges: [][]Notch{{A, C}, {B, H}, {D, G}, {E, F}}},
{Edges: [][]Notch{{A, D}, {B, H}, {C, G}, {E, F}}},
{Edges: [][]Notch{{A, D}, {B, G}, {C, H}, {E, F}}},
{Edges: [][]Notch{{A, D}, {B, C}, {E, H}, {F, G}}},
{Edges: [][]Notch{{A, D}, {B, C}, {E, G}, {F, H}}},
{Edges: [][]Notch{{A, E}, {B, C}, {D, G}, {F, H}}},
{Edges: [][]Notch{{A, E}, {B, C}, {D, H}, {F, G}}},
{Edges: [][]Notch{{A, F}, {B, H}, {C, D}, {E, G}}},
{Edges: [][]Notch{{A, F}, {B, G}, {C, H}, {D, E}}},
{Edges: [][]Notch{{A, F}, {B, C}, {D, H}, {E, G}}},
{Edges: [][]Notch{{A, F}, {B, D}, {C, H}, {E, G}}},
{Edges: [][]Notch{{A, F}, {B, D}, {C, G}, {E, H}}},
{Edges: [][]Notch{{A, E}, {B, D}, {C, G}, {F, H}}},
{Edges: [][]Notch{{A, C}, {B, D}, {E, G}, {F, H}}},
{Edges: [][]Notch{{A, F}, {B, E}, {C, H}, {D, G}}},
{Edges: [][]Notch{{A, F}, {B, E}, {C, G}, {D, H}}},
{Edges: [][]Notch{{A, E}, {B, F}, {C, G}, {D, H}}},
{Edges: [][]Notch{{A, D}, {B, F}, {C, G}, {E, H}}},
{Edges: [][]Notch{{A, D}, {B, F}, {C, H}, {E, G}}},
{Edges: [][]Notch{{A, C}, {B, F}, {D, H}, {E, G}}},
{Edges: [][]Notch{{A, D}, {B, G}, {C, E}, {F, H}}},
{Edges: [][]Notch{{A, G}, {B, D}, {C, E}, {F, H}}},
{Edges: [][]Notch{{A, D}, {B, G}, {C, F}, {E, H}}},
}
deck := Deck{
Tiles: tiles,
}
deck.Shuffle()
return deck
} | backend/game/deck.go | 0.533641 | 0.7214 | deck.go | starcoder |
package date
import (
"encoding/json"
"fmt"
"regexp"
"strconv"
"time"
)
// Date represents a date without time information; it may represent only a year
// (month and day may be zero, comparisons work correctly in this case).
type Date struct {
Year int
Month int
Day int
}
// Build creates a Date from year, month, day
func Build(year, month, day int) Date {
return Date{
Year: year,
Month: month,
Day: day,
}
}
// AsTime converts the date object into the best representation of a time.Time
func (d Date) AsTime() time.Time {
switch {
case d.Year == 0:
return time.Time{}
case d.Month == 0, d.Day == 0:
return time.Date(d.Year, 1, 1, 0, 0, 0, 0, time.UTC)
default:
return time.Date(d.Year, time.Month(d.Month), d.Day, 0, 0, 0, 0, time.UTC)
}
}
// ToString returns a string representation of the date to the appropriate level of precision.
func (d Date) ToString() string {
switch {
case d.Year == 0:
return "N/A"
case d.Month == 0, d.Day == 0:
return strconv.Itoa(d.Year)
default:
return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
}
}
// CompareTo compares two date objects to an appropriate level of precision,
// and returns <0 if the receiver is less than other, >0 if it's greater than,
// and 0 if they're equal (at the level to which they can be compared).
// If one of the dates has different precision than the other, they are only compared
// to the lesser precision. An empty Date is considered to be less than any non-empty
// date and equal to itself.
func (d Date) CompareTo(other Date) int {
switch {
case d.Year == 0:
if other.Year == 0 {
return 0
}
return -1
case d.Month == 0, d.Day == 0:
return d.Year - other.Year
default:
switch {
case other.Year == 0:
return 1
case other.Month == 0, other.Day == 0:
return d.Year - other.Year
default:
if other.Year != d.Year {
return d.Year - other.Year
}
if other.Month != d.Month {
return d.Month - other.Month
}
return d.Day - other.Day
}
}
}
// IsZero returns true if the Date object is the zero value
func (d Date) IsZero() bool {
return d.Year == 0 && d.Month == 0 && d.Day == 0
}
// AsDate converts a time.Time into a Date object.
func AsDate(t time.Time) Date {
return Date{
Year: t.Year(),
Month: int(t.Month()),
Day: t.Day(),
}
}
// datePattern matches a standalone 4-digit year, optionally followed by
// month and day components separated by '.', '/' or '-'. Submatch layout:
// 1 = year, 2 = optional month/day clause, 3 = month, 4 = day.
// Compiled once at package scope instead of on every ParseDate call.
//
// NOTE(review): month/day are only constrained to 1-2 digits, so e.g.
// "2021-99-99" parses as a Date — confirm whether range validation belongs here.
var datePattern = regexp.MustCompile(`\b([0-9]{4})([./-]([0-9]{1,2})[./-]([0-9]{1,2}))?\b`)

// ParseDate parses a string and looks for the first thing in it that could be a date.
// If none are found, it returns a zero date.
// It also returns an index into the string pointing past the date that was found.
// If no date was found, the index is 0.
// The regex and logic are fairly finicky, which avoids lots of conditionals (perhaps at
// the expense of clarity).
func ParseDate(s string) (Date, int) {
	ixs := datePattern.FindStringSubmatchIndex(s)
	if len(ixs) == 0 {
		return Date{}, 0
	}
	// Atoi cannot fail here: the submatches are all-digit by construction.
	y, _ := strconv.Atoi(s[ixs[2]:ixs[3]])
	if ixs[4] == -1 {
		// Year only — the optional month/day clause did not match.
		return Date{Year: y}, ixs[1]
	}
	m, _ := strconv.Atoi(s[ixs[6]:ixs[7]])
	d, _ := strconv.Atoi(s[ixs[8]:ixs[9]])
	return Date{y, m, d}, ixs[1]
}
// ParseOnly calls ParseDate and discards the returned index, yielding just
// the first date found in s (or the zero Date).
func ParseOnly(s string) Date {
	d, _ := ParseDate(s)
	return d
}
// ParseAllDates returns a slice of every Date found in the given string,
// in order of appearance. The result is empty (but non-nil) when no dates
// are present.
func ParseAllDates(s string) []Date {
	dates := make([]Date, 0)
	rest := s
	for {
		d, ix := ParseDate(rest)
		if ix == 0 {
			// No further dates.
			return dates
		}
		dates = append(dates, d)
		rest = rest[ix:]
	}
}
// MarshalJSON implements json.Marshaler
func (d Date) MarshalJSON() ([]byte, error) {
return json.Marshal(d.ToString())
} | pkg/date/date.go | 0.751375 | 0.638201 | date.go | starcoder |
package feign
import (
"errors"
"reflect"
"time"
)
// typeTime caches the reflect.Type of time.Time so the struct case of
// getValue can special-case it without re-deriving the type on each call.
var typeTime = reflect.TypeOf((*time.Time)(nil)).Elem()

// ErrUnhandledType is an error returned when the type is not fillable.
// Unfillable types include funcs, chans, and interfaces. When filling structs,
// maps, or slices, these types will be ignored.
var ErrUnhandledType = errors.New("unhandled type")

// Filler is a func used to provide the value used to fill a struct field.
// path is the dotted field path (e.g. ".Outer.Inner"); return ok=false to
// fall back to random generation for that field.
type Filler func(path string) (val interface{}, ok bool)
// Fill fills a type with random data. val must be a non-nil pointer; the
// pointed-to value is overwritten. Optional fillers override random
// generation for specific field paths.
func Fill(val interface{}, fillers ...Filler) error {
	t := reflect.TypeOf(val)
	// Guard t == nil first: Fill(nil) previously panicked on t.Kind()
	// because reflect.TypeOf(nil) returns a nil Type.
	if t == nil || t.Kind() != reflect.Ptr || reflect.ValueOf(val).IsNil() {
		return errors.New("not a pointer value")
	}
	v := reflect.ValueOf(val)
	result, err := getValue("", val, fillers...)
	if err != nil {
		return err
	}
	v.Elem().Set(result.Elem().Convert(t.Elem()))
	return nil
}
// MustFill fills a type with random data and panics if there is an error.
func MustFill(val interface{}, fillers ...Filler) {
	if err := Fill(val, fillers...); err != nil {
		panic(err)
	}
}
// getValue produces a reflect.Value with the same type as a, filled with
// random data (or with filler-supplied data when a Filler matches path).
// path is the dotted field path accumulated while descending into structs;
// the root call passes "". Kinds with no case below (func, chan,
// interface, ...) yield ErrUnhandledType, which the container cases swallow
// so those elements are simply skipped.
func getValue(path string, a interface{}, fillers ...Filler) (reflect.Value, error) {
	// Fillers take precedence over random generation, but only for named
	// field paths — never for the root value.
	if path != "" {
		for _, fn := range fillers {
			if v, ok := fn(path); ok {
				if v == nil {
					return reflect.Zero(reflect.TypeOf(a)), nil
				}
				return reflect.ValueOf(v), nil
			}
		}
	}
	t := reflect.TypeOf(a)
	if t == nil {
		return reflect.Value{}, ErrUnhandledType
	}
	switch t.Kind() {
	case reflect.Ptr:
		v := reflect.New(t.Elem())
		var val reflect.Value
		var err error
		// Recurse on the pointee: reuse the existing pointee when the
		// incoming pointer is non-nil, otherwise start from a fresh zero.
		if a != reflect.Zero(reflect.TypeOf(a)).Interface() {
			val, err = getValue(path, reflect.ValueOf(a).Elem().Interface(), fillers...)
			if err != nil {
				return reflect.Value{}, err
			}
		} else {
			val, err = getValue(path, v.Elem().Interface(), fillers...)
			if err != nil {
				return reflect.Value{}, err
			}
		}
		v.Elem().Set(val.Convert(t.Elem()))
		return v, nil
	case reflect.Struct:
		switch t {
		case typeTime:
			// time.Time gets a random instant rather than field-wise filling.
			ft := time.Time{}.Add(time.Duration(random.Int63()))
			return reflect.ValueOf(ft), nil
		default:
			v := reflect.New(t).Elem()
			for i := 0; i < v.NumField(); i++ {
				field := v.Field(i)
				if !field.CanSet() {
					continue // avoid panic to set on unexported field in struct
				}
				// Extend the dotted path with this field's name.
				val, err := getValue(path+"."+t.Field(i).Name, field.Interface(), fillers...)
				if err == ErrUnhandledType {
					continue
				}
				if err != nil {
					return reflect.Value{}, err
				}
				val = val.Convert(field.Type())
				v.Field(i).Set(val)
			}
			return v, nil
		}
	case reflect.String:
		return reflect.ValueOf(randomString()), nil
	case reflect.Array:
		v := reflect.New(t).Elem()
		for i := 0; i < t.Len(); i++ {
			val, err := getValue(path, v.Index(i).Interface(), fillers...)
			if err == ErrUnhandledType {
				continue
			}
			if err != nil {
				return reflect.Value{}, err
			}
			v.Index(i).Set(val.Convert(t.Elem()))
		}
		return v, nil
	case reflect.Slice:
		// NOTE(review): the local name shadows the builtin len here and in
		// the Map case below — consider renaming.
		len := randomSliceAndMapSize()
		if len == 0 {
			// Zero size yields a nil slice, not an empty one.
			return reflect.Zero(t), nil
		}
		v := reflect.MakeSlice(t, len, len)
		for i := 0; i < v.Len(); i++ {
			val, err := getValue(path, v.Index(i).Interface(), fillers...)
			if err == ErrUnhandledType {
				continue
			}
			if err != nil {
				return reflect.Value{}, err
			}
			v.Index(i).Set(val.Convert(t.Elem()))
		}
		return v, nil
	case reflect.Map:
		len := randomSliceAndMapSize()
		if len == 0 {
			return reflect.Zero(t), nil
		}
		v := reflect.MakeMapWithSize(t, len)
		for i := 0; i < len; i++ {
			// Generate a random key and a random value independently;
			// duplicate keys simply overwrite, so the final map may be
			// smaller than len.
			keyInstance := reflect.New(t.Key()).Elem().Interface()
			key, err := getValue(path, keyInstance, fillers...)
			if err == ErrUnhandledType {
				continue
			}
			if err != nil {
				return reflect.Value{}, err
			}
			valueInstance := reflect.New(t.Elem()).Elem().Interface()
			val, err := getValue(path, valueInstance, fillers...)
			if err == ErrUnhandledType {
				continue
			}
			if err != nil {
				return reflect.Value{}, err
			}
			v.SetMapIndex(key.Convert(t.Key()), val.Convert(t.Elem()))
		}
		return v, nil
	case reflect.Bool:
		return reflect.ValueOf(random.Intn(2) > 0), nil
	case reflect.Int:
		return reflect.ValueOf(randomInteger()), nil
	case reflect.Int8:
		return reflect.ValueOf(int8(randomInteger())), nil
	case reflect.Int16:
		return reflect.ValueOf(int16(randomInteger())), nil
	case reflect.Int32:
		return reflect.ValueOf(int32(randomInteger())), nil
	case reflect.Int64:
		return reflect.ValueOf(int64(randomInteger())), nil
	case reflect.Float32:
		return reflect.ValueOf(random.Float32()), nil
	case reflect.Float64:
		return reflect.ValueOf(random.Float64()), nil
	case reflect.Uint:
		return reflect.ValueOf(uint(randomInteger())), nil
	case reflect.Uint8:
		return reflect.ValueOf(uint8(randomInteger())), nil
	case reflect.Uint16:
		return reflect.ValueOf(uint16(randomInteger())), nil
	case reflect.Uint32:
		return reflect.ValueOf(uint32(randomInteger())), nil
	case reflect.Uint64:
		return reflect.ValueOf(uint64(randomInteger())), nil
	default:
		return reflect.Value{}, ErrUnhandledType
	}
}
package bloomtree
import (
"errors"
"math"
"sort"
"github.com/willf/bitset"
)
// CompactMultiProof is a Merkle multiproof over the bloom tree for one or
// more bloom-filter chunks.
type CompactMultiProof struct {
	// Chunks are the leaves of the bloom tree, i.e. the bloom filter values for given parts of the bloom filter.
	Chunks [][32]byte
	// Proof are the hashes needed to reconstruct the bloom tree root.
	Proof [][32]byte
	// ProofType is 255 if the element is present in the bloom filter. It is the index of the
	// absent bit index (within the element's mapped indices) if the element is not present.
	ProofType uint8
}
// newCompactMultiProof constructs a CompactMultiProof from its parts.
func newCompactMultiProof(chunks [][32]byte, proof [][32]byte, proofType uint8) *CompactMultiProof {
	multiproof := CompactMultiProof{}
	multiproof.Chunks = chunks
	multiproof.Proof = proof
	multiproof.ProofType = proofType
	return &multiproof
}
// CheckProofType reports whether the proof is a presence proof. Presence is
// encoded as maxK; any other value is the index of the absent bit index.
func CheckProofType(proofType uint8) bool {
	// Return the comparison directly instead of branching to true/false.
	return proofType == maxK
}
// checkChunkPresence reports whether every bit index in elemIndices is set
// in the bloom filter's bit array.
func checkChunkPresence(elemIndices []uint, bf *bitset.BitSet) bool {
	for _, idx := range elemIndices {
		// Negate the test directly rather than comparing a bool to true.
		if !bf.Test(idx) {
			return false
		}
	}
	return true
}
func computeChunkIndices(elemIndices []uint) []uint64 {
chunkIndices := make([]uint64, len(elemIndices))
for i, v := range elemIndices {
index := uint64(math.Floor(float64(v) / float64(chunkSize)))
chunkIndices[i] = index
}
return chunkIndices
}
// determineOrder2Hash hashes h1 and h2 as sibling nodes, ordering them by
// their node indices so the left (lower-index) child is always hashed first.
func determineOrder2Hash(ind1, indNeighbor int, h1, h2 [32]byte) [32]byte {
	if ind1 <= indNeighbor {
		return hashChild(h1, h2)
	}
	return hashChild(h2, h1)
}
// verifyProof reconstructs the bloom-tree root bottom-up from the supplied
// chunk (leaf) hashes and sibling hashes in the proof stream, and reports
// whether the reconstructed root equals root.
//
// chunkIndices are the leaf indices the multiproof covers; treeLength is the
// total number of nodes in the tree. Per layer, each known node is paired
// with its sibling: when both siblings are known (indMap entry == -1) both
// hashes come from the running blueNodes list, otherwise the sibling hash is
// consumed from the proof stream.
//
// NOTE(review): the duplicate-removal passes below only collapse *adjacent*
// duplicates, so they assume duplicated chunk/proof hashes arrive
// consecutively — confirm the prover guarantees that ordering.
func verifyProof(chunkIndices []uint64, multiproof *CompactMultiProof, root [32]byte, treeLength int) (bool, error) {
	var (
		pairs        []int
		newIndices   []uint64
		newBlueNodes [][32]byte
	)
	proof := multiproof.Proof
	blueNodes := multiproof.Chunks
	prevIndices := chunkIndices
	// indMap maps (index + siblingIndex) -> known index, or -1 when both
	// siblings of the pair are known.
	indMap := make(map[uint64]int)
	leavesPerLayer := uint64(treeLength + 1)
	currentLayer := uint64(0)
	height := int(math.Log2(float64(treeLength / 2)))
	// remove duplicates of blue nodes
	var uniqueBlueNodes [][32]byte
	uniqueBlueNodes = append(uniqueBlueNodes, blueNodes[0])
	for i := 1; i < len(blueNodes); i++ {
		if blueNodes[i] != blueNodes[i-1] {
			uniqueBlueNodes = append(uniqueBlueNodes, blueNodes[i])
		}
	}
	blueNodes = uniqueBlueNodes
	// remove duplicates of proof
	var uniqueProof [][32]byte
	if len(proof) != 0 {
		uniqueProof = append(uniqueProof, proof[0])
		for i := 1; i < len(proof); i++ {
			if proof[i] != proof[i-1] {
				uniqueProof = append(uniqueProof, proof[i])
			}
		}
	}
	proof = uniqueProof
	proofNum := 0
	// Walk the tree one layer at a time, from the leaves up to the root.
	for i := 0; i <= height; i++ {
		if len(newIndices) != 0 {
			// Parent indices for this layer: each resolved pair collapses
			// to a single parent index one level up.
			for j := 0; j < len(newIndices); j += 2 {
				prevIndices = append(prevIndices, newIndices[j]/2)
			}
			newIndices = nil
		}
		for _, val := range prevIndices {
			// The sibling of an even index is index+1, of an odd index is
			// index-1: flip the lowest bit.
			neighbor := val ^ 1
			if _, ok := indMap[val+neighbor]; ok {
				if indMap[val+neighbor] != int(val) {
					// Both siblings of this pair are known.
					indMap[val+neighbor] = -1
				}
			} else {
				indMap[val+neighbor] = int(val)
				pairs = append(pairs, int(val+neighbor))
			}
		}
		for k, v := range indMap {
			if v == -1 {
				a, b := order((k-1)/2, (k+1)/2)
				newIndices = append(newIndices, a, b)
			} else {
				a, b := order(uint64(v), k-uint64(v))
				newIndices = append(newIndices, a, b)
			}
		}
		// Deterministic pair order so blueNodes/proof are consumed in the
		// same order the prover emitted them.
		sort.Ints(pairs)
		blueNodeNum := 0
		for _, v := range pairs {
			value := uint64(v)
			if indMap[value] == -1 {
				// Both children known: consume two blue nodes.
				newBlueNodes = append(newBlueNodes, hashChild(blueNodes[blueNodeNum], blueNodes[blueNodeNum+1]))
				blueNodeNum += 2
			} else {
				// One child known: pair the blue node with a proof hash,
				// ordered by position.
				newBlueNodes = append(newBlueNodes, determineOrder2Hash(indMap[value], v-indMap[value], blueNodes[blueNodeNum], proof[proofNum]))
				blueNodeNum++
				proofNum++
			}
		}
		blueNodes = newBlueNodes
		newBlueNodes = nil
		blueNodeNum = 0
		indMap = make(map[uint64]int)
		pairs = nil
		leavesPerLayer /= 2
		currentLayer += leavesPerLayer
		prevIndices = nil
	}
	// After processing every layer, a single hash remains: the candidate root.
	if blueNodes[0] == root {
		return true, nil
	}
	return false, nil
}
// VerifyCompactMultiProof returns whether the multiproof provided is true or false.
// The proof type can be absence or presence: a presence proof covers every
// chunk the element maps to; an absence proof covers the single chunk
// (selected by ProofType) where the element's bit is unset.
func VerifyCompactMultiProof(element, seedValue []byte, multiproof *CompactMultiProof, root [32]byte, bf BloomFilter) (bool, error) {
	// find length of the tree: leaves are rounded up to a power of two.
	dbfBytes := len(bf.BitArray().Bytes())
	if dbfBytes == 0 {
		return false, errors.New("there was no bloom filter provided")
	}
	treeLeafs := int(math.Exp2(math.Ceil(math.Log2(float64(dbfBytes) / float64(chunkSize/64)))))
	treeLength := (treeLeafs * 2) - 1
	elemIndices := bf.MapElementToBF(element, seedValue)
	// NOTE(review): this copies only the slice header — elemIndicesCopy
	// aliases elemIndices. It is safe as written because the sort below only
	// happens on the presence path, which returns before elemIndicesCopy is
	// used, but a real copy would be more robust.
	elemIndicesCopy := elemIndices
	if CheckProofType(multiproof.ProofType) {
		// Presence proof: every mapped bit must be set, and the chunks
		// containing those bits must hash up to the root.
		sort.Slice(elemIndices, func(i, j int) bool { return elemIndices[i] < elemIndices[j] })
		chunkIndices := computeChunkIndices(elemIndices)
		present := checkChunkPresence(elemIndices, bf.BitArray())
		if present != true {
			return false, errors.New("the element is not inside the provided chunks for a presence proof")
		}
		verify, err := verifyProof(chunkIndices, multiproof, root, treeLength)
		if err != nil {
			return false, err
		}
		return verify, nil //verify, err
	}
	// Absence proof: ProofType selects which of the element's mapped bit
	// indices is claimed to be unset.
	index := []uint{elemIndicesCopy[int(multiproof.ProofType)]}
	chunkIndices := computeChunkIndices(index)
	present := checkChunkPresence(index, bf.BitArray())
	if present == true {
		return false, errors.New("the element cannot be inside the provided chunk for an absence proof")
	}
	verify, err := verifyProof(chunkIndices, multiproof, root, treeLength)
	if err != nil {
		return false, err
	}
	return verify, nil //verify, err
}
// Attribs package offers up a flexible approach to attaching Attribs that then be used to avoid
// the brittle parameter problem.
package attributes
// Attributes is an ordered list of name-value pairs, offering a flexible way
// to attach arbitrary named data and avoid brittle parameter lists.
type Attributes []NameValuePair

// Value returns the value of the first attribute with the given name, or nil
// if no attribute matches. Note an attribute stored with a nil value is
// indistinguishable from a missing one.
func (a *Attributes) Value(name string) interface{} {
	for _, attribute := range *a {
		if attribute.Name == name {
			return attribute.Value
		}
	}
	return nil
}

// Entries returns a slice with one element per requested name, in the order
// requested. Names that are not present leave the zero NameValuePair at
// their position.
func (a Attributes) Entries(entries ...string) Attributes {
	slice := make(Attributes, len(entries))
	for _, attribute := range a {
		for entryIndex, entry := range entries {
			if attribute.Name == entry {
				slice[entryIndex] = attribute
			}
		}
	}
	return slice
}

// Add returns the attributes with the given name/value pair appended.
func (a Attributes) Add(name string, value interface{}) Attributes {
	newEntry := NameValuePair{Name: name, Value: value}
	return append(a, newEntry)
}

// Join merges the given attributes into the receiver: existing names have
// their values replaced, new names are appended.
func (a Attributes) Join(attributes Attributes) Attributes {
	newAttributes := a
	for _, attrib := range attributes {
		if a.Has(attrib.Name) {
			newAttributes = newAttributes.Replace(attrib.Name, attrib.Value)
		} else {
			newAttributes = newAttributes.Add(attrib.Name, attrib.Value)
		}
	}
	return newAttributes
}

// Rename changes the name of every attribute named oldName to newName, in
// place, and returns the receiver.
func (a Attributes) Rename(oldName string, newName string) Attributes {
	for index, attrib := range a {
		if attrib.Name == oldName {
			a[index].Name = newName
		}
	}
	return a
}

// Has reports whether an attribute with the given name (and a non-nil value)
// exists.
func (a Attributes) Has(name string) bool {
	return a.Value(name) != nil
}

// Remove returns the attributes with the last attribute named name removed,
// leaving the receiver untouched. If no attribute matches, the receiver is
// returned unchanged. (The previous implementation panicked with an
// out-of-range slice expression when the name was absent, and the append
// over sub-slices of a clobbered the caller's backing array.)
func (a Attributes) Remove(name string) Attributes {
	removeIndex := -1
	for index, attrib := range a {
		if attrib.Name == name {
			removeIndex = index
		}
	}
	if removeIndex == -1 {
		return a
	}
	// Copy into a fresh slice so the receiver's backing array is preserved.
	result := make(Attributes, 0, len(a)-1)
	result = append(result, a[:removeIndex]...)
	result = append(result, a[removeIndex+1:]...)
	return result
}

// Replace sets the value of every attribute named name, in place, and
// returns the receiver.
func (a Attributes) Replace(name string, value interface{}) Attributes {
	for index, attrib := range a {
		if attrib.Name == name {
			a[index].Value = value
		}
	}
	return a
}

// ReplaceAttributes merges the incoming attributes into the receiver. It is
// behaviorally identical to Join and retained for backward compatibility.
func (a Attributes) ReplaceAttributes(incomingAttributes Attributes) Attributes {
	newAttributes := a
	for _, attrib := range incomingAttributes {
		if a.Has(attrib.Name) {
			newAttributes = newAttributes.Replace(attrib.Name, attrib.Value)
		} else {
			newAttributes = newAttributes.Add(attrib.Name, attrib.Value)
		}
	}
	return newAttributes
}

// NameValuePair associates a textual Name with a Value of any type.
type NameValuePair struct {
	Name  string
	Value interface{}
}
package poisson
//Option is function to update required setting
type Option func(*options)

// options collects the tunable settings for Poisson-disc sampling.
type options struct {
	//tries is a number of attempts to generate a new point
	tries int
	//minDistance is a minimum distance between two points
	minDistance float64
	//generator to use for random numbers
	generator RandomGenerator
	//areaFilter filters candidate point during 'generation' phase to get valid points inside of are. Allowed area boundaries - [0.0, 1.1] box
	areaFilter PointFilter
	//postFilter filters already valid point for additional condition
	postFilter PointFilter
	//points to fill the grid before processing
	points []*Point
	//startPoint is a point to start from
	startPoint *Point
}

// defaultOptions are the settings used when no Option overrides them:
// 30 tries per point, the unit-square area filter, and a generator seeded
// with 0 (deterministic by default).
var defaultOptions = options{
	tries:      30,
	areaFilter: NewRectangleFilter(0, 1),
	generator:  NewBasicGenerator(0),
}
//WithTries set number of tries to generate a new point
func WithTries(tries int) Option {
return func(o *options) {
o.tries = tries
}
}
//WithGenerator set a random number genetator
func WithGenerator(generator RandomGenerator) Option {
return func(o *options) {
o.generator = generator
}
}
//WithMinDistance set a minimum distance between any two points
func WithMinDistance(distance float64) Option {
return func(o *options) {
o.minDistance = distance
}
}
//WithAreaFilter set an area filter for candidate points
func WithAreaFilter(filter PointFilter) Option {
return func(o *options) {
o.areaFilter = filter
}
}
//WithPostFilter set a post filter to drop points that do not meet required condition
func WithPostFilter(filter PointFilter) Option {
return func(o *options) {
o.postFilter = filter
}
}
//WithStartPoint set a start point at x,y to start from
func WithStartPoint(x, y float64) Option {
return func(o *options) {
o.startPoint = &Point{
x,
y,
}
}
}
//WithPoints set points to will the grid before processing. N.B.: only one point per cell - only last one will be stored at grid
func WithPoints(points []*Point) Option {
return func(o *options) {
o.points = points
}
} | options.go | 0.774498 | 0.495911 | options.go | starcoder |
package journal
import (
"math/big"
"sort"
"strings"
)
// An Amount is an amount of a certain unit, e.g., currency or commodity.
// Number is expressed in the unit's smallest fraction (see Unit.Scale).
type Amount struct {
	Number big.Int
	Unit   Unit
}

// Zero returns whether the amount is zero.
func (a *Amount) Zero() bool {
	return isZero(&a.Number)
}

// Neg flips the sign of the amount in place.
func (a *Amount) Neg() {
	a.Number.Neg(&a.Number)
}

// Equal returns true if the amounts are equal: same unit and same number.
func (a *Amount) Equal(b *Amount) bool {
	return a.Unit == b.Unit && a.Number.Cmp(&b.Number) == 0
}

// String formats the amount as "<scaled number> <symbol>".
func (a *Amount) String() string {
	return decFormat(&a.Number, a.Unit.Scale) + " " + a.Unit.Symbol
}
// Unit describes a unit of account, e.g., a currency or commodity.
type Unit struct {
	// Symbol for the unit.
	// This should be all uppercase ASCII letters.
	Symbol string
	// Scale indicates the minimum fractional unit amount,
	// e.g. 100 means 0.01 is the smallest amount.
	// This should be a multiple of 10.
	Scale uint64
}

// String implements fmt.Stringer by returning the unit's symbol.
func (u Unit) String() string {
	symbol := u.Symbol
	return symbol
}
// A Balance represents a balance of amounts of various units.
// The zero value is ready for use.
//
// NOTE(review): the read-only methods (Empty, Clear, Has, Amount, Units,
// Amounts) tolerate a nil *Balance receiver, but Neg, Add, AddBal, get and
// Set do not — confirm whether nil-safety should be uniform.
type Balance struct {
	m map[Unit]*big.Int
}

// Gets the Int for a Unit, initializing it if needed.
// Must be called on a non-nil receiver.
func (b *Balance) get(u Unit) *big.Int {
	if b.m == nil {
		b.m = make(map[Unit]*big.Int)
	}
	n := b.m[u]
	if n == nil {
		n = newInt()
		b.m[u] = n
	}
	return n
}

// Add adds an amount to the balance.
func (b *Balance) Add(a *Amount) {
	n := b.get(a.Unit)
	n.Add(n, &a.Number)
}

// AddBal adds the amounts of the argument balance.
// A nil argument is treated as an empty balance.
func (b *Balance) AddBal(b2 *Balance) {
	if b2 == nil {
		return
	}
	for k, v := range b2.m {
		if isZero(v) {
			continue
		}
		n := b.get(k)
		n.Add(n, v)
	}
}

// Neg negates the sign of the balance in place.
func (b *Balance) Neg() {
	for _, v := range b.m {
		v.Neg(v)
	}
}

// Empty returns true if the balance is empty/zero.
func (b *Balance) Empty() bool {
	if b == nil {
		return true
	}
	for _, v := range b.m {
		if !isZero(v) {
			return false
		}
	}
	return true
}

// Clear clears the balance, making it empty/zero.
func (b *Balance) Clear() {
	if b == nil {
		return
	}
	for k := range b.m {
		delete(b.m, k)
	}
}

// Has returns true if the balance has a non-zero amount for the unit.
func (b *Balance) Has(u Unit) bool {
	if b == nil {
		return false
	}
	n := b.m[u]
	return n != nil && !isZero(n)
}

// Amount returns the amount of the given unit in the balance.
// The returned amount is independent memory from the balance.
func (b *Balance) Amount(u Unit) *Amount {
	a := &Amount{Unit: u}
	if b == nil {
		return a
	}
	if n := b.m[u]; n != nil && !isZero(n) {
		a.Number.Set(n)
	}
	return a
}

// Units returns the units with non-zero amounts in the balance,
// in unspecified order.
func (b *Balance) Units() []Unit {
	var us []Unit
	if b == nil {
		return us
	}
	for k, v := range b.m {
		if isZero(v) {
			continue
		}
		us = append(us, k)
	}
	return us
}

// Amounts returns the non-zero amounts in the balance.
// The amounts are sorted by unit symbol and are independent memory.
func (b *Balance) Amounts() []*Amount {
	var as []*Amount
	if b == nil {
		return as
	}
	for k, v := range b.m {
		if isZero(v) {
			continue
		}
		a := &Amount{Unit: k}
		a.Number.Set(v)
		as = append(as, a)
	}
	sort.Slice(as, func(i, j int) bool {
		return as[i].Unit.Symbol < as[j].Unit.Symbol
	})
	return as
}

// Equal returns true if the two balances are equal.
// Implemented as checking that b - b2 is empty.
func (b *Balance) Equal(b2 *Balance) bool {
	var b3 Balance
	b3.Set(b)
	b3.Neg()
	b3.AddBal(b2)
	return b3.Empty()
}

// Set sets the receiver balance to the argument balance.
// A nil argument clears the receiver.
func (b *Balance) Set(b2 *Balance) {
	b.Clear()
	if b2 == nil {
		return
	}
	for k, v := range b2.m {
		if !isZero(v) {
			b.get(k).Set(v)
		}
	}
}

// String renders the balance as a comma-separated list of amounts,
// or "0" when empty.
func (b *Balance) String() string {
	amts := b.Amounts()
	if len(amts) == 0 {
		return "0"
	}
	s := make([]string, len(amts))
	for i, a := range amts {
		s[i] = a.String()
	}
	return strings.Join(s, ", ")
}
// A Balances maps multiple accounts to their balances.
type Balances map[Account]*Balance

// Add adds an amount to an account, initializing the account's balance if
// the account is not yet in the map.
func (b Balances) Add(a Account, am *Amount) {
	bal, ok := b[a]
	if !ok {
		bal = new(Balance)
		b[a] = bal
	}
	bal.Add(am)
}

// Neg negates the signs of all the balances in place.
func (b Balances) Neg() {
	for _, bal := range b {
		bal.Neg()
	}
}

// Accounts returns all of the accounts with non-empty balances in sorted order.
func (b Balances) Accounts() []Account {
	// Renamed from `new`, which shadowed the builtin.
	var accounts []Account
	for a, bal := range b {
		if bal.Empty() {
			continue
		}
		accounts = append(accounts, a)
	}
	sort.Slice(accounts, func(i, j int) bool { return accounts[i] < accounts[j] })
	return accounts
}
func isZero(n *big.Int) bool {
return len(n.Bits()) == 0
} | journal/amount.go | 0.828245 | 0.488832 | amount.go | starcoder |
package automata
const (
	// MinSize is presumably the minimum allowed grid dimension — it is not
	// referenced in this file; confirm against callers.
	MinSize = 12
	// Steps is the fixed number of time steps simulated.
	Steps = 20
)

// Automata holds a square grid of walls and, for each of the Steps time
// steps, the particle configuration at that step. Only the final step is
// supplied by the caller; earlier steps are derived (see propagateBackward).
type Automata struct {
	gridCount int // number of rows/columns in the square grid
	walls     []*Wall
	currentStep int // index into particlesByStep currently displayed
	particlesByStep [Steps][]*Particle
}
// New builds an Automata from the final-step particle set and immediately
// derives every earlier step by propagating backward in time.
func New(gridRowCount int, walls []*Wall, particles []*Particle) *Automata {
	var byStep [Steps][]*Particle
	for step := 0; step < Steps-1; step++ {
		byStep[step] = make([]*Particle, 0)
	}
	byStep[Steps-1] = particles
	automata := &Automata{
		gridCount:       gridRowCount,
		walls:           walls,
		currentStep:     Steps - 1,
		particlesByStep: byStep,
	}
	automata.propagateBackward()
	return automata
}
// GetGridCount returns the number of rows/columns in the square grid.
func (a *Automata) GetGridCount() int {
	count := a.gridCount
	return count
}
// fightParticles resolves a collision between particles sharing a grid
// cell: each particle's value is multiplied by the value of every
// co-located particle whose current value exceeds its own cached value.
//
// NOTE(review): pv is captured before the inner loop while p.value (and the
// other particles' values) are mutated inside it, so the outcome depends on
// slice iteration order when more than two particles collide — confirm this
// is intended.
func fightParticles(particles []*Particle) {
	for i, p := range particles {
		pv := p.GetValue()
		for j, po := range particles {
			if i == j {
				continue
			}
			if po.GetValue() > pv {
				p.value *= po.value
			}
		}
	}
}
// propagateBackward derives every earlier step from the final
// (caller-supplied) one: starting at Steps-1 it repeatedly clones the
// particles, moves them one cell along their directions, applies wall
// effects, resolves same-cell collisions, recalculates each particle, and
// stores the result one step earlier.
func (a *Automata) propagateBackward() {
	for step := Steps - 1; step > 0; step-- {
		// Work on clones so the already-stored step is never mutated.
		newParticles := make([]*Particle, len(a.particlesByStep[step]))
		for i, p := range a.particlesByStep[step] {
			newParticles[i] = p.Clone()
		}
		// Move particles in their respective directions first
		for _, particle := range newParticles {
			d := particle.GetDirection()
			particle.position[0] += d[0]
			particle.position[1] += d[1]
			// Check if it hits a wall: clamp back into the grid and record
			// which wall was hit and how it reacts.
			wallReaction := WallEffectNone
			wallPosition := WallPositionBottom
			if particle.position[0] < 0 {
				particle.position[0] = 0
				wallReaction = a.walls[WallPositionLeft].reaction
				wallPosition = WallPositionLeft
			}
			if particle.position[0] >= a.gridCount {
				particle.position[0] = a.gridCount - 1
				wallReaction = a.walls[WallPositionRight].reaction
				wallPosition = WallPositionRight
			}
			if particle.position[1] < 0 {
				particle.position[1] = 0
				wallReaction = a.walls[WallPositionTop].reaction
				wallPosition = WallPositionTop
			}
			if particle.position[1] >= a.gridCount {
				particle.position[1] = a.gridCount - 1
				wallReaction = a.walls[WallPositionBottom].reaction
				wallPosition = WallPositionBottom
			}
			// Apply reaction if exists
			if wallReaction != WallEffectNone {
				switch wallReaction {
				case WallEffectTeleportOpposite:
					// Wrap around to the opposite edge, keeping direction.
					particle.SetDirectionModifier(1)
					switch wallPosition {
					case WallPositionLeft:
						particle.position[0] = a.gridCount - 1
					case WallPositionRight:
						particle.position[0] = 0
					case WallPositionTop:
						particle.position[1] = a.gridCount - 1
					case WallPositionBottom:
						particle.position[1] = 0
					}
				case WallEffectReverseDirection:
					particle.SetDirectionModifier(-1)
				}
			}
		}
		// Fight particles sharing the same coordinates
		for y := 0; y < a.gridCount; y++ {
			for x := 0; x < a.gridCount; x++ {
				var particles []*Particle
				for _, p := range newParticles {
					if p.position[0] == x && p.position[1] == y {
						particles = append(particles, p)
					}
				}
				if len(particles) > 1 {
					fightParticles(particles)
				}
			}
		}
		// Calculate particles
		for _, p := range newParticles {
			p.Calculate()
		}
		// Set the new particles to the previous step
		a.particlesByStep[step-1] = newParticles
	}
}
func (a *Automata) GetParticlesAtStep() []*Particle {
return a.particlesByStep[a.currentStep]
}
func (a *Automata) GetCurrentStep() int {
return a.currentStep
}
func (a *Automata) Advance() {
if a.currentStep >= Steps-1 {
return
}
a.currentStep++
}
func (a *Automata) Rewind() {
if a.currentStep == 0 {
return
}
a.currentStep--
} | _drafts/01/automata/automata.go | 0.603932 | 0.47993 | automata.go | starcoder |
package sample
import (
"math/rand"
"github.com/tendermint/spn/pkg/types"
monitoringc "github.com/tendermint/spn/x/monitoringc/types"
monitoringp "github.com/tendermint/spn/x/monitoringp/types"
)
// ConsensusStateNb is the number of sample consensus states available.
const ConsensusStateNb = 2

// ConsensusState returns a sample ConsensusState
// nb allows to select a consensus state with a matching validator set
// consensus state 0 match with validator set 0
// nb is 0 if above max value
//
// NOTE(review): several string literals below appear truncated/redacted to
// "<KEY>" — the original hash/root values must be restored before this
// compiles; also a negative nb would panic (slice index out of range).
func ConsensusState(nb int) types.ConsensusState {
	if nb >= ConsensusStateNb {
		nb = 0
	}
	return []types.ConsensusState{
		types.NewConsensusState(
			"2022-01-12T12:25:19.523109Z",
			"48C4C20AC5A7BD99A45AEBAB92E61F5667253A2C51CCCD84D20327D3CB8737C9",
			"<KEY>
		),
		types.NewConsensusState(
			"2022-01-12T14:15:12.981874Z",
			"<KEY>",
			"<KEY>
		),
	}[nb]
}
// ValidatorSet returns a sample ValidatorSet
// nb allows to select a consensus state with a matching validator set
// consensus state 0 match with validator set 0
// nb is 0 if above max value
//
// NOTE(review): like ConsensusState, a negative nb would panic.
func ValidatorSet(nb int) types.ValidatorSet {
	if nb >= ConsensusStateNb {
		nb = 0
	}
	return []types.ValidatorSet{
		types.NewValidatorSet(
			types.NewValidator("fYaox+q+N3XkGZdcQ5f3MH4/5J4oh6FRoYdW0vxRdIg=", 0, 100),
		),
		types.NewValidatorSet(
			types.NewValidator("rQMyKjkzXXUhYsAdII6fSlTkFdf24hiSPGrSCBub5Oc=", 0, 100),
		),
	}[nb]
}
// MonitoringpParams returns a randomized sample of params for the monitoring
// provider module.
func MonitoringpParams(r *rand.Rand) monitoringp.Params {
	// Draw in the same order as before so a seeded generator yields
	// identical params.
	blockHeight := monitoringp.DefaultLastBlockHeight + r.Int63n(10)
	unbondingPeriod := types.MinimalUnbondingPeriod + r.Int63n(types.DefaultUnbondingPeriod)
	revisionHeight := types.DefaultRevisionHeight + r.Uint64()
	return monitoringp.NewParams(
		blockHeight,
		monitoringp.DefaultConsumerChainID,
		ConsensusState(0),
		unbondingPeriod,
		revisionHeight,
	)
}
// MonitoringcParams returns a sample of params for the monitoring consumer module
func MonitoringcParams() monitoringc.Params {
return monitoringc.NewParams()
} | testutil/sample/monitoring.go | 0.696268 | 0.412471 | monitoring.go | starcoder |
package srg
import (
"fmt"
"strings"
"bytes"
"github.com/serulian/compiler/compilercommon"
"github.com/serulian/compiler/compilergraph"
"github.com/serulian/compiler/compilerutil"
"github.com/serulian/compiler/sourceshape"
)
// SRGTypeRef represents a type reference defined in the SRG.
type SRGTypeRef struct {
	compilergraph.GraphNode
	srg *SRG // The parent SRG.
}

// TypeRefKind classifies the syntactic form of a type reference.
type TypeRefKind int

const (
	typeRefUnknown TypeRefKind = iota // An unknown type.
	TypeRefNullable                   // A nullable type.
	TypeRefStream                     // A stream type.
	TypeRefSlice                      // A slice type.
	TypeRefMapping                    // A mapping type.
	TypeRefPath                       // A normal path type. May have generics.
	TypeRefVoid                       // A void type reference.
	TypeRefStruct                     // A struct type reference.
	TypeRefAny                        // An any type reference.
)
// GetTypeRef wraps the given type reference graph node as an SRGTypeRef.
func (g *SRG) GetTypeRef(node compilergraph.GraphNode) SRGTypeRef {
	return SRGTypeRef{
		GraphNode: node,
		srg:       g,
	}
}
// GetTypeReferences returns all the type references in the SRG.
func (g *SRG) GetTypeReferences() []SRGTypeRef {
	var refs []SRGTypeRef
	iterator := g.findAllNodes(sourceshape.NodeTypeTypeReference).BuildNodeIterator()
	for iterator.Next() {
		refs = append(refs, SRGTypeRef{iterator.Node(), g})
	}
	return refs
}
// SourceRange returns the source range for this type ref, if known.
func (t SRGTypeRef) SourceRange() (compilercommon.SourceRange, bool) {
	sourceRange, ok := t.srg.SourceRangeOf(t.GraphNode)
	return sourceRange, ok
}
// ResolutionName returns the last piece of the ResolutionPath.
// Panics if this is not a RefKind of TypeRefPath.
func (t SRGTypeRef) ResolutionName() string {
	path := t.ResolutionPath()
	// Take everything after the final '.' without splitting the entire path
	// into pieces (resolving the old "optimize this" TODO). LastIndex
	// returns -1 for a single-segment path, so idx+1 == 0 yields the whole
	// path in that case.
	idx := strings.LastIndex(path, ".")
	return path[idx+1:]
}
// ResolutionPath returns the full resolution path for this type reference.
// Panics if this is not a RefKind of TypeRefPath.
func (t SRGTypeRef) ResolutionPath() string {
	compilerutil.DCHECK(func() bool { return t.RefKind() == TypeRefPath }, "Expected type ref path")

	var resolvePathPieces = make([]string, 0)
	// Start from the root of the identifier path and walk the access chain.
	var currentPath compilergraph.GraphNode = t.GraphNode.
		GetNode(sourceshape.NodeTypeReferencePath).
		GetNode(sourceshape.NodeIdentifierPathRoot)

	for {
		// Add the path piece to the array.
		name, hasName := currentPath.TryGet(sourceshape.NodeIdentifierAccessName)
		if !hasName {
			break
		}
		// Prepend: we walk from the rightmost access back to the root.
		resolvePathPieces = append([]string{name}, resolvePathPieces...)

		// If there is a source, continue searching.
		source, found := currentPath.TryGetNode(sourceshape.NodeIdentifierAccessSource)
		if !found {
			break
		}
		currentPath = source
	}

	return strings.Join(resolvePathPieces, ".")
}
// ResolveType attempts to resolve the type path referenced by this type ref,
// first as a type under the parent module and, failing that for a
// single-segment name, as a generic declared on the enclosing type or member.
// Panics if this is not a RefKind of TypeRefPath.
func (t SRGTypeRef) ResolveType() (TypeResolutionResult, bool) {
	// Find the parent module.
	source := compilercommon.InputSource(t.GraphNode.Get(sourceshape.NodePredicateSource))
	srgModule, found := t.srg.FindModuleBySource(source)
	if !found {
		panic(fmt.Sprintf("Unknown parent module: %s", source))
	}

	// Resolve the type path under the module.
	resolutionPath := t.ResolutionPath()
	resolvedType, typeFound := srgModule.ResolveTypePath(resolutionPath)
	if typeFound {
		return resolvedType, true
	}

	// If not found and the path is a single name, try to resolve as a generic
	// under a parent function or type.
	if strings.ContainsRune(resolutionPath, '.') {
		// Not a single name.
		return TypeResolutionResult{}, false
	}

	containingFilter := func(q compilergraph.GraphQuery) compilergraph.Query {
		// For this filter, we check if the defining type (or type member) if the
		// generic is the same type (or type member) containing the typeref. To do so,
		// we perform a check that the start rune and end rune of the definition
		// contains the range of the start and end rune, respectively, of the typeref. Since
		// we know both nodes are in the same module, and the SRG is a tree, this validates
		// that we are in the correct scope without having to walk the tree upward.
		startRune := t.GraphNode.GetValue(sourceshape.NodePredicateStartRune).Int()
		endRune := t.GraphNode.GetValue(sourceshape.NodePredicateEndRune).Int()

		return q.
			In(sourceshape.NodeTypeDefinitionGeneric, sourceshape.NodePredicateTypeMemberGeneric).
			HasWhere(sourceshape.NodePredicateStartRune, compilergraph.WhereLTE, startRune).
			HasWhere(sourceshape.NodePredicateEndRune, compilergraph.WhereGTE, endRune)
	}

	resolvedGenericNode, genericFound := t.srg.layer.
		StartQuery().                                            // Find a node...
		Has(sourceshape.NodeGenericPredicateName, resolutionPath). // With the generic name..
		Has(sourceshape.NodePredicateSource, string(source)).      // That is in this module...
		IsKind(sourceshape.NodeTypeGeneric).                       // That is a generic...
		FilterBy(containingFilter).                                // Filter by whether its defining type or member contains this typeref.
		TryGetNode()

	return resultForTypeOrGeneric(SRGTypeOrGeneric{resolvedGenericNode, t.srg}), genericFound
}
// InnerReference returns the inner type reference, if this is a nullable,
// stream, slice or mapping type.
func (t SRGTypeRef) InnerReference() (SRGTypeRef, bool) {
	node, ok := t.GraphNode.TryGetNode(sourceshape.NodeTypeReferenceInnerType)
	if !ok {
		return SRGTypeRef{}, false
	}
	return SRGTypeRef{node, t.srg}, true
}
// Generics returns the generics defined on this type ref.
// Panics if this is not a RefKind of TypeRefPath.
func (t SRGTypeRef) Generics() []SRGTypeRef {
	return t.subReferences(sourceshape.NodeTypeReferenceGeneric)
}

// HasGenerics returns whether this type reference carries generic arguments.
func (t SRGTypeRef) HasGenerics() bool {
	_, hasGeneric := t.GraphNode.TryGetNode(sourceshape.NodeTypeReferenceGeneric)
	return hasGeneric
}

// Parameters returns the parameters defined on this type ref.
// Panics if this is not a RefKind of TypeRefPath.
func (t SRGTypeRef) Parameters() []SRGTypeRef {
	return t.subReferences(sourceshape.NodeTypeReferenceParameter)
}

// HasParameters returns whether this type reference carries parameters.
func (t SRGTypeRef) HasParameters() bool {
	_, hasParameter := t.GraphNode.TryGetNode(sourceshape.NodeTypeReferenceParameter)
	return hasParameter
}
// subReferences returns the sub-references reachable via the given
// predicate; the result is empty (but non-nil) when there are none.
func (t SRGTypeRef) subReferences(predicate compilergraph.Predicate) []SRGTypeRef {
	refs := make([]SRGTypeRef, 0)
	iterator := t.GraphNode.StartQuery().Out(predicate).BuildNodeIterator()
	for iterator.Next() {
		refs = append(refs, SRGTypeRef{iterator.Node(), t.srg})
	}
	return refs
}
// String returns the human-readable string form of this type reference,
// e.g. "Foo<Bar>(Baz)?" for a nullable generic path with parameters.
func (t SRGTypeRef) String() string {
	nodeKind := t.GraphNode.Kind().(sourceshape.NodeType)

	// Render the wrapped reference first for the composite kinds.
	innerReferenceString := "?"
	if innerReference, hasInnerReference := t.InnerReference(); hasInnerReference {
		innerReferenceString = innerReference.String()
	}

	switch nodeKind {
	case sourceshape.NodeTypeVoid:
		return "void"

	case sourceshape.NodeTypeAny:
		return "any"

	case sourceshape.NodeTypeStructReference:
		return "struct"

	case sourceshape.NodeTypeStream:
		return innerReferenceString + "*"

	case sourceshape.NodeTypeSlice:
		return "[]" + innerReferenceString

	case sourceshape.NodeTypeMapping:
		return "[]{" + innerReferenceString + "}"

	case sourceshape.NodeTypeNullable:
		return innerReferenceString + "?"

	case sourceshape.NodeTypeTypeReference:
		// Path reference: name, then <generics>, then (parameters).
		var buffer bytes.Buffer
		buffer.WriteString(t.ResolutionName())

		generics := t.Generics()
		if len(generics) > 0 {
			buffer.WriteString("<")
			for index, generic := range generics {
				if index > 0 {
					buffer.WriteString(", ")
				}

				buffer.WriteString(generic.String())
			}
			buffer.WriteString(">")
		}

		parameters := t.Parameters()
		if len(parameters) > 0 {
			buffer.WriteString("(")
			for index, parameter := range parameters {
				if index > 0 {
					buffer.WriteString(", ")
				}

				buffer.WriteString(parameter.String())
			}
			buffer.WriteString(")")
		}

		return buffer.String()

	default:
		panic(fmt.Sprintf("Unknown kind of type reference node %v", nodeKind))
	}
}
// RefKind returns the kind of this type reference.
func (t SRGTypeRef) RefKind() TypeRefKind {
nodeKind := t.GraphNode.Kind().(sourceshape.NodeType)
switch nodeKind {
case sourceshape.NodeTypeVoid:
return TypeRefVoid
case sourceshape.NodeTypeAny:
return TypeRefAny
case sourceshape.NodeTypeStructReference:
return TypeRefStruct
case sourceshape.NodeTypeStream:
return TypeRefStream
case sourceshape.NodeTypeSlice:
return TypeRefSlice
case sourceshape.NodeTypeMapping:
return TypeRefMapping
case sourceshape.NodeTypeNullable:
return TypeRefNullable
case sourceshape.NodeTypeTypeReference:
return TypeRefPath
default:
panic(fmt.Sprintf("Unknown kind of type reference node %v", nodeKind))
}
} | graphs/srg/typeref.go | 0.682468 | 0.472562 | typeref.go | starcoder |
package iterator
import q "github.com/janderland/fdbq/keyval"
// Bool asserts the current element of the tuple is of type Bool.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Bool() (out q.Bool, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Bool); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustBool does the same thing as Bool, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustBool() q.Bool {
	val, err := x.Bool()
	if err != nil {
		panic(err)
	}
	return val
}
// Int asserts the current element of the tuple is of type Int.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Int() (out q.Int, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Int); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustInt does the same thing as Int, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustInt() q.Int {
	val, err := x.Int()
	if err != nil {
		panic(err)
	}
	return val
}
// Uint asserts the current element of the tuple is of type Uint.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Uint() (out q.Uint, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Uint); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustUint does the same thing as Uint, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustUint() q.Uint {
	val, err := x.Uint()
	if err != nil {
		panic(err)
	}
	return val
}
// BigInt asserts the current element of the tuple is of type BigInt.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) BigInt() (out q.BigInt, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.BigInt); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustBigInt does the same thing as BigInt, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustBigInt() q.BigInt {
	val, err := x.BigInt()
	if err != nil {
		panic(err)
	}
	return val
}
// Float asserts the current element of the tuple is of type Float.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Float() (out q.Float, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Float); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustFloat does the same thing as Float, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustFloat() q.Float {
	val, err := x.Float()
	if err != nil {
		panic(err)
	}
	return val
}
// String asserts the current element of the tuple is of type String.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) String() (out q.String, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.String); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustString does the same thing as String, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustString() q.String {
	val, err := x.String()
	if err != nil {
		panic(err)
	}
	return val
}
// Bytes asserts the current element of the tuple is of type Bytes.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Bytes() (out q.Bytes, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Bytes); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustBytes does the same thing as Bytes, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustBytes() q.Bytes {
	val, err := x.Bytes()
	if err != nil {
		panic(err)
	}
	return val
}
// UUID asserts the current element of the tuple is of type UUID.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) UUID() (out q.UUID, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.UUID); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustUUID does the same thing as UUID, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustUUID() q.UUID {
	val, err := x.UUID()
	if err != nil {
		panic(err)
	}
	return val
}
// Tuple asserts the current element of the tuple is of type Tuple.
// If the type assertion fails a ConversionError is returned. If the
// iterator points beyond the end of the tuple, ShortTupleError is
// panicked (not returned); the panic is recovered by the wrapping
// call to ReadTuple. Otherwise, the element is returned and the
// iterator is pointed at the next element.
func (x *TupleIterator) Tuple() (out q.Tuple, err error) {
	if x.i >= len(x.t) {
		// Out-of-range access is signaled via panic, not a returned error.
		panic(ShortTupleError)
	}
	var ok bool
	if out, ok = x.t[x.i].(q.Tuple); !ok {
		err = ConversionError{
			InValue: x.t[x.i],
			OutType: out,
			Index:   x.i,
		}
		return
	}
	x.i++
	return
}
// MustTuple does the same thing as Tuple, except it panics with any
// errors instead of returning them. These panics are recovered by
// the wrapping call to ReadTuple and returned by that function.
func (x *TupleIterator) MustTuple() q.Tuple {
	val, err := x.Tuple()
	if err != nil {
		panic(err)
	}
	return val
}
// Package geomfn contains functions that are used for geometry-based builtins.
package geomfn
import "github.com/twpayne/go-geom"
// applyCoordFunc copies one coordinate from src onto dst, possibly
// transforming it. dst and src each hold exactly one coordinate
// (Layout.Stride() float64s) within a FlatCoords array.
type applyCoordFunc func(l geom.Layout, dst []float64, src []float64) error
// applyOnCoords applies f to each coordinate of flatCoords and returns
// a freshly allocated coordinate array of the same length; flatCoords
// itself is left untouched.
func applyOnCoords(flatCoords []float64, l geom.Layout, f applyCoordFunc) ([]float64, error) {
	stride := l.Stride()
	out := make([]float64, len(flatCoords))
	for offset := 0; offset < len(flatCoords); offset += stride {
		end := offset + stride
		if err := f(l, out[offset:end], flatCoords[offset:end]); err != nil {
			return nil, err
		}
	}
	return out, nil
}
// applyOnCoordsForGeomT applies the applyCoordFunc on each coordinate in the geom.T,
// returning a new geometry of the same concrete type, SRID and structure
// (ends/endss are shared with the input). GeometryCollections are handled
// recursively via applyOnCoordsForGeometryCollection. Returns
// geom.ErrUnsupportedType for unrecognized geometry types.
func applyOnCoordsForGeomT(g geom.T, f applyCoordFunc) (geom.T, error) {
	if geomCollection, ok := g.(*geom.GeometryCollection); ok {
		return applyOnCoordsForGeometryCollection(geomCollection, f)
	}
	newCoords, err := applyOnCoords(g.FlatCoords(), g.Layout(), f)
	if err != nil {
		return nil, err
	}
	// Rebuild the same concrete geometry type around the mapped coordinates,
	// preserving the SRID of the original.
	switch t := g.(type) {
	case *geom.Point:
		g = geom.NewPointFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.LineString:
		g = geom.NewLineStringFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.Polygon:
		g = geom.NewPolygonFlat(t.Layout(), newCoords, t.Ends()).SetSRID(g.SRID())
	case *geom.MultiPoint:
		g = geom.NewMultiPointFlat(t.Layout(), newCoords).SetSRID(g.SRID())
	case *geom.MultiLineString:
		g = geom.NewMultiLineStringFlat(t.Layout(), newCoords, t.Ends()).SetSRID(g.SRID())
	case *geom.MultiPolygon:
		g = geom.NewMultiPolygonFlat(t.Layout(), newCoords, t.Endss()).SetSRID(g.SRID())
	default:
		return nil, geom.ErrUnsupportedType{Value: g}
	}
	return g, nil
}
// applyOnCoordsForGeometryCollection applies the applyCoordFunc on each coordinate
// inside a geometry collection, returning a copied over geom.T.
func applyOnCoordsForGeometryCollection(
geomCollection *geom.GeometryCollection, f applyCoordFunc,
) (*geom.GeometryCollection, error) {
res := geom.NewGeometryCollection()
for _, subG := range geomCollection.Geoms() {
subGeom, err := applyOnCoordsForGeomT(subG, f)
if err != nil {
return nil, err
}
if err := res.Push(subGeom); err != nil {
return nil, err
}
}
return res, nil
} | pkg/geo/geomfn/geomfn.go | 0.857156 | 0.515925 | geomfn.go | starcoder |
package design
import (
"fmt"
"math"
"regexp"
"time"
regen "github.com/zach-klippenstein/goregen"
)
// exampleGenerator generates a random example value for an attribute,
// honoring the validations (length, enum, format, pattern, min/max)
// declared on its definition.
type exampleGenerator struct {
	a *AttributeDefinition // attribute whose example is being generated
	r *RandomGenerator     // source of randomness for all drawn values
}
// newExampleGenerator returns an example generator for the given
// attribute that draws randomness from the given random generator.
func newExampleGenerator(a *AttributeDefinition, r *RandomGenerator) *exampleGenerator {
	return &exampleGenerator{a: a, r: r}
}
// maxAttempts is the maximum number of candidates drawn when searching
// for an example that satisfies all validations before giving up.
const maxAttempts = 500

// Generate generates a random value that attempts to satisfy the
// attribute's validations.
//
// Precedence:
//  1. length validations drive the size of the generated value,
//  2. enum validations pick one of the fixed candidate values,
//  3. otherwise format, pattern and min/max validations are combined by
//     rejection sampling: up to maxAttempts candidates are drawn and the
//     first one satisfying all remaining validations is returned.
//
// If no satisfying candidate is found within maxAttempts, an example
// generated from the bare type (ignoring validations) is returned.
func (eg *exampleGenerator) Generate(seen []string) interface{} {
	// Randomize array length first, since that's from higher level
	if eg.hasLengthValidation() {
		return eg.generateValidatedLengthExample(seen)
	}
	// Enum should dominate, because the potential "examples" are fixed
	if eg.hasEnumValidation() {
		return eg.generateValidatedEnumExample()
	}
	// Loop until a satisfying example is generated or attempts run out.
	hasFormat, hasPattern, hasMinMax := eg.hasFormatValidation(), eg.hasPatternValidation(), eg.hasMinMaxValidation()
	attempts := 0
	for attempts < maxAttempts {
		attempts++
		var example interface{}
		// Format comes first, since it initiates the example
		if hasFormat {
			example = eg.generateFormatExample()
		}
		// Validate with the rest of the matchers; if not satisfied, redo.
		if hasPattern {
			if example == nil {
				example = eg.generateValidatedPatternExample()
			} else if !eg.checkPatternValidation(example) {
				continue
			}
		}
		if hasMinMax {
			if example == nil {
				example = eg.generateValidatedMinMaxValueExample()
			} else if !eg.checkMinMaxValueValidation(example) {
				continue
			}
		}
		if example == nil {
			example = eg.a.Type.GenerateExample(eg.r, seen)
		}
		return example
	}
	return eg.a.Type.GenerateExample(eg.r, seen)
}
// ExampleLength returns a random length for the example, honoring the
// attribute's MinLength/MaxLength validations when present. Without a
// length validation the result is a random count in [1, 3]. The result
// is capped at maxExampleLength and forced to at least 1 unless
// MaxLength is explicitly 0. Panics if MinLength > MaxLength.
func (eg *exampleGenerator) ExampleLength() int {
	if eg.hasLengthValidation() {
		// +Inf / -Inf act as sentinels for "no bound on this side".
		minlength, maxlength := math.Inf(1), math.Inf(-1)
		if eg.a.Validation.MinLength != nil {
			minlength = float64(*eg.a.Validation.MinLength)
		}
		if eg.a.Validation.MaxLength != nil {
			maxlength = float64(*eg.a.Validation.MaxLength)
		}
		count := 0
		if math.IsInf(minlength, 1) {
			// Only MaxLength set: stay at or slightly below the maximum.
			count = int(maxlength) - (eg.r.Int() % 3)
		} else if math.IsInf(maxlength, -1) {
			// Only MinLength set: stay at or slightly above the minimum.
			count = int(minlength) + (eg.r.Int() % 3)
		} else if minlength < maxlength {
			// Both bounds set: pick in [min, min+diff), capping the spread.
			diff := int(maxlength - minlength)
			if diff > maxExampleLength {
				diff = maxExampleLength
			}
			count = int(minlength) + (eg.r.Int() % diff)
		} else if minlength == maxlength {
			count = int(minlength)
		} else {
			panic("Validation: MinLength > MaxLength")
		}
		if count > maxExampleLength {
			count = maxExampleLength
		}
		// Guard against non-positive lengths (rand may push below 0 above).
		if count <= 0 && maxlength != 0 {
			count = 1
		}
		return count
	}
	return eg.r.Int()%3 + 1
}
// hasLengthValidation reports whether the attribute constrains the
// minimum and/or maximum length of its values.
func (eg *exampleGenerator) hasLengthValidation() bool {
	v := eg.a.Validation
	return v != nil && (v.MinLength != nil || v.MaxLength != nil)
}
// maxExampleLength caps the size of any generated example.
const maxExampleLength = 10

// generateValidatedLengthExample generates an example whose size honors
// the length validations: a random string of that many characters for
// non-array types, otherwise an array of that many element examples.
func (eg *exampleGenerator) generateValidatedLengthExample(seen []string) interface{} {
	n := eg.ExampleLength()
	if !eg.a.Type.IsArray() {
		return eg.r.faker.Characters(n)
	}
	elemType := eg.a.Type.ToArray().ElemType
	examples := make([]interface{}, n)
	for i := range examples {
		examples[i] = elemType.GenerateExample(eg.r, seen)
	}
	return examples
}
// hasEnumValidation reports whether the attribute restricts its values
// to a fixed set of candidates.
func (eg *exampleGenerator) hasEnumValidation() bool {
	if eg.a.Validation == nil {
		return false
	}
	return len(eg.a.Validation.Values) > 0
}
// generateValidatedEnumExample returns one of the enum candidate values
// picked at random, or nil when no enum validation is set.
func (eg *exampleGenerator) generateValidatedEnumExample() interface{} {
	if !eg.hasEnumValidation() {
		return nil
	}
	candidates := eg.a.Validation.Values
	return candidates[eg.r.Int()%len(candidates)]
}
// hasFormatValidation reports whether the attribute carries a named
// format validation (email, hostname, uri, ...).
func (eg *exampleGenerator) hasFormatValidation() bool {
	v := eg.a.Validation
	return v != nil && v.Format != ""
}
// generateFormatExample returns a random example matching the format
// validation of the attribute, or nil if none is set. Panics on an
// unknown format name (a bug).
//
// NOTE(review): the map literal below evaluates every entry — and thus
// consumes randomness for every supported format — on each call before
// one is selected. A switch would draw only the needed value; confirm
// no caller depends on the number of random draws before changing it.
func (eg *exampleGenerator) generateFormatExample() interface{} {
	if !eg.hasFormatValidation() {
		return nil
	}
	format := eg.a.Validation.Format
	if res, ok := map[string]interface{}{
		"email":     eg.r.faker.Email(),
		"hostname":  eg.r.faker.DomainName() + "." + eg.r.faker.DomainSuffix(),
		"date-time": time.Unix(int64(eg.r.Int())%1454957045, 0).Format(time.RFC3339), // to obtain a "fixed" rand
		"ipv4":      eg.r.faker.IPv4Address().String(),
		"ipv6":      eg.r.faker.IPv6Address().String(),
		"ip":        eg.r.faker.IPv4Address().String(),
		"uri":       eg.r.faker.URL(),
		"mac": func() string {
			res, err := regen.Generate(`([0-9A-F]{2}-){5}[0-9A-F]{2}`)
			if err != nil {
				return "12-34-56-78-9A-BC"
			}
			return res
		}(),
		"cidr":    "192.168.100.14/24",
		"regexp":  eg.r.faker.Characters(3) + ".*",
		"rfc1123": time.Unix(int64(eg.r.Int())%1454957045, 0).Format(time.RFC1123), // to obtain a "fixed" rand
	}[format]; ok {
		return res
	}
	panic("Validation: unknown format '" + format + "'") // bug
}
// hasPatternValidation reports whether the attribute constrains its
// values with a regular-expression pattern.
func (eg *exampleGenerator) hasPatternValidation() bool {
	v := eg.a.Validation
	return v != nil && v.Pattern != ""
}
// checkPatternValidation reports whether example (rendered with
// fmt.Sprint) matches the attribute's pattern validation. It returns
// true when no pattern validation is set. Panics if the pattern is not
// a valid regular expression.
func (eg *exampleGenerator) checkPatternValidation(example interface{}) bool {
	if !eg.hasPatternValidation() {
		return true
	}
	pattern := eg.a.Validation.Pattern
	re, err := regexp.Compile(pattern)
	if err != nil {
		panic("Validation: invalid pattern '" + pattern + "'")
	}
	// Idiom fix: return the match result directly instead of the
	// original if !match { return false }; return true.
	return re.MatchString(fmt.Sprint(example))
}
// generateValidatedPatternExample generates a random value satisfying
// the attribute's pattern validation, or nil when none is set. If
// multiple patterns are given only one is used. When the pattern cannot
// be turned into a generator, a random name is returned instead.
func (eg *exampleGenerator) generateValidatedPatternExample() interface{} {
	if !eg.hasPatternValidation() {
		// Bug fix: the original returned the boolean false here, so
		// callers checking for nil ("no example") received a non-nil
		// interface{} holding false. nil is consistent with the other
		// generateValidated* helpers.
		return nil
	}
	pattern := eg.a.Validation.Pattern
	example, err := regen.Generate(pattern)
	if err != nil {
		return eg.r.faker.Name()
	}
	return example
}
// hasMinMaxValidation reports whether the attribute constrains its
// numeric values with a minimum and/or maximum.
func (eg *exampleGenerator) hasMinMaxValidation() bool {
	v := eg.a.Validation
	if v == nil {
		return false
	}
	return v.Minimum != nil || v.Maximum != nil
}
// checkMinMaxValueValidation reports whether example lies within the
// attribute's Minimum/Maximum validations. Values that are neither int
// nor float64 pass unconditionally, as does everything when no min/max
// validation is set.
func (eg *exampleGenerator) checkMinMaxValueValidation(example interface{}) bool {
	if !eg.hasMinMaxValidation() {
		return true
	}
	if min := eg.a.Validation.Minimum; min != nil {
		switch v := example.(type) {
		case int:
			if float64(v) < *min {
				return false
			}
		case float64:
			if v < *min {
				return false
			}
		}
	}
	if max := eg.a.Validation.Maximum; max != nil {
		switch v := example.(type) {
		case int:
			if float64(v) > *max {
				return false
			}
		case float64:
			if v > *max {
				return false
			}
		}
	}
	return true
}
// generateValidatedMinMaxValueExample returns a random value honoring
// the Minimum/Maximum validations, or nil when none is set. Integer
// attributes yield ints, everything else float64. Panics if
// Minimum > Maximum.
//
// NOTE(review): when only one bound is set, the modulo/scaling
// arithmetic below assumes non-negative bounds: `eg.r.Int()%int(min)`
// panics when 0 < min < 1 (int(min) truncates to 0), and the float
// branches can fall outside a negative bound — confirm bounds are
// non-negative in practice.
func (eg *exampleGenerator) generateValidatedMinMaxValueExample() interface{} {
	if !eg.hasMinMaxValidation() {
		return nil
	}
	// +Inf / -Inf act as sentinels for "no bound on this side".
	min, max := math.Inf(1), math.Inf(-1)
	if eg.a.Validation.Minimum != nil {
		min = *eg.a.Validation.Minimum
	}
	if eg.a.Validation.Maximum != nil {
		max = *eg.a.Validation.Maximum
	}
	if math.IsInf(min, 1) {
		// Only Maximum set.
		if eg.a.Type.Kind() == IntegerKind {
			if max == 0 {
				return int(max) - eg.r.Int()%3
			}
			return eg.r.Int() % int(max)
		}
		return eg.r.Float64() * max
	} else if math.IsInf(max, -1) {
		// Only Minimum set.
		if eg.a.Type.Kind() == IntegerKind {
			if min == 0 {
				return int(min) + eg.r.Int()%3
			}
			return int(min) + eg.r.Int()%int(min)
		}
		return min + eg.r.Float64()*min
	} else if min < max {
		// Both bounds set: pick in [min, max).
		if eg.a.Type.Kind() == IntegerKind {
			return int(min) + eg.r.Int()%int(max-min)
		}
		return min + eg.r.Float64()*(max-min)
	} else if min == max {
		if eg.a.Type.Kind() == IntegerKind {
			return int(min)
		}
		return min
	}
	panic("Validation: Min > Max")
}
package gtime
import (
"bytes"
"strconv"
"strings"
"github.com/gogf/gf/text/gregex"
)
var (
	// formats maps each supported PHP date()-style specifier to the Go
	// reference-time layout used to render it. Entries whose layout
	// contains "=x=" markers ('j', 'G', 'u') or a weekday/day name
	// ('w', 'N', 'S') are post-processed by Time.Format; entries with
	// an empty layout ('z', 'W', 't', 'U') are computed directly there.
	// Refer: http://php.net/manual/en/function.date.php
	formats = map[byte]string{
		'd': "02",                        // Day: Day of the month, 2 digits with leading zeros. Eg: 01 to 31.
		'D': "Mon",                       // Day: A textual representation of a day, three letters. Eg: Mon through Sun.
		'w': "Monday",                    // Day: Numeric representation of the day of the week. Eg: 0 (for Sunday) through 6 (for Saturday).
		'N': "Monday",                    // Day: ISO-8601 numeric representation of the day of the week. Eg: 1 (for Monday) through 7 (for Sunday).
		'j': "=j=02",                     // Day: Day of the month without leading zeros. Eg: 1 to 31.
		'S': "02",                        // Day: English ordinal suffix for the day of the month, 2 characters. Eg: st, nd, rd or th. Works well with j.
		'l': "Monday",                    // Day: A full textual representation of the day of the week. Eg: Sunday through Saturday.
		'z': "",                          // Day: The day of the year (starting from 0). Eg: 0 through 365.
		'W': "",                          // Week: ISO-8601 week number of year, weeks starting on Monday. Eg: 42 (the 42nd week in the year).
		'F': "January",                   // Month: A full textual representation of a month, such as January or March. Eg: January through December.
		'm': "01",                        // Month: Numeric representation of a month, with leading zeros. Eg: 01 through 12.
		'M': "Jan",                       // Month: A short textual representation of a month, three letters. Eg: Jan through Dec.
		'n': "1",                         // Month: Numeric representation of a month, without leading zeros. Eg: 1 through 12.
		't': "",                          // Month: Number of days in the given month. Eg: 28 through 31.
		'Y': "2006",                      // Year: A full numeric representation of a year, 4 digits. Eg: 1999 or 2003.
		'y': "06",                        // Year: A two digit representation of a year. Eg: 99 or 03.
		'a': "pm",                        // Time: Lowercase Ante meridiem and Post meridiem. Eg: am or pm.
		'A': "PM",                        // Time: Uppercase Ante meridiem and Post meridiem. Eg: AM or PM.
		'g': "3",                         // Time: 12-hour format of an hour without leading zeros. Eg: 1 through 12.
		'G': "=G=15",                     // Time: 24-hour format of an hour without leading zeros. Eg: 0 through 23.
		'h': "03",                        // Time: 12-hour format of an hour with leading zeros. Eg: 01 through 12.
		'H': "15",                        // Time: 24-hour format of an hour with leading zeros. Eg: 00 through 23.
		'i': "04",                        // Time: Minutes with leading zeros. Eg: 00 to 59.
		's': "05",                        // Time: Seconds with leading zeros. Eg: 00 through 59.
		'u': "=u=.000",                   // Time: Milliseconds. Eg: 234, 678.
		'U': "",                          // Time: Seconds since the Unix Epoch (January 1 1970 00:00:00 GMT).
		'O': "-0700",                     // Zone: Difference to Greenwich time (GMT) in hours. Eg: +0200.
		'P': "-07:00",                    // Zone: Difference to Greenwich time (GMT) with colon between hours and minutes. Eg: +02:00.
		'T': "MST",                       // Zone: Timezone abbreviation. Eg: UTC, EST, MDT ...
		'c': "2006-01-02T15:04:05-07:00", // Format: ISO 8601 date. Eg: 2004-02-12T15:19:21+00:00.
		'r': "Mon, 02 Jan 06 15:04 MST",  // Format: RFC 2822 formatted date. Eg: Thu, 21 Dec 2000 16:01:07 +0200.
	}

	// weekMap maps English weekday names to their numeric form, with
	// Sunday as 0 (PHP 'w' semantics; 'N' replaces 0 with 7).
	weekMap = map[string]string{
		"Sunday":    "0",
		"Monday":    "1",
		"Tuesday":   "2",
		"Wednesday": "3",
		"Thursday":  "4",
		"Friday":    "5",
		"Saturday":  "6",
	}

	// dayOfMonth holds the cumulative number of days preceding each
	// month in a non-leap year (index 0 = January).
	dayOfMonth = []int{0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}
)
// Format formats t using the custom (PHP date()-style) <format> and
// returns the result. A nil receiver formats to the empty string. A
// backslash escapes the following character, emitting it literally;
// runes above 255 and unknown specifiers are copied through verbatim.
func (t *Time) Format(format string) string {
	if t == nil {
		return ""
	}
	runes := []rune(format)
	buffer := bytes.NewBuffer(nil)
	for i := 0; i < len(runes); {
		switch runes[i] {
		case '\\':
			// Escaped character: emit the next rune literally.
			if i < len(runes)-1 {
				buffer.WriteRune(runes[i+1])
				i += 2
				continue
			} else {
				// Trailing backslash: nothing left to escape, stop here.
				return buffer.String()
			}
		case 'W':
			buffer.WriteString(strconv.Itoa(t.WeeksOfYear()))
		case 'z':
			buffer.WriteString(strconv.Itoa(t.DayOfYear()))
		case 't':
			buffer.WriteString(strconv.Itoa(t.DaysInMonth()))
		case 'U':
			buffer.WriteString(strconv.FormatInt(t.Unix(), 10))
		default:
			if runes[i] > 255 {
				// Non-ASCII runes are never format specifiers.
				buffer.WriteRune(runes[i])
				break
			}
			if f, ok := formats[byte(runes[i])]; ok {
				result := t.Time.Format(f)
				// Particular chars need post-processing of the stdlib output.
				switch runes[i] {
				case 'j':
					// Strip the "=j=" marker and any leading zero.
					for _, s := range []string{"=j=0", "=j="} {
						result = strings.Replace(result, s, "", -1)
					}
					buffer.WriteString(result)
				case 'G':
					// Strip the "=G=" marker and any leading zero.
					for _, s := range []string{"=G=0", "=G="} {
						result = strings.Replace(result, s, "", -1)
					}
					buffer.WriteString(result)
				case 'u':
					buffer.WriteString(strings.Replace(result, "=u=.", "", -1))
				case 'w':
					// Map weekday name to its numeric form (0 = Sunday).
					buffer.WriteString(weekMap[result])
				case 'N':
					// ISO-8601 numeric weekday: Sunday is 7, not 0.
					buffer.WriteString(strings.Replace(weekMap[result], "0", "7", -1))
				case 'S':
					// English ordinal suffix for the day of the month.
					buffer.WriteString(formatMonthDaySuffixMap(result))
				default:
					buffer.WriteString(result)
				}
			} else {
				// Unknown specifier: copy the character through.
				buffer.WriteRune(runes[i])
			}
		}
		i++
	}
	return buffer.String()
}
// FormatNew formats t with the given custom <format> and returns the
// result parsed back into a new Time object.
func (t *Time) FormatNew(format string) *Time {
	formatted := t.Format(format)
	return NewFromStr(formatted)
}
// FormatTo formats t in place with the given custom <format>: the
// receiver's time is replaced by the formatted value parsed back.
func (t *Time) FormatTo(format string) *Time {
	formatted := t.Format(format)
	t.Time = NewFromStr(formatted).Time
	return t
}
// Layout formats the time with a stdlib reference-time layout and
// returns the formatted result.
func (t *Time) Layout(layout string) string {
	return t.Time.Format(layout)
}
// LayoutNew formats the time with a stdlib layout and returns the
// result parsed back into a new Time object.
func (t *Time) LayoutNew(layout string) *Time {
	formatted := t.Layout(layout)
	return NewFromStr(formatted)
}
// LayoutTo formats t in place with a stdlib layout: the receiver's
// time is replaced by the formatted value parsed back.
func (t *Time) LayoutTo(layout string) *Time {
	formatted := t.Layout(layout)
	t.Time = NewFromStr(formatted).Time
	return t
}
// IsLeapYear reports whether the year of t is a leap year: divisible
// by 4 but not by 100, or divisible by 400.
func (t *Time) IsLeapYear() bool {
	year := t.Year()
	// Idiom fix: return the condition directly instead of the original
	// if cond { return true }; return false.
	return (year%4 == 0 && year%100 != 0) || year%400 == 0
}
// DayOfYear returns the zero-based position of the day within the year
// (PHP 'z' semantics: 0 for January 1st).
func (t *Time) DayOfYear() int {
	day := t.Day()
	month := int(t.Month())
	// Days preceding this month in a non-leap year, plus the zero-based day.
	doy := dayOfMonth[month-1] + day - 1
	if t.IsLeapYear() && month > 2 {
		doy++ // past February in a leap year: account for Feb 29
	}
	return doy
}
// DaysInMonth returns the number of days in the current month,
// accounting for leap-year February.
func (t *Time) DaysInMonth() int {
	switch t.Month() {
	case 2:
		if t.IsLeapYear() {
			return 29
		}
		return 28
	case 4, 6, 9, 11:
		return 30
	default:
		return 31
	}
}
// WeeksOfYear returns the ISO-8601 week number of the year for t.
func (t *Time) WeeksOfYear() int {
	_, isoWeek := t.ISOWeek()
	return isoWeek
}
// formatToStdLayout converts a custom (PHP date()-style) format into a
// stdlib reference-time layout. It mirrors Time.Format's escaping rules
// (backslash escapes the next byte; unknown specifiers pass through)
// but resolves the "=x=" marker entries to their plain layouts, since
// no post-processing happens here.
func formatToStdLayout(format string) string {
	b := bytes.NewBuffer(nil)
	for i := 0; i < len(format); {
		switch format[i] {
		case '\\':
			// Escaped character: emit the next byte literally.
			if i < len(format)-1 {
				b.WriteByte(format[i+1])
				i += 2
				continue
			} else {
				// Trailing backslash: nothing left to escape, stop here.
				return b.String()
			}
		default:
			if f, ok := formats[format[i]]; ok {
				// Handle particular chars whose table entry is a marker.
				switch format[i] {
				case 'j':
					b.WriteString("2")
				case 'G':
					b.WriteString("15")
				case 'u':
					// Milliseconds need a leading dot unless one precedes.
					if i > 0 && format[i-1] == '.' {
						b.WriteString("000")
					} else {
						b.WriteString(".000")
					}
				default:
					b.WriteString(f)
				}
			} else {
				b.WriteByte(format[i])
			}
			i++
		}
	}
	return b.String()
}
// formatToRegexPattern converts the custom format into a regular
// expression matching strings rendered by that format: the layout is
// regex-quoted, then every digit is generalized to the class [0-9] and
// every letter to [A-Za-z].
func formatToRegexPattern(format string) string {
	s := gregex.Quote(formatToStdLayout(format))
	s, _ = gregex.ReplaceString(`[0-9]`, `[0-9]`, s)
	s, _ = gregex.ReplaceString(`[A-Za-z]`, `[A-Za-z]`, s)
	return s
}
// formatMonthDaySuffixMap returns the English ordinal suffix ("st",
// "nd", "rd" or "th") for the given day-of-month string (e.g. "02",
// "21"). Unparsable input falls back to "th".
//
// Bug fix: the original only special-cased "01"/"02"/"03", so the
// 21st, 22nd, 23rd and 31st all incorrectly yielded "th" (PHP 'S'
// semantics require st/nd/rd there, with 11th-13th staying "th").
func formatMonthDaySuffixMap(day string) string {
	d, err := strconv.Atoi(day)
	if err != nil {
		return "th"
	}
	// 11th, 12th and 13th are exceptions to the last-digit rule.
	if d >= 11 && d <= 13 {
		return "th"
	}
	switch d % 10 {
	case 1:
		return "st"
	case 2:
		return "nd"
	case 3:
		return "rd"
	default:
		return "th"
	}
}
package timeseries
import (
"errors"
"math"
)
// FirstObs sorts the series chronologically ascending and returns its
// first Observation, whether its Measure is NaN() or not. The error is
// always nil (error reporting for an empty series is intentionally
// disabled); an empty series yields the zero Observation. See FirstValidObs.
func FirstObs(data *DataSeries) (lst Observation, err error) {
	data.SortChronAsc()
	if len(*data) == 0 {
		return lst, nil
	}
	return (*data)[0], nil
}
// First uses FirstObs and returns the Measure as a float64.
func First(data *DataSeries) float64 {
	obs, _ := FirstObs(data)
	return obs.Meas
}
// FirstValidObs sorts the series chronologically ascending and returns
// the first valid Observation (IsNaN()==false) along with its index.
// If every Measure is NaN, the returned Observation carries the Chron
// of the LAST element and a NaN Measure, with idx 0.
//
// NOTE(review): LastValidObs uses the FIRST element's Chron in the
// mirror all-NaN case — confirm the asymmetry is intended. The error
// is only set for an empty series, despite its wording.
func FirstValidObs(data *DataSeries) (lst Observation, idx int, err error) {
	data.SortChronAsc()
	if len(*data) > 0 {
		for i := 0; i < len((*data)); i++ {
			if IsNaN((*data)[i].Meas) == false {
				lst = (*data)[i]
				idx = i
				break
			} else if i == len((*data))-1 && IsNaN((*data)[0].Meas) == true {
				// Reached the end without finding a valid element:
				// flag the result as NaN.
				lst.Chron = (*data)[len((*data))-1].Chron
				lst.Meas = NaN()
			}
		}
	} else {
		err = errors.New("No Valid Observation in DataSeries")
	}
	return lst, idx, err
}
// FirstValid uses FirstValidObs and returns the Measure as a float64.
func FirstValid(data *DataSeries) float64 {
	obs, _, _ := FirstValidObs(data)
	return obs.Meas
}
// LastObs sorts the series chronologically ascending and returns its
// last Observation, whether its Measure is NaN() or not. The error is
// always nil (error reporting for an empty series is intentionally
// disabled); an empty series yields the zero Observation. See LastValidObs.
func LastObs(data *DataSeries) (lst Observation, err error) {
	data.SortChronAsc()
	if len(*data) == 0 {
		return lst, nil
	}
	return (*data)[len(*data)-1], nil
}
// Last uses LastObs and returns the Measure as a float64.
func Last(data *DataSeries) float64 {
	obs, _ := LastObs(data)
	return obs.Meas
}
// LastValidObs sorts the series chronologically ascending and returns
// the last valid Observation (IsNaN()==false) along with its index,
// scanning backwards from the end. If every Measure is NaN, the
// returned Observation carries the Chron of the FIRST element and a
// NaN Measure, with idx 0.
//
// NOTE(review): FirstValidObs uses the LAST element's Chron in the
// mirror all-NaN case — confirm the asymmetry is intended. The error
// is only set for an empty series, despite its wording.
func LastValidObs(data *DataSeries) (lst Observation, idx int, err error) {
	data.SortChronAsc()
	if len(*data) > 0 {
		for i := len(*data) - 1; i >= 0; i-- {
			if IsNaN((*data)[i].Meas) == false {
				lst = (*data)[i]
				idx = i
				break
			} else if i == 0 && IsNaN((*data)[0].Meas) == true {
				// Reached the start without finding a valid element:
				// flag the result as NaN.
				lst.Chron = (*data)[0].Chron
				lst.Meas = NaN()
			}
		}
	} else {
		err = errors.New("No Valid Observation in DataSeries")
	}
	return lst, idx, err
}
// LastValid uses LastValidObs and returns the Measure as a float64.
func LastValid(data *DataSeries) float64 {
	obs, _, _ := LastValidObs(data)
	return obs.Meas
}
// MaxObs returns the Observation holding the maximum valid Measure of
// the DataSeries together with its index (for possible further
// computations). If no valid Observation exists, a BlankObservation is
// returned with a nil error (matching the original, whose error there
// was commented out). An error is returned only for an empty series.
//
// Bug fix: the original never assigned the idx named return, so it was
// always 0 regardless of where the maximum sat. It also scanned the
// data twice; this is a single pass.
func MaxObs(data *DataSeries) (max Observation, idx int, err error) {
	if len(*data) == 0 {
		err = errors.New("DataSeries is of Length 0")
		return
	}
	found := false
	for i := 0; i < len(*data); i++ {
		if math.IsNaN((*data)[i].Meas) {
			continue
		}
		if !found || (*data)[i].Meas > max.Meas {
			max = (*data)[i]
			idx = i
			found = true
		}
	}
	if !found {
		max = BlankObservation()
		// Original left err nil here ("No Valid Maximum") — preserved.
	}
	return
}
// Max returns the maximum valid Measure of the DataSeries as a float64.
// If no valid Observation is available it returns NaN(). See MaxObs.
func Max(data *DataSeries) (max float64) {
	obs, _, _ := MaxObs(data)
	return obs.Meas
}
// MinObs returns the Observation holding the minimum valid Measure of
// the DataSeries together with its index (for possible further
// computations). If no valid Observation exists, a BlankObservation
// and an error are returned. An error is also returned for an empty
// series.
//
// Bug fix: the original never assigned the idx named return, so it was
// always 0 regardless of where the minimum sat. It also scanned the
// data twice; this is a single pass.
func MinObs(data *DataSeries) (min Observation, idx int, err error) {
	if len(*data) == 0 {
		err = errors.New("DataSeries is of Length 0")
		return
	}
	found := false
	for i := 0; i < len(*data); i++ {
		if math.IsNaN((*data)[i].Meas) {
			continue
		}
		if !found || (*data)[i].Meas < min.Meas {
			min = (*data)[i]
			idx = i
			found = true
		}
	}
	if !found {
		min = BlankObservation()
		err = errors.New("No Valid Minimum in DataSeries")
	}
	return
}
// Min returns the minimum valid Measure of the DataSeries as a float64.
// If no valid Observation is available it returns NaN(). See MinObs.
func Min(data *DataSeries) (min float64) {
	obs, _, _ := MinObs(data)
	return obs.Meas
}
// MeanObs returns the estimated mean of the valid Measures in the
// DataSeries together with the number of valid Observations. An empty
// series yields NaN and an error; a series with no valid Observations
// yields NaN (0/0) with a nil error.
func MeanObs(data *DataSeries) (avg float64, pop int, err error) {
	if len(*data) == 0 {
		return NaN(), 0, errors.New("DataSeries is of Length 0")
	}
	var sum float64
	for _, obs := range *data {
		if !IsNaN(obs.Meas) {
			sum += obs.Meas
			pop++
		}
	}
	// pop == 0 yields 0/0 == NaN, matching the original behavior.
	return sum / float64(pop), pop, nil
}
// Mean returns the float64 value showing the Estimated Mean value of Measures in a DataSeries. If no Valid Observation is available, returns NaN().
// See MeanObs
func Mean(data *DataSeries) (mean float64) {
	m, _, _ := MeanObs(data)
	mean = m
	return
}
// StdDevObs returns the unbiased Estimated Standard Deviation value of Measures in a DataSeries. If no Valid Observation is available, returns NaN().
// StdDevObs also returns the amount of Valid Observations in DataSeries for possible further computations.
func StdDevObs(data *DataSeries) (std float64, pop int, err error) {
	if len(*data) <= 1 {
		std = NaN()
		err = errors.New("DataSeries is of Length 0")
		return
	}
	avg, _, merr := MeanObs(data)
	if merr != nil {
		// Propagate the failure. The original declared err with := here, which
		// shadowed the named return, so this error was never reported.
		std = NaN()
		err = errors.New("Non Valid MeanObs. Standard Deviation Computation Impossible.")
		return
	}
	// Sum squared deviations from the mean. This is algebraically the same as
	// the original sum(x^2) - n*avg^2 formulation but numerically stabler.
	var sumsq float64
	for _, v := range *data {
		if !math.IsNaN(v.Meas) {
			d := v.Meas - avg
			sumsq += d * d
			pop++
		}
	}
	if pop < 2 {
		// Fewer than two valid observations: the unbiased (n-1) estimator is
		// undefined. The original divided by zero here.
		std = NaN()
		err = errors.New("Not Enough Valid Observations in DataSeries")
		return
	}
	std = math.Sqrt(sumsq / float64(pop-1))
	return
}
// StdDev returns the float64 value showing the unbiased Estimated Standard Deviation value of Measures in a DataSeries. If no Valid Observation is available, returns NaN().
// See StdDevObs
func StdDev(data *DataSeries) (std float64) {
	s, _, _ := StdDevObs(data)
	std = s
	return
}
// EstimatedGauss returns the Estimated Gauss function underlying a sample. It is simply a container for Mean and StdDev.
func EstimatedGauss(data *DataSeries) (mean float64, std float64, err error) {
	switch {
	case len(*data) > 1:
		mean, _, err = MeanObs(data)
		if err != nil {
			err = errors.New("No MeanObs to DataSeries")
			return
		}
		std, _, err = StdDevObs(data)
		if err != nil {
			err = errors.New("No Standard Error to DataSeries")
		}
	case len(*data) == 1:
		// The original discarded the mean and assigned to a shadowed err here,
		// so callers got a zero mean and a nil error. Return the actual mean
		// and report that a standard deviation cannot be estimated.
		mean, _, err = MeanObs(data)
		if err != nil {
			err = errors.New("No MeanObs to DataSeries")
			return
		}
		std = NaN()
		err = errors.New("DataSeries of length 1")
	default:
		err = errors.New("DataSeries is of Length 0")
	}
	return
}
// MedianObs returns the Observation closest to the Median Observation (percentile 50) in a DataSeries.
func MedianObs(data *DataSeries) (obs Observation, err error) {
// TODO
return obs, err
} | internalfunctions.go | 0.746324 | 0.497376 | internalfunctions.go | starcoder |
package meshdata
import (
"encoding/json"
"fmt"
"strings"
)
const (
	// FlagsNone resets all mesh data flags.
	FlagsNone = 0
	// FlagObsolete flags the mesh data for removal.
	// NOTE(review): Flags is an int32; the hex value suggests a bit field
	// combined bitwise — confirm before adding non-power-of-two flag values.
	FlagObsolete = 0x0001
)
// MeshData holds the entire mesh data managed by Mentix.
type MeshData struct {
	// Sites lists all known sites; looked up case-insensitively by ID (see FindSite).
	Sites []*Site
	// ServiceTypes lists all known service types; looked up case-insensitively by name (see FindServiceType).
	ServiceTypes []*ServiceType
	// Flags carries internal state flags (e.g. FlagObsolete); excluded from JSON output.
	Flags int32 `json:"-"`
}
// Clear resets the mesh to its empty state, dropping all sites, service types
// and flags.
func (m *MeshData) Clear() {
	m.Flags = FlagsNone
	m.Sites = nil
	m.ServiceTypes = nil
}
// AddSite adds a new site; if a site with the same ID already exists, the
// existing entry is overwritten in place.
func (m *MeshData) AddSite(site *Site) {
	existing := m.FindSite(site.GetID())
	if existing == nil {
		m.Sites = append(m.Sites, site)
		return
	}
	*existing = *site
}
// RemoveSite removes the site with the given ID, if present.
func (m *MeshData) RemoveSite(id string) {
	target := m.FindSite(id)
	if target == nil {
		return
	}
	for i, s := range m.Sites {
		if s != target {
			continue
		}
		// Swap-remove: move the last entry into the vacated slot and shrink.
		last := len(m.Sites) - 1
		m.Sites[i] = m.Sites[last]
		m.Sites[last] = nil // drop the dangling pointer so it can be collected
		m.Sites = m.Sites[:last]
		return
	}
}
// FindSite searches for a site with the given ID (case-insensitive) and
// returns it, or nil if no such site exists.
func (m *MeshData) FindSite(id string) *Site {
	for i := range m.Sites {
		if strings.EqualFold(m.Sites[i].GetID(), id) {
			return m.Sites[i]
		}
	}
	return nil
}
// AddServiceType adds a new service type; if a type with the same name already
// exists, the existing entry is overwritten in place.
func (m *MeshData) AddServiceType(serviceType *ServiceType) {
	existing := m.FindServiceType(serviceType.Name)
	if existing == nil {
		m.ServiceTypes = append(m.ServiceTypes, serviceType)
		return
	}
	*existing = *serviceType
}
// RemoveServiceType removes the service type with the given name, if present.
func (m *MeshData) RemoveServiceType(name string) {
	target := m.FindServiceType(name)
	if target == nil {
		return
	}
	for i, t := range m.ServiceTypes {
		if t != target {
			continue
		}
		// Swap-remove: move the last entry into the vacated slot and shrink.
		last := len(m.ServiceTypes) - 1
		m.ServiceTypes[i] = m.ServiceTypes[last]
		m.ServiceTypes[last] = nil // drop the dangling pointer so it can be collected
		m.ServiceTypes = m.ServiceTypes[:last]
		return
	}
}
// FindServiceType searches for a service type with the given name
// (case-insensitive) and returns it, or nil if no such type exists.
func (m *MeshData) FindServiceType(name string) *ServiceType {
	for i := range m.ServiceTypes {
		if strings.EqualFold(m.ServiceTypes[i].Name, name) {
			return m.ServiceTypes[i]
		}
	}
	return nil
}
// Merge merges data from another MeshData instance into this one; entries with
// matching IDs/names overwrite the existing ones.
func (m *MeshData) Merge(inData *MeshData) {
	for _, s := range inData.Sites {
		m.AddSite(s)
	}
	for _, t := range inData.ServiceTypes {
		m.AddServiceType(t)
	}
}
// Unmerge removes all sites and service types of another MeshData instance
// from this one, matched by ID/name.
func (m *MeshData) Unmerge(inData *MeshData) {
	for _, s := range inData.Sites {
		m.RemoveSite(s.GetID())
	}
	for _, t := range inData.ServiceTypes {
		m.RemoveServiceType(t.Name)
	}
}
// Verify checks if the mesh data is valid, returning the first verification
// error encountered among the sites and service types, or nil.
func (m *MeshData) Verify() error {
	for _, s := range m.Sites {
		if err := s.Verify(); err != nil {
			return err
		}
	}
	for _, t := range m.ServiceTypes {
		if err := t.Verify(); err != nil {
			return err
		}
	}
	return nil
}
// InferMissingData lets every site and service type infer missing data from
// other data where possible.
func (m *MeshData) InferMissingData() {
	for _, s := range m.Sites {
		s.InferMissingData()
	}
	for _, t := range m.ServiceTypes {
		t.InferMissingData()
	}
}
// ToJSON converts the data to tab-indented JSON.
func (meshData *MeshData) ToJSON() (string, error) {
	data, err := json.MarshalIndent(meshData, "", "\t")
	if err != nil {
		// Wrap with %w so callers can unwrap the underlying marshalling error
		// via errors.Is/errors.As (the original used %v, which loses the chain).
		return "", fmt.Errorf("unable to marshal the mesh data: %w", err)
	}
	return string(data), nil
}
// FromJSON replaces the current contents with mesh data decoded from JSON.
// The mesh is cleared first, so on failure it is left empty.
func (meshData *MeshData) FromJSON(data string) error {
	meshData.Clear()
	if err := json.Unmarshal([]byte(data), meshData); err != nil {
		// Wrap with %w so callers can unwrap the underlying unmarshalling error
		// via errors.Is/errors.As (the original used %v, which loses the chain).
		return fmt.Errorf("unable to unmarshal the mesh data: %w", err)
	}
	return nil
}
// Clone creates an exact deep copy of the mesh data. On any encoding or
// decoding failure, an empty MeshData is returned instead.
func (m *MeshData) Clone() *MeshData {
	clone := &MeshData{}
	// A JSON round trip gives a deep copy without pulling in a dedicated
	// deep-copy dependency.
	if data, err := m.ToJSON(); err == nil {
		if err := clone.FromJSON(data); err != nil {
			clone.Clear() // decoding failed part-way: fall back to an empty mesh
		}
	}
	return clone
}
// Compare checks whether the stored data equals the data of another MeshData
// object; a nil argument never compares equal.
func (m *MeshData) Compare(other *MeshData) bool {
	if other == nil {
		return false
	}
	// Comparing the JSON encodings avoids cumbersome field-by-field checks.
	ours, _ := m.ToJSON()
	theirs, _ := other.ToJSON()
	return ours == theirs
}
// New returns a new (empty) MeshData object.
func New() *MeshData {
meshData := &MeshData{}
meshData.Clear()
return meshData
} | pkg/mentix/meshdata/meshdata.go | 0.70028 | 0.500122 | meshdata.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.