code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package onshape

import (
	"encoding/json"
)

// BTPStatementTry1523 struct for BTPStatementTry1523
type BTPStatementTry1523 struct {
	BTPStatement269
	Body              *BTPStatementBlock271 `json:"body,omitempty"`
	BtType            *string               `json:"btType,omitempty"`
	CatchBlock        *BTPStatementBlock271 `json:"catchBlock,omitempty"`
	CatchVariable     *BTPIdentifier8       `json:"catchVariable,omitempty"`
	Name              *BTPIdentifier8       `json:"name,omitempty"`
	Silent            *bool                 `json:"silent,omitempty"`
	SpaceAfterCatch   *BTPSpace10           `json:"spaceAfterCatch,omitempty"`
	SpaceBeforeSilent *BTPSpace10           `json:"spaceBeforeSilent,omitempty"`
	StandardType      *string               `json:"standardType,omitempty"`
	TypeName          *string               `json:"typeName,omitempty"`
}

// NewBTPStatementTry1523 instantiates a new BTPStatementTry1523 object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewBTPStatementTry1523() *BTPStatementTry1523 {
	this := BTPStatementTry1523{}
	return &this
}

// NewBTPStatementTry1523WithDefaults instantiates a new BTPStatementTry1523 object.
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set.
func NewBTPStatementTry1523WithDefaults() *BTPStatementTry1523 {
	this := BTPStatementTry1523{}
	return &this
}

// GetBody returns the Body field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetBody() BTPStatementBlock271 {
	if v, ok := o.GetBodyOk(); ok {
		return *v
	}
	var zero BTPStatementBlock271
	return zero
}

// GetBodyOk returns a tuple with the Body field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetBodyOk() (*BTPStatementBlock271, bool) {
	if o == nil || o.Body == nil {
		return nil, false
	}
	return o.Body, true
}

// HasBody returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasBody() bool {
	return o != nil && o.Body != nil
}

// SetBody gets a reference to the given BTPStatementBlock271 and assigns it to the Body field.
func (o *BTPStatementTry1523) SetBody(v BTPStatementBlock271) {
	o.Body = &v
}

// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetBtType() string {
	if v, ok := o.GetBtTypeOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}

// HasBtType returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasBtType() bool {
	return o != nil && o.BtType != nil
}

// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTPStatementTry1523) SetBtType(v string) {
	o.BtType = &v
}

// GetCatchBlock returns the CatchBlock field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetCatchBlock() BTPStatementBlock271 {
	if v, ok := o.GetCatchBlockOk(); ok {
		return *v
	}
	var zero BTPStatementBlock271
	return zero
}

// GetCatchBlockOk returns a tuple with the CatchBlock field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetCatchBlockOk() (*BTPStatementBlock271, bool) {
	if o == nil || o.CatchBlock == nil {
		return nil, false
	}
	return o.CatchBlock, true
}

// HasCatchBlock returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasCatchBlock() bool {
	return o != nil && o.CatchBlock != nil
}

// SetCatchBlock gets a reference to the given BTPStatementBlock271 and assigns it to the CatchBlock field.
func (o *BTPStatementTry1523) SetCatchBlock(v BTPStatementBlock271) {
	o.CatchBlock = &v
}

// GetCatchVariable returns the CatchVariable field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetCatchVariable() BTPIdentifier8 {
	if v, ok := o.GetCatchVariableOk(); ok {
		return *v
	}
	var zero BTPIdentifier8
	return zero
}

// GetCatchVariableOk returns a tuple with the CatchVariable field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetCatchVariableOk() (*BTPIdentifier8, bool) {
	if o == nil || o.CatchVariable == nil {
		return nil, false
	}
	return o.CatchVariable, true
}

// HasCatchVariable returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasCatchVariable() bool {
	return o != nil && o.CatchVariable != nil
}

// SetCatchVariable gets a reference to the given BTPIdentifier8 and assigns it to the CatchVariable field.
func (o *BTPStatementTry1523) SetCatchVariable(v BTPIdentifier8) {
	o.CatchVariable = &v
}

// GetName returns the Name field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetName() BTPIdentifier8 {
	if v, ok := o.GetNameOk(); ok {
		return *v
	}
	var zero BTPIdentifier8
	return zero
}

// GetNameOk returns a tuple with the Name field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetNameOk() (*BTPIdentifier8, bool) {
	if o == nil || o.Name == nil {
		return nil, false
	}
	return o.Name, true
}

// HasName returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasName() bool {
	return o != nil && o.Name != nil
}

// SetName gets a reference to the given BTPIdentifier8 and assigns it to the Name field.
func (o *BTPStatementTry1523) SetName(v BTPIdentifier8) {
	o.Name = &v
}

// GetSilent returns the Silent field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetSilent() bool {
	if v, ok := o.GetSilentOk(); ok {
		return *v
	}
	var zero bool
	return zero
}

// GetSilentOk returns a tuple with the Silent field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetSilentOk() (*bool, bool) {
	if o == nil || o.Silent == nil {
		return nil, false
	}
	return o.Silent, true
}

// HasSilent returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasSilent() bool {
	return o != nil && o.Silent != nil
}

// SetSilent gets a reference to the given bool and assigns it to the Silent field.
func (o *BTPStatementTry1523) SetSilent(v bool) {
	o.Silent = &v
}

// GetSpaceAfterCatch returns the SpaceAfterCatch field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetSpaceAfterCatch() BTPSpace10 {
	if v, ok := o.GetSpaceAfterCatchOk(); ok {
		return *v
	}
	var zero BTPSpace10
	return zero
}

// GetSpaceAfterCatchOk returns a tuple with the SpaceAfterCatch field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetSpaceAfterCatchOk() (*BTPSpace10, bool) {
	if o == nil || o.SpaceAfterCatch == nil {
		return nil, false
	}
	return o.SpaceAfterCatch, true
}

// HasSpaceAfterCatch returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasSpaceAfterCatch() bool {
	return o != nil && o.SpaceAfterCatch != nil
}

// SetSpaceAfterCatch gets a reference to the given BTPSpace10 and assigns it to the SpaceAfterCatch field.
func (o *BTPStatementTry1523) SetSpaceAfterCatch(v BTPSpace10) {
	o.SpaceAfterCatch = &v
}

// GetSpaceBeforeSilent returns the SpaceBeforeSilent field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetSpaceBeforeSilent() BTPSpace10 {
	if v, ok := o.GetSpaceBeforeSilentOk(); ok {
		return *v
	}
	var zero BTPSpace10
	return zero
}

// GetSpaceBeforeSilentOk returns a tuple with the SpaceBeforeSilent field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetSpaceBeforeSilentOk() (*BTPSpace10, bool) {
	if o == nil || o.SpaceBeforeSilent == nil {
		return nil, false
	}
	return o.SpaceBeforeSilent, true
}

// HasSpaceBeforeSilent returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasSpaceBeforeSilent() bool {
	return o != nil && o.SpaceBeforeSilent != nil
}

// SetSpaceBeforeSilent gets a reference to the given BTPSpace10 and assigns it to the SpaceBeforeSilent field.
func (o *BTPStatementTry1523) SetSpaceBeforeSilent(v BTPSpace10) {
	o.SpaceBeforeSilent = &v
}

// GetStandardType returns the StandardType field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetStandardType() string {
	if v, ok := o.GetStandardTypeOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetStandardTypeOk returns a tuple with the StandardType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetStandardTypeOk() (*string, bool) {
	if o == nil || o.StandardType == nil {
		return nil, false
	}
	return o.StandardType, true
}

// HasStandardType returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasStandardType() bool {
	return o != nil && o.StandardType != nil
}

// SetStandardType gets a reference to the given string and assigns it to the StandardType field.
func (o *BTPStatementTry1523) SetStandardType(v string) {
	o.StandardType = &v
}

// GetTypeName returns the TypeName field value if set, zero value otherwise.
func (o *BTPStatementTry1523) GetTypeName() string {
	if v, ok := o.GetTypeNameOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetTypeNameOk returns a tuple with the TypeName field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementTry1523) GetTypeNameOk() (*string, bool) {
	if o == nil || o.TypeName == nil {
		return nil, false
	}
	return o.TypeName, true
}

// HasTypeName returns a boolean if a field has been set.
func (o *BTPStatementTry1523) HasTypeName() bool {
	return o != nil && o.TypeName != nil
}

// SetTypeName gets a reference to the given string and assigns it to the TypeName field.
func (o *BTPStatementTry1523) SetTypeName(v string) {
	o.TypeName = &v
}

// MarshalJSON serializes the struct: the embedded BTPStatement269 is flattened
// into the same JSON object, then each set optional field is added on top.
func (o BTPStatementTry1523) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	serializedBTPStatement269, errBTPStatement269 := json.Marshal(o.BTPStatement269)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	errBTPStatement269 = json.Unmarshal([]byte(serializedBTPStatement269), &toSerialize)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	if o.Body != nil {
		toSerialize["body"] = o.Body
	}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.CatchBlock != nil {
		toSerialize["catchBlock"] = o.CatchBlock
	}
	if o.CatchVariable != nil {
		toSerialize["catchVariable"] = o.CatchVariable
	}
	if o.Name != nil {
		toSerialize["name"] = o.Name
	}
	if o.Silent != nil {
		toSerialize["silent"] = o.Silent
	}
	if o.SpaceAfterCatch != nil {
		toSerialize["spaceAfterCatch"] = o.SpaceAfterCatch
	}
	if o.SpaceBeforeSilent != nil {
		toSerialize["spaceBeforeSilent"] = o.SpaceBeforeSilent
	}
	if o.StandardType != nil {
		toSerialize["standardType"] = o.StandardType
	}
	if o.TypeName != nil {
		toSerialize["typeName"] = o.TypeName
	}
	return json.Marshal(toSerialize)
}

// NullableBTPStatementTry1523 distinguishes "explicit null" from "unset".
type NullableBTPStatementTry1523 struct {
	value *BTPStatementTry1523
	isSet bool
}

func (v NullableBTPStatementTry1523) Get() *BTPStatementTry1523 {
	return v.value
}

func (v *NullableBTPStatementTry1523) Set(val *BTPStatementTry1523) {
	v.value = val
	v.isSet = true
}

func (v NullableBTPStatementTry1523) IsSet() bool {
	return v.isSet
}

func (v *NullableBTPStatementTry1523) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTPStatementTry1523 wraps val in a set NullableBTPStatementTry1523.
func NewNullableBTPStatementTry1523(val *BTPStatementTry1523) *NullableBTPStatementTry1523 {
	return &NullableBTPStatementTry1523{value: val, isSet: true}
}

func (v NullableBTPStatementTry1523) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableBTPStatementTry1523) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
onshape/model_btp_statement_try_1523.go
0.724286
0.423518
model_btp_statement_try_1523.go
starcoder
package chunks // Postings provides iterative access over a postings list. type Postings interface { // Next advances the iterator and returns true if another value was found. Next() bool // Seek advances the iterator to value v or greater and returns // true if a value was found. Seek(v uint64) bool // At returns the value at the current iterator position. At() (int64, uint64, []int) // Err returns the last error of the iterator. Err() error } type ChunkReader interface { ReadChunk(bool, ...uint64) ChunkEnc } type Chunk interface { MinTime() int64 MaxTime() int64 ChunkEnc(bool, ChunkReader) ChunkEnc } type ChunkEnc interface { Bytes() [][]byte Iterator(int64, int64) Postings } // errPostings is an empty iterator that always errors. type errPostings struct { err error } func (e errPostings) Next() bool { return false } func (e errPostings) Seek(uint64) bool { return false } func (e errPostings) At() (int64, uint64, []int) { return 0, 0, nil } func (e errPostings) Err() error { return e.err } var EmptyPostings = errPostings{} func Intersect(its ...Postings) Postings { if len(its) == 0 { return EmptyPostings } if len(its) == 1 { return its[0] } l := len(its) / 2 return newIntersectPostings(Intersect(its[:l]...), Intersect(its[l:]...)) } type intersectPostings struct { a, b Postings aok, bok bool cur uint64 t int64 pos []int } func newIntersectPostings(a, b Postings) *intersectPostings { return &intersectPostings{a: a, b: b} } func (it *intersectPostings) At() (int64, uint64, []int) { return it.t, it.cur, it.pos } func (it *intersectPostings) doNext(t int64, id uint64, pos []int) bool { for { if !it.b.Seek(id) { return false } if _, vb, _ := it.b.At(); vb != id { if !it.a.Seek(vb) { return false } t, id, pos = it.a.At() if vb != id { continue } } it.cur = id it.t = t it.pos = pos return true } } func (it *intersectPostings) Next() bool { if !it.a.Next() { return false } return it.doNext(it.a.At()) } func (it *intersectPostings) Seek(id uint64) bool { if !it.a.Seek(id) 
{ return false } return it.doNext(it.a.At()) } func (it *intersectPostings) Err() error { if it.a.Err() != nil { return it.a.Err() } return it.b.Err() } func Merge(its ...Postings) Postings { if len(its) == 0 { return nil } if len(its) == 1 { return its[0] } l := len(its) / 2 return newMergedPostings(Merge(its[:l]...), Merge(its[l:]...)) } type mergedPostings struct { a, b Postings initialized bool aok, bok bool cur uint64 t int64 pos []int } func NewMergedPostings(a, b Postings) *mergedPostings { return newMergedPostings(a, b) } func newMergedPostings(a, b Postings) *mergedPostings { return &mergedPostings{a: a, b: b} } func (it *mergedPostings) At() (int64, uint64, []int) { return it.t, it.cur, it.pos } func (it *mergedPostings) Next() bool { if !it.initialized { it.aok = it.a.Next() it.bok = it.b.Next() it.initialized = true } if !it.aok && !it.bok { return false } if !it.aok { it.t, it.cur, it.pos = it.b.At() it.bok = it.b.Next() return true } if !it.bok { it.t, it.cur, it.pos = it.a.At() it.aok = it.a.Next() return true } ta, acur, apos := it.a.At() tb, bcur, bpos := it.b.At() if acur < bcur { it.cur = acur it.t = ta it.pos = apos it.aok = it.a.Next() } else if acur > bcur { it.cur = bcur it.t = tb it.pos = bpos it.bok = it.b.Next() } else { it.cur = acur it.t = ta it.pos = apos it.aok = it.a.Next() it.bok = it.b.Next() } return true } func (it *mergedPostings) Seek(id uint64) bool { if it.cur >= id { return true } it.aok = it.a.Seek(id) it.bok = it.b.Seek(id) it.initialized = true return it.Next() } func (it *mergedPostings) Err() error { if it.a.Err() != nil { return it.a.Err() } return it.b.Err() } // // Iterator is a simple iterator that can only get the next value. // type Iterator interface { // At() (int64, uint64) // Err() error // Next() bool // }
pkg/engine/tem/chunks/chunks.go
0.71721
0.512144
chunks.go
starcoder
package data

import (
	"encoding/json"
	"time"
)

// Task represents a piece of work. A task can have multiple sub tasks.
type Task struct {
	ID          int          `json:"id"`
	Title       string       `json:"title"`
	Content     string       `json:"content"`
	TimeCreated time.Time    `json:"timeCreated"`
	SubTasks    []Task       `json:"subTasks"`
	Updates     []TaskUpdate `json:"updated"`
}

// TaskUpdate represents a single piece of information that adds data to a task.
type TaskUpdate struct {
	ID          int            `json:"id"`
	TimeCreated time.Time      `json:"timeCreated"`
	Type        TaskUpdateType `json:"type"`
	Title       string         `json:"title"`
	Description string         `json:"description"`
}

// TaskUpdateType represents the kind of data a TaskUpdate represents.
type TaskUpdateType string

const (
	// PhoneCall represents an interaction on the telephone.
	PhoneCall TaskUpdateType = "PHONE_CALL"
	// File represents a file.
	// FIX: File and Mail were previously declared without the explicit
	// TaskUpdateType type, making them untyped string constants unlike
	// PhoneCall; all three are now typed consistently.
	File TaskUpdateType = "FILE"
	// Mail represents an email.
	Mail TaskUpdateType = "MAIL"
)

// ToJSON takes a slice of tasks and returns their JSON representation.
func ToJSON(tasks []Task) (tasksJSON []byte, err error) {
	return json.Marshal(tasks)
}

// ToJSON returns the JSON representation of the given task.
func (task Task) ToJSON() (taskJSON []byte, err error) {
	return json.Marshal(task)
}

// GetTasksDummy returns a slice of tasks filled with static dummy data.
func GetTasksDummy() []Task { task1 := Task{ ID: 1, Title: "The first task ever created!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 2, Title: "This is the first subtask of task 1!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 3, Title: "Hit is the first subtask of subtask 2!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), }, }, }, }, Updates: []TaskUpdate{ { ID: 1, TimeCreated: time.Now(), Type: PhoneCall, Title: "Update 1: Call from Mr. X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 2, TimeCreated: time.Now(), Type: File, Title: "Update 2: Last years sales", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 3, TimeCreated: time.Now(), Type: Mail, Title: "Update 3: Mail from Mr. 
X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, }, } task2 := Task{ ID: 2, Title: "The second task ever created!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 2, Title: "This is the first subtask of task 1!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 3, Title: "Hit is the first subtask of subtask 2!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), }, }, }, }, Updates: []TaskUpdate{ { ID: 1, TimeCreated: time.Now(), Type: PhoneCall, Title: "Update 1: Call from Mr. X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 2, TimeCreated: time.Now(), Type: File, Title: "Update 2: Last years sales", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 3, TimeCreated: time.Now(), Type: Mail, Title: "Update 3: Mail from Mr. 
X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, }, } task3 := Task{ ID: 3, Title: "The third task ever created!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 2, Title: "This is the first subtask of task 1!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), SubTasks: []Task{ { ID: 3, Title: "Hit is the first subtask of subtask 2!", Content: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, TimeCreated: time.Now(), }, }, }, }, Updates: []TaskUpdate{ { ID: 1, TimeCreated: time.Now(), Type: PhoneCall, Title: "Update 1: Call from Mr. X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 2, TimeCreated: time.Now(), Type: File, Title: "Update 2: Last years sales", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, { ID: 3, TimeCreated: time.Now(), Type: Mail, Title: "Update 3: Mail from Mr. X", Description: ` # Headline Content of a task: * Follows the [CommonMark](http://commonmark.org/) spec * Render a string as markdown `, }, }, } return []Task{task1, task2, task3} }
data/data.go
0.566498
0.416589
data.go
starcoder
package aws

import (
	"context"
	"errors"
	"fmt"
	"sync"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/lambda"
	"github.com/aws/aws-sdk-go/service/lambda/lambdaiface"

	"github.com/benthosdev/benthos/v4/internal/impl/aws/config"
	"github.com/benthosdev/benthos/v4/public/service"
)

func init() {
	conf := service.NewConfigSpec().
		Stable().
		Summary("Invokes an AWS lambda for each message. The contents of the message is the payload of the request, and the result of the invocation will become the new contents of the message.").
		Description(`The `+"`rate_limit`"+` field can be used to specify a rate limit [resource](/docs/components/rate_limits/about) to cap the rate of requests across parallel components service wide. In order to map or encode the payload to a specific request body, and map the response back into the original payload instead of replacing it entirely, you can use the `+"[`branch` processor](/docs/components/processors/branch)"+`. ### Error Handling When Benthos is unable to connect to the AWS endpoint or is otherwise unable to invoke the target lambda function it will retry the request according to the configured number of retries. Once these attempts have been exhausted the failed message will continue through the pipeline with it's contents unchanged, but flagged as having failed, allowing you to use [standard processor error handling patterns](/docs/configuration/error_handling). However, if the invocation of the function is successful but the function itself throws an error, then the message will have it's contents updated with a JSON payload describing the reason for the failure, and a metadata field `+"`lambda_function_error`"+` will be added to the message allowing you to detect and handle function errors with a `+"[`branch`](/docs/components/processors/branch)"+`: `+"```yaml"+` pipeline: processors: - branch: processors: - aws_lambda: function: foo result_map: | root = if meta().exists("lambda_function_error") { throw("Invocation failed due to %v: %v".format(this.errorType, this.errorMessage)) } else { this } output: switch: retry_until_success: false cases: - check: errored() output: reject: ${! error() } - output: resource: somewhere_else `+"```"+` ### Credentials By default Benthos will use a shared credentials file when connecting to AWS services. It's also possible to set them explicitly at the component level, allowing you to transfer data across accounts. You can find out more [in this document](/docs/guides/cloud/aws).`).
		Categories("Integration").
		Version("3.36.0").
		Example(
			"Branched Invoke",
			` This example uses a `+"[`branch` processor](/docs/components/processors/branch/)"+` to map a new payload for triggering a lambda function with an ID and username from the original message, and the result of the lambda is discarded, meaning the original message is unchanged.`,
			` pipeline: processors: - branch: request_map: '{"id":this.doc.id,"username":this.user.name}' processors: - aws_lambda: function: trigger_user_update `,
		).
		Field(service.NewBoolField("parallel").
			Description("Whether messages of a batch should be dispatched in parallel.").
			Default(false)).
		Field(service.NewStringField("function").
			Description("The function to invoke.")).
		Field(service.NewStringField("rate_limit").
			Description("An optional [`rate_limit`](/docs/components/rate_limits/about) to throttle invocations by.").
			Default("").
			Advanced())

	// Shared AWS session/credentials fields are appended after the
	// processor-specific ones.
	for _, f := range config.SessionFields() {
		conf = conf.Field(f)
	}
	conf = conf.Field(service.NewDurationField("timeout").
		Description("The maximum period of time to wait before abandoning an invocation.").
		Default("5s").
		Advanced())
	conf = conf.Field(service.NewIntField("retries").
		Description("The maximum number of retry attempts for each message.").
		Default(3).
		Advanced())

	err := service.RegisterBatchProcessor(
		"aws_lambda", conf,
		func(conf *service.ParsedConfig, mgr *service.Resources) (service.BatchProcessor, error) {
			sess, err := GetSession(conf)
			if err != nil {
				return nil, err
			}
			parallel, err := conf.FieldBool("parallel")
			if err != nil {
				return nil, err
			}
			function, err := conf.FieldString("function")
			if err != nil {
				return nil, err
			}
			numRetries, err := conf.FieldInt("retries")
			if err != nil {
				return nil, err
			}
			rateLimit, err := conf.FieldString("rate_limit")
			if err != nil {
				return nil, err
			}
			timeout, err := conf.FieldDuration("timeout")
			if err != nil {
				return nil, err
			}
			return newLambdaProc(lambda.New(sess), parallel, function, numRetries, rateLimit, timeout, mgr)
		})
	if err != nil {
		panic(err)
	}
}

//------------------------------------------------------------------------------

// lambdaProc is the batch processor that dispatches messages to a lambda
// function, either serially or in parallel per batch.
type lambdaProc struct {
	client       *lambdaClient
	parallel     bool
	functionName string

	log *service.Logger
}

// newLambdaProc wires a configured lambdaClient into a processor.
func newLambdaProc(
	lambda lambdaiface.LambdaAPI,
	parallel bool,
	function string,
	numRetries int,
	rateLimit string,
	timeout time.Duration,
	mgr *service.Resources,
) (*lambdaProc, error) {
	l := &lambdaProc{
		functionName: function,
		log:          mgr.Logger(),
		parallel:     parallel,
	}
	var err error
	if l.client, err = newLambdaClient(lambda, function, numRetries, rateLimit, timeout, mgr); err != nil {
		return nil, err
	}
	return l, nil
}

//------------------------------------------------------------------------------

// ProcessBatch invokes the lambda once per message. Failed invocations are
// flagged on the message rather than aborting the batch.
func (l *lambdaProc) ProcessBatch(ctx context.Context, batch service.MessageBatch) ([]service.MessageBatch, error) {
	if !l.parallel || len(batch) == 1 {
		out := batch.Copy()
		for _, msg := range out {
			if err := l.client.InvokeV2(msg); err != nil {
				l.log.Errorf("Lambda function '%v' failed: %v\n", l.functionName, err)
				msg.SetError(err)
			}
		}
		return []service.MessageBatch{out}, nil
	}

	copies := make([]*service.Message, len(batch))
	for i, msg := range batch {
		copies[i] = msg.Copy()
	}

	var wg sync.WaitGroup
	wg.Add(len(copies))
	for i := 0; i < len(copies); i++ {
		go func(idx int) {
			defer wg.Done()
			if err := l.client.InvokeV2(copies[idx]); err != nil {
				l.log.Errorf("Lambda parallel request to '%v' failed: %v\n", l.functionName, err)
				copies[idx].SetError(err)
			}
		}(i)
	}
	wg.Wait()

	return []service.MessageBatch{service.MessageBatch(copies)}, nil
}

// Close is a no-op; the processor holds no resources needing shutdown.
func (l *lambdaProc) Close(context.Context) error {
	return nil
}

//------------------------------------------------------------------------------

// lambdaClient wraps the AWS SDK client with retry and rate-limit handling.
type lambdaClient struct {
	lambda lambdaiface.LambdaAPI
	log    *service.Logger
	mgr    *service.Resources

	function  string
	retries   int
	rateLimit string
	timeout   time.Duration
}

// newLambdaClient validates the configuration and builds a client.
func newLambdaClient(
	lambda lambdaiface.LambdaAPI,
	function string,
	numRetries int,
	rateLimit string,
	timeout time.Duration,
	mgr *service.Resources,
) (*lambdaClient, error) {
	l := lambdaClient{
		lambda:    lambda,
		log:       mgr.Logger(),
		mgr:       mgr,
		function:  function,
		retries:   numRetries,
		rateLimit: rateLimit,
		timeout:   timeout,
	}
	if function == "" {
		return nil, errors.New("lambda function must not be empty")
	}
	if rateLimit != "" {
		if !l.mgr.HasRateLimit(rateLimit) {
			return nil, fmt.Errorf("rate limit resource '%v' was not found", rateLimit)
		}
	}
	return &l, nil
}

//------------------------------------------------------------------------------

// waitForAccess blocks until the configured rate limit (if any) grants a
// slot. Rate limit errors are logged and retried after a one-second backoff.
func (l *lambdaClient) waitForAccess(ctx context.Context) bool {
	if l.rateLimit == "" {
		return true
	}
	for {
		var period time.Duration
		var err error
		if rerr := l.mgr.AccessRateLimit(ctx, l.rateLimit, func(rl service.RateLimit) {
			period, err = rl.Access(ctx)
		}); rerr != nil {
			err = rerr
		}
		if err != nil {
			l.log.Errorf("Rate limit error: %v\n", err)
			period = time.Second
		}
		if period <= 0 {
			return true
		}
		<-time.After(period)
	}
}

// InvokeV2 sends the message payload to the lambda, retrying transport
// failures up to the configured retry count. A function-level error is
// surfaced via the lambda_function_error metadata key, not as a Go error.
func (l *lambdaClient) InvokeV2(p *service.Message) error {
	retriesLeft := l.retries
	for {
		l.waitForAccess(context.Background())

		payload, err := p.AsBytes()
		if err != nil {
			return err
		}

		ctx, done := context.WithTimeout(context.Background(), l.timeout)
		result, err := l.lambda.InvokeWithContext(ctx, &lambda.InvokeInput{
			FunctionName: aws.String(l.function),
			Payload:      payload,
		})
		done()

		if err == nil {
			if result.FunctionError != nil {
				p.MetaSet("lambda_function_error", *result.FunctionError)
			}
			p.SetBytes(result.Payload)
			return nil
		}

		retriesLeft--
		if retriesLeft < 0 {
			return err
		}
	}
}
internal/impl/aws/processor_lambda.go
0.709321
0.67842
processor_lambda.go
starcoder
package ray import ( "fmt" "math" ) type Matrix [][]float64 type RowValues []float64 func NewMatrix(rows, columns int, rowsValues ...RowValues) Matrix { m := make(Matrix, rows) for r := 0; r < rows; r++ { m[r] = make([]float64, columns) } for r := range rowsValues { for i := range rowsValues[r] { m[r][i] = rowsValues[r][i] } } return m } func (m Matrix) Get(row, column int) float64 { return m[row][column] } func (m Matrix) Set(row, column int, val float64) { m[row][column] = val } func (m Matrix) SetRow(row int, vals ...float64) { for i := range m[row] { m[row][i] = vals[i] } } func (m Matrix) Transpose() (result Matrix) { result = NewMatrix(len(m[0]), len(m)) for row := range m { for col := range m[row] { result[col][row] = m[row][col] } } return result } func (m Matrix) Multiply(by Matrix) (result Matrix) { result = NewMatrix(len(m), len(by[0])) for row := range result { for col := range result[row] { var val float64 for byCol := range by { val = val + (m[row][byCol] * by[byCol][col]) } result[row][col] = val } } return result } func (m Matrix) Determinant() float64 { if len(m) == 2 && len(m[0]) == 2 { return (m[0][0] * m[1][1]) - (m[0][1] * m[1][0]) } var res float64 for col := range m[0] { c := m.Cofactor(0, col) res = res + (m[0][col] * c) } return res } func (m Matrix) SubMatrix(row, col int) (result Matrix) { result = NewMatrix(len(m)-1, len(m[0])-1) for r := range m { if r == row { continue } currentRow := r if currentRow > row { currentRow-- } for c := range m[r] { if c == col { continue } currentCol := c if currentCol > col { currentCol-- } result[currentRow][currentCol] = m[r][c] } } return result } func (m Matrix) Minor(row, col int) float64 { return m. SubMatrix(row, col). 
Determinant() } func (m Matrix) Cofactor(row, col int) float64 { if (row+col)%2 == 0 { return m.Minor(row, col) } return -m.Minor(row, col) } func (m Matrix) MultiplyByTuple(vals ...float64) (result Matrix) { const colSizeForTuple = 1 tuple := NewMatrix(len(vals), colSizeForTuple) for i := range vals { tuple[i] = []float64{vals[i]} } return m.Multiply(tuple) } func (m Matrix) MultiplyByVector(vec Vector) (result Vector) { mt := m.MultiplyByTuple(vec.GetX(), vec.GetY(), vec.GetZ(), vec.GetW()) return newTuple(mt[0][0], mt[1][0], mt[2][0], mt[3][0]) } func (m Matrix) Inverse() (result Matrix, err error) { d := m.Determinant() if d == 0 { return nil, NonInvertibleErr } result = NewMatrix(len(m), len(m[0])) for row := range m { for col := range m[row] { result[col][row] = m.Cofactor(row, col) / d } } return result, err } func Rotation(axis Axis, by float64) (rotation Matrix) { rotation = DefaultIdentityMatrix() cosBy := math.Cos(by) sinBy := math.Sin(by) switch axis { case X: rotation[1][1] = cosBy rotation[1][2] = -sinBy rotation[2][1] = sinBy rotation[2][2] = cosBy case Y: rotation[0][0] = cosBy rotation[0][2] = sinBy rotation[2][0] = -sinBy rotation[2][2] = cosBy case Z: rotation[0][0] = cosBy rotation[0][1] = -sinBy rotation[1][0] = sinBy rotation[1][1] = cosBy } return rotation } func (m Matrix) String() string { s := "[\n" for row := range m { for col := range m[row] { s = s + fmt.Sprintf("%v ", m[row][col]) } s = s + "\n" } s = s + "]" return s } func DefaultIdentityMatrix() (identityMatrix Matrix) { return IdentityMatrix(defaultIdentityMatrixSize, defaultIdentityMatrixSize) } func DefaultIdentityMatrixInverse() (identityMatrixInverse Matrix) { identityMatrixInverse, _ = DefaultIdentityMatrix().Inverse() return identityMatrixInverse } func IdentityMatrix(rows, cols int) (identityMatrix Matrix) { identityMatrix = NewMatrix(rows, cols) for row := range identityMatrix { for col := range identityMatrix[row] { if col == row { identityMatrix[row][col] = 1 } else { 
identityMatrix[row][col] = 0 } } } return identityMatrix } func ViewTransform(from, to, up Vector) (result Matrix) { fwd := to.Subtract(from).Normalize() upN := up.Normalize() left := Cross(fwd, upN) trueUp := Cross(left, fwd) orientation := NewMatrix(4, 4, RowValues{left.GetX(), left.GetY(), left.GetZ(), 0}, RowValues{trueUp.GetX(), trueUp.GetY(), trueUp.GetZ(), 0}, RowValues{-fwd.GetX(), -fwd.GetY(), -fwd.GetZ(), 0}, RowValues{0, 0, 0, 1}, ) return orientation.Multiply(Translation(-from.GetX(), -from.GetY(), -from.GetZ())) }
go/internal/ray/matrices.go
0.744656
0.469399
matrices.go
starcoder
package specifics // EmptyIntSlice returns an empty, non-nil int slice func EmptyIntSlice() []int { return make([]int, 0) } // IntSlicesEqual returns whether two integer arrays are // equal func IntSlicesEqual(arr1, arr2 []int) bool { if len(arr1) != len(arr2) { return false } for i, num := range arr1 { if num != arr2[i] { return false } } return true } // IntSlicesZipSortedNoDup zips two already-sorted arrays // and discards any duplicates func IntSlicesZipSortedNoDup(intArr1, intArr2 []int) []int { var zipped []int i := 0 j := 0 var lastInt int = -1 for i < len(intArr1) && j < len(intArr2) { currInt1 := intArr1[i] currInt2 := intArr2[j] if currInt1 < 0 || currInt2 < 0 { panic("Got negative number trying to zip sorted") } for currInt1 == lastInt { // this loop should never run // more than once, but can in // the event of intra-array // duplicates i++ currInt1 = intArr1[i] } for currInt2 == lastInt { // ditto for this loop j++ currInt2 = intArr2[j] } if currInt1 < currInt2 { zipped = append(zipped, currInt1) lastInt = currInt1 i++ } else if currInt2 < currInt1 { zipped = append(zipped, currInt2) lastInt = currInt2 j++ } else { zipped = append(zipped, currInt1) lastInt = currInt1 i++ j++ } } // add remaining elements if any for ; i < len(intArr1); i++ { zipped = append(zipped, intArr1[i]) } for ; j < len(intArr2); j++ { zipped = append(zipped, intArr2[j]) } return zipped } // IntSlicesBinarySearch returns the index of the found // element and -1 if not found func IntSlicesBinarySearch(haystack []int, needle int) int { low := 0 high := len(haystack) - 1 for low <= high { median := (low + high) / 2 if haystack[median] < needle { low = median + 1 } else { high = median - 1 } } if low == len(haystack) || haystack[low] != needle { return -1 } return low } // IntSlicesDeleteSorted deletes the specified value from the int slice // ONLY if it is found func IntSlicesDeleteSorted(sortedInt []int, toDelete int) []int { foundIndex := IntSlicesBinarySearch(sortedInt, toDelete) 
if foundIndex == -1 { return sortedInt } var newArr []int for i, num := range sortedInt { if i == foundIndex { continue } newArr = append(newArr, num) } return newArr }
intslices.go
0.693784
0.452415
intslices.go
starcoder
package ucb1 import ( `math` `math/rand` `time` ) func init() { rand.Seed(time.Now().UnixNano()) } type UCB1 struct { probabilities []float64 cost float64 rewardsPerArm []float64 timesPerArm []int steps []Step totalTimes int totalReward float64 } func New(probabilities []float64, cost float64) *UCB1 { var rewardsPerArm = make([]float64, len(probabilities)) var steps = make([]Step, 0) var timesPerArm = make([]int, len(probabilities)) return &UCB1{ probabilities: probabilities, cost: cost, rewardsPerArm: rewardsPerArm, timesPerArm: timesPerArm, steps: steps, } } func (u *UCB1) upperBound(arm int) float64 { return math.Sqrt(2 * math.Log(float64(u.totalTimes)) / float64(u.timesPerArm[arm])) } func (u *UCB1) avgReward(arm int) float64 { return u.rewardsPerArm[arm] / float64(u.timesPerArm[arm]) } func (u *UCB1) getReward(arm int) (reward float64) { reward = -u.cost if randVal := rand.Float64(); u.probabilities[arm] >= randVal { reward += 1.0 } return } func (u *UCB1) GuessArm() (selectedArm int) { if u.totalTimes < len(u.probabilities) { selectedArm = u.totalTimes } else { var maxUCB float64 for arm := range u.probabilities { if ucb := u.avgReward(arm) + u.upperBound(arm); ucb > maxUCB { selectedArm = arm maxUCB = ucb } } } var reward = u.getReward(selectedArm) u.rewardsPerArm[selectedArm] += reward u.totalReward += reward u.steps = append(u.steps, Step{selectedArm, u.totalReward}) u.totalTimes += 1 u.timesPerArm[selectedArm] += 1 return } func (u *UCB1) Probabilities() []float64 { return u.probabilities } func (u *UCB1) Cost() float64 { return u.cost } func (u *UCB1) RewardsPerArm() []float64 { return u.rewardsPerArm } func (u *UCB1) TimesPerArm() []int { return u.timesPerArm } func (u *UCB1) Steps() []Step { return u.steps } func (u *UCB1) TotalTimes() int { return u.totalTimes } func (u *UCB1) TotalReward() float64 { return u.totalReward }
B3S1 - Artifical Intelligence/Homework 5 - Multi-Armed Bandit (UCB1)/ucb1/ucb1.go
0.668772
0.404272
ucb1.go
starcoder
package learn import ( "math" "github.com/egon12/cols" ) type ( FieldName string FieldContent string Result string ID3Processor struct { Fields []FieldName DB Data } Data struct { Header []FieldName Rows Rows } Row struct { Input map[FieldName]FieldContent Result Result } Rows []Row ) func NextFieldName(data Data) FieldName { type Gains struct { FieldName Gain float64 } g := cols.Map(data.Header, func(field FieldName) Gains { return Gains{ Gain: Gain(data.Rows, field), FieldName: field, } }) sg := cols.MaxStruct(g, func(g Gains) float64 { return g.Gain }) return sg.FieldName } func ShouldReturnResult(rows Rows) bool { return rows.Entropy() == 0.0 } // ES get entropy from S (Result) func ES(rows Rows) float64 { return rows.Entropy() } // Gain get information Gain from field func Gain(rows Rows, field FieldName) float64 { fData := rows.GetDataFromField(field) fCount := cols.CountBy(fData, itself[FieldContent]) var res float64 = 0.0 for fieldContent, count := range fCount { entropy := ESContent(rows, field, fieldContent) probability := float64(count) / float64(len(rows)) res += probability * entropy } return rows.Entropy() - res } // ESContent get entropy from S(Result) and FieldContent func ESContent(rows Rows, field FieldName, content FieldContent) float64 { return rows.FilterByContent(field, content).Entropy() } func (i *ID3Processor) Load(db Data) { i.DB = db i.Fields = db.Header } func (r Rows) Entropy() float64 { rowCount := len(r) results := cols.Map(r, getResultFromRow) resultCounts := cols.CountBy(results, itself[Result]) var res float64 = 0.0 for _, c := range resultCounts { res += singleEntropy(float64(c), float64(rowCount)) } return res * -1 } func (r Rows) GetDataFromField(field FieldName) []FieldContent { return cols.Map(r, func(row Row) FieldContent { return FieldContent(row.Input[field]) }) } func (r Rows) FilterByContent(field FieldName, content FieldContent) Rows { filterFunc := isRowFieldHasContent(field, content) return cols.Filter(r, filterFunc) } 
func itself[T any](t T) T { return t } func singleEntropy(count, total float64) float64 { p := count / total return p * math.Log2(p) } func getResultFromRow(r Row) Result { return r.Result } func isRowFieldHasContent(f FieldName, c FieldContent) func(Row) bool { return func(r Row) bool { return r.Input[f] == c } }
learn/learn.go
0.74382
0.401893
learn.go
starcoder
package wid import ( "image" "gioui.org/f32" "gioui.org/layout" ) // Fit scales a widget to fit and clip to the constraints. type Fit uint8 const ( // Unscaled does not alter the scale of a widget. Unscaled Fit = iota // Contain scales widget as large as possible without cropping, // and it preserves aspect-ratio. Contain // Cover scales the widget to cover the constraint area and // preserves aspect-ratio. Cover // ScaleDown scales the widget smaller without cropping, // when it exceeds the constraint area. // It preserves aspect-ratio. ScaleDown // Fill stretches the widget to the constraints and does not // preserve aspect-ratio. Fill ) // scale computes the new dimensions and transformation required to fit dims to cs, given the position. func (fit Fit) scale(cs layout.Constraints, pos layout.Direction, dims layout.Dimensions) (layout.Dimensions, f32.Affine2D) { widgetSize := dims.Size if fit == Unscaled || dims.Size.X == 0 || dims.Size.Y == 0 { dims.Size = cs.Constrain(dims.Size) offset := pos.Position(widgetSize, dims.Size) dims.Baseline += offset.Y return dims, f32.Affine2D{}.Offset(layout.FPt(offset)) } scale := f32.Point{ X: float32(cs.Max.X) / float32(dims.Size.X), Y: float32(cs.Max.Y) / float32(dims.Size.Y), } switch fit { case Contain: if scale.Y < scale.X { scale.X = scale.Y } else { scale.Y = scale.X } case Cover: if scale.Y > scale.X { scale.X = scale.Y } else { scale.Y = scale.X } case ScaleDown: if scale.Y < scale.X { scale.X = scale.Y } else { scale.Y = scale.X } // The widget would need to be scaled up, no change needed. 
if scale.X >= 1 { dims.Size = cs.Constrain(dims.Size) offset := pos.Position(widgetSize, dims.Size) dims.Baseline += offset.Y return dims, f32.Affine2D{}.Offset(layout.FPt(offset)) } case Fill: } var scaledSize image.Point scaledSize.X = int(float32(widgetSize.X) * scale.X) scaledSize.Y = int(float32(widgetSize.Y) * scale.Y) dims.Size = cs.Constrain(scaledSize) dims.Baseline = int(float32(dims.Baseline) * scale.Y) offset := pos.Position(scaledSize, dims.Size) trans := f32.Affine2D{}. Scale(f32.Point{}, scale). Offset(layout.FPt(offset)) dims.Baseline += offset.Y return dims, trans }
wid/fit.go
0.786746
0.401189
fit.go
starcoder
package clover // Query represents a generic query which is submitted to a specific collection. type Query struct { engine StorageEngine collection string criteria *Criteria } func (q *Query) satisfy(doc *Document) bool { if q.criteria == nil { return true } return q.criteria.p(doc) } // Count returns the number of documents which satisfy the query (i.e. len(q.FindAll()) == q.Count()). func (q *Query) Count() (int, error) { docs, err := q.FindAll() return len(docs), err } // MatchPredicate selects all the documents which satisfy the supplied predicate function. func (q *Query) MatchPredicate(p func(doc *Document) bool) *Query { return q.Where(&Criteria{p}) } // Where returns a new Query which select all the documents fullfilling both the base query and the provided Criteria. func (q *Query) Where(c *Criteria) *Query { newCriteria := q.criteria if newCriteria == nil { newCriteria = c } else { newCriteria = newCriteria.And(c) } return &Query{ engine: q.engine, collection: q.collection, criteria: newCriteria, } } // FindById returns the document with the given id, if such a document exists and satisfies the underlying query, or null. func (q *Query) FindById(id string) (*Document, error) { return q.engine.FindById(q.collection, id) } // FindAll selects all the documents satisfying q. func (q *Query) FindAll() ([]*Document, error) { return q.engine.FindAll(q) } // Update updates all the document selected by q using the provided updateMap. // Each update is specified by a mapping fieldName -> newValue. func (q *Query) Update(updateMap map[string]interface{}) error { return q.engine.Update(q, updateMap) } // DeleteById removes the document with the given id from the underlying collection, provided that such a document exists and satisfies the underlying query. func (q *Query) DeleteById(id string) error { return q.engine.DeleteById(q.collection, id) } // Delete removes all the documents selected by q from the underlying collection. 
func (q *Query) Delete() error { return q.engine.Delete(q) }
query.go
0.886684
0.402451
query.go
starcoder
package primitive import ( "fmt" "image" "image/color" "strings" ) type Color struct { R, G, B, A int } func (c *Color) NRGBA() color.NRGBA { return color.NRGBA{uint8(c.R), uint8(c.G), uint8(c.B), uint8(c.A)} } func (c *Color) Delta(color *Color) Color { x := Color{c.R - color.R, c.G - color.G, c.B - color.B, c.A - color.A} if x.R < 0 { x.R = -x.R } x.R = clampInt(x.R, 0, 255) if x.G < 0 { x.G = -x.G } x.G = clampInt(x.G, 0, 255) if x.B < 0 { x.B = -x.B } x.B = clampInt(x.B, 0, 255) if x.A < 0 { x.A = -x.A } x.A = clampInt(x.A, 0, 255) return x } func MakeColor(c color.Color) Color { r, g, b, a := c.RGBA() result := Color{int(r / 257), int(g / 257), int(b / 257), int(a / 257)} vv("%v\n", result) return result } func MakeHexColor(x string) Color { x = strings.Trim(x, "#") var r, g, b, a int a = 255 switch len(x) { case 3: fmt.Sscanf(x, "%1x%1x%1x", &r, &g, &b) r = (r << 4) | r g = (g << 4) | g b = (b << 4) | b case 4: fmt.Sscanf(x, "%1x%1x%1x%1x", &r, &g, &b, &a) r = (r << 4) | r g = (g << 4) | g b = (b << 4) | b a = (a << 4) | a case 6: fmt.Sscanf(x, "%02x%02x%02x", &r, &g, &b) case 8: fmt.Sscanf(x, "%02x%02x%02x%02x", &r, &g, &b, &a) } result := Color{r, g, b, a} vv("%v\n", result) return result } // MostFrequentImageColor returns the average color in the image. func AverageImageColor(im image.Image) color.NRGBA { rgba := imageToRGBA(im) size := rgba.Bounds().Size() w, h := size.X, size.Y var r, g, b int for y := 0; y < h; y++ { for x := 0; x < w; x++ { c := rgba.RGBAAt(x, y) r += int(c.R) g += int(c.G) b += int(c.B) } } r /= w * h g /= w * h b /= w * h return color.NRGBA{uint8(r), uint8(g), uint8(b), 255} } // MostFrequentImageColor returns the most-frequently used color in the image. // NOTE: The low-order bits are masked off. 
func MostFrequentImageColor(im image.Image) color.NRGBA { const mask = 0xff - 0x03 rgba := imageToRGBA(im) size := rgba.Bounds().Size() w, h := size.X, size.Y frequency := make(map[color.RGBA]int) for y := 0; y < h; y++ { for x := 0; x < w; x++ { c := rgba.RGBAAt(x, y) c.A = 0 // discard low bits. c.R &= mask c.G &= mask c.B &= mask frequency[c]++ } } var best color.RGBA m := 0 for k, v := range frequency { vv("%v = %d", k, v) if v > m { best = k m = v } } return color.NRGBA{best.R, best.G, best.B, 255} } // ColorAtPoint returns the color at a point in the image. func ColorAtPoint(im image.Image, x, y int) color.NRGBA { rgba := imageToRGBA(im) size := rgba.Bounds().Size() if x < 0 || x > size.X { x = 0 } if y < 0 || y > size.Y { y = 0 } c := rgba.RGBAAt(x, y) return color.NRGBA{c.R, c.G, c.B, 255} }
primitive/color.go
0.818701
0.423518
color.go
starcoder
package models import ( i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" ) // ApiApplication type ApiApplication struct { // When true, allows an application to use claims mapping without specifying a custom signing key. acceptMappedClaims *bool // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. additionalData map[string]interface{} // Used for bundling consent if you have a solution that contains two parts: a client app and a custom web API app. If you set the appID of the client app to this value, the user only consents once to the client app. Azure AD knows that consenting to the client means implicitly consenting to the web API and automatically provisions service principals for both APIs at the same time. Both the client and the web API app must be registered in the same tenant. knownClientApplications []string // The definition of the delegated permissions exposed by the web API represented by this application registration. These delegated permissions may be requested by a client application, and may be granted by users or administrators during consent. Delegated permissions are sometimes referred to as OAuth 2.0 scopes. oauth2PermissionScopes []PermissionScopeable // Lists the client applications that are pre-authorized with the specified delegated permissions to access this application's APIs. Users are not required to consent to any pre-authorized application (for the permissions specified). However, any additional permissions not listed in preAuthorizedApplications (requested through incremental consent for example) will require user consent. preAuthorizedApplications []PreAuthorizedApplicationable // Specifies the access token version expected by this resource. This changes the version and format of the JWT produced independent of the endpoint or client used to request the access token. 
The endpoint used, v1.0 or v2.0, is chosen by the client and only impacts the version of id_tokens. Resources need to explicitly configure requestedAccessTokenVersion to indicate the supported access token format. Possible values for requestedAccessTokenVersion are 1, 2, or null. If the value is null, this defaults to 1, which corresponds to the v1.0 endpoint. If signInAudience on the application is configured as AzureADandPersonalMicrosoftAccount, the value for this property must be 2 requestedAccessTokenVersion *int32 } // NewApiApplication instantiates a new apiApplication and sets the default values. func NewApiApplication()(*ApiApplication) { m := &ApiApplication{ } m.SetAdditionalData(make(map[string]interface{})); return m } // CreateApiApplicationFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value func CreateApiApplicationFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { return NewApiApplication(), nil } // GetAcceptMappedClaims gets the acceptMappedClaims property value. When true, allows an application to use claims mapping without specifying a custom signing key. func (m *ApiApplication) GetAcceptMappedClaims()(*bool) { if m == nil { return nil } else { return m.acceptMappedClaims } } // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
func (m *ApiApplication) GetAdditionalData()(map[string]interface{}) { if m == nil { return nil } else { return m.additionalData } } // GetFieldDeserializers the deserialization information for the current model func (m *ApiApplication) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res["acceptMappedClaims"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetBoolValue() if err != nil { return err } if val != nil { m.SetAcceptMappedClaims(val) } return nil } res["knownClientApplications"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfPrimitiveValues("string") if err != nil { return err } if val != nil { res := make([]string, len(val)) for i, v := range val { res[i] = *(v.(*string)) } m.SetKnownClientApplications(res) } return nil } res["oauth2PermissionScopes"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreatePermissionScopeFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]PermissionScopeable, len(val)) for i, v := range val { res[i] = v.(PermissionScopeable) } m.SetOauth2PermissionScopes(res) } return nil } res["preAuthorizedApplications"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreatePreAuthorizedApplicationFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]PreAuthorizedApplicationable, len(val)) for i, v := range val { res[i] = v.(PreAuthorizedApplicationable) } m.SetPreAuthorizedApplications(res) } return nil } res["requestedAccessTokenVersion"] = func (n 
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetInt32Value() if err != nil { return err } if val != nil { m.SetRequestedAccessTokenVersion(val) } return nil } return res } // GetKnownClientApplications gets the knownClientApplications property value. Used for bundling consent if you have a solution that contains two parts: a client app and a custom web API app. If you set the appID of the client app to this value, the user only consents once to the client app. Azure AD knows that consenting to the client means implicitly consenting to the web API and automatically provisions service principals for both APIs at the same time. Both the client and the web API app must be registered in the same tenant. func (m *ApiApplication) GetKnownClientApplications()([]string) { if m == nil { return nil } else { return m.knownClientApplications } } // GetOauth2PermissionScopes gets the oauth2PermissionScopes property value. The definition of the delegated permissions exposed by the web API represented by this application registration. These delegated permissions may be requested by a client application, and may be granted by users or administrators during consent. Delegated permissions are sometimes referred to as OAuth 2.0 scopes. func (m *ApiApplication) GetOauth2PermissionScopes()([]PermissionScopeable) { if m == nil { return nil } else { return m.oauth2PermissionScopes } } // GetPreAuthorizedApplications gets the preAuthorizedApplications property value. Lists the client applications that are pre-authorized with the specified delegated permissions to access this application's APIs. Users are not required to consent to any pre-authorized application (for the permissions specified). However, any additional permissions not listed in preAuthorizedApplications (requested through incremental consent for example) will require user consent. 
func (m *ApiApplication) GetPreAuthorizedApplications()([]PreAuthorizedApplicationable) { if m == nil { return nil } else { return m.preAuthorizedApplications } } // GetRequestedAccessTokenVersion gets the requestedAccessTokenVersion property value. Specifies the access token version expected by this resource. This changes the version and format of the JWT produced independent of the endpoint or client used to request the access token. The endpoint used, v1.0 or v2.0, is chosen by the client and only impacts the version of id_tokens. Resources need to explicitly configure requestedAccessTokenVersion to indicate the supported access token format. Possible values for requestedAccessTokenVersion are 1, 2, or null. If the value is null, this defaults to 1, which corresponds to the v1.0 endpoint. If signInAudience on the application is configured as AzureADandPersonalMicrosoftAccount, the value for this property must be 2 func (m *ApiApplication) GetRequestedAccessTokenVersion()(*int32) { if m == nil { return nil } else { return m.requestedAccessTokenVersion } } // Serialize serializes information the current object func (m *ApiApplication) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { { err := writer.WriteBoolValue("acceptMappedClaims", m.GetAcceptMappedClaims()) if err != nil { return err } } if m.GetKnownClientApplications() != nil { err := writer.WriteCollectionOfStringValues("knownClientApplications", m.GetKnownClientApplications()) if err != nil { return err } } if m.GetOauth2PermissionScopes() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetOauth2PermissionScopes())) for i, v := range m.GetOauth2PermissionScopes() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("oauth2PermissionScopes", cast) if err != nil { return err } } if 
m.GetPreAuthorizedApplications() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetPreAuthorizedApplications())) for i, v := range m.GetPreAuthorizedApplications() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("preAuthorizedApplications", cast) if err != nil { return err } } { err := writer.WriteInt32Value("requestedAccessTokenVersion", m.GetRequestedAccessTokenVersion()) if err != nil { return err } } { err := writer.WriteAdditionalData(m.GetAdditionalData()) if err != nil { return err } } return nil } // SetAcceptMappedClaims sets the acceptMappedClaims property value. When true, allows an application to use claims mapping without specifying a custom signing key. func (m *ApiApplication) SetAcceptMappedClaims(value *bool)() { if m != nil { m.acceptMappedClaims = value } } // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. func (m *ApiApplication) SetAdditionalData(value map[string]interface{})() { if m != nil { m.additionalData = value } } // SetKnownClientApplications sets the knownClientApplications property value. Used for bundling consent if you have a solution that contains two parts: a client app and a custom web API app. If you set the appID of the client app to this value, the user only consents once to the client app. Azure AD knows that consenting to the client means implicitly consenting to the web API and automatically provisions service principals for both APIs at the same time. Both the client and the web API app must be registered in the same tenant. func (m *ApiApplication) SetKnownClientApplications(value []string)() { if m != nil { m.knownClientApplications = value } } // SetOauth2PermissionScopes sets the oauth2PermissionScopes property value. 
The definition of the delegated permissions exposed by the web API represented by this application registration. These delegated permissions may be requested by a client application, and may be granted by users or administrators during consent. Delegated permissions are sometimes referred to as OAuth 2.0 scopes. func (m *ApiApplication) SetOauth2PermissionScopes(value []PermissionScopeable)() { if m != nil { m.oauth2PermissionScopes = value } } // SetPreAuthorizedApplications sets the preAuthorizedApplications property value. Lists the client applications that are pre-authorized with the specified delegated permissions to access this application's APIs. Users are not required to consent to any pre-authorized application (for the permissions specified). However, any additional permissions not listed in preAuthorizedApplications (requested through incremental consent for example) will require user consent. func (m *ApiApplication) SetPreAuthorizedApplications(value []PreAuthorizedApplicationable)() { if m != nil { m.preAuthorizedApplications = value } } // SetRequestedAccessTokenVersion sets the requestedAccessTokenVersion property value. Specifies the access token version expected by this resource. This changes the version and format of the JWT produced independent of the endpoint or client used to request the access token. The endpoint used, v1.0 or v2.0, is chosen by the client and only impacts the version of id_tokens. Resources need to explicitly configure requestedAccessTokenVersion to indicate the supported access token format. Possible values for requestedAccessTokenVersion are 1, 2, or null. If the value is null, this defaults to 1, which corresponds to the v1.0 endpoint. If signInAudience on the application is configured as AzureADandPersonalMicrosoftAccount, the value for this property must be 2 func (m *ApiApplication) SetRequestedAccessTokenVersion(value *int32)() { if m != nil { m.requestedAccessTokenVersion = value } }
models/api_application.go
0.748076
0.422505
api_application.go
starcoder
package schedule

import (
	"fmt"
	"time"
)

// Task describes when and how frequently a scheduled task should be executed
// and names the component that provides the method to actually perform it.
type Task struct {
	// Name is a human-readable name for the task.
	Name string
	// ID is an optional unique ID for the task (the IoC component name for
	// this task will be used if not specified).
	ID string
	// Component is the name of the IoC component implementing TaskLogic that
	// actually performs this task.
	Component string
	// MaxOverlapping is the maximum number of overlapping instances of the
	// task that are allowed to run. Zero means only one instance of this task
	// can run at a time.
	MaxOverlapping int
	// NoWarnOnOverlap, when true, suppresses warning messages being logged
	// when a task is scheduled to run while another instance is already
	// running.
	NoWarnOnOverlap bool
	// Every is a human-readable expression (in English) of how frequently the
	// task should be run - see package docs.
	Every string
	// LogStatusMessages, when true, causes any status update messages sent
	// from the task to the scheduler to be logged.
	LogStatusMessages bool
	// StatusUpdateReceiver is the name of a component that is interested in
	// receiving status updates from a running task.
	StatusUpdateReceiver string
	// Disabled, when true, means the task will never run.
	Disabled bool
	// MaxRetries is the number of times an invocation of this task should be
	// re-tried if the task fails with an AllowRetryError.
	MaxRetries int
	// RetryInterval is a human-readable expression (in English) of the
	// interval to wait between a failure and a retry (e.g. 1 minute, 20
	// seconds). Must be set if MaxRetries > 0.
	RetryInterval string

	receiver  TaskStatusUpdateReceiver
	logic     TaskLogic
	retryWait time.Duration
}

// FullName returns either task name + ID, just task name or just ID depending
// on which fields are set.
func (t *Task) FullName() string {
	switch {
	case t.ID == "":
		return t.Name
	case t.Name == "":
		return t.ID
	default:
		return fmt.Sprintf("%s (%s)", t.Name, t.ID)
	}
}

// StatusMessagef creates a TaskStatusUpdate whose message is built from the
// supplied format string and arguments.
func StatusMessagef(format string, a ...interface{}) TaskStatusUpdate {
	return TaskStatusUpdate{
		Message: fmt.Sprintf(format, a...),
	}
}

// TaskStatusUpdate allows a task to communicate back to its manager some
// status.
type TaskStatusUpdate struct {
	Message string
	Status  interface{}
}

// TaskLogic is implemented by any component that can be invoked via a
// scheduled task.
type TaskLogic interface {
	ExecuteTask(c chan TaskStatusUpdate) error
}

// TaskStatusUpdateReceiver is implemented by a component that wants to
// receive status updates about an invocation of a task.
type TaskStatusUpdateReceiver interface {
	Receive(summary TaskInvocationSummary, update TaskStatusUpdate)
}

// TaskInvocationSummary holds meta-data about a task invocation.
type TaskInvocationSummary struct {
	TaskName        string
	TaskID          string
	StartedAt       time.Time
	InvocationCount uint64
}
schedule/task.go
0.624294
0.414129
task.go
starcoder
package typed

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"

	"sigs.k8s.io/structured-merge-diff/schema"
	"sigs.k8s.io/structured-merge-diff/value"
)

// YAMLObject is an object encoded in YAML.
type YAMLObject string

// Parser implements YAMLParser and allows introspecting the schema.
type Parser struct {
	Schema schema.Schema
}

// create builds an unvalidated parser from the given schema YAML.
func create(schema YAMLObject) (*Parser, error) {
	p := &Parser{}
	err := yaml.Unmarshal([]byte(schema), &p.Schema)
	return p, err
}

// createOrDie is create for package-level initialization: it panics on error.
func createOrDie(schema YAMLObject) *Parser {
	p, err := create(schema)
	if err != nil {
		panic(fmt.Errorf("failed to create parser: %v", err))
	}
	return p
}

// ssParser parses schemas themselves; it is used by NewParser for validation.
var ssParser = createOrDie(YAMLObject(schema.SchemaSchemaYAML))

// NewParser will build a YAMLParser from a schema. The schema is validated.
func NewParser(schema YAMLObject) (*Parser, error) {
	if _, err := ssParser.Type("schema").FromYAML(schema); err != nil {
		return nil, fmt.Errorf("unable to validate schema: %v", err)
	}
	return create(schema)
}

// TypeNames returns a list of types this parser understands.
func (p *Parser) TypeNames() (names []string) {
	for _, td := range p.Schema.Types {
		names = append(names, td.Name)
	}
	return names
}

// Type returns a helper which can produce objects of the given type. Any
// errors are deferred until a further function is called.
func (p *Parser) Type(name string) ParseableType {
	return &parseableType{
		parser:   p,
		typename: name,
	}
}

// ParseableType allows for easy production of typed objects.
type ParseableType interface {
	IsValid() bool
	FromYAML(YAMLObject) (TypedValue, error)
	FromUnstructured(interface{}) (TypedValue, error)
}

// parseableType pairs a parser with the name of one of its types.
type parseableType struct {
	parser   *Parser
	typename string
}

var _ ParseableType = &parseableType{}

// IsValid return true if p's schema and typename are valid.
func (p *parseableType) IsValid() bool {
	_, ok := p.parser.Schema.Resolve(schema.TypeRef{NamedType: &p.typename})
	return ok
}

// FromYAML parses a yaml string into an object with the current schema
// and the type "typename" or an error if validation fails.
func (p *parseableType) FromYAML(object YAMLObject) (TypedValue, error) {
	v, err := value.FromYAML([]byte(object))
	if err != nil {
		return nil, err
	}
	return AsTyped(v, &p.parser.Schema, p.typename)
}

// FromUnstructured converts a go interface to a TypedValue. It will return an
// error if the resulting object fails schema validation.
func (p *parseableType) FromUnstructured(in interface{}) (TypedValue, error) {
	v, err := value.FromUnstructured(in)
	if err != nil {
		return nil, err
	}
	return AsTyped(v, &p.parser.Schema, p.typename)
}

// DeducedParseableType is a ParseableType that deduces the type from
// the content of the object.
type DeducedParseableType struct{}

var _ ParseableType = DeducedParseableType{}

// IsValid always returns true for a DeducedParseableType.
func (p DeducedParseableType) IsValid() bool {
	return true
}

// FromYAML parses a yaml string into an object and deduces the type for
// that object.
func (p DeducedParseableType) FromYAML(object YAMLObject) (TypedValue, error) {
	v, err := value.FromYAML([]byte(object))
	if err != nil {
		return nil, err
	}
	return AsTypedDeduced(v), nil
}

// FromUnstructured converts a go interface to a TypedValue. It will return an
// error if the input object uses un-handled types.
func (p DeducedParseableType) FromUnstructured(in interface{}) (TypedValue, error) {
	v, err := value.FromUnstructured(in)
	if err != nil {
		return nil, err
	}
	return AsTypedDeduced(v), nil
}
vendor/sigs.k8s.io/structured-merge-diff/typed/parser.go
0.726426
0.408159
parser.go
starcoder
// Package archer contains the structs that represent archer concepts, and the
// associated interfaces to manipulate them.
package archer

// Environment represents the configuration of a particular environment in a
// project: the environment's account and region, its name, and the project it
// belongs to.
type Environment struct {
	Project          string `json:"project"`          // Name of the project this environment belongs to.
	Name             string `json:"name"`             // Name of the environment, must be unique within a project.
	Region           string `json:"region"`           // Name of the region this environment is stored in.
	AccountID        string `json:"accountID"`        // Account ID of the account this environment is stored in.
	Prod             bool   `json:"prod"`             // Whether or not this environment is a production environment.
	RegistryURL      string `json:"registryURL"`      // URL For ECR Registry for this environment.
	ExecutionRoleARN string `json:"executionRoleARN"` // ARN used by CloudFormation to make modification to the environment stack.
	ManagerRoleARN   string `json:"managerRoleARN"`   // ARN for the manager role assumed to manipulate the environment and its applications.
}

// EnvironmentStore can List, Create, Get, and Delete environments in an
// underlying project management store.
type EnvironmentStore interface {
	EnvironmentLister
	EnvironmentGetter
	EnvironmentCreator
	EnvironmentDeleter
}

// EnvironmentLister fetches and returns a list of environments from an
// underlying project management store.
type EnvironmentLister interface {
	ListEnvironments(projectName string) ([]*Environment, error)
}

// EnvironmentGetter fetches and returns an environment from an underlying
// project management store.
type EnvironmentGetter interface {
	GetEnvironment(projectName string, environmentName string) (*Environment, error)
}

// EnvironmentCreator creates an environment in the underlying project
// management store.
type EnvironmentCreator interface {
	CreateEnvironment(env *Environment) error
}

// EnvironmentDeleter deletes an environment from the underlying project
// management store.
type EnvironmentDeleter interface {
	DeleteEnvironment(projectName, environmentName string) error
}
internal/pkg/archer/env.go
0.643105
0.498291
env.go
starcoder
package xmlwriter

import (
	"fmt"
	"strconv"
)

// Attr represents an XML attribute to be written by the Writer.
type Attr struct {
	Prefix string
	URI    string
	Name   string
	Value  string
}

// writable marks Attr as a node the Writer can emit.
func (a Attr) writable() {}

// kind identifies this node as an attribute to the Writer.
func (a Attr) kind() NodeKind { return AttrNode }

// The setters below are value receivers: each returns a copy of the Attr with
// Value replaced, leaving the original untouched (builder-style chaining).

// Bool writes a boolean to an attribute.
func (a Attr) Bool(v bool) Attr { a.Value = strconv.FormatBool(v); return a }

// Int writes an int to an attribute.
func (a Attr) Int(v int) Attr { a.Value = strconv.FormatInt(int64(v), 10); return a }

// Int8 writes an int8 to an attribute.
func (a Attr) Int8(v int8) Attr { a.Value = strconv.FormatInt(int64(v), 10); return a }

// Int16 writes an int16 to an attribute.
func (a Attr) Int16(v int16) Attr { a.Value = strconv.FormatInt(int64(v), 10); return a }

// Int32 writes an int32 to an attribute.
func (a Attr) Int32(v int32) Attr { a.Value = strconv.FormatInt(int64(v), 10); return a }

// Int64 writes an int64 to an attribute.
func (a Attr) Int64(v int64) Attr { a.Value = strconv.FormatInt(int64(v), 10); return a }

// Uint writes a uint to an attribute.
// NOTE(review): the parameter is a signed int, not a uint; a negative value
// wraps via uint64(v) to a huge number. Confirm whether callers can pass
// negatives before relying on this.
func (a Attr) Uint(v int) Attr { a.Value = strconv.FormatUint(uint64(v), 10); return a }

// Uint8 writes a uint8 to an attribute.
func (a Attr) Uint8(v uint8) Attr { a.Value = strconv.FormatUint(uint64(v), 10); return a }

// Uint16 writes a uint16 to an attribute.
func (a Attr) Uint16(v uint16) Attr { a.Value = strconv.FormatUint(uint64(v), 10); return a }

// Uint32 writes a uint32 to an attribute.
func (a Attr) Uint32(v uint32) Attr { a.Value = strconv.FormatUint(uint64(v), 10); return a }

// Uint64 writes a uint64 to an attribute.
func (a Attr) Uint64(v uint64) Attr { a.Value = strconv.FormatUint(uint64(v), 10); return a }

// Float32 writes a float32 to an attribute.
func (a Attr) Float32(v float32) Attr {
	a.Value = strconv.FormatFloat(float64(v), 'g', -1, 32)
	return a
}

// Float64 writes a float64 to an attribute.
func (a Attr) Float64(v float64) Attr { a.Value = strconv.FormatFloat(v, 'g', -1, 64); return a }

// write emits the attribute through the Writer: it validates the parent node
// kind (when enforcement is on), applies indentation, registers any namespace
// declared by Prefix/URI on the current element, and prints name="value".
func (a Attr) write(w *Writer) error {
	// Attributes may only appear at the document root or inside an element.
	if w.Enforce {
		if err := w.checkParent(noNodeFlag | elemNodeFlag); err != nil {
			return err
		}
	}
	if w.Indenter != nil {
		if err := w.writeIndent(Event{StateOpen, AttrNode, 0}); err != nil {
			return err
		}
	}
	// Qualified name: "prefix:name" when a prefix is set.
	name := a.Name
	if a.Prefix != "" {
		name = a.Prefix + ":" + name
	}
	// If the attribute declares a namespace, record it on the current
	// element; a prefix bound to a different URI is an error, a prefix
	// already bound to the same URI is a no-op.
	if a.URI != "" && w.current >= 0 {
		ns := ns{prefix: a.Prefix, uri: a.URI}
		found := false
		fail := false
		for _, existing := range w.nodes[w.current].elem.namespaces {
			if ns.prefix == existing.prefix {
				found = true
				if ns.uri != existing.uri {
					fail = true
				}
				break
			}
		}
		if fail {
			return fmt.Errorf("uri already exists for ns prefix %s", a.Prefix)
		} else if !found {
			w.nodes[w.current].elem.namespaces = append(w.nodes[w.current].elem.namespaces, ns)
		}
	}
	if err := w.printer.printAttr(name, a.Value, w.Enforce); err != nil {
		return err
	}
	if w.Indenter != nil {
		w.last = Event{StateEnded, AttrNode, 0}
	}
	return nil
}
vendor/github.com/shabbyrobe/xmlwriter/attr.go
0.727007
0.445409
attr.go
starcoder
package codec

import (
	"fmt"
	"strconv"
	"time"
)

// ParamCodec decodes request parameter strings into typed Go values and
// encodes typed Go values back into parameter strings. OnDecode and OnEncode,
// when non-nil, override the built-in conversions for single (non-slice)
// values; slice values always use the built-in conversions.
type ParamCodec struct {
	// OnDecode, if set, converts the raw string into a value whose dynamic
	// type must match the target type passed to Decode (a mismatch panics,
	// since that is a programming error).
	OnDecode func(value string) (interface{}, error)
	// OnEncode, if set, converts a value into its string form and takes
	// precedence over all built-in encodings.
	OnEncode func(value interface{}) string
}

// Decode parses value into out, which must be a pointer to one of the
// supported scalar or slice types. It panics on an unsupported target type.
//
// Fix over the previous version: the fixed-width integer decoders now parse
// with the matching bitSize, so out-of-range input (e.g. "300" into *int8)
// returns a range error instead of being silently truncated by a cast.
func (pc ParamCodec) Decode(name, value string, out interface{}) error {
	// Convert a possible multi-valued query parameter (comma-separated
	// string) to a slice.
	values := QueryStringToList(value)
	// Use the first one as the single value in case of
	// - path parameter
	// - header parameter
	// - single-valued query parameter
	value = ""
	if len(values) > 0 {
		// For legacy generated code, values may be an empty slice even if
		// the parameter is optional.
		value = values[0]
	}
	switch v := out.(type) {
	case *int:
		return pc.decodeInt(name, value, v)
	case *[]int:
		return pc.decodeIntSlice(name, values, v)
	case *int8:
		return pc.decodeInt8(name, value, v)
	case *[]int8:
		return pc.decodeInt8Slice(name, values, v)
	case *int16:
		return pc.decodeInt16(name, value, v)
	case *[]int16:
		return pc.decodeInt16Slice(name, values, v)
	case *int32:
		return pc.decodeInt32(name, value, v)
	case *[]int32:
		return pc.decodeInt32Slice(name, values, v)
	case *int64:
		return pc.decodeInt64(name, value, v)
	case *[]int64:
		return pc.decodeInt64Slice(name, values, v)
	case *uint:
		return pc.decodeUint(name, value, v)
	case *[]uint:
		return pc.decodeUintSlice(name, values, v)
	case *uint8:
		return pc.decodeUint8(name, value, v)
	case *[]uint8:
		// NOTE: *[]uint8 is *[]byte; raw byte payloads routed here are
		// treated as a comma-separated list of numbers.
		return pc.decodeUint8Slice(name, values, v)
	case *uint16:
		return pc.decodeUint16(name, value, v)
	case *[]uint16:
		return pc.decodeUint16Slice(name, values, v)
	case *uint32:
		return pc.decodeUint32(name, value, v)
	case *[]uint32:
		return pc.decodeUint32Slice(name, values, v)
	case *uint64:
		return pc.decodeUint64(name, value, v)
	case *[]uint64:
		return pc.decodeUint64Slice(name, values, v)
	case *bool:
		return pc.decodeBool(name, value, v)
	case *[]bool:
		return pc.decodeBoolSlice(name, values, v)
	case *string:
		return pc.decodeString(name, value, v)
	case *[]string:
		return pc.decodeStringSlice(name, values, v)
	case *time.Time:
		return pc.decodeTime(name, value, v)
	case *[]time.Time:
		return pc.decodeTimeSlice(name, values, v)
	case *time.Duration:
		return pc.decodeDuration(name, value, v)
	case *[]time.Duration:
		return pc.decodeDurationSlice(name, values, v)
	default:
		// Panic since this is a programming error.
		panic(fmt.Errorf("unsupported out type: %T", v))
	}
}

// Encode converts value to its parameter-string form. Slices are encoded
// element-wise and joined with QueryListToString; unknown types fall back to
// fmt.Sprintf("%v", ...).
func (pc ParamCodec) Encode(name string, value interface{}) string {
	if pc.OnEncode != nil {
		return pc.OnEncode(value)
	}
	switch v := value.(type) {
	case int:
		return strconv.FormatInt(int64(v), 10)
	case []int:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatInt(int64(vv), 10))
		}
		return QueryListToString(values)
	case int8:
		return strconv.FormatInt(int64(v), 10)
	case []int8:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatInt(int64(vv), 10))
		}
		return QueryListToString(values)
	case int16:
		return strconv.FormatInt(int64(v), 10)
	case []int16:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatInt(int64(vv), 10))
		}
		return QueryListToString(values)
	case int32:
		return strconv.FormatInt(int64(v), 10)
	case []int32:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatInt(int64(vv), 10))
		}
		return QueryListToString(values)
	case int64:
		return strconv.FormatInt(v, 10)
	case []int64:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatInt(vv, 10))
		}
		return QueryListToString(values)
	case uint:
		return strconv.FormatUint(uint64(v), 10)
	case []uint:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatUint(uint64(vv), 10))
		}
		return QueryListToString(values)
	case uint8:
		return strconv.FormatUint(uint64(v), 10)
	case []uint8:
		// NOTE: []uint8 is []byte, so byte payloads are encoded as a
		// comma-separated list of numbers.
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatUint(uint64(vv), 10))
		}
		return QueryListToString(values)
	case uint16:
		return strconv.FormatUint(uint64(v), 10)
	case []uint16:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatUint(uint64(vv), 10))
		}
		return QueryListToString(values)
	case uint32:
		return strconv.FormatUint(uint64(v), 10)
	case []uint32:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatUint(uint64(vv), 10))
		}
		return QueryListToString(values)
	case uint64:
		return strconv.FormatUint(v, 10)
	case []uint64:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatUint(vv, 10))
		}
		return QueryListToString(values)
	case bool:
		return strconv.FormatBool(v)
	case []bool:
		var values []string
		for _, vv := range v {
			values = append(values, strconv.FormatBool(vv))
		}
		return QueryListToString(values)
	case string:
		return v
	case []string:
		return QueryListToString(v)
	case time.Time:
		return v.Format(time.RFC3339)
	case []time.Time:
		var values []string
		for _, vv := range v {
			values = append(values, vv.Format(time.RFC3339))
		}
		return QueryListToString(values)
	case time.Duration:
		return v.String()
	case []time.Duration:
		var values []string
		for _, vv := range v {
			values = append(values, vv.String())
		}
		return QueryListToString(values)
	default:
		return fmt.Sprintf("%v", value)
	}
}

// decodeInt parses value as an int, or delegates to OnDecode when set.
func (pc ParamCodec) decodeInt(name, value string, out *int) error {
	if pc.OnDecode == nil {
		v, err := strconv.Atoi(value)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(int)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want int)", name, result))
	}
	*out = v
	return nil
}

// decodeIntSlice appends each parsed element to out.
// Customized OnDecode is not supported for slice values.
func (pc ParamCodec) decodeIntSlice(name string, values []string, out *[]int) error {
	for _, value := range values {
		v, err := strconv.Atoi(value)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}

// decodeInt8 parses value as an int8, or delegates to OnDecode when set.
func (pc ParamCodec) decodeInt8(name, value string, out *int8) error {
	if pc.OnDecode == nil {
		// bitSize 8 makes out-of-range input an error instead of a
		// silent truncation (previously parsed with bitSize 64).
		v, err := strconv.ParseInt(value, 10, 8)
		if err != nil {
			return err
		}
		*out = int8(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(int8)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want int8)", name, result))
	}
	*out = v
	return nil
}

// decodeInt8Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeInt8Slice(name string, values []string, out *[]int8) error {
	for _, value := range values {
		v, err := strconv.ParseInt(value, 10, 8)
		if err != nil {
			return err
		}
		*out = append(*out, int8(v))
	}
	return nil
}

// decodeInt16 parses value as an int16, or delegates to OnDecode when set.
func (pc ParamCodec) decodeInt16(name, value string, out *int16) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseInt(value, 10, 16)
		if err != nil {
			return err
		}
		*out = int16(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(int16)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want int16)", name, result))
	}
	*out = v
	return nil
}

// decodeInt16Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeInt16Slice(name string, values []string, out *[]int16) error {
	for _, value := range values {
		v, err := strconv.ParseInt(value, 10, 16)
		if err != nil {
			return err
		}
		*out = append(*out, int16(v))
	}
	return nil
}

// decodeInt32 parses value as an int32, or delegates to OnDecode when set.
func (pc ParamCodec) decodeInt32(name, value string, out *int32) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseInt(value, 10, 32)
		if err != nil {
			return err
		}
		*out = int32(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(int32)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want int32)", name, result))
	}
	*out = v
	return nil
}

// decodeInt32Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeInt32Slice(name string, values []string, out *[]int32) error {
	for _, value := range values {
		v, err := strconv.ParseInt(value, 10, 32)
		if err != nil {
			return err
		}
		*out = append(*out, int32(v))
	}
	return nil
}

// decodeInt64 parses value as an int64, or delegates to OnDecode when set.
func (pc ParamCodec) decodeInt64(name, value string, out *int64) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseInt(value, 10, 64)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(int64)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want int64)", name, result))
	}
	*out = v
	return nil
}

// decodeInt64Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeInt64Slice(name string, values []string, out *[]int64) error {
	for _, value := range values {
		v, err := strconv.ParseInt(value, 10, 64)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}

// decodeUint parses value as a uint, or delegates to OnDecode when set.
func (pc ParamCodec) decodeUint(name, value string, out *uint) error {
	if pc.OnDecode == nil {
		// strconv.IntSize matches the platform width of uint, so values
		// that overflow a 32-bit uint error out on 32-bit platforms.
		v, err := strconv.ParseUint(value, 10, strconv.IntSize)
		if err != nil {
			return err
		}
		*out = uint(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(uint)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want uint)", name, result))
	}
	*out = v
	return nil
}

// decodeUintSlice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeUintSlice(name string, values []string, out *[]uint) error {
	for _, value := range values {
		v, err := strconv.ParseUint(value, 10, strconv.IntSize)
		if err != nil {
			return err
		}
		*out = append(*out, uint(v))
	}
	return nil
}

// decodeUint8 parses value as a uint8, or delegates to OnDecode when set.
func (pc ParamCodec) decodeUint8(name, value string, out *uint8) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseUint(value, 10, 8)
		if err != nil {
			return err
		}
		*out = uint8(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(uint8)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want uint8)", name, result))
	}
	*out = v
	return nil
}

// decodeUint8Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeUint8Slice(name string, values []string, out *[]uint8) error {
	for _, value := range values {
		v, err := strconv.ParseUint(value, 10, 8)
		if err != nil {
			return err
		}
		*out = append(*out, uint8(v))
	}
	return nil
}

// decodeUint16 parses value as a uint16, or delegates to OnDecode when set.
func (pc ParamCodec) decodeUint16(name, value string, out *uint16) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseUint(value, 10, 16)
		if err != nil {
			return err
		}
		*out = uint16(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(uint16)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want uint16)", name, result))
	}
	*out = v
	return nil
}

// decodeUint16Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeUint16Slice(name string, values []string, out *[]uint16) error {
	for _, value := range values {
		v, err := strconv.ParseUint(value, 10, 16)
		if err != nil {
			return err
		}
		*out = append(*out, uint16(v))
	}
	return nil
}

// decodeUint32 parses value as a uint32, or delegates to OnDecode when set.
func (pc ParamCodec) decodeUint32(name, value string, out *uint32) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseUint(value, 10, 32)
		if err != nil {
			return err
		}
		*out = uint32(v)
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(uint32)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want uint32)", name, result))
	}
	*out = v
	return nil
}

// decodeUint32Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeUint32Slice(name string, values []string, out *[]uint32) error {
	for _, value := range values {
		v, err := strconv.ParseUint(value, 10, 32)
		if err != nil {
			return err
		}
		*out = append(*out, uint32(v))
	}
	return nil
}

// decodeUint64 parses value as a uint64, or delegates to OnDecode when set.
func (pc ParamCodec) decodeUint64(name, value string, out *uint64) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseUint(value, 10, 64)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(uint64)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want uint64)", name, result))
	}
	*out = v
	return nil
}

// decodeUint64Slice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeUint64Slice(name string, values []string, out *[]uint64) error {
	for _, value := range values {
		v, err := strconv.ParseUint(value, 10, 64)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}

// decodeBool parses value as a bool, or delegates to OnDecode when set.
func (pc ParamCodec) decodeBool(name, value string, out *bool) error {
	if pc.OnDecode == nil {
		v, err := strconv.ParseBool(value)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(bool)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want bool)", name, result))
	}
	*out = v
	return nil
}

// decodeBoolSlice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeBoolSlice(name string, values []string, out *[]bool) error {
	for _, value := range values {
		v, err := strconv.ParseBool(value)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}

// decodeString stores value directly, or delegates to OnDecode when set.
func (pc ParamCodec) decodeString(name, value string, out *string) error {
	if pc.OnDecode == nil {
		*out = value
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(string)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want string)", name, result))
	}
	*out = v
	return nil
}

// decodeStringSlice appends the values to out (no OnDecode support).
func (pc ParamCodec) decodeStringSlice(name string, values []string, out *[]string) error {
	*out = append(*out, values...)
	return nil
}

// decodeTime parses value as RFC 3339, or delegates to OnDecode when set.
func (pc ParamCodec) decodeTime(name, value string, out *time.Time) error {
	if pc.OnDecode == nil {
		v, err := time.Parse(time.RFC3339, value)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(time.Time)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want time.Time)", name, result))
	}
	*out = v
	return nil
}

// decodeTimeSlice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeTimeSlice(name string, values []string, out *[]time.Time) error {
	for _, value := range values {
		v, err := time.Parse(time.RFC3339, value)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}

// decodeDuration parses value with time.ParseDuration, or delegates to
// OnDecode when set.
func (pc ParamCodec) decodeDuration(name, value string, out *time.Duration) error {
	if pc.OnDecode == nil {
		v, err := time.ParseDuration(value)
		if err != nil {
			return err
		}
		*out = v
		return nil
	}
	result, err := pc.OnDecode(value)
	if err != nil {
		return err
	}
	v, ok := result.(time.Duration)
	if !ok {
		// Panic since this is a programming error.
		panic(fmt.Errorf("decoder of %q returns %v (want time.Duration)", name, result))
	}
	*out = v
	return nil
}

// decodeDurationSlice appends each parsed element to out (no OnDecode support).
func (pc ParamCodec) decodeDurationSlice(name string, values []string, out *[]time.Duration) error {
	for _, value := range values {
		v, err := time.ParseDuration(value)
		if err != nil {
			return err
		}
		*out = append(*out, v)
	}
	return nil
}
pkg/codec/httpv2/paramcodec.go
0.647352
0.541712
paramcodec.go
starcoder
package alt

import (
	"fmt"
	"math"
	"reflect"
	"time"

	"github.com/ohler55/ojg/gen"
)

// RecomposeFunc should build an object from data in a map returning the
// recomposed object or an error.
type RecomposeFunc func(map[string]interface{}) (interface{}, error)

// Recomposer is used to recompose simple data into structs.
type Recomposer struct {
	// CreateKey identifies the creation key in decomposed objects.
	CreateKey string
	// composers maps both short and fully-qualified type names to their
	// registered composer.
	composers map[string]*composer
}

// NewRecomposer creates a new instance. The composers are a map of objects
// expected and functions to recompose them. If no function is provided then
// reflection is used instead.
func NewRecomposer(createKey string, composers map[interface{}]RecomposeFunc) (*Recomposer, error) {
	r := Recomposer{
		CreateKey: createKey,
		composers: map[string]*composer{},
	}
	for v, fun := range composers {
		rt := reflect.TypeOf(v)
		if err := r.registerComposer(rt, fun); err != nil {
			return nil, err
		}
	}
	return &r, nil
}

// registerComposer registers rt (a struct type, or pointer to one) under both
// its short and fully-qualified names, then recursively registers the element
// types of its fields.
//
// NOTE(review): the recursion has no visited-set; a struct type that directly
// or indirectly references itself (e.g. via a pointer field of its own type)
// looks like it would recurse without bound — confirm against callers. Also
// note that errors from registering field types are deliberately discarded
// (non-struct field types are simply skipped).
func (r *Recomposer) registerComposer(rt reflect.Type, fun RecomposeFunc) error {
	if rt.Kind() == reflect.Ptr {
		rt = rt.Elem()
	}
	full := rt.PkgPath() + "/" + rt.Name()
	// TBD could loosen this up and allow any type as long as a function is provided.
	if rt.Kind() != reflect.Struct {
		return fmt.Errorf("only structs can be recomposed. %s is not a struct type", rt)
	}
	c := composer{
		fun:   fun,
		short: rt.Name(),
		full:  full,
		rtype: rt,
	}
	// Register under both the bare type name and the package-qualified name.
	r.composers[c.short] = &c
	r.composers[c.full] = &c
	// Walk the fields so nested struct types can also be recomposed; for
	// container/pointer fields, register the element type.
	for i := rt.NumField() - 1; 0 <= i; i-- {
		f := rt.Field(i)
		ft := f.Type
		switch ft.Kind() {
		case reflect.Array, reflect.Slice, reflect.Map, reflect.Ptr:
			ft = ft.Elem()
		}
		// Errors ignored: non-struct field types are not registerable.
		_ = r.registerComposer(ft, nil)
	}
	return nil
}

// Recompose simple data into more complex go types. An optional second
// argument supplies a slice (or array) type; when given and the recomposed
// result is a []interface{}, each element is converted into a new slice of
// that type.
func (r *Recomposer) Recompose(v interface{}, tv ...interface{}) (interface{}, error) {
	var rt reflect.Type
	if 0 < len(tv) {
		rt = reflect.TypeOf(tv[0])
		if rt.Kind() != reflect.Slice && rt.Kind() != reflect.Array {
			return nil, fmt.Errorf("only a slice type can be provided as an optional argument")
		}
	}
	result, err := r.recompose(v)
	if err == nil && rt != nil {
		if ra, ok := result.([]interface{}); ok {
			// Build a typed slice and convert each element into it.
			av := reflect.MakeSlice(rt, len(ra), len(ra))
			et := rt.Elem()
			for i, v := range ra {
				vv := reflect.ValueOf(v)
				iv := av.Index(i)
				if vv.Type().ConvertibleTo(et) {
					iv.Set(vv.Convert(et))
				} else {
					return nil, fmt.Errorf("can not convert (%s)%v to a %s", iv.Type(), iv, et)
				}
			}
			result = av.Interface()
		}
	}
	return result, err
}

// recompose normalizes v: integer kinds widen to int64, float32 is rounded
// into float64, gen.* values unwrap to plain Go values, and containers are
// recomposed element-wise. Maps containing the CreateKey are handed to the
// registered composer for that type name.
func (r *Recomposer) recompose(v interface{}) (interface{}, error) {
	switch tv := v.(type) {
	case nil, bool, int64, float64, string, time.Time:
		// Already in canonical form.
	case int:
		v = int64(tv)
	case int8:
		v = int64(tv)
	case int16:
		v = int64(tv)
	case int32:
		v = int64(tv)
	case uint:
		v = int64(tv)
	case uint8:
		v = int64(tv)
	case uint16:
		v = int64(tv)
	case uint32:
		v = int64(tv)
	case uint64:
		v = int64(tv)
	case float32:
		// This small rounding makes the conversion from 32 bit to 64 bit
		// display nicer.
		f, i := math.Frexp(float64(tv))
		f = float64(int64(f*fracMax)) / fracMax
		v = math.Ldexp(f, i)
	case []interface{}:
		a := make([]interface{}, len(tv))
		var err error
		for i, m := range tv {
			if a[i], err = r.recompose(m); err != nil {
				return nil, err
			}
		}
		v = a
	case map[string]interface{}:
		o := map[string]interface{}{}
		for k, m := range tv {
			if mv, err := r.recompose(m); err == nil {
				o[k] = mv
			} else {
				return nil, err
			}
		}
		// A CreateKey entry names the concrete type to build.
		if cv := o[r.CreateKey]; cv != nil {
			tn, _ := cv.(string)
			if b := r.composers[tn]; b != nil {
				return b.compose(o, r.CreateKey)
			}
		}
		v = o
	case gen.Bool:
		v = bool(tv)
	case gen.Int:
		v = int64(tv)
	case gen.Float:
		v = float64(tv)
	case gen.String:
		v = string(tv)
	case gen.Time:
		v = time.Time(tv)
	case gen.Big:
		v = string(tv)
	case gen.Array:
		a := make([]interface{}, len(tv))
		var err error
		for i, m := range tv {
			if a[i], err = r.recompose(m); err != nil {
				return nil, err
			}
		}
		v = a
	case gen.Object:
		o := map[string]interface{}{}
		for k, m := range tv {
			if mv, err := r.recompose(m); err == nil {
				o[k] = mv
			} else {
				return nil, err
			}
		}
		if cv := o[r.CreateKey]; cv != nil {
			tn, _ := cv.(string)
			if b := r.composers[tn]; b != nil {
				return b.compose(o, r.CreateKey)
			}
		}
		v = o
	default:
		return nil, fmt.Errorf("%T is not a valid simple type", v)
	}
	return v, nil
}
alt/recomposer.go
0.573678
0.470433
recomposer.go
starcoder
package polling import ( "io/ioutil" "net/http" ) type OpenExchangeRates struct { Disclaimer string `json:"disclaimer"` License string `json:"license"` Timestamp string `json:"timestamp"` Base string `json:"base"` Currency OpenExchangeCurrencies `json:"rates"` } type OpenExchangeCurrencies struct { AED float64 AFN float64 ALL float64 AMD float64 ANG float64 AOA float64 ARS float64 AUD float64 AWG float64 AZN float64 BAM float64 BBD float64 BDT float64 BGN float64 BHD float64 BIF float64 BMD float64 BND float64 BOB float64 BRL float64 BSD float64 BTC float64 BTN float64 BWP float64 BYN float64 BZD float64 CAD float64 CDF float64 CHF float64 CLF float64 CLP float64 CNH float64 CNY float64 COP float64 CRC float64 CUC float64 CUP float64 CVE float64 CZK float64 DJF float64 DKK float64 DOP float64 DZD float64 EGP float64 ERN float64 ETB float64 EUR float64 FJD float64 FKP float64 GBP float64 GEL float64 GGP float64 GHS float64 GIP float64 GMD float64 GNF float64 GTQ float64 GYD float64 HKD float64 HNL float64 HRK float64 HTG float64 HUF float64 IDR float64 ILS float64 IMP float64 INR float64 IQD float64 IRR float64 ISK float64 JEP float64 JMD float64 JOD float64 JPY float64 KES float64 KGS float64 KHR float64 KMF float64 KPW float64 KRW float64 KWD float64 KYD float64 KZT float64 LAK float64 LBP float64 LKR float64 LRD float64 LSL float64 LYD float64 MAD float64 MDL float64 MGA float64 MKD float64 MMK float64 MNT float64 MOP float64 MRO float64 MRU float64 MUR float64 MVR float64 MWK float64 MXN float64 MYR float64 MZN float64 NAD float64 NGN float64 NIO float64 NOK float64 NPR float64 NZD float64 OMR float64 PAB float64 PEN float64 PGK float64 PHP float64 PKR float64 PLN float64 PYG float64 QAR float64 RON float64 RSD float64 RUB float64 RWF float64 SAR float64 SBD float64 SCR float64 SDG float64 SEK float64 SGD float64 SHP float64 SLL float64 SOS float64 SRD float64 SSP float64 STD float64 SVC float64 SYP float64 SZL float64 THB float64 TJS float64 TMT float64 TND 
float64 TOP float64 TRY float64 TTD float64 TWD float64 TZS float64 UAH float64 UGX float64 USD float64 UYU float64 UZS float64 VEF float64 VES float64 VND float64 VUV float64 WST float64 XAF float64 XAG float64 XAU float64 XCD float64 XDR float64 XOF float64 XPD float64 XPF float64 XPT float64 YER float64 ZAR float64 ZMW float64 ZWL float64 } // you will need to replace the values put into peg structure func CallOpenExchangeRates() ([]byte, error) { resp, err := http.Get("https://openexchangerates.org/api/latest.json?app_id=<INSERT API KEY HERE>") if err != nil { return nil, err } else { defer resp.Body.Close() body, err := ioutil.ReadAll(resp.Body) return body, err } }
polling/openexchange.go
0.585101
0.503662
openexchange.go
starcoder
package anomalia import ( "sort" "sync" ) type mapper func(float64) float64 type mapperWithIndex func(int, float64) float64 type predicate func(float64) bool func minMax(data []float64) (float64, float64) { var ( max = data[0] min = data[0] ) for _, value := range data { if max < value { max = value } if min > value { min = value } } return min, max } func mapSlice(slice []float64, m mapper) []float64 { var ( wg sync.WaitGroup result = make([]float64, len(slice)) ) wg.Add(len(slice)) for i, value := range slice { go func(i int, value float64) { defer wg.Done() result[i] = m(value) }(i, value) } wg.Wait() return result } func mapSliceWithIndex(slice []float64, m mapperWithIndex) []float64 { var ( wg sync.WaitGroup result = make([]float64, len(slice)) ) wg.Add(len(slice)) for idx, value := range slice { go func(idx int, value float64) { defer wg.Done() result[idx] = m(idx, value) }(idx, value) } wg.Wait() return result } func filter(slice []float64, predicate predicate) (ret []float64) { for _, value := range slice { if predicate(value) { ret = append(ret, value) } } return } func copySlice(input []float64) []float64 { s := make([]float64, len(input)) copy(s, input) return s } func sortedCopy(input []float64) (copy []float64) { copy = copySlice(input) sort.Float64s(copy) return } func insertAt(slice []float64, pos int, elem float64) []float64 { if pos < 0 { pos = 0 } else if pos >= len(slice) { pos = len(slice) } out := make([]float64, len(slice)+1) copy(out[:pos], slice[:pos]) out[pos] = elem copy(out[pos+1:], slice[pos:]) return out } func mapIntKeys(dict map[int]float64) []int { keys := make([]int, len(dict)) i := 0 for key := range dict { keys[i] = key i++ } sort.Ints(keys) return keys } func mapFloat64Keys(m map[float64]float64) []float64 { keys := make([]float64, len(m)) i := 0 for key := range m { keys[i] = key i++ } sort.Float64s(keys) return keys } func indexOf(slice []float64, value float64) int { for idx := range slice { if slice[idx] == value { return idx 
} } return -1 } func unpackMap(m map[float64]float64) ([]float64, []float64) { keys := mapFloat64Keys(m) values := make([]float64, len(keys)) for idx, key := range keys { values[idx] = m[key] } return keys, values } func sumOfSquares(s []float64) float64 { sum := 0.0 for _, val := range s { sum += val * val } return sum } func sumOfProducts(s1 []float64, s2 []float64) float64 { sum := 0.0 for i := range s1 { sum += s1[i] * s2[i] } return sum }
helpers.go
0.541409
0.49939
helpers.go
starcoder
package benchmark import ( "reflect" "testing" ) func isBoolToUint32FuncCalibrated(supplier func() bool) bool { return isCalibrated(reflect.Bool, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isIntToUint32FuncCalibrated(supplier func() int) bool { return isCalibrated(reflect.Int, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isInt8ToUint32FuncCalibrated(supplier func() int8) bool { return isCalibrated(reflect.Int8, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isInt16ToUint32FuncCalibrated(supplier func() int16) bool { return isCalibrated(reflect.Int16, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isInt32ToUint32FuncCalibrated(supplier func() int32) bool { return isCalibrated(reflect.Int32, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isInt64ToUint32FuncCalibrated(supplier func() int64) bool { return isCalibrated(reflect.Int64, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isUintToUint32FuncCalibrated(supplier func() uint) bool { return isCalibrated(reflect.Uint, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isUint8ToUint32FuncCalibrated(supplier func() uint8) bool { return isCalibrated(reflect.Uint8, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isUint16ToUint32FuncCalibrated(supplier func() uint16) bool { return isCalibrated(reflect.Uint16, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isUint32ToUint32FuncCalibrated(supplier func() uint32) bool { return isCalibrated(reflect.Uint32, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func isUint64ToUint32FuncCalibrated(supplier func() uint64) bool { return isCalibrated(reflect.Uint64, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setBoolToUint32FuncCalibrated(supplier func() bool) { setCalibrated(reflect.Bool, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setIntToUint32FuncCalibrated(supplier func() int) { setCalibrated(reflect.Int, reflect.Uint32, 
reflect.ValueOf(supplier).Pointer()) } func setInt8ToUint32FuncCalibrated(supplier func() int8) { setCalibrated(reflect.Int8, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setInt16ToUint32FuncCalibrated(supplier func() int16) { setCalibrated(reflect.Int16, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setInt32ToUint32FuncCalibrated(supplier func() int32) { setCalibrated(reflect.Int32, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setInt64ToUint32FuncCalibrated(supplier func() int64) { setCalibrated(reflect.Int64, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setUintToUint32FuncCalibrated(supplier func() uint) { setCalibrated(reflect.Uint, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setUint8ToUint32FuncCalibrated(supplier func() uint8) { setCalibrated(reflect.Uint8, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setUint16ToUint32FuncCalibrated(supplier func() uint16) { setCalibrated(reflect.Uint16, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setUint32ToUint32FuncCalibrated(supplier func() uint32) { setCalibrated(reflect.Uint32, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } func setUint64ToUint32FuncCalibrated(supplier func() uint64) { setCalibrated(reflect.Uint64, reflect.Uint32, reflect.ValueOf(supplier).Pointer()) } // BoolToUint32Func benchmarks a function with the signature: // func(bool) uint32 // ID: B-10-1 func BoolToUint32Func(b *testing.B, supplier func() bool, toUint32Func func(bool) uint32) { if !isBoolSupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isBoolToUint32FuncCalibrated(supplier) { panic("BoolToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // IntToUint32Func benchmarks a function with the signature: // func(int) uint32 // ID: B-10-2 func IntToUint32Func(b *testing.B, supplier func() int, toUint32Func func(int) uint32) { if 
!isIntSupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isIntToUint32FuncCalibrated(supplier) { panic("IntToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Int8ToUint32Func benchmarks a function with the signature: // func(int8) uint32 // ID: B-10-3 func Int8ToUint32Func(b *testing.B, supplier func() int8, toUint32Func func(int8) uint32) { if !isInt8SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isInt8ToUint32FuncCalibrated(supplier) { panic("Int8ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Int16ToUint32Func benchmarks a function with the signature: // func(int16) uint32 // ID: B-10-4 func Int16ToUint32Func(b *testing.B, supplier func() int16, toUint32Func func(int16) uint32) { if !isInt16SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isInt16ToUint32FuncCalibrated(supplier) { panic("Int16ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Int32ToUint32Func benchmarks a function with the signature: // func(int32) uint32 // ID: B-10-5 func Int32ToUint32Func(b *testing.B, supplier func() int32, toUint32Func func(int32) uint32) { if !isInt32SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isInt32ToUint32FuncCalibrated(supplier) { panic("Int32ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Int64ToUint32Func benchmarks a function with the signature: // func(int64) uint32 // ID: B-10-6 func Int64ToUint32Func(b *testing.B, supplier func() int64, toUint32Func func(int64) uint32) { if !isInt64SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isInt64ToUint32FuncCalibrated(supplier) { panic("Int64ToUint32Func not calibrated with 
this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // UintToUint32Func benchmarks a function with the signature: // func(uint) uint32 // ID: B-10-7 func UintToUint32Func(b *testing.B, supplier func() uint, toUint32Func func(uint) uint32) { if !isUintSupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isUintToUint32FuncCalibrated(supplier) { panic("UintToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Uint8ToUint32Func benchmarks a function with the signature: // func(uint8) uint32 // ID: B-10-8 func Uint8ToUint32Func(b *testing.B, supplier func() uint8, toUint32Func func(uint8) uint32) { if !isUint8SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isUint8ToUint32FuncCalibrated(supplier) { panic("Uint8ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Uint16ToUint32Func benchmarks a function with the signature: // func(uint16) uint32 // ID: B-10-9 func Uint16ToUint32Func(b *testing.B, supplier func() uint16, toUint32Func func(uint16) uint32) { if !isUint16SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isUint16ToUint32FuncCalibrated(supplier) { panic("Uint16ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Uint32ToUint32Func benchmarks a function with the signature: // func(uint32) uint32 // ID: B-10-10 func Uint32ToUint32Func(b *testing.B, supplier func() uint32, toUint32Func func(uint32) uint32) { if !isUint32SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isUint32ToUint32FuncCalibrated(supplier) { panic("Uint32ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } } // Uint64ToUint32Func benchmarks a function with the signature: // 
func(uint32) uint32 // ID: B-10-11 func Uint64ToUint32Func(b *testing.B, supplier func() uint64, toUint32Func func(uint64) uint32) { if !isUint64SupplierCalibrated(supplier) { panic("supplier function not calibrated") } if !isUint64ToUint32FuncCalibrated(supplier) { panic("Uint64ToUint32Func not calibrated with this supplier") } for i, count := 0, b.N; i < count; i++ { toUint32Func(supplier()) } }
common/benchmark/10_to_uint32_func.go
0.713032
0.70076
10_to_uint32_func.go
starcoder
package ql import ( "fmt" "io" ) // parse an expression into it's AST counter part type parser struct { src *scanner err error } // Parse convert any 'src' into an Expr (or error if it is not possible) func Parse(src io.Reader) (x Expr, err error) { //Start by building the scanner and cosuming the first token parser := &parser{src: newScanner(src)} parser.Next() return parser.OrExpr(), parser.err } func (p *parser) Next() { p.src.Next() } func (p *parser) OrExpr() Expr { x := p.AndExpr() if p.src.ttype == OR { pos := p.src.start p.Next() y := p.OrExpr() return &BinaryExpr{ X: x, Op: OR, OpPos: pos, Y: y, } } return x } func (p *parser) AndExpr() Expr { x := p.UnaryExpr() if p.src.ttype == AND { pos := p.src.start p.Next() return &BinaryExpr{ X: x, Op: AND, OpPos: pos, Y: p.UnaryExpr(), } } return x } func (p *parser) UnaryExpr() Expr { // unary are paren or any comparator switch p.src.ttype { case LPAREN: lpos := p.src.start p.Next() x := p.OrExpr() if p.src.ttype != RPAREN { p.err = fmt.Errorf("%v parenthesis mismatch, expected ')' found %v instead", p.src.start, p.src.ttype) return nil } rpos := p.src.start p.Next() return &ParenExpr{ LParenPos: lpos, X: x, RParenPos: rpos, } case NOT: pos := p.src.start p.Next() return &UnaryExpr{ Op: NOT, OpPos: pos, X: p.UnaryExpr(), } default: return p.LiteralOpExpr() } } func (p *parser) LiteralOpExpr() Expr { // lhs can be either a literal OR a function var lhs Expr switch p.src.ttype { case FUNCTION: pos := p.src.start fname := p.src.token.String() p.Next() // it has to be a '(' if p.src.ttype != LPAREN { p.err = fmt.Errorf("%v Invalid function call, need to start with a '('. Found %v instead", p.src.start, p.src.ttype) return nil } p.Next() //the X expr x := p.OrExpr() // it has to be a ')' if p.src.ttype != RPAREN { p.err = fmt.Errorf("%v Invalid function call, need to end with a ')'. 
Found %q:%v instead", p.src.start, p.src.token.String(), p.src.ttype) return nil } rpos := p.src.start p.Next() // Ok great lhs = &FuncExpr{ Func: fname, FuncPos: pos, RParenPos: rpos, X: x, } default: lhs = p.LiteralExpr() } op := p.src.ttype switch op { case EXISTS: p.Next() //consume it return &PostCompExpr{ X: lhs.(*Literal), Op: op, OpPos: p.src.start, } case LT, GT, EQ, MATCH: p.Next() //consume it pos := p.src.start y := p.LiteralExpr() return &CompExpr{ X: lhs, Op: op, OpPos: pos, Y: y, } default: return lhs } } func (p *parser) LiteralExpr() *Literal { switch p.src.ttype { case IDENT, REGEXP, DURATION, NUMBER: defer p.Next() return &Literal{ Kind: p.src.ttype, LitPos: p.src.start, Value: p.src.token.String(), } default: p.err = fmt.Errorf("%v Syntax Error: expecting one literal: Identifier, Regexp, Duration or Number; got %v instead", p.src.start, p.src.ttype) return nil } }
ql/parser.go
0.524395
0.423995
parser.go
starcoder
package gridserver import ( "fmt" "image/color" ) // TileFormat specifies the types of tiles to generate. type TileFormat int const ( // JSONTile indicates the tile output should be GeoJSON. JSONTile TileFormat = 0 // ImageTile indicates the tile output should be a PNG image. ImageTile TileFormat = 1 ) // TileRef represents a TMS tile reference, based on x/y/z values. // It provides the tile bounding box, and a function to convert lat/lng into pixel references. type TileRef struct { Z int X int Y int Options *TileOptions origin *origin SW *LatLng NE *LatLng } // origin gives the pixel coordinates of the tile origin. type origin struct { top float64 left float64 } // TileOptions are settings to adjust how the tiles are generated. type TileOptions struct { Format TileFormat LineColor color.Color LabelColor color.Color Projection Projection ZoomAdjust int } // String returns a string representation of the options. func (o TileOptions) String() string { if o.Format == JSONTile { // Colour settings aren't used for JSON tiles - they just mess up the caching. return fmt.Sprintf("%s_%d", o.Projection, o.ZoomAdjust) } r, g, b, a := o.LineColor.RGBA() line := fmt.Sprintf("#%02x%02x%02x%02x", uint8(r), uint8(g), uint8(b), uint8(a)) r, g, b, a = o.LabelColor.RGBA() label := fmt.Sprintf("#%02x%02x%02x%02x", uint8(r), uint8(g), uint8(b), uint8(a)) return fmt.Sprintf("%s_%s_%s_%d", line, label, o.Projection, o.ZoomAdjust) } // NewTileOptions returns a default set of options. func NewTileOptions() *TileOptions { return &TileOptions{Format: JSONTile, LineColor: lineColor, LabelColor: labelColor, Projection: NewMercatorTMS(), ZoomAdjust: 0} } // MakeTileRef constructs the tile reference. 
func MakeTileRef(x, y, z int, opts *TileOptions) *TileRef { if opts == nil { opts = NewTileOptions() } t := TileRef{X: x, Y: y, Z: z, Options: opts} latlo, lnglo, lathi, lnghi := t.Options.Projection.TileLatLngBounds(x, y, z) t.SW = &LatLng{latlo, lnglo} t.NE = &LatLng{lathi, lnghi} // We need the coordinates of the top left of this tile so we can correct the absolute pixel values. t.origin = &origin{} t.origin.left, t.origin.top = t.Options.Projection.TileOrigin(x, y, z) return &t } // LatLngToPixel converts a lat/lng pair in degrees to pixel values relative to the NW corner of the tile. func (t *TileRef) LatLngToPixel(lat, lng float64, tileSize float64) (x float64, y float64) { x, y = t.Options.Projection.LatLngToRaster(lat, lng, float64(t.Z)) // Make the coordinates relative to the top left of the tile. x = x - t.origin.left y = y - t.origin.top return } // Path converts a tile reference into a file path using zoom/x/y/options func (t *TileRef) Path() string { return fmt.Sprintf("%d/%d/%d/%d:%d:%d_%s", t.Z, t.X, t.Y, t.Z, t.X, t.Y, t.Options) }
tile_server/gridserver/tileref.go
0.829837
0.551755
tileref.go
starcoder
package nbt // Interface because there will be faster implementations than the most intuitive, probably slow one, // but they're gonna be very memory expensive and I can't decide whether that tradeoff shouldn't be // something that the user must decide. // Also, I know that this interface is a bad abstraction, but I need this interface for a facade, not // an abstraction. // Mapper describes a component that can be used to support more complex // mappings of known structures than marshalling could support. type Mapper interface { // Query will execute the given query string on the tag in this mapper. // The interpretation of the query is implementation specific. Query(string) (Tag, error) // MapByte will interpret the tag under the given query path as byte and // store it under the given *int8, or return an error if the tag under the // path is not a byte tag. MapByte(string, *int8) error // MapShort will interpret the tag under the given query path as short and // store it under the given *int16, or return an error if the tag under the // path is not a short tag. MapShort(string, *int16) error // MapInt will interpret the tag under the given query path as int and // store it under the given *int, or return an error if the tag under the // path is not an int tag. MapInt(string, *int) error // MapInt32 works just as MapInt, but converts the given int to an int32. // This can be useful if you have a lot of ints (which are 4-byte in NBT), // but want to save the extra 4 bytes if you're on a 64bit arch. MapInt32(string, *int32) error // MapLong will interpret the tag under the given query path as int64 and // store it under the given *int64, or return an error if the tag under the // path is not a long tag. MapLong(string, *int64) error // MapFloat will interpret the tag under the given query path as float32 and // store it under the given *float32, or return an error if the tag under the // path is not a long tag. 
MapFloat(string, *float32) error // MapDouble will interpret the tag under the given query path as float64 and // store it under the given *float64, or return an error if the tag under the // path is not a long tag. MapDouble(string, *float64) error // MapString will interpret the tag under the given query path as string and // store it under the given *string, or return an error if the tag under the // path is not a string tag. MapString(string, *string) error // MapByteArray will interpret the tag under the given query path as bytearray and // store it under the given *[]int8, or return an error if the tag under the // path is not a bytearray tag. MapByteArray(string, *[]int8) error // MapIntArray will interpret the tag under the given query path as intarray and // store it under the given *[]int, or return an error if the tag under the // path is not a intarray tag. MapIntArray(string, *[]int) error // MapInt32Array is the array equivalent to MapInt32. MapInt32Array(string, *[]int32) error // MapLongArray will interpret the tag under the given query path as longarray and // store it under the given *[]int64, or return an error if the tag under the // path is not a longarray tag. MapLongArray(string, *[]int64) error // MapList will interpret the tag under the given query path as list. It will return // an error if that tag is not a list (also returns an error if the path points to // an array). Before calling the mapping function, it will call the initializer function // once with the size of the list, allowing for preallocation. After that, it will call // the mapping function for every element in the list, with the mapper containing only // the list element at index i. i is the zero-based index of an element in the list. MapList(query string, initializer func(int), mapping func(i int, mapper Mapper) error) error // MapCustom is equivalent to calling Query, and then calling the given function with the tag // udner the query, or return an error if any. 
MapCustom(string, func(Tag) error) error }
mapper.go
0.508544
0.473292
mapper.go
starcoder
package q3bsp import ( "image/color" "github.com/g3n/engine/math32" ) // BSP is a binary space partition type BSP struct { header bspHeader EntityInfo string Textures []*Texture Planes []*Plane Nodes []*Node Leaves []*Leaf LeafFaces []*LeafFace // LeafBrushes stores lists of brush indices, with one list per leaf. There are a total of length / sizeof(leafbrush) records in the lump, where length is the size of the lump itself, as specified in the lump directory. LeafBrushes []*LeafBrush // Models describes rigid groups of world geometry. The first model correponds to the base portion of the map while the remaining models correspond to movable portions of the map, such as the map's doors, platforms, and buttons. Each model has a list of faces and list of brushes; these are especially important for the movable parts of the map, which (unlike the base portion of the map) do not have BSP trees associated with them. There are a total of length / sizeof(models) records in the lump, where length is the size of the lump itself, as specified in the lump directory. Models []*Model // Brushes stores a set of brushes, which are in turn used for collision detection. Each brush describes a convex volume as defined by its surrounding surfaces. There are a total of length / sizeof(brushes) records in the lump, where length is the size of the lump itself, as specified in the lump directory. Brushes []*Brush // BrushSides stores descriptions of brush bounding surfaces. There are a total of length / sizeof(brushsides) records in the lump, where length is the size of the lump itself, as specified in the lump directory. BrushSides []*BrushSide // Vertexes stores lists of vertex offsets, used to describe generalized triangle meshes. There are a total of length / sizeof(meshvert) records in the lump, where length is the size of the lump itself, as specified in the lump directory. 
Vertexes []*Vertex // MeshVertexOffsets stores lists of vertex offsets, used to describe generalized triangle meshes. There are a total of length / sizeof(meshvert) records in the lump, where length is the size of the lump itself, as specified in the lump directory. MeshVertexOffsets []*MeshVertexOffset // Effects stores references to volumetric shaders (typically fog) which affect the rendering of a particular group of faces. There are a total of length / sizeof(effect) records in the lump, where length is the size of the lump itself, as specified in the lump directory. Effects []*Effect // Faces stores information used to render the surfaces of the map. There are a total of length / sizeof(faces) records in the lump, where length is the size of the lump itself, as specified in the lump directory. Faces []*Face // Lightmaps stores the light map textures used make surface lighting look more realistic. There are a total of length / sizeof(lightmap) records in the lump, where length is the size of the lump itself, as specified in the lump directory. Lightmaps []*Lightmap // LightVolumes stores a uniform grid of lighting information used to illuminate non-map objects. There are a total of length / sizeof(lightvol) records in the lump, where length is the size of the lump itself, as specified in the lump directory. LightVolumes []*LightVolume // VisInfo stores bit vectors that provide cluster-to-cluster visibility information. There is exactly one visdata record, with a length equal to that specified in the lump directory. VisInfo []*VisData } func New() *BSP { b := &BSP{} b.header.Header = [4]byte{0x49, 0x42, 0x53, 0x50} b.header.Version = 0x2E return b } type entry struct { Offset int32 Size int32 } type bspHeader struct { Header [4]byte Version int32 } // Texture stores information about surfaces and volumes, which are in turn associated with faces, brushes, and brushsides. 
There are a total of length / sizeof(texture) records in the lump, where length is the size of the lump itself, as specified in the lump directory. type Texture struct { RawName [64]byte Flags int32 ContentFlags int32 } func (t *Texture) Name() string { return string(t.RawName[:]) } // Plane referenced by nodes and brushsides. There are a total of length / sizeof(plane) records in the lump, where length is the size of the lump itself, as specified in the lump directory. type Plane struct { Normal math32.Vector3 Dist float32 } // Node in the map's BSP tree. The BSP tree is used primarily as a spatial subdivision scheme, dividing the world into convex regions called leafs. The first node in the lump is the tree's root node. There are a total of length / sizeof(node) records in the lump, where length is the size of the lump itself, as specified in the lump directory. type Node struct { PlaneID int32 Children [2]int32 Mins [3]int32 Maxs [3]int32 } // Leaf of the map's BSP tree. Each leaf is a convex region that contains, among other things, a cluster index (for determining the other leafs potentially visible from within the leaf), a list of faces (for rendering), and a list of brushes (for collision detection). There are a total of length / sizeof(leaf) records in the lump, where length is the size of the lump itself, as specified in the lump directory. type Leaf struct { // Visdata cluster index ClusterID int32 Area int32 } // LeafFace stores leaf references, with one list per leaf. There are a total of length / sizeof(leafface) records in the lump, where length is the size of the lump itself, as specified in the lump directory. leafface type LeafFace struct { FaceID int32 } // LeafBrush has a brush reference type LeafBrush struct { BrushID int32 } // Model describes rigid groups of world geometry. 
The first model correponds to the base portion of the map while the remaining models correspond to movable portions of the map, such as the map's doors, platforms, and buttons. Each model has a list of faces and list of brushes; these are especially important for the movable parts of the map, which (unlike the base portion of the map) do not have BSP trees associated with them. There are a total of length / sizeof(models) records in the lump, where length is the size of the lump itself, as specified in the lump directory. type Model struct { // Bounding box min coord. Mins [3]int32 // Bounding box max coord. Maxs [3]int32 // First face for model. FaceID int32 FaceCount int32 BrushID int32 BrushCount int32 } // Brush is used for collision detection type Brush struct { BrushSide int32 BrushSideCount int32 TextureID int32 } // BrushSide is used for brush bounding surface info type BrushSide struct { PlaneID int32 TextureID int32 } type Vertex struct { Position math32.Vector3 // TexCoords stores texture coordinates. 0=surface, 1=lightmap. TexCoords [2][2]float32 Normal math32.Vector3 Color color.RGBA } type MeshVertexOffset struct { OffsetID int32 } type Effect struct { RawName [64]byte BrushID int32 // always 5 Unknown int32 } func (e *Effect) Name() string { return string(e.RawName[:]) } type Face struct { TextureID int32 EffectID int32 //1=polygon, 2=patch, 3=mesh, 4=billboard TypeID int32 // index of first vertex VertexID int32 VertexCount int32 MeshVertexID int32 MeshVertexCount int32 LightMapID int32 LightMapStart [2]int32 LightMapSize [2]int32 LightMapOrigin math32.Vector3 LightMapVectors [2]math32.Vector3 Normal math32.Vector3 Size math32.Vector2 } type Lightmap struct { Colors [128][128][3]uint8 } type LightVolume struct { Ambient [3]uint8 Directional [3]uint8 // 0=phi, 1=theta Direction [2]uint8 } type VisData struct { VectorCount int32 VectorSize int32 Vectors []uint8 } const ( // Game-related object descriptions. 
dirEntryEntities = 0 // Surface descriptions. dirEntryTextures = 1 // Planes used by map geometry. dirEntryPlanes = 2 // BSP tree nodes. dirEntryNodes = 3 // BSP tree leaves. dirEntryLeafs = 4 // Lists of face indices, one list per leaf. dirEntryLeaffaces = 5 // Lists of brush indices, one list per leaf. dirEntryLeafbrushes = 6 // Descriptions of rigid world geometry in map. dirEntryModels = 7 // Convex polyhedra used to describe solid space. dirEntryBrushes = 8 // Brush surfaces. dirEntryBrushsides = 9 // Vertices used to describe faces. dirEntryVertexes = 10 // Lists of offsets, one list per mesh. dirEntryMeshverts = 11 // List of special map effects. dirEntryEffects = 12 // Surface geometry. dirEntryFaces = 13 // Packed lightmap data. dirEntryLightmaps = 14 // Local illumination data. dirEntryLightvols = 15 // Cluster-cluster visibility data. dirEntryVisdata = 16 ) const ( //dirEntryEntitiesSize = 0 dirEntryTexturesSize = 76 dirEntryPlanesSize = 16 dirEntryNodesSize = 36 dirEntryLeafsSize = 8 dirEntryLeaffacesSize = 4 dirEntryLeafbrushesSize = 4 dirEntryModelsSize = 40 dirEntryBrushesSize = 12 dirEntryBrushsidesSize = 44 dirEntryVertexesSize = 44 dirEntryMeshvertsSize = 4 dirEntryEffectsSize = 72 dirEntryFacesSize = 108 dirEntryLightmapsSize = 49152 dirEntryLightvolsSize = 8 dirEntryVisdataSize = 8 )
q3bsp/bsp.go
0.591723
0.652643
bsp.go
starcoder
package tree import ( "fmt" dist "github.com/bkaraceylan/goophy/distance" ) //NJ creates an evolutionary tree from a distance matrix using neighbor-joining algorithm. func NJ(distmat dist.DistMat) *Tree { var intnodes []*Node tree := CreateTree("test") for _, lbl := range distmat.Ids { tree.AddNode(lbl, -1, "root") } for i := len(distmat.Matrix); i > 2; i-- { _, minqcoord, u1, u2 := minq(distmat) v1 := 0.5*distmat.Matrix[minqcoord[0]][minqcoord[1]] + 0.5*(u1-u2) v2 := 0.5*distmat.Matrix[minqcoord[0]][minqcoord[1]] + 0.5*(u2-u1) l1 := distmat.Ids[minqcoord[0]] l2 := distmat.Ids[minqcoord[1]] intlbl := fmt.Sprintf("int_%d", len(intnodes)) node := tree.AddNode(intlbl, -1, "") intnodes = append(intnodes, node) node2 := tree.GetNode(l1) node2.SetLength(v1) node3 := tree.GetNode(l2) node3.SetLength(v2) tree.AddChild(node, node2) tree.AddChild(node, node3) recalMatrix(&distmat, minqcoord[0], minqcoord[1], intlbl) } l1 := distmat.Ids[0] l2 := distmat.Ids[1] v1 := sumArray(distmat.Matrix[0]) / 2 v2 := sumArray(distmat.Matrix[1]) / 2 node2 := tree.GetNode(l1) node2.SetLength(v1) node3 := tree.GetNode(l2) node3.SetLength(v2) root := tree.GetNode("root") tree.AdoptChildren(root, node2) tree.RemoveNode(node2) tree.AddChild(root, node3) return tree } //recalMatrix recalculates the distance matrix. 
func recalMatrix(distmat *dist.DistMat, i int, j int, lbl string) { distmat.Ids = append(distmat.Ids, lbl) arr := make([]float64, len(distmat.Matrix)) distmat.Matrix = append(distmat.Matrix, arr) for k := 0; k < len(distmat.Matrix); k++ { distmat.Matrix[k] = append(distmat.Matrix[k], 0.0) } dist1 := distmat.Matrix[i][j] for k := 0; k < len(distmat.Matrix); k++ { if k == len(distmat.Matrix)-1 { continue } dist2 := distmat.Matrix[i][k] dist3 := distmat.Matrix[j][k] newdist := (dist2 + dist3 - dist1) / 2 distmat.Matrix[len(distmat.Matrix)-1][k] = newdist distmat.Matrix[k][len(distmat.Matrix)-1] = newdist } distmat.Ids = removeLabel(distmat.Ids, i) distmat.Ids = removeLabel(distmat.Ids, j) distmat.Matrix = removeRow(distmat.Matrix, i) distmat.Matrix = removeRow(distmat.Matrix, j) for k := 0; k < len(distmat.Matrix); k++ { distmat.Matrix[k] = removeCol(distmat.Matrix[k], i) distmat.Matrix[k] = removeCol(distmat.Matrix[k], j) } } func removeRow(s [][]float64, index int) [][]float64 { return append(s[:index], s[index+1:]...) } func removeCol(s []float64, index int) []float64 { return append(s[:index], s[index+1:]...) } func removeLabel(s []string, index int) []string { return append(s[:index], s[index+1:]...) } //minq calculates the qmatrix end returns the minimum q value, coords of the q value, u1 and u2 values for the minimumq func minq(distmat dist.DistMat) (float64, [2]int, float64, float64) { matrix := distmat.Matrix qmat := make([][]float64, len(matrix)) var minq float64 var us1 float64 var us2 float64 var minqcoord [2]int for i := 0; i < len(qmat); i++ { qmat[i] = make([]float64, len(matrix)) // j < len(qmat) old for j := 0; j < i; j++ { u1 := sumArray(matrix[i]) / float64((len(qmat) - 2)) u2 := sumArray(matrix[j]) / float64((len(qmat) - 2)) q := matrix[i][j] - u1 - u2 qmat[i][j] = q if q < minq { minq = q us1 = u1 us2 = u2 minqcoord[0] = i minqcoord[1] = j } } } return minq, minqcoord, us1, us2 } //sumArray returns the sum of values in an array. 
func sumArray(nums []float64) float64 {
	sum := 0.0
	for _, num := range nums {
		sum += num
	}
	return sum
}

// minArray returns the minimum value in an array.
// It returns 0.0 for an empty slice.
func minArray(nums []float64) float64 {
	if len(nums) == 0 {
		return 0.0
	}
	// Bug fix: seed with the first element instead of 0.0, so slices
	// containing only positive values return their true minimum.
	min := nums[0]
	for _, num := range nums[1:] {
		if num < min {
			min = num
		}
	}
	return min
}
tree/nj.go
0.727589
0.616445
nj.go
starcoder
package CloudForest import ( "fmt" ) /* DensityTarget is used for density estimating trees. It contains a set of features and the count of cases. */ type DensityTarget struct { Features *[]Feature N int } func (target *DensityTarget) GetName() string { return "DensityTarget" } /* DensityTarget.SplitImpurity is a density estimating version of SplitImpurity. */ func (target *DensityTarget) SplitImpurity(l *[]int, r *[]int, m *[]int, allocs *BestSplitAllocs) (impurityDecrease float64) { nl := float64(len(*l)) nr := float64(len(*r)) nm := 0.0 impurityDecrease = nl * target.Impurity(l, nil) impurityDecrease += nr * target.Impurity(r, nil) if m != nil && len(*m) > 0 { nm = float64(len(*m)) impurityDecrease += nm * target.Impurity(m, nil) } impurityDecrease /= nl + nr + nm return } //UpdateSImpFromAllocs willl be called when splits are being built by moving cases from r to l as in learning from numerical variables. //Here it just wraps SplitImpurity but it can be implemented to provide further optimization. func (target *DensityTarget) UpdateSImpFromAllocs(l *[]int, r *[]int, m *[]int, allocs *BestSplitAllocs, movedRtoL *[]int) (impurityDecrease float64) { return target.SplitImpurity(l, r, m, allocs) } //DensityTarget.Impurity uses the impurity measure defined in "Density Estimating Trees" //by <NAME> and <NAME> func (target *DensityTarget) Impurity(cases *[]int, counter *[]int) (e float64) { t := len(*cases) e = float64(t*t) / float64(target.N*target.N) for _, f := range *target.Features { switch f.(type) { case CatFeature: bigenoughcounter := make([]int, f.NCats()) e /= f.Span(cases, &bigenoughcounter) case NumFeature: e /= f.Span(cases, nil) } } return } //DensityTarget.FindPredicted returns the string representation of the density in the region //spaned by the specified cases. 
func (target *DensityTarget) FindPredicted(cases []int) string { t := len(cases) e := float64(t) / float64(target.N) for _, f := range *target.Features { switch f.(type) { case CatFeature: bigenoughcounter := make([]int, f.NCats()) e /= f.Span(&cases, &bigenoughcounter) case NumFeature: e /= f.Span(&cases, nil) } } return fmt.Sprintf("%v", e) } func (target *DensityTarget) NCats() int { return 0 }
densitytarget.go
0.75101
0.439868
densitytarget.go
starcoder
package klog import ( "cloud.google.com/go/civil" "errors" "fmt" "math" "regexp" "strings" gotime "time" ) // Date represents a day in the gregorian calendar. type Date interface { // Year returns the year as number, e.g. `2004`. Year() int // Month returns the month as number, e.g. `3` for March. Month() int // Day returns the day as number, e.g. `21`. Day() int // Weekday returns the day of the week, starting from Monday = 1. Weekday() int // Quarter returns the quarter that the date is in, e.g. `2` for `2010-04-15`. Quarter() int // WeekNumber returns the number of the week in the calendar year. WeekNumber() int // IsEqualTo checks whether two dates are the same. IsEqualTo(Date) bool // IsAfterOrEqual checks whether the given date occurs afterwards or at the same date. IsAfterOrEqual(Date) bool // PlusDays adds a number of days to the date. It doesn’t modify // the original object. PlusDays(int) Date // ToString serialises the date, e.g. `2017-04-23`. ToString() string // ToStringWithFormat serialises the date according to the given format. ToStringWithFormat(DateFormat) string // Format returns the current formatting. Format() DateFormat } // DateFormat contains the formatting options for the Date. 
type DateFormat struct { UseDashes bool } type date struct { year int month int day int format DateFormat } var datePattern = regexp.MustCompile(`^(\d{4})[-/](\d{2})[-/](\d{2})$`) func NewDate(year int, month int, day int) (Date, error) { cd := civil.Date{ Year: year, Month: gotime.Month(month), Day: day, } return civil2Date(cd, DateFormat{UseDashes: true}) } func NewDateFromString(yyyymmdd string) (Date, error) { match := datePattern.FindStringSubmatch(yyyymmdd) if len(match) != 4 || match[1] == "0" || match[2] == "0" || match[3] == "0" { return nil, errors.New("MALFORMED_DATE") } if c := strings.Count(yyyymmdd, "-"); c == 1 { // `-` and `/` mixed return nil, errors.New("MALFORMED_DATE") } cd, err := civil.ParseDate(match[1] + "-" + match[2] + "-" + match[3]) if err != nil || !cd.IsValid() { return nil, errors.New("UNREPRESENTABLE_DATE") } return civil2Date(cd, DateFormat{UseDashes: strings.Contains(yyyymmdd, "-")}) } func NewDateFromGo(t gotime.Time) Date { d, err := NewDate(t.Year(), int(t.Month()), t.Day()) if err != nil { // This can/should never occur panic("ILLEGAL_DATE") } return d } func civil2Date(cd civil.Date, format DateFormat) (Date, error) { if !cd.IsValid() { return nil, errors.New("UNREPRESENTABLE_DATE") } if cd.Year > 9999 { // A year greater than 9999 cannot be serialised according to YYYY-MM-DD. 
return nil, errors.New("UNREPRESENTABLE_DATE") } return &date{ year: cd.Year, month: int(cd.Month), day: cd.Day, format: format, }, nil } func date2Civil(d *date) civil.Date { return civil.Date{ Year: d.year, Month: gotime.Month(d.month), Day: d.day, } } func (d *date) ToString() string { separator := "-" if !d.format.UseDashes { separator = "/" } return fmt.Sprintf("%04d%s%02d%s%02d", d.year, separator, d.month, separator, d.day) } func (d *date) Year() int { return d.year } func (d *date) Month() int { return d.month } func (d *date) Day() int { return d.day } func (d *date) Weekday() int { x := int(date2Civil(d).In(gotime.UTC).Weekday()) if x == 0 { return 7 } return x } func (d *date) Quarter() int { quarter := math.Ceil(float64(d.Month()) / 3) return int(quarter) } func (d *date) WeekNumber() int { _, week := date2Civil(d).In(gotime.UTC).ISOWeek() return week } func (d *date) IsEqualTo(otherDate Date) bool { return d.Year() == otherDate.Year() && d.Month() == otherDate.Month() && d.Day() == otherDate.Day() } func (d *date) IsAfterOrEqual(otherDate Date) bool { if d.Year() != otherDate.Year() { return d.Year() >= otherDate.Year() } if d.Month() != otherDate.Month() { return d.Month() >= otherDate.Month() } return d.Day() >= otherDate.Day() } func (d *date) PlusDays(dayIncrement int) Date { cd := date2Civil(d).AddDays(dayIncrement) newDate, err := civil2Date(cd, d.format) if err != nil { panic(err) } return newDate } func (d *date) ToStringWithFormat(f DateFormat) string { nDate := *d nDate.format = f return nDate.ToString() } func (d *date) Format() DateFormat { return d.format }
src/date.go
0.83752
0.561996
date.go
starcoder
package blip import ( "errors" "unsafe" ) type buf_t = int32 // Sample buffer that resamples to output rate and accumulates samples until they're read out type Blip struct { factor uint64 offset uint64 avail int32 size int32 integrator int32 buffer []buf_t } // Creates new buffer that can hold at most sample_count samples. Sets rates // so that there are blip_max_ratio clocks per sample. Returns pointer to new // buffer, or NULL if insufficient memory. func New(size uint) *Blip { m := &Blip{ factor: timeUnit / MaxRatio, size: int32(size), buffer: make([]buf_t, size+bufExtra), } m.Clear() return m } // Frees buffer. No effect if NULL is passed. func (b *Blip) Delete() { if b != nil { b = nil } } // Sets approximate input clock rate and output sample rate. For every // clock_rate input clocks, approximately sample_rate samples are generated. func (b *Blip) SetRates(clockRate, sampleRate float64) error { factor := timeUnit * sampleRate / clockRate b.factor = uint64(factor) if !(0 <= factor-float64(b.factor) && factor-float64(b.factor) < 1) { return errors.New("clockRate exceeds maximum, relative to sampleRate") } /* Avoid requiring math.h. Equivalent to m->factor = (int) ceil( factor ) */ if float64(b.factor) < factor { b.factor++ } /* At this point, factor is most likely rounded up, but could still have been rounded down in the floating-point calculation. */ return nil } func (b *Blip) Clear() { /* We could set offset to 0, factor/2, or factor-1. 0 is suitable if factor is rounded up. factor-1 is suitable if factor is rounded down. Since we don't know rounding direction, factor/2 accommodates either, with the slight loss of showing an error in half the time. Since for a 64-bit factor this is years, the halving isn't a problem. */ b.offset = b.factor / 2 b.avail = 0 b.integrator = 0 for i := range b.buffer { b.buffer[i] = 0 } } // Length of time frame, in clocks, needed to make sample_count additional samples available. 
func (b *Blip) ClocksNeeded(samples uint) int { if b.avail+int32(samples) > b.size { return 0 } needed := uint64(samples) * timeUnit if needed < b.offset { return 0 } return int((needed - b.offset + b.factor - 1) / b.factor) } // Makes input clocks before clock_duration available for reading as output // samples. Also begins new time frame at clock_duration, so that clock time 0 in // the new time frame specifies the same clock as clock_duration in the old time // frame specified. Deltas can have been added slightly past clock_duration (up to // however many clocks there are in two output samples). func (b *Blip) EndFrame(t uint) error { off := uint64(t)*b.factor + b.offset b.avail += int32(off >> timeBits) b.offset = off & (timeUnit - 1) if b.avail > b.size { return errors.New("buffer size was exceeded") } return nil } // Number of buffered samples available for reading. func (b *Blip) SamplesAvail() int { return int(b.avail) } func (b *Blip) removeSamples(count int) { remain := b.avail + int32(bufExtra) - int32(count) b.avail -= int32(count) for i := 0; i < int(remain); i++ { b.buffer[i] = b.buffer[count+i] } for i := 0; i < count; i++ { b.buffer[remain+int32(i)] = 0 } } func (b *Blip) ReadSamples(out unsafe.Pointer, count int, stereo bool) int { if count < 0 { return 0 } if int32(count) > b.avail { count = int(b.avail) } if count > 0 { step := 1 if stereo { step = 2 } sum := b.integrator for i := 0; i < count; i++ { s := sum >> deltaBits // Eliminate fraction sum += b.buffer[i] s = clamp(s) *(*int16)(out) = int16(s) out = unsafe.Add(out, step*2) // High-pass filter sum -= s << (deltaBits - bassShift) } b.integrator = sum b.removeSamples(count) } return count } func (b *Blip) AddDelta(time uint, delta int) error { fixed := uint32((uint64(time)*b.factor + b.offset) >> preShift) out := b.buffer[b.avail+int32(fixed>>fracBits):] phaseShift := fracBits - phaseBits phase := (fixed >> phaseShift) & (phaseCount - 1) in := blStep[phase] interp := int((fixed >> (phaseShift 
- deltaBits)) & (deltaUnit - 1)) delta2 := (delta * interp) >> deltaBits delta -= delta2 if b.avail+int32(fixed>>fracBits) > b.size+endFrameExtra { return errors.New("buffer size was exceeded") } next := blStep[phase+1] for i := 0; i < 8; i++ { out[i] += int32(int(in[i])*delta + int(next[i])*delta2) } in = blStep[phaseCount-phase] prev := blStep[phaseCount-phase-1] for i := 0; i < 8; i++ { out[8+i] += int32(int(in[7-i])*delta + int(prev[7-i])*delta2) } return nil } // Same as blip_add_delta(), but uses faster, lower-quality synthesis. func (b *Blip) AddDeltaFast(time uint, delta int) error { fixed := uint((uint64(time)*b.factor + b.offset) >> preShift) out := b.buffer[b.avail+int32(fixed>>fracBits):] interp := int((fixed >> (fracBits - deltaBits)) & (deltaUnit - 1)) delta2 := delta * interp if b.avail+int32(fixed>>fracBits) > b.size+endFrameExtra { return errors.New("buffer size was exceeded") } out[7] += int32(delta*deltaUnit - delta2) out[8] += int32(delta2) return nil }
blip.go
0.808029
0.434821
blip.go
starcoder
package gotree import ( "log" ) // BST is a struct for a Binary Search Tree. type BST struct { root *BSTNode } // Root returns the root node of this BST. func (tree *BST) Root() *BSTNode { return tree.root } // BSTNode is a Binary Search Tree Node. type BSTNode struct { value Element left *BSTNode right *BSTNode } // Value returns the element value stored in this node. func (head *BSTNode) Value() Element { return head.value } // Left returns the left child node of this node. func (head *BSTNode) Left() *BSTNode { return head.left } // Right returns the right child node of this node. func (head *BSTNode) Right() *BSTNode { return head.right } // NewBST returns a new Binary Search Tree created from the given // list of elements. func NewBST(elems ...Element) *BST { tree := &BST{} for _, e := range elems { tree.Insert(e) } return tree } // Insert inserts a new element to the head BST node. func (tree *BST) Insert(elem Element) { node := &BSTNode{value: elem} tree.InsertNode(node) } // InsertNode inserts a new BST node to the head node. func (tree *BST) InsertNode(node *BSTNode) { if tree == nil { log.Printf("Insertion of %+v skipped because the tree is nil.\n", node) return } if tree.root == nil { tree.root = node return } tree.root.insert(node) } // insert is an insertion operation for a BSTNode. func (head *BSTNode) insert(node *BSTNode) { if head == nil { log.Printf("Insertion of %+v skipped because the head is nil.\n", node) return } if head.value.Less(node.value) { if head.right == nil { head.right = node } else { head.right.insert(node) } } else if node.value.Less(head.value) { if head.left == nil { head.left = node } else { head.left.insert(node) } } } // Delete deletes the node with the given element as its value. func (tree *BST) Delete(elem Element) { tree.root = tree.root.delete(elem) } // delete is a deletion operation for a BSTNode. // It returns the updated node, which is a replacement for the given head node. 
func (head *BSTNode) delete(elem Element) *BSTNode { if head == nil { return head } if elem.Less(head.value) { head.left = head.left.delete(elem) } else if head.value.Less(elem) { head.right = head.right.delete(elem) } else { if head.left == nil { return head.right } else if head.right == nil { return head.left } head.value = head.right.minValue() head.right = head.right.delete(head.value) } return head } // minValue finds a node with the minimum value under the given BST node // and returns its value. func (head *BSTNode) minValue() Element { minVal := head.value for head.left != nil { head = head.left minVal = head.value } return minVal }
bst.go
0.827793
0.53783
bst.go
starcoder
package mu // Sum returns the sum of its arguments. // An empty argument list raises a panic. func Sum(nums ...int) int { if len(nums) == 0 { panic("Sum argument list empty") } var total int = 0 for _, num := range nums { total += num } return total } // SumI8 returns the sum of its arguments. // An empty argument list raises a panic. func SumI8(nums ...int8) int8 { if len(nums) == 0 { panic("SumI8 argument list empty") } var total int8 = 0 for _, num := range nums { total += num } return total } // SumI16 returns the sum of its arguments. // An empty argument list raises a panic. func SumI16(nums ...int16) int16 { if len(nums) == 0 { panic("SumI16 argument list empty") } var total int16 = 0 for _, num := range nums { total += num } return total } // SumI32 returns the sum of its arguments. // An empty argument list raises a panic. func SumI32(nums ...int32) int32 { if len(nums) == 0 { panic("SumI32 argument list empty") } var total int32 = 0 for _, num := range nums { total += num } return total } // SumI64 returns the sum of its arguments. // An empty argument list raises a panic. func SumI64(nums ...int64) int64 { if len(nums) == 0 { panic("SumI64 argument list empty") } var total int64 = 0 for _, num := range nums { total += num } return total } // SumU8 returns the sum of its arguments. // An empty argument list raises a panic. func SumU8(nums ...uint8) uint8 { if len(nums) == 0 { panic("SumU8 argument list empty") } var total uint8 = 0 for _, num := range nums { total += num } return total } // SumU16 returns the sum of its arguments. // An empty argument list raises a panic. func SumU16(nums ...uint16) uint16 { if len(nums) == 0 { panic("SumU16 argument list empty") } var total uint16 = 0 for _, num := range nums { total += num } return total } // SumU32 returns the sum of its arguments. // An empty argument list raises a panic. 
func SumU32(nums ...uint32) uint32 { if len(nums) == 0 { panic("SumU32 argument list empty") } var total uint32 = 0 for _, num := range nums { total += num } return total } // SumU64 returns the sum of its arguments. // An empty argument list raises a panic. func SumU64(nums ...uint64) uint64 { if len(nums) == 0 { panic("SumU64 argument list empty") } var total uint64 = 0 for _, num := range nums { total += num } return total } // SumF32 returns the sum of its arguments. // An empty argument list raises a panic. func SumF32(nums ...float32) float32 { if len(nums) == 0 { panic("SumF32 argument list empty") } var total float32 = 0 for _, num := range nums { total += num } return total } // SumF64 returns the sum of its arguments. // An empty argument list raises a panic. func SumF64(nums ...float64) float64 { if len(nums) == 0 { panic("SumF64 argument list empty") } var total float64 = 0 for _, num := range nums { total += num } return total } // Min returns the minimum of its arguments. // An empty argument list raises a panic. func Min(nums ...int) int { if len(nums) == 0 { panic("Min argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinI8 returns the minimum of its arguments. // An empty argument list raises a panic. func MinI8(nums ...int8) int8 { if len(nums) == 0 { panic("MinI8 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinI16 returns the minimum of its arguments. // An empty argument list raises a panic. func MinI16(nums ...int16) int16 { if len(nums) == 0 { panic("MinI16 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinI32 returns the minimum of its arguments. // An empty argument list raises a panic. 
func MinI32(nums ...int32) int32 { if len(nums) == 0 { panic("MinI32 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinI64 returns the minimum of its arguments. // An empty argument list raises a panic. func MinI64(nums ...int64) int64 { if len(nums) == 0 { panic("MinI64 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinU8 returns the minimum of its arguments. // An empty argument list raises a panic. func MinU8(nums ...uint8) uint8 { if len(nums) == 0 { panic("MinU8 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinU16 returns the minimum of its arguments. // An empty argument list raises a panic. func MinU16(nums ...uint16) uint16 { if len(nums) == 0 { panic("MinU16 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinU32 returns the minimum of its arguments. // An empty argument list raises a panic. func MinU32(nums ...uint32) uint32 { if len(nums) == 0 { panic("MinU32 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinU64 returns the minimum of its arguments. // An empty argument list raises a panic. func MinU64(nums ...uint64) uint64 { if len(nums) == 0 { panic("MinU64 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinF32 returns the minimum of its arguments. // An empty argument list raises a panic. func MinF32(nums ...float32) float32 { if len(nums) == 0 { panic("MinF32 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // MinF64 returns the minimum of its arguments. // An empty argument list raises a panic. 
func MinF64(nums ...float64) float64 { if len(nums) == 0 { panic("MinF64 argument list empty") } min := nums[0] for _, num := range nums { if num < min { min = num } } return min } // Max returns the maximum of its arguments. // An empty argument list raises a panic. func Max(nums ...int) int { if len(nums) == 0 { panic("Max argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxI8 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxI8(nums ...int8) int8 { if len(nums) == 0 { panic("MaxI8 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxI16 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxI16(nums ...int16) int16 { if len(nums) == 0 { panic("MaxI16 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxI32 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxI32(nums ...int32) int32 { if len(nums) == 0 { panic("MaxI32 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxI64 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxI64(nums ...int64) int64 { if len(nums) == 0 { panic("MaxI64 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxU8 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxU8(nums ...uint8) uint8 { if len(nums) == 0 { panic("MaxU8 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxU16 returns the maximum of its arguments. // An empty argument list raises a panic. 
func MaxU16(nums ...uint16) uint16 { if len(nums) == 0 { panic("MaxU16 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxU32 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxU32(nums ...uint32) uint32 { if len(nums) == 0 { panic("MaxU32 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxU64 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxU64(nums ...uint64) uint64 { if len(nums) == 0 { panic("MaxU64 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxF32 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxF32(nums ...float32) float32 { if len(nums) == 0 { panic("MaxF32 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // MaxF64 returns the maximum of its arguments. // An empty argument list raises a panic. func MaxF64(nums ...float64) float64 { if len(nums) == 0 { panic("MaxF64 argument list empty") } max := nums[0] for _, num := range nums { if num > max { max = num } } return max } // Abs returns the absolute values of its argument. func Abs(num int) int { var abs int switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbs returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbs(nums ...int) []int { if len(nums) == 0 { panic("VAbs argument list empty") } var abs []int for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsI8 returns the absolute values of its argument. func AbsI8(num int8) int8 { var abs int8 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsI8 returns the absolute values of its arguments. 
// An empty argument list raises a panic. func VAbsI8(nums ...int8) []int8 { if len(nums) == 0 { panic("VAbsI8 argument list empty") } var abs []int8 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsI16 returns the absolute values of its argument. func AbsI16(num int16) int16 { var abs int16 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsI16 returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbsI16(nums ...int16) []int16 { if len(nums) == 0 { panic("VAbsI16 argument list empty") } var abs []int16 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsI32 returns the absolute values of its argument. func AbsI32(num int32) int32 { var abs int32 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsI32 returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbsI32(nums ...int32) []int32 { if len(nums) == 0 { panic("VAbsI32 argument list empty") } var abs []int32 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsI64 returns the absolute values of its argument. func AbsI64(num int64) int64 { var abs int64 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsI64 returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbsI64(nums ...int64) []int64 { if len(nums) == 0 { panic("VAbsI64 argument list empty") } var abs []int64 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsF32 returns the absolute values of its argument. 
func AbsF32(num float32) float32 { var abs float32 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsF32 returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbsF32(nums ...float32) []float32 { if len(nums) == 0 { panic("VAbsF32 argument list empty") } var abs []float32 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs } // AbsF64 returns the absolute values of its argument. func AbsF64(num float64) float64 { var abs float64 switch num < 0 { case true: abs = -num default: abs = num } return abs } // VAbsF64 returns the absolute values of its arguments. // An empty argument list raises a panic. func VAbsF64(nums ...float64) []float64 { if len(nums) == 0 { panic("VAbsF64 argument list empty") } var abs []float64 for _, num := range nums { switch num < 0 { case true: abs = append(abs, -num) default: abs = append(abs, num) } } return abs }
mu_g.go
0.747063
0.457864
mu_g.go
starcoder
package asciiturtle import ( "fmt" "math" "strings" ) const degToRad = math.Pi / 180.0 type Pen struct { Canvas Canvas X, Y int Char byte Heading float64 penUp bool } func NewPen(canvas Canvas, char byte, x, y int) (*Pen, error) { if canvas == nil { return nil, fmt.Errorf("canvas must not be nil") } switch { case x < 0: x = 0 case x >= canvas.Width(): x = canvas.Width() - 1 } switch { case y < 0: y = 0 case y >= canvas.Height(): y = canvas.Height() - 1 } return &Pen{ Canvas: canvas, X: x, Y: y, Char: char, }, nil } func (p *Pen) PenUp() { p.penUp = true } func (p *Pen) PenDown() { p.penUp = false } func (p *Pen) Dot() { if p.penUp { return } p.Canvas[p.Y][p.X] = p.Char } func (p *Pen) Goto(x, y int) { switch { case x < 0: x = 0 case x >= p.Canvas.Width(): x = p.Canvas.Width() - 1 } switch { case y < 0: y = 0 case y >= p.Canvas.Height(): y = p.Canvas.Height() - 1 } p.X = x p.Y = y } func (p *Pen) Forward(distance int) { d := float64(distance) x1 := p.X + int(d*math.Cos(p.Heading*degToRad)) y1 := p.Y - int(d*math.Sin(p.Heading*degToRad)) p.drawTo(x1, y1) } func (p *Pen) Backward(distance int) { d := float64(distance) x1 := p.X - int(d*math.Cos(p.Heading*degToRad)) y1 := p.Y + int(d*math.Sin(p.Heading*degToRad)) p.drawTo(x1, y1) } func (p *Pen) drawTo(x1, y1 int) { x0 := p.X y0 := p.Y d := math.Max(math.Abs(float64(x1)-float64(x0)), math.Abs(float64(y1)-float64(y0))) for i := 0; i <= int(d); i++ { t := float64(i) / d x, y := lerpPoint(x0, y0, x1, y1, t) if x < 0 || x >= p.Canvas.Width() || y < 0 || y >= p.Canvas.Height() { break } p.X = x p.Y = y p.Dot() } } func lerpPoint(x0, y0, x1, y1 int, t float64) (int, int) { x := int(math.Round(lerp(float64(x0), float64(x1), t))) y := int(math.Round(lerp(float64(y0), float64(y1), t))) return x, y } func lerp(n, m, t float64) float64 { return n + t*(m-n) } func (p *Pen) Right(deg float64) { p.Heading -= deg } func (p *Pen) Left(deg float64) { p.Heading += deg } type Canvas [][]byte func NewCanvas(x, y int) Canvas { if x <= 0 || 
y <= 0 { return [][]byte{} } canvas := make([][]byte, y) for i := range canvas { canvas[i] = make([]byte, x) } return canvas } func (c Canvas) String() string { var b strings.Builder for _, row := range c { for _, v := range row { if v == 0 { b.WriteByte(' ') continue } b.WriteByte(v) } b.WriteByte('\n') } return b.String() } func (c Canvas) Height() int { return len(c) } func (c Canvas) Width() int { if len(c) == 0 { return 0 } return len(c[0]) }
asciiturtle.go
0.659295
0.512144
asciiturtle.go
starcoder
package tictactoe import "github.com/z-rui/game" // N is the board size of the Tic-Tac-Toe game const N = 3 // Cell represents a cell of the board. // It has three states: Empty, O and X. type Cell uint8 const ( Empty Cell = iota O X ) // String converts a cell to the string representation. func (c Cell) String() string { switch c { case O: return "O" case X: return "X" default: return " " } } // State represents the current state of the game. type State struct { Board [N][N]Cell LastMove Move Turn Cell // must be O or X } // NewState returns a new state at the start of the game. func NewState() *State { s := new(State) s.LastMove = invalidMove s.Turn = O return s } // Dim returns the dimension of the board func (s *State) Dim() (int, int) { return N, N } // Get returns the string representation at (i, j) func (s *State) Get(i, j int) string { return s.Board[i][j].String() } // Eval returns the evaluation of the current state. func (s *State) Eval() (eval game.Evaluation) { if s.LastMove == invalidMove { return 0 } won := s.match(0, 1) for k := -1; k <= 1; k++ { won = won || s.match(1, k) } if won { switch s.Turn { case O: return game.Lost case X: return game.Won } } return 0 } // IsEnd tells if the game has ended. func (s *State) IsEnd() bool { switch s.Eval() { case game.Won, game.Lost: return true } for i := 0; i < N; i++ { for j := 0; j < N; j++ { if s.Board[i][j] == Empty { return false } } } return true } func (s *State) match(di, dj int) bool { i, j := int(s.LastMove.I), int(s.LastMove.J) cell := s.Board[i][j] for { i1, j1 := i-di, j-dj if 0 <= i1 && i1 < N && 0 <= j1 && j1 < N && s.Board[i1][j1] == cell { i, j = i1, j1 } else { break } } n := 0 for { n++ i1, j1 := i+di, j+dj if 0 <= i1 && i1 < N && 0 <= j1 && j1 < N && s.Board[i1][j1] == cell { i, j = i1, j1 } else { break } } return n == N } // Next returns all possible next states. 
func (s *State) Next() (nxt []game.State) { if s.IsEnd() { return } nxt = make([]game.State, 0, 4) for i := 0; i < N; i++ { for j := 0; j < N; j++ { if t := s.Move(Move{uint8(i), uint8(j)}); t != nil { nxt = append(nxt, t) } } } return } // Move returns the next state based on the move. // It returns nil if the move is not allowed. func (s *State) Move(m Move) (t *State) { if !m.Valid() || !m.Allowed(s) { return } t = new(State) t.Board = s.Board t.Board[m.I][m.J] = s.Turn t.LastMove = m t.Turn = s.Turn ^ (O ^ X) return t }
tictactoe/state.go
0.761006
0.476701
state.go
starcoder
package stargen import ( "math" "github.com/dayaftereh/discover/server/utils" "github.com/dayaftereh/discover/server/mathf" ) type StellarClass struct { Class string Color int64 Mass *mathf.Range Radius *mathf.Range Temperature *mathf.Range Luminosity *mathf.Range } // Stellar classification https://en.wikipedia.org/wiki/Stellar_classification var StellarClassification []*StellarClass = []*StellarClass{ &StellarClass{ Class: "O", Color: 255, // blue Mass: mathf.NewRange(16.0, 120.0), // >= 16 M Radius: mathf.NewRange(6.6, math.MaxFloat64), // >= 6.6 R Temperature: mathf.NewRange(30, 50), // >= 30 *1000 K Luminosity: mathf.NewRange(30000, math.MaxFloat64), // >= 30000 L }, &StellarClass{ Class: "B", Color: 10079487, // blue white Mass: mathf.NewRange(2.1, 16.0), // 2.1–16 M Radius: mathf.NewRange(1.8, 6.6), // 1.8–6.6 R Temperature: mathf.NewRange(10, 30), // 10 - 30 *1000 K Luminosity: mathf.NewRange(25, 30000), // 25 - 30000 L }, &StellarClass{ Class: "A", Color: 16777215, // white Mass: mathf.NewRange(1.4, 2.1), // 1.4 - 2.1 M Radius: mathf.NewRange(1.4, 1.8), // 1.4 - 1.8 R Temperature: mathf.NewRange(7.5, 10), // 7.5 - 10 *1000 K Luminosity: mathf.NewRange(5, 25), // 5 - 25 L }, &StellarClass{ Class: "F", Color: 16777164, // yellow white Mass: mathf.NewRange(1.04, 1.4), // 1.04 - 1.4 M Radius: mathf.NewRange(1.15, 1.4), // 1.15 - 1.4 R Temperature: mathf.NewRange(6, 7.5), // 6 - 7.5 *1000 K Luminosity: mathf.NewRange(1.5, 5), // 1.5 - 5 L }, &StellarClass{ Class: "G", Color: 16776960, // yellow Mass: mathf.NewRange(0.8, 1.04), // 0.8 - 1.04 M Radius: mathf.NewRange(0.96, 1.15), // 0.96 - 1.15 R Temperature: mathf.NewRange(5.2, 6.0), // 5.2 - 6 *1000 K Luminosity: mathf.NewRange(0.6, 1.5), // 0.6 - 1.5 L }, &StellarClass{ Class: "K", Color: 16761446, // light orange Mass: mathf.NewRange(0.45, 0.8), // 0.45 - 0.8 M Radius: mathf.NewRange(0.7, 0.96), // 0.7 - 0.96 R Temperature: mathf.NewRange(3.7, 5.2), // 3.7 - 5.2 *1000 K Luminosity: mathf.NewRange(0.08, 0.6), 
// 0.08 - 0.6 L }, &StellarClass{ Class: "M", Color: 13395456, // orange red Mass: mathf.NewRange(0.08, 0.45), // 0.08 - 0.45 M Radius: mathf.NewRange(0.0, 0.7), // 0 - 0.7 R Temperature: mathf.NewRange(2.4, 3.7), // 2.4 - 3.7 *1000 K Luminosity: mathf.NewRange(0.0, 0.08), // 0.0 - 0.08 L }, } func RandStellarClass() *StellarClass { length := len(StellarClassification) index := utils.RandIntn(length) class := StellarClassification[index] return class }
server/game/universe/generator/stargen/stellar-classification.go
0.633864
0.429011
stellar-classification.go
starcoder
package lstm import ( "context" "io" "github.com/owulveryck/lstm/datasetter" G "gorgonia.org/gorgonia" "gorgonia.org/tensor" ) // basicReadWriter is a dummy structure that fufil the datasetter.ReadWriter interface // Is it used to build a one step execution graph type basicReadWriter struct { input *G.Node step int output *G.Node } func (b *basicReadWriter) ReadInputVector(g *G.ExprGraph) (*G.Node, error) { if b.step >= 1 { return nil, io.EOF } b.step++ return b.input, nil } func (b *basicReadWriter) WriteComputedVector(n *G.Node) error { b.output = n return nil } // GetComputedVectors ... func (b *basicReadWriter) GetComputedVectors() G.Nodes { return G.Nodes{b.output} } // Predict ... func (m *Model) Predict(ctx context.Context, dataSet datasetter.Float32ReadWriter) error { hiddenT := tensor.New(tensor.Of(tensor.Float32), tensor.WithShape(m.hiddenSize)) cellT := tensor.New(tensor.Of(tensor.Float32), tensor.WithShape(m.hiddenSize)) lstm := m.newLSTM(hiddenT, cellT) // Create the inputVector inputBacking := make([]float32, m.inputSize) inputT := tensor.New(tensor.Of(tensor.Float32), tensor.WithShape(m.inputSize), tensor.WithBacking(inputBacking)) input := G.NewVector(lstm.g, tensor.Float32, G.WithName("input"), G.WithShape(m.inputSize), G.WithValue(inputT)) // Create a dummy ReadWriter to build a basic computing graph dummySet := &basicReadWriter{ input: input, } // We need an empty memory to start... prevHidden := G.NewVector(lstm.g, tensor.Float32, G.WithName("hₜ₋₁"), G.WithShape(m.hiddenSize), G.WithValue(hiddenT)) prevCell := G.NewVector(lstm.g, tensor.Float32, G.WithName("Cₜ₋₁"), G.WithShape(m.hiddenSize), G.WithValue(cellT)) // First pass to get update the hidden state and the cell according to the input hidden, cell, err := lstm.forwardStep(dummySet, prevHidden, prevCell, 0) if err != nil { return err } //g := lstm.g.SubgraphRoots(dataSet.GetComputedVectors()...) 
//machine := G.NewTapeMachine(g, G.ExecuteFwdOnly()) machine := G.NewTapeMachine(lstm.g) for { inputValue, err := dataSet.Read() copy(input.Value().Data().([]float32), inputValue) if err == io.EOF { return nil } if err != nil { return err } err = machine.RunAll() if err != nil { return err } machine.Reset() dataSet.Write(dummySet.output.Value().Data().([]float32)) copy(prevHidden.Value().Data().([]float32), hidden.Value().Data().([]float32)) copy(prevCell.Value().Data().([]float32), cell.Value().Data().([]float32)) } return nil }
predict.go
0.620162
0.464598
predict.go
starcoder
package parser import ( "github.com/influxdata/telegraf/plugins/parsers" "github.com/ulule/deepcopier" ) // Config implements Telegraf parsers.Config, but with SignalFx Smart Agent struct tags // and a methods for returning a Telegraf parsers.Config struct and a Telegraf parsers.Parser. // Please refer to Telegraf's documentation for more information about the different parsers // and their specific configurations type Config struct { // dataFormat specifies a data format to parse: `json`, `value`, `influx`, `graphite`, `value`, `nagios`, // `collectd`, `dropwizard`, `wavefront`, `grok`, `csv`, or `logfmt`. DataFormat string `yaml:"dataFormat" default:"influx"` // defaultTags are tags that will be added to all metrics. (`json`, `value`, `graphite`, `collectd`, `dropwizard`, // `wavefront`, `grok`, `csv` and `logfmt` only) DefaultTags map[string]string `yaml:"defaultTags"` // metricName applies to (`json` and `value`). This will be the name of the measurement. MetricName string `yaml:"metricName"` // value // dataType specifies the value type to parse the value to: `integer`, `float`, // `long`, `string`, or `boolean`. (`value` only) DataType string `yaml:"dataType"` // json // A list of tag names to fetch from JSON data. (`json` only) TagKeys []string `yaml:"JSONTagKeys"` // A list of fields in JSON to extract and use as string fields. (json only) JSONStringFields []string `yaml:"JSONStringFields"` // A path used to extract the metric name in JSON data. (`json` only) JSONNameKey string `yaml:"JSONNameKey"` // A gjson path for json parser. (`json` only) JSONQuery string `yaml:"JSONQuery"` // The name of the timestamp key. (`json` only) JSONTimeKey string `yaml:"JSONTimeKey"` // Specifies the timestamp format. (`json` only) JSONTimeFormat string `yaml:"JSONTimeFormat"` // graphite // Separator for Graphite data. (`graphite` only). Separator string `yaml:"separator"` // A list of templates for Graphite data. (`graphite` only). 
Templates []string `yaml:"templates"` // collectd // The path to the collectd authentication file (`collectd` only) CollectdAuthFile string `yaml:"collectdAuthFile"` // Specifies the security level: `none` (default), `sign`, or // `encrypt`. (`collectd only`) CollectdSecurityLevel string `yaml:"collectdSecurityLevel"` // A list of paths to collectd TypesDB files. (`collectd` only) CollectdTypesDB []string `yaml:"collectdTypesDB"` // Indicates whether to separate or join multivalue metrics. (`collectd` only) CollectdSplit string `yaml:"collectdSplit"` // dropwizard // An optional gjson path used to locate a metric registry inside of JSON data. // The default behavior is to consider the entire JSON document. (`dropwizard` only) DropwizardMetricRegistryPath string `yaml:"dropwizardMetricRegistryPath"` // An optional gjson path used to identify the drop wizard metric timestamp. (`dropwizard` only) DropwizardTimePath string `yaml:"dropwizardTimePath"` // The format used for parsing the drop wizard metric timestamp. // The default format is time.RFC3339. (`dropwizard` only) DropwizardTimeFormat string `yaml:"dropwizardTimeFormat"` // An optional gjson path used to locate drop wizard tags. (`dropwizard` only) DropwizardTagsPath string `yaml:"dropwizardTagsPath"` // A map of gjson tag names and gjson paths used to extract tag values from the JSON document. // This is only used if `dropwizardTagsPath` is not specified. (`dropwizard` only) DropwizardTagPathsMap map[string]string `yaml:"dropwizardTagPathsMap"` // grok // A list of patterns to match. (`grok` only) GrokPatterns []string `yaml:"grokPatterns"` // A list of named grok patterns to match. (`grok` only) GrokNamedPatterns []string `yaml:"grokNamedPatterns"` // Custom grok patterns. (`grok` only) GrokCustomPatterns string `yaml:"grokCustomPatterns"` // List of paths to custom grok pattern files. (`grok` only) GrokCustomPatternFiles []string `yaml:"grokCustomPatternFiles"` // Specifies the timezone. 
The default is UTC time. Other options are `Local` for the // local time on the machine, `UTC`, and `Canada/Eastern` (unix style timezones). (`grok` only) GrokTimeZone string `yaml:"grokTimezone"` //csv // The delimiter used between fields in the csv. (`csv` only) CSVDelimiter string `yaml:"CSVDelimiter"` // The character used to mark rows as comments. (`csv` only) CSVComment string `yaml:"CSVComment"` // Indicates whether to trim leading white from fields. (`csv` only) CSVTrimSpace bool `yaml:"CSVTrimSpace"` // List of custom column names. All columns must have names. Unnamed columns are ignored. // This configuration must be set when `CSVHeaderRowCount` is 0. (`csv` only) CSVColumnNames []string `yaml:"CSVColumnNames"` // List of types to assign to columns. Acceptable values are `int`, // `float`, `bool`, or `string` (`csv` only). CSVColumnTypes []string `yaml:"CSVColumnTypes"` // List of columns that should be added as tags. Unspecified columns will be added as fields. (`csv` only) CSVTagColumns []string `yaml:"CSVTagColumns"` // The name of the column to extract the metric name from (`csv` only) CSVMeasurementColumn string `yaml:"CSVMeasurementColumn"` // The name of the column to extract the metric timestamp from. // `CSVTimestampFormat` must be set when using this option. (`csv` only) CSVTimestampColumn string `yaml:"CSVTimestampColumn"` // The format to use for extracting timestamps. (`csv` only) CSVTimestampFormat string `yaml:"CSVTimestampFormat"` // The number of rows that are headers. By default no rows are treated as headers. (`csv` only) CSVHeaderRowCount int `yaml:"CSVHeaderRowCount"` // The number of rows to ignore before looking for headers. (`csv` only) CSVSkipRows int `yaml:"CSVSkipRows"` // The number of columns to ignore before parsing data on a given row. 
(`csv` only) CSVSkipColumns int `yaml:"CSVSkipColumns"` } // GetTelegrafConfig returns the configuration as a Telegraf *parsers.Config func (c *Config) GetTelegrafConfig() (config *parsers.Config, err error) { config = &parsers.Config{} // copy top level struct fields to the err = deepcopier.Copy(c).To(config) return config, err } // GetTelegrafParser returns a pointer to a telegraf *parsers.Parser func (c *Config) GetTelegrafParser() (parser parsers.Parser, err error) { var config *parsers.Config if config, err = c.GetTelegrafConfig(); err != nil { return parser, err } return parsers.NewParser(config) }
pkg/monitors/telegraf/common/parser/parsers.go
0.707203
0.462109
parsers.go
starcoder
package advent import ( "strings" "advent/lib/util" ) var exampleInput1 = strings.Split(`light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags.`, "\n") var exampleInput2 = strings.Split(`shiny gold bags contain 2 dark red bags. dark red bags contain 2 dark orange bags. dark orange bags contain 2 dark yellow bags. dark yellow bags contain 2 dark green bags. dark green bags contain 2 dark blue bags. dark blue bags contain 2 dark violet bags. dark violet bags contain no other bags.`, "\n") func Part1() interface{} { colors := make(map[string]struct{}) bags := make(Bags) for _, line := range util.InputLines() { line = strings.ReplaceAll(line, ".", "") line = strings.ReplaceAll(line, " bags", "") line = strings.ReplaceAll(line, " bag", "") leftRight := strings.Split(line, " contain ") right := strings.Split(leftRight[1], ", ") for _, color := range right { words := strings.Fields(color) color := strings.Join(words[1:], " ") bags[color] = append(bags[color], leftRight[0]) } colors[leftRight[0]] = struct{}{} } return len(bags.HowManyCanHold("shiny gold")) } type Bags map[string][]string func (this Bags) HowManyCanHold(color string) (containers map[string]struct{}) { outers := this[color] if len(outers) == 0 { return nil } set := make(map[string]struct{}) for _, outer := range outers { set[outer] = struct{}{} for container := range this.HowManyCanHold(outer) { set[container] = struct{}{} } } return set } func Part2() interface{} { bags := make(NestedBags) for _, line := range util.InputLines() { line = 
strings.ReplaceAll(line, ".", "") line = strings.ReplaceAll(line, " bags", "") line = strings.ReplaceAll(line, " bag", "") leftRight := strings.Split(line, " contain ") outer := leftRight[0] inners := strings.Split(leftRight[1], ", ") for _, inner := range inners { words := strings.Fields(inner) bags[outer] = append(bags[outer], Bag{ Color: strings.Join(words[1:], " "), Quantity: util.ParseInt(words[0]), }) } } return bags.HowManyAreHeldIn("shiny gold") } type NestedBags map[string][]Bag func (this NestedBags) HowManyAreHeldIn(color string) (result int) { inners := this[color] for _, inner := range inners { result += inner.Quantity result += inner.Quantity * this.HowManyAreHeldIn(inner.Color) } return result } type Bag struct { Color string Quantity int }
go/2020/day07/main.go
0.644001
0.535402
main.go
starcoder
package avaclient // SubDescriptionDto This is appended to a Position and is used to separate the complete Position into smaller amounts to be described separately, for example concrete walls could be attached to different building storeys. type SubDescriptionDto struct { // Elements GUID identifier. Id string `json:"id"` // Returns the total calculated sum of all quantity assignments. Will return the result rounded to three decimal places. Quantity float32 `json:"quantity"` // Holds quantity information for this sub description. Quantity is listening to changes here and is reporting the total sum of all quantity components. QuantityComponents []CalculationDto `json:"quantityComponents,omitempty"` // Indicates if the bidder is asked to specify an amount. AmountToBeEnteredByBidder bool `json:"amountToBeEnteredByBidder"` // Identifier for this SubDescription. Identifier string `json:"identifier,omitempty"` // Short description for this DescriptionBase element. ShortText string `json:"shortText,omitempty"` // Detailed description for this DescriptionBase element. When the HtmlLongText is set, this is automatically overwritten and filled with the appropriate plain text representation of the Html text. Vice versa, setting this property overrides the HtmlLongText. LongText string `json:"longText,omitempty"` // This contains the Html representation of the Longtext. When the LongText is set, this is automatically overwritten and filled with the appropriate Html representation of the plaintext. Vice versa, setting this property overrides the LongText. GAEB 90 and GAEB 2000 exports do not support any image functionality. In GAEB XML, only images that use an embedded Base64 data uri are exported, regular url references are cleared before written out. 
HtmlLongText string `json:"htmlLongText,omitempty"` AdditionType AdditionTypeDto `json:"additionType"` StandardizedDescription StandardizedDescriptionDto `json:"standardizedDescription,omitempty"` // This identifier can be used to point to the Id of an ExecutionDescription in the same ServiceSpecification. ExecutionDescriptions act as a way to centrally describe how positions (or sub descriptions) should be executed in practice. Often, the position (or sub description) itself still has text of its own to highlight deviations from that or add more details. When working with import and export features, this property is only supported in GAEB 90 data exchange. ExecutionDescriptionReference string `json:"executionDescriptionReference,omitempty"` }
model_sub_description_dto.go
0.753194
0.484929
model_sub_description_dto.go
starcoder
// F7 illuminant conversion functions package white // F7_A functions func F7_A_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.2162616, 0.1109265, -0.1548306}, {0.1532455, 0.9152079, -0.0559592}, {-0.0239302, 0.0358725, 0.3151544}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_A_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0710133, 0.2439140, -0.1501795}, {0.0267922, 0.9804152, -0.0054059}, {0.0000000, 0.0000000, 0.3272274}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_A_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.1558170, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.3272274}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_B functions func F7_B_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0639791, 0.0325159, -0.0487483}, {0.0445370, 0.9766166, -0.0174212}, {-0.0078094, 0.0119287, 0.7795373}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_B_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0210792, 0.0724024, -0.0479336}, {0.0079529, 0.9941864, -0.0016046}, {0.0000000, 0.0000000, 0.7836814}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_B_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0424133, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.7836814}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + 
m[2][2]*zs return } // F7_C functions func F7_C_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0096302, 0.0069785, 0.0130568}, {0.0122432, 0.9846882, 0.0033800}, {0.0038825, -0.0073255, 1.0905639}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_C_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0026841, 0.0092168, 0.0170692}, {0.0010124, 0.9992607, -0.0002049}, {0.0000000, 0.0000000, 1.0872208}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_C_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0319125, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 1.0872208}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_D50 functions func F7_D50_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0476532, 0.0228258, -0.0499378}, {0.0294704, 0.9904638, -0.0169869}, {-0.0091967, 0.0149801, 0.7530971}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D50_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0160269, 0.0550506, -0.0519304}, {0.0060469, 0.9955792, -0.0012196}, {0.0000000, 0.0000000, 0.7588347}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D50_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0145306, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.7588347}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // 
F7_D55 functions func F7_D55_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0283868, 0.0134412, -0.0312747}, {0.0171406, 0.9952022, -0.0105684}, {-0.0058572, 0.0096065, 0.8436556}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D55_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0095838, 0.0329194, -0.0327530}, {0.0036160, 0.9973564, -0.0007292}, {0.0000000, 0.0000000, 0.8473705}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D55_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0067445, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.8473705}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_D65 functions func F7_D65_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {0.9998530, -0.0000616, 0.0002403}, {-0.0000675, 0.9999796, 0.0000778}, {0.0000498, -0.0000848, 1.0012851}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D65_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {0.9999485, -0.0001770, 0.0002629}, {-0.0000194, 1.0000142, 0.0000039}, {0.0000000, 0.0000000, 1.0012506}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D65_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0000631, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 1.0012506}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_D75 
functions func F7_D75_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {0.9797979, -0.0092332, 0.0255119}, {-0.0113145, 1.0015327, 0.0084791}, {0.0049755, -0.0082922, 1.1310137}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D75_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {0.9931015, -0.0236961, 0.0271847}, {-0.0026028, 1.0019031, 0.0005248}, {0.0000000, 0.0000000, 1.1277369}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_D75_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {0.9992740, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 1.1277369}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_E functions func F7_E_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0501042, 0.0270124, -0.0230277}, {0.0389931, 0.9729287, -0.0091847}, {-0.0023588, 0.0025668, 0.9192667}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_E_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0161448, 0.0554519, -0.0195004}, {0.0060910, 0.9955480, -0.0012294}, {0.0000000, 0.0000000, 0.9195656}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_E_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0521775, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.9195656}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_F2 functions func 
F7_F2_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0901032, 0.0444769, -0.0815305}, {0.0592248, 0.9745143, -0.0283245}, {-0.0141918, 0.0225586, 0.6113819}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_F2_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0299949, 0.1030276, -0.0828392}, {0.0113169, 0.9917269, -0.0022829}, {0.0000000, 0.0000000, 0.6197228}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_F2_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0436128, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.6197228}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } // F7_F11 functions func F7_F11_Bradford(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.1079204, 0.0540934, -0.0896135}, {0.0731195, 0.9648265, -0.0315596}, {-0.0150044, 0.0234241, 0.5833138}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_F11_vonKries(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0357335, 0.1227378, -0.0896478}, {0.0134819, 0.9901444, -0.0027199}, {0.0000000, 0.0000000, 0.5917405}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return } func F7_F11_Xyz(xs, ys, zs float64) (xd, yd, zd float64) { m := [3][3]float64{ {1.0622994, 0.0000000, 0.0000000}, {0.0000000, 1.0000000, 0.0000000}, {0.0000000, 0.0000000, 0.5917405}} xd = m[0][0]*xs + m[0][1]*ys + m[0][2]*zs yd = m[1][0]*xs + m[1][1]*ys + m[1][2]*zs zd = m[2][0]*xs + m[2][1]*ys + m[2][2]*zs return }
f64/white/f7.go
0.501221
0.558508
f7.go
starcoder
package main import ( "image" "math" "sync" "github.com/skelterjohn/go.wde" _ "github.com/skelterjohn/go.wde/xgb" t "github.com/tincann/go-path-tracer/tracer" ) func main() { go start() wde.Run() } func start() { w, _ := wde.NewWindow(500, 500) screen := w.Screen() bounds := screen.Bounds() camera := t.NewCamera( t.NewVector(0, -2, 0), //eye t.NewVector(0, 1, 0), //direction t.NewVector(0, 0, 1), //up 1.5, //distance to image plane 3, //image plane width 3, //image plane height 0.5, //move speed, 0.5, //look speed ) tracer := t.NewTracer( camera, 1, //anti-aliasing factor 4, //maximum number of bounces ) numRegions := 2 regions := divideIntoRegions(numRegions, numRegions, bounds) go handleEvents(w, screen, regions, tracer) w.FlushImage(bounds) w.Show() scene := t.DefaultScene() for { wg := sync.WaitGroup{} wg.Add(len(regions)) // start := time.Now() for _, region := range regions { go func(r *ScreenRegion) { tracer.TraceRegion(bounds, r.Bounds, r.Accumulator, scene, 1) r.Accumulator.DrawContents(screen) wg.Done() }(region) } wg.Wait() w.FlushImage(bounds) // fmt.Println(time.Since(start).Seconds()) } } type ScreenRegion struct { Bounds image.Rectangle Accumulator *t.Accumulator } func divideIntoRegions(xParts, yParts int, screen image.Rectangle) []*ScreenRegion { regions := make([]*ScreenRegion, xParts*yParts) for y := 0; y < yParts; y++ { for x := 0; x < xParts; x++ { r := ScreenRegion{} regionWidth := float64(screen.Dx()) / float64(xParts) regionHeight := float64(screen.Dy()) / float64(yParts) r.Bounds.Min.X = int(math.Floor(float64(x) * regionWidth)) r.Bounds.Min.Y = int(math.Floor(float64(y) * regionHeight)) r.Bounds.Max.X = int(math.Floor(float64(x+1) * regionWidth)) r.Bounds.Max.Y = int(math.Floor(float64(y+1) * regionHeight)) r.Accumulator = t.NewAccumulator(r.Bounds) i := xParts*y + x regions[i] = &r } } return regions } func handleEvents(w wde.Window, screen wde.Image, regions []*ScreenRegion, tracer *t.Tracer) { for { e := <-w.EventChan() moveVector := 
t.NewVector(0, 0, 0) theta, phi := 0.0, 0.0 switch e.(type) { case wde.KeyDownEvent: event := e.(wde.KeyDownEvent) switch event.Key { case wde.KeyEscape: wde.Stop() //camera movement case wde.KeyW: moveVector.Y++ case wde.KeyS: moveVector.Y-- case wde.KeyA: moveVector.X-- case wde.KeyD: moveVector.X++ case wde.KeySpace: moveVector.Z++ case wde.KeyLeftShift: moveVector.Z-- case wde.KeyLeftArrow: phi-- case wde.KeyRightArrow: phi++ case wde.KeyUpArrow: theta-- case wde.KeyDownArrow: theta++ case wde.KeyTab: resetAccumulators(regions) } } if moveVector.X != 0 || moveVector.Y != 0 || moveVector.Z != 0 { tracer.Camera.Move(moveVector) resetAccumulators(regions) } if theta != 0 || phi != 0 { tracer.Camera.Rotate(phi, theta) resetAccumulators(regions) } } } func resetAccumulators(regions []*ScreenRegion) { for _, region := range regions { region.Accumulator.Reset() } }
main.go
0.556641
0.430207
main.go
starcoder
package sherbet import ( "reflect" "time" "github.com/viant/toolbox" ) // EncryptArrayToInt encrypt array to int64 func EncryptArrayToInt(array []bool) int64 { var result int64 = 0 for _, val := range array { result = result << 1 if val { result++ } else { } } return result } // DecryptArrayToInt decrypt int64 to array func DecryptArrayToInt(data int64, length int) []bool { var result = make([]bool, length) for length > 0 { result[length-1] = data%2 == 1 length-- data = data >> 1 } return result } // DatetimeFormat format a datetime func DatetimeFormat(datetime *string, format string) error { var result error = nil temp, err := time.Parse(time.RFC3339Nano, *datetime) if err == nil { *datetime = temp.Format(format) } else { result = err } return result } // DatetimesFormat format datetimes func DatetimesFormat(datetimes *[]*string, format string) error { var result error = nil for _, val := range *datetimes { if result == nil { result = DatetimeFormat(val, format) } else { break } } return result } // ReflectTags get object's tags func ReflectTags(obj interface{}, tag string) []string { var result []string types := reflect.TypeOf(obj) reflectTags(types, tag, &result) return result } func reflectTags(types reflect.Type, tag string, data *[]string) { for i := 0; i < types.NumField(); i++ { if types.Field(i).Tag.Get(tag) == "" { reflectTags(types.Field(i).Type, tag, data) } else { tag := types.Field(i).Tag.Get(tag) if !toolbox.HasSliceAnyElements(*data, tag) { *data = append(*data, tag) } else { } } } } // ReflectValues get object's value func ReflectValues(obj interface{}) []interface{} { var result []interface{} reflectValues(obj, &result) return result } func reflectValues(obj interface{}, data *[]interface{}) { values := reflect.ValueOf(obj) for i := 0; i < values.NumField(); i++ { if values.Field(i).Kind() == reflect.Struct { reflectValues(values.Field(i).Interface(), data) } else { switch values.Field(i).Elem().Kind() { case reflect.Bool: *data = append(*data, 
values.Field(i).Elem().Bool()) case reflect.Int: *data = append(*data, values.Field(i).Elem().Int()) case reflect.String: *data = append(*data, values.Field(i).Elem().String()) case reflect.Float64: *data = append(*data, values.Field(i).Elem().Float()) default: *data = append(*data, nil) } } } } // HasValueFromSliceForInt has value from slice func HasValueFromSliceForInt(array *[]*int, value *int) (result bool) { result = false for _, val := range *array { if *value == *val { result = true } else { } } return result } // HasValueFromSliceForString has value from slice func HasValueFromSliceForString(array *[]*string, value *string) (result bool) { result = false for _, val := range *array { if *value == *val { result = true } else { } } return result }
kit.go
0.591133
0.496033
kit.go
starcoder
package dataconverter import ( "strconv" "strings" ) // Bluetooth Protocol Operation type const ( BluetoothAdd string = "Add" BluetoothSubtract string = "Subtract" BluetoothMultiply string = "Multiply" BluetoothDivide string = "Divide" ) //Converter is the structure that contains data conversion specific configuration type Converter struct { DataWrite DataWrite `yaml:"write"` DataRead DataRead `yaml:"read"` } //dataWrite structure contains configuration information specific to data-writes type DataWrite struct { Attributes []WriteAttribute `yaml:"attributes"` } //WriteAttribute structure contains the name of the attribute as well as a data-map of values to be written type WriteAttribute struct { Name string `yaml:"name"` Operations map[string]DataMap `yaml:"operations"` } //DataMap structure contains a mapping between the value that arrives from the platform (expected value) and // the byte value to be written into the device type DataMap struct { DataMapping map[string][]byte `yaml:"data-map"` } //dataRead structure contains configuration information specific to data-read type DataRead struct { Actions []ReadAction `yaml:"actions"` } //ReadAction specifies the name of the action along with the conversion operations to be performed in case of data-read type ReadAction struct { ActionName string `yaml:"action-name"` ConversionOperation ReadOperation `yaml:"conversion-operation"` } //ReadOperation specifies how to convert the data received from the device into meaningful data type ReadOperation struct { StartIndex int `yaml:"start-index"` EndIndex int `yaml:"end-index"` ShiftLeft uint `yaml:"shift-left"` ShiftRight uint `yaml:"shift-right"` Multiply float64 `yaml:"multiply"` Divide float64 `yaml:"divide"` Add float64 `yaml:"add"` Subtract float64 `yaml:"subtract"` OrderOfExecution []string `yaml:"order-of-execution"` } //ConvertReadData is the function responsible to convert the data read from the device into meaningful data func (operation *ReadOperation) 
ConvertReadData(data []byte) float64 { var intermediateResult uint64 var initialValue []byte var initialStringValue = "" if operation.StartIndex <= operation.EndIndex { for index := operation.StartIndex; index <= operation.EndIndex; index += 1 { initialValue = append(initialValue, data[index]) } } else { for index := operation.StartIndex; index >= operation.EndIndex; index -= 1 { initialValue = append(initialValue, data[index]) } } for _, value := range initialValue { initialStringValue = initialStringValue + strconv.Itoa(int(value)) } initialByteValue, _ := strconv.ParseUint(initialStringValue, 16, 16) if operation.ShiftLeft != 0 { intermediateResult = initialByteValue << operation.ShiftLeft } else if operation.ShiftRight != 0 { intermediateResult = initialByteValue >> operation.ShiftRight } finalResult := float64(intermediateResult) for _, executeOperation := range operation.OrderOfExecution { switch strings.ToUpper(executeOperation) { case strings.ToUpper(BluetoothAdd): finalResult = finalResult + operation.Add case strings.ToUpper(BluetoothSubtract): finalResult = finalResult - operation.Subtract case strings.ToUpper(BluetoothMultiply): finalResult = finalResult * operation.Multiply case strings.ToUpper(BluetoothDivide): finalResult = finalResult / operation.Divide } } return finalResult }
device/bluetooth_mapper/data_converter/data_converter.go
0.637482
0.411643
data_converter.go
starcoder
package builtin import ( "math" "strconv" . "github.com/apmckinlay/gsuneido/runtime" "github.com/apmckinlay/gsuneido/util/dnum" ) var minNarrow = dnum.FromInt(MinSuInt) var maxNarrow = dnum.FromInt(MaxSuInt) func init() { NumMethods = Methods{ "Chr": method0(func(this Value) Value { n := byte(ToInt(this)) return SuStr(string([]byte{n})) }), "Int": method0(func(this Value) Value { dn := ToDnum(this).Trunc() if dnum.Compare(dn, minNarrow) >= 0 && dnum.Compare(dn, maxNarrow) <= 0 { n, _ := dn.ToInt() return SuInt(n) } return SuDnum{Dnum: dn} }), "Format": method1("(mask)", func(this, arg Value) Value { x := ToDnum(this) mask := ToStr(arg) return SuStr(x.Format(mask)) }), "Frac": method0(func(this Value) Value { dn := ToDnum(this).Frac() if dn.IsZero() { return Zero } return SuDnum{Dnum: dn} }), "Hex": method0(func(this Value) Value { n := ToInt(this) return SuStr(strconv.FormatUint(uint64(uint32(n)), 16)) }), "Round": method1("(number)", func(this, arg Value) Value { x := ToDnum(this) r := ToInt(arg) return SuDnum{Dnum: x.Round(r, dnum.HalfUp)} }), "RoundUp": method1("(number)", func(this, arg Value) Value { x := ToDnum(this) r := ToInt(arg) return SuDnum{Dnum: x.Round(r, dnum.Up)} }), "RoundDown": method1("(number)", func(this, arg Value) Value { x := ToDnum(this) r := ToInt(arg) return SuDnum{Dnum: x.Round(r, dnum.Down)} }), // float methods "Cos": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Cos(f)) }), "Sin": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Sin(f)) }), "Tan": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Tan(f)) }), "ACos": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Acos(f)) }), "ASin": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Asin(f)) }), "ATan": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Atan(f)) }), "Exp": method0(func(this Value) Value { f := toFloat(this) return 
fromFloat(math.Exp(f)) }), "Log": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Log(f)) }), "Log10": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Log10(f)) }), "Pow": method1("(number)", func(this, arg Value) Value { if p, ok := arg.ToInt(); ok && 0 <= p && p <= 10 { if p == 0 { return One } x := this for ; p > 1; p-- { x = OpMul(x, this) } return x } x := toFloat(this) y := toFloat(arg) return fromFloat(math.Pow(x, y)) }), "Sqrt": method0(func(this Value) Value { f := toFloat(this) return fromFloat(math.Sqrt(f)) }), } } func toFloat(v Value) float64 { if i, ok := v.ToInt(); ok { return float64(i) } return ToDnum(v).ToFloat() } func fromFloat(f float64) Value { n := int64(f) if f == float64(n) { if MinSuInt <= n && n <= MaxSuInt { return SuInt(int(n)) } return SuDnum{Dnum: dnum.FromInt(n)} } return SuDnum{Dnum: dnum.FromFloat(f)} }
builtin/number.go
0.515132
0.57678
number.go
starcoder
package polyline import ( "gioui.org/f32" "gioui.org/layout" "gioui.org/op" "gioui.org/op/clip" "gioui.org/op/paint" "image/color" "math" ) const ( rad90 = float32(90 * math.Pi / 180) ) func Draw(points []f32.Point, width float32, col color.RGBA, gtx layout.Context) { length := len(points) for i, p := range points { drawCircle(p, width, col, gtx) if i < length-1 { drawLine(p, points[i+1], width, col, gtx) } } } func drawCircle(p f32.Point, radius float32, col color.RGBA, gtx layout.Context) { d := radius * 2 const k = 0.551915024494 // 4*(sqrt(2)-1)/3 defer op.Push(gtx.Ops).Pop() var path clip.Path path.Begin(gtx.Ops) path.Move(f32.Point{X: p.X + radius, Y: p.Y}) path.Cube(f32.Point{X: 0, Y: radius * k}, f32.Point{X: -radius + radius*k, Y: radius}, f32.Point{X: -radius, Y: radius}) // SE path.Cube(f32.Point{X: -radius * k, Y: 0}, f32.Point{X: -radius, Y: -radius + radius*k}, f32.Point{X: -radius, Y: -radius}) // SW path.Cube(f32.Point{X: 0, Y: -radius * k}, f32.Point{X: radius - radius*k, Y: -radius}, f32.Point{X: radius, Y: -radius}) // NW path.Cube(f32.Point{X: radius * k, Y: 0}, f32.Point{X: radius, Y: radius - radius*k}, f32.Point{X: radius, Y: radius}) // NE path.End().Add(gtx.Ops) box := f32.Rectangle{Min: f32.Point{X: p.X - radius, Y: p.Y - radius}, Max: f32.Point{X: p.X + d, Y: p.Y + d}} paint.ColorOp{Color: col}.Add(gtx.Ops) paint.PaintOp{Rect: box}.Add(gtx.Ops) } func drawLine(p1, p2 f32.Point, width float32, col color.RGBA, gtx layout.Context) { tilt := angle(p1, p2) a := offsetPoint(p1, width, tilt+rad90) b := offsetPoint(p2, width, tilt+rad90) c := offsetPoint(p2, -width, tilt+rad90) d := offsetPoint(p1, -width, tilt+rad90) defer op.Push(gtx.Ops).Pop() paint.ColorOp{Color: col}.Add(gtx.Ops) var path clip.Path path.Begin(gtx.Ops) path.Move(a) path.Line(b.Sub(a)) path.Line(c.Sub(b)) path.Line(d.Sub(c)) path.Line(a.Sub(d)) path.End().Add(gtx.Ops) box := boundingBox([]f32.Point{a, b, c, d}) paint.PaintOp{Rect: box}.Add(gtx.Ops) } func boundingBox(points 
[]f32.Point) (box f32.Rectangle) { for _, p := range points { box.Min.X = min(box.Min.X, p.X) box.Min.Y = min(box.Min.Y, p.Y) box.Max.X = max(box.Max.X, p.X) box.Max.Y = max(box.Max.Y, p.Y) } return box } func offsetPoint(point f32.Point, distance, angle float32) f32.Point { x := point.X + distance*cos(angle) y := point.Y + distance*sin(angle) return f32.Point{X: x, Y: y} } func angle(p1, p2 f32.Point) float32 { return atan2(p2.Y-p1.Y, p2.X-p1.X) } func cos(v float32) float32 { return float32(math.Cos(float64(v))) } func sin(v float32) float32 { return float32(math.Sin(float64(v))) } func min(x, y float32) float32 { return float32(math.Min(float64(x), float64(y))) } func max(x, y float32) float32 { return float32(math.Max(float64(x), float64(y))) } func atan2(y, x float32) float32 { return float32(math.Atan2(float64(y), float64(x))) }
polyline.go
0.730194
0.478285
polyline.go
starcoder
package sierpinski import ( "fmt" "image/color" "math" "math/rand" "github.com/hajimehoshi/ebiten/v2" "github.com/hajimehoshi/ebiten/v2/ebitenutil" "github.com/hajimehoshi/ebiten/v2/inpututil" ) // speedFactor is how many pixels to set per tick. var speedFactor = 10 var colorMappings = map[int]color.Color{ 0: color.RGBA{0x00, 0x00, 0xFF, 0xFF}, 1: color.RGBA{0x00, 0xFF, 0x00, 0xFF}, 2: color.RGBA{0xFF, 0x00, 0x00, 0xFF}, } // pt represents a fixed point determined by x,y co-ordinates on a // cartesian plane. type pt struct { x, y int } // existsInTriangle returns true if p is found within the triangle formed by // v1, v2, and v3 else false. // https://stackoverflow.com/questions/2049582/how-to-determine-if-a-point-is-in-a-2d-triangle func existsInTriangle(p, v1, v2, v3 pt) bool { sign := func(p1, p2, p3 pt) int { return (p1.x-p3.x)*(p2.y-p3.y) - (p2.x-p3.x)*(p1.y-p3.y) } d1 := sign(p, v1, v2) d2 := sign(p, v2, v3) d3 := sign(p, v3, v1) has_neg := (d1 < 0) || (d2 < 0) || (d3 < 0) has_pos := (d1 > 0) || (d2 > 0) || (d3 > 0) return !(has_neg && has_pos) } // midwayPoint returns a pt that exists halfway between p1, and p2 func midwayPoint(p1, p2 pt) pt { return pt{(p1.x + p2.x) / 2, (p1.y + p2.y) / 2} } type Game struct { width, height int lastPosition pt initialTriangle [3]pt image *ebiten.Image } func NewGame(width, height int) *Game { // Set the initial triangle to start with. pts := [3]pt{ {x: width / 2, y: 0}, {x: 1, y: height - 1}, {x: width - 1, y: height - 1}, } // Find an initial point within the triangle to start with. 
var position pt found := false for !found { p := pt{x: rand.Intn(width), y: rand.Intn(height)} found = existsInTriangle(p, pts[0], pts[1], pts[2]) position = p } return &Game{ width: width, height: height, image: ebiten.NewImage(width, height), initialTriangle: pts, lastPosition: position, } } func (g *Game) findNearestPoint(p1 pt) pt { var smallestDistance float64 var smallestPoint pt for _, p := range g.initialTriangle { x := float64(p1.x - p.x) y := float64(p1.y - p.y) d := math.Sqrt(math.Pow(x, 2) + math.Pow(y, 2)) if d < smallestDistance { smallestDistance = d smallestPoint = p } } return smallestPoint } func (g *Game) Update() error { if ebiten.IsKeyPressed(ebiten.KeyArrowLeft) { if speedFactor <= 0 { speedFactor = 0 } else { speedFactor-- } } if ebiten.IsKeyPressed(ebiten.KeyArrowRight) { speedFactor++ } if inpututil.IsKeyJustPressed(ebiten.KeyR) { g.image = ebiten.NewImage(g.width, g.height) } for i := 0; i < speedFactor; i++ { r := rand.Intn(3) m := midwayPoint(g.lastPosition, g.initialTriangle[r]) g.image.Set(m.x, m.y, colorMappings[r]) g.lastPosition = m } return nil } func (g *Game) Draw(screen *ebiten.Image) { screen.DrawImage(g.image, &ebiten.DrawImageOptions{}) t := fmt.Sprintf("TPS: %f\nSpeed: %d (press <- or -> arrow to change)\n", ebiten.CurrentTPS(), speedFactor) ebitenutil.DebugPrint(screen, t) } func (g *Game) Layout(_, _ int) (int, int) { return g.width, g.height }
pkg/sierpinski/game.go
0.757256
0.526038
game.go
starcoder
package geom import ( "math" "math/rand" ) // Dir is a unit vector that specifies a direction in 3D space. type Dir Vec // Up is the positive Direction on the vertical (Y) axis. var Up = Dir{0, 1, 0} func SphericalDirection(theta, phi float64) (Dir, bool) { x := math.Sin(theta) * math.Cos(phi) y := math.Cos(theta) z := math.Sin(theta) * math.Sin(phi) return Vec{x, y, z}.Unit() } // Inv inverts a Direction. func (a Dir) Inv() Dir { return Dir{-a.X, -a.Y, -a.Z} } // Enters returns whether this Vector is entering the plane represented by a normal Vector. func (a Dir) Enters(normal Dir) bool { return normal.Dot(a) < 0 } // Dot returns the dot product of two unit vectors, which is also the cosine of the angle between them. func (a Dir) Dot(b Dir) float64 { return a.X*b.X + a.Y*b.Y + a.Z*b.Z } func (a Dir) Half(b Dir) Dir { dir, _ := Vec(a).Plus(Vec(b)).Unit() return dir } // Refracted refracts a vector through the plane represented by a normal, based on the ratio of refraction indices. // https://www.bramz.net/data/writings/reflection_transmission.pdf func (a Dir) Refracted(normal Dir, indexA, indexB float64) (bool, Dir) { ratio := indexA / indexB cos := normal.Dot(a) k := 1 - ratio*ratio*(1-cos*cos) if k < 0 { return false, a } offset := normal.Scaled(ratio*cos + math.Sqrt(k)) dir, _ := a.Scaled(ratio).Minus(offset).Unit() return true, dir } // Reflected reflects the vector about a normal. // https://www.bramz.net/data/writings/reflection_transmission.pdf func (a Dir) Reflected(normal Dir) Dir { cos := normal.Dot(a) dir, _ := Vec(a).Minus(normal.Scaled(2 * cos)).Unit() return dir } // To ensure that both face outward func (a Dir) Reflect2(normal Dir) Dir { dir, _ := normal.Scaled(2).Scaled(a.Dot(normal)).Minus(Vec(a)).Unit() return dir } func (a Dir) Equals(b Dir) bool { return Vec(a).Equals(Vec(b)) } // Scaled multiplies a Direction by a scalar to produce a Vector3. 
func (a Dir) Scaled(n float64) Vec { return Vec(a).Scaled(n) } // Cross returns the cross product of unit vectors a and b. func (a Dir) Cross(b Dir) (Dir, bool) { return Vec(a).Cross(Vec(b)).Unit() } // Cone returns a random vector within a cone about Direction a. // size is 0-1, where 0 is the original vector and 1 is anything within the original hemisphere. // https://github.com/fogleman/pt/blob/69e74a07b0af72f1601c64120a866d9a5f432e2f/pt/util.go#L24 func (a Dir) Cone(size float64, rnd *rand.Rand) (Dir, bool) { u := rnd.Float64() v := rnd.Float64() theta := size * 0.5 * math.Pi * (1 - (2 * math.Acos(u) / math.Pi)) m1 := math.Sin(theta) m2 := math.Cos(theta) a2 := v * 2 * math.Pi q := RandDirection(rnd) s, _ := a.Cross(q) t, _ := a.Cross(s) d := Vec{} d = d.Plus(s.Scaled(m1 * math.Cos(a2))) d = d.Plus(t.Scaled(m1 * math.Sin(a2))) d = d.Plus(a.Scaled(m2)) return d.Unit() } // RandDirection returns a random unit vector (a point on the edge of a unit sphere). func RandDirection(rnd *rand.Rand) Dir { return AngleDirection(rnd.Float64()*math.Pi*2, math.Asin(rnd.Float64()*2-1)) } // AngleDirection creates a unit vector based on theta and phi. // http://mathworld.wolfram.com/SphericalCoordinates.html func AngleDirection(theta, phi float64) Dir { return Dir{math.Cos(theta) * math.Cos(phi), math.Sin(phi), math.Sin(theta) * math.Cos(phi)} } // RandHemiCos returns a random unit vector within the hemisphere of the normal direction a. // It distributes these random vectors with a cosine weight. 
// https://github.com/fogleman/pt/blob/69e74a07b0af72f1601c64120a866d9a5f432e2f/pt/ray.go#L28 // NOTE: Added .Unit() because this doesn't always return a unit vector otherwise func (a Dir) RandHemiCos(rnd *rand.Rand) (Dir, bool) { u := rnd.Float64() v := rnd.Float64() r := math.Sqrt(u) theta := 2 * math.Pi * v s, _ := a.Cross(RandDirection(rnd)) t, _ := a.Cross(s) d := Vec{} d = d.Plus(s.Scaled(r * math.Cos(theta))) d = d.Plus(t.Scaled(r * math.Sin(theta))) d = d.Plus(a.Scaled(math.Sqrt(1 - u))) return d.Unit() } // https://stackoverflow.com/questions/5531827/random-point-on-a-given-sphere // http://www.leadinglesson.com/dot-product-is-positive-for-vectors-in-the-same-general-direction func (a Dir) RandHemi(rnd *rand.Rand) Dir { u := rnd.Float64() v := rnd.Float64() theta := 2 * math.Pi * u phi := math.Acos(2*v - 1) x := math.Sin(phi) * math.Cos(theta) y := math.Sin(phi) * math.Sin(theta) z := math.Cos(phi) dir, _ := Vec{x, y, z}.Unit() if a.Dot(dir) < 0 { return dir.Inv() } return dir } func ParseDirection(s string) (d Dir, err error) { v, err := ParseVec(s) if err != nil { return d, err } dir, _ := v.Unit() return dir, nil }
pkg/geom/dir.go
0.869285
0.659494
dir.go
starcoder
package contracts import ( "context" "testing" "github.com/adamluzsi/frameless/contracts/assert" "github.com/adamluzsi/frameless/extid" "github.com/adamluzsi/frameless" "github.com/adamluzsi/testcase" "github.com/stretchr/testify/require" ) // Updater will request an update for a wrapped entity object in the Resource type Updater struct { T T Subject func(testing.TB) UpdaterSubject Context func(testing.TB) context.Context FixtureFactory func(testing.TB) frameless.FixtureFactory } type UpdaterSubject interface { CRD frameless.Updater } func (c Updater) resource() testcase.Var { return testcase.Var{ Name: "resource", Init: func(t *testcase.T) interface{} { return c.Subject(t) }, } } func (c Updater) resourceGet(t *testcase.T) UpdaterSubject { return c.resource().Get(t).(UpdaterSubject) } func (c Updater) Spec(s *testcase.Spec) { c.resource().Let(s, nil) factoryLet(s, c.FixtureFactory) s.Before(func(t *testcase.T) { assert.DeleteAllEntity(t, c.resourceGet(t), c.Context(t)) }) var ( requestContext = testcase.Var{Name: `request-Context`} entityWithChanges = testcase.Var{Name: `entity-with-changes`} subject = func(t *testcase.T) error { return c.resourceGet(t).Update( requestContext.Get(t).(context.Context), entityWithChanges.Get(t), ) } ) ctx.Let(s, func(t *testcase.T) interface{} { return c.Context(t) }) requestContext.Let(s, func(t *testcase.T) interface{} { return ctx.Get(t) }) s.When(`an entity already stored`, func(s *testcase.Spec) { entity := s.Let(`entity`, func(t *testcase.T) interface{} { ent := CreatePTR(factoryGet(t), c.T) assert.CreateEntity(t, c.resourceGet(t), ctxGet(t), ent) return ent }).EagerLoading(s) s.And(`and the received entity in argument use the stored entity's ext.ID`, func(s *testcase.Spec) { entityWithChanges.Let(s, func(t *testcase.T) interface{} { newEntity := CreatePTR(factoryGet(t), c.T) id, _ := extid.Lookup(entity.Get(t)) require.Nil(t, extid.Set(newEntity, id)) return newEntity }) s.Then(`then it will update stored entity values by the 
received one`, func(t *testcase.T) { require.Nil(t, subject(t)) assert.HasEntity(t, c.resourceGet(t), c.Context(t), entityWithChanges.Get(t)) }) s.And(`ctx arg is canceled`, func(s *testcase.Spec) { requestContext.Let(s, func(t *testcase.T) interface{} { ctx, cancel := context.WithCancel(ctx.Get(t).(context.Context)) cancel() return ctx }) s.Then(`it expected to return with Context cancel error`, func(t *testcase.T) { require.Equal(t, context.Canceled, subject(t)) }) }) }) }) s.When(`the received entity has ext.ID that is unknown in the storage`, func(s *testcase.Spec) { entityWithChanges.Let(s, func(t *testcase.T) interface{} { newEntity := CreatePTR(factoryGet(t), c.T) assert.CreateEntity(t, c.resourceGet(t), ctxGet(t), newEntity) assert.DeleteEntity(t, c.resourceGet(t), ctxGet(t), newEntity) return newEntity }) s.Then(`it will encounter error during the update of the stored entity`, func(t *testcase.T) { require.Error(t, subject(t)) }) }) } func (c Updater) Test(t *testing.T) { c.Spec(testcase.NewSpec(t)) } func (c Updater) Benchmark(b *testing.B) { s := testcase.NewSpec(b) factoryLet(s, c.FixtureFactory) ent := s.Let(`ent`, func(t *testcase.T) interface{} { ptr := newT(c.T) assert.CreateEntity(t, c.resourceGet(t), c.Context(t), ptr) return ptr }).EagerLoading(s) s.Test(``, func(t *testcase.T) { require.Nil(b, c.resourceGet(t).Update(c.Context(t), ent.Get(t))) }) }
contracts/Updater.go
0.630116
0.658239
Updater.go
starcoder
package dstream type concatVertical struct { // The streams to be concatenated streams []Dstream // The index of the current stream within streams pos int // The number of observations in the concatenated stream nobs int // True if nobs is known yet (nobs is not known until reading // the entire concatenated stream). nobsKnown bool // Map from variable names to column positions namepos map[string]int } // ConcatVertical concatenates a collection of Dstreams vertically // (appending additional observations). The column names and data // types of all the Dstreams being combined must be identical. func ConcatVertical(streams ...Dstream) Dstream { c := &concatVertical{ streams: streams, } // Construct the name to position mapping c.namepos = make(map[string]int) for k, n := range streams[0].Names() { c.namepos[n] = k } return c } func (c *concatVertical) Close() { for _, s := range c.streams { s.Close() } } func (c *concatVertical) GetPos(pos int) interface{} { return c.streams[c.pos].GetPos(pos) } func (c *concatVertical) NumObs() int { if c.nobsKnown { return c.nobs } return -1 } func (c *concatVertical) NumVar() int { return len(c.Names()) } func (c *concatVertical) Names() []string { return c.streams[0].Names() } func (c *concatVertical) Get(name string) interface{} { return c.GetPos(c.namepos[name]) } func (c *concatVertical) Next() bool { // Advance within current stream if c.streams[c.pos].Next() { return true } // Try to advance to next stream c.nobs += c.streams[c.pos].NumObs() c.pos++ if c.pos >= len(c.streams) { // Done with all streams c.nobsKnown = true return false } // Advance to next stream c.streams[c.pos].Next() // Check that the names are the same a := c.streams[0].Names() b := c.streams[c.pos].Names() msg := "Streams to be concatenated have different column names" if len(a) != len(b) { panic(msg) } for i := range a { if a[i] != b[i] { panic(msg) } } return true } func (c *concatVertical) Reset() { c.pos = 0 c.nobs = 0 c.nobsKnown = false for _, s := 
range c.streams { s.Reset() } }
dstream/concatvertical.go
0.760651
0.413832
concatvertical.go
starcoder
package tiles import ( "fmt" "math" ) var RE float64 = 6378137.0 var ORIGIN = RE * math.Pi var CE float64 = 2.0 * ORIGIN var DEG2RAD float64 = math.Pi / 180.0 // WebMercator tile, numbered starting from upper left type TileID struct { Zoom uint8 X uint32 Y uint32 } func NewTileID(zoom uint8, x uint32, y uint32) *TileID { return &TileID{zoom, x, y} } func GeoToMercator(lon float64, lat float64) (x float64, y float64) { // truncate incoming values to world bounds lon = math.Min(math.Max(lon, -180), 180) lat = math.Min(math.Max(lat, -85.051129), 85.051129) x = lon * ORIGIN / 180.0 y = RE * math.Log(math.Tan((math.Pi*0.25)+(0.5*DEG2RAD*lat))) return } // GeoToTile calculates the tile x,y at zoom that contains longitude, latitude func GeoToTile(zoom uint8, x float64, y float64) *TileID { z2 := 1 << zoom zoomFactor := float64(z2) eps := 1e-14 // truncate incoming values to world bounds x = math.Min(math.Max(x, -180), 180) y = math.Min(math.Max(y, -85.051129), 85.051129) var tileX uint32 var tileY uint32 x = math.Max(x/360.0+0.5, 0.0) if x >= 1 { tileX = uint32(z2 - 1) } else { tileX = uint32(math.Floor((x + eps) * zoomFactor)) } y = math.Sin(y * math.Pi / 180) y = 0.5 - 0.25*math.Log((1.0+y)/(1.0-y))/math.Pi if y >= 1 { tileY = uint32(z2 - 1) } else { tileY = uint32((y + eps) * zoomFactor) } return &TileID{ Zoom: zoom, X: tileX, Y: tileY, } } // TileRange calculates the min tile x, min tile y, max tile x, max tile y tile // range for Mercator coordinates xmin, ymin, xmax, ymax at a given zoom level. // Assumes bounds have already been clipped to Mercator world bounds. 
func TileRange(zoom uint8, bounds [4]float64) (*TileID, *TileID) { z2 := 1 << zoom zoomFactor := float64(z2) origin := -ORIGIN eps := 1.0e-11 xmin := math.Min(math.Max(math.Floor(((bounds[0]-origin)/CE)*zoomFactor), 0), zoomFactor-1) // ymin isn't right yet, spilling over ymin := math.Min(math.Max(math.Floor(((1.0-(((bounds[1]-origin)/CE)+eps))*zoomFactor)), 0), zoomFactor-1) xmax := math.Min(math.Max(math.Floor((((bounds[2]-origin)/CE)-eps)*zoomFactor), 0), zoomFactor-1) ymax := math.Min(math.Max(math.Floor((1.0-((bounds[3]-origin)/CE))*zoomFactor), 0), zoomFactor-1) // tiles start in upper left, flip y values minTile := &TileID{Zoom: zoom, X: uint32(xmin), Y: uint32(ymax)} maxTile := &TileID{Zoom: zoom, X: uint32(xmax), Y: uint32(ymin)} return minTile, maxTile } func (t *TileID) String() string { return fmt.Sprintf("Tile(zoom: %v, x: %v, y:%v)", t.Zoom, t.X, t.Y) } func (t *TileID) GeoBounds() (float64, float64, float64, float64) { z2 := 1 << t.Zoom zoomFactor := (float64)(z2) x := (float64)(t.X) y := (float64)(t.Y) xmin := x/zoomFactor*360.0 - 180.0 ymin := math.Atan(math.Sinh(math.Pi*(1.0-2.0*((y+1.0)/zoomFactor)))) * (180.0 / math.Pi) xmax := (x+1.0)/zoomFactor*360.0 - 180.0 ymax := math.Atan(math.Sinh(math.Pi*(1.0-2.0*y/zoomFactor))) * (180.0 / math.Pi) return xmin, ymin, xmax, ymax } func (t *TileID) MercatorBounds() (float64, float64, float64, float64) { z2 := 1 << t.Zoom tileSize := CE / (float64)(z2) xmin := (float64)(t.X)*tileSize - CE/2.0 xmax := xmin + tileSize ymax := CE/2 - (float64)(t.Y)*tileSize ymin := ymax - tileSize return xmin, ymin, xmax, ymax }
tiles/tileid.go
0.778481
0.454775
tileid.go
starcoder
package cdn import ( "encoding/json" ) // CustconfBandWidthLimit The pattern based bandwidth throttling policy allows you to limit the transfer rate of assets to end users based on a set of rules matching the request's HTTP User-Agent and/or the path. Each rule must be expressed in the following format: <User-Agent Pattern>:<path pattern 1, path pattern 2>. For example, the pattern: \"Mozilla*:*.mp3,*dir*.exe|*IE*:*.jpg,*.zip|*ios 6*:* \" will match all MP3 files and EXE files containing the substring \"dir\" that are requested by a User-Agent containing the substring \"Mozilla,\" all JPG and ZIP files requested by a User-Agent containing the substring \"IE,\" and all requests made by User-Agents containing the substring \"ios 6.\" type CustconfBandWidthLimit struct { // This is used by the API to perform conflict checking Id *string `json:"id,omitempty"` // String of values delimited by a '|' character. These are pattern match rules to use for applying rate limiting on requests. Rule *string `json:"rule,omitempty"` // These are the initial bytes (ri) and the sustained rate (rs) query string parameters to use for this rule. 
Example: ri=100,rs=1000 Values *string `json:"values,omitempty"` Enabled *bool `json:"enabled,omitempty"` } // NewCustconfBandWidthLimit instantiates a new CustconfBandWidthLimit object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewCustconfBandWidthLimit() *CustconfBandWidthLimit { this := CustconfBandWidthLimit{} return &this } // NewCustconfBandWidthLimitWithDefaults instantiates a new CustconfBandWidthLimit object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewCustconfBandWidthLimitWithDefaults() *CustconfBandWidthLimit { this := CustconfBandWidthLimit{} return &this } // GetId returns the Id field value if set, zero value otherwise. func (o *CustconfBandWidthLimit) GetId() string { if o == nil || o.Id == nil { var ret string return ret } return *o.Id } // GetIdOk returns a tuple with the Id field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *CustconfBandWidthLimit) GetIdOk() (*string, bool) { if o == nil || o.Id == nil { return nil, false } return o.Id, true } // HasId returns a boolean if a field has been set. func (o *CustconfBandWidthLimit) HasId() bool { if o != nil && o.Id != nil { return true } return false } // SetId gets a reference to the given string and assigns it to the Id field. func (o *CustconfBandWidthLimit) SetId(v string) { o.Id = &v } // GetRule returns the Rule field value if set, zero value otherwise. func (o *CustconfBandWidthLimit) GetRule() string { if o == nil || o.Rule == nil { var ret string return ret } return *o.Rule } // GetRuleOk returns a tuple with the Rule field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *CustconfBandWidthLimit) GetRuleOk() (*string, bool) { if o == nil || o.Rule == nil { return nil, false } return o.Rule, true } // HasRule returns a boolean if a field has been set. func (o *CustconfBandWidthLimit) HasRule() bool { if o != nil && o.Rule != nil { return true } return false } // SetRule gets a reference to the given string and assigns it to the Rule field. func (o *CustconfBandWidthLimit) SetRule(v string) { o.Rule = &v } // GetValues returns the Values field value if set, zero value otherwise. func (o *CustconfBandWidthLimit) GetValues() string { if o == nil || o.Values == nil { var ret string return ret } return *o.Values } // GetValuesOk returns a tuple with the Values field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *CustconfBandWidthLimit) GetValuesOk() (*string, bool) { if o == nil || o.Values == nil { return nil, false } return o.Values, true } // HasValues returns a boolean if a field has been set. func (o *CustconfBandWidthLimit) HasValues() bool { if o != nil && o.Values != nil { return true } return false } // SetValues gets a reference to the given string and assigns it to the Values field. func (o *CustconfBandWidthLimit) SetValues(v string) { o.Values = &v } // GetEnabled returns the Enabled field value if set, zero value otherwise. func (o *CustconfBandWidthLimit) GetEnabled() bool { if o == nil || o.Enabled == nil { var ret bool return ret } return *o.Enabled } // GetEnabledOk returns a tuple with the Enabled field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *CustconfBandWidthLimit) GetEnabledOk() (*bool, bool) { if o == nil || o.Enabled == nil { return nil, false } return o.Enabled, true } // HasEnabled returns a boolean if a field has been set. 
func (o *CustconfBandWidthLimit) HasEnabled() bool { if o != nil && o.Enabled != nil { return true } return false } // SetEnabled gets a reference to the given bool and assigns it to the Enabled field. func (o *CustconfBandWidthLimit) SetEnabled(v bool) { o.Enabled = &v } func (o CustconfBandWidthLimit) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.Id != nil { toSerialize["id"] = o.Id } if o.Rule != nil { toSerialize["rule"] = o.Rule } if o.Values != nil { toSerialize["values"] = o.Values } if o.Enabled != nil { toSerialize["enabled"] = o.Enabled } return json.Marshal(toSerialize) } type NullableCustconfBandWidthLimit struct { value *CustconfBandWidthLimit isSet bool } func (v NullableCustconfBandWidthLimit) Get() *CustconfBandWidthLimit { return v.value } func (v *NullableCustconfBandWidthLimit) Set(val *CustconfBandWidthLimit) { v.value = val v.isSet = true } func (v NullableCustconfBandWidthLimit) IsSet() bool { return v.isSet } func (v *NullableCustconfBandWidthLimit) Unset() { v.value = nil v.isSet = false } func NewNullableCustconfBandWidthLimit(val *CustconfBandWidthLimit) *NullableCustconfBandWidthLimit { return &NullableCustconfBandWidthLimit{value: val, isSet: true} } func (v NullableCustconfBandWidthLimit) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableCustconfBandWidthLimit) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
pkg/cdn/model_custconf_band_width_limit.go
0.869908
0.449091
model_custconf_band_width_limit.go
starcoder
package main import "fmt" // NewNodeData returns a pointer to a NodeData structure, initialized with the provided data // parsing Terraform output, reference: https://github.com/hashicorp/terraform/blob/master/command/output.go func NewNodeData(data []byte) (result *NodeData) { result = &NodeData{string(data), -1} return } func (nd *NodeData) skipsSpaces() { index := nd.index if index == -1 { index++ } for index < len(nd.bytes) { ch := rune(nd.bytes[index]) if ch == ' ' || ch == '\r' || ch == '\n' || ch == '\t' { index++ continue } break } nd.index = index } func (nd *NodeData) skipOptionalChar() { index := nd.index for index < len(nd.bytes) { ch := rune(nd.bytes[index]) if ch == '"' || ch == ',' { index++ continue } break } nd.index = index } // ReadOptionalValue reads an optional token func (nd *NodeData) ReadOptionalValue() (result string) { nd.skipsSpaces() for nd.index < len(nd.bytes) { ch := rune(nd.bytes[nd.index]) if ch == ',' || ch == ']' || ch == '}' || ch == '\n' { break } result = result + string(ch) nd.index++ } nd.skipsSpaces() return } func (nd *NodeData) eof() (result bool) { result = nd.index >= len(nd.bytes) return } // ReadValue reads a value from the specified NodeData func (nd *NodeData) ReadValue() (result string) { nd.skipsSpaces() index := nd.index var name string for index < len(nd.bytes) { ch := nd.bytes[index] if ch != ' ' && ch != '\n' { name = name + string(ch) index++ } else { break } } nd.index = index nd.skipsSpaces() result = name return } func (nd *NodeData) readLeftSquare() { nd.readChar('[') } func (nd *NodeData) readRightSquare() { nd.readChar(']') } func (nd *NodeData) readChar(chExpected rune) { nd.skipsSpaces() chSeen := rune(nd.bytes[nd.index]) if chSeen == chExpected { nd.index++ } else { panic(fmt.Sprintf(`"%s" expected, but found "%s"`, string(chExpected), string(chSeen))) } nd.skipsSpaces() } func (nd *NodeData) readEqual() { nd.readChar('=') } func (nd *NodeData) readLeftSquareOrLeftBrace() (result NodeType) { ch := 
nd.bytes[nd.index] switch ch { case '[': nd.index++ result = ntList case '{': nd.index++ result = ntMap } return } // ReadList reads an array of string. The List nomenclature is from Terraform func (nd *NodeData) ReadList() (result []string) { nd.skipsSpaces() readName: if nd.eof() { return } ch := rune(nd.bytes[nd.index]) // empty list if ch == ']' { // no need to increment index return } nd.skipOptionalChar() value := nd.ReadOptionalValue() if value != "" { result = append(result, value) } nd.skipOptionalChar() goto readName } // ReadMap reads a dictionary from the given NodeData structure func (nd *NodeData) ReadMap() (result map[string]interface{}) { result = make(map[string]interface{}) values := []string{} nd.skipsSpaces() readName: if nd.eof() { return } ch := rune(nd.bytes[nd.index]) // empty map if ch == '}' || ch == ']' { return } name := nd.ReadValue() nd.readEqual() nd.readLeftSquare() readvalue: value := nd.ReadOptionalValue() if value != "" { values = append(values, value) } result[name] = values ch = rune(nd.bytes[nd.index]) if !(ch == ']') { goto readvalue } nd.readRightSquare() values = []string{} goto readName } // ReadRightBrace expects a right brace as the next char func (nd *NodeData) ReadRightBrace() { nd.readChar('}') } // ReadRightSquare expects a right square as the next char func (nd *NodeData) ReadRightSquare() { nd.readChar(']') } // ReadNode reads a node which consists of a sequence of a list, or a map, or a combination of both. 
func (nd *NodeData) ReadNode() (result map[string]interface{}) { result = make(map[string]interface{}) if !nd.eof() { loop: NodeName := nd.ReadValue() nd.readEqual() nodeType := nd.readLeftSquareOrLeftBrace() switch nodeType { case ntList: { value := nd.ReadList() nd.ReadRightSquare() amap := make(map[string]interface{}) amap["value"] = value amap["type"] = "list" result[NodeName] = amap } case ntMap: { value := nd.ReadMap() nd.ReadRightBrace() amap := make(map[string]interface{}) amap["value"] = value amap["type"] = "map" result[NodeName] = amap } } if !nd.eof() { goto loop } } return }
src/createconfig/tfeparser.go
0.739893
0.579638
tfeparser.go
starcoder
package collisions import ( "github.com/TrashPony/Veliri/src/mechanics/gameObjects/detail" "github.com/TrashPony/Veliri/src/mechanics/globalGame/game_math" ) type Polygon struct { Sides []*SideRec `json:"sides"` centerX, centerY float64 Height, Width float64 Angle int } type SideRec struct { X1 float64 `json:"x_1"` Y1 float64 `json:"y_1"` X2 float64 `json:"x_2"` Y2 float64 `json:"y_2"` } func (r *Polygon) Rotate(rotate int) { r.Angle = rotate rotateSide := func(side *SideRec, x0, y0 float64, rotate int) { side.X1, side.Y1 = game_math.RotatePoint(side.X1, side.Y1, x0, y0, rotate) side.X2, side.Y2 = game_math.RotatePoint(side.X2, side.Y2, x0, y0, rotate) } for _, side := range r.Sides { rotateSide(side, r.centerX, r.centerY, rotate) } } func (r *Polygon) detectCollisionRectToCircle(centerCircle *point, radius int) bool { // A - [0]1 B - [1]1 C = [2]1 D = [3]1 if r.detectPointInRectangle(centerCircle.x, centerCircle.y) { // цент находится внутри прямоуголника, пересекается return true } /* intersectCircle(S, (A, B)) or intersectCircle(S, (B, C)) or intersectCircle(S, (C, D)) or intersectCircle(S, (D, A)) */ a := &point{x: r.Sides[0].X1, y: r.Sides[0].Y1} b := &point{x: r.Sides[1].X1, y: r.Sides[1].Y1} c := &point{x: r.Sides[2].X1, y: r.Sides[2].Y1} d := &point{x: r.Sides[3].X1, y: r.Sides[3].Y1} intersect1, _, _ := IntersectVectorToCircle(a, b, centerCircle, radius) intersect2, _, _ := IntersectVectorToCircle(b, c, centerCircle, radius) intersect3, _, _ := IntersectVectorToCircle(c, d, centerCircle, radius) intersect4, _, _ := IntersectVectorToCircle(d, a, centerCircle, radius) // пересекается 1 из сторон if intersect1 || intersect2 || intersect3 || intersect4 { return true } return false } func (r *Polygon) detectCollisionRectToRect(r2 *Polygon) bool { if r.detectPointInRectangle(float64(r2.centerX), float64(r2.centerY)) { // цент находится внутри прямоуголника, пересекается return true } if r2.detectPointInRectangle(float64(r.centerX), float64(r.centerY)) { // цент 
находится внутри прямоуголника, пересекается return true } if r.centerX == r2.centerX && r.centerY == r2.centerY { // при одинаковом прямоугольнике и одинаковым центром, не будет пересечений и колизия будет не найдена // поэтому это тут return true } intersection := func(ax1, ay1, ax2, ay2, bx1, by1, bx2, by2 float64) bool { v1 := (bx2-bx1)*(ay1-by1) - (by2-by1)*(ax1-bx1) v2 := (bx2-bx1)*(ay2-by1) - (by2-by1)*(ax2-bx1) v3 := (ax2-ax1)*(by1-ay1) - (ay2-ay1)*(bx1-ax1) v4 := (ax2-ax1)*(by2-ay1) - (ay2-ay1)*(bx2-ax1) return (v1*v2 < 0) && (v3*v4 < 0) } for _, side1 := range r.Sides { for _, side2 := range r2.Sides { if intersection(side1.X1, side1.Y1, side1.X2, side1.Y2, side2.X1, side2.Y1, side2.X2, side2.Y2) { return true } } } return false } func (r *Polygon) detectPointInRectangle(x, y float64) bool { dot := func(u, v *point) float64 { return u.x*v.x + u.y*v.y } // A - [0]1 B - [1]1 C = [2]1 D = [3]1 //0 ≤ AP·AB ≤ AB·AB and 0 ≤ AP·AD ≤ AD·AD AB := vector(&point{x: r.Sides[0].X1, y: r.Sides[0].Y1}, &point{x: r.Sides[1].X1, y: r.Sides[1].Y1}) AM := vector(&point{x: r.Sides[0].X1, y: r.Sides[0].Y1}, &point{x: x, y: y}) BC := vector(&point{x: r.Sides[1].X1, y: r.Sides[1].Y1}, &point{x: r.Sides[2].X1, y: r.Sides[2].Y1}) BM := vector(&point{x: r.Sides[1].X1, y: r.Sides[1].Y1}, &point{x: x, y: y}) return 0 <= dot(AB, AM) && dot(AB, AM) <= dot(AB, AB) && 0 <= dot(BC, BM) && dot(BC, BM) <= dot(BC, BC) } func GetBodyRect(body *detail.Body, x, y float64, rotate int, full, min bool) *Polygon { /* squad.rectDebag.moveTo(-50, -25); squad.rectDebag.lineTo(-50, +25); squad.rectDebag.lineTo(-50, +25); squad.rectDebag.lineTo(+50, +25); squad.rectDebag.lineTo(+50, +25); squad.rectDebag.lineTo(+50, -25); squad.rectDebag.lineTo(+50, -25); squad.rectDebag.lineTo(-50, -25); // A - [0] B - [1] C = [2] D = [3] */ heightBody, widthBody := float64(body.Height), float64(body.Width) if full { if heightBody > widthBody { widthBody = heightBody } else { heightBody = widthBody } } if min { if 
heightBody < widthBody { widthBody = heightBody } else { heightBody = widthBody } } bodyRec := GetCenterRect(x, y, heightBody*2, widthBody*2) bodyRec.Rotate(rotate) return bodyRec } func GetCenterRect(x, y, height, width float64) *Polygon { // делем на 2 что бы центр квадрата был в х у height = height / 2 width = width / 2 return GetRect(x, y, height, width) } func GetRect(x, y, height, width float64) *Polygon { return &Polygon{ Sides: []*SideRec{ // A // B {X1: x - width, Y1: y - height, X2: x - width, Y2: y + height}, // B // C {X1: x - width, Y1: y + height, X2: x + width, Y2: y + height}, // C // D {X1: x + width, Y1: y + height, X2: x + width, Y2: y - height}, // D // A {X1: x + width, Y1: y - height, X2: x - width, Y2: y - height}, }, centerX: float64(x), centerY: float64(y), Height: height, Width: width, } }
src/mechanics/globalGame/collisions/polygon.go
0.752649
0.497192
polygon.go
starcoder
package pgn import ( "fmt" ) // Nag represents a numeric annotation glyph. type Nag int // String returns the common representation of the NAG if it has one (!, ?, !?, // +-, -+, ...). Otherwise it returns $<nag> ($56, $123, ...). func (n Nag) String() string { if int(n) >= len(nagData) || nagData[n].str == "" { return fmt.Sprintf("$%d", n) } return nagData[n].str } // Description returns a short description explaining the NAG. func (n Nag) Description() string { if int(n) >= len(nagData) { return fmt.Sprintf("$%d: non-standard NAG", n) } return nagData[n].desc } var nagData = []struct { str string desc string }{ 0: {"", "null annotation"}, 1: {"!", "good move"}, 2: {"?", "poor move"}, 3: {"!!", "very good move"}, 4: {"??", "very poor move"}, 5: {"!?", "speculative move"}, 6: {"?!", "questionable move"}, 7: {"□", "forced move (all others lose quickly)"}, 8: {"", "singular move (no reasonable alternatives)"}, 9: {"", "worst move"}, 10: {"=", "drawish position"}, 11: {"", "equal chances, quiet position"}, 12: {"", "equal chances, active position"}, 13: {"∞", "unclear position"}, 14: {"⩲", "white has a slight advantage"}, 15: {"⩱", "black has a slight advantage"}, 16: {"±", "white has a moderate advantage"}, 17: {"∓", "black has a moderate advantage"}, 18: {"+-", "white has a decisive advantage"}, 19: {"-+", "black has a decisive advantage"}, 20: {"+--", "white has a crushing advantage (black should resign)"}, 21: {"--+", "black has a crushing advantage (white should resign)"}, 22: {"", "white is in zugzwang"}, 23: {"", "black is in zugzwang"}, 24: {"", "white has a slight space advantage"}, 25: {"", "black has a slight space advantage"}, 26: {"", "white has a moderate space advantage"}, 27: {"", "black has a moderate space advantage"}, 28: {"", "white has a decisive space advantage"}, 29: {"", "black has a decisive space advantage"}, 30: {"", "white has a slight time (development) advantage"}, 31: {"", "black has a slight time (development) advantage"}, 32: {"", 
"white has a moderate time (development) advantage"}, 33: {"", "black has a moderate time (development) advantage"}, 34: {"", "white has a decisive time (development) advantage"}, 35: {"", "black has a decisive time (development) advantage"}, 36: {"", "white has the initiative"}, 37: {"", "black has the initiative"}, 38: {"", "white has a lasting initiative"}, 39: {"", "black has a lasting initiative"}, 40: {"", "white has the attack"}, 41: {"", "black has the attack"}, 42: {"", "white has insufficient compensation for material deficit"}, 43: {"", "black has insufficient compensation for material deficit"}, 44: {"", "white has sufficient compensation for material deficit"}, 45: {"", "black has sufficient compensation for material deficit"}, 46: {"", "white has more than adequate compensation for material deficit"}, 47: {"", "black has more than adequate compensation for material deficit"}, 48: {"", "white has a slight center control advantage"}, 49: {"", "black has a slight center control advantage"}, 50: {"", "white has a moderate center control advantage"}, 51: {"", "black has a moderate center control advantage"}, 52: {"", "white has a decisive center control advantage"}, 53: {"", "black has a decisive center control advantage"}, 54: {"", "white has a slight kingside control advantage"}, 55: {"", "black has a slight kingside control advantage"}, 56: {"", "white has a moderate kingside control advantage"}, 57: {"", "black has a moderate kingside control advantage"}, 58: {"", "white has a decisive kingside control advantage"}, 59: {"", "black has a decisive kingside control advantage"}, 60: {"", "white has a slight queenside control advantage"}, 61: {"", "black has a slight queenside control advantage"}, 62: {"", "white has a moderate queenside control advantage"}, 63: {"", "black has a moderate queenside control advantage"}, 64: {"", "white has a decisive queenside control advantage"}, 65: {"", "black has a decisive queenside control advantage"}, 66: {"", "white 
has a vulnerable first rank"}, 67: {"", "black has a vulnerable first rank"}, 68: {"", "white has a well protected first rank"}, 69: {"", "black has a well protected first rank"}, 70: {"", "white has a poorly protected king"}, 71: {"", "black has a poorly protected king"}, 72: {"", "white has a well protected king"}, 73: {"", "black has a well protected king"}, 74: {"", "white has a poorly placed king"}, 75: {"", "black has a poorly placed king"}, 76: {"", "white has a well placed king"}, 77: {"", "black has a well placed king"}, 78: {"", "white has a very weak pawn structure"}, 79: {"", "black has a very weak pawn structure"}, 80: {"", "white has a moderately weak pawn structure"}, 81: {"", "black has a moderately weak pawn structure"}, 82: {"", "white has a moderately strong pawn structure"}, 83: {"", "black has a moderately strong pawn structure"}, 84: {"", "white has a very strong pawn structure"}, 85: {"", "black has a very strong pawn structure"}, 86: {"", "white has poor knight placement"}, 87: {"", "black has poor knight placement"}, 88: {"", "white has good knight placement"}, 89: {"", "black has good knight placement"}, 90: {"", "white has poor bishop placement"}, 91: {"", "black has poor bishop placement"}, 92: {"", "white has good bishop placement"}, 93: {"", "black has good bishop placement"}, 94: {"", "white has poor rook placement"}, 95: {"", "black has poor rook placement"}, 96: {"", "white has good rook placement"}, 97: {"", "black has good rook placement"}, 98: {"", "white has poor queen placement"}, 99: {"", "black has poor queen placement"}, 100: {"", "white has good queen placement"}, 101: {"", "black has good queen placement"}, 102: {"", "white has poor piece coordination"}, 103: {"", "black has poor piece coordination"}, 104: {"", "white has good piece coordination"}, 105: {"", "black has good piece coordination"}, 106: {"", "white has played the opening very poorly"}, 107: {"", "black has played the opening very poorly"}, 108: {"", "white 
has played the opening poorly"}, 109: {"", "black has played the opening poorly"}, 110: {"", "white has played the opening well"}, 111: {"", "black has played the opening well"}, 112: {"", "white has played the opening very well"}, 113: {"", "black has played the opening very well"}, 114: {"", "white has played the middlegame very poorly"}, 115: {"", "black has played the middlegame very poorly"}, 116: {"", "white has played the middlegame poorly"}, 117: {"", "black has played the middlegame poorly"}, 118: {"", "white has played the middlegame well"}, 119: {"", "black has played the middlegame well"}, 120: {"", "white has played the middlegame very well"}, 121: {"", "black has played the middlegame very well"}, 122: {"", "white has played the ending very poorly"}, 123: {"", "black has played the ending very poorly"}, 124: {"", "white has played the ending poorly"}, 125: {"", "black has played the ending poorly"}, 126: {"", "white has played the ending well"}, 127: {"", "black has played the ending well"}, 128: {"", "white has played the ending very well"}, 129: {"", "black has played the ending very well"}, 130: {"", "white has slight counterplay"}, 131: {"", "black has slight counterplay"}, 132: {"", "white has moderate counterplay"}, 133: {"", "black has moderate counterplay"}, 134: {"", "white has decisive counterplay"}, 135: {"", "black has decisive counterplay"}, 136: {"", "white has moderate time control pressure"}, 137: {"", "black has moderate time control pressure"}, 138: {"", "white has severe time control pressure"}, 139: {"", "black has severe time control pressure"}, }
pgn/nag.go
0.767472
0.487307
nag.go
starcoder
package utils import ( "sort" ) type signedInt interface { ~int | ~int8 | ~int16 | ~int32 | ~int64 } type unsignedInt interface { ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 } type float interface { ~float32 | ~float64 } type number interface { signedInt | unsignedInt | float } type enumerable interface { number | ~string } // Contains checks whether the value is in slice or not. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32, uint64 and string // as types for slice values. func Contains[T comparable](slice []T, value T) bool { for _, v := range slice { if v == value { return true } } return false } // Sum sums the values of slice. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32 and uint64 // as types for slice values. func Sum[T number](slice []T) T { var sum T for _, v := range slice { sum += v } return sum } // Sort sorts ascending the values of slice. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32, uint64 and string // as types for slice values. func Sort[T enumerable](slice []T) { sort.Slice(slice, func(i, j int) bool { return slice[i] < slice[j] }) } // SortDesc sorts descending the values of slice. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32, uint64 and string // as types for slice values. func SortDesc[T enumerable](slice []T) { sort.Slice(slice, func(i, j int) bool { return slice[i] > slice[j] }) } // Min get the min value of slice. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32 and uint64 as types for slice values. // Returns 0 if slice is nil or empty. func Min[T number](slice []T) T { if len(slice) == 0 { return T(0) } var min T = slice[0] for _, v := range slice { if v < min { min = v } } return min } // Max get the max value of slice. 
// It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32 and uint64 // as types for slice values. // Returns 0 if slice is nil or empty. func Max[T number](slice []T) T { if len(slice) == 0 { return T(0) } var max T = slice[0] for _, v := range slice { if v > max { max = v } } return max } // Filter filters the values of slice by the given function. // It any type for slice values. // Returns empty if given slice is nil or empty. func Filter[T any](slice []T, f func(T) bool) []T { result := make([]T, 0, len(slice)) for _, v := range slice { if f(v) { result = append(result, v) } } return result } // Diff returns the difference of two slices. // Gets the values of s1 that are not in s2. // It supports int, int8, int16, int32, int64, float32, float64, uint, uint8, uint16, uint32, uint64 and string // as types for slice values. // Returns empty if given slice s1 is nil or empty. func Diff[T comparable](s1, s2 []T) []T { if len(s1) == 0 { return []T{} } var result []T for _, v := range s1 { if !Contains(s2, v) { result = append(result, v) } } return result } // Intersect returns the intersection of two slices. // It supports all comparable types for slice values. // Returns empty if given slices, s1 or s2, are nil or empty. func Intersect[T comparable](s1, s2 []T) []T { if len(s1) == 0 || len(s2) == 0 { return []T{} } var result []T for _, v := range s1 { if Contains(s2, v) { result = append(result, v) } } return result } // Any returns true if the given function returns true for any element of the slice. // It supports all comparable types as slice values. func Any[T comparable](slice []T, f func(T) bool) bool { for _, v := range slice { if f(v) { return true } } return false } // All returns true if the given function returns true for all elements of the slice. // It supports all comparable types for slice values. 
func All[T comparable](slice []T, f func(T) bool) bool { for _, v := range slice { if !f(v) { return false } } return true } // Select returns a new slice with the new type of the given function. // IT supports all comparable types for slice values. func Select[T, K comparable](slice []T, f func(T) K) []K { result := make([]K, 0, len(slice)) for _, v := range slice { result = append(result, f(v)) } return result }
utils.go
0.831588
0.423041
utils.go
starcoder
package opc // Colorbox // Every pixel's r,g,b is linearly related to its x,y,z. import ( "github.com/longears/pixelslinger/colorutils" "github.com/longears/pixelslinger/midi" "math" "time" ) func MakePatternSpatialColorBox(locations []float64) ByteThread { return func(bytesIn chan []byte, bytesOut chan []byte, midiState *midi.MidiState) { // get bounding box n_pixels := len(locations) / 3 var max_coord_x, max_coord_y, max_coord_z float64 var min_coord_x, min_coord_y, min_coord_z float64 for ii := 0; ii < n_pixels; ii++ { x := locations[ii*3+0] y := locations[ii*3+1] z := locations[ii*3+2] if ii == 0 || x > max_coord_x { max_coord_x = x } if ii == 0 || y > max_coord_y { max_coord_y = y } if ii == 0 || z > max_coord_z { max_coord_z = z } if ii == 0 || x < min_coord_x { min_coord_x = x } if ii == 0 || y < min_coord_y { min_coord_y = y } if ii == 0 || z < min_coord_z { min_coord_z = z } } for bytes := range bytesIn { n_pixels := len(bytes) / 3 t := float64(time.Now().UnixNano())/1.0e9 - 9.4e8 // fill in bytes slice for ii := 0; ii < n_pixels; ii++ { //-------------------------------------------------------------------------------- // make moving stripes for x, y, and z x := locations[ii*3+0] y := locations[ii*3+1] z := locations[ii*3+2] r := (x - min_coord_x) / max_coord_x g := (y - min_coord_y) / max_coord_y b := (z - min_coord_z) / max_coord_z // r, g, b = colorutils.ContrastRgb(r, g, b, 0.5, 2) // make a moving white dot showing the order of the pixels in the layout file spark_ii := colorutils.PosMod2(t*80, float64(n_pixels)) spark_rad := float64(8) spark_val := math.Max(0, (spark_rad-colorutils.ModDist2(float64(ii), float64(spark_ii), float64(n_pixels)))/spark_rad) spark_val = math.Min(1, spark_val*2) r += spark_val g += spark_val b += spark_val bytes[ii*3+0] = colorutils.FloatToByte(r) bytes[ii*3+1] = colorutils.FloatToByte(g) bytes[ii*3+2] = colorutils.FloatToByte(b) //-------------------------------------------------------------------------------- } bytesOut <- 
bytes } } }
opc/pattern-colorbox.go
0.648466
0.476945
pattern-colorbox.go
starcoder
package streebog import ( "encoding/binary" "encoding/hex" ) // bit512 is an effective representation of 512 bits. // Go leaks 128+ bit native structures so this is the most efficient way for doing xor512bit. type bit512 [8]uint64 // bit512FromBytes64 should be called only on bytes slice with len 64. func bit512FromBytes64(bytes []byte) bit512 { var b bit512 b[0] = binary.BigEndian.Uint64(bytes[0:8]) b[1] = binary.BigEndian.Uint64(bytes[8:16]) b[2] = binary.BigEndian.Uint64(bytes[16:24]) b[3] = binary.BigEndian.Uint64(bytes[24:32]) b[4] = binary.BigEndian.Uint64(bytes[32:40]) b[5] = binary.BigEndian.Uint64(bytes[40:48]) b[6] = binary.BigEndian.Uint64(bytes[48:56]) b[7] = binary.BigEndian.Uint64(bytes[56:64]) return b } // bit512FromString can be called only on hex strings representing at least 512 bits. // Otherwise function panics. func bit512FromString(s string) bit512 { var b bit512 bytes := hexDecoderUnsafe(s) b[0] = binary.BigEndian.Uint64(bytes[0:8]) b[1] = binary.BigEndian.Uint64(bytes[8:16]) b[2] = binary.BigEndian.Uint64(bytes[16:24]) b[3] = binary.BigEndian.Uint64(bytes[24:32]) b[4] = binary.BigEndian.Uint64(bytes[32:40]) b[5] = binary.BigEndian.Uint64(bytes[40:48]) b[6] = binary.BigEndian.Uint64(bytes[48:56]) b[7] = binary.BigEndian.Uint64(bytes[56:64]) return b } // String gives hex string representation of 512 bits. func (b bit512) String() string { var result [64]byte for i := 0; i < 8; i++ { binary.BigEndian.PutUint64(result[8*i:8*i+8], b[i]) } return hex.EncodeToString(result[:]) } // Bytes64 converts internal 512 bits to byte slice. 
func (b bit512) Bytes64() [64]byte { var result [64]byte binary.BigEndian.PutUint64(result[0:8], b[0]) binary.BigEndian.PutUint64(result[8:16], b[1]) binary.BigEndian.PutUint64(result[16:24], b[2]) binary.BigEndian.PutUint64(result[24:32], b[3]) binary.BigEndian.PutUint64(result[32:40], b[4]) binary.BigEndian.PutUint64(result[40:48], b[5]) binary.BigEndian.PutUint64(result[48:56], b[6]) binary.BigEndian.PutUint64(result[56:64], b[7]) return result } // add512bit adds 2 bit512 fields modulo 2^512. func add512bit(this bit512, that bit512) bit512 { var result bit512 var overflow uint64 for i := 7; i >= 0; i-- { result[i] = this[i] + that[i] + overflow // Overflow check if result[i] < this[i] { overflow = 1 } else { overflow = 0 } } return result } // add512bitInPlace adds 2 bit512 fields inplace modulo 2^512. func (b *bit512) add512bitInPlace(that bit512) { var overflow uint64 for i := 7; i >= 0; i-- { b[i] += that[i] + overflow // Overflow check if b[i] < that[i] { overflow = 1 } else { overflow = 0 } } } // add512bitUint64 adds uint64 to bit512 modulo 2^512. func add512bitUint64(this bit512, that uint64) bit512 { this[7] += that for i := 7; i >= 1; i-- { // Overflow check if this[i] < that { that = 1 this[i-1] += that } else { that = 0 } } return this } // addUint64InPlace adds uint64 to bit512 inplace modulo 2^512. func (b *bit512) addUint64InPlace(that uint64) { b[7] += that for i := 7; i >= 1; i-- { // Overflow check if b[i] < that { that = 1 b[i-1] += that } else { that = 0 } } } // xor512bit does xor512bit operation on 2 bit512 fields. Implemented in the way it is inlined. func xor512bit(this bit512, that bit512) bit512 { this[0] ^= that[0] this[1] ^= that[1] this[2] ^= that[2] this[3] ^= that[3] this[4] ^= that[4] this[5] ^= that[5] this[6] ^= that[6] this[7] ^= that[7] return this } // xor512bitInPlace does xor512bit inplace operation on 2 bit512 fields. Implemented in the way it is inlined. 
func (b *bit512) xor512bitInPlace(that bit512) { b[0] ^= that[0] b[1] ^= that[1] b[2] ^= that[2] b[3] ^= that[3] b[4] ^= that[4] b[5] ^= that[5] b[6] ^= that[6] b[7] ^= that[7] }
hash/streebog/bit512.go
0.625209
0.455622
bit512.go
starcoder
package criteria_bounding import ( "fmt" "github.com/Azbesciak/RealDecisionMaker/lib/utils" ) type CriteriaBounding struct { AllowedValuesRangeScaling float64 `json:"allowedValuesRangeScaling"` DisallowNegativeValues bool `json:"disallowNegativeValues"` } type CriteriaInRangeBounding struct { bounding *CriteriaBounding valueRange *utils.ValueRange } func DefaultParams() *CriteriaBounding { return &CriteriaBounding{ AllowedValuesRangeScaling: -1.0, DisallowNegativeValues: false, } } func FromParams(params *interface{}) *CriteriaBounding { bounding := DefaultParams() utils.DecodeToStruct(*params, bounding) if bounding.AllowedValuesRangeScaling == 0 { panic(fmt.Errorf("allowedValuesRangeScaling cannot be 0")) } return bounding } func (b *CriteriaBounding) WithRange(valueRange *utils.ValueRange) *CriteriaInRangeBounding { var scaled *utils.ValueRange = nil if b.AllowedValuesRangeScaling > 0 { scaled = scaleRange(valueRange, b.AllowedValuesRangeScaling) } return &CriteriaInRangeBounding{ bounding: b, valueRange: scaled, } } func (b *CriteriaInRangeBounding) BoundValue(value float64) float64 { value = b.bounding.trimBelowZeroIfRequired(value) if b.valueRange == nil { return value } return boundValueInRange(value, b.valueRange) } func (b *CriteriaBounding) BoundValue(value float64, valueRange *utils.ValueRange) float64 { value = b.trimBelowZeroIfRequired(value) return boundValue(value, b.AllowedValuesRangeScaling, valueRange) } func (b *CriteriaBounding) trimBelowZeroIfRequired(value float64) float64 { if b.DisallowNegativeValues && value < 0 { value = 0 } return value } func boundValue(value, scaling float64, valueRange *utils.ValueRange) float64 { if scaling > 0 { scaledRange := scaleRange(valueRange, scaling) return boundValueInRange(value, scaledRange) } return value } func boundValueInRange(value float64, scaledRange *utils.ValueRange) float64 { if value < scaledRange.Min { value = scaledRange.Min } if value > scaledRange.Max { value = scaledRange.Max } return value 
} func scaleRange(valueRange *utils.ValueRange, scaling float64) *utils.ValueRange { if scaling == 1 { return valueRange } else { return valueRange.ScaleEqually(scaling) } }
lib/model/criteria-bounding/criteria-bounding.go
0.847873
0.414188
criteria-bounding.go
starcoder
package maps import ( "constraints" gs "github.com/kigichang/goscala" "github.com/kigichang/goscala/slices" ) func Make[K comparable, V any](a ...int) gs.Map[K, V] { return newGeneralMap[K, V](a...) } func Empty[K comparable, V any]() gs.Map[K, V] { return Make[K, V]() } func From[K comparable, V any](pairs ...gs.Pair[K, V]) gs.Map[K, V] { m := Make[K, V](len(pairs)) for i := range pairs { m.Add(pairs[i]) } return m } func Collect[K comparable, V, T any](m gs.Map[K, V], pf func(K, V) (T, bool)) gs.Slice[T] { return slices.Collect( m.Slice(), func(p gs.Pair[K, V]) (T, bool) { return pf(p.Key(), p.Value()) }, ) } func CollectMap[K1, K2 comparable, V1, V2 any](m gs.Map[K1, V1], pf func(K1, V1) (K2, V2, bool)) gs.Map[K2, V2] { ret := Make[K2, V2]() iter := m.Range() for iter.Next() { if k2, v2, ok := pf(iter.Get()); ok { ret.Put(k2, v2) } } return ret } func CollectFirst[K comparable, V, T any](m gs.Map[K, V], pf func(K, V) (T, bool)) gs.Option[T] { iter := m.Range() for iter.Next() { if v, ok := pf(iter.Get()); ok { gs.Some[T](v) } } return gs.None[T]() } func FlatMapSlice[K comparable, V, T any](m gs.Map[K, V], fn func(K, V) gs.Sliceable[T]) gs.Slice[T] { ret := slices.Empty[T]() iter := m.Range() for iter.Next() { ret = append(ret, fn(iter.Get()).Slice()...) } return ret } func FlatMap[K1, K2 comparable, V1, V2 any](m gs.Map[K1, V1], fn func(K1, V1) gs.Sliceable[gs.Pair[K2, V2]]) gs.Map[K2, V2] { return ToMap(FlatMapSlice(m, fn)...) 
} func GroupMap[K1, K2 comparable, V1, V2 any](m gs.Map[K1, V1], groupBy func(K1, V1) K2, op func(K1, V1) V2) gs.Map[K2, gs.Slice[V2]] { ret := Make[K2, gs.Slice[V2]]() it := m.Range() for it.Next() { k, v := it.Get() k2 := groupBy(k, v) v2 := op(k, v) x, _ := ret.Get(k2) x = append(x, v2) ret.Put(k2, x) } return ret } func GroupBy[K, K1 comparable, V any](m gs.Map[K, V], groupBy func(K, V) K1) gs.Map[K1, gs.Map[K, V]] { op := func(k K, v V) gs.Pair[K, V] { return gs.P(k, v) } m1 := GroupMap(m, groupBy, op) ret := Make[K1, gs.Map[K, V]](m1.Len()) it := m1.Range() for it.Next() { k, v := it.Get() ret.Put(k, ToMap(v...)) } return ret } func GroupMapReduce[K1, K2 comparable, V1, V2 any](m gs.Map[K1, V1], groupBy func(K1, V1) K2, op func(K1, V1) V2, reduce func(V2, V2) V2) gs.Map[K2, V2] { m2 := GroupMap(m, groupBy, op) ret := Make[K2, V2](m2.Len()) it := m2.Range() for it.Next() { k, v := it.Get() r, _ := v.Reduce(reduce) ret.Put(k, r) } return ret } func MapMap[K1, K2 comparable, V1, V2 any](m gs.Map[K1, V1], fn func(K1, V1) (K2, V2)) gs.Map[K2, V2] { ret := Make[K2, V2](m.Len()) it := m.Range() for it.Next() { k2, v2 := fn(it.Get()) ret.Put(k2, v2) } return ret } func MapSlice[K comparable, V, T any](m gs.Map[K, V], fn func(K, V) T) gs.Slice[T] { ret := slices.Make[T](m.Len()) it := m.Range() for it.Next() { ret = append(ret, fn(it.Get())) } return ret } func MaxBy[K comparable, V any, B constraints.Ordered](m gs.Map[K, V], fn func(K, V) B) gs.Option[gs.Pair[K, V]] { fn1 := func(p gs.Pair[K, V]) B { return fn(p.Key(), p.Value()) } return slices.MaxBy(m.Slice(), fn1) } func MinBy[K comparable, V any, B constraints.Ordered](m gs.Map[K, V], fn func(K, V) B) gs.Option[gs.Pair[K, V]] { fn1 := func(p gs.Pair[K, V]) B { return fn(p.Key(), p.Value()) } return slices.MinBy(m.Slice(), fn1) } func PartitionMap[K comparable, V, A, B any](m gs.Map[K, V], fn func(K, V) gs.Either[A, B]) (gs.Slice[A], gs.Slice[B]) { return slices.PartitionMap(m.Slice(), func(p gs.Pair[K, V]) 
gs.Either[A, B] { return fn(p.Key(), p.Value()) }) } func ToMap[K comparable, V any](pairs ...gs.Pair[K, V]) gs.Map[K, V] { return formGeneralMap[K, V](pairs...) }
maps/maps.go
0.676406
0.480296
maps.go
starcoder
package model import ( "errors" "reflect" "strconv" "time" "unsafe" ) var timeType = reflect.TypeOf(time.Time{}) type visit struct { a1 unsafe.Pointer a2 unsafe.Pointer typ reflect.Type } type DiffResult struct { DiffMap map[string]interface{} Equal bool } func Equal(x, y interface{}) (*DiffResult, error) { if x == nil || y == nil { return nil, errors.New("use of Equal with nil value") } v1 := handlePtr(reflect.ValueOf(x)) v2 := handlePtr(reflect.ValueOf(y)) if v1.Type() != v2.Type() { return nil, errors.New("use of Equal with different type values") } result := &DiffResult{ DiffMap: make(map[string]interface{}), Equal: true, } for i, n := 0, v1.NumField(); i < n; i++ { // Ignore unexported fields if !v1.Field(i).CanInterface() { continue } // Ignore fields with tag `diffignore:"true"` if checkTag(v1.Type().Field(i)) { continue } if !deepValueEqual(v1.Field(i), v2.Field(i), make(map[visit]bool)) { result.DiffMap[v1.Type().Field(i).Name] = v2.Field(i).Interface() result.Equal = false } } return result, nil } func handlePtr(v reflect.Value) reflect.Value { if v.Kind() == reflect.Ptr { return v.Elem() } return v } func checkTag(v reflect.StructField) bool { b, _ := strconv.ParseBool(v.Tag.Get("diffignore")) return b } func deepValueEqual(v1, v2 reflect.Value, visited map[visit]bool) bool { if !v1.IsValid() || !v2.IsValid() { return v1.IsValid() == v2.IsValid() } if v1.Type() != v2.Type() { return false } hard := func(k reflect.Kind) bool { switch k { case reflect.Array, reflect.Map, reflect.Slice, reflect.Struct: return true } return false } if v1.CanAddr() && v2.CanAddr() && hard(v1.Kind()) { addr1 := unsafe.Pointer(v1.UnsafeAddr()) addr2 := unsafe.Pointer(v2.UnsafeAddr()) if uintptr(addr1) > uintptr(addr2) { // Canonicalize order to reduce number of entries in visited. // Assumes non-moving garbage collector. 
addr1, addr2 = addr2, addr1 } // Short circuit if references are already seen typ := v1.Type() v := visit{addr1, addr2, typ} if visited[v] { return true } // Remember for later. visited[v] = true } switch v1.Kind() { case reflect.Slice: // We treat a nil slice the same as an empty slice. if v1.Len() != v2.Len() { return false } if v1.Pointer() == v2.Pointer() { return true } for i := 0; i < v1.Len(); i++ { if !deepValueEqual(v1.Index(i), v2.Index(i), visited) { return false } } return true case reflect.Interface: if v1.IsNil() || v2.IsNil() { return v1.IsNil() == v2.IsNil() } return deepValueEqual(v1.Elem(), v2.Elem(), visited) case reflect.Ptr: return deepValueEqual(v1.Elem(), v2.Elem(), visited) case reflect.Struct: if v1.Type() == timeType { // Special case for time - we ignore the time zone. t1 := v1.Interface().(time.Time) t2 := v2.Interface().(time.Time) return t1.Equal(t2) } for i, n := 0, v1.NumField(); i < n; i++ { if !deepValueEqual(v1.Field(i), v2.Field(i), visited) { return false } } return true case reflect.Map: if v1.IsNil() != v2.IsNil() { return false } if v1.Len() != v2.Len() { return false } if v1.Pointer() == v2.Pointer() { return true } for _, k := range v1.MapKeys() { val1 := v1.MapIndex(k) val2 := v2.MapIndex(k) if !val1.IsValid() || !val2.IsValid() || !deepValueEqual(v1.MapIndex(k), v2.MapIndex(k), visited) { return false } } return true case reflect.Func: if v1.IsNil() && v2.IsNil() { return true } // Can't do better than this: return false default: return v1.Interface() == v2.Interface() } }
model/diff.go
0.519521
0.412885
diff.go
starcoder
package block import "crypto/cipher" type ecb struct { block cipher.Block blockSize int } // NewECBEncrypter returns a new cipher.BlockMode which uses the given Block cipher to encrypt given blocks in // electronic code book mode (ECB). ECB is the most simple (and also most unsecure) mode in which a Block cipher // can be operated. It simply encodes each block separately and totally lacks diffusion, which means that a single // plaintext block is always encrypted to the exact same ciphertext block. func NewECBEncrypter(block cipher.Block) cipher.BlockMode { return ecbEncrypter{block: block, blockSize: block.BlockSize()} } type ecbEncrypter ecb func (e ecbEncrypter) BlockSize() int { return e.blockSize } func (e ecbEncrypter) CryptBlocks(dst, src []byte) { if len(src)%e.blockSize != 0 { panic("grypto/ecb: input not full blocks") } if len(dst) < len(src) { panic("grypto/ecb: output smaller than input") } for len(src) > 0 { // encrypt in place with block cipher e.block.Encrypt(dst[:e.blockSize], src[:e.blockSize]) // move to the next block src = src[e.blockSize:] dst = dst[e.blockSize:] } } // NewECBDecrypter returns a new cipher.BlockMode which uses the given Block cipher to decrypt given blocks in // electronic code book mode (ECB). ECB is the most simple (and also most unsecure) mode in which a Block cipher // can be operated. It simply encodes each block separately and totally lacks diffusion, which means that a single // plaintext block is always encrypted to the exact same ciphertext block. 
func NewECBDecrypter(block cipher.Block) cipher.BlockMode { return ecbDecrypter{block: block, blockSize: block.BlockSize()} } type ecbDecrypter ecb func (e ecbDecrypter) BlockSize() int { return e.blockSize } func (e ecbDecrypter) CryptBlocks(dst, src []byte) { if len(src)%e.blockSize != 0 { panic("grypto/ecb: input not full blocks") } if len(dst) < len(src) { panic("grypto/ecb: output smaller than input") } for len(src) > 0 { // encrypt in place with block cipher e.block.Decrypt(dst[:e.blockSize], src[:e.blockSize]) // move to the next block src = src[e.blockSize:] dst = dst[e.blockSize:] } }
block/ecb.go
0.776114
0.542257
ecb.go
starcoder
package gocvsimd import "unsafe" //go:noescape func _SimdSse2MedianFilterRhomb3x3(src unsafe.Pointer, srcStride, width, height, channelCount uint64, dst unsafe.Pointer, dstStride uint64) //go:noescape func _SimdSse2MedianFilterSquare3x3(src unsafe.Pointer, srcStride, width, height, channelCount uint64, dst unsafe.Pointer, dstStride uint64) //go:noescape func _SimdSse2MedianFilterRhomb5x5(src unsafe.Pointer, srcStride, width, height, channelCount uint64, dst unsafe.Pointer, dstStride uint64) //go:noescape func _SimdSse2MedianFilterSquare5x5(src unsafe.Pointer, srcStride, width, height, channelCount uint64, dst unsafe.Pointer, dstStride uint64) // SimdSse2MedianFilterRhomb3x3 performs median filtration of input image (filter window is a rhomb 3x3). // All images must have the same width, height and format (8-bit gray, 16-bit UV, 24-bit BGR or 32-bit BGRA). func SimdSse2MedianFilterRhomb3x3(src, dst View) { _SimdSse2MedianFilterRhomb3x3(src.GetData(), uint64(src.GetStride()), uint64(src.GetWidth()), uint64(src.GetHeight()), uint64(ChannelCount(src.GetFormat())), dst.GetData(), uint64(dst.GetStride())) } // SimdSse2MedianFilterSquare3x3 performs median filtration of input image (filter window is a square 3x3). // All images must have the same width, height and format (8-bit gray, 16-bit UV, 24-bit BGR or 32-bit BGRA). func SimdSse2MedianFilterSquare3x3(src, dst View) { _SimdSse2MedianFilterSquare3x3(src.GetData(), uint64(src.GetStride()), uint64(src.GetWidth()), uint64(src.GetHeight()), uint64(ChannelCount(src.GetFormat())), dst.GetData(), uint64(dst.GetStride())) } // SimdSse2MedianFilterRhomb5x5 performs median filtration of input image (filter window is a rhomb 5x5). // All images must have the same width, height and format (8-bit gray, 16-bit UV, 24-bit BGR or 32-bit BGRA). 
func SimdSse2MedianFilterRhomb5x5(src, dst View) { _SimdSse2MedianFilterRhomb5x5(src.GetData(), uint64(src.GetStride()), uint64(src.GetWidth()), uint64(src.GetHeight()), uint64(ChannelCount(src.GetFormat())), dst.GetData(), uint64(dst.GetStride())) } // SimdSse2MedianFilterSquare5x5 performs median filtration of input image (filter window is a square 5x5). // All images must have the same width, height and format (8-bit gray, 16-bit UV, 24-bit BGR or 32-bit BGRA). func SimdSse2MedianFilterSquare5x5(src, dst View) { _SimdSse2MedianFilterSquare5x5(src.GetData(), uint64(src.GetStride()), uint64(src.GetWidth()), uint64(src.GetHeight()), uint64(ChannelCount(src.GetFormat())), dst.GetData(), uint64(dst.GetStride())) }
sse2/SimdSse2MedianFilter_amd64.go
0.594551
0.500488
SimdSse2MedianFilter_amd64.go
starcoder
package vision import ( "fmt" "image" "image/color" "image/draw" "math" "github.com/fogleman/gg" ) // HoughPoint represents a single point in the Hough space with // its score, theta and rho values and and minimum and maximum // corresponding spatial points. type HoughPoint struct { Indexes []int Score int SpatialMin []int SpatialMax []int } // String returns a string representation of the HoughPoint. func (h HoughPoint) String() string { if len(h.Indexes) < 2 { return "{}" } return fmt.Sprintf("{%4d,%4d:%4d}", h.Indexes[0], h.Indexes[1], h.Score) } // GenerateKey returns an unique integer representation for each // Hough point. func GenerateKey(theta, rho int) int { return pairing(theta, rho) } // HoughSpace stores the relevant data from the Hough transform // algorithm. type HoughSpace struct { ThetaRes int RhoRes int MaxScore int MinScore int SpatialBounds image.Rectangle points map[int]*HoughPoint } // NewHoughSpace performs the Hough transform in the input space image // and returns a HoughSpace struct pointer with the given theta and rho // resolutions. 
func NewHoughSpace(input *image.Gray, thetaRes, rhoRes int) *HoughSpace { hs := &HoughSpace{ ThetaRes: thetaRes, RhoRes: rhoRes, points: map[int]*HoughPoint{}, } b := input.Bounds() hs.SpatialBounds = b rhoMax := math.Hypot(float64(b.Dx()), float64(b.Dy())) drho := rhoMax / float64(rhoRes/2) /* dtheta := math.Pi / float64(thetaRes) */ hs.MaxScore = 0 hs.MinScore = math.MaxInt32 /* img := image.NewGray(image.Rect(0, 0, thetaRes, rhoRes)) */ for x := 0; x < b.Dx(); x++ { for y := 0; y < b.Dy(); y++ { c := input.GrayAt(x, y) if c.Y != 255 { continue } for thetaIndex := 0; thetaIndex < thetaRes; thetaIndex++ { //Rescale thetaIndex to the interval [0, pi] and assign to theta /* var theta float64 if thetaIndex > 0 && thetaIndex < thetaRes-1 { theta = dtheta*(float64(thetaIndex)+math.Pi) + (float64(thetaRes) - 1) } else if thetaIndex == 0 { theta = 0 } else if thetaIndex == thetaRes-1 { theta = math.Pi } */ theta := rescale(float64(thetaIndex), 0, float64(thetaRes-1), -math.Pi/2, math.Pi/2) rho := float64(x)*math.Cos(theta) + float64(y)*math.Sin(theta) rhoIndex := rhoRes/2 - int(math.Floor(rho/drho+0.5)) var score int key := pairing(thetaIndex, rhoIndex) if _, ok := hs.points[key]; !ok { score = 1 hs.points[key] = &HoughPoint{ Score: score, SpatialMin: []int{x, y}, SpatialMax: []int{x, y}, Indexes: []int{thetaIndex, rhoIndex}, } } else { p := hs.points[key] p.Score++ score = p.Score if p.SpatialMin[0] < x || p.SpatialMin[1] < y { p.SpatialMin = []int{x, y} } else if p.SpatialMax[0] > x || p.SpatialMax[1] > y { p.SpatialMax = []int{x, y} } hs.points[key] = p } if score < hs.MinScore { hs.MinScore = score } else if score > hs.MaxScore { hs.MaxScore = score } /* col := img.At(thetaIndex, rhoIndex).(color.Gray) if col.Y < 255 { col.Y++ img.SetGray(thetaIndex, rhoIndex, col) } */ } } } /* _ = imgio.Save("hue.png", img, imgio.PNGEncoder()) */ if hs.MaxScore == 0 { hs.MaxScore = hs.MinScore } return hs } // At returns the Hough point at theta and rho indexes. 
func (h *HoughSpace) At(theta, rho int) (*HoughPoint, bool) { key := GenerateKey(theta, rho) hp, ok := h.points[key] return hp, ok } // Set overwrites the Hough point at theta and rho indexes. func (h *HoughSpace) Set(theta, rho int, hp *HoughPoint) { key := GenerateKey(theta, rho) h.points[key] = hp } // Count returns the total number of Hough points. func (h *HoughSpace) Count() int { return len(h.points) } // HoughImage rescales the scores in the Hough space to // the interval [0, 255] and plot it in a grayscale image. func (h *HoughSpace) HoughImage() *image.Gray { b := image.Rect(0, 0, h.ThetaRes, h.RhoRes) i := image.NewGray(b) for _, point := range h.points { col := color.Gray{Y: uint8(rescale(float64(point.Score), 0, float64(h.MaxScore), 0, 255))} i.SetGray(point.Indexes[0], point.Indexes[1], col) } return i } // FindCentroids thresholds the image of the Hough space, // find its blobs and return a new Hough space containing // only the most representative point for each blob. func (h *HoughSpace) FindCentroids(threshold uint8) *HoughSpace { img := h.HoughImage() aux := image.Image(img) aux = Threshold(&aux, threshold) blobs := *ListBlobs(&aux, Connectivity8) h2 := &HoughSpace{ SpatialBounds: h.SpatialBounds, ThetaRes: h.ThetaRes, RhoRes: h.RhoRes, points: map[int]*HoughPoint{}, MaxScore: 0, MinScore: math.MaxInt32, } for _, blob := range blobs { //Compute the blob centroid var thetaMomentum, rhoMomentum float64 var totalScore int64 for _, p := range blob.Points { score := h.points[pairing(p.X, p.Y)].Score totalScore += int64(score) thetaMomentum += float64(p.X * score) rhoMomentum += float64(p.Y * score) } theta := int(thetaMomentum / float64(totalScore)) rho := int(rhoMomentum / float64(totalScore)) //Checks if the centroid lies over a point var hp *HoughPoint key := pairing(theta, rho) if hp0, ok := h.points[key]; ok { //If it does, use it for representing the blob hp = hp0 } else { //If not, use the nearest point that does p := 
blob.ClosestPoint(image.Pt(theta, rho)) key = pairing(p.X, p.Y) hp = h.points[key] } //Store the point in the new Hough space h2.points[key] = hp //Update minimum and maximum scores if hp.Score < h2.MinScore { h2.MinScore = hp.Score } else if hp.Score > h2.MaxScore { h2.MaxScore = hp.Score } } if h2.MaxScore == 0 { h2.MaxScore = h2.MinScore } return h2 } // PlotLines returns an image with line segments for each // corresponding point in the Hough space. func (h *HoughSpace) PlotLines() *image.Gray { b := h.SpatialBounds context := gg.NewContext(b.Dx(), b.Dy()) context.SetRGB(0, 0, 0) context.Clear() context.SetStrokeStyle(gg.NewSolidPattern(color.RGBA{255, 255, 255, 255})) context.SetLineWidth(0.4) for _, p := range h.points { xmin := float64(p.SpatialMin[0]) ymin := float64(p.SpatialMin[1]) xmax := float64(p.SpatialMax[0]) ymax := float64(p.SpatialMax[1]) context.DrawLine(xmin, ymin, xmax, ymax) context.Stroke() } img := context.Image() gray := image.NewGray(b) draw.Draw(gray, b, img, image.ZP, draw.Src) return gray }
hough.go
0.773772
0.550184
hough.go
starcoder
package win import ( "image" "image/color" ) type DIB struct { // Pix holds the image's pixels, in B, G, R order. The pixel at // (x, y) starts at Pix[(p.Rect.Max.Y-y-p.Rect.Min.Y-1)*p.Stride + (x-p.Rect.Min.X)*3]. Pix []uint8 // Stride is the Pix stride (in bytes) between vertically adjacent pixels. Stride int // Rect is the image's bounds. Rect image.Rectangle } func NewDIB(r image.Rectangle) *DIB { w, h := r.Dx(), r.Dy() buf := make([]uint8, 4*w*h) return &DIB{buf, 4 * w, r} } func (p *DIB) ColorModel() color.Model { return color.RGBAModel } func (p *DIB) Bounds() image.Rectangle { return p.Rect } func (p *DIB) At(x, y int) color.Color { if !(image.Point{x, y}.In(p.Rect)) { return color.RGBA{} } i := p.PixOffset(x, y) return color.RGBA{p.Pix[i+2], p.Pix[i+1], p.Pix[i+0], p.Pix[i+3]} } // PixOffset returns the index of the first element of Pix that corresponds to // the pixel at (x, y). func (p *DIB) PixOffset(x, y int) int { return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*4 } func (p *DIB) Set(x, y int, c color.Color) { if !(image.Point{x, y}.In(p.Rect)) { return } i := p.PixOffset(x, y) c1 := color.RGBAModel.Convert(c).(color.RGBA) p.Pix[i+0] = c1.B p.Pix[i+1] = c1.G p.Pix[i+2] = c1.R p.Pix[i+3] = c1.A } func (p *DIB) SetDIB(x, y int, c color.RGBA) { if !(image.Point{x, y}.In(p.Rect)) { return } i := p.PixOffset(x, y) p.Pix[i+0] = c.B p.Pix[i+1] = c.G p.Pix[i+2] = c.R p.Pix[i+3] = c.A } // SubImage returns an image representing the portion of the image p visible // through r. The returned value shares pixels with the original image. func (p *DIB) SubImage(r image.Rectangle) image.Image { r = r.Intersect(p.Rect) // If r1 and r2 are Rectangles, r1.Intersect(r2) is not guaranteed to be inside // either r1 or r2 if the intersection is empty. Without explicitly checking for // this, the Pix[i:] expression below can panic. 
if r.Empty() { return &DIB{} } i := p.PixOffset(r.Min.X, r.Min.Y) return &DIB{ Pix: p.Pix[i:], Stride: p.Stride, Rect: r, } } // Opaque scans the entire image and returns whether or not it is fully opaque. func (p *DIB) Opaque() bool { if p.Rect.Empty() { return true } i0, i1 := 3, p.Rect.Dx()*4 for y := p.Rect.Min.Y; y < p.Rect.Max.Y; y++ { for i := i0; i < i1; i += 4 { if p.Pix[i] != 0xff { return false } } i0 += p.Stride i1 += p.Stride } return true } func (p *DIB) CopyRGBA(src *image.RGBA, r image.Rectangle) { // clip r against each image's bounds and move sp accordingly (see draw.clip()) sp := src.Bounds().Min orig := r.Min r = r.Intersect(p.Bounds()) r = r.Intersect(src.Bounds().Add(orig.Sub(sp))) dx := r.Min.X - orig.X dy := r.Min.Y - orig.Y (sp).X += dx (sp).Y += dy i0 := (r.Min.X - p.Rect.Min.X) * 4 i1 := (r.Max.X - p.Rect.Min.X) * 4 si0 := (sp.X - src.Rect.Min.X) * 4 yMax := r.Max.Y - p.Rect.Min.Y y := r.Min.Y - p.Rect.Min.Y sy := sp.Y - src.Rect.Min.Y for ; y != yMax; y, sy = y+1, sy+1 { dpix := p.Pix[y*p.Stride:] spix := src.Pix[sy*src.Stride:] for i, si := i0, si0; i < i1; i, si = i+4, si+4 { dpix[i+0] = spix[si+2] dpix[i+1] = spix[si+1] dpix[i+2] = spix[si+0] dpix[i+3] = spix[si+3] } } }
win/dib_windows.go
0.785432
0.571169
dib_windows.go
starcoder
package conv // UInt8ToBytes is the fastest way to convert uint8 into byte slice func UInt8ToBytes(n uint8, buf *[3]byte) []byte { if n == 0 { return digits1[0] } else if n < 10 { return digits1[n] } else if n < 100 { return digits2[n] } n = n - 100 if n < 100 { buf[0] = '1' } else { n = n - 100 buf[0] = '2' } buf[1], buf[2] = digits2[n][0], digits2[n][1] return buf[0:] } // UInt16ToBytes is the fastest way to convert uint16 into byte slice func UInt16ToBytes(n uint16, buf *[5]byte) []byte { if n == 0 { return digits1[0] } return ui16Dig(n, buf) } // UInt32ToBytes is the fastest way to convert uint32 into byte slice func UInt32ToBytes(n uint32, buf *[10]byte) []byte { if n == 0 { return digits1[0] } return ui32Dig(n, buf) } // UInt64ToBytes is the fastest way to convert uint64 into byte slice func UInt64ToBytes(n uint64, buf *[20]byte) []byte { if n == 0 { return digits1[0] } return ui64Dig(n, buf) } func ui16Dig(u uint16, buf *[5]byte) []byte { if u < 10 { return digits1[u] } else if u < 100 { return digits2[u] } pos := 5 for u >= 100 { pos -= 2 is := u % 100 u /= 100 buf[pos+1], buf[pos] = digits2[is][1], digits2[is][0] } if u < 10 { pos-- buf[pos] = digits[u] } else { pos -= 2 buf[pos+1], buf[pos] = digits2[u][1], digits2[u][0] } return buf[pos:] } func ui32Dig(u uint32, buf *[10]byte) []byte { if u < 10 { return digits1[u] } else if u < 100 { return digits2[u] } pos := 10 for u >= 100 { pos -= 2 is := u % 100 u /= 100 buf[pos+1], buf[pos] = digits2[is][1], digits2[is][0] } if u < 10 { pos-- buf[pos] = digits[u] } else { pos -= 2 buf[pos+1], buf[pos] = digits2[u][1], digits2[u][0] } return buf[pos:] } func ui64Dig(u uint64, buf *[20]byte) []byte { if u < 10 { return digits1[u] } else if u < 100 { return digits2[u] } pos := 20 for u >= 100 { pos -= 2 is := u % 100 u /= 100 buf[pos+1], buf[pos] = digits2[is][1], digits2[is][0] } if u < 10 { pos-- buf[pos] = digits[u] } else { pos -= 2 buf[pos+1], buf[pos] = digits2[u][1], digits2[u][0] } return buf[pos:] }
conv/uint.go
0.678647
0.577376
uint.go
starcoder
package objects import ( "fmt" "github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/algebra" "github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/canvas" "github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/geometry/primitives" "math" "reflect" ) //ErrorCSGNormal returns an error if csg.LocalNormalAt is ever called, only a child shape should have its local noral //called type ErrorCSGNormal int func (e ErrorCSGNormal) Error() string { return fmt.Sprintf("CSG Local normal called : Invalid use of LocalNormal method") } //Contains Check if the Shape is the provided Shape/contains the provided Shape func Contains(s primitives.Shape, testContained primitives.Shape) bool { if reflect.TypeOf(s) == reflect.TypeOf(&primitives.Group{}) { g := s.(*primitives.Group) for _, v := range g.GetShapes() { if Contains(v, testContained) { return true } } } else if reflect.TypeOf(s) == reflect.TypeOf(&CSGShape{}) { csg := s.(*CSGShape) if Contains(csg.Left(), testContained) { return true } if Contains(csg.Right(), testContained) { return true } } return s == testContained } //CSGShape is the datatype that uses the Constructive Geometry // approach to handling groups of objects type CSGShape struct { Parent primitives.Shape material *canvas.Material transform *algebra.Matrix left primitives.Shape right primitives.Shape action string } //UnionCSG initializes a CSG shape based on the union operation of two Shapes: left and right func UnionCSG(left, right primitives.Shape) *CSGShape { if left == nil { panic("Undefined left Shape in CSG operation `union`") } if right == nil { panic("Undefined right Shape in CSG operation `union`") } c := &CSGShape{Parent: nil, material: canvas.NewDefaultMaterial(), transform: algebra.IdentityMatrix(4), left: left, right: right, action: "union"} left.SetParent(c) right.SetParent(c) return c } //IntersectCSG initializes a CSG shape based on the union operation of two Shapes: left and right func IntersectCSG(left, right 
primitives.Shape) *CSGShape { if left == nil { panic("Undefined left Shape in CSG operation `intersect`") } if right == nil { panic("Undefined right Shape in CSG operation `intersect`") } c := &CSGShape{Parent: nil, material: canvas.NewDefaultMaterial(), transform: algebra.IdentityMatrix(4), left: left, right: right, action: "intersect"} left.SetParent(c) right.SetParent(c) return c } //DifferenceCSG initializes a CSG shape based on the union operation of two Shapes: left and right func DifferenceCSG(left, right primitives.Shape) *CSGShape { if left == nil { panic("Undefined left Shape in CSG operation `difference`") } if right == nil { panic("Undefined right Shape in CSG operation `difference`") } c := &CSGShape{Parent: nil, material: canvas.NewDefaultMaterial(), transform: algebra.IdentityMatrix(4), left: left, right: right, action: "difference"} left.SetParent(c) right.SetParent(c) return c } //Left returns the left shape of the CSG Shape func (s *CSGShape) Left() primitives.Shape { return s.left } //Right returns the left shape of the CSG Shape func (s *CSGShape) Right() primitives.Shape { return s.right } //primitives.Shape interface methods //GetTransform Getter for CSG Shape transform, primitives.Shape interface method func (s *CSGShape) GetTransform() *algebra.Matrix { return s.transform } //GetMaterial Getter for CSG Shape material, primitives.Shape interface method func (s *CSGShape) GetMaterial() *canvas.Material { return s.material } //GetParent Getter for CSG Parent Shape, primitives.Shape interface method func (s *CSGShape) GetParent() primitives.Shape { return s.Parent } //GetBounds Getter for CSG Shape bounding box, primitives.Shape interface method func (s *CSGShape) GetBounds() (*algebra.Vector, *algebra.Vector) { minL, maxL := s.left.GetBounds() minR, maxR := s.right.GetBounds() bL := primitives.GetBoundsTransform(minL, maxL, s.left.GetTransform()) br := primitives.GetBoundsTransform(minL, maxL, s.right.GetTransform()) minL, maxL = bL.Get() minR, 
maxR = br.Get() minX := math.Inf(1) minY := math.Inf(1) minZ := math.Inf(1) maxX := math.Inf(-1) maxY := math.Inf(-1) maxZ := math.Inf(-1) minX = math.Min(minX, minL.Get()[0]) minX = math.Min(minX, minR.Get()[0]) maxX = math.Max(maxX, maxL.Get()[0]) maxX = math.Max(maxX, maxR.Get()[0]) minY = math.Min(minY, minL.Get()[1]) minY = math.Min(minY, minR.Get()[1]) maxY = math.Max(maxY, maxL.Get()[1]) maxY = math.Max(maxY, maxR.Get()[1]) minZ = math.Min(minZ, minL.Get()[2]) minZ = math.Min(minZ, minR.Get()[2]) maxZ = math.Max(maxZ, maxL.Get()[2]) maxZ = math.Max(maxZ, maxR.Get()[2]) return algebra.NewVector(minX, minY, minZ), algebra.NewVector(maxX, maxY, maxZ) } //SetTransform Setter for CSG Shape transform, primitives.Shape interface method func (s *CSGShape) SetTransform(m *algebra.Matrix) { if len(m.Get()) != 4 || len(m.Get()[0]) != 4 { panic(algebra.ExpectedDimension(4)) } s.transform = m } //SetMaterial Setter for CSG Shape material, primitives.Shape interface method func (s *CSGShape) SetMaterial(m *canvas.Material) { s.material = m } //SetParent Setter for CSG Shape parent, primitives.Shape interface method func (s *CSGShape) SetParent(shape primitives.Shape) { s.Parent = shape } //LocalIntersect Intersect implementation for CSG Shape, Check intersections of left and right //and apply the appropriate CSG Shape action. primitives.Shape interface method. 
func (s *CSGShape) LocalIntersect(r *algebra.Ray) ([]*primitives.Intersection, bool) { xs := make([]*primitives.Intersection, 0, 0) is := primitives.NewIntersections() err := is.Intersect(s.Left(), r) if err != nil { panic(err) } err = is.Intersect(s.Right(), r) if err != nil { panic(err) } if is.Count() == 0 { return xs, false } hits := is.GetHits() refs := is.GetRef() k := len(refs.Get()) for i := 0; i < k; i++ { if min := refs.ExtractMin(); min != nil { xs = append(xs, refs.ExtractMin()) } } k = len(hits.Get()) for i := 0; i < k; i++ { if min := hits.ExtractMin(); min != nil { xs = append(xs, min) } } xs = s.filterIntersections(xs) return xs, true } //LocalNormalAt Local normal implementation for CSG Shape, should never be directly called on. Only //the children of CSG shape should be intersected. primitives.Shape interface method. func (s *CSGShape) LocalNormalAt(p *algebra.Vector, hit *primitives.Intersection) (*algebra.Vector, error) { return nil, ErrorCSGNormal(0) } // CSG Shape helpers func (s *CSGShape) filterIntersections(xs []*primitives.Intersection) []*primitives.Intersection { inl := false inr := false result := make([]*primitives.Intersection, 0, 0) for _, i := range xs { lhit := Contains(s.Left(), i.Object) if intersectionAllowed(s.action, lhit, inl, inr) { result = append(result, i) } if lhit { inl = !(inl) } else { inr = !(inr) } } return result } func intersectionAllowed(action string, lhit, inl, inr bool) bool { if action == "union" { return (lhit && (!inr)) || ((!lhit) && (!inl)) } if action == "intersect" { return (lhit && inr) || ((!lhit) && inl) } if action == "difference" { return (lhit && !inr) || (!lhit && inl) } return false }
pkg/geometry/objects/CSG.go
0.824462
0.636184
CSG.go
starcoder
package GoStats import ( "math" "sort" ) const ( // MaxFloat64 is the biggest number that can be used in Go using the float64 type. MaxFloat64 = math.MaxFloat64 // MinFloat64 is the smallest number that can be used in Go using the float64 type. MinFloat64 = -(MaxFloat64) ) // Mean of the float64 numbers. func Mean(numbers []float64) float64 { return Total(numbers) / float64(len(numbers)) } // Mode the float64 numbers. func Mode(numbers []float64) (mode float64, r int) { r = math.MinInt64 for i, n := range Repeats(numbers) { if n > r { mode = i r = n } } return } // Repeats of the float64 numbers. func Repeats(numbers []float64) (r map[float64]int) { r = make(map[float64]int) for _, n := range numbers { r[n]++ } return } // Total of the float64 numbers. func Total(numbers []float64) (total float64) { for _, n := range numbers { total += n } return } // Median of the float64 numbers. func Median(numbers []float64, sort bool) float64 { if sort { numbers = Sort(numbers) } if len(numbers) == 0 { return 0 } if Singular(float64(len(numbers))) { return numbers[len(numbers)/2] } return (numbers[len(numbers)/2] + numbers[len(numbers)/2-1]) / 2 } // Singular is to get whether a number is singular or not. func Singular(number float64) bool { n := number / 2 return float64(int(n)) != n } // Min is to get the smallest number of the float64 numbers. func Min(numbers []float64) (iMin int, min float64) { min = MaxFloat64 for i, n := range numbers { if n < min { min = n iMin = i } } return } // Max is to get the biggest number of the float64 numbers. func Max(numbers []float64) (iMax int, max float64) { max = MinFloat64 for i, n := range numbers { if n > max { max = n iMax = i } } return } // Range of the float64 numbers. func Range(numbers []float64) float64 { _, max := Max(numbers) _, min := Min(numbers) return max - min } // Variance of the float64 numbers. 
func Variance(numbers []float64) (variance float64) { mean := Mean(numbers) for _, n := range numbers { dif := Range([]float64{mean, n}) variance += dif * dif } variance /= float64(len(numbers)) return } // StandardDeviation of the float64 numbers. func StandardDeviation(numbers []float64) float64 { return math.Sqrt(Variance(numbers)) } // Sort the []float64. func Sort(numbers []float64) []float64 { sort.Float64s(numbers) return numbers }
GoStats.go
0.78964
0.559711
GoStats.go
starcoder
package webmercator import ( "errors" "fmt" "math" ) const ( RMajor = 6378137.0 RMinor = 6356752.3142 Ratio = RMinor / RMajor ) const ( SRID = 3857 EarthRadius = RMajor Deg2Rad = math.Pi / 180 Rad2Deg = 180 / math.Pi PiDiv2 = math.Pi / 2.0 PiDiv4 = math.Pi / 4.0 MinXExtent = -20026376.39 MinYExtent = -20048966.10 MaxXExtent = 20026376.39 MaxYExtent = 20048966.10 ) var Extent = [4]float64{MinXExtent, MinYExtent, MaxXExtent, MaxYExtent} var ErrCoordsRequire2Values = errors.New("Coords should have at least 2 coords") func RadToDeg(rad float64) float64 { return rad * Rad2Deg } func DegToRad(deg float64) float64 { return deg * Deg2Rad } var Eccent float64 var Com float64 func init() { Eccent = math.Sqrt(1.0 - (Ratio * Ratio)) Com = 0.5 * Eccent } func con(phi float64) float64 { v := Eccent * math.Sin(phi) return math.Pow(((1.0 - v) / (1.0 + v)), Com) } //LonToX converts from a Longitude to a X coordinate in WebMercator. func LonToX(lon float64) float64 { return RMajor * DegToRad(lon) } // LatToY converts from Latitude to a Y coordinate in WebMercator. func LatToY(lat float64) float64 { lat = math.Min(89.5, math.Max(lat, -89.5)) phi := DegToRad(lat) ts := math.Tan(0.5*((math.Pi*0.5)-phi)) / con(phi) return 0 - RMajor*math.Log(ts) } // XToLon converts from X coordinate in WebMercator to Lontitude in WGS84 func XToLon(x float64) float64 { return RadToDeg(x) / RMajor } // YToLat converts from Y coordinate in WebMercator to Latitude in WGS84 func YToLat(y float64) float64 { ts := math.Exp(-y / RMajor) phi := PiDiv2 - 2*math.Atan(ts) dphi := 1.0 i := 0 for (math.Abs(dphi) > 0.000000001) && (i < 15) { dphi = PiDiv2 - 2*math.Atan(ts*con(phi)) - phi phi += dphi i++ } return RadToDeg(phi) } // ToLonLat given a set of coordinates (x,y) it will convert them to Lon/Lat coordinates. If more then x,y is given (i.e. z, and m) they will be returned untransformed. 
func ToLonLat(c ...float64) ([]float64, error) { if len(c) < 2 { return c, fmt.Errorf("Coords should have at least 2 coords") } crds := []float64{XToLon(c[0]), YToLat(c[1])} crds = append(crds, c[2:]...) return crds, nil } // ToXY given a set of coordinates (lon,lat) it will convert them to X,Y coordinates. If more then lon/lat is given (i.e. z, and m) they will be returned untransformed. func ToXY(c ...float64) ([]float64, error) { if len(c) < 2 { return c, fmt.Errorf("Coords should have at least 2 coords") } crds := []float64{LonToX(c[0]), LatToY(c[1])} crds = append(crds, c[2:]...) return crds, nil }
maths/webmercator/main.go
0.758689
0.401131
main.go
starcoder
package lmath import ( "fmt" "math" ) const ( mat3Dim = 3 ) var ( Mat3Identity = Mat3{[9]float64{ 1, 0, 0, 0, 1, 0, 0, 0, 1}} ) type Mat3 struct { mat [9]float64 } // New Mat3 with the given values. // Row-Order. func NewMat3( m11, m12, m13, m21, m22, m23, m31, m32, m33 float64) *Mat3 { // 0 1 2 3 // 4 5 6 7 // 8 9 10 11 // 12 13 14 15 out := Mat3{} out.mat[0] = m11 out.mat[1] = m12 out.mat[2] = m13 out.mat[3] = m21 out.mat[4] = m22 out.mat[5] = m23 out.mat[6] = m31 out.mat[7] = m32 out.mat[8] = m33 return &out } // Load the matrix with 9 floats. // Specified in Row-Major order. func (this *Mat3) Load(m [9]float64) *Mat3 { this.mat = m return this } // Load the matrix with 9 floats. // Specified in Row-Major order. func (this *Mat3) Load32(m [9]float32) *Mat3 { for k, v := range m { this.mat[k] = float64(v) } return this } // Retrieve a 9 float array of all the values of the matrix. // Returned in Row-Major order. func (this Mat3) Dump() (m [9]float64) { m = this.mat return } // Retrieve a 9 float64 array of all the values of the matrix. // Returned in Col-Major order. func (this Mat3) DumpOpenGL() (m [9]float64) { m[0], m[1], m[2] = this.Col(0) m[3], m[4], m[5] = this.Col(1) m[6], m[7], m[8] = this.Col(2) return } // Retrieve a 9 float32 array of all the values of the matrix. // Returned in Col-Major order. func (this Mat3) DumpOpenGLf32() (m [9]float32) { m[0] = float32(this.mat[0]) m[1] = float32(this.mat[3]) m[2] = float32(this.mat[6]) m[3] = float32(this.mat[1]) m[4] = float32(this.mat[4]) m[5] = float32(this.mat[7]) m[6] = float32(this.mat[2]) m[7] = float32(this.mat[5]) m[8] = float32(this.mat[8]) return } // Return a copy of this matrix. // Carbon-copy of all elements func (this Mat3) Copy() Mat3 { return this } // Compare this matrix to the other. // Return true if all elements between them are the same. // Equality is measured using an epsilon (< 0.0000001). 
func (this Mat3) Eq(other Mat3) bool { for k, _ := range this.mat { if closeEq(this.mat[k], other.mat[k], epsilon) == false { return false } } return true } // Retrieve the element at row and column. // 0 indexed. // Does not do any bounds checking. func (this Mat3) Get(row, col int) float64 { return this.mat[row*mat3Dim+col] } // Set the value at the specified column and row. // 0 indexed. // Does not do any bounds checking. func (this *Mat3) Set(row, col int, value float64) *Mat3 { this.mat[row*mat3Dim+col] = value return this } // Retrieve the element at the given index assuming a linear array. // (i.e matrix[0], matrix[5]). // 0 indexed. func (this Mat3) At(index int) float64 { return this.mat[index] } // Set the element of the matrix specified at the index to the given value. // 0 indexed. // Return a pointer to the 'this' func (this *Mat3) SetAt(index int, value float64) *Mat3 { this.mat[index] = value return this } // Set the specified row of the matrix to the given x,y,z values. // 0 indexed. // Does not do bounds checking of the row. func (this *Mat3) SetRow(row int, x, y, z float64) *Mat3 { this.mat[row*mat3Dim] = x this.mat[row*mat3Dim+1] = y this.mat[row*mat3Dim+2] = z return this } // Set the specified column of the matrix to the given x,y,z values. // 0 indexed. // Does not do bounds checking on the col. func (this *Mat3) SetCol(col int, x, y, z float64) *Mat3 { this.mat[mat3Dim*0+col] = x this.mat[mat3Dim*1+col] = y this.mat[mat3Dim*2+col] = z return this } // Retrieve the x,y,z elements from the specified row. // 0 indexed. // Does not bounds check the row. func (this Mat3) Row(row int) (x, y, z float64) { x = this.mat[row*mat3Dim] y = this.mat[row*mat3Dim+1] z = this.mat[row*mat3Dim+2] return } // Retrieve the x,y,z elements from the specified column. // 0 indexed. // Does not bounds check the column. 
func (this Mat3) Col(col int) (x, y, z float64) { x = this.mat[mat3Dim*0+col] y = this.mat[mat3Dim*1+col] z = this.mat[mat3Dim*2+col] return } // Add in a constant value to all the terms fo the matrix. // Return a new matrix with the result. func (this Mat3) AddScalar(val float64) Mat3 { this.AddInScalar(val) return this } // Add in a constant value to all the terms fo the matrix. // Returns a pointer to 'this'. func (this *Mat3) AddInScalar(val float64) *Mat3 { for k, _ := range this.mat { this.mat[k] += val } return this } // Subtract in a constant value to all the terms fo the matrix. // Return a new matrix with the result. func (this Mat3) SubScalar(val float64) Mat3 { this.SubInScalar(val) return this } // Subtract in a constant value to all the terms fo the matrix. // Returns a pointer to 'this'. func (this *Mat3) SubInScalar(val float64) *Mat3 { for k, _ := range this.mat { this.mat[k] -= val } return this } // Multiplies in a constant value to all the terms fo the matrix. // Return a new matrix with the result. func (this Mat3) MultScalar(val float64) Mat3 { this.MultInScalar(val) return this } // Multiplies in a constant value to all the terms fo the matrix. // Returns a pointer to 'this'. func (this *Mat3) MultInScalar(val float64) *Mat3 { for k, _ := range this.mat { this.mat[k] *= val } return this } // Divides in a constant value to all the terms fo the matrix. // Return a new matrix with the result. // precondition: val > 0 func (this Mat3) DivScalar(val float64) Mat3 { this.DivInScalar(val) return this } // Divides in a constant value to all the terms fo the matrix. // Returns a pointer to 'this'. // precondition: val > 0 func (this *Mat3) DivInScalar(val float64) *Mat3 { for k, _ := range this.mat { this.mat[k] /= val } return this } // Adds the two matrices together ( ie. this + other). // Return a new matrix with the result. func (this Mat3) Add(other Mat3) Mat3 { this.AddIn(other) return this } // Adds the two matrices together ( ie. 
this + other). // Stores the result in this. // Returns this. func (this *Mat3) AddIn(other Mat3) *Mat3 { for k, _ := range this.mat { this.mat[k] += other.mat[k] } return this } // Subtract the two matrices together ( ie. this - other). // Return a new matrix with the result. func (this Mat3) Sub(other Mat3) Mat3 { this.SubIn(other) return this } // Subtract the two matrices together ( ie. this - other). // Stores the result in this. // Returns this. func (this *Mat3) SubIn(other Mat3) *Mat3 { for k, _ := range this.mat { this.mat[k] -= other.mat[k] } return this } // Multiply the two matrices together ( ie. this * other). // Return a new matrix with the result. func (this Mat3) Mult(other Mat3) Mat3 { this.MultIn(other) return this } // Multiplies the two matrices together ( ie. this * other). // Stores the result in this. // Returns this. func (this *Mat3) MultIn(o Mat3) *Mat3 { // 0 1 2 // 3 4 5 // 6 7 8 m := *this this.mat[0] = m.mat[0]*o.mat[0] + m.mat[1]*o.mat[3] + m.mat[2]*o.mat[6] this.mat[1] = m.mat[0]*o.mat[1] + m.mat[1]*o.mat[4] + m.mat[2]*o.mat[7] this.mat[2] = m.mat[0]*o.mat[2] + m.mat[1]*o.mat[5] + m.mat[2]*o.mat[8] this.mat[3] = m.mat[3]*o.mat[0] + m.mat[4]*o.mat[3] + m.mat[5]*o.mat[6] this.mat[4] = m.mat[3]*o.mat[1] + m.mat[4]*o.mat[4] + m.mat[5]*o.mat[7] this.mat[5] = m.mat[3]*o.mat[2] + m.mat[4]*o.mat[5] + m.mat[5]*o.mat[8] this.mat[6] = m.mat[6]*o.mat[0] + m.mat[7]*o.mat[3] + m.mat[8]*o.mat[6] this.mat[7] = m.mat[6]*o.mat[1] + m.mat[7]*o.mat[4] + m.mat[8]*o.mat[7] this.mat[8] = m.mat[6]*o.mat[2] + m.mat[7]*o.mat[5] + m.mat[8]*o.mat[8] return this } // Returns a new matrix which is transpose to this. func (this Mat3) Transpose() Mat3 { this.TransposeIn() return this } // Take the transpose of this matrix. func (this *Mat3) TransposeIn() *Mat3 { // TODO: can definitely be way more efficient // by only exchanging the column entries. 
m00, m01, m02 := this.Row(0) m10, m11, m12 := this.Row(1) m20, m21, m22 := this.Row(2) this.SetCol(0, m00, m01, m02) this.SetCol(1, m10, m11, m12) this.SetCol(2, m20, m21, m22) return this } // Get the determinant of the matrix. // Uses a straight-up Cramers-Rule implementation. func (this Mat3) Determinant() float64 { // 0 1 2 // 3 4 5 // 6 7 8 // Use Cramer's rule to calculate the determinant return det3x3( this.mat[0], this.mat[1], this.mat[2], this.mat[3], this.mat[4], this.mat[5], this.mat[6], this.mat[7], this.mat[8]) } // Returns a new matrix which is the Adjoint matrix of this. func (this Mat3) Adjoint() Mat3 { a1, a2, a3 := this.mat[0], this.mat[1], this.mat[2] b1, b2, b3 := this.mat[3], this.mat[4], this.mat[5] c1, c2, c3 := this.mat[6], this.mat[7], this.mat[8] // 0 1 2 3 a1 a2 a3 a4 // 4 5 6 7 b1 b2 b3 b4 // 8 9 10 11 c1 c2 c3 c4 // 12 13 14 15 d1 d2 d3 d4 // 0 1 2 a1 a2 a3 // 3 4 5 b1 b2 b3 // 6 7 8 c1 c2 c3 this.mat[0] = det2x2(b2, b3, c2, c3) this.mat[1] = -det2x2(b1, b3, c1, c3) this.mat[2] = det2x2(b1, b2, c1, c2) this.mat[3] = -det2x2(a2, a3, c2, c3) this.mat[4] = det2x2(a1, a3, c1, c3) this.mat[5] = -det2x2(a1, a2, c1, c2) this.mat[6] = det2x2(a2, a3, b2, b3) this.mat[7] = -det2x2(a1, a3, b1, b3) this.mat[8] = det2x2(a1, a2, b1, b2) this.TransposeIn() return this } // Returns a new matrix which is the inverse matrix of this. // The bool flag is false if an inverse does not exist. func (this Mat3) Inverse() Mat3 { // TODO: Needs further testing // Try out with rotation matrices. // The inverse of a valid rotation matrix should just be the transpose det := this.Determinant() return this.Adjoint().DivScalar(det) } // Returns true if the inverse of this matrix exists false otherwise. // Internally it checks to see if the determinant is zero. func (this Mat3) HasInverse() bool { return !closeEq(this.Determinant(), 0, epsilon) } // Sets the matrix to the identity matrix. 
func (this *Mat3) ToIdentity() *Mat3 { this.mat = [9]float64{ 1, 0, 0, 0, 1, 0, 0, 0, 1, } return this } //============================================================================== // Return true if the matrix is the identity matrix. func (this Mat3) IsIdentity() bool { iden := [9]float64{ 1, 0, 0, 0, 1, 0, 0, 0, 1, } for k, _ := range iden { if !closeEq(this.mat[k], iden[k], 0) { return false } } return true } // Check to see if the matrix is a valid rotation matrix. // The two properties it checks are // 1) Determinant() == 1 // 2) m*m.Transpose == Identity func (this Mat3) IsRotation() bool { return closeEq(this.Determinant(), 1, epsilon) && this.Mult(this.Transpose()).IsIdentity() } // Implement the Stringer interface // Prints out each row of the matrix on its own line func (this Mat3) String() string { return fmt.Sprintf("%f %f %f\n%f %f %f\n%f %f %f", this.mat[0], this.mat[1], this.mat[2], this.mat[3], this.mat[4], this.mat[5], this.mat[6], this.mat[7], this.mat[8]) } // ============================================================================= // Create a 2D translation matrix for Mat3. Overwrites all values in the matrix. func (this *Mat3) ToTranslate(x, y float64) *Mat3 { this.ToIdentity() this.Set(0, 2, x) this.Set(1, 2, y) return this } // Create a 2D scaling matrix for Mat3. Overwrites all values in the matrix. func (this *Mat3) ToScale(x, y float64) *Mat3 { this.ToIdentity() this.Set(0, 0, x) this.Set(1, 1, y) return this } // Create a 2D shearing matrix for Mat3. Overwrites all values in the matrix. 
// 0 x 0 // y 0 0 // 0 0 1 func (this *Mat3) ToShear(x, y float64) *Mat3 { this.ToIdentity() this.Set(0, 0, 0) this.Set(1, 1, 0) this.Set(0, 1, x) this.Set(1, 0, y) return this } // Create a 2D rotation matrix about the Z axis // cos -sin 0 // sin cos 0 // 0 0 1 func (this *Mat3) ToRotateZ(angle float64) *Mat3 { this.ToIdentity() this.Set(0, 0, math.Cos(angle)) this.Set(0, 1, -math.Sin(angle)) this.Set(1, 0, math.Sin(angle)) this.Set(1, 1, math.Cos(angle)) return this } //============================================================================== // Multiplies the Vec3 against the matrix ( ie. result = Matrix * Vec). // Returns a new vector with the result. func (this Mat3) MultVec3(v Vec3) (out Vec3) { // 0 1 2 // 3 4 5 // 6 7 8 out.Set( this.mat[0]*v.X+this.mat[1]*v.Y+this.mat[2]*v.Z, this.mat[3]*v.X+this.mat[4]*v.Y+this.mat[5]*v.Z, this.mat[6]*v.X+this.mat[7]*v.Y+this.mat[8]*v.Z, ) return } // ============================================================================= // Return a rotation matrix which rotates a vector about the axis [x,y,z] with // the given angle (radians). // Set this matrix as a rotation matrix from the give angle(radians) and axis. func (this *Mat3) FromAxisAngle(angle, x, y, z float64) *Mat3 { //Reference http://en.wikipedia.org/wiki/Rotation_matrix c := math.Cos(angle) s := math.Sin(angle) t := (1 - c) return this.Load([9]float64{c + x*x*t, x*y*t - z*s, x*z*t + y*s, y*x*t + z*s, c + y*y*t, y*z*t - x*s, z*x*t - y*s, z*y*t + x*s, c + z*z*t}) } // Set this as a rotation matrix using the specified pitch,yaw, and roll paramters. // Angles are in radians. func (this *Mat3) FromEuler(pitch, yaw, roll float64) *Mat3 { cx := math.Cos(pitch) sx := math.Sin(pitch) cy := math.Cos(yaw) sy := math.Sin(yaw) cz := math.Cos(roll) sz := math.Sin(roll) // This matrix was derived by multiplying each indiviudual rotation matrix // together into a single matrix. 
// note the matrices are applied in reverse order compared to the application // of the rotations. // roll yaw pitch // | cz -sz 0 | | cy 0 sy | | 1 0 0 | // | sz cz 0 |x| 0 1 0 |x| 0 cx -sx | // | 0 0 1 | | -sy 0 cy | | 0 sx cx | // first row this.mat[0] = cz * cy this.mat[1] = cz*sy*sx - sz*cx this.mat[2] = sz*sx + cz*cx*sy // second row this.mat[3] = sz * cy this.mat[4] = cz*cx + sx*sy*sz this.mat[5] = sz*sy*cx - cz*sx // third row this.mat[6] = -sy this.mat[7] = sx * cy this.mat[8] = cy * cx return this } // Return the axis (radians) and axis of this rotation matrix. // Assumes the matrix is a valid rotation matrix. func (this Mat3) AxisAngle() (angle, x, y, z float64) { // Reference // http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToAngle/ m00, m01, m02 := this.Get(0, 0), this.Get(0, 1), this.Get(0, 2) m10, m11, m12 := this.Get(1, 0), this.Get(1, 1), this.Get(1, 2) m20, m21, m22 := this.Get(2, 0), this.Get(2, 1), this.Get(2, 2) if closeEq(math.Abs(m01-m10), 0, epsilon) && closeEq(math.Abs(m02-m20), 0, epsilon) && closeEq(math.Abs(m12-m21), 0, epsilon) { // singularity check // Checking for cases in which the angle is either 0 or 180 if this.IsIdentity() { // If the angle is 0, then the rotation matrix will be the identity matrix // A 0 angle means that there is an arbitrary axis. 
angle, x, y, z = 0, 1, 0, 0 return } // Angle is 180, we need to find the axis it rotates around angle = math.Pi xx := (m00 + 1) / 2 yy := (m11 + 1) / 2 zz := (m22 + 1) / 2 xy := (m01 + m10) / 4 xz := (m02 + m20) / 4 yz := (m12 + m21) / 4 if (xx > yy) && (xx > zz) { // m[0][0] is the largest diagonal term if xx < epsilon { x = 0 y = math.Sqrt(2) / 2 z = math.Sqrt(2) / 2 } else { x = math.Sqrt(xx) y = xy / x z = xz / x } } else if yy > zz { // m[1][1] is the largest diagonal term if yy < epsilon { x = math.Sqrt(2) / 2 y = 0 z = math.Sqrt(2) / 2 } else { y = math.Sqrt(yy) x = xy / y z = yz / y } } else { // m[2][2] is the largest diagonal term so base result on this if zz < epsilon { x = math.Sqrt(2) / 2 y = math.Sqrt(2) / 2 z = 0 } else { z = math.Sqrt(zz) x = xz / z y = yz / z } } return } // no singularity; therefore calculate as normal angle = math.Acos((m00 + m11 + m22 - 1) / 2) A := (m21 - m12) B := (m02 - m20) C := (m10 - m01) x = A / math.Sqrt(A*A+B*B+C*C) y = B / math.Sqrt(A*A+B*B+C*C) z = C / math.Sqrt(A*A+B*B+C*C) return } // Return the pitch,yaw and roll values for the given rotation matrix. // The returned euler angle may not be the exact angle in which you supplied // but they can be used to make an equilvalent rotation matrix. 
func (this Mat3) Euler() (pitch, yaw, roll float64) { // The method for calculating the euler angles from a rotation matrix // uses the method described in this document // http://staff.city.ac.uk/~sbbh653/publications/euler.pdf // The rotation matrix we are using will be of the following form // cos(x) is abbreviated as cx ( similarily sin(x) = sx) // This corresponds to the pitch => yaw => roll rotation matrix // cz*cy cz*sy*sx - sz*cx sz*sx + cz*cx*sy | r11 r12 r13 // sz*cy cz*cx + sx*sy*sz sz*sy*cx - cz*sx | r21 r22 r23 // -sy sx*cy cx*cy | r31 r32 r33 // We want to determine the x,y,z angles // 1) Find the 'y' angle // This is easily accomplished because term r31 is simply '-sin(y)' // 2) There are two possible angles for y because // sin(y) == sin(pi - y) // 3) To find the value of x, we observe the following // r32/r33 = tan(x) // (sin(x)cos(y)) / (cos(x)cos(y)) // (sin(x)/cos(x)) == tan(x) by defn. // 4) We can also calculate x and z by. // x = atan2(r32,r33) == atan2( (sin(x)cos(y)) / (cos(x)cos(y)) ) // z = atan2(r21,r11) == atan2( (sin(z)cos(y)) / (cos(z)cos(y)) ) var x, y, z float64 r31 := this.Get(2, 0) if closeEq(r31, 1, epsilon) { // we are in gimbal lock z = 0 y = -math.Pi / 2 x = -z + math.Atan2(-this.Get(0, 1), -this.Get(0, 2)) } else if closeEq(r31, -1, epsilon) { // we are in gimbal lock z = 0 y = math.Pi / 2 x = z + math.Atan2(this.Get(0, 1), this.Get(0, 2)) } else { y = -math.Asin(r31) cos_y := math.Cos(y) x = math.Atan2(this.Get(2, 1)/cos_y, this.Get(2, 2)/cos_y) z = math.Atan2(this.Get(1, 0)/cos_y, this.Get(0, 0)/cos_y) // There are two alternative values for y,here is the second option // y2 := math.Pi - y // cos_y2 := math.Cos(y2) // x2 := math.Atan2(this.Get(2, 1)/cos_y2, this.Get(2, 2)/cos_y2) // z2 := math.Atan2(this.Get(1, 0)/cos_y2, this.Get(0, 0)/cos_y2) } pitch = x yaw = y roll = z return } // Creates a rotation matrix from the given quaternion. 
Return this func (this *Mat3) FromQuat(q *Quat) *Mat3 { *this = q.Mat3() return this } // Returns the quaternion represented by this rotation matrix. func (this Mat3) Quat() Quat { q := Quat{} q.FromMat3(this) return q }
lmath/mat3.go
0.807461
0.664867
mat3.go
starcoder
package export import "github.com/prometheus/client_golang/prometheus" // IngestionRealtimeExporter contains all the Prometheus metrics that are possible to gather from the Jetty service type IngestionRealtimeExporter struct { EventsThrownAway *prometheus.GaugeVec `description:"number of events rejected because they are outside the windowPeriod"` EventsUnparsable *prometheus.GaugeVec `description:"number of events rejected because the events are unparseable"` EventsDuplicate *prometheus.GaugeVec `description:"number of events rejected because the events are duplicated"` EventsProcessed *prometheus.GaugeVec `description:"number of events successfully processed per emission period"` EventsMessageGap *prometheus.GaugeVec `description:"time gap between the data time in event and current system time"` RowsOutput *prometheus.GaugeVec `description:"number of Druid rows persisted"` PersistsCount *prometheus.GaugeVec `description:"number of times persist occurred"` PersistsTime *prometheus.HistogramVec `description:"milliseconds spent doing intermediate persist"` PersistsCPU *prometheus.HistogramVec `description:"cpu time in Nanoseconds spent on doing intermediate persist"` PersistsBackPressure *prometheus.HistogramVec `description:"milliseconds spent creating persist tasks and blocking waiting for them to finish"` PersistsFailed *prometheus.GaugeVec `description:"number of persists that failed"` HandOffFailed *prometheus.GaugeVec `description:"number of handoffs that failed"` HandOffCount *prometheus.GaugeVec `description:"number of handoffs that happene"` MergeTime *prometheus.HistogramVec `description:"milliseconds spent merging intermediate segments"` MergeCPU *prometheus.HistogramVec `description:"cpu time in Nanoseconds spent on merging intermediate segments"` SinkCount *prometheus.GaugeVec `description:"number of sinks not handoffed"` } // NewIngestionRealtimeExporter returns a new Jetty exporter object func NewIngestionRealtimeExporter() *IngestionRealtimeExporter { 
re := &IngestionRealtimeExporter{ EventsThrownAway: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_events_thrown_away_count", Help: "number of events rejected because they are outside the windowPeriod", }, []string{"dataSource", "taskId"}), EventsUnparsable: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_events_unparsable_count", Help: "number of events rejected because the events are unparseable", }, []string{"dataSource", "taskId"}), EventsDuplicate: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_events_duplicate_count", Help: "number of events rejected because the events are duplicated", }, []string{"dataSource", "taskId"}), EventsProcessed: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_events_processed_count", Help: "number of events successfully processed per emission period", }, []string{"dataSource", "taskId"}), EventsMessageGap: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_events_messagegap", Help: "time gap between the data time in event and current system time", }, []string{"dataSource", "taskId"}), RowsOutput: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_rows_output_count", Help: "number of Druid rows persisted", }, []string{"dataSource", "taskId"}), PersistsCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_persists_count", Help: "number of times persist occurred", }, []string{"dataSource", "taskId"}), PersistsTime: prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_persists_time", Help: "milliseconds spent doing intermediate persist", Buckets: []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000}, }, 
[]string{"dataSource"}), PersistsCPU: prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_persists_cpu", Help: "cpu time in Nanoseconds spent on doing intermediate persist", Buckets: []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000}, }, []string{"dataSource"}), PersistsBackPressure: prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_persists_backpressure", Help: "milliseconds spent creating persist tasks and blocking waiting for them to finish", Buckets: []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000}, }, []string{"dataSource"}), PersistsFailed: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_persists_failed_count", Help: "number of times persist failed", }, []string{"dataSource", "taskId"}), HandOffFailed: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_handoff_failed_count", Help: "Number of times handoff failed", }, []string{"dataSource", "taskId"}), HandOffCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_handoff_count", Help: "number of times handoff count", }, []string{"dataSource", "taskId"}), MergeTime: prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_merge_time", Help: "milliseconds spent merging intermediate segments", Buckets: []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000}, }, []string{"dataSource"}), MergeCPU: prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "druid", Subsystem: "realtime", Name: "ingest_merge_cpu", Help: "cpu time in Nanoseconds spent on merging intermediate segments", Buckets: []float64{10, 100, 500, 1000, 2000, 3000, 5000, 7000, 10000}, }, []string{"dataSource"}), SinkCount: prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "druid", 
Subsystem: "realtime", Name: "ingest_sink_count", Help: "number of sinks not handoffed", }, []string{"dataSource", "taskId"}), } // register all the prometheus metrics prometheus.MustRegister(re.EventsThrownAway) prometheus.MustRegister(re.EventsUnparsable) prometheus.MustRegister(re.EventsDuplicate) prometheus.MustRegister(re.EventsProcessed) prometheus.MustRegister(re.EventsMessageGap) prometheus.MustRegister(re.RowsOutput) prometheus.MustRegister(re.PersistsCount) prometheus.MustRegister(re.PersistsCPU) prometheus.MustRegister(re.PersistsTime) prometheus.MustRegister(re.PersistsBackPressure) prometheus.MustRegister(re.PersistsFailed) prometheus.MustRegister(re.HandOffFailed) prometheus.MustRegister(re.HandOffCount) prometheus.MustRegister(re.MergeTime) prometheus.MustRegister(re.MergeCPU) prometheus.MustRegister(re.SinkCount) return re } // SetEventsThrownAway . func (re *IngestionRealtimeExporter) SetEventsThrownAway(labels map[string]string, val float64) { re.EventsThrownAway.With(labels).Add(val) } // SetEventsUnparsable . func (re *IngestionRealtimeExporter) SetEventsUnparsable(labels map[string]string, val float64) { re.EventsUnparsable.With(labels).Add(val) } // SetEventsDuplicate . func (re *IngestionRealtimeExporter) SetEventsDuplicate(labels map[string]string, val float64) { re.EventsDuplicate.With(labels).Add(val) } // SetEventsProcessed . func (re *IngestionRealtimeExporter) SetEventsProcessed(labels map[string]string, val float64) { re.EventsProcessed.With(labels).Add(val) } // SetEventsMessageGap . func (re *IngestionRealtimeExporter) SetEventsMessageGap(labels map[string]string, val float64) { re.EventsMessageGap.With(labels).Add(val) } // SetRowsOutput . func (re *IngestionRealtimeExporter) SetRowsOutput(labels map[string]string, val float64) { re.RowsOutput.With(labels).Add(val) } // SetPersistsCount . 
func (re *IngestionRealtimeExporter) SetPersistsCount(labels map[string]string, val float64) { re.PersistsCount.With(labels).Add(val) } // SetPersistsTime . func (re *IngestionRealtimeExporter) SetPersistsTime(source string, val float64) { re.PersistsTime.With(prometheus.Labels{"dataSource": source}).Observe(val) } // SetPersistsCPU . func (re *IngestionRealtimeExporter) SetPersistsCPU(source string, val float64) { re.PersistsCPU.With(prometheus.Labels{"dataSource": source}).Observe(val) } // SetPersistsBackPressure . func (re *IngestionRealtimeExporter) SetPersistsBackPressure(source string, val float64) { re.PersistsBackPressure.With(prometheus.Labels{"dataSource": source}).Observe(val) } // SetPersistsFailed . func (re *IngestionRealtimeExporter) SetPersistsFailed(labels map[string]string, val float64) { re.PersistsFailed.With(labels).Add(val) } // SetHandOffFailed . func (re *IngestionRealtimeExporter) SetHandOffFailed(labels map[string]string, val float64) { re.HandOffFailed.With(labels).Add(val) } // SetHandOffCount . func (re *IngestionRealtimeExporter) SetHandOffCount(labels map[string]string, val float64) { re.HandOffCount.With(labels).Add(val) } // SetMergeTime . func (re *IngestionRealtimeExporter) SetMergeTime(source string, val float64) { re.MergeTime.With(prometheus.Labels{"dataSource": source}).Observe(val) } // SetMergeCPU . func (re *IngestionRealtimeExporter) SetMergeCPU(source string, val float64) { re.MergeCPU.With(prometheus.Labels{"dataSource": source}).Observe(val) } // SetSinkCount . func (re *IngestionRealtimeExporter) SetSinkCount(labels map[string]string, val float64) { re.SinkCount.With(labels).Add(val) }
pkg/export/ingestion_realtime.go
0.712832
0.426501
ingestion_realtime.go
starcoder
package libs const gaugeQueryPartForOccurrences = ` | sum(sliceGoodCount) as totalGood, sum(sliceTotalCount) as totalCount | (totalGood/totalCount)*100 as SLO | format("%.2f%%",SLO) as sloStr | fields SLO ` const gaugeQueryPartForTimeslice = ` | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | order by _timeslice asc | if(timesliceRatio >= {{.TimesliceRatioTarget}}, 1,0) as sliceHealthy | 1 as timesliceOne | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices | (healthySlices/totalSlices)*100 as Availability | fields Availability ` const hourlyBurnQueryPartForOccurrences = ` | timeslice 60m | sum(sliceGoodCount) as tmGood, sum(sliceTotalCount) as tmCount group by _timeslice | tmGood/tmCount as tmSLO | (tmCount-tmGood) as tmBad | order by _timeslice asc | total tmCount as totalCount | ((tmBad/tmCount)/(1-{{.Target}})) as hourlyBurnRate | fields _timeslice, hourlyBurnRate | compare timeshift 1d ` const hourlyBurnQueryPartForTimeslice = ` | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | if(timesliceRatio >= {{.TimesliceRatioTarget}}, 1,0) as sliceHealthy | 1 as timesliceOne | _timeslice as _messagetime | timeslice 60m | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices by _timeslice | order by _timeslice asc | ((1 - healthySlices/totalSlices)/(1-{{.Target}})) as hourlyBurnRate | fields _timeslice, hourlyBurnRate | compare timeshift 1d ` const burnTrendQueryPartForOccurrences = ` | sum(sliceGoodCount) as totalGood, sum(sliceTotalCount) as totalCount | ((1 - totalGood/totalCount)/(1-{{.Target}}))*100 as BurnRate | fields BurnRate | compare timeshift 1d 7 | 
fields BurnRate_7d,BurnRate_6d,BurnRate_5d,BurnRate_4d,BurnRate_3d,BurnRate_2d,BurnRate_1d,BurnRate ` const burnTrendQueryPartForTimeslice = ` | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | if(timesliceRatio >= {{.TimesliceRatioTarget}}, 1,0) as sliceHealthy | 1 as timesliceOne | sum(sliceHealthy) as totalGood, sum(timesliceOne) as totalCount | ((1 - totalGood/totalCount)/(1-{{.Target}}))*100 as BurnRate | fields BurnRate | compare timeshift 1d 7 | fields BurnRate_7d,BurnRate_6d,BurnRate_5d,BurnRate_4d,BurnRate_3d,BurnRate_2d,BurnRate_1d,BurnRate ` const budgetLeftQueryPart = ` | timeslice 60m | sum(sliceGoodCount) as tmGood, sum(sliceTotalCount) as tmCount group by _timeslice | tmGood/tmCount as tmSLO | (tmCount-tmGood) as tmBad | order by _timeslice asc | accum tmBad as runningBad | total tmCount as totalCount | totalCount*(1-{{.Target}}) as errorBudget | (1-runningBad/errorBudget) as budgetRemaining | fields _timeslice, budgetRemaining | predict budgetRemaining by 1h model=ar, forecast=800 | toLong(formatDate(_timeslice, "M")) as tmIndex | toLong(formatDate(now(), "M")) as monthIndex | where tmIndex = monthIndex | if(isNull(budgetRemaining),budgetRemaining_predicted,budgetRemaining) as budgetRemaining_predicted | fields _timeslice,budgetRemaining, budgetRemaining_predicted ` const budgetLeftQueryTimeSlicesPart = ` | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | (timesliceGoodCount/timesliceTotalCount) as timesliceRatio | if(timesliceRatio >= {{.TimesliceRatioTarget}}, 1,0) as sliceHealthy | 1 as timesliceOne | _timeslice as _messagetime | timeslice 60m | sum(sliceHealthy) as 
healthySlices, sum(timesliceOne) as totalSlices by _timeslice | healthySlices/totalSlices as tmSLO | (totalSlices - healthySlices) as tmBad | order by _timeslice asc | accum tmBad as runningBad | toLong(formatDate(now(), "M")) as monthIndex | if(monthIndex == 12,0,1) as addToMonth | parseDate(format("2021-%d-01",toLong(monthIndex)), "yyyy-MM-dd") as ym | parseDate(format("2021-%d-01",toLong(monthIndex+addToMonth)), "yyyy-MM-dd") as ymNext | toLong(if(monthIndex == 12,31,(ymNext - ym)/(24*3600*1000))) as dayCount | (dayCount*24*60)*(1-{{.Target}}) as errorBudget | (1-runningBad/errorBudget) as budgetRemaining | fields _timeslice, budgetRemaining | predict budgetRemaining by 1h model=ar, forecast=800 | toLong(formatDate(_timeslice, "M")) as tmIndex | toLong(formatDate(now(), "M")) as monthIndex | where tmIndex = monthIndex | if(isNull(budgetRemaining),budgetRemaining_predicted,budgetRemaining) as budgetRemaining_predicted | fields _timeslice,budgetRemaining, budgetRemaining_predicted ` const breakDownPanelQueryOccurrences = ` | sum(sliceGoodCount) as totalGood, sum(sliceTotalCount) as totalCount {{if ne .GroupByStr ""}}by {{.GroupByStr}} {{end}} | totalCount - totalGood as totalBad | (totalGood/totalCount)*100 as Availability_Percentage | totalCount*(1-{{.Target}}) as errorBudget | (1-totalBad/errorBudget) as BudgetRemaining | BudgetRemaining*100 as %"Budget Remaining (%)" | order by BudgetRemaining asc | Availability_Percentage as %"Availability (%)" | fields {{if ne .GroupByStr ""}} {{.GroupByStr}}, {{end}} %"Availability (%)", %"Budget Remaining (%)" ` const breakDownPanelQueryTimeslices = ` | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice {{if ne .GroupByStr ""}}, {{.GroupByStr}} {{end}} | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | (timesliceGoodCount/timesliceTotalCount) as timesliceRatio | if(timesliceRatio >= 0.9, 
1,0) as sliceHealthy | 1 as timesliceOne | _timeslice as _messagetime | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices {{if ne .GroupByStr ""}}by {{.GroupByStr}} {{end}} | (healthySlices/totalSlices)*100 as Availability_Percentage | (totalSlices - healthySlices) as badSlices | toLong(formatDate(now(), "M")) as monthIndex | if(monthIndex == 12,0,1) as addToMonth | parseDate(format("2021-%d-01",toLong(monthIndex)), "yyyy-MM-dd") as ym | parseDate(format("2021-%d-01",toLong(monthIndex+addToMonth)), "yyyy-MM-dd") as ymNext | toLong(if(monthIndex == 12,31,(ymNext - ym)/(24*3600*1000))) as dayCount | (dayCount*24*60)*(1-0.95) as errorBudget | (1-badSlices/errorBudget) as BudgetRemaining | (BudgetRemaining)*errorBudget as DowntimeRemainingInMinutes | DowntimeRemainingInMinutes/60 as DowntimeRemainingInHours | DowntimeRemainingInMinutes%60 as DowntimeRemainingMinuteModulo | format("%2.0fh%2.0fm",DowntimeRemainingInHours,DowntimeRemainingMinuteModulo) as %"Budget Remaining (Time)" | BudgetRemaining*100 as %"Budget Remaining (%)" | order by BudgetRemaining asc | Availability_Percentage as %"Availability (%)" | fields {{if ne .GroupByStr ""}} {{.GroupByStr}}, {{end}} %"Availability (%)", %"Budget Remaining (%)", %"Budget Remaining (Time)" ` const pd = ` _view=slogen_tf_cloudcollector_cc_ingest_lag_v2 | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | (timesliceGoodCount/timesliceTotalCount) as timesliceRatio | if(timesliceRatio >= 0.9, 1,0) as sliceHealthy | 1 as timesliceOne | _timeslice as _messagetime | timeslice 60m | toLong(formatDate(now(), "M")) as monthIndex | if(monthIndex == 12,0,1) as addToMonth | parseDate(format("2021-%d-01",toLong(monthIndex)), "yyyy-MM-dd") as ym | parseDate(format("2021-%d-01",toLong(monthIndex+addToMonth)), "yyyy-MM-dd") as ymNext | 
toLong(if(monthIndex == 12,31,(ymNext - ym)/(24*3600*1000))) as dayCount | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices by _timeslice | predict healthySlices by 1d forecast=30 | if(isNull(healthySlices) ,healthySlices_predicted,healthySlices) as forecasted_slices | fields _timeslice,healthySlices,forecasted_slices ` /* _view=slogen_tf_tsat_v2_anomaly_compute_delay_v2 | timeslice 1m | fillmissing timeslice(1m) | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | if(timesliceRatio >= 0.9, 1,0) as sliceHealthy | _timeslice as _messagetime | 1 as timesliceOne | timeslice 60m | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices by _timeslice | healthySlices/totalSlices as tmSLO | (totalSlices - healthySlices) as badSlices | order by _timeslice asc | accum badSlices as runningBad | toLong(formatDate(now(), "M")) as monthIndex | if(monthIndex == 12,0,1) as addToMonth | parseDate(format("2021-%d-01",toLong(monthIndex)), "yyyy-MM-dd") as ym | parseDate(format("2021-%d-01",toLong(monthIndex+addToMonth)), "yyyy-MM-dd") as ymNext | toLong(if(monthIndex == 12,31,(ymNext - ym)/(24*3600*1000))) as dayCount | (dayCount*24*60)*(1-0.8) as errorBudget | (1-runningBad/errorBudget) as budgetRemaining | fields _timeslice, budgetRemaining _view=slogen_tf_cloudcollector_cc_ingest_lag_v2 | timeslice 1m | sum(sliceGoodCount) as timesliceGoodCount, sum(sliceTotalCount) as timesliceTotalCount by _timeslice | fillmissing timeslice(1m) | if(timesliceTotalCount ==0, 1,(timesliceGoodCount/timesliceTotalCount)) as timesliceRatio | (timesliceGoodCount/timesliceTotalCount) as timesliceRatio | if(timesliceRatio >= 0.9, 1,0) as sliceHealthy | 1 as timesliceOne | _timeslice as _messagetime | timeslice 60m | sum(sliceHealthy) as healthySlices, sum(timesliceOne) as totalSlices by _timeslice | predict healthySlices by 1d 
forecast=30 | toLong(formatDate(now(), "M")) as monthIndex | toLong(formatDate(now(), "M")) as dayIndex | if(monthIndex == 12,0,1) as addToMonth | parseDate(format("2021-%d-01",toLong(monthIndex)), "yyyy-MM-dd") as ym | parseDate(format("2021-%d-01",toLong(monthIndex+addToMonth)), "yyyy-MM-dd") as ymNext | toLong(if(monthIndex == 12,31,(ymNext - ym)/(24*3600*1000))) as dayCount | if(isNull(healthySlices) ,healthySlices_predicted,healthySlices) as forecasted_slices | formatDate(_timeslice,"dd") as m | where m = dayCount */
libs/queries.go
0.528047
0.469885
queries.go
starcoder
package chunk import ( "bytes" "sort" "github.com/pingcap/tidb/parser/mysql" "github.com/pingcap/tidb/types" "github.com/pingcap/tidb/types/json" ) // CompareFunc is a function to compare the two values in Row, the two columns must have the same type. type CompareFunc = func(l Row, lCol int, r Row, rCol int) int // GetCompareFunc gets a compare function for the field type. func GetCompareFunc(tp *types.FieldType) CompareFunc { switch tp.GetType() { case mysql.TypeTiny, mysql.TypeShort, mysql.TypeInt24, mysql.TypeLong, mysql.TypeLonglong, mysql.TypeYear: if mysql.HasUnsignedFlag(tp.GetFlag()) { return cmpUint64 } return cmpInt64 case mysql.TypeFloat: return cmpFloat32 case mysql.TypeDouble: return cmpFloat64 case mysql.TypeString, mysql.TypeVarString, mysql.TypeVarchar, mysql.TypeBlob, mysql.TypeTinyBlob, mysql.TypeMediumBlob, mysql.TypeLongBlob: return genCmpStringFunc(tp.GetCollate()) case mysql.TypeDate, mysql.TypeDatetime, mysql.TypeTimestamp: return cmpTime case mysql.TypeDuration: return cmpDuration case mysql.TypeNewDecimal: return cmpMyDecimal case mysql.TypeSet, mysql.TypeEnum: return cmpNameValue case mysql.TypeBit: return cmpBit case mysql.TypeJSON: return cmpJSON } return nil } func cmpNull(lNull, rNull bool) int { if lNull && rNull { return 0 } if lNull { return -1 } return 1 } func cmpInt64(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } return types.CompareInt64(l.GetInt64(lCol), r.GetInt64(rCol)) } func cmpUint64(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } return types.CompareUint64(l.GetUint64(lCol), r.GetUint64(rCol)) } func genCmpStringFunc(collation string) func(l Row, lCol int, r Row, rCol int) int { return func(l Row, lCol int, r Row, rCol int) int { return cmpStringWithCollationInfo(l, lCol, r, rCol, collation) } } func cmpStringWithCollationInfo(l Row, lCol int, 
r Row, rCol int, collation string) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } return types.CompareString(l.GetString(lCol), r.GetString(rCol), collation) } func cmpFloat32(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } return types.CompareFloat64(float64(l.GetFloat32(lCol)), float64(r.GetFloat32(rCol))) } func cmpFloat64(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } return types.CompareFloat64(l.GetFloat64(lCol), r.GetFloat64(rCol)) } func cmpMyDecimal(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } lDec, rDec := l.GetMyDecimal(lCol), r.GetMyDecimal(rCol) return lDec.Compare(rDec) } func cmpTime(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } lTime, rTime := l.GetTime(lCol), r.GetTime(rCol) return lTime.Compare(rTime) } func cmpDuration(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } lDur, rDur := l.GetDuration(lCol, 0).Duration, r.GetDuration(rCol, 0).Duration return types.CompareInt64(int64(lDur), int64(rDur)) } func cmpNameValue(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } _, lVal := l.getNameValue(lCol) _, rVal := r.getNameValue(rCol) return types.CompareUint64(lVal, rVal) } func cmpBit(l Row, lCol int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } lBit := types.BinaryLiteral(l.GetBytes(lCol)) rBit := types.BinaryLiteral(r.GetBytes(rCol)) return lBit.Compare(rBit) } func cmpJSON(l Row, lCol 
int, r Row, rCol int) int { lNull, rNull := l.IsNull(lCol), r.IsNull(rCol) if lNull || rNull { return cmpNull(lNull, rNull) } lJ, rJ := l.GetJSON(lCol), r.GetJSON(rCol) return json.CompareBinary(lJ, rJ) } // Compare compares the value with ad. // We assume that the collation information of the column is the same with the datum. func Compare(row Row, colIdx int, ad *types.Datum) int { switch ad.Kind() { case types.KindNull: if row.IsNull(colIdx) { return 0 } return 1 case types.KindMinNotNull: if row.IsNull(colIdx) { return -1 } return 1 case types.KindMaxValue: return -1 case types.KindInt64: return types.CompareInt64(row.GetInt64(colIdx), ad.GetInt64()) case types.KindUint64: return types.CompareUint64(row.GetUint64(colIdx), ad.GetUint64()) case types.KindFloat32: return types.CompareFloat64(float64(row.GetFloat32(colIdx)), float64(ad.GetFloat32())) case types.KindFloat64: return types.CompareFloat64(row.GetFloat64(colIdx), ad.GetFloat64()) case types.KindString: return types.CompareString(row.GetString(colIdx), ad.GetString(), ad.Collation()) case types.KindBytes, types.KindBinaryLiteral, types.KindMysqlBit: return bytes.Compare(row.GetBytes(colIdx), ad.GetBytes()) case types.KindMysqlDecimal: l, r := row.GetMyDecimal(colIdx), ad.GetMysqlDecimal() return l.Compare(r) case types.KindMysqlDuration: l, r := row.GetDuration(colIdx, 0).Duration, ad.GetMysqlDuration().Duration return types.CompareInt64(int64(l), int64(r)) case types.KindMysqlEnum: l, r := row.GetEnum(colIdx).Value, ad.GetMysqlEnum().Value return types.CompareUint64(l, r) case types.KindMysqlSet: l, r := row.GetSet(colIdx).Value, ad.GetMysqlSet().Value return types.CompareUint64(l, r) case types.KindMysqlJSON: l, r := row.GetJSON(colIdx), ad.GetMysqlJSON() return json.CompareBinary(l, r) case types.KindMysqlTime: l, r := row.GetTime(colIdx), ad.GetMysqlTime() return l.Compare(r) default: return 0 } } // LowerBound searches on the non-decreasing Column colIdx, // returns the smallest index i such that 
the value at row i is not less than `d`. func (c *Chunk) LowerBound(colIdx int, d *types.Datum) (index int, match bool) { index = sort.Search(c.NumRows(), func(i int) bool { cmp := Compare(c.GetRow(i), colIdx, d) if cmp == 0 { match = true } return cmp >= 0 }) return } // UpperBound searches on the non-decreasing Column colIdx, // returns the smallest index i such that the value at row i is larger than `d`. func (c *Chunk) UpperBound(colIdx int, d *types.Datum) int { return sort.Search(c.NumRows(), func(i int) bool { return Compare(c.GetRow(i), colIdx, d) > 0 }) }
util/chunk/compare.go
0.517327
0.406509
compare.go
starcoder
package bn256 import ( "math/big" ) // twistPoint implements the elliptic curve y²=x³+3/ξ over GF(p²). Points are // kept in Jacobian form and t=z² when valid. The group G₂ is the set of // n-torsion points of this curve over GF(p²) (where n = Order) type twistPoint struct { x, y, z, t gfP2 } var twistB = &gfP2{ gfP{0x38e7ecccd1dcff67, 0x65f0b37d93ce0d3e, 0xd749d0dd22ac00aa, 0x0141b9ce4a688d4d}, gfP{0x3bf938e377b802a8, 0x020b1b273633535d, 0x26b7edf049755260, 0x2514c6324384a86d}, } // twistGen is the generator of group G₂. var twistGen = &twistPoint{ gfP2{ gfP{0xafb4737da84c6140, 0x6043dd5a5802d8c4, 0x09e950fc52a02f86, 0x14fef0833aea7b6b}, gfP{0x8e83b5d102bc2026, 0xdceb1935497b0172, 0xfbb8264797811adf, 0x19573841af96503b}, }, gfP2{ gfP{0x64095b56c71856ee, 0xdc57f922327d3cbb, 0x55f935be33351076, 0x0da4a0e693fd6482}, gfP{0x619dfa9d886be9f6, 0xfe7fd297f59e9b78, 0xff9e1a62231b7dfe, 0x28fd7eebae9e4206}, }, gfP2{*newGFp(0), *newGFp(1)}, gfP2{*newGFp(0), *newGFp(1)}, } func (c *twistPoint) String() string { c.MakeAffine() x, y := gfP2Decode(&c.x), gfP2Decode(&c.y) return "(" + x.String() + ", " + y.String() + ")" } func (c *twistPoint) Set(a *twistPoint) { c.x.Set(&a.x) c.y.Set(&a.y) c.z.Set(&a.z) c.t.Set(&a.t) } // IsOnCurve returns true iff c is on the curve. func (c *twistPoint) IsOnCurve() bool { c.MakeAffine() if c.IsInfinity() { return true } y2, x3 := &gfP2{}, &gfP2{} y2.Square(&c.y) x3.Square(&c.x).Mul(x3, &c.x).Add(x3, twistB) if *y2 != *x3 { return false } cneg := &twistPoint{} cneg.Mul(c, Order) return cneg.z.IsZero() } func (c *twistPoint) SetInfinity() { c.x.SetZero() c.y.SetOne() c.z.SetZero() c.t.SetZero() } func (c *twistPoint) IsInfinity() bool { return c.z.IsZero() } func (c *twistPoint) Add(a, b *twistPoint) { // For additional comments, see the same function in curve.go. 
if a.IsInfinity() { c.Set(b) return } if b.IsInfinity() { c.Set(a) return } // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/addition/add-2007-bl.op3 z12 := (&gfP2{}).Square(&a.z) z22 := (&gfP2{}).Square(&b.z) u1 := (&gfP2{}).Mul(&a.x, z22) u2 := (&gfP2{}).Mul(&b.x, z12) t := (&gfP2{}).Mul(&b.z, z22) s1 := (&gfP2{}).Mul(&a.y, t) t.Mul(&a.z, z12) s2 := (&gfP2{}).Mul(&b.y, t) h := (&gfP2{}).Sub(u2, u1) xEqual := h.IsZero() t.Add(h, h) i := (&gfP2{}).Square(t) j := (&gfP2{}).Mul(h, i) t.Sub(s2, s1) yEqual := t.IsZero() if xEqual && yEqual { c.Double(a) return } r := (&gfP2{}).Add(t, t) v := (&gfP2{}).Mul(u1, i) t4 := (&gfP2{}).Square(r) t.Add(v, v) t6 := (&gfP2{}).Sub(t4, j) c.x.Sub(t6, t) t.Sub(v, &c.x) // t7 t4.Mul(s1, j) // t8 t6.Add(t4, t4) // t9 t4.Mul(r, t) // t10 c.y.Sub(t4, t6) t.Add(&a.z, &b.z) // t11 t4.Square(t) // t12 t.Sub(t4, z12) // t13 t4.Sub(t, z22) // t14 c.z.Mul(t4, h) } func (c *twistPoint) Double(a *twistPoint) { // See http://hyperelliptic.org/EFD/g1p/auto-code/shortw/jacobian-0/doubling/dbl-2009-l.op3 A := (&gfP2{}).Square(&a.x) B := (&gfP2{}).Square(&a.y) C := (&gfP2{}).Square(B) t := (&gfP2{}).Add(&a.x, B) t2 := (&gfP2{}).Square(t) t.Sub(t2, A) t2.Sub(t, C) d := (&gfP2{}).Add(t2, t2) t.Add(A, A) e := (&gfP2{}).Add(t, A) f := (&gfP2{}).Square(e) t.Add(d, d) c.x.Sub(f, t) t.Add(C, C) t2.Add(t, t) t.Add(t2, t2) c.y.Sub(d, &c.x) t2.Mul(e, &c.y) c.y.Sub(t2, t) t.Mul(&a.y, &a.z) c.z.Add(t, t) } func (c *twistPoint) Mul(a *twistPoint, scalar *big.Int) { sum, t := &twistPoint{}, &twistPoint{} for i := scalar.BitLen(); i >= 0; i-- { t.Double(sum) if scalar.Bit(i) != 0 { sum.Add(t, a) } else { sum.Set(t) } } c.Set(sum) } func (c *twistPoint) MakeAffine() { if c.z.IsOne() { return } else if c.z.IsZero() { c.x.SetZero() c.y.SetOne() c.t.SetZero() return } zInv := (&gfP2{}).Invert(&c.z) t := (&gfP2{}).Mul(&c.y, zInv) zInv2 := (&gfP2{}).Square(zInv) c.y.Mul(t, zInv2) t.Mul(&c.x, zInv2) c.x.Set(t) c.z.SetOne() c.t.SetOne() } func (c 
*twistPoint) Neg(a *twistPoint) { c.x.Set(&a.x) c.y.Neg(&a.y) c.z.Set(&a.z) c.t.SetZero() } // Clone makes a hard copy of the point func (c *twistPoint) Clone() *twistPoint { n := &twistPoint{ x: c.x.Clone(), y: c.y.Clone(), z: c.z.Clone(), t: c.t.Clone(), } return n }
ioporaclenode/internal/pkg/kyber/pairing/bn256/twist.go
0.756987
0.495667
twist.go
starcoder
package client import ( "encoding/json" ) // UiNodeImageAttributes struct for UiNodeImageAttributes type UiNodeImageAttributes struct { // Height of the image Height int64 `json:"height"` // A unique identifier Id string `json:"id"` NodeType string `json:"node_type"` // The image's source URL. format: uri Src string `json:"src"` // Width of the image Width int64 `json:"width"` } // NewUiNodeImageAttributes instantiates a new UiNodeImageAttributes object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewUiNodeImageAttributes(height int64, id string, nodeType string, src string, width int64) *UiNodeImageAttributes { this := UiNodeImageAttributes{} this.Height = height this.Id = id this.NodeType = nodeType this.Src = src this.Width = width return &this } // NewUiNodeImageAttributesWithDefaults instantiates a new UiNodeImageAttributes object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewUiNodeImageAttributesWithDefaults() *UiNodeImageAttributes { this := UiNodeImageAttributes{} return &this } // GetHeight returns the Height field value func (o *UiNodeImageAttributes) GetHeight() int64 { if o == nil { var ret int64 return ret } return o.Height } // GetHeightOk returns a tuple with the Height field value // and a boolean to check if the value has been set. 
func (o *UiNodeImageAttributes) GetHeightOk() (*int64, bool) { if o == nil { return nil, false } return &o.Height, true } // SetHeight sets field value func (o *UiNodeImageAttributes) SetHeight(v int64) { o.Height = v } // GetId returns the Id field value func (o *UiNodeImageAttributes) GetId() string { if o == nil { var ret string return ret } return o.Id } // GetIdOk returns a tuple with the Id field value // and a boolean to check if the value has been set. func (o *UiNodeImageAttributes) GetIdOk() (*string, bool) { if o == nil { return nil, false } return &o.Id, true } // SetId sets field value func (o *UiNodeImageAttributes) SetId(v string) { o.Id = v } // GetNodeType returns the NodeType field value func (o *UiNodeImageAttributes) GetNodeType() string { if o == nil { var ret string return ret } return o.NodeType } // GetNodeTypeOk returns a tuple with the NodeType field value // and a boolean to check if the value has been set. func (o *UiNodeImageAttributes) GetNodeTypeOk() (*string, bool) { if o == nil { return nil, false } return &o.NodeType, true } // SetNodeType sets field value func (o *UiNodeImageAttributes) SetNodeType(v string) { o.NodeType = v } // GetSrc returns the Src field value func (o *UiNodeImageAttributes) GetSrc() string { if o == nil { var ret string return ret } return o.Src } // GetSrcOk returns a tuple with the Src field value // and a boolean to check if the value has been set. func (o *UiNodeImageAttributes) GetSrcOk() (*string, bool) { if o == nil { return nil, false } return &o.Src, true } // SetSrc sets field value func (o *UiNodeImageAttributes) SetSrc(v string) { o.Src = v } // GetWidth returns the Width field value func (o *UiNodeImageAttributes) GetWidth() int64 { if o == nil { var ret int64 return ret } return o.Width } // GetWidthOk returns a tuple with the Width field value // and a boolean to check if the value has been set. 
func (o *UiNodeImageAttributes) GetWidthOk() (*int64, bool) { if o == nil { return nil, false } return &o.Width, true } // SetWidth sets field value func (o *UiNodeImageAttributes) SetWidth(v int64) { o.Width = v } func (o UiNodeImageAttributes) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if true { toSerialize["height"] = o.Height } if true { toSerialize["id"] = o.Id } if true { toSerialize["node_type"] = o.NodeType } if true { toSerialize["src"] = o.Src } if true { toSerialize["width"] = o.Width } return json.Marshal(toSerialize) } type NullableUiNodeImageAttributes struct { value *UiNodeImageAttributes isSet bool } func (v NullableUiNodeImageAttributes) Get() *UiNodeImageAttributes { return v.value } func (v *NullableUiNodeImageAttributes) Set(val *UiNodeImageAttributes) { v.value = val v.isSet = true } func (v NullableUiNodeImageAttributes) IsSet() bool { return v.isSet } func (v *NullableUiNodeImageAttributes) Unset() { v.value = nil v.isSet = false } func NewNullableUiNodeImageAttributes(val *UiNodeImageAttributes) *NullableUiNodeImageAttributes { return &NullableUiNodeImageAttributes{value: val, isSet: true} } func (v NullableUiNodeImageAttributes) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableUiNodeImageAttributes) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
internal/httpclient/model_ui_node_image_attributes.go
0.756987
0.412353
model_ui_node_image_attributes.go
starcoder
package iso20022 // Set of elements providing further details on the account statement. type AccountStatement1 struct { // Unique and unambiguous identification of the account report, assigned by the account servicer. Identification *Max35Text `xml:"Id"` // Sequential number of the report, assigned by the account servicer. It is increased incrementally for each report sent electronically. ElectronicSequenceNumber *Number `xml:"ElctrncSeqNb,omitempty"` // Legal sequential number of the report, assigned by the account servicer. It is increased incrementally for each report sent. // // Usage : in those scenarios where eg a paper statement is a legal requirement, the paper statement may have a different numbering than the electronic sequential number. Paper statements can for instance only be sent if movement on the account has taken place, whereas electronic statements can be sent eg each day, regardless of whether movements have taken place or not. LegalSequenceNumber *Number `xml:"LglSeqNb,omitempty"` // Date and time at which the report was created. CreationDateTime *ISODateTime `xml:"CreDtTm"` // Range of time between the start date and the end date for which the account statement is issued. FromToDate *DateTimePeriodDetails `xml:"FrToDt,omitempty"` // Specifies if this document is a copy, a duplicate, or a duplicate of a copy. CopyDuplicateIndicator *CopyDuplicate1Code `xml:"CpyDplctInd,omitempty"` // Business relationship between two entities; one entity is the account owner, the other entity is the account servicer. Account *CashAccount13 `xml:"Acct"` // Identifies the parent account of the reported account. RelatedAccount *CashAccount7 `xml:"RltdAcct,omitempty"` // Provides general interest information that applies to the account at a particular moment in time. Interest []*AccountInterest1 `xml:"Intrst,omitempty"` // Set of elements defining the balance(s). Balance []*CashBalance2 `xml:"Bal"` // Set of element providing summary information on entries. 
TransactionsSummary *TotalTransactions1 `xml:"TxsSummry,omitempty"` // Specifies the elements of an entry in the statement. // // Usage: At least one reference must be provided to identify the entry and its underlying transaction(s). Entry []*StatementEntry1 `xml:"Ntry,omitempty"` // Further details on the account statement. AdditionalStatementInformation *Max500Text `xml:"AddtlStmtInf,omitempty"` } func (a *AccountStatement1) SetIdentification(value string) { a.Identification = (*Max35Text)(&value) } func (a *AccountStatement1) SetElectronicSequenceNumber(value string) { a.ElectronicSequenceNumber = (*Number)(&value) } func (a *AccountStatement1) SetLegalSequenceNumber(value string) { a.LegalSequenceNumber = (*Number)(&value) } func (a *AccountStatement1) SetCreationDateTime(value string) { a.CreationDateTime = (*ISODateTime)(&value) } func (a *AccountStatement1) AddFromToDate() *DateTimePeriodDetails { a.FromToDate = new(DateTimePeriodDetails) return a.FromToDate } func (a *AccountStatement1) SetCopyDuplicateIndicator(value string) { a.CopyDuplicateIndicator = (*CopyDuplicate1Code)(&value) } func (a *AccountStatement1) AddAccount() *CashAccount13 { a.Account = new(CashAccount13) return a.Account } func (a *AccountStatement1) AddRelatedAccount() *CashAccount7 { a.RelatedAccount = new(CashAccount7) return a.RelatedAccount } func (a *AccountStatement1) AddInterest() *AccountInterest1 { newValue := new (AccountInterest1) a.Interest = append(a.Interest, newValue) return newValue } func (a *AccountStatement1) AddBalance() *CashBalance2 { newValue := new (CashBalance2) a.Balance = append(a.Balance, newValue) return newValue } func (a *AccountStatement1) AddTransactionsSummary() *TotalTransactions1 { a.TransactionsSummary = new(TotalTransactions1) return a.TransactionsSummary } func (a *AccountStatement1) AddEntry() *StatementEntry1 { newValue := new (StatementEntry1) a.Entry = append(a.Entry, newValue) return newValue } func (a *AccountStatement1) 
SetAdditionalStatementInformation(value string) { a.AdditionalStatementInformation = (*Max500Text)(&value) }
AccountStatement1.go
0.771069
0.429788
AccountStatement1.go
starcoder
package model import ( "math" ) // Rect ... type Rect struct { A *Point B *Point C *Point D *Point } // Polygon ... type Polygon struct { Points []*Point } // RectCollider ... type RectCollider struct { ID uint8 Rect *Rect Pivot *Point Look *Point Turret *Point Dir *Point Velocity float32 Rotation float32 LastRotation float32 TurretRotation float32 TurretLastRotation float32 ForwardSpeed float32 RotationSpeed float32 CollisionFront bool CollisionBack bool } // NewRectCollider creates a new rectangle collider func NewRectCollider(x float32, y float32, width float32, depth float32) *RectCollider { return &RectCollider{ Pivot: &Point{ X: x, Y: y, }, Look: &Point{ X: x, Y: y + 2, }, Turret: &Point{ X: x, Y: y + 3, }, Dir: &Point{ X: 0, Y: 0, }, Rotation: 0, LastRotation: 0, TurretRotation: 0, TurretLastRotation: 0, ForwardSpeed: 15, RotationSpeed: 1.5, CollisionBack: false, CollisionFront: false, Rect: &Rect{ A: &Point{ X: x - (width / 2), Y: y + (depth / 2), }, B: &Point{ X: x + (width / 2), Y: y + (depth / 2), }, C: &Point{ X: x + (width / 2), Y: y - (depth / 2), }, D: &Point{ X: x - (width / 2), Y: y - (depth / 2), }, }, } } // CalcDirection calculates the direction vector of the collider func (r *RectCollider) CalcDirection() { r.Dir.X = r.Look.X - r.Pivot.X r.Dir.Y = r.Look.Y - r.Pivot.Y normalize(r.Dir) } // Rotate applies a rotation to all points of the collider func (r *RectCollider) Rotate(angle float32) { r.rotateRectPoint(angle, r.Rect.A) r.rotateRectPoint(angle, r.Rect.B) r.rotateRectPoint(angle, r.Rect.C) r.rotateRectPoint(angle, r.Rect.D) r.rotateRectPoint(angle, r.Look) r.rotateRectPoint(angle, r.Turret) } func (r *RectCollider) rotateRectPoint(theta float32, p *Point) { sinTheta := float32(math.Sin(float64(theta))) cosTheta := float32(math.Cos(float64(theta))) // point to origin p.X -= r.Pivot.X p.Y -= r.Pivot.Y // rotation x := p.X*cosTheta - p.Y*sinTheta y := p.X*sinTheta + p.Y*cosTheta // point back to original position p.X = x + r.Pivot.X p.Y = y + 
r.Pivot.Y } // ChangePosition of Collider func (r *RectCollider) ChangePosition(posX, posY float32) { dX := posX - r.Pivot.X dY := posY - r.Pivot.Y r.Pivot.X = posX r.Pivot.Y = posY r.Look.X += dX r.Look.Y += dY r.Turret.X += dX r.Turret.Y += dY r.Rect.A.X += dX r.Rect.A.Y += dY r.Rect.B.X += dX r.Rect.B.Y += dY r.Rect.C.X += dX r.Rect.C.Y += dY r.Rect.D.X += dX r.Rect.D.Y += dY } func (r *RectCollider) getPolygon() Polygon { return Polygon{ Points: []*Point{r.Rect.A, r.Rect.B, r.Rect.C, r.Rect.D}, } } func (r *RectCollider) collisionPolygon(otherPolygon Polygon) bool { if r.doPolygonsIntersect(r.getPolygon(), otherPolygon) { return true } return false } func (r *RectCollider) collisionFrontRect(other RectCollider) { otherPolygon := Polygon{ Points: []*Point{other.Rect.A, other.Rect.B, other.Rect.C, other.Rect.D}, } r.collisionFront(otherPolygon) } func (r *RectCollider) collisionBackRect(other RectCollider) { otherPolygon := Polygon{ Points: []*Point{other.Rect.A, other.Rect.B, other.Rect.C, other.Rect.D}, } r.collisionBack(otherPolygon) } func (r *RectCollider) collisionFront(otherPolygon Polygon) { frontPolygon := Polygon{ Points: []*Point{r.Rect.A, r.Rect.B, r.Pivot}, } if r.doPolygonsIntersect(frontPolygon, otherPolygon) { r.CollisionFront = true return } r.CollisionFront = false } func (r *RectCollider) collisionBack(otherPolygon Polygon) { backPolygon := Polygon{ Points: []*Point{r.Rect.C, r.Rect.D, r.Pivot}, } if r.doPolygonsIntersect(backPolygon, otherPolygon) { r.CollisionBack = true return } r.CollisionBack = false } func (r *RectCollider) doPolygonsIntersect(a Polygon, b Polygon) bool { return doPolygonsIntersect(a, b) } func doPolygonsIntersect(a Polygon, b Polygon) bool { for _, polygon := range [2]Polygon{a, b} { for i1 := 0; i1 < len(polygon.Points); i1++ { i2 := (i1 + 1) % len(polygon.Points) p1 := polygon.Points[i1] p2 := polygon.Points[i2] normal := Point{ X: p2.Y - p1.Y, Y: p1.X - p2.X, } var minA, maxA *float32 for _, p := range a.Points { 
projected := normal.X*p.X + normal.Y*p.Y if minA == nil || projected < *minA { minA = &projected } if maxA == nil || projected > *maxA { maxA = &projected } } var minB, maxB *float32 for _, p := range b.Points { projected := normal.X*p.X + normal.Y*p.Y if minB == nil || projected < *minB { minB = &projected } if maxB == nil || projected > *maxB { maxB = &projected } } if *maxA < *minB || *maxB < *minA { return false } } } return true }
model/rect_collider.go
0.799403
0.671995
rect_collider.go
starcoder
package predicate import "strings" // Transformation captures one transformation step in the predicate evaluation // chain, with a `Description` and an actual transformation function `Func`. type Transformation struct { Description string Func TransformFunc } // TransformFunc is the function type for use in a `Transformation`. type TransformFunc func( value interface{}) ( result interface{}, ctx []ContextValue, err error) // Predicate captures a complete predicate chain with `Transformations`, a // `Description` and an actual evaluation function `Func`. type Predicate struct { Transformations []Transformation Description string Func PredicateFunc } // PredicateFunc is the function type for use in a `Predicate`. type PredicateFunc func( value interface{}) ( success bool, ctx []ContextValue, err error) // FormatDescription return a formatted description of the full predicate chain, // using the `value` string to represent the input value. func (p *Predicate) FormatDescription(value string) string { var s = p.Description for i := len(p.Transformations) - 1; i >= 0; i-- { tr := p.Transformations[i] s = strings.Replace(s, "{}", tr.Description, -1) } return strings.Replace(s, "{}", value, -1) } // Evaluate evaluates the full predicate chain on the given `value`, and returns // a `success` flag and, upon failure, a `context` containing all the relevant // values captured during evaluation. func (p *Predicate) Evaluate(value interface{}) (success bool, context []ContextValue) { context = []ContextValue{ {"expected", p.FormatDescription("value"), true}, {"value", value, false}, } for _, tr := range p.Transformations { r, ctx, err := tr.Func(value) context = append(context, ctx...) 
if err != nil { context = append(context, ContextValue{"error", err, true}) return } value = r } success, ctx, err := p.Func(value) for _, v := range ctx { if v.Name != "expected" && v.Name != "value" { context = append(context, v) } } if err != nil { context = append(context, ContextValue{"error", err, true}) success = false } return } // RegisterTransformation appends the given transformation to the list of // transformations attached to the predicate. func (p *Predicate) RegisterTransformation(desc string, f TransformFunc) { p.Transformations = append(p.Transformations, Transformation{ Description: desc, Func: f, }) } // RegisterPredicate sets the predicate evaluation function and description for // the current predicate. func (p *Predicate) RegisterPredicate(desc string, f PredicateFunc) { if p.Func != nil { panic("RegisterPredicate() should only be called once per predicate") } p.Description = desc p.Func = f }
pkg/utils/predicate/predicate.go
0.846815
0.420957
predicate.go
starcoder
package interpreter import ( "fmt" "sort" "strings" "github.com/aws/aws-sdk-go/service/dynamodb" ) // UpdaterFunc function used emule to UpdateItem expressions type UpdaterFunc func(map[string]*dynamodb.AttributeValue, map[string]*dynamodb.AttributeValue) // MatcherFunc function used to filter data type MatcherFunc func(map[string]*dynamodb.AttributeValue, map[string]*dynamodb.AttributeValue) bool // Native simple interpreter using pure go functions type Native struct { filterExpressions map[string]MatcherFunc keyExpressions map[string]MatcherFunc writeCondExpressions map[string]MatcherFunc updateExpressions map[string]UpdaterFunc } // NewNativeInterpreter returns a new native interpreter func NewNativeInterpreter() *Native { return &Native{ filterExpressions: map[string]MatcherFunc{}, keyExpressions: map[string]MatcherFunc{}, writeCondExpressions: map[string]MatcherFunc{}, updateExpressions: map[string]UpdaterFunc{}, } } // Match evalute the item with given expression and attributes func (ni *Native) Match(input MatchInput) (bool, error) { matcher, err := ni.getMatcher(input.TableName, input.Expression, input.ExpressionType) if err != nil { return false, err } return matcher(input.Item, input.Attributes), nil } // Update change the item with given expression and attributes func (ni *Native) Update(input UpdateInput) error { updater, found := ni.updateExpressions[input.TableName+"|"+hashExpressionKey(input.Expression)] if !found { return fmt.Errorf( "%w: updater not found for %q expression in table %q", ErrUnsupportedFeature, input.Expression, input.TableName, ) } updater(input.Item, input.Attributes) return nil } func (ni *Native) getMatcher(tablename, expression string, kind ExpressionType) (MatcherFunc, error) { var ( matcher MatcherFunc found bool ) switch kind { case ExpressionTypeKey: matcher, found = ni.keyExpressions[tablename+"|"+hashExpressionKey(expression)] case ExpressionTypeFilter: matcher, found = 
ni.filterExpressions[tablename+"|"+hashExpressionKey(expression)] case ExpressionTypeConditional: matcher, found = ni.writeCondExpressions[tablename+"|"+hashExpressionKey(expression)] } if !found { return matcher, fmt.Errorf( "%w: matcher %q not found for %q expression in table %q", ErrUnsupportedFeature, string(kind), expression, tablename, ) } return matcher, nil } func hashExpressionKey(s string) string { out := strings.Split(strings.TrimSpace(s), "") sort.Strings(out) return strings.Join(out, "") } // AddUpdater add expression updater to use on key or filter queries func (ni *Native) AddUpdater(tablename string, expr string, updater UpdaterFunc) { ni.updateExpressions[tablename+"|"+hashExpressionKey(expr)] = updater } // AddMatcher add expression matcher to use on key or filter queries func (ni *Native) AddMatcher(tablename string, t ExpressionType, expr string, matcher MatcherFunc) { // TODO validate the expresion(expr) key := hashExpressionKey(expr) switch t { case ExpressionTypeKey: ni.keyExpressions[tablename+"|"+key] = matcher case ExpressionTypeFilter: ni.filterExpressions[tablename+"|"+key] = matcher case ExpressionTypeConditional: ni.writeCondExpressions[tablename+"|"+key] = matcher default: panic("NativeInterpreter: unsupported expression type") } }
interpreter/native.go
0.629547
0.414366
native.go
starcoder
package transforms import ( "math" "github.com/dustismo/heavyfishdesign/path" ) // Will rotate and scale the path so that the PathStartPoint and PathEndPoint equal StartPoint // and EndPoint. This is useful for using svg to connect two points type RotateScaleTransform struct { StartPoint path.Point EndPoint path.Point // What point on the path should be considered origin? // defaults to path start PathStartPoint path.Point // What point on the path should be considered the end? // defaults to path end PathEndPoint path.Point SegmentOperators path.SegmentOperators } func (rt RotateScaleTransform) line(p path.Path) (path.LineSegment, error) { startPoint := rt.PathStartPoint endPoint := rt.PathEndPoint if path.IsPoint00(startPoint) && path.IsPoint00(endPoint) { sp, err := path.PointPathAttribute(path.StartPoint, p, rt.SegmentOperators) if err != nil { return path.LineSegment{}, err } ep, err := path.PointPathAttribute(path.EndPoint, p, rt.SegmentOperators) if err != nil { return path.LineSegment{}, err } startPoint, endPoint = sp, ep } return path.LineSegment{ StartPoint: startPoint, EndPoint: endPoint, }, nil } func (rt RotateScaleTransform) PathTransform(p path.Path) (path.Path, error) { requestedLine := path.LineSegment{ StartPoint: rt.StartPoint, EndPoint: rt.EndPoint, } curLine, err := rt.line(p) if err != nil { return nil, err } //first rotate path to the requested angle pth, err := RotateTransform{ Degrees: requestedLine.Angle() - curLine.Angle(), Axis: path.Origin, SegmentOperators: rt.SegmentOperators, }.PathTransform(p) if err != nil { return nil, err } // rotate the curLine to match the path rotation curLineS, err := RotateTransform{ Degrees: requestedLine.Angle() - curLine.Angle(), Axis: path.Origin, SegmentOperators: rt.SegmentOperators, }.PathTransform(path.NewPathFromSegmentsWithoutMove([]path.Segment{ curLine, })) if err != nil { return nil, err } curLine = curLineS.Segments()[1].(path.LineSegment) newX := math.Abs(requestedLine.EndPoint.X - 
requestedLine.StartPoint.X) newY := math.Abs(requestedLine.EndPoint.Y - requestedLine.StartPoint.Y) oldX := math.Abs(curLine.EndPoint.X - curLine.StartPoint.X) oldY := math.Abs(curLine.EndPoint.Y - curLine.StartPoint.Y) xScale := 0.0 if oldX != 0 { xScale = newX / oldX } yScale := 0.0 if oldY != 0 { yScale = newY / oldY } scale := math.Max(xScale, yScale) // Now scale to the requested size pth, err = ScaleTransform{ ScaleX: scale, ScaleY: scale, SegmentOperators: rt.SegmentOperators, }.PathTransform(pth) if err != nil { return pth, err } // move pth, err = MoveTransform{ Point: rt.StartPoint, Handle: path.StartPoint, SegmentOperators: rt.SegmentOperators, }.PathTransform(pth) return pth, err }
transforms/rotate_scale.go
0.793146
0.624179
rotate_scale.go
starcoder
package assert import ( http "net/http" url "net/url" time "time" ) func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bool { return Condition(t, comp, append([]interface{}{msg}, args...)...) } func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { return Contains(t, s, contains, append([]interface{}{msg}, args...)...) } func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { return Empty(t, object, append([]interface{}{msg}, args...)...) } func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { return Equal(t, expected, actual, append([]interface{}{msg}, args...)...) } func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool { return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...) } func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { return EqualValues(t, expected, actual, append([]interface{}{msg}, args...)...) } func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { return Error(t, err, append([]interface{}{msg}, args...)...) } func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { return Exactly(t, expected, actual, append([]interface{}{msg}, args...)...) } func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { return Fail(t, failureMessage, append([]interface{}{msg}, args...)...) } func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) bool { return FailNow(t, failureMessage, append([]interface{}{msg}, args...)...) } func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { return False(t, value, append([]interface{}{msg}, args...)...) 
} func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { return HTTPBodyContains(t, handler, method, url, values, str) } func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool { return HTTPBodyNotContains(t, handler, method, url, values, str) } func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) bool { return HTTPError(t, handler, method, url, values) } func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) bool { return HTTPRedirect(t, handler, method, url, values) } func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values) bool { return HTTPSuccess(t, handler, method, url, values) } func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { return Implements(t, interfaceObject, object, append([]interface{}{msg}, args...)...) } func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { return InDelta(t, expected, actual, delta, append([]interface{}{msg}, args...)...) } func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { return InDeltaSlice(t, expected, actual, delta, append([]interface{}{msg}, args...)...) } func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { return InEpsilon(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) } func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) bool { return InEpsilonSlice(t, expected, actual, epsilon, append([]interface{}{msg}, args...)...) 
} func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) bool { return IsType(t, expectedType, object, append([]interface{}{msg}, args...)...) } func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { return JSONEq(t, expected, actual, append([]interface{}{msg}, args...)...) } func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool { return Len(t, object, length, append([]interface{}{msg}, args...)...) } func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { return Nil(t, object, append([]interface{}{msg}, args...)...) } func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { return NoError(t, err, append([]interface{}{msg}, args...)...) } func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { return NotContains(t, s, contains, append([]interface{}{msg}, args...)...) } func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { return NotEmpty(t, object, append([]interface{}{msg}, args...)...) } func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { return NotEqual(t, expected, actual, append([]interface{}{msg}, args...)...) } func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { return NotNil(t, object, append([]interface{}{msg}, args...)...) } func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { return NotPanics(t, f, append([]interface{}{msg}, args...)...) } func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { return NotRegexp(t, rx, str, append([]interface{}{msg}, args...)...) 
} func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { return NotSubset(t, list, subset, append([]interface{}{msg}, args...)...) } func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { return NotZero(t, i, append([]interface{}{msg}, args...)...) } func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { return Panics(t, f, append([]interface{}{msg}, args...)...) } func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { return PanicsWithValue(t, expected, f, append([]interface{}{msg}, args...)...) } func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { return Regexp(t, rx, str, append([]interface{}{msg}, args...)...) } func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { return Subset(t, list, subset, append([]interface{}{msg}, args...)...) } func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { return True(t, value, append([]interface{}{msg}, args...)...) } func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { return WithinDuration(t, expected, actual, delta, append([]interface{}{msg}, args...)...) } func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { return Zero(t, i, append([]interface{}{msg}, args...)...) }
assertion_format.go
0.727879
0.47926
assertion_format.go
starcoder
package threefish import ( "crypto/cipher" ) const ( // Size of a 256-bit block in bytes blockSize256 = 32 // Number of 64-bit words per 256-bit block numWords256 = blockSize256 / 8 // Number of rounds when using a 256-bit cipher numRounds256 = 72 ) type cipher256 struct { t [(tweakSize / 8) + 1]uint64 ks [(numRounds256 / 4) + 1][numWords256]uint64 } // New256 creates a new Threefish cipher with a block size of 256 bits. // The key argument must be 32 bytes and the tweak argument must be 16 bytes. func New256(key, tweak []byte) (cipher.Block, error) { // Length check the provided key if len(key) != blockSize256 { return nil, KeySizeError(blockSize256) } c := new(cipher256) // Load and extend the tweak value if err := calculateTweak(&c.t, tweak); err != nil { return nil, err } // Load and extend key k := new([numWords256 + 1]uint64) k[numWords256] = c240 for i := 0; i < numWords256; i++ { k[i] = loadWord(key[i*8 : (i+1)*8]) k[numWords256] ^= k[i] } // Calculate the key schedule for s := 0; s <= numRounds256/4; s++ { for i := 0; i < numWords256; i++ { c.ks[s][i] = k[(s+i)%(numWords256+1)] switch i { case numWords256 - 3: c.ks[s][i] += c.t[s%3] case numWords256 - 2: c.ks[s][i] += c.t[(s+1)%3] case numWords256 - 1: c.ks[s][i] += uint64(s) } } } return c, nil } // BlockSize returns the block size of a 256-bit cipher. func (c *cipher256) BlockSize() int { return blockSize256 } // Encrypt loads plaintext from src, encrypts it, and stores it in dst. 
func (c *cipher256) Encrypt(dst, src []byte) { // Load the input in := new([numWords256]uint64) in[0] = loadWord(src[0:8]) in[1] = loadWord(src[8:16]) in[2] = loadWord(src[16:24]) in[3] = loadWord(src[24:32]) // Perform encryption rounds for d := 0; d < numRounds256; d += 8 { // Add round key in[0] += c.ks[d/4][0] in[1] += c.ks[d/4][1] in[2] += c.ks[d/4][2] in[3] += c.ks[d/4][3] // Four rounds of mix and permute in[0] += in[1] in[1] = ((in[1] << 14) | (in[1] >> (64 - 14))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 16) | (in[3] >> (64 - 16))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 52) | (in[1] >> (64 - 52))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 57) | (in[3] >> (64 - 57))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 23) | (in[1] >> (64 - 23))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 40) | (in[3] >> (64 - 40))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 5) | (in[1] >> (64 - 5))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 37) | (in[3] >> (64 - 37))) ^ in[2] in[1], in[3] = in[3], in[1] // Add round key in[0] += c.ks[(d/4)+1][0] in[1] += c.ks[(d/4)+1][1] in[2] += c.ks[(d/4)+1][2] in[3] += c.ks[(d/4)+1][3] // Four rounds of mix and permute in[0] += in[1] in[1] = ((in[1] << 25) | (in[1] >> (64 - 25))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 33) | (in[3] >> (64 - 33))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 46) | (in[1] >> (64 - 46))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 12) | (in[3] >> (64 - 12))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 58) | (in[1] >> (64 - 58))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 22) | (in[3] >> (64 - 22))) ^ in[2] in[1], in[3] = in[3], in[1] in[0] += in[1] in[1] = ((in[1] << 32) | (in[1] >> (64 - 32))) ^ in[0] in[2] += in[3] in[3] = ((in[3] << 32) | (in[3] >> (64 - 32))) ^ in[2] in[1], in[3] = in[3], in[1] } // Add the final round key in[0] += c.ks[numRounds256/4][0] in[1] += 
c.ks[numRounds256/4][1] in[2] += c.ks[numRounds256/4][2] in[3] += c.ks[numRounds256/4][3] // Store ciphertext in destination storeWord(dst[0:8], in[0]) storeWord(dst[8:16], in[1]) storeWord(dst[16:24], in[2]) storeWord(dst[24:32], in[3]) } // Decrypt loads ciphertext from src, decrypts it, and stores it in dst. func (c *cipher256) Decrypt(dst, src []byte) { // Load the ciphertext ct := new([numWords256]uint64) ct[0] = loadWord(src[0:8]) ct[1] = loadWord(src[8:16]) ct[2] = loadWord(src[16:24]) ct[3] = loadWord(src[24:32]) // Subtract the final round key ct[0] -= c.ks[numRounds256/4][0] ct[1] -= c.ks[numRounds256/4][1] ct[2] -= c.ks[numRounds256/4][2] ct[3] -= c.ks[numRounds256/4][3] // Perform decryption rounds for d := numRounds256 - 1; d >= 0; d -= 8 { // Four rounds of permute and unmix ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 32)) | ((ct[3] ^ ct[2]) >> 32) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 32)) | ((ct[1] ^ ct[0]) >> 32) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 22)) | ((ct[3] ^ ct[2]) >> 22) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 58)) | ((ct[1] ^ ct[0]) >> 58) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 12)) | ((ct[3] ^ ct[2]) >> 12) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 46)) | ((ct[1] ^ ct[0]) >> 46) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 33)) | ((ct[3] ^ ct[2]) >> 33) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 25)) | ((ct[1] ^ ct[0]) >> 25) ct[0] -= ct[1] // Subtract round key ct[0] -= c.ks[d/4][0] ct[1] -= c.ks[d/4][1] ct[2] -= c.ks[d/4][2] ct[3] -= c.ks[d/4][3] // Four rounds of permute and unmix ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 37)) | ((ct[3] ^ ct[2]) >> 37) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 5)) | ((ct[1] ^ ct[0]) >> 5) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 40)) | ((ct[3] ^ ct[2]) >> 40) ct[2] -= ct[3] ct[1] = 
((ct[1] ^ ct[0]) << (64 - 23)) | ((ct[1] ^ ct[0]) >> 23) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 57)) | ((ct[3] ^ ct[2]) >> 57) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 52)) | ((ct[1] ^ ct[0]) >> 52) ct[0] -= ct[1] ct[1], ct[3] = ct[3], ct[1] ct[3] = ((ct[3] ^ ct[2]) << (64 - 16)) | ((ct[3] ^ ct[2]) >> 16) ct[2] -= ct[3] ct[1] = ((ct[1] ^ ct[0]) << (64 - 14)) | ((ct[1] ^ ct[0]) >> 14) ct[0] -= ct[1] // Subtract round key ct[0] -= c.ks[(d/4)-1][0] ct[1] -= c.ks[(d/4)-1][1] ct[2] -= c.ks[(d/4)-1][2] ct[3] -= c.ks[(d/4)-1][3] } // Store decrypted value in destination storeWord(dst[0:8], ct[0]) storeWord(dst[8:16], ct[1]) storeWord(dst[16:24], ct[2]) storeWord(dst[24:32], ct[3]) }
threefish/threefish256.go
0.603815
0.536556
threefish256.go
starcoder
package set type Set[T comparable] map[T]struct{} func New[T comparable]() Set[T] { return make(map[T]struct{}) } func Add[T comparable](s Set[T], v T) { s[v] = struct{}{} } func Remove[T comparable](s Set[T], v T) { delete(s, v) } func Contains[T comparable](s Set[T], v T) bool { _, ok := s[v] return ok } func FromSlice[T comparable](s []T) Set[T] { set := Set[T]{} for _, v := range s { set[v] = struct{}{} } return set } func FromMapKey[K comparable, V any](m map[K]V) Set[K] { set := Set[K]{} for k := range m { set[k] = struct{}{} } return set } func FromMapValue[K comparable, V comparable](m map[K]V) Set[V] { set := Set[V]{} for _, v := range m { set[v] = struct{}{} } return set } func ToSlice[T comparable](s Set[T]) []T { slice := make([]T, 0, len(s)) for k := range s { slice = append(slice, k) } return slice } func Union[T comparable](s1, s2 Set[T]) Set[T] { union := Set[T]{} for k := range s1 { union[k] = struct{}{} } for k := range s2 { union[k] = struct{}{} } return union } func Intersect[T comparable](s1, s2 Set[T]) Set[T] { intersection := Set[T]{} for k := range s1 { if _, ok := s2[k]; ok { intersection[k] = struct{}{} } } return intersection } func Subtract[T comparable](s1, s2 Set[T]) Set[T] { subtract := Set[T]{} for k := range s1 { if _, ok := s2[k]; !ok { subtract[k] = struct{}{} } } return subtract } func Equal[T comparable](s1, s2 Set[T]) bool { if len(s1) != len(s2) { return false } for k := range s1 { if _, ok := s2[k]; !ok { return false } } return true } func IsSubset[T comparable](s1, s2 Set[T]) bool { for k := range s1 { if _, ok := s2[k]; !ok { return false } } return true } func IsSuperset[T comparable](s1, s2 Set[T]) bool { for k := range s2 { if _, ok := s1[k]; !ok { return false } } return true } func IsDisjoint[T comparable](s1, s2 Set[T]) bool { for k := range s1 { if _, ok := s2[k]; ok { return false } } return true }
set/set.go
0.666605
0.506713
set.go
starcoder
package internal import ( "github.com/johnfercher/maroto/internal/fpdf" "github.com/johnfercher/maroto/pkg/props" ) const ( maxPercent = 100.0 ) // Math is the abstraction which deals with useful calc. type Math interface { GetRectCenterColProperties(imageWidth float64, imageHeight float64, colWidth float64, colHeight float64, xColOffset float64, percent float64) (x float64, y float64, w float64, h float64) GetRectNonCenterColProperties(imageWidth float64, imageHeight float64, colWidth float64, colHeight float64, xColOffset float64, prop props.Rect) (x float64, y float64, w float64, h float64) GetCenterCorrection(outerSize, innerSize float64) float64 } type math struct { pdf fpdf.Fpdf } // NewMath create a Math. func NewMath(pdf fpdf.Fpdf) *math { return &math{ pdf, } } // GetRectCenterColProperties define Width, Height, X Offset and Y Offset // to and rectangle (QrCode, Barcode, Image) be centralized inside a cell. func (s *math) GetRectCenterColProperties(imageWidth float64, imageHeight float64, colWidth float64, colHeight float64, xColOffset float64, percent float64) (x float64, y float64, w float64, h float64) { percent /= 100.0 left, top, _, _ := s.pdf.GetMargins() imageProportion := imageHeight / imageWidth celProportion := colHeight / colWidth if imageProportion > celProportion { newImageWidth := colHeight / imageProportion * percent newImageHeight := newImageWidth * imageProportion widthCorrection := s.GetCenterCorrection(colWidth, newImageWidth) heightCorrection := s.GetCenterCorrection(colHeight, newImageHeight) x = xColOffset + left + widthCorrection y = top + heightCorrection w = newImageWidth h = newImageHeight } else { newImageWidth := colWidth * percent newImageHeight := newImageWidth * imageProportion widthCorrection := s.GetCenterCorrection(colWidth, newImageWidth) heightCorrection := s.GetCenterCorrection(colHeight, newImageHeight) x = xColOffset + left + widthCorrection y = top + heightCorrection w = newImageWidth h = newImageHeight } return x, 
y, w, h } // GetRectNonCenterColProperties define Width, Height to and rectangle (QrCode, Barcode, Image) inside a cell. func (s *math) GetRectNonCenterColProperties(imageWidth float64, imageHeight float64, colWidth float64, colHeight float64, xColOffset float64, prop props.Rect) (x float64, y float64, w float64, h float64) { percent := prop.Percent / maxPercent left, top, _, _ := s.pdf.GetMargins() imageProportion := imageHeight / imageWidth celProportion := colHeight / colWidth if imageProportion > celProportion { newImageWidth := colHeight / imageProportion * percent newImageHeight := newImageWidth * imageProportion x = xColOffset + left + prop.Left y = top w = newImageWidth h = newImageHeight } else { newImageWidth := colWidth * percent newImageHeight := newImageWidth * imageProportion x = xColOffset + left + prop.Left y = top w = newImageWidth h = newImageHeight } return } // GetCenterCorrection return the correction of space in X or Y to // centralize a line in relation with another line. func (s *math) GetCenterCorrection(outerSize, innerSize float64) float64 { const divisorToGetHalf = 2.0 return (outerSize - innerSize) / divisorToGetHalf }
internal/math.go
0.808786
0.651175
math.go
starcoder
package features // Followgrams [same as k-skip n-grams]: // - We know that an n-gram of a string is any n-length substring of that string. // - By analogy, we define an "n-followgram" to be a pair "ab" such that "b" follows "a" in the parent // string within a window of size n+1 (e.g. the string "abcd" contains the 2-followgrams // ab, ac, (but not ad because that distance is 3), bc, bd, and cd). // - We'll refer to the "infinity-followgrams" as just followgrams. // - So any string, of any length, over the alphabet [a-z0-9 ] can have 1369 (37 * 37) different // followgrams: aa, ab, ..., az, a0, ..., a9, "a ", ba, bb, ..., "b ", ..., " a", " b", ..., " ". // - So for a string of length 256, the maximum count in the followgrams array is // 255 + 254 + 253 + ... + 1 = 255 * 254 / 2, roughly 2**16 - 1. // More generally, for a string of length 2**n, the max count in the followgrams array is roughly // 2**(2n) - 1. // - Notice that sum(followgrams(string)) must be (n)(n-1)/2, where n = len(string). // - Given that, it's a fairly easy inductive proof that if s1 != s2 then // followgram(s1) != followgram(s2). (n-grams don't have this uniqueness property.) // Reconstruct the original string from the followgram array as follows: Sum the array grouping by // first letter; the letter with the highest sum must be the first letter of the string. Subtract 1 // from each entry with that first letter, and now you have the followgram array of the rest of the // original string (i.e. original - first char). // - In practice, I've found that this uniqueness property actually causes "infinity-followgrams" to // yield poor results in our use-case. So we'll use smaller followgram windows (say 5-followgrams) as // a proxy for using n>2-grams (say 6-grams). 
import "strconv" const followgram_default_window_size = 5 const max_followgram_count = 255 var num_followgrams int //---------------------------------------------------------------------------------------------------- // Provide featureSetConfig type followgrams struct { WindowSize int } func (f followgrams) Size() int32 { return int32(num_followgrams) } func (f followgrams) FromStringInPlace(input string, featureArray []byte) { sNormalized := normalizeString(input) sNormalizedLen := len(sNormalized) for i := 0; i < sNormalizedLen-1; i++ { ch1 := char_map[sNormalized[i]] // get window right edge, making sure we don't fall off the end of the string followgramWindowEnd := i + f.WindowSize + 1 if followgramWindowEnd > sNormalizedLen { followgramWindowEnd = sNormalizedLen } for j := i + 1; j < followgramWindowEnd; j++ { // get the index into the followgram array and increment the count, // making sure we don't overflow the byte ch2 := char_map[sNormalized[j]] followgramIndex := (ch1 * alphabet_size) + ch2 currentCount := featureArray[followgramIndex] if currentCount < max_followgram_count { featureArray[followgramIndex] = currentCount + 1 } } } } func deserializeFollowgramsMap(confMap map[string]string) (config featureSetConfig, ok bool) { if windowSizeStr, ok := confMap["window_size"]; ok { if windowSize, err := strconv.Atoi(windowSizeStr); err == nil { return followgrams{int(windowSize)}, true } else { return nil, false } } return followgrams{followgram_default_window_size}, true } //---------------------------------------------------------------------------------------------------- func init() { num_followgrams = alphabet_size * alphabet_size } func (f followgrams) fromString(input string) []byte { featureArray := make([]byte, num_followgrams) f.FromStringInPlace(input, featureArray) return featureArray }
features/followgrams.go
0.758689
0.531878
followgrams.go
starcoder
package grid import ( "fmt" "math" "github.com/gitchander/permutation" ) func init() { // Original foxhole problem definition // BaseGrid = CreateLinearGrid(5) BaseGrid = CreatePrismGrid([]int{8, 8}) } /* The default grid definition that will be used everywhere. The format for this is a list of connections for each node in the grid. The example for the basic foxhole problem would look like this: Connections: [][]int{ {2}, {1, 3}, {2, 4}, {3, 5}, {4}, } Symmetry: [][]int{ {0,1,2,3,4}, {4,3,2,1,0}, } */ var BaseGrid *GridDefinition type GridDefinition struct { // List of connections Connections [][]int // List of orderings which are symettric for the definitions Symmetries [][]int } /* Helper function for creating linear foxhole patterns */ func CreateLinearGrid(n int) *GridDefinition { // First create the connections connections := [][]int{} for i := 0; i < n; i += 1 { node := []int{} if i > 0 { node = append(node, i-1) } if i < n-1 { node = append(node, i+1) } connections = append(connections, node) } // Then create the symettries forward, backward := []int{}, []int{} for i := 0; i < n; i += 1 { forward = append(forward, i) backward = append(backward, n-i-1) } symmetries := [][]int{forward, backward} return &GridDefinition{ Connections: connections, Symmetries: symmetries, } } /* Function for determining the location of a cell in the values array given 3D coordinates. */ func getIndex(dimensionSizes []int, location []int) int { index := 0 for i, x := range location { index += dimensionSizes[i] * x } return index } /* Function for determining the location of a cell in vector form given a value index. */ func getLocation(dimensionSizes []int, index int) []int { location := []int{} for i, size := range dimensionSizes { remainder := index if i < len(dimensionSizes)-1 { remainder = index % dimensionSizes[i+1] } x := int(math.Floor(float64(remainder) / float64(size))) location = append(location, x) } return location } /* Create n-cube grid. 
*/ func CreatePrismGrid(dimensionLengths []int) *GridDefinition { /* Create the size of each dimension. Use this value to index and un-index things. */ totalCells := 1 dimensionSizes := []int{} for _, l := range dimensionLengths { dimensionSizes = append(dimensionSizes, totalCells) totalCells *= l } // Generate the connections array first connections := [][]int{} for i := 0; i < totalCells; i++ { location := getLocation(dimensionSizes, i) // Generate each of the modified locations based on the current location connectionLocations := []int{} for i, x := range location { // Move in the negative direction along an axis if x > 0 { newLocation := make([]int, len(location)) copy(newLocation, location) newLocation[i] = x - 1 connectionLocations = append(connectionLocations, getIndex(dimensionSizes, newLocation)) } // Move in the positive direction along an axis if x < dimensionLengths[i]-1 { newLocation := make([]int, len(location)) copy(newLocation, location) newLocation[i] = x + 1 connectionLocations = append(connectionLocations, getIndex(dimensionSizes, newLocation)) } } // Add the generated locations for this cell to connections connections = append(connections, connectionLocations) } // Generate all the symettries. 
First we need the base symmetry symmetryHashes := map[string]bool{} symmetries := [][]int{} // Now apply all the other ones by dimension for _, dimensionInformation := range GetBinaryArrays(len(dimensionLengths)) { order := GetOrderedLocations(dimensionLengths, dimensionInformation) indexOrder := []int{} for _, location := range order { indexOrder = append(indexOrder, getIndex(dimensionSizes, location)) } hash := hashSymmetry(indexOrder) if _, exists := symmetryHashes[hash]; !exists { symmetryHashes[hash] = true symmetries = append(symmetries, indexOrder) } } return &GridDefinition{ Connections: connections, Symmetries: symmetries, } } /* Returns a list of binary arrays */ func GetBinaryArrays(n int) [][]int { // Create the array list and the base array baseArrays := [][]int{} baseArray := []int{} for i := 0; i < n; i++ { baseArray = append(baseArray, i + 1) } baseArrays = append(baseArrays, baseArray) // Now sequentiall apply flips to everything for i := 0; i < n; i++ { newArrays := [][]int{} for _, array := range baseArrays { newArray := make([]int, len(array)) copy(newArray, array) newArray[i] = -array[i] newArrays = append(newArrays, newArray) } baseArrays = append(baseArrays, newArrays...) } // Now permute each of the int options finalArrays := [][]int{} for _, array := range baseArrays { p := permutation.New(permutation.IntSlice(array)) for p.Next() { newArray := make([]int, len(array)) copy(newArray, array) finalArrays = append(finalArrays, newArray) } } // Return a list of the generated baseArrays return finalArrays } /* Creates an ordering based on dimensions */ func GetOrderedLocations(dimensionLengths []int, dimensionInformation []int) [][]int { // Current position. 
Start at whatever the appropriate corner is currentLocation := make([]int, len(dimensionLengths)) dimensionDirections := []bool{} dimensionOrder := []int{} for i, dimensionInfo := range dimensionInformation { // Used to determine if the dimension is increasing or decreasing if dimensionInfo > 0 { dimensionDirections = append(dimensionDirections, true) } else { dimensionDirections = append(dimensionDirections, false) dimensionInfo = -dimensionInfo } dimensionInfo-- // Fix the current position into the correct location currentLocation[dimensionInfo] = 0 if !dimensionDirections[i] { currentLocation[dimensionInfo] = dimensionLengths[dimensionInfo] - 1 } dimensionOrder = append(dimensionOrder, dimensionInfo) } /* Create a zig-zag boi starting at the current location and changing according to the dimension direction and ordering */ locations := [][]int{} locationLoop: for { // Make a copy of the current location and add it to the orderings newLocation := make([]int, len(currentLocation)) copy(newLocation, currentLocation) locations = append(locations, newLocation) /* Adjust the current location to match the orderings and direction of the dimensions */ for i, d := range dimensionOrder { direction := dimensionDirections[i] length := dimensionLengths[d] if direction { currentLocation[d]++ if currentLocation[d] == length { currentLocation[d] = 0 } else { continue locationLoop } } else { if currentLocation[d] != 0 { currentLocation[d]-- continue locationLoop } else { currentLocation[d] = length - 1 } } } // If we are here, then we didn't hit a continue so it's time to break out break } return locations } /* Helper function for grabbing symmetry hashes */ func hashSymmetry(s []int) string { hash := "" for i, v := range s { hash += fmt.Sprint(v) if i != len(s) - 1 { hash += "," } } return hash } /* Function for determining if a grid definition is binary or not. 
*/ func (d *GridDefinition) RepeatingGrid() (int, []*Grid) { // Create an initial grid with only one cell shaded grids := []*Grid{} grid := CreateBlankGrid() grid.Values[0] = true // Loop until the last for { for i, otherGrid := range grids { if grid.Equal(otherGrid) { return len(grids) - i, grids[i:] } } grids = append(grids, grid) grid = grid.Propogate() } }
grid/gridDefinition.go
0.743634
0.463869
gridDefinition.go
starcoder
package airtime import ( "errors" "math" "time" ) // CodingRate defines the coding-rate type. type CodingRate int // Available coding-rates. const ( CodingRate45 CodingRate = 1 CodingRate46 CodingRate = 2 CodingRate47 CodingRate = 3 CodingRate48 CodingRate = 4 ) // CalculateLoRaAirtime calculates the airtime for a LoRa modulated frame. func CalculateLoRaAirtime(payloadSize, sf, bandwidth, preambleNumber int, codingRate CodingRate, headerEnabled, lowDataRateOptimization bool) (time.Duration, error) { symbolDuration := CalculateLoRaSymbolDuration(sf, bandwidth) preambleDuration := CalculateLoRaPreambleDuration(symbolDuration, preambleNumber) payloadSymbolNumber, err := CalculateLoRaPayloadSymbolNumber(payloadSize, sf, codingRate, headerEnabled, lowDataRateOptimization) if err != nil { return 0, err } return preambleDuration + (time.Duration(payloadSymbolNumber) * symbolDuration), nil } // CalculateLoRaSymbolDuration calculates the LoRa symbol duration. func CalculateLoRaSymbolDuration(sf int, bandwidth int) time.Duration { return time.Duration((1 << uint(sf)) * 1000000 / bandwidth) } // CalculateLoRaPreambleDuration calculates the LoRa preamble duration. func CalculateLoRaPreambleDuration(symbolDuration time.Duration, preambleNumber int) time.Duration { return time.Duration((100*preambleNumber)+425) * symbolDuration / 100 } // CalculateLoRaPayloadSymbolNumber returns the number of symbols that make // up the packet payload and header. 
func CalculateLoRaPayloadSymbolNumber(payloadSize, sf int, codingRate CodingRate, headerEnabled, lowDataRateOptimization bool) (int, error) { var pl, spreadingFactor, h, de, cr float64 if codingRate < 1 || codingRate > 4 { return 0, errors.New("codingRate must be between 1 - 4") } if lowDataRateOptimization { de = 1 } if !headerEnabled { h = 1 } pl = float64(payloadSize) spreadingFactor = float64(sf) cr = float64(codingRate) a := 8*pl - 4*spreadingFactor + 28 + 16 - 20*h b := 4 * (spreadingFactor - 2*de) c := cr + 4 return int(8 + math.Max(math.Ceil(a/b)*c, 0)), nil }
airtime/airtime.go
0.811153
0.491639
airtime.go
starcoder
package parser import ( "github.com/viant/parsly" ast2 "github.com/viant/velty/ast" aexpr "github.com/viant/velty/ast/expr" ) var dataTypeMatchers = []*parsly.Token{String, Boolean, Number} func matchOperand(cursor *parsly.Cursor, candidates ...*parsly.Token) (*parsly.Token, ast2.Expression, error) { matched := cursor.MatchAfterOptional(WhiteSpace, Negation) hasNegation := matched.Code == negationToken candidates = append([]*parsly.Token{Quote, SelectorStart, Parentheses}, candidates...) matched = cursor.MatchAfterOptional(WhiteSpace, candidates...) var matcher *parsly.Token var expression ast2.Expression var err error switch matched.Code { case parsly.EOF, parsly.Invalid: return nil, nil, cursor.NewError(candidates...) case parenthesesToken: text := matched.Text(cursor) newCursor := parsly.NewCursor("", []byte(text[1:len(text)-1]), 0) token, expr, err := matchOperand(newCursor, candidates...) if err != nil { return nil, nil, err } if hasNegation { expr = &aexpr.Unary{ Token: ast2.NEG, X: expr, } } return token, expr, nil case stringToken: value := matched.Text(cursor) matcher = String expression = aexpr.StringLiteral(value[1 : len(value)-1]) case selectorStartToken: expression, err = matchSelector(cursor) if err != nil { return nil, nil, err } matcher = Selector case numberToken: value := matched.Text(cursor) matcher = Number expression = aexpr.NumberLiteral(value) case booleanToken: value := matched.Text(cursor) matcher = Boolean expression = aexpr.BoolLiteral(value) case quoteToken: matched = cursor.MatchOne(StringFinish) if matched.Code != stringFinishToken { return nil, nil, cursor.NewError(StringFinish) } value := matched.Text(cursor) if len(value) == 1 { // matched `"` matcher = String expression = aexpr.StringLiteral("") } else { newCursor := parsly.NewCursor("", []byte(value[:len(value)-1]), 0) matcher, expression, err = matchOperand(newCursor, candidates...) 
if err != nil { expression = aexpr.StringLiteral(value[:len(value)-1]) } else { if _, ok := expression.(*aexpr.Select); !ok { expression = aexpr.StringLiteral(value[:len(value)-1]) } } } } if hasNegation { expression = &aexpr.Unary{ Token: ast2.NEG, X: expression, } } err = addEquationIfNeeded(cursor, &expression) if err != nil { return nil, nil, err } return matcher, expression, nil } func addEquationIfNeeded(cursor *parsly.Cursor, expression *ast2.Expression) error { for { candidates := []*parsly.Token{Add, Sub, Multiply, Quo, NotEqual, Negation, Equal, And, Or, GreaterEqual, Greater, LessEqual, Less} matched := cursor.MatchAfterOptional(WhiteSpace, candidates...) switch matched.Code { case parsly.EOF, binaryExpressionStartToken, parsly.Invalid: return nil } token := matchToken(matched) eprCursor, err := matchExpressionBlock(cursor) var rightExpression ast2.Expression if err == nil { rightExpression, err = matchEquationExpression(eprCursor) rightExpression = &aexpr.Parentheses{P: rightExpression} } else { _, rightExpression, err = matchOperand(cursor, dataTypeMatchers...) } if err != nil { return err } hasPrecedence := isPrecedenceToken(token) if hasPrecedence { y, ok := rightExpression.(*aexpr.Binary) if ok && !isPrecedenceToken(y.Token) { expression = adjustPrecedence(expression, token, y) continue } } *expression = &aexpr.Binary{ X: *expression, Token: token, Y: rightExpression, } } } func adjustPrecedence(expression *ast2.Expression, token ast2.Token, y *aexpr.Binary) *ast2.Expression { p := &aexpr.Parentheses{} p.P = &aexpr.Binary{ X: *expression, Token: token, Y: y.X, } *expression = &aexpr.Binary{ X: p, Token: y.Token, Y: y.Y, } return expression } func isPrecedenceToken(token ast2.Token) bool { hasPrecedence := token == ast2.MUL || token == ast2.QUO return hasPrecedence }
parser/operand.go
0.558568
0.452113
operand.go
starcoder
package sqlb import ( "bytes" "encoding/json" "fmt" r "reflect" ) /* Known SQL operations used in JEL. Serves as a whitelist, allowing us to differentiate them from casts, and describes how to transform JEL Lisp-style calls into SQL expressions (prefix, infix, etc.). This is case-sensitive and whitespace-sensitive. */ var Ops = map[string]Op{ `and`: OpInfix, `or`: OpInfix, `not`: OpPrefix, `is null`: OpPostfix, `is not null`: OpPostfix, `is true`: OpPostfix, `is not true`: OpPostfix, `is false`: OpPostfix, `is not false`: OpPostfix, `is unknown`: OpPostfix, `is not unknown`: OpPostfix, `is distinct from`: OpInfix, `is not distinct from`: OpInfix, `=`: OpInfix, `~`: OpInfix, `~*`: OpInfix, `~=`: OpInfix, `<>`: OpInfix, `<`: OpInfix, `>`: OpInfix, `>=`: OpInfix, `<=`: OpInfix, `@@`: OpInfix, `any`: OpAny, `between`: OpBetween, } /* Syntax type of SQL operator expressions used in JEL. Allows us to convert JEL Lisp-style "calls" into SQL-style operations that use prefix, infix, etc. */ type Op byte const ( OpPrefix Op = iota + 1 OpPostfix OpInfix OpFunc OpAny OpBetween ) /* Shortcut for instantiating `Jel` with the type of the given value. The input is used only as a type carrier. */ func JelFor(typ interface{}) Jel { return Jel{Type: typeElemOf(typ)} } /* Short for "JSON Expression Language". Provides support for expressing a whitelisted subset of SQL with JSON, as Lisp-style nested lists. Transcodes JSON to SQL on the fly. Implements `Expr`. Can be transparently used as a sub-expression in other `sqlb` expressions. See the provided example. Expressions are Lisp-style, using nested lists to express "calls". This syntax is used for all SQL operations. Binary infix operators are considered variadic. Lists are used for calls and casts. The first element must be a string. It may be one of the whitelisted operators or functions, listed in `Ops`. If not, it must be a field name or a dot-separated field path. Calls are arbitrarily nestable. 
["and", true, ["or", true, ["and", true, false]]] ["<=", 10, 20] ["=", "someField", "otherField"] ["and", ["=", "someField", "otherField"], ["<=", "dateField", ["dateField", "9999-01-01T00:00:00Z"]] ] Transcoding from JSON to SQL is done by consulting two things: the built-in whitelist of SQL operations (`Ops`, shared), and a struct type provided to that particular decoder. The struct serves as a whitelist of available identifiers, and allows to determine value types via casting. Casting allows to decode arbitrary JSON directly into the corresponding Go type: ["someDateField", "9999-01-01T00:00:00Z"] ["someGeoField", {"lng": 10, "lat": 20}] Such decoded values are substituted with ordinal parameters such as $1, and appended to the slice of arguments (see below). A string not in a call position and not inside a cast is interpreted as an identifier: field name or nested field path, dot-separated. It must be found on the reference struct, otherwise transcoding fails with an error. "someField" "outerField.innerField" Literal numbers, booleans, and nulls that occur outside of casts are decoded into their Go equivalents. Like casts, they're substituted with ordinal parameters and appended to the slice of arguments. JSON queries are transcoded against a struct, by matching fields tagged with `json` against fields tagged with `db`. Literal values are JSON-decoded into the types of the corresponding struct fields. 
type Input struct { FieldOne string `json:"fieldOne" db:"field_one"` FieldTwo struct { FieldThree *time.Time `json:"fieldThree" db:"field_three"` } `json:"fieldTwo" db:"field_two"` } const src = ` ["and", ["=", "fieldOne", ["fieldOne", "literal string"]], ["<", "fieldTwo.fieldThree", ["fieldTwo.fieldThree", "9999-01-01T00:00:00Z"]] ] ` expr := Jel{Type: reflect.TypeOf((*Input)(nil)).Elem(), Text: src} text, args := Reify(expr) The result is roughly equivalent to the following (formatted for clarity): text := ` "field_one" = 'literal string' and ("field_two")."field_three" < '9999-01-01T00:00:00Z' ` args := []interface{}{"literal string", time.Time("9999-01-01T00:00:00Z")} */ type Jel struct { Type r.Type Text string } var _ = Expr(Jel{}) /* Implement `Expr`, allowing this to be used as a sub-expression in queries built with "github.com/mitranim/sqlb". Always generates a valid boolean expression, falling back on "true" if empty. */ func (self Jel) AppendExpr(text []byte, args []interface{}) ([]byte, []interface{}) { bui := Bui{text, args} if len(self.Text) == 0 { bui.Str(`true`) } else { self.decode(&bui, stringToBytesUnsafe(self.Text)) } return bui.Get() } // Implement the `Appender` interface, sometimes allowing more efficient text // encoding. func (self Jel) Append(text []byte) []byte { return exprAppend(&self, text) } // Implement the `fmt.Stringer` interface for debug purposes. func (self Jel) String() string { return exprString(&self) } // Stores the input for future use in `.AppendExpr`. Input must be valid JSON. func (self *Jel) Parse(val string) error { self.Text = val return nil } // Stores the input for future use in `.AppendExpr`. Input must be valid JSON. func (self *Jel) UnmarshalText(val []byte) error { // TODO consider using unsafe conversions. self.Text = string(val) return nil } // Stores the input for future use in `.AppendExpr`. Input must be valid JSON. 
func (self *Jel) UnmarshalJSON(val []byte) error { // TODO consider using unsafe conversions. self.Text = string(val) return nil } /* If `.Type` is empty, sets the type of the provided value. Otherwise this is a nop. The input is used only as a type carrier; its actual value is ignored. */ func (self *Jel) OrType(typ interface{}) { if self.Type == nil { self.Type = typeElemOf(typ) } } func (self *Jel) decode(bui *Bui, input []byte) { input = bytes.TrimSpace(input) if isJsonDict(input) { panic(ErrInvalidInput{Err{ `decoding JEL`, fmt.Errorf(`unexpected dict in input: %q`, input), }}) } else if isJsonList(input) { self.decodeList(bui, input) } else if isJsonString(input) { self.decodeString(bui, input) } else { self.decodeAny(bui, input) } } func (self *Jel) decodeList(bui *Bui, input []byte) { var list []json.RawMessage err := json.Unmarshal(input, &list) if err != nil { panic(ErrInvalidInput{Err{ `decoding JEL list`, fmt.Errorf(`failed to unmarshal as JSON list: %w`, err), }}) } if !(len(list) > 0) { panic(ErrInvalidInput{Err{ `decoding JEL list`, fmt.Errorf(`lists must have at least one element, found empty list`), }}) } head, args := list[0], list[1:] if !isJsonString(head) { panic(ErrInvalidInput{Err{ `decoding JEL list`, fmt.Errorf(`first list element must be a string, found %q`, head), }}) } var name string err = json.Unmarshal(head, &name) if err != nil { panic(ErrInvalidInput{Err{ `decoding JEL list`, fmt.Errorf(`failed to unmarshal JSON list head as string: %w`, err), }}) } switch Ops[name] { case OpPrefix: self.decodeOpPrefix(bui, name, args) case OpPostfix: self.decodeOpPostfix(bui, name, args) case OpInfix: self.decodeOpInfix(bui, name, args) case OpFunc: self.decodeOpFunc(bui, name, args) case OpAny: self.decodeOpAny(bui, name, args) case OpBetween: self.decodeOpBetween(bui, name, args) default: self.decodeCast(bui, name, args) } } func (self *Jel) decodeOpPrefix(bui *Bui, name string, args []json.RawMessage) { if len(args) != 1 { 
panic(ErrInvalidInput{Err{ `decoding JEL op (prefix)`, fmt.Errorf(`prefix operation %q must have exactly 1 argument, found %v`, name, len(args)), }}) } bui.Str(`(`) bui.Str(name) self.decode(bui, args[0]) bui.Str(`)`) } func (self *Jel) decodeOpPostfix(bui *Bui, name string, args []json.RawMessage) { if len(args) != 1 { panic(ErrInvalidInput{Err{ `decoding JEL op (postfix)`, fmt.Errorf(`postfix operation %q must have exactly 1 argument, found %v`, name, len(args)), }}) } bui.Str(`(`) self.decode(bui, args[0]) bui.Str(name) bui.Str(`)`) } func (self *Jel) decodeOpInfix(bui *Bui, name string, args []json.RawMessage) { if !(len(args) >= 2) { panic(ErrInvalidInput{Err{ `decoding JEL op (infix)`, fmt.Errorf(`infix operation %q must have at least 2 arguments, found %v`, name, len(args)), }}) } bui.Str(`(`) for i, arg := range args { if i > 0 { bui.Str(name) } self.decode(bui, arg) } bui.Str(`)`) } func (self *Jel) decodeOpFunc(bui *Bui, name string, args []json.RawMessage) { bui.Str(name) bui.Str(`(`) for i, arg := range args { if i > 0 { bui.Str(`,`) } self.decode(bui, arg) } bui.Str(`)`) } func (self *Jel) decodeOpAny(bui *Bui, name string, args []json.RawMessage) { if len(args) != 2 { panic(ErrInvalidInput{Err{ `decoding JEL op`, fmt.Errorf(`operation %q must have exactly 2 arguments, found %v`, name, len(args)), }}) } bui.Str(`(`) self.decode(bui, args[0]) bui.Str(`=`) bui.Str(name) bui.Str(`(`) self.decode(bui, args[1]) bui.Str(`)`) bui.Str(`)`) } func (self *Jel) decodeOpBetween(bui *Bui, name string, args []json.RawMessage) { if len(args) != 3 { panic(ErrInvalidInput{Err{ `decoding JEL op (between)`, fmt.Errorf(`operation %q must have exactly 3 arguments, found %v`, name, len(args)), }}) } bui.Str(`(`) self.decode(bui, args[0]) bui.Str(`between`) self.decode(bui, args[1]) bui.Str(`and`) self.decode(bui, args[2]) bui.Str(`)`) } func (self *Jel) decodeCast(bui *Bui, name string, args []json.RawMessage) { if len(args) != 1 { panic(ErrInvalidInput{Err{ `decoding JEL 
op (cast)`, fmt.Errorf(`cast into %q must have exactly 1 argument, found %v`, name, len(args)), }}) } typ := self.Type field, ok := loadStructJsonPathToNestedDbFieldMap(typ)[name] if !ok { panic(errUnknownField(`decoding JEL op (cast)`, name, typeName(typ))) } val := r.New(field.Field.Type) try(json.Unmarshal(args[0], val.Interface())) bui.Param(bui.Arg(val.Elem().Interface())) } func (self *Jel) decodeString(bui *Bui, input []byte) { var str string try(json.Unmarshal(input, &str)) typ := self.Type val, ok := loadStructJsonPathToNestedDbFieldMap(typ)[str] if !ok { panic(errUnknownField(`decoding JEL string`, str, typeName(typ))) } bui.Set(Path(val.DbPath).AppendExpr(bui.Get())) } // Should be used only for numbers, bools, nulls. // TODO: unmarshal integers into `int64` rather than `float64`. func (self *Jel) decodeAny(bui *Bui, input []byte) { var val interface{} try(json.Unmarshal(input, &val)) bui.Param(bui.Arg(val)) }
sqlb_jel.go
0.709019
0.501953
sqlb_jel.go
starcoder
package httpref // Statuses represents all of the defined HTTP statuses var Statuses = References{ { Name: "1xx", IsTitle: true, Summary: "Informational response", Description: `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#1xx_Informational_response`, }, { Name: "100", Summary: "Continue", Description: `The HTTP 100 Continue informational status response code indicates that everything so far is OK and that the client should continue with the request or ignore it if it is already finished. To have a server check the request's headers, a client must send Expect: 100-continue as a header in its initial request and receive a 100 Continue status code in response before sending the body. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/100`, }, { Name: "101", Summary: "Switching Protocols", Description: `The HTTP 101 Switching Protocols response code indicates the protocol the server is switching to as requested by a client which sent the message including the Upgrade request header. The server includes in this response an Upgrade response header to indicate the protocol it switched to. The process is described in detail in the article Protocol upgrade mechanism. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101`, }, { Name: "102", Summary: "Processing (WebDAV)", Description: `This code indicates that the server has received and is processing the request, but no response is available yet.`, }, { Name: "103", Summary: "Early Hints", Description: `The HTTP 103 Early Hints information response status code is primarily intended to be used with the Link header to allow the user agent to start preloading resources while the server is still preparing a response. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/103`, }, { Name: "2xx", IsTitle: true, Summary: "Successful responses", Description: `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#2xx_Success`, }, { Name: "200", Summary: "OK", Description: `The HTTP 200 OK success status response code indicates that the request has succeeded. A 200 response is cacheable by default. The meaning of a success depends on the HTTP request method: GET: The resource has been fetched and is transmitted in the message body. HEAD: The entity headers are in the message body. POST: The resource describing the result of the action is transmitted in the message body. TRACE: The message body contains the request message as received by the server. The successful result of a PUT or a DELETE is often not a 200 OK but a 204 No Content (or a 201 Created when the resource is uploaded for the first time). https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200`, }, { Name: "201", Summary: "Created", Description: `The HTTP 201 Created success status response code indicates that the request has succeeded and has led to the creation of a resource. The new resource is effectively created before this response is sent back and the new resource is returned in the body of the message, its location being either the URL of the request, or the content of the Location header. The common use case of this status code is as the result of a POST request. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/201`, }, { Name: "202", Summary: "Accepted", Description: `The HyperText Transfer Protocol (HTTP) 202 Accepted response status code indicates that the request has been received but not yet acted upon. It is non-committal, meaning that there is no way for the HTTP to later send an asynchronous response indicating the outcome of processing the request. 
It is intended for cases where another process or server handles the request, or for batch processing. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/202`, }, { Name: "203", Summary: "Non-Authoritative Information ", Description: `The HTTP 203 Non-Authoritative Information response status indicates that the request was successful but the enclosed payload has been modified by a transforming proxy from that of the origin server's 200 (OK) response . The 203 response is similar to the value 214, meaning Transformation Applied, of the Warning header code, which has the additional advantage of being applicable to responses with any status code. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/203`, }, { Name: "204", Summary: "No Content", Description: `The HTTP 204 No Content success status response code indicates that the request has succeeded, but that the client doesn't need to go away from its current page. A 204 response is cacheable by default. An ETag header is included in such a response. The common use case is to return 204 as a result of a PUT request, updating a resource, without changing the current content of the page displayed to the user. If the resource is created, 201 Created is returned instead. If the page should be changed to the newly updated page, the 200 should be used instead. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/204`, }, { Name: "205", Summary: "Reset Content", Description: `The HTTP 205 Reset Content response status tells the client to reset the document view, so for example to clear the content of a form, reset a canvas state, or to refresh the UI. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/205`, }, { Name: "206", Summary: "Partial Content (WebDAV)", Description: `The HTTP 206 Partial Content success status response code indicates that the request has succeeded and has the body contains the requested ranges of data, as described in the Range header of the request. 
If there is only one range, the Content-Type of the whole response is set to the type of the document, and a Content-Range is provided. If several ranges are sent back, the Content-Type is set to multipart/byteranges and each fragment covers one range, with Content-Range and Content-Type describing it. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/206`, }, { Name: "207", Summary: "Multi-Status (WebDAV)", Description: `Conveys information about multiple resources, for situations where multiple status codes might be appropriate. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "208", Summary: "Already Reported (WebDAV)", Description: `Used inside a <dav:propstat> response element to avoid repeatedly enumerating the internal members of multiple bindings to the same collection. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "226", Summary: "IM Used", Description: `The server has fulfilled a GET request for the resource, and the response is a representation of the result of one or more instance-manipulations applied to the current instance. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "3xx", IsTitle: true, Summary: "Redirection messages", Description: `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#3xx_Redirection`, }, { Name: "300", Summary: "Multiple Choices", Description: `The HTTP 300 Multiple Choices redirect status response code indicates that the request has more than one possible responses. The user-agent or the user should choose one of them. As there is no standardized way of choosing one of the responses, this response code is very rarely used. If the server has a preferred choice, it should generate a Location header. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/300`, }, { Name: "301", Summary: "Moved Permanently", Description: `The HyperText Transfer Protocol (HTTP) 301 Moved Permanently redirect status response code indicates that the resource requested has been definitively moved to the URL given by the Location headers. A browser redirects to this page and search engines update their links to the resource (in 'SEO-speak', it is said that the 'link-juice' is sent to the new URL). Even if the specification requires the method (and the body) not to be altered when the redirection is performed, not all user-agents align with it - you can still find this type of bugged software out there. It is therefore recommended to use the 301 code only as a response for GET or HEAD methods and to use the 308 Permanent Redirect for POST methods instead, as the method change is explicitly prohibited with this status. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/301`, }, { Name: "302", Summary: "Found", Description: `The HyperText Transfer Protocol (HTTP) 302 Found redirect status response code indicates that the resource requested has been temporarily moved to the URL given by the Location header. A browser redirects to this page but search engines don't update their links to the resource (in 'SEO-speak', it is said that the 'link-juice' is not sent to the new URL). Even if the specification requires the method (and the body) not to be altered when the redirection is performed, not all user-agents conform here - you can still find this type of bugged software out there. It is therefore recommended to set the 302 code only as a response for GET or HEAD methods and to use 307 Temporary Redirect instead, as the method change is explicitly prohibited in that case. In the cases where you want the method used to be changed to GET, use 303 See Other instead. 
This is useful when you want to give a response to a PUT method that is not the uploaded resource but a confirmation message such as: 'you successfully uploaded XYZ'. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/302`, }, { Name: "303", Summary: "See other", Description: `The HyperText Transfer Protocol (HTTP) 303 See Other redirect status response code indicates that the redirects don't link to the newly uploaded resources, but to another page (such as a confirmation page or an upload progress page). This response code is usually sent back as a result of PUT or POST. The method used to display this redirected page is always GET. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/303`, }, { Name: "304", Summary: "Not Modified", Description: `The HTTP 304 Not Modified client redirection response code indicates that there is no need to retransmit the requested resources. It is an implicit redirection to a cached resource. This happens when the request method is safe, like a GET or a HEAD request, or when the request is conditional and uses a If-None-Match or a If-Modified-Since header. The equivalent 200 OK response would have included the headers Cache-Control, Content-Location, Date, ETag, Expires, and Vary. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/304`, }, { Name: "305", Summary: "Use proxy", Description: `Defined in a previous version of the HTTP specification to indicate that a requested response must be accessed by a proxy. It has been deprecated due to security concerns regarding in-band configuration of a proxy. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "306", Summary: "unused", Description: `This response code is no longer used; it is just reserved. It was used in a previous version of the HTTP/1.1 specification. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "307", Summary: "Temporary redirect", Description: `HTTP 307 Temporary Redirect redirect status response code indicates that the resource requested has been temporarily moved to the URL given by the Location headers. The method and the body of the original request are reused to perform the redirected request. In the cases where you want the method used to be changed to GET, use 303 See Other instead. This is useful when you want to give an answer to a PUT method that is not the uploaded resources, but a confirmation message (like "You successfully uploaded XYZ"). The only difference between 307 and 302 is that 307 guarantees that the method and the body will not be changed when the redirected request is made. With 302, some old clients were incorrectly changing the method to GET: the behavior with non-GET methods and 302 is then unpredictable on the Web, whereas the behavior with 307 is predictable. For GET requests, their behavior is identical. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/307`, }, { Name: "308", Summary: "Permanent redirect", Description: `The HyperText Transfer Protocol (HTTP) 308 Permanent Redirect redirect status response code indicates that the resource requested has been definitively moved to the URL given by the Location headers. A browser redirects to this page and search engines update their links to the resource (in 'SEO-speak', it is said that the 'link-juice' is sent to the new URL). The request method and the body will not be altered, whereas 301 may incorrectly sometimes be changed to a GET method. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308`, }, { Name: "4xx", IsTitle: true, Summary: "Client error responses", Description: `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#4xx_Client_errors`, }, { Name: "400", Summary: "Bad request", Description: `The HyperText Transfer Protocol (HTTP) 400 Bad Request response status code indicates that the server cannot or will not process the request due to something that is perceived to be a client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing). https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400`, }, { Name: "401", Summary: "Unauthorized", Description: `The HTTP 401 Unauthorized client error status response code indicates that the request has not been applied because it lacks valid authentication credentials for the target resource. This status is sent with a WWW-Authenticate header that contains information on how to authorize correctly. This status is similar to 403, but in this case, authentication is possible. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401`, }, { Name: "402", Summary: "Payment required", Description: `The HTTP 402 Payment Required is a nonstandard client error status response code that is reserved for future use. Sometimes, this code indicates that the request can not be processed until the client makes a payment. Originally it was created to enable digital cash or (micro) payment systems and would indicate that the requested content is not available until the client makes a payment. However, no standard use convention exists and different entities use it in different contexts. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/402`, }, { Name: "403", Summary: "Forbidden", Description: `The HTTP 403 Forbidden client error status response code indicates that the server understood the request but refuses to authorize it. 
This status is similar to 401, but in this case, re-authenticating will make no difference. The access is permanently forbidden and tied to the application logic, such as insufficient rights to a resource. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/403`, }, { Name: "404", Summary: "Not found", Description: `The HTTP 404 Not Found client error response code indicates that the server can't find the requested resource. Links which lead to a 404 page are often called broken or dead links, and can be subject to link rot. A 404 status code does not indicate whether the resource is temporarily or permanently missing. But if a resource is permanently removed, a 410 (Gone) should be used instead of a 404 status. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404`, }, { Name: "405", Summary: "Method not allowed", Description: `The HyperText Transfer Protocol (HTTP) 405 Method Not Allowed response status code indicates that the request method is known by the server but is not supported by the target resource. The server MUST generate an Allow header field in a 405 response containing a list of the target resource's currently supported methods. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/405`, }, { Name: "406", Summary: "Not acceptable", Description: `The HyperText Transfer Protocol (HTTP) 406 Not Acceptable client error response code indicates that the server cannot produce a response matching the list of acceptable values defined in the request's proactive content negotiation headers, and that the server is unwilling to supply a default representation. Proactive content negotiation headers include: Accept Accept-Charset Accept-Encoding Accept-Language In practice, this error is very rarely used. Instead of responding using this error code, which would be cryptic for the end user and difficult to fix, servers ignore the relevant header and serve an actual page to the user. 
It is assumed that even if the user won't be completely happy, they will prefer this to an error code. If a server returns such an error status, the body of the message should contain the list of the available representations of the resources, allowing the user to choose among them. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/406`, }, { Name: "407", Summary: "Proxy authentication required", Description: `The HTTP 407 Proxy Authentication Required client error status response code indicates that the request has not been applied because it lacks valid authentication credentials for a proxy server that is between the browser and the server that can access the requested resource. This status is sent with a Proxy-Authenticate header that contains information on how to authorize correctly. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/407`, }, { Name: "408", Summary: "Request timeout", Description: `The HyperText Transfer Protocol (HTTP) 408 Request Timeout response status code means that the server would like to shut down this unused connection. It is sent on an idle connection by some servers, even without any previous request by the client. A server should send the "close" Connection header field in the response, since 408 implies that the server has decided to close the connection rather than continue waiting. This response is used much more since some browsers, like Chrome, Firefox 27+, and IE9, use HTTP pre-connection mechanisms to speed up surfing. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408`, }, { Name: "409", Summary: "Conflict", Description: `The HTTP 409 Conflict response status code indicates a request conflict with current state of the server. Conflicts are most likely to occur in response to a PUT request. For example, you may get a 409 response when uploading a file which is older than the one already on the server resulting in a version control conflict. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/409`, }, { Name: "410", Summary: "Gone", Description: `The HyperText Transfer Protocol (HTTP) 410 Gone client error response code indicates that access to the target resource is no longer available at the origin server and that this condition is likely to be permanent. If you don't know whether this condition is temporary or permanent, a 404 status code should be used instead. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/410`, }, { Name: "411", Summary: "Length required", Description: `The HyperText Transfer Protocol (HTTP) 411 Length Required client error response code indicates that the server refuses to accept the request without a defined Content-Length header. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/411`, }, { Name: "412", Summary: "Precondition failed", Description: `The HyperText Transfer Protocol (HTTP) 412 Precondition Failed client error response code indicates that access to the target resource has been denied. This happens with conditional requests on methods other than GET or HEAD when the condition defined by the If-Unmodified-Since or If-None-Match headers is not fulfilled. In that case, the request, usually an upload or a modification of a resource, cannot be made and this error response is sent back. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/412`, }, { Name: "413", Summary: "Payload too large", Description: `The HTTP 413 Payload Too Large response status code indicates that the request entity is larger than limits defined by server; the server might close the connection or return a Retry-After header field. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/413`, }, { Name: "414", Summary: "URI too long", Description: `The HTTP 414 URI Too Long response status code indicates that the URL requested by the client is longer than the server is willing to interpret. 
There are a few conditions when this might occur: A client has improperly converted a POST request to a GET request with more than ≈2 kB of submitted data. A client has descended into a loop of redirection (for example, a redirected URL prefix that points to a suffix of itself, or mishandled relative URLs), The server is under attack by a client attempting to exploit potential security holes. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/414`, }, { Name: "415", Summary: "Unsupported media type", Description: `The HTTP 415 Unsupported Media Type client error response code indicates that the server refuses to accept the request because the payload format is in an unsupported format. The format problem might be due to the request's indicated Content-Type or Content-Encoding, or as a result of inspecting the data directly. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/415`, }, { Name: "416", Summary: "Range not satisfiable", Description: `The HyperText Transfer Protocol (HTTP) 416 Range Not Satisfiable error response code indicates that a server cannot serve the requested ranges. The most likely reason is that the document doesn't contain such ranges, or that the Range header value, though syntactically correct, doesn't make sense. The 416 response message contains a Content-Range indicating an unsatisfied range (that is a '*') followed by a '/' and the current length of the resource. E.g. Content-Range: bytes */12777 Faced with this error, browsers usually either abort the operation (for example, a download will be considered as non-resumable) or ask for the whole document again. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/416`, }, { Name: "417", Summary: "Expectation failed", Description: `The HTTP 417 Expectation Failed client error response code indicates that the expectation given in the request's Expect header could not be met. See the Expect header for more details. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/417`, }, { Name: "418", Summary: "I'm a teapot", Description: `The HTTP 418 I'm a teapot client error response code indicates that the server refuses to brew coffee because it is a teapot. This error is a reference to Hyper Text Coffee Pot Control Protocol which was an April Fools' joke in 1998. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/418`, }, { Name: "421", Summary: "Misdirected Request", Description: `The request was directed at a server that is not able to produce a response. This can be sent by a server that is not configured to produce responses for the combination of scheme and authority that are included in the request URI. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "422", Summary: "Unprocessable Entity (WebDAV)", Description: `The HyperText Transfer Protocol (HTTP) 422 Unprocessable Entity response status code indicates that the server understands the content type of the request entity, and the syntax of the request entity is correct, but it was unable to process the contained instructions. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422`, }, { Name: "423", Summary: "Locked (WebDAV)", Description: `The resource that is being accessed is locked. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "424", Summary: "Failed Dependency (WebDAV)", Description: `The request failed due to failure of a previous request. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status`, }, { Name: "425", Summary: "Too early", Description: `The HyperText Transfer Protocol (HTTP) 425 Too Early response status code indicates that the server is unwilling to risk processing a request that might be replayed, which creates the potential for a replay attack. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/425`, }, { Name: "426", Summary: "Upgrade required", Description: `The HTTP 426 Upgrade Required client error response code indicates that the server refuses to perform the request using the current protocol but might be willing to do so after the client upgrades to a different protocol. The server sends an Upgrade header with this response to indicate the required protocol(s). https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/426`, }, { Name: "428", Summary: "Precondition required", Description: `The HTTP 428 Precondition Required response status code indicates that the server requires the request to be conditional. Typically, this means that a required precondition header, such as If-Match, is missing. When a precondition header is not matching the server side state, the response should be 412 Precondition Failed. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/428`, }, { Name: "429", Summary: "Too many requests", Description: `The HTTP 429 Too Many Requests response status code indicates the user has sent too many requests in a given amount of time ("rate limiting"). A Retry-After header might be included to this response indicating how long to wait before making a new request. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429`, }, { Name: "431", Summary: "Request Header Fields Too Large", Description: `The HTTP 431 Request Header Fields Too Large response status code indicates that the server refuses to process the request because the request’s HTTP headers are too long. The request may be resubmitted after reducing the size of the request headers. 431 can be used when the total size of request headers is too large, or when a single header field is too large. To help those running into this error, indicate which of the two is the problem in the response body — ideally, also include which headers are too large. 
This lets users attempt to fix the problem, such as by clearing their cookies. Servers will often produce this status if: The Referer URL is too long There are too many Cookies sent in the request https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/431`, }, { Name: "451", Summary: "Unavailable For Legal Reasons", Description: `The HyperText Transfer Protocol (HTTP) 451 Unavailable For Legal Reasons client error response code indicates that the user requested a resource that is not available due to legal reasons, such as a web page for which a legal action has been issued. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/451`, }, { Name: "5xx", IsTitle: true, Summary: "Server error responses", Description: `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_Server_errors`, }, { Name: "500", Summary: "Internal server error", Description: `The HyperText Transfer Protocol (HTTP) 500 Internal Server Error server error response code indicates that the server encountered an unexpected condition that prevented it from fulfilling the request. This error response is a generic "catch-all" response. Usually, this indicates the server cannot find a better 5xx error code to response. Sometimes, server administrators log error responses like the 500 status code with more details about the request to prevent the error from happening again in the future. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500`, }, { Name: "501", Summary: "Not implemented", Description: `The HyperText Transfer Protocol (HTTP) 501 Not Implemented server error response code means that the server does not support the functionality required to fulfill the request. This status can also send a Retry-After header, telling the requester when to check back to see if the functionality is supported by then. 
501 is the appropriate response when the server does not recognize the request method and is incapable of supporting it for any resource. The only methods that servers are required to support (and therefore that must not return 501) are GET and HEAD. If the server does recognize the method, but intentionally does not support it, the appropriate response is 405 Method Not Allowed. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/501`, }, { Name: "502", Summary: "Bad gateway", Description: `The HyperText Transfer Protocol (HTTP) 502 Bad Gateway server error response code indicates that the server, while acting as a gateway or proxy, received an invalid response from the upstream server. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/502`, }, { Name: "503", Summary: "Service unavailable", Description: `The HyperText Transfer Protocol (HTTP) 503 Service Unavailable server error response code indicates that the server is not ready to handle the request. Common causes are a server that is down for maintenance or that is overloaded. This response should be used for temporary conditions and the Retry-After HTTP header should, if possible, contain the estimated time for the recovery of the service. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/503`, }, { Name: "504", Summary: "Gateway timeout", Description: `The HyperText Transfer Protocol (HTTP) 504 Gateway Timeout server error response code indicates that the server, while acting as a gateway or proxy, did not get a response in time from the upstream server that it needed in order to complete the request. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/504`, }, { Name: "505", Summary: "HTTP version no supported", Description: `The HyperText Transfer Protocol (HTTP) 505 HTTP Version Not Supported response status code indicates that the HTTP version used in the request is not supported by the server. 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/505`, }, { Name: "506", Summary: "Variant also negotiates", Description: `The HyperText Transfer Protocol (HTTP) 506 Variant Also Negotiates response status code may be given in the context of Transparent Content Negotiation (see RFC 2295). This protocol enables a client to retrieve the best variant of a given resource, where the server supports multiple variants. The Variant Also Negotiates status code indicates an internal server configuration error in which the chosen variant is itself configured to engage in content negotiation, so is not a proper negotiation endpoint. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/506`, }, { Name: "507", Summary: "Insufficient Storage (WebDAV)", Description: `The HyperText Transfer Protocol (HTTP) 507 Insufficient Storage response status code may be given in the context of the Web Distributed Authoring and Versioning (WebDAV) protocol (see RFC 4918). It indicates that a method could not be performed because the server cannot store the representation needed to successfully complete the request. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/507`, }, { Name: "508", Summary: "Loop Detected (WebDAV)", Description: `The HyperText Transfer Protocol (HTTP) 508 Loop Detected response status code may be given in the context of the Web Distributed Authoring and Versioning (WebDAV) protocol. It indicates that the server terminated an operation because it encountered an infinite loop while processing a request with "Depth: infinity". This status indicates that the entire operation failed. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/508`, }, { Name: "510", Summary: "Not extended", Description: `The HyperText Transfer Protocol (HTTP) 510 Not Extended response status code is sent in the context of the HTTP Extension Framework, defined in RFC 2774. 
In that specification a client may send a request that contains an extension declaration, that describes the extension to be used. If the server receives such a request, but any described extensions are not supported for the request, then the server responds with the 510 status code. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/510`, }, { Name: "511", Summary: "Network Authentication Required", Description: `The HTTP 511 Network Authentication Required response status code indicates that the client needs to authenticate to gain network access. This status is not generated by origin servers, but by intercepting proxies that control access to the network. Network operators sometimes require some authentication, acceptance of terms, or other user interaction before granting access (for example in an internet café or at an airport). They often identify clients who have not done so using their Media Access Control (MAC) addresses. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/511`, }, }
statuses.go
0.882927
0.569374
statuses.go
starcoder
package model import ( "fmt" "strconv" "strings" "sync" "unsafe" "github.com/timescale/promscale/pkg/prompb" ) // SeriesID represents a globally unique id for the series. This should be equivalent // to the PostgreSQL type in the series table (currently BIGINT). type SeriesID int64 const invalidSeriesID = -1 func (s SeriesID) String() string { return strconv.FormatInt(int64(s), 10) } //Epoch represents the series epoch type SeriesEpoch int64 const InvalidSeriesEpoch = -1 // Series stores a labels.Series in its canonical string representation type Series struct { //protects names, values, seriesID, epoch //str and metricName are immutable and doesn't need a lock lock sync.RWMutex names []string values []string metricName string str string seriesID SeriesID epoch SeriesEpoch } func NewSeries(key string, labelPairs []prompb.Label) *Series { series := &Series{ names: make([]string, len(labelPairs)), values: make([]string, len(labelPairs)), str: key, seriesID: invalidSeriesID, epoch: InvalidSeriesEpoch, } for i, l := range labelPairs { series.names[i] = l.Name series.values[i] = l.Value if l.Name == MetricNameLabelName { series.metricName = l.Value } } return series } //NameValues returns the names and values, only valid if the seriesIDIsNotSet func (l *Series) NameValues() (names []string, values []string, ok bool) { l.lock.RLock() defer l.lock.RUnlock() return l.names, l.values, !l.isSeriesIDSetNoLock() } func (l *Series) MetricName() string { return l.metricName } // Get a string representation for hashing and comparison // This representation is guaranteed to uniquely represent the underlying label // set, though need not human-readable, or indeed, valid utf-8 func (l *Series) String() string { return l.str } // Compare returns a comparison int between two Labels func (l *Series) Compare(b *Series) int { return strings.Compare(l.str, b.str) } // Equal returns true if two Labels are equal func (l *Series) Equal(b *Series) bool { return l.str == b.str } func (l 
*Series) isSeriesIDSetNoLock() bool { return l.seriesID != invalidSeriesID } func (l *Series) IsSeriesIDSet() bool { l.lock.RLock() defer l.lock.RUnlock() return l.isSeriesIDSetNoLock() } //FinalSizeBytes returns the size in bytes /after/ the seriesID is set func (l *Series) FinalSizeBytes() uint64 { //size is the base size of the struct + the str and metricName strings //names and values are not counted since they will be nilled out return uint64(unsafe.Sizeof(*l)) + uint64(len(l.str)+len(l.metricName)) // #nosec } func (l *Series) GetSeriesID() (SeriesID, SeriesEpoch, error) { l.lock.RLock() defer l.lock.RUnlock() switch l.seriesID { case invalidSeriesID: return 0, 0, fmt.Errorf("Series id not set") case 0: return 0, 0, fmt.Errorf("Series id invalid") default: return l.seriesID, l.epoch, nil } } //note this has to be idempotent func (l *Series) SetSeriesID(sid SeriesID, eid SeriesEpoch) { l.lock.Lock() defer l.lock.Unlock() //TODO: Unset l.Names and l.Values, no longer used l.seriesID = sid l.epoch = eid l.names = nil l.values = nil }
pkg/pgmodel/model/series.go
0.739986
0.460471
series.go
starcoder
package reward

import (
	"github.com/filecoin-project/go-state-types/abi"
	"github.com/filecoin-project/go-state-types/big"
	"github.com/filecoin-project/specs-actors/v2/actors/util/smoothing"
)

// Spacetime is a quantity of space * time (in byte-epochs) representing power
// committed to the network for some duration.
type Spacetime = big.Int

// InitialRewardPositionEstimateStr is 36.266260308195979333 FIL, scaled by 10^18
// (see the linked computation).
// https://www.wolframalpha.com/input/?i=IntegerPart%5B330%2C000%2C000+*+%281+-+Exp%5B-Log%5B2%5D+%2F+%286+*+%281+year+%2F+30+seconds%29%29%5D%29+*+10%5E18%5D
const InitialRewardPositionEstimateStr = "36266260308195979333"

// InitialRewardPositionEstimate is the parsed big-int form of the constant above.
var InitialRewardPositionEstimate = big.MustFromString(InitialRewardPositionEstimateStr)

// InitialRewardVelocityEstimate is -1.0982489*10^-7 FIL per epoch: the change of
// simple minted tokens between epochs 0 and 1.
// https://www.wolframalpha.com/input/?i=IntegerPart%5B%28Exp%5B-Log%5B2%5D+%2F+%286+*+%281+year+%2F+30+seconds%29%29%5D+-+1%29+*+10%5E18%5D
var InitialRewardVelocityEstimate = abi.NewTokenAmount(-109897758509)

// State is the reward actor's state.
// Changed since v0:
// - ThisEpochRewardSmoothed is not a pointer
type State struct {
	// CumsumBaseline is a target CumsumRealized needs to reach for EffectiveNetworkTime to increase.
	// CumsumBaseline and CumsumRealized are expressed in byte-epochs.
	CumsumBaseline Spacetime

	// CumsumRealized is cumulative sum of network power capped by BaselinePower(epoch).
	CumsumRealized Spacetime

	// EffectiveNetworkTime is ceiling of real effective network time `theta` based on
	// CumsumBaselinePower(theta) == CumsumRealizedPower.
	// Theta captures the notion of how much the network has progressed in its baseline
	// and in advancing network time.
	EffectiveNetworkTime abi.ChainEpoch

	// EffectiveBaselinePower is the baseline power at the EffectiveNetworkTime epoch.
	EffectiveBaselinePower abi.StoragePower

	// ThisEpochReward is the reward to be paid per WinCount to block producers.
	// The actual reward total paid out depends on the number of winners in any round.
	// This value is recomputed every non-null epoch and used in the next non-null epoch.
	ThisEpochReward abi.TokenAmount
	// ThisEpochRewardSmoothed is the smoothed ThisEpochReward.
	ThisEpochRewardSmoothed smoothing.FilterEstimate

	// ThisEpochBaselinePower is the baseline power the network is targeting at st.Epoch.
	ThisEpochBaselinePower abi.StoragePower

	// Epoch tracks for which epoch the Reward was computed.
	Epoch abi.ChainEpoch

	// TotalStoragePowerReward tracks the total FIL awarded to block miners.
	TotalStoragePowerReward abi.TokenAmount
}

// ConstructState builds the initial reward state from the current realized power.
// Epoch starts at -1 and is advanced to 0 by updateToNextEpochWithReward so that
// the first epoch's reward is already computed.
// BaselineInitialValue and InitBaselinePower are defined elsewhere in this package.
func ConstructState(currRealizedPower abi.StoragePower) *State {
	st := &State{
		CumsumBaseline:         big.Zero(),
		CumsumRealized:         big.Zero(),
		EffectiveNetworkTime:   0,
		EffectiveBaselinePower: BaselineInitialValue,
		ThisEpochReward:        big.Zero(),
		ThisEpochBaselinePower: InitBaselinePower(),
		Epoch:                  -1,

		ThisEpochRewardSmoothed: smoothing.NewEstimate(InitialRewardPositionEstimate, InitialRewardVelocityEstimate),
		TotalStoragePowerReward: big.Zero(),
	}

	st.updateToNextEpochWithReward(currRealizedPower)

	return st
}

// updateToNextEpoch takes in current realized power and updates internal state.
// Used for update of internal state during null rounds.
// It advances the epoch, rolls the baseline forward, accumulates capped realized
// power, and advances effective network time while realized cumsum exceeds the
// baseline cumsum.
func (st *State) updateToNextEpoch(currRealizedPower abi.StoragePower) {
	st.Epoch++
	st.ThisEpochBaselinePower = BaselinePowerFromPrev(st.ThisEpochBaselinePower)
	cappedRealizedPower := big.Min(st.ThisEpochBaselinePower, currRealizedPower)
	st.CumsumRealized = big.Add(st.CumsumRealized, cappedRealizedPower)

	for st.CumsumRealized.GreaterThan(st.CumsumBaseline) {
		st.EffectiveNetworkTime++
		st.EffectiveBaselinePower = BaselinePowerFromPrev(st.EffectiveBaselinePower)
		st.CumsumBaseline = big.Add(st.CumsumBaseline, st.EffectiveBaselinePower)
	}
}

// updateToNextEpochWithReward takes in a current realized power for a reward
// epoch and computes and updates reward state to track reward for the next epoch.
// The reward depends on theta both before and after the epoch update, so the
// prev value is captured first.
func (st *State) updateToNextEpochWithReward(currRealizedPower abi.StoragePower) {
	prevRewardTheta := computeRTheta(st.EffectiveNetworkTime, st.EffectiveBaselinePower, st.CumsumRealized, st.CumsumBaseline)
	st.updateToNextEpoch(currRealizedPower)
	currRewardTheta := computeRTheta(st.EffectiveNetworkTime, st.EffectiveBaselinePower, st.CumsumRealized, st.CumsumBaseline)

	st.ThisEpochReward = computeReward(st.Epoch, prevRewardTheta, currRewardTheta)
}

// updateSmoothedEstimates folds ThisEpochReward into the smoothed reward
// estimate over a delta of `delta` epochs using the default alpha/beta filter.
func (st *State) updateSmoothedEstimates(delta abi.ChainEpoch) {
	filterReward := smoothing.LoadFilter(st.ThisEpochRewardSmoothed, smoothing.DefaultAlpha, smoothing.DefaultBeta)
	st.ThisEpochRewardSmoothed = filterReward.NextEstimate(st.ThisEpochReward, delta)
}
actors/builtin/reward/reward_state.go
0.778313
0.523238
reward_state.go
starcoder
package main import "math" type Pot struct { minRaise money totalToCall money potNumber uint bets []Bet } type Bet struct { potNumber uint player guid value money } //newPot is a constructor for a new Pot struct func newPot() *Pot { pot := new(Pot) pot.bets = make([]Bet, 0) return pot } //Resolves partial pots from previous round // increments potNumber to a previously unused number func (pot *Pot) newRound() { pot.condenseBets() pot.makeSidePots() pot.minRaise = 0 pot.totalToCall = 0 pot.potNumber++ } func (pot *Pot) condenseBets() { playerBets := make(map[guid]money) betsCopy := make([]Bet, 0) for _, bet := range pot.bets { if bet.potNumber == pot.potNumber { playerBets[bet.player] += bet.value } else { betsCopy = append(betsCopy, bet) } } for k, v := range playerBets { betsCopy = append(betsCopy, Bet{potNumber: pot.potNumber, player: k, value: v}) } pot.bets = betsCopy } func (pot *Pot) allBetsEqual(potNumber uint) bool { var prevBet money for _, bet := range pot.bets { if bet.potNumber == potNumber { prevBet = bet.value break } } for _, bet := range pot.bets { if bet.potNumber != potNumber { continue } if prevBet != bet.value { return false } } return true } func (pot *Pot) makeSidePots() { if pot.allBetsEqual(pot.potNumber) { return } pot.potNumber++ minimum := money(math.MaxUint64) for _, b := range pot.bets { if b.value < minimum && b.potNumber == (pot.potNumber-1) { minimum = b.value } } for i := 0; i < len(pot.bets); i++ { b := pot.bets[i] if b.value > minimum && b.potNumber == (pot.potNumber-1) { excess := b.value - minimum b.value = minimum pot.bets = append(pot.bets, Bet{potNumber: pot.potNumber, player: b.player, value: excess}) } pot.bets[i] = b } pot.makeSidePots() } func (p *Pot) receiveBet(id guid, bet money) { betSoFar := p.totalPlayerBetThisRound(id) raise := p.raiseAmount(id, bet) if betSoFar+bet > p.totalToCall { p.totalToCall = betSoFar + bet } if raise > p.minRaise { p.minRaise = raise } newBet := Bet{potNumber: p.potNumber, player: id, 
value: bet} p.bets = append(p.bets, newBet) } func (p *Pot) totalInPot() money { var sum money = 0 for _, m := range p.bets { sum += m.value } return sum } func (p *Pot) totalPlayerBetThisRound(id guid) money { sum := money(0) for _, bet := range p.bets { if bet.player == id && bet.potNumber == p.potNumber { sum += bet.value } } return sum } // stakeholders returns a map of sidepots to players who have bet in each sidepot. func (p *Pot) stakeholders() map[uint][]guid { stakeholders := make(map[uint][]guid) for _, bet := range p.bets { if _, ok := stakeholders[bet.potNumber]; !ok { stakeholders[bet.potNumber] = make([]guid, 0) } stakeholders[bet.potNumber] = append(stakeholders[bet.potNumber], bet.player) } return stakeholders } // amounts returns a map of sidepots to amounts. func (p *Pot) amounts() map[uint]money { amounts := make(map[uint]money) for _, bet := range p.bets { amounts[bet.potNumber] += bet.value } return amounts } // commitBet decrements player wealth by bet and adds bet to the current pot. func (p *Pot) commitBet(player *Player, bet money) { if bet < 0 { panic("trying to bet < 0") } p.receiveBet(player.guid, bet) player.wealth -= bet } // betInvalid returns true if the bet is a valid bet, and false if the bet is not valid. func (p *Pot) betInvalid(player *Player, bet money) bool { raise := p.raiseAmount(player.guid, bet) return (bet > player.wealth) || (raise > 0 && raise < p.minRaise) || (bet < player.wealth && (p.totalPlayerBetThisRound(player.guid)+bet) < p.totalToCall) } // raiseAmount returns the amount the current bet is raising (possibly 0). func (p *Pot) raiseAmount(id guid, betAmount money) money { return p.totalPlayerBetThisRound(id) + betAmount - p.totalToCall }
pot.go
0.599485
0.422624
pot.go
starcoder
package zebra

import "fmt"

/*
The Zebra Puzzle facts given:
 1. There are five houses.
 2. The Englishman lives in the red house.
 3. The Spaniard owns the dog.
 4. Coffee is drunk in the green house.
 5. The Ukrainian drinks tea.
 6. The green house is immediately to the right of the ivory house.
 7. The Old Gold smoker owns snails.
 8. Kools are smoked in the yellow house.
 9. Milk is drunk in the middle house.
10. The Norwegian lives in the first house.
11. The man who smokes Chesterfields lives in the house next to the man with the fox.
12. Kools are smoked in the house next to the house where the horse is kept.
13. The Lucky Strike smoker drinks orange juice.
14. The Japanese smokes Parliaments.
15. The Norwegian lives next to the blue house.
*/

// Solution holds the answers to the puzzle's two questions.
type Solution struct {
	DrinksWater string
	OwnsZebra   string
}

// SolvePuzzle returns a solution answering the two questions for the zebra puzzle,
// which are "Who drinks water?", and "Who owns the zebra?"
// It brute-forces permutations of house assignments for each attribute category,
// pruning each nested level with the facts that involve only categories fixed so far.
func SolvePuzzle() (solution Solution) {
	// 1. There are five houses.
	const (
		firstHouse = iota // name/identity of each house
		secondHouse
		middleHouse
		fourthHouse
		fifthHouse
		numHouses
	)
	houses := []int{firstHouse, secondHouse, middleHouse, fourthHouse, fifthHouse}

	// Generate the permutations of all the house identities.
	houseIdentityPermutations := permutations(houses, numHouses)

	// Note: Below, the pattern of var naming of "iXyz" is notation for "potential house (i)dentity of Xyz".
	// So for example, when comparing iEnglishman == iRed, the test is determining whether the permutation
	// of placements for both the Englishman and the red house agree to meet the given fact #2.
	for _, houseColors := range houseIdentityPermutations {
		iRed, iGreen, iIvory, iYellow, iBlue := assign(houseColors)
		// 6. The green house is immediately to the right of the ivory house.
		if !justRightOf(iGreen, iIvory) {
			continue
		}
		for _, residents := range houseIdentityPermutations {
			iEnglishman, iSpaniard, iUkrainian, iNorwegian, iJapanese := assign(residents)
			// 2. The Englishman lives in the red house.
			// 10. The Norwegian lives in the first house.
			// 15. The Norwegian lives next to the blue house.
			if iEnglishman != iRed || iNorwegian != firstHouse || !nextTo(iNorwegian, iBlue) {
				continue
			}
			for _, beverages := range houseIdentityPermutations {
				iCoffee, iTea, iMilk, iOrangeJuice, iWater := assign(beverages)
				// 4. Coffee is drunk in the green house.
				// 5. The Ukrainian drinks tea.
				// 9. Milk is drunk in the middle house.
				if iCoffee != iGreen || iUkrainian != iTea || iMilk != middleHouse {
					continue
				}
				for _, smokeBrands := range houseIdentityPermutations {
					iOldGold, iKools, iChesterfields, iLuckyStrike, iParliaments := assign(smokeBrands)
					// 8. Kools are smoked in the yellow house.
					// 13. The Lucky Strike smoker drinks orange juice.
					// 14. The Japanese smokes Parliaments.
					if iKools != iYellow || iLuckyStrike != iOrangeJuice || iJapanese != iParliaments {
						continue
					}
					for _, pets := range houseIdentityPermutations {
						iDog, iSnails, iFox, iHorse, iZebra := assign(pets)
						// 3. The Spaniard owns the dog.
						// 7. The Old Gold smoker owns snails.
						// 11. The man who smokes Chesterfields lives in the house next to the man with the fox.
						// 12. Kools are smoked in the house next to the house where the horse is kept.
						if iSpaniard != iDog || iOldGold != iSnails || !nextTo(iChesterfields, iFox) || !nextTo(iKools, iHorse) {
							continue
						}
						// At this point all criteria are met, so we arrived at the solution,
						// and can fill in an array h of house facts (actually we only need the residents).
						var h [numHouses]struct {
							resident   string
							color      string
							pet        string
							beverage   string
							smokeBrand string
						}
						h[iEnglishman].resident = "Englishman"
						h[iSpaniard].resident = "Spaniard"
						h[iUkrainian].resident = "Ukrainian"
						h[iJapanese].resident = "Japanese"
						h[iNorwegian].resident = "Norwegian"
						solution = Solution{
							DrinksWater: h[iWater].resident,
							OwnsZebra:   h[iZebra].resident}
						if !showHouseFacts {
							return
						}
						// Debug output only: label every attribute and print each house.
						h[iRed].color = "red"
						h[iGreen].color = "green"
						h[iIvory].color = "ivory"
						h[iYellow].color = "yellow"
						h[iBlue].color = "blue"
						h[iDog].pet = "dog"
						h[iSnails].pet = "snails"
						h[iFox].pet = "fox"
						h[iHorse].pet = "horse"
						h[iZebra].pet = "zebra"
						h[iCoffee].beverage = "coffee"
						h[iTea].beverage = "tea"
						h[iMilk].beverage = "milk"
						h[iOrangeJuice].beverage = "orange juice"
						h[iWater].beverage = "water"
						h[iOldGold].smokeBrand = "OldGold"
						h[iKools].smokeBrand = "Kools"
						h[iChesterfields].smokeBrand = "Chesterfields"
						h[iLuckyStrike].smokeBrand = "LuckyStrike"
						h[iParliaments].smokeBrand = "Parliaments"
						var houseNames = [5]string{"first", "second", "middle", "fourth", "fifth"}
						for p := firstHouse; p <= fifthHouse; p++ {
							// Prepend "a " unless the pet name is plural (ends in 's').
							var separator string
							if h[p].pet[len(h[p].pet)-1] != 's' {
								separator = "a "
							}
							fmt.Printf("The %-10s lives in the %-6s house which is %-7s owns %-8s drinks %-13s and smokes %-13s\n",
								h[p].resident, houseNames[p], h[p].color+",", separator+h[p].pet+",", h[p].beverage+",", h[p].smokeBrand+".")
						}
						return
					}
				}
			}
		}
	}
	return
}

const showHouseFacts = false // when true, print all house facts for fun.

// assign is notational helper function which returns the successive members
// of a permutation slice as five distinct values.
func assign(p []int) (a, b, c, d, e int) {
	return p[0], p[1], p[2], p[3], p[4]
}

// justRightOf returns true if positionally x is just to the right of y.
func justRightOf(x, y int) bool {
	return (x - y) == 1
}

// nextTo returns true if positionally, x is next to y, differing only by 1.
func nextTo(x, y int) bool {
	return (x-y) == 1 || (y-x) == 1
}

// permutations returns a slice containing the r length permutations of the elements in iterable.
// The implementation is modeled after the Python itertools.permutations():
// indices tracks the current element ordering and cycles counts how many swaps
// remain at each position before that position resets and the next one advances.
func permutations(iterable []int, r int) (perms [][]int) {
	pool := iterable
	n := len(pool)

	if r > n {
		return
	}

	indices := make([]int, n)
	for i := range indices {
		indices[i] = i
	}

	cycles := make([]int, r)
	for i := range cycles {
		cycles[i] = n - i
	}

	// Emit the identity permutation first.
	result := make([]int, r)
	for i, el := range indices[:r] {
		result[i] = pool[el]
	}

	p := make([]int, len(result))
	copy(p, result)
	perms = append(perms, p)

	for n > 0 {
		i := r - 1
		for ; i >= 0; i-- {
			cycles[i]--
			if cycles[i] == 0 {
				// Position i exhausted: rotate its index to the end and reset its cycle,
				// then continue advancing the position to its left.
				index := indices[i]
				for j := i; j < n-1; j++ {
					indices[j] = indices[j+1]
				}
				indices[n-1] = index
				cycles[i] = n - i
			} else {
				// Swap in the next candidate for position i and emit a permutation.
				j := cycles[i]
				indices[i], indices[n-j] = indices[n-j], indices[i]
				for k := i; k < r; k++ {
					result[k] = pool[indices[k]]
				}
				p = make([]int, len(result))
				copy(p, result)
				perms = append(perms, p)
				break
			}
		}
		if i < 0 {
			// Every position exhausted: all permutations generated.
			return
		}
	}
	return
}
exercises/zebra-puzzle/example.go
0.566978
0.537588
example.go
starcoder
package core

import "math/rand"
import "github.com/zhenghaoz/gorse/base"

// Split dataset to a training set and a test set with ratio.
// The split is random (uses the global rand source; no explicit seed here).
func Split(data DataSetInterface, testRatio float64) (train, test DataSetInterface) {
	testSize := int(float64(data.Count()) * testRatio)
	perm := rand.Perm(data.Count())
	// Test Data
	testIndex := perm[:testSize]
	test = data.SubSet(testIndex)
	// Train Data
	trainIndex := perm[testSize:]
	train = data.SubSet(trainIndex)
	return
}

// Splitter split Data to train set and test set, returning parallel slices of
// train/test folds for a given seed.
type Splitter func(set DataSetInterface, seed int64) ([]DataSetInterface, []DataSetInterface)

// NewKFoldSplitter creates a k-fold splitter: each of the k folds serves as the
// test set once, with the remaining data as the training set. The first
// Count()%k folds receive one extra element.
func NewKFoldSplitter(k int) Splitter {
	return func(dataSet DataSetInterface, seed int64) (trainFolds, testFolds []DataSetInterface) {
		// Create folds
		trainFolds = make([]DataSetInterface, k)
		testFolds = make([]DataSetInterface, k)
		// Check nil
		if dataSet == nil {
			return
		}
		// Generate permutation
		rand.Seed(seed)
		perm := rand.Perm(dataSet.Count())
		// Split folds
		foldSize := dataSet.Count() / k
		begin, end := 0, 0
		for i := 0; i < k; i++ {
			end += foldSize
			if i < dataSet.Count()%k {
				end++
			}
			// Test Data
			testIndex := perm[begin:end]
			testFolds[i] = dataSet.SubSet(testIndex)
			// Train Data
			trainIndex := base.Concatenate(perm[0:begin], perm[end:dataSet.Count()])
			trainFolds[i] = dataSet.SubSet(trainIndex)
			begin = end
		}
		return trainFolds, testFolds
	}
}

// NewRatioSplitter creates a ratio splitter: `repeat` independent random
// train/test splits, each holding out testRatio of the data.
func NewRatioSplitter(repeat int, testRatio float64) Splitter {
	return func(dataSet DataSetInterface, seed int64) (trainFolds, testFolds []DataSetInterface) {
		trainFolds = make([]DataSetInterface, repeat)
		testFolds = make([]DataSetInterface, repeat)
		// Check nil
		if dataSet == nil {
			return
		}
		testSize := int(float64(dataSet.Count()) * testRatio)
		rand.Seed(seed)
		for i := 0; i < repeat; i++ {
			perm := rand.Perm(dataSet.Count())
			// Test Data
			testIndex := perm[:testSize]
			testFolds[i] = dataSet.SubSet(testIndex)
			// Train Data
			trainIndex := perm[testSize:]
			trainFolds[i] = dataSet.SubSet(trainIndex)
		}
		return trainFolds, testFolds
	}
}

// NewUserLOOSplitter creates a per-user leave-one-out Data splitter: for each
// user, one randomly chosen rating goes to the test set and the rest to the
// training set, repeated `repeat` times.
func NewUserLOOSplitter(repeat int) Splitter {
	return func(dataSet DataSetInterface, seed int64) (trainFolds, testFolds []DataSetInterface) {
		trainFolds = make([]DataSetInterface, repeat)
		testFolds = make([]DataSetInterface, repeat)
		// Check nil
		if dataSet == nil {
			return
		}
		rand.Seed(seed)
		for i := 0; i < repeat; i++ {
			// Pre-size: exactly one test rating per user, the rest are training.
			trainUsers, trainItems, trainRatings :=
				make([]int, 0, dataSet.Count()-dataSet.UserCount()),
				make([]int, 0, dataSet.Count()-dataSet.UserCount()),
				make([]float64, 0, dataSet.Count()-dataSet.UserCount())
			testUsers, testItems, testRatings :=
				make([]int, 0, dataSet.UserCount()),
				make([]int, 0, dataSet.UserCount()),
				make([]float64, 0, dataSet.UserCount())
			for innerUserId := 0; innerUserId < dataSet.UserCount(); innerUserId++ {
				irs := dataSet.UserByIndex(innerUserId)
				userId := dataSet.UserIndexer().ToID(innerUserId)
				// Pick the index of the single held-out rating for this user.
				out := rand.Intn(irs.Len())
				irs.ForEachIndex(func(i, index int, value float64) {
					itemId := dataSet.ItemIndexer().ToID(index)
					if i == out {
						testUsers = append(testUsers, userId)
						testItems = append(testItems, itemId)
						testRatings = append(testRatings, value)
					} else {
						trainUsers = append(trainUsers, userId)
						trainItems = append(trainItems, itemId)
						trainRatings = append(trainRatings, value)
					}
				})
			}
			trainFolds[i] = NewDataSet(trainUsers, trainItems, trainRatings)
			testFolds[i] = NewDataSet(testUsers, testItems, testRatings)
		}
		return trainFolds, testFolds
	}
}
core/splitter.go
0.559771
0.543287
splitter.go
starcoder
package main type Metrica struct { Name string Units string DataKey *MetricaDataKey DataSource *MetricsDataSource } type MetricaDataKey struct { StatBlockKey string KeyInsideStatBlock string } func (metrica *Metrica) GetName() string { return metrica.Name } func (metrica *Metrica) GetUnits() string { return metrica.Units } func (metrica *Metrica) GetValue() (float64, error) { return metrica.DataSource.CheckAndGetLastData(metrica.DataKey) } type IncrementalMetrica struct { Metrica } func (metrica *IncrementalMetrica) GetValue() (float64, error) { return metrica.DataSource.CheckAndGetData(metrica.DataKey) } var plainMetricas = []*Metrica{ // Solr memory metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "jvm_memory_used", }, Name: "solr/memory/jvm/used", Units: "bytes", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "jvm_memory_free", }, Name: "solr/memory/jvm/free", Units: "bytes", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "jvm_memory_total", }, Name: "solr/memory/jvm/total", Units: "bytes", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "freePhysicalMemorySize", }, Name: "solr/memory/system/free", Units: "bytes", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "totalPhysicalMemorySize", }, Name: "solr/memory/system/total", Units: "bytes", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "solr", KeyInsideStatBlock: "committedVirtualMemorySize", }, Name: "solr/memory/system/commited virtual", Units: "bytes", }, //Avg request per second &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "spell", KeyInsideStatBlock: "avgRequestsPerSecond", }, Name: "handler/request_per_second/spell", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/update", KeyInsideStatBlock: "avgRequestsPerSecond", }, Name: "handler/request_per_second/update", Units: "requests/seconds", }, 
&Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "org.apache.solr.handler.XmlUpdateRequestHandler", KeyInsideStatBlock: "avgRequestsPerSecond", }, Name: "handler/request_per_second/org.apache.solr.handler.XmlUpdateRequestHandler", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "standard", KeyInsideStatBlock: "avgRequestsPerSecond", }, Name: "handler/request_per_second/standard", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/suggest", KeyInsideStatBlock: "avgRequestsPerSecond", }, Name: "handler/request_per_second/suggest", Units: "requests/seconds", }, //Time per request &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "spell", KeyInsideStatBlock: "avgTimePerRequest", }, Name: "handler/time_per_request/spell", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/update", KeyInsideStatBlock: "avgTimePerRequest", }, Name: "handler/time_per_request/update", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "org.apache.solr.handler.XmlUpdateRequestHandler", KeyInsideStatBlock: "avgTimePerRequest", }, Name: "handler/time_per_request/org.apache.solr.handler.XmlUpdateRequestHandler", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "standard", KeyInsideStatBlock: "avgTimePerRequest", }, Name: "handler/time_per_request/standard", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/suggest", KeyInsideStatBlock: "avgTimePerRequest", }, Name: "handler/time_per_request/suggest", Units: "seconds", }, //Cache hitratio non cumulative &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "hitratio", }, Name: "handler/cache/hitrates/queryResultCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "hitratio", }, Name: "handler/cache/hitrates/documentCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: 
"fieldValueCache", KeyInsideStatBlock: "hitratio", }, Name: "handler/cache/hitrates/fieldValueCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "hitratio", }, Name: "handler/cache/hitrates/filterCache", Units: "seconds", }, //Cache hitratio cumulative &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "cumulative_hitratio", }, Name: "handler/cache/hitrates_cumulative/queryResultCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "cumulative_hitratio", }, Name: "handler/cache/hitrates_cumulative/documentCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "cumulative_hitratio", }, Name: "handler/cache/hitrates_cumulative/fieldValueCache", Units: "seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "cumulative_hitratio", }, Name: "handler/cache/hitrates_cumulative/filterCache", Units: "seconds", }, //Cache size &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "size", }, Name: "handler/cache/size/queryResultCache", Units: "items", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "size", }, Name: "handler/cache/size/documentCache", Units: "items", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "size", }, Name: "handler/cache/size/fieldValueCache", Units: "items", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "size", }, Name: "handler/cache/size/filterCache", Units: "items", }, } //Incremental metricas var incrementalMetricas = []*Metrica{ //Errors &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "spell", KeyInsideStatBlock: "errors", }, Name: "handler/errors/spell", Units: "errors/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ 
StatBlockKey: "/update", KeyInsideStatBlock: "errors", }, Name: "handler/errors/update", Units: "errors/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "org.apache.solr.handler.XmlUpdateRequestHandler", KeyInsideStatBlock: "errors", }, Name: "handler/errors/org.apache.solr.handler.XmlUpdateRequestHandler", Units: "errors/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "standard", KeyInsideStatBlock: "errors", }, Name: "handler/errors/standard", Units: "errors/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/suggest", KeyInsideStatBlock: "errors", }, Name: "handler/errors/suggest", Units: "errors/seconds", }, //update handler errors &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "errors", }, Name: "handler/errors/DirectUpdateHandler2", Units: "errors/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "cumulative_errors", }, Name: "handler/errors/DirectUpdateHandler2 cumulative errors", Units: "errors/seconds", }, //timeouts &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "spell", KeyInsideStatBlock: "timeouts", }, Name: "handler/timeouts/spell", Units: "timeouts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/update", KeyInsideStatBlock: "timeouts", }, Name: "handler/timeouts/update", Units: "timeouts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "org.apache.solr.handler.XmlUpdateRequestHandler", KeyInsideStatBlock: "timeouts", }, Name: "handler/timeouts/org.apache.solr.handler.XmlUpdateRequestHandler", Units: "timeouts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "standard", KeyInsideStatBlock: "timeouts", }, Name: "handler/timeouts/standard", Units: "timeouts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "/suggest", KeyInsideStatBlock: "timeouts", }, Name: "handler/timeouts/suggest", Units: "timeouts/seconds", }, //Direct updates handler metrics &Metrica{ 
DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "commits", }, Name: "DirectUpdateHandler2/commits", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "autocommits", }, Name: "DirectUpdateHandler2/autocommits", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "optimizes", }, Name: "DirectUpdateHandler2/optimizes", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "rollbacks", }, Name: "DirectUpdateHandler2/rollbacks", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "expungeDeletes", }, Name: "DirectUpdateHandler2/expungeDeletes", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "adds", }, Name: "DirectUpdateHandler2/adds", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "deletesById", }, Name: "DirectUpdateHandler2/deletesById", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "deletesByQuery", }, Name: "DirectUpdateHandler2/deletesByQuery", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "cumulative_adds", }, Name: "DirectUpdateHandler2/cumulative_adds", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "cumulative_deletesById", }, Name: "DirectUpdateHandler2/cumulative_deletesById", Units: "requests/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "updateHandler", KeyInsideStatBlock: "cumulative_deletesByQuery", }, Name: "DirectUpdateHandler2/cumulative_deletesByQuery", Units: "requests/seconds", }, //queryResultCache 
detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "lookups", }, Name: "handler/cache/queryResultCache/lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "hits", }, Name: "handler/cache/queryResultCache/hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "inserts", }, Name: "handler/cache/queryResultCache/inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "evictions", }, Name: "handler/cache/queryResultCache/evictions", Units: "evictions/seconds", }, //queryResultCache cumulative detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "cumulative_lookups", }, Name: "handler/cache/queryResultCache/cumulative_lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "cumulative_hits", }, Name: "handler/cache/queryResultCache/cumulative_hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "cumulative_inserts", }, Name: "handler/cache/queryResultCache/cumulative_inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "queryResultCache", KeyInsideStatBlock: "cumulative_evictions", }, Name: "handler/cache/queryResultCache/cumulative_evictions", Units: "evictions/seconds", }, //documentCache detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "lookups", }, Name: "handler/cache/documentCache/lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "hits", }, Name: "handler/cache/documentCache/hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ 
StatBlockKey: "documentCache", KeyInsideStatBlock: "inserts", }, Name: "handler/cache/documentCache/inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "evictions", }, Name: "handler/cache/documentCache/evictions", Units: "evictions/seconds", }, //documentCache cumulative detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "cumulative_lookups", }, Name: "handler/cache/documentCache/cumulative_lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "cumulative_hits", }, Name: "handler/cache/documentCache/cumulative_hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "cumulative_inserts", }, Name: "handler/cache/documentCache/cumulative_inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "documentCache", KeyInsideStatBlock: "cumulative_evictions", }, Name: "handler/cache/documentCache/cumulative_evictions", Units: "evictions/seconds", }, //fieldValueCache detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "lookups", }, Name: "handler/cache/fieldValueCache/lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "hits", }, Name: "handler/cache/fieldValueCache/hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "inserts", }, Name: "handler/cache/fieldValueCache/inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "evictions", }, Name: "handler/cache/fieldValueCache/evictions", Units: "evictions/seconds", }, //fieldValueCache cumulative detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: 
"fieldValueCache", KeyInsideStatBlock: "cumulative_lookups", }, Name: "handler/cache/fieldValueCache/cumulative_lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "cumulative_hits", }, Name: "handler/cache/fieldValueCache/cumulative_hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "cumulative_inserts", }, Name: "handler/cache/fieldValueCache/cumulative_inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "fieldValueCache", KeyInsideStatBlock: "cumulative_evictions", }, Name: "handler/cache/fieldValueCache/cumulative_evictions", Units: "evictions/seconds", }, //filterCache detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "lookups", }, Name: "handler/cache/filterCache/lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "hits", }, Name: "handler/cache/filterCache/hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "inserts", }, Name: "handler/cache/filterCache/inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "evictions", }, Name: "handler/cache/filterCache/evictions", Units: "evictions/seconds", }, //filterCache cumulative detail metrics &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "cumulative_lookups", }, Name: "handler/cache/filterCache/cumulative_lookups", Units: "request/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "cumulative_hits", }, Name: "handler/cache/filterCache/cumulative_hits", Units: "hits/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "cumulative_inserts", }, Name: 
"handler/cache/filterCache/cumulative_inserts", Units: "inserts/seconds", }, &Metrica{ DataKey: &MetricaDataKey{ StatBlockKey: "filterCache", KeyInsideStatBlock: "cumulative_evictions", }, Name: "handler/cache/filterCache/cumulative_evictions", Units: "evictions/seconds", }, }
metrics.go
0.511717
0.400427
metrics.go
starcoder
package world import "github.com/lquesada/cavernal/lib/g3n/engine/math32" import "github.com/lquesada/cavernal/model" type Coords struct { X int Z int } type World struct { floor [][]ITile tileSize float32 zeroX int zeroZ int defaultTile ITile gravity float32 } func Empty(tileSize, gravity float32) *World { defaultTile := NewTile("far void", nil) defaultTile.SetWalkThrough(true) defaultTile.SetSeeThrough(true) defaultTile.SetFallThrough(true) return &World{ floor: [][]ITile{}, tileSize: tileSize, zeroX: 0, zeroZ: 0, defaultTile: defaultTile, gravity: gravity, } } func New(floor [][]ITile, tileSize float32, zeroX, zeroZ int, gravity float32) *World { defaultTile := NewTile("far void", nil) defaultTile.SetWalkThrough(true) defaultTile.SetSeeThrough(true) defaultTile.SetFallThrough(true) return &World{ floor: floor, tileSize: tileSize, zeroX: zeroX, zeroZ: zeroZ, defaultTile: defaultTile, gravity: gravity, } } func (w *World) Node() model.INode { nodes := []model.INode{} for z, vv := range(w.floor) { for x, v := range(vv) { if v == nil { continue } if n := v.Node(); n != nil { nodes = append(nodes, n.Transform( &model.Transform{ Position: &math32.Vector3{float32(x-w.zeroX)*w.TileSize(), v.Y(), float32(z-w.zeroZ)*w.TileSize()}, Scale: &math32.Vector3{w.TileSize(), 1, w.TileSize()}, })) } } } return model.NewNode(nodes...) 
} func (w *World) GetTile(x, z int) ITile { x += w.zeroX z += w.zeroZ if z < 0 || z >= len(w.floor) { return w.defaultTile } if x < 0 || x >= len(w.floor[z]) { return w.defaultTile } if w.floor[z][x] == nil { return w.defaultTile } return w.floor[z][x] } func (w *World) SetTile(x, z int, t ITile) { w.GrowZ(z) w.GrowX(x) x += w.zeroX z += w.zeroZ if t == w.defaultTile { t = nil } w.floor[z][x] = t } func (w *World) DefaultTile() ITile { return w.defaultTile } func (w *World) Width() int { if w.Height() == 0 { return 0 } return len(w.floor[0]) } func (w *World) Height() int { return len(w.floor) } func (w *World) Gravity() float32 { return w.gravity } func (w *World) TileSize() float32 { return w.tileSize } func (w *World) WhereStanding(position *math32.Vector3) (tileX, tileZ int) { fx := int(math32.Round(position.X/w.TileSize())) fz := int(math32.Round(position.Z/w.TileSize())) return fx, fz } func (w *World) Center(x, z int) (position *math32.Vector3) { return &math32.Vector3{float32(x)*w.TileSize(), 0, float32(z)*w.TileSize()} } func (w *World) IsFallingToVoid(position *math32.Vector3, radius float32) (falling bool, fallPoint *math32.Vector3) { tileSpan := int(math32.Ceil(radius*2/w.TileSize())) tileX, tileZ := w.WhereStanding(position) tile := w.GetTile(tileX, tileZ) var xCoordSum, zCoordSum float32 var count int if tile.FallThrough() && position.Y <= tile.Y() { fitsHole := true deltaX, deltaZ, deltaCount := w.DeltaXZForTileCoverage(radius) for i := 0; i < deltaCount; i++ { cTileX, cTileZ := w.WhereStanding(&math32.Vector3{position.X + deltaX[i], 0, position.Z + deltaZ[i]}) cTile := w.GetTile(cTileX, cTileZ) if !cTile.FallThrough() { fitsHole = false } } if fitsHole { return true, position } for i := 0; i < tileSpan; i++ { nextTile: for j := 0; j < tileSpan; j++ { for x := 0; x < tileSpan; x++ { for z := 0; z < tileSpan; z++ { evalX := tileX-i+x evalZ := tileZ-j+z evalTile := w.GetTile(evalX, evalZ) if !evalTile.FallThrough() || position.Y > tile.Y() { continue 
nextTile } } } tileCenter := w.Center(tileX-i, tileZ-j) xCoord := tileCenter.X zCoord := tileCenter.Z margin := w.TileSize()/2 if tileCenter.X > 0 { xCoord += margin } else if tileCenter.X < 0 { xCoord -= margin } if tileCenter.Z > 0 { zCoord += margin } else if tileCenter.Z < 0 { zCoord -= margin } xCoordSum += xCoord zCoordSum += zCoord count++ } } if count > 0 { centerX := xCoordSum/float32(count) centerZ := zCoordSum/float32(count) var fallX, fallZ float32 if centerX < position.X + radius || centerX > position.X - radius{ fallX = centerX } if centerZ < position.Z + radius || centerZ > position.Z - radius { fallZ = centerZ } return true, &math32.Vector3{fallX, 0, fallZ} } } return false, nil } func (w *World) EvaluateRay(source, destination *math32.Vector3, ok func(ITile) bool) (success bool, fragmentSuccess float32) { angle := math32.Atan2(destination.Z-source.Z, destination.X-source.X) incX := math32.Cos(angle) incZ := math32.Sin(angle) current := math32.NewVector3(source.X, source.Y, source.Z) destinationX, destinationZ := w.WhereStanding(destination) currentX, currentZ := w.WhereStanding(current) for current.DistanceTo(destination) > 0.001 { if currentX == destinationX && currentZ == destinationZ { if !ok(w.GetTile(currentX, currentZ)) { return false, source.DistanceTo(current) } else { current.Copy(destination) break } } nextX := currentX if currentX != destinationX { if incX > 0 { nextX++ } else if incX < 0 { nextX-- } } nextZ := currentZ if currentZ != destinationZ { if incZ > 0 { nextZ++ } else if incZ < 0 { nextZ-- } } currentCenter := w.Center(currentX, currentZ) nextCenter := w.Center(nextX, nextZ) cutX := (currentCenter.X+nextCenter.X)/2 cutZ := (currentCenter.Z+nextCenter.Z)/2 deltaX := cutX - current.X deltaZ := cutZ - current.Z var needToCutX, needToCutZ float32 if math32.Abs(incX) > 0.001 { needToCutX = deltaX/incX } if math32.Abs(incZ) > 0.001 { needToCutZ = deltaZ/incZ } var willProgress float32 var fixX, fixZ bool if nextX != currentX && 
(needToCutX <= needToCutZ || currentZ == nextZ) { willProgress = needToCutX fixX = true } if nextZ != currentZ && (needToCutZ <= needToCutX || currentX == nextX) { willProgress = needToCutZ fixZ = true } progressX := willProgress * incX progressZ := willProgress * incZ current.X += progressX if fixX { current.X = cutX + 0.0001 * incX/math32.Abs(incX) } current.Z += progressZ if fixZ { current.Z = cutZ + 0.0001 * incZ/math32.Abs(incZ) } currentX, currentZ = w.WhereStanding(current) if !ok(w.GetTile(currentX, currentZ)) { return false, source.DistanceTo(current) } } return true, source.DistanceTo(current) } func (w *World) EvaluateRayRadius(source, destination *math32.Vector3, radius float32, ok func(ITile) bool) (success bool, fragmentSuccess float32) { deltaX, deltaZ, deltaCount := w.DeltaXZForBorderCoverage(radius) var failed bool var fragment float32 for i := 0; i < deltaCount; i++ { dx := deltaX[i] dz := deltaZ[i] sourceCurrent := &math32.Vector3{source.X + dx, source.Y, source.Z + dz} destinationCurrent := &math32.Vector3{destination.X + dx, source.Y, destination.Z + dz} if suc, frag := w.EvaluateRay(sourceCurrent, destinationCurrent, ok); !suc { if !failed { failed = true fragment = frag } else { fragment = math32.Min(fragment, frag) } } } return !failed, fragment } func (w *World) DeltaXZForBorderCoverage(radius float32) (deltaX, deltaZ []float32, deltaCount int) { deltaX = []float32{} deltaZ = []float32{} for r := radius; r >= 0; r -= w.TileSize()/2 { deltaX = append(deltaX, r) deltaZ = append(deltaZ, -radius) deltaX = append(deltaX, r) deltaZ = append(deltaZ, radius) deltaX = append(deltaX, -r) deltaZ = append(deltaZ, -radius) deltaX = append(deltaX, -r) deltaZ = append(deltaZ, radius) deltaZ = append(deltaZ, r) deltaX = append(deltaX, -radius) deltaZ = append(deltaZ, r) deltaX = append(deltaX, radius) deltaZ = append(deltaZ, -r) deltaX = append(deltaX, -radius) deltaZ = append(deltaZ, -r) deltaX = append(deltaX, radius) } return deltaX, deltaZ, len(deltaX) 
} func (w *World) DeltaXZForTileCoverage(radius float32) (deltaX, deltaZ []float32, deltaCount int) { deltaX = []float32{} deltaZ = []float32{} for r1 := radius; r1 >= 0; r1 -= w.TileSize()/2 { for r2 := radius; r2 >= 0; r2 -= w.TileSize()/2 { deltaX = append(deltaX, r1) deltaZ = append(deltaZ, r2) deltaX = append(deltaX, -r1) deltaZ = append(deltaZ, -r2) deltaX = append(deltaX, r1) deltaZ = append(deltaZ, -r2) deltaX = append(deltaX, -r1) deltaZ = append(deltaZ, r2) } } return deltaX, deltaZ, len(deltaX) } func (w *World) GrowX(x int) { xZ := x+w.zeroX if xZ < 0 { incX := -xZ for rI, r := range w.floor { newLen := len(r)+incX rN := make([]ITile, newLen, newLen) for i := 0; i < incX; i++ { rN[i] = w.defaultTile } for i := incX; i < newLen; i++ { rN[i] = r[i-incX] } w.floor[rI] = rN } w.zeroX += incX } if xZ > w.Width() { incX := xZ-w.Width() for rI, r := range w.floor { newLen := len(r)+incX rN := make([]ITile, newLen, newLen) for i := 0; i < len(r); i++ { rN[i] = r[i] } for i := len(r); i < newLen; i++ { rN[i] = w.defaultTile } w.floor[rI] = rN } } } func (w *World) GrowZ(z int) { zZ := z+w.zeroZ width := w.Width() if zZ < 0 { incZ := -zZ newLen := len(w.floor)+incZ fN := make([][]ITile, newLen, newLen) for j := 0; j < incZ; j++ { fN[j] = make([]ITile, width, width) for i := 0; i < width; i++ { fN[j][i] = w.defaultTile } } for j := incZ; j < newLen; j++ { fN[j] = w.floor[j-incZ] } w.floor = fN w.zeroZ += incZ } if zZ > w.Height() { incZ := zZ-w.Height() newLen := len(w.floor)+incZ fN := make([][]ITile, newLen, newLen) for j := 0; j < len(w.floor); j++ { fN[j] = w.floor[j] } for j := len(w.floor); j < newLen; j++ { fN[j] = make([]ITile, width, width) for i := 0; i < width; i++ { fN[j][i] = w.defaultTile } } w.floor = fN } } func (w *World) MinCoords() *Coords { return &Coords{-w.zeroX, -w.zeroZ} } func (w *World) MaxCoords() *Coords { return &Coords{w.Width()-w.zeroX-1, w.Height()-w.zeroZ-1} } func (w *World) FindConnect(minCoords, maxCoords *Coords) []*Coords { 
coords := []*Coords{} for x := minCoords.X; x <= maxCoords.X; x++ { for z := minCoords.Z; z <= maxCoords.Z; z++ { if w.GetTile(x, z).Connect() { coords = append(coords, &Coords{X: x, Z: z}) } } } return coords }
world/world.go
0.609059
0.513242
world.go
starcoder
package iso20022 // Formal document used to record a fact and used as proof of the fact, in the context of a commercial trade transaction. type CertificateDataSet1 struct { // Identifies the certificate data set. DataSetIdentification *DocumentIdentification1 `xml:"DataSetId"` // Specifies the type of the certificate. CertificateType *TradeCertificateType1Code `xml:"CertTp"` // Specifies if the certificate data set is required in relation to specific line items, and which line items. LineItem []*LineItemAndPOIdentification1 `xml:"LineItm,omitempty"` // Characteristics of the goods that are certified, in the context of a commercial trade transaction. CertifiedCharacteristics *CertifiedCharacteristics1Choice `xml:"CertfdChrtcs"` // Issue date of the document. IssueDate *ISODate `xml:"IsseDt"` // Place where the certificate was issued. PlaceOfIssue *PostalAddress5 `xml:"PlcOfIsse,omitempty"` // Issuer of the certificate, typically the inspection company or its agent. Issuer *PartyIdentification26 `xml:"Issr"` // Date(s) at which inspection of the goods took place. InspectionDate *DatePeriodDetails `xml:"InspctnDt,omitempty"` // Indicates that the inspection has been performed by an authorised inspector. AuthorisedInspectorIndicator *YesNoIndicator `xml:"AuthrsdInspctrInd,omitempty"` // Unique identifier of the document. CertificateIdentification *Max35Text `xml:"CertId"` // Transport information relative to the goods that are covered by the certificate. Transport *SingleTransport3 `xml:"Trnsprt,omitempty"` // Information about the goods and/or services of a trade transaction. GoodsDescription *Max70Text `xml:"GoodsDesc,omitempty"` // Party responsible for dispatching the goods. Consignor *PartyIdentification26 `xml:"Consgnr,omitempty"` // Party to whom the goods (which are the subject of the certificate) must be delivered. Consignee *PartyIdentification26 `xml:"Consgn,omitempty"` // Manufacturer of the goods which are the subject of the certificate. 
Manufacturer *PartyIdentification26 `xml:"Manfctr,omitempty"` // Additional and important information that could not be captured by structured fields. AdditionalInformation []*Max350Text `xml:"AddtlInf,omitempty"` } func (c *CertificateDataSet1) AddDataSetIdentification() *DocumentIdentification1 { c.DataSetIdentification = new(DocumentIdentification1) return c.DataSetIdentification } func (c *CertificateDataSet1) SetCertificateType(value string) { c.CertificateType = (*TradeCertificateType1Code)(&value) } func (c *CertificateDataSet1) AddLineItem() *LineItemAndPOIdentification1 { newValue := new (LineItemAndPOIdentification1) c.LineItem = append(c.LineItem, newValue) return newValue } func (c *CertificateDataSet1) AddCertifiedCharacteristics() *CertifiedCharacteristics1Choice { c.CertifiedCharacteristics = new(CertifiedCharacteristics1Choice) return c.CertifiedCharacteristics } func (c *CertificateDataSet1) SetIssueDate(value string) { c.IssueDate = (*ISODate)(&value) } func (c *CertificateDataSet1) AddPlaceOfIssue() *PostalAddress5 { c.PlaceOfIssue = new(PostalAddress5) return c.PlaceOfIssue } func (c *CertificateDataSet1) AddIssuer() *PartyIdentification26 { c.Issuer = new(PartyIdentification26) return c.Issuer } func (c *CertificateDataSet1) AddInspectionDate() *DatePeriodDetails { c.InspectionDate = new(DatePeriodDetails) return c.InspectionDate } func (c *CertificateDataSet1) SetAuthorisedInspectorIndicator(value string) { c.AuthorisedInspectorIndicator = (*YesNoIndicator)(&value) } func (c *CertificateDataSet1) SetCertificateIdentification(value string) { c.CertificateIdentification = (*Max35Text)(&value) } func (c *CertificateDataSet1) AddTransport() *SingleTransport3 { c.Transport = new(SingleTransport3) return c.Transport } func (c *CertificateDataSet1) SetGoodsDescription(value string) { c.GoodsDescription = (*Max70Text)(&value) } func (c *CertificateDataSet1) AddConsignor() *PartyIdentification26 { c.Consignor = new(PartyIdentification26) return 
c.Consignor } func (c *CertificateDataSet1) AddConsignee() *PartyIdentification26 { c.Consignee = new(PartyIdentification26) return c.Consignee } func (c *CertificateDataSet1) AddManufacturer() *PartyIdentification26 { c.Manufacturer = new(PartyIdentification26) return c.Manufacturer } func (c *CertificateDataSet1) AddAdditionalInformation(value string) { c.AdditionalInformation = append(c.AdditionalInformation, (*Max350Text)(&value)) }
CertificateDataSet1.go
0.733165
0.403626
CertificateDataSet1.go
starcoder
package mappings

// ComplianceSumDate is the compliance mapping used to create the daily
// summary timeseries index `comp-<version>-s-<date>`.
//
// The repeated "engram" sub-fields are analyzed with the edge_ngram
// autocomplete analyzers defined in the settings block, and the "lower"
// sub-fields use the case_insensitive normalizer for case-insensitive
// filtering and sorting.
var ComplianceSumDate = Mapping{
	Index:      IndexNameSum,
	Type:       DocType,
	Timeseries: true,
	// The JSON body below is sent to Elasticsearch verbatim; do not reformat
	// the string contents.
	Mapping: `{ "index_patterns": ["` + IndexNameSum + `-20*"], "settings": { "analysis": { "analyzer": { "autocomplete": { "filter": [ "lowercase" ], "tokenizer": "autocomplete_tokenizer" }, "autocomplete_version_numbers": { "filter": [ "lowercase" ], "tokenizer": "autocomplete_version_number_tokenizer" } }, "tokenizer": { "autocomplete_tokenizer": { "max_gram": 20, "min_gram": 2, "token_chars": [ "letter", "digit" ], "type": "edge_ngram" }, "autocomplete_version_number_tokenizer": { "max_gram": 20, "min_gram": 2, "token_chars": [ "letter", "digit", "punctuation" ], "type": "edge_ngram" } }, "normalizer": { "case_insensitive": { "type": "custom", "char_filter": [], "filter": ["lowercase", "asciifolding"] } } }, "index": { "refresh_interval": "1s" } }, "mappings": { "` + DocType + `": { "properties": { "controls_sums": { "properties": { "failed": { "properties": { "critical": { "type": "integer" }, "major": { "type": "integer" }, "minor": { "type": "integer" }, "total": { "type": "integer" } }, "type": "object" }, "passed": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "skipped": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "waived": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "total": { "type": "integer" } }, "type": "object" }, "daily_latest": { "type": "boolean" }, "doc_version": { "type": "keyword" }, "end_time": { "type": "date" }, "environment": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "job_uuid": { "type": "keyword" }, "node_name": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" },
"node_uuid": { "type": "keyword" }, "platform": { "properties": { "name": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "release": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "full": { "fields": { "engram": { "analyzer": "autocomplete_version_numbers", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" } } }, "profiles": { "properties": { "controls_sums": { "properties": { "failed": { "properties": { "critical": { "type": "integer" }, "major": { "type": "integer" }, "minor": { "type": "integer" }, "total": { "type": "integer" } }, "type": "object" }, "passed": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "skipped": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "waived": { "properties": { "total": { "type": "integer" } }, "type": "object" }, "total": { "type": "integer" } }, "type": "object" }, "profile": { "type": "keyword" }, "name": { "type": "keyword" }, "title": { "type": "keyword", "fields": { "engram": { "type": "text", "analyzer": "autocomplete" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } } }, "sha256": { "type": "keyword" }, "version": { "type": "keyword" }, "status": { "type": "keyword" }, "full": { "fields": { "engram": { "analyzer": "autocomplete_version_numbers", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" } }, "type": "nested" }, "recipes": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "report_uuid": { "type": "keyword" }, "roles": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": {
"normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "status": { "type": "keyword" }, "projects": { "type": "keyword" }, "statistics": { "properties": { "duration": { "type": "double" } } }, "version": { "type": "keyword", "fields": { "engram": { "type": "text", "analyzer": "autocomplete_version_numbers" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } } }, "policy_name": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "policy_group": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "organization_name": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "source_fqdn": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" }, "chef_tags": { "fields": { "engram": { "analyzer": "autocomplete", "type": "text" }, "lower": { "normalizer": "case_insensitive", "type": "keyword" } }, "type": "keyword" } } } } }`,
}
components/compliance-service/ingest/ingestic/mappings/comp-sum-date.go
0.715126
0.479138
comp-sum-date.go
starcoder
package radiusgyration

import (
	"bufio"
	"fmt"
	"strconv"
	"strings"
)

// readCfgFirst reads the first configuration of a LAMMPS dump. In addition to
// the per-configuration work done by readCfg it parses the number of atoms
// and the column layout (xu, yu, zu, type) from the header; both are assumed
// to stay constant for the rest of the trajectory.
func (r *RadiusGyration) readCfgFirst(rd *bufio.Reader) (xyz [][3]float64, types []string, err error) {
	// Skip the three header lines preceding the atom count
	// (ITEM: TIMESTEP, the timestep value, ITEM: NUMBER OF ATOMS).
	for i := 0; i < 3; i++ {
		rd.ReadSlice('\n')
	}
	b, rerr := rd.ReadSlice('\n')
	if rerr != nil {
		// Previously the error was ignored and slicing an empty result with
		// [:len(b)-1] would panic on truncated input.
		err = fmt.Errorf("reading number of atoms: %w", rerr)
		return
	}
	// TrimSpace also strips a trailing '\r' from files with CRLF endings.
	r.atoms, err = strconv.Atoi(strings.TrimSpace(string(b)))
	if err != nil {
		return
	}
	// Skip the box-bounds block between the atom count and the column header.
	for i := 0; i < 4; i++ {
		rd.ReadSlice('\n')
	}
	b, rerr = rd.ReadSlice('\n')
	if rerr != nil {
		err = fmt.Errorf("reading column header: %w", rerr)
		return
	}
	fields := strings.Fields(string(b))
	if len(fields) <= 2 {
		err = fmt.Errorf("not enough columns (at least 3; got %d)", len(fields))
		return
	}
	// Drop the two leading header tokens (typically "ITEM:" and "ATOMS");
	// the remainder are the per-atom column names.
	fields = fields[2:]
	var found int
	r.colsLen = len(fields)
	for k, v := range fields {
		switch v {
		case "xu":
			r.cols[0] = k
		case "yu":
			r.cols[1] = k
		case "zu":
			r.cols[2] = k
		case "type":
			r.cols[3] = k
		default:
			continue
		}
		found++
	}
	if found < len(r.cols) {
		err = fmt.Errorf("cannot find the columns xu, yu, zu, and type")
		return
	}
	xyz, types, err = r.fetchXYZ(rd)
	if err != nil {
		err = fmt.Errorf("fetchXYZ: %w", err)
		return
	}
	return
}

// readCfg reads one subsequent configuration of the LAMMPS trajectory: it
// skips the nine header lines and delegates to fetchXYZ for the coordinates
// and types of the selected atom range.
func (r *RadiusGyration) readCfg(rd *bufio.Reader) (xyz [][3]float64, types []string, err error) {
	for i := 0; i < 9; i++ {
		rd.ReadSlice('\n')
	}
	xyz, types, err = r.fetchXYZ(rd)
	if err != nil {
		err = fmt.Errorf("fetchXYZ: %w", err)
	}
	return
}

// fetchXYZ fetches the coordinates and the type of every atom whose index is
// in [AtomStart, AtomEnd), then drains the remaining atom lines so the reader
// is left at the next configuration header.
func (r *RadiusGyration) fetchXYZ(rd *bufio.Reader) (xyz [][3]float64, types []string, err error) { for i := 0; i < r.AtomStart; i++ { rd.ReadSlice('\n') } for i := 0; i < (r.AtomEnd - r.AtomStart); i++ { b, _ := rd.ReadSlice('\n') fields := strings.Fields(string(b)) if len(fields) != r.colsLen { err = fmt.Errorf("number of columns don't match: %d (expected %d)", len(fields), r.colsLen) return } var xyzTmp [3]float64 for k := 0; k < 3; k++ { xyzTmp[k], _ = strconv.ParseFloat(fields[r.cols[k]], 64) } types = append(types, fields[r.cols[3]]) xyz = append(xyz, xyzTmp) } for i := 0; i < (r.atoms - r.AtomEnd); i++ { rd.ReadSlice('\n') } return }
pkg/radiusgyration/read.go
0.616474
0.411761
read.go
starcoder
package ns

/**
* Configuration for Policy Based Routing(PBR) entry resource.
*
* NOTE(review): this struct appears auto-generated from the Citrix NITRO API.
* Several fields below (Srcip, Srcport, Destip, Destport, Nexthop, Iptunnel)
* are typed bool but carry descriptions that match their *val companions;
* presumably each bool enables the corresponding *val/*op match — verify
* against the NITRO API reference before relying on the comment text.
*/
type Nspbr struct {
	/**
	* Name for the PBR. Must begin with an ASCII alphabetic or underscore \(_\) character, and must contain only ASCII alphanumeric, underscore, hash \(\#\), period \(.\), space, colon \(:\), at \(@\), equals \(=\), and hyphen \(-\) characters. Cannot be changed after the PBR is created.
	*/
	Name string `json:"name,omitempty"`
	/**
	* Action to perform on the outgoing IPv4 packets that match the PBR. Available settings function as follows: * ALLOW - The Citrix ADC sends the packet to the designated next-hop router. * DENY - The Citrix ADC applies the routing table for normal destination-based routing.
	*/
	Action string `json:"action,omitempty"`
	/**
	* Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.
	*/
	Td int `json:"td,omitempty"`
	/**
	* IP address or range of IP addresses to match against the source IP address of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	*/
	Srcip bool `json:"srcip,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	*/
	Srcipop string `json:"srcipop,omitempty"`
	/**
	* IP address or range of IP addresses to match against the source IP address of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	*/
	Srcipval string `json:"srcipval,omitempty"`
	/**
	* Port number or range of port numbers to match against the source port number of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90. Note: The destination port can be specified only for TCP and UDP protocols.
	*/
	Srcport bool `json:"srcport,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	*/
	Srcportop string `json:"srcportop,omitempty"`
	/**
	* Port number or range of port numbers to match against the source port number of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90. Note: The destination port can be specified only for TCP and UDP protocols.
	*/
	Srcportval string `json:"srcportval,omitempty"`
	/**
	* IP address or range of IP addresses to match against the destination IP address of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	*/
	Destip bool `json:"destip,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	*/
	Destipop string `json:"destipop,omitempty"`
	/**
	* IP address or range of IP addresses to match against the destination IP address of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 10.102.29.30-10.102.29.189.
	*/
	Destipval string `json:"destipval,omitempty"`
	/**
	* Port number or range of port numbers to match against the destination port number of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90. Note: The destination port can be specified only for TCP and UDP protocols.
	*/
	Destport bool `json:"destport,omitempty"`
	/**
	* Either the equals (=) or does not equal (!=) logical operator.
	*/
	Destportop string `json:"destportop,omitempty"`
	/**
	* Port number or range of port numbers to match against the destination port number of an outgoing IPv4 packet. In the command line interface, separate the range with a hyphen. For example: 40-90. Note: The destination port can be specified only for TCP and UDP protocols.
	*/
	Destportval string `json:"destportval,omitempty"`
	/**
	* IP address of the next hop router or the name of the link load balancing virtual server to which to send matching packets if action is set to ALLOW. If you specify a link load balancing (LLB) virtual server, which can provide a backup if a next hop link fails, first make sure that the next hops bound to the LLB virtual server are actually next hops that are directly connected to the Citrix ADC. Otherwise, the Citrix ADC throws an error when you attempt to create the PBR. The next hop can be null to represent null routes
	*/
	Nexthop bool `json:"nexthop,omitempty"`
	/**
	* The Next Hop IP address or gateway name.
	*/
	Nexthopval string `json:"nexthopval,omitempty"`
	/**
	* The Tunnel name.
	*/
	Iptunnel bool `json:"iptunnel,omitempty"`
	/**
	* The iptunnel name where packets need to be forwarded upon.
	*/
	Iptunnelname string `json:"iptunnelname,omitempty"`
	/**
	* The vlan to vxlan mapping to be applied for incoming packets over this pbr tunnel
	*/
	Vxlanvlanmap string `json:"vxlanvlanmap,omitempty"`
	/**
	* MAC address to match against the source MAC address of an outgoing IPv4 packet.
	*/
	Srcmac string `json:"srcmac,omitempty"`
	/**
	* Used to define range of Source MAC address. It takes string of 0 and 1, 0s are for exact match and 1s for wildcard. For matching first 3 bytes of MAC address, srcMacMask value "000000111111".
	*/
	Srcmacmask string `json:"srcmacmask,omitempty"`
	/**
	* Protocol, identified by protocol name, to match against the protocol of an outgoing IPv4 packet.
	*/
	Protocol string `json:"protocol,omitempty"`
	/**
	* Protocol, identified by protocol number, to match against the protocol of an outgoing IPv4 packet.
	*/
	Protocolnumber int `json:"protocolnumber,omitempty"`
	/**
	* ID of the VLAN. The Citrix ADC compares the PBR only to the outgoing packets on the specified VLAN. If you do not specify any interface ID, the appliance compares the PBR to the outgoing packets on all VLANs.
	*/
	Vlan int `json:"vlan,omitempty"`
	/**
	* ID of the VXLAN. The Citrix ADC compares the PBR only to the outgoing packets on the specified VXLAN. If you do not specify any interface ID, the appliance compares the PBR to the outgoing packets on all VXLANs.
	*/
	Vxlan int `json:"vxlan,omitempty"`
	/**
	* ID of an interface. The Citrix ADC compares the PBR only to the outgoing packets on the specified interface. If you do not specify any value, the appliance compares the PBR to the outgoing packets on all interfaces.
	*/
	Interface string `json:"Interface,omitempty"`
	/**
	* Priority of the PBR, which determines the order in which it is evaluated relative to the other PBRs. If you do not specify priorities while creating PBRs, the PBRs are evaluated in the order in which they are created.
	*/
	Priority int `json:"priority,omitempty"`
	/**
	* Monitor the route specified by the Next Hop parameter. This parameter is not applicable if you specify a link load balancing (LLB) virtual server name with the Next Hop parameter.
	*/
	Msr string `json:"msr,omitempty"`
	/**
	* The name of the monitor.(Can be only of type ping or ARP )
	*/
	Monitor string `json:"monitor,omitempty"`
	/**
	* Enable or disable the PBR. After you apply the PBRs, the Citrix ADC compares outgoing packets to the enabled PBRs.
	*/
	State string `json:"state,omitempty"`
	/**
	* The owner node group in a Cluster for this pbr rule. If ownernode is not specified then the pbr rule is treated as Striped pbr rule.
	*/
	Ownergroup string `json:"ownergroup,omitempty"`
	/**
	* To get a detailed view.
	*/
	Detail bool `json:"detail,omitempty"`

	//------- Read only Parameter ---------;

	Hits              string `json:"hits,omitempty"`
	Kernelstate       string `json:"kernelstate,omitempty"`
	Curstate          string `json:"curstate,omitempty"`
	Totalprobes       string `json:"totalprobes,omitempty"`
	Totalfailedprobes string `json:"totalfailedprobes,omitempty"`
	Failedprobes      string `json:"failedprobes,omitempty"`
	Monstatcode       string `json:"monstatcode,omitempty"`
	Monstatparam1     string `json:"monstatparam1,omitempty"`
	Monstatparam2     string `json:"monstatparam2,omitempty"`
	Monstatparam3     string `json:"monstatparam3,omitempty"`
	Data              string `json:"data,omitempty"`
}
resource/config/ns/nspbr.go
0.821903
0.499329
nspbr.go
starcoder
package page

import (
	"bytes"
)

// Compile-time checks that both cell kinds implement CellTyper.
var _ CellTyper = (*RecordCell)(nil)
var _ CellTyper = (*PointerCell)(nil)

//go:generate stringer -type=CellType

// CellType is the type of a page. It is stored as the first byte of a cell's
// encoded form.
type CellType uint8

const (
	// CellTypeUnknown indicates a corrupted page or an incorrectly decoded
	// cell.
	CellTypeUnknown CellType = iota
	// CellTypeRecord indicates a RecordCell, which stores a key and a variable
	// size record.
	CellTypeRecord
	// CellTypePointer indicates a PointerCell, which stores a key and an
	// uint32, which points to another page.
	CellTypePointer
)

type (
	// CellTyper describes a component that has a cell type.
	CellTyper interface {
		Type() CellType
	}

	// RecordCell is a cell with CellTypeRecord. It holds a key and a variable
	// size record.
	RecordCell struct {
		Key    []byte
		Record []byte
	}

	// PointerCell is a cell with CellTypePointer. It holds a key and an uint32,
	// pointing to another page.
	PointerCell struct {
		Key     []byte
		Pointer ID
	}
)

// Type returns CellTypeRecord.
func (RecordCell) Type() CellType { return CellTypeRecord }

// Type returns CellTypePointer.
func (PointerCell) Type() CellType { return CellTypePointer } func decodeCell(data []byte) CellTyper { switch t := CellType(data[0]); t { case CellTypePointer: return decodePointerCell(data) case CellTypeRecord: return decodeRecordCell(data) default: return nil } } func encodeRecordCell(cell RecordCell) []byte { key := frame(cell.Key) record := frame(cell.Record) var buf bytes.Buffer buf.WriteByte(byte(CellTypeRecord)) buf.Write(key) buf.Write(record) return buf.Bytes() } func decodeRecordCell(data []byte) RecordCell { keySize := byteOrder.Uint32(data[1:5]) key := data[5 : 5+keySize] recordSize := byteOrder.Uint32(data[5+keySize : 5+keySize+4]) record := data[5+keySize+4 : 5+keySize+4+recordSize] return RecordCell{ Key: key, Record: record, } } func encodePointerCell(cell PointerCell) []byte { key := frame(cell.Key) pointer := make([]byte, 4) byteOrder.PutUint32(pointer, cell.Pointer) var buf bytes.Buffer buf.WriteByte(byte(CellTypePointer)) buf.Write(key) buf.Write(pointer) return buf.Bytes() } func decodePointerCell(data []byte) PointerCell { keySize := byteOrder.Uint32(data[1:5]) key := data[5 : 5+keySize] pointer := byteOrder.Uint32(data[5+keySize : 5+keySize+4]) return PointerCell{ Key: key, Pointer: pointer, } } // frame frames the given data with a uint32 representing the length of the given data. // The length of the returned byte slice is the length of the given byte slice + 4. func frame(data []byte) []byte { // this allocation can be optimized, however, it would mess up the API, but // it should be considered in the future result := make([]byte, 4+len(data)) copy(result[4:], data) byteOrder.PutUint32(result, uint32(len(data))) return result }
internal/engine/page/cell.go
0.649467
0.551211
cell.go
starcoder