code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package fp
// IsEmpty reports whether the option holds no value, i.e. it equals the
// corresponding None* sentinel for its type. These one-line accessors are
// mechanically generated boilerplate — one per bootstrap Option type — and
// presumably kept in sync by a code generator; edit the generator, not this
// file, if the pattern needs to change (NOTE(review): generator not visible
// here — confirm).
func (o BoolOption) IsEmpty() bool { return o == NoneBool }
func (o StringOption) IsEmpty() bool { return o == NoneString }
func (o IntOption) IsEmpty() bool { return o == NoneInt }
func (o Int64Option) IsEmpty() bool { return o == NoneInt64 }
func (o ByteOption) IsEmpty() bool { return o == NoneByte }
func (o RuneOption) IsEmpty() bool { return o == NoneRune }
func (o Float32Option) IsEmpty() bool { return o == NoneFloat32 }
func (o Float64Option) IsEmpty() bool { return o == NoneFloat64 }
func (o AnyOption) IsEmpty() bool { return o == NoneAny }
func (o Tuple2Option) IsEmpty() bool { return o == NoneTuple2 }
func (o BoolOptionOption) IsEmpty() bool { return o == NoneBoolOption }
func (o StringOptionOption) IsEmpty() bool { return o == NoneStringOption }
func (o IntOptionOption) IsEmpty() bool { return o == NoneIntOption }
func (o Int64OptionOption) IsEmpty() bool { return o == NoneInt64Option }
func (o ByteOptionOption) IsEmpty() bool { return o == NoneByteOption }
func (o RuneOptionOption) IsEmpty() bool { return o == NoneRuneOption }
func (o Float32OptionOption) IsEmpty() bool { return o == NoneFloat32Option }
func (o Float64OptionOption) IsEmpty() bool { return o == NoneFloat64Option }
func (o AnyOptionOption) IsEmpty() bool { return o == NoneAnyOption }
func (o Tuple2OptionOption) IsEmpty() bool { return o == NoneTuple2Option }
func (o BoolArrayOption) IsEmpty() bool { return o == NoneBoolArray }
func (o StringArrayOption) IsEmpty() bool { return o == NoneStringArray }
func (o IntArrayOption) IsEmpty() bool { return o == NoneIntArray }
func (o Int64ArrayOption) IsEmpty() bool { return o == NoneInt64Array }
func (o ByteArrayOption) IsEmpty() bool { return o == NoneByteArray }
func (o RuneArrayOption) IsEmpty() bool { return o == NoneRuneArray }
func (o Float32ArrayOption) IsEmpty() bool { return o == NoneFloat32Array }
func (o Float64ArrayOption) IsEmpty() bool { return o == NoneFloat64Array }
func (o AnyArrayOption) IsEmpty() bool { return o == NoneAnyArray }
func (o Tuple2ArrayOption) IsEmpty() bool { return o == NoneTuple2Array }
func (o BoolListOption) IsEmpty() bool { return o == NoneBoolList }
func (o StringListOption) IsEmpty() bool { return o == NoneStringList }
func (o IntListOption) IsEmpty() bool { return o == NoneIntList }
func (o Int64ListOption) IsEmpty() bool { return o == NoneInt64List }
func (o ByteListOption) IsEmpty() bool { return o == NoneByteList }
func (o RuneListOption) IsEmpty() bool { return o == NoneRuneList }
func (o Float32ListOption) IsEmpty() bool { return o == NoneFloat32List }
func (o Float64ListOption) IsEmpty() bool { return o == NoneFloat64List }
func (o Tuple2ListOption) IsEmpty() bool { return o == NoneTuple2List } | fp/bootstrap_option_isempty.go | 0.705481 | 0.508483 | bootstrap_option_isempty.go | starcoder |
package nexus
// certsDefault is a fallback, self-signed certificate for CN=localhost,
// bundled for development/testing only — use only if you are sure it is
// indicated. Note it expired on 2020-07-03 (see the Validity section below).
// NOTE(review): the human-readable dump preceding the PEM block contains
// garbled hex runs (e.g. "fc00:e968:..."), apparently from an automated
// redaction pass; the dump is informational only, but confirm the PEM payload
// itself is the intended certificate. (Trailing dataset residue after the
// closing backtick, which broke compilation, was removed.)
const certsDefault = `
Certificate:
Data:
Version: 3 (0x2)
Serial Number:
0a:18:9e:cb:e2:e2:ee:ca:ed:4b:b6:14:4c:a6:fb:0e:ac:34:27:08
Signature Algorithm: sha256WithRSAEncryption
Issuer: CN = localhost
Validity
Not Before: Jun 3 21:38:18 2020 GMT
Not After : Jul 3 21:38:18 2020 GMT
Subject: CN = localhost
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public-Key: (2048 bit)
Modulus:
00:dd:ab:61:d7:d0:72:f0:49:38:d3:9c:a7:bc:97:
a8:f6:b1:7e:5c:4e:52:dc:b7:f5:22:b3:79:4a:81:
db:28:da:27:12:0a:e0:88:60:9e:ff:d8:d5:59:95:
cd:15:fe:2d:9d:b6:e1:56:f1:93:dd:9b:f5:90:b4:
fc00:e968:6179::de52:7100:cb:42:cb:74:c3:8d:66:
fc00:db20:35b:7399::5:21:e4:82:a8:3f:89:71:
d9:09:3b:1b:18:f9:2f:dc:d7:01:98:81:fb:da:86:
0f:7b:9b:37:32:0a:05:e3:d1:d9:bd:e6:49:ab:38:
2d:3e:55:de:81:d7:37:b2:99:c4:bf:52:8a:18:8e:
f7:34:86:9c:8f:7f:a7:0f:50:9e:bf:ea:6f:30:ab:
79:28:25:71:8c:15:6e:52:af:94:78:6f:86:c6:66:
1c:09:ed:1d:ba:c4:98:6f:a3:64:22:48:ec:b4:f5:
fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:26:f2:6c:bc:b8:bd:e6:
34:63:cb:4a:8d:c3:9e:23:3b:36:06:84:bd:b6:a6:
fdf8:f53e:61e4::18:93:55:cb:74:33:9a:49:
7a:d0:72:62:8e:f4:2e:e7:85:96:6c:d8:12:ec:a9:
fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b:2a:a7:24:5e:74:a9:57:
a3:0d
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Subject Key Identifier:
8E:48:3D:8A:6F:E1:E8:79:2A:65:AA:20:04:66:90:98:8A:F1:E4:C9
X509v3 Authority Key Identifier:
keyid:8E:48:3D:8A:6F:E1:E8:79:2A:65:AA:20:04:66:90:98:8A:F1:E4:C9
X509v3 Basic Constraints: critical
CA:TRUE
Signature Algorithm: sha256WithRSAEncryption
a3:85:2b:f3:ee:e4:b2:36:02:db:7c:a3:85:ba:fc:89:21:b6:
a6:ac:b9:78:59:c0:f6:cd:ae:b6:3b:03:6e:47:3b:be:aa:fd:
78:50:7b:bc:18:35:9e:b1:d5:b9:53:69:62:41:d6:57:6e:e4:
61:4b:a9:dc:49:d5:79:fe:d8:ef:43:03:77:49:78:08:4a:ed:
02:40:33:79:a4:4f:f1:c0:4e:38:2c:39:ab:99:92:2c:d9:3b:
fc00:db20:35b:7399::5:9f:6f:fe:b1:73:f6:a0:c4:a1:89:
d6:8a:82:43:53:db:35:e6:84:e5:8e:b6:7e:7d:dd:63:03:4e:
fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:36:96:d2:17:91:9a:d5:c2:fd:b4:
42:c3:e0:c1:35:d0:4a:bf:14:71:ac:6e:5f:56:60:d4:9f:76:
77:d0:a8:53:f6:1d:7f:83:a4:69:d2:a3:1f:c2:68:04:d2:39:
53:73:4c:e3:88:ed:42:7a:b4:8b:9b:90:96:9c:0d:62:91:c0:
f5:92:76:f0:b9:44:1d:f5:fe:64:0d:3b:d9:75:6a:6b:3d:f3:
fc:c3:95:d8:0f:53:6a:45:4b:ea:ef:1f:f7:4b:e1:25:6f:e2:
f4:09:c3:f1:bf:db:02:35:d5:5f:03:6f:d9:8f:95:2a:5e:34:
21:6c:88:c6
-----BEGIN CERTIFICATE-----
MIIDCTCCAfGgAwIBAgIUChiey+Li7srtS7YUTKb7Dqw0JwgwDQYJKoZIhvcNAQEL
BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIwMDYwMzIxMzgxOFoXDTIwMDcw
MzIxMzgxOFowFDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEF
AAOCAQ8AMIIBCgKCAQEA3ath19By8Ek405ynvJeo9rF+XE5S3Lf1IrN5SoHbKNon
EgrgiGCe/9jVWZXNFf4tnbbhVvGT3Zv1kLRRJxpKa0hByctCy3TDjWbfrToEuoLU
cCHkgqg/iXHZCTsbGPkv3NcBmIH72oYPe5s3MgoF49HZveZJqzgtPlXegdc3spnE
v1KKGI73NIacj3+nD1Cev+pvMKt5KCVxjBVuUq+UeG+GxmYcCe0dusSYb6NkIkjs
tPVuaw8y2z/JlCbybLy4veY0Y8tKjcOeIzs2BoS9tqZxb9+WgmPWp5NVy3Qzmkl6
0HJijvQu54WWbNgS7Kn8AEJ9Fg+WmyqnJF50qVejDQIDAQABo1MwUTAdBgNVHQ4E
FgQUjkg9im/h6HkqZaogBGaQmIrx5MkwHwYDVR0jBBgwFoAUjkg9im/h6HkqZaog
BGaQmIrx5MkwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAo4Ur
8+7ksjYC23yjhbr8iSG2pqy5eFnA9s2utjsDbkc7vqr9eFB7vBg1nrHVuVNpYkHW
V27kYUup3EnVef7Y70MDd0l4CErtAkAzeaRP8cBOOCw5q5mSLNk7bYAroSxLBQKf
b/6xc/agxKGJ1oqCQ1PbNeaE5Y62fn3dYwNOY6WXDNe8cj82ltIXkZrVwv20QsPg
wTXQSr8UcaxuX1Zg1J92d9CoU/Ydf4OkadKjH8JoBNI5U3NM44jtQnq0i5uQlpwN
YpHA9ZJ28LlEHfX+ZA072XVqaz3z/MOV2A9TakVL6u8f90vhJW/i9AnD8b/bAjXV
XwNv2Y+VKl40IWyIxg==
-----END CERTIFICATE-----
`
package stripe
// CreditNoteLineItemType is the type of the credit note line item, one of
// `invoice_line_item` or `custom_line_item`. When the type is
// `invoice_line_item` there is an additional `invoice_line_item` property on
// the resource, the value of which is the id of the credited line item on the
// invoice.
type CreditNoteLineItemType string

// List of values that CreditNoteLineItemType can take.
const (
	CreditNoteLineItemTypeCustomLineItem  CreditNoteLineItemType = "custom_line_item"
	CreditNoteLineItemTypeInvoiceLineItem CreditNoteLineItemType = "invoice_line_item"
)

// CreditNoteLineItemDiscountAmount is the per-discount breakdown of the amount
// being credited for a line item. (Amounts are integers in the smallest
// currency unit; the "%s" in upstream comments is a placeholder left by
// Stripe's API-docs generator for the currency.)
type CreditNoteLineItemDiscountAmount struct {
	// The amount of the discount, as an integer in the smallest currency unit.
	Amount int64 `json:"amount"`
	// The discount that was applied to get this discount amount.
	Discount *Discount `json:"discount"`
}

// CreditNoteLineItem is the resource representing a Stripe credit note line item.
// For more details see https://stripe.com/docs/api/credit_notes/line_item
type CreditNoteLineItem struct {
	// The integer amount (smallest currency unit) representing the gross amount being credited for this line item, excluding (exclusive) tax and discounts.
	Amount int64 `json:"amount"`
	// Description of the item being credited.
	Description string `json:"description"`
	// The integer amount (smallest currency unit) representing the discount being credited for this line item.
	DiscountAmount int64 `json:"discount_amount"`
	// The amount of discount calculated per discount for this line item.
	DiscountAmounts []*CreditNoteLineItemDiscountAmount `json:"discount_amounts"`
	// Unique identifier for the object.
	ID string `json:"id"`
	// ID of the invoice line item being credited (only set when Type is `invoice_line_item`).
	InvoiceLineItem string `json:"invoice_line_item"`
	// Has the value `true` if the object exists in live mode or the value `false` if the object exists in test mode.
	Livemode bool `json:"livemode"`
	// String representing the object's type. Objects of the same type share the same value.
	Object string `json:"object"`
	// The number of units of product being credited.
	Quantity int64 `json:"quantity"`
	// The amount of tax calculated per tax rate for this line item.
	TaxAmounts []*CreditNoteTaxAmount `json:"tax_amounts"`
	// The tax rates which apply to the line item.
	TaxRates []*TaxRate `json:"tax_rates"`
	// The type of the credit note line item, one of `invoice_line_item` or `custom_line_item`. When the type is `invoice_line_item` there is an additional `invoice_line_item` property on the resource the value of which is the id of the credited line item on the invoice.
	Type CreditNoteLineItemType `json:"type"`
	// The cost of each unit of product being credited.
	UnitAmount int64 `json:"unit_amount"`
	// Same as `unit_amount`, but contains a decimal value with at most 12 decimal places.
	UnitAmountDecimal float64 `json:"unit_amount_decimal,string"`
}
// CreditNoteLineItemList is a list of CreditNoteLineItems as retrieved from a list endpoint.
type CreditNoteLineItemList struct {
APIResource
ListMeta
Data []*CreditNoteLineItem `json:"data"`
} | creditnotelineitem.go | 0.818882 | 0.423875 | creditnotelineitem.go | starcoder |
package sql_nosql_injection
import (
"github.com/threagile/threagile/model"
)
// Category returns the static description of the SQL/NoSQL-Injection risk
// rule: identification, remediation guidance links, STRIDE classification,
// and assessment text used when rendering the threat model report.
// NOTE(review): the Description string reads "sensitivity technical asset
// itself" — a grammar slip in user-facing text; left unchanged here because
// it is runtime output, but worth fixing at the source.
func Category() model.RiskCategory {
	return model.RiskCategory{
		Id:    "sql-nosql-injection",
		Title: "SQL/NoSQL-Injection",
		Description: "When a database is accessed via database access protocols SQL/NoSQL-Injection risks might arise. " +
			"The risk rating depends on the sensitivity technical asset itself and of the data assets processed or stored.",
		Impact:     "If this risk is unmitigated, attackers might be able to modify SQL/NoSQL queries to steal and modify data and eventually further escalate towards a deeper system penetration via code executions.",
		ASVS:       "V5 - Validation, Sanitization and Encoding Verification Requirements",
		CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/SQL_Injection_Prevention_Cheat_Sheet.html",
		Action:     "SQL/NoSQL-Injection Prevention",
		Mitigation: "Try to use parameter binding to be safe from injection vulnerabilities. " +
			"When a third-party product is used instead of custom developed software, check if the product applies the proper mitigation and ensure a reasonable patch-level.",
		Check:          "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?",
		Function:       model.Development,
		STRIDE:         model.Tampering,
		DetectionLogic: "Database accessed via typical database access protocols by in-scope clients.",
		RiskAssessment: "The risk rating depends on the sensitivity of the data stored inside the database.",
		FalsePositives: "Database accesses by queries not consisting of parts controllable by the caller can be considered " +
			"as false positives after individual review.",
		ModelFailurePossibleReason: false,
		CWE:                        89,
	}
}
// SupportedTags lists the model tags this rule understands; this rule does
// not use any, so the list is empty (but non-nil).
func SupportedTags() []string {
	tags := []string{}
	return tags
}
// GenerateRisks scans every in-scope technical asset in the parsed model and
// emits one SQL/NoSQL-Injection risk per incoming communication link that
// uses a database-access protocol. Reads the model from package-level state
// (model.ParsedModelRoot and the incoming-links index).
func GenerateRisks() []model.Risk {
	risks := make([]model.Risk, 0)
	for _, id := range model.SortedTechnicalAssetIDs() {
		technicalAsset := model.ParsedModelRoot.TechnicalAssets[id]
		incomingFlows := model.IncomingTechnicalCommunicationLinksMappedByTargetId[technicalAsset.Id]
		for _, incomingFlow := range incomingFlows {
			// Out-of-scope callers cannot produce findings.
			if model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId].OutOfScope {
				continue
			}
			// Either a database-ish protocol hitting an actual database asset, or a
			// protocol that is a database-access protocol regardless of target type.
			// NOTE(review): the exact semantics of the laxDB bool parameter of
			// IsPotentialDatabaseAccessProtocol are not visible here — confirm.
			if incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(true) && (technicalAsset.Technology == model.Database || technicalAsset.Technology == model.IdentityStoreDatabase) ||
				(incomingFlow.Protocol.IsPotentialDatabaseAccessProtocol(false)) {
				risks = append(risks, createRisk(technicalAsset, incomingFlow))
			}
		}
	}
	return risks
}
func createRisk(technicalAsset model.TechnicalAsset, incomingFlow model.CommunicationLink) model.Risk {
caller := model.ParsedModelRoot.TechnicalAssets[incomingFlow.SourceId]
title := "<b>SQL/NoSQL-Injection</b> risk at <b>" + caller.Title + "</b> against database <b>" + technicalAsset.Title + "</b>" +
" via <b>" + incomingFlow.Title + "</b>"
impact := model.MediumImpact
if technicalAsset.HighestConfidentiality() == model.Sensitive || technicalAsset.HighestIntegrity() == model.MissionCritical {
impact = model.HighImpact
}
likelihood := model.VeryLikely
if incomingFlow.Usage == model.DevOps {
likelihood = model.Likely
}
risk := model.Risk{
Category: Category(),
Severity: model.CalculateSeverity(likelihood, impact),
ExploitationLikelihood: likelihood,
ExploitationImpact: impact,
Title: title,
MostRelevantTechnicalAssetId: caller.Id,
MostRelevantCommunicationLinkId: incomingFlow.Id,
DataBreachProbability: model.Probable,
DataBreachTechnicalAssetIDs: []string{technicalAsset.Id},
}
risk.SyntheticId = risk.Category.Id + "@" + caller.Id + "@" + technicalAsset.Id + "@" + incomingFlow.Id
return risk
} | risks/built-in/sql-nosql-injection/sql-nosql-injection-rule.go | 0.648132 | 0.457197 | sql-nosql-injection-rule.go | starcoder |
package rtime
import (
"strings"
"time"
)
// Minute returns the minute offset within the hour specified by t, in the range [0, 59].
func (t Time) Minute() int {
	return time.Time(t).Minute()
}

// Second returns the second offset within the minute specified by t, in the range [0, 59].
func (t Time) Second() int {
	return time.Time(t).Second()
}

// Hour returns the hour within the day specified by t, in the range [0, 23].
func (t Time) Hour() int {
	return time.Time(t).Hour()
}

// HourString returns the spelled-out Russian representation of the hour,
// e.g. "Три часа" (number word plus the correctly declined unit).
func (t Time) HourString() (result string) {
	result = numberInString(t.Hour(), false)
	result += " " + hoursTail(t.Hour())
	return
}

// MinuteString returns the spelled-out Russian representation of the minute,
// using the feminine number form required by "минута".
func (t Time) MinuteString() (result string) {
	result = numberInString(t.Minute(), true)
	result += " " + minutesTail(t.Minute())
	return
}

// SecondString returns the spelled-out Russian representation of the second,
// using the feminine number form required by "секунда".
func (t Time) SecondString() (result string) {
	result = numberInString(t.Second(), true)
	result += " " + secondsTail(t.Second())
	return
}
// hoursTail returns the Russian noun "hour" declined to agree with the given
// count: "час" for counts ending in 1, "часа" for 2-4, "часов" otherwise
// (teens and 0 included). Counts above 20 are reduced to their last digit
// first, mirroring Russian grammar.
func hoursTail(hours int) string {
	h := hours
	if h > 100 {
		h = h % 100 % 10
	} else if h > 20 && h < 100 {
		h %= 10
	}
	switch h {
	case 1:
		return "час"
	case 2, 3, 4:
		return "часа"
	}
	return "часов"
}
// minutesTail returns the Russian noun "minute" declined to agree with the
// given count: "минута" for counts ending in 1, "минуты" for 2-4, "минут"
// otherwise (teens and 0 included). Counts above 20 are reduced to their last
// digit first.
func minutesTail(minutes int) string {
	m := minutes
	if m > 60 {
		m = m % 60 % 10
	} else if m > 20 && m < 60 {
		m %= 10
	}
	switch m {
	case 1:
		return "минута"
	case 2, 3, 4:
		return "минуты"
	}
	return "минут"
}
// secondsTail returns the Russian noun "second" declined to agree with the
// given count: "секунда" for counts ending in 1, "секунды" for 2-4, "секунд"
// otherwise (teens and 0 included). Counts above 20 are reduced to their last
// digit first.
func secondsTail(seconds int) string {
	s := seconds
	if s > 60 {
		s = s % 60 % 10
	} else if s > 20 && s < 60 {
		s %= 10
	}
	switch s {
	case 1:
		return "секунда"
	case 2, 3, 4:
		return "секунды"
	}
	return "секунд"
}
// numberInString spells out num in Russian words. female selects the feminine
// form of 1 and 2 ("Одна"/"Две" vs "Один"/"Два") as required by the gender of
// the counted noun. Two-digit numbers not listed directly are composed from
// their tens word plus the lower-cased units word ("Двадцать один").
//
// NOTE(review): for values outside 0-99 whose last digit is 0 (e.g. 100) the
// recursion in the default branch never terminates; callers in this file only
// pass clock components (0-59 / 0-23), so the hazard is currently unreachable
// — confirm before widening the input domain.
// (Trailing dataset residue on the closing brace, which broke compilation,
// was removed; logic is unchanged.)
func numberInString(num int, female bool) (result string) {
	switch num {
	case 0:
		result = "Ноль"
	case 1:
		if female {
			result = "Одна"
		} else {
			result = "Один"
		}
	case 2:
		if female {
			result = "Две"
		} else {
			result = "Два"
		}
	case 3:
		result = "Три"
	case 4:
		result = "Четыре"
	case 5:
		result = "Пять"
	case 6:
		result = "Шесть"
	case 7:
		result = "Семь"
	case 8:
		result = "Восемь"
	case 9:
		result = "Девять"
	case 10:
		result = "Десять"
	case 11:
		result = "Одиннадцать"
	case 12:
		result = "Двенадцать"
	case 13:
		result = "Тринадцать"
	case 14:
		result = "Четырнадцать"
	case 15:
		result = "Пятнадцать"
	case 16:
		result = "Шестнадцать"
	case 17:
		result = "Семнадцать"
	case 18:
		result = "Восемнадцать"
	case 19:
		result = "Девятнадцать"
	case 20:
		result = "Двадцать"
	case 30:
		result = "Тридцать"
	case 40:
		result = "Сорок"
	case 50:
		result = "Пятьдесят"
	case 60:
		result = "Шестьдесят"
	case 70:
		result = "Семьдесят"
	case 80:
		result = "Восемьдесят"
	case 90:
		result = "Девяносто"
	default:
		// Compose tens + units, lower-casing the units word.
		lastDigit := num % 10
		result = numberInString(num-lastDigit, female)
		result += " " + strings.ToLower(numberInString(lastDigit, female))
	}
	return
}
package roster
import (
"github.com/google/uuid"
"github.com/dansage/synergySync/pkg/orm"
)
// Course represents a course from the roster data.
// NOTE(review): gorm's tag syntax for defaults is normally `default:false`;
// the `default(false)` form used on the Active fields below may be silently
// ignored by gorm — confirm against the gorm version in use.
type Course struct {
	orm.NumericID
	orm.Timestamps
	// Active indicates if the course is still listed in the roster data
	Active bool `gorm:"not null; default(false)" json:"active"`
	// Name is the display name of the course
	Name string `gorm:"not null" json:"name"`
	// Seen indicates if the course was seen in the roster data (transient
	// sync-bookkeeping flag; excluded from the database by the `gorm:"-"` tag)
	Seen bool `gorm:"-"`
}

// School represents a school from the roster data
type School struct {
	orm.NumericID
	orm.Timestamps
	// SynergyID is the numerical ID assigned to the school by the SIS
	SynergyID uint `gorm:"not null"`
}

// Section represents a section from the roster data
type Section struct {
	orm.NumericID
	orm.Timestamps
	// CourseID is the numerical ID of the course
	CourseID uint `gorm:"not null"`
	// StudentID is the unique ID of the student
	StudentID uuid.UUID `gorm:"not null"`
	// TeacherID is the numerical ID of the teacher
	TeacherID uint `gorm:"not null"`
	// Period is the numerical period in which the section takes place
	Period uint `gorm:"not null"`
	// Seen indicates if the section was seen in the roster data (transient; not persisted)
	Seen bool `gorm:"-"`
}

// TableName overrides the table name used by Section to `student_courses`
// (gorm calls this method when resolving the table for the model).
func (Section) TableName() string {
	return "student_courses"
}

// Student represents a student from the roster data
type Student struct {
	orm.UniqueID
	orm.Timestamps
	// Active indicates if the student is still listed in the roster data
	Active bool `gorm:"not null; default(false)" json:"active"`
	// DistrictID is the numerical ID assigned to the student by the SIS
	DistrictID uint `gorm:"not null"`
	// SchoolID of the school the student attends
	SchoolID uint `gorm:"not null"`
	// Name is the display name of the student
	Name string `gorm:"not null" json:"name"`
	// Seen indicates if the student was seen in the roster data (transient; not persisted)
	Seen bool `gorm:"-"`
}
// Teacher represents a teacher from the roster data
type Teacher struct {
orm.NumericID
orm.Timestamps
// Name is the display name of the course
Name string `gorm:"not null" json:"name"`
} | app/roster/models.go | 0.571527 | 0.479077 | models.go | starcoder |
package operator
import (
"github.com/matrixorigin/matrixcube/components/prophet/core"
"github.com/matrixorigin/matrixcube/components/prophet/limit"
"github.com/matrixorigin/matrixcube/components/prophet/pb/metapb"
)
// OpInfluence records the influence that pending operators have across the
// cluster, keyed by container id.
type OpInfluence struct {
	ContainersInfluence map[uint64]*ContainerInfluence
}
// GetContainerInfluence get containerInfluence of specific container.
func (m OpInfluence) GetContainerInfluence(id uint64) *ContainerInfluence {
containerInfluence, ok := m.ContainersInfluence[id]
if !ok {
containerInfluence = &ContainerInfluence{
InfluenceStats: map[string]InfluenceStats{},
}
m.ContainersInfluence[id] = containerInfluence
}
return containerInfluence
}
// InfluenceStats aggregates the pending size/count deltas for one group key:
// how much resource and leader size/count will change once the pending
// operators complete.
type InfluenceStats struct {
	ResourceSize  int64
	ResourceCount int64
	LeaderSize    int64
	LeaderCount   int64
}

// ContainerInfluence records influences that pending operators will make.
type ContainerInfluence struct {
	// InfluenceStats is keyed by group key (see ResourceProperty).
	InfluenceStats map[string]InfluenceStats
	// StepCost accumulates per-limit-type cost of the operator steps.
	StepCost map[limit.Type]int64
}
// ResourceProperty returns the pending delta of the property selected by the
// schedule kind for the given group key: leader count or size (depending on
// the schedule policy) for leader-kind, resource size for replica-kind, and 0
// for anything unrecognized.
func (s ContainerInfluence) ResourceProperty(kind core.ScheduleKind, groupKey string) int64 {
	switch kind.ResourceKind {
	case metapb.ResourceKind_LeaderKind:
		switch kind.Policy {
		case core.ByCount:
			return s.InfluenceStats[groupKey].LeaderCount
		case core.BySize:
			return s.InfluenceStats[groupKey].LeaderSize
		default:
			return 0
		}
	case metapb.ResourceKind_ReplicaKind:
		return s.InfluenceStats[groupKey].ResourceSize
	default:
		return 0
	}
}
// GetStepCost returns the accumulated step cost for the given limit type,
// or zero when no cost has been recorded.
func (s ContainerInfluence) GetStepCost(limitType limit.Type) int64 {
	// Indexing a nil map yields the zero value in Go, so the previous explicit
	// nil check was redundant.
	return s.StepCost[limitType]
}
// addStepCost adds cost to the running total for limitType, lazily allocating
// the StepCost map on first write (writing to a nil map would panic).
func (s *ContainerInfluence) addStepCost(limitType limit.Type, cost int64) {
	if s.StepCost == nil {
		s.StepCost = make(map[limit.Type]int64)
	}
	s.StepCost[limitType] += cost
}
// AdjustStepCost adjusts the step cost of specific type container limit according to resource size
func (s *ContainerInfluence) AdjustStepCost(limitType limit.Type, resourceSize int64) {
if resourceSize > limit.SmallResourceThreshold {
s.addStepCost(limitType, limit.ResourceInfluence[limitType])
} else if resourceSize <= limit.SmallResourceThreshold && resourceSize > limit.EmptyResourceApproximateSize {
s.addStepCost(limitType, limit.SmallResourceInfluence[limitType])
}
} | components/prophet/schedule/operator/influence.go | 0.649134 | 0.410047 | influence.go | starcoder |
package datadog
import (
"encoding/json"
)
// GraphSnapshot Object representing a graph snapshot. All fields are optional
// pointers (nil means "not set"), per the generated Datadog client convention.
type GraphSnapshot struct {
	// A JSON document defining the graph. `graph_def` can be used instead of `metric_query`. The JSON document uses the [grammar defined here](https://docs.datadoghq.com/graphing/graphing_json/#grammar) and should be formatted to a single line then URL encoded.
	GraphDef *string `json:"graph_def,omitempty"`
	// The metric query. One of `metric_query` or `graph_def` is required.
	MetricQuery *string `json:"metric_query,omitempty"`
	// URL of your [graph snapshot](https://docs.datadoghq.com/metrics/explorer/#snapshot).
	SnapshotUrl *string `json:"snapshot_url,omitempty"`
}
// NewGraphSnapshot instantiates a new GraphSnapshot object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewGraphSnapshot() *GraphSnapshot {
	this := GraphSnapshot{}
	return &this
}

// NewGraphSnapshotWithDefaults instantiates a new GraphSnapshot object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
// (for GraphSnapshot both constructors are currently equivalent, as no field
// declares a default).
func NewGraphSnapshotWithDefaults() *GraphSnapshot {
	this := GraphSnapshot{}
	return &this
}
// GetGraphDef returns the GraphDef field value if set, the empty string otherwise.
func (o *GraphSnapshot) GetGraphDef() string {
	if o == nil || o.GraphDef == nil {
		return ""
	}
	return *o.GraphDef
}

// GetGraphDefOk returns a tuple with the GraphDef field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *GraphSnapshot) GetGraphDefOk() (*string, bool) {
	if o == nil || o.GraphDef == nil {
		return nil, false
	}
	return o.GraphDef, true
}

// HasGraphDef returns a boolean if a field has been set.
func (o *GraphSnapshot) HasGraphDef() bool {
	return o != nil && o.GraphDef != nil
}

// SetGraphDef gets a reference to the given string and assigns it to the GraphDef field.
func (o *GraphSnapshot) SetGraphDef(v string) {
	o.GraphDef = &v
}

// GetMetricQuery returns the MetricQuery field value if set, the empty string otherwise.
func (o *GraphSnapshot) GetMetricQuery() string {
	if o == nil || o.MetricQuery == nil {
		return ""
	}
	return *o.MetricQuery
}

// GetMetricQueryOk returns a tuple with the MetricQuery field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *GraphSnapshot) GetMetricQueryOk() (*string, bool) {
	if o == nil || o.MetricQuery == nil {
		return nil, false
	}
	return o.MetricQuery, true
}

// HasMetricQuery returns a boolean if a field has been set.
func (o *GraphSnapshot) HasMetricQuery() bool {
	return o != nil && o.MetricQuery != nil
}

// SetMetricQuery gets a reference to the given string and assigns it to the MetricQuery field.
func (o *GraphSnapshot) SetMetricQuery(v string) {
	o.MetricQuery = &v
}

// GetSnapshotUrl returns the SnapshotUrl field value if set, the empty string otherwise.
func (o *GraphSnapshot) GetSnapshotUrl() string {
	if o == nil || o.SnapshotUrl == nil {
		return ""
	}
	return *o.SnapshotUrl
}

// GetSnapshotUrlOk returns a tuple with the SnapshotUrl field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *GraphSnapshot) GetSnapshotUrlOk() (*string, bool) {
	if o == nil || o.SnapshotUrl == nil {
		return nil, false
	}
	return o.SnapshotUrl, true
}

// HasSnapshotUrl returns a boolean if a field has been set.
func (o *GraphSnapshot) HasSnapshotUrl() bool {
	return o != nil && o.SnapshotUrl != nil
}

// SetSnapshotUrl gets a reference to the given string and assigns it to the SnapshotUrl field.
func (o *GraphSnapshot) SetSnapshotUrl(v string) {
	o.SnapshotUrl = &v
}
// MarshalJSON serializes only the fields that are set, matching the
// `omitempty` semantics of the struct tags.
func (o GraphSnapshot) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	if o.GraphDef != nil {
		out["graph_def"] = o.GraphDef
	}
	if o.MetricQuery != nil {
		out["metric_query"] = o.MetricQuery
	}
	if o.SnapshotUrl != nil {
		out["snapshot_url"] = o.SnapshotUrl
	}
	return json.Marshal(out)
}
// NullableGraphSnapshot wraps a GraphSnapshot together with an explicit
// "has been set" flag, so JSON null can be distinguished from "absent".
type NullableGraphSnapshot struct {
	value *GraphSnapshot
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableGraphSnapshot) Get() *GraphSnapshot {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableGraphSnapshot) Set(val *GraphSnapshot) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableGraphSnapshot) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableGraphSnapshot) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableGraphSnapshot returns a wrapper already marked as set.
func NewNullableGraphSnapshot(val *GraphSnapshot) *NullableGraphSnapshot {
	return &NullableGraphSnapshot{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (null when the value is nil).
func (v NullableGraphSnapshot) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableGraphSnapshot) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | api/v1/datadog/model_graph_snapshot.go | 0.834373 | 0.401248 | model_graph_snapshot.go | starcoder |
package day11
import (
"fmt"
)
// Coordinate is a value with x and y parts (1-indexed grid position).
type Coordinate struct {
	x int
	y int
}

// Square represents a square of Coordinates in a Grid: its top-left anchor,
// edge length, and the sum of the power levels it covers.
type Square struct {
	Coordinate
	Size int
	Sum  int
}
type Grid struct {
SN int
Cells map[int]map[int]int
size int
}
// NewGrid creates a fully populated grid
func NewGrid(sn, size int) Grid {
g := Grid{
SN: sn,
Cells: make(map[int]map[int]int),
size: size,
}
for x := 1; x <= size; x++ {
if _, ok := g.Cells[x]; !ok {
g.Cells[x] = make(map[int]int, size)
}
for y := 1; y <= size; y++ {
g.Cells[x][y] = powerLevel(x, y, sn)
}
}
return g
}
func hundredth(n int) int {
return int(n / 100) % 10
}
func powerLevel(x, y, sn int) int {
rackID := x + 10
pl := rackID * y
pl += sn
pl *= rackID
return hundredth(pl) - 5
}
// sumSquare totals the power levels of the sq.Size×sq.Size cells anchored at
// sq's top-left coordinate.
func sumSquare(g Grid, sq Square) int {
	total := 0
	for dx := 0; dx < sq.Size; dx++ {
		for dy := 0; dy < sq.Size; dy++ {
			total += g.Cells[sq.x+dx][sq.y+dy]
		}
	}
	return total
}
// FindAnyLargestSquare finds the square (of any size from 1 to the grid edge)
// with the maximum power sum, by running FindLargestSquare once per size and
// keeping the best result.
// NOTE(review): sem has capacity 1, so at most one size is being searched at
// a time — the goroutine fan-out here adds little actual parallelism. Also
// note the fmt.Printf progress output is a side effect callers may not want.
func FindAnyLargestSquare(g Grid) Square {
	ch := make(chan Square, g.size)
	sem := make(chan struct{}, 1)
	for size := 1; size <= g.size; size++ {
		sem <- struct{}{}
		go func(size int) {
			sq := FindLargestSquare(g, size)
			fmt.Printf("Completed size: %d\n", size)
			<-sem
			ch<- sq
		}(size)
	}
	// Exactly one result arrives per size; take the maximum sum.
	max := Square{}
	for i := 0; i < g.size; i++ {
		sq := <- ch
		if sq.Sum > max.Sum {
			max = sq
		}
	}
	return max
}
// FindLargestSquare finds the largest square fo a specified size
func FindLargestSquare(g Grid, size int) Square {
ch := make(chan Square, 50)
sem := make(chan struct{}, 50)
bound := g.size-size
for x := 1; x <= bound; x++ {
for y := 1; y <= bound; y++ {
sem <- struct{}{}
go func(x, y int) {
s := sumSquare(g, Square{Coordinate{x, y}, size, 0})
<-sem
ch<- Square{Coordinate{x, y}, size, s}
}(x, y)
}
}
max := Square{}
total := (bound * bound)
for i := 0; i < total; i++ {
sq := <- ch
if sq.Sum > max.Sum {
max = sq
}
}
return max
} | day11/day11.go | 0.762778 | 0.463566 | day11.go | starcoder |
package base36
// Simplified code based on https://godoc.org/github.com/mr-tron/base58
// which in turn is based on https://github.com/trezor/trezor-crypto/commit/89a7d7797b806fac
import (
"fmt"
)
// UcAlphabet is the upper-case base36 digit alphabet.
const UcAlphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"

// LcAlphabet is the lower-case base36 digit alphabet.
const LcAlphabet = "0123456789abcdefghijklmnopqrstuvwxyz"

// maxDigitOrdinal is the largest byte value that can be a valid digit ('z').
const maxDigitOrdinal = byte('z')

// maxDigitValueB36 is the largest digit value in base 36.
const maxDigitValueB36 = 35

// revAlphabet maps a digit byte to its value; entries > maxDigitValueB36 mark
// invalid digits.
var revAlphabet [maxDigitOrdinal + 1]byte

func init() {
	// Mark every byte invalid, then fill in 0-9 and both letter cases
	// (decoding is case-insensitive: 'A' and 'a' both map to 10, etc.).
	for i := range revAlphabet {
		revAlphabet[i] = maxDigitValueB36 + 1
	}
	for i, c := range UcAlphabet {
		revAlphabet[byte(c)] = byte(i)
		if c > '9' {
			// +32 converts an upper-case ASCII letter to its lower-case form.
			revAlphabet[byte(c)+32] = byte(i)
		}
	}
}
// EncodeToStringUc encodes the given byte-buffer as base36 using [0-9A-Z] as
// the digit-alphabet
func EncodeToStringUc(b []byte) string { return encode(b, UcAlphabet) }

// EncodeToStringLc encodes the given byte-buffer as base36 using [0-9a-z] as
// the digit-alphabet
func EncodeToStringLc(b []byte) string { return encode(b, LcAlphabet) }
// encode converts inBuf to a base36 string using the given 36-character
// alphabet. Leading zero bytes are preserved as leading '0' digits. The body
// is a base-conversion long division carried out digit-by-digit, sharing one
// output buffer between the numeric scratch space and the final string —
// intricate, so treat the index arithmetic below as load-bearing.
func encode(inBuf []byte, al string) string {
	// As a polar opposite to the base58 implementation, using a uint32 here is
	// significantly slower
	var carry uint64
	var encIdx, valIdx, zcnt, high int
	inSize := len(inBuf)
	// Count leading zero bytes; each becomes a literal '0' in the output.
	for zcnt < inSize && inBuf[zcnt] == 0 {
		zcnt++
	}
	// Really this is log(256)/log(36) or 1.55, but integer math is easier
	// Use 2 as a constant and just overallocate
	encSize := (inSize - zcnt) * 2
	// Allocate one big buffer up front
	// Note: pools *DO NOT* help, the overhead of zeroing the val-half (see below)
	// kills any performance gain to be had
	outBuf := make([]byte, (zcnt + encSize*2))
	// use the second half for the temporary numeric buffer
	val := outBuf[encSize+zcnt:]
	high = encSize - 1
	// Long division: fold each input byte into the base-36 digit array.
	for _, b := range inBuf[zcnt:] {
		valIdx = encSize - 1
		for carry = uint64(b); valIdx > high || carry != 0; valIdx-- {
			carry += uint64((val[valIdx])) * 256
			val[valIdx] = byte(carry % 36)
			carry /= 36
		}
		high = valIdx
	}
	// Reset the value index to the first significant value position
	for valIdx = 0; valIdx < encSize && val[valIdx] == 0; valIdx++ {
	}
	// Now write the known-length result to first half of buffer
	encSize += zcnt - valIdx
	for encIdx = 0; encIdx < zcnt; encIdx++ {
		outBuf[encIdx] = '0'
	}
	for encIdx < encSize {
		outBuf[encIdx] = al[val[valIdx]]
		encIdx++
		valIdx++
	}
	return string(outBuf[:encSize])
}
// DecodeString takes a base36 encoded string and returns a slice of the decoded
// bytes.
func DecodeString(s string) ([]byte, error) {
if len(s) == 0 {
return nil, fmt.Errorf("can not decode zero-length string")
}
var zcnt int
for i := 0; i < len(s) && s[i] == '0'; i++ {
zcnt++
}
var t, c uint64
outi := make([]uint32, (len(s)+3)/4)
binu := make([]byte, (len(s)+3)*3)
for _, r := range s {
if r > rune(maxDigitOrdinal) || revAlphabet[r] > maxDigitValueB36 {
return nil, fmt.Errorf("invalid base36 character (%q)", r)
}
c = uint64(revAlphabet[r])
for j := len(outi) - 1; j >= 0; j-- {
t = uint64(outi[j])*36 + c
c = (t >> 32)
outi[j] = uint32(t & 0xFFFFFFFF)
}
}
mask := (uint(len(s)%4) * 8)
if mask == 0 {
mask = 32
}
mask -= 8
var j, cnt int
for j, cnt = 0, 0; j < len(outi); j++ {
for mask < 32 { // loop relies on uint overflow
binu[cnt] = byte(outi[j] >> mask)
mask -= 8
cnt++
}
mask = 24
}
for n := zcnt; n < len(binu); n++ {
if binu[n] > 0 {
return binu[n-zcnt : cnt], nil
}
}
return binu[:cnt], nil
} | vendor/github.com/multiformats/go-base36/base36.go | 0.709925 | 0.479382 | base36.go | starcoder |
package iso20022
// Creation/cancellation of investment units on the books of the fund or its designated agent, as a result of executing an investment fund order.
type InvestmentFundTransaction4 struct {
// Underlying transaction or corporate action.
EventType *TransactionType1Choice `xml:"EvtTp"`
// Status of an investment fund transaction.
BookingStatus *TransactionStatus1Code `xml:"BookgSts,omitempty"`
// Unique and unambiguous identifier for a group of individual orders, as assigned by the instructing party. This identifier links the individual orders together.
MasterReference *Max35Text `xml:"MstrRef,omitempty"`
// Unique and unambiguous identifier for an order, as assigned by the instructing party.
OrderReference *Max35Text `xml:"OrdrRef,omitempty"`
// Unique and unambiguous investor's identification of an order. This reference can typically be used in a hub scenario to give the reference of the order as assigned by the underlying client.
ClientReference *Max35Text `xml:"ClntRef,omitempty"`
// Unique and unambiguous identifier for an order execution, as assigned by a confirming party.
DealReference *Max35Text `xml:"DealRef,omitempty"`
// Unique technical identifier for an instance of a leg within a switch.
LegIdentification *Max35Text `xml:"LegId,omitempty"`
// Unique identifier for an instance of a leg execution within a switch confirmation.
LegExecutionIdentification *Max35Text `xml:"LegExctnId,omitempty"`
// Date and time at which the order was placed by the investor or its agent.
OrderDateTime *ISODateTime `xml:"OrdrDtTm,omitempty"`
// Indicates whether the cash payment with respect to the executed order is settled.
SettledTransactionIndicator *YesNoIndicator `xml:"SttldTxInd"`
// Indicates whether the executed order has a registered status on the books of the transfer agent.
RegisteredTransactionIndicator *YesNoIndicator `xml:"RegdTxInd"`
// Number of investment funds units.
UnitsQuantity *FinancialInstrumentQuantity1 `xml:"UnitsQty"`
// Direction of the transaction being reported, is, securities are received (credited) or delivered (debited).
CreditDebit *CreditDebitCode `xml:"CdtDbt"`
// Transaction being reported is a reversal of previously reported transaction.
Reversal *ReversalCode `xml:"Rvsl,omitempty"`
// Amount of money to be moved between the debtor and creditor, before deduction of charges, expressed in the currency as ordered by the initiating party.
SettlementAmount *ActiveCurrencyAndAmount `xml:"SttlmAmt,omitempty"`
// Date on which the debtor expects the amount of money to be available to the creditor.
SettlementDate *ISODate `xml:"SttlmDt,omitempty"`
// Date and time at which a price is applied, according to the terms stated in the prospectus.
TradeDateTime *DateAndDateTimeChoice `xml:"TradDtTm"`
// Indicates whether the dividend is included, that is, cum-dividend, in the executed price. When the dividend is not included, the price will be ex-dividend.
CumDividendIndicator *YesNoIndicator `xml:"CumDvddInd"`
// Indicates whether the order has been partially executed, that is, the confirmed quantity does not match the ordered quantity for a given financial instrument.
PartiallyExecutedIndicator *YesNoIndicator `xml:"PrtlyExctdInd"`
// Price at which the order was executed.
PriceDetails *UnitPrice20 `xml:"PricDtls,omitempty"`
}
func (i *InvestmentFundTransaction4) AddEventType() *TransactionType1Choice {
i.EventType = new(TransactionType1Choice)
return i.EventType
}
func (i *InvestmentFundTransaction4) SetBookingStatus(value string) {
i.BookingStatus = (*TransactionStatus1Code)(&value)
}
func (i *InvestmentFundTransaction4) SetMasterReference(value string) {
i.MasterReference = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetOrderReference(value string) {
i.OrderReference = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetClientReference(value string) {
i.ClientReference = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetDealReference(value string) {
i.DealReference = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetLegIdentification(value string) {
i.LegIdentification = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetLegExecutionIdentification(value string) {
i.LegExecutionIdentification = (*Max35Text)(&value)
}
func (i *InvestmentFundTransaction4) SetOrderDateTime(value string) {
i.OrderDateTime = (*ISODateTime)(&value)
}
func (i *InvestmentFundTransaction4) SetSettledTransactionIndicator(value string) {
i.SettledTransactionIndicator = (*YesNoIndicator)(&value)
}
func (i *InvestmentFundTransaction4) SetRegisteredTransactionIndicator(value string) {
i.RegisteredTransactionIndicator = (*YesNoIndicator)(&value)
}
func (i *InvestmentFundTransaction4) AddUnitsQuantity() *FinancialInstrumentQuantity1 {
i.UnitsQuantity = new(FinancialInstrumentQuantity1)
return i.UnitsQuantity
}
func (i *InvestmentFundTransaction4) SetCreditDebit(value string) {
i.CreditDebit = (*CreditDebitCode)(&value)
}
func (i *InvestmentFundTransaction4) SetReversal(value string) {
i.Reversal = (*ReversalCode)(&value)
}
func (i *InvestmentFundTransaction4) SetSettlementAmount(value, currency string) {
i.SettlementAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (i *InvestmentFundTransaction4) SetSettlementDate(value string) {
i.SettlementDate = (*ISODate)(&value)
}
func (i *InvestmentFundTransaction4) AddTradeDateTime() *DateAndDateTimeChoice {
i.TradeDateTime = new(DateAndDateTimeChoice)
return i.TradeDateTime
}
func (i *InvestmentFundTransaction4) SetCumDividendIndicator(value string) {
i.CumDividendIndicator = (*YesNoIndicator)(&value)
}
func (i *InvestmentFundTransaction4) SetPartiallyExecutedIndicator(value string) {
i.PartiallyExecutedIndicator = (*YesNoIndicator)(&value)
}
func (i *InvestmentFundTransaction4) AddPriceDetails() *UnitPrice20 {
i.PriceDetails = new(UnitPrice20)
return i.PriceDetails
} | InvestmentFundTransaction4.go | 0.817283 | 0.482795 | InvestmentFundTransaction4.go | starcoder |
package heap
// A PairingHeapInt implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapInt struct {
	less func(int, int) bool
	root *pairingHeapNodeInt
}

// A PairingHeapInt8 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapInt8 struct {
	less func(int8, int8) bool
	root *pairingHeapNodeInt8
}

// A PairingHeapInt16 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapInt16 struct {
	less func(int16, int16) bool
	root *pairingHeapNodeInt16
}

// A PairingHeapInt32 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapInt32 struct {
	less func(int32, int32) bool
	root *pairingHeapNodeInt32
}

// A PairingHeapInt64 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapInt64 struct {
	less func(int64, int64) bool
	root *pairingHeapNodeInt64
}

// A PairingHeapUint implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapUint struct {
	less func(uint, uint) bool
	root *pairingHeapNodeUint
}

// A PairingHeapUint8 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapUint8 struct {
	less func(uint8, uint8) bool
	root *pairingHeapNodeUint8
}

// A PairingHeapUint16 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapUint16 struct {
	less func(uint16, uint16) bool
	root *pairingHeapNodeUint16
}

// A PairingHeapUint32 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapUint32 struct {
	less func(uint32, uint32) bool
	root *pairingHeapNodeUint32
}

// A PairingHeapUint64 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapUint64 struct {
	less func(uint64, uint64) bool
	root *pairingHeapNodeUint64
}

// A PairingHeapFloat32 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapFloat32 struct {
	less func(float32, float32) bool
	root *pairingHeapNodeFloat32
}

// A PairingHeapFloat64 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapFloat64 struct {
	less func(float64, float64) bool
	root *pairingHeapNodeFloat64
}

// A PairingHeapComplex64 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapComplex64 struct {
	less func(complex64, complex64) bool
	root *pairingHeapNodeComplex64
}

// A PairingHeapComplex128 implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapComplex128 struct {
	less func(complex128, complex128) bool
	root *pairingHeapNodeComplex128
}

// A PairingHeapString implements the Heap interface using the Pairing Heap data structure
// see: wikipedia.org/wiki/Pairing_heap
type PairingHeapString struct {
	less func(string, string) bool
	root *pairingHeapNodeString
}

// pairingHeapNodeInt is a single node of a PairingHeapInt, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeInt struct {
	elem                   int
	parent, child, sibling *pairingHeapNodeInt
}

// pairingHeapNodeInt8 is a single node of a PairingHeapInt8, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeInt8 struct {
	elem                   int8
	parent, child, sibling *pairingHeapNodeInt8
}

// pairingHeapNodeInt16 is a single node of a PairingHeapInt16, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeInt16 struct {
	elem                   int16
	parent, child, sibling *pairingHeapNodeInt16
}

// pairingHeapNodeInt32 is a single node of a PairingHeapInt32, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeInt32 struct {
	elem                   int32
	parent, child, sibling *pairingHeapNodeInt32
}

// pairingHeapNodeInt64 is a single node of a PairingHeapInt64, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeInt64 struct {
	elem                   int64
	parent, child, sibling *pairingHeapNodeInt64
}

// pairingHeapNodeUint is a single node of a PairingHeapUint, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeUint struct {
	elem                   uint
	parent, child, sibling *pairingHeapNodeUint
}

// pairingHeapNodeUint8 is a single node of a PairingHeapUint8, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeUint8 struct {
	elem                   uint8
	parent, child, sibling *pairingHeapNodeUint8
}

// pairingHeapNodeUint16 is a single node of a PairingHeapUint16, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeUint16 struct {
	elem                   uint16
	parent, child, sibling *pairingHeapNodeUint16
}

// pairingHeapNodeUint32 is a single node of a PairingHeapUint32, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeUint32 struct {
	elem                   uint32
	parent, child, sibling *pairingHeapNodeUint32
}

// pairingHeapNodeUint64 is a single node of a PairingHeapUint64, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeUint64 struct {
	elem                   uint64
	parent, child, sibling *pairingHeapNodeUint64
}

// pairingHeapNodeFloat32 is a single node of a PairingHeapFloat32, linked to
// its parent, its first child and its next sibling.
type pairingHeapNodeFloat32 struct {
	elem                   float32
	parent, child, sibling *pairingHeapNodeFloat32
}

// pairingHeapNodeFloat64 is a single node of a PairingHeapFloat64, linked to
// its parent, its first child and its next sibling.
type pairingHeapNodeFloat64 struct {
	elem                   float64
	parent, child, sibling *pairingHeapNodeFloat64
}

// pairingHeapNodeComplex64 is a single node of a PairingHeapComplex64, linked
// to its parent, its first child and its next sibling.
type pairingHeapNodeComplex64 struct {
	elem                   complex64
	parent, child, sibling *pairingHeapNodeComplex64
}

// pairingHeapNodeComplex128 is a single node of a PairingHeapComplex128,
// linked to its parent, its first child and its next sibling.
type pairingHeapNodeComplex128 struct {
	elem                   complex128
	parent, child, sibling *pairingHeapNodeComplex128
}

// pairingHeapNodeString is a single node of a PairingHeapString, linked to its
// parent, its first child and its next sibling.
type pairingHeapNodeString struct {
	elem                   string
	parent, child, sibling *pairingHeapNodeString
}
// NewPairingHeapInt returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapInt(less func(int, int) bool) *PairingHeapInt {
	return &PairingHeapInt{less: less}
}

// NewPairingHeapInt8 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapInt8(less func(int8, int8) bool) *PairingHeapInt8 {
	return &PairingHeapInt8{less: less}
}

// NewPairingHeapInt16 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapInt16(less func(int16, int16) bool) *PairingHeapInt16 {
	return &PairingHeapInt16{less: less}
}

// NewPairingHeapInt32 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapInt32(less func(int32, int32) bool) *PairingHeapInt32 {
	return &PairingHeapInt32{less: less}
}

// NewPairingHeapInt64 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapInt64(less func(int64, int64) bool) *PairingHeapInt64 {
	return &PairingHeapInt64{less: less}
}

// NewPairingHeapUint returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapUint(less func(uint, uint) bool) *PairingHeapUint {
	return &PairingHeapUint{less: less}
}

// NewPairingHeapUint8 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapUint8(less func(uint8, uint8) bool) *PairingHeapUint8 {
	return &PairingHeapUint8{less: less}
}

// NewPairingHeapUint16 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapUint16(less func(uint16, uint16) bool) *PairingHeapUint16 {
	return &PairingHeapUint16{less: less}
}

// NewPairingHeapUint32 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapUint32(less func(uint32, uint32) bool) *PairingHeapUint32 {
	return &PairingHeapUint32{less: less}
}

// NewPairingHeapUint64 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapUint64(less func(uint64, uint64) bool) *PairingHeapUint64 {
	return &PairingHeapUint64{less: less}
}

// NewPairingHeapFloat32 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapFloat32(less func(float32, float32) bool) *PairingHeapFloat32 {
	return &PairingHeapFloat32{less: less}
}

// NewPairingHeapFloat64 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapFloat64(less func(float64, float64) bool) *PairingHeapFloat64 {
	return &PairingHeapFloat64{less: less}
}

// NewPairingHeapComplex64 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapComplex64(less func(complex64, complex64) bool) *PairingHeapComplex64 {
	return &PairingHeapComplex64{less: less}
}

// NewPairingHeapComplex128 returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapComplex128(less func(complex128, complex128) bool) *PairingHeapComplex128 {
	return &PairingHeapComplex128{less: less}
}

// NewPairingHeapString returns an empty PairingHeap ordered by the given
// less function.
func NewPairingHeapString(less func(string, string) bool) *PairingHeapString {
	return &PairingHeapString{less: less}
}
// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapInt) Peek() (el int, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapInt8) Peek() (el int8, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapInt16) Peek() (el int16, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapInt32) Peek() (el int32, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapInt64) Peek() (el int64, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapUint) Peek() (el uint, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapUint8) Peek() (el uint8, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapUint16) Peek() (el uint16, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapUint32) Peek() (el uint32, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapUint64) Peek() (el uint64, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapFloat32) Peek() (el float32, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapFloat64) Peek() (el float64, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapComplex64) Peek() (el complex64, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapComplex128) Peek() (el complex128, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}

// Peek returns the minimum element in the heap without removing it; ok is
// false when the heap is empty.
// O(1)
func (h *PairingHeapString) Peek() (el string, ok bool) {
	if r := h.root; r != nil {
		return r.elem, true
	}
	return el, false
}
// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapInt) Pop() (el int, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	// The new root is the result of merging the old root's children.
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapInt8) Pop() (el int8, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapInt16) Pop() (el int16, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapInt32) Pop() (el int32, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapInt64) Pop() (el int64, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapUint) Pop() (el uint, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapUint8) Pop() (el uint8, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapUint16) Pop() (el uint16, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapUint32) Pop() (el uint32, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapUint64) Pop() (el uint64, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapFloat32) Pop() (el float32, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapFloat64) Pop() (el float64, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapComplex64) Pop() (el complex64, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapComplex128) Pop() (el complex128, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}

// Pop removes the minimum element from the heap and returns it; ok is
// false when the heap is empty.
// O(log n)
func (h *PairingHeapString) Pop() (el string, ok bool) {
	r := h.root
	if r == nil {
		return el, false
	}
	h.root = h.mergePairs(r.child)
	return r.elem, true
}
// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapInt) Push(el int) {
	n := &pairingHeapNodeInt{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapInt8) Push(el int8) {
	n := &pairingHeapNodeInt8{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapInt16) Push(el int16) {
	n := &pairingHeapNodeInt16{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapInt32) Push(el int32) {
	n := &pairingHeapNodeInt32{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapInt64) Push(el int64) {
	n := &pairingHeapNodeInt64{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapUint) Push(el uint) {
	n := &pairingHeapNodeUint{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapUint8) Push(el uint8) {
	n := &pairingHeapNodeUint8{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapUint16) Push(el uint16) {
	n := &pairingHeapNodeUint16{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapUint32) Push(el uint32) {
	n := &pairingHeapNodeUint32{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapUint64) Push(el uint64) {
	n := &pairingHeapNodeUint64{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapFloat32) Push(el float32) {
	n := &pairingHeapNodeFloat32{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapFloat64) Push(el float64) {
	n := &pairingHeapNodeFloat64{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapComplex64) Push(el complex64) {
	n := &pairingHeapNodeComplex64{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapComplex128) Push(el complex128) {
	n := &pairingHeapNodeComplex128{elem: el}
	h.root = h.merge(h.root, n)
}

// Push adds the element to the heap by merging a new single-node heap
// into the root.
// O(1)
func (h *PairingHeapString) Push(el string) {
	n := &pairingHeapNodeString{elem: el}
	h.root = h.merge(h.root, n)
}
// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapInt) Merge(h2 *PairingHeapInt) *PairingHeapInt {
	merged := &PairingHeapInt{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapInt8) Merge(h2 *PairingHeapInt8) *PairingHeapInt8 {
	merged := &PairingHeapInt8{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapInt16) Merge(h2 *PairingHeapInt16) *PairingHeapInt16 {
	merged := &PairingHeapInt16{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapInt32) Merge(h2 *PairingHeapInt32) *PairingHeapInt32 {
	merged := &PairingHeapInt32{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapInt64) Merge(h2 *PairingHeapInt64) *PairingHeapInt64 {
	merged := &PairingHeapInt64{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapUint) Merge(h2 *PairingHeapUint) *PairingHeapUint {
	merged := &PairingHeapUint{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapUint8) Merge(h2 *PairingHeapUint8) *PairingHeapUint8 {
	merged := &PairingHeapUint8{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapUint16) Merge(h2 *PairingHeapUint16) *PairingHeapUint16 {
	merged := &PairingHeapUint16{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapUint32) Merge(h2 *PairingHeapUint32) *PairingHeapUint32 {
	merged := &PairingHeapUint32{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapUint64) Merge(h2 *PairingHeapUint64) *PairingHeapUint64 {
	merged := &PairingHeapUint64{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapFloat32) Merge(h2 *PairingHeapFloat32) *PairingHeapFloat32 {
	merged := &PairingHeapFloat32{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapFloat64) Merge(h2 *PairingHeapFloat64) *PairingHeapFloat64 {
	merged := &PairingHeapFloat64{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapComplex64) Merge(h2 *PairingHeapComplex64) *PairingHeapComplex64 {
	merged := &PairingHeapComplex64{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapComplex128) Merge(h2 *PairingHeapComplex128) *PairingHeapComplex128 {
	merged := &PairingHeapComplex128{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}

// Merge returns a heap containing the elements of both h and h2, ordered
// by h's less function. Note the result shares nodes with h and h2.
// O(1)
func (h *PairingHeapString) Merge(h2 *PairingHeapString) *PairingHeapString {
	merged := &PairingHeapString{less: h.less}
	merged.root = h.merge(h.root, h2.root)
	return merged
}
// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapInt) merge(n1, n2 *pairingHeapNodeInt) *pairingHeapNodeInt {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	// The loser becomes the first child of the winner.
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapInt8) merge(n1, n2 *pairingHeapNodeInt8) *pairingHeapNodeInt8 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapInt16) merge(n1, n2 *pairingHeapNodeInt16) *pairingHeapNodeInt16 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapInt32) merge(n1, n2 *pairingHeapNodeInt32) *pairingHeapNodeInt32 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapInt64) merge(n1, n2 *pairingHeapNodeInt64) *pairingHeapNodeInt64 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapUint) merge(n1, n2 *pairingHeapNodeUint) *pairingHeapNodeUint {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapUint8) merge(n1, n2 *pairingHeapNodeUint8) *pairingHeapNodeUint8 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapUint16) merge(n1, n2 *pairingHeapNodeUint16) *pairingHeapNodeUint16 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapUint32) merge(n1, n2 *pairingHeapNodeUint32) *pairingHeapNodeUint32 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapUint64) merge(n1, n2 *pairingHeapNodeUint64) *pairingHeapNodeUint64 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapFloat32) merge(n1, n2 *pairingHeapNodeFloat32) *pairingHeapNodeFloat32 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapFloat64) merge(n1, n2 *pairingHeapNodeFloat64) *pairingHeapNodeFloat64 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapComplex64) merge(n1, n2 *pairingHeapNodeComplex64) *pairingHeapNodeComplex64 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapComplex128) merge(n1, n2 *pairingHeapNodeComplex128) *pairingHeapNodeComplex128 {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}

// merge links two heap-ordered trees: the smaller root wins and the other
// tree is prepended to the winner's child list. Either argument may be nil.
func (h *PairingHeapString) merge(n1, n2 *pairingHeapNodeString) *pairingHeapNodeString {
	if n1 == nil {
		return n2
	}
	if n2 == nil {
		return n1
	}
	winner, loser := n2, n1
	if h.less(n1.elem, n2.elem) {
		winner, loser = n1, n2
	}
	loser.sibling = winner.child
	loser.parent = winner
	winner.child = loser
	return winner
}
func (h *PairingHeapInt) mergePairs(n *pairingHeapNodeInt) *pairingHeapNodeInt {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
func (h *PairingHeapInt8) mergePairs(n *pairingHeapNodeInt8) *pairingHeapNodeInt8 {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
func (h *PairingHeapInt16) mergePairs(n *pairingHeapNodeInt16) *pairingHeapNodeInt16 {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
func (h *PairingHeapInt32) mergePairs(n *pairingHeapNodeInt32) *pairingHeapNodeInt32 {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
func (h *PairingHeapInt64) mergePairs(n *pairingHeapNodeInt64) *pairingHeapNodeInt64 {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
func (h *PairingHeapUint) mergePairs(n *pairingHeapNodeUint) *pairingHeapNodeUint {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapUint8) mergePairs(n *pairingHeapNodeUint8) *pairingHeapNodeUint8 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapUint16) mergePairs(n *pairingHeapNodeUint16) *pairingHeapNodeUint16 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapUint32) mergePairs(n *pairingHeapNodeUint32) *pairingHeapNodeUint32 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapUint64) mergePairs(n *pairingHeapNodeUint64) *pairingHeapNodeUint64 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapFloat32) mergePairs(n *pairingHeapNodeFloat32) *pairingHeapNodeFloat32 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapFloat64) mergePairs(n *pairingHeapNodeFloat64) *pairingHeapNodeFloat64 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapComplex64) mergePairs(n *pairingHeapNodeComplex64) *pairingHeapNodeComplex64 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
// mergePairs collapses the sibling list rooted at n into a single heap root:
// adjacent siblings are merged pairwise, and the pair results are folded
// together right-to-left via recursion (the standard pairing-heap pass).
func (h *PairingHeapComplex128) mergePairs(n *pairingHeapNodeComplex128) *pairingHeapNodeComplex128 {
	if n == nil {
		return nil
	}
	second := n.sibling
	if second == nil {
		return n
	}
	rest := second.sibling
	return h.merge(h.merge(n, second), h.mergePairs(rest))
}
func (h *PairingHeapString) mergePairs(n *pairingHeapNodeString) *pairingHeapNodeString {
switch {
case n == nil:
return nil
case n.sibling == nil:
return n
default:
cousin := n.sibling.sibling
return h.merge(h.merge(n, n.sibling), h.mergePairs(cousin))
}
} | heap/reified_pairingheap.go | 0.858199 | 0.550668 | reified_pairingheap.go | starcoder |
package cocmath
import (
"errors"
"fmt"
"unicode"
"github.com/catsworld/botmaid/random"
)
// Result holds the minimum, maximum and exact result of an expression.
// Min and Max bound the possible outcomes of dice ('d') operators; Value is
// the concrete outcome of one evaluation.
type Result struct {
	Min, Max, Value int
}
// Operator includes some information of an operator.
// LeftComb records whether the operator was parsed as left-combining
// (binary) rather than unary at the time it was pushed.
type Operator struct {
	Op       rune
	LeftComb bool
}
// Expression includes the infix, suffix (postfix/RPN) and result of a math
// expression. Infix/Suffix elements are ints (operands), runes (raw symbols)
// or *Operator values.
type Expression struct {
	Infix  []interface{}
	Suffix []interface{}
	Result Result
}
var (
	// priority maps each supported operator rune to its binding strength;
	// higher numbers bind tighter ('d' for dice binds tightest).
	priority = map[rune]int{
		'+': 1,
		'-': 1,
		'*': 2,
		'/': 2,
		'%': 2,
		'^': 3,
		'd': 4,
	}
)
// isOperator reports whether operator is a supported binary operator rune,
// i.e. whether it has an entry in the priority table.
func isOperator(operator rune) bool {
	_, ok := priority[operator]
	return ok
}
// leftCombination reports whether the operator v should be treated as a
// left-combining (binary) operator given the previous symbol: it is binary
// unless the previous symbol was itself a raw operator rune.
// (v itself is currently unused but kept for interface stability.)
func leftCombination(v rune, last interface{}) bool {
	_, lastIsRune := last.(rune)
	return !lastIsRune
}
// calculate applies the binary operator to x and y and returns the exact
// result. '/' and '%' reject division by zero; 'd' rolls x dice with y faces
// (capped at one million rolls); '^' is integer exponentiation via Qpow.
func calculate(x int, operator rune, y int) (int, error) {
	switch operator {
	case '+':
		return x + y, nil
	case '-':
		return x - y, nil
	case '*':
		return x * y, nil
	case '/':
		if y == 0 {
			return 0, errors.New("Divide by 0")
		}
		return x / y, nil
	case '%':
		if y == 0 {
			return 0, errors.New("Divide by 0")
		}
		return x % y, nil
	case 'd':
		if x > 1000000 {
			return 0, errors.New("The number of times is over 1000000")
		}
		sum := 0
		for i := 0; i < x; i++ {
			sum += random.Int(1, y)
		}
		return sum, nil
	case '^':
		return Qpow(x, y), nil
	default:
		return 0, errors.New("Invalid operator")
	}
}
// calculateMin returns the smallest value the operation can yield. For the
// dice operator 'd' that is the lesser of x*1 and x*y (covering negative x);
// every other operator is deterministic, so the exact result is returned.
func calculateMin(x int, operator rune, y int) (int, error) {
	if operator != 'd' {
		return calculate(x, operator, y)
	}
	allOnes, err := calculate(x, '*', 1)
	if err != nil {
		return 0, err
	}
	allMax, err := calculate(x, '*', y)
	if err != nil {
		return 0, err
	}
	return Min(allOnes, allMax), nil
}
// calculateMax returns the largest value the operation can yield. For the
// dice operator 'd' that is the greater of x*1 and x*y (covering negative x);
// every other operator is deterministic, so the exact result is returned.
func calculateMax(x int, operator rune, y int) (int, error) {
	if operator != 'd' {
		return calculate(x, operator, y)
	}
	allOnes, err := calculate(x, '*', 1)
	if err != nil {
		return 0, err
	}
	allMax, err := calculate(x, '*', y)
	if err != nil {
		return 0, err
	}
	return Max(allOnes, allMax), nil
}
// calculateU applies a unary operator to x: '-' negates, '+' is identity,
// and 'd' rolls a single x-faced die.
func calculateU(unary rune, x int) (int, error) {
	switch unary {
	case '-':
		return -x, nil
	case '+':
		return x, nil
	case 'd':
		return random.Int(1, x), nil
	default:
		return 0, errors.New("Invalid unary")
	}
}
// string2Infix tokenizes s into e.Infix: maximal digit runs become int
// operands, spaces are dropped, and every other rune is appended verbatim.
// Two numbers separated only by whitespace are rejected as invalid.
func (e *Expression) string2Infix(s string) error {
	num := 0
	inNumber := false
	flush := func() {
		if inNumber {
			e.Infix = append(e.Infix, num)
			num = 0
			inNumber = false
		}
	}
	for _, r := range s {
		if unicode.IsDigit(r) {
			// Reject "1 2": the previous token was already a number.
			if n := len(e.Infix); n > 0 {
				if _, isInt := e.Infix[n-1].(int); isInt {
					return errors.New("Invalid expression")
				}
			}
			num = num*10 + int(r-'0')
			inNumber = true
			continue
		}
		flush()
		if r != ' ' {
			e.Infix = append(e.Infix, r)
		}
	}
	flush()
	return nil
}
// infix2Suffix converts e.Infix to postfix form in e.Suffix using the
// shunting-yard algorithm. Operands pass straight through; operators are
// popped from the stack while their priority loses to (or ties with, for
// left-combining operators) the stack top; parentheses delimit sub-stacks.
func (e *Expression) infix2Suffix() error {
	stack := []*Operator{}
	// NOTE(review): last is a *interface{} and is never reassigned inside the
	// loop below, so leftCombination always sees a non-rune value and reports
	// every operator as left-combining — unary detection appears dead. It
	// probably should be updated to the current symbol each iteration; confirm
	// intent before changing.
	last := new(interface{})
	for _, symbol := range e.Infix {
		switch v := symbol.(type) {
		case int:
			// Operands go straight to the output.
			e.Suffix = append(e.Suffix, v)
		case rune:
			if isOperator(v) {
				// Pop while the stack top binds at least as tightly
				// (strictly tighter for non-left-combining operators).
				for len(stack) > 0 && ((leftCombination(v, last) && priority[v] <= priority[stack[len(stack)-1].Op]) || (!leftCombination(v, last) && priority[v] < priority[stack[len(stack)-1].Op])) {
					e.Suffix = append(e.Suffix, stack[len(stack)-1])
					stack = stack[:len(stack)-1]
				}
				stack = append(stack, &Operator{
					Op:       v,
					LeftComb: leftCombination(v, last),
				})
			} else if v == '(' {
				// '(' is pushed as a barrier; its priority is never consulted.
				stack = append(stack, &Operator{
					Op: v,
				})
			} else if v == ')' {
				// Pop back to the matching '('.
				for len(stack) > 0 && stack[len(stack)-1].Op != '(' {
					e.Suffix = append(e.Suffix, stack[len(stack)-1])
					stack = stack[:len(stack)-1]
				}
				if len(stack) == 0 || stack[len(stack)-1].Op != '(' {
					return errors.New("Invalid expression")
				}
				stack = stack[:len(stack)-1]
			} else {
				return errors.New("Invalid expression")
			}
		}
	}
	// Drain the remaining operators; a leftover parenthesis means imbalance.
	for len(stack) > 0 {
		if stack[len(stack)-1].Op == '(' || stack[len(stack)-1].Op == ')' {
			return errors.New("Invalid expression")
		}
		e.Suffix = append(e.Suffix, stack[len(stack)-1])
		stack = stack[:len(stack)-1]
	}
	return nil
}
// suffix2Result evaluates the postfix expression in e.Suffix with a value
// stack, tracking for each partial result its exact Value plus the Min/Max
// bounds induced by dice operators (all four bound combinations are tried
// because operands may be negative). The final single Result is stored in
// e.Result.
func (e *Expression) suffix2Result() error {
	stack := []*Result{}
	for _, symbol := range e.Suffix {
		switch v := symbol.(type) {
		case int:
			// A literal is its own min, max and value.
			stack = append(stack, &Result{
				Min:   v,
				Max:   v,
				Value: v,
			})
		case *Operator:
			if (v.LeftComb && len(stack) < 2) || (!v.LeftComb && len(stack) < 1) {
				return errors.New("Invalid expression")
			}
			if v.LeftComb {
				// Binary: exact value from the two exact operands.
				res, err := calculate(stack[len(stack)-2].Value, v.Op, stack[len(stack)-1].Value)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				// Min bound: smallest over all four operand-bound pairings.
				res1, err := calculateMin(stack[len(stack)-2].Min, v.Op, stack[len(stack)-1].Min)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res2, err := calculateMin(stack[len(stack)-2].Min, v.Op, stack[len(stack)-1].Max)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res3, err := calculateMin(stack[len(stack)-2].Max, v.Op, stack[len(stack)-1].Min)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res4, err := calculateMin(stack[len(stack)-2].Max, v.Op, stack[len(stack)-1].Max)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				resMin := Min(res1, res2, res3, res4)
				// Max bound: largest over the same four pairings.
				res1, err = calculateMax(stack[len(stack)-2].Min, v.Op, stack[len(stack)-1].Min)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res2, err = calculateMax(stack[len(stack)-2].Min, v.Op, stack[len(stack)-1].Max)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res3, err = calculateMax(stack[len(stack)-2].Max, v.Op, stack[len(stack)-1].Min)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				res4, err = calculateMax(stack[len(stack)-2].Max, v.Op, stack[len(stack)-1].Max)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				resMax := Max(res1, res2, res3, res4)
				// Replace the two operands with the combined result.
				stack = stack[:len(stack)-2]
				stack = append(stack, &Result{
					Min:   resMin,
					Max:   resMax,
					Value: res,
				})
			} else {
				// Unary: applies to the top of stack only.
				res, err := calculateU(v.Op, stack[len(stack)-1].Value)
				if err != nil {
					return fmt.Errorf("Calculating: %v", err)
				}
				resMin := 0
				resMax := 0
				if v.Op == 'd' {
					// NOTE(review): the errors from these two calls are
					// assigned but never checked; calculateU('+', ...) cannot
					// fail today, but this silently drops any future error.
					resMin, err = calculateU('+', 1)
					resMax, err = calculateU('+', stack[len(stack)-1].Value)
				} else {
					resMin = res
					resMax = res
				}
				stack = stack[:len(stack)-1]
				stack = append(stack, &Result{
					Min:   resMin,
					Max:   resMax,
					Value: res,
				})
			}
		}
	}
	// A well-formed expression reduces to exactly one value.
	if len(stack) != 1 {
		return errors.New("Invalid expression")
	}
	e.Result = *stack[0]
	return nil
}
// New generates a expression from the string s.
func New(s string) (*Expression, error) {
e := &Expression{}
err := e.string2Infix(s)
if err != nil {
return nil, fmt.Errorf("Generating new expression: %v", err)
}
err = e.infix2Suffix()
if err != nil {
return nil, fmt.Errorf("Generating new expression: %v", err)
}
err = e.suffix2Result()
if err != nil {
return nil, fmt.Errorf("Generating new expression: %v", err)
}
return e, nil
} | cocmath/expression.go | 0.577972 | 0.555616 | expression.go | starcoder |
// Code to execute a parsed template.
package template
import (
"bytes"
"io"
"reflect"
"strings"
)
// Internal state for executing a Template. As we evaluate the struct,
// the data item descends into the fields associated with sections, etc.
// Parent is used to walk upwards to find variables higher in the tree.
type state struct {
	parent *state          // parent in hierarchy
	data   reflect.Value   // the driver data for this section etc.
	wr     io.Writer       // where to send output
	buf    [2]bytes.Buffer // alternating buffers used when chaining formatters
}
// clone returns a child state that shares the parent's writer but carries
// new driver data; the parent link enables upward variable lookup.
func (parent *state) clone(data reflect.Value) *state {
	child := &state{parent: parent, data: data, wr: parent.wr}
	return child
}
// Evaluate interfaces and pointers looking for a value that can look up the name, via a
// struct field, method, or map key, and return the result of the lookup.
// Methods take precedence over fields: a niladic single-result method of the
// given name is invoked before fields/keys are consulted. Unexported names
// are execution errors at struct/method access.
func (t *Template) lookup(st *state, v reflect.Value, name string) reflect.Value {
	for v.IsValid() {
		typ := v.Type()
		if n := v.Type().NumMethod(); n > 0 {
			for i := 0; i < n; i++ {
				m := typ.Method(i)
				mtyp := m.Type
				// NumIn() == 1: the receiver is the only input, i.e. the
				// method is niladic; NumOut() == 1: a single result.
				if m.Name == name && mtyp.NumIn() == 1 && mtyp.NumOut() == 1 {
					if !isExported(name) {
						t.execError(st, t.linenum, "name not exported: %s in type %s", name, st.data.Type())
					}
					return v.Method(i).Call(nil)[0]
				}
			}
		}
		switch av := v; av.Kind() {
		case reflect.Ptr:
			v = av.Elem()
		case reflect.Interface:
			v = av.Elem()
		case reflect.Struct:
			if !isExported(name) {
				t.execError(st, t.linenum, "name not exported: %s in type %s", name, st.data.Type())
			}
			return av.FieldByName(name)
		case reflect.Map:
			// A missing key yields the zero value of the map's element type.
			if v := av.MapIndex(reflect.ValueOf(name)); v.IsValid() {
				return v
			}
			return reflect.Zero(typ.Elem())
		default:
			return reflect.Value{}
		}
	}
	return v
}
// indirectPtr returns the item numLevels levels of indirection below the value.
// It is forgiving: if the value is not a pointer, it returns it rather than giving
// an error. If the pointer is nil, it is returned as is.
func indirectPtr(v reflect.Value, numLevels int) reflect.Value {
for i := numLevels; v.IsValid() && i > 0; i++ {
if p := v; p.Kind() == reflect.Ptr {
if p.IsNil() {
return v
}
v = p.Elem()
} else {
break
}
}
return v
}
// Walk v through pointers and interfaces, extracting the elements within.
func indirect(v reflect.Value) reflect.Value {
loop:
for v.IsValid() {
switch av := v; av.Kind() {
case reflect.Ptr:
v = av.Elem()
case reflect.Interface:
v = av.Elem()
default:
break loop
}
}
return v
}
// findVar resolves the variable name s against st.data. Names of the form
// a.b.c walk down the data tree; the special name "@" (the cursor) denotes
// the current data item. Leading '*' characters request that many levels of
// pointer indirection be applied to the final value.
func (t *Template) findVar(st *state, s string) reflect.Value {
	name := strings.TrimLeft(s, "*")
	stars := len(s) - len(name)
	v := st.data
	if name == "@" {
		return indirectPtr(v, stars)
	}
	for _, field := range strings.Split(name, ".") {
		// Each path component must resolve against the current value
		// (struct field, method, or map key).
		v = t.lookup(st, v, field)
		if !v.IsValid() {
			return reflect.Value{}
		}
	}
	return indirectPtr(v, stars)
}
// empty reports whether v (after stripping pointers and interfaces) holds no
// data: an invalid value, false, an empty string, or a zero-length
// array/slice. Structs and maps are never considered empty; all other kinds
// default to non-empty.
func empty(v reflect.Value) bool {
	v = indirect(v)
	if !v.IsValid() {
		return true
	}
	switch v.Kind() {
	case reflect.Bool:
		return !v.Bool()
	case reflect.String:
		return len(v.String()) == 0
	case reflect.Struct, reflect.Map:
		return false
	case reflect.Array, reflect.Slice:
		return v.Len() == 0
	}
	return false
}
// varValue looks up a variable or method by name, walking up through parent
// states until it is found; a miss at the root is an execution error.
func (t *Template) varValue(name string, st *state) reflect.Value {
	field := t.findVar(st, name)
	for !field.IsValid() {
		if st.parent == nil {
			t.execError(st, t.linenum, "name not found: %s in type %s", name, st.data.Type())
		}
		st = st.parent
		field = t.findVar(st, name)
	}
	return field
}
// format looks up the named formatter and runs it over val, writing its
// output to wr. A missing formatter is reported as an execution error at the
// variable's source line.
func (t *Template) format(wr io.Writer, fmt string, val []interface{}, v *variableElement, st *state) {
	fn := t.formatter(fmt)
	if fn == nil {
		t.execError(st, v.linenum, "missing formatter %s for variable", fmt)
	}
	fn(wr, fmt, val...)
}
// Evaluate a variable, looking up through the parent if necessary.
// If it has a formatter attached ({var|formatter}) run that too.
// When several formatters are chained, the output of each intermediate
// formatter is captured in one of the two state buffers (alternating to
// avoid clobbering the previous stage's bytes) and fed as the sole argument
// to the next; only the final formatter writes directly to st.wr.
func (t *Template) writeVariable(v *variableElement, st *state) {
	// Resolve field names
	val := make([]interface{}, len(v.args))
	for i, arg := range v.args {
		if name, ok := arg.(fieldName); ok {
			val[i] = t.varValue(string(name), st).Interface()
		} else {
			val[i] = arg
		}
	}
	for i, fmt := range v.fmts[:len(v.fmts)-1] {
		// i&1 alternates between the two scratch buffers.
		b := &st.buf[i&1]
		b.Reset()
		t.format(b, fmt, val, v, st)
		val = val[0:1]
		val[0] = b.Bytes()
	}
	t.format(st.wr, v.fmts[len(v.fmts)-1], val, v, st)
}
// Execute element i. Return next index to execute.
// Text and literals are copied through verbatim; variables are formatted;
// sections and repeated sections execute their own sub-ranges and skip the
// cursor past their end marker. Any other element type is an internal error.
func (t *Template) executeElement(i int, st *state) int {
	switch elem := t.elems[i].(type) {
	case *textElement:
		st.wr.Write(elem.text)
		return i + 1
	case *literalElement:
		st.wr.Write(elem.text)
		return i + 1
	case *variableElement:
		t.writeVariable(elem, st)
		return i + 1
	case *sectionElement:
		t.executeSection(elem, st)
		return elem.end
	case *repeatedElement:
		t.executeRepeated(elem, st)
		return elem.end
	}
	e := t.elems[i]
	t.execError(st, 0, "internal error: bad directive in execute: %v %T\n", reflect.ValueOf(e).Interface(), e)
	return 0
}
// execute runs the template elements in the half-open range [start, end),
// letting each element advance the cursor past itself.
func (t *Template) execute(start, end int, st *state) {
	i := start
	for i < end {
		i = t.executeElement(i, st)
	}
}
// Execute a .section
// The section's field drives which branch runs: a non-empty value runs the
// normal block [start, or) (or [start, end) if there is no .or); an empty
// value runs the .or block [or, end), or nothing when .or is absent
// (marked by a negative index).
func (t *Template) executeSection(s *sectionElement, st *state) {
	// Find driver data for this section. It must be in the current struct.
	field := t.varValue(s.field, st)
	if !field.IsValid() {
		t.execError(st, s.linenum, ".section: cannot find field %s in %s", s.field, st.data.Type())
	}
	// Descend: the section body sees the field as its data.
	st = st.clone(field)
	start, end := s.start, s.or
	if !empty(field) {
		// Execute the normal block.
		if end < 0 {
			end = s.end
		}
	} else {
		// Execute the .or block. If it's missing, do nothing.
		start, end = s.or, s.end
		if start < 0 {
			return
		}
	}
	for i := start; i < end; {
		i = t.executeElement(i, st)
	}
}
// Return the result of calling the Iter method on v, or nil.
// The method must be named Iter, take no arguments beyond the receiver, and
// return a single receivable channel; anything else is skipped.
func iter(v reflect.Value) reflect.Value {
	for j := 0; j < v.Type().NumMethod(); j++ {
		mth := v.Type().Method(j)
		fv := v.Method(j)
		ft := fv.Type()
		// TODO(rsc): NumIn() should return 0 here, because ft is from a curried FuncValue.
		if mth.Name != "Iter" || ft.NumIn() != 1 || ft.NumOut() != 1 {
			continue
		}
		ct := ft.Out(0)
		// The result must be a channel we can receive from.
		if ct.Kind() != reflect.Chan ||
			ct.ChanDir()&reflect.RecvDir == 0 {
			continue
		}
		return fv.Call(nil)[0]
	}
	return reflect.Value{}
}
// Execute a .repeated section
// The driver field may be an array/slice, a map, or a value with an Iter
// method returning a channel; each element runs the body [start, end) with
// the element as the new data. .alternates (if present, altstart >= 0) runs
// between consecutive elements, and an empty collection runs the .or block
// once if it exists.
func (t *Template) executeRepeated(r *repeatedElement, st *state) {
	// Find driver data for this section. It must be in the current struct.
	field := t.varValue(r.field, st)
	if !field.IsValid() {
		t.execError(st, r.linenum, ".repeated: cannot find field %s in %s", r.field, st.data.Type())
	}
	field = indirect(field)
	// The body ends at .or, .alternates, or .end — whichever comes first.
	start, end := r.start, r.or
	if end < 0 {
		end = r.end
	}
	if r.altstart >= 0 {
		end = r.altstart
	}
	first := true
	// Code common to all the loops.
	loopBody := func(newst *state) {
		// .alternates between elements
		if !first && r.altstart >= 0 {
			for i := r.altstart; i < r.altend; {
				i = t.executeElement(i, newst)
			}
		}
		first = false
		for i := start; i < end; {
			i = t.executeElement(i, newst)
		}
	}
	if array := field; array.Kind() == reflect.Array || array.Kind() == reflect.Slice {
		for j := 0; j < array.Len(); j++ {
			loopBody(st.clone(array.Index(j)))
		}
	} else if m := field; m.Kind() == reflect.Map {
		// NOTE: map iteration order is unspecified in Go.
		for _, key := range m.MapKeys() {
			loopBody(st.clone(m.MapIndex(key)))
		}
	} else if ch := iter(field); ch.IsValid() {
		// Drain the channel produced by the Iter method.
		for {
			e, ok := ch.Recv()
			if !ok {
				break
			}
			loopBody(st.clone(e))
		}
	} else {
		t.execError(st, r.linenum, ".repeated: cannot repeat %s (type %s)",
			r.field, field.Type())
	}
	if first {
		// Empty. Execute the .or block, once. If it's missing, do nothing.
		start, end := r.or, r.end
		if start >= 0 {
			newst := st.clone(field)
			for i := start; i < end; {
				i = t.executeElement(i, newst)
			}
		}
		return
	}
}
// A valid delimiter must contain no space and be non-empty.
func validDelim(d []byte) bool {
if len(d) == 0 {
return false
}
for _, c := range d {
if isSpace(c) {
return false
}
}
return true
} | llgo/third_party/gofrontend/libgo/go/old/template/execute.go | 0.571647 | 0.450722 | execute.go | starcoder |
package plaid
import (
"encoding/json"
)
// LiabilityOverride Used to configure Sandbox test data for the Liabilities product
type LiabilityOverride struct {
	// The type of the liability object, either `credit` or `student`. Mortgages are not currently supported in the custom Sandbox.
	Type string `json:"type"`
	// The purchase APR percentage value. For simplicity, this is the only interest rate used to calculate interest charges. Can only be set if `type` is `credit`.
	PurchaseApr float32 `json:"purchase_apr"`
	// The cash APR percentage value. Can only be set if `type` is `credit`.
	CashApr float32 `json:"cash_apr"`
	// The balance transfer APR percentage value. Can only be set if `type` is `credit`. Can only be set if `type` is `credit`.
	BalanceTransferApr float32 `json:"balance_transfer_apr"`
	// The special APR percentage value. Can only be set if `type` is `credit`.
	SpecialApr float32 `json:"special_apr"`
	// Override the `last_payment_amount` field. Can only be set if `type` is `credit`.
	LastPaymentAmount float32 `json:"last_payment_amount"`
	// Override the `minimum_payment_amount` field. Can only be set if `type` is `credit` or `student`.
	MinimumPaymentAmount float32 `json:"minimum_payment_amount"`
	// Override the `is_overdue` field
	IsOverdue bool `json:"is_overdue"`
	// The date on which the loan was initially lent, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) (YYYY-MM-DD) format. Can only be set if `type` is `student`.
	OriginationDate string `json:"origination_date"`
	// The original loan principal. Can only be set if `type` is `student`.
	Principal float32 `json:"principal"`
	// The interest rate on the loan as a percentage. Can only be set if `type` is `student`.
	NominalApr float32 `json:"nominal_apr"`
	// If set, interest capitalization begins at the given number of months after loan origination. By default interest is never capitalized. Can only be set if `type` is `student`.
	InterestCapitalizationGracePeriodMonths float32 `json:"interest_capitalization_grace_period_months"`
	RepaymentModel StudentLoanRepaymentModel `json:"repayment_model"`
	// Override the `expected_payoff_date` field. Can only be set if `type` is `student`.
	ExpectedPayoffDate string `json:"expected_payoff_date"`
	// Override the `guarantor` field. Can only be set if `type` is `student`.
	Guarantor string `json:"guarantor"`
	// Override the `is_federal` field. Can only be set if `type` is `student`.
	IsFederal bool `json:"is_federal"`
	// Override the `loan_name` field. Can only be set if `type` is `student`.
	LoanName string `json:"loan_name"`
	LoanStatus StudentLoanStatus `json:"loan_status"`
	// Override the `payment_reference_number` field. Can only be set if `type` is `student`.
	PaymentReferenceNumber string `json:"payment_reference_number"`
	PslfStatus PSLFStatus `json:"pslf_status"`
	// Override the `repayment_plan.description` field. Can only be set if `type` is `student`.
	RepaymentPlanDescription string `json:"repayment_plan_description"`
	// Override the `repayment_plan.type` field. Can only be set if `type` is `student`. Possible values are: `\"extended graduated\"`, `\"extended standard\"`, `\"graduated\"`, `\"income-contingent repayment\"`, `\"income-based repayment\"`, `\"interest only\"`, `\"other\"`, `\"pay as you earn\"`, `\"revised pay as you earn\"`, or `\"standard\"`.
	RepaymentPlanType string `json:"repayment_plan_type"`
	// Override the `sequence_number` field. Can only be set if `type` is `student`.
	SequenceNumber string `json:"sequence_number"`
	ServicerAddress Address `json:"servicer_address"`
	// AdditionalProperties captures any JSON fields not declared above.
	AdditionalProperties map[string]interface{}
}

// _LiabilityOverride is an alias used internally to avoid infinite recursion
// when (un)marshalling the declared fields of LiabilityOverride.
type _LiabilityOverride LiabilityOverride
// NewLiabilityOverride instantiates a new LiabilityOverride object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewLiabilityOverride(type_ string, purchaseApr float32, cashApr float32, balanceTransferApr float32, specialApr float32, lastPaymentAmount float32, minimumPaymentAmount float32, isOverdue bool, originationDate string, principal float32, nominalApr float32, interestCapitalizationGracePeriodMonths float32, repaymentModel StudentLoanRepaymentModel, expectedPayoffDate string, guarantor string, isFederal bool, loanName string, loanStatus StudentLoanStatus, paymentReferenceNumber string, pslfStatus PSLFStatus, repaymentPlanDescription string, repaymentPlanType string, sequenceNumber string, servicerAddress Address) *LiabilityOverride {
	return &LiabilityOverride{
		Type:                                    type_,
		PurchaseApr:                             purchaseApr,
		CashApr:                                 cashApr,
		BalanceTransferApr:                      balanceTransferApr,
		SpecialApr:                              specialApr,
		LastPaymentAmount:                       lastPaymentAmount,
		MinimumPaymentAmount:                    minimumPaymentAmount,
		IsOverdue:                               isOverdue,
		OriginationDate:                         originationDate,
		Principal:                               principal,
		NominalApr:                              nominalApr,
		InterestCapitalizationGracePeriodMonths: interestCapitalizationGracePeriodMonths,
		RepaymentModel:                          repaymentModel,
		ExpectedPayoffDate:                      expectedPayoffDate,
		Guarantor:                               guarantor,
		IsFederal:                               isFederal,
		LoanName:                                loanName,
		LoanStatus:                              loanStatus,
		PaymentReferenceNumber:                  paymentReferenceNumber,
		PslfStatus:                              pslfStatus,
		RepaymentPlanDescription:                repaymentPlanDescription,
		RepaymentPlanType:                       repaymentPlanType,
		SequenceNumber:                          sequenceNumber,
		ServicerAddress:                         servicerAddress,
	}
}
// NewLiabilityOverrideWithDefaults instantiates a new LiabilityOverride object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewLiabilityOverrideWithDefaults() *LiabilityOverride {
	return &LiabilityOverride{}
}
// GetType returns the Type field value
func (o *LiabilityOverride) GetType() string {
	if o == nil {
		return ""
	}
	return o.Type
}

// GetTypeOk returns a tuple with the Type field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetTypeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Type, true
}

// SetType sets field value
func (o *LiabilityOverride) SetType(v string) {
	o.Type = v
}
// GetPurchaseApr returns the PurchaseApr field value
func (o *LiabilityOverride) GetPurchaseApr() float32 {
	if o == nil {
		return 0
	}
	return o.PurchaseApr
}

// GetPurchaseAprOk returns a tuple with the PurchaseApr field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetPurchaseAprOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.PurchaseApr, true
}

// SetPurchaseApr sets field value
func (o *LiabilityOverride) SetPurchaseApr(v float32) {
	o.PurchaseApr = v
}
// GetCashApr returns the CashApr field value
func (o *LiabilityOverride) GetCashApr() float32 {
	if o == nil {
		return 0
	}
	return o.CashApr
}

// GetCashAprOk returns a tuple with the CashApr field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetCashAprOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.CashApr, true
}

// SetCashApr sets field value
func (o *LiabilityOverride) SetCashApr(v float32) {
	o.CashApr = v
}
// GetBalanceTransferApr returns the BalanceTransferApr field value
func (o *LiabilityOverride) GetBalanceTransferApr() float32 {
	if o == nil {
		return 0
	}
	return o.BalanceTransferApr
}

// GetBalanceTransferAprOk returns a tuple with the BalanceTransferApr field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetBalanceTransferAprOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.BalanceTransferApr, true
}

// SetBalanceTransferApr sets field value
func (o *LiabilityOverride) SetBalanceTransferApr(v float32) {
	o.BalanceTransferApr = v
}
// GetSpecialApr returns the SpecialApr field value
func (o *LiabilityOverride) GetSpecialApr() float32 {
	if o == nil {
		return 0
	}
	return o.SpecialApr
}

// GetSpecialAprOk returns a tuple with the SpecialApr field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetSpecialAprOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.SpecialApr, true
}

// SetSpecialApr sets field value
func (o *LiabilityOverride) SetSpecialApr(v float32) {
	o.SpecialApr = v
}
// GetLastPaymentAmount returns the LastPaymentAmount field value
func (o *LiabilityOverride) GetLastPaymentAmount() float32 {
	if o == nil {
		return 0
	}
	return o.LastPaymentAmount
}

// GetLastPaymentAmountOk returns a tuple with the LastPaymentAmount field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetLastPaymentAmountOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.LastPaymentAmount, true
}

// SetLastPaymentAmount sets field value
func (o *LiabilityOverride) SetLastPaymentAmount(v float32) {
	o.LastPaymentAmount = v
}
// GetMinimumPaymentAmount returns the MinimumPaymentAmount field value
func (o *LiabilityOverride) GetMinimumPaymentAmount() float32 {
	if o == nil {
		return 0
	}
	return o.MinimumPaymentAmount
}

// GetMinimumPaymentAmountOk returns a tuple with the MinimumPaymentAmount field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetMinimumPaymentAmountOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.MinimumPaymentAmount, true
}

// SetMinimumPaymentAmount sets field value
func (o *LiabilityOverride) SetMinimumPaymentAmount(v float32) {
	o.MinimumPaymentAmount = v
}
// GetIsOverdue returns the IsOverdue field value
func (o *LiabilityOverride) GetIsOverdue() bool {
	if o == nil {
		return false
	}
	return o.IsOverdue
}

// GetIsOverdueOk returns a tuple with the IsOverdue field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetIsOverdueOk() (*bool, bool) {
	if o == nil {
		return nil, false
	}
	return &o.IsOverdue, true
}

// SetIsOverdue sets field value
func (o *LiabilityOverride) SetIsOverdue(v bool) {
	o.IsOverdue = v
}
// GetOriginationDate returns the OriginationDate field value
func (o *LiabilityOverride) GetOriginationDate() string {
	if o == nil {
		return ""
	}
	return o.OriginationDate
}

// GetOriginationDateOk returns a tuple with the OriginationDate field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetOriginationDateOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.OriginationDate, true
}

// SetOriginationDate sets field value
func (o *LiabilityOverride) SetOriginationDate(v string) {
	o.OriginationDate = v
}
// GetPrincipal returns the Principal field value
func (o *LiabilityOverride) GetPrincipal() float32 {
	if o == nil {
		return 0
	}
	return o.Principal
}

// GetPrincipalOk returns a tuple with the Principal field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetPrincipalOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Principal, true
}

// SetPrincipal sets field value
func (o *LiabilityOverride) SetPrincipal(v float32) {
	o.Principal = v
}
// GetNominalApr returns the NominalApr field value
func (o *LiabilityOverride) GetNominalApr() float32 {
	if o == nil {
		return 0
	}
	return o.NominalApr
}

// GetNominalAprOk returns a tuple with the NominalApr field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetNominalAprOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.NominalApr, true
}

// SetNominalApr sets field value
func (o *LiabilityOverride) SetNominalApr(v float32) {
	o.NominalApr = v
}
// GetInterestCapitalizationGracePeriodMonths returns the InterestCapitalizationGracePeriodMonths field value
func (o *LiabilityOverride) GetInterestCapitalizationGracePeriodMonths() float32 {
	if o == nil {
		return 0
	}
	return o.InterestCapitalizationGracePeriodMonths
}

// GetInterestCapitalizationGracePeriodMonthsOk returns a tuple with the InterestCapitalizationGracePeriodMonths field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetInterestCapitalizationGracePeriodMonthsOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.InterestCapitalizationGracePeriodMonths, true
}

// SetInterestCapitalizationGracePeriodMonths sets field value
func (o *LiabilityOverride) SetInterestCapitalizationGracePeriodMonths(v float32) {
	o.InterestCapitalizationGracePeriodMonths = v
}
// GetRepaymentModel returns the RepaymentModel field value
func (o *LiabilityOverride) GetRepaymentModel() StudentLoanRepaymentModel {
	if o == nil {
		var zero StudentLoanRepaymentModel
		return zero
	}
	return o.RepaymentModel
}

// GetRepaymentModelOk returns a tuple with the RepaymentModel field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetRepaymentModelOk() (*StudentLoanRepaymentModel, bool) {
	if o == nil {
		return nil, false
	}
	return &o.RepaymentModel, true
}

// SetRepaymentModel sets field value
func (o *LiabilityOverride) SetRepaymentModel(v StudentLoanRepaymentModel) {
	o.RepaymentModel = v
}
// GetExpectedPayoffDate returns the ExpectedPayoffDate field value
func (o *LiabilityOverride) GetExpectedPayoffDate() string {
	if o == nil {
		return ""
	}
	return o.ExpectedPayoffDate
}

// GetExpectedPayoffDateOk returns a tuple with the ExpectedPayoffDate field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetExpectedPayoffDateOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.ExpectedPayoffDate, true
}

// SetExpectedPayoffDate sets field value
func (o *LiabilityOverride) SetExpectedPayoffDate(v string) {
	o.ExpectedPayoffDate = v
}
// GetGuarantor returns the Guarantor field value
func (o *LiabilityOverride) GetGuarantor() string {
	if o == nil {
		return ""
	}
	return o.Guarantor
}

// GetGuarantorOk returns a tuple with the Guarantor field value
// and a boolean to check if the value has been set.
func (o *LiabilityOverride) GetGuarantorOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Guarantor, true
}

// SetGuarantor sets field value
func (o *LiabilityOverride) SetGuarantor(v string) {
	o.Guarantor = v
}
// GetIsFederal returns the IsFederal field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetIsFederal() bool {
	if o != nil {
		return o.IsFederal
	}
	return false
}

// GetIsFederalOk returns a pointer to the IsFederal field value and a
// boolean indicating that the value is set.
func (o *LiabilityOverride) GetIsFederalOk() (*bool, bool) {
	if o != nil {
		return &o.IsFederal, true
	}
	return nil, false
}

// SetIsFederal sets the field value.
func (o *LiabilityOverride) SetIsFederal(v bool) {
	o.IsFederal = v
}

// GetLoanName returns the LoanName field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetLoanName() string {
	if o != nil {
		return o.LoanName
	}
	return ""
}

// GetLoanNameOk returns a pointer to the LoanName field value and a boolean
// indicating that the value is set.
func (o *LiabilityOverride) GetLoanNameOk() (*string, bool) {
	if o != nil {
		return &o.LoanName, true
	}
	return nil, false
}

// SetLoanName sets the field value.
func (o *LiabilityOverride) SetLoanName(v string) {
	o.LoanName = v
}

// GetLoanStatus returns the LoanStatus field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetLoanStatus() StudentLoanStatus {
	if o != nil {
		return o.LoanStatus
	}
	var zero StudentLoanStatus
	return zero
}

// GetLoanStatusOk returns a pointer to the LoanStatus field value and a
// boolean indicating that the value is set.
func (o *LiabilityOverride) GetLoanStatusOk() (*StudentLoanStatus, bool) {
	if o != nil {
		return &o.LoanStatus, true
	}
	return nil, false
}

// SetLoanStatus sets the field value.
func (o *LiabilityOverride) SetLoanStatus(v StudentLoanStatus) {
	o.LoanStatus = v
}

// GetPaymentReferenceNumber returns the PaymentReferenceNumber field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetPaymentReferenceNumber() string {
	if o != nil {
		return o.PaymentReferenceNumber
	}
	return ""
}

// GetPaymentReferenceNumberOk returns a pointer to the
// PaymentReferenceNumber field value and a boolean indicating that the value
// is set.
func (o *LiabilityOverride) GetPaymentReferenceNumberOk() (*string, bool) {
	if o != nil {
		return &o.PaymentReferenceNumber, true
	}
	return nil, false
}

// SetPaymentReferenceNumber sets the field value.
func (o *LiabilityOverride) SetPaymentReferenceNumber(v string) {
	o.PaymentReferenceNumber = v
}
// GetPslfStatus returns the PslfStatus field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetPslfStatus() PSLFStatus {
	if o != nil {
		return o.PslfStatus
	}
	var zero PSLFStatus
	return zero
}

// GetPslfStatusOk returns a pointer to the PslfStatus field value and a
// boolean indicating that the value is set.
func (o *LiabilityOverride) GetPslfStatusOk() (*PSLFStatus, bool) {
	if o != nil {
		return &o.PslfStatus, true
	}
	return nil, false
}

// SetPslfStatus sets the field value.
func (o *LiabilityOverride) SetPslfStatus(v PSLFStatus) {
	o.PslfStatus = v
}

// GetRepaymentPlanDescription returns the RepaymentPlanDescription field
// value. A nil receiver yields the zero value.
func (o *LiabilityOverride) GetRepaymentPlanDescription() string {
	if o != nil {
		return o.RepaymentPlanDescription
	}
	return ""
}

// GetRepaymentPlanDescriptionOk returns a pointer to the
// RepaymentPlanDescription field value and a boolean indicating that the
// value is set.
func (o *LiabilityOverride) GetRepaymentPlanDescriptionOk() (*string, bool) {
	if o != nil {
		return &o.RepaymentPlanDescription, true
	}
	return nil, false
}

// SetRepaymentPlanDescription sets the field value.
func (o *LiabilityOverride) SetRepaymentPlanDescription(v string) {
	o.RepaymentPlanDescription = v
}

// GetRepaymentPlanType returns the RepaymentPlanType field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetRepaymentPlanType() string {
	if o != nil {
		return o.RepaymentPlanType
	}
	return ""
}

// GetRepaymentPlanTypeOk returns a pointer to the RepaymentPlanType field
// value and a boolean indicating that the value is set.
func (o *LiabilityOverride) GetRepaymentPlanTypeOk() (*string, bool) {
	if o != nil {
		return &o.RepaymentPlanType, true
	}
	return nil, false
}

// SetRepaymentPlanType sets the field value.
func (o *LiabilityOverride) SetRepaymentPlanType(v string) {
	o.RepaymentPlanType = v
}

// GetSequenceNumber returns the SequenceNumber field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetSequenceNumber() string {
	if o != nil {
		return o.SequenceNumber
	}
	return ""
}

// GetSequenceNumberOk returns a pointer to the SequenceNumber field value
// and a boolean indicating that the value is set.
func (o *LiabilityOverride) GetSequenceNumberOk() (*string, bool) {
	if o != nil {
		return &o.SequenceNumber, true
	}
	return nil, false
}

// SetSequenceNumber sets the field value.
func (o *LiabilityOverride) SetSequenceNumber(v string) {
	o.SequenceNumber = v
}

// GetServicerAddress returns the ServicerAddress field value.
// A nil receiver yields the zero value.
func (o *LiabilityOverride) GetServicerAddress() Address {
	if o != nil {
		return o.ServicerAddress
	}
	var zero Address
	return zero
}

// GetServicerAddressOk returns a pointer to the ServicerAddress field value
// and a boolean indicating that the value is set.
func (o *LiabilityOverride) GetServicerAddressOk() (*Address, bool) {
	if o != nil {
		return &o.ServicerAddress, true
	}
	return nil, false
}

// SetServicerAddress sets the field value.
func (o *LiabilityOverride) SetServicerAddress(v Address) {
	o.ServicerAddress = v
}
// MarshalJSON serializes the struct into a single JSON object containing all
// known fields plus any entries in AdditionalProperties. The generated
// `if true { ... }` guards around each required field were dead conditionals
// and have been removed; the emitted JSON is unchanged. Note that an
// AdditionalProperties entry sharing a known key overwrites the field value,
// matching the original behavior.
func (o LiabilityOverride) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	toSerialize["type"] = o.Type
	toSerialize["purchase_apr"] = o.PurchaseApr
	toSerialize["cash_apr"] = o.CashApr
	toSerialize["balance_transfer_apr"] = o.BalanceTransferApr
	toSerialize["special_apr"] = o.SpecialApr
	toSerialize["last_payment_amount"] = o.LastPaymentAmount
	toSerialize["minimum_payment_amount"] = o.MinimumPaymentAmount
	toSerialize["is_overdue"] = o.IsOverdue
	toSerialize["origination_date"] = o.OriginationDate
	toSerialize["principal"] = o.Principal
	toSerialize["nominal_apr"] = o.NominalApr
	toSerialize["interest_capitalization_grace_period_months"] = o.InterestCapitalizationGracePeriodMonths
	toSerialize["repayment_model"] = o.RepaymentModel
	toSerialize["expected_payoff_date"] = o.ExpectedPayoffDate
	toSerialize["guarantor"] = o.Guarantor
	toSerialize["is_federal"] = o.IsFederal
	toSerialize["loan_name"] = o.LoanName
	toSerialize["loan_status"] = o.LoanStatus
	toSerialize["payment_reference_number"] = o.PaymentReferenceNumber
	toSerialize["pslf_status"] = o.PslfStatus
	toSerialize["repayment_plan_description"] = o.RepaymentPlanDescription
	toSerialize["repayment_plan_type"] = o.RepaymentPlanType
	toSerialize["sequence_number"] = o.SequenceNumber
	toSerialize["servicer_address"] = o.ServicerAddress
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes the known fields via the shadow type
// _LiabilityOverride, then re-decodes into a generic map to capture any
// unknown keys into AdditionalProperties.
//
// Fix: the original checked `err == nil` after each Unmarshal but kept going
// on failure, so a failed first decode could be masked by a successful
// second decode into the map, returning a nil error with an unpopulated
// struct. Each error is now returned immediately.
func (o *LiabilityOverride) UnmarshalJSON(bytes []byte) (err error) {
	varLiabilityOverride := _LiabilityOverride{}
	if err = json.Unmarshal(bytes, &varLiabilityOverride); err != nil {
		return err
	}
	*o = LiabilityOverride(varLiabilityOverride)

	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err != nil {
		return err
	}
	// Strip every known key so only genuinely unknown properties remain.
	delete(additionalProperties, "type")
	delete(additionalProperties, "purchase_apr")
	delete(additionalProperties, "cash_apr")
	delete(additionalProperties, "balance_transfer_apr")
	delete(additionalProperties, "special_apr")
	delete(additionalProperties, "last_payment_amount")
	delete(additionalProperties, "minimum_payment_amount")
	delete(additionalProperties, "is_overdue")
	delete(additionalProperties, "origination_date")
	delete(additionalProperties, "principal")
	delete(additionalProperties, "nominal_apr")
	delete(additionalProperties, "interest_capitalization_grace_period_months")
	delete(additionalProperties, "repayment_model")
	delete(additionalProperties, "expected_payoff_date")
	delete(additionalProperties, "guarantor")
	delete(additionalProperties, "is_federal")
	delete(additionalProperties, "loan_name")
	delete(additionalProperties, "loan_status")
	delete(additionalProperties, "payment_reference_number")
	delete(additionalProperties, "pslf_status")
	delete(additionalProperties, "repayment_plan_description")
	delete(additionalProperties, "repayment_plan_type")
	delete(additionalProperties, "sequence_number")
	delete(additionalProperties, "servicer_address")
	o.AdditionalProperties = additionalProperties
	return nil
}
type NullableLiabilityOverride struct {
value *LiabilityOverride
isSet bool
}
func (v NullableLiabilityOverride) Get() *LiabilityOverride {
return v.value
}
func (v *NullableLiabilityOverride) Set(val *LiabilityOverride) {
v.value = val
v.isSet = true
}
func (v NullableLiabilityOverride) IsSet() bool {
return v.isSet
}
func (v *NullableLiabilityOverride) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableLiabilityOverride(val *LiabilityOverride) *NullableLiabilityOverride {
return &NullableLiabilityOverride{value: val, isSet: true}
}
func (v NullableLiabilityOverride) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableLiabilityOverride) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | plaid/model_liability_override.go | 0.790004 | 0.624709 | model_liability_override.go | starcoder |
package input
import (
"time"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// WithPipeline is a type that wraps both an input type and a pipeline type
// by routing the input through the pipeline, and implements the input.Type
// interface in order to act like an ordinary input.
type WithPipeline struct {
	in Type // upstream input whose transactions feed the pipeline
	pipe types.Pipeline // pipeline consuming the input; its output is what callers see
}
// WrapWithPipeline routes an input directly into a processing pipeline and
// returns a type that manages both and acts like an ordinary input.
func WrapWithPipeline(procs *int, in Type, pipeConstructor types.PipelineConstructorFunc) (*WithPipeline, error) {
	pipe, err := pipeConstructor(procs)
	if err != nil {
		return nil, err
	}
	// Wire the input's transaction stream into the new pipeline.
	if err = pipe.Consume(in.TransactionChan()); err != nil {
		return nil, err
	}
	wrapped := WithPipeline{in: in, pipe: pipe}
	return &wrapped, nil
}
// WrapWithPipelines wraps an input with a variadic number of pipelines,
// chaining each pipeline onto the output of the previous one.
func WrapWithPipelines(in Type, pipeConstructors ...types.PipelineConstructorFunc) (Type, error) {
	procs := 0
	for _, ctor := range pipeConstructors {
		next, err := WrapWithPipeline(&procs, in, ctor)
		if err != nil {
			return nil, err
		}
		in = next
	}
	return in, nil
}
//------------------------------------------------------------------------------
// TransactionChan returns the channel used for consuming transactions from this
// input. Note this is the pipeline's output channel, not the raw input's.
func (i *WithPipeline) TransactionChan() <-chan types.Transaction {
	return i.pipe.TransactionChan()
}

// Connected returns a boolean indicating whether this input is currently
// connected to its target. Delegates to the wrapped input.
func (i *WithPipeline) Connected() bool {
	return i.in.Connected()
}
//------------------------------------------------------------------------------
// CloseAsync triggers a closure of this object but does not block.
func (i *WithPipeline) CloseAsync() {
	// The input is closed first so it stops feeding the pipeline.
	i.in.CloseAsync()
	i.pipe.CloseAsync()
}

// WaitForClose is a blocking call to wait until the object has finished closing
// down and cleaning up resources.
// NOTE(review): only the pipeline is waited on here; this presumably relies on
// the pipeline not finishing until its upstream input has — confirm against
// the types.Pipeline contract.
func (i *WithPipeline) WaitForClose(timeout time.Duration) error {
	return i.pipe.WaitForClose(timeout)
}
//------------------------------------------------------------------------------ | lib/input/wrap_with_pipeline.go | 0.792986 | 0.445771 | wrap_with_pipeline.go | starcoder |
package streaming
import (
"github.com/alexandre-normand/glukit/app/apimodel"
"github.com/alexandre-normand/glukit/app/container"
"github.com/alexandre-normand/glukit/app/glukitio"
"time"
)
// GlucoseReadStreamer buffers glucose reads and flushes them as a batch to
// the wrapped writer once a read falls outside the current buffering window.
// Values are immutable: every write operation returns a new streamer.
type GlucoseReadStreamer struct {
	head *container.ImmutableList // buffered reads, most recent at the head
	startTime *time.Time // start of the current buffering window (truncated to d)
	wr glukitio.GlucoseReadBatchWriter // destination for flushed batches
	d time.Duration // length of the buffering window
}
const (
	// BUFFER_SIZE is one day expressed in seconds.
	// NOTE(review): not referenced anywhere in this file — possibly used by
	// callers elsewhere; confirm before removing.
	BUFFER_SIZE = 86400
)
// WriteGlucoseRead writes a single GlucoseRead into the buffer.
func (b *GlucoseReadStreamer) WriteGlucoseRead(c apimodel.GlucoseRead) (*GlucoseReadStreamer, error) {
	single := []apimodel.GlucoseRead{c}
	return b.WriteGlucoseReads(single)
}
// WriteGlucoseReads writes the contents of p into the buffer, flushing a
// batch to the underlying writer whenever a read falls outside the current
// buffering window. p must be sorted by time (oldest to most recent).
// It returns the resulting streamer; on a flush failure the partial streamer
// and the error are returned.
//
// Fix: the original checked `err != nil` immediately after constructing the
// initial streamer — a call that cannot fail, with err still at its nil zero
// value — so the check was dead code and has been removed.
func (b *GlucoseReadStreamer) WriteGlucoseReads(p []apimodel.GlucoseRead) (g *GlucoseReadStreamer, err error) {
	g = newGlucoseStreamerDuration(b.head, b.startTime, b.wr, b.d)

	for _, c := range p {
		t := c.GetTime()
		truncatedTime := t.Truncate(g.d)

		switch {
		case g.head == nil:
			// First read ever: start a fresh window at the truncated time.
			g = newGlucoseStreamerDuration(container.NewImmutableList(nil, c), &truncatedTime, g.wr, g.d)
		case t.Sub(*g.startTime) >= g.d:
			// Read falls outside the window: flush, then start a new window.
			if g, err = g.Flush(); err != nil {
				return g, err
			}
			g = newGlucoseStreamerDuration(container.NewImmutableList(nil, c), &truncatedTime, g.wr, g.d)
		default:
			// Read belongs to the current window: prepend it to the buffer.
			g = newGlucoseStreamerDuration(container.NewImmutableList(g.head, c), g.startTime, g.wr, g.d)
		}
	}

	return g, err
}
// newGlucoseStreamerDuration assembles a streamer value from its parts.
func newGlucoseStreamerDuration(head *container.ImmutableList, startTime *time.Time, wr glukitio.GlucoseReadBatchWriter, bufferDuration time.Duration) *GlucoseReadStreamer {
	return &GlucoseReadStreamer{
		head:      head,
		startTime: startTime,
		wr:        wr,
		d:         bufferDuration,
	}
}

// NewGlucoseStreamerDuration returns an empty GlucoseReadStreamer that
// buffers reads over windows of the given duration before flushing to wr.
func NewGlucoseStreamerDuration(wr glukitio.GlucoseReadBatchWriter, bufferDuration time.Duration) *GlucoseReadStreamer {
	return newGlucoseStreamerDuration(nil, nil, wr, bufferDuration)
}
// Flush writes any buffered data to the underlying glukitio.Writer as a
// batch and returns an emptied streamer. On write failure a nil streamer
// and the error are returned.
func (b *GlucoseReadStreamer) Flush() (*GlucoseReadStreamer, error) {
	reversed, size := b.head.ReverseList()
	batch := ListToArrayOfGlucoseReads(reversed, size)

	if len(batch) == 0 {
		return newGlucoseStreamerDuration(nil, nil, b.wr, b.d), nil
	}

	innerWriter, err := b.wr.WriteGlucoseReadBatch(batch)
	if err != nil {
		return nil, err
	}
	return newGlucoseStreamerDuration(nil, nil, innerWriter, b.d), nil
}
// ListToArrayOfGlucoseReads copies size elements of the list starting at
// head into a freshly allocated slice, preserving list order.
func ListToArrayOfGlucoseReads(head *container.ImmutableList, size int) []apimodel.GlucoseRead {
	reads := make([]apimodel.GlucoseRead, size)
	node := head
	for i := range reads {
		reads[i] = node.Value().(apimodel.GlucoseRead)
		node = node.Next()
	}
	return reads
}
// Close flushes the buffer and the inner writer to effectively ensure nothing is left
// unwritten
func (b *GlucoseReadStreamer) Close() (*GlucoseReadStreamer, error) {
	g, err := b.Flush()
	if err != nil {
		return g, err
	}
	innerWriter, err := g.wr.Flush()
	if err != nil {
		// NOTE(review): uses b.d here but g.d below; they are always equal
		// (Flush preserves the duration), so behavior is unaffected — still
		// worth unifying for readability.
		return newGlucoseStreamerDuration(g.head, g.startTime, innerWriter, b.d), err
	}
	return newGlucoseStreamerDuration(nil, nil, innerWriter, g.d), nil
}
package floatring
// Buffer is a fixed-capacity ring buffer of float64 values. When the buffer
// overflows, the oldest value is overwritten.
type Buffer struct {
	ring []float64 // backing storage
	full bool      // true when every slot holds an unread value
	wc   int       // write cursor: next slot to write
	rc   int       // read cursor: oldest unread slot
}

// New creates a new buffer with the given capacity.
func New(cp int) *Buffer {
	return &Buffer{ring: make([]float64, cp)}
}

// IsFull returns true if the buffer is filled with values.
func (b *Buffer) IsFull() bool { return b.full }

// IsEmpty returns true if the buffer contains no values.
func (b *Buffer) IsEmpty() bool { return b.wc == b.rc && !b.full }

// Cap returns the buffer capacity.
func (b *Buffer) Cap() int { return len(b.ring) }

// Len returns the number of values in the buffer.
func (b *Buffer) Len() int {
	if b.IsEmpty() {
		return 0
	}
	if b.IsFull() {
		return len(b.ring)
	}
	if d := b.wc - b.rc; d > 0 {
		return d
	}
	// Write cursor has wrapped around behind the read cursor.
	return len(b.ring) + b.wc - b.rc
}

// WriteValue writes a single value to the buffer.
// If the buffer is overflowed, then the oldest value will be overwritten.
func (b *Buffer) WriteValue(v float64) {
	n := len(b.ring)
	if n == 0 {
		return
	}
	if b.full {
		// Drop the oldest value to make room.
		b.rc = (b.rc + 1) % n
	}
	b.ring[b.wc] = v
	b.wc = (b.wc + 1) % n
	b.full = b.wc == b.rc
}

// Write appends the provided floats to the buffer.
// If the buffer is overflowed, then the oldest values will be overwritten.
func (b *Buffer) Write(vv []float64) {
	for _, v := range vv {
		b.WriteValue(v)
	}
}

// ReadValue pops the oldest value in the buffer; ok is false when the buffer
// is empty.
func (b *Buffer) ReadValue() (float64, bool) {
	if b.IsEmpty() {
		return 0, false
	}
	x := b.ring[b.rc]
	b.rc = (b.rc + 1) % len(b.ring)
	b.full = false
	return x, true
}

// Read fills vv with values popped from the buffer and returns the number of
// values actually read.
func (b *Buffer) Read(vv []float64) int {
	for i := range vv {
		x, ok := b.ReadValue()
		if !ok {
			return i
		}
		vv[i] = x
	}
	return len(vv)
}

// DumpTo appends the stored values as [oldest...newest] to dst and returns
// the result. It reuses dst's capacity to avoid allocations; dst may be nil,
// in which case a new slice is allocated.
func (b *Buffer) DumpTo(dst []float64) []float64 {
	dst = dst[:0]
	if b.IsEmpty() {
		return dst
	}
	if b.wc > b.rc {
		return append(dst, b.ring[b.rc:b.wc]...)
	}
	// Stored region wraps: copy the tail, then the head.
	dst = append(dst, b.ring[b.rc:]...)
	return append(dst, b.ring[:b.wc]...)
}

// ForEach calls op for each value in the buffer, oldest first, stopping
// early when op returns false. This method does no allocations.
func (b *Buffer) ForEach(op func(x float64) bool) {
	if b.IsEmpty() {
		return
	}
	if b.wc > b.rc {
		b.scan(b.ring[b.rc:b.wc], op)
		return
	}
	// Stored region wraps: scan the tail segment, then the head segment.
	if b.scan(b.ring[b.rc:], op) {
		b.scan(b.ring[:b.wc], op)
	}
}

// scan feeds each element of s to op and reports whether the scan ran to
// completion (op never returned false).
func (b *Buffer) scan(s []float64, op func(x float64) bool) bool {
	for _, x := range s {
		if !op(x) {
			return false
		}
	}
	return true
}

// Reset returns the buffer to a zero state without releasing its storage.
// Can be used in pair with sync.Pool.
func (b *Buffer) Reset() {
	b.wc, b.rc, b.full = 0, 0, false
}
package packets
import (
"encoding/binary"
"unicode/utf8"
"unsafe"
)
// bytesToString provides a zero-alloc, no-copy byte to string conversion.
// via https://github.com/golang/go/issues/25484#issuecomment-391415660
// NOTE(review): the returned string aliases bs's backing array — the caller
// must not mutate bs afterwards or the "immutable" string will change.
func bytesToString(bs []byte) string {
	return *(*string)(unsafe.Pointer(&bs))
}
// decodeUint16 reads a big-endian uint16 at offset and returns it together
// with the offset of the next unread byte.
func decodeUint16(buf []byte, offset int) (uint16, int, error) {
	end := offset + 2
	if len(buf) < end {
		return 0, 0, ErrOffsetUintOutOfRange
	}
	return binary.BigEndian.Uint16(buf[offset:end]), end, nil
}

// decodeString reads a length-prefixed UTF-8 string starting at offset.
func decodeString(buf []byte, offset int) (string, int, error) {
	raw, next, err := decodeBytes(buf, offset)
	if err != nil {
		return "", 0, err
	}
	return bytesToString(raw), next, nil
}
// decodeBytes reads a length-prefixed byte slice starting at offset.
// Used primarily for message payloads. The returned slice aliases buf
// (no copy is made).
func decodeBytes(buf []byte, offset int) ([]byte, int, error) {
	length, start, err := decodeUint16(buf, offset)
	if err != nil {
		return []byte{}, 0, err
	}
	end := start + int(length)
	if end > len(buf) {
		return []byte{}, 0, ErrOffsetStrOutOfRange
	}
	if !validUTF8(buf[start:end]) {
		return []byte{}, 0, ErrOffsetStrInvalidUTF8
	}
	return buf[start:end], end, nil
}
// decodeByte returns the byte at offset and the offset of the next byte.
func decodeByte(buf []byte, offset int) (byte, int, error) {
	if offset >= len(buf) {
		return 0, 0, ErrOffsetByteOutOfRange
	}
	return buf[offset], offset + 1, nil
}

// decodeByteBool interprets the low bit of the byte at offset as a bool.
func decodeByteBool(buf []byte, offset int) (bool, int, error) {
	if offset >= len(buf) {
		return false, 0, ErrOffsetBoolOutOfRange
	}
	return buf[offset]&1 > 0, offset + 1, nil
}
// encodeBool returns 1 for true and 0 for false.
func encodeBool(b bool) byte {
	var x byte
	if b {
		x = 1
	}
	return x
}

// encodeBytes prefixes val with its big-endian two-byte length.
// The initial capacity of 32 lets most small payloads avoid a grow-copy
// on append.
func encodeBytes(val []byte) []byte {
	out := make([]byte, 2, 32)
	binary.BigEndian.PutUint16(out, uint16(len(val)))
	out = append(out, val...)
	return out
}

// encodeUint16 renders val as two big-endian bytes.
func encodeUint16(val uint16) []byte {
	out := make([]byte, 2)
	binary.BigEndian.PutUint16(out, val)
	return out
}

// encodeString prefixes val with its big-endian two-byte length.
func encodeString(val string) []byte {
	out := make([]byte, 2, 32)
	binary.BigEndian.PutUint16(out, uint16(len(val)))
	out = append(out, val...)
	return out
}
// validUTF8 checks if the byte array contains valid UTF-8 characters,
// specifically conforming to the MQTT specification requirements.
//
// Fix: the comment below cited MQTT-1.4.0-2 (strings must not contain the
// null character U+0000) but the corresponding check was missing, so any
// well-formed UTF-8 containing NUL was accepted. The check is now enforced.
func validUTF8(b []byte) bool {
	// [MQTT-1.4.0-1] The character data in a UTF-8 encoded string MUST be well-formed UTF-8...
	if !utf8.Valid(b) {
		return false
	}
	// [MQTT-1.4.0-2] A UTF-8 encoded string MUST NOT include an encoding of the null character U+0000.
	for _, c := range b {
		if c == 0 {
			return false
		}
	}
	return true
}
package pt
// Triangle is a renderable triangle primitive with per-vertex positions (V),
// normals (N) and texture coordinates (T), sharing a single material.
type Triangle struct {
	Material *Material
	V1, V2, V3 Vector // vertex positions
	N1, N2, N3 Vector // vertex normals; a zero value means "unset" (see FixNormals)
	T1, T2, T3 Vector // texture coordinates (only X and Y are used)
}
// NewTriangle builds a triangle from three vertex positions and their
// texture coordinates, copies the material, and backfills any unset vertex
// normals with the face normal.
func NewTriangle(v1, v2, v3, t1, t2, t3 Vector, material Material) *Triangle {
	tri := &Triangle{
		Material: &material,
		V1:       v1,
		V2:       v2,
		V3:       v3,
		T1:       t1,
		T2:       t2,
		T3:       t3,
	}
	tri.FixNormals()
	return tri
}
// Vertices returns the triangle's three corner positions.
func (t *Triangle) Vertices() (Vector, Vector, Vector) {
	return t.V1, t.V2, t.V3
}

// Compile is a no-op: triangles require no precompilation step.
func (t *Triangle) Compile() {
}

// BoundingBox returns the axis-aligned box enclosing all three vertices.
func (t *Triangle) BoundingBox() Box {
	lo := t.V1.Min(t.V2).Min(t.V3)
	hi := t.V1.Max(t.V2).Max(t.V3)
	return Box{lo, hi}
}
// Intersect tests ray r against the triangle using the Möller–Trumbore
// algorithm, expanded component-wise to avoid intermediate Vector values.
// It returns NoHit when the ray is (near-)parallel to the triangle's plane,
// when the barycentric coordinates fall outside the triangle, or when the
// hit lies behind the ray origin (distance < EPS).
func (t *Triangle) Intersect(r Ray) Hit {
	// Edge vectors e1 = V2-V1 and e2 = V3-V1.
	e1x := t.V2.X - t.V1.X
	e1y := t.V2.Y - t.V1.Y
	e1z := t.V2.Z - t.V1.Z
	e2x := t.V3.X - t.V1.X
	e2y := t.V3.Y - t.V1.Y
	e2z := t.V3.Z - t.V1.Z
	// p = direction × e2; dotting it with e1 gives the determinant.
	px := r.Direction.Y*e2z - r.Direction.Z*e2y
	py := r.Direction.Z*e2x - r.Direction.X*e2z
	pz := r.Direction.X*e2y - r.Direction.Y*e2x
	det := e1x*px + e1y*py + e1z*pz
	if det > -EPS && det < EPS {
		// Ray is parallel (or nearly so) to the triangle's plane.
		return NoHit
	}
	inv := 1 / det
	// t vector: ray origin relative to V1.
	tx := r.Origin.X - t.V1.X
	ty := r.Origin.Y - t.V1.Y
	tz := r.Origin.Z - t.V1.Z
	// First barycentric coordinate.
	u := (tx*px + ty*py + tz*pz) * inv
	if u < 0 || u > 1 {
		return NoHit
	}
	// q = t × e1.
	qx := ty*e1z - tz*e1y
	qy := tz*e1x - tx*e1z
	qz := tx*e1y - ty*e1x
	// Second barycentric coordinate.
	v := (r.Direction.X*qx + r.Direction.Y*qy + r.Direction.Z*qz) * inv
	if v < 0 || u+v > 1 {
		return NoHit
	}
	// Distance along the ray to the intersection point.
	d := (e2x*qx + e2y*qy + e2z*qz) * inv
	if d < EPS {
		return NoHit
	}
	return Hit{t, d, nil}
}
// UV returns the texture coordinate at p, interpolated from the vertex
// texture coordinates via barycentric weights (Z is always zero).
func (t *Triangle) UV(p Vector) Vector {
	u, v, w := t.Barycentric(p)
	uv := t.T1.MulScalar(u).Add(t.T2.MulScalar(v)).Add(t.T3.MulScalar(w))
	return Vector{uv.X, uv.Y, 0}
}

// MaterialAt returns the triangle's material, which is uniform across the
// surface.
func (t *Triangle) MaterialAt(p Vector) Material {
	return *t.Material
}
// NormalAt returns the shading normal at p: the barycentric interpolation of
// the three vertex normals, optionally replaced by the material's normal map
// and/or perturbed by its bump map (both sampled at the interpolated texture
// coordinate).
func (t *Triangle) NormalAt(p Vector) Vector {
	u, v, w := t.Barycentric(p)
	n := Vector{}
	// Weighted sum of vertex normals.
	n = n.Add(t.N1.MulScalar(u))
	n = n.Add(t.N2.MulScalar(v))
	n = n.Add(t.N3.MulScalar(w))
	n = n.Normalize()
	if t.Material.NormalTexture != nil {
		// Interpolated texture coordinate for the normal-map lookup.
		b := Vector{}
		b = b.Add(t.T1.MulScalar(u))
		b = b.Add(t.T2.MulScalar(v))
		b = b.Add(t.T3.MulScalar(w))
		ns := t.Material.NormalTexture.NormalSample(b.X, b.Y)
		// Build a tangent-space basis (T, B, N) from the position and
		// texture-coordinate deltas, then transform the sampled normal
		// out of tangent space.
		dv1 := t.V2.Sub(t.V1)
		dv2 := t.V3.Sub(t.V1)
		dt1 := t.T2.Sub(t.T1)
		dt2 := t.T3.Sub(t.T1)
		T := dv1.MulScalar(dt2.Y).Sub(dv2.MulScalar(dt1.Y)).Normalize()
		B := dv2.MulScalar(dt1.X).Sub(dv1.MulScalar(dt2.X)).Normalize()
		N := T.Cross(B)
		matrix := Matrix{
			T.X, B.X, N.X, 0,
			T.Y, B.Y, N.Y, 0,
			T.Z, B.Z, N.Z, 0,
			0, 0, 0, 1}
		n = matrix.MulDirection(ns)
	}
	if t.Material.BumpTexture != nil {
		// Interpolated texture coordinate for the bump-map lookup.
		b := Vector{}
		b = b.Add(t.T1.MulScalar(u))
		b = b.Add(t.T2.MulScalar(v))
		b = b.Add(t.T3.MulScalar(w))
		bump := t.Material.BumpTexture.BumpSample(b.X, b.Y)
		// Offset the normal along the tangent/bitangent directions by the
		// sampled gradient, scaled by the material's bump multiplier.
		dv1 := t.V2.Sub(t.V1)
		dv2 := t.V3.Sub(t.V1)
		dt1 := t.T2.Sub(t.T1)
		dt2 := t.T3.Sub(t.T1)
		tangent := dv1.MulScalar(dt2.Y).Sub(dv2.MulScalar(dt1.Y)).Normalize()
		bitangent := dv2.MulScalar(dt1.X).Sub(dv1.MulScalar(dt2.X)).Normalize()
		n = n.Add(tangent.MulScalar(bump.X * t.Material.BumpMultiplier))
		n = n.Add(bitangent.MulScalar(bump.Y * t.Material.BumpMultiplier))
	}
	n = n.Normalize()
	return n
}
// Area returns the surface area of the triangle: half the magnitude of the
// cross product of two edges.
func (t *Triangle) Area() float64 {
	a := t.V2.Sub(t.V1)
	b := t.V3.Sub(t.V1)
	return a.Cross(b).Length() / 2
}

// Normal returns the unit-length geometric normal of the triangle's plane.
func (t *Triangle) Normal() Vector {
	a := t.V2.Sub(t.V1)
	b := t.V3.Sub(t.V1)
	return a.Cross(b).Normalize()
}

// Barycentric returns the barycentric weights (u, v, w) of point p with
// respect to vertices V1, V2, V3, solving the 2x2 dot-product system with
// Cramer's rule.
func (t *Triangle) Barycentric(p Vector) (u, v, w float64) {
	e0 := t.V2.Sub(t.V1)
	e1 := t.V3.Sub(t.V1)
	e2 := p.Sub(t.V1)
	d00 := e0.Dot(e0)
	d01 := e0.Dot(e1)
	d11 := e1.Dot(e1)
	d20 := e2.Dot(e0)
	d21 := e2.Dot(e1)
	denom := d00*d11 - d01*d01
	v = (d11*d20 - d01*d21) / denom
	w = (d00*d21 - d01*d20) / denom
	u = 1 - v - w
	return
}
func (t *Triangle) FixNormals() {
n := t.Normal()
zero := Vector{}
if t.N1 == zero {
t.N1 = n
}
if t.N2 == zero {
t.N2 = n
}
if t.N3 == zero {
t.N3 = n
}
} | pt/triangle.go | 0.714329 | 0.647157 | triangle.go | starcoder |
package errors
import "net/http"
// nolint: lll
func getBuiltin(code string) (cbe EMFErrorType, ok bool) {
cbe, ok = map[string]EMFErrorType{
"emf.400.QueryParameterInvalid": {
ErrorCode: "emf.400.QueryParameterInvalid",
StatusCode: http.StatusBadRequest,
Description: "An invalid query parameter was provided in the request.",
Message: map[string]string{
"en": "The incoming request payload has an invalid query parameter '{{.Data.Param}}'. Error: '{{.Data.Error}}'",
},
Data: map[string]interface{}{
"Param": "The invalid Query parameter.",
"Error": "The error raised while validating the query parameters.",
},
},
"emf.400.TokenMissing": {
ErrorCode: "emf.400.TokenMissing",
StatusCode: http.StatusBadRequest,
Description: "The JWT Token Verification request failed, so the API Request could not be completed.",
Message: map[string]string{
"en": `JWT Token is missing or malformed and could not be validated. Error: '{{.Data.Error}}'`,
},
Data: map[string]interface{}{
"Error": "Error found when extracting the JWT token from the request.",
},
},
"emf.400.InvalidParametersFailure": {
ErrorCode: "emf.400.InvalidParametersFailure",
StatusCode: http.StatusBadRequest,
Description: "The parameters are invalid.",
Data: map[string]interface{}{
"Error": "The error raised while parsing the request parameters.",
},
Message: map[string]string{
"en": "The request parameters are invalid. Error: '{{.Data.Error}}'",
},
},
"emf.401.Unauthorized": {
ErrorCode: "emf.401.Unauthorized",
StatusCode: http.StatusUnauthorized,
Description: "The API request cannot be performed as the User associated with the JWT token in the request does not have access to this endpoint.",
Message: map[string]string{
"en": "User with identifier '{{.Data.Target}}' and role '{{.Data.Role}}' does not have access.",
},
Data: map[string]interface{}{
"Role": "Type of authorization access being used.",
"Target": "Identity being acted on.",
},
},
"emf.401.TokenExpired": {
ErrorCode: "emf.401.TokenExpired",
StatusCode: http.StatusUnauthorized,
Description: "The JWT Token provided with the request is Expired.",
Message: map[string]string{
"en": "The provided access token expired at '{{.Data.ExpirationDate}}'.",
},
Data: map[string]interface{}{
"ExpirationDate": "Expiration Date of the given token.",
},
},
"emf.401.TokenInactive": {
ErrorCode: "emf.401.TokenInactive",
StatusCode: http.StatusUnauthorized,
Description: "The JWT Token provided with the request is no longer Active (the User logged out or switched roles).",
Message: map[string]string{
"en": "The provided token with User '{{.Data.Target}}' and role '{{.Data.Role}}' is no longer active.",
},
Data: map[string]interface{}{
"Role": "Type of authorization access being used.",
"Target": "Identity being acted on.",
},
},
"emf.401.TokenVerificationFailure": {
ErrorCode: "emf.401.TokenVerificationFailure",
StatusCode: http.StatusUnauthorized,
Description: "The JWT Token Verification request failed, so the API Request could not be completed.",
Message: map[string]string{
"en": `The provided token with User '{{.Data.Target}}' and role '{{.Data.Role}}' could not be verified,
please try again. Error: '{{.Data.error}}'`,
},
Data: map[string]interface{}{
"Role": "Type of authorization access being used.",
"Target": "Identity being acted on.",
},
},
"emf.401.TokenInvalidProperty": {
ErrorCode: "emf.401.TokenInvalidProperty",
StatusCode: http.StatusUnauthorized,
Description: "The JWT Token in the request has an invalid property, so the API Request could not be completed. Please try again.",
Message: map[string]string{
"en": `The provided token has an invalid '{{.Data.Name}}' value
'{{.Data.Value}}', please try again. Error: '{{.Data.Error}}'`,
},
Data: map[string]interface{}{
"Error": "Error found when retrieving the property from the JWT token.",
"Name": "Name of the JWT token property.",
"Value": "Value associated with JWT token property.",
},
},
"emf.401.UnauthorizedCaller": {
ErrorCode: "emf.401.UnauthorizedCaller",
StatusCode: http.StatusUnauthorized,
Description: "The API request sent by the Caller/Client cannot be performed as the User associated with the JWT token in the API request does not have access to this endpoint.",
Message: map[string]string{
"en": "User with identifier '{{.Data.Target}}' and role '{{.Data.Role}}' does not have access.",
},
Data: map[string]interface{}{
"Role": "Type of authorization access being used.",
"Target": "Identity being acted on.",
},
},
"emf.500.RequesterEncodingFailure": {
ErrorCode: "emf.500.RequesterEncodingFailure",
StatusCode: http.StatusInternalServerError,
Description: "The internal HTTP Request could not be performed as the request or response could not be JSON encoded",
Message: map[string]string{
"en": "Failed to encode request payload. Error: '{{.Data.Error}}'",
},
Data: map[string]interface{}{
"Error": "JSON Encoding Error",
},
},
"emf.500.RequesterDecodingFailure": {
ErrorCode: "emf.500.RequesterDecodingFailure",
StatusCode: http.StatusInternalServerError,
Description: "The internal HTTP Request could not be performed as the response could not be JSON decoded",
Message: map[string]string{
"en": "Failed to decode response payload. Error: '{{.Data.Error}}'",
},
Data: map[string]interface{}{
"Error": "JSON Decoding Error",
},
},
"emf.500.RequesterErrorResponseFailure": {
ErrorCode: "emf.500.RequesterErrorResponseFailure",
StatusCode: http.StatusInternalServerError,
Description: "The internal HTTP Request could not be performed as the CBError response could not be JSON decoded",
Message: map[string]string{
"en": "Failed to decode failing response payload as CBError. Response: '{{.Data.Response}}'",
},
Data: map[string]interface{}{
"Response": "Response sent by the downstream component",
"Error": "Error encountered while trying to decode 'Response'",
},
},
"emf.500.RequesterCreateRequestFailure": {
ErrorCode: "emf.500.RequesterCreateRequestFailure",
StatusCode: http.StatusInternalServerError,
Description: "The internal HTTP Request could not be performed as the request object could not be initialized",
Message: map[string]string{
"en": "Failed to initialize request. Error: '{{.Data.Error}}'",
},
Data: map[string]interface{}{
"Error": "Error created while initializing the http request",
},
},
"emf.500.RequesterSendRequestFailure": {
ErrorCode: "emf.500.RequesterSendRequestFailure",
StatusCode: http.StatusInternalServerError,
Description: "The internal HTTP Request could not be performed as the HTTP client failed to execute the request",
Message: map[string]string{
"en": "Failed to execute request. Error: '{{.Data.Error}}'",
},
Data: map[string]interface{}{
"Error": "HTTP client error",
},
},
}[code]
return
} | emf/context/errors/builtin_errors.go | 0.545286 | 0.40869 | builtin_errors.go | starcoder |
package science
/*
A fairly substantial redraft of NodeBuilders -- in this one,
the recursion happens in such a way that you *don't* get a handle to a Node
immediately when completing every builder.
The idea is that this might make it easier to build large recursive values
(both for caller ergonomics, and for internal efficiency) since we don't
have to be concerned with returning an immutable Node at high granularity at
almost every step throughout the process of building the larger tree.
But does it? Not sure yet.
This exploration is triggered by two things:
Firstly: in general, the NodeBuilder system at present is small-pieces-first...
and that's very bad for memory characteristics. In trying to spec behaviors
that fix it, we've come up with possible 'memory visibility' models which
let us have the memory amortization we want while keeping the current
small-pieces-first interfaces... but it's... getting arcane.
It's unclear if the effort is worth it -- what if we just admit it and
fully abandon the small-pieces-first model?
Secondly: that increasingly-arcane pretends-to-be small-pieces-first model
is still failing in one critical place: handling keys for maps.
Making it possible to handle complex keys without heap allocs for
semiotically-unnecessary interface wrapping is turning out to be agonizing.
So maybe some different interfaces will make it easier.
Maybe. Let's see.
*/
// NodeAssembler is the recursive building block of this sketch: exactly one
// Begin*/Assign* call is expected per assembler, after which it must not be
// reused. Begin* descends into a child structure; Assign* terminates with a
// scalar; Assign copies an existing Node into place.
type NodeAssembler interface {
	BeginMap() MapAssembler
	BeginList() ListAssembler
	AssignNull()
	AssignBool(bool)
	AssignInt(int)
	AssignFloat(float64)
	AssignString(string)
	AssignBytes([]byte)
	Assign(Node)
	CheckError() error // where are these stored? when each child is done, could check this. and have it stored in each child. means each allocs (nonzero struct)... but isn't that true anyway? yeah. well, except now you're apparently getting that for scalars, which is bad.
}
// MapAssembler assembles one map. Callers either alternate
// AssembleKey/AssembleValue to build entries incrementally, or use the
// Insert/Insert2 shortcuts for pre-built values; Done closes the map.
type MapAssembler interface {
	AssembleKey() MapKeyAssembler
	Insert(string, Node)  // shortcut for a string key and a pre-built value.
	Insert2(Node, Node)   // generic form: both key and value are pre-built Nodes.
	Done()
}
// MapKeyAssembler assembles a map key: any embedded NodeAssembler method
// produces the key, after which AssembleValue moves on to the value slot.
type MapKeyAssembler interface {
	NodeAssembler
	AssembleValue() MapValueAssembler
}

// MapValueAssembler assembles the value paired with the preceding key.
type MapValueAssembler interface {
	NodeAssembler
}
// ListAssembler assembles one list: AssembleValue opens the next element
// slot, Append takes a pre-built Node, and Done closes the list.
type ListAssembler interface {
	AssembleValue() NodeAssembler
	Append(Node)
	Done()
}

// NodeBuilder is the root-level assembler; unlike child assemblers it can
// surrender the finished (immutable) Node via Build.
type NodeBuilder interface {
	NodeAssembler
	Build() (Node, error)
}
// Node is a placeholder for the read-side node interface in this sketch.
type Node interface {
	// all the familiar reader methods go here
}
// demo sketches how the closure-heavy assembler API reads at a use site.
// NOTE(review): nb is never assigned a concrete implementation, so actually
// executing this would nil-panic on nb.BeginMap(); the function exists
// purely to exercise the ergonomics of the interfaces above.
func demo() {
	var nb NodeBuilder
	func(mb MapAssembler) {
		mb.AssembleKey().AssignString("key")
		// Nested map: the inner closure receives the child MapAssembler
		// produced by BeginMap on the value slot.
		func(mb MapAssembler) {
			mb.AssembleKey().AssignString("nested")
			mb.AssembleValue().AssignBool(true)
			mb.Done()
		}(mb.AssembleValue().BeginMap())
		mb.AssembleKey().AssignString("secondkey")
		mb.AssembleValue().AssignString("morevalue")
		mb.Done()
	}(nb.BeginMap())
	result, err := nb.Build()
	_, _ = result, err
}
/*
Pros:
- this nested pretty nicely. not shuffling returned nodes (and errors!) everywhere is actually... prettier to use than what we've got presently.
- having the 'Build' at the end seems... fine.
- notice the lack of childbuilder getter methods. it's just implied by normal operation.
- also means it's intensely obvious that none of the assemblers can be reused freely -- whereas with the child builder getters at present just returning a NodeBuilder, that's nonobvious.
Cons:
- not very clear where error handling should go. punting it *all* to the end is not great.
- this is a super big deal!
- when you have a typed map key, does the subsequent `AssembleValue` call trigger validating the key?
- when you have a typed map value, does the next `AssembleKey` call trigger validating value?
- what happens for the last value in a map? now we're really in trouble. the next action on the parent has to pick up the duty? no, I really don't think that's even viable.
- is every builder gonna need to have room inside to curry an error? that means a bunch of things might go from zero to nonzero struct size, which might be consequential.
- i really don't like that we have two different styles of usage appearing in the same interfaces (the 'Assemble' recursors vs 'Insert'/'Append').
- ... maybe we can get rid of those now? `mb.AssembleKey().Assign(n1).Assign(n2)`? no, not quite a one-liner yet...
- ... yeah, I can't get a one-liner out of this. either the key or the value might require or design (respectively) a closure in order to flow well. the former complicates the core interface, and the latter can still be done from the outside but then just looks weird.
- building single scalar values got more annoying. it's at least two or three lines now: create builder, do assign, call build.
- question is whether this is the thing to worry about -- we traded this for fewer lines when making recursive structures, which we presumably do more often than not.
Notes:
- yes, also ditched the "amend" methods. those have been pretty consistently irritating; that said, I don't have a plan to replace their fastpath capabilities either.
- the types still don't particularly do much to force correct usage order. e.g., your logic must alternate calling AssembleKey and AssembleValue, etc.
- ...but as found in other explorations, there's a limit to what we can do at best anyway without linear types or rustacean 'move' semantics, etc. so if we already have to give up a meter shy of the dream, what's another millimeter?
- i don't think there's any strong incentive _not_ to panic throughout this code, performance-wise at least. something with this many vtables is already fractally uninlinable.
- interesting to note that if a user is creating map keys via a typed nodebuilder (presumably they're structs)... `Assign2(Node,Node)` **still** forces allocs for boxing.
- while the AssembleKey style of usage avoids a boxing, it's awkward to use if the user wants to use a natively typed builder (`AssembleKey().Assign(nowthis)`?) and then still forces a temp copy-by-value in the middle for no good reason.
- a natively-typed assign method that works by value is the only way to minimize all the costs at once.
- we do still need it though: `Assign2(Node,Node)` is the only way to copy an existing typed map key generically (without the temp-copy lamented above for the `AssembleKey.Assign` chain).
- the `Append` and `Assign` method names might be too terse -- we didn't leave space for the natively typed methods, which we generally want to be the shortest ones.
- 'Assign' being a symbol on both NodeAssembler and MapAssembler with different arity might be unfortunate -- would make it impossible to supply both with one concrete type.
*/ | _rsrch/nodeassembler/nodeAssembler.go | 0.552057 | 0.579103 | nodeAssembler.go | starcoder |
package value
import (
"strconv"
"strings"
)
// compareFloatFunc is the element-wise comparison primitive used by
// compareFloats (e.g. >, >=, <, <=).
type compareFloatFunc func(a, b float64) bool

// FloatSlice holds a slice of float64 values.
type FloatSlice struct {
	valsPtr *[]float64 // pointer so external storage can be shared and updated in place
}
// NewFloatSlice makes a new FloatSlice holding a private copy of the given
// float64 values.
func NewFloatSlice(vals ...float64) *FloatSlice {
	backing := append(make([]float64, 0, len(vals)), vals...)
	return &FloatSlice{valsPtr: &backing}
}
// NewFloatSliceFromPtr makes a new FloatSlice with the given pointer to
// float64 values. The storage is shared, not copied: later writes through
// valsPtr are visible to the returned FloatSlice.
func NewFloatSliceFromPtr(valsPtr *[]float64) *FloatSlice {
	return &FloatSlice{valsPtr: valsPtr}
}
// Set changes the float64 values, replacing the slice behind the shared pointer.
func (v *FloatSlice) Set(vals []float64) { *v.valsPtr = vals }

// Type returns TypeFloat.
func (v *FloatSlice) Type() Type { return TypeFloat }

// IsSlice returns true.
func (v *FloatSlice) IsSlice() bool { return true }

// Clone produces a clone that is identical except for the backing pointer
// (the element values are copied).
func (v *FloatSlice) Clone() Value { return NewFloatSlice(*v.valsPtr...) }
// Parse replaces the stored values with the comma-separated decimal numbers
// in str. Whitespace around each element is ignored. On a parse error the
// stored values are left untouched and the strconv error is returned.
func (v *FloatSlice) Parse(str string) error {
	parts := strings.Split(str, ",")
	parsed := make([]float64, len(parts))
	for i, part := range parts {
		f, err := strconv.ParseFloat(strings.TrimSpace(part), 64)
		if err != nil {
			return err
		}
		parsed[i] = f
	}
	*v.valsPtr = parsed
	return nil
}
// SlicePointer returns the pointer for storage of slice values.
func (v *FloatSlice) SlicePointer() interface{} { return v.valsPtr }

// Slice returns the float64 slice values. The returned slice shares backing
// storage with the FloatSlice (no copy is made).
func (v *FloatSlice) Slice() interface{} { return *v.valsPtr }

// Len returns the number of slice elements.
func (v *FloatSlice) Len() int { return len(*v.valsPtr) }
// Equal reports whether v2 has the same length and element values as the
// current slice.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) Equal(v2 Slice) (bool, error) {
	if err := CheckType(TypeFloat, v2.Type()); err != nil {
		return false, err
	}
	a, b := *v.valsPtr, v2.Slice().([]float64)
	if len(a) != len(b) {
		return false, nil
	}
	for i := range a {
		if a[i] != b[i] {
			return false, nil
		}
	}
	return true, nil
}
// Greater checks if all values of the current slice are greater than that of
// the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) Greater(v2 Single) (bool, error) {
	return compareFloats(*v.valsPtr, v2, floatGreater)
}

// GreaterEqual checks if all values of the current slice are greater or equal
// to the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) GreaterEqual(v2 Single) (bool, error) {
	return compareFloats(*v.valsPtr, v2, floatGreaterEqual)
}

// Less checks if all values of the current slice are less than that of
// the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) Less(v2 Single) (bool, error) {
	return compareFloats(*v.valsPtr, v2, floatLess)
}

// LessEqual checks if all values of the current slice are less or equal
// to the given single. An empty slice yields false.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) LessEqual(v2 Single) (bool, error) {
	return compareFloats(*v.valsPtr, v2, floatLessEqual)
}
// Contains reports whether the given single value equals at least one of the
// current slice values.
// Returns a non-nil error if types do not match.
func (v *FloatSlice) Contains(v2 Single) (bool, error) {
	if err := CheckType(TypeFloat, v2.Type()); err != nil {
		return false, err
	}
	needle := v2.Value().(float64)
	for _, candidate := range *v.valsPtr {
		if candidate == needle {
			return true, nil
		}
	}
	return false, nil
}
// compareFloats reports whether f holds between every element of vals and the
// single value v2. An empty vals yields false; a type mismatch yields an error.
func compareFloats(vals []float64, v2 Single, f compareFloatFunc) (bool, error) {
	if err := CheckType(TypeFloat, v2.Type()); err != nil {
		return false, err
	}
	if len(vals) == 0 {
		return false, nil
	}
	ref := v2.Value().(float64)
	for i := range vals {
		if !f(vals[i], ref) {
			return false, nil
		}
	}
	return true, nil
}
// floatGreater reports a > b.
func floatGreater(a, b float64) bool {
	return a > b
}

// floatGreaterEqual reports a >= b.
func floatGreaterEqual(a, b float64) bool {
	return a >= b
}

// floatLess reports a < b.
func floatLess(a, b float64) bool {
	return a < b
}

// floatLessEqual reports a <= b.
func floatLessEqual(a, b float64) bool {
	return a <= b
}
package spogoto
import (
"strconv"
)
// NewIntegerStack generates an integer DataStack pre-populated with ints.
// The parse function accepts base-10 64-bit integer literals.
func NewIntegerStack(ints []int64) *datastack {
	// Copy the inputs into an Elements slice; pre-sized to avoid regrowth,
	// and the values are already int64 so no conversion is needed.
	elements := make(Elements, 0, len(ints))
	for _, v := range ints {
		elements = append(elements, v)
	}
	d := NewDataStack(elements, FunctionMap{}, func(str string) (Element, bool) {
		val, err := strconv.ParseInt(str, 10, 64)
		return Element(val), err == nil
	})
	addIntegerFunctions(d)
	return d
}
// IntegerStackConstructor returns the stack's registry name ("integer")
// together with a fresh, empty integer DataStack.
func IntegerStackConstructor() (string, DataStack) {
	return "integer", NewIntegerStack([]int64{})
}
// addIntegerFunctions registers the integer instruction set on ds.
// Conventions shared by the instructions below:
//   - d.Lack(n) guards that at least n operands exist; otherwise the
//     instruction is a silent no-op.
//   - Operands are popped left to right, so for non-commutative binary ops
//     the top of the stack is the second operand (e.g. "-" computes
//     next-from-top minus top).
//   - Comparison results are pushed onto the RunSet's "boolean" stack.
func addIntegerFunctions(ds *datastack) {
	// "+": pop two integers, push their sum.
	ds.FunctionMap["+"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		s := d.Pop().(int64) + d.Pop().(int64)
		d.Push(s)
	}
	// "*": pop two integers, push their product.
	ds.FunctionMap["*"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		s := d.Pop().(int64) * d.Pop().(int64)
		d.Push(s)
	}
	// "-": push (second - top). Go evaluates call operands left to right,
	// so the first Pop yields the top of the stack, which is negated.
	ds.FunctionMap["-"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		d.Push(-d.Pop().(int64) + d.Pop().(int64))
	}
	// "/": push (second / top); no-op when the divisor (top) is zero.
	ds.FunctionMap["/"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) || d.Peek().(int64) == 0 {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		d.Push(i2 / i1)
	}
	// "%": push (second % top); no-op when the divisor (top) is zero.
	ds.FunctionMap["%"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) || d.Peek().(int64) == 0 {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		d.Push(i2 % i1)
	}
	// "min": push the smaller of the top two integers.
	ds.FunctionMap["min"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		if i1 < i2 {
			d.Push(i1)
		} else {
			d.Push(i2)
		}
	}
	// "max": push the larger of the top two integers.
	ds.FunctionMap["max"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		if i1 > i2 {
			d.Push(i1)
		} else {
			d.Push(i2)
		}
	}
	// ">": push (second > top) onto the boolean stack.
	ds.FunctionMap[">"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		r.Stack("boolean").Push(i2 > i1)
	}
	// "<": push (second < top) onto the boolean stack.
	ds.FunctionMap["<"] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		i1 := d.Pop().(int64)
		i2 := d.Pop().(int64)
		r.Stack("boolean").Push(i2 < i1)
	}
	// "=": push equality of the top two integers onto the boolean stack.
	ds.FunctionMap["="] = func(d DataStack, r RunSet, i Interpreter) {
		if d.Lack(2) {
			return
		}
		r.Stack("boolean").Push(d.Pop().(int64) == d.Pop().(int64))
	}
	// "fromboolean": pop a boolean, push 1 for true and 0 for false.
	// r.Bad guards the named stack for operand count (analogous to d.Lack).
	ds.FunctionMap["fromboolean"] = func(d DataStack, r RunSet, i Interpreter) {
		if r.Bad("boolean", 1) {
			return
		}
		b := r.Stack("boolean").Pop().(bool)
		if b {
			d.Push(int64(1))
		} else {
			d.Push(int64(0))
		}
	}
	// "fromfloat": pop a float, push it truncated toward zero (Go int64
	// conversion semantics).
	ds.FunctionMap["fromfloat"] = func(d DataStack, r RunSet, i Interpreter) {
		if r.Bad("float", 1) {
			return
		}
		f := r.Stack("float").Pop().(float64)
		d.Push(int64(f))
	}
	// "rand": push a random integer supplied by the interpreter.
	ds.FunctionMap["rand"] = func(d DataStack, r RunSet, i Interpreter) {
		d.Push(i.RandInt())
	}
}
package walkeralias
import (
"errors"
"math/rand"
)
// walkerAlias holds an internal rand reference instead of sharing with
// global rand, so that sampling is reproducible for a given seed.
type walkerAlias struct {
	buckets []*bucket  // alias table; one bucket per key
	r       *rand.Rand // private, seeded source used for sampling
}

// ErrIllegalProbMap is returned when the probability map is empty or its
// weights do not sum to a positive value.
var ErrIllegalProbMap = errors.New("illegal probability map values")
// NewWalkerAlias accepts a map {key: weight} and a seed to init a new rand
// for its own use, and returns a reference to a walkerAlias object.
//
// Example probabilityMap: {1: 2, 2: 3} selects key 1 and key 2 with
// probabilities 0.4 and 0.6 respectively.
//
// Construction is an O(n) preprocessing step that builds the alias table;
// every subsequent call to Random is O(1). Returns ErrIllegalProbMap when
// the map is empty or the weights do not sum to a positive value.
func NewWalkerAlias(probabilityMap map[int]float64, seed int64) (*walkerAlias, error) {
	n := len(probabilityMap)
	var sumWeights float64
	for _, w := range probabilityMap {
		sumWeights += w
	}
	// Validate before allocating anything else.
	if sumWeights <= 0 || n == 0 {
		return nil, ErrIllegalProbMap
	}
	// Normalize each weight so the average bucket threshold is exactly 1.
	buckets := make([]*bucket, 0, n)
	for k, w := range probabilityMap {
		prob := w * float64(n) / sumWeights
		buckets = append(buckets, newBucket(k, prob))
	}
	// Partition buckets by whether they fall below or above the ideal
	// threshold of 1.
	underfull := make([]int, 0, n)
	overfull := make([]int, 0, n)
	for i, b := range buckets {
		if b.threshold < 1 {
			underfull = append(underfull, i)
		} else if b.threshold > 1 {
			overfull = append(overfull, i)
		}
	}
	// Pair each underfull bucket with an overfull donor: the donor's key
	// fills the underfull bucket's remaining capacity as its alias (key2).
	for len(underfull) > 0 && len(overfull) > 0 {
		u, o := underfull[len(underfull)-1], overfull[len(overfull)-1]
		underfull = underfull[:len(underfull)-1]
		under, over := buckets[u], buckets[o]
		under.key2 = over.key1
		over.threshold -= 1 - under.threshold
		if over.threshold < 1 {
			// The donor dropped below 1: it becomes underfull itself.
			underfull = append(underfull, o)
			overfull = overfull[:len(overfull)-1]
		}
	}
	return &walkerAlias{buckets: buckets, r: rand.New(rand.NewSource(seed))}, nil
}
// Random returns a random key drawn according to the configured weights.
//
// Bug fix: this previously called the package-global rand.Intn/rand.Float64,
// ignoring the seeded *rand.Rand stored in w.r (whose whole purpose, per the
// struct comment, is to avoid sharing global rand). It now samples from w.r,
// making results reproducible for a given seed.
func (w *walkerAlias) Random() int {
	b := w.buckets[w.r.Intn(len(w.buckets))]
	if w.r.Float64() > b.threshold {
		return b.key2
	}
	return b.key1
}
// newBucket returns a ref to a bucket object with the given key
// and sets its initial threshold to the prob (probability) given.
func newBucket(key int, prob float64) *bucket {
	// key2 starts at -1 as a "no alias assigned yet" sentinel; the pairing
	// loop in NewWalkerAlias fills it in for underfull buckets.
	return &bucket{threshold: prob, key1: key, key2: -1}
}

// bucket holds 2 keys at most: sampling returns key1 for draws at or below
// the threshold and key2 for draws strictly above it.
type bucket struct {
	threshold float64 // threshold point
	key1      int     // key below or equal to threshold
	key2      int     // key above threshold
}
package collectors
import (
"fmt"
"github.com/prometheus/client_golang/prometheus"
cephclient "github.com/rook/rook/pkg/ceph/client"
"github.com/rook/rook/pkg/clusterd"
)
// OSDCollector displays statistics about OSD in the ceph cluster.
// An important aspect of monitoring OSDs is to ensure that when the cluster is up and
// running that all OSDs that are in the cluster are up and running, too.
// It implements prometheus.Collector via its Describe/Collect methods.
type OSDCollector struct {
	// Context for executing commands against the Ceph cluster
	context *clusterd.Context
	// The name of the ceph cluster
	clusterName string
	// CrushWeight is a persistent setting, and it affects how CRUSH assigns data to OSDs.
	// It displays the CRUSH weight for the OSD
	CrushWeight *prometheus.GaugeVec
	// Depth displays the OSD's level of hierarchy in the CRUSH map
	Depth *prometheus.GaugeVec
	// Reweight sets an override weight on the OSD.
	// It displays value within 0 to 1.
	Reweight *prometheus.GaugeVec
	// Bytes displays the total bytes available in the OSD
	Bytes *prometheus.GaugeVec
	// UsedBytes displays the total used bytes in the OSD
	UsedBytes *prometheus.GaugeVec
	// AvailBytes displays the total available bytes in the OSD
	AvailBytes *prometheus.GaugeVec
	// Utilization displays current utilization of the OSD
	Utilization *prometheus.GaugeVec
	// Variance displays current variance of the OSD from the standard utilization
	Variance *prometheus.GaugeVec
	// Pgs displays total no. of placement groups in the OSD.
	// Available in Ceph Jewel version.
	Pgs *prometheus.GaugeVec
	// CommitLatency displays in seconds how long it takes for an operation to be applied to disk
	CommitLatency *prometheus.GaugeVec
	// ApplyLatency displays in seconds how long it takes to get applied to the backing filesystem
	ApplyLatency *prometheus.GaugeVec
	// OSDIn displays the In state of the OSD
	OSDIn *prometheus.GaugeVec
	// OSDUp displays the Up state of the OSD
	OSDUp *prometheus.GaugeVec
	// TotalBytes displays total bytes in all OSDs
	TotalBytes prometheus.Gauge
	// TotalUsedBytes displays total used bytes in all OSDs
	TotalUsedBytes prometheus.Gauge
	// TotalAvailBytes displays total available bytes in all OSDs
	TotalAvailBytes prometheus.Gauge
	// AverageUtil displays average utilization in all OSDs
	AverageUtil prometheus.Gauge
}
// NewOSDCollector creates an instance of the OSDCollector and instantiates
// the individual metrics that show information about the OSD.
// All metric names live under the cephNamespace namespace; per-OSD metrics
// carry a single "osd" label, while the Total*/Average gauges are cluster-wide.
func NewOSDCollector(context *clusterd.Context, clusterName string) *OSDCollector {
	return &OSDCollector{
		context:     context,
		clusterName: clusterName,
		CrushWeight: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_crush_weight",
				Help:      "OSD Crush Weight",
			},
			[]string{"osd"},
		),
		Depth: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_depth",
				Help:      "OSD Depth",
			},
			[]string{"osd"},
		),
		Reweight: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_reweight",
				Help:      "OSD Reweight",
			},
			[]string{"osd"},
		),
		Bytes: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_bytes",
				Help:      "OSD Total Bytes",
			},
			[]string{"osd"},
		),
		UsedBytes: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_used_bytes",
				Help:      "OSD Used Storage in Bytes",
			},
			[]string{"osd"},
		),
		AvailBytes: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_avail_bytes",
				Help:      "OSD Available Storage in Bytes",
			},
			[]string{"osd"},
		),
		Utilization: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_utilization",
				Help:      "OSD Utilization",
			},
			[]string{"osd"},
		),
		Variance: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_variance",
				Help:      "OSD Variance",
			},
			[]string{"osd"},
		),
		Pgs: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_pgs",
				Help:      "OSD Placement Group Count",
			},
			[]string{"osd"},
		),
		TotalBytes: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_total_bytes",
				Help:      "OSD Total Storage Bytes",
			},
		),
		TotalUsedBytes: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_total_used_bytes",
				Help:      "OSD Total Used Storage Bytes",
			},
		),
		TotalAvailBytes: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_total_avail_bytes",
				Help:      "OSD Total Available Storage Bytes ",
			},
		),
		AverageUtil: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_average_utilization",
				Help:      "OSD Average Utilization",
			},
		),
		CommitLatency: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_perf_commit_latency_seconds",
				Help:      "OSD Perf Commit Latency",
			},
			[]string{"osd"},
		),
		ApplyLatency: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_perf_apply_latency_seconds",
				Help:      "OSD Perf Apply Latency",
			},
			[]string{"osd"},
		),
		OSDIn: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_in",
				Help:      "OSD In Status",
			},
			[]string{"osd"},
		),
		OSDUp: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace: cephNamespace,
				Name:      "osd_up",
				Help:      "OSD Up Status",
			},
			[]string{"osd"},
		),
	}
}
// collectorList returns every metric owned by this collector, in the order
// they are described and collected by Describe/Collect.
func (o *OSDCollector) collectorList() []prometheus.Collector {
	return []prometheus.Collector{
		o.CrushWeight,
		o.Depth,
		o.Reweight,
		o.Bytes,
		o.UsedBytes,
		o.AvailBytes,
		o.Utilization,
		o.Variance,
		o.Pgs,
		o.TotalBytes,
		o.TotalUsedBytes,
		o.TotalAvailBytes,
		o.AverageUtil,
		o.CommitLatency,
		o.OSDIn,
		o.ApplyLatency,
		o.OSDUp,
	}
}
// collect gathers per-OSD and cluster-wide usage metrics via
// cephclient.GetOSDUsage and records them in the collector's gauges.
//
// Notes:
//   - KB figures are converted to bytes with a factor of 1e3, matching the
//     existing convention throughout this collector (NOTE(review): if ceph
//     reports KiB, 1024 would be more accurate — confirm before changing).
//   - A failure to read Pgs skips that OSD's Pgs metric (continue) rather
//     than aborting, because the field only exists on Ceph Jewel and newer.
func (o *OSDCollector) collect() error {
	osdDF, err := cephclient.GetOSDUsage(o.context, o.clusterName)
	if err != nil {
		return err
	}
	for _, node := range osdDF.OSDNodes {
		crushWeight, err := node.CrushWeight.Float64()
		if err != nil {
			return err
		}
		o.CrushWeight.WithLabelValues(node.Name).Set(crushWeight)
		depth, err := node.Depth.Float64()
		if err != nil {
			return err
		}
		o.Depth.WithLabelValues(node.Name).Set(depth)
		reweight, err := node.Reweight.Float64()
		if err != nil {
			return err
		}
		o.Reweight.WithLabelValues(node.Name).Set(reweight)
		osdKB, err := node.KB.Float64()
		if err != nil {
			// BUG FIX: this previously returned nil, silently aborting the
			// rest of the collection on a parse error; propagate it like
			// every other branch.
			return err
		}
		o.Bytes.WithLabelValues(node.Name).Set(osdKB * 1e3)
		usedKB, err := node.UsedKB.Float64()
		if err != nil {
			return err
		}
		o.UsedBytes.WithLabelValues(node.Name).Set(usedKB * 1e3)
		availKB, err := node.AvailKB.Float64()
		if err != nil {
			return err
		}
		o.AvailBytes.WithLabelValues(node.Name).Set(availKB * 1e3)
		util, err := node.Utilization.Float64()
		if err != nil {
			return err
		}
		o.Utilization.WithLabelValues(node.Name).Set(util)
		variance, err := node.Variance.Float64()
		if err != nil {
			return err
		}
		o.Variance.WithLabelValues(node.Name).Set(variance)
		pgs, err := node.Pgs.Float64()
		if err != nil {
			// Pgs is only reported by Ceph Jewel and newer; skip, don't fail.
			continue
		}
		o.Pgs.WithLabelValues(node.Name).Set(pgs)
	}
	totalKB, err := osdDF.Summary.TotalKB.Float64()
	if err != nil {
		return err
	}
	o.TotalBytes.Set(totalKB * 1e3)
	totalUsedKB, err := osdDF.Summary.TotalUsedKB.Float64()
	if err != nil {
		return err
	}
	o.TotalUsedBytes.Set(totalUsedKB * 1e3)
	totalAvailKB, err := osdDF.Summary.TotalAvailKB.Float64()
	if err != nil {
		return err
	}
	o.TotalAvailBytes.Set(totalAvailKB * 1e3)
	averageUtil, err := osdDF.Summary.AverageUtil.Float64()
	if err != nil {
		return err
	}
	o.AverageUtil.Set(averageUtil)
	return nil
}
// collectOSDPerf records per-OSD commit/apply latency obtained from
// cephclient.GetOSDPerfStats. Raw latency values are divided by 1e3 before
// being stored (presumably milliseconds at the source — the metric names end
// in _seconds; TODO confirm against the ceph client).
func (o *OSDCollector) collectOSDPerf() error {
	osdPerf, err := cephclient.GetOSDPerfStats(o.context, o.clusterName)
	if err != nil {
		return err
	}
	for _, perfStat := range osdPerf.PerfInfo {
		osdID, err := perfStat.ID.Int64()
		if err != nil {
			return err
		}
		// Label value matches the "osd.<id>" naming used by ceph.
		osdName := fmt.Sprintf("osd.%v", osdID)
		commitLatency, err := perfStat.Stats.CommitLatency.Float64()
		if err != nil {
			return err
		}
		o.CommitLatency.WithLabelValues(osdName).Set(commitLatency / 1e3)
		applyLatency, err := perfStat.Stats.ApplyLatency.Float64()
		if err != nil {
			return err
		}
		o.ApplyLatency.WithLabelValues(osdName).Set(applyLatency / 1e3)
	}
	return nil
}
// collectOSDDump records the in/up status flags for every OSD reported by
// cephclient.GetOSDDump. The flags arrive as numbers and are stored as-is
// (gauge value 0 or 1).
func (o *OSDCollector) collectOSDDump() error {
	osdDump, err := cephclient.GetOSDDump(o.context, o.clusterName)
	if err != nil {
		return err
	}
	for _, dumpInfo := range osdDump.OSDs {
		osdID, err := dumpInfo.OSD.Int64()
		if err != nil {
			return err
		}
		osdName := fmt.Sprintf("osd.%v", osdID)
		in, err := dumpInfo.In.Float64()
		if err != nil {
			return err
		}
		o.OSDIn.WithLabelValues(osdName).Set(in)
		up, err := dumpInfo.Up.Float64()
		if err != nil {
			return err
		}
		o.OSDUp.WithLabelValues(osdName).Set(up)
	}
	return nil
}
// Describe sends the descriptors of each OSDCollector related metrics we have defined
// to the provided prometheus channel. It is part of the prometheus.Collector
// interface.
func (o *OSDCollector) Describe(ch chan<- *prometheus.Desc) {
	for _, metric := range o.collectorList() {
		metric.Describe(ch)
	}
}
// Collect sends all the collected metrics to the provided prometheus channel.
// It requires the caller to handle synchronization.
// Errors from the individual collection passes are logged rather than
// propagated, so one failing ceph query does not block the others.
func (o *OSDCollector) Collect(ch chan<- prometheus.Metric) {
	if err := o.collectOSDPerf(); err != nil {
		logger.Errorf("failed collecting osd perf stats: %+v", err)
	}
	if err := o.collectOSDDump(); err != nil {
		logger.Errorf("failed collecting osd dump: %+v", err)
	}
	if err := o.collect(); err != nil {
		logger.Errorf("failed collecting osd metrics: %+v", err)
	}
	for _, metric := range o.collectorList() {
		metric.Collect(ch)
	}
}
package main
// helpLongMsg is the long-form help text; the %_COMMAND_NAME_% placeholder
// is expected to be substituted with the executable name by the caller
// before the text is displayed.
// (Fixes: grammar in the DESCRIPTION/DISCUSSION sections of the user-facing
// text; layout reconstructed in the style of the git merge-base man page.)
const helpLongMsg = `
NAME:
   %_COMMAND_NAME_% - Lists the best common ancestors of the two passed commit revisions

SYNOPSIS:
	usage: %_COMMAND_NAME_% <path> <commitRev> <commitRev>
	   or: %_COMMAND_NAME_% <path> --independent <commitRev>...
	   or: %_COMMAND_NAME_% <path> --is-ancestor <commitRev> <commitRev>

	params:
	<path>          Path to the git repository
	<commitRev>     Git revision as supported by go-git

DESCRIPTION:
	%_COMMAND_NAME_% finds the best common ancestor(s) between two commits. One common ancestor is better than another common ancestor if the latter is an ancestor of the former.
	A common ancestor that does not have any better common ancestor is a best common ancestor, i.e. a merge base. Note that there can be more than one merge base for a pair of commits.
	Commits that do not share a common history have no common ancestors.

OPTIONS:
	As the most common special case, specifying only two commits on the command line means computing the merge base between the given two commits.
	If there is no shared history between the passed commits, there won't be a merge-base, and the command will exit with status 1.

--independent
	List the subgroup from the passed commits, that cannot be reached from any other of the passed ones. In other words, it prints a minimal subset of the supplied commits with the same ancestors.

--is-ancestor
	Check if the first commit is an ancestor of the second one, and exit with status 0 if true, or with status 1 if not. Errors are signaled by a non-zero status that is not 1.

DISCUSSION:
	Given two commits A and B, %_COMMAND_NAME_% A B will output a commit which is the best common ancestor of both, which means that it is reachable from both A and B through the parent relationship.

	For example, with this topology:

	         o---o---o---o---B
	        /           /
	---3---2---o---1---o---A

	the merge base between A and B is 1.

	With the given topology 2 and 3 are also common ancestors of A and B, but they are not the best ones because they can also be reached from 1.

	When the history involves cross-cross merges, there can be more than one best common ancestor for two commits. For example, with this topology:

	---1---o---A
	    \ /
	     X
	    / \
	---2---o---o---B

	When the history involves feature branches depending on other feature branches there can also be more than one common ancestor. For example:

	        o---o---o
	       /         \
	  1---o---A       \
	 /         \       \
	---o---o---2---o---o---B

	In both examples, both 1 and 2 are merge-bases of A and B for each situation.
	Neither one is better than the other (both are best merge bases) because 1 cannot be reached from 2, nor the opposite.
`
// Package unicodes implements unicode utility functions.
package unicodes
import (
"unicode"
)
// The functions in this file are thin wrappers that delegate directly to the
// standard library's unicode package.

// IsGraphic reports whether the rune is defined as a Graphic by Unicode.
// Such characters include letters, marks, numbers, punctuation, symbols, and
// spaces, from categories L, M, N, P, S, Zs.
func IsGraphic(r rune) bool {
	return unicode.IsGraphic(r)
}

// IsPrint reports whether the rune is defined as printable by Go. Such
// characters include letters, marks, numbers, punctuation, symbols, and the
// ASCII space character, from categories L, M, N, P, S and the ASCII space
// character. This categorization is the same as IsGraphic except that the
// only spacing character is ASCII space, U+0020.
func IsPrint(r rune) bool {
	return unicode.IsPrint(r)
}

// IsOneOf reports whether the rune is a member of one of the ranges.
// The function "In" provides a nicer signature and should be used in preference to IsOneOf.
func IsOneOf(ranges []*unicode.RangeTable, r rune) bool {
	return unicode.IsOneOf(ranges, r)
}

// In reports whether the rune is a member of one of the ranges.
func In(r rune, ranges ...*unicode.RangeTable) bool {
	return unicode.In(r, ranges...)
}
// IsControl reports whether the rune is a control character.
// The C (Other) Unicode category includes more code points
// such as surrogates; use Is(C, r) to test for them.
func IsControl(r rune) bool {
	return unicode.IsControl(r)
}

// IsLetter reports whether the rune is a letter (category L).
func IsLetter(r rune) bool {
	return unicode.IsLetter(r)
}

// IsMark reports whether the rune is a mark character (category M).
func IsMark(r rune) bool {
	return unicode.IsMark(r)
}

// IsNumber reports whether the rune is a number (category N).
func IsNumber(r rune) bool {
	return unicode.IsNumber(r)
}

// IsPunct reports whether the rune is a Unicode punctuation character
// (category P).
func IsPunct(r rune) bool {
	return unicode.IsPunct(r)
}

// IsSpace reports whether the rune is a space character as defined
// by Unicode's White Space property; in the Latin-1 space
// this is
// '\t', '\n', '\v', '\f', '\r', ' ', U+0085 (NEL), U+00A0 (NBSP).
// Other definitions of spacing characters are set by category
// Z and property Pattern_White_Space.
func IsSpace(r rune) bool {
	return unicode.IsSpace(r)
}

// IsSymbol reports whether the rune is a symbolic character.
func IsSymbol(r rune) bool {
	return unicode.IsSymbol(r)
}
package kata
import (
"errors"
"fmt"
"regexp"
"strconv"
"strings"
)
// matchData holds the string encoded results of a match.
type matchData struct {
	team   [2]string // the two team names, in order of appearance
	points [2]string // the two scores, still unparsed
}

// teamData holds the points scored and conceded in a match by one team.
type teamData struct {
	pointsScored, pointsConceded int
}

// The points are given as decimal floating point numbers.
// The points must be the only decimal numbers in the result string.
var patternPoints = regexp.MustCompile(`\b\d+\.?\d*\b`)
// splitMatchData splits the string of one game result into its parts,
// extracting only the raw substrings (no numeric parsing happens here,
// so floating point scores are still accepted at this stage).
// Returns a "not 2 numbers" error unless exactly two numbers are found.
func splitMatchData(game string) (parsed matchData, err error) {
	// The two numbers structure the result string: team names are
	// whatever surrounds them.
	locs := patternPoints.FindAllStringIndex(game, -1)
	if len(locs) != 2 {
		err = errors.New("not 2 numbers")
		return
	}
	parsed.points[0] = game[locs[0][0]:locs[0][1]]
	parsed.points[1] = game[locs[1][0]:locs[1][1]]
	parsed.team[0] = strings.TrimSpace(game[:locs[0][0]])
	parsed.team[1] = strings.TrimSpace(game[locs[0][1]:locs[1][0]])
	return
}
// parsePoints parses a points value, which must be a plain base-10 integer.
// Any failure (including a decimal point) is reported as a "float number"
// error, since floats are the expected cause of rejection here.
func parsePoints(str string) (int, error) {
	value, convErr := strconv.Atoi(str)
	if convErr != nil {
		return value, errors.New("float number")
	}
	return value, nil
}
// parseTeamData parses both scores of a match from the team's point of view:
// teamId selects which score was scored and which was conceded.
// The points must be integers; the first parse failure is returned.
func parseTeamData(str matchData, teamId int) (game teamData, err error) {
	if game.pointsScored, err = parsePoints(str.points[teamId]); err != nil {
		return
	}
	game.pointsConceded, err = parsePoints(str.points[1-teamId])
	return
}
func NbaCup(resultSheet, toFind string) string {
if toFind == "" {
// Special case: empty team must result in an empty string.
return ""
}
// variables to collect game results
numberMatchesWon := 0
numberMatchesDrawn := 0
numberMatchesLost := 0
totalPointsScored := 0
totalPointsConceded := 0
rank := 0
// Iterate over all games.
for _, line := range strings.Split(resultSheet, ",") {
entries, err := splitMatchData(line)
if err != nil {
return fmt.Sprintf("Error(%v):%s", err, line)
}
var game teamData
if entries.team[0] == toFind {
game, err = parseTeamData(entries, 0)
} else if entries.team[1] == toFind {
game, err = parseTeamData(entries, 1)
} else {
continue
}
if err != nil {
return fmt.Sprintf("Error(%v):%s", err, line)
}
// The team to find was in the game. Add the result of the game.
delta := game.pointsScored - game.pointsConceded
switch {
case delta > 0:
numberMatchesWon++
rank += 3
case delta == 0:
numberMatchesDrawn++
rank++
case delta < 0:
numberMatchesLost++
}
totalPointsScored += game.pointsScored
totalPointsConceded += game.pointsConceded
}
// Prepare the result string in the given format
if numberMatchesWon == 0 && numberMatchesDrawn == 0 && numberMatchesLost == 0 {
return toFind + ":This team didn't play!"
} else {
return fmt.Sprintf("%s:W=%d;D=%d;L=%d;Scored=%d;Conceded=%d;Points=%d", toFind, numberMatchesWon, numberMatchesDrawn, numberMatchesLost, totalPointsScored, totalPointsConceded, rank)
}
} | 6_kyu/Ranking_NBA_teams.go | 0.622689 | 0.476641 | Ranking_NBA_teams.go | starcoder |
package query
// DerivedResults wraps the result channel ch in a Results value that
// reports the same query and process as qr. The naive operators below use
// it to layer a transformed stream over an existing result set.
func DerivedResults(qr Results, ch <-chan Result) Results {
	return &results{
		query: qr.Query(),
		proc: qr.Process(),
		res: ch,
	}
}
// NaiveFilter applies a filter to the results, forwarding every error and
// every entry the filter accepts. The source is closed once drained.
func NaiveFilter(qr Results, filter Filter) Results {
	ch := make(chan Result)
	go func() {
		defer close(ch)
		defer qr.Close()
		for e := range qr.Next() {
			// Drop only clean entries the filter rejects; errors pass through.
			if e.Error == nil && !filter.Filter(e.Entry) {
				continue
			}
			ch <- e
		}
	}()
	return DerivedResults(qr, ch)
}
// NaiveLimit truncates the results to at most limit entries. Errors are
// forwarded without counting toward the limit, and a limit of zero (or
// less) passes everything through.
func NaiveLimit(qr Results, limit int) Results {
	ch := make(chan Result)
	go func() {
		defer close(ch)
		defer qr.Close()
		sent := 0
		for e := range qr.Next() {
			ch <- e
			if e.Error != nil {
				continue
			}
			sent++
			if limit > 0 && sent >= limit {
				break
			}
		}
	}()
	return DerivedResults(qr, ch)
}
// NaiveOffset skips the first offset non-error results. Errors are
// forwarded immediately and do not count toward the offset.
func NaiveOffset(qr Results, offset int) Results {
	ch := make(chan Result)
	go func() {
		defer close(ch)
		defer qr.Close()
		sent := 0
		for e := range qr.Next() {
			if e.Error != nil {
				// Fix: forward the error and move on. Previously the error
				// fell through into the offset logic, consuming an offset
				// slot before the offset was reached and being sent a
				// second time afterwards.
				ch <- e
				continue
			}
			if sent < offset {
				sent++
				continue
			}
			ch <- e
		}
	}()
	return DerivedResults(qr, ch)
}
// NaiveOrder reorders results according to given Order.
// WARNING: this is the only non-stream friendly operation! Every entry is
// buffered before anything is emitted.
func NaiveOrder(qr Results, o Order) Results {
	ch := make(chan Result)
	var entries []Entry
	go func() {
		defer close(ch)
		defer qr.Close()
		for e := range qr.Next() {
			if e.Error != nil {
				// Fix: forward the error and skip its entry. Previously the
				// zero-value Entry of an error result was also appended,
				// sorted, and re-emitted as a bogus clean result.
				ch <- e
				continue
			}
			entries = append(entries, e.Entry)
		}
		o.Sort(entries)
		for _, e := range entries {
			ch <- Result{Entry: e}
		}
	}()
	return DerivedResults(qr, ch)
}
// NaiveQueryApply applies every part of the query q — prefix, filters,
// orders, offset and limit — to qr using the naive operators above.
func NaiveQueryApply(q Query, qr Results) Results {
	if q.Prefix != "" {
		qr = NaiveFilter(qr, FilterKeyPrefix{q.Prefix})
	}
	for _, f := range q.Filters {
		qr = NaiveFilter(qr, f)
	}
	for _, o := range q.Orders {
		qr = NaiveOrder(qr, o)
	}
	if q.Offset != 0 {
		qr = NaiveOffset(qr, q.Offset)
	}
	if q.Limit != 0 {
		// Fix: the limit must be q.Limit; q.Offset was passed here before,
		// truncating the results to the offset instead of the limit.
		qr = NaiveLimit(qr, q.Limit)
	}
	return qr
}
func ResultEntriesFrom(keys []string, vals []interface{}) []Entry {
re := make([]Entry, len(keys))
for i, k := range keys {
re[i] = Entry{Key: k, Value: vals[i]}
}
return re
} | vendor/gx/ipfs/QmXRKBQA4wXP7xWbFiZsR1GP4HV6wMDQ1aWFxZZ4uBcPX9/go-datastore/query/query_impl.go | 0.72027 | 0.407392 | query_impl.go | starcoder |
package tetra3d
import (
"math"
"github.com/kvartborg/vector"
)
// BoundingTriangles is a Node specifically for detecting a collision between any of the triangles from a mesh instance and another BoundingObject.
type BoundingTriangles struct {
	*Node
	BoundingAABB *BoundingAABB // broadphase volume sized to the mesh; re-synced in Transform()
	Mesh *Mesh
}
// NewBoundingTriangles returns a new BoundingTriangles object whose
// broadphase AABB is sized to the mesh's dimensions.
func NewBoundingTriangles(name string, mesh *Mesh) *BoundingTriangles {
	dim := mesh.Dimensions
	bt := &BoundingTriangles{
		Node: NewNode(name),
		Mesh: mesh,
	}
	bt.BoundingAABB = NewBoundingAABB("triangle broadphase aabb", dim.Width(), dim.Height(), dim.Depth())
	return bt
}
// Transform returns the world transform of the BoundingTriangles node.
// When the node transform is dirty, the broadphase AABB is re-synced: it
// adopts the node's world transform and is then offset by the mesh center
// rotated into world space.
func (bt *BoundingTriangles) Transform() Matrix4 {
	// Capture the dirty flag before calling Node.Transform(), which
	// presumably resets it — TODO confirm.
	transformDirty := bt.Node.isTransformDirty
	transform := bt.Node.Transform()
	if transformDirty {
		bt.BoundingAABB.SetWorldTransform(transform)
		rot := bt.WorldRotation().MultVec(bt.Mesh.Dimensions.Center())
		bt.BoundingAABB.MoveVec(rot)
		bt.BoundingAABB.Transform()
	}
	return transform
}
// Clone returns a new BoundingTriangles that shares the same Mesh and
// carries a clone of the underlying Node.
// NOTE(review): the freshly created BoundingAABB is not synced to the
// cloned node's transform until Transform() next runs — confirm intended.
func (bt *BoundingTriangles) Clone() INode {
	clone := NewBoundingTriangles(bt.name, bt.Mesh)
	clone.Node = bt.Node.Clone().(*Node)
	return clone
}
// AddChildren parents the provided children Nodes to the passed parent Node, inheriting its transformations and being under it in the scenegraph
// hierarchy. If the children are already parented to other Nodes, they are unparented before doing so.
func (bt *BoundingTriangles) AddChildren(children ...INode) {
	// We do this manually so that addChildren() parents the children to the Model, rather than to the Model.NodeBase.
	// (Passing bt makes bt itself — not the embedded *Node — the recorded parent.)
	bt.addChildren(bt, children...)
}
// Intersecting returns true if the BoundingTriangles object is intersecting the other specified BoundingObject.
// It is a convenience wrapper over Intersection.
func (bt *BoundingTriangles) Intersecting(other BoundingObject) bool {
	return bt.Intersection(other) != nil
}
// Intersection returns an IntersectionResult if the BoundingTriangles object is intersecting another BoundingObject. If
// no intersection is reported, Intersection returns nil. (Note that BoundingTriangles > AABB collision is buggy at the moment.)
//
// For sphere, AABB and capsule shapes the test is delegated to the other
// shape, and the resulting MTVs are flipped so they are expressed from
// this object's point of view.
func (bt *BoundingTriangles) Intersection(other BoundingObject) *IntersectionResult {
	if other == bt {
		return nil
	}
	// invertMTVs flips every intersection's MTV; the delegated tests compute
	// the vector from the other object's perspective. This replaces three
	// identical copies of the same loop.
	invertMTVs := func(result *IntersectionResult) *IntersectionResult {
		if result != nil {
			for _, inter := range result.Intersections {
				inter.MTV = inter.MTV.Invert()
			}
		}
		return result
	}
	switch otherBounds := other.(type) {
	case *BoundingAABB:
		return invertMTVs(otherBounds.Intersection(bt))
	case *BoundingSphere:
		return invertMTVs(otherBounds.Intersection(bt))
	case *BoundingTriangles:
		return btTrianglesTriangles(bt, otherBounds)
	case *BoundingCapsule:
		return invertMTVs(otherBounds.Intersection(bt))
	}
	return nil
}
// Type returns the NodeType for this object.
func (bt *BoundingTriangles) Type() NodeType {
	return NodeTypeBoundingTriangles
}

// collisionPlane is a scratch representation of the plane through a
// triangle, stored as a unit normal plus its signed distance from the
// origin along that normal.
type collisionPlane struct {
	Normal vector.Vector
	Distance float64
	VectorPool *VectorPool // scratch vectors reused across plane math
}

// newCollisionPlane returns an empty collisionPlane with its own vector pool.
func newCollisionPlane() *collisionPlane {
	return &collisionPlane{
		VectorPool: NewVectorPool(16),
	}
}
// Set recomputes the plane from triangle (v0, v1, v2): the normal is the
// normalized cross product of two edges, and the distance is v0 projected
// onto that normal.
func (plane *collisionPlane) Set(v0, v1, v2 vector.Vector) {
	first := plane.VectorPool.Sub(v1, v0)
	second := plane.VectorPool.Sub(v2, v0)
	normal := plane.VectorPool.Cross(first, second).Unit()
	distance := dot(normal, v0)
	plane.Normal = normal
	plane.Distance = distance
}

// ClosestPoint projects point onto the plane and returns the nearest
// point lying on it.
func (plane *collisionPlane) ClosestPoint(point vector.Vector) vector.Vector {
	dist := dot(plane.Normal, point) - plane.Distance
	return plane.VectorPool.Sub(point, plane.Normal.Scale(dist))[:3]
}
// colPlane is a shared scratch plane for closestPointOnTri. Because it is
// package-level mutable state used without locking, these helpers are not
// safe for concurrent use.
var colPlane = newCollisionPlane()

// closestPointOnTri returns the point on triangle (v0, v1, v2) closest to
// point. If the plane projection of point lies inside the triangle it is
// returned directly; otherwise the nearest of the three edge projections wins.
func closestPointOnTri(point, v0, v1, v2 vector.Vector) vector.Vector {
	colPlane.VectorPool.Reset()
	colPlane.Set(v0, v1, v2)
	if planePoint := colPlane.ClosestPoint(point); colPlane.pointInsideTriangle(planePoint, v0, v1, v2) {
		return planePoint
	}
	// Projection fell outside: clamp to each edge and keep the nearest result.
	ab := colPlane.closestPointOnLine(point, v0, v1)
	bc := colPlane.closestPointOnLine(point, v1, v2)
	ca := colPlane.closestPointOnLine(point, v2, v0)
	closest := ab
	closestDist := fastVectorDistanceSquared(point, ab)
	bcDist := fastVectorDistanceSquared(point, bc)
	caDist := fastVectorDistanceSquared(point, ca)
	if bcDist < closestDist {
		closest = bc
		closestDist = bcDist
	}
	if caDist < closestDist {
		closest = ca
	}
	return closest
}
// pointInsideTriangle reports whether point (assumed to already lie on
// the triangle's plane) is inside triangle (v0, v1, v2), using barycentric
// coordinates: inside when u >= 0, v >= 0 and u+v < 1.
func (plane *collisionPlane) pointInsideTriangle(point, v0, v1, v2 vector.Vector) bool {
	ca := plane.VectorPool.Sub(v2, v0)[:3]
	ba := plane.VectorPool.Sub(v1, v0)[:3]
	pa := plane.VectorPool.Sub(point, v0)[:3]
	dot00 := dot(ca, ca)
	dot01 := dot(ca, ba)
	dot02 := dot(ca, pa)
	dot11 := dot(ba, ba)
	dot12 := dot(ba, pa)
	invDenom := 1.0 / ((dot00 * dot11) - (dot01 * dot01))
	u := ((dot11 * dot02) - (dot01 * dot12)) * invDenom
	v := ((dot00 * dot12) - (dot01 * dot02)) * invDenom
	return (u >= 0) && (v >= 0) && (u+v < 1)
}
func (plane *collisionPlane) closestPointOnLine(point, start, end vector.Vector) vector.Vector {
diff := plane.VectorPool.Sub(end, start)
dotA := dot(plane.VectorPool.Sub(point, start), diff)
dotB := dot(diff, diff)
d := math.Min(math.Max(dotA/dotB, 0), 1)
return plane.VectorPool.Add(start, diff.Scale(d))
} | boundsTriangles.go | 0.900122 | 0.768168 | boundsTriangles.go | starcoder |
package stats
import (
"fmt"
"math"
"sort"
"github.com/heustis/tsp-solver-go/model"
)
// DistanceGaps tracks the distance between one vertex and each edge in the current circuit.
// From those distances, it analyzes the gaps between successive distances, sorted from smallest to largest,
// to facilitate finding statistically significant gaps, which indicates that the vertex will likely be attached to an edge prior to that gap.
type DistanceGaps struct {
	ClosestEdges []*model.DistanceToEdge // distances to each circuit edge, sorted ascending
	Gaps []float64 // Gaps[i] = ClosestEdges[i+1].Distance - ClosestEdges[i].Distance
	GapAverage float64 // mean of Gaps
	GapStandardDeviation float64 // population standard deviation of Gaps
}
// NewDistanceGaps accepts a vertex and the edges of the current circuit
// and returns a populated DistanceGaps, with the edges sorted by the
// distance increase of attaching the vertex to them.
func NewDistanceGaps(vertex model.CircuitVertex, edges []model.CircuitEdge) *DistanceGaps {
	distances := make([]*model.DistanceToEdge, len(edges))
	for i, edge := range edges {
		distances[i] = &model.DistanceToEdge{
			Vertex: vertex,
			Edge: edge,
			Distance: edge.DistanceIncrease(vertex),
		}
	}
	sort.Slice(distances, func(a, b int) bool {
		return distances[a].Distance < distances[b].Distance
	})
	stats := &DistanceGaps{ClosestEdges: distances}
	stats.processStats()
	return stats
}
// Clone creates a deep copy of a DistanceGaps, if it needs to be used in a circuit that tracks multiple copies of the circuit (in various stages of completion).
func (stats *DistanceGaps) Clone() *DistanceGaps {
	edges := make([]*model.DistanceToEdge, len(stats.ClosestEdges))
	copy(edges, stats.ClosestEdges)
	gaps := make([]float64, len(stats.Gaps))
	copy(gaps, stats.Gaps)
	return &DistanceGaps{
		ClosestEdges: edges,
		Gaps: gaps,
		GapAverage: stats.GapAverage,
		GapStandardDeviation: stats.GapStandardDeviation,
	}
}
// processStats recomputes Gaps (differences between successive sorted
// distances), their average, and their population standard deviation.
func (stats *DistanceGaps) processStats() {
	numGaps := len(stats.ClosestEdges) - 1
	count := float64(numGaps)
	stats.Gaps = make([]float64, numGaps)
	stats.GapAverage = 0
	for i := 0; i < numGaps; i++ {
		gap := stats.ClosestEdges[i+1].Distance - stats.ClosestEdges[i].Distance
		stats.Gaps[i] = gap
		stats.GapAverage += gap / count
	}
	stats.GapStandardDeviation = 0
	for i := 0; i < numGaps; i++ {
		deviation := stats.Gaps[i] - stats.GapAverage
		stats.GapStandardDeviation += deviation * deviation / count
	}
	stats.GapStandardDeviation = math.Sqrt(stats.GapStandardDeviation)
}
// UpdateStats replaces the removed edge with the two edges that result from its split, then updates the statistics for this vertex.
// The replacement keeps ClosestEdges sorted by performing a single merge
// pass over the old (already sorted) list rather than re-sorting it.
func (stats *DistanceGaps) UpdateStats(removedEdge model.CircuitEdge, edgeA model.CircuitEdge, edgeB model.CircuitEdge) {
	prevNumEdges := len(stats.ClosestEdges)
	numEdges := prevNumEdges + 1
	vertex := stats.ClosestEdges[0].Vertex
	closer := &model.DistanceToEdge{
		Vertex: vertex,
		Edge: edgeA,
		Distance: edgeA.DistanceIncrease(vertex),
	}
	farther := &model.DistanceToEdge{
		Vertex: vertex,
		Edge: edgeB,
		Distance: edgeB.DistanceIncrease(vertex),
	}
	// Ensure "closer" really is the nearer of the two new edges.
	if farther.Distance < closer.Distance {
		closer, farther = farther, closer
	}
	// Update the closest edges list - note: the list is already sorted
	updatedEdges := make([]*model.DistanceToEdge, numEdges)
	for src, dest, isCloserInList, isFartherInList := 0, 0, false, false; dest < numEdges; dest++ {
		if src >= prevNumEdges {
			// Old list exhausted: whichever new edge is still pending goes last.
			if !isCloserInList {
				updatedEdges[dest] = closer
				isCloserInList = true
			} else {
				updatedEdges[dest] = farther
				isFartherInList = true
			}
		} else {
			srcEdge := stats.ClosestEdges[src]
			if !isCloserInList && closer.Distance < srcEdge.Distance {
				updatedEdges[dest] = closer
				isCloserInList = true
			} else if !isFartherInList && farther.Distance < srcEdge.Distance {
				updatedEdges[dest] = farther
				isFartherInList = true
			} else if srcEdge.Edge == removedEdge {
				// Drop the split edge from the merged list.
				src++
				dest-- // Need to keep the destination at the same position for the next iteration, since nothing was copied this iteration.
			} else {
				updatedEdges[dest] = srcEdge
				src++
			}
		}
	}
	stats.ClosestEdges = updatedEdges
	stats.processStats()
}
// String converts the DistanceGaps to a string, with the edges printed as their index in the circuit, and vertices as their index in the initial request.
func (stats *DistanceGaps) String(vertexIndexLookup map[model.CircuitVertex]int, edgeIndexLookup map[model.CircuitEdge]int) string {
if len(stats.ClosestEdges) <= 0 {
return `{}`
}
s := fmt.Sprintf("{\r\n\t\"vertex\":%d,\r\n\t\"gapAverage\":%g,\r\n\t\"gapStdDev\":%g,\r\n\t\"closestEdges\":[",
vertexIndexLookup[stats.ClosestEdges[0].Vertex], stats.GapAverage, stats.GapStandardDeviation)
lastIndex := len(stats.ClosestEdges) - 1
for i, e := range stats.ClosestEdges {
if i == lastIndex {
s += fmt.Sprintf("{\"edge\":%d,\"distance\":%g}", edgeIndexLookup[e.Edge], e.Distance)
} else {
s += fmt.Sprintf("{\"edge\":%d,\"distance\":%g},", edgeIndexLookup[e.Edge], e.Distance)
}
}
s += "],\r\n\t\"gaps\":["
lastIndex = len(stats.Gaps) - 1
for i, gap := range stats.Gaps {
s += fmt.Sprintf("{\"gap\":%g,\"gapZScore\":%g}", gap, (gap-stats.GapAverage)/stats.GapStandardDeviation)
if i != lastIndex {
s += ","
}
}
s += "]}"
return s
} | stats/distancegaps.go | 0.764628 | 0.798462 | distancegaps.go | starcoder |
package main
/*
# Eller's maze algorithm implemented using FB Horizon's object/event model
# High level overview
For each row,
1. Compute east-west openings
1.1. randomly decide which ones (last row special case!)
1.2. merge sets
1.3. mark state as open
2. Compute north-south openings
2.1. from each set
2.1.1. pick one cell at random (skipping already picked ones)
2.1.2. mark state as open
2.1.3. decide whether to do that again.
3. Apply decisions to world
4. Advance to the next row
4.1. any cell closed to the south is removed from current set and added to brand new set
4.2. all walls are marked closed again
# Wiring
There are "cell" assemblies for each cell in one row of the maze. The overall row assembly will compute the state for one row, then move to the next row. Triggers will update references to the actual maze walls that need mutating.
A Cell assembly uses two triggers to capture pointers to the east and south wall of its cell.
┌──────┐ ┌───────┐
│ Cell ├──► East │
└──┬───┘ │Trigger│
┌──▼────┐ └───────┘
│ South │
│Trigger│
└───────┘
The Row assembly is a singly-linked list that cycles back to the control node.
┌───────┐
│Control◄───────────────────────────────────┐
│ Node │ │
└───┬───┘┌──────┐ ┌──────┐ ┌──────┐ │
└────► Cell ├─────► Cell ├─...─► Cell ├─┘
└──────┘ └──────┘ └──────┘
TODO: Describe set membership wiring
*/
import (
"context"
"flag"
"fmt"
"math/rand"
"time"
"github.com/misterikkit/automata/horizon"
"github.com/misterikkit/automata/wall"
)
// main builds one row of cell assemblies, wires them into a cycle with
// the controller, and runs the event loop until the maze is complete
// (cancel) or the 2-second deadline fires; the finished maze is printed.
func main() {
	h := flag.Int("h", 10, "height")
	w := flag.Int("w", 10, "width")
	flag.Parse()
	rand.Seed(time.Now().Unix())
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	maze := wall.NewMaze(*h, *w)
	loop := horizon.NewEventLoop()
	// One cell assembly per column; the last one is flagged so it links
	// back to the controller instead of to a next cell.
	cells := make([]horizon.Object, *w)
	for i := range cells {
		last := i == len(cells)-1
		cells[i] = horizon.NewObject(fmt.Sprintf("cell-%02d", i), Cell(last), loop)
	}
	// Workaround to simulate the moving and trigger detecting of wall objects
	row := 0
	ctrl := horizon.NewObject("controller", Controller(*h, func() {
		// Advancing to the next row rebinds every cell's wall triggers.
		row++
		updateTriggers(cells, maze, row)
	}, cancel), loop)
	updateTriggers(cells, maze, row)
	// Wire the row into a singly-linked cycle: controller -> cells -> controller.
	ctrl.Wire(horizon.Wiring{"head": cells[0]})
	for i := range cells {
		j := i + 1
		if j < len(cells) {
			cells[i].Wire(horizon.Wiring{"nextCell": cells[j]})
		} else {
			cells[i].Wire(horizon.Wiring{"nextCell": ctrl})
		}
	}
	loop.Run(ctx)
	fmt.Println(maze)
}
func updateTriggers(cells []horizon.Object, maze *wall.Maze, row int) {
for i := range cells {
col := i
cells[i].Send(cells[i], "triggerEast", func() {
maze.Open(row, col, wall.East)
})
cells[i].Send(cells[i], "triggerSouth", func() {
maze.Open(row, col, wall.South)
})
}
} | eller/main.go | 0.76454 | 0.746693 | main.go | starcoder |
package twistededwards
import (
"math/bits"
"github.com/consensys/gnark/cs/internal/curve"
)
// Point point on a twisted Edwards curve, stored as the affine pair (X, Y).
type Point struct {
	X, Y curve.Element
}
// NewPoint creates a new instance of Point from affine coordinates x and y.
func NewPoint(x, y curve.Element) Point {
	return Point{X: x, Y: y}
}
// IsOnCurve checks if a point is on the twisted Edwards curve, i.e. that
// a*x^2 + y^2 == 1 + d*x^2*y^2 for the curve parameters a and d.
func (p *Point) IsOnCurve(ecurve CurveParams) bool {
	var lhs, rhs, tmp curve.Element
	// lhs = a*x^2 + y^2
	tmp.Mul(&p.Y, &p.Y)
	lhs.Mul(&p.X, &p.X).
		Mul(&lhs, &ecurve.A).
		Add(&lhs, &tmp)
	// rhs = 1 + d*x^2*y^2
	tmp.Mul(&p.X, &p.X).
		Mul(&tmp, &p.Y).
		Mul(&tmp, &p.Y).
		Mul(&tmp, &ecurve.D)
	rhs.SetOne().Add(&rhs, &tmp)
	// TODO why do we not compare lhs and rhs directly?
	// (Both sides are converted out of Montgomery form before comparing;
	// presumably comparing the internal forms would suffice — confirm.)
	lhsreg := lhs.ToRegular()
	rhsreg := rhs.ToRegular()
	return rhsreg.Equal(&lhsreg)
}
// Add adds two points (x,y), (u,v) on a twisted Edwards curve with parameters a, d
// using the affine addition formulas:
//   x3 = (x*v + y*u) / (1 + d*x*u*y*v)
//   y3 = (y*v - a*x*u) / (1 - d*x*u*y*v)
// modifies p
func (p *Point) Add(p1, p2 *Point, ecurve CurveParams) *Point {
	var xu, yv, xv, yu, dxyuv, one, denx, deny curve.Element
	pRes := new(Point)
	xv.Mul(&p1.X, &p2.Y)
	yu.Mul(&p1.Y, &p2.X)
	pRes.X.Add(&xv, &yu)
	xu.Mul(&p1.X, &p2.X).Mul(&xu, &ecurve.A)
	yv.Mul(&p1.Y, &p2.Y)
	pRes.Y.Sub(&yv, &xu)
	// dxyuv = d*x*u*y*v, the shared term of both denominators.
	dxyuv.Mul(&xv, &yu).Mul(&dxyuv, &ecurve.D)
	one.SetOne()
	denx.Add(&one, &dxyuv)
	deny.Sub(&one, &dxyuv)
	p.X.Div(&pRes.X, &denx)
	p.Y.Div(&pRes.Y, &deny)
	return p
}
// Double doubles point (x,y) on a twisted Edwards curve with parameters a, d
// by adding the point to itself.
// modifies p
func (p *Point) Double(p1 *Point, ecurve CurveParams) *Point {
	p.Add(p1, p1, ecurve)
	return p
}
// ScalarMul scalar multiplication of a point
// p1 points on the twisted Edwards curve
// c parameters of the twisted Edwards curve
// scal scalar NOT in Montgomery form
// modifies p
func (p *Point) ScalarMul(p1 *Point, ecurve CurveParams, scalar curve.Element) *Point {
pRes := new(Point)
pRes.X.SetZero()
pRes.Y.SetOne()
const wordSize = bits.UintSize
for i := curve.NbLimbs - 1; i >= 0; i-- {
for j := 0; j < wordSize; j++ {
pRes.Double(pRes, ecurve)
b := (scalar[i] & (uint64(1) << uint64(wordSize-1-j))) >> uint64(wordSize-1-j)
if b == 1 {
pRes.Add(pRes, p1, ecurve)
}
}
}
p.X.Set(&pRes.X)
p.Y.Set(&pRes.Y)
return p
} | cs/std/reference/algebra/twisted_edwards/point.go | 0.693992 | 0.409811 | point.go | starcoder |
package timeseries
import (
"errors"
"math"
"sort"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Align aligns point timestamps to the given interval: each point is
// snapped to its time frame and skipped frames are filled with nil-valued
// points. The series is returned unchanged when the interval is
// non-positive or there are fewer than two points.
// NOTE(review): the first point (index 0) only seeds the initial frame and
// is never emitted into the aligned series — confirm this is intentional.
func (ts TimeSeries) Align(interval time.Duration) TimeSeries {
	if interval <= 0 || ts.Len() < 2 {
		return ts
	}
	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint
	for i := 1; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)
		if pointFrameTs.After(frameTs) {
			// Fill every skipped frame with a null point so the series stays dense.
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}
		alignedTs = append(alignedTs, TimePoint{Time: pointFrameTs, Value: point.Value})
		frameTs = frameTs.Add(interval)
	}
	return alignedTs
}
// DetectInterval estimates the interval between data points as the median
// of the millisecond deltas between consecutive timestamps. Returns 0 for
// series with fewer than two points.
func (ts TimeSeries) DetectInterval() time.Duration {
	if ts.Len() < 2 {
		return 0
	}
	deltas := make([]int, 0, ts.Len()-1)
	for i := 1; i < ts.Len(); i++ {
		deltas = append(deltas, int(ts[i].Time.Sub(ts[i-1].Time).Milliseconds()))
	}
	sort.Ints(deltas)
	mid := int(math.Floor(float64(len(deltas)) * 0.5))
	return time.Duration(deltas[mid]) * time.Millisecond
}
// GetTimeFrame returns the point's timestamp truncated (rounded down) to
// a multiple of the given interval.
func (p *TimePoint) GetTimeFrame(interval time.Duration) time.Time {
	return p.Time.Truncate(interval)
}
// alignDataPoints aligns the frame's time field to the given interval,
// snapping timestamps to their frames and inserting null rows for skipped
// frames. The frame is returned unchanged when the interval is
// non-positive, there are fewer than two rows, or no time field exists.
func alignDataPoints(frame *data.Frame, interval time.Duration) *data.Frame {
	if interval <= 0 || frame.Rows() < 2 {
		return frame
	}
	timeFieldIdx := getTimeFieldIndex(frame)
	if timeFieldIdx < 0 {
		return frame
	}
	var frameTs = getPointTimeFrame(getTimestampAt(frame, 0), interval)
	var pointFrameTs *time.Time
	// pointsInserted tracks how many null rows were added so that original
	// row indices can be mapped to their shifted positions.
	var pointsInserted = 0
	for i := 1; i < frame.Rows(); i++ {
		pointFrameTs = getPointTimeFrame(getTimestampAt(frame, i), interval)
		if pointFrameTs == nil || frameTs == nil {
			continue
		}
		if pointFrameTs.After(*frameTs) {
			// Fill every skipped frame with a null row.
			for frameTs.Before(*pointFrameTs) {
				insertAt := i + pointsInserted
				err := insertNullPointAt(frame, *frameTs, insertAt)
				if err != nil {
					backend.Logger.Debug("Error inserting null point", "error", err)
				}
				*frameTs = frameTs.Add(interval)
				pointsInserted++
			}
		}
		setTimeAt(frame, *pointFrameTs, i+pointsInserted)
		*frameTs = frameTs.Add(interval)
	}
	return frame
}
// getPointTimeFrame truncates ts down to a multiple of interval, or
// returns nil when ts itself is nil.
func getPointTimeFrame(ts *time.Time, interval time.Duration) *time.Time {
	if ts == nil {
		return nil
	}
	frame := ts.Truncate(interval)
	return &frame
}
// getTimeFieldIndex returns the index of the first time-typed field in
// the frame, or -1 when there is none.
func getTimeFieldIndex(frame *data.Frame) int {
	for i, field := range frame.Fields {
		if field.Type() == data.FieldTypeTime {
			return i
		}
	}
	return -1
}
// getTimestampAt returns the timestamp stored at the given row of the
// frame's time field, or nil when there is no time field or the stored
// value is not a time.Time.
func getTimestampAt(frame *data.Frame, index int) *time.Time {
	fieldIdx := getTimeFieldIndex(frame)
	if fieldIdx < 0 {
		return nil
	}
	if ts, ok := frame.Fields[fieldIdx].At(index).(time.Time); ok {
		return &ts
	}
	return nil
}
// insertNullPointAt inserts a row at index holding frameTs in every
// time-typed field and nil in every nullable field. Returns an error when
// a non-nullable, non-time field is encountered.
func insertNullPointAt(frame *data.Frame, frameTs time.Time, index int) error {
	for _, field := range frame.Fields {
		switch {
		case field.Type() == data.FieldTypeTime:
			field.Insert(index, frameTs)
		case field.Type().Nullable():
			field.Insert(index, nil)
		default:
			return errors.New("field is not nullable")
		}
	}
	return nil
}
func setTimeAt(frame *data.Frame, frameTs time.Time, index int) {
for _, field := range frame.Fields {
if field.Type() == data.FieldTypeTime {
field.Insert(index, frameTs)
}
}
} | pkg/timeseries/timeseries.go | 0.728072 | 0.467271 | timeseries.go | starcoder |
package bn256
// For details of the algorithms used, see "Multiplication and Squaring on
// Pairing-Friendly Fields, Devegili et al.
// http://eprint.iacr.org/2006/471.pdf.
// gfP2 implements a field of size p² as a quadratic extension of the base field
// where i²=-1.
type gfP2 struct {
	X, Y gfP // the element's value is X·i + Y
}
// gfP2Decode returns a copy of in with both coordinates converted out of
// Montgomery form.
func gfP2Decode(in *gfP2) *gfP2 {
	var out gfP2
	montDecode(&out.X, &in.X)
	montDecode(&out.Y, &in.Y)
	return &out
}
// String renders the element as "(x, y)", i.e. its two coordinates.
func (e *gfP2) String() string {
	return "(" + e.X.String() + ", " + e.Y.String() + ")"
}
// Set copies a into e and returns e.
func (e *gfP2) Set(a *gfP2) *gfP2 {
	e.X.Set(&a.X)
	e.Y.Set(&a.Y)
	return e
}

// SetZero sets e to the additive identity 0 and returns e.
func (e *gfP2) SetZero() *gfP2 {
	e.X = gfP{0}
	e.Y = gfP{0}
	return e
}

// SetOne sets e to the multiplicative identity 1 and returns e.
func (e *gfP2) SetOne() *gfP2 {
	e.X = gfP{0}
	e.Y = *newGFp(1)
	return e
}

// IsZero reports whether e is the additive identity.
func (e *gfP2) IsZero() bool {
	zero := gfP{0}
	return e.X == zero && e.Y == zero
}

// IsOne reports whether e is the multiplicative identity.
func (e *gfP2) IsOne() bool {
	zero, one := gfP{0}, *newGFp(1)
	return e.X == zero && e.Y == one
}

// Conjugate sets e to the conjugate of a (negating the i coefficient) and
// returns e.
func (e *gfP2) Conjugate(a *gfP2) *gfP2 {
	e.Y.Set(&a.Y)
	gfpNeg(&e.X, &a.X)
	return e
}

// Neg sets e = -a and returns e.
func (e *gfP2) Neg(a *gfP2) *gfP2 {
	gfpNeg(&e.X, &a.X)
	gfpNeg(&e.Y, &a.Y)
	return e
}

// Add sets e = a + b (component-wise) and returns e.
func (e *gfP2) Add(a, b *gfP2) *gfP2 {
	gfpAdd(&e.X, &a.X, &b.X)
	gfpAdd(&e.Y, &a.Y, &b.Y)
	return e
}

// Sub sets e = a - b (component-wise) and returns e.
func (e *gfP2) Sub(a, b *gfP2) *gfP2 {
	gfpSub(&e.X, &a.X, &b.X)
	gfpSub(&e.Y, &a.Y, &b.Y)
	return e
}
// Mul sets e = a*b and returns e. Writing a = ax·i + ay and b = bx·i + by
// with i² = -1, the product is (ax·by + ay·bx)·i + (ay·by - ax·bx).
// See "Multiplication and Squaring in Pairing-Friendly Fields",
// http://eprint.iacr.org/2006/471.pdf
func (e *gfP2) Mul(a, b *gfP2) *gfP2 {
	tx, t := &gfP{}, &gfP{}
	gfpMul(tx, &a.X, &b.Y)
	gfpMul(t, &b.X, &a.Y)
	gfpAdd(tx, tx, t)
	ty := &gfP{}
	gfpMul(ty, &a.Y, &b.Y)
	gfpMul(t, &a.X, &b.X)
	gfpSub(ty, ty, t)
	e.X.Set(tx)
	e.Y.Set(ty)
	return e
}
// MulScalar sets e = b·a, scaling both coordinates of a by the base-field
// element b, and returns e.
func (e *gfP2) MulScalar(a *gfP2, b *gfP) *gfP2 {
	gfpMul(&e.X, &a.X, b)
	gfpMul(&e.Y, &a.Y, b)
	return e
}
// MulXi sets e=ξa where ξ=i+9 and then returns e.
// (xi+y)(i+9) = (9x+y)i+(9y-x); the repeated doublings compute the
// factor 9 with additions only, avoiding a full multiplication.
func (e *gfP2) MulXi(a *gfP2) *gfP2 {
	// tx = 9x + y
	tx := &gfP{}
	gfpAdd(tx, &a.X, &a.X)
	gfpAdd(tx, tx, tx)
	gfpAdd(tx, tx, tx)
	gfpAdd(tx, tx, &a.X)
	gfpAdd(tx, tx, &a.Y)
	// ty = 9y - x
	ty := &gfP{}
	gfpAdd(ty, &a.Y, &a.Y)
	gfpAdd(ty, ty, ty)
	gfpAdd(ty, ty, ty)
	gfpAdd(ty, ty, &a.Y)
	gfpSub(ty, ty, &a.X)
	e.X.Set(tx)
	e.Y.Set(ty)
	return e
}
// Square sets e = a² and returns e using complex squaring,
// (xi+Y)² = (X+Y)(Y-X) + 2*i*X*Y, which needs only two multiplications.
func (e *gfP2) Square(a *gfP2) *gfP2 {
	tx, ty := &gfP{}, &gfP{}
	gfpSub(tx, &a.Y, &a.X)
	gfpAdd(ty, &a.X, &a.Y)
	gfpMul(ty, tx, ty)
	gfpMul(tx, &a.X, &a.Y)
	gfpAdd(tx, tx, tx)
	e.X.Set(tx)
	e.Y.Set(ty)
	return e
}
func (e *gfP2) Invert(a *gfP2) *gfP2 {
// See "Implementing cryptographic pairings", M. Scott, section 3.2.
// ftp://192.168.3.11/pub/crypto/pairings.pdf
t1, t2 := &gfP{}, &gfP{}
gfpMul(t1, &a.X, &a.X)
gfpMul(t2, &a.Y, &a.Y)
gfpAdd(t1, t1, t2)
inv := &gfP{}
inv.Invert(t1)
gfpNeg(t1, &a.X)
gfpMul(&e.X, t1, inv)
gfpMul(&e.Y, &a.Y, inv)
return e
} | gfp2.go | 0.817246 | 0.434161 | gfp2.go | starcoder |
package blockchain
import (
"bytes"
"encoding/base64"
"encoding/json"
"errors"
)
// BlockChain object to be instantiated and to have Blocks appended to its Genesis Block
type BlockChain struct {
	Chain []Block // blocks, starting with the genesis block
	Difficulty int32 // mining difficulty applied to every mined block
	PendingTransactions TransactionList // transactions waiting to be mined into a block
	MiningReward float32 // amount credited to the miner of each mined block
}
// NewBlockChain instantiates a new BlockChain with difficulty d and
// mining reward mr, seeded with a genesis block holding no transactions.
func NewBlockChain(d int32, mr float32) *BlockChain {
	emptyList := TransactionList{}
	genesis := NewBlock(emptyList)
	return &BlockChain{
		Chain: []Block{genesis},
		Difficulty: d,
		PendingTransactions: emptyList,
		MiningReward: mr,
	}
}
// GetLatestBlock returns the final Block in the BlockChain.
// The chain always contains at least the genesis block (see
// NewBlockChain), so the index is safe.
func (bc BlockChain) GetLatestBlock() Block {
	return bc.Chain[len(bc.Chain)-1]
}
// MineTransactions mines the current pending Transactions into a new
// block appended to the chain, then resets the pending list to a single
// coinbase transaction rewarding the mining address (which is paid out
// when the next block is mined).
func (bc *BlockChain) MineTransactions(address []byte) {
	b := NewBlock(bc.PendingTransactions)
	b.PreviousHash = bc.GetLatestBlock().Hash
	b.MineBlock(bc.Difficulty)
	bc.Chain = append(bc.Chain, b)
	// Reward the address who mined the transaction
	bc.PendingTransactions = TransactionList{NewTransaction(nil, address, bc.MiningReward)}
}
// PushTransactions appends transactions to the pending list; use
// MineTransactions to process them. Each transaction must carry both a
// from and a to address and must verify; on the first failure nothing is
// appended and an error is returned.
func (bc *BlockChain) PushTransactions(tx ...Transaction) error {
	for _, candidate := range tx {
		switch {
		case candidate.FromAddress == nil, candidate.ToAddress == nil:
			return errors.New("must include to and from address")
		case !candidate.VerifyTransaction():
			return errors.New("unverified transaction")
		}
	}
	bc.PendingTransactions = append(bc.PendingTransactions, tx...)
	return nil
}
// GetBalance walks every block from the genesis block onward and returns
// the net balance of the given address. Transfers where the address is
// both sender and receiver cancel out and are skipped.
func (bc *BlockChain) GetBalance(address []byte) float32 {
	var balance float32
	for _, block := range bc.Chain {
		for _, tx := range block.Transactions {
			isSender := bytes.Equal(tx.FromAddress, address)
			isReceiver := bytes.Equal(tx.ToAddress, address)
			switch {
			case isSender && !isReceiver:
				balance -= tx.Amount
			case isReceiver && !isSender:
				balance += tx.Amount
			}
		}
	}
	return balance
}
// GetAllBalances tallies the balance of every address seen anywhere in
// the chain, from genesis onward, and returns them as an indented JSON
// object keyed by the base64 form of each address. Coinbase transactions
// (nil sender) are tallied under the "Mining rewards awarded" key.
func (bc *BlockChain) GetAllBalances() string {
	balances := map[string]float32{}
	for _, block := range bc.Chain {
		for _, tx := range block.Transactions {
			from := "Mining rewards awarded"
			if tx.FromAddress != nil {
				from = base64.StdEncoding.EncodeToString(tx.FromAddress)
			}
			balances[from] -= tx.Amount
			balances[base64.StdEncoding.EncodeToString(tx.ToAddress)] += tx.Amount
		}
	}
	j, _ := json.MarshalIndent(balances, "", " ")
	return string(j)
}
// String outputs the BlockChain and its underlying Blocks in JSON string format.
// A marshalling failure yields an empty string — identical to the previous
// behavior (string(nil) == ""), but now the error is handled explicitly
// instead of being discarded with `_ = err`.
func (bc BlockChain) String() string {
	s, err := json.MarshalIndent(bc, "", " ")
	if err != nil {
		return ""
	}
	return string(s)
}
// Verify is used to verify the integrity of the BlockChain. From the last Block to Genesis, the PreviousHash field
// should be linked to the proper prior Block's Hash field. Similar to a singly linked list.
func (bc BlockChain) Verify() bool {
if len(bc.Chain) == 0 {
return true
}
for i := len(bc.Chain) - 2; i >= 0; i-- {
currentBlock := bc.Chain[i]
lastBlock := bc.Chain[i+1]
if !bytes.Equal(currentBlock.Hash, currentBlock.ReCalculateHash()) {
return false
}
if !bytes.Equal(currentBlock.Hash, lastBlock.PreviousHash) {
return false
}
if !currentBlock.CheckTransactions() {
return false
}
}
return true
} | internal/blockchain/blockchain.go | 0.737536 | 0.450903 | blockchain.go | starcoder |
package adapt
import (
"reflect"
"github.com/nobuenhombre/suikat/pkg/ge"
)
// Check compares reflect.Value with a string representation of the expected type.
// If the type does not match the expected one, the ge.MismatchError{} error is returned.
// Fix: an invalid (zero) reflect.Value — e.g. reflect.ValueOf(nil) — is
// reported as a mismatch with Actual "invalid" instead of panicking on
// val.Type().
func Check(val reflect.Value, expectType string) error {
	actual := "invalid"
	if val.IsValid() {
		actual = val.Type().String()
	}
	if actual != expectType {
		return ge.Pin(&ge.MismatchError{
			ComparedItems: "val.Type().String() vs expectType",
			Expected:      expectType,
			Actual:        actual,
		})
	}
	return nil
}
// typeNameOf returns the dynamic type name of v, or "nil" for a nil
// interface. Shared by the conversion helpers below for error reporting.
func typeNameOf(v interface{}) string {
	if t := reflect.TypeOf(v); t != nil {
		return t.String()
	}
	return "nil"
}

// Bool convert v interface{} into bool.
// If v can't be converted - return ge.MismatchError{}.
// Fix: a direct type assertion replaces the reflect round-trip
// (reflect.ValueOf(v).Interface()), which panicked when v was nil.
func Bool(v interface{}) (bool, error) {
	result, ok := v.(bool)
	if !ok {
		return false, ge.Pin(&ge.MismatchError{
			ComparedItems: "val.Type().String() vs bool",
			Expected:      "bool",
			Actual:        typeNameOf(v),
		})
	}
	return result, nil
}

// Int convert v interface{} into int.
// If v can't be converted - return ge.MismatchError{}.
// Fix: direct assertion; nil input no longer panics.
func Int(v interface{}) (int, error) {
	result, ok := v.(int)
	if !ok {
		return 0, ge.Pin(&ge.MismatchError{
			ComparedItems: "val.Type().String() vs int",
			Expected:      "int",
			Actual:        typeNameOf(v),
		})
	}
	return result, nil
}

// String convert v interface{} into string.
// If v can't be converted - return ge.MismatchError{}.
// Fix: direct assertion; nil input no longer panics.
func String(v interface{}) (string, error) {
	result, ok := v.(string)
	if !ok {
		return "", ge.Pin(&ge.MismatchError{
			ComparedItems: "val.Type().String() vs string",
			Expected:      "string",
			Actual:        typeNameOf(v),
		})
	}
	return result, nil
}
// IsNil reports whether i is nil: either a bare nil interface or a typed
// nil chan, func, map, pointer, slice or unsafe pointer. Value kinds
// (ints, strings, structs, arrays, ...) are never nil.
// Fix: reflect.Array was removed from the case list — reflect.Value.IsNil
// panics for arrays, so any array argument crashed; Func and
// UnsafePointer (both valid IsNil kinds) were added.
func IsNil(i interface{}) bool {
	if i == nil {
		return true
	}
	// nolint: exhaustive
	switch reflect.TypeOf(i).Kind() {
	case reflect.Ptr, reflect.Map, reflect.Chan, reflect.Slice,
		reflect.Func, reflect.UnsafePointer:
		return reflect.ValueOf(i).IsNil()
	default:
		return false
	}
}
package vterrors
/*
Vitess uses canonical error codes for error reporting. This is based
on years of industry experience with error reporting. The idea is
that errors should be classified into a small set of errors (10 or so)
with very specific meaning. Each error has a code, and a message. When
errors are passed around (even through RPCs), the code is
propagated. To handle errors, only the code should be looked at (and
not string-matching on the error message).
For instance, see this document for the Google Cloud Error Codes.
https://cloud.google.com/datastore/docs/concepts/errors
Vitess defines the error codes in /proto/vtrpc.proto, along with an
RPCError message that can be used to transmit errors through RPCs in
the message payloads.
Vitess then defines the VtError interface, for all errors that have a code.
See vterrors.go in this library.
Vitess also defines a VitessError error implementation, that can wrap
any error and add a code to it.
To easily transmit these codes through gRPC, we map these codes to
gRPC error codes in grpc.go, in this library. So if a gRPC call only
returns an error, we return a gRPC error with the right gRPC error
code. If a gRPC call needs to return both an error and some data (like
vtgateservice.Execute that can return an updated Session along with
the error), we can just return an RPCError in the result.
Some libraries define their own error structures that implement the
VtError interface. Usually, it is to add extra data to it. For an
example, see ../tabletserver/tablet_error.go that adds the SQL error
codes to the error structure. These SQL errors however are all mapped
to their appropriate canonical error code, see the function NewTabletErrorSQL
in that file for the mapping.
When transmitting any error through RPC boundaries, we are careful to
always preserve the error code. When augmenting / aggregating errors,
we also preserve the error codes:
- See WithPrefix and WithSuffix in this package for augmentation.
- See aggregate.go in this package for aggregation.
*/ | vendor/github.com/youtube/vitess/go/vt/vterrors/doc.go | 0.821295 | 0.524577 | doc.go | starcoder |
package config
/*
CONFIGURATION FILE
------------------
The Git configuration file contains a number of variables that affect
the Git commands' behavior. The `.git/config` file in each repository
is used to store the configuration for that repository, and
`$HOME/.gitconfig` is used to store a per-user configuration as
fallback values for the `.git/config` file. The file `/etc/gitconfig`
can be used to store a system-wide default configuration.
The configuration variables are used by both the Git plumbing
and the porcelains. The variables are divided into sections, wherein
the fully qualified variable name of the variable itself is the last
dot-separated segment and the section name is everything before the last
dot. The variable names are case-insensitive, allow only alphanumeric
characters and `-`, and must start with an alphabetic character. Some
variables may appear multiple times; we say then that the variable is
multivalued.
Syntax
~~~~~~
The syntax is fairly flexible and permissive; whitespaces are mostly
ignored. The '#' and ';' characters begin comments to the end of line,
blank lines are ignored.
The file consists of sections and variables. A section begins with
the name of the section in square brackets and continues until the next
section begins. Section names are case-insensitive. Only alphanumeric
characters, `-` and `.` are allowed in section names. Each variable
must belong to some section, which means that there must be a section
header before the first setting of a variable.
Sections can be further divided into subsections. To begin a subsection
put its name in double quotes, separated by space from the section name,
in the section header, like in the example below:
--------
[section "subsection"]
--------
Subsection names are case sensitive and can contain any characters except
newline (doublequote `"` and backslash can be included by escaping them
as `\"` and `\\`, respectively). Section headers cannot span multiple
lines. Variables may belong directly to a section or to a given subsection.
You can have `[section]` if you have `[section "subsection"]`, but you
don't need to.
There is also a deprecated `[section.subsection]` syntax. With this
syntax, the subsection name is converted to lower-case and is also
compared case sensitively. These subsection names follow the same
restrictions as section names.
All the other lines (and the remainder of the line after the section
header) are recognized as setting variables, in the form
'name = value' (or just 'name', which is a short-hand to say that
the variable is the boolean "true").
The variable names are case-insensitive, allow only alphanumeric characters
and `-`, and must start with an alphabetic character.
A line that defines a value can be continued to the next line by
ending it with a `\`; the backslash and the end-of-line are
stripped. Leading whitespaces after 'name =', the remainder of the
line after the first comment character '#' or ';', and trailing
whitespaces of the line are discarded unless they are enclosed in
double quotes. Internal whitespaces within the value are retained
verbatim.
Inside double quotes, double quote `"` and backslash `\` characters
must be escaped: use `\"` for `"` and `\\` for `\`.
The following escape sequences (beside `\"` and `\\`) are recognized:
`\n` for newline character (NL), `\t` for horizontal tabulation (HT, TAB)
and `\b` for backspace (BS). Other char escape sequences (including octal
escape sequences) are invalid.
Includes
~~~~~~~~
You can include one config file from another by setting the special
`include.path` variable to the name of the file to be included. The
variable takes a pathname as its value, and is subject to tilde
expansion.
The
included file is expanded immediately, as if its contents had been
found at the location of the include directive. If the value of the
`include.path` variable is a relative path, the path is considered to be
relative to the configuration file in which the include directive was
found. See below for examples.
Example
~~~~~~~
# Core variables
[core]
; Don't trust file modes
filemode = false
# Our diff algorithm
[diff]
external = /usr/local/bin/diff-wrapper
renames = true
[branch "devel"]
remote = origin
merge = refs/heads/devel
# Proxy settings
[core]
gitProxy="ssh" for "kernel.org"
gitProxy=default-proxy ; for the rest
[include]
path = /path/to/foo.inc ; include by absolute path
path = foo ; expand "foo" relative to the current file
path = ~/foo ; expand "foo" in your `$HOME` directory
Values
~~~~~~
Values of many variables are treated as a simple string, but there
are variables that take values of specific types and there are rules
as to how to spell them.
boolean::
When a variable is said to take a boolean value, many
synonyms are accepted for 'true' and 'false'; these are all
case-insensitive.
true;; Boolean true can be spelled as `yes`, `on`, `true`,
or `1`. Also, a variable defined without `= <value>`
is taken as true.
false;; Boolean false can be spelled as `no`, `off`,
`false`, or `0`.
+
When converting value to the canonical form using `--bool` type
specifier; 'git config' will ensure that the output is "true" or
"false" (spelled in lowercase).
integer::
The value for many variables that specify various sizes can
be suffixed with `k`, `M`,... to mean "scale the number by
1024", "by 1024x1024", etc.
color::
The value for a variable that takes a color is a list of
colors (at most two, one for foreground and one for background)
and attributes (as many as you want), separated by spaces.
+
The basic colors accepted are `normal`, `black`, `red`, `green`, `yellow`,
`blue`, `magenta`, `cyan` and `white`. The first color given is the
foreground; the second is the background.
+
Colors may also be given as numbers between 0 and 255; these use ANSI
256-color mode (but note that not all terminals may support this). If
your terminal supports it, you may also specify 24-bit RGB values as
hex, like `#ff0ab3`.
+
From: https://git-scm.com/docs/git-config
The accepted attributes are `bold`, `dim`, `ul`, `blink`, `reverse`,
`italic`, and `strike` (for crossed-out or "strikethrough" letters).
The position of any attributes with respect to the colors
(before, after, or in between), doesn't matter. Specific attributes may
be turned off by prefixing them with `no` or `no-` (e.g., `noreverse`,
`no-ul`, etc).
+
For git's pre-defined color slots, the attributes are meant to be reset
at the beginning of each item in the colored output. So setting
`color.decorate.branch` to `black` will paint that branch name in a
plain `black`, even if the previous thing on the same output line (e.g.
opening parenthesis before the list of branch names in `log --decorate`
output) is set to be painted with `bold` or some other attribute.
However, custom log formats may do more complicated and layered
coloring, and the negated forms may be useful there.
pathname::
A variable that takes a pathname value can be given a
string that begins with "`~/`" or "`~user/`", and the usual
tilde expansion happens to such a string: `~/`
is expanded to the value of `$HOME`, and `~user/` to the
specified user's home directory.
From:
https://raw.githubusercontent.com/git/git/659889482ac63411daea38b2c3d127842ea04e4d/Documentation/config.txt
*/ | vendor/gopkg.in/src-d/go-git.v4/plumbing/format/config/doc.go | 0.731155 | 0.685555 | doc.go | starcoder |
package cloud
import (
"encoding/json"
"fmt"
"testing"
"github.com/ingrammicro/cio/api/types"
"github.com/ingrammicro/cio/utils"
"github.com/stretchr/testify/assert"
)
// CreateTemporaryArchiveMocked test mocked function
func CreateTemporaryArchiveMocked(t *testing.T, temporaryArchiveIn *types.TemporaryArchive) *types.TemporaryArchive {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload derived from the fixture
	payload, err := utils.ItemConvertParams(*temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")

	// exercise the call and verify the round trip
	mock.On("Post", APIPathPluginsToscaTemporaryArchives, payload).Return(response, 200, nil)
	temporaryArchiveOut, err := svc.CreateTemporaryArchive(payload)
	a.Nil(err, "Error creating temporary archive")
	a.Equal(temporaryArchiveIn, temporaryArchiveOut, "CreateTemporaryArchive returned different temporary archive")

	return temporaryArchiveOut
}
// CreateTemporaryArchiveFailErrMocked test mocked function
func CreateTemporaryArchiveFailErrMocked(
	t *testing.T,
	temporaryArchiveIn *types.TemporaryArchive,
) *types.TemporaryArchive {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")
	response, err := json.Marshal(temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")

	// the mocked transport reports an error; the service must surface it
	mock.On("Post", APIPathPluginsToscaTemporaryArchives, payload).Return(response, 200, fmt.Errorf("mocked error"))
	temporaryArchiveOut, err := svc.CreateTemporaryArchive(payload)
	a.NotNil(err, "We are expecting an error")
	a.Nil(temporaryArchiveOut, "Expecting nil output")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return temporaryArchiveOut
}
// CreateTemporaryArchiveFailStatusMocked test mocked function
func CreateTemporaryArchiveFailStatusMocked(
	t *testing.T,
	temporaryArchiveIn *types.TemporaryArchive,
) *types.TemporaryArchive {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")
	response, err := json.Marshal(temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")

	// a non-2xx status must be converted into an error by the service
	mock.On("Post", APIPathPluginsToscaTemporaryArchives, payload).Return(response, 499, nil)
	temporaryArchiveOut, err := svc.CreateTemporaryArchive(payload)
	a.NotNil(err, "We are expecting an status code error")
	a.Nil(temporaryArchiveOut, "Expecting nil output")
	a.Contains(err.Error(), "499", "Error should contain http code 499")

	return temporaryArchiveOut
}
// CreateTemporaryArchiveFailJSONMocked test mocked function
func CreateTemporaryArchiveFailJSONMocked(
	t *testing.T,
	temporaryArchiveIn *types.TemporaryArchive,
) *types.TemporaryArchive {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload derived from the fixture
	payload, err := utils.ItemConvertParams(*temporaryArchiveIn)
	a.Nil(err, "TemporaryArchive test data corrupted")

	// deliberately malformed JSON to trip the decoder
	badJSON := []byte{10, 20, 30}

	mock.On("Post", APIPathPluginsToscaTemporaryArchives, payload).Return(badJSON, 200, nil)
	temporaryArchiveOut, err := svc.CreateTemporaryArchive(payload)
	a.NotNil(err, "We are expecting a marshalling error")
	a.Nil(temporaryArchiveOut, "Expecting nil output")
	a.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return temporaryArchiveOut
}
// UploadTemporaryArchiveMocked test mocked function
func UploadTemporaryArchiveMocked(t *testing.T, temporaryArchiveIn *types.TemporaryArchive) {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	filePath := "fakeURLToFile"
	uploadURL := temporaryArchiveIn.UploadURL

	// a successful upload returns no body and no error
	var empty []uint8
	mock.On("PutFile", filePath, uploadURL).Return(empty, 200, nil)
	err = svc.UploadTemporaryArchive(filePath, uploadURL)
	a.Nil(err, "Error uploading temporary archive file")
}
// UploadTemporaryArchiveFailStatusMocked test mocked function
func UploadTemporaryArchiveFailStatusMocked(t *testing.T, temporaryArchiveIn *types.TemporaryArchive) {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	filePath := "fakeURLToFile"
	uploadURL := temporaryArchiveIn.UploadURL

	// a non-2xx status must be converted into an error by the service
	var empty []uint8
	mock.On("PutFile", filePath, uploadURL).Return(empty, 403, nil)
	err = svc.UploadTemporaryArchive(filePath, uploadURL)
	a.NotNil(err, "We are expecting an error")
}
// UploadTemporaryArchiveFailErrMocked test mocked function
func UploadTemporaryArchiveFailErrMocked(t *testing.T, temporaryArchiveIn *types.TemporaryArchive) {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	filePath := "fakeURLToFile"
	uploadURL := temporaryArchiveIn.UploadURL

	// the mocked transport reports an error; the service must surface it
	var empty []uint8
	mock.On("PutFile", filePath, uploadURL).Return(empty, 403, fmt.Errorf("mocked error"))
	err = svc.UploadTemporaryArchive(filePath, uploadURL)
	a.NotNil(err, "We are expecting an error")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}
// CreateTemporaryArchiveImportMocked test mocked function
func CreateTemporaryArchiveImportMocked(
	t *testing.T,
	temporaryArchiveID string,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemporaryArchiveService(cs)
	assert.Nil(err, "Couldn't load temporaryArchive service")
	assert.NotNil(ds, "TemporaryArchive service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*temporaryArchiveImportIn)
	// Fix: this error was previously dropped (the json.Marshal call below
	// silently overwrote err); assert it like every sibling helper does.
	assert.Nil(err, "TemporaryArchiveImport test data corrupted")
	// to json
	dOut, err := json.Marshal(temporaryArchiveImportIn)
	assert.Nil(err, "TemporaryArchiveImport test data corrupted")
	// call service
	cs.On("Post", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveID), mapIn).
		Return(dOut, 200, nil)
	temporaryArchiveImportOut, err := ds.CreateTemporaryArchiveImport(temporaryArchiveID, mapIn)
	assert.Nil(err, "Error creating temporary archive import")
	assert.Equal(
		temporaryArchiveImportIn,
		temporaryArchiveImportOut,
		"CreateTemporaryArchiveImport returned different temporary archive import",
	)
	return temporaryArchiveImportOut
}
// CreateTemporaryArchiveImportFailErrMocked test mocked function
func CreateTemporaryArchiveImportFailErrMocked(
	t *testing.T,
	temporaryArchiveID string,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")
	response, err := json.Marshal(temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// the mocked transport reports an error; the service must surface it
	mock.On("Post", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveID), payload).
		Return(response, 200, fmt.Errorf("mocked error"))
	temporaryArchiveImportOut, err := svc.CreateTemporaryArchiveImport(temporaryArchiveID, payload)
	a.NotNil(err, "We are expecting an error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return temporaryArchiveImportOut
}
// CreateTemporaryArchiveImportFailStatusMocked test mocked function
func CreateTemporaryArchiveImportFailStatusMocked(
	t *testing.T,
	temporaryArchiveID string,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")
	response, err := json.Marshal(temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// a non-2xx status must be converted into an error by the service
	mock.On("Post", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveID), payload).
		Return(response, 499, nil)
	temporaryArchiveImportOut, err := svc.CreateTemporaryArchiveImport(temporaryArchiveID, payload)
	a.NotNil(err, "We are expecting an status code error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Contains(err.Error(), "499", "Error should contain http code 499")

	return temporaryArchiveImportOut
}
// CreateTemporaryArchiveImportFailJSONMocked test mocked function
func CreateTemporaryArchiveImportFailJSONMocked(
	t *testing.T,
	temporaryArchiveID string,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload derived from the fixture
	payload, err := utils.ItemConvertParams(*temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// deliberately malformed JSON to trip the decoder
	badJSON := []byte{10, 20, 30}

	mock.On("Post", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveID), payload).
		Return(badJSON, 200, nil)
	temporaryArchiveImportOut, err := svc.CreateTemporaryArchiveImport(temporaryArchiveID, payload)
	a.NotNil(err, "We are expecting a marshalling error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return temporaryArchiveImportOut
}
// GetTemporaryArchiveImportMocked test mocked function
func GetTemporaryArchiveImportMocked(
	t *testing.T,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// exercise the call and verify the round trip
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveImportIn.ID)).
		Return(response, 200, nil)
	temporaryArchiveImportOut, err := svc.GetTemporaryArchiveImport(temporaryArchiveImportIn.ID)
	a.Nil(err, "Error getting temporary archive import")
	a.Equal(
		*temporaryArchiveImportIn,
		*temporaryArchiveImportOut,
		"GetTemporaryArchiveImport returned different temporary archive import",
	)

	return temporaryArchiveImportOut
}
// GetTemporaryArchiveImportFailErrMocked test mocked function
func GetTemporaryArchiveImportFailErrMocked(
	t *testing.T,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// the mocked transport reports an error; the service must surface it
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveImportIn.ID)).
		Return(response, 200, fmt.Errorf("mocked error"))
	temporaryArchiveImportOut, err := svc.GetTemporaryArchiveImport(temporaryArchiveImportIn.ID)
	a.NotNil(err, "We are expecting an error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return temporaryArchiveImportOut
}
// GetTemporaryArchiveImportFailStatusMocked test mocked function
func GetTemporaryArchiveImportFailStatusMocked(
	t *testing.T,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveImportIn)
	a.Nil(err, "TemporaryArchiveImport test data corrupted")

	// a non-2xx status must be converted into an error by the service
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveImportIn.ID)).
		Return(response, 499, nil)
	temporaryArchiveImportOut, err := svc.GetTemporaryArchiveImport(temporaryArchiveImportIn.ID)
	a.NotNil(err, "We are expecting an status code error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Contains(err.Error(), "499", "Error should contain http code 499")

	return temporaryArchiveImportOut
}
// GetTemporaryArchiveImportFailJSONMocked test mocked function
func GetTemporaryArchiveImportFailJSONMocked(
	t *testing.T,
	temporaryArchiveImportIn *types.TemporaryArchiveImport,
) *types.TemporaryArchiveImport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// deliberately malformed JSON to trip the decoder
	badJSON := []byte{10, 20, 30}

	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveImport, temporaryArchiveImportIn.ID)).
		Return(badJSON, 200, nil)
	temporaryArchiveImportOut, err := svc.GetTemporaryArchiveImport(temporaryArchiveImportIn.ID)
	a.NotNil(err, "We are expecting a marshalling error")
	a.Nil(temporaryArchiveImportOut, "Expecting nil output")
	a.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return temporaryArchiveImportOut
}
// CreateTemporaryArchiveExportMocked test mocked function
func CreateTemporaryArchiveExportMocked(
	t *testing.T,
	temporaryArchiveExportIn *types.TemporaryArchiveExport,
) *types.TemporaryArchiveExport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")
	response, err := json.Marshal(temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")

	// exercise the call and verify the round trip
	mock.On("Post", APIPathPluginsToscaTemporaryArchivesExport, payload).Return(response, 200, nil)
	temporaryArchiveExportOut, err := svc.CreateTemporaryArchiveExport(payload)
	a.Nil(err, "Error creating temporary archive export")
	a.Equal(
		temporaryArchiveExportIn,
		temporaryArchiveExportOut,
		"CreateTemporaryArchiveExport returned different temporary archive export",
	)

	return temporaryArchiveExportOut
}
// CreateTemporaryArchiveExportFailErrMocked test mocked function
func CreateTemporaryArchiveExportFailErrMocked(
	t *testing.T,
	temporaryArchiveExportIn *types.TemporaryArchiveExport,
) *types.TemporaryArchiveExport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")
	response, err := json.Marshal(temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")

	// the mocked transport reports an error; the service must surface it
	mock.On("Post", APIPathPluginsToscaTemporaryArchivesExport, payload).Return(response, 200, fmt.Errorf("mocked error"))
	temporaryArchiveExportOut, err := svc.CreateTemporaryArchiveExport(payload)
	a.NotNil(err, "We are expecting an error")
	a.Nil(temporaryArchiveExportOut, "Expecting nil output")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return temporaryArchiveExportOut
}
// CreateTemporaryArchiveExportFailStatusMocked test mocked function
func CreateTemporaryArchiveExportFailStatusMocked(
	t *testing.T,
	temporaryArchiveExportIn *types.TemporaryArchiveExport,
) *types.TemporaryArchiveExport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload and canned response body
	payload, err := utils.ItemConvertParams(*temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")
	response, err := json.Marshal(temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")

	// a non-2xx status must be converted into an error by the service
	mock.On("Post", APIPathPluginsToscaTemporaryArchivesExport, payload).Return(response, 499, nil)
	temporaryArchiveExportOut, err := svc.CreateTemporaryArchiveExport(payload)
	a.NotNil(err, "We are expecting an status code error")
	a.Nil(temporaryArchiveExportOut, "Expecting nil output")
	a.Contains(err.Error(), "499", "Error should contain http code 499")

	return temporaryArchiveExportOut
}
// CreateTemporaryArchiveExportFailJSONMocked test mocked function
func CreateTemporaryArchiveExportFailJSONMocked(
	t *testing.T,
	temporaryArchiveExportIn *types.TemporaryArchiveExport,
) *types.TemporaryArchiveExport {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// request payload derived from the fixture
	payload, err := utils.ItemConvertParams(*temporaryArchiveExportIn)
	a.Nil(err, "TemporaryArchiveExport test data corrupted")

	// deliberately malformed JSON to trip the decoder
	badJSON := []byte{10, 20, 30}

	mock.On("Post", APIPathPluginsToscaTemporaryArchivesExport, payload).Return(badJSON, 200, nil)
	temporaryArchiveExportOut, err := svc.CreateTemporaryArchiveExport(payload)
	a.NotNil(err, "We are expecting a marshalling error")
	a.Nil(temporaryArchiveExportOut, "Expecting nil output")
	a.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return temporaryArchiveExportOut
}
// GetTemporaryArchiveExportTaskMocked test mocked function
func GetTemporaryArchiveExportTaskMocked(
	t *testing.T,
	temporaryArchiveExportTaskIn *types.TemporaryArchiveExportTask,
) *types.TemporaryArchiveExportTask {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveExportTaskIn)
	a.Nil(err, "TemporaryArchiveExportTask test data corrupted")

	// exercise the call and verify the round trip
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveExport, temporaryArchiveExportTaskIn.ID)).
		Return(response, 200, nil)
	temporaryArchiveExportTaskOut, err := svc.GetTemporaryArchiveExportTask(temporaryArchiveExportTaskIn.ID)
	a.Nil(err, "Error getting temporary archive export task")
	a.Equal(
		*temporaryArchiveExportTaskIn,
		*temporaryArchiveExportTaskOut,
		"GetTemporaryArchiveExportTask returned different temporary archive export task",
	)

	return temporaryArchiveExportTaskOut
}
// GetTemporaryArchiveExportTaskFailErrMocked test mocked function
func GetTemporaryArchiveExportTaskFailErrMocked(
	t *testing.T,
	temporaryArchiveExportTaskIn *types.TemporaryArchiveExportTask,
) *types.TemporaryArchiveExportTask {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveExportTaskIn)
	a.Nil(err, "TemporaryArchiveExportTask test data corrupted")

	// the mocked transport reports an error; the service must surface it
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveExport, temporaryArchiveExportTaskIn.ID)).
		Return(response, 200, fmt.Errorf("mocked error"))
	temporaryArchiveExportTaskOut, err := svc.GetTemporaryArchiveExportTask(temporaryArchiveExportTaskIn.ID)
	a.NotNil(err, "We are expecting an error")
	a.Nil(temporaryArchiveExportTaskOut, "Expecting nil output")
	a.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return temporaryArchiveExportTaskOut
}
// GetTemporaryArchiveExportTaskFailStatusMocked test mocked function
func GetTemporaryArchiveExportTaskFailStatusMocked(
	t *testing.T,
	temporaryArchiveExportTaskIn *types.TemporaryArchiveExportTask,
) *types.TemporaryArchiveExportTask {
	a := assert.New(t)

	// build the service on top of a mocked transport
	mock := &utils.MockConcertoService{}
	svc, err := NewTemporaryArchiveService(mock)
	a.Nil(err, "Couldn't load temporaryArchive service")
	a.NotNil(svc, "TemporaryArchive service not instanced")

	// canned JSON body the mock will answer with
	response, err := json.Marshal(temporaryArchiveExportTaskIn)
	a.Nil(err, "TemporaryArchiveExportTask test data corrupted")

	// a non-2xx status must be converted into an error by the service
	mock.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveExport, temporaryArchiveExportTaskIn.ID)).
		Return(response, 499, nil)
	temporaryArchiveExportTaskOut, err := svc.GetTemporaryArchiveExportTask(temporaryArchiveExportTaskIn.ID)
	a.NotNil(err, "We are expecting an status code error")
	a.Nil(temporaryArchiveExportTaskOut, "Expecting nil output")
	a.Contains(err.Error(), "499", "Error should contain http code 499")

	return temporaryArchiveExportTaskOut
}
// GetTemporaryArchiveExportTaskFailJSONMocked test mocked function
func GetTemporaryArchiveExportTaskFailJSONMocked(
t *testing.T,
temporaryArchiveExportTaskIn *types.TemporaryArchiveExportTask,
) *types.TemporaryArchiveExportTask {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewTemporaryArchiveService(cs)
assert.Nil(err, "Couldn't load temporaryArchive service")
assert.NotNil(ds, "TemporaryArchive service not instanced")
// wrong json
dIn := []byte{10, 20, 30}
// call service
cs.On("Get", fmt.Sprintf(APIPathPluginsToscaTemporaryArchiveExport, temporaryArchiveExportTaskIn.ID)).
Return(dIn, 200, nil)
temporaryArchiveExportTaskOut, err := ds.GetTemporaryArchiveExportTask(temporaryArchiveExportTaskIn.ID)
assert.NotNil(err, "We are expecting a marshalling error")
assert.Nil(temporaryArchiveExportTaskOut, "Expecting nil output")
assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
return temporaryArchiveExportTaskOut
}
// DownloadTemporaryArchiveExportMocked exercises the happy path of the mocked
// "GetFile" endpoint: the service should return the downloaded file path and
// the 200 status unchanged.
func DownloadTemporaryArchiveExportMocked(t *testing.T, downloadTemporaryArchiveExportDataIn map[string]string) {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemporaryArchiveService(cs)
	assert.Nil(err, "Couldn't load temporaryArchive service")
	assert.NotNil(ds, "TemporaryArchive service not instanced")
	urlSource := downloadTemporaryArchiveExportDataIn["fakeURLToFile"]
	pathFile := downloadTemporaryArchiveExportDataIn["fakeFilePath"]
	// call service
	cs.On("GetFile", urlSource, pathFile).Return(pathFile, 200, nil)
	realFileName, status, err := ds.DownloadTemporaryArchiveExport(urlSource, pathFile)
	assert.Nil(err, "Error downloading temporary archive export file")
	// testify's Equal signature is (expected, actual); the original had the
	// arguments swapped, producing a misleading diff on failure.
	assert.Equal(200, status, "DownloadTemporaryArchiveExport returned invalid response")
	assert.Equal(pathFile, realFileName, "Invalid downloaded file path")
}
// DownloadTemporaryArchiveExportFailErrMocked test mocked function
func DownloadTemporaryArchiveExportFailErrMocked(t *testing.T, downloadTemporaryArchiveExportDataIn map[string]string) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewTemporaryArchiveService(cs)
assert.Nil(err, "Couldn't load temporaryArchive service")
assert.NotNil(ds, "TemporaryArchive service not instanced")
urlSource := downloadTemporaryArchiveExportDataIn["fakeURLToFile"]
pathFile := downloadTemporaryArchiveExportDataIn["fakeFilePath"]
// call service
cs.On("GetFile", urlSource, pathFile).Return("", 499, fmt.Errorf("mocked error"))
_, status, err := ds.DownloadTemporaryArchiveExport(urlSource, pathFile)
assert.NotNil(err, "We are expecting an error")
assert.Equal(status, 499, "DownloadTemporaryArchiveExport returned an unexpected status code")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
} | api/cloud/temporary_archives_api_mocked.go | 0.592902 | 0.489381 | temporary_archives_api_mocked.go | starcoder |
package example_body
import (
"github.com/hueypark/framework/core/random"
"github.com/hueypark/physics/core"
"github.com/hueypark/physics/core/body"
"github.com/hueypark/physics/core/math/rotator"
"github.com/hueypark/physics/core/math/vector"
"github.com/hueypark/physics/core/shape/bullet"
"github.com/hueypark/physics/core/shape/circle"
"github.com/hueypark/physics/core/shape/convex"
)
const WINDOW_WIDTH = 1024
const WINDOW_HEIGHT = 768
// ExampleBody is a demo scene: a physics world with a static floor plane onto
// which Tick periodically drops randomly generated shapes.
type ExampleBody struct {
	world         *physics.World
	respawnTime   float64              // seconds left until the next spawn
	onBodyCreated func(body body.Body) // optional spawn callback, may be nil
}

// New builds the demo world containing a single static plane (a thin
// 600x20-unit convex plate placed 200 units below the origin) for falling
// shapes to land on.
func New() *ExampleBody {
	world := physics.New()
	plane := createConvex(
		[]vector.Vector{
			{X: 300, Y: -100},
			{X: 300, Y: -120},
			{X: -300, Y: -100},
			{X: -300, Y: -120},
		},
		vector.Vector{X: 0, Y: -200},
		rotator.ZERO(),
		vector.Vector{})
	plane.SetStatic() // the floor never moves
	world.Add(plane)
	return &ExampleBody{world, 0, nil}
}

// SetOnBodyCreated registers a callback that receives a copy of every body
// spawned by createRandomShape.
func (e *ExampleBody) SetOnBodyCreated(onBodyCreated func(body body.Body)) {
	e.onBodyCreated = onBodyCreated
}

// Tick advances the scene by delta seconds: every RESPAWN_TIME seconds it
// spawns a new random shape at a random X position above the floor, then it
// steps the physics world and schedules bodies that have drifted outside the
// margin-extended window (see isOutbound) for deletion.
func (e *ExampleBody) Tick(delta float64) {
	const RESPAWN_TIME = 0.5
	e.respawnTime -= delta
	if e.respawnTime < 0 {
		e.respawnTime = RESPAWN_TIME
		e.world.Add(
			e.createRandomShape(
				vector.Vector{X: random.FRandom(-300, 300), Y: 0},
				rotator.Rotator{Degrees: random.FRandom(180.0, 360.0)},
				vector.Vector{X: 0, Y: 0}))
	}
	e.world.Tick(delta)
	for _, b := range e.world.Bodys() {
		if isOutbound(b.Position()) {
			e.world.ReservedDelete(b.Id())
		}
	}
}

// World exposes the underlying physics world (e.g. for rendering).
func (e *ExampleBody) World() *physics.World {
	return e.world
}
// createRandomShape builds a bullet, a circle, or a random 10-vertex convex
// (picked by random.Random) at the given position/rotation/velocity, notifies
// the onBodyCreated callback, and returns the new body.
func (e *ExampleBody) createRandomShape(position vector.Vector, rotation rotator.Rotator, velocity vector.Vector) *body.Body {
	var b *body.Body
	// NOTE(review): assumes random.Random(0, 2) yields only 0, 1 or 2; any
	// other value would leave b nil and panic at the dereference below —
	// confirm against the random package.
	switch random.Random(0, 2) {
	case 0:
		b = createBullet(position, velocity)
	case 1:
		b = createCircle(random.FRandom(10, 50), position, velocity)
	case 2:
		b = createConvex(
			[]vector.Vector{
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
				{X: random.FRandom(-50, 50), Y: random.FRandom(-50, 50)},
			},
			position,
			rotation,
			velocity)
	}
	if e.onBodyCreated != nil {
		// the callback receives a snapshot copy, not the live body
		e.onBodyCreated(*b)
	}
	return b
}
// createBullet builds a unit-mass body with a bullet (point-like) shape at
// the given position and initial velocity.
func createBullet(position vector.Vector, velocity vector.Vector) *body.Body {
	b := body.New()
	b.SetMass(1)
	b.SetShape(bullet.New())
	b.SetPosition(position)
	b.Velocity = velocity
	return b
}

// createCircle builds a unit-mass circular body of the given radius at the
// given position and initial velocity.
func createCircle(radius float64, position vector.Vector, velocity vector.Vector) *body.Body {
	b := body.New()
	b.SetMass(1)
	b.SetShape(&circle.Circle{Radius: radius})
	b.SetPosition(position)
	b.Velocity = velocity
	return b
}

// createConvex builds a unit-mass convex body from the given vertices (in
// body-local coordinates) with the given position, rotation and velocity.
func createConvex(vertices []vector.Vector, position vector.Vector, rotation rotator.Rotator, velocity vector.Vector) *body.Body {
	b := body.New()
	b.SetMass(1)
	b.SetShape(convex.New(vertices))
	b.SetPosition(position)
	b.SetRotation(rotation)
	b.Velocity = velocity
	return b
}
func isOutbound(position vector.Vector) bool {
const MARGIN = 300
if position.X < -WINDOW_WIDTH/2-MARGIN ||
position.X > WINDOW_WIDTH/2+MARGIN ||
position.Y < -WINDOW_HEIGHT/2-MARGIN ||
position.Y > WINDOW_HEIGHT/2+MARGIN {
return true
}
return false
} | example/example_body/example_body.go | 0.794823 | 0.456531 | example_body.go | starcoder |
package parser
import (
"encoding/json"
"fmt"
"sort"
"strconv"
"strings"
"time"
)
// Series is a flat JSON serializable representation of the series.
type Series struct {
id string
Start time.Time `json:"start"`
End time.Time `json:"end"`
Tags Tags `json:"tags"`
Datapoints Datapoints `json:"datapoints"`
}
// tag array layout: index 0 holds the name, index 1 the value.
const (
	tagNameIdx  = 0
	tagValueIdx = 1
)

// Tag is a simple JSON serializable representation of a tag, stored as a
// fixed-size [name, value] string pair.
type Tag [2]string

// NewTag creates a new tag with a given name and value.
func NewTag(name, value string) Tag {
	var t Tag
	t[tagNameIdx], t[tagValueIdx] = name, value
	return t
}

// Name returns the tag name.
func (t Tag) Name() string {
	return t[tagNameIdx]
}

// Value returns the tag value.
func (t Tag) Value() string {
	return t[tagValueIdx]
}

// Tags is a simple JSON serializable representation of tags.
type Tags []Tag

// Get returns the values of every tag whose name equals the given name, in
// their original order.
func (t Tags) Get(name string) []string {
	// Pre-size a little; duplicate names are expected to be rare.
	matched := make([]string, 0, 2)
	for _, tag := range t {
		if tag.Name() != name {
			continue
		}
		matched = append(matched, tag.Value())
	}
	return matched
}
// Datapoints is a JSON serializeable list of values for the series.
type Datapoints []Datapoint
// Datapoint is a JSON serializeable datapoint for the series.
type Datapoint struct {
Value Value `json:"val"`
Timestamp time.Time `json:"ts"`
}
// Value is a JSON serializable float64 that allows NaNs by encoding the
// number as a quoted string.
type Value float64

// MarshalJSON returns state as the JSON encoding of a Value.
func (v Value) MarshalJSON() ([]byte, error) {
	// %g: shortest representation that round-trips; works for NaN/Inf too.
	repr := fmt.Sprintf("%g", float64(v))
	return json.Marshal(repr)
}

// UnmarshalJSON unmarshals JSON-encoded data into a Value.
func (v *Value) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	parsed, err := strconv.ParseFloat(raw, 64)
	if err != nil {
		return err
	}
	*v = Value(parsed)
	return nil
}
// IDOrGenID gets the ID for this result.
func (r *Series) IDOrGenID() string {
if len(r.id) == 0 {
tags := make(sort.StringSlice, len(r.Tags))
for _, v := range r.Tags {
tags = append(tags, fmt.Sprintf("%s:%s,", v[0], v[1]))
}
sort.Sort(tags)
var sb strings.Builder
for _, t := range tags {
sb.WriteString(t)
}
r.id = sb.String()
}
return r.id
} | src/cmd/services/m3comparator/main/parser/parser.go | 0.822759 | 0.416144 | parser.go | starcoder |
package game
import (
"errors"
"fmt"
"strings"
)
// Board represents a 3x3 tic-tac-toe game board. A nil entry means the space
// is unmarked.
type Board struct {
	grid [][]*Mark
}

// NewBoard is the constructor for Board; all nine spaces start unmarked.
func NewBoard() *Board {
	grid := make([][]*Mark, 3)
	for row := range grid {
		grid[row] = make([]*Mark, 3)
	}
	return &Board{grid: grid}
}
// ApplyMark applies a Mark to the given board grid space indicated by the row
// and column number. This is the programmatic equivalent to drawing an X or O
// on a physical tic-tac-toe grid :). It rejects out-of-range coordinates and
// already-occupied spaces.
func (b *Board) ApplyMark(row, col int, mark Mark) error {
	switch {
	case row < 0 || row > 2:
		return fmt.Errorf("row %d is out of range [0, 2]", row)
	case col < 0 || col > 2:
		return fmt.Errorf("col %d is out of range [0, 2]", col)
	case b.grid[row][col] != nil:
		return errors.New("space is occupied")
	}
	b.grid[row][col] = &mark
	return nil
}
// IsThreeInARow returns true if the grid has a given Mark (X or O) placed
// three-in-a-row along any row, column, or diagonal.
func (b *Board) IsThreeInARow(mark Mark) bool {
	// line reports whether all three named spaces carry the mark.
	line := func(r0, c0, r1, c1, r2, c2 int) bool {
		return b.spaceEquals(r0, c0, mark) &&
			b.spaceEquals(r1, c1, mark) &&
			b.spaceEquals(r2, c2, mark)
	}
	// Both diagonals first, then each row and column.
	if line(0, 0, 1, 1, 2, 2) || line(0, 2, 1, 1, 2, 0) {
		return true
	}
	for i := 0; i < 3; i++ {
		if line(i, 0, i, 1, i, 2) || line(0, i, 1, i, 2, i) {
			return true
		}
	}
	return false
}
// IsFull returns true if all the board spaces are currently marked.
func (b *Board) IsFull() bool {
for i := 0; i < 3; i++ {
for j := 0; j < 3; j++ {
if b.grid[i][j] == nil {
return false
}
}
}
return true
}
// ToString returns a string representation of the game board.
func (b *Board) ToString() string {
builder := strings.Builder{}
for i := 0; i < 3; i++ {
for j := 0; j < 3; j++ {
builder.WriteString(b.grid[i][j].ToString())
}
builder.WriteString("\n")
}
return builder.String()
}
func (b *Board) spaceEquals(row, col int, mark Mark) bool {
return b.grid[row][col] != nil && *b.grid[row][col] == mark
} | internal/game/board.go | 0.787155 | 0.427636 | board.go | starcoder |
package shellGenerator
import (
"fmt"
"math"
"github.com/Konstantin8105/Convert-INP-to-STD-format/inp"
)
// Shell - input data of shell
type Shell struct {
Height float64 // unit - meter. Height of shell
Diameter float64 // unit - meter. Diameter of shell
Precision float64 // unit - meter. Maximal distance between points
}
// ShellName - name of shell for include the property and loads
var ShellName string
func init() {
ShellName = "shell"
}
// check validates the shell parameters, returning a descriptive error for the
// first non-positive dimension found, or nil when all are valid.
func (s Shell) check() error {
	switch {
	case s.Height <= 0:
		return fmt.Errorf("Height of shell cannot be less or equal zero")
	case s.Diameter <= 0:
		return fmt.Errorf("Diameter of shell cannot be less or equal zero")
	case s.Precision <= 0:
		// fixed user-facing typo: "Precition" -> "Precision"
		return fmt.Errorf("Precision of shell cannot be less or equal zero")
	}
	return nil
}
// maxInt returns the larger of two ints.
func maxInt(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// GenerateMesh builds an in-memory INP model of the cylindrical shell: rings
// of nodes (amountOfPointOnLevel per ring, amountLevelsByHeight+1 rings
// stacked along Y) connected by one set of S4 quad elements.
func (s Shell) GenerateMesh(amountOfPointOnLevel, amountLevelsByHeight int) (m inp.Format, err error) {
	err = s.check()
	if err != nil {
		return m, err
	}
	deltaHeight := s.Height / float64(amountLevelsByHeight)
	// init number of point, cannot be less 1
	initPoint := 1
	var angleOffset float64
	l := (amountLevelsByHeight + 1) * amountOfPointOnLevel
	m.Nodes = make([]inp.Node, 0, l)
	for level := 0; level <= amountLevelsByHeight; level++ {
		elevation := deltaHeight * float64(level)
		if level == amountLevelsByHeight {
			// pin the top ring exactly to Height, avoiding accumulated
			// floating-point error
			elevation = s.Height
		}
		for i := 0; i < amountOfPointOnLevel; i++ {
			// nodes lie on a circle of radius Diameter/2 in the XZ plane;
			// Y is the vertical axis
			angle := 2.*math.Pi/float64(amountOfPointOnLevel)*float64(i) + angleOffset
			m.Nodes = append(m.Nodes, inp.Node{
				// 1-based index, row-major: ring number * ring size + offset
				Index: int(i+amountOfPointOnLevel*level) + initPoint,
				Coord: [3]float64{
					s.Diameter * float64(0.5) * math.Sin(angle),
					elevation,
					s.Diameter * float64(0.5) * math.Cos(angle)},
			})
		}
	}
	//fmt.Println("Generate points")
	// generate quad elements connecting each ring to the one above it
	var shell inp.Element
	shell.Name = ShellName
	shell.FE, err = inp.GetFiniteElementByName("S4")
	if err != nil {
		return m, err
	}
	l2 := amountLevelsByHeight * amountOfPointOnLevel
	shell.Data = make([]inp.ElementData, 0, l2)
	for level := 0; level < amountLevelsByHeight; level++ {
		for i := 0; i < amountOfPointOnLevel; i++ {
			if i+1 < amountOfPointOnLevel {
				quardToRectangle(&shell,
					int(i+amountOfPointOnLevel*level+initPoint),
					int(i+1+amountOfPointOnLevel*level+initPoint),
					int(i+amountOfPointOnLevel*(level+1)+initPoint),
					int(i+1+amountOfPointOnLevel*(level+1)+initPoint))
			} else {
				// last column wraps around to the first node of each ring,
				// closing the cylinder
				quardToRectangle(&shell,
					int(i+amountOfPointOnLevel*level+initPoint),
					int(0+amountOfPointOnLevel*level+initPoint),
					int(i+amountOfPointOnLevel*(level+1)+initPoint),
					int(0+amountOfPointOnLevel*(level+1)+initPoint))
			}
		}
	}
	m.Elements = append(m.Elements, shell)
	//fmt.Println("Generate elements")
	m.AddUniqueIndexToElements()
	fmt.Printf("Create model with %5v points and %5v elements\n", len(m.Nodes), len(shell.Data))
	//fmt.Println("Return model")
	return m, nil
}

// GenerateINP generates a file in INP format for the shell, choosing node and
// ring counts so that neighbouring nodes are at most Precision apart (with
// lower bounds of 4 points per ring and 2 rings).
func (s Shell) GenerateINP(filename string) (err error) {
	// generate first level of points
	amountOfPointOnLevel := 4
	if s.Precision < s.Diameter {
		// chord length <= Precision  =>  points >= pi / asin(Precision/D)
		amountOfPointOnLevel = int(maxInt(amountOfPointOnLevel, int(math.Pi/math.Asin(s.Precision/s.Diameter)+1)))
	}
	amountLevelsByHeight := maxInt(2, int(s.Height/s.Precision+1))
	m, err := s.GenerateMesh(amountOfPointOnLevel, amountLevelsByHeight)
	if err != nil {
		return err
	}
	return m.Save(filename)
}
// Convert 4 points element to quatric element
// p3 *---* p4
// | |
// | |
// | |
// p1 *---* p2
func quardToRectangle(element *inp.Element, p1, p2, p3, p4 int) {
element.Data = append(element.Data, inp.ElementData{
Index: -1,
IPoint: []int{p1, p2, p4, p3},
})
return
} | shellGenerator/shell.go | 0.525125 | 0.422207 | shell.go | starcoder |
package repeatgenome
import (
"bytes"
"github.com/mmcco/jh-bio/bioutils"
"unsafe"
)
/*
   Returns the total number of bases in a RepeatGenome's reference
   chromosomes, summing the lengths of every stored sequence.
*/
func (repeatGenome *RepeatGenome) Size() uint64 {
	var numBases uint64 = 0
	for _, seqs := range repeatGenome.chroms {
		for _, seq := range seqs {
			numBases += uint64(len(seq))
		}
	}
	return numBases
}

/*
   Returns the size in gigabytes (10^9 bytes, not GiB) of the supplied
   RepeatGenome's Kmers field, estimated as count * sizeof(Kmer).
*/
func (rg *RepeatGenome) KmersGBSize() float64 {
	return (float64(len(rg.Kmers)) / 1000000000) * float64(unsafe.Sizeof(Kmer{}))
}

/*
   Returns the percent of a RepeatGenome's reference bases that are contained
   in a repeat instance. It makes the assumption that no base is contained in
   more than one repeat instance; overlapping instances would be counted
   twice.
*/
func (rg *RepeatGenome) PercentRepeats() float64 {
	var repeatBases uint64 = 0
	for _, match := range rg.Matches {
		repeatBases += match.SeqEnd - match.SeqStart
	}
	return 100 * (float64(repeatBases) / float64(rg.Size()))
}

/*
   Returns the sum of the sizes (in bases) of all of a repeat sequence type's
   instances.
*/
func (repeat *Repeat) Size() uint64 {
	var repeatSize uint64 = 0
	for _, match := range repeat.Instances {
		repeatSize += match.SeqEnd - match.SeqStart
	}
	return repeatSize
}
/*
   Returns the sum of the sizes of all repeat instances in the supplied
   ClassNode's subtree: its own repeat's instances plus, recursively, those of
   every descendant. A nil node contributes zero.
*/
func (classNode *ClassNode) Size() uint64 {
	if classNode == nil {
		return 0
	}
	var classNodeSize uint64 = 0
	if classNode.Repeat != nil {
		for _, match := range classNode.Repeat.Instances {
			classNodeSize += match.SeqEnd - match.SeqStart
		}
	}
	for _, child := range classNode.Children {
		classNodeSize += child.Size()
	}
	return classNodeSize
}

/*
   Returns the number of base pairs from which the supplied read could have
   originated, assuming that its classification was correct. This is done in
   terms of Kraken-Q logic, meaning that there is at least one kmer shared
   between the repeat reference and the read. Therefore, the read must overlap
   a repeat reference from the classified subtree by at least k bases. This
   function is used to calculate the probability of correct classification
   assuming random selection, and the amount to which a classification narrows
   a read's potential origin.
*/
func (readResp ReadResponse) HangingSize() uint64 {
	classNode := readResp.ClassNode
	if classNode == nil {
		return 0
	}
	var classNodeSize uint64 = 0
	if classNode.Repeat != nil {
		for _, match := range classNode.Repeat.Instances {
			// each instance contributes its own span...
			classNodeSize += match.SeqEnd - match.SeqStart
			// ...plus up to len(read)-k bases hanging off each of its two
			// ends while still sharing >= k bases with the instance
			classNodeSize += 2 * uint64(len(readResp.Seq)-bioutils.K)
		}
	}
	for _, child := range classNode.Children {
		classNodeSize += ReadResponse{readResp.Seq, child}.HangingSize()
	}
	return classNodeSize
}
/*
   Returns the average percent of the genome a read from the given set could
   have originated from, assuming their classification was correct. This is
   used to estimate how much the classification assisted us in locating reads'
   origins. The more specific and helpful the classifications are, the lower
   the percentage will be. Uses a cumulative average to prevent overflow.
   With strict=true the class size is the plain subtree size; otherwise it is
   the larger "hanging" size that also counts partial overlaps.
*/
func (rg *RepeatGenome) AvgPossPercentGenome(resps []ReadResponse, strict bool) float64 {
	// memoize per-class sizes, since many responses share a ClassNode
	classNodeSizes := make(map[*ClassNode]float64, len(rg.ClassTree.ClassNodes))
	if strict {
		for _, classNode := range rg.ClassTree.NodesByID {
			classNodeSizes[classNode] = float64(classNode.Size())
		}
	} else {
		for _, resp := range resps {
			if _, exists := classNodeSizes[resp.ClassNode]; !exists {
				classNodeSizes[resp.ClassNode] = float64(resp.HangingSize())
			}
		}
	}
	// running (cumulative) mean of the class sizes seen so far
	var classesProcessed, avgClassSize float64 = 0, 0
	for _, resp := range resps {
		avgClassSize += (classNodeSizes[resp.ClassNode] - avgClassSize) / (classesProcessed + 1)
		classesProcessed++
	}
	return 100 * (avgClassSize / float64(rg.Size()))
}
/*
   A helper function for PercentTrueClassifications below. It recursively
   determines whether a read originated from a reference repeat instance in
   the subtree indicated by the supplied ClassNode. With strict=false any
   single-base overlap counts; with strict=true at least K bases must
   overlap.
*/
func (rg *RepeatGenome) recNodeSearch(classNode *ClassNode, readSAM ReadSAM, strict bool) bool {
	if classNode != nil && classNode.Repeat != nil {
		for _, match := range classNode.Repeat.Instances {
			if match.SeqName != readSAM.SeqName {
				continue
			}
			// must compute where the read ends - it isn't stored
			endInd := readSAM.StartInd + uint64(len(readSAM.TextSeq))
			// SeqName equality is already guaranteed by the continue above
			overlaps := readSAM.StartInd < match.SeqEnd && endInd > match.SeqStart
			if overlaps && !strict {
				return true
			} else if overlaps && strict {
				// length of the intersection of [StartInd, endInd) with the
				// instance; renamed from "overlap", which shadowed the
				// boolean of the same name in the original
				var overlapLen uint64 = endInd - match.SeqStart
				if readSAM.StartInd > match.SeqStart {
					overlapLen -= readSAM.StartInd - match.SeqStart
				}
				if overlapLen >= bioutils.K {
					return true
				}
			}
		}
	}
	if classNode != nil && classNode.Children != nil {
		for _, child := range classNode.Children {
			if rg.recNodeSearch(child, readSAM, strict) {
				return true
			}
		}
	}
	return false
}
// RepeatIsCorrect reports whether the read in readSAMRepeat overlaps any
// instance of the classified repeat. With strict=false any single-base
// overlap counts; with strict=true at least K bases must overlap. Panics if
// the Repeat is nil — callers must ensure a classification exists.
func (rg *RepeatGenome) RepeatIsCorrect(readSAMRepeat ReadSAMRepeat, strict bool) bool {
	// awkward unpacking - maybe use separate args?
	readSAM, repeat := readSAMRepeat.ReadSAM, readSAMRepeat.Repeat
	read, seqName, startInd := readSAM.TextSeq, readSAM.SeqName, readSAM.StartInd
	if repeat == nil {
		// We will for now use a panic rather than an error. This is for speed
		// and simplicity, and because the caller logically should ensure than
		// the repeat is non-nil.
		panic("RepeatGenome.RepeatIsCorrect(): readSAMRepeat.Repeat is nil")
	}
	for _, match := range repeat.Instances {
		if match.SeqName != seqName {
			continue
		}
		// must compute where the read ends - it isn't stored
		endInd := startInd + uint64(len(read))
		// SeqName equality is already guaranteed by the continue above
		overlaps := startInd < match.SeqEnd && endInd > match.SeqStart
		if overlaps && !strict {
			return true
		} else if overlaps && strict {
			// length of the intersection with the instance; renamed from
			// "overlap", which shadowed the boolean of the same name
			var overlapLen uint64 = endInd - match.SeqStart
			if startInd > match.SeqStart {
				overlapLen -= startInd - match.SeqStart
			}
			if overlapLen >= bioutils.K {
				return true
			}
		}
	}
	return false
}
/*
func TestNodeSearch(classNode *ClassNode, readSAM ReadSAM) bool {
if classNode == nil {
return false
}
if classNode != nil && classNode.Repeat != nil {
for _, match := range classNode.Repeat.Instances {
// must compute where the read ends
endInd := readSAM.StartInd + uint64(len(readSAM.Seq))
fmt.Printf("testing match %s[%d:%d] against %s[%d:%d]\n", match.SeqName, match.SeqStart, match.SeqEnd, readSAM.SeqName, readSAM.StartInd, endInd)
if readSAM.SeqName == match.SeqName && readSAM.StartInd < match.SeqEnd && endInd > match.SeqStart {
fmt.Println("true")
return true
// below logic is for checking for at least rg.K overlap
/*
var overlap uint64 := readSAM.SeqEnd - match.SeqStart
if readSAM.SeqStart > match.SeqStart {
overlap -= readSAM.SeqStart - match.SeqStart
}
if overlap >= uint64(rg.K) {
return true
}
}
}
} else {
fmt.Println("no classNode or no classNode.Repeat")
}
if classNode != nil && classNode.Children != nil {
for _, child := range classNode.Children {
if TestNodeSearch(child, readSAM) {
fmt.Println("true")
return true
} else {
fmt.Println("(child false)")
}
}
} else {
fmt.Println("no classNode or no classNode.Children")
}
fmt.Println("false")
return false
}
*/
/*
   Returns the percentage of classified reads whose classification is
   confirmed correct by recNodeSearch (i.e. the read overlaps a reference
   repeat instance in the classified subtree; with useStrict it must overlap
   by at least K bases). Returns 0 when no read was classified — previously
   this divided by zero and returned NaN.
*/
func (rg *RepeatGenome) PercentTrueClassifications(responses []ReadSAMResponse, useStrict bool) float64 {
	var classifications, correctClassifications uint64 = 0, 0
	for _, resp := range responses {
		if resp.ClassNode != nil {
			classifications++
		}
		if rg.recNodeSearch(resp.ClassNode, resp.ReadSAM, useStrict) {
			correctClassifications++
		}
	}
	if classifications == 0 {
		return 0
	}
	return 100 * (float64(correctClassifications) / float64(classifications))
}
/*
   Returns the number of non-ambiguous (non-n-containing), non-unique kmers in
   the reference genome. It is used for simple printed statistics, and to
   determine the amount of memory to allocate for raw-kmer-containing data
   structures. This is different from the count returned by krakenFirstPass()
   because it does not allow ambiguous kmers.
*/
func (rg *RepeatGenome) numRawKmers() uint64 {
	var numRawKmers uint64 = 0
	// split each instance's sequence on 'n' to skip ambiguous bases
	splitOnN := func(c rune) bool { return c == 'n' }
	for i := range rg.Matches {
		match := &rg.Matches[i]
		// NOTE(review): chroms is indexed twice with match.SeqName — this only
		// works if each chromosome map holds its own name as the (sole)
		// inner key; confirm against how chroms is populated.
		seq := rg.chroms[match.SeqName][match.SeqName][match.SeqStart:match.SeqEnd]
		seqs := bytes.FieldsFunc([]byte(seq), splitOnN)
		for j := range seqs {
			if len(seqs[j]) >= bioutils.K {
				// a fragment of length L yields L-K+1 kmers
				numRawKmers += uint64(len(seqs[j]) - bioutils.K + 1)
			}
		}
	}
	return numRawKmers
}
package gap
import (
"context"
"fmt"
"time"
)
// GoalFlag is a uint value type, that is used to specify different genetic
// algorithm goals of cancellation.
type GoalFlag uint
const (
// TIME is a goal type flag. If this flag is set a counter is started
// using the TimeN field in the goal structure which requests a graceful
// stop from the algorithm after the time elapses. NOTE: The graceful
// shutdown may take a while after the timer elapses.
TIME GoalFlag = 1 << iota
// GENERATION is a goal type flag. It starts counting generations and
// ends the algorithm after the amount of generations specified in the
// goal structure field GenN has been completed.
GENERATION
// FITNESS is a goal type flag. It monitors the fitness of all
// solutions and stops the algorithm after the fitness specified in the
// goal structure field FitN has been achieved.
FITNESS
)
// Goal is a structure that contains information about the goals of the
// algorithm being run. It can be customized to use one or more end conditions.
type Goal struct {
// A value that can be constructed from combining one or more goal flags
// via bitwise OR.
Goals GoalFlag
// If TIME is set - the duration the algorithm will run before
// cancellation.
TimeN time.Duration
// If GENERATION is set - the amount of generations the algorithm will
// run before cancellation.
GenN uint
// If FITNESS is set - the fitness after which the algorithm will be
// cancelled.
FitN uint
ctx context.Context
cancel context.CancelFunc
term bool
}
func (g *Goal) init() error {
g.ctx = nil
g.cancel = nil
g.term = false
if g.Goals&(TIME|FITNESS|GENERATION) == 0 {
return fmt.Errorf("no goal set for algorithm")
}
if g.Goals&TIME != 0 {
g.ctx, g.cancel = context.WithTimeout(context.Background(), g.TimeN)
}
return nil
}
func (g *Goal) checkTime() bool {
if g.Goals&TIME != 0 {
select {
case <-g.ctx.Done():
g.term = true
default:
}
}
return g.term
}
func (g *Goal) checkGen(gen uint) bool {
if g.Goals&GENERATION != 0 && gen >= g.GenN {
g.term = true
}
return g.term
}
func (g *Goal) checkFitness(fitness uint) bool {
if g.Goals&FITNESS != 0 && fitness >= g.FitN {
g.term = true
}
return g.term
}
func (g *Goal) finalize() {
if g.cancel != nil {
g.cancel()
}
} | goal.go | 0.513181 | 0.426202 | goal.go | starcoder |
package eval
import (
"fmt"
"path/filepath"
"reflect"
"runtime"
"strings"
)
// RunDSL iterates through the root expressions and calls WalkSets on each to
// retrieve the expression sets. It iterates over the expression sets multiple
// times to first execute the DSL, then validate the resulting expressions and
// lastly to finalize them. The executed DSL may register additional roots
// during initial execution via Register to have them be executed (last) in the
// same run.
func RunDSL() error {
roots, err := Context.Roots()
if err != nil {
return err
}
if len(roots) == 0 {
return nil
}
executed := 0
recursed := 0
for executed < len(roots) {
recursed++
start := executed
executed = len(roots)
for _, root := range roots[start:] {
root.WalkSets(runSet)
}
if recursed > 100 {
// Let's cross that bridge once we get there
return fmt.Errorf("too many generated roots, infinite loop?")
}
}
if Context.Errors != nil {
return Context.Errors
}
for _, root := range roots {
prepareSet(ExpressionSet{root})
root.WalkSets(prepareSet)
}
for _, root := range roots {
validateSet(ExpressionSet{root})
root.WalkSets(validateSet)
}
if Context.Errors != nil {
return Context.Errors
}
for _, root := range roots {
finalizeSet(ExpressionSet{root})
root.WalkSets(finalizeSet)
}
return nil
}
// Execute runs the given DSL to initialize the given expression. It returns
// true on success. It returns false and appends to Context.Errors on failure.
// Note that Run takes care of calling Execute on all expressions that implement
// Source. This function is intended for use by expressions that run the DSL at
// declaration time rather than store the DSL for execution by the dsl engine
// (usually simple independent expressions). The DSL should use ReportError to
// record DSL execution errors.
func Execute(fn func(), def Expression) bool {
	if fn == nil {
		return true
	}
	// Success is detected by comparing the error count before and after the
	// DSL ran, since ReportError appends to Context.Errors rather than
	// returning an error.
	var startCount int
	if Context.Errors != nil {
		startCount = len(Context.Errors.(MultiError))
	}
	// Push def so Current()/ReportError attribute errors to this expression
	// while fn runs, then pop it again.
	Context.Stack = append(Context.Stack, def)
	fn()
	Context.Stack = Context.Stack[:len(Context.Stack)-1]
	var endCount int
	if Context.Errors != nil {
		endCount = len(Context.Errors.(MultiError))
	}
	return endCount <= startCount
}

// Current returns the expression whose DSL is currently being executed.
// As a special case Current returns Top when the execution stack is empty.
func Current() Expression {
	current := Context.Stack.Current()
	if current == nil {
		return Top
	}
	return current
}
// ReportError records a DSL error for reporting post DSL execution. It accepts
// a format and values a la fmt.Printf. The message is suffixed with the name
// of the expression currently being evaluated, or "(top level)" when the
// execution stack is empty.
func ReportError(fm string, vals ...interface{}) {
	var suffix string
	if cur := Context.Stack.Current(); cur != nil {
		if name := cur.EvalName(); name != "" {
			suffix = fmt.Sprintf(" in %s", name)
		}
	} else {
		suffix = " (top level)"
	}
	err := fmt.Errorf(fm+suffix, vals...)
	// Attach the user-code location (file:line) that triggered the error.
	file, line := computeErrorLocation()
	Context.Record(&Error{
		GoError: err,
		File:    file,
		Line:    line,
	})
}

// IncompatibleDSL should be called by DSL functions when they are invoked in an
// incorrect context (e.g. "Params" in "Service"). The offending DSL function's
// bare name is the last dot-separated element of the caller's qualified name.
func IncompatibleDSL() {
	elems := strings.Split(caller(), ".")
	ReportError("invalid use of %s", elems[len(elems)-1])
}

// InvalidArgError records an invalid argument error. It is used by DSL
// functions that take dynamic arguments.
func InvalidArgError(expected string, actual interface{}) {
	ReportError("cannot use %#v (type %s) as type %s", actual, reflect.TypeOf(actual), expected)
}
// ValidationErrors records the errors encountered when running Validate.
// Errors[i] was reported against Expressions[i]; the two slices are kept in
// lockstep.
type ValidationErrors struct {
	Errors      []error
	Expressions []Expression
}

// Error implements the error interface, producing one
// "<expression>: <error>" line per recorded error.
func (verr *ValidationErrors) Error() string {
	msg := make([]string, len(verr.Errors))
	for i, err := range verr.Errors {
		msg[i] = fmt.Sprintf("%s: %s", verr.Expressions[i].EvalName(), err)
	}
	return strings.Join(msg, "\n")
}

// Merge merges validation errors into the target. A nil argument is a no-op.
func (verr *ValidationErrors) Merge(err *ValidationErrors) {
	if err == nil {
		return
	}
	verr.Errors = append(verr.Errors, err.Errors...)
	verr.Expressions = append(verr.Expressions, err.Expressions...)
}

// Add adds a validation error to the target.
func (verr *ValidationErrors) Add(def Expression, format string, vals ...interface{}) {
	verr.AddError(def, fmt.Errorf(format, vals...))
}

// AddError adds a validation error to the target. It "flattens" validation
// errors so that the recorded errors are never ValidationErrors themselves.
func (verr *ValidationErrors) AddError(def Expression, err error) {
	if v, ok := err.(*ValidationErrors); ok {
		// splice in the nested errors, keeping their own expressions
		verr.Errors = append(verr.Errors, v.Errors...)
		verr.Expressions = append(verr.Expressions, v.Expressions...)
		return
	}
	verr.Errors = append(verr.Errors, err)
	verr.Expressions = append(verr.Expressions, def)
}
// runSet executes the DSL for all expressions in the given set. The expression
// DSLs may append to the set as they execute, so the loop re-checks len(set)
// until no unexecuted expressions remain; the counter bounds a DSL that keeps
// generating expressions forever.
func runSet(set ExpressionSet) error {
	executed := 0
	recursed := 0
	for executed < len(set) {
		recursed++
		for _, def := range set[executed:] {
			executed++
			if def == nil {
				continue
			}
			if source, ok := def.(Source); ok {
				Execute(source.DSL(), def)
			}
		}
		if recursed > 100 {
			return fmt.Errorf("too many generated expressions, infinite loop?")
		}
	}
	return nil
}
// prepareSet runs the pre validation steps on all the set expressions that
// define one.
func prepareSet(set ExpressionSet) error {
for _, def := range set {
if def == nil {
continue
}
if p, ok := def.(Preparer); ok {
p.Prepare()
}
}
return nil
}
// validateSet runs the validation on all the set expressions that define one.
// Individual failures are accumulated into a single ValidationErrors which is
// recorded on the global Context. The return value is Context.Errors, which
// may also include errors recorded before this call.
func validateSet(set ExpressionSet) error {
	errors := &ValidationErrors{}
	for _, def := range set {
		if def == nil {
			continue
		}
		if validate, ok := def.(Validator); ok {
			if err := validate.Validate(); err != nil {
				errors.AddError(def, err)
			}
		}
	}
	// Record all validation failures as one aggregated Error.
	if len(errors.Errors) > 0 {
		Context.Record(&Error{GoError: errors})
	}
	return Context.Errors
}
// finalizeSet runs the finalizer on all the set expressions that implement
// Finalizer. Nil entries are skipped. It always returns nil; the error
// result only mirrors the signatures of the other set-processing steps.
func finalizeSet(set ExpressionSet) error {
	for _, def := range set {
		if def == nil {
			continue
		}
		f, ok := def.(Finalizer)
		if ok {
			f.Finalize()
		}
	}
	return nil
}
// caller returns the name of the calling function. It inspects the frame two
// levels up the stack, skipping one extra frame when that frame lives in
// current.go, and returns "<unknown>" when the stack cannot be resolved.
func caller() string {
	const skip = 2
	pc, file, _, ok := runtime.Caller(skip)
	if ok && filepath.Base(file) == "current.go" {
		pc, _, _, ok = runtime.Caller(skip + 1)
	}
	if !ok {
		return "<unknown>"
	}
	fn := runtime.FuncForPC(pc)
	return fn.Name()
}
package data
import (
"fmt"
"github.com/fogleman/gg"
"github.com/nmaupu/gocube/math3D"
"log"
"math"
)
/**
 * 3D calculations are based on:
 * x axis going to the right
 * y axis going up
 * z axis going backwards
 * Matrix computations use column vectors:
 * [1 0 0 0]   [x]
 * |0 1 0 0|   |y|
 * |0 0 1 0| x |z|
 * [0 0 0 1]   [w]
 * So matrices found online may have to be transposed to match this layout.
 * These are the same conventions used by OpenGL for matrix calculations.
 */
const (
	AngleOfView = 60   // FOV: vertical field of view, in degrees
	Near        = .1   // near clipping plane distance
	Far         = 100. // far clipping plane distance
	// Camera ("eye") position in world coordinates. Negative Z places it
	// in front of the scene, since the Z axis points backwards.
	CamX = 0.
	CamY = 0.
	CamZ = -6.
)
var (
	rotRadX = getRad(30)  // default scene rotation around the X axis, radians
	rotRadY = -getRad(35) // default scene rotation around the Y axis, radians
	// Shared projection and camera (view) matrices, built once at init.
	Mproj = getProjectionMatrix(AngleOfView, Near, Far)
	Cam   = getCameraTranslation(CamX, CamY, CamZ)
)
// cubie3d holds everything needed to draw one sticker (cubie face) in 3D:
// the world-space position of one corner, the sticker color, the cube
// dimension, and the direction vectors spanning the sticker quad.
type cubie3d struct {
	Point    *math3D.Matrix // corner of the sticker, 4D position vector (w=1)
	HexColor string         // fill color of the sticker, e.g. "#FF0000"
	CubeSize int            // cube dimension (3 for a 3x3); used to size the quad
	DirRight *math3D.Matrix // direction towards the next sticker to the right (w=0)
	DirDown  *math3D.Matrix // direction towards the next sticker below (w=0)
}
// getRotationMatrixX returns the 4x4 matrix rotating column vectors by rad
// radians around the X axis. Row order is critical and matches the
// column-vector convention described at the top of the file.
func getRotationMatrixX(rad float64) *math3D.Matrix {
	m := new(math3D.Matrix)
	m.AddRow([]float64{1, 0, 0, 0})
	m.AddRow([]float64{0, math.Cos(rad), -math.Sin(rad), 0})
	m.AddRow([]float64{0, math.Sin(rad), math.Cos(rad), 0})
	m.AddRow([]float64{0, 0, 0, 1})
	return m
}
// getRotationMatrixY returns the 4x4 matrix rotating column vectors by rad
// radians around the Y axis.
func getRotationMatrixY(rad float64) *math3D.Matrix {
	m := new(math3D.Matrix)
	m.AddRow([]float64{math.Cos(rad), 0, math.Sin(rad), 0})
	m.AddRow([]float64{0, 1, 0, 0})
	m.AddRow([]float64{-math.Sin(rad), 0, math.Cos(rad), 0})
	m.AddRow([]float64{0, 0, 0, 1})
	return m
}
// getRotationMatrixZ returns the 4x4 matrix rotating column vectors by rad
// radians around the Z axis.
func getRotationMatrixZ(rad float64) *math3D.Matrix {
	m := new(math3D.Matrix)
	m.AddRow([]float64{math.Cos(rad), -math.Sin(rad), 0, 0})
	m.AddRow([]float64{math.Sin(rad), math.Cos(rad), 0, 0})
	m.AddRow([]float64{0, 0, 1, 0})
	m.AddRow([]float64{0, 0, 0, 1})
	return m
}
// getRotationMatrixXYZ returns the combined rotation Rx·Ry·Rz. When applied
// to a column vector the Z rotation is applied first, then Y, then X.
func getRotationMatrixXYZ(radX, radY, radZ float64) *math3D.Matrix {
	return getRotationMatrixX(radX).
		Product(getRotationMatrixY(radY)).
		Product(getRotationMatrixZ(radZ))
}
// getTranslationMatrix returns the 4x4 translation matrix moving points by
// the x, y, z components of vec. vec's w component is ignored, so both
// position (w=1) and direction (w=0) vectors may be passed as the offset.
func getTranslationMatrix(vec *math3D.Matrix) *math3D.Matrix {
	m := new(math3D.Matrix)
	m.AddRow([]float64{1, 0, 0, vec.At(0, 0)})
	m.AddRow([]float64{0, 1, 0, vec.At(1, 0)})
	m.AddRow([]float64{0, 0, 1, vec.At(2, 0)})
	m.AddRow([]float64{0, 0, 0, 1})
	return m
}
// getScaleMatrix returns the 4x4 matrix scaling by x, y and z along the
// respective axes.
func getScaleMatrix(x, y, z float64) *math3D.Matrix {
	m := new(math3D.Matrix)
	m.AddRow([]float64{x, 0, 0, 0})
	m.AddRow([]float64{0, y, 0, 0})
	m.AddRow([]float64{0, 0, z, 0})
	m.AddRow([]float64{0, 0, 0, 1})
	return m
}
// getRad converts an angle from degrees to radians.
func getRad(deg float64) float64 {
	return (deg * math.Pi) / 180
}
// buildFace3d converts one face of the cube into a list of cubie3d quads
// ready for projection: for each sticker it computes the 3D position of one
// corner plus "right" and "down" direction vectors, centered on the origin,
// scaled into [-1,1] and rotated by the given Euler angles (radians).
// Only "white", "green" and "red" are supported; any other color panics
// (rotate the cube itself with x/y/z moves to show other faces).
func buildFace3d(cube *Cube, color string, radX, radY, radZ float64) []cubie3d {
	log.Printf("Building face %s", color)
	var x, y, z float64
	face := cube.Faces[color]
	ret := make([]cubie3d, 0)
	// In the real world coordinates, every points are going to be
	// between [-1,1] - left, right
	// and [-1,1] bottom, up
	// So, we are building a cube from bottom left piece at origin. Coordinates will be included in [0,2] for a 3x3
	// and at the end, we will center it such as its core will be at origin (0,0,0)
	// So for a 3X3, all coordinates will be in [-1.5,1.5]
	// We need to get that between [-1,1] and centered.
	// As a result, we will be translating the cube according to the number of cubies divided by 2 (dim/2)
	// and we will be scaling everything by 1/(dim/2) so all points will be included in [-1,1]
	// Note about 4x4 matrices
	// w=1 -> position in space
	// w=0 -> direction
	halfWidth := float64(cube.CubeSize) / 2. // i.e. For a 3x3, it's 1.5
	// Reminder: Z axis is going backwards, that's why Z is positive ;)
	toOrigMat := getTranslationMatrix(math3D.NewVector4(-halfWidth, -halfWidth, halfWidth, 0))
	scale := 1. / halfWidth
	scaleMat := getScaleMatrix(scale, scale, scale)
	rotationMat := getRotationMatrixXYZ(radX, radY, radZ)
	for i := 0; i < len(face.Colors); i++ {
		for j := 0; j < len(face.Colors[i]); j++ {
			c3d := cubie3d{
				HexColor: face.Colors[i][j].HexColor,
				CubeSize: cube.CubeSize,
			}
			// According to cube implementation, when rotating faces, "white" face is always on top, "green" on front.
			// Faces are indexed by name "white" and "green" (it's a map)
			// So if doing a x move, white will display in fact green face
			// As a result, we only need to display 3 faces: white, green and red for 3D
			// If one needs to display other faces, just set up the cube using x,y or z moves.
			switch color {
			case "white":
				x = float64(j)
				y = float64(cube.CubeSize)
				z = -float64(cube.CubeSize) + float64(i)
				c3d.DirRight = math3D.NewVector4(1, 0, 0, 0)
				c3d.DirDown = math3D.NewVector4(0, 0, 1, 0)
			case "green":
				x = float64(j)
				y = float64(cube.CubeSize) - float64(i)
				z = 0.
				c3d.DirRight = math3D.NewVector4(1, 0, 0, 0)
				c3d.DirDown = math3D.NewVector4(0, -1, 0, 0)
			case "red":
				x = float64(cube.CubeSize)
				y = float64(cube.CubeSize) - float64(i)
				z = -float64(j)
				c3d.DirRight = math3D.NewVector4(0, 0, -1, 0)
				c3d.DirDown = math3D.NewVector4(0, -1, 0, 0)
			default:
				panic(fmt.Sprintf("%s color is not implemented!", color))
			}
			// Center the computed coords around origin, scale it and rotate according to params
			c3d.Point = rotationMat.Product(
				scaleMat.Product(
					toOrigMat.Product(
						math3D.NewVector4(x, y, z, 1))))
			// Also rotate "cubie building vectors" according to params
			c3d.DirRight = rotationMat.Product(c3d.DirRight)
			c3d.DirDown = rotationMat.Product(c3d.DirDown)
			ret = append(ret, c3d)
		}
	}
	return ret
}
// convertToDrawingPlan converts a point from real-world 3D coordinates
// (projected x and y in [-1,1]) to pixel coordinates on the drawing plan.
// The y axis is flipped because image coordinates grow downwards.
// Returns a 4D position vector (w=1); this is not really needed, it's done
// just by convention — only x and y are meaningful.
func convertToDrawingPlan(vec *math3D.Matrix, imgWidth, imgHeight int) *math3D.Matrix {
	// A 4x1 column vector is required. The original check used &&, which
	// only panicked when BOTH dimensions were wrong (e.g. a 4x2 matrix
	// slipped through); || enforces the stated contract.
	if vec.GetNbRows() != 4 || vec.GetNbCols() != 1 {
		panic("Parameter is not a 4D vector")
	}
	x := vec.At(0, 0)
	y := vec.At(1, 0)
	m := math3D.NewVector4(
		(x+1)*.5*float64(imgWidth),
		(1-(y+1)*.5)*float64(imgHeight),
		0,
		1,
	)
	return m
}
// getLineWidth computes the stroke width for cubie outlines as
// 2^(-0.1*cubeDim) * imgWidth/100: the width shrinks as the cube dimension
// grows but never reaches zero, and imgWidth/100 is an empirically good base.
func getLineWidth(cubeDim, imgWidth int) float64 {
	attenuation := math.Pow(2, -.1*float64(cubeDim))
	base := float64(imgWidth) / 100.
	return attenuation * base
}
// drawCubie renders one sticker quad: the four corners are built from the
// cubie's corner point plus its right/down direction vectors (scaled so the
// whole face spans the 2 world units of [-1,1]), projected, converted to
// pixel coordinates, then filled with the sticker color and outlined in
// black. If projectPoint panics (a corner falls outside the renderable
// range), the deferred recover logs it and the cubie is simply not drawn —
// all drawing happens after the projections.
func drawCubie(ctx *gg.Context, c3d cubie3d) {
	// There are nb of cubies between [-1,1], 2 units
	vecScale := 2. / float64(c3d.CubeSize)
	// Quad corners: m1 is the base corner, m2 = m1 + right, m3 = m2 + down,
	// m4 = m1 + down.
	m1 := c3d.Point
	m2 := getTranslationMatrix(c3d.DirRight.ScalarMultiply(vecScale)).Product(m1)
	m3 := getTranslationMatrix(c3d.DirDown.ScalarMultiply(vecScale)).Product(m2)
	m4 := getTranslationMatrix(c3d.DirDown.ScalarMultiply(vecScale)).Product(m1)
	// Convert to drawing plan
	defer func() {
		if r := recover(); r != nil {
			log.Println(r)
		}
	}()
	m1 = projectPoint(m1)
	m2 = projectPoint(m2)
	m3 = projectPoint(m3)
	m4 = projectPoint(m4)
	p1 := convertToDrawingPlan(m1, ctx.Width(), ctx.Height())
	p2 := convertToDrawingPlan(m2, ctx.Width(), ctx.Height())
	p3 := convertToDrawingPlan(m3, ctx.Width(), ctx.Height())
	p4 := convertToDrawingPlan(m4, ctx.Width(), ctx.Height())
	ctx.SetLineWidth(getLineWidth(c3d.CubeSize, ctx.Width()))
	// Trace the closed quad p1->p2->p3->p4->p1.
	ctx.MoveTo(p1.At(0, 0), p1.At(1, 0))
	ctx.LineTo(p2.At(0, 0), p2.At(1, 0))
	ctx.LineTo(p3.At(0, 0), p3.At(1, 0))
	ctx.LineTo(p4.At(0, 0), p4.At(1, 0))
	ctx.LineTo(p1.At(0, 0), p1.At(1, 0))
	ctx.SetHexColor(c3d.HexColor)
	ctx.FillPreserve()
	ctx.SetHexColor("#000000")
	ctx.Stroke()
}
// drawFace renders every cubie quad of a face onto the drawing context.
func drawFace(ctx *gg.Context, c3ds []cubie3d) {
	for i := range c3ds {
		drawCubie(ctx, c3ds[i])
	}
}
// drawAxes draws the world X (red), Y (green) and Z (blue) axes with arrow
// heads, rotated by the given Euler angles and scaled by width, onto the
// drawing context. Intended as a debugging aid for the 3D setup. If a
// projection panics (an axis end outside the renderable range), the deferred
// recover logs it and nil is returned instead of ctx.
func drawAxes(ctx *gg.Context, width, radX, radY, radZ float64) *gg.Context {
	// Origins' points
	poX := math3D.NewVector4(0, 0, 0, 1)
	poY := math3D.NewVector4(0, 0, 0, 1)
	poZ := math3D.NewVector4(0, 0, 0, 1)
	// Real world coords axes: each unit axis end point plus two short
	// segments forming its arrow head.
	arrowScale := .045
	rwAxisX := getTranslationMatrix(math3D.NewVector3(1, 0, 0)).Product(poX)
	rwArrowX1 := getTranslationMatrix(math3D.NewVector3(-arrowScale, arrowScale, 0)).Product(rwAxisX)
	rwArrowX2 := getTranslationMatrix(math3D.NewVector3(-arrowScale, -arrowScale, 0)).Product(rwAxisX)
	rwAxisY := getTranslationMatrix(math3D.NewVector3(0, 1, 0)).Product(poY)
	rwArrowY1 := getTranslationMatrix(math3D.NewVector3(-arrowScale, -arrowScale, 0)).Product(rwAxisY)
	rwArrowY2 := getTranslationMatrix(math3D.NewVector3(arrowScale, -arrowScale, 0)).Product(rwAxisY)
	rwAxisZ := getTranslationMatrix(math3D.NewVector3(0, 0, 1)).Product(poZ)
	rwArrowZ1 := getTranslationMatrix(math3D.NewVector3(-arrowScale, 0, -arrowScale)).Product(rwAxisZ)
	rwArrowZ2 := getTranslationMatrix(math3D.NewVector3(arrowScale, 0, -arrowScale)).Product(rwAxisZ)
	// Rotations: apply the same scene rotation to every axis/arrow point.
	rotMat := getRotationMatrixXYZ(radX, radY, radZ)
	rwAxisX = rotMat.Product(rwAxisX)
	rwArrowX1 = rotMat.Product(rwArrowX1)
	rwArrowX2 = rotMat.Product(rwArrowX2)
	rwAxisY = rotMat.Product(rwAxisY)
	rwArrowY1 = rotMat.Product(rwArrowY1)
	rwArrowY2 = rotMat.Product(rwArrowY2)
	rwAxisZ = rotMat.Product(rwAxisZ)
	rwArrowZ1 = rotMat.Product(rwArrowZ1)
	rwArrowZ2 = rotMat.Product(rwArrowZ2)
	// Axis
	defer func() {
		if r := recover(); r != nil {
			log.Println(r)
		}
	}()
	// Scale, project, and convert every point to pixel coordinates.
	scaleMat := getScaleMatrix(width, width, width)
	aX := convertToDrawingPlan(projectPoint(scaleMat.Product(rwAxisX)), ctx.Width(), ctx.Height())
	arrX1 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowX1)), ctx.Width(), ctx.Height())
	arrX2 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowX2)), ctx.Width(), ctx.Height())
	aY := convertToDrawingPlan(projectPoint(scaleMat.Product(rwAxisY)), ctx.Width(), ctx.Height())
	arrY1 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowY1)), ctx.Width(), ctx.Height())
	arrY2 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowY2)), ctx.Width(), ctx.Height())
	aZ := convertToDrawingPlan(projectPoint(scaleMat.Product(rwAxisZ)), ctx.Width(), ctx.Height())
	arrZ1 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowZ1)), ctx.Width(), ctx.Height())
	arrZ2 := convertToDrawingPlan(projectPoint(scaleMat.Product(rwArrowZ2)), ctx.Width(), ctx.Height())
	origX := convertToDrawingPlan(projectPoint(poX), ctx.Width(), ctx.Height())
	origY := convertToDrawingPlan(projectPoint(poY), ctx.Width(), ctx.Height())
	origZ := convertToDrawingPlan(projectPoint(poZ), ctx.Width(), ctx.Height())
	// Draw axes
	ctx.SetLineWidth(5)
	ctx.SetHexColor("#FF0000")
	ctx.DrawLine(origX.At(0, 0), origX.At(1, 0), aX.At(0, 0), aX.At(1, 0))
	ctx.DrawLine(aX.At(0, 0), aX.At(1, 0), arrX1.At(0, 0), arrX1.At(1, 0))
	ctx.DrawLine(aX.At(0, 0), aX.At(1, 0), arrX2.At(0, 0), arrX2.At(1, 0))
	ctx.Stroke()
	ctx.SetHexColor("#00FF00")
	ctx.DrawLine(origY.At(0, 0), origY.At(1, 0), aY.At(0, 0), aY.At(1, 0))
	ctx.DrawLine(aY.At(0, 0), aY.At(1, 0), arrY1.At(0, 0), arrY1.At(1, 0))
	ctx.DrawLine(aY.At(0, 0), aY.At(1, 0), arrY2.At(0, 0), arrY2.At(1, 0))
	ctx.Stroke()
	ctx.SetHexColor("#0000FF")
	ctx.DrawLine(origZ.At(0, 0), origZ.At(1, 0), aZ.At(0, 0), aZ.At(1, 0))
	ctx.DrawLine(aZ.At(0, 0), aZ.At(1, 0), arrZ1.At(0, 0), arrZ1.At(1, 0))
	ctx.DrawLine(aZ.At(0, 0), aZ.At(1, 0), arrZ2.At(0, 0), arrZ2.At(1, 0))
	ctx.Stroke()
	return ctx
}
// getProjectionMatrix builds an OpenGL-style perspective projection matrix.
// angleOfView is the vertical FOV in degrees, n and f the near and far
// clipping plane distances. The aspect ratio is fixed to 1 (square output).
func getProjectionMatrix(angleOfView, n, f float64) *math3D.Matrix {
	ret := new(math3D.Matrix)
	radFov := getRad(angleOfView)
	aspect := 1.
	scaleTan := math.Tan(radFov * .5)
	ret.AddRow([]float64{1 / (aspect * scaleTan), 0, 0, 0})
	ret.AddRow([]float64{0, 1 / scaleTan, 0, 0})
	ret.AddRow([]float64{0, 0, -(f + n) / (f - n), -2 * f * n / (f - n)})
	// The bottom row copies -z into w, enabling the perspective divide.
	ret.AddRow([]float64{0, 0, -1, 0})
	return ret
}
// multPointMatrix multiplies the 4D column vector v by the 4x4 matrix m and
// performs the perspective divide: when the resulting w differs from 1,
// x, y and z are divided by w (this is where perspective happens).
// Returns a 4D vector.
// NOTE(review): a resulting w of 0 (a pure direction vector) would divide
// by zero; callers appear to pass only position vectors — confirm.
func multPointMatrix(m *math3D.Matrix, v *math3D.Matrix) *math3D.Matrix {
	ret := m.Product(v)
	w := ret.At(3, 0)
	// normalize if w is different than 1 (convert from homogeneous to Cartesian coordinates)
	if w != 1 {
		ret.Data[0][0] /= w
		ret.Data[1][0] /= w
		ret.Data[2][0] /= w
	}
	return ret
}
// getCameraTranslation returns the view matrix for the camera ("the eye"):
// a plain translation by (x, y, z).
func getCameraTranslation(x, y, z float64) *math3D.Matrix {
	return getTranslationMatrix(math3D.NewVector3(x, y, z))
}
// projectPoint projects a world-space point onto the viewing plane by
// applying the camera translation and then the perspective projection
// matrix (perspective divide included at each step). p must be a 4D
// position vector; the returned 4D vector has meaningful x and y only.
// If the projected x or y falls outside [-1,1] the point cannot be
// rendered and the function panics (callers recover from this).
func projectPoint(p *math3D.Matrix) *math3D.Matrix {
	// projected = Mproj x (Cam x p)
	projected := multPointMatrix(Mproj, multPointMatrix(Cam, p))
	x := projected.At(0, 0)
	y := projected.At(1, 0)
	if !(x >= -1 && x <= 1 && y >= -1 && y <= 1) {
		panic(fmt.Sprintf("Point cannot be rendered point=%+v", projected))
	}
	return projected
}
func drawCube3d(ctx *gg.Context, cube *Cube) *gg.Context {
var face3dMatrices []cubie3d
radX := rotRadX
radY := rotRadY
radZ := 0.
face3dMatrices = buildFace3d(cube, "white", radX, radY, radZ)
drawFace(ctx, face3dMatrices)
face3dMatrices = buildFace3d(cube, "red", radX, radY, radZ)
drawFace(ctx, face3dMatrices)
face3dMatrices = buildFace3d(cube, "green", radX, radY, radZ)
drawFace(ctx, face3dMatrices)
//DrawAxes(ctx, 4, radX, radY, radZ)
return ctx
} | data/cube3d.go | 0.703448 | 0.626738 | cube3d.go | starcoder |
package gaussproc
import (
"github.com/btracey/kernel"
"gonum.org/v1/gonum/mat"
"gonum.org/v1/gonum/stat"
)
// MeanStdMat returns the mean and standard deviations of the columns of the
// data matrix. If all of the elements of a column have the same value, a
// standard deviation of 1 is returned so that callers normalizing by std do
// not divide by zero. If x == nil, MeanStdMat panics.
func MeanStdMat(x mat.Matrix) (mean, std []float64) {
	if x == nil {
		panic(nilInput)
	}
	samp, dim := x.Dims()
	mean = make([]float64, dim)
	std = make([]float64, dim)
	col := make([]float64, samp) // scratch buffer reused for every column
	for j := 0; j < dim; j++ {
		mat.Col(col, j, x)
		m, s := stat.MeanStdDev(col, nil)
		mean[j] = m
		if s == 0 {
			s = 1
		}
		std[j] = s
	}
	return mean, std
}
// kernelMatrix computes the m x n kernel matrix between the rows of x and xp,
// scaling each row by (mean, std) when a scaling is provided (nil mean means
// no scaling). The result is stored into k, which is allocated when nil and
// must otherwise have dimensions matching x's and xp's row counts.
// Panics if x and xp disagree on column count or k has the wrong shape.
func kernelMatrix(k *mat.Dense, x, xp mat.Matrix, mean, std []float64, ker kernel.Kerneler) *mat.Dense {
	m, p := x.Dims()
	n, p2 := xp.Dims()
	if p != p2 {
		panic(badInputDim)
	}
	if k == nil {
		k = mat.NewDense(m, n, nil)
	}
	m2, n2 := k.Dims()
	if m2 != m || n2 != n {
		panic(badStorageDim)
	}
	xi := make([]float64, p)
	xj := make([]float64, p)
	for i := 0; i < m; i++ {
		rowScaled(xi, i, x, mean, std)
		for j := 0; j < n; j++ {
			// NOTE(review): xj is re-extracted and re-scaled for every i;
			// it could be cached per-j to avoid repeated work.
			rowScaled(xj, j, xp, mean, std)
			v := ker.Kernel(xi, xj)
			k.Set(i, j, v)
		}
	}
	return k
}
// kernelMatrixSym computes the symmetric kernel matrix between the rows of x
// and themselves, scaling rows by (mean, std) when provided (nil mean means
// no scaling). noise is an observation standard deviation: noise^2 is added
// to the diagonal. k is allocated when nil and must otherwise match x's row
// count; only the upper triangle is computed and set symmetrically.
func kernelMatrixSym(k *mat.SymDense, x mat.Matrix, mean, std []float64, ker kernel.Kerneler, noise float64) *mat.SymDense {
	m, p := x.Dims()
	if k == nil {
		k = mat.NewSymDense(m, nil)
	}
	mk := k.Symmetric()
	if m != mk {
		panic(badStorageDim)
	}
	xi := make([]float64, p)
	xj := make([]float64, p)
	for i := 0; i < m; i++ {
		rowScaled(xi, i, x, mean, std)
		for j := i; j < m; j++ {
			rowScaled(xj, j, x, mean, std)
			v := ker.Kernel(xi, xj)
			if i == j {
				v += noise * noise // so the noise is the std not the covariance
			}
			k.SetSym(i, j, v)
		}
	}
	return k
}
// scaleY normalizes y into dst element-wise as (y[i]-mean)/std. A nil dst is
// allocated; otherwise its length must match y (panics with badInputDim).
// A mean and std both equal to zero means "no scaling computed": the values
// are copied through unchanged.
func scaleY(dst, y []float64, mean, std float64) []float64 {
	if dst == nil {
		dst = make([]float64, len(y))
	}
	if len(dst) != len(y) {
		panic(badInputDim)
	}
	if mean == 0 && std == 0 {
		copy(dst, y)
		return dst
	}
	for i := 0; i < len(y); i++ {
		dst[i] = (y[i] - mean) / std
	}
	return dst
}
// unscaleY is the inverse of scaleY: it writes y[i]*std + mean into dst.
// A nil dst is allocated; otherwise its length must match y (panics with
// badInputDim). A mean and std both equal to zero means "no scaling
// computed": the values are copied through unchanged.
func unscaleY(dst, y []float64, mean, std float64) []float64 {
	if dst == nil {
		dst = make([]float64, len(y))
	}
	if len(dst) != len(y) {
		panic(badInputDim)
	}
	if mean == 0 && std == 0 {
		copy(dst, y)
		return dst
	}
	for i := 0; i < len(y); i++ {
		dst[i] = y[i]*std + mean
	}
	return dst
}
// rowScaled returns the i'th row of the matrix a, scaling the row according to
// the scaling provided if necessary.
func rowScaled(row []float64, i int, a mat.Matrix, mean, std []float64) {
mat.Row(row, i, a)
if mean == nil {
return
}
if len(row) != len(mean) {
panic("gaussproc: bad size")
}
if len(mean) != len(std) {
panic("gaussproc: bad size")
}
for i, v := range row {
row[i] = (v - mean[i]) / std[i]
}
return
} | gen.go | 0.713132 | 0.641871 | gen.go | starcoder |
package arc
// min returns the smaller of x and y.
func min(x, y int) int {
	if y < x {
		return y
	}
	return x
}
// max returns the larger of x and y.
func max(x, y int) int {
	if y > x {
		return y
	}
	return x
}
// container represents a ARC cache container.
type container struct {
	Capacity int   // maximum number of resident entries
	p        int   // target size of t1, adapted on ghost-list hits
	t1       *list // resident entries seen once recently
	t2       *list // resident entries seen at least twice recently
	b1       *list // ghost entries recently evicted from t1
	b2       *list // ghost entries recently evicted from t2
}
// replace evicts one entry from the resident lists into the corresponding
// ghost list: from t1 into b1 while t1 is at or above its target size p,
// otherwise from t2 into b2.
func (c *container) replace() {
	if c.t1.Count() >= max(1, c.p) { // t1's size exceeds target (t1 is too big)
		// grab from t1 and put to b1
		if key, val := c.t1.Discard(); len(key) > 0 {
			c.b1.Save(key, val)
		}
	} else {
		// grab from t2 and put to b2
		if key, val := c.t2.Discard(); len(key) > 0 {
			c.b2.Save(key, val)
		}
	}
}
// Get returns the value cached under key, or (nil, nil) on a miss.
// Hits in t1 promote the entry to t2 (seen twice recently). Hits in the
// ghost lists adapt the target size p (grown on b1 hits, shrunk on b2
// hits), evict via replace, and resurrect the entry into t2.
func (c *container) Get(key string) (interface{}, error) {
	if c.t1.Contains(key) { // seen twice recently, put it to t2
		val := c.t1.Remove(key)
		c.t2.Save(key, val)
		return val, nil
	}
	if c.t2.Contains(key) {
		return c.t2.Get(key), nil
	}
	if c.b1.Contains(key) {
		c.p = min(c.Capacity, c.p+max(c.b2.Count()/c.b1.Count(), 1)) // adapt the target size of t1
		c.replace()
		val := c.b1.Remove(key)
		c.t2.Save(key, val) // seen twice recently, put it to t2
		return val, nil
	}
	if c.b2.Contains(key) {
		c.p = max(0, c.p-max(c.b1.Count()/c.b2.Count(), 1)) // adapt the target size of t1
		c.replace()
		val := c.b2.Remove(key)
		c.t2.Save(key, val) // seen twice recently, put it to t2
		return val, nil
	}
	return nil, nil
}
// Save stores the key/value pair: any existing entry for key is removed
// first, room is made per the ARC replacement policy, and the pair is then
// inserted into t1 (seen once recently). Always returns nil.
func (c *container) Save(key string, value interface{}) error {
	// remove the item anyway
	c.Remove(key)
	if c.t1.Count()+c.b1.Count() == c.Capacity { // b1 + t1 is full
		if c.t1.Count() < c.Capacity { // still room in t1
			c.b1.Discard()
			c.replace()
		} else {
			c.t1.Discard()
		}
	} else { //c.t1.Count()+c.b1.Count() < c.Capacity {
		total := c.t1.Count() + c.t2.Count() + c.b1.Count() + c.b2.Count()
		if total >= c.Capacity { // cache full
			// At the directory limit (2*Capacity), drop a b2 ghost first.
			if total == 2*c.Capacity {
				c.b2.Discard()
			}
			c.replace()
		}
	}
	c.t1.Save(key, value) // seen once recently, put it to t1
	return nil
}
// Remove deletes the key from every internal list (resident and ghost).
// Always returns nil.
func (c *container) Remove(key string) error {
	for _, l := range []*list{c.t1, c.t2, c.b1, c.b2} {
		l.Remove(key)
	}
	return nil
}
func (c *container) Clear() error {
c.p = 0
c.t1.Init()
c.t2.Init()
c.b1.Init()
c.b2.Init()
return nil
} | cache/container/memory/arc/container.go | 0.729712 | 0.450662 | container.go | starcoder |
package rui
import (
"strconv"
"strings"
)
const (
	// ColumnCount is the constant for the "column-count" property tag.
	// The "column-count" int property specifies number of columns into which the content is break
	// Values less than zero are not valid. if the "column-count" property value is 0 then
	// the number of columns is calculated based on the "column-width" property
	ColumnCount = "column-count"
	// ColumnWidth is the constant for the "column-width" property tag.
	// The "column-width" SizeUnit property specifies the width of each column.
	ColumnWidth = "column-width"
	// ColumnGap is the constant for the "column-gap" property tag.
	// The "column-gap" SizeUnit property sets the size of the gap (gutter) between columns.
	ColumnGap = "column-gap"
	// ColumnSeparator is the constant for the "column-separator" property tag.
	// The "column-separator" property specifies the line drawn between columns in a multi-column layout.
	ColumnSeparator = "column-separator"
	// ColumnSeparatorStyle is the constant for the "column-separator-style" property tag.
	// The "column-separator-style" int property sets the style of the line drawn between
	// columns in a multi-column layout.
	// Valid values are NoneLine (0), SolidLine (1), DashedLine (2), DottedLine (3), and DoubleLine (4).
	ColumnSeparatorStyle = "column-separator-style"
	// ColumnSeparatorWidth is the constant for the "column-separator-width" property tag.
	// The "column-separator-width" SizeUnit property sets the width of the line drawn between
	// columns in a multi-column layout.
	ColumnSeparatorWidth = "column-separator-width"
	// ColumnSeparatorColor is the constant for the "column-separator-color" property tag.
	// The "column-separator-color" Color property sets the color of the line drawn between
	// columns in a multi-column layout.
	ColumnSeparatorColor = "column-separator-color"
)
// ColumnLayout is a container that lays its child Views out in multiple
// columns. It adds no methods beyond ViewsContainer; column behavior is
// controlled via the Column* properties.
type ColumnLayout interface {
	ViewsContainer
}
// columnLayoutData is the default implementation of ColumnLayout, built on
// top of viewsContainerData.
type columnLayoutData struct {
	viewsContainerData
}
// NewColumnLayout creates a new ColumnLayout object, initializes it for the
// given session, applies the initial params, and returns it.
func NewColumnLayout(session Session, params Params) ColumnLayout {
	view := new(columnLayoutData)
	view.Init(session)
	setInitParams(view, params)
	return view
}
// newColumnLayout is the View factory used to register the ColumnLayout type.
func newColumnLayout(session Session) View {
	return NewColumnLayout(session, nil)
}
// Init initializes the fields of columnLayoutData with default values.
// The receiver is renamed to columnLayout for consistency with the other
// methods of this type; the previous name shadowed the exported
// ColumnLayout interface identifier inside the method.
func (columnLayout *columnLayoutData) Init(session Session) {
	columnLayout.viewsContainerData.Init(session)
	columnLayout.tag = "ColumnLayout"
	//columnLayout.systemClass = "ruiColumnLayout"
}
// normalizeTag lowercases the tag and maps the generic Gap alias to the
// ColumnGap property tag.
func (columnLayout *columnLayoutData) normalizeTag(tag string) string {
	tag = strings.ToLower(tag)
	if tag == Gap {
		return ColumnGap
	}
	return tag
}
// Get returns the value of the property, normalizing the tag first
// (lowercase, Gap alias resolved).
func (columnLayout *columnLayoutData) Get(tag string) interface{} {
	return columnLayout.get(columnLayout.normalizeTag(tag))
}
// Remove removes the property, normalizing the tag first
// (lowercase, Gap alias resolved).
func (columnLayout *columnLayoutData) Remove(tag string) {
	columnLayout.remove(columnLayout.normalizeTag(tag))
}
// remove deletes the property from the underlying container and, when the
// view has already been created, clears the corresponding CSS property on
// the browser side ("column-rule" is the shorthand for the separator).
func (columnLayout *columnLayoutData) remove(tag string) {
	columnLayout.viewsContainerData.remove(tag)
	if columnLayout.created {
		switch tag {
		case ColumnCount, ColumnWidth, ColumnGap:
			updateCSSProperty(columnLayout.htmlID(), tag, "", columnLayout.Session())
		case ColumnSeparator:
			updateCSSProperty(columnLayout.htmlID(), "column-rule", "", columnLayout.Session())
		}
	}
}
// Set stores the property value, normalizing the tag first
// (lowercase, Gap alias resolved). Returns false if the value was rejected.
func (columnLayout *columnLayoutData) Set(tag string, value interface{}) bool {
	return columnLayout.set(columnLayout.normalizeTag(tag), value)
}
// set stores the property value, delegating storage to
// viewsContainerData.set, then pushes the visual change to the browser side
// when the view has already been created. A nil value removes the property.
// Returns false if the underlying container rejected the value.
func (columnLayout *columnLayoutData) set(tag string, value interface{}) bool {
	if value == nil {
		columnLayout.remove(tag)
		return true
	}
	if !columnLayout.viewsContainerData.set(tag, value) {
		return false
	}
	if columnLayout.created {
		switch tag {
		case ColumnSeparator:
			// Rebuild the whole "column-rule" CSS shorthand from the stored
			// ColumnSeparatorProperty (empty string when the property is unset).
			css := ""
			session := columnLayout.Session()
			if val, ok := columnLayout.properties[ColumnSeparator]; ok {
				separator := val.(ColumnSeparatorProperty)
				css = separator.cssValue(columnLayout.Session())
			}
			updateCSSProperty(columnLayout.htmlID(), "column-rule", css, session)
		case ColumnCount:
			// A positive count is written as-is; zero or an invalid value
			// falls back to "auto" so the browser derives the count from
			// "column-width".
			session := columnLayout.Session()
			if count, ok := intProperty(columnLayout, tag, session, 0); ok && count > 0 {
				updateCSSProperty(columnLayout.htmlID(), tag, strconv.Itoa(count), session)
			} else {
				updateCSSProperty(columnLayout.htmlID(), tag, "auto", session)
			}
			// NOTE(review): ColumnWidth and ColumnGap changes are not pushed
			// to CSS here — confirm viewsContainerData.set covers them.
		}
	}
	return true
}
// GetColumnCount returns the int value which specifies the number of columns
// into which the content of the ColumnLayout is broken. If the returned
// value is 0 then the number of columns is calculated based on the
// "column-width" property.
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnCount(view View, subviewID string) int {
	if subviewID != "" {
		view = ViewByID(view, subviewID)
	}
	if view == nil {
		return 0
	}
	result, _ := intStyledProperty(view, ColumnCount, 0)
	return result
}
// GetColumnWidth returns the SizeUnit value which specifies the width of
// each column of the ColumnLayout.
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnWidth(view View, subviewID string) SizeUnit {
	if subviewID != "" {
		view = ViewByID(view, subviewID)
	}
	if view == nil {
		return AutoSize()
	}
	result, _ := sizeStyledProperty(view, ColumnWidth)
	return result
}
// GetColumnGap returns the SizeUnit value which specifies the size of the
// gap (gutter) between columns of the ColumnLayout.
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnGap(view View, subviewID string) SizeUnit {
	if subviewID != "" {
		view = ViewByID(view, subviewID)
	}
	if view == nil {
		return AutoSize()
	}
	result, _ := sizeStyledProperty(view, ColumnGap)
	return result
}
// GetColumnSeparator returns the ViewBorder struct which specifies the line
// drawn between columns in a multi-column ColumnLayout. The value is looked
// up on the view first and falls back to the style; the zero ViewBorder is
// returned when nothing is set.
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnSeparator(view View, subviewID string) ViewBorder {
	if subviewID != "" {
		view = ViewByID(view, subviewID)
	}
	if view != nil {
		value := view.Get(ColumnSeparator)
		if value == nil {
			value, _ = valueFromStyle(view, ColumnSeparator)
		}
		if value != nil {
			if separator, ok := value.(ColumnSeparatorProperty); ok {
				return separator.ViewBorder(view.Session())
			}
		}
	}
	return ViewBorder{}
}
// GetColumnSeparatorStyle returns the int value which specifies the style of
// the line drawn between columns in a multi-column layout.
// Valid values are NoneLine (0), SolidLine (1), DashedLine (2), DottedLine (3), and DoubleLine (4).
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnSeparatorStyle(view View, subviewID string) int {
	border := GetColumnSeparator(view, subviewID)
	return border.Style
}
// GetColumnSeparatorWidth returns the SizeUnit value which specifies the
// width of the line drawn between columns in a multi-column layout.
// If the second argument (subviewID) is "" then the value of the first
// argument (view) is returned.
func GetColumnSeparatorWidth(view View, subviewID string) SizeUnit {
	border := GetColumnSeparator(view, subviewID)
	return border.Width
}
// ColumnSeparatorColor returns Color value which specifies the color of the line drawn between
// columns in a multi-column layout.
// If the second argument (subviewID) is "" then a top position of the first argument (view) is returned
func GetColumnSeparatorColor(view View, subviewID string) Color {
border := GetColumnSeparator(view, subviewID)
return border.Color
} | columnLayout.go | 0.749179 | 0.402128 | columnLayout.go | starcoder |
package esbuilder
// blockNode defines a statement block ({ ... }) in the AST.
type blockNode struct {
	// statements are the statements under the block, emitted in order.
	statements []StatementBuilder
}
// returnNode defines a return statement in the AST.
type returnNode struct {
	// value is the optional value being returned; nil emits a bare "return;".
	value ExpressionBuilder
}
// varNode defines a variable declaration statement in the AST.
type varNode struct {
	// name is the name of the variable being declared.
	name string
	// value is the optional initializer for the variable; nil declares only.
	value ExpressionBuilder
}
// conditionalNode defines a conditional (if/else) statement in the AST.
type conditionalNode struct {
	// condition is the expression over which the conditional operates.
	condition ExpressionBuilder
	// thenStatement is the statement executed if the condition is true.
	thenStatement StatementBuilder
	// elseStatement is the optional statement executed if the condition is false.
	elseStatement StatementBuilder
}
// exprStatementNode is a statement that contains a single expression
// (an expression evaluated for its side effects, terminated with ";").
type exprStatementNode struct {
	// childExpr is the expression of the statement.
	childExpr ExpressionBuilder
}
// emit writes the block as "{", an indented line per child statement, "}".
func (node blockNode) emit(sb *sourceBuilder) {
	sb.append("{")
	sb.indent()
	sb.appendLine()
	for _, statement := range node.statements {
		sb.emit(statement)
		sb.appendLine()
	}
	sb.dedent()
	sb.append("}")
}
// emit writes "var name;" or "var name = (value);" when an initializer is set.
func (node varNode) emit(sb *sourceBuilder) {
	sb.append("var ")
	sb.append(node.name)
	if node.value != nil {
		sb.append(" = ")
		sb.emitWrapped(node.value)
	}
	sb.append(";")
}
// emit writes "return;" or "return (value);" when a value is set.
func (node returnNode) emit(sb *sourceBuilder) {
	sb.append("return")
	if node.value != nil {
		sb.append(" ")
		sb.emitWrapped(node.value)
	}
	sb.append(";")
}
// emit writes "if (condition) then [else else]", emitting the else branch
// only when present.
func (node conditionalNode) emit(sb *sourceBuilder) {
	sb.append("if ")
	sb.emitWrapped(node.condition)
	sb.append(" ")
	sb.emit(node.thenStatement)
	if node.elseStatement != nil {
		sb.append(" else ")
		sb.emit(node.elseStatement)
	}
}
// emit writes the child expression followed by ";".
func (node exprStatementNode) emit(sb *sourceBuilder) {
	sb.emit(node.childExpr)
	sb.append(";")
}
// Return returns a new bare return statement (no value).
func Return() StatementBuilder {
	return statementBuilder{returnNode{nil}, nil}
}
// Returns returns a new return statement carrying the given value.
func Returns(value ExpressionBuilder) StatementBuilder {
	return statementBuilder{returnNode{value}, nil}
}
// If returns a new conditional statement with no else branch.
func If(condition ExpressionBuilder, thenStatement StatementBuilder) StatementBuilder {
	return statementBuilder{conditionalNode{condition, thenStatement, nil}, nil}
}
// IfElse returns a new conditional statement with both then and else branches.
func IfElse(condition ExpressionBuilder, thenStatement StatementBuilder, elseStatement StatementBuilder) StatementBuilder {
	return statementBuilder{conditionalNode{condition, thenStatement, elseStatement}, nil}
}
// Statements returns a block statement grouping the given statements.
func Statements(statements ...StatementBuilder) StatementBuilder {
	return statementBuilder{blockNode{statements}, nil}
}
// ExprStatement wraps an expression as a standalone statement.
func ExprStatement(expr ExpressionBuilder) StatementBuilder {
	return statementBuilder{exprStatementNode{expr}, nil}
}
// Variable returns a variable declaration statement without an initializer.
func Variable(name string) StatementBuilder {
	return statementBuilder{varNode{name, nil}, nil}
}
// VariableWithInit returns a variable declaration statement.
func VariableWithInit(name string, value ExpressionBuilder) StatementBuilder {
return statementBuilder{varNode{name, value}, nil}
} | generator/escommon/esbuilder/statements_base.go | 0.665954 | 0.466481 | statements_base.go | starcoder |
package keyvalue
import (
"strings"
"github.com/coreos/bbolt"
)
// Bucket represents a bucket of key-value pairs. Keys are strings, but values are simply bytes.
// Buckets can form a hierarchy of buckets.
type Bucket interface {
	BucketView
	// Put sets the value for a key in the bucket. If the key exist then its previous value will be overwritten.
	// Supplied value must remain valid for the life of the transaction.
	// Returns an error if the key is blank, if the key is too large, or if the value is too large.
	Put(key string, value []byte) error
	// PutMultiple will put all values received on the data channel within the same transaction, i.e., either all or none will be stored.
	// The puts are performed async, and when the the puts are done, then the result will be communicated via the response channel.
	// Once nil is received on the response channel, then this signals the transaction was successfully committed.
	// If the transaction failed, then the error is returned on the response channel.
	PutMultiple(data <-chan *KeyValue) <-chan error
	// Delete removes the keys from the bucket. If the key does not exist then nothing is done and a nil error is returned.
	// All or none are deleted within the same transaction.
	Delete(keys ...string) error
	// CreateBucket creates a new bucket at the given key and returns the new bucket.
	// Returns an error if the key already exists, if the bucket name is blank, or if the bucket name is too long.
	CreateBucket(name string) (Bucket, error)
	// CreateBucketIfNotExists creates a new bucket if it doesn't already exist and returns a reference to it.
	// Returns an error if the bucket name is blank, or if the bucket name is too long.
	CreateBucketIfNotExists(name string) (Bucket, error)
	// DeleteBucket deletes a bucket at the given key. Returns an error if the bucket does not exists, or if the key represents a non-bucket value.
	DeleteBucket(name string) error
	// Buckets iterate through the top-level children buckets and returns them on the returned channel.
	// The cancel channel is used to terminate the iteration early by the client.
	Buckets(cancel <-chan struct{}) <-chan Bucket
	// Bucket returns the bucket for the specified name. If the bucket does not exist, then nil is returned.
	// If path is specified, then the bucket will traverse the path to locate the Bucket within its hierarchy.
	Bucket(path ...string) Bucket
}
// bucket is the default Bucket implementation; it adds write operations on
// top of the read-only bucketView it embeds.
type bucket struct {
	*bucketView
}
// Put sets the value for key in the bucket, overwriting any previous value,
// inside its own read-write transaction.
// Returns an error if the bucket no longer exists, or if bolt rejects the
// key/value (blank key, key too large, or value too large).
func (a *bucket) Put(key string, value []byte) error {
	return a.db.Update(func(tx *bolt.Tx) error {
		b := lookupBucket(tx, a.path)
		if b == nil {
			return errBucketDoesNotExist(a.path)
		}
		// Propagate bolt's error instead of silently dropping it: the
		// original returned nil even when the put failed.
		return b.Put([]byte(key), value)
	})
}
// PutMultiple stores every KeyValue received on data within a single
// read-write transaction: either all puts are committed or none are.
// The returned channel reports the outcome: it yields the transaction error
// on failure, or is closed without a value on success.
func (a *bucket) PutMultiple(data <-chan *KeyValue) <-chan error {
	c := make(chan chan error)
	go func() {
		response := make(chan error)
		defer close(response)
		c <- response
		err := a.db.Update(func(tx *bolt.Tx) error {
			b := lookupBucket(tx, a.path)
			if b == nil {
				return errBucketDoesNotExist(a.path)
			}
			for kv := range data {
				// A failed put must abort the whole transaction; the error
				// was previously ignored, which silently dropped writes and
				// broke the all-or-none contract.
				// NOTE(review): on early return the data channel is not
				// drained, so a producer still sending may block — confirm
				// producers select on a done signal.
				if err := b.Put([]byte(kv.Key), kv.Value); err != nil {
					return err
				}
			}
			return nil
		})
		if err != nil {
			response <- err
		}
	}()
	return <-c
}
// Delete removes the given keys from the bucket within a single read-write
// transaction; keys that do not exist are ignored by bolt.
func (a *bucket) Delete(keys ...string) error {
	return a.db.Update(func(tx *bolt.Tx) error {
		b := lookupBucket(tx, a.path)
		if b == nil {
			return errBucketDoesNotExist(a.path)
		}
		for _, key := range keys {
			err := b.Delete([]byte(key))
			if err != nil {
				return err
			}
		}
		return nil
	})
}
// CreateBucket creates a child bucket with the given (trimmed) name and
// returns a handle to it.
// Returns an error if the name is blank, if the parent bucket no longer
// exists, or if bolt rejects the creation (e.g. the bucket already exists).
func (a *bucket) CreateBucket(name string) (Bucket, error) {
	name, err := checkBucketName(name)
	if err != nil {
		return nil, err
	}
	err = a.db.Update(func(tx *bolt.Tx) error {
		b := lookupBucket(tx, a.path)
		if b == nil {
			return errBucketDoesNotExist(a.path)
		}
		_, err := b.CreateBucket([]byte(name))
		return err
	})
	if err != nil {
		return nil, err
	}
	// Full slice expression forces append to copy: a plain append(a.path, name)
	// could share a.path's backing array between sibling children, letting a
	// later append clobber an earlier child's path.
	childPath := append(a.path[:len(a.path):len(a.path)], name)
	return &bucket{&bucketView{path: childPath, db: a.db}}, nil
}
// CreateBucketIfNotExists creates a child bucket with the given (trimmed)
// name if it does not already exist, and returns a handle to it.
// Returns an error if the name is blank or if the parent bucket no longer exists.
func (a *bucket) CreateBucketIfNotExists(name string) (Bucket, error) {
	name, err := checkBucketName(name)
	if err != nil {
		return nil, err
	}
	err = a.db.Update(func(tx *bolt.Tx) error {
		b := lookupBucket(tx, a.path)
		if b == nil {
			return errBucketDoesNotExist(a.path)
		}
		_, err := b.CreateBucketIfNotExists([]byte(name))
		return err
	})
	if err != nil {
		return nil, err
	}
	// Full slice expression forces append to copy so sibling children never
	// share (and clobber) a.path's backing array.
	childPath := append(a.path[:len(a.path):len(a.path)], name)
	return &bucket{&bucketView{path: childPath, db: a.db}}, nil
}
// DeleteBucket removes the child bucket with the given (trimmed) name.
// Returns an error if the name is blank, if the parent bucket no longer
// exists, or if bolt reports the child missing or a non-bucket value.
func (a *bucket) DeleteBucket(name string) error {
	trimmed, err := checkBucketName(name)
	if err != nil {
		return err
	}
	return a.db.Update(func(tx *bolt.Tx) error {
		if b := lookupBucket(tx, a.path); b != nil {
			return b.DeleteBucket([]byte(trimmed))
		}
		return errBucketDoesNotExist(a.path)
	})
}
// Buckets iterates over the direct child buckets, sending each on the
// returned channel. The channel is closed when iteration finishes or when
// the cancel channel (may be nil) fires.
func (a *bucket) Buckets(cancel <-chan struct{}) <-chan Bucket {
	c := make(chan chan Bucket)
	go a.db.View(func(tx *bolt.Tx) error {
		data := make(chan Bucket)
		defer close(data)
		c <- data
		b := lookupBucket(tx, a.path)
		if b == nil {
			return nil
		}
		cursor := b.Cursor()
		for k, v := cursor.First(); k != nil; k, v = cursor.Next() {
			// nil values mean the value is a bucket
			if v != nil {
				continue
			}
			// Full slice expression forces append to copy so children never
			// alias a.path's backing array.
			childPath := append(a.path[:len(a.path):len(a.path)], string(k))
			child := Bucket(&bucket{&bucketView{path: childPath, db: a.db}})
			if cancel == nil {
				data <- child
				continue
			}
			// The original selected with a `default:` send and a `break` on
			// cancel; that break only exited the select, so cancellation never
			// stopped the loop, and a blocked send could never observe cancel.
			select {
			case <-cancel:
				return nil
			case data <- child:
			}
		}
		return nil
	})
	return <-c
}
// Bucket resolves a descendant bucket by path; it returns nil when the view
// lookup fails.
func (a *bucket) Bucket(path ...string) Bucket {
	view := a.bucketView.bucketView(path...)
	if view == nil {
		return nil
	}
	return &bucket{view}
}
func checkBucketName(name string) (string, error) {
name = strings.TrimSpace(name)
if name == "" {
return name, ErrBucketNameMustNotBeBlank
}
return name, nil
} | pkg/data/keyvalue/bucket.go | 0.745954 | 0.408985 | bucket.go | starcoder |
package geometry
import (
"fmt"
"math"
)
// epsilon is the absolute tolerance used by all approximate float comparisons.
const epsilon = 0.00001

// Value is a geometric value in the plane: the empty set, the whole plane,
// a point, a line, or a line segment.
type Value interface {
	// shift translates the value by (dx, dy).
	shift(dx float64, dy float64) Value
	// intersect returns the intersection of this value with other.
	intersect(other Value) Value
	fmt.GoStringer
}

// nowhere is the empty set.
type nowhere struct {
}

// everywhere is the whole plane.
type everywhere struct {
}

// point is a single point (x, y).
type point struct {
	x float64
	y float64
}

// line is an infinite line in normal form: sin(angle)*x + cos(angle)*y = d.
type line struct {
	angle float64
	d     float64
}

// lineSegment is the segment between endpoints (x1, y1) and (x2, y2).
type lineSegment struct {
	x1 float64
	y1 float64
	x2 float64
	y2 float64
}
/* nowhere */

// Nowhere is the canonical empty geometric value.
var Nowhere = nowhere{}

// shift of the empty set is still the empty set.
func (nw nowhere) shift(dx float64, dy float64) Value {
	return Nowhere
}

// intersect of the empty set with anything is the empty set.
func (nw nowhere) intersect(other Value) Value {
	return Nowhere
}

func (nw nowhere) GoString() string {
	return "\"Nowhere\""
}
/* everywhere */

// Everywhere is the canonical whole-plane geometric value.
var Everywhere = everywhere{}

// shift of the whole plane is still the whole plane.
func (ew everywhere) shift(dx float64, dy float64) Value {
	return Everywhere
}

// intersect of the whole plane with other is other itself.
func (ew everywhere) intersect(other Value) Value {
	return other
}

func (ew everywhere) GoString() string {
	return "\"Everywhere\""
}
/* point */

// NewPoint returns the point (x, y).
func NewPoint(x float64, y float64) point {
	return point{x, y}
}

// shift translates the point by (dx, dy).
func (p point) shift(dx float64, dy float64) Value {
	return point{x: p.x + dx, y: p.y + dy}
}

// intersect returns p when other contains it (within epsilon), else Nowhere.
// Line and segment cases are delegated to the richer type's intersect.
func (p point) intersect(other Value) Value {
	switch ot := other.(type) {
	case nowhere:
		return Nowhere
	case everywhere:
		return p
	case point:
		if realClose(p.x, ot.x) && realClose(p.y, ot.y) {
			return p
		} else {
			return Nowhere
		}
	case line, lineSegment:
		// In a multi-type case ot keeps the static type Value; dispatching
		// through the interface reaches the line/segment implementation.
		return ot.intersect(p)
	}
	panic("Should never been reached")
}

func (p point) GoString() string {
	return fmt.Sprintf("{\"Point\":[%v,%v]}", p.x, p.y)
}
/* line: sin(angle)*x + cos(angle)*y = d */

// NewLine returns the line sin(angle)*x + cos(angle)*y = d in canonical form:
// d is made non-negative and angle is normalised into [0, 2*pi).
func NewLine(angle float64, d float64) line {
	// make d positiv and angle between 0 and 2pi
	if d < 0 {
		angle = angle + math.Pi
		d = -d
	}
	angle = math.Mod(angle, 2*math.Pi)
	if angle < 0 {
		angle = angle + 2*math.Pi
	}
	return line{angle, d}
}

// shift translates the line by (dx, dy); only the offset d changes because
// the direction is unaffected by translation.
func (ln line) shift(dx float64, dy float64) Value {
	return line{ln.angle, ln.d + math.Sin(ln.angle)*dx + math.Cos(ln.angle)*dy}
}

// intersect returns the intersection of the line with other: Nowhere, a
// point, or a whole line when the two coincide.
func (ln line) intersect(other Value) Value {
	switch ot := other.(type) {
	case nowhere:
		return Nowhere
	case everywhere:
		return ln
	case point:
		// The point lies on the line iff it satisfies the normal-form equation.
		if realClose(math.Sin(ln.angle)*ot.x+math.Cos(ln.angle)*ot.y, ln.d) {
			return ot
		} else {
			return Nowhere
		}
	case line:
		if realCloseAngle(ln.angle, ot.angle) {
			// Same orientation: parallel lines are equal iff same offset.
			if realClose(ln.d, ot.d) {
				return ln
			} else {
				return Nowhere
			}
		} else if realCloseAngle(ln.angle, ot.angle+math.Pi) {
			// Opposite orientation. With canonical non-negative offsets the
			// lines coincide only when both pass through the origin.
			if realClose(ln.d, 0) && realClose(ot.d, 0) {
				return ln
			} else {
				return Nowhere
			}
		} else {
			// General case: solve the two normal-form equations (Cramer's rule).
			x := (ln.d*math.Cos(ot.angle) - ot.d*math.Cos(ln.angle)) / math.Sin(ln.angle-ot.angle)
			y := (ot.d*math.Sin(ln.angle) - ln.d*math.Sin(ot.angle)) / math.Sin(ln.angle-ot.angle)
			return point{x, y}
		}
	case lineSegment:
		return ot.intersect(ln)
	}
	panic("Should never been reached")
}

func (ln line) GoString() string {
	return fmt.Sprintf("{\"Line\":[%v,%v]}", ln.angle, ln.d)
}
/* lineSegment */

// NewLineSegment builds a Value for the segment between (x1, y1) and (x2, y2).
// Coincident endpoints (within epsilon) collapse to a point; otherwise the
// endpoints are normalised so the first has the smaller x (or, for vertical
// segments, the smaller y).
func NewLineSegment(x1 float64, y1 float64, x2 float64, y2 float64) Value {
	vertical := realClose(x1, x2)
	if vertical && realClose(y1, y2) {
		return point{x1, y1}
	}
	var swap bool
	if vertical {
		swap = y2 < y1
	} else {
		swap = x2 < x1
	}
	if swap {
		x1, y1, x2, y2 = x2, y2, x1, y1
	}
	return lineSegment{x1, y1, x2, y2}
}
// shift translates both endpoints of the segment by (dx, dy).
func (ls lineSegment) shift(dx float64, dy float64) Value {
	shifted := lineSegment{
		x1: ls.x1 + dx,
		y1: ls.y1 + dy,
		x2: ls.x2 + dx,
		y2: ls.y2 + dy,
	}
	return shifted
}
// intersect returns the intersection of the segment with other: Nowhere, a
// single point, a sub-segment, or the segment itself.
func (ls lineSegment) intersect(other Value) Value {
	switch ot := other.(type) {
	case nowhere:
		return Nowhere
	case everywhere:
		return ls
	case point:
		// Intersect the supporting line with the point, then keep the result
		// only if it falls within the segment's bounding box.
		p := ls.toLine().intersect(ot)
		switch pt := p.(type) {
		case nowhere:
			return Nowhere
		case point:
			if between(ls.x1, pt.x, ls.x2) && between(ls.y1, pt.y, ls.y2) {
				return pt
			} else {
				return Nowhere
			}
		}
	case line:
		p := ls.toLine().intersect(ot)
		switch pt := p.(type) {
		case nowhere:
			return Nowhere
		case point:
			if between(ls.x1, pt.x, ls.x2) && between(ls.y1, pt.y, ls.y2) {
				return pt
			} else {
				return Nowhere
			}
		case line:
			// Supporting lines coincide: the whole segment intersects.
			return ls
		}
	case lineSegment:
		// Intersect via the supporting lines; a lineSegment result here means
		// the segments are collinear (line.intersect delegated back to ot).
		p := ls.toLine().intersect(ot)
		switch pt := p.(type) {
		case nowhere:
			return Nowhere
		case point:
			if between(ls.x1, pt.x, ls.x2) && between(ls.y1, pt.y, ls.y2) {
				return pt
			} else {
				return Nowhere
			}
		case lineSegment:
			// ls and ot are on the same supporting line.
			if realClose(ls.x1, ot.x2) && realClose(ls.y1, ot.y2) {
				return point{ls.x1, ls.y1} // touch in one point
			} else if realClose(ls.x2, ot.x1) && realClose(ls.y2, ot.y1) {
				return point{ls.x2, ls.y2} // touch in one point
			} else if between(ls.x1, ot.x1, ls.x2) && between(ls.y1, ot.y1, ls.y2) {
				// ot starts inside ls: overlap runs from ot's start to
				// whichever segment ends first.
				x1 := ot.x1
				y1 := ot.y1
				var x2 float64
				var y2 float64
				if between(ls.x1, ot.x2, ls.x2) && between(ls.y1, ot.y2, ls.y2) {
					x2 = ot.x2
					y2 = ot.y2
				} else {
					x2 = ls.x2
					y2 = ls.y2
				}
				return lineSegment{x1, y1, x2, y2}
			} else if between(ot.x1, ls.x1, ot.x2) && between(ot.y1, ls.y1, ot.y2) {
				// Symmetric case: ls starts inside ot.
				x1 := ls.x1
				y1 := ls.y1
				var x2 float64
				var y2 float64
				if between(ot.x1, ls.x2, ot.x2) && between(ot.y1, ls.y2, ot.y2) {
					x2 = ls.x2
					y2 = ls.y2
				} else {
					x2 = ot.x2
					y2 = ot.y2
				}
				return lineSegment{x1, y1, x2, y2}
			} else {
				// Collinear but disjoint.
				return Nowhere
			}
		}
	}
	panic("Should never been reached")
}
func (ls lineSegment) GoString() string {
	return fmt.Sprintf("{\"LineSegment\":[%v,%v,%v,%v]}", ls.x1, ls.y1, ls.x2, ls.y2)
}

// toLine returns the infinite supporting line of the segment in normal form
// sin(angle)*x + cos(angle)*y = d. The normal (sin a, cos a) is derived from
// tan(a) = (y2-y1)/(x1-x2), i.e. minus the segment's slope; d is fixed by
// substituting endpoint (x1, y1).
func (ls lineSegment) toLine() line {
	var angle float64
	dx := ls.x1 - ls.x2
	if dx == 0 {
		// Vertical segment: the normal points along the x axis.
		angle = math.Pi / 2
	} else {
		dy := ls.y2 - ls.y1
		angle = math.Atan(dy / dx)
	}
	return line{angle, ls.x1*math.Sin(angle) + ls.y1*math.Cos(angle)}
}
// realClose reports whether f1 and f2 differ by less than epsilon.
func realClose(f1 float64, f2 float64) bool {
	return math.Abs(f2-f1) < epsilon
}

// realCloseAngle reports whether the two angles are equivalent modulo 2*pi,
// within epsilon, for inputs whose math.Mod residues lie in (-2*pi, 2*pi).
func realCloseAngle(f1 float64, f2 float64) bool {
	d := math.Abs(math.Mod(f1, 2*math.Pi) - math.Mod(f2, 2*math.Pi))
	switch {
	case d < epsilon:
		return true
	case d > 2*math.Pi-epsilon && d < 2*math.Pi+epsilon:
		return true
	default:
		return d > 4*math.Pi-epsilon
	}
}

// between reports whether f2 lies within the closed interval spanned by f1
// and f3 (in either order), widened by epsilon on both sides.
func between(f1 float64, f2 float64, f3 float64) bool {
	lo, hi := math.Min(f1, f3), math.Max(f1, f3)
	return lo-epsilon < f2 && f2 < hi+epsilon
}
// Shift returns gv translated by (dx, dy).
func Shift(dx float64, dy float64, gv Value) Value {
	return gv.shift(dx, dy)
}

// Intersect returns the intersection of gv1 and gv2.
func Intersect(gv1 Value, gv2 Value) Value {
	return gv1.intersect(gv2)
}
package fast
import (
"go/ast"
r "reflect"
. "github.com/cosmos72/gomacro/base"
xr "github.com/cosmos72/gomacro/xreflect"
)
// Convert compiles a type conversion expression, coercing node to type t.
func (c *Comp) Convert(node ast.Expr, t xr.Type) *Expr {
	// Compile the operand with no type hint, then convert the result to t.
	e := c.Expr1(node, nil)
	return c.convert(e, t, node)
}
// convert compiles the conversion of the already-compiled expression e to
// type t. nodeOpt is used only for error messages and may be nil.
func (c *Comp) convert(e *Expr, t xr.Type, nodeOpt ast.Expr) *Expr {
	// Untyped constants are first given the concrete target type.
	if e.Untyped() {
		e.ConstTo(t)
	}
	if e.Type != nil && e.Type.IdenticalTo(t) {
		return e
	} else if e.Type != nil && e.Type.ReflectType() == t.ReflectType() {
		// Same runtime representation: only the static type changes.
		if e.Const() {
			return c.exprValue(t, e.Value)
		} else {
			return exprFun(t, e.Fun)
		}
	} else if e.Type == nil && IsNillableKind(t.Kind()) {
		// Conversion of an untyped nil: use t's zero value.
		e.Type = t
		e.Value = xr.Zero(t).Interface()
	} else if e.Type != nil && e.Type.ConvertibleTo(t) {
		// Legal conversion; handled below.
	} else {
		c.Errorf("cannot convert %v to %v: %v", e.Type, t, nodeOpt)
		return nil
	}
	rtype := t.ReflectType()
	if e.Const() {
		// Constants are converted once, at compile time.
		val := convert(r.ValueOf(e.Value), rtype).Interface()
		return c.exprValue(t, val)
	}
	fun := e.AsX1()
	var ret I
	// Wrap fun in a closure whose return type is the native Go kind of t, so
	// later stages can call it without boxing the result.
	switch t.Kind() {
	case r.Bool:
		ret = func(env *Env) bool {
			val := convert(fun(env), rtype)
			return val.Bool()
		}
	case r.Int:
		ret = func(env *Env) int {
			val := convert(fun(env), rtype)
			return int(val.Int())
		}
	case r.Int8:
		ret = func(env *Env) int8 {
			val := convert(fun(env), rtype)
			return int8(val.Int())
		}
	case r.Int16:
		ret = func(env *Env) int16 {
			val := convert(fun(env), rtype)
			return int16(val.Int())
		}
	case r.Int32:
		ret = func(env *Env) int32 {
			val := convert(fun(env), rtype)
			return int32(val.Int())
		}
	case r.Int64:
		ret = func(env *Env) int64 {
			val := convert(fun(env), rtype)
			return val.Int()
		}
	case r.Uint:
		ret = func(env *Env) uint {
			val := convert(fun(env), rtype)
			return uint(val.Uint())
		}
	case r.Uint8:
		ret = func(env *Env) uint8 {
			val := convert(fun(env), rtype)
			return uint8(val.Uint())
		}
	case r.Uint16:
		ret = func(env *Env) uint16 {
			val := convert(fun(env), rtype)
			return uint16(val.Uint())
		}
	case r.Uint32:
		ret = func(env *Env) uint32 {
			val := convert(fun(env), rtype)
			return uint32(val.Uint())
		}
	case r.Uint64:
		ret = func(env *Env) uint64 {
			val := convert(fun(env), rtype)
			return val.Uint()
		}
	case r.Uintptr:
		ret = func(env *Env) uintptr {
			val := convert(fun(env), rtype)
			return uintptr(val.Uint())
		}
	case r.Float32:
		ret = func(env *Env) float32 {
			val := convert(fun(env), rtype)
			return float32(val.Float())
		}
	case r.Float64:
		ret = func(env *Env) float64 {
			val := convert(fun(env), rtype)
			return val.Float()
		}
	case r.Complex64:
		ret = func(env *Env) complex64 {
			val := convert(fun(env), rtype)
			return complex64(val.Complex())
		}
	case r.Complex128:
		ret = func(env *Env) complex128 {
			val := convert(fun(env), rtype)
			return val.Complex()
		}
	case r.String:
		ret = func(env *Env) string {
			val := convert(fun(env), rtype)
			return val.String()
		}
	default:
		// Non-scalar target: delegate to Converter, which also knows about
		// proxies, emulated interfaces and forward declarations.
		if conv := c.Converter(e.Type, t); conv != nil {
			ret = func(env *Env) r.Value {
				return conv(fun(env))
			}
		} else {
			ret = func(env *Env) r.Value {
				return fun(env)
			}
		}
	}
	eret := exprFun(t, ret)
	// NOTE(review): e.Const() already returned earlier in this function, so
	// this branch looks unreachable — confirm before removing.
	if e.Const() {
		eret.EvalConst(COptKeepUntyped)
	}
	return eret
}
// Converter returns a function that converts reflect.Value from tin to tout.
// It also supports conversion from interpreted types to interfaces.
// A nil return means no conversion is needed (identical runtime types).
func (c *Comp) Converter(tin, tout xr.Type) func(r.Value) r.Value {
	if !tin.ConvertibleTo(tout) {
		c.Errorf("cannot convert from <%v> to <%v>", tin, tout)
	}
	rtin := tin.ReflectType()
	rtout := tout.ReflectType()
	switch {
	case rtin == rtout:
		// Identical runtime representations: nothing to do.
		return nil
	case rtin.ConvertibleTo(rtout):
		// most conversions, including from compiled type to compiled interface
		if rtin.Kind() != r.Interface {
			return func(obj r.Value) r.Value {
				return obj.Convert(rtout)
			}
		}
		// extract objects wrapped in proxies (if any)
		g := c.CompGlobals
		return func(obj r.Value) r.Value {
			obj, _ = g.extractFromProxy(obj)
			if obj.IsValid() {
				return obj.Convert(rtout)
			} else {
				// A nil interface converts to tout's zero value.
				return r.Zero(rtout)
			}
		}
	case xr.IsEmulatedInterface(tout):
		// conversion from type to emulated interface
		return c.converterToEmulatedInterface(tin, tout)
	case rtin == rtypeOfForward:
		// conversion from forward-declared type
		return c.converterFromForward(tin, tout)
	case rtout.Kind() == r.Interface:
		// conversion from interpreted type to compiled interface.
		// must use a proxy that pre-implement compiled interfaces.
		return c.converterToProxy(tin, tout)
	default:
		c.Errorf("unimplemented conversion from <%v> to <%v> with reflect.Type <%v> to <%v>",
			tin, tout, rtin, rtout)
		return nil
	}
}
// converterFromForward returns a conversion function for values of a
// forward-declared type: it unwraps the wrapper and, when the dynamic type
// differs from tout's reflect.Type, converts the result.
func (c *Comp) converterFromForward(tin, tout xr.Type) func(r.Value) r.Value {
	rtout := tout.ReflectType()
	return func(v r.Value) r.Value {
		elem := v.Elem()
		if elem.Type() == rtout {
			return elem
		}
		return elem.Convert(rtout)
	}
}
// conversion between compatible types.
// also implements conversion from xr.Forward.
func convert(v r.Value, rtout r.Type) r.Value {
if v.Kind() == r.Interface {
v = v.Elem()
}
return v.Convert(rtout)
} | vendor/github.com/cosmos72/gomacro/fast/convert.go | 0.652906 | 0.492066 | convert.go | starcoder |
package crdt
import (
"math/rand"
"github.com/johnny-morrice/godless/internal/testutil"
)
// GenNamespace generates a random Namespace for property-based tests:
// 1..10 tables, each with 1..size rows (size is capped at 30).
func GenNamespace(rand *rand.Rand, size int) Namespace {
	if size > 30 {
		size = 30
	}
	const tableMax = 10
	// maxStr bounds the length of generated table/row names.
	const maxStr = 20
	gen := EmptyNamespace()
	tableCount := testutil.GenCountRange(rand, 1, tableMax)
	for i := 0; i < tableCount; i++ {
		tableName := TableName(testutil.RandLettersRange(rand, 1, maxStr))
		table := EmptyTable()
		rowCount := testutil.GenCountRange(rand, 1, size)
		for j := 0; j < rowCount; j++ {
			rowName := RowName(testutil.RandLetters(rand, maxStr))
			row := genRow(rand, size)
			table.addRow(rowName, row)
		}
		gen.addTable(tableName, table)
	}
	return gen
}
// genRow produces a Row with a random number of entries; size is clamped
// into the range [5, 10] before being used as the entry-count bound.
func genRow(rand *rand.Rand, size int) Row {
	const maxStr = 20
	switch {
	case size > 10:
		size = 10
	case size < 5:
		size = 5
	}
	row := EmptyRow()
	for k, count := 0, testutil.GenCountRange(rand, 1, size); k < count; k++ {
		name := EntryName(testutil.RandLetters(rand, maxStr))
		row.addEntry(name, GenEntry(rand, size))
	}
	return row
}
// GenEntry produces an Entry with a random number of points; size is clamped
// into the range [2, 5] before being used as the point-count bound.
func GenEntry(rand *rand.Rand, size int) Entry {
	const maxStr = 20
	if size > 5 {
		size = 5
	} else if size < 2 {
		size = 2
	}
	count := testutil.GenCountRange(rand, 1, size)
	points := make([]Point, 0, count)
	for i := 0; i < count; i++ {
		points = append(points, genPoint(rand, maxStr))
	}
	return MakeEntry(points)
}
// genPoint produces an unsigned Point whose text is 1..size random letters.
func genPoint(rand *rand.Rand, size int) Point {
	text := testutil.RandLettersRange(rand, 1, size)
	return UnsignedPoint(PointText(text))
}
// GenIndex generates a random Index with exactly size tables (size clamped
// into [10, 20]), each mapped to a random set of unsigned links.
func GenIndex(rand *rand.Rand, size int) Index {
	if size > 20 {
		size = 20
	}
	if size < 10 {
		size = 10
	}
	index := EmptyIndex()
	for i := 0; i < size; i++ {
		keyCount := testutil.GenCountRange(rand, 2, size)
		indexKey := TableName(testutil.RandLettersRange(rand, 1, keyCount))
		addrCount := testutil.GenCountRange(rand, 1, size)
		addrs := make([]Link, addrCount)
		for j := 0; j < addrCount; j++ {
			pathCount := testutil.GenCountRange(rand, 2, size)
			a := testutil.RandLettersRange(rand, 1, pathCount)
			addrs[j] = UnsignedLink(IPFSPath(a))
		}
		index.addTable(indexKey, addrs...)
	}
	return index
}
func GenLink(rand *rand.Rand, size int) Link {
if size > 10 {
size = 10
}
if size < 2 {
size = 2
}
addr := testutil.RandLettersRange(rand, 1, size)
return UnsignedLink(IPFSPath(addr))
} | crdt/gen.go | 0.536556 | 0.575767 | gen.go | starcoder |
package board
import (
"errors"
"fmt"
"math/rand"
"time"
)
// CellDead marks a dead cell in the board state.
const CellDead = 0

// CellAlive marks a live cell in the board state.
const CellAlive = 1

// board is a Game of Life grid; state[i][j] holds CellDead or CellAlive,
// with rows and columns caching the grid dimensions.
type board struct {
	state   [][]int
	rows    int
	columns int
}
// NewRandomBoard creates a new board with the given dimensions. The
// dimensions, rows and columns, must be positive integers greater than 0.
// Returns a board populated with a random state.
func NewRandomBoard(rows, columns int) (board, error) {
	if rows < 1 || columns < 1 {
		return board{}, errors.New("rows and columns must be a positive integer greater than 0")
	}
	// Use a private source instead of re-seeding the shared one: the original
	// called the deprecated rand.Seed on every invocation, clobbering global
	// RNG state and producing identical boards for calls within one clock tick.
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	initState := make([][]int, rows)
	for i := range initState {
		initState[i] = make([]int, columns)
		for j := range initState[i] {
			initState[i][j] = rng.Intn(2)
		}
	}
	return board{state: initState, rows: rows, columns: columns}, nil
}
// NewBoard wraps a validated initial state in a board. The state must be
// non-nil, non-empty, rectangular (not jagged), and contain only the values
// CellDead (0) or CellAlive (1).
func NewBoard(initialState [][]int) (board, error) {
	if initialState == nil {
		return board{}, errors.New("initialState cannot be nil")
	}
	if len(initialState) < 1 || len(initialState[0]) < 1 {
		return board{}, errors.New("initialState must contain at least 1 row and 1 column")
	}
	width := len(initialState[0])
	for _, row := range initialState {
		if len(row) != width {
			return board{}, errors.New("initialState is a jagged 2D array, initialState cannot be jagged")
		}
		for _, cell := range row {
			if cell != CellDead && cell != CellAlive {
				return board{}, errors.New("initialState may only contain values 0 or 1")
			}
		}
	}
	return board{state: initialState, rows: len(initialState), columns: width}, nil
}
// Evolve advances the board by one generation, computing every cell's next
// state from the current grid before swapping it in.
func (b *board) Evolve() {
	next := make([][]int, b.rows)
	for i := 0; i < b.rows; i++ {
		next[i] = make([]int, b.columns)
		for j := 0; j < b.columns; j++ {
			next[i][j] = nextStateForCell(b, i, j)
		}
	}
	b.state = next
}
// State returns the current grid. The internal slice is returned without
// copying, so mutations by the caller are visible to the board.
func (b *board) State() [][]int {
	return b.state
}

// Rows returns the number of rows in the board.
func (b *board) Rows() int {
	return b.rows
}

// Columns returns the number of columns in the board.
func (b *board) Columns() int {
	return b.columns
}
// PrettyPrint writes the board to stdout, one row per line, rendering dead
// cells as " - " and live cells as " * ".
func (b *board) PrettyPrint() {
	for _, row := range b.state {
		for _, cell := range row {
			// gofmt fix: the condition previously carried redundant parentheses.
			if cell == CellDead {
				fmt.Print(" - ")
			} else {
				fmt.Print(" * ")
			}
		}
		fmt.Println()
	}
}
func nextStateForCell(b *board, i, j int) int {
neighborsAlive := 0
cellValue := b.state[i][j]
for x := -1; x <= 1; x++ {
for y := -1; y <= 1; y++ {
if i+x < 0 || i+x > (b.rows-1) || y+j < 0 || y+j > (b.columns-1) {
continue
}
neighborsAlive += b.state[i+x][y+j]
}
}
neighborsAlive -= cellValue
if cellValue == CellDead && neighborsAlive == 3 {
return CellAlive
} else if cellValue == CellAlive && (neighborsAlive < 2 || neighborsAlive > 3) {
return CellDead
} else {
return cellValue
}
} | golang/board/board.go | 0.689619 | 0.509642 | board.go | starcoder |
package vue
import (
"reflect"
"runtime"
"strings"
)
// Option uses the option pattern for components.
// Each option mutates the component under construction.
type Option func(*Comp)

// El is the element option for components.
// The root element of a component is query selected from the value, e.g. #app or body.
func El(el string) Option {
	return func(comp *Comp) {
		comp.el = el
	}
}
// Template is the template option for components.
// The template uses the mustache syntax for rendering.
// The template must have a single root element.
// The string is stored on the component verbatim.
func Template(tmpl string) Option {
	return func(comp *Comp) {
		comp.tmpl = tmpl
	}
}
// This option accepts either a function or a struct.
// The data function is expected to return a new data value.
// For example: func() *Type { return &Type{...} }
// Without a function the data is shared across components.
// The scope of the data is within the component.
// Data must be a pointer to be mutable by methods.
func Data(data interface{}) Option {
return func(comp *Comp) {
comp.data = data
}
}
// Method is the method option for components.
// The given name and function is registered as a method for the component.
// The function is required to accept context and allows optional arguments.
// For example: func(vctx vue.Context) or func(vctx vue.Context, a1 Arg1, ..., ak ArgK)
func Method(name string, function interface{}) Option {
	return func(comp *Comp) {
		fn := reflect.ValueOf(function)
		comp.methods[name] = fn
	}
}
// Methods is the methods option for components.
// Each given function is registered as a method under its own bare name.
// The functions are required to accept context and allow optional arguments.
// For example: func(vctx vue.Context) or func(vctx vue.Context, a1 Arg1, ..., ak ArgK)
func Methods(functions ...interface{}) Option {
	return func(comp *Comp) {
		for _, function := range functions {
			fn := reflect.ValueOf(function)
			comp.methods[funcName(fn)] = fn
		}
	}
}
// Computed is the computed option for components.
// The given name and function is registered as a computed property for the component.
// The function is required to accept context and return a value.
// For example: func(vctx vue.Context) Type
func Computed(name string, function interface{}) Option {
	return func(comp *Comp) {
		comp.computed[name] = reflect.ValueOf(function)
	}
}
// Computeds is the computeds option for components.
// Each given function is registered as a computed property under its bare name.
// The functions are required to accept context and return a value.
// For example: func(vctx vue.Context) Type
func Computeds(functions ...interface{}) Option {
	return func(comp *Comp) {
		for _, function := range functions {
			fn := reflect.ValueOf(function)
			comp.computed[funcName(fn)] = fn
		}
	}
}
// Watch is the watch option for components.
// The given function is registered as a watcher for the data field.
// All data fields are watchable, e.g. data, props and computed.
// The function is required to accept context and both the new and old values.
// For example: func(vctx vue.Context, newVal, oldVal Type)
func Watch(field string, function interface{}) Option {
	return func(comp *Comp) {
		comp.watchers[field] = reflect.ValueOf(function)
	}
}
// Sub is the subcomponent option for components.
// Note that sub is marked as a subcomponent in place (mutated), then
// registered on the parent under the given element name.
func Sub(element string, sub *Comp) Option {
	return func(comp *Comp) {
		sub.isSub = true
		comp.subs[element] = sub
	}
}
// Props is the props option for subcomponents.
// Each prop name is added to the subcomponent's prop set.
func Props(props ...string) Option {
	return func(sub *Comp) {
		for _, name := range props {
			sub.props[name] = struct{}{}
		}
	}
}
// funcName returns the name of the given function.
func funcName(function reflect.Value) string {
name := runtime.FuncForPC(function.Pointer()).Name()
return stripMetadata(name)
}
// stripMetadata returns the function name without metadata.
func stripMetadata(name string) string {
parts := strings.Split(name, ".")
name = parts[len(parts)-1]
return strings.TrimSuffix(name, "-fm")
} | option.go | 0.787768 | 0.485112 | option.go | starcoder |
package gogeom
import (
"math"
)
// SquareSides describes a square by the length of its side.
type SquareSides struct {
	Side float64
}

// SquareDiagonals describes a square by the length of its diagonal.
type SquareDiagonals struct {
	Diagonal float64
}

// RectangleSides describes a rectangle by its side lengths.
type RectangleSides struct {
	Length, Breadth float64
}

// ParallelogramSides describes a parallelogram by base, slant side and height.
type ParallelogramSides struct {
	Base, Slant, Height float64
}

// TrapeziumSidesWithHeight describes a trapezium by its parallel sides and height.
type TrapeziumSidesWithHeight struct {
	Base, Top, Height float64
}

// TrapeziumSidesWithSlants describes a trapezium by its parallel sides and both slants.
type TrapeziumSidesWithSlants struct {
	Base, Top, Slant1, Slant2 float64
}

// RhombusDiagonals describes a rhombus by its two diagonals.
type RhombusDiagonals struct {
	Diagonal1, Diagonal2 float64
}

// RhombusSides describes a rhombus by side length and height.
type RhombusSides struct {
	Side, Height float64
}

// ScaleneQuadrilateralSides describes a general quadrilateral by its four sides.
type ScaleneQuadrilateralSides struct {
	Side1, Side2, Side3, Side4 float64
}
// PerimeterOfSquare returns the perimeter of the square (4 * side).
func (q *SquareSides) PerimeterOfSquare() float64 {
	return 4 * q.Side
}

// AreaOfSquare returns the area of the square (side squared).
func (q *SquareSides) AreaOfSquare() float64 {
	// gofmt fix: the call was previously formatted as math.Pow(q.Side,2).
	return math.Pow(q.Side, 2)
}

// DiagonalOfSquare returns the diagonal of the square (side * sqrt(2)).
func (q *SquareSides) DiagonalOfSquare() float64 {
	return math.Sqrt(2) * q.Side
}
// SideOfSquare returns the side length implied by the diagonal (d / sqrt(2)).
func (q *SquareDiagonals) SideOfSquare() float64 {
	return q.Diagonal / math.Sqrt(2)
}

// PerimeterOfSquare returns the perimeter derived from the diagonal.
func (q *SquareDiagonals) PerimeterOfSquare() float64 {
	side := q.SideOfSquare()
	return 4 * side
}

// AreaOfSquare returns the area derived from the diagonal.
func (q *SquareDiagonals) AreaOfSquare() float64 {
	side := q.SideOfSquare()
	return math.Pow(side, 2)
}
// PerimeterOfRectangle returns twice the sum of length and breadth.
func (q *RectangleSides) PerimeterOfRectangle() float64 {
	return 2 * (q.Length + q.Breadth)
}

// AreaOfRectangle returns length times breadth.
func (q *RectangleSides) AreaOfRectangle() float64 {
	return q.Length * q.Breadth
}

// DiagonalOfRectangle returns the diagonal via the Pythagorean theorem.
func (q *RectangleSides) DiagonalOfRectangle() float64 {
	// math.Hypot avoids the intermediate overflow/underflow that
	// Sqrt(Pow(l,2)+Pow(b,2)) suffers for very large or very small sides.
	return math.Hypot(q.Length, q.Breadth)
}
// PerimeterOfParallelogram returns twice the sum of the base and slant sides.
func (q *ParallelogramSides) PerimeterOfParallelogram() float64 {
	return 2 * (q.Base + q.Slant)
}

// AreaOfParallelogram returns base times height.
func (q *ParallelogramSides) AreaOfParallelogram() float64 {
	return q.Base * q.Height
}
// AreaOfTrapezium returns the trapezium area: half the sum of the parallel
// sides times the height.
func (q *TrapeziumSidesWithHeight) AreaOfTrapezium() float64 {
	return 0.5 * (q.Base + q.Top) * q.Height
}

// PerimeterOfTrapezium returns the sum of all four sides.
func (q *TrapeziumSidesWithSlants) PerimeterOfTrapezium() float64 {
	return q.Base + q.Top + q.Slant1 + q.Slant2
}
// AreaOfRhombus returns the area as half the product of the diagonals.
func (q *RhombusDiagonals) AreaOfRhombus() float64 {
	return 0.5 * q.Diagonal1 * q.Diagonal2
}

// LengthOfRhombusSides returns the side length: half the hypotenuse of the
// two diagonals.
func (q *RhombusDiagonals) LengthOfRhombusSides() float64 {
	// math.Hypot avoids the intermediate overflow/underflow that
	// Sqrt(Pow(d1,2)+Pow(d2,2)) suffers for very large or very small diagonals.
	return 0.5 * math.Hypot(q.Diagonal1, q.Diagonal2)
}

// PerimeterOfRhombus returns four times the side length.
func (q *RhombusDiagonals) PerimeterOfRhombus() float64 {
	return 4 * q.LengthOfRhombusSides()
}
// AreaOfRhombus returns the area given side length and height (base * height).
func (q *RhombusSides) AreaOfRhombus() float64 {
	return q.Side * q.Height
}

// PerimeterOfRhombus returns four times the side length.
func (q *RhombusSides) PerimeterOfRhombus() float64 {
	return 4 * q.Side
}
// PerimeterOfScaleneQuadrilateral returns the sum of all four sides.
func (q *ScaleneQuadrilateralSides) PerimeterOfScaleneQuadrilateral() float64 {
	return q.Side1 + q.Side2 + q.Side3 + q.Side4
}
package timef
// List of dates in number or string format.
// These are Go time layout strings for time.Format / time.Parse, built from
// the reference time Mon Jan 2 15:04:05 2006, in dash/slash/dot/compact
// variants with the year at the beginning or at the end.
const (
	StampDashDateLongYearAtBegin  = "2006-01-02 15:04:05" // 2006-01-02 15:04:05
	StampDashDateYearAtBegin      = "06-01-02 15:04:05"   // 06-01-02 15:04:05
	StampDashDayLongYearAtBegin   = "2006-01-02"          // 2006-01-02
	StampSlashDateLongYearAtBegin = "2006/01/02 15:04:05" // 2006/01/02 15:04:05
	StampSlashDateYearAtBegin     = "06/01/02 15:04:05"   // 06/01/02 15:04:05
	StampSlashDayLongYearAtBegin  = "2006/01/02"          // 2006/01/02
	StampDotDateLongYearAtBegin   = "2006.01.02 15:04:05" // 2006.01.02 15:04:05
	StampDotDateYearAtBegin       = "06.01.02 15:04:05"   // 06.01.02 15:04:05
	StampDotDayLongYearAtBegin    = "2006.01.02"          // 2006.01.02
	StampDateLongYearAtBegin      = "20060102 15:04:05"   // 20060102 15:04:05
	StampDateYearAtBegin          = "060102 15:04:05"     // 060102 15:04:05
	StampDayLongYearAtBegin       = "20060102"            // 20060102
	StampDashDayYearAtBegin       = "06-01-02"            // 06-01-02
	StampSlashDayYearAtBegin      = "06/01/02"            // 06/01/02
	StampDotDayYearAtBegin        = "06.01.02"            // 06.01.02
	StampDayYearAtBegin           = "060102"              // 060102
	StampDashDateLongYearAtEnd    = "02-01-2006 15:04:05" // 02-01-2006 15:04:05
	StampDashDateYearAtEnd        = "02-01-06 15:04:05"   // 02-01-06 15:04:05
	StampDashDayLongYearAtEnd     = "02-01-2006"          // 02-01-2006
	StampSlashDateLongYearAtEnd   = "02/01/2006 15:04:05" // 02/01/2006 15:04:05
	StampSlashDateYearAtEnd       = "02/01/06 15:04:05"   // 02/01/06 15:04:05
	StampSlashDayLongYearAtEnd    = "02/01/2006"          // 02/01/2006
	StampDotDateLongYearAtEnd     = "02.01.2006 15:04:05" // 02.01.2006 15:04:05
	StampDotDateYearAtEnd         = "02.01.06 15:04:05"   // 02.01.06 15:04:05
	StampDotDayLongYearAtEnd      = "02.01.2006"          // 02.01.2006
	StampDateLongYearAtEnd        = "02012006 15:04:05"   // 02012006 15:04:05
	StampDateYearAtEnd            = "020106 15:04:05"     // 020106 15:04:05
	StampDayLongYearAtEnd         = "02012006"            // 02012006
	StampDashDayYearAtEnd         = "02-01-06"            // 02-01-06
	StampSlashDayYearAtEnd        = "02/01/06"            // 02/01/06
	StampDotDayYearAtEnd          = "02.01.06"            // 02.01.06
	StampDayYearAtEnd             = "020106"              // 020106
	StampTime                     = "15:04:05"            // 15:04:05
	StampWordDayLongMonthLongYear = "2 January 2006"      // 2 January 2006
	StampWordDayLongMonthYear     = "2 January 06"        // 2 January 06
	StampWordDayLongMonth         = "2 January"           // 2 January
	StampWordDayMonthYear         = "2 Jan 06"            // 2 Jan 06
	StampWordDayMonth             = "2 Jan"               // 2 Jan
	LongYear                      = "2006"                // 2006
	Year                          = "06"                  // 06
	LongMonth                     = "January"             // January
	Month                         = "Jan"                 // Jan
	ZeroMonth                     = "01"                  // 01
	NumMonth                      = "1"                   // 1
	Day                           = "2"                   // 2
	ZeroDay                       = "02"                  // 02
	Hour                          = "15"                  // 15
	Hour12                        = "3"                   // 3
	Minute                        = "4"                   // 4
	ZeroMinute                    = "04"                  // 04
	Second                        = "5"                   // 5
	ZeroSecond                    = "05"                  // 05
	Zero                          = "0"                   // 0
	Milli                         = "000"                 // 000
	Micro                         = "000000"              // 000000
	Nano                          = "000000000"           // 000000000
)
// Full date format and year at the beginning and timestamp.
// NOTE(review): these are Oracle-style picture strings (YYYY, HH24, MI, FF),
// not Go layouts — presumably translated elsewhere in the package; confirm.
const (
	// FormatDateLongYearAtBegin11 its YYYY-MM-DD HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtBegin11 = "YYYY-MM-DD HH24:MI"
	// FormatDateLongYearAtBegin12 its YYYY/MM/DD HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtBegin12 = "YYYY/MM/DD HH24:MI"
	// FormatDateLongYearAtBegin13 its YYYY.MM.DD HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtBegin13 = "YYYY.MM.DD HH24:MI"
	// FormatDateLongYearAtBegin14 its YYYYMMDD HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtBegin14 = "YYYYMMDD HH24:MI"
	// FormatDateLongYearAtBegin21 its YYYY-MM-DD HH24:MI:SS. Complete date plus hours and minutes, seconds.
	FormatDateLongYearAtBegin21 = "YYYY-MM-DD HH24:MI:SS"
	// FormatDateLongYearAtBegin22 its YYYY/MM/DD HH24:MI:SS. Complete date plus hours and minutes, seconds.
	FormatDateLongYearAtBegin22 = "YYYY/MM/DD HH24:MI:SS"
	// FormatDateLongYearAtBegin23 its YYYY.MM.DD HH24:MI:SS. Complete date plus hours and minutes, seconds.
	FormatDateLongYearAtBegin23 = "YYYY.MM.DD HH24:MI:SS"
	// FormatDateLongYearAtBegin24 its YYYYMMDD HH24:MI:SS. Complete date plus hours and minutes, seconds.
	FormatDateLongYearAtBegin24 = "YYYYMMDD HH24:MI:SS"
	// FormatDateLongYearAtBegin31 its YYYY-MM-DD HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtBegin31 = "YYYY-MM-DD HH24:MI:SS.FFF"
	// FormatDateLongYearAtBegin32 its YYYY/MM/DD HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtBegin32 = "YYYY/MM/DD HH24:MI:SS.FFF"
	// FormatDateLongYearAtBegin33 its YYYY.MM.DD HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtBegin33 = "YYYY.MM.DD HH24:MI:SS.FFF"
	// FormatDateLongYearAtBegin34 its YYYYMMDD HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtBegin34 = "YYYYMMDD HH24:MI:SS.FFF"
	// FormatDateLongYearAtBegin41 its YYYY-MM-DD HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtBegin41 = "YYYY-MM-DD HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtBegin42 its YYYY/MM/DD HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtBegin42 = "YYYY/MM/DD HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtBegin43 its YYYY.MM.DD HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtBegin43 = "YYYY.MM.DD HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtBegin44 its YYYYMMDD HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtBegin44 = "YYYYMMDD HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtBegin51 its YYYY-MM-DD HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtBegin51 = "YYYY-MM-DD HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtBegin52 its YYYY/MM/DD HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtBegin52 = "YYYY/MM/DD HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtBegin53 its YYYY.MM.DD HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtBegin53 = "YYYY.MM.DD HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtBegin54 its YYYYMMDD HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtBegin54 = "YYYYMMDD HH24:MI:SS.FFFFFFFFF"
)
// Date and time formats with a two-digit year at the beginning.
const (
	// FormatDateYearAtBegin11 is YY-MM-DD HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtBegin11 = "YY-MM-DD HH24:MI"
	// FormatDateYearAtBegin12 is YY/MM/DD HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtBegin12 = "YY/MM/DD HH24:MI"
	// FormatDateYearAtBegin13 is YY.MM.DD HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtBegin13 = "YY.MM.DD HH24:MI"
	// FormatDateYearAtBegin14 is YYMMDD HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtBegin14 = "YYMMDD HH24:MI"
	// FormatDateYearAtBegin21 is YY-MM-DD HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtBegin21 = "YY-MM-DD HH24:MI:SS"
	// FormatDateYearAtBegin22 is YY/MM/DD HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtBegin22 = "YY/MM/DD HH24:MI:SS"
	// FormatDateYearAtBegin23 is YY.MM.DD HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtBegin23 = "YY.MM.DD HH24:MI:SS"
	// FormatDateYearAtBegin24 is YYMMDD HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtBegin24 = "YYMMDD HH24:MI:SS"
	// FormatDateYearAtBegin31 is YY-MM-DD HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateYearAtBegin31 = "YY-MM-DD HH24:MI:SS.FFF"
	// FormatDateYearAtBegin41 is YY-MM-DD HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateYearAtBegin41 = "YY-MM-DD HH24:MI:SS.FFFFFF"
	// FormatDateYearAtBegin51 is YY-MM-DD HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateYearAtBegin51 = "YY-MM-DD HH24:MI:SS.FFFFFFFFF"
)

// Date-only formats with a four-digit year at the beginning.
const (
	// FormatDayLongYearAtBegin1 is YYYY-MM-DD. Complete day.
	FormatDayLongYearAtBegin1 = "YYYY-MM-DD"
	// FormatDayLongYearAtBegin2 is YYYY/MM/DD. Complete day.
	FormatDayLongYearAtBegin2 = "YYYY/MM/DD"
	// FormatDayLongYearAtBegin3 is YYYY.MM.DD. Complete day.
	FormatDayLongYearAtBegin3 = "YYYY.MM.DD"
	// FormatDayLongYearAtBegin4 is YYYYMMDD. Complete day.
	FormatDayLongYearAtBegin4 = "YYYYMMDD"
)

// Date-only formats with a two-digit year at the beginning.
const (
	// FormatDayYearAtBegin1 is YY-MM-DD. Complete day.
	FormatDayYearAtBegin1 = "YY-MM-DD"
	// FormatDayYearAtBegin2 is YY/MM/DD. Complete day.
	FormatDayYearAtBegin2 = "YY/MM/DD"
	// FormatDayYearAtBegin3 is YY.MM.DD. Complete day.
	FormatDayYearAtBegin3 = "YY.MM.DD"
	// FormatDayYearAtBegin4 is YYMMDD. Complete day.
	FormatDayYearAtBegin4 = "YYMMDD"
)

// Date and time formats with a four-digit year at the end.
const (
	// FormatDateLongYearAtEnd11 is DD-MM-YYYY HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtEnd11 = "DD-MM-YYYY HH24:MI"
	// FormatDateLongYearAtEnd12 is DD/MM/YYYY HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtEnd12 = "DD/MM/YYYY HH24:MI"
	// FormatDateLongYearAtEnd13 is DD.MM.YYYY HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtEnd13 = "DD.MM.YYYY HH24:MI"
	// FormatDateLongYearAtEnd14 is DDMMYYYY HH24:MI. Complete date plus hours and minutes.
	FormatDateLongYearAtEnd14 = "DDMMYYYY HH24:MI"
	// FormatDateLongYearAtEnd21 is DD-MM-YYYY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateLongYearAtEnd21 = "DD-MM-YYYY HH24:MI:SS"
	// FormatDateLongYearAtEnd22 is DD/MM/YYYY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateLongYearAtEnd22 = "DD/MM/YYYY HH24:MI:SS"
	// FormatDateLongYearAtEnd23 is DD.MM.YYYY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateLongYearAtEnd23 = "DD.MM.YYYY HH24:MI:SS"
	// FormatDateLongYearAtEnd24 is DDMMYYYY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateLongYearAtEnd24 = "DDMMYYYY HH24:MI:SS"
	// FormatDateLongYearAtEnd31 is DD-MM-YYYY HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtEnd31 = "DD-MM-YYYY HH24:MI:SS.FFF"
	// FormatDateLongYearAtEnd32 is DD/MM/YYYY HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtEnd32 = "DD/MM/YYYY HH24:MI:SS.FFF"
	// FormatDateLongYearAtEnd33 is DD.MM.YYYY HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtEnd33 = "DD.MM.YYYY HH24:MI:SS.FFF"
	// FormatDateLongYearAtEnd34 is DDMMYYYY HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateLongYearAtEnd34 = "DDMMYYYY HH24:MI:SS.FFF"
	// FormatDateLongYearAtEnd41 is DD-MM-YYYY HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtEnd41 = "DD-MM-YYYY HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtEnd42 is DD/MM/YYYY HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtEnd42 = "DD/MM/YYYY HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtEnd43 is DD.MM.YYYY HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtEnd43 = "DD.MM.YYYY HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtEnd44 is DDMMYYYY HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateLongYearAtEnd44 = "DDMMYYYY HH24:MI:SS.FFFFFF"
	// FormatDateLongYearAtEnd51 is DD-MM-YYYY HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtEnd51 = "DD-MM-YYYY HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtEnd52 is DD/MM/YYYY HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtEnd52 = "DD/MM/YYYY HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtEnd53 is DD.MM.YYYY HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtEnd53 = "DD.MM.YYYY HH24:MI:SS.FFFFFFFFF"
	// FormatDateLongYearAtEnd54 is DDMMYYYY HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateLongYearAtEnd54 = "DDMMYYYY HH24:MI:SS.FFFFFFFFF"
)

// Date and time formats with a two-digit year at the end.
const (
	// FormatDateYearAtEnd11 is DD-MM-YY HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtEnd11 = "DD-MM-YY HH24:MI"
	// FormatDateYearAtEnd12 is DD/MM/YY HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtEnd12 = "DD/MM/YY HH24:MI"
	// FormatDateYearAtEnd13 is DD.MM.YY HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtEnd13 = "DD.MM.YY HH24:MI"
	// FormatDateYearAtEnd14 is DDMMYY HH24:MI. Complete date plus hours and minutes.
	FormatDateYearAtEnd14 = "DDMMYY HH24:MI"
	// FormatDateYearAtEnd21 is DD-MM-YY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtEnd21 = "DD-MM-YY HH24:MI:SS"
	// FormatDateYearAtEnd22 is DD/MM/YY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtEnd22 = "DD/MM/YY HH24:MI:SS"
	// FormatDateYearAtEnd23 is DD.MM.YY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtEnd23 = "DD.MM.YY HH24:MI:SS"
	// FormatDateYearAtEnd24 is DDMMYY HH24:MI:SS. Complete date plus hours, minutes and seconds.
	FormatDateYearAtEnd24 = "DDMMYY HH24:MI:SS"
	// FormatDateYearAtEnd31 is DD-MM-YY HH24:MI:SS.FFF. Complete date plus hours, minutes, seconds, milliseconds.
	FormatDateYearAtEnd31 = "DD-MM-YY HH24:MI:SS.FFF"
	// FormatDateYearAtEnd41 is DD-MM-YY HH24:MI:SS.FFFFFF. Complete date plus hours, minutes, seconds, microseconds.
	FormatDateYearAtEnd41 = "DD-MM-YY HH24:MI:SS.FFFFFF"
	// FormatDateYearAtEnd51 is DD-MM-YY HH24:MI:SS.FFFFFFFFF. Complete date plus hours, minutes, seconds, nanoseconds.
	FormatDateYearAtEnd51 = "DD-MM-YY HH24:MI:SS.FFFFFFFFF"
)

// Date-only formats with a four-digit year at the end.
const (
	// FormatDayLongYearAtEnd1 is DD-MM-YYYY. Complete day.
	FormatDayLongYearAtEnd1 = "DD-MM-YYYY"
	// FormatDayLongYearAtEnd2 is DD/MM/YYYY. Complete day.
	FormatDayLongYearAtEnd2 = "DD/MM/YYYY"
	// FormatDayLongYearAtEnd3 is DD.MM.YYYY. Complete day.
	FormatDayLongYearAtEnd3 = "DD.MM.YYYY"
	// FormatDayLongYearAtEnd4 is DDMMYYYY. Complete day.
	FormatDayLongYearAtEnd4 = "DDMMYYYY"
)

// Date-only formats with a two-digit year at the end.
const (
	// FormatDayYearAtEnd1 is DD-MM-YY. Complete day.
	FormatDayYearAtEnd1 = "DD-MM-YY"
	// FormatDayYearAtEnd2 is DD/MM/YY. Complete day.
	FormatDayYearAtEnd2 = "DD/MM/YY"
	// FormatDayYearAtEnd3 is DD.MM.YY. Complete day.
	FormatDayYearAtEnd3 = "DD.MM.YY"
	// FormatDayYearAtEnd4 is DDMMYY. Complete day.
	FormatDayYearAtEnd4 = "DDMMYY"
)
// DaysEn lists the full English day names; index 0 is Sunday.
var DaysEn = [...]string{
	"Sunday",
	"Monday",
	"Tuesday",
	"Wednesday",
	"Thursday",
	"Friday",
	"Saturday",
}

// DaysEnAbbreviated lists two-letter English day abbreviations; index 0 is Sunday.
var DaysEnAbbreviated = [...]string{
	"Su",
	"Mo",
	"Tu",
	"We",
	"Th",
	"Fr",
	"Sa",
}

// DaysRu lists the full Russian day names; index 0 is Sunday ("Воскресенье").
var DaysRu = [...]string{
	"Воскресенье",
	"Понедельник",
	"Вторник",
	"Среда",
	"Четверг",
	"Пятница",
	"Суббота",
}

// DaysRuAbbreviated lists two-letter Russian day abbreviations; index 0 is Sunday.
var DaysRuAbbreviated = [...]string{
	"Вс",
	"Пн",
	"Вт",
	"Ср",
	"Чт",
	"Пт",
	"Сб",
}

// MonthsEn lists the full English month names; index 0 is January.
var MonthsEn = [...]string{
	"January",
	"February",
	"March",
	"April",
	"May",
	"June",
	"July",
	"August",
	"September",
	"October",
	"November",
	"December",
}

// MonthsRu lists the Russian month names in the nominative case; index 0 is January.
var MonthsRu = [...]string{
	"Январь",
	"Февраль",
	"Март",
	"Апрель",
	"Май",
	"Июнь",
	"Июль",
	"Август",
	"Сентябрь",
	"Октябрь",
	"Ноябрь",
	"Декабрь",
}

// MonthsRuGenitive lists the Russian month names in the genitive case; index 0 is January.
var MonthsRuGenitive = [...]string{
	"Января",
	"Февраля",
	"Марта",
	"Апреля",
	"Мая",
	"Июня",
	"Июля",
	"Августа",
	"Сентября",
	"Октября",
	"Ноября",
	"Декабря",
}

// MonthsRuDative lists the Russian month names in the dative case; index 0 is January.
var MonthsRuDative = [...]string{
	"Январю",
	"Февралю",
	"Марту",
	"Апрелю",
	"Маю",
	"Июню",
	"Июлю",
	"Августу",
	"Сентябрю",
	"Октябрю",
	"Ноябрю",
	"Декабрю",
}

// MonthsRuAccusative lists the Russian month names in the accusative case;
// index 0 is January.
// NOTE(review): identical to MonthsRu — for these inanimate nouns the
// accusative coincides with the nominative, so the duplication appears
// intentional; confirm.
var MonthsRuAccusative = [...]string{
	"Январь",
	"Февраль",
	"Март",
	"Апрель",
	"Май",
	"Июнь",
	"Июль",
	"Август",
	"Сентябрь",
	"Октябрь",
	"Ноябрь",
	"Декабрь",
}

// MonthsRuAblative lists the Russian month names in the instrumental
// ("ablative") case; index 0 is January.
var MonthsRuAblative = [...]string{
	"Январем",
	"Февралем",
	"Мартом",
	"Апрелем",
	"Маем",
	"Июнем",
	"Июлем",
	"Августом",
	"Сентябрем",
	"Октябрем",
	"Ноябрем",
	"Декабрем",
}

// MonthsRuPrepositional lists the Russian month names in the prepositional
// case; index 0 is January.
var MonthsRuPrepositional = [...]string{
	"Январе",
	"Феврале",
	"Марте",
	"Апреле",
	"Мае",
	"Июне",
	"Июле",
	"Августе",
	"Сентябре",
	"Октябре",
	"Ноябре",
	"Декабре",
}
package quicktime
import (
"bytes"
"encoding/binary"
"io"
"reflect"
)
// AtomInfo has the common information shared by every atom (box).
type AtomInfo struct {
	// Offset specifies the offset of the atom in the file, in bytes.
	Offset uint64
	// Size specifies the total size (bytes) of the atom, header included.
	Size uint64
	// HeaderSize specifies the size (bytes) of the common fields which are
	// defined as "Atom" class members at ISO/IEC 14496-12 (8 for the compact
	// header, 16 when the 64-bit largesize field is present).
	HeaderSize uint64
	// Type specifies the atom type, represented by 4 characters.
	Type AtomType
	// ExtendedType specifies the box extended (user) type, represented by 16
	// bytes; only populated for "uuid" atoms.
	ExtendedType [16]byte
}

const (
	// defaultHeaderSize is the size of the compact atom header: a 32-bit size
	// field followed by a 4-character type code.
	defaultHeaderSize = 8
)
// ReadAtomInfo reads the common fields which are defined as "Atom" class
// members at ISO/IEC 14496-12. On entry r must be positioned at the first
// byte of an atom; on return it is positioned just past the fields that were
// consumed (header, optional 64-bit size, optional 16-byte extended type).
func ReadAtomInfo(r io.ReadSeeker) (*AtomInfo, error) {
	// Remember where this atom starts so callers can seek back to it later.
	offset, err := r.Seek(0, io.SeekCurrent)
	if err != nil {
		return nil, err
	}
	ai := &AtomInfo{
		Offset: uint64(offset),
	}
	// read 8 bytes: the 32-bit size followed by the 4-character type code
	buf := bytes.NewBuffer(make([]byte, 0, defaultHeaderSize))
	if _, err := io.CopyN(buf, r, defaultHeaderSize); err != nil {
		return nil, err
	}
	ai.HeaderSize = defaultHeaderSize
	// pick size and type
	data := buf.Bytes()
	ai.Size = uint64(binary.BigEndian.Uint32(data))
	ai.Type = AtomType{data[4], data[5], data[6], data[7]}
	if ai.Size == 1 {
		// size == 1 flags a 64-bit size field; read 8 more bytes
		buf.Reset()
		if _, err := io.CopyN(buf, r, 8); err != nil {
			return nil, err
		}
		ai.HeaderSize += 8
		ai.Size = binary.BigEndian.Uint64(buf.Bytes())
	} else if ai.Size == 0 {
		// size == 0: the box extends to the end of the file. Measure the file
		// length, derive the size, then restore the position to the payload.
		offsetEOF, err := r.Seek(0, io.SeekEnd)
		if err != nil {
			return nil, err
		}
		ai.Size = uint64(offsetEOF) - ai.Offset
		if _, err := ai.SeekToPayload(r); err != nil {
			return nil, err
		}
	}
	if ai.Type == StrToAtomType("uuid") {
		// "uuid" atoms carry a 16-byte extended (user) type after the header.
		// NOTE(review): HeaderSize is not advanced by these 16 bytes — confirm
		// whether callers expect the usertype to count as part of the header.
		buf.Reset()
		if _, err := io.CopyN(buf, r, 16); err != nil {
			return nil, err
		}
		copy(ai.ExtendedType[:], buf.Bytes())
	}
	return ai, nil
}
// isFullAtom reports whether the atom type is registered in atomMap with a
// struct that has a field of type FullAtom (i.e. the prototype embeds the
// full-atom fields).
func isFullAtom(bi *AtomInfo) bool {
	// Look the prototype up once instead of twice, and hoist the
	// loop-invariant reflect.TypeOf(FullAtom{}) out of the field loop.
	proto, ok := atomMap[bi.Type]
	if !ok {
		return false
	}
	fullAtomType := reflect.TypeOf(FullAtom{})
	t := reflect.TypeOf(proto)
	for i := 0; i < t.NumField(); i++ {
		if t.Field(i).Type == fullAtomType {
			return true
		}
	}
	return false
}
// SeekToStart positions s at the first byte of the atom (its size field) and
// returns the resulting offset.
func (bi *AtomInfo) SeekToStart(s io.Seeker) (int64, error) {
	return s.Seek(int64(bi.Offset), io.SeekStart)
}

// SeekToPayload positions s just past the atom header, at the first byte of
// the payload, and returns the resulting offset.
func (bi *AtomInfo) SeekToPayload(s io.Seeker) (int64, error) {
	return s.Seek(int64(bi.Offset+bi.HeaderSize), io.SeekStart)
}

// SeekToEnd positions s at the first byte after this atom and returns the
// resulting offset.
func (bi *AtomInfo) SeekToEnd(s io.Seeker) (int64, error) {
	return s.Seek(int64(bi.Offset+bi.Size), io.SeekStart)
}
package flip
// Interface represents a type whose order of elements can be flipped by the
// routines in this package. The methods require that the elements of the
// collection be enumerated by an integer index.
type Interface interface {
Len() int
Swap(i, j int)
}
// Flip flips the data.
func Flip(data Interface) {
for l, r := 0, data.Len()-1; l < r; l, r = l+1, r-1 {
data.Swap(l, r)
}
}
// BoolSlice adapts a []bool to the Interface contract.
type BoolSlice []bool

// Len reports the number of elements in the slice.
func (p BoolSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p BoolSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Bools reverses a slice of bools in place.
func Bools(p []bool) { Flip(BoolSlice(p)) }

// Uint8Slice adapts a []uint8 to the Interface contract.
type Uint8Slice []uint8

// Len reports the number of elements in the slice.
func (p Uint8Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Uint8Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uint8s reverses a slice of uint8s in place.
func Uint8s(p []uint8) { Flip(Uint8Slice(p)) }

// Uint16Slice adapts a []uint16 to the Interface contract.
type Uint16Slice []uint16

// Len reports the number of elements in the slice.
func (p Uint16Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Uint16Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uint16s reverses a slice of uint16s in place.
func Uint16s(p []uint16) { Flip(Uint16Slice(p)) }

// Uint32Slice adapts a []uint32 to the Interface contract.
type Uint32Slice []uint32

// Len reports the number of elements in the slice.
func (p Uint32Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Uint32Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uint32s reverses a slice of uint32s in place.
func Uint32s(p []uint32) { Flip(Uint32Slice(p)) }

// Uint64Slice adapts a []uint64 to the Interface contract.
type Uint64Slice []uint64

// Len reports the number of elements in the slice.
func (p Uint64Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Uint64Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uint64s reverses a slice of uint64s in place.
func Uint64s(p []uint64) { Flip(Uint64Slice(p)) }
// Int8Slice adapts a []int8 to the Interface contract.
type Int8Slice []int8

// Len reports the number of elements in the slice.
func (p Int8Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Int8Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Int8s reverses a slice of int8s in place.
func Int8s(p []int8) { Flip(Int8Slice(p)) }

// Int16Slice adapts a []int16 to the Interface contract.
type Int16Slice []int16

// Len reports the number of elements in the slice.
func (p Int16Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Int16Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Int16s reverses a slice of int16s in place.
func Int16s(p []int16) { Flip(Int16Slice(p)) }

// Int32Slice adapts a []int32 to the Interface contract.
type Int32Slice []int32

// Len reports the number of elements in the slice.
func (p Int32Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Int32Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Int32s reverses a slice of int32s in place.
func Int32s(p []int32) { Flip(Int32Slice(p)) }

// Int64Slice adapts a []int64 to the Interface contract.
type Int64Slice []int64

// Len reports the number of elements in the slice.
func (p Int64Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Int64Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Int64s reverses a slice of int64s in place.
func Int64s(p []int64) { Flip(Int64Slice(p)) }

// Float32Slice adapts a []float32 to the Interface contract.
type Float32Slice []float32

// Len reports the number of elements in the slice.
func (p Float32Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Float32Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Float32s reverses a slice of float32s in place.
func Float32s(p []float32) { Flip(Float32Slice(p)) }

// Float64Slice adapts a []float64 to the Interface contract.
type Float64Slice []float64

// Len reports the number of elements in the slice.
func (p Float64Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Float64Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Float64s reverses a slice of float64s in place.
func Float64s(p []float64) { Flip(Float64Slice(p)) }
// Complex64Slice adapts a []complex64 to the Interface contract.
type Complex64Slice []complex64

// Len reports the number of elements in the slice.
func (p Complex64Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Complex64Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Complex64s reverses a slice of complex64s in place.
func Complex64s(p []complex64) { Flip(Complex64Slice(p)) }

// Complex128Slice adapts a []complex128 to the Interface contract.
type Complex128Slice []complex128

// Len reports the number of elements in the slice.
func (p Complex128Slice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p Complex128Slice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Complex128s reverses a slice of complex128s in place.
func Complex128s(p []complex128) { Flip(Complex128Slice(p)) }

// ByteSlice adapts a []byte to the Interface contract.
type ByteSlice []byte

// Len reports the number of elements in the slice.
func (p ByteSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p ByteSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Bytes reverses a slice of bytes in place.
func Bytes(p []byte) { Flip(ByteSlice(p)) }

// RuneSlice adapts a []rune to the Interface contract.
type RuneSlice []rune

// Len reports the number of elements in the slice.
func (p RuneSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p RuneSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Runes reverses a slice of runes in place.
func Runes(p []rune) { Flip(RuneSlice(p)) }

// UintSlice adapts a []uint to the Interface contract.
type UintSlice []uint

// Len reports the number of elements in the slice.
func (p UintSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p UintSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uints reverses a slice of uints in place.
func Uints(p []uint) { Flip(UintSlice(p)) }

// IntSlice adapts a []int to the Interface contract.
type IntSlice []int

// Len reports the number of elements in the slice.
func (p IntSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p IntSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Ints reverses a slice of ints in place.
func Ints(p []int) { Flip(IntSlice(p)) }
// UintptrSlice adapts a []uintptr to the Interface contract.
type UintptrSlice []uintptr

// Len reports the number of elements in the slice.
func (p UintptrSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p UintptrSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Uintptrs reverses a slice of uintptrs in place.
func Uintptrs(p []uintptr) { Flip(UintptrSlice(p)) }

// StringSlice adapts a []string to the Interface contract.
type StringSlice []string

// Len reports the number of elements in the slice.
func (p StringSlice) Len() int { return len(p) }

// Swap exchanges the elements at indices i and j.
func (p StringSlice) Swap(i, j int) { p[j], p[i] = p[i], p[j] }

// Strings reverses a slice of strings in place.
func Strings(p []string) { Flip(StringSlice(p)) }
// String returns s with its runes in reverse order. A new string is returned
// because strings are immutable in Go.
func String(s string) string {
	runes := []rune(s)
	for l, r := 0, len(runes)-1; l < r; l, r = l+1, r-1 {
		runes[l], runes[r] = runes[r], runes[l]
	}
	return string(runes)
}
package main
import (
"fmt"
"io"
"math/rand"
"os"
)
var (
	// InhibitoryNeuronDensity is the probability with which a newly created
	// neuron is made inhibitory rather than excitatory (see NewNetworkLayer).
	InhibitoryNeuronDensity = 0.0
)

// NetworkLayer is an individual, 2D layer of neurons.
type NetworkLayer struct {
	Neurons [][]*Neuron `json:"neurons"`
}
// NewNetworkLayer creates a new network layer of the specified width and
// height. Each neuron is drawn as inhibitory with probability
// InhibitoryNeuronDensity and excitatory otherwise.
func NewNetworkLayer(width, height int) *NetworkLayer {
	neurons := make([][]*Neuron, width)
	for i := 0; i < width; i++ {
		row := make([]*Neuron, height)
		for j := range row {
			if rand.Float64() < InhibitoryNeuronDensity {
				row[j] = NewNeuron(TypeInhibitory)
			} else {
				row[j] = NewNeuron(TypeExcitatory)
			}
		}
		neurons[i] = row
	}
	return &NetworkLayer{Neurons: neurons}
}
// Clear resets the potential of every neuron in this layer back to 0.0.
func (l *NetworkLayer) Clear() {
	for _, row := range l.Neurons {
		for _, n := range row {
			n.Potential = 0.0
		}
	}
}
// EachNeuron invokes do once for every neuron in this layer, in row-major
// order.
func (l *NetworkLayer) EachNeuron(do func(n *Neuron)) {
	for i := range l.Neurons {
		for j := range l.Neurons[i] {
			do(l.Neurons[i][j])
		}
	}
}
// EachNeuronWithIndex invokes do once for every neuron in the layer, passing
// the neuron together with its row and column position.
func (l *NetworkLayer) EachNeuronWithIndex(do func(n *Neuron, row int, column int)) {
	for rowIdx, row := range l.Neurons {
		for colIdx := range row {
			do(row[colIdx], rowIdx, colIdx)
		}
	}
}
// Connect fully connects this layer to target: every neuron in this layer is
// wired to every neuron in target, and each new connection receives a random
// weight from rand.Float64.
func (l *NetworkLayer) Connect(target *NetworkLayer) {
	l.EachNeuron(func(src *Neuron) {
		target.EachNeuron(func(dst *Neuron) {
			c := src.Connect(dst)
			c.Weight = rand.Float64()
		})
	})
}
// Height returns the height of the current layer (the length of the first
// column of neurons). It returns 0 for a layer with no columns instead of
// panicking on the empty outer slice.
func (l *NetworkLayer) Height() int {
	if len(l.Neurons) == 0 {
		return 0
	}
	return len(l.Neurons[0])
}
// Print writes the current layer to stdout using the requested field, one row
// of space-separated values per line. Recognized fields are "total_in" and
// "total_out" (sums of incoming/outgoing connection weights); any other value,
// such as "potential", prints the neuron potential.
func (l *NetworkLayer) Print(field string) {
	prevRow := 0
	l.EachNeuronWithIndex(func(n *Neuron, row, col int) {
		if row != prevRow {
			prevRow = row
			io.WriteString(os.Stdout, "\n")
		}
		var val float64
		switch field {
		case "total_in":
			for _, conn := range n.In {
				val += conn.Weight
			}
		case "total_out":
			for _, conn := range n.Out {
				val += conn.Weight
			}
		default:
			val = n.Potential
		}
		io.WriteString(os.Stdout, fmt.Sprintf(" %.3f", val))
	})
}
// PrintErrorOffset logs, for every neuron in the layer, the accumulated error
// data from errMap alongside the neuron's current potential, one log line per
// row. Each neuron is rendered as "totalWeight(error)[potential]".
// NOTE(review): assumes errMap contains an entry for every neuron in the
// layer; a missing key yields a nil *NeuronError and a panic — confirm that
// callers always populate the map fully.
func (l *NetworkLayer) PrintErrorOffset(errMap map[*Neuron]*NeuronError) {
	for _, row := range l.Neurons {
		vals := ""
		for _, neuron := range row {
			vals = fmt.Sprintf("%s%.3f(%.3f)[%.3f]\t", vals, errMap[neuron].TotalWeight,
				errMap[neuron].Error, neuron.Potential)
		}
		Info.Println(vals)
	}
}
// Width returns the width of this current layer
func (l *NetworkLayer) Width() int {
return len(l.Neurons)
} | network_layer.go | 0.739986 | 0.542621 | network_layer.go | starcoder |
package main
import (
"math"
"github.com/adrianderstroff/pbr/pkg/cgm"
"github.com/go-gl/mathgl/mgl32"
)
// Ray is a ray primitive consisting of an origin o and a direction dir.
// NOTE(review): nothing here guarantees dir is unit length — confirm callers.
type Ray struct {
	o   mgl32.Vec3
	dir mgl32.Vec3
}

// Sphere consists of a center position o and a radius r.
type Sphere struct {
	o mgl32.Vec3
	r float32
}

// AABB is an axis aligned bounding box defined by its min and max corner
// points.
type AABB struct {
	min mgl32.Vec3
	max mgl32.Vec3
}

// Hit is a hit record storing the ray parameter t, the intersection point p
// and the surface normal n at that point.
type Hit struct {
	t float32
	p mgl32.Vec3
	n mgl32.Vec3
}
// isBetween reports whether a lies in the closed interval [min, max].
func isBetween(a, min, max float32) bool {
	return a >= min && a <= max
}
// vecMin returns the component-wise minimum of a and b.
func vecMin(a, b mgl32.Vec3) mgl32.Vec3 {
	x := cgm.Min32(a.X(), b.X())
	y := cgm.Min32(a.Y(), b.Y())
	z := cgm.Min32(a.Z(), b.Z())
	return mgl32.Vec3{x, y, z}
}

// vecMax returns the component-wise maximum of a and b.
func vecMax(a, b mgl32.Vec3) mgl32.Vec3 {
	x := cgm.Max32(a.X(), b.X())
	y := cgm.Max32(a.Y(), b.Y())
	z := cgm.Max32(a.Z(), b.Z())
	return mgl32.Vec3{x, y, z}
}

// vecMul returns the component-wise (Hadamard) product of a and b.
func vecMul(a, b mgl32.Vec3) mgl32.Vec3 {
	x := a.X() * b.X()
	y := a.Y() * b.Y()
	z := a.Z() * b.Z()
	return mgl32.Vec3{x, y, z}
}
// min3 returns the smallest of the three values.
func min3(a, b, c float32) float32 {
	return cgm.Min32(cgm.Min32(a, b), c)
}

// max3 returns the largest of the three values.
func max3(a, b, c float32) float32 {
	return cgm.Max32(cgm.Max32(a, b), c)
}
// intersectSphere intersects ray with sphere and reports the closest hit
// whose ray parameter t lies within [tmin, tmax]. The boolean result is false
// when no such intersection exists.
func intersectSphere(ray *Ray, sphere *Sphere, tmin, tmax float32) (Hit, bool) {
	// solve squared term: |o + t*dir - c|^2 = r^2 expands into the quadratic
	// a*t^2 + 2*b*t + c = 0 with the coefficients below.
	oc := ray.o.Sub(sphere.o)
	a := ray.dir.Dot(ray.dir)
	b := oc.Dot(ray.dir)
	c := oc.Dot(oc) - sphere.r*sphere.r
	discriminant := b*b - a*c
	if discriminant > 0 {
		// get both solutions of the quadratic
		t1 := (-b - cgm.Sqrt32(discriminant)) / a
		t2 := (-b + cgm.Sqrt32(discriminant)) / a
		// check which one lies inside the allowed parameter window
		t1inside := isBetween(t1, tmin, tmax)
		t2inside := isBetween(t2, tmin, tmax)
		// early return when neither root is admissible
		if !t1inside && !t2inside {
			return Hit{}, false
		}
		// determine the right parameter t: prefer the nearer admissible root
		t := t1
		if t1inside && t2inside {
			t = cgm.Min32(t1, t2)
		} else if t2inside {
			t = t2
		}
		p := ray.o.Add(ray.dir.Mul(t))
		// set hit record; the normal points radially out from the sphere center
		hit := Hit{
			t: t,
			p: p,
			n: p.Sub(sphere.o).Normalize(),
		}
		return hit, true
	}
	return Hit{}, false
}
// intersectAABB intersects ray with the axis aligned bounding box aabb using
// the slab method and returns the corresponding point along the ray.
//
// NOTE(review): d is the NEGATED reciprocal of the ray direction (-1/dir),
// which flips the sign of the slab distances relative to the textbook slab
// test; together with the commented-out line below this looks like
// work-in-progress — confirm the intended sign convention before relying on
// the result.
func intersectAABB(ray *Ray, aabb *AABB) mgl32.Vec3 {
	d := mgl32.Vec3{
		-1 / ray.dir.X(),
		-1 / ray.dir.Y(),
		-1 / ray.dir.Z(),
	}
	// per-axis parameters for the two planes of each slab pair
	tMin := vecMul(aabb.min.Sub(ray.o), d)
	tMax := vecMul(aabb.max.Sub(ray.o), d)
	t1 := vecMin(tMin, tMax)
	t2 := vecMax(tMin, tMax)
	// largest entry and smallest exit across the three axes
	tNear := max3(t1.X(), t1.Y(), t1.Z())
	tFar := min3(t2.X(), t2.Y(), t2.Z())
	//t := cgm.Min32(tNear, tFar)
	t := tNear
	// fall back to the far parameter when the near one lies behind the origin
	if tNear < 0 {
		t = tFar
	}
	return ray.o.Add(ray.dir.Mul(t))
}
// cosineDistribution returns a direction sampled around the surface normal at
// hit, mapping the two random numbers r1 and r2 (presumably in [0, 1) — TODO
// confirm) through a cosine-weighted hemisphere parameterization; a scales
// the tangential components.
func cosineDistribution(hit *Hit, r1, r2, a float32) mgl32.Vec3 {
	// calculate tangent and binormal to form an orthonormal basis around n
	n := hit.n.Normalize()
	t := mgl32.Vec3{0, 1, 0}
	// NOTE(review): exact float equality — a normal that is almost but not
	// exactly +Y slips past this guard and yields a degenerate basis, and
	// n == {0,-1,0} is not handled; consider comparing |dot| to a threshold.
	if n.Dot(mgl32.Vec3{0, 1, 0}) == 1 {
		t = mgl32.Vec3{1, 0, 0}
	}
	b := t.Cross(n)
	t = n.Cross(b)
	// spherical coordinates of the sampled direction
	sinTheta := cgm.Sqrt32(r1)
	cosTheta := cgm.Sqrt32(1 - sinTheta*sinTheta)
	psi := 2 * math.Pi * r2
	v1 := n.Mul(cosTheta)
	v2 := t.Mul(sinTheta * cgm.Cos32(psi) * a)
	v3 := b.Mul(sinTheta * cgm.Sin32(psi) * a)
	dir := v1.Add(v2).Add(v3)
	return dir.Normalize()
}
package stream
import (
"mime"
"os"
"path/filepath"
"testing"
"vincent.click/pkg/preflight/expect"
)
// File is a set of expectations about a file stream
type File struct {
*testing.T
r reader
b []byte
}
// ExpectFile returns expectations based on a file descriptor
func ExpectFile(t *testing.T, file *os.File) Expectations {
return &File{
T: t,
r: file,
}
}
// Close the underlying file descriptor
func (f *File) Close() error {
return f.r.Close()
}
// Size returns an Expectation about the file size in bytes
func (f *File) Size() expect.Expectation {
info, err := f.r.Stat()
if err != nil {
return expect.Faulty(f.T, err)
}
return expect.Value(f.T, info.Size())
}
// Text returns an Expectation about the entire file contents decoded as text.
func (f *File) Text() expect.Expectation {
	raw, err := readAll(f.r)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, string(raw))
}

// NextText returns an Expectation about the next chunk of text, reading the
// given number of bytes from the current position.
func (f *File) NextText(bytes int) expect.Expectation {
	chunk, err := read(f.r, bytes)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, string(chunk))
}

// TextAt returns an Expectation about the text contents at a specific
// position in the file.
func (f *File) TextAt(pos int64, bytes int) expect.Expectation {
	chunk, err := readAt(f.r, pos, bytes)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, string(chunk))
}

// Bytes returns an Expectation about the entire file contents as raw bytes.
func (f *File) Bytes() expect.Expectation {
	raw, err := readAll(f.r)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, raw)
}

// NextBytes returns an Expectation about the next chunk of bytes read from
// the current position.
func (f *File) NextBytes(bytes int) expect.Expectation {
	chunk, err := read(f.r, bytes)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, chunk)
}

// BytesAt returns an Expectation about the file contents at a specific
// position.
func (f *File) BytesAt(pos int64, bytes int) expect.Expectation {
	chunk, err := readAt(f.r, pos, bytes)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	return expect.Value(f.T, chunk)
}
// Lines returns an Expectation about the entire file contents as lines of
// text.
func (f *File) Lines() expect.Expectation {
	lines := []string{}
	for {
		line, bytes, err := readLine(f.r, f.b)
		if err != nil {
			return expect.Faulty(f.T, err)
		}
		// carry any unconsumed bytes over to the next read
		f.b = bytes
		// NOTE(review): the loop stops at the first empty line returned by
		// readLine — presumably readLine yields an empty slice only at EOF;
		// confirm that blank lines inside the file do not truncate the result.
		if len(line) < 1 {
			break
		}
		lines = append(lines, string(line))
	}
	return expect.Value(f.T, lines)
}
// NextLine returns an Expectation about the next line of text, carrying any
// unconsumed bytes over to the following read.
func (f *File) NextLine() expect.Expectation {
	line, rest, err := readLine(f.r, f.b)
	if err != nil {
		return expect.Faulty(f.T, err)
	}
	f.b = rest
	return expect.Value(f.T, string(line))
}
// ContentType returns an Expectation about the content type
func (f *File) ContentType() expect.Expectation {
ext := filepath.Ext(f.r.Name())
if len(ext) > 0 {
typ := mime.TypeByExtension(ext)
return expect.Value(f.T, typ)
}
if typ, err := detectContentType(f.r); err != nil {
return expect.Faulty(f.T, err)
} else {
return expect.Value(f.T, typ)
}
} | stream/file.go | 0.721351 | 0.408631 | file.go | starcoder |
package subtest
import (
"errors"
"reflect"
"regexp"
"strconv"
"testing"
"time"
)
// Test returns a test that fails fatally with the error returned by f.
func Test(f func() error) func(t *testing.T) {
	return func(t *testing.T) {
		err := f()
		if err == nil {
			return
		}
		t.Fatal(err)
	}
}
// ValueFunc is a function returning a value. The main purpose of a ValueFunc
// instance is to initialize tests against the returned value.
type ValueFunc func() (interface{}, error)

// Value returns a new ValueFunc that always yields the static value v.
func Value(v interface{}) ValueFunc {
	return func() (interface{}, error) { return v, nil }
}
// Len returns a new ValueFunc for the length of v.
func Len(v interface{}) ValueFunc {
	return func() (interface{}, error) {
		if l, ok := asLen(v); ok {
			return l, nil
		}
		return nil, FailGot(msgNotLenType, v)
	}
}

// asLen extracts the length of v for kinds that support the len operation.
func asLen(v interface{}) (int, bool) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
		return rv.Len(), true
	default:
		return 0, false
	}
}
// Cap returns a new ValueFunc for the capacity of v.
func Cap(v interface{}) ValueFunc {
return func() (interface{}, error) {
l, ok := asCap(v)
if !ok {
return nil, FailGot(msgNotCapType, v)
}
return l, nil
}
}
// asCap extracts the capacity of v, reporting ok=false for kinds that have
// no capacity (anything other than array, chan, map or slice).
func asCap(v interface{}) (int, bool) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
		return rv.Cap(), true
	}
	return 0, false
}
// Index returns a new ValueFunc for the value at index v[i]. Accepts input of
// type array, slice and string; any other kind fails. An in-type but
// out-of-range index yields an error rather than a panic.
func Index(v interface{}, i int) ValueFunc {
	return func() (interface{}, error) {
		rv := reflect.ValueOf(v)
		k := rv.Kind()
		if k != reflect.Array && k != reflect.Slice && k != reflect.String {
			return nil, FailGot(msgNotIndexType, v)
		}
		if i >= rv.Len() {
			return nil, errors.New(msgIndexOutOfRange)
		}
		return rv.Index(i).Interface(), nil
	}
}
// Float64 returns a new ValueFunc that parses v into a float64. Accepts
// numeric kinds and string kinds as input; anything else fails.
func Float64(v interface{}) ValueFunc {
	return func() (interface{}, error) {
		if f, ok := asFloat64(v); ok {
			return f, nil
		}
		return nil, FailGot(msgNotFloat64, v)
	}
}
// asFloat64 coerces integer, unsigned, float and numeric-string values to
// float64, reporting ok=false for anything it cannot convert.
func asFloat64(v interface{}) (float64, bool) {
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return float64(rv.Uint()), true
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(rv.Int()), true
	case reflect.Float32, reflect.Float64:
		return rv.Float(), true
	case reflect.String:
		// E.g. json.Number carries numbers as strings.
		f, err := strconv.ParseFloat(rv.String(), 64)
		return f, err == nil
	}
	return 0, false
}
// Test returns a test function that fails fatally with the error returned by
// c.Check(vf), i.e. by running the check c against the value produced by vf.
func (vf ValueFunc) Test(c Check) func(t *testing.T) {
	return func(t *testing.T) {
		t.Helper()
		if err := c.Check(vf); err != nil {
			t.Fatal(err)
		}
	}
}
// LessThan is equivalent to vf.Test(LessThan(v)).
func (vf ValueFunc) LessThan(v float64) func(t *testing.T) {
	return vf.Test(LessThan(v))
}
// LessThanOrEqual is equivalent to vf.Test(LessThanOrEqual(v)).
func (vf ValueFunc) LessThanOrEqual(v float64) func(t *testing.T) {
	return vf.Test(LessThanOrEqual(v))
}
// GreaterThan is equivalent to vf.Test(GreaterThan(v)).
func (vf ValueFunc) GreaterThan(v float64) func(t *testing.T) {
	return vf.Test(GreaterThan(v))
}
// GreaterThanOrEqual is equivalent to vf.Test(GreaterThanOrEqual(v)).
func (vf ValueFunc) GreaterThanOrEqual(v float64) func(t *testing.T) {
	return vf.Test(GreaterThanOrEqual(v))
}
// NotNumericEqual is equivalent to vf.Test(NotNumericEqual(v)).
func (vf ValueFunc) NotNumericEqual(v float64) func(t *testing.T) {
	return vf.Test(NotNumericEqual(v))
}
// NumericEqual is equivalent to vf.Test(NumericEqual(v)).
func (vf ValueFunc) NumericEqual(v float64) func(t *testing.T) {
	return vf.Test(NumericEqual(v))
}
// NotBefore is equivalent to vf.Test(NotBefore(v)).
func (vf ValueFunc) NotBefore(v time.Time) func(t *testing.T) {
	return vf.Test(NotBefore(v))
}
// Before is equivalent to vf.Test(Before(v)).
func (vf ValueFunc) Before(v time.Time) func(t *testing.T) {
	return vf.Test(Before(v))
}
// NotTimeEqual is equivalent to vf.Test(NotTimeEqual(v)).
func (vf ValueFunc) NotTimeEqual(v time.Time) func(t *testing.T) {
	return vf.Test(NotTimeEqual(v))
}
// TimeEqual is equivalent to vf.Test(TimeEqual(v)).
func (vf ValueFunc) TimeEqual(v time.Time) func(t *testing.T) {
	return vf.Test(TimeEqual(v))
}
// NotDeepEqual is equivalent to vf.Test(NotDeepEqual(v)).
func (vf ValueFunc) NotDeepEqual(v interface{}) func(t *testing.T) {
	return vf.Test(NotDeepEqual(v))
}
// DeepEqual is equivalent to vf.Test(DeepEqual(v)).
func (vf ValueFunc) DeepEqual(v interface{}) func(t *testing.T) {
	return vf.Test(DeepEqual(v))
}
// NotReflectNil is equivalent to vf.Test(NotReflectNil()).
func (vf ValueFunc) NotReflectNil() func(t *testing.T) {
	return vf.Test(NotReflectNil())
}
// ReflectNil is equivalent to vf.Test(ReflectNil()).
func (vf ValueFunc) ReflectNil() func(t *testing.T) {
	return vf.Test(ReflectNil())
}
// MatchRegexp is equivalent to vf.Test(MatchRegexp(r)).
func (vf ValueFunc) MatchRegexp(r *regexp.Regexp) func(t *testing.T) {
	return vf.Test(MatchRegexp(r))
}
// MatchPattern is equivalent to vf.Test(MatchPattern(pattern)).
func (vf ValueFunc) MatchPattern(pattern string) func(t *testing.T) {
	return vf.Test(MatchPattern(pattern))
}
// NoError is equivalent to vf.Test(NoError()).
func (vf ValueFunc) NoError() func(t *testing.T) {
	return vf.Test(NoError())
}
// Error is equivalent to vf.Test(Error()).
func (vf ValueFunc) Error() func(t *testing.T) {
	return vf.Test(Error())
}
// ErrorIsNot is equivalent to vf.Test(ErrorIsNot(target)).
func (vf ValueFunc) ErrorIsNot(target error) func(t *testing.T) {
	return vf.Test(ErrorIsNot(target))
}
// ErrorIs is equivalent to vf.Test(ErrorIs(target)).
func (vf ValueFunc) ErrorIs(target error) func(t *testing.T) {
	return vf.Test(ErrorIs(target))
}
// ContainsMatch is equivalent to vf.Test(ContainsMatch(c)).
func (vf ValueFunc) ContainsMatch(c Check) func(t *testing.T) {
	return vf.Test(ContainsMatch(c))
}
// Contains is equivalent to vf.Test(Contains{v}).
func (vf ValueFunc) Contains(v interface{}) func(t *testing.T) {
return vf.Test(Contains(v))
} | value.go | 0.838746 | 0.585901 | value.go | starcoder |
package facet
import (
"fmt"
"math"
)
// A GroupID identifies a set of values belonging together.
// Row and Col are presumably the labels of the facet row and column the
// group is assigned to — confirm against Faceting.Rows/Cols usage.
type GroupID struct {
	Row, Col string
}
// Grouper is implemented by values that can report which group they belong to.
type Grouper interface {
	Group() GroupID
}
// Aesthetic is a function mapping a certain data point to an aesthetic.
type Aesthetic func(i int) float64
// DiscreteAesthetic is a function mapping a certain data point to a discrete
// aesthetic like Shape or Stroke.
type DiscreteAesthetic func(i int) int
// GroupBy bundles the aesthetic mappings by which data points are grouped:
// facet placement (FacetRow/FacetCol) plus the visual channels.
type GroupBy struct {
	FacetRow DiscreteAesthetic
	FacetCol DiscreteAesthetic
	Alpha Aesthetic
	Color Aesthetic
	Fill Aesthetic
	Shape DiscreteAesthetic
	Size Aesthetic
	Stroke DiscreteAesthetic
}
// Faceting describes a facet grid: the row and column labels plus, for each
// group, the indices of the data points that fall into it.
type Faceting struct {
	Rows []string
	Cols []string
	Groups map[GroupID][]int // Groups contains the indices for each group
}
// NewFaceting returns an empty Faceting with an initialized group index.
func NewFaceting() *Faceting {
	f := &Faceting{}
	f.Groups = map[GroupID][]int{}
	return f
}
// Add registers the given group with the faceting.
// NOTE(review): currently a no-op stub — the group parameter is unused.
// TODO implement or remove.
func (f1 *Faceting) Add(group GroupID) {
}
// A Partitioner can be used to turn a continuous value into a discrete factor.
type Partitioner struct {
	Partitions int // number of equal-width buckets spanning Range
	Range Interval
}
// Learn feeds the observed samples x into the partitioner's range.
func (p *Partitioner) Learn(x ...float64) { p.Range.Update(x...) }
// Partition maps x to the label of the half-open interval it falls into.
// Values inside [Range.Min, Range.Max) are bucketed into p.Partitions
// equal-width intervals; out-of-range values fall into the shared overflow
// buckets "(-∞, Min)" and "[Max, ∞)".
func (p *Partitioner) Partition(x float64) string {
	min, max := p.Range.Min, p.Range.Max
	if x < min {
		// Label the underflow bucket with the range bound, not x itself,
		// so that all underflow values share a single factor level.
		return fmt.Sprintf("(-∞, %g)", min)
	}
	if x >= max {
		// Same for the overflow bucket: one shared label, keyed on max.
		return fmt.Sprintf("[%g, ∞)", max)
	}
	w := (max - min) / float64(p.Partitions)
	k := math.Floor((x - min) / w)
	return fmt.Sprintf("[%g, %g)", min+k*w, min+(k+1)*w)
}
/*
How to learn groups?
Original Data --- group-by-field-G---> Grouped Data --> Work on each group indiv.
That works fine in R and is "undoable" in Go.
Idea:
- Geom provides a func (i int) GroupID.
- Plot owns Geoms.
- Plot calls GroupID for each element i in the Geom.
- A panel is selected based on the group and the faceting chosen.
Works well if Dataset already contains the group, e.g. as a string or int field.
But hard to use if grouping is done on a continuous field, e.g. a float64.
Need an adaptor which can be trained easily.
G might be:
- strings (label)
- integers
- floats (must be put into intervals)
Basically the same like for a discrete scale.
For grouping continuous data: Type which works as an adaptor.
How to draw a Point Geom in a faceted way?
- The Plot itself owns the Point Geom
- The Plot iterates the Points and determines the Group
- The Plot chooses the appropriate Panel and...
- ... hands drawing of a single point in that panel to the Point Geom
Same applies to Boxplots or whatnot.
Sizing of e.g. a Boxplot is done on the full dataset before grouping.
Can Stats be added?
Binning for Histograms? Input is Data with X and Group.
Plot iterates Data, determines Group. Records X for that Group.
Can be done as Binning can be done iteratively.
Boxplots? Need full dataset before Geom can be computed.
Seems doable.
If plot owns Geom: How to compute boxplot? How is faceting done?
Input: Some data with X and Y and optional Color
Step 1: Partition data by group into facets
Step 2: Per group/facet compute fiveval for each X
Example Boxplot
---------------
Data: X, Y, A, B, C, D, E
Faceting by (A,B)
Color mapped from C
Stroke mapped from D
Stat: fiveval/boxplot
Statistics has to be done on Data/(A,B,C,D)
Grouping is done on faceting and optional aesthetics
*/
package comparing
import (
m "math"
"github.com/Willsem/compare-trajectories/app/model"
"github.com/Willsem/compare-trajectories/app/service/comparing/interpolation"
"github.com/Willsem/compare-trajectories/app/service/comparing/speed"
"github.com/Willsem/compare-trajectories/app/service/math"
)
func difference(perfect interpolation.InterpolatedTrajectory, compared speed.SpeedTrajectory, comparedAcc model.Accelerometer) (ct model.ComparedTrajectory, err error) {
ct = model.ComparedTrajectory{
Backlog: make([]float64, compared.Gps.Len()),
Long: make([]float64, compared.Gps.Len()),
Lat: make([]float64, compared.Gps.Len()),
DeltaLong: make([]float64, compared.Gps.Len()),
DeltaLat: make([]float64, compared.Gps.Len()),
DeltaAcc: make([]model.FloatPoint, compared.Gps.Len()),
DeltaGyro: make([]model.FloatPoint, compared.Gps.Len()),
}
for i := 0; i < compared.Gps.Len(); i++ {
var minDist float64 = -1
var minIndex int
x1 := compared.Gps.Lat[i]
y1 := compared.Gps.Long[i]
for j := 0; j < perfect.Trajectory.Gps.Len(); j++ {
x2 := perfect.Trajectory.Gps.Lat[j]
y2 := perfect.Trajectory.Gps.Long[j]
dist := math.Distance(x1, y1, x2, y2)
if minDist == -1 || dist < minDist {
minDist = dist
minIndex = j
}
}
var secondIndex int
if minIndex == 0 {
secondIndex = 1
} else if minIndex == perfect.Trajectory.Gps.Len()-1 {
secondIndex = minIndex - 1
} else {
x2 := perfect.Trajectory.Gps.Lat[minIndex-1]
y2 := perfect.Trajectory.Gps.Long[minIndex-1]
x3 := perfect.Trajectory.Gps.Lat[minIndex+1]
y3 := perfect.Trajectory.Gps.Long[minIndex+1]
dist1 := math.Distance(x1, y1, x2, y2)
dist2 := math.Distance(x1, y1, x3, y3)
if dist1 < dist2 {
secondIndex = minIndex - 1
} else {
secondIndex = minIndex + 1
}
}
speed, lat, long, acc, gyro := perfect.TakeValues(minIndex, secondIndex)
minDist = -1
for j := 0; j < len(lat); j++ {
x2 := lat[j]
y2 := long[j]
dist := math.Distance(x1, y1, x2, y2)
if minDist == -1 || dist < minDist {
minDist = dist
minIndex = j
}
}
accIndex := 0
var minDateDiff float64 = -1
for j := 0; j < comparedAcc.Len(); j++ {
dateDiff := m.Abs(model.DateDiffSeconds(compared.Gps.Date[i], comparedAcc.Date[j]))
if minDateDiff == -1 || dateDiff < minDateDiff {
minDateDiff = dateDiff
accIndex = j
} else {
break
}
}
ct.Backlog[i] = speed[minIndex] - compared.Speed[i]
ct.Long[i] = compared.Gps.Long[i]
ct.Lat[i] = compared.Gps.Lat[i]
ct.DeltaLat[i] = lat[minIndex] - compared.Gps.Lat[i]
ct.DeltaLong[i] = long[minIndex] - compared.Gps.Long[i]
ct.DeltaAcc[i].X = float64(acc[minIndex].X - comparedAcc.Acc[accIndex].X)
ct.DeltaAcc[i].Y = float64(acc[minIndex].Y - comparedAcc.Acc[accIndex].Y)
ct.DeltaAcc[i].Z = float64(acc[minIndex].Z - comparedAcc.Acc[accIndex].Z)
ct.DeltaGyro[i].X = float64(gyro[minIndex].X - comparedAcc.Gyro[accIndex].X)
ct.DeltaGyro[i].Y = float64(gyro[minIndex].Y - comparedAcc.Gyro[accIndex].Y)
ct.DeltaGyro[i].Z = float64(gyro[minIndex].Z - comparedAcc.Gyro[accIndex].Z)
}
return
} | app/service/comparing/difference.go | 0.54819 | 0.537041 | difference.go | starcoder |
package match
import (
"reflect"
"regexp"
"strings"
"testing"
)
// Matcher is a helper object for chaining matcher conditions.
type Matcher struct {
	t *testing.T // test to report failures on
	value interface{} // the value under assertion
}
// Fatal causes the unit test to immediately fail if one of the previous
// conditions has failed.
// Note: This will only affect previous conditions, not subsequent ones. You
// can build complex conditions this way.
// Ex: match.IsNotNil(t, value).Fatal().Equals(other)
func (m *Matcher) Fatal() *Matcher {
	if m.t.Failed() {
		m.t.FailNow()
	}
	return m
}
// IsNil asserts that the matched value is nil.
// NOTE(review): a typed nil (e.g. (*T)(nil)) stored in an interface compares
// non-nil here and will fail the assertion — confirm that is intended.
func (m *Matcher) IsNil() *Matcher {
	if m.value != nil {
		m.t.Errorf("expected == nil, got %v", m.value)
	}
	return m
}
// IsNotNil asserts that the matched value is not nil.
func (m *Matcher) IsNotNil() *Matcher {
	if m.value == nil {
		m.t.Error("expected != nil, got nil")
	}
	return m
}
// Equals asserts that the matched value is deeply equal to value
// (compared with reflect.DeepEqual).
func (m *Matcher) Equals(value interface{}) *Matcher {
	if !reflect.DeepEqual(m.value, value) {
		m.t.Errorf("expected == %v, got %v", value, m.value)
	}
	return m
}
// NotEquals asserts that the matched value is not deeply equal to value
// (compared with reflect.DeepEqual).
func (m *Matcher) NotEquals(value interface{}) *Matcher {
	if reflect.DeepEqual(m.value, value) {
		m.t.Errorf("expected != %v, got %v", value, m.value)
	}
	return m
}
// LessThan asserts that the matched value is less than value.
// Note: This function will only work on numeric values.
// If a non-numeric value is passed, a fatal error will be thrown.
// NOTE(review): when the matched value is an unsigned kind, value is read
// via reflect's Int(); passing an unsigned value panics there — confirm the
// intended input contract.
func (m *Matcher) LessThan(value interface{}) *Matcher {
	ok := true
	rv := reflect.ValueOf(m.value)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		ok = rv.Int() < reflect.ValueOf(value).Int()
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		ok = rv.Uint() < uint64(reflect.ValueOf(value).Int())
	case reflect.Float32, reflect.Float64:
		ok = rv.Float() < reflect.ValueOf(value).Float()
	default:
		m.t.Fatalf("expected numeric value, got %v", m.value)
	}
	if !ok {
		m.t.Errorf("expected < %v, got %v", value, m.value)
	}
	return m
}
// GreaterThan asserts that the matched value is greater than value.
// Note: This function will only work on numeric values.
// If a non-numeric value is passed, a fatal error will be thrown.
// NOTE(review): when the matched value is an unsigned kind, value is read
// via reflect's Int(); passing an unsigned value panics there — confirm the
// intended input contract.
func (m *Matcher) GreaterThan(value interface{}) *Matcher {
	ok := true
	rv := reflect.ValueOf(m.value)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		ok = rv.Int() > reflect.ValueOf(value).Int()
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		ok = rv.Uint() > uint64(reflect.ValueOf(value).Int())
	case reflect.Float32, reflect.Float64:
		ok = rv.Float() > reflect.ValueOf(value).Float()
	default:
		m.t.Fatalf("expected numeric value, got %v", m.value)
	}
	if !ok {
		// Fixed copy-paste bug: the message previously said "expected <".
		m.t.Errorf("expected > %v, got %v", value, m.value)
	}
	return m
}
// Contains asserts that the matched value contains value.
// If used on an array, asserts that the array contains value.
// If used on a map, asserts that the map contains a key with value.
// If used on a string, asserts that the string contains value.
// If used on an incompatible value, a fatal error will be thrown.
// NOTE(review): array/slice elements and map keys are compared with ==,
// which panics for uncomparable types — confirm inputs are comparable.
func (m *Matcher) Contains(value interface{}) *Matcher {
	rv := reflect.ValueOf(m.value)
	switch rv.Kind() {
	case reflect.Array, reflect.Slice:
		// Linear scan for an element equal to value.
		ok := false
		for i := 0; i < rv.Len(); i++ {
			if rv.Index(i).Interface() == value {
				ok = true
				break
			}
		}
		if !ok {
			m.t.Errorf("expected array to contain %v", value)
		}
	case reflect.Map:
		// Membership is checked on the keys, not the values.
		ok := false
		for _, key := range rv.MapKeys() {
			if key.Interface() == value {
				ok = true
				break
			}
		}
		if !ok {
			m.t.Errorf("expected map to have key %v", value)
		}
	case reflect.String:
		// value must itself be a string here; the assertion will panic on
		// the type conversion otherwise.
		if !strings.Contains(rv.String(), value.(string)) {
			m.t.Errorf("expected %#q to contain %#q", rv.String(), value.(string))
		}
	default:
		m.t.Fatalf("expected array, map or string, got %s", rv.Kind().String())
	}
	return m
}
// Matches asserts that the matched value matches the regex in pattern.
// Note: If the regex cannot be compiled, a fatal error will be thrown.
// If used on a non-string value, a fatal error will be thrown.
func (m *Matcher) Matches(pattern string) *Matcher {
	rv := reflect.ValueOf(m.value)
	if rv.Kind() != reflect.String {
		m.t.Fatalf("expected string, got %s", rv.Kind().String())
	}
	// The pattern is compiled on every call; a bad pattern aborts the test.
	re, err := regexp.Compile(pattern)
	if err != nil {
		m.t.Fatal(err)
	}
	if !re.MatchString(rv.String()) {
		m.t.Errorf("expected %#q to match pattern %#q", rv.String(), pattern)
	}
	return m
}
// KindOf asserts that the matched value of the kind.
func (m *Matcher) KindOf(kind reflect.Kind) *Matcher {
rv := reflect.ValueOf(m.value)
if rv.Kind() != kind {
m.t.Errorf("expected kind %s, got %s", kind.String(), rv.Kind().String())
}
return m
} | matcher.go | 0.785103 | 0.526951 | matcher.go | starcoder |
package main
import (
"crypto/sha256"
"encoding/hex"
"errors"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
)
const genesisBlockData = "The Times 03/Jan/2009 Chancellor on brink of second bailout for banks" // https://en.bitcoin.it/wiki/Genesis_block
// Block is the element of the Blockchain.
type Block struct {
	Timestamp string // creation time, RFC3339-formatted
	Data string // payload recorded in this block
	PreviousHash string // hex-encoded SHA-256 hash of the preceding block
	Hash string // hex-encoded SHA-256 hash of this block's contents
	Nonce int // proof-of-work counter varied during mining
}
// difficulty sets how difficult mining is: the number of leading "0"
// characters a block's hex hash must start with (1 = easiest).
var difficulty = 1
// miners sets the number of concurrent mining goroutines (1 = no concurrency).
var miners = 1
// createBlockchain builds a new chain seeded with the mined genesis block.
func createBlockchain(timestamp time.Time) []Block {
	genesis := Block{
		Timestamp:    timestamp.Format(time.RFC3339),
		PreviousHash: strings.Repeat("0", 64),
		Data:         genesisBlockData,
		Nonce:        0,
	}
	blockchain := make([]Block, 0, 10)
	addBlock(&blockchain, genesis)
	return blockchain
}
// createBlock assembles an unmined block that extends previousBlock with
// data; an error is returned when data is empty.
func createBlock(timestamp time.Time, data string, previousBlock Block) (block Block, err error) {
	if data == "" {
		err = errors.New("Cannot create a block with no data")
		return
	}
	block = Block{
		Timestamp:    timestamp.Format(time.RFC3339),
		PreviousHash: previousBlock.Hash,
		Data:         data,
		Nonce:        0,
	}
	return
}
// isBlockchainValid reports whether the chain starts with the genesis block
// and every later block has a consistent hash and parent link.
func isBlockchainValid(blockchain []Block) bool {
	// A valid blockchain contains at least one block.
	if len(blockchain) == 0 {
		return false
	}
	// The first block must be the genesis block.
	if blockchain[0].Data != genesisBlockData {
		return false
	}
	// Each subsequent block must hash correctly and point at its parent.
	for i := 1; i < len(blockchain); i++ {
		current, previous := blockchain[i], blockchain[i-1]
		switch {
		case current.Hash != calculateHash(current):
			return false
		case current.PreviousHash != previous.Hash:
			return false
		}
	}
	return true
}
// addBlock mines block (proof-of-work, see mineBlock) and appends it to the
// chain; the chain is left untouched if the result would be invalid.
func addBlock(blockchain *[]Block, block Block) error {
	mined := mineBlock(block)
	candidate := append(*blockchain, mined)
	if !isBlockchainValid(candidate) {
		return errors.New("Invalid block (Previous hash is inconsistent). Blockchain was not updated")
	}
	*blockchain = candidate
	return nil
}
// mineBlock performs the proof-of-work: it varies the nonce until the
// block's hash gains the required number of leading zeros, then returns the
// block with Hash and Nonce filled in. With miners > 1 the search runs on
// several goroutines in parallel.
func mineBlock(block Block) Block {
	// Mining is guessing the Block.Nonce until a Block.Hash that matches the targetHashPrefix is found
	if miners <= 1 {
		// difficulty defines the number of 0s leading the hash. The higher difficulty, the more time-consuming
		targetHashPrefix := strings.Repeat("0", difficulty)
		for {
			block.Hash = calculateHash(block)
			if strings.HasPrefix(block.Hash, targetHashPrefix) {
				return block
			}
			block.Nonce++
		}
	} else {
		// Depending on the hardware, concurrency could speed up mining.
		// The channel is buffered per miner so no worker blocks on send.
		result := make(chan Block, miners)
		var wg sync.WaitGroup
		var stop uint32 = 0
		for i := 0; i < miners; i++ {
			wg.Add(1)
			go concurrentMineBlock(block, miners, result, &stop, &wg)
			block.Nonce++ // Every miner starts with a different nonce
		}
		wg.Wait()
		close(result)
		// Return the first mined block delivered to the channel.
		return <-result
	}
}
// concurrentMineBlock is one mining worker: starting at block.Nonce it tries
// nonces spaced nonceIncrementStep apart until it finds a matching hash or
// another worker signals success through the shared stop flag.
func concurrentMineBlock(block Block, nonceIncrementStep int, result chan Block, stop *uint32, wg *sync.WaitGroup) {
	defer wg.Done()
	// stop is shared across workers, so it is read and written atomically.
	isMiningRunning := func() bool {
		return atomic.LoadUint32(stop) == 0
	}
	stopMining := func() {
		atomic.StoreUint32(stop, 1)
	}
	targetHashPrefix := strings.Repeat("0", difficulty)
	for isMiningRunning() {
		block.Hash = calculateHash(block)
		if strings.HasPrefix(block.Hash, targetHashPrefix) {
			result <- block
			stopMining()
			break
		}
		block.Nonce = block.Nonce + nonceIncrementStep
	}
}
// calculateHash returns the hex-encoded SHA-256 digest of the block's
// timestamp, data, previous hash and nonce.
func calculateHash(block Block) string {
	payload := []byte(block.Timestamp + block.Data + block.PreviousHash + strconv.Itoa(block.Nonce))
	sum := sha256.Sum256(payload)
	return hex.EncodeToString(sum[:])
}
func getLatestBlock(blockchain []Block) Block {
// ASSUMPTION: Blockchain has at least 1 (genesis) block
return blockchain[len(blockchain)-1]
} | cmd/blockchain.go | 0.639173 | 0.449936 | blockchain.go | starcoder |
package commands
import "github.com/urfave/cli"
const artisticShort = `
Copyright Datajin Technologies, Inc. 2015,2017. All rights reserved.
This software is released under the Artistic License 2.0.
`
const artisticFull = `
The Artistic License 2.0
Copyright (c) 2000-2006, The Perl Foundation.
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
This license establishes the terms under which a given free software
Package may be copied, modified, distributed, and/or redistributed.
The intent is that the Copyright Holder maintains some artistic
control over the development of that Package while still keeping the
Package available as open source and free software.
You are always permitted to make arrangements wholly outside of this
license directly with the Copyright Holder of a given Package. If the
terms of this license do not permit the full use that you propose to
make of the Package, you should contact the Copyright Holder and seek
a different licensing arrangement.
Definitions
"Copyright Holder" means the individual(s) or organization(s)
named in the copyright notice for the entire Package.
"Contributor" means any party that has contributed code or other
material to the Package, in accordance with the Copyright Holder's
procedures.
"You" and "your" means any person who would like to copy,
distribute, or modify the Package.
"Package" means the collection of files distributed by the
Copyright Holder, and derivatives of that collection and/or of
those files. A given Package may consist of either the Standard
Version, or a Modified Version.
"Distribute" means providing a copy of the Package or making it
accessible to anyone else, or in the case of a company or
organization, to others outside of your company or organization.
"Distributor Fee" means any fee that you charge for Distributing
this Package or providing support for this Package to another
party. It does not mean licensing fees.
"Standard Version" refers to the Package if it has not been
modified, or has been modified only in ways explicitly requested
by the Copyright Holder.
"Modified Version" means the Package, if it has been changed, and
such changes were not explicitly requested by the Copyright
Holder.
"Original License" means this Artistic License as Distributed with
the Standard Version of the Package, in its current version or as
it may be modified by The Perl Foundation in the future.
"Source" form means the source code, documentation source, and
configuration files for the Package.
"Compiled" form means the compiled bytecode, object code, binary,
or any other form resulting from mechanical transformation or
translation of the Source form.
Permission for Use and Modification Without Distribution
(1) You are permitted to use the Standard Version and create and use
Modified Versions for any purpose without restriction, provided that
you do not Distribute the Modified Version.
Permissions for Redistribution of the Standard Version
(2) You may Distribute verbatim copies of the Source form of the
Standard Version of this Package in any medium without restriction,
either gratis or for a Distributor Fee, provided that you duplicate
all of the original copyright notices and associated disclaimers. At
your discretion, such verbatim copies may or may not include a
Compiled form of the Package.
(3) You may apply any bug fixes, portability changes, and other
modifications made available from the Copyright Holder. The resulting
Package will still be considered the Standard Version, and as such
will be subject to the Original License.
Distribution of Modified Versions of the Package as Source
(4) You may Distribute your Modified Version as Source (either gratis
or for a Distributor Fee, and with or without a Compiled form of the
Modified Version) provided that you clearly document how it differs
from the Standard Version, including, but not limited to, documenting
any non-standard features, executables, or modules, and provided that
you do at least ONE of the following:
(a) make the Modified Version available to the Copyright Holder
of the Standard Version, under the Original License, so that the
Copyright Holder may include your modifications in the Standard
Version.
(b) ensure that installation of your Modified Version does not
prevent the user installing or running the Standard Version. In
addition, the Modified Version must bear a name that is different
from the name of the Standard Version.
(c) allow anyone who receives a copy of the Modified Version to
make the Source form of the Modified Version available to others
under
(i) the Original License or
(ii) a license that permits the licensee to freely copy,
modify and redistribute the Modified Version using the same
licensing terms that apply to the copy that the licensee
received, and requires that the Source form of the Modified
Version, and of any works derived from it, be made freely
available in that license fees are prohibited but Distributor
Fees are allowed.
Distribution of Compiled Forms of the Standard Version
or Modified Versions without the Source
(5) You may Distribute Compiled forms of the Standard Version without
the Source, provided that you include complete instructions on how to
get the Source of the Standard Version. Such instructions must be
valid at the time of your distribution. If these instructions, at any
time while you are carrying out such distribution, become invalid, you
must provide new instructions on demand or cease further distribution.
If you provide valid instructions or cease distribution within thirty
days after you become aware that the instructions are invalid, then
you do not forfeit any of your rights under this license.
(6) You may Distribute a Modified Version in Compiled form without
the Source, provided that you comply with Section 4 with respect to
the Source of the Modified Version.
Aggregating or Linking the Package
(7) You may aggregate the Package (either the Standard Version or
Modified Version) with other packages and Distribute the resulting
aggregation provided that you do not charge a licensing fee for the
Package. Distributor Fees are permitted, and licensing fees for other
components in the aggregation are permitted. The terms of this license
apply to the use and Distribution of the Standard or Modified Versions
as included in the aggregation.
(8) You are permitted to link Modified and Standard Versions with
other works, to embed the Package in a larger work of your own, or to
build stand-alone binary or bytecode versions of applications that
include the Package, and Distribute the result without restriction,
provided the result does not expose a direct interface to the Package.
Items That are Not Considered Part of a Modified Version
(9) Works (including, but not limited to, modules and scripts) that
merely extend or make use of the Package, do not, by themselves, cause
the Package to be a Modified Version. In addition, such works are not
considered parts of the Package itself, and are not subject to the
terms of this license.
General Provisions
(10) Any use, modification, and distribution of the Standard or
Modified Versions is governed by this Artistic License. By using,
modifying or distributing the Package, you accept this license. Do not
use, modify, or distribute the Package, if you do not accept this
license.
(11) If your Modified Version has been derived from a Modified
Version made by someone other than you, you are nevertheless required
to ensure that your Modified Version complies with the requirements of
this license.
(12) This license does not grant you the right to use any trademark,
service mark, tradename, or logo of the Copyright Holder.
(13) This license includes the non-exclusive, worldwide,
free-of-charge patent license to make, have made, use, offer to sell,
sell, import and otherwise transfer the Package with respect to any
patent claims licensable by the Copyright Holder that are necessarily
infringed by the Package. If you institute patent litigation
(including a cross-claim or counterclaim) against any party alleging
that the Package constitutes direct or contributory patent
infringement, then this Artistic License to you shall terminate on the
date that such litigation is filed.
(14) Disclaimer of Warranty:
THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS
IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES. THE IMPLIED
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR
NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY YOUR LOCAL
LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR CONTRIBUTOR WILL
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
`
// This software is provided "AS IS" but the service it connects to has its own
// terms and polcies.
const notices = `
Use of the mktmpio service is subject to the the following:
* mktmpio Privacy Policy
https://mktmp.io/privacy-policy
* mktmpio Terms of Service
https://mktmp.io/terms-of-service
`
// The dependencies list below is manually populated using the output from:
// go list -f {{.Deps}} ./...
const thirdPartyShort = `
This binary includes 3rd party open source software.
`
const thirdpartyFull = `
This binary includes 3rd party software:
* github.com/mitchellh/go-homedir
MIT license
Copyright (c) 2013 <NAME>
* github.com/mktmpio/go-mktmpio
Artistic-2.0 license
Copyright (c) 2015 Datajin Technologies, Inc.
* github.com/mktmpio/go-mktmpio/stdcopy
Apache License 2.0
Copyright 2013-2016 Docker, Inc.
* github.com/urfave/cli
MIT license
Copyright (C) 2016 <NAME> & Contributors
* golang.org/x/crypto/ssh/terminal
golang.org/x/net/websocket
BSD-2 license
Copyright (c) 2009 The Go Authors. All rights reserved.
* gopkg.in/yaml.v2
LGPL3 license
Copyright (c) 2011-2014 - Canonical Inc.
`
// This software is provided "AS IS"
const warrantiesShort = `
This software is provided "AS IS" and without warranty.
`
const warrantiesFull = `
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
`
// Contact info
const contact = `
For more information contact Datajin Technologies, Inc.:
w: https://mktmp.io
e: <EMAIL>
`
const summary = `
This is only a summary. For full warranty and license texts as well as the
complete listing of included 3rd party software, run with the --verbose flag.
`
// LegalCommand defines the 'mktmpio legal' command, which prints licensing,
// copyright, and warranty notices (full texts when run with --verbose).
var LegalCommand = cli.Command{
	Name: "legal",
	Usage: "display licensing, copyright, and warranty notices",
	Action: licenseAction,
	Flags: []cli.Flag{
		cli.BoolFlag{Name: "verbose, v"},
	},
}
func licenseAction(c *cli.Context) error {
license := artisticShort
thirdparty := thirdPartyShort
warranties := warrantiesShort
if c.Bool("verbose") {
license = artisticFull
thirdparty = thirdpartyFull
warranties = warrantiesFull
} else {
c.App.Writer.Write([]byte(summary))
}
c.App.Writer.Write([]byte("\nLICENSE:\n" + license))
c.App.Writer.Write([]byte("\nNOTICES:\n" + notices))
c.App.Writer.Write([]byte("\nWARRANTIES:\n" + warranties))
c.App.Writer.Write([]byte("\n3RD PARTY SOFTWARE:\n" + thirdparty))
c.App.Writer.Write([]byte("\nCONTACT:\n" + contact))
return nil
} | commands/legal.go | 0.732974 | 0.469885 | legal.go | starcoder |
package crazy
import "math"
// Exponential adapts a Source to generate random numbers under an exponential
// distribution.
type Exponential struct {
	Source       // underlying randomness source consumed by the sampler
	Rate float64 // rate parameter; standard-exponential variates are divided by it
}
// NewExponential builds an Exponential distribution that draws variates from
// src and scales them by the given rate parameter.
func NewExponential(src Source, rate float64) Exponential {
	var e Exponential
	e.Source = src
	e.Rate = rate
	return e
}
// Next returns the next exponential variate: a standard exponential drawn
// via the ziggurat, divided by the configured rate.
func (e Exponential) Next() float64 {
	v := expoZig.GenNext(e.Source)
	v /= e.Rate
	return v
}
func expoPDF(x float64) float64 {
return math.Exp(-x)
}
// expoTail samples from the exponential tail beyond the ziggurat cutoff
// expoR by inversion: expoR - ln(u) with u drawn uniformly from the source.
func expoTail(src Source) float64 {
	u := Uniform0_1{src}.Next()
	return expoR - math.Log(u)
}
// expoZig is the ziggurat sampler for the standard exponential
// distribution. K, W, and F are the precomputed layer tables below;
// Tail handles the region past the cutoff expoR. Mirrored is false
// because the exponential is one-sided.
var expoZig = Ziggurat{
	PDF:      expoPDF,
	Tail:     expoTail,
	Mirrored: false,
	K:        expoK,
	W:        expoW,
	F:        expoF,
}
// expoR is the x-coordinate of the ziggurat's base layer; values beyond it
// are generated by expoTail.
const expoR = 9.25616454426554
// expoK is the precomputed ziggurat K table (1024 fixed-point acceptance
// thresholds) for the standard exponential distribution, wired into expoZig
// above. Generated data — do not edit by hand.
var expoK = [1024]uint64{
0x1ce142c806bbca, 0x00000000000000, 0x13a2e8754d11d3, 0x1893ad94056903,
0x1abdc2caab364d, 0x1bf0d5a059f05f, 0x1cb2dcb2afeca3, 0x1d382b88438efd,
0x1d993d4a843892, 0x1de3005cdb9685, 0x1e1ce953a7aada, 0x1e4b8ff9b72624,
0x1e71ef51692778, 0x1e920badc7caed, 0x1ead4eb647f2a7, 0x1ec4bd2917746d,
0x1ed917afecf097, 0x1eeaefb117c710, 0x1efab4f0d45b99, 0x1f08bebea80034,
0x1f15524a538575, 0x1f20a71a9a4950, 0x1f2aea428b997d, 0x1f3440b9fd4d2f,
0x1f3cc91b7046c6, 0x1f449cf3d895f2, 0x1f4bd1c2ba0869, 0x1f5279bfcb493d,
0x1f58a4751d7f8b, 0x1f5e5f388a575e, 0x1f63b58c3ad7e9, 0x1f68b16c08dc31,
0x1f6d5b8c044fd9, 0x1f71bb8b55855b, 0x1f75d81def9dca, 0x1f79b72ef3a811,
0x1f7d5dfd381a11, 0x1f80d133164bc9, 0x1f8414fa616be2, 0x1f872d0d3ad391,
0x1f8a1cc452f503, 0x1f8ce72309a699, 0x1f8f8ee1ca4a4b, 0x1f921676eecb4a,
0x1f94801e668bf9, 0x1f96cde05350f7, 0x1f990196c55b3a, 0x1f9b1cf2b8c1d3,
0x1f9d2180705665, 0x1f9f10ab45ad4f, 0x1fa0ebc1020c8b, 0x1fa2b3f4d0ddf9,
0x1fa46a61d9a972, 0x1fa6100d8d74de, 0x1fa7a5e9b19fcc, 0x1fa92cd630d2bd,
0x1faaa5a2b95b8c, 0x1fac11102f446a, 0x1fad6fd1f79125, 0x1faec28f214fb0,
0x1fb009e37086ff, 0x1fb146604e85bf, 0x1fb2788da29d0f, 0x1fb3a0ea95eeeb,
0x1fb4bfee44a205, 0x1fb5d6085e829f, 0x1fb6e3a1b8d84e, 0x1fb7e91cd3015a,
0x1fb8e6d64f34c0, 0x1fb9dd2560a263, 0x1fbacc5c3004f5, 0x1fbbb4c83799b7,
0x1fbc96b29756b3, 0x1fbd726062201c, 0x1fbe4812e4a863, 0x1fbf1807e6940d,
0x1fbfe279e66a1e, 0x1fc0a7a050cb6a, 0x1fc167afb35e94, 0x1fc222d9ebd331,
0x1fc2d94e535488, 0x1fc38b39e6bbb7, 0x1fc438c76bc8fa, 0x1fc4e21f93a4fb,
0x1fc587691ae4b6, 0x1fc628c8e7450b, 0x1fc6c662234def, 0x1fc760565808ea,
0x1fc7f6c584f268, 0x1fc889ce3649dc, 0x1fc9198d99e17d, 0x1fc9a61f928b72,
0x1fca2f9eca3fa7, 0x1fcab624c3132d, 0x1fcb39c9e717df, 0x1fcbbaa5973916,
0x1fcc38ce39287b, 0x1fccb459446c6f, 0x1fcd2d5b4ea00f, 0x1fcda3e816f387,
0x1fce181290fa48, 0x1fce89eceed388, 0x1fcef988aab890, 0x1fcf66f68ffb57,
0x1fcfd246c37f46, 0x1fd03b88cbb500, 0x1fd0a2cb9821aa, 0x1fd1081d887932,
0x1fd16b8c7352e7, 0x1fd1cd25ac7ec6, 0x1fd22cf60b01bf, 0x1fd28b09eebe86,
0x1fd2e76d45d02f, 0x1fd3422b919b8a, 0x1fd39b4feb9bb3, 0x1fd3f2e509ee11,
0x1fd448f543a1c2, 0x1fd49d8a94ce02, 0x1fd4f0aea27303, 0x1fd5426abe2867,
0x1fd592c7e99c3d, 0x1fd5e1ced9e561, 0x1fd62f87faabb8, 0x1fd67bfb7128c6,
0x1fd6c7311f02d3, 0x1fd71130a504bf, 0x1fd75a0165b486, 0x1fd7a1aa87ca39,
0x1fd7e832f88942, 0x1fd82da16dfd80, 0x1fd871fc691dc8, 0x1fd8b54a37d537,
0x1fd8f790f6f4b2, 0x1fd938d6940dd2, 0x1fd97920cf386c, 0x1fd9b8753cc3d3,
0x1fd9f6d946d4e9, 0x1fda34522ef1e9, 0x1fda70e50f7cee, 0x1fdaac96dd1e02,
0x1fdae76c681da3, 0x1fdb216a5db062, 0x1fdb5a9549347e, 0x1fdb92f1956210,
0x1fdbca838d6e7c, 0x1fdc014f5e23c4, 0x1fdc375916ec52, 0x1fdc6ca4aad3c6,
0x1fdca135f17d4d, 0x1fdcd510a80fff, 0x1fdd08387219c6, 0x1fdd3ab0da6926,
0x1fdd6c7d53de73, 0x1fdd9da13a34ba, 0x1fddce1fd2c2dd, 0x1fddfdfc4d351d,
0x1fde2d39c43f82, 0x1fde5bdb3e496a, 0x1fde89e3ae128a, 0x1fdeb755f351ad,
0x1fdee434db4d7f, 0x1fdf1083216f9a, 0x1fdf3c436fd21a, 0x1fdf67785fc7fd,
0x1fdf92247a607a, 0x1fdfbc4a38e58e, 0x1fdfe5ec0555f2, 0x1fe00f0c3adaaa,
0x1fe037ad263860, 0x1fe05fd1063cae, 0x1fe0877a0c2794, 0x1fe0aeaa5c1128,
0x1fe0d5640d4bc6, 0x1fe0fba92ac2c9, 0x1fe1217bb35607, 0x1fe146dd9a321c,
0x1fe16bd0c725b9, 0x1fe1905716f40a, 0x1fe1b4725ba44e, 0x1fe1d8245ccec7,
0x1fe1fb6ed7e715, 0x1fe21e53808417, 0x1fe240d400a56d, 0x1fe262f1f8f6b3,
0x1fe284af011080, 0x1fe2a60ca7b754, 0x1fe2c70c731872, 0x1fe2e7afe104cc,
0x1fe307f8672a07, 0x1fe327e77349b3, 0x1fe3477e6b6ebc, 0x1fe366beae2137,
0x1fe385a9929884, 0x1fe3a44068ebe6, 0x1fe3c2847a4199, 0x1fe3e07708fc73,
0x1fe3fe1950e81d, 0x1fe41b6c8763f6, 0x1fe43871db8ca3, 0x1fe4552a766464,
0x1fe471977afa2c, 0x1fe48dba068f92, 0x1fe4a99330bd9d, 0x1fe4c5240b987e,
0x1fe4e06da3d237, 0x1fe4fb7100dc40, 0x1fe5162f250832, 0x1fe530a90da77a,
0x1fe54adfb32a2b, 0x1fe564d4093cdf, 0x1fe57e86fee5cb, 0x1fe597f97ea0f4,
0x1fe5b12c6e7ba0, 0x1fe5ca20b02ef6, 0x1fe5e2d72139ee, 0x1fe5fb509afa74,
0x1fe6138df2c5e8, 0x1fe62b8ffa00ea, 0x1fe643577e367b, 0x1fe65ae5492e80,
0x1fe6723a2103a8, 0x1fe68956c838b2, 0x1fe6a03bfdcd2d, 0x1fe6b6ea7d5199,
0x1fe6cd62fefb09, 0x1fe6e3a637b63c, 0x1fe6f9b4d93a30, 0x1fe70f8f921a3e,
0x1fe725370dd7b3, 0x1fe73aabf4f2fd, 0x1fe74feeecfc5c, 0x1fe7650098a429,
0x1fe779e197caaf, 0x1fe78e92878f9a, 0x1fe7a314026109, 0x1fe7b766a00a2d,
0x1fe7cb8af5c19a, 0x1fe7df81963731, 0x1fe7f34b11a1b1, 0x1fe806e7f5cbf3,
0x1fe81a58ce21d2, 0x1fe82d9e23bcbb, 0x1fe840b87d6ff7, 0x1fe853a85fd499,
0x1fe8666e4d5531, 0x1fe8790ac63927, 0x1fe88b7e48afd9, 0x1fe89dc950db71,
0x1fe8afec58db72, 0x1fe8c1e7d8d710, 0x1fe8d3bc470739, 0x1fe8e56a17c070,
0x1fe8f6f1bd7c5e, 0x1fe90853a8e335, 0x1fe9199048d4d1, 0x1fe92aa80a71a4,
0x1fe93b9b592372, 0x1fe94c6a9ea5d1, 0x1fe95d16430e79, 0x1fe96d9eacd569,
0x1fe97e0440dcce, 0x1fe98e476278c9, 0x1fe99e68737701, 0x1fe9ae67d4260a,
0x1fe9be45e35c9c, 0x1fe9ce02fe80ac, 0x1fe9dd9f818e4e, 0x1fe9ed1bc71e7a,
0x1fe9fc78286da4, 0x1fea0bb4fd622f, 0x1fea1ad29c92c2, 0x1fea29d15b4c6b,
0x1fea38b18d98b2, 0x1fea4773864375, 0x1fea561796e0b6, 0x1fea649e0fd23d,
0x1fea7307404d1e, 0x1fea8153765f1f, 0x1fea8f82fef403, 0x1fea9d9625dab6,
0x1feaab8d35ca5b, 0x1feab968786744, 0x1feac7283647c3, 0x1fead4ccb6f8f2,
0x1feae256410352, 0x1feaefc519ef56, 0x1feafd198649db, 0x1feb0a53c9a87f,
0x1feb177426ade9, 0x1feb247adf0df3, 0x1feb31683391c2, 0x1feb3e3c641bc8,
0x1feb4af7afabb3, 0x1feb579a54623f, 0x1feb64248f8501, 0x1feb70969d820f,
0x1feb7cf0b9f3a6, 0x1feb89331fa3aa, 0x1feb955e088f28, 0x1feba171ade9b4,
0x1febad6e4820c2, 0x1febb9540edee8, 0x1febc523390f11, 0x1febd0dbfcdfa0,
0x1febdc7e8fc583, 0x1febe80b267f33, 0x1febf381f517ae, 0x1febfee32ee957,
0x1fec0a2f06a0cf, 0x1fec1565ae3fbd, 0x1fec2087571f89, 0x1fec2b9431f408,
0x1fec368c6ece1f, 0x1fec41703d1e52, 0x1fec4c3fcbb74e, 0x1fec56fb48d060,
0x1fec61a2e207e6, 0x1fec6c36c465ae, 0x1fec76b71c5d4f, 0x1fec812415d072,
0x1fec8b7ddc1116, 0x1fec95c499e3c1, 0x1fec9ff87981ad, 0x1fecaa19a49ae9,
0x1fecb42844586c, 0x1fecbe24815e28, 0x1fecc80e83cd04, 0x1fecd1e67344dd,
0x1fecdbac76e672, 0x1fece560b5554a, 0x1fecef0354b994, 0x1fecf8947ac1fc,
0x1fed02144ca577, 0x1fed0b82ef2507, 0x1fed14e0868d78, 0x1fed1e2d36b915,
0x1fed276923114f, 0x1fed30946e906a, 0x1fed39af3bc313, 0x1fed42b9acc9fa,
0x1fed4bb3e35b5d, 0x1fed549e00c492, 0x1fed5d7825eb87, 0x1fed664273503c,
0x1fed6efd090e34, 0x1fed77a806dde4, 0x1fed80438c1618, 0x1fed88cfb7ad55,
0x1fed914ca83b2d, 0x1fed99ba7bf99b, 0x1feda21950c64a, 0x1fedaa694423e0,
0x1fedb2aa733b41, 0x1fedbadcfadccc, 0x1fedc300f78190, 0x1fedcb16854c82,
0x1fedd31dc00ba5, 0x1feddb16c33938, 0x1fede301a9fcd3, 0x1fedeade8f2c88,
0x1fedf2ad8d4dfd, 0x1fedfa6ebe977d, 0x1fee02223cf109, 0x1fee09c821f567,
0x1fee116086f321, 0x1fee18eb84ed8f, 0x1fee2069349dce, 0x1fee27d9ae73c1,
0x1fee2f3d0a96ff, 0x1fee369360e7cb, 0x1fee3ddcc8ffff, 0x1fee45195a33f2,
0x1fee4c492b9366, 0x1fee536c53ea62, 0x1fee5a82e9c215, 0x1fee618d0361ae,
0x1fee688ab6cf37, 0x1fee6f7c19d064, 0x1fee766141eb64, 0x1fee7d3a4467b3,
0x1fee8407364edc, 0x1fee8ac82c6d44, 0x1fee917d3b52ed, 0x1fee9826775433,
0x1fee9ec3f48a8c, 0x1feea555c6d540, 0x1feeabdc01da1f, 0x1feeb256b9063a,
0x1feeb8c5ff8e8b, 0x1feebf29e870ae, 0x1feec582867384, 0x1feecbcfec27df,
0x1feed2122be928, 0x1feed84957de04, 0x1feede7581f8f2, 0x1feee496bbf8eb,
0x1feeeaad1769fe, 0x1feef0b8a5a5e7, 0x1feef6b977d4ac, 0x1feefcaf9eed2a,
0x1fef029b2bb5ae, 0x1fef087c2ec47f, 0x1fef0e52b88072, 0x1fef141ed92172,
0x1fef19e0a0b108, 0x1fef1f981f0ae6, 0x1fef254563dd6c, 0x1fef2ae87eaa27,
0x1fef30817ec656, 0x1fef3610735b68, 0x1fef3b956b677a, 0x1fef411075bdd0,
0x1fef4681a10750, 0x1fef4be8fbc2fa, 0x1fef514694465d, 0x1fef569a78be09,
0x1fef5be4b72e05, 0x1fef61255d723b, 0x1fef665c793eea, 0x1fef6b8a182112,
0x1fef70ae477edc, 0x1fef75c914980a, 0x1fef7ada8c8659, 0x1fef7fe2bc3dea,
0x1fef84e1b08da7, 0x1fef89d7761fa5, 0x1fef8ec4197987, 0x1fef93a7a6fce0,
0x1fef98822ae78b, 0x1fef9d53b15411, 0x1fefa21c4639ff, 0x1fefa6dbf56e44,
0x1fefab92caa386, 0x1fefb040d16a7e, 0x1fefb4e615324e, 0x1fefb982a148d3,
0x1fefbe1680dafc, 0x1fefc2a1bef519, 0x1fefc724668333, 0x1fefcb9e825154,
0x1fefd0101d0bda, 0x1fefd479413fc5, 0x1fefd8d9f95b02, 0x1fefdd324facb7,
0x1fefe1824e658c, 0x1fefe5c9ff97f7, 0x1fefea096d387f, 0x1fefee40a11e06,
0x1feff26fa5020e, 0x1feff6968280ff, 0x1feffab5431a67, 0x1feffecbf0313f,
0x1ff002da930c2c, 0x1ff006e134d5bf, 0x1ff00adfde9cb5, 0x1ff00ed6995434,
0x1ff012c56dd40c, 0x1ff016ac64d8ed, 0x1ff01a8b8704a9, 0x1ff01e62dcde6b,
0x1ff022326ed2f3, 0x1ff025fa4534ca, 0x1ff029ba683c81, 0x1ff02d72e008e1,
0x1ff03123b49f26, 0x1ff034ccedeb31, 0x1ff0386e93bfbf, 0x1ff03c08add698,
0x1ff03f9b43d0c9, 0x1ff043265d36ce, 0x1ff046aa0178c7, 0x1ff04a2637eeaa,
0x1ff04d9b07d86c, 0x1ff05108785e39, 0x1ff0546e909096, 0x1ff057cd57689b,
0x1ff05b24d3c814, 0x1ff05e750c79b4, 0x1ff061be08313c, 0x1ff064ffcd8ba6,
0x1ff0683a630f4f, 0x1ff06b6dcf2c20, 0x1ff06e9a183bb5, 0x1ff071bf448182,
0x1ff074dd5a2aff, 0x1ff077f45f4fc9, 0x1ff07b0459f1c8, 0x1ff07e0d4ffd57,
0x1ff0810f474962, 0x1ff0840a45978e, 0x1ff086fe50945b, 0x1ff089eb6dd741,
0x1ff08cd1a2e2d8, 0x1ff08fb0f524f4, 0x1ff0928969f6c7, 0x1ff0955b069cff,
0x1ff09825d047e6, 0x1ff09ae9cc137f, 0x1ff09da6ff07a4, 0x1ff0a05d6e1824,
0x1ff0a30d1e24dc, 0x1ff0a5b613f9d6, 0x1ff0a858544f64, 0x1ff0aaf3e3ca39,
0x1ff0ad88c6fb83, 0x1ff0b017026104, 0x1ff0b29e9a652f, 0x1ff0b51f935f39,
0x1ff0b799f19338, 0x1ff0ba0db93235, 0x1ff0bc7aee5a46, 0x1ff0bee19516a2,
0x1ff0c141b15fb5, 0x1ff0c39b471b3b, 0x1ff0c5ee5a1c4e, 0x1ff0c83aee237d,
0x1ff0ca8106dee0, 0x1ff0ccc0a7ea27, 0x1ff0cef9d4ceb1, 0x1ff0d12c91039d,
0x1ff0d358dfedd9, 0x1ff0d57ec4e032, 0x1ff0d79e431b6b, 0x1ff0d9b75dce43,
0x1ff0dbca18158d, 0x1ff0ddd674fc39, 0x1ff0dfdc777b66, 0x1ff0e1dc227a6c,
0x1ff0e3d578ceef, 0x1ff0e5c87d3ce5, 0x1ff0e7b53276a5, 0x1ff0e99b9b1cf7,
0x1ff0eb7bb9bf19, 0x1ff0ed5590dacb, 0x1ff0ef2922dc5d, 0x1ff0f0f6721eb8,
0x1ff0f2bd80eb61, 0x1ff0f47e517a8b, 0x1ff0f638e5f319, 0x1ff0f7ed406aa9,
0x1ff0f99b62e59b, 0x1ff0fb434f5716, 0x1ff0fce507a110, 0x1ff0fe808d9456,
0x1ff10015e2f08b, 0x1ff101a5096437, 0x1ff1032e028cc1, 0x1ff104b0cff67a,
0x1ff1062d731ca0, 0x1ff107a3ed695d, 0x1ff109144035cf, 0x1ff10a7e6cca04,
0x1ff10be2745d03, 0x1ff10d405814c6, 0x1ff10e98190641, 0x1ff10fe9b8355e,
0x1ff111353694fd, 0x1ff1127a9506f5, 0x1ff113b9d45c14, 0x1ff114f2f55418,
0x1ff11625f89db3, 0x1ff11752ded684, 0x1ff11879a88b16, 0x1ff1199a5636db,
0x1ff11ab4e8442a, 0x1ff11bc95f0c38, 0x1ff11cd7bad713, 0x1ff11ddffbdb9f,
0x1ff11ee2223f8b, 0x1ff11fde2e1752, 0x1ff120d41f662a, 0x1ff121c3f61e06,
0x1ff122adb21f86, 0x1ff123915339f5, 0x1ff1246ed92b39, 0x1ff12546439fd0,
0x1ff126179232c1, 0x1ff126e2c46d93, 0x1ff127a7d9c842, 0x1ff12866d1a933,
0x1ff1291fab6527, 0x1ff129d2663f2f, 0x1ff12a7f01689d, 0x1ff12b257c00f9,
0x1ff12bc5d515f1, 0x1ff12c600ba346, 0x1ff12cf41e92c4, 0x1ff12d820cbc2d,
0x1ff12e09d4e527, 0x1ff12e8b75c130, 0x1ff12f06edf185, 0x1ff12f7c3c0518,
0x1ff12feb5e7874, 0x1ff1305453b5b0, 0x1ff130b71a1459, 0x1ff13113afd95b,
0x1ff1316a1336ed, 0x1ff131ba424c7c, 0x1ff132043b2694, 0x1ff13247fbbec6,
0x1ff1328581fb93, 0x1ff132bccbb054, 0x1ff132edd69d1d, 0x1ff13318a06ea6,
0x1ff1333d26be31, 0x1ff1335b67116b, 0x1ff133735eda56, 0x1ff133850b7726,
0x1ff133906a3229, 0x1ff133957841a6, 0x1ff1339432c7c0, 0x1ff1338c96d255,
0x1ff1337ea15adf, 0x1ff1336a4f4653, 0x1ff1334f9d64ff, 0x1ff1332e887269,
0x1ff133070d152b, 0x1ff132d927decd, 0x1ff132a4d54ba5, 0x1ff1326a11c2ae,
0x1ff13228d99562, 0x1ff131e128ff97, 0x1ff13192fc2751, 0x1ff1313e4f1c9c,
0x1ff130e31dd966, 0x1ff1308164414f, 0x1ff130191e2180, 0x1ff12faa473082,
0x1ff12f34db0e0a, 0x1ff12eb8d542d2, 0x1ff12e36314068, 0x1ff12dacea60fc,
0x1ff12d1cfbe731, 0x1ff12c8660fded, 0x1ff12be914b824, 0x1ff12b451210a4,
0x1ff12a9a53e9e3, 0x1ff129e8d50dc7, 0x1ff12930902d73, 0x1ff128717fe108,
0x1ff127ab9ea776, 0x1ff126dee6e63a, 0x1ff1260b52e926, 0x1ff12530dce227,
0x1ff1244f7ee906, 0x1ff1236732fb28, 0x1ff12277f2fb51, 0x1ff12181b8b164,
0x1ff120847dca1f, 0x1ff11f803bd6d8, 0x1ff11e74ec4d3a, 0x1ff11d628886ff,
0x1ff11c4909c1a8, 0x1ff11b28691e38, 0x1ff11a009fa0e6, 0x1ff118d1a630d6,
0x1ff1179b7597ca, 0x1ff1165e0681d7, 0x1ff11519517d14, 0x1ff113cd4ef94a,
0x1ff11279f747a3, 0x1ff1111f429a53, 0x1ff10fbd29044a, 0x1ff10e53a278d6,
0x1ff10ce2a6cb50, 0x1ff10b6a2daec0, 0x1ff109ea2eb581, 0x1ff10862a150e7,
0x1ff106d37cd0de, 0x1ff1053cb86389, 0x1ff1039e4b14e3, 0x1ff101f82bce57,
0x1ff1004a51565d, 0x1ff0fe94b25013, 0x1ff0fcd7453ad1, 0x1ff0fb120071be,
0x1ff0f944da2b66, 0x1ff0f76fc87945, 0x1ff0f592c1475c, 0x1ff0f3adba5bb6,
0x1ff0f1c0a955fc, 0x1ff0efcb83aef3, 0x1ff0edce3eb80b, 0x1ff0ebc8cf9adc,
0x1ff0e9bb2b58ad, 0x1ff0e7a546c9f0, 0x1ff0e587169dbe, 0x1ff0e3608f5956,
0x1ff0e131a55790, 0x1ff0defa4cc858, 0x1ff0dcba79b01a, 0x1ff0da721fe739,
0x1ff0d82133197a, 0x1ff0d5c7a6c56c, 0x1ff0d3656e3bd8, 0x1ff0d0fa7c9f1e,
0x1ff0ce86c4e29c, 0x1ff0cc0a39ca0f, 0x1ff0c984cde8ea, 0x1ff0c6f673a1b7,
0x1ff0c45f1d2566, 0x1ff0c1bebc72a5, 0x1ff0bf15435531, 0x1ff0bc62a3651e,
0x1ff0b9a6ce0624, 0x1ff0b6e1b466e2, 0x1ff0b413478025, 0x1ff0b13b781421,
0x1ff0ae5a36adb1, 0x1ff0ab6f739f8b, 0x1ff0a87b1f0373, 0x1ff0a57d28b970,
0x1ff0a2758066ef, 0x1ff09f641575f3, 0x1ff09c48d71432, 0x1ff09923b43237,
0x1ff095f49b827a, 0x1ff092bb7b787c, 0x1ff08f784247d0, 0x1ff08c2adde32f,
0x1ff088d33bfb7e, 0x1ff0857149fed0, 0x1ff08204f51766, 0x1ff07e8e2a2aa6,
0x1ff07b0cd5d812, 0x1ff07780e47835, 0x1ff073ea421b8e, 0x1ff07048da896e,
0x1ff06c9c993ee0, 0x1ff068e5696d77, 0x1ff0652335fa27, 0x1ff06155e97c0f,
0x1ff05d7d6e3b3d, 0x1ff05999ae2f72, 0x1ff055aa92fed7, 0x1ff051b005fcb1,
0x1ff04da9f0280d, 0x1ff049983a2a63, 0x1ff0457acc5635, 0x1ff041518ea5a4,
0x1ff03d1c68b8ff, 0x1ff038db41d549, 0x1ff0348e00e2ba, 0x1ff030348c6b33,
0x1ff02bceca98b1, 0x1ff0275ca133ad, 0x1ff022ddf5a180, 0x1ff01e52ace2b3,
0x1ff019baab914d, 0x1ff01515d5df14, 0x1ff010640f93c9, 0x1ff00ba53c0b50,
0x1ff006d93e33de, 0x1ff001fff88c0b, 0x1feffd194d20e8, 0x1feff8251d8c04,
0x1feff3234af162, 0x1fefee13b5fd6b, 0x1fefe8f63ee2d3, 0x1fefe3cac5586c,
0x1fefde912896f6, 0x1fefd9494756d7, 0x1fefd3f2ffcdd5, 0x1fefce8e2facb1,
0x1fefc91ab41cc8, 0x1fefc39869bd94, 0x1fefbe072ca228, 0x1fefb866d84e9f,
0x1fefb2b747b579, 0x1fefacf85534e2, 0x1fefa729da93f9, 0x1fefa14bb0fff5,
0x1fef9b5db10947, 0x1fef955fb2a0a3, 0x1fef8f518d13f9, 0x1fef8933170b5f,
0x1fef83042685e3, 0x1fef7cc490d651, 0x1fef76742a9fda, 0x1fef7012c7d2b7,
0x1fef69a03ba8a6, 0x1fef631c58a15f, 0x1fef5c86f07ee9, 0x1fef55dfd441e3,
0x1fef4f26d425ab, 0x1fef485bbf9c74, 0x1fef417e654b41, 0x1fef3a8e9305c9,
0x1fef338c15ca3d, 0x1fef2c76b9bcf3, 0x1fef254e4a2400, 0x1fef1e129162a5,
0x1fef16c358f4ac, 0x1fef0f606969a0, 0x1fef07e98a5feb, 0x1fef005e827fd0,
0x1feef8bf177645, 0x1feef10b0defaf, 0x1feee942299274, 0x1feee1642cf971,
0x1feed970d9ae46, 0x1feed167f0237e, 0x1feec9492fae8f, 0x1feec1145681b2,
0x1feeb8c921a58f, 0x1feeb0674cf2c6, 0x1feea7ee930b3e, 0x1fee9f5ead5355,
0x1fee96b753ead6, 0x1fee8df83da5c4, 0x1fee85212004f2, 0x1fee7c31af2e64,
0x1fee73299de581, 0x1fee6a089d8304, 0x1fee60ce5decc0, 0x1fee577a8d8d21,
0x1fee4e0cd94a70, 0x1fee4484ec7dde, 0x1fee3ae270ea4b, 0x1fee31250eb2c6,
0x1fee274c6c50d2, 0x1fee1d582e8a57, 0x1fee1347f86753, 0x1fee091b6b273b,
0x1fedfed226360e, 0x1fedf46bc72111, 0x1fede9e7e98b3f, 0x1feddf4627215c,
0x1fedd486178db0, 0x1fedc9a7506b60, 0x1fedbea965396e, 0x1fedb38be74d4d,
0x1feda84e65c511, 0x1fed9cf06d7933, 0x1fed917188ede5, 0x1fed85d14043f3,
0x1fed7a0f19292b, 0x1fed6e2a96c84e, 0x1fed622339b875, 0x1fed55f87fec02,
0x1fed49a9e49ef6, 0x1fed3d36e044c3, 0x1fed309ee87582, 0x1fed23e16fda96,
0x1fed16fde61aa9, 0x1fed09f3b7c502, 0x1fecfcc24e3c38, 0x1fecef690fa025,
0x1fece1e75eb728, 0x1fecd43c9ad6a2, 0x1fecc6681fcaac, 0x1fecb86945bcf9,
0x1fecaa3f611ae4, 0x1fec9be9c27a96, 0x1fec8d67b67f48, 0x1fec7eb885bc8a,
0x1fec6fdb749894, 0x1fec60cfc32d86, 0x1fec5194ad299c, 0x1fec422969ae3c,
0x1fec328d2b2de4, 0x1fec22bf1f48d6, 0x1fec12be6ea886, 0x1fec028a3cd9b4,
0x1febf221a8252d, 0x1febe183c9670d, 0x1febd0afb3e495, 0x1febbfa4752065,
0x1febae6114ad16, 0x1feb9ce493fe28, 0x1feb8b2dee3720, 0x1feb793c17f8d6,
0x1feb670dff2cc5, 0x1feb54a28ace64, 0x1feb41f89ab255, 0x1feb2f0f074b67,
0x1feb1be4a16d3d, 0x1feb0878320c91, 0x1feaf4c879fcef, 0x1feae0d431abbf,
0x1feacc9a08d893, 0x1feab818a64a83, 0x1feaa34ea78281, 0x1fea8e3aa06a76,
0x1fea78db1b010a, 0x1fea632e9701e4, 0x1fea4d33898a36, 0x1fea36e85cb96e,
0x1fea204b6f4dd7, 0x1fea095b143cec, 0x1fe9f215924732, 0x1fe9da79238760,
0x1fe9c283f4fc82, 0x1fe9aa34260ef2, 0x1fe99187c80fc6, 0x1fe9787cddb27f,
0x1fe95f115a809d, 0x1fe945432246c2, 0x1fe92b10087b1e, 0x1fe91075cf9cab,
0x1fe8f572288aef, 0x1fe8da02b1d5c3, 0x1fe8be24f704c2, 0x1fe8a1d66fd5d4,
0x1fe885147f7262, 0x1fe867dc739aab, 0x1fe84a2b83c699, 0x1fe82bfed03b8b,
0x1fe80d53611666, 0x1fe7ee2625493f, 0x1fe7ce73f18bd6, 0x1fe7ae397f3e29,
0x1fe78d736b3c3c, 0x1fe76c1e34a225, 0x1fe74a363b7f7d, 0x1fe727b7bf790d,
0x1fe7049ede57b6, 0x1fe6e0e792834b, 0x1fe6bc8db16825, 0x1fe6978ce9c603,
0x1fe671e0c1e6c8, 0x1fe64b8495bb6e, 0x1fe6247394dd8a, 0x1fe5fca8c07372,
0x1fe5d41ee8f51c, 0x1fe5aad0abcf7f, 0x1fe580b870e42c, 0x1fe555d067e2a0,
0x1fe52a12857886, 0x1fe4fd7880561c, 0x1fe4cffbce0368, 0x1fe4a1959f82e4,
0x1fe4723eddbde3, 0x1fe441f025b691, 0x1fe410a1c47b3a, 0x1fe3de4bb2d5ff,
0x1fe3aae590b3cd, 0x1fe37666a03cda, 0x1fe340c5c09883, 0x1fe309f96855be,
0x1fe2d1f79f70c1, 0x1fe298b5f8edc5, 0x1fe25e298c000b, 0x1fe22246ecb36a,
0x1fe1e502240dd1, 0x1fe1a64ea79cf0, 0x1fe1661f506340, 0x1fe12466511634,
0x1fe0e1152b9dfa, 0x1fe09c1ca5c586, 0x1fe0556cbd07ed, 0x1fe00cf49965e1,
0x1fdfc2a27f2c03, 0x1fdf7663bf9023, 0x1fdf2824a8088b, 0x1fded7d0703d61,
0x1fde855126705d, 0x1fde308f9a32f1, 0x1fddd973453e66, 0x1fdd7fe2323bfe,
0x1fdd23c0e1452b, 0x1fdcc4f229dd06, 0x1fdc63571a1c4b, 0x1fdbfeced2c01b,
0x1fdb97365fc177, 0x1fdb2c688d0f8b, 0x1fdabe3db6f92c, 0x1fda4c8b95c24b,
0x1fd9d72503cfc4, 0x1fd95dd9bdbef4, 0x1fd8e0761ba5e9, 0x1fd85ec2c29c6d,
0x1fd7d8844d8cda, 0x1fd74d7aec2468, 0x1fd6bd61f68cd8, 0x1fd627ef746325,
0x1fd58cd3951ed5, 0x1fd4ebb817d1c2, 0x1fd4443f9fcda6, 0x1fd39604f3511c,
0x1fd2e09a20dc3f, 0x1fd223878731c8, 0x1fd15e4abb4d94, 0x1fd0905546b5d4,
0x1fcfb90b37769e, 0x1fced7c179c444, 0x1fcdebbbf19f89, 0x1fccf42b48d34f,
0x1fcbf02a63233d, 0x1fcadebb6763a3, 0x1fc9bec4484712, 0x1fc88f0ab2ba29,
0x1fc74e2f41599a, 0x1fc5faa7cc7e95, 0x1fc492b8a3f8da, 0x1fc3146c720d64,
0x1fc17d8a75a29c, 0x1fbfcb8aaa2a0f, 0x1fbdfb8754ba16, 0x1fbc0a2b43eb4b,
0x1fb9f39bd71f30, 0x1fb7b35d938f7c, 0x1fb54431a09a74, 0x1fb29fe9eb39c4,
0x1fafbf30d51c86, 0x1fac9940155db5, 0x1fa923809add97, 0x1fa5510a7e2e33,
0x1fa111f7d6ccf2, 0x1f9c5276a7d6ff, 0x1f96f97b763f40, 0x1f90e6e48f5fd7,
0x1f89f0c02df865, 0x1f81df33180649, 0x1f78661cee4357, 0x1f6d1ade61b01f,
0x1f5f632ea2dce6, 0x1f4e56c35fc3f7, 0x1f38862a92f170, 0x1f1b862fe29563,
0x1ef2e7b5562b19, 0x1eb584cfe7bbe6, 0x1e4cc4bf01d278, 0x1d6bfb46d88c28,
}
// expoW is a precomputed 1024-entry float64 lookup table.
// NOTE(review): the name and the sibling table expoF suggest these are the
// per-layer width values (w[i]) for a 1024-layer ziggurat sampler of the
// exponential distribution — TODO confirm against the table generator.
// This table is machine-generated; do not edit the constants by hand.
var expoW = [1024]float64{
	1.13866300213877813e-15, 3.50242056868416685e-18, 5.70764952751972552e-18, 7.43157304507964198e-18,
	8.89301382714102794e-18, 1.01849930256090362e-17, 1.13566120763560199e-17, 1.24373330462895014e-17,
	1.34464053055239106e-17, 1.43971940744360142e-17, 1.52994122305171874e-17, 1.61603734793054288e-17,
	1.69857437005156817e-17, 1.77800154948490313e-17, 1.85468208790794572e-17, 1.92891447096363812e-17,
	2.00094747399575766e-17, 2.07099098388363480e-17, 2.13922397691885634e-17, 2.20580051413288253e-17,
	2.27085432365339685e-17, 2.33450235613893963e-17, 2.39684758075173161e-17, 2.45798121063206663e-17,
	2.51798449374771436e-17, 2.57693016838210481e-17, 2.63488365683286212e-17, 2.69190405257174441e-17,
	2.74804494286134748e-17, 2.80335509910373130e-17, 2.85787905998030641e-17, 2.91165762702436677e-17,
	2.96472828815662770e-17, 3.01712558156409405e-17, 3.06888140986687519e-17, 3.12002531261801955e-17,
	3.17058470368832284e-17, 3.22058507890550994e-17, 3.27005019837409651e-17, 3.31900224714507652e-17,
	3.36746197729291870e-17, 3.41544883396033694e-17, 3.46298106752513256e-17, 3.51007583370977331e-17,
	3.55674928317893002e-17, 3.60301664194168814e-17, 3.64889228368475191e-17, 3.69438979500361947e-17,
	3.73952203436478575e-17, 3.78430118551906078e-17, 3.82873880599043852e-17, 3.87284587118362003e-17,
	3.91663281458397442e-17, 3.96010956446438125e-17, 4.00328557746248912e-17, 4.04616986934806411e-17,
	4.08877104326225461e-17, 4.13109731567783096e-17, 4.17315654030096701e-17, 4.21495623011038930e-17,
	4.25650357770807539e-17, 4.29780547413678012e-17, 4.33886852630308135e-17, 4.37969907313005329e-17,
	4.42030320055085859e-17, 4.46068675544319532e-17, 4.50085535859453662e-17, 4.54081441677920346e-17,
	4.58056913402044554e-17, 4.62012452210367440e-17, 4.65948541040075780e-17, 4.69865645505969950e-17,
	4.73764214760904961e-17, 4.77644682302191072e-17, 4.81507466728040892e-17, 4.85352972447789544e-17,
	4.89181590349291082e-17, 4.92993698426600875e-17, 4.96789662370792738e-17, 5.00569836126519279e-17,
	5.04334562416710039e-17, 5.08084173237606483e-17, 5.11818990326155613e-17, 5.15539325601623859e-17,
	5.19245481583145717e-17, 5.22937751784789536e-17, 5.26616421089601042e-17, 5.30281766103974008e-17,
	5.33934055493597026e-17, 5.37573550302132608e-17, 5.41200504253700168e-17, 5.44815164040155802e-17,
	5.48417769594092893e-17, 5.52008554348419563e-17, 5.55587745483310531e-17, 5.59155564161275402e-17,
	5.62712225751033443e-17, 5.66257940040840001e-17, 5.69792911441864214e-17, 5.73317339182178903e-17,
	5.76831417491886752e-17, 5.80335335779871866e-17, 5.83829278802635728e-17, 5.87313426825644027e-17,
	5.90787955777588901e-17, 5.94253037397940196e-17, 5.97708839378140594e-17, 6.01155525496774833e-17,
	6.04593255749024622e-17, 6.08022186470701729e-17, 6.11442470457132772e-17, 6.14854257077155906e-17,
	6.18257692382471985e-17, 6.21652919212578466e-17, 6.25040077295503742e-17, 6.28419303344544295e-17,
	6.31790731151195959e-17, 6.35154491674463330e-17, 6.38510713126714457e-17, 6.41859521056246471e-17,
	6.45201038426711684e-17, 6.48535385693550682e-17, 6.51862680877568281e-17, 6.55183039635782105e-17,
	6.58496575329665713e-17, 6.61803399090902991e-17, 6.65103619884764002e-17, 6.68397344571204976e-17,
	6.71684677963794495e-17, 6.74965722886556345e-17, 6.78240580228820122e-17, 6.81509348998165384e-17,
	6.84772126371536022e-17, 6.88029007744605938e-17, 6.91280086779465688e-17, 6.94525455450699849e-17,
	6.97765204089922405e-17, 7.00999421428830436e-17, 7.04228194640838963e-17, 7.07451609381350078e-17,
	7.10669749826714292e-17, 7.13882698711932909e-17, 7.17090537367152949e-17, 7.20293345752999490e-17,
	7.23491202494792634e-17, 7.26684184915689645e-17, 7.29872369068795414e-17, 7.33055829768278701e-17,
	7.36234640619532022e-17, 7.39408874048411141e-17, 7.42578601329588457e-17, 7.45743892614052207e-17,
	7.48904816955783273e-17, 7.52061442337639926e-17, 7.55213835696477749e-17, 7.58362062947534310e-17,
	7.61506189008102266e-17, 7.64646277820518860e-17, 7.67782392374492908e-17, 7.70914594728795881e-17,
	7.74042946032335713e-17, 7.77167506544637473e-17, 7.80288335655749535e-17, 7.83405491905595321e-17,
	7.86519033002790468e-17, 7.89629015842941934e-17, 7.92735496526446554e-17, 7.95838530375807270e-17,
	7.98938171952481110e-17, 8.02034475073275782e-17, 8.05127492826308455e-17, 8.08217277586542374e-17,
	8.11303881030913265e-17, 8.14387354153060087e-17, 8.17467747277673049e-17, 8.20545110074468898e-17,
	8.23619491571807650e-17, 8.26690940169961470e-17, 8.29759503654045216e-17, 8.32825229206620955e-17,
	8.35888163419985706e-17, 8.38948352308151647e-17, 8.42005841318529637e-17, 8.45060675343323331e-17,
	8.48112898730644100e-17, 8.51162555295354792e-17, 8.54209688329649217e-17, 8.57254340613377475e-17,
	8.60296554424122665e-17, 8.63336371547037247e-17, 8.66373833284446306e-17, 8.69408980465223419e-17,
	8.72441853453947103e-17, 8.75472492159843547e-17, 8.78500936045520899e-17, 8.81527224135503018e-17,
	8.84551395024566624e-17, 8.87573486885888013e-17, 8.90593537479005498e-17, 8.93611584157600782e-17,
	8.96627663877107150e-17, 8.99641813202145720e-17, 9.02654068313798122e-17, 9.05664465016717829e-17,
	9.08673038746084713e-17, 9.11679824574408223e-17, 9.14684857218181801e-17, 9.17688171044394437e-17,
	9.20689800076901207e-17, 9.23689778002657626e-17, 9.26688138177820984e-17, 9.29684913633723879e-17,
	9.32680137082719675e-17, 9.35673840923907163e-17, 9.38666057248735546e-17, 9.41656817846492320e-17,
	9.44646154209679607e-17, 9.47634097539278079e-17, 9.50620678749905365e-17, 9.53605928474868584e-17,
	9.56589877071115063e-17, 9.59572554624083708e-17, 9.62553990952459616e-17, 9.65534215612833288e-17,
	9.68513257904268869e-17, 9.71491146872781551e-17, 9.74467911315727711e-17, 9.77443579786109619e-17,
	9.80418180596796592e-17, 9.83391741824665274e-17, 9.86364291314659314e-17, 9.89335856683773363e-17,
	9.92306465324960395e-17, 9.95276144410965083e-17, 9.98244920898086293e-17, 1.00121282152986834e-16,
	1.00417987284072419e-16, 1.00714610115949139e-16, 1.01011153261292315e-16, 1.01307619312911505e-16,
	1.01604010844086932e-16, 1.01900330408899940e-16, 1.02196580542557368e-16, 1.02492763761710182e-16,
	1.02788882564766411e-16, 1.03084939432198605e-16, 1.03380936826845768e-16, 1.03676877194210111e-16,
	1.03972762962748555e-16, 1.04268596544159228e-16, 1.04564380333663023e-16, 1.04860116710280255e-16,
	1.05155808037102722e-16, 1.05451456661561074e-16, 1.05747064915687647e-16, 1.06042635116375016e-16,
	1.06338169565630065e-16, 1.06633670550823963e-16, 1.06929140344937998e-16, 1.07224581206805350e-16,
	1.07519995381348891e-16, 1.07815385099815248e-16, 1.08110752580004962e-16, 1.08406100026499016e-16,
	1.08701429630881762e-16, 1.08996743571960260e-16, 1.09292044015980230e-16, 1.09587333116838589e-16,
	1.09882613016292633e-16, 1.10177885844166078e-16, 1.10473153718551823e-16, 1.10768418746011716e-16,
	1.11063683021773164e-16, 1.11358948629922863e-16, 1.11654217643597637e-16, 1.11949492125172265e-16,
	1.12244774126444748e-16, 1.12540065688818639e-16, 1.12835368843482824e-16, 1.13130685611588700e-16,
	1.13426018004424635e-16, 1.13721368023588112e-16, 1.14016737661155233e-16, 1.14312128899848038e-16,
	1.14607543713199171e-16, 1.14902984065714518e-16, 1.15198451913033436e-16, 1.15493949202086751e-16,
	1.15789477871252708e-16, 1.16085039850510597e-16, 1.16380637061592587e-16, 1.16676271418133165e-16,
	1.16971944825816850e-16, 1.17267659182523937e-16, 1.17563416378474067e-16, 1.17859218296368247e-16,
	1.18155066811528801e-16, 1.18450963792037639e-16, 1.18746911098872637e-16, 1.19042910586042500e-16,
	1.19338964100719642e-16, 1.19635073483371603e-16, 1.19931240567890890e-16, 1.20227467181722921e-16,
	1.20523755145992785e-16, 1.20820106275630154e-16, 1.21116522379492887e-16, 1.21413005260489129e-16,
	1.21709556715697839e-16, 1.22006178536488073e-16, 1.22302872508636879e-16, 1.22599640412445572e-16,
	1.22896484022855082e-16, 1.23193405109559679e-16, 1.23490405437119549e-16, 1.23787486765072178e-16,
	1.24084650848042274e-16, 1.24381899435850871e-16, 1.24679234273622845e-16, 1.24976657101893526e-16,
	1.25274169656714186e-16, 1.25571773669756293e-16, 1.25869470868414701e-16, 1.26167262975909935e-16,
	1.26465151711389210e-16, 1.26763138790026570e-16, 1.27061225923122038e-16, 1.27359414818199677e-16,
	1.27657707179104776e-16, 1.27956104706100156e-16, 1.28254609095961356e-16, 1.28553222042071101e-16,
	1.28851945234512898e-16, 1.29150780360163588e-16, 1.29449729102785191e-16, 1.29748793143115928e-16,
	1.30047974158960416e-16, 1.30347273825278961e-16, 1.30646693814276255e-16, 1.30946235795489186e-16,
	1.31245901435873957e-16, 1.31545692399892392e-16, 1.31845610349597702e-16, 1.32145656944719360e-16,
	1.32445833842747460e-16, 1.32746142699016212e-16, 1.33046585166787048e-16, 1.33347162897330828e-16,
	1.33647877540009571e-16, 1.33948730742357598e-16, 1.34249724150161959e-16, 1.34550859407542293e-16,
	1.34852138157030313e-16, 1.35153562039648496e-16, 1.35455132694988272e-16, 1.35756851761287856e-16,
	1.36058720875509308e-16, 1.36360741673415348e-16, 1.36662915789645431e-16, 1.36965244857791481e-16,
	1.37267730510473198e-16, 1.37570374379412732e-16, 1.37873178095509130e-16, 1.38176143288912096e-16,
	1.38479271589095575e-16, 1.38782564624930773e-16, 1.39086024024758832e-16, 1.39389651416463058e-16,
	1.39693448427540782e-16, 1.39997416685174999e-16, 1.40301557816305364e-16, 1.40605873447699093e-16,
	1.40910365206021393e-16, 1.41215034717905595e-16, 1.41519883610022969e-16, 1.41824913509152195e-16,
	1.42130126042248606e-16, 1.42435522836513119e-16, 1.42741105519460741e-16, 1.43046875718989028e-16,
	1.43352835063446169e-16, 1.43658985181698763e-16, 1.43965327703199430e-16, 1.44271864258054142e-16,
	1.44578596477089444e-16, 1.44885525991919190e-16, 1.45192654435011325e-16, 1.45499983439754442e-16,
	1.45807514640523901e-16, 1.46115249672748092e-16, 1.46423190172974259e-16, 1.46731337778934339e-16,
	1.47039694129610493e-16, 1.47348260865300626e-16, 1.47657039627683545e-16, 1.47966032059884311e-16,
	1.48275239806539145e-16, 1.48584664513860290e-16, 1.48894307829700942e-16, 1.49204171403619915e-16,
	1.49514256886946076e-16, 1.49824565932843012e-16, 1.50135100196373369e-16, 1.50445861334563241e-16,
	1.50756851006466296e-16, 1.51068070873228159e-16, 1.51379522598150462e-16, 1.51691207846754890e-16,
	1.52003128286847420e-16, 1.52315285588582176e-16, 1.52627681424525467e-16, 1.52940317469719863e-16,
	1.53253195401748017e-16, 1.53566316900796759e-16, 1.53879683649720993e-16, 1.54193297334107699e-16,
	1.54507159642339896e-16, 1.54821272265660695e-16, 1.55135636898237243e-16, 1.55450255237224965e-16,
	1.55765128982831561e-16, 1.56080259838381226e-16, 1.56395649510378793e-16, 1.56711299708574196e-16,
	1.57027212146026667e-16, 1.57343388539169247e-16, 1.57659830607873276e-16, 1.57976540075513081e-16,
	1.58293518669030589e-16, 1.58610768119000158e-16, 1.58928290159693563e-16, 1.59246086529144928e-16,
	1.59564158969216028e-16, 1.59882509225661421e-16, 1.60201139048193995e-16, 1.60520050190550519e-16,
	1.60839244410557318e-16, 1.61158723470196162e-16, 1.61478489135670363e-16, 1.61798543177470937e-16,
	1.62118887370442995e-16, 1.62439523493852323e-16, 1.62760453331452145e-16, 1.63081678671550073e-16,
	1.63403201307075263e-16, 1.63725023035645833e-16, 1.64047145659636362e-16, 1.64369570986245761e-16,
	1.64692300827565281e-16, 1.65015337000646875e-16, 1.65338681327571583e-16, 1.65662335635518412e-16,
	1.65986301756833258e-16, 1.66310581529098375e-16, 1.66635176795201728e-16, 1.66960089403406969e-16,
	1.67285321207423532e-16, 1.67610874066477009e-16, 1.67936749845379951e-16, 1.68262950414602768e-16,
	1.68589477650345218e-16, 1.68916333434607899e-16, 1.69243519655264330e-16, 1.69571038206133233e-16,
	1.69898890987051176e-16, 1.70227079903945674e-16, 1.70555606868908324e-16, 1.70884473800268816e-16,
	1.71213682622668786e-16, 1.71543235267136388e-16, 1.71873133671161165e-16, 1.72203379778769235e-16,
	1.72533975540599046e-16, 1.72864922913977231e-16, 1.73196223862995301e-16, 1.73527880358586382e-16,
	1.73859894378602652e-16, 1.74192267907892993e-16, 1.74525002938381232e-16, 1.74858101469144861e-16,
	1.75191565506494142e-16, 1.75525397064051581e-16, 1.75859598162832201e-16, 1.76194170831323905e-16,
	1.76529117105568701e-16, 1.76864439029244107e-16, 1.77200138653745312e-16, 1.77536218038267738e-16,
	1.77872679249890141e-16, 1.78209524363658278e-16, 1.78546755462669095e-16, 1.78884374638155479e-16,
	1.79222383989571556e-16, 1.79560785624678622e-16, 1.79899581659631581e-16, 1.80238774219065934e-16,
	1.80578365436185542e-16, 1.80918357452850858e-16, 1.81258752419667768e-16, 1.81599552496077107e-16,
	1.81940759850444856e-16, 1.82282376660152812e-16, 1.82624405111690199e-16, 1.82966847400745663e-16,
	1.83309705732300040e-16, 1.83652982320719913e-16, 1.83996679389851725e-16, 1.84340799173116672e-16,
	1.84685343913606275e-16, 1.85030315864178671e-16, 1.85375717287555763e-16, 1.85721550456420849e-16,
	1.86067817653517369e-16, 1.86414521171748110e-16, 1.86761663314275339e-16, 1.87109246394621707e-16,
	1.87457272736772007e-16, 1.87805744675275556e-16, 1.88154664555349660e-16, 1.88504034732983767e-16,
	1.88853857575044459e-16, 1.89204135459381333e-16, 1.89554870774933866e-16, 1.89906065921838924e-16,
	1.90257723311539424e-16, 1.90609845366893743e-16, 1.90962434522286032e-16, 1.91315493223737720e-16,
	1.91669023929019577e-16, 1.92023029107765020e-16, 1.92377511241584419e-16, 1.92732472824180073e-16,
	1.93087916361462647e-16, 1.93443844371668189e-16, 1.93800259385476483e-16, 1.94157163946130322e-16,
	1.94514560609555776e-16, 1.94872451944483782e-16, 1.95230840532572437e-16, 1.95589728968530828e-16,
	1.95949119860243622e-16, 1.96309015828896961e-16, 1.96669419509105496e-16, 1.97030333549040576e-16,
	1.97391760610559598e-16, 1.97753703369336583e-16, 1.98116164514993922e-16, 1.98479146751235394e-16,
	1.98842652795980423e-16, 1.99206685381499634e-16, 1.99571247254551578e-16, 1.99936341176520879e-16,
	2.00301969923557614e-16, 2.00668136286718152e-16, 2.01034843072107173e-16, 2.01402093101021140e-16,
	2.01769889210093206e-16, 2.02138234251439572e-16, 2.02507131092806976e-16, 2.02876582617722087e-16,
	2.03246591725642028e-16, 2.03617161332106405e-16, 2.03988294368891085e-16, 2.04359993784163330e-16,
	2.04732262542638388e-16, 2.05105103625737854e-16, 2.05478520031749589e-16, 2.05852514775989139e-16,
	2.06227090890962883e-16, 2.06602251426532805e-16, 2.06977999450083069e-16, 2.07354338046688067e-16,
	2.07731270319282422e-16, 2.08108799388832639e-16, 2.08486928394510603e-16, 2.08865660493868734e-16,
	2.09244998863017130e-16, 2.09624946696802572e-16, 2.10005507208989242e-16, 2.10386683632441540e-16,
	2.10768479219308660e-16, 2.11150897241211321e-16, 2.11533940989430357e-16, 2.11917613775097275e-16,
	2.12301918929386988e-16, 2.12686859803712680e-16, 2.13072439769922390e-16, 2.13458662220498338e-16,
	2.13845530568757797e-16, 2.14233048249056612e-16, 2.14621218716994703e-16, 2.15010045449623903e-16,
	2.15399531945658094e-16, 2.15789681725685650e-16, 2.16180498332384214e-16, 2.16571985330737896e-16,
	2.16964146308256882e-16, 2.17356984875199440e-16, 2.17750504664796629e-16, 2.18144709333479190e-16,
	2.18539602561107330e-16, 2.18935188051202992e-16, 2.19331469531184587e-16, 2.19728450752604765e-16,
	2.20126135491390574e-16, 2.20524527548086525e-16, 2.20923630748100497e-16, 2.21323448941952326e-16,
	2.21723986005525452e-16, 2.22125245840321300e-16, 2.22527232373716823e-16, 2.22929949559224970e-16,
	2.23333401376757968e-16, 2.23737591832894256e-16, 2.24142524961148027e-16, 2.24548204822242125e-16,
	2.24954635504384391e-16, 2.25361821123546973e-16, 2.25769765823749227e-16, 2.26178473777343781e-16,
	2.26587949185306291e-16, 2.26998196277528356e-16, 2.27409219313114428e-16, 2.27821022580681770e-16,
	2.28233610398664470e-16, 2.28646987115621043e-16, 2.29061157110545689e-16, 2.29476124793183588e-16,
	2.29891894604349848e-16, 2.30308471016252347e-16, 2.30725858532818961e-16, 2.31144061690028342e-16,
	2.31563085056245286e-16, 2.31982933232559793e-16, 2.32403610853130764e-16, 2.32825122585533890e-16,
	2.33247473131113825e-16, 2.33670667225341053e-16, 2.34094709638172788e-16, 2.34519605174419153e-16,
	2.34945358674113256e-16, 2.35371975012886739e-16, 2.35799459102349560e-16, 2.36227815890474861e-16,
	2.36657050361988964e-16, 2.37087167538766144e-16, 2.37518172480228528e-16, 2.37950070283751580e-16,
	2.38382866085074297e-16, 2.38816565058715145e-16, 2.39251172418393403e-16, 2.39686693417456089e-16,
	2.40123133349310156e-16, 2.40560497547861096e-16, 2.40998791387956480e-16, 2.41438020285836399e-16,
	2.41878189699588878e-16, 2.42319305129612399e-16, 2.42761372119083744e-16, 2.43204396254432841e-16,
	2.43648383165823523e-16, 2.44093338527641192e-16, 2.44539268058986699e-16, 2.44986177524177365e-16,
	2.45434072733254616e-16, 2.45882959542498662e-16, 2.46332843854950086e-16, 2.46783731620939020e-16,
	2.47235628838621027e-16, 2.47688541554521267e-16, 2.48142475864085225e-16, 2.48597437912238105e-16,
	2.49053433893951384e-16, 2.49510470054817500e-16, 2.49968552691632863e-16, 2.50427688152988815e-16,
	2.50887882839871114e-16, 2.51349143206267946e-16, 2.51811475759786376e-16, 2.52274887062278155e-16,
	2.52739383730473822e-16, 2.53204972436626459e-16, 2.53671659909164476e-16, 2.54139452933354007e-16,
	2.54608358351970773e-16, 2.55078383065981757e-16, 2.55549534035237082e-16, 2.56021818279171509e-16,
	2.56495242877517024e-16, 2.56969814971025037e-16, 2.57445541762200125e-16, 2.57922430516043926e-16,
	2.58400488560810722e-16, 2.58879723288773863e-16, 2.59360142157003882e-16, 2.59841752688158292e-16,
	2.60324562471283367e-16, 2.60808579162627749e-16, 2.61293810486468685e-16, 2.61780264235950829e-16,
	2.62267948273937821e-16, 2.62756870533876631e-16, 2.63247039020675708e-16, 2.63738461811596195e-16,
	2.64231147057157200e-16, 2.64725102982055158e-16, 2.65220337886097110e-16, 2.65716860145149056e-16,
	2.66214678212098810e-16, 2.66713800617834190e-16, 2.67214235972236587e-16, 2.67715992965190163e-16,
	2.68219080367607127e-16, 2.68723507032469422e-16, 2.69229281895886985e-16, 2.69736413978172915e-16,
	2.70244912384935942e-16, 2.70754786308190756e-16, 2.71266045027485926e-16, 2.71778697911050459e-16,
	2.72292754416958999e-16, 2.72808224094315908e-16, 2.73325116584458868e-16, 2.73843441622182455e-16,
	2.74363209036981680e-16, 2.74884428754316433e-16, 2.75407110796896489e-16, 2.75931265285988602e-16,
	2.76456902442744944e-16, 2.76984032589554083e-16, 2.77512666151414781e-16, 2.78042813657333164e-16,
	2.78574485741743071e-16, 2.79107693145951342e-16, 2.79642446719607267e-16, 2.80178757422197485e-16,
	2.80716636324566602e-16, 2.81256094610464204e-16, 2.81797143578118288e-16, 2.82339794641836546e-16,
	2.82884059333635371e-16, 2.83429949304897444e-16, 2.83977476328058667e-16, 2.84526652298324477e-16,
	2.85077489235416830e-16, 2.85629999285352485e-16, 2.86184194722252411e-16, 2.86740087950184267e-16,
	2.87297691505037474e-16, 2.87857018056432360e-16, 2.88418080409664058e-16, 2.88980891507681219e-16,
	2.89545464433101301e-16, 2.90111812410262747e-16, 2.90679948807314326e-16, 2.91249887138343927e-16,
	2.91821641065545760e-16, 2.92395224401428764e-16, 2.92970651111065558e-16, 2.93547935314383834e-16,
	2.94127091288500741e-16, 2.94708133470101354e-16, 2.95291076457862152e-16, 2.95875935014920594e-16,
	2.96462724071391983e-16, 2.97051458726934297e-16, 2.97642154253362576e-16, 2.98234826097313640e-16,
	2.98829489882962880e-16, 2.99426161414793356e-16, 3.00024856680419229e-16, 3.00625591853465104e-16,
	3.01228383296501083e-16, 3.01833247564036990e-16, 3.02440201405575009e-16, 3.03049261768724115e-16,
	3.03660445802376121e-16, 3.04273770859946023e-16, 3.04889254502677578e-16, 3.05506914503015827e-16,
	3.06126768848048258e-16, 3.06748835743016518e-16, 3.07373133614899761e-16, 3.07999681116072110e-16,
	3.08628497128035786e-16, 3.09259600765231606e-16, 3.09893011378929151e-16, 3.10528748561198436e-16,
	3.11166832148964997e-16, 3.11807282228150819e-16, 3.12450119137903079e-16, 3.13095363474912859e-16,
	3.13743036097826702e-16, 3.14393158131752492e-16, 3.15045750972863044e-16, 3.15700836293099297e-16,
	3.16358436044975803e-16, 3.17018572466491834e-16, 3.17681268086149917e-16, 3.18346545728085253e-16,
	3.19014428517309080e-16, 3.19684939885068437e-16, 3.20358103574326237e-16, 3.21033943645364003e-16,
	3.21712484481511557e-16, 3.22393750795006195e-16, 3.23077767632985751e-16, 3.23764560383618450e-16,
	3.24454154782374051e-16, 3.25146576918439408e-16, 3.25841853241283321e-16, 3.26540010567374278e-16,
	3.27241076087055526e-16, 3.27945077371581957e-16, 3.28652042380323591e-16, 3.29361999468139896e-16,
	3.30074977392930619e-16, 3.30791005323367361e-16, 3.31510112846811718e-16, 3.32232329977425218e-16,
	3.32957687164476910e-16, 3.33686215300853697e-16, 3.34417945731780403e-16, 3.35152910263755103e-16,
	3.35891141173706168e-16, 3.36632671218378088e-16, 3.37377533643952612e-16, 3.38125762195912371e-16,
	3.38877391129154568e-16, 3.39632455218362083e-16, 3.40390989768640078e-16, 3.41153030626426683e-16,
	3.41918614190685159e-16, 3.42687777424387839e-16, 3.43460557866299685e-16, 3.44236993643071775e-16,
	3.45017123481654131e-16, 3.45800986722038182e-16, 3.46588623330339684e-16, 3.47380073912232226e-16,
	3.48175379726744167e-16, 3.48974582700429235e-16, 3.49777725441924170e-16, 3.50584851256905820e-16,
	3.51396004163460519e-16, 3.52211228907880337e-16, 3.53030570980899513e-16, 3.53854076634387146e-16,
	3.54681792898509997e-16, 3.55513767599382910e-16, 3.56350049377222915e-16, 3.57190687705024226e-16,
	3.58035732907772433e-16, 3.58885236182216900e-16, 3.59739249617220422e-16, 3.60597826214707033e-16,
	3.61461019911228816e-16, 3.62328885600174218e-16, 3.63201479154640379e-16, 3.64078857450993939e-16,
	3.64961078393145380e-16, 3.65848200937562515e-16, 3.66740285119050961e-16, 3.67637392077329333e-16,
	3.68539584084429687e-16, 3.69446924572953038e-16, 3.70359478165213282e-16, 3.71277310703302660e-16,
	3.72200489280113960e-16, 3.73129082271356780e-16, 3.74063159368605721e-16, 3.75002791613421188e-16,
	3.75948051432584648e-16, 3.76899012674492385e-16, 3.77855750646753940e-16, 3.78818342155043416e-16,
	3.79786865543253576e-16, 3.80761400735006893e-16, 3.81742029276577174e-16, 3.82728834381281573e-16,
	3.83721900975402503e-16, 3.84721315745703930e-16, 3.85727167188608917e-16, 3.86739545661108118e-16,
	3.87758543433474275e-16, 3.88784254743858197e-16, 3.89816775854849397e-16, 3.90856205112085891e-16,
	3.91902643005002902e-16, 3.92956192229815509e-16, 3.94016957754834067e-16, 3.95085046888217494e-16,
	3.96160569348273844e-16, 3.97243637336424719e-16, 3.98334365612955237e-16, 3.99432871575678552e-16,
	4.00539275341650411e-16, 4.01653699832077006e-16, 4.02776270860567218e-16, 4.03907117224888621e-16,
	4.05046370802395843e-16, 4.06194166649308659e-16, 4.07350643104028759e-16, 4.08515941894693588e-16,
	4.09690208251177187e-16, 4.10873591021761983e-16, 4.12066242794716309e-16, 4.13268320025027761e-16,
	4.14479983166557671e-16, 4.15701396809897257e-16, 4.16932729826223068e-16, 4.18174155517468832e-16,
	4.19425851773149282e-16, 4.20688001234192956e-16, 4.21960791464163570e-16, 4.23244415128274004e-16,
	4.24539070180622584e-16, 4.25844960060109453e-16, 4.27162293895520534e-16, 4.28491286720299892e-16,
	4.29832159697564367e-16, 4.31185140355953613e-16, 4.32550462836947651e-16, 4.33928368154328449e-16,
	4.35319104466508757e-16, 4.36722927362501830e-16, 4.38140100162361367e-16, 4.39570894232979693e-16,
	4.41015589320196769e-16, 4.42474473898242448e-16, 4.43947845537609534e-16, 4.45436011292537968e-16,
	4.46939288109378534e-16, 4.48458003257202595e-16, 4.49992494782128535e-16, 4.51543111986951658e-16,
	4.53110215937788517e-16, 4.54694179999583812e-16, 4.56295390402477505e-16, 4.57914246841191694e-16,
	4.59551163109777973e-16, 4.61206567774257927e-16, 4.62880904885908994e-16, 4.64574634738177881e-16,
	4.66288234670469284e-16, 4.68022199922335617e-16, 4.69777044541914934e-16, 4.71553302352804213e-16,
	4.73351527983943201e-16, 4.75172297967502213e-16, 4.77016211910241770e-16, 4.78883893744324138e-16,
	4.80775993064140985e-16, 4.82693186556356576e-16, 4.84636179531083447e-16, 4.86605707562900925e-16,
	4.88602538251317563e-16, 4.90627473111269884e-16, 4.92681349605364941e-16, 4.94765043330822740e-16,
	4.96879470375476390e-16, 4.99025589858770085e-16, 5.01204406675475713e-16, 5.03416974461862417e-16,
	5.05664398806332965e-16, 5.07947840729125887e-16, 5.10268520458620140e-16, 5.12627721535126601e-16,
	5.15026795276867394e-16, 5.17467165647217501e-16, 5.19950334567288038e-16, 5.22477887723691122e-16,
	5.25051500927957739e-16, 5.27672947091743395e-16, 5.30344103890830216e-16, 5.33066962201238118e-16,
	5.35843635402765046e-16, 5.38676369659293412e-16, 5.41567555301633775e-16, 5.44519739457988136e-16,
	5.47535640099899782e-16, 5.50618161698517219e-16, 5.53770412718034299e-16, 5.56995725211362223e-16,
	5.60297676828819863e-16, 5.63680115605603882e-16, 5.67147187960177049e-16, 5.70703370416234636e-16,
	5.74353505659037883e-16, 5.78102843657118577e-16, 5.81957088728395689e-16, 5.85922453613072497e-16,
	5.90005721844059276e-16, 5.94214319991934545e-16, 5.98556401722680686e-16, 6.03040946065420216e-16,
	6.07677872874969880e-16, 6.12478179232220788e-16, 6.17454101511974155e-16, 6.22619309143385482e-16,
	6.27989137805828684e-16, 6.33580872104132808e-16, 6.39414090884648266e-16, 6.45511092628449050e-16,
	6.51897424297094042e-16, 6.58602545376047753e-16, 6.65660670840558245e-16, 6.73111854210230591e-16,
	6.81003397732669891e-16, 6.89391715925600510e-16, 6.98344839449975845e-16, 7.07945842906517828e-16,
	7.18297638387021814e-16, 7.29529844515185979e-16, 7.41808911905442455e-16, 7.55353552220360039e-16,
	7.70459195679784070e-16, 7.87538659776905717e-16, 8.07193901521411435e-16, 8.30352530063857217e-16,
	8.58554763842056565e-16, 8.94646544346688949e-16, 9.44845067108994447e-16, 1.02764069967626247e-15,
}
var expoF = [1024]float64{
1.00000000000000000e+00, 9.68945415010992406e-01, 9.49889196388417734e-01, 9.35253503977446332e-01,
9.23022980182970576e-01, 9.12343913742377999e-01, 9.02766566511869151e-01, 8.94021426632391591e-01,
8.85932557172777080e-01, 8.78377875569187450e-01, 8.71268699374791011e-01, 8.64538272175399025e-01,
8.58134889483703867e-01, 8.52017559763211696e-01, 8.46153146776933429e-01, 8.40514419977236971e-01,
8.35078684225500689e-01, 8.29826791867500857e-01, 8.24742414617106068e-01, 8.19811496498238590e-01,
8.15021835792814509e-01, 8.10362760725640840e-01, 8.05824874458240470e-01, 8.01399852137199997e-01,
7.97080277593054665e-01, 7.92859510629635666e-01, 7.88731578189985183e-01, 7.84691084357616253e-01,
7.80733135361913133e-01, 7.76853276643614366e-01, 7.73047439694789729e-01, 7.69311896882065360e-01,
7.65643222836894122e-01, 7.62038261284015528e-01, 7.58494096401405415e-01, 7.55008027978255836e-01,
7.51577549773686071e-01, 7.48200330586717310e-01, 7.44874197634032620e-01, 7.41597121901076739e-01,
7.38367205187811071e-01, 7.35182668615751433e-01, 7.32041842399939080e-01, 7.28943156719906393e-01,
7.25885133548809014e-01, 7.22866379320717622e-01, 7.19885578333421372e-01, 7.16941486798615135e-01,
7.14032927463546851e-01, 7.11158784738500138e-01, 7.08318000273201132e-01, 7.05509568932652931e-01,
7.02732535129217184e-01, 6.99985989473171366e-01, 6.97269065708607139e-01, 6.94580937905535545e-01,
6.91920817882511030e-01, 6.89287952837074336e-01, 6.86681623163910348e-01, 6.84101140442869071e-01,
6.81545845580976417e-01, 6.79015107094277037e-01, 6.76508319516868628e-01, 6.74024901925812103e-01,
6.71564296571774522e-01, 6.69125967606291172e-01, 6.66709399897449817e-01, 6.64314097926606451e-01,
6.61939584759462352e-01, 6.59585401085470702e-01, 6.57251104320109802e-01, 6.54936267765070079e-01,
6.52640479821855801e-01, 6.50363343254706905e-01, 6.48104474499118455e-01, 6.45863503012554574e-01,
6.43640070664256547e-01, 6.41433831161305923e-01, 6.39244449508346468e-01, 6.37071601498584217e-01,
6.34914973233881241e-01, 6.32774260671937538e-01, 6.30649169198716941e-01, 6.28539413224417198e-01,
6.26444715801421137e-01, 6.24364808262785287e-01, 6.22299429879931032e-01, 6.20248327538310606e-01,
6.18211255429903850e-01, 6.16187974761494450e-01, 6.14178253477745129e-01, 6.12181865998166153e-01,
6.10198592967134523e-01, 6.08228221016181014e-01, 6.06270542537818757e-01, 6.04325355470234138e-01,
6.02392463092211172e-01, 6.00471673827700170e-01, 5.98562801059483118e-01, 5.96665662951422315e-01,
5.94780082278816513e-01, 5.92905886266414717e-01, 5.91042906433671189e-01, 5.89190978446849645e-01,
5.87349941977609258e-01, 5.85519640567729871e-01, 5.83699921499652996e-01, 5.81890635672535850e-01,
5.80091637483535205e-01, 5.78302784714053386e-01, 5.76523938420695048e-01, 5.74754962830699268e-01,
5.72995725241623455e-01, 5.71246095925071362e-01, 5.69505948034266130e-01, 5.67775157515283291e-01,
5.66053603021768215e-01, 5.64341165832971670e-01, 5.62637729774947970e-01, 5.60943181144766934e-01,
5.59257408637601205e-01, 5.57580303276555500e-01, 5.55911758345113416e-01, 5.54251669322083917e-01,
5.52599933818934130e-01, 5.50956451519404422e-01, 5.49321124121302184e-01, 5.47693855280382058e-01,
5.46074550556219340e-01, 5.44463117359992643e-01, 5.42859464904093314e-01, 5.41263504153483788e-01,
5.39675147778732156e-01, 5.38094310110651897e-01, 5.36520907096481925e-01, 5.34954856257540912e-01,
5.33396076648298023e-01, 5.31844488816802352e-01, 5.30300014766414196e-01, 5.28762577918789112e-01,
5.27232103078063563e-01, 5.25708516396194092e-01, 5.24191745339406934e-01, 5.22681718655712557e-01,
5.21178366343446720e-01, 5.19681619620796287e-01, 5.18191410896273408e-01, 5.16707673740102957e-01,
5.15230342856487833e-01, 5.13759354056719575e-01, 5.12294644233103558e-01, 5.10836151333668664e-01,
5.09383814337632579e-01, 5.07937573231595163e-01, 5.06497368986433938e-01, 5.05063143534877135e-01,
5.03634839749728336e-01, 5.02212401422722166e-01, 5.00795773243986830e-01, 4.99384900782093522e-01,
4.97979730464671533e-01, 4.96580209559570440e-01, 4.95186286156549860e-01, 4.93797909149479353e-01,
4.92415028219030659e-01, 4.91037593815846496e-01, 4.89665557144169317e-01, 4.88298870145914887e-01,
4.86937485485176458e-01, 4.85581356533144659e-01, 4.84230437353430188e-01, 4.82884682687775846e-01,
4.81544047942145836e-01, 4.80208489173179431e-01, 4.78877963074997970e-01, 4.77552426966354582e-01,
4.76231838778113925e-01, 4.74916157041053999e-01, 4.73605340873978209e-01, 4.72299349972128746e-01,
4.70998144595892287e-01, 4.69701685559788251e-01, 4.68409934221731883e-01, 4.67122852472563244e-01,
4.65840402725834490e-01, 4.64562547907847234e-01, 4.63289251447933093e-01, 4.62020477268969720e-01,
4.60756189778125824e-01, 4.59496353857828055e-01, 4.58240934856943449e-01, 4.56989898582171483e-01,
4.55743211289639016e-01, 4.54500839676692969e-01, 4.53262750873884734e-01, 4.52028912437140884e-01,
4.50799292340115010e-01, 4.49573858966715534e-01, 4.48352581103804715e-01, 4.47135427934063967e-01,
4.45922369029020382e-01, 4.44713374342231516e-01, 4.43508414202621992e-01, 4.42307459307969664e-01,
4.41110480718536202e-01, 4.39917449850838471e-01, 4.38728338471557067e-01, 4.37543118691578137e-01,
4.36361762960164989e-01, 4.35184244059256042e-01, 4.34010535097886119e-01, 4.32840609506727148e-01,
4.31674441032745770e-01, 4.30512003733974524e-01, 4.29353271974393824e-01, 4.28198220418921693e-01,
4.27046824028508742e-01, 4.25899058055335289e-01, 4.24754898038108619e-01, 4.23614319797457339e-01,
4.22477299431420539e-01, 4.21343813311029547e-01, 4.20213838075979895e-01, 4.19087350630390820e-01,
4.17964328138651198e-01, 4.16844748021348355e-01, 4.15728587951278983e-01, 4.14615825849539543e-01,
4.13506439881694277e-01, 4.12400408454018874e-01, 4.11297710209818645e-01, 4.10198324025818073e-01,
4.09102229008621587e-01, 4.08009404491242844e-01, 4.06919830029700891e-01, 4.05833485399682559e-01,
4.04750350593268193e-01, 4.03670405815720723e-01, 4.02593631482335013e-01, 4.01520008215347501e-01,
4.00449516840904074e-01, 3.99382138386084551e-01, 3.98317854075983246e-01, 3.97256645330843650e-01,
3.96198493763246673e-01, 3.95143381175350250e-01, 3.94091289556180446e-01, 3.93042201078971531e-01,
3.91996098098555179e-01, 3.90952963148796961e-01, 3.89912778940079030e-01, 3.88875528356828593e-01,
3.87841194455090355e-01, 3.86809760460143037e-01, 3.85781209764157529e-01, 3.84755525923897557e-01,
3.83732692658460162e-01, 3.82712693847056473e-01, 3.81695513526831454e-01, 3.80681135890721389e-01,
3.79669545285349075e-01, 3.78660726208955134e-01, 3.77654663309365535e-01, 3.76651341381993687e-01,
3.75650745367876959e-01, 3.74652860351746719e-01, 3.73657671560131244e-01, 3.72665164359490708e-01,
3.71675324254383987e-01, 3.70688136885665653e-01, 3.69703588028713948e-01, 3.68721663591687843e-01,
3.67742349613813302e-01, 3.66765632263697905e-01, 3.65791497837673507e-01, 3.64819932758165710e-01,
3.63850923572090590e-01, 3.62884456949277345e-01, 3.61920519680916708e-01, 3.60959098678034496e-01,
3.60000180969989703e-01, 3.59043753702997182e-01, 3.58089804138673640e-01, 3.57138319652607006e-01,
3.56189287732948778e-01, 3.55242695979028411e-01, 3.54298532099989794e-01, 3.53356783913449435e-01,
3.52417439344175298e-01, 3.51480486422786675e-01, 3.50545913284474264e-01, 3.49613708167740067e-01,
3.48683859413156938e-01, 3.47756355462147226e-01, 3.46831184855780184e-01, 3.45908336233588021e-01,
3.44987798332399676e-01, 3.44069559985192897e-01, 3.43153610119963259e-01, 3.42239937758610557e-01,
3.41328532015841957e-01, 3.40419382098091694e-01, 3.39512477302456928e-01, 3.38607807015649243e-01,
3.37705360712962255e-01, 3.36805127957254202e-01, 3.35907098397945525e-01, 3.35011261770031654e-01,
3.34117607893109569e-01, 3.33226126670419509e-01, 3.32336808087899949e-01, 3.31449642213256734e-01,
3.30564619195045539e-01, 3.29681729261767698e-01, 3.28800962720978918e-01, 3.27922309958411151e-01,
3.27045761437106564e-01, 3.26171307696564106e-01, 3.25298939351898575e-01, 3.24428647093010780e-01,
3.23560421683770361e-01, 3.22694253961209598e-01, 3.21830134834728498e-01, 3.20968055285311527e-01,
3.20108006364754560e-01, 3.19249979194903444e-01, 3.18393964966902365e-01, 3.17539954940453117e-01,
3.16687940443084404e-01, 3.15837912869431081e-01, 3.14989863680523841e-01, 3.14143784403087600e-01,
3.13299666628850726e-01, 3.12457502013862465e-01, 3.11617282277820484e-01, 3.10778999203406958e-01,
3.09942644635634146e-01, 3.09108210481198020e-01, 3.08275688707841278e-01, 3.07445071343724619e-01,
3.06616350476806021e-01, 3.05789518254228898e-01, 3.04964566881717913e-01, 3.04141488622982903e-01,
3.03320275799130790e-01, 3.02500920788084882e-01, 3.01683416024012219e-01, 3.00867753996757858e-01,
3.00053927251287211e-01, 2.99241928387134759e-01, 2.98431750057860934e-01, 2.97623384970514993e-01,
2.96816825885105495e-01, 2.96012065614077335e-01, 2.95209097021795430e-01, 2.94407913024035184e-01,
2.93608506587479279e-01, 2.92810870729220685e-01, 2.92014998516272550e-01, 2.91220883065083591e-01,
2.90428517541060027e-01, 2.89637895158093517e-01, 2.88849009178094529e-01, 2.88061852910532212e-01,
2.87276419711979702e-01, 2.86492702985664704e-01, 2.85710696181026347e-01, 2.84930392793277143e-01,
2.84151786362970060e-01, 2.83374870475571639e-01, 2.82599638761039607e-01, 2.81826084893406104e-01,
2.81054202590365954e-01, 2.80283985612869713e-01, 2.79515427764721880e-01, 2.78748522892183881e-01,
2.77983264883581827e-01, 2.77219647668919100e-01, 2.76457665219493387e-01, 2.75697311547518642e-01,
2.74938580705751112e-01, 2.74181466787120187e-01, 2.73425963924363691e-01, 2.72672066289666837e-01,
2.71919768094306458e-01, 2.71169063588298620e-01, 2.70419947060050680e-01, 2.69672412836017727e-01,
2.68926455280362187e-01, 2.68182068794618533e-01, 2.67439247817361059e-01, 2.66697986823875688e-01,
2.65958280325835905e-01, 2.65220122870982233e-01, 2.64483509042805098e-01, 2.63748433460231968e-01,
2.63014890777317656e-01, 2.62282875682938232e-01, 2.61552382900488434e-01, 2.60823407187582790e-01,
2.60095943335759638e-01, 2.59369986170189082e-01, 2.58645530549383607e-01, 2.57922571364912756e-01,
2.57201103541120246e-01, 2.56481122034844689e-01, 2.55762621835143422e-01, 2.55045597963019399e-01,
2.54330045471150956e-01, 2.53615959443624861e-01, 2.52903334995672191e-01, 2.52192167273407042e-01,
2.51482451453568345e-01, 2.50774182743264407e-01, 2.50067356379720329e-01, 2.49361967630027936e-01,
2.48658011790898997e-01, 2.47955484188420588e-01, 2.47254380177813426e-01, 2.46554695143192754e-01,
2.45856424497331832e-01, 2.45159563681428050e-01, 2.44464108164871408e-01, 2.43770053445015733e-01,
2.43077395046952194e-01, 2.42386128523285255e-01, 2.41696249453911050e-01, 2.41007753445798334e-01,
2.40320636132771354e-01, 2.39634893175295433e-01, 2.38950520260264615e-01, 2.38267513100791695e-01,
2.37585867436000298e-01, 2.36905579030819380e-01, 2.36226643675779668e-01, 2.35549057186812488e-01,
2.34872815405050478e-01, 2.34197914196630524e-01, 2.33524349452498781e-01, 2.32852117088217542e-01,
2.32181213043774398e-01, 2.31511633283393192e-01, 2.30843373795346946e-01, 2.30176430591772813e-01,
2.29510799708488894e-01, 2.28846477204812881e-01, 2.28183459163382785e-01, 2.27521741689979184e-01,
2.26861320913349623e-01, 2.26202192985034550e-01, 2.25544354079195181e-01, 2.24887800392443160e-01,
2.24232528143671755e-01, 2.23578533573888932e-01, 2.22925812946052210e-01, 2.22274362544904819e-01,
2.21624178676814054e-01, 2.20975257669610708e-01, 2.20327595872430559e-01, 2.19681189655557219e-01,
2.19036035410266589e-01, 2.18392129548672931e-01, 2.17749468503576488e-01, 2.17108048728312547e-01,
2.16467866696602063e-01, 2.15828918902403716e-01, 2.15191201859767589e-01, 2.14554712102690087e-01,
2.13919446184970413e-01, 2.13285400680068571e-01, 2.12652572180964394e-01, 2.12020957300018492e-01,
2.11390552668834081e-01, 2.10761354938120510e-01, 2.10133360777557926e-01, 2.09506566875663353e-01,
2.08880969939658101e-01, 2.08256566695336348e-01, 2.07633353886935185e-01, 2.07011328277005746e-01,
2.06390486646285676e-01, 2.05770825793572948e-01, 2.05152342535600635e-01, 2.04535033706913116e-01,
2.03918896159743457e-01, 2.03303926763891835e-01, 2.02690122406605278e-01, 2.02077479992458509e-01,
2.01465996443235956e-01, 2.00855668697814765e-01, 2.00246493712049251e-01, 1.99638468458656043e-01,
1.99031589927100627e-01, 1.98425855123484901e-01, 1.97821261070435711e-01, 1.97217804806994551e-01,
1.96615483388508316e-01, 1.96014293886520946e-01, 1.95414233388666342e-01, 1.94815298998562036e-01,
1.94217487835704111e-01, 1.93620797035362863e-01, 1.93025223748479718e-01, 1.92430765141564847e-01,
1.91837418396596043e-01, 1.91245180710918228e-01, 1.90654049297144190e-01, 1.90064021383056059e-01,
1.89475094211507850e-01, 1.88887265040328739e-01, 1.88300531142227445e-01, 1.87714889804697388e-01,
1.87130338329922818e-01, 1.86546874034685645e-01, 1.85964494250273338e-01, 1.85383196322387617e-01,
1.84802977611053965e-01, 1.84223835490531979e-01, 1.83645767349226580e-01, 1.83068770589600083e-01,
1.82492842628085017e-01, 1.81917980894997805e-01, 1.81344182834453138e-01, 1.80771445904279376e-01,
1.80199767575934450e-01, 1.79629145334422785e-01, 1.79059576678212762e-01, 1.78491059119155221e-01,
1.77923590182402391e-01, 1.77357167406327926e-01, 1.76791788342447298e-01, 1.76227450555339282e-01,
1.75664151622567960e-01, 1.75101889134605421e-01, 1.74540660694755351e-01, 1.73980463919077122e-01,
1.73421296436310768e-01, 1.72863155887802461e-01, 1.72306039927430904e-01, 1.71749946221534167e-01,
1.71194872448837326e-01, 1.70640816300380777e-01, 1.70087775479449227e-01, 1.69535747701501122e-01,
1.68984730694099083e-01, 1.68434722196840664e-01, 1.67885719961290036e-01, 1.67337721750909912e-01,
1.66790725340994539e-01, 1.66244728518602974e-01, 1.65699729082493225e-01, 1.65155724843056739e-01,
1.64612713622253659e-01, 1.64070693253548755e-01, 1.63529661581847757e-01, 1.62989616463434350e-01,
1.62450555765907778e-01, 1.61912477368121061e-01, 1.61375379160119653e-01, 1.60839259043080829e-01,
1.60304114929253477e-01, 1.59769944741898540e-01, 1.59236746415230057e-01, 1.58704517894356606e-01,
1.58173257135223455e-01, 1.57642962104555084e-01, 1.57113630779798369e-01, 1.56585261149066346e-01,
1.56057851211082205e-01, 1.55531398975124219e-01, 1.55005902460970874e-01, 1.54481359698846660e-01,
1.53957768729368255e-01, 1.53435127603491456e-01, 1.52913434382458246e-01, 1.52392687137744731e-01,
1.51872883951009285e-01, 1.51354022914041347e-01, 1.50836102128710653e-01, 1.50319119706916915e-01,
1.49803073770540085e-01, 1.49287962451390921e-01, 1.48773783891162192e-01, 1.48260536241380331e-01,
1.47748217663357356e-01, 1.47236826328143555e-01, 1.46726360416480378e-01, 1.46216818118753922e-01,
1.45708197634948772e-01, 1.45200497174602400e-01, 1.44693714956759950e-01, 1.44187849209929358e-01,
1.43682898172037166e-01, 1.43178860090384524e-01, 1.42675733221603707e-01, 1.42173515831615110e-01,
1.41672206195584655e-01, 1.41171802597881535e-01, 1.40672303332036508e-01, 1.40173706700700479e-01,
1.39676011015603591e-01, 1.39179214597514728e-01, 1.38683315776201327e-01, 1.38188312890389714e-01,
1.37694204287725830e-01, 1.37200988324736262e-01, 1.36708663366789746e-01, 1.36217227788059086e-01,
1.35726679971483494e-01, 1.35237018308731111e-01, 1.34748241200162266e-01, 1.34260347054792839e-01,
1.33773334290258061e-01, 1.33287201332776828e-01, 1.32801946617116334e-01, 1.32317568586556911e-01,
1.31834065692857549e-01, 1.31351436396221627e-01, 1.30869679165262903e-01, 1.30388792476972104e-01,
1.29908774816683836e-01, 1.29429624678043642e-01, 1.28951340562975747e-01, 1.28473920981650974e-01,
1.27997364452455020e-01, 1.27521669501957147e-01, 1.27046834664879288e-01, 1.26572858484065381e-01,
1.26099739510451114e-01, 1.25627476303034058e-01, 1.25156067428844164e-01, 1.24685511462914539e-01,
1.24215806988252561e-01, 1.23746952595811477e-01, 1.23278946884462240e-01, 1.22811788460965687e-01,
1.22345475939945103e-01, 1.21880007943859128e-01, 1.21415383102975005e-01, 1.20951600055342126e-01,
1.20488657446765998e-01, 1.20026553930782431e-01, 1.19565288168632225e-01, 1.19104858829236049e-01,
1.18645264589169755e-01, 1.18186504132639911e-01, 1.17728576151459846e-01, 1.17271479345025839e-01,
1.16815212420293774e-01, 1.16359774091756102e-01, 1.15905163081419044e-01, 1.15451378118780271e-01,
1.14998417940806799e-01, 1.14546281291913271e-01, 1.14094966923940527e-01, 1.13644473596134554e-01,
1.13194800075125751e-01, 1.12745945134908446e-01, 1.12297907556820870e-01, 1.11850686129525353e-01,
1.11404279648988899e-01, 1.10958686918464083e-01, 1.10513906748470225e-01, 1.10069937956774958e-01,
1.09626779368376101e-01, 1.09184429815483833e-01, 1.08742888137503227e-01, 1.08302153181017047e-01,
1.07862223799768980e-01, 1.07423098854647084e-01, 1.06984777213667628e-01, 1.06547257751959223e-01,
1.06110539351747316e-01, 1.05674620902338937e-01, 1.05239501300107896e-01, 1.04805179448480196e-01,
1.04371654257919791e-01, 1.03938924645914740e-01, 1.03506989536963606e-01, 1.03075847862562214e-01,
1.02645498561190804e-01, 1.02215940578301370e-01, 1.01787172866305481e-01, 1.01359194384562340e-01,
1.00932004099367173e-01, 1.00505600983940041e-01, 1.00079984018414864e-01, 9.96551521898288717e-02,
9.92311044921123514e-02, 9.88078399260787221e-02, 9.83853574994150021e-02, 9.79636562266725242e-02,
9.75427351292580541e-02, 9.71225932354252697e-02, 9.67032295802665309e-02, 9.62846432057049978e-02,
9.58668331604871221e-02, 9.54497985001754728e-02, 9.50335382871419082e-02, 9.46180515905610808e-02,
9.42033374864043455e-02, 9.37893950574339580e-02, 9.33762233931976493e-02, 9.29638215900236153e-02,
9.25521887510157154e-02, 9.21413239860492256e-02, 9.17312264117668141e-02, 9.13218951515749056e-02,
9.09133293356404609e-02, 9.05055281008880913e-02, 9.00984905909975181e-02, 8.96922159564014637e-02,
8.92867033542838889e-02, 8.88819519485785914e-02, 8.84779609099681924e-02, 8.80747294158835542e-02,
8.76722566505035161e-02, 8.72705418047550613e-02, 8.68695840763138577e-02, 8.64693826696051882e-02,
8.60699367958053380e-02, 8.56712456728432742e-02, 8.52733085254028106e-02, 8.48761245849251889e-02,
8.44796930896120207e-02, 8.40840132844286464e-02, 8.36890844211079649e-02, 8.32949057581546387e-02,
8.29014765608497295e-02, 8.25087961012557769e-02, 8.21168636582223360e-02, 8.17256785173918338e-02,
8.13352399712060775e-02, 8.09455473189129859e-02, 8.05565998665739302e-02, 8.01683969270714503e-02,
7.97809378201174424e-02, 7.93942218722618331e-02, 7.90082484169017385e-02, 7.86230167942910257e-02,
7.82385263515504165e-02, 7.78547764426780614e-02, 7.74717664285606011e-02, 7.70894956769847123e-02,
7.67079635626491813e-02, 7.63271694671774775e-02, 7.59471127791307710e-02, 7.55677928940216292e-02,
7.51892092143280344e-02, 7.48113611495081349e-02, 7.44342481160153585e-02, 7.40578695373142193e-02,
7.36822248438965405e-02, 7.33073134732983717e-02, 7.29331348701173499e-02, 7.25596884860307267e-02,
7.21869737798139233e-02, 7.18149902173596810e-02, 7.14437372716978381e-02, 7.10732144230156876e-02,
7.07034211586789196e-02, 7.03343569732532981e-02, 6.99660213685267818e-02, 6.95984138535324787e-02,
6.92315339445721267e-02, 6.88653811652402553e-02, 6.84999550464490820e-02, 6.81352551264539502e-02,
6.77712809508796143e-02, 6.74080320727470927e-02, 6.70455080525013125e-02, 6.66837084580394202e-02,
6.63226328647398555e-02, 6.59622808554921891e-02, 6.56026520207276115e-02, 6.52437459584503249e-02,
6.48855622742696148e-02, 6.45281005814327541e-02, 6.41713605008586985e-02, 6.38153416611725727e-02,
6.34600436987410726e-02, 6.31054662577085895e-02, 6.27516089900343049e-02, 6.23984715555300773e-02,
6.20460536218992512e-02, 6.16943548647763684e-02, 6.13433749677678025e-02, 6.09931136224933015e-02,
6.06435705286285237e-02, 6.02947453939484887e-02, 5.99466379343720490e-02, 5.95992478740073744e-02,
5.92525749451984080e-02, 5.89066188885724179e-02, 5.85613794530885912e-02, 5.82168563960876531e-02,
5.78730494833426812e-02, 5.75299584891109522e-02, 5.71875831961869974e-02, 5.68459233959567323e-02,
5.65049788884528989e-02, 5.61647494824115828e-02, 5.58252349953300489e-02, 5.54864352535257913e-02,
5.51483500921969017e-02, 5.48109793554836730e-02, 5.44743228965316323e-02, 5.41383805775558433e-02,
5.38031522699066317e-02, 5.34686378541367399e-02, 5.31348372200699109e-02, 5.28017502668709296e-02,
5.24693769031171764e-02, 5.21377170468717635e-02, 5.18067706257581417e-02, 5.14765375770364006e-02,
5.11470178476811782e-02, 5.08182113944611880e-02, 5.04901181840205424e-02, 5.01627381929617491e-02,
4.98360714079305323e-02, 4.95101178257024294e-02, 4.91848774532713182e-02, 4.88603503079397886e-02,
4.85365364174114786e-02, 4.82134358198854224e-02, 4.78910485641523970e-02, 4.75693747096933781e-02,
4.72484143267801471e-02, 4.69281674965780077e-02, 4.66086343112508128e-02, 4.62898148740682261e-02,
4.59717092995153495e-02, 4.56543177134047240e-02, 4.53376402529907935e-02, 4.50216770670868807e-02,
4.47064283161847098e-02, 4.43918941725766139e-02, 4.40780748204803868e-02, 4.37649704561669309e-02,
4.34525812880907358e-02, 4.31409075370232584e-02, 4.28299494361892713e-02, 4.25197072314063076e-02,
4.22101811812271577e-02, 4.19013715570856829e-02, 4.15932786434458149e-02, 4.12859027379540375e-02,
4.09792441515952691e-02, 4.06733032088523253e-02, 4.03680802478690523e-02, 4.00635756206172006e-02,
3.97597896930671491e-02, 3.94567228453625921e-02, 3.91543754719992920e-02, 3.88527479820080165e-02,
3.85518407991417636e-02, 3.82516543620673721e-02, 3.79521891245617124e-02, 3.76534455557125114e-02,
3.73554241401239523e-02, 3.70581253781272699e-02, 3.67615497859963350e-02, 3.64656978961685058e-02,
3.61705702574708632e-02, 3.58761674353518939e-02, 3.55824900121189669e-02, 3.52895385871815903e-02,
3.49973137773007120e-02, 3.47058162168442721e-02, 3.44150465580491047e-02, 3.41250054712894726e-02,
3.38356936453523896e-02, 3.35471117877199146e-02, 3.32592606248587400e-02, 3.29721409025171622e-02,
3.26857533860298033e-02, 3.24000988606301873e-02, 3.21151781317715740e-02, 3.18309920254561604e-02,
3.15475413885731165e-02, 3.12648270892454880e-02, 3.09828500171865015e-02, 3.07016110840654458e-02,
3.04211112238834853e-02, 3.01413513933597166e-02, 2.98623325723278814e-02, 2.95840557641440134e-02,
2.93065219961054502e-02, 2.90297323198815890e-02, 2.87536878119568143e-02, 2.84783895740859556e-02,
2.82038387337628441e-02, 2.79300364447022977e-02, 2.76569838873361208e-02, 2.73846822693235847e-02,
2.71131328260769133e-02, 2.68423368213023435e-02, 2.65722955475573355e-02, 2.63030103268245413e-02,
2.60344825111031332e-02, 2.57667134830182089e-02, 2.54997046564489245e-02, 2.52334574771760807e-02,
2.49679734235499634e-02, 2.47032540071791748e-02, 2.44393007736413391e-02, 2.41761153032165471e-02,
2.39136992116444173e-02, 2.36520541509058040e-02, 2.33911818100301337e-02, 2.31310839159293881e-02,
2.28717622342599486e-02, 2.26132185703133703e-02, 2.23554547699374093e-02, 2.20984727204885413e-02,
2.18422743518173722e-02, 2.15868616372883598e-02, 2.13322365948354006e-02, 2.10784012880548231e-02,
2.08253578273375518e-02, 2.05731083710421377e-02, 2.03216551267106187e-02, 2.00710003523291297e-02,
1.98211463576354052e-02, 1.95720955054753849e-02, 1.93238502132112300e-02, 1.90764129541833305e-02,
1.88297862592288305e-02, 1.85839727182595428e-02, 1.83389749819021577e-02, 1.80947957632039656e-02,
1.78514378394073192e-02, 1.76089040537964724e-02, 1.73671973176205453e-02, 1.71263206120965891e-02,
1.68862769904970615e-02, 1.66470695803262285e-02, 1.64087015855903760e-02, 1.61711762891669809e-02,
1.59344970552783502e-02, 1.56986673320756928e-02, 1.54636906543398518e-02, 1.52295706463055156e-02,
1.49963110246161165e-02, 1.47639156014171755e-02, 1.45323882875964035e-02, 1.43017330961795162e-02,
1.40719541458913487e-02, 1.38430556648925888e-02, 1.36150419947032898e-02, 1.33879175943251039e-02,
1.31616870445752116e-02, 1.29363550526458981e-02, 1.27119264569048856e-02, 1.24884062319527527e-02,
1.22657994939551988e-02, 1.20441115062693074e-02, 1.18233476853847295e-02, 1.16035136072024422e-02,
1.13846150136758077e-02, 1.11666578198408532e-02, 1.09496481212651656e-02, 1.07335922019475491e-02,
1.05184965427035856e-02, 1.03043678300756818e-02, 1.00912129658099151e-02, 9.87903907694620380e-03,
9.66785352657308084e-03, 9.45766392530357318e-03, 9.24847814353469029e-03, 9.04030432455965420e-03,
8.83315089860958746e-03, 8.62702659790985822e-03, 8.42194047284596826e-03, 8.21790190934484042e-03,
8.01492064758981861e-03, 7.81300680220201396e-03, 7.61217088403687899e-03, 7.41242382376369920e-03,
7.21377699741718691e-03, 7.01624225413531848e-03, 6.81983194632634643e-03, 6.62455896254145714e-03,
6.43043676336860388e-03, 6.23747942070876013e-03, 6.04570166084955358e-03, 5.85511891181457987e-03,
5.66574735554167472e-03, 5.47760398553264957e-03, 5.29070667072356181e-03, 5.10507422645257135e-03,
4.92072649355685798e-03, 4.73768442681749176e-03, 4.55597019419978853e-03, 4.37560728861750067e-03,
4.19662065429614577e-03, 4.01903683024253458e-03, 3.84288411386877853e-03, 3.66819274850284718e-03,
3.49499513938897593e-03, 3.32332610390146744e-03, 3.15322316314992773e-03, 2.98472688406265906e-03,
2.81788128356848981e-03, 2.65273430990268372e-03, 2.48933842070269947e-03, 2.32775128397775307e-03,
2.16803663706178042e-03, 2.01026535158670356e-03, 1.85451677141087470e-03, 1.70088041870498962e-03,
1.54945820679059211e-03, 1.40036736693642224e-03, 1.25374440855476944e-03, 1.10975062326345393e-03,
9.68579983952068489e-04, 8.30470932584022304e-04, 6.95724848635113999e-04, 5.64736848623737615e-04,
4.38051607567338302e-04, 3.16477095370594789e-04, 2.01361696027663157e-04, 9.55209899796100552e-05,
} | expo.go | 0.691393 | 0.465387 | expo.go | starcoder |
package congestion
// WindowedFilter tracks the best, second-best, and third-best int64 samples
// observed within a sliding time window, where "best" is decided by the
// supplied comparator (see MaxFilter and MinFilter).
//
// This is a Go port of the C++ template described below; in this port the
// sample type T and both time types are all fixed to int64.
//
// For example, a min filter using QuicTime as the time type:
//   WindowedFilter<T, MinFilter<T>, QuicTime, QuicTime::Delta> ObjectName;
// A max filter using 64-bit integers as the time type:
//   WindowedFilter<T, MaxFilter<T>, uint64_t, int64_t> ObjectName;
// Specifically, this template takes four arguments:
// 1. T -- type of the measurement that is being filtered.
// 2. Compare -- MinFilter<T> or MaxFilter<T>, depending on the type of filter
// desired.
// 3. TimeT -- the type used to represent timestamps.
// 4. TimeDeltaT -- the type used to represent continuous time intervals between
// two timestamps. Has to be the type of (a - b) if both |a| and |b| are
// of type TimeT.
type WindowedFilter struct {
	// Time length of window, in the same (opaque) units as the timestamps
	// passed to Update.
	windowLength int64
	// estimates holds the best, second-best, and third-best samples, in that
	// order, each tagged with the time it was recorded (always length 3).
	estimates []Sample
	// comparator reports whether its first argument is at least as "good" as
	// its second; it selects min- vs. max-filter behavior.
	comparator func(int64, int64) bool
}
// Sample is a single filtered measurement tagged with the time at which it
// was recorded.
type Sample struct {
	sample int64 // the measured value
	time   int64 // when the value was recorded
}
// MaxFilter reports whether a is greater than or equal to b. Pass it as the
// comparator to NewWindowedFilter to track a windowed maximum.
func MaxFilter(a, b int64) bool {
	return b <= a
}
// MinFilter reports whether a is less than or equal to b. Pass it as the
// comparator to NewWindowedFilter to track a windowed minimum.
func MinFilter(a, b int64) bool {
	return b >= a
}
// NewWindowedFilter returns a filter over a window of the given time length,
// using comparator (MaxFilter or MinFilter) to rank samples.
func NewWindowedFilter(windowLength int64, comparator func(int64, int64) bool) *WindowedFilter {
	f := &WindowedFilter{
		windowLength: windowLength,
		comparator:   comparator,
	}
	f.estimates = make([]Sample, 3)
	return f
}
// SetWindowLength changes the window length. It does not update any current
// samples; existing estimates expire against the new length on the next
// Update call.
func (f *WindowedFilter) SetWindowLength(length int64) {
	f.windowLength = length
}
// GetBest returns the best sample currently in the window, as ranked by the
// filter's comparator.
func (f *WindowedFilter) GetBest() int64 {
	best := f.estimates[0]
	return best.sample
}
// GetSecondBest returns the best sample recorded since the current best was
// taken.
func (f *WindowedFilter) GetSecondBest() int64 {
	second := f.estimates[1]
	return second.sample
}
// GetThirdBest returns the best sample recorded since the current second-best
// was taken.
func (f *WindowedFilter) GetThirdBest() int64 {
	third := f.estimates[2]
	return third.sample
}
// Update records a new sample observed at the given time.
//
// Invariant maintained across calls: estimates[0] is the best sample (per
// the comparator) in the window, estimates[1] is the best sample seen since
// estimates[0] was recorded, and estimates[2] is the best sample seen since
// estimates[1] was recorded. Timestamps are assumed to be monotonically
// non-decreasing across calls — TODO confirm against callers.
func (f *WindowedFilter) Update(sample int64, time int64) {
	// Reset all estimates if: the filter has never been seeded
	// (estimates[0].time == 0 is used as the "uninitialized" sentinel), the
	// new sample is the new overall best, or even the newest recorded
	// estimate is older than the window.
	if f.estimates[0].time == 0 || f.comparator(sample, f.estimates[0].sample) || (time-f.estimates[2].time) > f.windowLength {
		f.Reset(sample, time)
		return
	}
	// The sample is not the overall best, but it may displace the second-best
	// (which also resets the third-best) or only the third-best.
	if f.comparator(sample, f.estimates[1].sample) {
		f.estimates[1].sample = sample
		f.estimates[1].time = time
		f.estimates[2].sample = sample
		f.estimates[2].time = time
	} else if f.comparator(sample, f.estimates[2].sample) {
		f.estimates[2].sample = sample
		f.estimates[2].time = time
	}
	// Expire and update estimates as necessary.
	if time-f.estimates[0].time > f.windowLength {
		// The best estimate hasn't been updated for an entire window, so promote
		// second and third best estimates.
		f.estimates[0].sample = f.estimates[1].sample
		f.estimates[0].time = f.estimates[1].time
		f.estimates[1].sample = f.estimates[2].sample
		f.estimates[1].time = f.estimates[2].time
		f.estimates[2].sample = sample
		f.estimates[2].time = time
		// Need to iterate one more time. Check if the new best estimate is
		// outside the window as well, since it may also have been recorded a
		// long time ago. Don't need to iterate once more since we cover that
		// case at the beginning of the method.
		if time-f.estimates[0].time > f.windowLength {
			f.estimates[0].sample = f.estimates[1].sample
			f.estimates[0].time = f.estimates[1].time
			f.estimates[1].sample = f.estimates[2].sample
			f.estimates[1].time = f.estimates[2].time
		}
		return
	}
	// Best is still fresh; age out duplicated lower-ranked estimates so they
	// are re-sampled from later portions of the window.
	if f.estimates[1].sample == f.estimates[0].sample && time-f.estimates[1].time > f.windowLength>>2 {
		// A quarter of the window has passed without a better sample, so the
		// second-best estimate is taken from the second quarter of the window.
		f.estimates[1].sample = sample
		f.estimates[1].time = time
		f.estimates[2].sample = sample
		f.estimates[2].time = time
		return
	}
	if f.estimates[2].sample == f.estimates[1].sample && time-f.estimates[2].time > f.windowLength>>1 {
		// We've passed a half of the window without a better estimate, so take
		// a third-best estimate from the second half of the window.
		f.estimates[2].sample = sample
		f.estimates[2].time = time
	}
}
func (f *WindowedFilter) Reset(newSample int64, newTime int64) {
f.estimates[0].sample = newSample
f.estimates[0].time = newTime
f.estimates[1].sample = newSample
f.estimates[1].time = newTime
f.estimates[2].sample = newSample
f.estimates[2].time = newTime
} | internal/congestion/windowed_filter.go | 0.887519 | 0.659693 | windowed_filter.go | starcoder |
package types
// Scannable is a thing that knows how to describe itself to a Scanner for
// construction in a SQL query string and query argument list.
type Scannable interface {
// Scan takes two slices and a pointer to an int. The first slice is a
// slice of bytes that the implementation should copy its string
// representation to and the other slice is a slice of interface{} values
// that the element should add its arguments to. The pointer to an int is
// the index of the current argument to be processed. The method returns a
// single int, the number of bytes written to the buffer.
Scan(Scanner, []byte, []interface{}, *int) int
}
// Element adds a Size and ArgCount method to the Scannable interface,
// letting callers pre-size the SQL buffer and argument list before scanning.
type Element interface {
	Scannable
	// Size returns the number of bytes that the scannable element would
	// consume as a SQL string.
	Size(Scanner) int
	// ArgCount returns the number of interface{} arguments that the element
	// will add to the slice of interface{} arguments passed to Scan().
	ArgCount() int
}
// Sortable is an Element that knows whether it is to be part of an ORDER BY
// clause.
type Sortable interface {
	Element
	// IsAsc returns true if the element is to be sorted in ascending order.
	IsAsc() bool
}
// Projection is something that produces a scalar value: a column, column
// definition, function, etc. When appearing in the SELECT clause's projection
// list, the projection will output itself using the "AS alias" extended
// notation. When outputting in GROUP BY, ORDER BY or ON clauses, the
// projection will not include the alias extension.
type Projection interface {
	Element
	// From returns the Selection (table, view, subselect, ...) this
	// projection originates from.
	From() Selection
	// As returns the projection aliased as another name.
	As(alias string) Projection
	// DisableAliasScan disables the outputting of the "AS alias" extended
	// output. It returns a function that re-enables that output, suitable
	// for deferred restoration.
	DisableAliasScan() func()
	// Asc returns an Element that describes a sort on the projection in
	// ascending order.
	Asc() Sortable
	// Desc returns an Element that describes a sort on the projection in
	// descending order.
	Desc() Sortable
}
// Selection is something that produces rows. A table, table definition,
// view, subselect, etc.
type Selection interface {
	Element
	// Projections returns the scalar projections this selection exposes.
	Projections() []Projection
}
package geometry
import (
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/algebra"
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/canvas"
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/geometry/primitives"
"log"
"math"
"reflect"
)
// World manages the world space of the Shape(s) inside of it and the light
// sources illuminating it.
type World struct {
	Objects []primitives.Shape       // all renderable shapes in the scene
	Lights  []*canvas.PointLight     // all point light sources in the scene
}
// NewDefaultWorld creates a new default world containing one white point
// light at (-10, 10, -10) and two spheres: a unit sphere with a greenish
// diffuse material, and an untextured inner sphere scaled to half size.
func NewDefaultWorld() *World {
	light := canvas.NewPointLight(&canvas.Color{1, 1, 1}, algebra.NewPoint(-10, 10, -10))

	material := canvas.NewDefaultMaterial()
	material.Color = &canvas.Color{0.8, 1.0, 0.6}
	material.Diffuse = 0.7
	material.Specular = 0.2

	outer := primitives.NewSphere(nil)
	outer.SetMaterial(material)
	inner := primitives.NewSphere(algebra.ScalingMatrix(0.5, 0.5, 0.5))

	return &World{
		Objects: []primitives.Shape{outer, inner},
		Lights:  []*canvas.PointLight{light},
	}
}
// Intersect intersects all objects in the World space with the given ray,
// accumulating every hit into a single Intersections collection.
func (w *World) Intersect(r *algebra.Ray) *primitives.Intersections {
	intersections := primitives.NewIntersections()
	for _, shape := range w.Objects {
		if err := intersections.Intersect(shape, r); err != nil {
			panic(err)
		}
	}
	return intersections
}
// ShadeHit determines the color at some valid ray intersection (hit). For
// each light it sums the direct surface lighting with the reflected and
// refracted contributions; depth bounds the recursion of the latter two.
func (w World) ShadeHit(comps Comps, depth int) *canvas.Color {
	color := &canvas.Color{0, 0, 0}
	inShadow := w.PointIsShadowed(comps.OverPoint)
	for _, l := range w.Lights {
		pattern := comps.Object.GetMaterial().Pattern
		var patternColor *canvas.Color
		if pattern != nil {
			patternColor = primitives.PatternAtObject(comps.Object, pattern, comps.Point)
		} else {
			patternColor = nil
		}
		lightingColor := canvas.Lighting(comps.Object.GetMaterial(), patternColor, l, comps.Point, comps.Eye, comps.Normal, inShadow)
		color = color.Add(lightingColor)
		reflected := w.ReflectedColor(&comps, depth)
		refracted := w.RefractedColor(&comps, depth)
		material := comps.Object.GetMaterial()
		if material.Reflective > 0 && material.Transparency > 0 {
			// Material is both reflective and transparent: blend the two
			// contributions using the Schlick reflectance approximation.
			reflectance := Schlick(&comps)
			color = color.Add(reflected.ScalarMult(reflectance))
			color = color.Add(refracted.ScalarMult(1 - reflectance))
		} else {
			color = color.Add(refracted)
			color = color.Add(reflected)
		}
	}
	return color
}
// ColorAt returns the color where the ray intersects the world (black when
// nothing is hit), with a maximum recursive depth of depth.
func (w World) ColorAt(ray *algebra.Ray, depth int) *canvas.Color {
	intersections := w.Intersect(ray)
	hit := intersections.Hit()
	if hit == nil {
		return &canvas.Color{0, 0, 0}
	}
	comps := PrepareComputations(hit, ray, intersections)
	return w.ShadeHit(*comps, depth)
}
// PointIsShadowed returns whether or not the point in question is in the
// shadow of some other object, i.e. some object intersects the ray from the
// point towards a light source closer than the light itself.
func (w World) PointIsShadowed(p *algebra.Vector) bool {
	for i := 0; i < len(w.Lights); i++ {
		v, err := w.Lights[i].Position.Subtract(p)
		if err != nil {
			panic(err) // unreachable `return false` after panic removed
		}
		dist := v.Magnitude()
		direction, err := v.Normalize()
		if err != nil {
			panic(err)
		}
		// Build the ray parameters in a fresh slice. Appending directly to
		// p.Get()[:3] could write through the slice's spare capacity into
		// p's backing array (clobbering its 4th/w component) before growing.
		res := make([]float64, 0, 6)
		res = append(res, p.Get()[:3]...)
		res = append(res, direction.Get()[:3]...)
		r := algebra.NewRay(res...)
		if h := w.Intersect(r).Hit(); h != nil && h.T < dist {
			return true
		}
	}
	return false
}
// ReflectedColor determines if there is a reflected color being emitted at
// some ray intersection. Takes the pre-computed computations at the ray
// intersection (struct Comps). Returns black for non-reflective materials
// or when the recursion budget is exhausted.
func (w *World) ReflectedColor(comps *Comps, depth int) *canvas.Color {
	material := comps.Object.GetMaterial()
	if material.Reflective == 0.0 || depth <= 0 {
		return &canvas.Color{0, 0, 0}
	}
	// Spawn a new ray from just above the surface in the reflect direction.
	params := make([]float64, 0, 6)
	params = append(params, comps.OverPoint.Get()[:3]...)
	params = append(params, comps.Reflect.Get()[:3]...)
	reflectRay := algebra.NewRay(params...)
	return w.ColorAt(reflectRay, depth-1).ScalarMult(material.Reflective)
}
// RefractedColor determines if there is a refracted color being emitted at
// some ray intersection. Takes the pre-computed computations at the ray
// intersection (struct Comps). Returns black for opaque materials, when the
// recursion budget is exhausted, or under total internal reflection.
func (w *World) RefractedColor(comps *Comps, depth int) *canvas.Color {
	// Completely opaque object, or no recursion budget left. Use <= (not
	// ==) so that a negative depth terminates too, matching ReflectedColor.
	if comps.Object.GetMaterial().Transparency == 0.0 || depth <= 0 {
		return &canvas.Color{0, 0, 0}
	}
	refractiveRatio := comps.N1 / comps.N2
	cosI, err := algebra.DotProduct(comps.Eye, comps.Normal)
	if err != nil {
		panic(err)
	}
	// Snell's law: sin²(t) = (n1/n2)² · (1 − cos²(i)).
	sin2T := refractiveRatio * refractiveRatio * (1 - cosI*cosI)
	// Total internal reflection occurs.
	if sin2T > 1 {
		return &canvas.Color{0, 0, 0}
	}
	cosT := math.Sqrt(1.0 - sin2T)
	direction, err := comps.Normal.MultScalar(refractiveRatio*cosI - cosT).Subtract(comps.Eye.MultScalar(refractiveRatio))
	if err != nil {
		panic(err)
	}
	// Copy into a fresh slice: appending to comps.UnderPoint.Get()[:3]
	// could overwrite UnderPoint's w component via the shared backing array.
	res := make([]float64, 0, 6)
	res = append(res, comps.UnderPoint.Get()[:3]...)
	res = append(res, direction.Get()[:3]...)
	refractRay := algebra.NewRay(res...)
	return w.ColorAt(refractRay, depth-1).ScalarMult(comps.Object.GetMaterial().Transparency)
}
// Schlick returns the reflectance at a pre-computed ray intersection based
// on the Schlick approximation to the Fresnel equations.
func Schlick(comps *Comps) float64 {
	cos, err := algebra.DotProduct(comps.Eye, comps.Normal)
	if err != nil {
		panic(err)
	}
	if comps.N1 > comps.N2 {
		// Leaving a denser medium: check for total internal reflection,
		// otherwise use the cosine of the refracted angle instead.
		n := comps.N1 / comps.N2
		sin2T := n * n * (1 - cos*cos)
		if sin2T > 1.0 {
			return 1.0
		}
		cosT := math.Sqrt(1.0 - sin2T)
		cos = cosT
	}
	// r0 is the reflectance at normal incidence.
	r0 := (comps.N1 - comps.N2) / (comps.N1 + comps.N2)
	r0 *= r0
	return r0 + (1-r0)*math.Pow((1-cos), 5)
}
// Comps manages the precomputed state of the necessary vectors for lighting.
type Comps struct {
	T          float64          // parametric distance along the ray to the hit
	Object     primitives.Shape // the shape that was hit
	Point      *algebra.Vector  // world-space position of the hit
	OverPoint  *algebra.Vector  // Point nudged along the normal (avoids shadow acne)
	UnderPoint *algebra.Vector  // Point nudged beneath the surface (refraction origin)
	N1         float64          // refractive index of the medium being exited
	N2         float64          // refractive index of the medium being entered
	Eye        *algebra.Vector  // direction from the hit back towards the eye
	Normal     *algebra.Vector  // surface normal at the hit (flipped when Inside)
	Reflect    *algebra.Vector  // ray direction reflected about the normal
	Inside     bool             // whether the ray originated inside the object
}
// PrepareComputations precomputes the lighting state (see Comps) for a
// given intersection of a ray, using the full intersection collection to
// derive the refractive indices on either side of the hit.
func PrepareComputations(intersection *primitives.Intersection, ray *algebra.Ray, is *primitives.Intersections) *Comps {
	position := ray.Position(intersection.T)
	c := &Comps{T: intersection.T, Object: intersection.Object, Point: position,
		Eye: ray.Get()["direction"].Negate(), Normal: primitives.NormalAt(intersection.Object, position, intersection)}
	// If the normal points away from the eye, the hit is on the inside of
	// the object: flip the normal so lighting still works.
	if d, err := algebra.DotProduct(c.Normal, c.Eye); err != nil {
		panic(err)
	} else if d < 0 {
		c.Inside = true
		c.Normal = c.Normal.Negate()
	} else {
		c.Inside = false
	}
	// Nudge the hit point slightly above/below the surface to avoid
	// self-intersection (acne) in shadow and refraction rays.
	EPSILON := 0.0001
	overPoint, err := c.Point.Add(c.Normal.MultScalar(EPSILON))
	if err != nil {
		panic(err)
	}
	c.OverPoint = overPoint
	underPoint, err := c.Point.Subtract(c.Normal.MultScalar(EPSILON))
	if err != nil {
		panic(err)
	}
	c.UnderPoint = underPoint
	direction := ray.Get()["direction"]
	c.Reflect = direction.Reflect(c.Normal)
	determineRefractiveIndexes(c, intersection, is)
	return c
}
// Helper functions

// determineRefractiveIndexes fills in comps.N1 and comps.N2 (the refractive
// indices of the media exited and entered at the hit). It walks all
// intersections in T order, maintaining `containers` as the stack of
// objects the ray is currently inside: entering an object pushes it,
// leaving pops it. At the hit itself, N1 is the index of the innermost
// containing object before the transition and N2 after it (1.0 = vacuum).
func determineRefractiveIndexes(comps *Comps, hit *primitives.Intersection, is *primitives.Intersections) {
	if is == nil || len(is.GetHits().Get()) == 0 {
		log.Print("Warning: no intersections provided, this should only occur during unit testing")
		comps.N1 = 1.0
		comps.N2 = 1.0
		return
	} // this is only possible if calling the PrepareComputations method directly with nil in testing
	containers := make([]*primitives.Intersection, 0, 0)
	allIntersections := getSortedIntersections(is)
	for i := 0; i < len(allIntersections); i++ {
		if intersectionEquals(allIntersections[i], hit) {
			if len(containers) == 0 {
				comps.N1 = 1.0
			} else {
				comps.N1 = containers[len(containers)-1].Object.GetMaterial().RefractiveIndex
			}
		}
		// Toggle the object's membership in the containers stack.
		if index, found := has(containers, allIntersections[i]); found {
			containers = append(containers[:index], containers[index+1:]...) //remove object
		} else {
			containers = append(containers, allIntersections[i])
		}
		if intersectionEquals(allIntersections[i], hit) {
			if len(containers) == 0 {
				comps.N2 = 1.0
			} else {
				comps.N2 = containers[len(containers)-1].Object.GetMaterial().RefractiveIndex
			}
			break
		}
	}
}
// intersectionEquals reports whether two intersections represent the same
// hit: identical parametric distance T and the same underlying object.
func intersectionEquals(a *primitives.Intersection, b *primitives.Intersection) bool {
	// Interface equality already requires identical dynamic types, so the
	// previous reflect.TypeOf(a.Object) == reflect.TypeOf(b.Object) check
	// was redundant.
	return a.T == b.T && a.Object == b.Object
}
// has returns the index of the first intersection in container whose Object
// matches intersect's Object, and whether one was found.
func has(container []*primitives.Intersection, intersect *primitives.Intersection) (int, bool) {
	// The previous reflect.TypeOf comparison compared the static pointer
	// type *primitives.Intersection against itself — always true — so it
	// has been removed.
	for i, candidate := range container {
		if candidate.Object == intersect.Object {
			return i, true
		}
	}
	return -1, false
}
func getSortedIntersections(is *primitives.Intersections) []*primitives.Intersection {
res := make([]*primitives.Intersection, 0, 0)
ref := is.GetRef().Copy()
hits := is.GetHits().Copy()
intersect := ref.ExtractMin()
for intersect != nil {
res = append(res, intersect)
intersect = ref.ExtractMin()
}
intersect = hits.ExtractMin()
for intersect != nil {
res = append(res, intersect)
intersect = hits.ExtractMin()
}
return res
} | pkg/geometry/world.go | 0.818338 | 0.54825 | world.go | starcoder |
package gofra
import (
"encoding/json"
"fmt"
)
// Anti-aliasing
// https://en.wikipedia.org/wiki/Spatial_anti-aliasing
// https://en.wikipedia.org/wiki/Multisample_anti-aliasing
// https://en.wikipedia.org/wiki/Supersampling
/*
+---+
| |
+---+
4x - 2*2
+---+---+
| | |
+---+---+
| | |
+---+---+
9x - 3*3
+---+---+---+
| | | |
+---+---+---+
| | | |
+---+---+---+
| | | |
+---+---+---+
16x - 4*4
+---+---+---+---+
| | | | |
+---+---+---+---+
| | | | |
+---+---+---+---+
| | | | |
+---+---+---+---+
| | | | |
+---+---+---+---+
25x - 5*5
+---+---+---+---+---+
| | | | | |
+---+---+---+---+---+
| | | | | |
+---+---+---+---+---+
| | | | | |
+---+---+---+---+---+
| | | | | |
+---+---+---+---+---+
| | | | | |
+---+---+---+---+---+
*/
// AntiAliasing selects the supersampling level used when rendering.
type AntiAliasing int

const (
	AA_NONE AntiAliasing = iota
	AA_4X  // 4 samples per pixel (2x2 grid)
	AA_9X  // 9 samples per pixel (3x3 grid)
	AA_16X // 16 samples per pixel (4x4 grid)
	AA_25X // 25 samples per pixel (5x5 grid)
)

// Canonical textual names, shared by String(), the JSON (un)marshallers and
// ParseAntiAliasing.
const (
	name_AA_NONE = "none"
	name_AA_4X   = "4x"
	name_AA_9X   = "9x"
	name_AA_16X  = "16x"
	name_AA_25X  = "25x"
)

// aaNames and aaValues are inverse lookup tables; keep both in sync when
// adding a new anti-aliasing level.
var aaNames = map[AntiAliasing]string{
	AA_NONE: name_AA_NONE,
	AA_4X:   name_AA_4X,
	AA_9X:   name_AA_9X,
	AA_16X:  name_AA_16X,
	AA_25X:  name_AA_25X,
}

var aaValues = map[string]AntiAliasing{
	name_AA_NONE: AA_NONE,
	name_AA_4X:   AA_4X,
	name_AA_9X:   AA_9X,
	name_AA_16X:  AA_16X,
	name_AA_25X:  AA_25X,
}
// String returns the human-readable name of the anti-aliasing mode, or a
// generic "AntiAliasing(n)" form for undefined values.
func (aa AntiAliasing) String() string {
	if name, ok := aaNames[aa]; ok {
		return name
	}
	return fmt.Sprintf("AntiAliasing(%d)", aa)
}
// MarshalJSON encodes the mode as its JSON string name, failing for values
// outside the defined set.
func (aa AntiAliasing) MarshalJSON() ([]byte, error) {
	name, ok := aaNames[aa]
	if !ok {
		return nil, fmt.Errorf("gofra.AntiAliasing.MarshalJSON: undefined value %d", aa)
	}
	return json.Marshal(name)
}
// UnmarshalJSON decodes a JSON string name into the corresponding mode,
// failing for names outside the defined set.
func (aa *AntiAliasing) UnmarshalJSON(data []byte) error {
	var name string
	if err := json.Unmarshal(data, &name); err != nil {
		return err
	}
	parsed, ok := aaValues[name]
	if !ok {
		return fmt.Errorf("gofra.AntiAliasing.UnmarshalJSON: undefined name %q", name)
	}
	*aa = parsed
	return nil
}
func ParseAntiAliasing(s string) (AntiAliasing, error) {
value, ok := aaValues[s]
if !ok {
return 0, fmt.Errorf("gofra.ParseAntiAliasing: undefined name %q", s)
}
return value, nil
} | anti_aliasing.go | 0.608361 | 0.52756 | anti_aliasing.go | starcoder |
package zgeo
import (
"fmt"
"math"
"strconv"
"github.com/torlangballe/zutil/zdict"
"github.com/torlangballe/zutil/zlog"
"github.com/torlangballe/zutil/zstr"
)
// Size holds a 2-dimensional width/height extent.
type Size struct {
	W float64 `json:"w"`
	H float64 `json:"h"`
}

// Sizes is a slice of Size values.
type Sizes []Size

// SizeF creates a Size from float32 w and h.
func SizeF(w, h float32) Size {
	return Size{W: float64(w), H: float64(h)}
}

// SizeI creates a Size from integer w and h.
func SizeI(w, h int) Size {
	return Size{W: float64(w), H: float64(h)}
}

// SizeI64 creates a Size from int64 w and h.
func SizeI64(w, h int64) Size {
	return Size{W: float64(w), H: float64(h)}
}

// SizeBoth uses a for both W and H.
func SizeBoth(a float64) Size {
	return Size{W: a, H: a}
}
// Pos converts a size to a Pos (W becomes X, H becomes Y).
func (s Size) Pos() Pos {
	return Pos{s.W, s.H}
}

// IsNull returns true if both W and H are zero.
func (s Size) IsNull() bool {
	return s.W == 0 && s.H == 0
}

// Set assigns w and h to the size in place.
func (s *Size) Set(w, h float64) {
	s.W = w
	s.H = h
}

// Vertice returns s.H when vertical is true, otherwise s.W.
func (s Size) Vertice(vertical bool) float64 {
	if vertical {
		return s.H
	}
	return s.W
}

// VerticeP returns a pointer to s.H when vertical is true, otherwise to s.W.
func (s *Size) VerticeP(vertical bool) *float64 {
	if vertical {
		return &s.H
	}
	return &s.W
}

// Max returns the greater of W and H.
func (s Size) Max() float64 {
	return math.Max(s.W, s.H)
}

// Min returns the lesser of W and H.
func (s Size) Min() float64 {
	return math.Min(s.W, s.H)
}
// Maxed returns a Size with the per-axis maximum of s and a.
func (s Size) Maxed(a Size) Size {
	w := math.Max(s.W, a.W)
	h := math.Max(s.H, a.H)
	return Size{w, h}
}

// EqualSided returns a square Size whose sides are the larger of W and H.
func (s Size) EqualSided() Size {
	m := s.Max()
	return Size{m, m}
}

// Area returns the product of W and H (WxH), or 0 if either dimension is
// negative.
func (s Size) Area() float64 {
	if s.W < 0 || s.H < 0 {
		return 0
	}
	return s.W * s.H
}
// Maximize grows s in place to the per-axis maximum of s and a.
func (s *Size) Maximize(a Size) {
	s.W = math.Max(s.W, a.W)
	s.H = math.Max(s.H, a.H)
}

// Minimize shrinks s in place to the per-axis minimum of s and a.
func (s *Size) Minimize(a Size) {
	s.W = math.Min(s.W, a.W)
	s.H = math.Min(s.H, a.H)
}

// MinimizeNonZero is like Minimize but ignores zero axes of a.
func (s *Size) MinimizeNonZero(a Size) {
	if a.W != 0 {
		s.W = math.Min(s.W, a.W)
	}
	if a.H != 0 {
		s.H = math.Min(s.H, a.H)
	}
}

// MaximizeNonZero is like Maximize but ignores zero axes of a.
func (s *Size) MaximizeNonZero(a Size) {
	if a.W != 0 {
		s.W = math.Max(s.W, a.W)
	}
	if a.H != 0 {
		s.H = math.Max(s.H, a.H)
	}
}

// Add adds a to s in place.
func (s *Size) Add(a Size) {
	s.W += a.W
	s.H += a.H
}

// MultiplyD scales both axes of s in place by the float64 a.
func (s *Size) MultiplyD(a float64) {
	s.W *= a
	s.H *= a
}

// DivideD divides both axes of s in place by a.
func (s *Size) DivideD(a float64) {
	s.W /= a
	s.H /= a
}

// MultiplyF scales both axes of s in place by the float32 a.
func (s *Size) MultiplyF(a float32) {
	s.W *= float64(a)
	s.H *= float64(a)
}
// Negative returns a Size with both axes negated.
func (s Size) Negative() Size {
	return Size{-s.W, -s.H}
}

// Equals reports whether both axes of s and a are exactly equal.
func (s Size) Equals(a Size) bool {
	return s.W == a.W && s.H == a.H
}

// Contains reports whether a fits within s on both axes.
func (s Size) Contains(a Size) bool {
	return s.W >= a.W && s.H >= a.H
}

// Non-mutating arithmetic helpers; the *D variants take a scalar operand.
func (s Size) Plus(a Size) Size             { return Size{s.W + a.W, s.H + a.H} }
func (s Size) Minus(a Size) Size            { return Size{s.W - a.W, s.H - a.H} }
func (s Size) MinusD(a float64) Size        { return Size{s.W - a, s.H - a} }
func (s Size) Times(a Size) Size            { return Size{s.W * a.W, s.H * a.H} }
func (s Size) TimesD(a float64) Size        { return Size{s.W * a, s.H * a} }
func (s Size) DividedBy(a Size) Size        { return Size{s.W / a.W, s.H / a.H} }
func (s Size) DividedByD(a float64) Size    { return Size{s.W / a, s.H / a} }
func (s Size) MultipliedBy(a Size) Size     { return Size{s.W * a.W, s.H * a.H} }
func (s Size) MultipliedByD(a float64) Size { return Size{s.W * a, s.H * a} }

// Subtract subtracts a from s in place.
func (s *Size) Subtract(a Size) { s.W -= a.W; s.H -= a.H }

// Copy returns a copy of s (Size is a value type, so this is s itself).
func (s Size) Copy() Size {
	return s
}
// ShrunkInto makes s just fit in 'in' proportionally, if bigger.
func (s Size) ShrunkInto(in Size) Size {
	// Use <= on both axes: a size that already fits (including exact
	// matches) is returned unchanged. The previous `s.H < in.H` needlessly
	// rescaled sizes whose height exactly equaled the bound.
	if s.W <= in.W && s.H <= in.H {
		return s
	}
	return s.ScaledTo(in)
}

// ExpandedInto makes s just fit in 'in' proportionally, if smaller.
func (s Size) ExpandedInto(in Size) Size {
	// Use >= on both axes: the previous `s.H > in.H` caused a size that
	// already covered 'in' with exactly equal height to be shrunk by
	// ScaledTo instead of being returned as-is.
	if s.W >= in.W && s.H >= in.H {
		return s
	}
	return s.ScaledTo(in)
}

// ScaledTo makes s just fit in 'in' proportionally. A zero dimension on
// either size yields the zero Size.
func (s Size) ScaledTo(in Size) Size {
	if s.W == 0 || s.H == 0 || in.W == 0 || in.H == 0 {
		return Size{}
	}
	// Scale uniformly by the smaller per-axis ratio so the result fits.
	f := in.DividedBy(s)
	return s.TimesD(f.Min())
}
// Floor returns a Size with both dimensions rounded down to integers.
// These three methods do not mutate s, so they now take value receivers —
// a backward-compatible widening (value-receiver methods are in both the
// value and pointer method sets) consistent with the other pure methods.
func (s Size) Floor() Size {
	return Size{math.Floor(s.W), math.Floor(s.H)}
}

// Ceil returns a Size with both dimensions rounded up to integers.
func (s Size) Ceil() Size {
	return Size{math.Ceil(s.W), math.Ceil(s.H)}
}

// Round returns a Size with both dimensions rounded to the nearest integer.
func (s Size) Round() Size {
	return Size{math.Round(s.W), math.Round(s.H)}
}
// String returns the size formatted as "WxH" (e.g. "640x480"), the same
// form accepted by FromString.
func (s Size) String() string {
	return fmt.Sprintf("%gx%g", s.W, s.H)
}
// FromString parses a string of the form "WxH" (e.g. "640x480") into s.
// If str contains no "x" separator, s is left unchanged and nil is
// returned; a parse failure of either component returns an error.
func (s *Size) FromString(str string) error {
	var sw, sh string
	if zstr.SplitN(str, "x", &sw, &sh) {
		w, err := strconv.ParseFloat(sw, 64)
		if err != nil {
			return zlog.Error(err, zlog.StackAdjust(1), "parse w", sw)
		}
		h, err := strconv.ParseFloat(sh, 64)
		if err != nil {
			return zlog.Error(err, zlog.StackAdjust(1), "parse h", sh)
		}
		s.W = w
		s.H = h
	}
	return nil
}
// ZNVID returns the "WxH" string form; presumably used as an identifier by
// the surrounding UI framework — TODO confirm against callers.
func (s Size) ZNVID() string {
	return s.String()
}

// ZUIString returns the "WxH" string form for display purposes.
func (s Size) ZUIString() string {
	return s.String()
}
/*
func (s *Size) UnmarshalJSON(b []byte) error {
str := string(b)
str = strings.Trim(str, `"`)
err := s.FromString(str)
fmt.Println("UNMARSHAL SIZE:", str, s)
return err
}
func (s *Size) MarshalJSON() ([]byte, error) {
str := `"` + s.String() + `"`
return []byte(str), nil
}
*/
// GetItems converts the slice of sizes into zdict items, using each size's
// "WxH" string form as the item name and the Size itself as the value.
func (s Sizes) GetItems() (items zdict.Items) {
	for _, size := range s {
		items = append(items, zdict.Item{size.String(), size})
	}
	return
}
// IndexOf returns the index of size in s, or -1 if it is not present.
// The receiver is now a value: the method does not mutate the slice, and
// this matches GetItems' receiver; callers holding a *Sizes still compile.
func (s Sizes) IndexOf(size Size) int {
	for i, candidate := range s {
		if candidate == size {
			return i
		}
	}
	return -1
}
// Swapped returns the size with W and H exchanged.
func (s Size) Swapped() Size {
	return Size{s.H, s.W}
}

// ISize is an integer-valued variant of Size.
type ISize struct {
	W int `json:"w"`
	H int `json:"h"`
}

// FSize is a float32-valued variant of Size.
type FSize struct {
	W float32 `json:"w"`
	H float32 `json:"h"`
}
package seekable
import (
"encoding/binary"
"fmt"
"math"
"go.uber.org/zap/zapcore"
)
const (
	// A seekable ZSTD stream consists of a number of frames (Zstandard
	// compressed frames and skippable frames), followed by a final
	// skippable frame containing the seek table:
	//
	//	|`Skippable_Magic_Number`|`Frame_Size`|`[Seek_Table_Entries]`|`Seek_Table_Footer`|
	//	|------------------------|------------|----------------------|-------------------|
	//	| 4 bytes                | 4 bytes    | 8-12 bytes each      | 9 bytes           |
	//
	// See:
	// https://github.com/facebook/zstd/blob/dev/contrib/seekable_format/zstd_seekable_compression_format.md

	// skippableFrameMagic is the Zstandard skippable-frame magic number.
	// Other skippable frames may legally reuse it, so a decoder must not
	// recognize seek-table frames solely by this value.
	skippableFrameMagic = 0x184D2A50

	// seekableMagicNumber identifies the seek-table footer.
	seekableMagicNumber = 0x8F92EAB1

	// seekTableFooterOffset is the fixed byte size of the footer.
	seekTableFooterOffset = 9

	// Sizes of the skippable-frame header fields (Frame_Size and
	// Skippable_Magic_Number), each 4 bytes, not counted in Frame_Size.
	frameSizeFieldSize            = 4
	skippableMagicNumberFieldSize = 4

	// maxDecoderFrameSize is the maximum frame size supported by the
	// decoder. This is to prevent OOMs due to untrusted input.
	maxDecoderFrameSize = 128 << 20

	// seekableTag is the low nibble added to skippableFrameMagic when
	// writing the seek-table frame.
	seekableTag = 0xE
)
/*
seekTableDescriptor is a Go representation of the seek-table descriptor
bitfield:

	| Bit number | Field name      |
	| ---------- | --------------- |
	| 7          | `Checksum_Flag` |
	| 6-2        | `Reserved_Bits` |
	| 1-0        | `Unused_Bits`   |

While only `Checksum_Flag` currently exists, the remaining bits are held
for future format changes (e.g. inline dictionaries). `Reserved_Bits` may
be used for breaking changes, so a compliant decoder must ensure they are
0; `Unused_Bits` may be used for non-breaking changes and must not be
interpreted.
*/
type seekTableDescriptor struct {
	// ChecksumFlag reports whether each seek-table entry carries a 4-byte
	// checksum of the uncompressed data contained in its frame.
	ChecksumFlag bool
}

// MarshalLogObject implements zapcore.ObjectMarshaler for structured logging.
func (d *seekTableDescriptor) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddBool("ChecksumFlag", d.ChecksumFlag)
	return nil
}
/*
seekTableFooter is the footer of a seekable ZSTD stream:

	|`Number_Of_Frames`|`Seek_Table_Descriptor`|`Seekable_Magic_Number`|
	|------------------|-----------------------|-----------------------|
	| 4 bytes          | 1 byte                | 4 bytes               |

https://github.com/facebook/zstd/blob/dev/contrib/seekable_format/zstd_seekable_compression_format.md#seek_table_footer
*/
type seekTableFooter struct {
	// NumberOfFrames is the number of stored frames in the data.
	NumberOfFrames uint32
	// SeekTableDescriptor is a bitfield describing the seek-table format.
	SeekTableDescriptor seekTableDescriptor
	// SeekableMagicNumber must be 0x8F92EAB1.
	SeekableMagicNumber uint32
}

// marshalBinaryInline serializes the 9-byte footer into dst, which must be
// at least seekTableFooterOffset bytes long. Note that the magic number is
// always written from the seekableMagicNumber constant, not the field, so a
// stale field value cannot produce a corrupt footer.
func (f *seekTableFooter) marshalBinaryInline(dst []byte) {
	binary.LittleEndian.PutUint32(dst[0:], f.NumberOfFrames)
	if f.SeekTableDescriptor.ChecksumFlag {
		dst[4] |= 1 << 7
	}
	binary.LittleEndian.PutUint32(dst[5:], seekableMagicNumber)
}

// MarshalBinary implements encoding.BinaryMarshaler.
func (f *seekTableFooter) MarshalBinary() ([]byte, error) {
	dst := make([]byte, seekTableFooterOffset)
	f.marshalBinaryInline(dst)
	return dst, nil
}

// MarshalLogObject implements zapcore.ObjectMarshaler for structured logging.
func (f *seekTableFooter) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddUint32("NumberOfFrames", f.NumberOfFrames)
	if err := enc.AddObject("SeekTableDescriptor", &f.SeekTableDescriptor); err != nil {
		return err
	}
	enc.AddUint32("SeekableMagicNumber", f.SeekableMagicNumber)
	return nil
}
// UnmarshalBinary parses a 9-byte seek-table footer, validating its exact
// length, the reserved bits, and the seekable magic number.
func (f *seekTableFooter) UnmarshalBinary(p []byte) error {
	if len(p) != seekTableFooterOffset {
		return fmt.Errorf("footer length mismatch %d vs %d", len(p), seekTableFooterOffset)
	}
	// Extract bits 6-2 of the descriptor byte: shifting left once discards
	// the checksum flag (bit 7), shifting right 3 then discards the unused
	// bits (1-0). Per the spec, reserved bits must be zero.
	var reservedBits uint8 = (p[4] << 1) >> 3
	if reservedBits != 0 {
		return fmt.Errorf("footer reserved bits %d != 0", reservedBits)
	}
	f.NumberOfFrames = binary.LittleEndian.Uint32(p[0:])
	f.SeekTableDescriptor.ChecksumFlag = (p[4] & (1 << 7)) > 0
	f.SeekableMagicNumber = binary.LittleEndian.Uint32(p[5:])
	if f.SeekableMagicNumber != seekableMagicNumber {
		return fmt.Errorf("footer magic mismatch %d vs %d", f.SeekableMagicNumber, seekableMagicNumber)
	}
	return nil
}
/*
seekTableEntry is an element of the seek table describing one of the
ZSTD-compressed frames in the stream. `Seek_Table_Entries` consists of
`Number_Of_Frames` entries of the following form, in sequence:

	|`Compressed_Size`|`Decompressed_Size`|`[Checksum]`|
	|-----------------|-------------------|------------|
	| 4 bytes         | 4 bytes           | 4 bytes    |

https://github.com/facebook/zstd/blob/dev/contrib/seekable_format/zstd_seekable_compression_format.md#seek_table_entries
*/
type seekTableEntry struct {
	// CompressedSize is the compressed size of the frame. The cumulative
	// sum of frames 0..i gives the offset in the compressed file of
	// frame i+1.
	CompressedSize uint32
	// DecompressedSize is the size of the decompressed data contained in
	// the frame; 0 for skippable or otherwise empty frames.
	DecompressedSize uint32
	// Checksum is only present when Checksum_Flag is set in the
	// Seek_Table_Descriptor: the least significant 32 bits of the XXH64
	// digest of the uncompressed data, little-endian.
	Checksum uint32
}

// marshalBinaryInline serializes the full 12-byte entry (including the
// checksum field) into dst.
func (e *seekTableEntry) marshalBinaryInline(dst []byte) {
	binary.LittleEndian.PutUint32(dst[0:], e.CompressedSize)
	binary.LittleEndian.PutUint32(dst[4:], e.DecompressedSize)
	binary.LittleEndian.PutUint32(dst[8:], e.Checksum)
}

// MarshalBinary implements encoding.BinaryMarshaler.
func (e *seekTableEntry) MarshalBinary() ([]byte, error) {
	dst := make([]byte, 12)
	e.marshalBinaryInline(dst)
	return dst, nil
}

// MarshalLogObject implements zapcore.ObjectMarshaler for structured logging.
func (e *seekTableEntry) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddUint32("CompressedSize", e.CompressedSize)
	enc.AddUint32("DecompressedSize", e.DecompressedSize)
	enc.AddUint32("Checksum", e.Checksum)
	return nil
}

// UnmarshalBinary parses an 8- or 12-byte entry; the checksum is left zero
// when the optional 4-byte checksum field is absent.
func (e *seekTableEntry) UnmarshalBinary(p []byte) error {
	if len(p) < 8 {
		return fmt.Errorf("entry length mismatch %d vs %d", len(p), 8)
	}
	e.CompressedSize = binary.LittleEndian.Uint32(p[0:])
	e.DecompressedSize = binary.LittleEndian.Uint32(p[4:])
	if len(p) >= 12 {
		e.Checksum = binary.LittleEndian.Uint32(p[8:])
	}
	return nil
}
/*
createSkippableFrame returns a payload formatted as a ZSDT skippable frame.
| `Magic_Number` | `Frame_Size` | `User_Data` |
|:--------------:|:------------:|:-----------:|
| 4 bytes | 4 bytes | n bytes |
Skippable frames allow the insertion of user-defined metadata
into a flow of concatenated frames.
Magic_Number
4 Bytes, __little-endian__ format.
Value : 0x184D2A5?, which means any value from 0x184D2A50 to 0x184D2A5F.
All 16 values are valid to identify a skippable frame.
This specification doesn't detail any specific tagging for skippable frames.
Frame_Size
This is the size, in bytes, of the following `User_Data`
(without including the magic number nor the size field itself).
This field is represented using 4 Bytes, __little-endian__ format, unsigned 32-bits.
This means `User_Data` can’t be bigger than (2^32-1) bytes.
User_Data
The `User_Data` can be anything. Data will just be skipped by the decoder.
https://github.com/facebook/zstd/blob/dev/doc/zstd_compression_format.md#skippable-frames
*/
func createSkippableFrame(tag uint32, payload []byte) ([]byte, error) {
if len(payload) == 0 {
return nil, nil
}
if tag > 0xf {
return nil, fmt.Errorf("requested tag (%d) > 0xf", tag)
}
if len(payload) > math.MaxUint32 {
return nil, fmt.Errorf("requested skippable frame size (%d) > max uint32", len(payload))
}
dst := make([]byte, 8, len(payload)+8)
binary.LittleEndian.PutUint32(dst[0:], skippableFrameMagic+tag)
binary.LittleEndian.PutUint32(dst[4:], uint32(len(payload)))
return append(dst, payload...), nil
} | seekable.go | 0.850779 | 0.624923 | seekable.go | starcoder |
package client
import (
"encoding/json"
)
// RetentionSetting struct for RetentionSetting. Both fields are required
// by the API schema, so neither is omitempty.
type RetentionSetting struct {
	TimeRetentionDuration int32 `json:"timeRetentionDuration"`
	RetentionDurationType string `json:"retentionDurationType"`
}
// NewRetentionSetting instantiates a new RetentionSetting object.
// This constructor will assign default values to properties that have it
// defined, and makes sure properties required by the API are set, but the
// set of arguments will change when the set of required properties changes.
func NewRetentionSetting(timeRetentionDuration int32, retentionDurationType string) *RetentionSetting {
	this := RetentionSetting{}
	this.TimeRetentionDuration = timeRetentionDuration
	this.RetentionDurationType = retentionDurationType
	return &this
}

// NewRetentionSettingWithDefaults instantiates a new RetentionSetting object.
// This constructor will only assign default values to properties that have
// it defined, but it doesn't guarantee that properties required by the API
// are set.
func NewRetentionSettingWithDefaults() *RetentionSetting {
	this := RetentionSetting{}
	return &this
}
// GetTimeRetentionDuration returns the TimeRetentionDuration field value.
// The nil-receiver guard makes the getter safe to call on a nil pointer,
// returning the zero value.
func (o *RetentionSetting) GetTimeRetentionDuration() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.TimeRetentionDuration
}

// GetTimeRetentionDurationOk returns a tuple with the TimeRetentionDuration
// field value and a boolean to check if the value has been set.
func (o *RetentionSetting) GetTimeRetentionDurationOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.TimeRetentionDuration, true
}

// SetTimeRetentionDuration sets field value.
func (o *RetentionSetting) SetTimeRetentionDuration(v int32) {
	o.TimeRetentionDuration = v
}
// GetRetentionDurationType returns the RetentionDurationType field value.
// The nil-receiver guard makes the getter safe to call on a nil pointer,
// returning the zero value.
func (o *RetentionSetting) GetRetentionDurationType() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.RetentionDurationType
}

// GetRetentionDurationTypeOk returns a tuple with the RetentionDurationType
// field value and a boolean to check if the value has been set.
func (o *RetentionSetting) GetRetentionDurationTypeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.RetentionDurationType, true
}

// SetRetentionDurationType sets field value.
func (o *RetentionSetting) SetRetentionDurationType(v string) {
	o.RetentionDurationType = v
}
// MarshalJSON serializes both required fields of the RetentionSetting.
// The generated `if true { ... }` dead conditionals (artifacts of the
// OpenAPI generator's optional-field template) have been removed.
func (o RetentionSetting) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"timeRetentionDuration": o.TimeRetentionDuration,
		"retentionDurationType": o.RetentionDurationType,
	}
	return json.Marshal(toSerialize)
}
// NullableRetentionSetting wraps a RetentionSetting pointer together with a
// flag recording whether the value has been explicitly set, so that an
// explicit JSON null can be distinguished from an absent field.
type NullableRetentionSetting struct {
	value *RetentionSetting
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableRetentionSetting) Get() *RetentionSetting {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableRetentionSetting) Set(val *RetentionSetting) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or unmarshalling) has been called.
func (v NullableRetentionSetting) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableRetentionSetting) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableRetentionSetting returns a wrapper already marked as set.
func NewNullableRetentionSetting(val *RetentionSetting) *NullableRetentionSetting {
	return &NullableRetentionSetting{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableRetentionSetting) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks it as set.
func (v *NullableRetentionSetting) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package argparse
import (
"fmt"
"regexp"
"strings"
"github.com/skillian/errors"
)
// Argument holds the definition of an argument.
type Argument struct {
	// parser holds a reference back to the parser that instantiated the
	// argument.
	parser *ArgumentParser
	// Action holds the action to perform after successful parsing of
	// values associated with the given argument.
	Action ArgumentAction
	// Const holds the value associated with this argument when the
	// argument is present.
	Const interface{}
	// Default is the value associated with the argument when a specific
	// value is not otherwise provided.
	Default interface{}
	// Dest is the string key that the argument can be retrieved by.
	Dest string
	// Help is the help text associated with the argument.
	Help string
	// MetaVar is the variable that the argument is represented with when
	// displaying its usage. It is a slice in case Nargs is non-zero.
	MetaVar []string
	// Nargs is the number of values that this argument can accept. It
	// should be a positive int unless it is one of the sentinel values:
	// ZeroOrOne, ZeroOrMore, or OneOrMore.
	Nargs int
	// OptionStrings are the possible string values that the argument can
	// be matched against (e.g. "-v", "--verbose"); positional arguments
	// have no "-"-prefixed entries.
	OptionStrings []string
	// Required determines if the argument is required or not.
	Required bool
	// Type holds a function that can be used to parse a string value into
	// the type desired by this argument.
	Type ValueParser
	// Choices holds an optional collection of allowed choices for this
	// Argument. Choices is nil if no set of allowed values was provided.
	Choices *ArgumentChoices
}
// Bind binds the argument's parsed value into the given pointer via the
// owning parser's bound-arguments table.
func (a *Argument) Bind(target interface{}) error {
	return a.parser.boundArgs.bind(a, target)
}

// MustBind panics if binding the argument fails.
func (a *Argument) MustBind(target interface{}) {
	if err := a.Bind(target); err != nil {
		panic(err)
	}
}
// Optional returns whether or not this is an optional (flag) argument: an
// argument is optional when any of its option strings starts with "-".
// If it is not optional, then it is a positional argument.
func (a *Argument) Optional() bool {
	for _, opt := range a.OptionStrings {
		if len(opt) > 0 && opt[0] == '-' {
			return true
		}
	}
	return false
}
const (
	// OneOrMore means that one or more argument values are accepted by
	// the argument. (Value: -1.)
	OneOrMore int = -1 - iota
	// ZeroOrMore indicates that zero or more arguments are accepted.
	// (Value: -2.)
	ZeroOrMore
	// ZeroOrOne indicates that zero or one argument is allowed.
	// (Value: -3, the smallest valid sentinel.)
	ZeroOrOne
)

// isValidNarg is a helper function that can tell if a Nargs value is either
// a valid number of arguments or a valid sentinel value (anything at or
// above ZeroOrOne, the most negative sentinel).
func isValidNarg(v int) bool {
	return v >= ZeroOrOne
}
// ValueParser can parse a string value into a Go value.
type ValueParser func(v string) (interface{}, error)

// Bool converts the given string into a boolean value, accepting any case
// variant of "true" or "false" (Unicode case folding).
// It implements the ValueParser interface.
func Bool(v string) (interface{}, error) {
	switch {
	case strings.EqualFold(v, "true"):
		return true, nil
	case strings.EqualFold(v, "false"):
		return false, nil
	}
	return nil, errors.NewUnexpectedType(false, v)
}
// Float32 converts the given string into a float32 value.
// It implements the ValueParser interface.
func Float32(v string) (interface{}, error) {
	var f float32
	err := sscanf(v, "%f", &f)
	return f, err
}

// Float64 converts the given string into a float64 value.
// It implements the ValueParser interface.
func Float64(v string) (interface{}, error) {
	var f float64
	err := sscanf(v, "%f", &f)
	return f, err
}
// Int converts the given string into a int value.
// It implements the ValueParser interface.
func Int(v string) (interface{}, error) {
var i int
err := sscanf(v, "%d", &i)
return i, err
}
// Int8 converts the given string into a int8 value.
// It implements the ValueParser interface.
func Int8(v string) (interface{}, error) {
var i int8
err := sscanf(v, "%d", &i)
return i, err
}
// Int16 converts the given string into a int16 value.
// It implements the ValueParser interface.
func Int16(v string) (interface{}, error) {
var i int16
err := sscanf(v, "%d", &i)
return i, err
}
// Int32 converts the given string into a int32 value.
// It implements the ValueParser interface.
func Int32(v string) (interface{}, error) {
var i int32
err := sscanf(v, "%d", &i)
return i, err
}
// Int64 converts the given string into a int value.
// It implements the ValueParser interface.
func Int64(v string) (interface{}, error) {
var i int64
err := sscanf(v, "%d", &i)
return i, err
}
// Uint converts the given string into a uint value.
// It implements the ValueParser interface.
//
// Note: Go's fmt package has no %u verb — scanning with "%u" always fails
// with a "bad verb" error — so %d is used; it parses into unsigned integer
// types correctly (and rejects negative input).
func Uint(v string) (interface{}, error) {
	var i uint
	err := sscanf(v, "%d", &i)
	return i, err
}
// Uint8 converts the given string into a uint8 value.
// It implements the ValueParser interface.
func Uint8(v string) (interface{}, error) {
	var i uint8
	err := sscanf(v, "%d", &i)
	return i, err
}
// Uint16 converts the given string into a uint16 value.
// It implements the ValueParser interface.
func Uint16(v string) (interface{}, error) {
	var i uint16
	err := sscanf(v, "%d", &i)
	return i, err
}
// Uint32 converts the given string into a uint32 value.
// It implements the ValueParser interface.
func Uint32(v string) (interface{}, error) {
	var i uint32
	err := sscanf(v, "%d", &i)
	return i, err
}
// Uint64 converts the given string into a uint64 value.
// It implements the ValueParser interface.
func Uint64(v string) (interface{}, error) {
	var i uint64
	err := sscanf(v, "%d", &i)
	return i, err
}
// String is the identity ValueParser: it returns the raw string unchanged.
// AddArgument fills it in automatically when no other ValueParser is given.
func String(v string) (interface{}, error) {
	return v, nil
}
// sscanf scans exactly one value from s using format f into the pointer p,
// returning an error when scanning fails or matches a different count.
func sscanf(s, f string, p interface{}) error {
	switch n, err := fmt.Sscanf(s, f, p); {
	case err != nil:
		return err
	case n != 1:
		return errors.Errorf("%d != 1", n)
	}
	return nil
}
// Action takes the name of an action instead of the action function.
// It works similarly to Python's argparse.ArgumentParser.add_argument
// action parameter when set to a string value. Unknown names produce an
// ArgumentOption that fails when applied.
func Action(v string) ArgumentOption {
	key := strings.TrimSpace(strings.ToLower(v))
	if act, ok := actions[key]; ok {
		return ActionFunc(act)
	}
	return func(a *Argument) error {
		return errors.Errorf(
			"unrecognized %v: %q", "Action", v,
		)
	}
}
// ActionFunc sets the argument's action to the given ArgumentAction value
// (rather than a string key) and applies that action's conventional
// defaults for Nargs, Default, and Const.
func ActionFunc(f ArgumentAction) ArgumentOption {
	return func(a *Argument) error {
		a.Action = f
		if f == Store {
			// Store consumes at least one value.
			if a.Nargs < 1 {
				a.Nargs = 1
			}
		} else if f == StoreTrue {
			a.Default = false
			a.Const = true
			a.Nargs = 0
		} else if f == StoreFalse {
			a.Default = true
			a.Const = false
			a.Nargs = 0
		}
		return nil
	}
}
// ArgumentOption configures an Argument; it returns an error when the
// option cannot be applied.
type ArgumentOption func(a *Argument) error
// ArgumentAction is called when an argument's values are parsed from the
// command line; it stores the parsed values into the namespace.
type ArgumentAction interface {
	// Name returns the registered name of the action (e.g. "store").
	Name() string
	// UpdateNamespace stores the parsed values vs for argument a into ns.
	UpdateNamespace(a *Argument, ns Namespace, vs []interface{}) error
}
// argumentActionStruct is the standard ArgumentAction implementation: a
// registered name plus the function that updates the namespace.
type argumentActionStruct struct {
	name string
	updateNamespace func(a *Argument, ns Namespace, vs []interface{}) error
}
// newArgumentActionStruct builds an ArgumentAction from name and f and
// registers it in the package-level actions map, panicking if the name has
// already been taken.
func newArgumentActionStruct(name string, f func(a *Argument, ns Namespace, vs []interface{}) error) *argumentActionStruct {
	if _, exists := actions[name]; exists {
		panic("redefinition of argument action: " + name)
	}
	act := &argumentActionStruct{name: name, updateNamespace: f}
	actions[name] = act
	return act
}
// Name returns the registered name of the action (e.g. "store").
func (s argumentActionStruct) Name() string { return s.name }
// UpdateNamespace applies the action's namespace-update function to the
// parsed argument values.
func (s argumentActionStruct) UpdateNamespace(a *Argument, ns Namespace, args []interface{}) error {
	return s.updateNamespace(a, ns, args)
}
var (
	// actions maps registered action names (e.g. "store", "append") to
	// their implementations; entries are added by newArgumentActionStruct.
	actions = make(map[string]ArgumentAction, 4)
	// Append is an ArgumentAction that appends an encountered argument's
	// parsed value(s) to the namespace entry for that argument.
	Append ArgumentAction = newArgumentActionStruct(
		"append",
		func(a *Argument, ns Namespace, args []interface{}) error {
			vs, err := a.defaultCreateValues(args)
			if err != nil {
				return err
			}
			ns.Append(a, getArgValueForNS(a, vs))
			return nil
		},
	)
	// Store is an ArgumentAction that sets the value associated with the
	// given argument. If that argument already has a value in the given
	// namespace, an error is returned.
	Store ArgumentAction = newArgumentActionStruct(
		"store",
		func(a *Argument, ns Namespace, args []interface{}) error {
			if v, ok := ns.Get(a); ok {
				return errors.Errorf(
					"argument %q already defined with value %v.",
					a.Dest, v)
			}
			vs, err := a.defaultCreateValues(args)
			if err != nil {
				return err
			}
			ns.Set(a, getArgValueForNS(a, vs))
			return nil
		},
	)
	// StoreTrue is an ArgumentAction that stores the true value in the
	// given namespace for the given argument. It accepts no values.
	StoreTrue ArgumentAction = newArgumentActionStruct(
		"store_true",
		func(a *Argument, ns Namespace, args []interface{}) error {
			if len(args) > 0 {
				return errors.Errorf(
					"no values expected for argument %q but got %d",
					a.Dest, len(args))
			}
			ns.Set(a, true)
			return nil
		},
	)
	// StoreFalse is an ArgumentAction that stores the false value in the given
	// namespace for the given argument. It accepts no values.
	StoreFalse ArgumentAction = newArgumentActionStruct(
		"store_false",
		func(a *Argument, ns Namespace, args []interface{}) error {
			if len(args) > 0 {
				return errors.Errorf(
					"no values expected for argument %q but got %d",
					a.Dest, len(args))
			}
			ns.Set(a, false)
			return nil
		},
	)
)
// getArgValueForNS collapses a single-element value slice to its sole
// element when the argument expects exactly one value; otherwise the whole
// slice is stored in the namespace.
func getArgValueForNS(a *Argument, vs []interface{}) interface{} {
	if a.Nargs != 1 || len(vs) != 1 {
		return vs
	}
	return vs[0]
}
// Choices sets the argument's allowed choices from Choice values.
func Choices(choices ...Choice) ArgumentOption {
	return func(a *Argument) error {
		a.Choices = NewChoices(choices...)
		return nil
	}
}
// ChoiceValues sets the argument's allowed choices from raw values.
func ChoiceValues(values ...interface{}) ArgumentOption {
	return func(a *Argument) error {
		a.Choices = NewChoiceValues(values...)
		return nil
	}
}
// Const sets the argument's Const value (used by actions such as
// StoreTrue/StoreFalse; see ActionFunc).
func Const(v interface{}) ArgumentOption {
	return func(a *Argument) error {
		return setValue(&a.Const, "Const", v)
	}
}
// Default sets the default value of an argument.
func Default(v interface{}) ArgumentOption {
	return func(a *Argument) error {
		return setValue(&a.Default, "Default", v)
	}
}
// Dest sets the destination name in the parsed argument namespace.
func Dest(v string) ArgumentOption {
	return func(a *Argument) error {
		return setValue(&a.Dest, "Dest", v)
	}
}
// Help sets the help string of an argument. The format string is expanded
// with fmt.Sprintf only when format arguments are supplied, so a literal
// help string containing % characters is left untouched.
func Help(format string, args ...interface{}) ArgumentOption {
	v := format
	// Only format when args were actually given; the previous condition
	// (len(args) >= 0) was always true, mangling literal % verbs into
	// "%!d(MISSING)"-style output.
	if len(args) > 0 {
		v = fmt.Sprintf(format, args...)
	}
	return func(a *Argument) error {
		return setValue(&a.Help, "Help", v)
	}
}
// MetaVar sets the argument's meta-variable strings. (The previous comment
// said "help string"; that belongs to Help above — MetaVar is its own field,
// presumably the name(s) shown in usage text; confirm with the help
// formatter.)
func MetaVar(v ...string) ArgumentOption {
	return func(a *Argument) error {
		return setValue(&a.MetaVar, "MetaVar", v)
	}
}
// Nargs sets the number of values the argument can accept: an explicit
// count or one of the sentinels OneOrMore, ZeroOrMore, ZeroOrOne.
func Nargs(v int) ArgumentOption {
	return func(a *Argument) error {
		if isValidNarg(v) {
			a.Nargs = v
			return nil
		}
		return errors.Errorf(
			"%d is not a valid number of arguments", v)
	}
}
var (
	// alphaNumRegexp matches runs of ASCII letters and digits.
	// NOTE(review): not referenced anywhere in this chunk — confirm it is
	// used elsewhere in the package before removing.
	alphaNumRegexp = regexp.MustCompile("[0-9A-Za-z]+")
)
// OptionStrings sets the argument's option strings. All strings must agree
// on whether the argument is optional (every string starts with "-") or
// positional (none do); a mix, or an empty list, is an error.
func OptionStrings(ops ...string) ArgumentOption {
	return func(a *Argument) error {
		if len(ops) == 0 {
			return errors.Errorf("no option strings specified")
		}
		var positional, optional bool
		for _, op := range ops {
			if strings.HasPrefix(op, "-") {
				optional = true
			} else {
				positional = true
			}
		}
		if optional == positional {
			return errors.Errorf(
				"cannot determine if argument %s is "+
					"optional or positional",
				ops[0])
		}
		return setValue(&a.OptionStrings, "OptionStrings", ops)
	}
}
// Required flags the Argument as required. Unlike the constructors above,
// Required matches the ArgumentOption signature directly and is used as an
// option itself rather than returning one.
func Required(a *Argument) error {
	a.Required = true
	return nil
}
// Type sets the Type (a ValueParser function) used to convert the
// argument's raw strings. Setting it twice is an error.
func Type(t ValueParser) ArgumentOption {
	return func(a *Argument) error {
		if a.Type == nil {
			a.Type = t
			return nil
		}
		return errors.Errorf(
			"type already set!")
	}
}
// defaultCreateValues converts raw argument inputs into their typed values.
// When the argument restricts its inputs via Choices, each input must be a
// key of that set and the associated choice value is used; otherwise each
// input is run through the argument's Type parser.
func (a *Argument) defaultCreateValues(args []interface{}) (vs []interface{}, err error) {
	vs = make([]interface{}, len(args))
	if a.Choices != nil {
		for i, arg := range args {
			s := stringOf(arg)
			v, ok := a.Choices.Load(s)
			if !ok {
				// Report the offending input. The previous message
				// printed v, which is the zero value whenever the
				// lookup fails.
				return nil, errors.Errorf(
					"invalid choice %q for %v", s, a.Dest,
				)
			}
			vs[i] = v
		}
		return
	}
	for i, arg := range args {
		if vs[i], err = a.Type(stringOf(arg)); err != nil {
			return
		}
	}
	return
}
// stringOf returns v itself when it is already a string, and otherwise its
// default fmt formatting.
func stringOf(v interface{}) string {
	switch s := v.(type) {
	case string:
		return s
	default:
		return fmt.Sprint(s)
	}
}
package hexgrid
import (
"image"
"math"
)
type Point64 struct {
X float64
Y float64
}
func (p *Point64) ToPoint() image.Point {
return image.Point{X: int(math.Round(p.X)), Y: int(math.Round(p.Y))}
}
// HexLayout maps hexagonal grid coordinates to pixel coordinates and back
// (see HexToPoint and PointToHex).
type HexLayout struct {
	orientation Orientation
	scale Point64 // multiplication factor relative to the canonical hexagon, where the points are on a unit circle
	origin Point64 // center point for hexagon 0,0
}
// Orientation holds the 2x2 forward (f0..f3) and inverse (b0..b3) matrix
// coefficients used by HexToPoint and PointToHex respectively, plus the
// start angle of the first corner in units of 60 degrees (used by
// HexCornerOffset).
type Orientation struct {
	f0, f1, f2, f3, b0, b1, b2, b3, startAngle float64
}
// OrientationPointy is the layout orientation for pointy-top hexagons.
var OrientationPointy Orientation = Orientation{math.Sqrt(3.), math.Sqrt(3.) / 2., 0., 3. / 2., math.Sqrt(3.) / 3., -1. / 3., 0., 2. / 3., 0.5}
// OrientationFlat is the layout orientation for flat-top hexagons.
var OrientationFlat Orientation = Orientation{3. / 2., 0., math.Sqrt(3.) / 2., math.Sqrt(3.), 2. / 3., 0., -1. / 3., math.Sqrt(3.) / 3., 0.}
// NewLayout constructs a HexLayout with the given orientation, per-axis
// scale factors, and origin (the pixel center of hexagon 0,0).
// (Reformatted to be gofmt-clean: the return type was parenthesized.)
func NewLayout(gridOrientation Orientation, scaleX, scaleY float64, origin Point64) HexLayout {
	return HexLayout{
		orientation: gridOrientation,
		scale:       Point64{X: scaleX, Y: scaleY},
		origin:      origin,
	}
}
// HexToPoint returns the center pixel for the given hexagon under this
// layout. (The previous comment referred to this method as HexToPixel.)
func (l *HexLayout) HexToPoint(h HexCoord) Point64 {
	o := l.orientation
	x := (o.f0*float64(h.q) + o.f1*float64(h.r)) * l.scale.X
	y := (o.f2*float64(h.q) + o.f3*float64(h.r)) * l.scale.Y
	return Point64{X: x + l.origin.X, Y: y + l.origin.Y}
}
// PointToHex returns the fractional axial coordinates of the hexagon
// containing the given pixel under this layout; it inverts HexToPoint.
func (l *HexLayout) PointToHex(p Point64) fractionalHex {
	o := l.orientation
	nx := (p.X - l.origin.X) / l.scale.X
	ny := (p.Y - l.origin.Y) / l.scale.Y
	q := o.b0*nx + o.b1*ny
	r := o.b2*nx + o.b3*ny
	return fractionalHex{q, r, -q - r}
}
// HexCornerOffset returns the offset of corner c (0..5) from a hexagon's
// center, scaled by the layout's per-axis scale.
func (l *HexLayout) HexCornerOffset(c int) Point64 {
	angle := 2 * math.Pi * (l.orientation.startAngle - float64(c)) / 6
	return Point64{
		X: l.scale.X * math.Cos(angle),
		Y: l.scale.Y * math.Sin(angle),
	}
}
// Gets the corners of the hexagon for the given Layout, starting at the E vertex and proceeding in a CCW order
func (l *HexLayout) HexagonCorners(h HexCoord) []Point64 {
corners := make([]Point64, 0)
center := l.HexToPoint( h)
for i := 0; i < 6; i++ {
offset := l.HexCornerOffset(i)
corners = append(corners, Point64{center.X + offset.X, center.Y + offset.Y})
}
return corners
} | layout.go | 0.898658 | 0.601886 | layout.go | starcoder |
package astvisit
import (
"go/ast"
"golang.org/x/tools/go/ast/astutil"
)
// Cursor describes the position of an AST traversal — the current node,
// its parent, and the path from the root — and offers mutation operations
// (Replace, Delete, InsertBefore/After) that act at that position.
type Cursor interface {
	// Path returns the current path of the cursor
	Path() Path
	// Node returns the current Node.
	Node() ast.Node
	// Parent returns the parent of the current Node.
	Parent() ast.Node
	// ParentField returns the name of the parent Node field that contains the current Node.
	// If the parent is a *ast.Package and the current Node is a *ast.File, ParentField returns
	// the filename for the current Node.
	ParentField() string
	// ParentFieldIndex reports the index >= 0 of the current Node in the slice of Nodes that
	// contains it, or a value < 0 if the current Node is not part of a slice.
	// The index of the current node changes if InsertBefore is called while
	// processing the current node.
	ParentFieldIndex() int
	// Replace replaces the current Node with n.
	// The replacement node is not walked by Apply.
	Replace(n ast.Node)
	// Delete deletes the current Node from its containing slice.
	// If the current Node is not part of a slice, Delete panics.
	// As a special case, if the current node is a package file,
	// Delete removes it from the package's Files map.
	Delete()
	// InsertAfter inserts n after the current Node in its containing slice.
	// If the current Node is not part of a slice, InsertAfter panics.
	// Apply does not walk n.
	InsertAfter(n ast.Node)
	// InsertBefore inserts n before the current Node in its containing slice.
	// If the current Node is not part of a slice, InsertBefore panics.
	// Apply will not walk n.
	InsertBefore(n ast.Node)
}
// newCursor wraps an astutil.Cursor together with the Path at which it was
// reached, exposing both through the Cursor interface.
func newCursor(c *astutil.Cursor, path Path) Cursor {
	return &cursor{c, path}
}
// cursor implements the Cursor interface by
// wrapping an astutil.Cursor together with a Path.
type cursor struct {
	cursor *astutil.Cursor
	path Path
}
// Path returns the path from the root to the current node.
func (c *cursor) Path() Path {
	return c.path
}
// Node returns the node the cursor currently points at.
func (c *cursor) Node() ast.Node {
	return c.cursor.Node()
}
// Parent returns the parent of the current node.
func (c *cursor) Parent() ast.Node {
	return c.cursor.Parent()
}
// ParentField returns the name of the parent field containing the node
// (astutil.Cursor.Name).
func (c *cursor) ParentField() string {
	return c.cursor.Name()
}
// ParentFieldIndex returns the node's index within its containing slice,
// or a negative value when the node is not in a slice.
func (c *cursor) ParentFieldIndex() int {
	return c.cursor.Index()
}
// Replace substitutes n for the current node.
func (c *cursor) Replace(n ast.Node) {
	c.cursor.Replace(n)
}
// Delete removes the current node from its containing slice.
func (c *cursor) Delete() {
	c.cursor.Delete()
}
// InsertAfter inserts n after the current node in its containing slice.
func (c *cursor) InsertAfter(n ast.Node) {
	c.cursor.InsertAfter(n)
}
// InsertBefore inserts n before the current node in its containing slice.
func (c *cursor) InsertBefore(n ast.Node) {
	c.cursor.InsertBefore(n)
}
package timeinterval
import (
"encoding/json"
"fmt"
"time"
)
// Repeating describes an interval with recurring events distributed evenly by the duration of the interval.
// The number of Repetitions determine the bounds of the repeating interval (from StartsAt).
// When Repetitions is unset, then the repeating interval will be unbounded and recur infinitely long into the future.
type Repeating struct {
	// Interval supplies the start time and the duration of one repetition.
	Interval Interval
	// Repetitions bounds the interval; nil means it repeats forever.
	Repetitions *uint32
}
// String describes the repeating interval, including the repetition count
// when the interval is bounded.
func (r Repeating) String() string {
	if r.Repetitions == nil {
		return fmt.Sprintf("%v, reps: %v", r.Interval, r.RepeatEvery())
	}
	return fmt.Sprintf("%v, reps: %v, times: %v", r.Interval, r.RepeatEvery(), *r.Repetitions)
}
// RepeatEvery returns the duration of each repetition, which is identical
// to the duration of the underlying interval.
func (r Repeating) RepeatEvery() time.Duration {
	return r.Interval.Duration()
}
// UnmarshalJSON parses an ISO 8601 "repeating interval" JSON string into in.
func (in *Repeating) UnmarshalJSON(data []byte) error {
	var iso string
	if err := json.Unmarshal(data, &iso); err != nil {
		return err
	}
	parsed, err := ParseRepeatingIntervalISO8601(iso)
	if err != nil {
		return err
	}
	*in = *parsed
	return nil
}
// MarshalJSON encodes in as an ISO 8601 "repeating interval" JSON string.
func (in Repeating) MarshalJSON() ([]byte, error) {
	s, err := in.ISO8601()
	if err != nil {
		return nil, err
	}
	return json.Marshal(s)
}
// StartsAt returns the time the interval begins, or nil when the repeating
// interval is unbounded (Repetitions is nil).
func (in Repeating) StartsAt() *time.Time {
	if in.Repetitions != nil {
		return &in.Interval.StartsAt
	}
	return nil
}
// EndsAt returns the time the interval ends (start plus Repetitions full
// repeat durations), or nil when the repeating interval is unbounded.
func (in Repeating) EndsAt() *time.Time {
	if in.Repetitions == nil {
		return nil
	}
	end := in.Interval.StartsAt.Add(in.RepeatEvery() * time.Duration(*in.Repetitions))
	return &end
}
// Duration returns how long the repeating interval is active in total, or
// nil when it is unbounded.
func (in Repeating) Duration() *time.Duration {
	start, end := in.StartsAt(), in.EndsAt()
	if start == nil || end == nil {
		return nil
	}
	d := end.Sub(*start)
	return &d
}
// Started reports whether the interval has begun at time t. Unbounded
// repeating intervals are considered always started.
func (in Repeating) Started(t time.Time) bool {
	start := in.StartsAt()
	if start == nil {
		return true
	}
	// Not-before covers both "equal to start" and "after start".
	return !t.Before(*start)
}
// Ended reports whether the interval is over at time t. Unbounded
// repeating intervals never end.
func (in Repeating) Ended(t time.Time) bool {
	if end := in.EndsAt(); end != nil {
		return t.After(*end)
	}
	return false
}
// In reports whether the interval is active at time t, i.e. it has Started
// and has not Ended.
func (in Repeating) In(t time.Time) bool {
	return in.Started(t) && !in.Ended(t)
}
// Next returns the time of the next interval-occurrence relative to the
// given time. It returns the start time if the interval has not started
// yet, and nil if the interval has ended or its repeat duration is zero.
func (in Repeating) Next(t time.Time) *time.Time {
	if !in.Started(t) {
		return in.StartsAt()
	}
	// Guard against division by a zero repeat duration below.
	if in.Ended(t) || in.RepeatEvery() == 0 {
		return nil
	}
	// Advance t to the next multiple of the repeat duration measured from
	// the interval's start. Note that when t lands exactly on an
	// occurrence (diff % RepeatEvery == 0), a full repeat duration is
	// added — the occurrence at t itself is not returned.
	diff := t.Sub(in.Interval.StartsAt)
	mod := diff % in.RepeatEvery()
	nxt := t.Add(in.RepeatEvery() - mod)
	if in.Ended(nxt) {
		return nil
	}
	return &nxt
}
// ISO8601 returns the repeating interval formatted as an ISO 8601
// repeating-interval string: "R<count>/<interval>" when bounded, or
// "R/<interval>" when unbounded.
func (in Repeating) ISO8601() (string, error) {
	iso, err := in.Interval.ISO8601()
	if err != nil {
		return "", err
	}
	if in.Repetitions != nil {
		return fmt.Sprintf("R%d/%s", *in.Repetitions, iso), nil
	}
	return fmt.Sprintf("R/%s", iso), nil
}
package formats
import (
"fmt"
"io"
"sort"
"github.com/anaminus/rbxmk"
"github.com/anaminus/rbxmk/dump"
"github.com/anaminus/rbxmk/rtypes"
"github.com/robloxapi/rbxattr"
"github.com/robloxapi/types"
)
// decodeAttributeValue converts a single decoded rbxattr.Value into the
// corresponding types.Value. Scalar attribute types are direct conversions;
// composite types (UDim2, sequences, Rect) are rebuilt field-by-field.
// Unsupported attribute types produce an error.
//
// NOTE(review): the default branch calls cannotEncode even though this is
// the decode path — confirm whether a cannotDecode helper was intended.
func decodeAttributeValue(a rbxattr.Value) (t types.Value, err error) {
	switch a := a.(type) {
	case *rbxattr.ValueString:
		return types.String(*a), nil
	case *rbxattr.ValueBool:
		return types.Bool(*a), nil
	case *rbxattr.ValueFloat:
		return types.Float(*a), nil
	case *rbxattr.ValueDouble:
		return types.Double(*a), nil
	case *rbxattr.ValueUDim:
		return types.UDim(*a), nil
	case *rbxattr.ValueUDim2:
		return types.UDim2{
			X: types.UDim(a.X),
			Y: types.UDim(a.Y),
		}, nil
	case *rbxattr.ValueBrickColor:
		return types.BrickColor(*a), nil
	case *rbxattr.ValueColor3:
		return types.Color3(*a), nil
	case *rbxattr.ValueVector2:
		return types.Vector2(*a), nil
	case *rbxattr.ValueVector3:
		return types.Vector3(*a), nil
	case *rbxattr.ValueNumberSequence:
		// Convert each keypoint individually; the field sets match.
		t := make(types.NumberSequence, len(*a))
		for i, k := range *a {
			t[i] = types.NumberSequenceKeypoint{
				Time: k.Time,
				Value: k.Value,
				Envelope: k.Envelope,
			}
		}
		return t, nil
	case *rbxattr.ValueColorSequence:
		// Convert each keypoint individually; only Value needs a cast.
		t := make(types.ColorSequence, len(*a))
		for i, k := range *a {
			t[i] = types.ColorSequenceKeypoint{
				Time: k.Time,
				Value: types.Color3(k.Value),
				Envelope: k.Envelope,
			}
		}
		return t, nil
	case *rbxattr.ValueNumberRange:
		return types.NumberRange(*a), nil
	case *rbxattr.ValueRect:
		return types.Rect{
			Min: types.Vector2(a.Min),
			Max: types.Vector2(a.Max),
		}, nil
	default:
		return nil, cannotEncode(a)
	}
}
// encodeAttributeValue converts a types.Value into the rbxattr.Value used
// for serialization, mirroring decodeAttributeValue. The Stringlike and
// Numberlike cases at the end are fallbacks that coerce any remaining
// string-like value to a string attribute and any number-like value to a
// double attribute; anything else is an error.
func encodeAttributeValue(t types.Value) (a rbxattr.Value, err error) {
	switch t := t.(type) {
	case types.String:
		a := rbxattr.ValueString(t)
		return &a, nil
	case types.Bool:
		a := rbxattr.ValueBool(t)
		return &a, nil
	case types.Float:
		a := rbxattr.ValueFloat(t)
		return &a, nil
	case types.Double:
		a := rbxattr.ValueDouble(t)
		return &a, nil
	case types.UDim:
		a := rbxattr.ValueUDim(t)
		return &a, nil
	case types.UDim2:
		a := rbxattr.ValueUDim2{
			X: rbxattr.ValueUDim(t.X),
			Y: rbxattr.ValueUDim(t.Y),
		}
		return &a, nil
	case types.BrickColor:
		a := rbxattr.ValueBrickColor(t)
		return &a, nil
	case types.Color3:
		a := rbxattr.ValueColor3(t)
		return &a, nil
	case types.Vector2:
		a := rbxattr.ValueVector2(t)
		return &a, nil
	case types.Vector3:
		a := rbxattr.ValueVector3(t)
		return &a, nil
	case types.NumberSequence:
		// Convert each keypoint individually; the field sets match.
		a := make(rbxattr.ValueNumberSequence, len(t))
		for i, k := range t {
			a[i] = rbxattr.ValueNumberSequenceKeypoint{
				Envelope: k.Envelope,
				Time: k.Time,
				Value: k.Value,
			}
		}
		return &a, nil
	case types.ColorSequence:
		// Convert each keypoint individually; only Value needs a cast.
		a := make(rbxattr.ValueColorSequence, len(t))
		for i, k := range t {
			a[i] = rbxattr.ValueColorSequenceKeypoint{
				Envelope: k.Envelope,
				Time: k.Time,
				Value: rbxattr.ValueColor3(k.Value),
			}
		}
		return &a, nil
	case types.NumberRange:
		a := rbxattr.ValueNumberRange(t)
		return &a, nil
	case types.Rect:
		a := rbxattr.ValueRect{
			Min: rbxattr.ValueVector2(t.Min),
			Max: rbxattr.ValueVector2(t.Max),
		}
		return &a, nil
	case types.Stringlike:
		a := rbxattr.ValueString(t.Stringlike())
		return &a, nil
	case types.Numberlike:
		a := rbxattr.ValueDouble(t.Numberlike())
		return &a, nil
	default:
		return nil, cannotEncode(t)
	}
}
// Register the rbxattr format with the format registry at load time.
func init() { register(RBXAttr) }
// RBXAttr returns the "rbxattr" format, which decodes a binary Roblox
// attribute blob into a Dictionary and encodes a Dictionary back into the
// binary form. Decoding only applies to the Instance type.
func RBXAttr() rbxmk.Format {
	return rbxmk.Format{
		Name: "rbxattr",
		MediaTypes: []string{"application/octet-stream"},
		CanDecode: func(g rbxmk.Global, f rbxmk.FormatOptions, typeName string) bool {
			return typeName == "Instance"
		},
		Decode: func(g rbxmk.Global, f rbxmk.FormatOptions, r io.Reader) (v types.Value, err error) {
			var model rbxattr.Model
			if _, err = model.ReadFrom(r); err != nil {
				return nil, fmt.Errorf("decode attributes: %w", err)
			}
			dict := make(rtypes.Dictionary, len(model.Value))
			for _, entry := range model.Value {
				// First occurrence of a duplicate key wins; later
				// entries with the same key are skipped.
				if _, ok := dict[entry.Key]; ok {
					continue
				}
				dict[entry.Key], err = decodeAttributeValue(entry.Value)
				if err != nil {
					return nil, fmt.Errorf("decode %q: %w", entry.Key, err)
				}
			}
			return dict, nil
		},
		Encode: func(g rbxmk.Global, f rbxmk.FormatOptions, w io.Writer, v types.Value) error {
			dict, ok := v.(rtypes.Dictionary)
			if !ok {
				return fmt.Errorf("Dictionary expected, got %s", v.Type())
			}
			// Roblox's implementation encodes using reverse insertion order. To
			// match this would require some sort of internal ordered dictionary
			// type. Instead, we'll just sort ascending.
			keys := make([]string, 0, len(dict))
			for key := range dict {
				keys = append(keys, key)
			}
			sort.Strings(keys)
			var model rbxattr.Model
			model.Value = make(rbxattr.ValueDictionary, 0, len(dict))
			for _, key := range keys {
				value, err := encodeAttributeValue(dict[key])
				if err != nil {
					return err
				}
				// Defensive: skip entries whose encoded value is nil.
				if value == nil {
					continue
				}
				model.Value = append(model.Value, rbxattr.Entry{
					Key: key,
					Value: value,
				})
			}
			if _, err := model.WriteTo(w); err != nil {
				return fmt.Errorf("encode attributes: %w", err)
			}
			return nil
		},
		Dump: func() dump.Format {
			return dump.Format{
				Summary: "Formats/rbxattr:Summary",
				Description: "Formats/rbxattr:Description",
			}
		},
	}
}
package ast
// Package ast implements the Abstract Syntax Tree that represents the parsed
// source code before being passed on to the interpreter for evaluation.
import (
"bytes"
"strings"
"Primate/token"
)
// Node is the interface implemented by every node in the AST.
type Node interface {
	// TokenLiteral returns the literal text of the token associated with
	// the node.
	TokenLiteral() string
	// String renders the node as source-like text for debugging.
	String() string
}
// Statement defines the interface for all statement nodes.
type Statement interface {
	Node
	// statementNode is a marker method distinguishing statements from
	// expressions at compile time.
	statementNode()
}
// Expression defines the interface for all expression nodes.
type Expression interface {
	Node
	// expressionNode is a marker method distinguishing expressions from
	// statements at compile time.
	expressionNode()
}
// Program is the root node of every AST; it holds the program's top-level
// statements in source order.
type Program struct {
	Statements []Statement
}

// TokenLiteral returns the token literal of the first statement, or the
// empty string for an empty program.
func (p *Program) TokenLiteral() string {
	if len(p.Statements) == 0 {
		return ""
	}
	return p.Statements[0].TokenLiteral()
}

// String concatenates the string form of every top-level statement.
func (p *Program) String() string {
	var sb strings.Builder
	for _, stmt := range p.Statements {
		sb.WriteString(stmt.String())
	}
	return sb.String()
}
// LetStatement is the AST node for a `let` statement, which binds the
// result of an expression to an identifier.
type LetStatement struct {
	Token token.Token // the token.LET token
	Name *Identifier
	Value Expression
}

func (ls *LetStatement) statementNode() {}

// TokenLiteral returns the literal text of the `let` token.
func (ls *LetStatement) TokenLiteral() string { return ls.Token.Literal }

// String renders the statement as source-like text for debugging.
func (ls *LetStatement) String() string {
	var b bytes.Buffer
	b.WriteString(ls.TokenLiteral())
	b.WriteString(" ")
	b.WriteString(ls.Name.String())
	b.WriteString(" = ")
	if ls.Value != nil {
		b.WriteString(ls.Value.String())
	}
	b.WriteString(";")
	return b.String()
}
// ReturnStatement is the AST node for a `return` statement.
type ReturnStatement struct {
	Token token.Token // the 'return' token
	ReturnValue Expression
}

func (rs *ReturnStatement) statementNode() {}

// TokenLiteral returns the literal text of the `return` token.
func (rs *ReturnStatement) TokenLiteral() string { return rs.Token.Literal }

// String renders the statement as source-like text for debugging.
func (rs *ReturnStatement) String() string {
	s := rs.TokenLiteral() + " "
	if rs.ReturnValue != nil {
		s += rs.ReturnValue.String()
	}
	return s + ";"
}
// ExpressionStatement is a statement consisting solely of one expression.
type ExpressionStatement struct {
	Token token.Token // the first token of the expression
	Expression Expression
}

func (es *ExpressionStatement) statementNode() {}

// TokenLiteral returns the literal text of the statement's first token.
func (es *ExpressionStatement) TokenLiteral() string { return es.Token.Literal }

// String renders the wrapped expression, or "" when it is absent.
func (es *ExpressionStatement) String() string {
	if es.Expression == nil {
		return ""
	}
	return es.Expression.String()
}
// BlockStatement is a brace-delimited group of statements.
type BlockStatement struct {
	Token token.Token // the { token
	Statements []Statement
}

func (bs *BlockStatement) statementNode() {}

// TokenLiteral returns the literal text of the opening-brace token.
func (bs *BlockStatement) TokenLiteral() string { return bs.Token.Literal }

// String concatenates the string form of every contained statement.
func (bs *BlockStatement) String() string {
	var sb strings.Builder
	for _, stmt := range bs.Statements {
		sb.WriteString(stmt.String())
	}
	return sb.String()
}
// Identifier represents an identifier and holds its name.
type Identifier struct {
	Token token.Token // the token.IDENT token
	Value string
}
func (i *Identifier) expressionNode() {}
// TokenLiteral returns the literal text of the identifier's token.
func (i *Identifier) TokenLiteral() string { return i.Token.Literal }
// String returns the identifier's name.
func (i *Identifier) String() string { return i.Value }
// Boolean represents a boolean literal and holds the underlying bool value.
type Boolean struct {
	Token token.Token
	Value bool
}
func (b *Boolean) expressionNode() {}
// TokenLiteral returns the literal text of the boolean's token.
func (b *Boolean) TokenLiteral() string { return b.Token.Literal }
// String returns the boolean's token literal.
func (b *Boolean) String() string { return b.Token.Literal }
// IntegerLiteral represents an integer literal and holds its parsed value.
type IntegerLiteral struct {
	Token token.Token
	Value int64
}
func (il *IntegerLiteral) expressionNode() {}
// TokenLiteral returns the literal text of the integer's token.
func (il *IntegerLiteral) TokenLiteral() string { return il.Token.Literal }
// String returns the integer's token literal.
func (il *IntegerLiteral) String() string { return il.Token.Literal }
// PrefixExpression is an expression with a leading operator (e.g. !x, -y);
// it holds the operator and the right-hand operand.
type PrefixExpression struct {
	Token token.Token // The prefix token, e.g. !
	Operator string
	Right Expression
}

func (pe *PrefixExpression) expressionNode() {}

// TokenLiteral returns the literal text of the operator token.
func (pe *PrefixExpression) TokenLiteral() string { return pe.Token.Literal }

// String renders the expression parenthesized, e.g. "(-x)".
func (pe *PrefixExpression) String() string {
	return "(" + pe.Operator + pe.Right.String() + ")"
}
// InfixExpression is a binary expression; it holds the left operand, the
// operator, and the right operand.
type InfixExpression struct {
	Token token.Token // The operator token, e.g. +
	Left Expression
	Operator string
	Right Expression
}

func (ie *InfixExpression) expressionNode() {}

// TokenLiteral returns the literal text of the operator token.
func (ie *InfixExpression) TokenLiteral() string { return ie.Token.Literal }

// String renders the expression parenthesized, e.g. "(a + b)".
func (ie *InfixExpression) String() string {
	return "(" + ie.Left.String() + " " + ie.Operator + " " + ie.Right.String() + ")"
}
// IfExpression is an `if` expression with a condition, a consequence block,
// and an optional alternative (else) block.
type IfExpression struct {
	Token token.Token // The 'if' token
	Condition Expression
	Consequence *BlockStatement
	Alternative *BlockStatement
}

func (ie *IfExpression) expressionNode() {}

// TokenLiteral returns the literal text of the `if` token.
func (ie *IfExpression) TokenLiteral() string { return ie.Token.Literal }

// String renders the expression as source-like text for debugging.
func (ie *IfExpression) String() string {
	s := "if" + ie.Condition.String() + " " + ie.Consequence.String()
	if ie.Alternative != nil {
		s += "else " + ie.Alternative.String()
	}
	return s
}
// FunctionLiteral is a literal function; it holds the function's formal
// parameters and its body as a block statement.
type FunctionLiteral struct {
	Token token.Token // The 'fn' token
	Parameters []*Identifier
	Body *BlockStatement
}

func (fl *FunctionLiteral) expressionNode() {}

// TokenLiteral returns the literal text of the `fn` token.
func (fl *FunctionLiteral) TokenLiteral() string { return fl.Token.Literal }

// String renders the literal as "fn(p1, p2) body" style text.
func (fl *FunctionLiteral) String() string {
	params := make([]string, 0, len(fl.Parameters))
	for _, param := range fl.Parameters {
		params = append(params, param.String())
	}
	return fl.TokenLiteral() + "(" + strings.Join(params, ", ") + ") " + fl.Body.String()
}
// CallExpression represents a call expression and holds the function to be
// called as well as the arguments to be passed to that function
type CallExpression struct {
Token token.Token // The '(' token
Function Expression // Identifier or FunctionLiteral
Arguments []Expression
}
func (ce *CallExpression) expressionNode() {}
// TokenLiteral prints the literal value of the token associated with this node
func (ce *CallExpression) TokenLiteral() string { return ce.Token.Literal }
// String returns a stringified version of the AST for debugging
func (ce *CallExpression) String() string {
var out bytes.Buffer
args := []string{}
for _, a := range ce.Arguments {
args = append(args, a.String())
}
out.WriteString(ce.Function.String())
out.WriteString("(")
out.WriteString(strings.Join(args, ", "))
out.WriteString(")")
return out.String()
} | ast/ast.go | 0.874359 | 0.484197 | ast.go | starcoder |
package randomvariate
import (
"math"
"math/rand"
)
// Multinomial draws n samples from the categorical distribution whose
// probabilities are given by p and returns the count drawn for each
// category. It uses the inversion (CDF) method, which may be inefficient
// when both the number of categories and the number of samples are large.
// An empty p yields an empty result (previously it panicked).
func Multinomial(n int, p []float64) []int {
	result := make([]int, len(p))
	if len(p) == 0 {
		return result
	}
	// Build the cumulative distribution of p.
	cumP := make([]float64, len(p))
	cumP[0] = p[0]
	for i := 1; i < len(p); i++ {
		cumP[i] = cumP[i-1] + p[i]
	}
	lastIdx := len(p) - 1
	for i := 0; i < n; i++ {
		x := rand.Float64()
		// Default to the last category so that floating-point round-off in
		// cumP (or x landing exactly on cumP[lastIdx]) cannot drop a sample;
		// previously such draws were silently discarded, and the x <= 1.0
		// check was dead code since rand.Float64() is always < 1.
		idx := lastIdx
		for j := 0; j < lastIdx; j++ {
			if x < cumP[j] {
				idx = j
				break
			}
		}
		result[idx]++
	}
	return result
}
// MultinomialA draws n samples from a probability distribution given by the
// set of probabilities p. Uses the alias method (Vose's algorithm). Faster
// when dealing with a larger number of categories and number of samples.
func MultinomialA(n int, p []float64) []int {
	// Build the alias table: q holds probabilities scaled by K, J holds the
	// alias category for each column.
	K := len(p)
	q := make([]float64, K)
	J := make([]int, K)
	var smaller []int
	var larger []int
	for i, prob := range p {
		q[i] = float64(K) * prob
		if q[i] < 1.0 {
			smaller = append(smaller, i)
		} else {
			larger = append(larger, i)
		}
	}
	// Pair each under-full column with an over-full one.
	var small, large int
	for len(smaller) > 0 && len(larger) > 0 {
		small, smaller = smaller[len(smaller)-1], smaller[:len(smaller)-1]
		large, larger = larger[len(larger)-1], larger[:len(larger)-1]
		J[small] = large
		// Redundant float64(...) conversion removed; value is unchanged.
		q[large] -= 1.0 - q[small]
		if q[large] < 1.0 {
			smaller = append(smaller, large)
		} else {
			larger = append(larger, large)
		}
	}
	// Draw samples: pick a column uniformly, then keep it or take its alias.
	result := make([]int, K)
	for i := 0; i < n; i++ {
		kk := rand.Intn(K)
		if rand.Float64() < q[kk] {
			result[kk]++
		} else {
			result[J[kk]]++
		}
	}
	return result
}
// MultinomialLog1p draws n samples from a log-probability distribution given
// by logP, where each entry is log(1+p) for a probability p in [0, 1]; this
// encoding avoids taking the log of zero. The entries are converted back to
// plain probabilities via expm1 and delegated to Multinomial.
func MultinomialLog1p(n int, logP []float64) []int {
	probs := make([]float64, len(logP))
	for i := range logP {
		probs[i] = math.Expm1(logP[i])
	}
	return Multinomial(n, probs)
}
// MultinomialLog draws n samples from a log-probability distribution given
// by the set of probabilities p. Note that the log probabilities are in the
// format log(p) where p is from 0 to 1. If p = 0, the log-probability should
// be encoded as negative infinity.
func MultinomialLog(n int, logP []float64) []int {
// Transform log probabilities into decimal
p := make([]float64, len(logP))
for i, logProb := range logP {
if math.IsInf(logProb, -1) {
p[i] = 0
} else if logProb == 0 {
p[i] = 1
} else {
p[i] = math.Exp(logProb)
}
}
return Multinomial(n, p)
} | multinomial.go | 0.710025 | 0.594963 | multinomial.go | starcoder |
package simplevdf
import (
"crypto/sha256"
"fmt"
"math/big"
"runtime"
"time"
)
// pair holds the two proof components (g1, h1) produced by proofBuild.
type pair struct {
	a *big.Int
	b *big.Int
}
// trip is one proof-list entry: the components (g1, h1) together with the
// halved exponent t they were produced at.
type trip struct {
	g1 *big.Int
	h1 *big.Int
	t int
}
// cipherPair bundles the final evaluation result c with its proof list.
type cipherPair struct {
	c *big.Int
	proof []trip
}
func square(base, modulus *big.Int) *big.Int {
return new(big.Int).Mod(new(big.Int).Mul(base, base), modulus)
}
// Mul multiply
func Mul(x, y *big.Int) *big.Int {
return big.NewInt(0).Mul(x, y)
}
// Add add
func Add(x, y *big.Int) *big.Int {
return big.NewInt(0).Add(x, y)
}
// Sub subtract
func Sub(x, y *big.Int) *big.Int {
return big.NewInt(0).Sub(x, y)
}
// Div divide
func Div(x, y *big.Int) *big.Int {
return big.NewInt(0).Div(x, y)
}
func sha(input *big.Int) *big.Int {
h := sha256.New()
h.Write(input.Bytes())
return new(big.Int).SetBytes(h.Sum(nil))
}
// encodeByte evaluates the VDF: it squares m mod N t times, recording every
// intermediate value in proof, then builds the proof list from that chain.
// Timing is reported to stderr via the builtin println.
func encodeByte(t int, m, N *big.Int) cipherPair {
	x := new(big.Int).Mod(m, N)
	proof := []*big.Int{}
	start := time.Now()
	proof = append(proof, x)
	h := x
	// NOTE(review): the loop variable x shadows the big.Int x above; the
	// squaring chain itself only reads and writes h.
	for x := 0; x < t; x++ {
		h = square(h, N)
		proof = append(proof, h)
	}
	cur := time.Now()
	println("Eval Elapsed", fmt.Sprintf("%.2f", cur.Sub(start).Seconds()), "sec")
	start = time.Now()
	prooflist := proofListBuild(proof, h, N, t, []trip{})
	cur = time.Now()
	println("Prooflist Elapsed", fmt.Sprintf("%.2f", cur.Sub(start).Seconds()), "sec")
	return cipherPair{h, prooflist}
}
// g: base
// r: random
// N: field
// v: result of T/2
// h: result of T
// proofListBuild walks the squaring chain from exponent T down to 2,
// halving t each round and appending one proof triple (built by proofBuild)
// per round.
func proofListBuild(proof []*big.Int, h, N *big.Int, T int, prooflist []trip) []trip {
	for t := T; t >= 2; t = t / 2 {
		h := new(big.Int) // shadows the parameter h; recomputed each round
		// When t is odd, round it up so t/2 is an exact index into proof;
		// the chain value for the rounded-up exponent is one extra square.
		if (t)%2 != 0 {
			t++ // NOTE(review): mutates the loop variable mid-iteration
			h = square(proof[t-1], N)
		} else {
			h = proof[t]
		}
		g := proof[0]
		// Fiat-Shamir style challenge derived from the midpoint value.
		r := new(big.Int).Mod(sha(proof[t/2]), N)
		v := proof[t/2]
		out := proofBuild(g, v, h, r, N)
		prooflist = append(prooflist, trip{out.a, out.b, t / 2})
	}
	return prooflist
}
// proofBuild computes the two proof components concurrently:
//
//	g1 = g^r * v mod N
//	h1 = v^r * h mod N
func proofBuild(g, v, h, r, N *big.Int) pair {
	gCh := make(chan *big.Int)
	hCh := make(chan *big.Int)
	// expMulMod computes (base^exp * factor) mod N and sends the result.
	expMulMod := func(base, exp, factor *big.Int, out chan *big.Int) {
		res := new(big.Int).Exp(base, exp, N)
		res.Mul(res, factor)
		out <- res.Mod(res, N)
	}
	go expMulMod(g, r, v, gCh)
	go expMulMod(v, r, h, hCh)
	hVal := <-hCh
	gVal := <-gCh
	return pair{gVal, hVal}
}
// verify checks every proof triple in parallel: each goroutine re-squares
// the triple's g1 component t times and compares the result against h1.
// One bool per triple is returned (in completion order, not input order).
// Timing is reported to stderr via the builtin println.
func verify(cp cipherPair, N *big.Int) []bool {
	prooflist := cp.proof
	boolist := []bool{}
	c := make(chan bool)
	start := time.Now()
	for _, item := range prooflist {
		go func(item trip, N *big.Int, c chan bool) {
			g := item.g1
			for x := 0; x < item.t; x++ {
				g = square(g, N)
			}
			c <- g.Cmp(item.h1) == 0
		}(item, N, c)
	}
	// Collect exactly one result per launched goroutine.
	for range prooflist {
		boolist = append(boolist, <-c)
	}
	cur := time.Now()
	println("Verify Elapsed", fmt.Sprintf("%.2f", cur.Sub(start).Seconds()), "sec")
	println(len(boolist))
	return boolist
}
// Test runs a fixed end-to-end demo: evaluate the VDF with t squarings over
// the modulus N = p*q, then verify every proof triple, printing timings and
// the final verification verdict along the way.
func Test() {
	println("GOMAXPROCS is:", runtime.GOMAXPROCS(0))
	t := 100000
	input, _ := new(big.Int).SetString("349590234923847372", 0)
	var p, _ = new(big.Int).SetString("126493185890016866190387990037436305339", 0)
	var q, _ = new(big.Int).SetString("237515677732435432578220196406645605033", 0)
	//p*q=N
	var N = Mul(p, q)
	start := time.Now()
	startingValue := encodeByte(t, input, N)
	cur := time.Now()
	println("Encode Elapsed", fmt.Sprintf("%.2f", cur.Sub(start).Seconds()), "sec")
	println(startingValue.c.String())
	// Every triple must verify for the overall result to be accepted.
	verified := true
	for _, statement := range verify(startingValue, N) {
		if statement != true {
			verified = false
		}
	}
	println(verified)
}
package gridspech
import (
"strings"
)
// TileSet represents a mathematical set of tiles. Tiles are compared using ==.
// The zero value is ready to use; the backing map is allocated lazily.
type TileSet struct {
	set map[Tile]struct{}
}
// NewTileSet returns a TileSet containing only tiles.
func NewTileSet(tiles ...Tile) TileSet {
	var set TileSet
	for _, tile := range tiles {
		set.Add(tile)
	}
	return set
}
// checkInit lazily allocates the backing map so the zero TileSet is usable.
func (ts *TileSet) checkInit() {
	if ts.set == nil {
		ts.set = map[Tile]struct{}{}
	}
}

// Add inserts t into ts.
func (ts *TileSet) Add(t Tile) {
	ts.checkInit()
	ts.set[t] = struct{}{}
}

// Has reports whether ts contains t.
func (ts TileSet) Has(t Tile) bool {
	_, found := ts.set[t]
	return found
}

// Remove deletes t from ts.
func (ts *TileSet) Remove(t Tile) {
	ts.checkInit()
	delete(ts.set, t)
}
// RemoveIf removes every tile for which pred returns true.
// (Deleting the current key while ranging over a map is safe in Go.)
func (ts *TileSet) RemoveIf(pred func(t Tile) bool) {
	for tile := range ts.set {
		if !pred(tile) {
			continue
		}
		ts.Remove(tile)
	}
}
// RemoveAll removes all of the elements in o from ts, making ts the set
// difference ts \ o (not the intersection, as previously stated).
// It iterates over the smaller of the two sets for efficiency.
func (ts *TileSet) RemoveAll(o TileSet) {
	if ts.Len() < o.Len() {
		for tile := range ts.set {
			if o.Has(tile) {
				ts.Remove(tile)
			}
		}
	} else {
		for tile := range o.set {
			if ts.Has(tile) {
				ts.Remove(tile)
			}
		}
	}
}
// Len returns the number of tiles in ts.
func (ts TileSet) Len() int {
	return len(ts.set)
}

// Merge inserts every tile of other into ts.
func (ts *TileSet) Merge(other TileSet) {
	ts.checkInit()
	for elem := range other.set {
		ts.set[elem] = struct{}{}
	}
}
// Eq reports whether ts and other hold exactly the same tiles.
func (ts TileSet) Eq(other TileSet) bool {
	if other.Len() != ts.Len() {
		return false
	}
	for tile := range ts.set {
		if !other.Has(tile) {
			return false
		}
	}
	return true
}
// Iter returns a buffered channel yielding every tile in ts, fed by a
// background goroutine; the channel is closed once all tiles are sent.
// NOTE(review): if a consumer stops receiving before draining the channel,
// the goroutine blocks forever once the 5-slot buffer fills — confirm all
// callers consume to completion.
func (ts TileSet) Iter() <-chan Tile {
	iter := make(chan Tile, 5)
	go func() {
		for tile := range ts.set {
			iter <- tile
		}
		close(iter)
	}()
	return iter
}
// Slice returns the tiles of ts as a slice (in unspecified order).
func (ts TileSet) Slice() []Tile {
	tiles := make([]Tile, 0, ts.Len())
	for tile := range ts.set {
		tiles = append(tiles, tile)
	}
	return tiles
}
// ToTileCoordSet converts ts into a TileCoordSet holding each tile's Coord.
func (ts TileSet) ToTileCoordSet() TileCoordSet {
	var coords TileCoordSet
	for tile := range ts.set {
		coords.Add(tile.Coord)
	}
	return coords
}
// String renders the tiles on a grid wrapped in braces: rows run from the
// maximum Y down to 0 and are separated by '|'; each non-hole tile is shown
// as its color digit, holes and absent cells as spaces.
// NOTE(review): assumes all coordinates are non-negative (a negative X or Y
// would index out of range), colors are single digits, and the zero value of
// the tile Type is TypeHole so absent cells render as spaces — confirm.
func (ts TileSet) String() string {
	slice := ts.Slice()
	var maxX, maxY int
	for _, tile := range slice {
		if tile.Coord.X > maxX {
			maxX = tile.Coord.X
		}
		if tile.Coord.Y > maxY {
			maxY = tile.Coord.Y
		}
	}
	maxX++
	maxY++
	// Build a dense maxX×maxY lookup of tiles by coordinate.
	tilesAt := make([][]Tile, maxX)
	for x := range tilesAt {
		tilesAt[x] = make([]Tile, maxY)
	}
	for _, v := range slice {
		tilesAt[v.Coord.X][v.Coord.Y] = v
	}
	var sb strings.Builder
	sb.WriteByte('{')
	for y := maxY - 1; y >= 0; y-- {
		for x := 0; x < maxX; x++ {
			if tile := tilesAt[x][y]; tile.Data.Type != TypeHole {
				sb.WriteByte(byte(tile.Data.Color) + '0')
			} else {
				sb.WriteByte(' ')
			}
		}
		if y > 0 {
			sb.WriteByte('|')
		}
	}
	sb.WriteByte('}')
	return sb.String()
}
// MultiLineString returns a string representation of this tileset on multiple lines
func (ts TileSet) MultiLineString() string {
next := ts.String()
next = next[1 : len(next)-1]
next = strings.ReplaceAll(next, "|", "\n")
next += "\n"
return next
} | tileSet.go | 0.80567 | 0.471588 | tileSet.go | starcoder |
package gonum
import (
"math/cmplx"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/internal/asm/c128"
)
// Zgerc performs the rank-one operation
//  A += alpha * x * y^H
// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector,
// and y is an n element vector.
func (Implementation) Zgerc(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) {
	checkZMatrix('A', m, n, a, lda)
	checkZVector('x', m, x, incX)
	checkZVector('y', n, y, incY)
	// Quick return when there is no work to do.
	if m == 0 || n == 0 || alpha == 0 {
		return
	}
	// kx and jy are the starting offsets into x and y for negative increments.
	var kx, jy int
	if incX < 0 {
		kx = (1 - m) * incX
	}
	if incY < 0 {
		jy = (1 - n) * incY
	}
	// For each column j with y[jy] != 0, add alpha*conj(y[jy]) * x to
	// column j of A via the strided axpy kernel.
	for j := 0; j < n; j++ {
		if y[jy] != 0 {
			tmp := alpha * cmplx.Conj(y[jy])
			c128.AxpyInc(tmp, x, a[j:], uintptr(m), uintptr(incX), uintptr(lda), uintptr(kx), 0)
		}
		jy += incY
	}
}
// Zgeru performs the rank-one operation
//  A += alpha * x * y^T
// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector,
// and y is an n element vector.
func (Implementation) Zgeru(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) {
	checkZMatrix('A', m, n, a, lda)
	checkZVector('x', m, x, incX)
	checkZVector('y', n, y, incY)
	// Quick return when there is no work to do.
	if m == 0 || n == 0 || alpha == 0 {
		return
	}
	// kx is the starting offset into x for negative increments.
	var kx int
	if incX < 0 {
		kx = (1 - m) * incX
	}
	// Fast path: contiguous y lets each row update use the unit-stride kernel.
	if incY == 1 {
		for i := 0; i < m; i++ {
			if x[kx] != 0 {
				tmp := alpha * x[kx]
				c128.AxpyUnitary(tmp, y[:n], a[i*lda:i*lda+n])
			}
			kx += incX
		}
		return
	}
	// General case: strided y.
	var jy int
	if incY < 0 {
		jy = (1 - n) * incY
	}
	for i := 0; i < m; i++ {
		if x[kx] != 0 {
			tmp := alpha * x[kx]
			c128.AxpyInc(tmp, y, a[i*lda:i*lda+n], uintptr(n), uintptr(incY), 1, uintptr(jy), 0)
		}
		kx += incX
	}
}
// Zher performs the Hermitian rank-one operation
//  A += alpha * x * x^H
// where A is an n×n Hermitian matrix, alpha is a real scalar, and x is an n
// element vector. On entry, the imaginary parts of the diagonal elements of A
// are ignored and assumed to be zero, on return they will be set to zero.
func (Implementation) Zher(uplo blas.Uplo, n int, alpha float64, x []complex128, incX int, a []complex128, lda int) {
	if uplo != blas.Upper && uplo != blas.Lower {
		panic(badUplo)
	}
	checkZMatrix('A', n, n, a, lda)
	checkZVector('x', n, x, incX)
	// Quick return when there is no work to do.
	if n == 0 || alpha == 0 {
		return
	}
	// kx is the starting offset into x for negative increments.
	var kx int
	if incX < 0 {
		kx = (1 - n) * incX
	}
	if uplo == blas.Upper {
		// Upper triangle, unit increment: x indexed directly by row.
		if incX == 1 {
			for i := 0; i < n; i++ {
				if x[i] != 0 {
					tmp := complex(alpha*real(x[i]), alpha*imag(x[i]))
					aii := real(a[i*lda+i])
					xtmp := real(tmp * cmplx.Conj(x[i]))
					// Diagonal entries are forced to have zero imaginary part.
					a[i*lda+i] = complex(aii+xtmp, 0)
					for j := i + 1; j < n; j++ {
						a[i*lda+j] += tmp * cmplx.Conj(x[j])
					}
				} else {
					aii := real(a[i*lda+i])
					a[i*lda+i] = complex(aii, 0)
				}
			}
			return
		}
		// Upper triangle, non-unit increment: ix/jx walk x in strides.
		ix := kx
		for i := 0; i < n; i++ {
			if x[ix] != 0 {
				tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix]))
				aii := real(a[i*lda+i])
				xtmp := real(tmp * cmplx.Conj(x[ix]))
				a[i*lda+i] = complex(aii+xtmp, 0)
				jx := ix + incX
				for j := i + 1; j < n; j++ {
					a[i*lda+j] += tmp * cmplx.Conj(x[jx])
					jx += incX
				}
			} else {
				aii := real(a[i*lda+i])
				a[i*lda+i] = complex(aii, 0)
			}
			ix += incX
		}
		return
	}
	// Lower triangle, unit increment.
	if incX == 1 {
		for i := 0; i < n; i++ {
			if x[i] != 0 {
				tmp := complex(alpha*real(x[i]), alpha*imag(x[i]))
				for j := 0; j < i; j++ {
					a[i*lda+j] += tmp * cmplx.Conj(x[j])
				}
				aii := real(a[i*lda+i])
				xtmp := real(tmp * cmplx.Conj(x[i]))
				a[i*lda+i] = complex(aii+xtmp, 0)
			} else {
				aii := real(a[i*lda+i])
				a[i*lda+i] = complex(aii, 0)
			}
		}
		return
	}
	// Lower triangle, non-unit increment.
	ix := kx
	for i := 0; i < n; i++ {
		if x[ix] != 0 {
			tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix]))
			jx := kx
			for j := 0; j < i; j++ {
				a[i*lda+j] += tmp * cmplx.Conj(x[jx])
				jx += incX
			}
			aii := real(a[i*lda+i])
			xtmp := real(tmp * cmplx.Conj(x[ix]))
			a[i*lda+i] = complex(aii+xtmp, 0)
		} else {
			aii := real(a[i*lda+i])
			a[i*lda+i] = complex(aii, 0)
		}
		ix += incX
	}
}
// Zher2 performs the Hermitian rank-two operation
// A += alpha*x*y^H + conj(alpha)*y*x^H
// where alpha is a scalar, x and y are n element vectors and A is an n×n
// Hermitian matrix. On entry, the imaginary parts of the diagonal elements are
// ignored and assumed to be zero. On return they will be set to zero.
func (Implementation) Zher2(uplo blas.Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) {
if uplo != blas.Upper && uplo != blas.Lower {
panic(badUplo)
}
checkZMatrix('A', n, n, a, lda)
checkZVector('x', n, x, incX)
checkZVector('y', n, y, incY)
if n == 0 || alpha == 0 {
return
}
var kx, ky int
var ix, iy int
if incX != 1 || incY != 1 {
if incX < 0 {
kx = (1 - n) * incX
}
if incY < 0 {
ky = (1 - n) * incY
}
ix = kx
iy = ky
}
if uplo == blas.Upper {
if incX == 1 && incY == 1 {
for i := 0; i < n; i++ {
if x[i] != 0 || y[i] != 0 {
tmp1 := alpha * x[i]
tmp2 := cmplx.Conj(alpha) * y[i]
aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i]))
a[i*lda+i] = complex(aii, 0)
for j := i + 1; j < n; j++ {
a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j])
}
} else {
aii := real(a[i*lda+i])
a[i*lda+i] = complex(aii, 0)
}
}
return
}
for i := 0; i < n; i++ {
if x[i] != 0 || y[i] != 0 {
tmp1 := alpha * x[ix]
tmp2 := cmplx.Conj(alpha) * y[iy]
aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix]))
a[i*lda+i] = complex(aii, 0)
jx := ix + incX
jy := iy + incY
for j := i + 1; j < n; j++ {
a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx])
jx += incX
jy += incY
}
} else {
aii := real(a[i*lda+i])
a[i*lda+i] = complex(aii, 0)
}
ix += incX
iy += incY
}
return
}
if incX == 1 && incY == 1 {
for i := 0; i < n; i++ {
if x[i] != 0 || y[i] != 0 {
tmp1 := alpha * x[i]
tmp2 := cmplx.Conj(alpha) * y[i]
for j := 0; j < i; j++ {
a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j])
}
aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i]))
a[i*lda+i] = complex(aii, 0)
} else {
aii := real(a[i*lda+i])
a[i*lda+i] = complex(aii, 0)
}
}
return
}
for i := 0; i < n; i++ {
if x[i] != 0 || y[i] != 0 {
tmp1 := alpha * x[ix]
tmp2 := cmplx.Conj(alpha) * y[iy]
jx := kx
jy := ky
for j := 0; j < i; j++ {
a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx])
jx += incX
jy += incY
}
aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix]))
a[i*lda+i] = complex(aii, 0)
} else {
aii := real(a[i*lda+i])
a[i*lda+i] = complex(aii, 0)
}
ix += incX
iy += incY
}
} | blas/gonum/level2cmplx128.go | 0.601477 | 0.443902 | level2cmplx128.go | starcoder |
package merkle
import (
"bytes"
"fmt"
)
// SimpleProof is a Merkle inclusion proof for a single leaf of a simple
// Merkle tree.
type SimpleProof struct {
	Aunts [][]byte `json:"aunts"` // Hashes from leaf's sibling to a root's child.
}
// SimpleProofsFromHashers computes the root hash of the simple Merkle tree
// over items and an inclusion proof per item; proofs[0] is the proof for
// items[0].
func SimpleProofsFromHashers(items []Hasher) (rootHash []byte, proofs []*SimpleProof) {
	trails, rootSPN := trailsFromHashers(items)
	proofs = make([]*SimpleProof, len(items))
	for i := range trails {
		proofs[i] = &SimpleProof{Aunts: trails[i].FlattenAunts()}
	}
	return rootSPN.Hash, proofs
}
// SimpleProofsFromMap hashes the map's key/value pairs in sorted key order
// and returns the resulting root hash with one proof per pair.
func SimpleProofsFromMap(m map[string]Hasher) (rootHash []byte, proofs []*SimpleProof) {
	sm := NewSimpleMap()
	for k, v := range m {
		sm.Set(k, v)
	}
	// Sorting makes the leaf order — and therefore the root — deterministic.
	sm.Sort()
	kvs := sm.kvs
	kvsH := make([]Hasher, 0, len(kvs))
	for _, kvp := range kvs {
		kvsH = append(kvsH, KVPair(kvp))
	}
	return SimpleProofsFromHashers(kvsH)
}
// Verify reports whether leafHash, at position index out of total leaves,
// hashes up through sp.Aunts to rootHash.
func (sp *SimpleProof) Verify(index int, total int, leafHash []byte, rootHash []byte) bool {
	computed := computeHashFromAunts(index, total, leafHash, sp.Aunts)
	if computed == nil {
		return false
	}
	return bytes.Equal(computed, rootHash)
}
// String returns the proof rendered with no indentation.
func (sp *SimpleProof) String() string {
	return sp.StringIndented("")
}
// StringIndented returns a human-readable rendering of the proof with each
// line prefixed by indent.
func (sp *SimpleProof) StringIndented(indent string) string {
	return fmt.Sprintf(`SimpleProof{
%s Aunts: %X
%s}`,
		indent, sp.Aunts,
		indent)
}
// Use the leafHash and innerHashes to get the root merkle hash.
// If the length of the innerHashes slice isn't exactly correct, the result is nil.
// Recursive impl.
func computeHashFromAunts(index int, total int, leafHash []byte, innerHashes [][]byte) []byte {
	if index >= total || index < 0 || total <= 0 {
		return nil
	}
	switch total {
	case 0:
		// Unreachable: total <= 0 already returned nil above.
		panic("Cannot call computeHashFromAunts() with 0 total")
	case 1:
		// A single leaf needs no inner hashes; anything else is malformed.
		if len(innerHashes) != 0 {
			return nil
		}
		return leafHash
	default:
		if len(innerHashes) == 0 {
			return nil
		}
		// The left subtree holds ceil(total/2) leaves. Recurse into the
		// half containing index, then combine with the last aunt hash.
		numLeft := (total + 1) / 2
		if index < numLeft {
			leftHash := computeHashFromAunts(index, numLeft, leafHash, innerHashes[:len(innerHashes)-1])
			if leftHash == nil {
				return nil
			}
			return SimpleHashFromTwoHashes(leftHash, innerHashes[len(innerHashes)-1])
		}
		rightHash := computeHashFromAunts(index-numLeft, total-numLeft, leafHash, innerHashes[:len(innerHashes)-1])
		if rightHash == nil {
			return nil
		}
		return SimpleHashFromTwoHashes(innerHashes[len(innerHashes)-1], rightHash)
	}
}
// Helper structure to construct merkle proof.
// The node and the tree is thrown away afterwards.
// Exactly one of node.Left and node.Right is nil, unless node is the root, in which case both are nil.
// node.Parent.Hash = hash(node.Hash, node.Right.Hash) or
// hash(node.Left.Hash, node.Hash), depending on whether node is a left/right child.
type SimpleProofNode struct {
	Hash []byte
	Parent *SimpleProofNode
	Left *SimpleProofNode // Left sibling (only one of Left,Right is set)
	Right *SimpleProofNode // Right sibling (only one of Left,Right is set)
}
// FlattenAunts walks from a leaf SimpleProofNode up to the root, collecting
// the sibling hash at each level; the result is the list of inner hashes
// needed to verify the leaf.
func (spn *SimpleProofNode) FlattenAunts() [][]byte {
	// Nonrecursive impl.
	innerHashes := [][]byte{}
	for node := spn; node != nil; node = node.Parent {
		switch {
		case node.Left != nil:
			innerHashes = append(innerHashes, node.Left.Hash)
		case node.Right != nil:
			innerHashes = append(innerHashes, node.Right.Hash)
		default:
			// Root: no siblings remain.
			return innerHashes
		}
	}
	return innerHashes
}
// trails[0].Hash is the leaf hash for items[0].
// trails[i].Parent.Parent....Parent == root for all i.
func trailsFromHashers(items []Hasher) (trails []*SimpleProofNode, root *SimpleProofNode) {
	// Recursive impl.
	switch len(items) {
	case 0:
		return nil, nil
	case 1:
		// A single item is both the leaf and the root of its subtree.
		trail := &SimpleProofNode{items[0].Hash(), nil, nil, nil}
		return []*SimpleProofNode{trail}, trail
	default:
		// Split into halves (left gets the extra item), build each side,
		// then link the two subtree roots under a combined parent.
		lefts, leftRoot := trailsFromHashers(items[:(len(items)+1)/2])
		rights, rightRoot := trailsFromHashers(items[(len(items)+1)/2:])
		rootHash := SimpleHashFromTwoHashes(leftRoot.Hash, rightRoot.Hash)
		root := &SimpleProofNode{rootHash, nil, nil, nil}
		leftRoot.Parent = root
		leftRoot.Right = rightRoot
		rightRoot.Parent = root
		rightRoot.Left = leftRoot
		return append(lefts, rights...), root
	}
}
// Sample program demonstrating decoupling with interface composition.
package main
import (
"errors"
"fmt"
"io"
"math/rand"
"time"
)
// init seeds the global rand source so each run produces different draws.
func init() {
	rand.Seed(time.Now().UnixNano())
}
// =============================================================================
// Data is the structure of the data we are copying.
type Data struct {
	Line string
}
// =============================================================================
// Puller declares behavior for pulling data.
type Puller interface {
	Pull(d *Data) error
}
// Storer declares behavior for storing data.
type Storer interface {
	Store(d Data) error
}
// PullStorer declares behavior for both pulling and storing, composed from
// Puller and Storer.
type PullStorer interface {
	Puller
	Storer
}
// =============================================================================
// Xenia is a system we need to pull data from.
type Xenia struct{}
// Pull knows how to pull data out of Xenia. It simulates a data source:
// draws 1 and 9 (of 10) end the stream with io.EOF, draw 5 returns a read
// error, and every other draw produces one line of data.
func (Xenia) Pull(d *Data) error {
	switch rand.Intn(10) {
	case 1, 9:
		return io.EOF
	case 5:
		return errors.New("Error reading data from Xenia")
	default:
		d.Line = "Data"
		fmt.Println("In:", d.Line)
		return nil
	}
}
// Pillar is a system we need to store data into.
type Pillar struct{}
// Store knows how to store data into Pillar. It simply echoes the line and
// never fails.
func (Pillar) Store(d Data) error {
	fmt.Println("Out:", d.Line)
	return nil
}
// =============================================================================
// System wraps Pullers and Stores together into a single system.
// Embedding both interfaces makes System satisfy PullStorer.
type System struct {
	Puller
	Storer
}
// =============================================================================

// pull fills data by calling p.Pull once per element; it returns the number
// of elements successfully pulled and the first error encountered, if any.
func pull(p Puller, data []Data) (int, error) {
	for i := 0; i < len(data); i++ {
		if err := p.Pull(&data[i]); err != nil {
			return i, err
		}
	}
	return len(data), nil
}
// store writes each element of data via s.Store; it returns the number of
// elements successfully stored and the first error encountered, if any.
func store(s Storer, data []Data) (int, error) {
	for i := range data {
		if err := s.Store(data[i]); err != nil {
			return i, err
		}
	}
	return len(data), nil
}
// Copy knows how to pull and store data from any System.
// It loops forever moving data in batches of the given size and returns
// only when pull reports an error (io.EOF signals a clean end of stream).
func Copy(ps PullStorer, batch int) error {
	data := make([]Data, batch)
	for {
		i, err := pull(ps, data)
		// Store whatever was pulled before any error occurred.
		if i > 0 {
			if _, err := store(ps, data[:i]); err != nil {
				return err
			}
		}
		if err != nil {
			return err
		}
	}
}
// =============================================================================
// main wires a concrete Xenia puller and Pillar storer into a System and
// copies batches of 3 until the stream ends; io.EOF is the expected clean
// termination, so only other errors are printed.
func main() {
	// Initialize the system for use.
	sys := System{
		Puller: Xenia{},
		Storer: Pillar{},
	}
	if err := Copy(&sys, 3); err != io.EOF {
		fmt.Println(err)
	}
}
package p726
import (
"bufio"
"bytes"
"sort"
"strconv"
"strings"
)
// Atom pairs an element symbol with its occurrence count.
type Atom struct {
	atom string
	cnt  int
}

// Atoms implements sort.Interface, ordering atoms alphabetically by symbol.
type Atoms []Atom

// Len is the number of elements in the collection.
func (a Atoms) Len() int { return len(a) }

// Less reports whether the element with index i should sort before the
// element with index j.
func (a Atoms) Less(i, j int) bool { return a[i].atom < a[j].atom }

// Swap swaps the elements with indexes i and j.
func (a Atoms) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
// parseFormula parses a sequence of units from reader and returns the total
// atom counts. It stops at end of input or at a ')', consuming the ')' so
// the caller (parseUnit) can read the trailing multiplier directly.
func parseFormula(reader *bufio.Reader) map[string]int {
	counts := make(map[string]int)
	b, err := reader.ReadByte()
	for err == nil && b != ')' {
		// Push the byte back so parseUnit sees the unit's first character.
		reader.UnreadByte()
		for atom, cnt := range parseUnit(reader) {
			counts[atom] += cnt
		}
		b, err = reader.ReadByte()
	}
	// The loop exits only on a read error (EOF) or after consuming ')'.
	// The original trailing branch `if err == nil && b != ')'` was
	// unreachable — it contradicts the loop's exit condition — and has
	// been removed.
	return counts
}
// parseUnit parses one unit — either a parenthesised sub-formula or a single
// element symbol (an uppercase letter followed by lowercase letters) — each
// followed by an optional decimal multiplier, and returns its atom counts.
func parseUnit(reader *bufio.Reader) map[string]int {
	counts := make(map[string]int)
	b, err := reader.ReadByte()
	if err == nil && b == '(' {
		// Parenthesised group: parseFormula consumes the matching ')',
		// then the multiplier scales every atom in the group.
		cnts := parseFormula(reader)
		digits := parseInt(reader)
		for k, v := range cnts {
			counts[k] += v * digits
		}
	} else if err == nil {
		// Element symbol: first byte already read; gather trailing
		// lowercase letters, pushing back the first non-matching byte.
		buf := new(bytes.Buffer)
		buf.WriteByte(b)
		b, err = reader.ReadByte()
		for err == nil && b >= 'a' && b <= 'z' {
			buf.WriteByte(b)
			b, err = reader.ReadByte()
		}
		if err == nil {
			reader.UnreadByte()
		}
		atom := buf.String()
		digits := parseInt(reader)
		counts[atom] += digits
	}
	return counts
}
func parseInt(reader *bufio.Reader) int {
b, err := reader.ReadByte()
val := 0
for err == nil && b <= '9' && b >= '0' {
val = val*10 + int(b-'0')
b, err = reader.ReadByte()
}
if val == 0 {
val = 1
}
if err == nil {
reader.UnreadByte()
}
return val
}
func countOfAtoms(formula string) string {
reader := bufio.NewReader(strings.NewReader(formula))
counts := parseFormula(reader)
atoms := make([]Atom, 0)
for k, v := range counts {
atoms = append(atoms, Atom{k, v})
}
sort.Sort(Atoms(atoms))
var buffer bytes.Buffer
for _, atom := range atoms {
buffer.WriteString(atom.atom)
if atom.cnt > 1 {
buffer.WriteString(strconv.Itoa(atom.cnt))
}
}
return buffer.String()
} | algorithms/p726/726.go | 0.657978 | 0.413418 | 726.go | starcoder |
package input
import (
"time"
"github.com/Jeffail/benthos/lib/pipeline"
"github.com/Jeffail/benthos/lib/types"
"github.com/Jeffail/benthos/lib/util"
)
//------------------------------------------------------------------------------
// WithPipeline is a type that wraps both an input type and a pipeline type
// by routing the input through the pipeline, and implements the input.Type
// interface in order to act like an ordinary input.
// WithPipeline is a type that wraps both an input type and a pipeline type
// by routing the input through the pipeline, and implements the input.Type
// interface in order to act like an ordinary input.
type WithPipeline struct {
	in Type
	pipe pipeline.Type
}
// WrapWithPipeline routes an input directly into a processing pipeline and
// returns a type that manages both and acts like an ordinary input.
func WrapWithPipeline(in Type, pipeConstructor pipeline.ConstructorFunc) (*WithPipeline, error) {
	pipe, err := pipeConstructor()
	if err != nil {
		return nil, err
	}
	// Couple the input's message stream into the pipeline.
	if err = util.Couple(in, pipe); err != nil {
		return nil, err
	}
	return &WithPipeline{
		in: in,
		pipe: pipe,
	}, nil
}
// WrapWithPipelines wraps an input with a variadic number of pipelines,
// chaining each new pipeline onto the previous wrapper. With zero
// constructors the result is nil.
func WrapWithPipelines(in Type, pipeConstructors ...pipeline.ConstructorFunc) (*WithPipeline, error) {
	var pipe *WithPipeline
	for i, ctor := range pipeConstructors {
		src := in
		if i > 0 {
			src = pipe
		}
		wrapped, err := WrapWithPipeline(src, ctor)
		if err != nil {
			return nil, err
		}
		pipe = wrapped
	}
	return pipe, nil
}
//------------------------------------------------------------------------------
// MessageChan returns the channel used for consuming messages from this input.
// Messages come from the end of the pipeline, not from the raw input.
func (i *WithPipeline) MessageChan() <-chan types.Message {
	return i.pipe.MessageChan()
}
// StartListening starts the type listening to a response channel from a
// consumer. Responses are delivered to the pipeline.
func (i *WithPipeline) StartListening(resChan <-chan types.Response) error {
	return i.pipe.StartListening(resChan)
}
//------------------------------------------------------------------------------
// CloseAsync triggers a closure of this object but does not block.
// Closure is initiated at the wrapped input; NOTE(review): shutdown
// presumably propagates from the input through the coupled pipeline —
// confirm, since WaitForClose below waits on the pipeline rather than
// on the input.
func (i *WithPipeline) CloseAsync() {
	i.in.CloseAsync()
}
// WaitForClose is a blocking call to wait until the object has finished closing
// down and cleaning up resources.
func (i *WithPipeline) WaitForClose(timeout time.Duration) error {
	return i.pipe.WaitForClose(timeout)
}
//------------------------------------------------------------------------------
package util
import (
"sort"
"sync"
)
// IterAsync executes the given function f up to n times concurrently.
// Each call is done in a separate goroutine. On each iteration, the function f
// will be called with a unique sequential index i such that the index can be
// used to reference an element in an array or slice. If an error is returned
// by the function f for any index, an error will be returned. Otherwise,
// a nil result will be returned once all function calls have completed.
// IterAsync runs f(i) for every i in [0, n), each call in its own goroutine.
// It returns the first error reported (possibly before the remaining calls
// have finished) or nil once every call has completed without error.
func IterAsync(n int, f func(i int) error) error {
	var wg sync.WaitGroup
	errCh := make(chan error, n)
	wg.Add(n)
	for i := 0; i < n; i++ {
		go func(idx int) {
			defer wg.Done()
			if err := f(idx); err != nil {
				errCh <- err
			}
		}(i)
	}
	go func() {
		wg.Wait()
		close(errCh)
	}()
	if err, ok := <-errCh; ok {
		return err
	}
	return nil
}
// ExecuteAsync executes the given function f up to n times concurrently and
// returns a new slice holding the result of each call. Each call is done in
// a separate goroutine; on each iteration f is called with a unique
// sequential index i. If any call returns an error, that error is returned
// and the results are discarded. NOTE: results are collected in completion
// order, which is nondeterministic — use ExecuteOrderedAsync for results in
// index order.
func ExecuteAsync(n int, f func(i int) (interface{}, error)) ([]interface{}, error) {
	wg := sync.WaitGroup{}
	asyncErrors := make(chan error, n)
	asyncResults := make(chan interface{}, n)
	wg.Add(n)
	for i := 0; i < n; i++ {
		go func(j int) {
			result, err := f(j)
			if err != nil {
				asyncErrors <- err
			} else {
				asyncResults <- result
			}
			wg.Done()
		}(i)
	}
	// Close both channels once every goroutine has reported.
	go func() {
		wg.Wait()
		close(asyncErrors)
		close(asyncResults)
	}()
	// Returns the first error received, if any.
	for err := range asyncErrors {
		return nil, err
	}
	results := make([]interface{}, 0, n)
	for result := range asyncResults {
		results = append(results, result)
	}
	return results, nil
}
// ExecuteOrderedAsync executes the given function f up to n times
// concurrently and returns a new slice where result i comes from the call
// with index i. Each call is done in a separate goroutine; on each iteration
// f is called with a unique sequential index. If any call returns an error,
// that error is returned and the results are discarded.
func ExecuteOrderedAsync(n int, f func(i int) (interface{}, error)) ([]interface{}, error) {
	wg := sync.WaitGroup{}
	asyncErrors := make(chan error, n)
	asyncResults := make(chan *asyncResult, n)
	wg.Add(n)
	for i := 0; i < n; i++ {
		go func(j int) {
			result, err := f(j)
			if err != nil {
				asyncErrors <- err
			} else {
				// Tag each result with its index so order can be restored.
				asyncResults <- &asyncResult{
					i: j,
					result: result,
				}
			}
			wg.Done()
		}(i)
	}
	go func() {
		wg.Wait()
		close(asyncErrors)
		close(asyncResults)
	}()
	// Returns the first error received, if any.
	for err := range asyncErrors {
		return nil, err
	}
	sortedResults := make([]*asyncResult, 0, n)
	for result := range asyncResults {
		sortedResults = append(sortedResults, result)
	}
	// Restore index order before unwrapping the results.
	sort.Slice(sortedResults, func(i, j int) bool {
		return sortedResults[i].i < sortedResults[j].i
	})
	results := make([]interface{}, n)
	for i, result := range sortedResults {
		results[i] = result.result
	}
	return results, nil
}
// asyncResult pairs a call's result with its originating index.
type asyncResult struct {
	i int
	result interface{}
}
package schema
import (
"errors"
"fmt"
"unicode"
)
// DataType enumerates the value types a schema property may hold.
type DataType string
const (
	// DataTypeCRef The data type is a cross-reference, it is starting with a capital letter
	DataTypeCRef DataType = "cref"
	// DataTypeString The data type is a value of type string
	DataTypeString DataType = "string"
	// DataTypeText The data type is a value of type text
	DataTypeText DataType = "text"
	// DataTypeInt The data type is a value of type int
	DataTypeInt DataType = "int"
	// DataTypeNumber The data type is a value of type number/float
	DataTypeNumber DataType = "number"
	// DataTypeBoolean The data type is a value of type boolean
	DataTypeBoolean DataType = "boolean"
	// DataTypeDate The data type is a value of type date
	DataTypeDate DataType = "date"
	// DataTypeGeoCoordinates is used to represent geo coordintaes, i.e. latitude
	// and longitude pairs of locations on earth
	DataTypeGeoCoordinates DataType = "geoCoordinates"
	// DataTypePhoneNumber represents a parsed/to-be-parsed phone number
	DataTypePhoneNumber DataType = "phoneNumber"
	// DataTypeBlob represents a base64 encoded data
	DataTypeBlob DataType = "blob"
	// DataTypeStringArray The data type is a value of type string array
	DataTypeStringArray DataType = "string[]"
	// DataTypeTextArray The data type is a value of type text array
	DataTypeTextArray DataType = "text[]"
	// DataTypeIntArray The data type is a value of type int array
	DataTypeIntArray DataType = "int[]"
	// DataTypeNumberArray The data type is a value of type number/float array
	DataTypeNumberArray DataType = "number[]"
	// DataTypeBooleanArray The data type is a value of type boolean array
	DataTypeBooleanArray DataType = "boolean[]"
	// DataTypeDateArray The data type is a value of type date array
	DataTypeDateArray DataType = "date[]"
)
// PrimitiveDataTypes lists every non-reference data type, including the
// array variants.
var PrimitiveDataTypes []DataType = []DataType{DataTypeString, DataTypeText, DataTypeInt, DataTypeNumber, DataTypeBoolean, DataTypeDate, DataTypeGeoCoordinates, DataTypePhoneNumber, DataTypeBlob, DataTypeStringArray, DataTypeTextArray, DataTypeIntArray, DataTypeNumberArray, DataTypeBooleanArray, DataTypeDateArray}
// PropertyKind distinguishes primitive-typed properties from reference
// (cross-ref) properties.
type PropertyKind int

const (
	// PropertyKindPrimitive marks a property holding a primitive DataType value.
	PropertyKindPrimitive PropertyKind = 1
	// PropertyKindRef marks a property referencing one or more classes.
	PropertyKindRef PropertyKind = 2
)
// PropertyDataType describes the resolved type of a schema property: either
// a single primitive DataType or a reference to one or more classes.
type PropertyDataType interface {
	// Kind reports whether the property is primitive or a reference.
	Kind() PropertyKind
	// IsPrimitive reports whether the property holds a primitive DataType.
	IsPrimitive() bool
	// AsPrimitive returns the primitive DataType; implementations may panic
	// when the property is not primitive (check IsPrimitive first).
	AsPrimitive() DataType
	// IsReference reports whether the property references classes.
	IsReference() bool
	// Classes returns the referenced class names; implementations may panic
	// when the property is not a reference (check IsReference first).
	Classes() []ClassName
	// ContainsClass reports whether name is among the referenced classes.
	ContainsClass(name ClassName) bool
}
// propertyDataType is the concrete PropertyDataType implementation; exactly
// one of primitiveType or classes is meaningful, selected by kind.
type propertyDataType struct {
	kind          PropertyKind
	primitiveType DataType    // valid when kind == PropertyKindPrimitive
	classes       []ClassName // valid when kind == PropertyKindRef
}
// Kind returns whether this property is primitive or a reference.
func (p *propertyDataType) Kind() PropertyKind {
	return p.kind
}

// IsPrimitive reports whether this property holds a primitive DataType.
func (p *propertyDataType) IsPrimitive() bool {
	return p.kind == PropertyKindPrimitive
}

// AsPrimitive returns the primitive DataType. It panics when the property is
// not primitive; call IsPrimitive first.
func (p *propertyDataType) AsPrimitive() DataType {
	if p.kind != PropertyKindPrimitive {
		panic("not primitive type")
	}
	return p.primitiveType
}

// IsReference reports whether this property references classes.
func (p *propertyDataType) IsReference() bool {
	return p.kind == PropertyKindRef
}
// Classes returns the referenced class names. It panics when the property is
// not a reference type; call IsReference first.
func (p *propertyDataType) Classes() []ClassName {
	if p.kind != PropertyKindRef {
		panic("not MultipleRef type")
	}
	return p.classes
}
// ContainsClass reports whether needle is one of the referenced classes.
// It panics when the property is not a reference type; call IsReference first.
func (p *propertyDataType) ContainsClass(needle ClassName) bool {
	if p.kind != PropertyKindRef {
		panic("not MultipleRef type")
	}
	for i := range p.classes {
		if p.classes[i] == needle {
			return true
		}
	}
	return false
}
// Based on the schema, return a valid description of the defined datatype
func (s *Schema) FindPropertyDataType(dataType []string) (PropertyDataType, error) {
if len(dataType) < 1 {
return nil, errors.New("dataType must have at least one element")
} else if len(dataType) == 1 {
someDataType := dataType[0]
if len(someDataType) == 0 {
return nil, fmt.Errorf("dataType cannot be an empty string")
}
firstLetter := rune(someDataType[0])
if unicode.IsLower(firstLetter) {
switch someDataType {
case string(DataTypeString), string(DataTypeText),
string(DataTypeInt), string(DataTypeNumber),
string(DataTypeBoolean), string(DataTypeDate), string(DataTypeGeoCoordinates),
string(DataTypePhoneNumber), string(DataTypeBlob),
string(DataTypeStringArray), string(DataTypeTextArray),
string(DataTypeIntArray), string(DataTypeNumberArray),
string(DataTypeBooleanArray), string(DataTypeDateArray):
return &propertyDataType{
kind: PropertyKindPrimitive,
primitiveType: DataType(someDataType),
}, nil
default:
return nil, fmt.Errorf("Unknown primitive data type '%s'", someDataType)
}
}
}
/* implies len(dataType) > 1, or first element is a class already */
var classes []ClassName
for _, someDataType := range dataType {
if ValidNetworkClassName(someDataType) {
// this is a network instance
classes = append(classes, ClassName(someDataType))
} else {
// this is a local reference
className, err := ValidateClassName(someDataType)
if err != nil {
return nil, err
}
if s.FindClassByName(className) == nil {
return nil, fmt.Errorf("SingleRef class name '%s' does not exist", className)
}
classes = append(classes, className)
}
}
return &propertyDataType{
kind: PropertyKindRef,
classes: classes,
}, nil
} | entities/schema/data_types.go | 0.587352 | 0.743098 | data_types.go | starcoder |
package iso20022
// CorporateActionRate70 specifies the set of rates and rate-derived amounts
// attached to a corporate action event; every field is optional.
type CorporateActionRate70 struct {
	// Cash dividend amount per equity before deductions or allowances have been made.
	GrossDividendRate []*GrossDividendRateFormat21Choice `xml:"GrssDvddRate,omitempty"`
	// Cash dividend amount per equity after deductions or allowances have been made.
	NetDividendRate []*NetDividendRateFormat23Choice `xml:"NetDvddRate,omitempty"`
	// Public index rate applied to the amount paid to adjust it to inflation.
	IndexFactor *RateAndAmountFormat39Choice `xml:"IndxFctr,omitempty"`
	// Actual interest rate used for the payment of the interest for the specified interest period.
	InterestRateUsedForPayment []*InterestRateUsedForPaymentFormat7Choice `xml:"IntrstRateUsdForPmt,omitempty"`
	// A maximum percentage of shares available through the over subscription privilege, usually a percentage of the basic subscription shares, for example, an account owner subscribing to 100 shares may over subscribe to a maximum of 50 additional shares when the over subscription maximum is 50 percent.
	MaximumAllowedOversubscriptionRate *PercentageRate `xml:"MaxAllwdOvrsbcptRate,omitempty"`
	// Proportionate allocation used for the offer.
	ProrationRate *PercentageRate `xml:"PrratnRate,omitempty"`
	// Percentage of a cash distribution that will be withheld by the tax authorities of the jurisdiction of the issuer, for which a relief at source and/or reclaim may be possible.
	WithholdingTaxRate []*RateAndAmountFormat40Choice `xml:"WhldgTaxRate,omitempty"`
	// Rate at which the income will be withheld by a jurisdiction other than the jurisdiction of the issuer's country of tax incorporation, for which a relief at source and/or reclaim may be possible. It is levied in complement or offset of the withholding tax rate (TAXR) levied by the jurisdiction of the issuer's tax domicile.
	SecondLevelTax []*RateAndAmountFormat40Choice `xml:"ScndLvlTax,omitempty"`
	// Rate used for additional tax that cannot be categorised.
	AdditionalTax *RateAndAmountFormat39Choice `xml:"AddtlTax,omitempty"`
	// Amount included in the dividend/NAV that is identified as gains directly or indirectly derived from interest payments, for example, in the context of the EU Savings directive.
	TaxableIncomePerDividendShare []*RateTypeAndAmountAndStatus26 `xml:"TaxblIncmPerDvddShr,omitempty"`
}
// AddGrossDividendRate appends an empty entry to GrossDividendRate and
// returns it for the caller to populate.
func (c *CorporateActionRate70) AddGrossDividendRate() *GrossDividendRateFormat21Choice {
	newValue := new(GrossDividendRateFormat21Choice)
	c.GrossDividendRate = append(c.GrossDividendRate, newValue)
	return newValue
}

// AddNetDividendRate appends an empty entry to NetDividendRate and returns it.
func (c *CorporateActionRate70) AddNetDividendRate() *NetDividendRateFormat23Choice {
	newValue := new(NetDividendRateFormat23Choice)
	c.NetDividendRate = append(c.NetDividendRate, newValue)
	return newValue
}

// AddIndexFactor allocates (replacing any previous value) and returns IndexFactor.
func (c *CorporateActionRate70) AddIndexFactor() *RateAndAmountFormat39Choice {
	c.IndexFactor = new(RateAndAmountFormat39Choice)
	return c.IndexFactor
}

// AddInterestRateUsedForPayment appends an empty entry to
// InterestRateUsedForPayment and returns it.
func (c *CorporateActionRate70) AddInterestRateUsedForPayment() *InterestRateUsedForPaymentFormat7Choice {
	newValue := new(InterestRateUsedForPaymentFormat7Choice)
	c.InterestRateUsedForPayment = append(c.InterestRateUsedForPayment, newValue)
	return newValue
}

// SetMaximumAllowedOversubscriptionRate stores value as a PercentageRate.
func (c *CorporateActionRate70) SetMaximumAllowedOversubscriptionRate(value string) {
	c.MaximumAllowedOversubscriptionRate = (*PercentageRate)(&value)
}

// SetProrationRate stores value as a PercentageRate.
func (c *CorporateActionRate70) SetProrationRate(value string) {
	c.ProrationRate = (*PercentageRate)(&value)
}

// AddWithholdingTaxRate appends an empty entry to WithholdingTaxRate and returns it.
func (c *CorporateActionRate70) AddWithholdingTaxRate() *RateAndAmountFormat40Choice {
	newValue := new(RateAndAmountFormat40Choice)
	c.WithholdingTaxRate = append(c.WithholdingTaxRate, newValue)
	return newValue
}

// AddSecondLevelTax appends an empty entry to SecondLevelTax and returns it.
func (c *CorporateActionRate70) AddSecondLevelTax() *RateAndAmountFormat40Choice {
	newValue := new(RateAndAmountFormat40Choice)
	c.SecondLevelTax = append(c.SecondLevelTax, newValue)
	return newValue
}

// AddAdditionalTax allocates (replacing any previous value) and returns AdditionalTax.
func (c *CorporateActionRate70) AddAdditionalTax() *RateAndAmountFormat39Choice {
	c.AdditionalTax = new(RateAndAmountFormat39Choice)
	return c.AdditionalTax
}

// AddTaxableIncomePerDividendShare appends an empty entry to
// TaxableIncomePerDividendShare and returns it.
func (c *CorporateActionRate70) AddTaxableIncomePerDividendShare() *RateTypeAndAmountAndStatus26 {
	newValue := new(RateTypeAndAmountAndStatus26)
	c.TaxableIncomePerDividendShare = append(c.TaxableIncomePerDividendShare, newValue)
	return newValue
} | CorporateActionRate70.go | 0.836821 | 0.621455 | CorporateActionRate70.go | starcoder
package atomic
import "sync/atomic"
// AtomicInt implements an int value with atomic load/store/update semantics.
// The zero value holds 0 and is ready to use; NewAtomicInt seeds a non-zero
// initial value.
type AtomicInt struct {
	val int64 // the value, always accessed through sync/atomic
}

// NewAtomicInt generates a new AtomicInt instance holding value.
func NewAtomicInt(value int) *AtomicInt {
	return &AtomicInt{val: int64(value)}
}

// AddAndGet atomically adds delta to the current value and returns the result.
func (ai *AtomicInt) AddAndGet(delta int) int {
	return int(atomic.AddInt64(&ai.val, int64(delta)))
}

// CompareAndSet atomically sets the value to update if the current value
// equals expect. It reports whether the swap took place.
func (ai *AtomicInt) CompareAndSet(expect int, update int) bool {
	return atomic.CompareAndSwapInt64(&ai.val, int64(expect), int64(update))
}

// DecrementAndGet atomically decrements the current value by one and
// returns the result.
func (ai *AtomicInt) DecrementAndGet() int {
	return int(atomic.AddInt64(&ai.val, -1))
}

// Get atomically retrieves the current value.
func (ai *AtomicInt) Get() int {
	return int(atomic.LoadInt64(&ai.val))
}

// GetAndAdd atomically adds delta to the current value and returns the
// previous value.
func (ai *AtomicInt) GetAndAdd(delta int) int {
	d := int64(delta)
	// AddInt64 yields the new value; subtract the delta to recover the old one.
	return int(atomic.AddInt64(&ai.val, d) - d)
}

// GetAndDecrement atomically decrements the current value by one and
// returns the previous value.
func (ai *AtomicInt) GetAndDecrement() int {
	return int(atomic.AddInt64(&ai.val, -1) + 1)
}

// GetAndIncrement atomically increments the current value by one and
// returns the previous value.
func (ai *AtomicInt) GetAndIncrement() int {
	return int(atomic.AddInt64(&ai.val, 1) - 1)
}

// GetAndSet atomically sets the current value to newValue and returns the
// old value.
func (ai *AtomicInt) GetAndSet(newValue int) int {
	return int(atomic.SwapInt64(&ai.val, int64(newValue)))
}

// IncrementAndGet atomically increments the current value by one and
// returns the result.
func (ai *AtomicInt) IncrementAndGet() int {
	return int(atomic.AddInt64(&ai.val, 1))
}

// Set atomically sets the current value to newValue.
func (ai *AtomicInt) Set(newValue int) {
	atomic.StoreInt64(&ai.val, int64(newValue))
}
package versionary
import (
"sort"
)
// TextValue represents a key-value pair where the value is a string.
type TextValue struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}

// NumValue represents a key-value pair where the value is a number.
type NumValue struct {
	Key   string  `json:"key"`
	Value float64 `json:"value"`
}
// Record represents a single item in a database table. PartKeyValue and
// SortKeyValue form the primary key and are required; every other field is
// optional. Note that PartKeyValue holds the full pipe-delimited partition
// key: rowName|partKeyName|partKeyValue.
type Record struct {
	PartKeyValue string
	SortKeyValue string
	JsonValue    []byte
	TextValue    string
	NumericValue float64
	TimeToLive   int64
}

// IsValid returns true if the Record is valid (both primary-key fields are supplied).
func (r *Record) IsValid() bool {
	if r.PartKeyValue == "" {
		return false
	}
	return r.SortKeyValue != ""
}

// RecordSet provides an in-memory data structure for storing a set of
// Records, used for lightweight testing purposes. It maps partition key to
// sort key to Record.
type RecordSet map[string]map[string]Record

// SetRecord adds a Record to the RecordSet, allocating the underlying maps
// on demand.
func (rs *RecordSet) SetRecord(r Record) {
	if *rs == nil {
		*rs = RecordSet{}
	}
	partition := (*rs)[r.PartKeyValue]
	if partition == nil {
		partition = map[string]Record{}
		(*rs)[r.PartKeyValue] = partition
	}
	partition[r.SortKeyValue] = r
}

// SetRecords adds a list of Records to the RecordSet.
func (rs *RecordSet) SetRecords(records []Record) {
	if *rs == nil {
		*rs = RecordSet{}
	}
	for i := range records {
		rs.SetRecord(records[i])
	}
}

// GetRecord returns the Record stored under the given keys; the boolean
// reports whether it was found.
func (rs *RecordSet) GetRecord(partKey string, sortKey string) (Record, bool) {
	if *rs == nil {
		return Record{}, false
	}
	partition := (*rs)[partKey]
	if partition == nil {
		return Record{}, false
	}
	r, ok := partition[sortKey]
	return r, ok
}

// GetRecords returns the Records found for the given sort keys, in the
// order requested; missing keys are skipped.
func (rs *RecordSet) GetRecords(partKey string, sortKeys []string) []Record {
	var found []Record
	if *rs == nil {
		return found
	}
	partition := (*rs)[partKey]
	if partition == nil {
		return found
	}
	for _, sk := range sortKeys {
		if r, ok := partition[sk]; ok {
			found = append(found, r)
		}
	}
	return found
}

// GetSortKeys returns the complete, sorted list of sort keys for the given
// partition key (an empty, non-nil slice when there are none).
func (rs *RecordSet) GetSortKeys(partKey string) []string {
	if *rs == nil {
		return []string{}
	}
	partition := (*rs)[partKey]
	if partition == nil {
		return []string{}
	}
	keys := make([]string, 0, len(partition))
	for k := range partition {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	return keys
}

// DeleteRecordForKeys removes the Record stored under the given keys, if any.
func (rs *RecordSet) DeleteRecordForKeys(partKey string, sortKey string) {
	if *rs == nil {
		return
	}
	if partition := (*rs)[partKey]; partition != nil {
		delete(partition, sortKey)
	}
}

// DeleteRecordsForKey removes all Records for the given partition key.
func (rs *RecordSet) DeleteRecordsForKey(partKey string) {
	if *rs == nil {
		return
	}
	delete(*rs, partKey)
}

// DeleteRecord removes the provided Record from the RecordSet, keyed by its
// partition and sort key values.
func (rs *RecordSet) DeleteRecord(r Record) {
	if *rs == nil {
		return
	}
	if partition := (*rs)[r.PartKeyValue]; partition != nil {
		delete(partition, r.SortKeyValue)
	}
}

// DeleteRecords removes each of the provided Records from the RecordSet.
func (rs *RecordSet) DeleteRecords(records []Record) {
	if *rs == nil {
		return
	}
	for i := range records {
		rs.DeleteRecord(records[i])
	}
}

// RecordsExist reports whether any Records are stored under the given
// partition key.
func (rs *RecordSet) RecordsExist(partKey string) bool {
	if *rs == nil {
		return false
	}
	partition, ok := (*rs)[partKey]
	return ok && len(partition) > 0
}

// RecordExists reports whether a Record is stored under the given partition
// and sort key.
func (rs *RecordSet) RecordExists(partKey string, sortKey string) bool {
	if *rs == nil {
		return false
	}
	partition := (*rs)[partKey]
	if partition == nil {
		return false
	}
	_, ok := partition[sortKey]
	return ok
}
package xpath
import (
"errors"
"fmt"
)
// NodeType represents a type of XPath node.
type NodeType int

const (
	// RootNode is a root node of the XML document or node tree.
	RootNode NodeType = iota
	// ElementNode is an element, such as <element>.
	ElementNode
	// AttributeNode is an attribute, such as id='123'.
	AttributeNode
	// TextNode is the text content of a node.
	TextNode
	// CommentNode is a comment node, such as <!-- my comment -->.
	CommentNode
	// allNode matches any type of node; used only inside this package for
	// predicate matching.
	allNode
)
// NodeNavigator provides a cursor model for navigating XML data: one
// implementation-defined position plus movement methods that return false
// (leaving the position unchanged) when the requested move is impossible.
type NodeNavigator interface {
	// NodeType returns the XPathNodeType of the current node.
	NodeType() NodeType
	// LocalName gets the Name of the current node.
	LocalName() string
	// Prefix returns the namespace prefix associated with the current node.
	Prefix() string
	// Value gets the value of the current node.
	Value() string
	// Copy does a deep copy of the NodeNavigator and all its components.
	Copy() NodeNavigator
	// MoveToRoot moves the NodeNavigator to the root node of the current node.
	MoveToRoot()
	// Reset moves the NodeNavigator to the 'local original' node.
	Reset()
	// ResetToOriginal resets 'local original' to 'original' and moves the
	// NodeNavigator to the original node.
	ResetToOriginal()
	// SetCurrentAsOriginal sets 'local original' to the current node.
	SetCurrentAsOriginal()
	// MoveToParent moves the NodeNavigator to the parent node of the current node.
	MoveToParent() bool
	// MoveToNextAttribute moves the NodeNavigator to the next attribute on the current node.
	MoveToNextAttribute() bool
	// MoveToChild moves the NodeNavigator to the first child node of the current node.
	MoveToChild() bool
	// MoveToFirst moves the NodeNavigator to the first sibling node of the current node.
	MoveToFirst() bool
	// MoveToNext moves the NodeNavigator to the next sibling node of the current node.
	MoveToNext() bool
	// MoveToPrevious moves the NodeNavigator to the previous sibling node of the current node.
	MoveToPrevious() bool
	// MoveTo moves the NodeNavigator to the same position as the specified NodeNavigator.
	MoveTo(NodeNavigator) bool
}
// NodeIterator walks the set of nodes matched by a query, one node per
// MoveNext call.
type NodeIterator struct {
	node  NodeNavigator // navigator positioned at the current match
	query query         // compiled query producing the matches
}
// Current returns the node the iterator is currently positioned on.
func (t *NodeIterator) Current() NodeNavigator {
	return t.node
}
// MoveNext advances the iterator to the next matching node and reports
// whether one was found.
func (t *NodeIterator) MoveNext() bool {
	next := t.query.Select(t)
	if next == nil {
		return false
	}
	// Prefer repositioning the existing navigator; fall back to a deep copy
	// when it cannot move to the selected node.
	if !t.node.MoveTo(next) {
		t.node = next.Copy()
	}
	return true
}
// Select selects a node set using the specified XPath expression.
//
// Deprecated: use Compile followed by Expr.Select instead; this helper
// panics when the expression fails to compile.
func Select(root NodeNavigator, expr string) *NodeIterator {
	exp, err := Compile(expr)
	if err != nil {
		panic(err)
	}
	return exp.Select(root)
}
// Expr is a compiled XPath expression ready for querying.
type Expr struct {
	s string // the original expression text
	q query  // the compiled query tree
}
// iteratorFunc adapts a plain function to the iterator shape expected by
// query.Evaluate; calling Current invokes the function.
type iteratorFunc func() NodeNavigator

// Current returns the navigator produced by the underlying function.
func (f iteratorFunc) Current() NodeNavigator {
	return f()
}
// Evaluate returns the result of the expression over root. The result type
// is one of: bool, float64, string, or *NodeIterator.
func (expr *Expr) Evaluate(root NodeNavigator) interface{} {
	it := iteratorFunc(func() NodeNavigator { return root })
	val := expr.q.Evaluate(it)
	// A node-set result is surfaced as a fresh iterator over a cloned query.
	if _, isQuery := val.(query); isQuery {
		return &NodeIterator{query: expr.q.Clone(), node: root}
	}
	return val
}
// Select returns an iterator over the nodes under root matched by this
// compiled expression.
func (expr *Expr) Select(root NodeNavigator) *NodeIterator {
	return &NodeIterator{query: expr.q.Clone(), node: root}
}
// String returns the original XPath expression string.
func (expr *Expr) String() string {
	return expr.s
}
// Compile compiles an XPath expression string into an Expr. It returns an
// error for an empty expression, a build failure, or an expression that
// compiles to no query (e.g. one referencing an undeclared variable).
func Compile(expr string) (*Expr, error) {
	if expr == "" {
		return nil, errors.New("expr expression is nil")
	}
	qy, err := build(expr)
	if err != nil {
		return nil, err
	}
	if qy == nil {
		// fix: fmt.Errorf(fmt.Sprintf(...)) was redundant and treats the
		// formatted result as a format string (go vet: non-constant format).
		return nil, fmt.Errorf("undeclared variable in XPath expression: %s", expr)
	}
	return &Expr{s: expr, q: qy}, nil
}
// MustCompile compiles an XPath expression string, ignoring any compile error.
//
// NOTE(review): despite the Must prefix this does not panic on failure — it
// returns an Expr backed by a no-op query instead. Confirm callers rely on
// that before aligning it with the usual Must* convention.
func MustCompile(expr string) *Expr {
	exp, err := Compile(expr)
	if err != nil {
		return &Expr{s: expr, q: nopQuery{}}
	}
	return exp
} | xpath.go | 0.68056 | 0.516047 | xpath.go | starcoder
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.