code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package avro
import (
"fmt"
"time"
)
// ReadNext reads the next Avro element as a generic interface.
//
// The concrete Go type of the returned value depends on the schema type and
// on any logical type attached to it (e.g. Int+Date -> time.Time,
// Bytes+Decimal -> *big.Rat via ratFromBytes). On failure the reader's error
// state is set via ReportError and nil is returned.
func (r *Reader) ReadNext(schema Schema) interface{} {
	var ls LogicalSchema
	if lts, ok := schema.(LogicalTypeSchema); ok {
		ls = lts.Logical()
	}

	switch schema.Type() {
	case Boolean:
		return r.ReadBool()

	case Int:
		if ls != nil {
			switch ls.Type() {
			case Date:
				i := r.ReadInt()
				// Days since the Unix epoch, converted to seconds.
				sec := int64(i) * int64(24*time.Hour/time.Second)
				return time.Unix(sec, 0).UTC()
			case TimeMillis:
				return time.Duration(r.ReadInt()) * time.Millisecond
			}
		}
		return int(r.ReadInt())

	case Long:
		if ls != nil {
			switch ls.Type() {
			case TimeMicros:
				return time.Duration(r.ReadLong()) * time.Microsecond
			case TimestampMillis:
				i := r.ReadLong()
				sec := i / 1e3
				nsec := (i - sec*1e3) * 1e6
				return time.Unix(sec, nsec).UTC()
			case TimestampMicros:
				i := r.ReadLong()
				sec := i / 1e6
				nsec := (i - sec*1e6) * 1e3
				return time.Unix(sec, nsec).UTC()
			}
		}
		return r.ReadLong()

	case Float:
		return r.ReadFloat()

	case Double:
		return r.ReadDouble()

	case String:
		return r.ReadString()

	case Bytes:
		if ls != nil && ls.Type() == Decimal {
			dec := ls.(*DecimalLogicalSchema)
			return ratFromBytes(r.ReadBytes(), dec.Scale())
		}
		return r.ReadBytes()

	case Record:
		fields := schema.(*RecordSchema).Fields()
		obj := make(map[string]interface{}, len(fields))
		for _, field := range fields {
			obj[field.Name()] = r.ReadNext(field.Type())
		}
		return obj

	case Ref:
		return r.ReadNext(schema.(*RefSchema).Schema())

	case Enum:
		symbols := schema.(*EnumSchema).Symbols()
		idx := int(r.ReadInt())
		if idx < 0 || idx >= len(symbols) {
			r.ReportError("Read", "unknown enum symbol")
			return nil
		}
		return symbols[idx]

	case Array:
		// Hoist the items schema out of the callback so the type assertion
		// runs once instead of once per element.
		items := schema.(*ArraySchema).Items()
		arr := []interface{}{}
		r.ReadArrayCB(func(r *Reader) bool {
			arr = append(arr, r.ReadNext(items))
			return true
		})
		return arr

	case Map:
		// Same hoisting as the Array case.
		values := schema.(*MapSchema).Values()
		obj := map[string]interface{}{}
		r.ReadMapCB(func(r *Reader, field string) bool {
			obj[field] = r.ReadNext(values)
			return true
		})
		return obj

	case Union:
		types := schema.(*UnionSchema).Types()
		idx := int(r.ReadLong())
		if idx < 0 || idx >= len(types) {
			r.ReportError("Read", "unknown union type")
			return nil
		}
		resolved := types[idx]
		if resolved.Type() == Null {
			return nil
		}
		return r.ReadNext(resolved)

	case Fixed:
		size := schema.(*FixedSchema).Size()
		obj := make([]byte, size)
		r.Read(obj)
		if ls != nil && ls.Type() == Decimal {
			dec := ls.(*DecimalLogicalSchema)
			return ratFromBytes(obj, dec.Scale())
		}
		return obj

	default:
		r.ReportError("Read", fmt.Sprintf("unexpected schema type: %v", schema.Type()))
		return nil
	}
}
// ReadArrayCB reads an array, invoking callback once per decoded item.
// Arrays are encoded as a sequence of blocks; a zero-length block header
// terminates the array.
func (r *Reader) ReadArrayCB(callback func(*Reader) bool) {
	for {
		count, _ := r.ReadBlockHeader()
		if count == 0 {
			return
		}
		for n := int(count); n > 0; n-- {
			callback(r)
		}
	}
}
// ReadMapCB reads an array with a callback per item.
func (r *Reader) ReadMapCB(callback func(*Reader, string) bool) {
for {
l, _ := r.ReadBlockHeader()
if l == 0 {
break
}
for i := 0; i < int(l); i++ {
field := r.ReadString()
callback(r, field)
}
}
} | reader_generic.go | 0.553747 | 0.402891 | reader_generic.go | starcoder |
package genericjmx
// MBeanMap is a map from the service name to the MBean definition that the
// service exposes.
type MBeanMap map[string]MBean
// MergeWith combines the current MBeanMap with the one given as an argument
// and returns a new map with values from both maps. Entries from m2 win on
// key collisions; neither input map is modified.
func (m MBeanMap) MergeWith(m2 MBeanMap) MBeanMap {
	merged := make(MBeanMap, len(m)+len(m2))
	for name, bean := range m {
		merged[name] = bean
	}
	for name, bean := range m2 {
		merged[name] = bean
	}
	return merged
}
// MBeanNames returns a list of the MBean names (the key values of the map).
// Ordering is unspecified because Go map iteration order is random.
func (m MBeanMap) MBeanNames() []string {
	// Preallocate capacity to avoid repeated slice growth.
	names := make([]string, 0, len(m))
	for n := range m {
		names = append(names, n)
	}
	return names
}
// DefaultMBeans are basic JVM memory and threading metrics that are common to
// all JMX applications.
var DefaultMBeans MBeanMap

// defaultMBeanYAML is the YAML source describing DefaultMBeans; presumably it
// is deserialized into DefaultMBeans during package initialization elsewhere —
// TODO confirm.
const defaultMBeanYAML = `
garbage_collector:
 objectName: "java.lang:type=GarbageCollector,*"
 instancePrefix: "gc-"
 instanceFrom:
 - "name"
 values:
 - type: "invocations"
 table: false
 attribute: "CollectionCount"
 - type: "total_time_in_ms"
 instancePrefix: "collection_time"
 table: false
 attribute: "CollectionTime"
memory-heap:
 objectName: java.lang:type=Memory
 instancePrefix: memory-heap
 values:
 - type: jmx_memory
 table: true
 attribute: HeapMemoryUsage
memory-nonheap:
 objectName: java.lang:type=Memory
 instancePrefix: memory-nonheap
 values:
 - type: jmx_memory
 table: true
 attribute: NonHeapMemoryUsage
memory_pool:
 objectName: java.lang:type=MemoryPool,*
 instancePrefix: memory_pool-
 instanceFrom:
 - name
 values:
 - type: jmx_memory
 table: true
 attribute: Usage
threading:
 objectName: java.lang:type=Threading
 values:
 - type: gauge
 table: false
 instancePrefix: jvm.threads.count
 attribute: ThreadCount
`
// MBeanValue specifies a particular value to pull from the MBean.
type MBeanValue struct {
	// Sets the data set used within collectd to handle the values
	// of the MBean attribute.
	Type string `yaml:"type"`
	// Set this to true if the returned attribute is a composite type.
	// If set to true, the keys within the composite type are appended
	// to the type instance.
	Table bool `yaml:"table"`
	// Works like the option of the same name directly beneath the
	// MBean block, but sets the type instance instead.
	InstancePrefix string `yaml:"instancePrefix"`
	// Works like the option of the same name directly beneath the
	// MBean block, but sets the type instance instead.
	InstanceFrom []string `yaml:"instanceFrom"`
	// Sets the name of the attribute from which to read the value.
	// You can access the keys of composite types by using a dot to
	// concatenate the key name to the attribute name.
	// For example: "attrib0.key42". If `table` is set to true, path
	// must point to a composite type, otherwise it must point to
	// a numeric type.
	Attribute string `yaml:"attribute"`
}
// MBean represents the <MBean> config object in the collectd config for
// generic jmx.
type MBean struct {
// Sets the pattern which is used to retrieve MBeans from the MBeanServer.
// If more than one MBean is returned you should use the `instanceFrom` option
// to make the identifiers unique
ObjectName string `yaml:"objectName"`
// Prefixes the generated plugin instance with prefix
InstancePrefix string `yaml:"instancePrefix"`
// The object names used by JMX to identify MBeans include so called
// "properties" which are basically key-value-pairs. If the given object
// name is not unique and multiple MBeans are returned, the values of those
// properties usually differ. You can use this option to build the plugin
// instance from the appropriate property values.
// This option is optional and may be repeated to generate the plugin
// instance from multiple property values
InstanceFrom []string `yaml:"instanceFrom"`
// The `value` blocks map one or more attributes of an MBean to a value
// list in collectd. There must be at least one `value` block within each MBean block
Values []MBeanValue `yaml:"values"`
Dimensions []string `yaml:"dimensions"`
} | internal/monitors/collectd/genericjmx/mbeans.go | 0.830044 | 0.460956 | mbeans.go | starcoder |
package equationscanner
import (
"log"
"unicode"
"errors"
"fmt"
)
// EquationScanner scans a given equation string and holds the equation's
// parameters: coefficients grouped by degree and by side of the '=' sign.
type EquationScanner struct {
	// Coefficients of x^2 terms found on the left side of '='.
	SecondDegreeCoefficientsOnLeft []int
	// Coefficients of x terms found on the left side of '='.
	FirstDegreeCoefficientsOnLeft []int
	// Constant terms found on the left side of '='.
	FreeNumbersOnLeft []int
	// The same three buckets, for the right-hand side of '='.
	SecondDegreeCoefficientsOnRight []int
	FirstDegreeCoefficientsOnRight []int
	FreeNumbersOnRight []int
	// VariableSign holds the variable letter (e.g. 'x'); ' ' until first seen.
	VariableSign rune
	// isAfterEqualSign is true once '=' has been scanned.
	isAfterEqualSign bool
	// scanState is the current state of the per-character state machine.
	scanState ScanState
	// charactersGroup buffers the digit runes of the number currently scanned.
	charactersGroup []rune
	// coefficientSign is the sign ('+' or '-') pending for the next number.
	coefficientSign rune
}
// Scan parses the given equation string and populates the scanner's
// coefficient fields. Panics raised while scanning malformed input are
// recovered and converted into the returned error.
// (The previous doc comment, "Connect Creates a connection to given address",
// was a copy-paste mistake.)
func (equationScanner *EquationScanner) Scan(equation string) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = errors.New(fmt.Sprintf("Error while scanning equation %s", equation))
		}
	}()
	initializeScanner(equationScanner)
	collectCoefficients(equationScanner, equation)
	end(equationScanner)
	return
}
// initializeScanner resets every field of the scanner to its starting state,
// allowing one scanner instance to be reused across Scan calls.
func initializeScanner(s *EquationScanner) {
	s.SecondDegreeCoefficientsOnLeft = []int{}
	s.FirstDegreeCoefficientsOnLeft = []int{}
	s.FreeNumbersOnLeft = []int{}
	s.SecondDegreeCoefficientsOnRight = []int{}
	s.FirstDegreeCoefficientsOnRight = []int{}
	s.FreeNumbersOnRight = []int{}
	s.VariableSign = ' '
	s.isAfterEqualSign = false
	s.scanState = DuringStart
	s.charactersGroup = []rune{}
	s.coefficientSign = '+'
}
// end pads every coefficient bucket that stayed empty during scanning with a
// single zero, so downstream consumers always find at least one entry.
func end(s *EquationScanner) {
	pad := func(bucket []int) []int {
		if len(bucket) == 0 {
			return append(bucket, 0)
		}
		return bucket
	}
	s.SecondDegreeCoefficientsOnLeft = pad(s.SecondDegreeCoefficientsOnLeft)
	s.FirstDegreeCoefficientsOnLeft = pad(s.FirstDegreeCoefficientsOnLeft)
	s.FreeNumbersOnLeft = pad(s.FreeNumbersOnLeft)
	s.SecondDegreeCoefficientsOnRight = pad(s.SecondDegreeCoefficientsOnRight)
	s.FirstDegreeCoefficientsOnRight = pad(s.FirstDegreeCoefficientsOnRight)
	s.FreeNumbersOnRight = pad(s.FreeNumbersOnRight)
}
// collectCoefficients walks the equation character by character, dispatching
// each non-space character to the matching handler, then flushes any token
// still in progress at the end of the input.
func collectCoefficients(s *EquationScanner, equation string) {
	for _, c := range equation {
		if unicode.IsSpace(c) {
			continue
		}
		validateCharacterWithState(s, c)
		switch {
		case unicode.IsDigit(c):
			handleDigit(s, c)
		case unicode.IsLetter(c):
			handleVariable(s, c)
		default:
			handleSignCharacter(s, c)
		}
	}
	handleScanEnd(s)
}
// validateCharacterWithState panics unless char is legal in the scanner's
// current state; valid characters simply return.
//
// Fixes relative to the original: the "unexpected case" panic was placed
// inside the DuringExponentNumber case (so an invalid character there produced
// the wrong message, and an unknown state produced the "invalid position"
// message); it is now a proper default branch. The %s verbs applied to runes
// (which print as %!s(int32=…)) were replaced with %q/%v, and the unbalanced
// quote in the message was fixed.
func validateCharacterWithState(s *EquationScanner, char rune) {
	switch s.scanState {
	case DuringStart, DuringEqualSign:
		if unicode.IsDigit(char) || unicode.IsLetter(char) || char == '-' || char == '+' {
			return
		}
	case DuringNumber:
		if unicode.IsDigit(char) || unicode.IsLetter(char) || char == '-' ||
			char == '+' || char == '*' || char == '=' {
			return
		}
	case DuringCoefficientSign:
		if unicode.IsDigit(char) || unicode.IsLetter(char) {
			return
		}
	case DuringMultiplySign:
		if unicode.IsLetter(char) {
			return
		}
	case DuringVariable:
		if char == '-' || char == '+' || char == '=' || char == '^' {
			return
		}
	case DuringExponentSign:
		if unicode.IsDigit(char) {
			return
		}
	case DuringExponentNumber:
		if char == '-' || char == '+' {
			return
		}
	default:
		// Unknown state: a programming error, not bad input.
		log.Panicf("Unexpected case encountered %v", s.scanState)
	}
	log.Panicf("Invalid position for character %q. Current State: %v", char, s.scanState)
}
// handleDigit records a digit either as the exponent digit (finishing the
// exponent scan) or as another character of the current number literal.
func handleDigit(s *EquationScanner, digit rune) {
	if s.scanState == DuringExponentSign {
		handleEndOfExponentScan(s, digit)
		s.scanState = DuringExponentNumber
		return
	}
	s.charactersGroup = append(s.charactersGroup, digit)
	s.scanState = DuringNumber
}
// handleEndOfExponentScan files the pending coefficient into the bucket that
// matches the exponent digit ('2', '1' or '0') and, via defer, moves the state
// machine to DuringExponentNumber on every path. Any other exponent digit is
// rejected with a panic (the %s verb on a rune was fixed to %q).
func handleEndOfExponentScan(equationScanner *EquationScanner, char rune) {
	defer setScanState(equationScanner, DuringExponentNumber)
	switch char {
	case '2':
		addCoefficientOfExponent2(equationScanner)
	case '1':
		addCoefficient(equationScanner)
	case '0':
		addFreeNumber(equationScanner)
	default:
		log.Panicf("Found invalid exponent character %q", char)
	}
}
// setScanState moves the scanner's state machine into newState.
func setScanState(s *EquationScanner, newState ScanState) {
	s.scanState = newState
}
// handleVariable records the equation's variable letter. An implicit
// coefficient of 1 is inserted when the variable directly follows a sign, '='
// or the start of the equation (so "x" means "1*x"). Two distinct variable
// letters in one equation cause a panic (the %s verbs on runes were fixed
// to %q).
func handleVariable(equationScanner *EquationScanner, char rune) {
	if equationScanner.scanState == DuringCoefficientSign ||
		equationScanner.scanState == DuringEqualSign ||
		equationScanner.scanState == DuringStart {
		equationScanner.charactersGroup = append(equationScanner.charactersGroup, '1')
	}
	equationScanner.scanState = DuringVariable
	switch equationScanner.VariableSign {
	case ' ':
		// First variable seen; remember it.
		equationScanner.VariableSign = char
	case char:
		// Same variable again: nothing to do.
	default:
		log.Panicf("Found two different variables in the equation. Variables: %q, %q",
			equationScanner.VariableSign, char)
	}
}
// handleSignCharacter dispatches a non-alphanumeric character to its handler.
// In the original, the "unexpected character" panic sat after the break of the
// last case and was unreachable; it now lives in a default branch with a
// correctly formatted message.
func handleSignCharacter(equationScanner *EquationScanner, char rune) {
	switch char {
	case '^':
		handleExponentSign(equationScanner)
	case '-', '+':
		handleCoefficientSign(equationScanner, char)
	case '=':
		handleEqualSign(equationScanner, char)
	case '*':
		handleMultiplySign(equationScanner, char)
	default:
		log.Panicf("Unexpected character %q", char)
	}
}
// handleExponentSign notes that '^' was seen; an exponent digit must follow.
func handleExponentSign(s *EquationScanner) {
	s.scanState = DuringExponentSign
}
// handleCoefficientSign closes any token in progress (number or variable) and
// remembers the sign for the next coefficient.
func handleCoefficientSign(s *EquationScanner, sign rune) {
	switch s.scanState {
	case DuringNumber:
		addFreeNumber(s)
	case DuringVariable:
		addCoefficient(s)
	}
	s.scanState = DuringCoefficientSign
	s.coefficientSign = sign
}
// handleEqualSign flushes any pending token and flips the scanner over to the
// right-hand side of the equation. A second '=' is an error.
// The char parameter is unused but kept for a uniform handler signature.
func handleEqualSign(s *EquationScanner, char rune) {
	if s.isAfterEqualSign {
		log.Panicf("Equation has two '=' signs")
	}
	switch s.scanState {
	case DuringVariable:
		addCoefficient(s)
	case DuringNumber:
		addFreeNumber(s)
	}
	s.scanState = DuringEqualSign
	s.isAfterEqualSign = true
}
// handleMultiplySign notes the explicit '*' between a coefficient and the
// variable. The char parameter is unused but kept for a uniform signature.
func handleMultiplySign(s *EquationScanner, char rune) {
	s.scanState = DuringMultiplySign
}
// handleScanEnd flushes the final token once the input is exhausted. Ending in
// any state other than a completed number, variable or exponent is invalid.
// In the original, the "invalid end" panic sat after a break and was
// unreachable (so e.g. a trailing '+' was silently ignored); it is now a
// default branch.
func handleScanEnd(equationScanner *EquationScanner) {
	switch equationScanner.scanState {
	case DuringExponentNumber:
		// Exponent handling already stored the coefficient.
	case DuringNumber:
		addFreeNumber(equationScanner)
	case DuringVariable:
		addCoefficient(equationScanner)
	default:
		log.Panic("Invalid end of equation")
	}
}
// addCoefficientOfExponent2 stores the pending signed number as an x^2
// coefficient on whichever side of '=' the scanner is currently on.
func addCoefficientOfExponent2(s *EquationScanner) {
	n := CreateNumberFromCharactersGroupAndCoefficientSign(s)
	if s.isAfterEqualSign {
		s.SecondDegreeCoefficientsOnRight = append(s.SecondDegreeCoefficientsOnRight, n)
	} else {
		s.SecondDegreeCoefficientsOnLeft = append(s.SecondDegreeCoefficientsOnLeft, n)
	}
}
// addCoefficient stores the pending signed number as an x coefficient on
// whichever side of '=' the scanner is currently on.
func addCoefficient(s *EquationScanner) {
	n := CreateNumberFromCharactersGroupAndCoefficientSign(s)
	if s.isAfterEqualSign {
		s.FirstDegreeCoefficientsOnRight = append(s.FirstDegreeCoefficientsOnRight, n)
	} else {
		s.FirstDegreeCoefficientsOnLeft = append(s.FirstDegreeCoefficientsOnLeft, n)
	}
}
// addFreeNumber stores the pending signed number as a constant term on
// whichever side of '=' the scanner is currently on.
func addFreeNumber(s *EquationScanner) {
	n := CreateNumberFromCharactersGroupAndCoefficientSign(s)
	if s.isAfterEqualSign {
		s.FreeNumbersOnRight = append(s.FreeNumbersOnRight, n)
	} else {
		s.FreeNumbersOnLeft = append(s.FreeNumbersOnLeft, n)
	}
}
// CreateNumberFromCharactersGroupAndCoefficientSign converts the buffered
// digit characters into an int and applies the pending coefficient sign,
// which is reset to '+' afterwards (via defer, so the reset happens on every
// return path). Fixes: %s on a rune replaced by %q, and the magic sentinel
// return value 99999999999 replaced by an unreachable 0.
func CreateNumberFromCharactersGroupAndCoefficientSign(equationScanner *EquationScanner) int {
	number := CreateNumberFromCharactersGroup(equationScanner)
	defer setCoefficientSign(equationScanner, '+')
	switch equationScanner.coefficientSign {
	case '-':
		return -number
	case '+':
		return number
	default:
		log.Panicf("Unexpected coefficient sign %q", equationScanner.coefficientSign)
		return 0 // unreachable; Panicf never returns
	}
}
// setCoefficientSign records the sign to apply to the next parsed number.
func setCoefficientSign(s *EquationScanner, sign rune) {
	s.coefficientSign = sign
}
func CreateNumberFromCharactersGroup(equationScanner *EquationScanner) int {
number := 0
for i := 0; i < len(equationScanner.charactersGroup); i++ {
number = number * 10
number += int(equationScanner.charactersGroup[i] - '0')
}
equationScanner.charactersGroup = nil
equationScanner.charactersGroup = make([]rune, 0)
return number
} | src/equationscanner/equation_scanner.go | 0.654564 | 0.540257 | equation_scanner.go | starcoder |
package iso20022
// RedemptionExecution4 is the execution of a redemption order.
type RedemptionExecution4 struct {
	// Unique and unambiguous identifier for an order, as assigned by the instructing party.
	OrderReference *Max35Text `xml:"OrdrRef"`
	// Unique and unambiguous identifier for an order execution, as assigned by a confirming party.
	DealReference *Max35Text `xml:"DealRef"`
	// Specifies the category of the investment fund order.
	OrderType []*FundOrderType1 `xml:"OrdrTp,omitempty"`
	// Investment fund class to which an investment fund order execution is related.
	FinancialInstrumentDetails *FinancialInstrument6 `xml:"FinInstrmDtls"`
	// Number of investment funds units redeemed.
	UnitsNumber *FinancialInstrumentQuantity1 `xml:"UnitsNb"`
	// Indicates the rounding direction applied to nearest unit.
	Rounding *RoundingDirection2Code `xml:"Rndg,omitempty"`
	// Net amount of money paid to the investor as a result of the redemption.
	NetAmount *ActiveCurrencyAndAmount `xml:"NetAmt"`
	// Portion of the investor's holdings, in a specific investment fund/ fund class, that is redeemed.
	HoldingsRedemptionRate *PercentageRate `xml:"HldgsRedRate,omitempty"`
	// Amount of money paid to the investor as a result of the redemption, including all charges, commissions, and tax.
	GrossAmount *ActiveCurrencyAndAmount `xml:"GrssAmt,omitempty"`
	// Date and time at which a price is applied, according to the terms stated in the prospectus.
	TradeDateTime *DateAndDateTimeChoice `xml:"TradDtTm"`
	// Price at which the order was executed.
	PriceDetails *UnitPrice5 `xml:"PricDtls"`
	// Indicates whether the order has been partially executed, ie, the confirmed quantity does not match the ordered quantity for a given financial instrument.
	PartiallyExecutedIndicator *YesNoIndicator `xml:"PrtlyExctdInd"`
	// Indicates whether the dividend is included, ie, cum-dividend, in the executed price. When the dividend is not included, the price will be ex-dividend.
	CumDividendIndicator *YesNoIndicator `xml:"CumDvddInd"`
	// Part of the price deemed as accrued income or profit rather than capital. The interim profit amount is used for tax purposes.
	InterimProfitAmount *ProfitAndLoss1Choice `xml:"IntrmPrftAmt,omitempty"`
	// Information needed to process a currency exchange or conversion.
	ForeignExchangeDetails []*ForeignExchangeTerms4 `xml:"FXDtls,omitempty"`
	// Dividend option chosen by the account owner based on the options offered in the prospectus.
	IncomePreference *IncomePreference1Code `xml:"IncmPref,omitempty"`
	// Tax group to which the purchased investment fund units belong. The investor indicates to the intermediary operating pooled nominees, which type of unit is to be sold.
	Group1Or2Units *UKTaxGroupUnitCode `xml:"Grp1Or2Units,omitempty"`
	// Amount of money associated with a service.
	ChargeGeneralDetails *TotalCharges2 `xml:"ChrgGnlDtls,omitempty"`
	// Amount of money due to a party as compensation for a service.
	CommissionGeneralDetails *TotalCommissions2 `xml:"ComssnGnlDtls,omitempty"`
	// Tax related to an investment fund order.
	TaxGeneralDetails *TotalTaxes2 `xml:"TaxGnlDtls,omitempty"`
	// Parameters used to execute the settlement of an investment fund order.
	SettlementAndCustodyDetails *FundSettlementParameters4 `xml:"SttlmAndCtdyDtls,omitempty"`
	// Indicates whether the financial instrument is to be physically delivered.
	PhysicalDeliveryIndicator *YesNoIndicator `xml:"PhysDlvryInd"`
	// Parameters of a physical delivery.
	PhysicalDeliveryDetails *DeliveryParameters3 `xml:"PhysDlvryDtls,omitempty"`
	// Currency requested for settlement of cash proceeds.
	RequestedSettlementCurrency *CurrencyCode `xml:"ReqdSttlmCcy,omitempty"`
	// Currency to be used for pricing the fund. This currency must be among the set of currencies in which the price may be expressed, as stated in the prospectus.
	RequestedNAVCurrency *CurrencyCode `xml:"ReqdNAVCcy,omitempty"`
	// Payment transaction resulting from the investment fund order execution.
	CashSettlementDetails *PaymentTransaction15 `xml:"CshSttlmDtls,omitempty"`
}
func (r *RedemptionExecution4) SetOrderReference(value string) {
r.OrderReference = (*Max35Text)(&value)
}
func (r *RedemptionExecution4) SetDealReference(value string) {
r.DealReference = (*Max35Text)(&value)
}
func (r *RedemptionExecution4) AddOrderType() *FundOrderType1 {
newValue := new(FundOrderType1)
r.OrderType = append(r.OrderType, newValue)
return newValue
}
func (r *RedemptionExecution4) AddFinancialInstrumentDetails() *FinancialInstrument6 {
r.FinancialInstrumentDetails = new(FinancialInstrument6)
return r.FinancialInstrumentDetails
}
func (r *RedemptionExecution4) AddUnitsNumber() *FinancialInstrumentQuantity1 {
r.UnitsNumber = new(FinancialInstrumentQuantity1)
return r.UnitsNumber
}
func (r *RedemptionExecution4) SetRounding(value string) {
r.Rounding = (*RoundingDirection2Code)(&value)
}
func (r *RedemptionExecution4) SetNetAmount(value, currency string) {
r.NetAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution4) SetHoldingsRedemptionRate(value string) {
r.HoldingsRedemptionRate = (*PercentageRate)(&value)
}
func (r *RedemptionExecution4) SetGrossAmount(value, currency string) {
r.GrossAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (r *RedemptionExecution4) AddTradeDateTime() *DateAndDateTimeChoice {
r.TradeDateTime = new(DateAndDateTimeChoice)
return r.TradeDateTime
}
func (r *RedemptionExecution4) AddPriceDetails() *UnitPrice5 {
r.PriceDetails = new(UnitPrice5)
return r.PriceDetails
}
func (r *RedemptionExecution4) SetPartiallyExecutedIndicator(value string) {
r.PartiallyExecutedIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution4) SetCumDividendIndicator(value string) {
r.CumDividendIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution4) AddInterimProfitAmount() *ProfitAndLoss1Choice {
r.InterimProfitAmount = new(ProfitAndLoss1Choice)
return r.InterimProfitAmount
}
func (r *RedemptionExecution4) AddForeignExchangeDetails() *ForeignExchangeTerms4 {
newValue := new(ForeignExchangeTerms4)
r.ForeignExchangeDetails = append(r.ForeignExchangeDetails, newValue)
return newValue
}
func (r *RedemptionExecution4) SetIncomePreference(value string) {
r.IncomePreference = (*IncomePreference1Code)(&value)
}
func (r *RedemptionExecution4) SetGroup1Or2Units(value string) {
r.Group1Or2Units = (*UKTaxGroupUnitCode)(&value)
}
func (r *RedemptionExecution4) AddChargeGeneralDetails() *TotalCharges2 {
r.ChargeGeneralDetails = new(TotalCharges2)
return r.ChargeGeneralDetails
}
func (r *RedemptionExecution4) AddCommissionGeneralDetails() *TotalCommissions2 {
r.CommissionGeneralDetails = new(TotalCommissions2)
return r.CommissionGeneralDetails
}
func (r *RedemptionExecution4) AddTaxGeneralDetails() *TotalTaxes2 {
r.TaxGeneralDetails = new(TotalTaxes2)
return r.TaxGeneralDetails
}
func (r *RedemptionExecution4) AddSettlementAndCustodyDetails() *FundSettlementParameters4 {
r.SettlementAndCustodyDetails = new(FundSettlementParameters4)
return r.SettlementAndCustodyDetails
}
func (r *RedemptionExecution4) SetPhysicalDeliveryIndicator(value string) {
r.PhysicalDeliveryIndicator = (*YesNoIndicator)(&value)
}
func (r *RedemptionExecution4) AddPhysicalDeliveryDetails() *DeliveryParameters3 {
r.PhysicalDeliveryDetails = new(DeliveryParameters3)
return r.PhysicalDeliveryDetails
}
func (r *RedemptionExecution4) SetRequestedSettlementCurrency(value string) {
r.RequestedSettlementCurrency = (*CurrencyCode)(&value)
}
func (r *RedemptionExecution4) SetRequestedNAVCurrency(value string) {
r.RequestedNAVCurrency = (*CurrencyCode)(&value)
}
func (r *RedemptionExecution4) AddCashSettlementDetails() *PaymentTransaction15 {
r.CashSettlementDetails = new(PaymentTransaction15)
return r.CashSettlementDetails
} | RedemptionExecution4.go | 0.818592 | 0.420362 | RedemptionExecution4.go | starcoder |
package elk
import (
"entgo.io/ent/entc/gen"
"errors"
"fmt"
"strings"
)
// maxDepth bounds the recursion when resolving edges, guarding against
// unbounded circular references in the schema graph.
const maxDepth = 25

type (
	// Edge specifies an edge to load for a type, together with the nested
	// edges to eager-load beneath it.
	Edge struct {
		*gen.Edge
		Edges Edges
	}
	// Edges is a list of multiple Edge values.
	Edges []Edge
	// walk is a node sequence in the schema graph. Used to keep track when computing the edges to load.
	walk []string
)
// EntQuery simply runs EntQuery on every item in the list and concatenates
// the results.
func (es Edges) EntQuery() string {
	var sb strings.Builder
	for _, edge := range es {
		sb.WriteString(edge.EntQuery())
	}
	return sb.String()
}
// EntQuery constructs the ent query-builder code that eager-loads this edge
// and, recursively, all edges defined beneath it.
func (e Edge) EntQuery() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, ".%s(", strings.Title(e.EagerLoadField()))
	if len(e.Edges) > 0 {
		fmt.Fprintf(&sb, "func (q *ent.%s) {\nq%s\n}", e.Type.QueryName(), e.Edges.EntQuery())
	}
	sb.WriteString(")")
	return sb.String()
}
// cycleDepth determines the length of a cycle on the last visited node.
//   <nil>:   0 -> no visits at all
//   a->b->c: 1 -> 1st visit on c
//   a->b->b: 2 -> 2nd visit on b
//   a->a->a: 3 -> 3rd visit on a
//   a->b->a: 2 -> 2nd visit on a
func (w walk) cycleDepth() uint {
	if len(w) == 0 {
		return 0
	}
	last := w[len(w)-1]
	count := uint(1)
	for i := len(w) - 2; i >= 0; i-- {
		if w[i] == last {
			count++
		}
	}
	return count
}
// reachedMaxDepth reports whether the walk has grown deeper than maxDepth.
func (w walk) reachedMaxDepth() bool {
	return len(w) > maxDepth
}
// push appends a new step to the walk.
func (w *walk) push(step string) {
	*w = append(*w, step)
}
// pop removes the last step of the walk; it is a no-op on an empty walk.
func (w *walk) pop() {
	if n := len(*w); n > 0 {
		*w = (*w)[:n-1]
	}
}
// edges returns the edges to eager-load for the given node and operation.
func edges(n *gen.Type, a string) (Edges, error) {
	groups, err := groupsForOperation(n, a)
	if err != nil {
		return nil, err
	}
	return edgesHelper(n, walk{}, groups)
}
// edgesHelper recursively collects the edges to load on this type for the
// requested groups. The walk w tracks the path taken so far so that cycles
// can be bounded by each edge's MaxDepth annotation.
// (The error message previously ended with a dangling ": "; fixed.)
func edgesHelper(n *gen.Type, w walk, groupsToLoad []string) (Edges, error) {
	// If we have reached maxDepth there most probably is an unwanted circular reference.
	if w.reachedMaxDepth() {
		return nil, errors.New(fmt.Sprintf("max depth of %d reached", maxDepth))
	}
	// What edges to load on this type.
	var es Edges
	// Iterate over the edges of the given type. If the type has an edge we
	// need to eager-load, do so, and recursively collect that edge's edges.
	for _, e := range n.Edges {
		// Parse the edge's annotation, if present.
		a := Annotation{}
		if e.Annotations != nil && e.Annotations[a.Name()] != nil {
			if err := a.Decode(e.Annotations[a.Name()]); err != nil {
				return nil, err
			}
		}
		a.EnsureDefaults()
		// Load the edge only if it carries at least one of the requested groups.
		if !a.Groups.Match(groupsToLoad) {
			continue
		}
		// Record this step before recursing so cycle depth is measured correctly.
		w.push(encodeTypeAndEdge(n, e))
		// If we have reached the max depth on this edge, stop the recursion. Backtrack!
		if w.cycleDepth() > a.MaxDepth {
			w.pop()
			continue
		}
		// Recursively collect the eager loads of this edge's edges.
		nested, err := edgesHelper(e.Type, w, groupsToLoad)
		if err != nil {
			return nil, err
		}
		// Done visiting this node; remove it from the walk.
		w.pop()
		es = append(es, Edge{Edge: e, Edges: nested})
	}
	return es, nil
}
func encodeTypeAndEdge(n *gen.Type, e *gen.Edge) string {
return n.Name + "." + e.Name
} | edge.go | 0.659405 | 0.439447 | edge.go | starcoder |
package arrgo
import "fmt"
// Greater compares a and b elementwise and returns a boolean array that is
// true where a's element is strictly greater than b's.
// Panics if either array is empty or the shapes differ.
func (a *Arrf) Greater(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		panic(EMPTY_ARRAY_ERROR)
	}
	// Shape check added for consistency with Equal; prevents an index panic
	// when b has fewer elements than a.
	if !SameIntSlice(a.shape, b.shape) {
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v > b.data[i]
	}
	return t
}
// GreaterEqual compares a and b elementwise and returns a boolean array that
// is true where a's element is greater than or equal to b's.
// Panics if either array is empty or the shapes differ.
func (a *Arrf) GreaterEqual(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		panic(EMPTY_ARRAY_ERROR)
	}
	// Shape check added for consistency with Equal.
	if !SameIntSlice(a.shape, b.shape) {
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v >= b.data[i]
	}
	return t
}
// Less compares a and b elementwise and returns a boolean array that is true
// where a's element is strictly less than b's.
// Panics if either array is empty or the shapes differ.
func (a *Arrf) Less(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		panic(EMPTY_ARRAY_ERROR)
	}
	// Shape check added for consistency with Equal.
	if !SameIntSlice(a.shape, b.shape) {
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v < b.data[i]
	}
	return t
}
// LessEqual compares a and b elementwise and returns a boolean array that is
// true where a's element is less than or equal to b's.
// Panics if either array is empty or the shapes differ.
func (a *Arrf) LessEqual(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		panic(EMPTY_ARRAY_ERROR)
	}
	// Shape check added for consistency with Equal.
	if !SameIntSlice(a.shape, b.shape) {
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v <= b.data[i]
	}
	return t
}
// Equal compares the elements of two arrays at corresponding positions and
// returns an Arrb of the results.
// Panics if either array is empty or if the shapes differ.
// (Comment translated from mojibake-encoded Chinese.)
func (a *Arrf) Equal(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		fmt.Println("empty array.")
		panic(EMPTY_ARRAY_ERROR)
	}
	if !SameIntSlice(a.shape, b.shape) {
		fmt.Println("shape not same.")
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v == b.data[i]
	}
	return t
}
// NotEqual compares a and b elementwise and returns a boolean array that is
// true where the corresponding elements differ.
// Panics if either array is empty or the shapes differ.
func (a *Arrf) NotEqual(b *Arrf) *Arrb {
	if len(a.data) == 0 || len(b.data) == 0 {
		panic(EMPTY_ARRAY_ERROR)
	}
	// Shape check added for consistency with Equal.
	if !SameIntSlice(a.shape, b.shape) {
		panic(SHAPE_ERROR)
	}
	var t = EmptyB(a.shape...)
	for i, v := range a.data {
		t.data[i] = v != b.data[i]
	}
	return t
}
// Greater is the function form of (*Arrf).Greater.
func Greater(a, b *Arrf) *Arrb {
	return a.Greater(b)
}

// GreaterEqual is the function form of (*Arrf).GreaterEqual.
func GreaterEqual(a,b *Arrf) *Arrb {
	return a.GreaterEqual(b)
}

// Less is the function form of (*Arrf).Less.
func Less(a, b *Arrf) *Arrb {
	return a.Less(b)
}

// LessEqual is the function form of (*Arrf).LessEqual.
func LessEqual(a, b *Arrf) *Arrb {
	return a.LessEqual(b)
}

// Equal is the function form of (*Arrf).Equal.
func Equal(a, b *Arrf) *Arrb {
	return a.Equal(b)
}

// NotEqual is the function form of (*Arrf).NotEqual.
func NotEqual(a, b *Arrf) *Arrb {
	return a.NotEqual(b)
}
// Sort sorts the array in place along the given axis (defaulting to the last
// axis) and returns the receiver.
// NOTE(review): Hsort/Vsort semantics are not visible here — presumably they
// sort contiguous chunks and strided columns respectively; confirm. Also note
// that in the strided branch only data[0:axisShape*axis1St] is sorted, which
// looks like it skips higher blocks of a multi-dimensional array — verify
// whether that is intentional.
func (a *Arrf) Sort(axis ...int) *Arrf {
	ax := -1
	if len(axis) == 0 {
		// Default to the innermost axis.
		ax = a.Ndims() - 1
	} else {
		ax = axis[0]
	}
	// axisSt: stride of the sort axis; axis1St: stride of the next-inner axis.
	axisShape, axisSt, axis1St := a.shape[ax], a.strides[ax], a.strides[ax + 1]
	if axis1St == 1 {
		Hsort(axisSt, a.data)
	} else {
		Vsort(axis1St, a.data[0:axisShape * axis1St])
	}
	return a
}
// Sort returns a sorted copy of a along the given axis; a itself is untouched.
func Sort(a *Arrf, axis ...int) *Arrf {
	c := a.Copy()
	return c.Sort(axis...)
}
func (a *Arrf) Size() int {
return ProductIntSlice(a.shape)
} | compare_opt.go | 0.61659 | 0.556882 | compare_opt.go | starcoder |
// En algunos lenguajes es idiomรกtico usar estucturas
// de datos y algorรญtmos [genรฉricos](https://es.wikipedia.org/wiki/Programaci%C3%B3n_gen%C3%A9rica).
// Go no soporta genรฉricos; en Go es comรบn tener
// funciones que actuan sobre colecciones de datos si
// tu programa y tipos de datos lo necesitan.
// Aquรญ hay algunos ejemplo de funciones sobre colecciones
// para slices de `strings`. Puedes usar estos ejemplos para
// construir tus propias funciones. Hay que notar que
// en ocasiones puede ser mรกs claro simplemente usar
// el cรณdigo que manipula la colecciรณn directamente
// en vez de crear y llamar a una funciรณn auxiliar.
package main
import "strings"
import "fmt"
// Index returns the first index of t in vs, or -1 if t is not present.
func Index(vs []string, t string) int {
	for i := range vs {
		if vs[i] == t {
			return i
		}
	}
	return -1
}
// Include reports whether t is present in vs.
func Include(vs []string, t string) bool {
	return Index(vs, t) != -1
}
// Any reports whether at least one string in vs satisfies the predicate f.
func Any(vs []string, f func(string) bool) bool {
	for i := range vs {
		if f(vs[i]) {
			return true
		}
	}
	return false
}
// All reports whether every string in vs satisfies the predicate f.
// It is vacuously true for an empty slice.
func All(vs []string, f func(string) bool) bool {
	for i := range vs {
		if !f(vs[i]) {
			return false
		}
	}
	return true
}
// Filter returns a new slice containing only the strings of vs that satisfy
// the predicate f. The result is non-nil even when empty.
func Filter(vs []string, f func(string) bool) []string {
	kept := make([]string, 0)
	for _, v := range vs {
		if f(v) {
			kept = append(kept, v)
		}
	}
	return kept
}
// Map returns a new slice containing the result of applying f to each string
// in vs, in order.
func Map(vs []string, f func(string) string) []string {
	out := make([]string, 0, len(vs))
	for _, v := range vs {
		out = append(out, f(v))
	}
	return out
}
func main() {
	// Exercise each of the collection helpers defined above.
	var strs = []string{"durazno", "manzana", "pera",
		"ciruela"}
	fmt.Println(Index(strs, "pera"))
	fmt.Println(Include(strs, "uva"))
	fmt.Println(Any(strs, func(v string) bool {
		return strings.HasPrefix(v, "p")
	}))
	fmt.Println(All(strs, func(v string) bool {
		return strings.HasPrefix(v, "p")
	}))
	fmt.Println(Filter(strs, func(v string) bool {
		return strings.Contains(v, "e")
	}))
	// The calls above all use anonymous functions, but named
	// functions of the right type work just as well.
	fmt.Println(Map(strs, strings.ToUpper))
} | examples/funciones-sobre-colecciones/funciones-sobre-colecciones.go | 0.614972 | 0.551755 | funciones-sobre-colecciones.go | starcoder |
Package logger contains logging related API.
It is possible to use a custom logger, but the builtin logger is used if one is not provided.
Log levels for the builtin logger are described with type Level. Following log levels are supported:
- OffLevel : Do not log anything.
 - FatalLevel : A critical error occurred, the client cannot continue running.
 - ErrorLevel : A severe error occurred.
- WarnLevel : There is a problem, but the client can continue running.
- InfoLevel : Informational message.
- DebugLevel : A log message which can help with diagnosing a problem. Should not be used in production.
- TraceLevel : Potentially very detailed log message, which is usually logged when an important function is called. Should not be used in production.
Lower levels imply higher levels.
For example, if you want to see logs of level WarnLevel or more critical (ErrorLevel, FatalLevel), you need to set the log level as WarnLevel.
config := hazelcast.Config{}
config.Logger.Level = logger.WarnLevel
Using a Custom Logger
You can provide a custom logger that implements logger.Logger:
type Logger interface {
Log(weight Weight, f func() string)
}
weight is the numeric value that corresponds to the log level.
E.g., WeightInfo corresponds to InfoLevel.
f is a function which produces and returns the log message.
If it is not called, there is no performance penalty related to log production.
The Log method is called whenever the client needs to log.
It's the custom logger's responsibility to decide whether to call f and log a message or avoid calling it to discard the message.
Most custom loggers would have a log filtering logic in their Log methods similar to the one below:
func (c MyCustomLogger) Log(wantWeight logger.Weight, formatter func() string) {
// Do not log if this is a more detailed log message than configured.
if c.weight < wantWeight {
return
}
// ...
}
In order to activate your custom logger, use config.Logger.CustomLogger configuration:
config := hazelcast.Config{}
config.Logger.CustomLogger = MyCustomLogger{}
See the example for a detailed custom logger implementation.
*/
package logger | logger/doc.go | 0.702122 | 0.450601 | doc.go | starcoder |
package datasource
import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)
// Coin represents the object used to represent cryptocurrency tickers.
// All price fields hold display-formatted strings as returned by the
// upstream API, not parsed numbers.
type Coin struct {
	Symbol        string // ticker symbol, e.g. "BTC"; must be set before UnmarshalJSON runs
	Current       string // current price in USD
	Open          string // opening price over the last 24 hours
	High          string // 24-hour high
	Low           string // 24-hour low
	PercentChange string // 24-hour change, numeric string in percent
	Response      string // raw response status reported by the upstream API
}
// OutputJSON renders the coin's 24-hour statistics as a fenced
// (```-delimited) text table suitable for posting to chat services.
// NOTE: despite the name, the output is a formatted table, not JSON;
// the name is kept for caller compatibility.
func (c *Coin) OutputJSON() string {
	// Fixed row order so the table layout is stable across calls
	// (map iteration order is random).
	stringOrder := []string{
		"Symbol",
		"Current Price (USD)",
		"Open (24 Hours)",
		"High (24 Hours)",
		"Low (24 Hours)",
		"Change % (24 Hours)",
	}
	output := c.outputMap()
	// Build with strings.Builder instead of repeated string
	// concatenation, which is quadratic.
	var b strings.Builder
	b.WriteString("```\n")
	for _, k := range stringOrder {
		if k == "Change % (24 Hours)" {
			// The change is stored as a numeric string; a parse
			// failure is ignored and renders as 0.00 %.
			f, _ := strconv.ParseFloat(output[k], 64)
			fmt.Fprintf(&b, "%-20s %.2f %%\n", k, f)
		} else {
			fmt.Fprintf(&b, "%-20s %-20s\n", k, output[k])
		}
	}
	b.WriteString("```\n")
	return b.String()
}
// UnmarshalJSON implements json.Unmarshaler for Coin. It expects a
// cryptocompare-style payload and reads the USD quote for c.Symbol,
// which must therefore be set before decoding. Malformed or incomplete
// payloads now yield an error instead of a runtime panic from a failed
// type assertion.
func (c *Coin) UnmarshalJSON(data []byte) error {
	// auxiliary struct to help map json
	var aux struct {
		Display  map[string]interface{}
		Response string
	}
	dec := json.NewDecoder(bytes.NewReader(data))
	if err := dec.Decode(&aux); err != nil {
		return fmt.Errorf("decode coin: %v", err)
	}
	if aux.Response == "Error" {
		return fmt.Errorf("could not find coin: %v", c.Symbol)
	}
	// Checked assertions: the original code chained unchecked type
	// assertions, which panicked on any unexpected payload shape.
	sym, ok := aux.Display[c.Symbol].(map[string]interface{})
	if !ok {
		return fmt.Errorf("malformed payload for coin: %v", c.Symbol)
	}
	usd, ok := sym["USD"].(map[string]interface{})
	if !ok {
		return fmt.Errorf("missing USD quote for coin: %v", c.Symbol)
	}
	// field safely extracts one display string, failing when the key
	// is absent or not a string.
	field := func(key string) (string, error) {
		v, ok := usd[key].(string)
		if !ok {
			return "", fmt.Errorf("missing field %q for coin: %v", key, c.Symbol)
		}
		return v, nil
	}
	var err error
	if c.Current, err = field("PRICE"); err != nil {
		return err
	}
	if c.Open, err = field("OPEN24HOUR"); err != nil {
		return err
	}
	if c.High, err = field("HIGH24HOUR"); err != nil {
		return err
	}
	if c.Low, err = field("LOW24HOUR"); err != nil {
		return err
	}
	if c.PercentChange, err = field("CHANGEPCT24HOUR"); err != nil {
		return err
	}
	return nil
}
// outputMap maps the human-readable row labels used by OutputJSON to
// the corresponding field values of c.
func (c *Coin) outputMap() map[string]string {
	return map[string]string{
		"Symbol": c.Symbol,
		"Current Price (USD)": c.Current,
		"Open (24 Hours)": c.Open,
		"High (24 Hours)": c.High,
		"Low (24 Hours)": c.Low,
		"Change % (24 Hours)": c.PercentChange,
	}
} | datasource/coin.go | 0.645679 | 0.446796 | coin.go | starcoder |
package integration
// JiraIntegration specifies the data collection integration between Jira and SignalFx, in the form of a JSON object.
// (The previous comment mentioned Microsoft Azure, which does not match this type.)
type JiraIntegration struct {
	// The creation date and time for the integration object, in Unix time UTC-relative. The system sets this value, and you can't modify it.
	Created int64 `json:"created,omitempty"`
	// SignalFx-assigned user ID of the user that created the integration object. If the system created the object, the value is \"AAAAAAAAAA\". The system sets this value, and you can't modify it.
	Creator string `json:"creator,omitempty"`
	// Flag that indicates the state of the integration object. If `true`, the integration is enabled. If `false`, the integration is disabled, and you must enable it by setting \"enabled\" to `true` in a **PUT** request that updates the object. <br> **NOTE:** SignalFx always sets the flag to `true` when you call **POST** `/integration` to create an integration.
	Enabled bool `json:"enabled"`
	// SignalFx-assigned ID of an integration you create in the web UI or API. Use this property to retrieve an integration using the **GET**, **PUT**, or **DELETE** `/integration/{id}` endpoints or the **GET** `/integration/validate{id}/` endpoint, as described in this topic.
	Id string `json:"id,omitempty"`
	// The last time the integration was updated, in Unix time UTC-relative. This value is \"read-only\".
	LastUpdated int64 `json:"lastUpdated,omitempty"`
	// SignalFx-assigned ID of the last user who updated the integration. If the last update was by the system, the value is \"AAAAAAAAAA\". This value is \"read-only\".
	LastUpdatedBy string `json:"lastUpdatedBy,omitempty"`
	// A human-readable label for the integration. This property helps you identify a specific integration when you're using multiple integrations for the same service.
	Name string `json:"name,omitempty"`
	Type Type `json:"type"`
	// Jira API token; used with UserEmail for token-based authentication.
	APIToken string `json:"apiToken,omitempty"`
	UserEmail string `json:"userEmail,omitempty"`
	// Username/Password are used for basic authentication as an alternative to the token pair.
	Username string `json:"username,omitempty"`
	Password string `json:"password,omitempty"`
	Assignee *JiraAssignee `json:"assignee,omitempty"`
	AuthMethod string `json:"authMethod"`
	BaseURL string `json:"baseUrl"`
	IssueType string `json:"issueType"`
	ProjectKey string `json:"projectKey"`
} | integration/model_jira_integration.go | 0.695338 | 0.448185 | model_jira_integration.go | starcoder |
package ecc
import (
"fmt"
"math/big"
"github.com/ravdin/programmingbitcoin/util"
)
// Useful constants for secp256k1:
// _G: Generator point.
// _A, _B: 0 and 7 respectively, for y^2 = x^3 + 7
// _N: order of the group generated by _G (NOT the field order; the field prime is _P).
// _P: Large prime number that is less than 2^256.
var (
	_G *S256Point
	_A *s256Field
	_B *s256Field
	_N *big.Int
	_P *big.Int
)
// init fixes the standard secp256k1 domain parameters (curve
// coefficients, field prime, group order, and generator point).
func init() {
	_N = util.HexStringToBigInt("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141")
	// 2^256 - 2^32 - 2^9 - 2^8 - 2^7 - 2^6 - 2^4 - 1
	_P = util.HexStringToBigInt("fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f")
	_A = newS256Field(big.NewInt(0), _P)
	_B = newS256Field(big.NewInt(7), _P)
	// Standard secp256k1 generator coordinates.
	x := util.HexStringToBigInt("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798")
	y := util.HexStringToBigInt("483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8")
	// The generator is a known-good point, so the error is discarded.
	_G, _ = NewS256Point(x, y)
}
// S256Point represents a point on the secp256k1 elliptic curve.
// A nil X/Y pair denotes the point at infinity.
type S256Point struct {
	X *s256Field
	Y *s256Field
}
// NewS256Point initializes a new S256Point.
// Returns an error if the point is not on the curve.
func NewS256Point(x *big.Int, y *big.Int) (*S256Point, error) {
	p, err := NewPoint(newS256Field(x, _P), newS256Field(y, _P), _A, _B)
	if err != nil {
		return nil, err
	}
	return &S256Point{X: p.X.(*s256Field), Y: p.Y.(*s256Field)}, nil
}
// ParseS256Point returns a Point object from a SEC binary (not hex).
// It handles the uncompressed format (0x04 prefix, x then y) and the
// compressed formats (0x02/0x03 prefix, x only; the prefix encodes
// the parity of y).
func ParseS256Point(secBin []byte) *S256Point {
	if secBin[0] == 4 {
		// Uncompressed: bytes 1..32 are x, bytes 33..64 are y.
		var x, y *big.Int = new(big.Int), new(big.Int)
		x.SetBytes(secBin[1:33])
		y.SetBytes(secBin[33:65])
		result, _ := NewS256Point(x, y)
		return result
	}
	// Compressed: prefix 0x02 means y is even, 0x03 means y is odd.
	isEven := secBin[0] == 2
	xval := new(big.Int)
	xval.SetBytes(secBin[1:])
	x := newS256Field(xval, _P)
	// right side of the equation y^2 = x^3 + 7
	alpha := new(s256Field)
	alpha.Pow(x, big.NewInt(3))
	alpha.Add(alpha, _B)
	// solve for left side
	beta := alpha.Sqrt()
	// Sqrt yields one of the two roots; the other is _P - beta.
	// Pick the root with the parity requested by the prefix.
	var evenBeta, oddBeta *s256Field
	betaOffset := new(big.Int)
	betaOffset.Sub(_P, beta.Num)
	if beta.Num.Bit(0) == 0 {
		evenBeta = beta
		oddBeta = newS256Field(betaOffset, _P)
	} else {
		evenBeta = newS256Field(betaOffset, _P)
		oddBeta = beta
	}
	if isEven {
		return &S256Point{X: x, Y: evenBeta}
	}
	return &S256Point{X: x, Y: oddBeta}
}
// point converts p to the generic Point representation used by the
// group-law arithmetic. It panics if the coordinates are somehow not
// on the curve, which indicates a programming error.
func (p *S256Point) point() *Point {
	if result, err := NewPoint(p.X, p.Y, _A, _B); err == nil {
		return result
	}
	panic("Error casting to Point!")
}
// String implements fmt.Stringer. A nil X marks the point at infinity.
func (p *S256Point) String() string {
	if p.X == nil {
		return "Point(infinity)"
	}
	return fmt.Sprintf("Point(%v,%v)", p.X.Num, p.Y.Num)
}
// Eq returns true if two points are equal, and false otherwise.
// Two points at infinity (nil X) compare equal.
func (p *S256Point) Eq(other *S256Point) bool {
	if p.X == nil {
		return other.X == nil
	}
	return p.X.Eq(other.X) && p.Y.Eq(other.Y)
}
// Ne returns true if two points are not equal, and false otherwise.
func (p *S256Point) Ne(other *S256Point) bool {
	return !p.Eq(other)
}
// Add p1 + p2 and store the result in p, which is also returned.
// A nil X/Y result represents the point at infinity.
func (p *S256Point) Add(p1, p2 *S256Point) *S256Point {
	result := new(Point)
	result.Add(p1.point(), p2.point())
	if result.X == nil && result.Y == nil {
		*p = S256Point{X: nil, Y: nil}
	} else {
		*p = S256Point{X: result.X.(*s256Field), Y: result.Y.(*s256Field)}
	}
	return p
}
// Cmul multiplies a point r by a constant and returns p.
func (p *S256Point) Cmul(r *S256Point, coefficient *big.Int) *S256Point {
	// The group has order _N, so the coefficient is reduced mod _N first.
	coef := new(big.Int)
	coef.Mod(coefficient, _N)
	result := new(Point)
	result.Cmul(r.point(), coef)
	if result.X == nil && result.Y == nil {
		*p = S256Point{X: nil, Y: nil}
	} else {
		*p = S256Point{X: result.X.(*s256Field), Y: result.Y.(*s256Field)}
	}
	return p
}
// Verify reports whether sig is a valid ECDSA signature over the
// message hash z for the public key p.
func (p *S256Point) Verify(z *big.Int, sig *Signature) bool {
	// By Fermat's Little Theorem, 1/s = pow(s, N-2, N)
	sInv := new(big.Int)
	e := new(big.Int)
	e.Sub(_N, big.NewInt(2))
	sInv.Exp(sig.s, e, _N)
	// u = z / s
	u := new(big.Int)
	u.Mul(z, sInv).Mod(u, _N)
	// v = r / s
	v := new(big.Int)
	v.Mul(sig.r, sInv).Mod(v, _N)
	// u*G + v*P should have as the x coordinate, r
	total := new(S256Point)
	total.Cmul(_G, u)
	total.Add(total, new(S256Point).Cmul(p, v))
	return total.X.Num.Cmp(sig.r) == 0
}
// Sec returns the binary version of the SEC format.
// Compressed output is 33 bytes (parity prefix + x); uncompressed is
// 65 bytes (0x04 prefix + x + y).
func (p *S256Point) Sec(compressed bool) []byte {
	x := util.IntToBytes(p.X.Num, 32)
	y := util.IntToBytes(p.Y.Num, 32)
	var result []byte
	if compressed {
		result = make([]byte, 33)
		copy(result[1:], x)
		// Prefix 0x02 for even y, 0x03 for odd y (parity of the
		// least-significant byte).
		if y[31]%2 == 0 {
			result[0] = 2
		} else {
			result[0] = 3
		}
	} else {
		result = make([]byte, 65)
		copy(result[1:], x)
		copy(result[33:], y)
		result[0] = 4
	}
	return result
}
// Hash160 returns a hash of the SEC format.
func (p *S256Point) Hash160(compressed bool) []byte {
	return util.Hash160(p.Sec(compressed))
}
// Address of the public key. The version prefix byte is 0x6f for
// testnet and 0x00 (the zero value of `prefix`) for mainnet.
func (p *S256Point) Address(compressed bool, testnet bool) string {
	h160 := p.Hash160(compressed)
	var prefix byte
	if testnet {
		prefix = 0x6f
	}
	withPrefix := make([]byte, len(h160)+1)
	withPrefix[0] = prefix
	copy(withPrefix[1:], h160)
	return util.EncodeBase58Checksum(withPrefix)
} | ecc/s256point.go | 0.768907 | 0.482978 | s256point.go | starcoder |
package easing
import (
"github.com/gravestench/mathlib"
"math"
)
const (
	// Fallback tuning parameters used when the caller supplies none.
	defaultAmplitude = 0.1
	defaultPeriod = 0.1
)
// Compile-time checks that each provider satisfies EaseFunctionProvider.
var _ EaseFunctionProvider = &ElasticOutEaseProvider{}
var _ EaseFunctionProvider = &ElasticInEaseProvider{}
var _ EaseFunctionProvider = &ElasticInOutEaseProvider{}
// ElasticOutEaseProvider builds elastic "ease out" functions.
type ElasticOutEaseProvider struct{}
// New returns an elastic ease-out function. params holds up to two
// values, amplitude and period; missing values fall back to defaults.
func (*ElasticOutEaseProvider) New(params []float64) func(float64) float64 {
	params = ensureElasticParams(params)
	amplitude, period := params[0], params[1]
	elastic := func(v float64) float64 {
		// Short-circuit the exact endpoints to avoid numeric noise.
		if math.Abs(0-v) < math.SmallestNonzeroFloat64 {
			return 0
		} else if math.Abs(1-v) < math.SmallestNonzeroFloat64 {
			return 1
		}
		// s is the phase shift; amplitudes below 1 are clamped to 1.
		s := period / 4
		if amplitude < 1 {
			amplitude = 1
		} else {
			s = period * math.Asin(1/amplitude) / mathlib.PI2
		}
		return amplitude*math.Pow(2, -10*v)*math.Sin((v-s)*mathlib.PI2/period) + 1
	}
	return elastic
}
// ElasticInEaseProvider builds elastic "ease in" functions.
type ElasticInEaseProvider struct{}
// New returns an elastic ease-in function; see ElasticOutEaseProvider.New
// for the meaning of params.
func (*ElasticInEaseProvider) New(params []float64) func(float64) float64 {
	params = ensureElasticParams(params)
	amplitude, period := params[0], params[1]
	elastic := func(v float64) float64 {
		if math.Abs(0-v) < math.SmallestNonzeroFloat64 {
			return 0
		} else if math.Abs(1-v) < math.SmallestNonzeroFloat64 {
			return 1
		}
		s := period / 4
		if amplitude < 1 {
			amplitude = 1
		} else {
			s = period * math.Asin(1/amplitude) / mathlib.PI2
		}
		return -(amplitude * math.Pow(2, 10*(v-1)) * math.Sin((v-s)*mathlib.PI2/period))
	}
	return elastic
}
// ElasticInOutEaseProvider builds elastic "ease in/out" functions.
type ElasticInOutEaseProvider struct{}
// New returns an elastic ease-in-out function: ease-in for the first
// half of the input range and ease-out for the second half.
func (*ElasticInOutEaseProvider) New(params []float64) func(float64) float64 {
	params = ensureElasticParams(params)
	amp, period := params[0], params[1]
	elastic := func(v float64) float64 {
		if math.Abs(0-v) < math.SmallestNonzeroFloat64 {
			return 0
		} else if math.Abs(1-v) < math.SmallestNonzeroFloat64 {
			return 1
		}
		s := period / 4
		if amp < 1 {
			amp = 1
		} else {
			s = period * math.Asin(1/amp) / mathlib.PI2
		}
		// Rescale to [0,2): [0,1) is the "in" half, [1,2) the "out" half.
		v *= 2
		if v < 1 {
			return -0.5 * (amp * math.Pow(2, 10*(v-1)) * math.Sin((v-s)*mathlib.PI2/period))
		}
		return amp*math.Pow(2, -10*(v-1))*math.Sin((v-s)*mathlib.PI2/period)*0.5 + 1
	}
	return elastic
}
// ensureElasticParams normalizes params to exactly two values,
// [amplitude, period], filling in defaults for anything missing.
// (The nil check duplicates the len-0 case; both yield the defaults.)
func ensureElasticParams(params []float64) []float64 {
	if params == nil {
		params = []float64{defaultAmplitude, defaultPeriod}
	}
	switch len(params) {
	case 0:
		params = []float64{defaultAmplitude, defaultPeriod}
	case 1:
		params = append(params, defaultPeriod)
	}
	return params
} | pkg/easing/elastic.go | 0.714329 | 0.402157 | elastic.go | starcoder |
package forge
import (
netv1 "k8s.io/api/networking/v1"
netv1apply "k8s.io/client-go/applyconfigurations/networking/v1"
)
// RemoteIngress forges the apply patch for the reflected ingress, given the local one.
// Reflection labels are merged on top of the local labels, and the
// ingress-class annotation is filtered out of the reflected annotations.
func RemoteIngress(local *netv1.Ingress, targetNamespace string) *netv1apply.IngressApplyConfiguration {
	return netv1apply.Ingress(local.GetName(), targetNamespace).
		WithLabels(local.GetLabels()).WithLabels(ReflectionLabels()).
		WithAnnotations(FilterIngressAnnotations(local.GetAnnotations())).
		WithSpec(RemoteIngressSpec(local.Spec.DeepCopy()))
}
// FilterIngressAnnotations returns a copy of the given annotations
// with the legacy "kubernetes.io/ingress.class" annotation removed,
// so the reflected ingress does not inherit the local cluster's
// ingress class. The input map is never mutated.
func FilterIngressAnnotations(local map[string]string) map[string]string {
	// Presize to the input length; at most one entry is dropped.
	res := make(map[string]string, len(local))
	for k, v := range local {
		if k != "kubernetes.io/ingress.class" {
			res[k] = v
		}
	}
	return res
}
// RemoteIngressSpec forges the apply patch for the specs of the reflected ingress, given the local one.
// It expects the local object to be a deepcopy, as it is mutated.
func RemoteIngressSpec(local *netv1.IngressSpec) *netv1apply.IngressSpecApplyConfiguration {
	return netv1apply.IngressSpec().
		WithDefaultBackend(RemoteIngressBackend(local.DefaultBackend)).
		WithRules(RemoteIngressRules(local.Rules)...).
		WithTLS(RemoteIngressTLS(local.TLS)...)
}
// RemoteIngressBackend forges the apply patch for the backend of the reflected ingress, given the local one.
// Returns nil when the local ingress has no backend configured.
func RemoteIngressBackend(local *netv1.IngressBackend) *netv1apply.IngressBackendApplyConfiguration {
	if local == nil {
		return nil
	}
	return netv1apply.IngressBackend().
		WithResource(RemoteTypedLocalObjectReference(local.Resource)).
		WithService(RemoteIngressService(local.Service))
}
// RemoteIngressService forges the apply patch for the service of the reflected ingress, given the local one.
// Returns nil when the backend is not service-based.
func RemoteIngressService(local *netv1.IngressServiceBackend) *netv1apply.IngressServiceBackendApplyConfiguration {
	if local == nil {
		return nil
	}
	return netv1apply.IngressServiceBackend().
		WithName(local.Name).
		WithPort(netv1apply.ServiceBackendPort().
			WithName(local.Port.Name).
			WithNumber(local.Port.Number))
}
// RemoteIngressRules forges the apply patch for the rules of the reflected ingress, given the local ones.
func RemoteIngressRules(local []netv1.IngressRule) []*netv1apply.IngressRuleApplyConfiguration {
	remote := make([]*netv1apply.IngressRuleApplyConfiguration, len(local))
	for i := range local {
		remote[i] = netv1apply.IngressRule().
			WithHost(local[i].Host).
			WithHTTP(RemoteIngressHTTP(local[i].HTTP))
	}
	return remote
}
// RemoteIngressHTTP forges the apply patch for the HTTPIngressRuleValue of the reflected ingress, given the local one.
func RemoteIngressHTTP(local *netv1.HTTPIngressRuleValue) *netv1apply.HTTPIngressRuleValueApplyConfiguration {
	if local == nil {
		return nil
	}
	return netv1apply.HTTPIngressRuleValue().
		WithPaths(RemoteIngressPaths(local.Paths)...)
}
// RemoteIngressPaths forges the apply patch for the paths of the reflected ingress, given the local ones.
func RemoteIngressPaths(local []netv1.HTTPIngressPath) []*netv1apply.HTTPIngressPathApplyConfiguration {
	remote := make([]*netv1apply.HTTPIngressPathApplyConfiguration, len(local))
	for i := range local {
		remote[i] = netv1apply.HTTPIngressPath().
			WithPath(local[i].Path).
			WithBackend(RemoteIngressBackend(&local[i].Backend))
		// PathType is assigned directly (no With* setter is used here);
		// this copies the pointer from the local, deep-copied object.
		remote[i].PathType = local[i].PathType
	}
	return remote
}
// RemoteIngressTLS forges the apply patch for the TLS configs of the reflected ingress, given the local ones.
func RemoteIngressTLS(local []netv1.IngressTLS) []*netv1apply.IngressTLSApplyConfiguration {
	remote := make([]*netv1apply.IngressTLSApplyConfiguration, len(local))
	for i := range local {
		remote[i] = netv1apply.IngressTLS().
			WithHosts(local[i].Hosts...).
			WithSecretName(local[i].SecretName)
	}
	return remote
} | pkg/virtualKubelet/forge/ingresses.go | 0.685107 | 0.422386 | ingresses.go | starcoder |
// Package gb provides holiday definitions for the United Kingdom.
package gb
import (
"time"
"github.com/rickar/cal/v2"
"github.com/rickar/cal/v2/aa"
)
var (
	// Standard UK weekend substitution rules:
	// Saturdays move to Monday
	// Sundays move to Monday
	weekendAlt = []cal.AltDay{
		{Day: time.Saturday, Offset: 2},
		{Day: time.Sunday, Offset: 1},
	}
	// NewYear represents New Year's Day on 1-Jan
	NewYear = aa.NewYear.Clone(&cal.Holiday{Name: "New Year's Day", Type: cal.ObservanceBank, Observed: weekendAlt})
	// GoodFriday represents Good Friday - two days before Easter
	GoodFriday = aa.GoodFriday.Clone(&cal.Holiday{Name: "Good Friday", Type: cal.ObservanceBank})
	// EasterMonday represents Easter Monday - the day after Easter
	EasterMonday = aa.EasterMonday.Clone(&cal.Holiday{Name: "Easter Monday", Type: cal.ObservanceBank})
	// EarlyMay represents Early May on the first Monday of May.
	// 2020 is excluded because that year's holiday was replaced by the
	// one-off VE Day holiday below.
	EarlyMay = &cal.Holiday{
		Name:    "Early May",
		Type:    cal.ObservanceBank,
		Month:   time.May,
		Weekday: time.Monday,
		Offset:  1,
		Func:    cal.CalcWeekdayOffset,
		Except:  []int{2020},
	}
	// VEDay represents VE Day, the 75th anniversary of the end of WWII.
	// It applies to 2020 only (StartYear == EndYear == 2020).
	VEDay = &cal.Holiday{
		Name:      "VE Day",
		Type:      cal.ObservanceBank,
		Month:     time.May,
		Day:       8,
		Func:      cal.CalcDayOfMonth,
		StartYear: 2020,
		EndYear:   2020,
	}
	// SpringHoliday represents Spring Bank Holiday on the last Monday of May
	SpringHoliday = &cal.Holiday{
		Name:    "Spring Bank Holiday",
		Type:    cal.ObservanceBank,
		Month:   time.May,
		Weekday: time.Monday,
		Offset:  -1,
		Func:    cal.CalcWeekdayOffset,
	}
	// SummerHolidayScotland represents Summer Bank Holiday in Scotland on the first Monday of August.
	// Note: it is not included in the national Holidays list below.
	SummerHolidayScotland = &cal.Holiday{
		Name:    "Summer Bank Holiday",
		Type:    cal.ObservanceBank,
		Month:   time.August,
		Weekday: time.Monday,
		Offset:  1,
		Func:    cal.CalcWeekdayOffset,
	}
	// SummerHoliday represents Summer Bank Holiday on the last Monday of August
	SummerHoliday = &cal.Holiday{
		Name:    "Summer Bank Holiday",
		Type:    cal.ObservanceBank,
		Month:   time.August,
		Weekday: time.Monday,
		Offset:  -1,
		Func:    cal.CalcWeekdayOffset,
	}
	// ChristmasDay represents Christmas Day on 25-Dec
	ChristmasDay = aa.ChristmasDay.Clone(&cal.Holiday{Name: "Christmas Day", Type: cal.ObservanceBank, Observed: weekendAlt})
	// BoxingDay represents Boxing Day on 26-Dec.
	// Its substitution rules differ from weekendAlt because 25-Dec may
	// already occupy the following Monday.
	BoxingDay = aa.ChristmasDay2.Clone(&cal.Holiday{Name: "Boxing Day", Type: cal.ObservanceBank,
		Observed: []cal.AltDay{
			{Day: time.Saturday, Offset: 2},
			{Day: time.Sunday, Offset: 2},
			{Day: time.Monday, Offset: 1}}})
	// Holidays provides a list of the standard national holidays
	Holidays = []*cal.Holiday{
		NewYear,
		GoodFriday,
		EasterMonday,
		EarlyMay,
		VEDay,
		SpringHoliday,
		SummerHoliday,
		ChristmasDay,
		BoxingDay,
	}
) | v2/gb/gb_holidays.go | 0.506591 | 0.592224 | gb_holidays.go | starcoder |
package anidb
import (
"fmt"
"regexp"
"strconv"
"strings"
"time"
"github.com/PuerkitoBio/goquery"
"shitty.moe/satelit-project/satelit-scraper/proto/data"
)
// Parses and returns list of episodes or empty slice if episodes not found.
// Episodes with an unknown type, no number, or no real name are skipped;
// duration and air-date parse failures only degrade to zero values.
func (p *Parser) episodes() []*data.Episode {
	eps := make([]*data.Episode, 0)
	p.doc.Find(`table#eplist tr[id*="eid"]`).Each(func(i int, s *goquery.Selection) {
		log := p.log.With("ep_idx", i)
		var ep data.Episode
		ep.Type = parseEpisodeType(s)
		if ep.Type == data.Episode_UNKNOWN {
			log.Infof("skipping episode with unknown type")
			return
		}
		number, err := parseEpisodeNumber(s)
		if err != nil {
			log.Errorf("failed to parse ep number: %v", err)
			return
		}
		name := parseEpisodeName(s)
		if len(name) == 0 {
			log.Infof("skipping episode without a name")
			return
		}
		// Optional fields: log and fall back to zero values.
		duration, err := parseEpisodeDuration(s)
		if err != nil {
			log.Warnf("failed to parse ep duration: %v", err)
			duration = 0
		}
		date, err := parseEpisodeDate(s)
		if err != nil {
			log.Warnf("failed to parse ep air date: %v", err)
			date = time.Time{}
		}
		ep.Number = number
		ep.Name = name
		ep.Duration = duration.Seconds()
		if !date.IsZero() {
			ep.AirDate = date.Unix()
		}
		eps = append(eps, &ep)
	})
	return eps
}
// parseEpisodeType classifies an episode row as regular or special based
// on the type cell's tooltip text (case-insensitive substring match).
// Episode_UNKNOWN is returned when the text matches neither category.
func parseEpisodeType(s *goquery.Selection) data.Episode_Type {
	raw := s.Find("td abbr").First().AttrOr("title", "")
	raw = strings.TrimSpace(strings.ToLower(raw))
	// Plain substring checks: the previous regexps contained no
	// metacharacters and were recompiled on every call.
	switch {
	case strings.Contains(raw, "regular"):
		return data.Episode_REGULAR
	case strings.Contains(raw, "special"):
		return data.Episode_SPECIAL
	default:
		return data.Episode_UNKNOWN
	}
}
// Parses episode number. Error is returned if episode isn't numbered.
func parseEpisodeNumber(s *goquery.Selection) (int32, error) {
	raw := s.Find("td.eid abbr").First().Text()
	// Extract the first run of digits from the cell text.
	match := regexp.MustCompile(`\d+`).FindStringSubmatch(raw)
	if len(match) == 0 {
		return 0, fmt.Errorf("not found: %v", raw)
	}
	num, err := strconv.Atoi(match[0])
	if err != nil {
		return 0, err
	}
	return int32(num), nil
}
// Parses name of an episode. Empty string is returned if episode doesn't have a name.
func parseEpisodeName(s *goquery.Selection) string {
	raw := s.Find(`td.name label`).First().Text()
	// generic name like "Episode 1" should be skipped
	if regexp.MustCompile(`episode\s+[\d.]+`).MatchString(strings.ToLower(raw)) {
		return ""
	}
	return strings.TrimSpace(raw)
}
// Parses episode duration. Zero is returned if episode doesn't have duration.
// Only the whole-minute component is captured (`(\d+)\s*m`), and the
// float-to-Duration conversion truncates any fractional part.
func parseEpisodeDuration(s *goquery.Selection) (time.Duration, error) {
	raw := s.Find("td.duration").First().Text()
	raw = strings.TrimSpace(raw)
	if len(raw) == 0 {
		return 0, fmt.Errorf("not found: %v", s)
	}
	match := regexp.MustCompile(`(\d+)\s*m`).FindStringSubmatch(raw)
	if len(match) == 0 {
		return 0, fmt.Errorf("not found: %v", raw)
	}
	mins, err := strconv.ParseFloat(match[1], 64)
	if err != nil {
		return 0, fmt.Errorf("parsing failed: %v", err)
	}
	return time.Duration(mins) * time.Minute, nil
}
// Parses episode air date. Returns zero if episode doesn't have air date.
// Prefers the machine-readable "content" attribute and falls back to the
// cell text.
func parseEpisodeDate(s *goquery.Selection) (time.Time, error) {
	raw := s.Find("td.airdate").First()
	d, err := parseDate(raw.AttrOr("content", raw.Text()))
	if err != nil {
		return time.Time{}, fmt.Errorf("parsing failed: %v", err)
	}
	return d, nil
} | parser/anidb/episode.go | 0.627609 | 0.407039 | episode.go | starcoder |
package gp
import (
"math"
"time"
)
// FitnessFunc scores a GP individual against paired input/output samples.
// Lower values are better; 1 is the floor for the example functions below.
// NOTE(review): the `environment` package used below is not in this file's
// import list — confirm the import exists upstream.
type FitnessFunc func(gp *GP, inputs, outputs [][]float64) int
// An example fitness function which treats each output row as a target
// environment to compare against the environment after the GP has
// modified it.
func EnvFitness(g *GP, inputs, outputs [][]float64) int {
	fitness := 1
	for i, envDiff := range inputs {
		g.Env = environment.New(envDiff)
		Eval(g.First)
		fitness += g.Env.Diff(outputs[i])
	}
	return fitness
}
// MatchEnvFitness is like EnvFitness but scores with MatchDiff
// instead of Diff.
func MatchEnvFitness(g *GP, inputs, outputs [][]float64) int {
	fitness := 1
	for i, envDiff := range inputs {
		g.Env = environment.New(envDiff)
		Eval(g.First)
		fitness += g.Env.MatchDiff(outputs[i])
	}
	return fitness
}
// MatchMemFitness compares the GP's memory (rather than its
// environment) against the expected outputs after evaluation.
func MatchMemFitness(g *GP, inputs, outputs [][]float64) int {
	fitness := 1
	for i, envDiff := range inputs {
		g.Env = environment.New(envDiff)
		Eval(g.First)
		fitness += g.Mem.MatchDiff(outputs[i])
	}
	return fitness
}
// An example fitness which treats the
// output of the GP as a value to compare
// against the single expected output
func OutputFitness(g *GP, inputs, outputs [][]float64) int {
	fitness := 1
	for i, envDiff := range inputs {
		g.Env = environment.New(envDiff)
		out := Eval(g.First)
		fitness += int(math.Abs(float64(out - int(outputs[i][0]))))
	}
	return fitness
}
// Mem0Fitness compares the first memory cell of the GP against the
// expected output. It panics when the GP has no memory configured,
// as that is a programmer error in the experiment setup.
func Mem0Fitness(g *GP, inputs, outputs [][]float64) int {
	if g.Mem == nil {
		panic("Mem0Fitness used on GPs without memory")
	}
	fitness := 1
	for i, envDiff := range inputs {
		g.Env = environment.New(envDiff)
		Eval(g.First)
		fitness += int(math.Abs(float64(*(*g.Mem)[0]) - outputs[i][0]))
	}
	return fitness
}
// ComplexityFitness wraps f, adding a penalty proportional to the
// individual's node count (mod * g.Nodes). Integer overflow from the
// penalty is clamped to math.MaxInt32.
func ComplexityFitness(f FitnessFunc, mod float64) FitnessFunc {
	return func(g *GP, inputs, outputs [][]float64) int {
		i := f(g, inputs, outputs)
		i += int(math.Floor(mod * float64(g.Nodes)))
		if i < 0 {
			i = math.MaxInt32
		}
		return i
	}
}
// TimeFitness wraps f, rewarding individuals that score at or below
// threshold by subtracting a bonus proportional to how far their run
// time stays under timeLimit (in seconds). threshold is then added
// back so the result remains non-negative.
func TimeFitness(f FitnessFunc, threshold int, timeLimit int) FitnessFunc {
	return func(g *GP, inputs, outputs [][]float64) int {
		t1 := time.Now()
		i := f(g, inputs, outputs)
		t2 := time.Now().Sub(t1)
		if i <= threshold {
			t3 := int(t2 / time.Second)
			if t3 < timeLimit {
				i -= int(math.Floor(float64(threshold) * (float64(t3) / float64(timeLimit))))
			}
		}
		i += threshold
		return i
	}
} | gp/fitness.go | 0.703957 | 0.452294 | fitness.go | starcoder |
package controller
import (
"fmt"
wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
)
// counter is a predicate over workflow nodes; countNodes tallies the
// nodes for which it returns true.
type counter func(wfv1.NodeStatus) bool
// getActivePodsCounter returns a counter matching pod nodes that are
// pending or running, are not waiting on a synchronization lock, and
// whose backing pod actually exists — optionally scoped to boundaryID.
func (woc *wfOperationCtx) getActivePodsCounter(boundaryID string) counter {
	return func(node wfv1.NodeStatus) bool {
		return node.Type == wfv1.NodeTypePod &&
			// Only count pods that match the provided boundaryID, or all if no boundaryID was provided
			(boundaryID == "" || node.BoundaryID == boundaryID) &&
			// Only count Running or Pending pods
			(node.Phase == wfv1.NodePending || node.Phase == wfv1.NodeRunning) &&
			// Only count pods that are NOT waiting for a lock
			(node.SynchronizationStatus == nil || node.SynchronizationStatus.Waiting == "") &&
			// Only count pods that are created.
			woc.nodePodExist(node)
	}
}
// getActiveChildrenCounter returns a counter matching pending/running
// Pod, Steps, or DAG nodes directly within the given boundary.
func getActiveChildrenCounter(boundaryID string) counter {
	return func(node wfv1.NodeStatus) bool {
		return node.BoundaryID == boundaryID &&
			// Only count Pods, Steps, or DAGs
			(node.Type == wfv1.NodeTypePod || node.Type == wfv1.NodeTypeSteps || node.Type == wfv1.NodeTypeDAG) &&
			// Only count Running or Pending nodes
			(node.Phase == wfv1.NodePending || node.Phase == wfv1.NodeRunning)
	}
}
// getUnsuccessfulChildrenCounter returns a counter matching failed or
// errored Pod, Steps, or DAG nodes directly within the given boundary.
func getUnsuccessfulChildrenCounter(boundaryID string) counter {
	return func(node wfv1.NodeStatus) bool {
		return node.BoundaryID == boundaryID &&
			// Only count Pods, Steps, or DAGs
			(node.Type == wfv1.NodeTypePod || node.Type == wfv1.NodeTypeSteps || node.Type == wfv1.NodeTypeDAG) &&
			// Only count Failed or Errored nodes
			(node.Phase == wfv1.NodeFailed || node.Phase == wfv1.NodeError)
	}
}
// getActivePods counts the workflow's active pod nodes within boundaryID.
func (woc *wfOperationCtx) getActivePods(boundaryID string) int64 {
	return woc.countNodes(woc.getActivePodsCounter(boundaryID))
}
// getActiveChildren counts pending/running child nodes within boundaryID.
func (woc *wfOperationCtx) getActiveChildren(boundaryID string) int64 {
	return woc.countNodes(getActiveChildrenCounter(boundaryID))
}
// getUnsuccessfulChildren counts failed/errored child nodes within boundaryID.
func (woc *wfOperationCtx) getUnsuccessfulChildren(boundaryID string) int64 {
	return woc.countNodes(getUnsuccessfulChildrenCounter(boundaryID))
}
// nodePodExist reports whether the pod backing node is present in the
// controller's pod informer cache (keyed as "namespace/nodeID").
func (woc *wfOperationCtx) nodePodExist(node wfv1.NodeStatus) bool {
	_, podExist, _ := woc.controller.podInformer.GetIndexer().GetByKey(fmt.Sprintf("%s/%s", woc.wf.Namespace, node.ID))
	return podExist
}
// countNodes applies counter to every node in the workflow status and
// returns how many matched.
func (woc *wfOperationCtx) countNodes(counter counter) int64 {
	count := 0
	for _, node := range woc.wf.Status.Nodes {
		if counter(node) {
			count++
		}
	}
	return int64(count)
} | workflow/controller/node_counters.go | 0.628293 | 0.400749 | node_counters.go | starcoder |
package adaboost_classifier
import (
"fmt"
"sync"
"sort"
mlearn "datamining-hw/machine_learning"
)
//----------------------------------------------------------------------------------------------------------------------
// CARTClassifier is a trained CART decision tree classifier.
type CARTClassifier struct {
	tree_root *treeNode // root of the trained decision tree
}
// CARTClassifierTrainOptions controls how the tree is grown.
type CARTClassifierTrainOptions struct {
	MaxDepth int // maximum tree height
	TargetImpurity float64 // stop splitting once impurity drops to this level
	MinElementsInLeaf int // minimum number of samples allowed in a leaf
	EnableEmbeddedFeaturesRanking bool // accumulate per-feature importance while training
}
// CARTClassifierTrainer builds CART classifiers, reusing scratch
// buffers across trainings via sync.Pools.
type CARTClassifierTrainer struct {
	weights []float64 // per-sample weights used in impurity computations
	sampleIndicesPool sync.Pool // pools of arg-sorted sample index buffers
	classesDistPool sync.Pool // pools of class-distribution buffers
	Options CARTClassifierTrainOptions
	EmbeddedFeaturesRank []float64 // per-feature importance, filled when enabled
}
// internal helper structures
type (
	// treeNode is a single binary split; a class of -1 on a side means
	// that side continues into the corresponding child subtree.
	treeNode struct {
		split_value float64
		split_feature int
		left_child, right_child *treeNode
		left_class, right_class int /* -1 means the corresponding child subtree is used instead of a leaf class */
	}
	// splitPartInfo describes one side of a candidate split.
	splitPartInfo struct {
		class int
		impurity float64
		weight float64
		samplesCount int
		classes_dist []float64
	}
	splitPartsInfo struct {
		left_part splitPartInfo
		right_part splitPartInfo
	}
	// testSplitInfo is a candidate split under evaluation.
	testSplitInfo struct {
		impurity float64
		split_pos int
		split_parts splitPartsInfo
		feature_id int
	}
	// argSortedSamplesBuffered holds per-feature arg-sorted sample
	// indices plus a simple reference count for buffer reuse.
	argSortedSamplesBuffered struct {
		indices [][]int
		refs int
	}
)
// PredictProbe returns the class predicted for a single feature vector.
func (cc *CARTClassifier) PredictProbe(probe []float64) int {
	return cc.predictProbe(probe, cc.tree_root)
}

// predictProbe walks the decision tree iteratively from node down to a
// terminal split, following the left branch when the probe's split
// feature is <= the split value, and returns the leaf class.
func (cc *CARTClassifier) predictProbe(probe []float64, node *treeNode) int {
	for {
		if probe[node.split_feature] <= node.split_value {
			if node.left_class != -1 {
				return node.left_class
			}
			node = node.left_child
		} else {
			if node.right_class != -1 {
				return node.right_class
			}
			node = node.right_child
		}
	}
}
func NewCARTClassifierTrainer(data_set *mlearn.DataSet, options CARTClassifierTrainOptions) *CARTClassifierTrainer {
trainer := CARTClassifierTrainer{Options: options}
trainer.sampleIndicesPool.New = func() interface{} {
buffer := make([]int, data_set.SamplesNum * data_set.FeaturesNum)
argSortedSamples := make([][]int, data_set.FeaturesNum*2)
for i := range argSortedSamples {
j := i % data_set.FeaturesNum
argSortedSamples[i] = buffer[j*data_set.SamplesNum : (j+1)*data_set.SamplesNum]
}
return argSortedSamples
}
trainer.classesDistPool.New = func() interface{} {
return make([]float64, data_set.ClassesNum)
}
return &trainer
}
// TrainClassifierWithWeights builds a CART tree over data_set using the
// given per-sample weights (expected to sum to 1, see TrainClassifier).
// Note that it normalizes trainer.Options in place: TargetImpurity and
// MinElementsInLeaf are raised to sane minimums before training.
func (trainer *CARTClassifierTrainer) TrainClassifierWithWeights(data_set *mlearn.DataSet, weights []float64) mlearn.BaseClassifier {
	classifier := &CARTClassifier{}
	trainer.weights = weights
	// zero target impurity is not tolerant to float rounding errors
	// (10e-9 == 1e-8)
	const minTargetImpurity = 10e-9
	if trainer.Options.TargetImpurity < minTargetImpurity {
		trainer.Options.TargetImpurity = minTargetImpurity
	}
	// reasonable default
	const defaultMinimalElementsInLeaf = 3
	if trainer.Options.MinElementsInLeaf == 0 {
		trainer.Options.MinElementsInLeaf = defaultMinimalElementsInLeaf
	}
	if trainer.Options.EnableEmbeddedFeaturesRanking {
		trainer.EmbeddedFeaturesRank = make([]float64, data_set.FeaturesNum)
	}
	classifier.tree_root = trainer.buildTree(data_set)
	return classifier
}
// TrainClassifier trains a CART classifier giving every sample the same
// weight 1/SamplesNum, so the weights sum to one.
func (trainer *CARTClassifierTrainer) TrainClassifier(data_set *mlearn.DataSet) mlearn.BaseClassifier {
	uniform := make([]float64, data_set.SamplesNum)
	w := 1.0 / float64(data_set.SamplesNum)
	for i := range uniform {
		uniform[i] = w
	}
	return trainer.TrainClassifierWithWeights(data_set, uniform)
}
// buildTree computes the weighted class distribution of the whole data set
// and grows the tree from the root. The root reuses the data set's
// precomputed per-feature arg-sorted sample indices.
func (trainer *CARTClassifierTrainer) buildTree(data_set *mlearn.DataSet) *treeNode {
	classes_dist := trainer.classesDistPool.Get().([]float64)
	// pooled buffers may contain stale values, so zero them first
	for i := range classes_dist { classes_dist[i] = 0 }
	for i, c := range data_set.Classes {
		classes_dist[c] += trainer.weights[i]
	}
	// refs starts at 2 so the pooled-buffer logic in makeNode never returns
	// this data-set-owned buffer to the pool
	arg_ordered_samples_buffer := argSortedSamplesBuffered {
		refs: 2,
		indices: data_set.ArgOrderedByFeature,
	}
	// total weight is 1.0 because the sample weights are normalized
	return trainer.makeNode(data_set, &arg_ordered_samples_buffer, classes_dist, 1.0, trainer.Options.MaxDepth)
}
// makeTerminalNode builds a leaf node that decides between left_class and
// right_class directly, with no child subtrees.
func makeTerminalNode(split_feature int, split_value float64, left_class int, right_class int) *treeNode {
	node := new(treeNode)
	node.split_feature = split_feature
	node.split_value = split_value
	node.left_class = left_class
	node.right_class = right_class
	return node
}
// makeNode recursively builds the (sub)tree for the sample subset described
// by arg_sorted_samples. classes_dist is the weighted class distribution of
// that subset, sum_weight its total weight, and depth the number of levels
// still allowed (depth == 1 forces a terminal node). Children with enough
// samples are built concurrently; barrierChannel joins the goroutines
// before the node is returned, so node.left_child/right_child are safely
// published to the caller.
func (tr *CARTClassifierTrainer) makeNode(data_set *mlearn.DataSet, arg_sorted_samples *argSortedSamplesBuffered,
	classes_dist []float64, sum_weight float64, depth int) *treeNode {
	split_feature, split_val, parts_info := tr.findBestSplit(data_set, arg_sorted_samples.indices, classes_dist, sum_weight)
	// maximum tree height exceeded
	if depth == 1 {
		return makeTerminalNode(split_feature, split_val, parts_info.left_part.class, parts_info.right_part.class)
	}
	node := new(treeNode)
	node.split_feature = split_feature
	node.split_value = split_val
	node.left_class = -1
	node.right_class = -1
	new_arg_sorted_samples_buffer := tr.sampleIndicesPool.Get().([][]int)
	new_arg_sorted_samples_left, new_arg_sorted_samples_right :=
		tr.splitSamples(data_set, arg_sorted_samples.indices, new_arg_sorted_samples_buffer, split_val,
			split_feature, parts_info.left_part.samplesCount-1)
	// NOTE(review): every argSortedSamplesBuffered in this file is created
	// with refs: 2 but decremented exactly once (here) by its single
	// consumer, so refs never reaches 0 and indices are never Put back —
	// the pool allocates a fresh buffer on every Get. Verify whether the
	// ref-counting is dead code or the initial count should be 1.
	arg_sorted_samples.refs--
	if arg_sorted_samples.refs <= 0 {
		tr.sampleIndicesPool.Put(arg_sorted_samples.indices)
	}
	const minimumSamplesNumForParallel = 256
	barrierChannel := make(chan int)
	var parallelRecursiveCalls int
	// the node is already pure, no need to split it recursively
	if parts_info.left_part.impurity <= tr.Options.TargetImpurity || parts_info.left_part.samplesCount < tr.Options.MinElementsInLeaf {
		node.left_class = parts_info.left_part.class
	} else {
		new_arg_sorted_samples := argSortedSamplesBuffered{ indices: new_arg_sorted_samples_left, refs: 2 }
		// small subsets are built inline; larger ones get their own goroutine
		if len(new_arg_sorted_samples.indices[0]) < minimumSamplesNumForParallel {
			node.left_child = tr.makeNode(data_set, &new_arg_sorted_samples, parts_info.left_part.classes_dist, parts_info.left_part.weight, depth - 1)
		} else {
			parallelRecursiveCalls++
			go func(new_arg_sorted_samples *argSortedSamplesBuffered) {
				node.left_child = tr.makeNode(data_set, new_arg_sorted_samples, parts_info.left_part.classes_dist, parts_info.left_part.weight, depth - 1)
				barrierChannel <- 1
			}(&new_arg_sorted_samples)
		}
	}
	if parts_info.right_part.impurity <= tr.Options.TargetImpurity || parts_info.right_part.samplesCount < tr.Options.MinElementsInLeaf {
		node.right_class = parts_info.right_part.class
	} else {
		new_arg_sorted_samples := argSortedSamplesBuffered{ indices: new_arg_sorted_samples_right, refs: 2 }
		if len(new_arg_sorted_samples.indices[0]) < minimumSamplesNumForParallel {
			node.right_child = tr.makeNode(data_set, &new_arg_sorted_samples, parts_info.right_part.classes_dist, parts_info.right_part.weight, depth - 1)
		} else {
			parallelRecursiveCalls++
			go func(new_arg_sorted_samples *argSortedSamplesBuffered) {
				node.right_child = tr.makeNode(data_set, new_arg_sorted_samples, parts_info.right_part.classes_dist, parts_info.right_part.weight, depth - 1)
				barrierChannel <- 2
			}(&new_arg_sorted_samples)
		}
	}
	// waiting for all recursive calls to return here
	for parallelRecursiveCalls > 0 {
		<-barrierChannel
		parallelRecursiveCalls--
	}
	return node
}
// splitSamples partitions the per-feature arg-sorted sample indices into a
// left part (feature value < filter_val) and a right part, preserving the
// per-feature sort order. Both output halves are carved out of the same
// pooled buffer: left takes [:split_pos+1] and right [split_pos+1:] of each
// feature's region, so they never overlap.
// NOTE(review): only the left side guards against overflowing its window
// (the >= len check below); the right side assumes exactly
// len-split_pos-1 samples land there. Samples equal to filter_val go right
// here, while predictProbe sends values <= split_value left — with
// filter_val being the midpoint of two adjacent distinct sample values the
// two conventions agree, but verify for duplicated feature values.
func (tr *CARTClassifierTrainer) splitSamples(data_set *mlearn.DataSet, arg_sorted_samples [][]int, filtered_samples_buffer [][]int,
	filter_val float64, split_feature, split_pos int) ([][]int, [][]int) {
	filtered_samples_left := filtered_samples_buffer
	filtered_samples_right := filtered_samples_left[data_set.FeaturesNum:]
	filtered_samples_left = filtered_samples_left[:data_set.FeaturesNum]
	for j := 0; j < data_set.FeaturesNum; j++ {
		filtered_left_samples_feature := filtered_samples_buffer[j][:split_pos+1]
		filtered_right_samples_feature := filtered_samples_buffer[j][split_pos+1:len(arg_sorted_samples[0])]
		filtered_left_samples_count := 0
		filtered_right_samples_count := 0
		for _, i := range arg_sorted_samples[j] {
			if data_set.SamplesByFeature[split_feature][i] < filter_val {
				// more left-side samples than the window can hold: count but drop
				if filtered_left_samples_count >= len(filtered_left_samples_feature) {
					filtered_left_samples_count++
					continue
				}
				filtered_left_samples_feature[filtered_left_samples_count] = i
				filtered_left_samples_count++
			} else {
				filtered_right_samples_feature[filtered_right_samples_count] = i
				filtered_right_samples_count++
			}
		}
		filtered_samples_left[j] = filtered_left_samples_feature
		filtered_samples_right[j] = filtered_right_samples_feature
	}
	return filtered_samples_left, filtered_samples_right
}
// findBestSplitByFeaturesChunk evaluates all candidate split positions for
// the features in the half-open range [features_subset[0], features_subset[1])
// and returns the best one. If no feature in the range yields a split with
// impurity below 1.0 (or the range is empty), the returned info keeps the
// sentinel values feature_id == -1 and split_pos == -3.
func (tr *CARTClassifierTrainer) findBestSplitByFeaturesChunk(data_set *mlearn.DataSet, arg_sorted_samples [][]int, features_subset [2]int,
	classes_dist []float64, sum_weight float64) testSplitInfo {
	var best_split_info testSplitInfo
	best_split_info.impurity = 1.0
	best_split_info.feature_id = -1
	best_split_info.split_pos = -3
	for j := features_subset[0]; j < features_subset[1]; j++ {
		impurity, best_split_pos, best_split_parts :=
			tr.findBestSplitPosition(data_set, arg_sorted_samples[j], j, classes_dist, sum_weight)
		if impurity < best_split_info.impurity {
			best_split_info = testSplitInfo{ impurity: impurity, split_pos: best_split_pos,
				split_parts: best_split_parts, feature_id: j }
		}
	}
	return best_split_info
}
// minInt returns the smaller of two ints.
func minInt(x, y int) int {
	if y < x {
		return y
	}
	return x
}
// findBestSplit scans every feature for the split position that minimizes
// the weighted Gini impurity of the two resulting parts. It returns the
// winning feature, the numeric threshold (the midpoint between the two
// samples adjacent to the split position) and per-part statistics.
// classes_dist is returned to the pool before this function returns, so the
// caller must not use it afterwards.
func (tr *CARTClassifierTrainer) findBestSplit(data_set *mlearn.DataSet, arg_sorted_samples [][]int, classes_dist []float64, sum_weight float64) (
	best_split_feature int, split_val float64, parts_info splitPartsInfo) {
	const parallelTreadsCount = 4
	const minimumSamplesForParallel = 2048
	best_split_feature = -2
	best_impurity := 1.0
	split_pos := -1
	// really no performance profit with parallel threads
	if len(arg_sorted_samples[0]) < minimumSamplesForParallel {
		bounds := [2]int{ 0, data_set.FeaturesNum }
		next_split_test := tr.findBestSplitByFeaturesChunk(data_set, arg_sorted_samples, bounds, classes_dist, sum_weight)
		best_impurity = next_split_test.impurity
		best_split_feature = next_split_test.feature_id
		split_pos = next_split_test.split_pos
		parts_info = next_split_test.split_parts
	} else {
		// maybe some speed up with parallel processing
		channel := make(chan testSplitInfo, parallelTreadsCount)
		// Ceiling division so that parallelTreadsCount chunks always cover all
		// features. The previous floor division plus one extra chunk of the
		// same size left trailing features unscanned whenever
		// FeaturesNum%parallelTreadsCount > FeaturesNum/parallelTreadsCount
		// (e.g. FeaturesNum=6 produced chunks [0,1)..[4,5) and never evaluated
		// feature 5).
		features_for_one_thread := (data_set.FeaturesNum + parallelTreadsCount - 1) / parallelTreadsCount
		routine := func(j int) {
			bounds := [2]int{ features_for_one_thread * j, minInt(features_for_one_thread * (j + 1), data_set.FeaturesNum) }
			channel <- tr.findBestSplitByFeaturesChunk(data_set, arg_sorted_samples, bounds, classes_dist, sum_weight)
		}
		// map features subsets to workers
		for j := 0; j < parallelTreadsCount; j++ {
			go routine(j)
		}
		// reduce to the best split
		for j := 0; j < parallelTreadsCount; j++ {
			next_split_test := <- channel
			// chunks whose bounds lie past the last feature scan nothing and
			// report the sentinel feature_id == -1; skip them
			if next_split_test.feature_id < 0 {
				continue
			}
			if next_split_test.impurity < best_impurity ||
				/* this magic is needed to make results channel-ordering-independent */
				(next_split_test.impurity == best_impurity && next_split_test.feature_id < best_split_feature) {
				best_impurity = next_split_test.impurity
				best_split_feature = next_split_test.feature_id
				split_pos = next_split_test.split_pos
				parts_info = next_split_test.split_parts
			}
		}
	}
	// NOTE(review): if no valid split was found, best_split_feature stays
	// negative and the indexing below panics; callers appear to rely on a
	// split always existing — confirm.
	split_left_id := arg_sorted_samples[best_split_feature][split_pos]
	split_right_id := arg_sorted_samples[best_split_feature][split_pos + 1]
	// threshold is the midpoint between the two samples straddling the split
	split_val = (data_set.SamplesByFeature[best_split_feature][split_left_id] +
		data_set.SamplesByFeature[best_split_feature][split_right_id]) / 2.0
	parts_info.left_part.samplesCount = split_pos + 1
	parts_info.right_part.samplesCount = len(arg_sorted_samples[best_split_feature]) - split_pos - 1
	// embedded features ranking
	if tr.Options.EnableEmbeddedFeaturesRanking {
		impurity_improvement := giniImpurity(classes_dist, sum_weight) - best_impurity
		tr.EmbeddedFeaturesRank[best_split_feature] += impurity_improvement
	}
	tr.classesDistPool.Put(classes_dist)
	return
}
// argmax returns the index of the first maximal element of slice.
// Ties resolve to the lowest index on every path: the previous two-element
// fast path used a strict '>' and returned index 1 on a tie, contradicting
// the general scan below, which keeps the first maximum.
func argmax(slice []float64) int {
	// special two-class case handling to improve performance
	if len(slice) == 2 {
		// '>=' so that a tie yields index 0, consistent with the general path
		if slice[0] >= slice[1] {
			return 0
		}
		return 1
	}
	max_i := 0
	max_v := slice[0]
	for i, v := range slice {
		if v > max_v {
			max_v = v
			max_i = i
		}
	}
	return max_i
}
// findBestSplitPosition sweeps the samples of one feature in sorted order,
// incrementally moving weight from the right distribution to the left, and
// returns the split position with the lowest weighted Gini impurity. A
// position is only considered when the next sample has a different feature
// value (otherwise the threshold would cut between equal values). Because
// the comparison below uses '<=', later positions win ties.
// Returns best_split_pos == -1 (impurity 1.0) when all feature values are
// equal and no position is evaluated.
func (tr *CARTClassifierTrainer) findBestSplitPosition(data_set *mlearn.DataSet, cur_subset_indices []int,
	cur_feature int,
	classes_dist []float64,
	sum_weight float64) (best_split_impurity float64,
	best_split_pos int,
	best_split_parts splitPartsInfo) {
	best_split_impurity = 1.0
	best_split_pos = -1
	// initialize left classes distribution
	left_classes_dist := tr.classesDistPool.Get().([]float64)
	for i := range left_classes_dist { left_classes_dist[i] = 0 }
	left_part_weight := float64(0.0)
	// initialize right classes distribution
	right_classes_dist := tr.classesDistPool.Get().([]float64)
	copy(right_classes_dist, classes_dist)
	right_part_weight := sum_weight
	prev_feature_val := data_set.SamplesByFeature[cur_feature][cur_subset_indices[0]]
	for i := 0; i < len(cur_subset_indices)-1; i++ {
		sample_index := cur_subset_indices[i]
		next_sample_index := cur_subset_indices[i+1]
		c := data_set.Classes[sample_index]
		w := tr.weights[sample_index]
		left_classes_dist[c] += w
		left_part_weight += w
		right_classes_dist[c] -= w
		right_part_weight -= w
		next_feature_val := data_set.SamplesByFeature[cur_feature][next_sample_index]
		// can't split between equal feature values
		if prev_feature_val == next_feature_val {
			continue
		}
		prev_feature_val = next_feature_val
		left_impurity := giniImpurity(left_classes_dist, left_part_weight)
		right_impurity := giniImpurity(right_classes_dist, right_part_weight)
		split_impurity := (left_part_weight * left_impurity + right_part_weight * right_impurity) / sum_weight
		left_part_class := argmax(left_classes_dist)
		right_part_class := argmax(right_classes_dist)
		if split_impurity <= best_split_impurity {
			best_split_impurity = split_impurity
			best_split_pos = i
			// lazily allocate dedicated buffers to hold the best distributions
			if best_split_parts.left_part.classes_dist == nil {
				best_split_parts.left_part.classes_dist = tr.classesDistPool.Get().([]float64)
				best_split_parts.right_part.classes_dist = tr.classesDistPool.Get().([]float64)
			}
			// Buffer swap: hand the current (working) distributions to
			// best_split_parts, recycle the previous best buffers as the new
			// working buffers, then copy the current state back into them so
			// the incremental sweep can continue unchanged.
			left_classes_dist_old, right_classes_dist_old := best_split_parts.left_part.classes_dist,
				best_split_parts.right_part.classes_dist
			best_split_parts.left_part = splitPartInfo{ class: left_part_class, impurity: left_impurity,
				weight: left_part_weight, classes_dist: left_classes_dist }
			best_split_parts.right_part = splitPartInfo{ class: right_part_class, impurity: right_impurity,
				weight: right_part_weight, classes_dist: right_classes_dist }
			left_classes_dist, right_classes_dist = left_classes_dist_old, right_classes_dist_old
			copy(left_classes_dist, best_split_parts.left_part.classes_dist)
			copy(right_classes_dist, best_split_parts.right_part.classes_dist)
		}
	}
	// return only the working buffers; the best buffers travel with the result
	tr.classesDistPool.Put(left_classes_dist)
	tr.classesDistPool.Put(right_classes_dist)
	return
}
// giniImpurity computes the Gini impurity 1 - sum((w_c/W)^2) for a weighted
// class distribution, where W is the total weight sum_weight.
func giniImpurity(classes_portions []float64, sum_weight float64) float64 {
	sumOfSquares := 0.0
	for _, w := range classes_portions {
		sumOfSquares += w * w
	}
	return 1.0 - sumOfSquares/(sum_weight*sum_weight)
}
// GetRankedFeatures returns the feature indices ordered by their embedded
// importance rank (ordering is delegated to mlearn.FeaturesRankSorter).
// Only meaningful when EnableEmbeddedFeaturesRanking was set during training.
func (tr *CARTClassifierTrainer) GetRankedFeatures() []int {
	indices := make([]int, len(tr.EmbeddedFeaturesRank))
	for i := range indices {
		indices[i] = i
	}
	sort.Sort(mlearn.FeaturesRankSorter{FeaturesRank: tr.EmbeddedFeaturesRank, Indices: indices})
	return indices
}
// GetFeaturesRank returns the accumulated per-feature impurity-improvement
// scores gathered during training.
func (tr *CARTClassifierTrainer) GetFeaturesRank() []float64 {
	return tr.EmbeddedFeaturesRank
}
// Dump prints the whole trained tree to stdout for debugging.
func (cc *CARTClassifier) Dump() {
	cc.dump(cc.tree_root)
}
// dump prints the subtree rooted at node in pre-order (node, left, right);
// a child class of -1 marks an internal edge that has a subtree to print.
func (cc *CARTClassifier) dump(node *treeNode) {
	fmt.Println(*node)
	var children []*treeNode
	if node.left_class == -1 {
		children = append(children, node.left_child)
	}
	if node.right_class == -1 {
		children = append(children, node.right_child)
	}
	for _, child := range children {
		cc.dump(child)
	}
}
// CloneEmpty returns a shallow copy of the classifier with the trained tree
// dropped, suitable for retraining with the same configuration.
func (cc *CARTClassifier) CloneEmpty() mlearn.BaseClassifier {
	clone := new(CARTClassifier)
	*clone = *cc
	clone.tree_root = nil
	return clone
}
package charts
import (
"github.com/weslintw/go-tachart/opts"
)
// Overlaper is implemented by charts whose series can be merged onto a
// shared rectangular canvas via RectChart.Overlap.
type Overlaper interface {
	overlap() MultiSeries
}
// XYAxis represent the X and Y axis in the rectangular coordinates.
type XYAxis struct {
	XAxisList []opts.XAxis `json:"xaxis"`
	YAxisList []opts.YAxis `json:"yaxis"`
}
// initXYAxis seeds the chart with one default (zero-valued) X axis and one
// default Y axis.
func (xy *XYAxis) initXYAxis() {
	var (
		defaultX opts.XAxis
		defaultY opts.YAxis
	)
	xy.XAxisList = append(xy.XAxisList, defaultX)
	xy.YAxisList = append(xy.YAxisList, defaultY)
}
// ExtendXAxis adds new X axes to the chart (appended after the existing ones).
func (xy *XYAxis) ExtendXAxis(xAxis ...opts.XAxis) {
	xy.XAxisList = append(xy.XAxisList, xAxis...)
}
// ExtendYAxis adds new Y axes to the chart (appended after the existing ones).
func (xy *XYAxis) ExtendYAxis(yAxis ...opts.YAxis) {
	xy.YAxisList = append(xy.YAxisList, yAxis...)
}
// WithXAxisOpts returns a GlobalOpts that replaces the X axis options at
// the given indices; with no indices supplied it targets index 0.
func WithXAxisOpts(opt opts.XAxis, index ...int) GlobalOpts {
	return func(bc *BaseConfiguration) {
		targets := index
		if len(targets) == 0 {
			targets = []int{0}
		}
		for _, idx := range targets {
			bc.XYAxis.XAxisList[idx] = opt
		}
	}
}
// WithYAxisOpts returns a GlobalOpts that replaces the Y axis options at
// the given indices; with no indices supplied it targets index 0.
func WithYAxisOpts(opt opts.YAxis, index ...int) GlobalOpts {
	return func(bc *BaseConfiguration) {
		targets := index
		if len(targets) == 0 {
			targets = []int{0}
		}
		for _, idx := range targets {
			bc.XYAxis.YAxisList[idx] = opt
		}
	}
}
// RectConfiguration contains options for the rectangular coordinates.
type RectConfiguration struct {
	BaseConfiguration
}
// setRectGlobalOptions applies the given global options to the embedded
// BaseConfiguration.
func (rect *RectConfiguration) setRectGlobalOptions(options ...GlobalOpts) {
	rect.BaseConfiguration.setBaseGlobalOptions(options...)
}
// RectChart is a chart in RectChart coordinate.
type RectChart struct {
	RectConfiguration
	// xAxisData keeps the original X axis data so Validate can restore it
	// after a global option overwrites XAxisList[0]
	xAxisData interface{}
}
// overlap exposes the chart's series so another RectChart can merge them
// via Overlap.
func (rc *RectChart) overlap() MultiSeries {
	return rc.MultiSeries
}
// SetGlobalOptions sets options for the RectChart instance and returns the
// chart for chaining.
func (rc *RectChart) SetGlobalOptions(options ...GlobalOpts) *RectChart {
	rc.RectConfiguration.setRectGlobalOptions(options...)
	return rc
}
// Overlap composes multiple charts into one single canvas.
// It is only suited for some of the charts which are in rectangular coordinate.
// Supported charts: Bar/BoxPlot/Line/Scatter/EffectScatter/Kline/HeatMap
func (rc *RectChart) Overlap(a ...Overlaper) {
	for _, chart := range a {
		rc.MultiSeries = append(rc.MultiSeries, chart.overlap()...)
	}
}
// Validate
func (rc *RectChart) Validate() {
// Make sure that the data of X axis won't be cleaned for XAxisOpts
rc.XAxisList[0].Data = rc.xAxisData
// Make sure that the labels of Y axis show correctly
for i := 0; i < len(rc.YAxisList); i++ {
rc.YAxisList[i].AxisLabel.Show = true
}
rc.Assets.Validate(rc.AssetsHost)
} | charts/rectangle.go | 0.783988 | 0.405566 | rectangle.go | starcoder |
package funk
import (
"fmt"
"math/rand"
"reflect"
"strings"
)
// Chunk creates an array of elements split into groups with the length of size.
// If array can't be split evenly, the final chunk will be
// the remaining element.
// A size of 0 returns the input unchanged; a negative size behaves like its
// absolute value (the original modulo-based grouping had that property for
// non-negative indices).
func Chunk(arr interface{}, size int) interface{} {
	if !IsIteratee(arr) {
		// NOTE(review): the wording of this message looks inverted
		// ("neither array nor slice"); preserved byte-for-byte.
		panic("First parameter must be neither array nor slice")
	}
	if size == 0 {
		return arr
	}
	step := size
	if step < 0 {
		step = -step
	}
	arrValue := reflect.ValueOf(arr)
	arrType := arrValue.Type()
	// outer result is a slice of the input's own type
	result := reflect.MakeSlice(reflect.SliceOf(arrType), 0, 0)
	innerType := reflect.SliceOf(arrType.Elem())
	length := arrValue.Len()
	for low := 0; low < length; low += step {
		high := low + step
		if high > length {
			high = length
		}
		chunk := reflect.MakeSlice(innerType, 0, high-low)
		for i := low; i < high; i++ {
			chunk = reflect.Append(chunk, arrValue.Index(i))
		}
		result = reflect.Append(result, chunk)
	}
	return result.Interface()
}
// ToMap transforms a slice of instances to a Map keyed by the struct field
// named pivot, e.g. []*Foo => map[<pivot type>]*Foo. The map values keep the
// element type of the input slice (struct or pointer to struct).
// It panics if in is not a slice.
func ToMap(in interface{}, pivot string) interface{} {
	inValue := reflect.ValueOf(in)
	// input value must be a slice
	if inValue.Kind() != reflect.Slice {
		panic(fmt.Sprintf("%v must be a slice", in))
	}
	elemType := inValue.Type().Elem()
	// dereference a pointer element type to reach the struct and its key field
	structType := elemType
	if structType.Kind() == reflect.Ptr {
		structType = structType.Elem()
	}
	keyField, _ := structType.FieldByName(pivot)
	// map values carry the slice's element type unchanged
	result := reflect.MakeMap(reflect.MapOf(keyField.Type, elemType))
	for i := 0; i < inValue.Len(); i++ {
		item := inValue.Index(i)
		keyHolder := item
		if keyHolder.Kind() == reflect.Ptr {
			keyHolder = keyHolder.Elem()
		}
		result.SetMapIndex(keyHolder.FieldByName(pivot), item)
	}
	return result.Interface()
}
// mapSlice applies funcValue to every element of the slice/array arrValue.
// A one-result function yields a slice of results; a two-result function
// yields a map whose keys are the first results and values the second.
func mapSlice(arrValue reflect.Value, funcValue reflect.Value) reflect.Value {
	funcType := funcValue.Type()
	if funcType.NumIn() != 1 || funcType.NumOut() == 0 || funcType.NumOut() > 2 {
		panic("Map function with an array must have one parameter and must return one or two parameters")
	}
	arrElemType := arrValue.Type().Elem()
	// Checking whether element type is convertible to function's first argument's type.
	if !arrElemType.ConvertibleTo(funcType.In(0)) {
		panic("Map function's argument is not compatible with type of array.")
	}
	if funcType.NumOut() == 1 {
		// Get slice type corresponding to function's return value's type.
		resultSliceType := reflect.SliceOf(funcType.Out(0))
		// MakeSlice takes a slice kind type, and makes a slice.
		resultSlice := reflect.MakeSlice(resultSliceType, 0, 0)
		for i := 0; i < arrValue.Len(); i++ {
			result := funcValue.Call([]reflect.Value{arrValue.Index(i)})[0]
			resultSlice = reflect.Append(resultSlice, result)
		}
		return resultSlice
	}
	if funcType.NumOut() == 2 {
		// value of the map will be the input type
		collectionType := reflect.MapOf(funcType.Out(0), funcType.Out(1))
		// create a map from scratch
		collection := reflect.MakeMap(collectionType)
		for i := 0; i < arrValue.Len(); i++ {
			results := funcValue.Call([]reflect.Value{arrValue.Index(i)})
			collection.SetMapIndex(results[0], results[1])
		}
		return collection
	}
	// unreachable: NumOut was validated above
	return reflect.Value{}
}
// mapMap applies funcValue to every (key, value) pair of the map arrValue.
// A one-result function yields a slice of results (iteration order is the
// map's random order); a two-result function yields a new map built from
// the returned key/value pairs.
func mapMap(arrValue reflect.Value, funcValue reflect.Value) reflect.Value {
	funcType := funcValue.Type()
	if funcType.NumIn() != 2 || funcType.NumOut() == 0 || funcType.NumOut() > 2 {
		panic("Map function with a map must have two parameters and must return one or two parameters")
	}
	// Only one returned parameter, should be a slice
	if funcType.NumOut() == 1 {
		// Get slice type corresponding to function's return value's type.
		resultSliceType := reflect.SliceOf(funcType.Out(0))
		// MakeSlice takes a slice kind type, and makes a slice.
		resultSlice := reflect.MakeSlice(resultSliceType, 0, 0)
		for _, key := range arrValue.MapKeys() {
			results := funcValue.Call([]reflect.Value{key, arrValue.MapIndex(key)})
			result := results[0]
			resultSlice = reflect.Append(resultSlice, result)
		}
		return resultSlice
	}
	// two parameters, should be a map
	if funcType.NumOut() == 2 {
		// value of the map will be the input type
		collectionType := reflect.MapOf(funcType.Out(0), funcType.Out(1))
		// create a map from scratch
		collection := reflect.MakeMap(collectionType)
		for _, key := range arrValue.MapKeys() {
			results := funcValue.Call([]reflect.Value{key, arrValue.MapIndex(key)})
			collection.SetMapIndex(results[0], results[1])
		}
		return collection
	}
	// unreachable: NumOut was validated above
	return reflect.Value{}
}
// Map manipulates an iteratee and transforms it to another type.
// See mapSlice/mapMap for the accepted mapFunc shapes.
func Map(arr interface{}, mapFunc interface{}) interface{} {
	result := mapFn(arr, mapFunc, "Map")
	if result.IsValid() {
		return result.Interface()
	}
	return nil
}
// mapFn validates arr and mapFunc and dispatches to mapSlice or mapMap
// depending on the kind of arr; funcName is only used in panic messages.
func mapFn(arr interface{}, mapFunc interface{}, funcName string) reflect.Value {
	if !IsIteratee(arr) {
		panic("First parameter must be an iteratee")
	}
	if !IsFunction(mapFunc) {
		panic("Second argument must be function")
	}
	var (
		funcValue = reflect.ValueOf(mapFunc)
		arrValue  = reflect.ValueOf(arr)
		arrType   = arrValue.Type()
	)
	kind := arrType.Kind()
	if kind == reflect.Slice || kind == reflect.Array {
		return mapSlice(arrValue, funcValue)
	} else if kind == reflect.Map {
		return mapMap(arrValue, funcValue)
	}
	panic(fmt.Sprintf("Type %s is not supported by "+funcName, arrType.String()))
}
// FlatMap manipulates an iteratee and transforms it to a flattened collection of another type.
// The mapFunc must therefore produce a slice per element (see flatten).
func FlatMap(arr interface{}, mapFunc interface{}) interface{} {
	result := mapFn(arr, mapFunc, "FlatMap")
	if result.IsValid() {
		return flatten(result).Interface()
	}
	return nil
}
// Flatten flattens a two-dimensional array.
func Flatten(out interface{}) interface{} {
	return flatten(reflect.ValueOf(out)).Interface()
}
func flatten(value reflect.Value) reflect.Value {
sliceType := value.Type()
if (value.Kind() != reflect.Slice && value.Kind() != reflect.Array) ||
(sliceType.Elem().Kind() != reflect.Slice && sliceType.Elem().Kind() != reflect.Array) {
panic("Argument must be an array or slice of at least two dimensions")
}
resultSliceType := sliceType.Elem().Elem()
resultSlice := reflect.MakeSlice(reflect.SliceOf(resultSliceType), 0, 0)
length := value.Len()
for i := 0; i < length; i++ {
item := value.Index(i)
resultSlice = reflect.AppendSlice(resultSlice, item)
}
return resultSlice
}
// FlattenDeep recursively flattens array, collapsing any depth of nesting
// into a single slice of the deepest element type.
func FlattenDeep(out interface{}) interface{} {
	return flattenDeep(reflect.ValueOf(out)).Interface()
}
// flattenDeep allocates a result slice of the deepest element type (via
// sliceElem) and delegates the traversal to flattenRecursive.
func flattenDeep(value reflect.Value) reflect.Value {
	sliceType := sliceElem(value.Type())
	resultSlice := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, 0)
	return flattenRecursive(value, resultSlice)
}
func flattenRecursive(value reflect.Value, result reflect.Value) reflect.Value {
length := value.Len()
for i := 0; i < length; i++ {
item := value.Index(i)
kind := item.Kind()
if kind == reflect.Slice || kind == reflect.Array {
result = flattenRecursive(item, result)
} else {
result = reflect.Append(result, item)
}
}
return result
}
// Shuffle creates an array of shuffled values.
// It uses the package-global math/rand source via rand.Perm, so results
// depend on the global seed. Panics for non-array/slice input.
func Shuffle(in interface{}) interface{} {
	value := reflect.ValueOf(in)
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		resultSlice := makeSlice(value, length)
		// place element v of the input at position i of the permutation
		for i, v := range rand.Perm(length) {
			resultSlice.Index(i).Set(value.Index(v))
		}
		return resultSlice.Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Shuffle", valueType.String()))
}
// Reverse transforms an array the first element will become the last,
// the second element will become the second to last, etc.
// Strings are handled separately via ReverseString.
func Reverse(in interface{}) interface{} {
	value := reflect.ValueOf(in)
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.String {
		return ReverseString(in.(string))
	}
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		resultSlice := makeSlice(value, length)
		j := 0
		for i := length - 1; i >= 0; i-- {
			resultSlice.Index(j).Set(value.Index(i))
			j++
		}
		return resultSlice.Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Reverse", valueType.String()))
}
// Uniq creates an array with unique values.
// The first occurrence of each value wins and input order is preserved.
// Elements must be comparable or the map insertion below panics (same as
// the original behavior). Panics for non-array/slice input.
func Uniq(in interface{}) interface{} {
	value := reflect.ValueOf(in)
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		result := makeSlice(value, 0)
		// seen tracks values already appended to the result
		seen := make(map[interface{}]bool, length)
		for i := 0; i < length; i++ {
			val := value.Index(i)
			v := val.Interface()
			if seen[v] {
				continue
			}
			seen[v] = true
			result = reflect.Append(result, val)
			// (the previous version also maintained a counter j here that
			// was never read; it has been removed)
		}
		return result.Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Uniq", valueType.String()))
}
// ConvertSlice converts a slice type to another,
// a perfect example would be to convert a slice of struct to a slice of interface.
// out must be a pointer to the destination slice; the destination receives
// the converted elements appended after any it already holds.
// NOTE(review): the kind checks accept reflect.Array for both arguments,
// but reflect.Append below only works on slices — an array source/dest
// presumably panics inside reflect; confirm before relying on arrays here.
func ConvertSlice(in interface{}, out interface{}) {
	srcValue := reflect.ValueOf(in)
	dstValue := reflect.ValueOf(out)
	if dstValue.Kind() != reflect.Ptr {
		panic("Second argument must be a pointer")
	}
	dstValue = dstValue.Elem()
	if srcValue.Kind() != reflect.Slice && srcValue.Kind() != reflect.Array {
		panic("First argument must be an array or slice")
	}
	if dstValue.Kind() != reflect.Slice && dstValue.Kind() != reflect.Array {
		panic("Second argument must be an array or slice")
	}
	// returns value that points to dstValue
	direct := reflect.Indirect(dstValue)
	length := srcValue.Len()
	for i := 0; i < length; i++ {
		dstValue = reflect.Append(dstValue, srcValue.Index(i))
	}
	// write the grown slice back through the caller's pointer
	direct.Set(dstValue)
}
// Drop creates an array/slice with `n` elements dropped from the beginning.
// NOTE(review): when n exceeds the input length, makeSlice is called with a
// negative length, which presumably panics; confirm whether callers guard n.
func Drop(in interface{}, n int) interface{} {
	value := reflect.ValueOf(in)
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		resultSlice := makeSlice(value, length-n)
		j := 0
		for i := n; i < length; i++ {
			resultSlice.Index(j).Set(value.Index(i))
			j++
		}
		return resultSlice.Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Drop", valueType.String()))
}
// Prune returns a copy of "in" that only contains fields in "paths"
// which are looked up using struct field name.
// For lookup paths by field tag instead, use funk.PruneByTag()
func Prune(in interface{}, paths []string) (interface{}, error) {
	return pruneByTag(in, paths, nil /*tag*/)
}
// PruneByTag returns a copy of "in" that only contains fields in "paths"
// which are looked up using struct field Tag "tag".
func PruneByTag(in interface{}, paths []string, tag string) (interface{}, error) {
	return pruneByTag(in, paths, &tag)
}
// pruneByTag returns a copy of "in" that only contains fields in "paths"
// which are looked up using struct field Tag "tag". If tag is nil,
// traverse paths using struct field name.
// Each path is a dot-separated chain of field names (or tag keys).
func pruneByTag(in interface{}, paths []string, tag *string) (interface{}, error) {
	inValue := reflect.ValueOf(in)
	// ret starts as the zero value of in's type and is filled in per path
	ret := reflect.New(inValue.Type()).Elem()
	for _, path := range paths {
		parts := strings.Split(path, ".")
		if err := prune(inValue, ret, parts, tag); err != nil {
			return nil, err
		}
	}
	return ret.Interface(), nil
}
// prune copies the value reachable through the remaining path segments
// (parts) from inValue into the matching location of ret, initializing
// intermediate pointers/structs/slices in ret as it descends. When tag is
// non-nil, each segment is matched against that struct tag's key instead of
// the field name.
func prune(inValue reflect.Value, ret reflect.Value, parts []string, tag *string) error {
	if len(parts) == 0 {
		// we reached the location that ret needs to hold inValue
		// Note: The value at the end of the path is not copied, maybe we need to change.
		// ret and the original data holds the same reference to this value
		ret.Set(inValue)
		return nil
	}
	inKind := inValue.Kind()
	switch inKind {
	case reflect.Ptr:
		if inValue.IsNil() {
			// nothing to copy through a nil pointer
			// TODO validate
			return nil
		}
		if ret.IsNil() {
			// init ret and go to next level
			ret.Set(reflect.New(inValue.Type().Elem()))
		}
		return prune(inValue.Elem(), ret.Elem(), parts, tag)
	case reflect.Struct:
		part := parts[0]
		var fValue reflect.Value
		var fRet reflect.Value
		if tag == nil {
			// use field name
			fValue = inValue.FieldByName(part)
			if !fValue.IsValid() {
				return fmt.Errorf("field name %v is not found in struct %v", part, inValue.Type().String())
			}
			fRet = ret.FieldByName(part)
		} else {
			// search tag that has key equal to part
			found := false
			for i := 0; i < inValue.NumField(); i++ {
				f := inValue.Type().Field(i)
				if key, ok := f.Tag.Lookup(*tag); ok {
					if key == part {
						fValue = inValue.Field(i)
						fRet = ret.Field(i)
						found = true
						break
					}
				}
			}
			if !found {
				return fmt.Errorf("Struct tag %v is not found with key %v", *tag, part)
			}
		}
		// init Ret is zero and go down one more level
		if fRet.IsZero() {
			fRet.Set(reflect.New(fValue.Type()).Elem())
		}
		return prune(fValue, fRet, parts[1:], tag)
	case reflect.Array, reflect.Slice:
		// set all its elements
		length := inValue.Len()
		// init ret
		if ret.IsZero() {
			if inKind == reflect.Slice {
				ret.Set(reflect.MakeSlice(inValue.Type(), length /*len*/, length /*cap*/))
			} else { // array
				ret.Set(reflect.New(inValue.Type()).Elem())
			}
		}
		// the same remaining path is applied to every element
		for j := 0; j < length; j++ {
			if err := prune(inValue.Index(j), ret.Index(j), parts, tag); err != nil {
				return err
			}
		}
	default:
		return fmt.Errorf("path %v cannot be looked up on kind of %v", strings.Join(parts, "."), inValue.Kind())
	}
	return nil
}
package mocks
import "time"
// MetricsProvider implements a mock ActivityPub metrics provider.
// Every method is a no-op; the type exists so tests can satisfy the
// metrics interface without recording any measurements.
type MetricsProvider struct{}
// OutboxPostTime records the time it takes to post a message to the outbox.
func (m *MetricsProvider) OutboxPostTime(value time.Duration) {
}
// OutboxResolveInboxesTime records the time it takes to resolve inboxes for an outbox post.
func (m *MetricsProvider) OutboxResolveInboxesTime(value time.Duration) {
}
// InboxHandlerTime records the time it takes to handle an activity posted to the inbox.
func (m *MetricsProvider) InboxHandlerTime(activityType string, value time.Duration) {
}
// WriteAnchorTime records the time it takes to write an anchor credential and post an 'Offer' activity.
func (m *MetricsProvider) WriteAnchorTime(value time.Duration) {
}
// WriteAnchorBuildCredentialTime records the time it takes to build credential inside write anchor.
func (m *MetricsProvider) WriteAnchorBuildCredentialTime(value time.Duration) {
}
// WriteAnchorGetWitnessesTime records the time it takes to get witnesses inside write anchor.
func (m *MetricsProvider) WriteAnchorGetWitnessesTime(value time.Duration) {
}
// WriteAnchorSignCredentialTime records the time it takes to sign credential inside write anchor.
func (m *MetricsProvider) WriteAnchorSignCredentialTime(value time.Duration) {
}
// WriteAnchorPostOfferActivityTime records the time it takes to post offer activity inside write anchor.
func (m *MetricsProvider) WriteAnchorPostOfferActivityTime(value time.Duration) {
}
// WriteAnchorGetPreviousAnchorsGetBulkTime records the time it takes to get bulk inside previous anchor.
func (m *MetricsProvider) WriteAnchorGetPreviousAnchorsGetBulkTime(value time.Duration) {
}
// WriteAnchorGetPreviousAnchorsTime records the time it takes to get previous anchor.
func (m *MetricsProvider) WriteAnchorGetPreviousAnchorsTime(value time.Duration) {
}
// WriteAnchorSignWithLocalWitnessTime records the time it takes to sign with local witness.
func (m *MetricsProvider) WriteAnchorSignWithLocalWitnessTime(value time.Duration) {
}
// WriteAnchorSignWithServerKeyTime records the time it takes to sign with server key.
func (m *MetricsProvider) WriteAnchorSignWithServerKeyTime(value time.Duration) {
}
// WriteAnchorSignLocalWitnessLogTime records the time it takes to witness log inside sign local.
func (m *MetricsProvider) WriteAnchorSignLocalWitnessLogTime(value time.Duration) {
}
// WriteAnchorSignLocalStoreTime records the time it takes to store inside sign local.
func (m *MetricsProvider) WriteAnchorSignLocalStoreTime(value time.Duration) {
}
// WriteAnchorSignLocalWatchTime records the time it takes to watch inside sign local.
func (m *MetricsProvider) WriteAnchorSignLocalWatchTime(value time.Duration) {
}
// WriteAnchorResolveHostMetaLinkTime records the time it takes to resolve host meta link.
func (m *MetricsProvider) WriteAnchorResolveHostMetaLinkTime(value time.Duration) {
}
// ProcessWitnessedAnchorCredentialTime records the time it takes to process a witnessed anchor credential
// by publishing it to the Observer and posting a 'Create' activity.
func (m *MetricsProvider) ProcessWitnessedAnchorCredentialTime(value time.Duration) {
}
// AddOperationTime records the time it takes to add an operation to the queue.
func (m *MetricsProvider) AddOperationTime(value time.Duration) {
}
// BatchCutTime records the time it takes to cut an operation batch.
func (m *MetricsProvider) BatchCutTime(value time.Duration) {
}
// BatchRollbackTime records the time it takes to roll back an operation batch (in case of a transient error).
func (m *MetricsProvider) BatchRollbackTime(value time.Duration) {
}
// ProcessAnchorTime records the time it takes for the Observer to process an anchor credential.
func (m *MetricsProvider) ProcessAnchorTime(value time.Duration) {
}
// ProcessDIDTime records the time it takes for the Observer to process a DID.
func (m *MetricsProvider) ProcessDIDTime(value time.Duration) {
}
// CASWriteTime records the time it takes to write a document to CAS.
func (m *MetricsProvider) CASWriteTime(value time.Duration) {
}
// CASResolveTime records the time it takes to resolve a document from CAS.
func (m *MetricsProvider) CASResolveTime(value time.Duration) {
}
// BatchAckTime records the time to acknowledge all of the operations that are removed from the queue.
func (m *MetricsProvider) BatchAckTime(value time.Duration) {
}
// BatchNackTime records the time to nack all of the operations that are to be placed back on the queue.
func (m *MetricsProvider) BatchNackTime(value time.Duration) {
}
// WitnessAnchorCredentialTime records the time it takes for a verifiable credential to gather proofs from all
// required witnesses (according to witness policy). The start time is when the verifiable credential is issued
// and the end time is the time that the witness policy is satisfied.
func (m *MetricsProvider) WitnessAnchorCredentialTime(value time.Duration) {
}
// DocumentCreateUpdateTime records the time it takes the REST handler to process a create/update operation.
func (m *MetricsProvider) DocumentCreateUpdateTime(value time.Duration) {
}
// DocumentResolveTime records the time it takes the REST handler to resolve a document.
func (m *MetricsProvider) DocumentResolveTime(value time.Duration) {
}
// OutboxIncrementActivityCount increments the number of activities of the given type posted to the outbox.
func (m *MetricsProvider) OutboxIncrementActivityCount(activityType string) {
}
// CASIncrementCacheHitCount increments the number of CAS cache hits.
func (m *MetricsProvider) CASIncrementCacheHitCount() {
}
// CASReadTime records the time it takes to read a document from CAS storage.
func (m *MetricsProvider) CASReadTime(casType string, value time.Duration) {
}
// BatchSize records the size of an operation batch.
//
// The parameter is named for consistency with the other mock methods;
// like them, this implementation is a deliberate no-op.
func (m *MetricsProvider) BatchSize(value float64) {
	// No-op: the mock intentionally discards metrics.
}
// WitnessAddProofVctNil records the VCT witness time (nil-VCT case).
func (m *MetricsProvider) WitnessAddProofVctNil(value time.Duration) {
	// No-op: the mock intentionally discards metrics.
}
// WitnessAddVC records the time taken by the VCT witness to add a verifiable credential.
func (m *MetricsProvider) WitnessAddVC(value time.Duration) {
	// No-op.
}
// WitnessAddProof records the time taken by the VCT witness to add a proof.
func (m *MetricsProvider) WitnessAddProof(value time.Duration) {
	// No-op.
}
// WitnessWebFinger records the time taken by the VCT witness WebFinger call.
func (m *MetricsProvider) WitnessWebFinger(value time.Duration) {
	// No-op.
}
// WitnessVerifyVCTSignature records the time taken to verify the VCT signature.
func (m *MetricsProvider) WitnessVerifyVCTSignature(value time.Duration) {
	// No-op.
}
// AddProofParseCredential records the time taken to parse the credential during add-proof.
func (m *MetricsProvider) AddProofParseCredential(value time.Duration) {
	// No-op.
}
// AddProofSign records the time taken to sign during add-proof.
func (m *MetricsProvider) AddProofSign(value time.Duration) {
	// No-op.
}
// SignerGetKey records the time taken by the signer to get a key.
func (m *MetricsProvider) SignerGetKey(value time.Duration) {
	// No-op.
}
// SignerSign records the time taken by the signer to sign.
func (m *MetricsProvider) SignerSign(value time.Duration) {
	// No-op.
}
// SignerAddLinkedDataProof records the time taken to add a linked data proof.
func (m *MetricsProvider) SignerAddLinkedDataProof(value time.Duration) {
	// No-op.
}
package logreg
import (
"context"
"fmt"
"math"
"time"
"github.com/campoy/mat"
)
// matProduct is mat.Product held in a package-level variable — presumably
// so the matrix-product implementation can be swapped out (e.g. in tests
// or benchmarks); confirm before depending on that.
var matProduct = mat.Product
// Accuracy computes the fraction of rows of x whose label predicted from
// theta matches the label one-hot encoded in y. It also returns the row
// indices that were mispredicted (nil when every prediction is correct).
func Accuracy(x, theta, y mat.Matrix) (float64, []int) {
	m := x.Rows()
	// Decode both predictions and ground truth to single-column label matrices.
	preds := HotDecode(Predict(x, theta))
	labels := HotDecode(y)
	correct := 0
	var missed []int
	for i := 0; i < m; i++ {
		if preds.At(i, 0) == labels.At(i, 0) {
			correct++
		} else {
			missed = append(missed, i)
		}
	}
	return float64(correct) / float64(m), missed
}
// Predict computes the hypothesis for every row of x given parameters
// theta: the sigmoid applied elementwise to the product x * theta.
func Predict(x, theta mat.Matrix) mat.Matrix {
	z := matProduct(x, theta)
	return mat.Map(sigmoid, z)
}
// sigmoid is the logistic function 1/(1+e^-z), mapping any real z into (0, 1).
func sigmoid(z float64) float64 {
	return 1.0 / (1.0 + math.Exp(-z))
}
// HotEncode one-hot encodes the single-column label matrix m into an
// m.Rows() x k matrix: row i holds 1.0 in the column whose index equals
// the label at m.At(i, 0), and 0.0 everywhere else.
func HotEncode(m mat.Matrix, k int) mat.Matrix {
	return mat.FromFunc(m.Rows(), k, func(i, j int) float64 {
		if int(m.At(i, 0)) == j {
			return 1.0
		}
		return 0.0
	})
}
// HotDecode reverses a one-hot (or score) encoding: for each row of m it
// returns, as a single-column matrix, the index of the column holding the
// largest value. With equal values the lowest column index wins, and a row
// whose values are all <= 0 decodes to column 0.
func HotDecode(m mat.Matrix) mat.Matrix {
	// The result has one column, so the closure's column argument is unused;
	// naming it "_" avoids shadowing it with the scan index below (the
	// original code reused the name j for both).
	return mat.FromFunc(m.Rows(), 1, func(i, _ int) float64 {
		best := 0.0
		pos := 0
		for col := 0; col < m.Cols(); col++ {
			if v := m.At(i, col); v > best {
				best = v
				pos = col
			}
		}
		return float64(pos)
	})
}
// Fit trains logistic-regression parameters for inputs x and one-hot
// labels y by gradient descent, starting from all-zero theta. It loops
// until the training accuracy reaches 1.0 or ctx is cancelled, printing
// progress after each step, and returns the latest theta in either case.
func Fit(ctx context.Context, x, y mat.Matrix) mat.Matrix {
	start := time.Now()
	theta := mat.New(x.Cols(), y.Cols()) // zero-initialized parameters
	for {
		acc, _ := Accuracy(x, theta, y)
		fmt.Printf("t: %v | accuracy: %f\n", time.Since(start), acc)
		if acc == 1.0 {
			return theta
		}
		// Honor cancellation between descent steps.
		select {
		case <-ctx.Done():
			return theta
		default:
		}
		// One gradient-descent step per outer iteration so accuracy is
		// re-evaluated (and logged) after every update.
		theta = optimize(func(theta mat.Matrix) (float64, mat.Matrix) {
			return costFunction(theta, x, y)
		}, theta, 1)
	}
}
// costFunction returns the logistic-regression cross-entropy cost J and
// its gradient for parameters theta on inputs x and one-hot labels y:
//
//	h = sigmoid(x * theta)
//	J = -(1/m) * sum( y .* log(h) + (1-y) .* log(1-h) )
//
// The gradient has the same shape as theta.
func costFunction(theta, x, y mat.Matrix) (float64, mat.Matrix) {
	h := Predict(x, theta)
	m := float64(x.Rows())
	// All-ones matrix used to form (1 - y) and (1 - h) elementwise.
	ones := mat.New(x.Rows(), y.Cols()).AddScalar(1)
	j := -1 / m * mat.Sum(mat.Plus(
		mat.Dot(y, mat.Map(math.Log, h)),
		mat.Dot(mat.Minus(ones, y), mat.Map(math.Log, mat.Minus(ones, h))),
	))
	// Gradient: (1/m) * ((h - y)^T * x)^T, i.e. x^T (h - y) scaled by 1/m.
	grad := matProduct(mat.Minus(h, y).T(), x).Scale(1 / m).T()
	return j, grad
}
// optimize runs iters steps of fixed-step gradient descent from
// initialTheta, using cost to obtain the gradient at each step, and
// returns the final theta. The cost value itself is discarded.
func optimize(cost func(theta mat.Matrix) (float64, mat.Matrix), initialTheta mat.Matrix, iters int) mat.Matrix {
	theta := initialTheta
	// Fixed learning rate.
	alpha := 0.01
	for i := 0; i < iters; i++ {
		_, grad := cost(theta)
		// NOTE(review): assumes grad.Scale returns a scaled matrix without
		// surprising side effects for the caller — confirm against the
		// mat package API.
		theta = mat.Minus(theta, grad.Scale(alpha))
	}
	return theta
}
package ext
var symbols = map[string]rune{
`\exclam`: '!',
`\#`: '#',
`\$`: '$',
`\%`: '%',
`\&`: '&',
`\lparen`: '(',
`\rparen`: ')',
`\plus`: '+',
`\comma`: ',',
`\period`: '.',
`\mathslash`: '/',
`\mathcolon`: ':',
`\semicolon`: ';',
`\less`: '<',
`\equal`: '=',
`\greater`: '>',
`\question`: '?',
`\atsign`: '@',
`\lbrack`: '[',
`\backslash`: '\\',
`\rbrack`: ']',
`\sphat`: '^',
`\_`: '_',
`\{`: '{',
`\vert`: '|',
`\}`: '}',
`\sptilde`: '~',
`\cent`: '\u00a2',
`\pounds`: '\u00a3',
`\yen`: '\u00a5',
`\spddot`: '\u00a8',
`\neg`: '\u00ac',
`\circledR`: '\u00ae',
`\pm`: '\u00b1',
`+-`: '\u00b1',
`\Micro`: '\u00b5',
`\cdotp`: '\u00b7',
`\times`: '\u00d7',
`\eth`: '\u00f0',
`\div`: '\u00f7',
`\imath`: '\u0131',
`\Zbar`: '\u01b5',
`\jmath`: '\u0237',
`\grave`: '\u0300',
`\acute`: '\u0301',
`\hat`: '\u0302',
`\tilde`: '\u0303',
`\bar`: '\u0304',
`\overline`: '\u0305',
`\breve`: '\u0306',
`\dot`: '\u0307',
`\ddot`: '\u0308',
`\ovhook`: '\u0309',
`\mathring`: '\u030a',
`\check`: '\u030c',
`\candra`: '\u0310',
`\oturnedcomma`: '\u0312',
`\ocommatopright`: '\u0315',
`\droang`: '\u031a',
`\utilde`: '\u0330',
`\underbar`: '\u0331',
`\underline`: '\u0332',
`\not`: '\u0338',
`\upAlpha`: '\u0391',
`\upBeta`: '\u0392',
`\Gamma`: '\u0393',
`\Delta`: '\u0394',
`\upEpsilon`: '\u0395',
`\upZeta`: '\u0396',
`\upEta`: '\u0397',
`\Theta`: '\u0398',
`\upIota`: '\u0399',
`\upKappa`: '\u039a',
`\Lambda`: '\u039b',
`\upMu`: '\u039c',
`\upNu`: '\u039d',
`\Xi`: '\u039e',
`\upOmicron`: '\u039f',
`\Pi`: '\u03a0',
`\upRho`: '\u03a1',
`\Sigma`: '\u03a3',
`\upTau`: '\u03a4',
`\Upsilon`: '\u03a5',
`\Phi`: '\u03a6',
`\upChi`: '\u03a7',
`\Psi`: '\u03a8',
`\Omega`: '\u03a9',
`\alpha`: '\u03b1',
`\beta`: '\u03b2',
`\gamma`: '\u03b3',
`\delta`: '\u03b4',
`\varepsilon`: '\u03b5',
`\zeta`: '\u03b6',
`\eta`: '\u03b7',
`\theta`: '\u03b8',
`\iota`: '\u03b9',
`\kappa`: '\u03ba',
`\lambda`: '\u03bb',
`\mu`: '\u03bc',
`\nu`: '\u03bd',
`\xi`: '\u03be',
`\upomicron`: '\u03bf',
`\pi`: '\u03c0',
`\rho`: '\u03c1',
`\varsigma`: '\u03c2',
`\sigma`: '\u03c3',
`\tau`: '\u03c4',
`\upsilon`: '\u03c5',
`\varphi`: '\u03c6',
`\chi`: '\u03c7',
`\psi`: '\u03c8',
`\omega`: '\u03c9',
`\varbeta`: '\u03d0',
`\vartheta`: '\u03d1',
`\upUpsilon`: '\u03d2',
`\phi`: '\u03d5',
`\varpi`: '\u03d6',
`\Qoppa`: '\u03d8',
`\qoppa`: '\u03d9',
`\Stigma`: '\u03da',
`\stigma`: '\u03db',
`\Digamma`: '\u03dc',
`\digamma`: '\u03dd',
`\Koppa`: '\u03de',
`\koppa`: '\u03df',
`\Sampi`: '\u03e0',
`\sampi`: '\u03e1',
`\varkappa`: '\u03f0',
`\varrho`: '\u03f1',
`\upvarTheta`: '\u03f4',
`\epsilon`: '\u03f5',
`\backepsilon`: '\u03f6',
`\horizbar`: '\u2015',
`\|`: '\u2016',
`\twolowline`: '\u2017',
`\dagger`: '\u2020',
`\ddagger`: '\u2021',
`\bullet`: '\u2022',
`\enleadertwodots`: '\u2025',
`\ldots`: '\u2026',
`\prime`: '\u2032',
`\second`: '\u2033',
`\third`: '\u2034',
`\backprime`: '\u2035',
`\backdprime`: '\u2036',
`\backtrprime`: '\u2037',
`\caretinsert`: '\u2038',
`\Exclam`: '\u203c',
`\cat`: '\u2040',
`\hyphenbullet`: '\u2043',
`\fracslash`: '\u2044',
`\Question`: '\u2047',
`\closure`: '\u2050',
`\fourth`: '\u2057',
`\euro`: '\u20ac',
`\lvec`: '\u20d0',
`\vec`: '\u20d1',
`\vertoverlay`: '\u20d2',
`\LVec`: '\u20d6',
`\dddot`: '\u20db',
`\ddddot`: '\u20dc',
`\enclosecircle`: '\u20dd',
`\enclosesquare`: '\u20de',
`\enclosediamond`: '\u20df',
`\overleftrightarrow`: '\u20e1',
`\enclosetriangle`: '\u20e4',
`\annuity`: '\u20e7',
`\threeunderdot`: '\u20e8',
`\widebridgeabove`: '\u20e9',
`\underrightharpoondown`: '\u20ec',
`\underleftharpoondown`: '\u20ed',
`\underleftarrow`: '\u20ee',
`\underrightarrow`: '\u20ef',
`\asteraccent`: '\u20f0',
`\mathbb{C}`: '\u2102',
`\Euler`: '\u2107',
`\mathcal{g}`: '\u210a',
`\mathcal{H}`: '\u210b',
`\mathfrak{H}`: '\u210c',
`\mathbb{H}`: '\u210d',
`\Planckconst`: '\u210e',
`\hslash`: '\u210f',
`\mathcal{I}`: '\u2110',
`\Im`: '\u2111',
`\mathcal{L}`: '\u2112',
`\ell`: '\u2113',
`\mathbb{N}`: '\u2115',
`\wp`: '\u2118',
`\mathbb{P}`: '\u2119',
`\mathbb{Q}`: '\u211a',
`\mathcal{R}`: '\u211b',
`\Re`: '\u211c',
`\mathbb{R}`: '\u211d',
`\mathbb{Z}`: '\u2124',
`\tcohm`: '\u2126',
`\mho`: '\u2127',
`\mathfrak{Z}`: '\u2128',
`\turnediota`: '\u2129',
`\Angstroem`: '\u212b',
`\mathcal{B}`: '\u212c',
`\mathfrak{C}`: '\u212d',
`\mathcal{e}`: '\u212f',
`\mathcal{E}`: '\u2130',
`\mathcal{F}`: '\u2131',
`\Finv`: '\u2132',
`\mathcal{M}`: '\u2133',
`\mathcal{o}`: '\u2134',
`\aleph`: '\u2135',
`\beth`: '\u2136',
`\gimel`: '\u2137',
`\daleth`: '\u2138',
`\mathbb{\pi}`: '\u213c',
`\mathbb{\gamma}`: '\u213d',
`\mathbb{\Gamma}`: '\u213e',
`\mathbb{\Pi}`: '\u213f',
`\mathbb{\Sigma}`: '\u2140',
`\Game`: '\u2141',
`\sansLturned`: '\u2142',
`\sansLmirrored`: '\u2143',
`\Yup`: '\u2144',
`\CapitalDifferentialD`: '\u2145',
`\DifferentialD`: '\u2146',
`\ExponetialE`: '\u2147',
`\ComplexI`: '\u2148',
`\ComplexJ`: '\u2149',
`\PropertyLine`: '\u214a',
`\invamp`: '\u214b',
`\leftarrow`: '\u2190',
`<--`: '\u2190',
`\uparrow`: '\u2191',
`\rightarrow`: '\u2192',
`-->`: '\u2192',
`\downarrow`: '\u2193',
`\leftrightarrow`: '\u2194',
`\updownarrow`: '\u2195',
`\nwarrow`: '\u2196',
`\nearrow`: '\u2197',
`\searrow`: '\u2198',
`\swarrow`: '\u2199',
`\nleftarrow`: '\u219a',
`\nrightarrow`: '\u219b',
`\leftwavearrow`: '\u219c',
`\rightwavearrow`: '\u219d',
`\twoheadleftarrow`: '\u219e',
`\twoheaduparrow`: '\u219f',
`\twoheadrightarrow`: '\u21a0',
`\twoheaddownarrow`: '\u21a1',
`\leftarrowtail`: '\u21a2',
`\rightarrowtail`: '\u21a3',
`\mapsfrom`: '\u21a4',
`\MapsUp`: '\u21a5',
`\mapsto`: '\u21a6',
`|->`: '\u21a6',
`\MapsDown`: '\u21a7',
`\updownarrowbar`: '\u21a8',
`\hookleftarrow`: '\u21a9',
`\hookrightarrow`: '\u21aa',
`\looparrowleft`: '\u21ab',
`\looparrowright`: '\u21ac',
`\leftrightsquigarrow`: '\u21ad',
`\nleftrightarrow`: '\u21ae',
`\lightning`: '\u21af',
`\Lsh`: '\u21b0',
`\Rsh`: '\u21b1',
`\dlsh`: '\u21b2',
`\drsh`: '\u21b3',
`\linefeed`: '\u21b4',
`\carriagereturn`: '\u21b5',
`\curvearrowleft`: '\u21b6',
`\curvearrowright`: '\u21b7',
`\barovernorthwestarrow`: '\u21b8',
`\barleftarrowrightarrowba`: '\u21b9',
`\circlearrowleft`: '\u21ba',
`\circlearrowright`: '\u21bb',
`\leftharpoonup`: '\u21bc',
`\leftharpoondown`: '\u21bd',
`\upharpoonright`: '\u21be',
`\upharpoonleft`: '\u21bf',
`\rightharpoonup`: '\u21c0',
`\rightharpoondown`: '\u21c1',
`\downharpoonright`: '\u21c2',
`\downharpoonleft`: '\u21c3',
`\rightleftarrows`: '\u21c4',
`\updownarrows`: '\u21c5',
`\leftrightarrows`: '\u21c6',
`\leftleftarrows`: '\u21c7',
`\upuparrows`: '\u21c8',
`\rightrightarrows`: '\u21c9',
`\downdownarrows`: '\u21ca',
`\leftrightharpoons`: '\u21cb',
`\rightleftharpoons`: '\u21cc',
`\nLeftarrow`: '\u21cd',
`\nLeftrightarrow`: '\u21ce',
`\nRightarrow`: '\u21cf',
`\Leftarrow`: '\u21d0',
`\Uparrow`: '\u21d1',
`\Rightarrow`: '\u21d2',
`\Downarrow`: '\u21d3',
`\Leftrightarrow`: '\u21d4',
`\Updownarrow`: '\u21d5',
`\Nwarrow`: '\u21d6',
`\Nearrow`: '\u21d7',
`\Searrow`: '\u21d8',
`\Swarrow`: '\u21d9',
`\Lleftarrow`: '\u21da',
`\Rrightarrow`: '\u21db',
`\leftsquigarrow`: '\u21dc',
`\rightsquigarrow`: '\u21dd',
`\nHuparrow`: '\u21de',
`\nHdownarrow`: '\u21df',
`\dashleftarrow`: '\u21e0',
`\updasharrow`: '\u21e1',
`\dashrightarrow`: '\u21e2',
`\downdasharrow`: '\u21e3',
`\LeftArrowBar`: '\u21e4',
`\RightArrowBar`: '\u21e5',
`\leftwhitearrow`: '\u21e6',
`\upwhitearrow`: '\u21e7',
`\rightwhitearrow`: '\u21e8',
`\downwhitearrow`: '\u21e9',
`\whitearrowupfrombar`: '\u21ea',
`\circleonrightarrow`: '\u21f4',
`\downuparrows`: '\u21f5',
`\rightthreearrows`: '\u21f6',
`\nvleftarrow`: '\u21f7',
`\pfun`: '\u21f8',
`\nvleftrightarrow`: '\u21f9',
`\nVleftarrow`: '\u21fa',
`\ffun`: '\u21fb',
`\nVleftrightarrow`: '\u21fc',
`\leftarrowtriangle`: '\u21fd',
`\rightarrowtriangle`: '\u21fe',
`\leftrightarrowtriangle`: '\u21ff',
`\forall`: '\u2200',
`\complement`: '\u2201',
`\partial`: '\u2202',
`\exists`: '\u2203',
`\nexists`: '\u2204',
`\varnothing`: '\u2205',
`\increment`: '\u2206',
`\nabla`: '\u2207',
`\in`: '\u2208',
`\notin`: '\u2209',
`\smallin`: '\u220a',
`\ni`: '\u220b',
`\nni`: '\u220c',
`\smallni`: '\u220d',
`\QED`: '\u220e',
`\prod`: '\u220f',
`\coprod`: '\u2210',
`\sum`: '\u2211',
`\minus`: '\u2212',
`\mp`: '\u2213',
`-+`: '\u2213',
`\dotplus`: '\u2214',
`\slash`: '\u2215',
`\smallsetminus`: '\u2216',
`\ast`: '\u2217',
`\circ`: '\u2218',
`\sqrt`: '\u221a',
`\cuberoot`: '\u221b',
`\fourthroot`: '\u221c',
`\propto`: '\u221d',
`\infty`: '\u221e',
`\rightangle`: '\u221f',
`\angle`: '\u2220',
`\measuredangle`: '\u2221',
`\sphericalangle`: '\u2222',
`\mid`: '\u2223',
`\nmid`: '\u2224',
`\parallel`: '\u2225',
`||`: '\u2225',
`\nparallel`: '\u2226',
`\wedge`: '\u2227',
`\vee`: '\u2228',
`\cap`: '\u2229',
`\cup`: '\u222a',
`\int`: '\u222b',
`\iint`: '\u222c',
`\iiint`: '\u222d',
`\oint`: '\u222e',
`\oiint`: '\u222f',
`\oiiint`: '\u2230',
`\intclockwise`: '\u2231',
`\varointclockwise`: '\u2232',
`\ointctrclockwise`: '\u2233',
`\therefore`: '\u2234',
`\because`: '\u2235',
`\mathratio`: '\u2236',
`\Proportion`: '\u2237',
`\dotminus`: '\u2238',
`\eqcolon`: '\u2239',
`\dotsminusdots`: '\u223a',
`\kernelcontraction`: '\u223b',
`\sim`: '\u223c',
`\backsim`: '\u223d',
`\invlazys`: '\u223e',
`\AC`: '\u223f',
`\wr`: '\u2240',
`\nsim`: '\u2241',
`\eqsim`: '\u2242',
`\simeq`: '\u2243',
`\nsimeq`: '\u2244',
`\cong`: '\u2245',
`\simneqq`: '\u2246',
`\ncong`: '\u2247',
`\approx`: '\u2248',
`\napprox`: '\u2249',
`\approxeq`: '\u224a',
`\approxident`: '\u224b',
`\backcong`: '\u224c',
`\asymp`: '\u224d',
`\Bumpeq`: '\u224e',
`\bumpeq`: '\u224f',
`\doteq`: '\u2250',
`\Doteq`: '\u2251',
`\fallingdotseq`: '\u2252',
`\risingdotseq`: '\u2253',
`\coloneq`: '\u2254',
`\eqcirc`: '\u2256',
`\circeq`: '\u2257',
`\arceq`: '\u2258',
`\corresponds`: '\u2259',
`\veeeq`: '\u225a',
`\stareq`: '\u225b',
`\triangleq`: '\u225c',
`\eqdef`: '\u225d',
`\measeq`: '\u225e',
`\questeq`: '\u225f',
`\neq`: '\u2260',
`\equiv`: '\u2261',
`===`: '\u2261',
`\nequiv`: '\u2262',
`\Equiv`: '\u2263',
`\leq`: '\u2264',
`\geq`: '\u2265',
`\leqq`: '\u2266',
`\geqq`: '\u2267',
`\lneqq`: '\u2268',
`\gneqq`: '\u2269',
`\ll`: '\u226a',
`\gg`: '\u226b',
`\between`: '\u226c',
`\notasymp`: '\u226d',
`\nless`: '\u226e',
`\ngtr`: '\u226f',
`\nleq`: '\u2270',
`\ngeq`: '\u2271',
`\lesssim`: '\u2272',
`\gtrsim`: '\u2273',
`\NotLessTilde`: '\u2274',
`\NotGreaterTilde`: '\u2275',
`\lessgtr`: '\u2276',
`\gtrless`: '\u2277',
`\nlessgtr`: '\u2278',
`\NotGreaterLess`: '\u2279',
`\prec`: '\u227a',
`\succ`: '\u227b',
`\preccurlyeq`: '\u227c',
`\succcurlyeq`: '\u227d',
`\precsim`: '\u227e',
`\succsim`: '\u227f',
`\nprec`: '\u2280',
`\nsucc`: '\u2281',
`\subset`: '\u2282',
`\supset`: '\u2283',
`\nsubset`: '\u2284',
`\nsupset`: '\u2285',
`\subseteq`: '\u2286',
`\supseteq`: '\u2287',
`\nsubseteq`: '\u2288',
`\nsupseteq`: '\u2289',
`\subsetneq`: '\u228a',
`\supsetneq`: '\u228b',
`\cupleftarrow`: '\u228c',
`\cupdot`: '\u228d',
`\uplus`: '\u228e',
`\sqsubset`: '\u228f',
`\sqsupset`: '\u2290',
`\sqsubseteq`: '\u2291',
`\sqsupseteq`: '\u2292',
`\sqcap`: '\u2293',
`\sqcup`: '\u2294',
`\oplus`: '\u2295',
`\ominus`: '\u2296',
`\otimes`: '\u2297',
`\oslash`: '\u2298',
`\odot`: '\u2299',
`\circledcirc`: '\u229a',
`\circledast`: '\u229b',
`\circledequal`: '\u229c',
`\circleddash`: '\u229d',
`\boxplus`: '\u229e',
`\boxminus`: '\u229f',
`\boxtimes`: '\u22a0',
`\boxdot`: '\u22a1',
`\vdash`: '\u22a2',
`\dashv`: '\u22a3',
`\top`: '\u22a4',
`\bot`: '\u22a5',
`\assert`: '\u22a6',
`\models`: '\u22a7',
`\vDash`: '\u22a8',
`\Vdash`: '\u22a9',
`\Vvdash`: '\u22aa',
`\VDash`: '\u22ab',
`\nvdash`: '\u22ac',
`\nvDash`: '\u22ad',
`\nVdash`: '\u22ae',
`\nVDash`: '\u22af',
`\prurel`: '\u22b0',
`\scurel`: '\u22b1',
`\vartriangleleft`: '\u22b2',
`\vartriangleright`: '\u22b3',
`\trianglelefteq`: '\u22b4',
`\trianglerighteq`: '\u22b5',
`\multimapdotbothA`: '\u22b6',
`\multimapdotbothB`: '\u22b7',
`\multimap`: '\u22b8',
`\hermitmatrix`: '\u22b9',
`\intercal`: '\u22ba',
`\veebar`: '\u22bb',
`\barwedge`: '\u22bc',
`\barvee`: '\u22bd',
`\measuredrightangle`: '\u22be',
`\varlrtriangle`: '\u22bf',
`\bigwedge`: '\u22c0',
`\bigvee`: '\u22c1',
`\bigcap`: '\u22c2',
`\bigcup`: '\u22c3',
`\diamond`: '\u22c4',
`\cdot`: '\u22c5',
`\star`: '\u22c6',
`\divideontimes`: '\u22c7',
`\bowtie`: '\u22c8',
`\ltimes`: '\u22c9',
`\rtimes`: '\u22ca',
`\leftthreetimes`: '\u22cb',
`\rightthreetimes`: '\u22cc',
`\backsimeq`: '\u22cd',
`\curlyvee`: '\u22ce',
`\curlywedge`: '\u22cf',
`\Subset`: '\u22d0',
`\Supset`: '\u22d1',
`\Cap`: '\u22d2',
`\Cup`: '\u22d3',
`\pitchfork`: '\u22d4',
`\hash`: '\u22d5',
`\lessdot`: '\u22d6',
`\gtrdot`: '\u22d7',
`\lll`: '\u22d8',
`\ggg`: '\u22d9',
`\lesseqgtr`: '\u22da',
`\gtreqless`: '\u22db',
`\eqless`: '\u22dc',
`\eqgtr`: '\u22dd',
`\curlyeqprec`: '\u22de',
`\curlyeqsucc`: '\u22df',
`\npreceq`: '\u22e0',
`\nsucceq`: '\u22e1',
`\nsqsubseteq`: '\u22e2',
`\nsqsupseteq`: '\u22e3',
`\sqsubsetneq`: '\u22e4',
`\sqsupsetneq`: '\u22e5',
`\lnsim`: '\u22e6',
`\gnsim`: '\u22e7',
`\precnsim`: '\u22e8',
`\succnsim`: '\u22e9',
`\ntriangleleft`: '\u22ea',
`\ntriangleright`: '\u22eb',
`\ntrianglelefteq`: '\u22ec',
`\ntrianglerighteq`: '\u22ed',
`\vdots`: '\u22ee',
`\cdots`: '\u22ef',
`\iddots`: '\u22f0',
`\ddots`: '\u22f1',
`\disin`: '\u22f2',
`\varisins`: '\u22f3',
`\isins`: '\u22f4',
`\isindot`: '\u22f5',
`\barin`: '\u22f6',
`\isinobar`: '\u22f7',
`\isinvb`: '\u22f8',
`\isinE`: '\u22f9',
`\nisd`: '\u22fa',
`\varnis`: '\u22fb',
`\nis`: '\u22fc',
`\varniobar`: '\u22fd',
`\niobar`: '\u22fe',
`\bagmember`: '\u22ff',
`\diameter`: '\u2300',
`\house`: '\u2302',
`\varbarwedge`: '\u2305',
`\vardoublebarwedge`: '\u2306',
`\lceil`: '\u2308',
`\rceil`: '\u2309',
`\lfloor`: '\u230a',
`\rfloor`: '\u230b',
`\invneg`: '\u2310',
`\wasylozenge`: '\u2311',
`\profline`: '\u2312',
`\profsurf`: '\u2313',
`\viewdata`: '\u2317',
`\turnednot`: '\u2319',
`\ulcorner`: '\u231c',
`\urcorner`: '\u231d',
`\llcorner`: '\u231e',
`\lrcorner`: '\u231f',
`\inttop`: '\u2320',
`\intbottom`: '\u2321',
`\frown`: '\u2322',
`\smile`: '\u2323',
`\varhexagonlrbonds`: '\u232c',
`\conictaper`: '\u2332',
`\topbot`: '\u2336',
`\APLinv`: '\u2339',
`\obar`: '\u233d',
`\notslash`: '\u233f',
`\notbackslash`: '\u2340',
`\APLleftarrowbox`: '\u2347',
`\APLrightarrowbox`: '\u2348',
`\invdiameter`: '\u2349',
`\APLuparrowbox`: '\u2350',
`\APLboxupcaret`: '\u2353',
`\APLdownarrowbox`: '\u2357',
`\APLcomment`: '\u235d',
`\APLinput`: '\u235e',
`\APLlog`: '\u235f',
`\APLboxquestion`: '\u2370',
`\rangledownzigzagarrow`: '\u237c',
`\hexagon`: '\u2394',
`\lparenuend`: '\u239b',
`\lparenextender`: '\u239c',
`\lparenlend`: '\u239d',
`\rparenuend`: '\u239e',
`\rparenextender`: '\u239f',
`\rparenlend`: '\u23a0',
`\lbrackuend`: '\u23a1',
`\lbrackextender`: '\u23a2',
`\lbracklend`: '\u23a3',
`\rbrackuend`: '\u23a4',
`\rbrackextender`: '\u23a5',
`\rbracklend`: '\u23a6',
`\lbraceuend`: '\u23a7',
`\lbracemid`: '\u23a8',
`\lbracelend`: '\u23a9',
`\vbraceextender`: '\u23aa',
`\rbraceuend`: '\u23ab',
`\rbracemid`: '\u23ac',
`\rbracelend`: '\u23ad',
`\intextender`: '\u23ae',
`\harrowextender`: '\u23af',
`\lmoustache`: '\u23b0',
`\rmoustache`: '\u23b1',
`\sumtop`: '\u23b2',
`\sumbottom`: '\u23b3',
`\overbracket`: '\u23b4',
`\underbracket`: '\u23b5',
`\bbrktbrk`: '\u23b6',
`\sqrtbottom`: '\u23b7',
`\lvboxline`: '\u23b8',
`\rvboxline`: '\u23b9',
`\varcarriagereturn`: '\u23ce',
`\overparen`: '\u23dc',
`\underparen`: '\u23dd',
`\overbrace`: '\u23de',
`\underbrace`: '\u23df',
`\obrbrak`: '\u23e0',
`\ubrbrak`: '\u23e1',
`\trapezium`: '\u23e2',
`\benzenr`: '\u23e3',
`\strns`: '\u23e4',
`\fltns`: '\u23e5',
`\accurrent`: '\u23e6',
`\elinters`: '\u23e7',
`\bdtriplevdash`: '\u2506',
`\blockuphalf`: '\u2580',
`\blocklowhalf`: '\u2584',
`\blockfull`: '\u2588',
`\blocklefthalf`: '\u258c',
`\blockrighthalf`: '\u2590',
`\blockqtrshaded`: '\u2591',
`\blockhalfshaded`: '\u2592',
`\blockthreeqtrshaded`: '\u2593',
`\mdlgblksquare`: '\u25a0',
`\mdlgwhtsquare`: '\u25a1',
`\squoval`: '\u25a2',
`\blackinwhitesquare`: '\u25a3',
`\squarehfill`: '\u25a4',
`\squarevfill`: '\u25a5',
`\squarehvfill`: '\u25a6',
`\squarenwsefill`: '\u25a7',
`\squareneswfill`: '\u25a8',
`\squarecrossfill`: '\u25a9',
`\smblksquare`: '\u25aa',
`\smwhtsquare`: '\u25ab',
`\hrectangleblack`: '\u25ac',
`\hrectangle`: '\u25ad',
`\vrectangleblack`: '\u25ae',
`\vrectangle`: '\u25af',
`\parallelogramblack`: '\u25b0',
`\parallelogram`: '\u25b1',
`\bigblacktriangleup`: '\u25b2',
`\bigtriangleup`: '\u25b3',
`\blacktriangleup`: '\u25b4',
`\smalltriangleup`: '\u25b5',
`\RHD`: '\u25b6',
`\rhd`: '\u25b7',
`\blacktriangleright`: '\u25b8',
`\smalltriangleright`: '\u25b9',
`\blackpointerright`: '\u25ba',
`\whitepointerright`: '\u25bb',
`\bigblacktriangledown`: '\u25bc',
`\bigtriangledown`: '\u25bd',
`\blacktriangledown`: '\u25be',
`\smalltriangledown`: '\u25bf',
`\LHD`: '\u25c0',
`\lhd`: '\u25c1',
`\blacktriangleleft`: '\u25c2',
`\smalltriangleleft`: '\u25c3',
`\blackpointerleft`: '\u25c4',
`\whitepointerleft`: '\u25c5',
`\Diamondblack`: '\u25c6',
`\Diamond`: '\u25c7',
`\blackinwhitediamond`: '\u25c8',
`\fisheye`: '\u25c9',
`\lozenge`: '\u25ca',
`\Circle`: '\u25cb',
`\dottedcircle`: '\u25cc',
`\circlevertfill`: '\u25cd',
`\bullseye`: '\u25ce',
`\CIRCLE`: '\u25cf',
`\LEFTcircle`: '\u25d0',
`\RIGHTcircle`: '\u25d1',
`\circlebottomhalfblack`: '\u25d2',
`\circletophalfblack`: '\u25d3',
`\circleurquadblack`: '\u25d4',
`\blackcircleulquadwhite`: '\u25d5',
`\LEFTCIRCLE`: '\u25d6',
`\RIGHTCIRCLE`: '\u25d7',
`\inversebullet`: '\u25d8',
`\inversewhitecircle`: '\u25d9',
`\invwhiteupperhalfcircle`: '\u25da',
`\invwhitelowerhalfcircle`: '\u25db',
`\ularc`: '\u25dc',
`\urarc`: '\u25dd',
`\lrarc`: '\u25de',
`\llarc`: '\u25df',
`\topsemicircle`: '\u25e0',
`\botsemicircle`: '\u25e1',
`\lrblacktriangle`: '\u25e2',
`\llblacktriangle`: '\u25e3',
`\ulblacktriangle`: '\u25e4',
`\urblacktriangle`: '\u25e5',
`\smwhtcircle`: '\u25e6',
`\squareleftblack`: '\u25e7',
`\squarerightblack`: '\u25e8',
`\squareulblack`: '\u25e9',
`\squarelrblack`: '\u25ea',
`\boxbar`: '\u25eb',
`\trianglecdot`: '\u25ec',
`\triangleleftblack`: '\u25ed',
`\trianglerightblack`: '\u25ee',
`\lgwhtcircle`: '\u25ef',
`\squareulquad`: '\u25f0',
`\squarellquad`: '\u25f1',
`\squarelrquad`: '\u25f2',
`\squareurquad`: '\u25f3',
`\circleulquad`: '\u25f4',
`\circlellquad`: '\u25f5',
`\circlelrquad`: '\u25f6',
`\circleurquad`: '\u25f7',
`\ultriangle`: '\u25f8',
`\urtriangle`: '\u25f9',
`\lltriangle`: '\u25fa',
`\square`: '\u25fb',
`\blacksquare`: '\u25fc',
`\mdsmwhtsquare`: '\u25fd',
`\mdsmblksquare`: '\u25fe',
`\lrtriangle`: '\u25ff',
`\bigstar`: '\u2605',
`\bigwhitestar`: '\u2606',
`\Sun`: '\u2609',
`\Square`: '\u2610',
`\CheckedBox`: '\u2611',
`\XBox`: '\u2612',
`\steaming`: '\u2615',
`\pointright`: '\u261e',
`\skull`: '\u2620',
`\danger`: '\u2621',
`\radiation`: '\u2622',
`\biohazard`: '\u2623',
`\yinyang`: '\u262f',
`\frownie`: '\u2639',
`\smiley`: '\u263a',
`\blacksmiley`: '\u263b',
`\sun`: '\u263c',
`\rightmoon`: '\u263d',
`\leftmoon`: '\u263e',
`\mercury`: '\u263f',
`\female`: '\u2640',
`\earth`: '\u2641',
`\male`: '\u2642',
`\jupiter`: '\u2643',
`\saturn`: '\u2644',
`\uranus`: '\u2645',
`\neptune`: '\u2646',
`\pluto`: '\u2647',
`\aries`: '\u2648',
`\taurus`: '\u2649',
`\gemini`: '\u264a',
`\cancer`: '\u264b',
`\leo`: '\u264c',
`\virgo`: '\u264d',
`\libra`: '\u264e',
`\scorpio`: '\u264f',
`\sagittarius`: '\u2650',
`\capricornus`: '\u2651',
`\aquarius`: '\u2652',
`\pisces`: '\u2653',
`\spadesuit`: '\u2660',
`\heartsuit`: '\u2661',
`\diamondsuit`: '\u2662',
`\clubsuit`: '\u2663',
`\varspadesuit`: '\u2664',
`\varheartsuit`: '\u2665',
`\vardiamondsuit`: '\u2666',
`\varclubsuit`: '\u2667',
`\quarternote`: '\u2669',
`\eighthnote`: '\u266a',
`\twonotes`: '\u266b',
`\sixteenthnote`: '\u266c',
`\flat`: '\u266d',
`\natural`: '\u266e',
`\sharp`: '\u266f',
`\recycle`: '\u267b',
`\acidfree`: '\u267e',
`\dicei`: '\u2680',
`\diceii`: '\u2681',
`\diceiii`: '\u2682',
`\diceiv`: '\u2683',
`\dicev`: '\u2684',
`\dicevi`: '\u2685',
`\circledrightdot`: '\u2686',
`\circledtwodots`: '\u2687',
`\blackcircledrightdot`: '\u2688',
`\blackcircledtwodots`: '\u2689',
`\anchor`: '\u2693',
`\swords`: '\u2694',
`\warning`: '\u26a0',
`\Hermaphrodite`: '\u26a5',
`\medcirc`: '\u26aa',
`\medbullet`: '\u26ab',
`\mdsmwhtcircle`: '\u26ac',
`\neuter`: '\u26b2',
`\pencil`: '\u270e',
`\checkmark`: '\u2713',
`\ballotx`: '\u2717',
`\maltese`: '\u2720',
`\circledstar`: '\u272a',
`\varstar`: '\u2736',
`\dingasterisk`: '\u273d',
`\lbrbrak`: '\u2772',
`\rbrbrak`: '\u2773',
`\draftingarrow`: '\u279b',
`\arrowbullet`: '\u27a2',
`\threedangle`: '\u27c0',
`\whiteinwhitetriangle`: '\u27c1',
`\perp`: '\u27c2',
`\subsetcirc`: '\u27c3',
`\supsetcirc`: '\u27c4',
`\Lbag`: '\u27c5',
`\Rbag`: '\u27c6',
`\veedot`: '\u27c7',
`\bsolhsub`: '\u27c8',
`\suphsol`: '\u27c9',
`\longdivision`: '\u27cc',
`\Diamonddot`: '\u27d0',
`\wedgedot`: '\u27d1',
`\upin`: '\u27d2',
`\pullback`: '\u27d3',
`\pushout`: '\u27d4',
`\leftouterjoin`: '\u27d5',
`\rightouterjoin`: '\u27d6',
`\fullouterjoin`: '\u27d7',
`\bigbot`: '\u27d8',
`\bigtop`: '\u27d9',
`\DashVDash`: '\u27da',
`\dashVdash`: '\u27db',
`\multimapinv`: '\u27dc',
`\vlongdash`: '\u27dd',
`\longdashv`: '\u27de',
`\cirbot`: '\u27df',
`\lozengeminus`: '\u27e0',
`\concavediamond`: '\u27e1',
`\concavediamondtickleft`: '\u27e2',
`\concavediamondtickright`: '\u27e3',
`\whitesquaretickleft`: '\u27e4',
`\whitesquaretickright`: '\u27e5',
`\llbracket`: '\u27e6',
`\rrbracket`: '\u27e7',
`\langle`: '\u27e8',
`\rangle`: '\u27e9',
`\lang`: '\u27ea',
`\rang`: '\u27eb',
`\Lbrbrak`: '\u27ec',
`\Rbrbrak`: '\u27ed',
`\lgroup`: '\u27ee',
`\rgroup`: '\u27ef',
`\UUparrow`: '\u27f0',
`\DDownarrow`: '\u27f1',
`\acwgapcirclearrow`: '\u27f2',
`\cwgapcirclearrow`: '\u27f3',
`\rightarrowonoplus`: '\u27f4',
`\longleftarrow`: '\u27f5',
`\longrightarrow`: '\u27f6',
`\longleftrightarrow`: '\u27f7',
`\Longleftarrow`: '\u27f8',
`\Longrightarrow`: '\u27f9',
`\Longleftrightarrow`: '\u27fa',
`\longmapsfrom`: '\u27fb',
`\longmapsto`: '\u27fc',
`\Longmapsfrom`: '\u27fd',
`\Longmapsto`: '\u27fe',
`\longrightsquigarrow`: '\u27ff',
`\psur`: '\u2900',
`\nVtwoheadrightarrow`: '\u2901',
`\nvLeftarrow`: '\u2902',
`\nvRightarrow`: '\u2903',
`\nvLeftrightarrow`: '\u2904',
`\twoheadmapsto`: '\u2905',
`\Mapsfrom`: '\u2906',
`\Mapsto`: '\u2907',
`\downarrowbarred`: '\u2908',
`\uparrowbarred`: '\u2909',
`\Uuparrow`: '\u290a',
`\Ddownarrow`: '\u290b',
`\leftbkarrow`: '\u290c',
`\rightbkarrow`: '\u290d',
`\leftdbkarrow`: '\u290e',
`\dbkarow`: '\u290f',
`\drbkarow`: '\u2910',
`\rightdotarrow`: '\u2911',
`\UpArrowBar`: '\u2912',
`\DownArrowBar`: '\u2913',
`\pinj`: '\u2914',
`\finj`: '\u2915',
`\bij`: '\u2916',
`\nvtwoheadrightarrowtail`: '\u2917',
`\nVtwoheadrightarrowtail`: '\u2918',
`\lefttail`: '\u2919',
`\righttail`: '\u291a',
`\leftdbltail`: '\u291b',
`\rightdbltail`: '\u291c',
`\diamondleftarrow`: '\u291d',
`\rightarrowdiamond`: '\u291e',
`\diamondleftarrowbar`: '\u291f',
`\barrightarrowdiamond`: '\u2920',
`\nwsearrow`: '\u2921',
`\neswarrow`: '\u2922',
`\hknwarrow`: '\u2923',
`\hknearrow`: '\u2924',
`\hksearow`: '\u2925',
`\hkswarow`: '\u2926',
`\tona`: '\u2927',
`\toea`: '\u2928',
`\tosa`: '\u2929',
`\towa`: '\u292a',
`\rdiagovfdiag`: '\u292b',
`\fdiagovrdiag`: '\u292c',
`\seovnearrow`: '\u292d',
`\neovsearrow`: '\u292e',
`\fdiagovnearrow`: '\u292f',
`\rdiagovsearrow`: '\u2930',
`\neovnwarrow`: '\u2931',
`\nwovnearrow`: '\u2932',
`\leadsto`: '\u2933',
`\uprightcurvearrow`: '\u2934',
`\downrightcurvedarrow`: '\u2935',
`\leftdowncurvedarrow`: '\u2936',
`\rightdowncurvedarrow`: '\u2937',
`\cwrightarcarrow`: '\u2938',
`\acwleftarcarrow`: '\u2939',
`\acwoverarcarrow`: '\u293a',
`\acwunderarcarrow`: '\u293b',
`\curvearrowrightminus`: '\u293c',
`\curvearrowleftplus`: '\u293d',
`\cwundercurvearrow`: '\u293e',
`\ccwundercurvearrow`: '\u293f',
`\acwcirclearrow`: '\u2940',
`\cwcirclearrow`: '\u2941',
`\rightarrowshortleftarrow`: '\u2942',
`\leftarrowshortrightarrow`: '\u2943',
`\shortrightarrowleftarrow`: '\u2944',
`\rightarrowplus`: '\u2945',
`\leftarrowplus`: '\u2946',
`\rightarrowx`: '\u2947',
`\leftrightarrowcircle`: '\u2948',
`\twoheaduparrowcircle`: '\u2949',
`\leftrightharpoon`: '\u294a',
`\rightleftharpoon`: '\u294b',
`\updownharpoonrightleft`: '\u294c',
`\updownharpoonleftright`: '\u294d',
`\leftrightharpoonup`: '\u294e',
`\rightupdownharpoon`: '\u294f',
`\leftrightharpoondown`: '\u2950',
`\leftupdownharpoon`: '\u2951',
`\LeftVectorBar`: '\u2952',
`\RightVectorBar`: '\u2953',
`\RightUpVectorBar`: '\u2954',
`\RightDownVectorBar`: '\u2955',
`\DownLeftVectorBar`: '\u2956',
`\DownRightVectorBar`: '\u2957',
`\LeftUpVectorBar`: '\u2958',
`\LeftDownVectorBar`: '\u2959',
`\LeftTeeVector`: '\u295a',
`\RightTeeVector`: '\u295b',
`\RightUpTeeVector`: '\u295c',
`\RightDownTeeVector`: '\u295d',
`\DownLeftTeeVector`: '\u295e',
`\DownRightTeeVector`: '\u295f',
`\LeftUpTeeVector`: '\u2960',
`\LeftDownTeeVector`: '\u2961',
`\leftleftharpoons`: '\u2962',
`\upupharpoons`: '\u2963',
`\rightrightharpoons`: '\u2964',
`\downdownharpoons`: '\u2965',
`\leftrightharpoonsup`: '\u2966',
`\leftrightharpoonsdown`: '\u2967',
`\rightleftharpoonsup`: '\u2968',
`\rightleftharpoonsdown`: '\u2969',
`\leftbarharpoon`: '\u296a',
`\barleftharpoon`: '\u296b',
`\rightbarharpoon`: '\u296c',
`\barrightharpoon`: '\u296d',
`\updownharpoons`: '\u296e',
`\downupharpoons`: '\u296f',
`\rightimply`: '\u2970',
`\equalrightarrow`: '\u2971',
`\similarrightarrow`: '\u2972',
`\leftarrowsimilar`: '\u2973',
`\rightarrowsimilar`: '\u2974',
`\rightarrowapprox`: '\u2975',
`\ltlarr`: '\u2976',
`\leftarrowless`: '\u2977',
`\gtrarr`: '\u2978',
`\subrarr`: '\u2979',
`\leftarrowsubset`: '\u297a',
`\suplarr`: '\u297b',
`\strictfi`: '\u297c',
`\strictif`: '\u297d',
`\upfishtail`: '\u297e',
`\downfishtail`: '\u297f',
`\VERT`: '\u2980',
`\spot`: '\u2981',
`\typecolon`: '\u2982',
`\lBrace`: '\u2983',
`\rBrace`: '\u2984',
`\Lparen`: '\u2985',
`\Rparen`: '\u2986',
`\limg`: '\u2987',
`\rimg`: '\u2988',
`\lblot`: '\u2989',
`\rblot`: '\u298a',
`\lbrackubar`: '\u298b',
`\rbrackubar`: '\u298c',
`\lbrackultick`: '\u298d',
`\rbracklrtick`: '\u298e',
`\lbracklltick`: '\u298f',
`\rbrackurtick`: '\u2990',
`\langledot`: '\u2991',
`\rangledot`: '\u2992',
`\lparenless`: '\u2993',
`\rparengtr`: '\u2994',
`\Lparengtr`: '\u2995',
`\Rparenless`: '\u2996',
`\lblkbrbrak`: '\u2997',
`\rblkbrbrak`: '\u2998',
`\fourvdots`: '\u2999',
`\vzigzag`: '\u299a',
`\measuredangleleft`: '\u299b',
`\rightanglesqr`: '\u299c',
`\rightanglemdot`: '\u299d',
`\angles`: '\u299e',
`\angdnr`: '\u299f',
`\gtlpar`: '\u29a0',
`\sphericalangleup`: '\u29a1',
`\turnangle`: '\u29a2',
`\revangle`: '\u29a3',
`\angleubar`: '\u29a4',
`\revangleubar`: '\u29a5',
`\wideangledown`: '\u29a6',
`\wideangleup`: '\u29a7',
`\measanglerutone`: '\u29a8',
`\measanglelutonw`: '\u29a9',
`\measanglerdtose`: '\u29aa',
`\measangleldtosw`: '\u29ab',
`\measangleurtone`: '\u29ac',
`\measangleultonw`: '\u29ad',
`\measangledrtose`: '\u29ae',
`\measangledltosw`: '\u29af',
`\revemptyset`: '\u29b0',
`\emptysetobar`: '\u29b1',
`\emptysetocirc`: '\u29b2',
`\emptysetoarr`: '\u29b3',
`\emptysetoarrl`: '\u29b4',
`\circlehbar`: '\u29b5',
`\circledvert`: '\u29b6',
`\circledparallel`: '\u29b7',
`\circledbslash`: '\u29b8',
`\operp`: '\u29b9',
`\obot`: '\u29ba',
`\olcross`: '\u29bb',
`\odotslashdot`: '\u29bc',
`\uparrowoncircle`: '\u29bd',
`\circledwhitebullet`: '\u29be',
`\circledbullet`: '\u29bf',
`\circledless`: '\u29c0',
`\circledgtr`: '\u29c1',
`\cirscir`: '\u29c2',
`\cirE`: '\u29c3',
`\boxslash`: '\u29c4',
`\boxbslash`: '\u29c5',
`\boxast`: '\u29c6',
`\boxcircle`: '\u29c7',
`\boxbox`: '\u29c8',
`\boxonbox`: '\u29c9',
`\triangleodot`: '\u29ca',
`\triangleubar`: '\u29cb',
`\triangles`: '\u29cc',
`\triangleserifs`: '\u29cd',
`\rtriltri`: '\u29ce',
`\LeftTriangleBar`: '\u29cf',
`\RightTriangleBar`: '\u29d0',
`\lfbowtie`: '\u29d1',
`\rfbowtie`: '\u29d2',
`\fbowtie`: '\u29d3',
`\lftimes`: '\u29d4',
`\rftimes`: '\u29d5',
`\hourglass`: '\u29d6',
`\blackhourglass`: '\u29d7',
`\lvzigzag`: '\u29d8',
`\rvzigzag`: '\u29d9',
`\Lvzigzag`: '\u29da',
`\Rvzigzag`: '\u29db',
`\iinfin`: '\u29dc',
`\tieinfty`: '\u29dd',
`\nvinfty`: '\u29de',
`\multimapboth`: '\u29df',
`\laplac`: '\u29e0',
`\lrtriangleeq`: '\u29e1',
`\shuffle`: '\u29e2',
`\eparsl`: '\u29e3',
`\smeparsl`: '\u29e4',
`\eqvparsl`: '\u29e5',
`\gleichstark`: '\u29e6',
`\thermod`: '\u29e7',
`\downtriangleleftblack`: '\u29e8',
`\downtrianglerightblack`: '\u29e9',
`\blackdiamonddownarrow`: '\u29ea',
`\blacklozenge`: '\u29eb',
`\circledownarrow`: '\u29ec',
`\blackcircledownarrow`: '\u29ed',
`\errbarsquare`: '\u29ee',
`\errbarblacksquare`: '\u29ef',
`\errbardiamond`: '\u29f0',
`\errbarblackdiamond`: '\u29f1',
`\errbarcircle`: '\u29f2',
`\errbarblackcircle`: '\u29f3',
`\ruledelayed`: '\u29f4',
`\setminus`: '\u29f5',
`\dsol`: '\u29f6',
`\rsolbar`: '\u29f7',
`\xsol`: '\u29f8',
`\zhide`: '\u29f9',
`\doubleplus`: '\u29fa',
`\tripleplus`: '\u29fb',
`\lcurvyangle`: '\u29fc',
`\rcurvyangle`: '\u29fd',
`\tplus`: '\u29fe',
`\tminus`: '\u29ff',
`\bigodot`: '\u2a00',
`\bigoplus`: '\u2a01',
`\bigotimes`: '\u2a02',
`\bigcupdot`: '\u2a03',
`\biguplus`: '\u2a04',
`\bigsqcap`: '\u2a05',
`\bigsqcup`: '\u2a06',
`\conjquant`: '\u2a07',
`\disjquant`: '\u2a08',
`\varprod`: '\u2a09',
`\modtwosum`: '\u2a0a',
`\sumint`: '\u2a0b',
`\iiiint`: '\u2a0c',
`\intbar`: '\u2a0d',
`\intBar`: '\u2a0e',
`\fint`: '\u2a0f',
`\cirfnint`: '\u2a10',
`\awint`: '\u2a11',
`\rppolint`: '\u2a12',
`\scpolint`: '\u2a13',
`\npolint`: '\u2a14',
`\pointint`: '\u2a15',
`\sqint`: '\u2a16',
`\intlarhk`: '\u2a17',
`\intx`: '\u2a18',
`\intcap`: '\u2a19',
`\intcup`: '\u2a1a',
`\upint`: '\u2a1b',
`\lowint`: '\u2a1c',
`\Join`: '\u2a1d',
`\bigtriangleleft`: '\u2a1e',
`\zcmp`: '\u2a1f',
`\zpipe`: '\u2a20',
`\zproject`: '\u2a21',
`\ringplus`: '\u2a22',
`\plushat`: '\u2a23',
`\simplus`: '\u2a24',
`\plusdot`: '\u2a25',
`\plussim`: '\u2a26',
`\plussubtwo`: '\u2a27',
`\plustrif`: '\u2a28',
`\commaminus`: '\u2a29',
`\minusdot`: '\u2a2a',
`\minusfdots`: '\u2a2b',
`\minusrdots`: '\u2a2c',
`\opluslhrim`: '\u2a2d',
`\oplusrhrim`: '\u2a2e',
`\vectimes`: '\u2a2f',
`\dottimes`: '\u2a30',
`\timesbar`: '\u2a31',
`\btimes`: '\u2a32',
`\smashtimes`: '\u2a33',
`\otimeslhrim`: '\u2a34',
`\otimesrhrim`: '\u2a35',
`\otimeshat`: '\u2a36',
`\Otimes`: '\u2a37',
`\odiv`: '\u2a38',
`\triangleplus`: '\u2a39',
`\triangleminus`: '\u2a3a',
`\triangletimes`: '\u2a3b',
`\intprod`: '\u2a3c',
`\intprodr`: '\u2a3d',
`\fcmp`: '\u2a3e',
`\amalg`: '\u2a3f',
`\capdot`: '\u2a40',
`\uminus`: '\u2a41',
`\barcup`: '\u2a42',
`\barcap`: '\u2a43',
`\capwedge`: '\u2a44',
`\cupvee`: '\u2a45',
`\cupovercap`: '\u2a46',
`\capovercup`: '\u2a47',
`\cupbarcap`: '\u2a48',
`\capbarcup`: '\u2a49',
`\twocups`: '\u2a4a',
`\twocaps`: '\u2a4b',
`\closedvarcup`: '\u2a4c',
`\closedvarcap`: '\u2a4d',
`\Sqcap`: '\u2a4e',
`\Sqcup`: '\u2a4f',
`\closedvarcupsmashprod`: '\u2a50',
`\wedgeodot`: '\u2a51',
`\veeodot`: '\u2a52',
`\Wedge`: '\u2a53',
`\Vee`: '\u2a54',
`\wedgeonwedge`: '\u2a55',
`\veeonvee`: '\u2a56',
`\bigslopedvee`: '\u2a57',
`\bigslopedwedge`: '\u2a58',
`\veeonwedge`: '\u2a59',
`\wedgemidvert`: '\u2a5a',
`\veemidvert`: '\u2a5b',
`\midbarwedge`: '\u2a5c',
`\midbarvee`: '\u2a5d',
`\doublebarwedge`: '\u2a5e',
`\wedgebar`: '\u2a5f',
`\wedgedoublebar`: '\u2a60',
`\varveebar`: '\u2a61',
`\doublebarvee`: '\u2a62',
`\veedoublebar`: '\u2a63',
`\dsub`: '\u2a64',
`\rsub`: '\u2a65',
`\eqdot`: '\u2a66',
`\dotequiv`: '\u2a67',
`\equivVert`: '\u2a68',
`\equivVvert`: '\u2a69',
`\dotsim`: '\u2a6a',
`\simrdots`: '\u2a6b',
`\simminussim`: '\u2a6c',
`\congdot`: '\u2a6d',
`\asteq`: '\u2a6e',
`\hatapprox`: '\u2a6f',
`\approxeqq`: '\u2a70',
`\eqqplus`: '\u2a71',
`\pluseqq`: '\u2a72',
`\eqqsim`: '\u2a73',
`\Coloneqq`: '\u2a74',
`\Equal`: '\u2a75',
`\Same`: '\u2a76',
`\ddotseq`: '\u2a77',
`\equivDD`: '\u2a78',
`\ltcir`: '\u2a79',
`\gtcir`: '\u2a7a',
`\ltquest`: '\u2a7b',
`\gtquest`: '\u2a7c',
`\leqslant`: '\u2a7d',
`<=`: '\u2a7d',
`\geqslant`: '\u2a7e',
`>=`: '\u2a7e',
`\lesdot`: '\u2a7f',
`\gesdot`: '\u2a80',
`\lesdoto`: '\u2a81',
`\gesdoto`: '\u2a82',
`\lesdotor`: '\u2a83',
`\gesdotol`: '\u2a84',
`\lessapprox`: '\u2a85',
`\gtrapprox`: '\u2a86',
`\lneq`: '\u2a87',
`\gneq`: '\u2a88',
`\lnapprox`: '\u2a89',
`\gnapprox`: '\u2a8a',
`\lesseqqgtr`: '\u2a8b',
`\gtreqqless`: '\u2a8c',
`\lsime`: '\u2a8d',
`\gsime`: '\u2a8e',
`\lsimg`: '\u2a8f',
`\gsiml`: '\u2a90',
`\lgE`: '\u2a91',
`\glE`: '\u2a92',
`\lesges`: '\u2a93',
`\gesles`: '\u2a94',
`\eqslantless`: '\u2a95',
`\eqslantgtr`: '\u2a96',
`\elsdot`: '\u2a97',
`\egsdot`: '\u2a98',
`\eqqless`: '\u2a99',
`\eqqgtr`: '\u2a9a',
`\eqqslantless`: '\u2a9b',
`\eqqslantgtr`: '\u2a9c',
`\simless`: '\u2a9d',
`\simgtr`: '\u2a9e',
`\simlE`: '\u2a9f',
`\simgE`: '\u2aa0',
`\NestedLessLess`: '\u2aa1',
`\NestedGreaterGreater`: '\u2aa2',
`\partialmeetcontraction`: '\u2aa3',
`\glj`: '\u2aa4',
`\gla`: '\u2aa5',
`\leftslice`: '\u2aa6',
`\rightslice`: '\u2aa7',
`\lescc`: '\u2aa8',
`\gescc`: '\u2aa9',
`\smt`: '\u2aaa',
`\lat`: '\u2aab',
`\smte`: '\u2aac',
`\late`: '\u2aad',
`\bumpeqq`: '\u2aae',
`\preceq`: '\u2aaf',
`\succeq`: '\u2ab0',
`\precneq`: '\u2ab1',
`\succneq`: '\u2ab2',
`\preceqq`: '\u2ab3',
`\succeqq`: '\u2ab4',
`\precneqq`: '\u2ab5',
`\succneqq`: '\u2ab6',
`\precapprox`: '\u2ab7',
`\succapprox`: '\u2ab8',
`\precnapprox`: '\u2ab9',
`\succnapprox`: '\u2aba',
`\llcurly`: '\u2abb',
`\ggcurly`: '\u2abc',
`\subsetdot`: '\u2abd',
`\supsetdot`: '\u2abe',
`\subsetplus`: '\u2abf',
`\supsetplus`: '\u2ac0',
`\submult`: '\u2ac1',
`\supmult`: '\u2ac2',
`\subedot`: '\u2ac3',
`\supedot`: '\u2ac4',
`\subseteqq`: '\u2ac5',
`\supseteqq`: '\u2ac6',
`\subsim`: '\u2ac7',
`\supsim`: '\u2ac8',
`\subsetapprox`: '\u2ac9',
`\supsetapprox`: '\u2aca',
`\subsetneqq`: '\u2acb',
`\supsetneqq`: '\u2acc',
`\lsqhook`: '\u2acd',
`\rsqhook`: '\u2ace',
`\csub`: '\u2acf',
`\csup`: '\u2ad0',
`\csube`: '\u2ad1',
`\csupe`: '\u2ad2',
`\subsup`: '\u2ad3',
`\supsub`: '\u2ad4',
`\subsub`: '\u2ad5',
`\supsup`: '\u2ad6',
`\suphsub`: '\u2ad7',
`\supdsub`: '\u2ad8',
`\forkv`: '\u2ad9',
`\topfork`: '\u2ada',
`\mlcp`: '\u2adb',
`\forks`: '\u2adc',
`\forksnot`: '\u2add',
`\shortlefttack`: '\u2ade',
`\shortdowntack`: '\u2adf',
`\shortuptack`: '\u2ae0',
`\perps`: '\u2ae1',
`\vDdash`: '\u2ae2',
`\dashV`: '\u2ae3',
`\Dashv`: '\u2ae4',
`\DashV`: '\u2ae5',
`\varVdash`: '\u2ae6',
`\Barv`: '\u2ae7',
`\vBar`: '\u2ae8',
`\vBarv`: '\u2ae9',
`\Top`: '\u2aea',
`\Bot`: '\u2aeb',
`\Not`: '\u2aec',
`\bNot`: '\u2aed',
`\revnmid`: '\u2aee',
`\cirmid`: '\u2aef',
`\midcir`: '\u2af0',
`\topcir`: '\u2af1',
`\nhpar`: '\u2af2',
`\parsim`: '\u2af3',
`\interleave`: '\u2af4',
`\nhVvert`: '\u2af5',
`\threedotcolon`: '\u2af6',
`\lllnest`: '\u2af7',
`\gggnest`: '\u2af8',
`\leqqslant`: '\u2af9',
`\geqqslant`: '\u2afa',
`\trslash`: '\u2afb',
`\biginterleave`: '\u2afc',
`\sslash`: '\u2afd',
`\talloblong`: '\u2afe',
`\bigtalloblong`: '\u2aff',
`\squaretopblack`: '\u2b12',
`\squarebotblack`: '\u2b13',
`\squareurblack`: '\u2b14',
`\squarellblack`: '\u2b15',
`\diamondleftblack`: '\u2b16',
`\diamondrightblack`: '\u2b17',
`\diamondtopblack`: '\u2b18',
`\diamondbotblack`: '\u2b19',
`\dottedsquare`: '\u2b1a',
`\vysmblksquare`: '\u2b1d',
`\vysmwhtsquare`: '\u2b1e',
`\pentagonblack`: '\u2b1f',
`\pentagon`: '\u2b20',
`\varhexagon`: '\u2b21',
`\varhexagonblack`: '\u2b22',
`\hexagonblack`: '\u2b23',
`\lgblkcircle`: '\u2b24',
`\mdblkdiamond`: '\u2b25',
`\mdwhtdiamond`: '\u2b26',
`\mdblklozenge`: '\u2b27',
`\mdwhtlozenge`: '\u2b28',
`\smblkdiamond`: '\u2b29',
`\smblklozenge`: '\u2b2a',
`\smwhtlozenge`: '\u2b2b',
`\blkhorzoval`: '\u2b2c',
`\whthorzoval`: '\u2b2d',
`\blkvertoval`: '\u2b2e',
`\whtvertoval`: '\u2b2f',
`\circleonleftarrow`: '\u2b30',
`\leftthreearrows`: '\u2b31',
`\leftarrowonoplus`: '\u2b32',
`\longleftsquigarrow`: '\u2b33',
`\nvtwoheadleftarrow`: '\u2b34',
`\nVtwoheadleftarrow`: '\u2b35',
`\twoheadmapsfrom`: '\u2b36',
`\twoheadleftdbkarrow`: '\u2b37',
`\leftdotarrow`: '\u2b38',
`\nvleftarrowtail`: '\u2b39',
`\nVleftarrowtail`: '\u2b3a',
`\twoheadleftarrowtail`: '\u2b3b',
`\nvtwoheadleftarrowtail`: '\u2b3c',
`\nVtwoheadleftarrowtail`: '\u2b3d',
`\leftarrowx`: '\u2b3e',
`\leftcurvedarrow`: '\u2b3f',
`\equalleftarrow`: '\u2b40',
`\bsimilarleftarrow`: '\u2b41',
`\leftarrowbackapprox`: '\u2b42',
`\rightarrowgtr`: '\u2b43',
`\rightarrowsupset`: '\u2b44',
`\LLeftarrow`: '\u2b45',
`\RRightarrow`: '\u2b46',
`\bsimilarrightarrow`: '\u2b47',
`\rightarrowbackapprox`: '\u2b48',
`\similarleftarrow`: '\u2b49',
`\leftarrowapprox`: '\u2b4a',
`\leftarrowbsimilar`: '\u2b4b',
`\rightarrowbsimilar`: '\u2b4c',
`\medwhitestar`: '\u2b50',
`\medblackstar`: '\u2b51',
`\smwhitestar`: '\u2b52',
`\rightpentagonblack`: '\u2b53',
`\rightpentagon`: '\u2b54',
`\postalmark`: '\u3012',
`\hzigzag`: '\u3030',
`\mathbf{A}`: '\U0001d400',
`\mathbf{B}`: '\U0001d401',
`\mathbf{C}`: '\U0001d402',
`\mathbf{D}`: '\U0001d403',
`\mathbf{E}`: '\U0001d404',
`\mathbf{F}`: '\U0001d405',
`\mathbf{G}`: '\U0001d406',
`\mathbf{H}`: '\U0001d407',
`\mathbf{I}`: '\U0001d408',
`\mathbf{J}`: '\U0001d409',
`\mathbf{K}`: '\U0001d40a',
`\mathbf{L}`: '\U0001d40b',
`\mathbf{M}`: '\U0001d40c',
`\mathbf{N}`: '\U0001d40d',
`\mathbf{O}`: '\U0001d40e',
`\mathbf{P}`: '\U0001d40f',
`\mathbf{Q}`: '\U0001d410',
`\mathbf{R}`: '\U0001d411',
`\mathbf{S}`: '\U0001d412',
`\mathbf{T}`: '\U0001d413',
`\mathbf{U}`: '\U0001d414',
`\mathbf{V}`: '\U0001d415',
`\mathbf{W}`: '\U0001d416',
`\mathbf{X}`: '\U0001d417',
`\mathbf{Y}`: '\U0001d418',
`\mathbf{Z}`: '\U0001d419',
`\mathbf{a}`: '\U0001d41a',
`\mathbf{b}`: '\U0001d41b',
`\mathbf{c}`: '\U0001d41c',
`\mathbf{d}`: '\U0001d41d',
`\mathbf{e}`: '\U0001d41e',
`\mathbf{f}`: '\U0001d41f',
`\mathbf{g}`: '\U0001d420',
`\mathbf{h}`: '\U0001d421',
`\mathbf{i}`: '\U0001d422',
`\mathbf{j}`: '\U0001d423',
`\mathbf{k}`: '\U0001d424',
`\mathbf{l}`: '\U0001d425',
`\mathbf{m}`: '\U0001d426',
`\mathbf{n}`: '\U0001d427',
`\mathbf{o}`: '\U0001d428',
`\mathbf{p}`: '\U0001d429',
`\mathbf{q}`: '\U0001d42a',
`\mathbf{r}`: '\U0001d42b',
`\mathbf{s}`: '\U0001d42c',
`\mathbf{t}`: '\U0001d42d',
`\mathbf{u}`: '\U0001d42e',
`\mathbf{v}`: '\U0001d42f',
`\mathbf{w}`: '\U0001d430',
`\mathbf{x}`: '\U0001d431',
`\mathbf{y}`: '\U0001d432',
`\mathbf{z}`: '\U0001d433',
`\mitA`: '\U0001d434',
`\mitB`: '\U0001d435',
`\mitC`: '\U0001d436',
`\mitD`: '\U0001d437',
`\mitE`: '\U0001d438',
`\mitF`: '\U0001d439',
`\mitG`: '\U0001d43a',
`\mitH`: '\U0001d43b',
`\mitI`: '\U0001d43c',
`\mitJ`: '\U0001d43d',
`\mitK`: '\U0001d43e',
`\mitL`: '\U0001d43f',
`\mitM`: '\U0001d440',
`\mitN`: '\U0001d441',
`\mitO`: '\U0001d442',
`\mitP`: '\U0001d443',
`\mitQ`: '\U0001d444',
`\mitR`: '\U0001d445',
`\mitS`: '\U0001d446',
`\mitT`: '\U0001d447',
`\mitU`: '\U0001d448',
`\mitV`: '\U0001d449',
`\mitW`: '\U0001d44a',
`\mitX`: '\U0001d44b',
`\mitY`: '\U0001d44c',
`\mitZ`: '\U0001d44d',
`\mita`: '\U0001d44e',
`\mitb`: '\U0001d44f',
`\mitc`: '\U0001d450',
`\mitd`: '\U0001d451',
`\mite`: '\U0001d452',
`\mitf`: '\U0001d453',
`\mitg`: '\U0001d454',
`\miti`: '\U0001d456',
`\mitj`: '\U0001d457',
`\mitk`: '\U0001d458',
`\mitl`: '\U0001d459',
`\mitm`: '\U0001d45a',
`\mitn`: '\U0001d45b',
`\mito`: '\U0001d45c',
`\mitp`: '\U0001d45d',
`\mitq`: '\U0001d45e',
`\mitr`: '\U0001d45f',
`\mits`: '\U0001d460',
`\mitt`: '\U0001d461',
`\mitu`: '\U0001d462',
`\mitv`: '\U0001d463',
`\mitw`: '\U0001d464',
`\mitx`: '\U0001d465',
`\mity`: '\U0001d466',
`\mitz`: '\U0001d467',
`\mathbfit{A}`: '\U0001d468',
`\mathbfit{B}`: '\U0001d469',
`\mathbfit{C}`: '\U0001d46a',
`\mathbfit{D}`: '\U0001d46b',
`\mathbfit{E}`: '\U0001d46c',
`\mathbfit{F}`: '\U0001d46d',
`\mathbfit{G}`: '\U0001d46e',
`\mathbfit{H}`: '\U0001d46f',
`\mathbfit{I}`: '\U0001d470',
`\mathbfit{J}`: '\U0001d471',
`\mathbfit{K}`: '\U0001d472',
`\mathbfit{L}`: '\U0001d473',
`\mathbfit{M}`: '\U0001d474',
`\mathbfit{N}`: '\U0001d475',
`\mathbfit{O}`: '\U0001d476',
`\mathbfit{P}`: '\U0001d477',
`\mathbfit{Q}`: '\U0001d478',
`\mathbfit{R}`: '\U0001d479',
`\mathbfit{S}`: '\U0001d47a',
`\mathbfit{T}`: '\U0001d47b',
`\mathbfit{U}`: '\U0001d47c',
`\mathbfit{V}`: '\U0001d47d',
`\mathbfit{W}`: '\U0001d47e',
`\mathbfit{X}`: '\U0001d47f',
`\mathbfit{Y}`: '\U0001d480',
`\mathbfit{Z}`: '\U0001d481',
`\mathbfit{a}`: '\U0001d482',
`\mathbfit{b}`: '\U0001d483',
`\mathbfit{c}`: '\U0001d484',
`\mathbfit{d}`: '\U0001d485',
`\mathbfit{e}`: '\U0001d486',
`\mathbfit{f}`: '\U0001d487',
`\mathbfit{g}`: '\U0001d488',
`\mathbfit{h}`: '\U0001d489',
`\mathbfit{i}`: '\U0001d48a',
`\mathbfit{j}`: '\U0001d48b',
`\mathbfit{k}`: '\U0001d48c',
`\mathbfit{l}`: '\U0001d48d',
`\mathbfit{m}`: '\U0001d48e',
`\mathbfit{n}`: '\U0001d48f',
`\mathbfit{o}`: '\U0001d490',
`\mathbfit{p}`: '\U0001d491',
`\mathbfit{q}`: '\U0001d492',
`\mathbfit{r}`: '\U0001d493',
`\mathbfit{s}`: '\U0001d494',
`\mathbfit{t}`: '\U0001d495',
`\mathbfit{u}`: '\U0001d496',
`\mathbfit{v}`: '\U0001d497',
`\mathbfit{w}`: '\U0001d498',
`\mathbfit{x}`: '\U0001d499',
`\mathbfit{y}`: '\U0001d49a',
`\mathbfit{z}`: '\U0001d49b',
`\mathcal{A}`: '\U0001d49c',
`\mathcal{C}`: '\U0001d49e',
`\mathcal{D}`: '\U0001d49f',
`\mathcal{G}`: '\U0001d4a2',
`\mathcal{J}`: '\U0001d4a5',
`\mathcal{K}`: '\U0001d4a6',
`\mathcal{N}`: '\U0001d4a9',
`\mathcal{O}`: '\U0001d4aa',
`\mathcal{P}`: '\U0001d4ab',
`\mathcal{Q}`: '\U0001d4ac',
`\mathcal{S}`: '\U0001d4ae',
`\mathcal{T}`: '\U0001d4af',
`\mathcal{U}`: '\U0001d4b0',
`\mathcal{V}`: '\U0001d4b1',
`\mathcal{W}`: '\U0001d4b2',
`\mathcal{X}`: '\U0001d4b3',
`\mathcal{Y}`: '\U0001d4b4',
`\mathcal{Z}`: '\U0001d4b5',
`\mathcal{a}`: '\U0001d4b6',
`\mathcal{b}`: '\U0001d4b7',
`\mathcal{c}`: '\U0001d4b8',
`\mathcal{d}`: '\U0001d4b9',
`\mathcal{f}`: '\U0001d4bb',
`\mathcal{h}`: '\U0001d4bd',
`\mathcal{i}`: '\U0001d4be',
`\mathcal{j}`: '\U0001d4bf',
`\mathcal{k}`: '\U0001d4c0',
`\mathcal{l}`: '\U0001d4c1',
`\mathcal{m}`: '\U0001d4c2',
`\mathcal{n}`: '\U0001d4c3',
`\mathcal{p}`: '\U0001d4c5',
`\mathcal{q}`: '\U0001d4c6',
`\mathcal{r}`: '\U0001d4c7',
`\mathcal{s}`: '\U0001d4c8',
`\mathcal{t}`: '\U0001d4c9',
`\mathcal{u}`: '\U0001d4ca',
`\mathcal{v}`: '\U0001d4cb',
`\mathcal{w}`: '\U0001d4cc',
`\mathcal{x}`: '\U0001d4cd',
`\mathcal{y}`: '\U0001d4ce',
`\mathcal{z}`: '\U0001d4cf',
`\mbfscrA`: '\U0001d4d0',
`\mbfscrB`: '\U0001d4d1',
`\mbfscrC`: '\U0001d4d2',
`\mbfscrD`: '\U0001d4d3',
`\mbfscrE`: '\U0001d4d4',
`\mbfscrF`: '\U0001d4d5',
`\mbfscrG`: '\U0001d4d6',
`\mbfscrH`: '\U0001d4d7',
`\mbfscrI`: '\U0001d4d8',
`\mbfscrJ`: '\U0001d4d9',
`\mbfscrK`: '\U0001d4da',
`\mbfscrL`: '\U0001d4db',
`\mbfscrM`: '\U0001d4dc',
`\mbfscrN`: '\U0001d4dd',
`\mbfscrO`: '\U0001d4de',
`\mbfscrP`: '\U0001d4df',
`\mbfscrQ`: '\U0001d4e0',
`\mbfscrR`: '\U0001d4e1',
`\mbfscrS`: '\U0001d4e2',
`\mbfscrT`: '\U0001d4e3',
`\mbfscrU`: '\U0001d4e4',
`\mbfscrV`: '\U0001d4e5',
`\mbfscrW`: '\U0001d4e6',
`\mbfscrX`: '\U0001d4e7',
`\mbfscrY`: '\U0001d4e8',
`\mbfscrZ`: '\U0001d4e9',
`\mbfscra`: '\U0001d4ea',
`\mbfscrb`: '\U0001d4eb',
`\mbfscrc`: '\U0001d4ec',
`\mbfscrd`: '\U0001d4ed',
`\mbfscre`: '\U0001d4ee',
`\mbfscrf`: '\U0001d4ef',
`\mbfscrg`: '\U0001d4f0',
`\mbfscrh`: '\U0001d4f1',
`\mbfscri`: '\U0001d4f2',
`\mbfscrj`: '\U0001d4f3',
`\mbfscrk`: '\U0001d4f4',
`\mbfscrl`: '\U0001d4f5',
`\mbfscrm`: '\U0001d4f6',
`\mbfscrn`: '\U0001d4f7',
`\mbfscro`: '\U0001d4f8',
`\mbfscrp`: '\U0001d4f9',
`\mbfscrq`: '\U0001d4fa',
`\mbfscrr`: '\U0001d4fb',
`\mbfscrs`: '\U0001d4fc',
`\mbfscrt`: '\U0001d4fd',
`\mbfscru`: '\U0001d4fe',
`\mbfscrv`: '\U0001d4ff',
`\mbfscrw`: '\U0001d500',
`\mbfscrx`: '\U0001d501',
`\mbfscry`: '\U0001d502',
`\mbfscrz`: '\U0001d503',
`\mathfrak{A}`: '\U0001d504',
`\mathfrak{B}`: '\U0001d505',
`\mathfrak{D}`: '\U0001d507',
`\mathfrak{E}`: '\U0001d508',
`\mathfrak{F}`: '\U0001d509',
`\mathfrak{G}`: '\U0001d50a',
`\mathfrak{J}`: '\U0001d50d',
`\mathfrak{K}`: '\U0001d50e',
`\mathfrak{L}`: '\U0001d50f',
`\mathfrak{M}`: '\U0001d510',
`\mathfrak{N}`: '\U0001d511',
`\mathfrak{O}`: '\U0001d512',
`\mathfrak{P}`: '\U0001d513',
`\mathfrak{Q}`: '\U0001d514',
`\mathfrak{S}`: '\U0001d516',
`\mathfrak{T}`: '\U0001d517',
`\mathfrak{U}`: '\U0001d518',
`\mathfrak{V}`: '\U0001d519',
`\mathfrak{W}`: '\U0001d51a',
`\mathfrak{X}`: '\U0001d51b',
`\mathfrak{Y}`: '\U0001d51c',
`\mathfrak{a}`: '\U0001d51e',
`\mathfrak{b}`: '\U0001d51f',
`\mathfrak{c}`: '\U0001d520',
`\mathfrak{d}`: '\U0001d521',
`\mathfrak{e}`: '\U0001d522',
`\mathfrak{f}`: '\U0001d523',
`\mathfrak{g}`: '\U0001d524',
`\mathfrak{h}`: '\U0001d525',
`\mathfrak{i}`: '\U0001d526',
`\mathfrak{j}`: '\U0001d527',
`\mathfrak{k}`: '\U0001d528',
`\mathfrak{l}`: '\U0001d529',
`\mathfrak{m}`: '\U0001d52a',
`\mathfrak{n}`: '\U0001d52b',
`\mathfrak{o}`: '\U0001d52c',
`\mathfrak{p}`: '\U0001d52d',
`\mathfrak{q}`: '\U0001d52e',
`\mathfrak{r}`: '\U0001d52f',
`\mathfrak{s}`: '\U0001d530',
`\mathfrak{t}`: '\U0001d531',
`\mathfrak{u}`: '\U0001d532',
`\mathfrak{v}`: '\U0001d533',
`\mathfrak{w}`: '\U0001d534',
`\mathfrak{x}`: '\U0001d535',
`\mathfrak{y}`: '\U0001d536',
`\mathfrak{z}`: '\U0001d537',
`\mathbb{A}`: '\U0001d538',
`\mathbb{B}`: '\U0001d539',
`\mathbb{D}`: '\U0001d53b',
`\mathbb{E}`: '\U0001d53c',
`\mathbb{F}`: '\U0001d53d',
`\mathbb{G}`: '\U0001d53e',
`\mathbb{I}`: '\U0001d540',
`\mathbb{J}`: '\U0001d541',
`\mathbb{K}`: '\U0001d542',
`\mathbb{L}`: '\U0001d543',
`\mathbb{M}`: '\U0001d544',
`\mathbb{O}`: '\U0001d546',
`\mathbb{S}`: '\U0001d54a',
`\mathbb{T}`: '\U0001d54b',
`\mathbb{U}`: '\U0001d54c',
`\mathbb{V}`: '\U0001d54d',
`\mathbb{W}`: '\U0001d54e',
`\mathbb{X}`: '\U0001d54f',
`\mathbb{Y}`: '\U0001d550',
`\mathbb{a}`: '\U0001d552',
`\mathbb{b}`: '\U0001d553',
`\mathbb{c}`: '\U0001d554',
`\mathbb{d}`: '\U0001d555',
`\mathbb{e}`: '\U0001d556',
`\mathbb{f}`: '\U0001d557',
`\mathbb{g}`: '\U0001d558',
`\mathbb{h}`: '\U0001d559',
`\mathbb{i}`: '\U0001d55a',
`\mathbb{j}`: '\U0001d55b',
`\mathbb{k}`: '\U0001d55c',
`\mathbb{l}`: '\U0001d55d',
`\mathbb{m}`: '\U0001d55e',
`\mathbb{n}`: '\U0001d55f',
`\mathbb{o}`: '\U0001d560',
`\mathbb{p}`: '\U0001d561',
`\mathbb{q}`: '\U0001d562',
`\mathbb{r}`: '\U0001d563',
`\mathbb{s}`: '\U0001d564',
`\mathbb{t}`: '\U0001d565',
`\mathbb{u}`: '\U0001d566',
`\mathbb{v}`: '\U0001d567',
`\mathbb{w}`: '\U0001d568',
`\mathbb{x}`: '\U0001d569',
`\mathbb{y}`: '\U0001d56a',
`\mathbb{z}`: '\U0001d56b',
`\mbffrakA`: '\U0001d56c',
`\mbffrakB`: '\U0001d56d',
`\mbffrakC`: '\U0001d56e',
`\mbffrakD`: '\U0001d56f',
`\mbffrakE`: '\U0001d570',
`\mbffrakF`: '\U0001d571',
`\mbffrakG`: '\U0001d572',
`\mbffrakH`: '\U0001d573',
`\mbffrakI`: '\U0001d574',
`\mbffrakJ`: '\U0001d575',
`\mbffrakK`: '\U0001d576',
`\mbffrakL`: '\U0001d577',
`\mbffrakM`: '\U0001d578',
`\mbffrakN`: '\U0001d579',
`\mbffrakO`: '\U0001d57a',
`\mbffrakP`: '\U0001d57b',
`\mbffrakQ`: '\U0001d57c',
`\mbffrakR`: '\U0001d57d',
`\mbffrakS`: '\U0001d57e',
`\mbffrakT`: '\U0001d57f',
`\mbffrakU`: '\U0001d580',
`\mbffrakV`: '\U0001d581',
`\mbffrakW`: '\U0001d582',
`\mbffrakX`: '\U0001d583',
`\mbffrakY`: '\U0001d584',
`\mbffrakZ`: '\U0001d585',
`\mbffraka`: '\U0001d586',
`\mbffrakb`: '\U0001d587',
`\mbffrakc`: '\U0001d588',
`\mbffrakd`: '\U0001d589',
`\mbffrake`: '\U0001d58a',
`\mbffrakf`: '\U0001d58b',
`\mbffrakg`: '\U0001d58c',
`\mbffrakh`: '\U0001d58d',
`\mbffraki`: '\U0001d58e',
`\mbffrakj`: '\U0001d58f',
`\mbffrakk`: '\U0001d590',
`\mbffrakl`: '\U0001d591',
`\mbffrakm`: '\U0001d592',
`\mbffrakn`: '\U0001d593',
`\mbffrako`: '\U0001d594',
`\mbffrakp`: '\U0001d595',
`\mbffrakq`: '\U0001d596',
`\mbffrakr`: '\U0001d597',
`\mbffraks`: '\U0001d598',
`\mbffrakt`: '\U0001d599',
`\mbffraku`: '\U0001d59a',
`\mbffrakv`: '\U0001d59b',
`\mbffrakw`: '\U0001d59c',
`\mbffrakx`: '\U0001d59d',
`\mbffraky`: '\U0001d59e',
`\mbffrakz`: '\U0001d59f',
`\mathsf{A}`: '\U0001d5a0',
`\mathsf{B}`: '\U0001d5a1',
`\mathsf{C}`: '\U0001d5a2',
`\mathsf{D}`: '\U0001d5a3',
`\mathsf{E}`: '\U0001d5a4',
`\mathsf{F}`: '\U0001d5a5',
`\mathsf{G}`: '\U0001d5a6',
`\mathsf{H}`: '\U0001d5a7',
`\mathsf{I}`: '\U0001d5a8',
`\mathsf{J}`: '\U0001d5a9',
`\mathsf{K}`: '\U0001d5aa',
`\mathsf{L}`: '\U0001d5ab',
`\mathsf{M}`: '\U0001d5ac',
`\mathsf{N}`: '\U0001d5ad',
`\mathsf{O}`: '\U0001d5ae',
`\mathsf{P}`: '\U0001d5af',
`\mathsf{Q}`: '\U0001d5b0',
`\mathsf{R}`: '\U0001d5b1',
`\mathsf{S}`: '\U0001d5b2',
`\mathsf{T}`: '\U0001d5b3',
`\mathsf{U}`: '\U0001d5b4',
`\mathsf{V}`: '\U0001d5b5',
`\mathsf{W}`: '\U0001d5b6',
`\mathsf{X}`: '\U0001d5b7',
`\mathsf{Y}`: '\U0001d5b8',
`\mathsf{Z}`: '\U0001d5b9',
`\mathsf{a}`: '\U0001d5ba',
`\mathsf{b}`: '\U0001d5bb',
`\mathsf{c}`: '\U0001d5bc',
`\mathsf{d}`: '\U0001d5bd',
`\mathsf{e}`: '\U0001d5be',
`\mathsf{f}`: '\U0001d5bf',
`\mathsf{g}`: '\U0001d5c0',
`\mathsf{h}`: '\U0001d5c1',
`\mathsf{i}`: '\U0001d5c2',
`\mathsf{j}`: '\U0001d5c3',
`\mathsf{k}`: '\U0001d5c4',
`\mathsf{l}`: '\U0001d5c5',
`\mathsf{m}`: '\U0001d5c6',
`\mathsf{n}`: '\U0001d5c7',
`\mathsf{o}`: '\U0001d5c8',
`\mathsf{p}`: '\U0001d5c9',
`\mathsf{q}`: '\U0001d5ca',
`\mathsf{r}`: '\U0001d5cb',
`\mathsf{s}`: '\U0001d5cc',
`\mathsf{t}`: '\U0001d5cd',
`\mathsf{u}`: '\U0001d5ce',
`\mathsf{v}`: '\U0001d5cf',
`\mathsf{w}`: '\U0001d5d0',
`\mathsf{x}`: '\U0001d5d1',
`\mathsf{y}`: '\U0001d5d2',
`\mathsf{z}`: '\U0001d5d3',
`\mathsfbf{A}`: '\U0001d5d4',
`\mathsfbf{B}`: '\U0001d5d5',
`\mathsfbf{C}`: '\U0001d5d6',
`\mathsfbf{D}`: '\U0001d5d7',
`\mathsfbf{E}`: '\U0001d5d8',
`\mathsfbf{F}`: '\U0001d5d9',
`\mathsfbf{G}`: '\U0001d5da',
`\mathsfbf{H}`: '\U0001d5db',
`\mathsfbf{I}`: '\U0001d5dc',
`\mathsfbf{J}`: '\U0001d5dd',
`\mathsfbf{K}`: '\U0001d5de',
`\mathsfbf{L}`: '\U0001d5df',
`\mathsfbf{M}`: '\U0001d5e0',
`\mathsfbf{N}`: '\U0001d5e1',
`\mathsfbf{O}`: '\U0001d5e2',
`\mathsfbf{P}`: '\U0001d5e3',
`\mathsfbf{Q}`: '\U0001d5e4',
`\mathsfbf{R}`: '\U0001d5e5',
`\mathsfbf{S}`: '\U0001d5e6',
`\mathsfbf{T}`: '\U0001d5e7',
`\mathsfbf{U}`: '\U0001d5e8',
`\mathsfbf{V}`: '\U0001d5e9',
`\mathsfbf{W}`: '\U0001d5ea',
`\mathsfbf{X}`: '\U0001d5eb',
`\mathsfbf{Y}`: '\U0001d5ec',
`\mathsfbf{Z}`: '\U0001d5ed',
`\mathsfbf{a}`: '\U0001d5ee',
`\mathsfbf{b}`: '\U0001d5ef',
`\mathsfbf{c}`: '\U0001d5f0',
`\mathsfbf{d}`: '\U0001d5f1',
`\mathsfbf{e}`: '\U0001d5f2',
`\mathsfbf{f}`: '\U0001d5f3',
`\mathsfbf{g}`: '\U0001d5f4',
`\mathsfbf{h}`: '\U0001d5f5',
`\mathsfbf{i}`: '\U0001d5f6',
`\mathsfbf{j}`: '\U0001d5f7',
`\mathsfbf{k}`: '\U0001d5f8',
`\mathsfbf{l}`: '\U0001d5f9',
`\mathsfbf{m}`: '\U0001d5fa',
`\mathsfbf{n}`: '\U0001d5fb',
`\mathsfbf{o}`: '\U0001d5fc',
`\mathsfbf{p}`: '\U0001d5fd',
`\mathsfbf{q}`: '\U0001d5fe',
`\mathsfbf{r}`: '\U0001d5ff',
`\mathsfbf{s}`: '\U0001d600',
`\mathsfbf{t}`: '\U0001d601',
`\mathsfbf{u}`: '\U0001d602',
`\mathsfbf{v}`: '\U0001d603',
`\mathsfbf{w}`: '\U0001d604',
`\mathsfbf{x}`: '\U0001d605',
`\mathsfbf{y}`: '\U0001d606',
`\mathsfbf{z}`: '\U0001d607',
`\mathsfit{A}`: '\U0001d608',
`\mathsfit{B}`: '\U0001d609',
`\mathsfit{C}`: '\U0001d60a',
`\mathsfit{D}`: '\U0001d60b',
`\mathsfit{E}`: '\U0001d60c',
`\mathsfit{F}`: '\U0001d60d',
`\mathsfit{G}`: '\U0001d60e',
`\mathsfit{H}`: '\U0001d60f',
`\mathsfit{I}`: '\U0001d610',
`\mathsfit{J}`: '\U0001d611',
`\mathsfit{K}`: '\U0001d612',
`\mathsfit{L}`: '\U0001d613',
`\mathsfit{M}`: '\U0001d614',
`\mathsfit{N}`: '\U0001d615',
`\mathsfit{O}`: '\U0001d616',
`\mathsfit{P}`: '\U0001d617',
`\mathsfit{Q}`: '\U0001d618',
`\mathsfit{R}`: '\U0001d619',
`\mathsfit{S}`: '\U0001d61a',
`\mathsfit{T}`: '\U0001d61b',
`\mathsfit{U}`: '\U0001d61c',
`\mathsfit{V}`: '\U0001d61d',
`\mathsfit{W}`: '\U0001d61e',
`\mathsfit{X}`: '\U0001d61f',
`\mathsfit{Y}`: '\U0001d620',
`\mathsfit{Z}`: '\U0001d621',
`\mathsfit{a}`: '\U0001d622',
`\mathsfit{b}`: '\U0001d623',
`\mathsfit{c}`: '\U0001d624',
`\mathsfit{d}`: '\U0001d625',
`\mathsfit{e}`: '\U0001d626',
`\mathsfit{f}`: '\U0001d627',
`\mathsfit{g}`: '\U0001d628',
`\mathsfit{h}`: '\U0001d629',
`\mathsfit{i}`: '\U0001d62a',
`\mathsfit{j}`: '\U0001d62b',
`\mathsfit{k}`: '\U0001d62c',
`\mathsfit{l}`: '\U0001d62d',
`\mathsfit{m}`: '\U0001d62e',
`\mathsfit{n}`: '\U0001d62f',
`\mathsfit{o}`: '\U0001d630',
`\mathsfit{p}`: '\U0001d631',
`\mathsfit{q}`: '\U0001d632',
`\mathsfit{r}`: '\U0001d633',
`\mathsfit{s}`: '\U0001d634',
`\mathsfit{t}`: '\U0001d635',
`\mathsfit{u}`: '\U0001d636',
`\mathsfit{v}`: '\U0001d637',
`\mathsfit{w}`: '\U0001d638',
`\mathsfit{x}`: '\U0001d639',
`\mathsfit{y}`: '\U0001d63a',
`\mathsfit{z}`: '\U0001d63b',
`\mathsfbfit{A}`: '\U0001d63c',
`\mathsfbfit{B}`: '\U0001d63d',
`\mathsfbfit{C}`: '\U0001d63e',
`\mathsfbfit{D}`: '\U0001d63f',
`\mathsfbfit{E}`: '\U0001d640',
`\mathsfbfit{F}`: '\U0001d641',
`\mathsfbfit{G}`: '\U0001d642',
`\mathsfbfit{H}`: '\U0001d643',
`\mathsfbfit{I}`: '\U0001d644',
`\mathsfbfit{J}`: '\U0001d645',
`\mathsfbfit{K}`: '\U0001d646',
`\mathsfbfit{L}`: '\U0001d647',
`\mathsfbfit{M}`: '\U0001d648',
`\mathsfbfit{N}`: '\U0001d649',
`\mathsfbfit{O}`: '\U0001d64a',
`\mathsfbfit{P}`: '\U0001d64b',
`\mathsfbfit{Q}`: '\U0001d64c',
`\mathsfbfit{R}`: '\U0001d64d',
`\mathsfbfit{S}`: '\U0001d64e',
`\mathsfbfit{T}`: '\U0001d64f',
`\mathsfbfit{U}`: '\U0001d650',
`\mathsfbfit{V}`: '\U0001d651',
`\mathsfbfit{W}`: '\U0001d652',
`\mathsfbfit{X}`: '\U0001d653',
`\mathsfbfit{Y}`: '\U0001d654',
`\mathsfbfit{Z}`: '\U0001d655',
`\mathsfbfit{a}`: '\U0001d656',
`\mathsfbfit{b}`: '\U0001d657',
`\mathsfbfit{c}`: '\U0001d658',
`\mathsfbfit{d}`: '\U0001d659',
`\mathsfbfit{e}`: '\U0001d65a',
`\mathsfbfit{f}`: '\U0001d65b',
`\mathsfbfit{g}`: '\U0001d65c',
`\mathsfbfit{h}`: '\U0001d65d',
`\mathsfbfit{i}`: '\U0001d65e',
`\mathsfbfit{j}`: '\U0001d65f',
`\mathsfbfit{k}`: '\U0001d660',
`\mathsfbfit{l}`: '\U0001d661',
`\mathsfbfit{m}`: '\U0001d662',
`\mathsfbfit{n}`: '\U0001d663',
`\mathsfbfit{o}`: '\U0001d664',
`\mathsfbfit{p}`: '\U0001d665',
`\mathsfbfit{q}`: '\U0001d666',
`\mathsfbfit{r}`: '\U0001d667',
`\mathsfbfit{s}`: '\U0001d668',
`\mathsfbfit{t}`: '\U0001d669',
`\mathsfbfit{u}`: '\U0001d66a',
`\mathsfbfit{v}`: '\U0001d66b',
`\mathsfbfit{w}`: '\U0001d66c',
`\mathsfbfit{x}`: '\U0001d66d',
`\mathsfbfit{y}`: '\U0001d66e',
`\mathsfbfit{z}`: '\U0001d66f',
`\mathtt{A}`: '\U0001d670',
`\mathtt{B}`: '\U0001d671',
`\mathtt{C}`: '\U0001d672',
`\mathtt{D}`: '\U0001d673',
`\mathtt{E}`: '\U0001d674',
`\mathtt{F}`: '\U0001d675',
`\mathtt{G}`: '\U0001d676',
`\mathtt{H}`: '\U0001d677',
`\mathtt{I}`: '\U0001d678',
`\mathtt{J}`: '\U0001d679',
`\mathtt{K}`: '\U0001d67a',
`\mathtt{L}`: '\U0001d67b',
`\mathtt{M}`: '\U0001d67c',
`\mathtt{N}`: '\U0001d67d',
`\mathtt{O}`: '\U0001d67e',
`\mathtt{P}`: '\U0001d67f',
`\mathtt{Q}`: '\U0001d680',
`\mathtt{R}`: '\U0001d681',
`\mathtt{S}`: '\U0001d682',
`\mathtt{T}`: '\U0001d683',
`\mathtt{U}`: '\U0001d684',
`\mathtt{V}`: '\U0001d685',
`\mathtt{W}`: '\U0001d686',
`\mathtt{X}`: '\U0001d687',
`\mathtt{Y}`: '\U0001d688',
`\mathtt{Z}`: '\U0001d689',
`\mathtt{a}`: '\U0001d68a',
`\mathtt{b}`: '\U0001d68b',
`\mathtt{c}`: '\U0001d68c',
`\mathtt{d}`: '\U0001d68d',
`\mathtt{e}`: '\U0001d68e',
`\mathtt{f}`: '\U0001d68f',
`\mathtt{g}`: '\U0001d690',
`\mathtt{h}`: '\U0001d691',
`\mathtt{i}`: '\U0001d692',
`\mathtt{j}`: '\U0001d693',
`\mathtt{k}`: '\U0001d694',
`\mathtt{l}`: '\U0001d695',
`\mathtt{m}`: '\U0001d696',
`\mathtt{n}`: '\U0001d697',
`\mathtt{o}`: '\U0001d698',
`\mathtt{p}`: '\U0001d699',
`\mathtt{q}`: '\U0001d69a',
`\mathtt{r}`: '\U0001d69b',
`\mathtt{s}`: '\U0001d69c',
`\mathtt{t}`: '\U0001d69d',
`\mathtt{u}`: '\U0001d69e',
`\mathtt{v}`: '\U0001d69f',
`\mathtt{w}`: '\U0001d6a0',
`\mathtt{x}`: '\U0001d6a1',
`\mathtt{y}`: '\U0001d6a2',
`\mathtt{z}`: '\U0001d6a3',
`\mbfAlpha`: '\U0001d6a8',
`\mbfBeta`: '\U0001d6a9',
`\mathbf{\Gamma}`: '\U0001d6aa',
`\mathbf{\Delta}`: '\U0001d6ab',
`\mbfEpsilon`: '\U0001d6ac',
`\mbfZeta`: '\U0001d6ad',
`\mbfEta`: '\U0001d6ae',
`\mathbf{\Theta}`: '\U0001d6af',
`\mbfIota`: '\U0001d6b0',
`\mbfKappa`: '\U0001d6b1',
`\mathbf{\Lambda}`: '\U0001d6b2',
`\mbfMu`: '\U0001d6b3',
`\mbfNu`: '\U0001d6b4',
`\mathbf{\Xi}`: '\U0001d6b5',
`\mbfOmicron`: '\U0001d6b6',
`\mathbf{\Pi}`: '\U0001d6b7',
`\mbfRho`: '\U0001d6b8',
`\mbfvarTheta`: '\U0001d6b9',
`\mathbf{\Sigma}`: '\U0001d6ba',
`\mbfTau`: '\U0001d6bb',
`\mathbf{\Upsilon}`: '\U0001d6bc',
`\mathbf{\Phi}`: '\U0001d6bd',
`\mbfChi`: '\U0001d6be',
`\mathbf{\Psi}`: '\U0001d6bf',
`\mathbf{\Omega}`: '\U0001d6c0',
`\mbfnabla`: '\U0001d6c1',
`\mathbf{\alpha}`: '\U0001d6c2',
`\mathbf{\beta}`: '\U0001d6c3',
`\mathbf{\gamma}`: '\U0001d6c4',
`\mathbf{\delta}`: '\U0001d6c5',
`\mathbf{\varepsilon}`: '\U0001d6c6',
`\mathbf{\zeta}`: '\U0001d6c7',
`\mathbf{\eta}`: '\U0001d6c8',
`\mathbf{\theta}`: '\U0001d6c9',
`\mathbf{\iota}`: '\U0001d6ca',
`\mathbf{\kappa}`: '\U0001d6cb',
`\mathbf{\lambda}`: '\U0001d6cc',
`\mathbf{\mu}`: '\U0001d6cd',
`\mathbf{\nu}`: '\U0001d6ce',
`\mathbf{\xi}`: '\U0001d6cf',
`\mbfomicron`: '\U0001d6d0',
`\mathbf{\pi}`: '\U0001d6d1',
`\mathbf{\rho}`: '\U0001d6d2',
`\mathbf{\varsigma}`: '\U0001d6d3',
`\mathbf{\sigma}`: '\U0001d6d4',
`\mathbf{\tau}`: '\U0001d6d5',
`\mathbf{\upsilon}`: '\U0001d6d6',
`\mathbf{\varphi}`: '\U0001d6d7',
`\mathbf{\chi}`: '\U0001d6d8',
`\mathbf{\psi}`: '\U0001d6d9',
`\mathbf{\omega}`: '\U0001d6da',
`\mbfpartial`: '\U0001d6db',
`\mathbf{\epsilon}`: '\U0001d6dc',
`\mathbf{\vartheta}`: '\U0001d6dd',
`\mbfvarkappa`: '\U0001d6de',
`\mathbf{\phi}`: '\U0001d6df',
`\mathbf{\varrho}`: '\U0001d6e0',
`\mathbf{\varpi}`: '\U0001d6e1',
`\mitAlpha`: '\U0001d6e2',
`\mitBeta`: '\U0001d6e3',
`\mitEpsilon`: '\U0001d6e6',
`\mitZeta`: '\U0001d6e7',
`\mitEta`: '\U0001d6e8',
`\mitIota`: '\U0001d6ea',
`\mitKappa`: '\U0001d6eb',
`\mitMu`: '\U0001d6ed',
`\mitNu`: '\U0001d6ee',
`\mitOmicron`: '\U0001d6f0',
`\mitRho`: '\U0001d6f2',
`\mitvarTheta`: '\U0001d6f3',
`\mitTau`: '\U0001d6f5',
`\mitChi`: '\U0001d6f8',
`\mitnabla`: '\U0001d6fb',
`\mitomicron`: '\U0001d70a',
`\mbfitAlpha`: '\U0001d71c',
`\mbfitBeta`: '\U0001d71d',
`\mathbfit{\Gamma}`: '\U0001d71e',
`\mathbfit{\Delta}`: '\U0001d71f',
`\mbfitEpsilon`: '\U0001d720',
`\mbfitZeta`: '\U0001d721',
`\mbfitEta`: '\U0001d722',
`\mathbfit{\Theta}`: '\U0001d723',
`\mbfitIota`: '\U0001d724',
`\mbfitKappa`: '\U0001d725',
`\mathbfit{\Lambda}`: '\U0001d726',
`\mbfitMu`: '\U0001d727',
`\mbfitNu`: '\U0001d728',
`\mathbfit{\Xi}`: '\U0001d729',
`\mbfitOmicron`: '\U0001d72a',
`\mathbfit{\Pi}`: '\U0001d72b',
`\mbfitRho`: '\U0001d72c',
`\mbfitvarTheta`: '\U0001d72d',
`\mathbfit{\Sigma}`: '\U0001d72e',
`\mbfitTau`: '\U0001d72f',
`\mathbfit{\Upsilon}`: '\U0001d730',
`\mathbfit{\Phi}`: '\U0001d731',
`\mbfitChi`: '\U0001d732',
`\mathbfit{\Psi}`: '\U0001d733',
`\mathbfit{\Omega}`: '\U0001d734',
`\mbfitnabla`: '\U0001d735',
`\mathbfit{\alpha}`: '\U0001d736',
`\mathbfit{\beta}`: '\U0001d737',
`\mathbfit{\gamma}`: '\U0001d738',
`\mathbfit{\delta}`: '\U0001d739',
`\mathbfit{\varepsilon}`: '\U0001d73a',
`\mathbfit{\zeta}`: '\U0001d73b',
`\mathbfit{\eta}`: '\U0001d73c',
`\mathbfit{\theta}`: '\U0001d73d',
`\mathbfit{\iota}`: '\U0001d73e',
`\mathbfit{\kappa}`: '\U0001d73f',
`\mathbfit{\lambda}`: '\U0001d740',
`\mathbfit{\mu}`: '\U0001d741',
`\mathbfit{\nu}`: '\U0001d742',
`\mathbfit{\xi}`: '\U0001d743',
`\mbfitomicron`: '\U0001d744',
`\mathbfit{\pi}`: '\U0001d745',
`\mathbfit{\rho}`: '\U0001d746',
`\mathbfit{\varsigma}`: '\U0001d747',
`\mathbfit{\sigma}`: '\U0001d748',
`\mathbfit{\tau}`: '\U0001d749',
`\mathbfit{\upsilon}`: '\U0001d74a',
`\mathbfit{\varphi}`: '\U0001d74b',
`\mathbfit{\chi}`: '\U0001d74c',
`\mathbfit{\psi}`: '\U0001d74d',
`\mathbfit{\omega}`: '\U0001d74e',
`\mbfitpartial`: '\U0001d74f',
`\mathbfit{\epsilon}`: '\U0001d750',
`\mathbfit{\vartheta}`: '\U0001d751',
`\mbfitvarkappa`: '\U0001d752',
`\mathbfit{\phi}`: '\U0001d753',
`\mathbfit{\varrho}`: '\U0001d754',
`\mathbfit{\varpi}`: '\U0001d755',
`\mbfsansAlpha`: '\U0001d756',
`\mbfsansBeta`: '\U0001d757',
`\mathsfbf{\Gamma}`: '\U0001d758',
`\mathsfbf{\Delta}`: '\U0001d759',
`\mbfsansEpsilon`: '\U0001d75a',
`\mbfsansZeta`: '\U0001d75b',
`\mbfsansEta`: '\U0001d75c',
`\mathsfbf{\Theta}`: '\U0001d75d',
`\mbfsansIota`: '\U0001d75e',
`\mbfsansKappa`: '\U0001d75f',
`\mathsfbf{\Lambda}`: '\U0001d760',
`\mbfsansMu`: '\U0001d761',
`\mbfsansNu`: '\U0001d762',
`\mathsfbf{\Xi}`: '\U0001d763',
`\mbfsansOmicron`: '\U0001d764',
`\mathsfbf{\Pi}`: '\U0001d765',
`\mbfsansRho`: '\U0001d766',
`\mbfsansvarTheta`: '\U0001d767',
`\mathsfbf{\Sigma}`: '\U0001d768',
`\mbfsansTau`: '\U0001d769',
`\mathsfbf{\Upsilon}`: '\U0001d76a',
`\mathsfbf{\Phi}`: '\U0001d76b',
`\mbfsansChi`: '\U0001d76c',
`\mathsfbf{\Psi}`: '\U0001d76d',
`\mathsfbf{\Omega}`: '\U0001d76e',
`\mbfsansnabla`: '\U0001d76f',
`\mathsfbf{\alpha}`: '\U0001d770',
`\mathsfbf{\beta}`: '\U0001d771',
`\mathsfbf{\gamma}`: '\U0001d772',
`\mathsfbf{\delta}`: '\U0001d773',
`\mathsfbf{\varepsilon}`: '\U0001d774',
`\mathsfbf{\zeta}`: '\U0001d775',
`\mathsfbf{\eta}`: '\U0001d776',
`\mathsfbf{\theta}`: '\U0001d777',
`\mathsfbf{\iota}`: '\U0001d778',
`\mathsfbf{\kappa}`: '\U0001d779',
`\mathsfbf{\lambda}`: '\U0001d77a',
`\mathsfbf{\mu}`: '\U0001d77b',
`\mathsfbf{\nu}`: '\U0001d77c',
`\mathsfbf{\xi}`: '\U0001d77d',
`\mbfsansomicron`: '\U0001d77e',
`\mathsfbf{\pi}`: '\U0001d77f',
`\mathsfbf{\rho}`: '\U0001d780',
`\mathsfbf{\varsigma}`: '\U0001d781',
`\mathsfbf{\sigma}`: '\U0001d782',
`\mathsfbf{\tau}`: '\U0001d783',
`\mathsfbf{\upsilon}`: '\U0001d784',
`\mathsfbf{\varphi}`: '\U0001d785',
`\mathsfbf{\chi}`: '\U0001d786',
`\mathsfbf{\psi}`: '\U0001d787',
`\mathsfbf{\omega}`: '\U0001d788',
`\mbfsanspartial`: '\U0001d789',
`\mathsfbf{\epsilon}`: '\U0001d78a',
`\mathsfbf{\vartheta}`: '\U0001d78b',
`\mbfsansvarkappa`: '\U0001d78c',
`\mathsfbf{\phi}`: '\U0001d78d',
`\mathsfbf{\varrho}`: '\U0001d78e',
`\mathsfbf{\varpi}`: '\U0001d78f',
`\mbfitsansAlpha`: '\U0001d790',
`\mbfitsansBeta`: '\U0001d791',
`\mathsfbfit{\Gamma}`: '\U0001d792',
`\mathsfbfit{\Delta}`: '\U0001d793',
`\mbfitsansEpsilon`: '\U0001d794',
`\mbfitsansZeta`: '\U0001d795',
`\mbfitsansEta`: '\U0001d796',
`\mathsfbfit{\Theta}`: '\U0001d797',
`\mbfitsansIota`: '\U0001d798',
`\mbfitsansKappa`: '\U0001d799',
`\mathsfbfit{\Lambda}`: '\U0001d79a',
`\mbfitsansMu`: '\U0001d79b',
`\mbfitsansNu`: '\U0001d79c',
`\mathsfbfit{\Xi}`: '\U0001d79d',
`\mbfitsansOmicron`: '\U0001d79e',
`\mathsfbfit{\Pi}`: '\U0001d79f',
`\mbfitsansRho`: '\U0001d7a0',
`\mbfitsansvarTheta`: '\U0001d7a1',
`\mathsfbfit{\Sigma}`: '\U0001d7a2',
`\mbfitsansTau`: '\U0001d7a3',
`\mathsfbfit{\Upsilon}`: '\U0001d7a4',
`\mathsfbfit{\Phi}`: '\U0001d7a5',
`\mbfitsansChi`: '\U0001d7a6',
`\mathsfbfit{\Psi}`: '\U0001d7a7',
`\mathsfbfit{\Omega}`: '\U0001d7a8',
`\mbfitsansnabla`: '\U0001d7a9',
`\mathsfbfit{\alpha}`: '\U0001d7aa',
`\mathsfbfit{\beta}`: '\U0001d7ab',
`\mathsfbfit{\gamma}`: '\U0001d7ac',
`\mathsfbfit{\delta}`: '\U0001d7ad',
`\mathsfbfit{\varepsilon}`: '\U0001d7ae',
`\mathsfbfit{\zeta}`: '\U0001d7af',
`\mathsfbfit{\eta}`: '\U0001d7b0',
`\mathsfbfit{\theta}`: '\U0001d7b1',
`\mathsfbfit{\iota}`: '\U0001d7b2',
`\mathsfbfit{\kappa}`: '\U0001d7b3',
`\mathsfbfit{\lambda}`: '\U0001d7b4',
`\mathsfbfit{\mu}`: '\U0001d7b5',
`\mathsfbfit{\nu}`: '\U0001d7b6',
`\mathsfbfit{\xi}`: '\U0001d7b7',
`\mbfitsansomicron`: '\U0001d7b8',
`\mathsfbfit{\pi}`: '\U0001d7b9',
`\mathsfbfit{\rho}`: '\U0001d7ba',
`\mathsfbfit{\varsigma}`: '\U0001d7bb',
`\mathsfbfit{\sigma}`: '\U0001d7bc',
`\mathsfbfit{\tau}`: '\U0001d7bd',
`\mathsfbfit{\upsilon}`: '\U0001d7be',
`\mathsfbfit{\varphi}`: '\U0001d7bf',
`\mathsfbfit{\chi}`: '\U0001d7c0',
`\mathsfbfit{\psi}`: '\U0001d7c1',
`\mathsfbfit{\omega}`: '\U0001d7c2',
`\mbfitsanspartial`: '\U0001d7c3',
`\mathsfbfit{\epsilon}`: '\U0001d7c4',
`\mathsfbfit{\vartheta}`: '\U0001d7c5',
`\mbfitsansvarkappa`: '\U0001d7c6',
`\mathsfbfit{\phi}`: '\U0001d7c7',
`\mathsfbfit{\varrho}`: '\U0001d7c8',
`\mathsfbfit{\varpi}`: '\U0001d7c9',
`\mbfDigamma`: '\U0001d7ca',
`\mbfdigamma`: '\U0001d7cb',
`\mathbf{0}`: '\U0001d7ce',
`\mathbf{1}`: '\U0001d7cf',
`\mathbf{2}`: '\U0001d7d0',
`\mathbf{3}`: '\U0001d7d1',
`\mathbf{4}`: '\U0001d7d2',
`\mathbf{5}`: '\U0001d7d3',
`\mathbf{6}`: '\U0001d7d4',
`\mathbf{7}`: '\U0001d7d5',
`\mathbf{8}`: '\U0001d7d6',
`\mathbf{9}`: '\U0001d7d7',
`\mathbb{0}`: '\U0001d7d8',
`\mathbb{1}`: '\U0001d7d9',
`\mathbb{2}`: '\U0001d7da',
`\mathbb{3}`: '\U0001d7db',
`\mathbb{4}`: '\U0001d7dc',
`\mathbb{5}`: '\U0001d7dd',
`\mathbb{6}`: '\U0001d7de',
`\mathbb{7}`: '\U0001d7df',
`\mathbb{8}`: '\U0001d7e0',
`\mathbb{9}`: '\U0001d7e1',
`\mathsf{0}`: '\U0001d7e2',
`\mathsf{1}`: '\U0001d7e3',
`\mathsf{2}`: '\U0001d7e4',
`\mathsf{3}`: '\U0001d7e5',
`\mathsf{4}`: '\U0001d7e6',
`\mathsf{5}`: '\U0001d7e7',
`\mathsf{6}`: '\U0001d7e8',
`\mathsf{7}`: '\U0001d7e9',
`\mathsf{8}`: '\U0001d7ea',
`\mathsf{9}`: '\U0001d7eb',
`\mathsfbf{0}`: '\U0001d7ec',
`\mathsfbf{1}`: '\U0001d7ed',
`\mathsfbf{2}`: '\U0001d7ee',
`\mathsfbf{3}`: '\U0001d7ef',
`\mathsfbf{4}`: '\U0001d7f0',
`\mathsfbf{5}`: '\U0001d7f1',
`\mathsfbf{6}`: '\U0001d7f2',
`\mathsfbf{7}`: '\U0001d7f3',
`\mathsfbf{8}`: '\U0001d7f4',
`\mathsfbf{9}`: '\U0001d7f5',
`\mathtt{0}`: '\U0001d7f6',
`\mathtt{1}`: '\U0001d7f7',
`\mathtt{2}`: '\U0001d7f8',
`\mathtt{3}`: '\U0001d7f9',
`\mathtt{4}`: '\U0001d7fa',
`\mathtt{5}`: '\U0001d7fb',
`\mathtt{6}`: '\U0001d7fc',
`\mathtt{7}`: '\U0001d7fd',
`\mathtt{8}`: '\U0001d7fe',
`\mathtt{9}`: '\U0001d7ff',
} | internal/ext/zsym.go | 0.546254 | 0.613873 | zsym.go | starcoder |
package geo
import (
"runtime"
"sync"
)
// Point is a 2D geographic coordinate (X = longitude, Y = latitude).
type Point struct {
	Y float64 // Lat
	X float64 // Lon
}

// Polygon is an ordered ring of vertices. The ring does not need to be
// explicitly closed: the edge between the last and first vertex is implied.
type Polygon struct {
	Points []Point
}

// BoundingBox is an axis-aligned rectangle described by its bottom-left
// (minimum X/Y) and top-right (maximum X/Y) corners.
type BoundingBox struct {
	BottomLeft Point
	TopRight   Point
}

// PointInPolygon reports whether pt lies strictly inside poly, using the
// ray-casting (even-odd) rule: a ray cast from pt in the +X direction
// toggles the in/out state each time it crosses a polygon edge.
//
// The bounding-box test is a cheap early rejection before the O(n) edge walk.
func PointInPolygon(pt Point, poly Polygon) bool {
	bbox := GetBoundingBox(poly)
	if !PointInBoundingBox(pt, bbox) {
		return false
	}

	verts := poly.Points
	nverts := len(verts)
	intersect := false

	// Walk every edge, including the implicit closing edge between the last
	// and first vertex (j starts at nverts-1). The previous version started
	// with j = 0, i = 1 and therefore skipped the closing edge, producing
	// wrong answers for rings that do not repeat their first vertex.
	for i, j := 0, nverts-1; i < nverts; j, i = i, i+1 {
		if ((verts[i].Y > pt.Y) != (verts[j].Y > pt.Y)) &&
			(pt.X < (verts[j].X-verts[i].X)*(pt.Y-verts[i].Y)/(verts[j].Y-verts[i].Y)+verts[i].X) {
			intersect = !intersect
		}
	}

	return intersect
}

// PointInBoundingBox reports whether pt lies strictly inside bb.
// Points exactly on the box boundary are considered outside.
func PointInBoundingBox(pt Point, bb BoundingBox) bool {
	// Bottom Left is the smallest x and y value;
	// Top Right is the largest x and y value.
	return pt.X < bb.TopRight.X && pt.X > bb.BottomLeft.X &&
		pt.Y < bb.TopRight.Y && pt.Y > bb.BottomLeft.Y
}

// GetBoundingBox computes the axis-aligned bounding box of poly.
// An empty polygon yields the zero BoundingBox.
func GetBoundingBox(poly Polygon) BoundingBox {
	if len(poly.Points) == 0 {
		return BoundingBox{}
	}

	// Seed the extremes from the first vertex instead of treating 0.0 as a
	// "not yet set" sentinel: the old "|| maxX == 0.0" checks re-triggered
	// whenever an extreme legitimately reached zero and produced wrong boxes
	// (e.g. the minimum X of {5, 0, 3} came out as 3).
	first := poly.Points[0]
	minX, maxX := first.X, first.X
	minY, maxY := first.Y, first.Y
	for _, p := range poly.Points[1:] {
		if p.X > maxX {
			maxX = p.X
		}
		if p.X < minX {
			minX = p.X
		}
		if p.Y > maxY {
			maxY = p.Y
		}
		if p.Y < minY {
			minY = p.Y
		}
	}

	return BoundingBox{
		BottomLeft: Point{X: minX, Y: minY},
		TopRight:   Point{X: maxX, Y: maxY},
	}
}
// MaxParallelism returns the effective number of goroutines that can run
// in parallel: the smaller of GOMAXPROCS and the machine's CPU count.
func MaxParallelism() int {
	procs, cpus := runtime.GOMAXPROCS(0), runtime.NumCPU()
	if cpus < procs {
		return cpus
	}
	return procs
}
func PointInPolygonParallel(pts []Point, poly Polygon, numcores int) []Point {
MAXPROCS := MaxParallelism()
runtime.GOMAXPROCS(MAXPROCS)
if numcores > MAXPROCS {
numcores = MAXPROCS
}
start := 0
inside := []Point{}
var m sync.Mutex
var wg sync.WaitGroup
wg.Add(numcores)
for i := 1; i <= numcores; i++ {
size := (len(pts) / numcores) * i
batch := pts[start:size]
go func(batch []Point) {
defer wg.Done()
for j := 0; j < len(batch); j++ {
pt := batch[j]
if PointInPolygon(pt, poly) {
m.Lock()
inside = append(inside, pt)
m.Unlock()
}
}
}(batch)
start = size
}
wg.Wait()
return inside
} | geo/geo.go | 0.722135 | 0.429609 | geo.go | starcoder |
package dfl
import (
"fmt"
"reflect"
"github.com/pkg/errors"
)
// Within is a BinaryOperator that represents that the left value is between
// the two bounds supplied by the right-hand node ("x within [lo, hi]").
type Within struct {
	*BinaryOperator
}
// Dfl returns the DFL representation of this node as a string,
// rendered as the infix "within" operator between the left and right children.
func (w Within) Dfl(quotes []string, pretty bool, tabs int) string {
	return w.BinaryOperator.Dfl("within", quotes, pretty, tabs)
}
// Sql returns the SQL representation of this node as a string,
// rendered as "left BETWEEN lo AND hi". How the bounds are written depends
// on the concrete type of the right-hand node:
//   - Literal holding a 2-element array/slice: elements printed directly;
//   - Attribute / Variable: indexed as expr[0] AND expr[1];
//   - Array of two nodes: each node rendered with its own Sql;
//   - And / *And: the conjunction's operands become the bounds.
// NOTE(review): any other right-hand shape leaves the statement dangling
// after BETWEEN — presumably such nodes are rejected before SQL generation;
// confirm with callers.
func (w Within) Sql(pretty bool, tabs int) string {
	str := w.Left.Sql(pretty, tabs) + " BETWEEN "
	switch right := w.Right.(type) {
	case Literal:
		// Only array/slice literals of exactly two elements produce bounds.
		if t := reflect.TypeOf(right.Value); t.Kind() == reflect.Array || t.Kind() == reflect.Slice {
			if rv := reflect.ValueOf(right.Value); rv.Len() == 2 {
				str += fmt.Sprint(rv.Index(0).Interface()) + " AND " + fmt.Sprint(rv.Index(1).Interface())
			}
		}
	case *Attribute:
		str += right.Sql(pretty, tabs) + "[0] AND " + right.Sql(pretty, tabs) + "[1]"
	case *Variable:
		str += right.Sql(pretty, tabs) + "[0] AND " + right.Sql(pretty, tabs) + "[1]"
	case Array:
		if right.Len() == 2 {
			str += right.Nodes[0].Sql(pretty, tabs) + " AND " + right.Nodes[1].Sql(pretty, tabs)
		}
	case And:
		str += right.Left.Sql(pretty, tabs) + " AND " + right.Right.Sql(pretty, tabs)
	case *And:
		str += right.Left.Sql(pretty, tabs) + " AND " + right.Right.Sql(pretty, tabs)
	}
	return str
}
// Map returns a map representation of this node
// (the operator name plus its left and right children), for serialization.
func (w Within) Map() map[string]interface{} {
	return w.BinaryOperator.Map("within", w.Left, w.Right)
}
// Compile returns a compiled version of this node.
// An And expression on the right-hand side is normalized to a two-element
// Array so the range bounds have a uniform shape downstream. If the left
// side compiles to a numeric Literal and the right side to a 2-element
// array/slice Literal, the whole node is folded to a Literal holding the
// WithinRange result. Otherwise a clone over the compiled children is
// returned.
func (w Within) Compile() Node {
	left := w.Left.Compile()
	right := w.Right.Compile()

	// Normalize "x within a and b" to "x within [a, b]". Sql and Evaluate
	// accept both And and *And, so handle both here as well for consistency
	// (the old code only recognized the value form).
	switch r := right.(type) {
	case And:
		right = Array{Nodes: []Node{r.Left, r.Right}}
	case *And:
		right = Array{Nodes: []Node{r.Left, r.Right}}
	}

	if l, ok := left.(Literal); ok {
		switch l.Value.(type) {
		case int, int8, int16, int32, int64, float64:
			if r, ok := right.(Literal); ok {
				if t := reflect.TypeOf(r.Value); t.Kind() == reflect.Array || t.Kind() == reflect.Slice {
					rv := reflect.ValueOf(r.Value)
					if rv.Len() == 2 {
						v, err := WithinRange(l.Value, rv.Index(0).Interface(), rv.Index(1).Interface())
						if err != nil {
							// Constant folding failed; fall back to the
							// uncompiled form and let Evaluate report it.
							return &Within{&BinaryOperator{Left: left, Right: right}}
						}
						return Literal{Value: v}
					}
				}
			}
		}
	}
	return &Within{&BinaryOperator{Left: left, Right: right}}
}
// Evaluate returns the value of this node given Context ctx, and an error if any.
func (w Within) Evaluate(vars map[string]interface{}, ctx interface{}, funcs FunctionMap, quotes []string) (map[string]interface{}, interface{}, error) {
vars, lv, err := w.Left.Evaluate(vars, ctx, funcs, quotes)
if err != nil {
return vars, false, errors.Wrap(err, "Error evaluating left value for "+w.Dfl(quotes, false, 0))
}
vars, rv, err := w.Right.Evaluate(vars, ctx, funcs, quotes)
if err != nil {
return vars, false, errors.Wrap(err, "Error evaluating right value for "+w.Dfl(quotes, false, 0))
}
if t := reflect.TypeOf(rv); !(t.Kind() == reflect.Array || t.Kind() == reflect.Slice) {
return vars, false, errors.Wrap(err, "right value is wrong type for "+w.Dfl(quotes, false, 0))
}
rvv := reflect.ValueOf(rv)
if rvv.Len() != 2 {
return vars, false, errors.Wrap(err, "right value is invalid length "+w.Dfl(quotes, false, 0))
}
v, err := WithinRange(lv, rvv.Index(0).Interface(), rvv.Index(1).Interface())
return vars, v, err
} | pkg/dfl/Within.go | 0.837254 | 0.473414 | Within.go | starcoder |
package merge
import (
"reflect"
"github.com/coreos/ignition/v2/config/util"
)
// Rules of Config Merging:
// 1) Parent and child configs must be the same version/type
// 2) Only valid configs can be merged
// 3) It is possible to merge two valid configs and get an invalid config
// 3) For structs:
// a) Members that are structs get merged recursively (i.e. ignition.storage)
// b) Members that are primitives get replaced by the child's member (e.g. ignition.storage.files[i].path)
// c) Members that are pointers only get replaced by the child's value if the child's value is non-nil (e.g. ignition.config.replace.source)
// d) List merging of a list with IgnoreDuplicates: append the lists (e.g. ignition.storage.files[i].append)
// e) List merging of a list not merged with other lists: merge any entries with the same Key() and append the others (e.g. ignition.storage.filesystems by path)
// f) List merging of a list merged with other lists: (e.g. ignition.storage.{files,links,directories} by path)
// - merge entries with the same Key() that are in the same list
// - remove entries from the parent with the same Key() that are not in the same list
// - append entries that are unique to the child
// appendToSlice is a helper that appends to a slice without returning a new one.
// panics if len >= cap
func appendToSlice(s, v reflect.Value) {
s.SetLen(s.Len() + 1)
s.Index(s.Len() - 1).Set(v)
}
// handleKey identifies one entry across all lists that share a merge handle:
// handle names the group of lists and key is the entry's Key() value.
type handleKey struct {
	handle string
	key    string
}
// structInfo holds information about a struct being processed and has helper methods for querying that
// information in a way that is more clear what the intent is.
type structInfo struct {
	// set of field names to not do duplicate merging on
	ignoreDups map[string]struct{}
	// map from field names to a handle indicating all those with the same handle should have duplication
	// checking done across all fields that share that handle
	mergedKeys map[string]string
	// map from each handle + Key() value to the child entry holding that key
	keysToValues map[handleKey]reflect.Value
	// map from each handle + Key() value to the name of the list it came from
	keysToLists map[handleKey]string
}
// ignoreField reports whether duplicate checking/merging is disabled for the
// named field, i.e. the field appears in the IgnoreDuplicates set.
func (s structInfo) ignoreField(name string) bool {
	_, found := s.ignoreDups[name]
	return found
}
// getChildEntryByKey looks up, in the child config, the entry whose Key()
// equals key, searching across every list that shares a merge handle with
// the named parent field. On success it returns the entry's value, the name
// of the child list it was found in, and true.
func (s structInfo) getChildEntryByKey(fieldName, key string) (reflect.Value, string, bool) {
	// Fields without an explicit merge handle form their own group.
	handle, ok := s.mergedKeys[fieldName]
	if !ok {
		handle = fieldName
	}
	hkey := handleKey{handle: handle, key: key}
	v, found := s.keysToValues[hkey]
	if !found {
		return reflect.Value{}, "", false
	}
	return v, s.keysToLists[hkey], true
}
// newStructInfo gathers merge metadata for one parent/child struct pair:
// which fields skip duplicate merging, which fields share a merge handle,
// and an index of every keyed entry in the child's slices so parent entries
// can be matched (or displaced) by Key() during MergeStruct.
func newStructInfo(parent, child reflect.Value) structInfo {
	ignoreDups := map[string]struct{}{}
	if ignorer, ok := parent.Interface().(util.IgnoresDups); ok {
		ignoreDups = ignorer.IgnoreDuplicates()
	}
	mergedKeys := map[string]string{}
	if merger, ok := parent.Interface().(util.MergesKeys); ok {
		mergedKeys = merger.MergedKeys()
	}

	// Index every entry of every keyed slice in the child by (handle, Key()).
	keysToValues := map[handleKey]reflect.Value{}
	keysToLists := map[handleKey]string{}
	for i := 0; i < child.NumField(); i++ {
		field := child.Field(i)
		if field.Kind() != reflect.Slice {
			continue
		}
		fieldName := child.Type().Field(i).Name
		// Lists that ignore duplicates are never key-matched.
		if _, ok := ignoreDups[fieldName]; ok {
			continue
		}
		// Fields without an explicit merge handle form their own group.
		handle := fieldName
		if tmp, ok := mergedKeys[handle]; ok {
			handle = tmp
		}
		for j := 0; j < field.Len(); j++ {
			v := field.Index(j)
			hkey := handleKey{
				handle: handle,
				key:    util.CallKey(v),
			}
			keysToValues[hkey] = v
			keysToLists[hkey] = fieldName
		}
	}
	return structInfo{
		ignoreDups:   ignoreDups,
		mergedKeys:   mergedKeys,
		keysToValues: keysToValues,
		keysToLists:  keysToLists,
	}
}
// MergeStruct is intended for use by config/vX_Y/ packages only. They should expose their own Merge() that is properly
// typed. Use that one instead.
// parent and child MUST be the same type.
// It returns a new value built according to the merge rules documented at the
// top of this file (child primitives win, structs merge recursively, keyed
// lists merge by Key()).
func MergeStruct(parent, child reflect.Value) reflect.Value {
	// use New() so it's settable, addr-able, etc
	result := reflect.New(parent.Type()).Elem()
	info := newStructInfo(parent, child)
	for i := 0; i < parent.NumField(); i++ {
		fieldName := parent.Type().Field(i).Name
		parentField := parent.Field(i)
		childField := child.Field(i)
		resultField := result.Field(i)
		kind := parentField.Kind()
		switch {
		case util.IsPrimitive(kind):
			// Primitives: the child always wins.
			resultField.Set(childField)
		case kind == reflect.Ptr && childField.IsNil():
			resultField.Set(parentField)
		case kind == reflect.Ptr && !childField.IsNil():
			resultField.Set(childField)
		case kind == reflect.Struct:
			// Nested structs merge recursively.
			resultField.Set(MergeStruct(parentField, childField))
		case kind == reflect.Slice && info.ignoreField(fieldName):
			// Lists that ignore duplicates are simply concatenated.
			if parentField.Len()+childField.Len() == 0 {
				continue
			}
			resultField.Set(reflect.AppendSlice(parentField, childField))
		case kind == reflect.Slice && !info.ignoreField(fieldName):
			// Keyed list merging: match parent entries against the child by
			// Key(), possibly across sibling lists sharing a merge handle.
			maxlen := parentField.Len() + childField.Len()
			if maxlen == 0 {
				continue
			}
			resultField.Set(reflect.MakeSlice(parentField.Type(), 0, parentField.Len()+childField.Len()))
			parentKeys := getKeySet(parentField)
			for i := 0; i < parentField.Len(); i++ {
				parentItem := parentField.Index(i)
				key := util.CallKey(parentItem)
				if childItem, childList, ok := info.getChildEntryByKey(fieldName, key); ok {
					if childList == fieldName {
						// case 1: in child config in same list
						if childItem.Kind() == reflect.Struct {
							appendToSlice(resultField, MergeStruct(parentItem, childItem))
						} else if util.IsPrimitive(childItem.Kind()) {
							appendToSlice(resultField, childItem)
						} else {
							panic("List of pointers or slices or something else weird")
						}
					} else {
						// case 2: in child config in a different list. Do nothing since it'll be handled iterating over that list
					}
				} else {
					// case 3: not in child config, append it
					appendToSlice(resultField, parentItem)
				}
			}
			for i := 0; i < childField.Len(); i++ {
				childItem := childField.Index(i)
				key := util.CallKey(childItem)
				if _, alreadyMerged := parentKeys[key]; !alreadyMerged {
					// We only check the parentKeys for this field. If the parent had a matching entry in a different field
					// then it would be skipped as case 2 above
					appendToSlice(resultField, childItem)
				}
			}
		default:
			panic("unreachable code reached")
		}
	}
	return result
}
// getKeySet takes a value of a slice and returns the set of all the Key() values in that slice
func getKeySet(list reflect.Value) map[string]struct{} {
m := map[string]struct{}{}
for i := 0; i < list.Len(); i++ {
m[util.CallKey(list.Index(i))] = struct{}{}
}
return m
} | vendor/github.com/coreos/ignition/v2/config/merge/merge.go | 0.565299 | 0.457379 | merge.go | starcoder |
package privacy
import (
"crypto/subtle"
"errors"
"github.com/incognitochain/incognito-chain/common"
)
// SchnorrPublicKey represents Schnorr Publickey
// PK = G^SK + H^R
type SchnorrPublicKey struct {
	publicKey *Point // the public-key point PK itself
	g, h      *Point // the two Pedersen generators used to form PK
}

// GetPublicKey returns the underlying public-key point PK.
func (schnorrPubKey SchnorrPublicKey) GetPublicKey() *Point {
	return schnorrPubKey.publicKey
}
// SchnorrPrivateKey represents Schnorr Privatekey,
// consisting of the secret scalar, the Pedersen randomness, and the derived
// public key (populated by Set).
type SchnorrPrivateKey struct {
	privateKey *Scalar
	randomness *Scalar
	publicKey  *SchnorrPublicKey
}

// GetPublicKey returns the public key derived from this private key.
func (schnPrivKey SchnorrPrivateKey) GetPublicKey() *SchnorrPublicKey {
	return schnPrivKey.publicKey
}
// SchnSignature represents Schnorr Signature,
// holding the challenge e and responses z1 (and z2 for the privacy variant;
// z2 is nil otherwise — see Sign).
type SchnSignature struct {
	e, z1, z2 *Scalar
}
// Set sets Schnorr private key
// and derives the matching public key PK = sk*G + r*H, where G and H are the
// Pedersen generators at the private-key and randomness indexes.
func (privateKey *SchnorrPrivateKey) Set(sk *Scalar, r *Scalar) {
	privateKey.privateKey = sk
	privateKey.randomness = r
	privateKey.publicKey = new(SchnorrPublicKey)
	privateKey.publicKey.g, _ = new(Point).SetKey(&PedCom.G[PedersenPrivateKeyIndex].key)
	privateKey.publicKey.h, _ = new(Point).SetKey(&PedCom.G[PedersenRandomnessIndex].key)
	// PK = sk*G + r*H
	privateKey.publicKey.publicKey = new(Point).ScalarMult(PedCom.G[PedersenPrivateKeyIndex], sk)
	privateKey.publicKey.publicKey.Add(privateKey.publicKey.publicKey, new(Point).ScalarMult(PedCom.G[PedersenRandomnessIndex], r))
}
// Set sets Schnorr public key
// from the point pk, wiring in the Pedersen generators needed by Verify.
func (publicKey *SchnorrPublicKey) Set(pk *Point) {
	publicKey.publicKey, _ = new(Point).SetKey(&pk.key)
	publicKey.g, _ = new(Point).SetKey(&PedCom.G[PedersenPrivateKeyIndex].key)
	publicKey.h, _ = new(Point).SetKey(&PedCom.G[PedersenRandomnessIndex].key)
}
// Sign produces a Schnorr signature over data, which must be a 32-byte hash.
// When the key's randomness is non-zero the "privacy" variant is used and
// the signature carries both responses (z1, z2); otherwise only z1 is set.
func (privateKey SchnorrPrivateKey) Sign(data []byte) (*SchnSignature, error) {
	if len(data) != common.HashSize {
		return nil, NewPrivacyErr(UnexpectedErr, errors.New("hash length must be 32 bytes"))
	}

	signature := new(SchnSignature)

	// has privacy
	if !privateKey.randomness.IsZero() {
		// generates random numbers s1, s2 in [0, Curve.Params().N - 1]
		s1 := RandomScalar()
		s2 := RandomScalar()

		// commitment t = s1*G + s2*H
		t := new(Point).ScalarMult(privateKey.publicKey.g, s1)
		t.Add(t, new(Point).ScalarMult(privateKey.publicKey.h, s2))

		// challenge e = Hash(t || data)
		msg := append(t.ToBytesS(), data...)
		signature.e = HashToScalar(msg)

		// responses: z1 = s1 - e*sk, z2 = s2 - e*r
		signature.z1 = new(Scalar).Mul(privateKey.privateKey, signature.e)
		signature.z1 = new(Scalar).Sub(s1, signature.z1)
		signature.z2 = new(Scalar).Mul(privateKey.randomness, signature.e)
		signature.z2 = new(Scalar).Sub(s2, signature.z2)

		return signature, nil
	}

	// no privacy: plain Schnorr with a single nonce s in [0, Curve.Params().N - 1]
	s := RandomScalar()

	// commitment t = s*G
	t := new(Point).ScalarMult(privateKey.publicKey.g, s)

	// challenge e = Hash(t || data)
	msg := append(t.ToBytesS(), data...)
	signature.e = HashToScalar(msg)

	// response z1 = s - e*sk; z2 is absent in this variant
	signature.z1 = new(Scalar).Mul(privateKey.privateKey, signature.e)
	signature.z1 = new(Scalar).Sub(s, signature.z1)
	signature.z2 = nil

	return signature, nil
}
// Verify reports whether signature is a valid Schnorr signature on data
// under this public key. It recomputes the commitment
// R' = e*PK + z1*G (+ z2*H when the privacy response is present) and accepts
// when Hash(R' || data) equals the signature's challenge e.
// The comparison is constant-time to avoid leaking information.
func (publicKey SchnorrPublicKey) Verify(signature *SchnSignature, data []byte) bool {
	if signature == nil {
		return false
	}
	rv := new(Point).ScalarMult(publicKey.publicKey, signature.e)
	rv.Add(rv, new(Point).ScalarMult(publicKey.g, signature.z1))
	if signature.z2 != nil {
		rv.Add(rv, new(Point).ScalarMult(publicKey.h, signature.z2))
	}
	msg := append(rv.ToBytesS(), data...)
	ev := HashToScalar(msg)
	return subtle.ConstantTimeCompare(ev.ToBytesS(), signature.e.ToBytesS()) == 1
}
// Bytes serializes the signature as e || z1 [|| z2]; z2 is omitted for
// signatures produced without privacy (see Sign).
func (sig SchnSignature) Bytes() []byte {
	out := append(sig.e.ToBytesS(), sig.z1.ToBytesS()...)
	// Z2 is nil when has no privacy
	if sig.z2 != nil {
		out = append(out, sig.z2.ToBytesS()...)
	}
	return out
}
func (sig *SchnSignature) SetBytes(bytes []byte) error {
if len(bytes) == 0 {
return NewPrivacyErr(InvalidInputToSetBytesErr, nil)
}
sig.e = new(Scalar).FromBytesS(bytes[0:Ed25519KeySize])
sig.z1 = new(Scalar).FromBytesS(bytes[Ed25519KeySize : 2*Ed25519KeySize])
if len(bytes) == 3*Ed25519KeySize {
sig.z2 = new(Scalar).FromBytesS(bytes[2*Ed25519KeySize:])
} else {
sig.z2 = nil
}
return nil
} | privacy/schnorr.go | 0.722331 | 0.407628 | schnorr.go | starcoder |
package main
import (
"fmt"
"go-guide/datastruct/binaryTree/traversal/levelorder"
. "go-guide/datastruct/binaryTree/treeNode"
"log"
)
/**
Problem: https://leetcode-cn.com/problems/merge-two-binary-trees/
Merge Two Binary Trees

Given two binary trees, imagine overlaying one on top of the other: some
nodes of the two trees will overlap.

Merge them into a new binary tree. The merge rule is: if two nodes overlap,
sum their values to form the merged node's value; otherwise the non-NULL
node is used directly as the node of the new tree.

Note: the merge must start from the roots of both trees.

Implementation notes:
1. When both root1 and root2 have a child at the same position, a new node
   must be created.
2. When only one side has a child, the existing subtree is reused directly.
*/
// main builds two sample trees and prints the level-order traversal of their
// merge, computed both recursively and iteratively, followed by the
// traversals of the (unmodified) inputs.
func main() {
	root1 := NewNormalTree()
	root2 := NewNormalTree2()
	log.Println("ๅๅนถไบๅๆ -้ๅฝ๏ผ", levelorder.TraversalRecursive(mergeTrees(root1, root2)), levelorder.TraversalRecursive(root1), levelorder.TraversalRecursive(root2))
	log.Println("ๅๅนถไบๅๆ -่ฟญไปฃ๏ผ", levelorder.TraversalRecursive(mergeTrees1(root1, root2)), levelorder.TraversalRecursive(root1), levelorder.TraversalRecursive(root2))
}
// mergeTrees merges two binary trees recursively (pre-order): overlapping
// nodes sum their values, and a subtree present in only one input is shared
// (not copied) into the result.
func mergeTrees(root1 *TreeNode, root2 *TreeNode) *TreeNode {
	switch {
	case root1 == nil:
		return root2
	case root2 == nil:
		return root1
	}
	merged := &TreeNode{Val: root1.Val + root2.Val}
	merged.Left = mergeTrees(root1.Left, root2.Left)
	merged.Right = mergeTrees(root1.Right, root2.Right)
	return merged
}
// mergeTrees1 ่ฟญไปฃๆนๅผ๏ผBFS
func mergeTrees1(root1 *TreeNode, root2 *TreeNode) *TreeNode {
if root1 == nil {
return root2
}
if root2 == nil {
return root1
}
var newRoot = &TreeNode{Val: root1.Val + root2.Val}
var stack1 = []*TreeNode{newRoot} // ๅญๆพๆฐๅปบ็็ป็น๏ผๆฐๅปบ็็ป็น้่ฆ้พๆฅๅญ่็น
var stack2 = []*TreeNode{root1}
var stack3 = []*TreeNode{root2}
for len(stack2) > 0 && len(stack3) > 0 {
// ๅบๆ
node1 := stack1[0]
node2 := stack2[0]
node3 := stack3[0]
stack1 = stack1[1:]
stack2 = stack2[1:]
stack3 = stack3[1:]
// ๅ
ๅคๆญๅทฆ็ป็น
node2Left := node2.Left
node3Left := node3.Left
if node2Left != nil && node3Left != nil {
tempNode := &TreeNode{Val: node2Left.Val + node3Left.Val}
stack1 = append(stack1, tempNode)
stack2 = append(stack2, node2Left)
stack3 = append(stack3, node3Left)
node1.Left = tempNode
} else if node2Left != nil && node3Left == nil {
node1.Left = node2Left
} else if node2Left == nil && node3Left != nil {
node1.Left = node3Left
}
// ็ถๅๆฏๅณ่็น
node2Right := node2.Right
node3Right := node3.Right
if node2Right != nil && node3Right != nil {
tempNode := &TreeNode{Val: node2Right.Val + node3Right.Val}
stack1 = append(stack1, tempNode)
stack2 = append(stack2, node2Right)
stack3 = append(stack3, node3Right)
node1.Right = tempNode
} else if node2Right != nil && node3Right == nil {
node1.Right = node2Right
} else if node2Right == nil && node3Right != nil {
node1.Right = node3Right
}
}
return newRoot
} | datastruct/binaryTree/leetcodeQuestion/mergeTrees/mergeTrees.go | 0.544801 | 0.49469 | mergeTrees.go | starcoder |
package processor
import (
"encoding/xml"
"fmt"
"time"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/clbanning/mxj"
"github.com/opentracing/opentracing-go"
)
//------------------------------------------------------------------------------
// init configures a lenient XML decoder for mxj (Strict = false tolerates
// common XML mistakes) and registers the XML processor's constructor and
// documentation with the global processor registry.
func init() {
	dec := xml.NewDecoder(nil)
	dec.Strict = false
	mxj.CustomDecoder = dec

	Constructors[TypeXML] = TypeSpec{
		constructor: NewXML,
		Status:      docs.StatusBeta,
		Categories: []Category{
			CategoryParsing,
		},
		Summary: `
Parses messages as an XML document, performs a mutation on the data, and then
overwrites the previous contents with the new value.`,
		Description: `
## Operators

### ` + "`to_json`" + `

Converts an XML document into a JSON structure, where elements appear as keys of
an object according to the following rules:

- If an element contains attributes they are parsed by prefixing a hyphen,
` + "`-`" + `, to the attribute label.
- If the element is a simple element and has attributes, the element value
is given the key ` + "`#text`" + `.
- XML comments, directives, and process instructions are ignored.
- When elements are repeated the resulting JSON value is an array.

For example, given the following XML:

` + "```xml" + `
<root>
<title>This is a title</title>
<description tone="boring">This is a description</description>
<elements id="1">foo1</elements>
<elements id="2">foo2</elements>
<elements>foo3</elements>
</root>
` + "```" + `

The resulting JSON structure would look like this:

` + "```json" + `
{
"root":{
"title":"This is a title",
"description":{
"#text":"This is a description",
"-tone":"boring"
},
"elements":[
{"#text":"foo1","-id":"1"},
{"#text":"foo2","-id":"2"},
"foo3"
]
}
}
` + "```" + ``,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("operator", "An XML [operation](#operators) to apply to messages.").HasOptions("to_json"),
			partsFieldSpec,
		},
	}
}
//------------------------------------------------------------------------------
// XMLConfig contains configuration fields for the XML processor.
type XMLConfig struct {
	// Parts selects which message parts the operator applies to; empty means
	// all parts.
	Parts []int `json:"parts" yaml:"parts"`
	// Operator names the XML operation to apply.
	Operator string `json:"operator" yaml:"operator"`
}

// NewXMLConfig returns a XMLConfig with default values: all parts selected
// and the "to_json" operator.
func NewXMLConfig() XMLConfig {
	conf := XMLConfig{}
	conf.Parts = []int{}
	conf.Operator = "to_json"
	return conf
}
//------------------------------------------------------------------------------
// XML is a processor that performs an operation on a XML payload.
type XML struct {
	parts []int  // message part indexes to process; empty means all parts
	conf  Config // full processor config, retained for reference
	log   log.Modular
	stats metrics.Type

	// Counters for processed batches, parse/marshal failures, and emitted
	// messages.
	mCount     metrics.StatCounter
	mErr       metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}
// NewXML returns a XML processor.
// The only supported operator is "to_json"; anything else is rejected here
// so misconfiguration fails at construction time rather than per message.
func NewXML(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	if op := conf.XML.Operator; op != "to_json" {
		return nil, fmt.Errorf("operator not recognised: %v", op)
	}
	p := &XML{
		parts: conf.XML.Parts,
		conf:  conf,
		log:   log,
		stats: stats,

		mCount:     stats.GetCounter("count"),
		mErr:       stats.GetCounter("error"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}
	return p, nil
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
// Each selected part is parsed as XML and rewritten in place as the
// equivalent JSON structure; parts that fail to parse are counted and
// flagged as errors but do not drop the batch.
func (p *XML) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	p.mCount.Incr(1)
	newMsg := msg.Copy()

	proc := func(index int, span opentracing.Span, part types.Part) error {
		// Parse the raw part into mxj's map representation of the document.
		root, err := mxj.NewMapXml(part.Get())
		if err != nil {
			p.mErr.Incr(1)
			p.log.Debugf("Failed to parse part as XML: %v\n", err)
			return err
		}
		// Overwrite the part's contents with the JSON equivalent.
		if err = part.SetJSON(map[string]interface{}(root)); err != nil {
			p.mErr.Incr(1)
			p.log.Debugf("Failed to marshal XML as JSON: %v\n", err)
			return err
		}
		return nil
	}

	IteratePartsWithSpan(TypeXML, p.parts, newMsg, proc)

	p.mBatchSent.Incr(1)
	p.mSent.Incr(int64(newMsg.Len()))
	return []types.Message{newMsg}, nil
}
// CloseAsync shuts down the processor and stops processing requests.
// The XML processor holds no background resources, so this is a no-op.
func (p *XML) CloseAsync() {
}

// WaitForClose blocks until the processor has closed down.
// Always returns nil immediately, since CloseAsync is a no-op.
func (p *XML) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------ | lib/processor/xml.go | 0.782953 | 0.698097 | xml.go | starcoder |
package rtda
import (
"math"
"jvm/pkg/rtda/heap"
)
// OperandStack is a JVM frame's operand stack: a fixed-capacity stack of
// Slots whose size comes from the method's max_stack attribute.
type OperandStack struct {
	size  uint // index of the next free slot (= current stack depth)
	slots []Slot
}

// NewOperandStack allocates an operand stack of capacity maxStack.
// Returns nil when maxStack is 0 (the method needs no operand stack).
func NewOperandStack(maxStack uint) *OperandStack {
	if maxStack > 0 {
		return &OperandStack{
			slots: make([]Slot, maxStack),
		}
	}
	return nil
}
// PushInt pushes a 32-bit int onto the stack (occupies one slot).
func (this *OperandStack) PushInt(val int32) {
	this.slots[this.size].num = val
	this.size++
}

// PopInt pops a 32-bit int off the stack.
func (this *OperandStack) PopInt() int32 {
	this.size--
	return this.slots[this.size].num
}

// PushFloat pushes a float by storing its IEEE-754 bit pattern in one slot.
func (this *OperandStack) PushFloat(val float32) {
	this.slots[this.size].num = int32(math.Float32bits(val))
	this.size++
}

// PopFloat pops a float, reinterpreting the stored bit pattern.
func (this *OperandStack) PopFloat() float32 {
	this.size--
	return math.Float32frombits(uint32(this.slots[this.size].num))
}
// PushLong pushes a 64-bit long across two slots:
// the low 32 bits first, then the high 32 bits on top.
func (this *OperandStack) PushLong(val int64) {
	this.PushInt(int32(uint32(val)))
	this.PushInt(int32(uint32(val >> 32)))
}

// PopLong pops a long pushed by PushLong. Go evaluates the operands left to
// right, so the first PopInt yields the high half (pushed last) and the
// second yields the low half.
func (this *OperandStack) PopLong() int64 {
	return (int64(uint32(this.PopInt())) << 32) | int64(uint32(this.PopInt()))
}

// PushDouble pushes a double as the bit pattern of a long (two slots).
func (this *OperandStack) PushDouble(val float64) {
	this.PushLong(int64(math.Float64bits(val)))
}

// PopDouble pops a double pushed by PushDouble.
func (this *OperandStack) PopDouble() float64 {
	return math.Float64frombits(uint64(this.PopLong()))
}
// PushRef pushes an object reference.
func (this *OperandStack) PushRef(ref heap.Object) {
	this.slots[this.size].ref = ref
	this.size++
}

// PopRef pops the top reference, nilling out the vacated slot so the
// referenced object can be garbage collected.
func (this *OperandStack) PopRef() heap.Object {
	this.size--
	ref := this.slots[this.size].ref
	this.slots[this.size].ref = nil
	return ref
}

// PopNormalObject pops a reference and returns it as a *NormalObject,
// unwrapping class objects; panics on any other reference kind.
func (this *OperandStack) PopNormalObject() *heap.NormalObject {
	popRef := this.PopRef()
	switch popRef.(type) {
	case *heap.NormalObject:
		return popRef.(*heap.NormalObject)
	case *heap.ClassObject:
		return popRef.(*heap.ClassObject).NormalObject
	default:
		panic("ref not valid")
	}
}
// PushSlot pushes a raw slot as-is.
func (this *OperandStack) PushSlot(slot Slot) {
	this.slots[this.size] = slot
	this.size++
}

// PopSlot pops and returns the raw top slot (the slot itself is not
// cleared; see PopRef for reference-clearing semantics).
func (this *OperandStack) PopSlot() Slot {
	this.size--
	return this.slots[this.size]
}

// GetRefFromTop returns the reference stored count slots below the
// top without popping; a count of 1 addresses the topmost slot.
func (this *OperandStack) GetRefFromTop(count uint) heap.Object {
	return this.slots[this.size-count].ref
}
func (this *OperandStack) Clear() {
for _, slot := range this.slots {
slot.num = 0
slot.ref = nil
}
this.size = 0
} | pkg/rtda/operand_stack.go | 0.675765 | 0.446977 | operand_stack.go | starcoder |
package rbtree
// Optional carries either a value of type T ("some") or nothing at all
// ("none").
type Optional[T any] struct {
	v T
	some bool
}

// IsSome reports whether the Optional holds a value.
func (o *Optional[T]) IsSome() bool {
	return o.some
}

// Unwrap returns the held value, panicking when the Optional is empty.
func (o *Optional[T]) Unwrap() T {
	if !o.some {
		panic(`unwrap none`)
	}
	return o.v
}

// None constructs an empty Optional.
func None[T any]() Optional[T] {
	var empty Optional[T]
	return empty
}

// Some constructs an Optional holding v.
func Some[T any](v T) Optional[T] {
	return Optional[T]{v: v, some: true}
}
// Node is a red-black tree node. link[0] is the left child and
// link[1] the right child.
type Node[K, V any] struct {
	link [2]*Node[K, V]
	parent *Node[K, V]
	k K
	v V
	red bool
}

// Compare orders keys: it returns a negative value when a < b, zero
// when they are equal and a positive value when a > b.
type Compare[K any] func(a, b K) int

// RBTree is a red-black tree mapping keys of type K to values of
// type V, ordered by Cmp.
type RBTree[K, V any] struct {
	root *Node[K, V]
	Cmp Compare[K]
}
// searchNode walks the tree from the root and returns the node whose
// key compares equal to key, or nil when no such node exists.
func (t *RBTree[K, V]) searchNode(key K) *Node[K, V] {
	for n := t.root; n != nil; {
		switch ord := t.Cmp(n.k, key); {
		case ord > 0:
			n = n.link[0]
		case ord < 0:
			n = n.link[1]
		default:
			return n
		}
	}
	return nil
}

// Search looks key up in the tree, returning Some(value) on a hit and
// None on a miss.
func (t *RBTree[K, V]) Search(key K) Optional[V] {
	if n := t.searchNode(key); n != nil {
		return Some(n.v)
	}
	return None[V]()
}
// fixInsert restores the red-black invariants after inserting the red
// node n, walking upward. Per iteration: a red uncle is handled by
// recolouring and continuing from the grandparent; a black uncle is
// handled by one or two rotations around the grandparent, after which
// the tree is valid and the function returns.
func (t *RBTree[K, V]) fixInsert(n *Node[K, V]) {
	for {
		p := n.parent
		// No violation once the parent is black (or n is the root).
		if p == nil || !p.red {
			break
		}
		g := p.parent
		if g == nil {
			break
		}
		// dir is the side of g on which p hangs; sib is the other side.
		var dir int
		if g.link[0] != p {
			dir = 1
		} else {
			dir = 0
		}
		dir &= 1
		sib := (1 - dir) & 1
		u := g.link[sib]
		if u != nil && u.red {
			// Red uncle: recolour and move the violation up the tree.
			p.red = false
			u.red = false
			g.red = true
			n = g
		} else {
			if p.link[sib] == n {
				// n is an inner child: rotate p so n takes its place,
				// reducing to the outer-child case below.
				p.link[sib] = n.link[dir]
				n.link[dir] = p
				g.link[dir] = n
				p.parent = n
				if p.link[sib] != nil {
					p.link[sib].parent = p
				}
				p = n
			}
			// Outer child: rotate g away from dir and swap colours.
			gg := g.parent
			g.link[dir] = p.link[sib]
			p.link[sib] = g
			p.parent = g.parent
			g.parent = p
			if g.link[dir] != nil {
				g.link[dir].parent = g
			}
			p.red = false
			g.red = true
			// Reattach the rotated subtree to the great-grandparent.
			if gg == nil {
				t.root = p
			} else if gg.link[0] == g {
				gg.link[0] = p
			} else {
				gg.link[1] = p
			}
			return
		}
	}
	// The root is always black.
	t.root.red = false
}
// Insert adds key/value to the tree. When the key already exists its
// value is replaced in place and the previous value returned wrapped
// in Some; otherwise a new red node is attached, the tree rebalanced,
// and None returned.
func (t *RBTree[K, V]) Insert(key K, value V) Optional[V] {
	var (
		p *Node[K, V]
		dir int
	)
	// Standard BST descent, remembering the last parent and direction.
	n := t.root
	for n != nil {
		ord := t.Cmp(n.k, key)
		switch {
		case ord > 0:
			dir = 0
		case ord < 0:
			dir = 1
		default:
			// Key already present: overwrite, no rebalance needed.
			old := n.v
			n.v = value
			return Some(old)
		}
		p = n
		n = n.link[dir&1]
	}
	// New nodes start red so the black-height invariant is preserved;
	// fixInsert repairs any red-red violation.
	n = &Node[K, V]{
		parent: p,
		k: key,
		v: value,
		red: true,
	}
	if p != nil {
		p.link[dir&1] = n
	} else {
		t.root = n
	}
	t.fixInsert(n)
	return None[V]()
}
// fixDelete restores the red-black invariants after unlinking a black
// node. p is the parent of the position that lost black height, dir
// the side of p that is now "doubly black", and root a sentinel node
// whose link[0] holds the real root.
func (t *RBTree[K, V]) fixDelete(p *Node[K, V], dir int, root *Node[K, V]) {
	for {
		dir &= 1
		sib := (1 - dir) & 1
		x := p.link[dir]
		// A red child absorbs the missing black: recolour and stop.
		if x != nil && x.red {
			x.red = false
			break
		}
		if p == root {
			break
		}
		g := p.parent
		if g == nil {
			g = root
		}
		w := p.link[sib]
		if w.red {
			// Red sibling: rotate so the sibling becomes black,
			// exposing one of the black-sibling cases below.
			w.red = false
			p.red = true
			p.link[sib] = w.link[dir]
			w.link[dir] = p
			if g.link[0] == p {
				g.link[0] = w
			} else {
				g.link[1] = w
			}
			w.parent = p.parent
			p.parent = w
			g = w
			w = p.link[sib]
			w.parent = p
		}
		if (w.link[0] == nil || !w.link[0].red) && (w.link[1] == nil || !w.link[1].red) {
			// Black sibling with black children: recolour and push the
			// double-black problem one level up.
			w.red = true
		} else {
			if w.link[sib] == nil || !w.link[sib].red {
				// Inner nephew red, outer black: rotate the sibling
				// first so the red nephew ends up on the outside.
				y := w.link[dir]
				y.red = false
				w.red = true
				w.link[dir] = y.link[sib]
				y.link[sib] = w
				if w.link[dir] != nil {
					w.link[dir].parent = w
				}
				p.link[sib] = y
				w = y
				w.link[sib].parent = w
			}
			// Outer nephew red: rotate p, transfer colours, done.
			w.red = p.red
			p.red = false
			w.link[sib].red = false
			p.link[sib] = w.link[dir]
			w.link[dir] = p
			if g.link[0] == p {
				g.link[0] = w
			} else {
				g.link[1] = w
			}
			w.parent = p.parent
			p.parent = w
			if p.link[sib] != nil {
				p.link[sib].parent = p
			}
			break
		}
		// Continue from the parent, recomputing which side we came from.
		z := p
		p = p.parent
		if p == nil {
			p = root
		}
		if p.link[0] == z {
			dir = 0
		} else {
			dir = 1
		}
	}
}
// Delete removes key from the tree and returns its value wrapped in
// Some, or None when the key is absent. A sentinel pseudo-root (whose
// link[0] is the real root) removes the special case of deleting the
// root itself.
func (t *RBTree[K, V]) Delete(key K) Optional[V] {
	n := t.searchNode(key)
	if n == nil {
		return None[V]()
	}
	root := Node[K, V]{link: [2]*Node[K, V]{0: t.root}}
	p := n.parent
	dir := 0
	if p == nil {
		p = &root
	} else if p.link[1] == n {
		dir = 1
	}
	if n.link[1] == nil {
		// At most a left child: splice it into n's place.
		c := n.link[0]
		p.link[dir] = c
		if c != nil {
			c.parent = n.parent
		}
	} else {
		r := n.link[1]
		if r.link[0] == nil {
			// Right child has no left subtree: it is n's in-order
			// successor; lift it directly.
			r.link[0] = n.link[0]
			p.link[dir] = r
			r.parent = n.parent
			if r.link[0] != nil {
				r.link[0].parent = r
			}
			// NOTE: this local t shadows the receiver inside the block;
			// it only swaps the colours of n and r.
			t := n.red
			n.red = r.red
			r.red = t
			p = r
			dir = 1
		} else {
			// General case: find the in-order successor s (leftmost in
			// the right subtree) and move it into n's position.
			s := r.link[0]
			for s.link[0] != nil {
				s = s.link[0]
			}
			r = s.parent
			r.link[0] = s.link[1]
			s.link[0] = n.link[0]
			s.link[1] = n.link[1]
			p.link[dir] = s
			if s.link[0] != nil {
				s.link[0].parent = s
			}
			s.link[1].parent = s
			s.parent = n.parent
			if r.link[0] != nil {
				r.link[0].parent = r
			}
			// Local t shadows the receiver; swaps the colours of n and s.
			t := n.red
			n.red = s.red
			s.red = t
			p = r
			dir = 0
		}
	}
	// Removing a black node breaks black height; rebalance from p.
	if !n.red {
		t.fixDelete(p, dir, &root)
	}
	t.root = root.link[0]
	return Some(n.v)
}
package geom
import (
"github.com/water-vapor/euclidea-solver/configs"
"github.com/water-vapor/euclidea-solver/pkg/hashset"
"math"
"math/rand"
)
// Circle is uniquely determined by its center point and radius.
type Circle struct {
	hashset.Serializable
	center *Point
	r float64
}
// NewCircleByPoint creates a circle from its center and any point on
// its circumference.
func NewCircleByPoint(center, onSide *Point) *Circle {
	radius := NewSegment(center, onSide).Length()
	return &Circle{center: center, r: radius}
}

// NewCircleByRadius creates a circle from its center and radius.
func NewCircleByRadius(center *Point, r float64) *Circle {
	return &Circle{r: r, center: center}
}
// GetCenter returns the circle's center point.
func (c *Circle) GetCenter() *Point {
	return c.center
}

// GetRadius returns the circle's radius.
func (c *Circle) GetRadius() float64 {
	return c.r
}
// Serialize returns the hash of the circle: center coordinates and
// radius are quantized to configs.HashPrecision and folded together
// with a prime multiplier into a single int64.
func (c *Circle) Serialize() interface{} {
	cx := int64(math.Round(c.center.x * configs.HashPrecision))
	cy := int64(math.Round(c.center.y * configs.HashPrecision))
	cr := int64(math.Round(c.r * configs.HashPrecision))
	return (cx*configs.Prime+cy)*configs.Prime + cr
}
// ContainsPoint reports whether pt lies on the circle, within the
// configured tolerance.
func (c *Circle) ContainsPoint(pt *Point) bool {
	dist := NewSegment(pt, c.center).Length()
	return math.Abs(dist-c.r) < configs.Tolerance
}
// IntersectLine returns the intersections with a line: no points when
// the line misses the circle, one point when it is tangent (within
// tolerance), two points otherwise.
func (c *Circle) IntersectLine(l *Line) *Intersection {
	// Signed distance numerator from the center to the line ax+by+c=0.
	distNumer := l.a*c.center.x + l.b*c.center.y + l.c
	distDenomSquare := l.a*l.a + l.b*l.b
	dist := math.Abs(distNumer) / math.Sqrt(distDenomSquare)
	// tangent line within tolerance: the touch point is where the
	// perpendicular through the center meets the line.
	if math.Abs(dist-c.r) < configs.Tolerance {
		v := l.GetNormalVector()
		tangentLine := NewLineFromDirection(c.center, v)
		return l.IntersectLine(tangentLine)
	}
	if dist > c.r {
		return NewIntersection()
	}
	// Closed-form solutions for the two intersection points:
	// {{x -> -((-b^2 x1 + a (c + b y1) + Sqrt[
	// b^2 ((a^2 + b^2) r1^2 - (c + a x1 + b y1)^2)])/(a^2 + b^2)),
	// y -> (-b^2 (c + a x1) + a^2 b y1 +
	// a Sqrt[b^2 ((a^2 + b^2) r1^2 - (c + a x1 + b y1)^2)])/(
	// b (a^2 + b^2))}, {x -> (
	// b^2 x1 - a (c + b y1) + Sqrt[
	// b^2 ((a^2 + b^2) r1^2 - (c + a x1 + b y1)^2)])/(a^2 + b^2),
	// y -> (-b^2 (c + a x1) + a^2 b y1 -
	// a Sqrt[b^2 ((a^2 + b^2) r1^2 - (c + a x1 + b y1)^2)])/(
	// b (a^2 + b^2))}}
	det := math.Sqrt(distDenomSquare*c.r*c.r - distNumer*distNumer)
	ptxc := l.b*l.b*c.center.x - l.a*(l.c+l.b*c.center.y)
	ptyc := -l.b*(l.c+l.a*c.center.x) + l.a*l.a*c.center.y
	pt1x := (ptxc - l.b*det) / distDenomSquare
	pt2x := (ptxc + l.b*det) / distDenomSquare
	pt1y := (ptyc + l.a*det) / distDenomSquare
	pt2y := (ptyc - l.a*det) / distDenomSquare
	return NewIntersection(NewPoint(pt1x, pt1y), NewPoint(pt2x, pt2y))
}
// IntersectCircle returns the intersections with another circle:
// empty for concentric, separated or nested circles; a single point
// for externally tangent circles (within tolerance); two points when
// the circles properly cross.
func (c *Circle) IntersectCircle(c2 *Circle) *Intersection {
	// center same, return no intersection
	if c.center.Equal(c2.center) {
		return NewIntersection()
	}
	dist := NewSegment(c.center, c2.center).Length()
	// tangent circles with tolerance: the touch point lies on the line
	// of centers, at distance c.r from c's center.
	if math.Abs(dist-c.r-c2.r) < configs.Tolerance {
		// vector from c to c2
		v := NewVector2DFromTwoPoints(c.center, c2.center)
		v.SetLength(c.r)
		pt := NewPoint(c.center.x+v.x, c.center.y+v.y)
		return NewIntersection(pt)
	}
	// separated circles
	if dist > c.r+c2.r {
		return NewIntersection()
	}
	// one circle inside another
	if math.Abs(c.r-c2.r) > dist {
		return NewIntersection()
	}
	// Implements a nice looking formula
	//https://math.stackexchange.com/a/1367732
	R2 := dist * dist
	coeff1 := (c.r*c.r - c2.r*c2.r) / R2
	coeff2 := math.Sqrt(2*(c.r*c.r+c2.r*c2.r)/R2 - coeff1*coeff1 - 1)
	pt1x := (c.center.x+c2.center.x)/2 + (c2.center.x-c.center.x)*coeff1/2 + (c2.center.y-c.center.y)*coeff2/2
	pt2x := (c.center.x+c2.center.x)/2 + (c2.center.x-c.center.x)*coeff1/2 - (c2.center.y-c.center.y)*coeff2/2
	pt1y := (c.center.y+c2.center.y)/2 + (c2.center.y-c.center.y)*coeff1/2 - (c2.center.x-c.center.x)*coeff2/2
	pt2y := (c.center.y+c2.center.y)/2 + (c2.center.y-c.center.y)*coeff1/2 + (c2.center.x-c.center.x)*coeff2/2
	return NewIntersection(NewPoint(pt1x, pt1y), NewPoint(pt2x, pt2y))
}
// IntersectHalfLine returns the intersections with a half line:
// the circle is intersected with the infinite extension first, then
// only solutions lying on the half line are kept.
func (c *Circle) IntersectHalfLine(h *HalfLine) *Intersection {
	result := c.IntersectLine(NewLineFromHalfLine(h))
	switch result.SolutionNumber {
	case 0:
		return result
	case 1:
		if result.Solutions[0].InHalfLineRange(h) {
			return result
		}
		return NewIntersection()
	}
	// Two candidate solutions: keep whichever fall on the half line.
	first, second := result.Solutions[0], result.Solutions[1]
	firstOK := first.InHalfLineRange(h)
	secondOK := second.InHalfLineRange(h)
	switch {
	case firstOK && secondOK:
		return result
	case firstOK:
		return NewIntersection(first)
	case secondOK:
		return NewIntersection(second)
	default:
		return NewIntersection()
	}
}
// IntersectSegment returns the intersections with a segment: the
// circle is intersected with the segment's supporting line first, then
// only solutions within the segment's extent are kept.
func (c *Circle) IntersectSegment(s *Segment) *Intersection {
	result := c.IntersectLine(NewLineFromSegment(s))
	switch result.SolutionNumber {
	case 0:
		return result
	case 1:
		if result.Solutions[0].InSegmentRange(s) {
			return result
		}
		return NewIntersection()
	}
	// Two candidate solutions: keep whichever fall on the segment.
	first, second := result.Solutions[0], result.Solutions[1]
	firstOK := first.InSegmentRange(s)
	secondOK := second.InSegmentRange(s)
	switch {
	case firstOK && secondOK:
		return result
	case firstOK:
		return NewIntersection(first)
	case secondOK:
		return NewIntersection(second)
	default:
		return NewIntersection()
	}
}
// GetRandomPoint returns a random point on the circle
func (c *Circle) GetRandomPoint() *Point {
// random number from -1 to 1
x := rand.Float64()*2 - 1
y := math.Sqrt(1 - x*x)
// decide sign
if rand.Float64() < 0.5 {
y *= -1
}
v := NewVector2D(x, y)
v.SetLength(c.r)
return NewPoint(c.center.x+v.x, c.center.y+v.y)
} | pkg/geom/circle.go | 0.877096 | 0.530054 | circle.go | starcoder |
package kv
import (
"bytes"
"encoding"
"fmt"
"strconv"
)
// KeyValue represents a node in a KeyValue tree. Setters return the
// receiver so calls can be chained.
type KeyValue interface {
	// Type returns the node's Type.
	Type() Type
	// SetType sets the node's Type and returns the receiver.
	SetType(Type) KeyValue
	// Key returns the node's Key.
	Key() string
	// SetKey sets the node's Key and returns the receiver.
	SetKey(key string) KeyValue
	// Value returns the node's raw string Value.
	Value() string
	// AsString returns Value as string if Type is TypeString, otherwise returns an error.
	AsString() (string, error)
	// AsInt32 returns Value as int32 if Type is TypeInt32, otherwise returns an error.
	AsInt32() (int32, error)
	// AsInt64 returns Value as int64 if Type is TypeInt64, otherwise returns an error.
	AsInt64() (int64, error)
	// AsUint64 returns Value as uint64 if Type is TypeUint64, otherwise returns an error.
	AsUint64() (uint64, error)
	// AsFloat32 returns Value as float32 if Type is TypeFloat32, otherwise returns an error.
	AsFloat32() (float32, error)
	// AsColor returns Value as int32 if Type is TypeColor, otherwise returns an error.
	AsColor() (int32, error)
	// AsPointer returns Value as int32 if Type is TypePointer, otherwise returns an error.
	AsPointer() (int32, error)
	// SetValue sets the node's raw string Value and returns the receiver.
	SetValue(value string) KeyValue
	// SetString sets Value to given string value if Type is TypeString, otherwise returns an error.
	SetString(string) error
	// SetInt32 sets Value to given int32 value if Type is TypeInt32, otherwise returns an error.
	SetInt32(int32) error
	// SetInt64 sets Value to given int64 value if Type is TypeInt64, otherwise returns an error.
	SetInt64(int64) error
	// SetUint64 sets Value to given uint64 value if Type is TypeUint64, otherwise returns an error.
	SetUint64(uint64) error
	// SetFloat32 sets Value to given float32 value if Type is TypeFloat32, otherwise returns an error.
	SetFloat32(float32) error
	// SetColor sets Value to given int32 value if Type is TypeColor, otherwise returns an error.
	SetColor(int32) error
	// SetPointer sets Value to given int32 value if Type is TypePointer, otherwise returns an error.
	SetPointer(int32) error
	// Parent returns the parent node.
	Parent() KeyValue
	// SetParent sets the node's parent node and returns the receiver.
	SetParent(KeyValue) KeyValue
	// Children returns all child nodes.
	Children() []KeyValue
	// SetChildren sets the node's children and returns the receiver.
	SetChildren(...KeyValue) KeyValue
	// Child finds a child node with the given key.
	Child(key string) KeyValue
	// NewChild creates an empty child node and returns the child node
	// (not the receiver).
	NewChild() KeyValue
	// AddChild adds a child node and returns the receiver.
	AddChild(KeyValue) KeyValue
	// AddObject adds an Object child node and returns the receiver.
	AddObject(key string) KeyValue
	// AddString adds a String child node and returns the receiver.
	AddString(key, value string) KeyValue
	// AddInt32 adds an Int32 child node and returns the receiver.
	AddInt32(key, value string) KeyValue
	// AddInt64 adds an Int64 child node and returns the receiver.
	AddInt64(key, value string) KeyValue
	// AddUint64 adds an Uint64 child node and returns the receiver.
	AddUint64(key, value string) KeyValue
	// AddFloat32 adds a Float32 child node and returns the receiver.
	AddFloat32(key, value string) KeyValue
	// AddColor adds a Color child node and returns the receiver.
	AddColor(key, value string) KeyValue
	// AddPointer adds a Pointer child node and returns the receiver.
	AddPointer(key, value string) KeyValue

	// Binary and text (de)serialization of the node; the wire formats
	// are defined by the corresponding encoders/decoders.
	encoding.BinaryMarshaler
	encoding.BinaryUnmarshaler
	encoding.TextMarshaler
	encoding.TextUnmarshaler
}
// keyValue is the concrete KeyValue implementation. The value is kept
// as a raw string; the v* fields below are lazily-parsed caches, one
// per typed accessor, reset whenever the type or raw value changes.
type keyValue struct {
	typ Type
	key string
	value string
	parent KeyValue
	children []KeyValue
	vInt32 *int32
	vFloat32 *float32
	vPointer *int32
	vColor *int32
	vUint64 *uint64
	vInt64 *int64
}
// NewKeyValue creates a KeyValue node and, when parent is non-nil,
// attaches the new node to it as a child.
func NewKeyValue(t Type, key, value string, parent KeyValue) KeyValue {
	node := &keyValue{
		typ: t,
		key: key,
		value: value,
		parent: parent,
	}
	if parent != nil {
		parent.AddChild(node)
	}
	return node
}
// NewKeyValueEmpty creates an empty, parentless KeyValue node of
// TypeInvalid with no key or value.
func NewKeyValueEmpty() KeyValue {
	return NewKeyValue(TypeInvalid, "", "", nil)
}

// NewKeyValueRoot creates a root KeyValue node: a parentless Object
// node with the given key.
func NewKeyValueRoot(key string) KeyValue {
	return NewKeyValueObject(key, nil)
}

// NewKeyValueObject creates a KeyValue node with TypeObject type.
func NewKeyValueObject(key string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeObject, key, "", parent)
}

// NewKeyValueString creates a KeyValue node with TypeString type.
func NewKeyValueString(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeString, key, value, parent)
}

// NewKeyValueInt32 creates a KeyValue node with TypeInt32 type.
func NewKeyValueInt32(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeInt32, key, value, parent)
}

// NewKeyValueInt64 creates a KeyValue node with TypeInt64 type.
func NewKeyValueInt64(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeInt64, key, value, parent)
}

// NewKeyValueUint64 creates a KeyValue node with TypeUint64 type.
func NewKeyValueUint64(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeUint64, key, value, parent)
}

// NewKeyValueFloat32 creates a KeyValue node with TypeFloat32 type.
func NewKeyValueFloat32(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeFloat32, key, value, parent)
}

// NewKeyValueColor creates a KeyValue node with TypeColor type.
func NewKeyValueColor(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypeColor, key, value, parent)
}

// NewKeyValuePointer creates a KeyValue node with TypePointer type.
func NewKeyValuePointer(key, value string, parent KeyValue) KeyValue {
	return NewKeyValue(TypePointer, key, value, parent)
}
// resetValues drops every lazily-parsed cache so subsequent typed
// reads re-parse the current raw string value.
func (kv *keyValue) resetValues() {
	kv.vInt32, kv.vInt64, kv.vUint64 = nil, nil, nil
	kv.vFloat32, kv.vColor, kv.vPointer = nil, nil, nil
}
// Type returns the node's Type.
func (kv *keyValue) Type() Type { return kv.typ }

// SetType changes the node's Type, invalidating any cached parsed
// values; the raw string value is kept.
func (kv *keyValue) SetType(t Type) KeyValue {
	kv.resetValues()
	kv.typ = t
	return kv
}

// Key returns the node's key.
func (kv *keyValue) Key() string { return kv.key }

// SetKey renames the node and returns the receiver.
func (kv *keyValue) SetKey(k string) KeyValue {
	kv.key = k
	return kv
}

// Value returns the raw string value.
func (kv *keyValue) Value() string { return kv.value }

// SetValue replaces the raw string value, invalidating any cached
// parsed values.
func (kv *keyValue) SetValue(v string) KeyValue {
	kv.resetValues()
	kv.value = v
	return kv
}
// AsString returns the raw value when the node holds a string.
func (kv *keyValue) AsString() (string, error) {
	if kv.typ != TypeString {
		return "", fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeString)
	}
	return kv.value, nil
}

// asInt32 parses the raw value as a 32-bit integer, memoizing the
// result behind the supplied cache pointer. Shared by AsInt32,
// AsColor and AsPointer, each with its own cache field.
func (kv *keyValue) asInt32(p **int32) (int32, error) {
	if *p == nil {
		n, err := strconv.ParseInt(kv.value, 10, 32)
		if err != nil {
			return 0, err
		}
		n32 := int32(n)
		*p = &n32
	}
	return **p, nil
}

// AsInt32 parses (and caches) the value as an int32.
func (kv *keyValue) AsInt32() (int32, error) {
	if kv.typ != TypeInt32 {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeInt32)
	}
	return kv.asInt32(&kv.vInt32)
}

// AsInt64 parses (and caches) the value as an int64.
func (kv *keyValue) AsInt64() (int64, error) {
	if kv.typ != TypeInt64 {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeInt64)
	}
	if kv.vInt64 == nil {
		n, err := strconv.ParseInt(kv.value, 10, 64)
		if err != nil {
			return 0, err
		}
		kv.vInt64 = &n
	}
	return *kv.vInt64, nil
}

// AsUint64 parses (and caches) the value as a uint64.
func (kv *keyValue) AsUint64() (uint64, error) {
	if kv.typ != TypeUint64 {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeUint64)
	}
	if kv.vUint64 == nil {
		n, err := strconv.ParseUint(kv.value, 10, 64)
		if err != nil {
			return 0, err
		}
		kv.vUint64 = &n
	}
	return *kv.vUint64, nil
}

// AsFloat32 parses (and caches) the value as a float32.
func (kv *keyValue) AsFloat32() (float32, error) {
	if kv.typ != TypeFloat32 {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeFloat32)
	}
	if kv.vFloat32 == nil {
		n, err := strconv.ParseFloat(kv.value, 32)
		if err != nil {
			return 0, err
		}
		n32 := float32(n)
		kv.vFloat32 = &n32
	}
	return *kv.vFloat32, nil
}

// AsColor parses (and caches) the value as an int32 colour.
func (kv *keyValue) AsColor() (int32, error) {
	if kv.typ != TypeColor {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypeColor)
	}
	return kv.asInt32(&kv.vColor)
}

// AsPointer parses (and caches) the value as an int32 pointer value.
func (kv *keyValue) AsPointer() (int32, error) {
	if kv.typ != TypePointer {
		return 0, fmt.Errorf("kv: cannot convert Value of type %s to %s", kv.typ, TypePointer)
	}
	return kv.asInt32(&kv.vPointer)
}
// SetString stores v when the node holds a string, otherwise errors.
// Error messages carry the "kv: " prefix for consistency with the
// As* accessors.
func (kv *keyValue) SetString(v string) error {
	if kv.typ != TypeString {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeString)
	}
	kv.value = v
	return nil
}

// SetInt32 stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetInt32(v int32) error {
	if kv.typ != TypeInt32 {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeInt32)
	}
	kv.vInt32 = &v
	kv.value = strconv.FormatInt(int64(v), 10)
	return nil
}

// SetInt64 stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetInt64(v int64) error {
	if kv.typ != TypeInt64 {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeInt64)
	}
	kv.vInt64 = &v
	kv.value = strconv.FormatInt(v, 10)
	return nil
}

// SetUint64 stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetUint64(v uint64) error {
	if kv.typ != TypeUint64 {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeUint64)
	}
	kv.vUint64 = &v
	kv.value = strconv.FormatUint(v, 10)
	return nil
}

// SetFloat32 stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetFloat32(v float32) error {
	if kv.typ != TypeFloat32 {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeFloat32)
	}
	kv.vFloat32 = &v
	kv.value = strconv.FormatFloat(float64(v), 'f', -1, 32)
	return nil
}

// SetColor stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetColor(v int32) error {
	if kv.typ != TypeColor {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypeColor)
	}
	kv.vColor = &v
	kv.value = strconv.FormatInt(int64(v), 10)
	return nil
}

// SetPointer stores v, updating both the cache and the raw string form.
func (kv *keyValue) SetPointer(v int32) error {
	if kv.typ != TypePointer {
		return fmt.Errorf("kv: cannot set Value of type %s with value of type %s", kv.typ, TypePointer)
	}
	kv.vPointer = &v
	kv.value = strconv.FormatInt(int64(v), 10)
	return nil
}
// Parent returns the parent node.
func (kv *keyValue) Parent() KeyValue { return kv.parent }

// SetParent records p as this node's parent. It does not add the node
// to p's children; use AddChild on the parent for that.
func (kv *keyValue) SetParent(p KeyValue) KeyValue {
	kv.parent = p
	return kv
}

// Children returns all child nodes.
func (kv *keyValue) Children() []KeyValue { return kv.children }

// SetChildren replaces the child list, reparenting every child to
// this node.
func (kv *keyValue) SetChildren(children ...KeyValue) KeyValue {
	for _, c := range children {
		c.SetParent(kv)
	}
	kv.children = children
	return kv
}

// Child returns the first child with the given key, or nil.
func (kv *keyValue) Child(key string) KeyValue {
	for _, c := range kv.children {
		if c.Key() == key {
			return c
		}
	}
	return nil
}

// NewChild creates an empty child attached to this node and returns
// the child (not the receiver).
func (kv *keyValue) NewChild() KeyValue {
	return NewKeyValue(TypeInvalid, "", "", kv)
}

// AddChild attaches c as a child, reparenting it, and returns the
// receiver.
func (kv *keyValue) AddChild(c KeyValue) KeyValue {
	c.SetParent(kv)
	kv.children = append(kv.children, c)
	return kv
}
// AddObject adds an Object child node and returns the receiver.
// (NewKeyValue* constructors attach themselves when given a parent.)
func (kv *keyValue) AddObject(key string) KeyValue {
	NewKeyValueObject(key, kv)
	return kv
}

// AddString adds a String child node and returns the receiver.
func (kv *keyValue) AddString(key, value string) KeyValue {
	NewKeyValueString(key, value, kv)
	return kv
}

// AddInt32 adds an Int32 child node and returns the receiver.
func (kv *keyValue) AddInt32(key, value string) KeyValue {
	NewKeyValueInt32(key, value, kv)
	return kv
}

// AddInt64 adds an Int64 child node and returns the receiver.
func (kv *keyValue) AddInt64(key, value string) KeyValue {
	NewKeyValueInt64(key, value, kv)
	return kv
}

// AddUint64 adds an Uint64 child node and returns the receiver.
func (kv *keyValue) AddUint64(key, value string) KeyValue {
	NewKeyValueUint64(key, value, kv)
	return kv
}

// AddFloat32 adds a Float32 child node and returns the receiver.
func (kv *keyValue) AddFloat32(key, value string) KeyValue {
	NewKeyValueFloat32(key, value, kv)
	return kv
}

// AddColor adds a Color child node and returns the receiver.
func (kv *keyValue) AddColor(key, value string) KeyValue {
	NewKeyValueColor(key, value, kv)
	return kv
}

// AddPointer adds a Pointer child node and returns the receiver.
func (kv *keyValue) AddPointer(key, value string) KeyValue {
	NewKeyValuePointer(key, value, kv)
	return kv
}
// MarshalText encodes the node using the text encoder.
func (kv *keyValue) MarshalText() ([]byte, error) {
	b := &bytes.Buffer{}
	if err := NewTextEncoder(b).Encode(kv); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// UnmarshalText decodes the node in place using the text decoder.
func (kv *keyValue) UnmarshalText(data []byte) error {
	return NewTextDecoder(bytes.NewReader(data)).Decode(kv)
}

// MarshalBinary encodes the node using the binary encoder.
func (kv *keyValue) MarshalBinary() ([]byte, error) {
	b := &bytes.Buffer{}
	if err := NewBinaryEncoder(b).Encode(kv); err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}

// UnmarshalBinary decodes the node in place using the binary decoder.
func (kv *keyValue) UnmarshalBinary(data []byte) error {
	return NewBinaryDecoder(bytes.NewReader(data)).Decode(kv)
}
package lbadd
import (
"fmt"
"regexp"
)
// instruction contains a command and the associated information
// (target table and raw parameters) required to execute that command.
type instruction struct {
	command command
	table string
	params []string
}
// record is a single column value on a single row.
type record []byte

// row is a single row containing multiple records.
type row []record

// result is the executor's response describing the outcome of an
// executed instruction.
type result struct {
	columns []column // columns in order of stored rows
	rows []row // the set of rows
	rowsAffected int // the number of rows affected by execution
	created int // the number of resources created
}
// exeConfig carries executor tuning options.
type exeConfig struct {
	order int // btree order used for newly created table stores
}

// executor executes instructions against the database.
type executor struct {
	db *db
	cfg exeConfig
}
// newExecutor constructs an executor over a fresh database using the
// given configuration.
func newExecutor(cfg exeConfig) *executor {
	return &executor{
		cfg: cfg,
		db: newDB(),
	}
}
// The executor takes an instruction, and coordinates the operations which are
// required to fulfill the instruction, executing these against the DB. It
// also returns the result of the instruction.
// Insert and delete are not implemented yet and report "unimplemented".
func (e *executor) execute(instr instruction) (result, error) {
	switch instr.command {
	case commandInsert:
		return result{}, fmt.Errorf("unimplemented")
	case commandSelect:
		return e.executeSelect(instr)
	case commandDelete:
		return result{}, fmt.Errorf("unimplemented")
	case commandCreateTable:
		return e.executeCreateTable(instr)
	default:
		return result{}, fmt.Errorf("invalid executor command")
	}
}
// Executes the select query instruction, returning the structure of the table
// (columns) and the rows specified in the query. Currently it only
// validates that the table exists; row retrieval is unimplemented.
func (e *executor) executeSelect(instr instruction) (result, error) {
	_, exists := e.db.tables[instr.table]
	if !exists {
		return result{}, fmt.Errorf("table %s does not exist", instr.table)
	}
	// TODO check if columns all exist in table
	// TODO btree get all method
	return result{}, fmt.Errorf("unimplemented")
}
// Executes the create table instruction, parses the columns given as arguments
// and adds a new table record to the storage map. The result reports
// one resource created.
func (e *executor) executeCreateTable(instr instruction) (result, error) {
	cols, err := parseInsertColumns(instr.params)
	if err != nil {
		return result{}, fmt.Errorf("failed to parse column params: %v", err)
	}
	e.db.tables[instr.table] = table{
		name: instr.table,
		store: newBtreeOrder(e.cfg.order),
		columns: cols,
	}
	return result{created: 1}, nil
}
// parseInsertColumns parses raw instruction parameters into column
// definitions. Each column is described by three consecutive
// parameters: name, data type and nullable flag.
func parseInsertColumns(params []string) ([]column, error) {
	// No column parameters means no columns to create.
	if len(params) == 0 {
		return []column{}, nil
	}
	// Every declared column needs exactly three parameters.
	if len(params)%3 != 0 {
		return []column{}, fmt.Errorf("invalid column parameters, each column requires a name, a type and a nullable flag")
	}
	cols := make([]column, 0, len(params)/3)
	for i := 0; i < len(params); i += 3 {
		name, typ, nullable := params[i], params[i+1], params[i+2]
		// Column names follow the same identifier rules as table names.
		if err := validateTableName(name); err != nil {
			return cols, fmt.Errorf("invalid column name: %s: %v", name, err)
		}
		colType := parseColumnType(typ)
		if colType == columnTypeInvalid {
			return cols, fmt.Errorf("found invalid column type: %s", typ)
		}
		b, err := parseBool(nullable)
		if err != nil {
			return cols, fmt.Errorf("invalid value for field is_nullable: %s", err)
		}
		cols = append(cols, column{
			name: name,
			dataType: colType,
			isNullable: b,
		})
	}
	return cols, nil
}
// The maximum number of characters allowed in a table name
const tableNameMaxLen = 32

// The validation pattern used to determine whether a table name is valid
var tableNamePattern = regexp.MustCompile(`^[a-zA-Z]+$`)

// validateTableName reports whether name can be used as a table
// identifier: nil when valid, a descriptive error otherwise.
func validateTableName(name string) error {
	switch {
	case len(name) > tableNameMaxLen:
		return fmt.Errorf("table name exceeds the character limit of %d", tableNameMaxLen)
	case !tableNamePattern.MatchString(name):
		return fmt.Errorf("table name includes invalid characters")
	default:
		return nil
	}
}
// parseBool converts exactly "true" or "false" into the matching
// boolean; any other input yields an error. Deliberately stricter
// than strconv.ParseBool.
func parseBool(str string) (bool, error) {
	if str == "true" {
		return true, nil
	}
	if str == "false" {
		return false, nil
	}
	return false, fmt.Errorf("invalid boolean field")
}
package gbrt
import (
"context"
"image"
"image/color"
"math"
"github.com/apache/beam/sdks/v2/go/pkg/beam"
"github.com/apache/beam/sdks/v2/go/pkg/beam/core/sdf"
"github.com/apache/beam/sdks/v2/go/pkg/beam/io/rtrackers/offsetrange"
"github.com/apache/beam/sdks/v2/go/pkg/beam/log"
)
//go:generate go install github.com/apache/beam/sdks/go/cmd/starcgen
//go:generate starcgen --package=gbrt --identifiers=CombinePixelsFn,generateRaySDFn,MakeImageFn,TraceFn,ToPixelColour
//go:generate go fmt
// Setup to do the static initializing of the camera instead?
// Pixel is a zero-based x,y coordinate in an image. Used as a key for
// grouping sample rays back together.
type Pixel struct {
	X, Y int
}
// generateRaySDFn is a splittable DoFn that maps XxYxSamples to the number line.
// Element Restrictions are then offset ranges into the number line, where each
// index maps to a specific sample for a specific pixel. Samples for a given
// pixel are contiguous to increase opportunities for combiner lifting to
// reduce shuffled data. See ProcessElement for the index decoding.
type generateRaySDFn struct {
}
// CreateInitialRestriction creates an offset range restriction
// covering one index per (pixel, sample) pair.
func (fn *generateRaySDFn) CreateInitialRestriction(config ImageConfig) offsetrange.Restriction {
	// Offset ranges are half-open intervals [Start, End), so the range
	// must begin at 0 — starting at 1 silently dropped the first
	// sample of the first pixel.
	return offsetrange.Restriction{
		Start: 0,
		End:   int64(config.Width*config.Height) * config.Samples,
	}
}
// SplitRestriction splits the restriction into up to 1024 even chunks
// for parallel processing.
func (fn *generateRaySDFn) SplitRestriction(_ ImageConfig, rest offsetrange.Restriction) (splits []offsetrange.Restriction) {
	return rest.EvenSplits(1024)
}

// RestrictionSize reports the size of the restriction as the number
// of sample indices it will output.
func (fn *generateRaySDFn) RestrictionSize(_ ImageConfig, rest offsetrange.Restriction) float64 {
	return rest.Size()
}

// CreateTracker wraps an offset range tracker for the restriction in
// a lock-guarded tracker.
func (fn *generateRaySDFn) CreateTracker(rest offsetrange.Restriction) *sdf.LockRTracker {
	return sdf.NewLockRTracker(offsetrange.NewTracker(rest))
}
// ProcessElement creates a sample ray vector paired with the pixel it's
// contributing to, one for each index the tracker lets it claim.
func (fn *generateRaySDFn) ProcessElement(rt *sdf.LockRTracker, cfg ImageConfig, emit func(Pixel, Vec)) error {
	// Sample aligned indexing to preserve pixel locality.
	// Increases likelihood that pixels are in the same bundle
	// improving combiner lifting effectiveness.
	stride := cfg.Width * float64(cfg.Samples)
	for i := rt.GetRestriction().(offsetrange.Restriction).Start; rt.TryClaim(i); i++ {
		// Decode the flat index: samples for one pixel are contiguous,
		// pixels run row-major.
		sample := math.Mod(float64(i), stride)
		X := math.Floor(sample / float64(cfg.Samples))
		Y := math.Floor(float64(i) / stride)
		px := Pixel{int(X), int(Y)}
		ray := subPixelJitter(px.X, px.Y, cfg)
		emit(px, ray)
	}
	return nil
}
// TraceFn traces sample rays through the scene, producing one colour
// contribution per ray.
type TraceFn struct {
	// TODO retype Vec to Position or something.
	// requires redoing doing all the math for type safety.
	Position Vec
	Bounces int64
	// TODO move this to a side input
	Word string
	scene *Scene
}

// Setup does the one time setup of the scene for this DoFn instance.
func (f *TraceFn) Setup() {
	f.scene = populateScene(f.Word)
	f.scene.Initialize()
}
// ProcessElement traces the scene along the sample ray and returns
// the colour contribution of this sample, keyed by its pixel.
// TODO retype the returned thing to a colour, instead of a vec.
func (f *TraceFn) ProcessElement(k Pixel, ray Vec) (Pixel, Vec) {
	// TODO break this up so bounces are contributed separately.
	colour := Trace(f.Position, ray, f.scene, f.Bounces)
	return k, colour
}
// CombinePixelsFn sums the per-sample colour contributions for a
// single pixel and tone-maps the averaged result.
type CombinePixelsFn struct {
	// SamplesCount is the number of samples cast per pixel, used by
	// ExtractOutput to average the accumulated colour.
	SamplesCount int
}

// Metric counters tracking how often colours are folded in via
// AddInput (lifted combines) versus merged across accumulators.
var (
	pixelAddInputCount = beam.NewCounter("gbrt", "pixelAddInput")
	pixelMergesCount = beam.NewCounter("gbrt", "pixelMerges")
)
// AddInput sums together the colour contributions for a pixel.
// Typically on the lifted side of a CombineFn.
func (fn *CombinePixelsFn) AddInput(ctx context.Context, a, b Vec) Vec {
	pixelAddInputCount.Inc(ctx, 1)
	return a.Plus(b)
}

// MergeAccumulators sums together partial colour accumulators for a
// pixel produced by different bundles.
func (fn *CombinePixelsFn) MergeAccumulators(ctx context.Context, a, b Vec) Vec {
	pixelMergesCount.Inc(ctx, 1)
	return a.Plus(b)
}
// ExtractOutput averages the accumulated sample colours for this
// pixel and applies Reinhard tone mapping, scaling the result to
// 0-255 channel values.
func (fn *CombinePixelsFn) ExtractOutput(colour Vec) Vec {
	// Attenuate the combined sample colours.
	colour = colour.Times(MonoVec(1. / float64(fn.SamplesCount))).Plus(MonoVec(14. / 241.))
	o := colour.Plus(MonoVec(1))
	// c/(c+1) per channel compresses the dynamic range, then scale to 8-bit.
	colour = Vec{colour.X / o.X, colour.Y / o.Y, colour.Z / o.Z}.Times(MonoVec(255))
	return colour
}
// PixelColour pairs a pixel with its final colour.
type PixelColour struct {
	K Pixel
	C Vec
}

// ToPixelColour combines a pixel with its colour into a PixelColour.
func ToPixelColour(k Pixel, colour Vec) PixelColour {
	return PixelColour{k, colour}
}
// MakeImageFn writes the image to wherever.
type MakeImageFn struct {
Width, Height int
Out string
}
// ProcessElement iterates over all the functions and writes
// Writes the file to the designated spot.
func (f *MakeImageFn) ProcessElement(ctx context.Context, _ beam.T, iter func(*PixelColour) bool) (bool, error) {
img := image.NewRGBA(image.Rect(0, 0, f.Width, f.Height))
var pc PixelColour
for iter(&pc) {
img.Set(f.Width-pc.K.X-1, f.Height-pc.K.Y-1, color.RGBA{uint8(pc.C.X), uint8(pc.C.Y), uint8(pc.C.Z), 255})
}
if err := writeToFile(ctx, f.Out, img); err != nil {
log.Infof(ctx, "ERROR:", err)
return false, err
}
return true, nil
}
// BeamTracer runs the ray tracer as a Apache Beam Pipeline on the runner of choice.
func BeamTracer(position Vec, img ImageConfig, word, dir string) *beam.Pipeline {
p, s := beam.NewPipelineWithRoot()
rays := generateRays(s, img)
trace := beam.ParDo(s.Scope("Trace"), &TraceFn{Position: position, Bounces: img.Bounces, Word: word}, rays)
finalPixels := beam.CombinePerKey(s.Scope("MergeRays"), &CombinePixelsFn{int(img.Samples)}, trace)
output := OutputPath(dir, word, int(img.Samples))
toImage(s, finalPixels, img, output)
return p
}
func generateRays(s beam.Scope, img ImageConfig) beam.PCollection {
s = s.Scope("GenerateRays")
cfg := beam.Create(s, img)
return beam.ParDo(s, &generateRaySDFn{}, cfg)
}
func toImage(s beam.Scope, finalPixels beam.PCollection, img ImageConfig, output string) {
s = s.Scope("ToImage")
pixelColours := beam.ParDo(s, ToPixelColour, finalPixels)
fixedKeyPixelColours := beam.AddFixedKey(s, pixelColours)
// Get everything onto a single machine again.
groupedPixelColours := beam.GroupByKey(s, fixedKeyPixelColours)
beam.ParDo(s, &MakeImageFn{Width: int(img.Width), Height: int(img.Height), Out: output}, groupedPixelColours)
} | gbrt/lib/beam.go | 0.70304 | 0.405154 | beam.go | starcoder |
package components
import (
"github.com/almerlucke/go-farsounds/farsounds"
)
// Osc uses a phasor to do a lookup
type Osc struct {
// Lookup table
*Lookup
// Phasor for lookup
*Phasor
// Amplitude of output
Amplitude float64
}
// NewOsc creates a new table lookup oscillator
func NewOsc(table []float64, phase float64, inc float64, amp float64) *Osc {
osc := new(Osc)
osc.Lookup = NewLookup(table)
osc.Phasor = NewPhasor(phase, inc)
osc.Amplitude = amp
return osc
}
// Process sample please
func (osc *Osc) Process(phaseMod float64) float64 {
return osc.Look(osc.Phasor.Process(phaseMod)) * osc.Amplitude
}
/*
Module based oscillator plus Processor interface methods
*/
// OscModule is an oscillator module
type OscModule struct {
// Inherit from BaseModule
*farsounds.BaseModule
// Inherit from Osc
*Osc
}
// NewOscModule creates a new osc module
func NewOscModule(table farsounds.WaveTable, phase float64, freq float64, amp float64, buflen int32, sr float64) *OscModule {
oscModule := new(OscModule)
oscModule.BaseModule = farsounds.NewBaseModule(3, 1, buflen, sr)
oscModule.Parent = oscModule
oscModule.Osc = NewOsc(table, phase, freq/sr, amp)
return oscModule
}
// OscModuleFactory creates new osc modules
func OscModuleFactory(settings interface{}, buflen int32, sr float64) (farsounds.Module, error) {
table := farsounds.SineTable
phase := 0.0
freq := 100.0
amp := 1.0
module := NewOscModule(table, phase, freq, amp, buflen, sr)
module.Message(settings)
return module, nil
}
// DSP fills output buffer for this osc module with samples
func (module *OscModule) DSP(timestamp int64) {
buflen := module.GetBufferLength()
sr := module.GetSampleRate()
var pmodInput []float64
var fmodInput []float64
var ampInput []float64
output := module.Outlets[0].Buffer
// Check if inlet is connected for phase modulation
if module.Inlets[0].Connections.Len() > 0 {
pmodInput = module.Inlets[0].Buffer
}
if module.Inlets[1].Connections.Len() > 0 {
fmodInput = module.Inlets[1].Buffer
}
if module.Inlets[2].Connections.Len() > 0 {
ampInput = module.Inlets[2].Buffer
}
for i := int32(0); i < buflen; i++ {
pmod := 0.0
if pmodInput != nil {
pmod = pmodInput[i]
}
if fmodInput != nil {
inc := fmodInput[i] / sr
module.Inc = inc
}
if ampInput != nil {
amp := ampInput[i]
module.Amplitude = amp
}
output[i] = module.Process(pmod)
}
}
// Message to module
func (module *OscModule) Message(message farsounds.Message) {
sr := module.GetSampleRate()
if valueMap, ok := message.(map[string]interface{}); ok {
if frequency, ok := valueMap["frequency"].(float64); ok {
module.Inc = frequency / sr
}
if phase, ok := valueMap["phase"].(float64); ok {
module.Phase = phase
}
if amplitude, ok := valueMap["amplitude"].(float64); ok {
module.Amplitude = amplitude
}
if tableName, ok := valueMap["table"].(string); ok {
table, err := farsounds.Registry.GetWaveTable(tableName)
if err == nil {
module.Lookup.Table = table
}
}
}
} | farsounds/components/osc.go | 0.681409 | 0.427695 | osc.go | starcoder |
package main
import (
"bufio"
"fmt"
"github.com/pkg/errors"
"io"
"math"
"os"
"strconv"
"strings"
)
type Point struct {
x int
y int
}
func (p *Point) Neighbours() []*Point {
return []*Point{
{x: p.x + 1, y: p.y},
{x: p.x - 1, y: p.y},
{x: p.x, y: p.y + 1},
{x: p.x, y: p.y - 1},
{x: p.x + 1, y: p.y + 1},
{x: p.x - 1, y: p.y + 1},
{x: p.x + 1, y: p.y - 1},
{x: p.x + 1, y: p.y + 1},
}
}
func (p *Point) String() string {
return fmt.Sprintf("[%d, %d]", p.x, p.y)
}
type Grid struct {
points []*Point
}
func (g *Grid) Dimensions() (*Point, *Point) {
var minX, minY, maxX, maxY int
for _, p := range g.points {
if p.x < minX {
minX = p.x
}
if p.x > maxX {
maxX = p.x
}
if p.y < minY {
minY = p.y
}
if p.y > maxY {
maxY = p.y
}
}
return &Point{x: minX, y: minY}, &Point{x: maxX, y: maxY}
}
func (g *Grid) Width() int {
topleft, botright := g.Dimensions()
return botright.x - topleft.x
}
func (g *Grid) Height() int {
topleft, botright := g.Dimensions()
return botright.y - topleft.y
}
func (g *Grid) Plot() [][]*Point {
width := g.Width()
height := g.Height()
plot := make([][]*Point, height)
for i := 0; i < height; i++ {
plot[i] = make([]*Point, width)
}
for row := 0; row < height; row++ {
for col := 0; col < width; col++ {
nearest, multiple := g.nearest(&Point{x: col, y: row})
if multiple {
plot[row][col] = nil
} else {
plot[row][col] = nearest
}
}
}
return plot
}
func (g *Grid) influence(p *Point, plot [][]*Point) int {
width := g.Width() + 2
height := g.Height() + 2
topleft, _ := g.Dimensions()
seen := make([][]bool, height)
for i := 0; i < height; i++ {
seen[i] = make([]bool, width)
}
neighbours := p.Neighbours()
i := 0
inf := 0
for i < len(neighbours) {
point := neighbours[i]
i += 1
x := point.x - topleft.x - 1
y := point.y - topleft.y - 1
if x < 0 || x > width-3 || y > height-3 || y < 0 {
continue
}
if seen[y][x] {
continue
}
plotted := plot[y][x]
seen[y][x] = true
if plotted == nil {
continue
}
if plotted.x == p.x && plotted.y == p.y {
inf += 1
neighbours = append(neighbours, point.Neighbours()...)
}
}
return inf
}
func (g *Grid) MaxInfluence() (*Point, int) {
plot := g.Plot()
// Remove points on the edges
candidates := make(map[*Point]bool)
for _, p := range g.points {
candidates[p] = true
}
for i := 0; i < len(plot); i++ {
delete(candidates, plot[i][0])
delete(candidates, plot[i][len(plot[i])-1])
}
for i := 0; i < len(plot[0]); i++ {
delete(candidates, plot[0][i])
delete(candidates, plot[len(plot)-1][i])
}
var maxPoint *Point
maxInfluence := math.MinInt32
for p := range candidates {
influence := g.influence(p, plot)
if influence > maxInfluence {
maxInfluence = influence
maxPoint = p
}
}
return maxPoint, maxInfluence
}
func (g *Grid) nearest(p *Point) (*Point, bool) {
var nearest *Point
distance := math.MaxInt32
multiple := false
for _, point := range g.points {
d := ManhattanDistance(p, point)
if d == distance {
multiple = true
}
if d < distance {
nearest = point
distance = d
multiple = false
}
}
return nearest, multiple
}
func ManhattanDistance(a *Point, b *Point) int {
return int(math.Abs(float64(a.x-b.x)) + math.Abs(float64(a.y-b.y)))
}
func SumDistanceToAll(p *Point, points []*Point) int {
s := 0
for _, point := range points {
s += ManhattanDistance(p, point)
}
return s
}
func (g *Grid) PointsWithinTotalDistance(limit int) []*Point {
result := make([]*Point, 0)
plot := g.Plot()
for row := 0; row < len(plot); row++ {
for col := 0; col < len(plot[0]); col++ {
p := &Point{x: col, y: row}
if SumDistanceToAll(p, g.points) < limit {
result = append(result, p)
}
}
}
return result
}
func parse(r io.Reader) ([]*Point, error) {
scanner := bufio.NewScanner(r)
points := make([]*Point, 0)
for scanner.Scan() {
t := scanner.Text()
tokens := strings.Split(t, ", ")
if len(tokens) != 2 {
return nil, errors.New(fmt.Sprintf("exactly 2 tokens expected, got %v", t))
}
x, err := strconv.ParseInt(tokens[0], 10, 32)
if err != nil {
return nil, errors.New(fmt.Sprintf("failed to parse x from %s", t))
}
y, err := strconv.ParseInt(tokens[1], 10, 32)
if err != nil {
return nil, errors.New(fmt.Sprintf("failed to parse x from %s", t))
}
points = append(points, &Point{
x: int(x),
y: int(y),
})
}
return points, nil
}
func main() {
points, err := parse(os.Stdin)
if err != nil {
panic(err)
}
grid := &Grid{points: points}
maxPoint, maxInfluence := grid.MaxInfluence()
fmt.Println(fmt.Sprintf("Part one: %v, influence: %v", maxPoint, maxInfluence))
fmt.Println(fmt.Sprintf("Part two: %v", len(grid.PointsWithinTotalDistance(10000))))
} | cmd/day6/day6.go | 0.712632 | 0.477067 | day6.go | starcoder |
package firmware
import (
"reflect"
"sort"
pb "chromiumos/tast/services/cros/firmware"
)
// allGBBFlags has all the GBB Flags in sorted order.
var allGBBFlags []pb.GBBFlag
func init() {
for _, v := range pb.GBBFlag_value {
allGBBFlags = append(allGBBFlags, pb.GBBFlag(v))
}
sort.Slice(allGBBFlags, func(i, j int) bool { return allGBBFlags[i] < allGBBFlags[j] })
}
// AllGBBFlags returns all the GBB Flags in order by their int values.
func AllGBBFlags() []pb.GBBFlag {
return allGBBFlags
}
// FAFTGBBFlags returns the flags that faft sets in firmware_test.py before starting a test.
func FAFTGBBFlags() []pb.GBBFlag {
return []pb.GBBFlag{pb.GBBFlag_FAFT_KEY_OVERIDE, pb.GBBFlag_ENTER_TRIGGERS_TONORM}
}
// RebootRequiredGBBFlags returns flags that require a DUT reboot after they are changed.
func RebootRequiredGBBFlags() []pb.GBBFlag {
return []pb.GBBFlag{pb.GBBFlag_FORCE_DEV_SWITCH_ON, pb.GBBFlag_DISABLE_EC_SOFTWARE_SYNC, pb.GBBFlag_FORCE_DEV_BOOT_USB}
}
// GBBFlagsStatesEqual determines if 2 GBBFlagsState are the same.
func GBBFlagsStatesEqual(a, b pb.GBBFlagsState) bool {
canonicalA := canonicalGBBFlagsState(a)
canonicalB := canonicalGBBFlagsState(b)
return reflect.DeepEqual(canonicalA.Clear, canonicalB.Clear) && reflect.DeepEqual(canonicalA.Set, canonicalB.Set)
}
// GBBFlagsChanged determines if any of the flags definitely have changed between a and b.
func GBBFlagsChanged(a, b pb.GBBFlagsState, flags []pb.GBBFlag) bool {
a = canonicalGBBFlagsState(a)
b = canonicalGBBFlagsState(b)
aClear := makeFlagsMap(a.Clear)
aSet := makeFlagsMap(a.Set)
bClear := makeFlagsMap(b.Clear)
bSet := makeFlagsMap(b.Set)
for _, f := range flags {
_, inAClear := aClear[f]
_, inASet := aSet[f]
_, inBClear := bClear[f]
_, inBSet := bSet[f]
if (inAClear && inBSet) || (inASet && inBClear) {
return true
}
}
return false
}
// makeFlagsMap converts a slice of GBBFlags into a map for easy lookup.
func makeFlagsMap(f []pb.GBBFlag) map[pb.GBBFlag]bool {
m := make(map[pb.GBBFlag]bool)
for _, f := range f {
m[f] = true
}
return m
}
// canonicalGBBFlagsState standardizes the GBBFlagsState so that they can be more readily compared. In particular, a flag in both Set and Clear will be deleted from Clear. The flags are also sorted.
func canonicalGBBFlagsState(s pb.GBBFlagsState) pb.GBBFlagsState {
setMap := makeFlagsMap(s.Set)
clearMap := makeFlagsMap(s.Clear)
var canonicalClear []pb.GBBFlag
var canonicalSet []pb.GBBFlag
for _, v := range allGBBFlags {
if _, sOk := setMap[v]; sOk {
canonicalSet = append(canonicalSet, v)
} else if _, cOk := clearMap[v]; cOk {
canonicalClear = append(canonicalClear, v)
}
}
return pb.GBBFlagsState{Clear: canonicalClear, Set: canonicalSet}
}
// GBBToggle adds `flag` to `flags` if it is missing, or removes it if it is present. Returns a new list, and does not modify the `flags` slice.
func GBBToggle(flags []pb.GBBFlag, flag pb.GBBFlag) []pb.GBBFlag {
var ret []pb.GBBFlag
found := false
for _, v := range flags {
if v == flag {
found = true
} else {
ret = append(ret, v)
}
}
if !found {
ret = append(ret, flag)
}
return ret
}
// GBBAddFlag modifies `s` to add all flags in `flags`.
func GBBAddFlag(s *pb.GBBFlagsState, flags ...pb.GBBFlag) {
s.Set = append(s.Set, flags...)
*s = canonicalGBBFlagsState(*s)
}
// CopyGBBFlags returns a new GBBFlagsState that is a copy of `s`.
func CopyGBBFlags(s pb.GBBFlagsState) *pb.GBBFlagsState {
// Depends on the behavior of canonicalGBBFlagsState to always return a copy with new Set & Clear arrays.
ret := canonicalGBBFlagsState(s)
return &ret
}
// GBBFlagsContains returns true if `s` contains the requested GBB flag `flag`.
func GBBFlagsContains(s pb.GBBFlagsState, flag pb.GBBFlag) bool {
for _, f := range s.Set {
if f == flag {
return true
}
}
return false
} | src/chromiumos/tast/common/firmware/gbb_flags.go | 0.71423 | 0.497253 | gbb_flags.go | starcoder |
package cov
import (
"fmt"
"sort"
"strings"
)
type treeFile struct {
tcm TestCoverageMap
spangroups map[SpanGroupID]SpanGroup
}
func newTreeFile() *treeFile {
return &treeFile{
tcm: TestCoverageMap{},
spangroups: map[SpanGroupID]SpanGroup{},
}
}
// Tree represents source code coverage across a tree of different processes.
// Each tree node is addressed by a Path.
type Tree struct {
initialized bool
strings Strings
spans map[Span]SpanID
testRoot Test
files map[string]*treeFile
}
func (t *Tree) init() {
if !t.initialized {
t.strings.m = map[string]StringID{}
t.spans = map[Span]SpanID{}
t.testRoot = newTest()
t.files = map[string]*treeFile{}
t.initialized = true
}
}
// SpanList is a list of Spans
type SpanList []Span
// Compare returns -1 if l comes before o, 1 if l comes after o, otherwise 0.
func (l SpanList) Compare(o SpanList) int {
switch {
case len(l) < len(o):
return -1
case len(l) > len(o):
return 1
}
for i, a := range l {
switch a.Compare(o[i]) {
case -1:
return -1
case 1:
return 1
}
}
return 0
}
// Spans returns all the spans used by the tree
func (t *Tree) Spans() SpanList {
out := make(SpanList, len(t.spans))
for span, id := range t.spans {
out[id] = span
}
return out
}
// FileSpanGroups returns all the span groups for the given file
func (t *Tree) FileSpanGroups(path string) map[SpanGroupID]SpanGroup {
return t.files[path].spangroups
}
// FileCoverage returns the TestCoverageMap for the given file
func (t *Tree) FileCoverage(path string) TestCoverageMap {
return t.files[path].tcm
}
// Tests returns the root test
func (t *Tree) Tests() *Test { return &t.testRoot }
// Strings returns the string table
func (t *Tree) Strings() Strings { return t.strings }
func (t *Tree) index(path Path) []indexedTest {
out := make([]indexedTest, len(path))
test := &t.testRoot
for i, p := range path {
name := t.strings.index(p)
test, out[i] = test.index(name)
}
return out
}
func (t *Tree) addSpans(spans []Span) SpanSet {
out := make(SpanSet, len(spans))
for _, s := range spans {
id, ok := t.spans[s]
if !ok {
id = SpanID(len(t.spans))
t.spans[s] = id
}
out[id] = struct{}{}
}
return out
}
// Add adds the coverage information cov to the tree node addressed by path.
func (t *Tree) Add(path Path, cov *Coverage) {
t.init()
tests := t.index(path)
nextFile:
// For each file with coverage...
for _, file := range cov.Files {
// Lookup or create the file's test coverage map
tf, ok := t.files[file.Path]
if !ok {
tf = newTreeFile()
t.files[file.Path] = tf
}
// Add all the spans to the map, get the span ids
spans := t.addSpans(file.Spans)
// Starting from the test root, walk down the test tree.
tcm, test := tf.tcm, t.testRoot
parent := (*TestCoverage)(nil)
for _, indexedTest := range tests {
if indexedTest.created {
if parent != nil && len(test.children) == 1 {
parent.Spans = parent.Spans.addAll(spans)
delete(parent.Children, indexedTest.index)
} else {
tc := tcm.index(indexedTest.index)
tc.Spans = spans
}
continue nextFile
}
test = test.children[indexedTest.index]
tc := tcm.index(indexedTest.index)
// If the tree node contains spans that are not in this new test,
// we need to push those spans down to all the other children.
if lower := tc.Spans.removeAll(spans); len(lower) > 0 {
// push into each child node
for i := range test.children {
child := tc.Children.index(TestIndex(i))
child.Spans = child.Spans.addAll(lower)
}
// remove from node
tc.Spans = tc.Spans.removeAll(lower)
}
// The spans that are in the new test, but are not part of the tree
// node carry propagating down.
spans = spans.removeAll(tc.Spans)
if len(spans) == 0 {
continue nextFile
}
tcm = tc.Children
parent = tc
}
}
}
// StringID is an identifier of a string
type StringID int
// Strings holds a map of string to identifier
type Strings struct {
m map[string]StringID
s []string
}
func (s *Strings) index(str string) StringID {
i, ok := s.m[str]
if !ok {
i = StringID(len(s.s))
s.s = append(s.s, str)
s.m[str] = i
}
return i
}
// TestIndex is an child test index
type TestIndex int
// Test is an collection of named sub-tests
type Test struct {
indices map[StringID]TestIndex
children []Test
}
func newTest() Test {
return Test{
indices: map[StringID]TestIndex{},
}
}
type indexedTest struct {
index TestIndex
created bool
}
func (t *Test) index(name StringID) (*Test, indexedTest) {
idx, ok := t.indices[name]
if !ok {
idx = TestIndex(len(t.children))
t.children = append(t.children, newTest())
t.indices[name] = idx
}
return &t.children[idx], indexedTest{idx, !ok}
}
type namedIndex struct {
name string
idx TestIndex
}
func (t Test) byName(s Strings) []namedIndex {
out := make([]namedIndex, len(t.children))
for id, idx := range t.indices {
out[idx] = namedIndex{s.s[id], idx}
}
sort.Slice(out, func(i, j int) bool { return out[i].name < out[j].name })
return out
}
func (t Test) String(s Strings) string {
sb := strings.Builder{}
for i, n := range t.byName(s) {
child := t.children[n.idx]
if i > 0 {
sb.WriteString(" ")
}
sb.WriteString(n.name)
if len(child.children) > 0 {
sb.WriteString(fmt.Sprintf(":%v", child.String(s)))
}
}
return "{" + sb.String() + "}"
}
// TestCoverage holds the coverage information for a deqp test group / leaf.
// For example:
// The deqp test group may hold spans that are common for all children, and may
// also optionally hold child nodes that describe coverage that differs per
// child test.
type TestCoverage struct {
Spans SpanSet
Group *SpanGroupID
Children TestCoverageMap
}
func (tc TestCoverage) String(t *Test, s Strings) string {
sb := strings.Builder{}
sb.WriteString("{")
if len(tc.Spans) > 0 {
sb.WriteString(tc.Spans.String())
}
if tc.Group != nil {
sb.WriteString(" <")
sb.WriteString(fmt.Sprintf("%v", *tc.Group))
sb.WriteString(">")
}
if len(tc.Children) > 0 {
sb.WriteString(" ")
sb.WriteString(tc.Children.String(t, s))
}
sb.WriteString("}")
return sb.String()
}
// deletable returns true if the TestCoverage provides no data.
func (tc TestCoverage) deletable() bool {
return len(tc.Spans) == 0 && tc.Group == nil && len(tc.Children) == 0
}
// TestCoverageMap is a map of TestIndex to *TestCoverage.
type TestCoverageMap map[TestIndex]*TestCoverage
// traverse performs a depth first traversal of the TestCoverage tree.
func (tcm TestCoverageMap) traverse(cb func(*TestCoverage)) {
for _, tc := range tcm {
cb(tc)
tc.Children.traverse(cb)
}
}
func (tcm TestCoverageMap) String(t *Test, s Strings) string {
sb := strings.Builder{}
for _, n := range t.byName(s) {
if child, ok := tcm[n.idx]; ok {
sb.WriteString(fmt.Sprintf("\n%v: %v", n.name, child.String(&t.children[n.idx], s)))
}
}
if sb.Len() > 0 {
sb.WriteString("\n")
}
return indent(sb.String())
}
func newTestCoverage() *TestCoverage {
return &TestCoverage{
Children: TestCoverageMap{},
Spans: SpanSet{},
}
}
func (tcm TestCoverageMap) index(idx TestIndex) *TestCoverage {
tc, ok := tcm[idx]
if !ok {
tc = newTestCoverage()
tcm[idx] = tc
}
return tc
}
// SpanID is an identifier of a span in a Tree.
type SpanID int
// SpanSet is a set of SpanIDs.
type SpanSet map[SpanID]struct{}
// SpanIDList is a list of SpanIDs
type SpanIDList []SpanID
// Compare returns -1 if l comes before o, 1 if l comes after o, otherwise 0.
func (l SpanIDList) Compare(o SpanIDList) int {
switch {
case len(l) < len(o):
return -1
case len(l) > len(o):
return 1
}
for i, a := range l {
b := o[i]
switch {
case a < b:
return -1
case a > b:
return 1
}
}
return 0
}
// List returns the full list of sorted span ids.
func (s SpanSet) List() SpanIDList {
out := make(SpanIDList, 0, len(s))
for span := range s {
out = append(out, span)
}
sort.Slice(out, func(i, j int) bool { return out[i] < out[j] })
return out
}
func (s SpanSet) String() string {
sb := strings.Builder{}
sb.WriteString(`[`)
l := s.List()
for i, span := range l {
if i > 0 {
sb.WriteString(`, `)
}
sb.WriteString(fmt.Sprintf("%v", span))
}
sb.WriteString(`]`)
return sb.String()
}
func (s SpanSet) contains(rhs SpanID) bool {
_, found := s[rhs]
return found
}
func (s SpanSet) containsAll(rhs SpanSet) bool {
for span := range rhs {
if !s.contains(span) {
return false
}
}
return true
}
func (s SpanSet) remove(rhs SpanID) SpanSet {
out := make(SpanSet, len(s))
for span := range s {
if span != rhs {
out[span] = struct{}{}
}
}
return out
}
func (s SpanSet) removeAll(rhs SpanSet) SpanSet {
out := make(SpanSet, len(s))
for span := range s {
if _, found := rhs[span]; !found {
out[span] = struct{}{}
}
}
return out
}
func (s SpanSet) add(rhs SpanID) SpanSet {
out := make(SpanSet, len(s)+1)
for span := range s {
out[span] = struct{}{}
}
out[rhs] = struct{}{}
return out
}
func (s SpanSet) addAll(rhs SpanSet) SpanSet {
out := make(SpanSet, len(s)+len(rhs))
for span := range s {
out[span] = struct{}{}
}
for span := range rhs {
out[span] = struct{}{}
}
return out
}
func (s SpanSet) invert(rhs SpanID) SpanSet {
if s.contains(rhs) {
return s.remove(rhs)
}
return s.add(rhs)
}
// SpanGroupID is an identifier of a SpanGroup.
type SpanGroupID int
// SpanGroup holds a number of spans, potentially extending from another
// SpanGroup.
type SpanGroup struct {
Spans SpanSet
Extend *SpanGroupID
}
func newSpanGroup() SpanGroup {
return SpanGroup{Spans: SpanSet{}}
}
func indent(s string) string {
return strings.TrimSuffix(strings.ReplaceAll(s, "\n", "\n "), " ")
} | tests/regres/cov/tree.go | 0.722135 | 0.44348 | tree.go | starcoder |
package stored
import (
"fmt"
"strings"
errorConfigAVA "github.com/ver13/ava/pkg/common/config/error"
errorAVA "github.com/ver13/ava/pkg/common/error"
)
const (
// DialectTypePostgreSQL is a DialectType of type PostgreSQL
DialectTypePostgreSQL DialectType = iota
// DialectTypeSqlite3 is a DialectType of type Sqlite3
DialectTypeSqlite3
// DialectTypeMySQL is a DialectType of type MySQL
DialectTypeMySQL
// DialectTypeSQLServer is a DialectType of type SQLServer
DialectTypeSQLServer
// DialectTypeUnknown is a DialectType of type Unknown
DialectTypeUnknown
)
const _DialectTypeName = "PostgreSQLSqlite3MySQLSQLServerUnknown"
var _DialectTypeMap = map[DialectType]string{
0: _DialectTypeName[0:10],
1: _DialectTypeName[10:17],
2: _DialectTypeName[17:22],
3: _DialectTypeName[22:31],
4: _DialectTypeName[31:38],
}
// String implements the Stringer interface.
func (x DialectType) String() string {
if str, ok := _DialectTypeMap[x]; ok {
return str
}
return fmt.Sprintf("DialectType(%d)", x)
}
var _DialectTypeValue = map[string]DialectType{
_DialectTypeName[0:10]: 0,
strings.ToLower(_DialectTypeName[0:10]): 0,
_DialectTypeName[10:17]: 1,
strings.ToLower(_DialectTypeName[10:17]): 1,
_DialectTypeName[17:22]: 2,
strings.ToLower(_DialectTypeName[17:22]): 2,
_DialectTypeName[22:31]: 3,
strings.ToLower(_DialectTypeName[22:31]): 3,
_DialectTypeName[31:38]: 4,
strings.ToLower(_DialectTypeName[31:38]): 4,
}
// ParseDialectType attempts to convert a string to a DialectType
func ParseDialectType(name string) (DialectType, *errorAVA.Error) {
if x, ok := _DialectTypeValue[name]; ok {
return x, nil
}
return DialectTypeUnknown, errorConfigAVA.DialectTypeUnknown(nil, fmt.Sprintf("%s is not a valid dialect type.", name))
}
// MarshalText implements the text marshaller method
func (x DialectType) MarshalText() ([]byte, *errorAVA.Error) {
return []byte(x.String()), nil
}
// UnmarshalText implements the text unmarshaller method
func (x DialectType) UnmarshalText(text []byte) (DialectType, *errorAVA.Error) {
name := string(text)
tmp, err := ParseDialectType(name)
if err != nil {
return DialectTypeUnknown, err
}
x = tmp
return tmp, nil
} | pkg/common/config/model/stored/dialectType_enum.go | 0.53437 | 0.475484 | dialectType_enum.go | starcoder |
package flattree
type iterator struct {
index uint64 // keeps track of the current index of the iterator
offset uint64 // keeps track of the current offset of the iterator
factor uint64 // keeps track of the factor of the iterator (2^depth)
}
// NewIterator will construct a new iterator at the designated position
func NewIterator(index uint64) *iterator {
i := &iterator{}
i.Seek(index)
return i
}
// Factor will return the current factor of the iterator
func (i iterator) Index() uint64 {
return i.index
}
// Factor will return the current factor of the iterator
func (i iterator) Offset() uint64 {
return i.offset
}
// Factor will return the current factor of the iterator
func (i iterator) Factor() uint64 {
return i.factor
}
// Seek will position the iterator at the designated index
func (i *iterator) Seek(index uint64) {
i.index = index
if isEven(index) {
i.offset = index / 2
i.factor = 2
} else {
i.offset = Offset(index)
i.factor = twoPow(Depth(index) + 1)
}
}
// IsLeft checks if the iterator is currently at a left node
func (i iterator) IsLeft() bool {
return isEven(i.offset)
}
// IsRight checks if the iterator is currently at a right node
func (i iterator) IsRight() bool {
return !isEven(i.offset)
}
// Prev moves the iterator to the previous item of the current node, returning its value
func (i *iterator) Prev() uint64 {
if i.offset == 0 {
return i.index
}
i.offset -= 1
i.index -= i.factor
return i.index
}
// Next moves the iterator to the next item of the current node, returning its value
func (i *iterator) Next() uint64 {
i.offset += 1
i.index += i.factor
return i.index
}
// Sibling moves the iterator to the sibling of the current node, returning its value
func (i *iterator) Sibling() uint64 {
if i.IsLeft() {
return i.Next()
}
return i.Prev()
}
// Parent moves the iterator to the parent of the current node, returning its value
func (i *iterator) Parent() uint64 {
if isEven(i.offset) {
i.index += i.factor / 2
i.offset /= 2
} else {
i.index -= i.factor / 2
i.offset = (i.offset - 1) / 2
}
i.factor *= 2
return i.index
}
// LeftSpan moves the iterator to the left span current node, returning its value
func (i *iterator) LeftSpan() uint64 {
i.index = i.index + 1 - i.factor/2
i.offset = i.index / 2
i.factor = 2
return i.index
}
// RightSpan moves the iterator to the right span current node, returning its value
func (i *iterator) RightSpan() uint64 {
i.index = i.index + i.factor/2 - 1
i.offset = i.index / 2
i.factor = 2
return i.index
}
// LeftChild moves the iterator to the left child of the current node, returning its value
func (i *iterator) LeftChild() uint64 {
if i.factor == 2 {
return i.index
}
i.factor /= 2
i.index = i.index - i.factor/2
i.offset *= 2
return i.index
}
// RightChild moves the iterator to the left child of the current node, returning its value
func (i *iterator) RightChild() uint64 {
if i.factor == 2 {
return i.index
}
i.factor /= 2
i.index = i.index + i.factor/2
i.offset = 2*i.offset + 1
return i.index
}
// twoPow returns the value of 2 raised to an exponent n, argument to the method
func twoPow(n uint64) uint64 {
return 1 << n
} | iterator.go | 0.825203 | 0.65714 | iterator.go | starcoder |
package kiwi
import (
"strconv"
"strings"
)
type Expression struct {
Terms []Term
Constant float64
}
var _ Constrainer = Expression{}
func (e Expression) GetValue() float64 {
result := e.Constant
for _, t := range e.Terms {
result += t.GetValue()
}
return result
}
func (e Expression) IsConstant() bool {
return len(e.Terms) == 0
}
func (e Expression) Multiply(coefficient float64) Expression {
terms := make([]Term, len(e.Terms))
for i := range e.Terms {
terms[i] = e.Terms[i].Multiply(coefficient)
}
return Expression{Terms: terms, Constant: e.Constant * coefficient}
}
func (e Expression) Divide(denominator float64) Expression {
return e.Multiply(1.0 / denominator)
}
func (e Expression) Negate() Expression {
return e.Multiply(-1.0)
}
func (e Expression) AddConstant(constant float64) Expression {
return Expression{Terms: e.Terms, Constant: e.Constant + constant}
}
func (e Expression) AddVariable(variable *Variable) Expression {
return e.AddTerm(Term{Variable: variable, Coefficient: 1.0})
}
func (e Expression) AddTerm(term Term) Expression {
terms := make([]Term, len(e.Terms)+1)
n := copy(terms, e.Terms)
terms[n] = term
return Expression{Terms: terms, Constant: e.Constant}
}
func (e Expression) AddExpression(other Expression) Expression {
terms := make([]Term, len(e.Terms)+len(other.Terms))
n := copy(terms, e.Terms)
copy(terms[n:], other.Terms)
return Expression{Terms: terms, Constant: e.Constant + other.Constant}
}
func (e Expression) EqualsConstant(constant float64) *Constraint {
return NewConstraint(e.AddConstant(-constant), EQ)
}
func (e Expression) EqualsVariable(variable *Variable) *Constraint {
return NewConstraint(e.AddTerm(variable.Negate()), EQ)
}
func (e Expression) EqualsTerm(term Term) *Constraint {
return NewConstraint(e.AddTerm(term.Negate()), EQ)
}
func (e Expression) EqualsExpression(expression Expression) *Constraint {
return NewConstraint(e.AddExpression(expression.Negate()), EQ)
}
func (e Expression) LessThanOrEqualsConstant(constant float64) *Constraint {
return NewConstraint(e.AddConstant(-constant), LE)
}
func (e Expression) LessThanOrEqualsVariable(variable *Variable) *Constraint {
return NewConstraint(e.AddTerm(variable.Negate()), LE)
}
func (e Expression) LessThanOrEqualsTerm(term Term) *Constraint {
return NewConstraint(e.AddTerm(term.Negate()), LE)
}
func (e Expression) LessThanOrEqualsExpression(expression Expression) *Constraint {
return NewConstraint(e.AddExpression(expression.Negate()), LE)
}
func (e Expression) GreaterThanOrEqualsConstant(constant float64) *Constraint {
return NewConstraint(e.AddConstant(-constant), GE)
}
func (e Expression) GreaterThanOrEqualsVariable(variable *Variable) *Constraint {
return NewConstraint(e.AddTerm(variable.Negate()), GE)
}
func (e Expression) GreaterThanOrEqualsTerm(term Term) *Constraint {
return NewConstraint(e.AddTerm(term.Negate()), GE)
}
func (e Expression) GreaterThanOrEqualsExpression(expression Expression) *Constraint {
return NewConstraint(e.AddExpression(expression.Negate()), GE)
}
func (e Expression) String() string {
var factors []string
for _, t := range e.Terms {
factors = append(factors, t.String())
}
factors = append(factors, strconv.FormatFloat(e.Constant, 'f', -1, 64))
return strings.Join(factors, " + ")
} | expression.go | 0.804713 | 0.433202 | expression.go | starcoder |
package strmatcher
import (
	"errors"
	"regexp"
	"sync"
	"time"

	"v2ray.com/core/common/task"
)
// Matcher is the interface to determine whether a string matches a
// predefined pattern. Implementations are created via Type.New.
type Matcher interface {
	// Match returns true if the given string matches the matcher's
	// predefined pattern.
	Match(string) bool
}
// Type is the type of the matcher. It selects the matching strategy used
// when a Matcher is created with Type.New.
type Type byte

const (
	// Full is the type of matcher that the input string must be exactly equal to the pattern.
	Full Type = iota
	// Substr is the type of matcher that the input string must contain the pattern as a sub-string.
	Substr
	// Domain is the type of matcher that the input string must be a sub-domain of the pattern, or the pattern itself.
	Domain
	// Regex is the type of matcher that the input string must match the regular-expression pattern.
	Regex
)
// New creates a new Matcher based on the given pattern.
func (t Type) New(pattern string) (Matcher, error) {
switch t {
case Full:
return fullMatcher(pattern), nil
case Substr:
return substrMatcher(pattern), nil
case Domain:
return domainMatcher(pattern), nil
case Regex:
r, err := regexp.Compile(pattern)
if err != nil {
return nil, err
}
return ®exMatcher{
pattern: r,
}, nil
default:
panic("Unknown type")
}
}
// IndexMatcher is the interface for matching with a group of matchers.
type IndexMatcher interface {
	// Match returns the index of a matcher that matches the input.
	// It returns 0 if no such matcher exists.
	Match(input string) uint32
}

// matcherEntry pairs a Matcher with the index assigned to it when it was
// added to a MatcherGroup.
type matcherEntry struct {
	m  Matcher
	id uint32
}
// MatcherGroup is an implementation of IndexMatcher.
// Empty initialization works.
type MatcherGroup struct {
	count         uint32           // number of matchers added so far; also the most recently assigned index
	fullMatcher   FullMatcherGroup // specialized storage for full (exact-equal) matchers
	domainMatcher DomainMatcherGroup // specialized storage for domain matchers
	otherMatchers []matcherEntry   // all remaining matchers (e.g. substring, regex), checked linearly
}
// Add adds a new Matcher into the MatcherGroup, and returns its index.
// The index will never be 0. Full and domain matchers are routed into
// their specialized sub-groups; everything else goes into a linear list.
func (g *MatcherGroup) Add(m Matcher) uint32 {
	g.count++
	idx := g.count

	switch matcher := m.(type) {
	case fullMatcher:
		g.fullMatcher.addMatcher(matcher, idx)
	case domainMatcher:
		g.domainMatcher.addMatcher(matcher, idx)
	default:
		entry := matcherEntry{m: m, id: idx}
		g.otherMatchers = append(g.otherMatchers, entry)
	}

	return idx
}
// Match implements IndexMatcher.Match. The full-matcher group is consulted
// first, then the domain-matcher group, and finally the remaining matchers
// in insertion order. 0 means no matcher matched.
func (g *MatcherGroup) Match(pattern string) uint32 {
	if idx := g.fullMatcher.Match(pattern); idx > 0 {
		return idx
	}
	if idx := g.domainMatcher.Match(pattern); idx > 0 {
		return idx
	}
	for _, entry := range g.otherMatchers {
		if entry.m.Match(pattern) {
			return entry.id
		}
	}
	return 0
}
// Size returns the number of matchers in the MatcherGroup
// (equal to the highest index handed out by Add).
func (g *MatcherGroup) Size() uint32 {
	return g.count
}
// cacheEntry records a cached match result together with the time it was
// stored, so that stale entries can be evicted by the periodic cleanup task.
type cacheEntry struct {
	timestamp time.Time
	result    uint32
}

// CachedMatcherGroup is an IndexMatcher with cacheable results.
// The embedded RWMutex guards the cache map.
type CachedMatcherGroup struct {
	sync.RWMutex
	group   *MatcherGroup
	cache   map[string]cacheEntry
	cleanup *task.Periodic // evicts entries older than 2 minutes, every 30 seconds
}
// NewCachedMatcherGroup creates a new CachedMatcherGroup wrapping g.
//
// NOTE(review): the cleanup task is constructed here but not visibly
// started in this file; confirm its Start (or equivalent) is invoked
// elsewhere, otherwise the cache grows without eviction.
func NewCachedMatcherGroup(g *MatcherGroup) *CachedMatcherGroup {
	r := &CachedMatcherGroup{
		group: g,
		cache: make(map[string]cacheEntry),
	}
	r.cleanup = &task.Periodic{
		Interval: time.Second * 30,
		Execute: func() error {
			r.Lock()
			defer r.Unlock()

			// Drop every cached result older than two minutes.
			expire := time.Now().Add(-1 * time.Second * 120)
			for p, e := range r.cache {
				if e.timestamp.Before(expire) {
					delete(r.cache, p)
				}
			}
			return nil
		},
	}
	return r
}
// Match implements IndexMatcher.Match.
func (g *CachedMatcherGroup) Match(pattern string) uint32 {
g.RLock()
r, f := g.cache[pattern]
g.RUnlock()
if f {
return r.result
}
mr := g.group.Match(pattern)
g.Lock()
g.cache[pattern] = cacheEntry{
result: mr,
timestamp: time.Now(),
}
g.Unlock()
return mr
} | common/strmatcher/strmatcher.go | 0.772788 | 0.476884 | strmatcher.go | starcoder |
// Based on https://github.com/golang/text/blob/master/encoding/japanese/shiftjis.go
// Package sjisreplace provides a encoder to safely convert to Shift-JIS.
package sjisreplace
import (
"unicode/utf8"
"golang.org/x/text/transform"
)
// NewEncoder returns a new transformer that is very similar to the Shift-JIS
// encoder in golang.org/x/text/encoding/japanese. That encoder returns an
// error when it finds a rune that cannot be converted to Shift-JIS; this one
// instead replaces such runes with r and continues processing.
func NewEncoder(r rune) transform.Transformer {
	return encoder{r: r}
}

// encoder is the replacing Shift-JIS transform.Transformer.
// NopResetter is embedded because the encoder keeps no cross-call state.
type encoder struct {
	transform.NopResetter
	r rune // replacement rune emitted for unmappable input
}
// Transform implements transform.Transformer. It encodes UTF-8 bytes from
// src into Shift-JIS bytes in dst. Runes with no Shift-JIS mapping are
// replaced by the configured rune e.r (written as a single byte) rather
// than causing an error. ErrShortSrc/ErrShortDst are returned when the
// input is truncated mid-rune (and !atEOF) or the output buffer fills up.
func (e encoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	r, size := rune(0), 0
loop:
	for ; nSrc < len(src); nSrc += size {
		r = rune(src[nSrc])

		// Decode a 1-byte rune.
		if r < utf8.RuneSelf {
			size = 1
		} else {
			// Decode a multi-byte rune.
			r, size = utf8.DecodeRune(src[nSrc:])
			if size == 1 {
				// All valid runes of size 1 (those below utf8.RuneSelf) were
				// handled above. We have invalid UTF-8 or we haven't seen the
				// full character yet.
				if !atEOF && !utf8.FullRune(src[nSrc:]) {
					err = transform.ErrShortSrc
					break loop
				}
			}

			// func init checks that the switch covers all tables.
			// Look the rune up in the range-partitioned encode tables; a hit
			// tagged jis0208 is emitted as a two-byte sequence via write2.
			switch {
			case encode0Low <= r && r < encode0High:
				if r = rune(encode0[r-encode0Low]); r>>tableShift == jis0208 {
					goto write2
				}
			case encode1Low <= r && r < encode1High:
				if r = rune(encode1[r-encode1Low]); r>>tableShift == jis0208 {
					goto write2
				}
			case encode2Low <= r && r < encode2High:
				if r = rune(encode2[r-encode2Low]); r>>tableShift == jis0208 {
					goto write2
				}
			case encode3Low <= r && r < encode3High:
				if r = rune(encode3[r-encode3Low]); r>>tableShift == jis0208 {
					goto write2
				}
			case encode4Low <= r && r < encode4High:
				if r = rune(encode4[r-encode4Low]); r>>tableShift == jis0208 {
					goto write2
				}
			case encode5Low <= r && r < encode5High:
				// Half-width katakana map to single bytes 0xA1-0xDF.
				if 0xff61 <= r && r < 0xffa0 {
					r -= 0xff61 - 0xa1
					goto write1
				}
				if r = rune(encode5[r-encode5Low]); r>>tableShift == jis0208 {
					goto write2
				}
			}
			// No table matched: substitute the configured replacement rune.
			r = e.r
			goto write1
		}

	write1:
		// Emit a single byte.
		if nDst >= len(dst) {
			err = transform.ErrShortDst
			break
		}
		dst[nDst] = uint8(r)
		nDst++
		continue

	write2:
		// Split the JIS 0208 table value into its two code components and
		// shift them into the Shift-JIS lead/trail byte ranges.
		j1 := uint8(r>>codeShift) & codeMask
		j2 := uint8(r) & codeMask
		if nDst+2 > len(dst) {
			err = transform.ErrShortDst
			break loop
		}
		if j1 <= 61 {
			dst[nDst+0] = 129 + j1/2
		} else {
			dst[nDst+0] = 193 + j1/2
		}
		if j1&1 == 0 {
			dst[nDst+1] = j2 + j2/63 + 64
		} else {
			dst[nDst+1] = j2 + 159
		}
		nDst += 2
		continue
	}
	return nDst, nSrc, err
}
package timestampvm
import (
"errors"
"fmt"
"time"
"github.com/ava-labs/avalanchego/ids"
"github.com/ava-labs/avalanchego/snow/choices"
"github.com/ava-labs/avalanchego/snow/consensus/snowman"
"github.com/ava-labs/avalanchego/utils/hashing"
)
// Errors returned by block verification, plus a compile-time assertion that
// *Block satisfies the snowman.Block interface.
var (
	errTimestampTooEarly = errors.New("block's timestamp is earlier than its parent's timestamp")
	errDatabaseGet       = errors.New("error while retrieving data from database")
	errTimestampTooLate  = errors.New("block's timestamp is more than 1 hour ahead of local time")

	_ snowman.Block = &Block{}
)
// Block is a block on the chain.
// Each block contains:
// 1) ParentID
// 2) Height
// 3) Timestamp
// 4) A piece of data (a string)
type Block struct {
	PrntID ids.ID        `serialize:"true" json:"parentID"` // parent's ID
	Hght   uint64        `serialize:"true" json:"height"`   // This block's height. The genesis block is at height 0.
	Tmstmp int64         `serialize:"true" json:"timestamp"` // Time this block was proposed at (Unix seconds)
	Dt     [dataLen]byte `serialize:"true" json:"data"`     // Arbitrary data

	// The unserialized fields below are (re)populated by Initialize.
	id     ids.ID         // hold this block's ID (hash of the encoded bytes)
	bytes  []byte         // this block's encoded bytes
	status choices.Status // block's status
	vm     *VM            // the underlying VM reference, mostly used for state
}
// Verify returns nil iff this block is valid.
// To be valid, it must be that:
// b.parent.Timestamp < b.Timestamp <= [local time] + 1 hour
// and b's height is exactly one more than its parent's.
// On success the block is recorded in the VM's verified-blocks map.
func (b *Block) Verify() error {
	// Get [b]'s parent
	parentID := b.Parent()
	parent, err := b.vm.getBlock(parentID)
	if err != nil {
		return errDatabaseGet
	}

	// Ensure [b]'s height comes right after its parent's height
	if expectedHeight := parent.Height() + 1; expectedHeight != b.Hght {
		return fmt.Errorf(
			"expected block to have height %d, but found %d",
			expectedHeight,
			b.Hght,
		)
	}

	// Ensure [b]'s timestamp is after its parent's timestamp.
	// (Equal timestamps are accepted: only strictly-earlier is rejected.)
	if b.Timestamp().Unix() < parent.Timestamp().Unix() {
		return errTimestampTooEarly
	}

	// Ensure [b]'s timestamp is not more than an hour
	// ahead of this node's time
	if b.Timestamp().Unix() >= time.Now().Add(time.Hour).Unix() {
		return errTimestampTooLate
	}

	// Put that block to verified blocks in memory
	b.vm.verifiedBlocks[b.ID()] = b

	return nil
}
// Initialize sets [b.bytes] to [bytes], [b.id] to hash([b.bytes]),
// [b.status] to [status] and [b.vm] to [vm].
// It repopulates the fields that are not carried by serialization.
func (b *Block) Initialize(bytes []byte, status choices.Status, vm *VM) {
	b.bytes = bytes
	b.id = hashing.ComputeHash256Array(b.bytes)
	b.status = status
	b.vm = vm
}
// Accept sets this block's status to Accepted, persists the block, records
// it as the last accepted block, removes it from the in-memory verified set,
// and commits all of these changes to the database.
func (b *Block) Accept() error {
	b.SetStatus(choices.Accepted) // Change state of this block
	blkID := b.ID()

	// Persist data
	if err := b.vm.state.PutBlock(b); err != nil {
		return err
	}

	// Set last accepted ID to this block ID
	if err := b.vm.state.SetLastAccepted(blkID); err != nil {
		return err
	}

	// Delete this block from verified blocks as it's accepted
	delete(b.vm.verifiedBlocks, b.ID())

	// Commit changes to database
	return b.vm.state.Commit()
}
// Reject sets this block's status to Rejected, persists the block, removes
// it from the in-memory verified set, and commits the change via
// b.vm.state.Commit().
func (b *Block) Reject() error {
	b.SetStatus(choices.Rejected) // Change state of this block
	if err := b.vm.state.PutBlock(b); err != nil {
		return err
	}

	// Delete this block from verified blocks as it's rejected
	delete(b.vm.verifiedBlocks, b.ID())

	// Commit changes to database
	return b.vm.state.Commit()
}
// ID returns the ID of this block
func (b *Block) ID() ids.ID { return b.id }

// Parent returns [b]'s parent's ID
func (b *Block) Parent() ids.ID { return b.PrntID }

// Height returns this block's height. The genesis block has height 0.
func (b *Block) Height() uint64 { return b.Hght }

// Timestamp returns this block's time. The genesis block has time 0.
func (b *Block) Timestamp() time.Time { return time.Unix(b.Tmstmp, 0) }

// Status returns the status of this block
func (b *Block) Status() choices.Status { return b.status }

// Bytes returns the byte repr. of this block
func (b *Block) Bytes() []byte { return b.bytes }

// Data returns the data of this block
func (b *Block) Data() [dataLen]byte { return b.Dt }

// SetStatus sets the status of this block
func (b *Block) SetStatus(status choices.Status) { b.status = status }
package iso20022
// Cheque6 is a set of characteristics related to a cheque instruction, such
// as cheque type or cheque number. All fields are optional pointers so that
// absent values are omitted from the XML serialization.
type Cheque6 struct {

	// Specifies the type of cheque to be issued.
	ChequeType *ChequeType2Code `xml:"ChqTp,omitempty"`

	// Unique and unambiguous identifier for a cheque as assigned by the agent.
	ChequeNumber *Max35Text `xml:"ChqNb,omitempty"`

	// Identifies the party that ordered the issuance of the cheque.
	ChequeFrom *NameAndAddress10 `xml:"ChqFr,omitempty"`

	// Specifies the delivery method of the cheque by the debtor's agent.
	DeliveryMethod *ChequeDeliveryMethod1Choice `xml:"DlvryMtd,omitempty"`

	// Party to whom the debtor's agent needs to send the cheque.
	DeliverTo *NameAndAddress10 `xml:"DlvrTo,omitempty"`

	// Urgency or order of importance that the originator would like the recipient of the payment instruction to apply to the processing of the payment instruction.
	InstructionPriority *Priority2Code `xml:"InstrPrty,omitempty"`

	// Date when the draft becomes payable and the debtor's account is debited.
	ChequeMaturityDate *ISODate `xml:"ChqMtrtyDt,omitempty"`

	// Identifies, in a coded form, the cheque layout, company logo and digitised signature to be used to print the cheque, as agreed between the initiating party and the debtor's agent.
	FormsCode *Max35Text `xml:"FrmsCd,omitempty"`

	// Information that needs to be printed on a cheque, used by the payer to add miscellaneous information.
	MemoField []*Max35Text `xml:"MemoFld,omitempty"`

	// Regional area in which the cheque can be cleared, when a country has no nation-wide cheque clearing organisation.
	RegionalClearingZone *Max35Text `xml:"RgnlClrZone,omitempty"`

	// Specifies the print location of the cheque.
	PrintLocation *Max35Text `xml:"PrtLctn,omitempty"`
}
// SetChequeType sets the cheque type code.
func (c *Cheque6) SetChequeType(value string) {
	c.ChequeType = (*ChequeType2Code)(&value)
}

// SetChequeNumber sets the agent-assigned cheque identifier.
func (c *Cheque6) SetChequeNumber(value string) {
	c.ChequeNumber = (*Max35Text)(&value)
}

// AddChequeFrom allocates and returns the ordering-party element.
func (c *Cheque6) AddChequeFrom() *NameAndAddress10 {
	c.ChequeFrom = new(NameAndAddress10)
	return c.ChequeFrom
}

// AddDeliveryMethod allocates and returns the delivery-method element.
func (c *Cheque6) AddDeliveryMethod() *ChequeDeliveryMethod1Choice {
	c.DeliveryMethod = new(ChequeDeliveryMethod1Choice)
	return c.DeliveryMethod
}

// AddDeliverTo allocates and returns the delivery-recipient element.
func (c *Cheque6) AddDeliverTo() *NameAndAddress10 {
	c.DeliverTo = new(NameAndAddress10)
	return c.DeliverTo
}

// SetInstructionPriority sets the processing-priority code.
func (c *Cheque6) SetInstructionPriority(value string) {
	c.InstructionPriority = (*Priority2Code)(&value)
}

// SetChequeMaturityDate sets the date the draft becomes payable.
func (c *Cheque6) SetChequeMaturityDate(value string) {
	c.ChequeMaturityDate = (*ISODate)(&value)
}

// SetFormsCode sets the coded cheque-layout identifier.
func (c *Cheque6) SetFormsCode(value string) {
	c.FormsCode = (*Max35Text)(&value)
}

// AddMemoField appends one line of free-form memo text.
func (c *Cheque6) AddMemoField(value string) {
	c.MemoField = append(c.MemoField, (*Max35Text)(&value))
}

// SetRegionalClearingZone sets the regional clearing area.
func (c *Cheque6) SetRegionalClearingZone(value string) {
	c.RegionalClearingZone = (*Max35Text)(&value)
}

// SetPrintLocation sets the print location of the cheque.
func (c *Cheque6) SetPrintLocation(value string) {
	c.PrintLocation = (*Max35Text)(&value)
}
package jwtee
import (
"time"
)
// RegisteredClaims are the IANA registered "JSON Web Token Claims"
// (see RFC 7519, section 4.1). All claims are optional.
type RegisteredClaims struct {
	// The "aud" (audience) claim identifies the recipients that the JWT is
	// intended for. Each principal intended to process the JWT MUST
	// identify itself with a value in the audience claim. If the principal
	// processing the claim does not identify itself with a value in the
	// "aud" claim when this claim is present, then the JWT MUST be
	// rejected. In the general case, the "aud" value is an array of case-
	// sensitive strings, each containing a StringOrURI value. In the
	// special case when the JWT has one audience, the "aud" value MAY be a
	// single case-sensitive string containing a StringOrURI value. The
	// interpretation of audience values is generally application specific.
	Aud []string `json:"aud,omitempty"`

	// The "exp" (expiration time) claim identifies the expiration time on
	// or after which the JWT MUST NOT be accepted for processing. The
	// processing of the "exp" claim requires that the current date/time
	// MUST be before the expiration date/time listed in the "exp" claim.
	// Implementers MAY provide for some small leeway, usually no more than
	// a few minutes, to account for clock skew. Its value MUST be a number
	// containing a NumericDate value. Use of this claim is OPTIONAL.
	Exp Timestamp `json:"exp,omitempty"`

	// The "jti" (JWT ID) claim provides a unique identifier for the JWT.
	// The identifier value MUST be assigned in a manner that ensures that
	// there is a negligible probability that the same value will be
	// accidentally assigned to a different data object; if the application
	// uses multiple issuers, collisions MUST be prevented among values
	// produced by different issuers as well. The "jti" claim can be used
	// to prevent the JWT from being replayed. The "jti" value is a case-
	// sensitive string. Use of this claim is OPTIONAL.
	Jti string `json:"jti,omitempty"`

	// The "iat" (issued at) claim identifies the time at which the JWT was
	// issued. This claim can be used to determine the age of the JWT. Its
	// value MUST be a number containing a NumericDate value. Use of this
	// claim is OPTIONAL.
	Iat Timestamp `json:"iat,omitempty"`

	// The "iss" (issuer) claim identifies the principal that issued the
	// JWT. The processing of this claim is generally application specific.
	// The "iss" value is a case-sensitive string containing a StringOrURI
	// value. Use of this claim is OPTIONAL.
	Iss string `json:"iss,omitempty"`

	// The "nbf" (not before) claim identifies the time before which the JWT
	// MUST NOT be accepted for processing. The processing of the "nbf"
	// claim requires that the current date/time MUST be after or equal to
	// the not-before date/time listed in the "nbf" claim. Implementers MAY
	// provide for some small leeway, usually no more than a few minutes, to
	// account for clock skew. Its value MUST be a number containing a
	// NumericDate value. Use of this claim is OPTIONAL.
	Nbf Timestamp `json:"nbf,omitempty"`

	// The "sub" (subject) claim identifies the principal that is the
	// subject of the JWT. The claims in a JWT are normally statements
	// about the subject. The subject value MUST either be scoped to be
	// locally unique in the context of the issuer or be globally unique.
	// The processing of this claim is generally application specific. The
	// "sub" value is a case-sensitive string containing a StringOrURI
	// value. Use of this claim is OPTIONAL.
	Sub string `json:"sub,omitempty"`
}
// IsPermittedFor returns true if claims is allowed to be used by the
// audience, i.e. the audience appears in the "aud" claim list.
func (c RegisteredClaims) IsPermittedFor(audience string) bool {
	for _, aud := range c.Aud {
		if aud == audience {
			return true
		}
	}
	return false
}
// IsIdentifiedBy returns true if claims has the given id ("jti" claim).
func (c RegisteredClaims) IsIdentifiedBy(id string) bool {
	return c.Jti == id
}

// IsRelatedTo returns true if claims has the given subject ("sub" claim).
func (c RegisteredClaims) IsRelatedTo(subject string) bool {
	return c.Sub == subject
}
// HasBeenIssuedBy returns true if the token's "iss" claim equals any of the
// given issuers.
func (c RegisteredClaims) HasBeenIssuedBy(issuers ...string) bool {
	for _, candidate := range issuers {
		if candidate == c.Iss {
			return true
		}
	}
	return false
}
// HasBeenIssuedBefore reports whether the token carries an issued-at ("iat")
// claim that lies strictly before the given time. A zero (absent) issued-at
// time yields false.
func (c RegisteredClaims) HasBeenIssuedBefore(now time.Time) bool {
	iat := c.Iat.Time()
	return !iat.IsZero() && iat.Before(now)
}
// HasBeenCrossedNotBefore reports whether the token's activation time
// ("nbf" claim) is before the given time. An absent (zero) not-before
// claim counts as already crossed.
func (c RegisteredClaims) HasBeenCrossedNotBefore(now time.Time) bool {
	nbf := c.Nbf.Time()
	return nbf.IsZero() || nbf.Before(now)
}
// IsExpired returns true if the token is expired.
func (c RegisteredClaims) IsExpired(now time.Time) bool {
expireAt := c.Exp.Time()
if expireAt.IsZero() {
return false
}
return expireAt.Before(now)
} | claims.go | 0.719876 | 0.457924 | claims.go | starcoder |
package schemax
// collection is an ordered, heterogeneous list of schema definitions
// (or strings) manipulated through panic-proof helpers.
type collection []interface{}

// len returns the length of the receiver as an int.
func (c *collection) len() int {
	return len(*c)
}

// index is a panic-proof slice indexer that returns a member based on the
// idx integer argument. This method is not thread-safe unto itself, and
// should only be called where thread protection is provided at a higher
// level.
//
// A negative index returns the element at position length+idx, clamped to
// the first element. An index at or beyond the length returns the final
// element. nil is returned for an empty collection.
func (c collection) index(idx int) interface{} {
	L := c.len()
	if L == 0 {
		return nil
	}
	if idx < 0 {
		x := L + idx
		if x < 0 {
			return c[0]
		}
		return c[x]
	}
	// BUG FIX: the original tested idx > L, so idx == L fell through to
	// c[idx] and panicked with index-out-of-range; clamp at L-1 for any
	// idx >= L instead. (The old trailing unreachable `return nil` is
	// also gone.)
	if idx >= L {
		return c[L-1]
	}
	return c[idx]
}

// isZero returns true when the receiver holds no elements.
// (The former `&r == nil` guard was dead code: the address of a value
// receiver is never nil.)
func (r collection) isZero() bool {
	return r.len() == 0
}
/*
Equal returns a boolean value indicative of the test result involving the receiver instance and x. This test is meant to ascertain if the two instances represent the same type and have the same effective values.
*/
func (r collection) equal(x collection) (equals bool) {
	// Differing lengths can never be equal.
	if r.len() != x.len() {
		return
	}

	// Two empty collections are equal; exactly one empty is not.
	if x.isZero() && r.isZero() {
		equals = true
		return
	} else if x.isZero() || r.isZero() {
		return
	}

	// Element-wise comparison: each pair must share the same concrete type
	// and compare equal via that type's Equal method (or == for strings).
	for i := 0; i < r.len(); i++ {
		switch tv := r.index(i).(type) {
		case *LDAPSyntax:
			assert, ok := x.index(i).(*LDAPSyntax)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *MatchingRule:
			assert, ok := x.index(i).(*MatchingRule)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *MatchingRuleUse:
			assert, ok := x.index(i).(*MatchingRuleUse)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *AttributeType:
			assert, ok := x.index(i).(*AttributeType)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *ObjectClass:
			assert, ok := x.index(i).(*ObjectClass)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *NameForm:
			assert, ok := x.index(i).(*NameForm)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *DITContentRule:
			assert, ok := x.index(i).(*DITContentRule)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case *DITStructureRule:
			assert, ok := x.index(i).(*DITStructureRule)
			if !ok {
				return
			}
			if !tv.Equal(assert) {
				return
			}
		case string:
			assert, ok := x.index(i).(string)
			if !ok {
				return
			}
			if tv != assert {
				return
			}
		default:
			// Unsupported element type: treat the collections as unequal.
			return
		}
	}

	equals = true
	return
}
/*
containsOID returns an element index number and a boolean value indicative of the result of a search by OID conducted upon the receiver. This method is not thread-safe unto itself, and should only be called in situations where thread protection is provided at a higher level.

dITStructureRules do not possess an OID, and thus collections of that definition type have no meaningful use of this method.
*/
func (c collection) containsOID(x OID) (index int, contains bool) {
	// -1 signals "not found" to callers.
	index = -1
	for i := 0; i < c.len(); i++ {
		el := c.index(i)
		if el == nil {
			continue
		}
		switch tv := el.(type) {
		case *LDAPSyntax:
			contains = tv.OID.Equal(x)
		case *MatchingRule:
			contains = tv.OID.Equal(x)
		case *AttributeType:
			contains = tv.OID.Equal(x)
		case *MatchingRuleUse:
			contains = tv.OID.Equal(x)
		case *ObjectClass:
			contains = tv.OID.Equal(x)
		case *DITContentRule:
			contains = tv.OID.Equal(x)
		case *NameForm:
			contains = tv.OID.Equal(x)
		default:
			// Non-OID-bearing element type: abort the search.
			return
		}
		if contains {
			index = i
			break
		}
	}
	return
}

/*
containsID returns an element index number and a boolean value indicative of the result of a search by Rule ID conducted upon the receiver. This method is not thread-safe unto itself, and should only be called in situations where thread protection is provided at a higher level.

collections of dITStructureRules are the only types that will have any meaningful use for this method.
*/
func (c collection) containsID(x interface{}) (index int, found bool) {
	index = -1
	for i := 0; i < c.len(); i++ {
		el := c.index(i)
		if el == nil {
			continue
		}
		switch tve := el.(type) {
		case *DITStructureRule:
			found = tve.ID.Equal(NewRuleID(x))
		default:
			// Only dITStructureRules carry rule IDs; abort otherwise.
			return
		}
		if found {
			index = i
			break
		}
	}
	return
}

/*
containsName returns an element index number and a boolean value indicative of the result of a search by a string name conducted upon the receiver. This method is not thread-safe unto itself, and should only be called in situations where thread protection is provided at a higher level.

ldapSyntax definitions do not possess a Name, and thus collections of that definition type have no meaningful use of this method.
*/
func (c collection) containsName(x string) (index int, found bool) {
	index = -1
	for i := 0; i < c.len(); i++ {
		el := c.index(i)
		if el == nil {
			continue
		}
		switch tve := el.(type) {
		case *MatchingRule:
			found = tve.Name.Equal(x)
		case *AttributeType:
			found = tve.Name.Equal(x)
		case *MatchingRuleUse:
			found = tve.Name.Equal(x)
		case *ObjectClass:
			found = tve.Name.Equal(x)
		case *DITStructureRule:
			found = tve.Name.Equal(x)
		case *DITContentRule:
			found = tve.Name.Equal(x)
		case *NameForm:
			found = tve.Name.Equal(x)
		default:
			// Nameless element type (e.g. ldapSyntax): abort the search.
			return
		}
		if found {
			index = i
			break
		}
	}
	return
}

// containsDesc searches a collection of *LDAPSyntax by DESC field, since
// syntaxes carry no Name. Both sides are normalized (lowercased, spaces and
// the word "syntax" stripped) before a substring comparison.
func (c collection) containsDesc(x string) (index int, found bool) {
	index = -1
	for i := 0; i < c.len(); i++ {
		el := c.index(i)
		if el == nil {
			continue
		}
		switch tve := el.(type) {
		case *LDAPSyntax:
			// Syntaxes don't really have names like all other
			// definition types, so instead we have to think a
			// bit creatively and use the syntax DESC field.
			desc := toLower(replaceAll(string(tve.Description), ` `, ``))
			desc = replaceAll(desc, `syntax`, ``)
			x = toLower(replaceAll(x, ` `, ``))
			x = replaceAll(x, `syntax`, ``)
			if contains(desc, x) {
				found = true
				break
			}
		default:
			// Only *LDAPSyntax elements are searchable by DESC.
			return
		}
		if found {
			index = i
			break
		}
	}
	return
}
/*
contains returns an element index number and a boolean value indicative of the result of a search conducted upon the receiver. This method is not thread-safe unto itself, and should only be called in situations where thread protection is provided at a higher level.

Possible search terms are:

 - string name (does not apply to collections of ldapSyntax definitions)

 - Actual OID, or string representation of OID (does not apply to collections of dITStructureRule definitions)

 - Rule ID, as an int or uint (only applies to collections of dITStructureRule definitions)
*/
func (c collection) contains(x interface{}) (index int, found bool) {
	index = -1
	if c.len() == 0 {
		return
	}

	// Dispatch to the appropriate specialized search based on the
	// dynamic type (and, for strings, the textual shape) of the term.
	switch tv := x.(type) {
	case OID:
		// Everything EXCEPT dITStructureRules
		index, found = c.containsOID(tv)
	case string:
		if isDigit(tv) {
			index, found = c.containsID(tv)
		} else if isNumericalOID(tv) {
			// Everything EXCEPT dITStructureRules
			index, found = c.containsOID(NewOID(tv))
		} else {
			// Everything EXCEPT ldapSyntaxes
			index, found = c.containsName(tv)

			// Try to satisfy LDAPSyntax searches based on
			// DESC field IF an OID is *NOT* being used.
			if !found {
				index, found = c.containsDesc(tv)
			}
		}
	case uint, int:
		// ONLY dITStructureRules
		index, found = c.containsID(tv)
	}

	return
}
/*
append assigns the provided interface value to the receiver. An error is returned if there is a type-mismatch, or if an unsupported type is provided. This method is not thread-safe unto itself, and should only be called in situations where thread protection is provided at a higher level.
*/
func (c *collection) append(x interface{}) error {
	// Reject anything that is not a known schema definition pointer.
	switch tv := x.(type) {
	case *LDAPSyntax,
		*MatchingRule,
		*Equality,
		*Substring,
		*Ordering,
		*AttributeType,
		*SuperiorAttributeType,
		*MatchingRuleUse,
		*ObjectClass,
		*StructuralObjectClass,
		*DITContentRule,
		*NameForm,
		*DITStructureRule:
		// ok
	default:
		return raise(unexpectedType, "Unsupported type (%T) for collection append", tv)
	}

	// If there is at least one element, make sure we
	// only appending new elements of the same type.
	if c.len() > 0 {
		first := c.index(0)
		if sprintf("%T", first) != sprintf("%T", x) {
			return raise(unexpectedType, "Unsupported type mixture: cannot append %T to %T-based collection", x, first)
		}
	}

	*c = append(*c, x)
	return nil
}
// attrs_oids_string renders a collection of *AttributeType instances as an
// "oids" list: the bare primary name for a single element, or a
// parenthesized, dollar-delimited list (e.g. `( cn $ sn )`) for several.
// An empty string is returned on a type mismatch.
func (r collection) attrs_oids_string() (str string) {
	switch len(r) {
	case 0:
		return
	case 1:
		assert, ok := r[0].(*AttributeType)
		if !ok {
			return
		}
		return assert.Name.Index(0)
	}

	str += `( `
	for i := 0; i < len(r); i++ {
		assert, ok := r[i].(*AttributeType)
		if !ok {
			return ``
		}
		str += assert.Name.Index(0) + ` $ `
	}
	// Trim the dangling " $" from the final separator before closing.
	if str[len(str)-3:] == ` $ ` {
		str = str[:len(str)-2]
	}
	str += `)`

	return
}
// dsrs_ids_string renders a collection of *DITStructureRule instances as an
// "ids" list: the bare rule ID for a single element, or a parenthesized,
// dollar-delimited list for several. An empty string is returned on a type
// mismatch.
func (r collection) dsrs_ids_string() (str string) {
	switch len(r) {
	case 0:
		return
	case 1:
		assert, ok := r[0].(*DITStructureRule)
		if !ok {
			return
		}
		return assert.ID.String()
	}

	str += `( `
	for i := 0; i < len(r); i++ {
		assert, ok := r[i].(*DITStructureRule)
		if !ok {
			return ``
		}
		str += assert.ID.String() + ` $ `
	}
	// Trim the dangling " $" from the final separator before closing.
	if str[len(str)-3:] == ` $ ` {
		str = str[:len(str)-2]
	}
	str += `)`

	return
}
func (r collection) ocs_oids_string() (str string) {
switch len(r) {
case 0:
return
case 1:
assert, ok := r[0].(*ObjectClass)
if !ok {
return
}
return assert.Name.Index(0)
}
str += `( `
for i := 0; i < len(r); i++ {
assert, ok := r[0].(*ObjectClass)
if !ok {
return ``
}
str += assert.Name.Index(0) + ` $ `
}
if str[len(str)-3:] == ` $ ` {
str = str[:len(str)-2]
}
str += `)`
return
} | collection.go | 0.697609 | 0.53443 | collection.go | starcoder |
package material
import (
"math"
"github.com/nicholasblaskey/raytracer/matrix"
"github.com/nicholasblaskey/raytracer/tuple"
)
// Pattern is a color pattern evaluated in its own pattern space.
type Pattern interface {
	// At evaluates the pattern at a point already in pattern space.
	At(tuple.Tuple) tuple.Tuple
	// AtObject evaluates the pattern at a world-space point on an object.
	AtObject(Object, tuple.Tuple) tuple.Tuple
	GetTransform() matrix.Mat4
	SetTransform(matrix.Mat4)
}

// Hmmm rethink this abstraction.
// Could be intersectable but that would cause an import cycle?
// Could move intersectable to its own package?
// Object is the minimal view of a scene object needed by patterns: its
// object-to-world transform.
type Object interface {
	GetTransform() matrix.Mat4
}
// WorldToPattern converts a world-space point into the pattern's own space
// by applying the inverse object transform followed by the inverse pattern
// transform.
//
// NOTE(review): this does not account for objects nested inside groups
// (there is no world-to-object chain here); confirm group handling.
func WorldToPattern(p Pattern, obj Object, worldPoint tuple.Tuple) tuple.Tuple {
	objectPoint := obj.GetTransform().Inv().Mul4x1(worldPoint)
	return p.GetTransform().Inv().Mul4x1(objectPoint)
}
// Stripe alternates Color1 and Color2 in unit-wide bands along the x axis.
type Stripe struct {
	Color1    tuple.Tuple
	Color2    tuple.Tuple
	Transform matrix.Mat4
}

// StripePattern builds a Stripe with the identity transform.
func StripePattern(c1, c2 tuple.Tuple) *Stripe {
	return &Stripe{c1, c2, matrix.Ident4()}
}

// GetTransform returns the pattern-space transform.
func (s *Stripe) GetTransform() matrix.Mat4 {
	return s.Transform
}

// SetTransform replaces the pattern-space transform.
func (s *Stripe) SetTransform(m matrix.Mat4) {
	s.Transform = m
}
// At returns Color1 when the floored x coordinate of p is even and Color2
// otherwise.
func (s *Stripe) At(p tuple.Tuple) tuple.Tuple {
	band := int(math.Floor(p[0]))
	if band%2 != 0 {
		return s.Color2
	}
	return s.Color1
}
// AtObject evaluates the stripe pattern at a world-space point on the given object.
func (s *Stripe) AtObject(obj Object, worldPoint tuple.Tuple) tuple.Tuple {
	return s.At(WorldToPattern(s, obj, worldPoint))
}
// Gradient linearly blends Color1 toward Color2 along the x axis.
type Gradient struct {
	Color1    tuple.Tuple
	Color2    tuple.Tuple
	Transform matrix.Mat4
}

// GradientPattern builds a Gradient with the identity transform.
func GradientPattern(c1, c2 tuple.Tuple) *Gradient {
	return &Gradient{c1, c2, matrix.Ident4()}
}

// GetTransform returns the pattern-space transform.
func (s *Gradient) GetTransform() matrix.Mat4 {
	return s.Transform
}

// SetTransform replaces the pattern-space transform.
func (s *Gradient) SetTransform(m matrix.Mat4) {
	s.Transform = m
}

// This doesn't seem right for spheres. Come back to this.
// The pattern only goes half way across the sphere for some reaosn.a
// At interpolates between Color1 and Color2 by the fractional part of x.
// NOTE(review): truncation via int(p[0]) differs from math.Floor for
// negative x (the commented-out line below) — confirm which is intended.
func (s *Gradient) At(p tuple.Tuple) tuple.Tuple {
	dist := s.Color2.Sub(s.Color1)
	//fraction := p[0] - math.Floor(p[0])
	fraction := p[0] - float64(int(p[0]))
	return s.Color1.Add(dist.Mul(fraction))
}

// AtObject evaluates the gradient at a world-space point on the given
// object, remapping the pattern point from [-1, 1] into [0, 1] first.
func (s *Gradient) AtObject(obj Object, worldPoint tuple.Tuple) tuple.Tuple {
	// TODO consider gradient pattern more
	p := WorldToPattern(s, obj, worldPoint)
	return s.At(p.Add(tuple.Vector(1.0, 1.0, 1.0)).Mul(1.0 / 2.0))
}
// Ring alternates Color1 and Color2 in concentric rings in the x-z plane.
type Ring struct {
	Color1    tuple.Tuple
	Color2    tuple.Tuple
	Transform matrix.Mat4
}

// RingPattern builds a Ring with the identity transform.
func RingPattern(c1, c2 tuple.Tuple) *Ring {
	return &Ring{c1, c2, matrix.Ident4()}
}

// GetTransform returns the pattern-space transform.
func (s *Ring) GetTransform() matrix.Mat4 {
	return s.Transform
}

// SetTransform replaces the pattern-space transform.
func (s *Ring) SetTransform(m matrix.Mat4) {
	s.Transform = m
}

// At selects a color by the parity of the floored radial distance
// sqrt(x^2 + z^2) from the pattern origin.
func (s *Ring) At(p tuple.Tuple) tuple.Tuple {
	if int(math.Floor(math.Sqrt(p[0]*p[0]+p[2]*p[2])))%2 == 0 {
		return s.Color1
	}
	return s.Color2
}

// AtObject evaluates the ring pattern at a world-space point on the given object.
func (s *Ring) AtObject(obj Object, worldPoint tuple.Tuple) tuple.Tuple {
	return s.At(WorldToPattern(s, obj, worldPoint))
}
// Checker alternates Color1 and Color2 in a 3D checkerboard of unit cubes.
type Checker struct {
	Color1    tuple.Tuple
	Color2    tuple.Tuple
	Transform matrix.Mat4
}

// CheckerPattern builds a Checker with the identity transform.
func CheckerPattern(c1, c2 tuple.Tuple) *Checker {
	return &Checker{c1, c2, matrix.Ident4()}
}

// GetTransform returns the pattern-space transform.
func (s *Checker) GetTransform() matrix.Mat4 {
	return s.Transform
}

// SetTransform replaces the pattern-space transform.
func (s *Checker) SetTransform(m matrix.Mat4) {
	s.Transform = m
}
// floor is math.Floor with values very close to zero snapped to exactly
// zero first, to suppress checkerboard acne caused by floating-point noise.
// TODO: revisit this epsilon workaround.
func floor(x float64) float64 {
	const eps = 0.00000000001
	if -eps < x && x < eps {
		return 0.0
	}
	return math.Floor(x)
}
// At selects a color by the parity of floor(x)+floor(y)+floor(z), using the
// epsilon-snapping floor helper to avoid acne at cube boundaries.
func (s *Checker) At(p tuple.Tuple) tuple.Tuple {
	if int(floor(p[0])+floor(p[1])+floor(p[2]))%2 == 0 {
		return s.Color1
	}
	return s.Color2
}

// AtObject evaluates the checker pattern at a world-space point on the given object.
func (s *Checker) AtObject(obj Object, worldPoint tuple.Tuple) tuple.Tuple {
	return s.At(WorldToPattern(s, obj, worldPoint))
}
package parser
import (
"github.com/ywangd/gobufrkit/table"
"github.com/ywangd/gobufrkit/deserialize/ast"
"fmt"
"math/bits"
)
// Parser creates a tree of ast.Node from an UnexpandedTemplate.
// The tree is created according to the structural information of the template.
// The goal of having this tree is to ease the deserialization process by
// providing context to every descriptor, which makes it easier to construct
// the hierarchical BUFR object.
type Parser struct {
	// The group of tables used for descriptor lookup
	tableGroup table.TableGroup

	// bit flags for states (e.g. stateOpSkipLocal) toggled by operator
	// descriptors during parsing
	states uint
}
// NewParser returns a Parser that resolves descriptors against the given
// table group.
func NewParser(tableGroup table.TableGroup) *Parser {
	return &Parser{
		tableGroup: tableGroup,
	}
}

// Parse returns the root node of the parsed template tree.
func (p *Parser) Parse(ut *table.UnexpandedTemplate) (ast.Node, error) {
	return populateMembers(p, newIdsKeeper(ut.Ids()), ast.NewBaseNode(table.RootDescriptor))
}
// setState turns on the given state flag bit(s).
func (p *Parser) setState(state uint) {
	p.states |= state
}

// unsetState turns off the given state flag bit(s).
func (p *Parser) unsetState(state uint) {
	p.states &^= state
}
// getState reports whether the given state flag is set.
// NOTE(review): OnesCount(...) == 1 is only equivalent to a plain mask test
// when `state` has exactly one bit set; confirm all state constants are
// single-bit flags.
func (p *Parser) getState(state uint) bool {
	return bits.OnesCount(p.states&state) == 1
}
// It is possible to add info about the associated fields here, i.e. number of
// associated fields and corresponding bits required. However, the significance
// values have to be deserialized from the data section. This means the information
// about associated fields still have to be tracked during deserialization process.
// Therefore it is not worthwhile to track it here.
// Also it is not right to store the significance value to the OpAssocFieldNode.
// 1. The structural tree should NOT be polluted with actual values.
// 2. If the OpAssocFieldNode is inside a replication, each of the replication
//    may have its own value which is unwieldy for a single node to store all
//    the values.

// parseElementNode consumes one ID from the keeper and returns the
// corresponding element node. While the skip-local-descriptors operator is in
// effect, table lookup is bypassed and an ad-hoc local descriptor is
// fabricated instead, so unknown local descriptors do not fail the parse.
func (p *Parser) parseElementNode(keeper *idsKeeper) (ast.Node, error) {
	var (
		descriptor table.Descriptor
		err        error
	)
	if p.getState(stateOpSkipLocal) {
		// Create an ad-hoc local descriptor so it does not error out
		descriptor = table.NewLocalDescriptor(keeper.take())
	} else {
		if descriptor, err = p.tableGroup.Lookup(keeper.take()); err != nil {
			return nil, err
		}
	}
	switch descriptor.Id() {
	case table.ID_031021:
		// Descriptor 031021 gets a dedicated node type.
		return &ast.E031021Node{BaseNode: ast.NewBaseNode(descriptor)}, nil
	default:
		return &ast.ElementNode{BaseNode: ast.NewBaseNode(descriptor),
			NotPresent: notPresent(p, descriptor)}, nil
	}
}
// parseSequenceNode consumes one sequence descriptor ID and expands it: the
// members listed in its table D entry are parsed as children of the node.
func (p *Parser) parseSequenceNode(keeper *idsKeeper) (ast.Node, error) {
	descriptor, err := p.tableGroup.Lookup(keeper.take())
	if err != nil {
		return nil, err
	}
	return populateMembers(p, newIdsKeeper(descriptor.Entry().(*table.Dentry).Members),
		&ast.SequenceNode{BaseNode: ast.NewBaseNode(descriptor)})
}
// parseDelayedReplicationNode consumes a delayed replication descriptor plus
// the X()+1 IDs that follow it as the node's members.
// NOTE(review): the +1 presumably accounts for the delayed-count element that
// precedes the X replicated descriptors — confirm against the descriptor
// layout used by idsKeeper.
func (p *Parser) parseDelayedReplicationNode(keeper *idsKeeper) (ast.Node, error) {
	descriptor, err := p.tableGroup.Lookup(keeper.take())
	if err != nil {
		return nil, err
	}
	return populateMembers(p, newIdsKeeper(keeper.takeN(descriptor.X()+1)),
		&ast.DelayedReplicationNode{BaseNode: ast.NewBaseNode(descriptor)})
}
// parseFixedReplicationNode consumes a fixed replication descriptor plus the
// X() IDs that follow it as the node's members (no delayed-count element is
// involved for fixed replication).
func (p *Parser) parseFixedReplicationNode(keeper *idsKeeper) (ast.Node, error) {
	descriptor, err := p.tableGroup.Lookup(keeper.take())
	if err != nil {
		return nil, err
	}
	return populateMembers(p, newIdsKeeper(keeper.takeN(descriptor.X())),
		&ast.FixedReplicationNode{BaseNode: ast.NewBaseNode(descriptor)})
}
// parseOperatorNode consumes one operator descriptor ID and dispatches on the
// operator kind to build the matching AST node. Operators that carry extra
// structure (new reference values, associated fields, bitmaps, statistics,
// ...) are delegated to assemble*/parse* helpers. Unimplemented and unknown
// operators return an error.
func (p *Parser) parseOperatorNode(keeper *idsKeeper) (ast.Node, error) {
	descriptor, err := p.tableGroup.Lookup(keeper.take())
	if err != nil {
		return nil, err
	}
	switch descriptor.Operator() {
	case table.OP_NBITS_OFFSET:
		return &ast.OpNbitsOffsetNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_SCALE_OFFSET:
		return &ast.OpScaleOffsetNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_NEW_REFVAL:
		return assembleOpNewRefvalNode(p, keeper, descriptor)
	case table.OP_ASSOCIATE_FIELD:
		return assembleOpAssocFieldNode(p, keeper, descriptor)
	case table.OP_INSERT_STRING:
		return &ast.OpInsertStringNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_SKIP_LOCAL:
		return assembleOpSkipLocalNode(p, keeper, descriptor)
	case table.OP_MODIFY_PACKING:
		return &ast.OpModifyPackingNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_SET_STRING_LENGTH:
		return &ast.OpSetStringLengthNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_DATA_NOT_PRESENT:
		return assembleOpDataNotPresentNode(p, keeper, descriptor)
	case table.OP_QUALITY_INFO:
		return assembleOpQaInfoNode(p, keeper, descriptor)
	case table.OP_SUBSTITUTION:
		return assembleOpSubstitutionNode(p, keeper, descriptor)
	case table.OP_FIRST_ORDER_STATS:
		return assembleOpFirstOrderStatsNode(p, keeper, descriptor)
	case table.OP_DIFFERENCE_STATS:
		return assembleOpDiffStatsNode(p, keeper, descriptor)
	case table.OP_REPLACEMENT:
		return assembleOpReplacementNode(p, keeper, descriptor)
	case table.OP_CANCEL_BACK_REFERENCE:
		return &ast.OpCancelBackRefNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case table.OP_DEFINE_BITMAP:
		// Hand the opening ID back so parseBitmapNode can re-read it.
		keeper.back()
		return p.parseBitmapNode(keeper)
	case table.OP_RECALL_BITMAP:
		// Operand 255 cancels the current bitmap; anything else recalls one.
		if descriptor.Operand() == 255 {
			return &ast.OpCancelBitmapNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
		}
		keeper.back()
		return p.parseBitmapNode(keeper)
	case table.OP_DEFINE_EVENT, table.OP_DEFINE_CONDITIONING_EVENT, table.OP_CATEGORICAL_VALUES:
		return nil, fmt.Errorf("operator not implemented: %v", descriptor)
	default:
		return nil, fmt.Errorf("unrecognised operator: %v", descriptor)
	}
}
// parseBitmapNode creates a BitmapNode by reading from the given keeper.
// Three shapes are handled: a recall (237000) needing no members, a reusable
// definition (236000), and an ad-hoc definition that starts directly with the
// bitmap content (in which case the node carries a nil descriptor).
func (p *Parser) parseBitmapNode(keeper *idsKeeper) (ast.Node, error) {
	// The opening descriptor
	descriptor, err := p.tableGroup.Lookup(keeper.take())
	if err != nil {
		return nil, err
	}
	switch {
	case descriptor.Id() == table.ID_237000:
		// A bitmap recall requires no further processing
		return &ast.BitmapNode{BaseNode: ast.NewBaseNode(descriptor)}, nil
	case descriptor.F() == 1 || descriptor.Id() == table.ID_031031:
		// An ad-hoc bitmap definition, i.e. a definition follows directly after
		// descriptors such as QA Info (222000), first order stats (224000).
		// Put the ID back so it is consumed as part of the members below;
		// the node itself gets no opening descriptor (nil).
		keeper.back()
		descriptor = nil
	case descriptor.Id() == table.ID_236000:
		// A reusable bitmap definition
	default:
		return nil, fmt.Errorf("invalid bitmap definition: %v", descriptor)
	}
	// Get all following IDs that relate to the bitmap definition, i.e. the
	// consecutive run of 031031 entries.
	ids := keeper.takeWhile(func(id table.ID) bool {
		return id == table.ID_031031
	})
	return populateMembers(p, newIdsKeeper(ids), &ast.BitmapNode{BaseNode: ast.NewBaseNode(descriptor)})
} | deserialize/parser/parser.go | 0.782829 | 0.54056 | parser.go | starcoder |
package gmono256
/**
This package implements monoalphabetic cipher for single byte, each byte may contain 256 numbers,
monoalphabetic cipher is also called simple substitution cipher.
Reference: https://en.wikipedia.org/wiki/Substitution_cipher
This is a simple encryption algorithm that can be used in short or low security requirements message transmission.
*/
import (
"encoding/base64"
"github.com/cryptowilliam/goutil/basic/gerrors"
"github.com/cryptowilliam/goutil/crypto/gcrypto"
"io"
"math/rand"
"strings"
"time"
)
// length of the codec alphabet; its elements cover the full byte range [0, 255]
const alphabetLen = 256

type (
	// alphabet is a codec alphabet: a 256-entry substitution table indexed
	// by input byte value.
	alphabet [alphabetLen]byte

	// Mono256Cipher is monoalphabetic cipher codec
	Mono256Cipher struct {
		encAlphabet *alphabet // alphabet for encoding
		decAlphabet *alphabet // alphabet for decoding (inverse of encAlphabet)
	}

	// Mono256Maker builds cipher read-write-closers backed by a Mono256Cipher.
	Mono256Maker struct {
		cipher *Mono256Cipher
	}
)
func init() {
	// Update random seeds to prevent generating the same random alphabet.
	// NOTE(review): rand.Seed is deprecated since Go 1.20, where the global
	// source is auto-seeded; kept here for older toolchains.
	rand.Seed(time.Now().Unix())
}
// Len implements sort.Interface; an alphabet always has alphabetLen entries.
func (abt *alphabet) Len() int {
	return alphabetLen
}

// Less implements sort.Interface (ascending byte order).
func (abt *alphabet) Less(i, j int) bool {
	return abt[i] < abt[j]
}

// Swap implements sort.Interface
func (abt *alphabet) Swap(i, j int) {
	abt[i], abt[j] = abt[j], abt[i]
}
// ToBase64 converts the 256-byte alphabet to a standard-encoding base64
// string (the inverse of Base64ToAlphabet).
func (abt *alphabet) ToBase64() string {
	return base64.StdEncoding.EncodeToString(abt[:])
}
// Base64ToAlphabet decodes a base64 string into a 256-byte alphabet.
// It fails when the input (after trimming surrounding whitespace) is not
// valid base64, or does not decode to exactly alphabetLen bytes.
func Base64ToAlphabet(b64s string) (*alphabet, error) {
	decoded, err := base64.StdEncoding.DecodeString(strings.TrimSpace(b64s))
	if err != nil {
		return nil, err
	}
	if len(decoded) != alphabetLen {
		return nil, gerrors.New("alphabet length %d != %d", len(decoded), alphabetLen)
	}
	var abt alphabet
	copy(abt[:], decoded)
	return &abt, nil
}
// randAlphabet generates a random substitution alphabet — a permutation of
// all 256 byte values with no fixed point (no byte maps to itself) — and
// returns it base64-encoded. The no-fixed-point constraint ensures every
// input byte is actually changed by the substitution.
//
// Rejection sampling is used: a permutation containing a fixed point is
// discarded and a new one drawn. This replaces the previous recursive retry
// with a loop, so retries cannot grow the call stack; the expected number of
// draws is small (the fraction of derangements is about 1/e).
func randAlphabet() string {
	for {
		// Random permutation of 0..255.
		intArr := rand.Perm(alphabetLen)
		abt := &alphabet{}
		ok := true
		for idx, val := range intArr {
			if idx == val {
				// Fixed point: reject this permutation and draw again.
				ok = false
				break
			}
			abt[idx] = byte(val)
		}
		if ok {
			return abt.ToBase64()
		}
	}
}
// Encrypt substitutes every byte of b in place through the encoding
// alphabet, turning plaintext into ciphertext.
// It implements the `EqLenCipher` interface and never returns an error.
func (cipher *Mono256Cipher) Encrypt(b []byte) error {
	for i := range b {
		b[i] = cipher.encAlphabet[b[i]]
	}
	return nil
}
// Decrypt substitutes every byte of b in place through the decoding
// alphabet, turning ciphertext back into plaintext.
// It implements the `EqLenCipher` interface and never returns an error.
func (cipher *Mono256Cipher) Decrypt(b []byte) error {
	for i := range b {
		b[i] = cipher.decAlphabet[b[i]]
	}
	return nil
}
// NewMono256 creates a monoalphabetic cipher codec from the given encoding
// alphabet. The decoding alphabet is derived by inverting the mapping:
// decAlphabet[encAlphabet[i]] = i.
//
// The original loop also contained a dead self-assignment
// (encAlphabet[i] = v), removed here; behavior is unchanged.
// Note: encAlphabet is assumed to be a permutation of 0..255 — if it
// contains duplicates, later entries silently overwrite earlier inverse
// entries.
func NewMono256(encAlphabet *alphabet) *Mono256Cipher {
	decAlphabet := &alphabet{}
	for i, v := range encAlphabet {
		decAlphabet[v] = byte(i)
	}
	return &Mono256Cipher{
		encAlphabet: encAlphabet,
		decAlphabet: decAlphabet,
	}
}
// NewRandKeyBase64 generates a random cipher key (a fixed-point-free random
// alphabet) encoded as a base64 string.
func NewRandKeyBase64() string {
	return randAlphabet()
}
// NewMono256Maker builds a CipherRWCMaker from a base64-encoded alphabet key
// (as produced by NewRandKeyBase64). It fails if the key does not decode to
// a valid 256-byte alphabet.
func NewMono256Maker(b64alphabet string) (gcrypto.CipherRWCMaker, error) {
	encAlphabet, err := Base64ToAlphabet(b64alphabet)
	if err != nil {
		return nil, err
	}
	return &Mono256Maker{cipher: NewMono256(encAlphabet)}, nil
}
// NonceSize returns 0: this cipher uses no nonce.
func (m *Mono256Maker) NonceSize() int {
	return 0
}
// Make wraps rwc with the equal-length mono256 cipher. The genNonce, timeout
// and nonceCodec arguments are ignored, since this cipher needs no nonce
// (see NonceSize).
func (m *Mono256Maker) Make(rwc io.ReadWriteCloser, genNonce bool, timeout *time.Duration, nonceCodec gcrypto.EqLenCipher) (gcrypto.CipherRWC, error) {
	return gcrypto.NewEqLenCipherRWC(m.cipher, rwc), nil
} | crypto/gmono256/mono256.go | 0.859472 | 0.41324 | mono256.go | starcoder |
package economist
import (
"github.com/coschain/contentos-go/app"
"github.com/coschain/contentos-go/common/constants"
. "github.com/coschain/contentos-go/dandelion"
"github.com/stretchr/testify/assert"
"math"
"math/big"
"testing"
)
// UtilTester groups the economist utility-function tests so they can run
// inside a Dandelion test environment.
type UtilTester struct {}

// Test runs all utility tests in sequence.
func (tester *UtilTester) Test(t *testing.T, d *Dandelion) {
	proportionAlgorithmTest(t)
	decay(t)
	equalZero(t)
}
// proportionAlgorithmTest checks app.ProportionAlgorithm: the big.Int
// equivalent of value*total/base, returning 0 when any operand is 0, and not
// overflowing even at MaxUint64.
func proportionAlgorithmTest(t *testing.T) {
	a := assert.New(t)
	// Matches the plain uint64 arithmetic when no overflow is possible.
	result1 := app.ProportionAlgorithm(new(big.Int).SetUint64(1), new(big.Int).SetUint64(constants.VpDecayTime), new(big.Int).SetUint64(1000)).Uint64()
	result2 := uint64(1) * uint64(1000) / uint64(constants.VpDecayTime)
	a.Equal(result1, result2)
	// Any zero operand yields zero.
	result3 := app.ProportionAlgorithm(new(big.Int).SetUint64(1), new(big.Int).SetUint64(0), new(big.Int).SetUint64(1000)).Uint64()
	a.Equal(uint64(0), result3)
	result4 := app.ProportionAlgorithm(new(big.Int).SetUint64(0), new(big.Int).SetUint64(1000), new(big.Int).SetUint64(1000)).Uint64()
	a.Equal(uint64(0), result4)
	result5 := app.ProportionAlgorithm(new(big.Int).SetUint64(1), new(big.Int).SetUint64(1000), new(big.Int).SetUint64(0)).Uint64()
	a.Equal(uint64(0), result5)
	// No overflow: 5 * MaxUint64 / MaxUint64 == 5.
	result6 := app.ProportionAlgorithm(new(big.Int).SetUint64(5), new(big.Int).SetUint64(math.MaxUint64), new(big.Int).SetUint64(math.MaxUint64)).Uint64()
	a.Equal(uint64(5), result6)
}
// decay checks app.Decay: a VpDecayTime-sized value loses exactly 1 per
// call, smaller values are floored (0 stays 0), and the argument is mutated
// in place.
func decay(t *testing.T) {
	a := assert.New(t)
	result1 := app.Decay(new(big.Int).SetUint64(constants.VpDecayTime)).Uint64()
	a.Equal(result1, uint64(constants.VpDecayTime) - uint64(1))
	result2 := app.Decay(new(big.Int).SetUint64(constants.VpDecayTime - 1)).Uint64()
	a.Equal(result2, uint64(constants.VpDecayTime - 1))
	result3 := app.Decay(new(big.Int).SetUint64(0)).Uint64()
	a.Equal(result3, uint64(0))
	// Decay modifies its argument in place, not just the returned value.
	beforeDecay := new(big.Int).SetUint64(constants.VpDecayTime)
	app.Decay(beforeDecay)
	a.Equal(beforeDecay.Uint64(), uint64(constants.VpDecayTime) - uint64(1))
}
// equalZero checks app.EqualZero for signed and unsigned zero and a non-zero
// value.
func equalZero(t *testing.T) {
	a := assert.New(t)
	a.True(app.EqualZero(new(big.Int).SetUint64(0)))
	a.True(app.EqualZero(new(big.Int).SetInt64(0)))
	a.False(app.EqualZero(new(big.Int).SetUint64(1)))
}
//func greaterThanZero(t *testing.T) {
// a := assert.New(t)
// a.True(app.GreaterThanZero(new(big.Int).SetUint64(0)))
// a.True(app.EqualZero(new(big.Int).SetInt64(0)))
// a.False(app.EqualZero(new(big.Int).SetUint64(1)))
//} | tests/economist/util.go | 0.556882 | 0.60743 | util.go | starcoder |
package should
// Exports JSON (and potentially other data structures or mocks) as an interface
import (
"fmt"
"github.com/Jeffail/gabs"
)
// StructureParser converts a raw textual body into a StructureExplorer.
type StructureParser func(rawBody string) (StructureExplorer, error)

// ParseJSONExplorer implements StructureParser, parsing a JSON string into a
// GabsExplorer
func ParseJSONExplorer(body string) (StructureExplorer, error) {
	gabs, err := gabs.ParseJSON([]byte(body))
	if err != nil {
		return nil, fmt.Errorf(FormatFailure("Error parsing JSON.", err.Error(), "", ""))
	}
	return (*GabsExplorer)(gabs), nil
}
// StructureExplorer considers generalizing *gabs.Container with the methods
// needed to test a complex data structure's content and schema.
type StructureExplorer interface {
	// String prettyprints the structure.
	String() string
	// Data gets the datum stored within a StructureExplorer
	Data() interface{}
	// IsArray returns true if the Structure has ordered values
	IsArray() bool
	// Len returns the number of items in an array structure
	Len() int
	// GetElement returns the the i-th element of an array structure (structure[i])
	GetElement(i int) StructureExplorer
	// Keys returns the names of the attributes in an object structure
	Keys() []string
	// PathExists returns true if the structure has an element at path with a non-null value
	PathExists(path string) bool
	// GetPath returns the element from an object structure by name if path exists and sets ok to true
	GetPath(path string) StructureExplorer
	// GetPathCheck returns (element, true) if path exists it's element has a
	// non-null value. Otherwise it returns (undefined, false.)
	GetPathCheck(path string) (result StructureExplorer, ok bool)
}
// func StructureHasKey(key string) bool{
// for _, k
// }
// GabsExplorer wraps a StructureExplorer over a gabs.Container
type GabsExplorer gabs.Container

// Data gets the datum stored within a StructureExplorer
func (ge *GabsExplorer) Data() interface{} {
	g := (*gabs.Container)(ge)
	return g.Data()
}
// IsArray returns true if the Structure has ordered values
func (ge *GabsExplorer) IsArray() bool {
	g := (*gabs.Container)(ge)
	_, ok := g.Data().([]interface{})
	return ok
}

// Len returns the number of items in an array structure, or -1 when the
// structure is not an array.
func (ge *GabsExplorer) Len() int {
	g := (*gabs.Container)(ge)
	result, ok := g.Data().([]interface{})
	if !ok {
		// Sentinel for "not an array"; callers should check IsArray first.
		return -1
	}
	return len(result)
}

// GetElement returns the i-th element of an array structure (structure[i])
func (ge *GabsExplorer) GetElement(i int) StructureExplorer {
	g := (*gabs.Container)(ge)
	result := g.Index(i)
	return (*GabsExplorer)(result)
}
// IsObject returns true if the Structure explorer holds (possibly) unordered named values
func (ge *GabsExplorer) IsObject() bool {
	g := (*gabs.Container)(ge)
	_, ok := g.Data().(map[string]interface{})
	return ok
}

// Keys returns the names of the values within a structure.
// Note: if the structure is not an object, the returned slice contains a
// single error-description string rather than real key names.
func (ge *GabsExplorer) Keys() (result []string) {
	g := (*gabs.Container)(ge)
	m, err := g.ChildrenMap()
	if err != nil {
		result = append(result, fmt.Sprintf("Error getting structure map: %s\n%#v", err, g))
	} else {
		for k := range m {
			result = append(result, k)
		}
	}
	return
}
// GetPathCheck returns the element at path plus an ok flag that is true only
// when the element exists and holds a non-null value.
func (ge *GabsExplorer) GetPathCheck(path string) (se StructureExplorer, ok bool) {
	se = ge.GetPath(path)
	if se.Data() != nil {
		ok = true
	}
	return
}
// PathExists returns true if the path exists and its element is non-null.
func (ge *GabsExplorer) PathExists(path string) bool {
	se := ge.GetPath(path)
	return se.Data() != nil
}

// GetPath returns the element from an object structure by name. If the
// element isn't found, the result's .Data() == nil.
func (ge *GabsExplorer) GetPath(path string) StructureExplorer {
	g := (*gabs.Container)(ge)
	result := g.Path(path)
	return (*GabsExplorer)(result)
}
// String converts the explorer to a pretty (two-space indented) string.
func (ge *GabsExplorer) String() string {
	g := (*gabs.Container)(ge)
	return g.StringIndent("", "  ")
} | should/structureExplorer.go | 0.736021 | 0.556098 | structureExplorer.go | starcoder |
package main
var input = `rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 3
rect 1x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 5
rect 1x1
rotate row y=0 by 5
rect 4x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 2
rect 1x1
rotate row y=0 by 5
rect 4x1
rotate row y=0 by 3
rect 2x1
rotate row y=0 by 5
rect 4x1
rotate row y=0 by 2
rect 1x2
rotate row y=1 by 6
rotate row y=0 by 2
rect 1x2
rotate column x=32 by 1
rotate column x=23 by 1
rotate column x=13 by 1
rotate row y=0 by 6
rotate column x=0 by 1
rect 5x1
rotate row y=0 by 2
rotate column x=30 by 1
rotate row y=1 by 20
rotate row y=0 by 18
rotate column x=13 by 1
rotate column x=10 by 1
rotate column x=7 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 17x1
rotate column x=16 by 3
rotate row y=3 by 7
rotate row y=0 by 5
rotate column x=2 by 1
rotate column x=0 by 1
rect 4x1
rotate column x=28 by 1
rotate row y=1 by 24
rotate row y=0 by 21
rotate column x=19 by 1
rotate column x=17 by 1
rotate column x=16 by 1
rotate column x=14 by 1
rotate column x=12 by 2
rotate column x=11 by 1
rotate column x=9 by 1
rotate column x=8 by 1
rotate column x=7 by 1
rotate column x=6 by 1
rotate column x=4 by 1
rotate column x=2 by 1
rotate column x=0 by 1
rect 20x1
rotate column x=47 by 1
rotate column x=40 by 2
rotate column x=35 by 2
rotate column x=30 by 2
rotate column x=10 by 3
rotate column x=5 by 3
rotate row y=4 by 20
rotate row y=3 by 10
rotate row y=2 by 20
rotate row y=1 by 16
rotate row y=0 by 9
rotate column x=7 by 2
rotate column x=5 by 2
rotate column x=3 by 2
rotate column x=0 by 2
rect 9x2
rotate column x=22 by 2
rotate row y=3 by 40
rotate row y=1 by 20
rotate row y=0 by 20
rotate column x=18 by 1
rotate column x=17 by 2
rotate column x=16 by 1
rotate column x=15 by 2
rotate column x=13 by 1
rotate column x=12 by 1
rotate column x=11 by 1
rotate column x=10 by 1
rotate column x=8 by 3
rotate column x=7 by 1
rotate column x=6 by 1
rotate column x=5 by 1
rotate column x=3 by 1
rotate column x=2 by 1
rotate column x=1 by 1
rotate column x=0 by 1
rect 19x1
rotate column x=44 by 2
rotate column x=40 by 3
rotate column x=29 by 1
rotate column x=27 by 2
rotate column x=25 by 5
rotate column x=24 by 2
rotate column x=22 by 2
rotate column x=20 by 5
rotate column x=14 by 3
rotate column x=12 by 2
rotate column x=10 by 4
rotate column x=9 by 3
rotate column x=7 by 3
rotate column x=3 by 5
rotate column x=2 by 2
rotate row y=5 by 10
rotate row y=4 by 8
rotate row y=3 by 8
rotate row y=2 by 48
rotate row y=1 by 47
rotate row y=0 by 40
rotate column x=47 by 5
rotate column x=46 by 5
rotate column x=45 by 4
rotate column x=43 by 2
rotate column x=42 by 3
rotate column x=41 by 2
rotate column x=38 by 5
rotate column x=37 by 5
rotate column x=36 by 5
rotate column x=33 by 1
rotate column x=28 by 1
rotate column x=27 by 5
rotate column x=26 by 5
rotate column x=25 by 1
rotate column x=23 by 5
rotate column x=22 by 1
rotate column x=21 by 2
rotate column x=18 by 1
rotate column x=17 by 3
rotate column x=12 by 2
rotate column x=11 by 2
rotate column x=7 by 5
rotate column x=6 by 5
rotate column x=5 by 4
rotate column x=3 by 5
rotate column x=2 by 5
rotate column x=1 by 3
rotate column x=0 by 4` | days/8/input.go | 0.628977 | 0.736282 | input.go | starcoder |
package idemix
import (
"io"
math "github.com/IBM/mathlib"
"github.com/pkg/errors"
)
// credRequestLabel is the label used in zero-knowledge proof (ZKP) to
// identify that this ZKP is a credential request.
const credRequestLabel = "credRequest"
// Credential issuance is an interactive protocol between a user and an issuer
// The issuer takes its secret and public keys and user attribute values as input
// The user takes the issuer public key and user secret as input
// The issuance protocol consists of the following steps:
// 1) The issuer sends a random nonce to the user
// 2) The user creates a Credential Request using the public key of the issuer, user secret, and the nonce as input
// The request consists of a commitment to the user secret (can be seen as a public key) and a zero-knowledge proof
// of knowledge of the user secret key
// The user sends the credential request to the issuer
// 3) The issuer verifies the credential request by verifying the zero-knowledge proof
// If the request is valid, the issuer issues a credential to the user by signing the commitment to the secret key
// together with the attribute values and sends the credential back to the user
// 4) The user verifies the issuer's signature and stores the credential that consists of
// the signature value, a randomness used to create the signature, the user secret, and the attribute values
// NewCredRequest creates a new Credential Request, the first message of the interactive credential issuance protocol
// (from user to issuer)
func (i *Idemix) NewCredRequest(sk *math.Zr, IssuerNonce []byte, ipk *IssuerPublicKey, rng io.Reader, tr Translator) (*CredRequest, error) {
return newCredRequest(sk, IssuerNonce, ipk, rng, i.Curve, tr)
}
// newCredRequest builds a credential request: a Pedersen-style commitment
// Nym = h_sk^sk together with a Fiat-Shamir non-interactive Schnorr proof of
// knowledge of sk, bound to the issuer nonce and issuer public key hash.
// The proof layout must match CredRequest.Check exactly, byte for byte.
func newCredRequest(sk *math.Zr, IssuerNonce []byte, ipk *IssuerPublicKey, rng io.Reader, curve *math.Curve, tr Translator) (*CredRequest, error) {
	// Set Nym as h_{sk}^{sk}
	HSk, err := tr.G1FromProto(ipk.HSk)
	if err != nil {
		return nil, err
	}
	Nym := HSk.Mul(sk)

	// generate a zero-knowledge proof of knowledge (ZK PoK) of the secret key

	// Sample the randomness needed for the proof
	rSk := curve.NewRandomZr(rng)

	// Step 1: First message (t-values)
	t := HSk.Mul(rSk) // t = h_{sk}^{r_{sk}}, cover Nym

	// Step 2: Compute the Fiat-Shamir hash, forming the challenge of the ZKP.
	// proofData is the data being hashed, it consists of:
	// the credential request label
	// 3 elements of G1 each taking 2*math.FieldBytes+1 bytes
	// hash of the issuer public key of length math.FieldBytes
	// issuer nonce of length math.FieldBytes
	proofData := make([]byte, len([]byte(credRequestLabel))+3*(2*curve.FieldBytes+1)+2*curve.FieldBytes)
	index := 0
	index = appendBytesString(proofData, index, credRequestLabel)
	index = appendBytesG1(proofData, index, t)
	index = appendBytesG1(proofData, index, HSk)
	index = appendBytesG1(proofData, index, Nym)
	index = appendBytes(proofData, index, IssuerNonce)
	copy(proofData[index:], ipk.Hash)
	proofC := curve.HashToZr(proofData)

	// Step 3: reply to the challenge message (s-values)
	proofS := curve.ModAdd(curve.ModMul(proofC, sk, curve.GroupOrder), rSk, curve.GroupOrder) // s = r_{sk} + C \cdot sk

	// Done
	return &CredRequest{
		Nym:         tr.G1ToProto(Nym),
		IssuerNonce: IssuerNonce,
		ProofC:      proofC.Bytes(),
		ProofS:      proofS.Bytes(),
	}, nil
}
// Check cryptographically verifies the credential request: it recomputes the
// Schnorr commitment t = h_sk^s / Nym^C from the s-value, rebuilds the
// Fiat-Shamir challenge over the exact same byte layout as newCredRequest,
// and accepts only if it matches the C-value carried by the request.
func (m *CredRequest) Check(ipk *IssuerPublicKey, curve *math.Curve, tr Translator) error {
	Nym, err := tr.G1FromProto(m.GetNym())
	if err != nil {
		return err
	}

	IssuerNonce := m.GetIssuerNonce()
	ProofC := curve.NewZrFromBytes(m.GetProofC())
	ProofS := curve.NewZrFromBytes(m.GetProofS())

	HSk, err := tr.G1FromProto(ipk.HSk)
	if err != nil {
		return err
	}

	if Nym == nil || IssuerNonce == nil || ProofC == nil || ProofS == nil {
		return errors.Errorf("one of the proof values is undefined")
	}

	// Verify Proof

	// Recompute t-values using s-values
	t := HSk.Mul(ProofS)
	t.Sub(Nym.Mul(ProofC)) // t = h_{sk}^s / Nym^C

	// Recompute challenge (must mirror the layout in newCredRequest exactly)
	proofData := make([]byte, len([]byte(credRequestLabel))+3*(2*curve.FieldBytes+1)+2*curve.FieldBytes)
	index := 0
	index = appendBytesString(proofData, index, credRequestLabel)
	index = appendBytesG1(proofData, index, t)
	index = appendBytesG1(proofData, index, HSk)
	index = appendBytesG1(proofData, index, Nym)
	index = appendBytes(proofData, index, IssuerNonce)
	copy(proofData[index:], ipk.Hash)

	if !ProofC.Equals(curve.HashToZr(proofData)) {
		return errors.Errorf("zero knowledge proof is invalid")
	}

	return nil
} | vendor/github.com/IBM/idemix/bccsp/schemes/dlog/crypto/credrequest.go | 0.727298 | 0.540621 | credrequest.go | starcoder |
package apitest
import (
"bufio"
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
"reflect"
"runtime"
"strings"
"time"
"unicode"
"unicode/utf8"
)
// TestingT is an interface to wrap the native *testing.T interface, this allows integration with GinkgoT() interface
// GinkgoT interface defined in https://github.com/onsi/ginkgo/blob/55c858784e51c26077949c81b6defb6b97b76944/ginkgo_dsl.go#L91
type TestingT interface {
Errorf(format string, args ...interface{})
Fatal(args ...interface{})
Fatalf(format string, args ...interface{})
}
// Verifier is the assertion interface allowing consumers to inject a custom assertion implementation.
// It also allows failure scenarios to be tested within apitest
type Verifier interface {
Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool
True(t TestingT, value bool, msgAndArgs ...interface{}) bool
JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool
Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool
NoError(t TestingT, err error, msgAndArgs ...interface{}) bool
}
// DefaultVerifier is a verifier that uses some code from https://github.com/stretchr/testify to perform assertions
type DefaultVerifier struct{}
var _ Verifier = DefaultVerifier{}
func (a DefaultVerifier) True(t TestingT, value bool, msgAndArgs ...interface{}) bool {
if !value {
return a.Fail(t, "Should be true", msgAndArgs...)
}
return true
}
// JSONEq asserts that two JSON strings are equivalent
func (a DefaultVerifier) JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool {
var expectedJSONAsInterface, actualJSONAsInterface interface{}
if err := json.Unmarshal([]byte(expected), &expectedJSONAsInterface); err != nil {
return a.Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...)
}
if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil {
return a.Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...)
}
return a.Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...)
}
func (a DefaultVerifier) Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
if err := validateEqualArgs(expected, actual); err != nil {
return a.Fail(t, fmt.Sprintf("Invalid operation: %#v == %#v (%s)",
expected, actual, err), msgAndArgs...)
}
if !objectsAreEqual(expected, actual) {
diff := diff(expected, actual)
expected, actual = formatUnequalValues(expected, actual)
return a.Fail(t, fmt.Sprintf("Not equal: \n"+
"expected: %s\n"+
"actual : %s%s", expected, actual, diff), msgAndArgs...)
}
return true
}
// Fail reports a failure
func (a DefaultVerifier) Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
content := []labeledContent{
{"Error Trace", strings.Join(callerInfo(), "\n\t\t\t")},
{"Error", failureMessage},
}
// Add test name if the Go version supports it
if n, ok := t.(interface {
Name() string
}); ok {
content = append(content, labeledContent{"Test", n.Name()})
}
message := messageFromMsgAndArgs(msgAndArgs...)
if len(message) > 0 {
content = append(content, labeledContent{"Messages", message})
}
t.Errorf("\n%s", ""+labeledOutput(content...))
return false
}
// NoError asserts that a function returned no error
func (a DefaultVerifier) NoError(t TestingT, err error, msgAndArgs ...interface{}) bool {
if err != nil {
return a.Fail(t, fmt.Sprintf("Received unexpected error:\n%+v", err), msgAndArgs...)
}
return true
}
func formatUnequalValues(expected, actual interface{}) (e string, a string) {
if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
return fmt.Sprintf("%T(%s)", expected, truncatingFormat(expected)),
fmt.Sprintf("%T(%s)", actual, truncatingFormat(actual))
}
switch expected.(type) {
case time.Duration:
return fmt.Sprintf("%v", expected), fmt.Sprintf("%v", actual)
}
return truncatingFormat(expected), truncatingFormat(actual)
}
func truncatingFormat(data interface{}) string {
value := fmt.Sprintf("%#v", data)
max := bufio.MaxScanTokenSize - 100 // Give us some space the type info too if needed.
if len(value) > max {
value = value[0:max] + "<... truncated>"
}
return value
}
func objectsAreEqual(expected, actual interface{}) bool {
if expected == nil || actual == nil {
return expected == actual
}
exp, ok := expected.([]byte)
if !ok {
return reflect.DeepEqual(expected, actual)
}
act, ok := actual.([]byte)
if !ok {
return false
}
if exp == nil || act == nil {
return exp == nil && act == nil
}
return bytes.Equal(exp, act)
}
func isFunction(arg interface{}) bool {
if arg == nil {
return false
}
return reflect.TypeOf(arg).Kind() == reflect.Func
}
func validateEqualArgs(expected, actual interface{}) error {
if expected == nil && actual == nil {
return nil
}
if isFunction(expected) || isFunction(actual) {
return errors.New("cannot take func type as argument")
}
return nil
}
func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
if len(msgAndArgs) == 0 || msgAndArgs == nil {
return ""
}
if len(msgAndArgs) == 1 {
msg := msgAndArgs[0]
if msgAsStr, ok := msg.(string); ok {
return msgAsStr
}
return fmt.Sprintf("%+v", msg)
}
if len(msgAndArgs) > 1 {
return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...)
}
return ""
}
func labeledOutput(content ...labeledContent) string {
longestLabel := 0
for _, v := range content {
if len(v.label) > longestLabel {
longestLabel = len(v.label)
}
}
var output string
for _, v := range content {
output += "\t" + v.label + ":" + strings.Repeat(" ", longestLabel-len(v.label)) + "\t" + indentMessageLines(v.content, longestLabel) + "\n"
}
return output
}
func indentMessageLines(message string, longestLabelLen int) string {
outBuf := new(bytes.Buffer)
for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ {
if i != 0 {
outBuf.WriteString("\n\t" + strings.Repeat(" ", longestLabelLen+1) + "\t")
}
outBuf.WriteString(scanner.Text())
}
return outBuf.String()
}
func callerInfo() []string {
var pc uintptr
var ok bool
var file string
var line int
var name string
callers := []string{}
for i := 0; ; i++ {
pc, file, line, ok = runtime.Caller(i)
if !ok {
break
}
if file == "<autogenerated>" {
break
}
f := runtime.FuncForPC(pc)
if f == nil {
break
}
name = f.Name()
if name == "testing.tRunner" {
break
}
parts := strings.Split(file, "/")
file = parts[len(parts)-1]
if len(parts) > 1 {
dir := parts[len(parts)-2]
if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" {
callers = append(callers, fmt.Sprintf("%s:%d", file, line))
}
}
segments := strings.Split(name, ".")
name = segments[len(segments)-1]
if isTest(name, "Test") ||
isTest(name, "Benchmark") ||
isTest(name, "Example") {
break
}
}
return callers
}
func isTest(name, prefix string) bool {
if !strings.HasPrefix(name, prefix) {
return false
}
if len(name) == len(prefix) { // "Test" is ok
return true
}
r, _ := utf8.DecodeRuneInString(name[len(prefix):])
return !unicode.IsLower(r)
}
type labeledContent struct {
label string
content string
}
// NoopVerifier is a verifier that does not perform verification
type NoopVerifier struct{}
func (n NoopVerifier) True(t TestingT, v bool, msgAndArgs ...interface{}) bool {
return true
}
var _ Verifier = NoopVerifier{}
// Equal does not perform any assertion and always returns true
func (n NoopVerifier) Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
return true
}
// JSONEq does not perform any assertion and always returns true
func (n NoopVerifier) JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool {
return true
}
// Fail does not perform any assertion and always returns true
func (n NoopVerifier) Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
return true
}
// NoError asserts that a function returned no error
func (n NoopVerifier) NoError(t TestingT, err error, msgAndArgs ...interface{}) bool {
return true
}
// IsSuccess is a convenience function to assert on a range of happy path status codes
var IsSuccess Assert = func(response *http.Response, request *http.Request) error {
if response.StatusCode >= 200 && response.StatusCode < 400 {
return nil
}
return fmt.Errorf("not success. Status code=%d", response.StatusCode)
}
// IsClientError is a convenience function to assert on a range of client
// error status codes (4xx).
var IsClientError Assert = func(response *http.Response, request *http.Request) error {
	code := response.StatusCode
	if code < 400 || code >= 500 {
		return fmt.Errorf("not a client error. Status code=%d", code)
	}
	return nil
}
// IsServerError is a convenience function to assert on a range of server error status codes
var IsServerError Assert = func(response *http.Response, request *http.Request) error {
if response.StatusCode >= 500 {
return nil
}
return fmt.Errorf("not a server error. Status code=%d", response.StatusCode)
} | assert.go | 0.711531 | 0.410638 | assert.go | starcoder |
package tdutil
import (
"math"
"reflect"
"github.com/maxatome/go-testdeep/internal/visited"
)
// cmpRet maps the results of "<" and ">" tests onto the conventional
// -1 / 0 / +1 comparator values.
func cmpRet(less, gt bool) int {
	if less {
		return -1
	}
	if gt {
		return 1
	}
	return 0
}

// cmpFloat compares two float64 values, ordering NaN before every ordinary
// number. Two NaNs now compare equal: the previous code returned -1 for
// cmpFloat(NaN, NaN), which breaks antisymmetry (cmp(a,b) == -cmp(b,a))
// and can confuse sorting algorithms.
func cmpFloat(a, b float64) int {
	aNaN, bNaN := math.IsNaN(a), math.IsNaN(b)
	switch {
	case aNaN && bNaN:
		return 0
	case aNaN:
		return -1
	case bNaN:
		return 1
	}
	return cmpRet(a < b, a > b)
}
// cmp returns -1 if a < b, 1 if a > b, 0 if a == b.
// It defines a total order over arbitrary reflect.Values: invalid values
// sort first, then values of differing dynamic types are ordered by their
// type-name strings, and only then are same-typed values compared by kind.
// The visited set v prevents infinite recursion through cyclic structures.
func cmp(v visited.Visited, a, b reflect.Value) int {
	// Invalid (zero) reflect.Values sort before everything else.
	if !a.IsValid() {
		if !b.IsValid() {
			return 0
		}
		return -1
	}
	if !b.IsValid() {
		return 1
	}
	// Different dynamic types: order lexicographically by type name.
	if at, bt := a.Type(), b.Type(); at != bt {
		sat, sbt := at.String(), bt.String()
		return cmpRet(sat < sbt, sat > sbt)
	}
	// Avoid looping forever on cyclic references: a pair already recorded
	// by a caller is considered equal.
	if v.Record(a, b) {
		return 0
	}
	switch a.Kind() {
	case reflect.Bool:
		// false sorts before true.
		if a.Bool() {
			if b.Bool() {
				return 0
			}
			return 1
		}
		if b.Bool() {
			return -1
		}
		return 0
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		na, nb := a.Int(), b.Int()
		return cmpRet(na < nb, na > nb)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
		reflect.Uint64, reflect.Uintptr:
		na, nb := a.Uint(), b.Uint()
		return cmpRet(na < nb, na > nb)
	case reflect.Float32, reflect.Float64:
		return cmpFloat(a.Float(), b.Float())
	case reflect.Complex64, reflect.Complex128:
		// Order by real part first, then by imaginary part.
		na, nb := a.Complex(), b.Complex()
		fa, fb := real(na), real(nb)
		if r := cmpFloat(fa, fb); r != 0 {
			return r
		}
		return cmpFloat(imag(na), imag(nb))
	case reflect.String:
		sa, sb := a.String(), b.String()
		return cmpRet(sa < sb, sa > sb)
	case reflect.Array:
		// Same type implies same length: element-wise lexicographic order.
		for i := 0; i < a.Len(); i++ {
			if r := cmp(v, a.Index(i), b.Index(i)); r != 0 {
				return r
			}
		}
		return 0
	case reflect.Slice:
		// Lexicographic order over the common prefix; ties broken by length.
		al, bl := a.Len(), b.Len()
		// NOTE(review): maxl actually holds the SHORTER of the two lengths
		// despite its name — it bounds the common-prefix loop below.
		maxl := al
		if al > bl {
			maxl = bl
		}
		for i := 0; i < maxl; i++ {
			if r := cmp(v, a.Index(i), b.Index(i)); r != 0 {
				return r
			}
		}
		return cmpRet(al < bl, al > bl)
	case reflect.Interface:
		// nil interfaces sort before non-nil ones; otherwise compare the
		// wrapped concrete values.
		if a.IsNil() {
			if b.IsNil() {
				return 0
			}
			return -1
		}
		if b.IsNil() {
			return 1
		}
		return cmp(v, a.Elem(), b.Elem())
	case reflect.Struct:
		// Field-wise comparison in declaration order.
		for i, m := 0, a.NumField(); i < m; i++ {
			if r := cmp(v, a.Field(i), b.Field(i)); r != 0 {
				return r
			}
		}
		return 0
	case reflect.Ptr:
		// Identical pointers (including two nils) are equal; a nil pointer
		// sorts before a non-nil one; otherwise compare pointees.
		if a.Pointer() == b.Pointer() {
			return 0
		}
		if a.IsNil() {
			return -1
		}
		if b.IsNil() {
			return 1
		}
		return cmp(v, a.Elem(), b.Elem())
	case reflect.Map:
		// consider shorter maps are before longer ones
		al, bl := a.Len(), b.Len()
		if r := cmpRet(al < bl, al > bl); r != 0 {
			return r
		}
		// then fallback on pointers comparison. How to say a map is
		// before another one otherwise?
		fallthrough
	case reflect.Func, reflect.Chan, reflect.UnsafePointer:
		// Reference kinds have no meaningful value order: compare the
		// underlying pointers for a stable (if arbitrary) ordering.
		pa, pb := a.Pointer(), b.Pointer()
		return cmpRet(pa < pb, pa > pb)
	default:
		panic("don't know how to compare " + a.Kind().String())
	}
}
package mbpqs
// SignatureSeqNo is the sequence number (index) of signatures and wotsKeys in channels and the root tree.
type SignatureSeqNo uint32

// Signature is the interface type for RootSignature, MsgSignature, and GrowSignature.
// NextAuthNode returns the node that authenticates the next signature in
// the channel once this signature has been verified.
type Signature interface {
	NextAuthNode(prevAuthNode ...[]byte) []byte // Retrieve the current Authentication root after this signature is verified.
}

// RootSignature holds a signature on a channel by a rootTree leaf.
type RootSignature struct {
	ctx      *Context       // Defines the MBPQS instance which was used to create the Signature.
	seqNo    SignatureSeqNo // Index of the used leaf in the roottree used for signing.
	wotsSig  []byte         // The WOTS signature over the channel root.
	authPath []byte         // The authentication path for this signature to the rootTree root node.
	rootHash []byte         // ChannelRoot which is signed.
}

// GrowSignature is a signature of the last OTS key in a chain tree over the next chain tree root node.
type GrowSignature struct {
	ctx        *Context
	wotsSig    []byte // WOTS signature over the next chain-tree root.
	rootHash   []byte // Root node of the next chain tree being signed.
	chainSeqNo uint32 // Sequence number within the signing chain tree.
	chIdx      uint32 // Channel index this growth signature belongs to.
	layer      uint32 // Chain-tree layer of the signing key.
}

// MsgSignature holds a signature on a message in a channel.
type MsgSignature struct {
	ctx        *Context       // Context defines the mbpqs instance which was used to create the signature.
	seqNo      SignatureSeqNo // Sequence number of this signature in the channel.
	drv        []byte         // Digest randomized value (r).
	wotsSig    []byte         // The WOTS signature over the channel message.
	authPath   []byte         // Autpath to the rootSignature.
	chainSeqNo uint32         // Sequence number of this signature in the used chain tree.
	chIdx      uint32         // In which channel the signature.
	layer      uint32         // From which chainTree layer the key comes.
}
// GetSignedRoot returns the root hash field from the the RootSignature.
// This is the channel root signed by this signature.
func (rtSig *RootSignature) GetSignedRoot() []byte {
	return rtSig.rootHash
}

// NextAuthNode returns the authentication node for the next signature:
// for a RootSignature that is simply the signed channel root.
func (rtSig *RootSignature) NextAuthNode(prevAuthNode ...[]byte) []byte {
	return rtSig.GetSignedRoot()
}

// NextAuthNode returns the growSig root hash field from the GrowSignature.
// This is the chainTree root signed in this signature.
func (gs *GrowSignature) NextAuthNode(prevAuthNode ...[]byte) []byte {
	return gs.rootHash
}

// NextAuthNode returns the authentication node for the next signature from
// the current MsgSignature.
// NOTE(review): when this is the last message in its chain the caller MUST
// supply prevAuthNode — calling without it panics on the [0] index.
func (ms MsgSignature) NextAuthNode(prevAuthNode ...[]byte) []byte {
	if ms.lastMsgInChain() {
		return prevAuthNode[0]
	}
	return ms.authPath
}
// Return whether the msgsignature is the last one for the current chainTree.
func (ms *MsgSignature) lastMsgInChain() bool {
if ms.chainSeqNo == (ms.ctx.chainTreeHeight(ms.layer) - 1) {
return true
}
return false
} | signatures.go | 0.717012 | 0.450118 | signatures.go | starcoder |
package common
import (
"fmt"
"math"
"math/big"
"reflect"
"unicode"
"unicode/utf8"
)
// Numeric

// Float64GetExponent extracts the unbiased base-2 exponent from the
// IEEE-754 bit representation of v.
func Float64GetExponent(v float64) int {
	const (
		expShift = 52
		expMask  = 0x7ff
		expBias  = 1023
	)
	bits := math.Float64bits(v)
	return int((bits>>expShift)&expMask) - expBias
}
// Lookup tables mapping leftover bit counts (after whole 16-bit or 10-bit
// groups have been removed) to the extra digits they can hold, and leftover
// decimal digits to the bits they require.
var (
	bitsToHexDigitsTable     = []int{0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4}
	bitsToDecimalDigitsTable = []int{0, 1, 1, 1, 1, 2, 2, 2, 3, 3}
	decimalDigitsToBitsTable = []int{0, 4, 7}
)

// BitsToDecimalDigits returns how many decimal digits are guaranteed to fit
// in bitCount bits (3 digits per 10 bits, plus a table lookup for the rest).
func BitsToDecimalDigits(bitCount int) int {
	whole, rest := bitCount/10, bitCount%10
	return whole*3 + bitsToDecimalDigitsTable[rest]
}

// DecimalDigitsToBits returns the bits needed to represent digitCount
// decimal digits (10 bits per triad of digits, plus a lookup for the rest).
func DecimalDigitsToBits(digitCount int) int {
	triads, rest := digitCount/3, digitCount%3
	return triads*10 + decimalDigitsToBitsTable[rest]
}

// BitsToHexDigits returns how many hex digits fit in bitCount bits
// (4 per 16 bits, plus a lookup for the leftover bits).
func BitsToHexDigits(bitCount int) int {
	whole, rest := bitCount/16, bitCount&15
	return whole*4 + bitsToHexDigitsTable[rest]
}

// HexDigitsToBits returns the bits required for digitCount hex digits.
func HexDigitsToBits(digitCount int) int {
	return digitCount * 4
}
// Architecture

// Is64BitArch reports whether this binary targets a 64-bit architecture.
// oneIf64Bit is declared elsewhere — presumably set to 1 via
// build-constrained files on 64-bit platforms; TODO confirm.
func Is64BitArch() bool {
	return oneIf64Bit == 1
}
// Reflect

// IsFieldExported reports whether a struct field name is exported, i.e.
// whether its first rune is upper case.
func IsFieldExported(name string) bool {
	first, _ := utf8.DecodeRuneInString(name)
	return unicode.IsUpper(first)
}
// IsNegativeFloat reports whether the sign bit of value is set (true for
// -0.0 and negative NaN payloads as well, unlike value < 0).
func IsNegativeFloat(value float64) bool {
	return math.Float64bits(value)&Float64SignMask != 0
}

// HasQuietNanBitSet64 reports whether the quiet-NaN bit is set in the
// float64 representation of value.
func HasQuietNanBitSet64(value float64) bool {
	return math.Float64bits(value)&Float64QuietNanBit != 0
}

// HasQuietNanBitSet32 reports whether the quiet-NaN bit is set in the
// float32 representation of value.
func HasQuietNanBitSet32(value float32) bool {
	return math.Float32bits(value)&Float32QuietNanBit != 0
}
// Float64FromFloat32Bits widens a raw float32 bit pattern to float64,
// preserving the quiet/signaling distinction of NaNs.
func Float64FromFloat32Bits(bits uint32) float64 {
	// Need to do this manually because go tends to lose the quiet bit
	if bits&Float32SpecialMask == Float32SpecialMask && bits&Float32FractionMask != 0 {
		if bits&Float32QuietNanBit == 0 {
			return Float64SignalingNan
		}
		return Float64QuietNan
	}
	return float64(math.Float32frombits(bits))
}

// Float64FromFloat16Bits converts 16-bit float bits to float64 by placing
// them in the upper half of a float32 pattern.
// NOTE(review): this treats the input as bfloat16 (truncated float32), not
// IEEE binary16, despite the "Float16" name — confirm intended format.
func Float64FromFloat16Bits(bits uint16) float64 {
	return Float64FromFloat32Bits(uint32(bits) << 16)
}

// Float32FromFloat16Bits converts 16-bit float bits to float32, preserving
// NaN quietness. The Bfloat16* masks indicate the input is bfloat16.
func Float32FromFloat16Bits(bits uint16) float32 {
	// Need to do this manually because go tends to lose the quiet bit
	if bits&Bfloat16SpecialMask == Bfloat16SpecialMask && bits&Bfloat16FractionMask != 0 {
		if bits&Bfloat16QuietNanBit == 0 {
			return Float32SignalingNan
		}
		return Float32QuietNan
	}
	return math.Float32frombits(uint32(bits) << 16)
}
func IsBigIntNegative(value *big.Int) bool {
return value.Cmp(BigInt0) < 0
}
// IsPointer reports whether v's kind is flagged as pointer-like in the
// package's kindProperties table.
func IsPointer(v reflect.Value) bool {
	return kindProperties[v.Kind()]&KindPropertyPointer != 0
}

// IsLengthable reports whether len() is meaningful for v's kind per the
// kindProperties table.
func IsLengthable(v reflect.Value) bool {
	return kindProperties[v.Kind()]&KindPropertyLengthable != 0
}

// IsNullable reports whether v's kind can hold nil per the kindProperties
// table (calling IsNil on other kinds would panic).
func IsNullable(v reflect.Value) bool {
	return kindProperties[v.Kind()]&KindPropertyNullable != 0
}

// IsNil reports whether v is an invalid Value or a nil value of a
// nullable kind; it never panics, unlike reflect.Value.IsNil.
func IsNil(v reflect.Value) bool {
	if !v.IsValid() {
		return true
	}
	return IsNullable(v) && v.IsNil()
}
// NameOf returns the printable type name of x (e.g. "int", "*pkg.T").
// An untyped nil yields "<nil>", because fmt renders the nil reflect.Type.
func NameOf(x interface{}) string {
	return fmt.Sprint(reflect.TypeOf(x))
}
// Utility

// CloneBytes returns an independent copy of bytes; mutating the result
// never affects the input. A nil input yields a non-nil empty slice.
func CloneBytes(bytes []byte) []byte {
	dup := make([]byte, len(bytes))
	copy(dup, bytes)
	return dup
}
// requiresLowercaseAdjust marks the byte values ('A'..'Z') that need the
// +('a'-'A') adjustment when ASCII-lowercasing.
var requiresLowercaseAdjust [256]bool

func init() {
	for i := 'A'; i <= 'Z'; i++ {
		requiresLowercaseAdjust[i] = true
	}
}

// ASCIIBytesToLower lowercases ASCII A-Z in place, ignoring locale and
// leaving every other byte (including UTF-8 continuation bytes) untouched.
// It reports whether any byte was changed.
func ASCIIBytesToLower(bytes []byte) (didChange bool) {
	const lowercaseAdjust = byte('a' - 'A')
	for i, b := range bytes {
		if !requiresLowercaseAdjust[b] {
			continue
		}
		bytes[i] += lowercaseAdjust
		didChange = true
	}
	return
}

// ASCIIToLower returns s with ASCII A-Z lowercased; if nothing needed to
// change, s itself is returned.
func ASCIIToLower(s string) string {
	b := []byte(s)
	if !ASCIIBytesToLower(b) {
		return s
	}
	return string(b)
}
// ByteCountToElementCount returns how many whole elements of
// elementBitWidth bits fit into byteCount bytes.
func ByteCountToElementCount(elementBitWidth int, byteCount uint64) uint64 {
	totalBits := byteCount * 8
	return totalBits / uint64(elementBitWidth)
}

// ElementCountToByteCount returns the bytes needed to hold elementCount
// elements of elementBitWidth bits, rounding up for a trailing partial
// byte in the 1-bit (bit-array) case.
func ElementCountToByteCount(elementBitWidth int, elementCount uint64) uint64 {
	totalBits := elementCount * uint64(elementBitWidth)
	byteCount := totalBits / 8
	if elementBitWidth == 1 && elementCount%8 != 0 {
		byteCount++
	}
	return byteCount
}
// Get status of the first installed NVIDIA graphics card. Uses NVML to communicate with it.
package main
import (
"log"
"time"
"gitlab.com/Drauthius/gpu-monitoring-tools/bindings/go/nvml"
)
// GraphicCardResult is one sample of NVIDIA graphics card status.
// All utilization fields are fractions in [0, 1]; Temperature uses the
// unit requested from GraphicCardStats.
type GraphicCardResult struct {
	Temperature  float64 // The temperature in the desired unit.
	FanSpeed     float64 // The intended fan speed in percent (0-1).
	GPU          float64 // The GPU utilization in percent (0-1).
	Memory       float64 // The memory utilization in percent (0-1).
	Encoder      float64 // The encoder utilization in percent (0-1).
	Decoder      float64 // The decoder utilization in percent (0-1).
	PCIBandwidth float64 // The PCIe bandwidth utilization in percent (0-1).
}
// GraphicCardStats polls the first installed NVIDIA device via NVML at the
// given interval and sends one GraphicCardResult per sample on results.
// unit selects the temperature scale: "F", "K", or anything else for
// Celsius. The loop stops — and results is closed — when quit is signalled
// or any NVML call fails.
func GraphicCardStats(interval time.Duration, unit string, results chan GraphicCardResult, quit chan bool) {
	defer close(results)
	if err := nvml.Init(); err != nil {
		log.Println("Failed to initiate NVML:", err)
		return
	}
	defer nvml.Shutdown()
	count, err := nvml.GetDeviceCount()
	if err != nil {
		log.Println("Failed to get device count:", err)
		return
	} else if count < 1 {
		log.Println("Found no NVIDIA device.")
		return
	}
	// Only the first card (index 0) is monitored.
	device, err := nvml.NewDevice(0)
	if err != nil {
		log.Println("Failed to create device:", err)
		return
	}
	for {
		status, err := device.Status()
		if err != nil {
			log.Println("Failed to get device status:", err)
			return
		}
		// NVML reports Celsius; convert to the requested unit.
		var temperature float64
		switch unit {
		case "F":
			temperature = float64(*status.Temperature)*9/5 + 32
		case "K":
			temperature = float64(*status.Temperature) + 273.15
		default:
			// Any unrecognized unit falls through to Celsius.
			fallthrough
		case "C":
			temperature = float64(*status.Temperature)
		}
		// NVML utilization values are percentages (0-100); scale to 0-1.
		results <- GraphicCardResult{
			Temperature:  temperature,
			FanSpeed:     float64(*status.FanSpeed) / 100,
			GPU:          float64(*status.Utilization.GPU) / 100,
			Memory:       float64(*status.Utilization.Memory) / 100,
			Encoder:      float64(*status.Utilization.Encoder) / 100,
			Decoder:      float64(*status.Utilization.Decoder) / 100,
			PCIBandwidth: float64(*status.PCI.Throughput.RX+*status.PCI.Throughput.TX) / float64(*device.PCI.Bandwidth),
		}
		// Sleep until the next sample, or exit promptly on quit.
		select {
		case <-time.After(interval):
		case <-quit:
			return
		}
	}
}
package glmki3d
import (
"errors"
"github.com/go-gl/gl/v3.3-core/gl"
"github.com/mki1967/go-mki3d/mki3d"
)
// DataShaderTr is a binding between data and a shader for triangles
type DataShaderTr struct {
	ShaderPtr *ShaderTr        // pointer to the GL shader program structure
	VAO       uint32           // GL Vertex Array Object
	BufPtr    *GLBufTr         // pointer to GL buffers structure
	UniPtr    *GLUni           // pointer to GL uniform parameters structure
	Mki3dPtr  *mki3d.Mki3dType // pointer to original Mki3dType data
}

// DataShaderSeg is a binding between data and a shader for segments
type DataShaderSeg struct {
	ShaderPtr *ShaderSeg       // pointer to the GL shader program structure
	VAO       uint32           // GL Vertex Array Object
	BufPtr    *GLBufSeg        // pointer to GL buffers structure
	UniPtr    *GLUni           // pointer to GL uniform parameters structure
	Mki3dPtr  *mki3d.Mki3dType // pointer to original Mki3dType data
}

// DataShader bundles the segment, triangle, and optional texture bindings
// of a single mki3d model, together with redundant links to the shared
// mki3d data and uniforms.
type DataShader struct {
	Mki3dPtr *mki3d.Mki3dType // redundant link to mki3d data
	UniPtr   *GLUni           // redundant link to uniforms
	SegPtr   *DataShaderSeg   // segment binding
	TrPtr    *DataShaderTr    // triangle binding
	TexPtr   *DataShaderTex   // texture binding (may be nil)
}
// DeleteData releases the GL buffers, vertex arrays, and texture elements
// owned by dsPtr. Call it when the DataShader is no longer needed; the
// structure must not be drawn afterwards.
func (dsPtr *DataShader) DeleteData() {
	dsPtr.SegPtr.BufPtr.Delete()
	gl.DeleteVertexArrays(1, &dsPtr.SegPtr.VAO)
	dsPtr.TrPtr.BufPtr.Delete()
	gl.DeleteVertexArrays(1, &dsPtr.TrPtr.VAO)
	// TexPtr is optional — models without textures have a nil TexPtr.
	if dsPtr.TexPtr != nil {
		for _, texEl := range dsPtr.TexPtr.DataElements {
			texEl.Delete()
		}
	}
}
// MakeDataShader creates a DataShader with all required substructures
// (uniforms, GL buffers, and segment/triangle/texture bindings) for the
// given Shader and mki3d.Mki3dType.
func MakeDataShader(sPtr *Shader, mPtr *mki3d.Mki3dType) (dsPtr *DataShader, err error) {
	// MakeGLUni returns no error; the old `if err != nil` check right
	// after it tested the still-nil named return and was dead code.
	uPtr := MakeGLUni() // uniforms
	bPtr, err := MakeGLBuf(mPtr) // data buffers
	if err != nil {
		return nil, err
	}
	segPtr, err := MakeDataShaderSeg(sPtr.SegPtr, bPtr.SegPtr, uPtr, mPtr)
	if err != nil {
		return nil, err
	}
	trPtr, err := MakeDataShaderTr(sPtr.TrPtr, bPtr.TrPtr, uPtr, mPtr)
	if err != nil {
		return nil, err
	}
	texPtr, err := MakeDataShaderTex(sPtr.TexPtr, uPtr, mPtr)
	if err != nil {
		return nil, err
	}
	ds := DataShader{SegPtr: segPtr, TrPtr: trPtr, TexPtr: texPtr, Mki3dPtr: mPtr, UniPtr: uPtr}
	return &ds, nil
}
// MakeDataShaderTr either returns a pointer to a newly created DataShaderTr
// or an error. The parameters must point to existing, initialized objects.
// MakeDataShaderTr also initializes the VAO of the new binding.
func MakeDataShaderTr(sPtr *ShaderTr, bPtr *GLBufTr, uPtr *GLUni, mPtr *mki3d.Mki3dType) (dsPtr *DataShaderTr, err error) {
	if sPtr == nil {
		return nil, errors.New("sPtr == nil // type *ShaderTr ")
	}
	if bPtr == nil {
		return nil, errors.New("bPtr == nil // type *GLBufTr ")
	}
	if uPtr == nil {
		return nil, errors.New("uPtr == nil // type *GLUni ")
	}
	if mPtr == nil {
		return nil, errors.New("mPtr == nil // type *Mki3dType ")
	}
	ds := DataShaderTr{ShaderPtr: sPtr, BufPtr: bPtr, UniPtr: uPtr, Mki3dPtr: mPtr}
	err = ds.InitVAO()
	if err != nil {
		return nil, err
	}
	return &ds, nil
}
// MakeDataShaderSeg either returns a pointer to a newly created
// DataShaderSeg or an error. The parameters must point to existing,
// initialized objects. MakeDataShaderSeg also initializes the VAO of the
// new binding.
func MakeDataShaderSeg(sPtr *ShaderSeg, bPtr *GLBufSeg, uPtr *GLUni, mPtr *mki3d.Mki3dType) (dsPtr *DataShaderSeg, err error) {
	// Error messages previously misreported the types as *ShaderTr and
	// *GLBufTr (copied from the triangle variant); fixed to Seg types.
	if sPtr == nil {
		return nil, errors.New("sPtr == nil // type *ShaderSeg ")
	}
	if bPtr == nil {
		return nil, errors.New("bPtr == nil // type *GLBufSeg ")
	}
	if uPtr == nil {
		return nil, errors.New("uPtr == nil // type *GLUni ")
	}
	if mPtr == nil {
		return nil, errors.New("mPtr == nil // type *Mki3dType ")
	}
	ds := DataShaderSeg{ShaderPtr: sPtr, BufPtr: bPtr, UniPtr: uPtr, Mki3dPtr: mPtr}
	err = ds.InitVAO()
	if err != nil {
		return nil, err
	}
	return &ds, nil
}
// UniLightToShader sets light uniform parameters (light vector and ambient
// factor) from ds.UniPtr in ds.ShaderPtr; both must be non-nil and
// previously initialized.
func (ds *DataShaderTr) UniLightToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.Uniform3fv(ds.ShaderPtr.LightUni, 1, &(ds.UniPtr.LightUni[0]))
	gl.Uniform1f(ds.ShaderPtr.AmbientUni, ds.UniPtr.AmbientUni)
	return nil
}

// UniModelToShader uploads the model matrix uniform from ds.UniPtr to the
// triangle shader program.
func (ds *DataShaderTr) UniModelToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ModelUni, 1, false, &(ds.UniPtr.ModelUni[0]))
	return nil
}
// UniModelToShader uploads the model matrix uniform from ds.UniPtr to the
// segment shader program. (Error message corrected: this receiver holds a
// *ShaderSeg, not a *ShaderTr.)
func (ds *DataShaderSeg) UniModelToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderSeg")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ModelUni, 1, false, &(ds.UniPtr.ModelUni[0]))
	return nil
}
// UniViewToShader uploads the view matrix uniform from ds.UniPtr to the
// triangle shader program.
func (ds *DataShaderTr) UniViewToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ViewUni, 1, false, &(ds.UniPtr.ViewUni[0]))
	return nil
}
// UniViewToShader uploads the view matrix uniform from ds.UniPtr to the
// segment shader program. (Error message corrected to *ShaderSeg.)
func (ds *DataShaderSeg) UniViewToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderSeg")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ViewUni, 1, false, &(ds.UniPtr.ViewUni[0]))
	return nil
}
// UniProjectionToShader uploads the projection matrix uniform from
// ds.UniPtr to the triangle shader program.
func (ds *DataShaderTr) UniProjectionToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ProjectionUni, 1, false, &(ds.UniPtr.ProjectionUni[0]))
	return nil
}
// UniProjectionToShader uploads the projection matrix uniform from
// ds.UniPtr to the segment shader program. (Error message corrected to
// *ShaderSeg.)
func (ds *DataShaderSeg) UniProjectionToShader() (err error) {
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderSeg")
	}
	if ds.UniPtr == nil {
		return errors.New("ds.UniPtr == nil // type *GLUni")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.UniformMatrix4fv(ds.ShaderPtr.ProjectionUni, 1, false, &(ds.UniPtr.ProjectionUni[0]))
	return nil
}
// InitStage uploads the stage-wide uniforms (projection, view, light) of
// this triangle binding; call when ds represents the stage itself.
func (ds *DataShaderTr) InitStage() (err error) {
	if ds.Mki3dPtr == nil {
		return errors.New("ds.Mki3dPtr == nil // type *Mki3dType")
	}
	err = ds.UniProjectionToShader() // set projection
	if err != nil {
		return err
	}
	err = ds.UniViewToShader() // set view
	if err != nil {
		return err
	}
	err = ds.UniLightToShader() // set light - for triangles only
	if err != nil {
		return err
	}
	return nil
}

// InitStage uploads the stage-wide uniforms (projection, view) of this
// segment binding; segments use no lighting.
func (ds *DataShaderSeg) InitStage() (err error) {
	if ds.Mki3dPtr == nil {
		return errors.New("ds.Mki3dPtr == nil // type *Mki3dType")
	}
	err = ds.UniProjectionToShader() // set projection
	if err != nil {
		return err
	}
	err = ds.UniViewToShader() // set view
	if err != nil {
		return err
	}
	return nil
}
// InitStage uploads the stage-wide uniforms of the segment, triangle, and
// optional texture bindings. Call once before drawing models in this stage.
// (The nil-check error messages previously claimed "ds.Mki3dPtr == nil"
// while actually testing SegPtr/TrPtr; corrected.)
func (ds *DataShader) InitStage() (err error) {
	if ds.SegPtr == nil {
		return errors.New("ds.SegPtr == nil // type *DataShaderSeg")
	}
	err = ds.SegPtr.InitStage()
	if err != nil {
		return err
	}
	if ds.TrPtr == nil {
		return errors.New("ds.TrPtr == nil // type *DataShaderTr")
	}
	err = ds.TrPtr.InitStage()
	if err != nil {
		return err
	}
	// TexPtr is optional; only initialize it when present.
	if ds.TexPtr != nil {
		err = ds.TexPtr.InitStage()
		if err != nil {
			return err
		}
	}
	return nil
}
// SetBackgroundColor applies the model's background color as the GL clear
// color (alpha fixed at 1.0).
func (ds *DataShader) SetBackgroundColor() {
	bg := ds.Mki3dPtr.BackgroundColor
	gl.ClearColor(bg[0], bg[1], bg[2], 1.0)
}

// DrawModel draws the triangle part of a model with the current uniforms.
func (ds *DataShaderTr) DrawModel() {
	if ds.BufPtr.VertexCount == 0 {
		return // nothing to draw
	}
	ds.UniModelToShader()
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.BindVertexArray(ds.VAO)
	gl.DrawArrays(gl.TRIANGLES, 0, ds.BufPtr.VertexCount)
	gl.BindVertexArray(0)
}

// DrawModel draws the segment part of a model with the current uniforms.
func (ds *DataShaderSeg) DrawModel() {
	if ds.BufPtr.VertexCount == 0 {
		return // nothing to draw
	}
	ds.UniModelToShader()
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.BindVertexArray(ds.VAO)
	gl.DrawArrays(gl.LINES, 0, ds.BufPtr.VertexCount)
	gl.BindVertexArray(0)
}

// DrawModel draws a model: triangles, then segments, then textures.
func (ds *DataShader) DrawModel() {
	ds.TrPtr.DrawModel()
	ds.SegPtr.DrawModel()
	if ds.TexPtr != nil {
		ds.TexPtr.DrawModel()
	}
}

// DrawStage uploads stage uniforms and draws the triangle part.
func (ds *DataShaderTr) DrawStage() {
	ds.InitStage()
	ds.DrawModel()
}

// DrawStage uploads stage uniforms and draws the segment part.
func (ds *DataShaderSeg) DrawStage() {
	ds.InitStage()
	ds.DrawModel()
}

// DrawStage draws the stage itself (triangles first, then segments, then
// textures). Use it once before drawing other models in the stage.
func (ds *DataShader) DrawStage() {
	ds.TrPtr.DrawStage()
	ds.SegPtr.DrawStage()
	if ds.TexPtr != nil {
		ds.TexPtr.DrawStage()
	}
}
// InitVAO creates and populates the VAO of ds, binding the position, color,
// and normal buffers to the triangle shader attributes. ds, ds.ShaderPtr
// and ds.BufPtr must be non-nil and previously initialized.
func (ds *DataShaderTr) InitVAO() (err error) {
	if ds == nil {
		return errors.New("ds == nil // type *DataShaderTr ")
	}
	if ds.BufPtr == nil {
		return errors.New("ds.BufPtr == nil // type *GLBufTr")
	}
	if ds.ShaderPtr == nil {
		return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
	}
	gl.UseProgram(ds.ShaderPtr.ProgramId)
	gl.GenVertexArrays(1, &(ds.VAO))
	gl.BindVertexArray(ds.VAO)
	// bind vertex positions
	gl.BindBuffer(gl.ARRAY_BUFFER, ds.BufPtr.PositionBuf)
	gl.EnableVertexAttribArray(ds.ShaderPtr.PositionAttr)
	gl.VertexAttribPointer(ds.ShaderPtr.PositionAttr, 3, gl.FLOAT, false, 0 /* stride */, gl.PtrOffset(0))
	// bind vertex colors
	gl.BindBuffer(gl.ARRAY_BUFFER, ds.BufPtr.ColorBuf)
	gl.EnableVertexAttribArray(ds.ShaderPtr.ColorAttr)
	gl.VertexAttribPointer(ds.ShaderPtr.ColorAttr, 3, gl.FLOAT, false, 0 /* stride */, gl.PtrOffset(0))
	// bind vertex normals (triangles only; segments have no normals)
	gl.BindBuffer(gl.ARRAY_BUFFER, ds.BufPtr.NormalBuf)
	gl.EnableVertexAttribArray(ds.ShaderPtr.NormalAttr)
	gl.VertexAttribPointer(ds.ShaderPtr.NormalAttr, 3, gl.FLOAT, false, 0 /* stride */, gl.PtrOffset(0))
	gl.BindVertexArray(0) // unbind VAO
	return nil
}
// InitVAO init the VAO field of ds. ds, ds.ShaderPtr and ds.BufPtr must be not nil and previously initiated.
func (ds *DataShaderSeg) InitVAO() (err error) {
if ds == nil {
return errors.New("ds == nil // type *DataShaderTr ")
}
if ds.BufPtr == nil {
return errors.New("ds.BufPtr == nil // type *GLBufTr")
}
if ds.ShaderPtr == nil {
return errors.New("ds.ShaderPtr == nil // type *ShaderTr")
}
gl.UseProgram(ds.ShaderPtr.ProgramId)
gl.GenVertexArrays(1, &(ds.VAO))
gl.BindVertexArray(ds.VAO)
// bind vertex positions
gl.BindBuffer(gl.ARRAY_BUFFER, ds.BufPtr.PositionBuf)
gl.EnableVertexAttribArray(ds.ShaderPtr.PositionAttr)
gl.VertexAttribPointer(ds.ShaderPtr.PositionAttr, 3, gl.FLOAT, false, 0 /* stride */, gl.PtrOffset(0))
// bind vertex colors
gl.BindBuffer(gl.ARRAY_BUFFER, ds.BufPtr.ColorBuf)
gl.EnableVertexAttribArray(ds.ShaderPtr.ColorAttr)
gl.VertexAttribPointer(ds.ShaderPtr.ColorAttr, 3, gl.FLOAT, false, 0 /* stride */, gl.PtrOffset(0))
gl.BindVertexArray(0) // unbind VAO
return nil
} | glmki3d/data-shader.go | 0.606265 | 0.506469 | data-shader.go | starcoder |
package conf
// Uint64Var defines a uint64 flag and environment variable with specified name, default value, and usage string.
// The argument p points to a uint64 variable in which to store the value of the flag and/or environment variable.
func (c *Configurator) Uint64Var(p *uint64, name string, value uint64, usage string) {
	c.env().Uint64Var(p, name, value, usage)
	c.flag().Uint64Var(p, name, value, usage)
}

// Uint64 defines a uint64 flag and environment variable with specified name, default value, and usage string.
// The return value is the address of a uint64 variable that stores the value of the flag and/or environment variable.
func (c *Configurator) Uint64(name string, value uint64, usage string) *uint64 {
	var v uint64
	c.Uint64Var(&v, name, value, usage)
	return &v
}

// Uint64VarE defines a uint64 environment variable with specified name, default value, and usage string.
// The argument p points to a uint64 variable in which to store the value of the environment variable.
func (c *Configurator) Uint64VarE(p *uint64, name string, value uint64, usage string) {
	c.env().Uint64Var(p, name, value, usage)
}

// Uint64E defines a uint64 environment variable with specified name, default value, and usage string.
// The return value is the address of a uint64 variable that stores the value of the environment variable.
func (c *Configurator) Uint64E(name string, value uint64, usage string) *uint64 {
	var v uint64
	c.Uint64VarE(&v, name, value, usage)
	return &v
}

// Uint64VarF defines a uint64 flag with specified name, default value, and usage string.
// The argument p points to a uint64 variable in which to store the value of the flag.
func (c *Configurator) Uint64VarF(p *uint64, name string, value uint64, usage string) {
	c.flag().Uint64Var(p, name, value, usage)
}

// Uint64F defines a uint64 flag with specified name, default value, and usage string.
// The return value is the address of a uint64 variable that stores the value of the flag.
func (c *Configurator) Uint64F(name string, value uint64, usage string) *uint64 {
	var v uint64
	c.Uint64VarF(&v, name, value, usage)
	return &v
}
// Uint64Var defines a uint64 flag and environment variable with specified name, default value, and usage string,
// on the Global configurator. The argument p points to a uint64 variable in which to store the value.
func Uint64Var(p *uint64, name string, value uint64, usage string) {
	Global.Uint64Var(p, name, value, usage)
}

// Uint64 defines a uint64 flag and environment variable with specified name, default value, and usage string,
// on the Global configurator. The return value is the address of a uint64 variable that stores the value.
func Uint64(name string, value uint64, usage string) *uint64 {
	return Global.Uint64(name, value, usage)
}

// Uint64VarE defines a uint64 environment variable with specified name, default value, and usage string,
// on the Global configurator. The argument p points to a uint64 variable in which to store the value.
func Uint64VarE(p *uint64, name string, value uint64, usage string) {
	Global.Uint64VarE(p, name, value, usage)
}

// Uint64E defines a uint64 environment variable with specified name, default value, and usage string,
// on the Global configurator. The return value is the address of a uint64 variable that stores the value.
func Uint64E(name string, value uint64, usage string) *uint64 {
	return Global.Uint64E(name, value, usage)
}

// Uint64VarF defines a uint64 flag with specified name, default value, and usage string,
// on the Global configurator. The argument p points to a uint64 variable in which to store the value.
func Uint64VarF(p *uint64, name string, value uint64, usage string) {
	Global.Uint64VarF(p, name, value, usage)
}

// Uint64F defines a uint64 flag with specified name, default value, and usage string,
// on the Global configurator. The return value is the address of a uint64 variable that stores the value.
func Uint64F(name string, value uint64, usage string) *uint64 {
	return Global.Uint64F(name, value, usage)
}
package pgo
// ShapeFlags is a bitmask of per-shape behavior flags (PhysX PxShapeFlag
// equivalents).
type ShapeFlags uint32

const (
	/**
	\brief The shape will partake in collision in the physical simulation.
	\note It is illegal to raise the eSIMULATION_SHAPE and eTRIGGER_SHAPE flags.
	In the event that one of these flags is already raised the sdk will reject any
	attempt to raise the other. To raise the eSIMULATION_SHAPE first ensure that
	eTRIGGER_SHAPE is already lowered.
	\note This flag has no effect if simulation is disabled for the corresponding actor (see #PxActorFlag::eDISABLE_SIMULATION).
	@see PxSimulationEventCallback.onContact() PxScene.setSimulationEventCallback() PxShape.setFlag(), PxShape.setFlags()
	*/
	ShapeFlags_eSIMULATION_SHAPE ShapeFlags = (1 << 0)
	/**
	\brief The shape will partake in scene queries (ray casts, overlap tests, sweeps, ...).
	*/
	ShapeFlags_eSCENE_QUERY_SHAPE ShapeFlags = (1 << 1)
	/**
	\brief The shape is a trigger which can send reports whenever other shapes enter/leave its volume.
	\note Triangle meshes and heightfields can not be triggers. Shape creation will fail in these cases.
	\note Shapes marked as triggers do not collide with other objects. If an object should act both
	as a trigger shape and a collision shape then create a rigid body with two shapes, one being a
	trigger shape and the other a collision shape. It is illegal to raise the eTRIGGER_SHAPE and
	eSIMULATION_SHAPE flags on a single PxShape instance. In the event that one of these flags is already
	raised the sdk will reject any attempt to raise the other. To raise the eTRIGGER_SHAPE flag first
	ensure that eSIMULATION_SHAPE flag is already lowered.
	\note Trigger shapes will no longer send notification events for interactions with other trigger shapes.
	\note Shapes marked as triggers are allowed to participate in scene queries, provided the eSCENE_QUERY_SHAPE flag is set.
	\note This flag has no effect if simulation is disabled for the corresponding actor (see #PxActorFlag::eDISABLE_SIMULATION).
	@see PxSimulationEventCallback.onTrigger() PxScene.setSimulationEventCallback() PxShape.setFlag(), PxShape.setFlags()
	*/
	ShapeFlags_eTRIGGER_SHAPE ShapeFlags = (1 << 2)
	/**
	\brief Enable debug renderer for this shape
	@see PxScene.getRenderBuffer() PxRenderBuffer PxVisualizationParameter
	*/
	ShapeFlags_eVISUALIZATION ShapeFlags = (1 << 3)
)
package main
import (
"bytes"
"fmt"
"log"
"math"
)
type Point struct {
X, Y float64
}
// Angle determines the angle of the vector defined by [0,0] to this point
func angle(p Point) float64 {
if p.X == 0.0 {
if p.Y > 0.0 {
return 90.0
} else {
return 270.0
}
}
ang := math.Atan(p.Y/p.X) / math.Pi * 180.0
if p.X > 0 {
return ang
} else {
return ang + 180.0
}
}
// Rotates a point around another point
func rotate(angle float64, a *Point, p Point) {
rad := angle / 180.0 * math.Pi
a.X -= p.X
a.Y -= p.Y
ox := a.X
oy := a.Y
a.X = math.Cos(rad)*ox - math.Sin(rad)*oy
a.Y = math.Sin(rad)*ox - math.Cos(rad)*oy
a.X += p.X
a.Y += p.Y
}
// distance returns the Euclidean distance between points a and b.
func distance(a, b Point) float64 {
	dx := b.X - a.X
	dy := b.Y - a.Y
	return math.Sqrt(dx*dx + dy*dy)
}

// Line is a 2D line segment from A to B.
type Line struct {
	A, B Point
}

// midpoint returns the midpoint of the line segment l.
func midpoint(l Line) Point {
	return Point{
		X: (l.A.X + l.B.X) / 2.0,
		Y: (l.A.Y + l.B.Y) / 2.0,
	}
}

// intersection returns the intersection point between two line segments, if
// it exists. found is false for parallel segments and for intersections that
// fall outside either segment; the strict inequalities below also exclude
// exact endpoint touches.
func intersection(l0, l1 Line) (pt Point, found bool) {
	a, b := l0.A, l0.B
	c, d := l1.A, l1.B
	// r and s are the parametric positions of the crossing along a->b and
	// c->d respectively; both must be inside (0, 1) for a real crossing.
	rtop := (a.Y-c.Y)*(d.X-c.X) - (a.X-c.X)*(d.Y-c.Y)
	rbot := (b.X-a.X)*(d.Y-c.Y) - (b.Y-a.Y)*(d.X-c.X)
	stop := (a.Y-c.Y)*(b.X-a.X) - (a.X-c.X)*(b.Y-a.Y)
	sbot := (b.X-a.X)*(d.Y-c.Y) - (b.Y-a.Y)*(d.X-c.X)
	if rbot == 0 || sbot == 0 {
		// Lines are parallel
		return
	}
	r := rtop / rbot
	s := stop / sbot
	if r > 0 && r < 1 && s > 0 && s < 1 {
		pt.X = a.X + r*(b.X-a.X)
		pt.Y = a.Y + r*(b.Y-a.Y)
		found = true
	}
	return
}
// linedist returns the shortest distance between line segment l and point n.
// The point is projected onto the infinite line through l; when the
// projection falls outside the segment, the distance to the nearer endpoint
// is returned instead.
func linedist(l Line, n Point) float64 {
	// u is the normalized projection parameter of n onto l (0 at A, 1 at B).
	utop := (n.X-l.A.X)*(l.B.X-l.A.X) + (n.Y-l.A.Y)*(l.B.Y-l.A.Y)
	ubot := distance(l.A, l.B)
	ubot *= ubot
	if ubot == 0.0 {
		// Degenerate segment (A == B) is treated as distance zero.
		// NOTE(review): distance(l.A, n) would arguably be more correct
		// here — confirm callers never pass zero-length segments.
		return 0.0
	}
	u := utop / ubot
	if u < 0 || u > 1 {
		// Projection lies beyond the segment: use the closest endpoint.
		d1 := distance(l.A, n)
		d2 := distance(l.B, n)
		if d1 < d2 {
			return d1
		} else {
			return d2
		}
	}
	// Closest point lies on the segment itself.
	p := Point{
		X: l.A.X + u*(l.B.X-l.A.X),
		Y: l.A.Y + u*(l.B.Y-l.A.Y),
	}
	return distance(p, n)
}

// Length returns the length of the line segment.
func (l Line) Length() float64 {
	return distance(l.A, l.B)
}
// Character is the simulated navigator ("hero"): its sensor configuration
// plus its kinematic state.
type Character struct {
	RangeFinderAngles []float64 // relative ray angles (degrees) of the range finders
	RadarAngles1      []float64 // lower bound (degrees) of each radar pie slice
	RadarAngles2      []float64 // upper bound (degrees) of each radar pie slice
	RangeFinders      []float64 // latest range-finder readings
	Radar             []float64 // latest radar readings (1.0 or 0.0 per slice)
	Location          Point
	Heading           float64 // degrees
	Speed             float64
	Radius            float64 // collision radius
	RangeFinderRange  float64 // maximum detection distance of a range finder
	AngularVelocity   float64
}

// Environment is the maze world: the hero, the wall segments, and the goal.
type Environment struct {
	Hero      Character
	Lines     []Line // maze walls
	End       Point  // goal location
	ReachGoal bool   // set by distanceToTarget once the hero is near End
}

// clone returns a copy of the environment with freshly allocated sensor
// slices for the hero. The wall set (Lines) is shared with the receiver, not
// copied, and ReachGoal resets to its zero value (false) in the copy.
func (e Environment) clone() Environment {
	e2 := Environment{
		Hero: Character{
			Heading:           e.Hero.Heading,
			Speed:             e.Hero.Speed,
			Location:          e.Hero.Location,
			RangeFinderAngles: make([]float64, len(e.Hero.RangeFinderAngles)),
			RadarAngles1:      make([]float64, len(e.Hero.RadarAngles1)),
			RadarAngles2:      make([]float64, len(e.Hero.RadarAngles2)),
			RangeFinders:      make([]float64, len(e.Hero.RangeFinders)),
			Radar:             make([]float64, len(e.Hero.Radar)),
			Radius:            e.Hero.Radius,
			RangeFinderRange:  e.Hero.RangeFinderRange,
			AngularVelocity:   e.Hero.AngularVelocity,
		},
		Lines: e.Lines,
		End:   e.End,
	}
	// Deep-copy the slice contents into the fresh allocations above.
	copy(e2.Hero.RangeFinderAngles, e.Hero.RangeFinderAngles)
	copy(e2.Hero.RadarAngles1, e.Hero.RadarAngles1)
	copy(e2.Hero.RadarAngles2, e.Hero.RadarAngles2)
	copy(e2.Hero.RangeFinders, e.Hero.RangeFinders)
	copy(e2.Hero.Radar, e.Hero.Radar)
	return e2
}

// copyEnvironment returns a pointer to a clone of e.
func copyEnvironment(e *Environment) *Environment {
	cln := (*e).clone()
	return &cln
}
// init sets up the hero's default sensor configuration: six range-finder
// rays (relative angles in degrees) and four radar pie slices.
func (e *Environment) init() {
	// Set up the hero
	e.Hero.RangeFinderAngles = []float64{-90, -45, 0, 45, 90, -180}
	e.Hero.RadarAngles1 = []float64{315, 45, 135, 225}
	e.Hero.RadarAngles2 = []float64{405, 135, 225, 315}
	e.Hero.Radius = 8.0
	e.Hero.RangeFinderRange = 100.0
	e.Hero.RangeFinders = make([]float64, len(e.Hero.RangeFinderAngles))
	e.Hero.Radar = make([]float64, len(e.Hero.RadarAngles1))
}

// String displays debug information: hero and goal positions followed by
// every wall segment.
func (e Environment) String() string {
	b := bytes.NewBufferString("")
	b.WriteString(fmt.Sprintf("Hero: %f %f\n", e.Hero.Location.X, e.Hero.Location.Y))
	b.WriteString(fmt.Sprintf("EndPoint: %f %f\n", e.End.X, e.End.Y))
	b.WriteString("Lines\n")
	for _, l := range e.Lines {
		b.WriteString(fmt.Sprintf("\t%f %f %f %f\n", l.A.X, l.A.Y, l.B.X, l.B.Y))
	}
	return b.String()
}

// distanceToTarget is used for fitness calculations: it returns the distance
// from the hero to the goal and, as a side effect, marks the goal reached
// when the hero is within 5 units.
func distanceToTarget(e *Environment) float64 {
	dist := distance(e.Hero.Location, e.End)
	if math.IsNaN(dist) {
		// Defensive fallback: report a large distance rather than letting
		// NaN propagate into fitness scores.
		log.Println("NAN distance error...") // Should this be an actual error?
		return 500.0
	}
	if dist < 5.0 {
		e.ReachGoal = true
	}
	return dist
}
// generateNeuralInputs builds the neural-net input vector from the hero's
// sensors: the normalized range-finder readings followed by the radar flags.
func generateNeuralInputs(e Environment) (inputs []float64) {
	n := len(e.Hero.RangeFinders)
	// Size the vector from the actual sensor counts instead of the previous
	// hard-coded 10 (6 range finders + 4 radar slices in the default
	// configuration), so alternative sensor configurations keep working.
	inputs = make([]float64, n+len(e.Hero.Radar))
	// Range finders, normalized to [0, 1] by the maximum detection range.
	for i := 0; i < n; i++ {
		inputs[i] = e.Hero.RangeFinders[i] / e.Hero.RangeFinderRange
		if math.IsNaN(inputs[i]) {
			log.Println("NAN in inputs")
		}
	}
	// Radar flags, appended after the range-finder values.
	for i := 0; i < len(e.Hero.Radar); i++ {
		inputs[i+n] = e.Hero.Radar[i]
		if math.IsNaN(inputs[i+n]) {
			log.Println("NAN in inputs")
		}
	}
	return
}
// interpretOutputs transforms the neural net's two outputs into changes to
// the hero's angular velocity and linear speed. Each output is centered on
// 0.5 ("no change"); both quantities are clamped to [-3, 3].
func interpretOutputs(e *Environment, o1, o2 float64) {
	if math.IsNaN(o1) || math.IsNaN(o2) {
		log.Println("NAN in outputs")
	}
	e.Hero.AngularVelocity += (o1 - 0.5) * 1.0
	e.Hero.Speed += (o2 - 0.5) * 1.0
	// constraints of speed and angular velocity
	if e.Hero.Speed > 3.0 {
		e.Hero.Speed = 3.0
	} else if e.Hero.Speed < -3.0 {
		e.Hero.Speed = -3.0
	}
	if e.Hero.AngularVelocity > 3.0 {
		e.Hero.AngularVelocity = 3.0
	} else if e.Hero.AngularVelocity < -3.0 {
		e.Hero.AngularVelocity = -3.0
	}
}

// update runs one time step of the simulation: advance the hero along its
// heading at its current speed, apply the angular velocity to the heading
// (wrapping into [0, 360]), skip the move when it would hit a wall, and
// refresh both sensor arrays. No-op once the goal has been reached.
func update(e *Environment) {
	if e.ReachGoal {
		return
	}
	// Velocity components from the current heading (degrees -> radians).
	vx := math.Cos(e.Hero.Heading/180.0*math.Pi) * e.Hero.Speed
	vy := math.Sin(e.Hero.Heading/180.0*math.Pi) * e.Hero.Speed
	if math.IsNaN(vx) {
		log.Println("vx is NAN")
	}
	e.Hero.Heading += e.Hero.AngularVelocity
	if math.IsNaN(e.Hero.AngularVelocity) {
		log.Println("Hero AngularVelocity is NAN")
	}
	// Keep the heading within a single revolution.
	if e.Hero.Heading > 360 {
		e.Hero.Heading -= 360
	} else if e.Hero.Heading < 0 {
		e.Hero.Heading += 360
	}
	newloc := Point{
		X: vx + e.Hero.Location.X,
		Y: vy + e.Hero.Location.Y,
	}
	// collision detection
	if !collideLines(e, newloc) {
		e.Hero.Location = newloc
	}
	updateRangeFinders(e)
	updateRadar(e)
}

// collideLines reports whether a hero centered at loc would touch any wall,
// i.e. some wall segment lies closer than the hero's radius.
func collideLines(e *Environment, loc Point) bool {
	for _, l := range e.Lines {
		if linedist(l, loc) < e.Hero.Radius {
			return true
		}
	}
	return false
}
// updateRangeFinders casts each range-finder ray outward from the hero
// (rotated by the hero's heading) and records the distance to the nearest
// wall it crosses, capped at the maximum detection range.
func updateRangeFinders(e *Environment) {
	h := &e.Hero
	for i := 0; i < len(h.RangeFinders); i++ {
		rad := h.RangeFinderAngles[i] / 180.0 * math.Pi // radians
		// Project a point from the hero's location outwards
		projPoint := Point{
			X: h.Location.X + math.Cos(rad)*h.RangeFinderRange,
			Y: h.Location.Y + math.Sin(rad)*h.RangeFinderRange,
		}
		// Rotate the projected point by the hero's heading
		rotate(h.Heading, &projPoint, h.Location)
		// Create a line segment from the hero's location to projected
		projectedLine := Line{A: h.Location, B: projPoint}
		rnge := h.RangeFinderRange // Set range to max by default
		// Now test against the environment to see if we hit anything
		for _, line := range e.Lines {
			if intpt, found := intersection(line, projectedLine); found {
				// If so, then update the range to the distance
				foundRange := distance(intpt, h.Location)
				if foundRange < rnge {
					rnge = foundRange
				}
			}
		}
		if math.IsNaN(rnge) {
			log.Println("Range is NAN")
		}
		h.RangeFinders[i] = rnge
	}
}
func updateRadar(e *Environment) {
h := &e.Hero
target := e.End
// Rotate goal with respect to heading of navigator
rotate(-h.Heading, &target, h.Location)
// Translate with respect to location of navigator
target.X -= h.Location.X
target.Y -= h.Location.Y
// What angle is the vector between target and navigator
a := angle(target)
// Fire the appropriate radar sensor
for i := 0; i < len(h.RadarAngles1); i++ {
h.Radar[i] = 0.0
if a >= h.RadarAngles1[i] && a <= h.RadarAngles2[i] {
h.Radar[i] = 1.0
} else if a+360.0 >= h.RadarAngles1[i] && a+360. <= h.RadarAngles2[i] {
h.Radar[i] = 1.0
}
}
} | x/examples/maze/maze.go | 0.708313 | 0.428891 | maze.go | starcoder |
package logging
import (
"github.com/go-logr/logr"
)
// A Logger logs messages. Messages may be supplemented by structured data,
// supplied as an array that alternates between string keys and values of an
// arbitrary type.
type Logger interface {
	// Info logs a message with optional structured data. Use Info for
	// messages that operators are very likely to be concerned with when
	// running the system.
	Info(msg string, keysAndValues ...interface{})

	// Debug logs a message with optional structured data. Use Debug for
	// messages that operators or developers may be concerned with when
	// debugging.
	Debug(msg string, keysAndValues ...interface{})

	// WithValues returns a Logger that will include the supplied structured
	// data with any subsequent messages it logs.
	WithValues(keysAndValues ...interface{}) Logger
}

// NewNopLogger returns a Logger that discards everything it is given.
func NewNopLogger() Logger {
	return nopLogger{}
}

// nopLogger is the do-nothing Logger implementation.
type nopLogger struct{}

func (nopLogger) Info(string, ...interface{})  {}
func (nopLogger) Debug(string, ...interface{}) {}

func (nopLogger) WithValues(...interface{}) Logger { return nopLogger{} }
// NewLogrLogger returns a Logger that is satisfied by the supplied logr.Logger,
// which may be satisfied in turn by various logging implementations (Zap, klog,
// etc). Debug messages are logged at V(1).
func NewLogrLogger(l logr.Logger) Logger {
	return logrLogger{log: l}
}

// logrLogger adapts a logr.Logger to the Logger interface.
type logrLogger struct {
	log logr.Logger
}

// Info logs at logr's default verbosity.
func (l logrLogger) Info(msg string, keysAndValues ...interface{}) {
	l.log.Info(msg, keysAndValues...)
}

// Debug logs at verbosity level 1 so that backends can filter debug output.
func (l logrLogger) Debug(msg string, keysAndValues ...interface{}) {
	l.log.V(1).Info(msg, keysAndValues...)
}

// WithValues returns a Logger whose underlying logr.Logger carries the
// supplied key/value pairs on every subsequent message.
func (l logrLogger) WithValues(keysAndValues ...interface{}) Logger {
	return logrLogger{log: l.log.WithValues(keysAndValues...)}
}
package funk
import (
"fmt"
"reflect"
)
// ForEachOption defines the options for ForEach
type ForEachOption struct {
	// Reverse walks slice/array elements from last to first when true.
	// It has no effect on map iteration.
	Reverse bool
}

// boolType caches the reflect.Type of bool for predicate return-value checks.
var boolType = reflect.TypeOf(true)
// ForEach iterates over elements of collection and invokes iteratee for each
// element.
//
// For slices/arrays the iteratee must be func(value) or func(index, value);
// for maps it must be func(key, value) or func(counter, key, value). The
// iteratee may optionally return a single value convertible to bool;
// returning false stops the iteration early. ForEachOption.Reverse walks
// slices/arrays from last to first and is ignored for maps. Panics when arr
// is not an iteratee or the predicate's shape does not match.
func ForEach(arr interface{}, predicate interface{}, options ...ForEachOption) {
	if !IsIteratee(arr) {
		panic("First parameter must be an iteratee")
	}
	option := ForEachOption{}
	if len(options) > 0 {
		option = options[0]
	}
	var (
		funcValue = reflect.ValueOf(predicate)
		arrValue  = reflect.ValueOf(arr)
		arrType   = arrValue.Type()
		funcType  = funcValue.Type()
		numIn     = funcType.NumIn()
		numOut    = funcType.NumOut()
	)
	// A single return value, when present, acts as a "continue?" flag.
	if numOut == 1 {
		if t := funcType.Out(0); !t.ConvertibleTo(boolType) {
			panic("Map function's return is not compatible with bool.")
		}
	}
	if arrType.Kind() == reflect.Slice || arrType.Kind() == reflect.Array {
		if !IsFunc(predicate, []int{1, 2}, []int{0, 1}) {
			panic("Second argument must be a function with one/two parameter")
		}
		// The value parameter is the last one: position 0 for func(value),
		// position 1 for func(index, value).
		inOffset := IfInt(numIn != 2, 0, 1)
		// Checking whether element type is convertible to function's first argument's type.
		if t := arrValue.Type().Elem(); !t.ConvertibleTo(funcType.In(inOffset)) {
			panic("Map function's argument is not compatible with type of array.")
		}
		// Four explicit loops: with/without index parameter, forward/reverse.
		switch {
		case numIn == 1 && !option.Reverse:
			for i := 0; i < arrValue.Len(); i++ {
				outs := funcValue.Call([]reflect.Value{arrValue.Index(i)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		case numIn == 2 && !option.Reverse:
			for i := 0; i < arrValue.Len(); i++ {
				outs := funcValue.Call([]reflect.Value{reflect.ValueOf(i), arrValue.Index(i)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		case numIn == 1 && option.Reverse:
			for i := arrValue.Len() - 1; i >= 0; i-- {
				outs := funcValue.Call([]reflect.Value{arrValue.Index(i)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		case numIn == 2 && option.Reverse:
			for i := arrValue.Len() - 1; i >= 0; i-- {
				outs := funcValue.Call([]reflect.Value{reflect.ValueOf(i), arrValue.Index(i)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		}
	}
	if arrType.Kind() == reflect.Map {
		if !IsFunc(predicate, []int{2, 3}, nil) {
			panic("Second argument must be a function with two/three parameters")
		}
		// Type checking for Map<key, value> = (key, value)
		inOffset := IfInt(numIn != 3, 0, 1)
		if t := arrType.Key(); !t.ConvertibleTo(funcType.In(inOffset)) {
			panic(fmt.Sprintf("function first argument is not compatible with %v", t))
		}
		if t := arrType.Elem(); !t.ConvertibleTo(funcType.In(inOffset + 1)) {
			panic(fmt.Sprintf("function second argument is not compatible with %v", t))
		}
		switch numIn {
		case 2:
			for _, key := range arrValue.MapKeys() {
				outs := funcValue.Call([]reflect.Value{key, arrValue.MapIndex(key)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		default: // 3
			// The extra first parameter receives a running counter, in Go's
			// (randomized) map iteration order.
			for i, key := range arrValue.MapKeys() {
				outs := funcValue.Call([]reflect.Value{reflect.ValueOf(i), key, arrValue.MapIndex(key)})
				if numOut == 1 && !outs[0].Convert(boolType).Interface().(bool) {
					break
				}
			}
		}
	}
}
// ForEachRight iterates over elements of collection from the right and invokes
// iteratee for each element. Equivalent to ForEach with the Reverse option.
func ForEachRight(arr interface{}, predicate interface{}) {
	ForEach(arr, predicate, ForEachOption{Reverse: true})
}

// Head gets the first element of array. Returns nil for an empty collection
// and panics for types other than arrays and slices.
func Head(arr interface{}) interface{} {
	value := redirectValue(reflect.ValueOf(arr))
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		if value.Len() == 0 {
			return nil
		}
		return value.Index(0).Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Head", valueType.String()))
}
// Last gets the last element of array. Returns nil for an empty collection
// and panics for types other than arrays and slices.
func Last(arr interface{}) interface{} {
	value := redirectValue(reflect.ValueOf(arr))
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		if value.Len() == 0 {
			return nil
		}
		return value.Index(value.Len() - 1).Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Last", valueType.String()))
}

// Initial gets all but the last element of array. Collections with at most
// one element are returned unchanged.
func Initial(arr interface{}) interface{} {
	value := redirectValue(reflect.ValueOf(arr))
	valueType := value.Type()
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		if length <= 1 {
			return arr
		}
		return value.Slice(0, length-1).Interface()
	}
	panic(fmt.Sprintf("Type %s is not supported by Initial", valueType.String()))
}

// Tail gets all but the first element of array. Collections with at most one
// element are returned unchanged.
func Tail(arr interface{}) interface{} {
	value := redirectValue(reflect.ValueOf(arr))
	kind := value.Kind()
	if kind == reflect.Array || kind == reflect.Slice {
		length := value.Len()
		if length <= 1 {
			return arr
		}
		return value.Slice(1, length).Interface()
	}
	panic(fmt.Sprintf("Type %v is not supported by Tail", value.Type()))
}
// Len return the length of the slice or map. Panics for any other kind.
func Len(arr interface{}) int {
	value := redirectValue(reflect.ValueOf(arr))
	switch value.Kind() {
	case reflect.Array, reflect.Slice, reflect.Map:
		return value.Len()
	}
	panic(fmt.Sprintf("Type %v is not supported by Len", value.Type()))
}
// Left return the left at most n items of the slice.
func Left(arr interface{}, n int) interface{} {
v := redirectValue(reflect.ValueOf(arr))
switch v.Kind() {
case reflect.Array, reflect.Slice:
if n >= v.Len() {
return arr
}
return v.Slice(0, n).Interface()
}
panic(fmt.Sprintf("Type %v is not supported by Len", v.Type()))
} | scan.go | 0.561455 | 0.412708 | scan.go | starcoder |
package eval
import (
"dito/src/object"
"io"
"math"
"math/rand"
"os"
"time"
)
// Builtins : map of builtin functions, keyed by the name they are invoked
// with from dito source. The metadata fields (Name/ArgC/ArgT/ReturnT) feed
// user-facing help text and must agree with each entry's Fn.
//
// FIXES: the "open" entry previously declared Name "fopen", ArgC 1 and a
// single-element ArgT even though fileOpen requires exactly 2 arguments, and
// a ReturnT of "None" although it returns a file object; the "read" and
// "write" Info strings were copy-pasted from "close"; "write" declared only
// one ArgT entry for its two arguments.
var Builtins = map[string]*object.Builtin{
	// type conversions.
	"int": &object.Builtin{
		Name:    "int",
		Fn:      typeSwitch(object.IntType),
		Info:    "Convert value to `Int`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Int",
	},
	"float": &object.Builtin{
		Name:    "float",
		Fn:      typeSwitch(object.FloatType),
		Info:    "Convert value to `Float`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Float",
	},
	"string": &object.Builtin{
		Name:    "string",
		Fn:      typeSwitch(object.StringType),
		Info:    "Convert value to `String`",
		ArgC:    1,
		ArgT:    []string{"Any"},
		ReturnT: "String",
	},
	"bool": &object.Builtin{
		Name:    "bool",
		Fn:      typeSwitch(object.BoolType),
		Info:    "Convert value to `Bool`",
		ArgC:    1,
		ArgT:    []string{"Any"},
		ReturnT: "Bool",
	},
	"array": &object.Builtin{
		Name:    "array",
		Fn:      typeSwitch(object.ArrayType),
		Info:    "Convert value to `Array`",
		ArgC:    1,
		ArgT:    []string{"Iter"},
		ReturnT: "Array",
	},
	"error": &object.Builtin{
		Name:    "error",
		Fn:      objectError,
		Info:    "create a new error message with a string.",
		ArgC:    1,
		ArgT:    []string{"String"},
		ReturnT: "Error",
	},
	"type": &object.Builtin{
		Name:    "type",
		Fn:      objectType,
		Info:    "Reflect a values type",
		ArgC:    1,
		ArgT:    []string{"Any"},
		ReturnT: "String",
	},
	"len": &object.Builtin{
		Name:    "len",
		Fn:      objectLen,
		Info:    "Return the length of an `Iter`",
		ArgC:    1,
		ArgT:    []string{"Iter"},
		ReturnT: "Int",
	},
	"abs": &object.Builtin{
		Name:    "abs",
		Fn:      objectAbs,
		Info:    "Return the absolute value of an `Atom`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Atom",
	},
	"sin": &object.Builtin{
		Name:    "sin",
		Fn:      objectSin,
		Info:    "Return the sine of x radians of an `Atom`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Float",
	},
	"tan": &object.Builtin{
		Name:    "tan",
		Fn:      objectTan,
		Info:    "Return the tangent of x radians of an `Atom`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Float",
	},
	"cos": &object.Builtin{
		Name:    "cos",
		Fn:      objectCos,
		Info:    "Return the cosine of x radians of an `Atom`",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "Float",
	},
	"print": &object.Builtin{
		Name:    "print",
		Fn:      objectPrint,
		Info:    "Print a variable number of arguments to the std out.",
		ArgC:    -1,
		ArgT:    []string{"Any..."},
		ReturnT: "None",
	},
	"range": &object.Builtin{
		Name:    "range",
		Fn:      objectRange,
		Info:    "Generate an Array of Int's within a given range.",
		ArgC:    2,
		ArgT:    []string{"Int", "Int"},
		ReturnT: "Array",
	},
	"random": &object.Builtin{
		Name:    "random",
		Fn:      objectRand,
		Info:    "Generate a random Float between 0-1.",
		ArgC:    0,
		ArgT:    []string{},
		ReturnT: "Float",
	},
	"time": &object.Builtin{
		Name:    "time",
		Fn:      objectTime,
		Info:    "Return current Unix timestamp as an Int",
		ArgC:    0,
		ArgT:    []string{},
		ReturnT: "Int",
	},
	"sleep": &object.Builtin{
		Name:    "sleep",
		Fn:      objectSleep,
		Info:    "Pause execution for x milliseconds",
		ArgC:    1,
		ArgT:    []string{"Atom"},
		ReturnT: "None",
	},
	"open": &object.Builtin{
		Name:    "open",
		Fn:      fileOpen,
		Info:    "Open and return a file object",
		ArgC:    2,
		ArgT:    []string{"String", "String"},
		ReturnT: "File",
	},
	"close": &object.Builtin{
		Name:    "close",
		Fn:      fileClose,
		Info:    "close a file object",
		ArgC:    1,
		ArgT:    []string{"File"},
		ReturnT: "None",
	},
	"read": &object.Builtin{
		Name:    "read",
		Fn:      fileRead,
		Info:    "read the contents of a file object",
		ArgC:    1,
		ArgT:    []string{"File"},
		ReturnT: "String",
	},
	"write": &object.Builtin{
		Name: "write",
		Fn:   fileWrite,
		Info: "write a value to a file object",
		ArgC: 2,
		ArgT: []string{"File", "Any"},
		// NOTE(review): fileWrite returns fp.Write's result, whose type is
		// declared elsewhere — confirm "String" is accurate.
		ReturnT: "String",
	},
}
// fileOpen implements the builtin 'open': open(path, mode), where mode is
// "r" (read) or "w" (create/write). Returns a file object, or an Error for
// bad arguments or an unknown mode.
func fileOpen(args ...object.Object) object.Object {
	if n := len(args); n != 2 {
		return object.NewError(object.InvalidArgLenError, "open", 2, n)
	}
	path, ok := args[0].(*object.String)
	if !ok {
		return object.NewError("Argument '%s' to func 'open' is invalid", args[0].Inspect())
	}
	mode, ok := args[1].(*object.String)
	if !ok {
		return object.NewError("Argument '%s' to func 'open' is invalid", args[1].Inspect())
	}
	switch mode.Value {
	case "w":
		return object.FILE.Create(path.Value)
	case "r":
		return object.FILE.Open(path.Value)
	default:
		// CONSISTENCY FIX: report the builtin under its registered name
		// 'open' (this message previously said 'fopen').
		return object.NewError("Argument '%s' to func 'open' is invalid. unknown file mode", args[1].Inspect())
	}
}
// fileRead implements the builtin 'read': read(file) returns the file's
// contents.
func fileRead(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		// BUG FIX: exactly 1 argument is required, but the reported
		// expected count was 2.
		return object.NewError(object.InvalidArgLenError, "read", 1, n)
	}
	fp, ok := args[0].(*object.File)
	if !ok {
		return object.NewError("Invalid type to func 'read', want=File, got=%s", args[0].Type())
	}
	return fp.Read()
}

// fileClose implements the builtin 'close': close(file).
func fileClose(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		// BUG FIX: as with 'read', the reported expected count was 2
		// although exactly 1 argument is required.
		return object.NewError(object.InvalidArgLenError, "close", 1, n)
	}
	fp, ok := args[0].(*object.File)
	if !ok {
		return object.NewError("Invalid type to func 'close', want=File, got=%s", args[0].Type())
	}
	return fp.Close()
}
// fileWrite implements the builtin 'write': write(file, value) converts
// value to a String and writes it to the file.
func fileWrite(args ...object.Object) object.Object {
	if n := len(args); n != 2 {
		return object.NewError(object.InvalidArgLenError, "write", 2, n)
	}
	fp, ok := args[0].(*object.File)
	if !ok {
		// BUG FIX: this message previously named 'close' (copy-paste from
		// fileClose) instead of 'write'.
		return object.NewError("Invalid type to func 'write', want=File, got=%s", args[0].Type())
	}
	// Coerce the second argument to a String; propagate conversion errors.
	str := args[1].ConvertType(object.StringType)
	if str.Type() == object.ErrorType {
		return str
	}
	return fp.Write(str.(*object.String).Value)
}
// typeSwitch returns a builtin function that converts its single argument to
// the object type identified by which (used for the int/float/string/bool/
// array conversion builtins).
func typeSwitch(which object.TypeFlag) object.BuiltinFunction {
	return func(args ...object.Object) object.Object {
		n := len(args)
		switch n {
		case 1:
			return args[0].ConvertType(which)
		default:
			return object.NewError(object.InvalidArgLenError, which.String(), 1, n)
		}
	}
}
// objectType implements the builtin 'type': returns the argument's type name
// as a String.
func objectType(args ...object.Object) object.Object {
	// BUG FIX: the previous check (n > 1) let a zero-argument call fall
	// through to args[0] and panic with an index-out-of-range.
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "type", 1, n)
	}
	return object.NewString(args[0].Type().String())
}
// objectLen implements the builtin 'len': returns the length of an Iterable
// value as an Int; errors for non-iterable arguments.
func objectLen(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "len", 1, n)
	}
	iter, ok := args[0].(object.Iterable)
	if ok {
		return iter.Length()
	}
	return object.NewError("Argument '%s' to func 'len' is not a Iterable", args[0].Inspect())
}

// objectAbs implements the builtin 'abs': returns the absolute value of a
// Numeric argument.
func objectAbs(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "abs", 1, n)
	}
	num, ok := args[0].(object.Numeric)
	if ok {
		return num.Abs()
	}
	return object.NewError("Argument '%s' to func 'abs' is not a Numeric", args[0].Inspect())
}
// objectCos implements the builtin 'cos': cosine of x radians, for an Int or
// Float argument; always returns a Float.
func objectCos(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "cos", 1, n)
	}
	switch v := args[0].(type) {
	case *object.Float:
		return object.NewFloat(math.Cos(v.Value))
	case *object.Int:
		return object.NewFloat(math.Cos(float64(v.Value)))
	default:
		return object.NewError("Argument '%s' to func 'cos' is invalid", args[0].Inspect())
	}
}

// objectTan implements the builtin 'tan': tangent of x radians, for an Int
// or Float argument; always returns a Float.
func objectTan(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "tan", 1, n)
	}
	switch v := args[0].(type) {
	case *object.Float:
		return object.NewFloat(math.Tan(v.Value))
	case *object.Int:
		return object.NewFloat(math.Tan(float64(v.Value)))
	default:
		return object.NewError("Argument '%s' to func 'tan' is invalid", args[0].Inspect())
	}
}

// objectSin implements the builtin 'sin': sine of x radians, for an Int or
// Float argument; always returns a Float.
func objectSin(args ...object.Object) object.Object {
	if n := len(args); n != 1 {
		return object.NewError(object.InvalidArgLenError, "sin", 1, n)
	}
	switch v := args[0].(type) {
	case *object.Float:
		return object.NewFloat(math.Sin(v.Value))
	case *object.Int:
		return object.NewFloat(math.Sin(float64(v.Value)))
	default:
		return object.NewError("Argument '%s' to func 'sin' is invalid", args[0].Inspect())
	}
}
// objectPrint implements the builtin 'print': writes each argument's
// representation to stdout, space-separated, followed by a newline. Always
// returns NONE.
func objectPrint(args ...object.Object) object.Object {
	for i, arg := range args {
		io.WriteString(os.Stdout, arg.Inspect())
		// Single space between arguments; no trailing space before the newline.
		if i < len(args)-1 {
			io.WriteString(os.Stdout, " ")
		}
	}
	io.WriteString(os.Stdout, "\n")
	return object.NONE
}
// objectError implements the builtin 'error': wraps a String message in a
// new Error object.
func objectError(args ...object.Object) object.Object {
	if len(args) != 1 {
		// BUG FIX: exactly 1 argument is required, but the reported
		// expected count was 2.
		return object.NewError(object.InvalidArgLenError, "error", 1, len(args))
	}
	if args[0].Type() != object.StringType {
		return object.NewError("Argument to `error` not supported, got %s", args[0].Type())
	}
	return object.NewError(args[0].Inspect())
}
// objectRange implements the builtin 'range': returns an Array of Ints over
// the half-open interval [min, max). Errors when max < min or either bound
// is not an Int.
func objectRange(args ...object.Object) object.Object {
	if len(args) != 2 {
		return object.NewError(object.InvalidArgLenError, "range", 2, len(args))
	}
	min, ok := args[0].(*object.Int)
	if !ok {
		return object.NewError("Invalid args[0] to `range` function. got=%T", args[0])
	}
	max, ok := args[1].(*object.Int)
	if !ok {
		return object.NewError("Invalid args[1] to `range` function. got=%T", args[1])
	}
	if max.Value-min.Value < 0 {
		return object.NewError("Invalid args to `range` function. a > b.")
	}
	iter := make([]object.Object, max.Value-min.Value)
	i := 0
	for v := min.Value; v < max.Value; v++ {
		iter[i] = object.NewInt(v)
		i++
	}
	return object.NewArray(iter, -1)
}

// objectRand implements the builtin 'random': a pseudo-random Float in [0, 1).
func objectRand(args ...object.Object) object.Object {
	if len(args) != 0 {
		return object.NewError(object.InvalidArgLenError, "random", 0, len(args))
	}
	return object.NewFloat(rand.Float64())
}

// objectTime implements the builtin 'time': the current Unix timestamp in
// seconds, as an Int.
func objectTime(args ...object.Object) object.Object {
	if len(args) != 0 {
		return object.NewError(object.InvalidArgLenError, "time", 0, len(args))
	}
	return object.NewInt(int(time.Now().Unix()))
}
func objectSleep(args ...object.Object) object.Object {
switch arg := args[0].(type) {
case *object.Int:
time.Sleep(time.Duration(arg.Value) * time.Millisecond)
return object.NONE
case *object.Float:
time.Sleep(time.Duration(arg.Value) * time.Millisecond)
return object.NONE
default:
return object.NewError("Argument to `sleep` not supported, got %s", args[0].Type())
}
} | src/eval/builtins.go | 0.64713 | 0.501953 | builtins.go | starcoder |
package mlpack
/*
#cgo CFLAGS: -I./capi -Wall
#cgo LDFLAGS: -L. -lmlpack_go_det
#include <capi/det.h>
#include <stdlib.h>
*/
import "C"
import "gonum.org/v1/gonum/mat"
// DetOptionalParam holds the optional parameters for Det. Values equal to
// the defaults produced by DetOptions are treated as "not passed" and are
// skipped when binding parameters for the mlpack call.
type DetOptionalParam struct {
	Folds       int        // number of cross-validation folds (0 is LOOCV)
	InputModel  *dTree     // previously trained tree to load instead of training
	MaxLeafSize int        // maximum leaf size in the unpruned, fully grown DET
	MinLeafSize int        // minimum leaf size in the unpruned, fully grown DET
	PathFormat  string     // node-path printing format: "lr", "id-lr", or "lr-id"
	SkipPruning bool       // bypass pruning and output the unpruned tree only
	Test        *mat.Dense // test points to estimate the density of
	Training    *mat.Dense // data set on which to build the tree
	Verbose     bool       // print informational messages and timers
}

// DetOptions returns a DetOptionalParam populated with the mlpack defaults.
func DetOptions() *DetOptionalParam {
	return &DetOptionalParam{
		Folds:       10,
		InputModel:  nil,
		MaxLeafSize: 10,
		MinLeafSize: 5,
		PathFormat:  "lr",
		SkipPruning: false,
		Test:        nil,
		Training:    nil,
		Verbose:     false,
	}
}
/*
This program performs a number of functions related to Density Estimation
Trees. The optimal Density Estimation Tree (DET) can be trained on a set of
data (specified by "Training") using cross-validation (with number of folds
specified with the "Folds" parameter). This trained density estimation tree
may then be saved with the "OutputModel" output parameter.
The variable importances (that is, the feature importance values for each
dimension) may be saved with the "Vi" output parameter, and the density
estimates for each training point may be saved with the "TrainingSetEstimates"
output parameter.
Enabling path printing for each node outputs the path from the root node to a
leaf for each entry in the test set, or training set (if a test set is not
provided). Strings like 'LRLRLR' (indicating that traversal went to the left
child, then the right child, then the left child, and so forth) will be
output. If 'lr-id' or 'id-lr' are given as the "PathFormat" parameter, then
the ID (tag) of every node along the path will be printed after or before the
L or R character indicating the direction of traversal, respectively.
This program also can provide density estimates for a set of test points,
specified in the "Test" parameter. The density estimation tree used for this
task will be the tree that was trained on the given training points, or a tree
given as the parameter "InputModel". The density estimates for the test
points may be saved using the "TestSetEstimates" output parameter.
Input parameters:
- Folds (int): The number of folds of cross-validation to perform for
the estimation (0 is LOOCV) Default value 10.
- InputModel (dTree): Trained density estimation tree to load.
- MaxLeafSize (int): The maximum size of a leaf in the unpruned, fully
grown DET. Default value 10.
- MinLeafSize (int): The minimum size of a leaf in the unpruned, fully
grown DET. Default value 5.
- PathFormat (string): The format of path printing: 'lr', 'id-lr', or
'lr-id'. Default value 'lr'.
- SkipPruning (bool): Whether to bypass the pruning process and output
the unpruned tree only.
- Test (mat.Dense): A set of test points to estimate the density of.
- Training (mat.Dense): The data set on which to build a density
estimation tree.
- Verbose (bool): Display informational messages and the full list of
parameters and timers at the end of execution.
Output parameters:
- outputModel (dTree): Output to save trained density estimation tree
to.
- tagCountersFile (string): The file to output the number of points
that went to each leaf. Default value ''.
- tagFile (string): The file to output the tags (and possibly paths)
for each sample in the test set. Default value ''.
- testSetEstimates (mat.Dense): The output estimates on the test set
from the final optimally pruned tree.
- trainingSetEstimates (mat.Dense): The output density estimates on the
training set from the final optimally pruned tree.
- vi (mat.Dense): The output variable importance values for each
feature.
*/
func Det(param *DetOptionalParam) (dTree, string, string, *mat.Dense, *mat.Dense, *mat.Dense) {
resetTimers()
enableTimers()
disableBacktrace()
disableVerbose()
restoreSettings("Density Estimation With Density Estimation Trees")
// Detect if the parameter was passed; set if so.
if param.Folds != 10 {
setParamInt("folds", param.Folds)
setPassed("folds")
}
// Detect if the parameter was passed; set if so.
if param.InputModel != nil {
setDTree("input_model", param.InputModel)
setPassed("input_model")
}
// Detect if the parameter was passed; set if so.
if param.MaxLeafSize != 10 {
setParamInt("max_leaf_size", param.MaxLeafSize)
setPassed("max_leaf_size")
}
// Detect if the parameter was passed; set if so.
if param.MinLeafSize != 5 {
setParamInt("min_leaf_size", param.MinLeafSize)
setPassed("min_leaf_size")
}
// Detect if the parameter was passed; set if so.
if param.PathFormat != "lr" {
setParamString("path_format", param.PathFormat)
setPassed("path_format")
}
// Detect if the parameter was passed; set if so.
if param.SkipPruning != false {
setParamBool("skip_pruning", param.SkipPruning)
setPassed("skip_pruning")
}
// Detect if the parameter was passed; set if so.
if param.Test != nil {
gonumToArmaMat("test", param.Test)
setPassed("test")
}
// Detect if the parameter was passed; set if so.
if param.Training != nil {
gonumToArmaMat("training", param.Training)
setPassed("training")
}
// Detect if the parameter was passed; set if so.
if param.Verbose != false {
setParamBool("verbose", param.Verbose)
setPassed("verbose")
enableVerbose()
}
// Mark all output options as passed.
setPassed("output_model")
setPassed("tag_counters_file")
setPassed("tag_file")
setPassed("test_set_estimates")
setPassed("training_set_estimates")
setPassed("vi")
// Call the mlpack program.
C.mlpackDet()
// Initialize result variable and get output.
var outputModel dTree
outputModel.getDTree("output_model")
tagCountersFile := getParamString("tag_counters_file")
tagFile := getParamString("tag_file")
var testSetEstimatesPtr mlpackArma
testSetEstimates := testSetEstimatesPtr.armaToGonumMat("test_set_estimates")
var trainingSetEstimatesPtr mlpackArma
trainingSetEstimates := trainingSetEstimatesPtr.armaToGonumMat("training_set_estimates")
var viPtr mlpackArma
vi := viPtr.armaToGonumMat("vi")
// Clear settings.
clearSettings()
// Return output(s).
return outputModel, tagCountersFile, tagFile, testSetEstimates, trainingSetEstimates, vi
} | det.go | 0.645343 | 0.453746 | det.go | starcoder |
package specialized
// CacheMetrics carries usage counters for a two-level (MFA + LRU) cache.
type CacheMetrics struct {
	// MFA-level counters.

	// HitMFA counts lookups answered directly by the MFA level; such hits
	// never touch the LRU level.
	HitMFA uint
	// MissMFA counts lookups that did not find their key at the MFA level.
	MissMFA uint

	// LRU-level counters.

	// HitLRU counts lookups answered by the LRU level, which is consulted
	// only after an MFA miss.
	HitLRU uint
	// MissLRU counts lookups that did not find their key at the LRU level.
	MissLRU uint

	// Whole-cache counters.

	// Miss counts lookups whose key was found in neither MFA nor LRU.
	Miss uint
	// RecentlyEvictedMiss counts misses whose key had been recently
	// evicted. It stays 0 when the cache does not run with evictMetrics.
	RecentlyEvictedMiss uint
}

// Hit returns the total number of hits across both levels.
func (m CacheMetrics) Hit() uint {
	return m.HitMFA + m.HitLRU
}

// Tot returns the total number of lookups (hits plus misses).
func (m CacheMetrics) Tot() uint {
	return m.Hit() + m.Miss
}
// metrics is not safe for concurrent use, accessors should synchronize to access them
type metrics struct {
	CacheMetrics
	// store is a storage for the recently evicted ring
	store []string
	// pos is the cursor for the store
	pos int
	// header is a backing map header to quickly check if the ring has an item
	header map[string]struct{}
}

// newMetrics builds a metrics value. When evictMetrics is true, a ring of
// bufsize recently evicted keys (plus its lookup map) is allocated so that
// misses on recently evicted keys can be counted; otherwise store stays nil
// and that tracking is disabled.
func newMetrics(bufsize int, evictMetrics bool) metrics {
	var m metrics
	if !evictMetrics {
		return m
	}
	m.store = make([]string, 0, bufsize)
	m.header = make(map[string]struct{}, bufsize)
	return m
}
func (m *metrics) hitMFA() { m.HitMFA++ }
func (m *metrics) hitLRU() { m.HitLRU++ }
func (m *metrics) missMFA() { m.MissMFA++ }
func (m *metrics) missLRU() { m.MissLRU++ }
func (m *metrics) miss(k string) {
m.Miss++
if m.store == nil {
return
}
if _, ok := m.header[k]; ok {
m.RecentlyEvictedMiss++
}
}
func (m *metrics) evict(k string) {
if m.store == nil {
return
}
if len(m.store) < cap(m.store) {
m.pos = len(m.store)
m.store = append(m.store, k)
m.header[k] = struct{}{}
return
}
m.pos = (m.pos + 1) % cap(m.store)
delete(m.header, m.store[m.pos])
m.header[k] = struct{}{}
m.store[m.pos] = k
} | proxy/internal/specialized/metrics.go | 0.656548 | 0.534248 | metrics.go | starcoder |
package main
import "fmt"
// Node colors used by the red-black tree invariants.
const (
	RED = 0   // freshly inserted nodes start red
	BLACK = 1 // the sentinel is black; the root is forced black
)
// rb_node is a single node of the red-black tree. Leaves are not nil
// pointers but the shared nil_node sentinel, which simplifies the
// rotation and fix-up routines.
type rb_node struct {
	key int
	val int
	// color is RED or BLACK.
	color int
	parent *rb_node
	left_child *rb_node
	right_child *rb_node
}
// rb_tree is a red-black tree mapping int keys to int values.
type rb_tree struct {
	// root_node points at the root, or the nil_node sentinel when empty.
	root_node *rb_node
	// size is the number of keys currently stored.
	size int
}
// nil_node is the shared sentinel leaf: black, with every link pointing
// back at itself. All leaf and parent-of-root references use it in
// place of nil.
var nil_node = __nil_node()
// assert panics when result is false; used as a lightweight test helper.
func assert(result bool) {
	if !result {
		// Panic directly with the constant message: fmt.Sprintf with
		// no format arguments is flagged by vet/staticcheck (S1039)
		// and adds nothing.
		panic("Assert failed!")
	}
}
// __nil_node constructs the sentinel leaf node: black and self-linked.
func __nil_node() *rb_node {
	sentinel := &rb_node{color: BLACK}
	sentinel.parent = sentinel
	sentinel.left_child = sentinel
	sentinel.right_child = sentinel
	return sentinel
}
// new_node allocates a fresh red node whose links all point at the
// nil_node sentinel, ready to be spliced into the tree.
func new_node() *rb_node {
	n := &rb_node{}
	n.color = RED
	n.parent = nil_node
	n.left_child = nil_node
	n.right_child = nil_node
	return n
}
// left_rotate rotates the subtree rooted at x to the left: x's right
// child y becomes the subtree root, x becomes y's left child, and y's
// former left subtree becomes x's right subtree. Assumes x.right_child
// is not the sentinel. (CLRS LEFT-ROTATE.)
func (tree *rb_tree) left_rotate(x *rb_node) {
	y := x.right_child // Set y
	x.right_child = y.left_child // Turn y's left subtree into x's right subtree
	if y.left_child != nil_node {
		y.left_child.parent = x
	}
	y.parent = x.parent // Link x's parent to y
	if x.parent == nil_node {
		tree.root_node = y
	} else if x == x.parent.left_child {
		x.parent.left_child = y
	} else {
		x.parent.right_child = y
	}
	y.left_child = x // Put x on y's left
	x.parent = y
}
// right_rotate is the mirror of left_rotate: y's left child x becomes
// the subtree root, y becomes x's right child, and x's former right
// subtree becomes y's left subtree. Assumes y.left_child is not the
// sentinel. (CLRS RIGHT-ROTATE.)
func (tree *rb_tree) right_rotate(y *rb_node) {
	x := y.left_child // Set x
	y.left_child = x.right_child // Turn x's right subtree into y's left subtree
	if x.right_child != nil_node {
		x.right_child.parent = y
	}
	x.parent = y.parent // Link y's parent to x
	if y.parent == nil_node {
		tree.root_node = x
	} else if y == y.parent.left_child {
		y.parent.left_child = x
	} else {
		y.parent.right_child = x
	}
	x.right_child = y // Put y on x's right
	y.parent = x
}
// insert_fixup restores the red-black properties after the red node z
// has been inserted, following CLRS RB-INSERT-FIXUP. While z's parent
// is red: CASE 1 recolors when z's uncle y is red and moves the
// violation two levels up; CASE 2 rotates z's parent to turn the inner
// case into the outer one; CASE 3 recolors and rotates the grandparent,
// which ends the loop. The two branches mirror each other depending on
// which side of the grandparent z's parent hangs. The root is forced
// black at the end.
func (tree *rb_tree) insert_fixup(z *rb_node) {
	for z.parent.color == RED {
		if z.parent == z.parent.parent.left_child {
			y := z.parent.parent.right_child
			if y.color == RED {
				z.parent.color = BLACK // CASE 1
				y.color = BLACK // CASE 1
				z.parent.parent.color = RED // CASE 1
				z = z.parent.parent // CASE 1
			} else {
				if z == z.parent.right_child {
					z = z.parent // CASE 2
					tree.left_rotate(z) // CASE 2
				}
				z.parent.color = BLACK // CASE 3
				z.parent.parent.color = RED // CASE 3
				tree.right_rotate(z.parent.parent) // CASE 3
			}
		} else {
			y := z.parent.parent.left_child
			if y.color == RED {
				z.parent.color = BLACK // CASE 1
				y.color = BLACK // CASE 1
				z.parent.parent.color = RED // CASE 1
				z = z.parent.parent // CASE 1
			} else {
				if z == z.parent.left_child {
					z = z.parent // CASE 2
					tree.right_rotate(z) // CASE 2
				}
				z.parent.color = BLACK // CASE 3
				z.parent.parent.color = RED // CASE 3
				tree.left_rotate(z.parent.parent) // CASE 3
			}
		}
	}
	tree.root_node.color = BLACK
}
// __insert splices a new red node with insert_key/insert_val into the
// tree as in a plain BST (y trails x down the search path), then calls
// insert_fixup to restore the red-black properties. Duplicate keys are
// not checked here — the public insert filters them first.
func (tree *rb_tree) __insert(insert_key int, insert_val int) {
	x := tree.root_node
	y := nil_node // trailing pointer of x
	z := new_node()
	tree.size++
	z.key = insert_key
	z.val = insert_val
	for x != nil_node {
		y = x
		if z.key < x.key {
			x = x.left_child
		} else {
			x = x.right_child
		}
	}
	z.parent = y
	if y == nil_node {
		tree.root_node = z
	} else if z.key < y.key {
		y.left_child = z
	} else {
		y.right_child = z
	}
	tree.insert_fixup(z)
}
// minimum returns the leftmost (smallest-key) node of the subtree
// rooted at x.
func (x *rb_node) minimum() *rb_node {
	node := x
	for node.left_child != nil_node {
		node = node.left_child
	}
	return node
}
// maximum returns the rightmost (largest-key) node of the subtree
// rooted at x.
func (x *rb_node) maximum() *rb_node {
	node := x
	for node.right_child != nil_node {
		node = node.right_child
	}
	return node
}
// predecessor returns the node with the largest key smaller than x's,
// or the nil_node sentinel if x holds the minimum: either the maximum
// of x's left subtree, or the nearest ancestor from which x descends
// through a right child.
func (x *rb_node) predecessor() *rb_node {
	if x.left_child != nil_node {
		return x.left_child.maximum()
	}
	y := x.parent
	for y != nil_node && x == y.left_child {
		x = y
		y = y.parent
	}
	return y
}
// successor returns the node with the smallest key larger than x's, or
// the nil_node sentinel if x holds the maximum: either the minimum of
// x's right subtree, or the nearest ancestor from which x descends
// through a left child.
func (x *rb_node) successor() *rb_node {
	if x.right_child != nil_node {
		return x.right_child.minimum()
	}
	y := x.parent
	for y != nil_node && x == y.right_child {
		x = y
		y = y.parent
	}
	return y
}
// transplant replaces the subtree rooted at u with the subtree rooted
// at v in u's parent. Note that v.parent is assigned unconditionally —
// even when v is the sentinel — which __delete relies on. (CLRS
// RB-TRANSPLANT.)
func (tree *rb_tree) transplant(u *rb_node, v *rb_node) {
	if u.parent == nil_node {
		tree.root_node = v
	} else if u == u.parent.left_child {
		u.parent.left_child = v
	} else {
		u.parent.right_child = v
	}
	v.parent = u.parent
}
// delete_fixup restores the red-black properties after a deletion that
// removed a black node, following CLRS RB-DELETE-FIXUP. x carries an
// "extra black" that is moved up the tree; w is x's sibling. CASE 1
// turns a red sibling black, CASE 2 pushes the extra black to the
// parent, CASE 3 converts the far-nephew-black case into CASE 4, and
// CASE 4 rotates to absorb the extra black and ends the loop. The two
// branches mirror each other depending on which side of the parent x
// hangs.
func (tree *rb_tree) delete_fixup(x *rb_node) {
	for x != tree.root_node && x.color == BLACK {
		if x == x.parent.left_child {
			w := x.parent.right_child
			if w.color == RED {
				w.color = BLACK // CASE 1
				x.parent.color = RED // CASE 1
				tree.left_rotate(x.parent) // CASE 1
				w = x.parent.right_child // CASE 1
			}
			if w.left_child.color == BLACK && w.right_child.color == BLACK {
				w.color = RED // CASE 2
				x = x.parent // CASE 2
			} else {
				if w.right_child.color == BLACK {
					w.left_child.color = BLACK // CASE 3
					w.color = RED // CASE 3
					tree.right_rotate(w) // CASE 3
					w = x.parent.right_child // CASE 3
				}
				w.color = x.parent.color // CASE 4
				x.parent.color = BLACK // CASE 4
				w.right_child.color = BLACK // CASE 4
				tree.left_rotate(x.parent) // CASE 4
				x = tree.root_node // CASE 4
			}
		} else {
			w := x.parent.left_child
			if w.color == RED {
				w.color = BLACK // CASE 1
				x.parent.color = RED // CASE 1
				tree.right_rotate(x.parent) // CASE 1
				w = x.parent.left_child // CASE 1
			}
			if w.right_child.color == BLACK && w.left_child.color == BLACK {
				w.color = RED // CASE 2
				x = x.parent // CASE 2
			} else {
				if w.left_child.color == BLACK {
					w.right_child.color = BLACK // CASE 3
					w.color = RED // CASE 3
					tree.left_rotate(w) // CASE 3
					w = x.parent.left_child // CASE 3
				}
				w.color = x.parent.color // CASE 4
				x.parent.color = BLACK // CASE 4
				w.left_child.color = BLACK // CASE 4
				tree.right_rotate(x.parent) // CASE 4
				x = tree.root_node // CASE 4
			}
		}
	}
}
// __delete removes node z from the tree, following CLRS RB-DELETE.
// y is the node actually unlinked (z itself when z has at most one
// child, otherwise z's in-order successor), and x is the node that
// moves into y's place and may carry an "extra black". If the removed
// node was black, delete_fixup repairs the invariants.
func (tree *rb_tree) __delete(z *rb_node) {
	x := nil_node
	y := z
	y_original_color := y.color
	tree.size--
	if z.left_child == nil_node {
		x = z.right_child
		tree.transplant(z, z.right_child)
	} else if z.right_child == nil_node {
		x = z.left_child
		tree.transplant(z, z.left_child)
	} else {
		y = z.right_child.minimum()
		y_original_color = y.color
		x = y.right_child
		if y.parent == z {
			if x == nil_node {
				// delete_fixup navigates through x.parent, so even the
				// sentinel must be re-parented here.
				x.parent = y // This line is very important
			} else {
				assert(x.parent == y)
			}
		} else {
			tree.transplant(y, y.right_child)
			y.right_child = z.right_child
			y.right_child.parent = y
		}
		tree.transplant(z, y)
		y.left_child = z.left_child
		y.left_child.parent = y
		y.color = z.color
	}
	if y_original_color == BLACK {
		tree.delete_fixup(x)
	}
}
// ----------------------- API -----------------------
// new_tree returns an empty red-black tree.
func new_tree() *rb_tree {
	return &rb_tree{root_node: nil_node}
}
// search returns the node holding search_key, or the nil_node sentinel
// when the key is absent. Iterative descent replaces the original
// recursive closure; the comparison sequence is identical.
func (tree *rb_tree) search(search_key int) *rb_node {
	cur := tree.root_node
	for cur != nil_node && cur.key != search_key {
		if search_key < cur.key {
			cur = cur.left_child
		} else {
			cur = cur.right_child
		}
	}
	return cur
}
// insert adds insert_key/insert_val to the tree; duplicate keys are
// silently ignored.
func (tree *rb_tree) insert(insert_key int, insert_val int) {
	if tree.search(insert_key) == nil_node {
		tree.__insert(insert_key, insert_val)
	}
}
// delete removes delete_key from the tree if present.
func (tree *rb_tree) delete(delete_key int) {
	if node := tree.search(delete_key); node != nil_node {
		tree.__delete(node)
	}
}
// iterator walks the tree's nodes in ascending key order.
type iterator struct {
	node *rb_node
}

// iterator returns an in-order iterator positioned at the smallest key.
func (tree *rb_tree) iterator() iterator {
	return iterator{node: tree.root_node.minimum()}
}

// has_next reports whether another node remains.
func (i *iterator) has_next() bool {
	return i.node != nil_node
}

// get_next returns the current node and advances to its successor.
func (i *iterator) get_next() *rb_node {
	current := i.node
	i.node = current.successor()
	return current
}
// reverse_iterator walks the tree's nodes in descending key order.
type reverse_iterator struct {
	node *rb_node
}

// reverse_iterator returns an iterator positioned at the largest key.
func (tree *rb_tree) reverse_iterator() reverse_iterator {
	return reverse_iterator{node: tree.root_node.maximum()}
}

// has_next reports whether another node remains.
func (i *reverse_iterator) has_next() bool {
	return i.node != nil_node
}

// get_next returns the current node and steps back to its predecessor.
func (i *reverse_iterator) get_next() *rb_node {
	current := i.node
	i.node = current.predecessor()
	return current
}
func main() {
tree := new_tree()
m := 100
for i := 1; i < m; i++ {
tree.insert(i, i*3)
}
for i := 1; i < m; i++ {
node := tree.search(i)
assert(node != nil)
assert(node.val == (i * 3))
}
for iter := tree.iterator(); iter.has_next(); {
node := iter.get_next()
fmt.Printf("%v %v\n", node.key, node.val)
}
for iter := tree.reverse_iterator(); iter.has_next(); {
node := iter.get_next()
fmt.Printf("%v %v\n", node.key, node.val)
}
for i := 1; i < m; i++ {
tree.delete(i)
}
for i := 1; i < m; i++ {
node := tree.search(i)
assert(node != nil)
assert(node.val == 0)
}
} | rb_tree.go | 0.547948 | 0.443118 | rb_tree.go | starcoder |
package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
)
// Line is a vent line segment running from endpoint (Xa, Ya) to
// endpoint (Xb, Yb) on the integer grid.
type Line struct {
	Xa, Ya, Xb, Yb int
}
// NewLine builds a Line from its two endpoint coordinates.
func NewLine(xa, ya, xb, yb int) *Line {
	line := Line{Xa: xa, Ya: ya, Xb: xb, Yb: yb}
	return &line
}
// ParseLine parses one puzzle-input line of the form "x1,y1 -> x2,y2"
// into a Line. It panics on malformed input (fail-fast, as before),
// but now validates the field and coordinate counts first so bad input
// produces a clear message instead of an index-out-of-range panic.
func ParseLine(s string) *Line {
	f := strings.Fields(s)
	if len(f) != 3 || f[1] != "->" {
		panic("A line of input did not contain an arrow")
	}
	ptStrings := append(strings.Split(f[0], `,`), strings.Split(f[2], `,`)...)
	if len(ptStrings) != 4 {
		panic("A line of input did not contain two x,y coordinate pairs")
	}
	pts := make([]int, len(ptStrings))
	for i, ps := range ptStrings {
		n, err := strconv.Atoi(ps)
		if err != nil {
			panic(err)
		}
		pts[i] = n
	}
	return NewLine(pts[0], pts[1], pts[2], pts[3])
}
// IsHorizontal reports whether both endpoints share a y coordinate.
func (n Line) IsHorizontal() bool {
	return n.Ya == n.Yb
}
// IsVertical reports whether both endpoints share an x coordinate.
func (n Line) IsVertical() bool {
	return n.Xa == n.Xb
}
// Point is a single integer grid coordinate.
type Point struct {
	X int
	Y int
}
// PointsOn returns every integer grid point covered by the line.
// Horizontal and vertical segments are walked axis-aligned; any other
// segment is assumed to be an exact 45-degree diagonal (the puzzle
// input contract — TODO confirm for other inputs).
func (n Line) PointsOn() []Point {
	// Pre-size for the exact point count. The old capacity hint of 2
	// forced repeated slice growth on every segment longer than two
	// points.
	dx := n.Xb - n.Xa
	if dx < 0 {
		dx = -dx
	}
	dy := n.Yb - n.Ya
	if dy < 0 {
		dy = -dy
	}
	steps := dx
	if dy > steps {
		steps = dy
	}
	result := make([]Point, 0, steps+1)
	if n.IsHorizontal() {
		xS, xL := n.Xa, n.Xb
		if xS > xL {
			xS, xL = xL, xS
		}
		for x := xS; x <= xL; x++ {
			result = append(result, Point{x, n.Ya})
		}
	} else if n.IsVertical() {
		yS, yL := n.Ya, n.Yb
		if yS > yL {
			yS, yL = yL, yS
		}
		for y := yS; y <= yL; y++ {
			result = append(result, Point{n.Xa, y})
		}
	} else {
		// rearrange the endpoints so the first point is on the left
		xL, yL, xR, yR := n.Xa, n.Ya, n.Xb, n.Yb
		if xL > xR {
			xL, xR, yL, yR = xR, xL, yR, yL
		}
		slope := 1
		if yR < yL {
			slope = -1
		}
		y := yL
		for x := xL; x <= xR; x++ {
			result = append(result, Point{x, y})
			y += slope
		}
	}
	return result
}
// Plane is the vent map: Grid[x][y] counts how many lines cover the
// point (x, y).
type Plane struct {
	Grid [][]uint8
}
// NewPlane allocates an xSize-by-ySize grid with every count at zero.
func NewPlane(xSize, ySize int) *Plane {
	grid := make([][]uint8, xSize)
	for i := range grid {
		grid[i] = make([]uint8, ySize)
	}
	return &Plane{Grid: grid}
}
// AddLine increments the coverage count of every point on the line.
func (p *Plane) AddLine(n *Line) {
	for _, pt := range n.PointsOn() {
		p.Grid[pt.X][pt.Y]++
	}
}
func main() {
scanner := bufio.NewScanner(os.Stdin)
plane := NewPlane(1000, 1000)
for scanner.Scan() {
plane.AddLine(ParseLine(scanner.Text()))
}
overlaps := 0
for _, row := range plane.Grid {
for _, o := range row {
if o >= 2 {
overlaps++
}
}
}
fmt.Printf("Found %d overlaps\n", overlaps)
} | five/main.go | 0.574872 | 0.428413 | main.go | starcoder |
package sorting
import "sort"
// activityNotifications counts fraud notices: after a d-day warmup, a
// notice is issued each day the spending is at least twice the trailing
// d-day median. Values are bounded to 0..200, so a counting-sort
// histogram yields the median without sorting.
// NOTE - https://en.wikipedia.org/wiki/Counting_sort
func activityNotifications(expenditure []int32, d int32) int32 {
	hist := make([]int32, 201)
	for _, v := range expenditure[:d] {
		hist[v]++
	}
	median := medianOdd
	if d%2 == 0 {
		median = medianEven
	}
	var notices int32
	for i, today := range expenditure[d:] {
		if float64(today) >= median(hist, d)*2 {
			notices++
		}
		// Slide the window: drop the day leaving it, add today.
		hist[expenditure[i]]--
		hist[today]++
	}
	return notices
}
// medianOdd returns the median of an odd-sized window of size d whose
// values are described by the histogram counts (counts[v] = number of
// occurrences of v).
func medianOdd(counts []int32, d int32) float64 {
	target := d / 2
	var seen int32
	for value, count := range counts {
		seen += count
		if seen > target {
			return float64(value)
		}
	}
	return 0
}
// medianEven returns the median of an even-sized window of size d whose
// values are described by the histogram counts: the mean of the two
// middle elements.
//
// The original used `median0 == 0` as a "not found yet" sentinel, so
// whenever the true lower median WAS 0 the loop kept running and
// overwrote it with a later value, producing a wrong median. A boolean
// flag removes the ambiguity.
func medianEven(counts []int32, d int32) float64 {
	lowTarget := d/2 - 1
	highTarget := d / 2
	var low, high int
	foundLow := false
	var seen int32
	for value, count := range counts {
		seen += count
		if !foundLow && seen > lowTarget {
			low = value
			foundLow = true
		}
		if seen > highTarget {
			high = value
			break
		}
	}
	return float64(low+high) / 2
}
// activityNotificationsWithSort is the sort-based variant: for each day
// past the warmup it copies the trailing d-day window, sorts it, and
// issues a notice when the day's spending is at least twice the median.
func activityNotificationsWithSort(expenditure []int32, d int32) int32 {
	n := int(d)
	window := make([]int32, n)
	median := func(chunk []int32) float64 {
		if d%2 == 0 {
			return medianEvenWithSort(chunk, n/2-1, n/2)
		}
		return medianOddWithSort(chunk, n/2)
	}
	var notices int32
	for pos := n; pos < len(expenditure); pos++ {
		copy(window, expenditure[pos-n:pos])
		if float64(expenditure[pos]) >= median(window)*2 {
			notices++
		}
	}
	return notices
}
// medianOddWithSort sorts arr ascending in place and returns the
// element at idx (the middle index for an odd-length window).
func medianOddWithSort(arr []int32, idx int) float64 {
	sort.Slice(arr, func(a, b int) bool { return arr[a] < arr[b] })
	return float64(arr[idx])
}
// medianEvenWithSort sorts arr ascending in place and returns the mean
// of the elements at idx0 and idx1 (the two middle indices for an
// even-length window).
func medianEvenWithSort(arr []int32, idx0, idx1 int) float64 {
	sort.Slice(arr, func(a, b int) bool { return arr[a] < arr[b] })
	return float64(arr[idx0]+arr[idx1]) / 2
}
package intintmap
import (
"math"
)
// IntPhi is the 32-bit golden-ratio constant used to scramble keys;
// minMapSize is the smallest initial capacity New will accept.
const (
	IntPhi     = uint64(0x9E3779B9)
	minMapSize = 8
)

// phiMix spreads the bits of x using a Fibonacci-hashing multiply
// followed by an xor-shift so that sequential keys land in distant
// slots.
func phiMix(x uint64) uint64 {
	h := x * IntPhi
	return h ^ (h >> 16)
}
// Map is an open-addressed hash map from uint64 keys to uint64 values.
// The zero key is reserved as the empty-slot marker and cannot be
// stored (Put panics, Get always misses). Not safe for concurrent use.
type Map struct {
	data []uint64 // interleaved keys and values: data[2i] is a key, data[2i+1] its value
	// fillFactor is the load factor in (0, 1) at which the table doubles.
	fillFactor float64
	threshold int // we will resize a map once it reaches this size
	// size is the number of live entries.
	size int
	mask uint64 // mask to calculate the original position
	// mask2 masks a data index so linear probing wraps around the table.
	mask2 uint64
}
func nextPowerOf2(x uint32) uint32 {
if x == math.MaxUint32 {
return x
}
if x == 0 {
return 1
}
x--
x |= x >> 1
x |= x >> 2
x |= x >> 4
x |= x >> 8
x |= x >> 16
return x + 1
}
// ArraySize returns a suitable backing-array length (in uint64 slots,
// two per entry) for exp expected entries at load factor fill. It can
// be used together with NewWithMemory to provide your own memory.
func ArraySize(exp int, fill float64) int {
	slots := nextPowerOf2(uint32(math.Ceil(float64(exp) / fill)))
	if slots < 2 {
		slots = 2
	}
	return int(slots) * 2
}
// New returns a map sized for `size` entries at the stated fillFactor.
// The map grows as needed. It panics unless fillFactor is in (0, 1);
// non-positive sizes fall back to minMapSize.
func New(size int, fillFactor float64) *Map {
	if fillFactor <= 0 || fillFactor >= 1 {
		panic("FillFactor must be in (0, 1)")
	}
	if size <= 0 {
		size = minMapSize
	}
	capacity := ArraySize(size, fillFactor)
	m := &Map{
		data:       make([]uint64, capacity),
		fillFactor: fillFactor,
	}
	m.threshold = int(math.Floor(float64(capacity/2) * fillFactor))
	m.mask = uint64(capacity/2 - 1)
	m.mask2 = uint64(capacity - 1)
	return m
}
// NewWithMemory behaves like New but uses the caller-provided backing
// slice mem, whose length must be a power of two and at least 2. It
// panics on an invalid fillFactor or memory size.
func NewWithMemory(mem []uint64, fillFactor float64) *Map {
	if fillFactor <= 0 || fillFactor >= 1 {
		panic("FillFactor must be in (0, 1)")
	}
	if len(mem) < 2 {
		panic("Memory must be > 2")
	}
	if (len(mem) & (len(mem) - 1)) != 0 {
		panic("Memory must be power of two")
	}
	m := &Map{
		data:       mem,
		fillFactor: fillFactor,
	}
	m.threshold = int(math.Floor(float64(len(mem)/2) * fillFactor))
	m.mask = uint64(len(mem)/2 - 1)
	m.mask2 = uint64(len(mem) - 1)
	return m
}
// Clear clears all key->value associations in the map but preserves
// the backing memory. Zeroing every slot marks it free (0 is the
// free-slot marker); the compiler lowers this loop to a memclr.
func (m *Map) Clear() {
	m.size = 0
	for i := 0; i < len(m.data); i++ {
		m.data[i] = 0
	}
}
// Get returns the value stored under key and whether it was present.
// The zero key is reserved as the free-slot marker and always misses.
func (m *Map) Get(key uint64) (uint64, bool) {
	if key == 0 {
		return 0, false
	}
	ptr := (phiMix(key) & m.mask) << 1
	// ptr is unsigned, so the original `ptr < 0` half of this guard was
	// always false (staticcheck SA4003). The upper-bound check alone is
	// enough to let the compiler eliminate bounds checks below.
	if ptr >= uint64(len(m.data)) {
		return 0, false
	}
	k := m.data[ptr]
	if k == 0 { // end of chain already
		return 0, false
	}
	if k == key { // we check FREE prior to this call
		return m.data[ptr+1], true
	}
	// Linear-probe the rest of the chain, wrapping via mask2.
	for {
		ptr = (ptr + 2) & m.mask2
		k = m.data[ptr]
		if k == 0 {
			return 0, false
		}
		if k == key {
			return m.data[ptr+1], true
		}
	}
}
// Put adds or updates key with value val. The zero key is reserved as
// the free-slot marker, so storing it panics. Collisions are resolved
// by linear probing over entry pairs; once the live count reaches the
// threshold the table doubles (rehash re-inserts the entry that was
// just written and recomputes size, so the count stays correct).
func (m *Map) Put(key uint64, val uint64) {
	if key == 0 {
		panic("zero keys are illegal")
	}
	ptr := (phiMix(key) & m.mask) << 1
	k := m.data[ptr]
	if k == 0 { // end of chain already
		m.data[ptr] = key
		m.data[ptr+1] = val
		if m.size >= m.threshold {
			m.rehash()
		} else {
			m.size++
		}
		return
	} else if k == key { // overwrite existed value
		m.data[ptr+1] = val
		return
	}
	// Probe the rest of the chain, wrapping via mask2.
	for {
		ptr = (ptr + 2) & m.mask2
		k = m.data[ptr]
		if k == 0 {
			m.data[ptr] = key
			m.data[ptr+1] = val
			if m.size >= m.threshold {
				m.rehash()
			} else {
				m.size++
			}
			return
		} else if k == key {
			m.data[ptr+1] = val
			return
		}
	}
}
// TryPut behaves like Put but never grows the map: it reports false
// instead of inserting when the table has no room, or when key is the
// reserved zero key. Overwriting an existing key always succeeds.
// NOTE(review): it refuses at size+1 >= threshold, one insertion
// earlier than Put's size >= threshold — presumably deliberate
// headroom; confirm before relying on exact capacity.
func (m *Map) TryPut(key uint64, val uint64) bool {
	if key == 0 {
		return false
	}
	ptr := (phiMix(key) & m.mask) << 1
	k := m.data[ptr]
	if k == 0 { // end of chain already
		if m.size+1 >= m.threshold {
			return false
		}
		m.data[ptr] = key
		m.data[ptr+1] = val
		m.size++
		return true
	} else if k == key { // overwrite existed value
		m.data[ptr+1] = val
		return true
	}
	// Probe the rest of the chain, wrapping via mask2.
	for {
		ptr = (ptr + 2) & m.mask2
		k = m.data[ptr]
		if k == 0 {
			if m.size+1 >= m.threshold {
				return false
			}
			m.data[ptr] = key
			m.data[ptr+1] = val
			m.size++
			return true
		} else if k == key {
			m.data[ptr+1] = val
			return true
		}
	}
}
// Del deletes a key and its value. Deleting the reserved zero key or an
// absent key is a no-op. Freed slots are compacted with backward-shift
// deletion (shiftKeys) rather than tombstones, keeping probe chains
// short.
func (m *Map) Del(key uint64) {
	if key == 0 {
		return
	}
	ptr := (phiMix(key) & m.mask) << 1
	k := m.data[ptr]
	if k == key {
		m.shiftKeys(ptr)
		m.size--
		return
	} else if k == 0 { // end of chain already
		return
	}
	// Probe the rest of the chain, wrapping via mask2.
	for {
		ptr = (ptr + 2) & m.mask2
		k = m.data[ptr]
		if k == key {
			m.shiftKeys(ptr)
			m.size--
			return
		} else if k == 0 {
			return
		}
	}
}
// shiftKeys closes the gap left at slot pos after a deletion by walking
// the probe chain and moving back every entry whose home slot does not
// lie inside the current gap interval (backward-shift deletion). It
// stops at the first free slot and returns the index of the slot that
// ends up free.
func (m *Map) shiftKeys(pos uint64) uint64 {
	// Shift entries with the same hash.
	var last, slot uint64
	var k uint64
	var data = m.data
	for {
		last = pos
		pos = (last + 2) & m.mask2
		for {
			k = data[pos]
			if k == 0 {
				data[last] = 0
				return last
			}
			// slot is the entry's home position; an entry may only be
			// shifted into `last` if its home lies outside (last, pos].
			slot = (phiMix(k) & m.mask) << 1
			if last <= pos {
				if last >= slot || slot > pos {
					break
				}
			} else {
				if last >= slot && slot > pos {
					break
				}
			}
			pos = (pos + 2) & m.mask2
		}
		data[last] = k
		data[last+1] = data[pos+1]
	}
}
// rehash doubles the table and re-inserts every live entry from the
// old backing slice.
func (m *Map) rehash() {
	newCapacity := len(m.data) * 2
	m.threshold = int(math.Floor(float64(newCapacity/2) * m.fillFactor))
	m.mask = uint64(newCapacity/2 - 1)
	m.mask2 = uint64(newCapacity - 1)
	old := m.data
	m.data = make([]uint64, newCapacity)
	m.size = 0
	for i := 0; i < len(old); i += 2 {
		if key := old[i]; key != 0 {
			m.Put(key, old[i+1])
		}
	}
}
// Size returns the number of live entries in the map.
func (m *Map) Size() int {
	return m.size
}
// Cap returns how many entries the map can hold before its next grow —
// the fill-factor threshold, not the raw slot capacity of the backing
// array.
func (m *Map) Cap() int {
	return m.threshold
}
// Iter call the provided function for each key, value pair.
// The provided function should return true if the iteration should continue
func (m *Map) Iter(fn func(uint64, uint64) bool) {
data := m.data
var k uint64
for i := 0; i < len(data); i += 2 {
k = data[i]
if k == 0 {
continue
}
if !fn(k, data[i+1]) {
return
}
}
} | intintmap.go | 0.660939 | 0.529507 | intintmap.go | starcoder |
package main
import "lang1/ast"
import "log"
import "strconv"
import "math/rand"
/** Various tests for the robot to detect a wall and other features of the maze **/
// test_wall reports whether the robot's current maze cell has a wall on
// the given side ("left", "right", "up" or "down"); any other direction
// yields false. A detected wall is logged.
func (rstate RobotState) test_wall(dir string) bool {
	maze := rstate.maze
	col := nMod(rstate.x, rstate.mx)
	row := nMod(rstate.y, rstate.my)
	cell := maze.grid[col][row]
	switch dir {
	case "left":
		if cell&LeftWall == LeftWall {
			log.Printf("Detected left wall")
			return true
		}
	case "right":
		if cell&RightWall == RightWall {
			log.Printf("Detected right wall")
			return true
		}
	case "up":
		if cell&UpWall == UpWall {
			log.Printf("Detected up wall")
			return true
		}
	case "down":
		if cell&DownWall == DownWall {
			log.Printf("Detected down wall")
			return true
		}
	}
	return false
}
// test_open reports whether the robot's current maze cell is open (no
// wall) on the given side; any other direction yields false. A detected
// passage is logged.
func (rstate RobotState) test_open(dir string) bool {
	maze := rstate.maze
	col := nMod(rstate.x, rstate.mx)
	row := nMod(rstate.y, rstate.my)
	cell := maze.grid[col][row]
	log.Printf("Received dir: %s", dir)
	switch dir {
	case "left":
		if cell&LeftWall != LeftWall {
			log.Printf("Detected left passage")
			return true
		}
	case "right":
		if cell&RightWall != RightWall {
			log.Printf("Detected right passage")
			return true
		}
	case "up":
		if cell&UpWall != UpWall {
			log.Printf("Detected up passage")
			return true
		}
	case "down":
		if cell&DownWall != DownWall {
			log.Printf("Detected down passage")
			return true
		}
	}
	return false
}
// random_choice returns true with probability 1/times, where times is
// the decimal string of a positive integer. It panics on a malformed
// argument, and — as before — rand.Intn panics when the parsed value
// is non-positive.
func (rstate RobotState) random_choice(times string) bool {
	// strconv.Atoi is the direct idiom; the old ParseInt(.., 10, 32)
	// plus int() cast produced the same value with extra steps.
	q, err := strconv.Atoi(times)
	if err != nil {
		panic(err)
	}
	c := rand.Intn(q)
	log.Printf("random_choice 1 / %d: %d == 0?", q, c)
	return c == 0
}
// test_dir reports whether the robot's previous move direction matches
// dir; false when the previous direction is unknown or dir is not a
// recognized direction name. Matches are logged.
func (rstate RobotState) test_dir(dir string) bool {
	log.Printf("Direction in test_dir: %s", dir)
	if rstate.prev_dir == GO_UNKNOWN {
		log.Printf("Prev_dir == unknown")
		return false
	}
	switch dir {
	case "left":
		if rstate.prev_dir == GO_LEFT {
			log.Printf("Prev_dir == left")
			return true
		}
	case "right":
		if rstate.prev_dir == GO_RIGHT {
			log.Printf("Prev_dir == right")
			return true
		}
	case "up":
		if rstate.prev_dir == GO_UP {
			log.Printf("Prev_dir == up")
			return true
		}
	case "down":
		if rstate.prev_dir == GO_DOWN {
			log.Printf("Prev_dir == down")
			return true
		}
	}
	return false
}
// test_robot is a stub for detecting another robot in direction dir.
// It is not implemented and unconditionally reports true (the "first
// branch" of the caller's conditional).
func (rstate RobotState) test_robot(dir string) bool {
	log.Printf("test_robot has not been implemented, reverting to first branch")
	return true
}
func (rstate RobotState) evaluatePredicate(predicate ast.Predicate) bool {
switch expr := predicate.(type) {
case ast.Test:
test_result := false
test_type := expr.Type
test_dir := expr.Dir
log.Printf("executing: %s %s", test_type, test_dir)
switch {
case test_type == "wall":
test_result = rstate.test_wall(test_dir)
case test_type == "open":
test_result = rstate.test_open(test_dir)
case test_type == "robot":
test_result = rstate.test_robot(test_dir)
case test_type == "rand":
test_result = rstate.random_choice(test_dir)
case test_type == "prev":
test_result = rstate.test_dir(test_dir)
}
return test_result
case ast.TestGroup:
test1 := expr.Group
return rstate.evaluatePredicate(test1)
case ast.TestAnd:
test1 := expr.Test1
test2 := expr.Test2
return rstate.evaluatePredicate(test1) && rstate.evaluatePredicate(test2)
case ast.TestOr:
test1 := expr.Test1
test2 := expr.Test2
return rstate.evaluatePredicate(test1) || rstate.evaluatePredicate(test2)
case ast.TestNot:
test1 := expr.Test1
return !rstate.evaluatePredicate(test1)
}
return false
} | robot_sensor.go | 0.655667 | 0.416441 | robot_sensor.go | starcoder |
package navigation
import (
"errors"
"math"
)
// Position holds a location on the integer grid.
type Position struct {
	X, Y int
}

// ManhattanDistance returns |X| + |Y|, the taxicab distance from the
// origin.
func (p *Position) ManhattanDistance() int {
	absX := math.Abs(float64(p.X))
	absY := math.Abs(float64(p.Y))
	return int(absX + absY)
}

// NewPosition creates and initializes a new location with x and y
// coordinates.
func NewPosition(x, y int) *Position {
	return &Position{X: x, Y: y}
}
// Direction is a cardinal direction with a one-letter code and a
// clockwise integer index (N=0, E=1, S=2, W=3).
type Direction interface {
	GetLetter() string
	GetValue() int
}

// North implements the Direction interface.
type North struct{}

// GetLetter returns the short code for north.
func (n North) GetLetter() string { return "N" }

// GetValue returns the clockwise index of north.
func (n North) GetValue() int { return 0 }

// East implements the Direction interface.
type East struct{}

// GetLetter returns the short code for east.
func (e East) GetLetter() string { return "E" }

// GetValue returns the clockwise index of east.
func (e East) GetValue() int { return 1 }

// South implements the Direction interface.
type South struct{}

// GetLetter returns the short code for south.
func (s South) GetLetter() string { return "S" }

// GetValue returns the clockwise index of south.
func (s South) GetValue() int { return 2 }

// West implements the Direction interface.
type West struct{}

// GetLetter returns the short code for west.
func (w West) GetLetter() string { return "W" }

// GetValue returns the clockwise index of west.
func (w West) GetValue() int { return 3 }
// NewDirection maps a clockwise index (0..3) to its cardinal direction,
// or returns an error for any other value.
func NewDirection(value int) (Direction, error) {
	directions := []Direction{North{}, East{}, South{}, West{}}
	if value < 0 || value >= len(directions) {
		return nil, errors.New("No such direction")
	}
	return directions[value], nil
}
// Boat has a Heading and a Position (the part-one ship, which moves
// relative to its own heading).
type Boat struct {
	Heading Direction
	Position
}
// Boat2 is a boat with a waypoint (the part-two ship, which moves
// toward a waypoint that rotates around it).
type Boat2 struct {
	Position
	Waypoint
}
// NewBoat gets you a new boat at the given position and heading.
func NewBoat(direction Direction, position Position) *Boat {
	b := Boat{Heading: direction, Position: position}
	return &b
}

// NewBoat2 gets you a new waypoint boat at the given position.
func NewBoat2(position Position, waypoint Waypoint) *Boat2 {
	b := Boat2{Position: position, Waypoint: waypoint}
	return &b
}
// Move applies one navigation instruction: N/S/E/W translate the ship,
// R/L rotate its heading, and F moves it forward along the current
// heading. (X and Y are the embedded Position's promoted fields.)
func (b *Boat) Move(action string, value int) {
	switch action {
	case "N":
		b.Y += value
	case "S":
		b.Y -= value
	case "E":
		b.X += value
	case "W":
		b.X -= value
	case "R", "L":
		b.turn(action, value)
	case "F":
		b.Move(b.Heading.GetLetter(), value)
	}
}
// turn rotates the heading by value degrees (a multiple of 90), "R"
// clockwise and "L" counter-clockwise; other actions are ignored.
//
// The original wrap-around only handled a single revolution (it added
// or subtracted 4 once), so a turn over 360 degrees fed an
// out-of-range index to NewDirection and — with the error discarded —
// silently nil'ed the heading. Modular arithmetic fixes that while
// giving identical results for 90/180/270.
func (b *Boat) turn(action string, value int) {
	if action != "R" && action != "L" {
		return
	}
	numTurns := value / 90
	if action == "L" {
		numTurns = -numTurns
	}
	newHeading := ((b.Heading.GetValue()+numTurns)%4 + 4) % 4
	b.Heading, _ = NewDirection(newHeading)
}
// Move applies one instruction to the waypoint ship: N/S/E/W translate
// the waypoint, R/L rotate the waypoint around the ship, and F moves
// the ship value times the waypoint offset.
func (b *Boat2) Move(action string, value int) {
	switch action {
	case "N":
		b.Waypoint.Y += value
	case "S":
		b.Waypoint.Y -= value
	case "E":
		b.Waypoint.X += value
	case "W":
		b.Waypoint.X -= value
	case "R", "L":
		b.turn(action, value)
	case "F":
		b.Position.X += value * b.Waypoint.X
		b.Position.Y += value * b.Waypoint.Y
	}
}
// turn rotates the waypoint around the ship in 90-degree steps, "R"
// clockwise and "L" counter-clockwise.
func (b *Boat2) turn(action string, value int) {
	numTurns := value / 90
	switch action {
	case "R":
		// clockwise quarter turn: (x, y) -> (y, -x)
		for i := 0; i < numTurns; i++ {
			b.Waypoint.Position.X, b.Waypoint.Position.Y =
				b.Waypoint.Position.Y, -b.Waypoint.Position.X
		}
	case "L":
		// counter-clockwise quarter turn: (x, y) -> (-y, x)
		for i := 0; i < numTurns; i++ {
			b.Waypoint.Position.X, b.Waypoint.Position.Y =
				-b.Waypoint.Position.Y, b.Waypoint.Position.X
		}
	}
}
//Waypoint the boat is trying to reach
type Waypoint struct {
Position
}
//NewWaypoint creates a new waypoint
func NewWaypoint(position Position) *Waypoint {
return &Waypoint{Position: position}
} | pkg/navigation/navigation.go | 0.79736 | 0.613381 | navigation.go | starcoder |
package check
import "math"
// AllKeyValuePairsInMapStringString reports whether every key-value
// pair of Map1 also appears in Map2. When either map is empty it
// returns false.
func AllKeyValuePairsInMapStringString(Map1, Map2 map[string]string) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; !ok || got != want {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapStringString reports whether at least one
// key-value pair of Map1 also appears in Map2. When either map is
// empty it returns false.
func AnyKeyValuePairInMapStringString(Map1, Map2 map[string]string) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapStringString returns the key-value pairs of
// Map1 that also appear in Map2, plus true when at least one was
// found. When either map is empty it returns an empty map and false.
func WhichKeyValuePairsInMapStringString(Map1, Map2 map[string]string) (map[string]string, bool) {
	shared := make(map[string]string)
	if len(Map1) == 0 || len(Map2) == 0 {
		return shared, false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			shared[key] = want
		}
	}
	return shared, len(shared) > 0
}
// AllKeyValuePairsInMapStringInt reports whether every key-value pair
// of Map1 also appears in Map2. When either map is empty it returns
// false.
func AllKeyValuePairsInMapStringInt(Map1, Map2 map[string]int) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; !ok || got != want {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapStringInt reports whether at least one key-value
// pair of Map1 also appears in Map2. When either map is empty it
// returns false.
func AnyKeyValuePairInMapStringInt(Map1, Map2 map[string]int) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapStringInt returns the key-value pairs of Map1
// that also appear in Map2, plus true when at least one was found.
// When either map is empty it returns an empty map and false.
func WhichKeyValuePairsInMapStringInt(Map1, Map2 map[string]int) (map[string]int, bool) {
	shared := make(map[string]int)
	if len(Map1) == 0 || len(Map2) == 0 {
		return shared, false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			shared[key] = want
		}
	}
	return shared, len(shared) > 0
}
// AllKeyValuePairsInMapStringFloat64 reports whether every key of Map1
// exists in Map2 with a value within Epsilon of Map1's. When either map
// is empty it returns false.
func AllKeyValuePairsInMapStringFloat64(Map1, Map2 map[string]float64, Epsilon float64) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		got, ok := Map2[key]
		if !ok || math.Abs(want-got) > Epsilon {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapStringFloat64 reports whether at least one key of
// Map1 exists in Map2 with a value within Epsilon of Map1's. When
// either map is empty it returns false.
func AnyKeyValuePairInMapStringFloat64(Map1, Map2 map[string]float64, Epsilon float64) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && math.Abs(want-got) <= Epsilon {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapStringFloat64 returns the pairs of Map1 whose
// key exists in Map2 with a value within Epsilon (the returned map
// carries Map1's values), plus true when at least one was found. When
// either map is empty it returns an empty map and false.
func WhichKeyValuePairsInMapStringFloat64(Map1, Map2 map[string]float64, Epsilon float64) (map[string]float64, bool) {
	shared := make(map[string]float64)
	if len(Map1) == 0 || len(Map2) == 0 {
		return shared, false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && math.Abs(want-got) <= Epsilon {
			shared[key] = want
		}
	}
	return shared, len(shared) > 0
}
// AllKeyValuePairsInMapIntString reports whether every key-value pair
// of Map1 also appears in Map2. When either map is empty it returns
// false.
func AllKeyValuePairsInMapIntString(Map1, Map2 map[int]string) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; !ok || got != want {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapIntString reports whether at least one key-value
// pair of Map1 also appears in Map2. When either map is empty it
// returns false.
func AnyKeyValuePairInMapIntString(Map1, Map2 map[int]string) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapIntString returns the key-value pairs of Map1
// that also appear in Map2, plus true when at least one was found.
// When either map is empty it returns an empty map and false.
func WhichKeyValuePairsInMapIntString(Map1, Map2 map[int]string) (map[int]string, bool) {
	shared := make(map[int]string)
	if len(Map1) == 0 || len(Map2) == 0 {
		return shared, false
	}
	for key, want := range Map1 {
		if got, ok := Map2[key]; ok && got == want {
			shared[key] = want
		}
	}
	return shared, len(shared) > 0
}
// AllKeyValuePairsInMapIntInt reports whether every key-value pair of Map1 is
// present in Map2. It returns false when either map is empty.
func AllKeyValuePairsInMapIntInt(Map1, Map2 map[int]int) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for k, v := range Map1 {
		if other, ok := Map2[k]; !ok || other != v {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapIntInt reports whether at least one key-value pair of
// Map1 is present in Map2. It returns false when either map is empty.
func AnyKeyValuePairInMapIntInt(Map1, Map2 map[int]int) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for k, v := range Map1 {
		if other, ok := Map2[k]; ok && other == v {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapIntInt reports which key-value pairs of Map1 are
// also present in Map2. It returns a map holding every matching pair together
// with true when at least one match was found. When either input map is
// empty it returns an empty map and false.
func WhichKeyValuePairsInMapIntInt(Map1, Map2 map[int]int) (map[int]int, bool) {
	found := make(map[int]int)
	if len(Map1) == 0 || len(Map2) == 0 {
		return found, false
	}
	for k, v := range Map1 {
		if other, ok := Map2[k]; ok && other == v {
			found[k] = v
		}
	}
	return found, len(found) > 0
}
// AllKeyValuePairsInMapIntFloat64 reports whether every key-value pair of
// Map1 is present in Map2, comparing values with a tolerance of Epsilon.
// It returns false when either map is empty.
func AllKeyValuePairsInMapIntFloat64(Map1, Map2 map[int]float64, Epsilon float64) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for k, v := range Map1 {
		other, ok := Map2[k]
		if !ok || math.Abs(v-other) > Epsilon {
			return false
		}
	}
	return true
}
// AnyKeyValuePairInMapIntFloat64 reports whether at least one key-value pair
// of Map1 is present in Map2, comparing values with a tolerance of Epsilon.
// It returns false when either map is empty.
func AnyKeyValuePairInMapIntFloat64(Map1, Map2 map[int]float64, Epsilon float64) bool {
	if len(Map1) == 0 || len(Map2) == 0 {
		return false
	}
	for k, v := range Map1 {
		if other, ok := Map2[k]; ok && math.Abs(v-other) <= Epsilon {
			return true
		}
	}
	return false
}
// WhichKeyValuePairsInMapIntFloat64 reports which key-value pairs of Map1 are
// also present in Map2, comparing values with a tolerance of Epsilon. It
// returns a map holding every matching pair (with Map1's values) together
// with true when at least one match was found. When either input map is
// empty it returns an empty map and false.
func WhichKeyValuePairsInMapIntFloat64(Map1, Map2 map[int]float64, Epsilon float64) (map[int]float64, bool) {
	found := make(map[int]float64)
	if len(Map1) == 0 || len(Map2) == 0 {
		return found, false
	}
	for k, v := range Map1 {
		if other, ok := Map2[k]; ok && math.Abs(v-other) <= Epsilon {
			found[k] = v
		}
	}
	return found, len(found) > 0
}
package math
import (
"math"
"math/rand"
"time"
)
// Ceil rounds value up (towards positive infinity) at precision decimal
// digits.
func Ceil(value float64, precision int) float64 {
	m := math.Pow10(precision)
	return math.Ceil(value*m) / m
}
// Floor rounds value down (towards negative infinity) at precision decimal
// digits.
func Floor(value float64, precision int) float64 {
	m := math.Pow10(precision)
	return math.Floor(value*m) / m
}
// RoundHalfAwayFromZero rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded away from zero.
func RoundHalfAwayFromZero(value float64, precision int) float64 {
	return roundHalfAwayOrTowardsZero(value, precision, true)
}
// RoundHalfDown rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded down (towards negative infinity).
func RoundHalfDown(value float64, precision int) float64 {
	return roundHalfUpOrDown(value, precision, false)
}
// RoundHalfToEven rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded to the nearest even number at the given number of digits
// (banker's rounding).
func RoundHalfToEven(value float64, precision int) float64 {
	return roundHalfToEvenOrOdd(value, precision, true)
}
// RoundHalfToOdd rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded to the nearest odd number at the given number of digits.
func RoundHalfToOdd(value float64, precision int) float64 {
	return roundHalfToEvenOrOdd(value, precision, false)
}
// RoundHalfTowardsZero rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded towards zero.
func RoundHalfTowardsZero(value float64, precision int) float64 {
	return roundHalfAwayOrTowardsZero(value, precision, false)
}
// RoundHalfUp rounds value to precision number of digits. When the remaining
// fraction after precision digits is exactly 0.5, the value is rounded up
// (towards positive infinity).
func RoundHalfUp(value float64, precision int) float64 {
	return roundHalfUpOrDown(value, precision, true)
}
// StochasticRounding rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// randomly rounded up or down. The result is therefore non-deterministic for
// exact ties.
func StochasticRounding(value float64, precision int) float64 {
	decimals := decimalsAfterPrecision(value, precision)
	return roundToNearest(value, precision, decimals)
}
// init seeds the global math/rand generator used by the stochastic rounding
// paths. NOTE(review): rand.Seed is deprecated as of Go 1.20; on modern Go
// the global generator is auto-seeded and this call could be dropped.
func init() {
	rand.Seed(time.Now().UTC().UnixNano())
}
// roundHalfAwayOrTowardsZero rounds value to precision number of digits.
// When the remaining fraction after precision digits is exactly 0.5, the
// value is rounded away from zero if awayFromZero is true and towards zero
// otherwise. All other fractions round to the nearest representable value.
func roundHalfAwayOrTowardsZero(value float64, precision int, awayFromZero bool) float64 {
	decimals := decimalsAfterPrecision(value, precision)
	if decimals != 0.5 {
		return roundToNearest(value, precision, decimals)
	}
	// For non-negative values "away from zero" means rounding up; for
	// negative values it means rounding down, hence the equality test.
	return roundUpOrDown(value, precision, value >= 0 == awayFromZero)
}
// roundHalfToEvenOrOdd rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded to even if halfToEven is true and to odd otherwise. All other
// fractions round to the nearest representable value.
func roundHalfToEvenOrOdd(value float64, precision int, halfToEven bool) float64 {
	decimals := decimalsAfterPrecision(value, precision)
	if decimals != 0.5 {
		return roundToNearest(value, precision, decimals)
	}
	// The parity of the digit at the target precision decides the
	// direction: an even digit stays put for half-to-even (round towards
	// zero for positives), an odd digit moves away.
	if int(value*math.Pow10(precision))%2 == 0 {
		return roundUpOrDown(value, precision, value < 0 == halfToEven)
	}
	return roundUpOrDown(value, precision, value >= 0 == halfToEven)
}
// roundHalfUpOrDown rounds value to precision number of digits. When the
// remaining fraction after precision digits is exactly 0.5, the value is
// rounded up if halfUp is true and down otherwise. All other fractions round
// to the nearest representable value.
func roundHalfUpOrDown(value float64, precision int, halfUp bool) float64 {
	decimals := decimalsAfterPrecision(value, precision)
	if decimals != 0.5 {
		return roundToNearest(value, precision, decimals)
	}
	if halfUp {
		return Ceil(value, precision)
	}
	return Floor(value, precision)
}
// roundToNearest rounds value to precision number of digits given decimals,
// the already-computed fraction beyond that precision. When decimals is less
// than 0.5 the value is rounded towards zero; when it is more than 0.5 the
// value is rounded away from zero. When decimals equals exactly 0.5 the
// value is rounded randomly up or down (stochastic tie-breaking).
func roundToNearest(value float64, precision int, decimals float64) float64 {
	if decimals < 0.5 {
		return roundUpOrDown(value, precision, value < 0)
	} else if decimals > 0.5 {
		return roundUpOrDown(value, precision, value >= 0)
	} else {
		// Exact tie: pick a direction uniformly at random.
		return roundUpOrDown(value, precision, rand.Intn(2) == 1)
	}
}
// roundUpOrDown rounds value to precision number of digits: up (Ceil) when
// roundUp is true, down (Floor) otherwise.
func roundUpOrDown(value float64, precision int, roundUp bool) float64 {
	if roundUp {
		return Ceil(value, precision)
	}
	return Floor(value, precision)
}
// decimalsAfterPrecision shifts value left by precision decimal digits and
// returns the absolute fractional part of the shifted number (the digits
// that would be discarded by rounding at that precision).
func decimalsAfterPrecision(value float64, precision int) float64 {
	shifted := value * math.Pow10(precision)
	return math.Abs(shifted - float64(int(shifted)))
}
package inode
import (
P "github.com/chadnetzer/hardlinkable/internal/pathpool"
)
// PathInfo pairs a split pathname with the stat information of the file it
// names.
type PathInfo struct {
	P.Pathsplit
	StatInfo
}

// EqualTime reports whether both paths have the same modification time.
func (p1 PathInfo) EqualTime(p2 PathInfo) bool {
	return p1.Mtim.Equal(p2.Mtim)
}

// EqualMode reports whether both paths have the same file mode bits.
func (p1 PathInfo) EqualMode(p2 PathInfo) bool {
	return p1.Mode == p2.Mode
}

// EqualOwnership reports whether both paths have the same uid and gid.
func (p1 PathInfo) EqualOwnership(p2 PathInfo) bool {
	return p1.Uid == p2.Uid && p1.Gid == p2.Gid
}
// PathsMap maps an inode number to the set of pathnames (grouped by
// filename) that link to that inode.
type PathsMap map[Ino]*FilenamePaths

// ArbitraryPath returns one (unspecified) pathname linked to ino.
func (pm PathsMap) ArbitraryPath(ino Ino) P.Pathsplit {
	// ino must exist in f.InoPaths. If it does, there will be at least
	// one pathname to return
	return pm[ino].Any()
}

// ArbitraryFilenamePath returns one pathname linked to ino whose final
// component equals filename.
func (pm PathsMap) ArbitraryFilenamePath(ino Ino, filename string) P.Pathsplit {
	return pm[ino].AnyWithFilename(filename)
}

// HasPath reports whether path is recorded as linked to ino.
func (pm PathsMap) HasPath(ino Ino, path P.Pathsplit) bool {
	return pm[ino].HasPath(path)
}

// AppendPath records path as linked to ino, creating the per-inode path set
// on first use.
func (pm PathsMap) AppendPath(ino Ino, path P.Pathsplit) {
	fp, ok := pm[ino]
	if !ok {
		fp = newFilenamePaths()
		pm[ino] = fp
	}
	fp.Add(path)
}
// AllPaths returns a channel that can be iterated over to sequentially access
// all the paths for a given inode. The channel is closed after the last path
// is sent.
func (pm PathsMap) AllPaths(ino Ino) <-chan P.Pathsplit {
	// To avoid concurrent modification to the PathsMap maps while
	// iterating from another goroutine, first place all the pathnames into
	// a slice, in order to send them over the channel.
	paths := pm[ino].PathsAsSlice()
	out := make(chan P.Pathsplit)
	go func() {
		defer close(out)
		for _, path := range paths {
			out <- path
		}
	}()
	// NOTE(review): the sender goroutine blocks until the caller drains
	// the channel; abandoning the channel early leaks the goroutine.
	return out
}
// MovePath moves the given destination path, from the given destination
// inode, to the source inode. When the destination inode has no remaining
// paths afterwards it is removed from the map entirely.
func (pm PathsMap) MovePath(dstPath P.Pathsplit, srcIno Ino, dstIno Ino) {
	// Get pathnames slice matching Ino and filename
	fp := pm[dstIno]
	fp.Remove(dstPath)
	if fp.IsEmpty() {
		delete(pm, dstIno)
	}
	pm.AppendPath(srcIno, dstPath)
}
// PathCount returns the number of unique paths and dirs encountered after the
// initial walk is completed. This can give us an accurate count of the number
// of inode nlinks we should encounter if all linked paths are included in the
// walk. Conversely, if we count the nlinks from all the encountered inodes,
// and compare to the number of paths this function returns, we should have a
// count of how many inode paths were not seen by the walk.
func (pm PathsMap) PathCount() (paths int64, dirs int64) {
	var numPaths, numDirs int64

	// Make a set for storing unique dirs (key = dirname)
	dirMap := make(map[string]struct{})

	// loop over all inos, getting FilenamePaths
	for _, fp := range pm {
		// loop over all filenames, getting paths
		for _, paths := range fp.FPMap {
			// Loop over all paths, counting each and recording its
			// directory for the unique-dir count below.
			for p := range paths {
				numPaths++
				dirMap[p.Dirname] = struct{}{}
			}
		}
	}
	// The number of unique dirs is the size of the set.
	numDirs = int64(len(dirMap))
	return numPaths, numDirs
}
package callgraph
import (
"github.com/vkcom/nocolor/internal/palette"
)
// CallstackOfColoredFunctions is a structure for storing a stack of called
// colored functions with a quick check for the presence of a certain node.
type CallstackOfColoredFunctions struct {
	// Stack is functions placed in order, only colored functions.
	Stack Nodes
	// ColorsChain is blended colors of stacked functions, one-by-one.
	ColorsChain []palette.Color
	// IndexSet is quick index for Contains(), has the same elements as stack.
	IndexSet map[*Node]struct{}
	// ColorsMasks is the combined mask of all colors in ColorsChain.
	ColorsMasks palette.ColorMasks
}

// NewCallstackOfColoredFunctions creates a new empty callstack with small
// pre-allocated capacity.
func NewCallstackOfColoredFunctions() *CallstackOfColoredFunctions {
	return &CallstackOfColoredFunctions{
		Stack:    make(Nodes, 0, 10),
		IndexSet: make(map[*Node]struct{}, 10),
	}
}

// Size returns the number of functions currently on the stack.
func (c *CallstackOfColoredFunctions) Size() int {
	return len(c.Stack)
}

// AsVector returns a slice of the functions that are on the stack.
// Note: the returned slice aliases the internal stack storage.
func (c *CallstackOfColoredFunctions) AsVector() Nodes {
	return c.Stack
}

// Append pushes the passed node onto the stack, updating the quick-lookup
// index, the color chain, and the combined color mask.
func (c *CallstackOfColoredFunctions) Append(fun *Node) {
	c.Stack = append(c.Stack, fun)
	c.IndexSet[fun] = struct{}{}
	c.ColorsChain = append(c.ColorsChain, fun.Function.Colors.Colors...)
	c.recalcMask(fun, true)
}

// PopBack removes the last function from the stack. It is a no-op on an
// empty stack.
func (c *CallstackOfColoredFunctions) PopBack() {
	if len(c.Stack) == 0 {
		return
	}
	back := c.Stack[len(c.Stack)-1]
	c.Stack = c.Stack[:len(c.Stack)-1]
	delete(c.IndexSet, back)
	// The popped node's colors are the tail of ColorsChain (they were
	// appended last), so truncate by their count.
	c.ColorsChain = c.ColorsChain[:len(c.ColorsChain)-len(back.Function.Colors.Colors)]
	c.recalcMask(back, false)
}

// Contains reports whether the passed node is currently on the stack.
func (c *CallstackOfColoredFunctions) Contains(fun *Node) bool {
	_, ok := c.IndexSet[fun]
	return ok
}

// recalcMask updates ColorsMasks after fun was pushed (add == true) or
// popped (add == false). Adding is incremental; removal rebuilds the mask
// from the remaining ColorsChain, since masks cannot be subtracted directly.
func (c *CallstackOfColoredFunctions) recalcMask(fun *Node, add bool) {
	colors := fun.Function.Colors
	if colors.Empty() {
		return
	}
	if add {
		for _, color := range colors.Colors {
			c.ColorsMasks = c.ColorsMasks.Add(color)
		}
		return
	}
	c.ColorsMasks = palette.NewColorMasks(c.ColorsChain)
}
package core
// MetricSampleListener is a listener to receive samples for a distribution.
type MetricSampleListener interface {
	// AddSample will add a sample metric to the listener, with optional tags.
	AddSample(value float64, tags ...string)
}
// EmptyMetricSampleListener implements a sample listener that ignores everything.
type EmptyMetricSampleListener struct{}

// AddSample will add a metric sample to this listener. It is a no-op.
func (*EmptyMetricSampleListener) AddSample(value float64, tags ...string) {
	// noop
}
// MetricSupplier will return the supplied metric value; ok is false when no
// value is currently available.
type MetricSupplier func() (value float64, ok bool)
// NewIntMetricSupplierWrapper adapts an int-returning supplier function into
// a MetricSupplier. The wrapped supplier always reports ok == true.
func NewIntMetricSupplierWrapper(s func() int) MetricSupplier {
	return func() (float64, bool) {
		return float64(s()), true
	}
}
// NewFloat64MetricSupplierWrapper adapts a float64-returning supplier
// function into a MetricSupplier. The wrapped supplier always reports
// ok == true.
func NewFloat64MetricSupplierWrapper(s func() float64) MetricSupplier {
	return func() (float64, bool) {
		return s(), true
	}
}
// MetricRegistry is a simple abstraction for tracking metrics in the limiters.
type MetricRegistry interface {
	// RegisterDistribution will register a sample distribution. Samples are added to the distribution via the returned
	// MetricSampleListener. Will reuse an existing MetricSampleListener if the distribution already exists.
	RegisterDistribution(ID string, tags ...string) MetricSampleListener

	// RegisterTiming will register a sample timing distribution. Samples are added to the distribution via the
	// returned MetricSampleListener. Will reuse an existing MetricSampleListener if the distribution already exists.
	RegisterTiming(ID string, tags ...string) MetricSampleListener

	// RegisterCount will register a sample counter. Samples are added to the counter via the returned
	// MetricSampleListener. Will reuse an existing MetricSampleListener if the counter already exists.
	RegisterCount(ID string, tags ...string) MetricSampleListener

	// RegisterGauge will register a gauge using the provided supplier. The supplier will be polled whenever the gauge
	// value is flushed by the registry.
	RegisterGauge(ID string, supplier MetricSupplier, tags ...string)

	// Start will start the metric registry polling
	Start()

	// Stop will stop the metric registry polling
	Stop()
}
// EmptyMetricRegistry implements a void reporting metric registry: every
// registration returns a listener that discards samples.
type EmptyMetricRegistry struct{}

// EmptyMetricRegistryInstance is a singleton empty metric registry instance.
var EmptyMetricRegistryInstance = &EmptyMetricRegistry{}

// RegisterDistribution will register a distribution sample to this registry.
// The returned listener discards all samples.
func (*EmptyMetricRegistry) RegisterDistribution(ID string, tags ...string) MetricSampleListener {
	return &EmptyMetricSampleListener{}
}

// RegisterTiming will register a timing distribution sample to this registry.
// The returned listener discards all samples.
func (*EmptyMetricRegistry) RegisterTiming(ID string, tags ...string) MetricSampleListener {
	return &EmptyMetricSampleListener{}
}

// RegisterCount will register a count sample to this registry. The returned
// listener discards all samples.
func (*EmptyMetricRegistry) RegisterCount(ID string, tags ...string) MetricSampleListener {
	return &EmptyMetricSampleListener{}
}

// RegisterGauge will register a gauge sample to this registry. It is a no-op.
func (*EmptyMetricRegistry) RegisterGauge(ID string, supplier MetricSupplier, tags ...string) {}

// Start will start the metric registry polling. It is a no-op.
func (*EmptyMetricRegistry) Start() {}

// Stop will stop the metric registry polling. It is a no-op.
func (*EmptyMetricRegistry) Stop() {}
// CommonMetricSampler is a set of common metrics reported by all Limit
// implementations: round-trip time, dropped requests, and in-flight count.
type CommonMetricSampler struct {
	RTTListener         MetricSampleListener
	DropCounterListener MetricSampleListener
	InFlightListener    MetricSampleListener
}
// NewCommonMetricSampler will create a new CommonMetricSampler that will
// auto-instrument metrics for the given limit under the given name. A nil
// registry is replaced by the no-op EmptyMetricRegistryInstance.
func NewCommonMetricSampler(registry MetricRegistry, limit Limit, name string, tags ...string) *CommonMetricSampler {
	if registry == nil {
		registry = EmptyMetricRegistryInstance
	}

	// The current limit estimate is exposed as a polled gauge.
	registry.RegisterGauge(
		PrefixMetricWithName(MetricLimit, name),
		NewIntMetricSupplierWrapper(limit.EstimatedLimit),
		tags...,
	)

	return &CommonMetricSampler{
		RTTListener:         registry.RegisterTiming(PrefixMetricWithName(MetricRTT, name), tags...),
		DropCounterListener: registry.RegisterCount(PrefixMetricWithName(MetricDropped, name), tags...),
		InFlightListener:    registry.RegisterDistribution(PrefixMetricWithName(MetricInFlight, name), tags...),
	}
}
// Sample will sample the current sample for metric reporting.
func (s *CommonMetricSampler) Sample(rtt int64, inFlight int, didDrop bool) {
if didDrop {
s.InFlightListener.AddSample(1.0)
}
s.RTTListener.AddSample(float64(rtt))
s.InFlightListener.AddSample(float64(inFlight))
} | core/metric_registry.go | 0.903578 | 0.413773 | metric_registry.go | starcoder |
package processor
import (
"bytes"
"fmt"
"strconv"
"github.com/OneOfOne/xxhash"
"github.com/Jeffail/benthos/lib/log"
"github.com/Jeffail/benthos/lib/metrics"
"github.com/Jeffail/benthos/lib/types"
"github.com/Jeffail/gabs"
)
//------------------------------------------------------------------------------
// init registers the "dedupe" processor constructor and its documentation
// with the global Constructors registry.
func init() {
	Constructors["dedupe"] = TypeSpec{
		constructor: NewDedupe,
		description: `
Dedupes messages by caching selected (and optionally hashed) parts, dropping
messages that are already cached. The hash type can be chosen from: none or
xxhash (more will come soon).
It's possible to dedupe based on JSON field data from message parts by setting
the value of ` + "`json_paths`" + `, which is an array of JSON dot paths that
will be extracted from the message payload and concatenated. The result will
then be used to deduplicate. If the result is empty (i.e. none of the target
paths were found in the data) then this is considered an error, and the message
will be dropped or propagated based on the value of ` + "`drop_on_err`." + `
For example, if each message is a single part containing a JSON blob of the
following format:
` + "``` json" + `
{
"id": "3274892374892374",
"content": "hello world"
}
` + "```" + `
Then you could deduplicate using the raw contents of the 'id' field instead of
the whole body with the following config:
` + "``` json" + `
type: dedupe
dedupe:
cache: foo_cache
parts: [0]
json_paths:
- id
hash: none
` + "```" + `
Caches should be configured as a resource, for more information check out the
[documentation here](../caches).`,
	}
}
//------------------------------------------------------------------------------
// DedupeConfig contains any configuration for the Dedupe processor.
type DedupeConfig struct {
	// Cache is the name of the cache resource used to store digests.
	Cache string `json:"cache" yaml:"cache"`
	// HashType selects the digest algorithm ("none" or "xxhash").
	HashType string `json:"hash" yaml:"hash"`
	// Parts lists the message part indexes to hash.
	Parts []int `json:"parts" yaml:"parts"` // message parts to hash
	// JSONPaths optionally restricts hashing to these JSON dot paths.
	JSONPaths []string `json:"json_paths" yaml:"json_paths"`
	// DropOnCacheErr drops the message when hashing or the cache fails.
	DropOnCacheErr bool `json:"drop_on_err" yaml:"drop_on_err"`
}
// NewDedupeConfig returns a DedupeConfig with default values.
func NewDedupeConfig() DedupeConfig {
	return DedupeConfig{
		Cache:          "",
		HashType:       "none",
		Parts:          []int{0}, // only consider the 1st part
		JSONPaths:      []string{},
		DropOnCacheErr: true,
	}
}
//------------------------------------------------------------------------------
// hasher accumulates written bytes and exposes the resulting digest (or the
// raw bytes when no hashing is applied).
type hasher interface {
	Write(str []byte) (int, error)
	Bytes() []byte
}

// hasherFunc constructs a fresh hasher per message.
type hasherFunc func() hasher
//------------------------------------------------------------------------------
// xxhashHasher adapts an xxhash 64-bit hasher to the hasher interface.
type xxhashHasher struct {
	h *xxhash.XXHash64
}

// Write feeds bytes into the underlying xxhash state.
func (x *xxhashHasher) Write(str []byte) (int, error) {
	return x.h.Write(str)
}

// Bytes returns the current 64-bit digest rendered as a decimal string.
func (x *xxhashHasher) Bytes() []byte {
	return []byte(strconv.FormatUint(x.h.Sum64(), 10))
}
//------------------------------------------------------------------------------
// strToHasher maps a configured hash type name to a hasher constructor.
// "none" yields a pass-through byte buffer; "xxhash" yields a 64-bit xxhash.
// An unrecognised name returns an error.
func strToHasher(str string) (hasherFunc, error) {
	switch str {
	case "none":
		return func() hasher {
			return bytes.NewBuffer(nil)
		}, nil
	case "xxhash":
		return func() hasher {
			return &xxhashHasher{
				h: xxhash.New64(),
			}
		}, nil
	}
	return nil, fmt.Errorf("hash type not recognised: %v", str)
}
//------------------------------------------------------------------------------
// Dedupe is a processor that hashes each message and checks if the hash is
// already present in the cache, dropping duplicates.
type Dedupe struct {
	conf  Config
	log   log.Modular
	stats metrics.Type

	// cache stores previously seen digests.
	cache      types.Cache
	hasherFunc hasherFunc
	// jPaths are optional JSON dot paths restricting what gets hashed.
	jPaths []string

	mCount     metrics.StatCounter
	mErrJSON   metrics.StatCounter
	mDropped   metrics.StatCounter
	mErrHash   metrics.StatCounter
	mErrCache  metrics.StatCounter
	mSent      metrics.StatCounter
	mSentParts metrics.StatCounter
}
// NewDedupe returns a Dedupe processor. It fails when the configured cache
// resource cannot be resolved or the hash type is not recognised.
func NewDedupe(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	c, err := mgr.GetCache(conf.Dedupe.Cache)
	if err != nil {
		return nil, err
	}
	hFunc, err := strToHasher(conf.Dedupe.HashType)
	if err != nil {
		return nil, err
	}
	return &Dedupe{
		conf:  conf,
		log:   log.NewModule(".processor.dedupe"),
		stats: stats,

		cache:      c,
		hasherFunc: hFunc,
		jPaths:     conf.Dedupe.JSONPaths,

		mCount:     stats.GetCounter("processor.dedupe.count"),
		mErrJSON:   stats.GetCounter("processor.dedupe.error.json_parse"),
		mDropped:   stats.GetCounter("processor.dedupe.dropped"),
		mErrHash:   stats.GetCounter("processor.dedupe.error.hash"),
		mErrCache:  stats.GetCounter("processor.dedupe.error.cache"),
		mSent:      stats.GetCounter("processor.dedupe.sent"),
		mSentParts: stats.GetCounter("processor.dedupe.parts.sent"),
	}, nil
}
//------------------------------------------------------------------------------
// ProcessMessage deduplicates a message by hashing the configured parts (or
// selected JSON fields of those parts) and adding the digest to the cache.
// When the digest is already cached the message is dropped; otherwise it is
// passed through unchanged. Hash/JSON/cache failures drop or propagate the
// message depending on DropOnCacheErr.
func (d *Dedupe) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	d.mCount.Incr(1)

	extractedHash := false
	hasher := d.hasherFunc()

	for _, index := range d.conf.Dedupe.Parts {
		if len(d.jPaths) > 0 {
			// Attempt to add JSON fields from part to hash.
			jPart, err := msg.GetJSON(index)
			if err != nil {
				// NOTE(review): errors here increment the dropped
				// counter even though the message may still pass
				// through below — confirm intended.
				d.mErrJSON.Incr(1)
				d.mDropped.Incr(1)
				d.log.Errorf("JSON Parse error: %v\n", err)
				continue
			}
			var gPart *gabs.Container
			if gPart, err = gabs.Consume(jPart); err != nil {
				d.mErrJSON.Incr(1)
				d.mDropped.Incr(1)
				d.log.Errorf("JSON Parse error: %v\n", err)
				continue
			}
			for _, jPath := range d.jPaths {
				gTarget := gPart.Path(jPath)
				if gTarget.Data() == nil {
					// Missing path: simply contributes nothing to the hash.
					continue
				}

				var hashBytes []byte
				switch t := gTarget.Data().(type) {
				case string:
					// Hash raw string contents rather than the JSON-quoted form.
					hashBytes = []byte(t)
				default:
					hashBytes = gTarget.Bytes()
				}

				if _, err := hasher.Write(hashBytes); nil != err {
					d.mErrHash.Incr(1)
					d.mDropped.Incr(1)
					d.log.Errorf("Hash error: %v\n", err)
				} else {
					extractedHash = true
				}
			}
		} else {
			// Attempt to add whole part to hash.
			if partBytes := msg.Get(index); partBytes != nil {
				if _, err := hasher.Write(msg.Get(index)); nil != err {
					d.mErrHash.Incr(1)
					d.mDropped.Incr(1)
					d.log.Errorf("Hash error: %v\n", err)
				} else {
					extractedHash = true
				}
			}
		}
	}

	if !extractedHash {
		// Nothing was hashed (no target data found): treat as an error
		// condition governed by DropOnCacheErr.
		if d.conf.Dedupe.DropOnCacheErr {
			d.mDropped.Incr(1)
			return nil, types.NewSimpleResponse(nil)
		}
	} else if err := d.cache.Add(string(hasher.Bytes()), []byte{'t'}); err != nil {
		if err != types.ErrKeyAlreadyExists {
			// Cache infrastructure failure (not a duplicate).
			d.mErrCache.Incr(1)
			d.log.Errorf("Cache error: %v\n", err)
			if d.conf.Dedupe.DropOnCacheErr {
				d.mDropped.Incr(1)
				return nil, types.NewSimpleResponse(nil)
			}
		} else {
			// Digest already cached: this message is a duplicate.
			d.mDropped.Incr(1)
			return nil, types.NewSimpleResponse(nil)
		}
	}

	d.mSent.Incr(1)
	d.mSentParts.Incr(int64(msg.Len()))
	msgs := [1]types.Message{msg}
	return msgs[:], nil
}
//------------------------------------------------------------------------------ | lib/processor/dedupe.go | 0.704262 | 0.611179 | dedupe.go | starcoder |
// Package ngt provides implementation of Go API for https://github.com/yahoojapan/NGT
package ngt
/*
#cgo LDFLAGS: -lngt
#include <NGT/Capi.h>
*/
import "C"
import (
"strings"
"github.com/kpango/fastime"
"github.com/vdaas/vald/internal/errors"
)
// Option configures an ngt instance at construction time.
type Option func(*ngt) error

var (
	// DefaultPoolSize is the default search worker pool size.
	DefaultPoolSize = uint32(10000)
	// DefaultRadius is the default search radius (-1 means unlimited).
	DefaultRadius = float32(-1.0)
	// DefaultEpsilon is the default search range expansion coefficient.
	DefaultEpsilon = float32(0.01)

	// defaultOptions are applied before user-supplied options.
	defaultOptions = []Option{
		WithIndexPath("/tmp/ngt-" + string(fastime.FormattedNow())),
		WithDimension(minimumDimensionSize),
		WithDefaultRadius(DefaultRadius),
		WithDefaultEpsilon(DefaultEpsilon),
		WithDefaultPoolSize(DefaultPoolSize),
		WithCreationEdgeSize(10),
		WithSearchEdgeSize(40),
		WithObjectType(Float),
		WithDistanceType(L2),
		WithBulkInsertChunkSize(100),
	}
)
// WithInMemoryMode returns an Option that toggles the in-memory
// (non-persistent) index mode.
func WithInMemoryMode(flg bool) Option {
	return func(n *ngt) error {
		n.inMemory = flg
		return nil
	}
}
// WithIndexPath returns an Option that sets the on-disk index path.
// An empty path is ignored, leaving the previous value in place.
func WithIndexPath(path string) Option {
	return func(n *ngt) error {
		if len(path) == 0 {
			return nil
		}
		n.idxPath = path
		return nil
	}
}
// WithBulkInsertChunkSize returns an Option that sets the chunk size used
// when bulk-inserting vectors.
func WithBulkInsertChunkSize(size int) Option {
	return func(n *ngt) error {
		n.bulkInsertChunkSize = size
		return nil
	}
}
// WithDimension returns an Option that sets the vector dimension. The size
// must lie within [minimumDimensionSize, ngtVectorDimensionSizeLimit]; values
// outside that range are rejected with an error, as is a failure of the
// underlying NGT property call.
func WithDimension(size int) Option {
	return func(n *ngt) error {
		if size > ngtVectorDimensionSizeLimit || size < minimumDimensionSize {
			return errors.ErrInvalidDimensionSize(size, ngtVectorDimensionSizeLimit)
		}

		if C.ngt_set_property_dimension(n.prop, C.int32_t(size), n.ebuf) == ErrorCode {
			return errors.ErrFailedToSetDimension(n.newGoError(n.ebuf))
		}
		n.dimension = C.int32_t(size)
		return nil
	}
}
// WithDistanceTypeByString returns an Option that sets the distance type
// from its string name. The name is normalised by removing '-', '_' and
// spaces and lowercasing before matching; unknown names fall through to the
// zero distanceType, which WithDistanceType will reject.
func WithDistanceTypeByString(dt string) Option {
	var d distanceType
	switch strings.NewReplacer("-", "", "_", "", " ", "").Replace(strings.ToLower(dt)) {
	case "l1":
		d = L1
	case "l2":
		d = L2
	case "angle", "ang":
		d = Angle
	case "hamming", "ham":
		d = Hamming
	case "cosine", "cos":
		d = Cosine
	case "normalizedangle", "normalized angle", "normalized ang", "nang", "nangle":
		d = NormalizedAngle
	case "normalizedcosine", "normalized cosine", "normalized cos", "ncos", "ncosine":
		d = NormalizedCosine
	case "jaccard", "jac":
		d = Jaccard
	}
	return WithDistanceType(d)
}
// WithDistanceType returns an Option that applies the given distance type to
// the underlying NGT property set. Unsupported values yield
// ErrUnsupportedDistanceType; a failing NGT call yields a typed error
// wrapping the C error buffer.
func WithDistanceType(t distanceType) Option {
	return func(n *ngt) error {
		switch t {
		case L1:
			if C.ngt_set_property_distance_type_l1(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "L1")
			}
		case L2:
			if C.ngt_set_property_distance_type_l2(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "L2")
			}
		case Angle:
			if C.ngt_set_property_distance_type_angle(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "Angle")
			}
		case Hamming:
			if C.ngt_set_property_distance_type_hamming(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "Hamming")
			}
		case Cosine:
			if C.ngt_set_property_distance_type_cosine(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "Cosine")
			}
		case NormalizedAngle:
			if C.ngt_set_property_distance_type_normalized_angle(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "NormalizedAngle")
			}
		case NormalizedCosine:
			if C.ngt_set_property_distance_type_normalized_cosine(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "NormalizedCosine")
			}
		case Jaccard:
			if C.ngt_set_property_distance_type_jaccard(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetDistanceType(n.newGoError(n.ebuf), "Jaccard")
			}
		default:
			return errors.ErrUnsupportedDistanceType
		}
		return nil
	}
}
// WithObjectTypeByString returns an Option that sets the object type from
// its string name. The name is normalised (separators removed, lowercased,
// "double" treated as "float") before matching; unknown names fall through
// to the zero objectType, which WithObjectType will reject.
func WithObjectTypeByString(ot string) Option {
	var o objectType
	switch strings.NewReplacer("-", "", "_", "", " ", "", "double", "float").Replace(strings.ToLower(ot)) {
	case "uint8":
		o = Uint8
	case "float":
		o = Float
	}
	return WithObjectType(o)
}
// WithObjectType returns an Option that applies the given vector element
// type (Uint8 or Float) to the underlying NGT property set. Unsupported
// values yield ErrUnsupportedObjectType.
func WithObjectType(t objectType) Option {
	return func(n *ngt) error {
		switch t {
		case Uint8:
			if C.ngt_set_property_object_type_integer(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetObjectType(n.newGoError(n.ebuf), "Uint8")
			}
		case Float:
			if C.ngt_set_property_object_type_float(n.prop, n.ebuf) == ErrorCode {
				return errors.ErrFailedToSetObjectType(n.newGoError(n.ebuf), "Float")
			}
		default:
			return errors.ErrUnsupportedObjectType
		}
		n.objectType = t
		return nil
	}
}
// WithCreationEdgeSize returns an Option that sets the number of edges
// created per node at index-build time.
func WithCreationEdgeSize(size int) Option {
	return func(n *ngt) error {
		if C.ngt_set_property_edge_size_for_creation(n.prop, C.int16_t(size), n.ebuf) == ErrorCode {
			return errors.ErrFailedToSetCreationEdgeSize(n.newGoError(n.ebuf))
		}
		return nil
	}
}
// WithSearchEdgeSize returns an Option that sets the number of edges
// traversed per node at search time.
func WithSearchEdgeSize(size int) Option {
	return func(n *ngt) error {
		if C.ngt_set_property_edge_size_for_search(n.prop, C.int16_t(size), n.ebuf) == ErrorCode {
			return errors.ErrFailedToSetSearchEdgeSize(n.newGoError(n.ebuf))
		}
		return nil
	}
}
// WithDefaultPoolSize returns an Option that sets the default search pool
// size. A zero value is ignored, keeping the previous setting.
func WithDefaultPoolSize(poolSize uint32) Option {
	return func(n *ngt) error {
		if poolSize != 0 {
			n.poolSize = poolSize
		}
		return nil
	}
}
// WithDefaultRadius returns an Option that sets the default search radius.
// A zero value is ignored, keeping the previous setting (note: negative
// values are meaningful to NGT and are accepted).
func WithDefaultRadius(radius float32) Option {
	return func(n *ngt) error {
		if radius != 0 {
			n.radius = radius
		}
		return nil
	}
}
// WithDefaultEpsilon returns an Option that sets the default search range
// expansion coefficient. A zero value is ignored, keeping the previous
// setting.
func WithDefaultEpsilon(epsilon float32) Option {
	return func(n *ngt) error {
		if epsilon != 0 {
			n.epsilon = epsilon
		}
		return nil
	}
}
// Package aggregated constructs a checkin struct from a given batterystats proto. The checkin struct contains data categorized by each metric.
package aggregated
import (
"fmt"
"log"
"sort"
"strings"
"time"
"github.com/golang/protobuf/proto"
"github.com/google/battery-historian/bugreportutils"
"github.com/google/battery-historian/checkinparse"
"github.com/google/battery-historian/historianutils"
bspb "github.com/google/battery-historian/pb/batterystats_proto"
)
const (
	// msecsInMinute is the number of milliseconds in a minute, used to
	// convert between the two units.
	msecsInMinute = 60000
)
// PkgAndSub splits a metric name of the form "pkg:sub" into its package name
// and metric sub name, trimming surrounding whitespace from both. Only the
// first colon delimits; any later colons remain part of the sub name. When
// no colon is present the whole (trimmed) input is the package name and the
// sub name is empty.
func PkgAndSub(t string) (string, string) {
	if i := strings.Index(t, ":"); i >= 0 {
		return strings.TrimSpace(t[:i]), strings.TrimSpace(t[i+1:])
	}
	return strings.TrimSpace(t), ""
}
// MDuration holds a duration metric value and its classification level.
type MDuration struct {
	V time.Duration
	L string // Low, Medium, High
}
// MFloat32 holds a float metric value and its classification level.
type MFloat32 struct {
	V float32
	L string // Low, Medium, High
}
// ActivityData contains count and duration stats about activity on the device.
// The UID field will be populated (non-zero) if the activity is connected to a specific UID.
type ActivityData struct {
	Name  string
	Title string
	UID   int32

	Count        float32
	CountPerHour float32
	CountLevel   string // Low, Medium, High

	// MaxDuration is the single longest duration of this ActivityData. This could sometimes be
	// greater than Duration (eg. in the case of a wakelock whose time was split with another
	// wakelock -- 2 wakelocks held for an hour each -> Duration would be 30 minutes for each,
	// but MaxDuration would be one hour).
	MaxDuration time.Duration
	Duration    time.Duration
	// TotalDuration is used to track the total duration of metrics. This may be different from
	// Duration for metrics such as wakelocks, which will have Duration set to the apportioned
	// duration.
	TotalDuration time.Duration
	// SecondsPerHr based on TotalDuration, if available, otherwise, based on Duration.
	SecondsPerHr  float32
	DurationLevel string // Low, Medium, High

	Level string // The maximum of CountLevel and DurationLevel.
}
// activityData converts a WakelockInfo into an ActivityData.
// Per-hour rates are only populated when realtime is positive.
func activityData(wi *checkinparse.WakelockInfo, realtime time.Duration) ActivityData {
	ad := ActivityData{
		Name:          wi.Name,
		UID:           wi.UID,
		Count:         wi.Count,
		Duration:      wi.Duration,
		MaxDuration:   wi.MaxDuration,
		TotalDuration: wi.TotalDuration,
	}
	if realtime <= 0 {
		return ad
	}
	hrs := float32(realtime.Hours())
	ad.CountPerHour = wi.Count / hrs
	// Prefer the total (unapportioned) duration for the per-hour rate when present.
	d := wi.Duration
	if wi.TotalDuration > 0 {
		d = wi.TotalDuration
	}
	ad.SecondsPerHr = float32(d.Seconds()) / hrs
	return ad
}
// ANRCrashData contains ANR and crash data for a single app.
type ANRCrashData struct {
	Name                 string
	UID                  int32
	ANRCount, CrashCount int32
}

// byCrashThenANR sorts ANRCrashData by the number of crashes, then by number of ANR, both in descending order.
type byCrashThenANR []*ANRCrashData

func (d byCrashThenANR) Len() int      { return len(d) }
func (d byCrashThenANR) Swap(i, j int) { d[i], d[j] = d[j], d[i] }

// Less orders by CrashCount descending, breaking ties on ANRCount descending.
func (d byCrashThenANR) Less(i, j int) bool {
	if d[i].CrashCount != d[j].CrashCount {
		return d[i].CrashCount > d[j].CrashCount
	}
	return d[i].ANRCount > d[j].ANRCount
}
// CPUData contains data about app CPU usage.
type CPUData struct {
	Name            string // App name.
	UID             int32
	UserTime        time.Duration
	SystemTime      time.Duration
	PowerPct        float32 // Percentage of device power used.
	UserTimeLevel   string  // Low, Medium, High
	SystemTimeLevel string  // Low, Medium, High
	Level           string  // The maximum of UserTimeLevel and SystemTimeLevel.
}

// CPUSecs returns the total CPU duration (user plus system time) in seconds.
func (a CPUData) CPUSecs() float64 {
	user := a.UserTime.Seconds()
	system := a.SystemTime.Seconds()
	return user + system
}
// ByCPUUsage sorts CPUData by the power usage in descending order. In case of similar
// power usage, sort according to cpu time.
type ByCPUUsage []CPUData

func (a ByCPUUsage) Len() int      { return len(a) }
func (a ByCPUUsage) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

func (a ByCPUUsage) Less(i, j int) bool {
	// Power drains within 0.01 percentage points are treated as equal;
	// fall back to comparing total CPU time in that case.
	if historianutils.AbsFloat32(a[i].PowerPct-a[j].PowerPct) < 0.01 {
		return a[i].CPUSecs() > a[j].CPUSecs()
	}
	return a[i].PowerPct > a[j].PowerPct
}
// RateData contains total count and rate for various app metrics.
type RateData struct {
	Name       string
	UID        int32
	Count      float32
	CountPerHr float32
	CountLevel string // Low, Medium, High
}

// byCount orders RateData from the highest total count down to the lowest.
type byCount []*RateData

func (a byCount) Len() int           { return len(a) }
func (a byCount) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a byCount) Less(i, j int) bool { return a[i].Count > a[j].Count }
// PowerUseData contains percentage battery consumption for apps and system elements.
type PowerUseData struct {
	Name    string
	UID     int32
	Percent float32 // Percentage of total consumption
}

// byPercent sorts applications by percentage battery used.
type byPercent []*PowerUseData

func (a byPercent) Len() int      { return len(a) }
func (a byPercent) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

// Less sorts by decreasing percentage then increasing alphabetic order to break ties.
// OVERCOUNTED and UNACCOUNTED entries are always sorted to the beginning.
func (a byPercent) Less(i, j int) bool {
	special := func(name string) bool {
		return name == bspb.BatteryStats_System_PowerUseItem_OVERCOUNTED.String() ||
			name == bspb.BatteryStats_System_PowerUseItem_UNACCOUNTED.String()
	}
	if special(a[i].Name) {
		return true
	}
	if special(a[j].Name) {
		return false
	}
	if a[i].Percent != a[j].Percent {
		return a[i].Percent > a[j].Percent
	}
	return a[i].Name < a[j].Name
}
// NetworkTrafficData contains the total amount of bytes transferred over mobile and wifi.
type NetworkTrafficData struct {
	Name                   string
	UID                    int32
	WifiMegaBytes          float32
	MobileMegaBytes        float32
	WifiMegaBytesPerHour   float32
	MobileMegaBytesPerHour float32
	WifiLevel              string // Low, medium, high
	MobileLevel            string // Low, medium, high
	Level                  string // The maximum of WifiLevel and MobileLevel.
}

// ByMobileBytes sorts NetworkTrafficData in decreasing order of bytes transferred over mobile.
type ByMobileBytes []NetworkTrafficData

func (n ByMobileBytes) Len() int      { return len(n) }
func (n ByMobileBytes) Swap(i, j int) { n[i], n[j] = n[j], n[i] }
func (n ByMobileBytes) Less(i, j int) bool {
	return n[j].MobileMegaBytes < n[i].MobileMegaBytes
}

// ByWifiBytes sorts NetworkTrafficData in decreasing order of bytes transferred over wifi.
type ByWifiBytes []NetworkTrafficData

func (n ByWifiBytes) Len() int      { return len(n) }
func (n ByWifiBytes) Swap(i, j int) { n[i], n[j] = n[j], n[i] }
func (n ByWifiBytes) Less(i, j int) bool {
	return n[j].WifiMegaBytes < n[i].WifiMegaBytes
}
// min returns the smaller of x and y.
func min(x, y int) int {
	if y < x {
		return y
	}
	return x
}
// AppData contains aggregated values for some app metrics.
// One AppData is built per app entry in the batterystats proto.
type AppData struct {
	Name string // App name (or a synthesized UNKNOWN_<uid> placeholder).
	UID  int32
	Alarms           RateData           // Wakeup alarm counts (from Apk wakeups).
	CPU              CPUData            // User/system CPU time and power percentage.
	GPSUse           ActivityData       // GPS sensor usage.
	ScheduledJobs    ActivityData       // JobScheduler job activity.
	Network          NetworkTrafficData // Wifi/mobile bytes transferred.
	PartialWakelocks ActivityData       // Userspace partial wakelock activity.
	Syncs            ActivityData       // SyncManager task activity.
	WifiScan         ActivityData       // Wifi scan count and duration.
}
// stateData contains information about the different state levels an app can be in.
// Each field holds the time spent in the corresponding process state.
type stateData struct {
	Name              string
	UID               int32
	Background        MDuration
	Cached            MDuration
	Foreground        MDuration
	ForegroundService MDuration
	Top               MDuration
	TopSleeping       MDuration
}
// byState sorts stateData in descending order of state duration, in order of state precedence
// (ie. top > foreground service > top sleeping > foreground > background > cached).
type byState []stateData

func (s byState) Len() int      { return len(s) }
func (s byState) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

func (s byState) Less(i, j int) bool {
	a, b := s[i], s[j]
	// Walk the states in precedence order; the first unequal duration decides.
	pairs := [][2]time.Duration{
		{a.Top.V, b.Top.V},
		{a.ForegroundService.V, b.ForegroundService.V},
		{a.TopSleeping.V, b.TopSleeping.V},
		{a.Foreground.V, b.Foreground.V},
		{a.Background.V, b.Background.V},
		{a.Cached.V, b.Cached.V},
	}
	for _, p := range pairs {
		if p[0] != p[1] {
			return p[0] > p[1]
		}
	}
	return false
}
// Checkin contains the aggregated batterystats data for a bugreport.
// Duration fields are converted from the proto's *_msec values; the M* wrapper
// types additionally carry a Low/Medium/High classification level.
type Checkin struct {
	// Device/build identification.
	Device           string
	Build            string
	BuildFingerprint string
	ReportVersion    int32
	// Discharge totals, in battery-percentage points or mAh as noted.
	ScreenOffDischargePoints float32
	ScreenOnDischargePoints  float32
	ActualDischarge          float32 // mAh
	EstimatedDischarge       float32 // mAh
	WifiDischargePoints      float32
	BluetoothDischargePoints float32
	ModemDischargePoints     float32
	// System-wide times and their share of battery realtime.
	Realtime                            time.Duration
	ScreenOffRealtime                   time.Duration
	Uptime                              MDuration
	ScreenOffUptime                     MDuration
	ScreenOffUptimePercentage           float32
	ScreenOnTime                        MDuration
	ScreenOnTimePercentage              float32
	PartialWakelockTime                 MDuration
	PartialWakelockTimePercentage       float32
	KernelOverheadTime                  MDuration
	KernelOverheadTimePercentage        float32
	SignalScanningTime                  MDuration
	SignalScanningTimePercentage        float32
	MobileActiveTime                    MDuration
	MobileActiveTimePercentage          float32
	WifiOnTime                          MDuration
	WifiOnTimePercentage                float32
	WifiIdleTime                        MDuration
	WifiTransferTime                    MDuration // tx + rx
	WifiTransferTimePercentage          float32
	BluetoothIdleTime                   MDuration
	BluetoothTransferTime               MDuration // tx + rx
	BluetoothTransferTimePercentage     float32
	ModemIdleTime                       MDuration
	ModemTransferTime                   MDuration // tx + rx
	ModemTransferTimePercentage         float32
	PhoneCallTime                       MDuration
	PhoneCallTimePercentage             float32
	DeviceIdlingTime                    MDuration
	DeviceIdlingTimePercentage          float32
	FullWakelockTime                    MDuration
	FullWakelockTimePercentage          float32
	InteractiveTime                     MDuration
	InteractiveTimePercentage           float32
	DeviceIdleModeEnabledTime           MDuration
	DeviceIdleModeEnabledTimePercentage float32
	// Discharge and transfer rates, normalized per hour of realtime.
	ScreenOffDischargeRatePerHr MFloat32
	ScreenOnDischargeRatePerHr  MFloat32
	MobileKiloBytesPerHr        MFloat32
	WifiKiloBytesPerHr          MFloat32
	WifiDischargeRatePerHr      MFloat32
	BluetoothDischargeRatePerHr MFloat32
	ModemDischargeRatePerHr     MFloat32
	// Aggregated across all apps/entries.
	AggCameraUse            ActivityData
	AggFlashlightUse        ActivityData
	AggGPSUse               ActivityData
	AggKernelWakelocks      ActivityData
	AggScheduledJobs        ActivityData
	AggSyncTasks            ActivityData
	AggWakeupReasons        ActivityData
	AggWifiScanActivity     ActivityData
	AggWifiFullLockActivity ActivityData
	AggAppWakeups           RateData
	AggCPUUsage             CPUData
	// Each element corresponds to a single entry/app.
	UserspaceWakelocks    []ActivityData
	KernelWakelocks       []ActivityData
	ScheduledJobs         []ActivityData
	SyncTasks             []ActivityData
	WakeupReasons         []ActivityData
	GPSUse                []ActivityData
	TopMobileActiveApps   []ActivityData
	WifiScanActivity      []ActivityData
	WifiFullLockActivity  []ActivityData
	CameraUse             []ActivityData
	FlashlightUse         []ActivityData
	TopMobileTrafficApps  []NetworkTrafficData
	TopWifiTrafficApps    []NetworkTrafficData
	DevicePowerEstimates  []PowerUseData
	AppWakeups            []RateData
	AppWakeupsByAlarmName []RateData
	ANRAndCrash           []ANRCrashData
	CPUUsage              []CPUData
	AppStates             []stateData
	AggregatedApps        []AppData
	// App-wide totals and rates.
	TotalAppGPSUseTimePerHour         float32
	TotalAppCPUPowerPct               float32
	BluetoothOnTime                   MDuration
	BluetoothOnTimePercentage         float32
	LowPowerModeEnabledTime           MDuration
	LowPowerModeEnabledTimePercentage float32
	TotalAppANRCount                  int32
	TotalAppANRRate                   float32
	TotalAppCrashCount                int32
	TotalAppCrashRate                 float32
	TotalAppScheduledJobsPerHr        float32
	TotalAppSyncsPerHr                float32
	TotalAppWakeupsPerHr              float32
	TotalAppFlashlightUsePerHr        float32
	TotalAppCameraUsePerHr            float32
	ConnectivityChanges               float32
	// Per-state maps: keyed by the proto enum name, values are minutes per hour of realtime.
	ScreenBrightness   map[string]float32
	SignalStrength     map[string]float32
	WifiSignalStrength map[string]float32
	BluetoothState     map[string]float32
	DataConnection     map[string]float32
}
// sumWakelockInfo sums the Count and Duration fields of the given WakelockInfos.
// CountPerHour and SecondsPerHour will be filled in if r is not 0.
func sumWakelockInfo(d []*checkinparse.WakelockInfo, r time.Duration) ActivityData {
	var total ActivityData
	for _, w := range d {
		total.Count += w.Count
		total.Duration += w.Duration
	}
	if r != 0 {
		hrs := float32(r.Hours())
		total.CountPerHour = total.Count / hrs
		total.SecondsPerHr = float32(total.Duration.Seconds()) / hrs
	}
	return total
}
// ParseCheckinData creates a Checkin struct from the given aggregated battery stats.
func ParseCheckinData(c *bspb.BatteryStats) Checkin {
if c == nil {
return Checkin{}
}
realtime := time.Duration(c.System.Battery.GetBatteryRealtimeMsec()) * time.Millisecond
if realtime < 0 {
log.Printf("realtime was negative: %v\n", realtime)
}
out := Checkin{
Device: c.Build.GetDevice(),
Build: c.Build.GetBuildId(),
BuildFingerprint: c.Build.GetFingerprint(),
ReportVersion: c.GetReportVersion(),
Realtime: realtime,
ScreenOffRealtime: time.Duration(c.System.Battery.GetScreenOffRealtimeMsec()) * time.Millisecond,
ScreenOffDischargePoints: c.System.BatteryDischarge.GetScreenOff(),
ScreenOnDischargePoints: c.System.BatteryDischarge.GetScreenOn(),
EstimatedDischarge: c.System.PowerUseSummary.GetComputedPowerMah(),
ActualDischarge: (c.System.PowerUseSummary.GetMinDrainedPowerMah() + c.System.PowerUseSummary.GetMaxDrainedPowerMah()) / 2,
// Uptime is the same as screen-off uptime + screen on time
Uptime: MDuration{
V: (time.Duration(c.System.Battery.GetBatteryUptimeMsec()) * time.Millisecond),
},
ScreenOffUptime: MDuration{
V: (time.Duration(c.System.Battery.GetScreenOffUptimeMsec()) * time.Millisecond),
},
ScreenOnTime: MDuration{
V: (time.Duration(c.System.Misc.GetScreenOnTimeMsec()) * time.Millisecond),
},
PartialWakelockTime: MDuration{
V: (time.Duration(c.System.Misc.GetPartialWakelockTimeMsec()) * time.Millisecond),
},
KernelOverheadTime: MDuration{
V: (time.Duration(c.System.Battery.GetScreenOffUptimeMsec()-c.System.Misc.GetPartialWakelockTimeMsec()) * time.Millisecond),
},
SignalScanningTime: MDuration{
V: (time.Duration(c.System.SignalScanningTime.GetTimeMsec()) * time.Millisecond),
},
MobileActiveTime: MDuration{
V: (time.Duration(c.System.Misc.GetMobileActiveTimeMsec()) * time.Millisecond),
},
PhoneCallTime: MDuration{
V: (time.Duration(c.System.Misc.GetPhoneOnTimeMsec()) * time.Millisecond),
},
WifiOnTime: MDuration{
V: (time.Duration(c.System.Misc.GetWifiOnTimeMsec()) * time.Millisecond),
},
DeviceIdleModeEnabledTime: MDuration{
V: (time.Duration(c.System.Misc.GetDeviceIdleModeEnabledTimeMsec()) * time.Millisecond),
},
DeviceIdlingTime: MDuration{
V: (time.Duration(c.System.Misc.GetDeviceIdlingTimeMsec()) * time.Millisecond),
},
FullWakelockTime: MDuration{
V: (time.Duration(c.System.Misc.GetFullWakelockTimeMsec()) * time.Millisecond),
},
InteractiveTime: MDuration{
V: (time.Duration(c.System.Misc.GetInteractiveTimeMsec()) * time.Millisecond),
},
BluetoothOnTime: MDuration{
V: (time.Duration(c.System.Misc.GetBluetoothOnTimeMsec()) * time.Millisecond),
},
LowPowerModeEnabledTime: MDuration{
V: (time.Duration(c.System.Misc.GetLowPowerModeEnabledTimeMsec()) * time.Millisecond),
},
ConnectivityChanges: c.System.Misc.GetConnectivityChanges(),
}
if realtime > 0 {
out.ScreenOffUptimePercentage = (float32(out.ScreenOffUptime.V) / float32(realtime)) * 100
out.ScreenOnTimePercentage = (float32(out.ScreenOnTime.V) / float32(realtime)) * 100
out.PartialWakelockTimePercentage = (float32(out.PartialWakelockTime.V) / float32(realtime)) * 100
out.KernelOverheadTimePercentage = (float32(out.KernelOverheadTime.V) / float32(realtime)) * 100
out.SignalScanningTimePercentage = (float32(out.SignalScanningTime.V) / float32(realtime)) * 100
out.MobileActiveTimePercentage = (float32(out.MobileActiveTime.V) / float32(realtime)) * 100
out.FullWakelockTimePercentage = (float32(out.FullWakelockTime.V) / float32(realtime)) * 100
out.PhoneCallTimePercentage = (float32(out.PhoneCallTime.V) / float32(realtime)) * 100
out.DeviceIdleModeEnabledTimePercentage = (float32(out.DeviceIdleModeEnabledTime.V) / float32(realtime)) * 100
out.DeviceIdlingTimePercentage = (float32(out.DeviceIdlingTime.V) / float32(realtime)) * 100
out.InteractiveTimePercentage = (float32(out.InteractiveTime.V) / float32(realtime)) * 100
out.BluetoothOnTimePercentage = (float32(out.BluetoothOnTime.V) / float32(realtime)) * 100
out.LowPowerModeEnabledTimePercentage = (float32(out.LowPowerModeEnabledTime.V) / float32(realtime)) * 100
out.MobileKiloBytesPerHr = MFloat32{V: (c.System.GlobalNetwork.GetMobileBytesRx() + c.System.GlobalNetwork.GetMobileBytesTx()) / (1024 * float32(realtime.Hours()))}
out.WifiKiloBytesPerHr = MFloat32{V: (c.System.GlobalNetwork.GetWifiBytesRx() + c.System.GlobalNetwork.GetWifiBytesTx()) / (1024 * float32(realtime.Hours()))}
}
bCapMah := c.GetSystem().GetPowerUseSummary().GetBatteryCapacityMah()
if bCapMah < 0 {
log.Printf("battery capacity mAh was negative: %v\n", bCapMah)
}
if c.GetReportVersion() >= 14 {
out.WifiOnTime = MDuration{V: time.Duration(c.System.GlobalWifi.GetWifiOnTimeMsec()) * time.Millisecond}
out.WifiIdleTime = MDuration{V: time.Duration(c.System.GlobalWifi.GetWifiIdleTimeMsec()) * time.Millisecond}
out.WifiTransferTime = MDuration{V: time.Duration(c.System.GlobalWifi.GetWifiRxTimeMsec()+c.System.GlobalWifi.GetWifiTxTimeMsec()) * time.Millisecond}
if realtime > 0 {
out.WifiOnTimePercentage = (float32(out.WifiOnTime.V) / float32(realtime)) * 100
out.WifiTransferTimePercentage = (float32(out.WifiTransferTime.V) / float32(realtime)) * 100
}
if bCapMah > 0 {
out.WifiDischargePoints = 100 * c.System.GlobalWifi.GetWifiPowerMah() / bCapMah
if realtime > 0 {
out.WifiDischargeRatePerHr = MFloat32{
V: out.WifiDischargePoints / float32(realtime.Hours()),
}
}
}
out.BluetoothIdleTime = MDuration{V: time.Duration(c.System.GlobalBluetooth.GetBluetoothIdleTimeMsec()) * time.Millisecond}
out.BluetoothTransferTime = MDuration{V: time.Duration(c.System.GlobalBluetooth.GetBluetoothRxTimeMsec()+c.System.GlobalBluetooth.GetBluetoothTxTimeMsec()) * time.Millisecond}
if realtime > 0 {
out.BluetoothTransferTimePercentage = (float32(out.BluetoothTransferTime.V) / float32(realtime)) * 100
}
if bCapMah > 0 {
out.BluetoothDischargePoints = 100 * c.System.GlobalBluetooth.GetBluetoothPowerMah() / bCapMah
if realtime > 0 {
out.BluetoothDischargeRatePerHr = MFloat32{
V: out.BluetoothDischargePoints / float32(realtime.Hours()),
}
}
}
}
if c.GetReportVersion() >= 17 {
sumCtrlrTransferTime := func(ctrlr *bspb.BatteryStats_ControllerActivity) time.Duration {
if ctrlr == nil {
return 0
}
sum := ctrlr.GetRxTimeMsec()
for _, tx := range ctrlr.Tx {
sum += tx.GetTimeMsec()
}
return time.Duration(sum) * time.Millisecond
}
out.BluetoothIdleTime = MDuration{V: time.Duration(c.System.GlobalBluetoothController.GetIdleTimeMsec()) * time.Millisecond}
out.ModemIdleTime = MDuration{V: time.Duration(c.System.GlobalModemController.GetIdleTimeMsec()) * time.Millisecond}
out.WifiIdleTime = MDuration{V: time.Duration(c.System.GlobalWifiController.GetIdleTimeMsec()) * time.Millisecond}
out.BluetoothTransferTime = MDuration{V: sumCtrlrTransferTime(c.System.GlobalBluetoothController)}
out.ModemTransferTime = MDuration{V: sumCtrlrTransferTime(c.System.GlobalModemController)}
out.WifiTransferTime = MDuration{V: sumCtrlrTransferTime(c.System.GlobalWifiController)}
if realtime > 0 {
out.BluetoothTransferTimePercentage = (float32(out.BluetoothTransferTime.V) / float32(realtime)) * 100
out.ModemTransferTimePercentage = (float32(out.ModemTransferTime.V) / float32(realtime)) * 100
out.WifiTransferTimePercentage = (float32(out.WifiTransferTime.V) / float32(realtime)) * 100
}
if bCapMah > 0 {
out.BluetoothDischargePoints = 100 * float32(c.System.GlobalBluetoothController.GetPowerMah()) / bCapMah
out.ModemDischargePoints = 100 * float32(c.System.GlobalModemController.GetPowerMah()) / bCapMah
out.WifiDischargePoints = 100 * float32(c.System.GlobalWifiController.GetPowerMah()) / bCapMah
if realtime > 0 {
out.BluetoothDischargeRatePerHr = MFloat32{
V: out.BluetoothDischargePoints / float32(realtime.Hours()),
}
out.ModemDischargeRatePerHr = MFloat32{
V: out.ModemDischargePoints / float32(realtime.Hours()),
}
out.WifiDischargeRatePerHr = MFloat32{
V: out.WifiDischargePoints / float32(realtime.Hours()),
}
}
}
}
if s := c.System.Battery.GetScreenOffRealtimeMsec(); s > 0 {
out.ScreenOffDischargeRatePerHr = MFloat32{V: 60 * 60 * 1000 * c.System.BatteryDischarge.GetScreenOff() / s}
}
if s := c.System.Misc.GetScreenOnTimeMsec(); s > 0 {
out.ScreenOnDischargeRatePerHr = MFloat32{V: 60 * 60 * 1000 * c.System.BatteryDischarge.GetScreenOn() / s}
}
if realtime > 0 {
// Screen Brightness.
out.ScreenBrightness = make(map[string]float32)
for _, sb := range c.System.ScreenBrightness {
out.ScreenBrightness[sb.GetName().String()] += (sb.GetTimeMsec() / msecsInMinute) / float32(realtime.Hours())
}
// Signal Strength.
out.SignalStrength = make(map[string]float32)
for _, ss := range c.System.SignalStrength {
out.SignalStrength[ss.GetName().String()] += (ss.GetTimeMsec() / msecsInMinute) / float32(realtime.Hours())
}
// Wifi Signal Strength.
out.WifiSignalStrength = make(map[string]float32)
for _, ws := range c.System.WifiSignalStrength {
out.WifiSignalStrength[ws.GetName().String()] += (ws.GetTimeMsec() / msecsInMinute) / float32(realtime.Hours())
}
// Bluetooth States.
out.BluetoothState = make(map[string]float32)
for _, bs := range c.System.BluetoothState {
out.BluetoothState[bs.GetName().String()] += (bs.GetTimeMsec() / msecsInMinute) / float32(realtime.Hours())
}
// DataConnection
out.DataConnection = make(map[string]float32)
for _, dc := range c.System.DataConnection {
out.DataConnection[dc.GetName().String()] += (dc.GetTimeMsec() / msecsInMinute) / float32(realtime.Hours())
}
}
// Kernel Wakelocks.
var kwl []*checkinparse.WakelockInfo
for _, kw := range c.System.KernelWakelock {
if kw.GetName() != "PowerManagerService.WakeLocks" && kw.GetTimeMsec() >= 0.01 {
kwl = append(kwl, &checkinparse.WakelockInfo{
Name: kw.GetName(),
Duration: time.Duration(kw.GetTimeMsec()) * time.Millisecond,
Count: kw.GetCount(),
})
}
}
out.AggKernelWakelocks = sumWakelockInfo(kwl, realtime)
// Sorting Kernel Wakelocks by time.
checkinparse.SortByTime(kwl)
for _, kw := range kwl {
out.KernelWakelocks = append(out.KernelWakelocks, activityData(kw, realtime))
}
// Wakeup Reasons.
var wrl []*checkinparse.WakelockInfo
for _, wr := range c.System.WakeupReason {
if wr.GetTimeMsec() >= 0.01 {
wrl = append(wrl, &checkinparse.WakelockInfo{
Name: wr.GetName(),
Duration: time.Duration(wr.GetTimeMsec()) * time.Millisecond,
Count: wr.GetCount(),
})
}
}
out.AggWakeupReasons = sumWakelockInfo(wrl, realtime)
// Sorting Wakeup Reasons by count.
checkinparse.SortByCount(wrl)
for _, wr := range wrl {
out.WakeupReasons = append(out.WakeupReasons, activityData(wr, realtime))
}
// Power usage per app.
var e []*PowerUseData
// Network usage per app.
var m []*checkinparse.WakelockInfo
var n []*NetworkTrafficData
// App wakeup count.
// wubn is wakeup alarms broken down by name.
var wu, wubn []*RateData
// App ANR and crash count.
var ac []*ANRCrashData
// CPU use per app.
var cpu []*CPUData
// Wifi activity per app.
var wfScan []*checkinparse.WakelockInfo
var wfFull []*checkinparse.WakelockInfo
// Scheduled Jobs (JobScheduler Jobs).
var sj []*checkinparse.WakelockInfo
// SyncManager Tasks.
var stl []*checkinparse.WakelockInfo
// Userspace Partial Wakelocks and GPS use.
var pwl []*checkinparse.WakelockInfo
var gps []*checkinparse.WakelockInfo
// Camera use per app.
var ca []*checkinparse.WakelockInfo
// Flashlight use per app.
var fla []*checkinparse.WakelockInfo
au := make(map[string]int32)
for _, app := range c.App {
if app.GetName() == "" {
app.Name = proto.String(fmt.Sprintf("UNKNOWN_%d", app.GetUid()))
}
au[app.GetName()] = app.GetUid()
this := AppData{
Name: app.GetName(),
UID: app.GetUid(),
}
if pct := 100 * app.PowerUseItem.GetComputedPowerMah() / bCapMah; pct >= 0.01 {
e = append(e, &PowerUseData{
Name: app.GetName(),
UID: app.GetUid(),
Percent: pct,
})
}
if mat, mac := app.Network.GetMobileActiveTimeMsec(), app.Network.GetMobileActiveCount(); mat >= 0.01 || mac > 0 {
m = append(m, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(mat) * time.Millisecond,
Count: mac,
})
}
wr := app.Network.GetWifiBytesRx()
wt := app.Network.GetWifiBytesTx()
mt := app.Network.GetMobileBytesTx()
mr := app.Network.GetMobileBytesRx()
if wr+wt+mt+mr >= 0.01 {
ntd := NetworkTrafficData{
Name: app.GetName(),
UID: app.GetUid(),
WifiMegaBytes: (wr + wt) / (1024 * 1024),
MobileMegaBytes: (mr + mt) / (1024 * 1024),
}
if realtime > 0 {
ntd.WifiMegaBytesPerHour = (wr + wt) / (1024 * 1024) / float32(realtime.Hours())
ntd.MobileMegaBytesPerHour = (mr + mt) / (1024 * 1024) / float32(realtime.Hours())
}
n = append(n, &ntd)
this.Network = ntd
}
if w := app.Apk.GetWakeups(); w > 0 {
rd := RateData{
Name: app.GetName(),
UID: app.GetUid(),
Count: w,
}
if realtime > 0 {
rd.CountPerHr = w / float32(realtime.Hours())
}
wu = append(wu, &rd)
this.Alarms = rd
}
for _, w := range app.WakeupAlarm {
if wc := w.GetCount(); wc > 0 {
rd := RateData{
Name: fmt.Sprintf("%s : %s", app.GetName(), w.GetName()),
UID: app.GetUid(),
Count: float32(wc),
}
if realtime > 0 {
rd.CountPerHr = float32(wc) / float32(realtime.Hours())
}
wubn = append(wubn, &rd)
}
}
for _, p := range app.Process {
if an, cr := p.GetAnrs(), p.GetCrashes(); an > 0 || cr > 0 {
ac = append(ac, &ANRCrashData{
Name: fmt.Sprintf("%s : %s", app.GetName(), p.GetName()),
UID: app.GetUid(),
ANRCount: int32(an),
CrashCount: int32(cr),
})
out.TotalAppANRCount += int32(an)
out.TotalAppCrashCount += int32(cr)
}
}
if realtime > 0 {
out.TotalAppANRRate = float32(out.TotalAppANRCount) / float32(realtime.Hours())
out.TotalAppCrashRate = float32(out.TotalAppCrashCount) / float32(realtime.Hours())
}
if ut, st := app.Cpu.GetUserTimeMs(), app.Cpu.GetSystemTimeMs(); ut > 0 || st > 0 {
cpud := CPUData{
Name: app.GetName(),
UID: app.GetUid(),
UserTime: time.Duration(ut) * time.Millisecond,
SystemTime: time.Duration(st) * time.Millisecond,
}
if bCapMah > 0 {
cpud.PowerPct = 100 * (app.Cpu.GetPowerMaMs() / (1000 * 60 * 60)) / bCapMah
}
cpu = append(cpu, &cpud)
this.CPU = cpud
out.AggCPUUsage.UserTime += time.Duration(ut) * time.Millisecond
out.AggCPUUsage.SystemTime += time.Duration(st) * time.Millisecond
out.AggCPUUsage.PowerPct += cpud.PowerPct
}
if wfl := app.Wifi.GetFullWifiLockTimeMsec(); wfl > 0 {
wfFull = append(wfFull, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(wfl) * time.Millisecond,
})
}
wst := app.Wifi.GetScanTimeMsec()
wsc := app.Wifi.GetScanCount()
if wst > 0 || wsc > 0 {
wfScan = append(wfScan, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(wst) * time.Millisecond,
Count: wsc,
})
this.WifiScan = ActivityData{
Name: app.GetName(),
UID: app.GetUid(),
Count: wsc,
Duration: time.Duration(wst) * time.Millisecond,
}
if realtime > 0 {
this.WifiScan.CountPerHour = wsc / float32(realtime.Hours())
this.WifiScan.SecondsPerHr = float32(time.Duration(wst).Seconds()) / float32(realtime.Hours())
}
}
var sjt []*checkinparse.WakelockInfo
for _, jst := range app.ScheduledJob {
sjt = append(sjt, &checkinparse.WakelockInfo{
Name: fmt.Sprintf("%s : %s", app.GetName(), jst.GetName()),
UID: app.GetUid(),
Duration: time.Duration(jst.GetTotalTimeMsec()) * time.Millisecond,
Count: jst.GetCount(),
})
}
sj = append(sj, sjt...)
this.ScheduledJobs = sumWakelockInfo(sjt, realtime)
this.ScheduledJobs.Name = app.GetName()
this.ScheduledJobs.UID = app.GetUid()
var stlt []*checkinparse.WakelockInfo
for _, st := range app.Sync {
stlt = append(stlt, &checkinparse.WakelockInfo{
Name: fmt.Sprintf("%s : %s", app.GetName(), st.GetName()),
UID: app.GetUid(),
Duration: time.Duration(st.GetTotalTimeMsec()) * time.Millisecond,
Count: st.GetCount(),
})
}
stl = append(stl, stlt...)
this.Syncs = sumWakelockInfo(stlt, realtime)
this.Syncs.Name = app.GetName()
this.Syncs.UID = app.GetUid()
var pwlt []*checkinparse.WakelockInfo
for _, pw := range app.Wakelock {
w := &checkinparse.WakelockInfo{
Name: fmt.Sprintf("%s : %s", app.GetName(), pw.GetName()),
UID: app.GetUid(),
Duration: time.Duration(pw.GetPartialTimeMsec()) * time.Millisecond,
Count: pw.GetPartialCount(),
}
if c.GetReportVersion() >= 20 {
// The values are only valid in v20+.
w.MaxDuration = time.Duration(pw.GetPartialMaxDurationMsec()) * time.Millisecond
if c.GetReportVersion() >= 21 {
// The values are only valid in v21+.
w.TotalDuration = time.Duration(pw.GetPartialTotalDurationMsec()) * time.Millisecond
}
}
pwlt = append(pwlt, w)
}
pwl = append(pwl, pwlt...)
this.PartialWakelocks = sumWakelockInfo(pwlt, realtime)
this.PartialWakelocks.Name = app.GetName()
this.PartialWakelocks.UID = app.GetUid()
var gpst []*checkinparse.WakelockInfo
for _, s := range app.Sensor {
if s.GetNumber() == bugreportutils.GPSSensorNumber {
gpst = append(gpst, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(s.GetTotalTimeMsec()) * time.Millisecond,
Count: s.GetCount(),
})
continue
}
}
gps = append(gps, gpst...)
this.GPSUse = sumWakelockInfo(gpst, realtime)
this.GPSUse.Name = app.GetName()
this.GPSUse.UID = app.GetUid()
if cat, cac := app.Camera.GetTotalTimeMsec(), app.Camera.GetCount(); cat > 0 || cac > 0 {
ca = append(ca, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(cat) * time.Millisecond,
Count: cac,
})
}
if flt, flc := app.Flashlight.GetTotalTimeMsec(), app.Flashlight.GetCount(); flt > 0 || flc > 0 {
fla = append(fla, &checkinparse.WakelockInfo{
Name: app.GetName(),
UID: app.GetUid(),
Duration: time.Duration(flt) * time.Millisecond,
Count: flc,
})
}
if c.GetReportVersion() >= 17 {
// The data is only valid in v17+.
bkg := app.GetStateTime().GetBackgroundTimeMsec()
cch := app.GetStateTime().GetCachedTimeMsec()
fre := app.GetStateTime().GetForegroundTimeMsec()
frs := app.GetStateTime().GetForegroundServiceTimeMsec()
top := app.GetStateTime().GetTopTimeMsec()
tps := app.GetStateTime().GetTopSleepingTimeMsec()
if bkg > 0 || cch > 0 || fre > 0 || frs > 0 || top > 0 || tps > 0 {
out.AppStates = append(out.AppStates, stateData{
Name: app.GetName(),
UID: app.GetUid(),
Background: MDuration{V: time.Duration(bkg) * time.Millisecond},
Cached: MDuration{V: time.Duration(cch) * time.Millisecond},
Foreground: MDuration{V: time.Duration(fre) * time.Millisecond},
ForegroundService: MDuration{V: time.Duration(frs) * time.Millisecond},
Top: MDuration{V: time.Duration(top) * time.Millisecond},
TopSleeping: MDuration{V: time.Duration(tps) * time.Millisecond},
})
}
}
out.AggregatedApps = append(out.AggregatedApps, this)
}
for _, pwi := range c.System.PowerUseItem {
if pwi.GetName() == bspb.BatteryStats_System_PowerUseItem_APP {
// We have the apps split up in the preceding for loop, and the APP entry is just the sum of all of them, so we skip it here.
continue
}
if pct := 100 * pwi.GetComputedPowerMah() / bCapMah; pct >= 0.01 {
e = append(e, &PowerUseData{
Name: pwi.GetName().String(),
Percent: pct,
})
}
}
sort.Sort(byPercent(e))
for _, ent := range e {
out.DevicePowerEstimates = append(out.DevicePowerEstimates, *ent)
}
checkinparse.SortByTime(m)
for _, mad := range m {
out.TopMobileActiveApps = append(out.TopMobileActiveApps, activityData(mad, realtime))
}
for _, ntd := range n {
if ntd.MobileMegaBytes >= 0.01 {
out.TopMobileTrafficApps = append(out.TopMobileTrafficApps, *ntd)
}
}
sort.Sort(ByMobileBytes(out.TopMobileTrafficApps))
for _, ntd := range n {
if ntd.WifiMegaBytes >= 0.01 {
out.TopWifiTrafficApps = append(out.TopWifiTrafficApps, *ntd)
}
}
sort.Sort(ByWifiBytes(out.TopWifiTrafficApps))
sort.Sort(byCount(wu))
for _, w := range wu {
out.AppWakeups = append(out.AppWakeups, *w)
out.AggAppWakeups.Count += w.Count
}
if realtime > 0 {
out.AggAppWakeups.CountPerHr = out.AggAppWakeups.Count / float32(realtime.Hours())
out.TotalAppWakeupsPerHr = out.AggAppWakeups.CountPerHr
}
sort.Sort(byCount(wubn))
for _, w := range wubn {
out.AppWakeupsByAlarmName = append(out.AppWakeupsByAlarmName, *w)
}
sort.Sort(byCrashThenANR(ac))
for _, x := range ac {
out.ANRAndCrash = append(out.ANRAndCrash, *x)
}
for _, cp := range cpu {
out.CPUUsage = append(out.CPUUsage, *cp)
out.TotalAppCPUPowerPct += cp.PowerPct
}
sort.Sort(ByCPUUsage(out.CPUUsage))
checkinparse.SortByTime(wfScan)
for _, w := range wfScan {
out.WifiScanActivity = append(out.WifiScanActivity, activityData(w, realtime))
}
out.AggWifiScanActivity = sumWakelockInfo(wfScan, realtime)
checkinparse.SortByTime(wfFull)
for _, w := range wfFull {
out.WifiFullLockActivity = append(out.WifiFullLockActivity, activityData(w, realtime))
}
out.AggWifiFullLockActivity = sumWakelockInfo(wfFull, realtime)
// Sorting JobScheduler Jobs by time.
checkinparse.SortByTime(sj)
for _, jst := range sj {
out.ScheduledJobs = append(out.ScheduledJobs, activityData(jst, realtime))
if realtime > 0 {
out.TotalAppScheduledJobsPerHr += float32(jst.Duration.Seconds()) / float32(realtime.Hours())
}
}
out.AggScheduledJobs = sumWakelockInfo(sj, realtime)
// Sorting SyncManager Tasks by time.
checkinparse.SortByTime(stl)
for _, st := range stl {
out.SyncTasks = append(out.SyncTasks, activityData(st, realtime))
if realtime > 0 {
out.TotalAppSyncsPerHr += float32(st.Duration.Seconds()) / float32(realtime.Hours())
}
}
out.AggSyncTasks = sumWakelockInfo(stl, realtime)
// Sorting Partial Wakelocks by time.
checkinparse.SortByTime(pwl)
for _, pw := range pwl {
out.UserspaceWakelocks = append(out.UserspaceWakelocks, activityData(pw, realtime))
}
// Sort GPS use by time.
checkinparse.SortByTime(gps)
for _, g := range gps {
out.GPSUse = append(out.GPSUse, activityData(g, realtime))
if realtime > 0 {
out.TotalAppGPSUseTimePerHour += float32(g.Duration.Seconds()) / float32(realtime.Hours())
}
}
out.AggGPSUse = sumWakelockInfo(gps, realtime)
// Sort camera use by time.
checkinparse.SortByTime(ca)
for _, c := range ca {
out.CameraUse = append(out.CameraUse, activityData(c, realtime))
if realtime > 0 {
out.TotalAppCameraUsePerHr += float32(c.Duration.Seconds()) / float32(realtime.Hours())
}
}
out.AggCameraUse = sumWakelockInfo(ca, realtime)
// Sort flashlight use by time.
checkinparse.SortByTime(fla)
for _, f := range fla {
out.FlashlightUse = append(out.FlashlightUse, activityData(f, realtime))
}
out.AggFlashlightUse = sumWakelockInfo(fla, realtime)
sort.Sort(byState(out.AppStates))
return out
}
package gogm
import (
"fmt"
"math"
)
// Vec4 is a vector with 4 components, of type T.
type Vec4[T number] [4]T

// Vec4CopyVec4 copies the content of src to dst, converting each
// component from T2 to T1 with a plain Go conversion (may truncate).
func Vec4CopyVec4[T1, T2 number](dst *Vec4[T1], src *Vec4[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
	dst[2] = T1(src[2])
	dst[3] = T1(src[3])
}

// Vec4CopyVec2 copies the content of src to dst.
// Only components 0 and 1 are written; dst[2] and dst[3] keep their values.
func Vec4CopyVec2[T1, T2 number](dst *Vec4[T1], src *Vec2[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
}

// Vec4CopyVec3 copies the content of src to dst.
// Only components 0 through 2 are written; dst[3] keeps its value.
func Vec4CopyVec3[T1, T2 number](dst *Vec4[T1], src *Vec3[T2]) {
	dst[0] = T1(src[0])
	dst[1] = T1(src[1])
	dst[2] = T1(src[2])
}

// String returns a string representation of the vector,
// e.g. "{1, 2, 3, 4}".
func (v1 *Vec4[T]) String() string {
	return fmt.Sprintf("{%v, %v, %v, %v}", v1[0], v1[1], v1[2], v1[3])
}
// Len returns the Euclidean length (magnitude) of the vector as a float64,
// regardless of the component type T.
func (v1 *Vec4[T]) Len() float64 {
	// Square via direct multiplication rather than math.Pow(x, 2):
	// it is exact, faster, and the idiomatic way to square in Go.
	x := float64(v1[0])
	y := float64(v1[1])
	z := float64(v1[2])
	w := float64(v1[3])
	return math.Sqrt(x*x + y*y + z*z + w*w)
}
// Normalize normalizes v2, and stores the result in v1.
// If v2 has zero length the result is NaN/Inf for floating T, and the
// division panics for integer T. Note that l := T(v2.Len()) truncates the
// length for integer component types.
func (v1 *Vec4[T]) Normalize(v2 *Vec4[T]) {
	l := T(v2.Len())
	v1[0] = v2[0] / l
	v1[1] = v2[1] / l
	v1[2] = v2[2] / l
	v1[3] = v2[3] / l
}

// Inverse sets v1 to the inverse (negation) of v2.
// v1 = -v2
func (v1 *Vec4[T]) Inverse(v2 *Vec4[T]) {
	v1[0] = -v2[0]
	v1[1] = -v2[1]
	v1[2] = -v2[2]
	v1[3] = -v2[3]
}

// Add adds v2 with v3 component-wise, and stores the result in v1.
// v1 = v2 + v3
func (v1 *Vec4[T]) Add(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[0] + v3[0]
	v1[1] = v2[1] + v3[1]
	v1[2] = v2[2] + v3[2]
	v1[3] = v2[3] + v3[3]
}

// Sub subtracts v3 from v2 component-wise, and stores the result in v1.
// v1 = v2 - v3
func (v1 *Vec4[T]) Sub(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[0] - v3[0]
	v1[1] = v2[1] - v3[1]
	v1[2] = v2[2] - v3[2]
	v1[3] = v2[3] - v3[3]
}

// Mul multiplies v2 with v3 component-wise, and stores the result in v1.
// v1 = v2 * v3
func (v1 *Vec4[T]) Mul(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[0] * v3[0]
	v1[1] = v2[1] * v3[1]
	v1[2] = v2[2] * v3[2]
	v1[3] = v2[3] * v3[3]
}

// Div divides v2 by v3 component-wise, and stores the result in v1.
// For integer T the division truncates and a zero component in v3 panics.
// v1 = v2 / v3
func (v1 *Vec4[T]) Div(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[0] / v3[0]
	v1[1] = v2[1] / v3[1]
	v1[2] = v2[2] / v3[2]
	v1[3] = v2[3] / v3[3]
}

// AddS adds each component of v2 with the scalar s, and stores the result in v1.
// v1 = v2 + s
func (v1 *Vec4[T]) AddS(v2 *Vec4[T], s T) {
	v1[0] = v2[0] + s
	v1[1] = v2[1] + s
	v1[2] = v2[2] + s
	v1[3] = v2[3] + s
}

// SubS subtracts the scalar s from each component of v2, and stores the result in v1.
// v1 = v2 - s
func (v1 *Vec4[T]) SubS(v2 *Vec4[T], s T) {
	v1[0] = v2[0] - s
	v1[1] = v2[1] - s
	v1[2] = v2[2] - s
	v1[3] = v2[3] - s
}

// MulS multiplies each component of v2 with the scalar s, and stores the result in v1.
// v1 = v2 * s
func (v1 *Vec4[T]) MulS(v2 *Vec4[T], s T) {
	v1[0] = v2[0] * s
	v1[1] = v2[1] * s
	v1[2] = v2[2] * s
	v1[3] = v2[3] * s
}

// DivS divides each component of v2 by the scalar s, and stores the result in v1.
// v1 = v2 / s
func (v1 *Vec4[T]) DivS(v2 *Vec4[T], s T) {
	v1[0] = v2[0] / s
	v1[1] = v2[1] / s
	v1[2] = v2[2] / s
	v1[3] = v2[3] / s
}

// AddHomog adds v2 with v3 component-wise over the first three components
// only, leaving the homogeneous component v1[3] unchanged.
// v1.xyz = v2.xyz + v3.xyz
func (v1 *Vec4[T]) AddHomog(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[0] + v3[0]
	v1[1] = v2[1] + v3[1]
	v1[2] = v2[2] + v3[2]
}

// CrossHomog takes the 3D cross product of v2 and v3, and stores the result
// in the first three components of v1. The tuple assignment evaluates every
// right-hand side before writing, so the result is correct even when v1
// aliases v2 or v3.
// v1 = v2 x v3
func (v1 *Vec4[T]) CrossHomog(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0], v1[1], v1[2] = v2[1]*v3[2]-v3[1]*v2[2], v2[2]*v3[0]-v3[2]*v2[0], v2[0]*v3[1]-v3[0]*v2[1]
}

// CrossHomogFast takes the 3D cross product of v2 and v3, and stores the
// result in the first three components of v1. Components are written one at
// a time, so v1 must NOT alias v2 or v3 (use CrossHomog for that case).
// v1 = v2 x v3
func (v1 *Vec4[T]) CrossHomogFast(v2 *Vec4[T], v3 *Vec4[T]) {
	v1[0] = v2[1]*v3[2] - v3[1]*v2[2]
	v1[1] = v2[2]*v3[0] - v3[2]*v2[0]
	v1[2] = v2[0]*v3[1] - v3[0]*v2[1]
}

// Dot takes the 4-component dot product of v1 and v2, and returns the result.
func (v1 *Vec4[T]) Dot(v2 *Vec4[T]) T {
	return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2] + v1[3]*v2[3]
}

// DotHomog takes the dot product of the first three components of v1 and
// v2, ignoring the homogeneous component, and returns the result.
func (v1 *Vec4[T]) DotHomog(v2 *Vec4[T]) T {
	return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2]
}
package iso20022
// Description of the financial instrument.
// All fields are optional pointers (XML-serialized with omitempty); use the
// generated Add*/Set* helpers below to populate them.
type FinancialInstrumentAttributes50 struct {

	// Identification of a financial instrument.
	FinancialInstrumentIdentification *SecurityIdentification14 `xml:"FinInstrmId,omitempty"`

	// Place where the referenced financial instrument is listed.
	PlaceOfListing *MarketIdentification3Choice `xml:"PlcOfListg,omitempty"`

	// Specifies the computation method of (accrued) interest of the financial instrument.
	DayCountBasis *InterestComputationMethodFormat1Choice `xml:"DayCntBsis,omitempty"`

	// Classification type of the financial instrument, as per the ISO Classification of Financial Instrument (CFI).
	ClassificationType *ClassificationType2Choice `xml:"ClssfctnTp,omitempty"`

	// Currency in which a financial instrument is currently denominated.
	DenominationCurrency *ActiveOrHistoricCurrencyCode `xml:"DnmtnCcy,omitempty"`

	// Next payment date of an interest bearing financial instrument.
	NextCouponDate *ISODate `xml:"NxtCpnDt,omitempty"`

	// Date on which an order expires or at which a privilege or offer terminates.
	ExpiryDate *ISODate `xml:"XpryDt,omitempty"`

	// Date on which the interest rate or redemption price will be/was calculated according to the terms of the issue.
	FloatingRateFixingDate *ISODate `xml:"FltgRateFxgDt,omitempty"`

	// Date on which a financial instrument becomes due and assets are to be repaid.
	MaturityDate *ISODate `xml:"MtrtyDt,omitempty"`

	// Date on which the financial instrument is issued.
	IssueDate *ISODate `xml:"IsseDt,omitempty"`

	// Date on which a financial instrument is called away/redeemed before its scheduled maturity.
	NextCallableDate *ISODate `xml:"NxtCllblDt,omitempty"`

	// Date on which a holder of a financial instrument has the right to request redemption of the principal amount prior to its scheduled maturity date.
	PutableDate *ISODate `xml:"PutblDt,omitempty"`

	// Date on which an interest bearing financial instrument begins to accrue interest.
	DatedDate *ISODate `xml:"DtdDt,omitempty"`

	// Deadline by which a convertible security must be converted, according to the terms of the issue.
	ConversionDate *ISODate `xml:"ConvsDt,omitempty"`

	// Factor used to calculate the value of the outstanding principal of the financial instrument (for factored securities) until the next redemption (factor) date.
	PreviousFactor *BaseOne14Rate `xml:"PrvsFctr,omitempty"`

	// Factor used to calculate the value of the outstanding principal of the financial instrument (for factored securities) that will applicable after the redemption (factor) date.
	NextFactor *BaseOne14Rate `xml:"NxtFctr,omitempty"`

	// Annual rate of a financial instrument.
	InterestRate *PercentageRate `xml:"IntrstRate,omitempty"`

	// Interest rate applicable to the next interest payment period in relation to variable rate instruments.
	NextInterestRate *PercentageRate `xml:"NxtIntrstRate,omitempty"`

	// Also known as Minimum Nominal Value. Minimum nominal quantity of financial instrument that must be purchased/sold.
	MinimumNominalQuantity *FinancialInstrumentQuantity1Choice `xml:"MinNmnlQty,omitempty"`

	// Minimum quantity of financial instrument or lot of rights/warrants that must be exercised.
	MinimumExercisableQuantity *FinancialInstrumentQuantity1Choice `xml:"MinExrcblQty,omitempty"`

	// Minimum multiple quantity of financial instrument or lot of rights/warrants that must be exercised.
	MinimumExercisableMultipleQuantity *FinancialInstrumentQuantity1Choice `xml:"MinExrcblMltplQty,omitempty"`

	// Ratio or multiplying factor used to convert one contract into a financial instrument quantity.
	ContractSize *FinancialInstrumentQuantity1Choice `xml:"CtrctSz,omitempty"`
}
// Generated-style accessors. AddX allocates a fresh value for the pointer
// field X and returns it so the caller can populate it; SetX converts the
// given string into the field's typed pointer. Each Add/Set overwrites any
// previously stored value.

func (f *FinancialInstrumentAttributes50) AddFinancialInstrumentIdentification() *SecurityIdentification14 {
	f.FinancialInstrumentIdentification = new(SecurityIdentification14)
	return f.FinancialInstrumentIdentification
}

func (f *FinancialInstrumentAttributes50) AddPlaceOfListing() *MarketIdentification3Choice {
	f.PlaceOfListing = new(MarketIdentification3Choice)
	return f.PlaceOfListing
}

func (f *FinancialInstrumentAttributes50) AddDayCountBasis() *InterestComputationMethodFormat1Choice {
	f.DayCountBasis = new(InterestComputationMethodFormat1Choice)
	return f.DayCountBasis
}

func (f *FinancialInstrumentAttributes50) AddClassificationType() *ClassificationType2Choice {
	f.ClassificationType = new(ClassificationType2Choice)
	return f.ClassificationType
}

func (f *FinancialInstrumentAttributes50) SetDenominationCurrency(value string) {
	f.DenominationCurrency = (*ActiveOrHistoricCurrencyCode)(&value)
}

func (f *FinancialInstrumentAttributes50) SetNextCouponDate(value string) {
	f.NextCouponDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetExpiryDate(value string) {
	f.ExpiryDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetFloatingRateFixingDate(value string) {
	f.FloatingRateFixingDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetMaturityDate(value string) {
	f.MaturityDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetIssueDate(value string) {
	f.IssueDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetNextCallableDate(value string) {
	f.NextCallableDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetPutableDate(value string) {
	f.PutableDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetDatedDate(value string) {
	f.DatedDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetConversionDate(value string) {
	f.ConversionDate = (*ISODate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetPreviousFactor(value string) {
	f.PreviousFactor = (*BaseOne14Rate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetNextFactor(value string) {
	f.NextFactor = (*BaseOne14Rate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetInterestRate(value string) {
	f.InterestRate = (*PercentageRate)(&value)
}

func (f *FinancialInstrumentAttributes50) SetNextInterestRate(value string) {
	f.NextInterestRate = (*PercentageRate)(&value)
}

func (f *FinancialInstrumentAttributes50) AddMinimumNominalQuantity() *FinancialInstrumentQuantity1Choice {
	f.MinimumNominalQuantity = new(FinancialInstrumentQuantity1Choice)
	return f.MinimumNominalQuantity
}

func (f *FinancialInstrumentAttributes50) AddMinimumExercisableQuantity() *FinancialInstrumentQuantity1Choice {
	f.MinimumExercisableQuantity = new(FinancialInstrumentQuantity1Choice)
	return f.MinimumExercisableQuantity
}

func (f *FinancialInstrumentAttributes50) AddMinimumExercisableMultipleQuantity() *FinancialInstrumentQuantity1Choice {
	f.MinimumExercisableMultipleQuantity = new(FinancialInstrumentQuantity1Choice)
	return f.MinimumExercisableMultipleQuantity
}

func (f *FinancialInstrumentAttributes50) AddContractSize() *FinancialInstrumentQuantity1Choice {
	f.ContractSize = new(FinancialInstrumentQuantity1Choice)
	return f.ContractSize
}
package number
import (
"github.com/miscoler/xlsx/internal/ml"
)
var (
	// builtIn maps a predefined numeric format ID to its format code and
	// resolved value Type. The IDs and codes appear to correspond to the
	// built-in number formats of SpreadsheetML (ECMA-376) — confirm
	// against the spec before relying on exact codes.
	builtIn map[int]*builtInFormat
	// typeDefault maps each value Type to the ID of its default built-in
	// format.
	typeDefault map[Type]int
)

// init populates the built-in format tables once at package load.
func init() {
	typeDefault = map[Type]int{
		General:   0x00,
		Integer:   0x01,
		Float:     0x02,
		Date:      0x0e,
		Time:      0x14,
		DateTime:  0x16,
		DeltaTime: 0x2d,
	}

	builtIn = map[int]*builtInFormat{
		0x00: {ml.NumberFormat{ID: 0x00, Code: `General`}, General},
		0x01: {ml.NumberFormat{ID: 0x01, Code: `0`}, Integer},
		0x02: {ml.NumberFormat{ID: 0x02, Code: `0.00`}, Float},
		0x03: {ml.NumberFormat{ID: 0x03, Code: `#,##0`}, Float},
		0x04: {ml.NumberFormat{ID: 0x04, Code: `#,##0.00`}, Float},
		0x05: {ml.NumberFormat{ID: 0x05, Code: `($#,##0_);($#,##0)`}, Float},
		0x06: {ml.NumberFormat{ID: 0x06, Code: `($#,##0_);[RED]($#,##0)`}, Float},
		0x07: {ml.NumberFormat{ID: 0x07, Code: `($#,##0.00_);($#,##0.00_)`}, Float},
		0x08: {ml.NumberFormat{ID: 0x08, Code: `($#,##0.00_);[RED]($#,##0.00_)`}, Float},
		0x09: {ml.NumberFormat{ID: 0x09, Code: `0%`}, Integer},
		0x0a: {ml.NumberFormat{ID: 0x0a, Code: `0.00%`}, Float},
		0x0b: {ml.NumberFormat{ID: 0x0b, Code: `0.00E+00`}, Float},
		0x0c: {ml.NumberFormat{ID: 0x0c, Code: `# ?/?`}, Float},
		0x0d: {ml.NumberFormat{ID: 0x0d, Code: `# ??/??`}, Float},
		0x0e: {ml.NumberFormat{ID: 0x0e, Code: `m-d-yy`}, Date},
		0x0f: {ml.NumberFormat{ID: 0x0f, Code: `d-mmm-yy`}, Date},
		0x10: {ml.NumberFormat{ID: 0x10, Code: `d-mmm`}, Date},
		0x11: {ml.NumberFormat{ID: 0x11, Code: `mmm-yy`}, Date},
		0x12: {ml.NumberFormat{ID: 0x12, Code: `h:mm AM/PM`}, Time},
		0x13: {ml.NumberFormat{ID: 0x13, Code: `h:mm:ss AM/PM`}, Time},
		0x14: {ml.NumberFormat{ID: 0x14, Code: `h:mm`}, Time},
		0x15: {ml.NumberFormat{ID: 0x15, Code: `h:mm:ss`}, Time},
		0x16: {ml.NumberFormat{ID: 0x16, Code: `m-d-yy h:mm`}, DateTime},
		// IDs 0x17-0x24 are intentionally absent — presumably the
		// reserved/locale-specific range; verify against the spec.
		0x25: {ml.NumberFormat{ID: 0x25, Code: `(#,##0_);(#,##0)`}, Integer},
		0x26: {ml.NumberFormat{ID: 0x26, Code: `(#,##0_);[RED](#,##0)`}, Integer},
		0x27: {ml.NumberFormat{ID: 0x27, Code: `(#,##0.00);(#,##0.00)`}, Float},
		0x28: {ml.NumberFormat{ID: 0x28, Code: `(#,##0.00);[RED](#,##0.00)`}, Float},
		0x29: {ml.NumberFormat{ID: 0x29, Code: `_(*#,##0_);_(*(#,##0);_(*"-"_);_(@_)`}, Float},
		0x2a: {ml.NumberFormat{ID: 0x2a, Code: `_($*#,##0_);_($*(#,##0);_(*"-"_);_(@_)`}, Float},
		0x2b: {ml.NumberFormat{ID: 0x2b, Code: `_(*#,##0.00_);_(*(#,##0.00);_(*"-"??_);_(@_)`}, Float},
		0x2c: {ml.NumberFormat{ID: 0x2c, Code: `_($*#,##0.00_);_($*(#,##0.00);_(*"-"??_);_(@_)`}, Float},
		0x2d: {ml.NumberFormat{ID: 0x2d, Code: `mm:ss`}, DeltaTime},
		0x2e: {ml.NumberFormat{ID: 0x2e, Code: `[h]:mm:ss`}, DeltaTime},
		0x2f: {ml.NumberFormat{ID: 0x2f, Code: `mm:ss.0`}, DeltaTime},
		0x30: {ml.NumberFormat{ID: 0x30, Code: `##0.0E+0`}, Float},
		0x31: {ml.NumberFormat{ID: 0x31, Code: `@`}, General},
	}
}
package main
import (
"fmt"
"io/ioutil"
"log"
"net/http"
"regexp"
"strconv"
"strings"
)
// Command is the interface that must be implemented by all commands
// (eg: expect, tx). In this file it is implemented by *Expect, *TxResp
// and *TxReq.
type Command interface {
	// Parse fills the command structure by parsing the data in the given
	// scanner, thus implementing the command-specific parsing logic. The
	// returned error is non-nil in case of parse errors.
	Parse(*scanner) error
}
// ExpectField represents the various attributes that the expect command can
// take. For example: req.method, resp.status, ...
type ExpectField int

const (
	// EXPECT_METHOD checks the HTTP method (requests only).
	EXPECT_METHOD ExpectField = iota
	// EXPECT_HEADERS checks a single named header.
	EXPECT_HEADERS
	// EXPECT_BODY checks the full message body.
	EXPECT_BODY
	// EXPECT_STATUS checks the status code (responses only).
	EXPECT_STATUS
)
// Expect is a command used to test a certain assumption. For example, the
// command 'req.method eq "GET"' verifies that the request method is GET, and
// fail if it is not.
type Expect struct {
	verbatim   string      // original command text, used by String()
	field      ExpectField // which attribute of the message is checked
	headerName string      // header to inspect when field == EXPECT_HEADERS
	operator   tokenType   // EQUAL, NOTEQUAL or TILDE (regexp match)
	expected   string      // literal value or regexp to compare against
}

// String pretty-prints an Expect as its quoted original command text.
func (e Expect) String() string {
	return fmt.Sprintf("%q", e.verbatim)
}
// Parse an expect command. Support both requests (expect req[...]) and
// responses (expect resp[...]).
// The command text is re-assembled into e.verbatim for error reporting,
// while the parsed pieces populate e.field, e.headerName, e.operator and
// e.expected.
func (e *Expect) Parse(s *scanner) error {
	// Get something like 'req.method'
	token := s.ScanUseful()

	// Start building up e.verbatim
	e.verbatim = token.val

	if token.typ != REQ && token.typ != RESP {
		return fmt.Errorf("Parse error in 'expect' command: expecting {req,resp}, got %q", token)
	}

	token = s.ScanUseful()
	e.verbatim += token.val
	if token.typ != DOT {
		return fmt.Errorf("Parse error in 'expect' command: expecting something like 'req.method', got %q", token)
	}

	// Field selector: method, status, body, or headers[...].
	token = s.ScanUseful()
	e.verbatim += token.val
	if token.typ == METHOD {
		e.field = EXPECT_METHOD
	} else if token.typ == STATUS {
		e.field = EXPECT_STATUS
	} else if token.typ == BODY {
		e.field = EXPECT_BODY
	} else if token.typ == HEADERS {
		e.field = EXPECT_HEADERS

		// Get header name (open bracket, expect string, close bracket)
		token = s.ScanUseful()
		e.verbatim += token.val
		if token.typ != OPEN_BRACKET {
			return fmt.Errorf("Parse error in 'expect' command: expecting 'req.headers[$hdr_name]', got %q", token)
		}

		token = s.ScanUseful()
		e.verbatim += token.val
		if token.typ != STRING {
			return fmt.Errorf("Parse error in 'expect' command: expecting 'req.headers[$hdr_name]', got %q", token)
		}

		// We've got something looking like a header name
		e.headerName = token.val

		token = s.ScanUseful()
		e.verbatim += token.val
		if token.typ != CLOSE_BRACKET {
			return fmt.Errorf("Parse error in 'expect' command: expecting 'req.headers[$hdr_name]', got %q", token)
		}
	} else {
		return fmt.Errorf("Parse error in 'expect' command: expecting 'req.{method,headers,body}', got %q", token)
	}

	// Get the operator
	token = s.ScanUseful()
	e.verbatim += " " + token.val
	if token.typ != EQUAL && token.typ != NOTEQUAL && token.typ != TILDE {
		return fmt.Errorf("Parse error in 'expect' command: expecting operator to be '{eq,ne,~}', got %q", token)
	}
	// TODO: if token.typ == TILDE, validate regexp with
	// regexp.MustCompile(str)
	e.operator = token.typ

	// Get the value eg: "^(chrome|curl)"
	token = s.ScanUseful()
	e.verbatim += fmt.Sprintf(" %q", token.val)
	if token.typ != STRING && token.typ != INTEGER {
		return fmt.Errorf("Parse error in 'expect' command: expecting a string/integer, got %q", token)
	}
	e.expected = token.val

	return nil
}
// expectThing reports whether the expectation holds for the given actual
// value, applying the operator parsed from the script: EQUAL / NOTEQUAL
// compare literally, TILDE matches e.expected as a regexp.
// It panics (via log.Panic) on an invalid regexp or an unknown operator,
// both of which indicate a scripting/programming error.
func (e Expect) expectThing(actual string) bool {
	switch e.operator {
	case EQUAL:
		return e.expected == actual
	case NOTEQUAL:
		return e.expected != actual
	case TILDE:
		// MatchString avoids the per-call []byte copy that regexp.Match
		// on []byte(actual) incurred. Note the pattern is still compiled
		// on every call; see the TODO in Parse about precompiling.
		ret, err := regexp.MatchString(e.expected, actual)
		if err != nil {
			log.Panic("regexp.MatchString error: ", err)
		}
		return ret
	}
	log.Panic("Unknown operator: ", e.operator)
	return false
}
// ActualRequest returns the value in the given http.Request object
// corresponding to this Expect. For instance, if we are expecting something
// about the request method, here we return the actual request method sent.
// Note: for EXPECT_BODY the request body is read to EOF, so it can only be
// inspected once. EXPECT_STATUS is invalid for requests and aborts the
// program via log.Fatal.
func (e Expect) ActualRequest(req http.Request) string {
	var actual string
	switch e.field {
	case EXPECT_METHOD:
		actual = req.Method
	case EXPECT_HEADERS:
		// Empty string when the header is absent.
		actual = req.Header.Get(e.headerName)
	case EXPECT_BODY:
		if req.Body == nil {
			return ""
		}
		body, err := ioutil.ReadAll(req.Body)
		if err != nil {
			log.Panic(err)
		} else {
			actual = string(body)
		}
	case EXPECT_STATUS:
		log.Fatal("Requests have no status")
	}
	return actual
}
// Request reports whether this expectation is satisfied by the given
// HTTP request.
func (e Expect) Request(req http.Request) bool {
	actual := e.ActualRequest(req)
	return e.expectThing(actual)
}
// StringResponse renders the status line and headers of the given
// http.Response in a human-readable, multi-line form.
// Multi-valued headers are joined with ", " instead of being printed as a
// Go slice literal ("[a b]"), and the string is assembled with a Builder
// rather than repeated concatenation.
// Note: header iteration order is not deterministic (map ordering).
func (e Expect) StringResponse(resp http.Response) string {
	var b strings.Builder
	fmt.Fprintf(&b, "HTTP %d\n", resp.StatusCode)
	for key, values := range resp.Header {
		fmt.Fprintf(&b, "%s: %s\n", key, strings.Join(values, ", "))
	}
	return b.String()
}
// ActualResponse returns the value in the given http.Response object
// corresponding to this Expect. For instance, if we are expecting something
// about the response status, here we return the actual response status.
// Note: for EXPECT_BODY the response body is read to EOF, so it can only be
// inspected once; the status code is formatted as a decimal string for
// comparison.
func (e Expect) ActualResponse(resp http.Response) string {
	var actual string
	switch e.field {
	case EXPECT_STATUS:
		actual = strconv.Itoa(resp.StatusCode)
	case EXPECT_HEADERS:
		// Empty string when the header is absent.
		actual = resp.Header.Get(e.headerName)
	case EXPECT_BODY:
		if resp.Body == nil {
			return ""
		}
		body, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			log.Panic(err)
		} else {
			actual = string(body)
		}
	}
	return actual
}
// Response reports whether this expectation is satisfied by the given
// HTTP response.
func (e Expect) Response(resp http.Response) bool {
	actual := e.ActualResponse(resp)
	return e.expectThing(actual)
}
// TxResp is the command used to make origin servers return an HTTP response.
// An example is:
//   tx -body "Hello world!" -header "X-HTC-Origin: true" -status 200
type TxResp struct {
	statusCode int               // HTTP status to send (defaults to 200 in Parse)
	headers    map[string]string // response headers to add
	body       string            // response body, sent verbatim
}

// String pretty-prints a TxResp as its status code and quoted body.
func (r TxResp) String() string {
	return fmt.Sprintf("HTTP %d: %q", r.statusCode, r.body)
}
// Parse a tx command in the handle stanza, in other words a response. Eg:
// tx -body "Hello world!" -header "Cache-Control: s-maxage=120" -status 200
// Defaults: status 200, no headers, empty body. Arguments may appear in any
// order and may repeat (the last -body/-status wins; -header accumulates,
// last value per header name wins).
func (r *TxResp) Parse(s *scanner) error {
	r.statusCode = 200
	r.headers = make(map[string]string)
	for {
		token := s.ScanUseful()
		// End of command: push the terminator back for the caller.
		if token.typ == EOF || token.typ == CLOSE_CURLY || token.typ == NEWLINE {
			s.unread()
			break
		}
		if token.typ == BODY_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			r.body = token.val
		} else if token.typ == HEADER_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			// Split "Name: value" on the first colon only, so the value
			// may itself contain colons.
			splitted := strings.SplitN(token.val, ":", 2)
			if len(splitted) != 2 {
				return fmt.Errorf("Parse error in 'tx' command: expecting a header, got %q", token)
			}
			r.headers[splitted[0]] = splitted[1]
		} else if token.typ == STATUS_ARG {
			token := s.ScanUseful()
			if token.typ != INTEGER {
				return fmt.Errorf("Parse error in 'tx' command: expecting an integer, got %q", token)
			}
			// Atoi error deliberately ignored: the token was already
			// validated as INTEGER by the scanner.
			r.statusCode, _ = strconv.Atoi(token.val)
		} else {
			return fmt.Errorf("Parse error in 'tx' command: expecting -body, -header, or -status, got %q", token)
		}
	}
	return nil
}
// Send writes the canned response to writer: headers first (they must be
// set before WriteHeader takes effect), then the status code, then the
// body verbatim. It always returns true.
func (r TxResp) Send(writer http.ResponseWriter) bool {
	// Add all headers
	for key, value := range r.headers {
		writer.Header().Add(key, value)
	}
	// Send the status code
	writer.WriteHeader(r.statusCode)
	// Write body. fmt.Fprint (not Fprintf) so a body containing '%'
	// is not interpreted as a format string.
	fmt.Fprint(writer, r.body)
	return true
}
// TxReq is the command used to make clients send an HTTP request.
// An example is:
//   tx -url "/hello/world" -header "X-HTC-Origin: true" -method "HEAD"
type TxReq struct {
	uri     string            // request path, appended to the server address
	method  string            // HTTP method (defaults to "GET" in Parse)
	headers map[string]string // request headers to add
	body    string            // request body, sent verbatim
}

// String pretty-prints a TxReq as a request line followed by its headers.
// Header order is not deterministic (map iteration).
func (r TxReq) String() string {
	s := fmt.Sprintf("%s %s\n", r.method, r.uri)
	for key, value := range r.headers {
		s += fmt.Sprintf("%s: %s\n", key, value)
	}
	return s
}
// Parse a tx command in the client stanza, in other words a request. Eg:
// tx -url "/endpoint/1" -method "GET" -header "X-Debug: x-cache"
// Defaults: method "GET", no headers, empty body and URL. Arguments may
// appear in any order and may repeat (the last one wins; -header accumulates).
func (r *TxReq) Parse(s *scanner) error {
	r.method = "GET"
	r.headers = make(map[string]string)
	for {
		token := s.ScanUseful()
		// End of command: push the terminator back for the caller.
		if token.typ == EOF || token.typ == CLOSE_CURLY || token.typ == NEWLINE {
			s.unread()
			break
		}
		if token.typ == BODY_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			r.body = token.val
		} else if token.typ == HEADER_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			// Split "Name: value" on the first colon only, so the value
			// may itself contain colons.
			splitted := strings.SplitN(token.val, ":", 2)
			if len(splitted) != 2 {
				return fmt.Errorf("Parse error in 'tx' command: expecting a header, got %q", token)
			}
			r.headers[splitted[0]] = splitted[1]
		} else if token.typ == METHOD_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			// XXX: check that method isn't "banana"
			r.method = token.val
		} else if token.typ == URL_ARG {
			token := s.ScanUseful()
			if token.typ != STRING {
				return fmt.Errorf("Parse error in 'tx' command: expecting a string, got %q", token)
			}
			// XXX: check that url isn't "banana"
			r.uri = token.val
		} else {
			return fmt.Errorf("Parse error in 'tx' command: expecting -url, -header, method, or -body, got %q", token)
		}
	}
	return nil
}
// Send the TxReq to the given server address and return the raw response.
// The request body is always attached (an empty reader when r.body is "").
// The caller is responsible for closing the response body.
func (r TxReq) Send(server string) (*http.Response, error) {
	client := &http.Client{}
	req, err := http.NewRequest(r.method, fmt.Sprintf("http://%s%s", server, r.uri), strings.NewReader(r.body))
	if err != nil {
		return nil, err
	}
	// Add all headers
	for key, value := range r.headers {
		req.Header.Add(key, value)
	}
	return client.Do(req)
}
package types
import (
"time"
yaml "gopkg.in/yaml.v2"
sdk "github.com/line/lfb-sdk/types"
)
// NewCommissionRates returns an initialized validator commission rates.
func NewCommissionRates(rate, maxRate, maxChangeRate sdk.Dec) CommissionRates {
	cr := CommissionRates{}
	cr.Rate = rate
	cr.MaxRate = maxRate
	cr.MaxChangeRate = maxChangeRate
	return cr
}
// NewCommission returns an initialized validator commission with the
// update time set to the Unix epoch (UTC).
func NewCommission(rate, maxRate, maxChangeRate sdk.Dec) Commission {
	c := Commission{CommissionRates: NewCommissionRates(rate, maxRate, maxChangeRate)}
	c.UpdateTime = time.Unix(0, 0).UTC()
	return c
}
// NewCommissionWithTime returns an initialized validator commission with a specified
// update time which should be the current block BFT time.
func NewCommissionWithTime(rate, maxRate, maxChangeRate sdk.Dec, updatedAt time.Time) Commission {
	c := Commission{CommissionRates: NewCommissionRates(rate, maxRate, maxChangeRate)}
	c.UpdateTime = updatedAt
	return c
}
// String implements the Stringer interface for a Commission object.
// A YAML marshalling error is deliberately ignored, yielding an empty string.
func (c Commission) String() string {
	out, _ := yaml.Marshal(c)
	return string(out)
}

// String implements the Stringer interface for a CommissionRates object.
// A YAML marshalling error is deliberately ignored, yielding an empty string.
func (cr CommissionRates) String() string {
	out, _ := yaml.Marshal(cr)
	return string(out)
}
// Validate performs basic sanity validation checks of initial commission
// parameters. If validation fails, an SDK error is returned.
// Invariants enforced: 0 <= Rate <= MaxRate <= 1 and
// 0 <= MaxChangeRate <= MaxRate.
func (cr CommissionRates) Validate() error {
	switch {
	case cr.MaxRate.IsNegative():
		// max rate cannot be negative
		return ErrCommissionNegative

	case cr.MaxRate.GT(sdk.OneDec()):
		// max rate cannot be greater than 1
		return ErrCommissionHuge

	case cr.Rate.IsNegative():
		// rate cannot be negative
		return ErrCommissionNegative

	case cr.Rate.GT(cr.MaxRate):
		// rate cannot be greater than the max rate
		return ErrCommissionGTMaxRate

	case cr.MaxChangeRate.IsNegative():
		// change rate cannot be negative
		return ErrCommissionChangeRateNegative

	case cr.MaxChangeRate.GT(cr.MaxRate):
		// change rate cannot be greater than the max rate
		return ErrCommissionChangeRateGTMaxRate
	}

	return nil
}
// ValidateNewRate performs basic sanity validation checks of a new commission
// rate. If validation fails, an SDK error is returned.
// blockTime should be the current block time; it is compared against the
// commission's last UpdateTime to enforce the 24-hour change window.
func (c Commission) ValidateNewRate(newRate sdk.Dec, blockTime time.Time) error {
	switch {
	case blockTime.Sub(c.UpdateTime).Hours() < 24:
		// new rate cannot be changed more than once within 24 hours
		return ErrCommissionUpdateTime

	case newRate.IsNegative():
		// new rate cannot be negative
		return ErrCommissionNegative

	case newRate.GT(c.MaxRate):
		// new rate cannot be greater than the max rate
		return ErrCommissionGTMaxRate

	case newRate.Sub(c.Rate).GT(c.MaxChangeRate):
		// new rate % points change cannot be greater than the max change rate
		return ErrCommissionGTMaxChangeRate
	}

	return nil
}
package goyek
import (
"errors"
"strconv"
)
// BoolParam represents a named boolean parameter that can be registered.
type BoolParam struct {
	Name    string // parameter name
	Usage   string // human-readable description
	Default bool   // value used when the parameter is not explicitly set
}

// IntParam represents a named integer parameter that can be registered.
type IntParam struct {
	Name    string // parameter name
	Usage   string // human-readable description
	Default int    // value used when the parameter is not explicitly set
}

// StringParam represents a named string parameter that can be registered.
type StringParam struct {
	Name    string // parameter name
	Usage   string // human-readable description
	Default string // value used when the parameter is not explicitly set
}

// ValueParam represents a named parameter for a custom type that can be registered.
// NewValue field must be set with a default value factory.
type ValueParam struct {
	Name     string            // parameter name
	Usage    string            // human-readable description
	NewValue func() ParamValue // factory producing a value holding the default
}

// ParamValue represents an instance of a generic parameter.
type ParamValue interface {
	// String returns the current value formatted as string.
	// The returned format should be in a single line, representing the parameter
	// as it could be provided on the command line.
	String() string

	// IsBool marks parameters that do not explicitly need to be set a value.
	// Set will be called in case the flag is not explicitly parameterized.
	IsBool() bool

	// Get returns the current value, properly typed.
	// Values must return their default value if Set() has not yet been called.
	Get() interface{}

	// Set parses the given string and sets the typed value.
	Set(string) error
}
// RegisteredParam represents a parameter that has been registered to a Flow.
// It can be used as a parameter for a Task.
type RegisteredParam interface {
	Name() string
	Usage() string
	Default() string
	value(tf *TF) ParamValue
}

// registeredParam is a helper struct encapsulating concrete registered parameter type.
type registeredParam struct {
	name     string            // registered parameter name
	usage    string            // human-readable description
	newValue func() ParamValue // factory producing a value holding the default
}

// Name returns the key of the parameter.
func (p registeredParam) Name() string {
	return p.name
}

// Usage returns the parameter's description.
func (p registeredParam) Usage() string {
	return p.usage
}

// Default returns the parameter's default value formatted as string.
// It is derived by formatting a freshly created value.
func (p registeredParam) Default() string {
	return p.newValue().String()
}

// value looks up the concrete value for this parameter in the given flow.
// If the parameter was never registered with the flow, this is a
// programming error and is reported via tf.Fatal with a ParamError.
func (p registeredParam) value(tf *TF) ParamValue {
	value, existing := tf.paramValues[p.name]
	if !existing {
		tf.Fatal(&ParamError{Key: p.name, Err: errors.New("parameter not registered")})
	}
	return value
}

// RegisteredValueParam represents a registered parameter based on a generic implementation.
type RegisteredValueParam struct {
	registeredParam
}

// Get returns the concrete instance of the generic value in the given flow.
func (p RegisteredValueParam) Get(tf *TF) interface{} {
	return p.value(tf).Get()
}
// boolValue adapts a bool to the ParamValue interface.
type boolValue bool

// Set parses s as a boolean. An empty string counts as true so the flag can
// be given without an explicit value. On malformed input the stored value
// becomes false and a generic "parse error" is returned.
func (value *boolValue) Set(s string) error {
	if s == "" {
		*value = true
		return nil
	}
	parsed, parseErr := strconv.ParseBool(s)
	*value = boolValue(parsed)
	if parseErr != nil {
		return errors.New("parse error")
	}
	return nil
}

// Get returns the current value as an untyped bool.
func (value *boolValue) Get() interface{} {
	return bool(*value)
}

// String formats the current value as "true" or "false".
func (value *boolValue) String() string {
	return strconv.FormatBool(bool(*value))
}

// IsBool reports that this parameter may be set without an explicit value.
func (value *boolValue) IsBool() bool {
	return true
}
// RegisteredBoolParam represents a registered boolean parameter.
type RegisteredBoolParam struct {
	registeredParam
}

// Get returns the boolean value of the parameter in the given flow.
// The type assertion is safe as long as the parameter was registered as a
// boolean parameter.
func (p RegisteredBoolParam) Get(tf *TF) bool {
	value := p.value(tf)
	return value.Get().(bool)
}
// intValue adapts an int to the ParamValue interface.
type intValue int

// Set parses s as an integer. Because the base argument is 0, prefixes such
// as "0x", "0o", "0b" and a leading "0" select the base. On malformed or
// out-of-range input the parsed result (0 on syntax errors) is stored and a
// generic "parse error" is returned.
func (value *intValue) Set(s string) error {
	parsed, parseErr := strconv.ParseInt(s, 0, strconv.IntSize)
	*value = intValue(parsed)
	if parseErr != nil {
		return errors.New("parse error")
	}
	return nil
}

// Get returns the current value as an untyped int.
func (value *intValue) Get() interface{} {
	return int(*value)
}

// String formats the current value in base 10.
func (value *intValue) String() string {
	return strconv.Itoa(int(*value))
}

// IsBool reports that an explicit value is required when setting this parameter.
func (value *intValue) IsBool() bool {
	return false
}
// RegisteredIntParam represents a registered integer parameter.
type RegisteredIntParam struct {
	registeredParam
}

// Get returns the integer value of the parameter in the given flow.
// The type assertion is safe as long as the parameter was registered as an
// integer parameter.
func (p RegisteredIntParam) Get(tf *TF) int {
	value := p.value(tf)
	return value.Get().(int)
}
// stringValue adapts a string to the flag-style value interface.
type stringValue string

// Set stores val verbatim; a string can never fail to parse.
func (s *stringValue) Set(val string) error {
	*s = stringValue(val)
	return nil
}

// Get returns the value as a plain string.
func (s *stringValue) Get() interface{} { return string(*s) }

// String returns the raw string value.
func (s *stringValue) String() string { return string(*s) }

// IsBool reports that this value is not boolean.
func (s *stringValue) IsBool() bool { return false }
// RegisteredStringParam represents a registered string parameter.
type RegisteredStringParam struct {
registeredParam
}
// Get returns the string value of the parameter in the given flow.
func (p RegisteredStringParam) Get(tf *TF) string {
value := p.value(tf)
return value.Get().(string)
} | parameter.go | 0.755457 | 0.429489 | parameter.go | starcoder |
package unit
// Temperature represents a SI unit of temperature (in kelvin, K)
type Temperature Unit

// Kelvin is the base temperature unit; all conversions go through it.
const (
	Kelvin Temperature = 1e0
)
// FromCelsius converts a temperature from degrees Celsius (°C) to kelvin.
func FromCelsius(t float64) Temperature {
	return Temperature(t + 273.15)
}

// FromDelisle converts a temperature from degrees Delisle (°De) to kelvin.
func FromDelisle(t float64) Temperature {
	return Temperature(373.15 - (t * 2 / 3))
}

// FromFahrenheit converts a temperature from degrees Fahrenheit (°F) to kelvin.
func FromFahrenheit(t float64) Temperature {
	return Temperature((t + 459.67) * 5 / 9)
}

// FromKelvin converts a temperature already expressed in kelvin (identity).
func FromKelvin(t float64) Temperature {
	return Temperature(t)
}

// FromNewton converts a temperature from degrees Newton (°N) to kelvin.
func FromNewton(t float64) Temperature {
	return Temperature(t*100/33 + 273.15)
}

// FromRankine converts a temperature from degrees Rankine (°Ra) to kelvin.
func FromRankine(t float64) Temperature {
	return Temperature((t-491.67)*5/9 + 273.15)
}

// FromReaumur converts a temperature from degrees Réaumur (°Ré) to kelvin.
func FromReaumur(t float64) Temperature {
	return Temperature(t*5/4 + 273.15)
}

// FromRomer converts a temperature from degrees Rømer (°Rø) to kelvin.
func FromRomer(t float64) Temperature {
	return Temperature((t-7.5)*40/21 + 273.15)
}
// Celsius returns the temperature in degrees Celsius (°C).
func (t Temperature) Celsius() float64 {
	return float64(t - 273.15)
}

// Delisle returns the temperature in degrees Delisle (°De).
func (t Temperature) Delisle() float64 {
	return float64((373.15 - t) * 3 / 2)
}

// Fahrenheit returns the temperature in degrees Fahrenheit (°F).
func (t Temperature) Fahrenheit() float64 {
	return float64((t * 9 / 5) - 459.67)
}

// Kelvin returns the temperature in kelvin (identity).
func (t Temperature) Kelvin() float64 {
	return float64(t)
}

// Newton returns the temperature in degrees Newton (°N).
func (t Temperature) Newton() float64 {
	return float64((t - 273.15) * 33 / 100)
}

// Rankine returns the temperature in degrees Rankine (°Ra).
func (t Temperature) Rankine() float64 {
	return float64((t-273.15)*9/5 + 491.67)
}

// Reaumur returns the temperature in degrees Réaumur (°Ré).
func (t Temperature) Reaumur() float64 {
	return float64((t - 273.15) * 4 / 5)
}

// Romer returns the temperature in degrees Rømer (°Rø).
func (t Temperature) Romer() float64 {
	return float64((t-273.15)*21/40 + 7.5)
}
package fwk
import (
"go-hep.org/x/hep/hbook"
)
// Hist is a histogram, scatter or profile object that can
// be saved or loaded by the HistSvc.
type Hist interface {
	// Name returns the unique identifier of the object.
	Name() string
	// Value returns the wrapped value (e.g. an *hbook.H1D).
	Value() interface{}
}

// HID is a histogram, scatter or profile identifier
type HID string
// H1D wraps a hbook.H1D for safe concurrent access
type H1D struct {
	ID   HID // unique id
	Hist *hbook.H1D
}

// Name returns the identifier of the histogram.
func (h H1D) Name() string { return string(h.ID) }

// Value returns the wrapped hbook.H1D.
func (h H1D) Value() interface{} { return h.Hist }

// H2D wraps a hbook.H2D for safe concurrent access
type H2D struct {
	ID   HID // unique id
	Hist *hbook.H2D
}

// Name returns the identifier of the histogram.
func (h H2D) Name() string { return string(h.ID) }

// Value returns the wrapped hbook.H2D.
func (h H2D) Value() interface{} { return h.Hist }

// P1D wraps a hbook.P1D for safe concurrent access
type P1D struct {
	ID      HID // unique id
	Profile *hbook.P1D
}

// Name returns the identifier of the profile.
func (p P1D) Name() string { return string(p.ID) }

// Value returns the wrapped hbook.P1D.
func (p P1D) Value() interface{} { return p.Profile }

// S2D wraps a hbook.S2D for safe concurrent access
type S2D struct {
	ID      HID // unique id
	Scatter *hbook.S2D
}

// Name returns the identifier of the scatter.
func (s S2D) Name() string { return string(s.ID) }

// Value returns the wrapped hbook.S2D.
func (s S2D) Value() interface{} { return s.Scatter }
// HistSvc is the interface providing access to histograms.
//
// Booked objects are addressed by their HID, which is the full name
// passed at booking time.
type HistSvc interface {
	Svc

	// BookH1D books a 1D histogram.
	// name should be of the form: "/fwk/streams/<stream-name>/<path>/<histogram-name>"
	BookH1D(name string, nbins int, xmin, xmax float64) (H1D, error)

	// BookH2D books a 2D histogram.
	// name should be of the form: "/fwk/streams/<stream-name>/<path>/<histogram-name>"
	BookH2D(name string, nx int, xmin, xmax float64, ny int, ymin, ymax float64) (H2D, error)

	// BookP1D books a 1D profile.
	// name should be of the form: "/fwk/streams/<stream-name>/<path>/<profile-name>"
	BookP1D(name string, nbins int, xmin, xmax float64) (P1D, error)

	// BookS2D books a 2D scatter.
	// name should be of the form: "/fwk/streams/<stream-name>/<path>/<scatter-name>"
	BookS2D(name string) (S2D, error)

	// FillH1D fills the 1D-histogram id with data x and weight w.
	FillH1D(id HID, x, w float64)

	// FillH2D fills the 2D-histogram id with data (x,y) and weight w.
	FillH2D(id HID, x, y, w float64)

	// FillP1D fills the 1D-profile id with data (x,y) and weight w.
	FillP1D(id HID, x, y, w float64)

	// FillS2D fills the 2D-scatter id with data (x,y).
	FillS2D(id HID, x, y float64)
}
// Compile-time checks that all wrapper types satisfy the Hist interface.
var _ Hist = (*H1D)(nil)
var _ Hist = (*H2D)(nil)
var _ Hist = (*P1D)(nil)
var _ Hist = (*S2D)(nil)
package types
// UniversityTeachingStaff holds the data for university teaching staff.
//
// NOTE(review): the sample values inside the `fake` tags below appear to be
// mojibake (Greek text decoded with the wrong charset); verify the source
// file encoding before regenerating fixtures.
type UniversityTeachingStaff struct {
	AssistantProfessors int    `json:"assistant_professors" fake:"{number:100,1000}"`
	AssociateProfessors int    `json:"associate_professors" fake:"{number:100,1000}"`
	FullProfessors      int    `json:"full_professors" fake:"{number:300,1500}"`
	Institution         string `json:"institution" fake:"{randomstring:[ฮฮฮฮฮฮ & ฮฮฮ ฮฮฮฮฃฮคฮกฮฮฮฮ ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฮฮฉฮ,ฮฮกฮฮฃฮคฮฮคฮฮฮฮฮ ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฃฮฃฮฮฮฮฮฮฮฮฃ,ฮฮฮฮฮฮ ฮฮฮคฮฃฮฮฮฮ ฮ ฮฮฮฅฮคฮฮงฮฮฮฮ]}"`
	Lecturers           int    `json:"lecturers" fake:"{number:10,100}"`
	PracticeLecturers   int    `json:"practice_lecturers" fake:"{number:0,15}"`
	PracticeProfessors  int    `json:"practice_professors" fake:"{number:0,5}"`
	Year                int    `json:"year" fake:"{year}"`
}

// StudentsBySchool holds the data for students by school.
// NOTE(review): some `fake` tags below contain literal newlines inside the
// raw-string struct tag (valid Go, but likely an encoding artifact).
type StudentsBySchool struct {
	District               string `json:"district" fake:"{randomstring:[ฮฮฮฮฅฮฮฅฮฮฃฮ ฮ.ฮ. ฮฮฮฮฮฮฮฃ,ฮฮฮฮฅฮฮฅฮฮฃฮ ฮ .ฮ. ฮฮฮกฮฮฅ,ฮฮฮฮฅฮฮฅฮฮฃฮ ฮ.ฮ. ฮฮฮฮคฮฮฮฮฮฮฃ ฮฮคฮคฮฮฮฮฃ]}"`
	Jurisdiction           string `json:"jurisdiction" fake:"{randomstring:[ฮ ฮฮกฮฮฆฮฮกฮฮฮฮฮ ฮ/ฮฮฃฮ ฮ/ฮฮฮฮฮฃ ฮฮฮ ฮ/ฮฮฮฮฮฃ ฮฮฮ /ฮฃฮฮฃ ฮฮฮฮคฮกฮฮฮฮฃ ฮฮฮฮฮฮฮฮฮฮฃ,ฮ ฮฮกฮฮฆฮฮกฮฮฮฮฮ ฮ/ฮฮฃฮ ฮ/ฮฮฮฮฮฃ ฮฮฮ ฮ/ฮฮฮฮฮฃ ฮฮฮ /ฮฃฮฮฃ ฮฮคฮคฮฮฮฮฃ]}"`
	RegisteredStudentBoys  int    `json:"registered_student_boys" fake:"{number:1,200}"`
	RegisteredStudentGirls int    `json:"registered_student_girls" fake:"{number:1,200}"`
	SchoolClass            string `json:"school_class" fake:"{randomstring:[ฮฮทฯฮนฮฑฮณฯฮณฮตฮฏฮฑ,ฮฮทฮผฮฟฯฮนฮบฮฌ ฮฃฯฮฟฮปฮตฮฏฮฑ,ฮฯ
ฮผฮฝฮฌฯฮนฮฑ,ฮฯฮบฮตฮนฮฑ]}"`
	SchoolName             string `json:"school_name" fake:"{randomstring:[4ฮฟ ฮฮฮฮกฮฮฃฮฮ ฮฮฅฮฮฮฮฃฮฮ ฮฮฮคฮฮฮฮกฮฆฮฉฮฃฮฮฃ ฮฮคฮคฮฮฮฮฃ,7ฮฟ ฮฮฮฮกฮฮฃฮฮ ฮฮฮฮฮฮ ฮฮฅฮฮฮฮ ฮฮงฮฮกฮฮฉฮ]}"`
	SchoolType             string `json:"school_type" fake:"{randomstring:[ฮฮผฮตฯฮฎฯฮนฮฟ ฮฯ
ฮผฮฝฮฌฯฮนฮฟ,ฮฮผฮตฯฮฎฯฮนฮฟ ฮฮตฮฝฮนฮบฯ ฮฯฮบฮตฮนฮฟ,ฮฮดฮนฯฯฮนฮบฯ ฮฮทฯฮนฮฑฮณฯฮณฮตฮฏฮฟ]}"`
	Year                   int    `json:"year" fake:"{year}"`
}

// AtlasInternshipStatistics holds the data for the Atlas internship system.
type AtlasInternshipStatistics struct {
	Year          int    `json:"year" fake:"{year}"`
	Institution   string `json:"institution" fake:"{randomstring:[ฮฮฮฮฮฮฮฮฮฮ ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฮฮฉฮ,ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฮฮฮฮฅ,ฮคฮฮ ฮฮฮฮฮฮฃ]}"`
	PrivateSector int    `json:"private_sector" fake:"{number:0,500}"`
	PublicSector  int    `json:"public_sector" fake:"{number:0,500}"`
	NGO           int    `json:"ngo" fake:"{number:0,1}"`
}

// EudoksosRequestsAndDeliveries holds the data for Eudoksos requests and deliveries.
type EudoksosRequestsAndDeliveries struct {
	Year                   int    `json:"year" fake:"{year}"`
	Period                 string `json:"period" fake:"{randomstring:[ฮงฮตฮนฮผฮตฯฮนฮฝฮฎ,ฮฮฑฯฮนฮฝฮฎ]}"`
	Institution            string `json:"institution" fake:"{randomstring:[ฮฮฮฮฮฮฮฮฮฮ ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฮฮฉฮ,ฮ ฮฮฮฮ ฮฮฃฮคฮฮฮฮ ฮฮฮฮฮฮฮฅ,ฮคฮฮ ฮฮฮฮฮฮฃ]}"`
	Department             string `json:"department" fake:"{randomstring:[ฮฮฮฃฮฮฮคฮฮฮฮฃ ฮฮฮ ฮฮฮฃฮฮฮคฮฮฮฮฮฮฮฃ,ฮฆฮฅฮฃฮฮฮฮฮฮกฮฮ ฮฮฮฮฃ,ฮฮฮฮฮฮฮฃ]}"`
	StudentsWithStatements int    `json:"studentswithstatements" fake:"{number:0,2500}"`
	StudentWithDeliveries  int    `json:"studentwithdeliveries" fake:"{number:0,2500}"`
}
package abi
import (
"encoding/binary"
"fmt"
"math/big"
"reflect"
"strconv"
"github.com/mitchellh/mapstructure"
"github.com/umbracle/go-web3"
)
// Decode decodes the ABI-encoded input according to the given type.
func Decode(t *Type, input []byte) (interface{}, error) {
	if len(input) == 0 {
		return nil, fmt.Errorf("empty input")
	}
	value, _, err := decode(t, input)
	return value, err
}
// DecodeStruct decodes the ABI-encoded input according to the given type
// and maps the resulting generic value onto the out struct.
func DecodeStruct(t *Type, input []byte, out interface{}) error {
	val, err := Decode(t, input)
	if err != nil {
		return err
	}
	// mapstructure's error can be returned directly; no wrapping needed.
	return mapstructure.Decode(val, out)
}
// decode decodes the next value of type t from input. It returns the decoded
// value, the portion of input following the value's first 32-byte word, and
// any error. For variable-size types the first word holds the length;
// otherwise it holds the value itself.
func decode(t *Type, input []byte) (interface{}, []byte, error) {
	var data []byte
	var length int
	var err error

	// safe check, input should be at least 32 bytes
	if len(input) < 32 {
		return nil, nil, fmt.Errorf("incorrect length")
	}

	if t.isVariableInput() {
		length, err = readLength(input)
		if err != nil {
			return nil, nil, err
		}
	} else {
		data = input[:32]
	}

	switch t.kind {
	case KindTuple:
		return decodeTuple(t, input)
	case KindSlice:
		return decodeArraySlice(t, input[32:], length)
	case KindArray:
		return decodeArraySlice(t, input, t.size)
	}

	var val interface{}
	switch t.kind {
	case KindBool:
		val, err = decodeBool(data)
	case KindInt, KindUInt:
		val = readInteger(t, data)
	case KindString:
		// Guard the content slice: readLength only validates the count
		// against the whole buffer, not the 32-byte header plus payload,
		// so a truncated payload would otherwise panic here.
		if len(input) < 32+length {
			return nil, nil, fmt.Errorf("incorrect length")
		}
		val = string(input[32 : 32+length])
	case KindBytes:
		if len(input) < 32+length {
			return nil, nil, fmt.Errorf("incorrect length")
		}
		val = input[32 : 32+length]
	case KindAddress:
		val, err = readAddr(data)
	case KindFixedBytes:
		val, err = readFixedBytes(t, data)
	case KindFunction:
		val, err = readFunctionType(t, data)
	default:
		return nil, nil, fmt.Errorf("decoding not available for type '%s'", t.kind)
	}

	return val, input[32:], err
}
var (
	// maxUint256 is 2^256 - 1: the largest value a 256-bit word can hold.
	maxUint256 = big.NewInt(0).Add(
		big.NewInt(0).Exp(big.NewInt(2), big.NewInt(256), nil),
		big.NewInt(-1))
	// maxInt256 is 2^255 - 1: the largest positive two's-complement int256;
	// any word above it encodes a negative number.
	maxInt256 = big.NewInt(0).Add(
		big.NewInt(0).Exp(big.NewInt(2), big.NewInt(255), nil),
		big.NewInt(-1))
)
// readAddr extracts a 20-byte address from the right-aligned 32-byte word b.
func readAddr(b []byte) (web3.Address, error) {
	var addr web3.Address
	if len(b) != 32 {
		return addr, fmt.Errorf("len is not correct")
	}
	copy(addr[:], b[12:])
	return addr, nil
}
// readInteger decodes a 32-byte big-endian word into the Go integer type
// described by t. Fixed-width kinds are taken from the tail of the word;
// any wider type is returned as a *big.Int, with signed values converted
// from two's complement.
func readInteger(t *Type, b []byte) interface{} {
	switch t.t.Kind() {
	case reflect.Uint8:
		return b[len(b)-1]

	case reflect.Uint16:
		return binary.BigEndian.Uint16(b[len(b)-2:])

	case reflect.Uint32:
		return binary.BigEndian.Uint32(b[len(b)-4:])

	case reflect.Uint64:
		return binary.BigEndian.Uint64(b[len(b)-8:])

	case reflect.Int8:
		return int8(b[len(b)-1])

	case reflect.Int16:
		return int16(binary.BigEndian.Uint16(b[len(b)-2:]))

	case reflect.Int32:
		return int32(binary.BigEndian.Uint32(b[len(b)-4:]))

	case reflect.Int64:
		return int64(binary.BigEndian.Uint64(b[len(b)-8:]))

	default:
		ret := new(big.Int).SetBytes(b)
		if t.kind == KindUInt {
			return ret
		}

		// Words above 2^255-1 represent negative int256 values in two's
		// complement: map them to ret - 2^256.
		if ret.Cmp(maxInt256) > 0 {
			ret.Add(maxUint256, big.NewInt(0).Neg(ret))
			ret.Add(ret, big.NewInt(1))
			ret.Neg(ret)
		}
		return ret
	}
}
// readFunctionType extracts a 24-byte function value (address + selector)
// from a 32-byte word, requiring the trailing 8 bytes to be zero padding.
func readFunctionType(t *Type, word []byte) ([24]byte, error) {
	var fn [24]byte
	if !allZeros(word[24:32]) {
		return fn, fmt.Errorf("function type expects the last 8 bytes to be empty but found: %b", word[24:32])
	}
	copy(fn[:], word[0:24])
	return fn, nil
}
// readFixedBytes copies the first t.size bytes of the word into a freshly
// allocated fixed-size byte array of the reflect type described by t.
func readFixedBytes(t *Type, word []byte) (interface{}, error) {
	array := reflect.New(t.t).Elem()
	reflect.Copy(array, reflect.ValueOf(word[0:t.size]))
	return array.Interface(), nil
}
// decodeTuple decodes a tuple (struct-like) value into a map keyed by member
// name, falling back to the positional index for unnamed members. Static
// members are read inline from the head; dynamic members store a 32-byte
// offset into the original buffer where their content lives. It returns the
// decoded map and the unconsumed tail of data.
func decodeTuple(t *Type, data []byte) (interface{}, []byte, error) {
	res := make(map[string]interface{})

	orig := data
	origLen := len(orig)
	for indx, arg := range t.tuple {
		entry := data
		if arg.Elem.isDynamicType() {
			// Head only holds an offset; the actual content is in the tail.
			offset, err := readOffset(data, origLen)
			if err != nil {
				return nil, nil, err
			}
			entry = orig[offset:]
		}

		val, tail, err := decode(arg.Elem, entry)
		if err != nil {
			return nil, nil, err
		}

		// Advance the head: static members consume their encoded bytes,
		// dynamic members consume exactly one 32-byte offset word.
		if !arg.Elem.isDynamicType() {
			data = tail
		} else {
			data = data[32:]
		}

		name := arg.Name
		if name == "" {
			name = strconv.Itoa(indx)
		}

		if _, ok := res[name]; !ok {
			res[name] = val
		} else {
			return nil, nil, fmt.Errorf("tuple with repeated values")
		}
	}
	return res, data, nil
}
// decodeArraySlice decodes size consecutive elements into a Go slice
// (KindSlice) or fixed-size array (KindArray). Static elements are laid out
// back to back in the head; dynamic elements store a 32-byte offset into the
// original buffer. It returns the decoded value and the unconsumed tail.
func decodeArraySlice(t *Type, data []byte, size int) (interface{}, []byte, error) {
	if size < 0 {
		return nil, nil, fmt.Errorf("size is lower than zero")
	}
	// Every element occupies at least one 32-byte word in the head.
	if 32*size > len(data) {
		return nil, nil, fmt.Errorf("size is too big")
	}

	var res reflect.Value
	if t.kind == KindSlice {
		res = reflect.MakeSlice(t.t, size, size)
	} else if t.kind == KindArray {
		res = reflect.New(t.t).Elem()
	}

	orig := data
	origLen := len(orig)
	for indx := 0; indx < size; indx++ {
		isDynamic := t.elem.isDynamicType()

		entry := data
		if isDynamic {
			// Head only holds an offset; the content is in the tail.
			offset, err := readOffset(data, origLen)
			if err != nil {
				return nil, nil, err
			}
			entry = orig[offset:]
		}

		val, tail, err := decode(t.elem, entry)
		if err != nil {
			return nil, nil, err
		}

		// Static elements consume their encoded bytes; dynamic elements
		// consume exactly one 32-byte offset word from the head.
		if !isDynamic {
			data = tail
		} else {
			data = data[32:]
		}
		res.Index(indx).Set(reflect.ValueOf(val))
	}
	return res.Interface(), data, nil
}
// decodeBool interprets a 32-byte word as a boolean: the last byte must be
// exactly 0 or 1; anything else is an encoding error.
func decodeBool(data []byte) (interface{}, error) {
	switch data[31] {
	case 0:
		return false, nil
	case 1:
		return true, nil
	}
	return false, fmt.Errorf("bad boolean")
}
// readOffset reads a 32-byte big-endian offset from the head of data and
// validates that it fits in an int64 and does not point past size, the
// length of the enclosing encoded payload.
//
// The second parameter was previously named "len", shadowing the builtin;
// it is renamed to size for clarity (call sites are unaffected).
func readOffset(data []byte, size int) (int, error) {
	offsetBig := big.NewInt(0).SetBytes(data[0:32])
	if offsetBig.BitLen() > 63 {
		return 0, fmt.Errorf("offset larger than int64: %v", offsetBig.Int64())
	}

	offset := int(offsetBig.Int64())
	if offset > size {
		return 0, fmt.Errorf("offset insufficient %v require %v", size, offset)
	}
	return offset, nil
}
// readLength reads a 32-byte big-endian length from the head of data and
// validates that it fits in an int64 and is not larger than the buffer.
func readLength(data []byte) (int, error) {
	n := new(big.Int).SetBytes(data[:32])
	if n.BitLen() > 63 {
		return 0, fmt.Errorf("length larger than int64: %v", n.Int64())
	}
	length := int(n.Uint64())
	if length > len(data) {
		return 0, fmt.Errorf("length insufficient %v require %v", len(data), length)
	}
	return length, nil
}
// allZeros reports whether every byte in b is zero. An empty or nil slice
// is vacuously all zeros.
func allZeros(b []byte) bool {
	for i := range b {
		if b[i] != 0 {
			return false
		}
	}
	return true
}
package linbuf
import (
"encoding/binary"
"math"
"github.com/valyala/bytebufferpool"
)
// Encoder serializes primitive values into a pooled byte buffer.
// Obtain one via NewEncoder; the zero value has no buffer attached.
type Encoder struct {
	b *bytebufferpool.ByteBuffer
}

// NewEncoder returns an Encoder backed by a buffer taken from the shared pool.
func NewEncoder() Encoder {
	return Encoder{
		b: bytebufferpool.Get(),
	}
}

// Reset discards everything written so far, keeping the buffer for reuse.
func (e Encoder) Reset() {
	e.b.Reset()
}

// Finalize returns the underlying buffer holding the encoded bytes.
func (e Encoder) Finalize() *bytebufferpool.ByteBuffer {
	return e.b
}

// FinalizeBytes returns the encoded bytes; the slice aliases the internal
// buffer and is invalidated by Reset or Destroy.
func (e Encoder) FinalizeBytes() []byte {
	return e.b.Bytes()
}

// Destroy returns the underlying buffer to the pool; the Encoder must not
// be used afterwards.
func (e Encoder) Destroy() {
	bytebufferpool.Put(e.b)
}
// Uint8 appends v as a single byte.
func (e Encoder) Uint8(v uint8) Encoder {
	e.b.WriteByte(v)
	return e
}

// Uint16 appends v in little-endian byte order.
// Uses binary.LittleEndian instead of hand-rolled shifts (same layout).
func (e Encoder) Uint16(v uint16) Encoder {
	var b [2]byte
	binary.LittleEndian.PutUint16(b[:], v)
	e.b.Write(b[:])
	return e
}

// Uint32 appends v in little-endian byte order.
func (e Encoder) Uint32(v uint32) Encoder {
	var b [4]byte
	binary.LittleEndian.PutUint32(b[:], v)
	e.b.Write(b[:])
	return e
}

// Uint64 appends v in little-endian byte order.
func (e Encoder) Uint64(v uint64) Encoder {
	var b [8]byte
	binary.LittleEndian.PutUint64(b[:], v)
	e.b.Write(b[:])
	return e
}
// Int8 appends v as a single byte (two's-complement bit pattern).
func (e Encoder) Int8(v int8) Encoder { return e.Uint8(uint8(v)) }

// Int16 appends v in little-endian order (two's-complement bit pattern).
func (e Encoder) Int16(v int16) Encoder { return e.Uint16(uint16(v)) }

// Int32 appends v in little-endian order (two's-complement bit pattern).
func (e Encoder) Int32(v int32) Encoder { return e.Uint32(uint32(v)) }

// Int64 appends v in little-endian order (two's-complement bit pattern).
func (e Encoder) Int64(v int64) Encoder { return e.Uint64(uint64(v)) }

// Float32 appends the IEEE-754 bit pattern of v in little-endian order.
func (e Encoder) Float32(v float32) Encoder { return e.Uint32(math.Float32bits(v)) }

// Float64 appends the IEEE-754 bit pattern of v in little-endian order.
func (e Encoder) Float64(v float64) Encoder { return e.Uint64(math.Float64bits(v)) }
// VarUint64 appends v using unsigned varint encoding (1-10 bytes).
func (e Encoder) VarUint64(v uint64) Encoder {
	var buf [10]byte
	n := binary.PutUvarint(buf[:], v)
	e.b.Write(buf[:n])
	return e
}

// VarUint32 appends v as an unsigned varint (1-5 bytes).
func (e Encoder) VarUint32(v uint32) Encoder {
	var buf [6]byte
	n := binary.PutUvarint(buf[:], uint64(v))
	e.b.Write(buf[:n])
	return e
}

// VarUint16 appends v as an unsigned varint (1-3 bytes).
func (e Encoder) VarUint16(v uint16) Encoder {
	var buf [4]byte
	n := binary.PutUvarint(buf[:], uint64(v))
	e.b.Write(buf[:n])
	return e
}

// VarInt64 appends v using zig-zag signed varint encoding (1-10 bytes).
func (e Encoder) VarInt64(v int64) Encoder {
	var buf [10]byte
	n := binary.PutVarint(buf[:], v)
	e.b.Write(buf[:n])
	return e
}

// VarInt32 appends v as a signed varint (1-5 bytes).
func (e Encoder) VarInt32(v int32) Encoder {
	var buf [6]byte
	n := binary.PutVarint(buf[:], int64(v))
	e.b.Write(buf[:n])
	return e
}

// VarInt16 appends v as a signed varint (1-3 bytes).
func (e Encoder) VarInt16(v int16) Encoder {
	var buf [4]byte
	n := binary.PutVarint(buf[:], int64(v))
	e.b.Write(buf[:n])
	return e
}
func (e Encoder) String(s string) Encoder {
e.VarUint64(uint64(len(s)))
e.b.WriteString(s)
return e
}
func (e Encoder) Bytes(b []byte) Encoder {
e.VarUint64(uint64(len(b)))
e.b.Write(b)
return e
}
func (e Encoder) Boolean(b bool) Encoder {
if b {
e.Uint8(1)
} else {
e.Uint8(0)
}
return e
} | encoder.go | 0.706393 | 0.429429 | encoder.go | starcoder |
package sl
import "github.com/theplant/cldr"
// calendar holds the CLDR date/time formatting data for the Slovenian (sl)
// locale: format patterns plus month, day and day-period names.
// NOTE(review): several day names below (e.g. Thursday abbreviations) appear
// mojibake-encoded; verify the source file's character encoding.
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date:     cldr.CalendarDateFormat{Full: "EEEE, dd. MMMM y", Long: "dd. MMMM y", Medium: "d. MMM y", Short: "d. MM. yy"},
		Time:     cldr.CalendarDateFormat{Full: "HH:mm:ss zzzz", Long: "HH:mm:ss z", Medium: "HH:mm:ss", Short: "HH:mm"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "jan", Feb: "feb", Mar: "mar", Apr: "apr", May: "maj", Jun: "jun", Jul: "jul", Aug: "avg", Sep: "sep", Oct: "okt", Nov: "nov", Dec: "dec"},
			Narrow:      cldr.CalendarMonthFormatNameValue{Jan: "j", Feb: "f", Mar: "m", Apr: "a", May: "m", Jun: "j", Jul: "j", Aug: "a", Sep: "s", Oct: "o", Nov: "n", Dec: "d"},
			Short:       cldr.CalendarMonthFormatNameValue{},
			Wide:        cldr.CalendarMonthFormatNameValue{Jan: "januar", Feb: "februar", Mar: "marec", Apr: "april", May: "maj", Jun: "junij", Jul: "julij", Aug: "avgust", Sep: "september", Oct: "oktober", Nov: "november", Dec: "december"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "ned", Mon: "pon", Tue: "tor", Wed: "sre", Thu: "ฤet", Fri: "pet", Sat: "sob"},
			Narrow:      cldr.CalendarDayFormatNameValue{Sun: "n", Mon: "p", Tue: "t", Wed: "s", Thu: "ฤ", Fri: "p", Sat: "s"},
			Short:       cldr.CalendarDayFormatNameValue{Sun: "ned.", Mon: "pon.", Tue: "tor.", Wed: "sre.", Thu: "ฤet.", Fri: "pet.", Sat: "sob."},
			Wide:        cldr.CalendarDayFormatNameValue{Sun: "nedelja", Mon: "ponedeljek", Tue: "torek", Wed: "sreda", Thu: "ฤetrtek", Fri: "petek", Sat: "sobota"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{},
			Narrow:      cldr.CalendarPeriodFormatNameValue{AM: "d", PM: "p"},
			Short:       cldr.CalendarPeriodFormatNameValue{},
			Wide:        cldr.CalendarPeriodFormatNameValue{AM: "dop.", PM: "pop."},
		},
	},
}
package eplot
import (
"strings"
"github.com/emer/etable/etable"
"github.com/emer/etable/minmax"
"github.com/goki/gi/gi"
"github.com/goki/gi/gist"
"github.com/goki/ki/kit"
)
// PlotParams are parameters for the overall plot. Per-field descriptions
// live in the `desc` struct tags, which the GUI also displays.
type PlotParams struct {
	Title      string    `desc:"optional title at top of plot"`
	Type       PlotTypes `desc:"type of plot to generate.  For a Bar plot, items are plotted ordinally by row and the XAxis is optional"`
	Lines      bool      `desc:"plot lines"`
	Points     bool      `desc:"plot points with symbols"`
	LineWidth  float64   `desc:"width of lines"`
	PointSize  float64   `desc:"size of points"`
	BarWidth   float64   `min:"0.01" max:"1" desc:"width of bars for bar plot, as fraction of available space -- 1 = no gaps, .8 default"`
	NegXDraw   bool      `desc:"draw lines that connect points with a negative X-axis direction -- otherwise these are treated as breaks between repeated series and not drawn"`
	Scale      float64   `def:"2" desc:"overall scaling factor -- the larger the number, the larger the fonts are relative to the graph"`
	XAxisCol   string    `desc:"what column to use for the common X axis -- if empty or not found, the row number is used.  This optional for Bar plots -- if present and LegendCol is also present, then an extra space will be put between X values."`
	LegendCol  string    `desc:"optional column for adding a separate colored / styled line or bar according to this value -- acts just like a separate Y variable, crossed with Y variables"`
	XAxisRot   float64   `desc:"rotation of the X Axis labels, in degrees"`
	XAxisLabel string    `desc:"optional label to use for XAxis instead of column name"`
	YAxisLabel string    `desc:"optional label to use for YAxis -- if empty, first column name is used"`

	// Plot is a back-pointer to the owning plot, used by Update; not saved.
	Plot *Plot2D `copy:"-" json:"-" xml:"-" view:"-" desc:"our plot, for update method"`
}
// Defaults fills in zero-valued fields with sensible defaults.
// A LineWidth of 0 is used as the marker that the struct was never
// initialized, so the whole line/point group is set together.
func (pp *PlotParams) Defaults() {
	if pp.LineWidth == 0 {
		pp.Lines = true
		pp.Points = false
		pp.LineWidth = 1
		pp.PointSize = 3
		pp.BarWidth = .8
	}
	if pp.Scale == 0 {
		pp.Scale = 2
	}
}
// Update satisfies the gi.Updater interface and will trigger display update on edits.
// It also clamps BarWidth back to the default when it exceeds 1.
func (pp *PlotParams) Update() {
	if pp.BarWidth > 1 {
		pp.BarWidth = .8
	}
	if pl := pp.Plot; pl != nil {
		pl.Update()
	}
}

// CopyFrom copies all params from fr while keeping this struct's plot
// back-pointer intact.
func (pp *PlotParams) CopyFrom(fr *PlotParams) {
	keep := pp.Plot
	*pp = *fr
	pp.Plot = keep
}

// FmMeta sets plot params from the table's meta data.
func (pp *PlotParams) FmMeta(dt *etable.Table) {
	pp.FmMetaMap(dt.MetaData)
}
// MetaMapLower looks up key in meta, falling back to the all-lowercase
// spelling of the key when the exact spelling is absent.
func MetaMapLower(meta map[string]string, key string) (string, bool) {
	if v, ok := meta[key]; ok {
		return v, true
	}
	v, ok := meta[strings.ToLower(key)]
	return v, ok
}
// metaBool interprets a meta-data flag value: "+" and "true" enable it,
// everything else disables it.
func metaBool(op string) bool {
	return op == "+" || op == "true"
}

// FmMetaMap sets plot params from a meta data map. Keys are matched either
// exactly or in their all-lowercase form (see MetaMapLower); absent keys
// leave the corresponding field untouched. The repeated "+/true" flag
// parsing is factored into metaBool.
func (pp *PlotParams) FmMetaMap(meta map[string]string) {
	if typ, has := MetaMapLower(meta, "Type"); has {
		pp.Type.FromString(typ)
	}
	if op, has := MetaMapLower(meta, "Lines"); has {
		pp.Lines = metaBool(op)
	}
	if op, has := MetaMapLower(meta, "Points"); has {
		pp.Points = metaBool(op)
	}
	if lw, has := MetaMapLower(meta, "LineWidth"); has {
		pp.LineWidth, _ = kit.ToFloat(lw)
	}
	if ps, has := MetaMapLower(meta, "PointSize"); has {
		pp.PointSize, _ = kit.ToFloat(ps)
	}
	if bw, has := MetaMapLower(meta, "BarWidth"); has {
		pp.BarWidth, _ = kit.ToFloat(bw)
	}
	if op, has := MetaMapLower(meta, "NegXDraw"); has {
		pp.NegXDraw = metaBool(op)
	}
	if scl, has := MetaMapLower(meta, "Scale"); has {
		pp.Scale, _ = kit.ToFloat(scl)
	}
	if xc, has := MetaMapLower(meta, "XAxisCol"); has {
		pp.XAxisCol = xc
	}
	if lc, has := MetaMapLower(meta, "LegendCol"); has {
		pp.LegendCol = lc
	}
	if xrot, has := MetaMapLower(meta, "XAxisRot"); has {
		pp.XAxisRot, _ = kit.ToFloat(xrot)
	}
	if lb, has := MetaMapLower(meta, "XAxisLabel"); has {
		pp.XAxisLabel = lb
	}
	if lb, has := MetaMapLower(meta, "YAxisLabel"); has {
		pp.YAxisLabel = lb
	}
}
// ColParams are parameters for plotting one column of data. Per-field
// descriptions live in the `desc` struct tags, which the GUI also displays.
type ColParams struct {
	On        bool           `desc:"plot this column"`
	Col       string         `desc:"name of column we're plotting"`
	Range     minmax.Range64 `desc:"effective range of data to plot -- either end can be fixed"`
	FullRange minmax.F64     `desc:"full actual range of data -- only valid if specifically computed"`
	ColorName gi.ColorName   `desc:"if non-empty, color is set by this name"`
	Color     gist.Color     `desc:"color to use in plotting the line"`
	NTicks    int            `desc:"desired number of ticks"`
	Lbl       string         `desc:"if non-empty, this is an alternative label to use in plotting"`
	TensorIdx int            `desc:"if column has n-dimensional tensor cells in each row, this is the index within each cell to plot -- use -1 to plot *all* indexes as separate lines"`
	ErrCol    string         `desc:"specifies a column containing error bars for this column"`
	IsString  bool           `inactive:"+" desc:"if true this is a string column -- plots as labels"`

	// Plot is a back-pointer to the owning plot, used by Update; not saved.
	Plot *Plot2D `copy:"-" json:"-" xml:"-" view:"-" desc:"our plot, for update method"`
}
// Defaults fills in zero-valued fields with sensible defaults.
func (cp *ColParams) Defaults() {
	if cp.NTicks == 0 {
		cp.NTicks = 10
	}
}

// Update satisfies the gi.Updater interface and will trigger display update on edits.
func (cp *ColParams) Update() {
	cp.UpdateVals()
	if pl := cp.Plot; pl != nil {
		pl.Update()
	}
}

// CopyFrom copies all params from fr while keeping this struct's plot
// back-pointer intact.
func (cp *ColParams) CopyFrom(fr *ColParams) {
	keep := cp.Plot
	*cp = *fr
	cp.Plot = keep
}

// UpdateVals refreshes derived values, i.e. Color from ColorName when set.
func (cp *ColParams) UpdateVals() {
	if cp.ColorName == "" {
		return
	}
	if clr, err := gist.ColorFromString(string(cp.ColorName), nil); err == nil {
		cp.Color = clr
	}
}

// Label returns the display label: the override Lbl when set, otherwise
// the column name.
func (cp *ColParams) Label() string {
	if cp.Lbl != "" {
		return cp.Lbl
	}
	return cp.Col
}
// PlotTypes are different types of plots
type PlotTypes int32

//go:generate stringer -type=PlotTypes

// KiT_PlotTypes registers PlotTypes with the ki type registry as a plain
// (non-bitflag) enum.
var KiT_PlotTypes = kit.Enums.AddEnum(PlotTypesN, kit.NotBitFlag, nil)

// MarshalJSON encodes the enum via kit's enum JSON helpers.
func (ev PlotTypes) MarshalJSON() ([]byte, error) { return kit.EnumMarshalJSON(ev) }

// UnmarshalJSON decodes the enum via kit's enum JSON helpers.
func (ev *PlotTypes) UnmarshalJSON(b []byte) error { return kit.EnumUnmarshalJSON(ev, b) }

const (
	// XY is a standard line / point plot
	XY PlotTypes = iota

	// Bar plots vertical bars
	Bar

	// PlotTypesN is the number of plot types (sentinel; keep last).
	PlotTypesN
)
package batchnorm
import (
"encoding/gob"
"github.com/nlpodyssey/spago/ag"
"github.com/nlpodyssey/spago/mat"
"github.com/nlpodyssey/spago/mat/float"
"github.com/nlpodyssey/spago/nn"
)
// Interface guard: Model must implement nn.Model.
var _ nn.Model = &Model{}

// Model contains the serializable parameters.
type Model struct {
	nn.Module
	W        nn.Param `spago:"type:weights"`   // learned per-feature gain
	B        nn.Param `spago:"type:biases"`    // learned per-feature bias
	Mean     nn.Param `spago:"type:undefined"` // running mean (not trained by gradient)
	StdDev   nn.Param `spago:"type:undefined"` // running standard deviation (not trained by gradient)
	Momentum nn.Param `spago:"type:undefined"` // smoothing factor for the running statistics
}

// epsilon guards divisions and is also the initial value of W.
const epsilon = 1e-5

// defaultMomentum is used by New when no momentum is supplied.
const defaultMomentum = 0.9

// init registers Model for gob serialization.
func init() {
	gob.Register(&Model{})
}
// NewWithMomentum returns a new model with the supplied size and momentum.
// The running statistics are created with gradients disabled.
func NewWithMomentum[T float.DType](size int, momentum T) *Model {
	m := &Model{
		W:        nn.NewParam(mat.NewInitVecDense[T](size, epsilon)),
		B:        nn.NewParam(mat.NewEmptyVecDense[T](size)),
		Mean:     nn.NewParam(mat.NewEmptyVecDense[T](size), nn.RequiresGrad(false)),
		StdDev:   nn.NewParam(mat.NewEmptyVecDense[T](size), nn.RequiresGrad(false)),
		Momentum: nn.NewParam(mat.NewScalar[T](momentum), nn.RequiresGrad(false)),
	}
	return m
}

// New returns a new model with the supplied size and the default momentum.
func New[T float.DType](size int) *Model {
	return NewWithMomentum[T](size, defaultMomentum)
}
// Forward performs the forward step for each input node and returns the result.
// It normalizes with the stored running statistics (inference mode); the
// statistics are detached from the gradient graph via StopGrad.
func (m *Model) Forward(xs ...ag.Node) []ag.Node {
	meanVector := ag.StopGrad(m.Mean)
	devVector := ag.StopGrad(m.StdDev)
	return m.process(xs, devVector, meanVector)
}

// ForwardT performs the forward step for each input node and returns the result.
// It computes batch statistics from xs (training mode) and folds them into the
// running mean/std-dev before normalizing.
func (m *Model) ForwardT(xs ...ag.Node) []ag.Node {
	meanVector := m.mean(xs)
	devVector := m.stdDev(meanVector, xs)
	m.updateBatchNormParameters(meanVector.Value(), devVector.Value())
	return m.process(xs, devVector, meanVector)
}

// process applies the affine normalization
// y = (x - mean) * (W / (dev + epsilon)) + B to every input node.
func (m *Model) process(xs []ag.Node, devVector ag.Node, meanVector ag.Node) []ag.Node {
	devVector = ag.Div(m.W, ag.AddScalar(devVector, ag.Var(m.W.Value().NewScalar(epsilon))))
	ys := make([]ag.Node, len(xs))
	for i, x := range xs {
		ys[i] = ag.Add(ag.Prod(ag.Sub(x, meanVector), devVector), m.B)
	}
	return ys
}
// updateBatchNormParameters folds freshly computed batch statistics into the
// running mean and standard deviation using exponential smoothing:
// running = momentum*running + (1-momentum)*batch.
func (m *Model) updateBatchNormParameters(meanVector, devVector mat.Matrix) {
	momentum := m.Momentum.Value().Scalar().F64()

	m.Mean.ReplaceValue(
		m.Mean.Value().ProdScalar(momentum).Add(meanVector.ProdScalar(1.0 - momentum)))

	m.StdDev.ReplaceValue(
		m.StdDev.Value().ProdScalar(momentum).Add(devVector.ProdScalar(1.0 - momentum)))
}
// mean computes the element-wise mean of the input nodes.
// NOTE(review): the denominator is len(xs)+epsilon rather than len(xs),
// which slightly biases the mean low — confirm this is intentional.
func (m *Model) mean(xs []ag.Node) ag.Node {
	sumVector := xs[0]
	for i := 1; i < len(xs); i++ {
		sumVector = ag.Add(sumVector, xs[i])
	}

	return ag.DivScalar(sumVector, ag.Var(xs[0].Value().NewScalar(float64(len(xs))+epsilon)))
}

// stdDev computes the standard deviation of the input nodes around
// meanVector, using the same epsilon-augmented denominator as mean
// (population variance, no Bessel correction).
func (m *Model) stdDev(meanVector ag.Node, xs []ag.Node) ag.Node {
	devVector := ag.Node(ag.Var(meanVector.Value().ZerosLike()))
	for _, x := range xs {
		diffVector := ag.Square(ag.Sub(meanVector, x))
		devVector = ag.Add(devVector, diffVector)
	}
	devVector = ag.Sqrt(ag.DivScalar(devVector, ag.Var(xs[0].Value().NewScalar(float64(len(xs))+epsilon))))
	return devVector
}
package tsm1
/*
Tombstone file format:
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโTombstone Fileโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ Header โโ โ โ
โ โ 4 bytes โโ Tombstone Entries โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โ โโ โ โ
โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโTombstone Entryโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โPrefixโโ Reserved โโ Key Length โโ Key โโ Min Time โโ Max Time โ โ
โ โ Bit โโ 7 bits โโ 24 bits โโ N bytes โโ 8 bytes โโ 8 bytes โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โ โโ โโ โโ โโ โโ โ โ
โ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
NOTE: v1, v2 and v3 tombstone supports have been dropped from 2.x. Only v4 is now
supported.
*/
import (
"bufio"
"compress/gzip"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
"os"
"path/filepath"
"strings"
"sync"
"github.com/influxdata/influxdb/v2/pkg/fs"
)
const (
	// headerSize is the size in bytes of the tombstone file header.
	headerSize = 4

	// v4header is the magic number identifying a v4-format tombstone file.
	v4header = 0x1504
)

// errIncompatibleV4Version signals a tombstone file whose header is not the
// supported v4 format.
var errIncompatibleV4Version = errors.New("incompatible v4 version")
// Tombstoner records tombstones when entries are deleted.
type Tombstoner struct {
	// mu guards the mutable state below.
	mu sync.RWMutex

	// Path is the location of the file to record tombstone. This should be the
	// full path to a TSM file.
	Path string

	// FilterFn, when non-nil, restricts tombstoning to keys it returns true for.
	FilterFn func(k []byte) bool

	// cache of the stats for this tombstone
	fileStats []FileStat

	// indicates that the stats may be out of sync with what is on disk and they
	// should be refreshed.
	statsLoaded bool

	// Tombstones that have been written but not flushed to disk yet.
	tombstones []Tombstone

	// These are references used for pending writes that have not been committed. If
	// these are nil, then no pending writes are in progress.
	gz          *gzip.Writer
	bw          *bufio.Writer
	pendingFile *os.File

	// tmp is an 8-byte scratch buffer; lastAppliedOffset tracks progress
	// through the on-disk file (usage not visible in this chunk — verify).
	tmp               [8]byte
	lastAppliedOffset int64

	// Optional observer for when tombstone files are written.
	obs FileStoreObserver
}
// NewTombstoner constructs a Tombstoner for the given path. FilterFn can be nil.
// A no-op observer is installed by default (see WithObserver).
func NewTombstoner(path string, filterFn func(k []byte) bool) *Tombstoner {
	t := &Tombstoner{
		Path:     path,
		FilterFn: filterFn,
	}
	t.obs = noFileStoreObserver{}
	return t
}
// Tombstone represents an individual deletion.
type Tombstone struct {
	// Key is the tombstoned series key.
	Key []byte

	// Prefix indicates if this tombstone entry is a prefix key, meaning all
	// keys with a prefix matching Key should be removed for the [Min, Max] range.
	Prefix bool

	// Min and Max are the min and max unix nanosecond time ranges of Key that are deleted.
	Min, Max int64

	// Predicate stores the marshaled form of some predicate for matching keys.
	Predicate []byte
}

// String renders the tombstone for debugging, e.g. `Key: "cpu", [0, 10] pred:false`.
func (t Tombstone) String() string {
	kind := "Key"
	if t.Prefix {
		kind = "Prefix"
	}
	return fmt.Sprintf("%s: %q, [%d, %d] pred:%v", kind, t.Key, t.Min, t.Max, len(t.Predicate) > 0)
}
// WithObserver sets a FileStoreObserver for when the tombstone file is written.
// A nil observer resets it to the no-op implementation.
func (t *Tombstoner) WithObserver(obs FileStoreObserver) {
	if obs != nil {
		t.obs = obs
		return
	}
	t.obs = noFileStoreObserver{}
}
// AddPrefixRange adds a prefix-based tombstone key with an explicit range.
// The predicate bytes, if any, are stored alongside the entry. The write goes
// to the pending tombstone file and is not durable until Flush is called.
func (t *Tombstoner) AddPrefixRange(key []byte, min, max int64, predicate []byte) error {
	t.mu.Lock()
	defer t.mu.Unlock()
	// If this TSMFile has not been written (mainly in tests), don't write a
	// tombstone because the keys will not be written when it's actually saved.
	if t.Path == "" {
		return nil
	}
	// Cached file stats no longer reflect what will be on disk.
	t.statsLoaded = false
	// Ensure a pending tombstone file is open for appending.
	if err := t.prepareLatest(); err != nil {
		return err
	}
	return t.writeTombstoneV4(t.gz, Tombstone{
		Key: key,
		Min: min,
		Max: max,
		Prefix: true,
		Predicate: predicate,
	})
}
// Add adds all keys, across all timestamps, to the tombstone.
func (t *Tombstoner) Add(keys [][]byte) error {
	return t.AddRange(keys, math.MinInt64, math.MaxInt64)
}
// AddRange adds all keys to the tombstone specifying only the data between min and max to be removed.
// Keys rejected by FilterFn are skipped. The writes are buffered in the
// pending tombstone file and are not durable until Flush is called.
func (t *Tombstoner) AddRange(keys [][]byte, min, max int64) error {
	// Drop leading filtered-out keys so the cheap emptiness check below can
	// short-circuit without taking the lock.
	for t.FilterFn != nil && len(keys) > 0 && !t.FilterFn(keys[0]) {
		keys = keys[1:]
	}
	if len(keys) == 0 {
		return nil
	}
	t.mu.Lock()
	defer t.mu.Unlock()
	// If this TSMFile has not been written (mainly in tests), don't write a
	// tombstone because the keys will not be written when it's actually saved.
	if t.Path == "" {
		return nil
	}
	// Cached file stats no longer reflect what will be on disk.
	t.statsLoaded = false
	if err := t.prepareLatest(); err != nil {
		return err
	}
	for _, k := range keys {
		// Remaining keys must still be filtered individually; only the
		// leading run was trimmed above.
		if t.FilterFn != nil && !t.FilterFn(k) {
			continue
		}
		if err := t.writeTombstoneV4(t.gz, Tombstone{
			Key: k,
			Min: min,
			Max: max,
			Prefix: false,
		}); err != nil {
			return err
		}
	}
	return nil
}
// Flush commits any pending tombstone writes to disk. On commit failure the
// pending temporary file is rolled back (best effort) and the commit error is
// returned.
func (t *Tombstoner) Flush() error {
	t.mu.Lock()
	defer t.mu.Unlock()
	if err := t.commit(); err != nil {
		// Reset our temp references and clean up.
		_ = t.rollback() // best effort; the commit error takes precedence
		return err
	}
	return nil
}
// Rollback discards any pending, uncommitted tombstone writes and removes the
// temporary file backing them.
func (t *Tombstoner) Rollback() error {
	t.mu.Lock()
	defer t.mu.Unlock()
	return t.rollback()
}
// Delete removes all the tombstone files from disk.
func (t *Tombstoner) Delete() error {
	t.mu.Lock()
	defer t.mu.Unlock()
	if err := os.RemoveAll(t.tombstonePath()); err != nil {
		return err
	}
	// Force the file stats to be reloaded and future reads to start from the
	// top of the (now missing) file.
	t.statsLoaded = false
	t.lastAppliedOffset = 0
	return nil
}
// HasTombstones return true if there are any tombstone entries recorded,
// either as a non-empty tombstone file on disk or as pending in-memory
// entries.
func (t *Tombstoner) HasTombstones() bool {
	files := t.TombstoneFiles()

	t.mu.RLock()
	pending := len(t.tombstones)
	t.mu.RUnlock()

	if pending > 0 {
		return true
	}
	return len(files) > 0 && files[0].Size > 0
}
// TombstoneFiles returns any tombstone files associated with Tombstoner's TSM
// file. The stats are loaded from disk on first use and cached until
// invalidated by a write or delete.
func (t *Tombstoner) TombstoneFiles() []FileStat {
	t.mu.RLock()
	if t.statsLoaded {
		stats := t.fileStats
		t.mu.RUnlock()
		return stats
	}
	t.mu.RUnlock()

	stat, err := os.Stat(t.tombstonePath())
	// The original condition was `os.IsNotExist(err) || err != nil`, which is
	// redundant: IsNotExist implies a non-nil error.
	if err != nil {
		t.mu.Lock()
		// The file doesn't exist so record that we tried to load it so
		// we don't continue to keep trying. This is the common case. Any
		// other stat error leaves statsLoaded false so the next call retries.
		t.statsLoaded = os.IsNotExist(err)
		t.fileStats = t.fileStats[:0]
		t.mu.Unlock()
		return nil
	}

	t.mu.Lock()
	t.fileStats = append(t.fileStats[:0], FileStat{
		Path:         t.tombstonePath(),
		CreatedAt:    stat.ModTime().UnixNano(),
		LastModified: stat.ModTime().UnixNano(),
		Size:         uint32(stat.Size()),
	})
	t.statsLoaded = true
	stats := t.fileStats
	t.mu.Unlock()

	return stats
}
// Walk calls fn for every Tombstone under the Tombstoner. A missing tombstone
// file is not an error; a file with an unrecognized header is.
func (t *Tombstoner) Walk(fn func(t Tombstone) error) error {
	t.mu.Lock()
	defer t.mu.Unlock()

	f, err := os.Open(t.tombstonePath())
	if os.IsNotExist(err) {
		return nil
	} else if err != nil {
		return err
	}
	defer f.Close()

	// Read the 4-byte header magic. io.ReadFull guards against short reads,
	// which a bare f.Read would silently allow (Read may return n < 4 with a
	// nil error).
	var b [4]byte
	if _, err := io.ReadFull(f, b[:]); err != nil {
		return errors.New("unable to read header")
	}
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		return err
	}

	header := binary.BigEndian.Uint32(b[:])
	if header == v4header {
		return t.readTombstoneV4(f, fn)
	}
	return errors.New("invalid tombstone file")
}
// prepareLatest opens the temporary tombstone file that pending writes append
// to, creating it if needed. If a pending file is already open this is a
// no-op. An existing on-disk v4 tombstone file is copied into the temporary
// file so that commit replaces it with a superset of its entries. Callers
// must hold t.mu.
func (t *Tombstoner) prepareLatest() error {
	if t.pendingFile != nil { // There is already a pending tombstone file open.
		return nil
	}
	tmpPath := fmt.Sprintf("%s.%s", t.tombstonePath(), CompactionTempExtension)
	// O_EXCL ensures we fail rather than clobber a concurrent temp file.
	tmp, err := os.OpenFile(tmpPath, os.O_CREATE|os.O_RDWR|os.O_EXCL, 0666)
	if err != nil {
		return err
	}
	// removeTmp cleans up the partially created temp file on any error below.
	removeTmp := func() {
		tmp.Close()
		os.Remove(tmp.Name())
	}
	// Copy the existing v4 file if it exists
	f, err := os.Open(t.tombstonePath())
	if err != nil && !os.IsNotExist(err) {
		// An unexpected error should be returned
		removeTmp()
		return err
	} else if err == nil {
		// No error so load the tombstone file.
		defer f.Close()
		var b [4]byte
		// NOTE(review): a short read (n < 4) or read error is silently
		// ignored; in that case nothing is copied and, since the outer err is
		// nil, no header is written below either — presumably existing files
		// are never shorter than 4 bytes. Verify against the writer.
		if n, err := f.Read(b[:]); n == 4 && err == nil {
			header := binary.BigEndian.Uint32(b[:])
			// There is an existing tombstone on disk and it's not a v4.
			// We can't support it.
			if header != v4header {
				removeTmp()
				return errIncompatibleV4Version
			}
			// Seek back to the beginning we copy the header
			if _, err := f.Seek(0, io.SeekStart); err != nil {
				removeTmp()
				return err
			}
			// Copy the whole file
			if _, err := io.Copy(tmp, f); err != nil {
				f.Close()
				removeTmp()
				return err
			}
		}
	}
	// Else, the error was that the file does not exist. Create a new one.
	var b [8]byte
	bw := bufio.NewWriterSize(tmp, 64*1024)
	// Write the header only if the file is new
	if os.IsNotExist(err) {
		binary.BigEndian.PutUint32(b[:4], v4header)
		if _, err := bw.Write(b[:4]); err != nil {
			removeTmp()
			return err
		}
	}
	// Write the tombstones
	gz := gzip.NewWriter(bw)
	// Record the pending-write state; commit or rollback will clear it.
	t.pendingFile = tmp
	t.gz = gz
	t.bw = bw
	return nil
}
// commit finalizes the pending tombstone file: it closes the gzip stream,
// flushes and fsyncs the temporary file, then renames it over the real
// tombstone path and syncs the containing directory so the rename is durable.
// It is a no-op when there are no pending writes. Callers must hold t.mu.
func (t *Tombstoner) commit() error {
	// No pending writes
	if t.pendingFile == nil {
		return nil
	}
	// Close the gzip stream first so its footer lands in the buffered writer.
	if err := t.gz.Close(); err != nil {
		return err
	}
	if err := t.bw.Flush(); err != nil {
		return err
	}
	// fsync the file to flush the write
	if err := t.pendingFile.Sync(); err != nil {
		return err
	}
	tmpFilename := t.pendingFile.Name()
	t.pendingFile.Close()
	if err := t.obs.FileFinishing(tmpFilename); err != nil {
		return err
	}
	// Atomically replace the current tombstone file with the new contents.
	if err := fs.RenameFileWithReplacement(tmpFilename, t.tombstonePath()); err != nil {
		return err
	}
	if err := fs.SyncDir(filepath.Dir(t.tombstonePath())); err != nil {
		return err
	}
	// Clear the pending-write state now that the file is durable.
	t.pendingFile = nil
	t.bw = nil
	t.gz = nil
	return nil
}
// rollback discards the pending tombstone file, clearing all pending-write
// state and removing the temporary file. It is a no-op when nothing is
// pending. Callers must hold t.mu.
func (t *Tombstoner) rollback() error {
	if t.pendingFile == nil {
		return nil
	}

	name := t.pendingFile.Name()
	t.pendingFile.Close()
	t.gz, t.bw, t.pendingFile = nil, nil, nil

	return os.Remove(name)
}
// readTombstoneV4 reads the fourth version of tombstone files that are capable
// of storing multiple v3 files appended together.
//
// Each appended section is an independent gzip stream; Multistream(false)
// makes the reader stop at each stream boundary so the sections can be
// decoded one at a time. Reading resumes from lastAppliedOffset so entries
// already applied are not replayed. Callers must hold t.mu.
func (t *Tombstoner) readTombstoneV4(f *os.File, fn func(t Tombstone) error) error {
	// Skip header, already checked earlier
	if t.lastAppliedOffset != 0 {
		if _, err := f.Seek(t.lastAppliedOffset, io.SeekStart); err != nil {
			return err
		}
	} else {
		if _, err := f.Seek(headerSize, io.SeekStart); err != nil {
			return err
		}
	}
	const kmask = int64(0xff000000) // Mask for non key-length bits
	br := bufio.NewReaderSize(f, 64*1024)
	gr, err := gzip.NewReader(br)
	if err == io.EOF {
		return nil
	} else if err != nil {
		return err
	}
	defer gr.Close()
	var ( // save these buffers across loop iterations to avoid allocations
		keyBuf []byte
		predBuf []byte
	)
	for {
		gr.Multistream(false)
		// Decode one gzip stream worth of entries. Each entry is a 4-byte
		// length/flag word, the key bytes, 8-byte min and max timestamps,
		// and (when flagged) an 8-byte predicate length plus the predicate.
		if err := func() error {
			for {
				var buf [8]byte
				// EOF/ErrUnexpectedEOF here means the stream is exhausted.
				if _, err = io.ReadFull(gr, buf[:4]); err == io.EOF || err == io.ErrUnexpectedEOF {
					return nil
				} else if err != nil {
					return err
				}
				keyLen := int64(binary.BigEndian.Uint32(buf[:4]))
				prefix := keyLen>>31&1 == 1 // Prefix is set according to whether the highest bit is set.
				hasPred := keyLen>>30&1 == 1
				// Remove 8 MSB to get correct length.
				keyLen &^= kmask
				if int64(len(keyBuf)) < keyLen {
					keyBuf = make([]byte, keyLen)
				}
				// cap slice protects against invalid usages of append in callback
				key := keyBuf[:keyLen:keyLen]
				if _, err := io.ReadFull(gr, key); err != nil {
					return err
				}
				if _, err := io.ReadFull(gr, buf[:8]); err != nil {
					return err
				}
				min := int64(binary.BigEndian.Uint64(buf[:8]))
				if _, err := io.ReadFull(gr, buf[:8]); err != nil {
					return err
				}
				max := int64(binary.BigEndian.Uint64(buf[:8]))
				var predicate []byte
				if hasPred {
					if _, err := io.ReadFull(gr, buf[:8]); err != nil {
						return err
					}
					predLen := binary.BigEndian.Uint64(buf[:8])
					if uint64(len(predBuf)) < predLen {
						predBuf = make([]byte, predLen)
					}
					// cap slice protects against invalid usages of append in callback
					predicate = predBuf[:predLen:predLen]
					if _, err := io.ReadFull(gr, predicate); err != nil {
						return err
					}
				}
				if err := fn(Tombstone{
					Key: key,
					Min: min,
					Max: max,
					Prefix: prefix,
					Predicate: predicate,
				}); err != nil {
					return err
				}
			}
		}(); err != nil {
			return err
		}
		// NOTE(review): pending in-memory tombstones are passed to fn after
		// every gzip stream, so with multiple appended streams fn can see
		// them more than once — confirm callers tolerate the duplicates.
		for _, t := range t.tombstones {
			if err := fn(t); err != nil {
				return err
			}
		}
		// Advance to the next appended gzip stream; EOF means no more.
		// NOTE(review): a non-EOF Reset error is not returned here; the next
		// read through gr is expected to surface it.
		err = gr.Reset(br)
		if err == io.EOF {
			break
		}
	}
	// Save the position of tombstone file so we don't re-apply the same set again if there are
	// more deletes.
	pos, err := f.Seek(0, io.SeekCurrent)
	if err != nil {
		return err
	}
	t.lastAppliedOffset = pos
	return nil
}
// tombstonePath derives the tombstone file name for the Tombstoner's TSM
// file, e.g. 0000001.tsm1 -> 0000001.tombstone. A path already ending in
// "tombstone" is returned unchanged.
func (t *Tombstoner) tombstonePath() string {
	if strings.HasSuffix(t.Path, "tombstone") {
		return t.Path
	}

	// Strip any extension (e.g. ".tsm1") from the base name, then attach the
	// ".tombstone" suffix in the same directory.
	base := filepath.Base(t.Path)
	base = strings.TrimSuffix(base, filepath.Ext(base))
	return filepath.Join(filepath.Dir(t.Path), base+".tombstone")
}
func (t *Tombstoner) writeTombstoneV4(dst io.Writer, ts Tombstone) error {
maxKeyLen := 0x00ffffff // 24 bit key length. Top 8 bits for other information.
// Maximum key length. Leaves 8 spare bits.
if len(ts.Key) > maxKeyLen {
return fmt.Errorf("key has length %d, maximum allowed key length %d", len(ts.Key), maxKeyLen)
}
l := uint32(len(ts.Key))
if ts.Prefix {
// A mask to set the prefix bit on a tombstone.
l |= 1 << 31
}
if len(ts.Predicate) > 0 {
// A mask to set the predicate bit on a tombstone
l |= 1 << 30
}
binary.BigEndian.PutUint32(t.tmp[:4], l)
if _, err := dst.Write(t.tmp[:4]); err != nil {
return err
}
if _, err := dst.Write([]byte(ts.Key)); err != nil {
return err
}
binary.BigEndian.PutUint64(t.tmp[:], uint64(ts.Min))
if _, err := dst.Write(t.tmp[:]); err != nil {
return err
}
binary.BigEndian.PutUint64(t.tmp[:], uint64(ts.Max))
if _, err := dst.Write(t.tmp[:]); err != nil {
return err
}
if len(ts.Predicate) > 0 {
binary.BigEndian.PutUint64(t.tmp[:], uint64(len(ts.Predicate)))
if _, err := dst.Write(t.tmp[:]); err != nil {
return err
}
if _, err := dst.Write(ts.Predicate); err != nil {
return err
}
}
return nil
} | tsdb/tsm1/tombstone.go | 0.650245 | 0.646586 | tombstone.go | starcoder |
package zstring
import (
"errors"
"strings"
)
// Cut splits val around the first occurrence of separator, returning the text
// before and after it. found reports whether the separator was present; if it
// was not, Cut returns val, "", false.
func Cut(val string, separator string) (left, right string, found bool) {
	i := strings.Index(val, separator)
	if i < 0 {
		return val, "", false
	}
	return val[:i], val[i+len(separator):], true
}
// CutAny cuts the string at the first of the given separators that occurs in
// it, trying each separator in order. (Fixes the misspelled "seperators"
// parameter and "occurence" in the original doc.)
func CutAny(val string, separators ...string) (left, right string, found bool) {
	for _, sep := range separators {
		left, right, found = Cut(val, sep)
		if found {
			return
		}
	}
	return
}
// CutOut extracts the part of a string surrounded by two separators:
// everything after the first occurrence of leftBound up to the following
// occurrence of rightBound. found reports whether both bounds were present.
func CutOut(val, leftBound, rightBound string) (remaining string, found bool) {
	if _, after, ok := Cut(val, leftBound); ok {
		remaining, _, found = Cut(after, rightBound)
		return remaining, found
	}
	return "", false
}
// CutOutAny extracts the part of a string surrounded by any of several pairs
// of separators, trying each (leftBounds[i], rightBounds[i]) pair in order.
// It panics when the two bound slices differ in length.
func CutOutAny(val string, leftBounds []string, rightBounds []string) (remaining string, found bool) {
	if len(leftBounds) != len(rightBounds) {
		panic(errors.New("left and right bounds must be the same length"))
	}
	for i, lb := range leftBounds {
		if remaining, found = CutOut(val, lb, rightBounds[i]); found {
			return remaining, found
		}
	}
	return remaining, false
}
// CutAt splits val at the given byte index. A negative index counts back from
// the end of the string. Out-of-range indexes are clamped: an index past the
// end returns (val, ""), and a negative index reaching before the start
// returns ("", val) — the original panicked on the latter (val[:n] with
// negative n).
func CutAt(val string, index int) (left, right string) {
	if index < 0 {
		index += len(val)
		if index < 0 {
			// The negative offset reaches past the start of the string.
			return "", val
		}
	}
	if index < len(val) {
		return val[:index], val[index:]
	}
	return val, ""
}
// ReplaceManyWithOne is like strings.ReplaceAll, but replaces every
// occurrence of each needle with the same replacement string.
func ReplaceManyWithOne(val string, needles []string, replacement string) string {
	result := val
	for _, needle := range needles {
		result = strings.ReplaceAll(result, needle, replacement)
	}
	return result
}
// TrimSuffixes is like strings.TrimSuffix but removes each of the given
// suffixes in turn, at most once each, in the order supplied.
func TrimSuffixes(val string, suffixes ...string) string {
	trimmed := val
	for _, suffix := range suffixes {
		trimmed = strings.TrimSuffix(trimmed, suffix)
	}
	return trimmed
}
// TrimPrefixes is like strings.TrimPrefix but removes each of the given
// prefixes in turn, at most once each, in the order supplied.
func TrimPrefixes(val string, prefixes ...string) string {
	trimmed := val
	for _, prefix := range prefixes {
		trimmed = strings.TrimPrefix(trimmed, prefix)
	}
	return trimmed
}
// IsUppercase reports whether the string contains no lowercase letters.
// Characters without case (digits, punctuation, spaces) do not affect the
// result, so the empty string is considered uppercase.
func IsUppercase(val string) bool {
	return val == strings.ToUpper(val)
}
// IsLowercase reports whether the string contains no uppercase letters.
// Characters without case (digits, punctuation, spaces) do not affect the
// result, so the empty string is considered lowercase.
func IsLowercase(val string) bool {
	return val == strings.ToLower(val)
}
// CamelToSnake converts a camel-cased string to snake case, joining the words
// with separator ("_" when empty). A new word starts at each uppercase byte,
// but only once the current word has exceeded minWordSize bytes; already
// snake-cased strings pass through unchanged. Comparison is byte-based, so
// only ASCII case boundaries are detected.
func CamelToSnake(val, separator string, minWordSize int) string {
	if separator == "" {
		separator = "_"
	}

	// Collect the byte offsets at which each word starts.
	var boundaries []int
	runLen := 0
	for i := range val {
		runLen++
		ch := val[i : i+1]
		if i == 0 || (ch != strings.ToLower(ch) && runLen > minWordSize) {
			boundaries = append(boundaries, i)
			runLen = 0
		}
	}
	boundaries = append(boundaries, len(val))

	// Lowercase each word and join them with the separator.
	words := make([]string, 0, len(boundaries)-1)
	for i := 0; i+1 < len(boundaries); i++ {
		words = append(words, strings.ToLower(val[boundaries[i]:boundaries[i+1]]))
	}
	return strings.Join(words, separator)
}
// SnakeToCamel converts a snake-cased string to camel case by normalizing via
// CamelToSnake first (so already-camel-cased input is handled), then
// capitalizing each separator-delimited word. A string with no separator is
// returned as-is.
//
// The original duplicated an always-true `len(p) > 0` inner check; it is
// removed, and the quadratic `res +=` concatenation is replaced with a
// strings.Builder. Behavior is unchanged.
func SnakeToCamel(val, separator string, minWordSize int) (res string) {
	if separator == "" {
		separator = "_"
	}
	val = CamelToSnake(val, separator, minWordSize)
	parts := strings.Split(val, separator)
	if len(parts) == 1 {
		return val
	}
	var b strings.Builder
	b.Grow(len(val))
	for _, p := range parts {
		if p == "" {
			continue
		}
		b.WriteString(strings.ToUpper(p[:1]))
		// p[1:] is "" for one-byte words, so this is safe for any non-empty p.
		b.WriteString(strings.ToLower(p[1:]))
	}
	return b.String()
}
// func SplitMany(val string, separators ...string) (parts []string) {
// 	parts = append(parts, val)
// 	for _, sep := range separators {
// 		for i, part := range parts {
// 			vals := strings.Split(part, sep)
// 			if len(vals) > 1 {
// 				// Replace existing element with all vals
// 				parts = append(parts[:i], append(vals, parts[i+1:]...)...)
// 			}
// 		}
// 	}
// 	return
// }
package univ
import (
"reflect"
"strconv"
)
// StructsIntSlice returns a slice of int. For more info refer to Slice types StructIntSlice() method.
func StructsIntSlice(s interface{}, fieldName string) []int {
	return NewSlice(s).StructIntSlice(fieldName)
}
// StructsUintSlice returns a slice of uint. For more info refer to Slice types StructUintSlice() method.
func StructsUintSlice(s interface{}, fieldName string) []uint {
	return NewSlice(s).StructUintSlice(fieldName)
}
// StructsInt64Slice returns a slice of int64. For more info refer to Slice types StructInt64Slice() method.
func StructsInt64Slice(s interface{}, fieldName string) []int64 {
	return NewSlice(s).StructInt64Slice(fieldName)
}
// StructsUint64Slice returns a slice of uint64. For more info refer to Slice types StructUint64Slice() method.
func StructsUint64Slice(s interface{}, fieldName string) []uint64 {
	return NewSlice(s).StructUint64Slice(fieldName)
}
// StructStringSlice returns a slice of string. For more info refer to Slice types StructStringSlice() method.
// NOTE(review): unlike its siblings this wrapper lacks the "Structs" prefix;
// renaming would break callers, so the inconsistency is only documented here.
func StructStringSlice(s interface{}, fieldName string) []string {
	return NewSlice(s).StructStringSlice(fieldName)
}
// IntSlice returns a slice of int. For more info refer to Slice types IntSlice() method.
func IntSlice(s interface{}) []int {
	return NewSlice(s).IntSlice()
}
// UintSlice returns a slice of uint. For more info refer to Slice types UintSlice() method.
func UintSlice(s interface{}) []uint {
	return NewSlice(s).UintSlice()
}
// Int8Slice returns a slice of int8. For more info refer to Slice types Int8Slice() method.
func Int8Slice(s interface{}) []int8 {
	return NewSlice(s).Int8Slice()
}
// Uint8Slice returns a slice of uint8. For more info refer to Slice types Uint8Slice() method.
func Uint8Slice(s interface{}) []uint8 {
	return NewSlice(s).Uint8Slice()
}
// Int16Slice returns a slice of int16. For more info refer to Slice types Int16Slice() method.
func Int16Slice(s interface{}) []int16 {
	return NewSlice(s).Int16Slice()
}
// Uint16Slice returns a slice of uint16. For more info refer to Slice types Uint16Slice() method.
func Uint16Slice(s interface{}) []uint16 {
	return NewSlice(s).Uint16Slice()
}
// Int32Slice returns a slice of int32. For more info refer to Slice types Int32Slice() method.
func Int32Slice(s interface{}) []int32 {
	return NewSlice(s).Int32Slice()
}
// Uint32Slice returns a slice of uint32. For more info refer to Slice types Uint32Slice() method.
func Uint32Slice(s interface{}) []uint32 {
	return NewSlice(s).Uint32Slice()
}
// Int64Slice returns a slice of int64. For more info refer to Slice types Int64Slice() method.
func Int64Slice(s interface{}) []int64 {
	return NewSlice(s).Int64Slice()
}
// Uint64Slice returns a slice of uint64. For more info refer to Slice types Uint64Slice() method.
func Uint64Slice(s interface{}) []uint64 {
	return NewSlice(s).Uint64Slice()
}
// Slice wraps a reflect.Value that is known to hold a slice or an array.
type Slice struct {
	value reflect.Value
}

// NewSlice returns a new *Slice wrapping s. Pointers are dereferenced first.
// It panics if s (after dereferencing) is not a slice or an array.
func NewSlice(s interface{}) *Slice {
	v := reflect.Indirect(reflect.ValueOf(s))
	switch v.Kind() {
	case reflect.Slice, reflect.Array:
		return &Slice{value: v}
	default:
		panic("NewSlice: require a slice or array")
	}
}
// StructIntSlice extracts the given s slice's every element, which is struct, to []int by the field.
// It panics if the s's element is not a struct, the field does not exist, or the value of the field is not integer or float.
func (sf *Slice) StructIntSlice(fieldName string) []int {
	length := sf.value.Len()
	slice := make([]int, length)
	for i := 0; i < length; i++ {
		v := sf.structFieldVal(i, fieldName)
		slice[i] = int(valueInteger(v))
	}
	return slice
}
// StructUintSlice extracts the given s slice's every element, which is struct, to []uint by the field.
// It panics if the s's element is not a struct, the field does not exist, or the value of the field is not integer or float.
func (sf *Slice) StructUintSlice(fieldName string) []uint {
	length := sf.value.Len()
	slice := make([]uint, length)
	for i := 0; i < length; i++ {
		v := sf.structFieldVal(i, fieldName)
		slice[i] = uint(valueInteger(v))
	}
	return slice
}
// StructInt64Slice extracts the given s slice's every element, which is struct, to []int64 by the field.
// It panics if the s's element is not a struct, the field does not exist, or the value of the field is not integer or float.
func (sf *Slice) StructInt64Slice(fieldName string) []int64 {
	length := sf.value.Len()
	slice := make([]int64, length)
	for i := 0; i < length; i++ {
		v := sf.structFieldVal(i, fieldName)
		slice[i] = int64(valueInteger(v))
	}
	return slice
}
// StructUint64Slice extracts the given s slice's every element, which is struct, to []uint64 by the field.
// It panics if the s's element is not a struct, the field does not exist, or the value of the field is not integer or float.
func (sf *Slice) StructUint64Slice(fieldName string) []uint64 {
	length := sf.value.Len()
	slice := make([]uint64, length)
	for i := 0; i < length; i++ {
		v := sf.structFieldVal(i, fieldName)
		slice[i] = valueInteger(v)
	}
	return slice
}
// StructStringSlice extracts the given s slice's every element, which is struct, to []string by the field.
// It panics if the s's element is not a struct, the field does not exist, or the value of the field is not integer, float or string.
func (sf *Slice) StructStringSlice(fieldName string) []string {
	length := sf.value.Len()
	slice := make([]string, length)
	for i := 0; i < length; i++ {
		v := sf.structFieldVal(i, fieldName)
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = strconv.FormatInt(v.Int(), 10)
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = strconv.FormatUint(v.Uint(), 10)
		case reflect.String:
			slice[i] = v.String()
		case reflect.Float32:
			// bitSize 32 rounds through float32 precision before formatting.
			slice[i] = strconv.FormatFloat(v.Float(), 'f', -1, 32)
		case reflect.Float64:
			slice[i] = strconv.FormatFloat(v.Float(), 'f', -1, 64)
		default:
			panic("StructStringSlice: the value of field is not integer or float or string.")
		}
	}
	return slice
}
// IntSlice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to int.
// It panics if an element has any other kind. The panic messages below name
// the actual method rather than the "StructSlice" prefix copy-pasted in the
// original.
func (sf *Slice) IntSlice() []int {
	length := sf.value.Len()
	slice := make([]int, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = int(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = int(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = int(v.Uint())
		default:
			panic("IntSlice: the element is not integer or float.")
		}
	}
	return slice
}
// UintSlice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to uint.
// It panics if an element has any other kind.
func (sf *Slice) UintSlice() []uint {
	length := sf.value.Len()
	slice := make([]uint, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = uint(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = uint(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = uint(v.Uint())
		default:
			panic("UintSlice: the element is not integer or float.")
		}
	}
	return slice
}
// Int8Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to int8.
// It panics if an element has any other kind.
func (sf *Slice) Int8Slice() []int8 {
	length := sf.value.Len()
	slice := make([]int8, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = int8(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = int8(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = int8(v.Uint())
		default:
			panic("Int8Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Uint8Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to uint8.
// It panics if an element has any other kind.
func (sf *Slice) Uint8Slice() []uint8 {
	length := sf.value.Len()
	slice := make([]uint8, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = uint8(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = uint8(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = uint8(v.Uint())
		default:
			panic("Uint8Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Int16Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to int16.
// It panics if an element has any other kind.
func (sf *Slice) Int16Slice() []int16 {
	length := sf.value.Len()
	slice := make([]int16, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = int16(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = int16(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = int16(v.Uint())
		default:
			panic("Int16Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Uint16Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to uint16.
// It panics if an element has any other kind.
func (sf *Slice) Uint16Slice() []uint16 {
	length := sf.value.Len()
	slice := make([]uint16, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = uint16(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = uint16(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = uint16(v.Uint())
		default:
			panic("Uint16Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Int32Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to int32.
// It panics if an element has any other kind.
func (sf *Slice) Int32Slice() []int32 {
	length := sf.value.Len()
	slice := make([]int32, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = int32(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = int32(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = int32(v.Uint())
		default:
			panic("Int32Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Uint32Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to uint32.
// It panics if an element has any other kind.
func (sf *Slice) Uint32Slice() []uint32 {
	length := sf.value.Len()
	slice := make([]uint32, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = uint32(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = uint32(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = uint32(v.Uint())
		default:
			panic("Uint32Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Int64Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to int64.
// It panics if an element has any other kind.
func (sf *Slice) Int64Slice() []int64 {
	length := sf.value.Len()
	slice := make([]int64, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = v.Int()
		case reflect.Float32, reflect.Float64:
			slice[i] = int64(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = int64(v.Uint())
		default:
			panic("Int64Slice: the element is not integer or float.")
		}
	}
	return slice
}
// Uint64Slice converts every element of the underlying slice or array, which
// must be an integer or float (pointer elements are dereferenced), to uint64.
// It panics if an element has any other kind.
func (sf *Slice) Uint64Slice() []uint64 {
	length := sf.value.Len()
	slice := make([]uint64, length)
	for i := 0; i < length; i++ {
		v := reflect.Indirect(sf.value.Index(i))
		switch v.Kind() { // nolint: exhaustive
		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			slice[i] = uint64(v.Int())
		case reflect.Float32, reflect.Float64:
			slice[i] = uint64(v.Float())
		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
			slice[i] = v.Uint()
		default:
			panic("Uint64Slice: the element is not integer or float.")
		}
	}
	return slice
}
// structFieldVal returns the value of the named field of the i-th element of
// the wrapped slice. Pointer elements are dereferenced. It panics if the
// element is not a struct (or pointer to one) or if the field does not exist.
func (sf *Slice) structFieldVal(i int, fieldName string) reflect.Value {
	val := reflect.Indirect(sf.value.Index(i))

	// The original also tested `val.Kind() != reflect.Invalid`, which is
	// redundant: reflect.Invalid never equals reflect.Struct.
	if val.Kind() != reflect.Struct {
		panic("structFieldVal: the slice's element is not struct or pointer of struct!")
	}

	v := val.FieldByName(fieldName)
	if !v.IsValid() {
		panic("structFieldVal: the struct of slice's element has not the field:" + fieldName)
	}
	return v
}
// Name returns the name of the wrapped value's type within its package.
// NOTE(review): reflect returns "" for unnamed types such as []int; only
// named slice/array types (e.g. `type Ints []int`) yield a non-empty name.
func (sf *Slice) Name() string {
	return sf.value.Type().Name()
}
func valueInteger(v reflect.Value) uint64 {
switch v.Kind() { // nolint: exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return uint64(v.Int())
case reflect.Float32, reflect.Float64:
return uint64(v.Float())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return v.Uint()
default:
panic("StructSlice: the value of field is not integer or float.")
}
} | lib/univ/slice.go | 0.850251 | 0.526038 | slice.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.